# forgejo/.github/workflows/pull-db-tests.yml (mirror of https://codeberg.org/forgejo/forgejo)
name: db-tests
on:
pull_request:
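# Keep only one run per PR branch: github.head_ref groups runs of the same PR, and the
# run ID is used as a fallback so runs outside pull requests never cancel each other.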
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
jobs:
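# files-changed is a reusable workflow that reports which areas of the tree this PR
# touches; every test job below is gated on its backend/actions outputs.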
files-changed:
uses: ./.github/workflows/files-changed.yml
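# Migration and backend tests against PostgreSQL 15, with OpenLDAP and MinIO as
# auxiliary service containers.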
test-pgsql:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
services:
pgsql:
image: postgres:15
env:
POSTGRES_DB: test
POSTGRES_PASSWORD: postgres
ports:
- "5432:5432"
ldap:
image: gitea/test-openldap:latest
ports:
- "389:389"
- "636:636"
minio:
# As GitHub Actions doesn't support overriding a service's "entrypoint", we use a
# non-official image whose entrypoint is already set to "minio server /data".
image: bitnami/minio:2021.3.17
env:
MINIO_ACCESS_KEY: 123456
MINIO_SECRET_KEY: 12345678
ports:
- "9000:9000"
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v4
with:
go-version: "~1.21"
check-latest: true
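# The service containers publish their ports on localhost, so alias their hostnames to
# 127.0.0.1; the step is skipped when the job itself already runs inside a container.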
- name: Add hosts to /etc/hosts
run: '[ -e "/.dockerenv" ] || [ -e "/run/.containerenv" ] || echo "127.0.0.1 pgsql ldap minio" | sudo tee -a /etc/hosts'
- run: make deps-backend
- run: make backend
env:
TAGS: bindata
- run: make test-pgsql-migration test-pgsql
timeout-minutes: 50
env:
TAGS: bindata gogit
RACE_ENABLED: true
TEST_TAGS: gogit
TEST_LDAP: 1
USE_REPO_TEST_DIR: 1
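# SQLite tests need no external database service; the sqlite and sqlite_unlock_notify
# build tags compile in the embedded driver.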
test-sqlite:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v4
with:
go-version: "~1.21"
check-latest: true
- run: make deps-backend
- run: make backend
env:
TAGS: bindata gogit sqlite sqlite_unlock_notify
- run: make test-sqlite-migration test-sqlite
timeout-minutes: 50
env:
TAGS: bindata gogit sqlite sqlite_unlock_notify
RACE_ENABLED: true
TEST_TAGS: gogit sqlite sqlite_unlock_notify
USE_REPO_TEST_DIR: 1
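# Unit tests, run once with the default git backend and once with gogit, against a full
# set of services: MySQL 5.7, Elasticsearch, Meilisearch, SMTP/IMAP, Redis and MinIO.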
test-unit:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
services:
mysql:
image: mysql:5.7
env:
MYSQL_ALLOW_EMPTY_PASSWORD: true
MYSQL_DATABASE: test
ports:
- "3306:3306"
elasticsearch:
image: elasticsearch:7.5.0
env:
discovery.type: single-node
ports:
- "9200:9200"
meilisearch:
image: getmeili/meilisearch:v1.2.0
env:
MEILI_ENV: development # disable auth
ports:
- "7700:7700"
smtpimap:
image: tabascoterrier/docker-imap-devel:latest
ports:
- "25:25"
- "143:143"
- "587:587"
- "993:993"
redis:
image: redis
options: >- # wait until redis has started
--health-cmd "redis-cli ping"
--health-interval 5s
--health-timeout 3s
--health-retries 10
ports:
- 6379:6379
minio:
image: bitnami/minio:2021.3.17
env:
MINIO_ACCESS_KEY: 123456
MINIO_SECRET_KEY: 12345678
ports:
- "9000:9000"
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v4
with:
go-version: "~1.21"
check-latest: true
- name: Add hosts to /etc/hosts
run: '[ -e "/.dockerenv" ] || [ -e "/run/.containerenv" ] || echo "127.0.0.1 mysql elasticsearch meilisearch smtpimap" | sudo tee -a /etc/hosts'
- run: make deps-backend
- run: make backend
env:
TAGS: bindata
- name: unit-tests
run: make unit-test-coverage test-check
env:
TAGS: bindata
RACE_ENABLED: true
GITHUB_READ_TOKEN: ${{ secrets.GITHUB_READ_TOKEN }}
- name: unit-tests-gogit
run: make unit-test-coverage test-check
env:
TAGS: bindata gogit
RACE_ENABLED: true
GITHUB_READ_TOKEN: ${{ secrets.GITHUB_READ_TOKEN }}
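# Migration and integration tests against MySQL 5.7; Elasticsearch backs the code
# indexer (see TEST_INDEXER_CODE_ES_URL) and smtpimap provides a mail server.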
test-mysql5:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
services:
mysql:
image: mysql:5.7
env:
MYSQL_ALLOW_EMPTY_PASSWORD: true
MYSQL_DATABASE: test
ports:
- "3306:3306"
elasticsearch:
image: elasticsearch:7.5.0
env:
discovery.type: single-node
ports:
- "9200:9200"
smtpimap:
image: tabascoterrier/docker-imap-devel:latest
ports:
- "25:25"
- "143:143"
- "587:587"
- "993:993"
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v4
with:
go-version: "~1.21"
check-latest: true
- name: Add hosts to /etc/hosts
run: '[ -e "/.dockerenv" ] || [ -e "/run/.containerenv" ] || echo "127.0.0.1 mysql elasticsearch smtpimap" | sudo tee -a /etc/hosts'
- run: make deps-backend
- run: make backend
env:
TAGS: bindata
- name: run tests
run: make test-mysql-migration integration-test-coverage
env:
TAGS: bindata
RACE_ENABLED: true
USE_REPO_TEST_DIR: 1
TEST_INDEXER_CODE_ES_URL: "http://elastic:changeme@elasticsearch:9200"
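# Migration and database tests against MySQL 8; no auxiliary services are required.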
test-mysql8:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
services:
mysql8:
image: mysql:8
env:
MYSQL_ALLOW_EMPTY_PASSWORD: true
MYSQL_DATABASE: testgitea
ports:
- "3306:3306"
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v4
with:
go-version: "~1.21"
check-latest: true
- name: Add hosts to /etc/hosts
run: '[ -e "/.dockerenv" ] || [ -e "/run/.containerenv" ] || echo "127.0.0.1 mysql8" | sudo tee -a /etc/hosts'
- run: make deps-backend
- run: make backend
env:
TAGS: bindata
- run: make test-mysql8-migration test-mysql8
timeout-minutes: 50
env:
TAGS: bindata
USE_REPO_TEST_DIR: 1
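# Migration and database tests against Microsoft SQL Server (Standard edition via MSSQL_PID).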
test-mssql:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
services:
mssql:
image: mcr.microsoft.com/mssql/server:latest
env:
ACCEPT_EULA: Y
MSSQL_PID: Standard
SA_PASSWORD: MwantsaSecurePassword1
ports:
- "1433:1433"
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v4
with:
go-version: "~1.21"
check-latest: true
- name: Add hosts to /etc/hosts
run: '[ -e "/.dockerenv" ] || [ -e "/run/.containerenv" ] || echo "127.0.0.1 mssql" | sudo tee -a /etc/hosts'
- run: make deps-backend
- run: make backend
env:
TAGS: bindata
- run: make test-mssql-migration test-mssql
timeout-minutes: 50
env:
TAGS: bindata
USE_REPO_TEST_DIR: 1