chore(server): separate elasticsearch to run independently (#12299)

close CLOUD-217

<!-- This is an auto-generated comment: release notes by coderabbit.ai -->
## Summary by CodeRabbit

- **New Features**
  - Introduced a dedicated test job for Elasticsearch, running only Elasticsearch-specific tests during CI.
- **Chores**
  - Enhanced server test workflows with explicit setup steps and automated coverage uploads.
  - Improved test suite structure for Elasticsearch provider to enable conditional and asynchronous test execution based on environment variables.
<!-- end of auto-generated comment: release notes by coderabbit.ai -->
This commit is contained in:
fengmk2
2025-05-15 06:16:06 +00:00
parent fabcdd3b2c
commit 6a13d69dea
2 changed files with 110 additions and 25 deletions

View File

@@ -586,6 +586,76 @@ jobs:
 image: manticoresearch/manticore:9.2.14
 ports:
   - 9308:9308
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
full-cache: true
- name: Download server-native.node
uses: actions/download-artifact@v4
with:
name: server-native.node
path: ./packages/backend/native
- name: Prepare Server Test Environment
uses: ./.github/actions/server-test-env
- name: Run server tests
run: yarn affine @affine/server test:coverage --forbid-only
env:
CARGO_TARGET_DIR: '${{ github.workspace }}/target'
CI_NODE_INDEX: ${{ matrix.node_index }}
CI_NODE_TOTAL: ${{ matrix.total_nodes }}
- name: Upload server test coverage results
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./packages/backend/server/.coverage/lcov.info
flags: server-test
name: affine
fail_ci_if_error: false
server-test-elasticsearch:
name: Server Test with Elasticsearch
runs-on: ubuntu-latest
needs:
- optimize_ci
- build-server-native
if: needs.optimize_ci.outputs.skip == 'false'
strategy:
fail-fast: false
env:
NODE_ENV: test
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
REDIS_SERVER_HOST: localhost
AFFINE_INDEXER_SEARCH_PROVIDER: elasticsearch
AFFINE_INDEXER_SEARCH_ENDPOINT: http://localhost:9200
services:
postgres:
image: pgvector/pgvector:pg16
env:
POSTGRES_PASSWORD: affine
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
redis:
image: redis
ports:
- 6379:6379
mailer:
image: mailhog/mailhog
ports:
- 1025:1025
- 8025:8025
 steps:
   # https://github.com/elastic/elastic-github-actions/blob/master/elasticsearch/README.md
   - name: Configure sysctl limits for Elasticsearch
@@ -618,8 +688,8 @@ jobs:
 - name: Prepare Server Test Environment
   uses: ./.github/actions/server-test-env
-- name: Run server tests
+- name: Run server tests with elasticsearch only
-  run: yarn affine @affine/server test:coverage --forbid-only
+  run: yarn affine @affine/server test:coverage "**/*/*elasticsearch.spec.ts" --forbid-only
   env:
     CARGO_TARGET_DIR: '${{ github.workspace }}/target'
     CI_NODE_INDEX: ${{ matrix.node_index }}

View File

@@ -2,18 +2,33 @@ import { randomUUID } from 'node:crypto';
 import { readFile } from 'node:fs/promises';
 import path from 'node:path';
-import test from 'ava';
+import _test from 'ava';
 import { omit, pick } from 'lodash-es';
-import { createModule } from '../../../../__tests__/create-module';
+import {
+  createModule,
+  TestingModule,
+} from '../../../../__tests__/create-module';
 import { Mockers } from '../../../../__tests__/mocks';
 import { ConfigModule } from '../../../../base/config';
+import { User, Workspace } from '../../../../models';
 import { IndexerModule } from '../../';
 import { SearchProviderType } from '../../config';
 import { AggregateQueryDSL, ElasticsearchProvider } from '../../providers';
 import { blockMapping, docMapping, SearchTable } from '../../tables';
-const module = await createModule({
+const test =
+  process.env.AFFINE_INDEXER_SEARCH_PROVIDER === 'elasticsearch'
+    ? _test
+    : _test.skip;
+
+let module: TestingModule;
+let searchProvider: ElasticsearchProvider;
+let user: User;
+let workspace: Workspace;
+
+_test.before(async () => {
+  module = await createModule({
   imports: [
     IndexerModule,
     ConfigModule.override({
@@ -28,12 +43,11 @@ const module = await createModule({
   }),
   ],
   providers: [ElasticsearchProvider],
 });
-const searchProvider = module.get(ElasticsearchProvider);
+  searchProvider = module.get(ElasticsearchProvider);
-const user = await module.create(Mockers.User);
+  user = await module.create(Mockers.User);
-const workspace = await module.create(Mockers.Workspace);
+  workspace = await module.create(Mockers.Workspace);
-test.before(async () => {
   await searchProvider.createTable(
     SearchTable.block,
     JSON.stringify(blockMapping)
@@ -159,7 +173,7 @@ test.before(async () => {
   });
 });
-test.after.always(async () => {
+_test.after.always(async () => {
   await searchProvider.deleteByQuery(
     SearchTable.block,
     {
@@ -182,6 +196,7 @@ test.after.always(async () => {
     refresh: true,
   }
 );
 await module.close();
}); });