feat(nbstore): add indexer storage (#10953)

This commit is contained in:
EYHN
2025-03-31 12:59:51 +00:00
parent c9e14ac0db
commit 8957d0645f
82 changed files with 3393 additions and 4753 deletions

View File

@@ -9,7 +9,6 @@ import {
} from '@toeverything/infra';
import { fileTypeFromBuffer } from 'file-type';
import {
combineLatest,
EMPTY,
filter,
firstValueFrom,
@@ -88,16 +87,9 @@ export class UnusedBlobs extends Entity {
async getUnusedBlobs(abortSignal?: AbortSignal) {
// Wait for both sync and indexing to complete
const ready$ = combineLatest([
this.workspaceService.workspace.engine.doc.state$.pipe(
filter(state => state.syncing === 0 && !state.syncRetrying)
),
this.docsSearchService.indexer.status$.pipe(
filter(
status => status.remaining === undefined || status.remaining === 0
)
),
]).pipe(map(() => true));
const ready$ = this.workspaceService.workspace.engine.doc.state$
.pipe(filter(state => state.syncing === 0 && !state.syncRetrying))
.pipe(map(() => true));
await firstValueFrom(
abortSignal
@@ -105,6 +97,8 @@ export class UnusedBlobs extends Entity {
: ready$
);
await this.docsSearchService.indexer.waitForCompleted(abortSignal);
const [blobs, usedBlobs] = await Promise.all([
this.listBlobs(),
this.getUsedBlobs(),
@@ -121,7 +115,8 @@ export class UnusedBlobs extends Entity {
}
private async getUsedBlobs(): Promise<string[]> {
const result = await this.docsSearchService.indexer.blockIndex.aggregate(
const result = await this.docsSearchService.indexer.aggregate(
'block',
{
type: 'boolean',
occur: 'must',

View File

@@ -1,301 +0,0 @@
import { DebugLogger } from '@affine/debug';
import type { Job, JobQueue } from '@toeverything/infra';
import {
Entity,
IndexedDBIndexStorage,
IndexedDBJobQueue,
JobRunner,
LiveData,
} from '@toeverything/infra';
import { map } from 'rxjs';
import { WorkspaceDBService } from '../../db';
import type { WorkspaceLocalState, WorkspaceService } from '../../workspace';
import { blockIndexSchema, docIndexSchema } from '../schema';
import { createWorker, type IndexerWorker } from '../worker/out-worker';
/**
 * Returns true when a doc-update payload carries no content: either a
 * zero-length buffer or the two-byte `[0, 0]` empty-update encoding.
 */
export function isEmptyUpdate(binary: Uint8Array) {
  const length = binary.byteLength;
  if (length === 0) {
    return true;
  }
  return length === 2 && binary[0] === 0 && binary[1] === 0;
}
const logger = new DebugLogger('crawler');

// workspaceLocalState key under which the last applied indexer schema
// version is persisted per workspace (see DocsIndexer.getVersion/setVersion).
const WORKSPACE_DOCS_INDEXER_VERSION_KEY = 'docs-indexer-version';

// Payload of one crawl job: the id of the doc to (re)index.
interface IndexerJobPayload {
  docId: string;
}
/**
 * Crawls workspace documents and maintains the local full-text search
 * indexes (`docIndex`, `blockIndex`) stored in IndexedDB.
 *
 * Work is scheduled through an IndexedDB-backed job queue (so pending work
 * survives page reloads) and executed during browser idle time via
 * `requestIdleCallback`. The actual doc parsing happens in a web worker.
 */
export class DocsIndexer extends Entity {
  /**
   * increase this number to re-index all docs
   */
  static INDEXER_VERSION = 19;

  // Persistent job queue, namespaced per workspace ('jq:<workspaceId>').
  private readonly jobQueue: JobQueue<IndexerJobPayload> =
    new IndexedDBJobQueue<IndexerJobPayload>(
      'jq:' + this.workspaceService.workspace.id
    );

  // Drains the queue; between batches waits for an idle slot (forced after
  // 200ms) so indexing does not compete with user interaction.
  private readonly runner = new JobRunner(
    this.jobQueue,
    (jobs, signal) => this.execJob(jobs, signal),
    () =>
      new Promise<void>(resolve =>
        requestIdleCallback(() => resolve(), {
          timeout: 200,
        })
      )
  );

  // Index storage namespaced per workspace ('idx:<workspaceId>').
  private readonly indexStorage = new IndexedDBIndexStorage(
    'idx:' + this.workspaceService.workspace.id
  );

  // Per-doc index (title/summary/…) and per-block index (content/refs/…);
  // schemas are defined in ../schema.
  readonly docIndex = this.indexStorage.getIndex('doc', docIndexSchema);

  readonly blockIndex = this.indexStorage.getIndex('block', blockIndexSchema);

  private readonly workspaceEngine = this.workspaceService.workspace.engine;

  private readonly workspaceId = this.workspaceService.workspace.id;

  // Lazily created crawler worker; see ensureWorker().
  private worker: IndexerWorker | null = null;

  // Number of jobs still waiting in the queue; `remaining` is undefined
  // until the queue emits its first status.
  readonly status$ = LiveData.from<{ remaining?: number }>(
    this.jobQueue.status$.pipe(
      map(status => ({
        remaining: status.remaining,
      }))
    ),
    {}
  );

  constructor(
    private readonly workspaceService: WorkspaceService,
    private readonly workspaceLocalState: WorkspaceLocalState
  ) {
    super();
  }

  /**
   * Subscribes to doc updates from the workspace doc storage and enqueues a
   * crawl job for every updated doc. Updates are batched per docId via
   * `batchKey`; db docs are skipped.
   */
  setupListener() {
    this.workspaceEngine.doc.storage.connection
      .waitForConnected()
      .then(() => {
        this.disposables.push(
          this.workspaceEngine.doc.storage.subscribeDocUpdate(updated => {
            if (WorkspaceDBService.isDBDocId(updated.docId)) {
              // skip db doc
              return;
            }
            this.jobQueue
              .enqueue([
                {
                  batchKey: updated.docId,
                  payload: { docId: updated.docId },
                },
              ])
              .catch(err => {
                console.error('Error enqueueing job', err);
              });
          })
        );
      })
      .catch(err => {
        console.error('Error waiting for doc storage connection', err);
      });
  }

  /**
   * Executes one batch of crawl jobs (all sharing the same docId).
   *
   * A job for the workspace root doc diffs the doc list against what is
   * already indexed (optionally re-indexing everything after a version
   * upgrade); any other job (re)indexes a single doc's blocks. The worker
   * output is then applied to the doc/block indexes.
   *
   * @throws if the stored index version is newer than this build supports,
   *   in which case the runner is stopped.
   */
  async execJob(jobs: Job<IndexerJobPayload>[], signal: AbortSignal) {
    if (jobs.length === 0) {
      return;
    }
    const dbVersion = this.getVersion();
    if (dbVersion > DocsIndexer.INDEXER_VERSION) {
      // stop if db version is higher than self
      this.runner.stop();
      throw new Error('Indexer is outdated');
    }
    // a lower stored version means the schema changed: full re-index needed
    const isUpgrade = dbVersion < DocsIndexer.INDEXER_VERSION;

    // jobs should have the same storage docId, so we just pick the first one
    const docId = jobs[0].payload.docId;

    const worker = await this.ensureWorker(signal);

    const startTime = performance.now();
    logger.debug('Start crawling job for docId:', docId);

    let workerOutput;

    if (docId === this.workspaceId) {
      // root doc crawl: the worker diffs against all currently indexed docs
      const rootDocBuffer = (
        await this.workspaceEngine.doc.storage.getDoc(this.workspaceId)
      )?.bin;
      if (!rootDocBuffer) {
        return;
      }
      const allIndexedDocs = (await this.docIndex.getAll()).map(d => d.id);
      workerOutput = await worker.run({
        type: 'rootDoc',
        allIndexedDocs,
        rootDocBuffer,
        reindexAll: isUpgrade,
        rootDocId: this.workspaceId,
      });
    } else {
      // single doc crawl; the root doc buffer is passed along as well
      const rootDocBuffer = (
        await this.workspaceEngine.doc.storage.getDoc(this.workspaceId)
      )?.bin;
      const docBuffer =
        (await this.workspaceEngine.doc.storage.getDoc(docId))?.bin ??
        new Uint8Array(0);
      if (!rootDocBuffer) {
        return;
      }
      workerOutput = await worker.run({
        type: 'doc',
        docBuffer,
        docId,
        rootDocBuffer,
        rootDocId: this.workspaceId,
      });
    }

    if (workerOutput.deletedDoc || workerOutput.addedDoc) {
      if (workerOutput.deletedDoc) {
        // remove doc entries first, then every block entry belonging to them
        const docIndexWriter = await this.docIndex.write();
        for (const docId of workerOutput.deletedDoc) {
          docIndexWriter.delete(docId);
        }
        await docIndexWriter.commit();
        const blockIndexWriter = await this.blockIndex.write();
        for (const docId of workerOutput.deletedDoc) {
          const oldBlocks = await blockIndexWriter.search(
            {
              type: 'match',
              field: 'docId',
              match: docId,
            },
            {
              pagination: {
                limit: Number.MAX_SAFE_INTEGER,
              },
            }
          );
          for (const block of oldBlocks.nodes) {
            blockIndexWriter.delete(block.id);
          }
        }
        await blockIndexWriter.commit();
      }
      if (workerOutput.addedDoc) {
        // upsert doc entries, then replace each doc's block entries wholesale
        const docIndexWriter = await this.docIndex.write();
        for (const { doc } of workerOutput.addedDoc) {
          docIndexWriter.put(doc);
        }
        await docIndexWriter.commit();
        const blockIndexWriter = await this.blockIndex.write();
        for (const { id, blocks } of workerOutput.addedDoc) {
          // delete old blocks
          const oldBlocks = await blockIndexWriter.search(
            {
              type: 'match',
              field: 'docId',
              match: id,
            },
            {
              pagination: {
                limit: Number.MAX_SAFE_INTEGER,
              },
            }
          );
          for (const block of oldBlocks.nodes) {
            blockIndexWriter.delete(block.id);
          }
          for (const block of blocks) {
            blockIndexWriter.insert(block);
          }
        }
        await blockIndexWriter.commit();
      }
    }

    if (workerOutput.reindexDoc) {
      // the worker discovered docs that still need crawling; queue them
      await this.jobQueue.enqueue(
        workerOutput.reindexDoc.map(({ docId }) => ({
          batchKey: docId,
          payload: { docId },
        }))
      );
    }

    if (isUpgrade) {
      this.setVersion();
    }

    const duration = performance.now() - startTime;
    logger.debug(
      'Finish crawling job for docId:' + docId + ' in ' + duration + 'ms '
    );
  }

  /**
   * Starts the job runner and seeds the queue with a root-doc job so the
   * whole workspace gets scanned.
   */
  startCrawling() {
    this.runner.start();
    this.jobQueue
      .enqueue([
        {
          batchKey: this.workspaceId,
          payload: { docId: this.workspaceId },
        },
      ])
      .catch(err => {
        console.error('Error enqueueing job', err);
      });
  }

  /** Creates the crawler worker on first use and caches it afterwards. */
  async ensureWorker(signal: AbortSignal): Promise<IndexerWorker> {
    if (!this.worker) {
      this.worker = await createWorker(signal);
    }
    return this.worker;
  }

  /** Returns the indexer version stored for this workspace, or -1 if unset. */
  getVersion() {
    const version = this.workspaceLocalState.get<number>(
      WORKSPACE_DOCS_INDEXER_VERSION_KEY
    );
    if (typeof version !== 'number') {
      return -1;
    } else {
      return version;
    }
  }

  /** Persists `version` unless an equal or newer version is already stored. */
  setVersion(version = DocsIndexer.INDEXER_VERSION) {
    if (this.getVersion() >= version) {
      return;
    }
    return this.workspaceLocalState.set(
      WORKSPACE_DOCS_INDEXER_VERSION_KEY,
      version
    );
  }

  override dispose(): void {
    super.dispose();
    this.runner.stop();
    this.worker?.dispose();
  }
}

View File

@@ -2,17 +2,12 @@ export { DocsSearchService } from './services/docs-search';
import { type Framework } from '@toeverything/infra';
import {
WorkspaceLocalState,
WorkspaceScope,
WorkspaceService,
} from '../workspace';
import { DocsIndexer } from './entities/docs-indexer';
import { DocsService } from '../doc';
import { WorkspaceScope, WorkspaceService } from '../workspace';
import { DocsSearchService } from './services/docs-search';
export function configureDocsSearchModule(framework: Framework) {
framework
.scope(WorkspaceScope)
.service(DocsSearchService, [WorkspaceService])
.entity(DocsIndexer, [WorkspaceService, WorkspaceLocalState]);
.service(DocsSearchService, [WorkspaceService, DocsService]);
}

View File

@@ -1,35 +0,0 @@
import { defineSchema } from '@toeverything/infra';
// Per-document search index: one entry per doc, keyed by doc id.
export const docIndexSchema = defineSchema({
  docId: 'String',
  title: 'FullText',
  // summary of the doc, used for preview
  summary: { type: 'String', index: false },
  journal: 'String',
});

export type DocIndexSchema = typeof docIndexSchema;

// Per-block search index: one entry per block, linked back to its doc
// through `docId`. Fields with `index: false` are stored for display only.
export const blockIndexSchema = defineSchema({
  docId: 'String',
  blockId: 'String',
  content: 'FullText',
  flavour: 'String',
  blob: 'String',
  // reference doc id
  // ['xxx','yyy']
  refDocId: 'String',
  // reference info, used for backlink to specific block
  // [{"docId":"xxx","mode":"page","blockIds":["gt5Yfq1maYvgNgpi13rIq"]},{"docId":"yyy","mode":"edgeless","blockIds":["k5prpOlDF-9CzfatmO0W7"]}]
  ref: { type: 'String', index: false },
  // parent block flavour
  parentFlavour: 'String',
  // parent block id
  parentBlockId: 'String',
  // additional info
  // { "databaseName": "xxx", "displayMode": "page/edgeless", "noteBlockId": "xxx" }
  additional: { type: 'String', index: false },
  // pre-rendered markdown snippet for previews; not indexed
  markdownPreview: { type: 'String', index: false },
});

export type BlockIndexSchema = typeof blockIndexSchema;

View File

@@ -1,31 +1,31 @@
import { toURLSearchParams } from '@affine/core/modules/navigation';
import type { IndexerSyncState } from '@affine/nbstore';
import type { ReferenceParams } from '@blocksuite/affine/model';
import { fromPromise, OnEvent, Service } from '@toeverything/infra';
import { fromPromise, LiveData, Service } from '@toeverything/infra';
import { isEmpty, omit } from 'lodash-es';
import { map, type Observable, of, switchMap } from 'rxjs';
import { z } from 'zod';
import type { DocsService } from '../../doc/services/docs';
import type { WorkspaceService } from '../../workspace';
import { WorkspaceEngineBeforeStart } from '../../workspace';
import { DocsIndexer } from '../entities/docs-indexer';
@OnEvent(WorkspaceEngineBeforeStart, s => s.handleWorkspaceEngineBeforeStart)
export class DocsSearchService extends Service {
readonly indexer = this.framework.createEntity(DocsIndexer);
constructor(private readonly workspaceService: WorkspaceService) {
constructor(
private readonly workspaceService: WorkspaceService,
private readonly docsService: DocsService
) {
super();
}
handleWorkspaceEngineBeforeStart() {
// skip if in shared mode
if (this.workspaceService.workspace.openOptions.isSharedMode) {
return;
}
this.indexer.setupListener();
this.indexer.startCrawling();
get indexer() {
return this.workspaceService.workspace.engine.indexer;
}
readonly indexerState$ = LiveData.from(this.indexer.state$, {
indexing: 0,
errorMessage: null,
} as IndexerSyncState);
search$(query: string): Observable<
{
docId: string;
@@ -35,8 +35,9 @@ export class DocsSearchService extends Service {
blockContent?: string;
}[]
> {
return this.indexer.blockIndex
return this.indexer
.aggregate$(
'block',
{
type: 'boolean',
occur: 'must',
@@ -89,47 +90,40 @@ export class DocsSearchService extends Service {
}
)
.pipe(
switchMap(({ buckets }) => {
return fromPromise(async () => {
const docData = await this.indexer.docIndex.getAll(
buckets.map(bucket => bucket.key)
);
map(({ buckets }) => {
const result = [];
const result = [];
for (const bucket of buckets) {
const firstMatchFlavour = bucket.hits.nodes[0]?.fields.flavour;
if (firstMatchFlavour === 'affine:page') {
// is title match
const blockContent =
bucket.hits.nodes[1]?.highlights.content[0]; // try to get block content
result.push({
docId: bucket.key,
title: bucket.hits.nodes[0].highlights.content[0],
score: bucket.score,
blockContent,
});
} else {
const title =
docData.find(doc => doc.id === bucket.key)?.get('title') ??
'';
const matchedBlockId = bucket.hits.nodes[0]?.fields.blockId;
// is block match
result.push({
docId: bucket.key,
title: typeof title === 'string' ? title : title[0],
blockId:
typeof matchedBlockId === 'string'
? matchedBlockId
: matchedBlockId[0],
score: bucket.score,
blockContent: bucket.hits.nodes[0]?.highlights.content[0],
});
}
for (const bucket of buckets) {
const firstMatchFlavour = bucket.hits.nodes[0]?.fields.flavour;
if (firstMatchFlavour === 'affine:page') {
// is title match
const blockContent = bucket.hits.nodes[1]?.highlights.content[0]; // try to get block content
result.push({
docId: bucket.key,
title: bucket.hits.nodes[0].highlights.content[0],
score: bucket.score,
blockContent,
});
} else {
const title =
this.docsService.list.doc$(bucket.key).value?.title$.value ??
'';
const matchedBlockId = bucket.hits.nodes[0]?.fields.blockId;
// is block match
result.push({
docId: bucket.key,
title: title,
blockId:
typeof matchedBlockId === 'string'
? matchedBlockId
: matchedBlockId[0],
score: bucket.score,
blockContent: bucket.hits.nodes[0]?.highlights.content[0],
});
}
}
return result;
});
return result;
})
);
}
@@ -140,8 +134,9 @@ export class DocsSearchService extends Service {
return of([]);
}
return this.indexer.blockIndex
return this.indexer
.search$(
'block',
{
type: 'boolean',
occur: 'must',
@@ -185,18 +180,12 @@ export class DocsSearchService extends Service {
).values()
);
const docData = await this.indexer.docIndex.getAll(
Array.from(new Set(refs.map(ref => ref.docId)))
);
return refs
.flatMap(ref => {
const doc = docData.find(doc => doc.id === ref.docId);
const doc = this.docsService.list.doc$(ref.docId).value;
if (!doc) return null;
const titles = doc.get('title');
const title =
(Array.isArray(titles) ? titles[0] : titles) ?? '';
const title = doc.title$.value;
const params = omit(ref, ['docId']);
return {
@@ -214,8 +203,9 @@ export class DocsSearchService extends Service {
}
watchRefsTo(docId: string) {
return this.indexer.blockIndex
return this.indexer
.aggregate$(
'block',
{
type: 'boolean',
occur: 'must',
@@ -262,13 +252,10 @@ export class DocsSearchService extends Service {
.pipe(
switchMap(({ buckets }) => {
return fromPromise(async () => {
const docData = await this.indexer.docIndex.getAll(
buckets.map(bucket => bucket.key)
);
return buckets.flatMap(bucket => {
const title =
docData.find(doc => doc.id === bucket.key)?.get('title') ?? '';
this.docsService.list.doc$(bucket.key).value?.title$.value ??
'';
return bucket.hits.nodes.map(node => {
const blockId = node.fields.blockId ?? '';
@@ -297,7 +284,7 @@ export class DocsSearchService extends Service {
return {
docId: bucket.key,
blockId: typeof blockId === 'string' ? blockId : blockId[0],
title: typeof title === 'string' ? title : title[0],
title: title,
markdownPreview:
typeof markdownPreview === 'string'
? markdownPreview
@@ -330,8 +317,9 @@ export class DocsSearchService extends Service {
const DatabaseAdditionalSchema = z.object({
databaseName: z.string().optional(),
});
return this.indexer.blockIndex
return this.indexer
.search$(
'block',
{
type: 'boolean',
occur: 'must',
@@ -397,8 +385,9 @@ export class DocsSearchService extends Service {
}
watchDocSummary(docId: string) {
return this.indexer.docIndex
return this.indexer
.search$(
'doc',
{
type: 'match',
field: 'docId',
@@ -422,8 +411,4 @@ export class DocsSearchService extends Service {
})
);
}
override dispose(): void {
this.indexer.dispose();
}
}

View File

@@ -1,105 +0,0 @@
import { DebugLogger } from '@affine/debug';
import { MANUALLY_STOP, throwIfAborted } from '@toeverything/infra';
import type {
WorkerIngoingMessage,
WorkerInput,
WorkerOutgoingMessage,
WorkerOutput,
} from './types';
const logger = new DebugLogger('affine:indexer-worker');
/**
 * Spawns the indexer web worker and waits for its 'init' handshake,
 * retrying every 5 seconds until it succeeds or `abort` fires
 * (`throwIfAborted` throws on abort, exiting the loop).
 *
 * Fixes over the previous revision:
 * - handshake timeout rejects with an `Error` instead of a bare string;
 * - the 30s handshake timer is cleared once the worker answers;
 * - a worker whose handshake failed is terminated instead of leaked
 *   across retries.
 *
 * @returns a handle with `run(input)` to execute one crawl request and
 *   `dispose()` to abort pending runs and terminate the worker.
 */
export async function createWorker(abort: AbortSignal) {
  let worker: Worker | null = null;
  while (throwIfAborted(abort)) {
    // worker created by the current attempt, kept so it can be cleaned up
    let candidate: Worker | undefined;
    try {
      // @TODO(@forehalo): need to make a general worker
      const w = new Worker(
        /* webpackChunkName: "worker" */ new URL(
          './in-worker.ts',
          import.meta.url
        )
      );
      candidate = w;
      worker = await new Promise<Worker>((resolve, reject) => {
        const timeout = setTimeout(() => {
          reject(new Error('timeout'));
        }, 1000 * 30 /* 30 sec */);
        w.addEventListener('error', reject);
        w.addEventListener('message', event => {
          if (event.data.type === 'init') {
            clearTimeout(timeout);
            resolve(w);
          }
        });
        w.postMessage({ type: 'init', msgId: 0 } as WorkerIngoingMessage);
      });
    } catch (err) {
      // don't leak the half-initialized worker from the failed attempt
      candidate?.terminate();
      logger.debug(
        `Indexer worker init failed, ${err}, will retry in 5 seconds.`
      );
      await new Promise(resolve => setTimeout(resolve, 5000));
    }
    if (worker) {
      break;
    }
  }
  if (!worker) {
    // never reach here
    throw new Error('Worker is not created');
  }
  const terminateAbort = new AbortController();
  // msgId 0 is reserved for the init handshake above
  let msgId = 1;
  return {
    /**
     * Sends one crawl request and resolves with its output. Rejects when the
     * worker reports failure or `dispose()` is called while in flight.
     */
    run: async (input: WorkerInput) => {
      const dispose: (() => void)[] = [];
      return new Promise<WorkerOutput>((resolve, reject) => {
        const currentMsgId = msgId++;
        const msgHandler = (event: MessageEvent<WorkerOutgoingMessage>) => {
          if (event.data.msgId === currentMsgId) {
            if (event.data.type === 'done') {
              resolve(event.data.output);
            } else if (event.data.type === 'failed') {
              reject(new Error(event.data.error));
            } else {
              reject(new Error('Unknown message type'));
            }
          }
        };
        const abortHandler = (reason: any) => {
          reject(reason);
        };
        worker.addEventListener('message', msgHandler);
        dispose.push(() => {
          worker?.removeEventListener('message', msgHandler);
        });
        terminateAbort.signal.addEventListener('abort', abortHandler);
        dispose.push(() => {
          terminateAbort.signal.removeEventListener('abort', abortHandler);
        });
        worker.postMessage({
          type: 'run',
          msgId: currentMsgId,
          input,
        } as WorkerIngoingMessage);
      }).finally(() => {
        // always detach listeners, whether resolved, failed, or aborted
        for (const d of dispose) {
          d();
        }
      });
    },
    /** Rejects in-flight runs with MANUALLY_STOP and terminates the worker. */
    dispose: () => {
      terminateAbort.abort(MANUALLY_STOP);
      worker.terminate();
    },
  };
}
export type IndexerWorker = Awaited<ReturnType<typeof createWorker>>;

View File

@@ -1,53 +0,0 @@
import type { Document } from '@toeverything/infra';
import type { BlockIndexSchema, DocIndexSchema } from '../schema';
/**
 * Messages sent from the main thread into the indexer worker.
 * `msgId` correlates a request with its response; 0 is the init handshake.
 */
export type WorkerIngoingMessage = (
  | {
      type: 'init';
    }
  | {
      type: 'run';
      input: WorkerInput;
    }
) & { msgId: number };

/**
 * Messages sent from the indexer worker back to the main thread, echoing
 * the `msgId` of the request they answer.
 */
export type WorkerOutgoingMessage = (
  | {
      type: 'init';
    }
  | {
      type: 'done';
      output: WorkerOutput;
    }
  | {
      type: 'failed';
      error: string;
    }
) & { msgId: number };

/**
 * A crawl request: either the workspace root doc (with the list of already
 * indexed docs, optionally forcing a full re-index) or a single doc.
 * Buffers are binary doc snapshots.
 */
export type WorkerInput =
  | {
      type: 'rootDoc';
      rootDocBuffer: Uint8Array;
      rootDocId: string;
      allIndexedDocs: string[];
      reindexAll?: boolean;
    }
  | {
      type: 'doc';
      docId: string;
      rootDocId: string;
      rootDocBuffer: Uint8Array;
      docBuffer: Uint8Array;
    };

/**
 * Crawl result: index mutations to apply (`addedDoc` entries replace a
 * doc's blocks wholesale, `deletedDoc` lists doc ids to remove) plus
 * follow-up docs that still need crawling (`reindexDoc`).
 */
export interface WorkerOutput {
  reindexDoc?: { docId: string }[];
  addedDoc?: {
    id: string;
    blocks: Document<BlockIndexSchema>[];
    doc: Document<DocIndexSchema>;
  }[];
  deletedDoc?: string[];
}

View File

@@ -19,6 +19,7 @@ import { useI18n } from '@affine/i18n';
import { track } from '@affine/track';
import {
LiveData,
MANUALLY_STOP,
useLiveData,
useService,
useServices,
@@ -98,19 +99,23 @@ export const ExplorerDocNode = ({
);
const searching = children === null;
const indexerLoading = useLiveData(
docsSearchService.indexer.status$.map(
v => v.remaining === undefined || v.remaining > 0
)
);
const [referencesLoading, setReferencesLoading] = useState(true);
useLayoutEffect(() => {
setReferencesLoading(
prev =>
prev &&
indexerLoading /* after loading becomes false, it never becomes true */
);
}, [indexerLoading]);
const abortController = new AbortController();
docsSearchService.indexer
.waitForDocCompletedWithPriority(docId, 100, abortController.signal)
.then(() => {
setReferencesLoading(false);
})
.catch(err => {
if (err !== MANUALLY_STOP) {
console.error(err);
}
});
return () => {
abortController.abort(MANUALLY_STOP);
};
}, [docId, docsSearchService]);
const dndData = useMemo(() => {
return {

View File

@@ -33,10 +33,11 @@ export class DocsQuickSearchSession
super();
}
private readonly isIndexerLoading$ =
this.docsSearchService.indexer.status$.map(({ remaining }) => {
return remaining === undefined || remaining > 0;
});
private readonly isIndexerLoading$ = this.docsSearchService.indexerState$.map(
({ completed }) => {
return !completed;
}
);
private readonly isQueryLoading$ = new LiveData(false);

View File

@@ -17,7 +17,7 @@ import { cssVarV2 } from '@toeverything/theme/v2';
import Fuse, { type FuseResultMatch } from 'fuse.js';
import { html } from 'lit';
import { unsafeHTML } from 'lit/directives/unsafe-html.js';
import { map, takeWhile } from 'rxjs';
import { map } from 'rxjs';
import type { CollectionService } from '../../collection';
import type { DocDisplayMetaService } from '../../doc-display-meta';
@@ -119,15 +119,6 @@ export class SearchMenuService extends Service {
[]
);
const { signal: isIndexerLoading, cleanup: cleanupIndexerLoading } =
createSignalFromObservable(
this.docsSearch.indexer.status$.pipe(
map(status => status.remaining !== undefined && status.remaining > 0),
takeWhile(isLoading => isLoading, true)
),
false
);
const overflowText = computed(() => {
const overflowCount = docsSignal.value.length - MAX_DOCS;
return I18n.t('com.affine.editor.at-menu.more-docs-hint', {
@@ -137,14 +128,12 @@ export class SearchMenuService extends Service {
abortSignal.addEventListener('abort', () => {
cleanupDocs();
cleanupIndexerLoading();
});
return {
name: I18n.t('com.affine.editor.at-menu.link-to-doc', {
query,
}),
loading: isIndexerLoading,
items: docsSignal,
maxDisplay: MAX_DOCS,
overflowText,
@@ -153,8 +142,9 @@ export class SearchMenuService extends Service {
// only search docs by title, excluding blocks
private searchDocs$(query: string) {
return this.docsSearch.indexer.docIndex
return this.docsSearch.indexer
.aggregate$(
'doc',
{
type: 'boolean',
occur: 'must',

View File

@@ -460,6 +460,22 @@ class CloudWorkspaceFlavourProvider implements WorkspaceFlavourProvider {
id: `${this.flavour}:${workspaceId}`,
},
},
indexer: {
name: 'IndexedDBIndexerStorage',
opts: {
flavour: this.flavour,
type: 'workspace',
id: workspaceId,
},
},
indexerSync: {
name: 'IndexedDBIndexerSyncStorage',
opts: {
flavour: this.flavour,
type: 'workspace',
id: workspaceId,
},
},
},
remotes: {
[`cloud:${this.flavour}`]: {

View File

@@ -349,6 +349,22 @@ class LocalWorkspaceFlavourProvider implements WorkspaceFlavourProvider {
id: workspaceId,
},
},
indexer: {
name: 'IndexedDBIndexerStorage',
opts: {
flavour: this.flavour,
type: 'workspace',
id: workspaceId,
},
},
indexerSync: {
name: 'IndexedDBIndexerSyncStorage',
opts: {
flavour: this.flavour,
type: 'workspace',
id: workspaceId,
},
},
},
remotes: {
v1: {

View File

@@ -36,6 +36,13 @@ export class WorkspaceEngine extends Entity<{
return this.client.blobFrontend;
}
  // Indexer frontend of the underlying nbstore client.
  // Throws if accessed before the engine's client has been created.
  get indexer() {
    if (!this.client) {
      throw new Error('Engine is not initialized');
    }
    return this.client.indexerFrontend;
  }
get awareness() {
if (!this.client) {
throw new Error('Engine is not initialized');