feat(infra): collect more data to indexer (#8528)

Author: EYHN
Committed: 2024-10-19 20:22:26 +08:00 by GitHub
Parent: 8f92be926b
Commit: 01c3a3b4c0

11 changed files with 341 additions and 169 deletions

View File

@@ -40,7 +40,10 @@ export class Document<S extends Schema = any> {
       }
     } else {
       for (const key in map) {
-        doc.insert(key, map[key] as string | string[]);
+        if (map[key] === undefined) {
+          continue;
+        }
+        doc.insert(key, map[key]);
       }
     }
     return doc;
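
With this guard, Document.from skips fields whose value is undefined instead of inserting them. A minimal usage sketch (the field names below are illustrative, not from this commit):

  // Hypothetical call: 'summary' is undefined here, so the new guard
  // skips it rather than inserting an undefined value into the document.
  const doc = Document.from('doc-id', {
    title: 'hello world',
    summary: undefined,
  });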

View File

@@ -1,3 +1,4 @@
+import { DebugLogger } from '@affine/debug';
 import {
   type DBSchema,
   type IDBPDatabase,
@@ -25,6 +26,8 @@ import {
 } from './inverted-index';
 import { Match } from './match';
+
+const logger = new DebugLogger('indexeddb');

 export interface IndexDB extends DBSchema {
   kvMetadata: {
     key: string;
@@ -75,14 +78,19 @@ export class DataStruct {
   constructor(
     readonly databaseName: string,
-    schema: Schema
+    readonly schema: Schema
   ) {
     for (const [key, type] of Object.entries(schema)) {
-      if (type === 'String') {
+      const typeInfo = typeof type === 'string' ? { type } : type;
+      if (typeInfo.index === false) {
+        // If index is false, we don't need to create an inverted index for this field.
+        continue;
+      }
+      if (typeInfo.type === 'String') {
         this.invertedIndex.set(key, new StringInvertedIndex(key));
-      } else if (type === 'Integer') {
+      } else if (typeInfo.type === 'Integer') {
         this.invertedIndex.set(key, new IntegerInvertedIndex(key));
-      } else if (type === 'FullText') {
+      } else if (typeInfo.type === 'FullText') {
         this.invertedIndex.set(key, new FullTextInvertedIndex(key));
       } else if (type === 'Boolean') {
         this.invertedIndex.set(key, new BooleanInvertedIndex(key));
@@ -102,17 +110,29 @@ export class DataStruct {
       throw new Error('Document already exists');
     }

+    const dataMap = new Map();
+    for (const [key, values] of document.fields) {
+      const type = this.schema[key as string];
+      if (!type) {
+        return;
+      }
+      const typeInfo = typeof type === 'string' ? { type } : type;
+      if (typeInfo.store !== false) {
+        // If store is false, the field will not be stored
+        dataMap.set(key, values);
+      }
+    }
+
     const nid = await trx.objectStore('records').add({
       id: document.id,
-      data: new Map(document.fields as Map<string, string[]>),
+      data: dataMap,
     });

     for (const [key, values] of document.fields) {
       const iidx = this.invertedIndex.get(key as string);
       if (!iidx) {
-        throw new Error(
-          `Inverted index '${key.toString()}' not found, document not match schema`
-        );
+        return;
       }
       await iidx.insert(trx, nid, values);
     }
@@ -164,7 +184,7 @@ export class DataStruct {
     if (query.type === 'match') {
       const iidx = this.invertedIndex.get(query.field as string);
       if (!iidx) {
-        throw new Error(`Field '${query.field as string}' not found`);
+        return new Match();
       }
       return await iidx.match(trx, query.match);
     } else if (query.type === 'boolean') {
@@ -187,7 +207,7 @@ export class DataStruct {
     } else if (query.type === 'exists') {
       const iidx = this.invertedIndex.get(query.field as string);
       if (!iidx) {
-        throw new Error(`Field '${query.field as string}' not found`);
+        return new Match();
       }
       return await iidx.all(trx);
     }
@@ -217,31 +237,41 @@ export class DataStruct {
     query: Query<any>,
     options: SearchOptions<any>
   ): Promise<SearchResult<any, any>> {
-    const pagination = {
-      skip: options.pagination?.skip ?? 0,
-      limit: options.pagination?.limit ?? 100,
-    };
-
-    const match = await this.query(trx, query);
-    const nids = match
-      .toArray()
-      .slice(pagination.skip, pagination.skip + pagination.limit);
-
-    const nodes = [];
-    for (const nid of nids) {
-      nodes.push(await this.resultNode(trx, match, nid, options));
-    }
-
-    return {
-      pagination: {
-        count: match.size(),
-        hasMore: match.size() > pagination.limit + pagination.skip,
-        limit: pagination.limit,
-        skip: pagination.skip,
-      },
-      nodes: nodes,
-    };
+    const startTime = performance.now();
+    try {
+      const pagination = {
+        skip: options.pagination?.skip ?? 0,
+        limit: options.pagination?.limit ?? 100,
+      };
+
+      const match = await this.query(trx, query);
+      const nids = match
+        .toArray()
+        .slice(pagination.skip, pagination.skip + pagination.limit);
+
+      const nodes = [];
+      for (const nid of nids) {
+        nodes.push(await this.resultNode(trx, match, nid, options));
+      }
+
+      return {
+        pagination: {
+          count: match.size(),
+          hasMore: match.size() > pagination.limit + pagination.skip,
+          limit: pagination.limit,
+          skip: pagination.skip,
+        },
+        nodes: nodes,
+      };
+    } finally {
+      logger.debug(
+        `[indexer ${this.databaseName}] search`,
+        performance.now() - startTime,
+        'ms',
+        query
+      );
+    }
   }

   async aggregate(
@@ -250,95 +280,105 @@ export class DataStruct {
     field: string,
     options: AggregateOptions<any>
   ): Promise<AggregateResult<any, any>> {
-    const pagination = {
-      skip: options.pagination?.skip ?? 0,
-      limit: options.pagination?.limit ?? 100,
-    };
-
-    const hitPagination = options.hits
-      ? {
-          skip: options.hits.pagination?.skip ?? 0,
-          limit: options.hits.pagination?.limit ?? 3,
-        }
-      : {
-          skip: 0,
-          limit: 0,
-        };
-
-    const match = await this.query(trx, query);
-    const nids = match.toArray();
-
-    const buckets: {
-      key: string;
-      nids: number[];
-      hits: SearchResult<any, any>['nodes'];
-    }[] = [];
-
-    for (const nid of nids) {
-      const values = (await trx.objectStore('records').get(nid))?.data.get(
-        field
-      );
-      for (const value of values ?? []) {
-        let bucket;
-        let bucketIndex = buckets.findIndex(b => b.key === value);
-        if (bucketIndex === -1) {
-          bucket = { key: value, nids: [], hits: [] };
-          buckets.push(bucket);
-          bucketIndex = buckets.length - 1;
-        } else {
-          bucket = buckets[bucketIndex];
-        }
-        if (
-          bucketIndex >= pagination.skip &&
-          bucketIndex < pagination.skip + pagination.limit
-        ) {
-          bucket.nids.push(nid);
-          if (
-            bucket.nids.length - 1 >= hitPagination.skip &&
-            bucket.nids.length - 1 < hitPagination.skip + hitPagination.limit
-          ) {
-            bucket.hits.push(
-              await this.resultNode(trx, match, nid, options.hits ?? {})
-            );
-          }
-        }
-      }
-    }
-
-    return {
-      buckets: buckets
-        .slice(pagination.skip, pagination.skip + pagination.limit)
-        .map(bucket => {
-          const result = {
-            key: bucket.key,
-            score: match.getScore(bucket.nids[0]),
-            count: bucket.nids.length,
-          } as AggregateResult<any, any>['buckets'][number];
-          if (options.hits) {
-            (result as any).hits = {
-              pagination: {
-                count: bucket.nids.length,
-                hasMore:
-                  bucket.nids.length > hitPagination.limit + hitPagination.skip,
-                limit: hitPagination.limit,
-                skip: hitPagination.skip,
-              },
-              nodes: bucket.hits,
-            } as SearchResult<any, any>;
-          }
-          return result;
-        }),
-      pagination: {
-        count: buckets.length,
-        hasMore: buckets.length > pagination.limit + pagination.skip,
-        limit: pagination.limit,
-        skip: pagination.skip,
-      },
-    };
+    const startTime = performance.now();
+    try {
+      const pagination = {
+        skip: options.pagination?.skip ?? 0,
+        limit: options.pagination?.limit ?? 100,
+      };
+
+      const hitPagination = options.hits
+        ? {
+            skip: options.hits.pagination?.skip ?? 0,
+            limit: options.hits.pagination?.limit ?? 3,
+          }
+        : {
+            skip: 0,
+            limit: 0,
+          };
+
+      const match = await this.query(trx, query);
+      const nids = match.toArray();
+
+      const buckets: {
+        key: string;
+        nids: number[];
+        hits: SearchResult<any, any>['nodes'];
+      }[] = [];
+
+      for (const nid of nids) {
+        const values = (await trx.objectStore('records').get(nid))?.data.get(
+          field
+        );
+        for (const value of values ?? []) {
+          let bucket;
+          let bucketIndex = buckets.findIndex(b => b.key === value);
+          if (bucketIndex === -1) {
+            bucket = { key: value, nids: [], hits: [] };
+            buckets.push(bucket);
+            bucketIndex = buckets.length - 1;
+          } else {
+            bucket = buckets[bucketIndex];
+          }
+          if (
+            bucketIndex >= pagination.skip &&
+            bucketIndex < pagination.skip + pagination.limit
+          ) {
+            bucket.nids.push(nid);
+            if (
+              bucket.nids.length - 1 >= hitPagination.skip &&
+              bucket.nids.length - 1 < hitPagination.skip + hitPagination.limit
+            ) {
+              bucket.hits.push(
+                await this.resultNode(trx, match, nid, options.hits ?? {})
+              );
+            }
+          }
+        }
+      }
+
+      return {
+        buckets: buckets
+          .slice(pagination.skip, pagination.skip + pagination.limit)
+          .map(bucket => {
+            const result = {
+              key: bucket.key,
+              score: match.getScore(bucket.nids[0]),
+              count: bucket.nids.length,
+            } as AggregateResult<any, any>['buckets'][number];
+            if (options.hits) {
+              (result as any).hits = {
+                pagination: {
+                  count: bucket.nids.length,
+                  hasMore:
+                    bucket.nids.length >
+                    hitPagination.limit + hitPagination.skip,
+                  limit: hitPagination.limit,
+                  skip: hitPagination.skip,
+                },
+                nodes: bucket.hits,
+              } as SearchResult<any, any>;
+            }
+            return result;
+          }),
+        pagination: {
+          count: buckets.length,
+          hasMore: buckets.length > pagination.limit + pagination.skip,
+          limit: pagination.limit,
+          skip: pagination.skip,
+        },
+      };
+    } finally {
+      logger.debug(
+        `[indexer ${this.databaseName}] aggregate`,
+        performance.now() - startTime,
+        'ms'
+      );
+    }
   }

   async getAll(
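
Both search and aggregate now wrap their bodies in the same measure-and-log pattern: performance.now() before the work, logger.debug in a finally block so the duration is reported even when the transaction throws. The same idea as a standalone sketch (the withTiming helper is hypothetical, assuming a DebugLogger named logger in scope):

  // Hypothetical helper isolating the try/finally timing pattern above.
  async function withTiming<T>(label: string, fn: () => Promise<T>): Promise<T> {
    const startTime = performance.now();
    try {
      return await fn();
    } finally {
      // runs on success and on throw, so slow failures are logged too
      logger.debug(label, performance.now() - startTime, 'ms');
    }
  }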

View File

@@ -21,7 +21,11 @@ export interface InvertedIndex {
 }

 export class StringInvertedIndex implements InvertedIndex {
-  constructor(readonly fieldKey: string) {}
+  constructor(
+    readonly fieldKey: string,
+    readonly index: boolean = true,
+    readonly store: boolean = true
+  ) {}

   async match(trx: DataStructROTransaction, term: string): Promise<Match> {
     const objs = await trx
@@ -69,7 +73,11 @@ export class StringInvertedIndex implements InvertedIndex {
 }

 export class IntegerInvertedIndex implements InvertedIndex {
-  constructor(readonly fieldKey: string) {}
+  constructor(
+    readonly fieldKey: string,
+    readonly index: boolean = true,
+    readonly store: boolean = true
+  ) {}

   async match(trx: DataStructROTransaction, term: string): Promise<Match> {
     const objs = await trx
@@ -118,7 +126,11 @@ export class IntegerInvertedIndex implements InvertedIndex {
 }

 export class BooleanInvertedIndex implements InvertedIndex {
-  constructor(readonly fieldKey: string) {}
+  constructor(
+    readonly fieldKey: string,
+    readonly index: boolean = true,
+    readonly store: boolean = true
+  ) {}

   // eslint-disable-next-line sonarjs/no-identical-functions
   async all(trx: DataStructROTransaction): Promise<Match> {
@@ -172,7 +184,11 @@ export class BooleanInvertedIndex implements InvertedIndex {
 }

 export class FullTextInvertedIndex implements InvertedIndex {
-  constructor(readonly fieldKey: string) {}
+  constructor(
+    readonly fieldKey: string,
+    readonly index: boolean = true,
+    readonly store: boolean = true
+  ) {}

   async match(trx: DataStructROTransaction, term: string): Promise<Match> {
     const queryTokens = new GeneralTokenizer().tokenize(term);

View File

@@ -31,13 +31,15 @@ export class DataStruct {
   constructor(schema: Schema) {
     for (const [key, type] of Object.entries(schema)) {
-      if (type === 'String') {
+      const typeInfo = typeof type === 'string' ? { type } : type;
+      if (typeInfo.type === 'String') {
         this.invertedIndex.set(key, new StringInvertedIndex(key));
-      } else if (type === 'Integer') {
+      } else if (typeInfo.type === 'Integer') {
         this.invertedIndex.set(key, new IntegerInvertedIndex(key));
-      } else if (type === 'FullText') {
+      } else if (typeInfo.type === 'FullText') {
         this.invertedIndex.set(key, new FullTextInvertedIndex(key));
-      } else if (type === 'Boolean') {
+      } else if (typeInfo.type === 'Boolean') {
         this.invertedIndex.set(key, new BooleanInvertedIndex(key));
       } else {
         throw new Error(`Field type '${type}' not supported`);

View File

@@ -1,6 +1,24 @@
 import type { FieldType } from './field-type';

-export type Schema = Record<string, FieldType>;
+export type Schema = Record<
+  string,
+  | FieldType
+  | {
+      type: FieldType;
+      /**
+       * If false, the field will not be indexed, and thus not searchable.
+       *
+       * default: true
+       */
+      index?: boolean;
+      /**
+       * If false, the field will not be stored, and not included in the search result.
+       *
+       * default: true
+       */
+      store?: boolean;
+    }
+>;

 export function defineSchema<T extends Schema>(schema: T): T {
   return schema;
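
A field can now be declared either as a bare FieldType or as an object carrying per-field index/store flags. A small sketch of the two spellings (the content and preview fields are illustrative, not from this commit):

  // Hypothetical schema: 'content' can be searched but is never returned
  // in results; 'preview' is returned in results but cannot be searched.
  const exampleSchema = defineSchema({
    title: 'FullText', // bare FieldType: indexed and stored (defaults)
    content: { type: 'FullText', store: false },
    preview: { type: 'String', index: false },
  });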

View File

@@ -1,6 +1,10 @@
-import { DocLinksService } from '@affine/core/modules/doc-link';
+import {
+  type Backlink,
+  DocLinksService,
+  type Link,
+} from '@affine/core/modules/doc-link';
 import { useI18n } from '@affine/i18n';
-import { useLiveData, useServices } from '@toeverything/infra';
+import { LiveData, useLiveData, useServices } from '@toeverything/infra';
 import { useCallback, useState } from 'react';

 import { AffinePageReference } from '../../affine/reference-link';
@@ -13,8 +17,12 @@ export const BiDirectionalLinkPanel = () => {
   });
   const t = useI18n();

-  const links = useLiveData(docLinksService.links.links$);
-  const backlinks = useLiveData(docLinksService.backlinks.backlinks$);
+  const links = useLiveData(
+    show ? docLinksService.links.links$ : new LiveData([] as Link[])
+  );
+  const backlinks = useLiveData(
+    show ? docLinksService.backlinks.backlinks$ : new LiveData([] as Backlink[])
+  );

   const handleClickShow = useCallback(() => {
     setShow(!show);
   }, [show]);
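
The panel previously subscribed to links$ and backlinks$ unconditionally, paying for an indexer query even while collapsed; it now feeds useLiveData a constant empty LiveData until the user expands it. The conditional-source pattern in isolation (useLiveDataWhen is a hypothetical name, not part of the commit):

  // Hypothetical hook: subscribe to an expensive LiveData only while active.
  function useLiveDataWhen<T>(active: boolean, source$: LiveData<T[]>): T[] {
    return useLiveData(active ? source$ : new LiveData([] as T[]));
  }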

View File

@@ -103,7 +103,10 @@ export const DocPropertiesTableHeader = ({
     EditorSettingService,
   });
   const docBacklinks = docLinksService.backlinks;
-  const backlinks = useLiveData(docBacklinks.backlinks$);
+  const backlinks = useMemo(
+    () => docBacklinks.backlinks$.value,
+    [docBacklinks]
+  );

   const displayDocInfo = useLiveData(
     editorSettingService.editorSetting.settings$.selector(s => s.displayDocInfo)

View File

@@ -36,7 +36,7 @@ export class DocsIndexer extends Entity {
   /**
    * increase this number to re-index all docs
    */
-  static INDEXER_VERSION = 2;
+  static INDEXER_VERSION = 5;

   private readonly jobQueue: JobQueue<IndexerJobPayload> =
     new IndexedDBJobQueue<IndexerJobPayload>(

View File

@@ -2,6 +2,9 @@ import { defineSchema } from '@toeverything/infra';
 export const docIndexSchema = defineSchema({
   title: 'FullText',
+  // summary of the doc, used for preview
+  summary: { type: 'String', index: false },
+  journal: 'String',
 });

 export type DocIndexSchema = typeof docIndexSchema;
@@ -15,9 +18,16 @@ export const blockIndexSchema = defineSchema({
   // reference doc id
   // ['xxx','yyy']
   refDocId: 'String',
-  // reference info
+  // reference info, used for backlink to specific block
   // [{"docId":"xxx","mode":"page","blockIds":["gt5Yfq1maYvgNgpi13rIq"]},{"docId":"yyy","mode":"edgeless","blockIds":["k5prpOlDF-9CzfatmO0W7"]}]
-  ref: 'String',
+  ref: { type: 'String', index: false },
+  // parent block flavour
+  parentFlavour: 'String',
+  // parent block id
+  parentBlockId: 'String',
+  // additional info
+  // { "databaseName": "xxx" }
+  additional: { type: 'String', index: false },
 });

 export type BlockIndexSchema = typeof blockIndexSchema;
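
Since parentFlavour and parentBlockId are indexed strings, block searches can now be scoped by the containing block, while ref and additional are stored for display only and cannot be matched on. A hedged sketch of a query built from the match/boolean shapes handled in DataStruct.query above (the occur field and the exact values are assumptions):

  // Hypothetical query: paragraph blocks whose parent is a database block,
  // i.e. database rows. 'additional' would come back in the stored fields
  // of each hit but cannot itself be queried.
  const query = {
    type: 'boolean' as const,
    occur: 'must' as const,
    queries: [
      { type: 'match' as const, field: 'flavour', match: 'affine:paragraph' },
      { type: 'match' as const, field: 'parentFlavour', match: 'affine:database' },
    ],
  };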

View File

@@ -9,10 +9,10 @@ import {
   Array as YArray,
   Doc as YDoc,
   Map as YMap,
-  type Text as YText,
+  Text as YText,
 } from 'yjs';

-import type { BlockIndexSchema, docIndexSchema } from '../schema';
+import type { BlockIndexSchema, DocIndexSchema } from '../schema';
 import type {
   WorkerIngoingMessage,
   WorkerInput,
@@ -68,12 +68,6 @@ async function crawlingDocData({
     return {};
   }

-  const ydoc = new YDoc();
-
-  if (!isEmptyUpdate(docBuffer)) {
-    applyUpdate(ydoc, docBuffer);
-  }
-
   let docExists: boolean | null = null;

   (
@@ -89,23 +83,68 @@ async function crawlingDocData({
       deletedDoc: [docId],
     };
   } else {
+    const ydoc = new YDoc();
+    let docTitle = '';
+    let summaryLenNeeded = 1000;
+    let summary = '';
+    const blockDocuments: Document<BlockIndexSchema>[] = [];
+
+    if (!isEmptyUpdate(docBuffer)) {
+      applyUpdate(ydoc, docBuffer);
+    }
+
     const blocks = ydoc.getMap<any>('blocks');

     if (blocks.size === 0) {
-      return {};
+      return { deletedDoc: [docId] };
     }

-    let docTitle = '';
-
-    const blockDocuments: Document<BlockIndexSchema>[] = [];
+    let rootBlockId: string | null = null;

     for (const block of blocks.values()) {
       const flavour = block.get('sys:flavour')?.toString();
       const blockId = block.get('sys:id')?.toString();
+      if (flavour === 'affine:page' && blockId) {
+        rootBlockId = blockId;
+      }
+    }

-      if (!flavour || !blockId) {
-        continue;
-      }
+    if (!rootBlockId) {
+      return { deletedDoc: [docId] };
+    }
+
+    const queue: { parent?: string; id: string }[] = [{ id: rootBlockId }];
+    const visited = new Set<string>(); // avoid loop
+
+    const pushChildren = (id: string, block: YMap<any>) => {
+      const children = block.get('sys:children');
+      if (children instanceof YArray && children.length) {
+        for (let i = children.length - 1; i >= 0; i--) {
+          const childId = children.get(i);
+          if (childId && !visited.has(childId)) {
+            queue.push({ parent: id, id: childId });
+            visited.add(childId);
+          }
+        }
+      }
+    };
+
+    while (queue.length) {
+      const next = queue.pop();
+      if (!next) {
+        break;
+      }
+      const { parent: parentBlockId, id: blockId } = next;
+      const block = blockId ? blocks.get(blockId) : null;
+      const parentBlock = parentBlockId ? blocks.get(parentBlockId) : null;
+      if (!block) {
+        break;
+      }
+      const flavour = block.get('sys:flavour')?.toString();
+      const parentFlavour = parentBlock?.get('sys:flavour')?.toString();
+
+      pushChildren(blockId, block);

       if (flavour === 'affine:page') {
         docTitle = block.get('prop:title').toString();
@@ -150,6 +189,11 @@ async function crawlingDocData({
             .filter(ref => !!ref)
         );

+        const databaseName =
+          flavour === 'affine:paragraph' && parentFlavour === 'affine:database' // if block is a database row
+            ? parentBlock?.get('prop:title')?.toString()
+            : undefined;
+
         blockDocuments.push(
           Document.from<BlockIndexSchema>(`${docId}:${blockId}`, {
             docId,
@@ -164,8 +208,18 @@ async function crawlingDocData({
             },
             { refDocId: [], ref: [] }
           ),
+          parentFlavour,
+          parentBlockId,
+          additional: databaseName
+            ? JSON.stringify({ databaseName })
+            : undefined,
         })
       );
+
+      if (summaryLenNeeded > 0) {
+        summary += text.toString();
+        summaryLenNeeded -= text.length;
+      }
     }

     if (
@@ -183,6 +237,8 @@ async function crawlingDocData({
           blockId,
           refDocId: [pageId],
           ref: [JSON.stringify({ docId: pageId, ...params })],
+          parentFlavour,
+          parentBlockId,
         })
       );
     }
@@ -197,6 +253,8 @@ async function crawlingDocData({
           flavour,
           blockId,
           blob: [blobId],
+          parentFlavour,
+          parentBlockId,
         })
       );
     }
@@ -237,6 +295,8 @@ async function crawlingDocData({
           flavour,
           blockId,
           content: texts,
+          parentFlavour,
+          parentBlockId,
         })
       );
     }
@@ -244,32 +304,35 @@ async function crawlingDocData({
       if (flavour === 'affine:database') {
         const texts = [];
         const columnsObj = block.get('prop:columns');
-        if (!(columnsObj instanceof YArray)) {
-          continue;
-        }
-        for (const column of columnsObj) {
-          if (!(column instanceof YMap)) {
-            continue;
-          }
-          if (typeof column.get('name') === 'string') {
-            texts.push(column.get('name'));
-          }
-          const data = column.get('data');
-          if (!(data instanceof YMap)) {
-            continue;
-          }
-          const options = data.get('options');
-          if (!(options instanceof YArray)) {
-            continue;
-          }
-          for (const option of options) {
-            if (!(option instanceof YMap)) {
-              continue;
-            }
-            const value = option.get('value');
-            if (typeof value === 'string') {
-              texts.push(value);
-            }
-          }
-        }
+        const databaseTitle = block.get('prop:title');
+        if (databaseTitle instanceof YText) {
+          texts.push(databaseTitle.toString());
+        }
+        if (columnsObj instanceof YArray) {
+          for (const column of columnsObj) {
+            if (!(column instanceof YMap)) {
+              continue;
+            }
+            if (typeof column.get('name') === 'string') {
+              texts.push(column.get('name'));
+            }
+            const data = column.get('data');
+            if (!(data instanceof YMap)) {
+              continue;
+            }
+            const options = data.get('options');
+            if (!(options instanceof YArray)) {
+              continue;
+            }
+            for (const option of options) {
+              if (!(option instanceof YMap)) {
+                continue;
+              }
+              const value = option.get('value');
+              if (typeof value === 'string') {
+                texts.push(value);
+              }
+            }
+          }
+        }
       }
@@ -289,8 +352,9 @@ async function crawlingDocData({
       addedDoc: [
         {
           id: docId,
-          doc: Document.from<typeof docIndexSchema>(docId, {
+          doc: Document.from<DocIndexSchema>(docId, {
             title: docTitle,
+            summary,
           }),
           blocks: blockDocuments,
         },
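
The crawler no longer visits blocks in arbitrary map order: it seeds a work list with the affine:page root, pushes children in reverse, and pops from the end, which yields a pre-order, document-order traversal (the visited set guards against reference cycles). That ordering is what lets the first ~1000 characters collected into summary come from the top of the document. A minimal sketch of the ordering (tree values are illustrative):

  // Hypothetical tree: reverse-push + pop() visits nodes in document order.
  const childrenOf: Record<string, string[]> = { root: ['a', 'b'], a: ['a1'], a1: [], b: [] };
  const order: string[] = [];
  const stack = ['root'];
  while (stack.length) {
    const id = stack.pop() as string;
    order.push(id);
    const kids = childrenOf[id] ?? [];
    for (let i = kids.length - 1; i >= 0; i--) {
      stack.push(kids[i]);
    }
  }
  // order: ['root', 'a', 'a1', 'b']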

View File

@@ -22,6 +22,7 @@ import {
   useServices,
 } from '@toeverything/infra';
 import { useCallback, useLayoutEffect, useMemo, useState } from 'react';
+import { NEVER } from 'rxjs';

 import { ExplorerTreeNode, type ExplorerTreeNodeDropEffect } from '../../tree';
 import type { GenericExplorerNode } from '../types';
@@ -82,10 +83,15 @@ export const ExplorerDocNode = ({
   const children = useLiveData(
     useMemo(
-      () => LiveData.from(docsSearchService.watchRefsFrom(docId), null),
-      [docsSearchService, docId]
+      () =>
+        LiveData.from(
+          !collapsed ? docsSearchService.watchRefsFrom(docId) : NEVER,
+          null
+        ),
+      [docsSearchService, docId, collapsed]
     )
   );
+  const searching = children === null;

   const indexerLoading = useLiveData(
     docsSearchService.indexer.status$.map(
@@ -231,7 +237,9 @@ export const ExplorerDocNode = ({
         }
         reorderable={reorderable}
         onRename={handleRename}
-        childrenPlaceholder={<Empty onDrop={handleDropOnPlaceholder} />}
+        childrenPlaceholder={
+          searching ? null : <Empty onDrop={handleDropOnPlaceholder} />
+        }
         operations={finalOperations}
         dropEffect={handleDropEffectOnDoc}
         data-testid={`explorer-doc-${docId}`}