feat(native): native reader for indexer (#14055)

DarkSky
2025-12-07 16:22:11 +08:00
committed by GitHub
parent 69cdeedc4e
commit cf4e37c584
28 changed files with 1376 additions and 569 deletions

Cargo.lock (generated)

@@ -125,18 +125,23 @@ dependencies = [
   "affine_media_capture",
   "affine_nbstore",
   "affine_sqlite_v1",
+  "chrono",
   "napi",
   "napi-build",
   "napi-derive",
   "once_cell",
+  "serde_json",
   "sqlx",
+  "thiserror 2.0.12",
   "tokio",
+  "uuid",
 ]

 [[package]]
 name = "affine_nbstore"
 version = "0.0.0"
 dependencies = [
+  "affine_common",
   "affine_schema",
   "anyhow",
   "chrono",
@@ -144,10 +149,14 @@ dependencies = [
   "napi",
   "napi-build",
   "napi-derive",
+  "serde",
+  "serde_json",
   "sqlx",
   "thiserror 2.0.12",
   "tokio",
   "uniffi",
+  "uuid",
+  "y-octo",
 ]

 [[package]]


@@ -7,7 +7,7 @@ version = "1.0.0"
crate-type = ["cdylib"] crate-type = ["cdylib"]
[dependencies] [dependencies]
affine_common = { workspace = true, features = ["doc-loader"] } affine_common = { workspace = true, features = ["doc-loader", "hashcash"] }
chrono = { workspace = true } chrono = { workspace = true }
file-format = { workspace = true } file-format = { workspace = true }
infer = { workspace = true } infer = { workspace = true }


@@ -4,7 +4,7 @@ name = "affine_common"
version = "0.1.0" version = "0.1.0"
[features] [features]
default = ["hashcash"] default = []
doc-loader = [ doc-loader = [
"docx-parser", "docx-parser",
"infer", "infer",
@@ -35,7 +35,7 @@ tree-sitter = [
"dep:tree-sitter-scala", "dep:tree-sitter-scala",
"dep:tree-sitter-typescript", "dep:tree-sitter-typescript",
] ]
ydoc-loader = ["assert-json-diff", "y-octo"] ydoc-loader = ["assert-json-diff", "serde", "serde_json", "thiserror", "y-octo"]
[dependencies] [dependencies]
chrono = { workspace = true } chrono = { workspace = true }


@@ -17,14 +17,6 @@ const BOOKMARK_FLAVOURS: [&str; 5] = [
"affine:embed-loom", "affine:embed-loom",
]; ];
#[derive(Debug, Clone)]
pub struct CrawlDocInput {
pub doc_bin: Vec<u8>,
pub root_doc_bin: Option<Vec<u8>>,
pub space_id: String,
pub doc_id: String,
}
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BlockInfo { pub struct BlockInfo {
pub block_id: String, pub block_id: String,
@@ -87,15 +79,8 @@ impl From<JwstCodecError> for ParseError {
} }
} }
pub fn parse_doc_from_binary(input: CrawlDocInput) -> Result<CrawlResult, ParseError> { pub fn parse_doc_from_binary(doc_bin: Vec<u8>, doc_id: String) -> Result<CrawlResult, ParseError> {
let CrawlDocInput { if doc_bin.is_empty() || doc_bin == [0, 0] {
doc_bin,
root_doc_bin: _,
space_id: _,
doc_id,
} = input;
if doc_bin.is_empty() {
return Err(ParseError::InvalidBinary); return Err(ParseError::InvalidBinary);
} }
@@ -509,14 +494,10 @@ mod tests {
#[test] #[test]
fn test_parse_doc_from_binary() { fn test_parse_doc_from_binary() {
let json = include_bytes!("../fixtures/demo.ydoc.json"); let json = include_bytes!("../fixtures/demo.ydoc.json");
let input = CrawlDocInput { let doc_bin = include_bytes!("../fixtures/demo.ydoc").to_vec();
doc_bin: include_bytes!("../fixtures/demo.ydoc").to_vec(), let doc_id = "dYpV7PPhk8amRkY5IAcVO".to_string();
root_doc_bin: None,
space_id: "o9WCLGyxkLxdULZ-f2B9V".to_string(),
doc_id: "dYpV7PPhk8amRkY5IAcVO".to_string(),
};
let result = parse_doc_from_binary(input).unwrap(); let result = parse_doc_from_binary(doc_bin, doc_id).unwrap();
let config = assert_json_diff::Config::new(assert_json_diff::CompareMode::Strict) let config = assert_json_diff::Config::new(assert_json_diff::CompareMode::Strict)
.numeric_mode(assert_json_diff::NumericMode::AssumeFloat); .numeric_mode(assert_json_diff::NumericMode::AssumeFloat);
assert_json_diff::assert_json_matches!( assert_json_diff::assert_json_matches!(


@@ -139,4 +139,8 @@ export class CloudIndexerStorage extends IndexerStorageBase {
   override refresh<T extends keyof IndexerSchema>(_table: T): Promise<void> {
     return Promise.resolve();
   }
+
+  override async refreshIfNeed(): Promise<void> {
+    return Promise.resolve();
+  }
 }


@@ -176,6 +176,21 @@ export class IndexedDBIndexerStorage extends IndexerStorageBase {
     this.emitTableUpdated(table);
   }

+  override async refreshIfNeed(): Promise<void> {
+    const needRefreshTable = Object.entries(this.pendingUpdates)
+      .filter(
+        ([, updates]) =>
+          updates.deleteByQueries.length > 0 ||
+          updates.deletes.length > 0 ||
+          updates.inserts.length > 0 ||
+          updates.updates.length > 0
+      )
+      .map(([table]) => table as keyof IndexerSchema);
+    for (const table of needRefreshTable) {
+      await this.refresh(table);
+    }
+  }
+
   private watchTableUpdated(table: keyof IndexerSchema) {
     return new Observable(subscriber => {
       const listener = (ev: MessageEvent) => {


@@ -1,6 +1,7 @@
 import { AutoReconnectConnection } from '../../connection';
 import type {
   BlobRecord,
+  CrawlResult,
   DocClock,
   DocRecord,
   ListedBlobRecord,
@@ -81,6 +82,7 @@ export interface NativeDBApis {
     peer: string,
     blobId: string
   ) => Promise<Date | null>;
+  crawlDocData: (id: string, docId: string) => Promise<CrawlResult>;
 }

 type NativeDBApisWrapper = NativeDBApis extends infer APIs


@@ -1,5 +1,7 @@
 import { share } from '../../connection';
 import {
+  type BlockInfo,
+  type CrawlResult,
   type DocClocks,
   type DocRecord,
   DocStorageBase,
@@ -79,4 +81,127 @@ export class SqliteDocStorage extends DocStorageBase<SqliteNativeDBOptions> {
       updates.map(update => update.timestamp)
     );
   }
+
+  override async crawlDocData(docId: string): Promise<CrawlResult | null> {
+    const result = await this.db.crawlDocData(docId);
+    return normalizeNativeCrawlResult(result);
+  }
+}
+
+function normalizeNativeCrawlResult(result: unknown): CrawlResult | null {
+  if (!isRecord(result)) {
+    console.warn('[nbstore] crawlDocData returned non-object result');
+    return null;
+  }
+  if (
+    typeof result.title !== 'string' ||
+    typeof result.summary !== 'string' ||
+    !Array.isArray(result.blocks)
+  ) {
+    console.warn('[nbstore] crawlDocData result missing basic fields');
+    return null;
+  }
+  const { title, summary } = result as { title: string; summary: string };
+  const rawBlocks = result.blocks as unknown[];
+  const blocks: BlockInfo[] = [];
+  for (const block of rawBlocks) {
+    const normalized = normalizeBlock(block);
+    if (normalized) {
+      blocks.push(normalized);
+    }
+  }
+  if (blocks.length === 0) {
+    console.warn('[nbstore] crawlDocData has no valid blocks');
+    return null;
+  }
+  return {
+    blocks,
+    title,
+    summary,
+  };
+}
+
+function normalizeBlock(block: unknown): BlockInfo | null {
+  if (!isRecord(block)) {
+    return null;
+  }
+  const blockId = readStringField(block, 'blockId');
+  const flavour = readStringField(block, 'flavour');
+  if (!blockId || !flavour) {
+    return null;
+  }
+  return {
+    blockId,
+    flavour,
+    content: readStringArrayField(block, 'content'),
+    blob: readStringArrayField(block, 'blob'),
+    refDocId: readStringArrayField(block, 'refDocId'),
+    refInfo: readStringArrayField(block, 'refInfo'),
+    parentFlavour: readStringField(block, 'parentFlavour'),
+    parentBlockId: readStringField(block, 'parentBlockId'),
+    additional: safeAdditionalField(block),
+  };
+}
+
+function readStringField(
+  target: Record<string, unknown>,
+  key: string
+): string | undefined {
+  const value = readField(target, key);
+  return typeof value === 'string' && value ? value : undefined;
+}
+
+function readStringArrayField(
+  target: Record<string, unknown>,
+  key: string
+): string[] | undefined {
+  const value = readField(target, key);
+  if (Array.isArray(value)) {
+    const filtered = value.filter(
+      (item): item is string => typeof item === 'string' && item.length > 0
+    );
+    return filtered.length ? filtered : undefined;
+  }
+  if (typeof value === 'string' && value.length > 0) {
+    return [value];
+  }
+  return undefined;
+}
+
+function safeAdditionalField(
+  target: Record<string, unknown>
+): string | undefined {
+  const value = readField(target, 'additional');
+  if (typeof value !== 'string' || value.length === 0) {
+    return undefined;
+  }
+  try {
+    const parsed = JSON.parse(value);
+    return JSON.stringify(parsed);
+  } catch {
+    console.warn(
+      '[nbstore] ignore invalid additional payload in crawlDocData block'
+    );
+    return undefined;
+  }
+}
+
+function readField(target: Record<string, unknown>, key: string) {
+  return target[key] ?? target[toSnakeCase(key)];
+}
+
+function toSnakeCase(key: string) {
+  return key.replace(/[A-Z]/g, letter => `_${letter.toLowerCase()}`);
+}
+
+function isRecord(value: unknown): value is Record<string, unknown> {
+  return typeof value === 'object' && value !== null;
+}
 }
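A note on the helpers above: readField checks both the camelCase key and its snake_case twin because the crawl result crosses the napi boundary and the casing of that payload is not guaranteed. A minimal standalone sketch of the behaviour (inputs hypothetical, not part of this diff):

    // Both payload shapes resolve to the same value.
    readField({ blockId: 'b1' }, 'blockId'); // 'b1'
    readField({ block_id: 'b1' }, 'blockId'); // 'b1', via the toSnakeCase fallback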


@@ -7,6 +7,24 @@ import type { Locker } from './lock';
 import { SingletonLocker } from './lock';
 import { type Storage } from './storage';

+export interface BlockInfo {
+  blockId: string;
+  flavour: string;
+  content?: string[];
+  blob?: string[];
+  refDocId?: string[];
+  refInfo?: string[];
+  parentFlavour?: string;
+  parentBlockId?: string;
+  additional?: string;
+}
+
+export interface CrawlResult {
+  blocks: BlockInfo[];
+  title: string;
+  summary: string;
+}
+
 export interface DocClock {
   docId: string;
   timestamp: Date;
@@ -94,6 +112,8 @@ export interface DocStorage extends Storage {
   subscribeDocUpdate(
     callback: (update: DocRecord, origin?: string) => void
   ): () => void;
+
+  crawlDocData?(docId: string): Promise<CrawlResult | null>;
 }

 export abstract class DocStorageBase<Opts = {}> implements DocStorage {
@@ -174,6 +194,10 @@ export abstract class DocStorageBase<Opts = {}> implements DocStorage {
     };
   }

+  async crawlDocData(_docId: string): Promise<CrawlResult | null> {
+    return null;
+  }
+
   // REGION: api for internal usage
   protected on(
     event: 'update',


@@ -85,4 +85,7 @@ export class DummyIndexerStorage extends IndexerStorageBase {
   override refresh<T extends keyof IndexerSchema>(_table: T): Promise<void> {
     return Promise.resolve();
   }
+  override async refreshIfNeed(): Promise<void> {
+    return Promise.resolve();
+  }
 }


@@ -62,6 +62,7 @@ export interface IndexerStorage extends Storage {
   ): Promise<void>;

   refresh<T extends keyof IndexerSchema>(table: T): Promise<void>;
+  refreshIfNeed(): Promise<void>;
 }

 type ResultPagination = {
@@ -173,4 +174,6 @@ export abstract class IndexerStorageBase implements IndexerStorage {
   ): Promise<void>;

   abstract refresh<T extends keyof IndexerSchema>(table: T): Promise<void>;
+
+  abstract refreshIfNeed(): Promise<void>;
 }


@@ -117,6 +117,8 @@ export class IndexerSyncImpl implements IndexerSync {
   private readonly indexer: IndexerStorage;
   private readonly remote?: IndexerStorage;

+  private lastRefreshed = Date.now();
+
   state$ = this.status.state$.pipe(
     // throttle the state to 1 second to avoid spamming the UI
     throttleTime(1000, undefined, {
@@ -378,8 +380,7 @@ export class IndexerSyncImpl implements IndexerSync {
             this.status.statusUpdatedSubject$.next(docId);
           }
         }
-        await this.indexer.refresh('block');
-        await this.indexer.refresh('doc');
+        await this.refreshIfNeed();

         // #endregion
       } else {
         // #region crawl doc
@@ -407,33 +408,40 @@ export class IndexerSyncImpl implements IndexerSync {
            continue;
          }

-          const docBin = await this.doc.getDoc(docId);
-          if (!docBin) {
-            // doc is deleted, just skip
-            continue;
-          }
-
           console.log('[indexer] start indexing doc', docId);

-          const docYDoc = new YDoc({ guid: docId });
-          applyUpdate(docYDoc, docBin.bin);
-
           let blocks: IndexerDocument<'block'>[] = [];
           let preview: string | undefined;

-          try {
-            const result = await crawlingDocData({
-              ydoc: docYDoc,
-              rootYDoc: this.status.rootDoc,
-              spaceId: this.status.rootDocId,
-              docId,
-            });
-            if (!result) {
-              // doc is empty without root block, just skip
+          const nativeResult = await this.tryNativeCrawlDocData(docId);
+          if (nativeResult) {
+            blocks = nativeResult.block;
+            preview = nativeResult.summary;
+          } else {
+            const docBin = await this.doc.getDoc(docId);
+            if (!docBin) {
+              // doc is deleted, just skip
               continue;
             }
-            blocks = result.blocks;
-            preview = result.preview;
-          } catch (error) {
-            console.error('error crawling doc', error);
+
+            const docYDoc = new YDoc({ guid: docId });
+            applyUpdate(docYDoc, docBin.bin);
+
+            try {
+              const result = await crawlingDocData({
+                ydoc: docYDoc,
+                rootYDoc: this.status.rootDoc,
+                spaceId: this.status.rootDocId,
+                docId,
+              });
+              if (!result) {
+                // doc is empty without root block, just skip
+                continue;
+              }
+              blocks = result.blocks;
+              preview = result.preview;
+            } catch (error) {
+              console.error('error crawling doc', error);
+            }
           }

           await this.indexer.deleteByQuery('block', {
@@ -446,8 +454,6 @@ export class IndexerSyncImpl implements IndexerSync {
             await this.indexer.insert('block', block);
           }

-          await this.indexer.refresh('block');
-
           if (preview) {
             await this.indexer.update(
               'doc',
@@ -455,9 +461,10 @@ export class IndexerSyncImpl implements IndexerSync {
                 summary: preview,
               })
             );
-            await this.indexer.refresh('doc');
           }

+          await this.refreshIfNeed();
+
           await this.indexerSync.setDocIndexedClock({
             docId,
             timestamp: docClock.timestamp,
@@ -471,10 +478,19 @@ export class IndexerSyncImpl implements IndexerSync {
           this.status.completeJob();
         }
       }
     } finally {
+      await this.refreshIfNeed();
       unsubscribe();
     }
   }

+  private async refreshIfNeed(): Promise<void> {
+    if (this.lastRefreshed + 100 < Date.now()) {
+      console.log('[indexer] refreshing indexer');
+      await this.indexer.refreshIfNeed();
+      this.lastRefreshed = Date.now();
+    }
+  }
+
   /**
    * Get all docs from the root doc, without deleted docs
    */
@@ -484,6 +500,36 @@ export class IndexerSyncImpl implements IndexerSync {
     });
   }

+  private async tryNativeCrawlDocData(docId: string) {
+    try {
+      const result = await this.doc.crawlDocData?.(docId);
+      if (result) {
+        return {
+          title: result.title,
+          block: result.blocks.map(block =>
+            IndexerDocument.from<'block'>(`${docId}:${block.blockId}`, {
+              docId,
+              blockId: block.blockId,
+              content: block.content,
+              flavour: block.flavour,
+              blob: block.blob,
+              refDocId: block.refDocId,
+              ref: block.refInfo,
+              parentFlavour: block.parentFlavour,
+              parentBlockId: block.parentBlockId,
+              additional: block.additional,
+            })
+          ),
+          summary: result.summary,
+        };
+      }
+      return null;
+    } catch (error) {
+      console.warn('[indexer] native crawlDocData failed', docId, error);
+      return null;
+    }
+  }
+
   private async getAllDocsFromIndexer() {
     const docs = await this.indexer.search(
       'doc',
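The refreshIfNeed helper above caps index refreshes at roughly one per 100 ms during a crawl, replacing the per-document refresh('block')/refresh('doc') calls. A standalone sketch of the same throttle pattern (names hypothetical, assuming a generic async refresh callback):

    let lastRefreshed = Date.now();
    async function refreshIfNeed(refresh: () => Promise<void>) {
      // Most calls are no-ops; only refresh once 100 ms have passed.
      if (lastRefreshed + 100 < Date.now()) {
        await refresh();
        lastRefreshed = Date.now();
      }
    }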


@@ -778,6 +778,8 @@ internal interface UniffiForeignFutureCompleteVoid : com.sun.jna.Callback {
@@ -804,6 +806,8 @@ fun uniffi_affine_mobile_native_checksum_method_docstoragepool_clear_clocks(
 ): Short
 fun uniffi_affine_mobile_native_checksum_method_docstoragepool_connect(
 ): Short
+fun uniffi_affine_mobile_native_checksum_method_docstoragepool_crawl_doc_data(
+): Short
 fun uniffi_affine_mobile_native_checksum_method_docstoragepool_delete_blob(
 ): Short
 fun uniffi_affine_mobile_native_checksum_method_docstoragepool_delete_doc(
@@ -913,6 +917,8 @@ fun uniffi_affine_mobile_native_fn_method_docstoragepool_clear_clocks(`ptr`: Poi
 ): Long
 fun uniffi_affine_mobile_native_fn_method_docstoragepool_connect(`ptr`: Pointer,`universalId`: RustBuffer.ByValue,`path`: RustBuffer.ByValue,
 ): Long
+fun uniffi_affine_mobile_native_fn_method_docstoragepool_crawl_doc_data(`ptr`: Pointer,`universalId`: RustBuffer.ByValue,`docId`: RustBuffer.ByValue,
+): Long
 fun uniffi_affine_mobile_native_fn_method_docstoragepool_delete_blob(`ptr`: Pointer,`universalId`: RustBuffer.ByValue,`key`: RustBuffer.ByValue,`permanently`: Byte,
 ): Long
 fun uniffi_affine_mobile_native_fn_method_docstoragepool_delete_doc(`ptr`: Pointer,`universalId`: RustBuffer.ByValue,`docId`: RustBuffer.ByValue,
@@ -1107,6 +1113,9 @@ private fun uniffiCheckApiChecksums(lib: IntegrityCheckingUniffiLib) {
    if (lib.uniffi_affine_mobile_native_checksum_method_docstoragepool_connect() != 19047.toShort()) {
        throw RuntimeException("UniFFI API checksum mismatch: try cleaning and rebuilding your project")
    }
+    if (lib.uniffi_affine_mobile_native_checksum_method_docstoragepool_crawl_doc_data() != 36347.toShort()) {
+        throw RuntimeException("UniFFI API checksum mismatch: try cleaning and rebuilding your project")
+    }
    if (lib.uniffi_affine_mobile_native_checksum_method_docstoragepool_delete_blob() != 53695.toShort()) {
        throw RuntimeException("UniFFI API checksum mismatch: try cleaning and rebuilding your project")
    }
@@ -1602,6 +1611,8 @@ public interface DocStoragePoolInterface {
     */
    suspend fun `connect`(`universalId`: kotlin.String, `path`: kotlin.String)

+    suspend fun `crawlDocData`(`universalId`: kotlin.String, `docId`: kotlin.String): CrawlResult
+
    suspend fun `deleteBlob`(`universalId`: kotlin.String, `key`: kotlin.String, `permanently`: kotlin.Boolean)

    suspend fun `deleteDoc`(`universalId`: kotlin.String, `docId`: kotlin.String)
@@ -1787,6 +1798,27 @@ open class DocStoragePool: Disposable, AutoCloseable, DocStoragePoolInterface
    }

+    @Throws(UniffiException::class)
+    @Suppress("ASSIGNED_BUT_NEVER_ACCESSED_VARIABLE")
+    override suspend fun `crawlDocData`(`universalId`: kotlin.String, `docId`: kotlin.String) : CrawlResult {
+        return uniffiRustCallAsync(
+            callWithPointer { thisPtr ->
+                UniffiLib.INSTANCE.uniffi_affine_mobile_native_fn_method_docstoragepool_crawl_doc_data(
+                    thisPtr,
+                    FfiConverterString.lower(`universalId`),FfiConverterString.lower(`docId`),
+                )
+            },
+            { future, callback, continuation -> UniffiLib.INSTANCE.ffi_affine_mobile_native_rust_future_poll_rust_buffer(future, callback, continuation) },
+            { future, continuation -> UniffiLib.INSTANCE.ffi_affine_mobile_native_rust_future_complete_rust_buffer(future, continuation) },
+            { future -> UniffiLib.INSTANCE.ffi_affine_mobile_native_rust_future_free_rust_buffer(future) },
+            // lift function
+            { FfiConverterTypeCrawlResult.lift(it) },
+            // Error FFI converter
+            UniffiException.ErrorHandler,
+        )
+    }
+
    @Throws(UniffiException::class)
    @Suppress("ASSIGNED_BUT_NEVER_ACCESSED_VARIABLE")
    override suspend fun `deleteBlob`(`universalId`: kotlin.String, `key`: kotlin.String, `permanently`: kotlin.Boolean) {
@@ -2424,6 +2456,102 @@ public object FfiConverterTypeBlob: FfiConverterRustBuffer<Blob> {
data class BlockInfo (
var `blockId`: kotlin.String,
var `flavour`: kotlin.String,
var `content`: List<kotlin.String>?,
var `blob`: List<kotlin.String>?,
var `refDocId`: List<kotlin.String>?,
var `refInfo`: List<kotlin.String>?,
var `parentFlavour`: kotlin.String?,
var `parentBlockId`: kotlin.String?,
var `additional`: kotlin.String?
) {
companion object
}
/**
* @suppress
*/
public object FfiConverterTypeBlockInfo: FfiConverterRustBuffer<BlockInfo> {
override fun read(buf: ByteBuffer): BlockInfo {
return BlockInfo(
FfiConverterString.read(buf),
FfiConverterString.read(buf),
FfiConverterOptionalSequenceString.read(buf),
FfiConverterOptionalSequenceString.read(buf),
FfiConverterOptionalSequenceString.read(buf),
FfiConverterOptionalSequenceString.read(buf),
FfiConverterOptionalString.read(buf),
FfiConverterOptionalString.read(buf),
FfiConverterOptionalString.read(buf),
)
}
override fun allocationSize(value: BlockInfo) = (
FfiConverterString.allocationSize(value.`blockId`) +
FfiConverterString.allocationSize(value.`flavour`) +
FfiConverterOptionalSequenceString.allocationSize(value.`content`) +
FfiConverterOptionalSequenceString.allocationSize(value.`blob`) +
FfiConverterOptionalSequenceString.allocationSize(value.`refDocId`) +
FfiConverterOptionalSequenceString.allocationSize(value.`refInfo`) +
FfiConverterOptionalString.allocationSize(value.`parentFlavour`) +
FfiConverterOptionalString.allocationSize(value.`parentBlockId`) +
FfiConverterOptionalString.allocationSize(value.`additional`)
)
override fun write(value: BlockInfo, buf: ByteBuffer) {
FfiConverterString.write(value.`blockId`, buf)
FfiConverterString.write(value.`flavour`, buf)
FfiConverterOptionalSequenceString.write(value.`content`, buf)
FfiConverterOptionalSequenceString.write(value.`blob`, buf)
FfiConverterOptionalSequenceString.write(value.`refDocId`, buf)
FfiConverterOptionalSequenceString.write(value.`refInfo`, buf)
FfiConverterOptionalString.write(value.`parentFlavour`, buf)
FfiConverterOptionalString.write(value.`parentBlockId`, buf)
FfiConverterOptionalString.write(value.`additional`, buf)
}
}
data class CrawlResult (
var `blocks`: List<BlockInfo>,
var `title`: kotlin.String,
var `summary`: kotlin.String
) {
companion object
}
/**
* @suppress
*/
public object FfiConverterTypeCrawlResult: FfiConverterRustBuffer<CrawlResult> {
override fun read(buf: ByteBuffer): CrawlResult {
return CrawlResult(
FfiConverterSequenceTypeBlockInfo.read(buf),
FfiConverterString.read(buf),
FfiConverterString.read(buf),
)
}
override fun allocationSize(value: CrawlResult) = (
FfiConverterSequenceTypeBlockInfo.allocationSize(value.`blocks`) +
FfiConverterString.allocationSize(value.`title`) +
FfiConverterString.allocationSize(value.`summary`)
)
override fun write(value: CrawlResult, buf: ByteBuffer) {
FfiConverterSequenceTypeBlockInfo.write(value.`blocks`, buf)
FfiConverterString.write(value.`title`, buf)
FfiConverterString.write(value.`summary`, buf)
}
}
data class DocClock ( data class DocClock (
var `docId`: kotlin.String, var `docId`: kotlin.String,
var `timestamp`: kotlin.Long var `timestamp`: kotlin.Long
@@ -2732,6 +2860,38 @@ public object FfiConverterOptionalLong: FfiConverterRustBuffer<kotlin.Long?> {
+/**
+ * @suppress
+ */
+public object FfiConverterOptionalString: FfiConverterRustBuffer<kotlin.String?> {
+    override fun read(buf: ByteBuffer): kotlin.String? {
+        if (buf.get().toInt() == 0) {
+            return null
+        }
+        return FfiConverterString.read(buf)
+    }
+
+    override fun allocationSize(value: kotlin.String?): ULong {
+        if (value == null) {
+            return 1UL
+        } else {
+            return 1UL + FfiConverterString.allocationSize(value)
+        }
+    }
+
+    override fun write(value: kotlin.String?, buf: ByteBuffer) {
+        if (value == null) {
+            buf.put(0)
+        } else {
+            buf.put(1)
+            FfiConverterString.write(value, buf)
+        }
+    }
+}
+
 /**
 * @suppress
 */
@@ -2828,6 +2988,38 @@ public object FfiConverterOptionalTypeDocRecord: FfiConverterRustBuffer<DocRecor
+/**
+ * @suppress
+ */
+public object FfiConverterOptionalSequenceString: FfiConverterRustBuffer<List<kotlin.String>?> {
+    override fun read(buf: ByteBuffer): List<kotlin.String>? {
+        if (buf.get().toInt() == 0) {
+            return null
+        }
+        return FfiConverterSequenceString.read(buf)
+    }
+
+    override fun allocationSize(value: List<kotlin.String>?): ULong {
+        if (value == null) {
+            return 1UL
+        } else {
+            return 1UL + FfiConverterSequenceString.allocationSize(value)
+        }
+    }
+
+    override fun write(value: List<kotlin.String>?, buf: ByteBuffer) {
+        if (value == null) {
+            buf.put(0)
+        } else {
+            buf.put(1)
+            FfiConverterSequenceString.write(value, buf)
+        }
+    }
+}
+
 /**
 * @suppress
 */
@@ -2856,6 +3048,62 @@ public object FfiConverterSequenceLong: FfiConverterRustBuffer<List<kotlin.Long>
+/**
+ * @suppress
+ */
+public object FfiConverterSequenceString: FfiConverterRustBuffer<List<kotlin.String>> {
+    override fun read(buf: ByteBuffer): List<kotlin.String> {
+        val len = buf.getInt()
+        return List<kotlin.String>(len) {
+            FfiConverterString.read(buf)
+        }
+    }
+
+    override fun allocationSize(value: List<kotlin.String>): ULong {
+        val sizeForLength = 4UL
+        val sizeForItems = value.map { FfiConverterString.allocationSize(it) }.sum()
+        return sizeForLength + sizeForItems
+    }
+
+    override fun write(value: List<kotlin.String>, buf: ByteBuffer) {
+        buf.putInt(value.size)
+        value.iterator().forEach {
+            FfiConverterString.write(it, buf)
+        }
+    }
+}
+
+/**
+ * @suppress
+ */
+public object FfiConverterSequenceTypeBlockInfo: FfiConverterRustBuffer<List<BlockInfo>> {
+    override fun read(buf: ByteBuffer): List<BlockInfo> {
+        val len = buf.getInt()
+        return List<BlockInfo>(len) {
+            FfiConverterTypeBlockInfo.read(buf)
+        }
+    }
+
+    override fun allocationSize(value: List<BlockInfo>): ULong {
+        val sizeForLength = 4UL
+        val sizeForItems = value.map { FfiConverterTypeBlockInfo.allocationSize(it) }.sum()
+        return sizeForLength + sizeForItems
+    }
+
+    override fun write(value: List<BlockInfo>, buf: ByteBuffer) {
+        buf.putInt(value.size)
+        value.iterator().forEach {
+            FfiConverterTypeBlockInfo.write(it, buf)
+        }
+    }
+}
+
 /**
 * @suppress
 */


@@ -1,3 +1,5 @@
+import type { CrawlResult } from '@affine/nbstore';
+
 export interface Blob {
   key: string;
   // base64 encoded data
@@ -149,4 +151,8 @@ export interface NbStorePlugin {
     uploadedAt: number | null;
   }) => Promise<void>;
   clearClocks: (options: { id: string }) => Promise<void>;
+  crawlDocData: (options: {
+    id: string;
+    docId: string;
+  }) => Promise<CrawlResult>;
 }


@@ -336,4 +336,7 @@ export const NbStoreNativeDBApis: NativeDBApis = {
       uploadedAt: uploadedAt ? uploadedAt.getTime() : null,
     });
   },
+  crawlDocData: async function (id: string, docId: string) {
+    return NbStore.crawlDocData({ id, docId });
+  },
 };


@@ -47,4 +47,5 @@ export const nbstoreHandlers: NativeDBApis = {
   clearClocks: POOL.clearClocks.bind(POOL),
   setBlobUploadedAt: POOL.setBlobUploadedAt.bind(POOL),
   getBlobUploadedAt: POOL.getBlobUploadedAt.bind(POOL),
+  crawlDocData: POOL.crawlDocData.bind(POOL),
 };
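For orientation, a hypothetical call through this handler map (the call site itself is not part of this diff; id is the universal storage id):

    const result = await nbstoreHandlers.crawlDocData(id, docId);
    console.log(result.title, result.blocks.length);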


@@ -1,3 +1,5 @@
+import type { CrawlResult } from '@affine/nbstore';
+
 export interface Blob {
   key: string;
   // base64 encoded data
@@ -149,4 +151,8 @@ export interface NbStorePlugin {
     uploadedAt: number | null;
   }) => Promise<void>;
   clearClocks: (options: { id: string }) => Promise<void>;
+  crawlDocData: (options: {
+    id: string;
+    docId: string;
+  }) => Promise<CrawlResult>;
 }


@@ -4,6 +4,7 @@ import {
 } from '@affine/core/modules/workspace-engine';
 import {
   type BlobRecord,
+  type CrawlResult,
   type DocClock,
   type DocRecord,
   type ListedBlobRecord,
@@ -336,4 +337,10 @@ export const NbStoreNativeDBApis: NativeDBApis = {
       uploadedAt: uploadedAt ? uploadedAt.getTime() : null,
     });
   },
+  crawlDocData: async function (
+    id: string,
+    docId: string
+  ): Promise<CrawlResult> {
+    return await NbStore.crawlDocData({ id, docId });
+  },
 };


@@ -15,7 +15,7 @@ path = "uniffi-bindgen.rs"
use-as-lib = ["affine_nbstore/use-as-lib"] use-as-lib = ["affine_nbstore/use-as-lib"]
[dependencies] [dependencies]
affine_common = { workspace = true } affine_common = { workspace = true, features = ["hashcash"] }
affine_nbstore = { workspace = true } affine_nbstore = { workspace = true }
anyhow = { workspace = true } anyhow = { workspace = true }
base64-simd = { workspace = true } base64-simd = { workspace = true }


@@ -183,6 +183,52 @@ impl From<affine_nbstore::ListedBlob> for ListedBlob {
   }
 }

+#[derive(uniffi::Record)]
+pub struct BlockInfo {
+  pub block_id: String,
+  pub flavour: String,
+  pub content: Option<Vec<String>>,
+  pub blob: Option<Vec<String>>,
+  pub ref_doc_id: Option<Vec<String>>,
+  pub ref_info: Option<Vec<String>>,
+  pub parent_flavour: Option<String>,
+  pub parent_block_id: Option<String>,
+  pub additional: Option<String>,
+}
+
+impl From<affine_nbstore::indexer::NativeBlockInfo> for BlockInfo {
+  fn from(value: affine_nbstore::indexer::NativeBlockInfo) -> Self {
+    Self {
+      block_id: value.block_id,
+      flavour: value.flavour,
+      content: value.content,
+      blob: value.blob,
+      ref_doc_id: value.ref_doc_id,
+      ref_info: value.ref_info,
+      parent_flavour: value.parent_flavour,
+      parent_block_id: value.parent_block_id,
+      additional: value.additional,
+    }
+  }
+}
+
+#[derive(uniffi::Record)]
+pub struct CrawlResult {
+  pub blocks: Vec<BlockInfo>,
+  pub title: String,
+  pub summary: String,
+}
+
+impl From<affine_nbstore::indexer::NativeCrawlResult> for CrawlResult {
+  fn from(value: affine_nbstore::indexer::NativeCrawlResult) -> Self {
+    Self {
+      blocks: value.blocks.into_iter().map(Into::into).collect(),
+      title: value.title,
+      summary: value.summary,
+    }
+  }
+}
+
 #[derive(uniffi::Object)]
 pub struct DocStoragePool {
   inner: SqliteDocStoragePool,
@@ -643,4 +689,14 @@ impl DocStoragePool {
         .map(|t| t.and_utc().timestamp_millis()),
     )
   }
+
+  pub async fn crawl_doc_data(&self, universal_id: String, doc_id: String) -> Result<CrawlResult> {
+    let result = self
+      .inner
+      .get(universal_id.clone())
+      .await?
+      .crawl_doc_data(&doc_id)
+      .await?;
+    Ok(result.into())
+  }
 }


@@ -7,7 +7,7 @@ version = "0.0.0"
crate-type = ["cdylib", "rlib"] crate-type = ["cdylib", "rlib"]
[dependencies] [dependencies]
affine_common = { workspace = true } affine_common = { workspace = true, features = ["hashcash"] }
affine_media_capture = { path = "./media_capture" } affine_media_capture = { path = "./media_capture" }
affine_nbstore = { path = "./nbstore" } affine_nbstore = { path = "./nbstore" }
affine_sqlite_v1 = { path = "./sqlite_v1" } affine_sqlite_v1 = { path = "./sqlite_v1" }
@@ -22,8 +22,14 @@ sqlx = { workspace = true, default-features = false, features = [
"sqlite", "sqlite",
"tls-rustls", "tls-rustls",
] } ] }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["full"] } tokio = { workspace = true, features = ["full"] }
[dev-dependencies]
chrono = { workspace = true }
serde_json = { workspace = true }
uuid = { workspace = true }
[build-dependencies] [build-dependencies]
napi-build = { workspace = true } napi-build = { workspace = true }
sqlx = { workspace = true, default-features = false, features = [ sqlx = { workspace = true, default-features = false, features = [


@@ -55,6 +55,7 @@ export declare class DocStoragePool {
   connect(universalId: string, path: string): Promise<void>
   disconnect(universalId: string): Promise<void>
   checkpoint(universalId: string): Promise<void>
+  crawlDocData(universalId: string, docId: string): Promise<NativeCrawlResult>
   setSpaceId(universalId: string, spaceId: string): Promise<void>
   pushUpdate(universalId: string, docId: string, update: Uint8Array): Promise<Date>
   getDocSnapshot(universalId: string, docId: string): Promise<DocRecord | null>
@@ -115,6 +116,24 @@ export interface ListedBlob {
   createdAt: Date
 }

+export interface NativeBlockInfo {
+  blockId: string
+  flavour: string
+  content?: Array<string>
+  blob?: Array<string>
+  refDocId?: Array<string>
+  refInfo?: Array<string>
+  parentFlavour?: string
+  parentBlockId?: string
+  additional?: string
+}
+
+export interface NativeCrawlResult {
+  blocks: Array<NativeBlockInfo>
+  title: string
+  summary: string
+}
+
 export interface SetBlob {
   key: string
   data: Uint8Array


@@ -10,11 +10,13 @@ crate-type = ["cdylib", "rlib"]
use-as-lib = ["napi-derive/noop", "napi/noop"] use-as-lib = ["napi-derive/noop", "napi/noop"]
[dependencies] [dependencies]
affine_common = { workspace = true, features = ["ydoc-loader"] }
affine_schema = { path = "../schema" } affine_schema = { path = "../schema" }
anyhow = { workspace = true } anyhow = { workspace = true }
chrono = { workspace = true } chrono = { workspace = true }
napi = { workspace = true } napi = { workspace = true }
napi-derive = { workspace = true } napi-derive = { workspace = true }
serde = { workspace = true, features = ["derive"] }
sqlx = { workspace = true, default-features = false, features = [ sqlx = { workspace = true, default-features = false, features = [
"chrono", "chrono",
"macros", "macros",
@@ -25,6 +27,7 @@ sqlx = { workspace = true, default-features = false, features = [
] } ] }
thiserror = { workspace = true } thiserror = { workspace = true }
tokio = { workspace = true, features = ["full"] } tokio = { workspace = true, features = ["full"] }
y-octo = { workspace = true }
[target.'cfg(any(target_os = "ios", target_os = "android"))'.dependencies] [target.'cfg(any(target_os = "ios", target_os = "android"))'.dependencies]
uniffi = { workspace = true } uniffi = { workspace = true }
@@ -42,3 +45,8 @@ sqlx = { workspace = true, default-features = false, features = [
"tls-rustls", "tls-rustls",
] } ] }
tokio = { workspace = true, features = ["full"] } tokio = { workspace = true, features = ["full"] }
[dev-dependencies]
serde_json = { workspace = true }
uuid = { workspace = true, features = ["v4"] }


@@ -103,7 +103,7 @@ impl SqliteDocStorage {
     sqlx::query(r#"INSERT INTO updates (doc_id, data, created_at) VALUES ($1, $2, $3);"#)
       .bind(doc_id)
-      .bind(update.as_ref())
+      .bind(update)
       .bind(timestamp)
       .execute(&mut *tx)
       .await?;
@@ -358,7 +358,7 @@ mod tests {
     assert_eq!(result.len(), 4);
     assert_eq!(
-      result.iter().map(|u| u.bin.as_ref()).collect::<Vec<_>>(),
+      result.iter().map(|u| u.bin.to_vec()).collect::<Vec<_>>(),
       updates
     );
   }
@@ -382,7 +382,7 @@ mod tests {
     let result = storage.get_doc_snapshot("test".to_string()).await.unwrap();

     assert!(result.is_some());
-    assert_eq!(result.unwrap().bin.as_ref(), vec![0, 0]);
+    assert_eq!(result.unwrap().bin.to_vec(), vec![0, 0]);
   }

   #[tokio::test]
@@ -400,7 +400,7 @@ mod tests {
     let result = storage.get_doc_snapshot("test".to_string()).await.unwrap();

     assert!(result.is_some());
-    assert_eq!(result.unwrap().bin.as_ref(), vec![0, 0]);
+    assert_eq!(result.unwrap().bin.to_vec(), vec![0, 0]);

     let snapshot = DocRecord {
       doc_id: "test".to_string(),
@@ -416,7 +416,7 @@ mod tests {
     let result = storage.get_doc_snapshot("test".to_string()).await.unwrap();

     assert!(result.is_some());
-    assert_eq!(result.unwrap().bin.as_ref(), vec![0, 0]);
+    assert_eq!(result.unwrap().bin.to_vec(), vec![0, 0]);
   }

   #[tokio::test]


@@ -1,3 +1,5 @@
+use affine_common::doc_parser::ParseError;
+
 pub type Result<T> = std::result::Result<T, Error>;

 #[derive(Debug, thiserror::Error)]
@@ -8,4 +10,6 @@ pub enum Error {
   MigrateError(#[from] sqlx::migrate::MigrateError),
   #[error("Invalid operation")]
   InvalidOperation,
+  #[error(transparent)]
+  Parse(#[from] ParseError),
 }


@@ -0,0 +1,180 @@
use affine_common::doc_parser::{parse_doc_from_binary, BlockInfo, CrawlResult, ParseError};
use napi_derive::napi;
use serde::Serialize;
use y_octo::DocOptions;
use super::{error::Result, storage::SqliteDocStorage};
#[napi(object)]
#[derive(Debug, Serialize)]
pub struct NativeBlockInfo {
pub block_id: String,
pub flavour: String,
pub content: Option<Vec<String>>,
pub blob: Option<Vec<String>>,
pub ref_doc_id: Option<Vec<String>>,
pub ref_info: Option<Vec<String>>,
pub parent_flavour: Option<String>,
pub parent_block_id: Option<String>,
pub additional: Option<String>,
}
#[napi(object)]
#[derive(Debug, Serialize)]
pub struct NativeCrawlResult {
pub blocks: Vec<NativeBlockInfo>,
pub title: String,
pub summary: String,
}
impl From<BlockInfo> for NativeBlockInfo {
fn from(value: BlockInfo) -> Self {
Self {
block_id: value.block_id,
flavour: value.flavour,
content: value.content,
blob: value.blob,
ref_doc_id: value.ref_doc_id,
ref_info: value.ref_info,
parent_flavour: value.parent_flavour,
parent_block_id: value.parent_block_id,
additional: value.additional,
}
}
}
impl From<CrawlResult> for NativeCrawlResult {
fn from(value: CrawlResult) -> Self {
Self {
blocks: value.blocks.into_iter().map(Into::into).collect(),
title: value.title,
summary: value.summary,
}
}
}
impl SqliteDocStorage {
pub async fn crawl_doc_data(&self, doc_id: &str) -> Result<NativeCrawlResult> {
let doc_bin = self
.load_doc_binary(doc_id)
.await?
.ok_or(ParseError::DocNotFound)?;
let result = parse_doc_from_binary(doc_bin, doc_id.to_string())?;
Ok(result.into())
}
async fn load_doc_binary(&self, doc_id: &str) -> Result<Option<Vec<u8>>> {
let snapshot = self.get_doc_snapshot(doc_id.to_string()).await?;
let mut updates = self.get_doc_updates(doc_id.to_string()).await?;
if snapshot.is_none() && updates.is_empty() {
return Ok(None);
}
updates.sort_by(|a, b| a.timestamp.cmp(&b.timestamp));
let mut segments =
Vec::with_capacity(snapshot.as_ref().map(|_| 1).unwrap_or(0) + updates.len());
if let Some(record) = snapshot {
segments.push(record.bin.to_vec());
}
segments.extend(updates.into_iter().map(|update| update.bin.to_vec()));
merge_updates(segments, doc_id).map(Some)
}
}
fn merge_updates(mut segments: Vec<Vec<u8>>, guid: &str) -> Result<Vec<u8>> {
if segments.is_empty() {
return Err(ParseError::DocNotFound.into());
}
if segments.len() == 1 {
return segments.pop().ok_or(ParseError::DocNotFound.into());
}
let mut doc = DocOptions::new().with_guid(guid.to_string()).build();
for update in segments.iter() {
doc
.apply_update_from_binary_v1(update)
.map_err(|_| ParseError::InvalidBinary)?;
}
let buffer = doc
.encode_update_v1()
.map_err(|err| ParseError::ParserError(err.to_string()))?;
Ok(buffer)
}
#[cfg(test)]
mod tests {
use std::path::{Path, PathBuf};
use affine_common::doc_parser::ParseError;
use chrono::Utc;
use serde_json::Value;
use tokio::fs;
use uuid::Uuid;
use super::{super::error::Error, *};
const DEMO_BIN: &[u8] = include_bytes!("../../../../common/native/fixtures/demo.ydoc");
const DEMO_JSON: &[u8] = include_bytes!("../../../../common/native/fixtures/demo.ydoc.json");
fn temp_workspace_dir() -> PathBuf {
std::env::temp_dir().join(format!("affine-native-{}", Uuid::new_v4()))
}
async fn init_db(path: &Path) -> SqliteDocStorage {
fs::create_dir_all(path.parent().unwrap()).await.unwrap();
let storage = SqliteDocStorage::new(path.to_string_lossy().into_owned());
storage.connect().await.unwrap();
storage
}
async fn cleanup(path: &Path) {
let _ = fs::remove_dir_all(path.parent().unwrap()).await;
}
#[tokio::test]
async fn parse_demo_snapshot_matches_fixture() {
let base = temp_workspace_dir();
fs::create_dir_all(&base).await.unwrap();
let db_path = base.join("storage.db");
let storage = init_db(&db_path).await;
sqlx::query(r#"INSERT INTO snapshots (doc_id, data, updated_at) VALUES (?, ?, ?)"#)
.bind("demo-doc")
.bind(DEMO_BIN)
.bind(Utc::now().naive_utc())
.execute(&storage.pool)
.await
.unwrap();
let result = storage.crawl_doc_data("demo-doc").await.unwrap();
let expected: Value = serde_json::from_slice(DEMO_JSON).unwrap();
let actual = serde_json::to_value(&result).unwrap();
assert_eq!(expected, actual);
storage.close().await;
cleanup(&db_path).await;
}
#[tokio::test]
async fn missing_doc_returns_error() {
let base = temp_workspace_dir();
fs::create_dir_all(&base).await.unwrap();
let db_path = base.join("storage.db");
let storage = init_db(&db_path).await;
let err = storage.crawl_doc_data("absent-doc").await.unwrap_err();
assert!(matches!(err, Error::Parse(ParseError::DocNotFound)));
storage.close().await;
cleanup(&db_path).await;
}
}


@@ -3,6 +3,7 @@ pub mod blob_sync;
 pub mod doc;
 pub mod doc_sync;
 pub mod error;
+pub mod indexer;
 pub mod pool;
 pub mod storage;
@@ -117,6 +118,20 @@ impl DocStoragePool {
     Ok(())
   }

+  #[napi]
+  pub async fn crawl_doc_data(
+    &self,
+    universal_id: String,
+    doc_id: String,
+  ) -> Result<indexer::NativeCrawlResult> {
+    let result = self
+      .get(universal_id)
+      .await?
+      .crawl_doc_data(&doc_id)
+      .await?;
+    Ok(result)
+  }
+
   #[napi]
   pub async fn set_space_id(&self, universal_id: String, space_id: String) -> Result<()> {
     self.get(universal_id).await?.set_space_id(space_id).await?;