diff --git a/Cargo.lock b/Cargo.lock index c472c8f0a1..557c2ef60e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -125,18 +125,23 @@ dependencies = [ "affine_media_capture", "affine_nbstore", "affine_sqlite_v1", + "chrono", "napi", "napi-build", "napi-derive", "once_cell", + "serde_json", "sqlx", + "thiserror 2.0.12", "tokio", + "uuid", ] [[package]] name = "affine_nbstore" version = "0.0.0" dependencies = [ + "affine_common", "affine_schema", "anyhow", "chrono", @@ -144,10 +149,14 @@ "napi", "napi-build", "napi-derive", + "serde", + "serde_json", "sqlx", "thiserror 2.0.12", "tokio", "uniffi", + "uuid", + "y-octo", ] [[package]] diff --git a/packages/backend/native/Cargo.toml b/packages/backend/native/Cargo.toml index b0eb4d03f2..9ba4202fe9 100644 --- a/packages/backend/native/Cargo.toml +++ b/packages/backend/native/Cargo.toml @@ -7,7 +7,7 @@ version = "1.0.0" crate-type = ["cdylib"] [dependencies] -affine_common = { workspace = true, features = ["doc-loader"] } +affine_common = { workspace = true, features = ["doc-loader", "hashcash"] } chrono = { workspace = true } file-format = { workspace = true } infer = { workspace = true } diff --git a/packages/common/native/Cargo.toml b/packages/common/native/Cargo.toml index e83fdaa36d..507f9def77 100644 --- a/packages/common/native/Cargo.toml +++ b/packages/common/native/Cargo.toml @@ -4,7 +4,7 @@ name = "affine_common" version = "0.1.0" [features] -default = ["hashcash"] +default = [] doc-loader = [ "docx-parser", "infer", @@ -35,7 +35,7 @@ tree-sitter = [ "dep:tree-sitter-scala", "dep:tree-sitter-typescript", ] -ydoc-loader = ["assert-json-diff", "y-octo"] +ydoc-loader = ["assert-json-diff", "serde", "serde_json", "thiserror", "y-octo"] [dependencies] chrono = { workspace = true } diff --git a/packages/common/native/src/doc_parser.rs b/packages/common/native/src/doc_parser.rs index e81c23bacf..35ff68a397 100644 --- a/packages/common/native/src/doc_parser.rs +++ b/packages/common/native/src/doc_parser.rs @@ -17,14 +17,6 @@ const BOOKMARK_FLAVOURS: [&str; 5] = [ "affine:embed-loom", ]; -#[derive(Debug, Clone)] -pub struct CrawlDocInput { - pub doc_bin: Vec<u8>, - pub root_doc_bin: Option<Vec<u8>>, - pub space_id: String, - pub doc_id: String, -} - #[derive(Debug, Clone, Serialize, Deserialize)] pub struct BlockInfo { pub block_id: String, @@ -87,15 +79,8 @@ impl From for ParseError { } } -pub fn parse_doc_from_binary(input: CrawlDocInput) -> Result<CrawlResult, ParseError> { - let CrawlDocInput { - doc_bin, - root_doc_bin: _, - space_id: _, - doc_id, - } = input; - - if doc_bin.is_empty() { +pub fn parse_doc_from_binary(doc_bin: Vec<u8>, doc_id: String) -> Result<CrawlResult, ParseError> { + if doc_bin.is_empty() || doc_bin == [0, 0] { return Err(ParseError::InvalidBinary); } @@ -509,14 +494,10 @@ mod tests { #[test] fn test_parse_doc_from_binary() { let json = include_bytes!("../fixtures/demo.ydoc.json"); - let input = CrawlDocInput { - doc_bin: include_bytes!("../fixtures/demo.ydoc").to_vec(), - root_doc_bin: None, - space_id: "o9WCLGyxkLxdULZ-f2B9V".to_string(), - doc_id: "dYpV7PPhk8amRkY5IAcVO".to_string(), - }; + let doc_bin = include_bytes!("../fixtures/demo.ydoc").to_vec(); + let doc_id = "dYpV7PPhk8amRkY5IAcVO".to_string(); - let result = parse_doc_from_binary(input).unwrap(); + let result = parse_doc_from_binary(doc_bin, doc_id).unwrap(); let config = assert_json_diff::Config::new(assert_json_diff::CompareMode::Strict) .numeric_mode(assert_json_diff::NumericMode::AssumeFloat); assert_json_diff::assert_json_matches!( diff --git a/packages/common/nbstore/src/impls/cloud/indexer.ts 
b/packages/common/nbstore/src/impls/cloud/indexer.ts index 5ff20427af..fee60180fa 100644 --- a/packages/common/nbstore/src/impls/cloud/indexer.ts +++ b/packages/common/nbstore/src/impls/cloud/indexer.ts @@ -139,4 +139,8 @@ export class CloudIndexerStorage extends IndexerStorageBase { override refresh<T extends keyof IndexerSchema>(_table: T): Promise<void> { return Promise.resolve(); } + + override async refreshIfNeed(): Promise<void> { + return Promise.resolve(); + } } diff --git a/packages/common/nbstore/src/impls/idb/indexer/index.ts b/packages/common/nbstore/src/impls/idb/indexer/index.ts index 6b31666a68..3da48a5709 100644 --- a/packages/common/nbstore/src/impls/idb/indexer/index.ts +++ b/packages/common/nbstore/src/impls/idb/indexer/index.ts @@ -176,6 +176,21 @@ export class IndexedDBIndexerStorage extends IndexerStorageBase { this.emitTableUpdated(table); } + override async refreshIfNeed(): Promise<void> { + const needRefreshTable = Object.entries(this.pendingUpdates) + .filter( + ([, updates]) => + updates.deleteByQueries.length > 0 || + updates.deletes.length > 0 || + updates.inserts.length > 0 || + updates.updates.length > 0 + ) + .map(([table]) => table as keyof IndexerSchema); + for (const table of needRefreshTable) { + await this.refresh(table); + } + } + private watchTableUpdated(table: keyof IndexerSchema) { return new Observable(subscriber => { const listener = (ev: MessageEvent) => { diff --git a/packages/common/nbstore/src/impls/sqlite/db.ts b/packages/common/nbstore/src/impls/sqlite/db.ts index ed4f838ff8..f8d501ab50 100644 --- a/packages/common/nbstore/src/impls/sqlite/db.ts +++ b/packages/common/nbstore/src/impls/sqlite/db.ts @@ -1,6 +1,7 @@ import { AutoReconnectConnection } from '../../connection'; import type { BlobRecord, + CrawlResult, DocClock, DocRecord, ListedBlobRecord, @@ -81,6 +82,7 @@ peer: string, blobId: string ) => Promise<Date | null>; + crawlDocData: (id: string, docId: string) => Promise<CrawlResult | null>; } type NativeDBApisWrapper = NativeDBApis extends infer APIs diff --git a/packages/common/nbstore/src/impls/sqlite/doc.ts b/packages/common/nbstore/src/impls/sqlite/doc.ts index aa2e954e5c..cc2d1ea90a 100644 --- a/packages/common/nbstore/src/impls/sqlite/doc.ts +++ b/packages/common/nbstore/src/impls/sqlite/doc.ts @@ -1,5 +1,7 @@ import { share } from '../../connection'; import { + type BlockInfo, + type CrawlResult, type DocClocks, type DocRecord, DocStorageBase, @@ -79,4 +81,127 @@ export class SqliteDocStorage extends DocStorageBase { updates.map(update => update.timestamp) ); } + + override async crawlDocData(docId: string): Promise<CrawlResult | null> { + const result = await this.db.crawlDocData(docId); + return normalizeNativeCrawlResult(result); + } +} + +function normalizeNativeCrawlResult(result: unknown): CrawlResult | null { + if (!isRecord(result)) { + console.warn('[nbstore] crawlDocData returned non-object result'); + return null; + } + + if ( + typeof result.title !== 'string' || + typeof result.summary !== 'string' || + !Array.isArray(result.blocks) + ) { + console.warn('[nbstore] crawlDocData result missing basic fields'); + return null; + } + + const { title, summary } = result as { title: string; summary: string }; + const rawBlocks = result.blocks as unknown[]; + + const blocks: BlockInfo[] = []; + for (const block of rawBlocks) { + const normalized = normalizeBlock(block); + if (normalized) { + blocks.push(normalized); + } + } + + if (blocks.length === 0) { + console.warn('[nbstore] crawlDocData has no valid blocks'); + return null; + } + + return { + blocks, + title, + summary, + }; +} + 
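+// Note: the native bridge may surface block fields in camelCase or snake_case depending on the platform binding; readField() below checks both spellings, and blocks missing a blockId or flavour are dropped instead of failing the whole crawl result.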
+function normalizeBlock(block: unknown): BlockInfo | null { + if (!isRecord(block)) { + return null; + } + + const blockId = readStringField(block, 'blockId'); + const flavour = readStringField(block, 'flavour'); + + if (!blockId || !flavour) { + return null; + } + + return { + blockId, + flavour, + content: readStringArrayField(block, 'content'), + blob: readStringArrayField(block, 'blob'), + refDocId: readStringArrayField(block, 'refDocId'), + refInfo: readStringArrayField(block, 'refInfo'), + parentFlavour: readStringField(block, 'parentFlavour'), + parentBlockId: readStringField(block, 'parentBlockId'), + additional: safeAdditionalField(block), + }; +} + +function readStringField( + target: Record<string, unknown>, + key: string +): string | undefined { + const value = readField(target, key); + return typeof value === 'string' && value ? value : undefined; +} + +function readStringArrayField( + target: Record<string, unknown>, + key: string +): string[] | undefined { + const value = readField(target, key); + if (Array.isArray(value)) { + const filtered = value.filter( + (item): item is string => typeof item === 'string' && item.length > 0 + ); + return filtered.length ? filtered : undefined; + } + if (typeof value === 'string' && value.length > 0) { + return [value]; + } + return undefined; +} + +function safeAdditionalField( + target: Record<string, unknown> +): string | undefined { + const value = readField(target, 'additional'); + if (typeof value !== 'string' || value.length === 0) { + return undefined; + } + try { + const parsed = JSON.parse(value); + return JSON.stringify(parsed); + } catch { + console.warn( + '[nbstore] ignore invalid additional payload in crawlDocData block' + ); + return undefined; + } +} + +function readField(target: Record<string, unknown>, key: string) { + return target[key] ?? target[toSnakeCase(key)]; +} + +function toSnakeCase(key: string) { + return key.replace(/[A-Z]/g, letter => `_${letter.toLowerCase()}`); +} + +function isRecord(value: unknown): value is Record<string, unknown> { + return typeof value === 'object' && value !== null; } diff --git a/packages/common/nbstore/src/storage/doc.ts b/packages/common/nbstore/src/storage/doc.ts index b0ecec16a7..9c0d37f483 100644 --- a/packages/common/nbstore/src/storage/doc.ts +++ b/packages/common/nbstore/src/storage/doc.ts @@ -7,6 +7,24 @@ import type { Locker } from './lock'; import { SingletonLocker } from './lock'; import { type Storage } from './storage'; +export interface BlockInfo { + blockId: string; + flavour: string; + content?: string[]; + blob?: string[]; + refDocId?: string[]; + refInfo?: string[]; + parentFlavour?: string; + parentBlockId?: string; + additional?: string; +} + +export interface CrawlResult { + blocks: BlockInfo[]; + title: string; + summary: string; +} + export interface DocClock { docId: string; timestamp: Date; @@ -94,6 +112,8 @@ export interface DocStorage extends Storage { subscribeDocUpdate( callback: (update: DocRecord, origin?: string) => void ): () => void; + + crawlDocData?(docId: string): Promise<CrawlResult | null>; } export abstract class DocStorageBase implements DocStorage { @@ -174,6 +194,10 @@ }; } + async crawlDocData(_docId: string): Promise<CrawlResult | null> { + return null; + } + // REGION: api for internal usage protected on( event: 'update', diff --git a/packages/common/nbstore/src/storage/dummy/indexer.ts b/packages/common/nbstore/src/storage/dummy/indexer.ts index 958151c62a..5a22515b13 100644 --- a/packages/common/nbstore/src/storage/dummy/indexer.ts +++ b/packages/common/nbstore/src/storage/dummy/indexer.ts @@ 
-85,4 +85,7 @@ export class DummyIndexerStorage extends IndexerStorageBase { override refresh<T extends keyof IndexerSchema>(_table: T): Promise<void> { return Promise.resolve(); } + override async refreshIfNeed(): Promise<void> { + return Promise.resolve(); + } } diff --git a/packages/common/nbstore/src/storage/indexer.ts b/packages/common/nbstore/src/storage/indexer.ts index 6b9de86db4..edfc6c1ed0 100644 --- a/packages/common/nbstore/src/storage/indexer.ts +++ b/packages/common/nbstore/src/storage/indexer.ts @@ -62,6 +62,7 @@ export interface IndexerStorage extends Storage { ): Promise; refresh<T extends keyof IndexerSchema>(table: T): Promise<void>; + refreshIfNeed(): Promise<void>; } type ResultPagination = { @@ -173,4 +174,6 @@ ): Promise; abstract refresh<T extends keyof IndexerSchema>(table: T): Promise<void>; + + abstract refreshIfNeed(): Promise<void>; } diff --git a/packages/common/nbstore/src/sync/indexer/index.ts b/packages/common/nbstore/src/sync/indexer/index.ts index 4d686766c5..9dd6c52022 100644 --- a/packages/common/nbstore/src/sync/indexer/index.ts +++ b/packages/common/nbstore/src/sync/indexer/index.ts @@ -117,6 +117,8 @@ export class IndexerSyncImpl implements IndexerSync { private readonly indexer: IndexerStorage; private readonly remote?: IndexerStorage; + + private lastRefreshed = Date.now(); + state$ = this.status.state$.pipe( // throttle the state to 1 second to avoid spamming the UI throttleTime(1000, undefined, { @@ -378,8 +380,7 @@ this.status.statusUpdatedSubject$.next(docId); } } - await this.indexer.refresh('block'); - await this.indexer.refresh('doc'); + await this.refreshIfNeed(); // #endregion } else { // #region crawl doc @@ -407,33 +408,40 @@ continue; } - const docBin = await this.doc.getDoc(docId); - if (!docBin) { - // doc is deleted, just skip - continue; - } console.log('[indexer] start indexing doc', docId); - const docYDoc = new YDoc({ guid: docId }); - applyUpdate(docYDoc, docBin.bin); let blocks: IndexerDocument<'block'>[] = []; let preview: string | undefined; - try { - const result = await crawlingDocData({ - ydoc: docYDoc, - rootYDoc: this.status.rootDoc, - spaceId: this.status.rootDocId, - docId, - }); - if (!result) { - // doc is empty without root block, just skip + const nativeResult = await this.tryNativeCrawlDocData(docId); + if (nativeResult) { + blocks = nativeResult.block; + preview = nativeResult.summary; + } else { + const docBin = await this.doc.getDoc(docId); + if (!docBin) { + // doc is deleted, just skip continue; } - blocks = result.blocks; - preview = result.preview; - } catch (error) { - console.error('error crawling doc', error); + const docYDoc = new YDoc({ guid: docId }); + applyUpdate(docYDoc, docBin.bin); + + try { + const result = await crawlingDocData({ + ydoc: docYDoc, + rootYDoc: this.status.rootDoc, + spaceId: this.status.rootDocId, + docId, + }); + if (!result) { + // doc is empty without root block, just skip + continue; + } + blocks = result.blocks; + preview = result.preview; + } catch (error) { + console.error('error crawling doc', error); + } } await this.indexer.deleteByQuery('block', { @@ -446,8 +454,6 @@ await this.indexer.insert('block', block); } - await this.indexer.refresh('block'); - if (preview) { await this.indexer.update( 'doc', @@ -455,9 +461,10 @@ summary: preview, }) ); - await this.indexer.refresh('doc'); } + await this.refreshIfNeed(); + 
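+ // Index refreshes are now batched: refreshIfNeed() flushes at most once per 100ms window (see its definition below), replacing the unconditional per-doc refresh('block') / refresh('doc') calls removed above.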
await this.indexerSync.setDocIndexedClock({ docId, timestamp: docClock.timestamp, @@ -471,10 +478,19 @@ this.status.completeJob(); } } finally { + await this.refreshIfNeed(); unsubscribe(); } } + private async refreshIfNeed(): Promise<void> { + if (this.lastRefreshed + 100 < Date.now()) { + console.log('[indexer] refreshing indexer'); + await this.indexer.refreshIfNeed(); + this.lastRefreshed = Date.now(); + } + } + /** * Get all docs from the root doc, without deleted docs */ @@ -484,6 +500,36 @@ }); } + private async tryNativeCrawlDocData(docId: string) { + try { + const result = await this.doc.crawlDocData?.(docId); + if (result) { + return { + title: result.title, + block: result.blocks.map(block => + IndexerDocument.from<'block'>(`${docId}:${block.blockId}`, { + docId, + blockId: block.blockId, + content: block.content, + flavour: block.flavour, + blob: block.blob, + refDocId: block.refDocId, + ref: block.refInfo, + parentFlavour: block.parentFlavour, + parentBlockId: block.parentBlockId, + additional: block.additional, + }) + ), + summary: result.summary, + }; + } + return null; + } catch (error) { + console.warn('[indexer] native crawlDocData failed', docId, error); + return null; + } + } + private async getAllDocsFromIndexer() { const docs = await this.indexer.search( 'doc', diff --git a/packages/frontend/apps/android/App/app/src/main/java/app/affine/pro/plugin/NbStorePlugin.kt b/packages/frontend/apps/android/App/app/src/main/java/app/affine/pro/plugin/NbStorePlugin.kt index 84d1a8fc41..be0a5107ab 100644 --- a/packages/frontend/apps/android/App/app/src/main/java/app/affine/pro/plugin/NbStorePlugin.kt +++ b/packages/frontend/apps/android/App/app/src/main/java/app/affine/pro/plugin/NbStorePlugin.kt @@ -15,571 +15,607 @@ import uniffi.affine_mobile_native.newDocStoragePool @CapacitorPlugin(name = "NbStoreDocStorage") class NbStorePlugin : Plugin() { - private val docStoragePool by lazy { - newDocStoragePool() - } + private val docStoragePool by lazy { + newDocStoragePool() + } - @PluginMethod - fun connect(call: PluginCall) { - launch(Dispatchers.IO) { - try { - val id = call.getStringEnsure("id") - val spaceId = call.getStringEnsure("spaceId") - val spaceType = call.getStringEnsure("spaceType") - val peer = call.getStringEnsure("peer") - val appStoragePath = activity?.filesDir ?: run { - Timber.w("Failed to connect storage, cannot access device file system.") - call.reject("Failed to connect storage, cannot access device file system.") - return@launch - } - val peerDir = appStoragePath.resolve("workspaces") - .resolve(spaceType) - .resolve( - peer.replace(Regex("[/!@#$%^&*()+~`\"':;,?<>|]"), "_") - .replace(Regex("_+"), "_") - .replace(Regex("_+$"), "") - ) - Timber.i("NbStore connecting... 
peerDir[$peerDir].") - peerDir.mkdirs() - val db = peerDir.resolve("$spaceId.db") - docStoragePool.connect(id, db.path) - Timber.i("NbStore connected [ id = $id ].") - call.resolve() - } catch (e: Exception) { - Timber.e(e, "Failed to connect NbStore.") - call.reject("Failed to connect NbStore.", e) - } + @PluginMethod + fun connect(call: PluginCall) { + launch(Dispatchers.IO) { + try { + val id = call.getStringEnsure("id") + val spaceId = call.getStringEnsure("spaceId") + val spaceType = call.getStringEnsure("spaceType") + val peer = call.getStringEnsure("peer") + val appStoragePath = activity?.filesDir ?: run { + Timber.w("Failed to connect storage, cannot access device file system.") + call.reject("Failed to connect storage, cannot access device file system.") + return@launch } + val peerDir = appStoragePath.resolve("workspaces") + .resolve(spaceType) + .resolve( + peer.replace(Regex("[/!@#$%^&*()+~`\"':;,?<>|]"), "_") + .replace(Regex("_+"), "_") + .replace(Regex("_+$"), "") + ) + Timber.i("NbStore connecting... peerDir[$peerDir].") + peerDir.mkdirs() + val db = peerDir.resolve("$spaceId.db") + docStoragePool.connect(id, db.path) + Timber.i("NbStore connected [ id = $id ].") + call.resolve() + } catch (e: Exception) { + Timber.e(e, "Failed to connect NbStore.") + call.reject("Failed to connect NbStore.", e) + } } + } - @PluginMethod - fun disconnect(call: PluginCall) { - launch(Dispatchers.IO) { - try { - val id = call.getStringEnsure("id") - docStoragePool.disconnect(universalId = id) - Timber.i("NbStore disconnected [ id = $id ].") - call.resolve() - } catch (e: Exception) { - Timber.e(e, "Failed to disconnect NbStore") - call.reject("Failed to disconnect NbStore", null, e) - } - } + @PluginMethod + fun disconnect(call: PluginCall) { + launch(Dispatchers.IO) { + try { + val id = call.getStringEnsure("id") + docStoragePool.disconnect(universalId = id) + Timber.i("NbStore disconnected [ id = $id ].") + call.resolve() + } catch (e: Exception) { + Timber.e(e, "Failed to disconnect NbStore") + call.reject("Failed to disconnect NbStore", null, e) + } } + } - @PluginMethod - fun setSpaceId(call: PluginCall) { - launch(Dispatchers.IO) { - try { - val id = call.getStringEnsure("id") - val spaceId = call.getStringEnsure("spaceId") - docStoragePool.setSpaceId(universalId = id, spaceId = spaceId) - Timber.i("Set space id: [ id = $id, spaceId = $spaceId ].") - call.resolve() - } catch (e: Exception) { - Timber.e(e, "Failed to set space id.") - call.reject("Failed to set space id, ${e.message}", null, e) - } - } + @PluginMethod + fun setSpaceId(call: PluginCall) { + launch(Dispatchers.IO) { + try { + val id = call.getStringEnsure("id") + val spaceId = call.getStringEnsure("spaceId") + docStoragePool.setSpaceId(universalId = id, spaceId = spaceId) + Timber.i("Set space id: [ id = $id, spaceId = $spaceId ].") + call.resolve() + } catch (e: Exception) { + Timber.e(e, "Failed to set space id.") + call.reject("Failed to set space id, ${e.message}", null, e) + } } + } - @PluginMethod - fun pushUpdate(call: PluginCall) { - launch(Dispatchers.IO) { - try { - val id = call.getStringEnsure("id") - val docId = call.getStringEnsure("docId") - val data = call.getStringEnsure("data") - val timestamp = docStoragePool.pushUpdate( - universalId = id, - docId = docId, - update = data - ) - call.resolve(JSObject().put("timestamp", timestamp)) - } catch (e: Exception) { - call.reject("Failed to push update, ${e.message}", null, e) - } - } + @PluginMethod + fun pushUpdate(call: PluginCall) { + launch(Dispatchers.IO) { 
+ try { + val id = call.getStringEnsure("id") + val docId = call.getStringEnsure("docId") + val data = call.getStringEnsure("data") + val timestamp = docStoragePool.pushUpdate( + universalId = id, + docId = docId, + update = data + ) + call.resolve(JSObject().put("timestamp", timestamp)) + } catch (e: Exception) { + call.reject("Failed to push update, ${e.message}", null, e) + } } + } - @PluginMethod - fun getDocSnapshot(call: PluginCall) { - launch(Dispatchers.IO) { - try { - val id = call.getStringEnsure("id") - val docId = call.getStringEnsure("docId") - val record = docStoragePool.getDocSnapshot(universalId = id, docId = docId) - record?.let { - call.resolve( - JSObject() - .put("docId", it.docId) - .put("bin", it.bin) - .put("timestamp", it.timestamp) - ) - } ?: call.resolve() - } catch (e: Exception) { - call.reject("Failed to get doc snapshot, ${e.message}", null, e) - } - } + @PluginMethod + fun getDocSnapshot(call: PluginCall) { + launch(Dispatchers.IO) { + try { + val id = call.getStringEnsure("id") + val docId = call.getStringEnsure("docId") + val record = docStoragePool.getDocSnapshot(universalId = id, docId = docId) + record?.let { + call.resolve( + JSObject() + .put("docId", it.docId) + .put("bin", it.bin) + .put("timestamp", it.timestamp) + ) + } ?: call.resolve() + } catch (e: Exception) { + call.reject("Failed to get doc snapshot, ${e.message}", null, e) + } } + } - @PluginMethod - fun setDocSnapshot(call: PluginCall) { - launch(Dispatchers.IO) { - try { - val id = call.getStringEnsure("id") - val docId = call.getStringEnsure("docId") - val bin = call.getStringEnsure("bin") - val timestamp = call.getLongEnsure("timestamp") - val success = docStoragePool.setDocSnapshot( - universalId = id, - snapshot = DocRecord(docId, bin, timestamp) - ) - call.resolve(JSObject().put("success", success)) - } catch (e: Exception) { - call.reject("Failed to set doc snapshot, ${e.message}", null, e) - } - } + @PluginMethod + fun setDocSnapshot(call: PluginCall) { + launch(Dispatchers.IO) { + try { + val id = call.getStringEnsure("id") + val docId = call.getStringEnsure("docId") + val bin = call.getStringEnsure("bin") + val timestamp = call.getLongEnsure("timestamp") + val success = docStoragePool.setDocSnapshot( + universalId = id, + snapshot = DocRecord(docId, bin, timestamp) + ) + call.resolve(JSObject().put("success", success)) + } catch (e: Exception) { + call.reject("Failed to set doc snapshot, ${e.message}", null, e) + } } + } - @PluginMethod - fun getDocUpdates(call: PluginCall) { - launch(Dispatchers.IO) { - try { - val id = call.getStringEnsure("id") - val docId = call.getStringEnsure("docId") - val updates = docStoragePool.getDocUpdates(universalId = id, docId = docId) - val mapped = JSArray(updates.map { - JSObject() - .put("docId", it.docId) - .put("timestamp", it.timestamp) - .put("bin", it.bin) - }) - call.resolve(JSObject().put("updates", mapped)) - } catch (e: Exception) { - call.reject("Failed to get doc updates, ${e.message}", null, e) - } - } + @PluginMethod + fun getDocUpdates(call: PluginCall) { + launch(Dispatchers.IO) { + try { + val id = call.getStringEnsure("id") + val docId = call.getStringEnsure("docId") + val updates = docStoragePool.getDocUpdates(universalId = id, docId = docId) + val mapped = JSArray(updates.map { + JSObject() + .put("docId", it.docId) + .put("timestamp", it.timestamp) + .put("bin", it.bin) + }) + call.resolve(JSObject().put("updates", mapped)) + } catch (e: Exception) { + call.reject("Failed to get doc updates, ${e.message}", null, e) + } } + } 
- @PluginMethod - fun markUpdatesMerged(call: PluginCall) { - launch(Dispatchers.IO) { - try { - val id = call.getStringEnsure("id") - val docId = call.getStringEnsure("docId") - val times = call.getListEnsure("timestamps") - val count = docStoragePool.markUpdatesMerged( - universalId = id, - docId = docId, - updates = times - ) - call.resolve(JSObject().put("count", count)) - } catch (e: Exception) { - call.reject("Failed to mark updates merged, ${e.message}", null, e) - } - } + @PluginMethod + fun markUpdatesMerged(call: PluginCall) { + launch(Dispatchers.IO) { + try { + val id = call.getStringEnsure("id") + val docId = call.getStringEnsure("docId") + val times = call.getListEnsure("timestamps") + val count = docStoragePool.markUpdatesMerged( + universalId = id, + docId = docId, + updates = times + ) + call.resolve(JSObject().put("count", count)) + } catch (e: Exception) { + call.reject("Failed to mark updates merged, ${e.message}", null, e) + } } + } - @PluginMethod - fun deleteDoc(call: PluginCall) { - launch(Dispatchers.IO) { - try { - val id = call.getStringEnsure("id") - val docId = call.getStringEnsure("docId") - docStoragePool.deleteDoc(universalId = id, docId = docId) - call.resolve() - } catch (e: Exception) { - call.reject("Failed to delete doc: ${e.message}", null, e) - } - } + @PluginMethod + fun deleteDoc(call: PluginCall) { + launch(Dispatchers.IO) { + try { + val id = call.getStringEnsure("id") + val docId = call.getStringEnsure("docId") + docStoragePool.deleteDoc(universalId = id, docId = docId) + call.resolve() + } catch (e: Exception) { + call.reject("Failed to delete doc: ${e.message}", null, e) + } } + } - @PluginMethod - fun getDocClocks(call: PluginCall) { - launch(Dispatchers.IO) { - try { - val id = call.getStringEnsure("id") - val after = call.getLong("after") - val docClocks = docStoragePool.getDocClocks( - universalId = id, - after = after, - ) - val mapped = JSArray(docClocks.map { - JSObject() - .put("docId", it.docId) - .put("timestamp", it.timestamp) - }) - call.resolve(JSObject().put("clocks", mapped)) - } catch (e: Exception) { - call.reject("Failed to get doc clocks: ${e.message}", null, e) - } - } + @PluginMethod + fun getDocClocks(call: PluginCall) { + launch(Dispatchers.IO) { + try { + val id = call.getStringEnsure("id") + val after = call.getLong("after") + val docClocks = docStoragePool.getDocClocks( + universalId = id, + after = after, + ) + val mapped = JSArray(docClocks.map { + JSObject() + .put("docId", it.docId) + .put("timestamp", it.timestamp) + }) + call.resolve(JSObject().put("clocks", mapped)) + } catch (e: Exception) { + call.reject("Failed to get doc clocks: ${e.message}", null, e) + } } + } - @PluginMethod - fun getDocClock(call: PluginCall) { - launch(Dispatchers.IO) { - try { - val id = call.getStringEnsure("id") - val docId = call.getStringEnsure("docId") - val docClock = docStoragePool.getDocClock(universalId = id, docId = docId) - docClock?.let { - call.resolve( - JSObject() - .put("docId", it.docId) - .put("timestamp", it.timestamp) - ) - } ?: call.resolve() - } catch (e: Exception) { - call.reject("Failed to get doc clock: ${e.message}", null, e) - } - } + @PluginMethod + fun getDocClock(call: PluginCall) { + launch(Dispatchers.IO) { + try { + val id = call.getStringEnsure("id") + val docId = call.getStringEnsure("docId") + val docClock = docStoragePool.getDocClock(universalId = id, docId = docId) + docClock?.let { + call.resolve( + JSObject() + .put("docId", it.docId) + .put("timestamp", it.timestamp) + ) + } ?: call.resolve() + 
} catch (e: Exception) { + call.reject("Failed to get doc clock: ${e.message}", null, e) + } } + } - @PluginMethod - fun getBlob(call: PluginCall) { - launch(Dispatchers.IO) { - try { - val id = call.getStringEnsure("id") - val key = call.getStringEnsure("key") - val blob = docStoragePool.getBlob(universalId = id, key = key) - blob?.let { - call.resolve( - JSObject() - .put("key", it.key) - .put("data", it.data) - .put("mime", it.mime) - .put("size", it.size) - .put("createdAt", it.createdAt) - ) - } ?: call.resolve() - } catch (e: Exception) { - call.reject("Failed to get blob: ${e.message}", null, e) - } - } + @PluginMethod + fun getBlob(call: PluginCall) { + launch(Dispatchers.IO) { + try { + val id = call.getStringEnsure("id") + val key = call.getStringEnsure("key") + val blob = docStoragePool.getBlob(universalId = id, key = key) + blob?.let { + call.resolve( + JSObject() + .put("key", it.key) + .put("data", it.data) + .put("mime", it.mime) + .put("size", it.size) + .put("createdAt", it.createdAt) + ) + } ?: call.resolve() + } catch (e: Exception) { + call.reject("Failed to get blob: ${e.message}", null, e) + } } + } - @PluginMethod - fun setBlob(call: PluginCall) { - launch(Dispatchers.IO) { - try { - val id = call.getStringEnsure("id") - val key = call.getStringEnsure("key") - val data = call.getStringEnsure("data") - val mime = call.getStringEnsure("mime") - docStoragePool.setBlob(universalId = id, blob = SetBlob(key, data, mime)) - call.resolve() - } catch (e: Exception) { - call.reject("Failed to set blob: ${e.message}", null, e) - } - } + @PluginMethod + fun setBlob(call: PluginCall) { + launch(Dispatchers.IO) { + try { + val id = call.getStringEnsure("id") + val key = call.getStringEnsure("key") + val data = call.getStringEnsure("data") + val mime = call.getStringEnsure("mime") + docStoragePool.setBlob(universalId = id, blob = SetBlob(key, data, mime)) + call.resolve() + } catch (e: Exception) { + call.reject("Failed to set blob: ${e.message}", null, e) + } } + } - @PluginMethod - fun deleteBlob(call: PluginCall) { - launch(Dispatchers.IO) { - try { - val id = call.getStringEnsure("id") - val key = call.getStringEnsure("key") - val permanently = call.getBoolean("permanently") ?: false - docStoragePool.deleteBlob(universalId = id, key = key, permanently = permanently) - call.resolve() - } catch (e: Exception) { - call.reject("Failed to delete blob: ${e.message}", null, e) - } - } + @PluginMethod + fun deleteBlob(call: PluginCall) { + launch(Dispatchers.IO) { + try { + val id = call.getStringEnsure("id") + val key = call.getStringEnsure("key") + val permanently = call.getBoolean("permanently") ?: false + docStoragePool.deleteBlob(universalId = id, key = key, permanently = permanently) + call.resolve() + } catch (e: Exception) { + call.reject("Failed to delete blob: ${e.message}", null, e) + } } + } - @PluginMethod - fun releaseBlobs(call: PluginCall) { - launch(Dispatchers.IO) { - try { - val id = call.getStringEnsure("id") - docStoragePool.releaseBlobs(universalId = id) - call.resolve() - } catch (e: Exception) { - call.reject("Failed to release blobs: ${e.message}", null, e) - } - } + @PluginMethod + fun releaseBlobs(call: PluginCall) { + launch(Dispatchers.IO) { + try { + val id = call.getStringEnsure("id") + docStoragePool.releaseBlobs(universalId = id) + call.resolve() + } catch (e: Exception) { + call.reject("Failed to release blobs: ${e.message}", null, e) + } } + } - @PluginMethod - fun listBlobs(call: PluginCall) { - launch(Dispatchers.IO) { - try { - val id = 
call.getStringEnsure("id") - val blobs = docStoragePool.listBlobs(universalId = id) - val mapped = JSArray(blobs.map { - JSObject() - .put("key", it.key) - .put("size", it.size) - .put("mime", it.mime) - .put("createdAt", it.createdAt) - }) - call.resolve(JSObject().put("blobs", mapped)) - } catch (e: Exception) { - call.reject("Failed to list blobs: ${e.message}", null, e) - } - } + @PluginMethod + fun listBlobs(call: PluginCall) { + launch(Dispatchers.IO) { + try { + val id = call.getStringEnsure("id") + val blobs = docStoragePool.listBlobs(universalId = id) + val mapped = JSArray(blobs.map { + JSObject() + .put("key", it.key) + .put("size", it.size) + .put("mime", it.mime) + .put("createdAt", it.createdAt) + }) + call.resolve(JSObject().put("blobs", mapped)) + } catch (e: Exception) { + call.reject("Failed to list blobs: ${e.message}", null, e) + } } + } - @PluginMethod - fun getPeerRemoteClocks(call: PluginCall) { - launch(Dispatchers.IO) { - try { - val id = call.getStringEnsure("id") - val peer = call.getStringEnsure("peer") - val clocks = docStoragePool.getPeerRemoteClocks( - universalId = id, - peer = peer, - ) - val mapped = JSArray(clocks.map { - JSObject() - .put("docId", it.docId) - .put("timestamp", it.timestamp) - }) - call.resolve(JSObject().put("clocks", mapped)) - } catch (e: Exception) { - call.reject("Failed to get peer remote clocks: ${e.message}", null, e) - } - } + @PluginMethod + fun getPeerRemoteClocks(call: PluginCall) { + launch(Dispatchers.IO) { + try { + val id = call.getStringEnsure("id") + val peer = call.getStringEnsure("peer") + val clocks = docStoragePool.getPeerRemoteClocks( + universalId = id, + peer = peer, + ) + val mapped = JSArray(clocks.map { + JSObject() + .put("docId", it.docId) + .put("timestamp", it.timestamp) + }) + call.resolve(JSObject().put("clocks", mapped)) + } catch (e: Exception) { + call.reject("Failed to get peer remote clocks: ${e.message}", null, e) + } } + } - @PluginMethod - fun getPeerRemoteClock(call: PluginCall) { - launch(Dispatchers.IO) { - try { - val id = call.getStringEnsure("id") - val peer = call.getStringEnsure("peer") - val docId = call.getStringEnsure("docId") - val clock = docStoragePool.getPeerRemoteClock( - universalId = id, - peer = peer, - docId = docId, - ) - clock?.let { - call.resolve( - JSObject() - .put("docId", it.docId) - .put("timestamp", it.timestamp) - ) - } ?: call.resolve() - } catch (e: Exception) { - call.reject("Failed to get peer remote clock: ${e.message}", null, e) - } - } + @PluginMethod + fun getPeerRemoteClock(call: PluginCall) { + launch(Dispatchers.IO) { + try { + val id = call.getStringEnsure("id") + val peer = call.getStringEnsure("peer") + val docId = call.getStringEnsure("docId") + val clock = docStoragePool.getPeerRemoteClock( + universalId = id, + peer = peer, + docId = docId, + ) + clock?.let { + call.resolve( + JSObject() + .put("docId", it.docId) + .put("timestamp", it.timestamp) + ) + } ?: call.resolve() + } catch (e: Exception) { + call.reject("Failed to get peer remote clock: ${e.message}", null, e) + } } + } - @PluginMethod - fun setPeerRemoteClock(call: PluginCall) { - launch(Dispatchers.IO) { - try { - val id = call.getStringEnsure("id") - val peer = call.getStringEnsure("peer") - val docId = call.getStringEnsure("docId") - val timestamp = call.getLongEnsure("timestamp") - docStoragePool.setPeerRemoteClock( - universalId = id, - peer = peer, - docId = docId, - clock = timestamp, - ) - call.resolve() - } catch (e: Exception) { - call.reject("Failed to set peer remote clock: 
${e.message}", null, e) - } - } + @PluginMethod + fun setPeerRemoteClock(call: PluginCall) { + launch(Dispatchers.IO) { + try { + val id = call.getStringEnsure("id") + val peer = call.getStringEnsure("peer") + val docId = call.getStringEnsure("docId") + val timestamp = call.getLongEnsure("timestamp") + docStoragePool.setPeerRemoteClock( + universalId = id, + peer = peer, + docId = docId, + clock = timestamp, + ) + call.resolve() + } catch (e: Exception) { + call.reject("Failed to set peer remote clock: ${e.message}", null, e) + } } + } - @PluginMethod - fun getPeerPulledRemoteClocks(call: PluginCall) { - launch(Dispatchers.IO) { - try { - val id = call.getStringEnsure("id") - val peer = call.getStringEnsure("peer") - val clocks = docStoragePool.getPeerPulledRemoteClocks( - universalId = id, - peer = peer, - ) - val mapped = JSArray(clocks.map { - JSObject() - .put("docId", it.docId) - .put("timestamp", it.timestamp) - }) - call.resolve(JSObject().put("clocks", mapped)) - } catch (e: Exception) { - call.reject("Failed to get peer pulled remote clocks: ${e.message}", null, e) - } - } + @PluginMethod + fun getPeerPulledRemoteClocks(call: PluginCall) { + launch(Dispatchers.IO) { + try { + val id = call.getStringEnsure("id") + val peer = call.getStringEnsure("peer") + val clocks = docStoragePool.getPeerPulledRemoteClocks( + universalId = id, + peer = peer, + ) + val mapped = JSArray(clocks.map { + JSObject() + .put("docId", it.docId) + .put("timestamp", it.timestamp) + }) + call.resolve(JSObject().put("clocks", mapped)) + } catch (e: Exception) { + call.reject("Failed to get peer pulled remote clocks: ${e.message}", null, e) + } } + } - @PluginMethod - fun getPeerPulledRemoteClock(call: PluginCall) { - launch(Dispatchers.IO) { - try { - val id = call.getStringEnsure("id") - val peer = call.getStringEnsure("peer") - val docId = call.getStringEnsure("docId") - val clock = docStoragePool.getPeerPulledRemoteClock( - universalId = id, - peer = peer, - docId = docId, - ) - clock?.let { - call.resolve( - JSObject() - .put("docId", it.docId) - .put("timestamp", it.timestamp) - ) - } ?: call.resolve() - } catch (e: Exception) { - call.reject("Failed to get peer pulled remote clock: ${e.message}", null, e) - } - } + @PluginMethod + fun getPeerPulledRemoteClock(call: PluginCall) { + launch(Dispatchers.IO) { + try { + val id = call.getStringEnsure("id") + val peer = call.getStringEnsure("peer") + val docId = call.getStringEnsure("docId") + val clock = docStoragePool.getPeerPulledRemoteClock( + universalId = id, + peer = peer, + docId = docId, + ) + clock?.let { + call.resolve( + JSObject() + .put("docId", it.docId) + .put("timestamp", it.timestamp) + ) + } ?: call.resolve() + } catch (e: Exception) { + call.reject("Failed to get peer pulled remote clock: ${e.message}", null, e) + } } + } - @PluginMethod - fun setPeerPulledRemoteClock(call: PluginCall) { - launch(Dispatchers.IO) { - try { - val id = call.getStringEnsure("id") - val peer = call.getStringEnsure("peer") - val docId = call.getStringEnsure("docId") - val timestamp = call.getLongEnsure("timestamp") - docStoragePool.setPeerPulledRemoteClock( - universalId = id, - peer = peer, - docId = docId, - clock = timestamp, - ) - call.resolve() - } catch (e: Exception) { - call.reject("Failed to set peer pulled remote clock: ${e.message}", null, e) - } - } + @PluginMethod + fun setPeerPulledRemoteClock(call: PluginCall) { + launch(Dispatchers.IO) { + try { + val id = call.getStringEnsure("id") + val peer = call.getStringEnsure("peer") + val docId = 
call.getStringEnsure("docId") + val timestamp = call.getLongEnsure("timestamp") + docStoragePool.setPeerPulledRemoteClock( + universalId = id, + peer = peer, + docId = docId, + clock = timestamp, + ) + call.resolve() + } catch (e: Exception) { + call.reject("Failed to set peer pulled remote clock: ${e.message}", null, e) + } } + } - @PluginMethod - fun getPeerPushedClocks(call: PluginCall) { - launch(Dispatchers.IO) { - try { - val id = call.getStringEnsure("id") - val peer = call.getStringEnsure("peer") - val clocks = docStoragePool.getPeerPushedClocks( - universalId = id, - peer = peer, - ) - val mapped = JSArray(clocks.map { - JSObject() - .put("docId", it.docId) - .put("timestamp", it.timestamp) - }) - call.resolve(JSObject().put("clocks", mapped)) - } catch (e: Exception) { - call.reject("Failed to get peer pushed clocks: ${e.message}", null, e) - } - } + @PluginMethod + fun getPeerPushedClocks(call: PluginCall) { + launch(Dispatchers.IO) { + try { + val id = call.getStringEnsure("id") + val peer = call.getStringEnsure("peer") + val clocks = docStoragePool.getPeerPushedClocks( + universalId = id, + peer = peer, + ) + val mapped = JSArray(clocks.map { + JSObject() + .put("docId", it.docId) + .put("timestamp", it.timestamp) + }) + call.resolve(JSObject().put("clocks", mapped)) + } catch (e: Exception) { + call.reject("Failed to get peer pushed clocks: ${e.message}", null, e) + } } + } - @PluginMethod - fun getPeerPushedClock(call: PluginCall) { - launch(Dispatchers.IO) { - try { - val id = call.getStringEnsure("id") - val peer = call.getStringEnsure("peer") - val docId = call.getStringEnsure("docId") - val clock = docStoragePool.getPeerPushedClock( - universalId = id, - peer = peer, - docId = docId, - ) - clock?.let { - call.resolve( - JSObject() - .put("docId", it.docId) - .put("timestamp", it.timestamp) - ) - } ?: call.resolve() - } catch (e: Exception) { - call.reject("Failed to get peer pushed clock: ${e.message}", null, e) - } - } + @PluginMethod + fun getPeerPushedClock(call: PluginCall) { + launch(Dispatchers.IO) { + try { + val id = call.getStringEnsure("id") + val peer = call.getStringEnsure("peer") + val docId = call.getStringEnsure("docId") + val clock = docStoragePool.getPeerPushedClock( + universalId = id, + peer = peer, + docId = docId, + ) + clock?.let { + call.resolve( + JSObject() + .put("docId", it.docId) + .put("timestamp", it.timestamp) + ) + } ?: call.resolve() + } catch (e: Exception) { + call.reject("Failed to get peer pushed clock: ${e.message}", null, e) + } } + } - @PluginMethod - fun setPeerPushedClock(call: PluginCall) { - launch(Dispatchers.IO) { - try { - val id = call.getStringEnsure("id") - val peer = call.getStringEnsure("peer") - val docId = call.getStringEnsure("docId") - val timestamp = call.getLongEnsure("timestamp") - docStoragePool.setPeerPushedClock( - universalId = id, - peer = peer, - docId = docId, - clock = timestamp, - ) - call.resolve() - } catch (e: Exception) { - call.reject("Failed to set peer pushed clock: ${e.message}", null, e) - } - } + @PluginMethod + fun setPeerPushedClock(call: PluginCall) { + launch(Dispatchers.IO) { + try { + val id = call.getStringEnsure("id") + val peer = call.getStringEnsure("peer") + val docId = call.getStringEnsure("docId") + val timestamp = call.getLongEnsure("timestamp") + docStoragePool.setPeerPushedClock( + universalId = id, + peer = peer, + docId = docId, + clock = timestamp, + ) + call.resolve() + } catch (e: Exception) { + call.reject("Failed to set peer pushed clock: ${e.message}", null, e) + } } + } 
- @PluginMethod - fun getBlobUploadedAt(call: PluginCall) { - launch(Dispatchers.IO) { - try { - val id = call.getStringEnsure("id") - val peer = call.getStringEnsure("peer") - val blobId = call.getStringEnsure("blobId") - val uploadedAt = docStoragePool.getBlobUploadedAt( - universalId = id, - peer = peer, - blobId = blobId, - ) - uploadedAt?.let { - call.resolve(JSObject().put("uploadedAt", it)) - } ?: call.resolve() - } catch (e: Exception) { - call.reject("Failed to get blob uploaded: ${e.message}", null, e) - } - } + @PluginMethod + fun getBlobUploadedAt(call: PluginCall) { + launch(Dispatchers.IO) { + try { + val id = call.getStringEnsure("id") + val peer = call.getStringEnsure("peer") + val blobId = call.getStringEnsure("blobId") + val uploadedAt = docStoragePool.getBlobUploadedAt( + universalId = id, + peer = peer, + blobId = blobId, + ) + uploadedAt?.let { + call.resolve(JSObject().put("uploadedAt", it)) + } ?: call.resolve() + } catch (e: Exception) { + call.reject("Failed to get blob uploaded: ${e.message}", null, e) + } } + } - @PluginMethod - fun setBlobUploadedAt(call: PluginCall) { - launch(Dispatchers.IO) { - try { - val id = call.getStringEnsure("id") - val peer = call.getStringEnsure("peer") - val blobId = call.getStringEnsure("blobId") - val uploadedAt = call.getLong("uploadedAt") - docStoragePool.setBlobUploadedAt( - universalId = id, - peer = peer, - blobId = blobId, - uploadedAt = uploadedAt, - ) - call.resolve() - } catch (e: Exception) { - call.reject("Failed to set blob uploaded: ${e.message}", null, e) - } - } + @PluginMethod + fun setBlobUploadedAt(call: PluginCall) { + launch(Dispatchers.IO) { + try { + val id = call.getStringEnsure("id") + val peer = call.getStringEnsure("peer") + val blobId = call.getStringEnsure("blobId") + val uploadedAt = call.getLong("uploadedAt") + docStoragePool.setBlobUploadedAt( + universalId = id, + peer = peer, + blobId = blobId, + uploadedAt = uploadedAt, + ) + call.resolve() + } catch (e: Exception) { + call.reject("Failed to set blob uploaded: ${e.message}", null, e) + } } + } - @PluginMethod - fun clearClocks(call: PluginCall) { - launch(Dispatchers.IO) { - try { - val id = call.getStringEnsure("id") - docStoragePool.clearClocks(universalId = id) - call.resolve() - } catch (e: Exception) { - call.reject("Failed to clear clocks: ${e.message}", null, e) - } - } + @PluginMethod + fun clearClocks(call: PluginCall) { + launch(Dispatchers.IO) { + try { + val id = call.getStringEnsure("id") + docStoragePool.clearClocks(universalId = id) + call.resolve() + } catch (e: Exception) { + call.reject("Failed to clear clocks: ${e.message}", null, e) + } } + } + + @PluginMethod + fun crawlDocData(call: PluginCall) { + launch(Dispatchers.IO) { + try { + val id = call.getStringEnsure("id") + val docId = call.getStringEnsure("docId") + val result = docStoragePool.crawlDocData( + universalId = id, + docId = docId, + ) + val blocks = JSArray( + result.blocks.map { block -> + JSObject() + .put("blockId", block.blockId) + .put("flavour", block.flavour) + .put("content", block.content) + .put("blob", block.blob) + .put("refDocId", block.refDocId) + .put("refInfo", block.refInfo) + .put("parentFlavour", block.parentFlavour) + .put("parentBlockId", block.parentBlockId) + .put("additional", block.additional) + } + ) + call.resolve( + JSObject() + .put("title", result.title) + .put("blocks", blocks) + .put("summary", result.summary) + ) + } catch (e: Exception) { + call.reject("Failed to crawl doc data: ${e.message}", null, e) + } + } + } } diff --git 
a/packages/frontend/apps/android/App/app/src/main/java/uniffi/affine_mobile_native/affine_mobile_native.kt b/packages/frontend/apps/android/App/app/src/main/java/uniffi/affine_mobile_native/affine_mobile_native.kt index 8d1ff7fb91..7e7d67a9f0 100644 --- a/packages/frontend/apps/android/App/app/src/main/java/uniffi/affine_mobile_native/affine_mobile_native.kt +++ b/packages/frontend/apps/android/App/app/src/main/java/uniffi/affine_mobile_native/affine_mobile_native.kt @@ -778,6 +778,8 @@ internal interface UniffiForeignFutureCompleteVoid : com.sun.jna.Callback { + + @@ -804,6 +806,8 @@ fun uniffi_affine_mobile_native_checksum_method_docstoragepool_clear_clocks( ): Short fun uniffi_affine_mobile_native_checksum_method_docstoragepool_connect( ): Short +fun uniffi_affine_mobile_native_checksum_method_docstoragepool_crawl_doc_data( +): Short fun uniffi_affine_mobile_native_checksum_method_docstoragepool_delete_blob( ): Short fun uniffi_affine_mobile_native_checksum_method_docstoragepool_delete_doc( @@ -913,6 +917,8 @@ fun uniffi_affine_mobile_native_fn_method_docstoragepool_clear_clocks(`ptr`: Poi ): Long fun uniffi_affine_mobile_native_fn_method_docstoragepool_connect(`ptr`: Pointer,`universalId`: RustBuffer.ByValue,`path`: RustBuffer.ByValue, ): Long +fun uniffi_affine_mobile_native_fn_method_docstoragepool_crawl_doc_data(`ptr`: Pointer,`universalId`: RustBuffer.ByValue,`docId`: RustBuffer.ByValue, +): Long fun uniffi_affine_mobile_native_fn_method_docstoragepool_delete_blob(`ptr`: Pointer,`universalId`: RustBuffer.ByValue,`key`: RustBuffer.ByValue,`permanently`: Byte, ): Long fun uniffi_affine_mobile_native_fn_method_docstoragepool_delete_doc(`ptr`: Pointer,`universalId`: RustBuffer.ByValue,`docId`: RustBuffer.ByValue, @@ -1107,6 +1113,9 @@ private fun uniffiCheckApiChecksums(lib: IntegrityCheckingUniffiLib) { if (lib.uniffi_affine_mobile_native_checksum_method_docstoragepool_connect() != 19047.toShort()) { throw RuntimeException("UniFFI API checksum mismatch: try cleaning and rebuilding your project") } + if (lib.uniffi_affine_mobile_native_checksum_method_docstoragepool_crawl_doc_data() != 36347.toShort()) { + throw RuntimeException("UniFFI API checksum mismatch: try cleaning and rebuilding your project") + } if (lib.uniffi_affine_mobile_native_checksum_method_docstoragepool_delete_blob() != 53695.toShort()) { throw RuntimeException("UniFFI API checksum mismatch: try cleaning and rebuilding your project") } @@ -1602,6 +1611,8 @@ public interface DocStoragePoolInterface { */ suspend fun `connect`(`universalId`: kotlin.String, `path`: kotlin.String) + suspend fun `crawlDocData`(`universalId`: kotlin.String, `docId`: kotlin.String): CrawlResult + suspend fun `deleteBlob`(`universalId`: kotlin.String, `key`: kotlin.String, `permanently`: kotlin.Boolean) suspend fun `deleteDoc`(`universalId`: kotlin.String, `docId`: kotlin.String) @@ -1787,6 +1798,27 @@ open class DocStoragePool: Disposable, AutoCloseable, DocStoragePoolInterface } + @Throws(UniffiException::class) + @Suppress("ASSIGNED_BUT_NEVER_ACCESSED_VARIABLE") + override suspend fun `crawlDocData`(`universalId`: kotlin.String, `docId`: kotlin.String) : CrawlResult { + return uniffiRustCallAsync( + callWithPointer { thisPtr -> + UniffiLib.INSTANCE.uniffi_affine_mobile_native_fn_method_docstoragepool_crawl_doc_data( + thisPtr, + FfiConverterString.lower(`universalId`),FfiConverterString.lower(`docId`), + ) + }, + { future, callback, continuation -> UniffiLib.INSTANCE.ffi_affine_mobile_native_rust_future_poll_rust_buffer(future, callback, 
continuation) }, + { future, continuation -> UniffiLib.INSTANCE.ffi_affine_mobile_native_rust_future_complete_rust_buffer(future, continuation) }, + { future -> UniffiLib.INSTANCE.ffi_affine_mobile_native_rust_future_free_rust_buffer(future) }, + // lift function + { FfiConverterTypeCrawlResult.lift(it) }, + // Error FFI converter + UniffiException.ErrorHandler, + ) + } + + @Throws(UniffiException::class) @Suppress("ASSIGNED_BUT_NEVER_ACCESSED_VARIABLE") override suspend fun `deleteBlob`(`universalId`: kotlin.String, `key`: kotlin.String, `permanently`: kotlin.Boolean) { @@ -2424,6 +2456,102 @@ public object FfiConverterTypeBlob: FfiConverterRustBuffer<Blob> { +data class BlockInfo ( + var `blockId`: kotlin.String, + var `flavour`: kotlin.String, + var `content`: List<kotlin.String>?, + var `blob`: List<kotlin.String>?, + var `refDocId`: List<kotlin.String>?, + var `refInfo`: List<kotlin.String>?, + var `parentFlavour`: kotlin.String?, + var `parentBlockId`: kotlin.String?, + var `additional`: kotlin.String? +) { + + companion object +} + +/** + * @suppress + */ +public object FfiConverterTypeBlockInfo: FfiConverterRustBuffer<BlockInfo> { + override fun read(buf: ByteBuffer): BlockInfo { + return BlockInfo( + FfiConverterString.read(buf), + FfiConverterString.read(buf), + FfiConverterOptionalSequenceString.read(buf), + FfiConverterOptionalSequenceString.read(buf), + FfiConverterOptionalSequenceString.read(buf), + FfiConverterOptionalSequenceString.read(buf), + FfiConverterOptionalString.read(buf), + FfiConverterOptionalString.read(buf), + FfiConverterOptionalString.read(buf), + ) + } + + override fun allocationSize(value: BlockInfo) = ( + FfiConverterString.allocationSize(value.`blockId`) + + FfiConverterString.allocationSize(value.`flavour`) + + FfiConverterOptionalSequenceString.allocationSize(value.`content`) + + FfiConverterOptionalSequenceString.allocationSize(value.`blob`) + + FfiConverterOptionalSequenceString.allocationSize(value.`refDocId`) + + FfiConverterOptionalSequenceString.allocationSize(value.`refInfo`) + + FfiConverterOptionalString.allocationSize(value.`parentFlavour`) + + FfiConverterOptionalString.allocationSize(value.`parentBlockId`) + + FfiConverterOptionalString.allocationSize(value.`additional`) + ) + + override fun write(value: BlockInfo, buf: ByteBuffer) { + FfiConverterString.write(value.`blockId`, buf) + FfiConverterString.write(value.`flavour`, buf) + FfiConverterOptionalSequenceString.write(value.`content`, buf) + FfiConverterOptionalSequenceString.write(value.`blob`, buf) + FfiConverterOptionalSequenceString.write(value.`refDocId`, buf) + FfiConverterOptionalSequenceString.write(value.`refInfo`, buf) + FfiConverterOptionalString.write(value.`parentFlavour`, buf) + FfiConverterOptionalString.write(value.`parentBlockId`, buf) + FfiConverterOptionalString.write(value.`additional`, buf) + } +} + + + +data class CrawlResult ( + var `blocks`: List<BlockInfo>, + var `title`: kotlin.String, + var `summary`: kotlin.String +) { + + companion object +} + +/** + * @suppress + */ +public object FfiConverterTypeCrawlResult: FfiConverterRustBuffer<CrawlResult> { + override fun read(buf: ByteBuffer): CrawlResult { + return CrawlResult( + FfiConverterSequenceTypeBlockInfo.read(buf), + FfiConverterString.read(buf), + FfiConverterString.read(buf), + ) + } + + override fun allocationSize(value: CrawlResult) = ( + FfiConverterSequenceTypeBlockInfo.allocationSize(value.`blocks`) + + FfiConverterString.allocationSize(value.`title`) + + FfiConverterString.allocationSize(value.`summary`) + ) + + override fun write(value: CrawlResult, buf: ByteBuffer) { 
FfiConverterSequenceTypeBlockInfo.write(value.`blocks`, buf) + FfiConverterString.write(value.`title`, buf) + FfiConverterString.write(value.`summary`, buf) + } +} + + + data class DocClock ( var `docId`: kotlin.String, var `timestamp`: kotlin.Long @@ -2732,6 +2860,38 @@ public object FfiConverterOptionalLong: FfiConverterRustBuffer<kotlin.Long?> { +/** + * @suppress + */ +public object FfiConverterOptionalString: FfiConverterRustBuffer<kotlin.String?> { + override fun read(buf: ByteBuffer): kotlin.String? { + if (buf.get().toInt() == 0) { + return null + } + return FfiConverterString.read(buf) + } + + override fun allocationSize(value: kotlin.String?): ULong { + if (value == null) { + return 1UL + } else { + return 1UL + FfiConverterString.allocationSize(value) + } + } + + override fun write(value: kotlin.String?, buf: ByteBuffer) { + if (value == null) { + buf.put(0) + } else { + buf.put(1) + FfiConverterString.write(value, buf) + } + } +} + + + + /** * @suppress */ @@ -2828,6 +2988,38 @@ public object FfiConverterOptionalTypeDocRecord: FfiConverterRustBuffer<DocRecord?> { +/** + * @suppress + */ +public object FfiConverterOptionalSequenceString: FfiConverterRustBuffer<List<kotlin.String>?> { + override fun read(buf: ByteBuffer): List<kotlin.String>? { + if (buf.get().toInt() == 0) { + return null + } + return FfiConverterSequenceString.read(buf) + } + + override fun allocationSize(value: List<kotlin.String>?): ULong { + if (value == null) { + return 1UL + } else { + return 1UL + FfiConverterSequenceString.allocationSize(value) + } + } + + override fun write(value: List<kotlin.String>?, buf: ByteBuffer) { + if (value == null) { + buf.put(0) + } else { + buf.put(1) + FfiConverterSequenceString.write(value, buf) + } + } +} + + + + /** * @suppress */ @@ -2856,6 +3048,62 @@ public object FfiConverterSequenceLong: FfiConverterRustBuffer<List<kotlin.Long>> { +/** + * @suppress + */ +public object FfiConverterSequenceString: FfiConverterRustBuffer<List<kotlin.String>> { + override fun read(buf: ByteBuffer): List<kotlin.String> { + val len = buf.getInt() + return List(len) { + FfiConverterString.read(buf) + } + } + + override fun allocationSize(value: List<kotlin.String>): ULong { + val sizeForLength = 4UL + val sizeForItems = value.map { FfiConverterString.allocationSize(it) }.sum() + return sizeForLength + sizeForItems + } + + override fun write(value: List<kotlin.String>, buf: ByteBuffer) { + buf.putInt(value.size) + value.iterator().forEach { + FfiConverterString.write(it, buf) + } + } +} + + + + +/** + * @suppress + */ +public object FfiConverterSequenceTypeBlockInfo: FfiConverterRustBuffer<List<BlockInfo>> { + override fun read(buf: ByteBuffer): List<BlockInfo> { + val len = buf.getInt() + return List(len) { + FfiConverterTypeBlockInfo.read(buf) + } + } + + override fun allocationSize(value: List<BlockInfo>): ULong { + val sizeForLength = 4UL + val sizeForItems = value.map { FfiConverterTypeBlockInfo.allocationSize(it) }.sum() + return sizeForLength + sizeForItems + } + + override fun write(value: List<BlockInfo>, buf: ByteBuffer) { + buf.putInt(value.size) + value.iterator().forEach { + FfiConverterTypeBlockInfo.write(it, buf) + } + } +} + + + + /** * @suppress */ diff --git a/packages/frontend/apps/android/src/plugins/nbstore/definitions.ts b/packages/frontend/apps/android/src/plugins/nbstore/definitions.ts index a90f271fe0..bc2c1892e8 100644 --- a/packages/frontend/apps/android/src/plugins/nbstore/definitions.ts +++ b/packages/frontend/apps/android/src/plugins/nbstore/definitions.ts @@ -1,3 +1,5 @@ +import type { CrawlResult } from '@affine/nbstore'; + export interface Blob { key: string; // base64 encoded data @@ -149,4 +151,8 @@ export interface NbStorePlugin { uploadedAt: number | null; }) => Promise<void>; clearClocks: (options: { id: string }) => Promise<void>; + crawlDocData: (options: { + id: string; + docId: string; + }) => 
Promise<CrawlResult | null>; } diff --git a/packages/frontend/apps/android/src/plugins/nbstore/index.ts b/packages/frontend/apps/android/src/plugins/nbstore/index.ts index 926253785c..3390afb40d 100644 --- a/packages/frontend/apps/android/src/plugins/nbstore/index.ts +++ b/packages/frontend/apps/android/src/plugins/nbstore/index.ts @@ -336,4 +336,7 @@ export const NbStoreNativeDBApis: NativeDBApis = { uploadedAt: uploadedAt ? uploadedAt.getTime() : null, }); }, + crawlDocData: async function (id: string, docId: string) { + return NbStore.crawlDocData({ id, docId }); + }, }; diff --git a/packages/frontend/apps/electron/src/helper/nbstore/handlers.ts b/packages/frontend/apps/electron/src/helper/nbstore/handlers.ts index 21040120eb..5a9ed91296 100644 --- a/packages/frontend/apps/electron/src/helper/nbstore/handlers.ts +++ b/packages/frontend/apps/electron/src/helper/nbstore/handlers.ts @@ -47,4 +47,5 @@ export const nbstoreHandlers: NativeDBApis = { clearClocks: POOL.clearClocks.bind(POOL), setBlobUploadedAt: POOL.setBlobUploadedAt.bind(POOL), getBlobUploadedAt: POOL.getBlobUploadedAt.bind(POOL), + crawlDocData: POOL.crawlDocData.bind(POOL), }; diff --git a/packages/frontend/apps/ios/src/plugins/nbstore/definitions.ts b/packages/frontend/apps/ios/src/plugins/nbstore/definitions.ts index a90f271fe0..bc2c1892e8 100644 --- a/packages/frontend/apps/ios/src/plugins/nbstore/definitions.ts +++ b/packages/frontend/apps/ios/src/plugins/nbstore/definitions.ts @@ -1,3 +1,5 @@ +import type { CrawlResult } from '@affine/nbstore'; + export interface Blob { key: string; // base64 encoded data @@ -149,4 +151,8 @@ export interface NbStorePlugin { uploadedAt: number | null; }) => Promise<void>; clearClocks: (options: { id: string }) => Promise<void>; + crawlDocData: (options: { + id: string; + docId: string; + }) => Promise<CrawlResult | null>; } diff --git a/packages/frontend/apps/ios/src/plugins/nbstore/index.ts b/packages/frontend/apps/ios/src/plugins/nbstore/index.ts index 4491240967..abc7461dce 100644 --- a/packages/frontend/apps/ios/src/plugins/nbstore/index.ts +++ b/packages/frontend/apps/ios/src/plugins/nbstore/index.ts @@ -4,6 +4,7 @@ import { } from '@affine/core/modules/workspace-engine'; import { type BlobRecord, + type CrawlResult, type DocClock, type DocRecord, type ListedBlobRecord, @@ -336,4 +337,10 @@ export const NbStoreNativeDBApis: NativeDBApis = { uploadedAt: uploadedAt ? 
diff --git a/packages/frontend/mobile-native/Cargo.toml b/packages/frontend/mobile-native/Cargo.toml
index 7562bd8752..2ca44b6bed 100644
--- a/packages/frontend/mobile-native/Cargo.toml
+++ b/packages/frontend/mobile-native/Cargo.toml
@@ -15,7 +15,7 @@ path = "uniffi-bindgen.rs"
 use-as-lib = ["affine_nbstore/use-as-lib"]
 
 [dependencies]
-affine_common = { workspace = true }
+affine_common = { workspace = true, features = ["hashcash"] }
 affine_nbstore = { workspace = true }
 anyhow = { workspace = true }
 base64-simd = { workspace = true }
diff --git a/packages/frontend/mobile-native/src/lib.rs b/packages/frontend/mobile-native/src/lib.rs
index 1e098a52a2..7abf0fe833 100644
--- a/packages/frontend/mobile-native/src/lib.rs
+++ b/packages/frontend/mobile-native/src/lib.rs
@@ -183,6 +183,52 @@ impl From<affine_nbstore::ListedBlob> for ListedBlob {
   }
 }
 
+#[derive(uniffi::Record)]
+pub struct BlockInfo {
+  pub block_id: String,
+  pub flavour: String,
+  pub content: Option<Vec<String>>,
+  pub blob: Option<Vec<String>>,
+  pub ref_doc_id: Option<Vec<String>>,
+  pub ref_info: Option<Vec<String>>,
+  pub parent_flavour: Option<String>,
+  pub parent_block_id: Option<String>,
+  pub additional: Option<String>,
+}
+
+impl From<affine_nbstore::indexer::NativeBlockInfo> for BlockInfo {
+  fn from(value: affine_nbstore::indexer::NativeBlockInfo) -> Self {
+    Self {
+      block_id: value.block_id,
+      flavour: value.flavour,
+      content: value.content,
+      blob: value.blob,
+      ref_doc_id: value.ref_doc_id,
+      ref_info: value.ref_info,
+      parent_flavour: value.parent_flavour,
+      parent_block_id: value.parent_block_id,
+      additional: value.additional,
+    }
+  }
+}
+
+#[derive(uniffi::Record)]
+pub struct CrawlResult {
+  pub blocks: Vec<BlockInfo>,
+  pub title: String,
+  pub summary: String,
+}
+
+impl From<affine_nbstore::indexer::NativeCrawlResult> for CrawlResult {
+  fn from(value: affine_nbstore::indexer::NativeCrawlResult) -> Self {
+    Self {
+      blocks: value.blocks.into_iter().map(Into::into).collect(),
+      title: value.title,
+      summary: value.summary,
+    }
+  }
+}
+
 #[derive(uniffi::Object)]
 pub struct DocStoragePool {
   inner: SqliteDocStoragePool,
@@ -643,4 +689,14 @@ impl DocStoragePool {
         .map(|t| t.and_utc().timestamp_millis()),
     )
   }
+
+  pub async fn crawl_doc_data(&self, universal_id: String, doc_id: String) -> Result<CrawlResult> {
+    let result = self
+      .inner
+      .get(universal_id.clone())
+      .await?
+      .crawl_doc_data(&doc_id)
+      .await?;
+    Ok(result.into())
+  }
 }
diff --git a/packages/frontend/native/Cargo.toml b/packages/frontend/native/Cargo.toml
index 978fa2ae11..ed3eeb08eb 100644
--- a/packages/frontend/native/Cargo.toml
+++ b/packages/frontend/native/Cargo.toml
@@ -7,7 +7,7 @@ version = "0.0.0"
 crate-type = ["cdylib", "rlib"]
 
 [dependencies]
-affine_common = { workspace = true }
+affine_common = { workspace = true, features = ["hashcash"] }
 affine_media_capture = { path = "./media_capture" }
 affine_nbstore = { path = "./nbstore" }
 affine_sqlite_v1 = { path = "./sqlite_v1" }
@@ -22,8 +22,14 @@ sqlx = { workspace = true, default-features = false, features = [
   "sqlite",
   "tls-rustls",
 ] }
+thiserror = { workspace = true }
 tokio = { workspace = true, features = ["full"] }
 
+[dev-dependencies]
+chrono = { workspace = true }
+serde_json = { workspace = true }
+uuid = { workspace = true }
+
 [build-dependencies]
 napi-build = { workspace = true }
 sqlx = { workspace = true, default-features = false, features = [
diff --git a/packages/frontend/native/index.d.ts b/packages/frontend/native/index.d.ts
index 7d36a2780b..36f2f775c8 100644
--- a/packages/frontend/native/index.d.ts
+++ b/packages/frontend/native/index.d.ts
@@ -55,6 +55,7 @@ export declare class DocStoragePool {
  connect(universalId: string, path: string): Promise<void>
  disconnect(universalId: string): Promise<void>
  checkpoint(universalId: string): Promise<void>
+  crawlDocData(universalId: string, docId: string): Promise<NativeCrawlResult>
  setSpaceId(universalId: string, spaceId: string): Promise<void>
  pushUpdate(universalId: string, docId: string, update: Uint8Array): Promise<Date>
  getDocSnapshot(universalId: string, docId: string): Promise<DocRecord | null>
@@ -115,6 +116,24 @@ export interface ListedBlob {
   createdAt: Date
 }
 
+export interface NativeBlockInfo {
+  blockId: string
+  flavour: string
+  content?: Array<string>
+  blob?: Array<string>
+  refDocId?: Array<string>
+  refInfo?: Array<string>
+  parentFlavour?: string
+  parentBlockId?: string
+  additional?: string
+}
+
+export interface NativeCrawlResult {
+  blocks: Array<NativeBlockInfo>
+  title: string
+  summary: string
+}
+
 export interface SetBlob {
   key: string
   data: Uint8Array
diff --git a/packages/frontend/native/nbstore/Cargo.toml b/packages/frontend/native/nbstore/Cargo.toml
index 1402c9585a..7b74d970db 100644
--- a/packages/frontend/native/nbstore/Cargo.toml
+++ b/packages/frontend/native/nbstore/Cargo.toml
@@ -10,11 +10,13 @@ crate-type = ["cdylib", "rlib"]
 use-as-lib = ["napi-derive/noop", "napi/noop"]
 
 [dependencies]
+affine_common = { workspace = true, features = ["ydoc-loader"] }
 affine_schema = { path = "../schema" }
 anyhow = { workspace = true }
 chrono = { workspace = true }
 napi = { workspace = true }
 napi-derive = { workspace = true }
+serde = { workspace = true, features = ["derive"] }
 sqlx = { workspace = true, default-features = false, features = [
   "chrono",
   "macros",
@@ -25,6 +27,7 @@ sqlx = { workspace = true, default-features = false, features = [
 ] }
 thiserror = { workspace = true }
 tokio = { workspace = true, features = ["full"] }
+y-octo = { workspace = true }
 
 [target.'cfg(any(target_os = "ios", target_os = "android"))'.dependencies]
 uniffi = { workspace = true }
@@ -42,3 +45,8 @@ sqlx = { workspace = true, default-features = false, features = [
   "tls-rustls",
 ] }
 tokio = { workspace = true, features = ["full"] }
+
+
+[dev-dependencies]
+serde_json = { workspace = true }
+uuid = { workspace = true, features = ["v4"] }
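As a usage sketch for the typings above: the `@affine/native` import path is inferred from this package's location, and the bare-constructor-plus-`connect` flow is an assumption, not something this patch specifies.

import { DocStoragePool } from '@affine/native';

// Crawl one doc through the napi binding and log what the indexer would see.
async function inspectDoc(universalId: string, dbPath: string, docId: string) {
  const pool = new DocStoragePool(); // construction assumed; see note above
  await pool.connect(universalId, dbPath);
  // Resolves to a NativeCrawlResult: { blocks, title, summary }.
  const result = await pool.crawlDocData(universalId, docId);
  console.log(result.title, result.blocks.length);
  await pool.disconnect(universalId);
}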
diff --git a/packages/frontend/native/nbstore/src/doc.rs b/packages/frontend/native/nbstore/src/doc.rs
index 326e81fc45..a13dd0d447 100644
--- a/packages/frontend/native/nbstore/src/doc.rs
+++ b/packages/frontend/native/nbstore/src/doc.rs
@@ -103,7 +103,7 @@ impl SqliteDocStorage {
 
     sqlx::query(r#"INSERT INTO updates (doc_id, data, created_at) VALUES ($1, $2, $3);"#)
       .bind(doc_id)
-      .bind(update.as_ref())
+      .bind(update)
       .bind(timestamp)
       .execute(&mut *tx)
       .await?;
@@ -358,7 +358,7 @@ mod tests {
     assert_eq!(result.len(), 4);
 
     assert_eq!(
-      result.iter().map(|u| u.bin.as_ref()).collect::<Vec<_>>(),
+      result.iter().map(|u| u.bin.to_vec()).collect::<Vec<_>>(),
       updates
     );
   }
@@ -382,7 +382,7 @@ mod tests {
     let result = storage.get_doc_snapshot("test".to_string()).await.unwrap();
 
     assert!(result.is_some());
-    assert_eq!(result.unwrap().bin.as_ref(), vec![0, 0]);
+    assert_eq!(result.unwrap().bin.to_vec(), vec![0, 0]);
   }
 
   #[tokio::test]
@@ -400,7 +400,7 @@ mod tests {
     let result = storage.get_doc_snapshot("test".to_string()).await.unwrap();
 
     assert!(result.is_some());
-    assert_eq!(result.unwrap().bin.as_ref(), vec![0, 0]);
+    assert_eq!(result.unwrap().bin.to_vec(), vec![0, 0]);
 
     let snapshot = DocRecord {
       doc_id: "test".to_string(),
@@ -416,7 +416,7 @@ mod tests {
     let result = storage.get_doc_snapshot("test".to_string()).await.unwrap();
 
     assert!(result.is_some());
-    assert_eq!(result.unwrap().bin.as_ref(), vec![0, 0]);
+    assert_eq!(result.unwrap().bin.to_vec(), vec![0, 0]);
   }
 
   #[tokio::test]
diff --git a/packages/frontend/native/nbstore/src/error.rs b/packages/frontend/native/nbstore/src/error.rs
index 2e20a99da7..608368c911 100644
--- a/packages/frontend/native/nbstore/src/error.rs
+++ b/packages/frontend/native/nbstore/src/error.rs
@@ -1,3 +1,5 @@
+use affine_common::doc_parser::ParseError;
+
 pub type Result<T> = std::result::Result<T, Error>;
 
 #[derive(Debug, thiserror::Error)]
@@ -8,4 +10,6 @@ pub enum Error {
   MigrateError(#[from] sqlx::migrate::MigrateError),
   #[error("Invalid operation")]
   InvalidOperation,
+  #[error(transparent)]
+  Parse(#[from] ParseError),
 }
diff --git a/packages/frontend/native/nbstore/src/indexer.rs b/packages/frontend/native/nbstore/src/indexer.rs
new file mode 100644
index 0000000000..149c8e305f
--- /dev/null
+++ b/packages/frontend/native/nbstore/src/indexer.rs
@@ -0,0 +1,180 @@
+use affine_common::doc_parser::{parse_doc_from_binary, BlockInfo, CrawlResult, ParseError};
+use napi_derive::napi;
+use serde::Serialize;
+use y_octo::DocOptions;
+
+use super::{error::Result, storage::SqliteDocStorage};
+
+#[napi(object)]
+#[derive(Debug, Serialize)]
+pub struct NativeBlockInfo {
+  pub block_id: String,
+  pub flavour: String,
+  pub content: Option<Vec<String>>,
+  pub blob: Option<Vec<String>>,
+  pub ref_doc_id: Option<Vec<String>>,
+  pub ref_info: Option<Vec<String>>,
+  pub parent_flavour: Option<String>,
+  pub parent_block_id: Option<String>,
+  pub additional: Option<String>,
+}
+
+#[napi(object)]
+#[derive(Debug, Serialize)]
+pub struct NativeCrawlResult {
+  pub blocks: Vec<NativeBlockInfo>,
+  pub title: String,
+  pub summary: String,
+}
+
+impl From<BlockInfo> for NativeBlockInfo {
+  fn from(value: BlockInfo) -> Self {
+    Self {
+      block_id: value.block_id,
+      flavour: value.flavour,
+      content: value.content,
+      blob: value.blob,
+      ref_doc_id: value.ref_doc_id,
+      ref_info: value.ref_info,
+      parent_flavour: value.parent_flavour,
+      parent_block_id: value.parent_block_id,
+      additional: value.additional,
+    }
+  }
+}
+
+impl From<CrawlResult> for NativeCrawlResult {
+  fn from(value: CrawlResult) -> Self {
+    Self {
+      blocks: value.blocks.into_iter().map(Into::into).collect(),
+      title: value.title,
+      summary: value.summary,
+    }
+  }
+}
+
+impl SqliteDocStorage {
+  pub async fn crawl_doc_data(&self, doc_id: &str) -> Result<NativeCrawlResult> {
+    let doc_bin = self
+      .load_doc_binary(doc_id)
+      .await?
+      .ok_or(ParseError::DocNotFound)?;
+
+    let result = parse_doc_from_binary(doc_bin, doc_id.to_string())?;
+    Ok(result.into())
+  }
+
+  async fn load_doc_binary(&self, doc_id: &str) -> Result<Option<Vec<u8>>> {
+    let snapshot = self.get_doc_snapshot(doc_id.to_string()).await?;
+    let mut updates = self.get_doc_updates(doc_id.to_string()).await?;
+
+    if snapshot.is_none() && updates.is_empty() {
+      return Ok(None);
+    }
+
+    updates.sort_by(|a, b| a.timestamp.cmp(&b.timestamp));
+
+    let mut segments =
+      Vec::with_capacity(snapshot.as_ref().map(|_| 1).unwrap_or(0) + updates.len());
+    if let Some(record) = snapshot {
+      segments.push(record.bin.to_vec());
+    }
+    segments.extend(updates.into_iter().map(|update| update.bin.to_vec()));
+
+    merge_updates(segments, doc_id).map(Some)
+  }
+}
+
+fn merge_updates(mut segments: Vec<Vec<u8>>, guid: &str) -> Result<Vec<u8>> {
+  if segments.is_empty() {
+    return Err(ParseError::DocNotFound.into());
+  }
+
+  if segments.len() == 1 {
+    return segments.pop().ok_or(ParseError::DocNotFound.into());
+  }
+
+  let mut doc = DocOptions::new().with_guid(guid.to_string()).build();
+  for update in segments.iter() {
+    doc
+      .apply_update_from_binary_v1(update)
+      .map_err(|_| ParseError::InvalidBinary)?;
+  }
+
+  let buffer = doc
+    .encode_update_v1()
+    .map_err(|err| ParseError::ParserError(err.to_string()))?;
+
+  Ok(buffer)
+}
+
+#[cfg(test)]
+mod tests {
+  use std::path::{Path, PathBuf};
+
+  use affine_common::doc_parser::ParseError;
+  use chrono::Utc;
+  use serde_json::Value;
+  use tokio::fs;
+  use uuid::Uuid;
+
+  use super::{super::error::Error, *};
+
+  const DEMO_BIN: &[u8] = include_bytes!("../../../../common/native/fixtures/demo.ydoc");
+  const DEMO_JSON: &[u8] = include_bytes!("../../../../common/native/fixtures/demo.ydoc.json");
+
+  fn temp_workspace_dir() -> PathBuf {
+    std::env::temp_dir().join(format!("affine-native-{}", Uuid::new_v4()))
+  }
+
+  async fn init_db(path: &Path) -> SqliteDocStorage {
+    fs::create_dir_all(path.parent().unwrap()).await.unwrap();
+    let storage = SqliteDocStorage::new(path.to_string_lossy().into_owned());
+    storage.connect().await.unwrap();
+    storage
+  }
+
+  async fn cleanup(path: &Path) {
+    let _ = fs::remove_dir_all(path.parent().unwrap()).await;
+  }
+
+  #[tokio::test]
+  async fn parse_demo_snapshot_matches_fixture() {
+    let base = temp_workspace_dir();
+    fs::create_dir_all(&base).await.unwrap();
+    let db_path = base.join("storage.db");
+
+    let storage = init_db(&db_path).await;
+    sqlx::query(r#"INSERT INTO snapshots (doc_id, data, updated_at) VALUES (?, ?, ?)"#)
+      .bind("demo-doc")
+      .bind(DEMO_BIN)
+      .bind(Utc::now().naive_utc())
+      .execute(&storage.pool)
+      .await
+      .unwrap();
+
+    let result = storage.crawl_doc_data("demo-doc").await.unwrap();
+
+    let expected: Value = serde_json::from_slice(DEMO_JSON).unwrap();
+    let actual = serde_json::to_value(&result).unwrap();
+    assert_eq!(expected, actual);
+
+    storage.close().await;
+    cleanup(&db_path).await;
+  }
+
+  #[tokio::test]
+  async fn missing_doc_returns_error() {
+    let base = temp_workspace_dir();
+    fs::create_dir_all(&base).await.unwrap();
+    let db_path = base.join("storage.db");
+
+    let storage = init_db(&db_path).await;
+
+    let err = storage.crawl_doc_data("absent-doc").await.unwrap_err();
+    assert!(matches!(err, Error::Parse(ParseError::DocNotFound)));
+
+    storage.close().await;
+    cleanup(&db_path).await;
+  }
+}
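To make the shape of the crawl output concrete, here is a small consumer-side sketch. The `IndexEntry` shape and the flattening rule are illustrative only; they are not defined by this patch:

import type { NativeCrawlResult } from '@affine/native'; // import path assumed

// Illustrative shape for a full-text index entry; not part of the patch.
interface IndexEntry {
  blockId: string;
  flavour: string;
  text: string;
}

// Flatten a crawl result into per-block entries an indexer could ingest.
function toIndexEntries(result: NativeCrawlResult): IndexEntry[] {
  return result.blocks.map(block => ({
    blockId: block.blockId,
    flavour: block.flavour,
    // `content` is an optional list of strings; join it for plain-text search.
    text: (block.content ?? []).join(' '),
  }));
}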
diff --git a/packages/frontend/native/nbstore/src/lib.rs b/packages/frontend/native/nbstore/src/lib.rs
index d76c66b5e3..01336d930c 100644
--- a/packages/frontend/native/nbstore/src/lib.rs
+++ b/packages/frontend/native/nbstore/src/lib.rs
@@ -3,6 +3,7 @@ pub mod blob_sync;
 pub mod doc;
 pub mod doc_sync;
 pub mod error;
+pub mod indexer;
 pub mod pool;
 pub mod storage;
 
@@ -117,6 +118,20 @@ impl DocStoragePool {
     Ok(())
   }
 
+  #[napi]
+  pub async fn crawl_doc_data(
+    &self,
+    universal_id: String,
+    doc_id: String,
+  ) -> Result<indexer::NativeCrawlResult> {
+    let result = self
+      .get(universal_id)
+      .await?
+      .crawl_doc_data(&doc_id)
+      .await?;
+    Ok(result)
+  }
+
   #[napi]
   pub async fn set_space_id(&self, universal_id: String, space_id: String) -> Result<()> {
     self.get(universal_id).await?.set_space_id(space_id).await?;
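Finally, note the failure mode exercised by `missing_doc_returns_error` above: a doc with neither a snapshot nor updates surfaces as `ParseError::DocNotFound`, which napi turns into a rejected promise on the JS side. A defensive caller might look like this (a sketch, with the same assumed import path as the earlier examples):

import { DocStoragePool } from '@affine/native';

// crawlDocData rejects when a doc has neither a snapshot nor updates,
// mirroring ParseError::DocNotFound on the Rust side.
async function tryCrawl(
  pool: DocStoragePool,
  universalId: string,
  docId: string
) {
  try {
    return await pool.crawlDocData(universalId, docId);
  } catch (err) {
    console.warn(`crawlDocData failed for ${docId}`, err);
    return null; // treat as "nothing to index" rather than crashing the caller
  }
}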