feat(native): native reader for indexer (#14055)

Author: DarkSky
Date: 2025-12-07 16:22:11 +08:00
Committed by: GitHub
parent 69cdeedc4e
commit cf4e37c584
28 changed files with 1376 additions and 569 deletions

View File

@@ -778,6 +778,8 @@ internal interface UniffiForeignFutureCompleteVoid : com.sun.jna.Callback {
@@ -804,6 +806,8 @@ fun uniffi_affine_mobile_native_checksum_method_docstoragepool_clear_clocks(
): Short
fun uniffi_affine_mobile_native_checksum_method_docstoragepool_connect(
): Short
fun uniffi_affine_mobile_native_checksum_method_docstoragepool_crawl_doc_data(
): Short
fun uniffi_affine_mobile_native_checksum_method_docstoragepool_delete_blob(
): Short
fun uniffi_affine_mobile_native_checksum_method_docstoragepool_delete_doc(
@@ -913,6 +917,8 @@ fun uniffi_affine_mobile_native_fn_method_docstoragepool_clear_clocks(`ptr`: Poi
): Long
fun uniffi_affine_mobile_native_fn_method_docstoragepool_connect(`ptr`: Pointer,`universalId`: RustBuffer.ByValue,`path`: RustBuffer.ByValue,
): Long
fun uniffi_affine_mobile_native_fn_method_docstoragepool_crawl_doc_data(`ptr`: Pointer,`universalId`: RustBuffer.ByValue,`docId`: RustBuffer.ByValue,
): Long
fun uniffi_affine_mobile_native_fn_method_docstoragepool_delete_blob(`ptr`: Pointer,`universalId`: RustBuffer.ByValue,`key`: RustBuffer.ByValue,`permanently`: Byte,
): Long
fun uniffi_affine_mobile_native_fn_method_docstoragepool_delete_doc(`ptr`: Pointer,`universalId`: RustBuffer.ByValue,`docId`: RustBuffer.ByValue,
@@ -1107,6 +1113,9 @@ private fun uniffiCheckApiChecksums(lib: IntegrityCheckingUniffiLib) {
if (lib.uniffi_affine_mobile_native_checksum_method_docstoragepool_connect() != 19047.toShort()) {
throw RuntimeException("UniFFI API checksum mismatch: try cleaning and rebuilding your project")
}
if (lib.uniffi_affine_mobile_native_checksum_method_docstoragepool_crawl_doc_data() != 36347.toShort()) {
throw RuntimeException("UniFFI API checksum mismatch: try cleaning and rebuilding your project")
}
if (lib.uniffi_affine_mobile_native_checksum_method_docstoragepool_delete_blob() != 53695.toShort()) {
throw RuntimeException("UniFFI API checksum mismatch: try cleaning and rebuilding your project")
}
@@ -1602,6 +1611,8 @@ public interface DocStoragePoolInterface {
*/
suspend fun `connect`(`universalId`: kotlin.String, `path`: kotlin.String)
suspend fun `crawlDocData`(`universalId`: kotlin.String, `docId`: kotlin.String): CrawlResult
suspend fun `deleteBlob`(`universalId`: kotlin.String, `key`: kotlin.String, `permanently`: kotlin.Boolean)
suspend fun `deleteDoc`(`universalId`: kotlin.String, `docId`: kotlin.String)
@@ -1787,6 +1798,27 @@ open class DocStoragePool: Disposable, AutoCloseable, DocStoragePoolInterface
}
@Throws(UniffiException::class)
@Suppress("ASSIGNED_BUT_NEVER_ACCESSED_VARIABLE")
override suspend fun `crawlDocData`(`universalId`: kotlin.String, `docId`: kotlin.String) : CrawlResult {
return uniffiRustCallAsync(
callWithPointer { thisPtr ->
UniffiLib.INSTANCE.uniffi_affine_mobile_native_fn_method_docstoragepool_crawl_doc_data(
thisPtr,
FfiConverterString.lower(`universalId`),FfiConverterString.lower(`docId`),
)
},
{ future, callback, continuation -> UniffiLib.INSTANCE.ffi_affine_mobile_native_rust_future_poll_rust_buffer(future, callback, continuation) },
{ future, continuation -> UniffiLib.INSTANCE.ffi_affine_mobile_native_rust_future_complete_rust_buffer(future, continuation) },
{ future -> UniffiLib.INSTANCE.ffi_affine_mobile_native_rust_future_free_rust_buffer(future) },
// lift function
{ FfiConverterTypeCrawlResult.lift(it) },
// Error FFI converter
UniffiException.ErrorHandler,
)
}
@Throws(UniffiException::class)
@Suppress("ASSIGNED_BUT_NEVER_ACCESSED_VARIABLE")
override suspend fun `deleteBlob`(`universalId`: kotlin.String, `key`: kotlin.String, `permanently`: kotlin.Boolean) {
@@ -2424,6 +2456,102 @@ public object FfiConverterTypeBlob: FfiConverterRustBuffer<Blob> {
data class BlockInfo (
var `blockId`: kotlin.String,
var `flavour`: kotlin.String,
var `content`: List<kotlin.String>?,
var `blob`: List<kotlin.String>?,
var `refDocId`: List<kotlin.String>?,
var `refInfo`: List<kotlin.String>?,
var `parentFlavour`: kotlin.String?,
var `parentBlockId`: kotlin.String?,
var `additional`: kotlin.String?
) {
companion object
}
/**
* @suppress
*/
public object FfiConverterTypeBlockInfo: FfiConverterRustBuffer<BlockInfo> {
override fun read(buf: ByteBuffer): BlockInfo {
return BlockInfo(
FfiConverterString.read(buf),
FfiConverterString.read(buf),
FfiConverterOptionalSequenceString.read(buf),
FfiConverterOptionalSequenceString.read(buf),
FfiConverterOptionalSequenceString.read(buf),
FfiConverterOptionalSequenceString.read(buf),
FfiConverterOptionalString.read(buf),
FfiConverterOptionalString.read(buf),
FfiConverterOptionalString.read(buf),
)
}
override fun allocationSize(value: BlockInfo) = (
FfiConverterString.allocationSize(value.`blockId`) +
FfiConverterString.allocationSize(value.`flavour`) +
FfiConverterOptionalSequenceString.allocationSize(value.`content`) +
FfiConverterOptionalSequenceString.allocationSize(value.`blob`) +
FfiConverterOptionalSequenceString.allocationSize(value.`refDocId`) +
FfiConverterOptionalSequenceString.allocationSize(value.`refInfo`) +
FfiConverterOptionalString.allocationSize(value.`parentFlavour`) +
FfiConverterOptionalString.allocationSize(value.`parentBlockId`) +
FfiConverterOptionalString.allocationSize(value.`additional`)
)
override fun write(value: BlockInfo, buf: ByteBuffer) {
FfiConverterString.write(value.`blockId`, buf)
FfiConverterString.write(value.`flavour`, buf)
FfiConverterOptionalSequenceString.write(value.`content`, buf)
FfiConverterOptionalSequenceString.write(value.`blob`, buf)
FfiConverterOptionalSequenceString.write(value.`refDocId`, buf)
FfiConverterOptionalSequenceString.write(value.`refInfo`, buf)
FfiConverterOptionalString.write(value.`parentFlavour`, buf)
FfiConverterOptionalString.write(value.`parentBlockId`, buf)
FfiConverterOptionalString.write(value.`additional`, buf)
}
}
data class CrawlResult (
var `blocks`: List<BlockInfo>,
var `title`: kotlin.String,
var `summary`: kotlin.String
) {
companion object
}
/**
* @suppress
*/
public object FfiConverterTypeCrawlResult: FfiConverterRustBuffer<CrawlResult> {
override fun read(buf: ByteBuffer): CrawlResult {
return CrawlResult(
FfiConverterSequenceTypeBlockInfo.read(buf),
FfiConverterString.read(buf),
FfiConverterString.read(buf),
)
}
override fun allocationSize(value: CrawlResult) = (
FfiConverterSequenceTypeBlockInfo.allocationSize(value.`blocks`) +
FfiConverterString.allocationSize(value.`title`) +
FfiConverterString.allocationSize(value.`summary`)
)
override fun write(value: CrawlResult, buf: ByteBuffer) {
FfiConverterSequenceTypeBlockInfo.write(value.`blocks`, buf)
FfiConverterString.write(value.`title`, buf)
FfiConverterString.write(value.`summary`, buf)
}
}
data class DocClock (
var `docId`: kotlin.String,
var `timestamp`: kotlin.Long
@@ -2732,6 +2860,38 @@ public object FfiConverterOptionalLong: FfiConverterRustBuffer<kotlin.Long?> {
/**
* @suppress
*/
public object FfiConverterOptionalString: FfiConverterRustBuffer<kotlin.String?> {
override fun read(buf: ByteBuffer): kotlin.String? {
if (buf.get().toInt() == 0) {
return null
}
return FfiConverterString.read(buf)
}
override fun allocationSize(value: kotlin.String?): ULong {
if (value == null) {
return 1UL
} else {
return 1UL + FfiConverterString.allocationSize(value)
}
}
override fun write(value: kotlin.String?, buf: ByteBuffer) {
if (value == null) {
buf.put(0)
} else {
buf.put(1)
FfiConverterString.write(value, buf)
}
}
}
/**
* @suppress
*/
@@ -2828,6 +2988,38 @@ public object FfiConverterOptionalTypeDocRecord: FfiConverterRustBuffer<DocRecor
/**
* @suppress
*/
public object FfiConverterOptionalSequenceString: FfiConverterRustBuffer<List<kotlin.String>?> {
override fun read(buf: ByteBuffer): List<kotlin.String>? {
if (buf.get().toInt() == 0) {
return null
}
return FfiConverterSequenceString.read(buf)
}
override fun allocationSize(value: List<kotlin.String>?): ULong {
if (value == null) {
return 1UL
} else {
return 1UL + FfiConverterSequenceString.allocationSize(value)
}
}
override fun write(value: List<kotlin.String>?, buf: ByteBuffer) {
if (value == null) {
buf.put(0)
} else {
buf.put(1)
FfiConverterSequenceString.write(value, buf)
}
}
}
/**
* @suppress
*/
@@ -2856,6 +3048,62 @@ public object FfiConverterSequenceLong: FfiConverterRustBuffer<List<kotlin.Long>
/**
* @suppress
*/
public object FfiConverterSequenceString: FfiConverterRustBuffer<List<kotlin.String>> {
override fun read(buf: ByteBuffer): List<kotlin.String> {
val len = buf.getInt()
return List<kotlin.String>(len) {
FfiConverterString.read(buf)
}
}
override fun allocationSize(value: List<kotlin.String>): ULong {
val sizeForLength = 4UL
val sizeForItems = value.map { FfiConverterString.allocationSize(it) }.sum()
return sizeForLength + sizeForItems
}
override fun write(value: List<kotlin.String>, buf: ByteBuffer) {
buf.putInt(value.size)
value.iterator().forEach {
FfiConverterString.write(it, buf)
}
}
}
/**
* @suppress
*/
public object FfiConverterSequenceTypeBlockInfo: FfiConverterRustBuffer<List<BlockInfo>> {
override fun read(buf: ByteBuffer): List<BlockInfo> {
val len = buf.getInt()
return List<BlockInfo>(len) {
FfiConverterTypeBlockInfo.read(buf)
}
}
override fun allocationSize(value: List<BlockInfo>): ULong {
val sizeForLength = 4UL
val sizeForItems = value.map { FfiConverterTypeBlockInfo.allocationSize(it) }.sum()
return sizeForLength + sizeForItems
}
override fun write(value: List<BlockInfo>, buf: ByteBuffer) {
buf.putInt(value.size)
value.iterator().forEach {
FfiConverterTypeBlockInfo.write(it, buf)
}
}
}
/**
* @suppress
*/

View File

@@ -1,3 +1,5 @@
import type { CrawlResult } from '@affine/nbstore';
export interface Blob {
key: string;
// base64 encoded data
@@ -149,4 +151,8 @@ export interface NbStorePlugin {
uploadedAt: number | null;
}) => Promise<void>;
clearClocks: (options: { id: string }) => Promise<void>;
crawlDocData: (options: {
id: string;
docId: string;
}) => Promise<CrawlResult>;
}

View File

@@ -336,4 +336,7 @@ export const NbStoreNativeDBApis: NativeDBApis = {
uploadedAt: uploadedAt ? uploadedAt.getTime() : null,
});
},
crawlDocData: async function (id: string, docId: string) {
return NbStore.crawlDocData({ id, docId });
},
};
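
For orientation, a hedged consumer-side sketch of the new API (not part of this commit; the id values are placeholders and the pool is assumed to be connected already):

// Sketch only: crawl one doc through the wrapper added above.
const id = '<universal-id>'; // placeholder universal storage id
const docId = '<doc-id>'; // placeholder document id
const result = await NbStoreNativeDBApis.crawlDocData(id, docId);
console.log(result.title, result.summary);
for (const block of result.blocks) {
  // content, blob, refDocId, refInfo are all optional on BlockInfo.
  console.log(block.blockId, block.flavour, block.content?.join(' '));
}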

View File

@@ -47,4 +47,5 @@ export const nbstoreHandlers: NativeDBApis = {
clearClocks: POOL.clearClocks.bind(POOL),
setBlobUploadedAt: POOL.setBlobUploadedAt.bind(POOL),
getBlobUploadedAt: POOL.getBlobUploadedAt.bind(POOL),
crawlDocData: POOL.crawlDocData.bind(POOL),
};
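
These handlers simply delegate to the pool; how they are exposed to the renderer is outside this diff. A purely hypothetical sketch of generic IPC wiring for such a handler map (channel naming and registration are illustrative assumptions, not AFFiNE's actual wiring):

import { ipcMain } from 'electron';

// Hypothetical: register every handler under a namespaced channel.
function registerNbstoreHandlers(
  handlers: Record<string, (...args: any[]) => Promise<unknown>>
) {
  for (const [name, fn] of Object.entries(handlers)) {
    ipcMain.handle(`nbstore:${name}`, (_event, ...args) => fn(...args));
  }
}

registerNbstoreHandlers(nbstoreHandlers as any);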

View File

@@ -1,3 +1,5 @@
import type { CrawlResult } from '@affine/nbstore';
export interface Blob {
key: string;
// base64 encoded data
@@ -149,4 +151,8 @@ export interface NbStorePlugin {
uploadedAt: number | null;
}) => Promise<void>;
clearClocks: (options: { id: string }) => Promise<void>;
crawlDocData: (options: {
id: string;
docId: string;
}) => Promise<CrawlResult>;
}

View File

@@ -4,6 +4,7 @@ import {
} from '@affine/core/modules/workspace-engine';
import {
type BlobRecord,
type CrawlResult,
type DocClock,
type DocRecord,
type ListedBlobRecord,
@@ -336,4 +337,10 @@ export const NbStoreNativeDBApis: NativeDBApis = {
uploadedAt: uploadedAt ? uploadedAt.getTime() : null,
});
},
crawlDocData: async function (
id: string,
docId: string
): Promise<CrawlResult> {
return await NbStore.crawlDocData({ id, docId });
},
};
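
As a hint of what downstream indexing might do with the result, a hypothetical flattening step (this mapping is illustrative, not the actual indexer code):

import type { CrawlResult } from '@affine/nbstore';

// Hypothetical: reduce a CrawlResult to plain searchable strings.
function toSearchableText(result: CrawlResult): string[] {
  return [
    result.title,
    result.summary,
    ...result.blocks.flatMap(block => block.content ?? []),
  ];
}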

View File

@@ -15,7 +15,7 @@ path = "uniffi-bindgen.rs"
use-as-lib = ["affine_nbstore/use-as-lib"]
[dependencies]
-affine_common = { workspace = true }
+affine_common = { workspace = true, features = ["hashcash"] }
affine_nbstore = { workspace = true }
anyhow = { workspace = true }
base64-simd = { workspace = true }

View File

@@ -183,6 +183,52 @@ impl From<affine_nbstore::ListedBlob> for ListedBlob {
}
}
#[derive(uniffi::Record)]
pub struct BlockInfo {
pub block_id: String,
pub flavour: String,
pub content: Option<Vec<String>>,
pub blob: Option<Vec<String>>,
pub ref_doc_id: Option<Vec<String>>,
pub ref_info: Option<Vec<String>>,
pub parent_flavour: Option<String>,
pub parent_block_id: Option<String>,
pub additional: Option<String>,
}
impl From<affine_nbstore::indexer::NativeBlockInfo> for BlockInfo {
fn from(value: affine_nbstore::indexer::NativeBlockInfo) -> Self {
Self {
block_id: value.block_id,
flavour: value.flavour,
content: value.content,
blob: value.blob,
ref_doc_id: value.ref_doc_id,
ref_info: value.ref_info,
parent_flavour: value.parent_flavour,
parent_block_id: value.parent_block_id,
additional: value.additional,
}
}
}
#[derive(uniffi::Record)]
pub struct CrawlResult {
pub blocks: Vec<BlockInfo>,
pub title: String,
pub summary: String,
}
impl From<affine_nbstore::indexer::NativeCrawlResult> for CrawlResult {
fn from(value: affine_nbstore::indexer::NativeCrawlResult) -> Self {
Self {
blocks: value.blocks.into_iter().map(Into::into).collect(),
title: value.title,
summary: value.summary,
}
}
}
#[derive(uniffi::Object)]
pub struct DocStoragePool {
inner: SqliteDocStoragePool,
@@ -643,4 +689,14 @@ impl DocStoragePool {
.map(|t| t.and_utc().timestamp_millis()),
)
}
pub async fn crawl_doc_data(&self, universal_id: String, doc_id: String) -> Result<CrawlResult> {
let result = self
.inner
.get(universal_id.clone())
.await?
.crawl_doc_data(&doc_id)
.await?;
Ok(result.into())
}
}

View File

@@ -7,7 +7,7 @@ version = "0.0.0"
crate-type = ["cdylib", "rlib"]
[dependencies]
-affine_common = { workspace = true }
+affine_common = { workspace = true, features = ["hashcash"] }
affine_media_capture = { path = "./media_capture" }
affine_nbstore = { path = "./nbstore" }
affine_sqlite_v1 = { path = "./sqlite_v1" }
@@ -22,8 +22,14 @@ sqlx = { workspace = true, default-features = false, features = [
"sqlite",
"tls-rustls",
] }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["full"] }
[dev-dependencies]
chrono = { workspace = true }
serde_json = { workspace = true }
uuid = { workspace = true }
[build-dependencies]
napi-build = { workspace = true }
sqlx = { workspace = true, default-features = false, features = [

View File

@@ -55,6 +55,7 @@ export declare class DocStoragePool {
connect(universalId: string, path: string): Promise<void>
disconnect(universalId: string): Promise<void>
checkpoint(universalId: string): Promise<void>
crawlDocData(universalId: string, docId: string): Promise<NativeCrawlResult>
setSpaceId(universalId: string, spaceId: string): Promise<void>
pushUpdate(universalId: string, docId: string, update: Uint8Array): Promise<Date>
getDocSnapshot(universalId: string, docId: string): Promise<DocRecord | null>
@@ -115,6 +116,24 @@ export interface ListedBlob {
createdAt: Date
}
export interface NativeBlockInfo {
blockId: string
flavour: string
content?: Array<string>
blob?: Array<string>
refDocId?: Array<string>
refInfo?: Array<string>
parentFlavour?: string
parentBlockId?: string
additional?: string
}
export interface NativeCrawlResult {
blocks: Array<NativeBlockInfo>
title: string
summary: string
}
export interface SetBlob {
key: string
data: Uint8Array
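
A sketch of driving the napi binding directly, using the signatures declared above (the import path and constructor are assumptions, and the id values are placeholders):

import { DocStoragePool } from '@affine/native'; // assumed package name

const pool = new DocStoragePool(); // construction not shown in this diff
await pool.connect('<universal-id>', '/path/to/storage.db');
// Returns a NativeCrawlResult: { blocks, title, summary }.
const crawled = await pool.crawlDocData('<universal-id>', '<doc-id>');
await pool.disconnect('<universal-id>');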

View File

@@ -10,11 +10,13 @@ crate-type = ["cdylib", "rlib"]
use-as-lib = ["napi-derive/noop", "napi/noop"]
[dependencies]
affine_common = { workspace = true, features = ["ydoc-loader"] }
affine_schema = { path = "../schema" }
anyhow = { workspace = true }
chrono = { workspace = true }
napi = { workspace = true }
napi-derive = { workspace = true }
serde = { workspace = true, features = ["derive"] }
sqlx = { workspace = true, default-features = false, features = [
"chrono",
"macros",
@@ -25,6 +27,7 @@ sqlx = { workspace = true, default-features = false, features = [
] }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["full"] }
y-octo = { workspace = true }
[target.'cfg(any(target_os = "ios", target_os = "android"))'.dependencies]
uniffi = { workspace = true }
@@ -42,3 +45,8 @@ sqlx = { workspace = true, default-features = false, features = [
"tls-rustls",
] }
tokio = { workspace = true, features = ["full"] }
[dev-dependencies]
serde_json = { workspace = true }
uuid = { workspace = true, features = ["v4"] }

View File

@@ -103,7 +103,7 @@ impl SqliteDocStorage {
sqlx::query(r#"INSERT INTO updates (doc_id, data, created_at) VALUES ($1, $2, $3);"#)
.bind(doc_id)
-.bind(update.as_ref())
+.bind(update)
.bind(timestamp)
.execute(&mut *tx)
.await?;
@@ -358,7 +358,7 @@ mod tests {
assert_eq!(result.len(), 4);
assert_eq!(
-result.iter().map(|u| u.bin.as_ref()).collect::<Vec<_>>(),
+result.iter().map(|u| u.bin.to_vec()).collect::<Vec<_>>(),
updates
);
}
@@ -382,7 +382,7 @@ mod tests {
let result = storage.get_doc_snapshot("test".to_string()).await.unwrap();
assert!(result.is_some());
-assert_eq!(result.unwrap().bin.as_ref(), vec![0, 0]);
+assert_eq!(result.unwrap().bin.to_vec(), vec![0, 0]);
}
#[tokio::test]
@@ -400,7 +400,7 @@ mod tests {
let result = storage.get_doc_snapshot("test".to_string()).await.unwrap();
assert!(result.is_some());
-assert_eq!(result.unwrap().bin.as_ref(), vec![0, 0]);
+assert_eq!(result.unwrap().bin.to_vec(), vec![0, 0]);
let snapshot = DocRecord {
doc_id: "test".to_string(),
@@ -416,7 +416,7 @@ mod tests {
let result = storage.get_doc_snapshot("test".to_string()).await.unwrap();
assert!(result.is_some());
-assert_eq!(result.unwrap().bin.as_ref(), vec![0, 0]);
+assert_eq!(result.unwrap().bin.to_vec(), vec![0, 0]);
}
#[tokio::test]

View File

@@ -1,3 +1,5 @@
use affine_common::doc_parser::ParseError;
pub type Result<T> = std::result::Result<T, Error>;
#[derive(Debug, thiserror::Error)]
@@ -8,4 +10,6 @@ pub enum Error {
MigrateError(#[from] sqlx::migrate::MigrateError),
#[error("Invalid operation")]
InvalidOperation,
#[error(transparent)]
Parse(#[from] ParseError),
}

View File

@@ -0,0 +1,180 @@
use affine_common::doc_parser::{parse_doc_from_binary, BlockInfo, CrawlResult, ParseError};
use napi_derive::napi;
use serde::Serialize;
use y_octo::DocOptions;
use super::{error::Result, storage::SqliteDocStorage};
#[napi(object)]
#[derive(Debug, Serialize)]
pub struct NativeBlockInfo {
pub block_id: String,
pub flavour: String,
pub content: Option<Vec<String>>,
pub blob: Option<Vec<String>>,
pub ref_doc_id: Option<Vec<String>>,
pub ref_info: Option<Vec<String>>,
pub parent_flavour: Option<String>,
pub parent_block_id: Option<String>,
pub additional: Option<String>,
}
#[napi(object)]
#[derive(Debug, Serialize)]
pub struct NativeCrawlResult {
pub blocks: Vec<NativeBlockInfo>,
pub title: String,
pub summary: String,
}
impl From<BlockInfo> for NativeBlockInfo {
fn from(value: BlockInfo) -> Self {
Self {
block_id: value.block_id,
flavour: value.flavour,
content: value.content,
blob: value.blob,
ref_doc_id: value.ref_doc_id,
ref_info: value.ref_info,
parent_flavour: value.parent_flavour,
parent_block_id: value.parent_block_id,
additional: value.additional,
}
}
}
impl From<CrawlResult> for NativeCrawlResult {
fn from(value: CrawlResult) -> Self {
Self {
blocks: value.blocks.into_iter().map(Into::into).collect(),
title: value.title,
summary: value.summary,
}
}
}
impl SqliteDocStorage {
pub async fn crawl_doc_data(&self, doc_id: &str) -> Result<NativeCrawlResult> {
let doc_bin = self
.load_doc_binary(doc_id)
.await?
.ok_or(ParseError::DocNotFound)?;
let result = parse_doc_from_binary(doc_bin, doc_id.to_string())?;
Ok(result.into())
}
async fn load_doc_binary(&self, doc_id: &str) -> Result<Option<Vec<u8>>> {
let snapshot = self.get_doc_snapshot(doc_id.to_string()).await?;
let mut updates = self.get_doc_updates(doc_id.to_string()).await?;
if snapshot.is_none() && updates.is_empty() {
return Ok(None);
}
updates.sort_by(|a, b| a.timestamp.cmp(&b.timestamp));
let mut segments =
Vec::with_capacity(snapshot.as_ref().map(|_| 1).unwrap_or(0) + updates.len());
if let Some(record) = snapshot {
segments.push(record.bin.to_vec());
}
segments.extend(updates.into_iter().map(|update| update.bin.to_vec()));
merge_updates(segments, doc_id).map(Some)
}
}
fn merge_updates(mut segments: Vec<Vec<u8>>, guid: &str) -> Result<Vec<u8>> {
if segments.is_empty() {
return Err(ParseError::DocNotFound.into());
}
if segments.len() == 1 {
return segments.pop().ok_or(ParseError::DocNotFound.into());
}
let mut doc = DocOptions::new().with_guid(guid.to_string()).build();
for update in segments.iter() {
doc
.apply_update_from_binary_v1(update)
.map_err(|_| ParseError::InvalidBinary)?;
}
let buffer = doc
.encode_update_v1()
.map_err(|err| ParseError::ParserError(err.to_string()))?;
Ok(buffer)
}
#[cfg(test)]
mod tests {
use std::path::{Path, PathBuf};
use affine_common::doc_parser::ParseError;
use chrono::Utc;
use serde_json::Value;
use tokio::fs;
use uuid::Uuid;
use super::{super::error::Error, *};
const DEMO_BIN: &[u8] = include_bytes!("../../../../common/native/fixtures/demo.ydoc");
const DEMO_JSON: &[u8] = include_bytes!("../../../../common/native/fixtures/demo.ydoc.json");
fn temp_workspace_dir() -> PathBuf {
std::env::temp_dir().join(format!("affine-native-{}", Uuid::new_v4()))
}
async fn init_db(path: &Path) -> SqliteDocStorage {
fs::create_dir_all(path.parent().unwrap()).await.unwrap();
let storage = SqliteDocStorage::new(path.to_string_lossy().into_owned());
storage.connect().await.unwrap();
storage
}
async fn cleanup(path: &Path) {
let _ = fs::remove_dir_all(path.parent().unwrap()).await;
}
#[tokio::test]
async fn parse_demo_snapshot_matches_fixture() {
let base = temp_workspace_dir();
fs::create_dir_all(&base).await.unwrap();
let db_path = base.join("storage.db");
let storage = init_db(&db_path).await;
sqlx::query(r#"INSERT INTO snapshots (doc_id, data, updated_at) VALUES (?, ?, ?)"#)
.bind("demo-doc")
.bind(DEMO_BIN)
.bind(Utc::now().naive_utc())
.execute(&storage.pool)
.await
.unwrap();
let result = storage.crawl_doc_data("demo-doc").await.unwrap();
let expected: Value = serde_json::from_slice(DEMO_JSON).unwrap();
let actual = serde_json::to_value(&result).unwrap();
assert_eq!(expected, actual);
storage.close().await;
cleanup(&db_path).await;
}
#[tokio::test]
async fn missing_doc_returns_error() {
let base = temp_workspace_dir();
fs::create_dir_all(&base).await.unwrap();
let db_path = base.join("storage.db");
let storage = init_db(&db_path).await;
let err = storage.crawl_doc_data("absent-doc").await.unwrap_err();
assert!(matches!(err, Error::Parse(ParseError::DocNotFound)));
storage.close().await;
cleanup(&db_path).await;
}
}
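
One behavioral detail worth noting from the tests above: crawling a doc with neither a snapshot nor updates fails with Error::Parse(ParseError::DocNotFound) rather than returning an empty result. Across the N-API boundary that presumably surfaces as a rejected promise, so a caller would guard along these lines (hedged sketch; the exact error shape is not shown in this diff):

try {
  const crawled = await pool.crawlDocData('<universal-id>', '<doc-id>');
  // hand `crawled.blocks` off to the indexer here
} catch (err) {
  // Assumed: DocNotFound arrives as a rejection; skip docs that have
  // not been materialized locally yet.
  console.warn('crawl skipped: doc not found', err);
}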

View File

@@ -3,6 +3,7 @@ pub mod blob_sync;
pub mod doc;
pub mod doc_sync;
pub mod error;
pub mod indexer;
pub mod pool;
pub mod storage;
@@ -117,6 +118,20 @@ impl DocStoragePool {
Ok(())
}
#[napi]
pub async fn crawl_doc_data(
&self,
universal_id: String,
doc_id: String,
) -> Result<indexer::NativeCrawlResult> {
let result = self
.get(universal_id)
.await?
.crawl_doc_data(&doc_id)
.await?;
Ok(result)
}
#[napi]
pub async fn set_space_id(&self, universal_id: String, space_id: String) -> Result<()> {
self.get(universal_id).await?.set_space_id(space_id).await?;