feat(native): native reader for indexer (#14055)

This commit is contained in:
DarkSky
2025-12-07 16:22:11 +08:00
committed by GitHub
parent 69cdeedc4e
commit cf4e37c584
28 changed files with 1376 additions and 569 deletions

View File

@@ -15,7 +15,7 @@ path = "uniffi-bindgen.rs"
use-as-lib = ["affine_nbstore/use-as-lib"]
[dependencies]
affine_common = { workspace = true }
affine_common = { workspace = true, features = ["hashcash"] }
affine_nbstore = { workspace = true }
anyhow = { workspace = true }
base64-simd = { workspace = true }

View File

@@ -183,6 +183,52 @@ impl From<affine_nbstore::ListedBlob> for ListedBlob {
}
}
#[derive(uniffi::Record)]
/// FFI-facing record describing a single crawled document block,
/// exposed over uniffi. Mirrors `affine_nbstore::indexer::NativeBlockInfo`
/// (see the `From` impl below) field-for-field.
pub struct BlockInfo {
    // Unique id of the block within its document.
    pub block_id: String,
    // Block type tag (e.g. a flavour string from the editor schema) —
    // exact value set is defined by the indexer crate; not visible here.
    pub flavour: String,
    // Extracted text content fragments, if any.
    pub content: Option<Vec<String>>,
    // Associated blob identifiers, if any — presumably attachment/image refs;
    // TODO(review): confirm against NativeBlockInfo's producer.
    pub blob: Option<Vec<String>>,
    // Ids of documents this block references (links), if any.
    pub ref_doc_id: Option<Vec<String>>,
    // Additional reference metadata parallel to `ref_doc_id` —
    // NOTE(review): pairing with ref_doc_id is assumed, not shown here.
    pub ref_info: Option<Vec<String>>,
    // Flavour of the parent block, when the block has a parent.
    pub parent_flavour: Option<String>,
    // Id of the parent block, when the block has a parent.
    pub parent_block_id: Option<String>,
    // Opaque extra payload — format defined by the indexer; likely serialized
    // JSON, but that is not established by the visible code.
    pub additional: Option<String>,
}
impl From<affine_nbstore::indexer::NativeBlockInfo> for BlockInfo {
fn from(value: affine_nbstore::indexer::NativeBlockInfo) -> Self {
Self {
block_id: value.block_id,
flavour: value.flavour,
content: value.content,
blob: value.blob,
ref_doc_id: value.ref_doc_id,
ref_info: value.ref_info,
parent_flavour: value.parent_flavour,
parent_block_id: value.parent_block_id,
additional: value.additional,
}
}
}
#[derive(uniffi::Record)]
/// FFI-facing record for the outcome of crawling one document:
/// its blocks plus document-level title and summary strings.
/// Mirrors `affine_nbstore::indexer::NativeCrawlResult`.
pub struct CrawlResult {
    // All blocks extracted from the document, already converted to the
    // FFI `BlockInfo` shape.
    pub blocks: Vec<BlockInfo>,
    // Document title as produced by the indexer (may be empty —
    // not enforced here).
    pub title: String,
    // Document summary text as produced by the indexer.
    pub summary: String,
}
impl From<affine_nbstore::indexer::NativeCrawlResult> for CrawlResult {
fn from(value: affine_nbstore::indexer::NativeCrawlResult) -> Self {
Self {
blocks: value.blocks.into_iter().map(Into::into).collect(),
title: value.title,
summary: value.summary,
}
}
}
#[derive(uniffi::Object)]
pub struct DocStoragePool {
inner: SqliteDocStoragePool,
@@ -643,4 +689,14 @@ impl DocStoragePool {
.map(|t| t.and_utc().timestamp_millis()),
)
}
/// Crawls one document's indexable data from the storage identified by
/// `universal_id` and returns it as an FFI-friendly `CrawlResult`.
///
/// # Errors
/// Propagates any error from resolving the storage pool entry or from the
/// underlying `crawl_doc_data` call.
pub async fn crawl_doc_data(&self, universal_id: String, doc_id: String) -> Result<CrawlResult> {
  // `universal_id` is owned and not used after this call, so pass it by
  // value directly — the previous `.clone()` was a redundant allocation.
  let storage = self.inner.get(universal_id).await?;
  let native = storage.crawl_doc_data(&doc_id).await?;
  Ok(native.into())
}
}