From 059d3aa04aa90f9693d5f64a67efa39c61646284 Mon Sep 17 00:00:00 2001
From: DarkSky
Date: Sun, 1 Feb 2026 20:15:34 +0800
Subject: [PATCH] feat: improve native

---
 Cargo.lock                                |  2 +
 packages/backend/native/Cargo.toml        |  1 +
 packages/backend/native/src/doc.rs        | 80 +++++++++++++++--------
 packages/backend/native/src/doc_loader.rs |  7 +-
 packages/backend/native/src/lib.rs        | 42 +++++-------
 packages/common/native/Cargo.toml         |  2 +
 packages/common/native/src/lib.rs         |  2 +
 packages/common/native/src/napi_utils.rs  | 22 +++++++
 8 files changed, 101 insertions(+), 57 deletions(-)
 create mode 100644 packages/common/native/src/napi_utils.rs

diff --git a/Cargo.lock b/Cargo.lock
index 63793b04dd..9c39a662a2 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -44,6 +44,7 @@ dependencies = [
  "docx-parser",
  "infer",
  "nanoid",
+ "napi",
  "path-ext",
  "pdf-extract",
  "pulldown-cmark",
@@ -126,6 +127,7 @@ dependencies = [
  "affine_nbstore",
  "affine_sqlite_v1",
  "chrono",
+ "mimalloc",
  "napi",
  "napi-build",
  "napi-derive",
diff --git a/packages/backend/native/Cargo.toml b/packages/backend/native/Cargo.toml
index df3493f835..faecf5eebe 100644
--- a/packages/backend/native/Cargo.toml
+++ b/packages/backend/native/Cargo.toml
@@ -11,6 +11,7 @@ crate-type = ["cdylib"]
 affine_common = { workspace = true, features = [
   "doc-loader",
   "hashcash",
+  "napi",
   "ydoc-loader",
 ] }
 chrono = { workspace = true }
diff --git a/packages/backend/native/src/doc.rs b/packages/backend/native/src/doc.rs
index 642300f0d5..d11ba85d3a 100644
--- a/packages/backend/native/src/doc.rs
+++ b/packages/backend/native/src/doc.rs
@@ -1,4 +1,7 @@
-use affine_common::doc_parser::{self, BlockInfo, CrawlResult, MarkdownResult, PageDocContent, WorkspaceDocContent};
+use affine_common::{
+  doc_parser::{self, BlockInfo, CrawlResult, MarkdownResult, PageDocContent, WorkspaceDocContent},
+  napi_utils::map_napi_err,
+};
 use napi::bindgen_prelude::*;
 use napi_derive::napi;

@@ -95,22 +98,25 @@ impl From<CrawlResult> for NativeCrawlResult {

 #[napi]
 pub fn parse_doc_from_binary(doc_bin: Buffer, doc_id: String) -> Result<NativeCrawlResult> {
-  let result = doc_parser::parse_doc_from_binary(doc_bin.into(), doc_id)
-    .map_err(|e| Error::new(Status::GenericFailure, e.to_string()))?;
+  let result = map_napi_err(
+    doc_parser::parse_doc_from_binary(doc_bin.into(), doc_id),
+    Status::GenericFailure,
+  )?;
   Ok(result.into())
 }

 #[napi]
 pub fn parse_page_doc(doc_bin: Buffer, max_summary_length: Option<u32>) -> Result<Option<NativePageDocContent>> {
-  let result = doc_parser::parse_page_doc(doc_bin.into(), max_summary_length.map(|v| v as isize))
-    .map_err(|e| Error::new(Status::GenericFailure, e.to_string()))?;
+  let result = map_napi_err(
+    doc_parser::parse_page_doc(doc_bin.into(), max_summary_length.map(|v| v as isize)),
+    Status::GenericFailure,
+  )?;
   Ok(result.map(Into::into))
 }

 #[napi]
 pub fn parse_workspace_doc(doc_bin: Buffer) -> Result<Option<NativeWorkspaceDocContent>> {
-  let result =
-    doc_parser::parse_workspace_doc(doc_bin.into()).map_err(|e| Error::new(Status::GenericFailure, e.to_string()))?;
+  let result = map_napi_err(doc_parser::parse_workspace_doc(doc_bin.into()), Status::GenericFailure)?;
   Ok(result.map(Into::into))
 }

@@ -121,15 +127,19 @@ pub fn parse_doc_to_markdown(
   doc_bin: Buffer,
   doc_id: String,
   ai_editable: Option<bool>,
   doc_url_prefix: Option<String>,
 ) -> Result<NativeMarkdownResult> {
-  let result = doc_parser::parse_doc_to_markdown(doc_bin.into(), doc_id, ai_editable.unwrap_or(false), doc_url_prefix)
-    .map_err(|e| Error::new(Status::GenericFailure, e.to_string()))?;
+  let result = map_napi_err(
+    doc_parser::parse_doc_to_markdown(doc_bin.into(), doc_id, ai_editable.unwrap_or(false), doc_url_prefix),
+    Status::GenericFailure,
+  )?;
   Ok(result.into())
 }

 #[napi]
 pub fn read_all_doc_ids_from_root_doc(doc_bin: Buffer, include_trash: Option<bool>) -> Result<Vec<String>> {
-  let result = doc_parser::get_doc_ids_from_binary(doc_bin.into(), include_trash.unwrap_or(false))
-    .map_err(|e| Error::new(Status::GenericFailure, e.to_string()))?;
+  let result = map_napi_err(
+    doc_parser::get_doc_ids_from_binary(doc_bin.into(), include_trash.unwrap_or(false)),
+    Status::GenericFailure,
+  )?;
   Ok(result)
 }

@@ -144,8 +154,10 @@ pub fn read_all_doc_ids_from_root_doc(doc_bin: Buffer, include_trash: Option<bool>) -> Result<Vec<String>> {
 #[napi]
 pub fn create_doc_with_markdown(title: String, markdown: String, doc_id: String) -> Result<Buffer> {
-  let result = doc_parser::build_full_doc(&title, &markdown, &doc_id)
-    .map_err(|e| Error::new(Status::GenericFailure, e.to_string()))?;
+  let result = map_napi_err(
+    doc_parser::build_full_doc(&title, &markdown, &doc_id),
+    Status::GenericFailure,
+  )?;
   Ok(Buffer::from(result))
 }

@@ -161,8 +173,10 @@ pub fn create_doc_with_markdown(title: String, markdown: String, doc_id: String) -> Result<Buffer> {
 /// A Buffer containing only the delta (changes) as a y-octo update binary
 #[napi]
 pub fn update_doc_with_markdown(existing_binary: Buffer, new_markdown: String, doc_id: String) -> Result<Buffer> {
-  let result = doc_parser::update_doc(&existing_binary, &new_markdown, &doc_id)
-    .map_err(|e| Error::new(Status::GenericFailure, e.to_string()))?;
+  let result = map_napi_err(
+    doc_parser::update_doc(&existing_binary, &new_markdown, &doc_id),
+    Status::GenericFailure,
+  )?;
   Ok(Buffer::from(result))
 }

@@ -177,8 +191,10 @@ pub fn update_doc_with_markdown(existing_binary: Buffer, new_markdown: String, doc_id: String) -> Result<Buffer> {
 /// A Buffer containing only the delta (changes) as a y-octo update binary
 #[napi]
 pub fn update_doc_title(existing_binary: Buffer, title: String, doc_id: String) -> Result<Buffer> {
-  let result = doc_parser::update_doc_title(&existing_binary, &doc_id, &title)
-    .map_err(|e| Error::new(Status::GenericFailure, e.to_string()))?;
+  let result = map_napi_err(
+    doc_parser::update_doc_title(&existing_binary, &doc_id, &title),
+    Status::GenericFailure,
+  )?;
   Ok(Buffer::from(result))
 }

@@ -202,14 +218,16 @@ pub fn update_doc_properties(
   created_by: Option<String>,
   updated_by: Option<String>,
 ) -> Result<Buffer> {
-  let result = doc_parser::update_doc_properties(
-    &existing_binary,
-    &properties_doc_id,
-    &target_doc_id,
-    created_by.as_deref(),
-    updated_by.as_deref(),
-  )
-  .map_err(|e| Error::new(Status::GenericFailure, e.to_string()))?;
+  let result = map_napi_err(
+    doc_parser::update_doc_properties(
+      &existing_binary,
+      &properties_doc_id,
+      &target_doc_id,
+      created_by.as_deref(),
+      updated_by.as_deref(),
+    ),
+    Status::GenericFailure,
+  )?;
   Ok(Buffer::from(result))
 }

@@ -225,8 +243,10 @@ pub fn update_doc_properties(
 /// A Buffer containing the y-octo update binary to apply to the root doc
 #[napi]
 pub fn add_doc_to_root_doc(root_doc_bin: Buffer, doc_id: String, title: Option<String>) -> Result<Buffer> {
-  let result = doc_parser::add_doc_to_root_doc(root_doc_bin.into(), &doc_id, title.as_deref())
-    .map_err(|e| Error::new(Status::GenericFailure, e.to_string()))?;
+  let result = map_napi_err(
+    doc_parser::add_doc_to_root_doc(root_doc_bin.into(), &doc_id, title.as_deref()),
+    Status::GenericFailure,
+  )?;
   Ok(Buffer::from(result))
 }

@@ -241,7 +261,9 @@ pub fn add_doc_to_root_doc(root_doc_bin: Buffer, doc_id: String, title: Option<String>) -> Result<Buffer> {
 #[napi]
 pub fn update_root_doc_meta_title(root_doc_bin: Buffer, doc_id: String, title: String) -> Result<Buffer> {
-  let result = doc_parser::update_root_doc_meta_title(&root_doc_bin, &doc_id, &title)
-    .map_err(|e| Error::new(Status::GenericFailure, e.to_string()))?;
+  let result = map_napi_err(
+    doc_parser::update_root_doc_meta_title(&root_doc_bin, &doc_id, &title),
+    Status::GenericFailure,
+  )?;
   Ok(Buffer::from(result))
 }
diff --git a/packages/backend/native/src/doc_loader.rs b/packages/backend/native/src/doc_loader.rs
index d6fb5352a7..ffe5fef1e4 100644
--- a/packages/backend/native/src/doc_loader.rs
+++ b/packages/backend/native/src/doc_loader.rs
@@ -1,7 +1,6 @@
-use affine_common::doc_loader::Doc;
+use affine_common::{doc_loader::Doc, napi_utils::map_napi_err};
 use napi::{
-  Env, Result, Task,
-  anyhow::anyhow,
+  Env, Result, Status, Task,
   bindgen_prelude::{AsyncTask, Buffer},
 };

@@ -54,7 +53,7 @@ impl Task for AsyncParseDocResponse {
   type JsValue = ParsedDoc;

   fn compute(&mut self) -> Result<Self::Output> {
-    let doc = Doc::new(&self.file_path, &self.doc).map_err(|e| anyhow!(e))?;
+    let doc = map_napi_err(Doc::new(&self.file_path, &self.doc), Status::GenericFailure)?;
     Ok(Document { inner: doc })
   }

diff --git a/packages/backend/native/src/lib.rs b/packages/backend/native/src/lib.rs
index e02a926ca0..253c492858 100644
--- a/packages/backend/native/src/lib.rs
+++ b/packages/backend/native/src/lib.rs
@@ -9,9 +9,8 @@ pub mod hashcash;
 pub mod html_sanitize;
 pub mod tiktoken;

-use std::fmt::{Debug, Display};
-
-use napi::{Error, Result, Status, bindgen_prelude::*};
+use affine_common::napi_utils::map_napi_err;
+use napi::{Result, Status, bindgen_prelude::*};
 use y_octo::Doc;

 #[cfg(not(target_arch = "arm"))]
@@ -21,35 +20,16 @@ static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;
 #[macro_use]
 extern crate napi_derive;

-fn map_err_inner<T, E: Debug + Display>(v: std::result::Result<T, E>, status: Status) -> Result<T> {
-  match v {
-    Ok(val) => Ok(val),
-    Err(e) => {
-      dbg!(&e);
-      Err(Error::new(status, e.to_string()))
-    }
-  }
-}
-
-macro_rules! map_err {
-  ($val: expr) => {
-    map_err_inner($val, Status::GenericFailure)
-  };
-  ($val: expr, $stauts: ident) => {
-    map_err_inner($val, $stauts)
-  };
-}
-
 /// Merge updates in form like `Y.applyUpdate(doc, update)` way and return the
 /// result binary.
 #[napi(catch_unwind)]
 pub fn merge_updates_in_apply_way(updates: Vec<Buffer>) -> Result<Buffer> {
   let mut doc = Doc::default();
   for update in updates {
-    map_err!(doc.apply_update_from_binary_v1(update.as_ref()))?;
+    map_napi_err(doc.apply_update_from_binary_v1(update.as_ref()), Status::GenericFailure)?;
   }
-  let buf = map_err!(doc.encode_update_v1())?;
+  let buf = map_napi_err(doc.encode_update_v1(), Status::GenericFailure)?;
   Ok(buf.into())
 }
@@ -59,3 +39,17 @@ pub const AFFINE_PRO_PUBLIC_KEY: Option<&'static str> = std::option_env!("AFFINE_PRO_PUBLIC_KEY");

 #[napi]
 pub const AFFINE_PRO_LICENSE_AES_KEY: Option<&'static str> = std::option_env!("AFFINE_PRO_LICENSE_AES_KEY");
+
+#[cfg(test)]
+mod tests {
+  use super::*;
+
+  #[test]
+  fn merge_updates_reports_generic_failure() {
+    let err = match merge_updates_in_apply_way(vec![Buffer::from(vec![0])]) {
+      Ok(_) => panic!("expected error"),
+      Err(err) => err,
+    };
+    assert_eq!(err.status, Status::GenericFailure);
+  }
+}
diff --git a/packages/common/native/Cargo.toml b/packages/common/native/Cargo.toml
index 0d20fcaca4..ee9c0f6eeb 100644
--- a/packages/common/native/Cargo.toml
+++ b/packages/common/native/Cargo.toml
@@ -22,6 +22,7 @@ doc-loader = [
   "url",
 ]
 hashcash = ["chrono", "sha3", "rand"]
+napi = ["dep:napi"]
 tree-sitter = [
   "cc",
   "dep:tree-sitter",
@@ -53,6 +54,7 @@ chrono = { workspace = true, optional = true }
 docx-parser = { workspace = true, optional = true }
 infer = { workspace = true, optional = true }
 nanoid = { workspace = true, optional = true }
+napi = { workspace = true, optional = true }
 path-ext = { workspace = true, optional = true }
 pdf-extract = { workspace = true, optional = true }
 pulldown-cmark = { workspace = true, optional = true }
diff --git a/packages/common/native/src/lib.rs b/packages/common/native/src/lib.rs
index 771f7dfb0f..d6e6383e18 100644
--- a/packages/common/native/src/lib.rs
+++ b/packages/common/native/src/lib.rs
@@ -4,3 +4,5 @@ pub mod doc_loader;
 pub mod doc_parser;
 #[cfg(feature = "hashcash")]
 pub mod hashcash;
+#[cfg(feature = "napi")]
+pub mod napi_utils;
diff --git a/packages/common/native/src/napi_utils.rs b/packages/common/native/src/napi_utils.rs
new file mode 100644
index 0000000000..28b7babbd2
--- /dev/null
+++ b/packages/common/native/src/napi_utils.rs
@@ -0,0 +1,22 @@
+use std::fmt::{Debug, Display};
+
+use napi::{Error, Result, Status};
+
+pub fn to_napi_error<E: Debug + Display>(err: E, status: Status) -> Error {
+  Error::new(status, err.to_string())
+}
+
+pub fn map_napi_err<T, E: Debug + Display>(value: std::result::Result<T, E>, status: Status) -> Result<T> {
+  value.map_err(|err| to_napi_error(err, status))
+}
+
+#[cfg(test)]
+mod tests {
+  use super::*;
+
+  #[test]
+  fn map_napi_err_keeps_message() {
+    let err = map_napi_err::<(), _>(Err("boom"), Status::GenericFailure).unwrap_err();
+    assert!(err.to_string().contains("boom"));
+  }
+}
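
Usage sketch for the new helper (illustrative, not part of the patch): map_napi_err from
affine_common::napi_utils replaces the per-crate map_err! macro by turning any result whose
error implements Debug + Display into a napi::Result carrying the chosen Status. The decode
helper and decode_update export below are hypothetical names used only to show the call
pattern; the real call sites are the doc_parser and y-octo calls changed above.

  use affine_common::napi_utils::map_napi_err;
  use napi::{Result, Status, bindgen_prelude::Buffer};
  use napi_derive::napi;

  // Hypothetical fallible operation standing in for a doc_parser / y-octo call.
  fn decode(bytes: &[u8]) -> std::result::Result<Vec<u8>, String> {
    if bytes.is_empty() {
      Err("empty update".to_string())
    } else {
      Ok(bytes.to_vec())
    }
  }

  // Hypothetical #[napi] export: map_napi_err converts the inner error into a
  // napi::Error with the given Status and the error's Display message.
  #[napi]
  pub fn decode_update(update: Buffer) -> Result<Buffer> {
    let decoded = map_napi_err(decode(update.as_ref()), Status::GenericFailure)?;
    Ok(Buffer::from(decoded))
  }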