feat(native): sync yocto codes (#14243)

#### PR Dependency Tree


* **PR #14243** 👈

This tree was auto-generated by
[Charcoal](https://github.com/danerwilliams/charcoal)

## Summary by CodeRabbit

* **New Features**
  * Batch management API for coordinated document mutations and change tracking (see the sketch below).
  * New document accessors (IDs, state snapshots, change/delete-set queries) and a subscriber count.

* **Chores**
  * Upgraded the Rust edition across packages to 2024.
  * Repository-wide formatting, stylistic cleanups, and test adjustments.

* **Breaking Changes**
  * Removed the Node native bindings package and its JS/TS declarations and tests (no longer published or available).
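For orientation, here is a minimal sketch of the new batch API, adapted from the `should_get_changed_items` test added in `batch.rs` in this PR. The import path for `batch_commit` is an assumption; the diff does not show how it is re-exported at the crate root.

```rust
// Sketch only: mirrors the test added in this PR.
// Assumption: `batch_commit` is reachable at the y_octo crate root.
use y_octo::{batch_commit, DocOptions, Value};

fn batch_demo() {
    let doc = DocOptions::new().with_client_id(1).build();
    // Every mutation inside the closure is tracked by a single Batch;
    // nested batch_commit calls fold their change sets into the outer one.
    batch_commit(doc.clone(), |d| {
        let mut arr = d.get_or_create_array("arr").unwrap();
        let mut text = d.create_text().unwrap();
        batch_commit(doc.clone(), |_| {
            arr.insert(0, Value::from(text.clone())).unwrap();
        });
        batch_commit(doc.clone(), |_| {
            text.insert(0, "hello world").unwrap();
        });
    });
    // New accessors shown elsewhere in the diff: doc.get_state_vector(),
    // doc.get_changed().
}
```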

DarkSky authored on 2026-01-11 06:08:33 +08:00 · committed by GitHub
parent d515d295ce · commit ca2462f987
143 changed files with 1396 additions and 4841 deletions

View File

@@ -1,6 +1,8 @@
# Editor configuration, see http://editorconfig.org
root = true
[*.rs]
max_line_length = 120
[*]
charset = utf-8
indent_style = space

View File

@@ -798,49 +798,6 @@ jobs:
name: fuzz-artifact
path: packages/common/y-octo/utils/fuzz/artifacts/**/*
y-octo-binding-test:
name: y-octo binding test on ${{ matrix.settings.target }}
runs-on: ${{ matrix.settings.os }}
strategy:
fail-fast: false
matrix:
settings:
- { target: 'x86_64-unknown-linux-gnu', os: 'ubuntu-latest' }
- { target: 'aarch64-unknown-linux-gnu', os: 'ubuntu-24.04-arm' }
- { target: 'x86_64-apple-darwin', os: 'macos-15-intel' }
- { target: 'aarch64-apple-darwin', os: 'macos-latest' }
- { target: 'x86_64-pc-windows-msvc', os: 'windows-latest' }
- { target: 'aarch64-pc-windows-msvc', os: 'windows-11-arm' }
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
extra-flags: workspaces focus @affine-tools/cli @affine/monorepo @y-octo/node
electron-install: false
- name: Install rustup (Windows 11 ARM)
if: matrix.settings.os == 'windows-11-arm'
shell: pwsh
run: |
Invoke-WebRequest -Uri "https://static.rust-lang.org/rustup/dist/aarch64-pc-windows-msvc/rustup-init.exe" -OutFile rustup-init.exe
.\rustup-init.exe --default-toolchain none -y
"$env:USERPROFILE\.cargo\bin" | Out-File -Append -Encoding ascii $env:GITHUB_PATH
"CARGO_HOME=$env:USERPROFILE\.cargo" | Out-File -Append -Encoding ascii $env:GITHUB_ENV
- name: Install Rust (Windows 11 ARM)
if: matrix.settings.os == 'windows-11-arm'
shell: pwsh
run: |
rustup install stable
rustup target add ${{ matrix.settings.target }}
cargo --version
- name: Build Rust
uses: ./.github/actions/build-rust
with:
target: ${{ matrix.settings.target }}
package: '@y-octo/node'
- name: Run tests
run: yarn affine @y-octo/node test
rust-test:
name: Run native tests
runs-on: ubuntu-latest
@@ -1387,7 +1344,6 @@ jobs:
- miri
- loom
- fuzzing
- y-octo-binding-test
- server-test
- server-e2e-test
- rust-test

View File

@@ -1,4 +1,8 @@
exclude = ["node_modules/**/*.toml", "target/**/*.toml"]
exclude = [
"node_modules/**/*.toml",
"target/**/*.toml",
"packages/frontend/apps/ios/App/Packages/AffineGraphQL/**/*.toml",
]
# https://taplo.tamasfe.dev/configuration/formatter-options.html
[formatting]

Cargo.lock (generated)
View File

@@ -6054,7 +6054,7 @@ checksum = "66fee0b777b0f5ac1c69bb06d361268faafa61cd4682ae064a171c16c433e9e4"
[[package]]
name = "y-octo"
version = "0.0.1"
version = "0.0.2"
dependencies = [
"ahash",
"arbitrary",
@@ -6081,17 +6081,6 @@ dependencies = [
"yrs",
]
[[package]]
name = "y-octo-node"
version = "0.0.1"
dependencies = [
"anyhow",
"napi",
"napi-build",
"napi-derive",
"y-octo",
]
[[package]]
name = "y-octo-utils"
version = "0.0.1"

View File

@@ -3,7 +3,6 @@ members = [
"./packages/backend/native",
"./packages/common/native",
"./packages/common/y-octo/core",
"./packages/common/y-octo/node",
"./packages/common/y-octo/utils",
"./packages/frontend/mobile-native",
"./packages/frontend/native",

View File

@@ -8,7 +8,6 @@
".",
"blocksuite/**/*",
"packages/*/*",
"packages/common/y-octo/node",
"packages/frontend/apps/*",
"tools/*",
"docs/reference",

View File

@@ -1,5 +1,5 @@
[package]
edition = "2021"
edition = "2024"
license-file = "LICENSE"
name = "affine_server_native"
version = "1.0.0"

View File

@@ -1,6 +1,4 @@
use affine_common::doc_parser::{
self, BlockInfo, CrawlResult, MarkdownResult, PageDocContent, WorkspaceDocContent,
};
use affine_common::doc_parser::{self, BlockInfo, CrawlResult, MarkdownResult, PageDocContent, WorkspaceDocContent};
use napi::bindgen_prelude::*;
use napi_derive::napi;
@@ -103,10 +101,7 @@ pub fn parse_doc_from_binary(doc_bin: Buffer, doc_id: String) -> Result<NativeCr
}
#[napi]
pub fn parse_page_doc(
doc_bin: Buffer,
max_summary_length: Option<i32>,
) -> Result<Option<NativePageDocContent>> {
pub fn parse_page_doc(doc_bin: Buffer, max_summary_length: Option<i32>) -> Result<Option<NativePageDocContent>> {
let result = doc_parser::parse_page_doc(doc_bin.into(), max_summary_length.map(|v| v as isize))
.map_err(|e| Error::new(Status::GenericFailure, e.to_string()))?;
Ok(result.map(Into::into))
@@ -114,8 +109,8 @@ pub fn parse_page_doc(
#[napi]
pub fn parse_workspace_doc(doc_bin: Buffer) -> Result<Option<NativeWorkspaceDocContent>> {
let result = doc_parser::parse_workspace_doc(doc_bin.into())
.map_err(|e| Error::new(Status::GenericFailure, e.to_string()))?;
let result =
doc_parser::parse_workspace_doc(doc_bin.into()).map_err(|e| Error::new(Status::GenericFailure, e.to_string()))?;
Ok(result.map(Into::into))
}
@@ -126,21 +121,13 @@ pub fn parse_doc_to_markdown(
ai_editable: Option<bool>,
doc_url_prefix: Option<String>,
) -> Result<NativeMarkdownResult> {
let result = doc_parser::parse_doc_to_markdown(
doc_bin.into(),
doc_id,
ai_editable.unwrap_or(false),
doc_url_prefix,
)
.map_err(|e| Error::new(Status::GenericFailure, e.to_string()))?;
let result = doc_parser::parse_doc_to_markdown(doc_bin.into(), doc_id, ai_editable.unwrap_or(false), doc_url_prefix)
.map_err(|e| Error::new(Status::GenericFailure, e.to_string()))?;
Ok(result.into())
}
#[napi]
pub fn read_all_doc_ids_from_root_doc(
doc_bin: Buffer,
include_trash: Option<bool>,
) -> Result<Vec<String>> {
pub fn read_all_doc_ids_from_root_doc(doc_bin: Buffer, include_trash: Option<bool>) -> Result<Vec<String>> {
let result = doc_parser::get_doc_ids_from_binary(doc_bin.into(), include_trash.unwrap_or(false))
.map_err(|e| Error::new(Status::GenericFailure, e.to_string()))?;
Ok(result)

View File

@@ -1,8 +1,8 @@
use affine_common::doc_loader::Doc;
use napi::{
Env, Result, Task,
anyhow::anyhow,
bindgen_prelude::{AsyncTask, Buffer},
Env, Result, Task,
};
#[napi(object)]

View File

@@ -1,4 +1,4 @@
use mp4parse::{read_mp4, TrackType};
use mp4parse::{TrackType, read_mp4};
use napi_derive::napi;
#[napi]
@@ -6,9 +6,7 @@ pub fn get_mime(input: &[u8]) -> String {
let mimetype = if let Some(kind) = infer::get(&input[..4096.min(input.len())]) {
kind.mime_type().to_string()
} else {
file_format::FileFormat::from_bytes(input)
.media_type()
.to_string()
file_format::FileFormat::from_bytes(input).media_type().to_string()
};
if mimetype == "video/mp4" {
detect_mp4_flavor(input)

View File

@@ -1,7 +1,7 @@
use std::convert::TryFrom;
use affine_common::hashcash::Stamp;
use napi::{bindgen_prelude::AsyncTask, Env, Result as NapiResult, Task};
use napi::{Env, Result as NapiResult, Task, bindgen_prelude::AsyncTask};
use napi_derive::napi;
pub struct AsyncVerifyChallengeResponse {
@@ -61,9 +61,6 @@ impl Task for AsyncMintChallengeResponse {
}
#[napi]
pub fn mint_challenge_response(
resource: String,
bits: Option<u32>,
) -> AsyncTask<AsyncMintChallengeResponse> {
pub fn mint_challenge_response(resource: String, bits: Option<u32>) -> AsyncTask<AsyncMintChallengeResponse> {
AsyncTask::new(AsyncMintChallengeResponse { bits, resource })
}

View File

@@ -11,7 +11,7 @@ pub mod tiktoken;
use std::fmt::{Debug, Display};
use napi::{bindgen_prelude::*, Error, Result, Status};
use napi::{Error, Result, Status, bindgen_prelude::*};
use y_octo::Doc;
#[cfg(not(target_arch = "arm"))]
@@ -58,5 +58,4 @@ pub fn merge_updates_in_apply_way(updates: Vec<Buffer>) -> Result<Buffer> {
pub const AFFINE_PRO_PUBLIC_KEY: Option<&'static str> = std::option_env!("AFFINE_PRO_PUBLIC_KEY");
#[napi]
pub const AFFINE_PRO_LICENSE_AES_KEY: Option<&'static str> =
std::option_env!("AFFINE_PRO_LICENSE_AES_KEY");
pub const AFFINE_PRO_LICENSE_AES_KEY: Option<&'static str> = std::option_env!("AFFINE_PRO_LICENSE_AES_KEY");

View File

@@ -57,11 +57,11 @@ fn try_remove_label(s: &str, i: usize) -> Option<usize> {
return None;
}
if let Some(ch) = s[next_idx..].chars().next() {
if ch == '.' {
next_idx += ch.len_utf8();
return Some(next_idx);
}
if let Some(ch) = s[next_idx..].chars().next()
&& ch == '.'
{
next_idx += ch.len_utf8();
return Some(next_idx);
}
None
}
@@ -84,9 +84,7 @@ fn remove_label(s: &str) -> String {
pub fn clean_content(content: &str) -> String {
let content = content.replace("\x00", "");
remove_label(&collapse_whitespace(&content))
.trim()
.to_string()
remove_label(&collapse_whitespace(&content)).trim().to_string()
}
#[cfg(test)]
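The hunk above is typical of the mechanical changes throughout this PR: nested `if let` blocks are collapsed into let-chains (`if let … && cond`), which the edition 2024 upgrade makes available. An illustrative before/after, independent of this file:

```rust
// Edition 2021 style: nested conditionals.
fn starts_with_dot_nested(s: &str) -> bool {
    if let Some(ch) = s.chars().next() {
        if ch == '.' {
            return true;
        }
    }
    false
}

// Edition 2024 style: the let-chain flattens one level of nesting.
fn starts_with_dot_chained(s: &str) -> bool {
    if let Some(ch) = s.chars().next()
        && ch == '.'
    {
        return true;
    }
    false
}
```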

View File

@@ -1,5 +1,5 @@
[package]
edition = "2021"
edition = "2024"
license-file = "LICENSE"
name = "affine_common"
version = "0.1.0"

View File

@@ -1,7 +1,7 @@
use std::hint::black_box;
use affine_common::hashcash::Stamp;
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
use criterion::{BenchmarkId, Criterion, criterion_group, criterion_main};
fn bench_hashcash(c: &mut Criterion) {
let mut group = c.benchmark_group("hashcash");

View File

@@ -1,6 +1,6 @@
use std::{
io::Cursor,
panic::{catch_unwind, AssertUnwindSafe},
panic::{AssertUnwindSafe, catch_unwind},
path::PathBuf,
};
@@ -22,9 +22,7 @@ pub struct DocOptions {
impl Default for DocOptions {
fn default() -> Self {
Self {
code_threshold: 1000,
}
Self { code_threshold: 1000 }
}
}
@@ -39,9 +37,7 @@ impl Doc {
}
pub fn with_options(file_path: &str, doc: &[u8], options: DocOptions) -> LoaderResult<Self> {
if let Some(kind) =
infer::get(&doc[..4096.min(doc.len())]).or(infer::get_from_path(file_path).ok().flatten())
{
if let Some(kind) = infer::get(&doc[..4096.min(doc.len())]).or(infer::get_from_path(file_path).ok().flatten()) {
if kind.extension() == "pdf" {
return Self::load_pdf(file_path, doc);
} else if kind.extension() == "docx" {
@@ -66,11 +62,10 @@ impl Doc {
}
"rs" | "c" | "cpp" | "h" | "hpp" | "js" | "ts" | "tsx" | "go" | "py" => {
let name = path.full_str().to_string();
let loader =
SourceCodeLoader::from_string(string).with_parser_option(LanguageParserOptions {
language: get_language_by_filename(&name)?,
parser_threshold: options.code_threshold,
});
let loader = SourceCodeLoader::from_string(string).with_parser_option(LanguageParserOptions {
language: get_language_by_filename(&name)?,
parser_threshold: options.code_threshold,
});
let splitter = TokenSplitter::default();
return Self::from_loader(file_path, loader, splitter);
}
@@ -89,10 +84,7 @@ impl Doc {
splitter: impl TextSplitter + 'static,
) -> Result<Doc, LoaderError> {
let name = file_path.to_string();
let chunks = catch_unwind(AssertUnwindSafe(|| {
Self::get_chunks_from_loader(loader, splitter)
}))
.map_err(|e| {
let chunks = catch_unwind(AssertUnwindSafe(|| Self::get_chunks_from_loader(loader, splitter))).map_err(|e| {
LoaderError::Other(match e.downcast::<String>() {
Ok(v) => *v,
Err(e) => match e.downcast::<&str>() {
@@ -124,8 +116,7 @@ impl Doc {
}
fn load_docx(file_path: &str, doc: &[u8]) -> LoaderResult<Self> {
let loader = DocxLoader::new(Cursor::new(doc))
.ok_or(LoaderError::Other("Failed to parse docx document".into()))?;
let loader = DocxLoader::new(Cursor::new(doc)).ok_or(LoaderError::Other("Failed to parse docx document".into()))?;
let splitter = TokenSplitter::default();
Self::from_loader(file_path, loader, splitter)
}
@@ -175,8 +166,7 @@ mod tests {
let buffer = read(fixtures.join(fixture)).unwrap();
let doc = Doc::with_options(fixture, &buffer, DocOptions { code_threshold: 0 }).unwrap();
for chunk in doc.chunks.iter() {
let output =
read_to_string(fixtures.join(format!("{}.{}.md", fixture, chunk.index))).unwrap();
let output = read_to_string(fixtures.join(format!("{}.{}.md", fixture, chunk.index))).unwrap();
assert_eq!(chunk.content, output);
}
}

View File

@@ -61,9 +61,7 @@ mod tests {
for (idx, doc) in documents.into_iter().enumerate() {
assert_eq!(
doc.page_content,
String::from_utf8_lossy(
&read(get_fixtures_path().join(format!("demo.docx.{}.md", idx))).unwrap()
)
String::from_utf8_lossy(&read(get_fixtures_path().join(format!("demo.docx.{}.md", idx))).unwrap())
);
}
}

View File

@@ -29,10 +29,8 @@ impl<R: Read> HtmlLoader<R> {
impl<R: Read + Send + Sync + 'static> Loader for HtmlLoader<R> {
fn load(mut self) -> LoaderResult<Vec<Document>> {
let cleaned_html = readability::extractor::extract(&mut self.html, &self.url)?;
let doc =
Document::new(format!("{}\n{}", cleaned_html.title, cleaned_html.text)).with_metadata(
HashMap::from([("source".to_string(), Value::from(self.url.as_str()))]),
);
let doc = Document::new(format!("{}\n{}", cleaned_html.title, cleaned_html.text))
.with_metadata(HashMap::from([("source".to_string(), Value::from(self.url.as_str()))]));
Ok(vec![doc])
}
@@ -46,10 +44,7 @@ mod tests {
fn test_html_loader() {
let input = "<p>Hello world!</p>";
let html_loader = HtmlLoader::new(
input.as_bytes(),
Url::parse("https://example.com/").unwrap(),
);
let html_loader = HtmlLoader::new(input.as_bytes(), Url::parse("https://example.com/").unwrap());
let documents = html_loader.load().unwrap();
@@ -66,16 +61,14 @@ mod tests {
#[test]
fn test_html_load_from_path() {
let buffer = include_bytes!("../../../fixtures/sample.html");
let html_loader = HtmlLoader::new(
Cursor::new(buffer),
Url::parse("https://example.com/").unwrap(),
);
let html_loader = HtmlLoader::new(Cursor::new(buffer), Url::parse("https://example.com/").unwrap());
let documents = html_loader.load().unwrap();
let expected = "Example Domain\n\n This domain is for use in illustrative examples in \
documents. You may\n use this domain in literature without prior \
coordination or asking for\n permission.\n More information...";
let expected =
"Example Domain\n\n This domain is for use in illustrative examples in documents. You may\n use \
this domain in literature without prior coordination or asking for\n permission.\n More \
information...";
assert_eq!(documents.len(), 1);
assert_eq!(

View File

@@ -23,6 +23,6 @@ pub trait Loader: Send + Sync {
pub use docx::DocxLoader;
pub use html::HtmlLoader;
pub use pdf::PdfExtractLoader;
pub use source::{get_language_by_filename, LanguageParserOptions, SourceCodeLoader};
pub use source::{LanguageParserOptions, SourceCodeLoader, get_language_by_filename};
pub use text::TextLoader;
pub use url::Url;

View File

@@ -1,4 +1,4 @@
use pdf_extract::{output_doc, output_doc_encrypted, PlainTextOutput};
use pdf_extract::{PlainTextOutput, output_doc, output_doc_encrypted};
/**
* modified from https://github.com/Abraxas-365/langchain-rust/tree/v4.6.0/src/document_loaders
@@ -72,8 +72,7 @@ mod tests {
assert_eq!(docs.len(), 1);
assert_eq!(
&docs[0].page_content[..100],
"\n\nSample PDF\nThis is a simple PDF file. Fun fun fun.\n\nLorem ipsum dolor sit amet, \
consectetuer a"
"\n\nSample PDF\nThis is a simple PDF file. Fun fun fun.\n\nLorem ipsum dolor sit amet, consectetuer a"
);
}

View File

@@ -3,7 +3,7 @@
*/
mod parser;
pub use parser::{get_language_by_filename, LanguageParser, LanguageParserOptions};
pub use parser::{LanguageParser, LanguageParserOptions, get_language_by_filename};
use super::*;

View File

@@ -59,11 +59,7 @@ pub struct LanguageParser {
impl Debug for LanguageParser {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"LanguageParser {{ language: {:?} }}",
self.parser_options.language
)
write!(f, "LanguageParser {{ language: {:?} }}", self.parser_options.language)
}
}
@@ -77,10 +73,7 @@ impl Clone for LanguageParser {
}
pub fn get_language_by_filename(name: &str) -> LoaderResult<Language> {
let extension = name
.split('.')
.next_back()
.ok_or(LoaderError::UnsupportedLanguage)?;
let extension = name.split('.').next_back().ok_or(LoaderError::UnsupportedLanguage)?;
let language = match extension.to_lowercase().as_str() {
"rs" => Language::Rust,
"c" => Language::C,
@@ -133,10 +126,7 @@ impl LanguageParser {
impl LanguageParser {
pub fn parse_code(&mut self, code: &String) -> LoaderResult<Vec<Document>> {
let tree = self
.parser
.parse(code, None)
.ok_or(LoaderError::UnsupportedLanguage)?;
let tree = self.parser.parse(code, None).ok_or(LoaderError::UnsupportedLanguage)?;
if self.parser_options.parser_threshold > tree.root_node().end_position().row as u64 {
return Ok(vec![Document::new(code).with_metadata(HashMap::from([
(
@@ -152,11 +142,7 @@ impl LanguageParser {
self.extract_functions_classes(tree, code)
}
pub fn extract_functions_classes(
&self,
tree: Tree,
code: &String,
) -> LoaderResult<Vec<Document>> {
pub fn extract_functions_classes(&self, tree: Tree, code: &String) -> LoaderResult<Vec<Document>> {
let mut chunks = Vec::new();
let count = tree.root_node().child_count();

View File

@@ -10,9 +10,7 @@ pub struct TextLoader {
impl TextLoader {
pub fn new<T: Into<String>>(input: T) -> Self {
Self {
content: input.into(),
}
Self { content: input.into() }
}
}

View File

@@ -7,8 +7,8 @@ mod types;
pub use document::{Chunk, Doc};
pub use error::{LoaderError, LoaderResult};
use loader::{
get_language_by_filename, DocxLoader, HtmlLoader, LanguageParserOptions, Loader,
PdfExtractLoader, SourceCodeLoader, TextLoader, Url,
DocxLoader, HtmlLoader, LanguageParserOptions, Loader, PdfExtractLoader, SourceCodeLoader, TextLoader, Url,
get_language_by_filename,
};
use splitter::{MarkdownSplitter, TextSplitter, TextSplitterError, TokenSplitter};
use types::Document;

View File

@@ -2,7 +2,7 @@
* modified from https://github.com/Abraxas-365/langchain-rust/tree/v4.6.0/src/text_splitter
*/
use text_splitter::ChunkConfig;
use tiktoken_rs::{get_bpe_from_model, get_bpe_from_tokenizer, tokenizer::Tokenizer, CoreBPE};
use tiktoken_rs::{CoreBPE, get_bpe_from_model, get_bpe_from_tokenizer, tokenizer::Tokenizer};
use super::TextSplitterError;
@@ -79,8 +79,8 @@ impl TryFrom<&SplitterOptions> for ChunkConfig<CoreBPE> {
fn try_from(options: &SplitterOptions) -> Result<Self, Self::Error> {
let tk = if !options.encoding_name.is_empty() {
let tokenizer = SplitterOptions::get_tokenizer_from_str(&options.encoding_name)
.ok_or(TextSplitterError::TokenizerNotFound)?;
let tokenizer =
SplitterOptions::get_tokenizer_from_str(&options.encoding_name).ok_or(TextSplitterError::TokenizerNotFound)?;
get_bpe_from_tokenizer(tokenizer).map_err(|_| TextSplitterError::InvalidTokenizer)?
} else {

View File

@@ -5,16 +5,13 @@ use y_octo::{Any, DocOptions, JwstCodecError, Map, Value};
use super::{
blocksuite::{
collect_child_ids, get_block_id, get_flavour, get_list_depth, get_string, nearest_by_flavour,
DocContext,
DocContext, collect_child_ids, get_block_id, get_flavour, get_list_depth, get_string, nearest_by_flavour,
},
delta_markdown::{
delta_value_to_inline_markdown, extract_inline_references, text_to_inline_markdown,
text_to_markdown, DeltaToMdOptions,
},
value::{
any_as_string, any_truthy, build_reference_payload, params_value_to_json, value_to_string,
DeltaToMdOptions, delta_value_to_inline_markdown, extract_inline_references, text_to_inline_markdown,
text_to_markdown,
},
value::{any_as_string, any_truthy, build_reference_payload, params_value_to_json, value_to_string},
};
const SUMMARY_LIMIT: usize = 1000;
@@ -402,17 +399,10 @@ pub fn parse_doc_from_binary(doc_bin: Vec<u8>, doc_id: String) -> Result<CrawlRe
None => continue,
};
let parent_block = parent_block_id
.as_ref()
.and_then(|id| context.block_pool.get(id));
let parent_block = parent_block_id.as_ref().and_then(|id| context.block_pool.get(id));
let parent_flavour = parent_block.and_then(get_flavour);
let note_block = nearest_by_flavour(
&block_id,
NOTE_FLAVOUR,
&context.parent_lookup,
&context.block_pool,
);
let note_block = nearest_by_flavour(&block_id, NOTE_FLAVOUR, &context.parent_lookup, &context.block_pool);
let note_block_id = note_block.as_ref().and_then(get_block_id);
let display_mode = determine_display_mode(note_block.as_ref());
@@ -438,14 +428,9 @@ pub fn parse_doc_from_binary(doc_bin: Vec<u8>, doc_id: String) -> Result<CrawlRe
continue;
}
if matches!(
flavour.as_str(),
"affine:paragraph" | "affine:list" | "affine:code"
) {
if matches!(flavour.as_str(), "affine:paragraph" | "affine:list" | "affine:code") {
if let Some(text) = block.get("prop:text").and_then(|value| value.to_text()) {
let database_name = if flavour == "affine:paragraph"
&& parent_flavour.as_deref() == Some("affine:database")
{
let database_name = if flavour == "affine:paragraph" && parent_flavour.as_deref() == Some("affine:database") {
parent_block.and_then(|map| get_string(map, "prop:title"))
} else {
None
@@ -467,10 +452,7 @@ pub fn parse_doc_from_binary(doc_bin: Vec<u8>, doc_id: String) -> Result<CrawlRe
continue;
}
if matches!(
flavour.as_str(),
"affine:embed-linked-doc" | "affine:embed-synced-doc"
) {
if matches!(flavour.as_str(), "affine:embed-linked-doc" | "affine:embed-synced-doc") {
if let Some(page_id) = get_string(block, "prop:pageId") {
let mut info = build_block(None);
let payload = embed_ref_payload(block, &page_id);
@@ -515,11 +497,7 @@ pub fn parse_doc_from_binary(doc_bin: Vec<u8>, doc_id: String) -> Result<CrawlRe
&flavour,
parent_flavour.as_ref(),
parent_block_id.as_ref(),
compose_additional(
&display_mode,
note_block_id.as_ref(),
database_name.as_ref(),
),
compose_additional(&display_mode, note_block_id.as_ref(), database_name.as_ref()),
);
info.content = Some(texts);
blocks.push(info);
@@ -559,10 +537,7 @@ pub fn parse_doc_from_binary(doc_bin: Vec<u8>, doc_id: String) -> Result<CrawlRe
})
}
pub fn get_doc_ids_from_binary(
doc_bin: Vec<u8>,
include_trash: bool,
) -> Result<Vec<String>, ParseError> {
pub fn get_doc_ids_from_binary(doc_bin: Vec<u8>, include_trash: bool) -> Result<Vec<String>, ParseError> {
if doc_bin.is_empty() || doc_bin == [0, 0] {
return Err(ParseError::InvalidBinary);
}
@@ -695,10 +670,7 @@ fn compose_additional(
database_name: Option<&String>,
) -> Option<String> {
let mut payload = JsonMap::new();
payload.insert(
"displayMode".into(),
JsonValue::String(display_mode.to_string()),
);
payload.insert("displayMode".into(), JsonValue::String(display_mode.to_string()));
if let Some(note_id) = note_block_id {
payload.insert("noteBlockId".into(), JsonValue::String(note_id.clone()));
}
@@ -721,10 +693,7 @@ fn apply_doc_ref(info: &mut BlockInfo, page_id: String, payload: Option<String>)
}
fn embed_ref_payload(block: &Map, page_id: &str) -> Option<String> {
let params = block
.get("prop:params")
.as_ref()
.and_then(params_value_to_json);
let params = block.get("prop:params").as_ref().and_then(params_value_to_json);
Some(build_reference_payload(page_id, params))
}
@@ -746,10 +715,10 @@ fn gather_surface_texts(block: &Map) -> Vec<String> {
if let Some(value_map) = elements.get("value").and_then(|value| value.to_map()) {
for value in value_map.values() {
if let Some(element) = value.to_map() {
if let Some(text) = element.get("text").and_then(|value| value.to_text()) {
texts.push(text.to_string());
}
if let Some(element) = value.to_map()
&& let Some(text) = element.get("text").and_then(|value| value.to_text())
{
texts.push(text.to_string());
}
}
}
@@ -784,12 +753,12 @@ fn gather_database_texts(block: &Map) -> (Vec<String>, Option<String>) {
fn gather_table_contents(block: &Map) -> Vec<String> {
let mut contents = Vec::new();
for key in block.keys() {
if key.starts_with("prop:cells.") && key.ends_with(".text") {
if let Some(value) = block.get(key).and_then(|value| value_to_string(&value)) {
if !value.is_empty() {
contents.push(value);
}
}
if key.starts_with("prop:cells.")
&& key.ends_with(".text")
&& let Some(value) = block.get(key).and_then(|value| value_to_string(&value))
&& !value.is_empty()
{
contents.push(value);
}
}
contents
@@ -800,11 +769,7 @@ struct DatabaseTable {
rows: Vec<Vec<String>>,
}
fn build_database_table(
block: &Map,
context: &DocContext,
md_options: &DeltaToMdOptions,
) -> Option<DatabaseTable> {
fn build_database_table(block: &Map, context: &DocContext, md_options: &DeltaToMdOptions) -> Option<DatabaseTable> {
let columns = parse_database_columns(block)?;
let cells_map = block.get("prop:cells").and_then(|v| v.to_map())?;
let child_ids = collect_child_ids(block);
@@ -826,15 +791,14 @@ fn build_database_table(
cell_text = text;
}
}
} else if let Some(row_cells) = &row_cells {
if let Some(cell_val) = row_cells.get(&column.id).and_then(|v| v.to_map()) {
if let Some(value) = cell_val.get("value") {
if let Some(text_md) = delta_value_to_inline_markdown(&value, md_options) {
cell_text = text_md;
} else {
cell_text = format_cell_value(&value, column);
}
}
} else if let Some(row_cells) = &row_cells
&& let Some(cell_val) = row_cells.get(&column.id).and_then(|v| v.to_map())
&& let Some(value) = cell_val.get("value")
{
if let Some(text_md) = delta_value_to_inline_markdown(&value, md_options) {
cell_text = text_md;
} else {
cell_text = format_cell_value(&value, column);
}
}
@@ -852,26 +816,26 @@ fn append_database_summary(summary: &mut String, block: &Map, context: &DocConte
return;
};
if let Some(title) = get_string(block, "prop:title") {
if !title.is_empty() {
summary.push_str(&title);
summary.push('|');
}
if let Some(title) = get_string(block, "prop:title")
&& !title.is_empty()
{
summary.push_str(&title);
summary.push('|');
}
for column in table.columns.iter() {
if let Some(name) = column.name.as_ref() {
if !name.is_empty() {
summary.push_str(name);
summary.push('|');
}
if let Some(name) = column.name.as_ref()
&& !name.is_empty()
{
summary.push_str(name);
summary.push('|');
}
for option in column.options.iter() {
if let Some(value) = option.value.as_ref() {
if !value.is_empty() {
summary.push_str(value);
summary.push('|');
}
if let Some(value) = option.value.as_ref()
&& !value.is_empty()
{
summary.push_str(value);
summary.push('|');
}
}
}
@@ -920,9 +884,7 @@ struct DatabaseColumn {
}
fn parse_database_columns(block: &Map) -> Option<Vec<DatabaseColumn>> {
let columns = block
.get("prop:columns")
.and_then(|value| value.to_array())?;
let columns = block.get("prop:columns").and_then(|value| value.to_array())?;
let mut parsed = Vec::new();
for column_value in columns.iter() {
if let Some(column) = column_value.to_map() {
@@ -967,9 +929,7 @@ fn format_option_tag(option: &DatabaseOption) -> String {
let value = option.value.as_deref().unwrap_or_default();
let color = option.color.as_deref().unwrap_or_default();
format!(
"<span data-affine-option data-value=\"{id}\" data-option-color=\"{color}\">{value}</span>"
)
format!("<span data-affine-option data-value=\"{id}\" data-option-color=\"{color}\">{value}</span>")
}
fn format_cell_value(value: &Value, column: &DatabaseColumn) -> String {
@@ -991,15 +951,8 @@ fn format_cell_value(value: &Value, column: &DatabaseColumn) -> String {
}
"multi-select" => {
let ids: Vec<String> = match value {
Value::Any(Any::Array(ids)) => ids
.iter()
.filter_map(any_as_string)
.map(str::to_string)
.collect(),
Value::Array(array) => array
.iter()
.filter_map(|id_val| value_to_string(&id_val))
.collect(),
Value::Any(Any::Array(ids)) => ids.iter().filter_map(any_as_string).map(str::to_string).collect(),
Value::Array(array) => array.iter().filter_map(|id_val| value_to_string(&id_val)).collect(),
_ => Vec::new(),
};

View File

@@ -25,13 +25,13 @@ pub(super) fn build_block_index(blocks_map: &Map) -> BlockIndex {
let mut parent_lookup: HashMap<String, String> = HashMap::new();
for (_, value) in blocks_map.iter() {
if let Some(block_map) = value.to_map() {
if let Some(block_id) = get_block_id(&block_map) {
for child_id in collect_child_ids(&block_map) {
parent_lookup.insert(child_id, block_id.clone());
}
block_pool.insert(block_id, block_map);
if let Some(block_map) = value.to_map()
&& let Some(block_id) = get_block_id(&block_map)
{
for child_id in collect_child_ids(&block_map) {
parent_lookup.insert(child_id, block_id.clone());
}
block_pool.insert(block_id, block_map);
}
}
@@ -80,18 +80,13 @@ impl BlockWalker {
let mut child_ids = collect_child_ids(block);
for child_id in child_ids.drain(..).rev() {
if self.visited.insert(child_id.clone()) {
self
.queue
.push((Some(parent_block_id.to_string()), child_id));
self.queue.push((Some(parent_block_id.to_string()), child_id));
}
}
}
}
pub(super) fn find_block_id_by_flavour(
block_pool: &HashMap<String, Map>,
flavour: &str,
) -> Option<String> {
pub(super) fn find_block_id_by_flavour(block_pool: &HashMap<String, Map>, flavour: &str) -> Option<String> {
block_pool.iter().find_map(|(id, block)| {
get_flavour(block)
.filter(|block_flavour| block_flavour == flavour)
@@ -133,12 +128,12 @@ pub(super) fn get_list_depth(
let mut current_id = block_id.to_string();
while let Some(parent_id) = parent_lookup.get(&current_id) {
if let Some(parent_block) = blocks.get(parent_id) {
if get_flavour(parent_block).as_deref() == Some("affine:list") {
depth += 1;
current_id = parent_id.clone();
continue;
}
if let Some(parent_block) = blocks.get(parent_id)
&& get_flavour(parent_block).as_deref() == Some("affine:list")
{
depth += 1;
current_id = parent_id.clone();
continue;
}
break;
}
@@ -153,10 +148,10 @@ pub(super) fn nearest_by_flavour(
) -> Option<Map> {
let mut cursor = Some(start.to_string());
while let Some(node) = cursor {
if let Some(block) = blocks.get(&node) {
if get_flavour(block).as_deref() == Some(flavour) {
return Some(block.clone());
}
if let Some(block) = blocks.get(&node)
&& get_flavour(block).as_deref() == Some(flavour)
{
return Some(block.clone());
}
cursor = parent_lookup.get(&node).cloned();
}

View File

@@ -7,8 +7,7 @@ use std::{
use y_octo::{AHashMap, Any, Map, Text, TextAttributes, TextDeltaOp, TextInsert, Value};
use super::value::{
any_as_string, any_as_u64, any_truthy, build_reference_payload, params_any_map_to_json,
value_to_any,
any_as_string, any_as_u64, any_truthy, build_reference_payload, params_any_map_to_json, value_to_any,
};
#[derive(Debug, Clone)]
@@ -45,12 +44,7 @@ impl DeltaToMdOptions {
}
let mut parts = Vec::new();
parts.push(
reference
.ref_type
.clone()
.unwrap_or_else(|| "LinkedPage".into()),
);
parts.push(reference.ref_type.clone().unwrap_or_else(|| "LinkedPage".into()));
parts.push(reference.page_id.clone());
if let Some(mode) = reference.mode.as_ref() {
parts.push(mode.clone());
@@ -60,22 +54,14 @@ impl DeltaToMdOptions {
}
}
pub(super) fn text_to_markdown(
block: &Map,
key: &str,
options: &DeltaToMdOptions,
) -> Option<String> {
pub(super) fn text_to_markdown(block: &Map, key: &str, options: &DeltaToMdOptions) -> Option<String> {
block
.get(key)
.and_then(|value| value.to_text())
.map(|text| delta_to_markdown(&text, options))
}
pub(super) fn text_to_inline_markdown(
block: &Map,
key: &str,
options: &DeltaToMdOptions,
) -> Option<String> {
pub(super) fn text_to_inline_markdown(block: &Map, key: &str, options: &DeltaToMdOptions) -> Option<String> {
block
.get(key)
.and_then(|value| value.to_text())
@@ -89,8 +75,7 @@ pub(super) fn extract_inline_references(delta: &[TextDeltaOp]) -> Vec<InlineRefe
for op in delta {
let attrs = match op {
TextDeltaOp::Insert {
format: Some(format),
..
format: Some(format), ..
} => format,
_ => continue,
};
@@ -123,10 +108,7 @@ fn parse_inline_reference(value: &Any) -> Option<InlineReference> {
_ => return None,
};
let page_id = map
.get("pageId")
.and_then(any_as_string)
.map(str::to_string)?;
let page_id = map.get("pageId").and_then(any_as_string).map(str::to_string)?;
let title = map.get("title").and_then(any_as_string).map(str::to_string);
let ref_type = map.get("type").and_then(any_as_string).map(str::to_string);
let params = map.get("params").and_then(|value| match value {
@@ -161,20 +143,12 @@ fn delta_to_inline_markdown(text: &Text, options: &DeltaToMdOptions) -> String {
delta_to_markdown_with_options(&text.to_delta(), options, false)
}
fn delta_to_markdown_with_options(
delta: &[TextDeltaOp],
options: &DeltaToMdOptions,
trailing_newline: bool,
) -> String {
fn delta_to_markdown_with_options(delta: &[TextDeltaOp], options: &DeltaToMdOptions, trailing_newline: bool) -> String {
let ops = build_delta_ops(delta);
delta_ops_to_markdown_with_options(&ops, options, trailing_newline)
}
fn delta_ops_to_markdown_with_options(
ops: &[DeltaOp],
options: &DeltaToMdOptions,
trailing_newline: bool,
) -> String {
fn delta_ops_to_markdown_with_options(ops: &[DeltaOp], options: &DeltaToMdOptions, trailing_newline: bool) -> String {
let root = convert_delta_ops(ops, options);
let mut rendered = render_node(&root);
rendered = rendered.trim_end().to_string();
@@ -235,10 +209,7 @@ fn delta_op_from_any(value: &Any) -> Option<DeltaOp> {
_ => DeltaInsert::Embed(vec![insert_value.clone()]),
};
let attributes = map
.get("attributes")
.and_then(any_to_attributes)
.unwrap_or_default();
let attributes = map.get("attributes").and_then(any_to_attributes).unwrap_or_default();
Some(DeltaOp { insert, attributes })
}
@@ -260,10 +231,7 @@ fn delta_any_to_inline_markdown(value: &Any, options: &DeltaToMdOptions) -> Opti
delta_ops_from_any(value).map(|ops| delta_ops_to_markdown_with_options(&ops, options, false))
}
pub(super) fn delta_value_to_inline_markdown(
value: &Value,
options: &DeltaToMdOptions,
) -> Option<String> {
pub(super) fn delta_value_to_inline_markdown(value: &Value, options: &DeltaToMdOptions) -> Option<String> {
if let Some(text) = value.to_text() {
return Some(delta_to_inline_markdown(&text, options));
}
@@ -428,13 +396,7 @@ fn convert_delta_ops(ops: &[DeltaOp], options: &DeltaToMdOptions) -> Rc<RefCell<
}
}
apply_inline_attributes(
&mut el,
&op.attributes,
next_attrs,
&mut active_inline,
options,
);
apply_inline_attributes(&mut el, &op.attributes, next_attrs, &mut active_inline, options);
Node::append(&el, Node::new_text(segment));
if line_index + 1 < lines.len() {
new_line(&root, &mut line, &mut el, &mut active_inline);
@@ -504,10 +466,10 @@ fn apply_inline_attributes(
if !is_inline_attribute(attr) || !any_truthy(value) {
continue;
}
if let Some(active) = active_inline.get(attr) {
if active == value {
continue;
}
if let Some(active) = active_inline.get(attr)
&& active == value
{
continue;
}
let next_matches = next
@@ -532,11 +494,7 @@ fn apply_inline_attributes(
}
}
fn inline_node_for_attr(
attr: &str,
attrs: &TextAttributes,
options: &DeltaToMdOptions,
) -> Option<Rc<RefCell<Node>>> {
fn inline_node_for_attr(attr: &str, attrs: &TextAttributes, options: &DeltaToMdOptions) -> Option<Rc<RefCell<Node>>> {
match attr {
"italic" => Some(Node::new_inline("_", "_")),
"bold" => Some(Node::new_inline("**", "**")),
@@ -544,13 +502,10 @@ fn inline_node_for_attr(
.get(attr)
.and_then(any_as_string)
.map(|url| Node::new_inline("[", &format!("]({url})"))),
"reference" => attrs
.get(attr)
.and_then(parse_inline_reference)
.map(|reference| {
let (title, link) = options.build_reference_link(&reference);
Node::new_inline("[", &format!("{title}]({link})"))
}),
"reference" => attrs.get(attr).and_then(parse_inline_reference).map(|reference| {
let (title, link) = options.build_reference_link(&reference);
Node::new_inline("[", &format!("{title}]({link})"))
}),
"strike" => Some(Node::new_inline("~~", "~~")),
"code" => Some(Node::new_inline("`", "`")),
_ => None,
@@ -562,10 +517,7 @@ fn has_block_level_attribute(attrs: &TextAttributes) -> bool {
}
fn is_inline_attribute(attr: &str) -> bool {
matches!(
attr,
"italic" | "bold" | "link" | "reference" | "strike" | "code"
)
matches!(attr, "italic" | "bold" | "link" | "reference" | "strike" | "code")
}
fn encode_link(link: &str) -> String {
@@ -683,9 +635,7 @@ impl Node {
fn append(parent: &Rc<RefCell<Node>>, child: Rc<RefCell<Node>>) {
if let Some(old_parent) = child.borrow().parent.as_ref().and_then(|p| p.upgrade()) {
let mut old_parent = old_parent.borrow_mut();
old_parent
.children
.retain(|existing| !Rc::ptr_eq(existing, &child));
old_parent.children.retain(|existing| !Rc::ptr_eq(existing, &child));
}
child.borrow_mut().parent = Some(Rc::downgrade(parent));

View File

@@ -4,7 +4,6 @@ mod delta_markdown;
mod value;
pub use affine::{
get_doc_ids_from_binary, parse_doc_from_binary, parse_doc_to_markdown, parse_page_doc,
parse_workspace_doc, BlockInfo, CrawlResult, MarkdownResult, PageDocContent, ParseError,
WorkspaceDocContent,
BlockInfo, CrawlResult, MarkdownResult, PageDocContent, ParseError, WorkspaceDocContent, get_doc_ids_from_binary,
parse_doc_from_binary, parse_doc_to_markdown, parse_page_doc, parse_workspace_doc,
};

View File

@@ -34,11 +34,7 @@ impl Stamp {
}
pub fn check<S: AsRef<str>>(&self, bits: u32, resource: S) -> bool {
if self.version == "1"
&& bits <= self.claim
&& self.check_expiration()
&& self.resource == resource.as_ref()
{
if self.version == "1" && bits <= self.claim && self.check_expiration() && self.resource == resource.as_ref() {
let hex_digits = ((self.claim as f32) / 4.).floor() as usize;
// check challenge
@@ -64,12 +60,7 @@ impl Stamp {
let now = Utc::now();
let ts = now.format("%Y%m%d%H%M%S");
let bits = bits.unwrap_or(20);
let rand = String::from_iter(
Alphanumeric
.sample_iter(rng())
.take(SALT_LENGTH)
.map(char::from),
);
let rand = String::from_iter(Alphanumeric.sample_iter(rng()).take(SALT_LENGTH).map(char::from));
let challenge = format!("{}:{}:{}:{}:{}:{}", version, bits, ts, &resource, "", rand);
Stamp {
@@ -102,22 +93,12 @@ impl TryFrom<&str> for Stamp {
fn try_from(value: &str) -> Result<Self, Self::Error> {
let stamp_vec = value.split(':').collect::<Vec<&str>>();
if stamp_vec.len() != 7
|| stamp_vec
.iter()
.enumerate()
.any(|(i, s)| i != 4 && s.is_empty())
{
return Err(format!(
"Malformed stamp, expected 6 parts, got {}",
stamp_vec.len()
));
if stamp_vec.len() != 7 || stamp_vec.iter().enumerate().any(|(i, s)| i != 4 && s.is_empty()) {
return Err(format!("Malformed stamp, expected 6 parts, got {}", stamp_vec.len()));
}
Ok(Stamp {
version: stamp_vec[0].to_string(),
claim: stamp_vec[1]
.parse()
.map_err(|_| "Malformed stamp".to_string())?,
claim: stamp_vec[1].parse().map_err(|_| "Malformed stamp".to_string())?,
ts: stamp_vec[2].to_string(),
resource: stamp_vec[3].to_string(),
ext: stamp_vec[4].to_string(),
@@ -129,7 +110,7 @@ impl TryFrom<&str> for Stamp {
#[cfg(test)]
mod tests {
use rand::{distr::Alphanumeric, Rng};
use rand::{Rng, distr::Alphanumeric};
use rayon::prelude::*;
use super::Stamp;
@@ -139,9 +120,7 @@ mod tests {
{
let response = Stamp::mint("test".into(), Some(20)).format();
assert!(
Stamp::try_from(response.as_str())
.unwrap()
.check(20, "test"),
Stamp::try_from(response.as_str()).unwrap().check(20, "test"),
"should pass"
);
}
@@ -149,18 +128,14 @@ mod tests {
{
let response = Stamp::mint("test".into(), Some(19)).format();
assert!(
!Stamp::try_from(response.as_str())
.unwrap()
.check(20, "test"),
!Stamp::try_from(response.as_str()).unwrap().check(20, "test"),
"should fail with lower bits"
);
}
{
let response = Stamp::mint("test".into(), Some(20)).format();
assert!(
!Stamp::try_from(response.as_str())
.unwrap()
.check(20, "test2"),
!Stamp::try_from(response.as_str()).unwrap().check(20, "test2"),
"should fail with different resource"
);
}
@@ -177,10 +152,7 @@ mod tests {
let response = Stamp::mint("test".into(), Some(20));
assert_eq!(
response.format(),
format!(
"1:20:{}:test::{}:{}",
response.ts, response.rand, response.counter
)
format!("1:20:{}:test::{}:{}", response.ts, response.rand, response.counter)
);
}
@@ -195,9 +167,7 @@ mod tests {
.collect::<String>();
let response = Stamp::mint(resource.clone(), Some(bit)).format();
assert!(
Stamp::try_from(response.as_str())
.unwrap()
.check(bit, resource),
Stamp::try_from(response.as_str()).unwrap().check(bit, resource),
"should pass"
);
});
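Condensing the round-trip these tests assert: mint a stamp for a resource at a target difficulty, serialize it, and verify the parsed stamp checks out.

```rust
use affine_common::hashcash::Stamp;

fn hashcash_demo() {
    // Mint a proof-of-work stamp for resource "test" at 20 bits of
    // difficulty, then verify it survives the string round-trip.
    let response = Stamp::mint("test".into(), Some(20)).format();
    assert!(Stamp::try_from(response.as_str()).unwrap().check(20, "test"));
}
```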

View File

@@ -6,15 +6,15 @@ authors = [
"Brooklyn <lynweklm@gmail.com>",
]
description = "High-performance and thread-safe CRDT implementation compatible with Yjs"
edition = "2021"
edition = "2024"
homepage = "https://github.com/toeverything/y-octo"
include = ["LICENSE", "README.md", "benches/**/*", "bin/**/*", "src/**/*"]
keywords = ["collaboration", "crdt", "crdts", "yata", "yjs"]
include = ["src/**/*", "benches/**/*", "bin/**/*", "LICENSE", "README.md"]
keywords = ["collaboration", "crdt", "crdts", "yjs", "yata"]
license = "MIT"
name = "y-octo"
readme = "README.md"
repository = "https://github.com/toeverything/y-octo"
version = "0.0.1"
version = "0.0.2"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -36,9 +36,11 @@ thiserror = { workspace = true }
[features]
bench = []
debug = []
default = []
events = []
large_refs = []
serde_json = []
subscribe = []
[target.'cfg(fuzzing)'.dependencies]
arbitrary = { workspace = true }

View File

@@ -2,7 +2,7 @@ mod utils;
use std::time::Duration;
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput};
use criterion::{BenchmarkId, Criterion, Throughput, criterion_group, criterion_main};
use path_ext::PathExt;
use utils::Files;

View File

@@ -1,6 +1,6 @@
use std::time::Duration;
use criterion::{criterion_group, criterion_main, Criterion};
use criterion::{Criterion, criterion_group, criterion_main};
use rand::{Rng, SeedableRng};
fn operations(c: &mut Criterion) {

View File

@@ -1,4 +1,4 @@
use criterion::{criterion_group, criterion_main, Criterion, SamplingMode};
use criterion::{Criterion, SamplingMode, criterion_group, criterion_main};
use y_octo::{read_var_i32, read_var_u64, write_var_i32, write_var_u64};
const BENCHMARK_SIZE: u32 = 100000;

View File

@@ -1,6 +1,6 @@
use std::time::Duration;
use criterion::{criterion_group, criterion_main, Criterion};
use criterion::{Criterion, criterion_group, criterion_main};
fn operations(c: &mut Criterion) {
let mut group = c.benchmark_group("ops/map");

View File

@@ -1,6 +1,6 @@
use std::time::Duration;
use criterion::{criterion_group, criterion_main, Criterion};
use criterion::{Criterion, criterion_group, criterion_main};
use rand::{Rng, SeedableRng};
fn operations(c: &mut Criterion) {

View File

@@ -2,7 +2,7 @@ mod utils;
use std::time::Duration;
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput};
use criterion::{BenchmarkId, Criterion, Throughput, criterion_group, criterion_main};
use path_ext::PathExt;
use utils::Files;

View File

@@ -19,8 +19,8 @@ pub fn write_var_buffer<W: Write>(buffer: &mut W, data: &[u8]) -> Result<(), Err
#[cfg(test)]
mod tests {
use nom::{
error::{Error, ErrorKind},
AsBytes, Err,
error::{Error, ErrorKind},
};
use super::*;
@@ -36,26 +36,17 @@ mod tests {
// Test case 2: truncated input, missing buffer
let input = [0x05, 0x01, 0x02, 0x03];
let result = read_var_buffer(&input);
assert_eq!(
result,
Err(Err::Error(Error::new(&input[1..], ErrorKind::Eof)))
);
assert_eq!(result, Err(Err::Error(Error::new(&input[1..], ErrorKind::Eof))));
// Test case 3: invalid input
let input = [0xFF, 0x01, 0x02, 0x03];
let result = read_var_buffer(&input);
assert_eq!(
result,
Err(Err::Error(Error::new(&input[2..], ErrorKind::Eof)))
);
assert_eq!(result, Err(Err::Error(Error::new(&input[2..], ErrorKind::Eof))));
// Test case 4: invalid var int encoding
let input = [0xFF, 0x80, 0x80, 0x80, 0x80, 0x80, 0x01];
let result = read_var_buffer(&input);
assert_eq!(
result,
Err(Err::Error(Error::new(&input[7..], ErrorKind::Eof)))
);
assert_eq!(result, Err(Err::Error(Error::new(&input[7..], ErrorKind::Eof))));
}
#[test]
@@ -66,7 +57,7 @@ mod tests {
#[cfg(not(miri))]
{
use rand::{rng, Rng};
use rand::{Rng, rng};
let mut rng = rng();
for _ in 0..100 {
test_var_buf_enc_dec(&{

View File

@@ -1,6 +1,6 @@
use std::io::{Error, Write};
use nom::{combinator::map_res, Parser};
use nom::{Parser, combinator::map_res};
use super::*;
@@ -17,8 +17,8 @@ pub fn write_var_string<W: Write, S: AsRef<str>>(buffer: &mut W, input: S) -> Re
#[cfg(test)]
mod tests {
use nom::{
error::{Error, ErrorKind},
AsBytes, Err,
error::{Error, ErrorKind},
};
use super::*;
@@ -34,42 +34,27 @@ mod tests {
// Test case 2: missing string length
let input = [0x68, 0x65, 0x6C, 0x6C, 0x6F];
let result = read_var_string(&input);
assert_eq!(
result,
Err(Err::Error(Error::new(&input[1..], ErrorKind::Eof)))
);
assert_eq!(result, Err(Err::Error(Error::new(&input[1..], ErrorKind::Eof))));
// Test case 3: truncated input
let input = [0x05, 0x68, 0x65, 0x6C, 0x6C];
let result = read_var_string(&input);
assert_eq!(
result,
Err(Err::Error(Error::new(&input[1..], ErrorKind::Eof)))
);
assert_eq!(result, Err(Err::Error(Error::new(&input[1..], ErrorKind::Eof))));
// Test case 4: invalid input
let input = [0xFF, 0x01, 0x02, 0x03, 0x04];
let result = read_var_string(&input);
assert_eq!(
result,
Err(Err::Error(Error::new(&input[2..], ErrorKind::Eof)))
);
assert_eq!(result, Err(Err::Error(Error::new(&input[2..], ErrorKind::Eof))));
// Test case 5: invalid var int encoding
let input = [0xFF, 0x80, 0x80, 0x80, 0x80, 0x80, 0x01];
let result = read_var_string(&input);
assert_eq!(
result,
Err(Err::Error(Error::new(&input[7..], ErrorKind::Eof)))
);
assert_eq!(result, Err(Err::Error(Error::new(&input[7..], ErrorKind::Eof))));
// Test case 6: invalid input, invalid UTF-8 encoding
let input = [0x05, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF];
let result = read_var_string(&input);
assert_eq!(
result,
Err(Err::Error(Error::new(&input[..], ErrorKind::MapRes)))
);
assert_eq!(result, Err(Err::Error(Error::new(&input[..], ErrorKind::MapRes))));
}
#[test]

View File

@@ -20,6 +20,10 @@ impl Awareness {
}
}
pub fn local_id(&self) -> u64 {
self.local_id
}
pub fn on_update(&mut self, f: impl Fn(&Awareness, AwarenessEvent) + Send + Sync + 'static) {
self.callback = Some(Arc::new(f));
}
@@ -29,10 +33,7 @@ impl Awareness {
}
pub fn get_local_state(&self) -> Option<String> {
self
.awareness
.get(&self.local_id)
.map(|state| state.content.clone())
self.awareness.get(&self.local_id).map(|state| state.content.clone())
}
fn mut_local_state(&mut self) -> &mut AwarenessState {
@@ -42,20 +43,14 @@ impl Awareness {
pub fn set_local_state(&mut self, content: String) {
self.mut_local_state().set_content(content);
if let Some(cb) = self.callback.as_ref() {
cb(
self,
AwarenessEventBuilder::new().update(self.local_id).build(),
);
cb(self, AwarenessEventBuilder::new().update(self.local_id).build());
}
}
pub fn clear_local_state(&mut self) {
self.mut_local_state().delete();
if let Some(cb) = self.callback.as_ref() {
cb(
self,
AwarenessEventBuilder::new().remove(self.local_id).build(),
);
cb(self, AwarenessEventBuilder::new().remove(self.local_id).build());
}
}
@@ -107,9 +102,7 @@ impl AwarenessEvent {
pub fn get_updated(&self, states: &AwarenessStates) -> AwarenessStates {
states
.iter()
.filter(|(id, _)| {
self.added.contains(id) || self.updated.contains(id) || self.removed.contains(id)
})
.filter(|(id, _)| self.added.contains(id) || self.updated.contains(id) || self.removed.contains(id))
.map(|(id, state)| (*id, state.clone()))
.collect()
}
@@ -187,14 +180,8 @@ mod tests {
assert!(awareness.get_states().contains_key(&1));
// local state will not apply
assert_eq!(
awareness.get_states().get(&0).unwrap().content,
"null".to_string()
);
assert_eq!(
awareness.get_states().get(&1).unwrap().content,
"test1".to_string()
);
assert_eq!(awareness.get_states().get(&0).unwrap().content, "null".to_string());
assert_eq!(awareness.get_states().get(&1).unwrap().content, "test1".to_string());
}
{

View File

@@ -0,0 +1,119 @@
use super::*;
#[derive(Debug, PartialEq)]
pub struct Batch {
doc: Doc,
before_state: StateVector,
after_state: StateVector,
changed: HashMap<YTypeRef, Vec<SmolStr>>,
}
impl Batch {
pub fn new(doc: Doc) -> Self {
let current_state = doc.get_state_vector();
Batch {
doc,
before_state: current_state.clone(),
after_state: current_state,
changed: HashMap::new(),
}
}
pub fn with_batch<T, F>(&mut self, f: F) -> T
where
F: FnOnce(Doc) -> T,
{
let ret = f(self.doc.clone());
for (k, v) in self.doc.get_changed() {
self.changed.entry(k).or_default().extend(v.iter().cloned());
}
ret
}
}
pub fn batch_commit<T, F>(mut doc: Doc, f: F) -> Option<T>
where
F: FnOnce(Doc) -> T,
{
// Initialize batch cleanups list
let mut batch_cleanups = vec![];
// Initial call and result initialization
let mut initial_call = false;
{
if doc.batch.is_none() {
initial_call = true;
// Start a new batch
let batch = Batch::new(doc.clone());
doc.batch = Somr::new(batch);
batch_cleanups.push(doc.batch.clone());
}
}
let batch = doc.batch.get_mut()?;
let result = Some(batch.with_batch(f));
if initial_call
&& let Some(current_batch) = doc.batch.get()
&& Some(current_batch) == batch_cleanups[0].get()
{
// Process observer calls and perform cleanup if this is the initial call
cleanup_batches(&mut batch_cleanups);
doc.batch.swap_take();
}
result
}
fn cleanup_batches(batch_cleanups: &mut Vec<Somr<Batch>>) {
for batch in batch_cleanups.drain(..) {
if let Some(batch) = batch.get() {
println!("changed: {:?}", batch.changed);
} else {
panic!("Batch not initialized");
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn should_get_changed_items() {
loom_model!({
let doc = DocOptions::new().with_client_id(1).build();
batch_commit(doc.clone(), |d| {
let mut arr = d.get_or_create_array("arr").unwrap();
let mut text = d.create_text().unwrap();
let mut map = d.create_map().unwrap();
batch_commit(doc.clone(), |_| {
arr.insert(0, Value::from(text.clone())).unwrap();
arr.insert(1, Value::from(map.clone())).unwrap();
});
batch_commit(doc.clone(), |_| {
text.insert(0, "hello world").unwrap();
text.remove(5, 6).unwrap();
});
batch_commit(doc.clone(), |_| {
map.insert("key".into(), 123).unwrap();
});
batch_commit(doc.clone(), |_| {
map.remove("key");
});
batch_commit(doc.clone(), |_| {
arr.remove(0, 1).unwrap();
});
});
});
}
}

View File

@@ -40,10 +40,10 @@ impl<R: CrdtReader> CrdtRead<R> for Any {
0 => Ok(Any::Undefined),
1 => Ok(Any::Null),
// in yjs implementation, flag 2 only save 32bit integer
2 => Ok(Any::Integer(reader.read_var_i32()?)), // Integer
2 => Ok(Any::Integer(reader.read_var_i32()?)), // Integer
3 => Ok(Any::Float32(reader.read_f32_be()?.into())), // Float32
4 => Ok(Any::Float64(reader.read_f64_be()?.into())), // Float64
5 => Ok(Any::BigInt64(reader.read_i64_be()?)), // BigInt64
5 => Ok(Any::BigInt64(reader.read_i64_be()?)), // BigInt64
6 => Ok(Any::False),
7 => Ok(Any::True),
8 => Ok(Any::String(reader.read_var_string()?)), // String
@@ -57,9 +57,7 @@ impl<R: CrdtReader> CrdtRead<R> for Any {
} // Object
10 => {
let len = reader.read_var_u64()?;
let any = (0..len)
.map(|_| Self::read(reader))
.collect::<Result<Vec<_>, _>>()?;
let any = (0..len).map(|_| Self::read(reader)).collect::<Result<Vec<_>, _>>()?;
Ok(Any::Array(any))
} // Array
@@ -250,11 +248,7 @@ impl From<f64> for Any {
impl From<bool> for Any {
fn from(value: bool) -> Self {
if value {
Self::True
} else {
Self::False
}
if value { Self::True } else { Self::False }
}
}
@@ -344,11 +338,7 @@ impl From<&[u8]> for Any {
// TODO: impl for Any::Undefined
impl<T: Into<Any>> From<Option<T>> for Any {
fn from(value: Option<T>) -> Self {
if let Some(val) = value {
val.into()
} else {
Any::Null
}
if let Some(val) = value { val.into() } else { Any::Null }
}
}
@@ -374,12 +364,8 @@ impl From<serde_json::Value> for Any {
}
}
serde_json::Value::String(s) => Self::String(s),
serde_json::Value::Array(vec) => {
Self::Array(vec.into_iter().map(|v| v.into()).collect::<Vec<_>>())
}
serde_json::Value::Object(obj) => {
Self::Object(obj.into_iter().map(|(k, v)| (k, v.into())).collect())
}
serde_json::Value::Array(vec) => Self::Array(vec.into_iter().map(|v| v.into()).collect::<Vec<_>>()),
serde_json::Value::Object(obj) => Self::Object(obj.into_iter().map(|(k, v)| (k, v.into())).collect()),
}
}
}
@@ -587,10 +573,7 @@ mod tests {
Any::Object(
vec![
("type".to_string(), Any::String("Email".to_string())),
(
"address".to_string(),
Any::String("alice@example.com".to_string()),
),
("address".to_string(), Any::String("alice@example.com".to_string())),
]
.into_iter()
.collect(),
@@ -698,19 +681,11 @@ mod tests {
);
assert_eq!(
vec![("key".to_string(), 10u64.into())]
.into_iter()
.collect::<Any>(),
Any::Object(HashMap::from_iter(vec![(
"key".to_string(),
Any::Integer(10)
)]))
vec![("key".to_string(), 10u64.into())].into_iter().collect::<Any>(),
Any::Object(HashMap::from_iter(vec![("key".to_string(), Any::Integer(10))]))
);
let any: Any = 10u64.into();
assert_eq!(
[any].iter().collect::<Any>(),
Any::Array(vec![Any::Integer(10)])
);
assert_eq!([any].iter().collect::<Any>(), Any::Array(vec![Any::Integer(10)]));
}
}

View File

@@ -90,16 +90,9 @@ impl std::fmt::Debug for Content {
.field("key", key)
.field("value", value)
.finish(),
Self::Type(arg0) => f
.debug_tuple("Type")
.field(&arg0.ty().unwrap().kind())
.finish(),
Self::Type(arg0) => f.debug_tuple("Type").field(&arg0.ty().unwrap().kind()).finish(),
Self::Any(arg0) => f.debug_tuple("Any").field(arg0).finish(),
Self::Doc { guid, opts } => f
.debug_struct("Doc")
.field("guid", guid)
.field("opts", opts)
.finish(),
Self::Doc { guid, opts } => f.debug_struct("Doc").field("guid", guid).field("opts", opts).finish(),
}
}
}
@@ -111,11 +104,7 @@ impl Content {
2 => {
let len = decoder.read_var_u64()?;
let strings = (0..len)
.map(|_| {
decoder
.read_var_string()
.map(|s| (s != "undefined").then_some(s))
})
.map(|_| decoder.read_var_string().map(|s| (s != "undefined").then_some(s)))
.collect::<Result<Vec<_>, _>>()?;
Ok(Self::Json(strings))
@@ -124,16 +113,14 @@ impl Content {
4 => Ok(Self::String(decoder.read_var_string()?)), // String
5 => {
let string = decoder.read_var_string()?;
let json =
serde_json::from_str(&string).map_err(|_| JwstCodecError::DamagedDocumentJson)?;
let json = serde_json::from_str(&string).map_err(|_| JwstCodecError::DamagedDocumentJson)?;
Ok(Self::Embed(json))
} // Embed
6 => {
let key = decoder.read_var_string()?;
let value = decoder.read_var_string()?;
let value =
serde_json::from_str(&value).map_err(|_| JwstCodecError::DamagedDocumentJson)?;
let value = serde_json::from_str(&value).map_err(|_| JwstCodecError::DamagedDocumentJson)?;
Ok(Self::Format { key, value })
} // Format
@@ -199,15 +186,11 @@ impl Content {
encoder.write_var_string(string)?;
}
Self::Embed(val) => {
encoder.write_var_string(
serde_json::to_string(val).map_err(|_| JwstCodecError::DamagedDocumentJson)?,
)?;
encoder.write_var_string(serde_json::to_string(val).map_err(|_| JwstCodecError::DamagedDocumentJson)?)?;
}
Self::Format { key, value } => {
encoder.write_var_string(key)?;
encoder.write_var_string(
serde_json::to_string(value).map_err(|_| JwstCodecError::DamagedDocumentJson)?,
)?;
encoder.write_var_string(serde_json::to_string(value).map_err(|_| JwstCodecError::DamagedDocumentJson)?)?;
}
Self::Type(ty) => {
if let Some(ty) = ty.ty() {
@@ -237,9 +220,7 @@ impl Content {
// TODO: need a custom wrapper with length cached, this cost too much
Self::String(string) => string.chars().map(|c| c.len_utf16()).sum::<usize>() as u64,
Self::Any(any) => any.len() as u64,
Self::Binary(_) | Self::Embed(_) | Self::Format { .. } | Self::Type(_) | Self::Doc { .. } => {
1
}
Self::Binary(_) | Self::Embed(_) | Self::Format { .. } | Self::Type(_) | Self::Doc { .. } => 1,
}
}
@@ -249,20 +230,14 @@ impl Content {
#[allow(dead_code)]
pub fn splittable(&self) -> bool {
matches!(
self,
Self::String { .. } | Self::Any { .. } | Self::Json { .. }
)
matches!(self, Self::String { .. } | Self::Any { .. } | Self::Json { .. })
}
pub fn split(&self, diff: u64) -> JwstCodecResult<(Self, Self)> {
match self {
Self::String(str) => {
let (left, right) = Self::split_as_utf16_str(str.as_str(), diff);
Ok((
Self::String(left.to_string()),
Self::String(right.to_string()),
))
Ok((Self::String(left.to_string()), Self::String(right.to_string())))
}
Self::Json(vec) => {
let (left, right) = vec.split_at(diff as usize);
@@ -321,11 +296,7 @@ mod tests {
loom_model!({
let contents = [
Content::Deleted(42),
Content::Json(vec![
None,
Some("test_1".to_string()),
Some("test_2".to_string()),
]),
Content::Json(vec![None, Some("test_1".to_string()), Some("test_2".to_string())]),
Content::Binary(vec![1, 2, 3]),
Content::String("hello".to_string()),
Content::Embed(Any::True),
@@ -336,10 +307,7 @@ mod tests {
Content::Type(YTypeRef::new(YTypeKind::Array, None)),
Content::Type(YTypeRef::new(YTypeKind::Map, None)),
Content::Type(YTypeRef::new(YTypeKind::Text, None)),
Content::Type(YTypeRef::new(
YTypeKind::XMLElement,
Some("test".to_string()),
)),
Content::Type(YTypeRef::new(YTypeKind::XMLElement, Some("test".to_string()))),
Content::Type(YTypeRef::new(YTypeKind::XMLFragment, None)),
Content::Type(YTypeRef::new(YTypeKind::XMLHook, Some("test".to_string()))),
Content::Type(YTypeRef::new(YTypeKind::XMLText, None)),
@@ -360,11 +328,7 @@ mod tests {
fn test_content_split() {
let contents = [
Content::String("hello".to_string()),
Content::Json(vec![
None,
Some("test_1".to_string()),
Some("test_2".to_string()),
]),
Content::Json(vec![None, Some("test_1".to_string()), Some("test_2".to_string())]),
Content::Any(vec![Any::BigInt64(42), Any::String("Test Any".to_string())]),
Content::Binary(vec![]),
];
@@ -390,18 +354,12 @@ mod tests {
let (left, right) = contents[2].split(1).unwrap();
assert!(contents[2].splittable());
assert_eq!(left, Content::Any(vec![Any::BigInt64(42)]));
assert_eq!(
right,
Content::Any(vec![Any::String("Test Any".to_string())])
);
assert_eq!(right, Content::Any(vec![Any::String("Test Any".to_string())]));
}
{
assert!(!contents[3].splittable());
assert_eq!(
contents[3].split(2),
Err(JwstCodecError::ContentSplitNotSupport(2))
);
assert_eq!(contents[3].split(2), Err(JwstCodecError::ContentSplitNotSupport(2)));
}
}
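
A minimal sketch of the `splittable`/`split` contract exercised above, assuming `Content` and `JwstCodecResult` are importable from the crate root:

```rust
use y_octo::{Content, JwstCodecResult};

fn split_demo() -> JwstCodecResult<()> {
    let content = Content::String("hello".to_string());
    assert!(content.splittable());
    // split takes a UTF-16 offset and yields the left/right halves
    let (left, right) = content.split(2)?;
    assert_eq!(left, Content::String("he".to_string()));
    assert_eq!(right, Content::String("llo".to_string()));
    Ok(())
}
```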

View File

@@ -1,5 +1,5 @@
use std::{
collections::{hash_map::Entry, VecDeque},
collections::{VecDeque, hash_map::Entry},
ops::{Deref, DerefMut, Range},
};
@@ -188,10 +188,7 @@ mod tests {
{
let mut delete_set = delete_set;
delete_set.add(1, 5, 10);
assert_eq!(
delete_set.get(&1),
Some(&OrderRange::from(vec![0..15, 20..30]))
);
assert_eq!(delete_set.get(&1), Some(&OrderRange::from(vec![0..15, 20..30])));
}
}
@@ -213,10 +210,7 @@ mod tests {
{
let mut delete_set = delete_set;
delete_set.batch_add_ranges(1, vec![40..50, 10..20]);
assert_eq!(
delete_set.get(&1),
Some(&OrderRange::from(vec![0..30, 40..50]))
);
assert_eq!(delete_set.get(&1), Some(&OrderRange::from(vec![0..30, 40..50])));
}
}
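
The merge semantics those assertions depend on, condensed into one sketch (assuming `DeleteSet: Default`, which `Update::default()` elsewhere in the crate implies):

```rust
let mut delete_set = DeleteSet::default();
delete_set.add(1, 0, 10);  // client 1 covers 0..10
delete_set.add(1, 20, 10); // and 20..30
delete_set.add(1, 5, 10);  // 5..15 overlaps 0..10, coalesces to 0..15
assert_eq!(delete_set.get(&1), Some(&OrderRange::from(vec![0..15, 20..30])));
// batched ranges are sorted and merged too; 10..20 bridges 0..15 and 20..30
delete_set.batch_add_ranges(1, vec![40..50, 10..20]);
assert_eq!(delete_set.get(&1), Some(&OrderRange::from(vec![0..30, 40..50])));
```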

View File

@@ -48,11 +48,7 @@ impl<'b> RawDecoder<'b> {
let pos = self.buffer.position() as usize;
let buf = self.buffer.into_inner();
if pos == 0 {
buf
} else {
&buf[pos..]
}
if pos == 0 { buf } else { &buf[pos..] }
}
}
@@ -88,24 +84,15 @@ impl CrdtReader for RawDecoder<'_> {
}
fn read_f32_be(&mut self) -> JwstCodecResult<f32> {
self
.buffer
.read_f32::<BigEndian>()
.map_err(reader::map_read_error)
self.buffer.read_f32::<BigEndian>().map_err(reader::map_read_error)
}
fn read_f64_be(&mut self) -> JwstCodecResult<f64> {
self
.buffer
.read_f64::<BigEndian>()
.map_err(reader::map_read_error)
self.buffer.read_f64::<BigEndian>().map_err(reader::map_read_error)
}
fn read_i64_be(&mut self) -> JwstCodecResult<i64> {
self
.buffer
.read_i64::<BigEndian>()
.map_err(reader::map_read_error)
self.buffer.read_i64::<BigEndian>().map_err(reader::map_read_error)
}
#[inline(always)]
@@ -151,22 +138,13 @@ impl CrdtWriter for RawEncoder {
Ok(())
}
fn write_f32_be(&mut self, num: f32) -> JwstCodecResult {
self
.buffer
.write_f32::<BigEndian>(num)
.map_err(writer::map_write_error)
self.buffer.write_f32::<BigEndian>(num).map_err(writer::map_write_error)
}
fn write_f64_be(&mut self, num: f64) -> JwstCodecResult {
self
.buffer
.write_f64::<BigEndian>(num)
.map_err(writer::map_write_error)
self.buffer.write_f64::<BigEndian>(num).map_err(writer::map_write_error)
}
fn write_i64_be(&mut self, num: i64) -> JwstCodecResult {
self
.buffer
.write_i64::<BigEndian>(num)
.map_err(writer::map_write_error)
self.buffer.write_i64::<BigEndian>(num).map_err(writer::map_write_error)
}
#[inline(always)]
@@ -197,10 +175,7 @@ mod tests {
let mut reader = RawDecoder::new(&[0x5, b'h', b'e', b'l', b'l', b'o']);
assert_eq!(reader.clone().read_var_string().unwrap(), "hello");
assert_eq!(
reader.clone().read_var_buffer().unwrap().as_slice(),
b"hello"
);
assert_eq!(reader.clone().read_var_buffer().unwrap().as_slice(), b"hello");
assert_eq!(reader.read_u8().unwrap(), 5);
assert_eq!(reader.read_u8().unwrap(), b'h');
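
A round-trip sketch pairing the encoder and decoder; `into_inner()` for extracting the encoded bytes is an assumption, since this excerpt only shows the write/read halves:

```rust
let mut encoder = RawEncoder::default();
encoder.write_var_string("hello")?;
encoder.write_i64_be(-42)?;
let buf = encoder.into_inner(); // assumed accessor for the buffer
let mut decoder = RawDecoder::new(&buf);
assert_eq!(decoder.read_var_string()?, "hello");
assert_eq!(decoder.read_i64_be()?, -42);
```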

View File

@@ -232,12 +232,7 @@ impl Item {
!has_id && self.parent.is_some() || has_id && self.parent.is_none() && self.parent_sub.is_none()
}
pub fn read<R: CrdtReader>(
decoder: &mut R,
id: Id,
info: u8,
first_5_bit: u8,
) -> JwstCodecResult<Self> {
pub fn read<R: CrdtReader>(decoder: &mut R, id: Id, info: u8, first_5_bit: u8) -> JwstCodecResult<Self> {
let flags: ItemFlag = info.into();
let has_left_id = flags.check(item_flags::ITEM_HAS_LEFT_ID);
let has_right_id = flags.check(item_flags::ITEM_HAS_RIGHT_ID);
@@ -350,6 +345,22 @@ impl Item {
Ok(())
}
pub fn deep_compare(&self, other: &Self) -> bool {
if self.id != other.id
|| self.deleted() != other.deleted()
|| self.len() != other.len()
|| self.left.get().map(|l| l.last_id()) != other.left.get().map(|l| l.last_id())
|| self.right.get().map(|r| r.id) != other.right.get().map(|r| r.id)
|| self.origin_left_id != other.origin_left_id
|| self.origin_right_id != other.origin_right_id
|| self.parent_sub != other.parent_sub
{
return false;
}
true
}
}
#[allow(dead_code)]

View File

@@ -16,7 +16,7 @@ pub use delete_set::DeleteSet;
pub use id::{Client, Clock, Id};
pub use io::{CrdtRead, CrdtReader, CrdtWrite, CrdtWriter, RawDecoder, RawEncoder};
pub(crate) use item::{Item, ItemRef, Parent};
pub(crate) use item_flag::{item_flags, ItemFlag};
pub(crate) use item_flag::{ItemFlag, item_flags};
pub(crate) use refs::Node;
pub use update::Update;
#[cfg(test)]

View File

@@ -79,10 +79,10 @@ impl Node {
_ => {
let item = Somr::new(Item::read(decoder, id, info, first_5_bit)?);
if let Content::Type(ty) = &item.get().unwrap().content {
if let Some(mut ty) = ty.ty_mut() {
ty.item = item.clone();
}
if let Content::Type(ty) = &item.get().unwrap().content
&& let Some(mut ty) = ty.ty_mut()
{
ty.item = item.clone();
}
Ok(Node::Item(item))
@@ -282,8 +282,7 @@ impl Node {
l.extend(r.drain(0..));
}
(Content::String(l), Content::String(r)) => {
let allow_merge_string =
matches!(parent_kind, Some(YTypeKind::Text | YTypeKind::XMLText));
let allow_merge_string = matches!(parent_kind, Some(YTypeKind::Text | YTypeKind::XMLText));
if !allow_merge_string {
return false;
@@ -299,12 +298,11 @@ impl Node {
}
}
if let Some(Parent::Type(p)) = &litem.parent {
if let Some(parent) = p.ty_mut() {
if let Some(markers) = &parent.markers {
markers.replace_marker(rref.clone(), lref.clone(), -(llen as i64));
}
}
if let Some(Parent::Type(p)) = &litem.parent
&& let Some(parent) = p.ty_mut()
&& let Some(markers) = &parent.markers
{
markers.replace_marker(rref.clone(), lref.clone(), -(llen as i64));
}
if ritem.keep() {
@@ -455,15 +453,15 @@ mod tests {
#[cfg(not(loom))]
fn struct_info_round_trip(info: &mut Node) -> JwstCodecResult {
if let Node::Item(item) = info {
if let Some(item) = item.get_mut() {
if !item.is_valid() {
return Ok(());
}
if let Node::Item(item) = info
&& let Some(item) = item.get_mut()
{
if !item.is_valid() {
return Ok(());
}
if item.content.countable() {
item.flags.set_countable();
}
if item.content.countable() {
item.flags.set_countable();
}
}
let mut encoder = RawEncoder::default();

View File

@@ -47,9 +47,7 @@ impl<R: CrdtReader> CrdtRead<R> for Update {
let delete_set = DeleteSet::read(decoder)?;
if !decoder.is_empty() {
return Err(JwstCodecError::UpdateNotFullyConsumed(
decoder.len() as usize
));
return Err(JwstCodecError::UpdateNotFullyConsumed(decoder.len() as usize));
}
Ok(Update {
@@ -282,23 +280,24 @@ impl<'a> UpdateIterator<'a> {
fn get_missing_dep(&self, struct_info: &Node) -> Option<Client> {
if let Some(item) = struct_info.as_item().get() {
let id = item.id;
if let Some(left) = &item.origin_left_id {
if left.client != id.client && left.clock >= self.state.get(&left.client) {
return Some(left.client);
}
if let Some(left) = &item.origin_left_id
&& left.client != id.client
&& left.clock >= self.state.get(&left.client)
{
return Some(left.client);
}
if let Some(right) = &item.origin_right_id {
if right.client != id.client && right.clock >= self.state.get(&right.client) {
return Some(right.client);
}
if let Some(right) = &item.origin_right_id
&& right.client != id.client
&& right.clock >= self.state.get(&right.client)
{
return Some(right.client);
}
if let Some(parent) = &item.parent {
match parent {
Parent::Id(parent_id)
if parent_id.client != id.client
&& parent_id.clock >= self.state.get(&parent_id.client) =>
if parent_id.client != id.client && parent_id.clock >= self.state.get(&parent_id.client) =>
{
return Some(parent_id.client);
}
@@ -319,15 +318,7 @@ impl<'a> UpdateIterator<'a> {
// Safety:
// client index of updates and update length are both checked in next_client
// safe to use unwrap
cur.replace(
self
.update
.structs
.get_mut(&client)
.unwrap()
.pop_front()
.unwrap(),
);
cur.replace(self.update.structs.get_mut(&client).unwrap().pop_front().unwrap());
}
cur
@@ -437,10 +428,7 @@ impl Iterator for DeleteSetIterator<'_> {
return Some((client, range));
} else {
// all state missing
self
.update
.pending_delete_set
.add(client, start, end - start);
self.update.pending_delete_set.add(client, start, end - start);
}
}
@@ -478,17 +466,9 @@ mod tests {
fn test_parse_doc() {
let docs = [
(include_bytes!("../../fixtures/basic.bin").to_vec(), 1, 188),
(
include_bytes!("../../fixtures/database.bin").to_vec(),
1,
149,
),
(include_bytes!("../../fixtures/database.bin").to_vec(), 1, 149),
(include_bytes!("../../fixtures/large.bin").to_vec(), 1, 9036),
(
include_bytes!("../../fixtures/with-subdoc.bin").to_vec(),
2,
30,
),
(include_bytes!("../../fixtures/with-subdoc.bin").to_vec(), 2, 30),
(
include_bytes!("../../fixtures/edge-case-left-right-same-node.bin").to_vec(),
2,
@@ -500,10 +480,7 @@ mod tests {
let update = parse_doc_update(doc).unwrap();
assert_eq!(update.structs.len(), clients);
assert_eq!(
update.structs.iter().map(|s| s.1.len()).sum::<usize>(),
structs
);
assert_eq!(update.structs.iter().map(|s| s.1.len()).sum::<usize>(), structs);
}
}
@@ -526,9 +503,7 @@ mod tests {
#[ignore = "just for local data test"]
#[test]
fn test_parse_local_doc() {
let json =
serde_json::from_slice::<Vec<Data>>(include_bytes!("../../fixtures/local_docs.json"))
.unwrap();
let json = serde_json::from_slice::<Vec<Data>>(include_bytes!("../../fixtures/local_docs.json")).unwrap();
for ws in json {
let data = &ws.blob[5..=(ws.blob.len() - 2)];
@@ -609,13 +584,7 @@ mod tests {
assert_eq!(iter.next(), None);
assert!(!update.pending_structs.is_empty());
assert_eq!(
update
.pending_structs
.get_mut(&0)
.unwrap()
.pop_front()
.unwrap()
.id(),
update.pending_structs.get_mut(&0).unwrap().pop_front().unwrap().id(),
(0, 4).into()
);
assert!(!update.missing_state.is_empty());

View File

@@ -7,9 +7,7 @@ pub(crate) struct ItemBuilder {
#[allow(dead_code)]
impl ItemBuilder {
pub fn new() -> ItemBuilder {
Self {
item: Item::default(),
}
Self { item: Item::default() }
}
pub fn id(mut self, id: Id) -> ItemBuilder {
@@ -93,10 +91,7 @@ mod tests {
assert_eq!(item.origin_right_id, Some(Id::new(4, 5)));
assert!(matches!(item.parent, Some(Parent::String(text)) if text == "test"));
assert_eq!(item.parent_sub, None);
assert_eq!(
item.content,
Content::Any(vec![Any::String("Hello".into())])
);
assert_eq!(item.content, Content::Any(vec![Any::String("Hello".into())]));
});
}
}

View File

@@ -73,10 +73,10 @@ impl OrderRange {
}
}
next_old = old_iter.next();
if let Some(next_old) = &next_old {
if next_old.start > new_range.end {
continue;
}
if let Some(next_old) = &next_old
&& next_old.start > new_range.end
{
continue;
}
}
next_new = new_iter.next();
@@ -184,10 +184,10 @@ impl OrderRange {
}
fn make_single(&mut self) {
if let OrderRange::Fragment(ranges) = self {
if ranges.len() == 1 {
*self = OrderRange::Range(ranges[0].clone());
}
if let OrderRange::Fragment(ranges) = self
&& ranges.len() == 1
{
*self = OrderRange::Range(ranges[0].clone());
}
}
@@ -278,10 +278,7 @@ impl<'a> IntoIterator for &'a OrderRange {
type IntoIter = OrderRangeIter<'a>;
fn into_iter(self) -> Self::IntoIter {
OrderRangeIter {
range: self,
idx: 0,
}
OrderRangeIter { range: self, idx: 0 }
}
}
@@ -394,18 +391,9 @@ mod tests {
assert!(OrderRange::check_range_covered(&[0..1], &[0..3]));
assert!(OrderRange::check_range_covered(&[1..2], &[0..3]));
assert!(OrderRange::check_range_covered(&[1..2, 2..3], &[0..3]));
assert!(!OrderRange::check_range_covered(
&[1..2, 2..3, 3..4],
&[0..3]
));
assert!(OrderRange::check_range_covered(
&[0..1, 2..3],
&[0..2, 2..4]
));
assert!(OrderRange::check_range_covered(
&[0..1, 2..3, 3..4],
&[0..2, 2..4]
),);
assert!(!OrderRange::check_range_covered(&[1..2, 2..3, 3..4], &[0..3]));
assert!(OrderRange::check_range_covered(&[0..1, 2..3], &[0..2, 2..4]));
assert!(OrderRange::check_range_covered(&[0..1, 2..3, 3..4], &[0..2, 2..4]),);
}
#[test]
@@ -469,10 +457,7 @@ mod tests {
fn iter() {
let range: OrderRange = vec![(0..10), (20..30)].into();
assert_eq!(
range.into_iter().collect::<Vec<_>>(),
vec![(0..10), (20..30)]
);
assert_eq!(range.into_iter().collect::<Vec<_>>(), vec![(0..10), (20..30)]);
let range: OrderRange = OrderRange::Range(0..10);

View File

@@ -289,8 +289,7 @@ impl<T> FlattenGet<T> for Option<Somr<T>> {
impl<T: PartialEq> PartialEq for Somr<T> {
fn eq(&self, other: &Self) -> bool {
self.ptr() == other.ptr()
|| !self.dangling() && !other.dangling() && self.inner() == other.inner()
self.ptr() == other.ptr() || !self.dangling() && !other.dangling() && self.inner() == other.inner()
}
}
@@ -385,10 +384,7 @@ mod tests {
let five_ref = five.clone();
assert!(!five_ref.is_owned());
assert_eq!(five_ref.get(), Some(&5));
assert_eq!(
five_ref.ptr().as_ptr() as usize,
five.ptr().as_ptr() as usize
);
assert_eq!(five_ref.ptr().as_ptr() as usize, five.ptr().as_ptr() as usize);
drop(five);
// owner released
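
A condensed sketch of the owner/ref behaviour this test checks, assuming a dangling ref reads back as `None`:

```rust
let five = Somr::new(5);
let five_ref = five.clone(); // clones are non-owning refs
assert!(five.is_owned() && !five_ref.is_owned());
assert_eq!(five_ref.get(), Some(&5));
drop(five); // owner released
assert_eq!(five_ref.get(), None); // the ref now dangles
```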

View File

@@ -1,8 +1,8 @@
use std::ops::{Deref, DerefMut};
use super::{
Client, ClientMap, Clock, CrdtRead, CrdtReader, CrdtWrite, CrdtWriter, HashMapExt, Id,
JwstCodecResult, HASHMAP_SAFE_CAPACITY,
Client, ClientMap, Clock, CrdtRead, CrdtReader, CrdtWrite, CrdtWriter, HASHMAP_SAFE_CAPACITY, HashMapExt, Id,
JwstCodecResult,
};
#[derive(Default, Debug, PartialEq, Clone)]

View File

@@ -1,7 +1,11 @@
#[cfg(feature = "events")]
use publisher::DocPublisher;
use super::{history::StoreHistory, store::StoreRef, *};
use super::{
history::StoreHistory,
store::{ChangedTypeRefs, StoreRef},
*,
};
use crate::sync::{Arc, RwLock};
#[cfg(feature = "debug")]
@@ -43,24 +47,6 @@ impl Default for DocOptions {
gc: true,
}
} else {
/// It tends to generate small numbers.
/// Since the client id is included in every crdt item, a
/// small client id helps reduce the binary size.
///
/// NOTE: there is a ~36% probability that a number generated by
/// this function is greater than [u32::MAX]
fn prefer_small_random() -> u64 {
use rand::{distr::Distribution, rng};
use rand_distr::Exp;
let scale_factor = u16::MAX as f64;
let v: f64 = Exp::new(1.0 / scale_factor)
.map(|exp| exp.sample(&mut rng()))
.unwrap_or_else(|_| rand::random());
(v * scale_factor) as u64
}
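// A quick check of that figure: v ~ Exp(rate 1/65535), so P(v > t) = exp(-t/65535).
// The cast result exceeds u32::MAX once v * 65535 > 2^32, i.e. v > ~65537, and
// exp(-65537/65535) ≈ e^-1 ≈ 0.368, which is the ~36% quoted above.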
Self {
client_id: prefer_small_random(),
guid: nanoid::nanoid!(),
@@ -138,6 +124,7 @@ pub struct Doc {
pub(crate) store: StoreRef,
#[cfg(feature = "events")]
pub publisher: Arc<DocPublisher>,
pub(crate) batch: Somr<Batch>,
}
unsafe impl Send for Doc {}
@@ -171,6 +158,7 @@ impl Doc {
store,
#[cfg(feature = "events")]
publisher,
batch: Somr::none(),
}
}
@@ -182,6 +170,14 @@ impl Doc {
self.client_id
}
pub fn set_client(&mut self, client_id: u64) {
self.client_id = client_id;
}
pub fn renew_client(&mut self) {
self.client_id = prefer_small_random();
}
pub fn clients(&self) -> Vec<u64> {
self.store.read().unwrap().clients()
}
@@ -205,6 +201,17 @@ impl Doc {
}
}
pub(crate) fn get_changed(&self) -> ChangedTypeRefs {
self.store.write().unwrap().get_changed()
}
pub fn store_compare(&self, other: &Doc) -> bool {
let store = self.store.read().unwrap();
let other_store = other.store.read().unwrap();
store.deep_compare(&other_store)
}
pub fn options(&self) -> &DocOptions {
&self.opts
}
@@ -220,10 +227,7 @@ impl Doc {
Self::try_from_binary_v1_with_options(binary, DocOptions::default())
}
pub fn try_from_binary_v1_with_options<T: AsRef<[u8]>>(
binary: T,
options: DocOptions,
) -> JwstCodecResult<Self> {
pub fn try_from_binary_v1_with_options<T: AsRef<[u8]>>(binary: T, options: DocOptions) -> JwstCodecResult<Self> {
let mut doc = Doc::with_options(options);
doc.apply_update_from_binary_v1(binary)?;
Ok(doc)
@@ -316,9 +320,7 @@ impl Doc {
}
pub fn create_text(&self) -> JwstCodecResult<Text> {
YTypeBuilder::new(self.store.clone())
.with_kind(YTypeKind::Text)
.build()
YTypeBuilder::new(self.store.clone()).with_kind(YTypeKind::Text).build()
}
pub fn get_or_create_array<S: AsRef<str>>(&self, str: S) -> JwstCodecResult<Array> {
@@ -342,9 +344,7 @@ impl Doc {
}
pub fn create_map(&self) -> JwstCodecResult<Map> {
YTypeBuilder::new(self.store.clone())
.with_kind(YTypeKind::Map)
.build()
YTypeBuilder::new(self.store.clone()).with_kind(YTypeKind::Map).build()
}
pub fn get_map(&self, str: &str) -> JwstCodecResult<Map> {
@@ -378,6 +378,10 @@ impl Doc {
self.store.read().unwrap().get_state_vector()
}
pub fn get_delete_sets(&self) -> DeleteSet {
self.store.read().unwrap().get_delete_sets()
}
#[cfg(feature = "events")]
pub fn subscribe(&self, cb: impl Fn(&[u8], &[History]) + Sync + Send + 'static) {
self.publisher.subscribe(cb);
@@ -393,6 +397,11 @@ impl Doc {
self.publisher.count()
}
#[cfg(feature = "events")]
pub fn subscriber_count(&self) -> usize {
Arc::<DocPublisher>::strong_count(&self.publisher)
}
pub fn gc(&self) -> JwstCodecResult<()> {
self.store.write().unwrap().optimize()
}
@@ -400,7 +409,7 @@ impl Doc {
#[cfg(test)]
mod tests {
use yrs::{types::ToJson, updates::decoder::Decode, Array, Map, Options, Transact};
use yrs::{Array, Map, Options, Transact, types::ToJson, updates::decoder::Decode};
use super::*;
@@ -443,23 +452,14 @@ mod tests {
let mut doc = Doc::try_from_binary_v1(binary).unwrap();
let mut doc_new = Doc::try_from_binary_v1(binary_new).unwrap();
let diff_update = doc_new
.encode_state_as_update_v1(&doc.get_state_vector())
.unwrap();
let diff_update = doc_new.encode_state_as_update_v1(&doc.get_state_vector()).unwrap();
let diff_update_reverse = doc
.encode_state_as_update_v1(&doc_new.get_state_vector())
.unwrap();
let diff_update_reverse = doc.encode_state_as_update_v1(&doc_new.get_state_vector()).unwrap();
doc.apply_update_from_binary_v1(diff_update).unwrap();
doc_new
.apply_update_from_binary_v1(diff_update_reverse)
.unwrap();
doc_new.apply_update_from_binary_v1(diff_update_reverse).unwrap();
assert_eq!(
doc.encode_update_v1().unwrap(),
doc_new.encode_update_v1().unwrap()
);
assert_eq!(doc.encode_update_v1().unwrap(), doc_new.encode_update_v1().unwrap());
});
}
@@ -491,43 +491,43 @@ mod tests {
assert_json_diff::assert_json_matches!(array.to_json(&doc.transact()), json, config);
};
let binary = {
let doc = Doc::new();
let mut array = doc.get_or_create_array("abc").unwrap();
array.insert(0, 42).unwrap();
array.insert(1, -42).unwrap();
array.insert(2, true).unwrap();
array.insert(3, false).unwrap();
array.insert(4, "hello").unwrap();
array.insert(5, "world").unwrap();
{
let binary = {
let doc = Doc::new();
let mut array = doc.get_or_create_array("abc").unwrap();
array.insert(0, 42).unwrap();
array.insert(1, -42).unwrap();
array.insert(2, true).unwrap();
array.insert(3, false).unwrap();
array.insert(4, "hello").unwrap();
array.insert(5, "world").unwrap();
let mut sub_array = doc.create_array().unwrap();
array.insert(6, sub_array.clone()).unwrap();
// FIXME: the array needs to be inserted first to stay compatible with yrs
sub_array.insert(0, 1).unwrap();
let mut sub_array = doc.create_array().unwrap();
array.insert(6, sub_array.clone()).unwrap();
// FIXME: the array needs to be inserted first to stay compatible with yrs
sub_array.insert(0, 1).unwrap();
doc.encode_update_v1().unwrap()
};
doc.encode_update_v1().unwrap()
};
let ydoc = yrs::Doc::with_options(yrs_options);
let array = ydoc.get_or_insert_array("abc");
let mut trx = ydoc.transact_mut();
trx
.apply_update(yrs::Update::decode_v1(&binary).unwrap())
.unwrap();
let ydoc = yrs::Doc::with_options(yrs_options);
let array = ydoc.get_or_insert_array("abc");
let mut trx = ydoc.transact_mut();
trx.apply_update(yrs::Update::decode_v1(&binary).unwrap()).unwrap();
let config = assert_json_diff::Config::new(assert_json_diff::CompareMode::Strict)
.numeric_mode(assert_json_diff::NumericMode::AssumeFloat);
assert_json_diff::assert_json_matches!(array.to_json(&trx), json, config);
let config = assert_json_diff::Config::new(assert_json_diff::CompareMode::Strict)
.numeric_mode(assert_json_diff::NumericMode::AssumeFloat);
assert_json_diff::assert_json_matches!(array.to_json(&trx), json, config);
let mut doc = Doc::new();
let array = doc.get_or_create_array("abc").unwrap();
doc.apply_update_from_binary_v1(binary).unwrap();
let mut doc = Doc::new();
let array = doc.get_or_create_array("abc").unwrap();
doc.apply_update_from_binary_v1(binary).unwrap();
let list = array.iter().collect::<Vec<_>>();
let list = array.iter().collect::<Vec<_>>();
assert!(list.len() == 7);
assert!(matches!(list[6], Value::Array(_)));
assert!(list.len() == 7);
assert!(matches!(list[6], Value::Array(_)));
}
}
#[test]
@@ -551,11 +551,7 @@ mod tests {
count_clone2.fetch_add(1, Ordering::SeqCst);
});
doc_clone
.get_or_create_array("abc")
.unwrap()
.insert(0, 42)
.unwrap();
doc_clone.get_or_create_array("abc").unwrap().insert(0, 42).unwrap();
// wait for the observer, which cycles once every 100ms
std::thread::sleep(std::time::Duration::from_millis(200));
@@ -594,8 +590,8 @@ mod tests {
doc
.apply_update_from_binary_v1(vec![
1, 1, 1, 1, 40, 0, 1, 0, 11, 115, 117, 98, 95, 109, 97, 112, 95, 107, 101, 121, 1, 119,
13, 115, 117, 98, 95, 109, 97, 112, 95, 118, 97, 108, 117, 101, 0,
1, 1, 1, 1, 40, 0, 1, 0, 11, 115, 117, 98, 95, 109, 97, 112, 95, 107, 101, 121, 1, 119, 13, 115, 117, 98, 95,
109, 97, 112, 95, 118, 97, 108, 117, 101, 0,
])
.unwrap();
@@ -612,8 +608,8 @@ mod tests {
.sum::<usize>();
doc
.apply_update_from_binary_v1(vec![
1, 1, 1, 1, 40, 0, 1, 0, 11, 115, 117, 98, 95, 109, 97, 112, 95, 107, 101, 121, 1, 119,
13, 115, 117, 98, 95, 109, 97, 112, 95, 118, 97, 108, 117, 101, 0,
1, 1, 1, 1, 40, 0, 1, 0, 11, 115, 117, 98, 95, 109, 97, 112, 95, 107, 101, 121, 1, 119, 13, 115, 117, 98, 95,
109, 97, 112, 95, 118, 97, 108, 117, 101, 0,
])
.unwrap();
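
Pulling the new accessors together; a sketch using only methods visible in this diff, plus the pre-existing `client()` getter, assumed unchanged:

```rust
let mut doc = Doc::new();
doc.set_client(42);
assert_eq!(doc.client(), 42);
doc.renew_client(); // draws a fresh, preferably small, client id
let _tombstones = doc.get_delete_sets(); // clone of the store's delete set
let other = Doc::new();
// item-by-item structural comparison, stricter than comparing encoded updates
let _same = doc.store_compare(&other);
```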

View File

@@ -69,11 +69,7 @@ impl StoreHistory {
self.parse_items(store_items)
}
pub fn parse_delete_sets(
&self,
old_sets: &ClientMap<OrderRange>,
new_sets: &ClientMap<OrderRange>,
) -> Vec<History> {
pub fn parse_delete_sets(&self, old_sets: &ClientMap<OrderRange>, new_sets: &ClientMap<OrderRange>) -> Vec<History> {
let store = self.store.read().unwrap();
let deleted_items = new_sets
.iter()
@@ -109,11 +105,7 @@ impl StoreHistory {
let store = self.store.read().unwrap();
let mut sort_iter: Box<dyn Iterator<Item = Item>> = Box::new(
SortedNodes::new(if let Some(client) = client {
store
.items
.get(client)
.map(|i| vec![(client, i)])
.unwrap_or_default()
store.items.get(client).map(|i| vec![(client, i)]).unwrap_or_default()
} else {
store.items.iter().collect::<Vec<_>>()
})
@@ -273,10 +265,10 @@ impl Iterator for SortedNodes<'_> {
type Item = Node;
fn next(&mut self) -> Option<Self::Item> {
if let Some(current) = self.current.as_mut() {
if let Some(node) = current.pop_back() {
return Some(node);
}
if let Some(current) = self.current.as_mut()
&& let Some(node) = current.pop_back()
{
return Some(node);
}
if let Some((_, nodes)) = self.nodes.pop() {
@@ -318,10 +310,7 @@ mod test {
let update = doc.encode_update().unwrap();
assert_eq!(
history.parse_store(Default::default()),
history.parse_update(&update,)
);
assert_eq!(history.parse_store(Default::default()), history.parse_update(&update,));
});
}
}

View File

@@ -1,4 +1,5 @@
mod awareness;
mod batch;
mod codec;
mod common;
mod document;
@@ -12,6 +13,7 @@ mod utils;
pub use ahash::{HashMap, HashMapExt, HashSet, HashSetExt};
pub use awareness::{Awareness, AwarenessEvent};
pub use batch::{Batch, batch_commit};
pub use codec::*;
pub use common::*;
pub use document::{Doc, DocOptions};
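
The batch module itself is outside this excerpt, so only the re-export can be demonstrated; the import below is a sketch, and the actual `Batch`/`batch_commit` signatures live in that module:

```rust
use y_octo::{Batch, batch_commit};
```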

View File

@@ -34,7 +34,10 @@ impl DocPublisher {
observing: Arc::new(AtomicBool::new(false)),
};
if cfg!(not(any(feature = "bench", fuzzing, loom, miri))) {
if cfg!(all(
feature = "subscribe",
not(any(feature = "bench", fuzzing, loom, miri))
)) {
publisher.start();
}
@@ -111,7 +114,7 @@ impl DocPublisher {
last_deletes = deletes;
for cb in subscribers.iter() {
use std::panic::{catch_unwind, AssertUnwindSafe};
use std::panic::{AssertUnwindSafe, catch_unwind};
// catch the panic if a callback panics
catch_unwind(AssertUnwindSafe(|| {
cb(&binary, &history);
@@ -177,10 +180,7 @@ mod tests {
let ret = [
vec![vec!["(1, 0)", "test.key1", "val1"]],
vec![
vec!["(1, 1)", "test.key2", "val2"],
vec!["(1, 2)", "test.key3", "val3"],
],
vec![vec!["(1, 1)", "test.key2", "val2"], vec!["(1, 2)", "test.key3", "val3"]],
vec![
vec!["(1, 3)", "array.0", "val1"],
vec!["(1, 4)", "array.1", "val2"],
@@ -205,12 +205,7 @@ mod tests {
let ret = ret[cycle].clone();
for (i, h) in history.iter().enumerate() {
println!(
"history change by {} at {}: {}",
h.id,
h.parent.join("."),
h.content
);
println!("history change by {} at {}: {}", h.id, h.parent.join("."), h.content);
// the first update is lost for an unknown reason in the asan test; skip it if asan is enabled
if option_env!("ASAN_OPTIONS").is_none() {
let ret = &ret[i];
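
For completeness, the subscription surface this publisher feeds; a sketch requiring the `events` feature:

```rust
let doc = Doc::new();
doc.subscribe(|binary, histories| {
    // binary: the encoded update; histories: parsed change entries
    println!("{} bytes, {} histories", binary.len(), histories.len());
});
// new in this commit: counts strong refs to the shared publisher
let _n = doc.subscriber_count();
```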

View File

@@ -1,5 +1,5 @@
use std::{
collections::{hash_map::Entry, VecDeque},
collections::{VecDeque, hash_map::Entry},
mem,
ops::{Deref, Range},
};
@@ -10,6 +10,8 @@ use crate::{
sync::{Arc, RwLock, RwLockWriteGuard, Weak},
};
pub type ChangedTypeRefs = HashMap<YTypeRef, Vec<SmolStr>>;
unsafe impl Send for DocStore {}
unsafe impl Sync for DocStore {}
@@ -26,6 +28,8 @@ pub(crate) struct DocStore {
pub dangling_types: HashMap<usize, YTypeRef>,
pub pending: Option<Update>,
pub last_optimized_state: StateVector,
// parents of changed items; the value is the parent's sub key, if any
pub changed: ChangedTypeRefs,
}
pub(crate) type StoreRef = Arc<RwLock<DocStore>>;
@@ -102,6 +106,10 @@ impl DocStore {
Self::items_as_state_vector(&self.items)
}
pub fn get_delete_sets(&self) -> DeleteSet {
self.delete_set.clone()
}
fn items_as_state_vector(items: &ClientMap<VecDeque<Node>>) -> StateVector {
let mut state = StateVector::default();
for (client, structs) in items.iter() {
@@ -175,10 +183,10 @@ impl DocStore {
let id = (self.client(), self.get_state(self.client())).into();
let item = Somr::new(Item::new(id, content, left, right, parent, parent_sub));
if let Content::Type(ty) = &item.get().unwrap().content {
if let Some(mut ty) = ty.ty_mut() {
ty.item = item.clone();
}
if let Content::Type(ty) = &item.get().unwrap().content
&& let Some(mut ty) = ty.ty_mut()
{
ty.item = item.clone();
}
item
@@ -190,10 +198,10 @@ impl DocStore {
pub fn get_node_with_idx<I: Into<Id>>(&self, id: I) -> Option<(Node, usize)> {
let id = id.into();
if let Some(items) = self.items.get(&id.client) {
if let Some(index) = Self::get_node_index(items, id.clock) {
return items.get(index).map(|item| (item.clone(), index));
}
if let Some(items) = self.items.get(&id.client)
&& let Some(index) = Self::get_node_index(items, id.clock)
{
return items.get(index).map(|item| (item.clone(), index));
}
None
@@ -204,20 +212,16 @@ impl DocStore {
let id = id.into();
if let Some(items) = self.items.get_mut(&id.client) {
if let Some(idx) = Self::get_node_index(items, id.clock) {
return Self::split_node_at(items, idx, diff);
}
if let Some(items) = self.items.get_mut(&id.client)
&& let Some(idx) = Self::get_node_index(items, id.clock)
{
return Self::split_node_at(items, idx, diff);
}
Err(JwstCodecError::StructSequenceNotExists(id.client))
}
pub fn split_node_at(
items: &mut VecDeque<Node>,
idx: usize,
diff: u64,
) -> JwstCodecResult<(Node, Node)> {
pub fn split_node_at(items: &mut VecDeque<Node>, idx: usize, diff: u64) -> JwstCodecResult<(Node, Node)> {
debug_assert!(diff > 0);
let node = items.get(idx).unwrap().clone();
@@ -263,16 +267,16 @@ impl DocStore {
pub fn split_at_and_get_right<I: Into<Id>>(&mut self, id: I) -> JwstCodecResult<Node> {
let id = id.into();
if let Some(items) = self.items.get_mut(&id.client) {
if let Some(index) = Self::get_node_index(items, id.clock) {
let item = items.get(index).unwrap().clone();
let offset = id.clock - item.clock();
if offset > 0 && item.is_item() {
let (_, right) = Self::split_node_at(items, index, offset)?;
return Ok(right);
} else {
return Ok(item);
}
if let Some(items) = self.items.get_mut(&id.client)
&& let Some(index) = Self::get_node_index(items, id.clock)
{
let item = items.get(index).unwrap().clone();
let offset = id.clock - item.clock();
if offset > 0 && item.is_item() {
let (_, right) = Self::split_node_at(items, index, offset)?;
return Ok(right);
} else {
return Ok(item);
}
}
@@ -281,16 +285,16 @@ impl DocStore {
pub fn split_at_and_get_left<I: Into<Id>>(&mut self, id: I) -> JwstCodecResult<Node> {
let id = id.into();
if let Some(items) = self.items.get_mut(&id.client) {
if let Some(index) = Self::get_node_index(items, id.clock) {
let item = items.get(index).unwrap().clone();
let offset = id.clock - item.clock();
if offset != item.len() - 1 && !item.is_gc() {
let (left, _) = Self::split_node_at(items, index, offset + 1)?;
return Ok(left);
} else {
return Ok(item);
}
if let Some(items) = self.items.get_mut(&id.client)
&& let Some(index) = Self::get_node_index(items, id.clock)
{
let item = items.get(index).unwrap().clone();
let offset = id.clock - item.clock();
if offset != item.len() - 1 && !item.is_gc() {
let (left, _) = Self::split_node_at(items, index, offset + 1)?;
return Ok(left);
} else {
return Ok(item);
}
}
@@ -430,12 +434,7 @@ impl DocStore {
Ok(())
}
pub fn integrate(
&mut self,
mut node: Node,
offset: u64,
parent: Option<&mut YType>,
) -> JwstCodecResult {
pub fn integrate(&mut self, mut node: Node, offset: u64, parent: Option<&mut YType>) -> JwstCodecResult {
match &mut node {
Node::Item(item_owner_ref) => {
assert!(
@@ -451,9 +450,7 @@ impl DocStore {
if offset > 0 {
this.id.clock += offset;
if let Node::Item(left_ref) =
self.split_at_and_get_left(Id::new(this.id.client, this.id.clock - 1))?
{
if let Node::Item(left_ref) = self.split_at_and_get_left(Id::new(this.id.client, this.id.clock - 1))? {
this.origin_left_id = left_ref.get().map(|left| left.last_id());
this.left = left_ref;
}
@@ -550,11 +547,7 @@ impl DocStore {
} else {
// no left, parent.start = this
right = if let Some(parent_sub) = &this.parent_sub {
parent
.map
.get(parent_sub)
.map(|n| Node::Item(n.clone()).head())
.into()
parent.map.get(parent_sub).map(|n| Node::Item(n.clone()).head()).into()
} else {
mem::replace(&mut parent.start, item_owner_ref.clone())
};
@@ -571,9 +564,7 @@ impl DocStore {
} else {
// no right, parent.start = this, delete this.left
if let Some(parent_sub) = &this.parent_sub {
parent
.map
.insert(parent_sub.clone(), item_owner_ref.clone());
parent.map.insert(parent_sub.clone(), item_owner_ref.clone());
if let Some(left) = this.left.get() {
self.delete_item(left, Some(parent));
@@ -582,11 +573,7 @@ impl DocStore {
}
this.right = right.clone();
let parent_deleted = parent
.item
.get()
.map(|item| item.deleted())
.unwrap_or(false);
let parent_deleted = parent.item.get().map(|item| item.deleted()).unwrap_or(false);
// should delete
if parent_deleted || this.parent_sub.is_some() && this.right.is_some() {
@@ -599,6 +586,9 @@ impl DocStore {
}
parent_lock.take();
// mark changed item's parent
Self::mark_changed(&mut self.changed, ty.clone(), this.parent_sub.clone());
} else {
// if the parent does not exist, integrate a GC node instead
// don't delete it, because it may be referenced by other nodes
@@ -621,7 +611,7 @@ impl DocStore {
pub fn delete_item(&mut self, item: &Item, parent: Option<&mut YType>) {
let mut pending_delete_sets = HashMap::new();
Self::delete_item_inner(&mut pending_delete_sets, item, parent);
Self::delete_item_inner(&mut pending_delete_sets, &mut self.changed, item, parent);
for (client, ranges) in pending_delete_sets {
self.delete_set.batch_add_ranges(client, ranges);
}
@@ -629,6 +619,7 @@ impl DocStore {
fn delete_item_inner(
delete_set: &mut HashMap<u64, Vec<Range<u64>>>,
changed: &mut ChangedTypeRefs,
item: &Item,
parent: Option<&mut YType>,
) {
@@ -663,7 +654,7 @@ impl DocStore {
let mut item_ref = ty.start.clone();
while let Some(item) = item_ref.get() {
if !item.deleted() {
Self::delete_item_inner(delete_set, item, Some(&mut ty));
Self::delete_item_inner(delete_set, changed, item, Some(&mut ty));
}
item_ref = item.right.clone();
@@ -671,10 +662,10 @@ impl DocStore {
let map_values = ty.map.values().cloned().collect::<Vec<_>>();
for item in map_values {
if let Some(item) = item.get() {
if !item.deleted() {
Self::delete_item_inner(delete_set, item, Some(&mut ty));
}
if let Some(item) = item.get()
&& !item.deleted()
{
Self::delete_item_inner(delete_set, changed, item, Some(&mut ty));
}
}
}
@@ -684,6 +675,11 @@ impl DocStore {
}
_ => {}
}
// mark deleted item's parent
if let Some(Parent::Type(ty)) = &item.parent {
Self::mark_changed(changed, ty.clone(), item.parent_sub.clone());
}
}
pub fn delete_node(&mut self, struct_info: &Node, parent: Option<&mut YType>) {
@@ -696,59 +692,55 @@ impl DocStore {
let start = range.start;
let end = range.end;
if let Some(items) = self.items.get_mut(&client) {
if let Some(mut idx) = DocStore::get_node_index(items, start) {
{
// id.clock <= range.start < id.end
// need to split the item and delete the right part
// -----item-----
// ^start
let node = &items[idx];
let id = node.id();
if !node.deleted() && id.clock < start {
DocStore::split_node_at(items, idx, start - id.clock)?;
idx += 1;
}
};
let mut pending_delete_sets = HashMap::new();
while idx < items.len() {
let node = items[idx].clone();
let id = node.id();
if id.clock < end {
if !node.deleted() {
if let Some(item) = node.as_item().get() {
// need to split the item
// -----item-----
// ^end
if end < id.clock + node.len() {
DocStore::split_node_at(items, idx, end - id.clock)?;
}
Self::delete_item_inner(&mut pending_delete_sets, item, None);
}
}
} else {
break;
}
if let Some(items) = self.items.get_mut(&client)
&& let Some(mut idx) = DocStore::get_node_index(items, start)
{
{
// id.clock <= range.start < id.end
// need to split the item and delete the right part
// -----item-----
// ^start
let node = &items[idx];
let id = node.id();
if !node.deleted() && id.clock < start {
DocStore::split_node_at(items, idx, start - id.clock)?;
idx += 1;
}
for (client, ranges) in pending_delete_sets {
self.delete_set.batch_add_ranges(client, ranges);
};
let mut pending_delete_sets = HashMap::new();
while idx < items.len() {
let node = items[idx].clone();
let id = node.id();
if id.clock < end {
if !node.deleted()
&& let Some(item) = node.as_item().get()
{
// need to split the item
// -----item-----
// ^end
if end < id.clock + node.len() {
DocStore::split_node_at(items, idx, end - id.clock)?;
}
Self::delete_item_inner(&mut pending_delete_sets, &mut self.changed, item, None);
}
} else {
break;
}
idx += 1;
}
}
for (client, ranges) in pending_delete_sets {
self.delete_set.batch_add_ranges(client, ranges);
}
};
Ok(())
}
fn diff_state_vectors(
local_state_vector: &StateVector,
remote_state_vector: &StateVector,
) -> Vec<(Client, Clock)> {
fn diff_state_vectors(local_state_vector: &StateVector, remote_state_vector: &StateVector) -> Vec<(Client, Clock)> {
let mut diff = Vec::new();
for (client, &remote_clock) in remote_state_vector.iter() {
@@ -776,19 +768,28 @@ impl DocStore {
..Update::default()
};
if with_pending {
if let Some(pending) = &self.pending {
Update::merge_into(&mut update, [pending.clone()])
}
if with_pending && let Some(pending) = &self.pending {
Update::merge_into(&mut update, [pending.clone()])
}
Ok(update)
}
fn diff_structs(
map: &ClientMap<VecDeque<Node>>,
sv: &StateVector,
) -> JwstCodecResult<ClientMap<VecDeque<Node>>> {
fn mark_changed(changed: &mut ChangedTypeRefs, parent: YTypeRef, parent_sub: Option<SmolStr>) {
if parent.inner.is_some() {
let vec = changed.entry(parent).or_default();
if let Some(parent_sub) = parent_sub {
// only record the sub key if one exists
vec.push(parent_sub);
}
}
}
pub fn get_changed(&mut self) -> ChangedTypeRefs {
mem::replace(&mut self.changed, HashMap::new())
}
fn diff_structs(map: &ClientMap<VecDeque<Node>>, sv: &StateVector) -> JwstCodecResult<ClientMap<VecDeque<Node>>> {
let local_state_vector = Self::items_as_state_vector(map);
let diff = Self::diff_state_vectors(&local_state_vector, sv);
let mut update_structs = ClientMap::new();
@@ -915,11 +916,11 @@ impl DocStore {
}
fn gc_content(content: &Content) -> JwstCodecResult {
if let Content::Type(ty) = content {
if let Some(mut ty) = ty.ty_mut() {
ty.start = Somr::none();
ty.map.clear();
}
if let Content::Type(ty) = content
&& let Some(mut ty) = ty.ty_mut()
{
ty.start = Somr::none();
ty.map.clear();
}
Ok(())
@@ -935,9 +936,7 @@ impl DocStore {
}
let nodes = self.items.get_mut(client).unwrap();
let first_change = Self::get_node_index(nodes, before_state)
.unwrap_or(1)
.max(1);
let first_change = Self::get_node_index(nodes, before_state).unwrap_or(1).max(1);
let mut idx = nodes.len() - 1;
while idx > 0 && idx >= first_change {
@@ -969,6 +968,39 @@ impl DocStore {
// return the index of processed items
idx - pos
}
pub fn deep_compare(&self, other: &Self) -> bool {
if self.items.len() != other.items.len() {
return false;
}
for (client, structs) in self.items.iter() {
if let Some(other_structs) = other.items.get(client) {
if structs.len() != other_structs.len() {
return false;
}
for (struct_info, other_struct_info) in structs.iter().zip(other_structs.iter()) {
if struct_info != other_struct_info {
return false;
}
if let (Node::Item(item), Node::Item(other_item)) = (struct_info, other_struct_info)
&& !match (item.get(), other_item.get()) {
(Some(item), Some(other_item)) => item.deep_compare(other_item),
(None, None) => true,
_ => false,
}
{
return false;
}
}
} else {
return false;
}
}
true
}
}
#[cfg(test)]
@@ -991,10 +1023,9 @@ mod tests {
let struct_info1 = Node::new_gc(Id::new(1, 1), 5);
let struct_info2 = Node::new_skip(Id::new(1, 6), 7);
doc_store.items.insert(
client_id,
VecDeque::from([struct_info1, struct_info2.clone()]),
);
doc_store
.items
.insert(client_id, VecDeque::from([struct_info1, struct_info2.clone()]));
let state = doc_store.get_state(client_id);
@@ -1022,24 +1053,15 @@ mod tests {
let struct_info2 = Node::new_gc((2, 0).into(), 6);
let struct_info3 = Node::new_skip((2, 6).into(), 1);
doc_store.items.insert(client1, VecDeque::from([struct_info1.clone()]));
doc_store
.items
.insert(client1, VecDeque::from([struct_info1.clone()]));
doc_store.items.insert(
client2,
VecDeque::from([struct_info2, struct_info3.clone()]),
);
.insert(client2, VecDeque::from([struct_info2, struct_info3.clone()]));
let state_map = doc_store.get_state_vector();
assert_eq!(
state_map.get(&client1),
struct_info1.clock() + struct_info1.len()
);
assert_eq!(
state_map.get(&client2),
struct_info3.clock() + struct_info3.len()
);
assert_eq!(state_map.get(&client1), struct_info1.clock() + struct_info1.len());
assert_eq!(state_map.get(&client2), struct_info3.clock() + struct_info3.len());
assert!(doc_store.self_check().is_ok());
});
@@ -1059,10 +1081,7 @@ mod tests {
assert!(doc_store.add_node(struct_info2).is_ok());
assert_eq!(
doc_store.add_node(struct_info3_err),
Err(JwstCodecError::StructClockInvalid {
expect: 6,
actually: 5
})
Err(JwstCodecError::StructClockInvalid { expect: 6, actually: 5 })
);
assert!(doc_store.add_node(struct_info3.clone()).is_ok());
assert_eq!(
@@ -1163,15 +1182,64 @@ mod tests {
// s1 used to be (1, 4), but it is actually a ref to the first item in the store,
// so now it should be (1, 2)
assert_eq!(
s1, left,
"doc internal mutation should not modify the pointer"
);
assert_eq!(s1, left, "doc internal mutation should not modify the pointer");
let right = doc_store.split_at_and_get_right((1, 5)).unwrap();
assert_eq!(right.len(), 3); // base => b_ase
});
}
#[test]
fn should_mark_changed_items() {
loom_model!({
let doc = DocOptions::new().with_client_id(1).build();
let mut arr = doc.get_or_create_array("arr").unwrap();
let mut text = doc.create_text().unwrap();
let mut map = doc.create_map().unwrap();
arr.insert(0, Value::from(text.clone())).unwrap();
arr.insert(1, Value::from(map.clone())).unwrap();
{
let changed = doc.store.write().unwrap().get_changed();
// for array, we will only record the type ref itself
assert_eq!(changed.len(), 1);
assert_eq!(changed.get(&arr.0), Some(&vec![]));
}
text.insert(0, "hello world").unwrap();
text.remove(5, 6).unwrap();
{
let changed = doc.store.write().unwrap().get_changed();
assert_eq!(changed.len(), 1);
assert_eq!(changed.get(&text.0), Some(&vec![]));
}
map.insert("key".into(), 123).unwrap();
{
let changed = doc.store.write().unwrap().get_changed();
assert_eq!(changed.len(), 1);
assert_eq!(changed.get(&map.0), Some(&vec!["key".into()]));
}
map.remove("key");
{
let changed = doc.store.write().unwrap().get_changed();
assert_eq!(changed.len(), 1);
assert_eq!(changed.get(&map.0), Some(&vec!["key".into()]));
}
arr.remove(0, 1).unwrap();
{
let changed = doc.store.write().unwrap().get_changed();
assert_eq!(changed.len(), 2);
// text's children mark their parent (text) as changed
assert_eq!(changed.get(&text.0), Some(&vec![]));
// text marks its parent (arr) as changed
assert_eq!(changed.get(&arr.0), Some(&vec![]));
}
});
}
#[test]
fn should_replace_gc_item_with_content_deleted() {
loom_model!({
@@ -1195,13 +1263,7 @@ mod tests {
store.gc_delete_set().unwrap();
assert_eq!(
&store
.get_node((1, 0))
.unwrap()
.as_item()
.get()
.unwrap()
.content,
&store.get_node((1, 0)).unwrap().as_item().get().unwrap().content,
&Content::Deleted(4)
);
});
@@ -1226,13 +1288,7 @@ mod tests {
assert_eq!(arr.len(), 0);
assert_eq!(
&store
.get_node((1, 0))
.unwrap()
.as_item()
.get()
.unwrap()
.content,
&store.get_node((1, 0)).unwrap().as_item().get().unwrap().content,
&Content::Deleted(1)
);
@@ -1256,9 +1312,7 @@ mod tests {
let mut pages = doc.get_or_create_map("pages").unwrap();
let page1 = doc.create_text().unwrap();
let mut page1_ref = page1.clone();
pages
.insert("page1".to_string(), Value::from(page1))
.unwrap();
pages.insert("page1".to_string(), Value::from(page1)).unwrap();
page1_ref.insert(0, "hello").unwrap();
doc.encode_update_v1().unwrap()
};
@@ -1276,13 +1330,7 @@ mod tests {
store.gc_delete_set().unwrap();
assert_eq!(
&store
.get_node((1, 0))
.unwrap()
.as_item()
.get()
.unwrap()
.content,
&store.get_node((1, 0)).unwrap().as_item().get().unwrap().content,
&Content::Deleted(1)
);
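
The change-tracking flow end to end, mirroring `should_mark_changed_items` above (note that `store` and `get_changed` are `pub(crate)`, so this is an in-crate sketch):

```rust
let doc = DocOptions::new().with_client_id(1).build();
let mut map = doc.get_or_create_map("map").unwrap();
map.insert("key".into(), 123).unwrap();
// draining the changed set hands back touched parents plus their sub keys
let changed = doc.store.write().unwrap().get_changed();
assert_eq!(changed.len(), 1);
assert_eq!(changed.get(&map.0), Some(&vec!["key".into()]));
```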

View File

@@ -52,6 +52,11 @@ impl Iterator for ArrayIter<'_> {
}
impl Array {
#[inline(always)]
pub fn id(&self) -> Option<Id> {
self._id()
}
#[inline]
pub fn len(&self) -> u64 {
self.content_len()
@@ -126,15 +131,26 @@ mod tests {
array.insert(0, "Hello").unwrap();
array.insert(2, "World").unwrap();
assert_eq!(
array.get(0).unwrap(),
Value::Any(Any::String("Hello".into()))
);
assert_eq!(array.get(0).unwrap(), Value::Any(Any::String("Hello".into())));
assert_eq!(array.get(1).unwrap(), Value::Any(Any::String(" ".into())));
assert_eq!(
array.get(2).unwrap(),
Value::Any(Any::String("World".into()))
);
assert_eq!(array.get(2).unwrap(), Value::Any(Any::String("World".into())));
});
}
#[test]
fn test_yarray_delete() {
let options = DocOptions::default();
loom_model!({
let doc = Doc::with_options(options.clone());
let mut array = doc.get_or_create_array("abc").unwrap();
array.insert(0, " ").unwrap();
array.insert(0, "Hello").unwrap();
array.insert(2, "World").unwrap();
array.remove(0, 2).unwrap();
assert_eq!(array.get(0).unwrap(), Value::Any(Any::String("World".into())));
});
}
@@ -163,15 +179,9 @@ mod tests {
doc.apply_update(update).unwrap();
let array = doc.get_or_create_array("abc").unwrap();
assert_eq!(
array.get(0).unwrap(),
Value::Any(Any::String("Hello".into()))
);
assert_eq!(array.get(0).unwrap(), Value::Any(Any::String("Hello".into())));
assert_eq!(array.get(5).unwrap(), Value::Any(Any::String(" ".into())));
assert_eq!(
array.get(6).unwrap(),
Value::Any(Any::String("World".into()))
);
assert_eq!(array.get(6).unwrap(), Value::Any(Any::String("World".into())));
assert_eq!(array.get(11).unwrap(), Value::Any(Any::String("!".into())));
});
@@ -196,15 +206,9 @@ mod tests {
doc.apply_update(update).unwrap();
let array = doc.get_or_create_array("abc").unwrap();
assert_eq!(
array.get(0).unwrap(),
Value::Any(Any::String("Hello".into()))
);
assert_eq!(array.get(0).unwrap(), Value::Any(Any::String("Hello".into())));
assert_eq!(array.get(5).unwrap(), Value::Any(Any::String(" ".into())));
assert_eq!(
array.get(6).unwrap(),
Value::Any(Any::String("World".into()))
);
assert_eq!(array.get(6).unwrap(), Value::Any(Any::String("World".into())));
assert_eq!(array.get(11).unwrap(), Value::Any(Any::String("!".into())));
});
}
@@ -237,10 +241,7 @@ mod tests {
.unwrap();
let arr = doc.get_or_create_array("abc").unwrap();
assert_eq!(
arr.get(2).unwrap(),
Value::Any(Any::String("world".to_string()))
)
assert_eq!(arr.get(2).unwrap(), Value::Any(Any::String("world".to_string())))
});
}
}
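
A sketch of the new `id()` accessor's expected behaviour, assuming `Value: From<Array>` like the `Text`/`Map` conversions used in the tests above:

```rust
let doc = Doc::new();
let mut root = doc.get_or_create_array("root").unwrap();
assert_eq!(root.id(), None); // root types have no backing item
let nested = doc.create_array().unwrap();
root.insert(0, Value::from(nested.clone())).unwrap();
assert!(nested.id().is_some()); // nested types are backed by an item
```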

View File

@@ -6,6 +6,7 @@ pub(crate) use search_marker::MarkerList;
use super::*;
#[derive(Debug)]
pub(crate) struct ItemPosition {
pub parent: YTypeRef,
pub left: ItemRef,
@@ -55,6 +56,11 @@ impl ItemPosition {
}
pub(crate) trait ListType: AsInner<Inner = YTypeRef> {
#[inline(always)]
fn _id(&self) -> Option<Id> {
self.as_inner().ty().and_then(|ty| ty.item.get().map(|item| item.id))
}
#[inline(always)]
fn content_len(&self) -> u64 {
self.as_inner().ty().unwrap().len
@@ -84,23 +90,29 @@ pub(crate) trait ListType: AsInner<Inner = YTypeRef> {
return Some(pos);
}
if let Some(markers) = &inner.markers {
if let Some(marker) = markers.find_marker(inner, index) {
if marker.index > remaining {
remaining = 0
} else {
remaining -= marker.index;
}
pos.index = marker.index;
pos.left = marker
.ptr
.get()
.map(|ptr| ptr.left.clone())
.unwrap_or_default();
pos.right = marker.ptr;
if let Some(markers) = &inner.markers
&& let Some(marker) = markers.find_marker(inner, index)
{
if marker.index > remaining {
remaining = 0
} else {
remaining -= marker.index;
}
pos.index = marker.index;
pos.left = marker.ptr.get().map(|ptr| ptr.left.clone()).unwrap_or_default();
pos.right = marker.ptr;
};
// skip deleted items so the position does not start on a deleted one
while let Some(item) = pos.right.get() {
if item.deleted() {
pos.right = item.right.clone();
continue;
} else {
break;
}
}
while remaining > 0 {
if let Some(item) = pos.right.get() {
if item.indexable() {
@@ -141,16 +153,11 @@ pub(crate) trait ListType: AsInner<Inner = YTypeRef> {
Ok(())
}
fn insert_after(
ty: &mut YType,
store: &mut DocStore,
pos: ItemPosition,
content: Content,
) -> JwstCodecResult {
if let Some(markers) = &ty.markers {
if content.countable() {
markers.update_marker_changes(pos.index, content.clock_len() as i64);
}
fn insert_after(ty: &mut YType, store: &mut DocStore, pos: ItemPosition, content: Content) -> JwstCodecResult {
if let Some(markers) = &ty.markers
&& content.countable()
{
markers.update_marker_changes(pos.index, content.clock_len() as i64);
}
let item = store.create_item(
@@ -189,7 +196,12 @@ pub(crate) trait ListType: AsInner<Inner = YTypeRef> {
return Ok(());
}
if idx >= self.content_len() {
let content_len = self.content_len();
if content_len == 0 {
return Ok(());
}
if idx >= content_len {
return Err(JwstCodecError::IndexOutOfBound(idx));
}
@@ -204,34 +216,32 @@ pub(crate) trait ListType: AsInner<Inner = YTypeRef> {
Ok(())
}
fn remove_after(
ty: &mut YType,
store: &mut DocStore,
mut pos: ItemPosition,
len: u64,
) -> JwstCodecResult {
fn remove_after(ty: &mut YType, store: &mut DocStore, mut pos: ItemPosition, len: u64) -> JwstCodecResult {
pos.normalize(store)?;
let mut remaining = len;
while remaining > 0 {
if let Some(item) = pos.right.get() {
if item.indexable() {
let content_len = item.len();
if remaining < content_len {
store.split_node(item.id, remaining)?;
remaining = 0;
} else {
remaining -= content_len;
}
let item_ref = pos.right.clone();
let Some((indexable, content_len, item_id)) = item_ref.get().map(|item| (item.indexable(), item.len(), item.id))
else {
break;
};
store.delete_item(item, Some(ty));
if indexable {
if remaining < content_len {
store.split_node(item_id, remaining)?;
remaining = 0;
} else {
remaining -= content_len;
}
pos.forward();
} else {
break;
if let Some(item) = item_ref.get() {
store.delete_item(item, Some(ty));
}
}
pos.forward();
}
if let Some(markers) = &ty.markers {

View File

@@ -69,11 +69,7 @@ impl MarkerList {
}
// mark pos and push to the end of the linked list
fn mark_position(
list: &mut VecDeque<SearchMarker>,
ptr: Somr<Item>,
index: u64,
) -> Option<SearchMarker> {
fn mark_position(list: &mut VecDeque<SearchMarker>, ptr: Somr<Item>, index: u64) -> Option<SearchMarker> {
if list.len() >= MAX_SEARCH_MARKER {
let mut oldest_marker = list.pop_front().unwrap();
oldest_marker.overwrite_marker(ptr, index);
@@ -126,9 +122,7 @@ impl MarkerList {
let mut list = self.borrow_mut();
let marker = list
.iter_mut()
.min_by_key(|m| (index as i64 - m.index as i64).abs());
let marker = list.iter_mut().min_by_key(|m| (index as i64 - m.index as i64).abs());
let mut marker_index = marker.as_ref().map(|m| m.index).unwrap_or(0);
@@ -201,8 +195,7 @@ impl MarkerList {
match marker {
Some(marker)
if (marker.index as f64 - marker_index as f64).abs()
< parent.len as f64 / MAX_SEARCH_MARKER as f64 =>
if (marker.index as f64 - marker_index as f64).abs() < parent.len as f64 / MAX_SEARCH_MARKER as f64 =>
{
// adjust existing marker
marker.overwrite_marker(item_ptr, marker_index);

View File

@@ -2,13 +2,18 @@ use std::{collections::hash_map::Iter, rc::Rc};
use super::*;
use crate::{
JwstCodecResult,
doc::{AsInner, Node, Parent, YTypeRef},
impl_type, JwstCodecResult,
impl_type,
};
impl_type!(Map);
pub(crate) trait MapType: AsInner<Inner = YTypeRef> {
fn _id(&self) -> Option<Id> {
self.as_inner().ty().and_then(|ty| ty.item.get().map(|item| item.id))
}
fn _insert<V: Into<Value>>(&mut self, key: String, value: V) -> JwstCodecResult {
if let Some((mut store, mut ty)) = self.as_inner().write() {
let left = ty.map.get(&SmolStr::new(&key)).cloned();
@@ -54,12 +59,11 @@ pub(crate) trait MapType: AsInner<Inner = YTypeRef> {
}
fn _remove(&mut self, key: &str) {
if let Some((mut store, mut ty)) = self.as_inner().write() {
if let Some(item) = ty.map.get(key).cloned() {
if let Some(item) = item.get() {
store.delete_item(item, Some(&mut ty));
}
}
if let Some((mut store, mut ty)) = self.as_inner().write()
&& let Some(item) = ty.map.get(key).cloned()
&& let Some(item) = item.get()
{
store.delete_item(item, Some(&mut ty));
}
}
@@ -113,10 +117,10 @@ impl<'a> Iterator for EntriesInnerIterator<'a> {
fn next(&mut self) -> Option<Self::Item> {
if let Some(iter) = &mut self.iter {
for (k, v) in iter {
if let Some(item) = v.get() {
if !item.deleted() {
return Some((k.as_str(), item));
}
if let Some(item) = v.get()
&& !item.deleted()
{
return Some((k.as_str(), item));
}
}
@@ -154,6 +158,11 @@ impl<'a> Iterator for EntriesIterator<'a> {
impl MapType for Map {}
impl Map {
#[inline(always)]
pub fn id(&self) -> Option<Id> {
self._id()
}
#[inline(always)]
pub fn insert<V: Into<Value>>(&mut self, key: String, value: V) -> JwstCodecResult {
self._insert(key, value)
@@ -220,7 +229,7 @@ impl serde::Serialize for Map {
#[cfg(test)]
mod tests {
use super::*;
use crate::{loom_model, Any, Doc};
use crate::{Any, Doc, loom_model};
#[test]
fn test_map_basic() {
@@ -228,10 +237,7 @@ mod tests {
let doc = Doc::new();
let mut map = doc.get_or_create_map("map").unwrap();
map.insert("1".to_string(), "value").unwrap();
assert_eq!(
map.get("1").unwrap(),
Value::Any(Any::String("value".to_string()))
);
assert_eq!(map.get("1").unwrap(), Value::Any(Any::String("value".to_string())));
assert!(!map.contains_key("nonexistent_key"));
assert_eq!(map.len(), 1);
assert!(map.contains_key("1"));
@@ -252,10 +258,7 @@ mod tests {
let binary = doc.encode_update_v1().unwrap();
let new_doc = Doc::try_from_binary_v1(binary).unwrap();
let map = new_doc.get_or_create_map("map").unwrap();
assert_eq!(
map.get("1").unwrap(),
Value::Any(Any::String("value".to_string()))
);
assert_eq!(map.get("1").unwrap(), Value::Any(Any::String("value".to_string())));
assert_eq!(map.get("2").unwrap(), Value::Any(Any::False));
assert_eq!(map.len(), 2);
});
@@ -268,10 +271,7 @@ mod tests {
let mut map = doc.get_or_create_map("map").unwrap();
map.insert("1".to_string(), "value").unwrap();
map.insert("1".to_string(), "value2").unwrap();
assert_eq!(
map.get("1").unwrap(),
Value::Any(Any::String("value2".to_string()))
);
assert_eq!(map.get("1").unwrap(), Value::Any(Any::String("value2".to_string())));
assert_eq!(map.len(), 1);
});
}
@@ -290,14 +290,8 @@ mod tests {
{
let doc = Doc::try_from_binary_v1(binary).unwrap();
let map = doc.get_or_create_map("map").unwrap();
assert_eq!(
map.get("1").unwrap(),
Value::Any(Any::String("value1".to_string()))
);
assert_eq!(
map.get("2").unwrap(),
Value::Any(Any::String("value2".to_string()))
);
assert_eq!(map.get("1").unwrap(), Value::Any(Any::String("value1".to_string())));
assert_eq!(map.get("2").unwrap(), Value::Any(Any::String("value2".to_string())));
}
});
}

View File

@@ -5,7 +5,11 @@ mod text;
mod value;
mod xml;
use std::{collections::hash_map::Entry, sync::Weak};
use std::{
collections::hash_map::Entry,
hash::{Hash, Hasher},
sync::Weak,
};
pub use array::*;
use list::*;
@@ -19,8 +23,8 @@ use super::{
*,
};
use crate::{
sync::{Arc, RwLock, RwLockReadGuard, RwLockWriteGuard},
Item, JwstCodecError, JwstCodecResult,
sync::{Arc, RwLock, RwLockReadGuard, RwLockWriteGuard},
};
#[derive(Debug, Default)]
@@ -45,9 +49,7 @@ pub(crate) struct YTypeRef {
impl PartialEq for YType {
fn eq(&self, other: &Self) -> bool {
self.root_name == other.root_name
|| (self.start.is_some() && self.start == other.start)
|| self.map == other.map
self.root_name == other.root_name || (self.start.is_some() && self.start == other.start) || self.map == other.map
}
}
@@ -62,6 +64,14 @@ impl PartialEq for YTypeRef {
}
}
impl Eq for YTypeRef {}
impl Hash for YTypeRef {
fn hash<H: Hasher>(&self, state: &mut H) {
self.inner.ptr().hash(state);
}
}
impl YType {
pub fn new(kind: YTypeKind, tag_name: Option<String>) -> Self {
YType {
@@ -129,15 +139,11 @@ impl YTypeRef {
#[allow(dead_code)]
pub fn read(&self) -> Option<(RwLockReadGuard<'_, DocStore>, RwLockReadGuard<'_, YType>)> {
self
.store()
.and_then(|store| self.ty().map(|ty| (store, ty)))
self.store().and_then(|store| self.ty().map(|ty| (store, ty)))
}
pub fn write(&self) -> Option<(RwLockWriteGuard<'_, DocStore>, RwLockWriteGuard<'_, YType>)> {
self
.store_mut()
.and_then(|store| self.ty_mut().map(|ty| (store, ty)))
self.store_mut().and_then(|store| self.ty_mut().map(|ty| (store, ty)))
}
}
@@ -238,9 +244,7 @@ impl YTypeBuilder {
let ty_ref = ty.clone();
store
.dangling_types
.insert(ty.inner.ptr().as_ptr() as usize, ty);
store.dangling_types.insert(ty.inner.ptr().as_ptr() as usize, ty);
ty_ref
};
@@ -338,14 +342,10 @@ macro_rules! impl_type {
inner.set_kind(super::YTypeKind::$name)?;
Ok($name::new(value.clone()))
}
_ => Err($crate::JwstCodecError::TypeCastError(std::stringify!(
$name
))),
_ => Err($crate::JwstCodecError::TypeCastError(std::stringify!($name))),
}
} else {
Err($crate::JwstCodecError::TypeCastError(std::stringify!(
$name
)))
Err($crate::JwstCodecError::TypeCastError(std::stringify!($name)))
}
}
}

View File

@@ -1,9 +1,10 @@
use std::{collections::BTreeMap, fmt::Display};
use super::{list::ListType, AsInner};
use super::{AsInner, list::ListType};
use crate::{
Any, Content, JwstCodecError, JwstCodecResult,
doc::{DocStore, ItemRef, Node, Parent, Somr, YType, YTypeRef},
impl_type, Any, Content, JwstCodecError, JwstCodecResult,
impl_type,
};
impl_type!(Text);
@@ -85,21 +86,12 @@ impl Text {
Content::Json(values) => {
let converted = values
.iter()
.map(|value| {
value
.as_ref()
.map(|s| Any::String(s.clone()))
.unwrap_or(Any::Undefined)
})
.map(|value| value.as_ref().map(|s| Any::String(s.clone())).unwrap_or(Any::Undefined))
.collect::<Vec<_>>();
push_insert(&mut ops, TextInsert::Embed(converted), &attrs);
}
Content::Binary(value) => {
push_insert(
&mut ops,
TextInsert::Embed(vec![Any::Binary(value.clone())]),
&attrs,
);
push_insert(&mut ops, TextInsert::Embed(vec![Any::Binary(value.clone())]), &attrs);
}
_ => {}
}
@@ -121,13 +113,7 @@ impl Text {
let attrs = format.clone().unwrap_or_default();
match insert {
TextInsert::Text(text) => {
insert_text_content(
&mut store,
&mut ty,
&mut pos,
Content::String(text.clone()),
attrs,
)?;
insert_text_content(&mut store, &mut ty, &mut pos, Content::String(text.clone()), attrs)?;
}
TextInsert::Embed(values) => {
for value in values {
@@ -225,38 +211,29 @@ fn is_nullish(value: &Any) -> bool {
}
fn push_insert(ops: &mut Vec<TextDeltaOp>, insert: TextInsert, attrs: &TextAttributes) {
let format = if attrs.is_empty() {
None
} else {
Some(attrs.clone())
};
let format = if attrs.is_empty() { None } else { Some(attrs.clone()) };
if let Some(TextDeltaOp::Insert {
insert: TextInsert::Text(prev),
format: prev_format,
}) = ops.last_mut()
&& let TextInsert::Text(text) = insert
{
if let TextInsert::Text(text) = insert {
if prev_format.as_ref() == format.as_ref() {
prev.push_str(&text);
return;
}
ops.push(TextDeltaOp::Insert {
insert: TextInsert::Text(text),
format,
});
if prev_format.as_ref() == format.as_ref() {
prev.push_str(&text);
return;
}
ops.push(TextDeltaOp::Insert {
insert: TextInsert::Text(text),
format,
});
return;
}
ops.push(TextDeltaOp::Insert { insert, format });
}
fn advance_text_position(
store: &mut DocStore,
pos: &mut TextPosition,
mut remaining: u64,
) -> JwstCodecResult {
fn advance_text_position(store: &mut DocStore, pos: &mut TextPosition, mut remaining: u64) -> JwstCodecResult {
while remaining > 0 {
let Some(item) = pos.right.get() else {
return Err(JwstCodecError::IndexOutOfBound(pos.index + remaining));
@@ -311,16 +288,11 @@ fn minimize_attribute_changes(pos: &mut TextPosition, attrs: &TextAttributes) {
}
}
fn insert_item(
store: &mut DocStore,
ty: &mut YType,
pos: &mut TextPosition,
content: Content,
) -> JwstCodecResult {
if let Some(markers) = &ty.markers {
if content.countable() {
markers.update_marker_changes(pos.index, content.clock_len() as i64);
}
fn insert_item(store: &mut DocStore, ty: &mut YType, pos: &mut TextPosition, content: Content) -> JwstCodecResult {
if let Some(markers) = &ty.markers
&& content.countable()
{
markers.update_marker_changes(pos.index, content.clock_len() as i64);
}
let item = store.create_item(
@@ -383,14 +355,13 @@ fn insert_negated_attributes(
continue;
}
if let Content::Format { key, value } = &item.content {
if let Some(negated_value) = negated.get(key.as_str()) {
if negated_value == value {
negated.remove(key.as_str());
pos.forward();
continue;
}
}
if let Content::Format { key, value } = &item.content
&& let Some(negated_value) = negated.get(key.as_str())
&& negated_value == value
{
negated.remove(key.as_str());
pos.forward();
continue;
}
break;
@@ -488,12 +459,7 @@ fn format_text(
Ok(())
}
fn delete_text(
store: &mut DocStore,
ty: &mut YType,
pos: &mut TextPosition,
mut remaining: u64,
) -> JwstCodecResult {
fn delete_text(store: &mut DocStore, ty: &mut YType, pos: &mut TextPosition, mut remaining: u64) -> JwstCodecResult {
if remaining == 0 {
return Ok(());
}
@@ -501,19 +467,23 @@ fn delete_text(
let start = remaining;
while remaining > 0 {
let Some(item) = pos.right.get() else {
let item_ref = pos.right.clone();
let Some((indexable, item_len, item_id)) = item_ref.get().map(|item| (item.indexable(), item.len(), item.id))
else {
break;
};
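// The fields are copied out up front so no borrow of the item is held
// across the store mutations below; the item is re-fetched before deletion.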
if item.indexable() {
let item_len = item.len();
if indexable {
if remaining < item_len {
store.split_node(item.id, remaining)?;
store.split_node(item_id, remaining)?;
remaining = 0;
} else {
remaining -= item_len;
}
store.delete_item(item, Some(ty));
if let Some(item) = item_ref.get() {
store.delete_item(item, Some(ty));
}
}
pos.forward();
@@ -535,7 +505,7 @@ mod tests {
use super::{TextAttributes, TextDeltaOp, TextInsert};
#[cfg(not(loom))]
use crate::sync::{Arc, AtomicUsize, Ordering};
use crate::{loom_model, sync::thread, Any, Doc};
use crate::{Any, Doc, loom_model, sync::thread};
#[test]
fn test_manipulate_text() {
@@ -676,9 +646,7 @@ mod tests {
fn loom_parallel_ins_del_text() {
let seed = rand::rng().random();
let mut rand = ChaCha20Rng::seed_from_u64(seed);
let ranges = (0..20)
.map(|_| rand.random_range(0..16))
.collect::<Vec<_>>();
let ranges = (0..20).map(|_| rand.random_range(0..16)).collect::<Vec<_>>();
loom_model!({
let doc = Doc::new();

View File

@@ -2,7 +2,7 @@ use std::fmt::Display;
use super::*;
#[derive(Debug, PartialEq)]
#[derive(Debug, Clone, PartialEq)]
pub enum Value {
Any(Any),
Doc(Doc),
@@ -45,9 +45,7 @@ impl Value {
}
pub fn from_vec<T: Into<Any>>(el: Vec<T>) -> Self {
Value::Any(Any::Array(
el.into_iter().map(|item| item.into()).collect::<Vec<_>>(),
))
Value::Any(Any::Array(el.into_iter().map(|item| item.into()).collect::<Vec<_>>()))
}
}

View File

@@ -16,9 +16,7 @@ pub fn encode_update_as_message(update: Vec<u8>) -> JwstCodecResult<Vec<u8>> {
Ok(buffer)
}
pub fn merge_updates_v1<V: AsRef<[u8]>, I: IntoIterator<Item = V>>(
updates: I,
) -> JwstCodecResult<Update> {
pub fn merge_updates_v1<V: AsRef<[u8]>, I: IntoIterator<Item = V>>(updates: I) -> JwstCodecResult<Update> {
let updates = updates
.into_iter()
.map(Update::decode_v1)
@@ -26,3 +24,21 @@ pub fn merge_updates_v1<V: AsRef<[u8]>, I: IntoIterator<Item = V>>(
Ok(Update::merge(updates))
}
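// Usage sketch (hypothetical `a` and `b`, each a Vec<u8> produced by
// Doc::encode_update_v1):
//
//     let merged = merge_updates_v1([a, b])?;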
/// It tends to generate small numbers.
/// Since the client id is included in every crdt item, a small client id
/// helps to reduce the binary size.
///
/// NOTE: there is a ~36% probability that the number generated by this
/// function is greater than [u32::MAX].
pub fn prefer_small_random() -> u64 {
use rand::{distr::Distribution, rng};
use rand_distr::Exp;
let scale_factor = u16::MAX as f64;
let v: f64 = Exp::new(1.0 / scale_factor)
.map(|exp| exp.sample(&mut rng()))
.unwrap_or_else(|_| rand::random());
(v * scale_factor) as u64
}
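// A minimal sketch, assuming the same `rand`/`rand_distr` crates, that checks
// the note above: with scale u16::MAX the fraction of samples exceeding
// u32::MAX should land near exp(-1) ≈ 36.8%.
#[cfg(test)]
mod prefer_small_random_sketch {
    #[test]
    fn fraction_over_u32_max_is_about_one_over_e() {
        use rand::{distr::Distribution, rng};
        use rand_distr::Exp;

        let scale = u16::MAX as f64;
        let exp = Exp::new(1.0 / scale).expect("valid rate");
        let mut rng = rng();
        let trials = 100_000;
        let over = (0..trials)
            .filter(|_| (exp.sample(&mut rng) * scale) as u64 > u32::MAX as u64)
            .count();
        let fraction = over as f64 / trials as f64;
        // exp(-65537/65535) ≈ 0.3678; allow slack for sampling noise.
        assert!((fraction - 0.368).abs() < 0.02);
    }
}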

View File

@@ -6,18 +6,16 @@ mod sync;
pub use codec::*;
pub use doc::{
encode_awareness_as_message, encode_update_as_message, merge_updates_v1, Any, Array, Awareness,
AwarenessEvent, Client, ClientMap, Clock, CrdtRead, CrdtReader, CrdtWrite, CrdtWriter, Doc,
DocOptions, HashMap as AHashMap, HashMapExt, History, HistoryOptions, Id, Map, RawDecoder,
RawEncoder, StateVector, StoreHistory, Text, TextAttributes, TextDelta, TextDeltaOp, TextInsert,
Update, Value,
Any, Array, Awareness, AwarenessEvent, Batch, Client, ClientMap, Clock, CrdtRead, CrdtReader, CrdtWrite, CrdtWriter,
Doc, DocOptions, HashMap as AHashMap, HashMapExt, History, HistoryOptions, Id, Map, RawDecoder, RawEncoder,
StateVector, StoreHistory, Text, TextAttributes, TextDelta, TextDeltaOp, TextInsert, Update, Value, batch_commit,
encode_awareness_as_message, encode_update_as_message, merge_updates_v1,
};
pub(crate) use doc::{Content, Item};
use log::{debug, warn};
use nom::IResult;
pub use protocol::{
read_sync_message, write_sync_message, AwarenessState, AwarenessStates, DocMessage, SyncMessage,
SyncMessageScanner,
AwarenessState, AwarenessStates, DocMessage, SyncMessage, SyncMessageScanner, read_sync_message, write_sync_message,
};
use thiserror::Error;

View File

@@ -1,4 +1,4 @@
use nom::{multi::count, Parser};
use nom::{Parser, multi::count};
use super::*;
@@ -65,11 +65,7 @@ fn read_awareness_state(input: &[u8]) -> IResult<&[u8], (u64, AwarenessState)> {
Ok((tail, (client_id, AwarenessState { clock, content })))
}
fn write_awareness_state<W: Write>(
buffer: &mut W,
client_id: u64,
state: &AwarenessState,
) -> Result<(), IoError> {
fn write_awareness_state<W: Write>(buffer: &mut W, client_id: u64, state: &AwarenessState) -> Result<(), IoError> {
write_var_u64(buffer, client_id)?;
write_var_u64(buffer, state.clock)?;
write_var_string(buffer, state.content.clone())?;
@@ -118,14 +114,8 @@ mod tests {
];
let expected = HashMap::from([
(
1,
AwarenessState::new(5, String::from_utf8(vec![1]).unwrap()),
),
(
2,
AwarenessState::new(10, String::from_utf8(vec![2, 3]).unwrap()),
),
(1, AwarenessState::new(5, String::from_utf8(vec![1]).unwrap())),
(2, AwarenessState::new(10, String::from_utf8(vec![2, 3]).unwrap())),
(
5,
AwarenessState::new(5, String::from_utf8(vec![1, 2, 3, 4, 5]).unwrap()),

View File

@@ -5,10 +5,9 @@ use super::*;
#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
pub enum DocMessage {
// state vector
// TODO: temporarily skipped in the test, because yrs decoding needs to ensure that the update
// in step1 is the correct state vector binary and any data can be included in our
// implementation (we will ensure the correctness of encoding and decoding in the subsequent
// decoding process)
// TODO: temporarily skipped in the test, because yrs decoding needs to ensure that the update in step1 is the
// correct state vector binary and any data can be included in our implementation (we will ensure the
// correctness of encoding and decoding in the subsequent decoding process)
#[cfg_attr(test, proptest(skip))]
Step1(Vec<u8>),
// update

View File

@@ -8,16 +8,16 @@ use std::{
io::{Error as IoError, Write},
};
use awareness::{read_awareness, write_awareness};
pub use awareness::{AwarenessState, AwarenessStates};
use awareness::{read_awareness, write_awareness};
pub use doc::DocMessage;
use doc::{read_doc_message, write_doc_message};
use log::debug;
use nom::{
error::{Error, ErrorKind},
IResult,
error::{Error, ErrorKind},
};
pub use scanner::SyncMessageScanner;
pub use sync::{read_sync_message, write_sync_message, SyncMessage};
pub use sync::{SyncMessage, read_sync_message, write_sync_message};
use super::*;

View File

@@ -1,21 +1,21 @@
pub use std::sync::{Arc, Weak};
#[allow(unused)]
#[cfg(not(loom))]
pub(crate) use std::sync::{
atomic::{AtomicBool, AtomicU16, AtomicU32, AtomicU8, Ordering},
Mutex, RwLock, RwLockReadGuard, RwLockWriteGuard,
atomic::{AtomicBool, AtomicU8, AtomicU16, AtomicU32, Ordering},
};
pub use std::sync::{Arc, Weak};
#[cfg(all(test, not(loom)))]
pub(crate) use std::{
sync::{atomic::AtomicUsize, MutexGuard},
sync::{MutexGuard, atomic::AtomicUsize},
thread,
};
#[cfg(loom)]
pub(crate) use loom::{
sync::{
atomic::{AtomicBool, AtomicU16, AtomicU8, AtomicUsize, Ordering},
Mutex, MutexGuard, RwLock, RwLockReadGuard, RwLockWriteGuard,
atomic::{AtomicBool, AtomicU8, AtomicU16, AtomicU32, AtomicUsize, Ordering},
},
thread,
};

View File

@@ -1,2 +0,0 @@
*.node
.coverage

View File

@@ -1,20 +0,0 @@
[package]
authors = ["DarkSky <darksky2048@gmail.com>"]
edition = "2021"
license = "MIT"
name = "y-octo-node"
repository = "https://github.com/toeverything/y-octo"
version = "0.0.1"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
crate-type = ["cdylib"]
[dependencies]
anyhow = { workspace = true }
napi = { workspace = true, features = ["anyhow", "napi4"] }
napi-derive = { workspace = true }
y-octo = { workspace = true, features = ["events", "large_refs"] }
[build-dependencies]
napi-build = { workspace = true }

View File

@@ -1,3 +0,0 @@
fn main() {
napi_build::setup();
}

View File

@@ -1,47 +0,0 @@
/* auto-generated by NAPI-RS */
/* eslint-disable */
export declare class Doc {
constructor(clientId?: number | undefined | null)
get clientId(): number
get guid(): string
get keys(): Array<string>
getOrCreateArray(key: string): YArray
getOrCreateText(key: string): YText
getOrCreateMap(key: string): YMap
createArray(): YArray
createText(): YText
createMap(): YMap
applyUpdate(update: Uint8Array): void
encodeStateAsUpdateV1(state?: Uint8Array | undefined | null): Uint8Array
gc(): void
onUpdate(callback: (result: Uint8Array) => void): void
}
export declare class YArray {
constructor()
get length(): number
get isEmpty(): boolean
get<T = unknown>(index: number): T
insert(index: number, value: YArray | YMap | YText | boolean | number | string | Record<string, any> | null | undefined): void
remove(index: number, len: number): void
toJson(): JsArray
}
export declare class YMap {
get length(): number
get isEmpty(): boolean
get<T = unknown>(key: string): T
set(key: string, value: YArray | YMap | YText | boolean | number | string | Record<string, any> | null | undefined): void
remove(key: string): void
toJson(): object
}
export declare class YText {
constructor()
get len(): number
get isEmpty(): boolean
insert(index: number, str: string): void
remove(index: number, len: number): void
get length(): number
toString(): string
}

View File

@@ -1,783 +0,0 @@
// prettier-ignore
/* eslint-disable */
// @ts-nocheck
/* auto-generated by NAPI-RS */
const { readFileSync } = require('node:fs')
let nativeBinding = null;
const loadErrors = [];
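// musl detection: inspect /usr/bin/ldd, then the process report, then
// `ldd --version` output; only meaningful on Linux.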
const isMusl = () => {
let musl = false;
if (process.platform === 'linux') {
musl = isMuslFromFilesystem();
if (musl === null) {
musl = isMuslFromReport();
}
if (musl === null) {
musl = isMuslFromChildProcess();
}
}
return musl;
};
const isFileMusl = f => f.includes('libc.musl-') || f.includes('ld-musl-');
const isMuslFromFilesystem = () => {
try {
return readFileSync('/usr/bin/ldd', 'utf-8').includes('musl');
} catch {
return null;
}
};
const isMuslFromReport = () => {
let report = null;
if (typeof process.report?.getReport === 'function') {
process.report.excludeNetwork = true;
report = process.report.getReport();
}
if (!report) {
return null;
}
if (report.header && report.header.glibcVersionRuntime) {
return false;
}
if (Array.isArray(report.sharedObjects)) {
if (report.sharedObjects.some(isFileMusl)) {
return true;
}
}
return false;
};
const isMuslFromChildProcess = () => {
try {
return require('child_process')
.execSync('ldd --version', { encoding: 'utf8' })
.includes('musl');
} catch (e) {
// If we reach this case, we don't know whether the system is musl or not, so it's safer to fall back to false
return false;
}
};
function requireNative() {
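// Resolution order: an explicit NAPI_RS_NATIVE_LIBRARY_PATH, then a local
// ./y-octo.<platform>.node build, then the matching @y-octo/node-* package
// (optionally version-checked via NAPI_RS_ENFORCE_VERSION_CHECK).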
if (process.env.NAPI_RS_NATIVE_LIBRARY_PATH) {
try {
return require(process.env.NAPI_RS_NATIVE_LIBRARY_PATH);
} catch (err) {
loadErrors.push(err);
}
} else if (process.platform === 'android') {
if (process.arch === 'arm64') {
try {
return require('./y-octo.android-arm64.node');
} catch (e) {
loadErrors.push(e);
}
try {
const binding = require('@y-octo/node-android-arm64');
const bindingPackageVersion =
require('@y-octo/node-android-arm64/package.json').version;
if (
bindingPackageVersion !== '0.25.7' &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0'
) {
throw new Error(
`Native binding package version mismatch, expected 0.25.7 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`
);
}
return binding;
} catch (e) {
loadErrors.push(e);
}
} else if (process.arch === 'arm') {
try {
return require('./y-octo.android-arm-eabi.node');
} catch (e) {
loadErrors.push(e);
}
try {
const binding = require('@y-octo/node-android-arm-eabi');
const bindingPackageVersion =
require('@y-octo/node-android-arm-eabi/package.json').version;
if (
bindingPackageVersion !== '0.25.7' &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0'
) {
throw new Error(
`Native binding package version mismatch, expected 0.25.7 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`
);
}
return binding;
} catch (e) {
loadErrors.push(e);
}
} else {
loadErrors.push(
new Error(`Unsupported architecture on Android ${process.arch}`)
);
}
} else if (process.platform === 'win32') {
if (process.arch === 'x64') {
if (
process.config?.variables?.shlib_suffix === 'dll.a' ||
process.config?.variables?.node_target_type === 'shared_library'
) {
try {
return require('./y-octo.win32-x64-gnu.node');
} catch (e) {
loadErrors.push(e);
}
try {
const binding = require('@y-octo/node-win32-x64-gnu');
const bindingPackageVersion =
require('@y-octo/node-win32-x64-gnu/package.json').version;
if (
bindingPackageVersion !== '0.25.7' &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0'
) {
throw new Error(
`Native binding package version mismatch, expected 0.25.7 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`
);
}
return binding;
} catch (e) {
loadErrors.push(e);
}
} else {
try {
return require('./y-octo.win32-x64-msvc.node');
} catch (e) {
loadErrors.push(e);
}
try {
const binding = require('@y-octo/node-win32-x64-msvc');
const bindingPackageVersion =
require('@y-octo/node-win32-x64-msvc/package.json').version;
if (
bindingPackageVersion !== '0.25.7' &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0'
) {
throw new Error(
`Native binding package version mismatch, expected 0.25.7 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`
);
}
return binding;
} catch (e) {
loadErrors.push(e);
}
}
} else if (process.arch === 'ia32') {
try {
return require('./y-octo.win32-ia32-msvc.node');
} catch (e) {
loadErrors.push(e);
}
try {
const binding = require('@y-octo/node-win32-ia32-msvc');
const bindingPackageVersion =
require('@y-octo/node-win32-ia32-msvc/package.json').version;
if (
bindingPackageVersion !== '0.25.7' &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0'
) {
throw new Error(
`Native binding package version mismatch, expected 0.25.7 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`
);
}
return binding;
} catch (e) {
loadErrors.push(e);
}
} else if (process.arch === 'arm64') {
try {
return require('./y-octo.win32-arm64-msvc.node');
} catch (e) {
loadErrors.push(e);
}
try {
const binding = require('@y-octo/node-win32-arm64-msvc');
const bindingPackageVersion =
require('@y-octo/node-win32-arm64-msvc/package.json').version;
if (
bindingPackageVersion !== '0.25.7' &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0'
) {
throw new Error(
`Native binding package version mismatch, expected 0.25.7 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`
);
}
return binding;
} catch (e) {
loadErrors.push(e);
}
} else {
loadErrors.push(
new Error(`Unsupported architecture on Windows: ${process.arch}`)
);
}
} else if (process.platform === 'darwin') {
try {
return require('./y-octo.darwin-universal.node');
} catch (e) {
loadErrors.push(e);
}
try {
const binding = require('@y-octo/node-darwin-universal');
const bindingPackageVersion =
require('@y-octo/node-darwin-universal/package.json').version;
if (
bindingPackageVersion !== '0.25.7' &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0'
) {
throw new Error(
`Native binding package version mismatch, expected 0.25.7 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`
);
}
return binding;
} catch (e) {
loadErrors.push(e);
}
if (process.arch === 'x64') {
try {
return require('./y-octo.darwin-x64.node');
} catch (e) {
loadErrors.push(e);
}
try {
const binding = require('@y-octo/node-darwin-x64');
const bindingPackageVersion =
require('@y-octo/node-darwin-x64/package.json').version;
if (
bindingPackageVersion !== '0.25.7' &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0'
) {
throw new Error(
`Native binding package version mismatch, expected 0.25.7 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`
);
}
return binding;
} catch (e) {
loadErrors.push(e);
}
} else if (process.arch === 'arm64') {
try {
return require('./y-octo.darwin-arm64.node');
} catch (e) {
loadErrors.push(e);
}
try {
const binding = require('@y-octo/node-darwin-arm64');
const bindingPackageVersion =
require('@y-octo/node-darwin-arm64/package.json').version;
if (
bindingPackageVersion !== '0.25.7' &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0'
) {
throw new Error(
`Native binding package version mismatch, expected 0.25.7 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`
);
}
return binding;
} catch (e) {
loadErrors.push(e);
}
} else {
loadErrors.push(
new Error(`Unsupported architecture on macOS: ${process.arch}`)
);
}
} else if (process.platform === 'freebsd') {
if (process.arch === 'x64') {
try {
return require('./y-octo.freebsd-x64.node');
} catch (e) {
loadErrors.push(e);
}
try {
const binding = require('@y-octo/node-freebsd-x64');
const bindingPackageVersion =
require('@y-octo/node-freebsd-x64/package.json').version;
if (
bindingPackageVersion !== '0.25.7' &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0'
) {
throw new Error(
`Native binding package version mismatch, expected 0.25.7 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`
);
}
return binding;
} catch (e) {
loadErrors.push(e);
}
} else if (process.arch === 'arm64') {
try {
return require('./y-octo.freebsd-arm64.node');
} catch (e) {
loadErrors.push(e);
}
try {
const binding = require('@y-octo/node-freebsd-arm64');
const bindingPackageVersion =
require('@y-octo/node-freebsd-arm64/package.json').version;
if (
bindingPackageVersion !== '0.25.7' &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0'
) {
throw new Error(
`Native binding package version mismatch, expected 0.25.7 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`
);
}
return binding;
} catch (e) {
loadErrors.push(e);
}
} else {
loadErrors.push(
new Error(`Unsupported architecture on FreeBSD: ${process.arch}`)
);
}
} else if (process.platform === 'linux') {
if (process.arch === 'x64') {
if (isMusl()) {
try {
return require('./y-octo.linux-x64-musl.node');
} catch (e) {
loadErrors.push(e);
}
try {
const binding = require('@y-octo/node-linux-x64-musl');
const bindingPackageVersion =
require('@y-octo/node-linux-x64-musl/package.json').version;
if (
bindingPackageVersion !== '0.25.7' &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0'
) {
throw new Error(
`Native binding package version mismatch, expected 0.25.7 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`
);
}
return binding;
} catch (e) {
loadErrors.push(e);
}
} else {
try {
return require('./y-octo.linux-x64-gnu.node');
} catch (e) {
loadErrors.push(e);
}
try {
const binding = require('@y-octo/node-linux-x64-gnu');
const bindingPackageVersion =
require('@y-octo/node-linux-x64-gnu/package.json').version;
if (
bindingPackageVersion !== '0.25.7' &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0'
) {
throw new Error(
`Native binding package version mismatch, expected 0.25.7 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`
);
}
return binding;
} catch (e) {
loadErrors.push(e);
}
}
} else if (process.arch === 'arm64') {
if (isMusl()) {
try {
return require('./y-octo.linux-arm64-musl.node');
} catch (e) {
loadErrors.push(e);
}
try {
const binding = require('@y-octo/node-linux-arm64-musl');
const bindingPackageVersion =
require('@y-octo/node-linux-arm64-musl/package.json').version;
if (
bindingPackageVersion !== '0.25.7' &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0'
) {
throw new Error(
`Native binding package version mismatch, expected 0.25.7 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`
);
}
return binding;
} catch (e) {
loadErrors.push(e);
}
} else {
try {
return require('./y-octo.linux-arm64-gnu.node');
} catch (e) {
loadErrors.push(e);
}
try {
const binding = require('@y-octo/node-linux-arm64-gnu');
const bindingPackageVersion =
require('@y-octo/node-linux-arm64-gnu/package.json').version;
if (
bindingPackageVersion !== '0.25.7' &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0'
) {
throw new Error(
`Native binding package version mismatch, expected 0.25.7 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`
);
}
return binding;
} catch (e) {
loadErrors.push(e);
}
}
} else if (process.arch === 'arm') {
if (isMusl()) {
try {
return require('./y-octo.linux-arm-musleabihf.node');
} catch (e) {
loadErrors.push(e);
}
try {
const binding = require('@y-octo/node-linux-arm-musleabihf');
const bindingPackageVersion =
require('@y-octo/node-linux-arm-musleabihf/package.json').version;
if (
bindingPackageVersion !== '0.25.7' &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0'
) {
throw new Error(
`Native binding package version mismatch, expected 0.25.7 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`
);
}
return binding;
} catch (e) {
loadErrors.push(e);
}
} else {
try {
return require('./y-octo.linux-arm-gnueabihf.node');
} catch (e) {
loadErrors.push(e);
}
try {
const binding = require('@y-octo/node-linux-arm-gnueabihf');
const bindingPackageVersion =
require('@y-octo/node-linux-arm-gnueabihf/package.json').version;
if (
bindingPackageVersion !== '0.25.7' &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0'
) {
throw new Error(
`Native binding package version mismatch, expected 0.25.7 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`
);
}
return binding;
} catch (e) {
loadErrors.push(e);
}
}
} else if (process.arch === 'loong64') {
if (isMusl()) {
try {
return require('./y-octo.linux-loong64-musl.node');
} catch (e) {
loadErrors.push(e);
}
try {
const binding = require('@y-octo/node-linux-loong64-musl');
const bindingPackageVersion =
require('@y-octo/node-linux-loong64-musl/package.json').version;
if (
bindingPackageVersion !== '0.25.7' &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0'
) {
throw new Error(
`Native binding package version mismatch, expected 0.25.7 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`
);
}
return binding;
} catch (e) {
loadErrors.push(e);
}
} else {
try {
return require('./y-octo.linux-loong64-gnu.node');
} catch (e) {
loadErrors.push(e);
}
try {
const binding = require('@y-octo/node-linux-loong64-gnu');
const bindingPackageVersion =
require('@y-octo/node-linux-loong64-gnu/package.json').version;
if (
bindingPackageVersion !== '0.25.7' &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0'
) {
throw new Error(
`Native binding package version mismatch, expected 0.25.7 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`
);
}
return binding;
} catch (e) {
loadErrors.push(e);
}
}
} else if (process.arch === 'riscv64') {
if (isMusl()) {
try {
return require('./y-octo.linux-riscv64-musl.node');
} catch (e) {
loadErrors.push(e);
}
try {
const binding = require('@y-octo/node-linux-riscv64-musl');
const bindingPackageVersion =
require('@y-octo/node-linux-riscv64-musl/package.json').version;
if (
bindingPackageVersion !== '0.25.7' &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0'
) {
throw new Error(
`Native binding package version mismatch, expected 0.25.7 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`
);
}
return binding;
} catch (e) {
loadErrors.push(e);
}
} else {
try {
return require('./y-octo.linux-riscv64-gnu.node');
} catch (e) {
loadErrors.push(e);
}
try {
const binding = require('@y-octo/node-linux-riscv64-gnu');
const bindingPackageVersion =
require('@y-octo/node-linux-riscv64-gnu/package.json').version;
if (
bindingPackageVersion !== '0.25.7' &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0'
) {
throw new Error(
`Native binding package version mismatch, expected 0.25.7 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`
);
}
return binding;
} catch (e) {
loadErrors.push(e);
}
}
} else if (process.arch === 'ppc64') {
try {
return require('./y-octo.linux-ppc64-gnu.node');
} catch (e) {
loadErrors.push(e);
}
try {
const binding = require('@y-octo/node-linux-ppc64-gnu');
const bindingPackageVersion =
require('@y-octo/node-linux-ppc64-gnu/package.json').version;
if (
bindingPackageVersion !== '0.25.7' &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0'
) {
throw new Error(
`Native binding package version mismatch, expected 0.25.7 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`
);
}
return binding;
} catch (e) {
loadErrors.push(e);
}
} else if (process.arch === 's390x') {
try {
return require('./y-octo.linux-s390x-gnu.node');
} catch (e) {
loadErrors.push(e);
}
try {
const binding = require('@y-octo/node-linux-s390x-gnu');
const bindingPackageVersion =
require('@y-octo/node-linux-s390x-gnu/package.json').version;
if (
bindingPackageVersion !== '0.25.7' &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0'
) {
throw new Error(
`Native binding package version mismatch, expected 0.25.7 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`
);
}
return binding;
} catch (e) {
loadErrors.push(e);
}
} else {
loadErrors.push(
new Error(`Unsupported architecture on Linux: ${process.arch}`)
);
}
} else if (process.platform === 'openharmony') {
if (process.arch === 'arm64') {
try {
return require('./y-octo.openharmony-arm64.node');
} catch (e) {
loadErrors.push(e);
}
try {
const binding = require('@y-octo/node-openharmony-arm64');
const bindingPackageVersion =
require('@y-octo/node-openharmony-arm64/package.json').version;
if (
bindingPackageVersion !== '0.25.7' &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0'
) {
throw new Error(
`Native binding package version mismatch, expected 0.25.7 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`
);
}
return binding;
} catch (e) {
loadErrors.push(e);
}
} else if (process.arch === 'x64') {
try {
return require('./y-octo.openharmony-x64.node');
} catch (e) {
loadErrors.push(e);
}
try {
const binding = require('@y-octo/node-openharmony-x64');
const bindingPackageVersion =
require('@y-octo/node-openharmony-x64/package.json').version;
if (
bindingPackageVersion !== '0.25.7' &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0'
) {
throw new Error(
`Native binding package version mismatch, expected 0.25.7 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`
);
}
return binding;
} catch (e) {
loadErrors.push(e);
}
} else if (process.arch === 'arm') {
try {
return require('./y-octo.openharmony-arm.node');
} catch (e) {
loadErrors.push(e);
}
try {
const binding = require('@y-octo/node-openharmony-arm');
const bindingPackageVersion =
require('@y-octo/node-openharmony-arm/package.json').version;
if (
bindingPackageVersion !== '0.25.7' &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK &&
process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0'
) {
throw new Error(
`Native binding package version mismatch, expected 0.25.7 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`
);
}
return binding;
} catch (e) {
loadErrors.push(e);
}
} else {
loadErrors.push(
new Error(`Unsupported architecture on OpenHarmony: ${process.arch}`)
);
}
} else {
loadErrors.push(
new Error(
`Unsupported OS: ${process.platform}, architecture: ${process.arch}`
)
);
}
}
nativeBinding = requireNative();
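// WASI fallback: attempted when no native binding could be loaded, or
// unconditionally when NAPI_RS_FORCE_WASI is set.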
if (!nativeBinding || process.env.NAPI_RS_FORCE_WASI) {
let wasiBinding = null;
let wasiBindingError = null;
try {
wasiBinding = require('./y-octo.wasi.cjs');
nativeBinding = wasiBinding;
} catch (err) {
if (process.env.NAPI_RS_FORCE_WASI) {
wasiBindingError = err;
}
}
if (!nativeBinding) {
try {
wasiBinding = require('@y-octo/node-wasm32-wasi');
nativeBinding = wasiBinding;
} catch (err) {
if (process.env.NAPI_RS_FORCE_WASI) {
wasiBindingError.cause = err;
loadErrors.push(err);
}
}
}
if (process.env.NAPI_RS_FORCE_WASI === 'error' && !wasiBinding) {
const error = new Error(
'WASI binding not found and NAPI_RS_FORCE_WASI is set to error'
);
error.cause = wasiBindingError;
throw error;
}
}
if (!nativeBinding) {
if (loadErrors.length > 0) {
throw new Error(
`Cannot find native binding. ` +
`npm has a bug related to optional dependencies (https://github.com/npm/cli/issues/4828). ` +
'Please try `npm i` again after removing both package-lock.json and node_modules directory.',
{
cause: loadErrors.reduce((err, cur) => {
cur.cause = err;
return cur;
}),
}
);
}
throw new Error(`Failed to load native binding`);
}
module.exports = nativeBinding;
module.exports.Doc = nativeBinding.Doc;
module.exports.YArray = nativeBinding.YArray;
module.exports.YMap = nativeBinding.YMap;
module.exports.YText = nativeBinding.YText;

View File

@@ -1,72 +0,0 @@
{
"name": "@y-octo/node",
"private": true,
"main": "index.js",
"types": "index.d.ts",
"napi": {
"binaryName": "y-octo",
"targets": [
"x86_64-unknown-linux-gnu",
"x86_64-apple-darwin",
"x86_64-pc-windows-msvc",
"aarch64-apple-darwin",
"aarch64-pc-windows-msvc",
"aarch64-unknown-linux-gnu",
"x86_64-unknown-linux-musl",
"aarch64-unknown-linux-musl"
],
"ts": {
"constEnum": false
}
},
"license": "MIT",
"devDependencies": {
"@napi-rs/cli": "3.0.0",
"@types/node": "^22.14.1",
"@types/prompts": "^2.4.9",
"c8": "^10.1.3",
"prompts": "^2.4.2",
"ts-node": "^10.9.2",
"typescript": "^5.8.3",
"yjs": "^13.6.27"
},
"engines": {
"node": ">= 10"
},
"scripts": {
"artifacts": "napi artifacts",
"build": "napi build --platform --release --no-const-enum",
"build:debug": "napi build --platform --no-const-enum",
"universal": "napi universal",
"test": "NODE_NO_WARNINGS=1 node ./scripts/run-test.mts all",
"test:watch": "yarn exec ts-node-esm ./scripts/run-test.ts all --watch",
"test:coverage": "NODE_OPTIONS=\"--loader ts-node/esm\" c8 node ./scripts/run-test.mts all",
"version": "napi version"
},
"version": "0.25.7",
"sharedConfig": {
"nodeArgs": [
"--loader",
"ts-node/esm",
"--es-module-specifier-resolution=node"
],
"env": {
"TS_NODE_TRANSPILE_ONLY": "1",
"TS_NODE_PROJECT": "./tsconfig.json",
"NODE_ENV": "development",
"DEBUG": "napi:*"
}
},
"c8": {
"reporter": [
"text",
"lcov"
],
"report-dir": ".coverage",
"exclude": [
"scripts",
"node_modules",
"**/*.spec.ts"
]
}
}

View File

@@ -1,78 +0,0 @@
#!/usr/bin/env ts-node-esm
import { resolve } from 'node:path';
import { spawn } from 'node:child_process';
import { readdir } from 'node:fs/promises';
import * as process from 'node:process';
import { fileURLToPath } from 'node:url';
import prompts from 'prompts';
import pkg from '../package.json' with { type: 'json' };
const root = fileURLToPath(new URL('..', import.meta.url));
const testDir = resolve(root, 'tests');
const files = await readdir(testDir);
const watchMode = process.argv.includes('--watch');
const sharedArgs = [
...pkg.sharedConfig.nodeArgs,
'--test',
watchMode ? '--watch' : '',
];
const env = {
...pkg.sharedConfig.env,
PATH: process.env.PATH,
NODE_ENV: 'test',
NODE_NO_WARNINGS: '1',
};
if (process.argv[2] === 'all') {
const cp = spawn(
'node',
[...sharedArgs, ...files.map(f => resolve(testDir, f))],
{
cwd: root,
env,
stdio: 'inherit',
shell: true,
}
);
cp.on('exit', code => {
process.exit(code ?? 0);
});
} else {
const result = await prompts([
{
type: 'select',
name: 'file',
message: 'Select a file to run',
choices: files.map(file => ({
title: file,
value: file,
})),
initial: 1,
},
]);
const target = resolve(testDir, result.file);
const cp = spawn(
'node',
[
...sharedArgs,
'--test-reporter=spec',
'--test-reporter-destination=stdout',
target,
],
{
cwd: root,
env,
stdio: 'inherit',
shell: true,
}
);
cp.on('exit', code => {
process.exit(code ?? 0);
});
}

View File

@@ -1,163 +0,0 @@
use napi::{
bindgen_prelude::{Array as JsArray, Env, JsObjectValue, JsValue, Null, ToNapiValue, Unknown},
ValueType,
};
use y_octo::{Any, Array, Value};
use super::*;
#[napi]
pub struct YArray {
pub(crate) array: Array,
}
#[napi]
impl YArray {
#[allow(clippy::new_without_default)]
#[napi(constructor)]
pub fn new() -> Self {
unimplemented!()
}
pub(crate) fn inner_new(array: Array) -> Self {
Self { array }
}
#[napi(getter)]
pub fn length(&self) -> i64 {
self.array.len() as i64
}
#[napi(getter)]
pub fn is_empty(&self) -> bool {
self.array.is_empty()
}
#[napi(ts_generic_types = "T = unknown", ts_return_type = "T")]
pub fn get<'a>(&'a self, env: &'a Env, index: i64) -> Result<MixedYType<'a>> {
if let Some(value) = self.array.get(index as u64) {
match value {
Value::Any(any) => get_js_unknown_from_any(env, any).map(MixedYType::D),
Value::Array(array) => Ok(MixedYType::A(YArray::inner_new(array))),
Value::Map(map) => Ok(MixedYType::B(YMap::inner_new(map))),
Value::Text(text) => Ok(MixedYType::C(YText::inner_new(text))),
_ => Null.into_unknown(env).map(MixedYType::D),
}
.map_err(anyhow::Error::from)
} else {
Ok(MixedYType::D(Null.into_unknown(env)?))
}
}
#[napi(
ts_args_type = "index: number, value: YArray | YMap | YText | boolean | number | string | \
Record<string, any> | null | undefined"
)]
pub fn insert(&mut self, index: i64, value: MixedRefYType) -> Result<()> {
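// Y types are inserted by cloning their handles; plain JS values are
// coerced according to their ValueType and stored as Any. BigInt, Symbol,
// Function and External values are rejected.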
match value {
MixedRefYType::A(array) => self
.array
.insert(index as u64, array.array.clone())
.map_err(anyhow::Error::from),
MixedRefYType::B(map) => self
.array
.insert(index as u64, map.map.clone())
.map_err(anyhow::Error::from),
MixedRefYType::C(text) => self
.array
.insert(index as u64, text.text.clone())
.map_err(anyhow::Error::from),
MixedRefYType::D(unknown) => match unknown.get_type() {
Ok(value_type) => match value_type {
ValueType::Undefined | ValueType::Null => self
.array
.insert(index as u64, Any::Null)
.map_err(anyhow::Error::from),
ValueType::Boolean => match unsafe { unknown.cast::<bool>() } {
Ok(boolean) => self
.array
.insert(index as u64, boolean)
.map_err(anyhow::Error::from),
Err(e) => Err(anyhow::Error::new(e).context("Failed to coerce value to boolean")),
},
ValueType::Number => match unknown.coerce_to_number().and_then(|v| v.get_double()) {
Ok(number) => self
.array
.insert(index as u64, number)
.map_err(anyhow::Error::from),
Err(e) => Err(anyhow::Error::new(e).context("Failed to coerce value to number")),
},
ValueType::String => {
match unknown
.coerce_to_string()
.and_then(|v| v.into_utf8())
.and_then(|s| s.as_str().map(|s| s.to_string()))
{
Ok(string) => self
.array
.insert(index as u64, string)
.map_err(anyhow::Error::from),
Err(e) => Err(anyhow::Error::new(e).context("Failed to coerce value to string")),
}
}
ValueType::Object => match unknown
.coerce_to_object()
.and_then(|o| o.get_array_length().map(|l| (o, l)))
{
Ok((object, length)) => {
for i in 0..length {
if let Ok(any) = object
.get_element::<Unknown>(i)
.and_then(get_any_from_js_unknown)
{
self
.array
.insert(index as u64 + i as u64, Value::Any(any))
.map_err(anyhow::Error::from)?;
}
}
Ok(())
}
Err(e) => Err(anyhow::Error::new(e).context("Failed to coerce value to object")),
},
ValueType::BigInt => Err(anyhow::Error::msg("BigInt values are not supported")),
ValueType::Symbol => Err(anyhow::Error::msg("Symbol values are not supported")),
ValueType::Function => Err(anyhow::Error::msg("Function values are not supported")),
ValueType::External => Err(anyhow::Error::msg("External values are not supported")),
ValueType::Unknown => Err(anyhow::Error::msg("Unknown values are not supported")),
},
Err(e) => Err(anyhow::Error::from(e)),
},
}
}
#[napi]
pub fn remove(&mut self, index: i64, len: i64) -> Result<()> {
self
.array
.remove(index as u64, len as u64)
.map_err(anyhow::Error::from)
}
#[napi]
pub fn to_json<'env>(&'env self, env: &'env Env) -> Result<JsArray<'env>> {
let mut js_array = env.create_array(0)?;
for value in self.array.iter() {
js_array.insert(get_js_unknown_from_value(env, value)?)?;
}
Ok(js_array)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_array_init() {
let doc = Doc::new(None);
let array = doc.get_or_create_array("array".into()).unwrap();
assert_eq!(array.length(), 0);
}
}

View File

@@ -1,176 +0,0 @@
use napi::{
bindgen_prelude::Uint8Array,
threadsafe_function::{ThreadsafeFunction, ThreadsafeFunctionCallMode},
};
use y_octo::{CrdtRead, Doc as YDoc, History, RawDecoder, StateVector};
use super::*;
#[napi]
pub struct Doc {
doc: YDoc,
}
#[napi]
impl Doc {
#[napi(constructor)]
pub fn new(client_id: Option<i64>) -> Self {
Self {
doc: if let Some(client_id) = client_id {
YDoc::with_client(client_id as u64)
} else {
YDoc::default()
},
}
}
#[napi(getter)]
pub fn client_id(&self) -> i64 {
self.doc.client() as i64
}
#[napi(getter)]
pub fn guid(&self) -> &str {
self.doc.guid()
}
#[napi(getter)]
pub fn keys(&self) -> Vec<String> {
self.doc.keys()
}
#[napi]
pub fn get_or_create_array(&self, key: String) -> Result<YArray> {
self
.doc
.get_or_create_array(key)
.map(YArray::inner_new)
.map_err(anyhow::Error::from)
}
#[napi]
pub fn get_or_create_text(&self, key: String) -> Result<YText> {
self
.doc
.get_or_create_text(key)
.map(YText::inner_new)
.map_err(anyhow::Error::from)
}
#[napi]
pub fn get_or_create_map(&self, key: String) -> Result<YMap> {
self
.doc
.get_or_create_map(key)
.map(YMap::inner_new)
.map_err(anyhow::Error::from)
}
#[napi]
pub fn create_array(&self) -> Result<YArray> {
self
.doc
.create_array()
.map(YArray::inner_new)
.map_err(anyhow::Error::from)
}
#[napi]
pub fn create_text(&self) -> Result<YText> {
self
.doc
.create_text()
.map(YText::inner_new)
.map_err(anyhow::Error::from)
}
#[napi]
pub fn create_map(&self) -> Result<YMap> {
self
.doc
.create_map()
.map(YMap::inner_new)
.map_err(anyhow::Error::from)
}
#[napi]
pub fn apply_update(&mut self, update: &[u8]) -> Result<()> {
self.doc.apply_update_from_binary_v1(update)?;
Ok(())
}
#[napi]
pub fn encode_state_as_update_v1(&self, state: Option<&[u8]>) -> Result<Uint8Array> {
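// With a state vector, decode it and encode only the missing diff;
// without one, encode the whole document as a single update.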
let result = match state {
Some(state) => {
let mut decoder = RawDecoder::new(state);
let state = StateVector::read(&mut decoder)?;
self.doc.encode_state_as_update_v1(&state)
}
None => self.doc.encode_update_v1(),
};
result.map(|v| v.into()).map_err(anyhow::Error::from)
}
#[napi]
pub fn gc(&self) -> Result<()> {
self.doc.gc().map_err(anyhow::Error::from)
}
#[napi(ts_args_type = "callback: (result: Uint8Array) => void")]
pub fn on_update(&mut self, callback: ThreadsafeFunction<Uint8Array>) -> Result<()> {
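// The callback is wrapped in a threadsafe function so update events can be
// delivered to JS from whichever thread applies the update.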
let callback = move |update: &[u8], _h: &[History]| {
callback.call(
Ok(update.to_vec().into()),
ThreadsafeFunctionCallMode::Blocking,
);
};
self.doc.subscribe(Box::new(callback));
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_doc_client() {
let client_id = 1;
let doc = Doc::new(Some(client_id));
assert_eq!(doc.client_id(), 1);
}
#[test]
fn test_doc_guid() {
let doc = Doc::new(None);
assert_eq!(doc.guid().len(), 21);
}
#[test]
fn test_create_array() {
let doc = Doc::new(None);
let array = doc.get_or_create_array("array".into()).unwrap();
assert_eq!(array.length(), 0);
}
#[test]
fn test_create_text() {
let doc = Doc::new(None);
let text = doc.get_or_create_text("text".into()).unwrap();
assert_eq!(text.len(), 0);
}
#[test]
fn test_keys() {
let doc = Doc::new(None);
doc.get_or_create_array("array".into()).unwrap();
doc.get_or_create_text("text".into()).unwrap();
doc.get_or_create_map("map".into()).unwrap();
let mut keys = doc.keys();
keys.sort();
assert_eq!(keys, vec!["array", "map", "text"]);
}
}

View File

@@ -1,17 +0,0 @@
use anyhow::Result;
use napi_derive::napi;
mod array;
mod doc;
mod map;
mod text;
mod utils;
pub use array::YArray;
pub use doc::Doc;
pub use map::YMap;
pub use text::YText;
use utils::{
get_any_from_js_object, get_any_from_js_unknown, get_js_unknown_from_any,
get_js_unknown_from_value, MixedRefYType, MixedYType,
};

View File

@@ -1,128 +0,0 @@
use napi::bindgen_prelude::{Env, JsValue, Null, Object, ToNapiValue, ValueType};
use y_octo::{Any, Map, Value};
use super::*;
#[napi]
pub struct YMap {
pub(crate) map: Map,
}
#[napi]
impl YMap {
pub(crate) fn inner_new(map: Map) -> Self {
Self { map }
}
#[napi(getter)]
pub fn length(&self) -> i64 {
self.map.len() as i64
}
#[napi(getter)]
pub fn is_empty(&self) -> bool {
self.map.is_empty()
}
#[napi(ts_generic_types = "T = unknown", ts_return_type = "T")]
pub fn get<'a>(&'a self, env: &'a Env, key: String) -> Result<MixedYType<'a>> {
if let Some(value) = self.map.get(&key) {
match value {
Value::Any(any) => get_js_unknown_from_any(env, any).map(MixedYType::D),
Value::Array(array) => Ok(MixedYType::A(YArray::inner_new(array))),
Value::Map(map) => Ok(MixedYType::B(YMap::inner_new(map))),
Value::Text(text) => Ok(MixedYType::C(YText::inner_new(text))),
_ => Null.into_unknown(env).map(MixedYType::D),
}
.map_err(anyhow::Error::from)
} else {
Ok(MixedYType::D(Null.into_unknown(env)?))
}
}
#[napi(
ts_args_type = "key: string, value: YArray | YMap | YText | boolean | number | string | \
Record<string, any> | null | undefined"
)]
pub fn set(&mut self, key: String, value: MixedRefYType) -> Result<()> {
match value {
MixedRefYType::A(array) => self
.map
.insert(key, array.array.clone())
.map_err(anyhow::Error::from),
MixedRefYType::B(map) => self
.map
.insert(key, map.map.clone())
.map_err(anyhow::Error::from),
MixedRefYType::C(text) => self
.map
.insert(key, text.text.clone())
.map_err(anyhow::Error::from),
MixedRefYType::D(unknown) => match unknown.get_type() {
Ok(value_type) => match value_type {
ValueType::Undefined | ValueType::Null => {
self.map.insert(key, Any::Null).map_err(anyhow::Error::from)
}
ValueType::Boolean => match unsafe { unknown.cast::<bool>() } {
Ok(boolean) => self.map.insert(key, boolean).map_err(anyhow::Error::from),
Err(e) => Err(anyhow::Error::from(e).context("Failed to coerce value to boolean")),
},
ValueType::Number => match unknown.coerce_to_number().and_then(|v| v.get_double()) {
Ok(number) => self.map.insert(key, number).map_err(anyhow::Error::from),
Err(e) => Err(anyhow::Error::from(e).context("Failed to coerce value to number")),
},
ValueType::String => {
match unknown
.coerce_to_string()
.and_then(|v| v.into_utf8())
.and_then(|s| s.as_str().map(|s| s.to_string()))
{
Ok(string) => self.map.insert(key, string).map_err(anyhow::Error::from),
Err(e) => Err(anyhow::Error::from(e).context("Failed to coerce value to string")),
}
}
ValueType::Object => match unknown.coerce_to_object().and_then(get_any_from_js_object) {
Ok(any) => self
.map
.insert(key, Value::Any(any))
.map_err(anyhow::Error::from),
Err(e) => Err(anyhow::Error::from(e).context("Failed to coerce value to object")),
},
ValueType::BigInt => Err(anyhow::Error::msg("BigInt values are not supported")),
ValueType::Symbol => Err(anyhow::Error::msg("Symbol values are not supported")),
ValueType::Function => Err(anyhow::Error::msg("Function values are not supported")),
ValueType::External => Err(anyhow::Error::msg("External values are not supported")),
ValueType::Unknown => Err(anyhow::Error::msg("Unknown values are not supported")),
},
Err(e) => Err(anyhow::Error::from(e)),
},
}
}
#[napi]
pub fn remove(&mut self, key: String) {
self.map.remove(&key);
}
#[napi]
pub fn to_json(&self, env: Env) -> Result<Object<'_>> {
let mut js_object = Object::new(&env)?;
for (key, value) in self.map.iter() {
js_object.set(key, get_js_unknown_from_value(&env, value))?;
}
Ok(js_object)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_map_init() {
let doc = Doc::new(None);
let text = doc.get_or_create_map("map".into()).unwrap();
assert_eq!(text.length(), 0);
}
}

View File

@@ -1,82 +0,0 @@
use y_octo::Text;
use super::*;
#[napi]
pub struct YText {
pub(crate) text: Text,
}
#[napi]
impl YText {
#[allow(clippy::new_without_default)]
#[napi(constructor)]
pub fn new() -> Self {
unimplemented!()
}
pub(crate) fn inner_new(text: Text) -> Self {
Self { text }
}
#[napi(getter)]
pub fn len(&self) -> i64 {
self.text.len() as i64
}
#[napi(getter)]
pub fn is_empty(&self) -> bool {
self.text.is_empty()
}
#[napi]
pub fn insert(&mut self, index: i64, str: String) -> Result<()> {
self
.text
.insert(index as u64, str)
.map_err(anyhow::Error::from)
}
#[napi]
pub fn remove(&mut self, index: i64, len: i64) -> Result<()> {
self
.text
.remove(index as u64, len as u64)
.map_err(anyhow::Error::from)
}
#[napi(getter)]
pub fn length(&self) -> i64 {
self.text.len() as i64
}
#[allow(clippy::inherent_to_string)]
#[napi]
pub fn to_string(&self) -> String {
self.text.to_string()
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_text_init() {
let doc = Doc::new(None);
let text = doc.get_or_create_text("text".into()).unwrap();
assert_eq!(text.len(), 0);
}
#[test]
fn test_text_edit() {
let doc = Doc::new(None);
let mut text = doc.get_or_create_text("text".into()).unwrap();
text.insert(0, "hello".into()).unwrap();
assert_eq!(text.to_string(), "hello");
text.insert(5, " world".into()).unwrap();
assert_eq!(text.to_string(), "hello world");
text.remove(5, 6).unwrap();
assert_eq!(text.to_string(), "hello");
}
}

View File

@@ -1,122 +0,0 @@
use napi::bindgen_prelude::{
Array, Either4, Env, Error, External, JsObjectValue, JsValue, Null, Object, Result, Status,
ToNapiValue, Unknown, ValueType,
};
use y_octo::{AHashMap, Any, HashMapExt, Value};
use super::*;
pub type MixedYType<'a> = Either4<YArray, YMap, YText, Unknown<'a>>;
pub type MixedRefYType<'a> = Either4<&'a YArray, &'a YMap, &'a YText, Unknown<'a>>;
pub fn get_js_unknown_from_any(env: &Env, any: Any) -> Result<Unknown<'_>> {
match any {
Any::Null | Any::Undefined => Null.into_unknown(env),
Any::True => true.into_unknown(env),
Any::False => false.into_unknown(env),
Any::Integer(number) => number.into_unknown(env),
Any::BigInt64(number) => number.into_unknown(env),
Any::Float32(number) => number.0.into_unknown(env),
Any::Float64(number) => number.0.into_unknown(env),
Any::String(string) => string.into_unknown(env),
Any::Array(array) => {
let js_array = Array::from_vec(
env,
array
.into_iter()
.map(|value| get_js_unknown_from_any(env, value))
.collect::<Result<Vec<Unknown>>>()?,
)?;
Ok(js_array.to_unknown())
}
_ => Null.into_unknown(env),
}
}
pub fn get_js_unknown_from_value(env: &Env, value: Value) -> Result<Unknown<'_>> {
match value {
Value::Any(any) => get_js_unknown_from_any(env, any),
Value::Array(array) => {
let external = External::new(YArray::inner_new(array));
Ok(unsafe {
Unknown::from_raw_unchecked(env.raw(), ToNapiValue::to_napi_value(env.raw(), external)?)
})
}
Value::Map(map) => {
let external = External::new(YMap::inner_new(map));
Ok(unsafe {
Unknown::from_raw_unchecked(env.raw(), ToNapiValue::to_napi_value(env.raw(), external)?)
})
}
Value::Text(text) => {
let external = External::new(YText::inner_new(text));
external.into_unknown(env)
}
_ => Null.into_unknown(env),
}
}
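// Treated as an array when get_array_length succeeds; otherwise the
// property names are walked and collected into a map.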
pub fn get_any_from_js_object(object: Object) -> Result<Any> {
if let Ok(length) = object.get_array_length() {
let mut array = Vec::with_capacity(length as usize);
for i in 0..length {
if let Ok(value) = object.get_element::<Unknown>(i) {
array.push(get_any_from_js_unknown(value)?);
}
}
Ok(Any::Array(array))
} else {
let mut map = AHashMap::new();
let keys = object.get_property_names()?;
if let Ok(length) = keys.get_array_length() {
for i in 0..length {
if let Ok(key) = keys.get_element::<Unknown>(i).and_then(|o| {
o.coerce_to_string().and_then(|obj| {
obj
.into_utf8()
.and_then(|s| s.as_str().map(|s| s.to_string()))
})
}) {
if let Ok(value) = object.get_named_property_unchecked::<Unknown>(&key) {
map.insert(key, get_any_from_js_unknown(value)?);
}
}
}
}
Ok(Any::Object(map))
}
}
pub fn get_any_from_js_unknown(js_unknown: Unknown) -> Result<Any> {
match js_unknown.get_type()? {
ValueType::Undefined | ValueType::Null => Ok(Any::Null),
ValueType::Boolean => Ok(unsafe { js_unknown.cast::<bool>()? }.into()),
ValueType::Number => Ok(
js_unknown
.coerce_to_number()
.and_then(|v| v.get_double())
.map(|v| v.into())?,
),
ValueType::String => Ok(
js_unknown
.coerce_to_string()
.and_then(|v| v.into_utf8())
.and_then(|s| s.as_str().map(|s| s.to_string()))?
.into(),
),
ValueType::Object => {
if let Ok(object) = js_unknown.coerce_to_object() {
get_any_from_js_object(object)
} else {
Err(Error::new(
Status::InvalidArg,
"Failed to coerce value to object",
))
}
}
_ => Err(Error::new(
Status::InvalidArg,
"Failed to coerce value to any",
)),
}
}

View File

@@ -1,62 +0,0 @@
import assert, { equal, deepEqual } from 'node:assert';
import { test } from 'node:test';
import { Doc, type YArray } from '../index';
test('array test', { concurrency: false }, async t => {
let client_id: number;
let doc: Doc;
t.beforeEach(async () => {
client_id = (Math.random() * 100000) | 0;
doc = new Doc(client_id);
});
t.afterEach(async () => {
client_id = -1;
// @ts-expect-error - doc is deliberately reset to null between tests
doc = null;
});
await t.test('array should be created', () => {
let arr = doc.getOrCreateArray('arr');
deepEqual(doc.keys, ['arr']);
equal(arr.length, 0);
});
await t.test('array editing', () => {
let arr = doc.getOrCreateArray('arr');
arr.insert(0, true);
arr.insert(1, false);
arr.insert(2, 1);
arr.insert(3, 'hello world');
equal(arr.length, 4);
equal(arr.get(0), true);
equal(arr.get(1), false);
equal(arr.get(2), 1);
equal(arr.get(3), 'hello world');
equal(arr.length, 4);
arr.remove(1, 1);
equal(arr.length, 3);
equal(arr.get(2), 'hello world');
});
await t.test('sub array should be editable', () => {
let map = doc.getOrCreateMap('map');
let sub = doc.createArray();
map.set('sub', sub);
sub.insert(0, true);
sub.insert(1, false);
sub.insert(2, 1);
sub.insert(3, 'hello world');
equal(sub.length, 4);
let sub2 = map.get<YArray>('sub');
assert(sub2);
equal(sub2.get(0), true);
equal(sub2.get(1), false);
equal(sub2.get(2), 1);
equal(sub2.get(3), 'hello world');
equal(sub2.length, 4);
});
});

View File

@@ -1,99 +0,0 @@
import { equal } from 'node:assert';
import { test } from 'node:test';
import { Doc } from '../index';
import * as Y from 'yjs';
test('doc test', { concurrency: false }, async t => {
let client_id: number;
let doc: Doc;
t.beforeEach(async () => {
client_id = (Math.random() * 100000) | 0;
doc = new Doc(client_id);
});
t.afterEach(async () => {
client_id = -1;
// @ts-expect-error - doc is deliberately reset to null between tests
doc = null;
});
await t.test('doc id should be set', () => {
equal(doc.clientId, client_id);
});
await t.test('y-octo doc update should be applied', () => {
let array = doc.getOrCreateArray('array');
let map = doc.getOrCreateMap('map');
let text = doc.getOrCreateText('text');
array.insert(0, true);
array.insert(1, false);
array.insert(2, 1);
array.insert(3, 'hello world');
map.set('a', true);
map.set('b', false);
map.set('c', 1);
map.set('d', 'hello world');
text.insert(0, 'a');
text.insert(1, 'b');
text.insert(2, 'c');
let doc2 = new Doc(client_id);
doc2.applyUpdate(doc.encodeStateAsUpdateV1());
let array2 = doc2.getOrCreateArray('array');
let map2 = doc2.getOrCreateMap('map');
let text2 = doc2.getOrCreateText('text');
equal(doc2.clientId, client_id);
equal(array2.length, 4);
equal(array2.get(0), true);
equal(array2.get(1), false);
equal(array2.get(2), 1);
equal(array2.get(3), 'hello world');
equal(map2.length, 4);
equal(map2.get('a'), true);
equal(map2.get('b'), false);
equal(map2.get('c'), 1);
equal(map2.get('d'), 'hello world');
equal(text2.toString(), 'abc');
});
await t.test('yjs doc update should be applied', () => {
let doc2 = new Y.Doc();
let array2 = doc2.getArray('array');
let map2 = doc2.getMap('map');
let text2 = doc2.getText('text');
array2.insert(0, [true]);
array2.insert(1, [false]);
array2.insert(2, [1]);
array2.insert(3, ['hello world']);
map2.set('a', true);
map2.set('b', false);
map2.set('c', 1);
map2.set('d', 'hello world');
text2.insert(0, 'a');
text2.insert(1, 'b');
text2.insert(2, 'c');
doc.applyUpdate(Buffer.from(Y.encodeStateAsUpdate(doc2)));
let array = doc.getOrCreateArray('array');
let map = doc.getOrCreateMap('map');
let text = doc.getOrCreateText('text');
equal(array.length, 4);
equal(array.get(0), true);
equal(array.get(1), false);
equal(array.get(2), 1);
equal(array.get(3), 'hello world');
equal(map.length, 4);
equal(map.get('a'), true);
equal(map.get('b'), false);
equal(map.get('c'), 1);
equal(map.get('d'), 'hello world');
equal(text.toString(), 'abc');
});
});

View File

@@ -1,152 +0,0 @@
import assert, { equal, deepEqual } from 'node:assert';
import { test } from 'node:test';
import * as Y from 'yjs';
import { Doc, type YArray, type YMap, type YText } from '../index';
test('map test', { concurrency: false }, async t => {
let client_id: number;
let doc: Doc;
t.beforeEach(async () => {
client_id = (Math.random() * 100000) | 0;
doc = new Doc(client_id);
});
t.afterEach(async () => {
client_id = -1;
// @ts-expect-error - doc is deliberately reset to null between tests
doc = null;
});
await t.test('map should be created', () => {
let map = doc.getOrCreateMap('map');
deepEqual(doc.keys, ['map']);
equal(map.length, 0);
});
await t.test('map editing', () => {
let map = doc.getOrCreateMap('map');
map.set('a', true);
map.set('b', false);
map.set('c', 1);
map.set('d', 'hello world');
equal(map.length, 4);
equal(map.get('a'), true);
equal(map.get('b'), false);
equal(map.get('c'), 1);
equal(map.get('d'), 'hello world');
equal(map.length, 4);
map.remove('b');
equal(map.length, 3);
equal(map.get('d'), 'hello world');
});
await t.test('map can be nested', () => {
let map = doc.getOrCreateMap('map');
let sub = doc.createMap();
map.set('sub', sub);
sub.set('a', true);
sub.set('b', false);
sub.set('c', 1);
sub.set('d', 'hello world');
equal(sub.length, 4);
let sub2 = map.get<YMap>('sub');
assert(sub2);
equal(sub2.get('a'), true);
equal(sub2.get('b'), false);
equal(sub2.get('c'), 1);
equal(sub2.get('d'), 'hello world');
equal(sub2.length, 4);
});
await t.test('y-octo to yjs compatibility test with nested type', () => {
let map = doc.getOrCreateMap('map');
let sub_array = doc.createArray();
let sub_map = doc.createMap();
let sub_text = doc.createText();
map.set('array', sub_array);
map.set('map', sub_map);
map.set('text', sub_text);
sub_array.insert(0, true);
sub_array.insert(1, false);
sub_array.insert(2, 1);
sub_array.insert(3, 'hello world');
sub_map.set('a', true);
sub_map.set('b', false);
sub_map.set('c', 1);
sub_map.set('d', 'hello world');
sub_text.insert(0, 'a');
sub_text.insert(1, 'b');
sub_text.insert(2, 'c');
let doc2 = new Y.Doc();
Y.applyUpdate(doc2, doc.encodeStateAsUpdateV1());
let map2 = doc2.getMap<any>('map');
let sub_array2 = map2.get('array') as Y.Array<any>;
let sub_map2 = map2.get('map') as Y.Map<any>;
let sub_text2 = map2.get('text') as Y.Text;
assert(sub_array2);
equal(sub_array2.length, 4);
equal(sub_array2.get(0), true);
equal(sub_array2.get(1), false);
equal(sub_array2.get(2), 1);
equal(sub_array2.get(3), 'hello world');
assert(sub_map2);
equal(sub_map2.get('a'), true);
equal(sub_map2.get('b'), false);
equal(sub_map2.get('c'), 1);
equal(sub_map2.get('d'), 'hello world');
assert(sub_text2);
equal(sub_text2.toString(), 'abc');
});
await t.test('yjs to y-octo compatibility test with nested type', () => {
let doc2 = new Y.Doc();
let map2 = doc2.getMap<any>('map');
let sub_array2 = new Y.Array<any>();
let sub_map2 = new Y.Map<any>();
let sub_text2 = new Y.Text();
map2.set('array', sub_array2);
map2.set('map', sub_map2);
map2.set('text', sub_text2);
sub_array2.insert(0, [true]);
sub_array2.insert(1, [false]);
sub_array2.insert(2, [1]);
sub_array2.insert(3, ['hello world']);
sub_map2.set('a', true);
sub_map2.set('b', false);
sub_map2.set('c', 1);
sub_map2.set('d', 'hello world');
sub_text2.insert(0, 'a');
sub_text2.insert(1, 'b');
sub_text2.insert(2, 'c');
doc.applyUpdate(Buffer.from(Y.encodeStateAsUpdate(doc2)));
let map = doc.getOrCreateMap('map');
let sub_array = map.get<YArray>('array');
let sub_map = map.get<YMap>('map');
let sub_text = map.get<YText>('text');
assert(sub_array);
equal(sub_array.length, 4);
equal(sub_array.get(0), true);
equal(sub_array.get(1), false);
equal(sub_array.get(2), 1);
equal(sub_array.get(3), 'hello world');
assert(sub_map);
equal(sub_map.get('a'), true);
equal(sub_map.get('b'), false);
equal(sub_map.get('c'), 1);
equal(sub_map.get('d'), 'hello world');
assert(sub_text);
equal(sub_text.toString(), 'abc');
});
});

View File

@@ -1,54 +0,0 @@
import assert, { equal, deepEqual } from 'node:assert';
import { test } from 'node:test';
import { Doc, type YText } from '../index';
test('text test', { concurrency: false }, async t => {
let client_id: number;
let doc: Doc;
t.beforeEach(async () => {
client_id = (Math.random() * 100000) | 0;
doc = new Doc(client_id);
});
t.afterEach(async () => {
client_id = -1;
// @ts-expect-error - doc is typed as non-null; cleared between tests
doc = null;
});
await t.test('text should be created', () => {
let text = doc.getOrCreateText('text');
deepEqual(doc.keys, ['text']);
equal(text.len, 0);
});
await t.test('text editing', () => {
let text = doc.getOrCreateText('text');
text.insert(0, 'a');
text.insert(1, 'b');
text.insert(2, 'c');
equal(text.toString(), 'abc');
text.remove(0, 1);
equal(text.toString(), 'bc');
text.remove(1, 1);
equal(text.toString(), 'b');
text.remove(0, 1);
equal(text.toString(), '');
});
await t.test('sub text can be edited', () => {
let map = doc.getOrCreateMap('map');
let sub = doc.createText();
map.set('sub', sub);
sub.insert(0, 'a');
sub.insert(1, 'b');
sub.insert(2, 'c');
equal(sub.toString(), 'abc');
let sub2 = map.get<YText>('sub');
assert(sub2);
equal(sub2.toString(), 'abc');
});
});
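
The nested-text case above has a direct analogue on the Rust side. A sketch assuming yrs's prelim API (`TextPrelim` is an assumption here, not part of this diff): inserting a prelim into a map integrates it and hands back a live ref that stays editable:

```rust
use yrs::{Doc, GetString, Map, Text, TextPrelim, Transact};

fn main() {
    let doc = Doc::new();
    let map = doc.get_or_insert_map("map");
    let mut txn = doc.transact_mut();

    // Integrating the prelim returns a TextRef bound to this doc.
    let sub = map.insert(&mut txn, "sub", TextPrelim::new("ab"));
    sub.insert(&mut txn, 2, "c");
    assert_eq!(sub.get_string(&txn), "abc");
}
```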

View File

@@ -1,10 +0,0 @@
{
"extends": "../../../../tsconfig.node.json",
"compilerOptions": {
"noEmit": false,
"outDir": "lib",
"composite": true
},
"include": ["index.d.ts", "tests/**/*.mts"],
"references": []
}

View File

@@ -1,6 +1,6 @@
[package]
authors = ["x1a0t <405028157@qq.com>", "DarkSky <darksky2048@gmail.com>"]
edition = "2021"
edition = "2024"
license = "MIT"
name = "y-octo-utils"
version = "0.0.1"
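
This edition bump is also what drives the long run of import-reordering hunks below: rustfmt's style edition follows the crate edition by default, and the 2024 style edition version-sorts `use` lists so that uppercase-leading identifiers (types, traits) come before lowercase-leading ones (functions, modules). The before/after pair from the benchmark files illustrates it:

```rust
// 2021 style edition: lowercase-leading names sorted first.
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput};

// 2024 style edition: version sort puts uppercase-leading names first.
use criterion::{BenchmarkId, Criterion, Throughput, criterion_group, criterion_main};
```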

View File

@@ -2,7 +2,7 @@ mod utils;
use std::time::Duration;
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput};
use criterion::{BenchmarkId, Criterion, Throughput, criterion_group, criterion_main};
use path_ext::PathExt;
use utils::Files;
@@ -19,7 +19,7 @@ fn apply(c: &mut Criterion) {
&file.content,
|b, content| {
b.iter(|| {
use yrs::{updates::decoder::Decode, Doc, Transact, Update};
use yrs::{Doc, Transact, Update, updates::decoder::Decode};
let update = Update::decode_v1(content).unwrap();
let doc = Doc::new();
doc.transact_mut().apply_update(update).unwrap();

View File

@@ -1,6 +1,6 @@
use std::time::Duration;
use criterion::{criterion_group, criterion_main, Criterion};
use criterion::{Criterion, criterion_group, criterion_main};
use rand::{Rng, SeedableRng};
fn operations(c: &mut Criterion) {

View File

@@ -1,4 +1,4 @@
use criterion::{criterion_group, criterion_main, Criterion, SamplingMode};
use criterion::{Criterion, SamplingMode, criterion_group, criterion_main};
use lib0::{
decoding::{Cursor, Read},
encoding::Write,

View File

@@ -1,6 +1,6 @@
use std::time::Duration;
use criterion::{criterion_group, criterion_main, Criterion};
use criterion::{Criterion, criterion_group, criterion_main};
fn operations(c: &mut Criterion) {
let mut group = c.benchmark_group("ops/map");

View File

@@ -1,6 +1,6 @@
use std::time::Duration;
use criterion::{criterion_group, criterion_main, Criterion};
use criterion::{Criterion, criterion_group, criterion_main};
use rand::{Rng, SeedableRng};
fn operations(c: &mut Criterion) {

View File

@@ -2,7 +2,7 @@ mod utils;
use std::time::Duration;
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput};
use criterion::{BenchmarkId, Criterion, Throughput, criterion_group, criterion_main};
use path_ext::PathExt;
use utils::Files;
@@ -19,7 +19,7 @@ fn update(c: &mut Criterion) {
&file.content,
|b, content| {
b.iter(|| {
use yrs::{updates::decoder::Decode, Update};
use yrs::{Update, updates::decoder::Decode};
Update::decode_v1(content).unwrap()
});
},

View File

@@ -87,12 +87,7 @@ fn convert_to_markdown() -> impl Iterator<Item = String> {
let diff = -(1.0 - changes_dur_secs / base_dur_secs) * 100.0;
difference = format!("{diff:+.2}%");
if is_significant(
changes_dur_secs,
changes_err_secs,
base_dur_secs,
base_err_secs,
) {
if is_significant(changes_dur_secs, changes_err_secs, base_dur_secs, base_err_secs) {
difference = format!("**{difference}**");
}
}
@@ -100,16 +95,8 @@ fn convert_to_markdown() -> impl Iterator<Item = String> {
Some(format!(
"| {} | {} | {} | {} |",
name.replace('|', "\\|"),
if base_undefined {
"N/A"
} else {
&base_duration
},
if changes_undefined {
"N/A"
} else {
&changes_duration
},
if base_undefined { "N/A" } else { &base_duration },
if changes_undefined { "N/A" } else { &changes_duration },
difference
))
})

View File

@@ -1,5 +1,5 @@
[package]
edition = "2021"
edition = "2024"
name = "y-octo-fuzz"
publish = false
version = "0.0.0"

View File

@@ -69,10 +69,7 @@ pub fn yrs_create_array_from_nest_type(
let str = text.get_string(&trx);
let len = str.chars().fold(0, |acc, _| acc + 1);
let index = random_pick_num(len, insert_pos) as usize;
let byte_start_offset = str
.chars()
.take(index)
.fold(0, |acc, ch| acc + ch.len_utf8());
let byte_start_offset = str.chars().take(index).fold(0, |acc, ch| acc + ch.len_utf8());
Some(text.insert_embed(&mut trx, byte_start_offset as u32, array_prelim))
}
@@ -80,10 +77,7 @@ pub fn yrs_create_array_from_nest_type(
let str = xml_text.get_string(&trx);
let len = str.chars().fold(0, |acc, _| acc + 1);
let index = random_pick_num(len, insert_pos) as usize;
let byte_start_offset = str
.chars()
.take(index)
.fold(0, |acc, ch| acc + ch.len_utf8());
let byte_start_offset = str.chars().take(index).fold(0, |acc, ch| acc + ch.len_utf8());
Some(xml_text.insert_embed(&mut trx, byte_start_offset as u32, array_prelim))
}
@@ -114,26 +108,14 @@ mod tests {
nest_data_op_type: NestDataOpType::Insert,
};
ops_registry.operate_yrs_nest_type(
&doc,
YrsNestType::ArrayType(array_ref.clone()),
params.clone(),
);
ops_registry.operate_yrs_nest_type(&doc, YrsNestType::ArrayType(array_ref.clone()), params.clone());
assert_eq!(array_ref.len(&doc.transact()), 1);
params.nest_data_op_type = NestDataOpType::Delete;
ops_registry.operate_yrs_nest_type(
&doc,
YrsNestType::ArrayType(array_ref.clone()),
params.clone(),
);
ops_registry.operate_yrs_nest_type(&doc, YrsNestType::ArrayType(array_ref.clone()), params.clone());
assert_eq!(array_ref.len(&doc.transact()), 0);
params.nest_data_op_type = NestDataOpType::Clear;
ops_registry.operate_yrs_nest_type(
&doc,
YrsNestType::ArrayType(array_ref.clone()),
params.clone(),
);
ops_registry.operate_yrs_nest_type(&doc, YrsNestType::ArrayType(array_ref.clone()), params.clone());
assert_eq!(array_ref.len(&doc.transact()), 0);
}
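
The repeatedly collapsed `byte_start_offset` expression converts a character index into a UTF-8 byte offset before calling `insert_embed`, since the fuzzer picks positions in chars while the embed call here is fed byte offsets. The same idiom as a standalone helper (function name is illustrative, not from the diff):

```rust
/// Map a char index to the UTF-8 byte offset where that char starts.
fn byte_offset_of_char_index(s: &str, index: usize) -> usize {
    s.chars().take(index).fold(0, |acc, ch| acc + ch.len_utf8())
}

fn main() {
    let s = "héllo"; // 'é' occupies 2 bytes in UTF-8
    assert_eq!(byte_offset_of_char_index(s, 0), 0);
    assert_eq!(byte_offset_of_char_index(s, 2), 3); // 'h' (1) + 'é' (2)
    assert_eq!(byte_offset_of_char_index(s, 5), s.len());
}
```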

View File

@@ -26,9 +26,7 @@ fn remove_op(doc: &yrs::Doc, nest_input: &YrsNestType, params: CRDTParam) {
random_pick_num((len - 1) as u32, &params.insert_pos)
};
iter
.nth(skip_step as usize)
.map(|(key, _value)| key.to_string())
iter.nth(skip_step as usize).map(|(key, _value)| key.to_string())
};
if let Some(key) = rand_key {
@@ -77,10 +75,7 @@ pub fn yrs_create_map_from_nest_type(
let str = text.get_string(&trx);
let len = str.chars().fold(0, |acc, _| acc + 1);
let index = random_pick_num(len, insert_pos) as usize;
let byte_start_offset = str
.chars()
.take(index)
.fold(0, |acc, ch| acc + ch.len_utf8());
let byte_start_offset = str.chars().take(index).fold(0, |acc, ch| acc + ch.len_utf8());
Some(text.insert_embed(&mut trx, byte_start_offset as u32, map_prelim))
}
@@ -88,10 +83,7 @@ pub fn yrs_create_map_from_nest_type(
let str = xml_text.get_string(&trx);
let len = str.chars().fold(0, |acc, _| acc + 1);
let index = random_pick_num(len, insert_pos) as usize;
let byte_start_offset = str
.chars()
.take(index)
.fold(0, |acc, ch| acc + ch.len_utf8());
let byte_start_offset = str.chars().take(index).fold(0, |acc, ch| acc + ch.len_utf8());
Some(xml_text.insert_embed(&mut trx, byte_start_offset as u32, map_prelim))
}
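
Most of the single-line collapses in these hunks are formatter output rather than hand edits: they are consistent with a wider rustfmt line limit combined with the 2024 style edition. A hypothetical rustfmt.toml that would reproduce this formatting (assumed for illustration; this section does not show the repo's actual formatter config):

```toml
# Hypothetical config, not taken from this diff.
max_width = 120        # lets short bodies and call chains stay on one line
style_edition = "2024" # version-sorts imports, 2024 formatting defaults
```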

Some files were not shown because too many files have changed in this diff.