feat(y-octo): import y-octo monorepo (#11750)

Author: Brooooooklyn
Date: 2025-04-21 02:51:15 +00:00
Parent: e3973538e8
Commit: 95dbda24fc

127 changed files with 17319 additions and 18 deletions

View File

@@ -0,0 +1,71 @@
[package]
authors = ["x1a0t <405028157@qq.com>", "DarkSky <darksky2048@gmail.com>"]
edition = "2021"
license = "MIT"
name = "y-octo-utils"
version = "0.0.1"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[features]
bench = ["regex"]
default = ["merger"]
fuzz = ["arbitrary", "phf"]
merger = ["clap", "y-octo/large_refs"]
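# Feature overview (inferred from this manifest and the binaries below):
# - `merger` (default): enables the clap-based `doc_merger` CLI and y-octo's `large_refs`.
# - `bench`: pulls in `regex` for the `bench_result_render` helper.
# - `fuzz`: pulls in `arbitrary` and `phf` for the fuzzing operation registry.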
[dependencies]
arbitrary = { workspace = true, features = ["derive"], optional = true }
clap = { workspace = true, features = ["derive"], optional = true }
lib0 = { workspace = true, features = ["lib0-serde"] }
phf = { workspace = true, features = ["macros"], optional = true }
rand = { workspace = true }
rand_chacha = { workspace = true }
regex = { workspace = true, optional = true }
y-octo = { workspace = true }
y-sync = { workspace = true }
yrs = { workspace = true }
[dev-dependencies]
criterion = { workspace = true }
path-ext = { workspace = true }
proptest = { workspace = true }
proptest-derive = { workspace = true }
[[bin]]
name = "bench_result_render"
path = "bin/bench_result_render.rs"
[[bin]]
name = "doc_merger"
path = "bin/doc_merger.rs"
[[bin]]
name = "memory_leak_test"
path = "bin/memory_leak_test.rs"
[[bench]]
harness = false
name = "array_ops_benchmarks"
[[bench]]
harness = false
name = "codec_benchmarks"
[[bench]]
harness = false
name = "map_ops_benchmarks"
[[bench]]
harness = false
name = "text_ops_benchmarks"
[[bench]]
harness = false
name = "apply_benchmarks"
[[bench]]
harness = false
name = "update_benchmarks"
[lib]
bench = true

View File

@@ -0,0 +1,35 @@
mod utils;
use std::time::Duration;
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput};
use path_ext::PathExt;
use utils::Files;
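// Benchmark: decode each binary fixture with yrs and apply it to a fresh Doc,
// reporting throughput in bytes of the encoded update.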
fn apply(c: &mut Criterion) {
let files = Files::load();
let mut group = c.benchmark_group("apply");
group.measurement_time(Duration::from_secs(15));
for file in &files.files {
group.throughput(Throughput::Bytes(file.content.len() as u64));
group.bench_with_input(
BenchmarkId::new("apply with yrs", file.path.name_str()),
&file.content,
|b, content| {
b.iter(|| {
use yrs::{updates::decoder::Decode, Doc, Transact, Update};
let update = Update::decode_v1(content).unwrap();
let doc = Doc::new();
doc.transact_mut().apply_update(update).unwrap();
});
},
);
}
group.finish();
}
criterion_group!(benches, apply);
criterion_main!(benches);

View File

@@ -0,0 +1,79 @@
use std::time::Duration;
use criterion::{criterion_group, criterion_main, Criterion};
use rand::{Rng, SeedableRng};
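// yrs array benchmarks: push a small base document, then measure inserts at
// seeded-random positions, ranged inserts, and removals.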
fn operations(c: &mut Criterion) {
let mut group = c.benchmark_group("ops/array");
group.measurement_time(Duration::from_secs(15));
group.bench_function("yrs/insert", |b| {
let base_text = "test1 test2 test3 test4 test5 test6 test7 test8 test9";
let mut rng = rand_chacha::ChaCha20Rng::seed_from_u64(1234);
let idxs = (0..99)
.map(|_| rng.random_range(0..base_text.len() as u32))
.collect::<Vec<_>>();
b.iter(|| {
use yrs::{Array, Doc, Transact};
let doc = Doc::new();
let array = doc.get_or_insert_array("test");
let mut trx = doc.transact_mut();
for c in base_text.chars() {
array.push_back(&mut trx, c.to_string());
}
for idx in &idxs {
array.insert(&mut trx, *idx, "test");
}
drop(trx);
});
});
group.bench_function("yrs/insert range", |b| {
let base_text = "test1 test2 test3 test4 test5 test6 test7 test8 test9";
let mut rng = rand_chacha::ChaCha20Rng::seed_from_u64(1234);
let idxs = (0..99)
.map(|_| rng.random_range(0..base_text.len() as u32))
.collect::<Vec<_>>();
b.iter(|| {
use yrs::{Array, Doc, Transact};
let doc = Doc::new();
let array = doc.get_or_insert_array("test");
let mut trx = doc.transact_mut();
for c in base_text.chars() {
array.push_back(&mut trx, c.to_string());
}
for idx in &idxs {
array.insert_range(&mut trx, *idx, vec!["test1", "test2"]);
}
drop(trx);
});
});
group.bench_function("yrs/remove", |b| {
let base_text = "test1 test2 test3 test4 test5 test6 test7 test8 test9";
b.iter(|| {
use yrs::{Array, Doc, Transact};
let doc = Doc::new();
let array = doc.get_or_insert_array("test");
let mut trx = doc.transact_mut();
for c in base_text.chars() {
array.push_back(&mut trx, c.to_string());
}
// note: a descending `start..0` range is empty; iterate in reverse instead
for idx in (0..base_text.len() as u32).rev() {
array.remove(&mut trx, idx);
}
drop(trx);
});
});
group.finish();
}
criterion_group!(benches, operations);
criterion_main!(benches);

View File

@@ -0,0 +1,89 @@
use criterion::{criterion_group, criterion_main, Criterion, SamplingMode};
use lib0::{
decoding::{Cursor, Read},
encoding::Write,
};
const BENCHMARK_SIZE: u32 = 100000;
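// Round-trips BENCHMARK_SIZE variable-length integers through lib0's encoder and
// decoder for signed 64-bit and unsigned 32/64-bit values.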
fn codec(c: &mut Criterion) {
let mut codec_group = c.benchmark_group("codec");
codec_group.sampling_mode(SamplingMode::Flat);
{
codec_group.bench_function("lib0 encode var_int (64 bit)", |b| {
b.iter(|| {
let mut encoder = Vec::with_capacity(BENCHMARK_SIZE as usize * 8);
for i in 0..(BENCHMARK_SIZE as i64) {
encoder.write_var(i);
}
})
});
codec_group.bench_function("lib0 decode var_int (64 bit)", |b| {
let mut encoder = Vec::with_capacity(BENCHMARK_SIZE as usize * 8);
for i in 0..(BENCHMARK_SIZE as i64) {
encoder.write_var(i);
}
b.iter(|| {
let mut decoder = Cursor::from(&encoder);
for i in 0..(BENCHMARK_SIZE as i64) {
let num: i64 = decoder.read_var().unwrap();
assert_eq!(num, i);
}
})
});
}
{
codec_group.bench_function("lib0 encode var_uint (32 bit)", |b| {
b.iter(|| {
let mut encoder = Vec::with_capacity(BENCHMARK_SIZE as usize * 8);
for i in 0..BENCHMARK_SIZE {
encoder.write_var(i);
}
})
});
codec_group.bench_function("lib0 decode var_uint (32 bit)", |b| {
let mut encoder = Vec::with_capacity(BENCHMARK_SIZE as usize * 8);
for i in 0..BENCHMARK_SIZE {
encoder.write_var(i);
}
b.iter(|| {
let mut decoder = Cursor::from(&encoder);
for i in 0..BENCHMARK_SIZE {
let num: u32 = decoder.read_var().unwrap();
assert_eq!(num, i);
}
})
});
}
{
codec_group.bench_function("lib0 encode var_uint (64 bit)", |b| {
b.iter(|| {
let mut encoder = Vec::with_capacity(BENCHMARK_SIZE as usize * 8);
for i in 0..(BENCHMARK_SIZE as u64) {
encoder.write_var(i);
}
})
});
codec_group.bench_function("lib0 decode var_uint (64 bit)", |b| {
let mut encoder = Vec::with_capacity(BENCHMARK_SIZE as usize * 8);
for i in 0..(BENCHMARK_SIZE as u64) {
encoder.write_var(i);
}
b.iter(|| {
let mut decoder = Cursor::from(&encoder);
for i in 0..(BENCHMARK_SIZE as u64) {
let num: u64 = decoder.read_var().unwrap();
assert_eq!(num, i);
}
})
});
}
}
criterion_group!(benches, codec);
criterion_main!(benches);

View File

@@ -0,0 +1,79 @@
use std::time::Duration;
use criterion::{criterion_group, criterion_main, Criterion};
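// yrs map benchmarks: insert, get, and remove over a fixed set of nine keys.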
fn operations(c: &mut Criterion) {
let mut group = c.benchmark_group("ops/map");
group.measurement_time(Duration::from_secs(15));
group.bench_function("yrs/insert", |b| {
let base_text = "test1 test2 test3 test4 test5 test6 test7 test8 test9"
.split(' ')
.collect::<Vec<_>>();
b.iter(|| {
use yrs::{Doc, Map, Transact};
let doc = Doc::new();
let map = doc.get_or_insert_map("test");
let mut trx = doc.transact_mut();
for (idx, key) in base_text.iter().enumerate() {
map.insert(&mut trx, key.to_string(), idx as f64);
}
drop(trx);
});
});
group.bench_function("yrs/get", |b| {
use yrs::{Doc, Map, Transact};
let base_text = "test1 test2 test3 test4 test5 test6 test7 test8 test9"
.split(' ')
.collect::<Vec<_>>();
let doc = Doc::new();
let map = doc.get_or_insert_map("test");
let mut trx = doc.transact_mut();
for (idx, key) in base_text.iter().enumerate() {
map.insert(&mut trx, key.to_string(), idx as f64);
}
drop(trx);
b.iter(|| {
let trx = doc.transact();
for key in &base_text {
map.get(&trx, key).unwrap();
}
});
});
group.bench_function("yrs/remove", |b| {
let base_text = "test1 test2 test3 test4 test5 test6 test7 test8 test9"
.split(' ')
.collect::<Vec<_>>();
b.iter(|| {
use yrs::{Doc, Map, Transact};
let doc = Doc::new();
let map = doc.get_or_insert_map("test");
let mut trx = doc.transact_mut();
for (idx, key) in base_text.iter().enumerate() {
map.insert(&mut trx, key.to_string(), idx as f64);
}
for key in &base_text {
map.remove(&mut trx, key).unwrap();
}
drop(trx);
});
});
group.finish();
}
criterion_group!(benches, operations);
criterion_main!(benches);

View File

@@ -0,0 +1,54 @@
use std::time::Duration;
use criterion::{criterion_group, criterion_main, Criterion};
use rand::{Rng, SeedableRng};
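// yrs text benchmarks: seeded-random inserts into a short base string, and
// single-character removals.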
fn operations(c: &mut Criterion) {
let mut group = c.benchmark_group("ops/text");
group.measurement_time(Duration::from_secs(15));
group.bench_function("yrs/insert", |b| {
let base_text = "test1 test2 test3 test4 test5 test6 test7 test8 test9";
let mut rng = rand_chacha::ChaCha20Rng::seed_from_u64(1234);
let idxs = (0..99)
.map(|_| rng.random_range(0..base_text.len() as u32))
.collect::<Vec<_>>();
b.iter(|| {
use yrs::{Doc, Text, Transact};
let doc = Doc::new();
let text = doc.get_or_insert_text("test");
let mut trx = doc.transact_mut();
text.push(&mut trx, base_text);
for idx in &idxs {
text.insert(&mut trx, *idx, "test");
}
drop(trx);
});
});
group.bench_function("yrs/remove", |b| {
let base_text = "test1 test2 test3 test4 test5 test6 test7 test8 test9";
b.iter(|| {
use yrs::{Doc, Text, Transact};
let doc = Doc::new();
let text = doc.get_or_insert_text("test");
let mut trx = doc.transact_mut();
text.push(&mut trx, base_text);
text.push(&mut trx, base_text);
text.push(&mut trx, base_text);
// note: a descending `start..0` range is empty; iterate in reverse instead
for idx in (0..base_text.len() as u32).rev() {
text.remove_range(&mut trx, idx, 1);
}
drop(trx);
});
});
group.finish();
}
criterion_group!(benches, operations);
criterion_main!(benches);

View File

@@ -0,0 +1,33 @@
mod utils;
use std::time::Duration;
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput};
use path_ext::PathExt;
use utils::Files;
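// Benchmark: decode each binary fixture with yrs without applying it.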
fn update(c: &mut Criterion) {
let files = Files::load();
let mut group = c.benchmark_group("update");
group.measurement_time(Duration::from_secs(15));
for file in &files.files {
group.throughput(Throughput::Bytes(file.content.len() as u64));
group.bench_with_input(
BenchmarkId::new("parse with yrs", file.path.name_str()),
&file.content,
|b, content| {
b.iter(|| {
use yrs::{updates::decoder::Decode, Update};
Update::decode_v1(content).unwrap()
});
},
);
}
group.finish();
}
criterion_group!(benches, update);
criterion_main!(benches);

View File

@@ -0,0 +1,42 @@
use std::{
fs::{read, read_dir},
path::{Path, PathBuf},
};
use path_ext::PathExt;
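// Shared fixture loader for the benches: reads every `.bin` file under BASE,
// resolved relative to this crate's manifest directory.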
pub struct File {
pub path: PathBuf,
pub content: Vec<u8>,
}
const BASE: &str = "../y-octo/src/fixtures/";
impl File {
fn new(path: &Path) -> Self {
let content = read(path).unwrap();
Self {
path: path.into(),
content,
}
}
}
pub struct Files {
pub files: Vec<File>,
}
impl Files {
pub fn load() -> Self {
let path = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join(BASE);
let files = read_dir(path).unwrap();
let files = files
.flatten()
.filter(|f| f.path().is_file() && f.path().ext_str() == "bin")
.map(|f| File::new(&f.path()))
.collect::<Vec<_>>();
Self { files }
}
}

View File

@@ -0,0 +1,3 @@
mod files;
pub use files::Files;

View File

@@ -0,0 +1,134 @@
use std::{
collections::HashMap,
io::{self, BufRead},
};
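// bench_result_render: reads a whitespace-aligned benchmark comparison table from
// stdin (durations formatted like `1.23±0.05ms`, `?` for missing entries) and
// prints it as a markdown table, bolding differences that look significant.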
fn process_duration(duration: &str) -> Option<(f64, f64)> {
let dur_split: Vec<String> = duration.split('±').map(String::from).collect();
if dur_split.len() != 2 {
return None;
}
// durations may contain a decimal point (e.g. `0.05ms`), so treat '.' as part of the number
let units = dur_split[1]
.chars()
.skip_while(|c| c.is_ascii_digit() || *c == '.')
.collect::<String>();
let dur_secs = dur_split[0].parse::<f64>().ok()?;
let error_secs = dur_split[1]
.chars()
.take_while(|c| c.is_ascii_digit() || *c == '.')
.collect::<String>()
.parse::<f64>()
.ok()?;
Some((
convert_dur_to_seconds(dur_secs, &units),
convert_dur_to_seconds(error_secs, &units),
))
}
fn convert_dur_to_seconds(dur: f64, units: &str) -> f64 {
let factors: HashMap<_, _> = [
("s", 1.0),
("ms", 1.0 / 1000.0),
("µs", 1.0 / 1_000_000.0),
("ns", 1.0 / 1_000_000_000.0),
]
.iter()
.cloned()
.collect();
dur * factors.get(units).unwrap_or(&1.0)
}
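// A change counts as significant when either result, widened by its reported
// error margin, still lies strictly on one side of the other result.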
fn is_significant(changes_dur: f64, changes_err: f64, base_dur: f64, base_err: f64) -> bool {
if changes_dur < base_dur {
changes_dur + changes_err < base_dur || base_dur - base_err > changes_dur
} else {
changes_dur - changes_err > base_dur || base_dur + base_err < changes_dur
}
}
fn convert_to_markdown() -> impl Iterator<Item = String> {
#[cfg(feature = "bench")]
let re = regex::Regex::new(r"\s{2,}").unwrap();
io::stdin()
.lock()
.lines()
.skip(2)
.flat_map(move |row| {
if let Ok(_row) = row {
let columns = {
#[cfg(feature = "bench")]
{
re.split(&_row).collect::<Vec<_>>()
}
#[cfg(not(feature = "bench"))]
Vec::<&str>::new()
};
let name = columns.first()?;
let base_duration = columns.get(2)?;
let changes_duration = columns.get(5)?;
Some((
name.to_string(),
base_duration.to_string(),
changes_duration.to_string(),
))
} else {
None
}
})
.flat_map(|(name, base_duration, changes_duration)| {
let mut difference = "N/A".to_string();
let base_undefined = base_duration == "?";
let changes_undefined = changes_duration == "?";
if !base_undefined && !changes_undefined {
let (base_dur_secs, base_err_secs) = process_duration(&base_duration)?;
let (changes_dur_secs, changes_err_secs) = process_duration(&changes_duration)?;
let diff = -(1.0 - changes_dur_secs / base_dur_secs) * 100.0;
difference = format!("{:+.2}%", diff);
if is_significant(
changes_dur_secs,
changes_err_secs,
base_dur_secs,
base_err_secs,
) {
difference = format!("**{}**", difference);
}
}
Some(format!(
"| {} | {} | {} | {} |",
name.replace('|', "\\|"),
if base_undefined {
"N/A"
} else {
&base_duration
},
if changes_undefined {
"N/A"
} else {
&changes_duration
},
difference
))
})
}
fn main() {
let platform = std::env::args().nth(1).expect("Missing platform argument");
let headers = vec![
format!("## Benchmark for {}", platform),
"<details>".to_string(),
" <summary>Click to view benchmark</summary>".to_string(),
"".to_string(),
"| Test | Base | PR | % |".to_string(),
"| --- | --- | --- | --- |".to_string(),
];
for line in headers.into_iter().chain(convert_to_markdown()) {
println!("{}", line);
}
println!("</details>");
}

View File

@@ -0,0 +1,100 @@
use std::{
fs::read,
io::{Error, ErrorKind},
path::PathBuf,
time::Instant,
};
use clap::Parser;
use y_octo::Doc;
/// ybinary merger
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
struct Args {
/// Path of the ybinary to read
#[arg(short, long)]
path: String,
}
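// Accepts either a single ybinary file or a directory of them; directory entries
// are read in sorted path order so updates apply deterministically.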
fn load_path(path: &str) -> Result<Vec<Vec<u8>>, Error> {
let path = PathBuf::from(path);
if path.is_dir() {
let mut updates = Vec::new();
let mut paths = path
.read_dir()?
.filter_map(|entry| {
let entry = entry.ok()?;
if entry.path().is_file() {
Some(entry.path())
} else {
None
}
})
.collect::<Vec<_>>();
paths.sort();
for path in paths {
println!("read {:?}", path);
updates.push(read(path)?);
}
Ok(updates)
} else if path.is_file() {
Ok(vec![read(path)?])
} else {
Err(Error::new(ErrorKind::NotFound, "not a file or directory"))
}
}
fn main() {
let args = Args::parse();
jwst_merge(&args.path);
}
fn jwst_merge(path: &str) {
let updates = load_path(path).unwrap();
let mut doc = Doc::default();
for (i, update) in updates.iter().enumerate() {
println!("apply update{i} {} bytes", update.len());
doc.apply_update_from_binary_v1(update.clone()).unwrap();
}
println!("press enter to continue");
std::io::stdin().read_line(&mut String::new()).unwrap();
let ts = Instant::now();
let history = doc.history().parse_store(Default::default());
println!("history: {:?}", ts.elapsed());
for history in history.iter().take(100) {
println!("history: {:?}", history);
}
doc.gc().unwrap();
let binary = {
let binary = doc.encode_update_v1().unwrap();
println!("merged {} bytes", binary.len());
binary
};
{
let mut doc = Doc::default();
doc.apply_update_from_binary_v1(binary.clone()).unwrap();
let new_binary = doc.encode_update_v1().unwrap();
println!("re-encoded {} bytes", new_binary.len(),);
};
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
#[ignore = "only for debug"]
fn test_gc() {
jwst_merge("/Users/ds/Downloads/out");
}
}

View File

@@ -0,0 +1,79 @@
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha20Rng;
use y_octo::*;
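// Memory-leak smoke test: loops seeded text/array/map workloads many times so
// that a leak in y-octo shows up as steadily growing memory usage.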
fn run_text_test(seed: u64) {
let doc = Doc::with_client(1);
let mut rand = ChaCha20Rng::seed_from_u64(seed);
let mut text = doc.get_or_create_text("test").unwrap();
text.insert(0, "This is a string with length 32.").unwrap();
let iteration = 20;
let mut len = 32;
for i in 0..iteration {
let mut text = text.clone();
let ins = i % 2 == 0;
let pos = rand.random_range(0..if ins { text.len() } else { len / 2 });
if ins {
let str = format!("hello {i}");
text.insert(pos, &str).unwrap();
len += str.len() as u64;
} else {
text.remove(pos, 6).unwrap();
len -= 6;
}
}
assert_eq!(text.to_string().len(), len as usize);
assert_eq!(text.len(), len);
}
fn run_array_test(seed: u64) {
let doc = Doc::with_client(1);
let mut rand = ChaCha20Rng::seed_from_u64(seed);
let mut array = doc.get_or_create_array("test").unwrap();
array.push(1).unwrap();
let iteration = 20;
let mut len = 1;
for i in 0..iteration {
let mut array = array.clone();
let ins = i % 2 == 0;
let pos = rand.random_range(0..if ins { array.len() } else { len / 2 });
if ins {
array.insert(pos, 1).unwrap();
len += 1;
} else {
array.remove(pos, 1).unwrap();
len -= 1;
}
}
assert_eq!(array.len(), len);
}
fn run_map_test() {
let base_text = "test1 test2 test3 test4 test5 test6 test7 test8 test9"
.split(' ')
.collect::<Vec<_>>();
for _ in 0..10000 {
let doc = Doc::default();
let mut map = doc.get_or_create_map("test").unwrap();
for (idx, key) in base_text.iter().enumerate() {
map.insert(key.to_string(), idx).unwrap();
}
}
}
fn main() {
let mut rand = ChaCha20Rng::seed_from_u64(rand::rng().random());
for _ in 0..10000 {
let seed = rand.random();
run_array_test(seed);
run_text_test(seed);
run_map_test();
}
}

View File

@@ -0,0 +1,4 @@
target
corpus
artifacts
coverage

File diff suppressed because it is too large

View File

@@ -0,0 +1,88 @@
[package]
edition = "2021"
name = "y-octo-fuzz"
publish = false
version = "0.0.0"
[package.metadata]
cargo-fuzz = true
[dependencies]
lib0 = "=0.16.10"
libfuzzer-sys = "0.4"
rand = "0.9"
rand_chacha = "0.9"
yrs = "=0.23.0"
y-octo-utils = { path = "..", features = ["fuzz"] }
[dependencies.y-octo]
path = "../../core"
# Prevent this from interfering with workspaces
[workspace]
members = ["."]
[profile.release]
debug = 1
[[bin]]
doc = false
name = "codec_doc_any_struct"
path = "fuzz_targets/codec_doc_any_struct.rs"
test = false
[[bin]]
doc = false
name = "codec_doc_any"
path = "fuzz_targets/codec_doc_any.rs"
test = false
[[bin]]
doc = false
name = "decode_bytes"
path = "fuzz_targets/decode_bytes.rs"
test = false
[[bin]]
doc = false
name = "ins_del_text"
path = "fuzz_targets/ins_del_text.rs"
test = false
[[bin]]
doc = false
name = "sync_message"
path = "fuzz_targets/sync_message.rs"
test = false
[[bin]]
doc = false
name = "i32_decode"
path = "fuzz_targets/i32_decode.rs"
test = false
[[bin]]
doc = false
name = "i32_encode"
path = "fuzz_targets/i32_encode.rs"
test = false
[[bin]]
doc = false
name = "u64_decode"
path = "fuzz_targets/u64_decode.rs"
test = false
[[bin]]
doc = false
name = "u64_encode"
path = "fuzz_targets/u64_encode.rs"
test = false
[[bin]]
doc = false
name = "apply_update"
path = "fuzz_targets/apply_update.rs"
test = false

View File

@@ -0,0 +1,51 @@
#![no_main]
use std::collections::HashSet;
use libfuzzer_sys::fuzz_target;
use y_octo_utils::{
gen_nest_type_from_nest_type, gen_nest_type_from_root, CRDTParam, ManipulateSource, OpType,
OpsRegistry, YrsNestType,
};
use yrs::Transact;
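// Fuzz target: drives an arbitrary sequence of nested-CRDT creations and edits
// through yrs, then asserts that y-octo re-encodes the resulting update
// byte-for-byte identically.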
fuzz_target!(|crdt_params: Vec<CRDTParam>| {
let mut doc = yrs::Doc::new();
let mut cur_crdt_nest_type: Option<YrsNestType> = None;
let ops_registry = OpsRegistry::new();
let mut key_set = HashSet::<String>::new();
for crdt_param in crdt_params {
if key_set.contains(&crdt_param.key) {
continue;
}
key_set.insert(crdt_param.key.clone());
match crdt_param.op_type {
OpType::HandleCurrent => {
if cur_crdt_nest_type.is_some() {
ops_registry.operate_yrs_nest_type(&doc, cur_crdt_nest_type.clone().unwrap(), crdt_param);
}
}
OpType::CreateCRDTNestType => {
cur_crdt_nest_type = match cur_crdt_nest_type {
None => gen_nest_type_from_root(&mut doc, &crdt_param),
Some(mut nest_type) => match crdt_param.manipulate_source {
ManipulateSource::CurrentNestType => Some(nest_type),
ManipulateSource::NewNestTypeFromYDocRoot => {
gen_nest_type_from_root(&mut doc, &crdt_param)
}
ManipulateSource::NewNestTypeFromCurrent => {
gen_nest_type_from_nest_type(&mut doc, crdt_param.clone(), &mut nest_type)
}
},
};
}
};
}
let trx = doc.transact_mut();
let binary_from_yrs = trx.encode_update_v1();
let doc = y_octo::Doc::try_from_binary_v1(&binary_from_yrs).unwrap();
let binary = doc.encode_update_v1().unwrap();
assert_eq!(binary, binary_from_yrs);
});

View File

@@ -0,0 +1,17 @@
#![no_main]
use libfuzzer_sys::fuzz_target;
use y_octo::{Any, CrdtRead, CrdtWrite, RawDecoder, RawEncoder};
fuzz_target!(|data: &[u8]| {
if let Ok(any) = Any::read(&mut RawDecoder::new(data)) {
// ensure decoding and re-encoding produce the same result
let mut buffer = RawEncoder::default();
if let Err(e) = any.write(&mut buffer) {
panic!("Failed to write message: {:?}, {:?}", any, e);
}
if let Ok(any2) = Any::read(&mut RawDecoder::new(&buffer.into_inner())) {
assert_eq!(any, any2);
}
}
});

View File

@@ -0,0 +1,43 @@
#![no_main]
use libfuzzer_sys::fuzz_target;
use rand::{distr::Alphanumeric, Rng};
use y_octo::{Any, CrdtRead, CrdtWrite, RawDecoder, RawEncoder};
fn get_random_string() -> String {
rand::rng()
.sample_iter(&Alphanumeric)
.take(7)
.map(char::from)
.collect()
}
fuzz_target!(|data: Vec<Any>| {
{
let any = Any::Object(
data
.iter()
.map(|a| (get_random_string(), a.clone()))
.collect(),
);
let mut buffer = RawEncoder::default();
if let Err(e) = any.write(&mut buffer) {
panic!("Failed to write message: {:?}, {:?}", any, e);
}
if let Ok(any2) = Any::read(&mut RawDecoder::new(&buffer.into_inner())) {
assert_eq!(any, any2);
}
}
{
let any = Any::Array(data);
let mut buffer = RawEncoder::default();
if let Err(e) = any.write(&mut buffer) {
panic!("Failed to write message: {:?}, {:?}", any, e);
}
if let Ok(any2) = Any::read(&mut RawDecoder::new(&buffer.into_inner())) {
assert_eq!(any, any2);
}
}
});

View File

@@ -0,0 +1,11 @@
#![no_main]
use libfuzzer_sys::fuzz_target;
use y_octo::{read_var_buffer, read_var_i32, read_var_string, read_var_u64};
fuzz_target!(|data: Vec<u8>| {
let _ = read_var_i32(&data);
let _ = read_var_u64(&data);
let _ = read_var_buffer(&data);
let _ = read_var_string(&data);
});

View File

@@ -0,0 +1,18 @@
#![no_main]
use lib0::encoding::Write;
use libfuzzer_sys::fuzz_target;
use y_octo::{read_var_i32, write_var_i32};
fuzz_target!(|data: Vec<i32>| {
for i in data {
let mut buf1 = Vec::new();
write_var_i32(&mut buf1, i).unwrap();
let mut buf2 = Vec::new();
buf2.write_var(i);
assert_eq!(read_var_i32(&buf1).unwrap().1, i);
assert_eq!(read_var_i32(&buf2).unwrap().1, i);
}
});

View File

@@ -0,0 +1,17 @@
#![no_main]
use libfuzzer_sys::fuzz_target;
use y_octo::write_var_i32;
fuzz_target!(|data: Vec<i32>| {
use lib0::encoding::Write;
for i in data {
let mut buf1 = Vec::new();
write_var_i32(&mut buf1, i).unwrap();
let mut buf2 = Vec::new();
buf2.write_var(i);
assert_eq!(buf1, buf2);
}
});

View File

@@ -0,0 +1,34 @@
#![no_main]
use libfuzzer_sys::fuzz_target;
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha20Rng;
use y_octo::*;
fuzz_target!(|seed: u64| {
// println!("seed: {}", seed);
let doc = Doc::with_client(1);
let mut rand = ChaCha20Rng::seed_from_u64(seed);
let mut text = doc.get_or_create_text("test").unwrap();
text.insert(0, "This is a string with length 32.").unwrap();
let iteration = 20;
let mut len = 32;
for i in 0..iteration {
let mut text = text.clone();
let ins = i % 2 == 0;
let pos = rand.random_range(0..if ins { text.len() } else { len / 2 });
if ins {
let str = format!("hello {i}");
text.insert(pos, &str).unwrap();
len += str.len() as u64;
} else {
text.remove(pos, 6).unwrap();
len -= 6;
}
}
assert_eq!(text.to_string().len(), len as usize);
assert_eq!(text.len(), len);
});

View File

@@ -0,0 +1,20 @@
#![no_main]
use libfuzzer_sys::fuzz_target;
use y_octo::{read_sync_message, write_sync_message};
fuzz_target!(|data: &[u8]| {
let result = read_sync_message(data);
if let Ok((_, msg)) = result {
// ensure decoding and re-encoding produce the same result
let mut buffer = Vec::new();
if let Err(e) = write_sync_message(&mut buffer, &msg) {
panic!("Failed to write message: {:?}, {:?}", msg, e);
}
let result = read_sync_message(&buffer);
if let Ok((_, msg2)) = result {
assert_eq!(msg, msg2);
}
}
});

View File

@@ -0,0 +1,18 @@
#![no_main]
use lib0::encoding::Write;
use libfuzzer_sys::fuzz_target;
use y_octo::{read_var_u64, write_var_u64};
fuzz_target!(|data: Vec<u64>| {
for i in data {
let mut buf1 = Vec::new();
write_var_u64(&mut buf1, i).unwrap();
let mut buf2 = Vec::new();
buf2.write_var(i);
assert_eq!(read_var_u64(&buf1).unwrap().1, i);
assert_eq!(read_var_u64(&buf2).unwrap().1, i);
}
});

View File

@@ -0,0 +1,17 @@
#![no_main]
use lib0::encoding::Write;
use libfuzzer_sys::fuzz_target;
use y_octo::write_var_u64;
fuzz_target!(|data: Vec<u64>| {
for i in data {
let mut buf1 = Vec::new();
buf1.write_var(i);
let mut buf2 = Vec::new();
write_var_u64(&mut buf2, i).unwrap();
assert_eq!(buf1, buf2);
}
});

View File

@@ -0,0 +1,78 @@
use super::*;
#[cfg(test)]
mod tests {
use super::*;
use lib0::encoding::Write;
fn test_var_uint_enc_dec(num: u64) {
let mut buf1 = Vec::new();
write_var_u64(&mut buf1, num).unwrap();
let mut buf2 = Vec::new();
buf2.write_var(num);
{
let (rest, decoded_num) = read_var_u64(&buf1).unwrap();
assert_eq!(num, decoded_num);
assert_eq!(rest.len(), 0);
}
{
let (rest, decoded_num) = read_var_u64(&buf2).unwrap();
assert_eq!(num, decoded_num);
assert_eq!(rest.len(), 0);
}
}
fn test_var_int_enc_dec(num: i32) {
{
let mut buf1: Vec<u8> = Vec::new();
write_var_i32(&mut buf1, num).unwrap();
let (rest, decoded_num) = read_var_i32(&buf1).unwrap();
assert_eq!(num, decoded_num);
assert_eq!(rest.len(), 0);
}
{
let mut buf2 = Vec::new();
buf2.write_var(num);
let (rest, decoded_num) = read_var_i32(&buf2).unwrap();
assert_eq!(num, decoded_num);
assert_eq!(rest.len(), 0);
}
}
#[test]
fn test_var_uint_codec() {
test_var_uint_enc_dec(0);
test_var_uint_enc_dec(1);
test_var_uint_enc_dec(127);
test_var_uint_enc_dec(0b1000_0000);
test_var_uint_enc_dec(0b1_0000_0000);
test_var_uint_enc_dec(0b1_1111_1111);
test_var_uint_enc_dec(0b10_0000_0000);
test_var_uint_enc_dec(0b11_1111_1111);
test_var_uint_enc_dec(0x7fff_ffff_ffff_ffff);
test_var_uint_enc_dec(u64::MAX);
}
#[test]
fn test_var_int() {
test_var_int_enc_dec(0);
test_var_int_enc_dec(1);
test_var_int_enc_dec(-1);
test_var_int_enc_dec(63);
test_var_int_enc_dec(-63);
test_var_int_enc_dec(64);
test_var_int_enc_dec(-64);
test_var_int_enc_dec(i32::MAX);
test_var_int_enc_dec(i32::MIN);
test_var_int_enc_dec(((1 << 20) - 1) * 8);
test_var_int_enc_dec(-((1 << 20) - 1) * 8);
}
}

View File

@@ -0,0 +1,21 @@
#[cfg(test)]
mod tests {
use y_octo::Doc;
use yrs::{Map, Transact};
#[test]
fn test_basic_yrs_binary_compatibility() {
let yrs_doc = yrs::Doc::new();
let map = yrs_doc.get_or_insert_map("abc");
let mut trx = yrs_doc.transact_mut();
map.insert(&mut trx, "a", 1);
let binary_from_yrs = trx.encode_update_v1();
let doc = Doc::try_from_binary_v1(&binary_from_yrs).unwrap();
let binary = doc.encode_update_v1().unwrap();
assert_eq!(binary_from_yrs, binary);
}
}

View File

@@ -0,0 +1,5 @@
pub mod types;
pub mod yrs_op;
pub use types::*;
pub use yrs_op::*;

View File

@@ -0,0 +1,63 @@
use yrs::{ArrayRef, MapRef, TextRef, XmlFragmentRef, XmlTextRef};
pub const NEST_DATA_INSERT: &str = "insert";
pub const NEST_DATA_DELETE: &str = "delete";
pub const NEST_DATA_CLEAR: &str = "clear";
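// Fuzzer input vocabulary: each `CRDTParam` (derived via `arbitrary`) describes
// one operation to perform against a nested CRDT container.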
#[derive(Hash, PartialEq, Eq, Clone, Debug, arbitrary::Arbitrary)]
pub enum OpType {
HandleCurrent,
CreateCRDTNestType,
}
#[derive(Hash, PartialEq, Eq, Clone, Debug, arbitrary::Arbitrary)]
pub enum NestDataOpType {
Insert,
Delete,
Clear,
}
#[derive(PartialEq, Clone, Debug, arbitrary::Arbitrary)]
pub struct CRDTParam {
pub op_type: OpType,
pub new_nest_type: CRDTNestType,
pub manipulate_source: ManipulateSource,
pub insert_pos: InsertPos,
pub key: String,
pub value: String,
pub nest_data_op_type: NestDataOpType,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, arbitrary::Arbitrary)]
pub enum CRDTNestType {
Array,
Map,
Text,
XMLElement,
XMLFragment,
XMLText,
}
#[derive(Debug, Clone, PartialEq, arbitrary::Arbitrary)]
pub enum ManipulateSource {
NewNestTypeFromYDocRoot,
CurrentNestType,
NewNestTypeFromCurrent,
}
#[derive(Debug, Clone, PartialEq, arbitrary::Arbitrary)]
pub enum InsertPos {
BEGIN,
MID,
END,
}
#[derive(Clone)]
pub enum YrsNestType {
ArrayType(ArrayRef),
MapType(MapRef),
TextType(TextRef),
XMLElementType(XmlFragmentRef),
XMLFragmentType(XmlFragmentRef),
XMLTextType(XmlTextRef),
}

View File

@@ -0,0 +1,172 @@
use phf::phf_map;
use super::*;
fn insert_op(doc: &yrs::Doc, nest_input: &YrsNestType, params: CRDTParam) {
let array = match nest_input {
YrsNestType::ArrayType(array) => array,
_ => unreachable!(),
};
let mut trx = doc.transact_mut();
let len = array.len(&trx);
let index = random_pick_num(len, &params.insert_pos);
array.insert(&mut trx, index, params.value);
}
fn delete_op(doc: &yrs::Doc, nest_input: &YrsNestType, params: CRDTParam) {
let array = match nest_input {
YrsNestType::ArrayType(array) => array,
_ => unreachable!(),
};
let mut trx = doc.transact_mut();
let len = array.len(&trx);
if len >= 1 {
let index = random_pick_num(len - 1, &params.insert_pos);
array.remove(&mut trx, index);
}
}
fn clear_op(doc: &yrs::Doc, nest_input: &YrsNestType, _params: CRDTParam) {
let array = match nest_input {
YrsNestType::ArrayType(array) => array,
_ => unreachable!(),
};
let mut trx = doc.transact_mut();
let len = array.len(&trx);
for _ in 0..len {
array.remove(&mut trx, 0);
}
}
pub static ARRAY_OPS: TestOps = phf_map! {
"insert" => insert_op,
"delete" => delete_op,
"clear" => clear_op,
};
pub fn yrs_create_array_from_nest_type(
doc: &yrs::Doc,
current: &mut YrsNestType,
insert_pos: &InsertPos,
key: String,
) -> Option<ArrayRef> {
let cal_index = |len: u32| -> u32 {
match insert_pos {
InsertPos::BEGIN => 0,
InsertPos::MID => len / 2,
InsertPos::END => len,
}
};
let mut trx = doc.transact_mut();
let array_prelim = ArrayPrelim::default();
match current {
YrsNestType::ArrayType(array) => {
let index = cal_index(array.len(&trx));
Some(array.insert(&mut trx, index, array_prelim))
}
YrsNestType::MapType(map) => Some(map.insert(&mut trx, key, array_prelim)),
YrsNestType::TextType(text) => {
let str = text.get_string(&trx);
let len = str.chars().fold(0, |acc, _| acc + 1);
let index = random_pick_num(len, insert_pos) as usize;
let byte_start_offset = str
.chars()
.take(index)
.fold(0, |acc, ch| acc + ch.len_utf8());
Some(text.insert_embed(&mut trx, byte_start_offset as u32, array_prelim))
}
YrsNestType::XMLTextType(xml_text) => {
let str = xml_text.get_string(&trx);
let len = str.chars().fold(0, |acc, _| acc + 1);
let index = random_pick_num(len, insert_pos) as usize;
let byte_start_offset = str
.chars()
.take(index)
.fold(0, |acc, ch| acc + ch.len_utf8());
Some(xml_text.insert_embed(&mut trx, byte_start_offset as u32, array_prelim))
}
_ => None,
}
}
#[cfg(test)]
mod tests {
use yrs::Doc;
use super::*;
#[test]
fn test_gen_array_ref_ops() {
let doc = Doc::new();
let array_ref = doc.get_or_insert_array("test_array");
let ops_registry = OpsRegistry::new();
let mut params = CRDTParam {
op_type: OpType::CreateCRDTNestType,
new_nest_type: CRDTNestType::Array,
manipulate_source: ManipulateSource::NewNestTypeFromYDocRoot,
insert_pos: InsertPos::BEGIN,
key: String::from("test_key"),
value: String::from("test_value"),
nest_data_op_type: NestDataOpType::Insert,
};
ops_registry.operate_yrs_nest_type(
&doc,
YrsNestType::ArrayType(array_ref.clone()),
params.clone(),
);
assert_eq!(array_ref.len(&doc.transact()), 1);
params.nest_data_op_type = NestDataOpType::Delete;
ops_registry.operate_yrs_nest_type(
&doc,
YrsNestType::ArrayType(array_ref.clone()),
params.clone(),
);
assert_eq!(array_ref.len(&doc.transact()), 0);
params.nest_data_op_type = NestDataOpType::Clear;
ops_registry.operate_yrs_nest_type(
&doc,
YrsNestType::ArrayType(array_ref.clone()),
params.clone(),
);
assert_eq!(array_ref.len(&doc.transact()), 0);
}
#[test]
fn test_yrs_create_array_from_nest_type() {
let doc = Doc::new();
let array_ref = doc.get_or_insert_array("test_array");
let key = String::from("test_key");
let new_array_ref = yrs_create_array_from_nest_type(
&doc,
&mut YrsNestType::ArrayType(array_ref.clone()),
&InsertPos::BEGIN,
key.clone(),
);
assert!(new_array_ref.is_some());
let map_ref = doc.get_or_insert_map("test_map");
let new_array_ref = yrs_create_array_from_nest_type(
&doc,
&mut YrsNestType::MapType(map_ref.clone()),
&InsertPos::BEGIN,
key.clone(),
);
assert!(new_array_ref.is_some());
let text_ref = doc.get_or_insert_text("test_text");
let new_array_ref = yrs_create_array_from_nest_type(
&doc,
&mut YrsNestType::TextType(text_ref.clone()),
&InsertPos::BEGIN,
key.clone(),
);
assert!(new_array_ref.is_some());
}
}

View File

@@ -0,0 +1,168 @@
use phf::phf_map;
use super::*;
fn insert_op(doc: &yrs::Doc, nest_input: &YrsNestType, params: CRDTParam) {
let map = match nest_input {
YrsNestType::MapType(map) => map,
_ => unreachable!(),
};
let mut trx = doc.transact_mut();
map.insert(&mut trx, params.key, params.value);
}
fn remove_op(doc: &yrs::Doc, nest_input: &YrsNestType, params: CRDTParam) {
let map = match nest_input {
YrsNestType::MapType(map) => map,
_ => unreachable!(),
};
let rand_key = {
let trx = doc.transact_mut();
let mut iter = map.iter(&trx);
let len = map.len(&trx) as usize;
let skip_step = if len <= 1 {
0
} else {
random_pick_num((len - 1) as u32, &params.insert_pos)
};
iter
.nth(skip_step as usize)
.map(|(key, _value)| key.to_string())
};
if let Some(key) = rand_key {
let mut trx = doc.transact_mut();
map.remove(&mut trx, &key).unwrap();
}
}
fn clear_op(doc: &yrs::Doc, nest_input: &YrsNestType, _params: CRDTParam) {
let map = match nest_input {
YrsNestType::MapType(map) => map,
_ => unreachable!(),
};
let mut trx = doc.transact_mut();
map.clear(&mut trx);
}
pub static MAP_OPS: TestOps = phf_map! {
"insert" => insert_op,
"delete" => remove_op,
"clear" => clear_op,
};
pub fn yrs_create_map_from_nest_type(
doc: &yrs::Doc,
current: &mut YrsNestType,
insert_pos: &InsertPos,
key: String,
) -> Option<MapRef> {
let cal_index = |len: u32| -> u32 {
match insert_pos {
InsertPos::BEGIN => 0,
InsertPos::MID => len / 2,
InsertPos::END => len,
}
};
let mut trx = doc.transact_mut();
let map_prelim = MapPrelim::from([("deepkey".to_owned(), "deepvalue")]);
match current {
YrsNestType::ArrayType(array) => {
let index = cal_index(array.len(&trx));
Some(array.insert(&mut trx, index, map_prelim))
}
YrsNestType::MapType(map) => Some(map.insert(&mut trx, key, map_prelim)),
YrsNestType::TextType(text) => {
let str = text.get_string(&trx);
let len = str.chars().fold(0, |acc, _| acc + 1);
let index = random_pick_num(len, insert_pos) as usize;
let byte_start_offset = str
.chars()
.take(index)
.fold(0, |acc, ch| acc + ch.len_utf8());
Some(text.insert_embed(&mut trx, byte_start_offset as u32, map_prelim))
}
YrsNestType::XMLTextType(xml_text) => {
let str = xml_text.get_string(&trx);
let len = str.chars().fold(0, |acc, _| acc + 1);
let index = random_pick_num(len, insert_pos) as usize;
let byte_start_offset = str
.chars()
.take(index)
.fold(0, |acc, ch| acc + ch.len_utf8());
Some(xml_text.insert_embed(&mut trx, byte_start_offset as u32, map_prelim))
}
_ => None,
}
}
#[cfg(test)]
mod tests {
use yrs::Doc;
use super::*;
#[test]
fn test_gen_map_ref_ops() {
let doc = Doc::new();
let map_ref = doc.get_or_insert_map("test_map");
let ops_registry = OpsRegistry::new();
let mut params = CRDTParam {
op_type: OpType::CreateCRDTNestType,
new_nest_type: CRDTNestType::Map,
manipulate_source: ManipulateSource::NewNestTypeFromYDocRoot,
insert_pos: InsertPos::BEGIN,
key: String::from("test_key"),
value: String::from("test_value"),
nest_data_op_type: NestDataOpType::Insert,
};
ops_registry.operate_yrs_nest_type(&doc, YrsNestType::MapType(map_ref.clone()), params.clone());
assert_eq!(map_ref.len(&doc.transact()), 1);
params.nest_data_op_type = NestDataOpType::Delete;
ops_registry.operate_yrs_nest_type(&doc, YrsNestType::MapType(map_ref.clone()), params.clone());
assert_eq!(map_ref.len(&doc.transact()), 0);
params.nest_data_op_type = NestDataOpType::Clear;
ops_registry.operate_yrs_nest_type(&doc, YrsNestType::MapType(map_ref.clone()), params.clone());
assert_eq!(map_ref.len(&doc.transact()), 0);
}
#[test]
fn test_yrs_create_map_from_nest_type() {
let doc = Doc::new();
let map_ref = doc.get_or_insert_map("test_map");
let key = String::from("test_key");
let new_map_ref = yrs_create_map_from_nest_type(
&doc,
&mut YrsNestType::MapType(map_ref.clone()),
&InsertPos::BEGIN,
key.clone(),
);
assert!(new_map_ref.is_some());
let map_ref = doc.get_or_insert_map("test_map");
let new_map_ref = yrs_create_map_from_nest_type(
&doc,
&mut YrsNestType::MapType(map_ref.clone()),
&InsertPos::BEGIN,
key.clone(),
);
assert!(new_map_ref.is_some());
let text_ref = doc.get_or_insert_text("test_text");
let new_map_ref = yrs_create_map_from_nest_type(
&doc,
&mut YrsNestType::TextType(text_ref.clone()),
&InsertPos::BEGIN,
key.clone(),
);
assert!(new_map_ref.is_some());
}
}

View File

@@ -0,0 +1,193 @@
pub mod array;
pub mod map;
pub mod text;
pub mod xml_element;
pub mod xml_fragment;
pub mod xml_text;
use std::collections::HashMap;
use array::*;
use map::*;
use text::*;
use xml_element::*;
use xml_fragment::*;
use xml_text::*;
use yrs::{
Array, ArrayPrelim, ArrayRef, Doc, GetString, Map, MapPrelim, MapRef, Text, TextPrelim, TextRef,
Transact, XmlFragment, XmlTextPrelim, XmlTextRef,
};
use super::*;
type TestOp = fn(doc: &Doc, nest_input: &YrsNestType, params: CRDTParam) -> ();
type TestOps = phf::Map<&'static str, TestOp>;
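// Registry mapping each nested CRDT kind to its phf table of named operations
// ("insert" / "delete" / "clear"), so fuzz inputs can be dispatched uniformly.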
pub struct OpsRegistry<'a>(HashMap<CRDTNestType, &'a TestOps>);
impl Default for OpsRegistry<'_> {
fn default() -> Self {
OpsRegistry::new()
}
}
impl OpsRegistry<'_> {
pub fn new() -> Self {
let mut map = HashMap::new();
map.insert(CRDTNestType::Map, &MAP_OPS);
map.insert(CRDTNestType::Array, &ARRAY_OPS);
map.insert(CRDTNestType::Text, &TEXT_OPS);
map.insert(CRDTNestType::XMLElement, &XML_ELEMENT_OPS);
map.insert(CRDTNestType::XMLText, &XML_TEXT_OPS);
map.insert(CRDTNestType::XMLFragment, &XML_FRAGMENT_OPS);
OpsRegistry(map)
}
pub fn get_ops(&self, crdt_nest_type: &CRDTNestType) -> &TestOps {
match crdt_nest_type {
CRDTNestType::Map => self.0.get(&CRDTNestType::Map).unwrap(),
CRDTNestType::Array => self.0.get(&CRDTNestType::Array).unwrap(),
CRDTNestType::Text => self.0.get(&CRDTNestType::Text).unwrap(),
CRDTNestType::XMLElement => self.0.get(&CRDTNestType::XMLElement).unwrap(),
CRDTNestType::XMLFragment => self.0.get(&CRDTNestType::XMLFragment).unwrap(),
CRDTNestType::XMLText => self.0.get(&CRDTNestType::XMLText).unwrap(),
}
}
pub fn get_ops_from_yrs_nest_type(&self, yrs_nest_type: &YrsNestType) -> &TestOps {
match yrs_nest_type {
YrsNestType::MapType(_) => self.get_ops(&CRDTNestType::Map),
YrsNestType::ArrayType(_) => self.get_ops(&CRDTNestType::Array),
YrsNestType::TextType(_) => self.get_ops(&CRDTNestType::Text),
YrsNestType::XMLElementType(_) => self.get_ops(&CRDTNestType::XMLElement),
YrsNestType::XMLTextType(_) => self.get_ops(&CRDTNestType::XMLText),
YrsNestType::XMLFragmentType(_) => self.get_ops(&CRDTNestType::XMLFragment),
}
}
pub fn operate_yrs_nest_type(
&self,
doc: &yrs::Doc,
cur_crdt_nest_type: YrsNestType,
crdt_param: CRDTParam,
) {
let ops = self.get_ops_from_yrs_nest_type(&cur_crdt_nest_type);
ops
.get(match &crdt_param.nest_data_op_type {
NestDataOpType::Insert => NEST_DATA_INSERT,
NestDataOpType::Delete => NEST_DATA_DELETE,
NestDataOpType::Clear => NEST_DATA_CLEAR,
})
.unwrap()(doc, &cur_crdt_nest_type, crdt_param)
}
}
pub fn yrs_create_nest_type_from_root(
doc: &yrs::Doc,
target_type: CRDTNestType,
key: &str,
) -> YrsNestType {
match target_type {
CRDTNestType::Array => YrsNestType::ArrayType(doc.get_or_insert_array(key)),
CRDTNestType::Map => YrsNestType::MapType(doc.get_or_insert_map(key)),
CRDTNestType::Text => YrsNestType::TextType(doc.get_or_insert_text(key)),
CRDTNestType::XMLElement => YrsNestType::XMLElementType(doc.get_or_insert_xml_fragment(key)),
CRDTNestType::XMLFragment => YrsNestType::XMLFragmentType(doc.get_or_insert_xml_fragment(key)),
CRDTNestType::XMLText => {
YrsNestType::XMLTextType((AsRef::<XmlTextRef>::as_ref(&doc.get_or_insert_text(key))).clone())
}
}
}
pub fn gen_nest_type_from_root(doc: &mut Doc, crdt_param: &CRDTParam) -> Option<YrsNestType> {
match crdt_param.new_nest_type {
CRDTNestType::Array => Some(yrs_create_nest_type_from_root(
doc,
CRDTNestType::Array,
crdt_param.key.as_str(),
)),
CRDTNestType::Map => Some(yrs_create_nest_type_from_root(
doc,
CRDTNestType::Map,
crdt_param.key.as_str(),
)),
CRDTNestType::Text => Some(yrs_create_nest_type_from_root(
doc,
CRDTNestType::Text,
crdt_param.key.as_str(),
)),
CRDTNestType::XMLText => Some(yrs_create_nest_type_from_root(
doc,
CRDTNestType::XMLText,
crdt_param.key.as_str(),
)),
CRDTNestType::XMLElement => Some(yrs_create_nest_type_from_root(
doc,
CRDTNestType::XMLElement,
crdt_param.key.as_str(),
)),
CRDTNestType::XMLFragment => Some(yrs_create_nest_type_from_root(
doc,
CRDTNestType::XMLFragment,
crdt_param.key.as_str(),
)),
}
}
pub fn gen_nest_type_from_nest_type(
doc: &mut Doc,
crdt_param: CRDTParam,
nest_type: &mut YrsNestType,
) -> Option<YrsNestType> {
match crdt_param.new_nest_type {
CRDTNestType::Array => {
yrs_create_array_from_nest_type(doc, nest_type, &crdt_param.insert_pos, crdt_param.key)
.map(YrsNestType::ArrayType)
}
CRDTNestType::Map => {
yrs_create_map_from_nest_type(doc, nest_type, &crdt_param.insert_pos, crdt_param.key)
.map(YrsNestType::MapType)
}
CRDTNestType::Text => {
yrs_create_text_from_nest_type(doc, nest_type, &crdt_param.insert_pos, crdt_param.key)
.map(YrsNestType::TextType)
}
_ => None,
}
}
pub fn random_pick_num(len: u32, insert_pos: &InsertPos) -> u32 {
match insert_pos {
InsertPos::BEGIN => 0,
InsertPos::MID => len / 2,
InsertPos::END => len,
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_ops_registry_new() {
let ops_registry = OpsRegistry::new();
assert_eq!(ops_registry.0.len(), 6);
}
#[test]
fn test_ops_registry_get_ops() {
let ops_registry = OpsRegistry::new();
let ops = ops_registry.get_ops(&CRDTNestType::Array);
assert!(!ops.is_empty());
}
#[test]
fn test_ops_registry_get_ops_from_yrs_nest_type() {
let doc = yrs::Doc::new();
let array = doc.get_or_insert_array("array");
let ops_registry = OpsRegistry::new();
let ops = ops_registry.get_ops_from_yrs_nest_type(&YrsNestType::ArrayType(array));
assert!(!ops.is_empty());
}
}

View File

@@ -0,0 +1,180 @@
use phf::phf_map;
use super::*;
fn insert_op(doc: &yrs::Doc, nest_input: &YrsNestType, params: CRDTParam) {
let text = match nest_input {
YrsNestType::TextType(text) => text,
_ => unreachable!(),
};
let mut trx = doc.transact_mut();
let str = text.get_string(&trx);
let len = str.chars().fold(0, |acc, _| acc + 1);
let index = random_pick_num(len, &params.insert_pos) as usize;
let byte_start_offset = str
.chars()
.take(index)
.fold(0, |acc, ch| acc + ch.len_utf8());
text.insert(&mut trx, byte_start_offset as u32, &params.value);
}
fn remove_op(doc: &yrs::Doc, nest_input: &YrsNestType, params: CRDTParam) {
let text = match nest_input {
YrsNestType::TextType(text) => text,
_ => unreachable!(),
};
let mut trx = doc.transact_mut();
let str = text.get_string(&trx);
let len = str.chars().fold(0, |acc, _| acc + 1);
if len < 1 {
return;
}
let index = random_pick_num(len - 1, &params.insert_pos) as usize;
let byte_start_offset = str
.chars()
.take(index)
.fold(0, |acc, ch| acc + ch.len_utf8());
let char_byte_len = str.chars().nth(index).unwrap().len_utf8();
text.remove_range(&mut trx, byte_start_offset as u32, char_byte_len as u32);
}
fn clear_op(doc: &yrs::Doc, nest_input: &YrsNestType, _params: CRDTParam) {
let text = match nest_input {
YrsNestType::TextType(text) => text,
_ => unreachable!(),
};
let mut trx = doc.transact_mut();
let str = text.get_string(&trx);
let byte_len = str.chars().fold(0, |acc, ch| acc + ch.len_utf8());
text.remove_range(&mut trx, 0, byte_len as u32);
}
pub static TEXT_OPS: TestOps = phf_map! {
"insert" => insert_op,
"delete" => remove_op,
"clear" => clear_op,
};
pub fn yrs_create_text_from_nest_type(
doc: &yrs::Doc,
current: &mut YrsNestType,
insert_pos: &InsertPos,
key: String,
) -> Option<TextRef> {
let cal_index_closure = |len: u32| -> u32 { random_pick_num(len, insert_pos) };
let mut trx = doc.transact_mut();
let text_prelim = TextPrelim::new("");
match current {
YrsNestType::ArrayType(array) => {
let index = cal_index_closure(array.len(&trx));
Some(array.insert(&mut trx, index, text_prelim))
}
YrsNestType::MapType(map) => Some(map.insert(&mut trx, key, text_prelim)),
YrsNestType::TextType(text) => {
let str = text.get_string(&trx);
let len = str.chars().fold(0, |acc, _| acc + 1);
let index = random_pick_num(len, insert_pos) as usize;
let byte_start_offset = str
.chars()
.take(index)
.fold(0, |acc, ch| acc + ch.len_utf8());
Some(text.insert_embed(&mut trx, byte_start_offset as u32, text_prelim))
}
YrsNestType::XMLTextType(xml_text) => {
let str = xml_text.get_string(&trx);
let len = str.chars().fold(0, |acc, _| acc + 1);
let index = random_pick_num(len, insert_pos) as usize;
let byte_start_offset = str
.chars()
.take(index)
.fold(0, |acc, ch| acc + ch.len_utf8());
Some(xml_text.insert_embed(&mut trx, byte_start_offset as u32, text_prelim))
}
_ => None,
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_gen_text_ref_ops() {
let doc = Doc::new();
let text_ref = doc.get_or_insert_text("test_text");
let ops_registry = OpsRegistry::new();
let mut params = CRDTParam {
op_type: OpType::CreateCRDTNestType,
new_nest_type: CRDTNestType::Text,
manipulate_source: ManipulateSource::NewNestTypeFromYDocRoot,
insert_pos: InsertPos::BEGIN,
key: String::from("test_key"),
value: String::from("test_value"),
nest_data_op_type: NestDataOpType::Insert,
};
ops_registry.operate_yrs_nest_type(
&doc,
YrsNestType::TextType(text_ref.clone()),
params.clone(),
);
assert_eq!(text_ref.len(&doc.transact()), 10);
params.nest_data_op_type = NestDataOpType::Delete;
ops_registry.operate_yrs_nest_type(
&doc,
YrsNestType::TextType(text_ref.clone()),
params.clone(),
);
assert_eq!(text_ref.len(&doc.transact()), 9);
params.nest_data_op_type = NestDataOpType::Clear;
ops_registry.operate_yrs_nest_type(
&doc,
YrsNestType::TextType(text_ref.clone()),
params.clone(),
);
assert_eq!(text_ref.len(&doc.transact()), 0);
}
#[test]
fn test_yrs_create_text_from_nest_type() {
let doc = Doc::new();
let array_ref = doc.get_or_insert_array("test_array");
let key = String::from("test_key");
let next_text_ref = yrs_create_text_from_nest_type(
&doc,
&mut YrsNestType::ArrayType(array_ref.clone()),
&InsertPos::BEGIN,
key.clone(),
);
assert!(next_text_ref.is_some());
let map_ref = doc.get_or_insert_map("test_map");
let next_text_ref = yrs_create_text_from_nest_type(
&doc,
&mut YrsNestType::MapType(map_ref.clone()),
&InsertPos::BEGIN,
key.clone(),
);
assert!(next_text_ref.is_some());
let text_ref = doc.get_or_insert_text("test_text");
let next_text_ref = yrs_create_text_from_nest_type(
&doc,
&mut YrsNestType::TextType(text_ref.clone()),
&InsertPos::BEGIN,
key.clone(),
);
assert!(next_text_ref.is_some());
}
}

View File

@@ -0,0 +1,45 @@
use phf::phf_map;
use super::*;
fn insert_op(doc: &yrs::Doc, nest_input: &YrsNestType, params: CRDTParam) {
let xml_element = match nest_input {
YrsNestType::XMLElementType(xml_element) => xml_element,
_ => unreachable!(),
};
let mut trx = doc.transact_mut();
let len = xml_element.len(&trx);
let index = random_pick_num(len, &params.insert_pos);
xml_element.insert(&mut trx, index, XmlTextPrelim::new(params.value));
}
fn remove_op(doc: &yrs::Doc, nest_input: &YrsNestType, params: CRDTParam) {
let xml_element = match nest_input {
YrsNestType::XMLElementType(xml_element) => xml_element,
_ => unreachable!(),
};
let mut trx = doc.transact_mut();
let len = xml_element.len(&trx);
if len >= 1 {
let index = random_pick_num(len - 1, &params.insert_pos);
xml_element.remove_range(&mut trx, index, 1);
}
}
fn clear_op(doc: &yrs::Doc, nest_input: &YrsNestType, _params: CRDTParam) {
let xml_element = match nest_input {
YrsNestType::XMLElementType(xml_element) => xml_element,
_ => unreachable!(),
};
let mut trx = doc.transact_mut();
let len = xml_element.len(&trx);
for _ in 0..len {
xml_element.remove_range(&mut trx, 0, 1);
}
}
pub static XML_ELEMENT_OPS: TestOps = phf_map! {
"insert" => insert_op,
"delete" => remove_op,
"clear" => clear_op,
};

View File

@@ -0,0 +1,45 @@
use phf::phf_map;
use super::*;
fn insert_op(doc: &yrs::Doc, nest_input: &YrsNestType, params: CRDTParam) {
let xml_fragment = match nest_input {
YrsNestType::XMLFragmentType(xml_fragment) => xml_fragment,
_ => unreachable!(),
};
let mut trx = doc.transact_mut();
let len = xml_fragment.len(&trx);
let index = random_pick_num(len, &params.insert_pos);
xml_fragment.insert(&mut trx, index, XmlTextPrelim::new(params.value));
}
fn remove_op(doc: &yrs::Doc, nest_input: &YrsNestType, params: CRDTParam) {
let xml_fragment = match nest_input {
YrsNestType::XMLFragmentType(xml_fragment) => xml_fragment,
_ => unreachable!(),
};
let mut trx = doc.transact_mut();
let len = xml_fragment.len(&trx);
if len >= 1 {
let index = random_pick_num(len - 1, &params.insert_pos);
xml_fragment.remove_range(&mut trx, index, 1);
}
}
fn clear_op(doc: &yrs::Doc, nest_input: &YrsNestType, _params: CRDTParam) {
let xml_fragment = match nest_input {
YrsNestType::XMLFragmentType(xml_fragment) => xml_fragment,
_ => unreachable!(),
};
let mut trx = doc.transact_mut();
let len = xml_fragment.len(&trx);
for _ in 0..len {
xml_fragment.remove_range(&mut trx, 0, 1);
}
}
pub static XML_FRAGMENT_OPS: TestOps = phf_map! {
"insert" => insert_op,
"delete" => remove_op,
"clear" => clear_op,
};

View File

@@ -0,0 +1,62 @@
use phf::phf_map;
use super::*;
fn insert_op(doc: &yrs::Doc, nest_input: &YrsNestType, params: CRDTParam) {
let xml_text = match nest_input {
YrsNestType::XMLTextType(xml_text) => xml_text,
_ => unreachable!(),
};
let mut trx = doc.transact_mut();
let str = xml_text.get_string(&trx);
let len = str.chars().fold(0, |acc, _| acc + 1);
let index = random_pick_num(len, &params.insert_pos) as usize;
let byte_start_offset = str
.chars()
.take(index)
.fold(0, |acc, ch| acc + ch.len_utf8());
xml_text.insert(&mut trx, byte_start_offset as u32, &params.value);
}
fn remove_op(doc: &yrs::Doc, nest_input: &YrsNestType, params: CRDTParam) {
let xml_text = match nest_input {
YrsNestType::XMLTextType(xml_text) => xml_text,
_ => unreachable!(),
};
let mut trx = doc.transact_mut();
let str = xml_text.get_string(&trx);
let len = str.chars().fold(0, |acc, _| acc + 1);
if len < 1 {
return;
}
let index = random_pick_num(len - 1, &params.insert_pos) as usize;
let byte_start_offset = str
.chars()
.take(index)
.fold(0, |acc, ch| acc + ch.len_utf8());
let char_byte_len = str.chars().nth(index).unwrap().len_utf8();
xml_text.remove_range(&mut trx, byte_start_offset as u32, char_byte_len as u32);
}
fn clear_op(doc: &yrs::Doc, nest_input: &YrsNestType, _params: CRDTParam) {
let xml_text = match nest_input {
YrsNestType::XMLTextType(xml_text) => xml_text,
_ => unreachable!(),
};
let mut trx = doc.transact_mut();
let str = xml_text.get_string(&trx);
let byte_len = str.chars().fold(0, |acc, ch| acc + ch.len_utf8());
xml_text.remove_range(&mut trx, 0, byte_len as u32);
}
pub static XML_TEXT_OPS: TestOps = phf_map! {
"insert" => insert_op,
"delete" => remove_op,
"clear" => clear_op,
};

View File

@@ -0,0 +1,7 @@
mod doc;
#[cfg(feature = "fuzz")]
pub mod doc_operation;
#[cfg(feature = "fuzz")]
pub use doc_operation::*;