mirror of
https://github.com/toeverything/AFFiNE.git
synced 2026-02-12 12:28:42 +00:00
feat(ios): nbstore swift native binding (#9211)
This commit is contained in:
@@ -12,8 +12,22 @@ name = "uniffi-bindgen"
|
||||
path = "uniffi-bindgen.rs"
|
||||
|
||||
[dependencies]
|
||||
affine_common = { workspace = true }
|
||||
uniffi = { version = "0.28", features = ["cli"] }
|
||||
affine_common = { workspace = true }
|
||||
affine_nbstore = { workspace = true, features = ["use-as-lib"] }
|
||||
anyhow = { workspace = true }
|
||||
base64-simd = { workspace = true }
|
||||
chrono = { workspace = true }
|
||||
dashmap = { workspace = true }
|
||||
sqlx = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
uniffi = { workspace = true, features = ["cli"] }
|
||||
|
||||
[target.'cfg(any(target_os = "ios", target_os = "macos"))'.dependencies]
|
||||
objc2 = { workspace = true }
|
||||
objc2-foundation = { workspace = true, features = ["NSArray", "NSFileManager", "NSPathUtilities", "NSString", "NSURL"] }
|
||||
|
||||
[target.'cfg(not(any(target_os = "ios", target_os = "macos")))'.dependencies]
|
||||
homedir = { workspace = true }
|
||||
|
||||
[build-dependencies]
|
||||
uniffi = { version = "0.28", features = ["build"] }
|
||||
uniffi = { workspace = true, features = ["build"] }
|
||||
|
||||
29
packages/frontend/mobile-native/src/error.rs
Normal file
29
packages/frontend/mobile-native/src/error.rs
Normal file
@@ -0,0 +1,29 @@
|
||||
use thiserror::Error;
|
||||
|
||||
/// Errors surfaced across the UniFFI boundary to the Swift side.
///
/// Payload-carrying variants hold pre-rendered `String` messages (rather than
/// the source error types) so every variant is FFI-representable.
#[derive(uniffi::Error, Error, Debug)]
pub enum UniffiError {
  /// The platform document directory could not be resolved.
  #[error("Get user document directory failed")]
  GetUserDocumentDirectoryFailed,
  /// Creating the `.affine` data directory failed; payload is the path.
  #[error("Create affine dir failed: {0}")]
  CreateAffineDirFailed(String),
  /// A storage path resolved to an empty string.
  #[error("Empty doc storage path")]
  EmptyDocStoragePath,
  /// `set_space_id` was called with an empty id.
  #[error("Empty space id")]
  EmptySpaceId,
  /// Any database failure, flattened to its message (see `From<sqlx::Error>`).
  #[error("Sqlx error: {0}")]
  SqlxError(String),
  /// A base64 payload coming over the FFI boundary was malformed.
  #[error("Base64 decoding error: {0}")]
  Base64DecodingError(String),
  /// The universal id string did not match the expected segment format.
  #[error("Invalid universal storage id: {0}. It should be in format of @peer($peer);@type($type);@id($id);")]
  InvalidUniversalId(String),
  /// The space type was neither "userspace" nor "workspace".
  #[error("Invalid space type: {0}")]
  InvalidSpaceType(String),
  /// Building the per-space directory URL failed; payload is "peer:type:id".
  #[error("Concat space dir failed: {0}")]
  ConcatSpaceDirFailed(String),
}
|
||||
|
||||
impl From<sqlx::Error> for UniffiError {
|
||||
fn from(err: sqlx::Error) -> Self {
|
||||
UniffiError::SqlxError(err.to_string())
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,15 @@
|
||||
use std::fmt::Display;
|
||||
use std::str::FromStr;
|
||||
use std::time::SystemTime;
|
||||
|
||||
use affine_common::hashcash::Stamp;
|
||||
use affine_nbstore::storage;
|
||||
use dashmap::{mapref::one::RefMut, DashMap, Entry};
|
||||
|
||||
use crate::error::UniffiError;
|
||||
|
||||
mod error;
|
||||
mod utils;
|
||||
|
||||
uniffi::setup_scaffolding!("affine_mobile_native");
|
||||
|
||||
@@ -6,3 +17,680 @@ uniffi::setup_scaffolding!("affine_mobile_native");
|
||||
pub fn hashcash_mint(resource: String, bits: u32) -> String {
|
||||
Stamp::mint(resource, Some(bits)).format()
|
||||
}
|
||||
|
||||
/// FFI mirror of `affine_nbstore::DocRecord`: a full document snapshot.
#[derive(uniffi::Record)]
pub struct DocRecord {
  pub doc_id: String,
  // base64 encoded data
  pub data: String,
  // Snapshot time; converted from the store's naive-UTC timestamp.
  pub timestamp: SystemTime,
}
|
||||
|
||||
impl From<affine_nbstore::DocRecord> for DocRecord {
|
||||
fn from(record: affine_nbstore::DocRecord) -> Self {
|
||||
Self {
|
||||
doc_id: record.doc_id,
|
||||
data: base64_simd::STANDARD.encode_to_string(&record.data),
|
||||
timestamp: record.timestamp.and_utc().into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<DocRecord> for affine_nbstore::DocRecord {
|
||||
type Error = UniffiError;
|
||||
|
||||
fn try_from(record: DocRecord) -> Result<Self, Self::Error> {
|
||||
Ok(Self {
|
||||
doc_id: record.doc_id,
|
||||
data: base64_simd::STANDARD
|
||||
.decode_to_vec(record.data)
|
||||
.map_err(|e| UniffiError::Base64DecodingError(e.to_string()))?,
|
||||
timestamp: chrono::DateTime::<chrono::Utc>::from(record.timestamp).naive_utc(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// FFI mirror of `affine_nbstore::DocUpdate`: one pending update for a doc.
#[derive(uniffi::Record)]
pub struct DocUpdate {
  pub doc_id: String,
  // Creation time; converted from the store's naive-UTC timestamp.
  pub created_at: SystemTime,
  // base64 encoded data
  pub data: String,
}
|
||||
|
||||
impl From<affine_nbstore::DocUpdate> for DocUpdate {
|
||||
fn from(update: affine_nbstore::DocUpdate) -> Self {
|
||||
Self {
|
||||
doc_id: update.doc_id,
|
||||
created_at: update.created_at.and_utc().into(),
|
||||
data: base64_simd::STANDARD.encode_to_string(&update.data),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<DocUpdate> for affine_nbstore::DocUpdate {
  fn from(update: DocUpdate) -> Self {
    Self {
      doc_id: update.doc_id,
      created_at: chrono::DateTime::<chrono::Utc>::from(update.created_at).naive_utc(),
      // NOTE(review): the FFI record documents `data` as base64, but this
      // direction converts with `.into()` (raw string bytes) instead of
      // base64-decoding as `TryFrom<DocRecord>` does — confirm whether the
      // native `DocUpdate.data` really expects the base64 text verbatim.
      data: update.data.into(),
    }
  }
}
|
||||
|
||||
/// FFI mirror of `affine_nbstore::DocClock`: a per-document sync timestamp.
#[derive(uniffi::Record)]
pub struct DocClock {
  pub doc_id: String,
  // Clock value; converted from the store's naive-UTC timestamp.
  pub timestamp: SystemTime,
}
|
||||
|
||||
impl From<affine_nbstore::DocClock> for DocClock {
|
||||
fn from(clock: affine_nbstore::DocClock) -> Self {
|
||||
Self {
|
||||
doc_id: clock.doc_id,
|
||||
timestamp: clock.timestamp.and_utc().into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<DocClock> for affine_nbstore::DocClock {
|
||||
fn from(clock: DocClock) -> Self {
|
||||
Self {
|
||||
doc_id: clock.doc_id,
|
||||
timestamp: chrono::DateTime::<chrono::Utc>::from(clock.timestamp).naive_utc(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// FFI mirror of `affine_nbstore::Blob`: a stored binary object plus metadata.
#[derive(uniffi::Record)]
pub struct Blob {
  pub key: String,
  // base64 encoded data
  pub data: String,
  pub mime: String,
  // Size in bytes as reported by the store (i64 to match the DB column).
  pub size: i64,
  pub created_at: SystemTime,
}
|
||||
|
||||
impl From<affine_nbstore::Blob> for Blob {
|
||||
fn from(blob: affine_nbstore::Blob) -> Self {
|
||||
Self {
|
||||
key: blob.key,
|
||||
data: base64_simd::STANDARD.encode_to_string(&blob.data),
|
||||
mime: blob.mime,
|
||||
size: blob.size,
|
||||
created_at: blob.created_at.and_utc().into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// FFI input record for storing a blob (no size/created_at — the store
/// derives those).
#[derive(uniffi::Record)]
pub struct SetBlob {
  pub key: String,
  // base64 encoded data
  pub data: String,
  pub mime: String,
}
|
||||
|
||||
impl TryFrom<SetBlob> for affine_nbstore::SetBlob {
|
||||
type Error = UniffiError;
|
||||
|
||||
fn try_from(blob: SetBlob) -> Result<Self, Self::Error> {
|
||||
Ok(Self {
|
||||
key: blob.key,
|
||||
data: base64_simd::STANDARD
|
||||
.decode_to_vec(blob.data)
|
||||
.map_err(|e| UniffiError::Base64DecodingError(e.to_string()))?,
|
||||
mime: blob.mime,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// FFI mirror of `affine_nbstore::ListedBlob`: blob metadata for listings
/// (no payload).
#[derive(uniffi::Record)]
pub struct ListedBlob {
  pub key: String,
  pub size: i64,
  pub mime: String,
  pub created_at: SystemTime,
}
|
||||
|
||||
impl From<affine_nbstore::ListedBlob> for ListedBlob {
|
||||
fn from(blob: affine_nbstore::ListedBlob) -> Self {
|
||||
Self {
|
||||
key: blob.key,
|
||||
size: blob.size,
|
||||
mime: blob.mime,
|
||||
created_at: blob.created_at.and_utc().into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Pool of per-space SQLite storages, exposed to Swift as a uniffi object.
#[derive(uniffi::Object)]
pub struct DocStoragePool {
  // Keyed by the "universal id" string: "@peer($peer);@type($type);@id($id);"
  inner: DashMap<String, storage::SqliteDocStorage>,
}
|
||||
|
||||
impl DocStoragePool {
  /// Return the storage for `universal_id`, lazily constructing it on first
  /// use from the path derived out of the parsed id.
  ///
  /// The returned `RefMut` is a DashMap guard that holds the shard lock until
  /// dropped — callers must not touch `self.inner` for the same key while the
  /// guard is alive, or they will deadlock.
  fn ensure_storage<'a>(
    &'a self,
    universal_id: &str,
  ) -> Result<RefMut<'a, String, storage::SqliteDocStorage>, UniffiError> {
    let entry = self.inner.entry(universal_id.to_string());

    // Fast path: storage already constructed for this id.
    if let Entry::Occupied(storage) = entry {
      return Ok(storage.into_ref());
    }
    // Slow path: validate the id and resolve the on-disk database path.
    let options = parse_universal_id(entry.key())?;
    let db_path = utils::get_db_path(&options)?;
    if db_path.is_empty() {
      return Err(UniffiError::EmptyDocStoragePath);
    }
    let storage = storage::SqliteDocStorage::new(db_path);
    Ok(entry.or_insert(storage))
  }
}
|
||||
|
||||
#[uniffi::export]
|
||||
impl DocStoragePool {
|
||||
/// Initialize the database and run migrations.
|
||||
pub async fn connect(&self, universal_id: String) -> Result<(), UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(storage.connect().await?)
|
||||
}
|
||||
|
||||
pub async fn close(&self, universal_id: String) -> Result<(), UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
storage.close().await;
|
||||
self.inner.remove(&universal_id);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn is_closed(&self, universal_id: String) -> bool {
|
||||
let storage = self.ensure_storage(&universal_id).unwrap();
|
||||
storage.is_closed()
|
||||
}
|
||||
|
||||
pub async fn checkpoint(&self, universal_id: String) -> Result<(), UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(storage.checkpoint().await?)
|
||||
}
|
||||
|
||||
pub async fn validate(&self, universal_id: String) -> Result<bool, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(storage.validate().await?)
|
||||
}
|
||||
|
||||
pub async fn set_space_id(
|
||||
&self,
|
||||
universal_id: String,
|
||||
space_id: String,
|
||||
) -> Result<(), UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
if space_id.is_empty() {
|
||||
return Err(UniffiError::EmptySpaceId);
|
||||
}
|
||||
Ok(storage.set_space_id(space_id).await?)
|
||||
}
|
||||
|
||||
pub async fn push_update(
|
||||
&self,
|
||||
universal_id: String,
|
||||
doc_id: String,
|
||||
update: String,
|
||||
) -> Result<SystemTime, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(
|
||||
storage
|
||||
.push_update(
|
||||
doc_id,
|
||||
base64_simd::STANDARD
|
||||
.decode_to_vec(update)
|
||||
.map_err(|e| UniffiError::Base64DecodingError(e.to_string()))?,
|
||||
)
|
||||
.await?
|
||||
.and_utc()
|
||||
.into(),
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn get_doc_snapshot(
|
||||
&self,
|
||||
universal_id: String,
|
||||
doc_id: String,
|
||||
) -> Result<Option<DocRecord>, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(storage.get_doc_snapshot(doc_id).await?.map(Into::into))
|
||||
}
|
||||
|
||||
pub async fn set_doc_snapshot(
|
||||
&self,
|
||||
universal_id: String,
|
||||
snapshot: DocRecord,
|
||||
) -> Result<bool, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(storage.set_doc_snapshot(snapshot.try_into()?).await?)
|
||||
}
|
||||
|
||||
pub async fn get_doc_updates(
|
||||
&self,
|
||||
universal_id: String,
|
||||
doc_id: String,
|
||||
) -> Result<Vec<DocUpdate>, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(
|
||||
storage
|
||||
.get_doc_updates(doc_id)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(Into::into)
|
||||
.collect(),
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn mark_updates_merged(
|
||||
&self,
|
||||
universal_id: String,
|
||||
doc_id: String,
|
||||
updates: Vec<SystemTime>,
|
||||
) -> Result<u32, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(
|
||||
storage
|
||||
.mark_updates_merged(
|
||||
doc_id,
|
||||
updates
|
||||
.into_iter()
|
||||
.map(|t| chrono::DateTime::<chrono::Utc>::from(t).naive_utc())
|
||||
.collect(),
|
||||
)
|
||||
.await?,
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn delete_doc(&self, universal_id: String, doc_id: String) -> Result<(), UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(storage.delete_doc(doc_id).await?)
|
||||
}
|
||||
|
||||
pub async fn get_doc_clocks(
|
||||
&self,
|
||||
universal_id: String,
|
||||
after: Option<SystemTime>,
|
||||
) -> Result<Vec<DocClock>, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(
|
||||
storage
|
||||
.get_doc_clocks(after.map(|t| chrono::DateTime::<chrono::Utc>::from(t).naive_utc()))
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(Into::into)
|
||||
.collect(),
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn get_doc_clock(
|
||||
&self,
|
||||
universal_id: String,
|
||||
doc_id: String,
|
||||
) -> Result<Option<DocClock>, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(storage.get_doc_clock(doc_id).await?.map(Into::into))
|
||||
}
|
||||
|
||||
pub async fn get_blob(
|
||||
&self,
|
||||
universal_id: String,
|
||||
key: String,
|
||||
) -> Result<Option<Blob>, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(storage.get_blob(key).await?.map(Into::into))
|
||||
}
|
||||
|
||||
pub async fn set_blob(&self, universal_id: String, blob: SetBlob) -> Result<(), UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(storage.set_blob(blob.try_into()?).await?)
|
||||
}
|
||||
|
||||
pub async fn delete_blob(
|
||||
&self,
|
||||
universal_id: String,
|
||||
key: String,
|
||||
permanently: bool,
|
||||
) -> Result<(), UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(storage.delete_blob(key, permanently).await?)
|
||||
}
|
||||
|
||||
pub async fn release_blobs(&self, universal_id: String) -> Result<(), UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(storage.release_blobs().await?)
|
||||
}
|
||||
|
||||
pub async fn list_blobs(&self, universal_id: String) -> Result<Vec<ListedBlob>, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(
|
||||
storage
|
||||
.list_blobs()
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(Into::into)
|
||||
.collect(),
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn get_peer_remote_clocks(
|
||||
&self,
|
||||
universal_id: String,
|
||||
peer: String,
|
||||
) -> Result<Vec<DocClock>, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(
|
||||
storage
|
||||
.get_peer_remote_clocks(peer)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(Into::into)
|
||||
.collect(),
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn get_peer_remote_clock(
|
||||
&self,
|
||||
universal_id: String,
|
||||
peer: String,
|
||||
doc_id: String,
|
||||
) -> Result<DocClock, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(storage.get_peer_remote_clock(peer, doc_id).await?.into())
|
||||
}
|
||||
|
||||
pub async fn set_peer_remote_clock(
|
||||
&self,
|
||||
universal_id: String,
|
||||
peer: String,
|
||||
doc_id: String,
|
||||
clock: SystemTime,
|
||||
) -> Result<(), UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(
|
||||
storage
|
||||
.set_peer_remote_clock(
|
||||
peer,
|
||||
doc_id,
|
||||
chrono::DateTime::<chrono::Utc>::from(clock).naive_utc(),
|
||||
)
|
||||
.await?,
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn get_peer_pulled_remote_clocks(
|
||||
&self,
|
||||
universal_id: String,
|
||||
peer: String,
|
||||
) -> Result<Vec<DocClock>, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(
|
||||
storage
|
||||
.get_peer_pulled_remote_clocks(peer)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(Into::into)
|
||||
.collect(),
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn get_peer_pulled_remote_clock(
|
||||
&self,
|
||||
universal_id: String,
|
||||
peer: String,
|
||||
doc_id: String,
|
||||
) -> Result<DocClock, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(
|
||||
storage
|
||||
.get_peer_pulled_remote_clock(peer, doc_id)
|
||||
.await?
|
||||
.into(),
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn set_peer_pulled_remote_clock(
|
||||
&self,
|
||||
universal_id: String,
|
||||
peer: String,
|
||||
doc_id: String,
|
||||
clock: SystemTime,
|
||||
) -> Result<(), UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(
|
||||
storage
|
||||
.set_peer_pulled_remote_clock(
|
||||
peer,
|
||||
doc_id,
|
||||
chrono::DateTime::<chrono::Utc>::from(clock).naive_utc(),
|
||||
)
|
||||
.await?,
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn get_peer_pushed_clocks(
|
||||
&self,
|
||||
universal_id: String,
|
||||
peer: String,
|
||||
) -> Result<Vec<DocClock>, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(
|
||||
storage
|
||||
.get_peer_pushed_clocks(peer)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(Into::into)
|
||||
.collect(),
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn set_peer_pushed_clock(
|
||||
&self,
|
||||
universal_id: String,
|
||||
peer: String,
|
||||
doc_id: String,
|
||||
clock: SystemTime,
|
||||
) -> Result<(), UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(
|
||||
storage
|
||||
.set_peer_pushed_clock(
|
||||
peer,
|
||||
doc_id,
|
||||
chrono::DateTime::<chrono::Utc>::from(clock).naive_utc(),
|
||||
)
|
||||
.await?,
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn clear_clocks(&self, universal_id: String) -> Result<(), UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(storage.clear_clocks().await?)
|
||||
}
|
||||
}
|
||||
|
||||
/// Resolve the absolute SQLite database path for a space without going
/// through a pool instance. `space_type` must be "userspace" or "workspace";
/// anything else yields `InvalidSpaceType`.
#[uniffi::export]
pub fn get_db_path(peer: String, space_type: String, id: String) -> Result<String, UniffiError> {
  let options = StorageOptions {
    peer,
    space_type: SpaceType::from_str(&space_type)?,
    id,
  };
  utils::get_db_path(&options)
}
|
||||
|
||||
/// Kind of space a storage belongs to; determines the on-disk subdirectory
/// ("userspaces" vs "workspaces"). Defaults to `Userspace`, which is also
/// what `parse_universal_id` uses when no `@type(...)` segment is present.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Default)]
pub enum SpaceType {
  #[default]
  Userspace,
  Workspace,
}
|
||||
|
||||
impl Display for SpaceType {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
SpaceType::Userspace => write!(f, "userspace"),
|
||||
SpaceType::Workspace => write!(f, "workspace"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for SpaceType {
|
||||
type Err = UniffiError;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
Ok(match s {
|
||||
"userspace" => Self::Userspace,
|
||||
"workspace" => Self::Workspace,
|
||||
_ => return Err(UniffiError::InvalidSpaceType(s.to_string())),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Parsed components of a universal storage id
/// ("@peer($peer);@type($type);@id($id);"); used to derive the db path.
pub struct StorageOptions {
  pub peer: String,
  pub space_type: SpaceType,
  pub id: String,
}
|
||||
|
||||
pub fn parse_universal_id(id: &str) -> Result<StorageOptions, UniffiError> {
|
||||
let mut result = StorageOptions {
|
||||
peer: String::new(),
|
||||
space_type: SpaceType::default(),
|
||||
id: String::new(),
|
||||
};
|
||||
|
||||
let mut key = String::new();
|
||||
let mut value = String::new();
|
||||
let mut is_in_value = false;
|
||||
let mut chars = id.chars().peekable();
|
||||
|
||||
while let Some(ch) = chars.next() {
|
||||
if is_in_value {
|
||||
if ch == ')' && chars.peek() == Some(&';') {
|
||||
// Store the collected value in the appropriate field
|
||||
match key.as_str() {
|
||||
"peer" => result.peer = value.clone(),
|
||||
"type" => result.space_type = SpaceType::from_str(&value)?,
|
||||
"id" => result.id = value.clone(),
|
||||
_ => return Err(UniffiError::InvalidUniversalId(id.to_string())),
|
||||
}
|
||||
key.clear();
|
||||
value.clear();
|
||||
is_in_value = false;
|
||||
chars.next(); // Skip the semicolon
|
||||
continue;
|
||||
}
|
||||
value.push(ch);
|
||||
continue;
|
||||
}
|
||||
|
||||
if ch == '@' {
|
||||
// Find the position of next '('
|
||||
let mut temp_chars = chars.clone();
|
||||
let mut found_paren = false;
|
||||
let mut key_chars = Vec::new();
|
||||
|
||||
while let Some(next_ch) = temp_chars.next() {
|
||||
if next_ch == '(' {
|
||||
found_paren = true;
|
||||
break;
|
||||
}
|
||||
key_chars.push(next_ch);
|
||||
}
|
||||
|
||||
// Invalid format if no '(' found or it's immediately after '@'
|
||||
if !found_paren || key_chars.is_empty() {
|
||||
return Err(UniffiError::InvalidUniversalId(id.to_string()));
|
||||
}
|
||||
|
||||
key = key_chars.into_iter().collect();
|
||||
// Advance the original iterator to the position after the key
|
||||
for _ in 0..key.len() + 1 {
|
||||
chars.next();
|
||||
}
|
||||
is_in_value = true;
|
||||
} else {
|
||||
return Err(UniffiError::InvalidUniversalId(id.to_string()));
|
||||
}
|
||||
}
|
||||
|
||||
// Validate the parsed results
|
||||
if result.peer.is_empty() || result.id.is_empty() {
|
||||
return Err(UniffiError::InvalidUniversalId(id.to_string()));
|
||||
}
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
  use super::*;

  // Round-trip: format a universal id from options, then parse it back.
  #[test]
  fn test_universal_id() {
    let options = StorageOptions {
      peer: "123".to_string(),
      space_type: SpaceType::Workspace,
      id: "456".to_string(),
    };

    let id = format!(
      "@peer({});@type({});@id({});",
      options.peer, options.space_type, options.id
    );
    let result = parse_universal_id(&id).unwrap();

    assert_eq!(result.peer, "123");
    assert_eq!(result.space_type, SpaceType::Workspace);
    assert_eq!(result.id, "456");
  }

  // Accepts values containing '@' and even unbalanced '(' — only the exact
  // ");" sequence terminates a value.
  #[test]
  fn test_parse_universal_id_valid_cases() {
    let testcases = vec![
      "@peer(123);@type(userspace);@id(456);",
      "@peer(123);@type(workspace);@id(456);",
      "@peer(https://app.affine.pro);@type(userspace);@id(hello:world);",
      "@peer(@name);@type(userspace);@id(@id);",
      "@peer(@peer(name);@type(userspace);@id(@id);",
    ];

    for id in testcases {
      let result = parse_universal_id(id);
      assert!(result.is_ok(), "Failed to parse: {}", id);

      let parsed = result.unwrap();
      assert!(!parsed.peer.is_empty());
      assert!(!parsed.id.is_empty());
    }
  }

  // Rejects unknown space types and stray characters between segments.
  #[test]
  fn test_parse_universal_id_invalid_cases() {
    let testcases = vec![
      // invalid space type
      "@peer(123);@type(anyspace);@id(456);",
      // invalid peer
      "@peer(@peer(name););@type(userspace);@id(@id);",
    ];

    for id in testcases {
      let result = parse_universal_id(id);
      assert!(result.is_err(), "Should have failed to parse: {}", id);

      match result {
        Err(UniffiError::InvalidUniversalId(_)) => (),
        Err(UniffiError::InvalidSpaceType(_)) => (),
        _ => panic!("Expected InvalidUniversalId error for: {}", id),
      }
    }
  }
}
|
||||
|
||||
141
packages/frontend/mobile-native/src/utils.rs
Normal file
141
packages/frontend/mobile-native/src/utils.rs
Normal file
@@ -0,0 +1,141 @@
|
||||
use std::fs;
|
||||
|
||||
#[cfg(not(any(target_os = "ios", target_os = "macos")))]
|
||||
use homedir::my_home;
|
||||
#[cfg(any(target_os = "ios", target_os = "macos"))]
|
||||
use objc2::rc::autoreleasepool;
|
||||
#[cfg(any(target_os = "ios", target_os = "macos"))]
|
||||
use objc2_foundation::{NSFileManager, NSSearchPathDirectory, NSSearchPathDomainMask, NSString};
|
||||
|
||||
use crate::{error::UniffiError, SpaceType, StorageOptions};
|
||||
|
||||
const DB_FILE_NAME: &str = "storage.db";
|
||||
|
||||
/// Resolve (and create) the database path on iOS/macOS:
/// `<Documents>/.affine/<userspaces|workspaces>/<escaped peer>/<id>/storage.db`.
///
/// Uses Foundation's NSFileManager so the result lands in the app-sandbox
/// Documents directory, mirroring what the Swift side would compute.
#[cfg(any(target_os = "ios", target_os = "macos"))]
pub(crate) fn get_db_path(options: &StorageOptions) -> Result<String, UniffiError> {
  // SAFETY: objc2 bindings to Foundation; `defaultManager` returns the shared
  // file manager singleton.
  let file_manager = unsafe { NSFileManager::defaultManager() };
  // equivalent to Swift:
  // ```swift
  // guard let documentsPath = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first else {
  //   return nil
  // }
  // ```
  let urls = unsafe {
    file_manager.URLsForDirectory_inDomains(
      NSSearchPathDirectory::NSDocumentDirectory,
      NSSearchPathDomainMask::NSUserDomainMask,
    )
  };
  let document_directory = urls
    .first()
    .ok_or(UniffiError::GetUserDocumentDirectoryFailed)?;

  // Build Documents/.affine/<spaces>/<peer>/<id>; any nil from
  // URLByAppendingPathComponent aborts with ConcatSpaceDirFailed.
  let affine_dir = unsafe {
    let spaces_dir = match options.space_type {
      SpaceType::Userspace => "userspaces",
      SpaceType::Workspace => "workspaces",
    };
    // Peer may be a URL; escape it into a filesystem-safe component.
    let escaped_peer = escape_filename(&options.peer);
    document_directory
      .URLByAppendingPathComponent(&NSString::from_str(".affine"))
      .and_then(|url| url.URLByAppendingPathComponent(&NSString::from_str(spaces_dir)))
      .and_then(|url| url.URLByAppendingPathComponent(&NSString::from_str(&escaped_peer)))
      .and_then(|url| url.URLByAppendingPathComponent(&NSString::from_str(&options.id)))
  }
  .ok_or(UniffiError::ConcatSpaceDirFailed(format!(
    "{}:{}:{}",
    options.peer, options.space_type, options.id
  )))?;
  // Materialize the URL's path as an owned Rust String inside an
  // autorelease pool (NSString's backing buffer is pool-scoped).
  let affine_dir_str = autoreleasepool(|pool| {
    Ok::<String, UniffiError>(
      unsafe { affine_dir.path() }
        .ok_or(UniffiError::GetUserDocumentDirectoryFailed)?
        .as_str(pool)
        .to_string(),
    )
  })?;

  // Replicate Swift's appending ".affine" subdir, creating it if necessary
  fs::create_dir_all(&affine_dir_str)
    .map_err(|_| UniffiError::CreateAffineDirFailed(affine_dir_str.clone()))?;

  // Append the database filename and extract the final path string.
  let db_path = autoreleasepool(|pool| {
    let db_path =
      unsafe { affine_dir.URLByAppendingPathComponent(&NSString::from_str(DB_FILE_NAME)) }.ok_or(
        UniffiError::ConcatSpaceDirFailed(format!(
          "{}:{}:{}/{DB_FILE_NAME}",
          options.peer, options.space_type, options.id
        )),
      )?;
    Ok::<String, UniffiError>(
      unsafe { db_path.path() }
        .ok_or(UniffiError::GetUserDocumentDirectoryFailed)?
        .as_str(pool)
        .to_string(),
    )
  })?;

  Ok(db_path)
}
|
||||
|
||||
/// Resolve (and create) the database path on non-Apple platforms:
/// `~/.affine/<userspaces|workspaces>/<escaped peer>/<id>/storage.db`.
#[cfg(not(any(target_os = "ios", target_os = "macos")))]
pub(crate) fn get_db_path(options: &StorageOptions) -> Result<String, UniffiError> {
  // `my_home` can fail outright or return None; both are mapped to the same
  // "no document directory" error as on Apple platforms.
  let home_dir = my_home()
    .map_err(|_| UniffiError::GetUserDocumentDirectoryFailed)?
    .ok_or(UniffiError::GetUserDocumentDirectoryFailed)?;
  let spaces_dir = match options.space_type {
    SpaceType::Userspace => "userspaces",
    SpaceType::Workspace => "workspaces",
  };
  // Peer may be a URL; escape it into a filesystem-safe component.
  let escaped_peer = escape_filename(&options.peer);
  let db_path = home_dir
    .join(".affine")
    .join(spaces_dir)
    .join(&escaped_peer)
    .join(&options.id);
  // Ensure the whole directory chain exists before handing the path out.
  fs::create_dir_all(&db_path)
    .map_err(|_| UniffiError::CreateAffineDirFailed(db_path.to_string_lossy().to_string()))?;
  db_path
    .join(DB_FILE_NAME)
    .to_str()
    .map(|p| p.to_owned())
    .ok_or(UniffiError::GetUserDocumentDirectoryFailed)
}
|
||||
|
||||
/// Make `name` safe to use as a single path component: map shell/URL special
/// characters to `_`, collapse runs of `_` to one, and drop any trailing `_`.
/// Leading underscores are kept; `.` and `-` pass through untouched.
fn escape_filename(name: &str) -> String {
  const SPECIAL: &str = "\\/!@#$%^&*()+~`\"':;,?<>|";

  let mut out = String::with_capacity(name.len());
  let mut prev_underscore = false;

  for c in name.chars() {
    // Specials become '_' before the collapsing step, so a special next to a
    // literal '_' still collapses to a single '_'.
    let mapped = if SPECIAL.contains(c) { '_' } else { c };
    if mapped == '_' {
      if !prev_underscore {
        out.push('_');
      }
      prev_underscore = true;
    } else {
      out.push(mapped);
      prev_underscore = false;
    }
  }

  // After collapsing there is at most one trailing underscore to strip.
  while out.ends_with('_') {
    out.pop();
  }
  out
}
|
||||
|
||||
// Gated to Apple targets because `get_db_path` pulls in Foundation there; the
// escape tests themselves are platform-independent.
#[cfg(all(test, any(target_os = "ios", target_os = "macos")))]
mod tests {
  use super::*;

  #[test]
  fn test_escape_filename() {
    assert_eq!(escape_filename("hello@world"), "hello_world");
    assert_eq!(escape_filename("test!!file"), "test_file");
    assert_eq!(escape_filename("_test_"), "_test"); // Leading underscore preserved
    assert_eq!(escape_filename("multi___under"), "multi_under");
    assert_eq!(escape_filename("path/to\\file"), "path_to_file");
  }
}
|
||||
Reference in New Issue
Block a user