mirror of
https://github.com/toeverything/AFFiNE.git
synced 2026-02-14 21:27:20 +00:00
feat(ios): nbstore swift native binding (#9211)
This commit is contained in:
@@ -60,6 +60,10 @@
|
||||
FC68EB0AF532CFC21C3344DD /* Pods-App.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-App.debug.xcconfig"; path = "Pods/Target Support Files/Pods-App/Pods-App.debug.xcconfig"; sourceTree = "<group>"; };
|
||||
/* End PBXFileReference section */
|
||||
|
||||
/* Begin PBXFileSystemSynchronizedRootGroup section */
|
||||
C45499AB2D140B5000E21978 /* NBStore */ = {isa = PBXFileSystemSynchronizedRootGroup; explicitFileTypes = {}; explicitFolders = (); path = NBStore; sourceTree = "<group>"; };
|
||||
/* End PBXFileSystemSynchronizedRootGroup section */
|
||||
|
||||
/* Begin PBXFrameworksBuildPhase section */
|
||||
504EC3011FED79650016851F /* Frameworks */ = {
|
||||
isa = PBXFrameworksBuildPhase;
|
||||
@@ -139,6 +143,7 @@
|
||||
9D90BE1A2CCB9876006677DB /* Plugins */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
C45499AB2D140B5000E21978 /* NBStore */,
|
||||
E93B276A2CED9298001409B8 /* NavigationGesture */,
|
||||
9D90BE192CCB9876006677DB /* Cookie */,
|
||||
);
|
||||
@@ -201,6 +206,9 @@
|
||||
);
|
||||
dependencies = (
|
||||
);
|
||||
fileSystemSynchronizedGroups = (
|
||||
C45499AB2D140B5000E21978 /* NBStore */,
|
||||
);
|
||||
name = App;
|
||||
productName = App;
|
||||
productReference = 504EC3041FED79650016851F /* App.app */;
|
||||
|
||||
@@ -20,6 +20,7 @@ class AFFiNEViewController: CAPBridgeViewController {
|
||||
HashcashPlugin(),
|
||||
NavigationGesturePlugin(),
|
||||
IntelligentsPlugin(representController: self),
|
||||
NbStorePlugin(),
|
||||
]
|
||||
plugins.forEach { bridge?.registerPluginInstance($0) }
|
||||
}
|
||||
|
||||
@@ -0,0 +1,402 @@
|
||||
import Capacitor
import Foundation

/// Capacitor bridge exposing the native NBStore document storage to JS.
///
/// Every plugin method unwraps its JS arguments, forwards to
/// `DocStoragePool` (the uniffi-generated Rust binding) and then settles
/// the call: each method either resolves or rejects, so the JS promise
/// can never be left pending. Timestamps cross the bridge as seconds
/// since the Unix epoch (`timeIntervalSince1970`).
@objc(NbStorePlugin)
public class NbStorePlugin: CAPPlugin, CAPBridgedPlugin {
  private let docStoragePool: DocStoragePool = .init(noPointer: DocStoragePool.NoPointer())

  public let identifier = "NbStorePlugin"
  public let jsName = "NbStoreDocStorage"
  public let pluginMethods: [CAPPluginMethod] = [
    CAPPluginMethod(name: "getSpaceDBPath", returnType: CAPPluginReturnPromise),
    CAPPluginMethod(name: "connect", returnType: CAPPluginReturnPromise),
    CAPPluginMethod(name: "close", returnType: CAPPluginReturnPromise),
    CAPPluginMethod(name: "isClosed", returnType: CAPPluginReturnPromise),
    CAPPluginMethod(name: "checkpoint", returnType: CAPPluginReturnPromise),
    CAPPluginMethod(name: "validate", returnType: CAPPluginReturnPromise),
    CAPPluginMethod(name: "setSpaceId", returnType: CAPPluginReturnPromise),
    CAPPluginMethod(name: "pushUpdate", returnType: CAPPluginReturnPromise),
    CAPPluginMethod(name: "getDocSnapshot", returnType: CAPPluginReturnPromise),
    CAPPluginMethod(name: "setDocSnapshot", returnType: CAPPluginReturnPromise),
    CAPPluginMethod(name: "getDocUpdates", returnType: CAPPluginReturnPromise),
    CAPPluginMethod(name: "markUpdatesMerged", returnType: CAPPluginReturnPromise),
    CAPPluginMethod(name: "deleteDoc", returnType: CAPPluginReturnPromise),
    CAPPluginMethod(name: "getDocClocks", returnType: CAPPluginReturnPromise),
    CAPPluginMethod(name: "getDocClock", returnType: CAPPluginReturnPromise),
    CAPPluginMethod(name: "getBlob", returnType: CAPPluginReturnPromise),
    CAPPluginMethod(name: "setBlob", returnType: CAPPluginReturnPromise),
    CAPPluginMethod(name: "deleteBlob", returnType: CAPPluginReturnPromise),
    CAPPluginMethod(name: "releaseBlobs", returnType: CAPPluginReturnPromise),
    CAPPluginMethod(name: "listBlobs", returnType: CAPPluginReturnPromise),
    CAPPluginMethod(name: "getPeerRemoteClocks", returnType: CAPPluginReturnPromise),
    CAPPluginMethod(name: "getPeerRemoteClock", returnType: CAPPluginReturnPromise),
    CAPPluginMethod(name: "setPeerRemoteClock", returnType: CAPPluginReturnPromise),
    CAPPluginMethod(name: "getPeerPulledRemoteClocks", returnType: CAPPluginReturnPromise),
    CAPPluginMethod(name: "getPeerPulledRemoteClock", returnType: CAPPluginReturnPromise),
    CAPPluginMethod(name: "setPeerPulledRemoteClock", returnType: CAPPluginReturnPromise),
    CAPPluginMethod(name: "getPeerPushedClocks", returnType: CAPPluginReturnPromise),
    CAPPluginMethod(name: "setPeerPushedClock", returnType: CAPPluginReturnPromise),
    CAPPluginMethod(name: "clearClocks", returnType: CAPPluginReturnPromise),
  ]

  /// Runs a throwing async storage operation and settles `call`:
  /// resolves on success, rejects with `failureMessage` on error.
  ///
  /// Fix: the original void methods (`connect`, `close`, `checkpoint`,
  /// `setBlob`, `deleteBlob`, `releaseBlobs`) used `try?` and never
  /// settled the call, leaving the JS promise pending forever and
  /// silently discarding errors.
  private func settle(
    _ call: CAPPluginCall,
    _ failureMessage: String,
    _ operation: () async throws -> Void
  ) async {
    do {
      try await operation()
      call.resolve()
    } catch {
      call.reject(failureMessage, nil, error)
    }
  }

  /// Resolves with the on-disk SQLite path for a space.
  @objc func getSpaceDBPath(_ call: CAPPluginCall) {
    let peer = call.getString("peer") ?? ""
    let spaceType = call.getString("spaceType") ?? ""
    let id = call.getString("id") ?? ""

    do {
      let path = try getDbPath(peer: peer, spaceType: spaceType, id: id)
      call.resolve(["path": path])
    } catch {
      call.reject("Failed to get space DB path", nil, error)
    }
  }

  /// Opens (or reuses) the storage connection for a universal id.
  @objc func connect(_ call: CAPPluginCall) async {
    let id = call.getString("id") ?? ""
    await settle(call, "Failed to connect") {
      try await self.docStoragePool.connect(universalId: id)
    }
  }

  /// Closes the storage connection for a universal id.
  @objc func close(_ call: CAPPluginCall) async {
    let id = call.getString("id") ?? ""
    await settle(call, "Failed to close") {
      try await self.docStoragePool.close(universalId: id)
    }
  }

  /// Resolves `{ isClosed }` for the given universal id (synchronous query).
  @objc func isClosed(_ call: CAPPluginCall) {
    let id = call.getString("id") ?? ""
    call.resolve(["isClosed": docStoragePool.isClosed(universalId: id)])
  }

  /// Forces a storage checkpoint (flush) for the given universal id.
  @objc func checkpoint(_ call: CAPPluginCall) async {
    let id = call.getString("id") ?? ""
    await settle(call, "Failed to checkpoint") {
      try await self.docStoragePool.checkpoint(universalId: id)
    }
  }

  /// Resolves `{ isValidate }`; a validation error is reported as `false`
  /// (an unreadable store is treated as invalid, not as a rejection).
  @objc func validate(_ call: CAPPluginCall) async {
    let id = call.getString("id") ?? ""
    let validate = (try? await docStoragePool.validate(universalId: id)) ?? false
    call.resolve(["isValidate": validate])
  }

  /// Associates a space id with the universal id.
  @objc func setSpaceId(_ call: CAPPluginCall) async {
    let id = call.getString("id") ?? ""
    let spaceId = call.getString("spaceId") ?? ""
    await settle(call, "Failed to set space id") {
      try await self.docStoragePool.setSpaceId(universalId: id, spaceId: spaceId)
    }
  }

  /// Appends a doc update; resolves `{ timestamp }` in epoch seconds.
  @objc func pushUpdate(_ call: CAPPluginCall) async {
    let id = call.getString("id") ?? ""
    let docId = call.getString("docId") ?? ""
    let data = call.getString("data") ?? ""
    do {
      let timestamp = try await docStoragePool.pushUpdate(universalId: id, docId: docId, update: data)
      call.resolve(["timestamp": timestamp.timeIntervalSince1970])
    } catch {
      call.reject("Failed to push update", nil, error)
    }
  }

  /// Resolves the stored snapshot record, or an empty result if none exists.
  @objc func getDocSnapshot(_ call: CAPPluginCall) async {
    let id = call.getString("id") ?? ""
    let docId = call.getString("docId") ?? ""
    do {
      if let record = try await docStoragePool.getDocSnapshot(universalId: id, docId: docId) {
        call.resolve([
          "docId": record.docId,
          "data": record.data,
          "timestamp": record.timestamp.timeIntervalSince1970,
        ])
      } else {
        call.resolve()
      }
    } catch {
      call.reject("Failed to get doc snapshot", nil, error)
    }
  }

  /// Stores a snapshot stamped with "now"; resolves `{ success }`.
  @objc func setDocSnapshot(_ call: CAPPluginCall) async {
    let id = call.getString("id") ?? ""
    let docId = call.getString("docId") ?? ""
    let data = call.getString("data") ?? ""
    let timestamp = Date()
    do {
      let success = try await docStoragePool.setDocSnapshot(
        universalId: id,
        snapshot: DocRecord(docId: docId, data: data, timestamp: timestamp)
      )
      call.resolve(["success": success])
    } catch {
      call.reject("Failed to set doc snapshot", nil, error)
    }
  }

  /// Resolves `{ updates: [{docId, createdAt, data}] }` for a doc.
  @objc func getDocUpdates(_ call: CAPPluginCall) async {
    let id = call.getString("id") ?? ""
    let docId = call.getString("docId") ?? ""
    do {
      let updates = try await docStoragePool.getDocUpdates(universalId: id, docId: docId)
      let mapped = updates.map { [
        "docId": $0.docId,
        "createdAt": $0.createdAt.timeIntervalSince1970,
        "data": $0.data,
      ] }
      call.resolve(["updates": mapped])
    } catch {
      call.reject("Failed to get doc updates", nil, error)
    }
  }

  /// Marks the updates identified by their epoch-second timestamps as
  /// merged; resolves `{ count }` of affected rows.
  @objc func markUpdatesMerged(_ call: CAPPluginCall) async {
    let id = call.getString("id") ?? ""
    let docId = call.getString("docId") ?? ""
    let times = call.getArray("timestamps", Double.self) ?? []
    let dateArray = times.map { Date(timeIntervalSince1970: $0) }
    do {
      let count = try await docStoragePool.markUpdatesMerged(universalId: id, docId: docId, updates: dateArray)
      call.resolve(["count": count])
    } catch {
      call.reject("Failed to mark updates merged", nil, error)
    }
  }

  /// Deletes a doc and its associated data.
  @objc func deleteDoc(_ call: CAPPluginCall) async {
    let id = call.getString("id") ?? ""
    let docId = call.getString("docId") ?? ""
    await settle(call, "Failed to delete doc") {
      try await self.docStoragePool.deleteDoc(universalId: id, docId: docId)
    }
  }

  /// Resolves `{ clocks }` for all docs, optionally only those after the
  /// given epoch-second timestamp.
  ///
  /// Fix: `after` is now read with `getDouble` (consistent with every
  /// other timestamp argument in this plugin, and accepting fractional
  /// seconds) instead of `getInt` + force unwrap.
  @objc func getDocClocks(_ call: CAPPluginCall) async {
    let id = call.getString("id") ?? ""
    let after = call.getDouble("after")
    do {
      let docClocks = try await docStoragePool.getDocClocks(
        universalId: id,
        after: after.map { Date(timeIntervalSince1970: $0) }
      )
      let mapped = docClocks.map { [
        "docId": $0.docId,
        "timestamp": $0.timestamp.timeIntervalSince1970,
      ] }
      call.resolve(["clocks": mapped])
    } catch {
      call.reject("Failed to get doc clocks", nil, error)
    }
  }

  /// Resolves the clock for a single doc, or an empty result if none.
  @objc func getDocClock(_ call: CAPPluginCall) async {
    let id = call.getString("id") ?? ""
    let docId = call.getString("docId") ?? ""
    do {
      if let docClock = try await docStoragePool.getDocClock(universalId: id, docId: docId) {
        call.resolve([
          "docId": docClock.docId,
          "timestamp": docClock.timestamp.timeIntervalSince1970,
        ])
      } else {
        call.resolve()
      }
    } catch {
      call.reject("Failed to get doc clock for docId: \(docId)", nil, error)
    }
  }

  /// Resolves `{ blob }` when present; resolves empty when the blob is
  /// missing. NOTE(review): `try?` also maps a storage *error* to the
  /// empty result, so JS cannot distinguish "missing" from "failed" —
  /// kept for compatibility, confirm before tightening.
  @objc func getBlob(_ call: CAPPluginCall) async {
    let id = call.getString("id") ?? ""
    let key = call.getString("key") ?? ""
    if let blob = try? await docStoragePool.getBlob(universalId: id, key: key) {
      call.resolve(["blob": blob])
    } else {
      call.resolve()
    }
  }

  /// Stores a blob under `key` with the given mime type.
  @objc func setBlob(_ call: CAPPluginCall) async {
    let id = call.getString("id") ?? ""
    let key = call.getString("key") ?? ""
    let data = call.getString("data") ?? ""
    let mime = call.getString("mime") ?? ""
    await settle(call, "Failed to set blob") {
      try await self.docStoragePool.setBlob(universalId: id, blob: SetBlob(key: key, data: data, mime: mime))
    }
  }

  /// Deletes a blob; `permanently` skips the soft-delete stage.
  @objc func deleteBlob(_ call: CAPPluginCall) async {
    let id = call.getString("id") ?? ""
    let key = call.getString("key") ?? ""
    let permanently = call.getBool("permanently") ?? false
    await settle(call, "Failed to delete blob") {
      try await self.docStoragePool.deleteBlob(universalId: id, key: key, permanently: permanently)
    }
  }

  /// Garbage-collects released blobs for the given universal id.
  @objc func releaseBlobs(_ call: CAPPluginCall) async {
    let id = call.getString("id") ?? ""
    await settle(call, "Failed to release blobs") {
      try await self.docStoragePool.releaseBlobs(universalId: id)
    }
  }

  /// Resolves `{ blobs }` metadata; an error resolves empty (best-effort
  /// listing, matching `getBlob` — NOTE(review): confirm before tightening).
  @objc func listBlobs(_ call: CAPPluginCall) async {
    let id = call.getString("id") ?? ""
    if let blobs = try? await docStoragePool.listBlobs(universalId: id) {
      let mapped = blobs.map { [
        "key": $0.key,
        "size": $0.size,
        "mime": $0.mime,
        "createdAt": $0.createdAt.timeIntervalSince1970,
      ] }
      call.resolve(["blobs": mapped])
    } else {
      call.resolve()
    }
  }

  /// Resolves `{ clocks }` the named peer has remotely.
  @objc func getPeerRemoteClocks(_ call: CAPPluginCall) async {
    let id = call.getString("id") ?? ""
    let peer = call.getString("peer") ?? ""
    do {
      let clocks = try await docStoragePool.getPeerRemoteClocks(universalId: id, peer: peer)
      let mapped = clocks.map { [
        "docId": $0.docId,
        "timestamp": $0.timestamp.timeIntervalSince1970,
      ] }
      call.resolve(["clocks": mapped])
    } catch {
      call.reject("Failed to get peer remote clocks", nil, error)
    }
  }

  /// Resolves the remote clock a peer holds for one doc.
  @objc func getPeerRemoteClock(_ call: CAPPluginCall) async {
    let id = call.getString("id") ?? ""
    let peer = call.getString("peer") ?? ""
    let docId = call.getString("docId") ?? ""
    do {
      let clock = try await docStoragePool.getPeerRemoteClock(universalId: id, peer: peer, docId: docId)
      call.resolve([
        "docId": clock.docId,
        "timestamp": clock.timestamp.timeIntervalSince1970,
      ])
    } catch {
      call.reject("Failed to get peer remote clock", nil, error)
    }
  }

  /// Records a peer's remote clock (epoch-second timestamp) for a doc.
  @objc func setPeerRemoteClock(_ call: CAPPluginCall) async {
    let id = call.getString("id") ?? ""
    let peer = call.getString("peer") ?? ""
    let docId = call.getString("docId") ?? ""
    let timestamp = call.getDouble("timestamp") ?? 0
    await settle(call, "Failed to set peer remote clock") {
      try await self.docStoragePool.setPeerRemoteClock(
        universalId: id,
        peer: peer,
        docId: docId,
        clock: Date(timeIntervalSince1970: timestamp)
      )
    }
  }

  /// Resolves `{ clocks }` last pulled from the named peer.
  @objc func getPeerPulledRemoteClocks(_ call: CAPPluginCall) async {
    let id = call.getString("id") ?? ""
    let peer = call.getString("peer") ?? ""
    do {
      let clocks = try await docStoragePool.getPeerPulledRemoteClocks(universalId: id, peer: peer)
      let mapped = clocks.map { [
        "docId": $0.docId,
        "timestamp": $0.timestamp.timeIntervalSince1970,
      ] }
      call.resolve(["clocks": mapped])
    } catch {
      call.reject("Failed to get peer pulled remote clocks", nil, error)
    }
  }

  /// Resolves the pulled-remote clock for one doc of a peer.
  @objc func getPeerPulledRemoteClock(_ call: CAPPluginCall) async {
    let id = call.getString("id") ?? ""
    let peer = call.getString("peer") ?? ""
    let docId = call.getString("docId") ?? ""
    do {
      let clock = try await docStoragePool.getPeerPulledRemoteClock(universalId: id, peer: peer, docId: docId)
      call.resolve([
        "docId": clock.docId,
        "timestamp": clock.timestamp.timeIntervalSince1970,
      ])
    } catch {
      call.reject("Failed to get peer pulled remote clock", nil, error)
    }
  }

  /// Records the pulled-remote clock (epoch seconds) for a peer's doc.
  @objc func setPeerPulledRemoteClock(_ call: CAPPluginCall) async {
    let id = call.getString("id") ?? ""
    let peer = call.getString("peer") ?? ""
    let docId = call.getString("docId") ?? ""
    let timestamp = call.getDouble("timestamp") ?? 0
    await settle(call, "Failed to set peer pulled remote clock") {
      try await self.docStoragePool.setPeerPulledRemoteClock(
        universalId: id,
        peer: peer,
        docId: docId,
        clock: Date(timeIntervalSince1970: timestamp)
      )
    }
  }

  /// Resolves `{ clocks }` pushed to the named peer.
  @objc func getPeerPushedClocks(_ call: CAPPluginCall) async {
    let id = call.getString("id") ?? ""
    let peer = call.getString("peer") ?? ""
    do {
      let clocks = try await docStoragePool.getPeerPushedClocks(universalId: id, peer: peer)
      let mapped = clocks.map { [
        "docId": $0.docId,
        "timestamp": $0.timestamp.timeIntervalSince1970,
      ] }
      call.resolve(["clocks": mapped])
    } catch {
      call.reject("Failed to get peer pushed clocks", nil, error)
    }
  }

  /// Records the pushed clock (epoch seconds) for a peer's doc.
  @objc func setPeerPushedClock(_ call: CAPPluginCall) async {
    let id = call.getString("id") ?? ""
    let peer = call.getString("peer") ?? ""
    let docId = call.getString("docId") ?? ""
    let timestamp = call.getDouble("timestamp") ?? 0
    await settle(call, "Failed to set peer pushed clock") {
      try await self.docStoragePool.setPeerPushedClock(
        universalId: id,
        peer: peer,
        docId: docId,
        clock: Date(timeIntervalSince1970: timestamp)
      )
    }
  }

  /// Clears all stored sync clocks for the universal id.
  @objc func clearClocks(_ call: CAPPluginCall) async {
    let id = call.getString("id") ?? ""
    await settle(call, "Failed to clear clocks") {
      try await self.docStoragePool.clearClocks(universalId: id)
    }
  }
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -250,6 +250,161 @@ typedef struct UniffiForeignFutureStructVoid {
|
||||
typedef void (*UniffiForeignFutureCompleteVoid)(uint64_t, UniffiForeignFutureStructVoid
|
||||
);
|
||||
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_CLONE_DOCSTORAGEPOOL
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_CLONE_DOCSTORAGEPOOL
|
||||
void*_Nonnull uniffi_affine_mobile_native_fn_clone_docstoragepool(void*_Nonnull ptr, RustCallStatus *_Nonnull out_status
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_FREE_DOCSTORAGEPOOL
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_FREE_DOCSTORAGEPOOL
|
||||
void uniffi_affine_mobile_native_fn_free_docstoragepool(void*_Nonnull ptr, RustCallStatus *_Nonnull out_status
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_CHECKPOINT
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_CHECKPOINT
|
||||
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_checkpoint(void*_Nonnull ptr, RustBuffer universal_id
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_CLEAR_CLOCKS
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_CLEAR_CLOCKS
|
||||
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_clear_clocks(void*_Nonnull ptr, RustBuffer universal_id
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_CLOSE
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_CLOSE
|
||||
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_close(void*_Nonnull ptr, RustBuffer universal_id
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_CONNECT
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_CONNECT
|
||||
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_connect(void*_Nonnull ptr, RustBuffer universal_id
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_DELETE_BLOB
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_DELETE_BLOB
|
||||
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_delete_blob(void*_Nonnull ptr, RustBuffer universal_id, RustBuffer key, int8_t permanently
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_DELETE_DOC
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_DELETE_DOC
|
||||
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_delete_doc(void*_Nonnull ptr, RustBuffer universal_id, RustBuffer doc_id
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_GET_BLOB
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_GET_BLOB
|
||||
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_get_blob(void*_Nonnull ptr, RustBuffer universal_id, RustBuffer key
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_GET_DOC_CLOCK
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_GET_DOC_CLOCK
|
||||
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_get_doc_clock(void*_Nonnull ptr, RustBuffer universal_id, RustBuffer doc_id
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_GET_DOC_CLOCKS
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_GET_DOC_CLOCKS
|
||||
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_get_doc_clocks(void*_Nonnull ptr, RustBuffer universal_id, RustBuffer after
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_GET_DOC_SNAPSHOT
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_GET_DOC_SNAPSHOT
|
||||
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_get_doc_snapshot(void*_Nonnull ptr, RustBuffer universal_id, RustBuffer doc_id
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_GET_DOC_UPDATES
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_GET_DOC_UPDATES
|
||||
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_get_doc_updates(void*_Nonnull ptr, RustBuffer universal_id, RustBuffer doc_id
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_GET_PEER_PULLED_REMOTE_CLOCK
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_GET_PEER_PULLED_REMOTE_CLOCK
|
||||
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_get_peer_pulled_remote_clock(void*_Nonnull ptr, RustBuffer universal_id, RustBuffer peer, RustBuffer doc_id
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_GET_PEER_PULLED_REMOTE_CLOCKS
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_GET_PEER_PULLED_REMOTE_CLOCKS
|
||||
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_get_peer_pulled_remote_clocks(void*_Nonnull ptr, RustBuffer universal_id, RustBuffer peer
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_GET_PEER_PUSHED_CLOCKS
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_GET_PEER_PUSHED_CLOCKS
|
||||
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_get_peer_pushed_clocks(void*_Nonnull ptr, RustBuffer universal_id, RustBuffer peer
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_GET_PEER_REMOTE_CLOCK
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_GET_PEER_REMOTE_CLOCK
|
||||
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_get_peer_remote_clock(void*_Nonnull ptr, RustBuffer universal_id, RustBuffer peer, RustBuffer doc_id
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_GET_PEER_REMOTE_CLOCKS
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_GET_PEER_REMOTE_CLOCKS
|
||||
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_get_peer_remote_clocks(void*_Nonnull ptr, RustBuffer universal_id, RustBuffer peer
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_IS_CLOSED
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_IS_CLOSED
|
||||
int8_t uniffi_affine_mobile_native_fn_method_docstoragepool_is_closed(void*_Nonnull ptr, RustBuffer universal_id, RustCallStatus *_Nonnull out_status
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_LIST_BLOBS
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_LIST_BLOBS
|
||||
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_list_blobs(void*_Nonnull ptr, RustBuffer universal_id
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_MARK_UPDATES_MERGED
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_MARK_UPDATES_MERGED
|
||||
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_mark_updates_merged(void*_Nonnull ptr, RustBuffer universal_id, RustBuffer doc_id, RustBuffer updates
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_PUSH_UPDATE
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_PUSH_UPDATE
|
||||
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_push_update(void*_Nonnull ptr, RustBuffer universal_id, RustBuffer doc_id, RustBuffer update
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_RELEASE_BLOBS
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_RELEASE_BLOBS
|
||||
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_release_blobs(void*_Nonnull ptr, RustBuffer universal_id
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_SET_BLOB
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_SET_BLOB
|
||||
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_set_blob(void*_Nonnull ptr, RustBuffer universal_id, RustBuffer blob
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_SET_DOC_SNAPSHOT
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_SET_DOC_SNAPSHOT
|
||||
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_set_doc_snapshot(void*_Nonnull ptr, RustBuffer universal_id, RustBuffer snapshot
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_SET_PEER_PULLED_REMOTE_CLOCK
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_SET_PEER_PULLED_REMOTE_CLOCK
|
||||
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_set_peer_pulled_remote_clock(void*_Nonnull ptr, RustBuffer universal_id, RustBuffer peer, RustBuffer doc_id, RustBuffer clock
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_SET_PEER_PUSHED_CLOCK
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_SET_PEER_PUSHED_CLOCK
|
||||
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_set_peer_pushed_clock(void*_Nonnull ptr, RustBuffer universal_id, RustBuffer peer, RustBuffer doc_id, RustBuffer clock
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_SET_PEER_REMOTE_CLOCK
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_SET_PEER_REMOTE_CLOCK
|
||||
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_set_peer_remote_clock(void*_Nonnull ptr, RustBuffer universal_id, RustBuffer peer, RustBuffer doc_id, RustBuffer clock
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_SET_SPACE_ID
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_SET_SPACE_ID
|
||||
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_set_space_id(void*_Nonnull ptr, RustBuffer universal_id, RustBuffer space_id
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_VALIDATE
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_VALIDATE
|
||||
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_validate(void*_Nonnull ptr, RustBuffer universal_id
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_FUNC_GET_DB_PATH
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_FUNC_GET_DB_PATH
|
||||
RustBuffer uniffi_affine_mobile_native_fn_func_get_db_path(RustBuffer peer, RustBuffer space_type, RustBuffer id, RustCallStatus *_Nonnull out_status
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_FUNC_HASHCASH_MINT
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_FUNC_HASHCASH_MINT
|
||||
@@ -534,12 +689,186 @@ void ffi_affine_mobile_native_rust_future_free_void(uint64_t handle
|
||||
#ifndef UNIFFI_FFIDEF_FFI_AFFINE_MOBILE_NATIVE_RUST_FUTURE_COMPLETE_VOID
|
||||
#define UNIFFI_FFIDEF_FFI_AFFINE_MOBILE_NATIVE_RUST_FUTURE_COMPLETE_VOID
|
||||
void ffi_affine_mobile_native_rust_future_complete_void(uint64_t handle, RustCallStatus *_Nonnull out_status
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_FUNC_GET_DB_PATH
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_FUNC_GET_DB_PATH
|
||||
uint16_t uniffi_affine_mobile_native_checksum_func_get_db_path(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_FUNC_HASHCASH_MINT
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_FUNC_HASHCASH_MINT
|
||||
uint16_t uniffi_affine_mobile_native_checksum_func_hashcash_mint(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_CHECKPOINT
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_CHECKPOINT
|
||||
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_checkpoint(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_CLEAR_CLOCKS
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_CLEAR_CLOCKS
|
||||
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_clear_clocks(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_CLOSE
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_CLOSE
|
||||
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_close(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_CONNECT
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_CONNECT
|
||||
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_connect(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_DELETE_BLOB
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_DELETE_BLOB
|
||||
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_delete_blob(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_DELETE_DOC
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_DELETE_DOC
|
||||
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_delete_doc(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_GET_BLOB
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_GET_BLOB
|
||||
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_get_blob(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_GET_DOC_CLOCK
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_GET_DOC_CLOCK
|
||||
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_get_doc_clock(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_GET_DOC_CLOCKS
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_GET_DOC_CLOCKS
|
||||
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_get_doc_clocks(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_GET_DOC_SNAPSHOT
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_GET_DOC_SNAPSHOT
|
||||
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_get_doc_snapshot(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_GET_DOC_UPDATES
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_GET_DOC_UPDATES
|
||||
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_get_doc_updates(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_GET_PEER_PULLED_REMOTE_CLOCK
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_GET_PEER_PULLED_REMOTE_CLOCK
|
||||
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_get_peer_pulled_remote_clock(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_GET_PEER_PULLED_REMOTE_CLOCKS
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_GET_PEER_PULLED_REMOTE_CLOCKS
|
||||
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_get_peer_pulled_remote_clocks(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_GET_PEER_PUSHED_CLOCKS
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_GET_PEER_PUSHED_CLOCKS
|
||||
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_get_peer_pushed_clocks(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_GET_PEER_REMOTE_CLOCK
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_GET_PEER_REMOTE_CLOCK
|
||||
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_get_peer_remote_clock(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_GET_PEER_REMOTE_CLOCKS
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_GET_PEER_REMOTE_CLOCKS
|
||||
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_get_peer_remote_clocks(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_IS_CLOSED
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_IS_CLOSED
|
||||
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_is_closed(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_LIST_BLOBS
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_LIST_BLOBS
|
||||
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_list_blobs(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_MARK_UPDATES_MERGED
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_MARK_UPDATES_MERGED
|
||||
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_mark_updates_merged(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_PUSH_UPDATE
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_PUSH_UPDATE
|
||||
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_push_update(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_RELEASE_BLOBS
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_RELEASE_BLOBS
|
||||
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_release_blobs(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_SET_BLOB
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_SET_BLOB
|
||||
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_set_blob(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_SET_DOC_SNAPSHOT
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_SET_DOC_SNAPSHOT
|
||||
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_set_doc_snapshot(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_SET_PEER_PULLED_REMOTE_CLOCK
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_SET_PEER_PULLED_REMOTE_CLOCK
|
||||
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_set_peer_pulled_remote_clock(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_SET_PEER_PUSHED_CLOCK
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_SET_PEER_PUSHED_CLOCK
|
||||
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_set_peer_pushed_clock(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_SET_PEER_REMOTE_CLOCK
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_SET_PEER_REMOTE_CLOCK
|
||||
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_set_peer_remote_clock(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_SET_SPACE_ID
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_SET_SPACE_ID
|
||||
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_set_space_id(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_VALIDATE
|
||||
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_VALIDATE
|
||||
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_validate(void
|
||||
|
||||
);
|
||||
#endif
|
||||
#ifndef UNIFFI_FFIDEF_FFI_AFFINE_MOBILE_NATIVE_UNIFFI_CONTRACT_VERSION
|
||||
|
||||
@@ -71,4 +71,4 @@ for arch in $ARCHS; do
|
||||
esac
|
||||
done
|
||||
|
||||
$HOME/.cargo/bin/cargo run --bin uniffi-bindgen generate --library $SRCROOT/lib${FFI_TARGET}.a --language swift --out-dir $SRCROOT/../../ios/App/App/uniffi
|
||||
$HOME/.cargo/bin/cargo run -p affine_mobile_native --bin uniffi-bindgen generate --library $SRCROOT/lib${FFI_TARGET}.a --language swift --out-dir $SRCROOT/../../ios/App/App/uniffi
|
||||
|
||||
@@ -27,9 +27,11 @@
|
||||
"next-themes": "^0.4.4",
|
||||
"react": "^19.0.0",
|
||||
"react-dom": "^19.0.0",
|
||||
"react-router-dom": "^6.28.0"
|
||||
"react-router-dom": "^6.28.0",
|
||||
"yjs": "13.6.18"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@affine/native": "workspace:*",
|
||||
"@capacitor/cli": "^6.2.0",
|
||||
"@types/react": "^19.0.1",
|
||||
"@types/react-dom": "^19.0.2",
|
||||
|
||||
33
packages/frontend/apps/ios/src/plugins/nbstore/blob.ts
Normal file
33
packages/frontend/apps/ios/src/plugins/nbstore/blob.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import { type BlobRecord, BlobStorageBase, share } from '@affine/nbstore';
|
||||
|
||||
import { NativeDBConnection } from './db';
|
||||
|
||||
export class SqliteBlobStorage extends BlobStorageBase {
|
||||
override connection = share(
|
||||
new NativeDBConnection(this.peer, this.spaceType, this.spaceId)
|
||||
);
|
||||
|
||||
get db() {
|
||||
return this.connection.inner;
|
||||
}
|
||||
|
||||
override async get(key: string) {
|
||||
return this.db.getBlob(key);
|
||||
}
|
||||
|
||||
override async set(blob: BlobRecord) {
|
||||
await this.db.setBlob(blob);
|
||||
}
|
||||
|
||||
override async delete(key: string, permanently: boolean) {
|
||||
await this.db.deleteBlob(key, permanently);
|
||||
}
|
||||
|
||||
override async release() {
|
||||
await this.db.releaseBlobs();
|
||||
}
|
||||
|
||||
override async list() {
|
||||
return this.db.listBlobs();
|
||||
}
|
||||
}
|
||||
60
packages/frontend/apps/ios/src/plugins/nbstore/db.ts
Normal file
60
packages/frontend/apps/ios/src/plugins/nbstore/db.ts
Normal file
@@ -0,0 +1,60 @@
|
||||
import type { DocStorage } from '@affine/native';
|
||||
import {
|
||||
AutoReconnectConnection,
|
||||
isValidSpaceType,
|
||||
type SpaceType,
|
||||
universalId,
|
||||
} from '@affine/nbstore';
|
||||
|
||||
import { NativeDocStorage, NbStoreDocStorage } from './plugin';
|
||||
|
||||
export class NativeDBConnection extends AutoReconnectConnection<DocStorage> {
|
||||
private readonly universalId: string;
|
||||
|
||||
constructor(
|
||||
private readonly peer: string,
|
||||
private readonly type: SpaceType,
|
||||
private readonly id: string
|
||||
) {
|
||||
super();
|
||||
if (!isValidSpaceType(type)) {
|
||||
throw new TypeError(`Invalid space type: ${type}`);
|
||||
}
|
||||
this.universalId = universalId({
|
||||
peer: peer,
|
||||
type: type,
|
||||
id: id,
|
||||
});
|
||||
}
|
||||
|
||||
async getDBPath() {
|
||||
const { path } = await NbStoreDocStorage.getSpaceDBPath({
|
||||
peer: this.peer,
|
||||
spaceType: this.type,
|
||||
id: this.id,
|
||||
});
|
||||
return path;
|
||||
}
|
||||
|
||||
override get shareId(): string {
|
||||
return `sqlite:${this.peer}:${this.type}:${this.id}`;
|
||||
}
|
||||
|
||||
override async doConnect() {
|
||||
const conn = new NativeDocStorage(this.universalId);
|
||||
await conn.connect();
|
||||
console.info('[nbstore] connection established', this.shareId);
|
||||
return conn;
|
||||
}
|
||||
|
||||
override doDisconnect(conn: NativeDocStorage) {
|
||||
conn
|
||||
.close()
|
||||
.then(() => {
|
||||
console.info('[nbstore] connection closed', this.shareId);
|
||||
})
|
||||
.catch(err => {
|
||||
console.error('[nbstore] connection close failed', this.shareId, err);
|
||||
});
|
||||
}
|
||||
}
|
||||
144
packages/frontend/apps/ios/src/plugins/nbstore/definitions.ts
Normal file
144
packages/frontend/apps/ios/src/plugins/nbstore/definitions.ts
Normal file
@@ -0,0 +1,144 @@
|
||||
export interface Blob {
|
||||
key: string;
|
||||
// base64 encoded data
|
||||
data: string;
|
||||
mime: string;
|
||||
size: number;
|
||||
createdAt: number;
|
||||
}
|
||||
|
||||
export interface SetBlob {
|
||||
key: string;
|
||||
// base64 encoded data
|
||||
data: string;
|
||||
mime: string;
|
||||
}
|
||||
|
||||
export interface ListedBlob {
|
||||
key: string;
|
||||
mime: string;
|
||||
size: number;
|
||||
createdAt: number;
|
||||
}
|
||||
|
||||
export interface DocClock {
|
||||
docId: string;
|
||||
timestamp: number;
|
||||
}
|
||||
|
||||
export interface NbStorePlugin {
|
||||
getSpaceDBPath: (options: {
|
||||
peer: string;
|
||||
spaceType: string;
|
||||
id: string;
|
||||
}) => Promise<{ path: string }>;
|
||||
create: (options: { id: string; path: string }) => Promise<void>;
|
||||
connect: (options: { id: string }) => Promise<void>;
|
||||
close: (options: { id: string }) => Promise<void>;
|
||||
isClosed: (options: { id: string }) => Promise<{ isClosed: boolean }>;
|
||||
checkpoint: (options: { id: string }) => Promise<void>;
|
||||
validate: (options: { id: string }) => Promise<{ isValidate: boolean }>;
|
||||
|
||||
setSpaceId: (options: { id: string; spaceId: string }) => Promise<void>;
|
||||
pushUpdate: (options: {
|
||||
id: string;
|
||||
docId: string;
|
||||
data: string;
|
||||
}) => Promise<{ timestamp: number }>;
|
||||
getDocSnapshot: (options: { id: string; docId: string }) => Promise<
|
||||
| {
|
||||
docId: string;
|
||||
// base64 encoded data
|
||||
data: string;
|
||||
timestamp: number;
|
||||
}
|
||||
| undefined
|
||||
>;
|
||||
setDocSnapshot: (options: {
|
||||
id: string;
|
||||
docId: string;
|
||||
data: string;
|
||||
}) => Promise<{ success: boolean }>;
|
||||
getDocUpdates: (options: { id: string; docId: string }) => Promise<
|
||||
{
|
||||
docId: string;
|
||||
createdAt: number;
|
||||
// base64 encoded data
|
||||
data: string;
|
||||
}[]
|
||||
>;
|
||||
markUpdatesMerged: (options: {
|
||||
id: string;
|
||||
docId: string;
|
||||
timestamps: number[];
|
||||
}) => Promise<{ count: number }>;
|
||||
deleteDoc: (options: { id: string; docId: string }) => Promise<void>;
|
||||
getDocClocks: (options: { id: string; after: number }) => Promise<
|
||||
{
|
||||
docId: string;
|
||||
timestamp: number;
|
||||
}[]
|
||||
>;
|
||||
getDocClock: (options: { id: string; docId: string }) => Promise<
|
||||
| {
|
||||
docId: string;
|
||||
timestamp: number;
|
||||
}
|
||||
| undefined
|
||||
>;
|
||||
getBlob: (options: { id: string; key: string }) => Promise<Blob | null>;
|
||||
setBlob: (options: { id: string } & SetBlob) => Promise<void>;
|
||||
deleteBlob: (options: {
|
||||
id: string;
|
||||
key: string;
|
||||
permanently: boolean;
|
||||
}) => Promise<void>;
|
||||
releaseBlobs: (options: { id: string }) => Promise<void>;
|
||||
listBlobs: (options: { id: string }) => Promise<Array<ListedBlob>>;
|
||||
getPeerRemoteClocks: (options: {
|
||||
id: string;
|
||||
peer: string;
|
||||
}) => Promise<Array<DocClock>>;
|
||||
getPeerRemoteClock: (options: {
|
||||
id: string;
|
||||
peer: string;
|
||||
docId: string;
|
||||
}) => Promise<DocClock>;
|
||||
setPeerRemoteClock: (options: {
|
||||
id: string;
|
||||
peer: string;
|
||||
docId: string;
|
||||
clock: number;
|
||||
}) => Promise<void>;
|
||||
getPeerPushedClocks: (options: {
|
||||
id: string;
|
||||
peer: string;
|
||||
}) => Promise<Array<DocClock>>;
|
||||
getPeerPushedClock: (options: {
|
||||
id: string;
|
||||
peer: string;
|
||||
docId: string;
|
||||
}) => Promise<DocClock>;
|
||||
setPeerPushedClock: (options: {
|
||||
id: string;
|
||||
peer: string;
|
||||
docId: string;
|
||||
clock: number;
|
||||
}) => Promise<void>;
|
||||
getPeerPulledRemoteClocks: (options: {
|
||||
id: string;
|
||||
peer: string;
|
||||
}) => Promise<Array<DocClock>>;
|
||||
getPeerPulledRemoteClock: (options: {
|
||||
id: string;
|
||||
peer: string;
|
||||
docId: string;
|
||||
}) => Promise<DocClock>;
|
||||
setPeerPulledRemoteClock: (options: {
|
||||
id: string;
|
||||
peer: string;
|
||||
docId: string;
|
||||
clock: number;
|
||||
}) => Promise<void>;
|
||||
clearClocks: (options: { id: string }) => Promise<void>;
|
||||
}
|
||||
83
packages/frontend/apps/ios/src/plugins/nbstore/doc.ts
Normal file
83
packages/frontend/apps/ios/src/plugins/nbstore/doc.ts
Normal file
@@ -0,0 +1,83 @@
|
||||
import {
|
||||
type DocClocks,
|
||||
type DocRecord,
|
||||
DocStorageBase,
|
||||
type DocUpdate,
|
||||
share,
|
||||
} from '@affine/nbstore';
|
||||
|
||||
import { NativeDBConnection } from './db';
|
||||
|
||||
export class SqliteDocStorage extends DocStorageBase {
|
||||
override connection = share(
|
||||
new NativeDBConnection(this.peer, this.spaceType, this.spaceId)
|
||||
);
|
||||
|
||||
get db() {
|
||||
return this.connection.inner;
|
||||
}
|
||||
|
||||
override async pushDocUpdate(update: DocUpdate) {
|
||||
const timestamp = await this.db.pushUpdate(update.docId, update.bin);
|
||||
|
||||
return { docId: update.docId, timestamp };
|
||||
}
|
||||
|
||||
override async deleteDoc(docId: string) {
|
||||
await this.db.deleteDoc(docId);
|
||||
}
|
||||
|
||||
override async getDocTimestamps(after?: Date) {
|
||||
const clocks = await this.db.getDocClocks(after);
|
||||
|
||||
return clocks.reduce((ret, cur) => {
|
||||
ret[cur.docId] = cur.timestamp;
|
||||
return ret;
|
||||
}, {} as DocClocks);
|
||||
}
|
||||
|
||||
override async getDocTimestamp(docId: string) {
|
||||
return this.db.getDocClock(docId);
|
||||
}
|
||||
|
||||
protected override async getDocSnapshot(docId: string) {
|
||||
const snapshot = await this.db.getDocSnapshot(docId);
|
||||
|
||||
if (!snapshot) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
docId,
|
||||
bin: snapshot.data,
|
||||
timestamp: snapshot.timestamp,
|
||||
};
|
||||
}
|
||||
|
||||
protected override async setDocSnapshot(
|
||||
snapshot: DocRecord
|
||||
): Promise<boolean> {
|
||||
return this.db.setDocSnapshot({
|
||||
docId: snapshot.docId,
|
||||
data: Buffer.from(snapshot.bin),
|
||||
timestamp: new Date(snapshot.timestamp),
|
||||
});
|
||||
}
|
||||
|
||||
protected override async getDocUpdates(docId: string) {
|
||||
return this.db.getDocUpdates(docId).then(updates =>
|
||||
updates.map(update => ({
|
||||
docId,
|
||||
bin: update.data,
|
||||
timestamp: update.createdAt,
|
||||
}))
|
||||
);
|
||||
}
|
||||
|
||||
protected override markUpdatesMerged(docId: string, updates: DocRecord[]) {
|
||||
return this.db.markUpdatesMerged(
|
||||
docId,
|
||||
updates.map(update => update.timestamp)
|
||||
);
|
||||
}
|
||||
}
|
||||
128
packages/frontend/apps/ios/src/plugins/nbstore/handlers.ts
Normal file
128
packages/frontend/apps/ios/src/plugins/nbstore/handlers.ts
Normal file
@@ -0,0 +1,128 @@
|
||||
import {
|
||||
type BlobRecord,
|
||||
type DocClock,
|
||||
type DocUpdate,
|
||||
} from '@affine/nbstore';
|
||||
|
||||
import { ensureStorage, getStorage } from './storage';
|
||||
|
||||
export const nbstoreHandlers = {
|
||||
connect: async (id: string) => {
|
||||
await ensureStorage(id);
|
||||
},
|
||||
|
||||
close: async (id: string) => {
|
||||
const store = getStorage(id);
|
||||
|
||||
if (store) {
|
||||
store.disconnect();
|
||||
// The store may be shared with other tabs, so we don't delete it from cache
|
||||
// the underlying connection will handle the close correctly
|
||||
// STORE_CACHE.delete(`${spaceType}:${spaceId}`);
|
||||
}
|
||||
},
|
||||
|
||||
pushDocUpdate: async (id: string, update: DocUpdate) => {
|
||||
const store = await ensureStorage(id);
|
||||
return store.get('doc').pushDocUpdate(update);
|
||||
},
|
||||
|
||||
getDoc: async (id: string, docId: string) => {
|
||||
const store = await ensureStorage(id);
|
||||
return store.get('doc').getDoc(docId);
|
||||
},
|
||||
|
||||
deleteDoc: async (id: string, docId: string) => {
|
||||
const store = await ensureStorage(id);
|
||||
return store.get('doc').deleteDoc(docId);
|
||||
},
|
||||
|
||||
getDocTimestamps: async (id: string, after?: Date) => {
|
||||
const store = await ensureStorage(id);
|
||||
return store.get('doc').getDocTimestamps(after);
|
||||
},
|
||||
|
||||
getDocTimestamp: async (id: string, docId: string) => {
|
||||
const store = await ensureStorage(id);
|
||||
return store.get('doc').getDocTimestamp(docId);
|
||||
},
|
||||
|
||||
setBlob: async (id: string, blob: BlobRecord) => {
|
||||
const store = await ensureStorage(id);
|
||||
return store.get('blob').set(blob);
|
||||
},
|
||||
|
||||
getBlob: async (id: string, key: string) => {
|
||||
const store = await ensureStorage(id);
|
||||
return store.get('blob').get(key);
|
||||
},
|
||||
|
||||
deleteBlob: async (id: string, key: string, permanently: boolean) => {
|
||||
const store = await ensureStorage(id);
|
||||
return store.get('blob').delete(key, permanently);
|
||||
},
|
||||
|
||||
listBlobs: async (id: string) => {
|
||||
const store = await ensureStorage(id);
|
||||
return store.get('blob').list();
|
||||
},
|
||||
|
||||
releaseBlobs: async (id: string) => {
|
||||
const store = await ensureStorage(id);
|
||||
return store.get('blob').release();
|
||||
},
|
||||
|
||||
getPeerRemoteClocks: async (id: string, peer: string) => {
|
||||
const store = await ensureStorage(id);
|
||||
return store.get('sync').getPeerRemoteClocks(peer);
|
||||
},
|
||||
|
||||
getPeerRemoteClock: async (id: string, peer: string, docId: string) => {
|
||||
const store = await ensureStorage(id);
|
||||
return store.get('sync').getPeerRemoteClock(peer, docId);
|
||||
},
|
||||
|
||||
setPeerRemoteClock: async (id: string, peer: string, clock: DocClock) => {
|
||||
const store = await ensureStorage(id);
|
||||
return store.get('sync').setPeerRemoteClock(peer, clock);
|
||||
},
|
||||
|
||||
getPeerPulledRemoteClocks: async (id: string, peer: string) => {
|
||||
const store = await ensureStorage(id);
|
||||
return store.get('sync').getPeerPulledRemoteClocks(peer);
|
||||
},
|
||||
|
||||
getPeerPulledRemoteClock: async (id: string, peer: string, docId: string) => {
|
||||
const store = await ensureStorage(id);
|
||||
return store.get('sync').getPeerPulledRemoteClock(peer, docId);
|
||||
},
|
||||
|
||||
setPeerPulledRemoteClock: async (
|
||||
id: string,
|
||||
peer: string,
|
||||
clock: DocClock
|
||||
) => {
|
||||
const store = await ensureStorage(id);
|
||||
return store.get('sync').setPeerPulledRemoteClock(peer, clock);
|
||||
},
|
||||
|
||||
getPeerPushedClocks: async (id: string, peer: string) => {
|
||||
const store = await ensureStorage(id);
|
||||
return store.get('sync').getPeerPushedClocks(peer);
|
||||
},
|
||||
|
||||
getPeerPushedClock: async (id: string, peer: string, docId: string) => {
|
||||
const store = await ensureStorage(id);
|
||||
return store.get('sync').getPeerPushedClock(peer, docId);
|
||||
},
|
||||
|
||||
setPeerPushedClock: async (id: string, peer: string, clock: DocClock) => {
|
||||
const store = await ensureStorage(id);
|
||||
return store.get('sync').setPeerPushedClock(peer, clock);
|
||||
},
|
||||
|
||||
clearClocks: async (id: string) => {
|
||||
const store = await ensureStorage(id);
|
||||
return store.get('sync').clearClocks();
|
||||
},
|
||||
};
|
||||
5
packages/frontend/apps/ios/src/plugins/nbstore/index.ts
Normal file
5
packages/frontend/apps/ios/src/plugins/nbstore/index.ts
Normal file
@@ -0,0 +1,5 @@
|
||||
export * from './definitions';
|
||||
export { nbstoreHandlers } from './handlers';
|
||||
export { NbStoreDocStorage } from './plugin';
|
||||
export * from './storage';
|
||||
export { universalId } from '@affine/nbstore';
|
||||
312
packages/frontend/apps/ios/src/plugins/nbstore/plugin.ts
Normal file
312
packages/frontend/apps/ios/src/plugins/nbstore/plugin.ts
Normal file
@@ -0,0 +1,312 @@
|
||||
import {
|
||||
base64ToUint8Array,
|
||||
uint8ArrayToBase64,
|
||||
} from '@affine/core/modules/workspace-engine';
|
||||
import {
|
||||
type Blob,
|
||||
type DocClock,
|
||||
type DocRecord,
|
||||
type DocStorage,
|
||||
type DocUpdate,
|
||||
type ListedBlob,
|
||||
} from '@affine/native';
|
||||
import { registerPlugin } from '@capacitor/core';
|
||||
|
||||
import type { NbStorePlugin } from './definitions';
|
||||
|
||||
export const NbStoreDocStorage =
|
||||
registerPlugin<NbStorePlugin>('NbStoreDocStorage');
|
||||
|
||||
export interface SetBlob {
|
||||
key: string;
|
||||
data: Uint8Array;
|
||||
mime: string;
|
||||
}
|
||||
|
||||
export class NativeDocStorage implements DocStorage {
|
||||
constructor(private readonly universalId: string) {}
|
||||
|
||||
/** Initialize the database and run migrations. */
|
||||
connect(): Promise<void> {
|
||||
return NbStoreDocStorage.connect({
|
||||
id: this.universalId,
|
||||
});
|
||||
}
|
||||
|
||||
close(): Promise<void> {
|
||||
return NbStoreDocStorage.close({
|
||||
id: this.universalId,
|
||||
});
|
||||
}
|
||||
|
||||
get isClosed(): Promise<boolean> {
|
||||
return NbStoreDocStorage.isClosed({
|
||||
id: this.universalId,
|
||||
}).then(result => result.isClosed);
|
||||
}
|
||||
/**
|
||||
* Flush the WAL file to the database file.
|
||||
* See https://www.sqlite.org/pragma.html#pragma_wal_checkpoint:~:text=PRAGMA%20schema.wal_checkpoint%3B
|
||||
*/
|
||||
checkpoint(): Promise<void> {
|
||||
return NbStoreDocStorage.checkpoint({
|
||||
id: this.universalId,
|
||||
});
|
||||
}
|
||||
|
||||
validate(): Promise<boolean> {
|
||||
return NbStoreDocStorage.validate({
|
||||
id: this.universalId,
|
||||
}).then(result => result.isValidate);
|
||||
}
|
||||
|
||||
setSpaceId(spaceId: string): Promise<void> {
|
||||
return NbStoreDocStorage.setSpaceId({
|
||||
id: this.universalId,
|
||||
spaceId,
|
||||
});
|
||||
}
|
||||
|
||||
async pushUpdate(docId: string, update: Uint8Array): Promise<Date> {
|
||||
return NbStoreDocStorage.pushUpdate({
|
||||
id: this.universalId,
|
||||
docId,
|
||||
data: await uint8ArrayToBase64(update),
|
||||
}).then(result => new Date(result.timestamp));
|
||||
}
|
||||
|
||||
getDocSnapshot(docId: string): Promise<DocRecord | null> {
|
||||
return NbStoreDocStorage.getDocSnapshot({
|
||||
id: this.universalId,
|
||||
docId,
|
||||
}).then(result => {
|
||||
if (result) {
|
||||
return {
|
||||
...result,
|
||||
data: base64ToUint8Array(result.data),
|
||||
timestamp: new Date(result.timestamp),
|
||||
};
|
||||
}
|
||||
return null;
|
||||
});
|
||||
}
|
||||
|
||||
async setDocSnapshot(snapshot: DocRecord): Promise<boolean> {
|
||||
return NbStoreDocStorage.setDocSnapshot({
|
||||
id: this.universalId,
|
||||
docId: snapshot.docId,
|
||||
data: await uint8ArrayToBase64(snapshot.data),
|
||||
}).then(result => result.success);
|
||||
}
|
||||
|
||||
getDocUpdates(docId: string): Promise<Array<DocUpdate>> {
|
||||
return NbStoreDocStorage.getDocUpdates({
|
||||
id: this.universalId,
|
||||
docId,
|
||||
}).then(result =>
|
||||
result.map(update => ({
|
||||
...update,
|
||||
data: base64ToUint8Array(update.data),
|
||||
createdAt: new Date(update.createdAt),
|
||||
}))
|
||||
);
|
||||
}
|
||||
|
||||
markUpdatesMerged(docId: string, updates: Array<Date>): Promise<number> {
|
||||
return NbStoreDocStorage.markUpdatesMerged({
|
||||
id: this.universalId,
|
||||
docId,
|
||||
timestamps: updates.map(date => date.getTime()),
|
||||
}).then(result => result.count);
|
||||
}
|
||||
|
||||
deleteDoc(docId: string): Promise<void> {
|
||||
return NbStoreDocStorage.deleteDoc({
|
||||
id: this.universalId,
|
||||
docId,
|
||||
});
|
||||
}
|
||||
|
||||
getDocClocks(after: Date): Promise<Array<DocClock>> {
|
||||
return NbStoreDocStorage.getDocClocks({
|
||||
id: this.universalId,
|
||||
after: after.getTime(),
|
||||
}).then(result =>
|
||||
result.map(clock => ({
|
||||
...clock,
|
||||
timestamp: new Date(clock.timestamp),
|
||||
}))
|
||||
);
|
||||
}
|
||||
|
||||
getDocClock(docId: string): Promise<DocClock | null> {
|
||||
return NbStoreDocStorage.getDocClock({
|
||||
id: this.universalId,
|
||||
docId,
|
||||
}).then(result => {
|
||||
if (result) {
|
||||
return {
|
||||
...result,
|
||||
timestamp: new Date(result.timestamp),
|
||||
};
|
||||
}
|
||||
return null;
|
||||
});
|
||||
}
|
||||
|
||||
getBlob(key: string): Promise<Blob | null> {
|
||||
return NbStoreDocStorage.getBlob({
|
||||
id: this.universalId,
|
||||
key,
|
||||
}).then(result => {
|
||||
if (result) {
|
||||
return {
|
||||
...result,
|
||||
data: base64ToUint8Array(result.data),
|
||||
createdAt: new Date(result.createdAt),
|
||||
};
|
||||
}
|
||||
return null;
|
||||
});
|
||||
}
|
||||
|
||||
async setBlob(blob: SetBlob): Promise<void> {
|
||||
return NbStoreDocStorage.setBlob({
|
||||
id: this.universalId,
|
||||
key: blob.key,
|
||||
data: await uint8ArrayToBase64(blob.data),
|
||||
mime: blob.mime,
|
||||
});
|
||||
}
|
||||
|
||||
deleteBlob(key: string, permanently: boolean): Promise<void> {
|
||||
return NbStoreDocStorage.deleteBlob({
|
||||
id: this.universalId,
|
||||
key,
|
||||
permanently,
|
||||
});
|
||||
}
|
||||
|
||||
releaseBlobs(): Promise<void> {
|
||||
return NbStoreDocStorage.releaseBlobs({
|
||||
id: this.universalId,
|
||||
});
|
||||
}
|
||||
|
||||
async listBlobs(): Promise<Array<ListedBlob>> {
|
||||
return (
|
||||
await NbStoreDocStorage.listBlobs({
|
||||
id: this.universalId,
|
||||
})
|
||||
).map(blob => ({
|
||||
...blob,
|
||||
createdAt: new Date(blob.createdAt),
|
||||
}));
|
||||
}
|
||||
|
||||
getPeerRemoteClocks(peer: string): Promise<Array<DocClock>> {
|
||||
return NbStoreDocStorage.getPeerRemoteClocks({
|
||||
id: this.universalId,
|
||||
peer,
|
||||
}).then(result =>
|
||||
result.map(clock => ({
|
||||
...clock,
|
||||
timestamp: new Date(clock.timestamp),
|
||||
}))
|
||||
);
|
||||
}
|
||||
|
||||
getPeerRemoteClock(peer: string, docId: string): Promise<DocClock> {
|
||||
return NbStoreDocStorage.getPeerRemoteClock({
|
||||
id: this.universalId,
|
||||
peer,
|
||||
docId,
|
||||
}).then(result => ({
|
||||
...result,
|
||||
timestamp: new Date(result.timestamp),
|
||||
}));
|
||||
}
|
||||
|
||||
setPeerRemoteClock(peer: string, docId: string, clock: Date): Promise<void> {
|
||||
return NbStoreDocStorage.setPeerRemoteClock({
|
||||
id: this.universalId,
|
||||
peer,
|
||||
docId,
|
||||
clock: clock.getTime(),
|
||||
});
|
||||
}
|
||||
|
||||
getPeerPulledRemoteClocks(peer: string): Promise<Array<DocClock>> {
|
||||
return NbStoreDocStorage.getPeerPulledRemoteClocks({
|
||||
id: this.universalId,
|
||||
peer,
|
||||
}).then(result =>
|
||||
result.map(clock => ({
|
||||
...clock,
|
||||
timestamp: new Date(clock.timestamp),
|
||||
}))
|
||||
);
|
||||
}
|
||||
|
||||
getPeerPulledRemoteClock(peer: string, docId: string): Promise<DocClock> {
|
||||
return NbStoreDocStorage.getPeerPulledRemoteClock({
|
||||
id: this.universalId,
|
||||
peer,
|
||||
docId,
|
||||
}).then(result => ({
|
||||
...result,
|
||||
timestamp: new Date(result.timestamp),
|
||||
}));
|
||||
}
|
||||
|
||||
setPeerPulledRemoteClock(
|
||||
peer: string,
|
||||
docId: string,
|
||||
clock: Date
|
||||
): Promise<void> {
|
||||
return NbStoreDocStorage.setPeerPulledRemoteClock({
|
||||
id: this.universalId,
|
||||
peer,
|
||||
docId,
|
||||
clock: clock.getTime(),
|
||||
});
|
||||
}
|
||||
|
||||
getPeerPushedClocks(peer: string): Promise<Array<DocClock>> {
|
||||
return NbStoreDocStorage.getPeerPushedClocks({
|
||||
id: this.universalId,
|
||||
peer,
|
||||
}).then(result =>
|
||||
result.map(clock => ({
|
||||
...clock,
|
||||
timestamp: new Date(clock.timestamp),
|
||||
}))
|
||||
);
|
||||
}
|
||||
|
||||
getPeerPushedClock(peer: string, docId: string): Promise<DocClock> {
|
||||
return NbStoreDocStorage.getPeerPushedClock({
|
||||
id: this.universalId,
|
||||
peer,
|
||||
docId,
|
||||
}).then(result => ({
|
||||
...result,
|
||||
timestamp: new Date(result.timestamp),
|
||||
}));
|
||||
}
|
||||
|
||||
setPeerPushedClock(peer: string, docId: string, clock: Date): Promise<void> {
|
||||
return NbStoreDocStorage.setPeerPushedClock({
|
||||
id: this.universalId,
|
||||
peer,
|
||||
docId,
|
||||
clock: clock.getTime(),
|
||||
});
|
||||
}
|
||||
|
||||
clearClocks(): Promise<void> {
|
||||
return NbStoreDocStorage.clearClocks({
|
||||
id: this.universalId,
|
||||
});
|
||||
}
|
||||
}
|
||||
83
packages/frontend/apps/ios/src/plugins/nbstore/storage.ts
Normal file
83
packages/frontend/apps/ios/src/plugins/nbstore/storage.ts
Normal file
@@ -0,0 +1,83 @@
|
||||
import { parseUniversalId, SpaceStorage } from '@affine/nbstore';
|
||||
import { applyUpdate, Doc as YDoc } from 'yjs';
|
||||
|
||||
import { SqliteBlobStorage } from './blob';
|
||||
import { NativeDBConnection } from './db';
|
||||
import { SqliteDocStorage } from './doc';
|
||||
import { SqliteSyncStorage } from './sync';
|
||||
|
||||
export class SqliteSpaceStorage extends SpaceStorage {
|
||||
get connection() {
|
||||
const docStore = this.get('doc');
|
||||
|
||||
if (!docStore) {
|
||||
throw new Error('doc store not found');
|
||||
}
|
||||
|
||||
const connection = docStore.connection;
|
||||
|
||||
if (!(connection instanceof NativeDBConnection)) {
|
||||
throw new Error('doc store connection is not a Sqlite connection');
|
||||
}
|
||||
|
||||
return connection;
|
||||
}
|
||||
|
||||
async getDBPath() {
|
||||
return this.connection.getDBPath();
|
||||
}
|
||||
|
||||
async getWorkspaceName() {
|
||||
const docStore = this.tryGet('doc');
|
||||
|
||||
if (!docStore) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const doc = await docStore.getDoc(docStore.spaceId);
|
||||
if (!doc) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const ydoc = new YDoc();
|
||||
applyUpdate(ydoc, doc.bin);
|
||||
return ydoc.getMap('meta').get('name') as string;
|
||||
}
|
||||
|
||||
async checkpoint() {
|
||||
await this.connection.inner.checkpoint();
|
||||
}
|
||||
}
|
||||
|
||||
// Process-wide cache of connected stores, keyed by universal id.
const STORE_CACHE = new Map<string, SqliteSpaceStorage>();

/**
 * Returns the cached store for `universalId`, or undefined when none has
 * been created yet (see ensureStorage for creation).
 */
export function getStorage(universalId: string) {
  return STORE_CACHE.get(universalId);
}
|
||||
|
||||
export async function ensureStorage(universalId: string) {
|
||||
const { peer, type, id } = parseUniversalId(universalId);
|
||||
let store = STORE_CACHE.get(universalId);
|
||||
|
||||
if (!store) {
|
||||
const opts = {
|
||||
peer,
|
||||
type,
|
||||
id,
|
||||
};
|
||||
|
||||
store = new SqliteSpaceStorage([
|
||||
new SqliteDocStorage(opts),
|
||||
new SqliteBlobStorage(opts),
|
||||
new SqliteSyncStorage(opts),
|
||||
]);
|
||||
|
||||
store.connect();
|
||||
|
||||
await store.waitForConnected();
|
||||
|
||||
STORE_CACHE.set(universalId, store);
|
||||
}
|
||||
|
||||
return store;
|
||||
}
|
||||
70
packages/frontend/apps/ios/src/plugins/nbstore/sync.ts
Normal file
70
packages/frontend/apps/ios/src/plugins/nbstore/sync.ts
Normal file
@@ -0,0 +1,70 @@
|
||||
import {
|
||||
BasicSyncStorage,
|
||||
type DocClock,
|
||||
type DocClocks,
|
||||
share,
|
||||
} from '@affine/nbstore';
|
||||
|
||||
import { NativeDBConnection } from './db';
|
||||
|
||||
export class SqliteSyncStorage extends BasicSyncStorage {
|
||||
override connection = share(
|
||||
new NativeDBConnection(this.peer, this.spaceType, this.spaceId)
|
||||
);
|
||||
|
||||
get db() {
|
||||
return this.connection.inner;
|
||||
}
|
||||
|
||||
override async getPeerRemoteClocks(peer: string) {
|
||||
const records = await this.db.getPeerRemoteClocks(peer);
|
||||
return records.reduce((clocks, { docId, timestamp }) => {
|
||||
clocks[docId] = timestamp;
|
||||
return clocks;
|
||||
}, {} as DocClocks);
|
||||
}
|
||||
|
||||
override async getPeerRemoteClock(peer: string, docId: string) {
|
||||
return this.db.getPeerRemoteClock(peer, docId);
|
||||
}
|
||||
|
||||
override async setPeerRemoteClock(peer: string, clock: DocClock) {
|
||||
await this.db.setPeerRemoteClock(peer, clock.docId, clock.timestamp);
|
||||
}
|
||||
|
||||
override async getPeerPulledRemoteClock(peer: string, docId: string) {
|
||||
return this.db.getPeerPulledRemoteClock(peer, docId);
|
||||
}
|
||||
|
||||
override async getPeerPulledRemoteClocks(peer: string) {
|
||||
const records = await this.db.getPeerPulledRemoteClocks(peer);
|
||||
return records.reduce((clocks, { docId, timestamp }) => {
|
||||
clocks[docId] = timestamp;
|
||||
return clocks;
|
||||
}, {} as DocClocks);
|
||||
}
|
||||
|
||||
override async setPeerPulledRemoteClock(peer: string, clock: DocClock) {
|
||||
await this.db.setPeerPulledRemoteClock(peer, clock.docId, clock.timestamp);
|
||||
}
|
||||
|
||||
override async getPeerPushedClocks(peer: string) {
|
||||
const records = await this.db.getPeerPushedClocks(peer);
|
||||
return records.reduce((clocks, { docId, timestamp }) => {
|
||||
clocks[docId] = timestamp;
|
||||
return clocks;
|
||||
}, {} as DocClocks);
|
||||
}
|
||||
|
||||
override async getPeerPushedClock(peer: string, docId: string) {
|
||||
return this.db.getPeerPushedClock(peer, docId);
|
||||
}
|
||||
|
||||
override async setPeerPushedClock(peer: string, clock: DocClock) {
|
||||
await this.db.setPeerPushedClock(peer, clock.docId, clock.timestamp);
|
||||
}
|
||||
|
||||
override async clearClocks() {
|
||||
await this.db.clearClocks();
|
||||
}
|
||||
}
|
||||
@@ -8,5 +8,9 @@
|
||||
    "rootDir": "./src"
  },
  "include": ["./src"],
  "references": [
    { "path": "../../core" },
    { "path": "../../native" },
    { "path": "../../../common/nbstore" }
  ]
}
|
||||
|
||||
@@ -16,6 +16,7 @@ import {
|
||||
import { WorkspaceEngineStorageProvider } from './providers/engine';
|
||||
|
||||
export { CloudBlobStorage } from './impls/engine/blob-cloud';
|
||||
export { base64ToUint8Array, uint8ArrayToBase64 } from './utils/base64';
|
||||
|
||||
export function configureBrowserWorkspaceFlavours(framework: Framework) {
|
||||
framework
|
||||
|
||||
@@ -12,8 +12,22 @@ name = "uniffi-bindgen"
|
||||
path = "uniffi-bindgen.rs"
|
||||
|
||||
[dependencies]
affine_common = { workspace = true }
affine_nbstore = { workspace = true, features = ["use-as-lib"] }
anyhow = { workspace = true }
base64-simd = { workspace = true }
chrono = { workspace = true }
dashmap = { workspace = true }
sqlx = { workspace = true }
thiserror = { workspace = true }
uniffi = { workspace = true, features = ["cli"] }

[target.'cfg(any(target_os = "ios", target_os = "macos"))'.dependencies]
objc2 = { workspace = true }
objc2-foundation = { workspace = true, features = ["NSArray", "NSFileManager", "NSPathUtilities", "NSString", "NSURL"] }

[target.'cfg(not(any(target_os = "ios", target_os = "macos")))'.dependencies]
homedir = { workspace = true }

[build-dependencies]
uniffi = { workspace = true, features = ["build"] }
|
||||
|
||||
29
packages/frontend/mobile-native/src/error.rs
Normal file
29
packages/frontend/mobile-native/src/error.rs
Normal file
@@ -0,0 +1,29 @@
|
||||
use thiserror::Error;
|
||||
|
||||
/// Errors surfaced to Swift through uniffi; each variant carries a
/// human-readable message via `thiserror`.
#[derive(uniffi::Error, Error, Debug)]
pub enum UniffiError {
  #[error("Get user document directory failed")]
  GetUserDocumentDirectoryFailed,
  #[error("Create affine dir failed: {0}")]
  CreateAffineDirFailed(String),
  #[error("Empty doc storage path")]
  EmptyDocStoragePath,
  #[error("Empty space id")]
  EmptySpaceId,
  // Carried as a String because sqlx::Error is not FFI-representable.
  #[error("Sqlx error: {0}")]
  SqlxError(String),
  #[error("Base64 decoding error: {0}")]
  Base64DecodingError(String),
  #[error("Invalid universal storage id: {0}. It should be in format of @peer($peer);@type($type);@id($id);")]
  InvalidUniversalId(String),
  #[error("Invalid space type: {0}")]
  InvalidSpaceType(String),
  #[error("Concat space dir failed: {0}")]
  ConcatSpaceDirFailed(String),
}
|
||||
|
||||
impl From<sqlx::Error> for UniffiError {
|
||||
fn from(err: sqlx::Error) -> Self {
|
||||
UniffiError::SqlxError(err.to_string())
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,15 @@
|
||||
use std::fmt::Display;
|
||||
use std::str::FromStr;
|
||||
use std::time::SystemTime;
|
||||
|
||||
use affine_common::hashcash::Stamp;
|
||||
use affine_nbstore::storage;
|
||||
use dashmap::{mapref::one::RefMut, DashMap, Entry};
|
||||
|
||||
use crate::error::UniffiError;
|
||||
|
||||
mod error;
|
||||
mod utils;
|
||||
|
||||
uniffi::setup_scaffolding!("affine_mobile_native");
|
||||
|
||||
@@ -6,3 +17,680 @@ uniffi::setup_scaffolding!("affine_mobile_native");
|
||||
/// Mints a hashcash stamp for `resource` with the requested difficulty
/// (`bits`) and returns it in its formatted string form.
pub fn hashcash_mint(resource: String, bits: u32) -> String {
  Stamp::mint(resource, Some(bits)).format()
}
|
||||
|
||||
/// FFI-safe mirror of `affine_nbstore::DocRecord`. The binary snapshot is
/// carried as base64 text for transport across the uniffi boundary.
#[derive(uniffi::Record)]
pub struct DocRecord {
  pub doc_id: String,
  // base64 encoded data
  pub data: String,
  pub timestamp: SystemTime,
}

impl From<affine_nbstore::DocRecord> for DocRecord {
  fn from(record: affine_nbstore::DocRecord) -> Self {
    Self {
      doc_id: record.doc_id,
      // Raw snapshot bytes -> base64 text.
      data: base64_simd::STANDARD.encode_to_string(&record.data),
      // Naive UTC timestamp -> SystemTime (uniffi's native time type).
      timestamp: record.timestamp.and_utc().into(),
    }
  }
}

impl TryFrom<DocRecord> for affine_nbstore::DocRecord {
  type Error = UniffiError;

  /// Fallible because the incoming base64 payload may be malformed.
  fn try_from(record: DocRecord) -> Result<Self, Self::Error> {
    Ok(Self {
      doc_id: record.doc_id,
      data: base64_simd::STANDARD
        .decode_to_vec(record.data)
        .map_err(|e| UniffiError::Base64DecodingError(e.to_string()))?,
      timestamp: chrono::DateTime::<chrono::Utc>::from(record.timestamp).naive_utc(),
    })
  }
}
|
||||
|
||||
/// FFI-safe mirror of `affine_nbstore::DocUpdate`.
#[derive(uniffi::Record)]
pub struct DocUpdate {
  pub doc_id: String,
  pub created_at: SystemTime,
  // base64 encoded data
  pub data: String,
}

impl From<affine_nbstore::DocUpdate> for DocUpdate {
  fn from(update: affine_nbstore::DocUpdate) -> Self {
    Self {
      doc_id: update.doc_id,
      created_at: update.created_at.and_utc().into(),
      // Raw update bytes -> base64 text.
      data: base64_simd::STANDARD.encode_to_string(&update.data),
    }
  }
}

impl From<DocUpdate> for affine_nbstore::DocUpdate {
  fn from(update: DocUpdate) -> Self {
    Self {
      doc_id: update.doc_id,
      created_at: chrono::DateTime::<chrono::Utc>::from(update.created_at).naive_utc(),
      // NOTE(review): `.into()` yields the UTF-8 bytes of the base64 TEXT
      // itself — it does not base64-decode. This is asymmetric with the
      // encoding direction above and with `TryFrom<DocRecord>`, which
      // decodes. Confirm whether this path should decode as well.
      data: update.data.into(),
    }
  }
}
|
||||
|
||||
/// FFI-safe mirror of `affine_nbstore::DocClock` (doc id + timestamp).
#[derive(uniffi::Record)]
pub struct DocClock {
  pub doc_id: String,
  pub timestamp: SystemTime,
}

impl From<affine_nbstore::DocClock> for DocClock {
  fn from(clock: affine_nbstore::DocClock) -> Self {
    Self {
      doc_id: clock.doc_id,
      // Naive UTC -> SystemTime for uniffi.
      timestamp: clock.timestamp.and_utc().into(),
    }
  }
}

impl From<DocClock> for affine_nbstore::DocClock {
  fn from(clock: DocClock) -> Self {
    Self {
      doc_id: clock.doc_id,
      // SystemTime -> naive UTC for the storage layer.
      timestamp: chrono::DateTime::<chrono::Utc>::from(clock.timestamp).naive_utc(),
    }
  }
}
|
||||
|
||||
/// FFI-safe mirror of a stored blob, payload included as base64 text.
#[derive(uniffi::Record)]
pub struct Blob {
  pub key: String,
  // base64 encoded data
  pub data: String,
  pub mime: String,
  pub size: i64,
  pub created_at: SystemTime,
}

// One-way conversion: blobs only flow out of the native store in this shape
// (writes use `SetBlob`).
impl From<affine_nbstore::Blob> for Blob {
  fn from(blob: affine_nbstore::Blob) -> Self {
    Self {
      key: blob.key,
      data: base64_simd::STANDARD.encode_to_string(&blob.data),
      mime: blob.mime,
      size: blob.size,
      created_at: blob.created_at.and_utc().into(),
    }
  }
}
|
||||
|
||||
/// Input record for writing a blob over FFI; payload is base64 text.
#[derive(uniffi::Record)]
pub struct SetBlob {
  pub key: String,
  // base64 encoded data
  pub data: String,
  pub mime: String,
}

impl TryFrom<SetBlob> for affine_nbstore::SetBlob {
  type Error = UniffiError;

  /// Fallible because the incoming base64 payload may be malformed.
  fn try_from(blob: SetBlob) -> Result<Self, Self::Error> {
    Ok(Self {
      key: blob.key,
      data: base64_simd::STANDARD
        .decode_to_vec(blob.data)
        .map_err(|e| UniffiError::Base64DecodingError(e.to_string()))?,
      mime: blob.mime,
    })
  }
}
|
||||
|
||||
/// FFI-safe blob listing entry: metadata only, no payload.
#[derive(uniffi::Record)]
pub struct ListedBlob {
  pub key: String,
  pub size: i64,
  pub mime: String,
  pub created_at: SystemTime,
}

impl From<affine_nbstore::ListedBlob> for ListedBlob {
  fn from(blob: affine_nbstore::ListedBlob) -> Self {
    Self {
      key: blob.key,
      size: blob.size,
      mime: blob.mime,
      created_at: blob.created_at.and_utc().into(),
    }
  }
}
|
||||
|
||||
/// uniffi-exported pool mapping universal ids to lazily-created
/// `SqliteDocStorage` instances.
#[derive(uniffi::Object)]
pub struct DocStoragePool {
  inner: DashMap<String, storage::SqliteDocStorage>,
}

impl DocStoragePool {
  /// Returns the storage for `universal_id`, creating (but not connecting)
  /// it on first use. The returned `RefMut` holds the dashmap shard lock
  /// for as long as it is alive.
  fn ensure_storage<'a>(
    &'a self,
    universal_id: &str,
  ) -> Result<RefMut<'a, String, storage::SqliteDocStorage>, UniffiError> {
    let entry = self.inner.entry(universal_id.to_string());

    // Fast path: already cached.
    if let Entry::Occupied(storage) = entry {
      return Ok(storage.into_ref());
    }
    // Slow path: resolve the db path from the universal id and create.
    let options = parse_universal_id(entry.key())?;
    let db_path = utils::get_db_path(&options)?;
    if db_path.is_empty() {
      return Err(UniffiError::EmptyDocStoragePath);
    }
    let storage = storage::SqliteDocStorage::new(db_path);
    Ok(entry.or_insert(storage))
  }
}
|
||||
|
||||
#[uniffi::export]
|
||||
impl DocStoragePool {
|
||||
/// Initialize the database and run migrations.
|
||||
pub async fn connect(&self, universal_id: String) -> Result<(), UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(storage.connect().await?)
|
||||
}
|
||||
|
||||
pub async fn close(&self, universal_id: String) -> Result<(), UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
storage.close().await;
|
||||
self.inner.remove(&universal_id);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn is_closed(&self, universal_id: String) -> bool {
|
||||
let storage = self.ensure_storage(&universal_id).unwrap();
|
||||
storage.is_closed()
|
||||
}
|
||||
|
||||
pub async fn checkpoint(&self, universal_id: String) -> Result<(), UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(storage.checkpoint().await?)
|
||||
}
|
||||
|
||||
pub async fn validate(&self, universal_id: String) -> Result<bool, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(storage.validate().await?)
|
||||
}
|
||||
|
||||
pub async fn set_space_id(
|
||||
&self,
|
||||
universal_id: String,
|
||||
space_id: String,
|
||||
) -> Result<(), UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
if space_id.is_empty() {
|
||||
return Err(UniffiError::EmptySpaceId);
|
||||
}
|
||||
Ok(storage.set_space_id(space_id).await?)
|
||||
}
|
||||
|
||||
pub async fn push_update(
|
||||
&self,
|
||||
universal_id: String,
|
||||
doc_id: String,
|
||||
update: String,
|
||||
) -> Result<SystemTime, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(
|
||||
storage
|
||||
.push_update(
|
||||
doc_id,
|
||||
base64_simd::STANDARD
|
||||
.decode_to_vec(update)
|
||||
.map_err(|e| UniffiError::Base64DecodingError(e.to_string()))?,
|
||||
)
|
||||
.await?
|
||||
.and_utc()
|
||||
.into(),
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn get_doc_snapshot(
|
||||
&self,
|
||||
universal_id: String,
|
||||
doc_id: String,
|
||||
) -> Result<Option<DocRecord>, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(storage.get_doc_snapshot(doc_id).await?.map(Into::into))
|
||||
}
|
||||
|
||||
pub async fn set_doc_snapshot(
|
||||
&self,
|
||||
universal_id: String,
|
||||
snapshot: DocRecord,
|
||||
) -> Result<bool, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(storage.set_doc_snapshot(snapshot.try_into()?).await?)
|
||||
}
|
||||
|
||||
pub async fn get_doc_updates(
|
||||
&self,
|
||||
universal_id: String,
|
||||
doc_id: String,
|
||||
) -> Result<Vec<DocUpdate>, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(
|
||||
storage
|
||||
.get_doc_updates(doc_id)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(Into::into)
|
||||
.collect(),
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn mark_updates_merged(
|
||||
&self,
|
||||
universal_id: String,
|
||||
doc_id: String,
|
||||
updates: Vec<SystemTime>,
|
||||
) -> Result<u32, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(
|
||||
storage
|
||||
.mark_updates_merged(
|
||||
doc_id,
|
||||
updates
|
||||
.into_iter()
|
||||
.map(|t| chrono::DateTime::<chrono::Utc>::from(t).naive_utc())
|
||||
.collect(),
|
||||
)
|
||||
.await?,
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn delete_doc(&self, universal_id: String, doc_id: String) -> Result<(), UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(storage.delete_doc(doc_id).await?)
|
||||
}
|
||||
|
||||
pub async fn get_doc_clocks(
|
||||
&self,
|
||||
universal_id: String,
|
||||
after: Option<SystemTime>,
|
||||
) -> Result<Vec<DocClock>, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(
|
||||
storage
|
||||
.get_doc_clocks(after.map(|t| chrono::DateTime::<chrono::Utc>::from(t).naive_utc()))
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(Into::into)
|
||||
.collect(),
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn get_doc_clock(
|
||||
&self,
|
||||
universal_id: String,
|
||||
doc_id: String,
|
||||
) -> Result<Option<DocClock>, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(storage.get_doc_clock(doc_id).await?.map(Into::into))
|
||||
}
|
||||
|
||||
pub async fn get_blob(
|
||||
&self,
|
||||
universal_id: String,
|
||||
key: String,
|
||||
) -> Result<Option<Blob>, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(storage.get_blob(key).await?.map(Into::into))
|
||||
}
|
||||
|
||||
pub async fn set_blob(&self, universal_id: String, blob: SetBlob) -> Result<(), UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(storage.set_blob(blob.try_into()?).await?)
|
||||
}
|
||||
|
||||
pub async fn delete_blob(
|
||||
&self,
|
||||
universal_id: String,
|
||||
key: String,
|
||||
permanently: bool,
|
||||
) -> Result<(), UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(storage.delete_blob(key, permanently).await?)
|
||||
}
|
||||
|
||||
pub async fn release_blobs(&self, universal_id: String) -> Result<(), UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(storage.release_blobs().await?)
|
||||
}
|
||||
|
||||
pub async fn list_blobs(&self, universal_id: String) -> Result<Vec<ListedBlob>, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(
|
||||
storage
|
||||
.list_blobs()
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(Into::into)
|
||||
.collect(),
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn get_peer_remote_clocks(
|
||||
&self,
|
||||
universal_id: String,
|
||||
peer: String,
|
||||
) -> Result<Vec<DocClock>, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(
|
||||
storage
|
||||
.get_peer_remote_clocks(peer)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(Into::into)
|
||||
.collect(),
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn get_peer_remote_clock(
|
||||
&self,
|
||||
universal_id: String,
|
||||
peer: String,
|
||||
doc_id: String,
|
||||
) -> Result<DocClock, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(storage.get_peer_remote_clock(peer, doc_id).await?.into())
|
||||
}
|
||||
|
||||
pub async fn set_peer_remote_clock(
|
||||
&self,
|
||||
universal_id: String,
|
||||
peer: String,
|
||||
doc_id: String,
|
||||
clock: SystemTime,
|
||||
) -> Result<(), UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(
|
||||
storage
|
||||
.set_peer_remote_clock(
|
||||
peer,
|
||||
doc_id,
|
||||
chrono::DateTime::<chrono::Utc>::from(clock).naive_utc(),
|
||||
)
|
||||
.await?,
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn get_peer_pulled_remote_clocks(
|
||||
&self,
|
||||
universal_id: String,
|
||||
peer: String,
|
||||
) -> Result<Vec<DocClock>, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(
|
||||
storage
|
||||
.get_peer_pulled_remote_clocks(peer)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(Into::into)
|
||||
.collect(),
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn get_peer_pulled_remote_clock(
|
||||
&self,
|
||||
universal_id: String,
|
||||
peer: String,
|
||||
doc_id: String,
|
||||
) -> Result<DocClock, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(
|
||||
storage
|
||||
.get_peer_pulled_remote_clock(peer, doc_id)
|
||||
.await?
|
||||
.into(),
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn set_peer_pulled_remote_clock(
|
||||
&self,
|
||||
universal_id: String,
|
||||
peer: String,
|
||||
doc_id: String,
|
||||
clock: SystemTime,
|
||||
) -> Result<(), UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(
|
||||
storage
|
||||
.set_peer_pulled_remote_clock(
|
||||
peer,
|
||||
doc_id,
|
||||
chrono::DateTime::<chrono::Utc>::from(clock).naive_utc(),
|
||||
)
|
||||
.await?,
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn get_peer_pushed_clocks(
|
||||
&self,
|
||||
universal_id: String,
|
||||
peer: String,
|
||||
) -> Result<Vec<DocClock>, UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(
|
||||
storage
|
||||
.get_peer_pushed_clocks(peer)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(Into::into)
|
||||
.collect(),
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn set_peer_pushed_clock(
|
||||
&self,
|
||||
universal_id: String,
|
||||
peer: String,
|
||||
doc_id: String,
|
||||
clock: SystemTime,
|
||||
) -> Result<(), UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(
|
||||
storage
|
||||
.set_peer_pushed_clock(
|
||||
peer,
|
||||
doc_id,
|
||||
chrono::DateTime::<chrono::Utc>::from(clock).naive_utc(),
|
||||
)
|
||||
.await?,
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn clear_clocks(&self, universal_id: String) -> Result<(), UniffiError> {
|
||||
let storage = self.ensure_storage(&universal_id)?;
|
||||
Ok(storage.clear_clocks().await?)
|
||||
}
|
||||
}
|
||||
|
||||
#[uniffi::export]
/// uniffi-exported helper: resolves the on-disk sqlite path for the given
/// peer / space type / space id triple.
///
/// # Errors
/// `InvalidSpaceType` for an unrecognized `space_type`, or any path
/// construction error from `utils::get_db_path`.
pub fn get_db_path(peer: String, space_type: String, id: String) -> Result<String, UniffiError> {
  let options = StorageOptions {
    peer,
    space_type: SpaceType::from_str(&space_type)?,
    id,
  };
  utils::get_db_path(&options)
}
|
||||
|
||||
/// Storage namespace kind; rendered as "userspace"/"workspace" in universal
/// ids and in on-disk directory names (see `Display`/`FromStr` below).
#[derive(Debug, PartialEq, Eq, Clone, Copy, Default)]
pub enum SpaceType {
  #[default]
  Userspace,
  Workspace,
}
|
||||
|
||||
impl Display for SpaceType {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
SpaceType::Userspace => write!(f, "userspace"),
|
||||
SpaceType::Workspace => write!(f, "workspace"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for SpaceType {
|
||||
type Err = UniffiError;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
Ok(match s {
|
||||
"userspace" => Self::Userspace,
|
||||
"workspace" => Self::Workspace,
|
||||
_ => return Err(UniffiError::InvalidSpaceType(s.to_string())),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Decomposed form of a universal storage id
/// (`@peer($peer);@type($type);@id($id);`).
pub struct StorageOptions {
  pub peer: String,
  pub space_type: SpaceType,
  pub id: String,
}
|
||||
|
||||
/// Parses a universal storage id of the form
/// `@peer($peer);@type($type);@id($id);` into `StorageOptions`.
///
/// A value is only terminated by the two-character sequence `");"`, so
/// values may themselves contain `@`, `(` and even a lone `)`.
///
/// # Errors
/// `InvalidUniversalId` for malformed input, unknown keys, or a missing
/// peer/id; `InvalidSpaceType` when the `type` value is unrecognized.
pub fn parse_universal_id(id: &str) -> Result<StorageOptions, UniffiError> {
  let mut result = StorageOptions {
    peer: String::new(),
    space_type: SpaceType::default(),
    id: String::new(),
  };

  let mut key = String::new();
  let mut value = String::new();
  let mut is_in_value = false;
  let mut chars = id.chars().peekable();

  while let Some(ch) = chars.next() {
    if is_in_value {
      if ch == ')' && chars.peek() == Some(&';') {
        // Store the collected value in the appropriate field
        match key.as_str() {
          "peer" => result.peer = value.clone(),
          "type" => result.space_type = SpaceType::from_str(&value)?,
          "id" => result.id = value.clone(),
          _ => return Err(UniffiError::InvalidUniversalId(id.to_string())),
        }
        key.clear();
        value.clear();
        is_in_value = false;
        chars.next(); // Skip the semicolon
        continue;
      }
      value.push(ch);
      continue;
    }

    if ch == '@' {
      // Find the position of next '(' by scanning ahead on a clone.
      let mut temp_chars = chars.clone();
      let mut found_paren = false;
      let mut key_chars = Vec::new();

      while let Some(next_ch) = temp_chars.next() {
        if next_ch == '(' {
          found_paren = true;
          break;
        }
        key_chars.push(next_ch);
      }

      // Invalid format if no '(' found or it's immediately after '@'
      if !found_paren || key_chars.is_empty() {
        return Err(UniffiError::InvalidUniversalId(id.to_string()));
      }

      key = key_chars.into_iter().collect();
      // Advance the original iterator to the position after the key.
      // NOTE(review): `key.len()` counts bytes, not chars; this is safe only
      // because the accepted keys ("peer"/"type"/"id") are ASCII — a
      // multi-byte key would over-advance before the unknown-key error fires.
      for _ in 0..key.len() + 1 {
        chars.next();
      }
      is_in_value = true;
    } else {
      return Err(UniffiError::InvalidUniversalId(id.to_string()));
    }
  }

  // Validate the parsed results: peer and id are mandatory.
  if result.peer.is_empty() || result.id.is_empty() {
    return Err(UniffiError::InvalidUniversalId(id.to_string()));
  }

  Ok(result)
}
|
||||
|
||||
#[cfg(test)]
mod tests {
  use super::*;

  // Round-trips a formatted universal id back through the parser.
  #[test]
  fn test_universal_id() {
    let options = StorageOptions {
      peer: "123".to_string(),
      space_type: SpaceType::Workspace,
      id: "456".to_string(),
    };

    let id = format!(
      "@peer({});@type({});@id({});",
      options.peer, options.space_type, options.id
    );
    let result = parse_universal_id(&id).unwrap();

    assert_eq!(result.peer, "123");
    assert_eq!(result.space_type, SpaceType::Workspace);
    assert_eq!(result.id, "456");
  }

  // Values may contain '@', ':' and even an unbalanced '(' — only the
  // sequence ");" terminates a value.
  #[test]
  fn test_parse_universal_id_valid_cases() {
    let testcases = vec![
      "@peer(123);@type(userspace);@id(456);",
      "@peer(123);@type(workspace);@id(456);",
      "@peer(https://app.affine.pro);@type(userspace);@id(hello:world);",
      "@peer(@name);@type(userspace);@id(@id);",
      "@peer(@peer(name);@type(userspace);@id(@id);",
    ];

    for id in testcases {
      let result = parse_universal_id(id);
      assert!(result.is_ok(), "Failed to parse: {}", id);

      let parsed = result.unwrap();
      assert!(!parsed.peer.is_empty());
      assert!(!parsed.id.is_empty());
    }
  }

  #[test]
  fn test_parse_universal_id_invalid_cases() {
    let testcases = vec![
      // invalid space type
      "@peer(123);@type(anyspace);@id(456);",
      // invalid peer
      "@peer(@peer(name););@type(userspace);@id(@id);",
    ];

    for id in testcases {
      let result = parse_universal_id(id);
      assert!(result.is_err(), "Should have failed to parse: {}", id);

      match result {
        Err(UniffiError::InvalidUniversalId(_)) => (),
        Err(UniffiError::InvalidSpaceType(_)) => (),
        _ => panic!("Expected InvalidUniversalId error for: {}", id),
      }
    }
  }
}
|
||||
|
||||
141
packages/frontend/mobile-native/src/utils.rs
Normal file
141
packages/frontend/mobile-native/src/utils.rs
Normal file
@@ -0,0 +1,141 @@
|
||||
use std::fs;
|
||||
|
||||
#[cfg(not(any(target_os = "ios", target_os = "macos")))]
|
||||
use homedir::my_home;
|
||||
#[cfg(any(target_os = "ios", target_os = "macos"))]
|
||||
use objc2::rc::autoreleasepool;
|
||||
#[cfg(any(target_os = "ios", target_os = "macos"))]
|
||||
use objc2_foundation::{NSFileManager, NSSearchPathDirectory, NSSearchPathDomainMask, NSString};
|
||||
|
||||
use crate::{error::UniffiError, SpaceType, StorageOptions};
|
||||
|
||||
const DB_FILE_NAME: &str = "storage.db";
|
||||
|
||||
/// Resolves (and creates) the per-space database directory under the app's
/// Documents folder on iOS/macOS, returning the full path of `storage.db`.
///
/// Layout: `<Documents>/.affine/<userspaces|workspaces>/<escaped peer>/<id>/storage.db`
#[cfg(any(target_os = "ios", target_os = "macos"))]
pub(crate) fn get_db_path(options: &StorageOptions) -> Result<String, UniffiError> {
  let file_manager = unsafe { NSFileManager::defaultManager() };
  // equivalent to Swift:
  // ```swift
  // guard let documentsPath = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first else {
  //   return nil
  // }
  // ```
  let urls = unsafe {
    file_manager.URLsForDirectory_inDomains(
      NSSearchPathDirectory::NSDocumentDirectory,
      NSSearchPathDomainMask::NSUserDomainMask,
    )
  };
  let document_directory = urls
    .first()
    .ok_or(UniffiError::GetUserDocumentDirectoryFailed)?;

  // Build <Documents>/.affine/<spaces_dir>/<escaped peer>/<id> as an NSURL.
  let affine_dir = unsafe {
    let spaces_dir = match options.space_type {
      SpaceType::Userspace => "userspaces",
      SpaceType::Workspace => "workspaces",
    };
    let escaped_peer = escape_filename(&options.peer);
    document_directory
      .URLByAppendingPathComponent(&NSString::from_str(".affine"))
      .and_then(|url| url.URLByAppendingPathComponent(&NSString::from_str(spaces_dir)))
      .and_then(|url| url.URLByAppendingPathComponent(&NSString::from_str(&escaped_peer)))
      .and_then(|url| url.URLByAppendingPathComponent(&NSString::from_str(&options.id)))
  }
  .ok_or(UniffiError::ConcatSpaceDirFailed(format!(
    "{}:{}:{}",
    options.peer, options.space_type, options.id
  )))?;
  // Materialize the NSURL path as a Rust String inside an autorelease pool.
  let affine_dir_str = autoreleasepool(|pool| {
    Ok::<String, UniffiError>(
      unsafe { affine_dir.path() }
        .ok_or(UniffiError::GetUserDocumentDirectoryFailed)?
        .as_str(pool)
        .to_string(),
    )
  })?;

  // Replicate Swift's appending ".affine" subdir, creating it if necessary
  fs::create_dir_all(&affine_dir_str)
    .map_err(|_| UniffiError::CreateAffineDirFailed(affine_dir_str.clone()))?;

  // Append the database file name and extract the final path string.
  let db_path = autoreleasepool(|pool| {
    let db_path =
      unsafe { affine_dir.URLByAppendingPathComponent(&NSString::from_str(DB_FILE_NAME)) }.ok_or(
        UniffiError::ConcatSpaceDirFailed(format!(
          "{}:{}:{}/{DB_FILE_NAME}",
          options.peer, options.space_type, options.id
        )),
      )?;
    Ok::<String, UniffiError>(
      unsafe { db_path.path() }
        .ok_or(UniffiError::GetUserDocumentDirectoryFailed)?
        .as_str(pool)
        .to_string(),
    )
  })?;

  Ok(db_path)
}
|
||||
|
||||
/// Non-Apple fallback: resolves (and creates) the per-space database
/// directory under the user's home directory, returning the full path of
/// `storage.db`.
///
/// Layout: `~/.affine/<userspaces|workspaces>/<escaped peer>/<id>/storage.db`
#[cfg(not(any(target_os = "ios", target_os = "macos")))]
pub(crate) fn get_db_path(options: &StorageOptions) -> Result<String, UniffiError> {
  let home_dir = my_home()
    .map_err(|_| UniffiError::GetUserDocumentDirectoryFailed)?
    .ok_or(UniffiError::GetUserDocumentDirectoryFailed)?;
  let spaces_dir = match options.space_type {
    SpaceType::Userspace => "userspaces",
    SpaceType::Workspace => "workspaces",
  };
  let escaped_peer = escape_filename(&options.peer);
  let db_path = home_dir
    .join(".affine")
    .join(spaces_dir)
    .join(&escaped_peer)
    .join(&options.id);
  // Ensure the directory tree exists before handing the path out.
  fs::create_dir_all(&db_path)
    .map_err(|_| UniffiError::CreateAffineDirFailed(db_path.to_string_lossy().to_string()))?;
  db_path
    .join(DB_FILE_NAME)
    .to_str()
    .map(|p| p.to_owned())
    .ok_or(UniffiError::GetUserDocumentDirectoryFailed)
}
|
||||
|
||||
/// Sanitizes `name` for use as a single path component.
///
/// Characters from a fixed special set (and any pre-existing `_`) map to
/// `_`; consecutive underscores collapse into one; a trailing underscore is
/// removed (leading underscores are preserved).
fn escape_filename(name: &str) -> String {
  const SPECIAL: &str = "\\/!@#$%^&*()+~`\"':;,?<>|";

  let mut escaped = String::with_capacity(name.len());
  let mut previous_was_underscore = false;

  for ch in name.chars() {
    let mapped = if SPECIAL.contains(ch) { '_' } else { ch };
    if mapped == '_' {
      // Collapse runs of underscores to a single one.
      if !previous_was_underscore {
        escaped.push('_');
      }
      previous_was_underscore = true;
    } else {
      escaped.push(mapped);
      previous_was_underscore = false;
    }
  }

  // After collapsing there is at most one trailing underscore to strip.
  while escaped.ends_with('_') {
    escaped.pop();
  }
  escaped
}
|
||||
|
||||
// `escape_filename` is platform-independent and is exercised by the
// non-Apple `get_db_path` as well, so gate these tests on `test` alone
// rather than only on iOS/macOS builds (where they previously never ran
// under a Linux CI `cargo test`).
#[cfg(test)]
mod tests {
  use super::*;

  #[test]
  fn test_escape_filename() {
    // Special characters are replaced with underscores.
    assert_eq!(escape_filename("hello@world"), "hello_world");
    // Runs of replacements collapse into a single underscore.
    assert_eq!(escape_filename("test!!file"), "test_file");
    // Leading underscore preserved; trailing underscore stripped.
    assert_eq!(escape_filename("_test_"), "_test");
    assert_eq!(escape_filename("multi___under"), "multi_under");
    // Path separators are escaped too.
    assert_eq!(escape_filename("path/to\\file"), "path_to_file");
  }
}
|
||||
@@ -4,19 +4,19 @@ name = "affine_native"
|
||||
version = "0.0.0"
|
||||
|
||||
[lib]
|
||||
crate-type = ["rlib", "cdylib"]
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
|
||||
[dependencies]
|
||||
affine_common = { workspace = true }
|
||||
affine_common = { workspace = true }
|
||||
affine_nbstore = { path = "./nbstore" }
|
||||
affine_sqlite_v1 = { path = "./sqlite_v1" }
|
||||
affine_nbstore = { path = "./nbstore" }
|
||||
napi = { workspace = true }
|
||||
napi-derive = { workspace = true }
|
||||
once_cell = { workspace = true }
|
||||
sqlx = { workspace = true, default-features = false, features = ["chrono", "macros", "migrate", "runtime-tokio", "sqlite", "tls-rustls"] }
|
||||
tokio = { workspace = true, features = ["full"] }
|
||||
napi = { workspace = true }
|
||||
napi-derive = { workspace = true }
|
||||
once_cell = { workspace = true }
|
||||
sqlx = { workspace = true, default-features = false, features = ["chrono", "macros", "migrate", "runtime-tokio", "sqlite", "tls-rustls"] }
|
||||
tokio = { workspace = true, features = ["full"] }
|
||||
|
||||
[build-dependencies]
|
||||
napi-build = { workspace = true }
|
||||
sqlx = { workspace = true, default-features = false, features = ["chrono", "json", "macros", "migrate", "runtime-tokio", "sqlite", "tls-rustls"] }
|
||||
tokio = { workspace = true, features = ["full"] }
|
||||
napi-build = { workspace = true }
|
||||
sqlx = { workspace = true, default-features = false, features = ["chrono", "json", "macros", "migrate", "runtime-tokio", "sqlite", "tls-rustls"] }
|
||||
tokio = { workspace = true, features = ["full"] }
|
||||
|
||||
@@ -4,7 +4,10 @@ name = "affine_nbstore"
|
||||
version = "0.0.0"
|
||||
|
||||
[lib]
|
||||
crate-type = ["rlib", "cdylib"]
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
|
||||
[features]
|
||||
use-as-lib = ["napi-derive/noop", "napi/noop"]
|
||||
|
||||
[dependencies]
|
||||
affine_schema = { path = "../schema" }
|
||||
@@ -15,9 +18,12 @@ napi-derive = { workspace = true }
|
||||
sqlx = { workspace = true, default-features = false, features = ["chrono", "macros", "migrate", "runtime-tokio", "sqlite", "tls-rustls"] }
|
||||
tokio = { workspace = true, features = ["full"] }
|
||||
|
||||
[target.'cfg(any(target_os = "ios", target_os = "android"))'.dependencies]
|
||||
uniffi = { workspace = true }
|
||||
|
||||
[build-dependencies]
|
||||
affine_schema = { path = "../schema" }
|
||||
dotenvy = { workspace = true }
|
||||
dotenvy = { workspace = true }
|
||||
napi-build = { workspace = true }
|
||||
sqlx = { workspace = true, default-features = false, features = ["chrono", "json", "macros", "migrate", "runtime-tokio", "sqlite", "tls-rustls"] }
|
||||
tokio = { workspace = true, features = ["full"] }
|
||||
tokio = { workspace = true, features = ["full"] }
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
use std::ops::Deref;
|
||||
|
||||
use super::{storage::SqliteDocStorage, Blob, ListedBlob, SetBlob};
|
||||
|
||||
type Result<T> = std::result::Result<T, sqlx::Error>;
|
||||
@@ -22,7 +24,7 @@ impl SqliteDocStorage {
|
||||
DO UPDATE SET data=$2, mime=$3, size=$4, deleted_at=NULL;"#,
|
||||
)
|
||||
.bind(blob.key)
|
||||
.bind(blob.data.as_ref())
|
||||
.bind(blob.data.deref())
|
||||
.bind(blob.mime)
|
||||
.bind(blob.data.len() as i64)
|
||||
.execute(&self.pool)
|
||||
@@ -67,7 +69,6 @@ impl SqliteDocStorage {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use napi::bindgen_prelude::Uint8Array;
|
||||
use sqlx::Row;
|
||||
|
||||
use super::*;
|
||||
@@ -87,7 +88,7 @@ mod tests {
|
||||
storage
|
||||
.set_blob(SetBlob {
|
||||
key: format!("test_{}", i),
|
||||
data: Uint8Array::from(vec![0, 0]),
|
||||
data: vec![0, 0].into(),
|
||||
mime: "text/plain".to_string(),
|
||||
})
|
||||
.await
|
||||
@@ -127,7 +128,7 @@ mod tests {
|
||||
storage
|
||||
.set_blob(SetBlob {
|
||||
key: format!("test_{}", i),
|
||||
data: Uint8Array::from(vec![0, 0]),
|
||||
data: vec![0, 0].into(),
|
||||
mime: "text/plain".to_string(),
|
||||
})
|
||||
.await
|
||||
@@ -175,7 +176,7 @@ mod tests {
|
||||
storage
|
||||
.set_blob(SetBlob {
|
||||
key: format!("test_{}", i),
|
||||
data: Uint8Array::from(vec![0, 0]),
|
||||
data: vec![0, 0].into(),
|
||||
mime: "text/plain".to_string(),
|
||||
})
|
||||
.await
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
use std::ops::Deref;
|
||||
|
||||
use chrono::NaiveDateTime;
|
||||
use sqlx::{QueryBuilder, Row};
|
||||
|
||||
@@ -110,7 +112,7 @@ impl SqliteDocStorage {
|
||||
WHERE updated_at <= $3;"#,
|
||||
)
|
||||
.bind(snapshot.doc_id)
|
||||
.bind(snapshot.data.as_ref())
|
||||
.bind(snapshot.data.deref())
|
||||
.bind(snapshot.timestamp)
|
||||
.execute(&self.pool)
|
||||
.await?;
|
||||
@@ -206,7 +208,6 @@ impl SqliteDocStorage {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use chrono::{DateTime, Utc};
|
||||
use napi::bindgen_prelude::Uint8Array;
|
||||
|
||||
use super::*;
|
||||
|
||||
@@ -252,7 +253,7 @@ mod tests {
|
||||
storage
|
||||
.set_doc_snapshot(DocRecord {
|
||||
doc_id: "test".to_string(),
|
||||
data: Uint8Array::from(vec![0, 0]),
|
||||
data: vec![0, 0].into(),
|
||||
timestamp: Utc::now().naive_utc(),
|
||||
})
|
||||
.await
|
||||
@@ -331,7 +332,7 @@ mod tests {
|
||||
|
||||
let snapshot = DocRecord {
|
||||
doc_id: "test".to_string(),
|
||||
data: Uint8Array::from(vec![0, 0]),
|
||||
data: vec![0, 0].into(),
|
||||
timestamp: Utc::now().naive_utc(),
|
||||
};
|
||||
|
||||
@@ -349,7 +350,7 @@ mod tests {
|
||||
|
||||
let snapshot = DocRecord {
|
||||
doc_id: "test".to_string(),
|
||||
data: Uint8Array::from(vec![0, 0]),
|
||||
data: vec![0, 0].into(),
|
||||
timestamp: Utc::now().naive_utc(),
|
||||
};
|
||||
|
||||
@@ -362,7 +363,7 @@ mod tests {
|
||||
|
||||
let snapshot = DocRecord {
|
||||
doc_id: "test".to_string(),
|
||||
data: Uint8Array::from(vec![0, 1]),
|
||||
data: vec![0, 1].into(),
|
||||
timestamp: DateTime::from_timestamp_millis(Utc::now().timestamp_millis() - 1000)
|
||||
.unwrap()
|
||||
.naive_utc(),
|
||||
|
||||
@@ -1,27 +1,47 @@
|
||||
mod blob;
|
||||
mod doc;
|
||||
mod storage;
|
||||
mod sync;
|
||||
pub mod blob;
|
||||
pub mod doc;
|
||||
pub mod storage;
|
||||
pub mod sync;
|
||||
|
||||
use chrono::NaiveDateTime;
|
||||
use napi::bindgen_prelude::*;
|
||||
use napi_derive::napi;
|
||||
|
||||
fn map_err(err: sqlx::Error) -> napi::Error {
|
||||
napi::Error::from(anyhow::Error::from(err))
|
||||
#[cfg(feature = "use-as-lib")]
|
||||
type Result<T> = anyhow::Result<T>;
|
||||
|
||||
#[cfg(not(feature = "use-as-lib"))]
|
||||
type Result<T> = napi::Result<T>;
|
||||
|
||||
#[cfg(not(feature = "use-as-lib"))]
|
||||
fn map_err(err: sqlx::Error) -> Error {
|
||||
Error::from(anyhow::Error::from(err))
|
||||
}
|
||||
|
||||
#[cfg(feature = "use-as-lib")]
|
||||
fn map_err(err: sqlx::Error) -> anyhow::Error {
|
||||
anyhow::Error::from(err)
|
||||
}
|
||||
|
||||
#[cfg(feature = "use-as-lib")]
|
||||
pub type Data = Vec<u8>;
|
||||
|
||||
#[cfg(not(feature = "use-as-lib"))]
|
||||
pub type Data = Uint8Array;
|
||||
|
||||
#[napi(object)]
|
||||
pub struct DocUpdate {
|
||||
pub doc_id: String,
|
||||
pub created_at: NaiveDateTime,
|
||||
pub data: Uint8Array,
|
||||
#[napi(ts_type = "Uint8Array")]
|
||||
pub data: Data,
|
||||
}
|
||||
|
||||
#[napi(object)]
|
||||
pub struct DocRecord {
|
||||
pub doc_id: String,
|
||||
pub data: Uint8Array,
|
||||
#[napi(ts_type = "Uint8Array")]
|
||||
pub data: Data,
|
||||
pub timestamp: NaiveDateTime,
|
||||
}
|
||||
|
||||
@@ -35,14 +55,16 @@ pub struct DocClock {
|
||||
#[napi(object)]
|
||||
pub struct SetBlob {
|
||||
pub key: String,
|
||||
pub data: Uint8Array,
|
||||
#[napi(ts_type = "Uint8Array")]
|
||||
pub data: Data,
|
||||
pub mime: String,
|
||||
}
|
||||
|
||||
#[napi(object)]
|
||||
pub struct Blob {
|
||||
pub key: String,
|
||||
pub data: Uint8Array,
|
||||
#[napi(ts_type = "Uint8Array")]
|
||||
pub data: Data,
|
||||
pub mime: String,
|
||||
pub size: i64,
|
||||
pub created_at: NaiveDateTime,
|
||||
@@ -64,7 +86,7 @@ pub struct DocStorage {
|
||||
#[napi]
|
||||
impl DocStorage {
|
||||
#[napi(constructor, async_runtime)]
|
||||
pub fn new(path: String) -> napi::Result<Self> {
|
||||
pub fn new(path: String) -> Result<Self> {
|
||||
Ok(Self {
|
||||
storage: storage::SqliteDocStorage::new(path),
|
||||
})
|
||||
@@ -72,19 +94,19 @@ impl DocStorage {
|
||||
|
||||
#[napi]
|
||||
/// Initialize the database and run migrations.
|
||||
pub async fn connect(&self) -> napi::Result<()> {
|
||||
pub async fn connect(&self) -> Result<()> {
|
||||
self.storage.connect().await.map_err(map_err)
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub async fn close(&self) -> napi::Result<()> {
|
||||
pub async fn close(&self) -> Result<()> {
|
||||
self.storage.close().await;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[napi(getter)]
|
||||
pub async fn is_closed(&self) -> napi::Result<bool> {
|
||||
pub async fn is_closed(&self) -> Result<bool> {
|
||||
Ok(self.storage.is_closed())
|
||||
}
|
||||
|
||||
@@ -93,26 +115,22 @@ impl DocStorage {
|
||||
* See https://www.sqlite.org/pragma.html#pragma_wal_checkpoint:~:text=PRAGMA%20schema.wal_checkpoint%3B
|
||||
*/
|
||||
#[napi]
|
||||
pub async fn checkpoint(&self) -> napi::Result<()> {
|
||||
pub async fn checkpoint(&self) -> Result<()> {
|
||||
self.storage.checkpoint().await.map_err(map_err)
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub async fn validate(&self) -> napi::Result<bool> {
|
||||
pub async fn validate(&self) -> Result<bool> {
|
||||
self.storage.validate().await.map_err(map_err)
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub async fn set_space_id(&self, space_id: String) -> napi::Result<()> {
|
||||
pub async fn set_space_id(&self, space_id: String) -> Result<()> {
|
||||
self.storage.set_space_id(space_id).await.map_err(map_err)
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub async fn push_update(
|
||||
&self,
|
||||
doc_id: String,
|
||||
update: Uint8Array,
|
||||
) -> napi::Result<NaiveDateTime> {
|
||||
pub async fn push_update(&self, doc_id: String, update: Uint8Array) -> Result<NaiveDateTime> {
|
||||
self
|
||||
.storage
|
||||
.push_update(doc_id, update)
|
||||
@@ -121,12 +139,12 @@ impl DocStorage {
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub async fn get_doc_snapshot(&self, doc_id: String) -> napi::Result<Option<DocRecord>> {
|
||||
pub async fn get_doc_snapshot(&self, doc_id: String) -> Result<Option<DocRecord>> {
|
||||
self.storage.get_doc_snapshot(doc_id).await.map_err(map_err)
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub async fn set_doc_snapshot(&self, snapshot: DocRecord) -> napi::Result<bool> {
|
||||
pub async fn set_doc_snapshot(&self, snapshot: DocRecord) -> Result<bool> {
|
||||
self
|
||||
.storage
|
||||
.set_doc_snapshot(snapshot)
|
||||
@@ -135,7 +153,7 @@ impl DocStorage {
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub async fn get_doc_updates(&self, doc_id: String) -> napi::Result<Vec<DocUpdate>> {
|
||||
pub async fn get_doc_updates(&self, doc_id: String) -> Result<Vec<DocUpdate>> {
|
||||
self.storage.get_doc_updates(doc_id).await.map_err(map_err)
|
||||
}
|
||||
|
||||
@@ -144,7 +162,7 @@ impl DocStorage {
|
||||
&self,
|
||||
doc_id: String,
|
||||
updates: Vec<NaiveDateTime>,
|
||||
) -> napi::Result<u32> {
|
||||
) -> Result<u32> {
|
||||
self
|
||||
.storage
|
||||
.mark_updates_merged(doc_id, updates)
|
||||
@@ -153,32 +171,32 @@ impl DocStorage {
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub async fn delete_doc(&self, doc_id: String) -> napi::Result<()> {
|
||||
pub async fn delete_doc(&self, doc_id: String) -> Result<()> {
|
||||
self.storage.delete_doc(doc_id).await.map_err(map_err)
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub async fn get_doc_clocks(&self, after: Option<NaiveDateTime>) -> napi::Result<Vec<DocClock>> {
|
||||
pub async fn get_doc_clocks(&self, after: Option<NaiveDateTime>) -> Result<Vec<DocClock>> {
|
||||
self.storage.get_doc_clocks(after).await.map_err(map_err)
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub async fn get_doc_clock(&self, doc_id: String) -> napi::Result<Option<DocClock>> {
|
||||
pub async fn get_doc_clock(&self, doc_id: String) -> Result<Option<DocClock>> {
|
||||
self.storage.get_doc_clock(doc_id).await.map_err(map_err)
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub async fn get_blob(&self, key: String) -> napi::Result<Option<Blob>> {
|
||||
pub async fn get_blob(&self, key: String) -> Result<Option<Blob>> {
|
||||
self.storage.get_blob(key).await.map_err(map_err)
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub async fn set_blob(&self, blob: SetBlob) -> napi::Result<()> {
|
||||
pub async fn set_blob(&self, blob: SetBlob) -> Result<()> {
|
||||
self.storage.set_blob(blob).await.map_err(map_err)
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub async fn delete_blob(&self, key: String, permanently: bool) -> napi::Result<()> {
|
||||
pub async fn delete_blob(&self, key: String, permanently: bool) -> Result<()> {
|
||||
self
|
||||
.storage
|
||||
.delete_blob(key, permanently)
|
||||
@@ -187,17 +205,17 @@ impl DocStorage {
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub async fn release_blobs(&self) -> napi::Result<()> {
|
||||
pub async fn release_blobs(&self) -> Result<()> {
|
||||
self.storage.release_blobs().await.map_err(map_err)
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub async fn list_blobs(&self) -> napi::Result<Vec<ListedBlob>> {
|
||||
pub async fn list_blobs(&self) -> Result<Vec<ListedBlob>> {
|
||||
self.storage.list_blobs().await.map_err(map_err)
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub async fn get_peer_remote_clocks(&self, peer: String) -> napi::Result<Vec<DocClock>> {
|
||||
pub async fn get_peer_remote_clocks(&self, peer: String) -> Result<Vec<DocClock>> {
|
||||
self
|
||||
.storage
|
||||
.get_peer_remote_clocks(peer)
|
||||
@@ -206,11 +224,7 @@ impl DocStorage {
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub async fn get_peer_remote_clock(
|
||||
&self,
|
||||
peer: String,
|
||||
doc_id: String,
|
||||
) -> napi::Result<DocClock> {
|
||||
pub async fn get_peer_remote_clock(&self, peer: String, doc_id: String) -> Result<DocClock> {
|
||||
self
|
||||
.storage
|
||||
.get_peer_remote_clock(peer, doc_id)
|
||||
@@ -224,7 +238,7 @@ impl DocStorage {
|
||||
peer: String,
|
||||
doc_id: String,
|
||||
clock: NaiveDateTime,
|
||||
) -> napi::Result<()> {
|
||||
) -> Result<()> {
|
||||
self
|
||||
.storage
|
||||
.set_peer_remote_clock(peer, doc_id, clock)
|
||||
@@ -233,7 +247,7 @@ impl DocStorage {
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub async fn get_peer_pulled_remote_clocks(&self, peer: String) -> napi::Result<Vec<DocClock>> {
|
||||
pub async fn get_peer_pulled_remote_clocks(&self, peer: String) -> Result<Vec<DocClock>> {
|
||||
self
|
||||
.storage
|
||||
.get_peer_pulled_remote_clocks(peer)
|
||||
@@ -246,7 +260,7 @@ impl DocStorage {
|
||||
&self,
|
||||
peer: String,
|
||||
doc_id: String,
|
||||
) -> napi::Result<DocClock> {
|
||||
) -> Result<DocClock> {
|
||||
self
|
||||
.storage
|
||||
.get_peer_pulled_remote_clock(peer, doc_id)
|
||||
@@ -260,7 +274,7 @@ impl DocStorage {
|
||||
peer: String,
|
||||
doc_id: String,
|
||||
clock: NaiveDateTime,
|
||||
) -> napi::Result<()> {
|
||||
) -> Result<()> {
|
||||
self
|
||||
.storage
|
||||
.set_peer_pulled_remote_clock(peer, doc_id, clock)
|
||||
@@ -269,7 +283,7 @@ impl DocStorage {
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub async fn get_peer_pushed_clocks(&self, peer: String) -> napi::Result<Vec<DocClock>> {
|
||||
pub async fn get_peer_pushed_clocks(&self, peer: String) -> Result<Vec<DocClock>> {
|
||||
self
|
||||
.storage
|
||||
.get_peer_pushed_clocks(peer)
|
||||
@@ -278,11 +292,7 @@ impl DocStorage {
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub async fn get_peer_pushed_clock(
|
||||
&self,
|
||||
peer: String,
|
||||
doc_id: String,
|
||||
) -> napi::Result<DocClock> {
|
||||
pub async fn get_peer_pushed_clock(&self, peer: String, doc_id: String) -> Result<DocClock> {
|
||||
self
|
||||
.storage
|
||||
.get_peer_pushed_clock(peer, doc_id)
|
||||
@@ -296,7 +306,7 @@ impl DocStorage {
|
||||
peer: String,
|
||||
doc_id: String,
|
||||
clock: NaiveDateTime,
|
||||
) -> napi::Result<()> {
|
||||
) -> Result<()> {
|
||||
self
|
||||
.storage
|
||||
.set_peer_pushed_clock(peer, doc_id, clock)
|
||||
@@ -305,7 +315,7 @@ impl DocStorage {
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub async fn clear_clocks(&self) -> napi::Result<()> {
|
||||
pub async fn clear_clocks(&self) -> Result<()> {
|
||||
self.storage.clear_clocks().await.map_err(map_err)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -44,8 +44,8 @@
|
||||
},
|
||||
"scripts": {
|
||||
"artifacts": "napi artifacts",
|
||||
"build": "napi build --platform --release --no-const-enum",
|
||||
"build:debug": "napi build --platform",
|
||||
"build": "napi build -p affine_native --platform --release --no-const-enum",
|
||||
"build:debug": "napi build -p affine_native --platform",
|
||||
"universal": "napi universal",
|
||||
"test": "ava",
|
||||
"version": "napi version"
|
||||
|
||||
@@ -4,4 +4,4 @@ name = "affine_schema"
|
||||
version = "0.0.0"
|
||||
|
||||
[dependencies]
|
||||
sqlx = { workspace = true, default-features = false, features = ["migrate"] }
|
||||
sqlx = { workspace = true, default-features = false, features = ["migrate"] }
|
||||
|
||||
@@ -4,7 +4,7 @@ name = "affine_sqlite_v1"
|
||||
version = "0.0.0"
|
||||
|
||||
[lib]
|
||||
crate-type = ["rlib", "cdylib"]
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
|
||||
[dependencies]
|
||||
affine_schema = { path = "../schema" }
|
||||
@@ -17,7 +17,7 @@ tokio = { workspace = true, features = ["full"] }
|
||||
|
||||
[build-dependencies]
|
||||
affine_schema = { path = "../schema" }
|
||||
dotenvy = { workspace = true }
|
||||
dotenvy = { workspace = true }
|
||||
napi-build = { workspace = true }
|
||||
sqlx = { workspace = true, default-features = false, features = ["chrono", "json", "macros", "migrate", "runtime-tokio", "sqlite", "tls-rustls"] }
|
||||
tokio = { workspace = true, features = ["full"] }
|
||||
|
||||
Reference in New Issue
Block a user