Mirror of https://github.com/toeverything/AFFiNE.git
feat(nbstore): improve nbstore (#9512)
packages/common/env/src/ua-helper.ts (vendored, 3 changes)
@@ -32,6 +32,9 @@ export class UaHelper {
   }

   private isStandaloneMode() {
+    if (typeof window === 'undefined') {
+      return false;
+    }
     if ('standalone' in window.navigator) {
       return !!window.navigator.standalone;
     }
@@ -27,7 +27,9 @@ describe('op client', () => {
     port1.postMessage = vi.fn(port1.postMessage);
     // @ts-expect-error patch postMessage
     ctx.postMessage = port1.postMessage;
-    ctx.producer = new OpClient(port1);
+    ctx.producer = new OpClient(port1, {
+      timeout: 1000,
+    });
     // @ts-expect-error internal api
     ctx.handlers = ctx.producer.handlers;
     vi.useFakeTimers();
@@ -31,7 +31,7 @@ export class OpClient<Ops extends OpSchema> extends AutoMessageHandler {
   private readonly pendingCalls = new Map<string, PendingCall>();
   private readonly obs = new Map<string, Observer<any>>();
   private readonly options: OpClientOptions = {
-    timeout: 3000,
+    timeout: Infinity,
   };

   constructor(port: MessageCommunicapable, options: OpClientOptions = {}) {
@@ -139,9 +139,12 @@ export class OpClient<Ops extends OpSchema> extends AutoMessageHandler {
       raise('canceled');
     };

-    const timeout = setTimeout(() => {
-      raise('timeout');
-    }, this.options.timeout);
+    const timeout =
+      this.options.timeout === Infinity
+        ? 0
+        : setTimeout(() => {
+            raise('timeout');
+          }, this.options.timeout);

     const transferables = fetchTransferables(payload);
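The default op-call timeout moves from 3000 ms to Infinity, and in the Infinity case no timer is scheduled at all (0 is stored as a no-op handle, since passing Infinity to setTimeout is clamped to an immediate or near-immediate firing in common runtimes). A minimal sketch of what this means for callers, mirroring the test above; imports and the op schema are elided, and `MyOps` is a hypothetical schema type:

  const { port1 } = new MessageChannel();

  // default: timeout === Infinity, pending calls wait indefinitely
  const patient = new OpClient<MyOps>(port1);

  // opt-in: pending calls are rejected with 'timeout' after 1 second
  const failFast = new OpClient<MyOps>(port1, { timeout: 1000 });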
@@ -6,7 +6,8 @@
   "sideEffects": false,
   "exports": {
     ".": "./src/index.ts",
-    "./worker": "./src/worker/index.ts",
+    "./worker/client": "./src/worker/client.ts",
+    "./worker/consumer": "./src/worker/consumer.ts",
     "./idb": "./src/impls/idb/index.ts",
    "./idb/v1": "./src/impls/idb/v1/index.ts",
    "./cloud": "./src/impls/cloud/index.ts",
@@ -24,7 +25,6 @@
     "yjs": "^13.6.21"
   },
   "devDependencies": {
     "@affine/electron-api": "workspace:*",
     "@affine/graphql": "workspace:*",
     "fake-indexeddb": "^6.0.0",
     "idb": "^8.0.0",
@@ -32,7 +32,6 @@
     "vitest": "2.1.8"
   },
   "peerDependencies": {
     "@affine/electron-api": "workspace:*",
     "@affine/graphql": "workspace:*",
     "idb": "^8.0.0",
     "socket.io-client": "^4.7.5"
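The split worker entry points pair with `"sideEffects": false` so a consumer only bundles the implementation it actually uses. A sketch of how the new subpaths would be consumed; the package name is assumed from the repo layout and the exported symbol names are illustrative:

  // hypothetical consumer imports, one per subpath export
  import { IndexedDBDocStorage } from '@affine/nbstore/idb';
  import { CloudDocStorage } from '@affine/nbstore/cloud';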
@@ -9,6 +9,7 @@ import { DocFrontend } from '../frontend/doc';
 import { BroadcastChannelAwarenessStorage } from '../impls/broadcast-channel/awareness';
 import { IndexedDBDocStorage } from '../impls/idb';
 import { AwarenessSyncImpl } from '../sync/awareness';
+import { DocSyncImpl } from '../sync/doc';
 import { expectYjsEqual } from './utils';

 test('doc', async () => {
@@ -19,7 +20,7 @@ test('doc', async () => {

   const docStorage = new IndexedDBDocStorage({
     id: 'ws1',
-    peer: 'a',
+    flavour: 'a',
     type: 'workspace',
   });
@@ -27,7 +28,7 @@ test('doc', async () => {

   await docStorage.connection.waitForConnected();

-  const frontend1 = new DocFrontend(docStorage, null);
+  const frontend1 = new DocFrontend(docStorage, DocSyncImpl.dummy);
   frontend1.start();
   frontend1.addDoc(doc1);
   await vitest.waitFor(async () => {
@@ -42,7 +43,7 @@ test('doc', async () => {
   const doc2 = new YDoc({
     guid: 'test-doc',
   });
-  const frontend2 = new DocFrontend(docStorage, null);
+  const frontend2 = new DocFrontend(docStorage, DocSyncImpl.dummy);
   frontend2.start();
   frontend2.addDoc(doc2);
@@ -57,15 +58,11 @@ test('doc', async () => {

 test('awareness', async () => {
   const storage1 = new BroadcastChannelAwarenessStorage({
-    id: 'ws1',
-    peer: 'a',
-    type: 'workspace',
+    id: 'ws1:a',
   });

   const storage2 = new BroadcastChannelAwarenessStorage({
-    id: 'ws1',
-    peer: 'b',
-    type: 'workspace',
+    id: 'ws1:b',
   });

   storage1.connection.connect();
@@ -90,13 +87,23 @@ test('awareness', async () => {
   const awarenessC = new Awareness(docC);

   {
-    const sync = new AwarenessSyncImpl(storage1, [storage2]);
+    const sync = new AwarenessSyncImpl({
+      local: storage1,
+      remotes: {
+        b: storage2,
+      },
+    });
     const frontend = new AwarenessFrontend(sync);
     frontend.connect(awarenessA);
     frontend.connect(awarenessB);
   }
   {
-    const sync = new AwarenessSyncImpl(storage2, [storage1]);
+    const sync = new AwarenessSyncImpl({
+      local: storage2,
+      remotes: {
+        a: storage1,
+      },
+    });
     const frontend = new AwarenessFrontend(sync);
     frontend.connect(awarenessC);
   }
@@ -19,30 +19,37 @@ test('doc', async () => {

   const peerADoc = new IndexedDBDocStorage({
     id: 'ws1',
-    peer: 'a',
+    flavour: 'a',
     type: 'workspace',
   });

   const peerASync = new IndexedDBSyncStorage({
     id: 'ws1',
-    peer: 'a',
+    flavour: 'a',
     type: 'workspace',
   });

   const peerBDoc = new IndexedDBDocStorage({
     id: 'ws1',
-    peer: 'b',
+    flavour: 'b',
     type: 'workspace',
   });
   const peerCDoc = new IndexedDBDocStorage({
     id: 'ws1',
-    peer: 'c',
+    flavour: 'c',
     type: 'workspace',
   });

-  const peerA = new SpaceStorage([peerADoc, peerASync]);
-  const peerB = new SpaceStorage([peerBDoc]);
-  const peerC = new SpaceStorage([peerCDoc]);
+  const peerA = new SpaceStorage({
+    doc: peerADoc,
+    sync: peerASync,
+  });
+  const peerB = new SpaceStorage({
+    doc: peerBDoc,
+  });
+  const peerC = new SpaceStorage({
+    doc: peerCDoc,
+  });

   peerA.connect();
   peerB.connect();
@@ -57,7 +64,13 @@ test('doc', async () => {
     bin: update,
   });

-  const sync = new Sync(peerA, [peerB, peerC]);
+  const sync = new Sync({
+    local: peerA,
+    remotes: {
+      b: peerB,
+      c: peerC,
+    },
+  });
   sync.start();

   await new Promise(resolve => setTimeout(resolve, 1000));
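Two construction shapes change together here: `SpaceStorage` now takes a record keyed by storage kind instead of a positional array, and `Sync` (like `AwarenessSyncImpl` above) takes `{ local, remotes }` where each remote is keyed by a stable peer id. A condensed sketch of the new wiring, with imports elided and option values taken from the tests:

  const local = new SpaceStorage({
    doc: new IndexedDBDocStorage({ id: 'ws1', flavour: 'a', type: 'workspace' }),
    sync: new IndexedDBSyncStorage({ id: 'ws1', flavour: 'a', type: 'workspace' }),
  });

  const sync = new Sync({
    local,
    remotes: {
      // the key ('b') is the peer id the sync layer uses for per-peer clocks
      b: new SpaceStorage({
        doc: new IndexedDBDocStorage({ id: 'ws1', flavour: 'b', type: 'workspace' }),
      }),
    },
  });
  sync.start();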
@@ -109,25 +122,31 @@ test('doc', async () => {
 test('blob', async () => {
   const a = new IndexedDBBlobStorage({
     id: 'ws1',
-    peer: 'a',
+    flavour: 'a',
     type: 'workspace',
   });

   const b = new IndexedDBBlobStorage({
     id: 'ws1',
-    peer: 'b',
+    flavour: 'b',
     type: 'workspace',
   });

   const c = new IndexedDBBlobStorage({
     id: 'ws1',
-    peer: 'c',
+    flavour: 'c',
     type: 'workspace',
   });

-  const peerA = new SpaceStorage([a]);
-  const peerB = new SpaceStorage([b]);
-  const peerC = new SpaceStorage([c]);
+  const peerA = new SpaceStorage({
+    blob: a,
+  });
+  const peerB = new SpaceStorage({
+    blob: b,
+  });
+  const peerC = new SpaceStorage({
+    blob: c,
+  });

   peerA.connect();
   peerB.connect();
@@ -151,7 +170,13 @@ test('blob', async () => {
     createdAt: new Date(100),
   });

-  const sync = new Sync(peerA, [peerB, peerC]);
+  const sync = new Sync({
+    local: peerA,
+    remotes: {
+      b: peerB,
+      c: peerC,
+    },
+  });
   sync.start();

   await new Promise(resolve => setTimeout(resolve, 1000));
@@ -92,6 +92,7 @@ export abstract class AutoReconnectConnection<T = any>
       })
       .catch(error => {
         if (!this.connectingAbort?.signal.aborted) {
+          console.error('failed to connect', error);
           this.setStatus('error', error as any);
         }
       });
@@ -3,7 +3,7 @@ import {
   applyAwarenessUpdate,
   type Awareness,
   encodeAwarenessUpdate,
-} from 'y-protocols/awareness.js';
+} from 'y-protocols/awareness';

 import type { AwarenessRecord } from '../storage/awareness';
 import type { AwarenessSync } from '../sync/awareness';
@@ -3,21 +3,33 @@ import type { BlobSync } from '../sync/blob';

 export class BlobFrontend {
   constructor(
-    readonly storage: BlobStorage,
-    readonly sync?: BlobSync
+    public readonly storage: BlobStorage,
+    private readonly sync: BlobSync
   ) {}

   get(blobId: string) {
-    return this.sync
-      ? this.sync.downloadBlob(blobId)
-      : this.storage.get(blobId);
+    return this.sync.downloadBlob(blobId);
   }

   set(blob: BlobRecord) {
-    return this.sync ? this.sync.uploadBlob(blob) : this.storage.set(blob);
+    return this.sync.uploadBlob(blob);
   }

+  fullSync() {
+    return this.sync.fullSync();
+  }
+
+  addPriority(_id: string, _priority: number) {
+    // not supported yet
+  }
+
+  readonly state$ = this.sync.state$;
+
+  setMaxBlobSize(max: number) {
+    this.sync.setMaxBlobSize(max);
+  }
+
+  onReachedMaxBlobSize(cb: (byteSize: number) => void): () => void {
+    return this.sync.onReachedMaxBlobSize(cb);
+  }
 }
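`BlobFrontend` now requires a `BlobSync` and routes every read and write through it, and it grows sync-surface APIs (`fullSync`, `state$`, max-size handling). A usage sketch; construction of `storage` and `blobSync` is elided, and the 50 MiB limit is an arbitrary example value:

  const frontend = new BlobFrontend(storage, blobSync);

  frontend.setMaxBlobSize(50 * 1024 * 1024);
  const off = frontend.onReachedMaxBlobSize(byteSize => {
    console.warn(`blob of ${byteSize} bytes exceeds the configured limit`);
  });

  await frontend.set(record);              // uploads through BlobSync
  const blob = await frontend.get(record.key); // downloads through BlobSync
  off();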
@@ -1,6 +1,7 @@
 import { groupBy } from 'lodash-es';
 import { nanoid } from 'nanoid';
-import { Subject } from 'rxjs';
+import type { Subscription } from 'rxjs';
+import { combineLatest, map, Observable, Subject } from 'rxjs';
 import {
   applyUpdate,
   type Doc as YDoc,
@@ -12,7 +13,7 @@ import type { DocRecord, DocStorage } from '../storage';
 import type { DocSync } from '../sync/doc';
 import { AsyncPriorityQueue } from '../utils/async-priority-queue';
 import { isEmptyUpdate } from '../utils/is-empty-update';
-import { throwIfAborted } from '../utils/throw-if-aborted';
+import { MANUALLY_STOP, throwIfAborted } from '../utils/throw-if-aborted';

 const NBSTORE_ORIGIN = 'nbstore-frontend';
@@ -36,6 +37,64 @@ interface DocFrontendOptions {
   mergeUpdates?: (updates: Uint8Array[]) => Promise<Uint8Array> | Uint8Array;
 }

+export type DocFrontendDocState = {
+  /**
+   * some data is available in the yjs doc instance
+   */
+  ready: boolean;
+  /**
+   * data has been loaded from local doc storage and applied to the yjs doc instance
+   */
+  loaded: boolean;
+  /**
+   * some data is being applied to the yjs doc instance, or being saved to local doc storage
+   */
+  updating: boolean;
+  /**
+   * the doc is syncing with remote peers
+   */
+  syncing: boolean;
+  /**
+   * the doc is synced with remote peers
+   */
+  synced: boolean;
+  /**
+   * the doc is retrying to sync with remote peers
+   */
+  syncRetrying: boolean;
+  /**
+   * the error message from syncing with remote peers
+   */
+  syncErrorMessage: string | null;
+};
+
+export type DocFrontendState = {
+  /**
+   * total number of docs
+   */
+  total: number;
+  /**
+   * number of docs that have been loaded into yjs doc instances
+   */
+  loaded: number;
+  /**
+   * number of docs that are syncing with remote peers
+   */
+  syncing: number;
+  /**
+   * whether all docs are synced with remote peers
+   */
+  synced: boolean;
+  /**
+   * whether the frontend is retrying to sync with remote peers
+   */
+  syncRetrying: boolean;
+  /**
+   * the error message from syncing with remote peers
+   */
+  syncErrorMessage: string | null;
+};
+
 export class DocFrontend {
   private readonly uniqueId = `frontend:${nanoid()}`;
@@ -55,11 +114,68 @@ export class DocFrontend {
   private readonly abort = new AbortController();

   constructor(
-    private readonly storage: DocStorage,
-    private readonly sync: DocSync | null,
+    public readonly storage: DocStorage,
+    private readonly sync: DocSync,
     readonly options: DocFrontendOptions = {}
   ) {}

+  docState$(docId: string): Observable<DocFrontendDocState> {
+    const frontendState$ = new Observable<{
+      ready: boolean;
+      loaded: boolean;
+      updating: boolean;
+    }>(subscribe => {
+      const next = () => {
+        subscribe.next({
+          ready: this.status.readyDocs.has(docId),
+          loaded: this.status.connectedDocs.has(docId),
+          updating:
+            (this.status.jobMap.get(docId)?.length ?? 0) > 0 ||
+            this.status.currentJob?.docId === docId,
+        });
+      };
+      next();
+      return this.statusUpdatedSubject$.subscribe(updatedId => {
+        if (updatedId === docId) next();
+      });
+    });
+    const syncState$ = this.sync.docState$(docId);
+    return combineLatest([frontendState$, syncState$]).pipe(
+      map(([frontend, sync]) => ({
+        ...frontend,
+        synced: sync.synced,
+        syncing: sync.syncing,
+        syncRetrying: sync.retrying,
+        syncErrorMessage: sync.errorMessage,
+      }))
+    );
+  }
+
+  state$ = combineLatest([
+    new Observable<{ total: number; loaded: number }>(subscriber => {
+      const next = () => {
+        subscriber.next({
+          total: this.status.docs.size,
+          loaded: this.status.connectedDocs.size,
+        });
+      };
+      next();
+      return this.statusUpdatedSubject$.subscribe(() => {
+        next();
+      });
+    }),
+    this.sync.state$,
+  ]).pipe(
+    map(([frontend, sync]) => ({
+      total: sync.total ?? frontend.total,
+      loaded: frontend.loaded,
+      syncing: sync.syncing,
+      synced: sync.synced,
+      syncRetrying: sync.retrying,
+      syncErrorMessage: sync.errorMessage,
+    }))
+  ) satisfies Observable<DocFrontendState>;
+
   start() {
     if (this.abort.signal.aborted) {
       throw new Error('doc frontend can only start once');
@@ -70,10 +186,11 @@ export class DocFrontend {
   }

   stop() {
-    this.abort.abort();
+    this.abort.abort(MANUALLY_STOP);
   }

   private async mainLoop(signal?: AbortSignal) {
     await this.storage.connection.waitForConnected(signal);
     const dispose = this.storage.subscribeDocUpdate((record, origin) => {
       this.event.onStorageUpdate(record, origin);
     });
@@ -314,4 +431,96 @@ export class DocFrontend {

     return merge(updates.filter(bin => !isEmptyUpdate(bin)));
   }
+
+  async waitForSynced(abort?: AbortSignal) {
+    let sub: Subscription | undefined = undefined;
+    return Promise.race([
+      new Promise<void>(resolve => {
+        sub = this.state$?.subscribe(status => {
+          if (status.synced) {
+            resolve();
+          }
+        });
+      }),
+      new Promise<void>((_, reject) => {
+        if (abort?.aborted) {
+          reject(abort?.reason);
+        }
+        abort?.addEventListener('abort', () => {
+          reject(abort.reason);
+        });
+      }),
+    ]).finally(() => {
+      sub?.unsubscribe();
+    });
+  }
+
+  async waitForDocLoaded(docId: string, abort?: AbortSignal) {
+    let sub: Subscription | undefined = undefined;
+    return Promise.race([
+      new Promise<void>(resolve => {
+        sub = this.docState$(docId).subscribe(state => {
+          if (state.loaded) {
+            resolve();
+          }
+        });
+      }),
+      new Promise<void>((_, reject) => {
+        if (abort?.aborted) {
+          reject(abort?.reason);
+        }
+        abort?.addEventListener('abort', () => {
+          reject(abort.reason);
+        });
+      }),
+    ]).finally(() => {
+      sub?.unsubscribe();
+    });
+  }
+
+  async waitForDocSynced(docId: string, abort?: AbortSignal) {
+    let sub: Subscription | undefined = undefined;
+    return Promise.race([
+      new Promise<void>(resolve => {
+        sub = this.docState$(docId).subscribe(state => {
+          if (state.syncing) {
+            resolve();
+          }
+        });
+      }),
+      new Promise<void>((_, reject) => {
+        if (abort?.aborted) {
+          reject(abort?.reason);
+        }
+        abort?.addEventListener('abort', () => {
+          reject(abort.reason);
+        });
+      }),
+    ]).finally(() => {
+      sub?.unsubscribe();
+    });
+  }
+
+  async waitForDocReady(docId: string, abort?: AbortSignal) {
+    let sub: Subscription | undefined = undefined;
+    return Promise.race([
+      new Promise<void>(resolve => {
+        sub = this.docState$(docId).subscribe(state => {
+          if (state.ready) {
+            resolve();
+          }
+        });
+      }),
+      new Promise<void>((_, reject) => {
+        if (abort?.aborted) {
+          reject(abort?.reason);
+        }
+        abort?.addEventListener('abort', () => {
+          reject(abort.reason);
+        });
+      }),
+    ]).finally(() => {
+      sub?.unsubscribe();
+    });
+  }
 }
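The new `docState$` observable plus the `waitFor*` helpers let callers await sync milestones with cancellation; each helper races the state stream against the abort signal and always unsubscribes. A sketch assuming a constructed `frontend` and an existing doc id:

  const controller = new AbortController();

  // rejects with the abort reason if it fires before the doc finishes loading
  await frontend.waitForDocLoaded('doc-1', controller.signal);

  const sub = frontend.docState$('doc-1').subscribe(state => {
    if (state.syncErrorMessage) {
      console.error('sync failed:', state.syncErrorMessage);
    }
  });
  // later: sub.unsubscribe(); controller.abort(new Error('view closed'));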
packages/common/nbstore/src/frontend/index.ts (new file, 3 lines)
@@ -0,0 +1,3 @@
export * from './awareness';
export * from './blob';
export * from './doc';
@@ -22,13 +22,27 @@ type ChannelMessage =
       collectId: string;
     };

+interface BroadcastChannelAwarenessStorageOptions {
+  id: string;
+}
+
 export class BroadcastChannelAwarenessStorage extends AwarenessStorageBase {
+  static readonly identifier = 'BroadcastChannelAwarenessStorage';
+
   override readonly storageType = 'awareness';
-  override readonly connection = new BroadcastChannelConnection(this.options);
+  override readonly connection = new BroadcastChannelConnection({
+    id: this.options.id,
+  });
   get channel() {
     return this.connection.inner;
   }

+  constructor(
+    private readonly options: BroadcastChannelAwarenessStorageOptions
+  ) {
+    super();
+  }
+
   private readonly subscriptions = new Map<
     string,
     Set<{
@@ -1,10 +1,13 @@
 import { AutoReconnectConnection } from '../../connection';
-import type { StorageOptions } from '../../storage';
+
+export interface BroadcastChannelConnectionOptions {
+  id: string;
+}

 export class BroadcastChannelConnection extends AutoReconnectConnection<BroadcastChannel> {
-  readonly channelName = `channel:${this.opts.peer}:${this.opts.type}:${this.opts.id}`;
+  readonly channelName = `channel:${this.opts.id}`;

-  constructor(private readonly opts: StorageOptions) {
+  constructor(private readonly opts: BroadcastChannelConnectionOptions) {
     super();
   }
@@ -0,0 +1,6 @@
import type { StorageConstructor } from '..';
import { BroadcastChannelAwarenessStorage } from './awareness';

export const broadcastChannelStorages = [
  BroadcastChannelAwarenessStorage,
] satisfies StorageConstructor[];
@@ -4,21 +4,33 @@ import { share } from '../../connection';
 import {
   type AwarenessRecord,
   AwarenessStorageBase,
-  type AwarenessStorageOptions,
 } from '../../storage/awareness';
+import type { SpaceType } from '../../utils/universal-id';
 import {
   base64ToUint8Array,
   SocketConnection,
   uint8ArrayToBase64,
 } from './socket';

-interface CloudAwarenessStorageOptions extends AwarenessStorageOptions {
-  socketOptions: SocketOptions;
+interface CloudAwarenessStorageOptions {
+  socketOptions?: SocketOptions;
+  serverBaseUrl: string;
+  type: SpaceType;
+  id: string;
 }

-export class CloudAwarenessStorage extends AwarenessStorageBase<CloudAwarenessStorageOptions> {
+export class CloudAwarenessStorage extends AwarenessStorageBase {
+  static readonly identifier = 'CloudAwarenessStorage';
+
+  constructor(private readonly options: CloudAwarenessStorageOptions) {
+    super();
+  }
+
   connection = share(
-    new SocketConnection(this.peer, this.options.socketOptions)
+    new SocketConnection(
+      `${this.options.serverBaseUrl}/`,
+      this.options.socketOptions
+    )
   );

   private get socket() {
@@ -28,8 +40,8 @@ export class CloudAwarenessStorage extends AwarenessStorageBase {
   override async update(record: AwarenessRecord): Promise<void> {
     const encodedUpdate = await uint8ArrayToBase64(record.bin);
     this.socket.emit('space:update-awareness', {
-      spaceType: this.spaceType,
-      spaceId: this.spaceId,
+      spaceType: this.options.type,
+      spaceId: this.options.id,
       docId: record.docId,
       awarenessUpdate: encodedUpdate,
     });
@@ -44,8 +56,8 @@ export class CloudAwarenessStorage extends AwarenessStorageBase {
     // leave awareness
     const leave = () => {
       this.socket.emit('space:leave-awareness', {
-        spaceType: this.spaceType,
-        spaceId: this.spaceId,
+        spaceType: this.options.type,
+        spaceId: this.options.id,
         docId: id,
       });
     };
@@ -53,14 +65,14 @@ export class CloudAwarenessStorage extends AwarenessStorageBase {
     // join awareness, and collect awareness from others
     const joinAndCollect = async () => {
       await this.socket.emitWithAck('space:join-awareness', {
-        spaceType: this.spaceType,
-        spaceId: this.spaceId,
+        spaceType: this.options.type,
+        spaceId: this.options.id,
         docId: id,
         clientVersion: BUILD_CONFIG.appVersion,
       });
       this.socket.emit('space:load-awarenesses', {
-        spaceType: this.spaceType,
-        spaceId: this.spaceId,
+        spaceType: this.options.type,
+        spaceId: this.options.id,
         docId: id,
       });
     };
@@ -87,8 +99,8 @@ export class CloudAwarenessStorage extends AwarenessStorageBase {
       docId: string;
     }) => {
       if (
-        spaceId === this.spaceId &&
-        spaceType === this.spaceType &&
+        spaceId === this.options.id &&
+        spaceType === this.options.type &&
         docId === id
       ) {
         (async () => {
@@ -96,8 +108,8 @@ export class CloudAwarenessStorage extends AwarenessStorageBase {
         if (record) {
           const encodedUpdate = await uint8ArrayToBase64(record.bin);
           this.socket.emit('space:update-awareness', {
-            spaceType: this.spaceType,
-            spaceId: this.spaceId,
+            spaceType: this.options.type,
+            spaceId: this.options.id,
             docId: record.docId,
             awarenessUpdate: encodedUpdate,
           });
@@ -118,8 +130,8 @@ export class CloudAwarenessStorage extends AwarenessStorageBase {
       awarenessUpdate: string;
     }) => {
       if (
-        spaceId === this.spaceId &&
-        spaceType === this.spaceType &&
+        spaceId === this.options.id &&
+        spaceType === this.options.type &&
         docId === id
       ) {
         onUpdate({
@@ -1,35 +1,30 @@
 import {
   deleteBlobMutation,
-  gqlFetcherFactory,
   listBlobsQuery,
   releaseDeletedBlobsMutation,
   setBlobMutation,
 } from '@affine/graphql';

-import { DummyConnection } from '../../connection';
-import {
-  type BlobRecord,
-  BlobStorageBase,
-  type BlobStorageOptions,
-} from '../../storage';
+import { type BlobRecord, BlobStorageBase } from '../../storage';
+import { HttpConnection } from './http';

-interface CloudBlobStorageOptions extends BlobStorageOptions {
-  apiBaseUrl: string;
+interface CloudBlobStorageOptions {
+  serverBaseUrl: string;
+  id: string;
 }

-export class CloudBlobStorage extends BlobStorageBase<CloudBlobStorageOptions> {
-  private readonly gql = gqlFetcherFactory(
-    this.options.apiBaseUrl + '/graphql'
-  );
-  override connection = new DummyConnection();
+export class CloudBlobStorage extends BlobStorageBase {
+  static readonly identifier = 'CloudBlobStorage';
+
+  constructor(private readonly options: CloudBlobStorageOptions) {
+    super();
+  }
+
+  readonly connection = new HttpConnection(this.options.serverBaseUrl);

   override async get(key: string) {
-    const res = await fetch(
-      this.options.apiBaseUrl +
-        '/api/workspaces/' +
-        this.spaceId +
-        '/blobs/' +
-        key,
+    const res = await this.connection.fetch(
+      '/api/workspaces/' + this.options.id + '/blobs/' + key,
       {
         cache: 'default',
         headers: {
@@ -38,49 +33,53 @@ export class CloudBlobStorage extends BlobStorageBase {
       }
     );

-    if (!res.ok) {
-      if (res.status === 404) {
-        return null;
-      }
+    if (res.status === 404) {
+      return null;
+    }

-    const data = await res.arrayBuffer();
-
-    return {
-      key,
-      data: new Uint8Array(data),
-      mime: res.headers.get('content-type') || '',
-      size: data.byteLength,
-      createdAt: new Date(res.headers.get('last-modified') || Date.now()),
-    };
+    try {
+      const blob = await res.blob();
+
+      return {
+        key,
+        data: new Uint8Array(await blob.arrayBuffer()),
+        mime: blob.type,
+        size: blob.size,
+        createdAt: new Date(res.headers.get('last-modified') || Date.now()),
+      };
+    } catch (err) {
+      throw new Error('blob download error: ' + err);
+    }
   }

   override async set(blob: BlobRecord) {
-    await this.gql({
+    await this.connection.gql({
       query: setBlobMutation,
       variables: {
-        workspaceId: this.spaceId,
+        workspaceId: this.options.id,
         blob: new File([blob.data], blob.key, { type: blob.mime }),
       },
     });
   }

   override async delete(key: string, permanently: boolean) {
-    await this.gql({
+    await this.connection.gql({
       query: deleteBlobMutation,
-      variables: { workspaceId: this.spaceId, key, permanently },
+      variables: { workspaceId: this.options.id, key, permanently },
     });
   }

   override async release() {
-    await this.gql({
+    await this.connection.gql({
       query: releaseDeletedBlobsMutation,
-      variables: { workspaceId: this.spaceId },
+      variables: { workspaceId: this.options.id },
     });
   }

   override async list() {
-    const res = await this.gql({
+    const res = await this.connection.gql({
       query: listBlobsQuery,
-      variables: { workspaceId: this.spaceId },
+      variables: { workspaceId: this.options.id },
     });

     return res.workspace.blobs.map(blob => ({
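The cloud storages no longer extend a shared options base type: each takes a plain options object with a `serverBaseUrl`, and all HTTP and GraphQL traffic goes through the shared `HttpConnection`. A construction sketch; the URL and ids are placeholders:

  const blobStorage = new CloudBlobStorage({
    serverBaseUrl: 'https://app.affine.example', // placeholder URL
    id: 'workspace-id',
  });

  // resolves with the record, or null on 404
  const record = await blobStorage.get('blob-key');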
packages/common/nbstore/src/impls/cloud/doc-static.ts (new file, 82 lines)
@@ -0,0 +1,82 @@
import {
  type DocClock,
  type DocClocks,
  type DocRecord,
  DocStorageBase,
  type DocStorageOptions,
  type DocUpdate,
} from '../../storage';
import { HttpConnection } from './http';

interface CloudDocStorageOptions extends DocStorageOptions {
  serverBaseUrl: string;
}

export class StaticCloudDocStorage extends DocStorageBase<CloudDocStorageOptions> {
  static readonly identifier = 'StaticCloudDocStorage';

  constructor(options: CloudDocStorageOptions) {
    super({ ...options, readonlyMode: true });
  }

  override connection = new HttpConnection(this.options.serverBaseUrl);

  override async pushDocUpdate(
    update: DocUpdate,
    _origin?: string
  ): Promise<DocClock> {
    // http is readonly
    return { docId: update.docId, timestamp: new Date() };
  }

  override async getDocTimestamp(docId: string): Promise<DocClock | null> {
    // http doesn't support this, so we just return a new timestamp
    return {
      docId,
      timestamp: new Date(),
    };
  }

  override async getDocTimestamps(): Promise<DocClocks> {
    // http doesn't support this
    return {};
  }

  override deleteDoc(_docId: string): Promise<void> {
    // http is readonly
    return Promise.resolve();
  }

  protected override async getDocSnapshot(
    docId: string
  ): Promise<DocRecord | null> {
    const arrayBuffer = await this.connection.fetchArrayBuffer(
      `/api/workspaces/${this.spaceId}/docs/${docId}`,
      {
        priority: 'high',
        headers: {
          Accept: 'application/octet-stream', // necessary for the iOS native fetch to return an ArrayBuffer
        },
      }
    );
    if (!arrayBuffer) {
      return null;
    }
    return {
      docId: docId,
      bin: new Uint8Array(arrayBuffer),
      timestamp: new Date(),
    };
  }

  protected override setDocSnapshot(
    _snapshot: DocRecord,
    _prevSnapshot: DocRecord | null
  ): Promise<boolean> {
    // http is readonly
    return Promise.resolve(false);
  }

  protected override getDocUpdates(_docId: string): Promise<DocRecord[]> {
    return Promise.resolve([]);
  }

  protected override markUpdatesMerged(
    _docId: string,
    _updates: DocRecord[]
  ): Promise<number> {
    return Promise.resolve(0);
  }
}
@@ -12,6 +12,7 @@ import {
   type DocStorageOptions,
   type DocUpdate,
 } from '../../storage';
+import type { SpaceType } from '../../utils/universal-id';
 import {
   base64ToUint8Array,
   type ServerEventsMap,
@@ -20,15 +21,20 @@ import {
 } from './socket';

 interface CloudDocStorageOptions extends DocStorageOptions {
-  socketOptions: SocketOptions;
+  socketOptions?: SocketOptions;
+  serverBaseUrl: string;
+  type: SpaceType;
 }

 export class CloudDocStorage extends DocStorageBase<CloudDocStorageOptions> {
+  static readonly identifier = 'CloudDocStorage';
+
   get socket() {
     return this.connection.inner;
   }

+  readonly spaceType = this.options.type;
+
   onServerUpdate: ServerEventsMap['space:broadcast-doc-update'] = message => {
     if (
       this.spaceType === message.spaceType &&
packages/common/nbstore/src/impls/cloud/http.ts (new file, 69 lines)
@@ -0,0 +1,69 @@
import { gqlFetcherFactory } from '@affine/graphql';

import { DummyConnection } from '../../connection';

export class HttpConnection extends DummyConnection {
  readonly fetch = async (input: string, init?: RequestInit) => {
    const externalSignal = init?.signal;
    if (externalSignal?.aborted) {
      throw externalSignal.reason;
    }
    const abortController = new AbortController();
    externalSignal?.addEventListener('abort', reason => {
      abortController.abort(reason);
    });

    const timeout = 15000;
    const timeoutId = setTimeout(() => {
      abortController.abort('timeout');
    }, timeout);

    const res = await globalThis
      .fetch(new URL(input, this.serverBaseUrl), {
        ...init,
        signal: abortController.signal,
        headers: {
          ...init?.headers,
          'x-affine-version': BUILD_CONFIG.appVersion,
        },
      })
      .catch(err => {
        throw new Error('fetch error: ' + err);
      });
    clearTimeout(timeoutId);
    if (!res.ok && res.status !== 404) {
      let reason: string | any = '';
      if (res.headers.get('Content-Type')?.includes('application/json')) {
        try {
          reason = await res.json();
        } catch {
          // ignore
        }
      }
      throw new Error('fetch error status: ' + res.status + ' ' + reason);
    }
    return res;
  };

  readonly fetchArrayBuffer = async (input: string, init?: RequestInit) => {
    const res = await this.fetch(input, init);
    if (res.status === 404) {
      return null;
    }
    try {
      return await res.arrayBuffer();
    } catch (err) {
      throw new Error('fetch download error: ' + err);
    }
  };

  readonly gql = gqlFetcherFactory(
    new URL('/graphql', this.serverBaseUrl).href,
    this.fetch
  );

  constructor(private readonly serverBaseUrl: string) {
    super();
  }
}
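`HttpConnection` wraps `globalThis.fetch` with a 15 s timeout, an `x-affine-version` header, and error normalization: a 404 passes through, any other non-OK status throws. A sketch of using it directly; the base URL and paths are illustrative:

  const http = new HttpConnection('https://app.affine.example'); // placeholder

  // resolves with a Response; 404 is not treated as an error here
  const res = await http.fetch('/api/healthz');

  // resolves with an ArrayBuffer, or null on 404
  const bin = await http.fetchArrayBuffer('/api/workspaces/ws1/docs/doc1');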
@@ -1,3 +1,17 @@
+import type { StorageConstructor } from '..';
+import { CloudAwarenessStorage } from './awareness';
+import { CloudBlobStorage } from './blob';
+import { CloudDocStorage } from './doc';
+import { StaticCloudDocStorage } from './doc-static';
+
 export * from './awareness';
 export * from './blob';
 export * from './doc';
+export * from './doc-static';
+
+export const cloudStorages = [
+  CloudDocStorage,
+  StaticCloudDocStorage,
+  CloudBlobStorage,
+  CloudAwarenessStorage,
+] satisfies StorageConstructor[];
@@ -162,7 +162,7 @@ export class SocketConnection extends AutoReconnectConnection<Socket> {

   constructor(
     private readonly endpoint: string,
-    private readonly socketOptions: SocketOptions
+    private readonly socketOptions?: SocketOptions
   ) {
     super();
   }
@@ -4,11 +4,17 @@ import {
   BlobStorageBase,
   type ListedBlobRecord,
 } from '../../storage';
-import { IDBConnection } from './db';
+import { IDBConnection, type IDBConnectionOptions } from './db';

 export class IndexedDBBlobStorage extends BlobStorageBase {
+  static readonly identifier = 'IndexedDBBlobStorage';
+
   readonly connection = share(new IDBConnection(this.options));

+  constructor(private readonly options: IDBConnectionOptions) {
+    super();
+  }
+
   get db() {
     return this.connection.inner.db;
   }
@@ -1,20 +1,26 @@
 import { type IDBPDatabase, openDB } from 'idb';

 import { AutoReconnectConnection } from '../../connection';
-import type { StorageOptions } from '../../storage';
+import type { SpaceType } from '../../utils/universal-id';
 import { type DocStorageSchema, migrator } from './schema';

+export interface IDBConnectionOptions {
+  flavour: string;
+  type: SpaceType;
+  id: string;
+}
+
 export class IDBConnection extends AutoReconnectConnection<{
   db: IDBPDatabase<DocStorageSchema>;
   channel: BroadcastChannel;
 }> {
-  readonly dbName = `${this.opts.peer}:${this.opts.type}:${this.opts.id}`;
+  readonly dbName = `${this.opts.flavour}:${this.opts.type}:${this.opts.id}`;

   override get shareId() {
     return `idb(${migrator.version}):${this.dbName}`;
   }

-  constructor(private readonly opts: StorageOptions) {
+  constructor(private readonly opts: IDBConnectionOptions) {
     super();
   }
@@ -3,10 +3,9 @@ import {
   type DocClocks,
   type DocRecord,
   DocStorageBase,
-  type DocStorageOptions,
   type DocUpdate,
 } from '../../storage';
-import { IDBConnection } from './db';
+import { IDBConnection, type IDBConnectionOptions } from './db';
 import { IndexedDBLocker } from './lock';

 interface ChannelMessage {
@@ -15,7 +14,9 @@ interface ChannelMessage {
   origin?: string;
 }

-export class IndexedDBDocStorage extends DocStorageBase {
+export class IndexedDBDocStorage extends DocStorageBase<IDBConnectionOptions> {
+  static readonly identifier = 'IndexedDBDocStorage';
+
   readonly connection = new IDBConnection(this.options);

   get db() {
@@ -30,10 +31,6 @@ export class IndexedDBDocStorage extends DocStorageBase {

   private _lastTimestamp = new Date(0);

-  constructor(options: DocStorageOptions) {
-    super(options);
-  }
-
   private generateTimestamp() {
     const timestamp = new Date();
     if (timestamp.getTime() <= this._lastTimestamp.getTime()) {
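The IndexedDB storages now take explicit `{ flavour, type, id }` options instead of the old `StorageOptions` with `peer`, and the backing database name is derived from `flavour`. A sketch; the flavour string is whatever label distinguishes this storage instance locally:

  const docStorage = new IndexedDBDocStorage({
    flavour: 'local', // replaces the old `peer` segment in the db name
    type: 'workspace',
    id: 'ws1',
  });
  // per IDBConnection.dbName, the database is named `local:workspace:ws1`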
@@ -1,3 +1,20 @@
+import type { StorageConstructor } from '..';
+import { IndexedDBBlobStorage } from './blob';
+import { IndexedDBDocStorage } from './doc';
+import { IndexedDBSyncStorage } from './sync';
+import { IndexedDBV1BlobStorage, IndexedDBV1DocStorage } from './v1';
+
 export * from './blob';
 export * from './doc';
 export * from './sync';
+
+export const idbStorages = [
+  IndexedDBDocStorage,
+  IndexedDBBlobStorage,
+  IndexedDBSyncStorage,
+] satisfies StorageConstructor[];
+
+export const idbv1Storages = [
+  IndexedDBV1DocStorage,
+  IndexedDBV1BlobStorage,
+] satisfies StorageConstructor[];
@@ -1,7 +1,14 @@
 import { share } from '../../connection';
-import { BasicSyncStorage, type DocClock, type DocClocks } from '../../storage';
-import { IDBConnection } from './db';
-export class IndexedDBSyncStorage extends BasicSyncStorage {
+import { type DocClock, type DocClocks, SyncStorageBase } from '../../storage';
+import { IDBConnection, type IDBConnectionOptions } from './db';
+
+export class IndexedDBSyncStorage extends SyncStorageBase {
+  static readonly identifier = 'IndexedDBSyncStorage';
+
+  constructor(private readonly options: IDBConnectionOptions) {
+    super();
+  }
+
   readonly connection = share(new IDBConnection(this.options));

   get db() {
@@ -1,12 +1,18 @@
 import { share } from '../../../connection';
 import { BlobStorageBase, type ListedBlobRecord } from '../../../storage';
-import { BlobIDBConnection } from './db';
+import { BlobIDBConnection, type BlobIDBConnectionOptions } from './db';

 /**
  * @deprecated readonly
  */
 export class IndexedDBV1BlobStorage extends BlobStorageBase {
-  readonly connection = share(new BlobIDBConnection(this.spaceId));
+  static readonly identifier = 'IndexedDBV1BlobStorage';
+
+  constructor(private readonly options: BlobIDBConnectionOptions) {
+    super();
+  }
+
+  readonly connection = share(new BlobIDBConnection(this.options));

   get db() {
     return this.connection.inner;
@@ -42,19 +42,23 @@ export interface BlobDBSchema extends DBSchema {
   };
 }

+export interface BlobIDBConnectionOptions {
+  id: string;
+}
+
 export class BlobIDBConnection extends AutoReconnectConnection<
   IDBPDatabase<BlobDBSchema>
 > {
-  constructor(private readonly workspaceId: string) {
+  constructor(private readonly options: BlobIDBConnectionOptions) {
     super();
   }

   override get shareId() {
-    return `idb(old-blob):${this.workspaceId}`;
+    return `idb(old-blob):${this.options.id}`;
   }

   override async doConnect() {
-    return openDB<BlobDBSchema>(`${this.workspaceId}_blob`, 1, {
+    return openDB<BlobDBSchema>(`${this.options.id}_blob`, 1, {
       upgrade: db => {
         db.createObjectStore('blob');
       },
@@ -10,6 +10,8 @@ import { DocIDBConnection } from './db';
  * @deprecated readonly
  */
 export class IndexedDBV1DocStorage extends DocStorageBase {
+  static readonly identifier = 'IndexedDBV1DocStorage';
+
   readonly connection = share(new DocIDBConnection());

   get db() {
@@ -1,47 +1,24 @@
 import type { Storage } from '../storage';
-import { BroadcastChannelAwarenessStorage } from './broadcast-channel/awareness';
-import {
-  CloudAwarenessStorage,
-  CloudBlobStorage,
-  CloudDocStorage,
-} from './cloud';
-import {
-  IndexedDBBlobStorage,
-  IndexedDBDocStorage,
-  IndexedDBSyncStorage,
-} from './idb';
-import { IndexedDBV1BlobStorage, IndexedDBV1DocStorage } from './idb/v1';
+import type { broadcastChannelStorages } from './broadcast-channel';
+import type { cloudStorages } from './cloud';
+import type { idbStorages, idbv1Storages } from './idb';
+import type { sqliteStorages } from './sqlite';

-type StorageConstructor = new (...args: any[]) => Storage;
-
-const idb: StorageConstructor[] = [
-  IndexedDBDocStorage,
-  IndexedDBBlobStorage,
-  IndexedDBSyncStorage,
-  BroadcastChannelAwarenessStorage,
-];
-
-const idbv1: StorageConstructor[] = [
-  IndexedDBV1DocStorage,
-  IndexedDBV1BlobStorage,
-];
-
-const cloud: StorageConstructor[] = [
-  CloudDocStorage,
-  CloudBlobStorage,
-  CloudAwarenessStorage,
-];
-
-export const storages: StorageConstructor[] = cloud.concat(idbv1, idb);
-
-const AvailableStorageImplementations = storages.reduce(
-  (acc, curr) => {
-    acc[curr.name] = curr;
-    return acc;
-  },
-  {} as Record<string, StorageConstructor>
-);
-
-export const getAvailableStorageImplementations = (name: string) => {
-  return AvailableStorageImplementations[name];
-};
+export type StorageConstructor = {
+  new (...args: any[]): Storage;
+  readonly identifier: string;
+};
+
+type Storages =
+  | typeof cloudStorages
+  | typeof idbv1Storages
+  | typeof idbStorages
+  | typeof sqliteStorages
+  | typeof broadcastChannelStorages;
+
+// oxlint-disable-next-line no-redeclare
+export type AvailableStorageImplementations = {
+  [key in Storages[number]['identifier']]: Storages[number] & {
+    identifier: key;
+  };
+};
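Implementations are now keyed by an explicit static `identifier` instead of `constructor.name`, which breaks under minification. The per-backend `satisfies StorageConstructor[]` arrays can be folded into a runtime registry; a sketch of how such a lookup might be built, noting that this helper itself is not part of the diff:

  import { cloudStorages } from './cloud';
  import { idbStorages, idbv1Storages } from './idb';

  // hypothetical registry built from the exported arrays
  const registry = Object.fromEntries(
    [...idbStorages, ...idbv1Storages, ...cloudStorages].map(ctor => [
      ctor.identifier,
      ctor,
    ])
  );

  const Ctor = registry['IndexedDBDocStorage'];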
@@ -1,11 +1,15 @@
 import { share } from '../../connection';
 import { type BlobRecord, BlobStorageBase } from '../../storage';
-import { NativeDBConnection } from './db';
+import { NativeDBConnection, type SqliteNativeDBOptions } from './db';

 export class SqliteBlobStorage extends BlobStorageBase {
-  override connection = share(
-    new NativeDBConnection(this.peer, this.spaceType, this.spaceId)
-  );
+  static readonly identifier = 'SqliteBlobStorage';
+
+  override connection = share(new NativeDBConnection(this.options));
+
+  constructor(private readonly options: SqliteNativeDBOptions) {
+    super();
+  }

   get db() {
     return this.connection.apis;
@@ -1,9 +1,81 @@
-import { apis } from '@affine/electron-api';
-
 import { AutoReconnectConnection } from '../../connection';
-import { type SpaceType, universalId } from '../../storage';
+import type {
+  BlobRecord,
+  DocClock,
+  DocRecord,
+  ListedBlobRecord,
+} from '../../storage';
+import { type SpaceType, universalId } from '../../utils/universal-id';

-type NativeDBApis = NonNullable<typeof apis>['nbstore'] extends infer APIs
+export interface SqliteNativeDBOptions {
+  readonly flavour: string;
+  readonly type: SpaceType;
+  readonly id: string;
+}
+
+export type NativeDBApis = {
+  connect(id: string): Promise<void>;
+  disconnect(id: string): Promise<void>;
+  pushUpdate(id: string, docId: string, update: Uint8Array): Promise<Date>;
+  getDocSnapshot(id: string, docId: string): Promise<DocRecord | null>;
+  setDocSnapshot(id: string, snapshot: DocRecord): Promise<boolean>;
+  getDocUpdates(id: string, docId: string): Promise<DocRecord[]>;
+  markUpdatesMerged(
+    id: string,
+    docId: string,
+    updates: Date[]
+  ): Promise<number>;
+  deleteDoc(id: string, docId: string): Promise<void>;
+  getDocClocks(
+    id: string,
+    after?: Date | undefined | null
+  ): Promise<DocClock[]>;
+  getDocClock(id: string, docId: string): Promise<DocClock | null>;
+  getBlob(id: string, key: string): Promise<BlobRecord | null>;
+  setBlob(id: string, blob: BlobRecord): Promise<void>;
+  deleteBlob(id: string, key: string, permanently: boolean): Promise<void>;
+  releaseBlobs(id: string): Promise<void>;
+  listBlobs(id: string): Promise<ListedBlobRecord[]>;
+  getPeerRemoteClocks(id: string, peer: string): Promise<DocClock[]>;
+  getPeerRemoteClock(
+    id: string,
+    peer: string,
+    docId: string
+  ): Promise<DocClock>;
+  setPeerRemoteClock(
+    id: string,
+    peer: string,
+    docId: string,
+    clock: Date
+  ): Promise<void>;
+  getPeerPulledRemoteClocks(id: string, peer: string): Promise<DocClock[]>;
+  getPeerPulledRemoteClock(
+    id: string,
+    peer: string,
+    docId: string
+  ): Promise<DocClock>;
+  setPeerPulledRemoteClock(
+    id: string,
+    peer: string,
+    docId: string,
+    clock: Date
+  ): Promise<void>;
+  getPeerPushedClocks(id: string, peer: string): Promise<DocClock[]>;
+  getPeerPushedClock(
+    id: string,
+    peer: string,
+    docId: string
+  ): Promise<DocClock>;
+  setPeerPushedClock(
+    id: string,
+    peer: string,
+    docId: string,
+    clock: Date
+  ): Promise<void>;
+  clearClocks(id: string): Promise<void>;
+};
+
+type NativeDBApisWrapper = NativeDBApis extends infer APIs
   ? {
       [K in keyof APIs]: APIs[K] extends (...args: any[]) => any
         ? Parameters<APIs[K]> extends [string, ...infer Rest]
@@ -13,49 +85,56 @@ type NativeDBApis = NonNullable<typeof apis>['nbstore'] extends infer APIs
     }
   : never;

-export class NativeDBConnection extends AutoReconnectConnection<void> {
-  readonly apis: NativeDBApis;
+let apis: NativeDBApis | null = null;

-  constructor(
-    private readonly peer: string,
-    private readonly type: SpaceType,
-    private readonly id: string
-  ) {
+export function bindNativeDBApis(a: NativeDBApis) {
+  apis = a;
+}
+
+export class NativeDBConnection extends AutoReconnectConnection<void> {
+  readonly apis: NativeDBApisWrapper;
+
+  readonly flavour = this.options.flavour;
+  readonly type = this.options.type;
+  readonly id = this.options.id;
+
+  constructor(private readonly options: SqliteNativeDBOptions) {
     super();

     if (!apis) {
-      throw new Error('Not in electron context.');
+      throw new Error('Not in native context.');
     }

-    this.apis = this.bindApis(apis.nbstore);
+    this.apis = this.warpApis(apis);
   }

   override get shareId(): string {
-    return `sqlite:${this.peer}:${this.type}:${this.id}`;
+    return `sqlite:${this.flavour}:${this.type}:${this.id}`;
  }

-  bindApis(originalApis: NonNullable<typeof apis>['nbstore']): NativeDBApis {
+  warpApis(originalApis: NativeDBApis): NativeDBApisWrapper {
     const id = universalId({
-      peer: this.peer,
+      peer: this.flavour,
       type: this.type,
       id: this.id,
     });
-    return new Proxy(originalApis, {
-      get: (target, key: keyof NativeDBApis) => {
-        const v = target[key];
-        if (typeof v !== 'function') {
-          return v;
-        }
-        return async (...args: any[]) => {
-          return v.call(
-            originalApis,
-            id,
-            // @ts-expect-error I don't know why it complains ts(2556)
-            ...args
-          );
-        };
-      },
-    }) as unknown as NativeDBApis;
+    return new Proxy(
+      {},
+      {
+        get: (_target, key: keyof NativeDBApisWrapper) => {
+          const v = originalApis[key];
+
+          return async (...args: any[]) => {
+            return v.call(
+              originalApis,
+              id,
+              // @ts-expect-error I don't know why it complains ts(2556)
+              ...args
+            );
+          };
+        },
+      }
+    ) as unknown as NativeDBApisWrapper;
   }

   override async doConnect() {
@@ -63,7 +142,7 @@ export class NativeDBConnection extends AutoReconnectConnection<void> {
   }

   override doDisconnect() {
-    this.apis.close().catch(err => {
+    this.apis.disconnect().catch(err => {
       console.error('NativeDBConnection close failed', err);
     });
   }
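Instead of importing `apis` from `@affine/electron-api` directly, the sqlite layer now receives its native bridge through `bindNativeDBApis`, which lets any host (electron, or another native shell) supply an implementation of the explicit `NativeDBApis` contract. A wiring sketch for the electron side; the import path of `bindNativeDBApis` is assumed, and `apis.nbstore` is assumed to match the `NativeDBApis` shape:

  import { apis } from '@affine/electron-api';
  import { bindNativeDBApis } from '@affine/nbstore/sqlite'; // path assumed

  // call once at startup, before any NativeDBConnection is constructed
  if (apis) {
    bindNativeDBApis(apis.nbstore);
  }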
@@ -1,54 +1,82 @@
 import { share } from '../../connection';
-import { type DocClock, DocStorageBase, type DocUpdate } from '../../storage';
-import { NativeDBConnection } from './db';
+import {
+  type DocClocks,
+  type DocRecord,
+  DocStorageBase,
+  type DocUpdate,
+} from '../../storage';
+import { NativeDBConnection, type SqliteNativeDBOptions } from './db';

-export class SqliteDocStorage extends DocStorageBase {
-  override connection = share(
-    new NativeDBConnection(this.peer, this.spaceType, this.spaceId)
-  );
+export class SqliteDocStorage extends DocStorageBase<SqliteNativeDBOptions> {
+  static readonly identifier = 'SqliteDocStorage';
+  override connection = share(new NativeDBConnection(this.options));

   get db() {
     return this.connection.apis;
   }

   override async getDoc(docId: string) {
     return this.db.getDoc(docId);
   }

   override async pushDocUpdate(update: DocUpdate) {
-    return this.db.pushDocUpdate(update);
+    const timestamp = await this.db.pushUpdate(update.docId, update.bin);
+
+    this.emit(
+      'update',
+      {
+        docId: update.docId,
+        bin: update.bin,
+        timestamp,
+        editor: update.editor,
+      },
+      origin
+    );
+
+    return { docId: update.docId, timestamp };
   }

   override async deleteDoc(docId: string) {
-    return this.db.deleteDoc(docId);
+    await this.db.deleteDoc(docId);
   }

   override async getDocTimestamps(after?: Date) {
-    return this.db.getDocTimestamps(after ? new Date(after) : undefined);
+    const clocks = await this.db.getDocClocks(after);
+
+    return clocks.reduce((ret, cur) => {
+      ret[cur.docId] = cur.timestamp;
+      return ret;
+    }, {} as DocClocks);
   }

-  override getDocTimestamp(docId: string): Promise<DocClock | null> {
-    return this.db.getDocTimestamp(docId);
+  override async getDocTimestamp(docId: string) {
+    return this.db.getDocClock(docId);
   }

-  protected override async getDocSnapshot() {
-    // handled in db
-    // see electron/src/helper/nbstore/doc.ts
-    return null;
+  protected override async getDocSnapshot(docId: string) {
+    const snapshot = await this.db.getDocSnapshot(docId);
+
+    if (!snapshot) {
+      return null;
+    }
+
+    return snapshot;
   }

-  protected override async setDocSnapshot(): Promise<boolean> {
-    // handled in db
-    return true;
+  protected override async setDocSnapshot(
+    snapshot: DocRecord
+  ): Promise<boolean> {
+    return this.db.setDocSnapshot({
+      docId: snapshot.docId,
+      bin: snapshot.bin,
+      timestamp: snapshot.timestamp,
+    });
   }

-  protected override async getDocUpdates() {
-    // handled in db
-    return [];
+  protected override async getDocUpdates(docId: string) {
+    return this.db.getDocUpdates(docId);
   }

-  protected override markUpdatesMerged() {
-    // handled in db
-    return Promise.resolve(0);
+  protected override markUpdatesMerged(docId: string, updates: DocRecord[]) {
+    return this.db.markUpdatesMerged(
+      docId,
+      updates.map(update => update.timestamp)
+    );
   }
 }
@@ -1,3 +1,16 @@
+import type { StorageConstructor } from '..';
+import { SqliteBlobStorage } from './blob';
+import { SqliteDocStorage } from './doc';
+import { SqliteSyncStorage } from './sync';
+
 export * from './blob';
+export { bindNativeDBApis, type NativeDBApis } from './db';
 export * from './doc';
 export * from './sync';
+export * from './v1';
+
+export const sqliteStorages = [
+  SqliteDocStorage,
+  SqliteBlobStorage,
+  SqliteSyncStorage,
+] satisfies StorageConstructor[];
@@ -1,18 +1,26 @@
 import { share } from '../../connection';
-import { BasicSyncStorage, type DocClock } from '../../storage';
-import { NativeDBConnection } from './db';
+import { type DocClock, SyncStorageBase } from '../../storage';
+import { NativeDBConnection, type SqliteNativeDBOptions } from './db';

-export class SqliteSyncStorage extends BasicSyncStorage {
-  override connection = share(
-    new NativeDBConnection(this.peer, this.spaceType, this.spaceId)
-  );
+export class SqliteSyncStorage extends SyncStorageBase {
+  static readonly identifier = 'SqliteSyncStorage';
+
+  override connection = share(new NativeDBConnection(this.options));
+
+  constructor(private readonly options: SqliteNativeDBOptions) {
+    super();
+  }

   get db() {
     return this.connection.apis;
   }

   override async getPeerRemoteClocks(peer: string) {
-    return this.db.getPeerRemoteClocks(peer);
+    return this.db
+      .getPeerRemoteClocks(peer)
+      .then(clocks =>
+        Object.fromEntries(clocks.map(clock => [clock.docId, clock.timestamp]))
+      );
   }

   override async getPeerRemoteClock(peer: string, docId: string) {
@@ -20,11 +28,15 @@ export class SqliteSyncStorage extends SyncStorageBase {
   }

   override async setPeerRemoteClock(peer: string, clock: DocClock) {
-    await this.db.setPeerRemoteClock(peer, clock);
+    await this.db.setPeerRemoteClock(peer, clock.docId, clock.timestamp);
   }

   override async getPeerPulledRemoteClocks(peer: string) {
-    return this.db.getPeerPulledRemoteClocks(peer);
+    return this.db
+      .getPeerPulledRemoteClocks(peer)
+      .then(clocks =>
+        Object.fromEntries(clocks.map(clock => [clock.docId, clock.timestamp]))
+      );
   }

   override async getPeerPulledRemoteClock(peer: string, docId: string) {
@@ -32,11 +44,15 @@ export class SqliteSyncStorage extends SyncStorageBase {
   }

   override async setPeerPulledRemoteClock(peer: string, clock: DocClock) {
-    await this.db.setPeerPulledRemoteClock(peer, clock);
+    await this.db.setPeerPulledRemoteClock(peer, clock.docId, clock.timestamp);
   }

   override async getPeerPushedClocks(peer: string) {
-    return this.db.getPeerPushedClocks(peer);
+    return this.db
+      .getPeerPushedClocks(peer)
+      .then(clocks =>
+        Object.fromEntries(clocks.map(clock => [clock.docId, clock.timestamp]))
+      );
   }

   override async getPeerPushedClock(peer: string, docId: string) {
@@ -44,7 +60,7 @@ export class SqliteSyncStorage extends SyncStorageBase {
   }

   override async setPeerPushedClock(peer: string, clock: DocClock) {
-    await this.db.setPeerPushedClock(peer, clock);
+    await this.db.setPeerPushedClock(peer, clock.docId, clock.timestamp);
   }

   override async clearClocks() {
@@ -1,7 +1,7 @@
|
||||
import { apis } from '@affine/electron-api';
|
||||
|
||||
import { DummyConnection } from '../../../connection';
|
||||
import { BlobStorageBase } from '../../../storage';
|
||||
import type { SpaceType } from '../../../utils/universal-id';
|
||||
import { apis } from './db';
|
||||
|
||||
/**
|
||||
* @deprecated readonly
|
||||
@@ -9,18 +9,22 @@ import { BlobStorageBase } from '../../../storage';
|
||||
export class SqliteV1BlobStorage extends BlobStorageBase {
|
||||
override connection = new DummyConnection();
|
||||
|
||||
get db() {
|
||||
constructor(private readonly options: { type: SpaceType; id: string }) {
|
||||
super();
|
||||
}
|
||||
|
||||
private get db() {
|
||||
if (!apis) {
|
||||
throw new Error('Not in electron context.');
|
||||
}
|
||||
|
||||
return apis.db;
|
||||
return apis;
|
||||
}
|
||||
|
||||
override async get(key: string) {
|
||||
const data: Uint8Array | null = await this.db.getBlob(
|
||||
this.spaceType,
|
||||
this.spaceId,
|
||||
this.options.type,
|
||||
this.options.id,
|
||||
key
|
||||
);
|
||||
|
||||
@@ -38,12 +42,12 @@ export class SqliteV1BlobStorage extends BlobStorageBase {
|
||||
|
||||
override async delete(key: string, permanently: boolean) {
|
||||
if (permanently) {
|
||||
await this.db.deleteBlob(this.spaceType, this.spaceId, key);
|
||||
await this.db.deleteBlob(this.options.type, this.options.id, key);
|
||||
}
|
||||
}
|
||||
|
||||
override async list() {
|
||||
const keys = await this.db.getBlobKeys(this.spaceType, this.spaceId);
|
||||
const keys = await this.db.getBlobKeys(this.options.type, this.options.id);
|
||||
|
||||
return keys.map(key => ({
|
||||
key,
|
||||
|
26
packages/common/nbstore/src/impls/sqlite/v1/db.ts
Normal file
@@ -0,0 +1,26 @@
import type { SpaceType } from '../../../utils/universal-id';

interface NativeDBV1Apis {
  getBlob: (
    spaceType: SpaceType,
    workspaceId: string,
    key: string
  ) => Promise<Buffer | null>;
  deleteBlob: (
    spaceType: SpaceType,
    workspaceId: string,
    key: string
  ) => Promise<void>;
  getBlobKeys: (spaceType: SpaceType, workspaceId: string) => Promise<string[]>;
  getDocAsUpdates: (
    spaceType: SpaceType,
    workspaceId: string,
    subdocId: string
  ) => Promise<Uint8Array>;
}

export let apis: NativeDBV1Apis | null = null;

export function bindNativeDBV1Apis(a: NativeDBV1Apis) {
  apis = a;
}
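Note: with @affine/electron-api removed from the dependencies, the host application now injects the native bridge through bindNativeDBV1Apis. A hedged sketch of that wiring; the import path and the preloadApis object are assumptions, not part of this diff:

// Hypothetical preload wiring for the v1 sqlite storages.
import { bindNativeDBV1Apis } from '@affine/nbstore/sqlite/v1';

declare const preloadApis: Parameters<typeof bindNativeDBV1Apis>[0];

// Must happen before any SqliteV1*Storage is used, or `db` throws.
bindNativeDBV1Apis(preloadApis);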
@@ -1,24 +1,27 @@
import { apis } from '@affine/electron-api';

import { DummyConnection } from '../../../connection';
import {
  type DocRecord,
  DocStorageBase,
  type DocUpdate,
} from '../../../storage';
import type { SpaceType } from '../../../utils/universal-id';
import { apis } from './db';

/**
 * @deprecated readonly
 */
export class SqliteV1DocStorage extends DocStorageBase {
export class SqliteV1DocStorage extends DocStorageBase<{
  type: SpaceType;
  id: string;
}> {
  override connection = new DummyConnection();

  get db() {
  private get db() {
    if (!apis) {
      throw new Error('Not in electron context.');
    }

    return apis.db;
    return apis;
  }

  override async pushDocUpdate(update: DocUpdate) {
@@ -29,8 +32,8 @@ export class SqliteV1DocStorage extends DocStorageBase {

  override async getDoc(docId: string) {
    const bin = await this.db.getDocAsUpdates(
      this.spaceType,
      this.spaceId,
      this.options.type,
      this.options.id,
      docId
    );

@@ -41,8 +44,8 @@ export class SqliteV1DocStorage extends DocStorageBase {
    };
  }

  override async deleteDoc(docId: string) {
    await this.db.deleteDoc(this.spaceType, this.spaceId, docId);
  override async deleteDoc() {
    return;
  }

  protected override async getDocSnapshot() {
@@ -1,2 +1,3 @@
export * from './blob';
export { bindNativeDBV1Apis } from './db';
export * from './doc';

@@ -1,2 +1,5 @@
export * from './connection';
export * from './frontend';
export * from './storage';
export * from './sync';
export * from './utils/universal-id';
@@ -1,6 +1,5 @@
import { type Storage, StorageBase, type StorageOptions } from './storage';

export interface AwarenessStorageOptions extends StorageOptions {}
import type { Connection } from '../connection';
import { type Storage } from './storage';

export type AwarenessRecord = {
  docId: string;
@@ -23,13 +22,9 @@ export interface AwarenessStorage extends Storage {
  ): () => void;
}

export abstract class AwarenessStorageBase<
  Options extends AwarenessStorageOptions = AwarenessStorageOptions,
>
  extends StorageBase<Options>
  implements AwarenessStorage
{
  override readonly storageType = 'awareness';
export abstract class AwarenessStorageBase implements AwarenessStorage {
  readonly storageType = 'awareness';
  abstract readonly connection: Connection;

  abstract update(record: AwarenessRecord, origin?: string): Promise<void>;
@@ -1,6 +1,5 @@
import { type Storage, StorageBase, type StorageOptions } from './storage';

export interface BlobStorageOptions extends StorageOptions {}
import type { Connection } from '../connection';
import { type Storage } from './storage';

export interface BlobRecord {
  key: string;
@@ -29,13 +28,9 @@ export interface BlobStorage extends Storage {
  list(signal?: AbortSignal): Promise<ListedBlobRecord[]>;
}

export abstract class BlobStorageBase<
  Options extends BlobStorageOptions = BlobStorageOptions,
>
  extends StorageBase<Options>
  implements BlobStorage
{
  override readonly storageType = 'blob';
export abstract class BlobStorageBase implements BlobStorage {
  readonly storageType = 'blob';
  abstract readonly connection: Connection;

  abstract get(key: string, signal?: AbortSignal): Promise<BlobRecord | null>;
  abstract set(blob: BlobRecord, signal?: AbortSignal): Promise<void>;
@@ -1,10 +1,11 @@
import EventEmitter2 from 'eventemitter2';
import { diffUpdate, encodeStateVectorFromUpdate, mergeUpdates } from 'yjs';

import type { Connection } from '../connection';
import { isEmptyUpdate } from '../utils/is-empty-update';
import type { Locker } from './lock';
import { SingletonLocker } from './lock';
import { type Storage, StorageBase, type StorageOptions } from './storage';
import { type Storage } from './storage';

export interface DocClock {
  docId: string;
@@ -33,13 +34,19 @@ export interface Editor {
  avatarUrl: string | null;
}

export interface DocStorageOptions extends StorageOptions {
export interface DocStorageOptions {
  mergeUpdates?: (updates: Uint8Array[]) => Promise<Uint8Array> | Uint8Array;
  id: string;

  /**
   * open as readonly mode.
   */
  readonlyMode?: boolean;
}

export interface DocStorage extends Storage {
  readonly storageType: 'doc';

  readonly isReadonly: boolean;
  /**
   * Get a doc record with latest binary.
   */
@@ -88,18 +95,22 @@ export interface DocStorage extends Storage {
  ): () => void;
}

export abstract class DocStorageBase<
  Opts extends DocStorageOptions = DocStorageOptions,
>
  extends StorageBase<Opts>
  implements DocStorage
{
export abstract class DocStorageBase<Opts = {}> implements DocStorage {
  get isReadonly(): boolean {
    return this.options.readonlyMode ?? false;
  }
  private readonly event = new EventEmitter2();
  override readonly storageType = 'doc';
  readonly storageType = 'doc';
  abstract readonly connection: Connection;
  protected readonly locker: Locker = new SingletonLocker();
  protected readonly spaceId = this.options.id;

  constructor(protected readonly options: Opts & DocStorageOptions) {}

  async getDoc(docId: string) {
    await using _lock = await this.lockDocForUpdate(docId);
    await using _lock = this.isReadonly
      ? undefined
      : await this.lockDocForUpdate(docId);

    const snapshot = await this.getDocSnapshot(docId);
    const updates = await this.getDocUpdates(docId);
@@ -117,10 +128,13 @@ export abstract class DocStorageBase<
      editor,
    };

    await this.setDocSnapshot(newSnapshot, snapshot);
    // if is readonly, we will not set the new snapshot
    if (!this.isReadonly) {
      await this.setDocSnapshot(newSnapshot, snapshot);

    // always mark updates as merged unless throws
    await this.markUpdatesMerged(docId, updates);
      // always mark updates as merged unless throws
      await this.markUpdatesMerged(docId, updates);
    }

    return newSnapshot;
  }
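Note: the readonly branch changes getDoc semantics: a readonly store still merges snapshot and updates in memory, but takes no lock and never persists the merged result. A condensed restatement of that control flow (not the real implementation; the store shape is invented for illustration):

async function getDocSketch(
  store: {
    isReadonly: boolean;
    lock: (docId: string) => Promise<AsyncDisposable>;
    merge: (docId: string) => Promise<Uint8Array>;
    persist: (docId: string, bin: Uint8Array) => Promise<void>;
  },
  docId: string
) {
  // readonly mode: no lock, read-and-merge only
  await using _lock = store.isReadonly ? undefined : await store.lock(docId);
  const merged = await store.merge(docId);
  if (!store.isReadonly) {
    // writable mode also writes the snapshot back and marks updates merged
    await store.persist(docId, merged);
  }
  return merged;
}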
16
packages/common/nbstore/src/storage/dummy/awareness.ts
Normal file
@@ -0,0 +1,16 @@
import { DummyConnection } from '../../connection';
import { type AwarenessRecord, AwarenessStorageBase } from '../awareness';

export class DummyAwarenessStorage extends AwarenessStorageBase {
  override update(_record: AwarenessRecord, _origin?: string): Promise<void> {
    return Promise.resolve();
  }
  override subscribeUpdate(
    _id: string,
    _onUpdate: (update: AwarenessRecord, origin?: string) => void,
    _onCollect: () => Promise<AwarenessRecord | null>
  ): () => void {
    return () => {};
  }
  override connection = new DummyConnection();
}
32
packages/common/nbstore/src/storage/dummy/blob.ts
Normal file
@@ -0,0 +1,32 @@
import { DummyConnection } from '../../connection';
import {
  type BlobRecord,
  BlobStorageBase,
  type ListedBlobRecord,
} from '../blob';

export class DummyBlobStorage extends BlobStorageBase {
  override get(
    _key: string,
    _signal?: AbortSignal
  ): Promise<BlobRecord | null> {
    return Promise.resolve(null);
  }
  override set(_blob: BlobRecord, _signal?: AbortSignal): Promise<void> {
    return Promise.resolve();
  }
  override delete(
    _key: string,
    _permanently: boolean,
    _signal?: AbortSignal
  ): Promise<void> {
    return Promise.resolve();
  }
  override release(_signal?: AbortSignal): Promise<void> {
    return Promise.resolve();
  }
  override list(_signal?: AbortSignal): Promise<ListedBlobRecord[]> {
    return Promise.resolve([]);
  }
  override connection = new DummyConnection();
}
41
packages/common/nbstore/src/storage/dummy/doc.ts
Normal file
@@ -0,0 +1,41 @@
import { DummyConnection } from '../../connection';
import {
  type DocClock,
  type DocClocks,
  type DocDiff,
  type DocRecord,
  type DocStorage,
  type DocUpdate,
} from '../doc';

export class DummyDocStorage implements DocStorage {
  readonly storageType = 'doc';
  readonly isReadonly = true;
  getDoc(_docId: string): Promise<DocRecord | null> {
    return Promise.resolve(null);
  }
  getDocDiff(_docId: string, _state?: Uint8Array): Promise<DocDiff | null> {
    return Promise.resolve(null);
  }
  pushDocUpdate(update: DocUpdate, _origin?: string): Promise<DocClock> {
    return Promise.resolve({
      docId: update.docId,
      timestamp: new Date(),
    });
  }
  getDocTimestamp(_docId: string): Promise<DocClock | null> {
    return Promise.resolve(null);
  }
  getDocTimestamps(_after?: Date): Promise<DocClocks> {
    return Promise.resolve({});
  }
  deleteDoc(_docId: string): Promise<void> {
    return Promise.resolve();
  }
  subscribeDocUpdate(
    _callback: (update: DocRecord, origin?: string) => void
  ): () => void {
    return () => {};
  }
  connection = new DummyConnection();
}
49
packages/common/nbstore/src/storage/dummy/sync.ts
Normal file
@@ -0,0 +1,49 @@
import { DummyConnection } from '../../connection';
import type { DocClock, DocClocks } from '../doc';
import { SyncStorageBase } from '../sync';

export class DummySyncStorage extends SyncStorageBase {
  override getPeerRemoteClock(
    _peer: string,
    _docId: string
  ): Promise<DocClock | null> {
    return Promise.resolve(null);
  }
  override getPeerRemoteClocks(_peer: string): Promise<DocClocks> {
    return Promise.resolve({});
  }
  override setPeerRemoteClock(_peer: string, _clock: DocClock): Promise<void> {
    return Promise.resolve();
  }
  override getPeerPulledRemoteClock(
    _peer: string,
    _docId: string
  ): Promise<DocClock | null> {
    return Promise.resolve(null);
  }
  override getPeerPulledRemoteClocks(_peer: string): Promise<DocClocks> {
    return Promise.resolve({});
  }
  override setPeerPulledRemoteClock(
    _peer: string,
    _clock: DocClock
  ): Promise<void> {
    return Promise.resolve();
  }
  override getPeerPushedClock(
    _peer: string,
    _docId: string
  ): Promise<DocClock | null> {
    return Promise.resolve(null);
  }
  override getPeerPushedClocks(_peer: string): Promise<DocClocks> {
    return Promise.resolve({});
  }
  override setPeerPushedClock(_peer: string, _clock: DocClock): Promise<void> {
    return Promise.resolve();
  }
  override clearClocks(): Promise<void> {
    return Promise.resolve();
  }
  override connection = new DummyConnection();
}
1
packages/common/nbstore/src/storage/errors/index.ts
Normal file
@@ -0,0 +1 @@
export * from './over-capacity';

@@ -0,0 +1,5 @@
export class OverCapacityError extends Error {
  constructor(public originError?: any) {
    super('Storage over capacity. Origin error: ' + originError);
  }
}
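Note: OverCapacityError is the contract between remote blob storages and the sync layer; BlobSyncImpl (further down) watches for it in uploadBlob and flips isStorageOverCapacity. A sketch of how an implementation might raise it; the quota check itself is illustrative, not from this diff:

// Wrap a provider-specific quota failure so sync code can match on the class.
async function setWithQuotaCheck(doSet: () => Promise<void>) {
  try {
    await doSet();
  } catch (err) {
    if ((err as { name?: string }).name === 'QuotaExceededError') {
      throw new OverCapacityError(err);
    }
    throw err;
  }
}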
@@ -3,56 +3,60 @@ import EventEmitter2 from 'eventemitter2';
import type { AwarenessStorage } from './awareness';
import type { BlobStorage } from './blob';
import type { DocStorage } from './doc';
import type { Storage, StorageType } from './storage';
import { DummyAwarenessStorage } from './dummy/awareness';
import { DummyBlobStorage } from './dummy/blob';
import { DummyDocStorage } from './dummy/doc';
import { DummySyncStorage } from './dummy/sync';
import type { StorageType } from './storage';
import type { SyncStorage } from './sync';

type Storages = DocStorage | BlobStorage | SyncStorage | AwarenessStorage;

export type SpaceStorageOptions = {
  [K in StorageType]?: Storages & { storageType: K };
};

export class SpaceStorage {
  protected readonly storages: Map<StorageType, Storage> = new Map();
  protected readonly storages: {
    [K in StorageType]: Storages & { storageType: K };
  };
  private readonly event = new EventEmitter2();
  private readonly disposables: Set<() => void> = new Set();

  constructor(storages: Storage[] = []) {
    this.storages = new Map(
      storages.map(storage => [storage.storageType, storage])
    );
  }

  tryGet<T extends StorageType>(
    type: T
  ): Extract<Storages, { storageType: T }> | undefined {
    return this.storages.get(type) as unknown as Extract<
      Storages,
      { storageType: T }
    >;
  constructor(storages: SpaceStorageOptions) {
    this.storages = {
      awareness: storages.awareness ?? new DummyAwarenessStorage(),
      blob: storages.blob ?? new DummyBlobStorage(),
      doc: storages.doc ?? new DummyDocStorage(),
      sync: storages.sync ?? new DummySyncStorage(),
    };
  }

  get<T extends StorageType>(type: T): Extract<Storages, { storageType: T }> {
    const storage = this.tryGet(type);
    const storage = this.storages[type];

    if (!storage) {
      throw new Error(`Storage ${type} not registered.`);
    }

    return storage as Extract<Storages, { storageType: T }>;
    return storage as unknown as Extract<Storages, { storageType: T }>;
  }

  connect() {
    Array.from(this.storages.values()).forEach(storage => {
    Object.values(this.storages).forEach(storage => {
      storage.connection.connect();
    });
  }

  disconnect() {
    Array.from(this.storages.values()).forEach(storage => {
    Object.values(this.storages).forEach(storage => {
      storage.connection.disconnect();
    });
  }

  async waitForConnected(signal?: AbortSignal) {
    await Promise.all(
      Array.from(this.storages.values()).map(storage =>
      Object.values(this.storages).map(storage =>
        storage.connection.waitForConnected(signal)
      )
    );
@@ -61,13 +65,13 @@ export class SpaceStorage {
  async destroy() {
    this.disposables.forEach(disposable => disposable());
    this.event.removeAllListeners();
    this.storages.clear();
  }
}

export * from './awareness';
export * from './blob';
export * from './doc';
export * from './errors';
export * from './history';
export * from './storage';
export * from './sync';
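Note: SpaceStorage now takes a keyed options object instead of a storage array, and any slot left out is backed by its dummy counterpart, so get() can no longer fail for an unregistered type in practice. Usage sketch (someDocStorage is a placeholder instance):

// Only a doc storage is supplied; awareness, blob and sync fall back to dummies.
const spaceStorage = new SpaceStorage({ doc: someDocStorage });

spaceStorage.connect();
await spaceStorage.waitForConnected();
spaceStorage.get('doc');  // the provided storage
spaceStorage.get('blob'); // a DummyBlobStorage rather than an error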
@@ -1,120 +1,8 @@
import type { Connection } from '../connection';

export type SpaceType = 'workspace' | 'userspace';
export type StorageType = 'blob' | 'doc' | 'sync' | 'awareness';

export interface StorageOptions {
  peer: string;
  type: SpaceType;
  id: string;
}

export function universalId({ peer, type, id }: StorageOptions) {
  return `@peer(${peer});@type(${type});@id(${id});`;
}

export function isValidSpaceType(type: string): type is SpaceType {
  return type === 'workspace' || type === 'userspace';
}

export function isValidUniversalId(opts: Record<string, string>): boolean {
  const requiredKeys: Array<keyof StorageOptions> = [
    'peer',
    'type',
    'id',
  ] as const;

  for (const key of requiredKeys) {
    if (!opts[key]) {
      return false;
    }
  }

  return isValidSpaceType(opts.type);
}

export function parseUniversalId(id: string) {
  const result: Partial<StorageOptions> = {};
  let key = '';
  let value = '';
  let isInValue = false;

  let i = -1;

  while (++i < id.length) {
    const ch = id[i];
    const nextCh = id[i + 1];

    // when we are in value string, we only care about ch and next char to be [')', ';'] to end the id part
    if (isInValue) {
      if (ch === ')' && nextCh === ';') {
        // @ts-expect-error we know the key is valid
        result[key] = value;
        key = '';
        value = '';
        isInValue = false;
        i++;
        continue;
      }

      value += ch;
      continue;
    }

    if (ch === '@') {
      const keyEnd = id.indexOf('(', i);
      // we find '@' but no '(' in lookahead or '(' is immediately after '@', invalid id
      if (keyEnd === -1 || keyEnd === i + 1) {
        break;
      }

      key = id.slice(i + 1, keyEnd);
      i = keyEnd;
      isInValue = true;
    } else {
      break;
    }
  }

  if (!isValidUniversalId(result)) {
    throw new Error(
      `Invalid universal storage id: ${id}. It should be in format of @peer(\${peer});@type(\${type});@id(\${id});`
    );
  }

  return result as StorageOptions;
}

export interface Storage {
  readonly storageType: StorageType;
  readonly connection: Connection;
  readonly peer: string;
  readonly spaceType: string;
  readonly spaceId: string;
  readonly universalId: string;
}

export abstract class StorageBase<Opts extends StorageOptions = StorageOptions>
  implements Storage
{
  abstract readonly storageType: StorageType;
  abstract readonly connection: Connection;

  get peer() {
    return this.options.peer;
  }

  get spaceType() {
    return this.options.type;
  }

  get spaceId() {
    return this.options.id;
  }

  get universalId() {
    return universalId(this.options);
  }

  constructor(public readonly options: Opts) {}
}
@@ -1,7 +1,6 @@
import type { Connection } from '../connection';
import type { DocClock, DocClocks } from './doc';
import { type Storage, StorageBase, type StorageOptions } from './storage';

export interface SyncStorageOptions extends StorageOptions {}
import { type Storage } from './storage';

export interface SyncStorage extends Storage {
  readonly storageType: 'sync';
@@ -21,13 +20,9 @@ export interface SyncStorage extends Storage {
  clearClocks(): Promise<void>;
}

export abstract class BasicSyncStorage<
  Opts extends SyncStorageOptions = SyncStorageOptions,
>
  extends StorageBase<Opts>
  implements SyncStorage
{
  override readonly storageType = 'sync';
export abstract class SyncStorageBase implements SyncStorage {
  readonly storageType = 'sync';
  abstract readonly connection: Connection;

  abstract getPeerRemoteClock(
    peer: string,
@@ -2,6 +2,7 @@ import type {
  AwarenessRecord,
  AwarenessStorage,
} from '../../storage/awareness';
import type { PeerStorageOptions } from '../types';

export interface AwarenessSync {
  update(record: AwarenessRecord, origin?: string): Promise<void>;
@@ -13,14 +14,13 @@ export interface AwarenessSync {
}

export class AwarenessSyncImpl implements AwarenessSync {
  constructor(
    readonly local: AwarenessStorage,
    readonly remotes: AwarenessStorage[]
  ) {}
  constructor(readonly storages: PeerStorageOptions<AwarenessStorage>) {}

  async update(record: AwarenessRecord, origin?: string) {
    await Promise.all(
      [this.local, ...this.remotes].map(peer => peer.update(record, origin))
      [this.storages.local, ...Object.values(this.storages.remotes)].map(peer =>
        peer.update(record, origin)
      )
    );
  }

@@ -29,9 +29,10 @@ export class AwarenessSyncImpl implements AwarenessSync {
    onUpdate: (update: AwarenessRecord, origin?: string) => void,
    onCollect: () => Promise<AwarenessRecord | null>
  ): () => void {
    const unsubscribes = [this.local, ...this.remotes].map(peer =>
      peer.subscribeUpdate(id, onUpdate, onCollect)
    );
    const unsubscribes = [
      this.storages.local,
      ...Object.values(this.storages.remotes),
    ].map(peer => peer.subscribeUpdate(id, onUpdate, onCollect));
    return () => {
      unsubscribes.forEach(unsubscribe => unsubscribe());
    };
@@ -1,34 +1,48 @@
import EventEmitter2 from 'eventemitter2';
import { difference } from 'lodash-es';
import { BehaviorSubject, type Observable } from 'rxjs';

import type { BlobRecord, BlobStorage } from '../../storage';
import { OverCapacityError } from '../../storage';
import { MANUALLY_STOP, throwIfAborted } from '../../utils/throw-if-aborted';
import type { PeerStorageOptions } from '../types';

export interface BlobSyncState {
  isStorageOverCapacity: boolean;
}

export interface BlobSync {
  readonly state$: Observable<BlobSyncState>;
  downloadBlob(
    blobId: string,
    signal?: AbortSignal
  ): Promise<BlobRecord | null>;
  uploadBlob(blob: BlobRecord, signal?: AbortSignal): Promise<void>;
  fullSync(signal?: AbortSignal): Promise<void>;
  setMaxBlobSize(size: number): void;
  onReachedMaxBlobSize(cb: (byteSize: number) => void): () => void;
}

export class BlobSyncImpl implements BlobSync {
  readonly state$ = new BehaviorSubject<BlobSyncState>({
    isStorageOverCapacity: false,
  });
  private abort: AbortController | null = null;
  private maxBlobSize: number = 1024 * 1024 * 100; // 100MB
  readonly event = new EventEmitter2();

  constructor(
    readonly local: BlobStorage,
    readonly remotes: BlobStorage[]
  ) {}
  constructor(readonly storages: PeerStorageOptions<BlobStorage>) {}

  async downloadBlob(blobId: string, signal?: AbortSignal) {
    const localBlob = await this.local.get(blobId, signal);
    const localBlob = await this.storages.local.get(blobId, signal);
    if (localBlob) {
      return localBlob;
    }

    for (const storage of this.remotes) {
    for (const storage of Object.values(this.storages.remotes)) {
      const data = await storage.get(blobId, signal);
      if (data) {
        await this.local.set(data, signal);
        await this.storages.local.set(data, signal);
        return data;
      }
    }
@@ -36,21 +50,35 @@ export class BlobSyncImpl implements BlobSync {
  }

  async uploadBlob(blob: BlobRecord, signal?: AbortSignal) {
    await this.local.set(blob);
    if (blob.data.length > this.maxBlobSize) {
      this.event.emit('abort-large-blob', blob.data.length);
      console.error('blob over limit, abort set');
    }

    await this.storages.local.set(blob);
    await Promise.allSettled(
      this.remotes.map(remote => remote.set(blob, signal))
      Object.values(this.storages.remotes).map(async remote => {
        try {
          return await remote.set(blob, signal);
        } catch (err) {
          if (err instanceof OverCapacityError) {
            this.state$.next({ isStorageOverCapacity: true });
          }
          throw err;
        }
      })
    );
  }

  private async sync(signal?: AbortSignal) {
  async fullSync(signal?: AbortSignal) {
    throwIfAborted(signal);

    for (const remote of this.remotes) {
    for (const [remotePeer, remote] of Object.entries(this.storages.remotes)) {
      let localList: string[] = [];
      let remoteList: string[] = [];

      try {
        localList = (await this.local.list(signal)).map(b => b.key);
        localList = (await this.storages.local.list(signal)).map(b => b.key);
        throwIfAborted(signal);
        remoteList = (await remote.list(signal)).map(b => b.key);
        throwIfAborted(signal);
@@ -65,7 +93,7 @@ export class BlobSyncImpl implements BlobSync {
      const needUpload = difference(localList, remoteList);
      for (const key of needUpload) {
        try {
          const data = await this.local.get(key, signal);
          const data = await this.storages.local.get(key, signal);
          throwIfAborted(signal);
          if (data) {
            await remote.set(data, signal);
@@ -76,7 +104,7 @@ export class BlobSyncImpl implements BlobSync {
            throw err;
          }
          console.error(
            `error when sync ${key} from [${this.local.peer}] to [${remote.peer}]`,
            `error when sync ${key} from [local] to [${remotePeer}]`,
            err
          );
        }
@@ -89,7 +117,7 @@ export class BlobSyncImpl implements BlobSync {
          const data = await remote.get(key, signal);
          throwIfAborted(signal);
          if (data) {
            await this.local.set(data, signal);
            await this.storages.local.set(data, signal);
            throwIfAborted(signal);
          }
        } catch (err) {
@@ -97,7 +125,7 @@ export class BlobSyncImpl implements BlobSync {
            throw err;
          }
          console.error(
            `error when sync ${key} from [${remote.peer}] to [${this.local.peer}]`,
            `error when sync ${key} from [${remotePeer}] to [local]`,
            err
          );
        }
@@ -107,13 +135,13 @@ export class BlobSyncImpl implements BlobSync {

  start() {
    if (this.abort) {
      this.abort.abort();
      this.abort.abort(MANUALLY_STOP);
    }

    const abort = new AbortController();
    this.abort = abort;

    this.sync(abort.signal).catch(error => {
    this.fullSync(abort.signal).catch(error => {
      if (error === MANUALLY_STOP) {
        return;
      }
@@ -130,4 +158,15 @@ export class BlobSyncImpl implements BlobSync {
    // TODO: implement
    return () => {};
  }

  setMaxBlobSize(size: number): void {
    this.maxBlobSize = size;
  }

  onReachedMaxBlobSize(cb: (byteSize: number) => void): () => void {
    this.event.on('abort-large-blob', cb);
    return () => {
      this.event.off('abort-large-blob', cb);
    };
  }
}
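Note: usage sketch for the new size guard on BlobSyncImpl; the limit defaults to 100MB and listeners are fed through the internal 'abort-large-blob' event shown above:

declare const blobSync: BlobSyncImpl; // placeholder instance

blobSync.setMaxBlobSize(10 * 1024 * 1024); // lower the 100MB default to 10MB

const unsubscribe = blobSync.onReachedMaxBlobSize(byteSize => {
  console.warn(`blob of ${byteSize} bytes exceeds the configured limit`);
});

unsubscribe(); // detach when no longer interested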
@@ -1,17 +1,23 @@
import type { Observable } from 'rxjs';
import { combineLatest, map } from 'rxjs';
import { combineLatest, map, of } from 'rxjs';

import type { DocStorage, SyncStorage } from '../../storage';
import { DummyDocStorage } from '../../storage/dummy/doc';
import { DummySyncStorage } from '../../storage/dummy/sync';
import { MANUALLY_STOP } from '../../utils/throw-if-aborted';
import type { PeerStorageOptions } from '../types';
import { DocSyncPeer } from './peer';

export interface DocSyncState {
  total: number;
  syncing: number;
  synced: boolean;
  retrying: boolean;
  errorMessage: string | null;
}

export interface DocSyncDocState {
  synced: boolean;
  syncing: boolean;
  retrying: boolean;
  errorMessage: string | null;
@@ -24,43 +30,70 @@ export interface DocSync {
}

export class DocSyncImpl implements DocSync {
  private readonly peers: DocSyncPeer[] = this.remotes.map(
    remote => new DocSyncPeer(this.local, this.sync, remote)
  private readonly peers: DocSyncPeer[] = Object.entries(
    this.storages.remotes
  ).map(
    ([peerId, remote]) =>
      new DocSyncPeer(peerId, this.storages.local, this.sync, remote)
  );
  private abort: AbortController | null = null;

  readonly state$: Observable<DocSyncState> = combineLatest(
    this.peers.map(peer => peer.peerState$)
  ).pipe(
    map(allPeers => ({
      total: allPeers.reduce((acc, peer) => acc + peer.total, 0),
      syncing: allPeers.reduce((acc, peer) => acc + peer.syncing, 0),
      retrying: allPeers.some(peer => peer.retrying),
      errorMessage:
        allPeers.find(peer => peer.errorMessage)?.errorMessage ?? null,
    }))
  );

  constructor(
    readonly local: DocStorage,
    readonly sync: SyncStorage,
    readonly remotes: DocStorage[]
  ) {}

  docState$(docId: string): Observable<DocSyncDocState> {
    return combineLatest(this.peers.map(peer => peer.docState$(docId))).pipe(
  get state$() {
    return combineLatest(this.peers.map(peer => peer.peerState$)).pipe(
      map(allPeers => ({
        total: allPeers.reduce((acc, peer) => Math.max(acc, peer.total), 0),
        syncing: allPeers.reduce((acc, peer) => Math.max(acc, peer.syncing), 0),
        synced: allPeers.every(peer => peer.synced),
        retrying: allPeers.some(peer => peer.retrying),
        errorMessage:
          allPeers.find(peer => peer.errorMessage)?.errorMessage ?? null,
        retrying: allPeers.some(peer => peer.retrying),
        syncing: allPeers.some(peer => peer.syncing),
      }))
    ) as Observable<DocSyncState>;
  }

  constructor(
    readonly storages: PeerStorageOptions<DocStorage>,
    readonly sync: SyncStorage
  ) {}

  /**
   * for testing
   */
  static get dummy() {
    return new DocSyncImpl(
      {
        local: new DummyDocStorage(),
        remotes: {},
      },
      new DummySyncStorage()
    );
  }

  docState$(docId: string): Observable<DocSyncDocState> {
    if (this.peers.length === 0) {
      return of({
        errorMessage: null,
        retrying: false,
        syncing: false,
        synced: true,
      });
    }
    return combineLatest(this.peers.map(peer => peer.docState$(docId))).pipe(
      map(allPeers => {
        return {
          errorMessage:
            allPeers.find(peer => peer.errorMessage)?.errorMessage ?? null,
          retrying: allPeers.some(peer => peer.retrying),
          syncing: allPeers.some(peer => peer.syncing),
          synced: allPeers.every(peer => peer.synced),
        };
      })
    );
  }

  start() {
    if (this.abort) {
      this.abort.abort();
      this.abort.abort(MANUALLY_STOP);
    }
    const abort = new AbortController();
    this.abort = abort;
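Note: the new static dummy getter gives tests a DocSyncImpl with no remote peers, so docState$ short-circuits to a synced state. Usage sketch:

const sync = DocSyncImpl.dummy;

sync.docState$('doc-1').subscribe(state => {
  // no peers: { synced: true, syncing: false, retrying: false, errorMessage: null }
  console.log(state.synced); // true
});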
@@ -43,6 +43,7 @@ interface Status {
  remoteClocks: ClockMap;
  syncing: boolean;
  retrying: boolean;
  skipped: boolean;
  errorMessage: string | null;
}

@@ -50,11 +51,13 @@ interface PeerState {
  total: number;
  syncing: number;
  retrying: boolean;
  synced: boolean;
  errorMessage: string | null;
}

interface PeerDocState {
  syncing: boolean;
  synced: boolean;
  retrying: boolean;
  errorMessage: string | null;
}
@@ -92,10 +95,11 @@ export class DocSyncPeer {
  /**
   * random unique id for recognize self in "update" event
   */
  private readonly uniqueId = `sync:${this.local.universalId}:${this.remote.universalId}:${nanoid()}`;
  private readonly uniqueId = `sync:${this.peerId}:${nanoid()}`;
  private readonly prioritySettings = new Map<string, number>();

  constructor(
    readonly peerId: string,
    readonly local: DocStorage,
    readonly syncMetadata: SyncStorage,
    readonly remote: DocStorage,
@@ -110,43 +114,59 @@ export class DocSyncPeer {
    remoteClocks: new ClockMap(new Map()),
    syncing: false,
    retrying: false,
    skipped: false,
    errorMessage: null,
  };
  private readonly statusUpdatedSubject$ = new Subject<string | true>();

  peerState$ = new Observable<PeerState>(subscribe => {
    const next = () => {
      if (!this.status.syncing) {
        // if syncing = false, jobMap is empty
        subscribe.next({
          total: this.status.docs.size,
          syncing: this.status.docs.size,
          retrying: this.status.retrying,
          errorMessage: this.status.errorMessage,
        });
      } else {
        const syncing = this.status.jobMap.size;
        subscribe.next({
          total: this.status.docs.size,
          syncing: syncing,
          retrying: this.status.retrying,
          errorMessage: this.status.errorMessage,
        });
      }
    };
    next();
    return this.statusUpdatedSubject$.subscribe(() => {
  get peerState$() {
    return new Observable<PeerState>(subscribe => {
      const next = () => {
        if (this.status.skipped) {
          subscribe.next({
            total: 0,
            syncing: 0,
            synced: true,
            retrying: false,
            errorMessage: null,
          });
        } else if (!this.status.syncing) {
          // if syncing = false, jobMap is empty
          subscribe.next({
            total: this.status.docs.size,
            syncing: this.status.docs.size,
            synced: false,
            retrying: this.status.retrying,
            errorMessage: this.status.errorMessage,
          });
        } else {
          const syncing = this.status.jobMap.size;
          subscribe.next({
            total: this.status.docs.size,
            syncing: syncing,
            retrying: this.status.retrying,
            errorMessage: this.status.errorMessage,
            synced: syncing === 0,
          });
        }
      };
      next();
      return this.statusUpdatedSubject$.subscribe(() => {
        next();
      });
    });
  }

  docState$(docId: string) {
    return new Observable<PeerDocState>(subscribe => {
      const next = () => {
        const syncing =
          !this.status.connectedDocs.has(docId) ||
          this.status.jobMap.has(docId);

        subscribe.next({
          syncing:
            !this.status.connectedDocs.has(docId) ||
            this.status.jobMap.has(docId),
          syncing: syncing,
          synced: !syncing,
          retrying: this.status.retrying,
          errorMessage: this.status.errorMessage,
        });
@@ -161,22 +181,21 @@ export class DocSyncPeer {
  private readonly jobs = createJobErrorCatcher({
    connect: async (docId: string, signal?: AbortSignal) => {
      const pushedClock =
        (await this.syncMetadata.getPeerPushedClock(this.remote.peer, docId))
        (await this.syncMetadata.getPeerPushedClock(this.peerId, docId))
          ?.timestamp ?? null;
      const clock = await this.local.getDocTimestamp(docId);

      throwIfAborted(signal);
      if (pushedClock === null || pushedClock !== clock?.timestamp) {
      if (
        !this.remote.isReadonly &&
        (pushedClock === null || pushedClock !== clock?.timestamp)
      ) {
        await this.jobs.pullAndPush(docId, signal);
      } else {
        // no need to push
        const pulled =
          (
            await this.syncMetadata.getPeerPulledRemoteClock(
              this.remote.peer,
              docId
            )
          )?.timestamp ?? null;
          (await this.syncMetadata.getPeerPulledRemoteClock(this.peerId, docId))
            ?.timestamp ?? null;
        if (pulled === null || pulled !== this.status.remoteClocks.get(docId)) {
          await this.jobs.pull(docId, signal);
        }
@@ -214,7 +233,7 @@ export class DocSyncPeer {
        });
      }
      throwIfAborted(signal);
      await this.syncMetadata.setPeerPushedClock(this.remote.peer, {
      await this.syncMetadata.setPeerPushedClock(this.peerId, {
        docId,
        timestamp: maxClock,
      });
@@ -249,7 +268,7 @@ export class DocSyncPeer {
        this.uniqueId
      );
      throwIfAborted(signal);
      await this.syncMetadata.setPeerPulledRemoteClock(this.remote.peer, {
      await this.syncMetadata.setPeerPulledRemoteClock(this.peerId, {
        docId,
        timestamp: remoteClock,
      });
@@ -273,7 +292,7 @@ export class DocSyncPeer {
        });
      }
      throwIfAborted(signal);
      await this.syncMetadata.setPeerPushedClock(this.remote.peer, {
      await this.syncMetadata.setPeerPushedClock(this.peerId, {
        docId,
        timestamp: localClock,
      });
@@ -294,7 +313,7 @@ export class DocSyncPeer {
          remoteClock,
        });
      }
      await this.syncMetadata.setPeerPushedClock(this.remote.peer, {
      await this.syncMetadata.setPeerPushedClock(this.peerId, {
        docId,
        timestamp: localDocRecord.timestamp,
      });
@@ -322,7 +341,7 @@ export class DocSyncPeer {
        this.uniqueId
      );
      throwIfAborted(signal);
      await this.syncMetadata.setPeerPulledRemoteClock(this.remote.peer, {
      await this.syncMetadata.setPeerPulledRemoteClock(this.peerId, {
        docId,
        timestamp: remoteClock,
      });
@@ -360,7 +379,7 @@ export class DocSyncPeer {
      );
      throwIfAborted(signal);

      await this.syncMetadata.setPeerPulledRemoteClock(this.remote.peer, {
      await this.syncMetadata.setPeerPulledRemoteClock(this.peerId, {
        docId,
        timestamp: remoteClock,
      });
@@ -372,7 +391,7 @@ export class DocSyncPeer {
    updateRemoteClock: async (docId: string, remoteClock: Date) => {
      const updated = this.status.remoteClocks.setIfBigger(docId, remoteClock);
      if (updated) {
        await this.syncMetadata.setPeerRemoteClock(this.remote.peer, {
        await this.syncMetadata.setPeerRemoteClock(this.peerId, {
          docId,
          timestamp: remoteClock,
        });
@@ -455,6 +474,7 @@ export class DocSyncPeer {
      jobMap: new Map(),
      remoteClocks: new ClockMap(new Map()),
      syncing: false,
      skipped: false,
      // tell ui to show retrying status
      retrying: true,
      // error message from last retry
@@ -482,6 +502,17 @@ export class DocSyncPeer {

  private async retryLoop(signal?: AbortSignal) {
    throwIfAborted(signal);
    if (this.local.isReadonly) {
      // Local is readonly, skip sync
      this.status.skipped = true;
      this.statusUpdatedSubject$.next(true);
      await new Promise((_, reject) => {
        signal?.addEventListener('abort', reason => {
          reject(reason);
        });
      });
      return;
    }
    const abort = new AbortController();

    signal?.addEventListener('abort', reason => {
@@ -536,8 +567,8 @@ export class DocSyncPeer {
          if (
            origin === this.uniqueId ||
            origin?.startsWith(
              `sync:${this.local.peer}:${this.remote.peer}:`
              // skip if local and remote is same
              `sync:${this.peerId}:`
              // skip if peerId is same
            )
          ) {
            return;
@@ -572,7 +603,7 @@ export class DocSyncPeer {

      // get cached clocks from metadata
      const cachedClocks = await this.syncMetadata.getPeerRemoteClocks(
        this.remote.peer
        this.peerId
      );
      throwIfAborted(signal);
      for (const [id, v] of Object.entries(cachedClocks)) {
@@ -1,65 +1,63 @@
import { combineLatest, map, type Observable, of } from 'rxjs';
import { map, type Observable } from 'rxjs';

import type {
  AwarenessStorage,
  BlobStorage,
  DocStorage,
  SpaceStorage,
} from '../storage';
import type { SpaceStorage } from '../storage';
import { AwarenessSyncImpl } from './awareness';
import { BlobSyncImpl } from './blob';
import { DocSyncImpl, type DocSyncState } from './doc';
import type { PeerStorageOptions } from './types';

export type { BlobSyncState } from './blob';
export type { DocSyncDocState, DocSyncState } from './doc';

export interface SyncState {
  doc?: DocSyncState;
}

export class Sync {
  readonly doc: DocSyncImpl | null;
  readonly blob: BlobSyncImpl | null;
  readonly awareness: AwarenessSyncImpl | null;
  readonly doc: DocSyncImpl;
  readonly blob: BlobSyncImpl;
  readonly awareness: AwarenessSyncImpl;

  readonly state$: Observable<SyncState>;

  constructor(
    readonly local: SpaceStorage,
    readonly peers: SpaceStorage[]
  ) {
    const doc = local.tryGet('doc');
    const blob = local.tryGet('blob');
    const sync = local.tryGet('sync');
    const awareness = local.tryGet('awareness');
  constructor(readonly storages: PeerStorageOptions<SpaceStorage>) {
    const doc = storages.local.get('doc');
    const blob = storages.local.get('blob');
    const sync = storages.local.get('sync');
    const awareness = storages.local.get('awareness');

    this.doc =
      doc && sync
        ? new DocSyncImpl(
            doc,
            sync,
            peers
              .map(peer => peer.tryGet('doc'))
              .filter((v): v is DocStorage => !!v)
          )
        : null;
    this.blob = blob
      ? new BlobSyncImpl(
          blob,
          peers
            .map(peer => peer.tryGet('blob'))
            .filter((v): v is BlobStorage => !!v)
        )
      : null;
    this.awareness = awareness
      ? new AwarenessSyncImpl(
          awareness,
          peers
            .map(peer => peer.tryGet('awareness'))
            .filter((v): v is AwarenessStorage => !!v)
        )
      : null;

    this.state$ = combineLatest([this.doc?.state$ ?? of(undefined)]).pipe(
      map(([doc]) => ({ doc }))
    this.doc = new DocSyncImpl(
      {
        local: doc,
        remotes: Object.fromEntries(
          Object.entries(storages.remotes).map(([peerId, remote]) => [
            peerId,
            remote.get('doc'),
          ])
        ),
      },
      sync
    );
    this.blob = new BlobSyncImpl({
      local: blob,
      remotes: Object.fromEntries(
        Object.entries(storages.remotes).map(([peerId, remote]) => [
          peerId,
          remote.get('blob'),
        ])
      ),
    });
    this.awareness = new AwarenessSyncImpl({
      local: awareness,
      remotes: Object.fromEntries(
        Object.entries(storages.remotes).map(([peerId, remote]) => [
          peerId,
          remote.get('awareness'),
        ])
      ),
    });

    this.state$ = this.doc.state$.pipe(map(doc => ({ doc })));
  }

  start() {
4
packages/common/nbstore/src/sync/types.ts
Normal file
@@ -0,0 +1,4 @@
export interface PeerStorageOptions<S> {
  local: S;
  remotes: Record<string, S>;
}
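Note: PeerStorageOptions is the shape every sync implementation now receives: one local storage plus remotes keyed by peer id. Example value (the peer id and storage instances are placeholders):

declare const localDoc: DocStorage;
declare const cloudDoc: DocStorage;

const storages: PeerStorageOptions<DocStorage> = {
  local: localDoc,
  remotes: {
    'affine-cloud': cloudDoc, // peer id -> that peer's storage
  },
};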
@@ -1,6 +1,6 @@
import { describe, expect, it } from 'vitest';

import { parseUniversalId, universalId } from '../storage';
import { parseUniversalId, universalId } from '../universal-id';

describe('parseUniversalId', () => {
  it('should generate universal id', () => {
93
packages/common/nbstore/src/utils/universal-id.ts
Normal file
@@ -0,0 +1,93 @@
export type SpaceType = 'workspace' | 'userspace';

export function universalId({
  peer,
  type,
  id,
}: {
  peer: string;
  type: SpaceType;
  id: string;
}) {
  return `@peer(${peer});@type(${type});@id(${id});`;
}

export function isValidSpaceType(type: string): type is SpaceType {
  return type === 'workspace' || type === 'userspace';
}

export function isValidUniversalId(opts: Record<string, string>): boolean {
  const requiredKeys = ['peer', 'type', 'id'] as const;

  for (const key of requiredKeys) {
    if (!opts[key]) {
      return false;
    }
  }

  return isValidSpaceType(opts.type);
}

export function parseUniversalId(id: string): {
  peer: string;
  type: SpaceType;
  id: string;
} {
  const result: Partial<{
    peer: string;
    type: SpaceType;
    id: string;
  }> = {};
  let key = '';
  let value = '';
  let isInValue = false;

  let i = -1;

  while (++i < id.length) {
    const ch = id[i];
    const nextCh = id[i + 1];

    // when we are in value string, we only care about ch and next char to be [')', ';'] to end the id part
    if (isInValue) {
      if (ch === ')' && nextCh === ';') {
        // @ts-expect-error we know the key is valid
        result[key] = value;
        key = '';
        value = '';
        isInValue = false;
        i++;
        continue;
      }

      value += ch;
      continue;
    }

    if (ch === '@') {
      const keyEnd = id.indexOf('(', i);
      // we find '@' but no '(' in lookahead or '(' is immediately after '@', invalid id
      if (keyEnd === -1 || keyEnd === i + 1) {
        break;
      }

      key = id.slice(i + 1, keyEnd);
      i = keyEnd;
      isInValue = true;
    } else {
      break;
    }
  }

  if (!isValidUniversalId(result)) {
    throw new Error(
      `Invalid universal storage id: ${id}. It should be in format of @peer(\${peer});@type(\${type});@id(\${id});`
    );
  }

  return result as {
    peer: string;
    type: SpaceType;
    id: string;
  };
}
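Note: a round-trip example for the relocated helpers; malformed input hits the throw branch shown above:

const id = universalId({ peer: 'cloud', type: 'workspace', id: 'ws1' });
// '@peer(cloud);@type(workspace);@id(ws1);'

parseUniversalId(id);
// { peer: 'cloud', type: 'workspace', id: 'ws1' }

parseUniversalId('not-an-id');
// throws: Invalid universal storage id: not-an-id. ...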
||||
@@ -1,54 +1,58 @@
|
||||
import type { OpClient } from '@toeverything/infra/op';
|
||||
|
||||
import { DummyConnection } from '../connection';
|
||||
import { DocFrontend } from '../frontend/doc';
|
||||
import { AwarenessFrontend, BlobFrontend, DocFrontend } from '../frontend';
|
||||
import {
|
||||
type AwarenessRecord,
|
||||
type AwarenessStorage,
|
||||
type BlobRecord,
|
||||
type BlobStorage,
|
||||
type DocRecord,
|
||||
type DocStorage,
|
||||
type DocUpdate,
|
||||
type ListedBlobRecord,
|
||||
type StorageOptions,
|
||||
universalId,
|
||||
} from '../storage';
|
||||
import type { AwarenessSync } from '../sync/awareness';
|
||||
import type { BlobSync } from '../sync/blob';
|
||||
import type { DocSync } from '../sync/doc';
|
||||
import type { WorkerOps } from './ops';
|
||||
import type { WorkerInitOptions, WorkerOps } from './ops';
|
||||
|
||||
export type { WorkerInitOptions } from './ops';
|
||||
|
||||
export class WorkerClient {
|
||||
constructor(
|
||||
private readonly client: OpClient<WorkerOps>,
|
||||
private readonly options: StorageOptions
|
||||
) {}
|
||||
options: WorkerInitOptions
|
||||
) {
|
||||
client.listen();
|
||||
this.client.call('worker.init', options).catch(err => {
|
||||
console.error('error initializing worker', err);
|
||||
});
|
||||
this.docStorage = new WorkerDocStorage(this.client);
|
||||
this.blobStorage = new WorkerBlobStorage(this.client);
|
||||
this.docSync = new WorkerDocSync(this.client);
|
||||
this.blobSync = new WorkerBlobSync(this.client);
|
||||
this.awarenessSync = new WorkerAwarenessSync(this.client);
|
||||
this.docFrontend = new DocFrontend(this.docStorage, this.docSync);
|
||||
this.blobFrontend = new BlobFrontend(this.blobStorage, this.blobSync);
|
||||
this.awarenessFrontend = new AwarenessFrontend(this.awarenessSync);
|
||||
}
|
||||
|
||||
readonly docStorage = new WorkerDocStorage(this.client, this.options);
|
||||
readonly blobStorage = new WorkerBlobStorage(this.client, this.options);
|
||||
readonly awarenessStorage = new WorkerAwarenessStorage(
|
||||
this.client,
|
||||
this.options
|
||||
);
|
||||
readonly docSync = new WorkerDocSync(this.client);
|
||||
readonly blobSync = new WorkerBlobSync(this.client);
|
||||
readonly awarenessSync = new WorkerAwarenessSync(this.client);
|
||||
private readonly docStorage: WorkerDocStorage;
|
||||
private readonly blobStorage: WorkerBlobStorage;
|
||||
private readonly docSync: WorkerDocSync;
|
||||
private readonly blobSync: WorkerBlobSync;
|
||||
private readonly awarenessSync: WorkerAwarenessSync;
|
||||
|
||||
readonly docFrontend = new DocFrontend(this.docStorage, this.docSync);
|
||||
readonly docFrontend: DocFrontend;
|
||||
readonly blobFrontend: BlobFrontend;
|
||||
readonly awarenessFrontend: AwarenessFrontend;
|
||||
}
|
||||
|
||||
class WorkerDocStorage implements DocStorage {
|
||||
constructor(
|
||||
private readonly client: OpClient<WorkerOps>,
|
||||
private readonly options: StorageOptions
|
||||
) {}
|
||||
constructor(private readonly client: OpClient<WorkerOps>) {}
|
||||
|
||||
readonly peer = this.options.peer;
|
||||
readonly spaceType = this.options.type;
|
||||
readonly spaceId = this.options.id;
|
||||
readonly universalId = universalId(this.options);
|
||||
readonly storageType = 'doc';
|
||||
readonly isReadonly = false;
|
||||
|
||||
async getDoc(docId: string) {
|
||||
return this.client.call('docStorage.getDoc', docId);
|
||||
@@ -119,16 +123,9 @@ class WorkerDocConnection extends DummyConnection {
|
||||
}
|
||||
|
||||
class WorkerBlobStorage implements BlobStorage {
|
||||
constructor(
|
||||
private readonly client: OpClient<WorkerOps>,
|
||||
private readonly options: StorageOptions
|
||||
) {}
|
||||
constructor(private readonly client: OpClient<WorkerOps>) {}
|
||||
|
||||
readonly storageType = 'blob';
|
||||
readonly peer = this.options.peer;
|
||||
readonly spaceType = this.options.type;
|
||||
readonly spaceId = this.options.id;
|
||||
readonly universalId = universalId(this.options);
|
||||
|
||||
get(key: string, _signal?: AbortSignal): Promise<BlobRecord | null> {
|
||||
return this.client.call('blobStorage.getBlob', key);
|
||||
@@ -156,63 +153,6 @@ class WorkerBlobStorage implements BlobStorage {
|
||||
connection = new DummyConnection();
|
||||
}
|
||||
|
||||
class WorkerAwarenessStorage implements AwarenessStorage {
|
||||
constructor(
|
||||
private readonly client: OpClient<WorkerOps>,
|
||||
private readonly options: StorageOptions
|
||||
) {}
|
||||
|
||||
readonly storageType = 'awareness';
|
||||
readonly peer = this.options.peer;
|
||||
readonly spaceType = this.options.type;
|
||||
readonly spaceId = this.options.id;
|
||||
readonly universalId = universalId(this.options);
|
||||
|
||||
update(record: AwarenessRecord, origin?: string): Promise<void> {
|
||||
return this.client.call('awarenessStorage.update', {
|
||||
awareness: record,
|
||||
origin,
|
||||
});
|
||||
}
|
||||
subscribeUpdate(
|
||||
id: string,
|
||||
onUpdate: (update: AwarenessRecord, origin?: string) => void,
|
||||
onCollect: () => Promise<AwarenessRecord | null>
|
||||
): () => void {
|
||||
const subscription = this.client
|
||||
.ob$('awarenessStorage.subscribeUpdate', id)
|
||||
.subscribe({
|
||||
next: update => {
|
||||
if (update.type === 'awareness-update') {
|
||||
onUpdate(update.awareness, update.origin);
|
||||
}
|
||||
if (update.type === 'awareness-collect') {
|
||||
onCollect()
|
||||
.then(record => {
|
||||
if (record) {
|
||||
this.client
|
||||
.call('awarenessStorage.collect', {
|
||||
awareness: record,
|
||||
collectId: update.collectId,
|
||||
})
|
||||
.catch(err => {
|
||||
console.error('error feedback collected awareness', err);
|
||||
});
|
||||
}
|
||||
})
|
||||
.catch(err => {
|
||||
console.error('error collecting awareness', err);
|
||||
});
|
||||
}
|
||||
},
|
||||
});
|
||||
return () => {
|
||||
subscription.unsubscribe();
|
||||
};
|
||||
}
|
||||
connection = new DummyConnection();
|
||||
}
|
||||
|
||||
class WorkerDocSync implements DocSync {
|
||||
constructor(private readonly client: OpClient<WorkerOps>) {}
|
||||
|
||||
@@ -234,6 +174,22 @@ class WorkerDocSync implements DocSync {
|
||||
|
||||
class WorkerBlobSync implements BlobSync {
  constructor(private readonly client: OpClient<WorkerOps>) {}

  readonly state$ = this.client.ob$('blobSync.state');

  setMaxBlobSize(size: number): void {
    this.client.call('blobSync.setMaxBlobSize', size).catch(err => {
      console.error('error setting max blob size', err);
    });
  }

  onReachedMaxBlobSize(cb: (byteSize: number) => void): () => void {
    const subscription = this.client
      .ob$('blobSync.onReachedMaxBlobSize')
      .subscribe(byteSize => {
        cb(byteSize);
      });
    return () => {
      subscription.unsubscribe();
    };
  }

  downloadBlob(
    blobId: string,
    _signal?: AbortSignal

@@ -243,6 +199,27 @@ class WorkerBlobSync implements BlobSync {
  uploadBlob(blob: BlobRecord, _signal?: AbortSignal): Promise<void> {
    return this.client.call('blobSync.uploadBlob', blob);
  }

  fullSync(signal?: AbortSignal): Promise<void> {
    return new Promise((resolve, reject) => {
      const abortListener = () => {
        reject(signal?.reason);
        subscription.unsubscribe();
      };

      signal?.addEventListener('abort', abortListener);

      const subscription = this.client.ob$('blobSync.fullSync').subscribe({
        next() {
          signal?.removeEventListener('abort', abortListener);
          resolve();
        },
        error(err) {
          signal?.removeEventListener('abort', abortListener);
          reject(err);
        },
      });
    });
  }
}
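
fullSync bridges the op observable back into a promise: the abort listener rejects with signal.reason and unsubscribes, and both next and error remove the listener so it cannot fire after settlement. A sketch of driving it with a deadline; the 30-second budget is an arbitrary assumption:

async function syncWithDeadline(blobSync: BlobSync): Promise<void> {
  const controller = new AbortController();
  const deadline = setTimeout(
    () => controller.abort(new Error('blob full sync timed out')),
    30_000
  );
  try {
    // rejects with the abort reason if the deadline fires first
    await blobSync.fullSync(controller.signal);
  } finally {
    clearTimeout(deadline);
  }
}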

class WorkerAwarenessSync implements AwarenessSync {

@@ -1,27 +1,29 @@
import type { OpConsumer } from '@toeverything/infra/op';
import { Observable } from 'rxjs';

import { getAvailableStorageImplementations } from '../impls';
import { SpaceStorage, type StorageOptions } from '../storage';
import { type StorageConstructor } from '../impls';
import { SpaceStorage } from '../storage';
import type { AwarenessRecord } from '../storage/awareness';
import { Sync } from '../sync';
import type { WorkerOps } from './ops';
import type { PeerStorageOptions } from '../sync/types';
import type { WorkerInitOptions, WorkerOps } from './ops';

export type { WorkerOps };

export class WorkerConsumer {
  private remotes: SpaceStorage[] = [];
  private local: SpaceStorage | null = null;
  private storages: PeerStorageOptions<SpaceStorage> | null = null;
  private sync: Sync | null = null;

  get ensureLocal() {
    if (!this.local) {
    if (!this.storages) {
      throw new Error('Not initialized');
    }
    return this.local;
    return this.storages.local;
  }

  get ensureSync() {
    if (!this.sync) {
      throw new Error('Not initialized');
      throw new Error('Sync not initialized');
    }
    return this.sync;
  }

@@ -31,11 +33,7 @@ export class WorkerConsumer {
  }

  get docSync() {
    const docSync = this.ensureSync.doc;
    if (!docSync) {
      throw new Error('Doc sync not initialized');
    }
    return docSync;
    return this.ensureSync.doc;
  }

  get blobStorage() {

@@ -43,11 +41,7 @@ export class WorkerConsumer {
  }

  get blobSync() {
    const blobSync = this.ensureSync.blob;
    if (!blobSync) {
      throw new Error('Blob sync not initialized');
    }
    return blobSync;
    return this.ensureSync.blob;
  }

  get syncStorage() {

@@ -59,41 +53,58 @@ export class WorkerConsumer {
  }

  get awarenessSync() {
    const awarenessSync = this.ensureSync.awareness;
    if (!awarenessSync) {
      throw new Error('Awareness sync not initialized');
    }
    return awarenessSync;
    return this.ensureSync.awareness;
  }

  constructor(private readonly consumer: OpConsumer<WorkerOps>) {}

  listen() {
  constructor(
    private readonly consumer: OpConsumer<WorkerOps>,
    private readonly availableStorageImplementations: StorageConstructor[]
  ) {
    this.registerHandlers();
    this.consumer.listen();
  }
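
With this change the consumer is live as soon as it is constructed: handler registration and consumer.listen() move into the constructor, and the storage implementations are injected instead of being resolved from a global registry. A sketch of a worker entry point under these assumptions; how the OpConsumer and the implementation list are obtained is illustrative, not prescribed by this diff:

import type { OpConsumer } from '@toeverything/infra/op';
import type { StorageConstructor } from '../impls';
import { WorkerConsumer, type WorkerOps } from './consumer';

// assumed to be provided by the host bundle
declare const consumer: OpConsumer<WorkerOps>;
declare const implementations: StorageConstructor[];

new WorkerConsumer(consumer, implementations);
// handlers are registered and listen() runs inside the constructor,
// so no further setup call is needed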

  async init(init: {
    local: { name: string; opts: StorageOptions }[];
    remotes: { name: string; opts: StorageOptions }[][];
  }) {
    this.local = new SpaceStorage(
      init.local.map(opt => {
        const Storage = getAvailableStorageImplementations(opt.name);
        return new Storage(opt.opts);
      })
    );
    this.remotes = init.remotes.map(opts => {
      return new SpaceStorage(
        opts.map(opt => {
          const Storage = getAvailableStorageImplementations(opt.name);
          return new Storage(opt.opts);
  init(init: WorkerInitOptions) {
    this.storages = {
      local: new SpaceStorage(
        Object.fromEntries(
          Object.entries(init.local).map(([type, opt]) => {
            const Storage = this.availableStorageImplementations.find(
              impl => impl.identifier === opt.name
            );
            if (!Storage) {
              throw new Error(`Storage implementation ${opt.name} not found`);
            }
            return [type, new Storage(opt.opts as any)];
          })
        )
      ),
      remotes: Object.fromEntries(
        Object.entries(init.remotes).map(([peer, opts]) => {
          return [
            peer,
            new SpaceStorage(
              Object.fromEntries(
                Object.entries(opts).map(([type, opt]) => {
                  const Storage = this.availableStorageImplementations.find(
                    impl => impl.identifier === opt.name
                  );
                  if (!Storage) {
                    throw new Error(
                      `Storage implementation ${opt.name} not found`
                    );
                  }
                  return [type, new Storage(opt.opts as any)];
                })
              )
            ),
          ];
        })
      );
    });
    this.sync = new Sync(this.local, this.remotes);
    this.local.connect();
    for (const remote of this.remotes) {
      ),
    };
    this.sync = new Sync(this.storages);
    this.storages.local.connect();
    for (const remote of Object.values(this.storages.remotes)) {
      remote.connect();
    }
    this.sync.start();
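
The init payload is now keyed rather than positional: local maps each storage type to one implementation, and remotes nests the same shape under a peer id. Note that init also drops the async keyword, since construction is synchronous. A hypothetical payload, assuming a constructed WorkerConsumer named consumer; the implementation names and the 'cloud' peer id are assumptions and must match a registered identifier:

consumer.init({
  local: {
    doc: {
      name: 'IndexedDBDocStorage',
      opts: { id: 'ws1', flavour: 'local', type: 'workspace' },
    },
  },
  remotes: {
    cloud: {
      doc: {
        name: 'CloudDocStorage',
        opts: { id: 'ws1', flavour: 'cloud', type: 'workspace' },
      },
    },
  },
});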

@@ -101,9 +112,9 @@ export class WorkerConsumer {

  async destroy() {
    this.sync?.stop();
    this.local?.disconnect();
    await this.local?.destroy();
    for (const remote of this.remotes) {
    this.storages?.local.disconnect();
    await this.storages?.local.destroy();
    for (const remote of Object.values(this.storages?.remotes ?? {})) {
      remote.disconnect();
      await remote.destroy();
    }

@@ -144,7 +155,7 @@ export class WorkerConsumer {
            subscriber.next(true);
            subscriber.complete();
          })
          .catch(error => {
          .catch((error: any) => {
            subscriber.error(error);
          });
        return () => abortController.abort();

@@ -224,6 +235,29 @@ export class WorkerConsumer {
        }),
      'blobSync.downloadBlob': key => this.blobSync.downloadBlob(key),
      'blobSync.uploadBlob': blob => this.blobSync.uploadBlob(blob),
      'blobSync.fullSync': () =>
        new Observable(subscriber => {
          const abortController = new AbortController();
          this.blobSync
            .fullSync(abortController.signal)
            .then(() => {
              subscriber.next(true);
              subscriber.complete();
            })
            .catch(error => {
              subscriber.error(error);
            });
          return () => abortController.abort();
        }),
      'blobSync.state': () => this.blobSync.state$,
      'blobSync.setMaxBlobSize': size => this.blobSync.setMaxBlobSize(size),
      'blobSync.onReachedMaxBlobSize': () =>
        new Observable(subscriber => {
          const undo = this.blobSync.onReachedMaxBlobSize(byteSize => {
            subscriber.next(byteSize);
          });
          return () => undo();
        }),
      'awarenessSync.update': ({ awareness, origin }) =>
        this.awarenessSync.update(awareness, origin),
      'awarenessSync.subscribeUpdate': docId =>

@@ -1,3 +1,4 @@
import type { AvailableStorageImplementations } from '../impls';
import type {
  BlobRecord,
  DocClock,

@@ -6,20 +7,27 @@ import type {
  DocRecord,
  DocUpdate,
  ListedBlobRecord,
  StorageOptions,
  StorageType,
} from '../storage';
import type { AwarenessRecord } from '../storage/awareness';
import type { BlobSyncState } from '../sync/blob';
import type { DocSyncDocState, DocSyncState } from '../sync/doc';

type StorageInitOptions = Values<{
  [key in keyof AvailableStorageImplementations]: {
    name: key;
    opts: ConstructorParameters<AvailableStorageImplementations[key]>[0];
  };
}>;

export interface WorkerInitOptions {
  local: { [key in StorageType]?: StorageInitOptions };
  remotes: Record<string, { [key in StorageType]?: StorageInitOptions }>;
}
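
Values<…> flattens the mapped type into a union, so each StorageInitOptions member pairs an implementation key with exactly that implementation's constructor options; a mismatched pair fails to type-check before it ever reaches the worker. A reduced, self-contained illustration of the same pattern with toy implementations (the names idb and cloud are placeholders):

type Values<T> = T[keyof T];

type Impls = {
  idb: new (opts: { dbName: string }) => unknown;
  cloud: new (opts: { endpoint: string }) => unknown;
};

type InitOptions = Values<{
  [K in keyof Impls]: { name: K; opts: ConstructorParameters<Impls[K]>[0] };
}>;

const ok: InitOptions = { name: 'idb', opts: { dbName: 'nbstore' } };
// const bad: InitOptions = { name: 'idb', opts: { endpoint: '…' } }; // type error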

interface GroupedWorkerOps {
  worker: {
    init: [
      {
        local: { name: string; opts: StorageOptions }[];
        remotes: { name: string; opts: StorageOptions }[][];
      },
      void,
    ];
    init: [WorkerInitOptions, void];
    destroy: [void, void];
  };

@@ -83,6 +91,10 @@ interface GroupedWorkerOps {
  blobSync: {
    downloadBlob: [string, BlobRecord | null];
    uploadBlob: [BlobRecord, void];
    fullSync: [void, boolean];
    setMaxBlobSize: [number, void];
    onReachedMaxBlobSize: [void, number];
    state: [void, BlobSyncState];
  };
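
Each entry in GroupedWorkerOps is an [Input, Output] tuple: request/response ops resolve once through OpClient.call, while stream-style ops such as state and onReachedMaxBlobSize are consumed through ob$. Illustrative calls against the new blobSync group, assuming client is any OpClient<WorkerOps>; the 50 MB figure is arbitrary:

// setMaxBlobSize is [number, void]: call() resolves with void
client.call('blobSync.setMaxBlobSize', 50 * 1024 * 1024).catch(console.error);

// state is [void, BlobSyncState]: ob$() yields a stream of snapshots
const sub = client.ob$('blobSync.state').subscribe(state => {
  console.log('blob sync state', state);
});
sub.unsubscribe();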

  awarenessSync: {

@@ -10,9 +10,6 @@
    {
      "path": "../../frontend/graphql"
    },
    {
      "path": "../../frontend/electron-api"
    },
    {
      "path": "../infra"
    },