feat(core): new worker workspace engine (#9257)

This commit is contained in:
EYHN
2025-01-17 00:22:18 +08:00
committed by GitHub
parent 7dc470e7ea
commit a2ffdb4047
219 changed files with 4267 additions and 7194 deletions

View File

@@ -1,5 +1,6 @@
import type { Slot } from '@blocksuite/global/utils'; import type { Slot } from '@blocksuite/global/utils';
import type { BlobEngine } from '@blocksuite/sync'; import type { BlobEngine } from '@blocksuite/sync';
import type { Awareness } from 'y-protocols/awareness.js';
import type * as Y from 'yjs'; import type * as Y from 'yjs';
import type { Schema } from '../schema/schema.js'; import type { Schema } from '../schema/schema.js';
@@ -15,6 +16,8 @@ export interface Workspace {
readonly idGenerator: IdGenerator; readonly idGenerator: IdGenerator;
readonly blobSync: BlobEngine; readonly blobSync: BlobEngine;
readonly awarenessStore: AwarenessStore; readonly awarenessStore: AwarenessStore;
readonly onLoadDoc?: (doc: Y.Doc) => void;
readonly onLoadAwareness?: (awareness: Awareness) => void;
get schema(): Schema; get schema(): Schema;
get doc(): Y.Doc; get doc(): Y.Doc;

View File

@@ -1,6 +1,7 @@
import { assertExists } from '@blocksuite/global/utils'; import { assertExists } from '@blocksuite/global/utils';
import { diffUpdate, encodeStateVectorFromUpdate, mergeUpdates } from 'yjs'; import { diffUpdate, encodeStateVectorFromUpdate, mergeUpdates } from 'yjs';
import { MANUALLY_STOP } from '../../utils/throw-if-aborted.js';
import type { DocSource } from '../source.js'; import type { DocSource } from '../source.js';
type ChannelMessage = type ChannelMessage =
@@ -85,7 +86,7 @@ export class BroadcastChannelDocSource implements DocSource {
{ signal: abortController.signal } { signal: abortController.signal }
); );
return () => { return () => {
abortController.abort(); abortController.abort(MANUALLY_STOP);
}; };
} }
} }

View File

@@ -50,7 +50,6 @@ describe('message', () => {
removeEventListener: vi.fn(), removeEventListener: vi.fn(),
}; };
ctx.handler = new CustomMessageHandler(ctx.receivePort); ctx.handler = new CustomMessageHandler(ctx.receivePort);
ctx.handler.listen();
}); });
it('should ignore unknown message type', ctx => { it('should ignore unknown message type', ctx => {

View File

@@ -162,16 +162,19 @@ export class OpClient<Ops extends OpSchema> extends AutoMessageHandler {
op: Op, op: Op,
...args: OpInput<Ops, Op> ...args: OpInput<Ops, Op>
): Observable<Out> { ): Observable<Out> {
const payload = args[0];
const msg = {
type: 'subscribe',
id: this.nextCallId(op),
name: op as string,
payload,
} satisfies SubscribeMessage;
const sub$ = new Observable<Out>(ob => { const sub$ = new Observable<Out>(ob => {
const payload = args[0];
const msg = {
type: 'subscribe',
id: this.nextCallId(op),
name: op as string,
payload,
} satisfies SubscribeMessage;
const transferables = fetchTransferables(payload);
this.port.postMessage(msg, { transfer: transferables });
this.obs.set(msg.id, ob); this.obs.set(msg.id, ob);
return () => { return () => {
@@ -184,9 +187,6 @@ export class OpClient<Ops extends OpSchema> extends AutoMessageHandler {
}; };
}); });
const transferables = fetchTransferables(payload);
this.port.postMessage(msg, { transfer: transferables });
return sub$; return sub$;
} }

View File

@@ -1,6 +1,7 @@
import EventEmitter2 from 'eventemitter2'; import EventEmitter2 from 'eventemitter2';
import { defer, from, fromEvent, Observable, of, take, takeUntil } from 'rxjs'; import { defer, from, fromEvent, Observable, of, take, takeUntil } from 'rxjs';
import { MANUALLY_STOP } from '../utils';
import { import {
AutoMessageHandler, AutoMessageHandler,
type CallMessage, type CallMessage,
@@ -45,7 +46,7 @@ export class OpConsumer<Ops extends OpSchema> extends AutoMessageHandler {
}; };
} }
private readonly handleCallMessage: MessageHandlers['call'] = async msg => { private readonly handleCallMessage: MessageHandlers['call'] = msg => {
const abortController = new AbortController(); const abortController = new AbortController();
this.processing.set(msg.id, abortController); this.processing.set(msg.id, abortController);
@@ -119,7 +120,7 @@ export class OpConsumer<Ops extends OpSchema> extends AutoMessageHandler {
return; return;
} }
abortController.abort(); abortController.abort(MANUALLY_STOP);
}; };
register<Op extends OpNames<Ops>>(op: Op, handler: OpHandler<Ops, Op>) { register<Op extends OpNames<Ops>>(op: Op, handler: OpHandler<Ops, Op>) {
@@ -181,7 +182,7 @@ export class OpConsumer<Ops extends OpSchema> extends AutoMessageHandler {
super.close(); super.close();
this.registeredOpHandlers.clear(); this.registeredOpHandlers.clear();
this.processing.forEach(controller => { this.processing.forEach(controller => {
controller.abort(); controller.abort(MANUALLY_STOP);
}); });
this.processing.clear(); this.processing.clear();
this.eventBus.removeAllListeners(); this.eventBus.removeAllListeners();

View File

@@ -134,7 +134,9 @@ export abstract class AutoMessageHandler {
private listening = false; private listening = false;
protected abstract handlers: Partial<MessageHandlers>; protected abstract handlers: Partial<MessageHandlers>;
constructor(protected readonly port: MessageCommunicapable) {} constructor(protected readonly port: MessageCommunicapable) {
this.listen();
}
protected handleMessage = ignoreUnknownEvent((msg: Messages) => { protected handleMessage = ignoreUnknownEvent((msg: Messages) => {
const handler = this.handlers[msg.type]; const handler = this.handlers[msg.type];
@@ -145,7 +147,7 @@ export abstract class AutoMessageHandler {
handler(msg as any); handler(msg as any);
}); });
listen() { protected listen() {
if (this.listening) { if (this.listening) {
return; return;
} }

View File

@@ -1,136 +0,0 @@
import { nanoid } from 'nanoid';
import {
afterEach,
beforeEach,
describe,
expect,
test as t,
type TestAPI,
vitest,
} from 'vitest';
import { Doc } from 'yjs';
import { DocEngine } from '../../../sync';
import { MiniSyncServer } from '../../../sync/doc/__tests__/utils';
import { MemoryStorage } from '../../../sync/doc/storage';
import { createORMClient, type DBSchemaBuilder, f, YjsDBAdapter } from '../';
// Minimal schema exercising defaults, optional fields, and json columns.
const TEST_SCHEMA = {
  tags: {
    id: f.string().primaryKey().default(nanoid),
    name: f.string(),
    color: f.string().optional(),
    colors: f.json<string[]>().optional(),
  },
} satisfies DBSchemaBuilder;
const ORMClient = createORMClient(TEST_SCHEMA);
// Per-test fixture: one mini server and two independent synced clients.
type Context = {
  server: MiniSyncServer;
  user1: {
    client: InstanceType<typeof ORMClient>;
    engine: DocEngine;
  };
  user2: {
    client: InstanceType<typeof ORMClient>;
    engine: DocEngine;
  };
};
// Build a DocEngine backed by in-memory storage talking to the mini server.
function createEngine(server: MiniSyncServer) {
  return new DocEngine(new MemoryStorage(), server.client());
}
// Create an ORM client whose docs sync through `server`, pinning the yjs
// clientID so conflict resolution between users is deterministic.
async function createClient(server: MiniSyncServer, clientId: number) {
  const engine = createEngine(server);
  const Client = createORMClient(TEST_SCHEMA);
  // define the hooks
  Client.defineHook('tags', 'migrate field `color` to field `colors`', {
    deserialize(data) {
      if (!data.colors && data.color) {
        data.colors = [data.color];
      }
      return data;
    },
  });
  const client = new Client(
    new YjsDBAdapter(TEST_SCHEMA, {
      getDoc(guid: string) {
        const doc = new Doc({ guid });
        doc.clientID = clientId;
        engine.addDoc(doc);
        return doc;
      },
    })
  );
  return {
    engine,
    client,
  };
}
beforeEach<Context>(async t => {
  t.server = new MiniSyncServer();
  // we set user2's clientId greater than user1's clientId,
  // so all conflicts will be resolved to user2's changes
  t.user1 = await createClient(t.server, 1);
  t.user2 = await createClient(t.server, 2);
  t.user1.engine.start();
  t.user2.engine.start();
});
afterEach<Context>(async t => {
  t.user1.engine.stop();
  t.user2.engine.stop();
});
const test = t as TestAPI<Context>;
describe('ORM compatibility in synchronization scenerio', () => {
  test('2 clients create at the same time', async t => {
    const { user1, user2 } = t;
    const tag1 = user1.client.tags.create({
      name: 'tag1',
      color: 'blue',
    });
    const tag2 = user2.client.tags.create({
      name: 'tag2',
      color: 'red',
    });
    // Wait until both clients converge on both tags.
    await vitest.waitFor(() => {
      expect(user1.client.tags.keys()).toHaveLength(2);
      expect(user2.client.tags.keys()).toHaveLength(2);
    });
    expect(user2.client.tags.get(tag1.id)).toStrictEqual(tag1);
    expect(user1.client.tags.get(tag2.id)).toStrictEqual(tag2);
  });
  test('2 clients updating the same entity', async t => {
    const { user1, user2 } = t;
    const tag = user1.client.tags.create({
      name: 'tag1',
      color: 'blue',
    });
    await vitest.waitFor(() => {
      expect(user2.client.tags.keys()).toHaveLength(1);
    });
    user1.client.tags.update(tag.id, { color: 'red' });
    user2.client.tags.update(tag.id, { color: 'gray' });
    // user2 has the larger clientID, so its write wins on both sides.
    await vitest.waitFor(() => {
      expect(user1.client.tags.get(tag.id)).toHaveProperty('color', 'gray');
      expect(user2.client.tags.get(tag.id)).toHaveProperty('color', 'gray');
    });
  });
});

View File

@@ -1,23 +0,0 @@
import type { Awareness } from 'y-protocols/awareness.js';
/**
 * A single awareness backend (e.g. broadcast channel or websocket) that can
 * be attached to and detached from a yjs `Awareness` instance.
 */
export interface AwarenessConnection {
  connect(awareness: Awareness): void;
  disconnect(): void;
  dispose?(): void;
}
/**
 * Fans awareness lifecycle calls out to every registered connection.
 */
export class AwarenessEngine {
  constructor(public readonly connections: AwarenessConnection[]) {}
  /** Attach the given awareness instance to every connection. */
  connect(awareness: Awareness) {
    for (const connection of this.connections) {
      connection.connect(awareness);
    }
  }
  /** Detach every connection. */
  disconnect() {
    for (const connection of this.connections) {
      connection.disconnect();
    }
  }
  /** Dispose connections that support disposal; others are skipped. */
  dispose() {
    for (const connection of this.connections) {
      connection.dispose?.();
    }
  }
}

View File

@@ -1,264 +0,0 @@
import { DebugLogger } from '@affine/debug';
import EventEmitter2 from 'eventemitter2';
import { difference } from 'lodash-es';
import { LiveData } from '../../livedata';
import type { Memento } from '../../storage';
import { MANUALLY_STOP } from '../../utils';
import { BlobStorageOverCapacity } from './error';
const logger = new DebugLogger('affine:blob-engine');
/**
 * A keyed blob store; `set` resolves with the key actually stored.
 */
export interface BlobStorage {
  name: string;
  // When true the engine never writes to this storage.
  readonly: boolean;
  get: (key: string) => Promise<Blob | null>;
  set: (key: string, value: Blob) => Promise<string>;
  delete: (key: string) => Promise<void>;
  list: () => Promise<string[]>;
}
export interface BlobStatus {
  isStorageOverCapacity: boolean;
}
/**
 * # BlobEngine
 *
 * Synchronizes blobs between the local storage and remote storages in the
 * background.
 *
 * All read operations prefer local storage and fall back to remotes.
 */
export class BlobEngine {
  readonly name = 'blob-engine';
  // NOTE(review): this field initializer reads `this.local` (a constructor
  // parameter property); relies on parameter properties being assigned before
  // field initializers — confirm against the project's tsconfig target.
  readonly readonly = this.local.readonly;
  readonly event = new EventEmitter2();
  // Non-null while the background sync loop is running.
  private abort: AbortController | null = null;
  readonly isStorageOverCapacity$ = new LiveData(false);
  // Blobs larger than this (100 MiB) are rejected by `set`.
  singleBlobSizeLimit: number = 100 * 1024 * 1024;
  // Subscribe to blobs rejected for exceeding `singleBlobSizeLimit`;
  // returns an unsubscribe function.
  onAbortLargeBlob = (callback: (blob: Blob) => void) => {
    this.event.on('abort-large-blob', callback);
    return () => {
      this.event.off('abort-large-blob', callback);
    };
  };
  constructor(
    private readonly local: BlobStorage,
    private readonly remotes: BlobStorage[]
  ) {}
  /** Start the background sync loop; no-op if already running or over capacity. */
  start() {
    if (this.abort || this.isStorageOverCapacity$.value) {
      return;
    }
    this.abort = new AbortController();
    const abortSignal = this.abort.signal;
    const sync = () => {
      // After `stop()` the pending timeout fires once more and exits here.
      if (abortSignal.aborted) {
        return;
      }
      this.sync()
        .catch(error => {
          logger.error('sync blob error', error);
        })
        .finally(() => {
          // sync every 1 minute
          setTimeout(sync, 60000);
        });
    };
    sync();
  }
  /** Stop the background loop; a scheduled iteration exits via the aborted signal. */
  stop() {
    this.abort?.abort(MANUALLY_STOP);
    this.abort = null;
  }
  get storages() {
    return [this.local, ...this.remotes];
  }
  /**
   * One full sync pass: for each writable remote, upload blobs missing
   * remotely and download blobs missing locally.
   */
  async sync() {
    if (this.local.readonly) {
      return;
    }
    logger.debug('start syncing blob...');
    for (const remote of this.remotes) {
      let localList: string[] = [];
      let remoteList: string[] = [];
      if (!remote.readonly) {
        try {
          localList = await this.local.list();
          remoteList = await remote.list();
        } catch (err) {
          logger.error(`error when sync`, err);
          continue;
        }
        const needUpload = difference(localList, remoteList);
        for (const key of needUpload) {
          try {
            const data = await this.local.get(key);
            if (data) {
              await remote.set(key, data);
            }
          } catch (err) {
            logger.error(
              `error when sync ${key} from [${this.local.name}] to [${remote.name}]`,
              err
            );
          }
        }
      }
      // NOTE(review): for a readonly remote the lists above stay empty, so
      // nothing is ever downloaded from it — confirm this is intentional.
      const needDownload = difference(remoteList, localList);
      for (const key of needDownload) {
        try {
          const data = await remote.get(key);
          if (data) {
            await this.local.set(key, data);
          }
        } catch (err) {
          if (err instanceof BlobStorageOverCapacity) {
            // Local storage is full; flag it so `start` refuses to run again.
            this.isStorageOverCapacity$.value = true;
          }
          logger.error(
            `error when sync ${key} from [${remote.name}] to [${this.local.name}]`,
            err
          );
        }
      }
    }
    logger.debug('finish syncing blob');
  }
  /** Read a blob, trying local storage first and then each remote in order. */
  async get(key: string) {
    logger.debug('get blob', key);
    for (const storage of this.storages) {
      const data = await storage.get(key);
      if (data) {
        return data;
      }
    }
    return null;
  }
  /**
   * Write a blob to local storage, then fan out to writable remotes in the
   * background. Oversized blobs are dropped and reported via
   * `onAbortLargeBlob`; the key is still returned.
   */
  async set(key: string, value: Blob) {
    if (this.local.readonly) {
      throw new Error('local peer is readonly');
    }
    if (value.size > this.singleBlobSizeLimit) {
      this.event.emit('abort-large-blob', value);
      logger.error('blob over limit, abort set');
      return key;
    }
    // await upload to the local peer
    await this.local.set(key, value);
    // uploads to other peers in the background
    Promise.allSettled(
      this.remotes
        .filter(r => !r.readonly)
        .map(peer =>
          peer.set(key, value).catch(err => {
            logger.error('Error when uploading to peer', err);
          })
        )
    )
      .then(result => {
        if (result.some(({ status }) => status === 'rejected')) {
          logger.error(
            `blob ${key} update finish, but some peers failed to update`
          );
        } else {
          logger.debug(`blob ${key} update finish`);
        }
      })
      .catch(() => {
        // Promise.allSettled never reject
      });
    return key;
  }
  /** Deletion is intentionally unsupported. */
  async delete(_key: string) {
    // not supported
  }
  /** Union of keys across all storages. */
  async list() {
    const blobList = new Set<string>();
    for (const peer of this.storages) {
      const list = await peer.list();
      if (list) {
        for (const blob of list) {
          blobList.add(blob);
        }
      }
    }
    return Array.from(blobList);
  }
}
/**
 * A read-only placeholder storage: contains nothing and rejects all writes.
 */
export const EmptyBlobStorage: BlobStorage = {
  name: 'empty',
  readonly: true,
  get: async (_key: string) => null,
  set: async (_key: string, _value: Blob): Promise<string> => {
    throw new Error('not supported');
  },
  delete: async (_key: string): Promise<void> => {
    throw new Error('not supported');
  },
  list: async () => [],
};
/**
 * In-memory {@link BlobStorage} backed by a `Memento`, intended for tests.
 * The set of stored keys is tracked under the `'list'` entry of the state.
 */
export class MemoryBlobStorage implements BlobStorage {
  name = 'testing';
  readonly = false;
  constructor(private readonly state: Memento) {}
  // Current key set, or a fresh empty set when none has been stored yet.
  private keySet(): Set<string> {
    return this.state.get<Set<string>>('list') ?? new Set<string>();
  }
  get(key: string) {
    const blob = this.state.get<Blob>(key) ?? null;
    return Promise.resolve(blob);
  }
  set(key: string, value: Blob) {
    this.state.set(key, value);
    const keys = this.keySet();
    keys.add(key);
    this.state.set('list', keys);
    return Promise.resolve(key);
  }
  delete(key: string) {
    // Deletion stores null under the key rather than removing the entry.
    this.state.set(key, null);
    const keys = this.keySet();
    keys.delete(key);
    this.state.set('list', keys);
    return Promise.resolve();
  }
  list() {
    const keys = this.state.get<Set<string>>('list');
    return Promise.resolve(keys ? Array.from(keys) : []);
  }
}

View File

@@ -1,5 +0,0 @@
/**
 * Thrown when a blob write fails because the backing storage has run out of
 * capacity. `BlobEngine.sync` checks for this with `instanceof` to flip its
 * over-capacity flag.
 *
 * @param originError - the underlying error reported by the storage, if any.
 */
export class BlobStorageOverCapacity extends Error {
  constructor(public originError?: any) {
    super('Blob storage over capacity.');
    // Use the class name instead of the default 'Error' so logs identify it.
    this.name = 'BlobStorageOverCapacity';
  }
}

View File

@@ -1,127 +0,0 @@
# DocEngine
The synchronization algorithm for yjs docs.
```
┌─────────┐ ┌───────────┐ ┌────────┐
│ Storage ◄──┤ DocEngine ├──► Server │
└─────────┘ └───────────┘ └────────┘
```
# Core Components
## DocStorage
```ts
export interface DocStorage {
eventBus: DocEventBus;
doc: ByteKV;
syncMetadata: ByteKV;
serverClock: ByteKV;
}
```
Represents the local storage used. Specific implementations are replaceable, such as `IndexedDBDocStorage` in the `browser` and `SqliteDocStorage` in the `desktop` app.
### DocEventBus
Each `DocStorage` contains a `DocEventBus`, which is used to communicate with other engines that share the same storage.
With `DocEventBus` we can sync updates between engines without connecting to the server.
For example, on the `browser`, we have multiple tabs, all tabs share the same `IndexedDBDocStorage`, so we use `BroadcastChannel` to implement `DocEventBus`, which allows us to broadcast events to all tabs.
On the `desktop` app, if we have multiple Windows sharing the same `SqliteDocStorage`, we must build a mechanism to broadcast events between all Windows (currently not implemented).
## DocServer
```ts
export interface DocServer {
pullDoc(
docId: string,
stateVector: Uint8Array
): Promise<{
data: Uint8Array;
serverClock: number;
stateVector?: Uint8Array;
} | null>;
pushDoc(docId: string, data: Uint8Array): Promise<{ serverClock: number }>;
subscribeAllDocs(cb: (updates: { docId: string; data: Uint8Array; serverClock: number }) => void): Promise<() => void>;
loadServerClock(after: number): Promise<Map<string, number>>;
waitForConnectingServer(signal: AbortSignal): Promise<void>;
disconnectServer(): void;
onInterrupted(cb: (reason: string) => void): void;
}
```
Represents the server we want to synchronize, there is a simulated implementation in `tests/sync.spec.ts`, and the real implementation is in `packages/backend/server`.
### ServerClock
`ServerClock` is a clock generated after each update is stored in the Server. It is used to determine the order in which updates are stored in the Server.
The `DocEngine` decides whether to pull updates from the server based on the `ServerClock`.
A `ServerClock` written later must be **greater** than all previously written ones. So on the client side, we can call `loadServerClock(the largest ServerClock previously received)` to obtain every changed `ServerClock`.
## DocEngine
The `DocEngine` is where all the synchronization logic actually happens.
Due to the complexity of the implementation, we divide it into 2 parts.
## DocEngine - LocalPart
Synchronizing **the `YDoc` instance** and **storage**.
The typical workflow is:
1. load data from storage, apply to `YDoc` instance.
2. track `YDoc` changes
3. write the changes back to storage.
### SeqNum
There is a `SeqNum` on each Doc data in `Storage`. Every time `LocalPart` writes data, `SeqNum` will be +1.
There is also a `PushedSeqNum`, which is used for RemotePart later.
## DocEngine - RemotePart
Synchronizing `Storage` and `Server`.
The typical workflow is:
1. Connect with the server, Load `ServerClocks` for all docs, Start subscribing to server-side updates.
2. Check whether each doc requires `push` and `pull`
3. Execute all push and pull
4. Listen for updates from `LocalPart` and push the updates to the server
5. Listen for server-side updates and write them to storage.
### PushedSeqNum
Each Doc will record a `PushedSeqNum`, used to determine whether the doc has unpushed updates.
After each `push` completes, `PushedSeqNum` is incremented by 1.
If `PushedSeqNum` and `SeqNum` still differ after the push completes (which usually means a previous `push` failed),
then a full pull and push is performed and `PushedSeqNum` is set to `SeqNum`.
### PulledServerClock
Each Doc also records a `PulledServerClock`, used to compare against the `ServerClock` to determine whether to `pull` the doc.
When a `pull` completes, `PulledServerClock` is set to the `ServerClock` returned by the server.
### Retry
The `RemotePart` may fail at any time, and `RemotePart`'s built-in retry mechanism will restart the process in 5 seconds after failure.

View File

@@ -1,41 +0,0 @@
import { describe, expect, test } from 'vitest';
import { PriorityQueue } from '../priority-queue';
describe('Priority Queue', () => {
  test('priority', () => {
    const queue = new PriorityQueue();
    // Higher priority pops first.
    queue.push('foo', 1);
    queue.push('bar', 2);
    queue.push('baz', 0);
    expect(queue.pop()).toBe('bar');
    expect(queue.pop()).toBe('foo');
    expect(queue.pop()).toBe('baz');
    // An empty queue yields null.
    expect(queue.pop()).toBe(null);
    queue.push('B', 1);
    queue.push('A', 1);
    // if priority same then follow id binary order
    expect(queue.pop()).toBe('B');
    expect(queue.pop()).toBe('A');
    expect(queue.pop()).toBe(null);
    queue.push('A', 1);
    queue.push('B', 2);
    queue.push('A', 3); // same id but different priority, update the priority
    expect(queue.pop()).toBe('A');
    expect(queue.pop()).toBe('B');
    expect(queue.pop()).toBe(null);
    queue.push('A', 1);
    queue.push('B', 2);
    // Removed entries must never pop.
    queue.remove('B');
    expect(queue.pop()).toBe('A');
    expect(queue.pop()).toBe(null);
  });
});

View File

@@ -1,128 +0,0 @@
import { describe, expect, test, vitest } from 'vitest';
import { Doc as YDoc, encodeStateAsUpdate } from 'yjs';
import { DocEngine } from '..';
import { MemoryStorage } from '../storage';
import { MiniSyncServer } from './utils';
describe('sync', () => {
  test('basic sync', async () => {
    const storage = new MemoryStorage();
    const server = new MiniSyncServer();
    const engine = new DocEngine(storage, server.client()).start();
    const doc = new YDoc({ guid: 'a' });
    engine.addDoc(doc);
    const map = doc.getMap('aaa');
    map.set('a', 1);
    await engine.waitForSynced();
    // The change must reach both the server and local storage.
    expect(server.db.size).toBe(1);
    expect(storage.docDb.keys().length).toBe(1);
  });
  test('can pull from server', async () => {
    const server = new MiniSyncServer();
    {
      // First client seeds the server with one doc.
      const engine = new DocEngine(
        new MemoryStorage(),
        server.client()
      ).start();
      const doc = new YDoc({ guid: 'a' });
      engine.addDoc(doc);
      const map = doc.getMap('aaa');
      map.set('a', 1);
      await engine.waitForSynced();
      expect(server.db.size).toBe(1);
    }
    {
      // Second client with empty storage pulls the same doc back down.
      const engine = new DocEngine(
        new MemoryStorage(),
        server.client()
      ).start();
      const doc = new YDoc({ guid: 'a' });
      engine.addDoc(doc);
      await engine.waitForSynced();
      expect(doc.getMap('aaa').get('a')).toBe(1);
    }
  });
  test('2 client', async () => {
    const server = new MiniSyncServer();
    // Two independent clients with separate storages converge via the server.
    await Promise.all([
      (async () => {
        const engine = new DocEngine(
          new MemoryStorage(),
          server.client()
        ).start();
        const doc = new YDoc({ guid: 'a' });
        engine.addDoc(doc);
        const map = doc.getMap('aaa');
        map.set('a', 1);
        await vitest.waitUntil(() => {
          return map.get('b') === 2;
        });
      })(),
      (async () => {
        const engine = new DocEngine(
          new MemoryStorage(),
          server.client()
        ).start();
        const doc = new YDoc({ guid: 'a' });
        engine.addDoc(doc);
        const map = doc.getMap('aaa');
        map.set('b', 2);
        await vitest.waitUntil(() => {
          return map.get('a') === 1;
        });
      })(),
    ]);
  });
  test('2 client share storage and eventBus (simulate different tabs in same browser)', async () => {
    const server = new MiniSyncServer();
    const storage = new MemoryStorage();
    // Both engines share one storage, so updates also flow via the event bus.
    await Promise.all([
      (async () => {
        const engine = new DocEngine(storage, server.client()).start();
        const doc = new YDoc({ guid: 'a' });
        engine.addDoc(doc);
        const map = doc.getMap('aaa');
        map.set('a', 1);
        await vitest.waitUntil(() => map.get('b') === 2);
      })(),
      (async () => {
        const engine = new DocEngine(storage, server.client()).start();
        const doc = new YDoc({ guid: 'a' });
        engine.addDoc(doc);
        const map = doc.getMap('aaa');
        map.set('b', 2);
        await vitest.waitUntil(() => map.get('a') === 1);
      })(),
    ]);
  });
  test('legacy data', async () => {
    const server = new MiniSyncServer();
    const storage = new MemoryStorage();
    {
      // write legacy data to storage
      const doc = new YDoc({ guid: 'a' });
      const map = doc.getMap('aaa');
      map.set('a', 1);
      await storage.doc.set('a', encodeStateAsUpdate(doc));
    }
    const engine = new DocEngine(storage, server.client()).start();
    const doc = new YDoc({ guid: 'a' });
    engine.addDoc(doc);
    // should load to ydoc and save to server
    await vitest.waitUntil(
      () => doc.getMap('aaa').get('a') === 1 && server.db.size === 1
    );
  });
});

View File

@@ -1,108 +0,0 @@
import { nanoid } from 'nanoid';
import { diffUpdate, encodeStateVectorFromUpdate, mergeUpdates } from 'yjs';
import { AsyncLock } from '../../../utils';
import type { DocServer } from '../server';
import { isEmptyUpdate } from '../utils';
/**
 * In-memory doc sync server for tests; each doc stores its merged update
 * bytes plus a monotonically increasing clock.
 */
export class MiniSyncServer {
  lock = new AsyncLock();
  db = new Map<string, { data: Uint8Array; clock: number }>();
  // Subscribers keyed by client id so a client never receives its own pushes.
  listeners = new Set<{
    cb: (updates: {
      docId: string;
      data: Uint8Array;
      serverClock: number;
    }) => void;
    clientId: string;
  }>();
  /** Create a new client connection with a random client id. */
  client() {
    return new MiniServerClient(nanoid(), this);
  }
}
/** A single simulated client connection to a {@link MiniSyncServer}. */
export class MiniServerClient implements DocServer {
  constructor(
    private readonly id: string,
    private readonly server: MiniSyncServer
  ) {}
  /** Return the diff against `stateVector`, or null when the doc is unknown. */
  async pullDoc(docId: string, stateVector: Uint8Array) {
    // `using` releases the server lock when this scope exits.
    using _lock = await this.server.lock.acquire();
    const doc = this.server.db.get(docId);
    if (!doc) {
      return null;
    }
    const data = doc.data;
    return {
      data:
        !isEmptyUpdate(data) && stateVector.length > 0
          ? diffUpdate(data, stateVector)
          : data,
      serverClock: 0,
      stateVector: !isEmptyUpdate(data)
        ? encodeStateVectorFromUpdate(data)
        : new Uint8Array(),
    };
  }
  /**
   * Merge `data` into the stored doc, bump its clock, and notify every
   * listener except the pushing client itself.
   */
  async pushDoc(
    docId: string,
    data: Uint8Array
  ): Promise<{ serverClock: number }> {
    using _lock = await this.server.lock.acquire();
    const doc = this.server.db.get(docId);
    const oldData = doc?.data ?? new Uint8Array();
    const newClock = (doc?.clock ?? 0) + 1;
    this.server.db.set(docId, {
      // Empty updates are ignored; otherwise merge into the existing data.
      data: !isEmptyUpdate(data)
        ? !isEmptyUpdate(oldData)
          ? mergeUpdates([oldData, data])
          : data
        : oldData,
      clock: newClock,
    });
    for (const { clientId, cb } of this.server.listeners) {
      if (clientId !== this.id) {
        cb({
          docId,
          data,
          serverClock: newClock,
        });
      }
    }
    return { serverClock: newClock };
  }
  /** Clocks of all docs changed strictly after `after`. */
  async loadServerClock(after: number): Promise<Map<string, number>> {
    using _lock = await this.server.lock.acquire();
    const map = new Map<string, number>();
    for (const [docId, { clock }] of this.server.db) {
      if (clock > after) {
        map.set(docId, clock);
      }
    }
    return map;
  }
  /** Subscribe to pushes from other clients; returns an unsubscribe function. */
  async subscribeAllDocs(
    cb: (updates: {
      docId: string;
      data: Uint8Array;
      serverClock: number;
    }) => void
  ): Promise<() => void> {
    const listener = { cb, clientId: this.id };
    this.server.listeners.add(listener);
    return () => {
      this.server.listeners.delete(listener);
    };
  }
  // The in-memory server is always "connected"; these are deliberate no-ops.
  async waitForConnectingServer(): Promise<void> {}
  disconnectServer(): void {}
  onInterrupted(_cb: (reason: string) => void): void {}
}

View File

@@ -1,43 +0,0 @@
import { PriorityQueue } from './priority-queue';
/**
 * A PriorityQueue whose `asyncPop` suspends until an item is available or
 * the given signal aborts.
 */
export class AsyncPriorityQueue extends PriorityQueue {
  // Resolver for the pending `_waitForUpdate` promise while a pop is waiting.
  private _resolveUpdate: (() => void) | null = null;
  private _waitForUpdate: Promise<void> | null = null;
  /**
   * Pop the highest-priority id, waiting for a `push` when the queue is
   * empty. Rejects with `abort.reason` if the signal aborts while waiting.
   */
  async asyncPop(abort?: AbortSignal): Promise<string> {
    const update = this.pop();
    if (update) {
      return update;
    } else {
      // Share one wait promise between all concurrent waiters.
      if (!this._waitForUpdate) {
        this._waitForUpdate = new Promise(resolve => {
          this._resolveUpdate = resolve;
        });
      }
      await Promise.race([
        this._waitForUpdate,
        new Promise((_, reject) => {
          if (abort?.aborted) {
            reject(abort?.reason);
          }
          // NOTE(review): this listener is never removed, so repeated waits
          // on a long-lived signal accumulate listeners — confirm acceptable.
          abort?.addEventListener('abort', () => {
            reject(abort.reason);
          });
        }),
      ]);
      // Retry: another consumer may have taken the pushed item first.
      return this.asyncPop(abort);
    }
  }
  /** Push and wake any pending `asyncPop` waiters. */
  override push(id: string, priority: number = 0) {
    super.push(id, priority);
    if (this._resolveUpdate) {
      const resolve = this._resolveUpdate;
      this._resolveUpdate = null;
      this._waitForUpdate = null;
      resolve();
    }
  }
}

View File

@@ -1,32 +0,0 @@
/**
 * A map of string ids to numeric clocks that also tracks the maximum clock
 * ever observed, including values present at construction time.
 */
export class ClockMap {
  max: number = 0;
  constructor(private readonly map: Map<string, number>) {
    for (const clock of map.values()) {
      this.max = Math.max(this.max, clock);
    }
  }
  /** Clock for `id`, defaulting to 0 when absent. */
  get(id: string): number {
    return this.map.get(id) ?? 0;
  }
  /** Unconditionally store `value`, growing `max` when needed. */
  set(id: string, value: number) {
    this.map.set(id, value);
    if (this.max < value) {
      this.max = value;
    }
  }
  /** Store `value` only when it exceeds the current clock for `id`. */
  setIfBigger(id: string, value: number) {
    if (this.get(id) < value) {
      this.set(id, value);
    }
  }
  /** Drop all entries and reset `max` to 0. */
  clear() {
    this.map.clear();
    this.max = 0;
  }
}

View File

@@ -1,50 +0,0 @@
/** Events broadcast between doc engines that share the same storage. */
export type DocEvent =
  | {
      // An update produced by a local YDoc was committed to storage.
      type: 'ClientUpdateCommitted';
      clientId: string;
      docId: string;
      update: Uint8Array;
      seqNum: number;
    }
  | {
      // An update received from the server was committed to storage.
      type: 'ServerUpdateCommitted';
      docId: string;
      update: Uint8Array;
      clientId: string;
    };
/** Pub/sub channel for {@link DocEvent}s; `on` returns an unsubscribe function. */
export interface DocEventBus {
  emit(event: DocEvent): void;
  on(cb: (event: DocEvent) => void): () => void;
}
/** In-process {@link DocEventBus} that fans events out to local listeners. */
export class MemoryDocEventBus implements DocEventBus {
  listeners = new Set<(event: DocEvent) => void>();
  emit(event: DocEvent): void {
    this.listeners.forEach(handler => {
      try {
        handler(event);
      } catch (err) {
        // A faulty listener must not prevent delivery to the others.
        console.error(err);
      }
    });
  }
  on(cb: (event: DocEvent) => void): () => void {
    this.listeners.add(cb);
    return () => void this.listeners.delete(cb);
  }
}
/** Thin wrapper delegating to an injected {@link DocEventBus} behavior. */
export class DocEventBusInner implements DocEventBus {
  constructor(private readonly eventBusBehavior: DocEventBus) {}
  /** Forward the event to the underlying bus. */
  emit(event: DocEvent) {
    const behavior = this.eventBusBehavior;
    behavior.emit(event);
  }
  /** Subscribe on the underlying bus; returns its unsubscribe function. */
  on(cb: (event: DocEvent) => void) {
    return this.eventBusBehavior.on(cb);
  }
}

View File

@@ -1,232 +0,0 @@
import { DebugLogger } from '@affine/debug';
import { nanoid } from 'nanoid';
import { map } from 'rxjs';
import type { Doc as YDoc } from 'yjs';
import { LiveData } from '../../livedata';
import { MANUALLY_STOP } from '../../utils';
import { DocEngineLocalPart } from './local';
import { DocEngineRemotePart } from './remote';
import type { DocServer } from './server';
import type { DocStorage } from './storage';
import { DocStorageInner } from './storage';
const logger = new DebugLogger('doc-engine');
// Re-export the public doc-sync surface so consumers import from this index.
export type { DocEvent, DocEventBus } from './event';
export { MemoryDocEventBus } from './event';
export type { DocServer } from './server';
export type { DocStorage } from './storage';
export {
  MemoryStorage as MemoryDocStorage,
  ReadonlyStorage as ReadonlyDocStorage,
} from './storage';
/** Per-doc synchronization state exposed by {@link DocEngine.docState$}. */
export interface DocEngineDocState {
  /**
   * is syncing with the server
   */
  syncing: boolean;
  /**
   * is saving to local storage
   */
  saving: boolean;
  /**
   * is loading from local storage
   */
  loading: boolean;
  // True while the remote part is retrying after a failure.
  retrying: boolean;
  // True once the content was loaded from storage or pulled from the server.
  ready: boolean;
  errorMessage: string | null;
  // Latest server clock seen for this doc; null when running without a server.
  serverClock: number | null;
}
export class DocEngine {
readonly clientId: string;
localPart: DocEngineLocalPart;
remotePart: DocEngineRemotePart | null;
storage: DocStorageInner;
engineState$ = LiveData.computed(get => {
const localState = get(this.localPart.engineState$);
if (this.remotePart) {
const remoteState = get(this.remotePart?.engineState$);
return {
total: remoteState.total,
syncing: remoteState.syncing,
saving: localState.syncing,
retrying: remoteState.retrying,
errorMessage: remoteState.errorMessage,
};
}
return {
total: localState.total,
syncing: localState.syncing,
saving: localState.syncing,
retrying: false,
errorMessage: null,
};
});
docState$(docId: string) {
const localState$ = this.localPart.docState$(docId);
const remoteState$ = this.remotePart?.docState$(docId);
return LiveData.computed<DocEngineDocState>(get => {
const localState = get(localState$);
const remoteState = remoteState$ ? get(remoteState$) : null;
if (remoteState) {
return {
syncing: remoteState.syncing,
saving: localState.syncing,
loading: localState.syncing,
retrying: remoteState.retrying,
ready: localState.ready,
errorMessage: remoteState.errorMessage,
serverClock: remoteState.serverClock,
};
}
return {
syncing: localState.syncing,
saving: localState.syncing,
loading: localState.syncing,
ready: localState.ready,
retrying: false,
errorMessage: null,
serverClock: null,
};
});
}
markAsReady(docId: string) {
this.localPart.actions.markAsReady(docId);
}
constructor(
storage: DocStorage,
private readonly server?: DocServer | null
) {
this.clientId = nanoid();
this.storage = new DocStorageInner(storage);
this.localPart = new DocEngineLocalPart(this.clientId, this.storage);
this.remotePart = this.server
? new DocEngineRemotePart(this.clientId, this.storage, this.server)
: null;
}
abort = new AbortController();
start() {
this.abort.abort(MANUALLY_STOP);
this.abort = new AbortController();
Promise.all([
this.localPart.mainLoop(this.abort.signal),
this.remotePart?.mainLoop(this.abort.signal),
]).catch(err => {
if (err === MANUALLY_STOP) {
return;
}
logger.error('Doc engine error', err);
});
return this;
}
stop() {
this.abort.abort(MANUALLY_STOP);
}
async resetSyncStatus() {
this.stop();
await this.storage.clearSyncMetadata();
await this.storage.clearServerClock();
}
/**
 * Register a YDoc (and optionally its subdocs) with the engine.
 *
 * @param doc root ydoc to sync
 * @param withSubDocs when true, also track subdocs as they appear
 */
addDoc(doc: YDoc, withSubDocs = true) {
  this.remotePart?.actions.addDoc(doc.guid);
  this.localPart.actions.addDoc(doc);

  if (withSubDocs) {
    doc.on('subdocs', ({ added, loaded }) => {
      // added: the subdocs that are existing on the ydoc
      // loaded: the subdocs that have been called `ydoc.load()`
      //
      // we add all existing subdocs to remote part, let them sync between storage and server
      // but only add loaded subdocs to local part, let them sync between storage and ydoc
      // sync data to ydoc will consume more memory, so we only sync the ydoc that are necessary.
      for (const subdoc of added) {
        this.remotePart?.actions.addDoc(subdoc.guid);
      }
      for (const subdoc of loaded) {
        this.localPart.actions.addDoc(subdoc);
      }
    });
  }
}
/** Raise/lower a doc's scheduling priority in both sync queues. */
setPriority(docId: string, priority: number) {
  this.localPart.setPriority(docId, priority);
  this.remotePart?.setPriority(docId, priority);
}
/**
 * ## Saved:
 * YDoc changes have been saved to storage, and the browser can be safely closed without losing data.
 */
waitForSaved() {
  return new Promise<void>(resolve => {
    let settled = false;
    // keep a handle so the subscription is released once satisfied; the
    // previous implementation leaked it for the engine's lifetime
    let subscription: { unsubscribe(): void } | undefined;
    subscription = this.engineState$
      .pipe(map(state => state.saving === 0))
      .subscribe(saved => {
        if (saved && !settled) {
          settled = true;
          resolve();
          // undefined if the first emission is synchronous; handled below
          subscription?.unsubscribe();
        }
      });
    if (settled) {
      subscription.unsubscribe();
    }
  });
}
/**
 * ## Synced:
 * is fully synchronized with the server
 */
waitForSynced() {
  return new Promise<void>(resolve => {
    let settled = false;
    // keep a handle so the subscription is released once satisfied; the
    // previous implementation leaked it for the engine's lifetime
    let subscription: { unsubscribe(): void } | undefined;
    subscription = this.engineState$
      .pipe(map(state => state.syncing === 0 && state.saving === 0))
      .subscribe(synced => {
        if (synced && !settled) {
          settled = true;
          resolve();
          // undefined if the first emission is synchronous; handled below
          subscription?.unsubscribe();
        }
      });
    if (settled) {
      subscription.unsubscribe();
    }
  });
}
/**
 * ## Ready:
 *
 * means that the doc has been loaded and the data can be modified.
 * (is not force, you can still modify it if you know you are creating some new data)
 *
 * this is a temporary solution to deal with the yjs overwrite issue.
 *
 * if content is loaded from storage
 * or if content is pulled from the server, it will be true, otherwise be false.
 *
 * For example, when opening a doc that is not in storage, ready = false until the content is pulled from the server.
 */
waitForReady(docId: string) {
  return new Promise<void>(resolve => {
    let settled = false;
    // keep a handle so the subscription is released once satisfied; the
    // previous implementation leaked it for the engine's lifetime
    let subscription: { unsubscribe(): void } | undefined;
    subscription = this.docState$(docId)
      .pipe(map(state => state.ready))
      .subscribe(ready => {
        if (ready && !settled) {
          settled = true;
          resolve();
          // undefined if the first emission is synchronous; handled below
          subscription?.unsubscribe();
        }
      });
    if (settled) {
      subscription.unsubscribe();
    }
  });
}
/** Stop both sync loops and release the server connection, if any. */
dispose() {
  this.stop();
  this.server?.dispose?.();
}
}

View File

@@ -1,302 +0,0 @@
import { DebugLogger } from '@affine/debug';
import { Unreachable } from '@affine/env/constant';
import { groupBy } from 'lodash-es';
import { Observable, Subject } from 'rxjs';
import type { Doc as YDoc } from 'yjs';
import { applyUpdate, encodeStateAsUpdate, mergeUpdates } from 'yjs';
import { LiveData } from '../../livedata';
import { throwIfAborted } from '../../utils';
import { AsyncPriorityQueue } from './async-priority-queue';
import type { DocEvent } from './event';
import type { DocStorageInner } from './storage';
import { isEmptyUpdate } from './utils';
/** Unit of work scheduled per doc in the local sync queue. */
type Job =
  | {
      // connect an in-memory YDoc with its persisted state
      type: 'load';
      docId: string;
    }
  | {
      // persist a client-side yjs update to storage
      type: 'save';
      docId: string;
      update: Uint8Array;
    }
  | {
      // apply an update (committed elsewhere) onto the in-memory YDoc
      type: 'apply';
      docId: string;
      update: Uint8Array;
      isInitialize: boolean;
    };
// Origin tag attached to yjs transactions applied by the engine itself, so
// the doc 'update' listener can ignore our own writes.
const DOC_ENGINE_ORIGIN = 'doc-engine';
const logger = new DebugLogger('doc-engine:local');

/** Aggregate progress of the local part across all docs. */
export interface LocalEngineState {
  total: number;
  syncing: number;
}

/** Per-doc state of the local part; see `DocEngineLocalPart.docState$`. */
export interface LocalDocState {
  ready: boolean;
  loading: boolean;
  syncing: boolean;
}
/**
 * Local half of the doc engine: keeps in-memory `YDoc` instances in sync
 * with local storage.
 *
 * "never fail": storage problems surface through the job loop, and yjs
 * apply errors are logged and dropped (see `applyUpdate`).
 */
export class DocEngineLocalPart {
  private readonly prioritySettings = new Map<string, number>();
  // emits the docId whose status changed; drives the LiveData streams below
  private readonly statusUpdatedSubject$ = new Subject<string>();

  private readonly status = {
    docs: new Map<string, YDoc>(),
    connectedDocs: new Set<string>(),
    readyDocs: new Set<string>(),
    jobDocQueue: new AsyncPriorityQueue(),
    jobMap: new Map<string, Job[]>(),
    currentJob: null as { docId: string; jobs: Job[] } | null,
  };

  /** Aggregate progress: registered docs and docs with pending jobs. */
  engineState$ = LiveData.from<LocalEngineState>(
    new Observable(subscribe => {
      const next = () => {
        subscribe.next({
          total: this.status.docs.size,
          syncing: this.status.jobMap.size + (this.status.currentJob ? 1 : 0),
        });
      };
      next();
      return this.statusUpdatedSubject$.subscribe(() => {
        next();
      });
    }),
    { syncing: 0, total: 0 }
  );

  /** Per-doc live state; see `LocalDocState`. */
  docState$(docId: string) {
    return LiveData.from<LocalDocState>(
      new Observable(subscribe => {
        const next = () => {
          subscribe.next({
            // NOTE(review): `Set.has` never returns nullish, so the
            // `?? false` below is redundant
            ready: this.status.readyDocs.has(docId) ?? false,
            loading: this.status.connectedDocs.has(docId),
            syncing:
              (this.status.jobMap.get(docId)?.length ?? 0) > 0 ||
              this.status.currentJob?.docId === docId,
          });
        };
        next();
        return this.statusUpdatedSubject$.subscribe(updatedId => {
          if (updatedId === docId) next();
        });
      }),
      { ready: false, loading: false, syncing: false }
    );
  }

  constructor(
    private readonly clientId: string,
    private readonly storage: DocStorageInner
  ) {}

  /**
   * Process jobs per doc until aborted. For each doc's batch, jobs run in a
   * fixed order: one load, then all applies, then a merged save. On exit,
   * detaches the 'update' listeners installed on connected docs.
   */
  async mainLoop(signal?: AbortSignal) {
    const dispose = this.storage.eventBus.on(event => {
      const handler = this.events[event.type];
      if (handler) {
        handler(event as any);
      }
    });
    try {
      // eslint-disable-next-line no-constant-condition
      while (true) {
        throwIfAborted(signal);
        const docId = await this.status.jobDocQueue.asyncPop(signal);
        const jobs = this.status.jobMap.get(docId);
        this.status.jobMap.delete(docId);
        if (!jobs) {
          continue;
        }
        this.status.currentJob = { docId, jobs };
        this.statusUpdatedSubject$.next(docId);
        const { apply, load, save } = groupBy(jobs, job => job.type) as {
          [key in Job['type']]?: Job[];
        };
        if (load?.length) {
          // multiple queued loads are equivalent; run only the first
          await this.jobs.load(load[0] as any, signal);
        }
        for (const applyJob of apply ?? []) {
          await this.jobs.apply(applyJob as any, signal);
        }
        if (save?.length) {
          await this.jobs.save(docId, save as any, signal);
        }
        this.status.currentJob = null;
        this.statusUpdatedSubject$.next(docId);
      }
    } finally {
      dispose();
      for (const docs of this.status.connectedDocs) {
        const doc = this.status.docs.get(docs);
        if (doc) {
          doc.off('update', this.handleDocUpdate);
        }
      }
    }
  }

  readonly actions = {
    /** Register a YDoc and queue its initial load from storage. */
    addDoc: (doc: YDoc) => {
      this.schedule({
        type: 'load',
        docId: doc.guid,
      });
      this.status.docs.set(doc.guid, doc);
      this.statusUpdatedSubject$.next(doc.guid);
    },
    /** Mark a doc as ready without waiting for data to arrive. */
    markAsReady: (docId: string) => {
      this.status.readyDocs.add(docId);
      this.statusUpdatedSubject$.next(docId);
    },
  };

  readonly jobs = {
    load: async (job: Job & { type: 'load' }, signal?: AbortSignal) => {
      const doc = this.status.docs.get(job.docId);
      if (!doc) {
        throw new Unreachable('doc not found');
      }
      // content already present on the ydoc (created before the engine
      // attached) must be persisted too
      const existingData = encodeStateAsUpdate(doc);
      if (!isEmptyUpdate(existingData)) {
        this.schedule({
          type: 'save',
          docId: doc.guid,
          update: existingData,
        });
      }
      // mark doc as loaded
      doc.emit('sync', [true, doc]);
      doc.on('update', this.handleDocUpdate);
      this.status.connectedDocs.add(job.docId);
      this.statusUpdatedSubject$.next(job.docId);
      const docData = await this.storage.loadDocFromLocal(job.docId, signal);
      if (!docData || isEmptyUpdate(docData)) {
        return;
      }
      this.applyUpdate(job.docId, docData);
      this.status.readyDocs.add(job.docId);
      this.statusUpdatedSubject$.next(job.docId);
    },
    save: async (
      docId: string,
      jobs: (Job & { type: 'save' })[],
      signal?: AbortSignal
    ) => {
      if (this.status.connectedDocs.has(docId)) {
        // merge the batch into one update before committing
        const merged = mergeUpdates(
          jobs.map(j => j.update).filter(update => !isEmptyUpdate(update))
        );
        const newSeqNum = await this.storage.commitDocAsClientUpdate(
          docId,
          merged,
          signal
        );
        this.storage.eventBus.emit({
          type: 'ClientUpdateCommitted',
          seqNum: newSeqNum,
          docId: docId,
          clientId: this.clientId,
          update: merged,
        });
      }
    },
    apply: async (job: Job & { type: 'apply' }, signal?: AbortSignal) => {
      throwIfAborted(signal);
      if (this.status.connectedDocs.has(job.docId)) {
        this.applyUpdate(job.docId, job.update);
      }
      if (job.isInitialize && !isEmptyUpdate(job.update)) {
        this.status.readyDocs.add(job.docId);
        this.statusUpdatedSubject$.next(job.docId);
      }
    },
  };

  readonly events: {
    [key in DocEvent['type']]?: (event: DocEvent & { type: key }) => void;
  } = {
    ServerUpdateCommitted: ({ docId, update, clientId }) => {
      this.schedule({
        type: 'apply',
        docId,
        update,
        isInitialize: clientId === this.clientId,
      });
    },
    ClientUpdateCommitted: ({ docId, update, clientId }) => {
      // our own client updates are already on the ydoc; only apply updates
      // committed by other clients/tabs
      if (clientId !== this.clientId) {
        this.schedule({
          type: 'apply',
          docId,
          update,
          isInitialize: false,
        });
      }
    },
  };

  // Bound as a yjs 'update' listener; must stay a stable reference so it can
  // be removed with `doc.off` in mainLoop's cleanup.
  handleDocUpdate = (update: Uint8Array, origin: any, doc: YDoc) => {
    if (origin === DOC_ENGINE_ORIGIN) {
      // skip updates the engine applied itself
      return;
    }
    this.schedule({
      type: 'save',
      docId: doc.guid,
      update,
    });
  };

  /** Apply an update to the in-memory YDoc; errors are logged, never thrown. */
  applyUpdate(docId: string, update: Uint8Array) {
    const doc = this.status.docs.get(docId);
    if (doc && !isEmptyUpdate(update)) {
      try {
        applyUpdate(doc, update, DOC_ENGINE_ORIGIN);
      } catch (err) {
        logger.error('failed to apply update yjs doc', err);
      }
    }
  }

  /** Queue a job for its doc and wake the main loop. */
  schedule(job: Job) {
    const priority = this.prioritySettings.get(job.docId) ?? 0;
    this.status.jobDocQueue.push(job.docId, priority);
    const existingJobs = this.status.jobMap.get(job.docId) ?? [];
    existingJobs.push(job);
    this.status.jobMap.set(job.docId, existingJobs);
    this.statusUpdatedSubject$.next(job.docId);
  }

  setPriority(docId: string, priority: number) {
    this.prioritySettings.set(docId, priority);
    this.status.jobDocQueue.updatePriority(docId, priority);
  }
}

View File

@@ -1,24 +0,0 @@
AFFiNE still stores a large amount of data using the old ID format. This document records where each ID format is used, so the old format is not forgotten.
## Old ID Format
The format is:
- `{workspace-id}:space:{nanoid}` Common
- `{workspace-id}:space:page:{nanoid}`
> Note: the `workspace-id` is sometimes not the same as the current workspace id.
## Usage
- Local Storage
- indexeddb: Both new and old IDs coexist
- sqlite: Both new and old IDs coexist
- server-clock: Only new IDs are stored
- sync-metadata: Both new and old IDs coexist
- Server Storage
- Only stores new IDs but accepts writes using old IDs
- Protocols
- When the client submits an update, both new and old IDs are used.
- When the server broadcasts updates sent by other clients, both new and old IDs are used.
- When the server responds to `client-pre-sync` (listing all updated docids), only new IDs are used.

View File

@@ -1,69 +0,0 @@
import { BinarySearchTree } from '@datastructures-js/binary-search-tree';
/**
 * Max-priority queue over string ids, backed by a binary search tree.
 * Ties on priority are broken deterministically by id ordering, and each id
 * appears at most once (re-pushing with a new priority replaces the entry).
 */
export class PriorityQueue {
  // Orders entries by priority first, then by id for a stable tie-break.
  tree = new BinarySearchTree<{ id: string; priority: number }>((a, b) => {
    if (a.priority !== b.priority) {
      return a.priority - b.priority;
    }
    if (a.id === b.id) {
      return 0;
    }
    return a.id > b.id ? 1 : -1;
  });

  // Tracks the priority currently stored in the tree for each id.
  priorityMap = new Map<string, number>();

  push(id: string, priority: number = 0) {
    const previous = this.priorityMap.get(id);
    if (previous === priority) {
      // already queued at this priority; nothing to do
      return;
    }
    if (previous !== undefined) {
      this.remove(id);
    }
    this.tree.insert({ id, priority });
    this.priorityMap.set(id, priority);
  }

  /** Remove and return the highest-priority id, or null when empty. */
  pop() {
    const node = this.tree.max();
    if (!node) {
      return null;
    }
    this.tree.removeNode(node);
    const { id } = node.getValue();
    this.priorityMap.delete(id);
    return id;
  }

  remove(id: string, priority?: number) {
    const effective = priority ?? this.priorityMap.get(id);
    if (effective === undefined) {
      return false;
    }
    const removed = this.tree.remove({ id, priority: effective });
    if (removed) {
      this.priorityMap.delete(id);
    }
    return removed;
  }

  clear() {
    this.tree.clear();
    this.priorityMap.clear();
  }

  /** Re-queue an existing id at a new priority; no-op when id is absent. */
  updatePriority(id: string, priority: number) {
    if (this.remove(id)) {
      this.push(id, priority);
    }
  }

  get length() {
    return this.tree.count;
  }
}

View File

@@ -1,611 +0,0 @@
import { DebugLogger } from '@affine/debug';
import { remove } from 'lodash-es';
import { Observable, Subject } from 'rxjs';
import { diffUpdate, encodeStateVectorFromUpdate, mergeUpdates } from 'yjs';
import { LiveData } from '../../livedata';
import { throwIfAborted } from '../../utils';
import { AsyncPriorityQueue } from './async-priority-queue';
import { ClockMap } from './clock';
import type { DocEvent } from './event';
import type { DocServer } from './server';
import type { DocStorageInner } from './storage';
import { isEmptyUpdate } from './utils';
const logger = new DebugLogger('doc-engine:remote');

/** Unit of work scheduled per doc in the remote sync queue. */
type Job =
  | {
      // establish the doc's sync state with the server (first contact)
      type: 'connect';
      docId: string;
    }
  | {
      // push a locally committed update (identified by seqNum) to the server
      type: 'push';
      docId: string;
      update: Uint8Array;
      seqNum: number;
    }
  | {
      // pull the latest server state into local storage
      type: 'pull';
      docId: string;
    }
  | {
      // full bidirectional reconciliation with the server
      type: 'pullAndPush';
      docId: string;
    }
  | {
      // persist an update broadcast by the server and advance the server clock
      type: 'save';
      docId: string;
      update?: Uint8Array;
      serverClock: number;
    };
/** Mutable bookkeeping of the remote part; rebuilt on every retry cycle. */
export interface Status {
  docs: Set<string>;
  connectedDocs: Set<string>;
  jobDocQueue: AsyncPriorityQueue;
  jobMap: Map<string, Job[]>;
  serverClocks: ClockMap;
  syncing: boolean;
  retrying: boolean;
  errorMessage: string | null;
}

/** Aggregate sync progress across all docs; exposed via `engineState$`. */
export interface RemoteEngineState {
  total: number;
  syncing: number;
  retrying: boolean;
  errorMessage: string | null;
}

/** Per-doc sync state; exposed via `docState$`. */
export interface RemoteDocState {
  syncing: boolean;
  retrying: boolean;
  serverClock: number | null;
  errorMessage: string | null;
}
/**
 * Remote half of the doc engine: reconciles local storage with the
 * `DocServer`.
 *
 * `mainLoop` runs `retryLoop` until aborted; on any error all in-flight
 * state is discarded and the loop reconnects after 5 seconds. Per-doc jobs
 * are processed in a fixed precedence:
 * connect > pullAndPush > pull > push > save.
 */
export class DocEngineRemotePart {
  private readonly prioritySettings = new Map<string, number>();

  constructor(
    private readonly clientId: string,
    private readonly storage: DocStorageInner,
    private readonly server: DocServer
  ) {}

  private status: Status = {
    docs: new Set<string>(),
    connectedDocs: new Set<string>(),
    jobDocQueue: new AsyncPriorityQueue(),
    jobMap: new Map(),
    serverClocks: new ClockMap(new Map()),
    syncing: false,
    retrying: false,
    errorMessage: null,
  };

  // emits the updated docId, or `true` for engine-wide changes
  private readonly statusUpdatedSubject$ = new Subject<string | true>();

  engineState$ = LiveData.from<RemoteEngineState>(
    new Observable(subscribe => {
      const next = () => {
        if (!this.status.syncing) {
          // if syncing = false, jobMap is empty
          subscribe.next({
            total: this.status.docs.size,
            syncing: this.status.docs.size,
            retrying: this.status.retrying,
            errorMessage: this.status.errorMessage,
          });
        } else {
          const syncing = this.status.jobMap.size;
          subscribe.next({
            total: this.status.docs.size,
            syncing: syncing,
            retrying: this.status.retrying,
            errorMessage: this.status.errorMessage,
          });
        }
      };
      next();
      return this.statusUpdatedSubject$.subscribe(() => {
        next();
      });
    }),
    {
      syncing: 0,
      total: 0,
      retrying: false,
      errorMessage: null,
    }
  );

  /** Per-doc live sync state; see `RemoteDocState`. */
  docState$(docId: string) {
    return LiveData.from<RemoteDocState>(
      new Observable(subscribe => {
        const next = () => {
          subscribe.next({
            syncing:
              !this.status.connectedDocs.has(docId) ||
              this.status.jobMap.has(docId),
            serverClock: this.status.serverClocks.get(docId),
            retrying: this.status.retrying,
            errorMessage: this.status.errorMessage,
          });
        };
        next();
        return this.statusUpdatedSubject$.subscribe(updatedId => {
          if (updatedId === true || updatedId === docId) next();
        });
      }),
      { syncing: false, retrying: false, errorMessage: null, serverClock: null }
    );
  }

  /** Job handlers, keyed by job type; invoked from `retryLoop`. */
  readonly jobs = {
    // Decide how a doc first syncs: full pullAndPush when we may have
    // unpushed local changes, a plain pull when only the server is ahead.
    connect: async (docId: string, signal?: AbortSignal) => {
      const pushedSeqNum = await this.storage.loadDocSeqNumPushed(
        docId,
        signal
      );
      const seqNum = await this.storage.loadDocSeqNum(docId, signal);

      if (pushedSeqNum === null || pushedSeqNum !== seqNum) {
        await this.jobs.pullAndPush(docId, signal);
      } else {
        const pulled = await this.storage.loadDocServerClockPulled(docId);
        if (
          pulled === null ||
          pulled !== this.status.serverClocks.get(normalizeServerDocId(docId))
        ) {
          await this.jobs.pull(docId, signal);
        }
      }

      this.status.connectedDocs.add(docId);
      this.statusUpdatedSubject$.next(docId);
    },
    push: async (
      docId: string,
      jobs: (Job & { type: 'push' })[],
      signal?: AbortSignal
    ) => {
      if (this.status.connectedDocs.has(docId)) {
        const maxSeqNum = Math.max(...jobs.map(j => j.seqNum));
        const pushedSeqNum =
          (await this.storage.loadDocSeqNumPushed(docId, signal)) ?? 0;

        if (maxSeqNum - pushedSeqNum === jobs.length) {
          const merged = mergeUpdates(
            jobs.map(j => j.update).filter(update => !isEmptyUpdate(update))
          );
          if (!isEmptyUpdate(merged)) {
            const { serverClock } = await this.server.pushDoc(docId, merged);
            this.schedule({
              type: 'save',
              docId,
              serverClock,
            });
          }
          await this.storage.saveDocPushedSeqNum(
            docId,
            { add: jobs.length },
            signal
          );
        } else {
          // maybe other tab is modifying the doc, do full pull and push for safety
          await this.jobs.pullAndPush(docId, signal);
        }
      }
    },
    pullAndPush: async (docId: string, signal?: AbortSignal) => {
      const seqNum = await this.storage.loadDocSeqNum(docId, signal);
      const data = await this.storage.loadDocFromLocal(docId, signal);

      const stateVector =
        data && !isEmptyUpdate(data)
          ? encodeStateVectorFromUpdate(data)
          : new Uint8Array();
      const serverData = await this.server.pullDoc(docId, stateVector);

      if (serverData) {
        const {
          data: newData,
          stateVector: serverStateVector,
          serverClock,
        } = serverData;
        await this.storage.saveServerClock(
          new Map([[normalizeServerDocId(docId), serverClock]]),
          signal
        );
        this.actions.updateServerClock(
          normalizeServerDocId(docId),
          serverClock
        );
        await this.storage.commitDocAsServerUpdate(
          docId,
          newData,
          serverClock,
          signal
        );
        this.storage.eventBus.emit({
          type: 'ServerUpdateCommitted',
          docId,
          clientId: this.clientId,
          update: newData,
        });
        // push only what the server is missing, per its state vector
        const diff =
          data && serverStateVector && serverStateVector.length > 0
            ? diffUpdate(data, serverStateVector)
            : data;
        if (diff && !isEmptyUpdate(diff)) {
          const { serverClock } = await this.server.pushDoc(docId, diff);
          this.schedule({
            type: 'save',
            docId,
            serverClock,
          });
        }
        await this.storage.saveDocPushedSeqNum(docId, seqNum, signal);
      } else {
        // server has no copy; upload our full local state, if any
        if (data && !isEmptyUpdate(data)) {
          const { serverClock } = await this.server.pushDoc(docId, data);
          await this.storage.saveDocServerClockPulled(
            docId,
            serverClock,
            signal
          );
          await this.storage.saveServerClock(
            new Map([[normalizeServerDocId(docId), serverClock]]),
            signal
          );
          this.actions.updateServerClock(
            normalizeServerDocId(docId),
            serverClock
          );
        }
        await this.storage.saveDocPushedSeqNum(docId, seqNum, signal);
      }
    },
    pull: async (docId: string, signal?: AbortSignal) => {
      const data = await this.storage.loadDocFromLocal(docId, signal);

      const stateVector =
        data && !isEmptyUpdate(data)
          ? encodeStateVectorFromUpdate(data)
          : new Uint8Array();
      const serverDoc = await this.server.pullDoc(docId, stateVector);
      if (!serverDoc) {
        return;
      }
      const { data: newData, serverClock } = serverDoc;
      await this.storage.commitDocAsServerUpdate(
        docId,
        newData,
        serverClock,
        signal
      );
      this.storage.eventBus.emit({
        type: 'ServerUpdateCommitted',
        docId,
        clientId: this.clientId,
        update: newData,
      });
      await this.storage.saveServerClock(
        new Map([[normalizeServerDocId(docId), serverClock]]),
        signal
      );
      this.actions.updateServerClock(normalizeServerDocId(docId), serverClock);
    },
    save: async (
      docId: string,
      jobs: (Job & { type: 'save' })[],
      signal?: AbortSignal
    ) => {
      const serverClock = jobs.reduce((a, b) => Math.max(a, b.serverClock), 0);
      await this.storage.saveServerClock(
        new Map([[normalizeServerDocId(docId), serverClock]]),
        signal
      );
      this.actions.updateServerClock(normalizeServerDocId(docId), serverClock);
      if (this.status.connectedDocs.has(docId)) {
        const data = jobs
          .map(j => j.update)
          .filter((update): update is Uint8Array =>
            update ? !isEmptyUpdate(update) : false
          );
        const update = data.length > 0 ? mergeUpdates(data) : new Uint8Array();
        await this.storage.commitDocAsServerUpdate(
          docId,
          update,
          serverClock,
          signal
        );
        this.storage.eventBus.emit({
          type: 'ServerUpdateCommitted',
          docId,
          clientId: this.clientId,
          update,
        });
      }
    },
  };

  readonly actions = {
    /** Record a (monotonically increasing) server clock for a doc. */
    updateServerClock: (docId: string, serverClock: number) => {
      this.status.serverClocks.setIfBigger(docId, serverClock);
      this.statusUpdatedSubject$.next(docId);
    },
    /** Track a doc and queue its initial connect. */
    addDoc: (docId: string) => {
      if (!this.status.docs.has(docId)) {
        this.status.docs.add(docId);
        this.statusUpdatedSubject$.next(docId);
        this.schedule({
          type: 'connect',
          docId,
        });
      }
    },
  };

  readonly events: {
    [key in DocEvent['type']]?: (event: DocEvent & { type: key }) => void;
  } = {
    ClientUpdateCommitted: ({ clientId, docId, seqNum, update }) => {
      // only push updates committed by this client
      if (clientId !== this.clientId) {
        return;
      }
      this.schedule({
        type: 'push',
        docId,
        update,
        seqNum,
      });
    },
  };

  /**
   * Run `retryLoop` forever, resetting all transient state and waiting 5s
   * between attempts; exits only when `signal` aborts.
   */
  async mainLoop(signal?: AbortSignal) {
    // eslint-disable-next-line no-constant-condition
    while (true) {
      try {
        await this.retryLoop(signal);
      } catch (err) {
        if (signal?.aborted) {
          return;
        }
        logger.error('Remote sync error, retry in 5s', err);
        this.status.errorMessage =
          err instanceof Error ? err.message : `${err}`;
        this.statusUpdatedSubject$.next(true);
      } finally {
        // discard in-flight state; tracked docs and the last error survive
        this.status = {
          docs: this.status.docs,
          connectedDocs: new Set<string>(),
          jobDocQueue: new AsyncPriorityQueue(),
          jobMap: new Map(),
          serverClocks: new ClockMap(new Map()),
          syncing: false,
          retrying: true,
          errorMessage: this.status.errorMessage,
        };
        this.statusUpdatedSubject$.next(true);
      }
      await Promise.race([
        new Promise<void>(resolve => {
          setTimeout(resolve, 5 * 1000);
        }),
        new Promise((_, reject) => {
          // exit if manually stopped
          if (signal?.aborted) {
            reject(signal.reason);
          }
          signal?.addEventListener('abort', () => {
            reject(signal.reason);
          });
        }),
      ]);
    }
  }

  /**
   * One sync session: connect to the server, replay cached/remote server
   * clocks, then drain per-doc jobs until something fails or `signal`
   * aborts. Always disconnects the server on the way out.
   */
  async retryLoop(signal?: AbortSignal) {
    throwIfAborted(signal);
    const abort = new AbortController();
    if (signal) {
      const outerSignal = signal;
      outerSignal.addEventListener('abort', () => {
        // BUGFIX: the 'abort' listener receives an Event, not the reason;
        // forward the actual reason so MANUALLY_STOP checks keep working
        abort.abort(outerSignal.reason);
      });
    }
    signal = abort.signal;
    const disposes: (() => void)[] = [];
    try {
      disposes.push(
        this.storage.eventBus.on(event => {
          const handler = this.events[event.type];
          handler?.(event as any);
        })
      );
      throwIfAborted(signal);
      for (const doc of this.status.docs) {
        this.schedule({
          type: 'connect',
          docId: doc,
        });
      }
      logger.info('Remote sync started');
      this.status.syncing = true;
      this.statusUpdatedSubject$.next(true);
      this.server.onInterrupted(reason => {
        abort.abort(reason);
      });
      await Promise.race([
        this.server.waitForConnectingServer(signal),
        new Promise<void>((_, reject) => {
          setTimeout(() => {
            reject(new Error('Connect to server timeout'));
          }, 1000 * 30);
        }),
        new Promise((_, reject) => {
          signal?.addEventListener('abort', () => {
            // BUGFIX: reject with the abort reason, not the 'abort' Event
            reject(signal?.reason);
          });
        }),
      ]);
      // reset retrying flag after connected with server
      this.status.retrying = false;
      this.statusUpdatedSubject$.next(true);
      throwIfAborted(signal);
      disposes.push(
        await this.server.subscribeAllDocs(({ docId, data, serverClock }) => {
          this.schedule({
            type: 'save',
            docId: docId,
            serverClock,
            update: data,
          });
        })
      );
      const cachedClocks = await this.storage.loadServerClock(signal);
      for (const [id, v] of cachedClocks) {
        this.actions.updateServerClock(id, v);
      }
      const maxClockValue = this.status.serverClocks.max;
      const newClocks = await this.server.loadServerClock(maxClockValue);
      for (const [id, v] of newClocks) {
        this.actions.updateServerClock(id, v);
      }
      await this.storage.saveServerClock(newClocks, signal);
      // eslint-disable-next-line no-constant-condition
      while (true) {
        throwIfAborted(signal);
        const docId = await this.status.jobDocQueue.asyncPop(signal);
        // eslint-disable-next-line no-constant-condition
        while (true) {
          const jobs = this.status.jobMap.get(docId);
          if (!jobs || jobs.length === 0) {
            this.status.jobMap.delete(docId);
            this.statusUpdatedSubject$.next(docId);
            break;
          }
          // drain jobs in precedence order; each pass removes one kind
          const connect = remove(jobs, j => j.type === 'connect');
          if (connect && connect.length > 0) {
            await this.jobs.connect(docId, signal);
            continue;
          }
          const pullAndPush = remove(jobs, j => j.type === 'pullAndPush');
          if (pullAndPush && pullAndPush.length > 0) {
            await this.jobs.pullAndPush(docId, signal);
            continue;
          }
          const pull = remove(jobs, j => j.type === 'pull');
          if (pull && pull.length > 0) {
            await this.jobs.pull(docId, signal);
            continue;
          }
          const push = remove(jobs, j => j.type === 'push');
          if (push && push.length > 0) {
            await this.jobs.push(
              docId,
              push as (Job & { type: 'push' })[],
              signal
            );
            continue;
          }
          const save = remove(jobs, j => j.type === 'save');
          if (save && save.length > 0) {
            await this.jobs.save(
              docId,
              save as (Job & { type: 'save' })[],
              signal
            );
            continue;
          }
        }
      }
    } finally {
      for (const dispose of disposes) {
        dispose();
      }
      try {
        this.server.disconnectServer();
      } catch (err) {
        logger.error('Error on disconnect server', err);
      }
      this.status.syncing = false;
      logger.info('Remote sync ended');
    }
  }

  /** Queue a job for its doc and wake the job loop. */
  schedule(job: Job) {
    const priority = this.prioritySettings.get(job.docId) ?? 0;
    this.status.jobDocQueue.push(job.docId, priority);
    const existingJobs = this.status.jobMap.get(job.docId) ?? [];
    existingJobs.push(job);
    this.status.jobMap.set(job.docId, existingJobs);
    this.statusUpdatedSubject$.next(job.docId);
  }

  setPriority(docId: string, priority: number) {
    this.prioritySettings.set(docId, priority);
    this.status.jobDocQueue.updatePriority(docId, priority);
  }
}
// use normalized id in server clock
function normalizeServerDocId(raw: string) {
enum DocVariant {
Workspace = 'workspace',
Page = 'page',
Space = 'space',
Settings = 'settings',
Unknown = 'unknown',
}
try {
if (!raw.length) {
throw new Error('Invalid Empty Doc ID');
}
let parts = raw.split(':');
if (parts.length > 3) {
// special adapt case `wsId:space:page:pageId`
if (parts[1] === DocVariant.Space && parts[2] === DocVariant.Page) {
parts = [parts[0], DocVariant.Space, parts[3]];
} else {
throw new Error(`Invalid format of Doc ID: ${raw}`);
}
} else if (parts.length === 2) {
// `${variant}:${guid}`
throw new Error('not supported');
} else if (parts.length === 1) {
// ${ws} or ${pageId}
parts = ['', DocVariant.Unknown, parts[0]];
}
const docId = parts.at(2);
if (!docId) {
throw new Error('ID is required');
}
return docId;
} catch (err) {
logger.error('Error on normalize docId ' + raw, err);
return raw;
}
}

View File

@@ -1,28 +0,0 @@
/** Remote endpoint contract the doc engine syncs against. */
export interface DocServer {
  /**
   * Fetch the server's updates for a doc, given the client's state vector.
   * Resolves null when the server has no copy of the doc.
   */
  pullDoc(
    docId: string,
    stateVector: Uint8Array
  ): Promise<{
    data: Uint8Array;
    serverClock: number;
    stateVector?: Uint8Array;
  } | null>;
  /** Submit a local update; resolves with the resulting server clock. */
  pushDoc(docId: string, data: Uint8Array): Promise<{ serverClock: number }>;
  // presumably returns docId -> clock for clocks newer than `after` —
  // TODO confirm against server implementation
  loadServerClock(after: number): Promise<Map<string, number>>;
  /** Subscribe to update broadcasts for all docs; resolves an unsubscriber. */
  subscribeAllDocs(
    cb: (updates: {
      docId: string;
      data: Uint8Array;
      serverClock: number;
    }) => void
  ): Promise<() => void>;
  waitForConnectingServer(signal: AbortSignal): Promise<void>;
  disconnectServer(): void;
  /** Register a callback invoked when the connection is interrupted. */
  onInterrupted(cb: (reason: string) => void): void;
  dispose?(): void;
}

View File

@@ -1,361 +0,0 @@
import { mergeUpdates } from 'yjs';
import type { ByteKV, Memento } from '../../storage';
import { MemoryMemento, ReadonlyByteKV, wrapMemento } from '../../storage';
import { AsyncLock, throwIfAborted } from '../../utils';
import type { DocEventBus } from '.';
import { DocEventBusInner, MemoryDocEventBus } from './event';
import { isEmptyUpdate } from './utils';
/**
 * Raw persistence contract for the doc engine: an event bus plus three byte
 * KV namespaces — doc updates, sync metadata, and cached server clocks.
 */
export interface DocStorage {
  eventBus: DocEventBus;
  doc: ByteKV;
  syncMetadata: ByteKV;
  serverClock: ByteKV;
}
/** Key builders for per-doc entries in the syncMetadata namespace. */
const Keys = {
  SeqNum: (docId: string) => `${docId}:seqNum`,
  SeqNumPushed: (docId: string) => `${docId}:seqNumPushed`,
  ServerClockPulled: (docId: string) => `${docId}:serverClockPulled`,
  UpdatedTime: (docId: string) => `${docId}:updateTime`,
};
/** Codecs for values stored in the byte KV namespaces. */
const Values = {
  // big-endian unsigned 64-bit integer
  UInt64: {
    parse: (buffer: Uint8Array) => {
      // Respect the view's offset/length: `buffer` may be a subarray into a
      // larger ArrayBuffer, in which case `buffer.buffer` alone would read
      // the wrong bytes.
      const view = new DataView(
        buffer.buffer,
        buffer.byteOffset,
        buffer.byteLength
      );
      return Number(view.getBigUint64(0, false));
    },
    serialize: (value: number) => {
      const buffer = new ArrayBuffer(8);
      const view = new DataView(buffer);
      view.setBigUint64(0, BigInt(value), false);
      return new Uint8Array(buffer);
    },
  },
};
/**
 * Typed wrapper around a raw `DocStorage`: adds sync-metadata accessors
 * (sequence numbers, pulled server clocks) and update-commit helpers on top
 * of the raw byte KV namespaces.
 */
export class DocStorageInner {
  public readonly eventBus = new DocEventBusInner(this.behavior.eventBus);

  constructor(public readonly behavior: DocStorage) {}

  /** Read all cached server clocks as a docId -> clock map. */
  async loadServerClock(signal?: AbortSignal): Promise<Map<string, number>> {
    throwIfAborted(signal);
    const list = await this.behavior.serverClock.keys();
    const map = new Map<string, number>();
    for (const key of list) {
      const docId = key;
      const value = await this.behavior.serverClock.get(key);
      if (value) {
        map.set(docId, Values.UInt64.parse(value));
      }
    }
    return map;
  }

  /** Persist server clocks; each entry is only written if it is newer. */
  async saveServerClock(map: Map<string, number>, signal?: AbortSignal) {
    throwIfAborted(signal);
    await this.behavior.serverClock.transaction(async transaction => {
      for (const [docId, value] of map) {
        const key = docId;
        const oldBuffer = await transaction.get(key);
        const old = oldBuffer ? Values.UInt64.parse(oldBuffer) : 0;
        if (old < value) {
          await transaction.set(key, Values.UInt64.serialize(value));
        }
      }
    });
  }

  /** Current local commit sequence number for a doc (0 when absent). */
  async loadDocSeqNum(docId: string, signal?: AbortSignal) {
    throwIfAborted(signal);
    const bytes = await this.behavior.syncMetadata.get(Keys.SeqNum(docId));
    if (bytes === null) {
      return 0;
    }
    return Values.UInt64.parse(bytes);
  }

  /**
   * Advance the commit sequence number. Pass `true` to increment by one;
   * pass a number to raise it to that value (never decreases). Returns the
   * stored value.
   */
  async saveDocSeqNum(
    docId: string,
    seqNum: number | true,
    signal?: AbortSignal
  ) {
    throwIfAborted(signal);
    return await this.behavior.syncMetadata.transaction(async transaction => {
      const key = Keys.SeqNum(docId);
      const oldBytes = await transaction.get(key);
      const old = oldBytes ? Values.UInt64.parse(oldBytes) : 0;
      if (seqNum === true) {
        await transaction.set(key, Values.UInt64.serialize(old + 1));
        return old + 1;
      }
      if (old < seqNum) {
        await transaction.set(key, Values.UInt64.serialize(seqNum));
        return seqNum;
      }
      return old;
    });
  }

  /** Sequence number already pushed to the server; null when never pushed. */
  async loadDocSeqNumPushed(docId: string, signal?: AbortSignal) {
    throwIfAborted(signal);
    const bytes = await this.behavior.syncMetadata.get(
      Keys.SeqNumPushed(docId)
    );
    if (bytes === null) {
      return null;
    }
    return Values.UInt64.parse(bytes);
  }

  /**
   * Record pushed progress: `{ add: n }` increments, a plain number raises
   * the stored value (never decreases).
   */
  async saveDocPushedSeqNum(
    docId: string,
    seqNum: number | { add: number },
    signal?: AbortSignal
  ) {
    throwIfAborted(signal);
    await this.behavior.syncMetadata.transaction(async transaction => {
      const key = Keys.SeqNumPushed(docId);
      const oldBytes = await transaction.get(key);
      const old = oldBytes ? Values.UInt64.parse(oldBytes) : null;
      if (typeof seqNum === 'object') {
        return transaction.set(
          key,
          Values.UInt64.serialize((old ?? 0) + seqNum.add)
        );
      }
      if (old === null || old < seqNum) {
        return transaction.set(key, Values.UInt64.serialize(seqNum));
      }
    });
  }

  /** Last server clock pulled for a doc; null when never pulled. */
  async loadDocServerClockPulled(docId: string, signal?: AbortSignal) {
    throwIfAborted(signal);
    const bytes = await this.behavior.syncMetadata.get(
      Keys.ServerClockPulled(docId)
    );
    if (bytes === null) {
      return null;
    }
    // NOTE(review): `bytes` is non-null here, so this ternary always takes
    // the parse branch; the `: 0` arm is dead code.
    return bytes ? Values.UInt64.parse(bytes) : 0;
  }

  /** Record the pulled server clock; only written if newer. */
  async saveDocServerClockPulled(
    docId: string,
    serverClock: number,
    signal?: AbortSignal
  ) {
    throwIfAborted(signal);
    await this.behavior.syncMetadata.transaction(async transaction => {
      const oldBytes = await transaction.get(Keys.ServerClockPulled(docId));
      const old = oldBytes ? Values.UInt64.parse(oldBytes) : null;
      if (old === null || old < serverClock) {
        await transaction.set(
          Keys.ServerClockPulled(docId),
          Values.UInt64.serialize(serverClock)
        );
      }
    });
  }

  /** Raw merged update bytes for a doc from local storage. */
  async loadDocFromLocal(docId: string, signal?: AbortSignal) {
    throwIfAborted(signal);
    return await this.behavior.doc.get(docId);
  }

  /**
   * Confirm that server updates are applied in the order they occur!!!
   */
  async commitDocAsServerUpdate(
    docId: string,
    update: Uint8Array,
    serverClock: number,
    signal?: AbortSignal
  ) {
    throwIfAborted(signal);
    await this.behavior.doc.transaction(async tx => {
      const data = await tx.get(docId);
      // merge with the existing blob, skipping empty updates on either side
      await tx.set(
        docId,
        data && !isEmptyUpdate(data)
          ? !isEmptyUpdate(update)
            ? mergeUpdates([data, update])
            : data
          : update
      );
    });
    await this.saveDocServerClockPulled(docId, serverClock);
  }

  /**
   * Merge a client update into the stored blob and bump the commit sequence
   * number; returns the new sequence number.
   */
  async commitDocAsClientUpdate(
    docId: string,
    update: Uint8Array,
    signal?: AbortSignal
  ) {
    throwIfAborted(signal);
    await this.behavior.doc.transaction(async tx => {
      const data = await tx.get(docId);
      await tx.set(
        docId,
        data && !isEmptyUpdate(data)
          ? !isEmptyUpdate(update)
            ? mergeUpdates([data, update])
            : data
          : update
      );
    });
    return await this.saveDocSeqNum(docId, true);
  }

  /** Drop all sequence-number / pulled-clock bookkeeping. */
  clearSyncMetadata() {
    return this.behavior.syncMetadata.clear();
  }

  /** Drop all cached server clocks. */
  async clearServerClock() {
    return this.behavior.serverClock.clear();
  }
}
/** Immutable `DocStorage` seeded from an in-memory docId -> update map. */
export class ReadonlyStorage implements DocStorage {
  constructor(
    private readonly map: {
      [key: string]: Uint8Array;
    }
  ) {}

  eventBus = new MemoryDocEventBus();
  doc = new ReadonlyByteKV(new Map(Object.entries(this.map)));
  serverClock = new ReadonlyByteKV();
  syncMetadata = new ReadonlyByteKV();
}
/**
 * In-memory `DocStorage` backed by a `Memento`. All three KV namespaces
 * share one async lock, so every transaction is serialized.
 */
export class MemoryStorage implements DocStorage {
  constructor(private readonly memo: Memento = new MemoryMemento()) {}

  eventBus = new MemoryDocEventBus();
  lock = new AsyncLock();
  readonly docDb = wrapMemento(this.memo, 'doc:');
  readonly syncMetadataDb = wrapMemento(this.memo, 'syncMetadata:');
  readonly serverClockDb = wrapMemento(this.memo, 'serverClock:');

  // NOTE(review): the three ByteKV wrappers below are identical except for
  // the backing db; candidates for extraction into a shared factory.
  readonly doc = {
    transaction: async cb => {
      // hold the shared lock for the whole transaction
      using _lock = await this.lock.acquire();
      return await cb({
        get: async key => {
          return this.docDb.get(key) ?? null;
        },
        set: async (key, value) => {
          this.docDb.set(key, value);
        },
        keys: async () => {
          return Array.from(this.docDb.keys());
        },
        clear: () => {
          this.docDb.clear();
        },
        del: key => {
          this.docDb.del(key);
        },
      });
    },
    // single-op helpers are implemented as one-shot transactions
    get(key) {
      return this.transaction(async tx => tx.get(key));
    },
    set(key, value) {
      return this.transaction(async tx => tx.set(key, value));
    },
    keys() {
      return this.transaction(async tx => tx.keys());
    },
    clear() {
      return this.transaction(async tx => tx.clear());
    },
    del(key) {
      return this.transaction(async tx => tx.del(key));
    },
  } satisfies ByteKV;

  readonly syncMetadata = {
    transaction: async cb => {
      using _lock = await this.lock.acquire();
      return await cb({
        get: async key => {
          return this.syncMetadataDb.get(key) ?? null;
        },
        set: async (key, value) => {
          this.syncMetadataDb.set(key, value);
        },
        keys: async () => {
          return Array.from(this.syncMetadataDb.keys());
        },
        clear: () => {
          this.syncMetadataDb.clear();
        },
        del: key => {
          this.syncMetadataDb.del(key);
        },
      });
    },
    get(key) {
      return this.transaction(async tx => tx.get(key));
    },
    set(key, value) {
      return this.transaction(async tx => tx.set(key, value));
    },
    keys() {
      return this.transaction(async tx => tx.keys());
    },
    clear() {
      return this.transaction(async tx => tx.clear());
    },
    del(key) {
      return this.transaction(async tx => tx.del(key));
    },
  } satisfies ByteKV;

  readonly serverClock = {
    transaction: async cb => {
      using _lock = await this.lock.acquire();
      return await cb({
        get: async key => {
          return this.serverClockDb.get(key) ?? null;
        },
        set: async (key, value) => {
          this.serverClockDb.set(key, value);
        },
        keys: async () => {
          return Array.from(this.serverClockDb.keys());
        },
        clear: () => {
          this.serverClockDb.clear();
        },
        del: key => {
          this.serverClockDb.del(key);
        },
      });
    },
    get(key) {
      return this.transaction(async tx => tx.get(key));
    },
    set(key, value) {
      return this.transaction(async tx => tx.set(key, value));
    },
    keys() {
      return this.transaction(async tx => tx.keys());
    },
    clear() {
      return this.transaction(async tx => tx.clear());
    },
    del(key) {
      return this.transaction(async tx => tx.del(key));
    },
  } satisfies ByteKV;
}

View File

@@ -1,6 +0,0 @@
/**
 * Returns true when a binary doc update carries no content.
 *
 * Two encodings count as empty: a zero-length buffer, and the two-byte
 * sequence `[0, 0]` (what Yjs produces when encoding an update with no
 * structs and an empty delete set).
 */
export function isEmptyUpdate(binary: Uint8Array) {
  if (binary.byteLength === 0) {
    return true;
  }
  return binary.byteLength === 2 && binary[0] === 0 && binary[1] === 0;
}

View File

@@ -1,9 +1,3 @@
export type { AwarenessConnection } from './awareness';
export { AwarenessEngine } from './awareness';
export type { BlobStatus, BlobStorage } from './blob/blob';
export { BlobEngine, EmptyBlobStorage } from './blob/blob';
export { BlobStorageOverCapacity } from './blob/error';
export * from './doc';
export * from './indexer'; export * from './indexer';
export { export {
IndexedDBIndex, IndexedDBIndex,

View File

@@ -48,7 +48,10 @@ export class JobRunner<J> {
// TODO: retry logic // TODO: retry logic
await this.queue.return(jobs); await this.queue.return(jobs);
} }
logger.error('Error processing jobs', err); logger.error(
'Error processing jobs',
err instanceof Error ? (err.stack ?? err.message) : err
);
} }
} else { } else {
await new Promise(resolve => setTimeout(resolve, 1000)); await new Promise(resolve => setTimeout(resolve, 1000));

View File

@@ -9,6 +9,7 @@
"./worker/client": "./src/worker/client.ts", "./worker/client": "./src/worker/client.ts",
"./worker/consumer": "./src/worker/consumer.ts", "./worker/consumer": "./src/worker/consumer.ts",
"./idb": "./src/impls/idb/index.ts", "./idb": "./src/impls/idb/index.ts",
"./broadcast-channel": "./src/impls/broadcast-channel/index.ts",
"./idb/v1": "./src/impls/idb/v1/index.ts", "./idb/v1": "./src/impls/idb/v1/index.ts",
"./cloud": "./src/impls/cloud/index.ts", "./cloud": "./src/impls/cloud/index.ts",
"./sqlite": "./src/impls/sqlite/index.ts", "./sqlite": "./src/impls/sqlite/index.ts",

View File

@@ -30,7 +30,7 @@ test('doc', async () => {
const frontend1 = new DocFrontend(docStorage, DocSyncImpl.dummy); const frontend1 = new DocFrontend(docStorage, DocSyncImpl.dummy);
frontend1.start(); frontend1.start();
frontend1.addDoc(doc1); frontend1.connectDoc(doc1);
await vitest.waitFor(async () => { await vitest.waitFor(async () => {
const doc = await docStorage.getDoc('test-doc'); const doc = await docStorage.getDoc('test-doc');
expectYjsEqual(doc!.bin, { expectYjsEqual(doc!.bin, {
@@ -45,7 +45,7 @@ test('doc', async () => {
}); });
const frontend2 = new DocFrontend(docStorage, DocSyncImpl.dummy); const frontend2 = new DocFrontend(docStorage, DocSyncImpl.dummy);
frontend2.start(); frontend2.start();
frontend2.addDoc(doc2); frontend2.connectDoc(doc2);
await vitest.waitFor(async () => { await vitest.waitFor(async () => {
expectYjsEqual(doc2, { expectYjsEqual(doc2, {
@@ -94,8 +94,8 @@ test('awareness', async () => {
}, },
}); });
const frontend = new AwarenessFrontend(sync); const frontend = new AwarenessFrontend(sync);
frontend.connect(awarenessA); frontend.connectAwareness(awarenessA);
frontend.connect(awarenessB); frontend.connectAwareness(awarenessB);
} }
{ {
const sync = new AwarenessSyncImpl({ const sync = new AwarenessSyncImpl({
@@ -105,7 +105,7 @@ test('awareness', async () => {
}, },
}); });
const frontend = new AwarenessFrontend(sync); const frontend = new AwarenessFrontend(sync);
frontend.connect(awarenessC); frontend.connectAwareness(awarenessC);
} }
awarenessA.setLocalState({ awarenessA.setLocalState({

View File

@@ -0,0 +1,200 @@
import { expect, test, vitest } from 'vitest';
import { AutoReconnectConnection } from '../connection';
// Exercises the full connect/disconnect lifecycle of AutoReconnectConnection:
// ref-counted connect/disconnect pairing, aborting an in-flight connect when
// disconnect arrives first, and the fallback cleanup path when a doConnect
// implementation ignores the abort signal.
test('connect and disconnect', async () => {
  class TestConnection extends AutoReconnectConnection<{
    disconnect: () => void;
  }> {
    // Counters observed by the assertions below.
    connectCount = 0; // times doConnect was entered
    abortCount = 0; // times a pending connect promise was rejected
    disconnectCount = 0; // times the established handle was disconnected
    // When true, doConnect stops listening for abort, simulating a
    // non-cooperative implementation; the base class must then fall back
    // to disconnecting the late-arriving handle itself.
    notListenAbort = false;
    override async doConnect(signal?: AbortSignal) {
      this.connectCount++;
      // Resolve after a 300ms delay so the test can race disconnect()
      // against a connect that is still in flight.
      return new Promise<{ disconnect: () => void }>((resolve, reject) => {
        setTimeout(() => {
          resolve({
            disconnect: () => {
              this.disconnectCount++;
            },
          });
        }, 300);
        if (!this.notListenAbort) {
          // NOTE(review): the listener argument here is the abort Event,
          // not the abort reason — confirm whether signal.reason was
          // intended. The test only counts rejections, so either works.
          signal?.addEventListener('abort', reason => {
            reject(reason);
          });
        }
      }).catch(err => {
        this.abortCount++;
        throw err;
      });
    }
    override doDisconnect(t: { disconnect: () => void }) {
      return t.disconnect();
    }
  }
  const connection = new TestConnection();
  // Simple connect: one doConnect call, ends up 'connected'.
  connection.connect();
  await vitest.waitFor(() => {
    expect(connection.connectCount).toBe(1);
    expect(connection.disconnectCount).toBe(0);
    expect(connection.abortCount).toBe(0);
    expect(connection.status).toBe('connected');
  });
  // Matching disconnect tears the handle down and closes the connection.
  connection.disconnect();
  await vitest.waitFor(() => {
    expect(connection.connectCount).toBe(1);
    expect(connection.disconnectCount).toBe(1);
    expect(connection.abortCount).toBe(0);
    expect(connection.status).toBe('closed');
  });
  // connect twice
  // Ref counting: two connect() calls still trigger only one doConnect.
  connection.connect();
  connection.connect();
  await vitest.waitFor(() => {
    expect(connection.connectCount).toBe(2);
    expect(connection.disconnectCount).toBe(1);
    expect(connection.abortCount).toBe(0);
    expect(connection.status).toBe('connected');
  });
  // ...and two disconnect() calls produce a single teardown.
  connection.disconnect();
  connection.disconnect();
  await vitest.waitFor(() => {
    expect(connection.connectCount).toBe(2);
    expect(connection.disconnectCount).toBe(2);
    expect(connection.abortCount).toBe(0);
    expect(connection.status).toBe('closed');
  });
  // calling connect disconnect consecutively, the previous connect call will be aborted.
  connection.connect();
  connection.disconnect();
  await vitest.waitFor(() => {
    expect(connection.connectCount).toBe(3);
    expect(connection.disconnectCount).toBe(2);
    expect(connection.abortCount).toBe(1);
    expect(connection.status).toBe('closed');
  });
  // Two rapid connect/disconnect pairs: both in-flight connects abort,
  // no handle is ever established, so disconnectCount stays put.
  connection.connect();
  connection.disconnect();
  connection.connect();
  connection.disconnect();
  await vitest.waitFor(() => {
    expect(connection.connectCount).toBe(5);
    expect(connection.disconnectCount).toBe(2);
    expect(connection.abortCount).toBe(3);
    expect(connection.status).toBe('closed');
  });
  // if connection is not listening to abort event, disconnect will be called
  // The promise resolves anyway (abort was ignored), so the base class
  // must dispose the late handle via doDisconnect — hence disconnectCount
  // advances by 2 while abortCount stays at 3.
  connection.notListenAbort = true;
  connection.connect();
  connection.disconnect();
  connection.connect();
  connection.disconnect();
  await vitest.waitFor(() => {
    expect(connection.connectCount).toBe(7);
    expect(connection.disconnectCount).toBe(4);
    expect(connection.abortCount).toBe(3);
    expect(connection.status).toBe('closed');
  });
});
// Verifies automatic retry after failed connection attempts: doConnect
// throws twice, the connection surfaces each error via `status`/`error`,
// and the third attempt (after retryDelay) succeeds and clears the error.
test('retry when connect failed', async () => {
  class TestConnection extends AutoReconnectConnection {
    // Short retry delay to keep the test fast.
    override retryDelay = 300;
    connectCount = 0;
    override async doConnect() {
      this.connectCount++;
      // Fail the first two attempts; succeed on the third.
      if (this.connectCount === 3) {
        return { hello: 'world' };
      }
      throw new Error('not connected, count: ' + this.connectCount);
    }
    override doDisconnect() {
      return Promise.resolve();
    }
  }
  const connection = new TestConnection();
  connection.connect();
  // Attempt 1 fails: status is 'error' and the thrown error is exposed.
  await vitest.waitFor(() => {
    expect(connection.connectCount).toBe(1);
    expect(connection.status).toBe('error');
    expect(connection.error?.message).toContain('not connected, count: 1');
  });
  // Attempt 2 (after retryDelay) fails too, replacing the previous error.
  await vitest.waitFor(() => {
    expect(connection.connectCount).toBe(2);
    expect(connection.status).toBe('error');
    expect(connection.error?.message).toBe('not connected, count: 2');
  });
  // Attempt 3 succeeds; the stored error is cleared on 'connected'.
  await vitest.waitFor(() => {
    expect(connection.connectCount).toBe(3);
    expect(connection.status).toBe('connected');
    expect(connection.error).toBeUndefined();
  });
});
// Verifies recovery after a runtime error on an already-established
// connection: setting `error` should disconnect the current handle, report
// 'error' status, then automatically reconnect after retryDelay.
test('retry when error', async () => {
  class TestConnection extends AutoReconnectConnection {
    // Short retry delay to keep the test fast.
    override retryDelay = 300;
    connectCount = 0;
    disconnectCount = 0;
    override async doConnect() {
      this.connectCount++;
      return {
        hello: 'world',
      };
    }
    override doDisconnect(conn: any) {
      this.disconnectCount++;
      // The handle passed back for teardown must be the exact value that
      // doConnect produced.
      expect(conn).toEqual({
        hello: 'world',
      });
    }
    // Simulates a runtime failure; assigning to the protected `error`
    // setter is presumably what triggers the base class's error handling
    // and reconnect — confirm against AutoReconnectConnection.
    triggerError(error: Error) {
      this.error = error;
    }
  }
  const connection = new TestConnection();
  connection.connect();
  await vitest.waitFor(() => {
    expect(connection.connectCount).toBe(1);
    expect(connection.status).toBe('connected');
  });
  connection.triggerError(new Error('test error'));
  // The live handle is torn down and the error is surfaced.
  await vitest.waitFor(() => {
    expect(connection.connectCount).toBe(1);
    expect(connection.disconnectCount).toBe(1);
    expect(connection.status).toBe('error');
    expect(connection.error?.message).toBe('test error');
  });
  // waitfor reconnect
  await vitest.waitFor(() => {
    expect(connection.connectCount).toBe(2);
    expect(connection.disconnectCount).toBe(1);
    expect(connection.status).toBe('connected');
    expect(connection.error).toBeUndefined();
  });
});

View File

@@ -1,5 +1,6 @@
import EventEmitter2 from 'eventemitter2'; import EventEmitter2 from 'eventemitter2';
import { throttle } from 'lodash-es';
import { MANUALLY_STOP } from '../utils/throw-if-aborted';
export type ConnectionStatus = export type ConnectionStatus =
| 'idle' | 'idle'
@@ -10,6 +11,7 @@ export type ConnectionStatus =
export interface Connection<T = any> { export interface Connection<T = any> {
readonly status: ConnectionStatus; readonly status: ConnectionStatus;
readonly error?: Error;
readonly inner: T; readonly inner: T;
connect(): void; connect(): void;
disconnect(): void; disconnect(): void;
@@ -23,16 +25,15 @@ export abstract class AutoReconnectConnection<T = any>
implements Connection<T> implements Connection<T>
{ {
private readonly event = new EventEmitter2(); private readonly event = new EventEmitter2();
private _inner: T | null = null; private _inner: T | undefined = undefined;
private _status: ConnectionStatus = 'idle'; private _status: ConnectionStatus = 'idle';
protected error?: Error; private _error: Error | undefined = undefined;
retryDelay = 3000;
private refCount = 0; private refCount = 0;
private _enableAutoReconnect = false;
private connectingAbort?: AbortController; private connectingAbort?: AbortController;
private reconnectingAbort?: AbortController;
constructor() { constructor() {}
this.autoReconnect();
}
get shareId(): string | undefined { get shareId(): string | undefined {
return undefined; return undefined;
@@ -43,7 +44,7 @@ export abstract class AutoReconnectConnection<T = any>
} }
get inner(): T { get inner(): T {
if (!this._inner) { if (this._inner === undefined) {
throw new Error( throw new Error(
`Connection ${this.constructor.name} has not been established.` `Connection ${this.constructor.name} has not been established.`
); );
@@ -52,7 +53,7 @@ export abstract class AutoReconnectConnection<T = any>
return this._inner; return this._inner;
} }
protected set inner(inner: T | null) { private set inner(inner: T | undefined) {
this._inner = inner; this._inner = inner;
} }
@@ -60,12 +61,23 @@ export abstract class AutoReconnectConnection<T = any>
return this._status; return this._status;
} }
protected setStatus(status: ConnectionStatus, error?: Error) { get error() {
const shouldEmit = status !== this._status || error !== this.error; return this._error;
}
protected set error(error: Error | undefined) {
this.handleError(error);
}
private setStatus(status: ConnectionStatus, error?: Error) {
const shouldEmit = status !== this._status || error !== this._error;
this._status = status; this._status = status;
this.error = error; // we only clear-up error when status is connected
if (error || status === 'connected') {
this._error = error;
}
if (shouldEmit) { if (shouldEmit) {
this.emitStatusChanged(status, error); this.emitStatusChanged(status, this._error);
} }
} }
@@ -73,15 +85,15 @@ export abstract class AutoReconnectConnection<T = any>
protected abstract doDisconnect(conn: T): void; protected abstract doDisconnect(conn: T): void;
private innerConnect() { private innerConnect() {
if (this.status === 'idle' || this.status === 'error') { if (this.status !== 'connecting') {
this._enableAutoReconnect = true;
this.setStatus('connecting'); this.setStatus('connecting');
this.connectingAbort = new AbortController(); this.connectingAbort = new AbortController();
this.doConnect(this.connectingAbort.signal) const signal = this.connectingAbort.signal;
this.doConnect(signal)
.then(value => { .then(value => {
if (!this.connectingAbort?.signal.aborted) { if (!signal.aborted) {
this.setStatus('connected');
this._inner = value; this._inner = value;
this.setStatus('connected');
} else { } else {
try { try {
this.doDisconnect(value); this.doDisconnect(value);
@@ -91,14 +103,45 @@ export abstract class AutoReconnectConnection<T = any>
} }
}) })
.catch(error => { .catch(error => {
if (!this.connectingAbort?.signal.aborted) { if (!signal.aborted) {
console.error('failed to connect', error); console.error('failed to connect', error);
this.setStatus('error', error as any); this.handleError(error as any);
} }
}); });
} }
} }
private innerDisconnect() {
this.connectingAbort?.abort(MANUALLY_STOP);
this.reconnectingAbort?.abort(MANUALLY_STOP);
try {
if (this._inner) {
this.doDisconnect(this._inner);
}
} catch (error) {
console.error('failed to disconnect', error);
}
this.reconnectingAbort = undefined;
this.connectingAbort = undefined;
this._inner = undefined;
}
private handleError(reason?: Error) {
// on error
console.error('connection error, will reconnect', reason);
this.innerDisconnect();
this.setStatus('error', reason);
// reconnect
this.reconnectingAbort = new AbortController();
const signal = this.reconnectingAbort.signal;
setTimeout(() => {
if (!signal.aborted) {
this.innerConnect();
}
}, this.retryDelay);
}
connect() { connect() {
this.refCount++; this.refCount++;
if (this.refCount === 1) { if (this.refCount === 1) {
@@ -106,36 +149,16 @@ export abstract class AutoReconnectConnection<T = any>
} }
} }
disconnect() { disconnect(force?: boolean) {
this.refCount--; if (force) {
if (this.refCount === 0) { this.refCount = 0;
this._enableAutoReconnect = false; } else {
this.connectingAbort?.abort(); this.refCount = Math.max(this.refCount - 1, 0);
try { }
if (this._inner) { if (this.refCount === 0) {
this.doDisconnect(this._inner); this.innerDisconnect();
} this.setStatus('closed');
} catch (error) {
console.error('failed to disconnect', error);
}
this.setStatus('closed');
this._inner = null;
} }
}
private autoReconnect() {
// TODO:
// - maximum retry count
// - dynamic sleep time (attempt < 3 ? 1s : 1min)?
this.onStatusChanged(
throttle(() => {
() => {
if (this._enableAutoReconnect) {
this.innerConnect();
}
};
}, 1000)
);
} }
waitForConnected(signal?: AbortSignal) { waitForConnected(signal?: AbortSignal) {

View File

@@ -13,7 +13,7 @@ type AwarenessChanges = Record<'added' | 'updated' | 'removed', number[]>;
export class AwarenessFrontend { export class AwarenessFrontend {
constructor(private readonly sync: AwarenessSync) {} constructor(private readonly sync: AwarenessSync) {}
connect(awareness: Awareness) { connectAwareness(awareness: Awareness) {
const uniqueId = nanoid(); const uniqueId = nanoid();
const handleAwarenessUpdate = ( const handleAwarenessUpdate = (
changes: AwarenessChanges, changes: AwarenessChanges,
@@ -27,7 +27,6 @@ export class AwarenessFrontend {
); );
const update = encodeAwarenessUpdate(awareness, changedClients); const update = encodeAwarenessUpdate(awareness, changedClients);
this.sync this.sync
.update( .update(
{ {

View File

@@ -1,7 +1,14 @@
import { groupBy } from 'lodash-es'; import { groupBy } from 'lodash-es';
import { nanoid } from 'nanoid'; import { nanoid } from 'nanoid';
import type { Subscription } from 'rxjs'; import type { Subscription } from 'rxjs';
import { combineLatest, map, Observable, Subject } from 'rxjs'; import {
combineLatest,
map,
Observable,
ReplaySubject,
share,
Subject,
} from 'rxjs';
import { import {
applyUpdate, applyUpdate,
type Doc as YDoc, type Doc as YDoc,
@@ -173,7 +180,10 @@ export class DocFrontend {
synced: sync.synced, synced: sync.synced,
syncRetrying: sync.retrying, syncRetrying: sync.retrying,
syncErrorMessage: sync.errorMessage, syncErrorMessage: sync.errorMessage,
})) })),
share({
connector: () => new ReplaySubject(1),
})
) satisfies Observable<DocFrontendState>; ) satisfies Observable<DocFrontendState>;
start() { start() {
@@ -241,19 +251,11 @@ export class DocFrontend {
} }
/** /**
* Add a doc to the frontend, the doc will sync with the doc storage. * Connect a doc to the frontend, the doc will sync with the doc storage.
* @param doc - The doc to add * @param doc - The doc to connect
* @param withSubDoc - Whether to add the subdocs of the doc
*/ */
addDoc(doc: YDoc, withSubDoc: boolean = false) { connectDoc(doc: YDoc) {
this._addDoc(doc); this._connectDoc(doc);
if (withSubDoc) {
doc.on('subdocs', ({ loaded }) => {
for (const subdoc of loaded) {
this._addDoc(subdoc);
}
});
}
} }
readonly jobs = { readonly jobs = {
@@ -275,18 +277,16 @@ export class DocFrontend {
// mark doc as loaded // mark doc as loaded
doc.emit('sync', [true, doc]); doc.emit('sync', [true, doc]);
this.status.connectedDocs.add(job.docId);
this.statusUpdatedSubject$.next(job.docId);
const docRecord = await this.storage.getDoc(job.docId); const docRecord = await this.storage.getDoc(job.docId);
throwIfAborted(signal); throwIfAborted(signal);
if (!docRecord || isEmptyUpdate(docRecord.bin)) { if (docRecord && !isEmptyUpdate(docRecord.bin)) {
return; this.applyUpdate(job.docId, docRecord.bin);
this.status.readyDocs.add(job.docId);
} }
this.applyUpdate(job.docId, docRecord.bin); this.status.connectedDocs.add(job.docId);
this.status.readyDocs.add(job.docId);
this.statusUpdatedSubject$.next(job.docId); this.statusUpdatedSubject$.next(job.docId);
}, },
save: async ( save: async (
@@ -339,12 +339,12 @@ export class DocFrontend {
}; };
/** /**
* Remove a doc from the frontend, the doc will stop syncing with the doc storage. * Disconnect a doc from the frontend, the doc will stop syncing with the doc storage.
* It's not recommended to use this method directly, better to use `doc.destroy()`. * It's not recommended to use this method directly, better to use `doc.destroy()`.
* *
* @param doc - The doc to remove * @param doc - The doc to disconnect
*/ */
removeDoc(doc: YDoc) { disconnectDoc(doc: YDoc) {
this.status.docs.delete(doc.guid); this.status.docs.delete(doc.guid);
this.status.connectedDocs.delete(doc.guid); this.status.connectedDocs.delete(doc.guid);
this.status.readyDocs.delete(doc.guid); this.status.readyDocs.delete(doc.guid);
@@ -370,7 +370,10 @@ export class DocFrontend {
}; };
} }
private _addDoc(doc: YDoc) { private _connectDoc(doc: YDoc) {
if (this.status.docs.has(doc.guid)) {
throw new Error('doc already connected');
}
this.schedule({ this.schedule({
type: 'load', type: 'load',
docId: doc.guid, docId: doc.guid,
@@ -382,7 +385,7 @@ export class DocFrontend {
doc.on('update', this.handleDocUpdate); doc.on('update', this.handleDocUpdate);
doc.on('destroy', () => { doc.on('destroy', () => {
this.removeDoc(doc); this.disconnectDoc(doc);
}); });
} }

View File

@@ -15,12 +15,7 @@ export class BroadcastChannelConnection extends AutoReconnectConnection<Broadcas
return new BroadcastChannel(this.channelName); return new BroadcastChannel(this.channelName);
} }
override doDisconnect() { override doDisconnect(channel: BroadcastChannel) {
this.close(); channel.close();
}
private close(error?: Error) {
this.maybeConnection?.close();
this.setStatus('closed', error);
} }
} }

View File

@@ -1,5 +1,3 @@
import type { SocketOptions } from 'socket.io-client';
import { share } from '../../connection'; import { share } from '../../connection';
import { import {
type AwarenessRecord, type AwarenessRecord,
@@ -13,7 +11,6 @@ import {
} from './socket'; } from './socket';
interface CloudAwarenessStorageOptions { interface CloudAwarenessStorageOptions {
socketOptions?: SocketOptions;
serverBaseUrl: string; serverBaseUrl: string;
type: SpaceType; type: SpaceType;
id: string; id: string;
@@ -26,12 +23,7 @@ export class CloudAwarenessStorage extends AwarenessStorageBase {
super(); super();
} }
connection = share( connection = share(new SocketConnection(`${this.options.serverBaseUrl}/`));
new SocketConnection(
`${this.options.serverBaseUrl}/`,
this.options.socketOptions
)
);
private get socket() { private get socket() {
return this.connection.inner; return this.connection.inner;
@@ -52,9 +44,14 @@ export class CloudAwarenessStorage extends AwarenessStorageBase {
onUpdate: (update: AwarenessRecord, origin?: string) => void, onUpdate: (update: AwarenessRecord, origin?: string) => void,
onCollect: () => Promise<AwarenessRecord | null> onCollect: () => Promise<AwarenessRecord | null>
): () => void { ): () => void {
// TODO: handle disconnect
// leave awareness // leave awareness
const leave = () => { const leave = () => {
if (this.connection.status !== 'connected') return;
this.socket.off('space:collect-awareness', handleCollectAwareness);
this.socket.off(
'space:broadcast-awareness-update',
handleBroadcastAwarenessUpdate
);
this.socket.emit('space:leave-awareness', { this.socket.emit('space:leave-awareness', {
spaceType: this.options.type, spaceType: this.options.type,
spaceId: this.options.id, spaceId: this.options.id,
@@ -64,6 +61,11 @@ export class CloudAwarenessStorage extends AwarenessStorageBase {
// join awareness, and collect awareness from others // join awareness, and collect awareness from others
const joinAndCollect = async () => { const joinAndCollect = async () => {
this.socket.on('space:collect-awareness', handleCollectAwareness);
this.socket.on(
'space:broadcast-awareness-update',
handleBroadcastAwarenessUpdate
);
await this.socket.emitWithAck('space:join-awareness', { await this.socket.emitWithAck('space:join-awareness', {
spaceType: this.options.type, spaceType: this.options.type,
spaceId: this.options.id, spaceId: this.options.id,
@@ -77,7 +79,11 @@ export class CloudAwarenessStorage extends AwarenessStorageBase {
}); });
}; };
joinAndCollect().catch(err => console.error('awareness join failed', err)); if (this.connection.status === 'connected') {
joinAndCollect().catch(err =>
console.error('awareness join failed', err)
);
}
const unsubscribeConnectionStatusChanged = this.connection.onStatusChanged( const unsubscribeConnectionStatusChanged = this.connection.onStatusChanged(
status => { status => {
@@ -141,18 +147,9 @@ export class CloudAwarenessStorage extends AwarenessStorageBase {
} }
}; };
this.socket.on('space:collect-awareness', handleCollectAwareness);
this.socket.on(
'space:broadcast-awareness-update',
handleBroadcastAwarenessUpdate
);
return () => { return () => {
leave(); leave();
this.socket.off('space:collect-awareness', handleCollectAwareness);
this.socket.off(
'space:broadcast-awareness-update',
handleBroadcastAwarenessUpdate
);
unsubscribeConnectionStatusChanged(); unsubscribeConnectionStatusChanged();
}; };
} }

View File

@@ -45,23 +45,28 @@ export class StaticCloudDocStorage extends DocStorageBase<CloudDocStorageOptions
protected override async getDocSnapshot( protected override async getDocSnapshot(
docId: string docId: string
): Promise<DocRecord | null> { ): Promise<DocRecord | null> {
const arrayBuffer = await this.connection.fetchArrayBuffer( try {
`/api/workspaces/${this.spaceId}/docs/${docId}`, const arrayBuffer = await this.connection.fetchArrayBuffer(
{ `/api/workspaces/${this.spaceId}/docs/${docId}`,
priority: 'high', {
headers: { priority: 'high',
Accept: 'application/octet-stream', // this is necessary for ios native fetch to return arraybuffer headers: {
}, Accept: 'application/octet-stream', // this is necessary for ios native fetch to return arraybuffer
},
}
);
if (!arrayBuffer) {
return null;
} }
); return {
if (!arrayBuffer) { docId: docId,
bin: new Uint8Array(arrayBuffer),
timestamp: new Date(),
};
} catch (error) {
console.error(error);
return null; return null;
} }
return {
docId: docId,
bin: new Uint8Array(arrayBuffer),
timestamp: new Date(),
};
} }
protected override setDocSnapshot( protected override setDocSnapshot(
_snapshot: DocRecord, _snapshot: DocRecord,

View File

@@ -1,10 +1,5 @@
import type { Socket, SocketOptions } from 'socket.io-client'; import type { Socket } from 'socket.io-client';
import {
type Connection,
type ConnectionStatus,
share,
} from '../../connection';
import { import {
type DocClock, type DocClock,
type DocClocks, type DocClocks,
@@ -12,6 +7,7 @@ import {
type DocStorageOptions, type DocStorageOptions,
type DocUpdate, type DocUpdate,
} from '../../storage'; } from '../../storage';
import { getIdConverter, type IdConverter } from '../../utils/id-converter';
import type { SpaceType } from '../../utils/universal-id'; import type { SpaceType } from '../../utils/universal-id';
import { import {
base64ToUint8Array, base64ToUint8Array,
@@ -21,7 +17,6 @@ import {
} from './socket'; } from './socket';
interface CloudDocStorageOptions extends DocStorageOptions { interface CloudDocStorageOptions extends DocStorageOptions {
socketOptions?: SocketOptions;
serverBaseUrl: string; serverBaseUrl: string;
type: SpaceType; type: SpaceType;
} }
@@ -32,7 +27,12 @@ export class CloudDocStorage extends DocStorageBase<CloudDocStorageOptions> {
get socket() { get socket() {
return this.connection.inner; return this.connection.inner;
} }
get idConverter() {
if (!this.connection.idConverter) {
throw new Error('Id converter not initialized');
}
return this.connection.idConverter;
}
readonly spaceType = this.options.type; readonly spaceType = this.options.type;
onServerUpdate: ServerEventsMap['space:broadcast-doc-update'] = message => { onServerUpdate: ServerEventsMap['space:broadcast-doc-update'] = message => {
@@ -41,7 +41,7 @@ export class CloudDocStorage extends DocStorageBase<CloudDocStorageOptions> {
this.spaceId === message.spaceId this.spaceId === message.spaceId
) { ) {
this.emit('update', { this.emit('update', {
docId: message.docId, docId: this.idConverter.oldIdToNewId(message.docId),
bin: base64ToUint8Array(message.update), bin: base64ToUint8Array(message.update),
timestamp: new Date(message.timestamp), timestamp: new Date(message.timestamp),
editor: message.editor, editor: message.editor,
@@ -58,10 +58,13 @@ export class CloudDocStorage extends DocStorageBase<CloudDocStorageOptions> {
const response = await this.socket.emitWithAck('space:load-doc', { const response = await this.socket.emitWithAck('space:load-doc', {
spaceType: this.spaceType, spaceType: this.spaceType,
spaceId: this.spaceId, spaceId: this.spaceId,
docId, docId: this.idConverter.newIdToOldId(docId),
}); });
if ('error' in response) { if ('error' in response) {
if (response.error.name === 'DOC_NOT_FOUND') {
return null;
}
// TODO: use [UserFriendlyError] // TODO: use [UserFriendlyError]
throw new Error(response.error.message); throw new Error(response.error.message);
} }
@@ -77,11 +80,14 @@ export class CloudDocStorage extends DocStorageBase<CloudDocStorageOptions> {
const response = await this.socket.emitWithAck('space:load-doc', { const response = await this.socket.emitWithAck('space:load-doc', {
spaceType: this.spaceType, spaceType: this.spaceType,
spaceId: this.spaceId, spaceId: this.spaceId,
docId, docId: this.idConverter.newIdToOldId(docId),
stateVector: state ? await uint8ArrayToBase64(state) : void 0, stateVector: state ? await uint8ArrayToBase64(state) : void 0,
}); });
if ('error' in response) { if ('error' in response) {
if (response.error.name === 'DOC_NOT_FOUND') {
return null;
}
// TODO: use [UserFriendlyError] // TODO: use [UserFriendlyError]
throw new Error(response.error.message); throw new Error(response.error.message);
} }
@@ -98,8 +104,8 @@ export class CloudDocStorage extends DocStorageBase<CloudDocStorageOptions> {
const response = await this.socket.emitWithAck('space:push-doc-update', { const response = await this.socket.emitWithAck('space:push-doc-update', {
spaceType: this.spaceType, spaceType: this.spaceType,
spaceId: this.spaceId, spaceId: this.spaceId,
docId: update.docId, docId: this.idConverter.newIdToOldId(update.docId),
updates: await uint8ArrayToBase64(update.bin), update: await uint8ArrayToBase64(update.bin),
}); });
if ('error' in response) { if ('error' in response) {
@@ -120,7 +126,7 @@ export class CloudDocStorage extends DocStorageBase<CloudDocStorageOptions> {
const response = await this.socket.emitWithAck('space:load-doc', { const response = await this.socket.emitWithAck('space:load-doc', {
spaceType: this.spaceType, spaceType: this.spaceType,
spaceId: this.spaceId, spaceId: this.spaceId,
docId, docId: this.idConverter.newIdToOldId(docId),
}); });
if ('error' in response) { if ('error' in response) {
@@ -150,7 +156,7 @@ export class CloudDocStorage extends DocStorageBase<CloudDocStorageOptions> {
} }
return Object.entries(response.data).reduce((ret, [docId, timestamp]) => { return Object.entries(response.data).reduce((ret, [docId, timestamp]) => {
ret[docId] = new Date(timestamp); ret[this.idConverter.oldIdToNewId(docId)] = new Date(timestamp);
return ret; return ret;
}, {} as DocClocks); }, {} as DocClocks);
} }
@@ -159,7 +165,7 @@ export class CloudDocStorage extends DocStorageBase<CloudDocStorageOptions> {
this.socket.emit('space:delete-doc', { this.socket.emit('space:delete-doc', {
spaceType: this.spaceType, spaceType: this.spaceType,
spaceId: this.spaceId, spaceId: this.spaceId,
docId, docId: this.idConverter.newIdToOldId(docId),
}); });
} }
@@ -174,83 +180,74 @@ export class CloudDocStorage extends DocStorageBase<CloudDocStorageOptions> {
} }
} }
class CloudDocStorageConnection implements Connection<Socket> { class CloudDocStorageConnection extends SocketConnection {
connection = share(
new SocketConnection(
`${this.options.serverBaseUrl}/`,
this.options.socketOptions
)
);
private disposeConnectionStatusListener?: () => void;
private get socket() {
return this.connection.inner;
}
constructor( constructor(
private readonly options: CloudDocStorageOptions, private readonly options: CloudDocStorageOptions,
private readonly onServerUpdate: ServerEventsMap['space:broadcast-doc-update'] private readonly onServerUpdate: ServerEventsMap['space:broadcast-doc-update']
) {} ) {
super(`${options.serverBaseUrl}/`);
get status() {
return this.connection.status;
} }
get inner() { idConverter: IdConverter | null = null;
return this.connection.inner;
}
connect(): void { override async doConnect(signal?: AbortSignal) {
if (!this.disposeConnectionStatusListener) { const socket = await super.doConnect(signal);
this.disposeConnectionStatusListener = this.connection.onStatusChanged(
status => {
if (status === 'connected') {
this.join().catch(err => {
console.error('doc storage join failed', err);
});
this.socket.on('space:broadcast-doc-update', this.onServerUpdate);
}
}
);
}
return this.connection.connect();
}
async join() {
try { try {
const res = await this.socket.emitWithAck('space:join', { const res = await socket.emitWithAck('space:join', {
spaceType: this.options.type, spaceType: this.options.type,
spaceId: this.options.id, spaceId: this.options.id,
clientVersion: BUILD_CONFIG.appVersion, clientVersion: BUILD_CONFIG.appVersion,
}); });
if ('error' in res) { if ('error' in res) {
this.connection.setStatus('closed', new Error(res.error.message)); throw new Error(res.error.message);
} }
if (!this.idConverter) {
this.idConverter = await this.getIdConverter(socket);
}
socket.on('space:broadcast-doc-update', this.onServerUpdate);
return socket;
} catch (e) { } catch (e) {
this.connection.setStatus('error', e as Error); socket.close();
throw e;
} }
} }
disconnect() { override doDisconnect(socket: Socket) {
if (this.disposeConnectionStatusListener) { socket.emit('space:leave', {
this.disposeConnectionStatusListener();
}
this.socket.emit('space:leave', {
spaceType: this.options.type, spaceType: this.options.type,
spaceId: this.options.id, spaceId: this.options.id,
}); });
this.socket.off('space:broadcast-doc-update', this.onServerUpdate); socket.off('space:broadcast-doc-update', this.onServerUpdate);
this.connection.disconnect(); super.disconnect();
} }
waitForConnected(signal?: AbortSignal): Promise<void> { async getIdConverter(socket: Socket) {
return this.connection.waitForConnected(signal); return getIdConverter(
} {
onStatusChanged( getDocBuffer: async id => {
cb: (status: ConnectionStatus, error?: Error) => void const response = await socket.emitWithAck('space:load-doc', {
): () => void { spaceType: this.options.type,
return this.connection.onStatusChanged(cb); spaceId: this.options.id,
docId: id,
});
if ('error' in response) {
if (response.error.name === 'DOC_NOT_FOUND') {
return null;
}
// TODO: use [UserFriendlyError]
throw new Error(response.error.message);
}
return base64ToUint8Array(response.data.missing);
},
},
this.options.id
);
} }
} }

View File

@@ -23,6 +23,7 @@ export class HttpConnection extends DummyConnection {
...init, ...init,
signal: abortController.signal, signal: abortController.signal,
headers: { headers: {
...this.requestHeaders,
...init?.headers, ...init?.headers,
'x-affine-version': BUILD_CONFIG.appVersion, 'x-affine-version': BUILD_CONFIG.appVersion,
}, },
@@ -35,7 +36,7 @@ export class HttpConnection extends DummyConnection {
let reason: string | any = ''; let reason: string | any = '';
if (res.headers.get('Content-Type')?.includes('application/json')) { if (res.headers.get('Content-Type')?.includes('application/json')) {
try { try {
reason = await res.json(); reason = JSON.stringify(await res.json());
} catch { } catch {
// ignore // ignore
} }
@@ -63,7 +64,10 @@ export class HttpConnection extends DummyConnection {
this.fetch this.fetch
); );
constructor(private readonly serverBaseUrl: string) { constructor(
private readonly serverBaseUrl: string,
private readonly requestHeaders?: Record<string, string>
) {
super(); super();
} }
} }

View File

@@ -4,10 +4,8 @@ import {
type SocketOptions, type SocketOptions,
} from 'socket.io-client'; } from 'socket.io-client';
import { import { AutoReconnectConnection } from '../../connection';
AutoReconnectConnection, import { throwIfAborted } from '../../utils/throw-if-aborted';
type ConnectionStatus,
} from '../../connection';
// TODO(@forehalo): use [UserFriendlyError] // TODO(@forehalo): use [UserFriendlyError]
interface EventError { interface EventError {
@@ -82,7 +80,7 @@ interface ClientEvents {
}; };
'space:push-doc-update': [ 'space:push-doc-update': [
{ spaceType: string; spaceId: string; docId: string; updates: string }, { spaceType: string; spaceId: string; docId: string; update: string },
{ timestamp: number }, { timestamp: number },
]; ];
'space:load-doc-timestamps': [ 'space:load-doc-timestamps': [
@@ -153,12 +151,24 @@ export function base64ToUint8Array(base64: string) {
return new Uint8Array(binaryArray); return new Uint8Array(binaryArray);
} }
const SOCKET_IOMANAGER_CACHE = new Map<string, SocketIOManager>();
function getSocketIOManager(endpoint: string) {
let manager = SOCKET_IOMANAGER_CACHE.get(endpoint);
if (!manager) {
manager = new SocketIOManager(endpoint, {
autoConnect: false,
transports: ['websocket'],
secure: new URL(endpoint).protocol === 'https:',
// we will handle reconnection by ourselves
reconnection: false,
});
SOCKET_IOMANAGER_CACHE.set(endpoint, manager);
}
return manager;
}
export class SocketConnection extends AutoReconnectConnection<Socket> { export class SocketConnection extends AutoReconnectConnection<Socket> {
manager = new SocketIOManager(this.endpoint, { manager = getSocketIOManager(this.endpoint);
autoConnect: false,
transports: ['websocket'],
secure: new URL(this.endpoint).protocol === 'https:',
});
constructor( constructor(
private readonly endpoint: string, private readonly endpoint: string,
@@ -171,32 +181,42 @@ export class SocketConnection extends AutoReconnectConnection<Socket> {
return `socket:${this.endpoint}`; return `socket:${this.endpoint}`;
} }
override async doConnect() { override async doConnect(signal?: AbortSignal) {
const conn = this.manager.socket('/', this.socketOptions); const socket = this.manager.socket('/', this.socketOptions);
try {
throwIfAborted(signal);
await Promise.race([
new Promise<void>((resolve, reject) => {
socket.once('connect', () => {
resolve();
});
socket.once('connect_error', err => {
reject(err);
});
socket.open();
}),
new Promise<void>((_resolve, reject) => {
signal?.addEventListener('abort', () => {
reject(new Error('Aborted'));
});
}),
]);
} catch (err) {
socket.close();
throw err;
}
await new Promise<void>((resolve, reject) => { socket.on('disconnect', this.handleDisconnect);
conn.once('connect', () => {
resolve();
});
conn.once('connect_error', err => {
reject(err);
});
conn.open();
});
return conn; return socket;
} }
override doDisconnect(conn: Socket) { override doDisconnect(conn: Socket) {
conn.off('disconnect', this.handleDisconnect);
conn.close(); conn.close();
} }
/** handleDisconnect = (reason: SocketIO.DisconnectReason) => {
* Socket connection allow explicitly set status by user this.error = new Error(reason);
* };
* used when join space failed
*/
override setStatus(status: ConnectionStatus, error?: Error) {
super.setStatus(status, error);
}
} }

View File

@@ -25,22 +25,14 @@ export class IDBConnection extends AutoReconnectConnection<{
} }
override async doConnect() { override async doConnect() {
// indexeddb will responsible for version control, so the db.version always match migrator.version
const db = await openDB<DocStorageSchema>(this.dbName, migrator.version, {
upgrade: migrator.migrate,
});
db.addEventListener('versionchange', this.handleVersionChange);
return { return {
db: await openDB<DocStorageSchema>(this.dbName, migrator.version, { db,
upgrade: migrator.migrate,
blocking: () => {
// if, for example, an tab with newer version is opened, this function will be called.
// we should close current connection to allow the new version to upgrade the db.
this.setStatus(
'closed',
new Error('Blocking a new version. Closing the connection.')
);
},
blocked: () => {
// fallback to retry auto retry
this.setStatus('error', new Error('Blocked by other tabs.'));
},
}),
channel: new BroadcastChannel('idb:' + this.dbName), channel: new BroadcastChannel('idb:' + this.dbName),
}; };
} }
@@ -49,7 +41,19 @@ export class IDBConnection extends AutoReconnectConnection<{
db: IDBPDatabase<DocStorageSchema>; db: IDBPDatabase<DocStorageSchema>;
channel: BroadcastChannel; channel: BroadcastChannel;
}) { }) {
db.db.removeEventListener('versionchange', this.handleVersionChange);
db.channel.close(); db.channel.close();
db.db.close(); db.db.close();
} }
handleVersionChange = (e: IDBVersionChangeEvent) => {
if (e.newVersion !== migrator.version) {
this.error = new Error(
'Database version mismatch, expected ' +
migrator.version +
' but got ' +
e.newVersion
);
}
};
} }

View File

@@ -29,26 +29,35 @@ export class IndexedDBDocStorage extends DocStorageBase<IDBConnectionOptions> {
override locker = new IndexedDBLocker(this.connection); override locker = new IndexedDBLocker(this.connection);
private _lastTimestamp = new Date(0);
private generateTimestamp() {
const timestamp = new Date();
if (timestamp.getTime() <= this._lastTimestamp.getTime()) {
timestamp.setTime(this._lastTimestamp.getTime() + 1);
}
this._lastTimestamp = timestamp;
return timestamp;
}
override async pushDocUpdate(update: DocUpdate, origin?: string) { override async pushDocUpdate(update: DocUpdate, origin?: string) {
const trx = this.db.transaction(['updates', 'clocks'], 'readwrite'); let timestamp = new Date();
const timestamp = this.generateTimestamp();
await trx.objectStore('updates').add({
...update,
createdAt: timestamp,
});
await trx.objectStore('clocks').put({ docId: update.docId, timestamp }); let retry = 0;
while (true) {
try {
const trx = this.db.transaction(['updates', 'clocks'], 'readwrite');
await trx.objectStore('updates').add({
...update,
createdAt: timestamp,
});
await trx.objectStore('clocks').put({ docId: update.docId, timestamp });
trx.commit();
} catch (e) {
if (e instanceof Error && e.name === 'ConstraintError') {
retry++;
if (retry < 10) {
timestamp = new Date(timestamp.getTime() + 1);
continue;
}
}
throw e;
}
break;
}
this.emit( this.emit(
'update', 'update',
@@ -191,9 +200,9 @@ export class IndexedDBDocStorage extends DocStorageBase<IDBConnectionOptions> {
}; };
} }
handleChannelMessage(event: MessageEvent<ChannelMessage>) { handleChannelMessage = (event: MessageEvent<ChannelMessage>) => {
if (event.data.type === 'update') { if (event.data.type === 'update') {
this.emit('update', event.data.update, event.data.origin); this.emit('update', event.data.update, event.data.origin);
} }
} };
} }

View File

@@ -2,7 +2,6 @@ import type { StorageConstructor } from '..';
import { IndexedDBBlobStorage } from './blob'; import { IndexedDBBlobStorage } from './blob';
import { IndexedDBDocStorage } from './doc'; import { IndexedDBDocStorage } from './doc';
import { IndexedDBSyncStorage } from './sync'; import { IndexedDBSyncStorage } from './sync';
import { IndexedDBV1BlobStorage, IndexedDBV1DocStorage } from './v1';
export * from './blob'; export * from './blob';
export * from './doc'; export * from './doc';
@@ -13,8 +12,3 @@ export const idbStorages = [
IndexedDBBlobStorage, IndexedDBBlobStorage,
IndexedDBSyncStorage, IndexedDBSyncStorage,
] satisfies StorageConstructor[]; ] satisfies StorageConstructor[];
export const idbv1Storages = [
IndexedDBV1DocStorage,
IndexedDBV1BlobStorage,
] satisfies StorageConstructor[];

View File

@@ -19,6 +19,9 @@ export class IndexedDBV1BlobStorage extends BlobStorageBase {
} }
override async get(key: string) { override async get(key: string) {
if (!this.db) {
return null;
}
const trx = this.db.transaction('blob', 'readonly'); const trx = this.db.transaction('blob', 'readonly');
const blob = await trx.store.get(key); const blob = await trx.store.get(key);
if (!blob) { if (!blob) {
@@ -34,6 +37,9 @@ export class IndexedDBV1BlobStorage extends BlobStorageBase {
} }
override async delete(key: string, permanently: boolean) { override async delete(key: string, permanently: boolean) {
if (!this.db) {
return;
}
if (permanently) { if (permanently) {
const trx = this.db.transaction('blob', 'readwrite'); const trx = this.db.transaction('blob', 'readwrite');
await trx.store.delete(key); await trx.store.delete(key);
@@ -41,6 +47,9 @@ export class IndexedDBV1BlobStorage extends BlobStorageBase {
} }
override async list() { override async list() {
if (!this.db) {
return [];
}
const trx = this.db.transaction('blob', 'readonly'); const trx = this.db.transaction('blob', 'readonly');
const it = trx.store.iterate(); const it = trx.store.iterate();

View File

@@ -15,23 +15,26 @@ export interface DocDBSchema extends DBSchema {
}; };
} }
export class DocIDBConnection extends AutoReconnectConnection< export class DocIDBConnection extends AutoReconnectConnection<IDBPDatabase<DocDBSchema> | null> {
IDBPDatabase<DocDBSchema>
> {
override get shareId() { override get shareId() {
return 'idb(old):affine-local'; return 'idb(old):affine-local';
} }
override async doConnect() { override async doConnect() {
return openDB<DocDBSchema>('affine-local', 1, { const dbs = await indexedDB.databases();
upgrade: db => { if (dbs.some(d => d.name === 'affine-local')) {
db.createObjectStore('workspace', { keyPath: 'id' }); return openDB<DocDBSchema>('affine-local', 1, {
}, upgrade: db => {
}); db.createObjectStore('workspace', { keyPath: 'id' });
},
});
} else {
return null;
}
} }
override doDisconnect(conn: IDBPDatabase<DocDBSchema>) { override doDisconnect(conn: IDBPDatabase<DocDBSchema> | null) {
conn.close(); conn?.close();
} }
} }
@@ -46,9 +49,7 @@ export interface BlobIDBConnectionOptions {
id: string; id: string;
} }
export class BlobIDBConnection extends AutoReconnectConnection< export class BlobIDBConnection extends AutoReconnectConnection<IDBPDatabase<BlobDBSchema> | null> {
IDBPDatabase<BlobDBSchema>
> {
constructor(private readonly options: BlobIDBConnectionOptions) { constructor(private readonly options: BlobIDBConnectionOptions) {
super(); super();
} }
@@ -58,14 +59,19 @@ export class BlobIDBConnection extends AutoReconnectConnection<
} }
override async doConnect() { override async doConnect() {
return openDB<BlobDBSchema>(`${this.options.id}_blob`, 1, { const dbs = await indexedDB.databases();
upgrade: db => { if (dbs.some(d => d.name === `${this.options.id}_blob`)) {
db.createObjectStore('blob'); return openDB<BlobDBSchema>(`${this.options.id}_blob`, 1, {
}, upgrade: db => {
}); db.createObjectStore('blob');
},
});
} else {
return null;
}
} }
override doDisconnect(conn: IDBPDatabase<BlobDBSchema>) { override doDisconnect(conn: IDBPDatabase<BlobDBSchema> | null) {
conn.close(); conn?.close();
} }
} }

View File

@@ -1,9 +1,20 @@
import { once } from 'lodash-es';
import {
applyUpdate,
type Array as YArray,
Doc as YDoc,
type Map as YMap,
} from 'yjs';
import { share } from '../../../connection'; import { share } from '../../../connection';
import { import {
type DocClocks,
type DocRecord, type DocRecord,
DocStorageBase, DocStorageBase,
type DocStorageOptions,
type DocUpdate, type DocUpdate,
} from '../../../storage'; } from '../../../storage';
import { getIdConverter } from '../../../utils/id-converter';
import { DocIDBConnection } from './db'; import { DocIDBConnection } from './db';
/** /**
@@ -14,6 +25,13 @@ export class IndexedDBV1DocStorage extends DocStorageBase {
readonly connection = share(new DocIDBConnection()); readonly connection = share(new DocIDBConnection());
constructor(opts: DocStorageOptions) {
super({
...opts,
readonlyMode: true,
});
}
get db() { get db() {
return this.connection.inner; return this.connection.inner;
} }
@@ -23,26 +41,11 @@ export class IndexedDBV1DocStorage extends DocStorageBase {
} }
override async getDoc(docId: string) { override async getDoc(docId: string) {
const trx = this.db.transaction('workspace', 'readonly'); if (!this.db) {
const record = await trx.store.get(docId);
if (!record?.updates.length) {
return null; return null;
} }
const oldId = (await this.getIdConverter()).newIdToOldId(docId);
if (record.updates.length === 1) { return this.rawGetDoc(oldId);
return {
docId,
bin: record.updates[0].update,
timestamp: new Date(record.updates[0].timestamp),
};
}
return {
docId,
bin: await this.mergeUpdates(record.updates.map(update => update.update)),
timestamp: new Date(record.updates.at(-1)?.timestamp ?? Date.now()),
};
} }
protected override async getDocSnapshot() { protected override async getDocSnapshot() {
@@ -55,12 +58,60 @@ export class IndexedDBV1DocStorage extends DocStorageBase {
} }
override async deleteDoc(docId: string) { override async deleteDoc(docId: string) {
if (!this.db) {
return;
}
const oldId = (await this.getIdConverter()).newIdToOldId(docId);
const trx = this.db.transaction('workspace', 'readwrite'); const trx = this.db.transaction('workspace', 'readwrite');
await trx.store.delete(docId); await trx.store.delete(oldId);
} }
override async getDocTimestamps() { override async getDocTimestamps(): Promise<DocClocks> {
return {}; if (!this.db) {
return {};
}
const idConverter = await this.getIdConverter();
const oldIds: string[] = [this.spaceId];
const rootDocBuffer = await this.rawGetDoc(this.spaceId);
if (rootDocBuffer) {
const ydoc = new YDoc({
guid: this.spaceId,
});
applyUpdate(ydoc, rootDocBuffer.bin);
// get all ids from rootDoc.meta.pages.[*].id, trust this id as normalized id
const normalizedDocIds = (
(ydoc.getMap('meta') as YMap<any> | undefined)?.get('pages') as
| YArray<YMap<any>>
| undefined
)
?.map(i => i.get('id') as string)
.filter(i => !!i);
const spaces = ydoc.getMap('spaces') as YMap<any> | undefined;
for (const pageId of normalizedDocIds ?? []) {
const subdoc = spaces?.get(pageId);
if (subdoc && subdoc instanceof YDoc) {
oldIds.push(subdoc.guid);
}
}
}
const trx = this.db.transaction('workspace', 'readonly');
const allKeys = await trx.store.getAllKeys();
oldIds.push(...allKeys.filter(k => k.startsWith(`db$${this.spaceId}$`)));
oldIds.push(
...allKeys.filter(k =>
k.match(new RegExp(`^userdata\\$[\\w-]+\\$${this.spaceId}$`))
)
);
return Object.fromEntries(
oldIds.map(id => [idConverter.oldIdToNewId(id), new Date(1)])
);
} }
override async getDocTimestamp(_docId: string) { override async getDocTimestamp(_docId: string) {
@@ -78,4 +129,59 @@ export class IndexedDBV1DocStorage extends DocStorageBase {
protected override async markUpdatesMerged(): Promise<number> { protected override async markUpdatesMerged(): Promise<number> {
return 0; return 0;
} }
private async rawGetDoc(id: string) {
if (!this.db) {
return null;
}
const trx = this.db.transaction('workspace', 'readonly');
const record = await trx.store.get(id);
if (!record?.updates.length) {
return null;
}
if (record.updates.length === 1) {
return {
docId: id,
bin: record.updates[0].update,
timestamp: new Date(record.updates[0].timestamp),
};
}
return {
docId: id,
bin: await this.mergeUpdates(record.updates.map(update => update.update)),
timestamp: new Date(record.updates.at(-1)?.timestamp ?? Date.now()),
};
}
private readonly getIdConverter = once(async () => {
const idConverter = getIdConverter(
{
getDocBuffer: async id => {
if (!this.db) {
return null;
}
const trx = this.db.transaction('workspace', 'readonly');
const record = await trx.store.get(id);
if (!record?.updates.length) {
return null;
}
if (record.updates.length === 1) {
return record.updates[0].update;
}
return await this.mergeUpdates(
record.updates.map(update => update.update)
);
},
},
this.spaceId
);
return await idConverter;
});
} }

View File

@@ -1,2 +1,11 @@
import type { StorageConstructor } from '../..';
import { IndexedDBV1BlobStorage } from './blob';
import { IndexedDBV1DocStorage } from './doc';
export * from './blob'; export * from './blob';
export * from './doc'; export * from './doc';
export const idbV1Storages = [
IndexedDBV1DocStorage,
IndexedDBV1BlobStorage,
] satisfies StorageConstructor[];

View File

@@ -1,8 +1,10 @@
import type { Storage } from '../storage'; import type { Storage } from '../storage';
import type { broadcastChannelStorages } from './broadcast-channel'; import type { broadcastChannelStorages } from './broadcast-channel';
import type { cloudStorages } from './cloud'; import type { cloudStorages } from './cloud';
import type { idbStorages, idbv1Storages } from './idb'; import type { idbStorages } from './idb';
import type { idbV1Storages } from './idb/v1';
import type { sqliteStorages } from './sqlite'; import type { sqliteStorages } from './sqlite';
import type { sqliteV1Storages } from './sqlite/v1';
export type StorageConstructor = { export type StorageConstructor = {
new (...args: any[]): Storage; new (...args: any[]): Storage;
@@ -11,9 +13,10 @@ export type StorageConstructor = {
type Storages = type Storages =
| typeof cloudStorages | typeof cloudStorages
| typeof idbv1Storages | typeof idbV1Storages
| typeof idbStorages | typeof idbStorages
| typeof sqliteStorages | typeof sqliteStorages
| typeof sqliteV1Storages
| typeof broadcastChannelStorages; | typeof broadcastChannelStorages;
// oxlint-disable-next-line no-redeclare // oxlint-disable-next-line no-redeclare

View File

@@ -41,7 +41,7 @@ export type NativeDBApis = {
id: string, id: string,
peer: string, peer: string,
docId: string docId: string
): Promise<DocClock>; ): Promise<DocClock | null>;
setPeerRemoteClock( setPeerRemoteClock(
id: string, id: string,
peer: string, peer: string,
@@ -53,7 +53,7 @@ export type NativeDBApis = {
id: string, id: string,
peer: string, peer: string,
docId: string docId: string
): Promise<DocClock>; ): Promise<DocClock | null>;
setPeerPulledRemoteClock( setPeerPulledRemoteClock(
id: string, id: string,
peer: string, peer: string,
@@ -65,7 +65,7 @@ export type NativeDBApis = {
id: string, id: string,
peer: string, peer: string,
docId: string docId: string
): Promise<DocClock>; ): Promise<DocClock | null>;
setPeerPushedClock( setPeerPushedClock(
id: string, id: string,
peer: string, peer: string,

View File

@@ -7,7 +7,6 @@ export * from './blob';
export { bindNativeDBApis, type NativeDBApis } from './db'; export { bindNativeDBApis, type NativeDBApis } from './db';
export * from './doc'; export * from './doc';
export * from './sync'; export * from './sync';
export * from './v1';
export const sqliteStorages = [ export const sqliteStorages = [
SqliteDocStorage, SqliteDocStorage,

View File

@@ -7,6 +7,7 @@ import { apis } from './db';
* @deprecated readonly * @deprecated readonly
*/ */
export class SqliteV1BlobStorage extends BlobStorageBase { export class SqliteV1BlobStorage extends BlobStorageBase {
static identifier = 'SqliteV1BlobStorage';
override connection = new DummyConnection(); override connection = new DummyConnection();
constructor(private readonly options: { type: SpaceType; id: string }) { constructor(private readonly options: { type: SpaceType; id: string }) {

View File

@@ -4,6 +4,8 @@ import {
DocStorageBase, DocStorageBase,
type DocUpdate, type DocUpdate,
} from '../../../storage'; } from '../../../storage';
import { getIdConverter, type IdConverter } from '../../../utils/id-converter';
import { isEmptyUpdate } from '../../../utils/is-empty-update';
import type { SpaceType } from '../../../utils/universal-id'; import type { SpaceType } from '../../../utils/universal-id';
import { apis } from './db'; import { apis } from './db';
@@ -14,8 +16,14 @@ export class SqliteV1DocStorage extends DocStorageBase<{
type: SpaceType; type: SpaceType;
id: string; id: string;
}> { }> {
static identifier = 'SqliteV1DocStorage';
cachedIdConverter: Promise<IdConverter> | null = null;
override connection = new DummyConnection(); override connection = new DummyConnection();
constructor(options: { type: SpaceType; id: string }) {
super({ ...options, readonlyMode: true });
}
private get db() { private get db() {
if (!apis) { if (!apis) {
throw new Error('Not in electron context.'); throw new Error('Not in electron context.');
@@ -26,17 +34,21 @@ export class SqliteV1DocStorage extends DocStorageBase<{
override async pushDocUpdate(update: DocUpdate) { override async pushDocUpdate(update: DocUpdate) {
// no more writes // no more writes
return { docId: update.docId, timestamp: new Date() }; return { docId: update.docId, timestamp: new Date() };
} }
override async getDoc(docId: string) { override async getDoc(docId: string) {
const idConverter = await this.getIdConverter();
const bin = await this.db.getDocAsUpdates( const bin = await this.db.getDocAsUpdates(
this.options.type, this.options.type,
this.options.id, this.options.id,
docId idConverter.newIdToOldId(docId)
); );
if (isEmptyUpdate(bin)) {
return null;
}
return { return {
docId, docId,
bin, bin,
@@ -71,4 +83,37 @@ export class SqliteV1DocStorage extends DocStorageBase<{
protected override async markUpdatesMerged(): Promise<number> { protected override async markUpdatesMerged(): Promise<number> {
return 0; return 0;
} }
private async getIdConverter() {
if (this.cachedIdConverter) {
return await this.cachedIdConverter;
}
this.cachedIdConverter = getIdConverter(
{
getDocBuffer: async id => {
if (!this.db) {
return null;
}
const updates = await this.db.getDocAsUpdates(
this.options.type,
this.options.id,
id
);
if (isEmptyUpdate(updates)) {
return null;
}
if (!updates) {
return null;
}
return updates;
},
},
this.spaceId
);
return await this.cachedIdConverter;
}
} }

View File

@@ -1,3 +1,12 @@
import type { StorageConstructor } from '../..';
import { SqliteV1BlobStorage } from './blob';
import { SqliteV1DocStorage } from './doc';
export * from './blob'; export * from './blob';
export { bindNativeDBV1Apis } from './db'; export { bindNativeDBV1Apis } from './db';
export * from './doc'; export * from './doc';
export const sqliteV1Storages = [
SqliteV1DocStorage,
SqliteV1BlobStorage,
] satisfies StorageConstructor[];

View File

@@ -151,7 +151,7 @@ export abstract class DocStorageBase<Opts = {}> implements DocStorage {
return { return {
docId, docId,
missing: state ? diffUpdate(doc.bin, state) : doc.bin, missing: state && state.length > 0 ? diffUpdate(doc.bin, state) : doc.bin,
state: encodeStateVectorFromUpdate(doc.bin), state: encodeStateVectorFromUpdate(doc.bin),
timestamp: doc.timestamp, timestamp: doc.timestamp,
}; };

View File

@@ -18,8 +18,11 @@ export class AwarenessSyncImpl implements AwarenessSync {
async update(record: AwarenessRecord, origin?: string) { async update(record: AwarenessRecord, origin?: string) {
await Promise.all( await Promise.all(
[this.storages.local, ...Object.values(this.storages.remotes)].map(peer => [this.storages.local, ...Object.values(this.storages.remotes)].map(
peer.update(record, origin) peer =>
peer.connection.status === 'connected'
? peer.update(record, origin)
: Promise.resolve()
) )
); );
} }

View File

@@ -73,10 +73,14 @@ export class BlobSyncImpl implements BlobSync {
async fullSync(signal?: AbortSignal) { async fullSync(signal?: AbortSignal) {
throwIfAborted(signal); throwIfAborted(signal);
await this.storages.local.connection.waitForConnected(signal);
for (const [remotePeer, remote] of Object.entries(this.storages.remotes)) { for (const [remotePeer, remote] of Object.entries(this.storages.remotes)) {
let localList: string[] = []; let localList: string[] = [];
let remoteList: string[] = []; let remoteList: string[] = [];
await remote.connection.waitForConnected(signal);
try { try {
localList = (await this.storages.local.list(signal)).map(b => b.key); localList = (await this.storages.local.list(signal)).map(b => b.key);
throwIfAborted(signal); throwIfAborted(signal);
@@ -150,7 +154,7 @@ export class BlobSyncImpl implements BlobSync {
} }
stop() { stop() {
this.abort?.abort(); this.abort?.abort(MANUALLY_STOP);
this.abort = null; this.abort = null;
} }

View File

@@ -1,5 +1,5 @@
import type { Observable } from 'rxjs'; import type { Observable } from 'rxjs';
import { combineLatest, map, of } from 'rxjs'; import { combineLatest, map, of, ReplaySubject, share } from 'rxjs';
import type { DocStorage, SyncStorage } from '../../storage'; import type { DocStorage, SyncStorage } from '../../storage';
import { DummyDocStorage } from '../../storage/dummy/doc'; import { DummyDocStorage } from '../../storage/dummy/doc';
@@ -38,18 +38,32 @@ export class DocSyncImpl implements DocSync {
); );
private abort: AbortController | null = null; private abort: AbortController | null = null;
get state$() { state$ = combineLatest(this.peers.map(peer => peer.peerState$)).pipe(
return combineLatest(this.peers.map(peer => peer.peerState$)).pipe( map(allPeers =>
map(allPeers => ({ allPeers.length === 0
total: allPeers.reduce((acc, peer) => Math.max(acc, peer.total), 0), ? {
syncing: allPeers.reduce((acc, peer) => Math.max(acc, peer.syncing), 0), total: 0,
synced: allPeers.every(peer => peer.synced), syncing: 0,
retrying: allPeers.some(peer => peer.retrying), synced: true,
errorMessage: retrying: false,
allPeers.find(peer => peer.errorMessage)?.errorMessage ?? null, errorMessage: null,
})) }
) as Observable<DocSyncState>; : {
} total: allPeers.reduce((acc, peer) => Math.max(acc, peer.total), 0),
syncing: allPeers.reduce(
(acc, peer) => Math.max(acc, peer.syncing),
0
),
synced: allPeers.every(peer => peer.synced),
retrying: allPeers.some(peer => peer.retrying),
errorMessage:
allPeers.find(peer => peer.errorMessage)?.errorMessage ?? null,
}
),
share({
connector: () => new ReplaySubject(1),
})
) as Observable<DocSyncState>;
constructor( constructor(
readonly storages: PeerStorageOptions<DocStorage>, readonly storages: PeerStorageOptions<DocStorage>,
@@ -105,7 +119,7 @@ export class DocSyncImpl implements DocSync {
} }
stop() { stop() {
this.abort?.abort(); this.abort?.abort(MANUALLY_STOP);
this.abort = null; this.abort = null;
} }

View File

@@ -1,6 +1,6 @@
import { remove } from 'lodash-es'; import { remove } from 'lodash-es';
import { nanoid } from 'nanoid'; import { nanoid } from 'nanoid';
import { Observable, Subject } from 'rxjs'; import { Observable, ReplaySubject, share, Subject } from 'rxjs';
import { diffUpdate, encodeStateVectorFromUpdate, mergeUpdates } from 'yjs'; import { diffUpdate, encodeStateVectorFromUpdate, mergeUpdates } from 'yjs';
import type { DocStorage, SyncStorage } from '../../storage'; import type { DocStorage, SyncStorage } from '../../storage';
@@ -119,54 +119,65 @@ export class DocSyncPeer {
}; };
private readonly statusUpdatedSubject$ = new Subject<string | true>(); private readonly statusUpdatedSubject$ = new Subject<string | true>();
get peerState$() { peerState$ = new Observable<PeerState>(subscribe => {
return new Observable<PeerState>(subscribe => { const next = () => {
const next = () => { if (this.status.skipped) {
if (this.status.skipped) { subscribe.next({
subscribe.next({ total: 0,
total: 0, syncing: 0,
syncing: 0, synced: true,
synced: true, retrying: false,
retrying: false, errorMessage: null,
errorMessage: null, });
}); } else if (!this.status.syncing) {
} else if (!this.status.syncing) { // if syncing = false, jobMap is empty
// if syncing = false, jobMap is empty subscribe.next({
subscribe.next({ total: this.status.docs.size,
total: this.status.docs.size, syncing: this.status.docs.size,
syncing: this.status.docs.size, synced: false,
synced: false, retrying: this.status.retrying,
retrying: this.status.retrying, errorMessage: this.status.errorMessage,
errorMessage: this.status.errorMessage, });
}); } else {
} else { const syncing = this.status.jobMap.size;
const syncing = this.status.jobMap.size; subscribe.next({
subscribe.next({ total: this.status.docs.size,
total: this.status.docs.size, syncing: syncing,
syncing: syncing, retrying: this.status.retrying,
retrying: this.status.retrying, errorMessage: this.status.errorMessage,
errorMessage: this.status.errorMessage, synced: syncing === 0,
synced: syncing === 0, });
}); }
} };
}; next();
const dispose = this.statusUpdatedSubject$.subscribe(() => {
next(); next();
return this.statusUpdatedSubject$.subscribe(() => {
next();
});
}); });
} return () => {
dispose.unsubscribe();
};
}).pipe(
share({
connector: () => new ReplaySubject(1),
})
);
docState$(docId: string) { docState$(docId: string) {
return new Observable<PeerDocState>(subscribe => { return new Observable<PeerDocState>(subscribe => {
const next = () => { const next = () => {
const syncing = if (this.status.skipped) {
!this.status.connectedDocs.has(docId) || subscribe.next({
this.status.jobMap.has(docId); syncing: false,
synced: true,
retrying: false,
errorMessage: null,
});
}
subscribe.next({ subscribe.next({
syncing: syncing, syncing:
synced: !syncing, !this.status.connectedDocs.has(docId) ||
this.status.jobMap.has(docId),
synced: !this.status.jobMap.has(docId),
retrying: this.status.retrying, retrying: this.status.retrying,
errorMessage: this.status.errorMessage, errorMessage: this.status.errorMessage,
}); });
@@ -524,10 +535,6 @@ export class DocSyncPeer {
const disposes: (() => void)[] = []; const disposes: (() => void)[] = [];
try { try {
console.info('Remote sync started');
this.status.syncing = true;
this.statusUpdatedSubject$.next(true);
// wait for all storages to connect, timeout after 30s // wait for all storages to connect, timeout after 30s
await Promise.race([ await Promise.race([
Promise.all([ Promise.all([
@@ -547,6 +554,10 @@ export class DocSyncPeer {
}), }),
]); ]);
console.info('Remote sync started');
this.status.syncing = true;
this.statusUpdatedSubject$.next(true);
// throw error if failed to connect // throw error if failed to connect
for (const storage of [this.remote, this.local, this.syncMetadata]) { for (const storage of [this.remote, this.local, this.syncMetadata]) {
// abort if disconnected // abort if disconnected

View File

@@ -0,0 +1,73 @@
import {
applyUpdate,
type Array as YArray,
Doc as YDoc,
type Map as YMap,
} from 'yjs';
type PromiseResult<T> = T extends Promise<infer R> ? R : never;

export type IdConverter = PromiseResult<ReturnType<typeof getIdConverter>>;

/**
 * Build a bidirectional converter between "old" doc ids (as stored on disk,
 * where subdoc guids and db/userdata ids embed the space id) and normalized
 * "new" doc ids.
 *
 * The mapping for regular docs is read from the root doc: every entry in
 * `meta.pages[*].id` is trusted as the normalized id, and the guid of the
 * matching subdoc under `spaces` is recorded as its old id. `db$…` and
 * `userdata$…` ids are converted structurally by inserting/removing the
 * space-id segment, without consulting the map.
 *
 * @param storage minimal storage view; `getDocBuffer` returns the encoded yjs
 *   update of a doc, or `null` when the doc does not exist.
 * @param spaceId id of the workspace whose root doc defines the mapping; it
 *   always maps to itself in both directions.
 */
export async function getIdConverter(
  storage: {
    getDocBuffer: (id: string) => Promise<Uint8Array | null>;
  },
  spaceId: string
) {
  const oldIdToNewId: Record<string, string> = { [spaceId]: spaceId };
  const newIdToOldId: Record<string, string> = { [spaceId]: spaceId };

  const rootDocBuffer = await storage.getDocBuffer(spaceId);
  if (rootDocBuffer) {
    const ydoc = new YDoc({
      guid: spaceId,
    });
    applyUpdate(ydoc, rootDocBuffer);
    // get all ids from rootDoc.meta.pages.[*].id, trust this id as normalized id
    const normalizedDocIds = (
      (ydoc.getMap('meta') as YMap<any> | undefined)?.get('pages') as
        | YArray<YMap<any>>
        | undefined
    )
      ?.map(i => i.get('id') as string)
      .filter(i => !!i);
    const spaces = ydoc.getMap('spaces') as YMap<any> | undefined;
    for (const pageId of normalizedDocIds ?? []) {
      const subdoc = spaces?.get(pageId);
      if (subdoc && subdoc instanceof YDoc) {
        oldIdToNewId[subdoc.guid] = pageId;
        newIdToOldId[pageId] = subdoc.guid;
      }
    }
  }

  // Hoisted out of the closures so they are compiled once per converter.
  // NOTE: the space-id segment must be followed by `$docId`, hence the escaped
  // trailing `\$`. The previous pattern ended with an unescaped `$`
  // (end-of-string anchor), so it could never match ids carrying a doc-id
  // suffix and the userdata branch was dead code.
  // NOTE(review): assumes spaceId contains no regex metacharacters (nanoid-like
  // ids) — confirm against id generation.
  const oldUserdataPattern = new RegExp(`^userdata\\$[\\w-]+\\$${spaceId}\\$`);
  const newUserdataPattern = new RegExp(`^(userdata\\$[\\w-]+)\\$([^\\$]+)`);

  return {
    newIdToOldId(newId: string) {
      if (newId.startsWith(`db$`)) {
        // db$docId -> db$${spaceId}$docId
        return newId.replace(`db$`, `db$${spaceId}$`);
      }
      if (newId.startsWith(`userdata$`)) {
        // userdata$userId$docId -> userdata$userId$spaceId$docId
        return newId.replace(
          newUserdataPattern,
          (_, p1, p2) => `${p1}$${spaceId}$${p2}`
        );
      }
      return newIdToOldId[newId] ?? newId;
    },
    oldIdToNewId(oldId: string) {
      // db$${spaceId}$docId -> db$docId
      if (oldId.startsWith(`db$${spaceId}$`)) {
        return oldId.replace(`db$${spaceId}$`, `db$`);
      }
      // userdata$userId$spaceId$docId -> userdata$userId$docId
      if (oldUserdataPattern.test(oldId)) {
        return oldId.replace(`$${spaceId}$`, '$');
      }
      return oldIdToNewId[oldId] ?? oldId;
    },
  };
}

View File

@@ -23,7 +23,6 @@ export class WorkerClient {
private readonly client: OpClient<WorkerOps>, private readonly client: OpClient<WorkerOps>,
options: WorkerInitOptions options: WorkerInitOptions
) { ) {
client.listen();
this.client.call('worker.init', options).catch(err => { this.client.call('worker.init', options).catch(err => {
console.error('error initializing worker', err); console.error('error initializing worker', err);
}); });
@@ -156,7 +155,9 @@ class WorkerBlobStorage implements BlobStorage {
class WorkerDocSync implements DocSync { class WorkerDocSync implements DocSync {
constructor(private readonly client: OpClient<WorkerOps>) {} constructor(private readonly client: OpClient<WorkerOps>) {}
readonly state$ = this.client.ob$('docSync.state'); get state$() {
return this.client.ob$('docSync.state');
}
docState$(docId: string) { docState$(docId: string) {
return this.client.ob$('docSync.docState', docId); return this.client.ob$('docSync.docState', docId);
@@ -174,7 +175,9 @@ class WorkerDocSync implements DocSync {
class WorkerBlobSync implements BlobSync { class WorkerBlobSync implements BlobSync {
constructor(private readonly client: OpClient<WorkerOps>) {} constructor(private readonly client: OpClient<WorkerOps>) {}
readonly state$ = this.client.ob$('blobSync.state'); get state$() {
return this.client.ob$('blobSync.state');
}
setMaxBlobSize(size: number): void { setMaxBlobSize(size: number): void {
this.client.call('blobSync.setMaxBlobSize', size).catch(err => { this.client.call('blobSync.setMaxBlobSize', size).catch(err => {
console.error('error setting max blob size', err); console.error('error setting max blob size', err);

View File

@@ -1,3 +1,4 @@
import { MANUALLY_STOP } from '@toeverything/infra';
import type { OpConsumer } from '@toeverything/infra/op'; import type { OpConsumer } from '@toeverything/infra/op';
import { Observable } from 'rxjs'; import { Observable } from 'rxjs';
@@ -11,6 +12,7 @@ import type { WorkerInitOptions, WorkerOps } from './ops';
export type { WorkerOps }; export type { WorkerOps };
export class WorkerConsumer { export class WorkerConsumer {
private inited = false;
private storages: PeerStorageOptions<SpaceStorage> | null = null; private storages: PeerStorageOptions<SpaceStorage> | null = null;
private sync: Sync | null = null; private sync: Sync | null = null;
@@ -57,14 +59,18 @@ export class WorkerConsumer {
} }
constructor( constructor(
private readonly consumer: OpConsumer<WorkerOps>,
private readonly availableStorageImplementations: StorageConstructor[] private readonly availableStorageImplementations: StorageConstructor[]
) { ) {}
this.registerHandlers();
this.consumer.listen(); bindConsumer(consumer: OpConsumer<WorkerOps>) {
this.registerHandlers(consumer);
} }
init(init: WorkerInitOptions) { init(init: WorkerInitOptions) {
if (this.inited) {
return;
}
this.inited = true;
this.storages = { this.storages = {
local: new SpaceStorage( local: new SpaceStorage(
Object.fromEntries( Object.fromEntries(
@@ -120,13 +126,13 @@ export class WorkerConsumer {
} }
} }
private registerHandlers() { private registerHandlers(consumer: OpConsumer<WorkerOps>) {
const collectJobs = new Map< const collectJobs = new Map<
string, string,
(awareness: AwarenessRecord | null) => void (awareness: AwarenessRecord | null) => void
>(); >();
let collectId = 0; let collectId = 0;
this.consumer.registerAll({ consumer.registerAll({
'worker.init': this.init.bind(this), 'worker.init': this.init.bind(this),
'worker.destroy': this.destroy.bind(this), 'worker.destroy': this.destroy.bind(this),
'docStorage.getDoc': (docId: string) => this.docStorage.getDoc(docId), 'docStorage.getDoc': (docId: string) => this.docStorage.getDoc(docId),
@@ -158,7 +164,7 @@ export class WorkerConsumer {
.catch((error: any) => { .catch((error: any) => {
subscriber.error(error); subscriber.error(error);
}); });
return () => abortController.abort(); return () => abortController.abort(MANUALLY_STOP);
}), }),
'blobStorage.getBlob': key => this.blobStorage.get(key), 'blobStorage.getBlob': key => this.blobStorage.get(key),
'blobStorage.setBlob': blob => this.blobStorage.set(blob), 'blobStorage.setBlob': blob => this.blobStorage.set(blob),
@@ -212,13 +218,7 @@ export class WorkerConsumer {
}), }),
'awarenessStorage.collect': ({ collectId, awareness }) => 'awarenessStorage.collect': ({ collectId, awareness }) =>
collectJobs.get(collectId)?.(awareness), collectJobs.get(collectId)?.(awareness),
'docSync.state': () => 'docSync.state': () => this.docSync.state$,
new Observable(subscriber => {
const subscription = this.docSync.state$.subscribe(state => {
subscriber.next(state);
});
return () => subscription.unsubscribe();
}),
'docSync.docState': docId => 'docSync.docState': docId =>
new Observable(subscriber => { new Observable(subscriber => {
const subscription = this.docSync const subscription = this.docSync
@@ -247,7 +247,7 @@ export class WorkerConsumer {
.catch(error => { .catch(error => {
subscriber.error(error); subscriber.error(error);
}); });
return () => abortController.abort(); return () => abortController.abort(MANUALLY_STOP);
}), }),
'blobSync.state': () => this.blobSync.state$, 'blobSync.state': () => this.blobSync.state$,
'blobSync.setMaxBlobSize': size => this.blobSync.setMaxBlobSize(size), 'blobSync.setMaxBlobSize': size => this.blobSync.setMaxBlobSize(size),
@@ -262,7 +262,7 @@ export class WorkerConsumer {
this.awarenessSync.update(awareness, origin), this.awarenessSync.update(awareness, origin),
'awarenessSync.subscribeUpdate': docId => 'awarenessSync.subscribeUpdate': docId =>
new Observable(subscriber => { new Observable(subscriber => {
return this.awarenessStorage.subscribeUpdate( return this.awarenessSync.subscribeUpdate(
docId, docId,
(update, origin) => { (update, origin) => {
subscriber.next({ subscriber.next({
@@ -279,6 +279,10 @@ export class WorkerConsumer {
collectJobs.delete(currentCollectId.toString()); collectJobs.delete(currentCollectId.toString());
}); });
}); });
subscriber.next({
type: 'awareness-collect',
collectId: currentCollectId.toString(),
});
return promise; return promise;
} }
); );

View File

@@ -6,12 +6,8 @@ import { configureCommonModules } from '@affine/core/modules';
import { I18nProvider } from '@affine/core/modules/i18n'; import { I18nProvider } from '@affine/core/modules/i18n';
import { LifecycleService } from '@affine/core/modules/lifecycle'; import { LifecycleService } from '@affine/core/modules/lifecycle';
import { configureLocalStorageStateStorageImpls } from '@affine/core/modules/storage'; import { configureLocalStorageStateStorageImpls } from '@affine/core/modules/storage';
import { configureIndexedDBUserspaceStorageProvider } from '@affine/core/modules/userspace';
import { configureBrowserWorkbenchModule } from '@affine/core/modules/workbench'; import { configureBrowserWorkbenchModule } from '@affine/core/modules/workbench';
import { import { configureBrowserWorkspaceFlavours } from '@affine/core/modules/workspace-engine';
configureBrowserWorkspaceFlavours,
configureIndexedDBWorkspaceEngineStorageProvider,
} from '@affine/core/modules/workspace-engine';
import { Framework, FrameworkRoot, getCurrentStore } from '@toeverything/infra'; import { Framework, FrameworkRoot, getCurrentStore } from '@toeverything/infra';
import { Suspense } from 'react'; import { Suspense } from 'react';
import { RouterProvider } from 'react-router-dom'; import { RouterProvider } from 'react-router-dom';
@@ -25,8 +21,6 @@ configureCommonModules(framework);
configureBrowserWorkbenchModule(framework); configureBrowserWorkbenchModule(framework);
configureLocalStorageStateStorageImpls(framework); configureLocalStorageStateStorageImpls(framework);
configureBrowserWorkspaceFlavours(framework); configureBrowserWorkspaceFlavours(framework);
configureIndexedDBWorkspaceEngineStorageProvider(framework);
configureIndexedDBUserspaceStorageProvider(framework);
configureMobileModules(framework); configureMobileModules(framework);
const frameworkProvider = framework.provider(); const frameworkProvider = framework.provider();

View File

@@ -12,11 +12,13 @@
"@affine/core": "workspace:*", "@affine/core": "workspace:*",
"@affine/electron-api": "workspace:*", "@affine/electron-api": "workspace:*",
"@affine/i18n": "workspace:*", "@affine/i18n": "workspace:*",
"@affine/nbstore": "workspace:*",
"@emotion/react": "^11.14.0", "@emotion/react": "^11.14.0",
"@sentry/react": "^8.44.0", "@sentry/react": "^8.44.0",
"@toeverything/infra": "workspace:*", "@toeverything/infra": "workspace:*",
"@toeverything/theme": "^1.1.3", "@toeverything/theme": "^1.1.3",
"@vanilla-extract/css": "^1.16.1", "@vanilla-extract/css": "^1.16.1",
"async-call-rpc": "^6.4.2",
"next-themes": "^0.4.4", "next-themes": "^0.4.4",
"react": "^19.0.0", "react": "^19.0.0",
"react-dom": "^19.0.0", "react-dom": "^19.0.0",

View File

@@ -19,25 +19,26 @@ import { configureFindInPageModule } from '@affine/core/modules/find-in-page';
import { GlobalContextService } from '@affine/core/modules/global-context'; import { GlobalContextService } from '@affine/core/modules/global-context';
import { I18nProvider } from '@affine/core/modules/i18n'; import { I18nProvider } from '@affine/core/modules/i18n';
import { LifecycleService } from '@affine/core/modules/lifecycle'; import { LifecycleService } from '@affine/core/modules/lifecycle';
import { configureElectronStateStorageImpls } from '@affine/core/modules/storage'; import {
configureElectronStateStorageImpls,
NbstoreProvider,
} from '@affine/core/modules/storage';
import { import {
ClientSchemeProvider, ClientSchemeProvider,
PopupWindowProvider, PopupWindowProvider,
} from '@affine/core/modules/url'; } from '@affine/core/modules/url';
import { configureSqliteUserspaceStorageProvider } from '@affine/core/modules/userspace';
import { import {
configureDesktopWorkbenchModule, configureDesktopWorkbenchModule,
WorkbenchService, WorkbenchService,
} from '@affine/core/modules/workbench'; } from '@affine/core/modules/workbench';
import { WorkspacesService } from '@affine/core/modules/workspace'; import { WorkspacesService } from '@affine/core/modules/workspace';
import { import { configureBrowserWorkspaceFlavours } from '@affine/core/modules/workspace-engine';
configureBrowserWorkspaceFlavours,
configureSqliteWorkspaceEngineStorageProvider,
} from '@affine/core/modules/workspace-engine';
import createEmotionCache from '@affine/core/utils/create-emotion-cache'; import createEmotionCache from '@affine/core/utils/create-emotion-cache';
import { apis, events } from '@affine/electron-api'; import { apis, events } from '@affine/electron-api';
import { WorkerClient } from '@affine/nbstore/worker/client';
import { CacheProvider } from '@emotion/react'; import { CacheProvider } from '@emotion/react';
import { Framework, FrameworkRoot, getCurrentStore } from '@toeverything/infra'; import { Framework, FrameworkRoot, getCurrentStore } from '@toeverything/infra';
import { OpClient } from '@toeverything/infra/op';
import { Suspense } from 'react'; import { Suspense } from 'react';
import { RouterProvider } from 'react-router-dom'; import { RouterProvider } from 'react-router-dom';
@@ -71,14 +72,61 @@ const framework = new Framework();
configureCommonModules(framework); configureCommonModules(framework);
configureElectronStateStorageImpls(framework); configureElectronStateStorageImpls(framework);
configureBrowserWorkspaceFlavours(framework); configureBrowserWorkspaceFlavours(framework);
configureSqliteWorkspaceEngineStorageProvider(framework);
configureSqliteUserspaceStorageProvider(framework);
configureDesktopWorkbenchModule(framework); configureDesktopWorkbenchModule(framework);
configureAppTabsHeaderModule(framework); configureAppTabsHeaderModule(framework);
configureFindInPageModule(framework); configureFindInPageModule(framework);
configureDesktopApiModule(framework); configureDesktopApiModule(framework);
configureSpellCheckSettingModule(framework); configureSpellCheckSettingModule(framework);
framework.impl(NbstoreProvider, {
  openStore(key, options) {
    // Two port pairs are spliced together here:
    //  - portForOpClient <-> portForWorker: a local MessageChannel handed to
    //    the OpClient on this side;
    //  - portFromWorker: the MessagePort delivered by the main process once
    //    the background worker for `key` is connected.
    // The handler below forwards messages between the two pairs in both
    // directions.
    const { port1: portForOpClient, port2: portForWorker } =
      new MessageChannel();
    let portFromWorker: MessagePort | null = null;
    // unique id so concurrent openStore calls can tell their ports apart
    let portId = crypto.randomUUID();
    const handleMessage = (ev: MessageEvent) => {
      if (
        ev.data.type === 'electron:worker-connect' &&
        ev.data.portId === portId
      ) {
        portFromWorker = ev.ports[0];
        // connect portForWorker and portFromWorker
        portFromWorker.addEventListener('message', ev => {
          portForWorker.postMessage(ev.data);
        });
        portForWorker.addEventListener('message', ev => {
          // oxlint-disable-next-line no-non-null-assertion
          portFromWorker!.postMessage(ev.data);
        });
        portForWorker.start();
        portFromWorker.start();
      }
    };
    window.addEventListener('message', handleMessage);
    // ask the main process to create/attach the background worker for `key`;
    // the reply arrives via the 'electron:worker-connect' window message above
    // oxlint-disable-next-line no-non-null-assertion
    apis!.worker.connectWorker(key, portId).catch(err => {
      console.error('failed to connect worker', err);
    });
    const store = new WorkerClient(new OpClient(portForOpClient), options);
    portForOpClient.start();
    return {
      store,
      // tear down all three ports and release this connection in the main
      // process (the worker window is destroyed when its last port is gone)
      dispose: () => {
        window.removeEventListener('message', handleMessage);
        portForOpClient.close();
        portForWorker.close();
        portFromWorker?.close();
        // oxlint-disable-next-line no-non-null-assertion
        apis!.worker.disconnectWorker(key, portId).catch(err => {
          console.error('failed to disconnect worker', err);
        });
      },
    };
  },
});
framework.impl(PopupWindowProvider, p => { framework.impl(PopupWindowProvider, p => {
const apis = p.get(DesktopApiService).api; const apis = p.get(DesktopApiService).api;
return { return {

View File

@@ -0,0 +1,36 @@
import '@affine/core/bootstrap/electron';
import { apis } from '@affine/electron-api';
import { broadcastChannelStorages } from '@affine/nbstore/broadcast-channel';
import { cloudStorages } from '@affine/nbstore/cloud';
import { bindNativeDBApis, sqliteStorages } from '@affine/nbstore/sqlite';
import {
bindNativeDBV1Apis,
sqliteV1Storages,
} from '@affine/nbstore/sqlite/v1';
import {
WorkerConsumer,
type WorkerOps,
} from '@affine/nbstore/worker/consumer';
import { OpConsumer } from '@toeverything/infra/op';
// Bind the native sqlite-backed storage APIs exposed by the preload script.
// oxlint-disable-next-line no-non-null-assertion
bindNativeDBApis(apis!.nbstore);
// oxlint-disable-next-line no-non-null-assertion
bindNativeDBV1Apis(apis!.db);

// Single WorkerConsumer shared by every renderer connection; it serves
// nbstore ops backed by sqlite (v1 and v2), broadcast-channel and cloud
// storage implementations.
const worker = new WorkerConsumer([
  ...sqliteStorages,
  ...sqliteV1Storages,
  ...broadcastChannelStorages,
  ...cloudStorages,
]);

// Each renderer connection arrives as a MessagePort forwarded by the preload
// script (see listenWorkerApis); wrap it in a fresh OpConsumer and bind it to
// the shared worker.
window.addEventListener('message', ev => {
  if (ev.data.type === 'electron:worker-connect') {
    const port = ev.ports[0];
    const consumer = new OpConsumer<WorkerOps>(port);
    worker.bindConsumer(consumer);
  }
});

View File

@@ -0,0 +1,96 @@
import '@affine/core/bootstrap/electron';
import type { ClientHandler } from '@affine/electron-api';
import { broadcastChannelStorages } from '@affine/nbstore/broadcast-channel';
import { cloudStorages } from '@affine/nbstore/cloud';
import { bindNativeDBApis, sqliteStorages } from '@affine/nbstore/sqlite';
import {
bindNativeDBV1Apis,
sqliteV1Storages,
} from '@affine/nbstore/sqlite/v1';
import {
WorkerConsumer,
type WorkerOps,
} from '@affine/nbstore/worker/consumer';
import { OpConsumer } from '@toeverything/infra/op';
import { AsyncCall } from 'async-call-rpc';
// Single WorkerConsumer instance shared by all SharedWorker connections.
const worker = new WorkerConsumer([
  ...sqliteStorages,
  ...sqliteV1Storages,
  ...broadcastChannelStorages,
  ...cloudStorages,
]);

// number of pages currently connected; the worker closes itself at zero
let activeConnectionCount = 0;
// the electron API bridge is set up once, by the first connection
let electronAPIsInitialized = false;
/**
 * Bridge the electron main/helper APIs into this worker.
 *
 * The first connected page is asked (via `__electron-apis-init__`) to relay a
 * dedicated RPC MessagePort back (`__electron-apis__`). Over that port an
 * AsyncCall RPC exposes the electron handlers under flat
 * `"namespace:method"` keys; a Proxy re-shapes them into the nested
 * `ClientHandler` form (`apis.namespace.method`) expected by the nbstore
 * native bindings.
 *
 * Only runs once per worker lifetime; later connections reuse the bridge.
 */
function connectElectronAPIs(port: MessagePort) {
  if (electronAPIsInitialized) {
    return;
  }
  electronAPIsInitialized = true;
  // request the RPC port from the page on the other end
  port.postMessage({ type: '__electron-apis-init__' });
  const { promise, resolve } = Promise.withResolvers<MessagePort>();
  port.addEventListener('message', event => {
    if (event.data.type === '__electron-apis__') {
      // NOTE: shadows the outer `port` — this is the transferred RPC port
      const [port] = event.ports;
      resolve(port);
    }
  });
  const rpc = AsyncCall<Record<string, any>>(null, {
    // the channel only materializes once the RPC port has been delivered
    channel: promise.then(p => ({
      on(listener) {
        p.onmessage = e => {
          listener(e.data);
        };
        p.start();
        return () => {
          p.onmessage = null;
          try {
            p.close();
          } catch (err) {
            console.error('close port error', err);
          }
        };
      },
      send(data) {
        p.postMessage(data);
      },
    })),
    log: false,
  });
  // re-shape the flat "namespace:method" RPC surface into apis.namespace.method
  const electronAPIs = new Proxy<ClientHandler>(rpc as any, {
    get(_, namespace: string) {
      return new Proxy(rpc as any, {
        get(_, method: string) {
          return rpc[`${namespace}:${method}`];
        },
      });
    },
  });
  bindNativeDBApis(electronAPIs.nbstore);
  bindNativeDBV1Apis(electronAPIs.db);
}
// SharedWorker connection entry point: each connecting page gets its own
// OpConsumer bound to the shared WorkerConsumer.
(globalThis as any).onconnect = (event: MessageEvent) => {
  activeConnectionCount++;
  const port = event.ports[0];
  port.addEventListener('message', (event: MessageEvent) => {
    // pages announce their departure with '__close__'; once the last one is
    // gone the whole worker shuts down.
    // NOTE(review): a page that dies without sending '__close__' leaks a
    // connection count — confirm callers always send it on unload.
    if (event.data.type === '__close__') {
      activeConnectionCount--;
      if (activeConnectionCount === 0) {
        globalThis.close();
      }
    }
  });
  connectElectronAPIs(port);
  const consumer = new OpConsumer<WorkerOps>(port);
  worker.bindConsumer(consumer);
};

View File

@@ -1,3 +1,12 @@
import '@affine/core/bootstrap/electron'; import '@affine/core/bootstrap/electron';
import '@affine/component/theme'; import '@affine/component/theme';
import './global.css'; import './global.css';
import { apis } from '@affine/electron-api';
import { bindNativeDBApis } from '@affine/nbstore/sqlite';
import { bindNativeDBV1Apis } from '@affine/nbstore/sqlite/v1';
// oxlint-disable-next-line no-non-null-assertion
bindNativeDBApis(apis!.nbstore);
// oxlint-disable-next-line no-non-null-assertion
bindNativeDBV1Apis(apis!.db);

View File

@@ -11,7 +11,7 @@ import { configureDesktopApiModule } from '@affine/core/modules/desktop-api';
import { configureI18nModule, I18nProvider } from '@affine/core/modules/i18n'; import { configureI18nModule, I18nProvider } from '@affine/core/modules/i18n';
import { import {
configureElectronStateStorageImpls, configureElectronStateStorageImpls,
configureGlobalStorageModule, configureStorageModule,
} from '@affine/core/modules/storage'; } from '@affine/core/modules/storage';
import { configureAppThemeModule } from '@affine/core/modules/theme'; import { configureAppThemeModule } from '@affine/core/modules/theme';
import { Framework, FrameworkRoot } from '@toeverything/infra'; import { Framework, FrameworkRoot } from '@toeverything/infra';
@@ -19,7 +19,7 @@ import { Framework, FrameworkRoot } from '@toeverything/infra';
import * as styles from './app.css'; import * as styles from './app.css';
const framework = new Framework(); const framework = new Framework();
configureGlobalStorageModule(framework); configureStorageModule(framework);
configureElectronStateStorageImpls(framework); configureElectronStateStorageImpls(framework);
configureAppTabsHeaderModule(framework); configureAppTabsHeaderModule(framework);
configureAppSidebarModule(framework); configureAppSidebarModule(framework);

View File

@@ -12,6 +12,7 @@
{ "path": "../../core" }, { "path": "../../core" },
{ "path": "../../electron-api" }, { "path": "../../electron-api" },
{ "path": "../../i18n" }, { "path": "../../i18n" },
{ "path": "../../../common/nbstore" },
{ "path": "../../../common/infra" }, { "path": "../../../common/infra" },
{ "path": "../../../../tools/utils" } { "path": "../../../../tools/utils" }
] ]

View File

@@ -2,5 +2,6 @@ export const config = {
entry: { entry: {
app: './src/index.tsx', app: './src/index.tsx',
shell: './src/shell/index.tsx', shell: './src/shell/index.tsx',
backgroundWorker: './src/background-worker/index.ts',
}, },
}; };

View File

@@ -15,6 +15,7 @@ import {
switchToPreviousTab, switchToPreviousTab,
undoCloseTab, undoCloseTab,
} from '../windows-manager'; } from '../windows-manager';
import { WorkerManager } from '../worker/pool';
import { applicationMenuSubjects } from './subject'; import { applicationMenuSubjects } from './subject';
// Unique id for menuitems // Unique id for menuitems
@@ -113,6 +114,21 @@ export function createApplicationMenu() {
showDevTools(); showDevTools();
}, },
}, },
{
label: 'Open worker devtools',
click: () => {
Menu.buildFromTemplate(
Array.from(WorkerManager.instance.workers.values()).map(item => ({
label: `${item.key}`,
click: () => {
item.browserWindow.webContents.openDevTools({
mode: 'undocked',
});
},
}))
).popup();
},
},
{ type: 'separator' }, { type: 'separator' },
{ role: 'resetZoom' }, { role: 'resetZoom' },
{ role: 'zoomIn' }, { role: 'zoomIn' },
@@ -199,7 +215,7 @@ export function createApplicationMenu() {
{ {
label: 'Learn More', label: 'Learn More',
click: async () => { click: async () => {
// oxlint-disable-next-line // oxlint-disable-next-line no-var-requires
const { shell } = require('electron'); const { shell } = require('electron');
await shell.openExternal('https://affine.pro/'); await shell.openExternal('https://affine.pro/');
}, },
@@ -220,7 +236,7 @@ export function createApplicationMenu() {
{ {
label: 'Documentation', label: 'Documentation',
click: async () => { click: async () => {
// oxlint-disable-next-line // oxlint-disable-next-line no-var-requires
const { shell } = require('electron'); const { shell } = require('electron');
await shell.openExternal( await shell.openExternal(
'https://docs.affine.pro/docs/hello-bonjour-aloha-你好' 'https://docs.affine.pro/docs/hello-bonjour-aloha-你好'

View File

@@ -1,4 +1,5 @@
export const mainWindowOrigin = process.env.DEV_SERVER_URL || 'file://.'; export const mainWindowOrigin = process.env.DEV_SERVER_URL || 'file://.';
export const onboardingViewUrl = `${mainWindowOrigin}${mainWindowOrigin.endsWith('/') ? '' : '/'}onboarding`; export const onboardingViewUrl = `${mainWindowOrigin}${mainWindowOrigin.endsWith('/') ? '' : '/'}onboarding`;
export const shellViewUrl = `${mainWindowOrigin}${mainWindowOrigin.endsWith('/') ? '' : '/'}shell.html`; export const shellViewUrl = `${mainWindowOrigin}${mainWindowOrigin.endsWith('/') ? '' : '/'}shell.html`;
export const backgroundWorkerViewUrl = `${mainWindowOrigin}${mainWindowOrigin.endsWith('/') ? '' : '/'}background-worker.html`;
export const customThemeViewUrl = `${mainWindowOrigin}${mainWindowOrigin.endsWith('/') ? '' : '/'}theme-editor`; export const customThemeViewUrl = `${mainWindowOrigin}${mainWindowOrigin.endsWith('/') ? '' : '/'}theme-editor`;

View File

@@ -8,6 +8,7 @@ import { getLogFilePath, logger, revealLogFile } from './logger';
import { sharedStorageHandlers } from './shared-storage'; import { sharedStorageHandlers } from './shared-storage';
import { uiHandlers } from './ui/handlers'; import { uiHandlers } from './ui/handlers';
import { updaterHandlers } from './updater'; import { updaterHandlers } from './updater';
import { workerHandlers } from './worker/handlers';
export const debugHandlers = { export const debugHandlers = {
revealLogFile: async () => { revealLogFile: async () => {
@@ -27,6 +28,7 @@ export const allHandlers = {
configStorage: configStorageHandlers, configStorage: configStorageHandlers,
findInPage: findInPageHandlers, findInPage: findInPageHandlers,
sharedStorage: sharedStorageHandlers, sharedStorage: sharedStorageHandlers,
worker: workerHandlers,
}; };
export const registerHandlers = () => { export const registerHandlers = () => {

View File

@@ -25,7 +25,6 @@ import {
import { isMacOS } from '../../shared/utils'; import { isMacOS } from '../../shared/utils';
import { beforeAppQuit } from '../cleanup'; import { beforeAppQuit } from '../cleanup';
import { isDev } from '../config';
import { mainWindowOrigin, shellViewUrl } from '../constants'; import { mainWindowOrigin, shellViewUrl } from '../constants';
import { ensureHelperProcess } from '../helper-process'; import { ensureHelperProcess } from '../helper-process';
import { logger } from '../logger'; import { logger } from '../logger';
@@ -871,9 +870,6 @@ export class WebContentViewsManager {
}); });
view.webContents.loadURL(shellViewUrl).catch(err => logger.error(err)); view.webContents.loadURL(shellViewUrl).catch(err => logger.error(err));
if (isDev) {
view.webContents.openDevTools();
}
} }
view.webContents.on('destroyed', () => { view.webContents.on('destroyed', () => {

View File

@@ -0,0 +1,19 @@
import type { NamespaceHandlers } from '../type';
import { WorkerManager } from './pool';
/**
 * IPC handlers for the `worker` namespace, called from renderer preload code.
 */
export const workerHandlers = {
  // Create/attach the background worker for `key` and hand the renderer its
  // end of the MessageChannel via a 'worker-connect' message (the port travels
  // in the transfer list, `portId` identifies which openStore call it is for).
  connectWorker: async (e, key: string, portId: string) => {
    const { portForRenderer } = await WorkerManager.instance.connectWorker(
      key,
      portId,
      e.sender
    );
    e.sender.postMessage('worker-connect', { portId }, [portForRenderer]);
    return {
      portId: portId,
    };
  },
  // Release one renderer connection; the worker window is destroyed when its
  // last connection goes away.
  disconnectWorker: async (_, key: string, portId: string) => {
    WorkerManager.instance.disconnectWorker(key, portId);
  },
} satisfies NamespaceHandlers;

View File

@@ -0,0 +1,96 @@
import { join } from 'node:path';
import { BrowserWindow, MessageChannelMain, type WebContents } from 'electron';
import { backgroundWorkerViewUrl } from '../constants';
import { ensureHelperProcess } from '../helper-process';
import { logger } from '../logger';
/**
 * Compose the extra CLI arguments passed to the worker BrowserWindow so its
 * preload script can reconstruct the exposed main/helper API metadata.
 */
async function getAdditionalArguments() {
  const exposed = await import('../exposed');
  const mainMeta = exposed.getExposedMeta();
  const helperManager = await ensureHelperProcess();
  const helperMeta = await helperManager.rpc?.getMeta();
  return [
    `--main-exposed-meta=${JSON.stringify(mainMeta)}`,
    `--helper-exposed-meta=${JSON.stringify(helperMeta)}`,
    `--window-name=worker`,
  ];
}
/**
 * Manages hidden background-worker BrowserWindows, one per `key`.
 *
 * Renderers connect through `connectWorker` and receive one end of a
 * MessageChannelMain; the other end is posted into the worker window. A
 * worker window stays alive as long as at least one renderer connection
 * (tracked by port id) is attached to it.
 */
export class WorkerManager {
  // process-wide singleton; also read by the app menu to list live workers
  static readonly instance = new WorkerManager();

  workers = new Map<
    string,
    { browserWindow: BrowserWindow; ports: Set<string>; key: string }
  >();

  /** Return the worker record for `key`, creating the window on first use. */
  private async getOrCreateWorker(key: string) {
    // fast path: skip the async setup work when the worker already exists
    const existing = this.workers.get(key);
    if (existing) {
      return existing;
    }
    const additionalArguments = await getAdditionalArguments();
    const helperProcessManager = await ensureHelperProcess();
    // re-check after awaiting: another caller may have created it meanwhile
    const exists = this.workers.get(key);
    if (exists) {
      return exists;
    }
    const worker = new BrowserWindow({
      width: 1200,
      height: 600,
      webPreferences: {
        preload: join(__dirname, './preload.js'),
        additionalArguments: additionalArguments,
      },
      show: false,
    });
    let disconnectHelperProcess: (() => void) | null = null;
    worker.on('close', e => {
      // never let the hidden window close itself; tear it down explicitly
      e.preventDefault();
      if (worker && !worker.isDestroyed()) {
        worker.destroy();
        this.workers.delete(key);
        disconnectHelperProcess?.();
      }
    });
    worker.loadURL(backgroundWorkerViewUrl).catch(e => {
      logger.error('failed to load url', e);
    });
    worker.webContents.addListener('did-finish-load', () => {
      disconnectHelperProcess = helperProcessManager.connectRenderer(
        worker.webContents
      );
    });
    const record = { browserWindow: worker, ports: new Set<string>(), key };
    this.workers.set(key, record);
    return record;
  }

  /**
   * Attach a renderer connection to the worker for `key` and return the
   * MessagePort end to forward to the renderer. Disconnects automatically
   * when the renderer's WebContents is destroyed.
   */
  async connectWorker(
    key: string,
    portId: string,
    bindWebContent: WebContents
  ) {
    bindWebContent.addListener('destroyed', () => {
      this.disconnectWorker(key, portId);
    });
    const worker = await this.getOrCreateWorker(key);
    // Track this connection. Without this, `ports` stayed empty forever and
    // the first disconnectWorker call destroyed a worker window that other
    // renderers were still using.
    worker.ports.add(portId);
    const { port1: portForWorker, port2: portForRenderer } =
      new MessageChannelMain();
    worker.browserWindow.webContents.postMessage('worker-connect', { portId }, [
      portForWorker,
    ]);
    return { portForRenderer, portId };
  }

  /**
   * Drop one renderer connection; destroy the worker window once its last
   * connection is gone.
   */
  disconnectWorker(key: string, portId: string) {
    const worker = this.workers.get(key);
    if (worker) {
      worker.ports.delete(portId);
      if (worker.ports.size === 0) {
        worker.browserWindow.destroy();
        this.workers.delete(key);
      }
    }
  }
}

View File

@@ -2,11 +2,13 @@ import '@sentry/electron/preload';
import { contextBridge } from 'electron'; import { contextBridge } from 'electron';
import { apis, appInfo, events, requestWebWorkerPort } from './electron-api'; import { apis, appInfo, events } from './electron-api';
import { sharedStorage } from './shared-storage'; import { sharedStorage } from './shared-storage';
import { listenWorkerApis } from './worker';
contextBridge.exposeInMainWorld('__appInfo', appInfo); contextBridge.exposeInMainWorld('__appInfo', appInfo);
contextBridge.exposeInMainWorld('__apis', apis); contextBridge.exposeInMainWorld('__apis', apis);
contextBridge.exposeInMainWorld('__events', events); contextBridge.exposeInMainWorld('__events', events);
contextBridge.exposeInMainWorld('__sharedStorage', sharedStorage); contextBridge.exposeInMainWorld('__sharedStorage', sharedStorage);
contextBridge.exposeInMainWorld('__requestWebWorkerPort', requestWebWorkerPort);
listenWorkerApis();

View File

@@ -248,53 +248,3 @@ export const events = {
...mainAPIs.events, ...mainAPIs.events,
...helperAPIs.events, ...helperAPIs.events,
}; };
/**
* Create MessagePort that can be used by web workers
*
* !!!
* SHOULD ONLY BE USED IN RENDERER PROCESS
* !!!
*/
export function requestWebWorkerPort() {
const ch = new MessageChannel();
const localPort = ch.port1;
const remotePort = ch.port2;
// todo: should be able to let the web worker use the electron APIs directly for better performance
const flattenedAPIs = Object.entries(apis).flatMap(([namespace, api]) => {
return Object.entries(api as any).map(([method, fn]) => [
`${namespace}:${method}`,
fn,
]);
});
AsyncCall(Object.fromEntries(flattenedAPIs), {
channel: createMessagePortChannel(localPort),
log: false,
});
const cleanup = () => {
remotePort.close();
localPort.close();
};
const portId = crypto.randomUUID();
setTimeout(() => {
// @ts-expect-error this function should only be evaluated in the renderer process
window.postMessage(
{
type: 'electron:request-api-port',
portId,
ports: [remotePort],
},
'*',
[remotePort]
);
});
localPort.start();
return { portId, cleanup };
}

View File

@@ -0,0 +1,33 @@
import { ipcRenderer } from 'electron';
/**
 * Preload-side relay: when the main process delivers a worker MessagePort via
 * the 'worker-connect' IPC channel, forward it into the page as an
 * 'electron:worker-connect' window message (the NbstoreProvider in the
 * renderer is listening for it, matched by `portId`).
 *
 * The port can only be delivered once the page has loaded, so forwarding is
 * deferred to the 'load' event when necessary. The duplicated postMessage
 * payload of the original has been extracted into a single closure.
 */
export function listenWorkerApis() {
  ipcRenderer.on('worker-connect', (ev, data) => {
    const portForRenderer = ev.ports[0];
    const forwardPort = () => {
      // @ts-expect-error this function should only be evaluated in the renderer process
      window.postMessage(
        {
          type: 'electron:worker-connect',
          portId: data.portId,
        },
        '*',
        [portForRenderer]
      );
    };
    // @ts-expect-error this function should only be evaluated in the renderer process
    if (document.readyState === 'complete') {
      forwardPort();
    } else {
      // @ts-expect-error this function should only be evaluated in the renderer process
      window.addEventListener('load', forwardPort);
    }
  });
}

View File

@@ -24,6 +24,9 @@
9D90BE2B2CCB9876006677DB /* config.xml in Resources */ = {isa = PBXBuildFile; fileRef = 9D90BE1F2CCB9876006677DB /* config.xml */; }; 9D90BE2B2CCB9876006677DB /* config.xml in Resources */ = {isa = PBXBuildFile; fileRef = 9D90BE1F2CCB9876006677DB /* config.xml */; };
9D90BE2D2CCB9876006677DB /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 9D90BE222CCB9876006677DB /* Main.storyboard */; }; 9D90BE2D2CCB9876006677DB /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 9D90BE222CCB9876006677DB /* Main.storyboard */; };
9D90BE2E2CCB9876006677DB /* public in Resources */ = {isa = PBXBuildFile; fileRef = 9D90BE232CCB9876006677DB /* public */; }; 9D90BE2E2CCB9876006677DB /* public in Resources */ = {isa = PBXBuildFile; fileRef = 9D90BE232CCB9876006677DB /* public */; };
9DEC593B2D3002E70027CEBD /* AffineHttpHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9DEC593A2D3002C70027CEBD /* AffineHttpHandler.swift */; };
9DEC593F2D30EFA40027CEBD /* AffineWsHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9DEC593E2D30EFA40027CEBD /* AffineWsHandler.swift */; };
9DEC59432D323EE40027CEBD /* Mutex.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9DEC59422D323EE00027CEBD /* Mutex.swift */; };
9DFCD1462D27D1D70028C92B /* libaffine_mobile_native.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 9DFCD1452D27D1D70028C92B /* libaffine_mobile_native.a */; }; 9DFCD1462D27D1D70028C92B /* libaffine_mobile_native.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 9DFCD1452D27D1D70028C92B /* libaffine_mobile_native.a */; };
C4C413792CBE705D00337889 /* Pods_App.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = AF277DCFFFF123FFC6DF26C7 /* Pods_App.framework */; }; C4C413792CBE705D00337889 /* Pods_App.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = AF277DCFFFF123FFC6DF26C7 /* Pods_App.framework */; };
C4C97C7C2D030BE000BC2AD1 /* affine_mobile_native.swift in Sources */ = {isa = PBXBuildFile; fileRef = C4C97C6F2D0307B700BC2AD1 /* affine_mobile_native.swift */; }; C4C97C7C2D030BE000BC2AD1 /* affine_mobile_native.swift in Sources */ = {isa = PBXBuildFile; fileRef = C4C97C6F2D0307B700BC2AD1 /* affine_mobile_native.swift */; };
@@ -52,6 +55,9 @@
9D90BE202CCB9876006677DB /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; }; 9D90BE202CCB9876006677DB /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
9D90BE212CCB9876006677DB /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; }; 9D90BE212CCB9876006677DB /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
9D90BE232CCB9876006677DB /* public */ = {isa = PBXFileReference; lastKnownFileType = folder; path = public; sourceTree = "<group>"; }; 9D90BE232CCB9876006677DB /* public */ = {isa = PBXFileReference; lastKnownFileType = folder; path = public; sourceTree = "<group>"; };
9DEC593A2D3002C70027CEBD /* AffineHttpHandler.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AffineHttpHandler.swift; sourceTree = "<group>"; };
9DEC593E2D30EFA40027CEBD /* AffineWsHandler.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AffineWsHandler.swift; sourceTree = "<group>"; };
9DEC59422D323EE00027CEBD /* Mutex.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Mutex.swift; sourceTree = "<group>"; };
9DFCD1452D27D1D70028C92B /* libaffine_mobile_native.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; path = libaffine_mobile_native.a; sourceTree = "<group>"; }; 9DFCD1452D27D1D70028C92B /* libaffine_mobile_native.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; path = libaffine_mobile_native.a; sourceTree = "<group>"; };
AF277DCFFFF123FFC6DF26C7 /* Pods_App.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_App.framework; sourceTree = BUILT_PRODUCTS_DIR; }; AF277DCFFFF123FFC6DF26C7 /* Pods_App.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_App.framework; sourceTree = BUILT_PRODUCTS_DIR; };
AF51FD2D460BCFE21FA515B2 /* Pods-App.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-App.release.xcconfig"; path = "Pods/Target Support Files/Pods-App/Pods-App.release.xcconfig"; sourceTree = "<group>"; }; AF51FD2D460BCFE21FA515B2 /* Pods-App.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-App.release.xcconfig"; path = "Pods/Target Support Files/Pods-App/Pods-App.release.xcconfig"; sourceTree = "<group>"; };
@@ -156,6 +162,9 @@
9D90BE242CCB9876006677DB /* App */ = { 9D90BE242CCB9876006677DB /* App */ = {
isa = PBXGroup; isa = PBXGroup;
children = ( children = (
9DEC59422D323EE00027CEBD /* Mutex.swift */,
9DEC593A2D3002C70027CEBD /* AffineHttpHandler.swift */,
9DEC593E2D30EFA40027CEBD /* AffineWsHandler.swift */,
9D52FC422D26CDB600105D0A /* JSValueContainerExt.swift */, 9D52FC422D26CDB600105D0A /* JSValueContainerExt.swift */,
9D90BE1A2CCB9876006677DB /* Plugins */, 9D90BE1A2CCB9876006677DB /* Plugins */,
9D90BE1C2CCB9876006677DB /* AppDelegate.swift */, 9D90BE1C2CCB9876006677DB /* AppDelegate.swift */,
@@ -331,13 +340,16 @@
9D52FC432D26CDBF00105D0A /* JSValueContainerExt.swift in Sources */, 9D52FC432D26CDBF00105D0A /* JSValueContainerExt.swift in Sources */,
5075136E2D1925BC00AD60C0 /* IntelligentsPlugin.swift in Sources */, 5075136E2D1925BC00AD60C0 /* IntelligentsPlugin.swift in Sources */,
5075136A2D1924C600AD60C0 /* RootViewController.swift in Sources */, 5075136A2D1924C600AD60C0 /* RootViewController.swift in Sources */,
9DEC593B2D3002E70027CEBD /* AffineHttpHandler.swift in Sources */,
C4C97C7C2D030BE000BC2AD1 /* affine_mobile_native.swift in Sources */, C4C97C7C2D030BE000BC2AD1 /* affine_mobile_native.swift in Sources */,
C4C97C7D2D030BE000BC2AD1 /* affine_mobile_nativeFFI.h in Sources */, C4C97C7D2D030BE000BC2AD1 /* affine_mobile_nativeFFI.h in Sources */,
C4C97C7E2D030BE000BC2AD1 /* affine_mobile_nativeFFI.modulemap in Sources */, C4C97C7E2D030BE000BC2AD1 /* affine_mobile_nativeFFI.modulemap in Sources */,
E93B276C2CED92B1001409B8 /* NavigationGesturePlugin.swift in Sources */, E93B276C2CED92B1001409B8 /* NavigationGesturePlugin.swift in Sources */,
9DEC59432D323EE40027CEBD /* Mutex.swift in Sources */,
9D90BE252CCB9876006677DB /* CookieManager.swift in Sources */, 9D90BE252CCB9876006677DB /* CookieManager.swift in Sources */,
9D90BE262CCB9876006677DB /* CookiePlugin.swift in Sources */, 9D90BE262CCB9876006677DB /* CookiePlugin.swift in Sources */,
9D6A85332CCF6DA700DAB35F /* HashcashPlugin.swift in Sources */, 9D6A85332CCF6DA700DAB35F /* HashcashPlugin.swift in Sources */,
9DEC593F2D30EFA40027CEBD /* AffineWsHandler.swift in Sources */,
9D90BE272CCB9876006677DB /* AffineViewController.swift in Sources */, 9D90BE272CCB9876006677DB /* AffineViewController.swift in Sources */,
9D90BE282CCB9876006677DB /* AppDelegate.swift in Sources */, 9D90BE282CCB9876006677DB /* AppDelegate.swift in Sources */,
); );

View File

@@ -0,0 +1,114 @@
//
// AffineHttpHandler.swift
// App
//
// Created by EYHN on 2025/1/9.
//
import WebKit
enum AffineHttpError: Error {
case invalidOperation(reason: String), invalidState(reason: String)
}
/// Proxies `affine-http://` / `affine-https://` requests to the real
/// `http://` / `https://` origin via URLSession, forwarding a small whitelist
/// of request headers and attaching permissive CORS headers to the response.
class AffineHttpHandler: NSObject, WKURLSchemeHandler {
  func webView(_ webView: WKWebView, start urlSchemeTask: any WKURLSchemeTask) {
    // Shared with `stop`: once the flag is set, callbacks must never touch the
    // task again (WebKit traps if didReceive/didFinish is called on a stopped task).
    urlSchemeTask.stopped = Mutex.init(false)
    guard let rawUrl = urlSchemeTask.request.url else {
      urlSchemeTask.didFailWithError(AffineHttpError.invalidOperation(reason: "bad request"))
      return
    }
    guard let scheme = rawUrl.scheme else {
      urlSchemeTask.didFailWithError(AffineHttpError.invalidOperation(reason: "bad request"))
      return
    }
    // "affine-http" maps to plain http; any other scheme ("affine-https") to https.
    let httpProtocol = scheme == "affine-http" ? "http" : "https"
    guard let urlComponents = URLComponents(url: rawUrl, resolvingAgainstBaseURL: true) else {
      urlSchemeTask.didFailWithError(AffineHttpError.invalidOperation(reason: "bad request"))
      return
    }
    guard let host = urlComponents.host else {
      urlSchemeTask.didFailWithError(AffineHttpError.invalidOperation(reason: "bad url"))
      return
    }
    let path = urlComponents.path
    let query = urlComponents.query != nil ? "?\(urlComponents.query!)" : ""
    // NOTE(review): the rebuilt url drops any explicit port and fragment from
    // the original request — confirm callers never target a non-default port.
    guard let targetUrl = URL(string: "\(httpProtocol)://\(host)\(path)\(query)") else {
      urlSchemeTask.didFailWithError(AffineHttpError.invalidOperation(reason: "bad url"))
      return
    }
    var request = URLRequest(url: targetUrl)
    request.httpMethod = urlSchemeTask.request.httpMethod
    request.httpShouldHandleCookies = true
    request.httpBody = urlSchemeTask.request.httpBody
    // Forward only content-negotiation headers; everything else is dropped.
    urlSchemeTask.request.allHTTPHeaderFields?.filter({
      key, value in
      let normalizedKey = key.lowercased()
      return normalizedKey == "content-type" ||
        normalizedKey == "content-length" ||
        normalizedKey == "accept"
    }).forEach {
      key, value in
      request.setValue(value, forHTTPHeaderField: key)
    }
    let task = URLSession.shared.dataTask(with: request) {
      rawData, rawResponse, error in
      urlSchemeTask.stopped?.withLock({
        if $0 {
          return // WebKit already cancelled this task.
        }
        if error != nil {
          urlSchemeTask.didFailWithError(error!)
        } else {
          guard let httpResponse = rawResponse as? HTTPURLResponse else {
            urlSchemeTask.didFailWithError(AffineHttpError.invalidState(reason: "bad response"))
            return
          }
          // Keep only body-describing headers from upstream …
          let inheritedHeaders = httpResponse.allHeaderFields.filter({
            key, value in
            let normalizedKey = (key as? String)?.lowercased()
            return normalizedKey == "content-type" ||
              normalizedKey == "content-length"
          }) as? [String: String] ?? [:]
          // … and add permissive CORS headers, which win on key collisions.
          let newHeaders: [String: String] = [
            "Access-Control-Allow-Origin": "*",
            "Access-Control-Allow-Methods": "*"
          ]
          guard let response = HTTPURLResponse.init(url: rawUrl, statusCode: httpResponse.statusCode, httpVersion: nil, headerFields: inheritedHeaders.merging(newHeaders, uniquingKeysWith: { (_, newHeaders) in newHeaders })) else {
            urlSchemeTask.didFailWithError(AffineHttpError.invalidState(reason: "failed to create response"))
            return
          }
          urlSchemeTask.didReceive(response)
          if rawData != nil {
            urlSchemeTask.didReceive(rawData!)
          }
          urlSchemeTask.didFinish()
        }
      })
    }
    // Bug fix: URLSession tasks are created in a suspended state — without
    // resume() the upstream request was never sent and the task hung forever.
    task.resume()
  }

  func webView(_ webView: WKWebView, stop urlSchemeTask: WKURLSchemeTask) {
    // Mark the task stopped so the in-flight completion handler becomes a no-op.
    urlSchemeTask.stopped?.withLock({
      $0 = true
    })
  }
}
private extension WKURLSchemeTask {
  /// "Has WebKit stopped this task?" flag, attached via associated objects
  /// because WKURLSchemeTask is a protocol we cannot add storage to.
  var stopped: Mutex<Bool>? {
    get {
      return objc_getAssociatedObject(self, &stoppedKey) as? Mutex<Bool> ?? nil
    }
    set {
      // Bug fix: .OBJC_ASSOCIATION_ASSIGN stores an unretained pointer; since
      // nothing else retains the Mutex it is deallocated immediately and every
      // later access is use-after-free. RETAIN keeps it alive with the task.
      objc_setAssociatedObject(self, &stoppedKey, newValue, .OBJC_ASSOCIATION_RETAIN_NONATOMIC)
    }
  }
}

// Unique address used as the associated-object key.
private var stoppedKey = malloc(1)

View File

@@ -13,6 +13,19 @@ class AFFiNEViewController: CAPBridgeViewController {
intelligentsButton.delegate = self intelligentsButton.delegate = self
dismissIntelligentsButton() dismissIntelligentsButton()
} }
override func webViewConfiguration(for instanceConfiguration: InstanceConfiguration) -> WKWebViewConfiguration {
let configuration = super.webViewConfiguration(for: instanceConfiguration)
return configuration
}
override func webView(with frame: CGRect, configuration: WKWebViewConfiguration) -> WKWebView {
configuration.setURLSchemeHandler(AffineHttpHandler(), forURLScheme: "affine-http")
configuration.setURLSchemeHandler(AffineHttpHandler(), forURLScheme: "affine-https")
configuration.setURLSchemeHandler(AffineWsHandler(), forURLScheme: "affine-ws")
configuration.setURLSchemeHandler(AffineWsHandler(), forURLScheme: "affine-wss")
return super.webView(with: frame, configuration: configuration)
}
override func capacitorDidLoad() { override func capacitorDidLoad() {
let plugins: [CAPPlugin] = [ let plugins: [CAPPlugin] = [

View File

@@ -0,0 +1,197 @@
//
// AffineWsHandler.swift
// App
//
// Created by EYHN on 2025/1/9.
//
import WebKit
enum AffineWsError: Error {
case invalidOperation(reason: String), invalidState(reason: String)
}
/**
this custom url scheme handler simulates websocket connection through an http request.
frontend open websocket connections and send messages by sending requests to affine-ws:// or affine-wss://
the handler has two endpoints:
`affine-ws:///open?uuid={uuid}&url={wsUrl}`: open a websocket connection and return received data through the SSE protocol. If the front-end closes the http connection, the websocket connection will also be closed.
`affine-ws:///send?uuid={uuid}`: send the request body data to the websocket connection with the specified uuid.
*/
class AffineWsHandler: NSObject, WKURLSchemeHandler {
  /// Live websocket connections keyed by the client-supplied uuid.
  /// NOTE(review): entries are removed when the receive loop ends, but the
  /// map is touched from both WebKit's thread and URLSession's delegate queue
  /// — confirm whether an extra lock is needed under real load.
  var wsTasks: [UUID: URLSessionWebSocketTask] = [:]

  func webView(_ webView: WKWebView, start urlSchemeTask: any WKURLSchemeTask) {
    // Shared with `stop`: once set, callbacks must never touch the task again.
    urlSchemeTask.stopped = Mutex.init(false)
    guard let rawUrl = urlSchemeTask.request.url else {
      urlSchemeTask.didFailWithError(AffineWsError.invalidOperation(reason: "bad request"))
      return
    }
    guard let urlComponents = URLComponents(url: rawUrl, resolvingAgainstBaseURL: true) else {
      urlSchemeTask.didFailWithError(AffineWsError.invalidOperation(reason: "bad request"))
      return
    }
    switch urlComponents.path {
    case "/open":
      handleOpen(urlSchemeTask, rawUrl: rawUrl, urlComponents: urlComponents)
    case "/send":
      handleSend(urlSchemeTask, rawUrl: rawUrl, urlComponents: urlComponents)
    default:
      // Fail unknown endpoints instead of leaving the http request hanging.
      urlSchemeTask.didFailWithError(AffineWsError.invalidOperation(reason: "unknown endpoint"))
    }
  }

  /// `affine-ws:///open?uuid={uuid}&url={wsUrl}` — opens the websocket and
  /// streams incoming text messages back to the page as SSE `data:` events.
  private func handleOpen(_ urlSchemeTask: any WKURLSchemeTask, rawUrl: URL, urlComponents: URLComponents) {
    guard let targetUrlStr = urlComponents.queryItems?.first(where: { $0.name == "url" })?.value else {
      urlSchemeTask.didFailWithError(AffineWsError.invalidOperation(reason: "url is required"))
      return
    }
    guard let targetUrl = URL(string: targetUrlStr) else {
      urlSchemeTask.didFailWithError(AffineWsError.invalidOperation(reason: "failed to parse url"))
      return
    }
    guard let uuidStr = urlComponents.queryItems?.first(where: { $0.name == "uuid" })?.value else {
      urlSchemeTask.didFailWithError(AffineWsError.invalidOperation(reason: "uuid is required"))
      return
    }
    guard let uuid = UUID(uuidString: uuidStr) else {
      urlSchemeTask.didFailWithError(AffineWsError.invalidOperation(reason: "invalid uuid"))
      return
    }
    guard let response = HTTPURLResponse.init(url: rawUrl, statusCode: 200, httpVersion: nil, headerFields: [
      "X-Accel-Buffering": "no",
      "Content-Type": "text/event-stream",
      "Cache-Control": "no-cache",
      "Access-Control-Allow-Origin": "*",
      "Access-Control-Allow-Methods": "*"
    ]) else {
      urlSchemeTask.didFailWithError(AffineWsError.invalidState(reason: "failed to create response"))
      return
    }
    urlSchemeTask.didReceive(response)
    let jsonEncoder = JSONEncoder()
    let json = String(data: try! jsonEncoder.encode(["type": "start"]), encoding: .utf8)!
    urlSchemeTask.didReceive("data: \(json)\n\n".data(using: .utf8)!)
    let webSocketTask = URLSession.shared.webSocketTask(with: targetUrl)
    wsTasks[uuid] = webSocketTask
    webSocketTask.resume()
    // Stored on the task so `stop` can cancel the socket with it.
    urlSchemeTask.wsTask = webSocketTask
    receiveLoop(webSocketTask, urlSchemeTask: urlSchemeTask, uuid: uuid)
  }

  /// Awaits the next websocket message, forwards it as an SSE event, and
  /// re-arms itself. The original re-armed unconditionally, which kept calling
  /// receive() forever after a failure or after WebKit stopped the task.
  private func receiveLoop(_ webSocketTask: URLSessionWebSocketTask, urlSchemeTask: any WKURLSchemeTask, uuid: UUID) {
    webSocketTask.receive { [weak self] result in
      // Decide under the lock so a concurrent `stop` cannot race with us.
      var keepReceiving = false
      urlSchemeTask.stopped?.withLock({ stopped in
        if stopped {
          return // WebKit cancelled the task; never touch it again.
        }
        let jsonEncoder = JSONEncoder()
        switch result {
        case .success(let message):
          // Only text frames are forwarded; binary frames are silently dropped.
          if case .string(let string) = message {
            let json = String(data: try! jsonEncoder.encode(["type": "message", "data": string]), encoding: .utf8)!
            urlSchemeTask.didReceive("data: \(json)\n\n".data(using: .utf8)!)
          }
          keepReceiving = true
        case .failure(let error):
          let json = String(data: try! jsonEncoder.encode(["type": "error", "error": error.localizedDescription]), encoding: .utf8)!
          urlSchemeTask.didReceive("data: \(json)\n\n".data(using: .utf8)!)
          urlSchemeTask.didFinish()
        }
      })
      if keepReceiving {
        self?.receiveLoop(webSocketTask, urlSchemeTask: urlSchemeTask, uuid: uuid)
      } else {
        // Socket failed or the task stopped: drop it from the registry so the
        // dictionary does not grow forever (leak in the original).
        self?.wsTasks.removeValue(forKey: uuid)
      }
    }
  }

  /// `affine-ws:///send?uuid={uuid}` — forwards the POST body (text/plain)
  /// to the websocket identified by uuid and answers with a JSON status.
  private func handleSend(_ urlSchemeTask: any WKURLSchemeTask, rawUrl: URL, urlComponents: URLComponents) {
    if urlSchemeTask.request.httpMethod != "POST" {
      urlSchemeTask.didFailWithError(AffineWsError.invalidOperation(reason: "Method should be POST"))
      return
    }
    guard let uuidStr = urlComponents.queryItems?.first(where: { $0.name == "uuid" })?.value else {
      urlSchemeTask.didFailWithError(AffineWsError.invalidOperation(reason: "uuid is required"))
      return
    }
    guard let uuid = UUID(uuidString: uuidStr) else {
      urlSchemeTask.didFailWithError(AffineWsError.invalidOperation(reason: "invalid uuid"))
      return
    }
    guard let contentType = urlSchemeTask.request.allHTTPHeaderFields?.first(where: { $0.key.lowercased() == "content-type" })?.value else {
      urlSchemeTask.didFailWithError(AffineWsError.invalidOperation(reason: "content-type is required"))
      return
    }
    if contentType != "text/plain" {
      urlSchemeTask.didFailWithError(AffineWsError.invalidOperation(reason: "content-type not support"))
      return
    }
    guard let body = urlSchemeTask.request.httpBody else {
      urlSchemeTask.didFailWithError(AffineWsError.invalidOperation(reason: "no body"))
      return
    }
    let stringBody = String(decoding: body, as: UTF8.self)
    guard let webSocketTask = wsTasks[uuid] else {
      urlSchemeTask.didFailWithError(AffineWsError.invalidOperation(reason: "connection not found"))
      return
    }
    guard let response = HTTPURLResponse.init(url: rawUrl, statusCode: 200, httpVersion: nil, headerFields: [
      "Content-Type": "application/json",
      "Cache-Control": "no-cache",
      "Access-Control-Allow-Origin": "*",
      "Access-Control-Allow-Methods": "*"
    ]) else {
      urlSchemeTask.didFailWithError(AffineWsError.invalidState(reason: "failed to create response"))
      return
    }
    let jsonEncoder = JSONEncoder()
    webSocketTask.send(.string(stringBody), completionHandler: { error in
      urlSchemeTask.stopped?.withLock({
        if $0 {
          return
        }
        if error != nil {
          let json = try! jsonEncoder.encode(["error": error!.localizedDescription])
          urlSchemeTask.didReceive(response)
          urlSchemeTask.didReceive(json)
          // Bug fix: the original never finished the task on the error path,
          // leaving the frontend's fetch pending forever.
          urlSchemeTask.didFinish()
        } else {
          urlSchemeTask.didReceive(response)
          // NOTE(review): encoding [String: Data] serializes the uuid as a
          // base64 string — kept as-is for wire compatibility; confirm the
          // frontend really expects base64 here rather than the raw uuid.
          urlSchemeTask.didReceive(try! jsonEncoder.encode(["uuid": uuid.uuidString.data(using: .utf8)!]))
          urlSchemeTask.didFinish()
        }
      })
    })
  }

  func webView(_ webView: WKWebView, stop urlSchemeTask: WKURLSchemeTask) {
    // Bug fix: the original wrote `$0 = false`, so the stopped guard never
    // fired and callbacks kept poking a task WebKit had already cancelled.
    urlSchemeTask.stopped?.withLock({
      $0 = true
    })
    urlSchemeTask.wsTask?.cancel(with: .abnormalClosure, reason: "Closed".data(using: .utf8))
  }
}
private extension WKURLSchemeTask {
  /// "Has WebKit stopped this task?" flag, attached via associated objects
  /// because WKURLSchemeTask is a protocol we cannot add storage to.
  var stopped: Mutex<Bool>? {
    get {
      return objc_getAssociatedObject(self, &stoppedKey) as? Mutex<Bool> ?? nil
    }
    set {
      // Bug fix: .OBJC_ASSOCIATION_ASSIGN stores an unretained pointer; since
      // nothing else retains the Mutex it is deallocated immediately and every
      // later access is use-after-free. RETAIN keeps it alive with the task.
      objc_setAssociatedObject(self, &stoppedKey, newValue, .OBJC_ASSOCIATION_RETAIN_NONATOMIC)
    }
  }

  /// The websocket backing an `/open` task, cancelled when WebKit stops it.
  var wsTask: URLSessionWebSocketTask? {
    get {
      return objc_getAssociatedObject(self, &wsTaskKey) as? URLSessionWebSocketTask
    }
    set {
      // Bug fix: the setter wrote under &stoppedKey, clobbering the stopped
      // flag and never actually storing the task — so `stop` never cancelled
      // the socket. Also RETAIN instead of ASSIGN (see `stopped`).
      objc_setAssociatedObject(self, &wsTaskKey, newValue, .OBJC_ASSOCIATION_RETAIN_NONATOMIC)
    }
  }
}

// Unique addresses used as associated-object keys.
private var stoppedKey = malloc(1)
private var wsTaskKey = malloc(1)

View File

@@ -0,0 +1,23 @@
//
// Mutex.swift
// App
//
// Created by EYHN on 2025/1/11.
//
import Foundation
/// A minimal lock-protected box: all reads and writes of the wrapped value go
/// through `withLock`, which serializes access with an `NSLock`.
final class Mutex<Wrapped>: @unchecked Sendable {
  private let lock = NSLock()
  private var value: Wrapped

  /// Wraps `initial` as the protected value.
  init(_ initial: Wrapped) {
    value = initial
  }

  /// Runs `body` with exclusive, mutable access to the wrapped value and
  /// returns its result; the lock is released even if `body` throws.
  func withLock<R>(_ body: @Sendable (inout Wrapped) throws -> R) rethrows -> R {
    lock.lock()
    defer { lock.unlock() }
    return try body(&value)
  }
}

View File

@@ -30,6 +30,7 @@ public class NbStorePlugin: CAPPlugin, CAPBridgedPlugin {
CAPPluginMethod(name: "getPeerPulledRemoteClocks", returnType: CAPPluginReturnPromise), CAPPluginMethod(name: "getPeerPulledRemoteClocks", returnType: CAPPluginReturnPromise),
CAPPluginMethod(name: "getPeerPulledRemoteClock", returnType: CAPPluginReturnPromise), CAPPluginMethod(name: "getPeerPulledRemoteClock", returnType: CAPPluginReturnPromise),
CAPPluginMethod(name: "setPeerPulledRemoteClock", returnType: CAPPluginReturnPromise), CAPPluginMethod(name: "setPeerPulledRemoteClock", returnType: CAPPluginReturnPromise),
CAPPluginMethod(name: "getPeerPushedClock", returnType: CAPPluginReturnPromise),
CAPPluginMethod(name: "getPeerPushedClocks", returnType: CAPPluginReturnPromise), CAPPluginMethod(name: "getPeerPushedClocks", returnType: CAPPluginReturnPromise),
CAPPluginMethod(name: "setPeerPushedClock", returnType: CAPPluginReturnPromise), CAPPluginMethod(name: "setPeerPushedClock", returnType: CAPPluginReturnPromise),
CAPPluginMethod(name: "clearClocks", returnType: CAPPluginReturnPromise), CAPPluginMethod(name: "clearClocks", returnType: CAPPluginReturnPromise),
@@ -334,11 +335,14 @@ public class NbStorePlugin: CAPPlugin, CAPBridgedPlugin {
let peer = try call.getStringEnsure("peer") let peer = try call.getStringEnsure("peer")
let docId = try call.getStringEnsure("docId") let docId = try call.getStringEnsure("docId")
let clock = try await docStoragePool.getPeerRemoteClock(universalId: id, peer: peer, docId: docId) if let clock = try await docStoragePool.getPeerRemoteClock(universalId: id, peer: peer, docId: docId) {
call.resolve([ call.resolve([
"docId": clock.docId, "docId": clock.docId,
"timestamp": clock.timestamp, "timestamp": clock.timestamp,
]) ])
} else {
call.resolve()
}
} catch { } catch {
call.reject("Failed to get peer remote clock, \(error)", nil, error) call.reject("Failed to get peer remote clock, \(error)", nil, error)
@@ -391,11 +395,14 @@ public class NbStorePlugin: CAPPlugin, CAPBridgedPlugin {
let peer = try call.getStringEnsure("peer") let peer = try call.getStringEnsure("peer")
let docId = try call.getStringEnsure("docId") let docId = try call.getStringEnsure("docId")
let clock = try await docStoragePool.getPeerPulledRemoteClock(universalId: id, peer: peer, docId: docId) if let clock = try await docStoragePool.getPeerPulledRemoteClock(universalId: id, peer: peer, docId: docId) {
call.resolve([ call.resolve([
"docId": clock.docId, "docId": clock.docId,
"timestamp": clock.timestamp, "timestamp": clock.timestamp,
]) ])
} else {
call.resolve()
}
} catch { } catch {
call.reject("Failed to get peer pulled remote clock, \(error)", nil, error) call.reject("Failed to get peer pulled remote clock, \(error)", nil, error)
@@ -424,6 +431,26 @@ public class NbStorePlugin: CAPPlugin, CAPBridgedPlugin {
} }
} }
@objc func getPeerPushedClock(_ call: CAPPluginCall) {
Task {
do {
let id = try call.getStringEnsure("id")
let peer = try call.getStringEnsure("peer")
let docId = try call.getStringEnsure("docId")
if let clock = try await docStoragePool.getPeerPushedClock(universalId: id, peer: peer, docId: docId) {
call.resolve([
"docId": clock.docId,
"timestamp": clock.timestamp,
])
} else {
call.resolve()
}
} catch {
call.reject("Failed to get peer pushed clock, \(error)", nil, error)
}
}
}
@objc func getPeerPushedClocks(_ call: CAPPluginCall) { @objc func getPeerPushedClocks(_ call: CAPPluginCall) {
Task { Task {
do { do {

View File

@@ -0,0 +1,36 @@
//
// SafeWKURLSchemeTask.swift
// App
//
// Created by EYHN on 2025/1/11.
//
import WebKit
/// A forwarding wrapper around a `WKURLSchemeTask` that snapshots the request
/// and delegates every callback to the wrapped task.
///
/// Bug fix: the original did not compile — it listed `NSObject` after the
/// protocol (the superclass must come first), left an Xcode `<#code#>`
/// placeholder in `didReceive(_:URLResponse)`, and forwarded the data
/// overload to the response overload with another unfilled placeholder.
class SafeWKURLSchemeTask: NSObject, WKURLSchemeTask {
  /// The wrapped task every callback is forwarded to.
  let origin: any WKURLSchemeTask
  /// Snapshot of the wrapped task's request, as the protocol requires.
  let request: URLRequest

  init(origin: any WKURLSchemeTask) {
    self.origin = origin
    self.request = origin.request
    super.init()
  }

  func didReceive(_ response: URLResponse) {
    origin.didReceive(response)
  }

  func didReceive(_ data: Data) {
    origin.didReceive(data)
  }

  func didFinish() {
    origin.didFinish()
  }

  func didFailWithError(_ error: any Error) {
    origin.didFailWithError(error)
  }
}

View File

@@ -321,6 +321,11 @@ uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_get_peer_pulled_re
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_get_peer_pulled_remote_clocks(void*_Nonnull ptr, RustBuffer universal_id, RustBuffer peer uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_get_peer_pulled_remote_clocks(void*_Nonnull ptr, RustBuffer universal_id, RustBuffer peer
); );
#endif #endif
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_GET_PEER_PUSHED_CLOCK
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_GET_PEER_PUSHED_CLOCK
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_get_peer_pushed_clock(void*_Nonnull ptr, RustBuffer universal_id, RustBuffer peer, RustBuffer doc_id
);
#endif
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_GET_PEER_PUSHED_CLOCKS #ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_GET_PEER_PUSHED_CLOCKS
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_GET_PEER_PUSHED_CLOCKS #define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGEPOOL_GET_PEER_PUSHED_CLOCKS
uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_get_peer_pushed_clocks(void*_Nonnull ptr, RustBuffer universal_id, RustBuffer peer uint64_t uniffi_affine_mobile_native_fn_method_docstoragepool_get_peer_pushed_clocks(void*_Nonnull ptr, RustBuffer universal_id, RustBuffer peer
@@ -759,6 +764,12 @@ uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_get_peer_pul
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_GET_PEER_PULLED_REMOTE_CLOCKS #define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_GET_PEER_PULLED_REMOTE_CLOCKS
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_get_peer_pulled_remote_clocks(void uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_get_peer_pulled_remote_clocks(void
);
#endif
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_GET_PEER_PUSHED_CLOCK
#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_GET_PEER_PUSHED_CLOCK
uint16_t uniffi_affine_mobile_native_checksum_method_docstoragepool_get_peer_pushed_clock(void
); );
#endif #endif
#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_GET_PEER_PUSHED_CLOCKS #ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGEPOOL_GET_PEER_PUSHED_CLOCKS

View File

@@ -14,10 +14,10 @@ const config: CapacitorConfig = {
}, },
plugins: { plugins: {
CapacitorCookies: { CapacitorCookies: {
enabled: true, enabled: false,
}, },
CapacitorHttp: { CapacitorHttp: {
enabled: true, enabled: false,
}, },
Keyboard: { Keyboard: {
resize: KeyboardResize.Native, resize: KeyboardResize.Native,

View File

@@ -26,6 +26,7 @@
"@capacitor/keyboard": "^6.0.3", "@capacitor/keyboard": "^6.0.3",
"@sentry/react": "^8.44.0", "@sentry/react": "^8.44.0",
"@toeverything/infra": "workspace:^", "@toeverything/infra": "workspace:^",
"async-call-rpc": "^6.4.2",
"next-themes": "^0.4.4", "next-themes": "^0.4.4",
"react": "^19.0.0", "react": "^19.0.0",
"react-dom": "^19.0.0", "react-dom": "^19.0.0",

View File

@@ -12,23 +12,22 @@ import {
DefaultServerService, DefaultServerService,
ServersService, ServersService,
ValidatorProvider, ValidatorProvider,
WebSocketAuthProvider,
} from '@affine/core/modules/cloud'; } from '@affine/core/modules/cloud';
import { DocsService } from '@affine/core/modules/doc'; import { DocsService } from '@affine/core/modules/doc';
import { GlobalContextService } from '@affine/core/modules/global-context'; import { GlobalContextService } from '@affine/core/modules/global-context';
import { I18nProvider } from '@affine/core/modules/i18n'; import { I18nProvider } from '@affine/core/modules/i18n';
import { LifecycleService } from '@affine/core/modules/lifecycle'; import { LifecycleService } from '@affine/core/modules/lifecycle';
import { configureLocalStorageStateStorageImpls } from '@affine/core/modules/storage'; import {
configureLocalStorageStateStorageImpls,
NbstoreProvider,
} from '@affine/core/modules/storage';
import { PopupWindowProvider } from '@affine/core/modules/url'; import { PopupWindowProvider } from '@affine/core/modules/url';
import { ClientSchemeProvider } from '@affine/core/modules/url/providers/client-schema'; import { ClientSchemeProvider } from '@affine/core/modules/url/providers/client-schema';
import { configureIndexedDBUserspaceStorageProvider } from '@affine/core/modules/userspace';
import { configureBrowserWorkbenchModule } from '@affine/core/modules/workbench'; import { configureBrowserWorkbenchModule } from '@affine/core/modules/workbench';
import { WorkspacesService } from '@affine/core/modules/workspace'; import { WorkspacesService } from '@affine/core/modules/workspace';
import { import { configureBrowserWorkspaceFlavours } from '@affine/core/modules/workspace-engine';
configureBrowserWorkspaceFlavours,
configureIndexedDBWorkspaceEngineStorageProvider,
} from '@affine/core/modules/workspace-engine';
import { I18n } from '@affine/i18n'; import { I18n } from '@affine/i18n';
import { WorkerClient } from '@affine/nbstore/worker/client';
import { import {
defaultBlockMarkdownAdapterMatchers, defaultBlockMarkdownAdapterMatchers,
docLinkBaseURLMiddleware, docLinkBaseURLMiddleware,
@@ -44,16 +43,17 @@ import { Browser } from '@capacitor/browser';
import { Haptics } from '@capacitor/haptics'; import { Haptics } from '@capacitor/haptics';
import { Keyboard, KeyboardStyle } from '@capacitor/keyboard'; import { Keyboard, KeyboardStyle } from '@capacitor/keyboard';
import { Framework, FrameworkRoot, getCurrentStore } from '@toeverything/infra'; import { Framework, FrameworkRoot, getCurrentStore } from '@toeverything/infra';
import { OpClient } from '@toeverything/infra/op';
import { AsyncCall } from 'async-call-rpc';
import { useTheme } from 'next-themes'; import { useTheme } from 'next-themes';
import { Suspense, useEffect } from 'react'; import { Suspense, useEffect } from 'react';
import { RouterProvider } from 'react-router-dom'; import { RouterProvider } from 'react-router-dom';
import { BlocksuiteMenuConfigProvider } from './bs-menu-config'; import { BlocksuiteMenuConfigProvider } from './bs-menu-config';
import { configureFetchProvider } from './fetch';
import { ModalConfigProvider } from './modal-config'; import { ModalConfigProvider } from './modal-config';
import { Cookie } from './plugins/cookie';
import { Hashcash } from './plugins/hashcash'; import { Hashcash } from './plugins/hashcash';
import { Intelligents } from './plugins/intelligents'; import { Intelligents } from './plugins/intelligents';
import { NbStoreNativeDBApis } from './plugins/nbstore';
import { enableNavigationGesture$ } from './web-navigation-control'; import { enableNavigationGesture$ } from './web-navigation-control';
const future = { const future = {
@@ -65,9 +65,52 @@ configureCommonModules(framework);
configureBrowserWorkbenchModule(framework); configureBrowserWorkbenchModule(framework);
configureLocalStorageStateStorageImpls(framework); configureLocalStorageStateStorageImpls(framework);
configureBrowserWorkspaceFlavours(framework); configureBrowserWorkspaceFlavours(framework);
configureIndexedDBWorkspaceEngineStorageProvider(framework);
configureIndexedDBUserspaceStorageProvider(framework);
configureMobileModules(framework); configureMobileModules(framework);
framework.impl(NbstoreProvider, {
openStore(_key, options) {
const worker = new Worker(
new URL(
/* webpackChunkName: "nbstore-worker" */ './worker.ts',
import.meta.url
)
);
const { port1: nativeDBApiChannelServer, port2: nativeDBApiChannelClient } =
new MessageChannel();
AsyncCall<typeof NbStoreNativeDBApis>(NbStoreNativeDBApis, {
channel: {
on(listener) {
const f = (e: MessageEvent<any>) => {
listener(e.data);
};
nativeDBApiChannelServer.addEventListener('message', f);
return () => {
nativeDBApiChannelServer.removeEventListener('message', f);
};
},
send(data) {
nativeDBApiChannelServer.postMessage(data);
},
},
log: false,
});
nativeDBApiChannelServer.start();
worker.postMessage(
{
type: 'native-db-api-channel',
port: nativeDBApiChannelClient,
},
[nativeDBApiChannelClient]
);
const client = new WorkerClient(new OpClient(worker), options);
return {
store: client,
dispose: () => {
worker.terminate();
nativeDBApiChannelServer.close();
},
};
},
});
framework.impl(PopupWindowProvider, { framework.impl(PopupWindowProvider, {
open: (url: string) => { open: (url: string) => {
Browser.open({ Browser.open({
@@ -81,18 +124,6 @@ framework.impl(ClientSchemeProvider, {
return 'affine'; return 'affine';
}, },
}); });
configureFetchProvider(framework);
framework.impl(WebSocketAuthProvider, {
getAuthToken: async url => {
const cookies = await Cookie.getCookies({
url,
});
return {
userId: cookies['affine_user_id'],
token: cookies['affine_session'],
};
},
});
framework.impl(ValidatorProvider, { framework.impl(ValidatorProvider, {
async validate(_challenge, resource) { async validate(_challenge, resource) {
const res = await Hashcash.hash({ challenge: resource }); const res = await Hashcash.hash({ challenge: resource });

View File

@@ -1,191 +0,0 @@
/**
* this file is modified from part of https://github.com/ionic-team/capacitor/blob/74c3e9447e1e32e73f818d252eb12f453d849e8d/ios/Capacitor/Capacitor/assets/native-bridge.js#L466
*
* for support arraybuffer response type
*/
import { RawFetchProvider } from '@affine/core/modules/cloud/provider/fetch';
import { CapacitorHttp } from '@capacitor/core';
import type { Framework } from '@toeverything/infra';
/**
 * Read a `File` and resolve with its contents encoded as base64.
 *
 * Uses the `readAsBinaryString` + `btoa` pair: the binary-string result is a
 * byte-per-char string, which `btoa` turns into base64. Rejects if the read
 * fails or yields `null`.
 *
 * Fix: the promise was untyped (`Promise<unknown>`), forcing callers to cast;
 * it always resolves with a string, so annotate it as `Promise<string>`.
 */
const readFileAsBase64 = (file: File): Promise<string> =>
  new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onloadend = () => {
      const data = reader.result;
      if (data === null) {
        reject(new Error('Failed to read file'));
      } else {
        // readAsBinaryString always yields a string here.
        resolve(btoa(data as string));
      }
    };
    reader.onerror = reject;
    reader.readAsBinaryString(file);
  });
/**
 * Convert a `FormData` into the array-of-entries shape the Capacitor HTTP
 * bridge expects: `File` values become base64 entries carrying their content
 * type and file name; everything else is passed through as a string entry.
 */
const convertFormData = async (formData: FormData) => {
  const entries = [];
  for (const [key, value] of formData.entries()) {
    if (value instanceof File) {
      entries.push({
        key,
        value: await readFileAsBase64(value),
        type: 'base64File',
        contentType: value.type,
        fileName: value.name,
      });
    } else {
      entries.push({ key, value, type: 'string' });
    }
  }
  return entries;
};
/**
 * Normalize a fetch body into `{ data, type, headers? }` for the Capacitor
 * HTTP bridge.
 *
 * - `ReadableStream` / `Uint8Array`: drained to bytes, decoded as text, and
 *   tagged by content type (`json` is additionally parsed; parse failures
 *   silently keep the raw text).
 * - `URLSearchParams`: serialized as text.
 * - `FormData`: converted entry-by-entry via `convertFormData`.
 * - `File`: read as base64 with its own content type.
 * - anything else: passed through tagged as `json`.
 */
const convertBody = async (body: unknown, contentType: string) => {
  if (body instanceof ReadableStream || body instanceof Uint8Array) {
    let bytes: Uint8Array;
    if (body instanceof ReadableStream) {
      // Drain the stream, then concatenate the chunks into one buffer.
      const reader = body.getReader();
      const chunks: Uint8Array[] = [];
      for (;;) {
        const { done, value } = await reader.read();
        if (done) break;
        chunks.push(value);
      }
      const total = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
      bytes = new Uint8Array(total);
      let offset = 0;
      for (const chunk of chunks) {
        bytes.set(chunk, offset);
        offset += chunk.length;
      }
    } else {
      bytes = body;
    }
    let data: unknown = new TextDecoder().decode(bytes);
    let type;
    if (contentType === 'application/json') {
      try {
        data = JSON.parse(data as string);
      } catch {
        // keep the raw text if it is not valid JSON
      }
      type = 'json';
    } else if (contentType === 'multipart/form-data') {
      // Note: the decoded text is kept as-is; only the tag changes.
      type = 'formData';
    } else if (contentType?.startsWith('image')) {
      type = 'image';
    } else if (contentType === 'application/octet-stream') {
      type = 'binary';
    } else {
      type = 'text';
    }
    return {
      data,
      type,
      headers: { 'Content-Type': contentType || 'application/octet-stream' },
    };
  }
  if (body instanceof URLSearchParams) {
    return {
      data: body.toString(),
      type: 'text',
    };
  }
  if (body instanceof FormData) {
    return {
      data: await convertFormData(body),
      type: 'formData',
    };
  }
  if (body instanceof File) {
    return {
      data: await readFileAsBase64(body),
      type: 'file',
      headers: { 'Content-Type': body.type },
    };
  }
  return { data: body, type: 'json' };
};
/**
 * Decode a base64 string into a Uint8Array of its raw bytes.
 */
function base64ToUint8Array(base64: string) {
  const decoded = atob(base64);
  const bytes = new Uint8Array(decoded.length);
  for (let i = 0; i < decoded.length; i++) {
    bytes[i] = decoded.charCodeAt(i);
  }
  return bytes;
}
/**
 * Override the app's RawFetchProvider so every fetch is executed by the
 * native CapacitorHttp plugin instead of the webview's fetch, then mapped
 * back into a standard web Response.
 */
export function configureFetchProvider(framework: Framework) {
  framework.override(RawFetchProvider, {
    fetch: async (input, init) => {
      // Normalize input (string | URL | Request) into a Request so method,
      // headers and body can be read uniformly.
      const request = new Request(input, init);
      const { method } = request;
      // Timing label for debugging; timeEnd fires on both success and error.
      const tag = `CapacitorHttp fetch ${Date.now()} ${input}`;
      console.time(tag);
      try {
        const { body } = request;
        const optionHeaders = Object.fromEntries(request.headers.entries());
        // Convert the body into the { data, type, headers } shape the native
        // plugin expects; the raw init.body is preferred over the Request's
        // already-consumed body stream.
        const {
          data: requestData,
          type,
          headers,
        } = await convertBody(
          (init === null || init === void 0 ? void 0 : init.body) ||
            body ||
            undefined,
          optionHeaders['Content-Type'] || optionHeaders['content-type']
        );
        const accept = optionHeaders['Accept'] || optionHeaders['accept'];
        const nativeResponse = await CapacitorHttp.request({
          url: request.url,
          method: method,
          data: requestData,
          dataType: type as any,
          // Only ask the native side for raw bytes when the caller explicitly
          // requested an octet-stream response.
          responseType:
            accept === 'application/octet-stream' ? 'arraybuffer' : undefined,
          headers: Object.assign(Object.assign({}, headers), optionHeaders),
        });
        const contentType =
          nativeResponse.headers['Content-Type'] ||
          nativeResponse.headers['content-type'];
        // Map the native payload back to a web Response body:
        // arraybuffer/octet-stream responses arrive base64-encoded and are
        // decoded; JSON payloads are re-serialized to text so Response can
        // parse them again; everything else passes through unchanged.
        let data =
          accept === 'application/octet-stream'
            ? base64ToUint8Array(nativeResponse.data)
            : contentType === null || contentType === void 0
              ? void 0
              : contentType.startsWith('application/json')
                ? JSON.stringify(nativeResponse.data)
                : contentType === 'application/octet-stream'
                  ? base64ToUint8Array(nativeResponse.data)
                  : nativeResponse.data;
        // use null data for 204 No Content HTTP response
        // NOTE(review): data is still wrapped in a Blob below; spec-compliant
        // Response constructors throw for a 204 status combined with a
        // non-null body — TODO confirm the target webview tolerates this.
        if (nativeResponse.status === 204) {
          data = null;
        }
        // intercept & parse response before returning
        const response = new Response(new Blob([data], { type: contentType }), {
          headers: nativeResponse.headers,
          status: nativeResponse.status,
        });
        /*
         * copy url to response, `cordova-plugin-ionic` uses this url from the response
         * we need `Object.defineProperty` because url is an inherited getter on the Response
         * see: https://stackoverflow.com/a/57382543
         * */
        Object.defineProperty(response, 'url', {
          value: nativeResponse.url,
        });
        console.timeEnd(tag);
        return response;
      } catch (error) {
        console.timeEnd(tag);
        throw error;
      }
    },
  });
}

View File

@@ -1,5 +1,8 @@
import './setup'; import './setup';
import '@affine/component/theme';
import '@affine/core/mobile/styles/mobile.css';
import { bindNativeDBApis } from '@affine/nbstore/sqlite';
import { import {
init, init,
reactRouterV6BrowserTracingIntegration, reactRouterV6BrowserTracingIntegration,
@@ -15,18 +18,15 @@ import {
} from 'react-router-dom'; } from 'react-router-dom';
import { App } from './app'; import { App } from './app';
import { NbStoreNativeDBApis } from './plugins/nbstore';
bindNativeDBApis(NbStoreNativeDBApis);
// TODO(@L-Sun) Uncomment this when the `show` method implement by `@capacitor/keyboard` in ios
// import './virtual-keyboard';
function main() { function main() {
if (BUILD_CONFIG.debug || window.SENTRY_RELEASE) { if (BUILD_CONFIG.debug || window.SENTRY_RELEASE) {
// workaround for Capacitor HttpPlugin
// capacitor-http-plugin will replace window.XMLHttpRequest with its own implementation
// but XMLHttpRequest.prototype is not defined which is used by sentry
// see: https://github.com/ionic-team/capacitor/blob/74c3e9447e1e32e73f818d252eb12f453d849e8d/core/native-bridge.ts#L581
if ('CapacitorWebXMLHttpRequest' in window) {
window.XMLHttpRequest.prototype = (
window.CapacitorWebXMLHttpRequest as any
).prototype;
}
// https://docs.sentry.io/platforms/javascript/guides/react/#configure // https://docs.sentry.io/platforms/javascript/guides/react/#configure
init({ init({
dsn: process.env.SENTRY_DSN, dsn: process.env.SENTRY_DSN,

View File

@@ -1,6 +0,0 @@
export interface CookiePlugin {
  /**
   * Returns the cookies currently set for the given URL as a
   * name-to-value record.
   */
  getCookies(options: { url: string }): Promise<Record<string, string>>;
}

View File

@@ -1,8 +0,0 @@
// Registers the native 'Cookie' Capacitor plugin and re-exports its typed handle.
import { registerPlugin } from '@capacitor/core';
import type { CookiePlugin } from './definitions';
// Typed proxy through which the web layer calls the native implementation.
const Cookie = registerPlugin<CookiePlugin>('Cookie');
export * from './definitions';
export { Cookie };

View File

@@ -70,12 +70,12 @@ export interface NbStorePlugin {
timestamps: number[]; timestamps: number[];
}) => Promise<{ count: number }>; }) => Promise<{ count: number }>;
deleteDoc: (options: { id: string; docId: string }) => Promise<void>; deleteDoc: (options: { id: string; docId: string }) => Promise<void>;
getDocClocks: (options: { id: string; after?: number | null }) => Promise< getDocClocks: (options: { id: string; after?: number | null }) => Promise<{
{ clocks: {
docId: string; docId: string;
timestamp: number; timestamp: number;
}[] }[];
>; }>;
getDocClock: (options: { id: string; docId: string }) => Promise< getDocClock: (options: { id: string; docId: string }) => Promise<
| { | {
docId: string; docId: string;
@@ -95,47 +95,47 @@ export interface NbStorePlugin {
getPeerRemoteClocks: (options: { getPeerRemoteClocks: (options: {
id: string; id: string;
peer: string; peer: string;
}) => Promise<Array<DocClock>>; }) => Promise<{ clocks: Array<DocClock> }>;
getPeerRemoteClock: (options: { getPeerRemoteClock: (options: {
id: string; id: string;
peer: string; peer: string;
docId: string; docId: string;
}) => Promise<DocClock>; }) => Promise<DocClock | null>;
setPeerRemoteClock: (options: { setPeerRemoteClock: (options: {
id: string; id: string;
peer: string; peer: string;
docId: string; docId: string;
clock: number; timestamp: number;
}) => Promise<void>; }) => Promise<void>;
getPeerPushedClocks: (options: { getPeerPushedClocks: (options: {
id: string; id: string;
peer: string; peer: string;
}) => Promise<Array<DocClock>>; }) => Promise<{ clocks: Array<DocClock> }>;
getPeerPushedClock: (options: { getPeerPushedClock: (options: {
id: string; id: string;
peer: string; peer: string;
docId: string; docId: string;
}) => Promise<DocClock>; }) => Promise<DocClock | null>;
setPeerPushedClock: (options: { setPeerPushedClock: (options: {
id: string; id: string;
peer: string; peer: string;
docId: string; docId: string;
clock: number; timestamp: number;
}) => Promise<void>; }) => Promise<void>;
getPeerPulledRemoteClocks: (options: { getPeerPulledRemoteClocks: (options: {
id: string; id: string;
peer: string; peer: string;
}) => Promise<Array<DocClock>>; }) => Promise<{ clocks: Array<DocClock> }>;
getPeerPulledRemoteClock: (options: { getPeerPulledRemoteClock: (options: {
id: string; id: string;
peer: string; peer: string;
docId: string; docId: string;
}) => Promise<DocClock>; }) => Promise<DocClock | null>;
setPeerPulledRemoteClock: (options: { setPeerPulledRemoteClock: (options: {
id: string; id: string;
peer: string; peer: string;
docId: string; docId: string;
clock: number; timestamp: number;
}) => Promise<void>; }) => Promise<void>;
clearClocks: (options: { id: string }) => Promise<void>; clearClocks: (options: { id: string }) => Promise<void>;
} }

View File

@@ -96,10 +96,12 @@ export const NbStoreNativeDBApis: NativeDBApis = {
id: string, id: string,
after?: Date | undefined | null after?: Date | undefined | null
): Promise<DocClock[]> { ): Promise<DocClock[]> {
const clocks = await NbStore.getDocClocks({ const clocks = (
id, await NbStore.getDocClocks({
after: after?.getTime(), id,
}); after: after?.getTime(),
})
).clocks;
return clocks.map(c => ({ return clocks.map(c => ({
docId: c.docId, docId: c.docId,
timestamp: new Date(c.timestamp), timestamp: new Date(c.timestamp),
@@ -176,30 +178,30 @@ export const NbStoreNativeDBApis: NativeDBApis = {
id: string, id: string,
peer: string peer: string
): Promise<DocClock[]> { ): Promise<DocClock[]> {
const clocks = await NbStore.getPeerRemoteClocks({ const clocks = (
id, await NbStore.getPeerRemoteClocks({
peer, id,
}); peer,
})
).clocks;
return clocks.map(c => ({ return clocks.map(c => ({
docId: c.docId, docId: c.docId,
timestamp: new Date(c.timestamp), timestamp: new Date(c.timestamp),
})); }));
}, },
getPeerRemoteClock: async function ( getPeerRemoteClock: async function (id: string, peer: string, docId: string) {
id: string,
peer: string,
docId: string
): Promise<DocClock> {
const clock = await NbStore.getPeerRemoteClock({ const clock = await NbStore.getPeerRemoteClock({
id, id,
peer, peer,
docId, docId,
}); });
return { return clock
docId: clock.docId, ? {
timestamp: new Date(clock.timestamp), docId: clock.docId,
}; timestamp: new Date(clock.timestamp),
}
: null;
}, },
setPeerRemoteClock: async function ( setPeerRemoteClock: async function (
id: string, id: string,
@@ -211,17 +213,19 @@ export const NbStoreNativeDBApis: NativeDBApis = {
id, id,
peer, peer,
docId, docId,
clock: clock.getTime(), timestamp: clock.getTime(),
}); });
}, },
getPeerPulledRemoteClocks: async function ( getPeerPulledRemoteClocks: async function (
id: string, id: string,
peer: string peer: string
): Promise<DocClock[]> { ): Promise<DocClock[]> {
const clocks = await NbStore.getPeerPulledRemoteClocks({ const clocks = (
id, await NbStore.getPeerPulledRemoteClocks({
peer, id,
}); peer,
})
).clocks;
return clocks.map(c => ({ return clocks.map(c => ({
docId: c.docId, docId: c.docId,
timestamp: new Date(c.timestamp), timestamp: new Date(c.timestamp),
@@ -231,16 +235,18 @@ export const NbStoreNativeDBApis: NativeDBApis = {
id: string, id: string,
peer: string, peer: string,
docId: string docId: string
): Promise<DocClock> { ) {
const clock = await NbStore.getPeerPulledRemoteClock({ const clock = await NbStore.getPeerPulledRemoteClock({
id, id,
peer, peer,
docId, docId,
}); });
return { return clock
docId: clock.docId, ? {
timestamp: new Date(clock.timestamp), docId: clock.docId,
}; timestamp: new Date(clock.timestamp),
}
: null;
}, },
setPeerPulledRemoteClock: async function ( setPeerPulledRemoteClock: async function (
id: string, id: string,
@@ -252,17 +258,19 @@ export const NbStoreNativeDBApis: NativeDBApis = {
id, id,
peer, peer,
docId, docId,
clock: clock.getTime(), timestamp: clock.getTime(),
}); });
}, },
getPeerPushedClocks: async function ( getPeerPushedClocks: async function (
id: string, id: string,
peer: string peer: string
): Promise<DocClock[]> { ): Promise<DocClock[]> {
const clocks = await NbStore.getPeerPushedClocks({ const clocks = (
id, await NbStore.getPeerPushedClocks({
peer, id,
}); peer,
})
).clocks;
return clocks.map(c => ({ return clocks.map(c => ({
docId: c.docId, docId: c.docId,
timestamp: new Date(c.timestamp), timestamp: new Date(c.timestamp),
@@ -272,16 +280,18 @@ export const NbStoreNativeDBApis: NativeDBApis = {
id: string, id: string,
peer: string, peer: string,
docId: string docId: string
): Promise<DocClock> { ): Promise<DocClock | null> {
const clock = await NbStore.getPeerPushedClock({ const clock = await NbStore.getPeerPushedClock({
id, id,
peer, peer,
docId, docId,
}); });
return { return clock
docId: clock.docId, ? {
timestamp: new Date(clock.timestamp), docId: clock.docId,
}; timestamp: new Date(clock.timestamp),
}
: null;
}, },
setPeerPushedClock: async function ( setPeerPushedClock: async function (
id: string, id: string,
@@ -293,7 +303,7 @@ export const NbStoreNativeDBApis: NativeDBApis = {
id, id,
peer, peer,
docId, docId,
clock: clock.getTime(), timestamp: clock.getTime(),
}); });
}, },
clearClocks: async function (id: string): Promise<void> { clearClocks: async function (id: string): Promise<void> {

View File

@@ -1,6 +1,191 @@
import '@affine/core/bootstrap/browser'; import '@affine/core/bootstrap/browser';
import '@affine/component/theme';
import '@affine/core/mobile/styles/mobile.css';
// TODO(@L-Sun) Uncomment this when the `show` method implement by `@capacitor/keyboard` in ios /**
// import './virtual-keyboard'; * the below code includes the custom fetch and websocket implementation for ios webview.
* should be included in the entry file of the app or webworker.
*/
/*
* we override the browser's fetch function with our custom fetch function to
* overcome the restrictions of cross-domain and third-party cookies in ios webview.
*
* the custom fetch function will convert the request to `affine-http://` or `affine-https://`
* and send the request to the server.
*/
// Keep a reference to the webview's original fetch; both the wrapper below
// and the WrappedWebSocket class send through it.
const rawFetch = globalThis.fetch;
globalThis.fetch = async (input: RequestInfo | URL, init?: RequestInit) => {
  // Resolve relative URLs against the current origin so the protocol
  // comparison below is reliable for every input shape.
  const url = new URL(
    typeof input === 'string'
      ? input
      : input instanceof URL
        ? input.toString()
        : input.url,
    globalThis.location.origin
  );
  // capacitor: URLs are app-local resources; hand them to the original fetch.
  if (url.protocol === 'capacitor:') {
    return rawFetch(input, init);
  }
  // Reroute plain HTTP(S) through the custom scheme handler.
  if (url.protocol === 'http:') {
    url.protocol = 'affine-http:';
  }
  if (url.protocol === 'https:') {
    url.protocol = 'affine-https:';
  }
  // NOTE(review): when input is a Request it is passed as the init argument
  // here; fetch reads RequestInit members off it via getters — TODO confirm
  // the request body survives this path in the target webview.
  return rawFetch(url, input instanceof Request ? input : init);
};
/**
* we create a custom websocket class to simulate the browser's websocket connection
* through the custom url scheme handler.
*
* to overcome the restrictions of cross-domain and third-party cookies in ios webview,
* the front-end opens a websocket connection and sends a message by sending a request
* to `affine-ws://` or `affine-wss://`.
*
* the scheme has two endpoints:
*
* `affine-ws:///open?uuid={uuid}&url={wsUrl}`: opens a websocket connection and returns
* the received data via the SSE protocol.
* If the front-end closes the http connection, the websocket connection will also be closed.
*
* `affine-ws:///send?uuid={uuid}`: sends the request body data to the websocket connection
* with the specified uuid.
*/
/**
 * Browser-WebSocket-compatible shim that tunnels a websocket connection
 * through the custom `affine-ws(s):` URL scheme handler (see the file
 * comment above for the protocol details).
 *
 * Receiving: an EventSource on `/open` streams server frames back via SSE.
 * Sending: each `send()` POSTs the payload to `/send` for this connection's
 * uuid.
 */
class WrappedWebSocket {
  static CLOSED = WebSocket.CLOSED;
  static CLOSING = WebSocket.CLOSING;
  static CONNECTING = WebSocket.CONNECTING;
  static OPEN = WebSocket.OPEN;
  readonly isWss: boolean;
  // Correlates the SSE stream with subsequent /send requests on the native side.
  readonly uuid = crypto.randomUUID();
  readyState: number = WebSocket.CONNECTING;
  events: Record<string, ((event: any) => void)[]> = {};
  onopen: ((event: any) => void) | undefined = undefined;
  onclose: ((event: any) => void) | undefined = undefined;
  onerror: ((event: any) => void) | undefined = undefined;
  onmessage: ((event: any) => void) | undefined = undefined;
  eventSource: EventSource;

  constructor(
    readonly url: string,
    _protocols?: string | string[] // not supported yet
  ) {
    const parsedUrl = new URL(url);
    this.isWss = parsedUrl.protocol === 'wss:';
    this.eventSource = new EventSource(
      `${this.scheme}:///open?uuid=${this.uuid}&url=${encodeURIComponent(this.url)}`
    );
    this.eventSource.addEventListener('open', () => {
      this.emitOpen(new Event('open'));
    });
    this.eventSource.addEventListener('error', () => {
      // EventSource auto-reconnects by default; close it so a dropped
      // connection surfaces as error + close, matching WebSocket semantics.
      this.eventSource.close();
      this.emitError(new Event('error'));
      this.emitClose(new CloseEvent('close'));
    });
    this.eventSource.addEventListener('message', data => {
      const decodedData = JSON.parse(data.data);
      if (decodedData.type === 'message') {
        this.emitMessage(
          new MessageEvent('message', { data: decodedData.data })
        );
      }
    });
  }

  /** URL scheme matching this connection's ws/wss flavor. */
  private get scheme(): string {
    return this.isWss ? 'affine-wss' : 'affine-ws';
  }

  // Fire-and-forget: delivery failures are logged, not surfaced, because the
  // WebSocket#send contract is synchronous and cannot reject.
  send(data: string) {
    rawFetch(`${this.scheme}:///send?uuid=${this.uuid}`, {
      method: 'POST',
      headers: {
        'Content-Type': 'text/plain',
      },
      body: data,
    }).catch(e => {
      console.error('Failed to send message', e);
    });
  }

  close() {
    this.eventSource.close();
    this.emitClose(new CloseEvent('close'));
  }

  addEventListener(type: string, listener: (event: any) => void) {
    this.events[type] = this.events[type] || [];
    this.events[type].push(listener);
  }

  removeEventListener(type: string, listener: (event: any) => void) {
    this.events[type] = (this.events[type] || []).filter(l => l !== listener);
  }

  /**
   * Invoke every registered listener plus the matching `on*` handler,
   * isolating each callback so one throwing listener cannot starve the rest.
   * Factored out of the four previously copy-pasted emit* bodies.
   */
  private dispatch(type: 'open' | 'close' | 'error' | 'message', event: any) {
    for (const listener of this.events[type] ?? []) {
      try {
        listener(event);
      } catch (e) {
        console.error(e);
      }
    }
    const handler =
      this[`on${type}` as 'onopen' | 'onclose' | 'onerror' | 'onmessage'];
    try {
      handler?.(event);
    } catch (e) {
      console.error(e);
    }
  }

  private emitOpen(event: Event) {
    this.readyState = WebSocket.OPEN;
    this.dispatch('open', event);
  }

  private emitClose(event: CloseEvent) {
    this.readyState = WebSocket.CLOSED;
    this.dispatch('close', event);
  }

  private emitMessage(event: MessageEvent) {
    this.dispatch('message', event);
  }

  private emitError(event: Event) {
    this.dispatch('error', event);
  }
}
globalThis.WebSocket = WrappedWebSocket as any;

View File

@@ -0,0 +1,52 @@
import './setup';
import { broadcastChannelStorages } from '@affine/nbstore/broadcast-channel';
import { cloudStorages } from '@affine/nbstore/cloud';
import {
bindNativeDBApis,
type NativeDBApis,
sqliteStorages,
} from '@affine/nbstore/sqlite';
import {
WorkerConsumer,
type WorkerOps,
} from '@affine/nbstore/worker/consumer';
import { type MessageCommunicapable, OpConsumer } from '@toeverything/infra/op';
import { AsyncCall } from 'async-call-rpc';
// Bridge the native DB APIs over a dedicated MessagePort handed to this
// worker by the main thread.
globalThis.addEventListener('message', event => {
  if (event.data.type !== 'native-db-api-channel') {
    return;
  }
  const port = event.ports[0] as MessagePort;
  const rpc = AsyncCall<NativeDBApis>(
    {},
    {
      channel: {
        on(listener) {
          const forward = (msg: MessageEvent<any>) => listener(msg.data);
          port.addEventListener('message', forward);
          return () => port.removeEventListener('message', forward);
        },
        send(data) {
          port.postMessage(data);
        },
      },
    }
  );
  bindNativeDBApis(rpc);
  port.start();
});
// Expose the storage worker over the OpConsumer message protocol, backed by
// the sqlite, broadcast-channel and cloud storage implementations.
const consumer = new OpConsumer<WorkerOps>(globalThis as MessageCommunicapable);
const worker = new WorkerConsumer([
  ...sqliteStorages,
  ...broadcastChannelStorages,
  ...cloudStorages,
]);
worker.bindConsumer(consumer);

View File

@@ -12,6 +12,7 @@
"@affine/component": "workspace:*", "@affine/component": "workspace:*",
"@affine/core": "workspace:*", "@affine/core": "workspace:*",
"@affine/i18n": "workspace:*", "@affine/i18n": "workspace:*",
"@affine/nbstore": "workspace:*",
"@blocksuite/affine": "workspace:*", "@blocksuite/affine": "workspace:*",
"@blocksuite/icons": "2.2.2", "@blocksuite/icons": "2.2.2",
"@sentry/react": "^8.44.0", "@sentry/react": "^8.44.0",

View File

@@ -6,15 +6,16 @@ import { router } from '@affine/core/mobile/router';
import { configureCommonModules } from '@affine/core/modules'; import { configureCommonModules } from '@affine/core/modules';
import { I18nProvider } from '@affine/core/modules/i18n'; import { I18nProvider } from '@affine/core/modules/i18n';
import { LifecycleService } from '@affine/core/modules/lifecycle'; import { LifecycleService } from '@affine/core/modules/lifecycle';
import { configureLocalStorageStateStorageImpls } from '@affine/core/modules/storage';
import { PopupWindowProvider } from '@affine/core/modules/url';
import { configureIndexedDBUserspaceStorageProvider } from '@affine/core/modules/userspace';
import { configureBrowserWorkbenchModule } from '@affine/core/modules/workbench';
import { import {
configureBrowserWorkspaceFlavours, configureLocalStorageStateStorageImpls,
configureIndexedDBWorkspaceEngineStorageProvider, NbstoreProvider,
} from '@affine/core/modules/workspace-engine'; } from '@affine/core/modules/storage';
import { PopupWindowProvider } from '@affine/core/modules/url';
import { configureBrowserWorkbenchModule } from '@affine/core/modules/workbench';
import { configureBrowserWorkspaceFlavours } from '@affine/core/modules/workspace-engine';
import { WorkerClient } from '@affine/nbstore/worker/client';
import { Framework, FrameworkRoot, getCurrentStore } from '@toeverything/infra'; import { Framework, FrameworkRoot, getCurrentStore } from '@toeverything/infra';
import { OpClient } from '@toeverything/infra/op';
import { Suspense } from 'react'; import { Suspense } from 'react';
import { RouterProvider } from 'react-router-dom'; import { RouterProvider } from 'react-router-dom';
@@ -27,9 +28,43 @@ configureCommonModules(framework);
configureBrowserWorkbenchModule(framework); configureBrowserWorkbenchModule(framework);
configureLocalStorageStateStorageImpls(framework); configureLocalStorageStateStorageImpls(framework);
configureBrowserWorkspaceFlavours(framework); configureBrowserWorkspaceFlavours(framework);
configureIndexedDBWorkspaceEngineStorageProvider(framework);
configureIndexedDBUserspaceStorageProvider(framework);
configureMobileModules(framework); configureMobileModules(framework);
framework.impl(NbstoreProvider, {
openStore(key, options) {
if (window.SharedWorker) {
const worker = new SharedWorker(
new URL(
/* webpackChunkName: "nbstore" */ './nbstore.ts',
import.meta.url
),
{ name: key }
);
const client = new WorkerClient(new OpClient(worker.port), options);
worker.port.start();
return {
store: client,
dispose: () => {
worker.port.postMessage({ type: '__close__' });
worker.port.close();
},
};
} else {
const worker = new Worker(
new URL(
/* webpackChunkName: "nbstore" */ './nbstore.ts',
import.meta.url
)
);
const client = new WorkerClient(new OpClient(worker), options);
return {
store: client,
dispose: () => {
worker.terminate();
},
};
}
},
});
framework.impl(PopupWindowProvider, { framework.impl(PopupWindowProvider, {
open: (target: string) => { open: (target: string) => {
const targetUrl = new URL(target); const targetUrl = new URL(target);

Some files were not shown because too many files have changed in this diff Show More