feat(core): new worker workspace engine (#9257)

This commit is contained in:
EYHN
2025-01-17 00:22:18 +08:00
committed by GitHub
parent 7dc470e7ea
commit a2ffdb4047
219 changed files with 4267 additions and 7194 deletions

View File

@@ -30,7 +30,7 @@ test('doc', async () => {
const frontend1 = new DocFrontend(docStorage, DocSyncImpl.dummy);
frontend1.start();
frontend1.addDoc(doc1);
frontend1.connectDoc(doc1);
await vitest.waitFor(async () => {
const doc = await docStorage.getDoc('test-doc');
expectYjsEqual(doc!.bin, {
@@ -45,7 +45,7 @@ test('doc', async () => {
});
const frontend2 = new DocFrontend(docStorage, DocSyncImpl.dummy);
frontend2.start();
frontend2.addDoc(doc2);
frontend2.connectDoc(doc2);
await vitest.waitFor(async () => {
expectYjsEqual(doc2, {
@@ -94,8 +94,8 @@ test('awareness', async () => {
},
});
const frontend = new AwarenessFrontend(sync);
frontend.connect(awarenessA);
frontend.connect(awarenessB);
frontend.connectAwareness(awarenessA);
frontend.connectAwareness(awarenessB);
}
{
const sync = new AwarenessSyncImpl({
@@ -105,7 +105,7 @@ test('awareness', async () => {
},
});
const frontend = new AwarenessFrontend(sync);
frontend.connect(awarenessC);
frontend.connectAwareness(awarenessC);
}
awarenessA.setLocalState({

View File

@@ -0,0 +1,200 @@
import { expect, test, vitest } from 'vitest';
import { AutoReconnectConnection } from '../connection';
test('connect and disconnect', async () => {
  // Stub connection whose doConnect resolves after 300ms, so a disconnect()
  // issued before that window closes exercises the abort path. Counters track
  // how many times each lifecycle hook fired.
  class TestConnection extends AutoReconnectConnection<{
    disconnect: () => void;
  }> {
    connectCount = 0; // doConnect invocations
    abortCount = 0; // doConnect attempts cancelled via the abort signal
    disconnectCount = 0; // doDisconnect invocations on an established inner value
    notListenAbort = false; // when true, simulate an implementation that ignores the signal
    override async doConnect(signal?: AbortSignal) {
      this.connectCount++;
      return new Promise<{ disconnect: () => void }>((resolve, reject) => {
        setTimeout(() => {
          resolve({
            disconnect: () => {
              this.disconnectCount++;
            },
          });
        }, 300);
        if (!this.notListenAbort) {
          // NOTE(review): the listener parameter is the abort Event, not
          // signal.reason — the rejection value is the event object. Harmless
          // here since only the rejection itself is counted; confirm intended.
          signal?.addEventListener('abort', reason => {
            reject(reason);
          });
        }
      }).catch(err => {
        this.abortCount++;
        throw err;
      });
    }
    override doDisconnect(t: { disconnect: () => void }) {
      return t.disconnect();
    }
  }
  const connection = new TestConnection();
  // Plain connect: one attempt, no aborts, ends 'connected'.
  connection.connect();
  await vitest.waitFor(() => {
    expect(connection.connectCount).toBe(1);
    expect(connection.disconnectCount).toBe(0);
    expect(connection.abortCount).toBe(0);
    expect(connection.status).toBe('connected');
  });
  // Plain disconnect after the connection settled: inner disconnect runs once.
  connection.disconnect();
  await vitest.waitFor(() => {
    expect(connection.connectCount).toBe(1);
    expect(connection.disconnectCount).toBe(1);
    expect(connection.abortCount).toBe(0);
    expect(connection.status).toBe('closed');
  });
  // connect twice: ref-counted, so only one additional doConnect call.
  connection.connect();
  connection.connect();
  await vitest.waitFor(() => {
    expect(connection.connectCount).toBe(2);
    expect(connection.disconnectCount).toBe(1);
    expect(connection.abortCount).toBe(0);
    expect(connection.status).toBe('connected');
  });
  // Matching pair of disconnects drops the ref count back to zero and closes.
  connection.disconnect();
  connection.disconnect();
  await vitest.waitFor(() => {
    expect(connection.connectCount).toBe(2);
    expect(connection.disconnectCount).toBe(2);
    expect(connection.abortCount).toBe(0);
    expect(connection.status).toBe('closed');
  });
  // calling connect disconnect consecutively, the previous connect call will be aborted.
  // (disconnect lands inside the 300ms window, so the in-flight doConnect is
  // cancelled via the signal instead of ever producing an inner value.)
  connection.connect();
  connection.disconnect();
  await vitest.waitFor(() => {
    expect(connection.connectCount).toBe(3);
    expect(connection.disconnectCount).toBe(2);
    expect(connection.abortCount).toBe(1);
    expect(connection.status).toBe('closed');
  });
  // Two rapid connect/disconnect pairs: two more attempts, both aborted.
  connection.connect();
  connection.disconnect();
  connection.connect();
  connection.disconnect();
  await vitest.waitFor(() => {
    expect(connection.connectCount).toBe(5);
    expect(connection.disconnectCount).toBe(2);
    expect(connection.abortCount).toBe(3);
    expect(connection.status).toBe('closed');
  });
  // if connection is not listening to abort event, disconnect will be called
  // (the promise resolves anyway after 300ms; the base class then tears the
  // late inner value down via doDisconnect, so disconnectCount rises instead
  // of abortCount).
  connection.notListenAbort = true;
  connection.connect();
  connection.disconnect();
  connection.connect();
  connection.disconnect();
  await vitest.waitFor(() => {
    expect(connection.connectCount).toBe(7);
    expect(connection.disconnectCount).toBe(4);
    expect(connection.abortCount).toBe(3);
    expect(connection.status).toBe('closed');
  });
});
test('retry when connect failed', async () => {
class TestConnection extends AutoReconnectConnection {
override retryDelay = 300;
connectCount = 0;
override async doConnect() {
this.connectCount++;
if (this.connectCount === 3) {
return { hello: 'world' };
}
throw new Error('not connected, count: ' + this.connectCount);
}
override doDisconnect() {
return Promise.resolve();
}
}
const connection = new TestConnection();
connection.connect();
await vitest.waitFor(() => {
expect(connection.connectCount).toBe(1);
expect(connection.status).toBe('error');
expect(connection.error?.message).toContain('not connected, count: 1');
});
await vitest.waitFor(() => {
expect(connection.connectCount).toBe(2);
expect(connection.status).toBe('error');
expect(connection.error?.message).toBe('not connected, count: 2');
});
await vitest.waitFor(() => {
expect(connection.connectCount).toBe(3);
expect(connection.status).toBe('connected');
expect(connection.error).toBeUndefined();
});
});
test('retry when error', async () => {
class TestConnection extends AutoReconnectConnection {
override retryDelay = 300;
connectCount = 0;
disconnectCount = 0;
override async doConnect() {
this.connectCount++;
return {
hello: 'world',
};
}
override doDisconnect(conn: any) {
this.disconnectCount++;
expect(conn).toEqual({
hello: 'world',
});
}
triggerError(error: Error) {
this.error = error;
}
}
const connection = new TestConnection();
connection.connect();
await vitest.waitFor(() => {
expect(connection.connectCount).toBe(1);
expect(connection.status).toBe('connected');
});
connection.triggerError(new Error('test error'));
await vitest.waitFor(() => {
expect(connection.connectCount).toBe(1);
expect(connection.disconnectCount).toBe(1);
expect(connection.status).toBe('error');
expect(connection.error?.message).toBe('test error');
});
// waitfor reconnect
await vitest.waitFor(() => {
expect(connection.connectCount).toBe(2);
expect(connection.disconnectCount).toBe(1);
expect(connection.status).toBe('connected');
expect(connection.error).toBeUndefined();
});
});

View File

@@ -1,5 +1,6 @@
import EventEmitter2 from 'eventemitter2';
import { throttle } from 'lodash-es';
import { MANUALLY_STOP } from '../utils/throw-if-aborted';
export type ConnectionStatus =
| 'idle'
@@ -10,6 +11,7 @@ export type ConnectionStatus =
export interface Connection<T = any> {
readonly status: ConnectionStatus;
readonly error?: Error;
readonly inner: T;
connect(): void;
disconnect(): void;
@@ -23,16 +25,15 @@ export abstract class AutoReconnectConnection<T = any>
implements Connection<T>
{
private readonly event = new EventEmitter2();
private _inner: T | null = null;
private _inner: T | undefined = undefined;
private _status: ConnectionStatus = 'idle';
protected error?: Error;
private _error: Error | undefined = undefined;
retryDelay = 3000;
private refCount = 0;
private _enableAutoReconnect = false;
private connectingAbort?: AbortController;
private reconnectingAbort?: AbortController;
constructor() {
this.autoReconnect();
}
constructor() {}
get shareId(): string | undefined {
return undefined;
@@ -43,7 +44,7 @@ export abstract class AutoReconnectConnection<T = any>
}
get inner(): T {
if (!this._inner) {
if (this._inner === undefined) {
throw new Error(
`Connection ${this.constructor.name} has not been established.`
);
@@ -52,7 +53,7 @@ export abstract class AutoReconnectConnection<T = any>
return this._inner;
}
protected set inner(inner: T | null) {
private set inner(inner: T | undefined) {
this._inner = inner;
}
@@ -60,12 +61,23 @@ export abstract class AutoReconnectConnection<T = any>
return this._status;
}
protected setStatus(status: ConnectionStatus, error?: Error) {
const shouldEmit = status !== this._status || error !== this.error;
get error() {
return this._error;
}
protected set error(error: Error | undefined) {
this.handleError(error);
}
private setStatus(status: ConnectionStatus, error?: Error) {
const shouldEmit = status !== this._status || error !== this._error;
this._status = status;
this.error = error;
// we only clear-up error when status is connected
if (error || status === 'connected') {
this._error = error;
}
if (shouldEmit) {
this.emitStatusChanged(status, error);
this.emitStatusChanged(status, this._error);
}
}
@@ -73,15 +85,15 @@ export abstract class AutoReconnectConnection<T = any>
protected abstract doDisconnect(conn: T): void;
private innerConnect() {
if (this.status === 'idle' || this.status === 'error') {
this._enableAutoReconnect = true;
if (this.status !== 'connecting') {
this.setStatus('connecting');
this.connectingAbort = new AbortController();
this.doConnect(this.connectingAbort.signal)
const signal = this.connectingAbort.signal;
this.doConnect(signal)
.then(value => {
if (!this.connectingAbort?.signal.aborted) {
this.setStatus('connected');
if (!signal.aborted) {
this._inner = value;
this.setStatus('connected');
} else {
try {
this.doDisconnect(value);
@@ -91,14 +103,45 @@ export abstract class AutoReconnectConnection<T = any>
}
})
.catch(error => {
if (!this.connectingAbort?.signal.aborted) {
if (!signal.aborted) {
console.error('failed to connect', error);
this.setStatus('error', error as any);
this.handleError(error as any);
}
});
}
}
private innerDisconnect() {
this.connectingAbort?.abort(MANUALLY_STOP);
this.reconnectingAbort?.abort(MANUALLY_STOP);
try {
if (this._inner) {
this.doDisconnect(this._inner);
}
} catch (error) {
console.error('failed to disconnect', error);
}
this.reconnectingAbort = undefined;
this.connectingAbort = undefined;
this._inner = undefined;
}
private handleError(reason?: Error) {
// on error
console.error('connection error, will reconnect', reason);
this.innerDisconnect();
this.setStatus('error', reason);
// reconnect
this.reconnectingAbort = new AbortController();
const signal = this.reconnectingAbort.signal;
setTimeout(() => {
if (!signal.aborted) {
this.innerConnect();
}
}, this.retryDelay);
}
connect() {
this.refCount++;
if (this.refCount === 1) {
@@ -106,36 +149,16 @@ export abstract class AutoReconnectConnection<T = any>
}
}
disconnect() {
this.refCount--;
if (this.refCount === 0) {
this._enableAutoReconnect = false;
this.connectingAbort?.abort();
try {
if (this._inner) {
this.doDisconnect(this._inner);
}
} catch (error) {
console.error('failed to disconnect', error);
}
this.setStatus('closed');
this._inner = null;
disconnect(force?: boolean) {
if (force) {
this.refCount = 0;
} else {
this.refCount = Math.max(this.refCount - 1, 0);
}
if (this.refCount === 0) {
this.innerDisconnect();
this.setStatus('closed');
}
}
private autoReconnect() {
// TODO:
// - maximum retry count
// - dynamic sleep time (attempt < 3 ? 1s : 1min)?
this.onStatusChanged(
throttle(() => {
() => {
if (this._enableAutoReconnect) {
this.innerConnect();
}
};
}, 1000)
);
}
waitForConnected(signal?: AbortSignal) {

View File

@@ -13,7 +13,7 @@ type AwarenessChanges = Record<'added' | 'updated' | 'removed', number[]>;
export class AwarenessFrontend {
constructor(private readonly sync: AwarenessSync) {}
connect(awareness: Awareness) {
connectAwareness(awareness: Awareness) {
const uniqueId = nanoid();
const handleAwarenessUpdate = (
changes: AwarenessChanges,
@@ -27,7 +27,6 @@ export class AwarenessFrontend {
);
const update = encodeAwarenessUpdate(awareness, changedClients);
this.sync
.update(
{

View File

@@ -1,7 +1,14 @@
import { groupBy } from 'lodash-es';
import { nanoid } from 'nanoid';
import type { Subscription } from 'rxjs';
import { combineLatest, map, Observable, Subject } from 'rxjs';
import {
combineLatest,
map,
Observable,
ReplaySubject,
share,
Subject,
} from 'rxjs';
import {
applyUpdate,
type Doc as YDoc,
@@ -173,7 +180,10 @@ export class DocFrontend {
synced: sync.synced,
syncRetrying: sync.retrying,
syncErrorMessage: sync.errorMessage,
}))
})),
share({
connector: () => new ReplaySubject(1),
})
) satisfies Observable<DocFrontendState>;
start() {
@@ -241,19 +251,11 @@ export class DocFrontend {
}
/**
* Add a doc to the frontend, the doc will sync with the doc storage.
* @param doc - The doc to add
* @param withSubDoc - Whether to add the subdocs of the doc
* Connect a doc to the frontend, the doc will sync with the doc storage.
* @param doc - The doc to connect
*/
addDoc(doc: YDoc, withSubDoc: boolean = false) {
this._addDoc(doc);
if (withSubDoc) {
doc.on('subdocs', ({ loaded }) => {
for (const subdoc of loaded) {
this._addDoc(subdoc);
}
});
}
connectDoc(doc: YDoc) {
this._connectDoc(doc);
}
readonly jobs = {
@@ -275,18 +277,16 @@ export class DocFrontend {
// mark doc as loaded
doc.emit('sync', [true, doc]);
this.status.connectedDocs.add(job.docId);
this.statusUpdatedSubject$.next(job.docId);
const docRecord = await this.storage.getDoc(job.docId);
throwIfAborted(signal);
if (!docRecord || isEmptyUpdate(docRecord.bin)) {
return;
if (docRecord && !isEmptyUpdate(docRecord.bin)) {
this.applyUpdate(job.docId, docRecord.bin);
this.status.readyDocs.add(job.docId);
}
this.applyUpdate(job.docId, docRecord.bin);
this.status.readyDocs.add(job.docId);
this.status.connectedDocs.add(job.docId);
this.statusUpdatedSubject$.next(job.docId);
},
save: async (
@@ -339,12 +339,12 @@ export class DocFrontend {
};
/**
* Remove a doc from the frontend, the doc will stop syncing with the doc storage.
* Disconnect a doc from the frontend, the doc will stop syncing with the doc storage.
* It's not recommended to use this method directly, better to use `doc.destroy()`.
*
* @param doc - The doc to remove
* @param doc - The doc to disconnect
*/
removeDoc(doc: YDoc) {
disconnectDoc(doc: YDoc) {
this.status.docs.delete(doc.guid);
this.status.connectedDocs.delete(doc.guid);
this.status.readyDocs.delete(doc.guid);
@@ -370,7 +370,10 @@ export class DocFrontend {
};
}
private _addDoc(doc: YDoc) {
private _connectDoc(doc: YDoc) {
if (this.status.docs.has(doc.guid)) {
throw new Error('doc already connected');
}
this.schedule({
type: 'load',
docId: doc.guid,
@@ -382,7 +385,7 @@ export class DocFrontend {
doc.on('update', this.handleDocUpdate);
doc.on('destroy', () => {
this.removeDoc(doc);
this.disconnectDoc(doc);
});
}

View File

@@ -15,12 +15,7 @@ export class BroadcastChannelConnection extends AutoReconnectConnection<Broadcas
return new BroadcastChannel(this.channelName);
}
override doDisconnect() {
this.close();
}
private close(error?: Error) {
this.maybeConnection?.close();
this.setStatus('closed', error);
override doDisconnect(channel: BroadcastChannel) {
channel.close();
}
}

View File

@@ -1,5 +1,3 @@
import type { SocketOptions } from 'socket.io-client';
import { share } from '../../connection';
import {
type AwarenessRecord,
@@ -13,7 +11,6 @@ import {
} from './socket';
interface CloudAwarenessStorageOptions {
socketOptions?: SocketOptions;
serverBaseUrl: string;
type: SpaceType;
id: string;
@@ -26,12 +23,7 @@ export class CloudAwarenessStorage extends AwarenessStorageBase {
super();
}
connection = share(
new SocketConnection(
`${this.options.serverBaseUrl}/`,
this.options.socketOptions
)
);
connection = share(new SocketConnection(`${this.options.serverBaseUrl}/`));
private get socket() {
return this.connection.inner;
@@ -52,9 +44,14 @@ export class CloudAwarenessStorage extends AwarenessStorageBase {
onUpdate: (update: AwarenessRecord, origin?: string) => void,
onCollect: () => Promise<AwarenessRecord | null>
): () => void {
// TODO: handle disconnect
// leave awareness
const leave = () => {
if (this.connection.status !== 'connected') return;
this.socket.off('space:collect-awareness', handleCollectAwareness);
this.socket.off(
'space:broadcast-awareness-update',
handleBroadcastAwarenessUpdate
);
this.socket.emit('space:leave-awareness', {
spaceType: this.options.type,
spaceId: this.options.id,
@@ -64,6 +61,11 @@ export class CloudAwarenessStorage extends AwarenessStorageBase {
// join awareness, and collect awareness from others
const joinAndCollect = async () => {
this.socket.on('space:collect-awareness', handleCollectAwareness);
this.socket.on(
'space:broadcast-awareness-update',
handleBroadcastAwarenessUpdate
);
await this.socket.emitWithAck('space:join-awareness', {
spaceType: this.options.type,
spaceId: this.options.id,
@@ -77,7 +79,11 @@ export class CloudAwarenessStorage extends AwarenessStorageBase {
});
};
joinAndCollect().catch(err => console.error('awareness join failed', err));
if (this.connection.status === 'connected') {
joinAndCollect().catch(err =>
console.error('awareness join failed', err)
);
}
const unsubscribeConnectionStatusChanged = this.connection.onStatusChanged(
status => {
@@ -141,18 +147,9 @@ export class CloudAwarenessStorage extends AwarenessStorageBase {
}
};
this.socket.on('space:collect-awareness', handleCollectAwareness);
this.socket.on(
'space:broadcast-awareness-update',
handleBroadcastAwarenessUpdate
);
return () => {
leave();
this.socket.off('space:collect-awareness', handleCollectAwareness);
this.socket.off(
'space:broadcast-awareness-update',
handleBroadcastAwarenessUpdate
);
unsubscribeConnectionStatusChanged();
};
}

View File

@@ -45,23 +45,28 @@ export class StaticCloudDocStorage extends DocStorageBase<CloudDocStorageOptions
protected override async getDocSnapshot(
docId: string
): Promise<DocRecord | null> {
const arrayBuffer = await this.connection.fetchArrayBuffer(
`/api/workspaces/${this.spaceId}/docs/${docId}`,
{
priority: 'high',
headers: {
Accept: 'application/octet-stream', // this is necessary for ios native fetch to return arraybuffer
},
try {
const arrayBuffer = await this.connection.fetchArrayBuffer(
`/api/workspaces/${this.spaceId}/docs/${docId}`,
{
priority: 'high',
headers: {
Accept: 'application/octet-stream', // this is necessary for ios native fetch to return arraybuffer
},
}
);
if (!arrayBuffer) {
return null;
}
);
if (!arrayBuffer) {
return {
docId: docId,
bin: new Uint8Array(arrayBuffer),
timestamp: new Date(),
};
} catch (error) {
console.error(error);
return null;
}
return {
docId: docId,
bin: new Uint8Array(arrayBuffer),
timestamp: new Date(),
};
}
protected override setDocSnapshot(
_snapshot: DocRecord,

View File

@@ -1,10 +1,5 @@
import type { Socket, SocketOptions } from 'socket.io-client';
import type { Socket } from 'socket.io-client';
import {
type Connection,
type ConnectionStatus,
share,
} from '../../connection';
import {
type DocClock,
type DocClocks,
@@ -12,6 +7,7 @@ import {
type DocStorageOptions,
type DocUpdate,
} from '../../storage';
import { getIdConverter, type IdConverter } from '../../utils/id-converter';
import type { SpaceType } from '../../utils/universal-id';
import {
base64ToUint8Array,
@@ -21,7 +17,6 @@ import {
} from './socket';
interface CloudDocStorageOptions extends DocStorageOptions {
socketOptions?: SocketOptions;
serverBaseUrl: string;
type: SpaceType;
}
@@ -32,7 +27,12 @@ export class CloudDocStorage extends DocStorageBase<CloudDocStorageOptions> {
get socket() {
return this.connection.inner;
}
get idConverter() {
if (!this.connection.idConverter) {
throw new Error('Id converter not initialized');
}
return this.connection.idConverter;
}
readonly spaceType = this.options.type;
onServerUpdate: ServerEventsMap['space:broadcast-doc-update'] = message => {
@@ -41,7 +41,7 @@ export class CloudDocStorage extends DocStorageBase<CloudDocStorageOptions> {
this.spaceId === message.spaceId
) {
this.emit('update', {
docId: message.docId,
docId: this.idConverter.oldIdToNewId(message.docId),
bin: base64ToUint8Array(message.update),
timestamp: new Date(message.timestamp),
editor: message.editor,
@@ -58,10 +58,13 @@ export class CloudDocStorage extends DocStorageBase<CloudDocStorageOptions> {
const response = await this.socket.emitWithAck('space:load-doc', {
spaceType: this.spaceType,
spaceId: this.spaceId,
docId,
docId: this.idConverter.newIdToOldId(docId),
});
if ('error' in response) {
if (response.error.name === 'DOC_NOT_FOUND') {
return null;
}
// TODO: use [UserFriendlyError]
throw new Error(response.error.message);
}
@@ -77,11 +80,14 @@ export class CloudDocStorage extends DocStorageBase<CloudDocStorageOptions> {
const response = await this.socket.emitWithAck('space:load-doc', {
spaceType: this.spaceType,
spaceId: this.spaceId,
docId,
docId: this.idConverter.newIdToOldId(docId),
stateVector: state ? await uint8ArrayToBase64(state) : void 0,
});
if ('error' in response) {
if (response.error.name === 'DOC_NOT_FOUND') {
return null;
}
// TODO: use [UserFriendlyError]
throw new Error(response.error.message);
}
@@ -98,8 +104,8 @@ export class CloudDocStorage extends DocStorageBase<CloudDocStorageOptions> {
const response = await this.socket.emitWithAck('space:push-doc-update', {
spaceType: this.spaceType,
spaceId: this.spaceId,
docId: update.docId,
updates: await uint8ArrayToBase64(update.bin),
docId: this.idConverter.newIdToOldId(update.docId),
update: await uint8ArrayToBase64(update.bin),
});
if ('error' in response) {
@@ -120,7 +126,7 @@ export class CloudDocStorage extends DocStorageBase<CloudDocStorageOptions> {
const response = await this.socket.emitWithAck('space:load-doc', {
spaceType: this.spaceType,
spaceId: this.spaceId,
docId,
docId: this.idConverter.newIdToOldId(docId),
});
if ('error' in response) {
@@ -150,7 +156,7 @@ export class CloudDocStorage extends DocStorageBase<CloudDocStorageOptions> {
}
return Object.entries(response.data).reduce((ret, [docId, timestamp]) => {
ret[docId] = new Date(timestamp);
ret[this.idConverter.oldIdToNewId(docId)] = new Date(timestamp);
return ret;
}, {} as DocClocks);
}
@@ -159,7 +165,7 @@ export class CloudDocStorage extends DocStorageBase<CloudDocStorageOptions> {
this.socket.emit('space:delete-doc', {
spaceType: this.spaceType,
spaceId: this.spaceId,
docId,
docId: this.idConverter.newIdToOldId(docId),
});
}
@@ -174,83 +180,74 @@ export class CloudDocStorage extends DocStorageBase<CloudDocStorageOptions> {
}
}
class CloudDocStorageConnection implements Connection<Socket> {
connection = share(
new SocketConnection(
`${this.options.serverBaseUrl}/`,
this.options.socketOptions
)
);
private disposeConnectionStatusListener?: () => void;
private get socket() {
return this.connection.inner;
}
class CloudDocStorageConnection extends SocketConnection {
constructor(
private readonly options: CloudDocStorageOptions,
private readonly onServerUpdate: ServerEventsMap['space:broadcast-doc-update']
) {}
get status() {
return this.connection.status;
) {
super(`${options.serverBaseUrl}/`);
}
get inner() {
return this.connection.inner;
}
idConverter: IdConverter | null = null;
connect(): void {
if (!this.disposeConnectionStatusListener) {
this.disposeConnectionStatusListener = this.connection.onStatusChanged(
status => {
if (status === 'connected') {
this.join().catch(err => {
console.error('doc storage join failed', err);
});
this.socket.on('space:broadcast-doc-update', this.onServerUpdate);
}
}
);
}
return this.connection.connect();
}
override async doConnect(signal?: AbortSignal) {
const socket = await super.doConnect(signal);
async join() {
try {
const res = await this.socket.emitWithAck('space:join', {
const res = await socket.emitWithAck('space:join', {
spaceType: this.options.type,
spaceId: this.options.id,
clientVersion: BUILD_CONFIG.appVersion,
});
if ('error' in res) {
this.connection.setStatus('closed', new Error(res.error.message));
throw new Error(res.error.message);
}
if (!this.idConverter) {
this.idConverter = await this.getIdConverter(socket);
}
socket.on('space:broadcast-doc-update', this.onServerUpdate);
return socket;
} catch (e) {
this.connection.setStatus('error', e as Error);
socket.close();
throw e;
}
}
disconnect() {
if (this.disposeConnectionStatusListener) {
this.disposeConnectionStatusListener();
}
this.socket.emit('space:leave', {
override doDisconnect(socket: Socket) {
socket.emit('space:leave', {
spaceType: this.options.type,
spaceId: this.options.id,
});
this.socket.off('space:broadcast-doc-update', this.onServerUpdate);
this.connection.disconnect();
socket.off('space:broadcast-doc-update', this.onServerUpdate);
super.disconnect();
}
waitForConnected(signal?: AbortSignal): Promise<void> {
return this.connection.waitForConnected(signal);
}
onStatusChanged(
cb: (status: ConnectionStatus, error?: Error) => void
): () => void {
return this.connection.onStatusChanged(cb);
async getIdConverter(socket: Socket) {
return getIdConverter(
{
getDocBuffer: async id => {
const response = await socket.emitWithAck('space:load-doc', {
spaceType: this.options.type,
spaceId: this.options.id,
docId: id,
});
if ('error' in response) {
if (response.error.name === 'DOC_NOT_FOUND') {
return null;
}
// TODO: use [UserFriendlyError]
throw new Error(response.error.message);
}
return base64ToUint8Array(response.data.missing);
},
},
this.options.id
);
}
}

View File

@@ -23,6 +23,7 @@ export class HttpConnection extends DummyConnection {
...init,
signal: abortController.signal,
headers: {
...this.requestHeaders,
...init?.headers,
'x-affine-version': BUILD_CONFIG.appVersion,
},
@@ -35,7 +36,7 @@ export class HttpConnection extends DummyConnection {
let reason: string | any = '';
if (res.headers.get('Content-Type')?.includes('application/json')) {
try {
reason = await res.json();
reason = JSON.stringify(await res.json());
} catch {
// ignore
}
@@ -63,7 +64,10 @@ export class HttpConnection extends DummyConnection {
this.fetch
);
constructor(private readonly serverBaseUrl: string) {
constructor(
private readonly serverBaseUrl: string,
private readonly requestHeaders?: Record<string, string>
) {
super();
}
}

View File

@@ -4,10 +4,8 @@ import {
type SocketOptions,
} from 'socket.io-client';
import {
AutoReconnectConnection,
type ConnectionStatus,
} from '../../connection';
import { AutoReconnectConnection } from '../../connection';
import { throwIfAborted } from '../../utils/throw-if-aborted';
// TODO(@forehalo): use [UserFriendlyError]
interface EventError {
@@ -82,7 +80,7 @@ interface ClientEvents {
};
'space:push-doc-update': [
{ spaceType: string; spaceId: string; docId: string; updates: string },
{ spaceType: string; spaceId: string; docId: string; update: string },
{ timestamp: number },
];
'space:load-doc-timestamps': [
@@ -153,12 +151,24 @@ export function base64ToUint8Array(base64: string) {
return new Uint8Array(binaryArray);
}
const SOCKET_IOMANAGER_CACHE = new Map<string, SocketIOManager>();
function getSocketIOManager(endpoint: string) {
let manager = SOCKET_IOMANAGER_CACHE.get(endpoint);
if (!manager) {
manager = new SocketIOManager(endpoint, {
autoConnect: false,
transports: ['websocket'],
secure: new URL(endpoint).protocol === 'https:',
// we will handle reconnection by ourselves
reconnection: false,
});
SOCKET_IOMANAGER_CACHE.set(endpoint, manager);
}
return manager;
}
export class SocketConnection extends AutoReconnectConnection<Socket> {
manager = new SocketIOManager(this.endpoint, {
autoConnect: false,
transports: ['websocket'],
secure: new URL(this.endpoint).protocol === 'https:',
});
manager = getSocketIOManager(this.endpoint);
constructor(
private readonly endpoint: string,
@@ -171,32 +181,42 @@ export class SocketConnection extends AutoReconnectConnection<Socket> {
return `socket:${this.endpoint}`;
}
override async doConnect() {
const conn = this.manager.socket('/', this.socketOptions);
override async doConnect(signal?: AbortSignal) {
const socket = this.manager.socket('/', this.socketOptions);
try {
throwIfAborted(signal);
await Promise.race([
new Promise<void>((resolve, reject) => {
socket.once('connect', () => {
resolve();
});
socket.once('connect_error', err => {
reject(err);
});
socket.open();
}),
new Promise<void>((_resolve, reject) => {
signal?.addEventListener('abort', () => {
reject(new Error('Aborted'));
});
}),
]);
} catch (err) {
socket.close();
throw err;
}
await new Promise<void>((resolve, reject) => {
conn.once('connect', () => {
resolve();
});
conn.once('connect_error', err => {
reject(err);
});
conn.open();
});
socket.on('disconnect', this.handleDisconnect);
return conn;
return socket;
}
override doDisconnect(conn: Socket) {
conn.off('disconnect', this.handleDisconnect);
conn.close();
}
/**
* Socket connection allow explicitly set status by user
*
* used when join space failed
*/
override setStatus(status: ConnectionStatus, error?: Error) {
super.setStatus(status, error);
}
handleDisconnect = (reason: SocketIO.DisconnectReason) => {
this.error = new Error(reason);
};
}

View File

@@ -25,22 +25,14 @@ export class IDBConnection extends AutoReconnectConnection<{
}
override async doConnect() {
// indexeddb will responsible for version control, so the db.version always match migrator.version
const db = await openDB<DocStorageSchema>(this.dbName, migrator.version, {
upgrade: migrator.migrate,
});
db.addEventListener('versionchange', this.handleVersionChange);
return {
db: await openDB<DocStorageSchema>(this.dbName, migrator.version, {
upgrade: migrator.migrate,
blocking: () => {
// if, for example, an tab with newer version is opened, this function will be called.
// we should close current connection to allow the new version to upgrade the db.
this.setStatus(
'closed',
new Error('Blocking a new version. Closing the connection.')
);
},
blocked: () => {
// fallback to retry auto retry
this.setStatus('error', new Error('Blocked by other tabs.'));
},
}),
db,
channel: new BroadcastChannel('idb:' + this.dbName),
};
}
@@ -49,7 +41,19 @@ export class IDBConnection extends AutoReconnectConnection<{
db: IDBPDatabase<DocStorageSchema>;
channel: BroadcastChannel;
}) {
db.db.removeEventListener('versionchange', this.handleVersionChange);
db.channel.close();
db.db.close();
}
handleVersionChange = (e: IDBVersionChangeEvent) => {
if (e.newVersion !== migrator.version) {
this.error = new Error(
'Database version mismatch, expected ' +
migrator.version +
' but got ' +
e.newVersion
);
}
};
}

View File

@@ -29,26 +29,35 @@ export class IndexedDBDocStorage extends DocStorageBase<IDBConnectionOptions> {
override locker = new IndexedDBLocker(this.connection);
private _lastTimestamp = new Date(0);
private generateTimestamp() {
const timestamp = new Date();
if (timestamp.getTime() <= this._lastTimestamp.getTime()) {
timestamp.setTime(this._lastTimestamp.getTime() + 1);
}
this._lastTimestamp = timestamp;
return timestamp;
}
override async pushDocUpdate(update: DocUpdate, origin?: string) {
const trx = this.db.transaction(['updates', 'clocks'], 'readwrite');
const timestamp = this.generateTimestamp();
await trx.objectStore('updates').add({
...update,
createdAt: timestamp,
});
let timestamp = new Date();
await trx.objectStore('clocks').put({ docId: update.docId, timestamp });
let retry = 0;
while (true) {
try {
const trx = this.db.transaction(['updates', 'clocks'], 'readwrite');
await trx.objectStore('updates').add({
...update,
createdAt: timestamp,
});
await trx.objectStore('clocks').put({ docId: update.docId, timestamp });
trx.commit();
} catch (e) {
if (e instanceof Error && e.name === 'ConstraintError') {
retry++;
if (retry < 10) {
timestamp = new Date(timestamp.getTime() + 1);
continue;
}
}
throw e;
}
break;
}
this.emit(
'update',
@@ -191,9 +200,9 @@ export class IndexedDBDocStorage extends DocStorageBase<IDBConnectionOptions> {
};
}
handleChannelMessage(event: MessageEvent<ChannelMessage>) {
handleChannelMessage = (event: MessageEvent<ChannelMessage>) => {
if (event.data.type === 'update') {
this.emit('update', event.data.update, event.data.origin);
}
}
};
}

View File

@@ -2,7 +2,6 @@ import type { StorageConstructor } from '..';
import { IndexedDBBlobStorage } from './blob';
import { IndexedDBDocStorage } from './doc';
import { IndexedDBSyncStorage } from './sync';
import { IndexedDBV1BlobStorage, IndexedDBV1DocStorage } from './v1';
export * from './blob';
export * from './doc';
@@ -13,8 +12,3 @@ export const idbStorages = [
IndexedDBBlobStorage,
IndexedDBSyncStorage,
] satisfies StorageConstructor[];
export const idbv1Storages = [
IndexedDBV1DocStorage,
IndexedDBV1BlobStorage,
] satisfies StorageConstructor[];

View File

@@ -19,6 +19,9 @@ export class IndexedDBV1BlobStorage extends BlobStorageBase {
}
override async get(key: string) {
if (!this.db) {
return null;
}
const trx = this.db.transaction('blob', 'readonly');
const blob = await trx.store.get(key);
if (!blob) {
@@ -34,6 +37,9 @@ export class IndexedDBV1BlobStorage extends BlobStorageBase {
}
override async delete(key: string, permanently: boolean) {
if (!this.db) {
return;
}
if (permanently) {
const trx = this.db.transaction('blob', 'readwrite');
await trx.store.delete(key);
@@ -41,6 +47,9 @@ export class IndexedDBV1BlobStorage extends BlobStorageBase {
}
override async list() {
if (!this.db) {
return [];
}
const trx = this.db.transaction('blob', 'readonly');
const it = trx.store.iterate();

View File

@@ -15,23 +15,26 @@ export interface DocDBSchema extends DBSchema {
};
}
export class DocIDBConnection extends AutoReconnectConnection<
IDBPDatabase<DocDBSchema>
> {
export class DocIDBConnection extends AutoReconnectConnection<IDBPDatabase<DocDBSchema> | null> {
override get shareId() {
return 'idb(old):affine-local';
}
override async doConnect() {
return openDB<DocDBSchema>('affine-local', 1, {
upgrade: db => {
db.createObjectStore('workspace', { keyPath: 'id' });
},
});
const dbs = await indexedDB.databases();
if (dbs.some(d => d.name === 'affine-local')) {
return openDB<DocDBSchema>('affine-local', 1, {
upgrade: db => {
db.createObjectStore('workspace', { keyPath: 'id' });
},
});
} else {
return null;
}
}
override doDisconnect(conn: IDBPDatabase<DocDBSchema>) {
conn.close();
override doDisconnect(conn: IDBPDatabase<DocDBSchema> | null) {
conn?.close();
}
}
@@ -46,9 +49,7 @@ export interface BlobIDBConnectionOptions {
id: string;
}
export class BlobIDBConnection extends AutoReconnectConnection<
IDBPDatabase<BlobDBSchema>
> {
export class BlobIDBConnection extends AutoReconnectConnection<IDBPDatabase<BlobDBSchema> | null> {
constructor(private readonly options: BlobIDBConnectionOptions) {
super();
}
@@ -58,14 +59,19 @@ export class BlobIDBConnection extends AutoReconnectConnection<
}
override async doConnect() {
return openDB<BlobDBSchema>(`${this.options.id}_blob`, 1, {
upgrade: db => {
db.createObjectStore('blob');
},
});
const dbs = await indexedDB.databases();
if (dbs.some(d => d.name === `${this.options.id}_blob`)) {
return openDB<BlobDBSchema>(`${this.options.id}_blob`, 1, {
upgrade: db => {
db.createObjectStore('blob');
},
});
} else {
return null;
}
}
override doDisconnect(conn: IDBPDatabase<BlobDBSchema>) {
conn.close();
override doDisconnect(conn: IDBPDatabase<BlobDBSchema> | null) {
conn?.close();
}
}

View File

@@ -1,9 +1,20 @@
import { once } from 'lodash-es';
import {
applyUpdate,
type Array as YArray,
Doc as YDoc,
type Map as YMap,
} from 'yjs';
import { share } from '../../../connection';
import {
type DocClocks,
type DocRecord,
DocStorageBase,
type DocStorageOptions,
type DocUpdate,
} from '../../../storage';
import { getIdConverter } from '../../../utils/id-converter';
import { DocIDBConnection } from './db';
/**
@@ -14,6 +25,13 @@ export class IndexedDBV1DocStorage extends DocStorageBase {
readonly connection = share(new DocIDBConnection());
constructor(opts: DocStorageOptions) {
super({
...opts,
readonlyMode: true,
});
}
get db() {
return this.connection.inner;
}
@@ -23,26 +41,11 @@ export class IndexedDBV1DocStorage extends DocStorageBase {
}
override async getDoc(docId: string) {
const trx = this.db.transaction('workspace', 'readonly');
const record = await trx.store.get(docId);
if (!record?.updates.length) {
if (!this.db) {
return null;
}
if (record.updates.length === 1) {
return {
docId,
bin: record.updates[0].update,
timestamp: new Date(record.updates[0].timestamp),
};
}
return {
docId,
bin: await this.mergeUpdates(record.updates.map(update => update.update)),
timestamp: new Date(record.updates.at(-1)?.timestamp ?? Date.now()),
};
const oldId = (await this.getIdConverter()).newIdToOldId(docId);
return this.rawGetDoc(oldId);
}
protected override async getDocSnapshot() {
@@ -55,12 +58,60 @@ export class IndexedDBV1DocStorage extends DocStorageBase {
}
override async deleteDoc(docId: string) {
if (!this.db) {
return;
}
const oldId = (await this.getIdConverter()).newIdToOldId(docId);
const trx = this.db.transaction('workspace', 'readwrite');
await trx.store.delete(docId);
await trx.store.delete(oldId);
}
override async getDocTimestamps() {
return {};
override async getDocTimestamps(): Promise<DocClocks> {
if (!this.db) {
return {};
}
const idConverter = await this.getIdConverter();
const oldIds: string[] = [this.spaceId];
const rootDocBuffer = await this.rawGetDoc(this.spaceId);
if (rootDocBuffer) {
const ydoc = new YDoc({
guid: this.spaceId,
});
applyUpdate(ydoc, rootDocBuffer.bin);
// get all ids from rootDoc.meta.pages.[*].id, trust this id as normalized id
const normalizedDocIds = (
(ydoc.getMap('meta') as YMap<any> | undefined)?.get('pages') as
| YArray<YMap<any>>
| undefined
)
?.map(i => i.get('id') as string)
.filter(i => !!i);
const spaces = ydoc.getMap('spaces') as YMap<any> | undefined;
for (const pageId of normalizedDocIds ?? []) {
const subdoc = spaces?.get(pageId);
if (subdoc && subdoc instanceof YDoc) {
oldIds.push(subdoc.guid);
}
}
}
const trx = this.db.transaction('workspace', 'readonly');
const allKeys = await trx.store.getAllKeys();
oldIds.push(...allKeys.filter(k => k.startsWith(`db$${this.spaceId}$`)));
oldIds.push(
...allKeys.filter(k =>
k.match(new RegExp(`^userdata\\$[\\w-]+\\$${this.spaceId}$`))
)
);
return Object.fromEntries(
oldIds.map(id => [idConverter.oldIdToNewId(id), new Date(1)])
);
}
override async getDocTimestamp(_docId: string) {
@@ -78,4 +129,59 @@ export class IndexedDBV1DocStorage extends DocStorageBase {
protected override async markUpdatesMerged(): Promise<number> {
return 0;
}
private async rawGetDoc(id: string) {
if (!this.db) {
return null;
}
const trx = this.db.transaction('workspace', 'readonly');
const record = await trx.store.get(id);
if (!record?.updates.length) {
return null;
}
if (record.updates.length === 1) {
return {
docId: id,
bin: record.updates[0].update,
timestamp: new Date(record.updates[0].timestamp),
};
}
return {
docId: id,
bin: await this.mergeUpdates(record.updates.map(update => update.update)),
timestamp: new Date(record.updates.at(-1)?.timestamp ?? Date.now()),
};
}
private readonly getIdConverter = once(async () => {
const idConverter = getIdConverter(
{
getDocBuffer: async id => {
if (!this.db) {
return null;
}
const trx = this.db.transaction('workspace', 'readonly');
const record = await trx.store.get(id);
if (!record?.updates.length) {
return null;
}
if (record.updates.length === 1) {
return record.updates[0].update;
}
return await this.mergeUpdates(
record.updates.map(update => update.update)
);
},
},
this.spaceId
);
return await idConverter;
});
}

View File

@@ -1,2 +1,11 @@
import type { StorageConstructor } from '../..';
import { IndexedDBV1BlobStorage } from './blob';
import { IndexedDBV1DocStorage } from './doc';
export * from './blob';
export * from './doc';
export const idbV1Storages = [
IndexedDBV1DocStorage,
IndexedDBV1BlobStorage,
] satisfies StorageConstructor[];

View File

@@ -1,8 +1,10 @@
import type { Storage } from '../storage';
import type { broadcastChannelStorages } from './broadcast-channel';
import type { cloudStorages } from './cloud';
import type { idbStorages, idbv1Storages } from './idb';
import type { idbStorages } from './idb';
import type { idbV1Storages } from './idb/v1';
import type { sqliteStorages } from './sqlite';
import type { sqliteV1Storages } from './sqlite/v1';
export type StorageConstructor = {
new (...args: any[]): Storage;
@@ -11,9 +13,10 @@ export type StorageConstructor = {
type Storages =
| typeof cloudStorages
| typeof idbv1Storages
| typeof idbV1Storages
| typeof idbStorages
| typeof sqliteStorages
| typeof sqliteV1Storages
| typeof broadcastChannelStorages;
// oxlint-disable-next-line no-redeclare

View File

@@ -41,7 +41,7 @@ export type NativeDBApis = {
id: string,
peer: string,
docId: string
): Promise<DocClock>;
): Promise<DocClock | null>;
setPeerRemoteClock(
id: string,
peer: string,
@@ -53,7 +53,7 @@ export type NativeDBApis = {
id: string,
peer: string,
docId: string
): Promise<DocClock>;
): Promise<DocClock | null>;
setPeerPulledRemoteClock(
id: string,
peer: string,
@@ -65,7 +65,7 @@ export type NativeDBApis = {
id: string,
peer: string,
docId: string
): Promise<DocClock>;
): Promise<DocClock | null>;
setPeerPushedClock(
id: string,
peer: string,

View File

@@ -7,7 +7,6 @@ export * from './blob';
export { bindNativeDBApis, type NativeDBApis } from './db';
export * from './doc';
export * from './sync';
export * from './v1';
export const sqliteStorages = [
SqliteDocStorage,

View File

@@ -7,6 +7,7 @@ import { apis } from './db';
* @deprecated readonly
*/
export class SqliteV1BlobStorage extends BlobStorageBase {
static identifier = 'SqliteV1BlobStorage';
override connection = new DummyConnection();
constructor(private readonly options: { type: SpaceType; id: string }) {

View File

@@ -4,6 +4,8 @@ import {
DocStorageBase,
type DocUpdate,
} from '../../../storage';
import { getIdConverter, type IdConverter } from '../../../utils/id-converter';
import { isEmptyUpdate } from '../../../utils/is-empty-update';
import type { SpaceType } from '../../../utils/universal-id';
import { apis } from './db';
@@ -14,8 +16,14 @@ export class SqliteV1DocStorage extends DocStorageBase<{
type: SpaceType;
id: string;
}> {
static identifier = 'SqliteV1DocStorage';
cachedIdConverter: Promise<IdConverter> | null = null;
override connection = new DummyConnection();
constructor(options: { type: SpaceType; id: string }) {
super({ ...options, readonlyMode: true });
}
private get db() {
if (!apis) {
throw new Error('Not in electron context.');
@@ -26,17 +34,21 @@ export class SqliteV1DocStorage extends DocStorageBase<{
override async pushDocUpdate(update: DocUpdate) {
// no more writes
return { docId: update.docId, timestamp: new Date() };
}
override async getDoc(docId: string) {
const idConverter = await this.getIdConverter();
const bin = await this.db.getDocAsUpdates(
this.options.type,
this.options.id,
docId
idConverter.newIdToOldId(docId)
);
if (isEmptyUpdate(bin)) {
return null;
}
return {
docId,
bin,
@@ -71,4 +83,37 @@ export class SqliteV1DocStorage extends DocStorageBase<{
protected override async markUpdatesMerged(): Promise<number> {
return 0;
}
private async getIdConverter() {
if (this.cachedIdConverter) {
return await this.cachedIdConverter;
}
this.cachedIdConverter = getIdConverter(
{
getDocBuffer: async id => {
if (!this.db) {
return null;
}
const updates = await this.db.getDocAsUpdates(
this.options.type,
this.options.id,
id
);
if (isEmptyUpdate(updates)) {
return null;
}
if (!updates) {
return null;
}
return updates;
},
},
this.spaceId
);
return await this.cachedIdConverter;
}
}

View File

@@ -1,3 +1,12 @@
import type { StorageConstructor } from '../..';
import { SqliteV1BlobStorage } from './blob';
import { SqliteV1DocStorage } from './doc';
export * from './blob';
export { bindNativeDBV1Apis } from './db';
export * from './doc';
export const sqliteV1Storages = [
SqliteV1DocStorage,
SqliteV1BlobStorage,
] satisfies StorageConstructor[];

View File

@@ -151,7 +151,7 @@ export abstract class DocStorageBase<Opts = {}> implements DocStorage {
return {
docId,
missing: state ? diffUpdate(doc.bin, state) : doc.bin,
missing: state && state.length > 0 ? diffUpdate(doc.bin, state) : doc.bin,
state: encodeStateVectorFromUpdate(doc.bin),
timestamp: doc.timestamp,
};

View File

@@ -18,8 +18,11 @@ export class AwarenessSyncImpl implements AwarenessSync {
async update(record: AwarenessRecord, origin?: string) {
await Promise.all(
[this.storages.local, ...Object.values(this.storages.remotes)].map(peer =>
peer.update(record, origin)
[this.storages.local, ...Object.values(this.storages.remotes)].map(
peer =>
peer.connection.status === 'connected'
? peer.update(record, origin)
: Promise.resolve()
)
);
}

View File

@@ -73,10 +73,14 @@ export class BlobSyncImpl implements BlobSync {
async fullSync(signal?: AbortSignal) {
throwIfAborted(signal);
await this.storages.local.connection.waitForConnected(signal);
for (const [remotePeer, remote] of Object.entries(this.storages.remotes)) {
let localList: string[] = [];
let remoteList: string[] = [];
await remote.connection.waitForConnected(signal);
try {
localList = (await this.storages.local.list(signal)).map(b => b.key);
throwIfAborted(signal);
@@ -150,7 +154,7 @@ export class BlobSyncImpl implements BlobSync {
}
stop() {
this.abort?.abort();
this.abort?.abort(MANUALLY_STOP);
this.abort = null;
}

View File

@@ -1,5 +1,5 @@
import type { Observable } from 'rxjs';
import { combineLatest, map, of } from 'rxjs';
import { combineLatest, map, of, ReplaySubject, share } from 'rxjs';
import type { DocStorage, SyncStorage } from '../../storage';
import { DummyDocStorage } from '../../storage/dummy/doc';
@@ -38,18 +38,32 @@ export class DocSyncImpl implements DocSync {
);
private abort: AbortController | null = null;
get state$() {
return combineLatest(this.peers.map(peer => peer.peerState$)).pipe(
map(allPeers => ({
total: allPeers.reduce((acc, peer) => Math.max(acc, peer.total), 0),
syncing: allPeers.reduce((acc, peer) => Math.max(acc, peer.syncing), 0),
synced: allPeers.every(peer => peer.synced),
retrying: allPeers.some(peer => peer.retrying),
errorMessage:
allPeers.find(peer => peer.errorMessage)?.errorMessage ?? null,
}))
) as Observable<DocSyncState>;
}
state$ = combineLatest(this.peers.map(peer => peer.peerState$)).pipe(
map(allPeers =>
allPeers.length === 0
? {
total: 0,
syncing: 0,
synced: true,
retrying: false,
errorMessage: null,
}
: {
total: allPeers.reduce((acc, peer) => Math.max(acc, peer.total), 0),
syncing: allPeers.reduce(
(acc, peer) => Math.max(acc, peer.syncing),
0
),
synced: allPeers.every(peer => peer.synced),
retrying: allPeers.some(peer => peer.retrying),
errorMessage:
allPeers.find(peer => peer.errorMessage)?.errorMessage ?? null,
}
),
share({
connector: () => new ReplaySubject(1),
})
) as Observable<DocSyncState>;
constructor(
readonly storages: PeerStorageOptions<DocStorage>,
@@ -105,7 +119,7 @@ export class DocSyncImpl implements DocSync {
}
stop() {
this.abort?.abort();
this.abort?.abort(MANUALLY_STOP);
this.abort = null;
}

View File

@@ -1,6 +1,6 @@
import { remove } from 'lodash-es';
import { nanoid } from 'nanoid';
import { Observable, Subject } from 'rxjs';
import { Observable, ReplaySubject, share, Subject } from 'rxjs';
import { diffUpdate, encodeStateVectorFromUpdate, mergeUpdates } from 'yjs';
import type { DocStorage, SyncStorage } from '../../storage';
@@ -119,54 +119,65 @@ export class DocSyncPeer {
};
private readonly statusUpdatedSubject$ = new Subject<string | true>();
get peerState$() {
return new Observable<PeerState>(subscribe => {
const next = () => {
if (this.status.skipped) {
subscribe.next({
total: 0,
syncing: 0,
synced: true,
retrying: false,
errorMessage: null,
});
} else if (!this.status.syncing) {
// if syncing = false, jobMap is empty
subscribe.next({
total: this.status.docs.size,
syncing: this.status.docs.size,
synced: false,
retrying: this.status.retrying,
errorMessage: this.status.errorMessage,
});
} else {
const syncing = this.status.jobMap.size;
subscribe.next({
total: this.status.docs.size,
syncing: syncing,
retrying: this.status.retrying,
errorMessage: this.status.errorMessage,
synced: syncing === 0,
});
}
};
peerState$ = new Observable<PeerState>(subscribe => {
const next = () => {
if (this.status.skipped) {
subscribe.next({
total: 0,
syncing: 0,
synced: true,
retrying: false,
errorMessage: null,
});
} else if (!this.status.syncing) {
// if syncing = false, jobMap is empty
subscribe.next({
total: this.status.docs.size,
syncing: this.status.docs.size,
synced: false,
retrying: this.status.retrying,
errorMessage: this.status.errorMessage,
});
} else {
const syncing = this.status.jobMap.size;
subscribe.next({
total: this.status.docs.size,
syncing: syncing,
retrying: this.status.retrying,
errorMessage: this.status.errorMessage,
synced: syncing === 0,
});
}
};
next();
const dispose = this.statusUpdatedSubject$.subscribe(() => {
next();
return this.statusUpdatedSubject$.subscribe(() => {
next();
});
});
}
return () => {
dispose.unsubscribe();
};
}).pipe(
share({
connector: () => new ReplaySubject(1),
})
);
docState$(docId: string) {
return new Observable<PeerDocState>(subscribe => {
const next = () => {
const syncing =
!this.status.connectedDocs.has(docId) ||
this.status.jobMap.has(docId);
if (this.status.skipped) {
subscribe.next({
syncing: false,
synced: true,
retrying: false,
errorMessage: null,
});
}
subscribe.next({
syncing: syncing,
synced: !syncing,
syncing:
!this.status.connectedDocs.has(docId) ||
this.status.jobMap.has(docId),
synced: !this.status.jobMap.has(docId),
retrying: this.status.retrying,
errorMessage: this.status.errorMessage,
});
@@ -524,10 +535,6 @@ export class DocSyncPeer {
const disposes: (() => void)[] = [];
try {
console.info('Remote sync started');
this.status.syncing = true;
this.statusUpdatedSubject$.next(true);
// wait for all storages to connect, timeout after 30s
await Promise.race([
Promise.all([
@@ -547,6 +554,10 @@ export class DocSyncPeer {
}),
]);
console.info('Remote sync started');
this.status.syncing = true;
this.statusUpdatedSubject$.next(true);
// throw error if failed to connect
for (const storage of [this.remote, this.local, this.syncMetadata]) {
// abort if disconnected

View File

@@ -0,0 +1,73 @@
import {
applyUpdate,
type Array as YArray,
Doc as YDoc,
type Map as YMap,
} from 'yjs';
type PromiseResult<T> = T extends Promise<infer R> ? R : never;
export type IdConverter = PromiseResult<ReturnType<typeof getIdConverter>>;
export async function getIdConverter(
storage: {
getDocBuffer: (id: string) => Promise<Uint8Array | null>;
},
spaceId: string
) {
const oldIdToNewId = { [spaceId]: spaceId };
const newIdToOldId = { [spaceId]: spaceId };
const rootDocBuffer = await storage.getDocBuffer(spaceId);
if (rootDocBuffer) {
const ydoc = new YDoc({
guid: spaceId,
});
applyUpdate(ydoc, rootDocBuffer);
// get all ids from rootDoc.meta.pages.[*].id, trust this id as normalized id
const normalizedDocIds = (
(ydoc.getMap('meta') as YMap<any> | undefined)?.get('pages') as
| YArray<YMap<any>>
| undefined
)
?.map(i => i.get('id') as string)
.filter(i => !!i);
const spaces = ydoc.getMap('spaces') as YMap<any> | undefined;
for (const pageId of normalizedDocIds ?? []) {
const subdoc = spaces?.get(pageId);
if (subdoc && subdoc instanceof YDoc) {
oldIdToNewId[subdoc.guid] = pageId;
newIdToOldId[pageId] = subdoc.guid;
}
}
}
return {
newIdToOldId(newId: string) {
if (newId.startsWith(`db$`)) {
// db$docId -> db$${spaceId}$docId
return newId.replace(`db$`, `db$${spaceId}$`);
}
if (newId.startsWith(`userdata$`)) {
// userdata$userId$docId -> userdata$userId$spaceId$docId
return newId.replace(
new RegExp(`^(userdata\\$[\\w-]+)\\$([^\\$]+)`),
(_, p1, p2) => `${p1}$${spaceId}$${p2}`
);
}
return newIdToOldId[newId] ?? newId;
},
oldIdToNewId(oldId: string) {
// db$${spaceId}$docId -> db$docId
if (oldId.startsWith(`db$${spaceId}$`)) {
return oldId.replace(`db$${spaceId}$`, `db$`);
}
// userdata$userId$spaceId$docId -> userdata$userId$docId
if (oldId.match(new RegExp(`^userdata\\$[\\w-]+\\$${spaceId}$`))) {
return oldId.replace(`$${spaceId}$`, '$');
}
return oldIdToNewId[oldId] ?? oldId;
},
};
}

View File

@@ -23,7 +23,6 @@ export class WorkerClient {
private readonly client: OpClient<WorkerOps>,
options: WorkerInitOptions
) {
client.listen();
this.client.call('worker.init', options).catch(err => {
console.error('error initializing worker', err);
});
@@ -156,7 +155,9 @@ class WorkerBlobStorage implements BlobStorage {
class WorkerDocSync implements DocSync {
constructor(private readonly client: OpClient<WorkerOps>) {}
readonly state$ = this.client.ob$('docSync.state');
get state$() {
return this.client.ob$('docSync.state');
}
docState$(docId: string) {
return this.client.ob$('docSync.docState', docId);
@@ -174,7 +175,9 @@ class WorkerDocSync implements DocSync {
class WorkerBlobSync implements BlobSync {
constructor(private readonly client: OpClient<WorkerOps>) {}
readonly state$ = this.client.ob$('blobSync.state');
get state$() {
return this.client.ob$('blobSync.state');
}
setMaxBlobSize(size: number): void {
this.client.call('blobSync.setMaxBlobSize', size).catch(err => {
console.error('error setting max blob size', err);

View File

@@ -1,3 +1,4 @@
import { MANUALLY_STOP } from '@toeverything/infra';
import type { OpConsumer } from '@toeverything/infra/op';
import { Observable } from 'rxjs';
@@ -11,6 +12,7 @@ import type { WorkerInitOptions, WorkerOps } from './ops';
export type { WorkerOps };
export class WorkerConsumer {
private inited = false;
private storages: PeerStorageOptions<SpaceStorage> | null = null;
private sync: Sync | null = null;
@@ -57,14 +59,18 @@ export class WorkerConsumer {
}
constructor(
private readonly consumer: OpConsumer<WorkerOps>,
private readonly availableStorageImplementations: StorageConstructor[]
) {
this.registerHandlers();
this.consumer.listen();
) {}
bindConsumer(consumer: OpConsumer<WorkerOps>) {
this.registerHandlers(consumer);
}
init(init: WorkerInitOptions) {
if (this.inited) {
return;
}
this.inited = true;
this.storages = {
local: new SpaceStorage(
Object.fromEntries(
@@ -120,13 +126,13 @@ export class WorkerConsumer {
}
}
private registerHandlers() {
private registerHandlers(consumer: OpConsumer<WorkerOps>) {
const collectJobs = new Map<
string,
(awareness: AwarenessRecord | null) => void
>();
let collectId = 0;
this.consumer.registerAll({
consumer.registerAll({
'worker.init': this.init.bind(this),
'worker.destroy': this.destroy.bind(this),
'docStorage.getDoc': (docId: string) => this.docStorage.getDoc(docId),
@@ -158,7 +164,7 @@ export class WorkerConsumer {
.catch((error: any) => {
subscriber.error(error);
});
return () => abortController.abort();
return () => abortController.abort(MANUALLY_STOP);
}),
'blobStorage.getBlob': key => this.blobStorage.get(key),
'blobStorage.setBlob': blob => this.blobStorage.set(blob),
@@ -212,13 +218,7 @@ export class WorkerConsumer {
}),
'awarenessStorage.collect': ({ collectId, awareness }) =>
collectJobs.get(collectId)?.(awareness),
'docSync.state': () =>
new Observable(subscriber => {
const subscription = this.docSync.state$.subscribe(state => {
subscriber.next(state);
});
return () => subscription.unsubscribe();
}),
'docSync.state': () => this.docSync.state$,
'docSync.docState': docId =>
new Observable(subscriber => {
const subscription = this.docSync
@@ -247,7 +247,7 @@ export class WorkerConsumer {
.catch(error => {
subscriber.error(error);
});
return () => abortController.abort();
return () => abortController.abort(MANUALLY_STOP);
}),
'blobSync.state': () => this.blobSync.state$,
'blobSync.setMaxBlobSize': size => this.blobSync.setMaxBlobSize(size),
@@ -262,7 +262,7 @@ export class WorkerConsumer {
this.awarenessSync.update(awareness, origin),
'awarenessSync.subscribeUpdate': docId =>
new Observable(subscriber => {
return this.awarenessStorage.subscribeUpdate(
return this.awarenessSync.subscribeUpdate(
docId,
(update, origin) => {
subscriber.next({
@@ -279,6 +279,10 @@ export class WorkerConsumer {
collectJobs.delete(currentCollectId.toString());
});
});
subscriber.next({
type: 'awareness-collect',
collectId: currentCollectId.toString(),
});
return promise;
}
);