feat(server): sync data with ack (#4791)

This commit is contained in:
liuyi
2023-11-02 17:05:28 +08:00
committed by GitHub
parent af9663d3e7
commit 6a93203d68
11 changed files with 697 additions and 133 deletions

View File

@@ -0,0 +1,107 @@
/**
 * Low-level transport that pushes a batch of binary doc updates to the
 * server in a single request.
 *
 * @param guid - id of the (sub)doc the updates belong to
 * @param updates - binary updates to deliver together in one request
 * @returns `accepted` — whether the server acknowledged the batch;
 *          `retry` — whether a rejected batch may safely be re-sent
 */
interface SyncUpdateSender {
  (
    guid: string,
    updates: Uint8Array[]
  ): Promise<{
    accepted: boolean;
    retry: boolean;
  }>;
}
/**
 * BatchSyncSender is a thin wrapper around a vanilla update sender that adds
 * several features:
 * - ACK mechanism: batches are sent strictly sequentially; a new request is
 *   only issued after the previous one has been acknowledged
 * - batching: while waiting for the previous ACK, newly queued updates are
 *   buffered and flushed together in a single sync request
 * - retry: when the previous request was rejected but the server set the
 *   `retry` flag, the buffered updates are sent again
 */
export class BatchSyncSender {
  /** Upper bound on how many buffered updates go into one sync request. */
  private static readonly MAX_BATCH_SIZE = 100;

  // updates queued locally but not yet accepted by the server
  private buffered: Uint8Array[] = [];
  // in-flight sync request, or null when idle
  private job: Promise<void> | null = null;
  // when false, updates are buffered but not sent until start() is called
  private started = true;

  constructor(
    private readonly guid: string,
    private readonly rawSender: SyncUpdateSender
  ) {}

  /**
   * Queue an update for delivery. Resolves immediately; actual delivery is
   * asynchronous, batched, and acknowledged out of band.
   */
  send(update: Uint8Array) {
    this.buffered.push(update);
    this.next();
    return Promise.resolve();
  }

  /** Pause sending; already-buffered updates are kept. */
  stop() {
    this.started = false;
  }

  /** Resume sending and flush any buffered updates. */
  start() {
    this.started = true;
    this.next();
  }

  /** Issue the next sync request if started, idle, and there is work. */
  private next() {
    if (!this.started || this.job || !this.buffered.length) {
      return;
    }

    const batchSize = Math.min(
      this.buffered.length,
      BatchSyncSender.MAX_BATCH_SIZE
    );
    const updates = this.buffered.slice(0, batchSize);

    this.job = this.rawSender(this.guid, updates)
      .then(({ accepted, retry }) => {
        // remove pending updates only once the server has accepted them
        if (accepted) {
          this.buffered.splice(0, batchSize);
        }

        if (accepted || retry) {
          // defer the next batch to avoid call stack overflow
          setTimeout(() => {
            this.next();
          }, 0);
        } else {
          // previous sending failed and is non-recoverable
          this.stop();
        }
      })
      .catch(() => {
        // transport failure: hold buffered updates until restarted
        this.stop();
      })
      .finally(() => {
        this.job = null;
      });
  }
}
/**
 * Fans a single SyncUpdateSender out to one BatchSyncSender per doc guid,
 * creating each per-doc sender lazily on first use.
 */
export class MultipleBatchSyncSender {
  private senders: Record<string, BatchSyncSender> = {};

  constructor(private readonly rawSender: SyncUpdateSender) {}

  /** Queue an update for the doc identified by `guid`. */
  async send(guid: string, update: Uint8Array) {
    return this.getSender(guid).send(update);
  }

  /** Return the cached per-doc sender, creating it on first access. */
  private getSender(guid: string) {
    const existing = this.senders[guid];
    if (existing) {
      return existing;
    }
    const created = new BatchSyncSender(guid, this.rawSender);
    this.senders[guid] = created;
    return created;
  }

  /** Resume every per-doc sender created so far. */
  start() {
    for (const sender of Object.values(this.senders)) {
      sender.start();
    }
  }

  /** Pause every per-doc sender created so far. */
  stop() {
    for (const sender of Object.values(this.senders)) {
      sender.stop();
    }
  }
}

View File

@@ -59,14 +59,6 @@ export const CRUD: WorkspaceCRUD<WorkspaceFlavour.AFFINE_CLOUD> = {
WorkspaceFlavour.AFFINE_CLOUD
);
const datasource = createAffineDataSource(
createWorkspace.id,
newBlockSuiteWorkspace.doc,
newBlockSuiteWorkspace.awarenessStore.awareness
);
await syncDataSourceFromDoc(upstreamWorkspace.doc, datasource);
Y.applyUpdate(
newBlockSuiteWorkspace.doc,
Y.encodeStateAsUpdate(upstreamWorkspace.doc)
@@ -85,6 +77,16 @@ export const CRUD: WorkspaceCRUD<WorkspaceFlavour.AFFINE_CLOUD> = {
})
);
const datasource = createAffineDataSource(
createWorkspace.id,
newBlockSuiteWorkspace.doc,
newBlockSuiteWorkspace.awarenessStore.awareness
);
const disconnect = datasource.onDocUpdate(() => {});
await syncDataSourceFromDoc(upstreamWorkspace.doc, datasource);
disconnect();
const provider = createIndexedDBProvider(
newBlockSuiteWorkspace.doc,
DEFAULT_DB_NAME

View File

@@ -1,4 +1,5 @@
import { DebugLogger } from '@affine/debug';
import { isEqual } from 'lodash-es';
import type { Socket } from 'socket.io-client';
import { Manager } from 'socket.io-client';
import {
@@ -10,6 +11,7 @@ import {
import type { DocDataSource } from 'y-provider';
import type { Doc } from 'yjs';
import { MultipleBatchSyncSender } from './batch-sync-sender';
import {
type AwarenessChanges,
base64ToUint8Array,
@@ -41,8 +43,44 @@ export const createAffineDataSource = (
console.warn('important!! please use doc.guid as roomName');
}
logger.debug('createAffineDataSource', id, rootDoc.guid, awareness);
logger.debug('createAffineDataSource', id, rootDoc.guid);
const socket = getIoManager().socket('/');
const syncSender = new MultipleBatchSyncSender(async (guid, updates) => {
const payload = await Promise.all(
updates.map(update => uint8ArrayToBase64(update))
);
return new Promise(resolve => {
socket.emit(
'client-update-v2',
{
workspaceId: rootDoc.guid,
guid,
updates: payload,
},
(response: {
// TODO: reuse `EventError` with server
error?: any;
data: any;
}) => {
// TODO: raise error with different code to users
if (response.error) {
logger.error('client-update-v2 error', {
workspaceId: rootDoc.guid,
guid,
response,
});
}
resolve({
accepted: !response.error,
// TODO: reuse `EventError` with server
retry: response.error?.code === 'INTERNAL',
});
}
);
});
});
return {
get socket() {
@@ -54,78 +92,93 @@ export const createAffineDataSource = (
: undefined;
return new Promise((resolve, reject) => {
logger.debug('doc-load', {
logger.debug('doc-load-v2', {
workspaceId: rootDoc.guid,
guid,
stateVector,
});
socket.emit(
'doc-load',
'doc-load-v2',
{
workspaceId: rootDoc.guid,
guid,
stateVector,
},
(docState: Error | { missing: string; state: string } | null) => {
(
response: // TODO: reuse `EventError` with server
{ error: any } | { data: { missing: string; state: string } }
) => {
logger.debug('doc-load callback', {
workspaceId: rootDoc.guid,
guid,
stateVector,
docState,
response,
});
if (docState instanceof Error) {
reject(docState);
return;
}
resolve(
docState
? {
missing: base64ToUint8Array(docState.missing),
state: docState.state
? base64ToUint8Array(docState.state)
: undefined,
}
: false
);
if ('error' in response) {
// TODO: reuse `EventError` with server
if (response.error.code === 'DOC_NOT_FOUND') {
resolve(false);
} else {
reject(new Error(response.error.message));
}
} else {
resolve({
missing: base64ToUint8Array(response.data.missing),
state: response.data.state
? base64ToUint8Array(response.data.state)
: undefined,
});
}
}
);
});
},
sendDocUpdate: async (guid: string, update: Uint8Array) => {
logger.debug('client-update', {
logger.debug('client-update-v2', {
workspaceId: rootDoc.guid,
guid,
update,
});
socket.emit('client-update', {
workspaceId: rootDoc.guid,
guid,
update: await uint8ArrayToBase64(update),
});
return Promise.resolve();
await syncSender.send(guid, update);
},
onDocUpdate: callback => {
socket.on('connect', () => {
socket.emit('client-handshake', rootDoc.guid);
});
const onUpdate = async (message: {
workspaceId: string;
guid: string;
update: string;
updates: string[];
}) => {
if (message.workspaceId === rootDoc.guid) {
callback(message.guid, base64ToUint8Array(message.update));
message.updates.forEach(update => {
callback(message.guid, base64ToUint8Array(update));
});
}
};
socket.on('server-update', onUpdate);
const destroyAwareness = setupAffineAwareness(socket, rootDoc, awareness);
let destroyAwareness = () => {};
socket.on('server-updates', onUpdate);
socket.on('connect', () => {
socket.emit(
'client-handshake',
rootDoc.guid,
(response: { error?: any }) => {
if (!response.error) {
syncSender.start();
destroyAwareness = setupAffineAwareness(
socket,
rootDoc,
awareness
);
}
}
);
});
socket.connect();
return () => {
syncSender.stop();
socket.emit('client-leave', rootDoc.guid);
socket.off('server-update', onUpdate);
socket.off('server-updates', onUpdate);
destroyAwareness();
socket.disconnect();
};
@@ -138,6 +191,23 @@ function setupAffineAwareness(
rootDoc: Doc,
awareness: Awareness
) {
let lastAwarenessState: Map<number, any> = new Map();
// can't compare on update binary because the protocol will encode clock in it but the state is still the same
const compareAwarenessState = (clients: number[]) => {
const newAwarenessState = new Map();
clients.forEach(client => {
newAwarenessState.set(client, awareness.states.get(client));
});
const equal = isEqual(lastAwarenessState, newAwarenessState);
if (!equal) {
lastAwarenessState = newAwarenessState;
}
return equal;
};
const awarenessBroadcast = ({
workspaceId,
awarenessUpdate,
@@ -148,7 +218,6 @@ function setupAffineAwareness(
if (workspaceId !== rootDoc.guid) {
return;
}
applyAwarenessUpdate(
awareness,
base64ToUint8Array(awarenessUpdate),
@@ -166,6 +235,11 @@ function setupAffineAwareness(
...cur,
]);
// hit the last awareness update cache, skip
if (compareAwarenessState(changedClients)) {
return;
}
const update = encodeAwarenessUpdate(awareness, changedClients);
uint8ArrayToBase64(update)
.then(encodedUpdate => {
@@ -174,7 +248,7 @@ function setupAffineAwareness(
awarenessUpdate: encodedUpdate,
});
})
.catch(err => console.error(err));
.catch(err => logger.error(err));
};
const newClientAwarenessInitHandler = () => {
@@ -188,7 +262,7 @@ function setupAffineAwareness(
awarenessUpdate: encodedAwarenessUpdate,
});
})
.catch(err => console.error(err));
.catch(err => logger.error(err));
};
const windowBeforeUnloadHandler = () => {
@@ -199,12 +273,10 @@ function setupAffineAwareness(
conn.on('new-client-awareness-init', newClientAwarenessInitHandler);
awareness.on('update', awarenessUpdate);
conn.on('connect', () => {
conn.emit('awareness-init', rootDoc.guid);
});
window.addEventListener('beforeunload', windowBeforeUnloadHandler);
conn.emit('awareness-init', rootDoc.guid);
return () => {
awareness.off('update', awarenessUpdate);
conn.off('server-awareness-broadcast', awarenessBroadcast);

View File

@@ -38,13 +38,10 @@ const createAffineSocketIOProvider: DocProviderCreator = (
const lazyProvider = createLazyProvider(doc, dataSource, {
origin: 'affine-socket-io',
});
return {
flavour: 'affine-socket-io',
...lazyProvider,
get status() {
return lazyProvider.status;
},
};
Object.assign(lazyProvider, { flavour: 'affine-socket-io' });
return lazyProvider as unknown as AffineSocketIOProvider;
};
const createIndexedDBBackgroundProvider: DocProviderCreator = (