refactor(workspace): split workspace interface and implementation (#5463)

@affine/workspace -> (@affine/workspace, @affine/workspace-impl)
This commit is contained in:
EYHN
2024-01-02 10:58:01 +00:00
parent 9d0b3b4947
commit 104c21d84c
77 changed files with 325 additions and 163 deletions

View File

@@ -0,0 +1 @@
lib

View File

@@ -0,0 +1,36 @@
{
"name": "@affine/workspace-impl",
"private": true,
"main": "./src/index.ts",
"exports": {
".": "./src/index.ts"
},
"peerDependencies": {
"@blocksuite/blocks": "*",
"@blocksuite/global": "*",
"@blocksuite/store": "*"
},
"dependencies": {
"@affine/debug": "workspace:*",
"@affine/electron-api": "workspace:*",
"@affine/env": "workspace:*",
"@affine/graphql": "workspace:*",
"@affine/workspace": "workspace:*",
"@toeverything/infra": "workspace:*",
"idb": "^8.0.0",
"idb-keyval": "^6.2.1",
"is-svg": "^5.0.0",
"lodash-es": "^4.17.21",
"nanoid": "^5.0.3",
"next-auth": "^4.24.5",
"socket.io-client": "^4.7.2",
"y-protocols": "^1.0.6",
"yjs": "^13.6.10"
},
"devDependencies": {
"fake-indexeddb": "^5.0.0",
"vitest": "1.1.1",
"ws": "^8.14.2"
},
"version": "0.11.0"
}

View File

@@ -0,0 +1,108 @@
import { DebugLogger } from '@affine/debug';
import type { AwarenessProvider } from '@affine/workspace';
import {
applyAwarenessUpdate,
type Awareness,
encodeAwarenessUpdate,
removeAwarenessStates,
} from 'y-protocols/awareness';
import { getIoManager } from '../utils/affine-io';
import { base64ToUint8Array, uint8ArrayToBase64 } from '../utils/base64';
const logger = new DebugLogger('affine:awareness:socketio');

type AwarenessChanges = Record<'added' | 'updated' | 'removed', number[]>;

/**
 * Awareness provider that bridges a local y-protocols Awareness instance to
 * the AFFiNE cloud over socket.io: local changes are encoded and emitted,
 * remote broadcasts are applied with origin 'remote'.
 *
 * @param workspaceId - id of the workspace whose awareness is synced
 * @param awareness - the y-protocols Awareness instance to bridge
 */
export function createCloudAwarenessProvider(
  workspaceId: string,
  awareness: Awareness
): AwarenessProvider {
  const socket = getIoManager().socket('/');

  // Apply a server broadcast, ignoring traffic for other workspaces.
  const awarenessBroadcast = ({
    workspaceId: wsId,
    awarenessUpdate,
  }: {
    workspaceId: string;
    awarenessUpdate: string;
  }) => {
    if (wsId !== workspaceId) {
      return;
    }
    applyAwarenessUpdate(
      awareness,
      base64ToUint8Array(awarenessUpdate),
      'remote'
    );
  };

  // Forward local awareness changes to the server. Updates tagged with
  // origin 'remote' came from the socket and must not be echoed back.
  const awarenessUpdate = (changes: AwarenessChanges, origin: unknown) => {
    if (origin === 'remote') {
      return;
    }
    const changedClients = Object.values(changes).reduce((res, cur) =>
      res.concat(cur)
    );
    const update = encodeAwarenessUpdate(awareness, changedClients);
    uint8ArrayToBase64(update)
      .then(encodedUpdate => {
        socket.emit('awareness-update', {
          workspaceId: workspaceId,
          awarenessUpdate: encodedUpdate,
        });
      })
      .catch(err => logger.error(err));
  };

  // When another client joins, re-announce our own awareness state.
  const newClientAwarenessInitHandler = () => {
    const awarenessUpdate = encodeAwarenessUpdate(awareness, [
      awareness.clientID,
    ]);
    uint8ArrayToBase64(awarenessUpdate)
      .then(encodedAwarenessUpdate => {
        socket.emit('awareness-update', {
          // NOTE(review): this payload uses `guid` while the handler above
          // uses `workspaceId` — presumably the server accepts both; confirm
          // against the server's event schema.
          guid: workspaceId,
          awarenessUpdate: encodedAwarenessUpdate,
        });
      })
      .catch(err => logger.error(err));
  };

  const windowBeforeUnloadHandler = () => {
    removeAwarenessStates(awareness, [awareness.clientID], 'window unload');
  };

  // Re-run the handshake whenever the socket (re)connects.
  function handleConnect() {
    socket.emit('client-handshake-awareness', workspaceId);
    socket.emit('awareness-init', workspaceId);
  }

  return {
    connect: () => {
      socket.on('server-awareness-broadcast', awarenessBroadcast);
      socket.on('new-client-awareness-init', newClientAwarenessInitHandler);
      awareness.on('update', awarenessUpdate);
      window.addEventListener('beforeunload', windowBeforeUnloadHandler);
      socket.connect();
      socket.on('connect', handleConnect);
      // also emit immediately in case the shared socket is already connected
      // and no 'connect' event will fire
      socket.emit('client-handshake-awareness', workspaceId);
      socket.emit('awareness-init', workspaceId);
    },
    disconnect: () => {
      removeAwarenessStates(awareness, [awareness.clientID], 'disconnect');
      awareness.off('update', awarenessUpdate);
      socket.emit('client-leave-awareness', workspaceId);
      socket.off('server-awareness-broadcast', awarenessBroadcast);
      socket.off('new-client-awareness-init', newClientAwarenessInitHandler);
      socket.off('connect', handleConnect);
      // bug fix: unregister the same event we registered ('beforeunload');
      // previously this removed 'unload', leaking the listener
      window.removeEventListener('beforeunload', windowBeforeUnloadHandler);
    },
  };
}

View File

@@ -0,0 +1,77 @@
import {
checkBlobSizesQuery,
deleteBlobMutation,
fetchWithTraceReport,
getBaseUrl,
listBlobsQuery,
setBlobMutation,
} from '@affine/graphql';
import { fetcher } from '@affine/graphql';
import type { BlobStorage } from '@affine/workspace';
import { bufferToBlob } from '../utils/buffer-to-blob';
export const createAffineCloudBlobStorage = (
workspaceId: string
): BlobStorage => {
return {
name: 'affine-cloud',
readonly: false,
get: async key => {
const suffix = key.startsWith('/')
? key
: `/api/workspaces/${workspaceId}/blobs/${key}`;
return fetchWithTraceReport(getBaseUrl() + suffix).then(async res => {
if (!res.ok) {
// status not in the range 200-299
return null;
}
return bufferToBlob(await res.arrayBuffer());
});
},
set: async (key, value) => {
const {
checkBlobSize: { size },
} = await fetcher({
query: checkBlobSizesQuery,
variables: {
workspaceId,
size: value.size,
},
});
if (size <= 0) {
throw new Error('Blob size limit exceeded');
}
const result = await fetcher({
query: setBlobMutation,
variables: {
workspaceId,
blob: new File([value], key),
},
});
console.assert(result.setBlob === key, 'Blob hash mismatch');
return result.setBlob;
},
list: async () => {
const result = await fetcher({
query: listBlobsQuery,
variables: {
workspaceId,
},
});
return result.listBlobs;
},
delete: async (key: string) => {
await fetcher({
query: deleteBlobMutation,
variables: {
workspaceId,
hash: key,
},
});
},
};
};

View File

@@ -0,0 +1,2 @@
// BroadcastChannel name used to notify other browser tabs that the cloud
// workspace list changed (created/deleted) so they can refresh their lists.
export const CLOUD_WORKSPACE_CHANGED_BROADCAST_CHANNEL_KEY =
  'affine-cloud-workspace-changed';

View File

@@ -0,0 +1,6 @@
// Barrel file for the AFFiNE cloud workspace implementation.
export * from './awareness';
export * from './blob';
export * from './consts';
export * from './list';
export * from './sync';
export * from './workspace-factory';

View File

@@ -0,0 +1,155 @@
import { WorkspaceFlavour } from '@affine/env/workspace';
import {
createWorkspaceMutation,
deleteWorkspaceMutation,
getWorkspacesQuery,
} from '@affine/graphql';
import { fetcher } from '@affine/graphql';
import type { WorkspaceListProvider } from '@affine/workspace';
import { globalBlockSuiteSchema } from '@affine/workspace';
import { Workspace as BlockSuiteWorkspace } from '@blocksuite/store';
import { difference } from 'lodash-es';
import { nanoid } from 'nanoid';
import { applyUpdate, encodeStateAsUpdate } from 'yjs';
import { createLocalBlobStorage } from '../local/blob';
import { createLocalStorage } from '../local/sync';
import { CLOUD_WORKSPACE_CHANGED_BROADCAST_CHANNEL_KEY } from './consts';
import { createAffineStaticStorage } from './sync';
/**
 * Fetch the list of cloud workspaces for the current user.
 *
 * @returns workspace metadata entries with the AFFINE_CLOUD flavour; an
 *   empty list when the user is not logged in.
 */
async function getCloudWorkspaceList() {
  try {
    const { workspaces } = await fetcher({
      query: getWorkspacesQuery,
    });
    const ids = workspaces.map(({ id }) => id);
    return ids.map(id => ({
      id,
      flavour: WorkspaceFlavour.AFFINE_CLOUD,
    }));
  } catch (err) {
    // the graphql client rejects with an array of GraphQL errors;
    // Array.isArray is the idiomatic, cross-realm-safe check
    // (instanceof Array fails for arrays from another realm)
    if (Array.isArray(err) && err[0]?.message === 'Forbidden resource') {
      // user not logged in
      return [];
    }
    throw err;
  }
}
/**
 * Workspace list provider for AFFiNE cloud workspaces.
 *
 * Lists, creates and deletes cloud workspaces, and broadcasts changes to
 * other browser tabs through a BroadcastChannel so every tab can refresh.
 */
export function createCloudWorkspaceListProvider(): WorkspaceListProvider {
  const notifyChannel = new BroadcastChannel(
    CLOUD_WORKSPACE_CHANGED_BROADCAST_CHANNEL_KEY
  );
  return {
    name: WorkspaceFlavour.AFFINE_CLOUD,
    async getList() {
      return getCloudWorkspaceList();
    },
    async create(initial) {
      const tempId = nanoid();
      // temporary blocksuite workspace used to build the initial doc state
      const workspace = new BlockSuiteWorkspace({
        id: tempId,
        idGenerator: () => nanoid(),
        schema: globalBlockSuiteSchema,
      });
      // create workspace on cloud, get workspace id
      const {
        createWorkspace: { id: workspaceId },
      } = await fetcher({
        query: createWorkspaceMutation,
      });
      // save the initial state to local storage, then sync to cloud
      const blobStorage = createLocalBlobStorage(workspaceId);
      const syncStorage = createLocalStorage(workspaceId);
      // apply initial state
      await initial(workspace, blobStorage);
      // save workspace to local storage, should be very fast
      await syncStorage.push(workspaceId, encodeStateAsUpdate(workspace.doc));
      for (const subdocs of workspace.doc.getSubdocs()) {
        await syncStorage.push(subdocs.guid, encodeStateAsUpdate(subdocs));
      }
      // notify all browser tabs, so they can update their workspace list
      notifyChannel.postMessage(null);
      return workspaceId;
    },
    async delete(id) {
      await fetcher({
        query: deleteWorkspaceMutation,
        variables: {
          id,
        },
      });
      // notify all browser tabs, so they can update their workspace list
      notifyChannel.postMessage(null);
    },
    subscribe(callback) {
      let lastWorkspaceIDs: string[] = [];
      // diff the current cloud list against the last seen one and report
      // added/deleted workspace ids to the subscriber
      function scan() {
        (async () => {
          const allWorkspaceIDs = (await getCloudWorkspaceList()).map(
            workspace => workspace.id
          );
          const added = difference(allWorkspaceIDs, lastWorkspaceIDs);
          const deleted = difference(lastWorkspaceIDs, allWorkspaceIDs);
          lastWorkspaceIDs = allWorkspaceIDs;
          callback({
            added: added.map(id => ({
              id,
              flavour: WorkspaceFlavour.AFFINE_CLOUD,
            })),
            deleted: deleted.map(id => ({
              id,
              flavour: WorkspaceFlavour.AFFINE_CLOUD,
            })),
          });
        })().catch(err => {
          console.error(err);
        });
      }
      scan();
      // rescan if other tabs notify us
      notifyChannel.addEventListener('message', scan);
      return () => {
        notifyChannel.removeEventListener('message', scan);
      };
    },
    async getInformation(id) {
      // get information from both cloud and local storage
      // we use affine 'static' storage here, which use http protocol, no need to websocket.
      const cloudStorage = createAffineStaticStorage(id);
      const localStorage = createLocalStorage(id);
      // download root doc
      const localData = await localStorage.pull(id, new Uint8Array([]));
      const cloudData = await cloudStorage.pull(id, new Uint8Array([]));
      if (!cloudData && !localData) {
        return;
      }
      // merge local and cloud state into a throwaway workspace so the
      // name/avatar can be read from its meta
      const bs = new BlockSuiteWorkspace({
        id,
        schema: globalBlockSuiteSchema,
      });
      if (localData) applyUpdate(bs.doc, localData.data);
      if (cloudData) applyUpdate(bs.doc, cloudData.data);
      return {
        name: bs.meta.name,
        avatar: bs.meta.avatar,
      };
    },
  };
}

View File

@@ -0,0 +1,107 @@
interface SyncUpdateSender {
  (
    guid: string,
    updates: Uint8Array[]
  ): Promise<{
    accepted: boolean;
    retry: boolean;
  }>;
}

/**
 * BatchSyncSender wraps a raw update sender with:
 * - an ACK mechanism: the next request is only issued once the previous one
 *   has been answered
 * - batching: updates arriving while a request is in flight are buffered and
 *   shipped together in a single request
 * - retry: a failed request is retried when the sender reports it retryable
 */
export class BatchSyncSender {
  private readonly pending: Uint8Array[] = [];
  private inflight: Promise<void> | null = null;
  private running = true;

  constructor(
    private readonly guid: string,
    private readonly rawSender: SyncUpdateSender
  ) {}

  send(update: Uint8Array) {
    this.pending.push(update);
    this.flush();
    return Promise.resolve();
  }

  stop() {
    this.running = false;
  }

  start() {
    this.running = true;
    this.flush();
  }

  private flush() {
    // only one request in flight at a time, and only while running
    if (!this.running || this.inflight || this.pending.length === 0) {
      return;
    }
    // ship at most 100 buffered updates per request
    const batchEnd = Math.min(this.pending.length, 100);
    const batch = this.pending.slice(0, batchEnd);
    if (batch.length) {
      this.inflight = this.rawSender(this.guid, batch)
        .then(({ accepted, retry }) => {
          if (accepted) {
            // drop the updates the server has acknowledged
            this.pending.splice(0, batchEnd);
          }
          if (accepted || retry) {
            // defer the next round to avoid growing the call stack
            setTimeout(() => {
              this.flush();
            }, 0);
          } else {
            // non-recoverable failure: halt until start() is called again
            this.stop();
          }
        })
        .catch(() => {
          this.stop();
        })
        .finally(() => {
          this.inflight = null;
        });
    }
  }
}
/**
 * Routes updates to one BatchSyncSender per document guid, creating senders
 * lazily and fanning start/stop out to all of them.
 */
export class MultipleBatchSyncSender {
  private senders: Record<string, BatchSyncSender> = {};

  constructor(private readonly rawSender: SyncUpdateSender) {}

  async send(guid: string, update: Uint8Array) {
    return this.getSender(guid).send(update);
  }

  // lazily create the per-doc sender on first use
  private getSender(guid: string) {
    const existing = this.senders[guid];
    if (existing) {
      return existing;
    }
    const created = new BatchSyncSender(guid, this.rawSender);
    this.senders[guid] = created;
    return created;
  }

  start() {
    for (const sender of Object.values(this.senders)) {
      sender.start();
    }
  }

  stop() {
    for (const sender of Object.values(this.senders)) {
      sender.stop();
    }
  }
}

View File

@@ -0,0 +1,196 @@
import { DebugLogger } from '@affine/debug';
import { fetchWithTraceReport } from '@affine/graphql';
import type { SyncStorage } from '@affine/workspace';
import { getIoManager } from '../../utils/affine-io';
import { base64ToUint8Array, uint8ArrayToBase64 } from '../../utils/base64';
import { MultipleBatchSyncSender } from './batch-sync-sender';
const logger = new DebugLogger('affine:storage:socketio');

/**
 * SyncStorage implementation that talks to the AFFiNE cloud over socket.io.
 *
 * Outgoing updates are funneled through a MultipleBatchSyncSender so they are
 * batched per doc guid and acknowledged by the server. The returned object
 * additionally exposes `disconnect` to tear the socket session down.
 */
export function createAffineStorage(
  workspaceId: string
): SyncStorage & { disconnect: () => void } {
  logger.debug('createAffineStorage', workspaceId);
  const socket = getIoManager().socket('/');
  const syncSender = new MultipleBatchSyncSender(async (guid, updates) => {
    const payload = await Promise.all(
      updates.map(update => uint8ArrayToBase64(update))
    );
    return new Promise(resolve => {
      socket.emit(
        'client-update-v2',
        {
          workspaceId,
          guid,
          updates: payload,
        },
        (response: {
          // TODO: reuse `EventError` with server
          error?: any;
          data: any;
        }) => {
          // TODO: raise error with different code to users
          if (response.error) {
            logger.error('client-update-v2 error', {
              workspaceId,
              guid,
              response,
            });
          }
          resolve({
            accepted: !response.error,
            // TODO: reuse `EventError` with server
            retry: response.error?.code === 'INTERNAL',
          });
        }
      );
    });
  });
  // (re)start the batched sender once the server accepts the handshake
  function handleConnect() {
    socket.emit(
      'client-handshake-sync',
      workspaceId,
      (response: { error?: any }) => {
        if (!response.error) {
          syncSender.start();
        }
      }
    );
  }
  socket.on('connect', handleConnect);
  socket.connect();
  // NOTE(review): this handshake duplicates handleConnect — presumably to
  // cover the case where the shared socket is already connected and no
  // 'connect' event will fire; confirm the server tolerates a repeat.
  socket.emit(
    'client-handshake-sync',
    workspaceId,
    (response: { error?: any }) => {
      if (!response.error) {
        syncSender.start();
      }
    }
  );
  return {
    name: 'affine-cloud',
    // fetch a doc (diff against the optional state vector) from the server
    async pull(docId, state) {
      const stateVector = state ? await uint8ArrayToBase64(state) : undefined;
      return new Promise((resolve, reject) => {
        logger.debug('doc-load-v2', {
          workspaceId: workspaceId,
          guid: docId,
          stateVector,
        });
        socket.emit(
          'doc-load-v2',
          {
            workspaceId: workspaceId,
            guid: docId,
            stateVector,
          },
          (
            response: // TODO: reuse `EventError` with server
            { error: any } | { data: { missing: string; state: string } }
          ) => {
            logger.debug('doc-load callback', {
              workspaceId: workspaceId,
              guid: docId,
              stateVector,
              response,
            });
            if ('error' in response) {
              // TODO: result `EventError` with server
              if (response.error.code === 'DOC_NOT_FOUND') {
                resolve(null);
              } else {
                reject(new Error(response.error.message));
              }
            } else {
              resolve({
                data: base64ToUint8Array(response.data.missing),
                state: response.data.state
                  ? base64ToUint8Array(response.data.state)
                  : undefined,
              });
            }
          }
        );
      });
    },
    // queue an update for batched delivery to the server
    async push(docId, update) {
      logger.debug('client-update-v2', {
        workspaceId,
        guid: docId,
        update,
      });
      await syncSender.send(docId, update);
    },
    // listen for server-pushed updates belonging to this workspace
    async subscribe(cb, disconnect) {
      const handleUpdate = async (message: {
        workspaceId: string;
        guid: string;
        updates: string[];
      }) => {
        if (message.workspaceId === workspaceId) {
          message.updates.forEach(update => {
            cb(message.guid, base64ToUint8Array(update));
          });
        }
      };
      socket.on('server-updates', handleUpdate);
      socket.on('disconnect', reason => {
        socket.off('server-updates', handleUpdate);
        disconnect(reason);
      });
      return () => {
        socket.off('server-updates', handleUpdate);
      };
    },
    disconnect() {
      syncSender.stop();
      socket.emit('client-leave-sync', workspaceId);
      socket.off('connect', handleConnect);
    },
  };
}
/**
 * Read-only SyncStorage that downloads workspace docs over plain HTTP
 * (no websocket required). push/subscribe are unsupported.
 */
export function createAffineStaticStorage(workspaceId: string): SyncStorage {
  logger.debug('createAffineStaticStorage', workspaceId);
  return {
    name: 'affine-cloud-static',
    async pull(docId) {
      const url =
        runtimeConfig.serverUrlPrefix +
        `/api/workspaces/${workspaceId}/docs/${docId}`;
      const response = await fetchWithTraceReport(url, {
        priority: 'high',
      });
      if (!response.ok) {
        return null;
      }
      const arrayBuffer = await response.arrayBuffer();
      return { data: new Uint8Array(arrayBuffer) };
    },
    async push() {
      throw new Error('Not implemented');
    },
    async subscribe() {
      throw new Error('Not implemented');
    },
  };
}

View File

@@ -0,0 +1,77 @@
import { setupEditorFlags } from '@affine/env/global';
import type { WorkspaceFactory } from '@affine/workspace';
import { BlobEngine, SyncEngine, WorkspaceEngine } from '@affine/workspace';
import { globalBlockSuiteSchema } from '@affine/workspace';
import { Workspace } from '@affine/workspace';
import { Workspace as BlockSuiteWorkspace } from '@blocksuite/store';
import { nanoid } from 'nanoid';
import { createBroadcastChannelAwarenessProvider } from '../local/awareness';
import { createLocalBlobStorage } from '../local/blob';
import { createStaticBlobStorage } from '../local/blob-static';
import { createLocalStorage } from '../local/sync';
import { createCloudAwarenessProvider } from './awareness';
import { createAffineCloudBlobStorage } from './blob';
import { createAffineStorage } from './sync';
/**
 * WorkspaceFactory for AFFiNE cloud workspaces.
 *
 * Wires together blob storage (local first, then cloud, then bundled static
 * assets), the sync engine (local storage as main peer, cloud as remote) and
 * awareness providers (broadcast channel for same-device tabs, socket.io for
 * remote peers).
 */
export const cloudWorkspaceFactory: WorkspaceFactory = {
  name: 'affine-cloud',
  openWorkspace(metadata) {
    const blobEngine = new BlobEngine(createLocalBlobStorage(metadata.id), [
      createAffineCloudBlobStorage(metadata.id),
      createStaticBlobStorage(),
    ]);
    // create blocksuite workspace
    const bs = new BlockSuiteWorkspace({
      id: metadata.id,
      blobStorages: [
        () => ({
          crud: blobEngine,
        }),
      ],
      idGenerator: () => nanoid(),
      schema: globalBlockSuiteSchema,
    });
    const affineStorage = createAffineStorage(metadata.id);
    const syncEngine = new SyncEngine(bs.doc, createLocalStorage(metadata.id), [
      affineStorage,
    ]);
    const awarenessProviders = [
      createBroadcastChannelAwarenessProvider(
        metadata.id,
        bs.awarenessStore.awareness
      ),
      createCloudAwarenessProvider(metadata.id, bs.awarenessStore.awareness),
    ];
    const engine = new WorkspaceEngine(
      blobEngine,
      syncEngine,
      awarenessProviders
    );
    setupEditorFlags(bs);
    const workspace = new Workspace(metadata, engine, bs);
    workspace.onStop.once(() => {
      // affine sync storage need manually disconnect
      affineStorage.disconnect();
    });
    return workspace;
  },
  async getWorkspaceBlob(id: string, blobKey: string): Promise<Blob | null> {
    // try to get blob from local storage first
    const localBlobStorage = createLocalBlobStorage(id);
    const localBlob = await localBlobStorage.get(blobKey);
    if (localBlob) {
      return localBlob;
    }
    // fall back to the cloud blob storage
    const blobStorage = createAffineCloudBlobStorage(id);
    return await blobStorage.get(blobKey);
  },
};

View File

@@ -0,0 +1,41 @@
import { WorkspaceList, WorkspaceManager } from '@affine/workspace';
import {
cloudWorkspaceFactory,
createCloudWorkspaceListProvider,
} from './cloud';
import {
createLocalWorkspaceListProvider,
LOCAL_WORKSPACE_LOCAL_STORAGE_KEY,
localWorkspaceFactory,
} from './local';
// workspace list aggregating both the local and the cloud providers
const list = new WorkspaceList([
  createLocalWorkspaceListProvider(),
  createCloudWorkspaceListProvider(),
]);
// singleton workspace manager shared by the whole app
export const workspaceManager = new WorkspaceManager(list, [
  localWorkspaceFactory,
  cloudWorkspaceFactory,
]);
// exposed on window for debugging from the browser console
(window as any).workspaceManager = workspaceManager;
export * from './cloud';
export * from './local';
/**
 * a hack for directly add local workspace to workspace list
 * Used after copying sqlite database file to appdata folder
 *
 * @param id - id of the local workspace to register
 */
export function _addLocalWorkspace(id: string) {
  const allWorkspaceIDs: string[] = JSON.parse(
    localStorage.getItem(LOCAL_WORKSPACE_LOCAL_STORAGE_KEY) ?? '[]'
  );
  // guard against registering the same workspace twice, which would
  // otherwise leave duplicate entries in the workspace list
  if (!allWorkspaceIDs.includes(id)) {
    allWorkspaceIDs.push(id);
    localStorage.setItem(
      LOCAL_WORKSPACE_LOCAL_STORAGE_KEY,
      JSON.stringify(allWorkspaceIDs)
    );
  }
}

View File

@@ -0,0 +1,178 @@
import 'fake-indexeddb/auto';
import { SyncEngine, SyncEngineStep, SyncPeerStep } from '@affine/workspace';
import { __unstableSchemas, AffineSchemas } from '@blocksuite/blocks/models';
import { Schema, Workspace } from '@blocksuite/store';
import { beforeEach, describe, expect, test, vi } from 'vitest';
import { Doc } from 'yjs';
import { createIndexedDBStorage } from '..';
import { createTestStorage } from './test-storage';
// Integration tests for SyncEngine backed by (fake) IndexedDB storages.
const schema = new Schema();
schema.register(AffineSchemas).register(__unstableSchemas);
beforeEach(() => {
  // SyncEngine schedules work via requestIdleCallback; fake it so vi.waitFor
  // can drive the timers deterministically
  vi.useFakeTimers({ toFake: ['requestIdleCallback'] });
});
describe('SyncEngine', () => {
  test('basic - indexeddb', async () => {
    let prev: any;
    {
      // populate a workspace and sync it to one main + two remote storages
      const workspace = new Workspace({
        id: 'test',
        schema,
      });
      const syncEngine = new SyncEngine(
        workspace.doc,
        createIndexedDBStorage(workspace.doc.guid),
        [
          createIndexedDBStorage(workspace.doc.guid + '1'),
          createIndexedDBStorage(workspace.doc.guid + '2'),
        ]
      );
      syncEngine.start();
      const page = workspace.createPage({
        id: 'page0',
      });
      await page.load();
      const pageBlockId = page.addBlock('affine:page', {
        title: new page.Text(''),
      });
      page.addBlock('affine:surface', {}, pageBlockId);
      const frameId = page.addBlock('affine:note', {}, pageBlockId);
      page.addBlock('affine:paragraph', {}, frameId);
      await syncEngine.waitForSynced();
      syncEngine.forceStop();
      prev = workspace.doc.toJSON();
    }
    {
      // the main storage must reproduce the same document
      const workspace = new Workspace({
        id: 'test',
        schema,
      });
      const syncEngine = new SyncEngine(
        workspace.doc,
        createIndexedDBStorage(workspace.doc.guid),
        []
      );
      syncEngine.start();
      await syncEngine.waitForSynced();
      expect(workspace.doc.toJSON()).toEqual({
        ...prev,
      });
      syncEngine.forceStop();
    }
    {
      // the first remote storage must reproduce the same document
      const workspace = new Workspace({
        id: 'test',
        schema,
      });
      const syncEngine = new SyncEngine(
        workspace.doc,
        createIndexedDBStorage(workspace.doc.guid + '1'),
        []
      );
      syncEngine.start();
      await syncEngine.waitForSynced();
      expect(workspace.doc.toJSON()).toEqual({
        ...prev,
      });
      syncEngine.forceStop();
    }
    {
      // the second remote storage must reproduce the same document
      const workspace = new Workspace({
        id: 'test',
        schema,
      });
      const syncEngine = new SyncEngine(
        workspace.doc,
        createIndexedDBStorage(workspace.doc.guid + '2'),
        []
      );
      syncEngine.start();
      await syncEngine.waitForSynced();
      expect(workspace.doc.toJSON()).toEqual({
        ...prev,
      });
      syncEngine.forceStop();
    }
  });
  test('status', async () => {
    // use pausable storages to observe every intermediate engine state
    const ydoc = new Doc({ guid: 'test - status' });
    const localStorage = createTestStorage(createIndexedDBStorage(ydoc.guid));
    const remoteStorage = createTestStorage(createIndexedDBStorage(ydoc.guid));
    localStorage.pausePull();
    localStorage.pausePush();
    remoteStorage.pausePull();
    remoteStorage.pausePush();
    const syncEngine = new SyncEngine(ydoc, localStorage, [remoteStorage]);
    expect(syncEngine.status.step).toEqual(SyncEngineStep.Stopped);
    syncEngine.start();
    await vi.waitFor(() => {
      expect(syncEngine.status.step).toEqual(SyncEngineStep.Syncing);
      expect(syncEngine.status.local?.step).toEqual(
        SyncPeerStep.LoadingRootDoc
      );
    });
    localStorage.resumePull();
    await vi.waitFor(() => {
      expect(syncEngine.status.step).toEqual(SyncEngineStep.Syncing);
      expect(syncEngine.status.local?.step).toEqual(SyncPeerStep.Synced);
      expect(syncEngine.status.remotes[0]?.step).toEqual(
        SyncPeerStep.LoadingRootDoc
      );
    });
    remoteStorage.resumePull();
    await vi.waitFor(() => {
      expect(syncEngine.status.step).toEqual(SyncEngineStep.Synced);
      expect(syncEngine.status.remotes[0]?.step).toEqual(SyncPeerStep.Synced);
      expect(syncEngine.status.local?.step).toEqual(SyncPeerStep.Synced);
    });
    // a local edit flips the engine (and both peers) back to Syncing
    ydoc.getArray('test').insert(0, [1, 2, 3]);
    await vi.waitFor(() => {
      expect(syncEngine.status.step).toEqual(SyncEngineStep.Syncing);
      expect(syncEngine.status.local?.step).toEqual(SyncPeerStep.Syncing);
      expect(syncEngine.status.remotes[0]?.step).toEqual(SyncPeerStep.Syncing);
    });
    localStorage.resumePush();
    await vi.waitFor(() => {
      expect(syncEngine.status.step).toEqual(SyncEngineStep.Syncing);
      expect(syncEngine.status.local?.step).toEqual(SyncPeerStep.Synced);
      expect(syncEngine.status.remotes[0]?.step).toEqual(SyncPeerStep.Syncing);
    });
    remoteStorage.resumePush();
    await vi.waitFor(() => {
      expect(syncEngine.status.step).toEqual(SyncEngineStep.Synced);
      expect(syncEngine.status.local?.step).toEqual(SyncPeerStep.Synced);
      expect(syncEngine.status.remotes[0]?.step).toEqual(SyncPeerStep.Synced);
    });
  });
});

View File

@@ -0,0 +1,94 @@
import 'fake-indexeddb/auto';
import { SyncPeer, SyncPeerStep } from '@affine/workspace';
import { __unstableSchemas, AffineSchemas } from '@blocksuite/blocks/models';
import { Schema, Workspace } from '@blocksuite/store';
import { beforeEach, describe, expect, test, vi } from 'vitest';
import { createIndexedDBStorage } from '..';
// Tests for a single SyncPeer backed by (fake) IndexedDB storage.
const schema = new Schema();
schema.register(AffineSchemas).register(__unstableSchemas);
beforeEach(() => {
  // SyncPeer schedules work via requestIdleCallback; fake it for determinism
  vi.useFakeTimers({ toFake: ['requestIdleCallback'] });
});
describe('SyncPeer', () => {
  test('basic - indexeddb', async () => {
    let prev: any;
    {
      // populate a workspace and let the peer persist it
      const workspace = new Workspace({
        id: 'test',
        schema,
      });
      const syncPeer = new SyncPeer(
        workspace.doc,
        createIndexedDBStorage(workspace.doc.guid)
      );
      await syncPeer.waitForLoaded();
      const page = workspace.createPage({
        id: 'page0',
      });
      await page.load();
      const pageBlockId = page.addBlock('affine:page', {
        title: new page.Text(''),
      });
      page.addBlock('affine:surface', {}, pageBlockId);
      const frameId = page.addBlock('affine:note', {}, pageBlockId);
      page.addBlock('affine:paragraph', {}, frameId);
      await syncPeer.waitForSynced();
      syncPeer.stop();
      prev = workspace.doc.toJSON();
    }
    {
      // a fresh peer on the same storage must reproduce the document
      const workspace = new Workspace({
        id: 'test',
        schema,
      });
      const syncPeer = new SyncPeer(
        workspace.doc,
        createIndexedDBStorage(workspace.doc.guid)
      );
      await syncPeer.waitForSynced();
      expect(workspace.doc.toJSON()).toEqual({
        ...prev,
      });
      syncPeer.stop();
    }
  });
  test('status', async () => {
    // observe the peer's step transitions across load/sync/edit
    const workspace = new Workspace({
      id: 'test - status',
      schema,
    });
    const syncPeer = new SyncPeer(
      workspace.doc,
      createIndexedDBStorage(workspace.doc.guid)
    );
    expect(syncPeer.status.step).toBe(SyncPeerStep.LoadingRootDoc);
    await syncPeer.waitForSynced();
    expect(syncPeer.status.step).toBe(SyncPeerStep.Synced);
    const page = workspace.createPage({
      id: 'page0',
    });
    expect(syncPeer.status.step).toBe(SyncPeerStep.LoadingSubDoc);
    await page.load();
    await syncPeer.waitForSynced();
    page.addBlock('affine:page', {
      title: new page.Text(''),
    });
    expect(syncPeer.status.step).toBe(SyncPeerStep.Syncing);
    syncPeer.stop();
  });
});

View File

@@ -0,0 +1,42 @@
import type { SyncStorage } from '@affine/workspace';
/**
 * Wrap a SyncStorage so tests can suspend and resume pull/push traffic.
 * While paused, calls queue behind a gate promise and only complete once the
 * matching resume* method is invoked.
 */
export function createTestStorage(origin: SyncStorage) {
  // gates start open (resolved); pause* swaps in a fresh pending promise
  let pullGate = Promise.resolve();
  let openPullGate: () => void = () => {};
  let pushGate = Promise.resolve();
  let openPushGate: () => void = () => {};
  return {
    name: `${origin.name}(testing)`,
    pull(docId: string, state: Uint8Array) {
      return pullGate.then(() => origin.pull(docId, state));
    },
    push(docId: string, data: Uint8Array) {
      return pushGate.then(() => origin.push(docId, data));
    },
    subscribe(
      cb: (docId: string, data: Uint8Array) => void,
      disconnect: (reason: string) => void
    ) {
      return origin.subscribe(cb, disconnect);
    },
    pausePull() {
      pullGate = new Promise(resolve => {
        openPullGate = resolve;
      });
    },
    resumePull() {
      openPullGate();
    },
    pausePush() {
      pushGate = new Promise(resolve => {
        openPushGate = resolve;
      });
    },
    resumePush() {
      openPushGate();
    },
  };
}

View File

@@ -0,0 +1,62 @@
import type { AwarenessProvider } from '@affine/workspace';
import type { Awareness } from 'y-protocols/awareness.js';
import {
applyAwarenessUpdate,
encodeAwarenessUpdate,
} from 'y-protocols/awareness.js';
type AwarenessChanges = Record<'added' | 'updated' | 'removed', number[]>;
type ChannelMessage =
| { type: 'connect' }
| { type: 'update'; update: Uint8Array };
export function createBroadcastChannelAwarenessProvider(
workspaceId: string,
awareness: Awareness
): AwarenessProvider {
const channel = new BroadcastChannel('awareness:' + workspaceId);
function handleAwarenessUpdate(changes: AwarenessChanges, origin: unknown) {
if (origin === 'remote') {
return;
}
const changedClients = Object.values(changes).reduce((res, cur) =>
res.concat(cur)
);
const update = encodeAwarenessUpdate(awareness, changedClients);
channel.postMessage({
type: 'update',
update: update,
} satisfies ChannelMessage);
}
function handleChannelMessage(event: MessageEvent<ChannelMessage>) {
if (event.data.type === 'update') {
const update = event.data.update;
applyAwarenessUpdate(awareness, update, 'remote');
}
if (event.data.type === 'connect') {
channel.postMessage({
type: 'update',
update: encodeAwarenessUpdate(awareness, [awareness.clientID]),
} satisfies ChannelMessage);
}
}
return {
connect() {
channel.postMessage({
type: 'connect',
} satisfies ChannelMessage);
awareness.on('update', handleAwarenessUpdate);
channel.addEventListener('message', handleChannelMessage);
},
disconnect() {
awareness.off('update', handleAwarenessUpdate);
channel.removeEventListener('message', handleChannelMessage);
},
};
}

View File

@@ -0,0 +1,34 @@
import type { BlobStorage } from '@affine/workspace';
import { createStore, del, get, keys, set } from 'idb-keyval';
import { bufferToBlob } from '../utils/buffer-to-blob';
/**
 * BlobStorage persisted in IndexedDB via idb-keyval. Blob bytes live in the
 * `<workspaceId>_blob` store and their mime types in `<workspaceId>_blob_mime`.
 */
export const createIndexeddbBlobStorage = (
  workspaceId: string
): BlobStorage => {
  const blobDb = createStore(`${workspaceId}_blob`, 'blob');
  const mimeDb = createStore(`${workspaceId}_blob_mime`, 'blob_mime');
  return {
    name: 'indexeddb',
    readonly: false,
    get: async (key: string) => {
      const buffer = await get<ArrayBuffer>(key, blobDb);
      return buffer ? bufferToBlob(buffer) : null;
    },
    set: async (key: string, value: Blob) => {
      // store bytes and mime type side by side under the same key
      await set(key, await value.arrayBuffer(), blobDb);
      await set(key, value.type, mimeDb);
      return key;
    },
    delete: async (key: string) => {
      await del(key, blobDb);
      await del(key, mimeDb);
    },
    list: async () => {
      return keys<string>(blobDb);
    },
  };
};

View File

@@ -0,0 +1,38 @@
import { apis } from '@affine/electron-api';
import type { BlobStorage } from '@affine/workspace';
import { assertExists } from '@blocksuite/global/utils';
import { bufferToBlob } from '../utils/buffer-to-blob';
/**
 * BlobStorage backed by the desktop app's SQLite database, accessed through
 * the electron IPC api. Only available when `apis` is provided (desktop).
 */
export const createSQLiteBlobStorage = (workspaceId: string): BlobStorage => {
  assertExists(apis);
  return {
    name: 'sqlite',
    readonly: false,
    get: async (key: string) => {
      assertExists(apis);
      const buffer = await apis.db.getBlob(workspaceId, key);
      return buffer ? bufferToBlob(buffer) : null;
    },
    set: async (key: string, value: Blob) => {
      assertExists(apis);
      const bytes = new Uint8Array(await value.arrayBuffer());
      await apis.db.addBlob(workspaceId, key, bytes);
      return key;
    },
    delete: async (key: string) => {
      assertExists(apis);
      return apis.db.deleteBlob(workspaceId, key);
    },
    list: async () => {
      assertExists(apis);
      return apis.db.getBlobKeys(workspaceId);
    },
  };
};

View File

@@ -0,0 +1,72 @@
import type { BlobStorage } from '@affine/workspace';
// Hashes of blobs bundled with the application build.
export const predefinedStaticFiles = [
  '029uztLz2CzJezK7UUhrbGiWUdZ0J7NVs_qR6RDsvb8=',
  '047ebf2c9a5c7c9d8521c2ea5e6140ff7732ef9e28a9f944e9bf3ca4',
  '0hjYqQd8SvwHT2gPds7qFw8W6qIEGVbZvG45uzoYjUU=',
  '1326bc48553a572c6756d9ee1b30a0dfdda26222fc2d2c872b14e609',
  '27f983d0765289c19d10ee0b51c00c3c7665236a1a82406370d46e0a',
  '28516717d63e469cd98729ff46be6595711898bab3dc43302319a987',
  '4HXJrnBZGaGPFpowNawNog0aMg3dgoVaAnNqEMeUxq0=',
  '5Cfem_137WmzR35ZeIC76oTkq5SQt-eHlZwJiLy0hgU=',
  '6aa785ee927547ce9dd9d7b43e01eac948337fe57571443e87bc3a60',
  '8oj6ym4HlTcshT40Zn6D5DeOgaVCSOOXJvT_EyiqUw8=',
  '9288be57321c8772d04e05dbb69a22742372b3534442607a2d6a9998',
  '9vXwWGEX5W9v5pzwpu0eK4pf22DZ_sCloO0zCH1aVQ4=',
  'Bd5F0WRI0fLh8RK1al9PawPVT3jv7VwBrqiiBEtdV-g=',
  'CBWoKrhSDndjBJzscQKENRqiXOOZnzIA5qyiCoy4-A0=',
  'D7g-4LMqOsVWBNOD-_kGgCOvJEoc8rcpYbkfDlF2u5U=',
  'Vqc8rxFbGyc5L1QeE_Zr10XEcIai_0Xw4Qv6d3ldRPE=',
  'VuXYyM9JUv1Fv_qjg1v5Go4Zksz0r4NXFeh3Na7JkIc=',
  'bfXllFddegV9vvxPcSWnOtm-_tuzXm-0OQ59z9Su1zA=',
  'c820edeeba50006b531883903f5bb0b96bf523c9a6b3ce5868f03db5',
  'cw9XjQ-pCeSW7LKMzVREGHeCPTXWYbtE-QbZLEY3RrI=',
  'e93536e1be97e3b5206d43bf0793fdef24e60044d174f0abdefebe08',
  'f9yKnlNMgKhF-CxOgHBsXkxfViCCkC6KwTv6Uj2Fcjw=',
  'fb0SNPtMpQlzBQ90_PB7vCu34WpiSUJbNKocFkL2vIo=',
  'gZLmSgmwumNdgf0eIfOSW44emctrLyFUaZapbk8eZ6s=',
  'i39ZQ24NlUfWI0MhkbtvHTzGnWMVdr-aC2aOjvHPVg4=',
  'k07JiWnb-S7qgd9gDQNgqo-LYMe03RX8fR0TXQ-SpG4=',
  'nSEEkYxrThpZfLoPNOzMp6HWekvutAIYmADElDe1J6I=',
  'pIqdA3pM1la1gKzxOmAcpLmTh3yXBrL9mGTz_hGj5xE=',
  'qezoK6du9n3PF4dl4aq5r7LeXz_sV3xOVpFzVVgjNsE=',
  'rY96Bunn-69CnNe5X_e5CJLwgCJnN6rcbUisecs8kkQ=',
  'sNVNYDBzUDN2J9OFVJdLJlryBLzRZBLl-4MTNoPF1tA=',
  'uvpOG9DrldeqIGNaqfwjFdMw_CcfXKfiEjYf7RXdeL0=',
  'v2yF7lY2L5rtorTtTmYFsoMb9dBPKs5M1y9cUKxcI1M=',
];

/**
 * Read-only BlobStorage serving blobs bundled with the app under /static.
 * set/delete/list are intentional no-ops.
 */
export const createStaticBlobStorage = (): BlobStorage => {
  return {
    name: 'static',
    readonly: true,
    get: async (key: string) => {
      // only known bundled hashes or explicit /static/ paths are served
      const isStaticResource =
        key.startsWith('/static/') || predefinedStaticFiles.includes(key);
      if (!isStaticResource) {
        return null;
      }
      const path = key.startsWith('/static/') ? key : `/static/${key}`;
      const response = await fetch(path);
      return response.ok ? await response.blob() : null;
    },
    set: async key => {
      // ignore: static storage is read-only
      return key;
    },
    delete: async () => {
      // ignore: nothing to delete from a bundle
    },
    list: async () => {
      // ignore: bundled blobs are not enumerable
      return [];
    },
  };
};

View File

@@ -0,0 +1,10 @@
import { createIndexeddbBlobStorage } from './blob-indexeddb';
import { createSQLiteBlobStorage } from './blob-sqlite';
/**
 * Pick the platform-appropriate blob storage for a local workspace:
 * SQLite inside the desktop app, IndexedDB in the browser.
 */
export function createLocalBlobStorage(workspaceId: string) {
  return environment.isDesktop
    ? createSQLiteBlobStorage(workspaceId)
    : createIndexeddbBlobStorage(workspaceId);
}

View File

@@ -0,0 +1,3 @@
// localStorage key holding the JSON-encoded array of local workspace ids.
export const LOCAL_WORKSPACE_LOCAL_STORAGE_KEY = 'affine-local-workspace';
// BroadcastChannel name used to tell other tabs that the local workspace
// list changed (workspace created or deleted) so they can rescan it.
export const LOCAL_WORKSPACE_CREATED_BROADCAST_CHANNEL_KEY =
  'affine-local-workspace-created';

View File

@@ -0,0 +1,11 @@
// Barrel file: re-export the local workspace implementation pieces
// (blob/sync storages, awareness provider, list provider, factory).
export * from './awareness';
export * from './blob';
export * from './blob-indexeddb';
export * from './blob-sqlite';
export * from './blob-static';
export * from './consts';
export * from './list';
export * from './sync';
export * from './sync-indexeddb';
export * from './sync-sqlite';
export * from './workspace-factory';

View File

@@ -0,0 +1,130 @@
import { apis } from '@affine/electron-api';
import { WorkspaceFlavour } from '@affine/env/workspace';
import type { WorkspaceListProvider } from '@affine/workspace';
import { globalBlockSuiteSchema } from '@affine/workspace';
import { Workspace as BlockSuiteWorkspace } from '@blocksuite/store';
import { difference } from 'lodash-es';
import { nanoid } from 'nanoid';
import { applyUpdate, encodeStateAsUpdate } from 'yjs';
import { createLocalBlobStorage } from './blob';
import {
LOCAL_WORKSPACE_CREATED_BROADCAST_CHANNEL_KEY,
LOCAL_WORKSPACE_LOCAL_STORAGE_KEY,
} from './consts';
import { createLocalStorage } from './sync';
/**
 * WorkspaceListProvider for local (offline) workspaces.
 *
 * The workspace id list is a JSON array persisted in localStorage under
 * LOCAL_WORKSPACE_LOCAL_STORAGE_KEY; a BroadcastChannel notifies other
 * tabs whenever the list changes so they can rescan it.
 */
export function createLocalWorkspaceListProvider(): WorkspaceListProvider {
  const notifyChannel = new BroadcastChannel(
    LOCAL_WORKSPACE_CREATED_BROADCAST_CHANNEL_KEY
  );
  return {
    name: WorkspaceFlavour.LOCAL,
    getList() {
      // Read the id list and decorate each id with the LOCAL flavour.
      return Promise.resolve(
        JSON.parse(
          localStorage.getItem(LOCAL_WORKSPACE_LOCAL_STORAGE_KEY) ?? '[]'
        ).map((id: string) => ({ id, flavour: WorkspaceFlavour.LOCAL }))
      );
    },
    subscribe(callback) {
      let lastWorkspaceIDs: string[] = [];
      // Re-read localStorage and report only the ids that appeared or
      // disappeared since the previous scan.
      function scan() {
        const allWorkspaceIDs: string[] = JSON.parse(
          localStorage.getItem(LOCAL_WORKSPACE_LOCAL_STORAGE_KEY) ?? '[]'
        );
        const added = difference(allWorkspaceIDs, lastWorkspaceIDs);
        const deleted = difference(lastWorkspaceIDs, allWorkspaceIDs);
        lastWorkspaceIDs = allWorkspaceIDs;
        callback({
          added: added.map(id => ({ id, flavour: WorkspaceFlavour.LOCAL })),
          deleted: deleted.map(id => ({ id, flavour: WorkspaceFlavour.LOCAL })),
        });
      }
      // Initial scan reports the current list as "added".
      scan();
      // rescan if other tabs notify us
      notifyChannel.addEventListener('message', scan);
      return () => {
        notifyChannel.removeEventListener('message', scan);
      };
    },
    async create(initial) {
      const id = nanoid();
      const blobStorage = createLocalBlobStorage(id);
      const syncStorage = createLocalStorage(id);
      // Throwaway in-memory workspace used only to build the initial doc.
      const workspace = new BlockSuiteWorkspace({
        id: id,
        idGenerator: () => nanoid(),
        schema: globalBlockSuiteSchema,
      });
      // apply initial state
      await initial(workspace, blobStorage);
      // save workspace to local storage
      await syncStorage.push(id, encodeStateAsUpdate(workspace.doc));
      // Persist every subdoc as well, keyed by its guid.
      for (const subdocs of workspace.doc.getSubdocs()) {
        await syncStorage.push(subdocs.guid, encodeStateAsUpdate(subdocs));
      }
      // save workspace id to local storage
      const allWorkspaceIDs: string[] = JSON.parse(
        localStorage.getItem(LOCAL_WORKSPACE_LOCAL_STORAGE_KEY) ?? '[]'
      );
      allWorkspaceIDs.push(id);
      localStorage.setItem(
        LOCAL_WORKSPACE_LOCAL_STORAGE_KEY,
        JSON.stringify(allWorkspaceIDs)
      );
      // notify all browser tabs, so they can update their workspace list
      notifyChannel.postMessage(id);
      return id;
    },
    async delete(workspaceId) {
      // Remove the id from the persisted list first.
      const allWorkspaceIDs: string[] = JSON.parse(
        localStorage.getItem(LOCAL_WORKSPACE_LOCAL_STORAGE_KEY) ?? '[]'
      );
      localStorage.setItem(
        LOCAL_WORKSPACE_LOCAL_STORAGE_KEY,
        JSON.stringify(allWorkspaceIDs.filter(x => x !== workspaceId))
      );
      // On desktop the workspace data lives in SQLite; delete it there too.
      if (apis && environment.isDesktop) {
        await apis.workspace.delete(workspaceId);
      }
      // notify all browser tabs, so they can update their workspace list
      notifyChannel.postMessage(workspaceId);
    },
    async getInformation(id) {
      // get information from root doc
      const storage = createLocalStorage(id);
      // Empty state vector: pull the full root doc update.
      const data = await storage.pull(id, new Uint8Array([]));
      if (!data) {
        return;
      }
      // Hydrate a throwaway workspace just to read name/avatar metadata.
      const bs = new BlockSuiteWorkspace({
        id,
        schema: globalBlockSuiteSchema,
      });
      applyUpdate(bs.doc, data.data);
      return {
        name: bs.meta.name,
        avatar: bs.meta.avatar,
      };
    },
  };
}

View File

@@ -0,0 +1,118 @@
import type { SyncStorage } from '@affine/workspace';
import { type DBSchema, type IDBPDatabase, openDB } from 'idb';
import { diffUpdate, encodeStateVectorFromUpdate } from 'yjs';
import { mergeUpdates } from '../utils/merge-updates';
// Schema version for the shared IndexedDB database.
export const dbVersion = 1;
// Default database name; a single db (per name) holds every workspace.
export const DEFAULT_DB_NAME = 'affine-local';
// One persisted yjs update plus the time it was written.
type UpdateMessage = {
  timestamp: number;
  update: Uint8Array;
};
// All persisted updates for a single doc, keyed by the doc id.
type WorkspacePersist = {
  id: string;
  updates: UpdateMessage[];
};
interface BlockSuiteBinaryDB extends DBSchema {
  workspace: {
    key: string;
    value: WorkspacePersist;
  };
  milestone: {
    key: string;
    value: unknown;
  };
}
// idb upgrade callback: create the object stores on first open.
export function upgradeDB(db: IDBPDatabase<BlockSuiteBinaryDB>) {
  db.createObjectStore('workspace', { keyPath: 'id' });
  db.createObjectStore('milestone', { keyPath: 'id' });
}
// Broadcast payload sent to sibling tabs after a successful push.
type ChannelMessage = {
  type: 'db-updated';
  payload: { docId: string; update: Uint8Array };
};
/**
 * SyncStorage backed by IndexedDB for browser environments.
 *
 * @param workspaceId - only scopes the cross-tab BroadcastChannel name.
 * @param dbName - IndexedDB database name (shared across workspaces).
 * @param mergeCount - once a doc accumulates this many update rows they
 *   are compacted into a single merged row on push (1 = always merge).
 */
export function createIndexedDBStorage(
  workspaceId: string,
  dbName = DEFAULT_DB_NAME,
  mergeCount = 1
): SyncStorage {
  let dbPromise: Promise<IDBPDatabase<BlockSuiteBinaryDB>> | null = null;
  // Lazily open the database once and reuse the promise afterwards.
  const getDb = async () => {
    if (dbPromise === null) {
      dbPromise = openDB<BlockSuiteBinaryDB>(dbName, dbVersion, {
        upgrade: upgradeDB,
      });
    }
    return dbPromise;
  };
  // indexeddb could be shared between tabs, so we use broadcast channel to notify other tabs
  const channel = new BroadcastChannel('indexeddb:' + workspaceId);
  return {
    name: 'indexeddb',
    async pull(docId, state) {
      const db = await getDb();
      const store = db
        .transaction('workspace', 'readonly')
        .objectStore('workspace');
      const data = await store.get(docId);
      if (!data) {
        return null;
      }
      const { updates } = data;
      // Collapse all stored rows into one update, then return only the
      // part the caller is missing relative to its state vector.
      const update = mergeUpdates(updates.map(({ update }) => update));
      const diff = state.length ? diffUpdate(update, state) : update;
      return { data: diff, state: encodeStateVectorFromUpdate(update) };
    },
    async push(docId, update) {
      const db = await getDb();
      const store = db
        .transaction('workspace', 'readwrite')
        .objectStore('workspace');
      // TODO: maybe we do not need to get data every time
      const { updates } = (await store.get(docId)) ?? { updates: [] };
      let rows: UpdateMessage[] = [
        ...updates,
        { timestamp: Date.now(), update },
      ];
      // Compact the row list once it reaches mergeCount entries.
      if (mergeCount && rows.length >= mergeCount) {
        const merged = mergeUpdates(rows.map(({ update }) => update));
        rows = [{ timestamp: Date.now(), update: merged }];
      }
      await store.put({
        id: docId,
        updates: rows,
      });
      // Tell sibling tabs about the new update so they can apply it live.
      channel.postMessage({
        type: 'db-updated',
        payload: { docId, update },
      } satisfies ChannelMessage);
    },
    async subscribe(cb, _disconnect) {
      // Forward db-updated notifications from other tabs to the engine.
      function onMessage(event: MessageEvent<ChannelMessage>) {
        const { type, payload } = event.data;
        if (type === 'db-updated') {
          const { docId, update } = payload;
          cb(docId, update);
        }
      }
      channel.addEventListener('message', onMessage);
      return () => {
        channel.removeEventListener('message', onMessage);
      };
    },
  };
}

View File

@@ -0,0 +1,44 @@
import { apis } from '@affine/electron-api';
import type { SyncStorage } from '@affine/workspace';
import { encodeStateVectorFromUpdate } from 'yjs';
/**
 * SyncStorage backed by the desktop app's SQLite database via the electron
 * `apis.db` bridge. Only usable inside the desktop shell.
 *
 * The workspace root doc is addressed by omitting the sub-doc id; any
 * other doc id is forwarded to the bridge as-is.
 *
 * @throws Error when the electron db bridge is not available.
 */
export function createSQLiteStorage(workspaceId: string): SyncStorage {
  // Narrow once at creation; the original re-checked `apis?.db` inside
  // every method purely for TypeScript narrowing. Capturing the checked
  // reference removes the redundant runtime checks while keeping the
  // same "throw if unavailable" contract.
  const db = apis?.db;
  if (!db) {
    throw new Error('sqlite datasource is not available');
  }
  // Map the workspace root doc onto the bridge's "no sub-doc" addressing.
  const toSubdocId = (docId: string) =>
    workspaceId === docId ? undefined : docId;
  return {
    name: 'sqlite',
    async pull(docId, _state) {
      const update = await db.getDocAsUpdates(workspaceId, toSubdocId(docId));
      if (update) {
        return {
          data: update,
          state: encodeStateVectorFromUpdate(update),
        };
      }
      return null;
    },
    async push(docId, data) {
      return db.applyDocUpdate(workspaceId, data, toSubdocId(docId));
    },
    async subscribe(_cb, _disconnect) {
      // SQLite writes happen in-process only; nothing to listen to.
      return () => {};
    },
  };
}

View File

@@ -0,0 +1,7 @@
import { createIndexedDBStorage } from './sync-indexeddb';
import { createSQLiteStorage } from './sync-sqlite';
/**
 * Pick the platform sync storage for a local workspace: SQLite inside the
 * desktop app, IndexedDB in the browser.
 */
export const createLocalStorage = (workspaceId: string) => {
  if (environment.isDesktop) {
    return createSQLiteStorage(workspaceId);
  }
  return createIndexedDBStorage(workspaceId);
};

View File

@@ -0,0 +1,54 @@
import { setupEditorFlags } from '@affine/env/global';
import type { WorkspaceFactory } from '@affine/workspace';
import { WorkspaceEngine } from '@affine/workspace';
import { BlobEngine } from '@affine/workspace';
import { SyncEngine } from '@affine/workspace';
import { globalBlockSuiteSchema } from '@affine/workspace';
import { Workspace } from '@affine/workspace';
import { Workspace as BlockSuiteWorkspace } from '@blocksuite/store';
import { nanoid } from 'nanoid';
import { createBroadcastChannelAwarenessProvider } from './awareness';
import { createLocalBlobStorage } from './blob';
import { createStaticBlobStorage } from './blob-static';
import { createLocalStorage } from './sync';
export const localWorkspaceFactory: WorkspaceFactory = {
name: 'local',
openWorkspace(metadata) {
const blobEngine = new BlobEngine(createLocalBlobStorage(metadata.id), [
createStaticBlobStorage(),
]);
const bs = new BlockSuiteWorkspace({
id: metadata.id,
blobStorages: [
() => ({
crud: blobEngine,
}),
],
idGenerator: () => nanoid(),
schema: globalBlockSuiteSchema,
});
const syncEngine = new SyncEngine(
bs.doc,
createLocalStorage(metadata.id),
[]
);
const awarenessProvider = createBroadcastChannelAwarenessProvider(
metadata.id,
bs.awarenessStore.awareness
);
const engine = new WorkspaceEngine(blobEngine, syncEngine, [
awarenessProvider,
]);
setupEditorFlags(bs);
return new Workspace(metadata, engine, bs);
},
async getWorkspaceBlob(id, blobKey) {
const blobStorage = createLocalBlobStorage(id);
return await blobStorage.get(blobKey);
},
};

View File

@@ -0,0 +1,18 @@
import { Buffer } from 'node:buffer';
import { describe, expect, test } from 'vitest';
import { isSvgBuffer } from '../buffer-to-blob';
describe('isSvgBuffer', () => {
test('basic', async () => {
expect(isSvgBuffer(Buffer.from('<svg></svg>'))).toBe(true);
expect(isSvgBuffer(Buffer.from(' \n\r\t<svg></svg>'))).toBe(true);
expect(isSvgBuffer(Buffer.from('<123>'))).toBe(false);
expect(
isSvgBuffer(
Buffer.from('<?xml version="1.0" encoding="UTF-8"?><svg></svg>')
)
).toBe(true);
});
});

View File

@@ -0,0 +1,26 @@
import { Manager } from 'socket.io-client';
// Lazily-created singleton socket.io Manager shared by all sockets.
let ioManager: Manager | null = null;
/**
 * Resolve the websocket endpoint base: the configured server prefix in the
 * desktop app, otherwise the current page's host with a ws(s) scheme.
 */
function getBaseUrl(): string {
  if (environment.isDesktop) {
    return runtimeConfig.serverUrlPrefix;
  }
  const { protocol, hostname, port } = window.location;
  const scheme = protocol === 'https:' ? 'wss' : 'ws';
  const portSuffix = port ? `:${port}` : '';
  return `${scheme}://${hostname}${portSuffix}`;
}
/**
 * Return the shared socket.io Manager, creating it lazily on first use.
 * Sockets are not auto-connected; callers connect explicitly.
 */
export function getIoManager(): Manager {
  if (!ioManager) {
    ioManager = new Manager(`${getBaseUrl()}/`, {
      autoConnect: false,
      transports: ['websocket'],
      secure: location.protocol === 'https:',
    });
  }
  return ioManager;
}

View File

@@ -0,0 +1,28 @@
/**
 * Encode bytes as a base64 string via FileReader's data-URL machinery
 * (avoids call-stack limits that byte-by-byte `btoa` approaches hit on
 * large arrays). Resolves with '' when the reader yields no result.
 */
export function uint8ArrayToBase64(array: Uint8Array): Promise<string> {
  return new Promise<string>(resolve => {
    const reader = new FileReader();
    reader.onload = () => {
      const dataUrl = reader.result as string | null;
      // Strip the `data:<mime>;base64,` prefix and keep only the payload.
      resolve(dataUrl ? dataUrl.split(',')[1] : '');
    };
    reader.readAsDataURL(new Blob([array]));
  });
}
/** Decode a base64 string into the raw bytes it represents. */
export function base64ToUint8Array(base64: string) {
  const binaryString = atob(base64);
  const bytes = new Uint8Array(binaryString.length);
  for (let i = 0; i < binaryString.length; i++) {
    bytes[i] = binaryString.charCodeAt(i);
  }
  return bytes;
}

View File

@@ -0,0 +1,59 @@
import isSvg from 'is-svg';
/**
 * Cheap pre-filter: returns true when the buffer definitely is NOT svg,
 * i.e. its first non-whitespace bytes start neither `<svg` nor `<?xml`.
 * Empty / all-whitespace buffers also report "not svg" (true).
 */
function fastCheckIsNotSvg(buffer: Uint8Array) {
  // \s \t \v \f \n \r plus NBSP (0xa0) are skipped before the check.
  const whitespace = new Set([0x20, 0x09, 0x0b, 0x0c, 0x0a, 0x0d, 0xa0]);
  // Byte-wise prefix comparison against an ASCII signature.
  const matchesAt = (offset: number, signature: string) => {
    for (let j = 0; j < signature.length; j++) {
      if (buffer[offset + j] !== signature.charCodeAt(j)) {
        return false;
      }
    }
    return true;
  };
  for (let i = 0; i < buffer.length; i++) {
    if (whitespace.has(buffer[i])) {
      continue;
    }
    return !matchesAt(i, '<svg') && !matchesAt(i, '<?xml');
  }
  return true;
}
/**
 * True when the buffer contains an svg document. A fast byte-level check
 * rejects obvious non-svg input before paying for the utf-8 decode and
 * the full `is-svg` string test.
 */
export function isSvgBuffer(buffer: Uint8Array) {
  if (fastCheckIsNotSvg(buffer)) {
    return false;
  }
  return isSvg(new TextDecoder('utf-8').decode(buffer));
}
/**
 * Wrap raw bytes in a Blob. Svg content is tagged `image/svg+xml` so
 * browsers render it inline instead of treating it as opaque binary.
 */
export function bufferToBlob(buffer: Uint8Array | ArrayBuffer) {
  const bytes =
    buffer instanceof ArrayBuffer ? new Uint8Array(buffer) : buffer;
  if (isSvgBuffer(bytes)) {
    return new Blob([buffer], { type: 'image/svg+xml' });
  }
  return new Blob([buffer]);
}

View File

@@ -0,0 +1,17 @@
import { applyUpdate, Doc, encodeStateAsUpdate } from 'yjs';
/**
 * Collapse a list of yjs updates into one equivalent update by replaying
 * them into a throwaway Doc inside a single transaction. Fast paths: an
 * empty list yields an empty update; a single update is returned as-is.
 */
export function mergeUpdates(updates: Uint8Array[]) {
  if (updates.length === 0) {
    return new Uint8Array();
  }
  if (updates.length === 1) {
    return updates[0];
  }
  const doc = new Doc();
  doc.transact(() => {
    for (const update of updates) {
      applyUpdate(doc, update);
    }
  });
  return encodeStateAsUpdate(doc);
}

View File

@@ -0,0 +1,16 @@
{
"extends": "../../../tsconfig.json",
"include": ["./src"],
"compilerOptions": {
"noEmit": false,
"outDir": "lib"
},
"references": [
{ "path": "../../../tests/fixtures" },
{ "path": "../../common/env" },
{ "path": "../../common/debug" },
{ "path": "../../common/infra" },
{ "path": "../../frontend/graphql" },
{ "path": "../../frontend/electron-api" }
]
}