Mirror of https://github.com/toeverything/AFFiNE.git, synced 2026-02-13 21:05:19 +00:00
fix(core): avoid page full refresh (#3341)
Co-authored-by: Peng Xiao <pengxiao@outlook.com>
@@ -1,11 +1,12 @@
import type { WorkspaceAdapter } from '@affine/env/workspace';
import { WorkspaceFlavour, WorkspaceVersion } from '@affine/env/workspace';
import { getOrCreateWorkspace } from '@affine/workspace/manager';
import type { BlockHub } from '@blocksuite/blocks';
import { assertExists } from '@blocksuite/global/utils';
import { atom } from 'jotai';
import { z } from 'zod';

import { getOrCreateWorkspace } from './manager';

const rootWorkspaceMetadataV1Schema = z.object({
  id: z.string(),
  flavour: z.nativeEnum(WorkspaceFlavour),
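
The closing lines of the schema are elided by the diff context. For background, z.nativeEnum validates that a value is a member of a TypeScript enum, which is how this schema guards persisted workspace metadata. A self-contained sketch of the pattern (the enum values and sample data here are illustrative, not taken from the PR):

import { z } from 'zod';

// Hypothetical stand-in for @affine/env/workspace's WorkspaceFlavour.
enum WorkspaceFlavour {
  AFFINE = 'affine',
  LOCAL = 'local',
}

const metadataSchema = z.object({
  id: z.string(),
  flavour: z.nativeEnum(WorkspaceFlavour),
});

// safeParse returns { success: true, data } or { success: false, error }
// instead of throwing, which suits validating untrusted persisted state.
const result = z.array(metadataSchema).safeParse([
  { id: 'w1', flavour: 'local' },
  { id: 'w2', flavour: 'unknown' }, // fails: not a WorkspaceFlavour member
]);
console.log(result.success); // false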
@@ -1,10 +1,6 @@
import { isBrowser, isDesktop } from '@affine/env/constant';
import type { BlockSuiteFeatureFlags } from '@affine/env/global';
import { WorkspaceFlavour } from '@affine/env/workspace';
import {
  createAffineProviders,
  createLocalProviders,
} from '@affine/workspace/providers';
import { __unstableSchemas, AffineSchemas } from '@blocksuite/blocks/models';
import type { DocProviderCreator, StoreOptions } from '@blocksuite/store';
import {

@@ -18,6 +14,7 @@ import type { Transaction } from 'yjs';

import { createStaticStorage } from '../blob/local-static-storage';
import { createSQLiteStorage } from '../blob/sqlite-blob-storage';
import { createAffineProviders, createLocalProviders } from '../providers';

function setEditorFlags(workspace: Workspace) {
  Object.entries(runtimeConfig.editorFlags).forEach(([key, value]) => {

@@ -1,19 +0,0 @@
export interface DatasourceDocAdapter {
  // request a diff update from other clients
  queryDocState: (
    guid: string,
    options?: {
      stateVector?: Uint8Array;
      targetClientId?: number;
    }
  ) => Promise<Uint8Array | false>;

  // send an update to the datasource
  sendDocUpdate: (guid: string, update: Uint8Array) => Promise<void>;

  // listen for updates from the datasource. Returns a function to unsubscribe.
  // This is optional because some datasources might not support it.
  onDocUpdate?(
    callback: (guid: string, update: Uint8Array) => void
  ): () => void;
}
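
For illustration, a minimal in-memory implementation of this adapter might look like the following. This is a sketch under assumptions, not part of the PR: it keeps one merged update per guid and fans incoming updates out to subscribers.

import { mergeUpdates } from 'yjs';
import type { DatasourceDocAdapter } from './datasource-doc-adapter';

const createMemoryAdapter = (): DatasourceDocAdapter => {
  const store = new Map<string, Uint8Array>();
  const listeners = new Set<(guid: string, update: Uint8Array) => void>();
  return {
    // ignores options.stateVector and returns the full state, which is
    // a valid (if wasteful) diff relative to the empty state
    queryDocState: async guid => store.get(guid) ?? false,
    sendDocUpdate: async (guid, update) => {
      const prev = store.get(guid);
      store.set(guid, prev ? mergeUpdates([prev, update]) : update);
      // echoing the update back to the sender is harmless:
      // re-applying an update a Yjs doc already has is a no-op
      listeners.forEach(cb => cb(guid, update));
    },
    onDocUpdate(callback) {
      listeners.add(callback);
      return () => listeners.delete(callback);
    },
  };
};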
@@ -1,148 +0,0 @@
import type { PassiveDocProvider } from '@blocksuite/store';
import {
  applyUpdate,
  type Doc,
  encodeStateAsUpdate,
  encodeStateVectorFromUpdate,
} from 'yjs';

import type { DatasourceDocAdapter } from './datasource-doc-adapter';

const selfUpdateOrigin = 'lazy-provider-self-origin';

function getDoc(doc: Doc, guid: string): Doc | undefined {
  if (doc.guid === guid) {
    return doc;
  }
  for (const subdoc of doc.subdocs) {
    const found = getDoc(subdoc, guid);
    if (found) {
      return found;
    }
  }
  return undefined;
}

/**
 * Creates a lazy provider that connects to a datasource and synchronizes a root document.
 */
export const createLazyProvider = (
  rootDoc: Doc,
  datasource: DatasourceDocAdapter
): Omit<PassiveDocProvider, 'flavour'> => {
  let connected = false;
  const pendingMap = new Map<string, Uint8Array[]>(); // guid -> pending updates
  const disposableMap = new Map<string, Set<() => void>>();
  let datasourceUnsub: (() => void) | undefined;

  async function syncDoc(doc: Doc) {
    const guid = doc.guid;
    // perf: optimize me
    const currentUpdate = encodeStateAsUpdate(doc);

    const remoteUpdate = await datasource.queryDocState(guid, {
      stateVector: encodeStateVectorFromUpdate(currentUpdate),
    });

    const updates = [currentUpdate];
    pendingMap.set(guid, []);

    if (remoteUpdate) {
      applyUpdate(doc, remoteUpdate, selfUpdateOrigin);
      const newUpdate = encodeStateAsUpdate(
        doc,
        encodeStateVectorFromUpdate(remoteUpdate)
      );
      updates.push(newUpdate);
      await datasource.sendDocUpdate(guid, newUpdate);
    }
  }

  function setupDocListener(doc: Doc) {
    const disposables = new Set<() => void>();
    disposableMap.set(doc.guid, disposables);
    const updateHandler = async (update: Uint8Array, origin: unknown) => {
      if (origin === selfUpdateOrigin) {
        return;
      }
      datasource.sendDocUpdate(doc.guid, update).catch(console.error);
    };

    const subdocLoadHandler = (event: { loaded: Set<Doc> }) => {
      event.loaded.forEach(subdoc => {
        connectDoc(subdoc).catch(console.error);
      });
    };

    doc.on('update', updateHandler);
    doc.on('subdocs', subdocLoadHandler);
    // todo: handle destroy?
    disposables.add(() => {
      doc.off('update', updateHandler);
      doc.off('subdocs', subdocLoadHandler);
    });
  }

  function setupDatasourceListeners() {
    datasourceUnsub = datasource.onDocUpdate?.((guid, update) => {
      const doc = getDoc(rootDoc, guid);
      if (doc) {
        applyUpdate(doc, update);
        // flush any updates that were buffered for this doc
        if (pendingMap.has(guid)) {
          pendingMap.get(guid)?.forEach(update => applyUpdate(doc, update));
          pendingMap.delete(guid);
        }
      } else {
        // This happens when the parent doc has not been updated yet,
        // so the child doc has not been created.
        // Cache the update so it can be applied later.
        console.warn('idb: doc not found', guid);
        pendingMap.set(guid, (pendingMap.get(guid) ?? []).concat(update));
      }
    });
  }

  // when a subdoc is loaded, we need to sync it with the datasource and set up listeners
  async function connectDoc(doc: Doc) {
    setupDocListener(doc);
    await syncDoc(doc);
    await Promise.all(
      [...doc.subdocs]
        .filter(subdoc => subdoc.shouldLoad)
        .map(subdoc => connectDoc(subdoc))
    );
  }

  function disposeAll() {
    disposableMap.forEach(disposables => {
      disposables.forEach(dispose => dispose());
    });
    disposableMap.clear();
  }

  function connect() {
    connected = true;

    // the root doc should already be loaded,
    // but we still want to populate the cache for later update events
    connectDoc(rootDoc).catch(console.error);
    setupDatasourceListeners();
  }

  async function disconnect() {
    connected = false;
    disposeAll();
    datasourceUnsub?.();
    datasourceUnsub = undefined;
  }

  return {
    get connected() {
      return connected;
    },
    passive: true,
    connect,
    disconnect,
  };
};
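
Wiring the two pieces together, a usage sketch (the guid and the createMemoryAdapter helper are the hypothetical ones from the earlier example, not from the PR):

import * as Y from 'yjs';

const rootDoc = new Y.Doc({ guid: 'workspace:example' });
const provider = createLazyProvider(rootDoc, createMemoryAdapter());

provider.connect();
// local edits flow to the adapter through the doc's 'update' events;
// remote updates arrive through onDocUpdate and are applied by the provider
rootDoc.getText('title').insert(0, 'hello');
provider.disconnect();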
@@ -2,6 +2,7 @@ import type {
  SQLiteDBDownloadProvider,
  SQLiteProvider,
} from '@affine/env/workspace';
import { getDoc } from '@affine/y-provider';
import { assertExists } from '@blocksuite/global/utils';
import type { DocProviderCreator } from '@blocksuite/store';
import { Workspace as BlockSuiteWorkspace } from '@blocksuite/store';

@@ -19,6 +20,28 @@ type SubDocsEvent = {
  loaded: Set<Doc>;
};

// workaround: there may be new updates created before SQLite is connected,
// so we need to exchange them with the SQLite db.
// This will be removed once we have a lazy-load doc provider.
const syncDiff = async (rootDoc: Doc, subdocId?: string) => {
  try {
    const workspaceId = rootDoc.guid;
    const doc = subdocId ? getDoc(rootDoc, subdocId) : rootDoc;
    if (!doc) {
      logger.error('doc not found', workspaceId, subdocId);
      return;
    }
    const update = await window.apis?.db.getDocAsUpdates(workspaceId, subdocId);
    const diff = Y.encodeStateAsUpdate(
      doc,
      Y.encodeStateVectorFromUpdate(update)
    );
    await window.apis.db.applyDocUpdate(workspaceId, diff, subdocId);
  } catch (err) {
    logger.error('failed to sync diff', err);
  }
};
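
The state-vector arithmetic syncDiff relies on can be seen in isolation with plain yjs (a self-contained sketch, independent of the window.apis bridge):

import * as Y from 'yjs';

const local = new Y.Doc();
const dbMirror = new Y.Doc();

local.getText('t').insert(0, 'shared');
Y.applyUpdate(dbMirror, Y.encodeStateAsUpdate(local)); // both sides agree
local.getText('t').insert(6, ' +new'); // local-only edit

// "update" plays the role of getDocAsUpdates: everything the db already has
const update = Y.encodeStateAsUpdate(dbMirror);
// encode only what the db is missing, exactly as syncDiff does
const diff = Y.encodeStateAsUpdate(local, Y.encodeStateVectorFromUpdate(update));
Y.applyUpdate(dbMirror, diff); // plays the role of applyDocUpdate

console.log(dbMirror.getText('t').toString()); // 'shared +new'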
/**
 * A provider that is responsible for syncing the workspace's updates with the local SQLite database.
 */

@@ -74,6 +97,9 @@ export const createSQLiteProvider: DocProviderCreator = (
  };

  function trackDoc(doc: Doc) {
    syncDiff(rootDoc, rootDoc !== doc ? doc.guid : undefined).catch(
      logger.error
    );
    doc.on('update', createOrHandleUpdate(doc));
    doc.on('subdocs', createOrGetHandleSubDocs(doc));
    doc.subdocs.forEach(doc => {

@@ -93,6 +119,9 @@ export const createSQLiteProvider: DocProviderCreator = (
  let connected = false;

  const connect = () => {
    if (connected) {
      return;
    }
    logger.info('connecting sqlite provider', id);
    trackDoc(rootDoc);

@@ -161,7 +190,7 @@ export const createSQLiteDBDownloadProvider: DocProviderCreator = (
  });

  async function syncUpdates(doc: Doc) {
    logger.info('syncing updates from sqlite', id);
    logger.info('syncing updates from sqlite', doc.guid);
    const subdocId = doc.guid === id ? undefined : doc.guid;
    const updates = await apis.db.getDocAsUpdates(id, subdocId);

@@ -173,7 +202,10 @@ export const createSQLiteDBDownloadProvider: DocProviderCreator = (
      Y.applyUpdate(doc, updates, sqliteOrigin);
    }

    const mergedUpdates = Y.encodeStateAsUpdate(doc);
    const mergedUpdates = Y.encodeStateAsUpdate(
      doc,
      Y.encodeStateVectorFromUpdate(updates)
    );

    // also apply updates to sqlite
    await apis.db.applyDocUpdate(id, mergedUpdates, subdocId);
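
The change to mergedUpdates above matters because syncUpdates writes back to SQLite on each sync: encoding against the state vector of what SQLite already returned writes only the missing delta instead of the doc's entire history. A self-contained sketch of the difference (illustrative, not from the PR):

import * as Y from 'yjs';

const doc = new Y.Doc();
doc.getText('t').insert(0, 'already persisted');
const updates = Y.encodeStateAsUpdate(doc); // pretend this came from SQLite
doc.getText('t').insert(0, 'fresh edits; ');

const full = Y.encodeStateAsUpdate(doc); // old behavior: whole doc every time
const delta = Y.encodeStateAsUpdate(
  doc,
  Y.encodeStateVectorFromUpdate(updates) // new behavior: only what the db lacks
);
console.log(delta.byteLength < full.byteLength); // true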