mirror of
https://github.com/toeverything/AFFiNE.git
synced 2026-02-12 04:18:54 +00:00
feat(infra): new doc sync engine (#6205)
https://github.com/toeverything/AFFiNE/blob/eyhn/feat/new-sync/packages/common/infra/src/workspace/engine/doc/README.md
This commit is contained in:
@@ -1,4 +0,0 @@
|
||||
import type { SyncEngineStatus } from '@toeverything/infra';
|
||||
import { atom } from 'jotai';
|
||||
|
||||
export const syncEngineStatusAtom = atom<SyncEngineStatus | null>(null);
|
||||
@@ -31,7 +31,7 @@ export const ExportPanel = ({
|
||||
setSaving(true);
|
||||
try {
|
||||
if (isOnline) {
|
||||
await workspace.engine.sync.waitForSynced();
|
||||
await workspace.engine.waitForSynced();
|
||||
await workspace.engine.blob.sync();
|
||||
}
|
||||
|
||||
|
||||
@@ -8,6 +8,8 @@ import { useWorkspace } from '@affine/core/hooks/use-workspace';
|
||||
import { useWorkspaceInfo } from '@affine/core/hooks/use-workspace-info';
|
||||
import { UNTITLED_WORKSPACE_NAME } from '@affine/env/constant';
|
||||
import { useAFFiNEI18N } from '@affine/i18n/hooks';
|
||||
import { ArrowRightSmallIcon } from '@blocksuite/icons';
|
||||
import { useCallback } from 'react';
|
||||
|
||||
import { DeleteLeaveWorkspace } from './delete-leave-workspace';
|
||||
import { EnableCloudPanel } from './enable-cloud';
|
||||
@@ -29,6 +31,17 @@ export const WorkspaceSettingDetail = (props: WorkspaceSettingDetailProps) => {
|
||||
|
||||
const workspaceInfo = useWorkspaceInfo(workspaceMetadata);
|
||||
|
||||
const handleResetSyncStatus = useCallback(() => {
|
||||
workspace?.engine.doc
|
||||
.resetSyncStatus()
|
||||
.then(() => {
|
||||
window.location.reload();
|
||||
})
|
||||
.catch(err => {
|
||||
console.error(err);
|
||||
});
|
||||
}, [workspace]);
|
||||
|
||||
return (
|
||||
<>
|
||||
<SettingHeader
|
||||
@@ -64,6 +77,19 @@ export const WorkspaceSettingDetail = (props: WorkspaceSettingDetailProps) => {
|
||||
)}
|
||||
<SettingWrapper>
|
||||
<DeleteLeaveWorkspace {...props} />
|
||||
<SettingRow
|
||||
name={
|
||||
<span style={{ color: 'var(--affine-text-secondary-color)' }}>
|
||||
{t['com.affine.resetSyncStatus.button']()}
|
||||
</span>
|
||||
}
|
||||
desc={t['com.affine.resetSyncStatus.description']()}
|
||||
style={{ cursor: 'pointer' }}
|
||||
onClick={handleResetSyncStatus}
|
||||
data-testid="reset-sync-status"
|
||||
>
|
||||
<ArrowRightSmallIcon />
|
||||
</SettingRow>
|
||||
</SettingWrapper>
|
||||
</>
|
||||
);
|
||||
|
||||
@@ -5,13 +5,12 @@ import { Button } from '@affine/component/ui/button';
|
||||
import { Upload } from '@affine/core/components/pure/file-upload';
|
||||
import { useAsyncCallback } from '@affine/core/hooks/affine-async-hooks';
|
||||
import { useWorkspaceBlobObjectUrl } from '@affine/core/hooks/use-workspace-blob';
|
||||
import { useWorkspaceStatus } from '@affine/core/hooks/use-workspace-status';
|
||||
import { validateAndReduceImage } from '@affine/core/utils/reduce-image';
|
||||
import { UNTITLED_WORKSPACE_NAME } from '@affine/env/constant';
|
||||
import { useAFFiNEI18N } from '@affine/i18n/hooks';
|
||||
import { CameraIcon } from '@blocksuite/icons';
|
||||
import type { Workspace } from '@toeverything/infra';
|
||||
import { SyncPeerStep } from '@toeverything/infra';
|
||||
import { useLiveData } from '@toeverything/infra';
|
||||
import { useSetAtom } from 'jotai';
|
||||
import {
|
||||
type KeyboardEvent,
|
||||
@@ -32,13 +31,7 @@ export const ProfilePanel = ({ isOwner, workspace }: ProfilePanelProps) => {
|
||||
const t = useAFFiNEI18N();
|
||||
const pushNotification = useSetAtom(pushNotificationAtom);
|
||||
|
||||
const workspaceIsLoading =
|
||||
useWorkspaceStatus(
|
||||
workspace,
|
||||
status =>
|
||||
!status.engine.sync.local ||
|
||||
status.engine.sync.local?.step <= SyncPeerStep.LoadingRootDoc
|
||||
) ?? true;
|
||||
const workspaceIsReady = useLiveData(workspace?.engine.rootDocState)?.ready;
|
||||
|
||||
const [avatarBlob, setAvatarBlob] = useState<string | null>(null);
|
||||
const [name, setName] = useState('');
|
||||
@@ -158,7 +151,7 @@ export const ProfilePanel = ({ isOwner, workspace }: ProfilePanelProps) => {
|
||||
[pushNotification, setWorkspaceAvatar]
|
||||
);
|
||||
|
||||
const canAdjustAvatar = !workspaceIsLoading && avatarUrl && isOwner;
|
||||
const canAdjustAvatar = workspaceIsReady && avatarUrl && isOwner;
|
||||
|
||||
return (
|
||||
<div className={style.profileWrapper}>
|
||||
@@ -194,7 +187,7 @@ export const ProfilePanel = ({ isOwner, workspace }: ProfilePanelProps) => {
|
||||
<div className={style.label}>{t['Workspace Name']()}</div>
|
||||
<FlexWrapper alignItems="center" flexGrow="1">
|
||||
<Input
|
||||
disabled={workspaceIsLoading || !isOwner}
|
||||
disabled={!workspaceIsReady || !isOwner}
|
||||
value={input}
|
||||
style={{ width: 280, height: 32 }}
|
||||
data-testid="workspace-name-input"
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
import { Loading } from '@affine/component/ui/loading';
|
||||
import { formatDate } from '@affine/core/components/page-list';
|
||||
import { useSyncEngineStatus } from '@affine/core/hooks/affine/use-sync-engine-status';
|
||||
import { useDocEngineStatus } from '@affine/core/hooks/affine/use-doc-engine-status';
|
||||
import { useAsyncCallback } from '@affine/core/hooks/affine-async-hooks';
|
||||
import { useAFFiNEI18N } from '@affine/i18n/hooks';
|
||||
import type { DocMeta } from '@blocksuite/store';
|
||||
import { SyncEngineStep } from '@toeverything/infra';
|
||||
import type { CommandCategory } from '@toeverything/infra/command';
|
||||
import clsx from 'clsx';
|
||||
import { Command } from 'cmdk';
|
||||
@@ -163,7 +162,7 @@ export const CMDKContainer = ({
|
||||
const [value, setValue] = useAtom(cmdkValueAtom);
|
||||
const isInEditor = pageMeta !== undefined;
|
||||
const [opening, setOpening] = useState(open);
|
||||
const { syncEngineStatus, progress } = useSyncEngineStatus();
|
||||
const { syncing, progress } = useDocEngineStatus();
|
||||
const inputRef = useRef<HTMLInputElement>(null);
|
||||
|
||||
// fix list height animation on opening
|
||||
@@ -205,8 +204,7 @@ export const CMDKContainer = ({
|
||||
inEditor: isInEditor,
|
||||
})}
|
||||
>
|
||||
{!syncEngineStatus ||
|
||||
syncEngineStatus.step === SyncEngineStep.Syncing ? (
|
||||
{syncing ? (
|
||||
<Loading
|
||||
size={24}
|
||||
progress={progress ? Math.max(progress, 0.2) : undefined}
|
||||
|
||||
@@ -3,8 +3,8 @@ import { Avatar } from '@affine/component/ui/avatar';
|
||||
import { Loading } from '@affine/component/ui/loading';
|
||||
import { Tooltip } from '@affine/component/ui/tooltip';
|
||||
import { openSettingModalAtom } from '@affine/core/atoms';
|
||||
import { useDocEngineStatus } from '@affine/core/hooks/affine/use-doc-engine-status';
|
||||
import { useIsWorkspaceOwner } from '@affine/core/hooks/affine/use-is-workspace-owner';
|
||||
import { useSyncEngineStatus } from '@affine/core/hooks/affine/use-sync-engine-status';
|
||||
import { useWorkspaceBlobObjectUrl } from '@affine/core/hooks/use-workspace-blob';
|
||||
import { useWorkspaceInfo } from '@affine/core/hooks/use-workspace-info';
|
||||
import { UNTITLED_WORKSPACE_NAME } from '@affine/env/constant';
|
||||
@@ -17,7 +17,7 @@ import {
|
||||
NoNetworkIcon,
|
||||
UnsyncIcon,
|
||||
} from '@blocksuite/icons';
|
||||
import { SyncEngineStep, Workspace } from '@toeverything/infra';
|
||||
import { Workspace } from '@toeverything/infra';
|
||||
import { useService } from '@toeverything/infra/di';
|
||||
import { useSetAtom } from 'jotai';
|
||||
import { debounce } from 'lodash-es';
|
||||
@@ -94,8 +94,7 @@ const useSyncEngineSyncProgress = () => {
|
||||
const t = useAFFiNEI18N();
|
||||
const isOnline = useSystemOnline();
|
||||
const pushNotification = useSetAtom(pushNotificationAtom);
|
||||
const { syncEngineStatus, setSyncEngineStatus, progress } =
|
||||
useSyncEngineStatus();
|
||||
const { syncing, progress, retrying, errorMessage } = useDocEngineStatus();
|
||||
const [isOverCapacity, setIsOverCapacity] = useState(false);
|
||||
|
||||
const currentWorkspace = useService(Workspace);
|
||||
@@ -111,19 +110,6 @@ const useSyncEngineSyncProgress = () => {
|
||||
|
||||
// debounce sync engine status
|
||||
useEffect(() => {
|
||||
setSyncEngineStatus(currentWorkspace.engine.sync.status);
|
||||
const disposable = currentWorkspace.engine.sync.onStatusChange.on(
|
||||
debounce(
|
||||
status => {
|
||||
setSyncEngineStatus(status);
|
||||
},
|
||||
300,
|
||||
{
|
||||
maxWait: 500,
|
||||
trailing: true,
|
||||
}
|
||||
)
|
||||
);
|
||||
const disposableOverCapacity =
|
||||
currentWorkspace.engine.blob.onStatusChange.on(
|
||||
debounce(status => {
|
||||
@@ -153,17 +139,9 @@ const useSyncEngineSyncProgress = () => {
|
||||
})
|
||||
);
|
||||
return () => {
|
||||
disposable?.dispose();
|
||||
disposableOverCapacity?.dispose();
|
||||
};
|
||||
}, [
|
||||
currentWorkspace,
|
||||
isOwner,
|
||||
jumpToPricePlan,
|
||||
pushNotification,
|
||||
setSyncEngineStatus,
|
||||
t,
|
||||
]);
|
||||
}, [currentWorkspace, isOwner, jumpToPricePlan, pushNotification, t]);
|
||||
|
||||
const content = useMemo(() => {
|
||||
// TODO: add i18n
|
||||
@@ -176,21 +154,15 @@ const useSyncEngineSyncProgress = () => {
|
||||
if (!isOnline) {
|
||||
return 'Disconnected, please check your network connection';
|
||||
}
|
||||
if (!syncEngineStatus || syncEngineStatus.step === SyncEngineStep.Syncing) {
|
||||
if (syncing) {
|
||||
return (
|
||||
`Syncing with AFFiNE Cloud` +
|
||||
(progress ? ` (${Math.floor(progress * 100)}%)` : '')
|
||||
);
|
||||
} else if (
|
||||
syncEngineStatus &&
|
||||
syncEngineStatus.step < SyncEngineStep.Syncing
|
||||
) {
|
||||
return (
|
||||
syncEngineStatus.error ||
|
||||
'Disconnected, please check your network connection'
|
||||
);
|
||||
} else if (retrying && errorMessage) {
|
||||
return `${errorMessage}, reconnecting.`;
|
||||
}
|
||||
if (syncEngineStatus.retrying) {
|
||||
if (retrying) {
|
||||
return 'Sync disconnected due to unexpected issues, reconnecting.';
|
||||
}
|
||||
if (isOverCapacity) {
|
||||
@@ -199,29 +171,31 @@ const useSyncEngineSyncProgress = () => {
|
||||
return 'Synced with AFFiNE Cloud';
|
||||
}, [
|
||||
currentWorkspace.flavour,
|
||||
errorMessage,
|
||||
isOnline,
|
||||
isOverCapacity,
|
||||
progress,
|
||||
syncEngineStatus,
|
||||
retrying,
|
||||
syncing,
|
||||
]);
|
||||
|
||||
const CloudWorkspaceSyncStatus = useCallback(() => {
|
||||
if (!syncEngineStatus || syncEngineStatus.step === SyncEngineStep.Syncing) {
|
||||
if (syncing) {
|
||||
return SyncingWorkspaceStatus({
|
||||
progress: progress ? Math.max(progress, 0.2) : undefined,
|
||||
});
|
||||
} else if (syncEngineStatus.retrying || isOverCapacity) {
|
||||
} else if (retrying) {
|
||||
return UnSyncWorkspaceStatus();
|
||||
} else {
|
||||
return CloudWorkspaceStatus();
|
||||
}
|
||||
}, [isOverCapacity, progress, syncEngineStatus]);
|
||||
}, [progress, retrying, syncing]);
|
||||
|
||||
return {
|
||||
message: content,
|
||||
icon:
|
||||
currentWorkspace.flavour === WorkspaceFlavour.AFFINE_CLOUD ? (
|
||||
!isOnline || syncEngineStatus?.error ? (
|
||||
!isOnline ? (
|
||||
<OfflineStatus />
|
||||
) : (
|
||||
<CloudWorkspaceSyncStatus />
|
||||
|
||||
@@ -0,0 +1,20 @@
|
||||
import { useLiveData, useService, Workspace } from '@toeverything/infra';
|
||||
import { useMemo } from 'react';
|
||||
|
||||
export function useDocEngineStatus() {
|
||||
const workspace = useService(Workspace);
|
||||
|
||||
const engineState = useLiveData(workspace.engine.docEngineState);
|
||||
|
||||
const progress =
|
||||
(engineState.total - engineState.syncing) / engineState.total;
|
||||
|
||||
return useMemo(
|
||||
() => ({
|
||||
...engineState,
|
||||
progress,
|
||||
syncing: engineState.syncing > 0,
|
||||
}),
|
||||
[engineState, progress]
|
||||
);
|
||||
}
|
||||
@@ -1,35 +0,0 @@
|
||||
import { syncEngineStatusAtom } from '@affine/core/atoms/sync-engine-status';
|
||||
import { useAtom } from 'jotai';
|
||||
import { mean } from 'lodash-es';
|
||||
import { useMemo } from 'react';
|
||||
|
||||
export function useSyncEngineStatus() {
|
||||
const [syncEngineStatus, setSyncEngineStatus] = useAtom(syncEngineStatusAtom);
|
||||
|
||||
const progress = useMemo(() => {
|
||||
if (!syncEngineStatus?.remotes || syncEngineStatus?.remotes.length === 0) {
|
||||
return null;
|
||||
}
|
||||
return mean(
|
||||
syncEngineStatus.remotes.map(peer => {
|
||||
if (!peer) {
|
||||
return 0;
|
||||
}
|
||||
const totalTask =
|
||||
peer.totalDocs + peer.pendingPullUpdates + peer.pendingPushUpdates;
|
||||
const doneTask = peer.loadedDocs;
|
||||
|
||||
return doneTask / totalTask;
|
||||
})
|
||||
);
|
||||
}, [syncEngineStatus?.remotes]);
|
||||
|
||||
return useMemo(
|
||||
() => ({
|
||||
syncEngineStatus,
|
||||
setSyncEngineStatus,
|
||||
progress,
|
||||
}),
|
||||
[progress, setSyncEngineStatus, syncEngineStatus]
|
||||
);
|
||||
}
|
||||
@@ -4,6 +4,17 @@ import { Observable } from 'rxjs';
|
||||
export class LocalStorageMemento implements Memento {
|
||||
constructor(private readonly prefix: string) {}
|
||||
|
||||
keys(): string[] {
|
||||
const keys: string[] = [];
|
||||
for (let i = 0; i < localStorage.length; i++) {
|
||||
const key = localStorage.key(i);
|
||||
if (key && key.startsWith(this.prefix)) {
|
||||
keys.push(key.slice(this.prefix.length));
|
||||
}
|
||||
}
|
||||
return keys;
|
||||
}
|
||||
|
||||
get<T>(key: string): T | null {
|
||||
const json = localStorage.getItem(this.prefix + key);
|
||||
return json ? JSON.parse(json) : null;
|
||||
@@ -29,6 +40,16 @@ export class LocalStorageMemento implements Memento {
|
||||
channel.postMessage(value);
|
||||
channel.close();
|
||||
}
|
||||
|
||||
del(key: string): void {
|
||||
localStorage.removeItem(this.prefix + key);
|
||||
}
|
||||
|
||||
clear(): void {
|
||||
for (const key of this.keys()) {
|
||||
this.del(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export class LocalStorageGlobalCache
|
||||
|
||||
@@ -15,12 +15,11 @@ import type { AffineEditorContainer } from '@blocksuite/presets';
|
||||
import type { Doc as BlockSuiteDoc } from '@blocksuite/store';
|
||||
import type { Doc } from '@toeverything/infra';
|
||||
import {
|
||||
DocStorageImpl,
|
||||
EmptyBlobStorage,
|
||||
LocalBlobStorage,
|
||||
LocalSyncStorage,
|
||||
PageManager,
|
||||
type PageMode,
|
||||
ReadonlyMappingSyncStorage,
|
||||
RemoteBlobStorage,
|
||||
ServiceProviderContext,
|
||||
useLiveData,
|
||||
@@ -29,6 +28,7 @@ import {
|
||||
WorkspaceManager,
|
||||
WorkspaceScope,
|
||||
} from '@toeverything/infra';
|
||||
import { ReadonlyDocStorage } from '@toeverything/infra';
|
||||
import { useCallback, useEffect, useState } from 'react';
|
||||
import type { LoaderFunction } from 'react-router-dom';
|
||||
import {
|
||||
@@ -152,8 +152,8 @@ export const Component = () => {
|
||||
])
|
||||
.addImpl(RemoteBlobStorage('static'), StaticBlobStorage)
|
||||
.addImpl(
|
||||
LocalSyncStorage,
|
||||
ReadonlyMappingSyncStorage({
|
||||
DocStorageImpl,
|
||||
new ReadonlyDocStorage({
|
||||
[workspaceId]: new Uint8Array(workspaceArrayBuffer),
|
||||
[pageId]: new Uint8Array(pageArrayBuffer),
|
||||
})
|
||||
@@ -161,8 +161,8 @@ export const Component = () => {
|
||||
}
|
||||
);
|
||||
|
||||
workspace.engine.sync
|
||||
.waitForSynced()
|
||||
workspace.engine
|
||||
.waitForRootDocReady()
|
||||
.then(() => {
|
||||
const { page } = workspace.services.get(PageManager).open(pageId);
|
||||
|
||||
|
||||
@@ -304,7 +304,10 @@ export const DetailPage = ({ pageId }: { pageId: string }): ReactElement => {
|
||||
|
||||
// set sync engine priority target
|
||||
useEffect(() => {
|
||||
currentWorkspace.setPriorityRule(id => id.endsWith(pageId));
|
||||
currentWorkspace.setPriorityLoad(pageId, 10);
|
||||
return () => {
|
||||
currentWorkspace.setPriorityLoad(pageId, 5);
|
||||
};
|
||||
}, [currentWorkspace, pageId]);
|
||||
|
||||
const jumpOnce = useLiveData(pageRecord?.meta.map(meta => meta.jumpOnce));
|
||||
|
||||
@@ -70,7 +70,8 @@ export const Component = (): ReactElement => {
|
||||
}, [meta, workspaceManager, workspace, currentWorkspaceService]);
|
||||
|
||||
// avoid doing operation, before workspace is loaded
|
||||
const isRootDocLoaded = useLiveData(workspace?.engine.sync.isRootDocLoaded);
|
||||
const isRootDocReady =
|
||||
useLiveData(workspace?.engine.rootDocState)?.ready ?? false;
|
||||
|
||||
// if listLoading is false, we can show 404 page, otherwise we should show loading page.
|
||||
if (listLoading === false && meta === undefined) {
|
||||
@@ -81,7 +82,7 @@ export const Component = (): ReactElement => {
|
||||
return <WorkspaceFallback key="workspaceLoading" />;
|
||||
}
|
||||
|
||||
if (!isRootDocLoaded) {
|
||||
if (!isRootDocReady) {
|
||||
return (
|
||||
<ServiceProviderContext.Provider value={workspace.services}>
|
||||
<WorkspaceFallback key="workspaceLoading" />
|
||||
|
||||
@@ -34,7 +34,7 @@ export async function configureTestingEnvironment() {
|
||||
})
|
||||
);
|
||||
|
||||
await workspace.engine.sync.waitForSynced();
|
||||
await workspace.engine.waitForSynced();
|
||||
|
||||
const { page } = workspace.services.get(PageManager).open('page0');
|
||||
|
||||
|
||||
@@ -47,6 +47,7 @@
|
||||
"@types/mixpanel-browser": "^2.49.0",
|
||||
"@types/uuid": "^9.0.8",
|
||||
"builder-util-runtime": "^9.2.4",
|
||||
"core-js": "^3.36.1",
|
||||
"cross-env": "^7.0.3",
|
||||
"electron": "^29.0.1",
|
||||
"electron-log": "^5.1.1",
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import './polyfill/dispose';
|
||||
// Side effect import, "declare global"
|
||||
import '@affine/env/constant';
|
||||
|
||||
|
||||
2
packages/frontend/electron/renderer/polyfill/dispose.ts
Normal file
2
packages/frontend/electron/renderer/polyfill/dispose.ts
Normal file
@@ -0,0 +1,2 @@
|
||||
import 'core-js/modules/esnext.symbol.async-dispose';
|
||||
import 'core-js/modules/esnext.symbol.dispose';
|
||||
@@ -1167,5 +1167,7 @@
|
||||
"com.affine.delete-tags.count_other": "{{count}} tags deleted",
|
||||
"com.affine.workbench.split-view-menu.keep-this-one": "Solo View",
|
||||
"com.affine.workbench.split-view.page-menu-open": "Open in split view",
|
||||
"com.affine.search-tags.placeholder": "Type here ..."
|
||||
"com.affine.search-tags.placeholder": "Type here ...",
|
||||
"com.affine.resetSyncStatus.button": "Reset Sync",
|
||||
"com.affine.resetSyncStatus.description": "This operation may fix some synchronization issues."
|
||||
}
|
||||
|
||||
@@ -14,6 +14,7 @@
|
||||
"@affine/core": "workspace:*",
|
||||
"@affine/env": "workspace:*",
|
||||
"@juggle/resize-observer": "^3.4.0",
|
||||
"core-js": "^3.36.1",
|
||||
"intl-segmenter-polyfill-rs": "^0.1.7",
|
||||
"mixpanel-browser": "^2.49.0",
|
||||
"react": "^18.2.0",
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import './polyfill/dispose';
|
||||
import './polyfill/intl-segmenter';
|
||||
import './polyfill/request-idle-callback';
|
||||
import './polyfill/resize-observer';
|
||||
|
||||
2
packages/frontend/web/src/polyfill/dispose.ts
Normal file
2
packages/frontend/web/src/polyfill/dispose.ts
Normal file
@@ -0,0 +1,2 @@
|
||||
import 'core-js/modules/esnext.symbol.async-dispose';
|
||||
import 'core-js/modules/esnext.symbol.dispose';
|
||||
@@ -1,5 +1,5 @@
|
||||
import { DebugLogger } from '@affine/debug';
|
||||
import type { AwarenessProvider, RejectByVersion } from '@toeverything/infra';
|
||||
import type { AwarenessProvider } from '@toeverything/infra';
|
||||
import {
|
||||
applyAwarenessUpdate,
|
||||
type Awareness,
|
||||
@@ -135,7 +135,7 @@ export class AffineCloudAwarenessProvider implements AwarenessProvider {
|
||||
);
|
||||
};
|
||||
|
||||
handleReject = (_msg: RejectByVersion) => {
|
||||
handleReject = () => {
|
||||
this.socket.off('server-version-rejected', this.handleReject);
|
||||
this.disconnect();
|
||||
this.socket.disconnect();
|
||||
|
||||
24
packages/frontend/workspace-impl/src/cloud/doc-static.ts
Normal file
24
packages/frontend/workspace-impl/src/cloud/doc-static.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
import { fetchWithTraceReport } from '@affine/graphql';
|
||||
|
||||
export class AffineStaticDocStorage {
|
||||
name = 'affine-cloud-static';
|
||||
constructor(private readonly workspaceId: string) {}
|
||||
|
||||
async pull(
|
||||
docId: string
|
||||
): Promise<{ data: Uint8Array; state?: Uint8Array | undefined } | null> {
|
||||
const response = await fetchWithTraceReport(
|
||||
`/api/workspaces/${this.workspaceId}/docs/${docId}`,
|
||||
{
|
||||
priority: 'high',
|
||||
}
|
||||
);
|
||||
if (response.ok) {
|
||||
const arrayBuffer = await response.arrayBuffer();
|
||||
|
||||
return { data: new Uint8Array(arrayBuffer) };
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
183
packages/frontend/workspace-impl/src/cloud/doc.ts
Normal file
183
packages/frontend/workspace-impl/src/cloud/doc.ts
Normal file
@@ -0,0 +1,183 @@
|
||||
import { DebugLogger } from '@affine/debug';
|
||||
import { type DocServer, throwIfAborted } from '@toeverything/infra';
|
||||
import type { Socket } from 'socket.io-client';
|
||||
|
||||
import { getIoManager } from '../utils/affine-io';
|
||||
import { base64ToUint8Array, uint8ArrayToBase64 } from '../utils/base64';
|
||||
|
||||
(window as any)._TEST_SIMULATE_SYNC_LAG = Promise.resolve();
|
||||
|
||||
const logger = new DebugLogger('affine-cloud-doc-engine-server');
|
||||
|
||||
export class AffineCloudDocEngineServer implements DocServer {
|
||||
socket = null as unknown as Socket;
|
||||
interruptCb: ((reason: string) => void) | null = null;
|
||||
SEND_TIMEOUT = 30000;
|
||||
|
||||
constructor(private readonly workspaceId: string) {}
|
||||
|
||||
private async clientHandShake() {
|
||||
await this.socket.emitWithAck('client-handshake-sync', {
|
||||
workspaceId: this.workspaceId,
|
||||
version: runtimeConfig.appVersion,
|
||||
});
|
||||
}
|
||||
|
||||
async pullDoc(docId: string, state: Uint8Array) {
|
||||
// for testing
|
||||
await (window as any)._TEST_SIMULATE_SYNC_LAG;
|
||||
|
||||
const stateVector = state ? await uint8ArrayToBase64(state) : undefined;
|
||||
|
||||
const response:
|
||||
| { error: any }
|
||||
| { data: { missing: string; state: string; timestamp: number } } =
|
||||
await this.socket.timeout(this.SEND_TIMEOUT).emitWithAck('doc-load-v2', {
|
||||
workspaceId: this.workspaceId,
|
||||
guid: docId,
|
||||
stateVector,
|
||||
});
|
||||
|
||||
if ('error' in response) {
|
||||
// TODO: result `EventError` with server
|
||||
if (response.error.code === 'DOC_NOT_FOUND') {
|
||||
return null;
|
||||
} else {
|
||||
throw new Error(response.error.message);
|
||||
}
|
||||
} else {
|
||||
return {
|
||||
data: base64ToUint8Array(response.data.missing),
|
||||
stateVector: response.data.state
|
||||
? base64ToUint8Array(response.data.state)
|
||||
: undefined,
|
||||
serverClock: response.data.timestamp,
|
||||
};
|
||||
}
|
||||
}
|
||||
async pushDoc(docId: string, data: Uint8Array) {
|
||||
const payload = await uint8ArrayToBase64(data);
|
||||
|
||||
const response: {
|
||||
// TODO: reuse `EventError` with server
|
||||
error?: any;
|
||||
data: { timestamp: number };
|
||||
} = await this.socket
|
||||
.timeout(this.SEND_TIMEOUT)
|
||||
.emitWithAck('client-update-v2', {
|
||||
workspaceId: this.workspaceId,
|
||||
guid: docId,
|
||||
updates: [payload],
|
||||
});
|
||||
|
||||
// TODO: raise error with different code to users
|
||||
if (response.error) {
|
||||
logger.error('client-update-v2 error', {
|
||||
workspaceId: this.workspaceId,
|
||||
guid: docId,
|
||||
response,
|
||||
});
|
||||
|
||||
throw new Error(response.error);
|
||||
}
|
||||
|
||||
return { serverClock: response.data.timestamp };
|
||||
}
|
||||
async loadServerClock(after: number): Promise<Map<string, number>> {
|
||||
const response: {
|
||||
// TODO: reuse `EventError` with server
|
||||
error?: any;
|
||||
data: Record<string, number>;
|
||||
} = await this.socket
|
||||
.timeout(this.SEND_TIMEOUT)
|
||||
.emitWithAck('client-pre-sync', {
|
||||
workspaceId: this.workspaceId,
|
||||
timestamp: after,
|
||||
});
|
||||
|
||||
if (response.error) {
|
||||
logger.error('client-pre-sync error', {
|
||||
workspaceId: this.workspaceId,
|
||||
response,
|
||||
});
|
||||
|
||||
throw new Error(response.error);
|
||||
}
|
||||
|
||||
return new Map(Object.entries(response.data));
|
||||
}
|
||||
async subscribeAllDocs(
|
||||
cb: (updates: {
|
||||
docId: string;
|
||||
data: Uint8Array;
|
||||
serverClock: number;
|
||||
}) => void
|
||||
): Promise<() => void> {
|
||||
const handleUpdate = async (message: {
|
||||
workspaceId: string;
|
||||
guid: string;
|
||||
updates: string[];
|
||||
timestamp: number;
|
||||
}) => {
|
||||
if (message.workspaceId === this.workspaceId) {
|
||||
message.updates.forEach(update => {
|
||||
cb({
|
||||
docId: message.guid,
|
||||
data: base64ToUint8Array(update),
|
||||
serverClock: message.timestamp,
|
||||
});
|
||||
});
|
||||
}
|
||||
};
|
||||
this.socket.on('server-updates', handleUpdate);
|
||||
|
||||
return () => {
|
||||
this.socket.off('server-updates', handleUpdate);
|
||||
};
|
||||
}
|
||||
async waitForConnectingServer(signal: AbortSignal): Promise<void> {
|
||||
const socket = getIoManager().socket('/');
|
||||
this.socket = socket;
|
||||
this.socket.on('server-version-rejected', this.handleVersionRejected);
|
||||
this.socket.on('disconnect', this.handleDisconnect);
|
||||
|
||||
throwIfAborted(signal);
|
||||
if (this.socket.connected) {
|
||||
await this.clientHandShake();
|
||||
} else {
|
||||
this.socket.connect();
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
this.socket.on('connect', () => {
|
||||
resolve();
|
||||
});
|
||||
signal.addEventListener('abort', () => {
|
||||
reject('aborted');
|
||||
});
|
||||
});
|
||||
throwIfAborted(signal);
|
||||
await this.clientHandShake();
|
||||
}
|
||||
}
|
||||
disconnectServer(): void {
|
||||
if (!this.socket) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.socket.emit('client-leave-sync', this.workspaceId);
|
||||
this.socket.off('server-version-rejected', this.handleVersionRejected);
|
||||
this.socket.off('disconnect', this.handleDisconnect);
|
||||
this.socket = null as unknown as Socket;
|
||||
}
|
||||
onInterrupted = (cb: (reason: string) => void) => {
|
||||
this.interruptCb = cb;
|
||||
};
|
||||
handleInterrupted = (reason: string) => {
|
||||
this.interruptCb?.(reason);
|
||||
};
|
||||
handleDisconnect = (reason: Socket.DisconnectReason) => {
|
||||
this.interruptCb?.(reason);
|
||||
};
|
||||
handleVersionRejected = () => {
|
||||
this.interruptCb?.('Client version rejected');
|
||||
};
|
||||
}
|
||||
@@ -1,6 +1,4 @@
|
||||
export * from './awareness';
|
||||
export * from './blob';
|
||||
export * from './consts';
|
||||
export { AffineCloudBlobStorage } from './blob';
|
||||
export { CLOUD_WORKSPACE_CHANGED_BROADCAST_CHANNEL_KEY } from './consts';
|
||||
export * from './list';
|
||||
export * from './sync';
|
||||
export * from './workspace-factory';
|
||||
|
||||
@@ -10,7 +10,6 @@ import { DocCollection } from '@blocksuite/store';
|
||||
import type { WorkspaceListProvider } from '@toeverything/infra';
|
||||
import {
|
||||
type BlobStorage,
|
||||
type SyncStorage,
|
||||
type WorkspaceInfo,
|
||||
type WorkspaceMetadata,
|
||||
} from '@toeverything/infra';
|
||||
@@ -21,10 +20,10 @@ import { applyUpdate, encodeStateAsUpdate } from 'yjs';
|
||||
|
||||
import { IndexedDBBlobStorage } from '../local/blob-indexeddb';
|
||||
import { SQLiteBlobStorage } from '../local/blob-sqlite';
|
||||
import { IndexedDBSyncStorage } from '../local/sync-indexeddb';
|
||||
import { SQLiteSyncStorage } from '../local/sync-sqlite';
|
||||
import { IndexedDBDocStorage } from '../local/doc-indexeddb';
|
||||
import { SqliteDocStorage } from '../local/doc-sqlite';
|
||||
import { CLOUD_WORKSPACE_CHANGED_BROADCAST_CHANNEL_KEY } from './consts';
|
||||
import { AffineStaticSyncStorage } from './sync';
|
||||
import { AffineStaticDocStorage } from './doc-static';
|
||||
|
||||
async function getCloudWorkspaceList() {
|
||||
try {
|
||||
@@ -94,17 +93,20 @@ export class CloudWorkspaceListProvider implements WorkspaceListProvider {
|
||||
const blobStorage = environment.isDesktop
|
||||
? new SQLiteBlobStorage(workspaceId)
|
||||
: new IndexedDBBlobStorage(workspaceId);
|
||||
const syncStorage = environment.isDesktop
|
||||
? new SQLiteSyncStorage(workspaceId)
|
||||
: new IndexedDBSyncStorage(workspaceId);
|
||||
const docStorage = environment.isDesktop
|
||||
? new SqliteDocStorage(workspaceId)
|
||||
: new IndexedDBDocStorage(workspaceId);
|
||||
|
||||
// apply initial state
|
||||
await initial(docCollection, blobStorage);
|
||||
|
||||
// save workspace to local storage, should be vary fast
|
||||
await syncStorage.push(workspaceId, encodeStateAsUpdate(docCollection.doc));
|
||||
await docStorage.doc.set(
|
||||
workspaceId,
|
||||
encodeStateAsUpdate(docCollection.doc)
|
||||
);
|
||||
for (const subdocs of docCollection.doc.getSubdocs()) {
|
||||
await syncStorage.push(subdocs.guid, encodeStateAsUpdate(subdocs));
|
||||
await docStorage.doc.set(subdocs.guid, encodeStateAsUpdate(subdocs));
|
||||
}
|
||||
|
||||
// notify all browser tabs, so they can update their workspace list
|
||||
@@ -155,13 +157,13 @@ export class CloudWorkspaceListProvider implements WorkspaceListProvider {
|
||||
// get information from both cloud and local storage
|
||||
|
||||
// we use affine 'static' storage here, which use http protocol, no need to websocket.
|
||||
const cloudStorage: SyncStorage = new AffineStaticSyncStorage(id);
|
||||
const localStorage = environment.isDesktop
|
||||
? new SQLiteSyncStorage(id)
|
||||
: new IndexedDBSyncStorage(id);
|
||||
const cloudStorage = new AffineStaticDocStorage(id);
|
||||
const docStorage = environment.isDesktop
|
||||
? new SqliteDocStorage(id)
|
||||
: new IndexedDBDocStorage(id);
|
||||
// download root doc
|
||||
const localData = await localStorage.pull(id, new Uint8Array([]));
|
||||
const cloudData = await cloudStorage.pull(id, new Uint8Array([]));
|
||||
const localData = await docStorage.doc.get(id);
|
||||
const cloudData = await cloudStorage.pull(id);
|
||||
|
||||
if (!cloudData && !localData) {
|
||||
return;
|
||||
@@ -172,7 +174,7 @@ export class CloudWorkspaceListProvider implements WorkspaceListProvider {
|
||||
schema: globalBlockSuiteSchema,
|
||||
});
|
||||
|
||||
if (localData) applyUpdate(bs.doc, localData.data);
|
||||
if (localData) applyUpdate(bs.doc, localData);
|
||||
if (cloudData) applyUpdate(bs.doc, cloudData.data);
|
||||
|
||||
return {
|
||||
|
||||
@@ -1,208 +0,0 @@
|
||||
import { DebugLogger } from '@affine/debug';
|
||||
import { fetchWithTraceReport } from '@affine/graphql';
|
||||
import {
|
||||
type RejectByVersion,
|
||||
type SyncErrorMessage,
|
||||
type SyncStorage,
|
||||
} from '@toeverything/infra';
|
||||
import type { CleanupService } from '@toeverything/infra/lifecycle';
|
||||
|
||||
import { getIoManager } from '../utils/affine-io';
|
||||
import { base64ToUint8Array, uint8ArrayToBase64 } from '../utils/base64';
|
||||
|
||||
const logger = new DebugLogger('affine:storage:socketio');
|
||||
|
||||
(window as any)._TEST_SIMULATE_SYNC_LAG = Promise.resolve();
|
||||
|
||||
export class AffineSyncStorage implements SyncStorage {
|
||||
name = 'affine-cloud';
|
||||
|
||||
SEND_TIMEOUT = 30000;
|
||||
|
||||
socket = getIoManager().socket('/');
|
||||
|
||||
errorMessage?: SyncErrorMessage;
|
||||
|
||||
constructor(
|
||||
private readonly workspaceId: string,
|
||||
cleanupService: CleanupService
|
||||
) {
|
||||
this.socket.on('connect', this.handleConnect);
|
||||
this.socket.on('server-version-rejected', this.handleReject);
|
||||
|
||||
if (this.socket.connected) {
|
||||
this.handleConnect();
|
||||
} else {
|
||||
this.socket.connect();
|
||||
}
|
||||
|
||||
cleanupService.add(() => {
|
||||
this.cleanup();
|
||||
});
|
||||
}
|
||||
|
||||
handleConnect = () => {
|
||||
this.socket.emit(
|
||||
'client-handshake-sync',
|
||||
{
|
||||
workspaceId: this.workspaceId,
|
||||
version: runtimeConfig.appVersion,
|
||||
},
|
||||
(res: any) => {
|
||||
logger.debug('client handshake finished', res);
|
||||
}
|
||||
);
|
||||
};
|
||||
|
||||
handleReject = (message: RejectByVersion) => {
|
||||
this.socket.off('server-version-rejected', this.handleReject);
|
||||
this.cleanup();
|
||||
this.socket.disconnect();
|
||||
this.errorMessage = { type: 'outdated', message };
|
||||
};
|
||||
|
||||
async pull(
|
||||
docId: string,
|
||||
state: Uint8Array
|
||||
): Promise<{ data: Uint8Array; state?: Uint8Array } | null> {
|
||||
// for testing
|
||||
await (window as any)._TEST_SIMULATE_SYNC_LAG;
|
||||
|
||||
const stateVector = state ? await uint8ArrayToBase64(state) : undefined;
|
||||
|
||||
logger.debug('doc-load-v2', {
|
||||
workspaceId: this.workspaceId,
|
||||
guid: docId,
|
||||
stateVector,
|
||||
});
|
||||
|
||||
const response:
|
||||
| { error: any }
|
||||
| { data: { missing: string; state: string } } = await this.socket
|
||||
.timeout(this.SEND_TIMEOUT)
|
||||
.emitWithAck('doc-load-v2', {
|
||||
workspaceId: this.workspaceId,
|
||||
guid: docId,
|
||||
stateVector,
|
||||
});
|
||||
|
||||
logger.debug('doc-load callback', {
|
||||
workspaceId: this.workspaceId,
|
||||
guid: docId,
|
||||
stateVector,
|
||||
response,
|
||||
});
|
||||
|
||||
if ('error' in response) {
|
||||
// TODO: result `EventError` with server
|
||||
if (response.error.code === 'DOC_NOT_FOUND') {
|
||||
return null;
|
||||
} else {
|
||||
throw new Error(response.error.message);
|
||||
}
|
||||
} else {
|
||||
return {
|
||||
data: base64ToUint8Array(response.data.missing),
|
||||
state: response.data.state
|
||||
? base64ToUint8Array(response.data.state)
|
||||
: undefined,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
async push(docId: string, update: Uint8Array) {
|
||||
logger.debug('client-update-v2', {
|
||||
workspaceId: this.workspaceId,
|
||||
guid: docId,
|
||||
update,
|
||||
});
|
||||
|
||||
const payload = await uint8ArrayToBase64(update);
|
||||
|
||||
const response: {
|
||||
// TODO: reuse `EventError` with server
|
||||
error?: any;
|
||||
data: any;
|
||||
} = await this.socket
|
||||
.timeout(this.SEND_TIMEOUT)
|
||||
.emitWithAck('client-update-v2', {
|
||||
workspaceId: this.workspaceId,
|
||||
guid: docId,
|
||||
updates: [payload],
|
||||
});
|
||||
|
||||
// TODO: raise error with different code to users
|
||||
if (response.error) {
|
||||
logger.error('client-update-v2 error', {
|
||||
workspaceId: this.workspaceId,
|
||||
guid: docId,
|
||||
response,
|
||||
});
|
||||
|
||||
throw new Error(response.error);
|
||||
}
|
||||
}
|
||||
|
||||
async subscribe(
|
||||
cb: (docId: string, data: Uint8Array) => void,
|
||||
disconnect: (reason: string) => void
|
||||
) {
|
||||
const handleUpdate = async (message: {
|
||||
workspaceId: string;
|
||||
guid: string;
|
||||
updates: string[];
|
||||
}) => {
|
||||
if (message.workspaceId === this.workspaceId) {
|
||||
message.updates.forEach(update => {
|
||||
cb(message.guid, base64ToUint8Array(update));
|
||||
});
|
||||
}
|
||||
};
|
||||
const handleDisconnect = (reason: string) => {
|
||||
this.socket.off('server-updates', handleUpdate);
|
||||
disconnect(reason);
|
||||
};
|
||||
this.socket.on('server-updates', handleUpdate);
|
||||
|
||||
this.socket.on('disconnect', handleDisconnect);
|
||||
|
||||
return () => {
|
||||
this.socket.off('server-updates', handleUpdate);
|
||||
this.socket.off('disconnect', handleDisconnect);
|
||||
};
|
||||
}
|
||||
|
||||
cleanup() {
|
||||
this.socket.emit('client-leave-sync', this.workspaceId);
|
||||
this.socket.off('connect', this.handleConnect);
|
||||
}
|
||||
}
|
||||
|
||||
export class AffineStaticSyncStorage implements SyncStorage {
|
||||
name = 'affine-cloud-static';
|
||||
constructor(private readonly workspaceId: string) {}
|
||||
|
||||
async pull(
|
||||
docId: string
|
||||
): Promise<{ data: Uint8Array; state?: Uint8Array | undefined } | null> {
|
||||
const response = await fetchWithTraceReport(
|
||||
`/api/workspaces/${this.workspaceId}/docs/${docId}`,
|
||||
{
|
||||
priority: 'high',
|
||||
}
|
||||
);
|
||||
if (response.ok) {
|
||||
const arrayBuffer = await response.arrayBuffer();
|
||||
|
||||
return { data: new Uint8Array(arrayBuffer) };
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
push(): Promise<void> {
|
||||
throw new Error('Method not implemented.');
|
||||
}
|
||||
subscribe(): Promise<() => void> {
|
||||
throw new Error('Method not implemented.');
|
||||
}
|
||||
}
|
||||
@@ -3,19 +3,19 @@ import type { WorkspaceFactory } from '@toeverything/infra';
|
||||
import {
|
||||
AwarenessContext,
|
||||
AwarenessProvider,
|
||||
DocServerImpl,
|
||||
RemoteBlobStorage,
|
||||
RemoteSyncStorage,
|
||||
WorkspaceIdContext,
|
||||
WorkspaceScope,
|
||||
} from '@toeverything/infra';
|
||||
import type { ServiceCollection } from '@toeverything/infra/di';
|
||||
import { CleanupService } from '@toeverything/infra/lifecycle';
|
||||
|
||||
import { LocalWorkspaceFactory } from '../local';
|
||||
import { IndexedDBBlobStorage, SQLiteBlobStorage } from '../local';
|
||||
import { IndexedDBBlobStorage } from '../local/blob-indexeddb';
|
||||
import { SQLiteBlobStorage } from '../local/blob-sqlite';
|
||||
import { AffineCloudAwarenessProvider } from './awareness';
|
||||
import { AffineCloudBlobStorage } from './blob';
|
||||
import { AffineSyncStorage } from './sync';
|
||||
import { AffineCloudDocEngineServer } from './doc';
|
||||
|
||||
export class CloudWorkspaceFactory implements WorkspaceFactory {
|
||||
name = WorkspaceFlavour.AFFINE_CLOUD;
|
||||
@@ -28,10 +28,7 @@ export class CloudWorkspaceFactory implements WorkspaceFactory {
|
||||
.addImpl(RemoteBlobStorage('affine-cloud'), AffineCloudBlobStorage, [
|
||||
WorkspaceIdContext,
|
||||
])
|
||||
.addImpl(RemoteSyncStorage('affine-cloud'), AffineSyncStorage, [
|
||||
WorkspaceIdContext,
|
||||
CleanupService,
|
||||
])
|
||||
.addImpl(DocServerImpl, AffineCloudDocEngineServer, [WorkspaceIdContext])
|
||||
.addImpl(
|
||||
AwarenessProvider('affine-cloud'),
|
||||
AffineCloudAwarenessProvider,
|
||||
|
||||
@@ -9,11 +9,8 @@ import {
|
||||
} from '@toeverything/infra';
|
||||
|
||||
import { CloudWorkspaceFactory, CloudWorkspaceListProvider } from './cloud';
|
||||
import {
|
||||
LOCAL_WORKSPACE_LOCAL_STORAGE_KEY,
|
||||
LocalWorkspaceFactory,
|
||||
LocalWorkspaceListProvider,
|
||||
} from './local';
|
||||
import { LocalWorkspaceFactory, LocalWorkspaceListProvider } from './local';
|
||||
import { LOCAL_WORKSPACE_LOCAL_STORAGE_KEY } from './local/consts';
|
||||
import { WorkspaceLocalStateImpl } from './local-state';
|
||||
|
||||
export * from './cloud';
|
||||
|
||||
@@ -1,31 +1,38 @@
|
||||
import type {
|
||||
GlobalState,
|
||||
Workspace,
|
||||
WorkspaceLocalState,
|
||||
import {
|
||||
type GlobalState,
|
||||
type Memento,
|
||||
type Workspace,
|
||||
type WorkspaceLocalState,
|
||||
wrapMemento,
|
||||
} from '@toeverything/infra';
|
||||
|
||||
export class WorkspaceLocalStateImpl implements WorkspaceLocalState {
|
||||
constructor(
|
||||
private readonly workspace: Workspace,
|
||||
private readonly globalState: GlobalState
|
||||
) {}
|
||||
wrapped: Memento;
|
||||
constructor(workspace: Workspace, globalState: GlobalState) {
|
||||
this.wrapped = wrapMemento(globalState, `workspace-state:${workspace.id}:`);
|
||||
}
|
||||
|
||||
keys(): string[] {
|
||||
return this.wrapped.keys();
|
||||
}
|
||||
|
||||
get<T>(key: string): T | null {
|
||||
return this.globalState.get<T>(
|
||||
`workspace-state:${this.workspace.id}:${key}`
|
||||
);
|
||||
return this.wrapped.get<T>(key);
|
||||
}
|
||||
|
||||
watch<T>(key: string) {
|
||||
return this.globalState.watch<T>(
|
||||
`workspace-state:${this.workspace.id}:${key}`
|
||||
);
|
||||
return this.wrapped.watch<T>(key);
|
||||
}
|
||||
|
||||
set<T>(key: string, value: T | null): void {
|
||||
return this.globalState.set<T>(
|
||||
`workspace-state:${this.workspace.id}:${key}`,
|
||||
value
|
||||
);
|
||||
return this.wrapped.set<T>(key, value);
|
||||
}
|
||||
|
||||
del(key: string): void {
|
||||
return this.wrapped.del(key);
|
||||
}
|
||||
|
||||
clear(): void {
|
||||
return this.wrapped.clear();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,195 +0,0 @@
|
||||
import 'fake-indexeddb/auto';
|
||||
|
||||
import { AffineSchemas } from '@blocksuite/blocks/schemas';
|
||||
import { DocCollection, Schema } from '@blocksuite/store';
|
||||
import { SyncEngine, SyncEngineStep, SyncPeerStep } from '@toeverything/infra';
|
||||
import { beforeEach, describe, expect, test, vi } from 'vitest';
|
||||
import { Doc } from 'yjs';
|
||||
|
||||
import { IndexedDBSyncStorage } from '..';
|
||||
import { createTestStorage } from './test-storage';
|
||||
|
||||
const schema = new Schema();
|
||||
|
||||
schema.register(AffineSchemas);
|
||||
|
||||
beforeEach(() => {
|
||||
vi.useFakeTimers({ toFake: ['requestIdleCallback'] });
|
||||
});
|
||||
|
||||
describe('SyncEngine', () => {
|
||||
test('basic - indexeddb', async () => {
|
||||
let prev: any;
|
||||
{
|
||||
const docCollection = new DocCollection({
|
||||
id: 'test - syncengine - indexeddb',
|
||||
|
||||
schema,
|
||||
});
|
||||
|
||||
const syncEngine = new SyncEngine(
|
||||
docCollection.doc,
|
||||
new IndexedDBSyncStorage(docCollection.doc.guid),
|
||||
[
|
||||
new IndexedDBSyncStorage(docCollection.doc.guid + '1'),
|
||||
new IndexedDBSyncStorage(docCollection.doc.guid + '2'),
|
||||
]
|
||||
);
|
||||
syncEngine.start();
|
||||
|
||||
const page = docCollection.createDoc({
|
||||
id: 'page0',
|
||||
});
|
||||
page.load();
|
||||
const pageBlockId = page.addBlock(
|
||||
'affine:page' as keyof BlockSuite.BlockModels,
|
||||
{
|
||||
title: new page.Text(''),
|
||||
}
|
||||
);
|
||||
page.addBlock(
|
||||
'affine:surface' as keyof BlockSuite.BlockModels,
|
||||
{},
|
||||
pageBlockId
|
||||
);
|
||||
const frameId = page.addBlock(
|
||||
'affine:note' as keyof BlockSuite.BlockModels,
|
||||
{},
|
||||
pageBlockId
|
||||
);
|
||||
page.addBlock(
|
||||
'affine:paragraph' as keyof BlockSuite.BlockModels,
|
||||
{},
|
||||
frameId
|
||||
);
|
||||
await syncEngine.waitForSynced();
|
||||
syncEngine.forceStop();
|
||||
prev = docCollection.doc.toJSON();
|
||||
}
|
||||
|
||||
{
|
||||
const docCollection = new DocCollection({
|
||||
id: 'test - syncengine - indexeddb',
|
||||
|
||||
schema,
|
||||
});
|
||||
const syncEngine = new SyncEngine(
|
||||
docCollection.doc,
|
||||
new IndexedDBSyncStorage(docCollection.doc.guid),
|
||||
[]
|
||||
);
|
||||
syncEngine.start();
|
||||
await syncEngine.waitForSynced();
|
||||
expect(docCollection.doc.toJSON()).toEqual({
|
||||
...prev,
|
||||
});
|
||||
syncEngine.forceStop();
|
||||
}
|
||||
|
||||
{
|
||||
const docCollection = new DocCollection({
|
||||
id: 'test - syncengine - indexeddb',
|
||||
|
||||
schema,
|
||||
});
|
||||
const syncEngine = new SyncEngine(
|
||||
docCollection.doc,
|
||||
new IndexedDBSyncStorage(docCollection.doc.guid + '1'),
|
||||
[]
|
||||
);
|
||||
syncEngine.start();
|
||||
await syncEngine.waitForSynced();
|
||||
expect(docCollection.doc.toJSON()).toEqual({
|
||||
...prev,
|
||||
});
|
||||
syncEngine.forceStop();
|
||||
}
|
||||
|
||||
{
|
||||
const docCollection = new DocCollection({
|
||||
id: 'test - syncengine - indexeddb',
|
||||
|
||||
schema,
|
||||
});
|
||||
const syncEngine = new SyncEngine(
|
||||
docCollection.doc,
|
||||
new IndexedDBSyncStorage(docCollection.doc.guid + '2'),
|
||||
[]
|
||||
);
|
||||
syncEngine.start();
|
||||
await syncEngine.waitForSynced();
|
||||
expect(docCollection.doc.toJSON()).toEqual({
|
||||
...prev,
|
||||
});
|
||||
syncEngine.forceStop();
|
||||
}
|
||||
});
|
||||
|
||||
test('status', async () => {
|
||||
const ydoc = new Doc({ guid: 'test - syncengine - status' });
|
||||
|
||||
const localStorage = createTestStorage(new IndexedDBSyncStorage(ydoc.guid));
|
||||
const remoteStorage = createTestStorage(
|
||||
new IndexedDBSyncStorage(ydoc.guid + '1')
|
||||
);
|
||||
|
||||
localStorage.pausePull();
|
||||
localStorage.pausePush();
|
||||
remoteStorage.pausePull();
|
||||
remoteStorage.pausePush();
|
||||
|
||||
const syncEngine = new SyncEngine(ydoc, localStorage, [remoteStorage]);
|
||||
expect(syncEngine.status.step).toEqual(SyncEngineStep.Stopped);
|
||||
|
||||
syncEngine.start();
|
||||
|
||||
await vi.waitFor(() => {
|
||||
expect(syncEngine.status.step).toEqual(SyncEngineStep.Syncing);
|
||||
expect(syncEngine.status.local?.step).toEqual(
|
||||
SyncPeerStep.LoadingRootDoc
|
||||
);
|
||||
});
|
||||
|
||||
localStorage.resumePull();
|
||||
|
||||
await vi.waitFor(() => {
|
||||
expect(syncEngine.status.step).toEqual(SyncEngineStep.Syncing);
|
||||
expect(syncEngine.status.local?.step).toEqual(SyncPeerStep.Synced);
|
||||
expect(syncEngine.status.remotes[0]?.step).toEqual(
|
||||
SyncPeerStep.LoadingRootDoc
|
||||
);
|
||||
});
|
||||
|
||||
remoteStorage.resumePull();
|
||||
|
||||
await vi.waitFor(() => {
|
||||
expect(syncEngine.status.step).toEqual(SyncEngineStep.Synced);
|
||||
expect(syncEngine.status.remotes[0]?.step).toEqual(SyncPeerStep.Synced);
|
||||
expect(syncEngine.status.local?.step).toEqual(SyncPeerStep.Synced);
|
||||
});
|
||||
|
||||
ydoc.getArray('test').insert(0, [1, 2, 3]);
|
||||
|
||||
await vi.waitFor(() => {
|
||||
expect(syncEngine.status.step).toEqual(SyncEngineStep.Syncing);
|
||||
expect(syncEngine.status.local?.step).toEqual(SyncPeerStep.Syncing);
|
||||
expect(syncEngine.status.remotes[0]?.step).toEqual(SyncPeerStep.Syncing);
|
||||
});
|
||||
|
||||
localStorage.resumePush();
|
||||
|
||||
await vi.waitFor(() => {
|
||||
expect(syncEngine.status.step).toEqual(SyncEngineStep.Syncing);
|
||||
expect(syncEngine.status.local?.step).toEqual(SyncPeerStep.Synced);
|
||||
expect(syncEngine.status.remotes[0]?.step).toEqual(SyncPeerStep.Syncing);
|
||||
});
|
||||
|
||||
remoteStorage.resumePush();
|
||||
|
||||
await vi.waitFor(() => {
|
||||
expect(syncEngine.status.step).toEqual(SyncEngineStep.Synced);
|
||||
expect(syncEngine.status.local?.step).toEqual(SyncPeerStep.Synced);
|
||||
expect(syncEngine.status.remotes[0]?.step).toEqual(SyncPeerStep.Synced);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,109 +0,0 @@
|
||||
import 'fake-indexeddb/auto';
|
||||
|
||||
import { AffineSchemas } from '@blocksuite/blocks/schemas';
|
||||
import { DocCollection, Schema } from '@blocksuite/store';
|
||||
import { SyncPeer, SyncPeerStep } from '@toeverything/infra';
|
||||
import { beforeEach, describe, expect, test, vi } from 'vitest';
|
||||
|
||||
import { IndexedDBSyncStorage } from '..';
|
||||
|
||||
const schema = new Schema();
|
||||
|
||||
schema.register(AffineSchemas);
|
||||
|
||||
beforeEach(() => {
|
||||
vi.useFakeTimers({ toFake: ['requestIdleCallback'] });
|
||||
});
|
||||
|
||||
describe('SyncPeer', () => {
|
||||
test('basic - indexeddb', async () => {
|
||||
let prev: any;
|
||||
{
|
||||
const docCollection = new DocCollection({
|
||||
id: 'test - syncpeer - indexeddb',
|
||||
|
||||
schema,
|
||||
});
|
||||
|
||||
const syncPeer = new SyncPeer(
|
||||
docCollection.doc,
|
||||
new IndexedDBSyncStorage(docCollection.doc.guid)
|
||||
);
|
||||
await syncPeer.waitForLoaded();
|
||||
|
||||
const page = docCollection.createDoc({
|
||||
id: 'page0',
|
||||
});
|
||||
page.load();
|
||||
const pageBlockId = page.addBlock(
|
||||
'affine:page' as keyof BlockSuite.BlockModels,
|
||||
{
|
||||
title: new page.Text(''),
|
||||
}
|
||||
);
|
||||
page.addBlock(
|
||||
'affine:surface' as keyof BlockSuite.BlockModels,
|
||||
{},
|
||||
pageBlockId
|
||||
);
|
||||
const frameId = page.addBlock(
|
||||
'affine:note' as keyof BlockSuite.BlockModels,
|
||||
{},
|
||||
pageBlockId
|
||||
);
|
||||
page.addBlock(
|
||||
'affine:paragraph' as keyof BlockSuite.BlockModels,
|
||||
{},
|
||||
frameId
|
||||
);
|
||||
await syncPeer.waitForSynced();
|
||||
syncPeer.stop();
|
||||
prev = docCollection.doc.toJSON();
|
||||
}
|
||||
|
||||
{
|
||||
const docCollection = new DocCollection({
|
||||
id: 'test - syncpeer - indexeddb',
|
||||
|
||||
schema,
|
||||
});
|
||||
const syncPeer = new SyncPeer(
|
||||
docCollection.doc,
|
||||
new IndexedDBSyncStorage(docCollection.doc.guid)
|
||||
);
|
||||
await syncPeer.waitForSynced();
|
||||
expect(docCollection.doc.toJSON()).toEqual({
|
||||
...prev,
|
||||
});
|
||||
syncPeer.stop();
|
||||
}
|
||||
});
|
||||
|
||||
test('status', async () => {
|
||||
const docCollection = new DocCollection({
|
||||
id: 'test - syncpeer - status',
|
||||
|
||||
schema,
|
||||
});
|
||||
|
||||
const syncPeer = new SyncPeer(
|
||||
docCollection.doc,
|
||||
new IndexedDBSyncStorage(docCollection.doc.guid)
|
||||
);
|
||||
expect(syncPeer.status.step).toBe(SyncPeerStep.LoadingRootDoc);
|
||||
await syncPeer.waitForSynced();
|
||||
expect(syncPeer.status.step).toBe(SyncPeerStep.Synced);
|
||||
|
||||
const page = docCollection.createDoc({
|
||||
id: 'page0',
|
||||
});
|
||||
expect(syncPeer.status.step).toBe(SyncPeerStep.LoadingSubDoc);
|
||||
page.load();
|
||||
await syncPeer.waitForSynced();
|
||||
page.addBlock('affine:page' as keyof BlockSuite.BlockModels, {
|
||||
title: new page.Text(''),
|
||||
});
|
||||
expect(syncPeer.status.step).toBe(SyncPeerStep.Syncing);
|
||||
syncPeer.stop();
|
||||
});
|
||||
});
|
||||
@@ -1,42 +0,0 @@
|
||||
import type { SyncStorage } from '@toeverything/infra';
|
||||
|
||||
export function createTestStorage(origin: SyncStorage) {
|
||||
const controler = {
|
||||
pausedPull: Promise.resolve(),
|
||||
resumePull: () => {},
|
||||
pausedPush: Promise.resolve(),
|
||||
resumePush: () => {},
|
||||
};
|
||||
|
||||
return {
|
||||
name: `${origin.name}(testing)`,
|
||||
pull(docId: string, state: Uint8Array) {
|
||||
return controler.pausedPull.then(() => origin.pull(docId, state));
|
||||
},
|
||||
push(docId: string, data: Uint8Array) {
|
||||
return controler.pausedPush.then(() => origin.push(docId, data));
|
||||
},
|
||||
subscribe(
|
||||
cb: (docId: string, data: Uint8Array) => void,
|
||||
disconnect: (reason: string) => void
|
||||
) {
|
||||
return origin.subscribe(cb, disconnect);
|
||||
},
|
||||
pausePull() {
|
||||
controler.pausedPull = new Promise(resolve => {
|
||||
controler.resumePull = resolve;
|
||||
});
|
||||
},
|
||||
resumePull() {
|
||||
controler.resumePull?.();
|
||||
},
|
||||
pausePush() {
|
||||
controler.pausedPush = new Promise(resolve => {
|
||||
controler.resumePush = resolve;
|
||||
});
|
||||
},
|
||||
resumePush() {
|
||||
controler.resumePush?.();
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,60 @@
|
||||
import type { DocEvent, DocEventBus } from '@toeverything/infra';
|
||||
|
||||
type LegacyChannelMessage = {
|
||||
type: 'db-updated';
|
||||
payload: {
|
||||
docId: string;
|
||||
update: Uint8Array;
|
||||
};
|
||||
__from_new_doc_engine?: boolean;
|
||||
};
|
||||
|
||||
export class BroadcastChannelDocEventBus implements DocEventBus {
|
||||
legacyChannel = new BroadcastChannel('indexeddb:' + this.workspaceId);
|
||||
senderChannel = new BroadcastChannel('doc:' + this.workspaceId);
|
||||
constructor(private readonly workspaceId: string) {
|
||||
this.legacyChannel.addEventListener(
|
||||
'message',
|
||||
(event: MessageEvent<LegacyChannelMessage>) => {
|
||||
if (event.data.__from_new_doc_engine) {
|
||||
return;
|
||||
}
|
||||
if (event.data.type === 'db-updated') {
|
||||
this.emit({
|
||||
type: 'LegacyClientUpdateCommitted',
|
||||
docId: event.data.payload.docId,
|
||||
update: event.data.payload.update,
|
||||
});
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
emit(event: DocEvent): void {
|
||||
if (
|
||||
event.type === 'ClientUpdateCommitted' ||
|
||||
event.type === 'ServerUpdateCommitted'
|
||||
) {
|
||||
this.legacyChannel.postMessage({
|
||||
type: 'db-updated',
|
||||
payload: {
|
||||
docId: event.docId,
|
||||
update: event.update,
|
||||
},
|
||||
__from_new_doc_engine: true,
|
||||
} satisfies LegacyChannelMessage);
|
||||
}
|
||||
this.senderChannel.postMessage(event);
|
||||
}
|
||||
|
||||
on(cb: (event: DocEvent) => void): () => void {
|
||||
const listener = (event: MessageEvent<DocEvent>) => {
|
||||
cb(event.data);
|
||||
};
|
||||
const channel = new BroadcastChannel('doc:' + this.workspaceId);
|
||||
channel.addEventListener('message', listener);
|
||||
return () => {
|
||||
channel.removeEventListener('message', listener);
|
||||
channel.close();
|
||||
};
|
||||
}
|
||||
}
|
||||
246
packages/frontend/workspace-impl/src/local/doc-indexeddb.ts
Normal file
246
packages/frontend/workspace-impl/src/local/doc-indexeddb.ts
Normal file
@@ -0,0 +1,246 @@
|
||||
import type { ByteKV, ByteKVBehavior, DocStorage } from '@toeverything/infra';
|
||||
import {
|
||||
type DBSchema,
|
||||
type IDBPDatabase,
|
||||
type IDBPObjectStore,
|
||||
openDB,
|
||||
} from 'idb';
|
||||
import { mergeUpdates } from 'yjs';
|
||||
|
||||
import { BroadcastChannelDocEventBus } from './doc-broadcast-channel';
|
||||
|
||||
function isEmptyUpdate(binary: Uint8Array) {
|
||||
return (
|
||||
binary.byteLength === 0 ||
|
||||
(binary.byteLength === 2 && binary[0] === 0 && binary[1] === 0)
|
||||
);
|
||||
}
|
||||
|
||||
export class IndexedDBDocStorage implements DocStorage {
|
||||
constructor(private readonly workspaceId: string) {}
|
||||
eventBus = new BroadcastChannelDocEventBus(this.workspaceId);
|
||||
readonly doc = new Doc();
|
||||
readonly syncMetadata = new KV(`${this.workspaceId}:sync-metadata`);
|
||||
readonly serverClock = new KV(`${this.workspaceId}:server-clock`);
|
||||
}
|
||||
|
||||
interface DocDBSchema extends DBSchema {
|
||||
workspace: {
|
||||
key: string;
|
||||
value: {
|
||||
id: string;
|
||||
updates: {
|
||||
timestamp: number;
|
||||
update: Uint8Array;
|
||||
}[];
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
type DocType = DocStorage['doc'];
|
||||
class Doc implements DocType {
|
||||
dbName = 'affine-local';
|
||||
dbPromise: Promise<IDBPDatabase<DocDBSchema>> | null = null;
|
||||
dbVersion = 1;
|
||||
|
||||
constructor() {}
|
||||
|
||||
upgradeDB(db: IDBPDatabase<DocDBSchema>) {
|
||||
db.createObjectStore('workspace', { keyPath: 'id' });
|
||||
}
|
||||
|
||||
getDb() {
|
||||
if (this.dbPromise === null) {
|
||||
this.dbPromise = openDB<DocDBSchema>(this.dbName, this.dbVersion, {
|
||||
upgrade: db => this.upgradeDB(db),
|
||||
});
|
||||
}
|
||||
return this.dbPromise;
|
||||
}
|
||||
|
||||
async get(docId: string): Promise<Uint8Array | null> {
|
||||
const db = await this.getDb();
|
||||
const store = db
|
||||
.transaction('workspace', 'readonly')
|
||||
.objectStore('workspace');
|
||||
const data = await store.get(docId);
|
||||
|
||||
if (!data) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const updates = data.updates
|
||||
.map(({ update }) => update)
|
||||
.filter(update => !isEmptyUpdate(update));
|
||||
const update = updates.length > 0 ? mergeUpdates(updates) : null;
|
||||
|
||||
return update;
|
||||
}
|
||||
|
||||
async set(docId: string, data: Uint8Array) {
|
||||
const db = await this.getDb();
|
||||
const store = db
|
||||
.transaction('workspace', 'readwrite')
|
||||
.objectStore('workspace');
|
||||
|
||||
const rows = [{ timestamp: Date.now(), update: data }];
|
||||
await store.put({
|
||||
id: docId,
|
||||
updates: rows,
|
||||
});
|
||||
}
|
||||
|
||||
async keys() {
|
||||
const db = await this.getDb();
|
||||
const store = db
|
||||
.transaction('workspace', 'readonly')
|
||||
.objectStore('workspace');
|
||||
|
||||
return store.getAllKeys();
|
||||
}
|
||||
|
||||
clear(): void | Promise<void> {
|
||||
return;
|
||||
}
|
||||
|
||||
del(_key: string): void | Promise<void> {
|
||||
return;
|
||||
}
|
||||
|
||||
async transaction<T>(
|
||||
cb: (transaction: ByteKVBehavior) => Promise<T>
|
||||
): Promise<T> {
|
||||
const db = await this.getDb();
|
||||
const store = db
|
||||
.transaction('workspace', 'readwrite')
|
||||
.objectStore('workspace');
|
||||
return await cb({
|
||||
async get(docId) {
|
||||
const data = await store.get(docId);
|
||||
|
||||
if (!data) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const { updates } = data;
|
||||
const update = mergeUpdates(updates.map(({ update }) => update));
|
||||
|
||||
return update;
|
||||
},
|
||||
keys() {
|
||||
return store.getAllKeys();
|
||||
},
|
||||
async set(docId, data) {
|
||||
const rows = [{ timestamp: Date.now(), update: data }];
|
||||
await store.put({
|
||||
id: docId,
|
||||
updates: rows,
|
||||
});
|
||||
},
|
||||
async clear() {
|
||||
return await store.clear();
|
||||
},
|
||||
async del(key) {
|
||||
return store.delete(key);
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
interface KvDBSchema extends DBSchema {
|
||||
kv: {
|
||||
key: string;
|
||||
value: { key: string; val: Uint8Array };
|
||||
};
|
||||
}
|
||||
|
||||
class KV implements ByteKV {
|
||||
constructor(private readonly dbName: string) {}
|
||||
|
||||
dbPromise: Promise<IDBPDatabase<KvDBSchema>> | null = null;
|
||||
dbVersion = 1;
|
||||
|
||||
upgradeDB(db: IDBPDatabase<KvDBSchema>) {
|
||||
db.createObjectStore('kv', { keyPath: 'key' });
|
||||
}
|
||||
|
||||
getDb() {
|
||||
if (this.dbPromise === null) {
|
||||
this.dbPromise = openDB<KvDBSchema>(this.dbName, this.dbVersion, {
|
||||
upgrade: db => this.upgradeDB(db),
|
||||
});
|
||||
}
|
||||
return this.dbPromise;
|
||||
}
|
||||
|
||||
async transaction<T>(
|
||||
cb: (transaction: ByteKVBehavior) => Promise<T>
|
||||
): Promise<T> {
|
||||
const db = await this.getDb();
|
||||
const store = db.transaction('kv', 'readwrite').objectStore('kv');
|
||||
|
||||
const behavior = new KVBehavior(store);
|
||||
return await cb(behavior);
|
||||
}
|
||||
|
||||
async get(key: string): Promise<Uint8Array | null> {
|
||||
const db = await this.getDb();
|
||||
const store = db.transaction('kv', 'readonly').objectStore('kv');
|
||||
return new KVBehavior(store).get(key);
|
||||
}
|
||||
async set(key: string, value: Uint8Array): Promise<void> {
|
||||
const db = await this.getDb();
|
||||
const store = db.transaction('kv', 'readwrite').objectStore('kv');
|
||||
return new KVBehavior(store).set(key, value);
|
||||
}
|
||||
async keys(): Promise<string[]> {
|
||||
const db = await this.getDb();
|
||||
const store = db.transaction('kv', 'readwrite').objectStore('kv');
|
||||
return new KVBehavior(store).keys();
|
||||
}
|
||||
async clear() {
|
||||
const db = await this.getDb();
|
||||
const store = db.transaction('kv', 'readwrite').objectStore('kv');
|
||||
return new KVBehavior(store).clear();
|
||||
}
|
||||
async del(key: string) {
|
||||
const db = await this.getDb();
|
||||
const store = db.transaction('kv', 'readwrite').objectStore('kv');
|
||||
return new KVBehavior(store).del(key);
|
||||
}
|
||||
}
|
||||
|
||||
class KVBehavior implements ByteKVBehavior {
|
||||
constructor(
|
||||
private readonly store: IDBPObjectStore<KvDBSchema, ['kv'], 'kv', any>
|
||||
) {}
|
||||
async get(key: string): Promise<Uint8Array | null> {
|
||||
const value = await this.store.get(key);
|
||||
return value?.val ?? null;
|
||||
}
|
||||
async set(key: string, value: Uint8Array): Promise<void> {
|
||||
if (this.store.put === undefined) {
|
||||
throw new Error('Cannot set in a readonly transaction');
|
||||
}
|
||||
await this.store.put({
|
||||
key: key,
|
||||
val: value,
|
||||
});
|
||||
}
|
||||
async keys(): Promise<string[]> {
|
||||
return await this.store.getAllKeys();
|
||||
}
|
||||
async del(key: string) {
|
||||
if (this.store.delete === undefined) {
|
||||
throw new Error('Cannot set in a readonly transaction');
|
||||
}
|
||||
return await this.store.delete(key);
|
||||
}
|
||||
|
||||
async clear() {
|
||||
if (this.store.clear === undefined) {
|
||||
throw new Error('Cannot set in a readonly transaction');
|
||||
}
|
||||
return await this.store.clear();
|
||||
}
|
||||
}
|
||||
186
packages/frontend/workspace-impl/src/local/doc-sqlite.ts
Normal file
186
packages/frontend/workspace-impl/src/local/doc-sqlite.ts
Normal file
@@ -0,0 +1,186 @@
|
||||
import { apis } from '@affine/electron-api';
|
||||
import {
|
||||
AsyncLock,
|
||||
type ByteKV,
|
||||
type ByteKVBehavior,
|
||||
type DocStorage,
|
||||
MemoryDocEventBus,
|
||||
} from '@toeverything/infra';
|
||||
import {
|
||||
type DBSchema,
|
||||
type IDBPDatabase,
|
||||
type IDBPObjectStore,
|
||||
openDB,
|
||||
} from 'idb';
|
||||
|
||||
export class SqliteDocStorage implements DocStorage {
|
||||
constructor(private readonly workspaceId: string) {}
|
||||
eventBus = new MemoryDocEventBus();
|
||||
readonly doc = new Doc(this.workspaceId);
|
||||
readonly syncMetadata = new KV(`${this.workspaceId}:sync-metadata`);
|
||||
readonly serverClock = new KV(`${this.workspaceId}:server-clock`);
|
||||
}
|
||||
|
||||
type DocType = DocStorage['doc'];
|
||||
|
||||
class Doc implements DocType {
|
||||
lock = new AsyncLock();
|
||||
constructor(private readonly workspaceId: string) {
|
||||
if (!apis?.db) {
|
||||
throw new Error('sqlite datasource is not available');
|
||||
}
|
||||
}
|
||||
|
||||
async transaction<T>(
|
||||
cb: (transaction: ByteKVBehavior) => Promise<T>
|
||||
): Promise<T> {
|
||||
using _lock = await this.lock.acquire();
|
||||
return await cb(this);
|
||||
}
|
||||
|
||||
keys(): string[] | Promise<string[]> {
|
||||
return [];
|
||||
}
|
||||
|
||||
async get(docId: string) {
|
||||
if (!apis?.db) {
|
||||
throw new Error('sqlite datasource is not available');
|
||||
}
|
||||
const update = await apis.db.getDocAsUpdates(
|
||||
this.workspaceId,
|
||||
this.workspaceId === docId ? undefined : docId
|
||||
);
|
||||
|
||||
if (update) {
|
||||
if (
|
||||
update.byteLength === 0 ||
|
||||
(update.byteLength === 2 && update[0] === 0 && update[1] === 0)
|
||||
) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return update;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
async set(docId: string, data: Uint8Array) {
|
||||
if (!apis?.db) {
|
||||
throw new Error('sqlite datasource is not available');
|
||||
}
|
||||
await apis.db.applyDocUpdate(
|
||||
this.workspaceId,
|
||||
data,
|
||||
this.workspaceId === docId ? undefined : docId
|
||||
);
|
||||
}
|
||||
|
||||
clear(): void | Promise<void> {
|
||||
return;
|
||||
}
|
||||
|
||||
del(): void | Promise<void> {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
interface KvDBSchema extends DBSchema {
|
||||
kv: {
|
||||
key: string;
|
||||
value: { key: string; val: Uint8Array };
|
||||
};
|
||||
}
|
||||
|
||||
class KV implements ByteKV {
|
||||
constructor(private readonly dbName: string) {}
|
||||
|
||||
dbPromise: Promise<IDBPDatabase<KvDBSchema>> | null = null;
|
||||
dbVersion = 1;
|
||||
|
||||
upgradeDB(db: IDBPDatabase<KvDBSchema>) {
|
||||
db.createObjectStore('kv', { keyPath: 'key' });
|
||||
}
|
||||
|
||||
getDb() {
|
||||
if (this.dbPromise === null) {
|
||||
this.dbPromise = openDB<KvDBSchema>(this.dbName, this.dbVersion, {
|
||||
upgrade: db => this.upgradeDB(db),
|
||||
});
|
||||
}
|
||||
return this.dbPromise;
|
||||
}
|
||||
|
||||
async transaction<T>(
|
||||
cb: (transaction: ByteKVBehavior) => Promise<T>
|
||||
): Promise<T> {
|
||||
const db = await this.getDb();
|
||||
const store = db.transaction('kv', 'readwrite').objectStore('kv');
|
||||
|
||||
const behavior = new KVBehavior(store);
|
||||
return await cb(behavior);
|
||||
}
|
||||
|
||||
async get(key: string): Promise<Uint8Array | null> {
|
||||
const db = await this.getDb();
|
||||
const store = db.transaction('kv', 'readonly').objectStore('kv');
|
||||
return new KVBehavior(store).get(key);
|
||||
}
|
||||
async set(key: string, value: Uint8Array): Promise<void> {
|
||||
const db = await this.getDb();
|
||||
const store = db.transaction('kv', 'readwrite').objectStore('kv');
|
||||
return new KVBehavior(store).set(key, value);
|
||||
}
|
||||
async keys(): Promise<string[]> {
|
||||
const db = await this.getDb();
|
||||
const store = db.transaction('kv', 'readwrite').objectStore('kv');
|
||||
return new KVBehavior(store).keys();
|
||||
}
|
||||
async clear() {
|
||||
const db = await this.getDb();
|
||||
const store = db.transaction('kv', 'readwrite').objectStore('kv');
|
||||
return new KVBehavior(store).clear();
|
||||
}
|
||||
async del(key: string) {
|
||||
const db = await this.getDb();
|
||||
const store = db.transaction('kv', 'readwrite').objectStore('kv');
|
||||
return new KVBehavior(store).del(key);
|
||||
}
|
||||
}
|
||||
|
||||
class KVBehavior implements ByteKVBehavior {
|
||||
constructor(
|
||||
private readonly store: IDBPObjectStore<KvDBSchema, ['kv'], 'kv', any>
|
||||
) {}
|
||||
|
||||
async get(key: string): Promise<Uint8Array | null> {
|
||||
const value = await this.store.get(key);
|
||||
return value?.val ?? null;
|
||||
}
|
||||
async set(key: string, value: Uint8Array): Promise<void> {
|
||||
if (this.store.put === undefined) {
|
||||
throw new Error('Cannot set in a readonly transaction');
|
||||
}
|
||||
await this.store.put({
|
||||
key: key,
|
||||
val: value,
|
||||
});
|
||||
}
|
||||
async keys(): Promise<string[]> {
|
||||
return await this.store.getAllKeys();
|
||||
}
|
||||
|
||||
async del(key: string) {
|
||||
if (this.store.delete === undefined) {
|
||||
throw new Error('Cannot set in a readonly transaction');
|
||||
}
|
||||
return await this.store.delete(key);
|
||||
}
|
||||
|
||||
async clear() {
|
||||
if (this.store.clear === undefined) {
|
||||
throw new Error('Cannot set in a readonly transaction');
|
||||
}
|
||||
return await this.store.clear();
|
||||
}
|
||||
}
|
||||
@@ -1,9 +1,3 @@
|
||||
export * from './awareness';
|
||||
export * from './blob-indexeddb';
|
||||
export * from './blob-sqlite';
|
||||
export * from './blob-static';
|
||||
export * from './consts';
|
||||
export { StaticBlobStorage } from './blob-static';
|
||||
export * from './list';
|
||||
export * from './sync-indexeddb';
|
||||
export * from './sync-sqlite';
|
||||
export * from './workspace-factory';
|
||||
|
||||
@@ -18,8 +18,8 @@ import {
|
||||
LOCAL_WORKSPACE_CREATED_BROADCAST_CHANNEL_KEY,
|
||||
LOCAL_WORKSPACE_LOCAL_STORAGE_KEY,
|
||||
} from './consts';
|
||||
import { IndexedDBSyncStorage } from './sync-indexeddb';
|
||||
import { SQLiteSyncStorage } from './sync-sqlite';
|
||||
import { IndexedDBDocStorage } from './doc-indexeddb';
|
||||
import { SqliteDocStorage } from './doc-sqlite';
|
||||
|
||||
export class LocalWorkspaceListProvider implements WorkspaceListProvider {
|
||||
name = WorkspaceFlavour.LOCAL;
|
||||
@@ -62,9 +62,9 @@ export class LocalWorkspaceListProvider implements WorkspaceListProvider {
|
||||
const blobStorage = environment.isDesktop
|
||||
? new SQLiteBlobStorage(id)
|
||||
: new IndexedDBBlobStorage(id);
|
||||
const syncStorage = environment.isDesktop
|
||||
? new SQLiteSyncStorage(id)
|
||||
: new IndexedDBSyncStorage(id);
|
||||
const docStorage = environment.isDesktop
|
||||
? new SqliteDocStorage(id)
|
||||
: new IndexedDBDocStorage(id);
|
||||
|
||||
const workspace = new DocCollection({
|
||||
id: id,
|
||||
@@ -76,9 +76,9 @@ export class LocalWorkspaceListProvider implements WorkspaceListProvider {
|
||||
await initial(workspace, blobStorage);
|
||||
|
||||
// save workspace to local storage
|
||||
await syncStorage.push(id, encodeStateAsUpdate(workspace.doc));
|
||||
await docStorage.doc.set(id, encodeStateAsUpdate(workspace.doc));
|
||||
for (const subdocs of workspace.doc.getSubdocs()) {
|
||||
await syncStorage.push(subdocs.guid, encodeStateAsUpdate(subdocs));
|
||||
await docStorage.doc.set(subdocs.guid, encodeStateAsUpdate(subdocs));
|
||||
}
|
||||
|
||||
// save workspace id to local storage
|
||||
@@ -128,9 +128,9 @@ export class LocalWorkspaceListProvider implements WorkspaceListProvider {
|
||||
async getInformation(id: string): Promise<WorkspaceInfo | undefined> {
|
||||
// get information from root doc
|
||||
const storage = environment.isDesktop
|
||||
? new SQLiteSyncStorage(id)
|
||||
: new IndexedDBSyncStorage(id);
|
||||
const data = await storage.pull(id, new Uint8Array([]));
|
||||
? new SqliteDocStorage(id)
|
||||
: new IndexedDBDocStorage(id);
|
||||
const data = await storage.doc.get(id);
|
||||
|
||||
if (!data) {
|
||||
return;
|
||||
@@ -141,7 +141,7 @@ export class LocalWorkspaceListProvider implements WorkspaceListProvider {
|
||||
schema: globalBlockSuiteSchema,
|
||||
});
|
||||
|
||||
applyUpdate(bs.doc, data.data);
|
||||
applyUpdate(bs.doc, data);
|
||||
|
||||
return {
|
||||
name: bs.meta.name,
|
||||
|
||||
@@ -1,118 +0,0 @@
|
||||
import { mergeUpdates, type SyncStorage } from '@toeverything/infra';
|
||||
import { type DBSchema, type IDBPDatabase, openDB } from 'idb';
|
||||
import { diffUpdate, encodeStateVectorFromUpdate } from 'yjs';
|
||||
|
||||
// Schema version for the BlockSuite binary IndexedDB database.
export const dbVersion = 1;
// Default IndexedDB database name used for local workspaces.
export const DEFAULT_DB_NAME = 'affine-local';

// One persisted yjs update together with the time it was written.
type UpdateMessage = {
  timestamp: number;
  update: Uint8Array;
};

// All pending updates for a single doc, keyed by the doc's id.
type WorkspacePersist = {
  id: string;
  updates: UpdateMessage[];
};

// idb schema: 'workspace' holds per-doc update lists; 'milestone' exists
// in the database but its value shape is not used by this storage.
interface BlockSuiteBinaryDB extends DBSchema {
  workspace: {
    key: string;
    value: WorkspacePersist;
  };
  milestone: {
    key: string;
    value: unknown;
  };
}
|
||||
|
||||
export function upgradeDB(db: IDBPDatabase<BlockSuiteBinaryDB>) {
|
||||
db.createObjectStore('workspace', { keyPath: 'id' });
|
||||
db.createObjectStore('milestone', { keyPath: 'id' });
|
||||
}
|
||||
|
||||
// Message broadcast between tabs after a local write so other tabs can
// apply the same update without re-reading IndexedDB.
type ChannelMessage = {
  type: 'db-updated';
  payload: { docId: string; update: Uint8Array };
};
|
||||
|
||||
/**
 * SyncStorage backed by IndexedDB for local (browser) workspaces.
 *
 * Each doc's updates are stored as a list of rows in the 'workspace'
 * store; pull() merges them into a single yjs update, push() appends a
 * row (compacting once `mergeCount` rows accumulate) and notifies other
 * tabs over a BroadcastChannel.
 */
export class IndexedDBSyncStorage implements SyncStorage {
  name = 'indexeddb';
  dbName = DEFAULT_DB_NAME;
  // Compact stored rows into one merged update once this many rows exist.
  // With 1, every push() rewrites the doc as a single merged row.
  mergeCount = 1;
  dbPromise: Promise<IDBPDatabase<BlockSuiteBinaryDB>> | null = null;
  // indexeddb could be shared between tabs, so we use broadcast channel to notify other tabs
  // NOTE(review): this field initializer reads `this.workspaceId` (a
  // constructor parameter property) — confirm initialization order is safe
  // under the project's tsconfig class-field settings.
  channel = new BroadcastChannel('indexeddb:' + this.workspaceId);

  constructor(private readonly workspaceId: string) {}

  // Lazily open the shared database once; later calls reuse the promise.
  getDb() {
    if (this.dbPromise === null) {
      this.dbPromise = openDB<BlockSuiteBinaryDB>(this.dbName, dbVersion, {
        upgrade: upgradeDB,
      });
    }
    return this.dbPromise;
  }

  /**
   * Read the merged update for `docId`.
   *
   * Returns null when the doc has no rows. Otherwise returns the update
   * diffed against the caller's `state` vector (or the full merged update
   * when `state` is empty), plus the state vector of the merged update.
   */
  async pull(
    docId: string,
    state: Uint8Array
  ): Promise<{ data: Uint8Array; state?: Uint8Array | undefined } | null> {
    const db = await this.getDb();
    const store = db
      .transaction('workspace', 'readonly')
      .objectStore('workspace');
    const data = await store.get(docId);

    if (!data) {
      return null;
    }

    const { updates } = data;
    const update = mergeUpdates(updates.map(({ update }) => update));

    // Only diff when the caller actually supplied a state vector.
    const diff = state.length ? diffUpdate(update, state) : update;

    return { data: diff, state: encodeStateVectorFromUpdate(update) };
  }

  /**
   * Append `data` to the doc's stored updates (compacting when the row
   * count reaches `mergeCount`), then notify other tabs of the new update.
   */
  async push(docId: string, data: Uint8Array): Promise<void> {
    const db = await this.getDb();
    const store = db
      .transaction('workspace', 'readwrite')
      .objectStore('workspace');

    // TODO: maybe we do not need to get data every time
    const { updates } = (await store.get(docId)) ?? { updates: [] };
    let rows: UpdateMessage[] = [
      ...updates,
      { timestamp: Date.now(), update: data },
    ];
    if (this.mergeCount && rows.length >= this.mergeCount) {
      const merged = mergeUpdates(rows.map(({ update }) => update));
      rows = [{ timestamp: Date.now(), update: merged }];
    }
    await store.put({
      id: docId,
      updates: rows,
    });
    // Broadcast only after the write has been persisted.
    this.channel.postMessage({
      type: 'db-updated',
      payload: { docId, update: data },
    } satisfies ChannelMessage);
  }

  /**
   * Subscribe to updates pushed by other tabs. Returns an unsubscribe
   * function that removes the channel listener.
   */
  async subscribe(cb: (docId: string, data: Uint8Array) => void) {
    function onMessage(event: MessageEvent<ChannelMessage>) {
      const { type, payload } = event.data;
      if (type === 'db-updated') {
        const { docId, update } = payload;
        cb(docId, update);
      }
    }
    this.channel.addEventListener('message', onMessage);
    return () => {
      this.channel.removeEventListener('message', onMessage);
    };
  }
}
|
||||
@@ -1,53 +0,0 @@
|
||||
import { apis } from '@affine/electron-api';
|
||||
import { type SyncStorage } from '@toeverything/infra';
|
||||
import { encodeStateVectorFromUpdate } from 'yjs';
|
||||
|
||||
export class SQLiteSyncStorage implements SyncStorage {
|
||||
name = 'sqlite';
|
||||
constructor(private readonly workspaceId: string) {
|
||||
if (!apis?.db) {
|
||||
throw new Error('sqlite datasource is not available');
|
||||
}
|
||||
}
|
||||
|
||||
async pull(docId: string, _state: Uint8Array) {
|
||||
if (!apis?.db) {
|
||||
throw new Error('sqlite datasource is not available');
|
||||
}
|
||||
const update = await apis.db.getDocAsUpdates(
|
||||
this.workspaceId,
|
||||
this.workspaceId === docId ? undefined : docId
|
||||
);
|
||||
|
||||
if (update) {
|
||||
if (
|
||||
update.byteLength === 0 ||
|
||||
(update.byteLength === 2 && update[0] === 0 && update[1] === 0)
|
||||
) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
data: update,
|
||||
state: encodeStateVectorFromUpdate(update),
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
async push(docId: string, data: Uint8Array) {
|
||||
if (!apis?.db) {
|
||||
throw new Error('sqlite datasource is not available');
|
||||
}
|
||||
return apis.db.applyDocUpdate(
|
||||
this.workspaceId,
|
||||
data,
|
||||
this.workspaceId === docId ? undefined : docId
|
||||
);
|
||||
}
|
||||
|
||||
async subscribe() {
|
||||
return () => {};
|
||||
}
|
||||
}
|
||||
@@ -2,8 +2,8 @@ import type { ServiceCollection, WorkspaceFactory } from '@toeverything/infra';
|
||||
import {
|
||||
AwarenessContext,
|
||||
AwarenessProvider,
|
||||
DocStorageImpl,
|
||||
LocalBlobStorage,
|
||||
LocalSyncStorage,
|
||||
RemoteBlobStorage,
|
||||
WorkspaceIdContext,
|
||||
WorkspaceScope,
|
||||
@@ -13,8 +13,8 @@ import { BroadcastChannelAwarenessProvider } from './awareness';
|
||||
import { IndexedDBBlobStorage } from './blob-indexeddb';
|
||||
import { SQLiteBlobStorage } from './blob-sqlite';
|
||||
import { StaticBlobStorage } from './blob-static';
|
||||
import { IndexedDBSyncStorage } from './sync-indexeddb';
|
||||
import { SQLiteSyncStorage } from './sync-sqlite';
|
||||
import { IndexedDBDocStorage } from './doc-indexeddb';
|
||||
import { SqliteDocStorage } from './doc-sqlite';
|
||||
|
||||
export class LocalWorkspaceFactory implements WorkspaceFactory {
|
||||
name = 'local';
|
||||
@@ -23,12 +23,12 @@ export class LocalWorkspaceFactory implements WorkspaceFactory {
|
||||
services
|
||||
.scope(WorkspaceScope)
|
||||
.addImpl(LocalBlobStorage, SQLiteBlobStorage, [WorkspaceIdContext])
|
||||
.addImpl(LocalSyncStorage, SQLiteSyncStorage, [WorkspaceIdContext]);
|
||||
.addImpl(DocStorageImpl, SqliteDocStorage, [WorkspaceIdContext]);
|
||||
} else {
|
||||
services
|
||||
.scope(WorkspaceScope)
|
||||
.addImpl(LocalBlobStorage, IndexedDBBlobStorage, [WorkspaceIdContext])
|
||||
.addImpl(LocalSyncStorage, IndexedDBSyncStorage, [WorkspaceIdContext]);
|
||||
.addImpl(DocStorageImpl, IndexedDBDocStorage, [WorkspaceIdContext]);
|
||||
}
|
||||
|
||||
services
|
||||
|
||||
Reference in New Issue
Block a user