diff --git a/apps/core/src/components/blocksuite/block-suite-page-list/index.tsx b/apps/core/src/components/blocksuite/block-suite-page-list/index.tsx
index e640fa8d6c..5e9e7264f1 100644
--- a/apps/core/src/components/blocksuite/block-suite-page-list/index.tsx
+++ b/apps/core/src/components/blocksuite/block-suite-page-list/index.tsx
@@ -4,13 +4,15 @@ import { PageList, PageListTrashView } from '@affine/component/page-list';
import type { Collection } from '@affine/env/filter';
import { Trans } from '@affine/i18n';
import { useAFFiNEI18N } from '@affine/i18n/hooks';
+import { assertExists } from '@blocksuite/global/utils';
import { EdgelessIcon, PageIcon } from '@blocksuite/icons';
-import type { PageMeta } from '@blocksuite/store';
+import { type PageMeta, type Workspace } from '@blocksuite/store';
import { useBlockSuitePageMeta } from '@toeverything/hooks/use-block-suite-page-meta';
-import { getPagePreviewText } from '@toeverything/hooks/use-block-suite-page-preview';
-import { useAtom } from 'jotai';
+import { useBlockSuitePagePreview } from '@toeverything/hooks/use-block-suite-page-preview';
+import { useBlockSuiteWorkspacePage } from '@toeverything/hooks/use-block-suite-workspace-page';
+import { useAtom, useAtomValue } from 'jotai';
import type React from 'react';
-import { useMemo } from 'react';
+import { Suspense, useMemo } from 'react';
import { allPageModeSelectAtom } from '../../../atoms';
import { useBlockSuiteMetaHelper } from '../../../hooks/affine/use-block-suite-meta-helper';
@@ -39,6 +41,34 @@ const filter = {
shared: (pageMeta: PageMeta) => pageMeta.isPublic && !pageMeta.trash,
};
+const PagePreviewInner = ({
+ workspace,
+ pageId,
+}: {
+ workspace: Workspace;
+ pageId: string;
+}) => {
+ const page = useBlockSuiteWorkspacePage(workspace, pageId);
+ assertExists(page);
+ const previewAtom = useBlockSuitePagePreview(page);
+ const preview = useAtomValue(previewAtom);
+ return preview;
+};
+
+const PagePreview = ({
+ workspace,
+ pageId,
+}: {
+ workspace: Workspace;
+ pageId: string;
+}) => {
+ return (
+    <Suspense>
+      <PagePreviewInner workspace={workspace} pageId={pageId} />
+    </Suspense>
+ );
+};
+
const PageListEmpty = (props: {
createPage?: () => void;
listType: BlockSuitePageListProps['listType'];
@@ -147,8 +177,6 @@ export const BlockSuitePageList: React.FC = ({
if (listType === 'trash') {
const pageList: TrashListData[] = list.map(pageMeta => {
- const page = blockSuiteWorkspace.getPage(pageMeta.id);
- const preview = page ? getPagePreviewText(page) : undefined;
return {
icon: isPreferredEdgeless(pageMeta.id) ? (
@@ -157,7 +185,9 @@ export const BlockSuitePageList: React.FC = ({
),
pageId: pageMeta.id,
title: pageMeta.title,
- preview,
+ preview: (
+          <PagePreview workspace={blockSuiteWorkspace} pageId={pageMeta.id} />
+ ),
createDate: new Date(pageMeta.createDate),
trashDate: pageMeta.trashDate
? new Date(pageMeta.trashDate)
@@ -186,12 +216,13 @@ export const BlockSuitePageList: React.FC = ({
const pageList: ListData[] = list.map(pageMeta => {
const page = blockSuiteWorkspace.getPage(pageMeta.id);
- const preview = page ? getPagePreviewText(page) : undefined;
return {
      icon: isPreferredEdgeless(pageMeta.id) ? <EdgelessIcon /> : <PageIcon />,
pageId: pageMeta.id,
title: pageMeta.title,
- preview,
+ preview: (
+          <PagePreview workspace={blockSuiteWorkspace} pageId={pageMeta.id} />
+ ),
tags:
page?.meta.tags?.map(id => tagOptionMap[id]).filter(v => v != null) ??
[],
diff --git a/apps/core/src/components/blocksuite/block-suite-page-list/utils.tsx b/apps/core/src/components/blocksuite/block-suite-page-list/utils.tsx
index fc951a9412..54865613c5 100644
--- a/apps/core/src/components/blocksuite/block-suite-page-list/utils.tsx
+++ b/apps/core/src/components/blocksuite/block-suite-page-list/utils.tsx
@@ -1,3 +1,4 @@
+import { initEmptyPage } from '@affine/env/blocksuite';
import { useBlockSuiteWorkspaceHelper } from '@toeverything/hooks/use-block-suite-workspace-helper';
import { useAtomValue, useSetAtom } from 'jotai';
import { useCallback } from 'react';
@@ -15,15 +16,23 @@ export const usePageHelper = (blockSuiteWorkspace: BlockSuiteWorkspace) => {
[pageSettings]
);
const setPageMode = useSetAtom(setPageModeAtom);
- const createPageAndOpen = useCallback(() => {
- const page = createPage();
- return openPage(blockSuiteWorkspace.id, page.id);
- }, [blockSuiteWorkspace.id, createPage, openPage]);
- const createEdgelessAndOpen = useCallback(() => {
- const page = createPage();
- setPageMode(page.id, 'edgeless');
- return openPage(blockSuiteWorkspace.id, page.id);
- }, [blockSuiteWorkspace.id, createPage, openPage, setPageMode]);
+ const createPageAndOpen = useCallback(
+ (id?: string, mode?: 'page' | 'edgeless') => {
+ const page = createPage(id);
+ initEmptyPage(page); // we don't need to wait for it to be loaded, right?
+ if (mode) {
+ setPageMode(page.id, mode);
+ }
+ openPage(blockSuiteWorkspace.id, page.id);
+ },
+ [blockSuiteWorkspace.id, createPage, openPage, setPageMode]
+ );
+ const createEdgelessAndOpen = useCallback(
+ (id?: string) => {
+ return createPageAndOpen(id, 'edgeless');
+ },
+ [createPageAndOpen]
+ );
const importFileAndOpen = useCallback(async () => {
const { showImportModal } = await import('@blocksuite/blocks');
showImportModal({ workspace: blockSuiteWorkspace });
diff --git a/apps/core/src/layouts/workspace-layout.tsx b/apps/core/src/layouts/workspace-layout.tsx
index fb8f9a5376..36e0d2c0e4 100644
--- a/apps/core/src/layouts/workspace-layout.tsx
+++ b/apps/core/src/layouts/workspace-layout.tsx
@@ -27,7 +27,6 @@ import {
useSensor,
useSensors,
} from '@dnd-kit/core';
-import { useBlockSuiteWorkspaceHelper } from '@toeverything/hooks/use-block-suite-workspace-helper';
import { usePassiveWorkspaceEffect } from '@toeverything/plugin-infra/__internal__/react';
import { currentWorkspaceIdAtom } from '@toeverything/plugin-infra/manager';
import { useAtom, useAtomValue, useSetAtom } from 'jotai';
@@ -43,6 +42,7 @@ import {
} from '../atoms';
import { useAppSetting } from '../atoms/settings';
import { AppContainer } from '../components/affine/app-container';
+import { usePageHelper } from '../components/blocksuite/block-suite-page-list/utils';
import type { IslandItemNames } from '../components/pure/help-island';
import { HelpIsland } from '../components/pure/help-island';
import { processCollectionsDrag } from '../components/pure/workspace-slider-bar/collections';
@@ -158,13 +158,16 @@ export const WorkspaceLayoutInner: FC = ({ children }) => {
usePassiveWorkspaceEffect(currentWorkspace.blockSuiteWorkspace);
const [, setOpenWorkspacesModal] = useAtom(openWorkspacesModalAtom);
- const helper = useBlockSuiteWorkspaceHelper(
- currentWorkspace.blockSuiteWorkspace
- );
+ const helper = usePageHelper(currentWorkspace.blockSuiteWorkspace);
const handleCreatePage = useCallback(() => {
- return helper.createPage(nanoid());
- }, [helper]);
+ const id = nanoid();
+ helper.createPage(id);
+ const page = currentWorkspace.blockSuiteWorkspace.getPage(id);
+ assertExists(page);
+ return page;
+ }, [currentWorkspace.blockSuiteWorkspace, helper]);
+
const handleOpenWorkspaceListModal = useCallback(() => {
setOpenWorkspacesModal(true);
}, [setOpenWorkspacesModal]);
diff --git a/packages/component/src/components/block-suite-editor/index.tsx b/packages/component/src/components/block-suite-editor/index.tsx
index 7d83216dfc..7445b98003 100644
--- a/packages/component/src/components/block-suite-editor/index.tsx
+++ b/packages/component/src/components/block-suite-editor/index.tsx
@@ -45,7 +45,7 @@ const ImagePreviewModal = lazy(() =>
);
const BlockSuiteEditorImpl = (props: EditorProps): ReactElement => {
- const { onLoad, page, mode, style, onInit } = props;
+ const { onLoad, page, mode, style } = props;
if (!page.loaded) {
use(page.waitForLoaded());
}
@@ -66,14 +66,9 @@ const BlockSuiteEditorImpl = (props: EditorProps): ReactElement => {
editor.mode = mode;
}
- useEffect(() => {
- if (editor.page !== page) {
- editor.page = page;
- if (page.root === null) {
- onInit(page, editor);
- }
- }
- }, [editor, page, onInit]);
+ if (editor.page !== page) {
+ editor.page = page;
+ }
useEffect(() => {
if (editor.page && onLoad) {
diff --git a/packages/component/src/components/page-list/components/title-cell.tsx b/packages/component/src/components/page-list/components/title-cell.tsx
index dd52855664..7786d2cfef 100644
--- a/packages/component/src/components/page-list/components/title-cell.tsx
+++ b/packages/component/src/components/page-list/components/title-cell.tsx
@@ -11,7 +11,7 @@ import {
type TitleCellProps = {
icon: JSX.Element;
text: string;
- desc?: string;
+ desc?: React.ReactNode;
suffix?: JSX.Element;
/**
* Customize the children of the cell
diff --git a/packages/component/src/components/page-list/type.ts b/packages/component/src/components/page-list/type.ts
index 427811e291..721d30e437 100644
--- a/packages/component/src/components/page-list/type.ts
+++ b/packages/component/src/components/page-list/type.ts
@@ -15,7 +15,7 @@ export type ListData = {
pageId: string;
icon: JSX.Element;
title: string;
- preview?: string;
+ preview?: React.ReactNode;
tags: Tag[];
favorite: boolean;
createDate: Date;
@@ -34,7 +34,7 @@ export type TrashListData = {
pageId: string;
icon: JSX.Element;
title: string;
- preview?: string;
+ preview?: React.ReactNode;
createDate: Date;
// TODO remove optional after assert that trashDate is always set
trashDate?: Date;
diff --git a/packages/component/src/ui/layout/content.tsx b/packages/component/src/ui/layout/content.tsx
index 0d3b831655..5b0a5f2e3e 100644
--- a/packages/component/src/ui/layout/content.tsx
+++ b/packages/component/src/ui/layout/content.tsx
@@ -13,7 +13,7 @@ export type ContentProps = {
lineHeight?: CSSProperties['lineHeight'];
ellipsis?: boolean;
lineNum?: number;
- children: string;
+ children: React.ReactNode;
};
export const Content = styled('div', {
shouldForwardProp: prop => {
diff --git a/packages/hooks/src/use-block-suite-page-preview.ts b/packages/hooks/src/use-block-suite-page-preview.ts
index f9bda362fb..2aaff69393 100644
--- a/packages/hooks/src/use-block-suite-page-preview.ts
+++ b/packages/hooks/src/use-block-suite-page-preview.ts
@@ -26,6 +26,7 @@ export function useBlockSuitePagePreview(page: Page): Atom {
const disposable = page.slots.yUpdated.on(() => {
set(getPagePreviewText(page));
});
+ set(getPagePreviewText(page));
return () => {
disposable.dispose();
};
diff --git a/packages/hooks/src/use-block-suite-workspace-page.ts b/packages/hooks/src/use-block-suite-workspace-page.ts
index 368bf906fb..7ac68fb261 100644
--- a/packages/hooks/src/use-block-suite-workspace-page.ts
+++ b/packages/hooks/src/use-block-suite-workspace-page.ts
@@ -2,6 +2,7 @@ import { assertExists, DisposableGroup } from '@blocksuite/global/utils';
import type { Page, Workspace } from '@blocksuite/store';
import type { Atom } from 'jotai';
import { atom, useAtomValue } from 'jotai';
+import { useEffect } from 'react';
const weakMap = new WeakMap>>();
@@ -51,5 +52,13 @@ export function useBlockSuiteWorkspacePage(
): Page | null {
const pageAtom = getAtom(blockSuiteWorkspace, pageId);
assertExists(pageAtom);
- return useAtomValue(pageAtom);
+ const page = useAtomValue(pageAtom);
+
+ useEffect(() => {
+ if (!page?.loaded) {
+ page?.waitForLoaded().catch(console.error);
+ }
+ }, [page]);
+
+ return page;
}
diff --git a/packages/workspace/src/local/crud.ts b/packages/workspace/src/local/crud.ts
index 6637414c49..9ed739c64c 100644
--- a/packages/workspace/src/local/crud.ts
+++ b/packages/workspace/src/local/crud.ts
@@ -2,7 +2,6 @@ import { DebugLogger } from '@affine/debug';
import type { LocalWorkspace, WorkspaceCRUD } from '@affine/env/workspace';
import { WorkspaceFlavour } from '@affine/env/workspace';
import { nanoid, Workspace as BlockSuiteWorkspace } from '@blocksuite/store';
-import { createIndexedDBProvider } from '@toeverything/y-indexeddb';
import { createJSONStorage } from 'jotai/utils';
import { z } from 'zod';
@@ -75,12 +74,7 @@ export const CRUD: WorkspaceCRUD = {
}
});
});
-
- const persistence = createIndexedDBProvider(blockSuiteWorkspace.doc);
- persistence.connect();
- await persistence.whenSynced.then(() => {
- persistence.disconnect();
- });
+ // todo: do we need to persist the doc to a persistent datasource?
saveWorkspaceToLocalStorage(id);
return id;
},
diff --git a/packages/workspace/src/providers/__tests__/indexeddb-provider.spec.ts b/packages/workspace/src/providers/__tests__/indexeddb-provider.spec.ts
index 3771194198..48a8ce2726 100644
--- a/packages/workspace/src/providers/__tests__/indexeddb-provider.spec.ts
+++ b/packages/workspace/src/providers/__tests__/indexeddb-provider.spec.ts
@@ -68,7 +68,15 @@ describe('download provider', () => {
) as LocalIndexedDBDownloadProvider;
provider.sync();
await provider.whenReady;
- expect(workspace.doc.toJSON()).toEqual(prev);
+ expect(workspace.doc.toJSON()).toEqual({
+ ...prev,
+ // the download provider only downloads the root doc
+ spaces: {
+ 'space:page0': {
+ blocks: {},
+ },
+ },
+ });
}
});
});
diff --git a/packages/workspace/src/providers/__tests__/sqlite-provider.spec.ts b/packages/workspace/src/providers/__tests__/sqlite-provider.spec.ts
index 2dbd96c016..5d394d9d5b 100644
--- a/packages/workspace/src/providers/__tests__/sqlite-provider.spec.ts
+++ b/packages/workspace/src/providers/__tests__/sqlite-provider.spec.ts
@@ -2,9 +2,11 @@ import type {
SQLiteDBDownloadProvider,
SQLiteProvider,
} from '@affine/env/workspace';
+import { getDoc } from '@affine/y-provider';
import { __unstableSchemas, AffineSchemas } from '@blocksuite/blocks/models';
import type { Y as YType } from '@blocksuite/store';
import { uuidv4, Workspace } from '@blocksuite/store';
+import { setTimeout } from 'timers/promises';
import { beforeEach, describe, expect, test, vi } from 'vitest';
import {
@@ -30,18 +32,26 @@ const mockedAddBlob = vi.fn();
vi.stubGlobal('window', {
apis: {
db: {
- getDocAsUpdates: async () => {
- return Y.encodeStateAsUpdate(offlineYdoc);
+ getDocAsUpdates: async (workspaceId, guid) => {
+ const subdoc = guid ? getDoc(offlineYdoc, guid) : offlineYdoc;
+ if (!subdoc) {
+ return false;
+ }
+ return Y.encodeStateAsUpdate(subdoc);
},
- applyDocUpdate: async (id: string, update: Uint8Array) => {
- Y.applyUpdate(offlineYdoc, update, 'sqlite');
+ applyDocUpdate: async (id, update, subdocId) => {
+ const subdoc = subdocId ? getDoc(offlineYdoc, subdocId) : offlineYdoc;
+ if (!subdoc) {
+ return;
+ }
+ Y.applyUpdate(subdoc, update, 'sqlite');
},
getBlobKeys: async () => {
// todo: may need to hack the way to get hash keys of blobs
return [];
},
addBlob: mockedAddBlob,
- } satisfies Partial['db']>,
+ } as Partial['db']>,
},
events: {
db: {
@@ -53,7 +63,7 @@ vi.stubGlobal('window', {
};
},
},
- } satisfies Partial>,
+ } as Partial>,
});
vi.stubGlobal('environment', {
@@ -84,48 +94,25 @@ beforeEach(() => {
describe('SQLite download provider', () => {
test('sync updates', async () => {
// on connect, the updates from sqlite should be sync'ed to the existing ydoc
- // and ydoc should be sync'ed back to sqlite
- // Workspace.Y.applyUpdate(workspace.doc);
workspace.doc.getText('text').insert(0, 'mem-hello');
- expect(offlineYdoc.getText('text').toString()).toBe('sqlite-hello');
-
downloadProvider.sync();
await downloadProvider.whenReady;
// depending on the nature of the sync, the data can be sync'ed in either direction
- const options = ['mem-hellosqlite-hello', 'sqlite-hellomem-hello'];
+ const options = ['sqlite-hellomem-hello', 'mem-hellosqlite-hello'];
const synced = options.filter(
- o => o === offlineYdoc.getText('text').toString()
+ o => o === workspace.doc.getText('text').toString()
);
expect(synced.length).toBe(1);
- expect(workspace.doc.getText('text').toString()).toBe(synced[0]);
-
- // workspace.doc.getText('text').insert(0, 'world');
-
- // // check if the data are sync'ed
- // expect(offlineYdoc.getText('text').toString()).toBe('world' + synced[0]);
});
- test.fails('blobs will be synced to sqlite on connect', async () => {
- // mock bs.list
- const bin = new Uint8Array([1, 2, 3]);
- const blob = new Blob([bin]);
- workspace.blobs.list = vi.fn(async () => ['blob1']);
- workspace.blobs.get = vi.fn(async () => {
- return blob;
- });
-
- downloadProvider.sync();
- await downloadProvider.whenReady;
- await new Promise(resolve => setTimeout(resolve, 100));
-
- expect(mockedAddBlob).toBeCalledWith(id, 'blob1', bin);
- });
-
- test('on db update', async () => {
+ // there are no updates from sqlite for now
+ test.skip('on db update', async () => {
provider.connect();
+ await setTimeout(200);
+
offlineYdoc.getText('text').insert(0, 'sqlite-world');
// @ts-expect-error
diff --git a/packages/workspace/src/providers/index.ts b/packages/workspace/src/providers/index.ts
index 8047fe9173..4b39483f20 100644
--- a/packages/workspace/src/providers/index.ts
+++ b/packages/workspace/src/providers/index.ts
@@ -10,7 +10,6 @@ import { createBroadcastChannelProvider } from '@blocksuite/store/providers/broa
import {
createIndexedDBProvider as create,
downloadBinary,
- EarlyDisconnectError,
} from '@toeverything/y-indexeddb';
import type { Doc } from 'yjs';
@@ -40,17 +39,6 @@ const createIndexedDBBackgroundProvider: DocProviderCreator = (
connect: () => {
logger.info('connect indexeddb provider', id);
indexeddbProvider.connect();
- indexeddbProvider.whenSynced
- .then(() => {
- connected = true;
- })
- .catch(error => {
- connected = false;
- if (error instanceof EarlyDisconnectError) {
- return;
- }
- throw error;
- });
},
disconnect: () => {
assertExists(indexeddbProvider);
@@ -61,7 +49,6 @@ const createIndexedDBBackgroundProvider: DocProviderCreator = (
};
};
-const cache: WeakMap = new WeakMap();
const indexedDBDownloadOrigin = 'indexeddb-download-provider';
const createIndexedDBDownloadProvider: DocProviderCreator = (
@@ -74,18 +61,11 @@ const createIndexedDBDownloadProvider: DocProviderCreator = (
_resolve = resolve;
_reject = reject;
});
- async function downloadBinaryRecursively(doc: Doc) {
- if (cache.has(doc)) {
- const binary = cache.get(doc) as Uint8Array;
+ async function downloadAndApply(doc: Doc) {
+ const binary = await downloadBinary(doc.guid);
+ if (binary) {
Y.applyUpdate(doc, binary, indexedDBDownloadOrigin);
- } else {
- const binary = await downloadBinary(doc.guid);
- if (binary) {
- Y.applyUpdate(doc, binary, indexedDBDownloadOrigin);
- cache.set(doc, binary);
- }
}
- await Promise.all([...doc.subdocs].map(downloadBinaryRecursively));
}
return {
flavour: 'local-indexeddb',
@@ -98,7 +78,7 @@ const createIndexedDBDownloadProvider: DocProviderCreator = (
},
sync: () => {
logger.info('sync indexeddb provider', id);
- downloadBinaryRecursively(doc).then(_resolve).catch(_reject);
+ downloadAndApply(doc).then(_resolve).catch(_reject);
},
};
};
diff --git a/packages/workspace/src/providers/sqlite-providers.ts b/packages/workspace/src/providers/sqlite-providers.ts
index b3e651dc91..28dd9f94ec 100644
--- a/packages/workspace/src/providers/sqlite-providers.ts
+++ b/packages/workspace/src/providers/sqlite-providers.ts
@@ -2,8 +2,10 @@ import type {
SQLiteDBDownloadProvider,
SQLiteProvider,
} from '@affine/env/workspace';
-import { getDoc } from '@affine/y-provider';
-import { assertExists } from '@blocksuite/global/utils';
+import {
+ createLazyProvider,
+ type DatasourceDocAdapter,
+} from '@affine/y-provider';
import type { DocProviderCreator } from '@blocksuite/store';
import { Workspace as BlockSuiteWorkspace } from '@blocksuite/store';
import type { Doc } from 'yjs';
@@ -14,32 +16,26 @@ const Y = BlockSuiteWorkspace.Y;
const sqliteOrigin = Symbol('sqlite-provider-origin');
-type SubDocsEvent = {
- added: Set;
- removed: Set;
- loaded: Set;
-};
-
-// workaround: there maybe new updates before SQLite is connected
-// we need to exchange them with the SQLite db
-// will be removed later when we have lazy load doc provider
-const syncDiff = async (rootDoc: Doc, subdocId?: string) => {
- try {
- const workspaceId = rootDoc.guid;
- const doc = subdocId ? getDoc(rootDoc, subdocId) : rootDoc;
- if (!doc) {
- logger.error('doc not found', workspaceId, subdocId);
- return;
- }
- const update = await window.apis?.db.getDocAsUpdates(workspaceId, subdocId);
- const diff = Y.encodeStateAsUpdate(
- doc,
- Y.encodeStateVectorFromUpdate(update)
- );
- await window.apis.db.applyDocUpdate(workspaceId, diff, subdocId);
- } catch (err) {
- logger.error('failed to sync diff', err);
+const createDatasource = (workspaceId: string): DatasourceDocAdapter => {
+ if (!window.apis?.db) {
+ throw new Error('sqlite datasource is not available');
}
+
+ return {
+ queryDocState: async guid => {
+ return window.apis.db.getDocAsUpdates(
+ workspaceId,
+ workspaceId === guid ? undefined : guid
+ );
+ },
+ sendDocUpdate: async (guid, update) => {
+ return window.apis.db.applyDocUpdate(
+ guid,
+ update,
+ workspaceId === guid ? undefined : guid
+ );
+ },
+ };
};
/**
@@ -49,126 +45,27 @@ export const createSQLiteProvider: DocProviderCreator = (
id,
rootDoc
): SQLiteProvider => {
- const { apis, events } = window;
- // make sure it is being used in Electron with APIs
- assertExists(apis);
- assertExists(events);
-
- const updateHandlerMap = new WeakMap<
- Doc,
- (update: Uint8Array, origin: unknown) => void
- >();
- const subDocsHandlerMap = new WeakMap void>();
-
- const createOrHandleUpdate = (doc: Doc) => {
- if (updateHandlerMap.has(doc)) {
- // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
- return updateHandlerMap.get(doc)!;
- }
-
- function handleUpdate(update: Uint8Array, origin: unknown) {
- if (origin === sqliteOrigin) {
- return;
- }
- const subdocId = doc.guid === id ? undefined : doc.guid;
- apis.db.applyDocUpdate(id, update, subdocId).catch(err => {
- logger.error(err);
- });
- }
- updateHandlerMap.set(doc, handleUpdate);
- return handleUpdate;
- };
-
- const createOrGetHandleSubDocs = (doc: Doc) => {
- if (subDocsHandlerMap.has(doc)) {
- // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
- return subDocsHandlerMap.get(doc)!;
- }
- function handleSubdocs(event: SubDocsEvent) {
- event.removed.forEach(doc => {
- untrackDoc(doc);
- });
- event.loaded.forEach(doc => {
- trackDoc(doc);
- });
- }
- subDocsHandlerMap.set(doc, handleSubdocs);
- return handleSubdocs;
- };
-
- function trackDoc(doc: Doc) {
- syncDiff(rootDoc, rootDoc !== doc ? doc.guid : undefined).catch(
- logger.error
- );
- doc.on('update', createOrHandleUpdate(doc));
- doc.on('subdocs', createOrGetHandleSubDocs(doc));
- doc.subdocs.forEach(doc => {
- trackDoc(doc);
- });
- }
-
- function untrackDoc(doc: Doc) {
- doc.subdocs.forEach(doc => {
- untrackDoc(doc);
- });
- doc.off('update', createOrHandleUpdate(doc));
- doc.off('subdocs', createOrGetHandleSubDocs(doc));
- }
-
- let unsubscribe = () => {};
+ let datasource: ReturnType | null = null;
+ let provider: ReturnType | null = null;
let connected = false;
-
- const connect = () => {
- if (connected) {
- return;
- }
- logger.info('connecting sqlite provider', id);
- trackDoc(rootDoc);
-
- unsubscribe = events.db.onExternalUpdate(
- ({
- update,
- workspaceId,
- docId,
- }: {
- workspaceId: string;
- update: Uint8Array;
- docId?: string;
- }) => {
- if (workspaceId === id) {
- if (docId) {
- for (const doc of rootDoc.subdocs) {
- if (doc.guid === docId) {
- Y.applyUpdate(doc, update, sqliteOrigin);
- return;
- }
- }
- } else {
- Y.applyUpdate(rootDoc, update, sqliteOrigin);
- }
- }
- }
- );
- connected = true;
- logger.info('connecting sqlite done', id);
- };
-
- const cleanup = () => {
- logger.info('disconnecting sqlite provider', id);
- unsubscribe();
- untrackDoc(rootDoc);
- connected = false;
- };
-
return {
flavour: 'sqlite',
passive: true,
- get connected(): boolean {
+ connect: () => {
+ datasource = createDatasource(id);
+ provider = createLazyProvider(rootDoc, datasource);
+ provider.connect();
+ connected = true;
+ },
+ disconnect: () => {
+ provider?.disconnect();
+ datasource = null;
+ provider = null;
+ connected = false;
+ },
+ get connected() {
return connected;
},
- cleanup,
- connect,
- disconnect: cleanup,
};
};
@@ -180,7 +77,6 @@ export const createSQLiteDBDownloadProvider: DocProviderCreator = (
rootDoc
): SQLiteDBDownloadProvider => {
const { apis } = window;
- let disconnected = false;
let _resolve: () => void;
let _reject: (error: unknown) => void;
@@ -194,33 +90,13 @@ export const createSQLiteDBDownloadProvider: DocProviderCreator = (
const subdocId = doc.guid === id ? undefined : doc.guid;
const updates = await apis.db.getDocAsUpdates(id, subdocId);
- if (disconnected) {
- return false;
- }
-
if (updates) {
Y.applyUpdate(doc, updates, sqliteOrigin);
}
- const mergedUpdates = Y.encodeStateAsUpdate(
- doc,
- Y.encodeStateVectorFromUpdate(updates)
- );
-
- // also apply updates to sqlite
- await apis.db.applyDocUpdate(id, mergedUpdates, subdocId);
-
return true;
}
- async function syncAllUpdates(doc: Doc) {
- if (await syncUpdates(doc)) {
- // load all subdocs
- const subdocs = Array.from(doc.subdocs);
- await Promise.all(subdocs.map(syncAllUpdates));
- }
- }
-
return {
flavour: 'sqlite-download',
active: true,
@@ -228,12 +104,12 @@ export const createSQLiteDBDownloadProvider: DocProviderCreator = (
return promise;
},
cleanup: () => {
- disconnected = true;
+ // todo
},
sync: async () => {
logger.info('connect sqlite download provider', id);
try {
- await syncAllUpdates(rootDoc);
+ await syncUpdates(rootDoc);
_resolve();
} catch (error) {
_reject(error);
diff --git a/packages/y-indexeddb/benchmark/README.md b/packages/y-indexeddb/benchmark/README.md
new file mode 100644
index 0000000000..a3e1419ef4
--- /dev/null
+++ b/packages/y-indexeddb/benchmark/README.md
@@ -0,0 +1 @@
+This benchmark is outdated because our new API for IndexedDB has no direct parity with the official provider.
diff --git a/packages/y-indexeddb/package.json b/packages/y-indexeddb/package.json
index 6c0fe6ea58..1b89c9e5f8 100644
--- a/packages/y-indexeddb/package.json
+++ b/packages/y-indexeddb/package.json
@@ -36,6 +36,7 @@
"idb": "^7.1.1"
},
"devDependencies": {
+ "@affine/y-provider": "workspace:*",
"@blocksuite/blocks": "0.0.0-20230721134812-6e0e3bef-nightly",
"@blocksuite/store": "0.0.0-20230721134812-6e0e3bef-nightly",
"vite": "^4.4.6",
diff --git a/packages/y-indexeddb/src/__tests__/index.spec.ts b/packages/y-indexeddb/src/__tests__/index.spec.ts
index 89d977be19..f8aa464479 100644
--- a/packages/y-indexeddb/src/__tests__/index.spec.ts
+++ b/packages/y-indexeddb/src/__tests__/index.spec.ts
@@ -3,18 +3,18 @@
*/
import 'fake-indexeddb/auto';
+import { setTimeout } from 'node:timers/promises';
+
import { __unstableSchemas, AffineSchemas } from '@blocksuite/blocks/models';
import { assertExists } from '@blocksuite/global/utils';
import type { Page } from '@blocksuite/store';
import { uuidv4, Workspace } from '@blocksuite/store';
import { openDB } from 'idb';
import { afterEach, beforeEach, describe, expect, test, vi } from 'vitest';
-import { IndexeddbPersistence } from 'y-indexeddb';
import { applyUpdate, Doc, encodeStateAsUpdate } from 'yjs';
import type { WorkspacePersist } from '../index';
import {
- CleanupWhenConnectingError,
createIndexedDBProvider,
dbVersion,
DEFAULT_DB_NAME,
@@ -43,7 +43,7 @@ function initEmptyPage(page: Page) {
async function getUpdates(id: string): Promise {
const db = await openDB(rootDBName, dbVersion);
- const store = await db
+ const store = db
.transaction('workspace', 'readonly')
.objectStore('workspace');
const data = (await store.get(id)) as WorkspacePersist | undefined;
@@ -74,7 +74,10 @@ describe('indexeddb provider', () => {
test('connect', async () => {
const provider = createIndexedDBProvider(workspace.doc);
provider.connect();
- await provider.whenSynced;
+
+ // todo: find a better way to know when data is synced
+ await setTimeout(200);
+
const db = await openDB(rootDBName, dbVersion);
{
const store = db
@@ -96,9 +99,9 @@ describe('indexeddb provider', () => {
const frameId = page.addBlock('affine:note', {}, pageBlockId);
page.addBlock('affine:paragraph', {}, frameId);
}
- await new Promise(resolve => setTimeout(resolve, 1000));
+ await setTimeout(200);
{
- const store = await db
+ const store = db
.transaction('workspace', 'readonly')
.objectStore('workspace');
const data = (await store.get(id)) as WorkspacePersist | undefined;
@@ -130,22 +133,11 @@ describe('indexeddb provider', () => {
}
});
- test('disconnect suddenly', async () => {
- const provider = createIndexedDBProvider(workspace.doc, rootDBName);
- const fn = vi.fn();
- provider.connect();
- provider.disconnect();
- expect(fn).toBeCalledTimes(0);
- await provider.whenSynced.catch(fn);
- expect(fn).toBeCalledTimes(1);
- });
-
test('connect and disconnect', async () => {
const provider = createIndexedDBProvider(workspace.doc, rootDBName);
provider.connect();
expect(provider.connected).toBe(true);
- const p1 = provider.whenSynced;
- await p1;
+ await setTimeout(200);
const snapshot = encodeStateAsUpdate(workspace.doc);
provider.disconnect();
expect(provider.connected).toBe(false);
@@ -164,8 +156,7 @@ describe('indexeddb provider', () => {
expect(provider.connected).toBe(false);
provider.connect();
expect(provider.connected).toBe(true);
- const p2 = provider.whenSynced;
- await p2;
+ await setTimeout(200);
{
const updates = await getUpdates(workspace.id);
expect(updates).not.toEqual([]);
@@ -173,13 +164,12 @@ describe('indexeddb provider', () => {
expect(provider.connected).toBe(true);
provider.disconnect();
expect(provider.connected).toBe(false);
- expect(p1).not.toBe(p2);
});
test('cleanup', async () => {
const provider = createIndexedDBProvider(workspace.doc);
provider.connect();
- await provider.whenSynced;
+ await setTimeout(200);
const db = await openDB(rootDBName, dbVersion);
{
@@ -190,8 +180,8 @@ describe('indexeddb provider', () => {
expect(keys).contain(workspace.id);
}
- provider.disconnect();
await provider.cleanup();
+ provider.disconnect();
{
const store = db
@@ -202,17 +192,6 @@ describe('indexeddb provider', () => {
}
});
- test('cleanup when connecting', async () => {
- const provider = createIndexedDBProvider(workspace.doc);
- provider.connect();
- await expect(() => provider.cleanup()).rejects.toThrowError(
- CleanupWhenConnectingError
- );
- await provider.whenSynced;
- provider.disconnect();
- await provider.cleanup();
- });
-
test('merge', async () => {
setMergeCount(5);
const provider = createIndexedDBProvider(workspace.doc, rootDBName);
@@ -226,7 +205,7 @@ describe('indexeddb provider', () => {
page.addBlock('affine:paragraph', {}, frameId);
}
}
- await provider.whenSynced;
+ await setTimeout(200);
{
const updates = await getUpdates(id);
expect(updates.length).lessThanOrEqual(5);
@@ -242,14 +221,12 @@ describe('indexeddb provider', () => {
{
const provider = createIndexedDBProvider(doc, rootDBName);
provider.connect();
- await provider.whenSynced;
provider.disconnect();
}
{
const newDoc = new Workspace.Y.Doc();
const provider = createIndexedDBProvider(newDoc, rootDBName);
provider.connect();
- await provider.whenSynced;
provider.disconnect();
newDoc.getMap('map').forEach((value, key) => {
expect(value).toBe(parseInt(key));
@@ -257,42 +234,6 @@ describe('indexeddb provider', () => {
}
});
- test('migration', async () => {
- {
- const yDoc = new Doc();
- yDoc.getMap().set('foo', 'bar');
- const persistence = new IndexeddbPersistence('test', yDoc);
- await persistence.whenSynced;
- await persistence.destroy();
- }
- {
- const yDoc = new Doc({
- guid: 'test',
- });
- const provider = createIndexedDBProvider(yDoc);
- provider.connect();
- await provider.whenSynced;
- await new Promise(resolve => setTimeout(resolve, 0));
- expect(yDoc.getMap().get('foo')).toBe('bar');
- }
- localStorage.clear();
- {
- indexedDB.databases = vi.fn(async () => {
- throw new Error('not supported');
- });
- await expect(indexedDB.databases).rejects.toThrow('not supported');
- const yDoc = new Doc({
- guid: 'test',
- });
- expect(indexedDB.databases).toBeCalledTimes(1);
- const provider = createIndexedDBProvider(yDoc);
- provider.connect();
- await provider.whenSynced;
- expect(indexedDB.databases).toBeCalledTimes(2);
- expect(yDoc.getMap().get('foo')).toBe('bar');
- }
- });
-
test('beforeunload', async () => {
const oldAddEventListener = window.addEventListener;
window.addEventListener = vi.fn((event: string, fn, options) => {
@@ -311,7 +252,8 @@ describe('indexeddb provider', () => {
const map = doc.getMap('map');
map.set('1', 1);
provider.connect();
- await provider.whenSynced;
+
+ await setTimeout(200);
expect(window.addEventListener).toBeCalledTimes(1);
expect(window.removeEventListener).toBeCalledTimes(1);
@@ -396,7 +338,7 @@ describe('subDoc', () => {
map.set('2', 'test');
const provider = createIndexedDBProvider(doc);
provider.connect();
- await provider.whenSynced;
+ await setTimeout(200);
provider.disconnect();
json1 = doc.toJSON();
}
@@ -406,7 +348,7 @@ describe('subDoc', () => {
});
const provider = createIndexedDBProvider(doc);
provider.connect();
- await provider.whenSynced;
+ await setTimeout(200);
const map = doc.getMap();
const subDoc = map.get('1') as Doc;
subDoc.load();
@@ -431,7 +373,7 @@ describe('subDoc', () => {
});
await page1.waitForLoaded();
const { paragraphBlockId: paragraphBlockIdPage2 } = initEmptyPage(page1);
- await new Promise(resolve => setTimeout(resolve, 1000));
+ await setTimeout(200);
provider.disconnect();
{
const newWorkspace = new Workspace({
@@ -441,15 +383,17 @@ describe('subDoc', () => {
newWorkspace.register(AffineSchemas).register(__unstableSchemas);
const provider = createIndexedDBProvider(newWorkspace.doc, rootDBName);
provider.connect();
- await provider.whenSynced;
+ await setTimeout(200);
const page0 = newWorkspace.getPage('page0') as Page;
await page0.waitForLoaded();
+ await setTimeout(200);
{
const block = page0.getBlockById(paragraphBlockIdPage1);
assertExists(block);
}
const page1 = newWorkspace.getPage('page1') as Page;
await page1.waitForLoaded();
+ await setTimeout(200);
{
const block = page1.getBlockById(paragraphBlockIdPage2);
assertExists(block);
@@ -465,7 +409,7 @@ describe('utils', () => {
initEmptyPage(page);
const provider = createIndexedDBProvider(workspace.doc, rootDBName);
provider.connect();
- await provider.whenSynced;
+ await setTimeout(200);
provider.disconnect();
const update = (await downloadBinary(
workspace.id,
@@ -478,16 +422,12 @@ describe('utils', () => {
});
newWorkspace.register(AffineSchemas).register(__unstableSchemas);
applyUpdate(newWorkspace.doc, update);
- await new Promise(resolve =>
- setTimeout(() => {
- expect(workspace.doc.toJSON()['meta']).toEqual(
- newWorkspace.doc.toJSON()['meta']
- );
- expect(Object.keys(workspace.doc.toJSON()['spaces'])).toEqual(
- Object.keys(newWorkspace.doc.toJSON()['spaces'])
- );
- resolve();
- }, 0)
+ await setTimeout();
+ expect(workspace.doc.toJSON()['meta']).toEqual(
+ newWorkspace.doc.toJSON()['meta']
+ );
+ expect(Object.keys(workspace.doc.toJSON()['spaces'])).toEqual(
+ Object.keys(newWorkspace.doc.toJSON()['spaces'])
);
});
diff --git a/packages/y-indexeddb/src/index.ts b/packages/y-indexeddb/src/index.ts
index d96d857bcc..b30169f121 100644
--- a/packages/y-indexeddb/src/index.ts
+++ b/packages/y-indexeddb/src/index.ts
@@ -1,26 +1,17 @@
import { openDB } from 'idb';
import {
applyUpdate,
- diffUpdate,
Doc,
encodeStateAsUpdate,
encodeStateVector,
UndoManager,
} from 'yjs';
-import type {
- BlockSuiteBinaryDB,
- IndexedDBProvider,
- WorkspaceMilestone,
-} from './shared';
+import type { BlockSuiteBinaryDB, WorkspaceMilestone } from './shared';
import { dbVersion, DEFAULT_DB_NAME, upgradeDB } from './shared';
-import { tryMigrate } from './utils';
-const indexeddbOrigin = 'indexeddb-provider-origin';
const snapshotOrigin = 'snapshot-origin';
-let mergeCount = 500;
-
/**
* @internal
*/
@@ -40,10 +31,6 @@ export const writeOperation = async (op: Promise) => {
});
};
-export function setMergeCount(count: number) {
- mergeCount = count;
-}
-
export function revertUpdate(
doc: Doc,
snapshotUpdate: Uint8Array,
@@ -142,257 +129,10 @@ export const getMilestones = async (
return milestone.milestone;
};
-type SubDocsEvent = {
- added: Set;
- removed: Set;
- loaded: Set;
-};
-
/**
* We use `doc.guid` as the unique key, please make sure it not changes.
*/
-export const createIndexedDBProvider = (
- doc: Doc,
- dbName: string = DEFAULT_DB_NAME,
- /**
- * In the future, migrate will be removed and there will be a separate function
- */
- migrate = true
-): IndexedDBProvider => {
- let resolve: () => void;
- let reject: (reason?: unknown) => void;
- let early = true;
- let connected = false;
- const dbPromise = openDB(dbName, dbVersion, {
- upgrade: upgradeDB,
- });
-
- const updateHandlerMap = new WeakMap<
- Doc,
- (update: Uint8Array, origin: unknown) => void
- >();
- const destroyHandlerMap = new WeakMap void>();
- const subDocsHandlerMap = new WeakMap void>();
-
- const createOrGetHandleUpdate = (id: string, doc: Doc) => {
- if (updateHandlerMap.has(doc)) {
- // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
- return updateHandlerMap.get(doc)!;
- }
- const fn = async function handleUpdate(
- update: Uint8Array,
- origin: unknown
- ) {
- const db = await dbPromise;
- if (!connected) {
- return;
- }
- if (origin === indexeddbOrigin) {
- return;
- }
- const store = db
- .transaction('workspace', 'readwrite')
- .objectStore('workspace');
- let data = await store.get(id);
- if (!data) {
- data = {
- id,
- updates: [],
- };
- }
- data.updates.push({
- timestamp: Date.now(),
- update,
- });
- if (data.updates.length > mergeCount) {
- const updates = data.updates.map(({ update }) => update);
- const doc = new Doc();
- doc.transact(() => {
- updates.forEach(update => {
- applyUpdate(doc, update, indexeddbOrigin);
- });
- }, indexeddbOrigin);
-
- const update = encodeStateAsUpdate(doc);
- data = {
- id,
- updates: [
- {
- timestamp: Date.now(),
- update,
- },
- ],
- };
- await writeOperation(store.put(data));
- } else {
- await writeOperation(store.put(data));
- }
- };
- updateHandlerMap.set(doc, fn);
- return fn;
- };
-
- /* deepscan-disable UNUSED_PARAM */
- const createOrGetHandleDestroy = (_: string, doc: Doc) => {
- if (destroyHandlerMap.has(doc)) {
- // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
- return destroyHandlerMap.get(doc)!;
- }
- const fn = async function handleDestroy() {
- unTrackDoc(doc.guid, doc);
- };
- destroyHandlerMap.set(doc, fn);
- return fn;
- };
-
- /* deepscan-disable UNUSED_PARAM */
- const createOrGetHandleSubDocs = (_: string, doc: Doc) => {
- if (subDocsHandlerMap.has(doc)) {
- // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
- return subDocsHandlerMap.get(doc)!;
- }
- const fn = async function handleSubDocs(event: SubDocsEvent) {
- event.removed.forEach(doc => {
- unTrackDoc(doc.guid, doc);
- });
- event.loaded.forEach(doc => {
- trackDoc(doc.guid, doc);
- });
- };
- subDocsHandlerMap.set(doc, fn);
- return fn;
- };
-
- function trackDoc(id: string, doc: Doc) {
- doc.on('update', createOrGetHandleUpdate(id, doc));
- doc.on('destroy', createOrGetHandleDestroy(id, doc));
- doc.on('subdocs', createOrGetHandleSubDocs(id, doc));
-
- doc.subdocs.forEach(doc => {
- trackDoc(doc.guid, doc);
- });
- }
-
- function unTrackDoc(id: string, doc: Doc) {
- doc.subdocs.forEach(doc => {
- unTrackDoc(doc.guid, doc);
- });
- doc.off('update', createOrGetHandleUpdate(id, doc));
- doc.off('destroy', createOrGetHandleDestroy(id, doc));
- doc.off('subdocs', createOrGetHandleSubDocs(id, doc));
- }
-
- async function saveDocOperation(id: string, doc: Doc) {
- const db = await dbPromise;
- const store = db
- .transaction('workspace', 'readwrite')
- .objectStore('workspace');
- const data = await store.get(id);
- if (!connected) {
- return;
- }
- if (!data) {
- await writeOperation(
- db.put('workspace', {
- id,
- updates: [
- {
- timestamp: Date.now(),
- update: encodeStateAsUpdate(doc),
- },
- ],
- })
- );
- } else {
- const updates = data.updates.map(({ update }) => update);
- const fakeDoc = new Doc();
- fakeDoc.transact(() => {
- updates.forEach(update => {
- applyUpdate(fakeDoc, update, indexeddbOrigin);
- });
- }, indexeddbOrigin);
- const newUpdate = diffUpdate(
- encodeStateAsUpdate(doc),
- encodeStateAsUpdate(fakeDoc)
- );
- await writeOperation(
- store.put({
- ...data,
- updates: [
- ...data.updates,
- {
- timestamp: Date.now(),
- update: newUpdate,
- },
- ],
- })
- );
- doc.transact(() => {
- updates.forEach(update => {
- applyUpdate(doc, update, indexeddbOrigin);
- });
- }, indexeddbOrigin);
- }
- }
-
- const apis = {
- connect: async () => {
- if (connected) return;
-
- apis.whenSynced = new Promise((_resolve, _reject) => {
- early = true;
- resolve = _resolve;
- reject = _reject;
- });
- connected = true;
- trackDoc(doc.guid, doc);
-
- // only the runs `await` below, otherwise the logic is incorrect
- const db = await dbPromise;
- if (migrate) {
- // Tips:
- // this is only backward compatible with the yjs official version of y-indexeddb
- await tryMigrate(db, doc.guid, dbName);
- }
- if (!connected) {
- return;
- }
-
- // recursively save all docs into indexeddb
- const docs: [string, Doc][] = [];
- docs.push([doc.guid, doc]);
- while (docs.length > 0) {
- const [id, doc] = docs.pop() as [string, Doc];
- await saveDocOperation(id, doc);
- doc.subdocs.forEach(doc => {
- docs.push([doc.guid, doc]);
- });
- }
-
- early = false;
- resolve();
- },
- disconnect() {
- connected = false;
- if (early) {
- reject(new EarlyDisconnectError());
- }
- unTrackDoc(doc.guid, doc);
- },
- async cleanup() {
- if (connected) {
- throw new CleanupWhenConnectingError();
- }
- await (await dbPromise).delete('workspace', doc.guid);
- },
- whenSynced: Promise.resolve(),
- get connected() {
- return connected;
- },
- };
-
- return apis;
-};
+export * from './provider';
export * from './shared';
export * from './utils';
diff --git a/packages/y-indexeddb/src/provider.ts b/packages/y-indexeddb/src/provider.ts
new file mode 100644
index 0000000000..8005b34d77
--- /dev/null
+++ b/packages/y-indexeddb/src/provider.ts
@@ -0,0 +1,133 @@
+import {
+ createLazyProvider,
+ type DatasourceDocAdapter,
+ writeOperation,
+} from '@affine/y-provider';
+import { openDB } from 'idb';
+import type { Doc } from 'yjs';
+import { diffUpdate, mergeUpdates } from 'yjs';
+
+import {
+ type BlockSuiteBinaryDB,
+ dbVersion,
+ DEFAULT_DB_NAME,
+ type IndexedDBProvider,
+ type UpdateMessage,
+ upgradeDB,
+} from './shared';
+
+let mergeCount = 500;
+
+export function setMergeCount(count: number) {
+ mergeCount = count;
+}
+
+const createDatasource = ({
+ dbName,
+ mergeCount,
+}: {
+ dbName: string;
+ mergeCount?: number;
+}) => {
+ const dbPromise = openDB(dbName, dbVersion, {
+ upgrade: upgradeDB,
+ });
+ const adapter = {
+ queryDocState: async (guid, options) => {
+ try {
+ const db = await dbPromise;
+ const store = db
+ .transaction('workspace', 'readonly')
+ .objectStore('workspace');
+ const data = await store.get(guid);
+
+ if (!data) {
+ return false;
+ }
+
+ const { updates } = data;
+ const update = mergeUpdates(updates.map(({ update }) => update));
+
+ const diff = options?.stateVector
+ ? diffUpdate(update, options?.stateVector)
+ : update;
+
+ return diff;
+ } catch (err: any) {
+ if (!err.message?.includes('The database connection is closing.')) {
+ throw err;
+ }
+ return false;
+ }
+ },
+ sendDocUpdate: async (guid, update) => {
+ try {
+ const db = await dbPromise;
+ const store = db
+ .transaction('workspace', 'readwrite')
+ .objectStore('workspace');
+
+ // TODO: maybe we do not need to get data every time
+ const { updates } = (await store.get(guid)) ?? { updates: [] };
+ let rows: UpdateMessage[] = [
+ ...updates,
+ { timestamp: Date.now(), update },
+ ];
+ if (mergeCount && rows.length >= mergeCount) {
+ const merged = mergeUpdates(rows.map(({ update }) => update));
+ rows = [{ timestamp: Date.now(), update: merged }];
+ }
+
+ await writeOperation(
+ store.put({
+ id: guid,
+ updates: rows,
+ })
+ );
+ } catch (err: any) {
+ if (!err.message?.includes('The database connection is closing.')) {
+ throw err;
+ }
+ }
+ },
+ } satisfies DatasourceDocAdapter;
+
+ return {
+ ...adapter,
+ disconnect: () => {
+ dbPromise.then(db => db.close()).catch(console.error);
+ },
+ cleanup: async () => {
+ const db = await dbPromise;
+ await db.clear('workspace');
+ },
+ };
+};
+
+export const createIndexedDBProvider = (
+ doc: Doc,
+ dbName: string = DEFAULT_DB_NAME
+): IndexedDBProvider => {
+ let datasource: ReturnType | null = null;
+ let provider: ReturnType | null = null;
+
+ return {
+ connect: () => {
+ datasource = createDatasource({ dbName, mergeCount });
+ provider = createLazyProvider(doc, datasource);
+ provider.connect();
+ },
+ disconnect: () => {
+ datasource?.disconnect();
+ provider?.disconnect();
+ datasource = null;
+ provider = null;
+ },
+ cleanup: async () => {
+ await datasource?.cleanup();
+ },
+ get connected() {
+ return provider?.connected || false;
+ },
+ };
+};
diff --git a/packages/y-indexeddb/src/shared.ts b/packages/y-indexeddb/src/shared.ts
index 1b994f30cf..0dbd305a4b 100644
--- a/packages/y-indexeddb/src/shared.ts
+++ b/packages/y-indexeddb/src/shared.ts
@@ -12,7 +12,6 @@ export interface IndexedDBProvider {
connect: () => void;
disconnect: () => void;
cleanup: () => Promise;
- whenSynced: Promise;
readonly connected: boolean;
}
diff --git a/packages/y-indexeddb/tsconfig.json b/packages/y-indexeddb/tsconfig.json
index e9197b23ff..9d63fadea6 100644
--- a/packages/y-indexeddb/tsconfig.json
+++ b/packages/y-indexeddb/tsconfig.json
@@ -9,6 +9,9 @@
"references": [
{
"path": "./tsconfig.node.json"
+ },
+ {
+ "path": "../y-provider"
}
]
}
diff --git a/packages/y-provider/package.json b/packages/y-provider/package.json
index 514bf86dda..1d56b01020 100644
--- a/packages/y-provider/package.json
+++ b/packages/y-provider/package.json
@@ -3,11 +3,7 @@
"type": "module",
"version": "0.7.0-canary.51",
"description": "Yjs provider utilities for AFFiNE",
- "exports": {
- ".": "./src/index.ts"
- },
"main": "./src/index.ts",
- "module": "./src/index.ts",
"devDependencies": {
"@blocksuite/store": "0.0.0-20230721134812-6e0e3bef-nightly"
},
diff --git a/packages/y-provider/src/lazy-provider.ts b/packages/y-provider/src/lazy-provider.ts
index 241f428229..9413920853 100644
--- a/packages/y-provider/src/lazy-provider.ts
+++ b/packages/y-provider/src/lazy-provider.ts
@@ -3,6 +3,7 @@ import {
applyUpdate,
type Doc,
encodeStateAsUpdate,
+ encodeStateVector,
encodeStateVectorFromUpdate,
} from 'yjs';
@@ -33,30 +34,30 @@ export const createLazyProvider = (
let connected = false;
const pendingMap = new Map(); // guid -> pending-updates
const disposableMap = new Map void>>();
- const connectedDocs = new Set();
+ const connectedDocs = new Set();
let datasourceUnsub: (() => void) | undefined;
async function syncDoc(doc: Doc) {
const guid = doc.guid;
- // perf: optimize me
- const currentUpdate = encodeStateAsUpdate(doc);
const remoteUpdate = await datasource.queryDocState(guid, {
- stateVector: encodeStateVectorFromUpdate(currentUpdate),
+ stateVector: encodeStateVector(doc),
});
- const updates = [currentUpdate];
pendingMap.set(guid, []);
if (remoteUpdate) {
applyUpdate(doc, remoteUpdate, selfUpdateOrigin);
- const newUpdate = encodeStateAsUpdate(
- doc,
- encodeStateVectorFromUpdate(remoteUpdate)
- );
- updates.push(newUpdate);
- await datasource.sendDocUpdate(guid, newUpdate);
}
+
+ const sv = remoteUpdate
+ ? encodeStateVectorFromUpdate(remoteUpdate)
+ : undefined;
+
+ // perf: optimize me
+ // it is possible the doc is only in memory but not yet in the datasource
+ // we need to send the whole update to the datasource
+ await datasource.sendDocUpdate(guid, encodeStateAsUpdate(doc, sv));
}
/**
@@ -73,10 +74,7 @@ export const createLazyProvider = (
datasource.sendDocUpdate(doc.guid, update).catch(console.error);
};
- const subdocLoadHandler = (event: {
- loaded: Set;
- removed: Set;
- }) => {
+ const subdocsHandler = (event: { loaded: Set; removed: Set }) => {
event.loaded.forEach(subdoc => {
connectDoc(subdoc).catch(console.error);
});
@@ -86,11 +84,11 @@ export const createLazyProvider = (
};
doc.on('update', updateHandler);
- doc.on('subdocs', subdocLoadHandler);
+ doc.on('subdocs', subdocsHandler);
// todo: handle destroy?
disposables.add(() => {
doc.off('update', updateHandler);
- doc.off('subdocs', subdocLoadHandler);
+ doc.off('subdocs', subdocsHandler);
});
}
@@ -127,6 +125,7 @@ export const createLazyProvider = (
connectedDocs.add(doc.guid);
setupDocListener(doc);
await syncDoc(doc);
+
await Promise.all(
[...doc.subdocs]
.filter(subdoc => subdoc.shouldLoad)
@@ -150,6 +149,7 @@ export const createLazyProvider = (
disposables.forEach(dispose => dispose());
});
disposableMap.clear();
+ connectedDocs.clear();
}
/**
diff --git a/packages/y-provider/src/utils.ts b/packages/y-provider/src/utils.ts
index 2be791444e..31a17a6616 100644
--- a/packages/y-provider/src/utils.ts
+++ b/packages/y-provider/src/utils.ts
@@ -12,3 +12,19 @@ export function getDoc(doc: Doc, guid: string): Doc | undefined {
}
return undefined;
}
+
+const saveAlert = (event: BeforeUnloadEvent) => {
+ event.preventDefault();
+ return (event.returnValue =
+ 'Data is not saved. Are you sure you want to leave?');
+};
+
+export const writeOperation = async (op: Promise) => {
+ window.addEventListener('beforeunload', saveAlert, {
+ capture: true,
+ });
+ await op;
+ window.removeEventListener('beforeunload', saveAlert, {
+ capture: true,
+ });
+};
diff --git a/yarn.lock b/yarn.lock
index 39f2864602..28b5d8c966 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -637,7 +637,7 @@ __metadata:
languageName: unknown
linkType: soft
-"@affine/y-provider@workspace:packages/y-provider":
+"@affine/y-provider@workspace:*, @affine/y-provider@workspace:packages/y-provider":
version: 0.0.0-use.local
resolution: "@affine/y-provider@workspace:packages/y-provider"
dependencies:
@@ -4209,9 +4209,9 @@ __metadata:
languageName: node
linkType: hard
-"@esbuild/android-arm64@npm:0.18.15":
- version: 0.18.15
- resolution: "@esbuild/android-arm64@npm:0.18.15"
+"@esbuild/android-arm64@npm:0.18.16":
+ version: 0.18.16
+ resolution: "@esbuild/android-arm64@npm:0.18.16"
conditions: os=android & cpu=arm64
languageName: node
linkType: hard
@@ -4223,9 +4223,9 @@ __metadata:
languageName: node
linkType: hard
-"@esbuild/android-arm@npm:0.18.15":
- version: 0.18.15
- resolution: "@esbuild/android-arm@npm:0.18.15"
+"@esbuild/android-arm@npm:0.18.16":
+ version: 0.18.16
+ resolution: "@esbuild/android-arm@npm:0.18.16"
conditions: os=android & cpu=arm
languageName: node
linkType: hard
@@ -4237,9 +4237,9 @@ __metadata:
languageName: node
linkType: hard
-"@esbuild/android-x64@npm:0.18.15":
- version: 0.18.15
- resolution: "@esbuild/android-x64@npm:0.18.15"
+"@esbuild/android-x64@npm:0.18.16":
+ version: 0.18.16
+ resolution: "@esbuild/android-x64@npm:0.18.16"
conditions: os=android & cpu=x64
languageName: node
linkType: hard
@@ -4251,9 +4251,9 @@ __metadata:
languageName: node
linkType: hard
-"@esbuild/darwin-arm64@npm:0.18.15":
- version: 0.18.15
- resolution: "@esbuild/darwin-arm64@npm:0.18.15"
+"@esbuild/darwin-arm64@npm:0.18.16":
+ version: 0.18.16
+ resolution: "@esbuild/darwin-arm64@npm:0.18.16"
conditions: os=darwin & cpu=arm64
languageName: node
linkType: hard
@@ -4265,9 +4265,9 @@ __metadata:
languageName: node
linkType: hard
-"@esbuild/darwin-x64@npm:0.18.15":
- version: 0.18.15
- resolution: "@esbuild/darwin-x64@npm:0.18.15"
+"@esbuild/darwin-x64@npm:0.18.16":
+ version: 0.18.16
+ resolution: "@esbuild/darwin-x64@npm:0.18.16"
conditions: os=darwin & cpu=x64
languageName: node
linkType: hard
@@ -4279,9 +4279,9 @@ __metadata:
languageName: node
linkType: hard
-"@esbuild/freebsd-arm64@npm:0.18.15":
- version: 0.18.15
- resolution: "@esbuild/freebsd-arm64@npm:0.18.15"
+"@esbuild/freebsd-arm64@npm:0.18.16":
+ version: 0.18.16
+ resolution: "@esbuild/freebsd-arm64@npm:0.18.16"
conditions: os=freebsd & cpu=arm64
languageName: node
linkType: hard
@@ -4293,9 +4293,9 @@ __metadata:
languageName: node
linkType: hard
-"@esbuild/freebsd-x64@npm:0.18.15":
- version: 0.18.15
- resolution: "@esbuild/freebsd-x64@npm:0.18.15"
+"@esbuild/freebsd-x64@npm:0.18.16":
+ version: 0.18.16
+ resolution: "@esbuild/freebsd-x64@npm:0.18.16"
conditions: os=freebsd & cpu=x64
languageName: node
linkType: hard
@@ -4307,9 +4307,9 @@ __metadata:
languageName: node
linkType: hard
-"@esbuild/linux-arm64@npm:0.18.15":
- version: 0.18.15
- resolution: "@esbuild/linux-arm64@npm:0.18.15"
+"@esbuild/linux-arm64@npm:0.18.16":
+ version: 0.18.16
+ resolution: "@esbuild/linux-arm64@npm:0.18.16"
conditions: os=linux & cpu=arm64
languageName: node
linkType: hard
@@ -4321,9 +4321,9 @@ __metadata:
languageName: node
linkType: hard
-"@esbuild/linux-arm@npm:0.18.15":
- version: 0.18.15
- resolution: "@esbuild/linux-arm@npm:0.18.15"
+"@esbuild/linux-arm@npm:0.18.16":
+ version: 0.18.16
+ resolution: "@esbuild/linux-arm@npm:0.18.16"
conditions: os=linux & cpu=arm
languageName: node
linkType: hard
@@ -4335,9 +4335,9 @@ __metadata:
languageName: node
linkType: hard
-"@esbuild/linux-ia32@npm:0.18.15":
- version: 0.18.15
- resolution: "@esbuild/linux-ia32@npm:0.18.15"
+"@esbuild/linux-ia32@npm:0.18.16":
+ version: 0.18.16
+ resolution: "@esbuild/linux-ia32@npm:0.18.16"
conditions: os=linux & cpu=ia32
languageName: node
linkType: hard
@@ -4349,9 +4349,9 @@ __metadata:
languageName: node
linkType: hard
-"@esbuild/linux-loong64@npm:0.18.15":
- version: 0.18.15
- resolution: "@esbuild/linux-loong64@npm:0.18.15"
+"@esbuild/linux-loong64@npm:0.18.16":
+ version: 0.18.16
+ resolution: "@esbuild/linux-loong64@npm:0.18.16"
conditions: os=linux & cpu=loong64
languageName: node
linkType: hard
@@ -4363,9 +4363,9 @@ __metadata:
languageName: node
linkType: hard
-"@esbuild/linux-mips64el@npm:0.18.15":
- version: 0.18.15
- resolution: "@esbuild/linux-mips64el@npm:0.18.15"
+"@esbuild/linux-mips64el@npm:0.18.16":
+ version: 0.18.16
+ resolution: "@esbuild/linux-mips64el@npm:0.18.16"
conditions: os=linux & cpu=mips64el
languageName: node
linkType: hard
@@ -4377,9 +4377,9 @@ __metadata:
languageName: node
linkType: hard
-"@esbuild/linux-ppc64@npm:0.18.15":
- version: 0.18.15
- resolution: "@esbuild/linux-ppc64@npm:0.18.15"
+"@esbuild/linux-ppc64@npm:0.18.16":
+ version: 0.18.16
+ resolution: "@esbuild/linux-ppc64@npm:0.18.16"
conditions: os=linux & cpu=ppc64
languageName: node
linkType: hard
@@ -4391,9 +4391,9 @@ __metadata:
languageName: node
linkType: hard
-"@esbuild/linux-riscv64@npm:0.18.15":
- version: 0.18.15
- resolution: "@esbuild/linux-riscv64@npm:0.18.15"
+"@esbuild/linux-riscv64@npm:0.18.16":
+ version: 0.18.16
+ resolution: "@esbuild/linux-riscv64@npm:0.18.16"
conditions: os=linux & cpu=riscv64
languageName: node
linkType: hard
@@ -4405,9 +4405,9 @@ __metadata:
languageName: node
linkType: hard
-"@esbuild/linux-s390x@npm:0.18.15":
- version: 0.18.15
- resolution: "@esbuild/linux-s390x@npm:0.18.15"
+"@esbuild/linux-s390x@npm:0.18.16":
+ version: 0.18.16
+ resolution: "@esbuild/linux-s390x@npm:0.18.16"
conditions: os=linux & cpu=s390x
languageName: node
linkType: hard
@@ -4419,9 +4419,9 @@ __metadata:
languageName: node
linkType: hard
-"@esbuild/linux-x64@npm:0.18.15":
- version: 0.18.15
- resolution: "@esbuild/linux-x64@npm:0.18.15"
+"@esbuild/linux-x64@npm:0.18.16":
+ version: 0.18.16
+ resolution: "@esbuild/linux-x64@npm:0.18.16"
conditions: os=linux & cpu=x64
languageName: node
linkType: hard
@@ -4433,9 +4433,9 @@ __metadata:
languageName: node
linkType: hard
-"@esbuild/netbsd-x64@npm:0.18.15":
- version: 0.18.15
- resolution: "@esbuild/netbsd-x64@npm:0.18.15"
+"@esbuild/netbsd-x64@npm:0.18.16":
+ version: 0.18.16
+ resolution: "@esbuild/netbsd-x64@npm:0.18.16"
conditions: os=netbsd & cpu=x64
languageName: node
linkType: hard
@@ -4447,9 +4447,9 @@ __metadata:
languageName: node
linkType: hard
-"@esbuild/openbsd-x64@npm:0.18.15":
- version: 0.18.15
- resolution: "@esbuild/openbsd-x64@npm:0.18.15"
+"@esbuild/openbsd-x64@npm:0.18.16":
+ version: 0.18.16
+ resolution: "@esbuild/openbsd-x64@npm:0.18.16"
conditions: os=openbsd & cpu=x64
languageName: node
linkType: hard
@@ -4461,9 +4461,9 @@ __metadata:
languageName: node
linkType: hard
-"@esbuild/sunos-x64@npm:0.18.15":
- version: 0.18.15
- resolution: "@esbuild/sunos-x64@npm:0.18.15"
+"@esbuild/sunos-x64@npm:0.18.16":
+ version: 0.18.16
+ resolution: "@esbuild/sunos-x64@npm:0.18.16"
conditions: os=sunos & cpu=x64
languageName: node
linkType: hard
@@ -4475,9 +4475,9 @@ __metadata:
languageName: node
linkType: hard
-"@esbuild/win32-arm64@npm:0.18.15":
- version: 0.18.15
- resolution: "@esbuild/win32-arm64@npm:0.18.15"
+"@esbuild/win32-arm64@npm:0.18.16":
+ version: 0.18.16
+ resolution: "@esbuild/win32-arm64@npm:0.18.16"
conditions: os=win32 & cpu=arm64
languageName: node
linkType: hard
@@ -4489,9 +4489,9 @@ __metadata:
languageName: node
linkType: hard
-"@esbuild/win32-ia32@npm:0.18.15":
- version: 0.18.15
- resolution: "@esbuild/win32-ia32@npm:0.18.15"
+"@esbuild/win32-ia32@npm:0.18.16":
+ version: 0.18.16
+ resolution: "@esbuild/win32-ia32@npm:0.18.16"
conditions: os=win32 & cpu=ia32
languageName: node
linkType: hard
@@ -4503,9 +4503,9 @@ __metadata:
languageName: node
linkType: hard
-"@esbuild/win32-x64@npm:0.18.15":
- version: 0.18.15
- resolution: "@esbuild/win32-x64@npm:0.18.15"
+"@esbuild/win32-x64@npm:0.18.16":
+ version: 0.18.16
+ resolution: "@esbuild/win32-x64@npm:0.18.16"
conditions: os=win32 & cpu=x64
languageName: node
linkType: hard
@@ -6306,8 +6306,8 @@ __metadata:
linkType: hard
"@nestjs/common@npm:^10.0.4":
- version: 10.1.0
- resolution: "@nestjs/common@npm:10.1.0"
+ version: 10.1.1
+ resolution: "@nestjs/common@npm:10.1.1"
dependencies:
iterare: 1.2.1
tslib: 2.6.0
@@ -6322,13 +6322,13 @@ __metadata:
optional: true
class-validator:
optional: true
- checksum: 304b3e8f6396ef5b74079ad7c5d0294b81a671979e9f5a50fe3a9ea629d3b7d55edf57921b007503ea688c28eafc060749271d8b5d224ead1e275ea6b75edb81
+ checksum: e38f41af79b20ae2a22a2e68dffb23cbfc3529c125f5580f960cfe7f6a48d2d18e93641fbf99f634aef152dd3cd252ef56743d78fab218c51f7d45be1da584d4
languageName: node
linkType: hard
"@nestjs/core@npm:^10.0.4":
- version: 10.1.0
- resolution: "@nestjs/core@npm:10.1.0"
+ version: 10.1.1
+ resolution: "@nestjs/core@npm:10.1.1"
dependencies:
"@nuxtjs/opencollective": 0.3.2
fast-safe-stringify: 2.1.1
@@ -6350,7 +6350,7 @@ __metadata:
optional: true
"@nestjs/websockets":
optional: true
- checksum: 69a5f8471c52931ce0795c2797dc1351ebeda93e4292810269f588c92714bc9eab31f8280896407456d969b15f30376e1204df10b7c03cdb173c917dee83c939
+ checksum: da367f1ecaca32d8d508932e89682159f53404c49deb8a718133ce421882673e584f28bb958d11ad0e81ba048a4bf0d077697e537f70fe5653b3adf7449d8a32
languageName: node
linkType: hard
@@ -6412,8 +6412,8 @@ __metadata:
linkType: hard
"@nestjs/platform-express@npm:^10.0.4":
- version: 10.1.0
- resolution: "@nestjs/platform-express@npm:10.1.0"
+ version: 10.1.1
+ resolution: "@nestjs/platform-express@npm:10.1.1"
dependencies:
body-parser: 1.20.2
cors: 2.8.5
@@ -6423,13 +6423,13 @@ __metadata:
peerDependencies:
"@nestjs/common": ^10.0.0
"@nestjs/core": ^10.0.0
- checksum: 8a1a5089136549a68b32f5f15980f7bbd83991da955b8961947bf320780541e44a8c43d8a23626cfdc113044660343d152c6dfb675d6d7dbfb24efd74169db57
+ checksum: e1a09001b52bbae5dc44a33afbf2315b30f958865ba0fd28a5f6705e6e8e3abb2ee4b7a8802682c0d9ef8185fb55ede4fa7ccf353c431c3e71079a1d794e61f1
languageName: node
linkType: hard
"@nestjs/testing@npm:^10.0.4":
- version: 10.1.0
- resolution: "@nestjs/testing@npm:10.1.0"
+ version: 10.1.1
+ resolution: "@nestjs/testing@npm:10.1.1"
dependencies:
tslib: 2.6.0
peerDependencies:
@@ -6442,7 +6442,7 @@ __metadata:
optional: true
"@nestjs/platform-express":
optional: true
- checksum: 80e609e4d072ec9aa3a66ca22933e3d2303ecb3a9bf97db1b81a8f700d3303e6cba2769b701a237793488edf4c102fb2c4828ff9431129008ecec492ca95df34
+ checksum: 72ec032aac6f04bbba25cff6ac452954719fe81042122d88ecf8281ed3fe1b40c638ff9a7c7f45182397b7cb30b45a57c459d1f3585938a5b5719475d6c8db9e
languageName: node
linkType: hard
@@ -8714,8 +8714,8 @@ __metadata:
linkType: hard
"@sentry/cli@npm:^2.17.0":
- version: 2.19.4
- resolution: "@sentry/cli@npm:2.19.4"
+ version: 2.20.0
+ resolution: "@sentry/cli@npm:2.20.0"
dependencies:
https-proxy-agent: ^5.0.0
node-fetch: ^2.6.7
@@ -8724,7 +8724,7 @@ __metadata:
which: ^2.0.2
bin:
sentry-cli: bin/sentry-cli
- checksum: 1f2442857a5eec2bc6f872a633d88fc2f11ed7f434db36627a034d904390f4cbbb4dccc33c571a8815e423cd36b863c72621298d49a1541b28370c7f7308f0dc
+ checksum: 7ad32ecec015233b2aa823ed0a4a3d5f76bd2d91ae8cc43ab3f9a9c03359b05bae145f53e3dfb3504ae873479f703fec72de64e564aa43c5cc33847ce72f83b3
languageName: node
linkType: hard
@@ -10995,6 +10995,7 @@ __metadata:
version: 0.0.0-use.local
resolution: "@toeverything/y-indexeddb@workspace:packages/y-indexeddb"
dependencies:
+ "@affine/y-provider": "workspace:*"
"@blocksuite/blocks": 0.0.0-20230721134812-6e0e3bef-nightly
"@blocksuite/store": 0.0.0-20230721134812-6e0e3bef-nightly
idb: ^7.1.1
@@ -11595,9 +11596,9 @@ __metadata:
linkType: hard
"@types/node@npm:*":
- version: 20.4.3
- resolution: "@types/node@npm:20.4.3"
- checksum: 4df3664821af2328c25e3d78486ec94f45f5f4a35222678954b5c7dd5d4c4e5a0cdc9f51e09a7f7f078559639293f6cee79d37e751295552be824af1f3d8ad12
+ version: 20.4.4
+ resolution: "@types/node@npm:20.4.4"
+ checksum: 43f3c4a8acc38ae753e15a0e79bae0447d255b3742fa87f8e065d7b9d20ecb0e03d6c5b46c00d5d26f4552160381a00255f49205595a8ee48c2423e00263c930
languageName: node
linkType: hard
@@ -11609,9 +11610,9 @@ __metadata:
linkType: hard
"@types/node@npm:^18.11.18, @types/node@npm:^18.16.20, @types/node@npm:^18.16.3":
- version: 18.16.20
- resolution: "@types/node@npm:18.16.20"
- checksum: 00ea5668e12047447b20bb31408ae717823e5b879c4c6296036216756874b2dc7976086343ffb6a717eb52e8d73dc6972d90b9134ef464b7361673228a02ed41
+ version: 18.17.0
+ resolution: "@types/node@npm:18.17.0"
+ checksum: 3a43c5c5541342751b514485144818a515fac5427f663066068eaacbe8a108cbe1207aae75ec89d34c3b32414c334aad84e9083cf7fcf3ebfd970adc871314a4
languageName: node
linkType: hard
@@ -17407,31 +17408,31 @@ __metadata:
linkType: hard
"esbuild@npm:^0.18.0, esbuild@npm:^0.18.10, esbuild@npm:^0.18.15":
- version: 0.18.15
- resolution: "esbuild@npm:0.18.15"
+ version: 0.18.16
+ resolution: "esbuild@npm:0.18.16"
dependencies:
- "@esbuild/android-arm": 0.18.15
- "@esbuild/android-arm64": 0.18.15
- "@esbuild/android-x64": 0.18.15
- "@esbuild/darwin-arm64": 0.18.15
- "@esbuild/darwin-x64": 0.18.15
- "@esbuild/freebsd-arm64": 0.18.15
- "@esbuild/freebsd-x64": 0.18.15
- "@esbuild/linux-arm": 0.18.15
- "@esbuild/linux-arm64": 0.18.15
- "@esbuild/linux-ia32": 0.18.15
- "@esbuild/linux-loong64": 0.18.15
- "@esbuild/linux-mips64el": 0.18.15
- "@esbuild/linux-ppc64": 0.18.15
- "@esbuild/linux-riscv64": 0.18.15
- "@esbuild/linux-s390x": 0.18.15
- "@esbuild/linux-x64": 0.18.15
- "@esbuild/netbsd-x64": 0.18.15
- "@esbuild/openbsd-x64": 0.18.15
- "@esbuild/sunos-x64": 0.18.15
- "@esbuild/win32-arm64": 0.18.15
- "@esbuild/win32-ia32": 0.18.15
- "@esbuild/win32-x64": 0.18.15
+ "@esbuild/android-arm": 0.18.16
+ "@esbuild/android-arm64": 0.18.16
+ "@esbuild/android-x64": 0.18.16
+ "@esbuild/darwin-arm64": 0.18.16
+ "@esbuild/darwin-x64": 0.18.16
+ "@esbuild/freebsd-arm64": 0.18.16
+ "@esbuild/freebsd-x64": 0.18.16
+ "@esbuild/linux-arm": 0.18.16
+ "@esbuild/linux-arm64": 0.18.16
+ "@esbuild/linux-ia32": 0.18.16
+ "@esbuild/linux-loong64": 0.18.16
+ "@esbuild/linux-mips64el": 0.18.16
+ "@esbuild/linux-ppc64": 0.18.16
+ "@esbuild/linux-riscv64": 0.18.16
+ "@esbuild/linux-s390x": 0.18.16
+ "@esbuild/linux-x64": 0.18.16
+ "@esbuild/netbsd-x64": 0.18.16
+ "@esbuild/openbsd-x64": 0.18.16
+ "@esbuild/sunos-x64": 0.18.16
+ "@esbuild/win32-arm64": 0.18.16
+ "@esbuild/win32-ia32": 0.18.16
+ "@esbuild/win32-x64": 0.18.16
dependenciesMeta:
"@esbuild/android-arm":
optional: true
@@ -17479,7 +17480,7 @@ __metadata:
optional: true
bin:
esbuild: bin/esbuild
- checksum: 81eb78fb5ff9f4304512e383f518862875fa7a0cce0ef9e0270d80ebd21bf0210d5cd6cf721f8a96ca64cc2d36ade03d9ed80cbcd5e81530b5a7b95ab86f8ca6
+ checksum: 8b04f7087d4e95cffd43c37663c57a9f42e183cd3d02fe3cefee7e1534a84d5cec181f42187715471b4b3f5478e2e110530df7d3a8b12053cbc8cc35145363ab
languageName: node
linkType: hard
@@ -31923,9 +31924,9 @@ __metadata:
linkType: hard
"word-wrap@npm:^1.2.3":
- version: 1.2.4
- resolution: "word-wrap@npm:1.2.4"
- checksum: 8f1f2e0a397c0e074ca225ba9f67baa23f99293bc064e31355d426ae91b8b3f6b5f6c1fc9ae5e9141178bb362d563f55e62fd8d5c31f2a77e3ade56cb3e35bd1
+ version: 1.2.5
+ resolution: "word-wrap@npm:1.2.5"
+ checksum: f93ba3586fc181f94afdaff3a6fef27920b4b6d9eaefed0f428f8e07adea2a7f54a5f2830ce59406c8416f033f86902b91eb824072354645eea687dff3691ccb
languageName: node
linkType: hard