refactor(infra): directory structure (#4615)

This commit is contained in:
Joooye_34
2023-10-18 23:30:08 +08:00
committed by GitHub
parent 814d552be8
commit bed9310519
1150 changed files with 539 additions and 584 deletions

View File

@@ -0,0 +1,145 @@
/**
* @vitest-environment happy-dom
*/
import { uploadAvatarMutation } from '@affine/graphql';
import { render } from '@testing-library/react';
import type { Mock } from 'vitest';
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import { useMutation, useQuery } from '../gql';
// The stubbed global `fetch`; re-created before every test so request
// counts are isolated per test case.
let fetch: Mock;
describe('GraphQL wrapper for SWR', () => {
beforeEach(() => {
// Every request resolves `{ data: { hello: 1 } }` with a JSON content type
// so the gql fetcher parses it as a successful GraphQL response.
fetch = vi.fn(() =>
Promise.resolve(
new Response(JSON.stringify({ data: { hello: 1 } }), {
headers: {
'content-type': 'application/json',
},
})
)
);
vi.stubGlobal('fetch', fetch);
});
afterEach(() => {
fetch.mockReset();
});
describe('useQuery', () => {
// Minimal component rendering the query result; `id` only feeds the
// variables so that different ids produce different SWR cache keys.
const Component = ({ id }: { id: number }) => {
const { data, isLoading, error } = useQuery({
query: {
id: 'query',
query: `
query {
hello
}
`,
operationName: 'query',
definitionName: 'query',
},
// @ts-expect-error forgive the fake variables
variables: { id },
});
if (isLoading) {
return <div>loading</div>;
}
if (error) {
return <div>error</div>;
}
// @ts-expect-error
return <div>number: {data!.hello}</div>;
};
it('should send query correctly', async () => {
const component = <Component id={1} />;
const renderer = render(component);
// wait until the mocked response has rendered
const el = await renderer.findByText('number: 1');
expect(el).toMatchInlineSnapshot(`
<div>
number:${' '}
1
</div>
`);
});
it('should not send request if cache hit', async () => {
const component = <Component id={2} />;
const renderer = render(component);
expect(fetch).toBeCalledTimes(1);
// same variables -> same SWR key -> served from cache, no new request
renderer.rerender(component);
expect(fetch).toBeCalledTimes(1);
// different variables -> new SWR key -> one more request
render(<Component id={3} />);
expect(fetch).toBeCalledTimes(2);
});
});
describe('useMutation', () => {
// Component exposing a button that fires the mutation on click.
const Component = () => {
const { trigger, error, isMutating } = useMutation({
mutation: {
id: 'mutation',
query: `
mutation {
hello
}
`,
operationName: 'mutation',
definitionName: 'mutation',
},
});
if (isMutating) {
return <div>mutating</div>;
}
if (error) {
return <div>error</div>;
}
return (
<div>
<button onClick={() => trigger()}>click</button>
</div>
);
};
it('should trigger mutation', async () => {
const component = <Component />;
const renderer = render(component);
const button = await renderer.findByText('click');
button.click();
expect(fetch).toBeCalledTimes(1);
// re-render while the request is in flight to observe `isMutating`
renderer.rerender(component);
expect(renderer.asFragment()).toMatchInlineSnapshot(`
<DocumentFragment>
<div>
mutating
</div>
</DocumentFragment>
`);
});
it('should get rid of generated types', async () => {
// Compile-time-only check: this function is never executed, it just has
// to type-check against the generated mutation types.
function _NotActuallyRunDefinedForTypeTesting() {
const { trigger } = useMutation({
mutation: uploadAvatarMutation,
});
trigger({
avatar: new File([''], 'avatar.png'),
});
}
expect(_NotActuallyRunDefinedForTypeTesting).toBeTypeOf('function');
});
});
});

View File

@@ -0,0 +1,175 @@
import type {
AffineCloudWorkspace,
WorkspaceCRUD,
} from '@affine/env/workspace';
import { WorkspaceFlavour } from '@affine/env/workspace';
import {
createWorkspaceMutation,
deleteWorkspaceMutation,
getWorkspaceQuery,
getWorkspacesQuery,
} from '@affine/graphql';
import { createAffineDataSource } from '@affine/workspace/affine/index';
import { createIndexeddbStorage, Workspace } from '@blocksuite/store';
import { migrateLocalBlobStorage } from '@toeverything/infra/blocksuite';
import {
createIndexedDBProvider,
DEFAULT_DB_NAME,
} from '@toeverything/y-indexeddb';
import { getSession } from 'next-auth/react';
import { proxy } from 'valtio/vanilla';
import { syncDataSourceFromDoc } from 'y-provider';
import { getOrCreateWorkspace } from '../manager';
import { fetcher } from './gql';
// Alias the Yjs namespace re-exported by BlockSuite.
const Y = Workspace.Y;

/**
 * Remove every blob stored in the IndexedDB blob storage belonging to the
 * given workspace id, deleting the entries one at a time.
 */
async function deleteLocalBlobStorage(id: string) {
  const blobStorage = createIndexeddbStorage(id);
  for (const blobKey of await blobStorage.crud.list()) {
    await blobStorage.crud.delete(blobKey);
  }
}

// We deliberately do not persist this list to local storage: if the user
// clicks "create" several times while the server is slow, we only need to
// de-duplicate within the current session — and creating a workspace is a
// long user journey anyway.
const createdWorkspaces = proxy<string[]>([]);
// CRUD implementation for AFFiNE-cloud-hosted workspaces.
export const CRUD: WorkspaceCRUD<WorkspaceFlavour.AFFINE_CLOUD> = {
create: async upstreamWorkspace => {
// session-local de-duplication guard (see `createdWorkspaces` above)
if (createdWorkspaces.some(id => id === upstreamWorkspace.id)) {
throw new Error('workspace already created');
}
// upload the full upstream doc state as the new workspace's init binary
const { createWorkspace } = await fetcher({
query: createWorkspaceMutation,
variables: {
init: new File(
[Y.encodeStateAsUpdate(upstreamWorkspace.doc)],
'initBinary.yDoc'
),
},
});
createdWorkspaces.push(upstreamWorkspace.id);
// mirror the upstream (local) workspace into a fresh cloud workspace
const newBlockSuiteWorkspace = getOrCreateWorkspace(
createWorkspace.id,
WorkspaceFlavour.AFFINE_CLOUD
);
const datasource = createAffineDataSource(
createWorkspace.id,
newBlockSuiteWorkspace.doc,
newBlockSuiteWorkspace.awarenessStore.awareness
);
// push the upstream doc to the remote datasource, then apply its state
// to the new local copy as well
await syncDataSourceFromDoc(upstreamWorkspace.doc, datasource);
Y.applyUpdate(
newBlockSuiteWorkspace.doc,
Y.encodeStateAsUpdate(upstreamWorkspace.doc)
);
// copy each upstream subdoc into its counterpart, matched by guid
await Promise.all(
[...upstreamWorkspace.doc.subdocs].map(async subdoc => {
subdoc.load();
return subdoc.whenLoaded.then(() => {
newBlockSuiteWorkspace.doc.subdocs.forEach(newSubdoc => {
if (newSubdoc.guid === subdoc.guid) {
Y.applyUpdate(newSubdoc, Y.encodeStateAsUpdate(subdoc));
}
});
});
})
);
// persist the new workspace into IndexedDB locally
const provider = createIndexedDBProvider(
newBlockSuiteWorkspace.doc,
DEFAULT_DB_NAME
);
provider.connect();
// migrate blobs in the background; failures are logged, not fatal
migrateLocalBlobStorage(upstreamWorkspace.id, createWorkspace.id)
.then(() => deleteLocalBlobStorage(upstreamWorkspace.id))
.catch(e => {
console.error('error when moving blob storage:', e);
});
// todo(himself65): delete old workspace in the future
return createWorkspace.id;
},
delete: async workspace => {
await fetcher({
query: deleteWorkspaceMutation,
variables: {
id: workspace.id,
},
});
},
get: async id => {
if (!environment.isServer && !navigator.onLine) {
// no network
return null;
}
// not signed in -> cloud workspaces are not accessible
if (
!(await getSession()
.then(() => true)
.catch(() => false))
) {
return null;
}
try {
// existence check only; the response body is not used
await fetcher({
query: getWorkspaceQuery,
variables: {
id,
},
});
return {
id,
flavour: WorkspaceFlavour.AFFINE_CLOUD,
blockSuiteWorkspace: getOrCreateWorkspace(
id,
WorkspaceFlavour.AFFINE_CLOUD
),
} satisfies AffineCloudWorkspace;
} catch (e) {
console.error('error when fetching cloud workspace:', e);
return null;
}
},
list: async () => {
if (!environment.isServer && !navigator.onLine) {
// no network
return [];
}
// not signed in -> no cloud workspaces
if (
!(await getSession()
.then(() => true)
.catch(() => false))
) {
return [];
}
try {
const { workspaces } = await fetcher({
query: getWorkspacesQuery,
});
const ids = workspaces.map(({ id }) => id);
return ids.map(
id =>
({
id,
flavour: WorkspaceFlavour.AFFINE_CLOUD,
blockSuiteWorkspace: getOrCreateWorkspace(
id,
WorkspaceFlavour.AFFINE_CLOUD
),
}) satisfies AffineCloudWorkspace
);
} catch (e) {
console.error('error when fetching cloud workspaces:', e);
return [];
}
},
};

View File

@@ -0,0 +1,131 @@
import { setupGlobal } from '@affine/env/global';
import type {
GraphQLQuery,
MutationOptions,
QueryOptions,
QueryResponse,
QueryVariables,
} from '@affine/graphql';
import { gqlFetcherFactory } from '@affine/graphql';
import type { GraphQLError } from 'graphql';
import type { Key, SWRConfiguration, SWRResponse } from 'swr';
import useSWR from 'swr';
import type {
SWRMutationConfiguration,
SWRMutationResponse,
} from 'swr/mutation';
import useSWRMutation from 'swr/mutation';
// Run global environment setup before constructing the fetcher below.
setupGlobal();
// GraphQL fetcher bound to this deployment's `/graphql` endpoint; used by
// the hooks in this file and exported for direct (non-hook) use.
export const fetcher = gqlFetcherFactory(
runtimeConfig.serverUrlPrefix + '/graphql'
);
/**
* A `useSWR` wrapper for sending graphql queries
*
* @example
*
* ```ts
* import { someQuery, someQueryWithNoVars } from '@affine/graphql'
*
* const swrResponse1 = useQuery({
* query: someQuery,
* variables: { id: '1' }
* })
*
* const swrResponse2 = useQuery({
* query: someQueryWithNoVars
* })
* ```
*/
// overload: without per-call SWR configuration
export function useQuery<Query extends GraphQLQuery>(
options: QueryOptions<Query>
): SWRResponse<
QueryResponse<Query>,
GraphQLError | GraphQLError[],
{
suspense: true;
}
>;
// overload: with SWR configuration (everything except `fetcher`)
export function useQuery<Query extends GraphQLQuery>(
options: QueryOptions<Query>,
config: Omit<
SWRConfiguration<
QueryResponse<Query>,
GraphQLError | GraphQLError[],
typeof fetcher<Query>
>,
'fetcher'
>
): SWRResponse<
QueryResponse<Query>,
GraphQLError | GraphQLError[],
{
suspense: true;
}
>;
export function useQuery<Query extends GraphQLQuery>(
options: QueryOptions<Query>,
config?: any
) {
// cache key: scope + query id + variables, so distinct variables get
// distinct SWR cache entries
return useSWR(
() => ['cloud', options.query.id, options.variables],
() => fetcher(options),
config
);
}
/**
* A useSWRMutation wrapper for sending graphql mutations
*
* @example
*
* ```ts
* import { someMutation } from '@affine/graphql'
*
* const { trigger } = useMutation({
* mutation: someMutation,
* })
*
* trigger({ name: 'John Doe' })
* ```
*/
// overload: without per-call SWR mutation configuration
export function useMutation<Mutation extends GraphQLQuery, K extends Key = Key>(
options: Omit<MutationOptions<Mutation>, 'variables'>
): SWRMutationResponse<
QueryResponse<Mutation>,
GraphQLError | GraphQLError[],
K,
QueryVariables<Mutation>
>;
// overload: with SWR mutation configuration (everything except `fetcher`)
export function useMutation<Mutation extends GraphQLQuery, K extends Key = Key>(
options: Omit<MutationOptions<Mutation>, 'variables'>,
config: Omit<
SWRMutationConfiguration<
QueryResponse<Mutation>,
GraphQLError | GraphQLError[],
K,
QueryVariables<Mutation>
>,
'fetcher'
>
): SWRMutationResponse<
QueryResponse<Mutation>,
GraphQLError | GraphQLError[],
K,
QueryVariables<Mutation>
>;
export function useMutation(
options: Omit<MutationOptions<GraphQLQuery>, 'variables'>,
config?: any
) {
return useSWRMutation(
() => ['cloud', options.mutation.id],
// variables are supplied at trigger() time via `arg`
(_: unknown[], { arg }: { arg: any }) =>
fetcher({ ...options, query: options.mutation, variables: arg }),
config
);
}
// re-export the fetcher for non-hook (imperative) usage
export const gql = fetcher;

View File

@@ -0,0 +1,206 @@
import { DebugLogger } from '@affine/debug';
import type { Socket } from 'socket.io-client';
import { Manager } from 'socket.io-client';
import {
applyAwarenessUpdate,
type Awareness,
encodeAwarenessUpdate,
} from 'y-protocols/awareness';
import type { DocDataSource } from 'y-provider';
import type { Doc } from 'yjs';
import {
type AwarenessChanges,
base64ToUint8Array,
uint8ArrayToBase64,
} from './utils';
let ioManager: Manager | null = null;

/**
 * Return the shared socket.io Manager, constructing it on first use.
 * Lazy initialization keeps importing this module free of side effects.
 */
function getIoManager(): Manager {
  if (ioManager === null) {
    ioManager = new Manager(runtimeConfig.serverUrlPrefix + '/', {
      autoConnect: false,
      transports: ['websocket'],
    });
  }
  return ioManager;
}

const logger = new DebugLogger('affine:sync');
/**
 * Build a `DocDataSource` that syncs `rootDoc` (and its subdocs) with the
 * AFFiNE server over socket.io, including awareness (presence) sync.
 * Binary doc payloads travel base64-encoded over the socket.
 */
export const createAffineDataSource = (
id: string,
rootDoc: Doc,
awareness: Awareness
) => {
if (id !== rootDoc.guid) {
console.warn('important!! please use doc.guid as roomName');
}
logger.debug('createAffineDataSource', id, rootDoc.guid, awareness);
const socket = getIoManager().socket('/');
return {
get socket() {
return socket;
},
queryDocState: async (guid, options) => {
const stateVector = options?.stateVector
? await uint8ArrayToBase64(options.stateVector)
: undefined;
return new Promise((resolve, reject) => {
logger.debug('doc-load', {
workspaceId: rootDoc.guid,
guid,
stateVector,
});
socket.emit(
'doc-load',
{
workspaceId: rootDoc.guid,
guid,
stateVector,
},
// server ack: Error on failure, null when the doc is unknown
(docState: Error | { missing: string; state: string } | null) => {
logger.debug('doc-load callback', {
workspaceId: rootDoc.guid,
guid,
stateVector,
docState,
});
if (docState instanceof Error) {
reject(docState);
return;
}
resolve(
docState
? {
missing: base64ToUint8Array(docState.missing),
state: docState.state
? base64ToUint8Array(docState.state)
: undefined,
}
: false
);
}
);
});
},
sendDocUpdate: async (guid: string, update: Uint8Array) => {
logger.debug('client-update', {
workspaceId: rootDoc.guid,
guid,
update,
});
socket.emit('client-update', {
workspaceId: rootDoc.guid,
guid,
update: await uint8ArrayToBase64(update),
});
return Promise.resolve();
},
onDocUpdate: callback => {
// (re)announce ourselves whenever the socket (re)connects
socket.on('connect', () => {
socket.emit('client-handshake', rootDoc.guid);
});
const onUpdate = async (message: {
workspaceId: string;
guid: string;
update: string;
}) => {
// ignore updates addressed to other workspaces
if (message.workspaceId === rootDoc.guid) {
callback(message.guid, base64ToUint8Array(message.update));
}
};
socket.on('server-update', onUpdate);
const destroyAwareness = setupAffineAwareness(socket, rootDoc, awareness);
socket.connect();
// teardown: leave the room and detach every listener added above
return () => {
socket.emit('client-leave', rootDoc.guid);
socket.off('server-update', onUpdate);
destroyAwareness();
socket.disconnect();
};
},
} satisfies DocDataSource & { readonly socket: Socket };
};
/**
 * Wire awareness (cursor/presence) sync between the local Yjs `Awareness`
 * instance and the server over the given socket.
 *
 * @returns a cleanup function that detaches every listener added here.
 */
function setupAffineAwareness(
  conn: Socket,
  rootDoc: Doc,
  awareness: Awareness
) {
  // Server -> client: apply broadcast awareness updates for our workspace.
  const awarenessBroadcast = ({
    workspaceId,
    awarenessUpdate,
  }: {
    workspaceId: string;
    awarenessUpdate: string;
  }) => {
    if (workspaceId !== rootDoc.guid) {
      return;
    }
    applyAwarenessUpdate(
      awareness,
      base64ToUint8Array(awarenessUpdate),
      'server'
    );
  };
  // Client -> server: forward local awareness changes; skip updates that
  // originated from the server to avoid an echo loop.
  const awarenessUpdate = (changes: AwarenessChanges, origin: unknown) => {
    if (origin === 'server') {
      return;
    }
    // `changes` groups client ids into added/updated/removed buckets;
    // flatten them into a single list (no reduce-without-initial, which
    // would throw on an empty object).
    const changedClients = Object.values(changes).flat();
    const update = encodeAwarenessUpdate(awareness, changedClients);
    uint8ArrayToBase64(update)
      .then(encodedUpdate => {
        conn.emit('awareness-update', {
          workspaceId: rootDoc.guid,
          awarenessUpdate: encodedUpdate,
        });
      })
      .catch(err => console.error(err));
  };
  // When another client joins, send them our own awareness state.
  // (Renamed local: the previous `awarenessUpdate` const shadowed the
  // handler above.)
  const newClientAwarenessInitHandler = () => {
    const localStateUpdate = encodeAwarenessUpdate(awareness, [
      awareness.clientID,
    ]);
    uint8ArrayToBase64(localStateUpdate)
      .then(encodedAwarenessUpdate => {
        conn.emit('awareness-update', {
          // NOTE(review): this payload uses `guid` while the handler above
          // emits `workspaceId` for the same event name — confirm which key
          // the server expects; one of the two looks wrong.
          guid: rootDoc.guid,
          awarenessUpdate: encodedAwarenessUpdate,
        });
      })
      .catch(err => console.error(err));
  };
  conn.on('server-awareness-broadcast', awarenessBroadcast);
  conn.on('new-client-awareness-init', newClientAwarenessInitHandler);
  awareness.on('update', awarenessUpdate);
  conn.on('connect', () => {
    conn.emit('awareness-init', rootDoc.guid);
  });
  return () => {
    awareness.off('update', awarenessUpdate);
    conn.off('server-awareness-broadcast', awarenessBroadcast);
    conn.off('new-client-awareness-init', newClientAwarenessInitHandler);
  };
}

View File

@@ -0,0 +1,111 @@
import { createIndexeddbStorage } from '@blocksuite/store';
import {
createIndexedDBDatasource,
DEFAULT_DB_NAME,
downloadBinary,
} from '@toeverything/y-indexeddb';
import { syncDataSource } from 'y-provider';
import type { Doc } from 'yjs';
import { applyUpdate } from 'yjs';
import { createCloudBlobStorage } from '../blob/cloud-blob-storage';
import { createAffineDataSource } from '.';
import { CRUD } from './crud';
// Controller for the currently running sync; created by startSync and
// aborted by stopSync.
let abortController: AbortController | undefined;

/**
 * Apply the locally persisted (IndexedDB) binary of a workspace root doc,
 * if one exists. Does nothing once `signal` has been aborted.
 */
const downloadRootFromIndexedDB = async (
  rootGuid: string,
  doc: Doc,
  signal: AbortSignal
): Promise<void> => {
  if (signal.aborted) {
    return;
  }
  const binary = await downloadBinary(rootGuid);
  if (binary === false) {
    return;
  }
  applyUpdate(doc, binary);
};
/**
 * Start sync for every cloud workspace returned by CRUD.list(): hydrate root
 * docs from IndexedDB, wire remote <-> IndexedDB doc sync, and reconcile
 * blobs by diffing key sets. Aborted via the module-level AbortController.
 */
export async function startSync() {
abortController = new AbortController();
const signal = abortController.signal;
const workspaces = await CRUD.list();
// step 1: hydrate each root doc from its local IndexedDB copy first
const syncDocPromises = workspaces.map(workspace =>
downloadRootFromIndexedDB(
workspace.id,
workspace.blockSuiteWorkspace.doc,
signal
)
);
await Promise.all(syncDocPromises);
// step 2: wire remote <-> indexeddb sync for root doc and its subdocs
const syncPromises = workspaces.map(workspace => {
const remoteDataSource = createAffineDataSource(
workspace.id,
workspace.blockSuiteWorkspace.doc,
workspace.blockSuiteWorkspace.awarenessStore.awareness
);
const indexeddbDataSource = createIndexedDBDatasource({
dbName: DEFAULT_DB_NAME,
});
return syncDataSource(
// guid provider: root doc plus all currently-known subdocs
(): string[] => [
workspace.blockSuiteWorkspace.doc.guid,
...[...workspace.blockSuiteWorkspace.doc.subdocs].map(doc => doc.guid),
],
remoteDataSource,
indexeddbDataSource
);
});
// step 3: copy blobs in whichever direction a key is missing
const syncBlobPromises = workspaces.map(async workspace => {
const cloudBlobStorage = createCloudBlobStorage(workspace.id);
const indexeddbBlobStorage = createIndexeddbStorage(workspace.id);
return Promise.all([
cloudBlobStorage.crud.list(),
indexeddbBlobStorage.crud.list(),
]).then(([cloudKeys, indexeddbKeys]) => {
if (signal.aborted) {
return;
}
const cloudKeysSet = new Set(cloudKeys);
const indexeddbKeysSet = new Set(indexeddbKeys);
// missing in indexeddb
const missingLocalKeys = cloudKeys.filter(
key => !indexeddbKeysSet.has(key)
);
// missing in cloud
const missingCloudKeys = indexeddbKeys.filter(
key => !cloudKeysSet.has(key)
);
return Promise.all([
...missingLocalKeys.map(key =>
cloudBlobStorage.crud.get(key).then(async value => {
if (signal.aborted) {
return;
}
if (value) {
await indexeddbBlobStorage.crud.set(key, value);
}
})
),
...missingCloudKeys.map(key =>
indexeddbBlobStorage.crud.get(key).then(async value => {
if (signal.aborted) {
return;
}
if (value) {
await cloudBlobStorage.crud.set(key, value);
}
})
),
]);
});
});
await Promise.all([...syncPromises, ...syncBlobPromises]);
}
/**
 * Cancel an in-flight startSync run, if one is active.
 */
export async function stopSync() {
  if (abortController) {
    abortController.abort();
  }
}

View File

@@ -0,0 +1,45 @@
import type { Doc as YDoc } from 'yjs';
// Event payload emitted by Yjs when a doc's subdocuments change.
export type SubdocEvent = {
loaded: Set<YDoc>;
removed: Set<YDoc>;
added: Set<YDoc>;
};
// Listener signatures used by the datasource layer.
export type UpdateHandler = (update: Uint8Array, origin: unknown) => void;
export type SubdocsHandler = (event: SubdocEvent) => void;
export type DestroyHandler = () => void;
// Shape of the y-protocols awareness "update" event payload: client ids
// grouped by what happened to them.
export type AwarenessChanges = Record<
'added' | 'updated' | 'removed',
number[]
>;
/**
 * Encode a Uint8Array as Base64 by reading a Blob as a `data:` URL and
 * stripping the `data:<mime>;base64,` prefix.
 *
 * @returns the Base64 payload, or '' when the reader yields no result.
 */
export function uint8ArrayToBase64(array: Uint8Array): Promise<string> {
  return new Promise<string>(resolve => {
    const reader = new FileReader();
    reader.onload = () => {
      const dataUrl = reader.result as string | null;
      // Everything after the first comma is the Base64 payload.
      resolve(dataUrl ? dataUrl.split(',')[1] : '');
    };
    reader.readAsDataURL(new Blob([array]));
  });
}
/**
 * Decode a Base64 string into the bytes it encodes.
 *
 * @param base64 - Base64 text without any `data:` URL prefix.
 * @returns the decoded bytes; empty input yields an empty array.
 */
export function base64ToUint8Array(base64: string) {
  const binaryString = atob(base64);
  // Map each char of the binary string to its byte value in one pass,
  // avoiding the intermediate number[] the previous split/map allocated.
  return Uint8Array.from(binaryString, char => char.charCodeAt(0));
}

View File

@@ -0,0 +1,280 @@
import type { WorkspaceAdapter } from '@affine/env/workspace';
import { WorkspaceFlavour } from '@affine/env/workspace';
import type { BlockHub } from '@blocksuite/blocks';
import { assertEquals, assertExists } from '@blocksuite/global/utils';
import {
currentPageIdAtom,
currentWorkspaceIdAtom,
} from '@toeverything/infra/atom';
import { WorkspaceVersion } from '@toeverything/infra/blocksuite';
import { type Atom, atom } from 'jotai/vanilla';
import { z } from 'zod';
import { getOrCreateWorkspace } from './manager';
// Zod schemas for the workspace metadata persisted in localStorage.
// v1 entries predate workspace versioning; v2 adds a `version` field.
const rootWorkspaceMetadataV1Schema = z.object({
id: z.string(),
flavour: z.nativeEnum(WorkspaceFlavour),
});
const rootWorkspaceMetadataV2Schema = rootWorkspaceMetadataV1Schema.extend({
version: z.nativeEnum(WorkspaceVersion),
});
// persisted data may mix v1 and v2 entries
const rootWorkspaceMetadataArraySchema = z.array(
z.union([rootWorkspaceMetadataV1Schema, rootWorkspaceMetadataV2Schema])
);
export type RootWorkspaceMetadataV2 = z.infer<
typeof rootWorkspaceMetadataV2Schema
>;
export type RootWorkspaceMetadataV1 = z.infer<
typeof rootWorkspaceMetadataV1Schema
>;
export type RootWorkspaceMetadata =
| RootWorkspaceMetadataV1
| RootWorkspaceMetadataV2;
// Registry of workspace adapters, keyed by flavour.
// NOTE(review): initialized via `null as unknown as ...`, which bypasses the
// type checker — something must set this atom before anything reads it
// (fetchMetadata asserts it is non-null).
export const workspaceAdaptersAtom = atom<
Record<
WorkspaceFlavour,
Pick<
WorkspaceAdapter<WorkspaceFlavour>,
'CRUD' | 'Events' | 'flavour' | 'loadPriority'
>
>
>(
null as unknown as Record<
WorkspaceFlavour,
Pick<
WorkspaceAdapter<WorkspaceFlavour>,
'CRUD' | 'Events' | 'flavour' | 'loadPriority'
>
>
);
// #region root atoms
// root primitive atom that stores the necessary data for the whole app
// be careful when you use this atom,
// it should be used only in the root component
/**
* root workspaces atom
* this atom stores the metadata of all workspaces,
* which is `id` and `flavor,` that is enough to load the real workspace data
*/
// localStorage key for the persisted metadata list; renaming it would
// orphan existing users' data.
const METADATA_STORAGE_KEY = 'jotai-workspaces';
// Holds the metadata promise once it has been written; `null` means the
// initial fetch has not happened yet.
const rootWorkspacesMetadataPrimitiveAtom = atom<Promise<
RootWorkspaceMetadata[]
> | null>(null);
// jotai read function, used to access other atoms inside fetchMetadata
type Getter = <Value>(atom: Atom<Value>) => Value;
type FetchMetadata = (
get: Getter,
options: { signal: AbortSignal }
) => Promise<RootWorkspaceMetadata[]>;
/**
 * Load workspace metadata from localStorage and from every registered
 * workspace adapter, reconciling the two sources.
 *
 * localStorage is read first because it preserves the user's workspace
 * ordering; adapters are then treated as the source of truth for which
 * workspaces actually exist.
 *
 * @internal
 */
const fetchMetadata: FetchMetadata = async (get, { signal }) => {
  const WorkspaceAdapters = get(workspaceAdaptersAtom);
  assertExists(WorkspaceAdapters, 'workspace adapter should be defined');
  const metadata: RootWorkspaceMetadata[] = [];
  // step 1: try load metadata from localStorage.
  //
  // we need this step because workspaces have the order.
  {
    const loadFromLocalStorage = (): RootWorkspaceMetadata[] => {
      // don't change this key,
      // otherwise it will cause the data loss in the production
      const primitiveMetadata = localStorage.getItem(METADATA_STORAGE_KEY);
      if (primitiveMetadata) {
        try {
          const items = JSON.parse(primitiveMetadata) as z.infer<
            typeof rootWorkspaceMetadataArraySchema
          >;
          rootWorkspaceMetadataArraySchema.parse(items);
          return [...items];
        } catch (e) {
          // fix: message previously misspelled "worksapce"
          console.error('cannot parse workspace', e);
        }
        return [];
      }
      return [];
    };
    const maybeMetadata = loadFromLocalStorage();
    // migration step, only data in `METADATA_STORAGE_KEY` will be migrated.
    // If any entry lacks a `version`, wait for the app's migration routine
    // to dispatch `migration-done` (or for `signal` to abort) before
    // re-reading.
    if (
      maybeMetadata.some(meta => !('version' in meta)) &&
      !globalThis.$migrationDone
    ) {
      await new Promise<void>((resolve, reject) => {
        signal.addEventListener('abort', () => reject(), { once: true });
        window.addEventListener('migration-done', () => resolve(), {
          once: true,
        });
      });
    }
    metadata.push(...loadFromLocalStorage());
  }
  // step 2: fetch from adapters
  {
    // lower loadPriority loads first
    const Adapters = Object.values(WorkspaceAdapters).sort(
      (a, b) => a.loadPriority - b.loadPriority
    );
    for (const Adapter of Adapters) {
      const { CRUD, flavour: currentFlavour } = Adapter;
      if (
        Adapter.Events['app:access'] &&
        !(await Adapter.Events['app:access']())
      ) {
        // skip the adapter if the user doesn't have access to it
        const removed = metadata.filter(
          meta => meta.flavour === currentFlavour
        );
        removed.forEach(meta => {
          metadata.splice(metadata.indexOf(meta), 1);
        });
        Adapter.Events['service:stop']?.();
        continue;
      }
      try {
        const item = await CRUD.list();
        // remove the metadata that is not in the list
        // because we treat the workspace adapter as the source of truth
        {
          const removed = metadata.filter(
            meta =>
              meta.flavour === currentFlavour &&
              !item.some(x => x.id === meta.id)
          );
          removed.forEach(meta => {
            metadata.splice(metadata.indexOf(meta), 1);
          });
        }
        // sort the metadata by the order of the list
        if (metadata.length) {
          item.sort((a, b) => {
            return (
              metadata.findIndex(x => x.id === a.id) -
              metadata.findIndex(x => x.id === b.id)
            );
          });
        }
        metadata.push(
          ...item.map(x => ({
            id: x.id,
            flavour: x.flavour,
            version: WorkspaceVersion.DatabaseV3,
          }))
        );
      } catch (e) {
        console.error('list data error:', e);
      }
      Adapter.Events['service:start']?.();
    }
  }
  // de-duplicate by id; later (adapter-sourced) entries win
  const metadataMap = new Map(metadata.map(x => [x.id, x]));
  // init workspace data
  metadataMap.forEach((meta, id) => {
    if (
      meta.flavour === WorkspaceFlavour.AFFINE_CLOUD ||
      meta.flavour === WorkspaceFlavour.LOCAL
    ) {
      getOrCreateWorkspace(id, meta.flavour);
    } else {
      throw new Error(`unknown flavour ${meta.flavour}`);
    }
  });
  const result = Array.from(metadataMap.values());
  console.info('metadata', result);
  return result;
};
// Async atom performing the one-shot initial metadata fetch; it asserts the
// primitive atom is still null, i.e. it must only be read before any write.
const rootWorkspacesMetadataPromiseAtom = atom<
Promise<RootWorkspaceMetadata[]>
>(async (get, { signal }) => {
const primitiveMetadata = get(rootWorkspacesMetadataPrimitiveAtom);
assertEquals(
primitiveMetadata,
null,
'rootWorkspacesMetadataPrimitiveAtom should be null'
);
return fetchMetadata(get, { signal });
});
type SetStateAction<Value> = Value | ((prev: Value) => Value);
/**
 * Public read/write atom for the workspace metadata list.
 *
 * Read: returns the primitive atom's promise once written, otherwise the
 * one-shot initial fetch. Write: accepts an array or an updater function,
 * de-duplicates by id, validates + persists to localStorage, and keeps the
 * current-workspace/page atoms consistent.
 */
export const rootWorkspacesMetadataAtom = atom<
Promise<RootWorkspaceMetadata[]>,
[
setStateAction: SetStateAction<RootWorkspaceMetadata[]>,
newWorkspaceId?: string,
],
void
>(
async get => {
const maybeMetadata = get(rootWorkspacesMetadataPrimitiveAtom);
if (maybeMetadata !== null) {
return maybeMetadata;
}
return get(rootWorkspacesMetadataPromiseAtom);
},
async (get, set, action, newWorkspaceId) => {
const metadataPromise = get(rootWorkspacesMetadataPromiseAtom);
const oldWorkspaceId = get(currentWorkspaceIdAtom);
const oldPageId = get(currentPageIdAtom);
// get metadata
set(rootWorkspacesMetadataPrimitiveAtom, async maybeMetadataPromise => {
let metadata: RootWorkspaceMetadata[] =
(await maybeMetadataPromise) ?? (await metadataPromise);
// update metadata
if (typeof action === 'function') {
metadata = action(metadata);
} else {
metadata = action;
}
// de-duplicate by id (Map keeps the last occurrence)
const metadataMap = new Map(metadata.map(x => [x.id, x]));
metadata = Array.from(metadataMap.values());
// write back to localStorage
rootWorkspaceMetadataArraySchema.parse(metadata);
localStorage.setItem(METADATA_STORAGE_KEY, JSON.stringify(metadata));
// if the current workspace is deleted, reset the current workspace
// NOTE(review): the condition below re-applies the old ids only when the
// workspace is STILL present in the new metadata, which contradicts the
// comment above — confirm whether `!metadata.some(...)` was intended.
if (oldWorkspaceId && metadata.some(x => x.id === oldWorkspaceId)) {
set(currentWorkspaceIdAtom, oldWorkspaceId);
set(currentPageIdAtom, oldPageId);
}
// switching to a freshly created workspace clears the current page
if (newWorkspaceId) {
set(currentPageIdAtom, null);
set(currentWorkspaceIdAtom, newWorkspaceId);
}
return metadata;
});
}
);
// Force a re-fetch of workspace metadata, replacing the cached promise.
// NOTE(review): the AbortController created here is never aborted, so its
// signal can never fire — confirm whether cancellation was intended.
export const refreshRootMetadataAtom = atom(null, (get, set) => {
const abortController = new AbortController();
set(
rootWorkspacesMetadataPrimitiveAtom,
fetchMetadata(get, { signal: abortController.signal })
);
});
// blocksuite atoms,
// each app should have only one block-hub in the same time
export const rootBlockHubAtom = atom<Readonly<BlockHub> | null>(null);
//#endregion

View File

@@ -0,0 +1,75 @@
import {
checkBlobSizesQuery,
deleteBlobMutation,
fetchWithTraceReport,
listBlobsQuery,
setBlobMutation,
} from '@affine/graphql';
import { fetcher } from '@affine/workspace/affine/gql';
import type { BlobStorage } from '@blocksuite/store';
import { predefinedStaticFiles } from './local-static-storage';
// Blob storage backed by the AFFiNE cloud: reads go over HTTP (static path
// or the workspace blob endpoint), writes/deletes go over GraphQL.
export const createCloudBlobStorage = (workspaceId: string): BlobStorage => {
return {
crud: {
get: async key => {
// predefined static files are served from /static, everything else
// from the per-workspace blob endpoint
const suffix = predefinedStaticFiles.includes(key)
? `/static/${key}`
: `/api/workspaces/${workspaceId}/blobs/${key}`;
return fetchWithTraceReport(
runtimeConfig.serverUrlPrefix + suffix
).then(res => {
if (!res.ok) {
// status not in the range 200-299
return null;
}
return res.blob();
});
},
set: async (key, value) => {
// quota pre-check before uploading
const {
checkBlobSize: { size },
} = await fetcher({
query: checkBlobSizesQuery,
variables: {
workspaceId,
size: value.size,
},
});
// NOTE(review): assumes `checkBlobSize.size` is the remaining quota
// (<= 0 meaning exceeded) — confirm against the server schema.
if (size <= 0) {
throw new Error('Blob size limit exceeded');
}
const result = await fetcher({
query: setBlobMutation,
variables: {
workspaceId,
blob: new File([value], key),
},
});
// the server returns the content hash, which should equal the key
console.assert(result.setBlob === key, 'Blob hash mismatch');
return key;
},
list: async () => {
const result = await fetcher({
query: listBlobsQuery,
variables: {
workspaceId,
},
});
return result.listBlobs;
},
delete: async (key: string) => {
await fetcher({
query: deleteBlobMutation,
variables: {
workspaceId,
hash: key,
},
});
},
},
};
};

View File

@@ -0,0 +1,69 @@
import type { BlobStorage } from '@blocksuite/store';
// Keys of blobs bundled with the app and served from `/static/`; used by
// both the cloud and static blob storages to decide the fetch URL.
export const predefinedStaticFiles = [
'029uztLz2CzJezK7UUhrbGiWUdZ0J7NVs_qR6RDsvb8=',
'047ebf2c9a5c7c9d8521c2ea5e6140ff7732ef9e28a9f944e9bf3ca4',
'0hjYqQd8SvwHT2gPds7qFw8W6qIEGVbZvG45uzoYjUU=',
'1326bc48553a572c6756d9ee1b30a0dfdda26222fc2d2c872b14e609',
'27f983d0765289c19d10ee0b51c00c3c7665236a1a82406370d46e0a',
'28516717d63e469cd98729ff46be6595711898bab3dc43302319a987',
'4HXJrnBZGaGPFpowNawNog0aMg3dgoVaAnNqEMeUxq0=',
'5Cfem_137WmzR35ZeIC76oTkq5SQt-eHlZwJiLy0hgU=',
'6aa785ee927547ce9dd9d7b43e01eac948337fe57571443e87bc3a60',
'8oj6ym4HlTcshT40Zn6D5DeOgaVCSOOXJvT_EyiqUw8=',
'9288be57321c8772d04e05dbb69a22742372b3534442607a2d6a9998',
'9vXwWGEX5W9v5pzwpu0eK4pf22DZ_sCloO0zCH1aVQ4=',
'Bd5F0WRI0fLh8RK1al9PawPVT3jv7VwBrqiiBEtdV-g=',
'CBWoKrhSDndjBJzscQKENRqiXOOZnzIA5qyiCoy4-A0=',
'D7g-4LMqOsVWBNOD-_kGgCOvJEoc8rcpYbkfDlF2u5U=',
'Vqc8rxFbGyc5L1QeE_Zr10XEcIai_0Xw4Qv6d3ldRPE=',
'VuXYyM9JUv1Fv_qjg1v5Go4Zksz0r4NXFeh3Na7JkIc=',
'bfXllFddegV9vvxPcSWnOtm-_tuzXm-0OQ59z9Su1zA=',
'c820edeeba50006b531883903f5bb0b96bf523c9a6b3ce5868f03db5',
'cw9XjQ-pCeSW7LKMzVREGHeCPTXWYbtE-QbZLEY3RrI=',
'e93536e1be97e3b5206d43bf0793fdef24e60044d174f0abdefebe08',
'f9yKnlNMgKhF-CxOgHBsXkxfViCCkC6KwTv6Uj2Fcjw=',
'fb0SNPtMpQlzBQ90_PB7vCu34WpiSUJbNKocFkL2vIo=',
'gZLmSgmwumNdgf0eIfOSW44emctrLyFUaZapbk8eZ6s=',
'i39ZQ24NlUfWI0MhkbtvHTzGnWMVdr-aC2aOjvHPVg4=',
'k07JiWnb-S7qgd9gDQNgqo-LYMe03RX8fR0TXQ-SpG4=',
'nSEEkYxrThpZfLoPNOzMp6HWekvutAIYmADElDe1J6I=',
'pIqdA3pM1la1gKzxOmAcpLmTh3yXBrL9mGTz_hGj5xE=',
'qezoK6du9n3PF4dl4aq5r7LeXz_sV3xOVpFzVVgjNsE=',
'rY96Bunn-69CnNe5X_e5CJLwgCJnN6rcbUisecs8kkQ=',
'sNVNYDBzUDN2J9OFVJdLJlryBLzRZBLl-4MTNoPF1tA=',
'uvpOG9DrldeqIGNaqfwjFdMw_CcfXKfiEjYf7RXdeL0=',
'v2yF7lY2L5rtorTtTmYFsoMb9dBPKs5M1y9cUKxcI1M=',
];
/**
 * Read-only blob storage backed by files shipped under `/static/`.
 * Writes, deletes and listing are silently ignored.
 */
export const createStaticStorage = (): BlobStorage => {
  return {
    crud: {
      get: async (key: string) => {
        // Accept either a full `/static/...` path or a bare predefined key.
        let url: string | null = null;
        if (key.startsWith('/static/')) {
          url = key;
        } else if (predefinedStaticFiles.includes(key)) {
          url = `/static/${key}`;
        }
        if (url === null) {
          return null;
        }
        const response = await fetch(url);
        return response.ok ? response.blob() : null;
      },
      set: async (key: string) => {
        // read-only storage: ignore
        return key;
      },
      delete: async () => {
        // read-only storage: ignore
      },
      list: async () => {
        // read-only storage: ignore
        return [];
      },
    },
  };
};

View File

@@ -0,0 +1,29 @@
import { assertExists } from '@blocksuite/global/utils';
import type { BlobStorage } from '@blocksuite/store';
/**
 * Blob storage backed by the desktop SQLite database, reached through the
 * `window.apis.db` bridge (must be present; asserted below).
 */
export const createSQLiteStorage = (workspaceId: string): BlobStorage => {
  const apis = window.apis;
  assertExists(apis);
  return {
    crud: {
      get: async (key: string) => {
        const buffer = await apis.db.getBlob(workspaceId, key);
        if (!buffer) {
          return null;
        }
        return new Blob([buffer]);
      },
      set: async (key: string, value: Blob) => {
        const bytes = new Uint8Array(await value.arrayBuffer());
        await apis.db.addBlob(workspaceId, key, bytes);
        return key;
      },
      delete: async (key: string) => {
        return apis.db.deleteBlob(workspaceId, key);
      },
      list: async () => {
        return apis.db.getBlobKeys(workspaceId);
      },
    },
  };
};

View File

@@ -0,0 +1,71 @@
/**
* @vitest-environment happy-dom
*/
import 'fake-indexeddb/auto';
import type { WorkspaceCRUD } from '@affine/env/workspace';
import { WorkspaceFlavour } from '@affine/env/workspace';
import { __unstableSchemas, AffineSchemas } from '@blocksuite/blocks/models';
import { assertExists } from '@blocksuite/global/utils';
import { Schema, Workspace } from '@blocksuite/store';
import { afterEach, assertType, describe, expect, test } from 'vitest';
import { CRUD } from '../crud';
// Register the full AFFiNE schema so pages created in these tests behave
// like real workspace pages.
const schema = new Schema();
schema.register(AffineSchemas).register(__unstableSchemas);
afterEach(() => {
// the local CRUD persists workspace ids in localStorage; clear between tests
localStorage.clear();
});
describe('crud', () => {
test('type', () => {
// compile-time check: CRUD satisfies the local-workspace interface
assertType<WorkspaceCRUD<WorkspaceFlavour.LOCAL>>(CRUD);
});
test('basic', async () => {
const workspace = await CRUD.get('not_exist');
expect(workspace).toBeNull();
expect(await CRUD.list()).toEqual([]);
});
test('delete not exist', async () => {
await expect(async () =>
CRUD.delete(new Workspace({ id: 'test', schema }))
).rejects.toThrowError();
});
test('create & delete', async () => {
// build a workspace with one page containing a minimal block tree
const workspace = new Workspace({ id: 'test', schema });
const page = workspace.createPage({ id: 'page0' });
await page.waitForLoaded();
const pageBlockId = page.addBlock('affine:page', {
title: new page.Text(''),
});
page.addBlock('affine:surface', {}, pageBlockId);
const frameId = page.addBlock('affine:note', {}, pageBlockId);
page.addBlock('affine:paragraph', {}, frameId);
const id = await CRUD.create(workspace);
const list = await CRUD.list();
expect(list.length).toBe(1);
expect(list[0].id).toBe(id);
const localWorkspace = list.at(0);
assertExists(localWorkspace);
expect(localWorkspace.id).toBe(id);
expect(localWorkspace.flavour).toBe(WorkspaceFlavour.LOCAL);
// the created workspace round-trips the page structure
expect(localWorkspace.blockSuiteWorkspace.doc.toJSON()).toEqual({
meta: expect.anything(),
spaces: expect.objectContaining({
page0: expect.anything(),
}),
});
await CRUD.delete(localWorkspace.blockSuiteWorkspace);
expect(await CRUD.get(id)).toBeNull();
expect(await CRUD.list()).toEqual([]);
});
});

View File

@@ -0,0 +1,112 @@
import { DebugLogger } from '@affine/debug';
import type { LocalWorkspace, WorkspaceCRUD } from '@affine/env/workspace';
import { WorkspaceFlavour } from '@affine/env/workspace';
import { Workspace as BlockSuiteWorkspace } from '@blocksuite/store';
import { createJSONStorage } from 'jotai/utils';
import { nanoid } from 'nanoid';
import { z } from 'zod';
import { getOrCreateWorkspace } from '../manager';
// JSON-backed storage wrapper over window.localStorage (jotai utility).
const getStorage = () => createJSONStorage(() => localStorage);
// localStorage key under which the list of local workspace ids is stored.
const kStoreKey = 'affine-local-workspace';
// Persisted value shape: an array of workspace id strings.
const schema = z.array(z.string());
const logger = new DebugLogger('affine:workspace:local:crud');
/**
 * Append a workspace id to the persisted local-workspace list if it is not
 * already present. Repairs a corrupted (non-array) stored value first.
 * @internal
 */
export function saveWorkspaceToLocalStorage(workspaceId: string) {
  const storage = getStorage();
  if (!Array.isArray(storage.getItem(kStoreKey, []))) {
    storage.setItem(kStoreKey, []);
  }
  const ids = storage.getItem(kStoreKey, []) as z.infer<typeof schema>;
  // `includes` instead of truthiness on `find`: a stored empty-string id
  // would make `find` return '' (falsy) and be appended again.
  if (!ids.includes(workspaceId)) {
    logger.debug('saveWorkspaceToLocalStorage', workspaceId);
    storage.setItem(kStoreKey, [...ids, workspaceId]);
  }
}
/**
 * Read the persisted workspace-id list from localStorage, repairing a
 * corrupted (non-array) value to an empty array first. Shared by every
 * CRUD method so they all normalize storage consistently.
 */
function readWorkspaceIds(): z.infer<typeof schema> {
  const storage = getStorage();
  if (!Array.isArray(storage.getItem(kStoreKey, []))) {
    storage.setItem(kStoreKey, []);
  }
  return storage.getItem(kStoreKey, []) as z.infer<typeof schema>;
}

/**
 * CRUD implementation for local (browser-only) workspaces.
 *
 * Only the workspace ids are tracked here (in localStorage); the document
 * data itself is persisted by whatever providers the workspace attaches.
 */
export const CRUD: WorkspaceCRUD<WorkspaceFlavour.LOCAL> = {
  get: async workspaceId => {
    logger.debug('get', workspaceId);
    // `includes` instead of truthiness on `find` — a found empty-string id
    // would otherwise be treated as missing.
    if (!readWorkspaceIds().includes(workspaceId)) {
      return null;
    }
    const blockSuiteWorkspace = getOrCreateWorkspace(
      workspaceId,
      WorkspaceFlavour.LOCAL
    );
    const workspace: LocalWorkspace = {
      id: workspaceId,
      flavour: WorkspaceFlavour.LOCAL,
      blockSuiteWorkspace: blockSuiteWorkspace,
    };
    return workspace;
  },
  create: async ({ doc }) => {
    logger.debug('create', doc);
    readWorkspaceIds(); // normalize storage before writing below
    const binary = BlockSuiteWorkspace.Y.encodeStateAsUpdate(doc);
    const id = nanoid();
    const blockSuiteWorkspace = getOrCreateWorkspace(
      id,
      WorkspaceFlavour.LOCAL
    );
    BlockSuiteWorkspace.Y.applyUpdate(blockSuiteWorkspace.doc, binary);
    // Copy each subdoc's state into the matching subdoc (same guid) of the
    // freshly created workspace.
    doc.getSubdocs().forEach(subdoc => {
      blockSuiteWorkspace.doc.getSubdocs().forEach(newDoc => {
        if (subdoc.guid === newDoc.guid) {
          BlockSuiteWorkspace.Y.applyUpdate(
            newDoc,
            BlockSuiteWorkspace.Y.encodeStateAsUpdate(subdoc)
          );
        }
      });
    });
    // todo: do we need to persist doc to persistence datasource?
    saveWorkspaceToLocalStorage(id);
    return id;
  },
  delete: async workspace => {
    logger.debug('delete', workspace);
    const data = readWorkspaceIds();
    const idx = data.findIndex(id => id === workspace.id);
    if (idx === -1) {
      throw new Error('workspace not found');
    }
    data.splice(idx, 1);
    getStorage().setItem(kStoreKey, [...data]);
    // flywire
    if (window.apis && environment.isDesktop) {
      await window.apis.workspace.delete(workspace.id);
    }
  },
  list: async () => {
    logger.debug('list');
    // Normalize here too (the original skipped it) so a corrupted stored
    // value cannot make `.map` below throw.
    const allWorkspaceIDs = readWorkspaceIds();
    const workspaces = (
      await Promise.all(allWorkspaceIDs.map(id => CRUD.get(id)))
    ).filter(item => item !== null) as LocalWorkspace[];
    return workspaces;
  },
};

View File

@@ -0,0 +1,145 @@
import { isBrowser } from '@affine/env/constant';
import type { BlockSuiteFeatureFlags } from '@affine/env/global';
import { WorkspaceFlavour } from '@affine/env/workspace';
import { createAffinePublicProviders } from '@affine/workspace/providers';
import { __unstableSchemas, AffineSchemas } from '@blocksuite/blocks/models';
import type { DocProviderCreator, StoreOptions } from '@blocksuite/store';
import {
createIndexeddbStorage,
Generator,
Schema,
Workspace,
} from '@blocksuite/store';
import { INTERNAL_BLOCKSUITE_HASH_MAP } from '@toeverything/infra/__internal__/workspace';
import type { Doc } from 'yjs';
import type { Transaction } from 'yjs';
import { createCloudBlobStorage } from '../blob/cloud-blob-storage';
import { createStaticStorage } from '../blob/local-static-storage';
import { createSQLiteStorage } from '../blob/sqlite-blob-storage';
import { createAffineProviders, createLocalProviders } from '../providers';
/**
 * Push the build-time editor feature flags, plus environment-dependent
 * flags, into the workspace's awareness store.
 */
function setEditorFlags(workspace: Workspace) {
  for (const [key, value] of Object.entries(runtimeConfig.editorFlags)) {
    workspace.awarenessStore.setFlag(
      key as keyof BlockSuiteFeatureFlags,
      value
    );
  }
  // Bookmark operations are only enabled in the desktop app.
  workspace.awarenessStore.setFlag(
    'enable_bookmark_operation',
    environment.isDesktop
  );
}
// Signature of a Yjs `doc.on('update')` listener.
type UpdateCallback = (
  update: Uint8Array,
  origin: string | number | null,
  doc: Doc,
  transaction: Transaction
) => void;
// Payload shape of a Yjs `doc.on('subdocs')` event.
type SubdocEvent = {
  loaded: Set<Doc>;
  removed: Set<Doc>;
  added: Set<Doc>;
};
// Tracks which docs already have an update monitor attached.
const docUpdateCallbackWeakMap = new WeakMap<Doc, UpdateCallback>();
// Single shared schema instance used by every workspace created here.
export const globalBlockSuiteSchema = new Schema();
globalBlockSuiteSchema.register(AffineSchemas).register(__unstableSchemas);
/**
 * Attach dev-time update monitoring to a doc and, recursively, to every
 * subdoc added later.
 *
 * In development it warns when an update arrives with a non-string/number
 * origin, since downstream code distinguishes providers by origin.
 */
const createMonitor = (doc: Doc) => {
  const onUpdate: UpdateCallback = (_, origin) => {
    if (process.env.NODE_ENV === 'development') {
      if (typeof origin !== 'string' && typeof origin !== 'number') {
        console.warn(
          'origin is not a string or number, this will cause problems in the future',
          origin
        );
      }
    } else {
      // todo: add monitor in the future
    }
  };
  docUpdateCallbackWeakMap.set(doc, onUpdate);
  doc.on('update', onUpdate);
  const onSubdocs = (event: SubdocEvent) => {
    event.added.forEach(subdoc => {
      if (!docUpdateCallbackWeakMap.has(subdoc)) {
        createMonitor(subdoc);
      }
    });
    event.removed.forEach(subdoc => {
      if (docUpdateCallbackWeakMap.has(subdoc)) {
        docUpdateCallbackWeakMap.delete(subdoc);
      }
    });
  };
  doc.on('subdocs', onSubdocs);
  doc.on('destroy', () => {
    docUpdateCallbackWeakMap.delete(doc);
    // bugfix: the original called `doc.off('update', onSubdocs)`, which
    // removed neither listener (wrong handler for 'update', and 'subdocs'
    // was never detached). Detach both handlers explicitly.
    doc.off('update', onUpdate);
    doc.off('subdocs', onSubdocs);
  });
};
/**
 * Return the cached Workspace for `id`, or build a new one configured with
 * the blob storages and doc providers appropriate for `flavour`.
 */
export function getOrCreateWorkspace(
  id: string,
  flavour: WorkspaceFlavour
): Workspace {
  // Reuse an existing instance so every caller shares the same Workspace.
  const cached = INTERNAL_BLOCKSUITE_HASH_MAP.get(id);
  if (cached) {
    return cached as Workspace;
  }
  const idGenerator = Generator.NanoID;
  const providerCreators: DocProviderCreator[] = [];
  const blobStorages: StoreOptions['blobStorages'] = [];
  const canUseSQLite = () =>
    environment.isDesktop && runtimeConfig.enableSQLiteProvider;
  switch (flavour) {
    case WorkspaceFlavour.AFFINE_CLOUD: {
      if (isBrowser) {
        blobStorages.push(createIndexeddbStorage, createCloudBlobStorage);
        if (canUseSQLite()) {
          blobStorages.push(createSQLiteStorage);
        }
        providerCreators.push(...createAffineProviders());
        // todo(JimmFly): add support for cloud storage
      }
      break;
    }
    case WorkspaceFlavour.LOCAL: {
      if (isBrowser) {
        blobStorages.push(createIndexeddbStorage);
        if (canUseSQLite()) {
          blobStorages.push(createSQLiteStorage);
        }
      }
      providerCreators.push(...createLocalProviders());
      break;
    }
    case WorkspaceFlavour.AFFINE_PUBLIC: {
      if (isBrowser) {
        blobStorages.push(createIndexeddbStorage);
        if (canUseSQLite()) {
          blobStorages.push(createSQLiteStorage);
        }
      }
      blobStorages.push(createCloudBlobStorage);
      providerCreators.push(...createAffinePublicProviders());
      break;
    }
    default:
      throw new Error('unsupported flavour');
  }
  // The static fallback blob storage is attached for every flavour.
  blobStorages.push(createStaticStorage);
  const workspace = new Workspace({
    id,
    isSSR: !isBrowser,
    providerCreators: typeof window === 'undefined' ? [] : providerCreators,
    blobStorages: blobStorages,
    idGenerator,
    schema: globalBlockSuiteSchema,
  });
  createMonitor(workspace.doc);
  setEditorFlags(workspace);
  INTERNAL_BLOCKSUITE_HASH_MAP.set(id, workspace);
  return workspace;
}

View File

@@ -0,0 +1,86 @@
/**
* @vitest-environment happy-dom
*/
import 'fake-indexeddb/auto';
import type {
LocalIndexedDBBackgroundProvider,
LocalIndexedDBDownloadProvider,
} from '@affine/env/workspace';
import { __unstableSchemas, AffineSchemas } from '@blocksuite/blocks/models';
import { Schema, Workspace } from '@blocksuite/store';
import { afterEach, describe, expect, test } from 'vitest';
import {
createIndexedDBBackgroundProvider,
createIndexedDBDownloadProvider,
} from '..';
// Schema shared by every workspace created in this suite.
const schema = new Schema();
schema.register(AffineSchemas).register(__unstableSchemas);
// Reset persisted state between tests: localStorage plus the fake-indexeddb
// database used by the local providers.
afterEach(() => {
  globalThis.localStorage.clear();
  globalThis.indexedDB.deleteDatabase('affine-local');
});
describe('download provider', () => {
test('basic', async () => {
let prev: any;
{
const workspace = new Workspace({
id: 'test',
isSSR: true,
schema,
});
const provider = createIndexedDBBackgroundProvider(
workspace.id,
workspace.doc,
{
awareness: workspace.awarenessStore.awareness,
}
) as LocalIndexedDBBackgroundProvider;
provider.connect();
const page = workspace.createPage({
id: 'page0',
});
await page.waitForLoaded();
const pageBlockId = page.addBlock('affine:page', {
title: new page.Text(''),
});
page.addBlock('affine:surface', {}, pageBlockId);
const frameId = page.addBlock('affine:note', {}, pageBlockId);
page.addBlock('affine:paragraph', {}, frameId);
await new Promise(resolve => setTimeout(resolve, 1000));
provider.disconnect();
prev = workspace.doc.toJSON();
}
{
const workspace = new Workspace({
id: 'test',
isSSR: true,
schema,
});
const provider = createIndexedDBDownloadProvider(
workspace.id,
workspace.doc,
{
awareness: workspace.awarenessStore.awareness,
}
) as LocalIndexedDBDownloadProvider;
provider.sync();
await provider.whenReady;
expect(workspace.doc.toJSON()).toEqual({
...prev,
// download provider only download the root doc
spaces: {
page0: {
blocks: {},
},
},
});
}
});
});

View File

@@ -0,0 +1,103 @@
/**
* @vitest-environment happy-dom
*/
import 'fake-indexeddb/auto';
import type { AffineSocketIOProvider } from '@affine/env/workspace';
import { __unstableSchemas, AffineSchemas } from '@blocksuite/blocks/models';
import { Schema, Workspace } from '@blocksuite/store';
import { describe, expect, test } from 'vitest';
import * as awarenessProtocol from 'y-protocols/awareness';
import { Doc } from 'yjs';
import { createAffineSocketIOProvider } from '..';
// Schema with all AFFiNE block flavours registered, shared across this suite.
const schema = new Schema();
schema.register(AffineSchemas).register(__unstableSchemas);
describe('sockio provider', () => {
  test.skip('test storage', async () => {
    const workspaceId = 'test-storage-ws';
    {
      // Session 1: create a page and give the provider time to push it.
      const workspace = new Workspace({
        id: workspaceId,
        isSSR: true,
        schema,
      });
      const provider = createAffineSocketIOProvider(
        workspace.id,
        workspace.doc,
        {
          awareness: workspace.awarenessStore.awareness,
        }
      ) as AffineSocketIOProvider;
      provider.connect();
      const page = workspace.createPage({
        id: 'page',
      });
      await page.waitForLoaded();
      page.addBlock('affine:page', {
        title: new page.Text('123123'),
      });
      await new Promise(resolve => setTimeout(resolve, 1000));
    }
    {
      // Session 2: a fresh workspace should receive the stored page.
      const workspace = new Workspace({
        id: workspaceId,
        isSSR: true,
        schema,
      });
      const provider = createAffineSocketIOProvider(
        workspace.id,
        workspace.doc,
        {
          awareness: workspace.awarenessStore.awareness,
        }
      ) as AffineSocketIOProvider;
      provider.connect();
      await new Promise(resolve => setTimeout(resolve, 1000));
      const page = workspace.getPage('page')!;
      await page.waitForLoaded();
      const block = page.getBlockByFlavour('affine:page');
      expect(block[0].flavour).toEqual('affine:page');
    }
  });
  test.skip('test collaboration', async () => {
    const workspaceId = 'test-collboration-ws';
    {
      // Two peers on the same guid, connected through the socket.io provider.
      const doc = new Doc({ guid: workspaceId });
      const provider = createAffineSocketIOProvider(doc.guid, doc, {
        awareness: new awarenessProtocol.Awareness(doc),
      }) as AffineSocketIOProvider;
      const doc2 = new Doc({ guid: workspaceId });
      const provider2 = createAffineSocketIOProvider(doc2.guid, doc2, {
        awareness: new awarenessProtocol.Awareness(doc2),
      }) as AffineSocketIOProvider;
      provider.connect();
      provider2.connect();
      await new Promise(resolve => setTimeout(resolve, 500));
      // Edit a subdoc on peer 1 and wait for it to propagate to peer 2.
      const subdoc = new Doc();
      const folder = doc.getMap();
      folder.set('subDoc', subdoc);
      subdoc.getText().insert(0, 'subDoc content');
      await new Promise(resolve => setTimeout(resolve, 1000));
      // bugfix: the original called `expect(value, 'subDoc content')`, which
      // treats the string as a failure *message* and asserts nothing.
      // Use a real matcher so the test actually checks propagation.
      expect((doc2.getMap().get('subDoc') as Doc).getText().toJSON()).toBe(
        'subDoc content'
      );
    }
  });
});

View File

@@ -0,0 +1,165 @@
import type {
SQLiteDBDownloadProvider,
SQLiteProvider,
} from '@affine/env/workspace';
import { __unstableSchemas, AffineSchemas } from '@blocksuite/blocks/models';
import type { Y as YType } from '@blocksuite/store';
import { Schema, Workspace } from '@blocksuite/store';
import type { DBHandlerManager } from '@toeverything/infra/handler';
import type {
EventMap,
UnwrapManagerHandlerToClientSide,
} from '@toeverything/infra/type';
import { nanoid } from 'nanoid';
import { setTimeout } from 'timers/promises';
import { beforeEach, describe, expect, test, vi } from 'vitest';
import { getDoc } from 'y-provider';
import {
createSQLiteDBDownloadProvider,
createSQLiteProvider,
} from '../sqlite-providers';
const Y = Workspace.Y;
// Per-test state, reinitialized in beforeEach below.
let id: string;
let workspace: Workspace;
let provider: SQLiteProvider;
let downloadProvider: SQLiteDBDownloadProvider;
// Stands in for the document stored on disk by the desktop SQLite layer.
let offlineYdoc: YType.Doc;
// Captures the listener registered via window.events.db.onExternalUpdate so
// tests can push fake "external update" events; null when unsubscribed.
let triggerDBUpdate:
  | Parameters<typeof window.events.db.onExternalUpdate>[0]
  | null = null;
const mockedAddBlob = vi.fn();
// Mock the desktop IPC bridge: `window.apis.db` is backed by `offlineYdoc`
// instead of an actual SQLite database.
vi.stubGlobal('window', {
  apis: {
    db: {
      getDocAsUpdates: async (_, guid) => {
        // Root doc when no guid is given, otherwise look up the subdoc.
        const subdoc = guid ? getDoc(offlineYdoc, guid) : offlineYdoc;
        if (!subdoc) {
          return false;
        }
        return Y.encodeStateAsUpdate(subdoc);
      },
      applyDocUpdate: async (_, update, subdocId) => {
        const subdoc = subdocId ? getDoc(offlineYdoc, subdocId) : offlineYdoc;
        if (!subdoc) {
          return;
        }
        Y.applyUpdate(subdoc, update, 'sqlite');
      },
      getBlobKeys: async () => {
        // todo: may need to hack the way to get hash keys of blobs
        return [];
      },
      addBlob: mockedAddBlob,
    } satisfies Partial<UnwrapManagerHandlerToClientSide<DBHandlerManager>>,
  },
  events: {
    db: {
      // Record the subscriber so tests can fire fake external updates;
      // the returned disposer clears it again.
      onExternalUpdate: fn => {
        triggerDBUpdate = fn;
        return () => {
          triggerDBUpdate = null;
        };
      },
    },
  } as Partial<EventMap>,
});
// The SQLite providers are desktop-only; pretend we are in the desktop app.
vi.stubGlobal('environment', {
  isDesktop: true,
});
const schema = new Schema();
schema.register(AffineSchemas).register(__unstableSchemas);
beforeEach(() => {
  // Fresh workspace + providers per test, keyed by a unique id.
  id = nanoid();
  workspace = new Workspace({ id, isSSR: true, schema });
  const opts = { awareness: workspace.awarenessStore.awareness };
  provider = createSQLiteProvider(
    workspace.id,
    workspace.doc,
    opts
  ) as SQLiteProvider;
  downloadProvider = createSQLiteDBDownloadProvider(
    workspace.id,
    workspace.doc,
    opts
  ) as SQLiteDBDownloadProvider;
  // Seed the fake on-disk doc with some content to sync from.
  offlineYdoc = new Y.Doc();
  offlineYdoc.getText('text').insert(0, 'sqlite-hello');
});
describe('SQLite download provider', () => {
  test('sync updates', async () => {
    // on connect, the updates from sqlite should be sync'ed to the existing ydoc
    workspace.doc.getText('text').insert(0, 'mem-hello');
    downloadProvider.sync();
    await downloadProvider.whenReady;
    // depending on the nature of the sync, the data can be sync'ed in either direction
    const options = ['sqlite-hellomem-hello', 'mem-hellosqlite-hello'];
    const synced = options.filter(
      o => o === workspace.doc.getText('text').toString()
    );
    // Exactly one of the two merge orders must have happened.
    expect(synced.length).toBe(1);
  });
  // there is no updates from sqlite for now
  test.skip('on db update', async () => {
    provider.connect();
    await setTimeout(200);
    offlineYdoc.getText('text').insert(0, 'sqlite-world');
    // Fire an external update for a DIFFERENT workspace id first.
    triggerDBUpdate?.({
      workspaceId: id + '-another-id',
      update: Y.encodeStateAsUpdate(offlineYdoc),
    });
    // not yet updated (because the workspace id is different)
    expect(workspace.doc.getText('text').toString()).toBe('');
    triggerDBUpdate?.({
      workspaceId: id,
      update: Y.encodeStateAsUpdate(offlineYdoc),
    });
    expect(workspace.doc.getText('text').toString()).toBe(
      'sqlite-worldsqlite-hello'
    );
  });
  test('disconnect handlers', async () => {
    const offHandler = vi.fn();
    let handleUpdate = () => {};
    let handleSubdocs = () => {};
    // Intercept listener registration so we can verify that `disconnect`
    // detaches exactly the handlers that `connect` attached.
    workspace.doc.on = (event: string, fn: () => void) => {
      if (event === 'update') {
        handleUpdate = fn;
      } else if (event === 'subdocs') {
        handleSubdocs = fn;
      }
    };
    workspace.doc.off = offHandler;
    provider.connect();
    provider.disconnect();
    // Unsubscribing from onExternalUpdate resets triggerDBUpdate to null.
    expect(triggerDBUpdate).toBe(null);
    expect(offHandler).toBeCalledWith('update', handleUpdate);
    expect(offHandler).toBeCalledWith('subdocs', handleSubdocs);
  });
});

View File

@@ -0,0 +1,98 @@
import { DebugLogger } from '@affine/debug';
import { fetchWithTraceReport } from '@affine/graphql';
import type { ActiveDocProvider, DocProviderCreator } from '@blocksuite/store';
import { Workspace } from '@blocksuite/store';
import type { Doc } from 'yjs';
const Y = Workspace.Y;
const logger = new DebugLogger('affine:cloud');
// Cache of downloaded doc binaries, keyed by `${rootGuid}/${pageGuid}`.
// Currently only membership is checked; the stored buffer is not re-read.
const hashMap = new Map<string, ArrayBuffer>();
/**
 * Fetch the binary update for `pageGuid` inside workspace `rootGuid`.
 *
 * Returns the ArrayBuffer on a successful download, `true` when the doc was
 * already fetched earlier in this session (cached), and `false` on failure.
 */
export async function downloadBinaryFromCloud(
  rootGuid: string,
  pageGuid: string
): Promise<boolean | ArrayBuffer> {
  const cacheKey = `${rootGuid}/${pageGuid}`;
  if (hashMap.has(cacheKey)) {
    return true;
  }
  const response = await fetchWithTraceReport(
    `${runtimeConfig.serverUrlPrefix}/api/workspaces/${rootGuid}/docs/${pageGuid}`,
    { priority: 'high' }
  );
  if (!response.ok) {
    return false;
  }
  const arrayBuffer = await response.arrayBuffer();
  hashMap.set(cacheKey, arrayBuffer);
  return arrayBuffer;
}
// Apply the downloaded state (if any) onto `doc`, tagged with the
// 'affine-cloud' origin. Boolean results (cached / failed) are a no-op.
async function downloadBinary(rootGuid: string, doc: Doc) {
  const result = await downloadBinaryFromCloud(rootGuid, doc.guid);
  if (typeof result === 'boolean') {
    return;
  }
  Y.applyUpdate(doc, new Uint8Array(result), 'affine-cloud');
}
/**
 * Active provider that downloads a workspace snapshot from the cloud once.
 * `whenReady` resolves after the download has been applied, and rejects if
 * the download fails.
 */
export const createCloudDownloadProvider: DocProviderCreator = (
  id,
  doc
): ActiveDocProvider => {
  let markReady!: () => void;
  let markFailed!: (error: unknown) => void;
  const ready = new Promise<void>((resolve, reject) => {
    markReady = resolve;
    markFailed = reject;
  });
  return {
    flavour: 'affine-cloud-download',
    active: true,
    sync() {
      downloadBinary(id, doc)
        .then(() => {
          logger.info(`Downloaded ${id}`);
          markReady();
        })
        .catch(markFailed);
    },
    get whenReady() {
      return ready;
    },
  };
};
/**
 * Active provider that best-effort merges the cloud snapshot into the doc.
 * Unlike the download provider, failures are logged and swallowed so that
 * `whenReady` always resolves.
 */
export const createMergeCloudSnapshotProvider: DocProviderCreator = (
  id,
  doc
): ActiveDocProvider => {
  let markReady!: () => void;
  const ready = new Promise<void>(resolve => {
    markReady = resolve;
  });
  return {
    flavour: 'affine-cloud-merge-snapshot',
    active: true,
    sync() {
      downloadBinary(id, doc)
        .then(() => {
          logger.info(`Downloaded ${id}`);
          markReady();
        })
        // ignore error
        .catch(e => {
          console.error(e);
          markReady();
        });
    },
    get whenReady() {
      return ready;
    },
  };
};

View File

@@ -0,0 +1,163 @@
import { DebugLogger } from '@affine/debug';
import type {
AffineSocketIOProvider,
LocalIndexedDBBackgroundProvider,
LocalIndexedDBDownloadProvider,
} from '@affine/env/workspace';
import { assertExists } from '@blocksuite/global/utils';
import type { DocProviderCreator } from '@blocksuite/store';
import { Workspace } from '@blocksuite/store';
import { createBroadcastChannelProvider } from '@blocksuite/store/providers/broadcast-channel';
import {
createIndexedDBDatasource,
createIndexedDBProvider as create,
} from '@toeverything/y-indexeddb';
import { createLazyProvider } from 'y-provider';
import { encodeStateVector } from 'yjs';
import { createAffineDataSource } from '../affine';
import {
createCloudDownloadProvider,
createMergeCloudSnapshotProvider,
downloadBinaryFromCloud,
} from './cloud';
import {
createSQLiteDBDownloadProvider,
createSQLiteProvider,
} from './sqlite-providers';
const Y = Workspace.Y;
// Logger shared by the IndexedDB-based providers defined below.
const logger = new DebugLogger('indexeddb-provider');
const createAffineSocketIOProvider: DocProviderCreator = (
id,
doc,
{ awareness }
): AffineSocketIOProvider => {
const dataSource = createAffineDataSource(id, doc, awareness);
const lazyProvider = createLazyProvider(doc, dataSource, {
origin: 'affine-socket-io',
});
return {
flavour: 'affine-socket-io',
...lazyProvider,
get status() {
return lazyProvider.status;
},
};
};
/**
 * Passive provider persisting the workspace to IndexedDB in the background.
 */
const createIndexedDBBackgroundProvider: DocProviderCreator = (
  id,
  blockSuiteWorkspace
): LocalIndexedDBBackgroundProvider => {
  const indexeddbProvider = create(blockSuiteWorkspace);
  let connected = false;
  return {
    flavour: 'local-indexeddb-background',
    datasource: indexeddbProvider.datasource,
    passive: true,
    get status() {
      return indexeddbProvider.status;
    },
    subscribeStatusChange: indexeddbProvider.subscribeStatusChange,
    get connected() {
      return connected;
    },
    cleanup: () => {
      indexeddbProvider.cleanup().catch(console.error);
    },
    connect: () => {
      logger.info('connect indexeddb provider', id);
      indexeddbProvider.connect();
      // bugfix: `connected` was never set on connect, so the getter always
      // reported false even while the provider was active.
      connected = true;
    },
    disconnect: () => {
      assertExists(indexeddbProvider);
      logger.info('disconnect indexeddb provider', id);
      indexeddbProvider.disconnect();
      connected = false;
    },
  };
};
// Origin tag attached to updates applied by the download provider.
const indexedDBDownloadOrigin = 'indexeddb-download-provider';
/**
 * Active provider that pulls the root doc's missing updates out of
 * IndexedDB exactly once; `whenReady` settles when the pull finishes.
 */
const createIndexedDBDownloadProvider: DocProviderCreator = (
  id,
  doc
): LocalIndexedDBDownloadProvider => {
  const datasource = createIndexedDBDatasource({});
  let markReady!: () => void;
  let markFailed!: (error: unknown) => void;
  const ready = new Promise<void>((resolve, reject) => {
    markReady = resolve;
    markFailed = reject;
  });
  const pull = async () => {
    const docState = await datasource.queryDocState(doc.guid, {
      stateVector: encodeStateVector(doc),
    });
    if (docState) {
      Y.applyUpdate(doc, docState.missing, indexedDBDownloadOrigin);
    }
  };
  return {
    flavour: 'local-indexeddb',
    active: true,
    get whenReady() {
      return ready;
    },
    cleanup: () => {
      // todo: cleanup data
    },
    sync: () => {
      logger.info('sync indexeddb provider', id);
      pull()
        .then(() => markReady())
        .catch(markFailed);
    },
  };
};
// Single export surface for all provider creators, including the
// broadcast-channel provider re-exported from BlockSuite.
export {
  createAffineSocketIOProvider,
  createBroadcastChannelProvider,
  createIndexedDBBackgroundProvider,
  createIndexedDBDownloadProvider,
  createSQLiteDBDownloadProvider,
  createSQLiteProvider,
  downloadBinaryFromCloud,
};
/**
 * Providers for a purely local workspace: IndexedDB persistence + one-shot
 * download, optionally broadcast-channel sync and (on desktop, when the
 * runtime flag is set) the SQLite providers.
 */
export const createLocalProviders = (): DocProviderCreator[] => {
  const useSQLite =
    environment.isDesktop && runtimeConfig.enableSQLiteProvider;
  return [
    createIndexedDBBackgroundProvider,
    createIndexedDBDownloadProvider,
    ...(runtimeConfig.enableBroadcastChannelProvider
      ? [createBroadcastChannelProvider]
      : []),
    ...(useSQLite
      ? [createSQLiteProvider, createSQLiteDBDownloadProvider]
      : []),
  ] as DocProviderCreator[];
};
/**
 * Providers for a cloud workspace: all local providers plus, when cloud is
 * enabled, the socket.io sync and snapshot-merge providers.
 */
export const createAffineProviders = (): DocProviderCreator[] => {
  const providers: DocProviderCreator[] = [...createLocalProviders()];
  if (runtimeConfig.enableCloud) {
    providers.push(
      createAffineSocketIOProvider,
      createMergeCloudSnapshotProvider
    );
  }
  return providers;
};
/** Providers for a public (shared) workspace: cloud download only. */
export const createAffinePublicProviders = (): DocProviderCreator[] => {
  const providers: DocProviderCreator[] = [createCloudDownloadProvider];
  return providers;
};

View File

@@ -0,0 +1,3 @@
import { DebugLogger } from '@affine/debug';
// Logger shared by the local document providers (IndexedDB/SQLite/etc.).
export const localProviderLogger = new DebugLogger('local-provider');

View File

@@ -0,0 +1,133 @@
import type {
SQLiteDBDownloadProvider,
SQLiteProvider,
} from '@affine/env/workspace';
import { assertExists } from '@blocksuite/global/utils';
import type { DocProviderCreator } from '@blocksuite/store';
import { Workspace as BlockSuiteWorkspace } from '@blocksuite/store';
import { createLazyProvider, type DocDataSource } from 'y-provider';
import type { Doc } from 'yjs';
import { localProviderLogger as logger } from './logger';
const Y = BlockSuiteWorkspace.Y;
// Origin tag attached to updates applied from the SQLite database.
const sqliteOrigin = 'sqlite-provider-origin';
/**
 * DocDataSource bridging y-provider to the desktop SQLite database through
 * the `window.apis.db` IPC surface. Throws when the bridge is unavailable.
 */
const createDatasource = (workspaceId: string): DocDataSource => {
  if (!window.apis?.db) {
    throw new Error('sqlite datasource is not available');
  }
  // The desktop API addresses the root doc with `undefined` and subdocs by
  // their own guid.
  const toSubdocId = (guid: string) =>
    workspaceId === guid ? undefined : guid;
  return {
    queryDocState: async guid => {
      const update = await window.apis.db.getDocAsUpdates(
        workspaceId,
        toSubdocId(guid)
      );
      return update ? { missing: update } : false;
    },
    sendDocUpdate: async (guid, update) => {
      return window.apis.db.applyDocUpdate(
        workspaceId,
        update,
        toSubdocId(guid)
      );
    },
  };
};
/**
 * A provider that is responsible for syncing updates the workspace with the local SQLite database.
 */
export const createSQLiteProvider: DocProviderCreator = (
  id,
  rootDoc
): SQLiteProvider => {
  const datasource = createDatasource(id);
  // A fresh lazy provider is created per connection; `disconnect` drops it.
  let lazy: ReturnType<typeof createLazyProvider> | null = null;
  let isConnected = false;
  return {
    flavour: 'sqlite',
    datasource,
    passive: true,
    get status() {
      assertExists(lazy);
      return lazy.status;
    },
    subscribeStatusChange(onStatusChange) {
      assertExists(lazy);
      return lazy.subscribeStatusChange(onStatusChange);
    },
    connect: () => {
      lazy = createLazyProvider(rootDoc, datasource, { origin: 'sqlite' });
      lazy.connect();
      isConnected = true;
    },
    disconnect: () => {
      lazy?.disconnect();
      lazy = null;
      isConnected = false;
    },
    get connected() {
      return isConnected;
    },
  };
};
/**
 * A provider that is responsible for DOWNLOADING updates from the local SQLite database.
 */
export const createSQLiteDBDownloadProvider: DocProviderCreator = (
  id,
  rootDoc
): SQLiteDBDownloadProvider => {
  const { apis } = window;
  let markReady!: () => void;
  let markFailed!: (error: unknown) => void;
  const ready = new Promise<void>((resolve, reject) => {
    markReady = resolve;
    markFailed = reject;
  });
  // Pull the persisted updates for `doc` (root doc or subdoc) and apply
  // them with the sqlite origin tag.
  async function syncUpdates(doc: Doc) {
    logger.info('syncing updates from sqlite', doc.guid);
    const subdocId = doc.guid === id ? undefined : doc.guid;
    const updates = await apis.db.getDocAsUpdates(id, subdocId);
    if (updates) {
      Y.applyUpdate(doc, updates, sqliteOrigin);
    }
    return true;
  }
  return {
    flavour: 'sqlite-download',
    active: true,
    get whenReady() {
      return ready;
    },
    cleanup: () => {
      // todo
    },
    sync: () => {
      logger.info('connect sqlite download provider', id);
      syncUpdates(rootDoc)
        .then(() => markReady())
        .catch(markFailed);
    },
  };
};