refactor(electron): cleanup secondary db logic (#6710)

This commit is contained in:
pengx17
2024-04-26 10:57:08 +00:00
parent d015be24e6
commit 5d114ea965
17 changed files with 63 additions and 1020 deletions

View File

@@ -26,7 +26,6 @@ export const runtimeFlagsSchema = z.object({
allowLocalWorkspace: z.boolean(), allowLocalWorkspace: z.boolean(),
// this is for the electron app // this is for the electron app
serverUrlPrefix: z.string(), serverUrlPrefix: z.string(),
enableMoveDatabase: z.boolean(),
appVersion: z.string(), appVersion: z.string(),
editorVersion: z.string(), editorVersion: z.string(),
appBuildType: z.union([ appBuildType: z.union([

View File

@@ -17,7 +17,6 @@ import { ExportPanel } from './export';
import { LabelsPanel } from './labels'; import { LabelsPanel } from './labels';
import { MembersPanel } from './members'; import { MembersPanel } from './members';
import { ProfilePanel } from './profile'; import { ProfilePanel } from './profile';
import { StoragePanel } from './storage';
import type { WorkspaceSettingDetailProps } from './types'; import type { WorkspaceSettingDetailProps } from './types';
export const WorkspaceSettingDetail = ({ export const WorkspaceSettingDetail = ({
@@ -70,9 +69,6 @@ export const WorkspaceSettingDetail = ({
</SettingWrapper> </SettingWrapper>
{environment.isDesktop && ( {environment.isDesktop && (
<SettingWrapper title={t['Storage and Export']()}> <SettingWrapper title={t['Storage and Export']()}>
{runtimeConfig.enableMoveDatabase ? (
<StoragePanel workspaceMetadata={workspaceMetadata} />
) : null}
<ExportPanel <ExportPanel
workspace={workspace} workspace={workspace}
workspaceMetadata={workspaceMetadata} workspaceMetadata={workspaceMetadata}

View File

@@ -1,123 +0,0 @@
import { FlexWrapper, toast } from '@affine/component';
import { SettingRow } from '@affine/component/setting-components';
import { Button } from '@affine/component/ui/button';
import { Tooltip } from '@affine/component/ui/tooltip';
import { apis, events } from '@affine/electron-api';
import { useAFFiNEI18N } from '@affine/i18n/hooks';
import type { WorkspaceMetadata } from '@toeverything/infra';
import { useCallback, useEffect, useMemo, useState } from 'react';
/**
 * React hook that resolves the secondary (user-relocatable) db file path for a
 * workspace and keeps it in sync with meta changes pushed from the main
 * process. Returns `undefined` until the meta is loaded, when no secondary
 * path is configured, or outside the desktop environment.
 */
const useDBFileSecondaryPath = (workspaceId: string) => {
  const [path, setPath] = useState<string | undefined>(undefined);
  useEffect(() => {
    if (apis && events && environment.isDesktop) {
      // the guard above already narrows `apis`/`events`; no optional chaining needed
      apis.workspace
        .getMeta(workspaceId)
        .then(meta => {
          setPath(meta.secondaryDBPath);
        })
        .catch(err => {
          console.error(err);
        });
      // subscribe to meta changes; onMetaChange returns the unsubscribe
      // function, which doubles as the effect cleanup
      return events.workspace.onMetaChange(
        (newMeta: {
          workspaceId: string;
          meta: { secondaryDBPath?: string };
        }) => {
          if (newMeta.workspaceId === workspaceId) {
            setPath(newMeta.meta.secondaryDBPath);
          }
        }
      );
    }
    return;
  }, [workspaceId]);
  return path;
};
/** Props: metadata of the workspace whose storage location is being managed. */
interface StoragePanelProps {
  workspaceMetadata: WorkspaceMetadata;
}
/**
 * Desktop-only settings row showing where the workspace's secondary db file
 * lives, letting the user move it to a new folder or reveal it in the OS
 * file manager.
 */
export const StoragePanel = ({ workspaceMetadata }: StoragePanelProps) => {
  const workspaceId = workspaceMetadata.id;
  const t = useAFFiNEI18N();
  // undefined until the main process reports a configured secondary db path
  const secondaryPath = useDBFileSecondaryPath(workspaceId);
  // true while a moveDBFile dialog/copy is in flight; guards re-entrant clicks
  const [moveToInProgress, setMoveToInProgress] = useState<boolean>(false);
  const onRevealDBFile = useCallback(() => {
    apis?.dialog.revealDBFile(workspaceId).catch(err => {
      console.error(err);
    });
  }, [workspaceId]);
  const handleMoveTo = useCallback(() => {
    // ignore clicks while a previous move is still running
    if (moveToInProgress) {
      return;
    }
    setMoveToInProgress(true);
    apis?.dialog
      .moveDBFile(workspaceId)
      .then(result => {
        if (!result?.error && !result?.canceled) {
          toast(t['Move folder success']());
        } else if (result?.error) {
          // result.error is an i18n key produced by the main process
          toast(t[result.error]());
        }
      })
      .catch(() => {
        toast(t['UNKNOWN_ERROR']());
      })
      .finally(() => {
        setMoveToInProgress(false);
      });
  }, [moveToInProgress, t, workspaceId]);
  // Two layouts: with a secondary path -> "change location" button (showing
  // the path) plus "open folder"; without -> a single "move folder" button.
  // NOTE(review): buttons use data-disabled (styling hook) instead of the
  // native disabled attribute — confirm clicks are meant to stay enabled.
  const rowContent = useMemo(
    () =>
      secondaryPath ? (
        <FlexWrapper justifyContent="space-between">
          <Tooltip
            content={t['com.affine.settings.storage.db-location.change-hint']()}
            side="top"
            align="start"
          >
            <Button
              data-testid="move-folder"
              // className={style.urlButton}
              size="large"
              onClick={handleMoveTo}
            >
              {secondaryPath}
            </Button>
          </Tooltip>
          <Button
            data-testid="reveal-folder"
            data-disabled={moveToInProgress}
            onClick={onRevealDBFile}
          >
            {t['Open folder']()}
          </Button>
        </FlexWrapper>
      ) : (
        <Button
          data-testid="move-folder"
          data-disabled={moveToInProgress}
          onClick={handleMoveTo}
        >
          {t['Move folder']()}
        </Button>
      ),
    [handleMoveTo, moveToInProgress, onRevealDBFile, secondaryPath, t]
  );
  return (
    <SettingRow
      name={t['Storage']()}
      desc={t[
        secondaryPath
          ? 'com.affine.settings.storage.description-alt'
          : 'com.affine.settings.storage.description'
      ]()}
      spreadCol={!secondaryPath}
    >
      {rowContent}
    </SettingRow>
  );
};

View File

@@ -1,145 +1,38 @@
import type { Subject } from 'rxjs';
import {
concat,
defer,
from,
fromEvent,
interval,
lastValueFrom,
merge,
Observable,
} from 'rxjs';
import {
concatMap,
distinctUntilChanged,
filter,
ignoreElements,
last,
map,
shareReplay,
startWith,
switchMap,
take,
takeUntil,
tap,
} from 'rxjs/operators';
import { logger } from '../logger'; import { logger } from '../logger';
import { getWorkspaceMeta } from '../workspace/meta';
import { workspaceSubjects } from '../workspace/subjects';
import { SecondaryWorkspaceSQLiteDB } from './secondary-db';
import type { WorkspaceSQLiteDB } from './workspace-db-adapter'; import type { WorkspaceSQLiteDB } from './workspace-db-adapter';
import { openWorkspaceDatabase } from './workspace-db-adapter'; import { openWorkspaceDatabase } from './workspace-db-adapter';
// export for testing // export for testing
export const db$Map = new Map<string, Observable<WorkspaceSQLiteDB>>(); export const db$Map = new Map<string, Promise<WorkspaceSQLiteDB>>();
// use defer to prevent `app` is undefined while running tests async function getWorkspaceDB(id: string) {
const beforeQuit$ = defer(() => fromEvent(process, 'beforeExit')); let db = await db$Map.get(id);
// return a stream that emit a single event when the subject completes
function completed<T>(subject$: Subject<T>) {
return new Observable(subscriber => {
const sub = subject$.subscribe({
complete: () => {
subscriber.next();
subscriber.complete();
},
});
return () => sub.unsubscribe();
});
}
function getWorkspaceDB(id: string) {
if (!db$Map.has(id)) { if (!db$Map.has(id)) {
db$Map.set( const promise = openWorkspaceDatabase(id);
id, db$Map.set(id, promise);
from(openWorkspaceDatabase(id)).pipe( const _db = (db = await promise);
tap({ const cleanup = () => {
next: db => { db$Map.delete(id);
logger.info( _db
'[ensureSQLiteDB] db connection established', .destroy()
db.workspaceId .then(() => {
); logger.info('[ensureSQLiteDB] db connection closed', _db.workspaceId);
}, })
}), .catch(err => {
switchMap(db => logger.error('[ensureSQLiteDB] destroy db failed', err);
// takeUntil the polling stream, and then destroy the db });
concat( };
startPollingSecondaryDB(db).pipe(
ignoreElements(), db.update$.subscribe({
startWith(db), complete: cleanup,
takeUntil(merge(beforeQuit$, completed(db.update$))), });
last(),
tap({ process.on('beforeExit', cleanup);
next() {
logger.info(
'[ensureSQLiteDB] polling secondary db complete',
db.workspaceId
);
},
})
),
defer(async () => {
try {
await db.destroy();
db$Map.delete(id);
return db;
} catch (err) {
logger.error('[ensureSQLiteDB] destroy db failed', err);
throw err;
}
})
).pipe(startWith(db))
),
shareReplay(1)
)
);
} }
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
return db$Map.get(id)!; return db!;
}
function startPollingSecondaryDB(db: WorkspaceSQLiteDB) {
return merge(
getWorkspaceMeta(db.workspaceId),
workspaceSubjects.meta$.pipe(
map(({ meta }) => meta),
filter(meta => meta.id === db.workspaceId)
)
).pipe(
map(meta => meta?.secondaryDBPath),
filter((p): p is string => !!p),
distinctUntilChanged(),
switchMap(path => {
// on secondary db path change, destroy the old db and create a new one
const secondaryDB = new SecondaryWorkspaceSQLiteDB(path, db);
return new Observable<SecondaryWorkspaceSQLiteDB>(subscriber => {
subscriber.next(secondaryDB);
return () => {
secondaryDB.destroy().catch(err => {
subscriber.error(err);
});
};
});
}),
switchMap(secondaryDB => {
return interval(300000).pipe(
startWith(0),
concatMap(() => secondaryDB.pull()),
tap({
error: err => {
logger.error(`[ensureSQLiteDB] polling secondary db error`, err);
},
complete: () => {
logger.info('[ensureSQLiteDB] polling secondary db complete');
},
})
);
})
);
} }
export function ensureSQLiteDB(id: string) { export function ensureSQLiteDB(id: string) {
return lastValueFrom(getWorkspaceDB(id).pipe(take(1))); return getWorkspaceDB(id);
} }

View File

@@ -1,10 +1,8 @@
import { mainRPC } from '../main-rpc'; import { mainRPC } from '../main-rpc';
import type { MainEventRegister } from '../type'; import type { MainEventRegister } from '../type';
import { ensureSQLiteDB } from './ensure-db'; import { ensureSQLiteDB } from './ensure-db';
import { dbSubjects } from './subjects';
export * from './ensure-db'; export * from './ensure-db';
export * from './subjects';
export const dbHandlers = { export const dbHandlers = {
getDocAsUpdates: async (workspaceId: string, subdocId?: string) => { getDocAsUpdates: async (workspaceId: string, subdocId?: string) => {
@@ -17,7 +15,12 @@ export const dbHandlers = {
subdocId?: string subdocId?: string
) => { ) => {
const workspaceDB = await ensureSQLiteDB(workspaceId); const workspaceDB = await ensureSQLiteDB(workspaceId);
return workspaceDB.applyUpdate(update, 'renderer', subdocId); return workspaceDB.addUpdateToSQLite([
{
data: update,
docId: subdocId,
},
]);
}, },
addBlob: async (workspaceId: string, key: string, data: Uint8Array) => { addBlob: async (workspaceId: string, key: string, data: Uint8Array) => {
const workspaceDB = await ensureSQLiteDB(workspaceId); const workspaceDB = await ensureSQLiteDB(workspaceId);
@@ -40,17 +43,4 @@ export const dbHandlers = {
}, },
}; };
export const dbEvents = { export const dbEvents = {} satisfies Record<string, MainEventRegister>;
onExternalUpdate: (
fn: (update: {
workspaceId: string;
update: Uint8Array;
docId?: string;
}) => void
) => {
const sub = dbSubjects.externalUpdate$.subscribe(fn);
return () => {
sub.unsubscribe();
};
},
} satisfies Record<string, MainEventRegister>;

View File

@@ -1,304 +0,0 @@
import assert from 'node:assert';
import type { InsertRow } from '@affine/native';
import { debounce } from 'lodash-es';
import { applyUpdate, Doc as YDoc } from 'yjs';
import { logger } from '../logger';
import type { YOrigin } from '../type';
import { getWorkspaceMeta } from '../workspace/meta';
import { BaseSQLiteAdapter } from './base-db-adapter';
import type { WorkspaceSQLiteDB } from './workspace-db-adapter';
const FLUSH_WAIT_TIME = 5000;
const FLUSH_MAX_WAIT_TIME = 10000;

// todo: trim db when it is too big
/**
 * Secondary (user-relocatable) mirror of a workspace db. It shadows the
 * primary ("upstream") WorkspaceSQLiteDB: renderer/self updates on the
 * upstream yDoc are queued and flushed into this file, and `pull()` replays
 * updates found in this file back into the upstream doc.
 */
export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
  role = 'secondary';
  yDoc = new YDoc();
  firstConnected = false;
  destroyed = false;

  // updates waiting to be written to SQLite; flushed via debouncedFlush
  updateQueue: { data: Uint8Array; docId?: string }[] = [];

  unsubscribers = new Set<() => void>();

  constructor(
    public override path: string,
    public upstream: WorkspaceSQLiteDB
  ) {
    super(path);
    this.init();
    logger.debug('[SecondaryWorkspaceSQLiteDB] created', this.workspaceId);
  }

  /** Root doc when docId is omitted, otherwise a direct subdoc by guid; null if absent. */
  getDoc(docId?: string) {
    if (!docId) {
      return this.yDoc;
    }
    // this should be pretty fast and we don't need to cache it
    for (const subdoc of this.yDoc.subdocs) {
      if (subdoc.guid === docId) {
        return subdoc;
      }
    }
    return null;
  }

  /** Flush pending updates, detach listeners, then close the underlying db. */
  override async destroy() {
    await this.flushUpdateQueue();
    this.unsubscribers.forEach(unsub => unsub());
    this.yDoc.destroy();
    await super.destroy();
    this.destroyed = true;
  }

  get workspaceId() {
    return this.upstream.workspaceId;
  }

  // do not update db immediately, instead, push to a queue
  // and flush the queue in a future time
  async addUpdateToUpdateQueue(update: InsertRow) {
    this.updateQueue.push(update);
    await this.debouncedFlush();
  }

  /** Drain the queue and persist the drained updates as one batch. */
  async flushUpdateQueue() {
    if (this.destroyed) {
      return;
    }
    logger.debug(
      'flushUpdateQueue',
      this.workspaceId,
      'queue',
      this.updateQueue.length
    );
    // snapshot-and-clear first so updates queued during the write are kept
    const updates = [...this.updateQueue];
    this.updateQueue = [];
    await this.run(async () => {
      await this.addUpdateToSQLite(updates);
    });
  }

  // flush after 5s, but will not wait for more than 10s
  debouncedFlush = debounce(this.flushUpdateQueue, FLUSH_WAIT_TIME, {
    maxWait: FLUSH_MAX_WAIT_TIME,
  });

  // number of `run` calls in flight; the db connection closes when it hits 0
  runCounter = 0;

  // wrap the fn with connect and close
  async run<T extends (...args: any[]) => any>(
    fn: T
  ): Promise<
    (T extends (...args: any[]) => infer U ? Awaited<U> : unknown) | undefined
  > {
    try {
      if (this.destroyed) {
        return;
      }
      await this.connectIfNeeded();
      this.runCounter++;
      return await fn();
    } catch (err) {
      logger.error(err);
      throw err;
    } finally {
      this.runCounter--;
      if (this.runCounter === 0) {
        // just close db, but not the yDoc
        await super.destroy();
      }
    }
  }

  /**
   * Wire bidirectional replication between the upstream doc and this mirror
   * for the given doc and, recursively, its subdocs.
   */
  setupListener(docId?: string) {
    logger.debug(
      'SecondaryWorkspaceSQLiteDB:setupListener',
      this.workspaceId,
      docId
    );
    const doc = this.getDoc(docId);
    const upstreamDoc = this.upstream.getDoc(docId);
    if (!doc || !upstreamDoc) {
      logger.warn(
        '[SecondaryWorkspaceSQLiteDB] setupListener: doc not found',
        docId
      );
      return;
    }
    const onUpstreamUpdate = (update: Uint8Array, origin: YOrigin) => {
      logger.debug(
        'SecondaryWorkspaceSQLiteDB:onUpstreamUpdate',
        origin,
        this.workspaceId,
        docId,
        update.length
      );
      if (origin === 'renderer' || origin === 'self') {
        // update to upstream yDoc should be replicated to self yDoc
        this.applyUpdate(update, 'upstream', docId);
      }
    };
    const onSelfUpdate = async (update: Uint8Array, origin: YOrigin) => {
      logger.debug(
        'SecondaryWorkspaceSQLiteDB:onSelfUpdate',
        origin,
        this.workspaceId,
        docId,
        update.length
      );
      // for self update from upstream, we need to push it to external DB
      if (origin === 'upstream') {
        await this.addUpdateToUpdateQueue({
          data: update,
          docId,
        });
      }
      if (origin === 'self') {
        this.upstream.applyUpdate(update, 'external', docId);
      }
    };
    // fix: keep a single stable reference so on()/off() see the same function;
    // the original registered one closure and passed a *new* closure to off(),
    // which never detached the listener (leak across re-subscription)
    const onSelfUpdateHandler = (update: Uint8Array, origin: YOrigin) => {
      onSelfUpdate(update, origin).catch(err => {
        logger.error(err);
      });
    };
    const onSubdocs = ({ added }: { added: Set<YDoc> }) => {
      added.forEach(subdoc => {
        this.setupListener(subdoc.guid);
      });
    };
    doc.subdocs.forEach(subdoc => {
      this.setupListener(subdoc.guid);
    });
    // fix: listen on the upstream doc that matches docId. The original always
    // attached to the root `this.upstream.yDoc` even for subdocs (the resolved
    // `upstreamDoc` above was checked but never used), forwarding root-doc
    // updates under a subdoc id.
    upstreamDoc.on('update', onUpstreamUpdate);
    doc.on('update', onSelfUpdateHandler);
    doc.on('subdocs', onSubdocs);
    this.unsubscribers.add(() => {
      upstreamDoc.off('update', onUpstreamUpdate);
      doc.off('update', onSelfUpdateHandler);
      doc.off('subdocs', onSubdocs);
    });
  }

  /** One-time bootstrap: attach listeners, then seed this mirror from upstream. */
  init() {
    if (this.firstConnected) {
      return;
    }
    this.firstConnected = true;
    this.setupListener();
    // apply all updates from upstream
    // we assume here that the upstream ydoc is already sync'ed
    const syncUpstreamDoc = (docId?: string) => {
      const update = this.upstream.getDocAsUpdates(docId);
      if (update) {
        // fix: forward docId so a subdoc snapshot is applied to that subdoc;
        // the original dropped it, applying subdoc state to the root doc
        this.applyUpdate(update, 'upstream', docId);
      }
    };
    syncUpstreamDoc();
    this.upstream.yDoc.subdocs.forEach(subdoc => {
      syncUpstreamDoc(subdoc.guid);
    });
  }

  /** Apply a yjs update to the root doc, or to the subdoc named by docId. */
  applyUpdate = (
    data: Uint8Array,
    origin: YOrigin = 'upstream',
    docId?: string
  ) => {
    const doc = this.getDoc(docId);
    if (doc) {
      // fix: apply to the resolved doc; the original ignored `doc` and always
      // applied to this.yDoc, corrupting the root doc with subdoc updates
      applyUpdate(doc, data, origin);
    } else {
      logger.warn(
        '[SecondaryWorkspaceSQLiteDB] applyUpdate: doc not found',
        docId
      );
    }
  };

  // TODO: have a better solution to handle blobs
  async syncBlobs() {
    await this.run(async () => {
      // skip if upstream db is not connected (maybe it is already closed)
      const blobsKeys = await this.getBlobKeys();
      if (!this.upstream.db || this.upstream.db?.isClose) {
        return;
      }
      const upstreamBlobsKeys = await this.upstream.getBlobKeys();
      // put every missing blob to upstream
      for (const key of blobsKeys) {
        if (!upstreamBlobsKeys.includes(key)) {
          const blob = await this.getBlob(key);
          if (blob) {
            await this.upstream.addBlob(key, blob);
            logger.debug('syncBlobs', this.workspaceId, key);
          }
        }
      }
    });
  }

  /**
   * pull from external DB file and apply to embedded yDoc
   * workflow:
   * - connect to external db
   * - get updates
   * - apply updates to local yDoc
   * - get blobs and put new blobs to upstream
   * - disconnect
   */
  async pull() {
    const start = performance.now();
    assert(this.upstream.db, 'upstream db should be connected');
    const rows = await this.run(async () => {
      // TODO: no need to get all updates, just get the latest ones (using a cursor, etc)?
      await this.syncBlobs();
      return await this.getAllUpdates();
    });
    if (!rows || this.destroyed) {
      return;
    }
    // apply root doc first — subdocs only exist after the root is applied
    rows.forEach(row => {
      if (!row.docId) {
        this.applyUpdate(row.data, 'self');
      }
    });
    rows.forEach(row => {
      if (row.docId) {
        this.applyUpdate(row.data, 'self', row.docId);
      }
    });
    logger.debug(
      'pull external updates',
      this.path,
      rows.length,
      (performance.now() - start).toFixed(2),
      'ms'
    );
  }
}
/** Look up the workspace meta and return its secondary db path, if one is set. */
export async function getSecondaryWorkspaceDBPath(workspaceId: string) {
  return (await getWorkspaceMeta(workspaceId))?.secondaryDBPath;
}

View File

@@ -1,9 +0,0 @@
import { Subject } from 'rxjs';
/** Payload describing a doc update that arrived from outside the renderer. */
type ExternalUpdatePayload = {
  workspaceId: string;
  update: Uint8Array;
  docId?: string;
};

/** Shared subjects broadcasting db-level events across the helper process. */
export const dbSubjects = {
  externalUpdate$: new Subject<ExternalUpdatePayload>(),
};

View File

@@ -1,20 +1,16 @@
import type { InsertRow } from '@affine/native'; import type { InsertRow } from '@affine/native';
import { debounce } from 'lodash-es';
import { Subject } from 'rxjs'; import { Subject } from 'rxjs';
import { applyUpdate, Doc as YDoc, encodeStateAsUpdate } from 'yjs'; import { applyUpdate, Doc as YDoc } from 'yjs';
import { logger } from '../logger'; import { logger } from '../logger';
import type { YOrigin } from '../type';
import { getWorkspaceMeta } from '../workspace/meta'; import { getWorkspaceMeta } from '../workspace/meta';
import { BaseSQLiteAdapter } from './base-db-adapter'; import { BaseSQLiteAdapter } from './base-db-adapter';
import { dbSubjects } from './subjects'; import { mergeUpdate } from './merge-update';
const TRIM_SIZE = 500; const TRIM_SIZE = 500;
export class WorkspaceSQLiteDB extends BaseSQLiteAdapter { export class WorkspaceSQLiteDB extends BaseSQLiteAdapter {
role = 'primary'; role = 'primary';
yDoc = new YDoc();
firstConnected = false;
update$ = new Subject<void>(); update$ = new Subject<void>();
@@ -27,131 +23,30 @@ export class WorkspaceSQLiteDB extends BaseSQLiteAdapter {
override async destroy() { override async destroy() {
await super.destroy(); await super.destroy();
this.yDoc.destroy();
// when db is closed, we can safely remove it from ensure-db list // when db is closed, we can safely remove it from ensure-db list
this.update$.complete(); this.update$.complete();
this.firstConnected = false;
} }
getDoc(docId?: string) { getWorkspaceName = async () => {
if (!docId) { const ydoc = new YDoc();
return this.yDoc; const updates = await this.getUpdates();
} updates.forEach(update => {
// this should be pretty fast and we don't need to cache it applyUpdate(ydoc, update.data);
for (const subdoc of this.yDoc.subdocs) { });
if (subdoc.guid === docId) { return ydoc.getMap('meta').get('name') as string;
return subdoc;
}
}
return null;
}
getWorkspaceName = () => {
return this.yDoc.getMap('meta').get('name') as string;
}; };
setupListener(docId?: string) {
logger.debug('WorkspaceSQLiteDB:setupListener', this.workspaceId, docId);
const doc = this.getDoc(docId);
if (doc) {
const onUpdate = async (update: Uint8Array, origin: YOrigin) => {
logger.debug(
'WorkspaceSQLiteDB:onUpdate',
this.workspaceId,
docId,
update.length
);
const insertRows = [{ data: update, docId }];
if (origin === 'renderer') {
await this.addUpdateToSQLite(insertRows);
} else if (origin === 'external') {
dbSubjects.externalUpdate$.next({
workspaceId: this.workspaceId,
update,
docId,
});
await this.addUpdateToSQLite(insertRows);
logger.debug('external update', this.workspaceId);
}
};
doc.subdocs.forEach(subdoc => {
this.setupListener(subdoc.guid);
});
const onSubdocs = ({ added }: { added: Set<YDoc> }) => {
logger.info('onSubdocs', this.workspaceId, docId, added);
added.forEach(subdoc => {
this.setupListener(subdoc.guid);
});
};
doc.on('update', (update, origin) => {
onUpdate(update, origin).catch(err => {
logger.error(err);
});
});
doc.on('subdocs', onSubdocs);
} else {
logger.error('setupListener: doc not found', docId);
}
}
async init() { async init() {
const db = await super.connectIfNeeded(); const db = await super.connectIfNeeded();
await this.tryTrim();
if (!this.firstConnected) {
this.setupListener();
}
const updates = await this.getAllUpdates();
// apply root first (without ID).
// subdoc will be available after root is applied
updates.forEach(update => {
if (!update.docId) {
this.applyUpdate(update.data, 'self');
}
});
// then, for all subdocs, apply the updates
updates.forEach(update => {
if (update.docId) {
this.applyUpdate(update.data, 'self', update.docId);
}
});
this.firstConnected = true;
this.update$.next();
return db; return db;
} }
// unlike getUpdates, this will return updates in yDoc // getUpdates then encode
getDocAsUpdates = (docId?: string) => { getDocAsUpdates = async (docId?: string) => {
const doc = docId ? this.getDoc(docId) : this.yDoc; const updates = await this.getUpdates(docId);
if (doc) { return mergeUpdate(updates.map(row => row.data));
return encodeStateAsUpdate(doc);
}
return false;
};
// non-blocking and use yDoc to validate the update
// after that, the update is added to the db
applyUpdate = (
data: Uint8Array,
origin: YOrigin = 'renderer',
docId?: string
) => {
// todo: trim the updates when the number of records is too large
// 1. store the current ydoc state in the db
// 2. then delete the old updates
// yjs-idb will always trim the db for the first time after DB is loaded
const doc = this.getDoc(docId);
if (doc) {
applyUpdate(doc, data, origin);
} else {
logger.warn('[WorkspaceSQLiteDB] applyUpdate: doc not found', docId);
}
}; };
override async addBlob(key: string, value: Uint8Array) { override async addBlob(key: string, value: Uint8Array) {
@@ -167,28 +62,21 @@ export class WorkspaceSQLiteDB extends BaseSQLiteAdapter {
override async addUpdateToSQLite(data: InsertRow[]) { override async addUpdateToSQLite(data: InsertRow[]) {
this.update$.next(); this.update$.next();
data.forEach(row => {
this.trimWhenNecessary(row.docId)?.catch(err => {
logger.error('trimWhenNecessary failed', err);
});
});
await super.addUpdateToSQLite(data); await super.addUpdateToSQLite(data);
} }
trimWhenNecessary = debounce(async (docId?: string) => { private readonly tryTrim = async (docId?: string) => {
if (this.firstConnected) { const count = (await this.db?.getUpdatesCount(docId)) ?? 0;
const count = (await this.db?.getUpdatesCount(docId)) ?? 0; if (count > TRIM_SIZE) {
if (count > TRIM_SIZE) { logger.debug(`trim ${this.workspaceId}:${docId} ${count}`);
logger.debug(`trim ${this.workspaceId}:${docId} ${count}`); const update = await this.getDocAsUpdates(docId);
const update = this.getDocAsUpdates(docId); if (update) {
if (update) { const insertRows = [{ data: update, docId }];
const insertRows = [{ data: update, docId }]; await this.db?.replaceUpdates(docId, insertRows);
await this.db?.replaceUpdates(docId, insertRows); logger.debug(`trim ${this.workspaceId}:${docId} successfully`);
logger.debug(`trim ${this.workspaceId}:${docId} successfully`);
}
} }
} }
}, 1000); };
} }
export async function openWorkspaceDatabase(workspaceId: string) { export async function openWorkspaceDatabase(workspaceId: string) {

View File

@@ -1,5 +1,3 @@
import path from 'node:path';
import { ValidationResult } from '@affine/native'; import { ValidationResult } from '@affine/native';
import { WorkspaceVersion } from '@toeverything/infra/blocksuite'; import { WorkspaceVersion } from '@toeverything/infra/blocksuite';
import fs from 'fs-extra'; import fs from 'fs-extra';
@@ -11,10 +9,9 @@ import {
migrateToLatest, migrateToLatest,
migrateToSubdocAndReplaceDatabase, migrateToSubdocAndReplaceDatabase,
} from '../db/migration'; } from '../db/migration';
import type { WorkspaceSQLiteDB } from '../db/workspace-db-adapter';
import { logger } from '../logger'; import { logger } from '../logger';
import { mainRPC } from '../main-rpc'; import { mainRPC } from '../main-rpc';
import { listWorkspaces, storeWorkspaceMeta } from '../workspace'; import { storeWorkspaceMeta } from '../workspace';
import { import {
getWorkspaceDBPath, getWorkspaceDBPath,
getWorkspaceMeta, getWorkspaceMeta,
@@ -47,12 +44,6 @@ export interface SelectDBFileLocationResult {
canceled?: boolean; canceled?: boolean;
} }
export interface MoveDBFileResult {
filePath?: string;
error?: ErrorMessage;
canceled?: boolean;
}
// provide a backdoor to set dialog path for testing in playwright // provide a backdoor to set dialog path for testing in playwright
export interface FakeDialogResult { export interface FakeDialogResult {
canceled?: boolean; canceled?: boolean;
@@ -68,7 +59,7 @@ export async function revealDBFile(workspaceId: string) {
if (!meta) { if (!meta) {
return; return;
} }
await mainRPC.showItemInFolder(meta.secondaryDBPath ?? meta.mainDBPath); await mainRPC.showItemInFolder(meta.mainDBPath);
} }
// result will be used in the next call to showOpenDialog // result will be used in the next call to showOpenDialog
@@ -120,7 +111,10 @@ export async function saveDBFileAs(
name: '', name: '',
}, },
], ],
defaultPath: getDefaultDBFileName(db.getWorkspaceName(), workspaceId), defaultPath: getDefaultDBFileName(
await db.getWorkspaceName(),
workspaceId
),
message: 'Save Workspace as a SQLite Database file', message: 'Save Workspace as a SQLite Database file',
})); }));
const filePath = ret.filePath; const filePath = ret.filePath;
@@ -213,11 +207,6 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
return { error: 'DB_FILE_PATH_INVALID' }; return { error: 'DB_FILE_PATH_INVALID' };
} }
if (await dbFileAlreadyLoaded(originalPath)) {
logger.warn('loadDBFile: db file already loaded');
return { error: 'DB_FILE_ALREADY_LOADED' };
}
const { SqliteConnection } = await import('@affine/native'); const { SqliteConnection } = await import('@affine/native');
const validationResult = await SqliteConnection.validate(originalPath); const validationResult = await SqliteConnection.validate(originalPath);
@@ -294,100 +283,3 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
}; };
} }
} }
/**
* This function is called when the user clicks the "Move" button in the "Move Workspace Storage" setting.
*
* It will
* - copy the source db file to a new location
* - remove the old db external file
* - update the external db file path in the workspace meta
* - return the new file path
*/
export async function moveDBFile(
workspaceId: string,
dbFileDir?: string
): Promise<MoveDBFileResult> {
let db: WorkspaceSQLiteDB | null = null;
try {
db = await ensureSQLiteDB(workspaceId);
const meta = await getWorkspaceMeta(workspaceId);
const oldDir = meta.secondaryDBPath
? path.dirname(meta.secondaryDBPath)
: null;
const defaultDir = oldDir ?? (await mainRPC.getPath('documents'));
const newName = getDefaultDBFileName(db.getWorkspaceName(), workspaceId);
const newDirPath =
dbFileDir ??
(
getFakedResult() ??
(await mainRPC.showOpenDialog({
properties: ['openDirectory'],
title: 'Move Workspace Storage',
buttonLabel: 'Move',
defaultPath: defaultDir,
message: 'Move Workspace storage file',
}))
).filePaths?.[0];
// skips if
// - user canceled the dialog
// - user selected the same dir
if (!newDirPath || newDirPath === oldDir) {
return {
canceled: true,
};
}
const newFilePath = path.join(newDirPath, newName);
if (await fs.pathExists(newFilePath)) {
return {
error: 'FILE_ALREADY_EXISTS',
};
}
logger.info(`[moveDBFile] copy ${meta.mainDBPath} -> ${newFilePath}`);
await fs.copy(meta.mainDBPath, newFilePath);
// remove the old db file, but we don't care if it fails
if (meta.secondaryDBPath) {
await fs
.remove(meta.secondaryDBPath)
.then(() => {
logger.info(`[moveDBFile] removed ${meta.secondaryDBPath}`);
})
.catch(err => {
logger.error(
`[moveDBFile] remove ${meta.secondaryDBPath} failed`,
err
);
});
}
// update meta
await storeWorkspaceMeta(workspaceId, {
secondaryDBPath: newFilePath,
});
return {
filePath: newFilePath,
};
} catch (err) {
await db?.destroy();
logger.error('[moveDBFile]', err);
return {
error: 'UNKNOWN_ERROR',
};
}
}
async function dbFileAlreadyLoaded(path: string) {
const meta = await listWorkspaces();
const paths = meta.map(m => m[1].secondaryDBPath);
return paths.includes(path);
}

View File

@@ -1,6 +1,5 @@
import { import {
loadDBFile, loadDBFile,
moveDBFile,
revealDBFile, revealDBFile,
saveDBFileAs, saveDBFileAs,
selectDBFileLocation, selectDBFileLocation,
@@ -17,9 +16,6 @@ export const dialogHandlers = {
saveDBFileAs: async (workspaceId: string) => { saveDBFileAs: async (workspaceId: string) => {
return saveDBFileAs(workspaceId); return saveDBFileAs(workspaceId);
}, },
moveDBFile: (workspaceId: string, dbFileLocation?: string) => {
return moveDBFile(workspaceId, dbFileLocation);
},
selectDBFileLocation: async () => { selectDBFileLocation: async () => {
return selectDBFileLocation(); return selectDBFileLocation();
}, },

View File

@@ -12,7 +12,7 @@ function setupRendererConnection(rendererPort: Electron.MessagePortMain) {
try { try {
const start = performance.now(); const start = performance.now();
const result = await handler(...args); const result = await handler(...args);
logger.info( logger.debug(
'[async-api]', '[async-api]',
`${namespace}.${name}`, `${namespace}.${name}`,
args.filter( args.filter(

View File

@@ -1,7 +1,6 @@
export interface WorkspaceMeta { export interface WorkspaceMeta {
id: string; id: string;
mainDBPath: string; mainDBPath: string;
secondaryDBPath?: string; // assume there will be only one
} }
export type YOrigin = 'self' | 'external' | 'upstream' | 'renderer'; export type YOrigin = 'self' | 'external' | 'upstream' | 'renderer';

View File

@@ -52,26 +52,12 @@ export async function getWorkspaceMeta(
.then(() => true) .then(() => true)
.catch(() => false)) .catch(() => false))
) { ) {
// since not meta is found, we will migrate symlinked db file if needed
await fs.ensureDir(basePath); await fs.ensureDir(basePath);
const dbPath = await getWorkspaceDBPath(workspaceId); const dbPath = await getWorkspaceDBPath(workspaceId);
// todo: remove this after migration (in stable version)
const realDBPath = (await fs
.access(dbPath)
.then(() => true)
.catch(() => false))
? await fs.realpath(dbPath)
: dbPath;
const isLink = realDBPath !== dbPath;
if (isLink) {
await fs.copy(realDBPath, dbPath);
}
// create one if not exists // create one if not exists
const meta = { const meta = {
id: workspaceId, id: workspaceId,
mainDBPath: dbPath, mainDBPath: dbPath,
secondaryDBPath: isLink ? realDBPath : undefined,
}; };
await fs.writeJSON(metaPath, meta); await fs.writeJSON(metaPath, meta);
return meta; return meta;

View File

@@ -99,47 +99,3 @@ test('db should be removed in db$Map after destroyed', async () => {
await setTimeout(100); await setTimeout(100);
expect(db$Map.has(workspaceId)).toBe(false); expect(db$Map.has(workspaceId)).toBe(false);
}); });
// we have removed secondary db feature
// NOTE(review): kept skipped as documentation of the old polling behavior;
// relies on `constructorStub`/`destroyStub` and fake timers set up elsewhere
// in this file — TODO confirm before un-skipping.
test.skip('if db has a secondary db path, we should also poll that', async () => {
  const { ensureSQLiteDB } = await import(
    '@affine/electron/helper/db/ensure-db'
  );
  const { storeWorkspaceMeta } = await import(
    '@affine/electron/helper/workspace'
  );
  const workspaceId = v4();
  // configure a secondary db path before the db is first opened
  await storeWorkspaceMeta(workspaceId, {
    secondaryDBPath: path.join(tmpDir, 'secondary.db'),
  });
  const db = await ensureSQLiteDB(workspaceId);
  await setTimeout(10);
  // opening the primary should spin up exactly one secondary db at that path
  expect(constructorStub).toBeCalledTimes(1);
  expect(constructorStub).toBeCalledWith(path.join(tmpDir, 'secondary.db'), db);
  // if secondary meta is changed
  await storeWorkspaceMeta(workspaceId, {
    secondaryDBPath: path.join(tmpDir, 'secondary2.db'),
  });
  // wait the async `db.destroy()` to be called
  await setTimeout(100);
  // path change: old secondary destroyed, replacement constructed
  expect(constructorStub).toBeCalledTimes(2);
  expect(destroyStub).toBeCalledTimes(1);
  // if secondary meta is changed (but another workspace)
  await storeWorkspaceMeta(v4(), {
    secondaryDBPath: path.join(tmpDir, 'secondary3.db'),
  });
  await vi.advanceTimersByTimeAsync(1500);
  // unrelated workspace meta must not touch this workspace's secondary db
  expect(constructorStub).toBeCalledTimes(2);
  expect(destroyStub).toBeCalledTimes(1);
  // if primary is destroyed, secondary should also be destroyed
  await db.destroy();
  await setTimeout(100);
  expect(destroyStub).toBeCalledTimes(2);
});

View File

@@ -1,11 +1,9 @@
import path from 'node:path'; import path from 'node:path';
import { dbSubjects } from '@affine/electron/helper/db/subjects';
import { removeWithRetry } from '@affine-test/kit/utils/utils'; import { removeWithRetry } from '@affine-test/kit/utils/utils';
import fs from 'fs-extra'; import fs from 'fs-extra';
import { v4 } from 'uuid'; import { v4 } from 'uuid';
import { afterAll, afterEach, beforeAll, expect, test, vi } from 'vitest'; import { afterAll, afterEach, beforeAll, expect, test, vi } from 'vitest';
import { Doc as YDoc, encodeStateAsUpdate } from 'yjs';
const tmpDir = path.join(__dirname, 'tmp'); const tmpDir = path.join(__dirname, 'tmp');
const appDataPath = path.join(tmpDir, 'app-data'); const appDataPath = path.join(tmpDir, 'app-data');
@@ -26,31 +24,6 @@ afterAll(() => {
vi.doUnmock('@affine/electron/helper/main-rpc'); vi.doUnmock('@affine/electron/helper/main-rpc');
}); });
let testYDoc: YDoc;
let testYSubDoc: YDoc;
function getTestUpdates() {
testYDoc = new YDoc();
const yText = testYDoc.getText('test');
yText.insert(0, 'hello');
testYSubDoc = new YDoc();
testYDoc.getMap('subdocs').set('test-subdoc', testYSubDoc);
const updates = encodeStateAsUpdate(testYDoc);
return updates;
}
function getTestSubDocUpdates() {
const yText = testYSubDoc.getText('test');
yText.insert(0, 'hello');
const updates = encodeStateAsUpdate(testYSubDoc);
return updates;
}
test('can create new db file if not exists', async () => { test('can create new db file if not exists', async () => {
const { openWorkspaceDatabase } = await import( const { openWorkspaceDatabase } = await import(
'@affine/electron/helper/db/workspace-db-adapter' '@affine/electron/helper/db/workspace-db-adapter'
@@ -66,82 +39,6 @@ test('can create new db file if not exists', async () => {
await db.destroy(); await db.destroy();
}); });
test('on applyUpdate (from self), will not trigger update', async () => {
const { openWorkspaceDatabase } = await import(
'@affine/electron/helper/db/workspace-db-adapter'
);
const workspaceId = v4();
const onUpdate = vi.fn();
const db = await openWorkspaceDatabase(workspaceId);
db.update$.subscribe(onUpdate);
db.applyUpdate(getTestUpdates(), 'self');
expect(onUpdate).not.toHaveBeenCalled();
await db.destroy();
});
test('on applyUpdate (from renderer), will trigger update', async () => {
const { openWorkspaceDatabase } = await import(
'@affine/electron/helper/db/workspace-db-adapter'
);
const workspaceId = v4();
const onUpdate = vi.fn();
const onExternalUpdate = vi.fn();
const db = await openWorkspaceDatabase(workspaceId);
db.update$.subscribe(onUpdate);
const sub = dbSubjects.externalUpdate$.subscribe(onExternalUpdate);
db.applyUpdate(getTestUpdates(), 'renderer');
expect(onUpdate).toHaveBeenCalled();
sub.unsubscribe();
await db.destroy();
});
test('on applyUpdate (from renderer, subdoc), will trigger update', async () => {
const { openWorkspaceDatabase } = await import(
'@affine/electron/helper/db/workspace-db-adapter'
);
const workspaceId = v4();
const onUpdate = vi.fn();
const insertUpdates = vi.fn();
const db = await openWorkspaceDatabase(workspaceId);
db.applyUpdate(getTestUpdates(), 'renderer');
db.db!.insertUpdates = insertUpdates;
db.update$.subscribe(onUpdate);
const subdocUpdates = getTestSubDocUpdates();
db.applyUpdate(subdocUpdates, 'renderer', testYSubDoc.guid);
expect(onUpdate).toHaveBeenCalled();
expect(insertUpdates).toHaveBeenCalledWith([
{
docId: testYSubDoc.guid,
data: subdocUpdates,
},
]);
await db.destroy();
});
test('on applyUpdate (from external), will trigger update & send external update event', async () => {
const { openWorkspaceDatabase } = await import(
'@affine/electron/helper/db/workspace-db-adapter'
);
const workspaceId = v4();
const onUpdate = vi.fn();
const onExternalUpdate = vi.fn();
const db = await openWorkspaceDatabase(workspaceId);
db.update$.subscribe(onUpdate);
const sub = dbSubjects.externalUpdate$.subscribe(onExternalUpdate);
db.applyUpdate(getTestUpdates(), 'external');
expect(onUpdate).toHaveBeenCalled();
expect(onExternalUpdate).toHaveBeenCalled();
sub.unsubscribe();
await db.destroy();
});
test('on destroy, check if resources have been released', async () => { test('on destroy, check if resources have been released', async () => {
const { openWorkspaceDatabase } = await import( const { openWorkspaceDatabase } = await import(
'@affine/electron/helper/db/workspace-db-adapter' '@affine/electron/helper/db/workspace-db-adapter'

View File

@@ -127,7 +127,6 @@ describe('getWorkspaceMeta', () => {
expect(await getWorkspaceMeta(workspaceId)).toEqual({ expect(await getWorkspaceMeta(workspaceId)).toEqual({
id: workspaceId, id: workspaceId,
mainDBPath: path.join(workspacePath, 'storage.db'), mainDBPath: path.join(workspacePath, 'storage.db'),
secondaryDBPath: sourcePath,
}); });
expect( expect(
@@ -151,11 +150,4 @@ test('storeWorkspaceMeta', async () => {
expect(await fs.readJSON(path.join(workspacePath, 'meta.json'))).toEqual( expect(await fs.readJSON(path.join(workspacePath, 'meta.json'))).toEqual(
meta meta
); );
await storeWorkspaceMeta(workspaceId, {
secondaryDBPath: path.join(tmpDir, 'test.db'),
});
expect(await fs.readJSON(path.join(workspacePath, 'meta.json'))).toEqual({
...meta,
secondaryDBPath: path.join(tmpDir, 'test.db'),
});
}); });

View File

@@ -17,7 +17,6 @@ export function getRuntimeConfig(buildFlags: BuildFlags): RuntimeConfig {
enablePreloading: true, enablePreloading: true,
enableNewSettingModal: true, enableNewSettingModal: true,
enableNewSettingUnstableApi: false, enableNewSettingUnstableApi: false,
enableMoveDatabase: false,
enableCloud: true, enableCloud: true,
enableCaptcha: true, enableCaptcha: true,
enableEnhanceShareMode: false, enableEnhanceShareMode: false,
@@ -57,7 +56,6 @@ export function getRuntimeConfig(buildFlags: BuildFlags): RuntimeConfig {
enablePreloading: true, enablePreloading: true,
enableNewSettingModal: true, enableNewSettingModal: true,
enableNewSettingUnstableApi: false, enableNewSettingUnstableApi: false,
enableMoveDatabase: false,
enableCloud: true, enableCloud: true,
enableCaptcha: true, enableCaptcha: true,
enableEnhanceShareMode: false, enableEnhanceShareMode: false,
@@ -107,9 +105,6 @@ export function getRuntimeConfig(buildFlags: BuildFlags): RuntimeConfig {
enableEnhanceShareMode: process.env.ENABLE_ENHANCE_SHARE_MODE enableEnhanceShareMode: process.env.ENABLE_ENHANCE_SHARE_MODE
? process.env.ENABLE_ENHANCE_SHARE_MODE === 'true' ? process.env.ENABLE_ENHANCE_SHARE_MODE === 'true'
: currentBuildPreset.enableEnhanceShareMode, : currentBuildPreset.enableEnhanceShareMode,
enableMoveDatabase: process.env.ENABLE_MOVE_DATABASE
? process.env.ENABLE_MOVE_DATABASE === 'true'
: currentBuildPreset.enableMoveDatabase,
enablePayment: process.env.ENABLE_PAYMENT enablePayment: process.env.ENABLE_PAYMENT
? process.env.ENABLE_PAYMENT !== 'false' ? process.env.ENABLE_PAYMENT !== 'false'
: buildFlags.mode === 'development' : buildFlags.mode === 'development'