fix(electron): export and import (#9767)

This commit is contained in:
forehalo
2025-01-20 08:48:03 +00:00
parent 2e18ae59e3
commit cb53baca89
26 changed files with 332 additions and 453 deletions

View File

@@ -41,12 +41,6 @@ export class SqliteV1BlobStorage extends BlobStorageBase {
};
}
override async delete(key: string, permanently: boolean) {
if (permanently) {
await this.db.deleteBlob(this.options.type, this.options.id, key);
}
}
override async list() {
const keys = await this.db.getBlobKeys(this.options.type, this.options.id);
@@ -57,6 +51,9 @@ export class SqliteV1BlobStorage extends BlobStorageBase {
createdAt: new Date(),
}));
}
override async delete() {
// no more deletes
}
override async set() {
// no more writes

View File

@@ -6,17 +6,16 @@ interface NativeDBV1Apis {
workspaceId: string,
key: string
) => Promise<Buffer | null>;
deleteBlob: (
spaceType: SpaceType,
workspaceId: string,
key: string
) => Promise<void>;
getBlobKeys: (spaceType: SpaceType, workspaceId: string) => Promise<string[]>;
getDocAsUpdates: (
spaceType: SpaceType,
workspaceId: string,
subdocId: string
) => Promise<Uint8Array>;
getDocTimestamps: (
spaceType: SpaceType,
workspaceId: string
) => Promise<{ docId?: string; timestamp: Date }[]>;
}
export let apis: NativeDBV1Apis | null = null;

View File

@@ -56,6 +56,27 @@ export class SqliteV1DocStorage extends DocStorageBase<{
};
}
override async getDocTimestamps() {
const timestamps = await this.db.getDocTimestamps(
this.options.type,
this.options.id
);
if (!timestamps) {
return {};
}
const idConverter = await this.getIdConverter();
return timestamps.reduce(
(ret, { docId, timestamp }) => {
ret[idConverter.oldIdToNewId(docId ?? this.options.id)] = timestamp;
return ret;
},
{} as Record<string, Date>
);
}
override async deleteDoc() {
return;
}
@@ -64,10 +85,6 @@ export class SqliteV1DocStorage extends DocStorageBase<{
return null;
}
override async getDocTimestamps() {
return {};
}
override async getDocTimestamp() {
return null;
}

View File

@@ -1,12 +1,19 @@
import { ValidationResult } from '@affine/native';
import { parse } from 'node:path';
import { DocStorage, ValidationResult } from '@affine/native';
import { parseUniversalId } from '@affine/nbstore';
import fs from 'fs-extra';
import { nanoid } from 'nanoid';
import { logger } from '../logger';
import { mainRPC } from '../main-rpc';
import { ensureSQLiteDB } from '../nbstore/v1';
import { getDocStoragePool } from '../nbstore';
import { storeWorkspaceMeta } from '../workspace';
import { getWorkspaceDBPath, getWorkspacesBasePath } from '../workspace/meta';
import {
getSpaceDBPath,
getWorkspaceDBPath,
getWorkspacesBasePath,
} from '../workspace/meta';
export type ErrorMessage =
| 'DB_FILE_PATH_INVALID'
@@ -69,20 +76,26 @@ function getDefaultDBFileName(name: string, id: string) {
*
* It will just copy the file to the given path
*/
export async function saveDBFileAs(id: string): Promise<SaveDBFileResult> {
export async function saveDBFileAs(
universalId: string,
name: string
): Promise<SaveDBFileResult> {
try {
// TODO(@forehalo): use `nbstore` when it is ready
// const storage = await ensureStorage(id);
const { peer, type, id } = parseUniversalId(universalId);
const dbPath = await getSpaceDBPath(peer, type, id);
// connect to the pool and make sure all changes (WAL) are written to db
const pool = getDocStoragePool();
await pool.connect(universalId, dbPath);
await pool.checkpoint(universalId); // make sure all changes (WAL) are written to db
const storage = await ensureSQLiteDB('workspace', id);
await storage.checkpoint(); // make sure all changes (WAL) are written to db
const fakedResult = getFakedResult();
const dbPath = storage.path;
if (!dbPath) {
return {
error: 'DB_FILE_PATH_INVALID',
};
}
const ret =
fakedResult ??
(await mainRPC.showSaveDialog({
@@ -96,12 +109,10 @@ export async function saveDBFileAs(id: string): Promise<SaveDBFileResult> {
name: '',
},
],
defaultPath: getDefaultDBFileName(
(await storage.getWorkspaceName()) ?? 'db',
id
),
defaultPath: getDefaultDBFileName(name, id),
message: 'Save Workspace as a SQLite Database file',
}));
const filePath = ret.filePath;
if (ret.canceled || !filePath) {
return {
@@ -159,7 +170,7 @@ export async function selectDBFileLocation(): Promise<SelectDBFileLocationResult
* - return the new workspace id
*
* eg, it will create a new folder in app-data:
* <app-data>/<app-name>/workspaces/<workspace-id>/storage.db
* <app-data>/<app-name>/<workspaces|userspaces>/<peer>/<workspace-id>/storage.db
*
* On the renderer side, after the UI got a new workspace id, it will
* update the local workspace id list and then connect to it.
@@ -195,34 +206,29 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
}
const workspaceId = nanoid(10);
return await loadV1DBFile(originalPath, workspaceId);
let storage = new DocStorage(originalPath);
// TODO(@forehalo): use `nbstore` when it is ready
// let storage = new DocStorage(originalPath);
// if imported db is not a valid v2 db, we will treat it as a v1 db
if (!(await storage.validate())) {
return await cpV1DBFile(originalPath, workspaceId);
}
// // if imported db is not a valid v2 db, we will treat it as a v1 db
// if (!(await storage.validate())) {
// return loadV1DBFile(originalPath, workspaceId);
// }
// v2 import logic
const internalFilePath = await getSpaceDBPath(
'local',
'workspace',
workspaceId
);
await fs.ensureDir(parse(internalFilePath).dir);
await fs.copy(originalPath, internalFilePath);
logger.info(`loadDBFile, copy: ${originalPath} -> ${internalFilePath}`);
// // v2 import logic
// const internalFilePath = await getSpaceDBPath(
// 'local',
// 'workspace',
// workspaceId
// );
// await fs.ensureDir(await getWorkspacesBasePath());
// await fs.copy(originalPath, internalFilePath);
// logger.info(`loadDBFile, copy: ${originalPath} -> ${internalFilePath}`);
storage = new DocStorage(internalFilePath);
await storage.setSpaceId(workspaceId);
// storage = new DocStorage(internalFilePath);
// await storage.connect();
// await storage.setSpaceId(workspaceId);
// await storage.close();
// return {
// workspaceId,
// };
return {
workspaceId,
};
} catch (err) {
logger.error('loadDBFile', err);
return {
@@ -231,7 +237,7 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
}
}
async function loadV1DBFile(
async function cpV1DBFile(
originalPath: string,
workspaceId: string
): Promise<LoadDBFileResult> {

View File

@@ -9,8 +9,8 @@ export const dialogHandlers = {
loadDBFile: async () => {
return loadDBFile();
},
saveDBFileAs: async (id: string) => {
return saveDBFileAs(id);
saveDBFileAs: async (universalId: string, name: string) => {
return saveDBFileAs(universalId, name);
},
selectDBFileLocation: async () => {
return selectDBFileLocation();

View File

@@ -9,12 +9,17 @@ import { getSpaceDBPath } from '../workspace/meta';
const POOL = new DocStoragePool();
export function getDocStoragePool() {
return POOL;
}
export const nbstoreHandlers: NativeDBApis = {
connect: async (universalId: string) => {
const { peer, type, id } = parseUniversalId(universalId);
const dbPath = await getSpaceDBPath(peer, type, id);
await fs.ensureDir(path.dirname(dbPath));
await POOL.connect(universalId, dbPath);
await POOL.setSpaceId(universalId, id);
},
disconnect: POOL.disconnect.bind(POOL),
pushUpdate: POOL.pushUpdate.bind(POOL),

View File

@@ -1,3 +1,4 @@
export { nbstoreHandlers } from './handlers';
export { getDocStoragePool } from './handlers';
export { dbEvents as dbEventsV1, dbHandlers as dbHandlersV1 } from './v1';
export { universalId } from '@affine/nbstore';

View File

@@ -255,4 +255,12 @@ export class SQLiteAdapter {
await this.db.clearSyncMetadata();
},
};
async getDocTimestamps() {
if (!this.db) {
logger.warn(`${this.path} is not connected`);
return [];
}
return await this.db.getDocTimestamps();
}
}

View File

@@ -1,6 +1,9 @@
import { existsSync } from 'node:fs';
import type { SpaceType } from '@affine/nbstore';
import { logger } from '../../logger';
import { getWorkspaceMeta } from '../../workspace/meta';
import type { WorkspaceSQLiteDB } from './workspace-db-adapter';
import { openWorkspaceDatabase } from './workspace-db-adapter';
@@ -35,10 +38,32 @@ async function getWorkspaceDB(spaceType: SpaceType, id: string) {
process.on('beforeExit', cleanup);
}
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
// oxlint-disable-next-line @typescript-eslint/no-non-null-assertion
return db!;
}
export function ensureSQLiteDB(spaceType: SpaceType, id: string) {
export async function ensureSQLiteDB(
spaceType: SpaceType,
id: string
): Promise<WorkspaceSQLiteDB | null> {
const meta = await getWorkspaceMeta(spaceType, id);
// do not auto create v1 db anymore
if (!existsSync(meta.mainDBPath)) {
return null;
}
return getWorkspaceDB(spaceType, id);
}
export async function ensureSQLiteDisconnected(
spaceType: SpaceType,
id: string
) {
const db = await ensureSQLiteDB(spaceType, id);
if (db) {
await db.destroy();
}
}

View File

@@ -1,6 +1,5 @@
import type { SpaceType } from '@affine/nbstore';
import { mainRPC } from '../../main-rpc';
import type { MainEventRegister } from '../../type';
import { ensureSQLiteDB } from './ensure-db';
@@ -13,119 +12,41 @@ export const dbHandlers = {
subdocId: string
) => {
const spaceDB = await ensureSQLiteDB(spaceType, workspaceId);
if (!spaceDB) {
// means empty update in yjs
return new Uint8Array([0, 0]);
}
return spaceDB.getDocAsUpdates(subdocId);
},
applyDocUpdate: async (
spaceType: SpaceType,
workspaceId: string,
update: Uint8Array,
subdocId: string
) => {
getDocTimestamps: async (spaceType: SpaceType, workspaceId: string) => {
const spaceDB = await ensureSQLiteDB(spaceType, workspaceId);
return spaceDB.addUpdateToSQLite(update, subdocId);
},
deleteDoc: async (
spaceType: SpaceType,
workspaceId: string,
subdocId: string
) => {
const spaceDB = await ensureSQLiteDB(spaceType, workspaceId);
return spaceDB.deleteUpdate(subdocId);
},
addBlob: async (
spaceType: SpaceType,
workspaceId: string,
key: string,
data: Uint8Array
) => {
const spaceDB = await ensureSQLiteDB(spaceType, workspaceId);
return spaceDB.addBlob(key, data);
if (!spaceDB) {
return [];
}
return spaceDB.getDocTimestamps();
},
getBlob: async (spaceType: SpaceType, workspaceId: string, key: string) => {
const spaceDB = await ensureSQLiteDB(spaceType, workspaceId);
if (!spaceDB) {
return null;
}
return spaceDB.getBlob(key);
},
deleteBlob: async (
spaceType: SpaceType,
workspaceId: string,
key: string
) => {
const spaceDB = await ensureSQLiteDB(spaceType, workspaceId);
return spaceDB.deleteBlob(key);
},
getBlobKeys: async (spaceType: SpaceType, workspaceId: string) => {
const spaceDB = await ensureSQLiteDB(spaceType, workspaceId);
if (!spaceDB) {
return [];
}
return spaceDB.getBlobKeys();
},
getDefaultStorageLocation: async () => {
return await mainRPC.getPath('sessionData');
},
getServerClock: async (
spaceType: SpaceType,
workspaceId: string,
key: string
) => {
const spaceDB = await ensureSQLiteDB(spaceType, workspaceId);
return spaceDB.adapter.serverClock.get(key);
},
setServerClock: async (
spaceType: SpaceType,
workspaceId: string,
key: string,
data: Uint8Array
) => {
const spaceDB = await ensureSQLiteDB(spaceType, workspaceId);
return spaceDB.adapter.serverClock.set(key, data);
},
getServerClockKeys: async (spaceType: SpaceType, workspaceId: string) => {
const spaceDB = await ensureSQLiteDB(spaceType, workspaceId);
return spaceDB.adapter.serverClock.keys();
},
clearServerClock: async (spaceType: SpaceType, workspaceId: string) => {
const spaceDB = await ensureSQLiteDB(spaceType, workspaceId);
return spaceDB.adapter.serverClock.clear();
},
delServerClock: async (
spaceType: SpaceType,
workspaceId: string,
key: string
) => {
const spaceDB = await ensureSQLiteDB(spaceType, workspaceId);
return spaceDB.adapter.serverClock.del(key);
},
getSyncMetadata: async (
spaceType: SpaceType,
workspaceId: string,
key: string
) => {
const spaceDB = await ensureSQLiteDB(spaceType, workspaceId);
return spaceDB.adapter.syncMetadata.get(key);
},
setSyncMetadata: async (
spaceType: SpaceType,
workspaceId: string,
key: string,
data: Uint8Array
) => {
const spaceDB = await ensureSQLiteDB(spaceType, workspaceId);
return spaceDB.adapter.syncMetadata.set(key, data);
},
getSyncMetadataKeys: async (spaceType: SpaceType, workspaceId: string) => {
const spaceDB = await ensureSQLiteDB(spaceType, workspaceId);
return spaceDB.adapter.syncMetadata.keys();
},
clearSyncMetadata: async (spaceType: SpaceType, workspaceId: string) => {
const spaceDB = await ensureSQLiteDB(spaceType, workspaceId);
return spaceDB.adapter.syncMetadata.clear();
},
delSyncMetadata: async (
spaceType: SpaceType,
workspaceId: string,
key: string
) => {
const spaceDB = await ensureSQLiteDB(spaceType, workspaceId);
return spaceDB.adapter.syncMetadata.del(key);
},
};
export const dbEvents = {} satisfies Record<string, MainEventRegister>;

View File

@@ -67,6 +67,10 @@ export class WorkspaceSQLiteDB {
}
};
async getDocTimestamps() {
return this.adapter.getDocTimestamps();
}
async addBlob(key: string, value: Uint8Array) {
this.update$.next();
const res = await this.adapter.addBlob(key, value);

View File

@@ -1,27 +1,63 @@
import path from 'node:path';
import { parseUniversalId } from '@affine/nbstore';
import fs from 'fs-extra';
import { logger } from '../logger';
import { ensureSQLiteDB } from '../nbstore/v1/ensure-db';
import { getDocStoragePool } from '../nbstore';
import { ensureSQLiteDisconnected } from '../nbstore/v1/ensure-db';
import type { WorkspaceMeta } from '../type';
import {
getDeletedWorkspacesBasePath,
getSpaceDBPath,
getWorkspaceBasePathV1,
getWorkspaceMeta,
} from './meta';
export async function deleteWorkspace(id: string) {
const basePath = await getWorkspaceBasePathV1('workspace', id);
async function deleteWorkspaceV1(workspaceId: string) {
try {
await ensureSQLiteDisconnected('workspace', workspaceId);
const basePath = await getWorkspaceBasePathV1('workspace', workspaceId);
await fs.rmdir(basePath, { recursive: true });
} catch (error) {
logger.error('deleteWorkspaceV1', error);
}
}
/**
* Permanently delete the workspace data
*/
export async function deleteWorkspace(universalId: string) {
const { peer, type, id } = parseUniversalId(universalId);
await deleteWorkspaceV1(id);
const dbPath = await getSpaceDBPath(peer, type, id);
try {
await getDocStoragePool().disconnect(universalId);
await fs.rmdir(path.dirname(dbPath), { recursive: true });
} catch (e) {
logger.error('deleteWorkspace', e);
}
}
/**
* Move the workspace folder to `deleted-workspaces`
 * At the same time, permanently delete the v1 workspace folder if its id exists in nbstore,
* because trashing always happens after full sync from v1 to nbstore.
*/
export async function trashWorkspace(universalId: string) {
const { peer, type, id } = parseUniversalId(universalId);
await deleteWorkspaceV1(id);
const dbPath = await getSpaceDBPath(peer, type, id);
const movedPath = path.join(await getDeletedWorkspacesBasePath(), `${id}`);
try {
const db = await ensureSQLiteDB('workspace', id);
await db.destroy();
return await fs.move(basePath, movedPath, {
await getDocStoragePool().disconnect(universalId);
return await fs.move(path.dirname(dbPath), movedPath, {
overwrite: true,
});
} catch (error) {
logger.error('deleteWorkspace', error);
logger.error('trashWorkspace', error);
}
}

View File

@@ -1,5 +1,5 @@
import type { MainEventRegister } from '../type';
import { deleteWorkspace } from './handlers';
import { deleteWorkspace, trashWorkspace } from './handlers';
export * from './handlers';
export * from './subjects';
@@ -7,5 +7,6 @@ export * from './subjects';
export const workspaceEvents = {} as Record<string, MainEventRegister>;
export const workspaceHandlers = {
delete: async (id: string) => deleteWorkspace(id),
delete: deleteWorkspace,
moveToTrash: trashWorkspace,
};

View File

@@ -1,10 +1,8 @@
import path from 'node:path';
import type { SpaceType } from '@affine/nbstore';
import fs from 'fs-extra';
import { type SpaceType } from '@affine/nbstore';
import { isWindows } from '../../shared/utils';
import { logger } from '../logger';
import { mainRPC } from '../main-rpc';
import type { WorkspaceMeta } from '../type';
@@ -94,31 +92,10 @@ export async function getWorkspaceMeta(
spaceType: SpaceType,
workspaceId: string
): Promise<WorkspaceMeta> {
try {
const basePath = await getWorkspaceBasePathV1(spaceType, workspaceId);
const metaPath = await getWorkspaceMetaPath(spaceType, workspaceId);
if (
!(await fs
.access(metaPath)
.then(() => true)
.catch(() => false))
) {
await fs.ensureDir(basePath);
const dbPath = await getWorkspaceDBPath(spaceType, workspaceId);
// create one if not exists
const meta = {
id: workspaceId,
mainDBPath: dbPath,
type: spaceType,
};
await fs.writeJSON(metaPath, meta);
return meta;
} else {
const meta = await fs.readJSON(metaPath);
return meta;
}
} catch (err) {
logger.error('getWorkspaceMeta failed', err);
throw err;
}
const dbPath = await getWorkspaceDBPath(spaceType, workspaceId);
return {
mainDBPath: dbPath,
id: workspaceId,
};
}

View File

@@ -1,105 +0,0 @@
import path from 'node:path';
import { setTimeout } from 'node:timers/promises';
import fs from 'fs-extra';
import { v4 } from 'uuid';
import { afterAll, afterEach, beforeEach, expect, test, vi } from 'vitest';
const tmpDir = path.join(__dirname, 'tmp');
const appDataPath = path.join(tmpDir, 'app-data');
vi.doMock('@affine/electron/helper/main-rpc', () => ({
mainRPC: {
getPath: async () => appDataPath,
},
}));
const constructorStub = vi.fn();
const destroyStub = vi.fn();
destroyStub.mockReturnValue(Promise.resolve());
function existProcess() {
process.emit('beforeExit', 0);
}
vi.doMock('@affine/electron/helper/db/secondary-db', () => {
return {
SecondaryWorkspaceSQLiteDB: class {
constructor(...args: any[]) {
constructorStub(...args);
}
connectIfNeeded = () => Promise.resolve();
pull = () => Promise.resolve();
destroy = destroyStub;
},
};
});
beforeEach(() => {
vi.useFakeTimers({ shouldAdvanceTime: true });
});
afterEach(async () => {
existProcess();
try {
await fs.remove(tmpDir);
} catch (e) {
console.error(e);
}
vi.useRealTimers();
});
afterAll(() => {
vi.doUnmock('@affine/electron/helper/main-rpc');
});
test('can get a valid WorkspaceSQLiteDB', async () => {
const { ensureSQLiteDB } = await import(
'@affine/electron/helper/nbstore/v1/ensure-db'
);
const workspaceId = v4();
const db0 = await ensureSQLiteDB('workspace', workspaceId);
expect(db0).toBeDefined();
expect(db0.workspaceId).toBe(workspaceId);
const db1 = await ensureSQLiteDB('workspace', v4());
expect(db1).not.toBe(db0);
expect(db1.workspaceId).not.toBe(db0.workspaceId);
// ensure that the db is cached
expect(await ensureSQLiteDB('workspace', workspaceId)).toBe(db0);
});
test('db should be destroyed when app quits', async () => {
const { ensureSQLiteDB } = await import(
'@affine/electron/helper/nbstore/v1/ensure-db'
);
const workspaceId = v4();
const db0 = await ensureSQLiteDB('workspace', workspaceId);
const db1 = await ensureSQLiteDB('workspace', v4());
expect(db0.adapter).not.toBeNull();
expect(db1.adapter).not.toBeNull();
existProcess();
// wait the async `db.destroy()` to be called
await setTimeout(100);
expect(db0.adapter.db).toBeNull();
expect(db1.adapter.db).toBeNull();
});
test('db should be removed in db$Map after destroyed', async () => {
const { ensureSQLiteDB, db$Map } = await import(
'@affine/electron/helper/nbstore/v1/ensure-db'
);
const workspaceId = v4();
const db = await ensureSQLiteDB('workspace', workspaceId);
await db.destroy();
await setTimeout(100);
expect(db$Map.has(`workspace:${workspaceId}`)).toBe(false);
});

View File

@@ -1,59 +0,0 @@
import path from 'node:path';
import fs from 'fs-extra';
import { v4 } from 'uuid';
import { afterAll, afterEach, beforeAll, expect, test, vi } from 'vitest';
const tmpDir = path.join(__dirname, 'tmp');
const appDataPath = path.join(tmpDir, 'app-data');
beforeAll(() => {
vi.doMock('@affine/electron/helper/main-rpc', () => ({
mainRPC: {
getPath: async () => appDataPath,
},
}));
});
afterEach(async () => {
try {
await fs.remove(tmpDir);
} catch (e) {
console.error(e);
}
});
afterAll(() => {
vi.doUnmock('@affine/electron/helper/main-rpc');
});
test('can create new db file if not exists', async () => {
const { openWorkspaceDatabase } = await import(
'@affine/electron/helper/nbstore/v1/workspace-db-adapter'
);
const workspaceId = v4();
const db = await openWorkspaceDatabase('workspace', workspaceId);
const dbPath = path.join(
appDataPath,
`workspaces/${workspaceId}`,
`storage.db`
);
expect(await fs.exists(dbPath)).toBe(true);
await db.destroy();
});
test('on destroy, check if resources have been released', async () => {
const { openWorkspaceDatabase } = await import(
'@affine/electron/helper/nbstore/v1/workspace-db-adapter'
);
const workspaceId = v4();
const db = await openWorkspaceDatabase('workspace', workspaceId);
const updateSub = {
complete: vi.fn(),
next: vi.fn(),
};
db.update$ = updateSub as any;
await db.destroy();
expect(db.adapter.db).toBe(null);
expect(updateSub.complete).toHaveBeenCalled();
});

View File

@@ -1,5 +1,6 @@
import path from 'node:path';
import { universalId } from '@affine/nbstore';
import fs from 'fs-extra';
import { v4 } from 'uuid';
import { afterAll, afterEach, describe, expect, test, vi } from 'vitest';
@@ -31,15 +32,22 @@ afterAll(() => {
vi.doUnmock('@affine/electron/helper/main-rpc');
});
describe('delete workspace', () => {
test('deleteWorkspace', async () => {
const { deleteWorkspace } = await import(
describe('workspace db management', () => {
test('trash workspace', async () => {
const { trashWorkspace } = await import(
'@affine/electron/helper/workspace/handlers'
);
const workspaceId = v4();
const workspacePath = path.join(appDataPath, 'workspaces', workspaceId);
const workspacePath = path.join(
appDataPath,
'workspaces',
'local',
workspaceId
);
await fs.ensureDir(workspacePath);
await deleteWorkspace(workspaceId);
await trashWorkspace(
universalId({ peer: 'local', type: 'workspace', id: workspaceId })
);
expect(await fs.pathExists(workspacePath)).toBe(false);
// removed workspace will be moved to deleted-workspaces
expect(
@@ -48,78 +56,28 @@ describe('delete workspace', () => {
)
).toBe(true);
});
});
describe('getWorkspaceMeta', () => {
test('can get meta', async () => {
const { getWorkspaceMeta } = await import(
'@affine/electron/helper/workspace/meta'
test('delete workspace', async () => {
const { deleteWorkspace } = await import(
'@affine/electron/helper/workspace/handlers'
);
const workspaceId = v4();
const workspacePath = path.join(appDataPath, 'workspaces', workspaceId);
const meta = {
id: workspaceId,
};
await fs.ensureDir(workspacePath);
await fs.writeJSON(path.join(workspacePath, 'meta.json'), meta);
expect(await getWorkspaceMeta('workspace', workspaceId)).toEqual(meta);
});
test('can create meta if not exists', async () => {
const { getWorkspaceMeta } = await import(
'@affine/electron/helper/workspace/meta'
const workspacePath = path.join(
appDataPath,
'workspaces',
'local',
workspaceId
);
const workspaceId = v4();
const workspacePath = path.join(appDataPath, 'workspaces', workspaceId);
await fs.ensureDir(workspacePath);
expect(await getWorkspaceMeta('workspace', workspaceId)).toEqual({
id: workspaceId,
mainDBPath: path.join(workspacePath, 'storage.db'),
type: 'workspace',
});
await deleteWorkspace(
universalId({ peer: 'local', type: 'workspace', id: workspaceId })
);
expect(await fs.pathExists(workspacePath)).toBe(false);
// deleted workspace will remove it permanently
expect(
await fs.pathExists(path.join(workspacePath, 'meta.json'))
).toBeTruthy();
});
test('can migrate meta if db file is a link', async () => {
const { getWorkspaceMeta } = await import(
'@affine/electron/helper/workspace/meta'
);
const workspaceId = v4();
const workspacePath = path.join(appDataPath, 'workspaces', workspaceId);
await fs.ensureDir(workspacePath);
const sourcePath = path.join(tmpDir, 'source.db');
await fs.writeFile(sourcePath, 'test');
await fs.ensureSymlink(sourcePath, path.join(workspacePath, 'storage.db'));
expect(await getWorkspaceMeta('workspace', workspaceId)).toEqual({
id: workspaceId,
mainDBPath: path.join(workspacePath, 'storage.db'),
type: 'workspace',
});
expect(
await fs.pathExists(path.join(workspacePath, 'meta.json'))
).toBeTruthy();
await fs.pathExists(
path.join(appDataPath, 'deleted-workspaces', workspaceId)
)
).toBe(false);
});
});
test('storeWorkspaceMeta', async () => {
const { storeWorkspaceMeta } = await import(
'@affine/electron/helper/workspace/handlers'
);
const workspaceId = v4();
const workspacePath = path.join(appDataPath, 'workspaces', workspaceId);
await fs.ensureDir(workspacePath);
const meta = {
id: workspaceId,
mainDBPath: path.join(workspacePath, 'storage.db'),
type: 'workspace',
};
await storeWorkspaceMeta(workspaceId, meta);
expect(await fs.readJSON(path.join(workspacePath, 'meta.json'))).toEqual(
meta
);
});

View File

@@ -7,6 +7,7 @@ import { DesktopApiService } from '@affine/core/modules/desktop-api';
import { WorkspacePermissionService } from '@affine/core/modules/permissions';
import type { Workspace } from '@affine/core/modules/workspace';
import { useI18n } from '@affine/i18n';
import { universalId } from '@affine/nbstore';
import track from '@affine/track';
import { useLiveData, useService } from '@toeverything/infra';
import { useState } from 'react';
@@ -16,7 +17,6 @@ interface ExportPanelProps {
}
export const DesktopExportPanel = ({ workspace }: ExportPanelProps) => {
const workspaceId = workspace.id;
const workspacePermissionService = useService(
WorkspacePermissionService
).permission;
@@ -43,7 +43,14 @@ export const DesktopExportPanel = ({ workspace }: ExportPanelProps) => {
await workspace.engine.blob.fullSync();
}
const result = await desktopApi.handler?.dialog.saveDBFileAs(workspaceId);
const result = await desktopApi.handler?.dialog.saveDBFileAs(
universalId({
peer: workspace.flavour,
type: 'workspace',
id: workspace.id,
}),
workspace.name$.getValue() ?? 'db'
);
if (result?.error) {
throw new Error(result.error);
} else if (!result?.canceled) {
@@ -54,7 +61,7 @@ export const DesktopExportPanel = ({ workspace }: ExportPanelProps) => {
} finally {
setSaving(false);
}
}, [desktopApi, isOnline, saving, t, workspace, workspaceId]);
}, [desktopApi, isOnline, saving, t, workspace]);
if (isTeam && !isOwner && !isAdmin) {
return null;

View File

@@ -120,6 +120,7 @@ class CloudWorkspaceFlavourProvider implements WorkspaceFlavourProvider {
id: id,
},
});
// TODO(@forehalo): when deleting cloud workspace, should we delete the workspace folder in local?
this.revalidate();
await this.waitForLoaded();
}

View File

@@ -1,5 +1,9 @@
import { DebugLogger } from '@affine/debug';
import type { BlobStorage, DocStorage } from '@affine/nbstore';
import {
type BlobStorage,
type DocStorage,
universalId,
} from '@affine/nbstore';
import {
IndexedDBBlobStorage,
IndexedDBDocStorage,
@@ -101,11 +105,13 @@ class LocalWorkspaceFlavourProvider implements WorkspaceFlavourProvider {
async deleteWorkspace(id: string): Promise<void> {
setLocalWorkspaceIds(ids => ids.filter(x => x !== id));
// TODO(@forehalo): deleting logic for indexeddb workspaces
if (BUILD_CONFIG.isElectron) {
const electronApi = this.framework.get(DesktopApiService);
await electronApi.handler.workspace.delete(id);
await electronApi.handler.workspace.moveToTrash(
universalId({ peer: 'local', type: 'workspace', id })
);
}
// notify all browser tabs, so they can update their workspace list
this.notifyChannel.postMessage(id);
}

View File

@@ -1,11 +1,18 @@
/* auto-generated by NAPI-RS */
/* eslint-disable */
export declare class DocStorage {
constructor(path: string)
validate(): Promise<boolean>
setSpaceId(spaceId: string): Promise<void>
}
export declare class DocStoragePool {
constructor()
/** Initialize the database and run migrations. */
connect(universalId: string, path: string): Promise<void>
disconnect(universalId: string): Promise<void>
setSpaceId(universalId: string, spaceId: string): Promise<void>
disconnect(universalId: string): Promise<void>
checkpoint(universalId: string): Promise<void>
pushUpdate(universalId: string, docId: string, update: Uint8Array): Promise<Date>
getDocSnapshot(universalId: string, docId: string): Promise<DocRecord | null>
setDocSnapshot(universalId: string, snapshot: DocRecord): Promise<boolean>
@@ -39,6 +46,7 @@ export declare class SqliteConnection {
deleteBlob(key: string): Promise<void>
getBlobKeys(): Promise<Array<string>>
getUpdates(docId?: string | undefined | null): Promise<Array<UpdateRow>>
getDocTimestamps(): Promise<Array<DocTimestampRow>>
deleteUpdates(docId?: string | undefined | null): Promise<void>
getUpdatesCount(docId?: string | undefined | null): Promise<number>
getAllUpdates(): Promise<Array<UpdateRow>>
@@ -93,6 +101,11 @@ export interface DocRecord {
timestamp: Date
}
export interface DocTimestampRow {
docId?: string
timestamp: Date
}
export interface DocUpdate {
docId: string
timestamp: Date

View File

@@ -364,6 +364,7 @@ if (!nativeBinding) {
throw new Error(`Failed to load native binding`)
}
module.exports.DocStorage = nativeBinding.DocStorage
module.exports.DocStoragePool = nativeBinding.DocStoragePool
module.exports.SqliteConnection = nativeBinding.SqliteConnection
module.exports.mintChallengeResponse = nativeBinding.mintChallengeResponse

View File

@@ -9,6 +9,7 @@ use chrono::NaiveDateTime;
use napi::bindgen_prelude::*;
use napi_derive::napi;
use pool::SqliteDocStoragePool;
use storage::SqliteDocStorage;
#[cfg(feature = "use-as-lib")]
type Result<T> = anyhow::Result<T>;
@@ -99,12 +100,6 @@ impl DocStoragePool {
Ok(())
}
#[napi]
pub async fn disconnect(&self, universal_id: String) -> Result<()> {
self.pool.disconnect(universal_id).await?;
Ok(())
}
#[napi]
pub async fn set_space_id(&self, universal_id: String, space_id: String) -> Result<()> {
self
@@ -115,6 +110,18 @@ impl DocStoragePool {
Ok(())
}
#[napi]
pub async fn disconnect(&self, universal_id: String) -> Result<()> {
self.pool.disconnect(universal_id).await?;
Ok(())
}
#[napi]
pub async fn checkpoint(&self, universal_id: String) -> Result<()> {
self.pool.ensure_storage(universal_id)?.checkpoint().await?;
Ok(())
}
#[napi]
pub async fn push_update(
&self,
@@ -430,3 +437,32 @@ impl DocStoragePool {
Ok(())
}
}
#[napi]
pub struct DocStorage {
  storage: SqliteDocStorage,
}

#[napi]
impl DocStorage {
  /// Create a handle for the SQLite doc storage at `path`.
  ///
  /// The underlying database is not opened here; each operation
  /// manages its own connection lifecycle.
  #[napi(constructor, async_runtime)]
  pub fn new(path: String) -> Self {
    Self {
      storage: SqliteDocStorage::new(path),
    }
  }

  /// Returns `true` if the file at `path` is a valid v2 doc-storage database.
  #[napi]
  pub async fn validate(&self) -> Result<bool> {
    Ok(self.storage.validate().await?)
  }

  /// Rewrite the stored space id of the database.
  ///
  /// Opens the connection, writes the new id, then closes the
  /// connection so the file can be moved/copied safely afterwards.
  #[napi]
  pub async fn set_space_id(&self, space_id: String) -> Result<()> {
    self.storage.connect().await?;
    self.storage.set_space_id(space_id).await?;
    // NOTE: a leftover debug `println!` dumping doc clocks was removed here;
    // it wrote to stdout and issued an extra `get_doc_clocks` query per call.
    self.storage.close().await;
    Ok(())
  }
}

View File

@@ -46,9 +46,13 @@ impl SqliteDocStoragePool {
}
pub async fn disconnect(&self, universal_id: String) -> Result<()> {
let storage = self.ensure_storage(universal_id.to_owned())?;
storage.close().await;
self.inner.remove(&universal_id);
let entry = self.inner.entry(universal_id);
if let Entry::Occupied(entry) = entry {
let storage = entry.remove();
storage.close().await;
}
Ok(())
}
}

View File

@@ -25,6 +25,12 @@ pub struct UpdateRow {
pub doc_id: Option<String>,
}
#[napi(object)]
pub struct DocTimestampRow {
pub doc_id: Option<String>,
pub timestamp: NaiveDateTime,
}
#[napi(object)]
pub struct InsertRow {
pub doc_id: Option<String>,
@@ -146,6 +152,20 @@ impl SqliteConnection {
Ok(updates)
}
#[napi]
pub async fn get_doc_timestamps(&self) -> napi::Result<Vec<DocTimestampRow>> {
// get the greatest timestamp of each doc_id
let updates = sqlx::query_as!(
DocTimestampRow,
"SELECT doc_id, MAX(timestamp) as timestamp FROM updates GROUP BY doc_id"
)
.fetch_all(&self.pool)
.await
.map_err(anyhow::Error::from)?;
Ok(updates)
}
#[napi]
pub async fn delete_updates(&self, doc_id: Option<String>) -> napi::Result<()> {
match doc_id {

View File

@@ -21,6 +21,7 @@ test('check workspace has a DB file', async ({ appInfo, workspace }) => {
const dbPath = path.join(
appInfo.sessionData,
'workspaces',
'local',
w.meta.id,
'storage.db'
);
@@ -28,8 +29,7 @@ test('check workspace has a DB file', async ({ appInfo, workspace }) => {
expect(await fs.exists(dbPath)).toBe(true);
});
// TODO(@eyhn): fix this
test.skip('export then add', async ({ page, appInfo, workspace }) => {
test('export then add', async ({ page, appInfo, workspace }) => {
await clickNewPageButton(page);
const w = await workspace.current();