Mirror of https://github.com/toeverything/AFFiNE.git, synced 2026-02-13 21:05:19 +00:00
feat(electron): backup panel (#9738)
fix PD-2071, PD-2059, PD-2069, PD-2068
@@ -176,10 +176,21 @@ export async function selectDBFileLocation(): Promise<SelectDBFileLocationResult
  * update the local workspace id list and then connect to it.
  *
  */
-export async function loadDBFile(): Promise<LoadDBFileResult> {
+export async function loadDBFile(
+  dbFilePath?: string
+): Promise<LoadDBFileResult> {
   try {
-    const ret =
+    const provided =
       getFakedResult() ??
+      (dbFilePath
+        ? {
+            filePath: dbFilePath,
+            filePaths: [dbFilePath],
+            canceled: false,
+          }
+        : undefined);
+    const ret =
+      provided ??
       (await mainRPC.showOpenDialog({
         properties: ['openFile'],
         title: 'Load Workspace',
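The new optional dbFilePath argument lets a caller that already knows which database file to restore (for example the backup panel) skip the file picker, while calling loadDBFile() with no argument keeps the original dialog-driven flow. A minimal usage sketch under that reading; the backup path is illustrative, and since the concrete fields of LoadDBFileResult are not shown in this diff, the result is only logged:

// restore a known backup database directly, bypassing the open dialog
const fromBackup = await loadDBFile('/path/to/deleted-workspace/storage.db');
logger.info('loadDBFile with explicit path', fromBackup);

// unchanged behaviour: prompt the user with the native open dialog
const fromDialog = await loadDBFile();
logger.info('loadDBFile via dialog', fromDialog);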
@@ -249,6 +260,12 @@ async function cpV1DBFile(
     return { error: 'DB_FILE_INVALID' }; // invalid db file
   }
 
+  // checkout to make sure wal is flushed
+  const connection = new SqliteConnection(originalPath);
+  await connection.connect();
+  await connection.checkpoint();
+  await connection.close();
+
   const internalFilePath = await getWorkspaceDBPath('workspace', workspaceId);
 
   await fs.ensureDir(await getWorkspacesBasePath());
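The checkpoint before copying matters because, per the comment above, the v1 database keeps recent writes in a SQLite -wal sidecar file; copying only the main .db file without checkpointing first would silently drop those writes. A sketch of the same flush-then-copy pattern as a standalone helper, assuming the SqliteConnection API used above and the fs-extra import already present in this file; the destination path is the caller's choice:

// hypothetical helper, not part of this commit
async function copySqliteWithWalFlushed(src: string, dest: string) {
  // merge any pending -wal contents back into the main database file
  const connection = new SqliteConnection(src);
  await connection.connect();
  await connection.checkpoint();
  await connection.close();
  // only now is the single file a complete snapshot that is safe to copy
  await fs.copy(src, dest);
}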
@@ -6,8 +6,8 @@ import {
 } from './dialog';
 
 export const dialogHandlers = {
-  loadDBFile: async () => {
-    return loadDBFile();
+  loadDBFile: async (dbFilePath?: string) => {
+    return loadDBFile(dbFilePath);
   },
   saveDBFileAs: async (universalId: string, name: string) => {
     return saveDBFileAs(universalId, name);
@@ -64,6 +64,7 @@ export async function ensureSQLiteDisconnected(
   const db = await ensureSQLiteDB(spaceType, id);
 
   if (db) {
+    await db.checkpoint();
     await db.destroy();
   }
 }
@@ -10,7 +10,7 @@ import { mergeUpdate } from './merge-update';
 
 const TRIM_SIZE = 1;
 
-export class WorkspaceSQLiteDB {
+export class WorkspaceSQLiteDB implements AsyncDisposable {
   lock = new AsyncLock();
   update$ = new Subject<void>();
   adapter = new SQLiteAdapter(this.path);
@@ -32,17 +32,32 @@ export class WorkspaceSQLiteDB {
     this.update$.complete();
   }
 
+  [Symbol.asyncDispose] = async () => {
+    await this.destroy();
+  };
+
   private readonly toDBDocId = (docId: string) => {
     return this.workspaceId === docId ? undefined : docId;
   };
 
-  getWorkspaceName = async () => {
+  getWorkspaceMeta = async () => {
     const ydoc = new YDoc();
     const updates = await this.adapter.getUpdates();
     updates.forEach(update => {
       applyUpdate(ydoc, update.data);
     });
-    return ydoc.getMap('meta').get('name') as string;
+    logger.log(
+      `ydoc.getMap('meta').get('name')`,
+      ydoc.getMap('meta').get('name'),
+      this.path,
+      updates.length
+    );
+    return ydoc.getMap('meta').toJSON();
+  };
+
+  getWorkspaceName = async () => {
+    const meta = await this.getWorkspaceMeta();
+    return meta.name;
   };
 
   async init() {
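Implementing AsyncDisposable means WorkspaceSQLiteDB can be bound with TypeScript's explicit resource management (TypeScript 5.2+): an await using declaration invokes [Symbol.asyncDispose], and therefore destroy(), when the enclosing scope exits, even if an error is thrown. That is exactly how getWorkspaceDocMetaV1 later in this diff consumes the class; a minimal sketch with illustrative arguments:

async function readWorkspaceName(dbPath: string, workspaceId: string) {
  // destroy() runs automatically when this scope exits, success or failure
  await using db = new WorkspaceSQLiteDB(dbPath, workspaceId);
  await db.init();
  return await db.getWorkspaceName();
}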
@@ -1,11 +1,18 @@
 import path from 'node:path';
 
-import { parseUniversalId } from '@affine/nbstore';
+import { DocStorage } from '@affine/native';
+import {
+  parseUniversalId,
+  universalId as generateUniversalId,
+} from '@affine/nbstore';
 import fs from 'fs-extra';
+import { applyUpdate, Doc as YDoc } from 'yjs';
 
+import { isWindows } from '../../shared/utils';
 import { logger } from '../logger';
 import { getDocStoragePool } from '../nbstore';
 import { ensureSQLiteDisconnected } from '../nbstore/v1/ensure-db';
+import { WorkspaceSQLiteDB } from '../nbstore/v1/workspace-db-adapter';
 import type { WorkspaceMeta } from '../type';
 import {
   getDeletedWorkspacesBasePath,
@@ -50,12 +57,28 @@ export async function trashWorkspace(universalId: string) {
   await deleteWorkspaceV1(id);
 
   const dbPath = await getSpaceDBPath(peer, type, id);
-  const movedPath = path.join(await getDeletedWorkspacesBasePath(), `${id}`);
+  const basePath = await getDeletedWorkspacesBasePath();
+  const movedPath = path.join(basePath, `${id}`);
   try {
-    await getDocStoragePool().disconnect(universalId);
-    return await fs.move(path.dirname(dbPath), movedPath, {
-      overwrite: true,
-    });
+    const storage = new DocStorage(dbPath);
+    if (await storage.validate()) {
+      const pool = getDocStoragePool();
+      await pool.checkpoint(universalId);
+      await pool.disconnect(universalId);
+    }
+    await fs.ensureDir(movedPath);
+    // todo(@pengx17): it seems the db file is still being used at the point
+    // on windows so that it cannot be moved. we will fallback to copy the dir instead.
+    if (isWindows()) {
+      await fs.copy(path.dirname(dbPath), movedPath, {
+        overwrite: true,
+      });
+      await fs.rmdir(path.dirname(dbPath), { recursive: true });
+    } else {
+      return await fs.move(path.dirname(dbPath), movedPath, {
+        overwrite: true,
+      });
+    }
   } catch (error) {
     logger.error('trashWorkspace', error);
   }
@@ -79,3 +102,139 @@ export async function storeWorkspaceMeta(
     logger.error('storeWorkspaceMeta failed', err);
   }
 }
+
+type WorkspaceDocMeta = {
+  id: string;
+  name: string;
+  avatar: Uint8Array | null;
+  fileSize: number;
+  updatedAt: Date;
+  createdAt: Date;
+  docCount: number;
+  dbPath: string;
+};
+
+async function getWorkspaceDocMetaV1(
+  workspaceId: string,
+  dbPath: string
+): Promise<WorkspaceDocMeta | null> {
+  try {
+    await using db = new WorkspaceSQLiteDB(dbPath, workspaceId);
+    await db.init();
+    await db.checkpoint();
+    const meta = await db.getWorkspaceMeta();
+    const dbFileSize = await fs.stat(dbPath);
+    return {
+      id: workspaceId,
+      name: meta.name,
+      avatar: await db.getBlob(meta.avatar),
+      fileSize: dbFileSize.size,
+      updatedAt: dbFileSize.mtime,
+      createdAt: dbFileSize.birthtime,
+      docCount: meta.pages.length,
+      dbPath,
+    };
+  } catch {
+    // ignore
+  }
+  return null;
+}
+
+async function getWorkspaceDocMeta(
+  workspaceId: string,
+  dbPath: string
+): Promise<WorkspaceDocMeta | null> {
+  const pool = getDocStoragePool();
+  const universalId = generateUniversalId({
+    peer: 'deleted-local',
+    type: 'workspace',
+    id: workspaceId,
+  });
+  try {
+    await pool.connect(universalId, dbPath);
+    await pool.checkpoint(universalId);
+    const snapshot = await pool.getDocSnapshot(universalId, workspaceId);
+    const pendingUpdates = await pool.getDocUpdates(universalId, workspaceId);
+    if (snapshot) {
+      const updates = snapshot.bin;
+      const ydoc = new YDoc();
+      applyUpdate(ydoc, updates);
+      pendingUpdates.forEach(update => {
+        applyUpdate(ydoc, update.bin);
+      });
+      const meta = ydoc.getMap('meta').toJSON();
+      const dbFileStat = await fs.stat(dbPath);
+      const blob = meta.avatar
+        ? await pool.getBlob(universalId, meta.avatar)
+        : null;
+      return {
+        id: workspaceId,
+        name: meta.name,
+        avatar: blob ? blob.data : null,
+        fileSize: dbFileStat.size,
+        updatedAt: dbFileStat.mtime,
+        createdAt: dbFileStat.birthtime,
+        docCount: meta.pages.length,
+        dbPath,
+      };
+    }
+  } catch {
+    // try using v1
+    return await getWorkspaceDocMetaV1(workspaceId, dbPath);
+  } finally {
+    await pool.disconnect(universalId);
+  }
+  return null;
+}
+
+export async function getDeletedWorkspaces() {
+  const basePath = await getDeletedWorkspacesBasePath();
+  const directories = await fs.readdir(basePath);
+  const workspaceEntries = await Promise.all(
+    directories.map(async dir => {
+      const stats = await fs.stat(path.join(basePath, dir));
+      if (!stats.isDirectory()) {
+        return null;
+      }
+      const dbfileStats = await fs.stat(path.join(basePath, dir, 'storage.db'));
+      return {
+        id: dir,
+        mtime: new Date(dbfileStats.mtime),
+      };
+    })
+  );
+
+  const workspaceIds = workspaceEntries
+    .filter(v => v !== null)
+    .sort((a, b) => b.mtime.getTime() - a.mtime.getTime())
+    .map(entry => entry.id);
+
+  const items: WorkspaceDocMeta[] = [];
+
+  // todo(@pengx17): add cursor based pagination
+  for (const id of workspaceIds) {
+    const meta = await getWorkspaceDocMeta(
+      id,
+      path.join(basePath, id, 'storage.db')
+    );
+    if (meta) {
+      items.push(meta);
+    } else {
+      logger.warn('getDeletedWorkspaces', `No meta found for ${id}`);
+    }
+  }
+
+  return {
+    items: items,
+  };
+}
+
+export async function deleteBackupWorkspace(id: string) {
+  const basePath = await getDeletedWorkspacesBasePath();
+  const workspacePath = path.join(basePath, id);
+  await fs.rmdir(workspacePath, { recursive: true });
+  logger.info(
+    'deleteBackupWorkspace',
+    `Deleted backup workspace: ${workspacePath}`
+  );
+}
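Together, getDeletedWorkspaces and deleteBackupWorkspace give the backup panel its data source and its destructive action. A hypothetical main-process consumer, using only the WorkspaceDocMeta fields defined above (items arrive sorted most recently updated first):

const { items } = await getDeletedWorkspaces();
for (const meta of items) {
  logger.info(
    'backup workspace',
    meta.name,
    `${meta.docCount} docs`,
    `${meta.fileSize} bytes`,
    meta.updatedAt.toISOString()
  );
}
// permanently remove the most recently updated backup (illustrative only)
if (items.length > 0) {
  await deleteBackupWorkspace(items[0].id);
}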
@@ -1,5 +1,10 @@
 import type { MainEventRegister } from '../type';
-import { deleteWorkspace, trashWorkspace } from './handlers';
+import {
+  deleteBackupWorkspace,
+  deleteWorkspace,
+  getDeletedWorkspaces,
+  trashWorkspace,
+} from './handlers';
 
 export * from './handlers';
 export * from './subjects';
@@ -9,4 +14,8 @@ export const workspaceEvents = {} as Record<string, MainEventRegister>;
 export const workspaceHandlers = {
   delete: deleteWorkspace,
   moveToTrash: trashWorkspace,
+  getBackupWorkspaces: async () => {
+    return getDeletedWorkspaces();
+  },
+  deleteBackupWorkspace: async (id: string) => deleteBackupWorkspace(id),
 };
@@ -51,6 +51,7 @@ class HelperProcessManager {
     const helperProcess = utilityProcess.fork(HELPER_PROCESS_PATH, [], {
+      // todo: port number should not being used
       execArgv: isDev ? ['--inspect=40894'] : [],
       serviceName: 'affine-helper',
     });
     this.#process = helperProcess;
     this.ready = new Promise((resolve, reject) => {