fix(electron): remove all migration code in electron (#6969)

The migration code pulls blocksuite into the dependency tree and prevents AFFiNE from running, because Electron's helper process ends up executing browser-only code that is pulled in as a side effect of importing `@blocksuite/blocks`.
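The failure can be reproduced in isolation with something like the script below; the bundle path is illustrative, not the actual build output location:

```ts
// repro.ts - e.g. run with `npx tsx repro.ts`.
// Merely importing the helper bundle throws, because a transitively included
// blocksuite module touches browser globals at module evaluation time.
try {
  await import('./resources/helper.js'); // hypothetical path to the bundled helper
} catch (err) {
  // typically something like "ReferenceError: document is not defined"
  console.error('helper failed to load:', err);
}
```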

![image.png](https://graphite-user-uploaded-assets-prod.s3.amazonaws.com/T2klNLEk0wxLh4NRDzhk/53da3972-7433-4631-b8c2-d3e322066c7d.png)

[The side-effect-free trick in the esbuild config](https://github.com/toeverything/AFFiNE/pull/6415) does not strip these imports; I'm not sure why.
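For reference, that trick is roughly of this shape: an esbuild resolve hook that force-marks the offending packages as side-effect free so the bundler may drop imports whose bindings are unused. This is a simplified sketch with illustrative names (plugin name, filter, and the recursion-guard flag are made up here), not the actual config from that PR:

```ts
import type { Plugin } from 'esbuild';

// Sketch: mark everything under @blocksuite/* as side-effect free so that
// imports whose bindings are unused can be tree-shaken away entirely.
export const blocksuiteSideEffectsFree: Plugin = {
  name: 'blocksuite-side-effects-free',
  setup(build) {
    build.onResolve({ filter: /^@blocksuite\// }, async args => {
      // guard against recursing into our own resolver
      if (args.pluginData?.skipSideEffectsPlugin) return null;
      const result = await build.resolve(args.path, {
        kind: args.kind,
        importer: args.importer,
        resolveDir: args.resolveDir,
        pluginData: { skipSideEffectsPlugin: true },
      });
      return { ...result, sideEffects: false };
    });
  },
};
```

With the package declared side-effect free, esbuild is allowed to drop the import entirely when none of its bindings are used; for whatever reason that does not happen for these helper imports.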

It has already been 6 months since we introduced the migration code into the DB layer.
Instead of tracking down the real root cause, I think it may be better to remove the migration code entirely, so that no blocksuite code appears anywhere in the import paths of helper.js.
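As a sanity check after the removal, the helper bundle's metafile can be inspected for any blocksuite inputs. A minimal sketch, assuming the helper is bundled with esbuild and using an illustrative entry point (the real build config has its own externals and plugins):

```ts
// check-helper-bundle.ts - fails if anything from @blocksuite sneaks into helper.js.
import { build } from 'esbuild';

const result = await build({
  entryPoints: ['src/helper/index.ts'], // illustrative entry; the real build config differs
  bundle: true,
  platform: 'node',
  write: false,
  metafile: true,
});

const offenders = Object.keys(result.metafile.inputs).filter(input =>
  input.includes('@blocksuite')
);

if (offenders.length > 0) {
  console.error('blocksuite leaked into the helper bundle:', offenders);
  process.exit(1);
}
console.log('helper bundle is blocksuite-free');
```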

![image.png](https://graphite-user-uploaded-assets-prod.s3.amazonaws.com/T2klNLEk0wxLh4NRDzhk/3b03522a-2a25-4bdb-8287-86a6b94623e1.png)
author pengx17
date 2024-05-17 03:01:14 +00:00
parent 10015c59b7
commit bd5023d4ab
7 changed files with 5 additions and 294 deletions

View File

@@ -1,10 +1,8 @@
 import type { InsertRow } from '@affine/native';
-import { SqliteConnection, ValidationResult } from '@affine/native';
-import { WorkspaceVersion } from '@toeverything/infra/blocksuite';
+import { SqliteConnection } from '@affine/native';
 import type { ByteKVBehavior } from '@toeverything/infra/storage';
 import { logger } from '../logger';
-import { applyGuidCompatibilityFix, migrateToLatest } from './migration';
 /**
  * A base class for SQLite DB adapter that provides basic methods around updates & blobs
@@ -15,17 +13,8 @@ export class SQLiteAdapter {
   async connectIfNeeded() {
     if (!this.db) {
-      const validation = await SqliteConnection.validate(this.path);
-      if (validation === ValidationResult.MissingVersionColumn) {
-        await migrateToLatest(this.path, WorkspaceVersion.SubDoc);
-      }
       this.db = new SqliteConnection(this.path);
       await this.db.connect();
-      const maxVersion = await this.db.getMaxVersion();
-      if (maxVersion !== WorkspaceVersion.Surface) {
-        await migrateToLatest(this.path, WorkspaceVersion.Surface);
-      }
-      await applyGuidCompatibilityFix(this.db);
       logger.info(`[SQLiteAdapter]`, 'connected:', this.path);
     }
     return this.db;

View File

@@ -1,140 +0,0 @@
import { resolve } from 'node:path';
import { SqliteConnection } from '@affine/native';
import { AffineSchemas } from '@blocksuite/blocks/schemas';
import { Schema } from '@blocksuite/store';
import {
forceUpgradePages,
migrateGuidCompatibility,
migrateToSubdoc,
WorkspaceVersion,
} from '@toeverything/infra/blocksuite';
import fs from 'fs-extra';
import { nanoid } from 'nanoid';
import { applyUpdate, Doc as YDoc, encodeStateAsUpdate } from 'yjs';
import { mainRPC } from '../main-rpc';
export const migrateToSubdocAndReplaceDatabase = async (path: string) => {
const db = new SqliteConnection(path);
await db.connect();
const rows = await db.getAllUpdates();
const originalDoc = new YDoc();
// 1. apply all updates to the root doc
rows.forEach(row => {
applyUpdate(originalDoc, row.data);
});
// 2. migrate using migrateToSubdoc
const migratedDoc = migrateToSubdoc(originalDoc);
// 3. replace db rows with the migrated doc
await replaceRows(db, migratedDoc, true);
// 4. close db
await db.close();
};
// v1 v2 -> v3
// v3 -> v4
export const migrateToLatest = async (
path: string,
version: WorkspaceVersion
) => {
const connection = new SqliteConnection(path);
await connection.connect();
if (version === WorkspaceVersion.SubDoc) {
await connection.initVersion();
} else {
await connection.setVersion(version);
}
const schema = new Schema();
schema.register(AffineSchemas);
const rootDoc = new YDoc();
const downloadBinary = async (doc: YDoc, isRoot: boolean): Promise<void> => {
const update = (
await connection.getUpdates(isRoot ? undefined : doc.guid)
).map(update => update.data);
// Buffer[] -> Uint8Array[]
const data = update.map(update => new Uint8Array(update));
data.forEach(data => {
applyUpdate(doc, data);
});
// trigger data manually
if (isRoot) {
doc.getMap('meta');
doc.getMap('spaces');
} else {
doc.getMap('blocks');
}
await Promise.all(
[...doc.subdocs].map(subdoc => {
return downloadBinary(subdoc, false);
})
);
};
await downloadBinary(rootDoc, true);
const result = await forceUpgradePages(rootDoc, schema);
if (result) {
const uploadBinary = async (doc: YDoc, isRoot: boolean) => {
await connection.replaceUpdates(doc.guid, [
{
docId: isRoot ? undefined : doc.guid,
data: encodeStateAsUpdate(doc),
},
]);
// connection..applyUpdate(encodeStateAsUpdate(doc), 'self', doc.guid)
await Promise.all(
[...doc.subdocs].map(subdoc => {
return uploadBinary(subdoc, false);
})
);
};
await uploadBinary(rootDoc, true);
}
await connection.close();
};
export const copyToTemp = async (path: string) => {
const tmpDirPath = resolve(await mainRPC.getPath('sessionData'), 'tmp');
const tmpFilePath = resolve(tmpDirPath, nanoid());
await fs.ensureDir(tmpDirPath);
await fs.copyFile(path, tmpFilePath);
return tmpFilePath;
};
async function replaceRows(
db: SqliteConnection,
doc: YDoc,
isRoot: boolean
): Promise<void> {
const migratedUpdates = encodeStateAsUpdate(doc);
const docId = isRoot ? undefined : doc.guid;
const rows = [{ data: migratedUpdates, docId: docId }];
await db.replaceUpdates(docId, rows);
await Promise.all(
[...doc.subdocs].map(async subdoc => {
await replaceRows(db, subdoc, false);
})
);
}
export const applyGuidCompatibilityFix = async (db: SqliteConnection) => {
const oldRows = await db.getUpdates(undefined);
const rootDoc = new YDoc();
oldRows.forEach(row => applyUpdate(rootDoc, row.data));
// see comments of migrateGuidCompatibility
migrateGuidCompatibility(rootDoc);
// todo: backup?
await db.replaceUpdates(undefined, [
{
docId: undefined,
data: encodeStateAsUpdate(rootDoc),
},
]);
};

View File

@@ -1,4 +1,4 @@
-import { AsyncLock } from '@toeverything/infra';
+import { AsyncLock } from '@toeverything/infra/utils';
 import { Subject } from 'rxjs';
 import { applyUpdate, Doc as YDoc } from 'yjs';

View File

@@ -1,14 +1,8 @@
 import { ValidationResult } from '@affine/native';
-import { WorkspaceVersion } from '@toeverything/infra/blocksuite';
 import fs from 'fs-extra';
 import { nanoid } from 'nanoid';
 import { ensureSQLiteDB } from '../db/ensure-db';
-import {
-  copyToTemp,
-  migrateToLatest,
-  migrateToSubdocAndReplaceDatabase,
-} from '../db/migration';
 import { logger } from '../logger';
 import { mainRPC } from '../main-rpc';
 import { storeWorkspaceMeta } from '../workspace';
@@ -195,7 +189,7 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
       ],
       message: 'Load Workspace from a AFFiNE file',
     }));
-  let originalPath = ret.filePaths?.[0];
+  const originalPath = ret.filePaths?.[0];
   if (ret.canceled || !originalPath) {
     logger.info('loadDBFile canceled');
     return { canceled: true };
@@ -211,57 +205,10 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
   const validationResult = await SqliteConnection.validate(originalPath);
-  if (validationResult === ValidationResult.MissingDocIdColumn) {
-    try {
-      const tmpDBPath = await copyToTemp(originalPath);
-      await migrateToSubdocAndReplaceDatabase(tmpDBPath);
-      originalPath = tmpDBPath;
-    } catch (error) {
-      logger.warn(`loadDBFile, migration failed: ${originalPath}`, error);
-      return { error: 'DB_FILE_MIGRATION_FAILED' };
-    }
-  }
-  if (validationResult === ValidationResult.MissingVersionColumn) {
-    try {
-      const tmpDBPath = await copyToTemp(originalPath);
-      await migrateToLatest(tmpDBPath, WorkspaceVersion.SubDoc);
-      originalPath = tmpDBPath;
-    } catch (error) {
-      logger.warn(
-        `loadDBFile, migration version column failed: ${originalPath}`,
-        error
-      );
-      return { error: 'DB_FILE_MIGRATION_FAILED' };
-    }
-  }
-  if (
-    validationResult !== ValidationResult.MissingVersionColumn &&
-    validationResult !== ValidationResult.MissingDocIdColumn &&
-    validationResult !== ValidationResult.Valid
-  ) {
+  if (validationResult !== ValidationResult.Valid) {
     return { error: 'DB_FILE_INVALID' }; // invalid db file
   }
-  const db = new SqliteConnection(originalPath);
-  try {
-    await db.connect();
-    if ((await db.getMaxVersion()) === WorkspaceVersion.DatabaseV3) {
-      const tmpDBPath = await copyToTemp(originalPath);
-      await migrateToLatest(tmpDBPath, WorkspaceVersion.SubDoc);
-      originalPath = tmpDBPath;
-    }
-  } catch (error) {
-    logger.warn(
-      `loadDBFile, migration version column failed: ${originalPath}`,
-      error
-    );
-    return { error: 'DB_FILE_MIGRATION_FAILED' };
-  } finally {
-    await db.close();
-  }
   // copy the db file to a new workspace id
   const workspaceId = nanoid(10);
   const internalFilePath = await getWorkspaceDBPath(workspaceId);

View File

@@ -1,86 +0,0 @@
import path from 'node:path';
import { SqliteConnection } from '@affine/native';
import { removeWithRetry } from '@affine-test/kit/utils/utils';
import {
afterAll,
afterEach,
beforeAll,
describe,
expect,
it,
vi,
} from 'vitest';
import { applyUpdate, Doc as YDoc } from 'yjs';
const tmpDir = path.join(__dirname, 'tmp');
const testDBFilePath = path.resolve(__dirname, 'old-db.affine');
const appDataPath = path.join(tmpDir, 'app-data');
beforeAll(() => {
vi.doMock('@affine/electron/helper/main-rpc', () => ({
mainRPC: {
getPath: async () => appDataPath,
channel: {
on: () => {},
send: () => {},
},
},
}));
});
afterEach(async () => {
await removeWithRetry(tmpDir);
});
afterAll(() => {
vi.doUnmock('@affine/electron/helper/main-rpc');
});
describe('migrateToSubdocAndReplaceDatabase', () => {
it('should migrate and replace the database', async () => {
const { copyToTemp, migrateToSubdocAndReplaceDatabase } = await import(
'@affine/electron/helper/db/migration'
);
const copiedDbFilePath = await copyToTemp(testDBFilePath);
await migrateToSubdocAndReplaceDatabase(copiedDbFilePath);
const db = new SqliteConnection(copiedDbFilePath);
await db.connect();
// check if db has two rows, one for root doc and one for subdoc
const rows = await db.getAllUpdates();
expect(rows.length).toBe(2);
const rootUpdate = rows.find(row => row.docId === undefined)!.data;
const subdocUpdate = rows.find(row => row.docId !== undefined)!.data;
expect(rootUpdate).toBeDefined();
expect(subdocUpdate).toBeDefined();
// apply updates
const rootDoc = new YDoc();
applyUpdate(rootDoc, rootUpdate);
// check if root doc has one subdoc
expect(rootDoc.subdocs.size).toBe(1);
// populates subdoc
applyUpdate(rootDoc.subdocs.values().next().value, subdocUpdate);
// check if root doc's meta is correct
const meta = rootDoc.getMap('meta').toJSON();
expect(meta.workspaceVersion).toBe(1);
expect(meta.name).toBe('hiw');
expect(meta.pages.length).toBe(1);
const pageMeta = meta.pages[0];
expect(pageMeta.title).toBe('Welcome to AFFiNEd');
// get the subdoc through id
const subDoc = rootDoc.getMap('spaces').get(pageMeta.id) as YDoc;
expect(subDoc).toEqual(rootDoc.subdocs.values().next().value);
await db.close();
});
});