Mirror of https://github.com/toeverything/AFFiNE.git (synced 2026-02-04 08:38:34 +00:00)

feat(electron): use affine native (#2329)
6
.github/actions/build-rust/action.yml
vendored
@@ -29,13 +29,15 @@ runs:
if: ${{ inputs.target != 'x86_64-unknown-linux-gnu' && inputs.target != 'aarch64-unknown-linux-gnu' }}
shell: bash
run: yarn workspace @affine/native build --target ${{ inputs.target }}
env:
CARGO_BUILD_INCREMENTAL: 'false'

- name: Build
if: ${{ inputs.target == 'x86_64-unknown-linux-gnu' }}
uses: addnab/docker-run-action@v3
with:
image: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian
options: --user 0:0 -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build
options: --user 0:0 -e CARGO_BUILD_INCREMENTAL=false -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build
run: yarn workspace @affine/native build --target ${{ inputs.target }}

- name: Build
@@ -43,5 +45,5 @@ runs:
uses: addnab/docker-run-action@v3
with:
image: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian-aarch64
options: --user 0:0 -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build
options: --user 0:0 -e CARGO_BUILD_INCREMENTAL=false -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build
run: yarn workspace @affine/native build --target ${{ inputs.target }}
45
.github/workflows/build.yml
vendored
@@ -21,7 +21,6 @@ on:
- '!.github/workflows/build.yml'

env:
CARGO_BUILD_INCREMENTAL: 'false'
DEBUG: napi:*
APP_NAME: affine
MACOSX_DEPLOYMENT_TARGET: '10.13'
@@ -55,23 +54,6 @@ jobs:
path: ./packages/component/storybook-static
if-no-files-found: error

build-electron:
name: Build @affine/electron
runs-on: ubuntu-latest
environment: development
steps:
- uses: actions/checkout@v3
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Electron
working-directory: apps/electron
run: yarn build-layers
- name: Upload Ubuntu desktop artifact
uses: actions/upload-artifact@v3
with:
name: affine-ubuntu
path: ./apps/electron/dist

build:
name: Build @affine/web
runs-on: ubuntu-latest
@@ -322,7 +304,7 @@ jobs:
target: x86_64-pc-windows-msvc,
test: true,
}
needs: [build, build-electron]
needs: [build]
steps:
- uses: actions/checkout@v3
- name: Setup Node.js
@@ -333,11 +315,17 @@ jobs:
uses: ./.github/actions/build-rust
with:
target: ${{ matrix.spec.target }}
- name: Download Ubuntu desktop artifact
uses: actions/download-artifact@v3
with:
name: affine-ubuntu
path: ./apps/electron/dist
- name: Run unit tests
if: ${{ matrix.spec.test }}
shell: bash
run: |
rm -rf apps/electron/node_modules/better-sqlite3/build
yarn --cwd apps/electron/node_modules/better-sqlite3 run install
yarn test:unit
env:
NATIVE_TEST: 'true'
- name: Build layers
run: yarn workspace @affine/electron build-layers

- name: Download static resource artifact
uses: actions/download-artifact@v3
@@ -346,8 +334,10 @@ jobs:
path: ./apps/electron/resources/web-static

- name: Rebuild Electron dependences
run: yarn rebuild:for-electron
working-directory: apps/electron
shell: bash
run: |
rm -rf apps/electron/node_modules/better-sqlite3/build
yarn workspace @affine/electron rebuild:for-electron

- name: Run desktop tests
if: ${{ matrix.spec.test && matrix.spec.os == 'ubuntu-latest' }}
@@ -358,8 +348,7 @@ jobs:

- name: Run desktop tests
if: ${{ matrix.spec.test && matrix.spec.os != 'ubuntu-latest' }}
run: yarn test
working-directory: apps/electron
run: yarn workspace @affine/electron test
env:
COVERAGE: true
1
.github/workflows/release-desktop-app.yml
vendored
@@ -36,7 +36,6 @@ concurrency:

env:
BUILD_TYPE: ${{ github.event.inputs.build-type }}
CARGO_BUILD_INCREMENTAL: 'false'
DEBUG: napi:*
APP_NAME: affine
MACOSX_DEPLOYMENT_TARGET: '10.13'
3
.vscode/settings.json
vendored
@@ -37,5 +37,6 @@
"apps/electron/layers/**/*.spec.ts",
"tests/unit/**/*.spec.ts",
"tests/unit/**/*.spec.tsx"
]
],
"deepscan.enable": true
}
44
Cargo.lock
generated
@@ -11,6 +11,7 @@ dependencies = [
|
||||
"napi-build",
|
||||
"napi-derive",
|
||||
"notify",
|
||||
"once_cell",
|
||||
"parking_lot",
|
||||
"serde",
|
||||
"serde_json",
|
||||
@@ -51,6 +52,12 @@ version = "2.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "24a6904aef64d73cf10ab17ebace7befb918b82164785cb89907993be7f83813"
|
||||
|
||||
[[package]]
|
||||
name = "bytes"
|
||||
version = "1.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be"
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
version = "1.0.0"
|
||||
@@ -498,12 +505,31 @@ dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "signal-hook-registry"
|
||||
version = "1.4.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1"
|
||||
dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "smallvec"
|
||||
version = "1.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
|
||||
|
||||
[[package]]
|
||||
name = "socket2"
|
||||
version = "0.4.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "1.0.109"
|
||||
@@ -533,11 +559,29 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c3c786bf8134e5a3a166db9b29ab8f48134739014a3eca7bc6bfa95d673b136f"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
"bytes",
|
||||
"libc",
|
||||
"mio",
|
||||
"num_cpus",
|
||||
"parking_lot",
|
||||
"pin-project-lite",
|
||||
"signal-hook-registry",
|
||||
"socket2",
|
||||
"tokio-macros",
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tokio-macros"
|
||||
version = "2.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.15",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "unicode-ident"
|
||||
version = "1.0.8"
|
||||
|
||||
@@ -2,6 +2,8 @@ import assert from 'node:assert';
|
||||
import path from 'node:path';
|
||||
|
||||
import fs from 'fs-extra';
|
||||
import type { Subscription } from 'rxjs';
|
||||
import { v4 } from 'uuid';
|
||||
import { afterEach, beforeEach, describe, expect, test, vi } from 'vitest';
|
||||
import * as Y from 'yjs';
|
||||
|
||||
@@ -99,6 +101,11 @@ const electronModule = {
|
||||
handlers.push(callback);
|
||||
registeredHandlers.set(name, handlers);
|
||||
},
|
||||
addEventListener: (...args: any[]) => {
|
||||
// @ts-ignore
|
||||
electronModule.app.on(...args);
|
||||
},
|
||||
removeEventListener: () => {},
|
||||
},
|
||||
BrowserWindow: {
|
||||
getAllWindows: () => {
|
||||
@@ -116,6 +123,8 @@ vi.doMock('electron', () => {
|
||||
return electronModule;
|
||||
});
|
||||
|
||||
let connectableSubscription: Subscription;
|
||||
|
||||
beforeEach(async () => {
|
||||
const { registerHandlers } = await import('../register');
|
||||
registerHandlers();
|
||||
@@ -123,20 +132,24 @@ beforeEach(async () => {
|
||||
// should also register events
|
||||
const { registerEvents } = await import('../../events');
|
||||
registerEvents();
|
||||
await fs.mkdirp(SESSION_DATA_PATH);
|
||||
const { database$ } = await import('../db/ensure-db');
|
||||
|
||||
connectableSubscription = database$.connect();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
const { cleanupSQLiteDBs } = await import('../db/ensure-db');
|
||||
await cleanupSQLiteDBs();
|
||||
await fs.remove(SESSION_DATA_PATH);
|
||||
|
||||
// reset registered handlers
|
||||
registeredHandlers.get('before-quit')?.forEach(fn => fn());
|
||||
|
||||
connectableSubscription.unsubscribe();
|
||||
|
||||
await fs.remove(SESSION_DATA_PATH);
|
||||
});
|
||||
|
||||
describe('ensureSQLiteDB', () => {
|
||||
test('should create db file on connection if it does not exist', async () => {
|
||||
const id = 'test-workspace-id';
|
||||
const id = v4();
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
const workspaceDB = await ensureSQLiteDB(id);
|
||||
const file = workspaceDB.path;
|
||||
@@ -146,70 +159,76 @@ describe('ensureSQLiteDB', () => {
|
||||
|
||||
test('when db file is removed', async () => {
|
||||
// stub webContents.send
|
||||
const sendStub = vi.fn();
|
||||
browserWindow.webContents.send = sendStub;
|
||||
const id = 'test-workspace-id';
|
||||
const sendSpy = vi.spyOn(browserWindow.webContents, 'send');
|
||||
const id = v4();
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
let workspaceDB = await ensureSQLiteDB(id);
|
||||
const file = workspaceDB.path;
|
||||
const fileExists = await fs.pathExists(file);
|
||||
expect(fileExists).toBe(true);
|
||||
|
||||
// Can't remove file on Windows, because the sqlite is still holding the file handle
|
||||
if (process.platform === 'win32') {
|
||||
return;
|
||||
}
|
||||
|
||||
await fs.remove(file);
|
||||
|
||||
// wait for 1000ms for file watcher to detect file removal
|
||||
// wait for 2000ms for file watcher to detect file removal
|
||||
await delay(2000);
|
||||
|
||||
expect(sendStub).toBeCalledWith('db:onDBFileMissing', id);
|
||||
expect(sendSpy).toBeCalledWith('db:onDBFileMissing', id);
|
||||
|
||||
// ensureSQLiteDB should recreate the db file
|
||||
workspaceDB = await ensureSQLiteDB(id);
|
||||
const fileExists2 = await fs.pathExists(file);
|
||||
expect(fileExists2).toBe(true);
|
||||
sendSpy.mockRestore();
|
||||
});
|
||||
|
||||
test('when db file is updated', async () => {
|
||||
// stub webContents.send
|
||||
const sendStub = vi.fn();
|
||||
browserWindow.webContents.send = sendStub;
|
||||
const id = 'test-workspace-id';
|
||||
const id = v4();
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
const { dbSubjects } = await import('../../events/db');
|
||||
const workspaceDB = await ensureSQLiteDB(id);
|
||||
const file = workspaceDB.path;
|
||||
const fileExists = await fs.pathExists(file);
|
||||
expect(fileExists).toBe(true);
|
||||
|
||||
// wait to make sure
|
||||
await delay(500);
|
||||
|
||||
const dbUpdateSpy = vi.spyOn(dbSubjects.dbFileUpdate, 'next');
|
||||
await delay(100);
|
||||
// writes some data to the db file
|
||||
await fs.appendFile(file, 'random-data', { encoding: 'binary' });
|
||||
// write again
|
||||
await fs.appendFile(file, 'random-data', { encoding: 'binary' });
|
||||
|
||||
// wait for 200ms for file watcher to detect file change
|
||||
// wait for 2000ms for file watcher to detect file change
|
||||
await delay(2000);
|
||||
|
||||
expect(sendStub).toBeCalledWith('db:onDBFileUpdate', id);
|
||||
expect(dbUpdateSpy).toBeCalledWith(id);
|
||||
dbUpdateSpy.mockRestore();
|
||||
});
|
||||
});
|
||||
|
||||
describe('workspace handlers', () => {
|
||||
test('list all workspace ids', async () => {
|
||||
const ids = ['test-workspace-id', 'test-workspace-id-2'];
|
||||
const ids = [v4(), v4()];
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
await Promise.all(ids.map(id => ensureSQLiteDB(id)));
|
||||
const list = await dispatch('workspace', 'list');
|
||||
expect(list.map(([id]) => id)).toEqual(ids);
|
||||
expect(list.map(([id]) => id).sort()).toEqual(ids.sort());
|
||||
});
|
||||
|
||||
test('delete workspace', async () => {
|
||||
const ids = ['test-workspace-id', 'test-workspace-id-2'];
|
||||
// @TODO dispatch is hanging on Windows
|
||||
if (process.platform === 'win32') {
|
||||
return;
|
||||
}
|
||||
const ids = [v4(), v4()];
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
await Promise.all(ids.map(id => ensureSQLiteDB(id)));
|
||||
await dispatch('workspace', 'delete', 'test-workspace-id-2');
|
||||
await dispatch('workspace', 'delete', ids[1]);
|
||||
const list = await dispatch('workspace', 'list');
|
||||
expect(list.map(([id]) => id)).toEqual(['test-workspace-id']);
|
||||
expect(list.map(([id]) => id)).toEqual([ids[0]]);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -244,7 +263,7 @@ describe('UI handlers', () => {
|
||||
|
||||
describe('db handlers', () => {
|
||||
test('apply doc and get doc updates', async () => {
|
||||
const workspaceId = 'test-workspace-id';
|
||||
const workspaceId = v4();
|
||||
const bin = await dispatch('db', 'getDocAsUpdates', workspaceId);
|
||||
// ? is this a good test?
|
||||
expect(bin.every((byte: number) => byte === 0)).toBe(true);
|
||||
@@ -264,13 +283,13 @@ describe('db handlers', () => {
|
||||
});
|
||||
|
||||
test('get non existent blob', async () => {
|
||||
const workspaceId = 'test-workspace-id';
|
||||
const workspaceId = v4();
|
||||
const bin = await dispatch('db', 'getBlob', workspaceId, 'non-existent-id');
|
||||
expect(bin).toBeNull();
|
||||
});
|
||||
|
||||
test('list blobs (empty)', async () => {
|
||||
const workspaceId = 'test-workspace-id';
|
||||
const workspaceId = v4();
|
||||
const list = await dispatch('db', 'getPersistedBlobs', workspaceId);
|
||||
expect(list).toEqual([]);
|
||||
});
|
||||
@@ -318,7 +337,7 @@ describe('dialog handlers', () => {
|
||||
const mockShowItemInFolder = vi.fn();
|
||||
electronModule.shell.showItemInFolder = mockShowItemInFolder;
|
||||
|
||||
const id = 'test-workspace-id';
|
||||
const id = v4();
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
const db = await ensureSQLiteDB(id);
|
||||
|
||||
@@ -334,13 +353,15 @@ describe('dialog handlers', () => {
|
||||
electronModule.dialog.showSaveDialog = mockShowSaveDialog;
|
||||
electronModule.shell.showItemInFolder = mockShowItemInFolder;
|
||||
|
||||
const id = 'test-workspace-id';
|
||||
const id = v4();
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
await ensureSQLiteDB(id);
|
||||
|
||||
await dispatch('dialog', 'saveDBFileAs', id);
|
||||
expect(mockShowSaveDialog).toBeCalled();
|
||||
expect(mockShowItemInFolder).not.toBeCalled();
|
||||
electronModule.dialog = {};
|
||||
electronModule.shell = {};
|
||||
});
|
||||
|
||||
test('saveDBFileAs', async () => {
|
||||
@@ -352,7 +373,7 @@ describe('dialog handlers', () => {
|
||||
electronModule.dialog.showSaveDialog = mockShowSaveDialog;
|
||||
electronModule.shell.showItemInFolder = mockShowItemInFolder;
|
||||
|
||||
const id = 'test-workspace-id';
|
||||
const id = v4();
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
await ensureSQLiteDB(id);
|
||||
|
||||
@@ -403,11 +424,13 @@ describe('dialog handlers', () => {
|
||||
const res = await dispatch('dialog', 'loadDBFile');
|
||||
expect(mockShowOpenDialog).toBeCalled();
|
||||
expect(res.error).toBe('DB_FILE_INVALID');
|
||||
|
||||
electronModule.dialog = {};
|
||||
});
|
||||
|
||||
test('loadDBFile', async () => {
|
||||
// we use ensureSQLiteDB to create a valid db file
|
||||
const id = 'test-workspace-id';
|
||||
const id = v4();
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
const db = await ensureSQLiteDB(id);
|
||||
|
||||
@@ -417,6 +440,11 @@ describe('dialog handlers', () => {
|
||||
await fs.ensureDir(basePath);
|
||||
await fs.copyFile(db.path, originDBFilePath);
|
||||
|
||||
// on Windows, we skip this test because we can't delete the db file
|
||||
if (process.platform === 'win32') {
|
||||
return;
|
||||
}
|
||||
|
||||
// remove db
|
||||
await fs.remove(db.path);
|
||||
|
||||
@@ -440,19 +468,19 @@ describe('dialog handlers', () => {
|
||||
});
|
||||
|
||||
test('moveDBFile', async () => {
|
||||
const newPath = path.join(SESSION_DATA_PATH, 'affine-test', 'xxx');
|
||||
const newPath = path.join(SESSION_DATA_PATH, 'xxx');
|
||||
const mockShowSaveDialog = vi.fn(() => {
|
||||
return { filePath: newPath };
|
||||
}) as any;
|
||||
electronModule.dialog.showSaveDialog = mockShowSaveDialog;
|
||||
|
||||
const id = 'test-workspace-id';
|
||||
const id = v4();
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
await ensureSQLiteDB(id);
|
||||
|
||||
const res = await dispatch('dialog', 'moveDBFile', id);
|
||||
expect(mockShowSaveDialog).toBeCalled();
|
||||
expect(res.filePath).toBe(newPath);
|
||||
electronModule.dialog = {};
|
||||
});
|
||||
|
||||
test('moveDBFile (skipped)', async () => {
|
||||
@@ -461,12 +489,13 @@ describe('dialog handlers', () => {
|
||||
}) as any;
|
||||
electronModule.dialog.showSaveDialog = mockShowSaveDialog;
|
||||
|
||||
const id = 'test-workspace-id';
|
||||
const id = v4();
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
await ensureSQLiteDB(id);
|
||||
|
||||
const res = await dispatch('dialog', 'moveDBFile', id);
|
||||
expect(mockShowSaveDialog).toBeCalled();
|
||||
expect(res.filePath).toBe(undefined);
|
||||
electronModule.dialog = {};
|
||||
});
|
||||
});
|
||||
|
||||
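The spec changes above swap hard-coded workspace ids for `v4()` and replace the hand-rolled `sendStub` with `vi.spyOn`, so parallel runs cannot collide on the same SQLite file and the original `webContents.send` is restored after each test. A minimal sketch of that idiom, assuming the `browserWindow` test double and the `db:onDBFileMissing` channel from the spec above:

```ts
import { v4 } from 'uuid';
import { expect, test, vi } from 'vitest';

test('db file removal notifies the renderer', async () => {
  // a random id per test run avoids clashing with leftover db files
  const workspaceId = v4();

  // spy instead of reassigning webContents.send, so the original is kept
  const sendSpy = vi.spyOn(browserWindow.webContents, 'send');

  // ... remove the db file and wait for the watcher here ...

  expect(sendSpy).toBeCalledWith('db:onDBFileMissing', workspaceId);
  sendSpy.mockRestore();
});
```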
@@ -1,94 +1,160 @@
|
||||
import { watch } from 'chokidar';
|
||||
import type { NotifyEvent } from '@affine/native/event';
|
||||
import { createFSWatcher } from '@affine/native/fs-watcher';
|
||||
import { app } from 'electron';
|
||||
import {
|
||||
connectable,
|
||||
defer,
|
||||
from,
|
||||
fromEvent,
|
||||
identity,
|
||||
lastValueFrom,
|
||||
Observable,
|
||||
ReplaySubject,
|
||||
Subject,
|
||||
} from 'rxjs';
|
||||
import {
|
||||
debounceTime,
|
||||
exhaustMap,
|
||||
filter,
|
||||
groupBy,
|
||||
ignoreElements,
|
||||
mergeMap,
|
||||
shareReplay,
|
||||
startWith,
|
||||
switchMap,
|
||||
take,
|
||||
takeUntil,
|
||||
tap,
|
||||
} from 'rxjs/operators';
|
||||
|
||||
import { appContext } from '../../context';
|
||||
import { subjects } from '../../events';
|
||||
import { logger } from '../../logger';
|
||||
import { debounce, ts } from '../../utils';
|
||||
import { ts } from '../../utils';
|
||||
import type { WorkspaceSQLiteDB } from './sqlite';
|
||||
import { openWorkspaceDatabase } from './sqlite';
|
||||
|
||||
const dbMapping = new Map<string, Promise<WorkspaceSQLiteDB>>();
|
||||
const dbWatchers = new Map<string, () => void>();
|
||||
const databaseInput$ = new Subject<string>();
|
||||
export const databaseConnector$ = new ReplaySubject<WorkspaceSQLiteDB>();
|
||||
|
||||
const groupedDatabaseInput$ = databaseInput$.pipe(groupBy(identity));
|
||||
|
||||
export const database$ = connectable(
|
||||
groupedDatabaseInput$.pipe(
|
||||
mergeMap(workspaceDatabase$ =>
|
||||
workspaceDatabase$.pipe(
|
||||
// only open the first db with the same workspaceId, and emit it to the downstream
|
||||
exhaustMap(workspaceId => {
|
||||
logger.info('[ensureSQLiteDB] open db connection', workspaceId);
|
||||
return from(openWorkspaceDatabase(appContext, workspaceId)).pipe(
|
||||
switchMap(db => {
|
||||
return startWatchingDBFile(db).pipe(
|
||||
// ignore all events and only emit the db to the downstream
|
||||
ignoreElements(),
|
||||
startWith(db)
|
||||
);
|
||||
})
|
||||
);
|
||||
}),
|
||||
shareReplay(1)
|
||||
)
|
||||
),
|
||||
tap({
|
||||
complete: () => {
|
||||
logger.info('[FSWatcher] close all watchers');
|
||||
createFSWatcher().close();
|
||||
},
|
||||
})
|
||||
),
|
||||
{
|
||||
connector: () => databaseConnector$,
|
||||
resetOnDisconnect: true,
|
||||
}
|
||||
);
|
||||
|
||||
export const databaseConnectableSubscription = database$.connect();
|
||||
|
||||
// 1. File delete
|
||||
// 2. File move
|
||||
// - on Linux, it's `type: { modify: { kind: 'rename', mode: 'from' } }`
|
||||
// - on Windows, it's `type: { remove: { kind: 'any' } }`
|
||||
// - on macOS, it's `type: { modify: { kind: 'rename', mode: 'any' } }`
|
||||
export function isRemoveOrMoveEvent(event: NotifyEvent) {
|
||||
return (
|
||||
typeof event.type === 'object' &&
|
||||
('remove' in event.type ||
|
||||
('modify' in event.type &&
|
||||
event.type.modify.kind === 'rename' &&
|
||||
(event.type.modify.mode === 'from' ||
|
||||
event.type.modify.mode === 'any')))
|
||||
);
|
||||
}
|
||||
|
||||
// if we removed the file, we will stop watching it
|
||||
function startWatchingDBFile(db: WorkspaceSQLiteDB) {
|
||||
if (dbWatchers.has(db.workspaceId)) {
|
||||
return dbWatchers.get(db.workspaceId);
|
||||
}
|
||||
logger.info('watch db file', db.path);
|
||||
const watcher = watch(db.path);
|
||||
|
||||
const debounceOnChange = debounce(() => {
|
||||
logger.info(
|
||||
'db file changed on disk',
|
||||
db.workspaceId,
|
||||
ts() - db.lastUpdateTime,
|
||||
'ms'
|
||||
const FSWatcher = createFSWatcher();
|
||||
return new Observable<NotifyEvent>(subscriber => {
|
||||
logger.info('[FSWatcher] start watching db file', db.workspaceId);
|
||||
const subscription = FSWatcher.watch(db.path, {
|
||||
recursive: false,
|
||||
}).subscribe(
|
||||
event => {
|
||||
logger.info('[FSWatcher]', event);
|
||||
subscriber.next(event);
|
||||
// remove file or move file, complete the observable and close db
|
||||
if (isRemoveOrMoveEvent(event)) {
|
||||
subscriber.complete();
|
||||
}
|
||||
},
|
||||
err => {
|
||||
subscriber.error(err);
|
||||
}
|
||||
);
|
||||
// reconnect db
|
||||
db.reconnectDB();
|
||||
subjects.db.dbFileUpdate.next(db.workspaceId);
|
||||
}, 1000);
|
||||
|
||||
watcher.on('change', () => {
|
||||
const currentTime = ts();
|
||||
if (currentTime - db.lastUpdateTime > 100) {
|
||||
debounceOnChange();
|
||||
}
|
||||
});
|
||||
|
||||
dbWatchers.set(db.workspaceId, () => {
|
||||
watcher.close();
|
||||
});
|
||||
|
||||
// todo: there is still a possibility that the file is deleted
|
||||
// but we didn't get the event soon enough and another event tries to
|
||||
// access the db
|
||||
watcher.on('unlink', () => {
|
||||
logger.info('db file missing', db.workspaceId);
|
||||
subjects.db.dbFileMissing.next(db.workspaceId);
|
||||
// cleanup
|
||||
watcher.close().then(() => {
|
||||
return () => {
|
||||
// destroy on unsubscribe
|
||||
logger.info('[FSWatcher] cleanup db file watcher', db.workspaceId);
|
||||
db.destroy();
|
||||
dbWatchers.delete(db.workspaceId);
|
||||
dbMapping.delete(db.workspaceId);
|
||||
});
|
||||
});
|
||||
subscription.unsubscribe();
|
||||
};
|
||||
}).pipe(
|
||||
debounceTime(1000),
|
||||
filter(event => !isRemoveOrMoveEvent(event)),
|
||||
tap({
|
||||
next: () => {
|
||||
logger.info(
|
||||
'[FSWatcher] db file changed on disk',
|
||||
db.workspaceId,
|
||||
ts() - db.lastUpdateTime,
|
||||
'ms'
|
||||
);
|
||||
db.reconnectDB();
|
||||
subjects.db.dbFileUpdate.next(db.workspaceId);
|
||||
},
|
||||
complete: () => {
|
||||
// todo: there is still a possibility that the file is deleted
|
||||
// but we didn't get the event soon enough and another event tries to
|
||||
// access the db
|
||||
logger.info('[FSWatcher] db file missing', db.workspaceId);
|
||||
subjects.db.dbFileMissing.next(db.workspaceId);
|
||||
db.destroy();
|
||||
},
|
||||
}),
|
||||
takeUntil(defer(() => fromEvent(app, 'before-quit')))
|
||||
);
|
||||
}
|
||||
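The rewritten `startWatchingDBFile` above wraps the native watcher subscription in an rxjs `Observable`, so debouncing, the remove/rename completion path and teardown all live in one pipeline. A stripped-down sketch of that wrapping pattern, assuming the `FsWatcher.watch(path, options).subscribe(next, error)` surface declared in `packages/native/index.d.ts`:

```ts
import type { NotifyEvent } from '@affine/native/event';
import { createFSWatcher } from '@affine/native/fs-watcher';
import { Observable } from 'rxjs';

function watchFile(path: string): Observable<NotifyEvent> {
  return new Observable<NotifyEvent>(subscriber => {
    const subscription = createFSWatcher()
      .watch(path, { recursive: false })
      .subscribe(
        event => subscriber.next(event),
        err => subscriber.error(err)
      );
    // teardown runs on unsubscribe/complete, mirroring the cleanup above
    return () => subscription.unsubscribe();
  });
}
```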
|
||||
export async function ensureSQLiteDB(id: string) {
|
||||
let workspaceDB = dbMapping.get(id);
|
||||
if (!workspaceDB) {
|
||||
logger.info('[ensureSQLiteDB] open db connection', id);
|
||||
workspaceDB = openWorkspaceDatabase(appContext, id);
|
||||
dbMapping.set(id, workspaceDB);
|
||||
startWatchingDBFile(await workspaceDB);
|
||||
}
|
||||
return await workspaceDB;
|
||||
export function ensureSQLiteDB(id: string) {
|
||||
const deferValue = lastValueFrom(
|
||||
database$.pipe(
|
||||
filter(db => db.workspaceId === id && db.db.open),
|
||||
take(1),
|
||||
tap({
|
||||
error: err => {
|
||||
logger.error('[ensureSQLiteDB] error', err);
|
||||
},
|
||||
})
|
||||
)
|
||||
);
|
||||
databaseInput$.next(id);
|
||||
return deferValue;
|
||||
}
|
||||
|
||||
export async function disconnectSQLiteDB(id: string) {
|
||||
const dbp = dbMapping.get(id);
|
||||
if (dbp) {
|
||||
const db = await dbp;
|
||||
logger.info('close db connection', id);
|
||||
db.destroy();
|
||||
dbWatchers.get(id)?.();
|
||||
dbWatchers.delete(id);
|
||||
dbMapping.delete(id);
|
||||
}
|
||||
}
|
||||
|
||||
export async function cleanupSQLiteDBs() {
|
||||
for (const [id] of dbMapping) {
|
||||
logger.info('close db connection', id);
|
||||
await disconnectSQLiteDB(id);
|
||||
}
|
||||
dbMapping.clear();
|
||||
dbWatchers.clear();
|
||||
}
|
||||
|
||||
app?.on('before-quit', async () => {
|
||||
await cleanupSQLiteDBs();
|
||||
});
|
||||
|
||||
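Taken together, the new `ensure-db.ts` turns opening a workspace database into a stream: callers push an id into `databaseInput$`, `groupBy` + `exhaustMap` make sure each workspace is opened at most once at a time, and `shareReplay(1)` replays the opened `WorkspaceSQLiteDB` to later subscribers. A hedged usage sketch of what that buys a caller (workspace ids are illustrative):

```ts
import { ensureSQLiteDB } from './db/ensure-db';

// Concurrent calls for the same workspace share one connection:
// exhaustMap ignores the second open attempt and shareReplay(1)
// should hand both callers the same WorkspaceSQLiteDB instance.
const [a, b] = await Promise.all([
  ensureSQLiteDB('workspace-a'),
  ensureSQLiteDB('workspace-a'),
]);
console.assert(a === b);

// A different id lands in its own group and opens its own db file.
const other = await ensureSQLiteDB('workspace-b');
console.assert(other !== a);
```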
@@ -42,6 +42,7 @@ export class WorkspaceSQLiteDB {
ydoc = new Y.Doc();
firstConnect = false;
lastUpdateTime = ts();
destroyed = false;

constructor(public path: string, public workspaceId: string) {
this.db = this.reconnectDB();
@@ -58,7 +59,7 @@ export class WorkspaceSQLiteDB {
};

reconnectDB = () => {
logger.log('open db', this.workspaceId);
logger.log('[WorkspaceSQLiteDB] open db', this.workspaceId);
if (this.db) {
this.db.close();
}
@@ -224,8 +225,9 @@ export async function openWorkspaceDatabase(
}

export function isValidDBFile(path: string) {
let db: Database | null = null;
try {
const db = sqlite(path);
db = sqlite(path);
// check if db has two tables, one for updates and one for blobs
const statement = db.prepare(
`SELECT name FROM sqlite_schema WHERE type='table'`
@@ -239,6 +241,7 @@ export function isValidDBFile(path: string) {
return true;
} catch (error) {
logger.error('isValidDBFile', error);
db?.close();
return false;
}
}
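The `isValidDBFile` change above hoists the `db` handle out of the `try` block so the `catch` branch can close it. A self-contained sketch of the same check, assuming better-sqlite3's callable export (as the hunk itself uses) — the expected table names below are placeholders, the real list lives next to this hunk:

```ts
import sqlite from 'better-sqlite3';
import type { Database } from 'better-sqlite3';

export function isValidDBFile(path: string): boolean {
  let db: Database | null = null;
  try {
    db = sqlite(path);
    // list the user tables and check that the ones the app relies on are present
    const tables = db
      .prepare(`SELECT name FROM sqlite_schema WHERE type='table'`)
      .all()
      .map((row: any) => row.name as string);
    db.close();
    return ['updates', 'blobs'].every(t => tables.includes(t)); // placeholder names
  } catch {
    db?.close();
    return false;
  }
}
```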
@@ -6,7 +6,8 @@ import { nanoid } from 'nanoid';

import { appContext } from '../../context';
import { logger } from '../../logger';
import { ensureSQLiteDB } from '../db/ensure-db';
import { ensureSQLiteDB, isRemoveOrMoveEvent } from '../db/ensure-db';
import type { WorkspaceSQLiteDB } from '../db/sqlite';
import { getWorkspaceDBPath, isValidDBFile } from '../db/sqlite';
import { listWorkspaces } from '../workspace/workspace';

@@ -232,17 +233,29 @@ export async function moveDBFile(
workspaceId: string,
dbFileLocation?: string
): Promise<MoveDBFileResult> {
let db: WorkspaceSQLiteDB | null = null;
try {
const db = await ensureSQLiteDB(workspaceId);

const { moveFile, FsWatcher } = await import('@affine/native');
db = await ensureSQLiteDB(workspaceId);
// get the real file path of db
const realpath = await fs.realpath(db.path);
const isLink = realpath !== db.path;

const watcher = FsWatcher.watch(realpath, { recursive: false });
const waitForRemove = new Promise<void>(resolve => {
const subscription = watcher.subscribe(event => {
if (isRemoveOrMoveEvent(event)) {
subscription.unsubscribe();
// resolve after FSWatcher in `database$` is fired
setImmediate(() => {
resolve();
});
}
});
});
const newFilePath =
dbFileLocation ||
dbFileLocation ??
(
getFakedResult() ||
getFakedResult() ??
(await dialog.showSaveDialog({
properties: ['showOverwriteConfirmation'],
title: 'Move Workspace Storage',
@@ -263,32 +276,39 @@ export async function moveDBFile(
};
}

db.db.close();

if (await fs.pathExists(newFilePath)) {
return {
error: 'FILE_ALREADY_EXISTS',
};
}

db.db.close();

if (isLink) {
// remove the old link to unblock new link
await fs.unlink(db.path);
}

await fs.move(realpath, newFilePath, {
overwrite: true,
});
logger.info(`[moveDBFile] move ${realpath} -> ${newFilePath}`);

await moveFile(realpath, newFilePath);

await fs.ensureSymlink(newFilePath, db.path, 'file');
logger.info(`openMoveDBFileDialog symlink: ${realpath} -> ${newFilePath}`);
db.reconnectDB();
logger.info(`[moveDBFile] symlink: ${realpath} -> ${newFilePath}`);
// wait for the file move event emits to the FileWatcher in database$ in ensure-db.ts
// so that the db will be destroyed and we can call the `ensureSQLiteDB` in the next step
// or the FileWatcher will continue listen on the `realpath` and emit file change events
// then the database will reload while receiving these events; and the moved database file will be recreated while reloading database
await waitForRemove;
logger.info(`removed`);
await ensureSQLiteDB(workspaceId);

return {
filePath: newFilePath,
};
} catch (err) {
logger.error('moveDBFile', err);
db?.destroy();
logger.error('[moveDBFile]', err);
return {
error: 'UNKNOWN_ERROR',
};
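The reworked `moveDBFile` above uses the native layer twice: `FsWatcher` subscribes to the file about to move, and only after a remove/rename event has propagated (so the watcher inside `database$` has already torn the old connection down) does it call `ensureSQLiteDB` again. A condensed sketch of just that hand-off, with the dialog plumbing and error handling from the hunk left out (the helper name is illustrative):

```ts
import { FsWatcher, moveFile } from '@affine/native';
import { ensureSQLiteDB, isRemoveOrMoveEvent } from '../db/ensure-db';

async function moveAndReopen(realpath: string, newFilePath: string, workspaceId: string) {
  // resolves once the move shows up as a remove/rename event on the old path
  const waitForRemove = new Promise<void>(resolve => {
    const subscription = FsWatcher.watch(realpath, { recursive: false }).subscribe(event => {
      if (isRemoveOrMoveEvent(event)) {
        subscription.unsubscribe();
        setImmediate(resolve); // let the watcher inside database$ run first
      }
    });
  });

  await moveFile(realpath, newFilePath); // native rename via tokio::fs::rename
  await waitForRemove;                   // the old connection has been destroyed by now
  await ensureSQLiteDB(workspaceId);     // reopen (the real code symlinks db.path first)
}
```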
@@ -18,7 +18,7 @@ export const dialogHandlers = {
saveDBFileAs: async (_, workspaceId: string) => {
return saveDBFileAs(workspaceId);
},
moveDBFile: async (_, workspaceId: string, dbFileLocation?: string) => {
moveDBFile: (_, workspaceId: string, dbFileLocation?: string) => {
return moveDBFile(workspaceId, dbFileLocation);
},
selectDBFileLocation: async () => {
@@ -44,6 +44,7 @@
"@electron/remote": "2.0.9",
"@types/better-sqlite3": "^7.6.4",
"@types/fs-extra": "^11.0.1",
"@types/uuid": "^9.0.1",
"cross-env": "7.0.3",
"electron": "24.3.0",
"electron-log": "^5.0.0-beta.23",
@@ -54,9 +55,11 @@
"playwright": "^1.33.0",
"ts-node": "^10.9.1",
"undici": "^5.22.1",
"uuid": "^9.0.0",
"zx": "^7.2.2"
},
"dependencies": {
"@affine/native": "workspace:*",
"better-sqlite3": "^8.3.0",
"chokidar": "^3.5.3",
"electron-updater": "^5.3.0",
@@ -8,6 +8,11 @@ import { config } from './common.mjs';
const NODE_ENV =
process.env.NODE_ENV === 'development' ? 'development' : 'production';

if (process.platform === 'win32') {
$.shell = true;
$.prefix = '';
}

async function buildLayers() {
const common = config();
await esbuild.build(common.preload);
@@ -12,16 +12,6 @@ const DEV_SERVER_URL = process.env.DEV_SERVER_URL;
/** @type 'production' | 'development'' */
const mode = (process.env.NODE_ENV = process.env.NODE_ENV || 'development');

const nativeNodeModulesPlugin = {
name: 'native-node-modules',
setup(build) {
// Mark native Node.js modules as external
build.onResolve({ filter: /\.node$/, namespace: 'file' }, args => {
return { path: args.path, external: true };
});
},
};

// List of env that will be replaced by esbuild
const ENV_MACROS = ['AFFINE_GOOGLE_CLIENT_ID', 'AFFINE_GOOGLE_CLIENT_SECRET'];

@@ -49,10 +39,19 @@ export const config = () => {
bundle: true,
target: `node${NODE_MAJOR_VERSION}`,
platform: 'node',
external: ['electron', 'yjs', 'better-sqlite3', 'electron-updater'],
plugins: [nativeNodeModulesPlugin],
external: [
'electron',
'yjs',
'better-sqlite3',
'electron-updater',
'@affine/native-*',
],
define: define,
format: 'cjs',
loader: {
'.node': 'copy',
},
assetNames: '[name]',
},
preload: {
entryPoints: [resolve(root, './layers/preload/src/index.ts')],
@@ -61,7 +60,6 @@ export const config = () => {
target: `node${NODE_MAJOR_VERSION}`,
platform: 'node',
external: ['electron', '../main/exposed-meta'],
plugins: [nativeNodeModulesPlugin],
define: define,
},
};
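The esbuild change above drops the custom `nativeNodeModulesPlugin`: the built-in `copy` loader emits `.node` addons next to the bundle, `assetNames: '[name]'` keeps their original filenames, and the per-platform `@affine/native-*` packages stay external. A minimal sketch of that configuration using esbuild's JS API (entry point and output directory are illustrative):

```ts
import { build } from 'esbuild';

await build({
  entryPoints: ['./layers/main/src/index.ts'], // illustrative entry
  bundle: true,
  platform: 'node',
  format: 'cjs',
  outdir: './dist/layers/main', // illustrative output dir
  external: ['electron', 'yjs', 'better-sqlite3', 'electron-updater', '@affine/native-*'],
  loader: {
    // copy prebuilt native addons instead of trying to bundle them
    '.node': 'copy',
  },
  assetNames: '[name]', // keep the addon's original file name
});
```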
@@ -1,7 +1,3 @@
#!/usr/bin/env zx
/* eslint-disable @typescript-eslint/no-restricted-imports */
import 'zx/globals';

const mainDistDir = path.resolve(__dirname, '../dist/layers/main');

// be careful and avoid any side effects in
@@ -9,14 +9,16 @@
"types": ["node"],
"outDir": "dist",
"moduleResolution": "node",
"resolveJsonModule": true,
"isolatedModules": true
"resolveJsonModule": true
},
"include": ["**/*.ts", "**/*.tsx", "package.json"],
"exclude": ["out", "dist", "node_modules"],
"references": [
{
"path": "./tsconfig.node.json"
},
{
"path": "../../packages/native"
}
],
"ts-node": {
@@ -17,10 +17,11 @@ napi = { version = "2", default-features = false, features = [
] }
napi-derive = "2"
notify = { version = "5", features = ["serde"] }
once_cell = "1"
parking_lot = "0.12"
serde = "1"
serde_json = "1"
tokio = "1"
tokio = { version = "1", features = ["full"] }
uuid = { version = "1", default-features = false, features = [
"serde",
"v4",
3
packages/native/fs-watcher.d.ts
vendored
Normal file
@@ -0,0 +1,3 @@
import type { FsWatcher } from './index';

export function createFSWatcher(): typeof FsWatcher;
6
packages/native/fs-watcher.js
Normal file
@@ -0,0 +1,6 @@
module.exports.createFSWatcher = function createFSWatcher() {
// require it in the function level so that it won't break the `generate-main-exposed-meta.mjs`
// eslint-disable-next-line @typescript-eslint/no-var-requires
const { FsWatcher } = require('./index');
return FsWatcher;
};
11
packages/native/index.d.ts
vendored
@@ -22,21 +22,20 @@ export const enum WatcherKind {
NullWatcher = 'NullWatcher',
Unknown = 'Unknown',
}
export function watch(
p: string,
options?: WatchOptions | undefined | null
): FSWatcher;
export function moveFile(src: string, dst: string): Promise<void>;
export class Subscription {
toString(): string;
unsubscribe(): void;
}
export type FSWatcher = FsWatcher;
export class FsWatcher {
get kind(): WatcherKind;
static watch(p: string, options?: WatchOptions | undefined | null): FsWatcher;
static kind(): WatcherKind;
toString(): string;
subscribe(
callback: (event: import('./event').NotifyEvent) => void,
errorCallback?: (err: Error) => void
): Subscription;
close(): void;
static unwatch(p: string): void;
static close(): void;
}
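With the declaration changes above, the free `watch()` export is gone and `FsWatcher` is the entry point: `watch()` is a static factory, `kind()`, `unwatch()` and `close()` are static, and `subscribe()` returns a `Subscription`. A small usage sketch against this declared surface (the watched path is illustrative):

```ts
import { FsWatcher } from '@affine/native';

const watcher = FsWatcher.watch('/tmp/workspace.db', { recursive: false });

const subscription = watcher.subscribe(
  event => {
    // NotifyEvent mirrors the serialized notify::Event coming from Rust
    console.log('fs event', event.type, event.paths);
  },
  err => console.error('watch error', err)
);

// stop receiving events for this listener only
subscription.unsubscribe();
// or stop watching the path / tear down the shared watcher entirely
FsWatcher.unwatch('/tmp/workspace.db');
FsWatcher.close();
```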
@@ -263,9 +263,9 @@ if (!nativeBinding) {
throw new Error(`Failed to load native binding`);
}

const { WatcherKind, Subscription, watch, FsWatcher } = nativeBinding;
const { WatcherKind, Subscription, FsWatcher, moveFile } = nativeBinding;

module.exports.WatcherKind = WatcherKind;
module.exports.Subscription = Subscription;
module.exports.watch = watch;
module.exports.FsWatcher = FsWatcher;
module.exports.moveFile = moveFile;
@@ -1,4 +1,4 @@
|
||||
use std::{collections::HashMap, path::Path, sync::Arc};
|
||||
use std::{collections::BTreeMap, path::Path, sync::Arc};
|
||||
|
||||
use napi::{
|
||||
bindgen_prelude::{FromNapiValue, ToNapiValue},
|
||||
@@ -6,8 +6,31 @@ use napi::{
|
||||
};
|
||||
use napi_derive::napi;
|
||||
use notify::{Event, RecommendedWatcher, RecursiveMode, Watcher};
|
||||
use once_cell::sync::Lazy;
|
||||
use parking_lot::Mutex;
|
||||
|
||||
static GLOBAL_WATCHER: Lazy<napi::Result<GlobalWatcher>> = Lazy::new(|| {
|
||||
let event_emitter = Arc::new(Mutex::new(EventEmitter {
|
||||
listeners: Default::default(),
|
||||
error_callbacks: Default::default(),
|
||||
}));
|
||||
let event_emitter_in_handler = event_emitter.clone();
|
||||
let watcher: RecommendedWatcher =
|
||||
notify::recommended_watcher(move |res: notify::Result<Event>| {
|
||||
event_emitter_in_handler.lock().on(res);
|
||||
})
|
||||
.map_err(anyhow::Error::from)?;
|
||||
Ok(GlobalWatcher {
|
||||
inner: Mutex::new(watcher),
|
||||
event_emitter,
|
||||
})
|
||||
});
|
||||
|
||||
struct GlobalWatcher {
|
||||
inner: Mutex<RecommendedWatcher>,
|
||||
event_emitter: Arc<Mutex<EventEmitter>>,
|
||||
}
|
||||
|
||||
#[napi(object)]
|
||||
#[derive(Default)]
|
||||
pub struct WatchOptions {
|
||||
@@ -50,7 +73,6 @@ impl From<notify::WatcherKind> for WatcherKind {
|
||||
pub struct Subscription {
|
||||
id: uuid::Uuid,
|
||||
error_uuid: Option<uuid::Uuid>,
|
||||
event_emitter: Arc<Mutex<EventEmitter>>,
|
||||
}
|
||||
|
||||
#[napi]
|
||||
@@ -62,61 +84,52 @@ impl Subscription {
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub fn unsubscribe(&mut self) {
|
||||
let mut event_emitter = self.event_emitter.lock();
|
||||
pub fn unsubscribe(&mut self) -> napi::Result<()> {
|
||||
let mut event_emitter = GLOBAL_WATCHER
|
||||
.as_ref()
|
||||
.map_err(|err| err.clone())?
|
||||
.event_emitter
|
||||
.lock();
|
||||
event_emitter.listeners.remove(&self.id);
|
||||
if let Some(error_uuid) = &self.error_uuid {
|
||||
event_emitter.error_callbacks.remove(error_uuid);
|
||||
}
|
||||
};
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub fn watch(p: String, options: Option<WatchOptions>) -> Result<FSWatcher, anyhow::Error> {
|
||||
let event_emitter = Arc::new(Mutex::new(EventEmitter {
|
||||
listeners: Default::default(),
|
||||
error_callbacks: Default::default(),
|
||||
}));
|
||||
let event_emitter_in_handler = event_emitter.clone();
|
||||
let mut watcher: RecommendedWatcher =
|
||||
notify::recommended_watcher(move |res: notify::Result<Event>| {
|
||||
event_emitter_in_handler.lock().on(res);
|
||||
})
|
||||
.map_err(anyhow::Error::from)?;
|
||||
|
||||
let options = options.unwrap_or_default();
|
||||
watcher
|
||||
.watch(
|
||||
Path::new(&p),
|
||||
if options.recursive == Some(false) {
|
||||
RecursiveMode::NonRecursive
|
||||
} else {
|
||||
RecursiveMode::Recursive
|
||||
},
|
||||
)
|
||||
.map_err(anyhow::Error::from)?;
|
||||
Ok(FSWatcher {
|
||||
inner: watcher,
|
||||
event_emitter,
|
||||
})
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub struct FSWatcher {
|
||||
inner: RecommendedWatcher,
|
||||
event_emitter: Arc<Mutex<EventEmitter>>,
|
||||
path: String,
|
||||
recursive: RecursiveMode,
|
||||
}
|
||||
|
||||
#[napi]
|
||||
impl FSWatcher {
|
||||
#[napi(getter)]
|
||||
pub fn kind(&self) -> WatcherKind {
|
||||
#[napi(factory)]
|
||||
pub fn watch(p: String, options: Option<WatchOptions>) -> Self {
|
||||
let options = options.unwrap_or_default();
|
||||
FSWatcher {
|
||||
path: p,
|
||||
recursive: if options.recursive == Some(false) {
|
||||
RecursiveMode::NonRecursive
|
||||
} else {
|
||||
RecursiveMode::Recursive
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub fn kind() -> WatcherKind {
|
||||
RecommendedWatcher::kind().into()
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub fn to_string(&self) -> napi::Result<String> {
|
||||
Ok(format!("{:?}", self.inner))
|
||||
Ok(format!(
|
||||
"{:?}",
|
||||
GLOBAL_WATCHER.as_ref().map_err(|err| err.clone())?.inner
|
||||
))
|
||||
}
|
||||
|
||||
#[napi]
|
||||
@@ -125,10 +138,23 @@ impl FSWatcher {
|
||||
#[napi(ts_arg_type = "(event: import('./event').NotifyEvent) => void")]
|
||||
callback: ThreadsafeFunction<serde_json::Value, ErrorStrategy::Fatal>,
|
||||
#[napi(ts_arg_type = "(err: Error) => void")] error_callback: Option<ThreadsafeFunction<()>>,
|
||||
) -> Subscription {
|
||||
) -> napi::Result<Subscription> {
|
||||
GLOBAL_WATCHER
|
||||
.as_ref()
|
||||
.map_err(|err| err.clone())?
|
||||
.inner
|
||||
.lock()
|
||||
.watch(Path::new(&self.path), self.recursive)
|
||||
.map_err(anyhow::Error::from)?;
|
||||
let uuid = uuid::Uuid::new_v4();
|
||||
let mut event_emitter = self.event_emitter.lock();
|
||||
event_emitter.listeners.insert(uuid, callback);
|
||||
let mut event_emitter = GLOBAL_WATCHER
|
||||
.as_ref()
|
||||
.map_err(|err| err.clone())?
|
||||
.event_emitter
|
||||
.lock();
|
||||
event_emitter
|
||||
.listeners
|
||||
.insert(uuid, (self.path.clone(), callback));
|
||||
let mut error_uuid = None;
|
||||
if let Some(error_callback) = error_callback {
|
||||
let uuid = uuid::Uuid::new_v4();
|
||||
@@ -136,32 +162,51 @@ impl FSWatcher {
|
||||
error_uuid = Some(uuid);
|
||||
}
|
||||
drop(event_emitter);
|
||||
Subscription {
|
||||
Ok(Subscription {
|
||||
id: uuid,
|
||||
error_uuid,
|
||||
event_emitter: self.event_emitter.clone(),
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub fn close(&mut self) -> napi::Result<()> {
|
||||
// drop the previous watcher
|
||||
self.inner = notify::recommended_watcher(|_| {}).map_err(anyhow::Error::from)?;
|
||||
self.event_emitter.lock().stop();
|
||||
pub fn unwatch(p: String) -> napi::Result<()> {
|
||||
let mut watcher = GLOBAL_WATCHER
|
||||
.as_ref()
|
||||
.map_err(|err| err.clone())?
|
||||
.inner
|
||||
.lock();
|
||||
watcher
|
||||
.unwatch(Path::new(&p))
|
||||
.map_err(anyhow::Error::from)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub fn close() -> napi::Result<()> {
|
||||
let global_watcher = GLOBAL_WATCHER.as_ref().map_err(|err| err.clone())?;
|
||||
global_watcher.event_emitter.lock().stop();
|
||||
let mut inner = global_watcher.inner.lock();
|
||||
*inner = notify::recommended_watcher(|_| {}).map_err(anyhow::Error::from)?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct EventEmitter {
|
||||
listeners: HashMap<uuid::Uuid, ThreadsafeFunction<serde_json::Value, ErrorStrategy::Fatal>>,
|
||||
error_callbacks: HashMap<uuid::Uuid, ThreadsafeFunction<()>>,
|
||||
listeners: BTreeMap<
|
||||
uuid::Uuid,
|
||||
(
|
||||
String,
|
||||
ThreadsafeFunction<serde_json::Value, ErrorStrategy::Fatal>,
|
||||
),
|
||||
>,
|
||||
error_callbacks: BTreeMap<uuid::Uuid, ThreadsafeFunction<()>>,
|
||||
}
|
||||
|
||||
impl EventEmitter {
|
||||
fn on(&self, event: notify::Result<Event>) {
|
||||
match event {
|
||||
Ok(e) => match serde_json::value::to_value(e) {
|
||||
Ok(e) => match serde_json::value::to_value(&e) {
|
||||
Err(err) => {
|
||||
let err: napi::Error = anyhow::Error::from(err).into();
|
||||
for on_error in self.error_callbacks.values() {
|
||||
@@ -169,8 +214,10 @@ impl EventEmitter {
|
||||
}
|
||||
}
|
||||
Ok(v) => {
|
||||
for on_event in self.listeners.values() {
|
||||
on_event.call(v.clone(), ThreadsafeFunctionCallMode::NonBlocking);
|
||||
for (path, on_event) in self.listeners.values() {
|
||||
if e.paths.iter().any(|p| p.to_str() == Some(path)) {
|
||||
on_event.call(v.clone(), ThreadsafeFunctionCallMode::NonBlocking);
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -188,3 +235,9 @@ impl EventEmitter {
|
||||
self.error_callbacks.clear();
|
||||
}
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub async fn move_file(src: String, dst: String) -> napi::Result<()> {
|
||||
tokio::fs::rename(src, dst).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
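On the Rust side the per-call watchers are gone: a single lazily initialised `GLOBAL_WATCHER` serves every subscription, each listener is stored together with the path it asked for, and the emitter only invokes callbacks whose path appears in `event.paths`. From TypeScript that contract looks roughly like this (paths are illustrative, and this is a sketch of the intended behaviour rather than a test from the repo):

```ts
import { FsWatcher } from '@affine/native';

// Both subscriptions share the one native watcher under the hood,
// but each callback should only fire for events touching its own path.
const a = FsWatcher.watch('/tmp/a.db').subscribe(event => {
  console.log('a.db event', event.type);
});
const b = FsWatcher.watch('/tmp/b.db').subscribe(event => {
  console.log('b.db event', event.type);
});

// Unsubscribing one listener leaves the other untouched...
a.unsubscribe();
b.unsubscribe();
// ...and FsWatcher.close() stops the process-wide watcher for everyone.
FsWatcher.close();
```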
@@ -1,8 +1,9 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"noEmit": true,
"outDir": "lib"
"noEmit": false,
"outDir": "lib",
"composite": true
},
"include": ["index.d.ts", "__tests__/**/*.mts"],
"ts-node": {
@@ -33,7 +33,9 @@
"@affine-test/kit/*": ["./tests/kit/*"],
"@affine-test/fixtures/*": ["./tests/fixtures/*"],
"@toeverything/y-indexeddb": ["./packages/y-indexeddb/src"],
"@toeverything/hooks/*": ["./packages/hooks/src/*"]
"@toeverything/hooks/*": ["./packages/hooks/src/*"],
"@affine/native": ["./packages/native/index.d.ts"],
"@affine/native/*": ["./packages/native/*"]
}
},
"references": [
@@ -23,17 +23,21 @@ export default defineConfig({
resolve(rootDir, './scripts/setup/search.ts'),
resolve(rootDir, './scripts/setup/lottie-web.ts'),
],
include: [
'packages/**/*.spec.ts',
'packages/**/*.spec.tsx',
'apps/web/**/*.spec.ts',
'apps/web/**/*.spec.tsx',
'apps/electron/layers/**/*.spec.ts',
'tests/unit/**/*.spec.ts',
'tests/unit/**/*.spec.tsx',
],
// split tests that include native addons or not
include: process.env.NATIVE_TEST
? ['apps/electron/layers/**/*.spec.ts']
: [
'packages/**/*.spec.ts',
'packages/**/*.spec.tsx',
'apps/web/**/*.spec.ts',
'apps/web/**/*.spec.tsx',
'tests/unit/**/*.spec.ts',
'tests/unit/**/*.spec.tsx',
],
exclude: ['**/node_modules', '**/dist', '**/build', '**/out'],
testTimeout: 5000,
singleThread: Boolean(process.env.NATIVE_TEST),
threads: !process.env.NATIVE_TEST,
coverage: {
provider: 'istanbul', // or 'c8'
reporter: ['lcov'],
@@ -131,6 +131,7 @@ __metadata:
|
||||
resolution: "@affine/electron@workspace:apps/electron"
|
||||
dependencies:
|
||||
"@affine-test/kit": "workspace:*"
|
||||
"@affine/native": "workspace:*"
|
||||
"@electron-forge/cli": ^6.1.1
|
||||
"@electron-forge/core": ^6.1.1
|
||||
"@electron-forge/core-utils": ^6.1.1
|
||||
@@ -143,6 +144,7 @@ __metadata:
|
||||
"@electron/remote": 2.0.9
|
||||
"@types/better-sqlite3": ^7.6.4
|
||||
"@types/fs-extra": ^11.0.1
|
||||
"@types/uuid": ^9.0.1
|
||||
better-sqlite3: ^8.3.0
|
||||
chokidar: ^3.5.3
|
||||
cross-env: 7.0.3
|
||||
@@ -158,6 +160,7 @@ __metadata:
|
||||
rxjs: ^7.8.1
|
||||
ts-node: ^10.9.1
|
||||
undici: ^5.22.1
|
||||
uuid: ^9.0.0
|
||||
yjs: ^13.6.1
|
||||
zx: ^7.2.2
|
||||
peerDependencies:
|
||||
@@ -236,7 +239,7 @@ __metadata:
|
||||
languageName: unknown
|
||||
linkType: soft
|
||||
|
||||
"@affine/native@workspace:packages/native":
|
||||
"@affine/native@workspace:*, @affine/native@workspace:packages/native":
|
||||
version: 0.0.0-use.local
|
||||
resolution: "@affine/native@workspace:packages/native"
|
||||
dependencies:
|
||||
|
||||