Mirror of https://github.com/toeverything/AFFiNE.git (synced 2026-02-05 09:04:56 +00:00)

Compare commits: 196 commits, v0.6.0-bet ... v0.6.2
| Author | SHA1 | Date |
|---|---|---|
| | c38fe87af9 | |
| | dc377185b4 | |
| | 4d25a3f3fe | |
| | 63b66497d6 | |
| | 2dcc8e2b87 | |
| | 5769425ec1 | |
| | 8c3d35ad56 | |
| | 928ae30474 | |
| | 804e233a7f | |
| | 1fcdc0f856 | |
| | b5f7a3177d | |
| | 3d17c50777 | |
| | c2f8005574 | |
| | 6ab79dfa69 | |
| | c8a1391dd8 | |
| | ef7fd194c4 | |
| | 72ef788927 | |
| | 3c5c6ef4e6 | |
| | 6c63fcdbc7 | |
| | d8d46cb3a9 | |
| | 1b6e95479f | |
| | 9aa211dc77 | |
| | 036559e165 | |
| | eb1c4f7a07 | |
| | a21067db17 | |
| | af205cde7c | |
| | a0ee00a4b2 | |
| | 8cd5f81076 | |
| | d83ef83d05 | |
| | a2acb6cf9f | |
| | 1e52c5fcfc | |
| | d436325a5c | |
| | 9402c80133 | |
| | 0ade3e65ed | |
| | f971e56f15 | |
| | 9731dd3261 | |
| | 5c87af6113 | |
| | 3ac51e8bf1 | |
| | c3bfc16d27 | |
| | 2ca5ad6509 | |
| | f9045d357a | |
| | 3dd89fc244 | |
| | 8cb095a28a | |
| | e6c3c6b5f7 | |
| | 5699c99bf6 | |
| | 47babe25b7 | |
| | fff6ff9778 | |
| | 3343110aef | |
| | c4e9544b3f | |
| | cc1315ef12 | |
| | d1505a6c94 | |
| | b3aac46e38 | |
| | a0e28152bc | |
| | 05e45936b9 | |
| | d273ee955b | |
| | 28e05dc92c | |
| | cd5aec42a0 | |
| | 2352aa8c50 | |
| | ee6860ed39 | |
| | 706f57f075 | |
| | 311dcd722a | |
| | 4ef9093b5b | |
| | 00489dc571 | |
| | b7afdfc416 | |
| | 3490fa186c | |
| | 9b721f7628 | |
| | d3bafe135d | |
| | ea21ed6e0d | |
| | db4a0fd57c | |
| | 7824d4c82d | |
| | 5283010850 | |
| | fdd93d5ed4 | |
| | d3df703189 | |
| | fb5dcb0065 | |
| | 69fb7a590d | |
| | 79a2786816 | |
| | 5bb113a9a9 | |
| | 7e989ae8cb | |
| | 3676d6c3f0 | |
| | 7cfb8b0171 | |
| | a3bbd7e098 | |
| | 27fa3a5d76 | |
| | b342cc604c | |
| | 79ab2c1525 | |
| | 1e571089b3 | |
| | 4ab5457a44 | |
| | 2a31af0973 | |
| | fad00c242b | |
| | f93db613f4 | |
| | 13e85f11f8 | |
| | e1d87cf698 | |
| | 369282e29e | |
| | a018d50780 | |
| | 9379a0fb49 | |
| | fb9d200dd3 | |
| | 7e8169c4b8 | |
| | f18c164953 | |
| | 411593c8de | |
| | cfc3fbbb3f | |
| | 589ae0a26c | |
| | 4ced5a226d | |
| | cb914c0405 | |
| | d30f99d8df | |
| | eb01c2e76f | |
| | 72d4c785e5 | |
| | 151fb56281 | |
| | f340e15987 | |
| | f06f67e182 | |
| | 7050f41ba9 | |
| | 72066a6f54 | |
| | 52c1efee9e | |
| | 3eef11416a | |
| | a84cfb80d1 | |
| | 840ce7d146 | |
| | 30cbde31cb | |
| | 7c6d5adde5 | |
| | dc33130c79 | |
| | 7ed3250042 | |
| | 4abe62c9e0 | |
| | e4ba72853d | |
| | 8281fa49c1 | |
| | cffdd420e2 | |
| | a7ac6562b0 | |
| | e7e447d0e1 | |
| | 3a043f339c | |
| | 0a4b2b9ca7 | |
| | ece0853265 | |
| | 8b3c87cdfa | |
| | 2ed8d63d8a | |
| | f7487ad037 | |
| | bfe3b2242e | |
| | f0763337c4 | |
| | 540a93274a | |
| | 2dff731965 | |
| | 319d71345d | |
| | a22c39f395 | |
| | 81e4122fc2 | |
| | 5832ba1f34 | |
| | a9d417b3ce | |
| | 8e6bb78bea | |
| | 921f4c97d1 | |
| | 6d362f77ca | |
| | 76a93cb859 | |
| | 87d03f6e17 | |
| | bd6bde130b | |
| | b2da7cdff5 | |
| | f982120a1f | |
| | 099b84c383 | |
| | 5266f6ac13 | |
| | 4cfba64aa5 | |
| | d16927c4ad | |
| | aa02779883 | |
| | e734771beb | |
| | d2badccce3 | |
| | 581bc97896 | |
| | 5911b526e5 | |
| | 19a8d85924 | |
| | c5c9723c48 | |
| | 604b0441b0 | |
| | 0444dd9264 | |
| | 823bcbb6fb | |
| | 29bd170c2b | |
| | 8b672064d0 | |
| | 3299d64488 | |
| | e5b47c307e | |
| | 93a584e4b9 | |
| | cee829d08f | |
| | 79116f06dd | |
| | 460dc4d560 | |
| | bd83f95745 | |
| | 40ee400285 | |
| | f6da67df32 | |
| | 4948fb555c | |
| | b6fd58e0f5 | |
| | f4057593af | |
| | d17a5c2784 | |
| | cae3527c12 | |
| | 4c79a918b2 | |
| | 7e4edd2c65 | |
| | 01173babe6 | |
| | 569d71886c | |
| | 03b4f78743 | |
| | a7b3aacc28 | |
| | 3f95c3a654 | |
| | 3dcee2fa60 | |
| | 8450523f05 | |
| | 05ee884532 | |
| | 4c2d17b07f | |
| | abf57e5b45 | |
| | 7aef5de193 | |
| | e765a7e831 | |
| | 99dc4fbf22 | |
| | d905c9b5cf | |
| | 41936393ea | |
| | d869ce1684 | |
| | 68a114c540 | |
@@ -5,3 +5,6 @@ out
storybook-static
affine-out
_next
lib
.eslintrc.js
packages/i18n/src/i18n-generated.ts

124 .eslintrc.js
@@ -1,3 +1,50 @@
const { resolve } = require('node:path');

const createPattern = packageName => [
{
group: ['**/dist', '**/dist/**'],
message: 'Do not import from dist',
allowTypeImports: false,
},
{
group: ['**/src', '**/src/**'],
message: 'Do not import from src',
allowTypeImports: false,
},
{
group: [`@affine/${packageName}`],
message: 'Do not import package itself',
allowTypeImports: false,
},
{
group: [`@toeverything/${packageName}`],
message: 'Do not import package itself',
allowTypeImports: false,
},
];

const allPackages = [
'packages/cli',
'packages/component',
'packages/debug',
'packages/env',
'packages/graphql',
'packages/hooks',
'packages/i18n',
'packages/jotai',
'packages/native',
'packages/plugin-infra',
'packages/templates',
'packages/theme',
'packages/workspace',
'packages/y-indexeddb',
'apps/web',
'apps/server',
'apps/electron',
'plugins/copilot',
'plugins/bookmark-block',
];

/**
 * @type {import('eslint').Linter.Config}
 */
@@ -27,23 +74,28 @@ const config = {
},
ecmaVersion: 'latest',
sourceType: 'module',
project: resolve(__dirname, './tsconfig.eslint.json'),
},
plugins: [
'react',
'@typescript-eslint',
'simple-import-sort',
'sonarjs',
'import',
'unused-imports',
'unicorn',
],
rules: {
'array-callback-return': 'error',
'no-undef': 'off',
'no-empty': 'off',
'no-func-assign': 'off',
'no-cond-assign': 'off',
'no-constant-binary-expression': 'error',
'no-constructor-return': 'error',
'react/prop-types': 'off',
'@typescript-eslint/consistent-type-imports': 'error',
'@typescript-eslint/no-non-null-assertion': 'off',
'@typescript-eslint/no-non-null-assertion': 'error',
'@typescript-eslint/no-explicit-any': 'off',
'@typescript-eslint/no-empty-function': 'off',
'@typescript-eslint/no-unused-vars': [
@@ -57,7 +109,15 @@ const config = {
'unused-imports/no-unused-imports': 'error',
'simple-import-sort/imports': 'error',
'simple-import-sort/exports': 'error',
'@typescript-eslint/ban-ts-comment': 0,
'@typescript-eslint/ban-ts-comment': [
'error',
{
'ts-expect-error': 'allow-with-description',
'ts-ignore': true,
'ts-nocheck': true,
'ts-check': false,
},
],
'@typescript-eslint/no-restricted-imports': [
'error',
{
@@ -82,6 +142,21 @@ const config = {
ignore: ['^\\[[a-zA-Z0-9-_]+\\]\\.tsx$'],
},
],
'sonarjs/no-all-duplicated-branches': 'error',
'sonarjs/no-element-overwrite': 'error',
'sonarjs/no-empty-collection': 'error',
'sonarjs/no-extra-arguments': 'error',
'sonarjs/no-identical-conditions': 'error',
'sonarjs/no-identical-expressions': 'error',
'sonarjs/no-ignored-return': 'error',
'sonarjs/no-one-iteration-loop': 'error',
'sonarjs/no-use-of-empty-return-value': 'error',
'sonarjs/non-existent-operator': 'error',
'sonarjs/no-collapsible-if': 'error',
'sonarjs/no-same-line-conditional': 'error',
'sonarjs/no-duplicated-branches': 'error',
'sonarjs/no-collection-size-mischeck': 'error',
'sonarjs/no-useless-catch': 'error',
},
overrides: [
{
@@ -96,6 +171,51 @@ const config = {
'@typescript-eslint/no-var-requires': 0,
},
},
...allPackages.map(pkg => ({
files: [`${pkg}/src/**/*.ts`, `${pkg}/src/**/*.tsx`],
parserOptions: {
project: resolve(__dirname, './tsconfig.eslint.json'),
},
rules: {
'@typescript-eslint/no-restricted-imports': [
'error',
{
patterns: createPattern(pkg),
},
],
'@typescript-eslint/no-floating-promises': [
'error',
{
ignoreVoid: false,
ignoreIIFE: false,
},
],
},
})),
{
files: [
'**/__tests__/**/*',
'**/*.stories.tsx',
'**/*.spec.ts',
'**/tests/**/*',
'scripts/**/*',
'**/benchmark/**/*',
'**/__debug__/**/*',
],
rules: {
'@typescript-eslint/no-non-null-assertion': 0,
'@typescript-eslint/ban-ts-comment': [
'error',
{
'ts-expect-error': false,
'ts-ignore': true,
'ts-nocheck': true,
'ts-check': false,
},
],
'@typescript-eslint/no-floating-promises': 0,
},
},
],
};
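For illustration, the restricted-import groups that the config above attaches to each package can be expanded outside ESLint. A minimal sketch (the `createPattern` body is copied from the diff above; the package path passed in is just one entry of `allPackages`):

```ts
// Sketch: expand the per-package `no-restricted-imports` patterns from the
// .eslintrc.js shown above. `createPattern` is copied verbatim; the package
// path is an arbitrary example.
const createPattern = (packageName: string) => [
  {
    group: ['**/dist', '**/dist/**'],
    message: 'Do not import from dist',
    allowTypeImports: false,
  },
  {
    group: ['**/src', '**/src/**'],
    message: 'Do not import from src',
    allowTypeImports: false,
  },
  {
    group: [`@affine/${packageName}`],
    message: 'Do not import package itself',
    allowTypeImports: false,
  },
  {
    group: [`@toeverything/${packageName}`],
    message: 'Do not import package itself',
    allowTypeImports: false,
  },
];

// Print the groups that the override attaches to files under packages/workspace/src.
console.log(JSON.stringify(createPattern('packages/workspace'), null, 2));
```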
1 .github/CLA.md vendored

@@ -58,3 +58,4 @@ Example:
- Howard Do, @howarddo2208, 2023/04/20
- 三咲智子 Kevin Deng, @sxzz, 2023/04/21
- Moeyua, @moeyua, 2023/04/22
- Shishu, @shishudesu, 2023/05/19
1 .github/CODEOWNERS vendored

@@ -1 +0,0 @@
**/en.json @JimmFly
14 .github/actions/build-rust/action.yml vendored

@@ -29,21 +29,23 @@ runs:
if: ${{ inputs.target != 'x86_64-unknown-linux-gnu' && inputs.target != 'aarch64-unknown-linux-gnu' }}
shell: bash
run: yarn workspace @affine/native build --target ${{ inputs.target }}
env:
CARGO_BUILD_INCREMENTAL: 'false'

- name: Build
if: ${{ inputs.target == 'x86_64-unknown-linux-gnu' }}
uses: addnab/docker-run-action@v3
with:
image: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian
options: --user 0:0 -e CARGO_BUILD_INCREMENTAL=false -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build
run: yarn workspace @affine/native build --target ${{ inputs.target }}
options: --user 0:0 -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build
run: >-
export CC=x86_64-unknown-linux-gnu-gcc &&
export CC_x86_64_unknown_linux_gnu=x86_64-unknown-linux-gnu-gcc &&
yarn workspace @affine/native build --target ${{ inputs.target }}

- name: Build
if: ${{ inputs.target == 'aarch64-unknown-linux-gnu' }}
uses: addnab/docker-run-action@v3
with:
image: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian-aarch64
options: --user 0:0 -e CARGO_BUILD_INCREMENTAL=false -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build
run: yarn workspace @affine/native build --target ${{ inputs.target }}
options: --user 0:0 -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build
run: >-
yarn workspace @affine/native build --target ${{ inputs.target }}
6 .github/labeler.yml vendored

@@ -8,11 +8,17 @@ test:
- '**/tests/**/*'
- '**/__tests__/**/*'

plugin:copilot:
- 'plugins/copilot/**/*'

mod:dev:
- 'scripts/**/*'
- 'packages/cli/**/*'
- 'packages/debug/**/*'

mod:plugin-infra:
- 'packages/plugin-infra/**/*'

mod:workspace: 'packages/workspace/**/*'

mod:i18n: 'packages/i18n/**/*'
24 .github/workflows/add-to-project.yml vendored

@@ -1,24 +0,0 @@
name: Add to GitHub projects

on:
issues:
types:
- opened
pull_request_target:
types:
- opened
- reopened

jobs:
add-to-project:
name: Add issues and pull requests
runs-on: ubuntu-latest
steps:
- uses: actions/add-to-project@v0.4.0
with:
# You can target a repository in a different organization
# to the issue
project-url: https://github.com/orgs/toeverything/projects/10
github-token: ${{ secrets.ADD_TO_PROJECT_PAT }}
# labeled: bug, needs-triage
# label-operator: OR
94 .github/workflows/build.yml vendored

@@ -23,6 +23,7 @@ on:
env:
DEBUG: napi:*
APP_NAME: affine
COVERAGE: true
MACOSX_DEPLOYMENT_TARGET: '10.13'

jobs:
@@ -35,7 +36,12 @@ jobs:
- uses: actions/checkout@v3
- name: Setup Node.js
uses: ./.github/actions/setup-node
- run: yarn lint --max-warnings=0
- name: Run checks
run: |
yarn i18n-codegen gen
yarn typecheck
yarn lint --max-warnings=0
yarn circular

build-storybook:
name: Build Storybook
@@ -46,15 +52,15 @@ jobs:
- uses: actions/checkout@v3
- name: Setup Node.js
uses: ./.github/actions/setup-node
- run: yarn build:storybook
- run: yarn nx build @affine/storybook
- name: Upload storybook artifact
uses: actions/upload-artifact@v3
with:
name: storybook
path: ./packages/component/storybook-static
path: ./packages/storybook/storybook-static
if-no-files-found: error

build:
build-web:
name: Build @affine/web
runs-on: ubuntu-latest
environment: development
@@ -71,20 +77,15 @@ jobs:
key: ${{ runner.os }}-nextjs-${{ hashFiles('**/yarn.lock') }}-${{ hashFiles('**.[jt]s', '**.[jt]sx') }}
restore-keys: |
${{ runner.os }}-nextjs-${{ hashFiles('**/yarn.lock') }}-
- name: Build
run: yarn build
- name: Build Web
run: yarn nx build @affine/web
env:
NEXT_PUBLIC_FIREBASE_API_KEY: ${{ secrets.NEXT_PUBLIC_FIREBASE_API_KEY }}
NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN: ${{ secrets.NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN }}
NEXT_PUBLIC_FIREBASE_PROJECT_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_PROJECT_ID }}
NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET: ${{ secrets.NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET }}
NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID }}
NEXT_PUBLIC_FIREBASE_APP_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_APP_ID }}
NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID }}
API_SERVER_PROFILE: local
ENABLE_DEBUG_PAGE: true
ENABLE_DEBUG_PAGE: 1
ENABLE_PLUGIN: true
ENABLE_ALL_PAGE_FILTER: true
ENABLE_LEGACY_PROVIDER: true
COVERAGE: true
ENABLE_PRELOADING: false

- name: Upload artifact
uses: actions/upload-artifact@v3
@@ -93,24 +94,18 @@ jobs:
path: ./apps/web/.next
if-no-files-found: error

- name: Build @affine/web for desktop
run: yarn build
- name: Build Web (Desktop)
run: yarn nx build @affine/web
env:
NEXT_PUBLIC_FIREBASE_API_KEY: ${{ secrets.NEXT_PUBLIC_FIREBASE_API_KEY }}
NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN: ${{ secrets.NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN }}
NEXT_PUBLIC_FIREBASE_PROJECT_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_PROJECT_ID }}
NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET: ${{ secrets.NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET }}
NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID }}
NEXT_PUBLIC_FIREBASE_APP_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_APP_ID }}
NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID }}
API_SERVER_PROFILE: affine
ENABLE_DEBUG_PAGE: true
ENABLE_DEBUG_PAGE: 1
ENABLE_PLUGIN: true
ENABLE_ALL_PAGE_FILTER: true
ENABLE_LEGACY_PROVIDER: false
COVERAGE: true
ENABLE_PRELOADING: false

- name: Export static resources
run: yarn export
working-directory: apps/web
run: yarn workspace @affine/web export

- name: Upload static resources artifact
uses: actions/upload-artifact@v3
@@ -159,8 +154,7 @@ jobs:
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Run server tests
run: yarn test:coverage
working-directory: apps/server
run: yarn nx test:coverage @affine/server
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Upload server test coverage results
@@ -187,19 +181,11 @@ jobs:
uses: actions/download-artifact@v3
with:
name: storybook
path: ./packages/component/storybook-static
path: ./packages/storybook/storybook-static
- name: Run storybook tests
working-directory: ./packages/component
working-directory: ./packages/storybook
run: |
yarn exec concurrently -k -s first -n "SB,TEST" -c "magenta,blue" "yarn exec serve ./storybook-static -l 6006" "yarn exec wait-on tcp:6006 && yarn test-storybook --coverage"
- name: Upload storybook test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./packages/component/coverage/storybook/coverage-storybook.json
flags: storybook-test
name: affine
fail_ci_if_error: true
yarn exec concurrently -k -s first -n "SB,TEST" -c "magenta,blue" "yarn exec serve ./storybook-static -l 6006" "yarn exec wait-on tcp:6006 && yarn test"

e2e-test:
name: E2E Test
@@ -209,7 +195,7 @@ jobs:
matrix:
shard: [1, 2, 3, 4]
environment: development
needs: [build, build-storybook]
needs: [build-web, build-storybook]
services:
octobase:
image: ghcr.io/toeverything/cloud-self-hosted:nightly-latest
@@ -236,14 +222,14 @@ jobs:
uses: actions/download-artifact@v3
with:
name: storybook
path: ./packages/component/storybook-static
path: ./packages/storybook/storybook-static

- name: Wait for Octobase Ready
run: |
node ./scripts/wait-3000-healthz.mjs

- name: Run playwright tests
run: yarn test --forbid-only --shard=${{ matrix.shard }}/${{ strategy.job-total }}
run: yarn e2e --forbid-only --shard=${{ matrix.shard }}/${{ strategy.job-total }}
env:
COVERAGE: true

@@ -267,7 +253,7 @@ jobs:
path: ./test-results
if-no-files-found: ignore

dekstop-test:
desktop-test:
name: Desktop Test
runs-on: ${{ matrix.spec.os }}
environment: development
@@ -304,7 +290,7 @@ jobs:
target: x86_64-pc-windows-msvc,
test: true,
}
needs: [build]
needs: [build-web]
steps:
- uses: actions/checkout@v3
- name: Setup Node.js
@@ -318,27 +304,17 @@ jobs:
- name: Run unit tests
if: ${{ matrix.spec.test }}
shell: bash
run: |
rm -rf apps/electron/node_modules/better-sqlite3/build
yarn --cwd apps/electron/node_modules/better-sqlite3 run install
yarn test:unit
run: yarn nx test @affine/monorepo
env:
NATIVE_TEST: 'true'
- name: Build layers
run: yarn workspace @affine/electron build-layers

run: yarn workspace @affine/electron build
- name: Download static resource artifact
uses: actions/download-artifact@v3
with:
name: next-js-static
path: ./apps/electron/resources/web-static

- name: Rebuild Electron dependences
shell: bash
run: |
rm -rf apps/electron/node_modules/better-sqlite3/build
yarn workspace @affine/electron rebuild:for-electron --arch=${{ matrix.spec.arch }}

- name: Run desktop tests
if: ${{ matrix.spec.test && matrix.spec.os == 'ubuntu-latest' }}
run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- yarn workspace @affine/electron test
@@ -398,7 +374,7 @@ jobs:
uses: ./.github/actions/setup-node

- name: Unit Test
run: yarn run test:unit:coverage
run: yarn nx test:coverage @affine/monorepo

- name: Upload unit test coverage results
uses: codecov/codecov-action@v3
8 .github/workflows/languages-sync.yml vendored

@@ -13,14 +13,6 @@ on:
- '.github/workflows/languages-sync.yml'
workflow_dispatch:

# Cancels all previous workflow runs for pull requests that have not completed.
# See https://docs.github.com/en/actions/using-jobs/using-concurrency
concurrency:
# The concurrency group contains the workflow name and the branch name for
# pull requests or the commit hash for any other events.
group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.head_ref || github.sha }}
cancel-in-progress: true

jobs:
main:
runs-on: ubuntu-latest
20 .github/workflows/nightly-build.yml vendored

@@ -3,7 +3,10 @@ name: Build Canary Desktop App on Staging Branch
on:
push:
branches:
# 0.6.x-staging
- v[0-9]+.[0-9]+.x-staging
# 0.6.1-staging
- v[0-9]+.[0-9]+.[0-9]+-staging
paths-ignore:
- README.md
- .github/**
@@ -50,15 +53,6 @@ jobs:
working-directory: apps/electron
run: yarn generate-assets
env:
NEXT_PUBLIC_FIREBASE_API_KEY: ${{ secrets.NEXT_PUBLIC_FIREBASE_API_KEY }}
NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN: ${{ secrets.NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN }}
NEXT_PUBLIC_FIREBASE_PROJECT_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_PROJECT_ID }}
NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET: ${{ secrets.NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET }}
NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID }}
NEXT_PUBLIC_FIREBASE_APP_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_APP_ID }}
NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID }}
AFFINE_GOOGLE_CLIENT_ID: ${{ secrets.AFFINE_GOOGLE_CLIENT_ID }}
AFFINE_GOOGLE_CLIENT_SECRET: ${{ secrets.AFFINE_GOOGLE_CLIENT_SECRET }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
NEXT_PUBLIC_SENTRY_DSN: ${{ secrets.NEXT_PUBLIC_SENTRY_DSN }}
@@ -66,6 +60,7 @@ jobs:
API_SERVER_PROFILE: prod
ENABLE_TEST_PROPERTIES: false
ENABLE_IMAGE_PREVIEW_MODAL: false
ENABLE_BOOKMARK_OPERATION: true
RELEASE_VERSION: ${{ needs.set-build-version.outputs.version }}

- name: Upload Artifact (web-static)
@@ -127,14 +122,9 @@ jobs:
with:
name: before-make-web-static
path: apps/electron/resources/web-static
- name: Rebuild Electron dependences
shell: bash
run: |
rm -rf apps/electron/node_modules/better-sqlite3/build
yarn workspace @affine/electron rebuild:for-electron --arch=${{ matrix.spec.arch }}

- name: Build layers
run: yarn workspace @affine/electron build-layers
run: yarn workspace @affine/electron build

- name: Signing By Apple Developer ID
if: ${{ matrix.spec.platform == 'darwin' }}
19 .github/workflows/release-desktop-app.yml vendored

@@ -51,22 +51,13 @@ jobs:
- name: generate-assets
run: yarn workspace @affine/electron generate-assets
env:
NEXT_PUBLIC_FIREBASE_API_KEY: ${{ secrets.NEXT_PUBLIC_FIREBASE_API_KEY }}
NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN: ${{ secrets.NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN }}
NEXT_PUBLIC_FIREBASE_PROJECT_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_PROJECT_ID }}
NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET: ${{ secrets.NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET }}
NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID }}
NEXT_PUBLIC_FIREBASE_APP_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_APP_ID }}
NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID }}
AFFINE_GOOGLE_CLIENT_ID: ${{ secrets.AFFINE_GOOGLE_CLIENT_ID }}
AFFINE_GOOGLE_CLIENT_SECRET: ${{ secrets.AFFINE_GOOGLE_CLIENT_SECRET }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
NEXT_PUBLIC_SENTRY_DSN: ${{ secrets.NEXT_PUBLIC_SENTRY_DSN }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
API_SERVER_PROFILE: prod
ENABLE_TEST_PROPERTIES: false
ENABLE_IMAGE_PREVIEW_MODAL: false
ENABLE_BOOKMARK_OPERATION: true
RELEASE_VERSION: ${{ github.event.inputs.version }}

- name: Upload Artifact (web-static)
@@ -125,14 +116,8 @@ jobs:
name: before-make-web-static
path: apps/electron/resources/web-static

- name: Rebuild Electron dependences
shell: bash
run: |
rm -rf apps/electron/node_modules/better-sqlite3/build
yarn workspace @affine/electron rebuild:for-electron --arch=${{ matrix.spec.arch }}

- name: Build layers
run: yarn workspace @affine/electron build-layers
run: yarn workspace @affine/electron build

- name: Signing By Apple Developer ID
if: ${{ matrix.spec.platform == 'darwin' }}
8 .gitignore vendored

@@ -28,9 +28,9 @@ node_modules

# IDE - VSCode
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/settings.template.json
!.vscode/launch.template.json
!.vscode/extensions.json

# misc
@@ -66,7 +66,11 @@ i18n-generated.ts
# Cache
.eslintcache
next-env.d.ts
.rollup.cache

# Rust
target
*.node
tsconfig.node.tsbuildinfo
lib
affine.db
@@ -1 +1,4 @@
pnpm-lock.yaml
target
lib
test-results
@@ -6,6 +6,12 @@
"name": "Run Dev",
"request": "launch",
"type": "node-terminal"
},
{
"command": "yarn run dev:local",
"name": "Run Dev Locally",
"request": "launch",
"type": "node-terminal"
}
]
}
@@ -34,9 +34,11 @@
"packages/**/*.spec.tsx",
"apps/web/**/*.spec.ts",
"apps/web/**/*.spec.tsx",
"apps/electron/layers/**/*.spec.ts",
"apps/electron/src/**/*.spec.ts",
"tests/unit/**/*.spec.ts",
"tests/unit/**/*.spec.tsx"
],
"deepscan.enable": true
"rust-analyzer.check.extraEnv": {
"DATABASE_URL": "sqlite:affine.db"
}
}
File diff suppressed because one or more lines are too long
@@ -16,4 +16,4 @@ plugins:
- path: .yarn/plugins/@yarnpkg/plugin-workspace-tools.cjs
spec: '@yarnpkg/plugin-workspace-tools'

yarnPath: .yarn/releases/yarn-3.5.0.cjs
yarnPath: .yarn/releases/yarn-3.6.0.cjs
1446 Cargo.lock generated

File diff suppressed because it is too large
@@ -1,5 +1,8 @@
[workspace]
members = ["./packages/native"]
members = ["./packages/native", "./packages/native/schema"]

[profile.dev.package.sqlx-macros]
opt-level = 3

[profile.release]
lto = true
25 README.md

@@ -24,12 +24,13 @@ See https://github.com/all-?/all-contributors/issues/361#issuecomment-637166066

<!-- ALL-CONTRIBUTORS-BADGE:END -->

[?style=flat-square&logoColor=white&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAACXBIWXMAAADAAAAAwAEwd99eAAABjElEQVRYhe1W0U3DMBB9RfyTDeoNyAYNG2QDOgJsECYgGxA26AZ4hIxgJqCZ4PjIGV+tUxK7raqiPsmKdXe5e3fOs7IiIlwSdxetfiNw7QRKAD0Ax/ssrI5QgQOw5v03AJOTJHcCL1x84LVmWzJyJlBg7P4BwCvb3pmIAbBPykZEqaulEU7YHNva1HypxUsKqIS9EvbynASs0n3ss+ciUIsuO8VvhL9emjdFBa3YO8XvALwpsZNYSqBB0PwUWgRZNksSL5GhlN0ngGd+dkpsD6AG8IGlslxwTh2fa09EBc3Dir32rRysuQlUAL54/wTAcpePPAXHPsOTGXhSEv69rAlYpZOt6DSO29J4D/TRRLJk6AvtaZSY9PkCFYVLqI9i/NF5YkkECgrXa6P4fVEn4iolrhNxRQqBZu7FqMNdZiMqAUPj2KdGZyicu1dHzlGqBHxn2sdTR53bmeJ+ebJd7LtXhGH4uQEwd0ttAPzMxGi5/6BdxTuMej41Bs59gGP+CU+Cq/4tvxH4HwR+Ab3Uqr/VGbqEAAAAAElFTkSuQmCC>)](https://app.affine.pro)
[?style=flat-square&logoColor=white&logo=affine>)](https://app.affine.pro)
[](https://affine.pro/download)
[](https://affine.pro/download)
[](https://affine.pro/download)
[](https://affine.pro/download)

[](https://github.com/toeverything/AFFiNE/releases/latest)
[![stars-icon]](https://github.com/toeverything/AFFiNE)
[![All Contributors][all-contributors-badge]](#contributors)
[![codecov]](https://codecov.io/gh/toeverything/AFFiNE)
@@ -44,7 +45,7 @@ See https://github.com/all-?/all-contributors/issues/361#issuecomment-637166066

---

<div align="center">
<a href="http://affine.pro"><img src="https://img.shields.io/badge/-AFFiNE-06449d?style=social&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAEsAAABLCAMAAAAPkIrYAAAAP1BMVEU8b9w8b9w+b947cNw7b9w6b908b909b9w8b9w7b9w8b9w7cN08b9w7b908b9w7b9w8b907cNw8b9w8b91HcEx3NJCJAAAAFXRSTlP/3QWSgA+lHPlu6Di4XtIrxk/xRADGudUoAAAB9UlEQVR42tWYwbKjIBREG0GJKkRj/v9bZ1ZvRC99rzib11tTB9qqnKoW3/+X38vy7ifzQ1b/wk/8Q1bCv3y6Z6wFh2x2llIRGB6xRhzz6p+wVhRJD1gRZZYHrADYSyqsjFPGZtYbuFESesUysZXlcMnYyJpxTW5keQh5N7G6CUJCE2uHFNfEGiBmbmB1H4jxDawNcqbuPmtAJTtj6RZ0lpIwiR5jNmgfNtHHwLXPWfFYcS2NMdxkjac/dNaNCJPo3yf9pFuseHbDrBsRFguGs8te8Q4rXzTjVSPCIHp3FePKWbzi30xE+4zlBMmoJaGLfpLUmAmLiN4Xyibahy76WZRQMLJ2WX27on2oFvQVac8yi4p+J2forA0V8W1c++AVS1f1H6p9KKLHxk9RWKmsyB+VLC76gV65DLjokdg5KmsEMXsiDwXWSmTc9ezSoKJHoi9zUVihbMHfQOSsXB7Mrz1S1huKPde69sEsiKgNt8hYTjiWlAyENeu7IFe1D15RSEBN+yCiXw17K1RZm/w7UtJVWYN8f1ZyLlkVb2bT4vIVVrINH1dqX2YttkHmIWsfVWs646wcRFYis6fIVGpfYq1kjpGSW8kSRD+xYSmXRM0Ang9eSZioVdy/5pWaLqzIRyIpuVxYozvGf1m67I7pf/s3UXv+AP61NI2Y+BbSAAAAAElFTkSuQmCC" height=25></a>
<a href="http://affine.pro"><img src="https://img.shields.io/badge/-AFFiNE-06449d?style=social&logo=affine" height=25></a>

<a href="https://community.affine.pro"><img src="https://img.shields.io/badge/-Community-424549?style=social&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAAXNJREFUWEftlitLRUEURtdVEVExWUx2qxgNVouoXYtNDP4Tw20WtftAsItZrHaTYBJREZ98MAc248wcZxi4CGfSeezHmm/23kyPAa/egPPTAXQK/FsFBP7ldVDRZoqcgO9I+2bHy3ZIJBfTCPCZM1tqAxwBmzUBrNQNbEx+5b0B5oEN4NCBrAMnMaiUAuPAs3HU82TLEZwBqwGbaJ4UgKQ8CFR6SoEl4LIWwCJwZQCegKkWBWLHVKSActvdzgG3DqitDf3/VQBskBDALrDnAKXUo3ueAF5KinAf2DKOmnzD7l214bdbA6hC1XHZNQa8hSBC0hwDa57xDHDvvvWB7ciOZoE79+8CWPbsBGc769eFxJdWIKcuyIdRoG3W7AAC1dJkHDIOo8B78+4rEBo8r4AkLFk6Jk3HaeDBBTgHVmIAfpJUz+cAFXVBreQCvQYW/lqEjV1NAMUMqpAaxQMHyDnjYtuS+0BxstwaqJooFqxToFPgB5FuPCEB6XK2AAAAAElFTkSuQmCC" height=25></a>

@@ -69,11 +70,11 @@ See https://github.com/all-?/all-contributors/issues/361#issuecomment-637166066

Before we tell you how to get started with AFFiNE, we'd like to shamelessly plug our awesome user and developer communities across [official social platforms](https://community.affine.pro/c/start-here/)! Once you’re familiar with using the software, maybe you will share your wisdom with others and even consider joining the [AFFiNE Ambassador program](https://community.affine.pro/c/start-here/affine-ambassador) to help spread AFFiNE to the world.

## Getting started & Stay tunned with us.
## Getting started & staying tuned with us.

⚠️ Please note that AFFiNE is still under active development and is not yet ready for production use. ⚠️

[](https://app.affine.pro) No installation or registration required! Head over to our website and try it out now.
[](https://app.affine.pro) No installation or registration required! Head over to our website and try it out now.

[](https://community.affine.pro) Our wonderful community, where you can meet and engage with the team, developers and other like-minded enthusiastic user of AFFiNE.

@@ -82,7 +83,7 @@ Star us, and you will receive all releases notifications from GitHub without any

## Features

- **Hyper merged** — Write, draw and plan all at once. Assemble any blocks you love on any canvas you like to enjoy seamless transitions bewtween workflows with AFFiNE.
- **Hyper merged** — Write, draw and plan all at once. Assemble any blocks you love on any canvas you like to enjoy seamless transitions between workflows with AFFiNE.
- **Privacy focussed** — AFFiNE is built with your privacy in mind and is one of our key concerns. We want you to keep control of your data, allowing you to store it as you like, where you like while still being able to freely edit and view your data on-demand.
- **Offline-first** - With your privacy in mind we also decided to go offline-first. This means that AFFiNE can be used offline, whether you want to view or edit, with support for conflict-free merging when you are back online.
- **Clean, intuitive design** — With AFFiNE you can concentrate on editing with a clean and modern interface. Which is responsive, so it looks great on tablets too, and mobile support is coming in the future.
@@ -119,6 +120,15 @@ If you have questions, you are welcome to contact us. One of the best places to
| [@toeverything/y-indexeddb](packages/y-indexeddb) | IndexedDB database adapter for Yjs | [](https://www.npmjs.com/package/@toeverything/y-indexeddb) |
| [@toeverything/theme](packages/theme) | AFFiNE theme | [](https://www.npmjs.com/package/@toeverything/theme) |

## Plugins

> Plugins are a way to extend the functionality of AFFiNE.

| Name | |
| ------------------------------------------------ | ----------------------------------------- |
| [@affine/bookmark-block](plugins/bookmark-block) | A block for bookmarking a website |
| [@affine/copilot](plugins/copilot) | AI Copilot that help you document writing |

## Thanks

We would also like to give thanks to open-source projects that make AFFiNE possible:
@@ -126,11 +136,12 @@ We would also like to give thanks to open-source projects that make AFFiNE possi
- [BlockSuite](https://github.com/toeverything/BlockSuite) - 💠 BlockSuite is the open-source collaborative editor project behind AFFiNE.
- [OctoBase](https://github.com/toeverything/OctoBase) - 🐙 OctoBase is the open-source database behind AFFiNE, local-first, yet collaborative. A light-weight, scalable, data engine written in Rust.
- [Yjs](https://github.com/yjs/yjs) & [Yrs](https://github.com/y-crdt/y-crdt) - Fundamental support of CRDTs for our implementation on state management and data sync.
- [Next.js](https://github.com/vercel/next.js) - The React Framework.
- [Electron](https://github.com/electron/electron) - Build cross-platform desktop apps with JavaScript, HTML, and CSS.
- [React](https://github.com/facebook/react) - View layer support and web GUI framework.
- [Rust](https://github.com/rust-lang/rust) - High performance language that extends the ability and availability of our real-time backend, OctoBase.
- [Jotai](https://github.com/pmndrs/jotai) - Primitive and flexible state management for React.
- [MUI](https://github.com/mui/material-ui) - Our most used graphic UI component library.
- [async-call-rpc](https://github.com/Jack-Works/async-call-rpc) - A lightweight JSON RPC client & server.
- Other upstream [dependencies](https://github.com/toeverything/AFFiNE/network/dependencies).

Thanks a lot to the community for providing such powerful and simple libraries, so that we can focus more on the implementation of the product logic, and we hope that in the future our projects will also provide a more easy-to-use knowledge base for everyone.
@@ -140,7 +151,7 @@ Thanks a lot to the community for providing such powerful and simple libraries,
We would like to express our gratitude to all the individuals who have already contributed to AFFiNE! If you have any AFFiNE-related project, documentation, tool or template, please feel free to contribute it by submitting a pull request to our curated list on GitHub: [awesome-affine](https://github.com/toeverything/awesome-affine).

<a href="https://github.com/toeverything/affine/graphs/contributors">
<img src="https://user-images.githubusercontent.com/5910926/237263745-36bb975d-83d6-4a7c-a152-d9ad020e5023.png" />
<img alt="contributors" src="https://opencollective.com/affine/contributors.svg?width=890&button=false" />
</a>

## Self-Host

1 apps/electron/.gitignore vendored

@@ -1,5 +1,6 @@
*.autogen.*
dist
e2e-dist-*

resources/web-static
@@ -7,6 +7,7 @@ To run AFFiNE Desktop Client Application locally, run the following commands:

```sh
# in repo root
yarn install
yarn workspace @affine/native build
yarn dev

# in apps/electron
@@ -16,22 +17,6 @@ yarn dev # or yarn prod for production build

## Troubleshooting

### better-sqlite3 error

When running tests or starting electron, you may encounter the following error:

> Error: The module 'apps/electron/node_modules/better-sqlite3/build/Release/better_sqlite3.node'

This is due to the fact that the `better-sqlite3` package is built for the Node.js version in Electron & in your machine. To fix this, run the following command based on different cases:

```sh
# for running unit tests, we are not using Electron's node:
yarn rebuild better-sqlite3

# for running Electron, we are using Electron's node:
yarn postinstall
```

## Credits

Most of the boilerplate code is generously borrowed from the following
@@ -1,4 +1,5 @@
/* eslint-disable @typescript-eslint/no-var-requires */

const { z } = require('zod');

const {
@@ -51,8 +52,6 @@ module.exports = {
teamId: process.env.APPLE_TEAM_ID,
}
: undefined,
// do we need the following line?
extraResource: ['./resources/app-update.yml'],
},
makers: [
{
@@ -94,7 +93,7 @@ module.exports = {
config: {
name: 'AFFiNE',
setupIcon: icoPath,
// loadingGif: './resources/icons/loading.gif',
loadingGif: './resources/icons/affine_installing.gif',
},
},
],
@@ -104,6 +103,27 @@ module.exports = {
// so stable and canary will not share the same app data
packageJson.productName = productName;
},
prePackage: async () => {
const { rm, cp } = require('node:fs/promises');
const { resolve } = require('node:path');

await rm(
resolve(__dirname, './node_modules/@toeverything/plugin-infra'),
{
recursive: true,
force: true,
}
);

await cp(
resolve(__dirname, '../../packages/plugin-infra'),
resolve(__dirname, './node_modules/@toeverything/plugin-infra'),
{
recursive: true,
force: true,
}
);
},
generateAssets: async (_, platform, arch) => {
if (process.env.SKIP_GENERATE_ASSETS) {
return;
@@ -1,7 +0,0 @@
/* eslint-disable @typescript-eslint/consistent-type-imports */
// This file contains the main process events
// It will guide preload and main process on the correct event types and payloads

export type MainIPCHandlerMap = typeof import('./main/src/exposed').handlers;

export type MainIPCEventMap = typeof import('./main/src/exposed').events;
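The deleted constraints.ts derives the whole IPC surface from the handlers object with `typeof import(...)`. A self-contained sketch of that pattern, with made-up handler names (not the real AFFiNE handlers) and a loosely typed invoke helper; the dispatch helper in the deleted test further below resorts to `// @ts-ignore` to recover full parameter typing:

```ts
// Sketch of deriving an IPC handler map type from a plain handlers object,
// as the removed constraints.ts did via `typeof import('./main/src/exposed')`.
// The handlers here are invented for illustration only.
const handlers = {
  workspace: {
    list: async (): Promise<string[]> => [],
    delete: async (id: string): Promise<void> => void id,
  },
  ui: {
    handleThemeChange: async (theme: 'dark' | 'light'): Promise<void> => void theme,
  },
};

type MainIPCHandlerMap = typeof handlers;

// Namespace and function names are checked against the derived map at compile
// time; argument types are intentionally left loose in this sketch.
async function invoke<
  T extends keyof MainIPCHandlerMap,
  F extends keyof MainIPCHandlerMap[T]
>(namespace: T, fn: F, ...args: unknown[]): Promise<unknown> {
  const handler = handlers[namespace][fn] as unknown as (
    ...a: unknown[]
  ) => Promise<unknown>;
  return handler(...args);
}

// `invoke('ui', 'handleThemeChange', 'dark')` compiles;
// a misspelled name such as `invoke('ui', 'wrongName')` is rejected by the type checker.
void invoke('workspace', 'list');
```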
@@ -1,12 +0,0 @@
import { app } from 'electron';

export const appContext = {
get appName() {
return app.name;
},
get appDataPath() {
return app.getPath('sessionData');
},
};

export type AppContext = typeof appContext;
@@ -1,38 +0,0 @@
import { Subject } from 'rxjs';

import type { MainEventListener } from './type';

interface DBFilePathMeta {
workspaceId: string;
path: string;
realPath: string;
}

export const dbSubjects = {
// emit workspace ids
dbFileMissing: new Subject<string>(),
// emit workspace ids
dbFileUpdate: new Subject<string>(),
dbFilePathChange: new Subject<DBFilePathMeta>(),
};

export const dbEvents = {
onDBFileMissing: (fn: (workspaceId: string) => void) => {
const sub = dbSubjects.dbFileMissing.subscribe(fn);
return () => {
sub.unsubscribe();
};
},
onDBFileUpdate: (fn: (workspaceId: string) => void) => {
const sub = dbSubjects.dbFileUpdate.subscribe(fn);
return () => {
sub.unsubscribe();
};
},
onDBFilePathChange: (fn: (meta: DBFilePathMeta) => void) => {
const sub = dbSubjects.dbFilePathChange.subscribe(fn);
return () => {
sub.unsubscribe();
};
},
} satisfies Record<string, MainEventListener>;
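The removed wrappers above all follow one shape: subscribe a callback to an RxJS Subject and hand back the unsubscribe function (the `MainEventListener` contract deleted further below). A small self-contained sketch of that pattern with a hypothetical consumer:

```ts
import { Subject } from 'rxjs';

// Same shape as the removed dbEvents helpers: subscribing returns a cleanup
// function, so callers never touch the Subscription object directly.
const dbFileUpdate = new Subject<string>();

const onDBFileUpdate = (fn: (workspaceId: string) => void) => {
  const sub = dbFileUpdate.subscribe(fn);
  return () => {
    sub.unsubscribe();
  };
};

// Hypothetical consumer: register, receive one update, then clean up.
const unsubscribe = onDBFileUpdate(workspaceId => {
  console.log('db file updated for workspace', workspaceId);
});

dbFileUpdate.next('workspace-1'); // logged
unsubscribe();
dbFileUpdate.next('workspace-2'); // no longer logged
```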
@@ -1,9 +0,0 @@
export * from './register';

import { applicationMenuSubjects } from './application-menu';
import { dbSubjects } from './db';

export const subjects = {
db: dbSubjects,
applicationMenu: applicationMenuSubjects,
};
@@ -1 +0,0 @@
export type MainEventListener = (...args: any[]) => () => void;
@@ -1,5 +0,0 @@
import { allEvents as events } from './events';
import { allHandlers as handlers } from './handlers';

// this will be used by preload script to expose all handlers and events to the renderer process
export { events, handlers };
@@ -1,501 +0,0 @@
import assert from 'node:assert';
import path from 'node:path';

import fs from 'fs-extra';
import type { Subscription } from 'rxjs';
import { v4 } from 'uuid';
import { afterEach, beforeEach, describe, expect, test, vi } from 'vitest';
import * as Y from 'yjs';

import type { MainIPCHandlerMap } from '../../../../constraints';

const registeredHandlers = new Map<
string,
((...args: any[]) => Promise<any>)[]
>();

const delay = (ms: number) => new Promise(r => setTimeout(r, ms));

type WithoutFirstParameter<T> = T extends (_: any, ...args: infer P) => infer R
? (...args: P) => R
: T;

// common mock dispatcher for ipcMain.handle AND app.on
// alternatively, we can use single parameter for T & F, eg, dispatch('workspace:list'),
// however this is too hard to be typed correctly
async function dispatch<
T extends keyof MainIPCHandlerMap,
F extends keyof MainIPCHandlerMap[T]
>(
namespace: T,
functionName: F,
// @ts-ignore
...args: Parameters<WithoutFirstParameter<MainIPCHandlerMap[T][F]>>
): // @ts-ignore
ReturnType<MainIPCHandlerMap[T][F]> {
// @ts-ignore
const handlers = registeredHandlers.get(namespace + ':' + functionName);
assert(handlers);

// we only care about the first handler here
return await handlers[0](null, ...args);
}

const SESSION_DATA_PATH = path.join(__dirname, './tmp', 'affine-test');

const browserWindow = {
isDestroyed: () => {
return false;
},
setWindowButtonVisibility: (_v: boolean) => {
// will be stubbed later
},
webContents: {
send: (_type: string, ..._args: any[]) => {
// will be stubbed later
},
},
};

const ipcMain = {
handle: (key: string, callback: (...args: any[]) => Promise<any>) => {
const handlers = registeredHandlers.get(key) || [];
handlers.push(callback);
registeredHandlers.set(key, handlers);
},
setMaxListeners: (_n: number) => {
// noop
},
};

const nativeTheme = {
themeSource: 'light',
};

function compareBuffer(a: Uint8Array | null, b: Uint8Array | null) {
if (
(a === null && b === null) ||
a === null ||
b === null ||
a.length !== b.length
) {
return false;
}
for (let i = 0; i < a.length; i++) {
if (a[i] !== b[i]) {
return false;
}
}
return true;
}

const electronModule = {
app: {
getPath: (name: string) => {
assert(name === 'sessionData');
return SESSION_DATA_PATH;
},
name: 'affine-test',
on: (name: string, callback: (...args: any[]) => any) => {
const handlers = registeredHandlers.get(name) || [];
handlers.push(callback);
registeredHandlers.set(name, handlers);
},
addEventListener: (...args: any[]) => {
// @ts-ignore
electronModule.app.on(...args);
},
removeEventListener: () => {},
},
BrowserWindow: {
getAllWindows: () => {
return [browserWindow];
},
},
nativeTheme: nativeTheme,
ipcMain,
shell: {} as Partial<Electron.Shell>,
dialog: {} as Partial<Electron.Dialog>,
};

// dynamically import handlers so that we can inject local variables to mocks
vi.doMock('electron', () => {
return electronModule;
});

let connectableSubscription: Subscription;

beforeEach(async () => {
const { registerHandlers } = await import('../register');
registerHandlers();

// should also register events
const { registerEvents } = await import('../../events');
registerEvents();
await fs.mkdirp(SESSION_DATA_PATH);
const { database$ } = await import('../db/ensure-db');

connectableSubscription = database$.connect();
});

afterEach(async () => {
// reset registered handlers
registeredHandlers.get('before-quit')?.forEach(fn => fn());

connectableSubscription.unsubscribe();

await fs.remove(SESSION_DATA_PATH);
});

describe('ensureSQLiteDB', () => {
test('should create db file on connection if it does not exist', async () => {
const id = v4();
const { ensureSQLiteDB } = await import('../db/ensure-db');
const workspaceDB = await ensureSQLiteDB(id);
const file = workspaceDB.path;
const fileExists = await fs.pathExists(file);
expect(fileExists).toBe(true);
});

test('when db file is removed', async () => {
// stub webContents.send
const sendSpy = vi.spyOn(browserWindow.webContents, 'send');
const id = v4();
const { ensureSQLiteDB } = await import('../db/ensure-db');
let workspaceDB = await ensureSQLiteDB(id);
const file = workspaceDB.path;
const fileExists = await fs.pathExists(file);
expect(fileExists).toBe(true);

// Can't remove file on Windows, because the sqlite is still holding the file handle
if (process.platform === 'win32') {
return;
}

await fs.remove(file);

// wait for 2000ms for file watcher to detect file removal
await delay(2000);

expect(sendSpy).toBeCalledWith('db:onDBFileMissing', id);

// ensureSQLiteDB should recreate the db file
workspaceDB = await ensureSQLiteDB(id);
const fileExists2 = await fs.pathExists(file);
expect(fileExists2).toBe(true);
sendSpy.mockRestore();
});

test('when db file is updated', async () => {
const id = v4();
const { ensureSQLiteDB } = await import('../db/ensure-db');
const { dbSubjects } = await import('../../events/db');
const workspaceDB = await ensureSQLiteDB(id);
const file = workspaceDB.path;
const fileExists = await fs.pathExists(file);
expect(fileExists).toBe(true);
const dbUpdateSpy = vi.spyOn(dbSubjects.dbFileUpdate, 'next');
await delay(100);
// writes some data to the db file
await fs.appendFile(file, 'random-data', { encoding: 'binary' });
// write again
await fs.appendFile(file, 'random-data', { encoding: 'binary' });

// wait for 2000ms for file watcher to detect file change
await delay(2000);

expect(dbUpdateSpy).toBeCalledWith(id);
dbUpdateSpy.mockRestore();
});
});

describe('workspace handlers', () => {
test('list all workspace ids', async () => {
const ids = [v4(), v4()];
const { ensureSQLiteDB } = await import('../db/ensure-db');
await Promise.all(ids.map(id => ensureSQLiteDB(id)));
const list = await dispatch('workspace', 'list');
expect(list.map(([id]) => id).sort()).toEqual(ids.sort());
});

test('delete workspace', async () => {
// @TODO dispatch is hanging on Windows
if (process.platform === 'win32') {
return;
}
const ids = [v4(), v4()];
const { ensureSQLiteDB } = await import('../db/ensure-db');
await Promise.all(ids.map(id => ensureSQLiteDB(id)));
await dispatch('workspace', 'delete', ids[1]);
const list = await dispatch('workspace', 'list');
expect(list.map(([id]) => id)).toEqual([ids[0]]);
});
});

describe('UI handlers', () => {
test('theme-change', async () => {
await dispatch('ui', 'handleThemeChange', 'dark');
expect(nativeTheme.themeSource).toBe('dark');
await dispatch('ui', 'handleThemeChange', 'light');
expect(nativeTheme.themeSource).toBe('light');
});

test('sidebar-visibility-change (macOS)', async () => {
vi.stubGlobal('process', { platform: 'darwin' });
const setWindowButtonVisibility = vi.fn();
browserWindow.setWindowButtonVisibility = setWindowButtonVisibility;
await dispatch('ui', 'handleSidebarVisibilityChange', true);
expect(setWindowButtonVisibility).toBeCalledWith(true);
await dispatch('ui', 'handleSidebarVisibilityChange', false);
expect(setWindowButtonVisibility).toBeCalledWith(false);
vi.unstubAllGlobals();
});

test('sidebar-visibility-change (non-macOS)', async () => {
vi.stubGlobal('process', { platform: 'linux' });
const setWindowButtonVisibility = vi.fn();
browserWindow.setWindowButtonVisibility = setWindowButtonVisibility;
await dispatch('ui', 'handleSidebarVisibilityChange', true);
expect(setWindowButtonVisibility).not.toBeCalled();
vi.unstubAllGlobals();
});
});

describe('db handlers', () => {
test('apply doc and get doc updates', async () => {
const workspaceId = v4();
const bin = await dispatch('db', 'getDocAsUpdates', workspaceId);
// ? is this a good test?
expect(bin.every((byte: number) => byte === 0)).toBe(true);

const ydoc = new Y.Doc();
const ytext = ydoc.getText('test');
ytext.insert(0, 'hello world');
const bin2 = Y.encodeStateAsUpdate(ydoc);

await dispatch('db', 'applyDocUpdate', workspaceId, bin2);

const bin3 = await dispatch('db', 'getDocAsUpdates', workspaceId);
const ydoc2 = new Y.Doc();
Y.applyUpdate(ydoc2, bin3);
const ytext2 = ydoc2.getText('test');
expect(ytext2.toString()).toBe('hello world');
});

test('get non existent blob', async () => {
const workspaceId = v4();
const bin = await dispatch('db', 'getBlob', workspaceId, 'non-existent-id');
expect(bin).toBeNull();
});

test('list blobs (empty)', async () => {
const workspaceId = v4();
const list = await dispatch('db', 'getPersistedBlobs', workspaceId);
expect(list).toEqual([]);
});

test('CRUD blobs', async () => {
const testBin = new Uint8Array([1, 2, 3, 4, 5]);
const testBin2 = new Uint8Array([6, 7, 8, 9, 10]);
const workspaceId = 'test-workspace-id';

// add blob
await dispatch('db', 'addBlob', workspaceId, 'testBin', testBin);

// get blob
expect(
compareBuffer(
await dispatch('db', 'getBlob', workspaceId, 'testBin'),
testBin
)
).toBe(true);

// add another blob
await dispatch('db', 'addBlob', workspaceId, 'testBin2', testBin2);
expect(
compareBuffer(
await dispatch('db', 'getBlob', workspaceId, 'testBin2'),
testBin2
)
).toBe(true);

// list blobs
let lists = await dispatch('db', 'getPersistedBlobs', workspaceId);
expect(lists).toHaveLength(2);
expect(lists).toContain('testBin');
expect(lists).toContain('testBin2');

// delete blob
await dispatch('db', 'deleteBlob', workspaceId, 'testBin');
lists = await dispatch('db', 'getPersistedBlobs', workspaceId);
expect(lists).toEqual(['testBin2']);
});
});

describe('dialog handlers', () => {
test('revealDBFile', async () => {
const mockShowItemInFolder = vi.fn();
electronModule.shell.showItemInFolder = mockShowItemInFolder;

const id = v4();
const { ensureSQLiteDB } = await import('../db/ensure-db');
const db = await ensureSQLiteDB(id);

await dispatch('dialog', 'revealDBFile', id);
expect(mockShowItemInFolder).toBeCalledWith(db.path);
});

test('saveDBFileAs (skipped)', async () => {
const mockShowSaveDialog = vi.fn(() => {
return { filePath: undefined };
}) as any;
const mockShowItemInFolder = vi.fn();
electronModule.dialog.showSaveDialog = mockShowSaveDialog;
electronModule.shell.showItemInFolder = mockShowItemInFolder;

const id = v4();
const { ensureSQLiteDB } = await import('../db/ensure-db');
await ensureSQLiteDB(id);

await dispatch('dialog', 'saveDBFileAs', id);
expect(mockShowSaveDialog).toBeCalled();
expect(mockShowItemInFolder).not.toBeCalled();
electronModule.dialog = {};
electronModule.shell = {};
});

test('saveDBFileAs', async () => {
const newSavedPath = path.join(SESSION_DATA_PATH, 'saved-to');
const mockShowSaveDialog = vi.fn(() => {
return { filePath: newSavedPath };
}) as any;
const mockShowItemInFolder = vi.fn();
electronModule.dialog.showSaveDialog = mockShowSaveDialog;
electronModule.shell.showItemInFolder = mockShowItemInFolder;

const id = v4();
const { ensureSQLiteDB } = await import('../db/ensure-db');
await ensureSQLiteDB(id);

await dispatch('dialog', 'saveDBFileAs', id);
expect(mockShowSaveDialog).toBeCalled();
expect(mockShowItemInFolder).toBeCalledWith(newSavedPath);

// check if file is saved to new path
expect(await fs.exists(newSavedPath)).toBe(true);
});

test('loadDBFile (skipped)', async () => {
const mockShowOpenDialog = vi.fn(() => {
return { filePaths: undefined };
}) as any;
electronModule.dialog.showOpenDialog = mockShowOpenDialog;

const res = await dispatch('dialog', 'loadDBFile');
expect(mockShowOpenDialog).toBeCalled();
expect(res.canceled).toBe(true);
});

test('loadDBFile (error, in app-data)', async () => {
const mockShowOpenDialog = vi.fn(() => {
return {
filePaths: [path.join(SESSION_DATA_PATH, 'workspaces')],
};
}) as any;
electronModule.dialog.showOpenDialog = mockShowOpenDialog;

const res = await dispatch('dialog', 'loadDBFile');
expect(mockShowOpenDialog).toBeCalled();
expect(res.error).toBe('DB_FILE_PATH_INVALID');
});

test('loadDBFile (error, not a valid db file)', async () => {
// create a random db file
const basePath = path.join(SESSION_DATA_PATH, 'random-path');
const dbPath = path.join(basePath, 'xxx.db');
await fs.ensureDir(basePath);
await fs.writeFile(dbPath, 'hello world');

const mockShowOpenDialog = vi.fn(() => {
return { filePaths: [dbPath] };
}) as any;
electronModule.dialog.showOpenDialog = mockShowOpenDialog;

const res = await dispatch('dialog', 'loadDBFile');
expect(mockShowOpenDialog).toBeCalled();
expect(res.error).toBe('DB_FILE_INVALID');

electronModule.dialog = {};
});

test('loadDBFile', async () => {
// we use ensureSQLiteDB to create a valid db file
const id = v4();
const { ensureSQLiteDB } = await import('../db/ensure-db');
const db = await ensureSQLiteDB(id);

// copy db file to dbPath
const basePath = path.join(SESSION_DATA_PATH, 'random-path');
const originDBFilePath = path.join(basePath, 'xxx.db');
await fs.ensureDir(basePath);
await fs.copyFile(db.path, originDBFilePath);

// on Windows, we skip this test because we can't delete the db file
if (process.platform === 'win32') {
return;
}

// remove db
await fs.remove(db.path);

// try load originDBFilePath
const mockShowOpenDialog = vi.fn(() => {
return { filePaths: [originDBFilePath] };
}) as any;
electronModule.dialog.showOpenDialog = mockShowOpenDialog;

const res = await dispatch('dialog', 'loadDBFile');
expect(mockShowOpenDialog).toBeCalled();
expect(res.workspaceId).not.toBeUndefined();

const importedDb = await ensureSQLiteDB(res.workspaceId!);
expect(await fs.realpath(importedDb.path)).toBe(originDBFilePath);
expect(importedDb.path).not.toBe(originDBFilePath);

// try load it again, will trigger error (db file already loaded)
const res2 = await dispatch('dialog', 'loadDBFile');
expect(res2.error).toBe('DB_FILE_ALREADY_LOADED');
});

test('moveDBFile', async () => {
|
||||
const newPath = path.join(SESSION_DATA_PATH, 'xxx');
|
||||
const mockShowSaveDialog = vi.fn(() => {
|
||||
return { filePath: newPath };
|
||||
}) as any;
|
||||
electronModule.dialog.showSaveDialog = mockShowSaveDialog;
|
||||
|
||||
const id = v4();
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
await ensureSQLiteDB(id);
|
||||
const res = await dispatch('dialog', 'moveDBFile', id);
|
||||
expect(mockShowSaveDialog).toBeCalled();
|
||||
expect(res.filePath).toBe(newPath);
|
||||
electronModule.dialog = {};
|
||||
});
|
||||
|
||||
test('moveDBFile (skipped)', async () => {
|
||||
const mockShowSaveDialog = vi.fn(() => {
|
||||
return { filePath: null };
|
||||
}) as any;
|
||||
electronModule.dialog.showSaveDialog = mockShowSaveDialog;
|
||||
|
||||
const id = v4();
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
await ensureSQLiteDB(id);
|
||||
|
||||
const res = await dispatch('dialog', 'moveDBFile', id);
|
||||
expect(mockShowSaveDialog).toBeCalled();
|
||||
expect(res.filePath).toBe(undefined);
|
||||
electronModule.dialog = {};
|
||||
});
|
||||
});
|
||||
@@ -1,160 +0,0 @@
|
||||
import type { NotifyEvent } from '@affine/native/event';
|
||||
import { createFSWatcher } from '@affine/native/fs-watcher';
|
||||
import { app } from 'electron';
|
||||
import {
|
||||
connectable,
|
||||
defer,
|
||||
from,
|
||||
fromEvent,
|
||||
identity,
|
||||
lastValueFrom,
|
||||
Observable,
|
||||
ReplaySubject,
|
||||
Subject,
|
||||
} from 'rxjs';
|
||||
import {
|
||||
debounceTime,
|
||||
exhaustMap,
|
||||
filter,
|
||||
groupBy,
|
||||
ignoreElements,
|
||||
mergeMap,
|
||||
shareReplay,
|
||||
startWith,
|
||||
switchMap,
|
||||
take,
|
||||
takeUntil,
|
||||
tap,
|
||||
} from 'rxjs/operators';
|
||||
|
||||
import { appContext } from '../../context';
|
||||
import { subjects } from '../../events';
|
||||
import { logger } from '../../logger';
|
||||
import { ts } from '../../utils';
|
||||
import type { WorkspaceSQLiteDB } from './sqlite';
|
||||
import { openWorkspaceDatabase } from './sqlite';
|
||||
|
||||
const databaseInput$ = new Subject<string>();
|
||||
export const databaseConnector$ = new ReplaySubject<WorkspaceSQLiteDB>();
|
||||
|
||||
const groupedDatabaseInput$ = databaseInput$.pipe(groupBy(identity));
|
||||
|
||||
export const database$ = connectable(
|
||||
groupedDatabaseInput$.pipe(
|
||||
mergeMap(workspaceDatabase$ =>
|
||||
workspaceDatabase$.pipe(
|
||||
// only open the first db with the same workspaceId, and emit it to the downstream
|
||||
exhaustMap(workspaceId => {
|
||||
logger.info('[ensureSQLiteDB] open db connection', workspaceId);
|
||||
return from(openWorkspaceDatabase(appContext, workspaceId)).pipe(
|
||||
switchMap(db => {
|
||||
return startWatchingDBFile(db).pipe(
|
||||
// ignore all events and only emit the db to the downstream
|
||||
ignoreElements(),
|
||||
startWith(db)
|
||||
);
|
||||
})
|
||||
);
|
||||
}),
|
||||
shareReplay(1)
|
||||
)
|
||||
),
|
||||
tap({
|
||||
complete: () => {
|
||||
logger.info('[FSWatcher] close all watchers');
|
||||
createFSWatcher().close();
|
||||
},
|
||||
})
|
||||
),
|
||||
{
|
||||
connector: () => databaseConnector$,
|
||||
resetOnDisconnect: true,
|
||||
}
|
||||
);
|
||||
|
||||
export const databaseConnectableSubscription = database$.connect();
|
||||
|
||||
// 1. File delete
|
||||
// 2. File move
|
||||
// - on Linux, it's `type: { modify: { kind: 'rename', mode: 'from' } }`
|
||||
// - on Windows, it's `type: { remove: { kind: 'any' } }`
|
||||
// - on macOS, it's `type: { modify: { kind: 'rename', mode: 'any' } }`
|
||||
export function isRemoveOrMoveEvent(event: NotifyEvent) {
|
||||
return (
|
||||
typeof event.type === 'object' &&
|
||||
('remove' in event.type ||
|
||||
('modify' in event.type &&
|
||||
event.type.modify.kind === 'rename' &&
|
||||
(event.type.modify.mode === 'from' ||
|
||||
event.type.modify.mode === 'any')))
|
||||
);
|
||||
}
|
||||
|
||||
// if we removed the file, we will stop watching it
|
||||
function startWatchingDBFile(db: WorkspaceSQLiteDB) {
|
||||
const FSWatcher = createFSWatcher();
|
||||
return new Observable<NotifyEvent>(subscriber => {
|
||||
logger.info('[FSWatcher] start watching db file', db.workspaceId);
|
||||
const subscription = FSWatcher.watch(db.path, {
|
||||
recursive: false,
|
||||
}).subscribe(
|
||||
event => {
|
||||
logger.info('[FSWatcher]', event);
|
||||
subscriber.next(event);
|
||||
// remove file or move file, complete the observable and close db
|
||||
if (isRemoveOrMoveEvent(event)) {
|
||||
subscriber.complete();
|
||||
}
|
||||
},
|
||||
err => {
|
||||
subscriber.error(err);
|
||||
}
|
||||
);
|
||||
return () => {
|
||||
// destroy on unsubscribe
|
||||
logger.info('[FSWatcher] cleanup db file watcher', db.workspaceId);
|
||||
db.destroy();
|
||||
subscription.unsubscribe();
|
||||
};
|
||||
}).pipe(
|
||||
debounceTime(1000),
|
||||
filter(event => !isRemoveOrMoveEvent(event)),
|
||||
tap({
|
||||
next: () => {
|
||||
logger.info(
|
||||
'[FSWatcher] db file changed on disk',
|
||||
db.workspaceId,
|
||||
ts() - db.lastUpdateTime,
|
||||
'ms'
|
||||
);
|
||||
db.reconnectDB();
|
||||
subjects.db.dbFileUpdate.next(db.workspaceId);
|
||||
},
|
||||
complete: () => {
|
||||
// todo: there is still a possibility that the file is deleted
|
||||
// but we didn't get the event soon enough and another event tries to
|
||||
// access the db
|
||||
logger.info('[FSWatcher] db file missing', db.workspaceId);
|
||||
subjects.db.dbFileMissing.next(db.workspaceId);
|
||||
db.destroy();
|
||||
},
|
||||
}),
|
||||
takeUntil(defer(() => fromEvent(app, 'before-quit')))
|
||||
);
|
||||
}
|
||||
|
||||
export function ensureSQLiteDB(id: string) {
|
||||
const deferValue = lastValueFrom(
|
||||
database$.pipe(
|
||||
filter(db => db.workspaceId === id && db.db.open),
|
||||
take(1),
|
||||
tap({
|
||||
error: err => {
|
||||
logger.error('[ensureSQLiteDB] error', err);
|
||||
},
|
||||
})
|
||||
)
|
||||
);
|
||||
databaseInput$.next(id);
|
||||
return deferValue;
|
||||
}
|
||||
@@ -1,42 +0,0 @@
|
||||
import fs from 'fs-extra';
|
||||
|
||||
import { appContext } from '../../context';
|
||||
import type { NamespaceHandlers } from '../type';
|
||||
import { ensureSQLiteDB } from './ensure-db';
|
||||
|
||||
export const dbHandlers = {
|
||||
getDocAsUpdates: async (_, id: string) => {
|
||||
const workspaceDB = await ensureSQLiteDB(id);
|
||||
return workspaceDB.getDocAsUpdates();
|
||||
},
|
||||
applyDocUpdate: async (_, id: string, update: Uint8Array) => {
|
||||
const workspaceDB = await ensureSQLiteDB(id);
|
||||
return workspaceDB.applyUpdate(update);
|
||||
},
|
||||
addBlob: async (_, workspaceId: string, key: string, data: Uint8Array) => {
|
||||
const workspaceDB = await ensureSQLiteDB(workspaceId);
|
||||
return workspaceDB.addBlob(key, data);
|
||||
},
|
||||
getBlob: async (_, workspaceId: string, key: string) => {
|
||||
const workspaceDB = await ensureSQLiteDB(workspaceId);
|
||||
return workspaceDB.getBlob(key);
|
||||
},
|
||||
deleteBlob: async (_, workspaceId: string, key: string) => {
|
||||
const workspaceDB = await ensureSQLiteDB(workspaceId);
|
||||
return workspaceDB.deleteBlob(key);
|
||||
},
|
||||
getPersistedBlobs: async (_, workspaceId: string) => {
|
||||
const workspaceDB = await ensureSQLiteDB(workspaceId);
|
||||
return workspaceDB.getPersistentBlobKeys();
|
||||
},
|
||||
getDefaultStorageLocation: async () => {
|
||||
return appContext.appDataPath;
|
||||
},
|
||||
getDBFilePath: async (_, workspaceId: string) => {
|
||||
const workspaceDB = await ensureSQLiteDB(workspaceId);
|
||||
return {
|
||||
path: workspaceDB.path,
|
||||
realPath: await fs.realpath(workspaceDB.path),
|
||||
};
|
||||
},
|
||||
} satisfies NamespaceHandlers;
|
||||
@@ -1,247 +0,0 @@
|
||||
import path from 'node:path';
|
||||
|
||||
import type { Database } from 'better-sqlite3';
|
||||
import sqlite from 'better-sqlite3';
|
||||
import fs from 'fs-extra';
|
||||
import * as Y from 'yjs';
|
||||
|
||||
import type { AppContext } from '../../context';
|
||||
import { dbSubjects } from '../../events/db';
|
||||
import { logger } from '../../logger';
|
||||
import { ts } from '../../utils';
|
||||
|
||||
const schemas = [
|
||||
`CREATE TABLE IF NOT EXISTS "updates" (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
data BLOB NOT NULL,
|
||||
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
|
||||
)`,
|
||||
`CREATE TABLE IF NOT EXISTS "blobs" (
|
||||
key TEXT PRIMARY KEY NOT NULL,
|
||||
data BLOB NOT NULL,
|
||||
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
|
||||
)`,
|
||||
];
|
||||
|
||||
interface UpdateRow {
|
||||
id: number;
|
||||
data: Buffer;
|
||||
timestamp: string;
|
||||
}
|
||||
|
||||
interface BlobRow {
|
||||
key: string;
|
||||
data: Buffer;
|
||||
timestamp: string;
|
||||
}
|
||||
|
||||
const SQLITE_ORIGIN = Symbol('sqlite-origin');
|
||||
|
||||
export class WorkspaceSQLiteDB {
|
||||
db: Database;
|
||||
ydoc = new Y.Doc();
|
||||
firstConnect = false;
|
||||
lastUpdateTime = ts();
|
||||
destroyed = false;
|
||||
|
||||
constructor(public path: string, public workspaceId: string) {
|
||||
this.db = this.reconnectDB();
|
||||
}
|
||||
|
||||
// release resources
|
||||
destroy = () => {
|
||||
this.db?.close();
|
||||
this.ydoc.destroy();
|
||||
};
|
||||
|
||||
getWorkspaceName = () => {
|
||||
return this.ydoc.getMap('space:meta').get('name') as string;
|
||||
};
|
||||
|
||||
reconnectDB = () => {
|
||||
logger.log('[WorkspaceSQLiteDB] open db', this.workspaceId);
|
||||
if (this.db) {
|
||||
this.db.close();
|
||||
}
|
||||
|
||||
fs.realpath(this.path)
|
||||
.then(realPath => {
|
||||
dbSubjects.dbFilePathChange.next({
|
||||
workspaceId: this.workspaceId,
|
||||
path: this.path,
|
||||
realPath,
|
||||
});
|
||||
})
|
||||
.catch(() => {
|
||||
// skip error
|
||||
});
|
||||
|
||||
// use cached version?
|
||||
const db = (this.db = sqlite(this.path));
|
||||
db.exec(schemas.join(';'));
|
||||
|
||||
if (!this.firstConnect) {
|
||||
this.ydoc.on('update', (update: Uint8Array, origin) => {
|
||||
if (origin !== SQLITE_ORIGIN) {
|
||||
this.addUpdateToSQLite(update);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
Y.transact(this.ydoc, () => {
|
||||
const updates = this.getUpdates();
|
||||
updates.forEach(update => {
|
||||
// give SQLITE_ORIGIN to skip self update
|
||||
Y.applyUpdate(this.ydoc, update.data, SQLITE_ORIGIN);
|
||||
});
|
||||
});
|
||||
|
||||
this.lastUpdateTime = ts();
|
||||
|
||||
if (this.firstConnect) {
|
||||
logger.info('db reconnected', this.workspaceId);
|
||||
} else {
|
||||
logger.info('db connected', this.workspaceId);
|
||||
}
|
||||
|
||||
this.firstConnect = true;
|
||||
|
||||
return db;
|
||||
};
|
||||
|
||||
getDocAsUpdates = () => {
|
||||
return Y.encodeStateAsUpdate(this.ydoc);
|
||||
};
|
||||
|
||||
// non-blocking and use yDoc to validate the update
|
||||
// after that, the update is added to the db
|
||||
applyUpdate = (data: Uint8Array) => {
|
||||
Y.applyUpdate(this.ydoc, data);
|
||||
|
||||
// todo: trim the updates when the number of records is too large
|
||||
// 1. store the current ydoc state in the db
|
||||
// 2. then delete the old updates
|
||||
// yjs-idb will always trim the db for the first time after DB is loaded
|
||||
this.lastUpdateTime = ts();
|
||||
logger.debug('applyUpdate', this.workspaceId, this.lastUpdateTime);
|
||||
};
|
||||
|
||||
addBlob = (key: string, data: Uint8Array) => {
|
||||
this.lastUpdateTime = ts();
|
||||
try {
|
||||
const statement = this.db.prepare(
|
||||
'INSERT INTO blobs (key, data) VALUES (?, ?) ON CONFLICT(key) DO UPDATE SET data = ?'
|
||||
);
|
||||
statement.run(key, data, data);
|
||||
return key;
|
||||
} catch (error) {
|
||||
logger.error('addBlob', error);
|
||||
}
|
||||
};
|
||||
|
||||
getBlob = (key: string) => {
|
||||
try {
|
||||
const statement = this.db.prepare('SELECT data FROM blobs WHERE key = ?');
|
||||
const row = statement.get(key) as BlobRow;
|
||||
if (!row) {
|
||||
return null;
|
||||
}
|
||||
return row.data;
|
||||
} catch (error) {
|
||||
logger.error('getBlob', error);
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
deleteBlob = (key: string) => {
|
||||
this.lastUpdateTime = ts();
|
||||
try {
|
||||
const statement = this.db.prepare('DELETE FROM blobs WHERE key = ?');
|
||||
statement.run(key);
|
||||
} catch (error) {
|
||||
logger.error('deleteBlob', error);
|
||||
}
|
||||
};
|
||||
|
||||
getPersistentBlobKeys = () => {
|
||||
try {
|
||||
const statement = this.db.prepare('SELECT key FROM blobs');
|
||||
const rows = statement.all() as BlobRow[];
|
||||
return rows.map(row => row.key);
|
||||
} catch (error) {
|
||||
logger.error('getPersistentBlobKeys', error);
|
||||
return [];
|
||||
}
|
||||
};
|
||||
|
||||
private getUpdates = () => {
|
||||
try {
|
||||
const statement = this.db.prepare('SELECT * FROM updates');
|
||||
const rows = statement.all() as UpdateRow[];
|
||||
return rows;
|
||||
} catch (error) {
|
||||
logger.error('getUpdates', error);
|
||||
return [];
|
||||
}
|
||||
};
|
||||
|
||||
// batch write instead write per key stroke?
|
||||
private addUpdateToSQLite = (data: Uint8Array) => {
|
||||
try {
|
||||
const start = performance.now();
|
||||
const statement = this.db.prepare(
|
||||
'INSERT INTO updates (data) VALUES (?)'
|
||||
);
|
||||
statement.run(data);
|
||||
logger.debug(
|
||||
'addUpdateToSQLite',
|
||||
this.workspaceId,
|
||||
'length:',
|
||||
data.length,
|
||||
performance.now() - start,
|
||||
'ms'
|
||||
);
|
||||
} catch (error) {
|
||||
logger.error('addUpdateToSQLite', error);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
export async function getWorkspaceDBPath(
|
||||
context: AppContext,
|
||||
workspaceId: string
|
||||
) {
|
||||
const basePath = path.join(context.appDataPath, 'workspaces', workspaceId);
|
||||
await fs.ensureDir(basePath);
|
||||
return path.join(basePath, 'storage.db');
|
||||
}
|
||||
|
||||
export async function openWorkspaceDatabase(
|
||||
context: AppContext,
|
||||
workspaceId: string
|
||||
) {
|
||||
const dbPath = await getWorkspaceDBPath(context, workspaceId);
|
||||
return new WorkspaceSQLiteDB(dbPath, workspaceId);
|
||||
}
|
||||
|
||||
export function isValidDBFile(path: string) {
|
||||
let db: Database | null = null;
|
||||
try {
|
||||
db = sqlite(path);
|
||||
// check if db has two tables, one for updates and onefor blobs
|
||||
const statement = db.prepare(
|
||||
`SELECT name FROM sqlite_schema WHERE type='table'`
|
||||
);
|
||||
const rows = statement.all() as { name: string }[];
|
||||
const tableNames = rows.map(row => row.name);
|
||||
if (!tableNames.includes('updates') || !tableNames.includes('blobs')) {
|
||||
return false;
|
||||
}
|
||||
db.close();
|
||||
return true;
|
||||
} catch (error) {
|
||||
logger.error('isValidDBFile', error);
|
||||
db?.close();
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -1 +0,0 @@
|
||||
export * from './register';
|
||||
@@ -1,8 +0,0 @@
|
||||
import { appContext } from '../../context';
|
||||
import type { NamespaceHandlers } from '../type';
|
||||
import { deleteWorkspace, listWorkspaces } from './workspace';
|
||||
|
||||
export const workspaceHandlers = {
|
||||
list: async () => listWorkspaces(appContext),
|
||||
delete: async (_, id: string) => deleteWorkspace(appContext, id),
|
||||
} satisfies NamespaceHandlers;
|
||||
@@ -1,60 +0,0 @@
|
||||
import path from 'node:path';
|
||||
|
||||
import fs from 'fs-extra';
|
||||
|
||||
import type { AppContext } from '../../context';
|
||||
import { logger } from '../../logger';
|
||||
|
||||
interface WorkspaceMeta {
|
||||
path: string;
|
||||
realpath: string;
|
||||
}
|
||||
|
||||
export async function listWorkspaces(
|
||||
context: AppContext
|
||||
): Promise<[workspaceId: string, meta: WorkspaceMeta][]> {
|
||||
const basePath = path.join(context.appDataPath, 'workspaces');
|
||||
try {
|
||||
await fs.ensureDir(basePath);
|
||||
const dirs = await fs.readdir(basePath, {
|
||||
withFileTypes: true,
|
||||
});
|
||||
|
||||
const meta = await Promise.all(
|
||||
dirs.map(async dir => {
|
||||
const dbFilePath = path.join(basePath, dir.name, 'storage.db');
|
||||
if (dir.isDirectory() && (await fs.exists(dbFilePath))) {
|
||||
// try read storage.db under it
|
||||
const realpath = await fs.realpath(dbFilePath);
|
||||
return [dir.name, { path: dbFilePath, realpath }] as [
|
||||
string,
|
||||
WorkspaceMeta
|
||||
];
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
return meta.filter((w): w is [string, WorkspaceMeta] => !!w);
|
||||
} catch (error) {
|
||||
logger.error('listWorkspaces', error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
export async function deleteWorkspace(context: AppContext, id: string) {
|
||||
const basePath = path.join(context.appDataPath, 'workspaces', id);
|
||||
const movedPath = path.join(
|
||||
context.appDataPath,
|
||||
'delete-workspaces',
|
||||
`${id}`
|
||||
);
|
||||
try {
|
||||
return await fs.move(basePath, movedPath, {
|
||||
overwrite: true,
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('deleteWorkspace', error);
|
||||
}
|
||||
}
|
||||
@@ -1,19 +0,0 @@
|
||||
export function debounce<T extends (...args: any[]) => void>(
|
||||
fn: T,
|
||||
delay: number
|
||||
) {
|
||||
let timeoutId: NodeJS.Timer | undefined;
|
||||
return (...args: Parameters<T>) => {
|
||||
if (timeoutId) {
|
||||
clearTimeout(timeoutId);
|
||||
}
|
||||
timeoutId = setTimeout(() => {
|
||||
fn(...args);
|
||||
timeoutId = undefined;
|
||||
}, delay);
|
||||
};
|
||||
}
|
||||
|
||||
export function ts() {
|
||||
return new Date().getTime();
|
||||
}
|
||||
7
apps/electron/layers/preload/preload.d.ts
vendored
7
apps/electron/layers/preload/preload.d.ts
vendored
@@ -1,7 +0,0 @@
|
||||
/* eslint-disable @typescript-eslint/consistent-type-imports */
|
||||
|
||||
interface Window {
|
||||
apis: typeof import('./src/affine-apis').apis;
|
||||
events: typeof import('./src/affine-apis').events;
|
||||
appInfo: typeof import('./src/affine-apis').appInfo;
|
||||
}
|
||||
@@ -1,92 +0,0 @@
|
||||
/* eslint-disable @typescript-eslint/no-var-requires */
|
||||
// NOTE: we will generate preload types from this file
|
||||
|
||||
import { ipcRenderer } from 'electron';
|
||||
|
||||
import type { MainIPCEventMap, MainIPCHandlerMap } from '../../constraints';
|
||||
|
||||
type WithoutFirstParameter<T> = T extends (_: any, ...args: infer P) => infer R
|
||||
? (...args: P) => R
|
||||
: T;
|
||||
|
||||
type HandlersMap<N extends keyof MainIPCHandlerMap> = {
|
||||
[K in keyof MainIPCHandlerMap[N]]: WithoutFirstParameter<
|
||||
MainIPCHandlerMap[N][K]
|
||||
>;
|
||||
};
|
||||
|
||||
type PreloadHandlers = {
|
||||
[N in keyof MainIPCHandlerMap]: HandlersMap<N>;
|
||||
};
|
||||
|
||||
type MainExposedMeta = {
|
||||
handlers: [namespace: string, handlerNames: string[]][];
|
||||
events: [namespace: string, eventNames: string[]][];
|
||||
};
|
||||
|
||||
// main handlers that can be invoked from the renderer process
|
||||
const apis: PreloadHandlers = (() => {
|
||||
// the following were generated by the build script
|
||||
// 1. bundle extra main/src/expose.ts entry
|
||||
// 2. use generate-main-exposed-meta.mjs to generate exposed-meta.js in dist
|
||||
//
|
||||
// we cannot directly import main/src/handlers.ts because it will be bundled into the preload bundle
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const {
|
||||
handlers: handlersMeta,
|
||||
}: MainExposedMeta = require('../main/exposed-meta');
|
||||
|
||||
const all = handlersMeta.map(([namespace, functionNames]) => {
|
||||
const namespaceApis = functionNames.map(name => {
|
||||
const channel = `${namespace}:${name}`;
|
||||
return [
|
||||
name,
|
||||
(...args: any[]) => {
|
||||
return ipcRenderer.invoke(channel, ...args);
|
||||
},
|
||||
];
|
||||
});
|
||||
return [namespace, Object.fromEntries(namespaceApis)];
|
||||
});
|
||||
|
||||
return Object.fromEntries(all);
|
||||
})();
|
||||
|
||||
// main events that can be listened to from the renderer process
|
||||
const events: MainIPCEventMap = (() => {
|
||||
const {
|
||||
events: eventsMeta,
|
||||
}: MainExposedMeta = require('../main/exposed-meta');
|
||||
|
||||
// NOTE: ui may try to listen to a lot of the same events, so we increase the limit...
|
||||
ipcRenderer.setMaxListeners(100);
|
||||
|
||||
const all = eventsMeta.map(([namespace, eventNames]) => {
|
||||
const namespaceEvents = eventNames.map(name => {
|
||||
const channel = `${namespace}:${name}`;
|
||||
return [
|
||||
name,
|
||||
(callback: (...args: any[]) => void) => {
|
||||
const fn: (
|
||||
event: Electron.IpcRendererEvent,
|
||||
...args: any[]
|
||||
) => void = (_, ...args) => {
|
||||
callback(...args);
|
||||
};
|
||||
ipcRenderer.on(channel, fn);
|
||||
return () => {
|
||||
ipcRenderer.off(channel, fn);
|
||||
};
|
||||
},
|
||||
];
|
||||
});
|
||||
return [namespace, Object.fromEntries(namespaceEvents)];
|
||||
});
|
||||
return Object.fromEntries(all);
|
||||
})();
|
||||
|
||||
const appInfo = {
|
||||
electron: true,
|
||||
};
|
||||
|
||||
export { apis, appInfo, events };
|
||||
@@ -1,18 +0,0 @@
|
||||
/**
|
||||
* @module preload
|
||||
*/
|
||||
|
||||
import { contextBridge } from 'electron';
|
||||
|
||||
import * as affineApis from './affine-apis';
|
||||
|
||||
/**
|
||||
* The "Main World" is the JavaScript context that your main renderer code runs in.
|
||||
* By default, the page you load in your renderer executes code in this world.
|
||||
*
|
||||
* @see https://www.electronjs.org/docs/api/context-bridge
|
||||
*/
|
||||
|
||||
contextBridge.exposeInMainWorld('apis', affineApis.apis);
|
||||
contextBridge.exposeInMainWorld('events', affineApis.events);
|
||||
contextBridge.exposeInMainWorld('appInfo', affineApis.appInfo);
|
||||
@@ -1,7 +0,0 @@
|
||||
export const isMacOS = () => {
|
||||
return process.platform === 'darwin';
|
||||
};
|
||||
|
||||
export const isWindows = () => {
|
||||
return process.platform === 'win32';
|
||||
};
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "@affine/electron",
|
||||
"private": true,
|
||||
"version": "0.5.4-beta.2",
|
||||
"version": "0.6.2",
|
||||
"author": "affine",
|
||||
"repository": {
|
||||
"url": "https://github.com/toeverything/AFFiNE",
|
||||
@@ -10,25 +10,29 @@
|
||||
"description": "AFFiNE App",
|
||||
"homepage": "https://github.com/toeverything/AFFiNE",
|
||||
"scripts": {
|
||||
"dev": "yarn electron-rebuild && yarn cross-env DEV_SERVER_URL=http://localhost:8080 node scripts/dev.mjs",
|
||||
"watch": "yarn electron-rebuild && yarn cross-env DEV_SERVER_URL=http://localhost:8080 node scripts/dev.mjs --watch",
|
||||
"prod": "yarn electron-rebuild && yarn node scripts/dev.mjs",
|
||||
"build-layers": "zx scripts/build-layers.mjs",
|
||||
"dev": "yarn cross-env DEV_SERVER_URL=http://localhost:8080 node scripts/dev.mjs",
|
||||
"watch": "yarn cross-env DEV_SERVER_URL=http://localhost:8080 node scripts/dev.mjs --watch",
|
||||
"prod": "yarn node scripts/dev.mjs",
|
||||
"build": "zx scripts/build-layers.mjs",
|
||||
"generate-assets": "zx scripts/generate-assets.mjs",
|
||||
"generate-main-exposed-meta": "zx scripts/generate-main-exposed-meta.mjs",
|
||||
"package": "electron-forge package",
|
||||
"make": "electron-forge make",
|
||||
"rebuild:for-unit-test": "yarn rebuild better-sqlite3",
|
||||
"rebuild:for-electron": "yarn electron-rebuild",
|
||||
"test": "playwright test"
|
||||
"test": "DEBUG=pw:browser playwright test"
|
||||
},
|
||||
"config": {
|
||||
"forge": "./forge.config.js"
|
||||
},
|
||||
"main": "./dist/layers/main/index.js",
|
||||
"main": "./dist/main.js",
|
||||
"exports": {
|
||||
"./scripts/plugins/build-plugins.mjs": "./scripts/plugins/build-plugins.mjs"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@affine-test/kit": "workspace:*",
|
||||
"@affine/native": "workspace:*",
|
||||
"@blocksuite/blocks": "0.0.0-20230607055421-9b20fcaf-nightly",
|
||||
"@blocksuite/editor": "0.0.0-20230607055421-9b20fcaf-nightly",
|
||||
"@blocksuite/lit": "0.0.0-20230607055421-9b20fcaf-nightly",
|
||||
"@blocksuite/store": "0.0.0-20230607055421-9b20fcaf-nightly",
|
||||
"@electron-forge/cli": "^6.1.1",
|
||||
"@electron-forge/core": "^6.1.1",
|
||||
"@electron-forge/core-utils": "^6.1.1",
|
||||
@@ -37,28 +41,31 @@
|
||||
"@electron-forge/maker-squirrel": "^6.1.1",
|
||||
"@electron-forge/maker-zip": "^6.1.1",
|
||||
"@electron-forge/shared-types": "^6.1.1",
|
||||
"@electron/rebuild": "^3.2.13",
|
||||
"@electron/remote": "2.0.9",
|
||||
"@types/better-sqlite3": "^7.6.4",
|
||||
"@toeverything/infra": "workspace:*",
|
||||
"@types/fs-extra": "^11.0.1",
|
||||
"@types/uuid": "^9.0.1",
|
||||
"cross-env": "7.0.3",
|
||||
"electron": "24.3.1",
|
||||
"electron": "=25.0.1",
|
||||
"electron-log": "^5.0.0-beta.24",
|
||||
"electron-squirrel-startup": "1.0.0",
|
||||
"electron-window-state": "^5.0.3",
|
||||
"esbuild": "^0.17.19",
|
||||
"fs-extra": "^11.1.1",
|
||||
"playwright": "^1.33.0",
|
||||
"jotai": "^2.1.1",
|
||||
"playwright": "=1.33.0",
|
||||
"ts-node": "^10.9.1",
|
||||
"undici": "^5.22.1",
|
||||
"uuid": "^9.0.0",
|
||||
"which": "^3.0.1",
|
||||
"zx": "^7.2.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"better-sqlite3": "^8.3.0",
|
||||
"chokidar": "^3.5.3",
|
||||
"@toeverything/plugin-infra": "workspace:*",
|
||||
"async-call-rpc": "^6.3.1",
|
||||
"electron-updater": "^5.3.0",
|
||||
"link-preview-js": "^3.0.4",
|
||||
"lodash-es": "^4.17.21",
|
||||
"nanoid": "^4.0.2",
|
||||
"rxjs": "^7.8.1",
|
||||
"yjs": "^13.6.1"
|
||||
|
||||
BIN
apps/electron/resources/icons/affine_installing.gif
Normal file
BIN
apps/electron/resources/icons/affine_installing.gif
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 2.1 MiB |
@@ -1,9 +1,12 @@
|
||||
#!/usr/bin/env zx
|
||||
import 'zx/globals';
|
||||
|
||||
import { resolve } from 'node:path';
|
||||
|
||||
import { spawnSync } from 'child_process';
|
||||
import * as esbuild from 'esbuild';
|
||||
|
||||
import { config } from './common.mjs';
|
||||
import { config, rootDir } from './common.mjs';
|
||||
|
||||
const NODE_ENV =
|
||||
process.env.NODE_ENV === 'development' ? 'development' : 'production';
|
||||
@@ -15,18 +18,24 @@ if (process.platform === 'win32') {
|
||||
|
||||
async function buildLayers() {
|
||||
const common = config();
|
||||
await esbuild.build(common.preload);
|
||||
console.log('Build plugin infra');
|
||||
spawnSync('yarn', ['build'], {
|
||||
stdio: 'inherit',
|
||||
cwd: resolve(rootDir, './packages/plugin-infra'),
|
||||
});
|
||||
|
||||
console.log('Build plugins');
|
||||
await import('./plugins/build-plugins.mjs');
|
||||
|
||||
await esbuild.build(common.workers);
|
||||
await esbuild.build({
|
||||
...common.main,
|
||||
...common.layers,
|
||||
define: {
|
||||
...common.main.define,
|
||||
...common.define,
|
||||
'process.env.NODE_ENV': `"${NODE_ENV}"`,
|
||||
'process.env.BUILD_TYPE': `"${process.env.BUILD_TYPE || 'stable'}"`,
|
||||
},
|
||||
});
|
||||
|
||||
await $`yarn workspace @affine/electron generate-main-exposed-meta`;
|
||||
}
|
||||
|
||||
await buildLayers();
|
||||
|
||||
@@ -2,7 +2,10 @@ import { resolve } from 'node:path';
|
||||
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
export const root = fileURLToPath(new URL('..', import.meta.url));
|
||||
export const electronDir = fileURLToPath(new URL('..', import.meta.url));
|
||||
|
||||
export const rootDir = resolve(electronDir, '..', '..');
|
||||
|
||||
export const NODE_MAJOR_VERSION = 18;
|
||||
|
||||
// hard-coded for now:
|
||||
@@ -15,7 +18,7 @@ const mode = (process.env.NODE_ENV = process.env.NODE_ENV || 'development');
|
||||
// List of env that will be replaced by esbuild
|
||||
const ENV_MACROS = ['AFFINE_GOOGLE_CLIENT_ID', 'AFFINE_GOOGLE_CLIENT_SECRET'];
|
||||
|
||||
/** @return {{main: import('esbuild').BuildOptions, preload: import('esbuild').BuildOptions}} */
|
||||
/** @return {{layers: import('esbuild').BuildOptions, workers: import('esbuild').BuildOptions}} */
|
||||
export const config = () => {
|
||||
const define = Object.fromEntries([
|
||||
...ENV_MACROS.map(key => [
|
||||
@@ -23,6 +26,7 @@ export const config = () => {
|
||||
JSON.stringify(process.env[key] ?? ''),
|
||||
]),
|
||||
['process.env.NODE_ENV', `"${mode}"`],
|
||||
['process.env.USE_WORKER', '"true"'],
|
||||
]);
|
||||
|
||||
if (DEV_SERVER_URL) {
|
||||
@@ -30,16 +34,18 @@ export const config = () => {
|
||||
}
|
||||
|
||||
return {
|
||||
main: {
|
||||
layers: {
|
||||
entryPoints: [
|
||||
resolve(root, './layers/main/src/index.ts'),
|
||||
resolve(root, './layers/main/src/exposed.ts'),
|
||||
resolve(electronDir, './src/main/index.ts'),
|
||||
resolve(electronDir, './src/preload/index.ts'),
|
||||
resolve(electronDir, './src/helper/index.ts'),
|
||||
],
|
||||
outdir: resolve(root, './dist/layers/main'),
|
||||
entryNames: '[dir]',
|
||||
outdir: resolve(electronDir, './dist'),
|
||||
bundle: true,
|
||||
target: `node${NODE_MAJOR_VERSION}`,
|
||||
platform: 'node',
|
||||
external: ['electron', 'yjs', 'better-sqlite3', 'electron-updater'],
|
||||
external: ['electron', 'electron-updater', '@toeverything/plugin-infra'],
|
||||
define: define,
|
||||
format: 'cjs',
|
||||
loader: {
|
||||
@@ -48,14 +54,23 @@ export const config = () => {
|
||||
assetNames: '[name]',
|
||||
treeShaking: true,
|
||||
},
|
||||
preload: {
|
||||
entryPoints: [resolve(root, './layers/preload/src/index.ts')],
|
||||
outdir: resolve(root, './dist/layers/preload'),
|
||||
workers: {
|
||||
entryPoints: [
|
||||
resolve(electronDir, './src/main/workers/plugin.worker.ts'),
|
||||
],
|
||||
entryNames: '[dir]/[name]',
|
||||
outdir: resolve(electronDir, './dist/workers'),
|
||||
bundle: true,
|
||||
target: `node${NODE_MAJOR_VERSION}`,
|
||||
platform: 'node',
|
||||
external: ['electron', '../main/exposed-meta'],
|
||||
external: ['@toeverything/plugin-infra', 'async-call-rpc'],
|
||||
define: define,
|
||||
format: 'cjs',
|
||||
loader: {
|
||||
'.node': 'copy',
|
||||
},
|
||||
assetNames: '[name]',
|
||||
treeShaking: true,
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
/* eslint-disable no-async-promise-executor */
|
||||
import { execSync, spawn } from 'node:child_process';
|
||||
import { spawn } from 'node:child_process';
|
||||
import { readFileSync } from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import path, { resolve } from 'node:path';
|
||||
|
||||
import electronPath from 'electron';
|
||||
import * as esbuild from 'esbuild';
|
||||
import which from 'which';
|
||||
|
||||
import { config, root } from './common.mjs';
|
||||
import { config, electronDir, rootDir } from './common.mjs';
|
||||
|
||||
// this means we don't spawn electron windows, mainly for testing
|
||||
const watchMode = process.argv.includes('--watch');
|
||||
@@ -21,7 +22,10 @@ const stderrFilterPatterns = [
|
||||
|
||||
// these are set before calling `config`, so we have a chance to override them
|
||||
try {
|
||||
const devJson = readFileSync(path.resolve(root, './dev.json'), 'utf-8');
|
||||
const devJson = readFileSync(
|
||||
path.resolve(electronDir, './dev.json'),
|
||||
'utf-8'
|
||||
);
|
||||
const devEnv = JSON.parse(devJson);
|
||||
Object.assign(process.env, devEnv);
|
||||
} catch (err) {
|
||||
@@ -64,20 +68,29 @@ function spawnOrReloadElectron() {
|
||||
}
|
||||
|
||||
const common = config();
|
||||
const yarnPath = which.sync('yarn');
|
||||
async function watchPlugins() {
|
||||
spawn(yarnPath, ['dev'], {
|
||||
stdio: 'inherit',
|
||||
cwd: resolve(rootDir, './packages/plugin-infra'),
|
||||
});
|
||||
await import('./plugins/dev-plugins.mjs');
|
||||
}
|
||||
|
||||
function watchPreload() {
|
||||
async function watchLayers() {
|
||||
return new Promise(async resolve => {
|
||||
let initialBuild = false;
|
||||
const preloadBuild = await esbuild.context({
|
||||
...common.preload,
|
||||
|
||||
const buildContext = await esbuild.context({
|
||||
...common.layers,
|
||||
plugins: [
|
||||
...(common.preload.plugins ?? []),
|
||||
...(common.layers.plugins ?? []),
|
||||
{
|
||||
name: 'electron-dev:reload-app-on-preload-change',
|
||||
name: 'electron-dev:reload-app-on-layers-change',
|
||||
setup(build) {
|
||||
build.onEnd(() => {
|
||||
if (initialBuild) {
|
||||
console.log(`[preload] has changed, [re]launching electron...`);
|
||||
console.log(`[layers] has changed, [re]launching electron...`);
|
||||
spawnOrReloadElectron();
|
||||
} else {
|
||||
resolve();
|
||||
@@ -88,27 +101,24 @@ function watchPreload() {
|
||||
},
|
||||
],
|
||||
});
|
||||
// watch will trigger build.onEnd() on first run & on subsequent changes
|
||||
await preloadBuild.watch();
|
||||
await buildContext.watch();
|
||||
});
|
||||
}
|
||||
|
||||
async function watchMain() {
|
||||
async function watchWorkers() {
|
||||
return new Promise(async resolve => {
|
||||
let initialBuild = false;
|
||||
|
||||
const mainBuild = await esbuild.context({
|
||||
...common.main,
|
||||
const buildContext = await esbuild.context({
|
||||
...common.workers,
|
||||
plugins: [
|
||||
...(common.main.plugins ?? []),
|
||||
...(common.workers.plugins ?? []),
|
||||
{
|
||||
name: 'electron-dev:reload-app-on-main-change',
|
||||
name: 'electron-dev:reload-app-on-workers-change',
|
||||
setup(build) {
|
||||
build.onEnd(() => {
|
||||
execSync('yarn generate-main-exposed-meta');
|
||||
|
||||
if (initialBuild) {
|
||||
console.log(`[main] has changed, [re]launching electron...`);
|
||||
console.log(`[workers] has changed, [re]launching electron...`);
|
||||
spawnOrReloadElectron();
|
||||
} else {
|
||||
resolve();
|
||||
@@ -119,13 +129,14 @@ async function watchMain() {
|
||||
},
|
||||
],
|
||||
});
|
||||
await mainBuild.watch();
|
||||
await buildContext.watch();
|
||||
});
|
||||
}
|
||||
|
||||
async function main() {
|
||||
await watchMain();
|
||||
await watchPreload();
|
||||
await watchPlugins();
|
||||
await watchLayers();
|
||||
await watchWorkers();
|
||||
|
||||
if (watchMode) {
|
||||
console.log(`Watching for changes...`);
|
||||
|
||||
24
apps/electron/scripts/generate-assets.mjs
Normal file → Executable file
24
apps/electron/scripts/generate-assets.mjs
Normal file → Executable file
@@ -32,10 +32,6 @@ if (releaseVersionEnv && electronPackageJson.version !== releaseVersionEnv) {
|
||||
}
|
||||
// copy web dist files to electron dist
|
||||
|
||||
// step 1: clean up
|
||||
await cleanup();
|
||||
echo('Clean up done');
|
||||
|
||||
if (process.platform === 'win32') {
|
||||
$.shell = 'powershell.exe';
|
||||
$.prefix = '';
|
||||
@@ -43,11 +39,11 @@ if (process.platform === 'win32') {
|
||||
|
||||
cd(repoRootDir);
|
||||
|
||||
// step 2: build web (nextjs) dist
|
||||
// step 1: build web (nextjs) dist
|
||||
if (!process.env.SKIP_WEB_BUILD) {
|
||||
process.env.ENABLE_LEGACY_PROVIDER = 'false';
|
||||
await $`yarn build`;
|
||||
await $`yarn export`;
|
||||
await $`yarn nx build @affine/web`;
|
||||
await $`yarn nx export @affine/web`;
|
||||
|
||||
// step 1.5: amend sourceMappingURL to allow debugging in devtools
|
||||
await glob('**/*.{js,css}', { cwd: affineWebOutDir }).then(files => {
|
||||
@@ -67,7 +63,7 @@ if (!process.env.SKIP_WEB_BUILD) {
|
||||
await fs.move(affineWebOutDir, publicAffineOutDir, { overwrite: true });
|
||||
}
|
||||
|
||||
// step 3: update app-updater.yml content with build type in resources folder
|
||||
// step 2: update app-updater.yml content with build type in resources folder
|
||||
if (process.env.BUILD_TYPE === 'internal') {
|
||||
const appUpdaterYml = path.join(publicDistDir, 'app-update.yml');
|
||||
const appUpdaterYmlContent = await fs.readFile(appUpdaterYml, 'utf-8');
|
||||
@@ -77,15 +73,3 @@ if (process.env.BUILD_TYPE === 'internal') {
|
||||
);
|
||||
await fs.writeFile(appUpdaterYml, newAppUpdaterYmlContent);
|
||||
}
|
||||
|
||||
/// --------
|
||||
/// --------
|
||||
/// --------
|
||||
async function cleanup() {
|
||||
if (!process.env.SKIP_WEB_BUILD) {
|
||||
await fs.emptyDir(publicAffineOutDir);
|
||||
}
|
||||
await fs.emptyDir(path.join(electronRootDir, 'layers', 'main', 'dist'));
|
||||
await fs.emptyDir(path.join(electronRootDir, 'layers', 'preload', 'dist'));
|
||||
await fs.remove(path.join(electronRootDir, 'out'));
|
||||
}
|
||||
|
||||
@@ -1,36 +0,0 @@
|
||||
const mainDistDir = path.resolve(__dirname, '../dist/layers/main');
|
||||
|
||||
// be careful and avoid any side effects in
|
||||
const { handlers, events } = await import(
|
||||
'file://' + path.resolve(mainDistDir, 'exposed.js')
|
||||
);
|
||||
|
||||
const handlersMeta = Object.entries(handlers).map(
|
||||
([namespace, namespaceHandlers]) => {
|
||||
return [
|
||||
namespace,
|
||||
Object.keys(namespaceHandlers).map(handlerName => handlerName),
|
||||
];
|
||||
}
|
||||
);
|
||||
|
||||
const eventsMeta = Object.entries(events).map(
|
||||
([namespace, namespaceHandlers]) => {
|
||||
return [
|
||||
namespace,
|
||||
Object.keys(namespaceHandlers).map(handlerName => handlerName),
|
||||
];
|
||||
}
|
||||
);
|
||||
|
||||
const meta = {
|
||||
handlers: handlersMeta,
|
||||
events: eventsMeta,
|
||||
};
|
||||
|
||||
await fs.writeFile(
|
||||
path.resolve(mainDistDir, 'exposed-meta.js'),
|
||||
`module.exports = ${JSON.stringify(meta)};`
|
||||
);
|
||||
|
||||
console.log('generate main exposed-meta.js done');
|
||||
20
apps/electron/scripts/plugins/build-plugins.mjs
Executable file
20
apps/electron/scripts/plugins/build-plugins.mjs
Executable file
@@ -0,0 +1,20 @@
|
||||
#!/usr/bin/env node
|
||||
import { build } from 'esbuild';
|
||||
|
||||
import { definePluginServerConfig } from './utils.mjs';
|
||||
|
||||
await build({
|
||||
...definePluginServerConfig('bookmark-block'),
|
||||
external: [
|
||||
// server.ts
|
||||
'link-preview-js',
|
||||
// ui.ts
|
||||
'@toeverything/plugin-infra',
|
||||
'@affine/component',
|
||||
'@blocksuite/store',
|
||||
'@blocksuite/blocks',
|
||||
'react',
|
||||
'react-dom',
|
||||
'foxact',
|
||||
],
|
||||
});
|
||||
22
apps/electron/scripts/plugins/dev-plugins.mjs
Executable file
22
apps/electron/scripts/plugins/dev-plugins.mjs
Executable file
@@ -0,0 +1,22 @@
|
||||
#!/usr/bin/env node
|
||||
import { context } from 'esbuild';
|
||||
|
||||
import { definePluginServerConfig } from './utils.mjs';
|
||||
|
||||
const plugin = await context({
|
||||
...definePluginServerConfig('bookmark-block'),
|
||||
external: [
|
||||
// server.ts
|
||||
'link-preview-js',
|
||||
// ui.ts
|
||||
'@toeverything/plugin-infra',
|
||||
'@affine/component',
|
||||
'@blocksuite/store',
|
||||
'@blocksuite/blocks',
|
||||
'react',
|
||||
'react-dom',
|
||||
'foxact',
|
||||
],
|
||||
});
|
||||
|
||||
await plugin.watch();
|
||||
34
apps/electron/scripts/plugins/utils.mjs
Normal file
34
apps/electron/scripts/plugins/utils.mjs
Normal file
@@ -0,0 +1,34 @@
|
||||
import { resolve } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
|
||||
export const rootDir = fileURLToPath(new URL('../../../..', import.meta.url));
|
||||
export const electronOutputDir = resolve(
|
||||
rootDir,
|
||||
'apps',
|
||||
'electron',
|
||||
'dist',
|
||||
'plugins'
|
||||
);
|
||||
export const pluginDir = resolve(rootDir, 'plugins');
|
||||
|
||||
/**
|
||||
*
|
||||
* @param pluginDirName {string}
|
||||
* @return {import('esbuild').BuildOptions}
|
||||
*/
|
||||
export function definePluginServerConfig(pluginDirName) {
|
||||
const pluginRootDir = resolve(pluginDir, pluginDirName);
|
||||
const mainEntryFile = resolve(pluginRootDir, 'src/index.ts');
|
||||
const serverOutputDir = resolve(electronOutputDir, pluginDirName);
|
||||
return {
|
||||
entryPoints: [mainEntryFile],
|
||||
platform: 'neutral',
|
||||
format: 'esm',
|
||||
outExtension: {
|
||||
'.js': '.mjs',
|
||||
},
|
||||
outdir: serverOutputDir,
|
||||
bundle: true,
|
||||
splitting: true,
|
||||
};
|
||||
}
|
||||
134
apps/electron/src/helper/db/__tests__/ensure-db.spec.ts
Normal file
134
apps/electron/src/helper/db/__tests__/ensure-db.spec.ts
Normal file
@@ -0,0 +1,134 @@
|
||||
import path from 'node:path';
|
||||
import { setTimeout } from 'node:timers/promises';
|
||||
|
||||
import fs from 'fs-extra';
|
||||
import { v4 } from 'uuid';
|
||||
import { afterEach, beforeEach, expect, test, vi } from 'vitest';
|
||||
|
||||
const tmpDir = path.join(__dirname, 'tmp');
|
||||
const appDataPath = path.join(tmpDir, 'app-data');
|
||||
|
||||
vi.doMock('../../main-rpc', () => ({
|
||||
mainRPC: {
|
||||
getPath: async () => appDataPath,
|
||||
},
|
||||
}));
|
||||
|
||||
const constructorStub = vi.fn();
|
||||
const destroyStub = vi.fn();
|
||||
destroyStub.mockReturnValue(Promise.resolve());
|
||||
|
||||
function existProcess() {
|
||||
process.emit('beforeExit', 0);
|
||||
}
|
||||
|
||||
vi.doMock('../secondary-db', () => {
|
||||
return {
|
||||
SecondaryWorkspaceSQLiteDB: class {
|
||||
constructor(...args: any[]) {
|
||||
constructorStub(...args);
|
||||
}
|
||||
|
||||
connectIfNeeded = () => Promise.resolve();
|
||||
|
||||
pull = () => Promise.resolve();
|
||||
|
||||
destroy = destroyStub;
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
vi.useFakeTimers({ shouldAdvanceTime: true });
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
existProcess();
|
||||
// wait for the db to be closed on Windows
|
||||
if (process.platform === 'win32') {
|
||||
await setTimeout(200);
|
||||
}
|
||||
await fs.remove(tmpDir);
|
||||
vi.useRealTimers();
|
||||
});
|
||||
|
||||
test('can get a valid WorkspaceSQLiteDB', async () => {
|
||||
const { ensureSQLiteDB } = await import('../ensure-db');
|
||||
const workspaceId = v4();
|
||||
const db0 = await ensureSQLiteDB(workspaceId);
|
||||
expect(db0).toBeDefined();
|
||||
expect(db0.workspaceId).toBe(workspaceId);
|
||||
|
||||
const db1 = await ensureSQLiteDB(v4());
|
||||
expect(db1).not.toBe(db0);
|
||||
expect(db1.workspaceId).not.toBe(db0.workspaceId);
|
||||
|
||||
// ensure that the db is cached
|
||||
expect(await ensureSQLiteDB(workspaceId)).toBe(db0);
|
||||
});
|
||||
|
||||
test('db should be destroyed when app quits', async () => {
|
||||
const { ensureSQLiteDB } = await import('../ensure-db');
|
||||
const workspaceId = v4();
|
||||
const db0 = await ensureSQLiteDB(workspaceId);
|
||||
const db1 = await ensureSQLiteDB(v4());
|
||||
|
||||
expect(db0.db).not.toBeNull();
|
||||
expect(db1.db).not.toBeNull();
|
||||
|
||||
existProcess();
|
||||
|
||||
// wait the async `db.destroy()` to be called
|
||||
await setTimeout(100);
|
||||
|
||||
expect(db0.db).toBeNull();
|
||||
expect(db1.db).toBeNull();
|
||||
});
|
||||
|
||||
test('db should be removed in db$Map after destroyed', async () => {
|
||||
const { ensureSQLiteDB, db$Map } = await import('../ensure-db');
|
||||
const workspaceId = v4();
|
||||
const db = await ensureSQLiteDB(workspaceId);
|
||||
await db.destroy();
|
||||
await setTimeout(100);
|
||||
expect(db$Map.has(workspaceId)).toBe(false);
|
||||
});
|
||||
|
||||
test('if db has a secondary db path, we should also poll that', async () => {
|
||||
const { ensureSQLiteDB } = await import('../ensure-db');
|
||||
const { storeWorkspaceMeta } = await import('../../workspace');
|
||||
const workspaceId = v4();
|
||||
await storeWorkspaceMeta(workspaceId, {
|
||||
secondaryDBPath: path.join(tmpDir, 'secondary.db'),
|
||||
});
|
||||
|
||||
const db = await ensureSQLiteDB(workspaceId);
|
||||
|
||||
await setTimeout(10);
|
||||
|
||||
expect(constructorStub).toBeCalledTimes(1);
|
||||
expect(constructorStub).toBeCalledWith(path.join(tmpDir, 'secondary.db'), db);
|
||||
|
||||
// if secondary meta is changed
|
||||
await storeWorkspaceMeta(workspaceId, {
|
||||
secondaryDBPath: path.join(tmpDir, 'secondary2.db'),
|
||||
});
|
||||
|
||||
// wait the async `db.destroy()` to be called
|
||||
await setTimeout(100);
|
||||
expect(constructorStub).toBeCalledTimes(2);
|
||||
expect(destroyStub).toBeCalledTimes(1);
|
||||
|
||||
// if secondary meta is changed (but another workspace)
|
||||
await storeWorkspaceMeta(v4(), {
|
||||
secondaryDBPath: path.join(tmpDir, 'secondary3.db'),
|
||||
});
|
||||
await vi.advanceTimersByTimeAsync(1500);
|
||||
expect(constructorStub).toBeCalledTimes(2);
|
||||
expect(destroyStub).toBeCalledTimes(1);
|
||||
|
||||
// if primary is destroyed, secondary should also be destroyed
|
||||
await db.destroy();
|
||||
await setTimeout(100);
|
||||
expect(destroyStub).toBeCalledTimes(2);
|
||||
});
|
||||
@@ -0,0 +1,99 @@
|
||||
import path from 'node:path';
|
||||
|
||||
import fs from 'fs-extra';
|
||||
import { v4 } from 'uuid';
|
||||
import { afterEach, expect, test, vi } from 'vitest';
|
||||
import * as Y from 'yjs';
|
||||
|
||||
import { dbSubjects } from '../subjects';
|
||||
|
||||
const tmpDir = path.join(__dirname, 'tmp');
|
||||
const appDataPath = path.join(tmpDir, 'app-data');
|
||||
|
||||
vi.doMock('../../main-rpc', () => ({
|
||||
mainRPC: {
|
||||
getPath: async () => appDataPath,
|
||||
},
|
||||
}));
|
||||
|
||||
afterEach(async () => {
|
||||
await fs.remove(tmpDir);
|
||||
});
|
||||
|
||||
function getTestUpdates() {
|
||||
const testYDoc = new Y.Doc();
|
||||
const yText = testYDoc.getText('test');
|
||||
yText.insert(0, 'hello');
|
||||
const updates = Y.encodeStateAsUpdate(testYDoc);
|
||||
|
||||
return updates;
|
||||
}
|
||||
test('can create new db file if not exists', async () => {
|
||||
const { openWorkspaceDatabase } = await import('../workspace-db-adapter');
|
||||
const workspaceId = v4();
|
||||
const db = await openWorkspaceDatabase(workspaceId);
|
||||
const dbPath = path.join(
|
||||
appDataPath,
|
||||
`workspaces/${workspaceId}`,
|
||||
`storage.db`
|
||||
);
|
||||
expect(await fs.exists(dbPath)).toBe(true);
|
||||
await db.destroy();
|
||||
});
|
||||
|
||||
test('on applyUpdate (from self), will not trigger update', async () => {
|
||||
const { openWorkspaceDatabase } = await import('../workspace-db-adapter');
|
||||
const workspaceId = v4();
|
||||
const onUpdate = vi.fn();
|
||||
|
||||
const db = await openWorkspaceDatabase(workspaceId);
|
||||
db.update$.subscribe(onUpdate);
|
||||
db.applyUpdate(getTestUpdates(), 'self');
|
||||
expect(onUpdate).not.toHaveBeenCalled();
|
||||
await db.destroy();
|
||||
});
|
||||
|
||||
test('on applyUpdate (from renderer), will trigger update', async () => {
|
||||
const { openWorkspaceDatabase } = await import('../workspace-db-adapter');
|
||||
const workspaceId = v4();
|
||||
const onUpdate = vi.fn();
|
||||
const onExternalUpdate = vi.fn();
|
||||
|
||||
const db = await openWorkspaceDatabase(workspaceId);
|
||||
db.update$.subscribe(onUpdate);
|
||||
const sub = dbSubjects.externalUpdate.subscribe(onExternalUpdate);
|
||||
db.applyUpdate(getTestUpdates(), 'renderer');
|
||||
expect(onUpdate).toHaveBeenCalled();
|
||||
sub.unsubscribe();
|
||||
await db.destroy();
|
||||
});
|
||||
|
||||
test('on applyUpdate (from external), will trigger update & send external update event', async () => {
|
||||
const { openWorkspaceDatabase } = await import('../workspace-db-adapter');
|
||||
const workspaceId = v4();
|
||||
const onUpdate = vi.fn();
|
||||
const onExternalUpdate = vi.fn();
|
||||
|
||||
const db = await openWorkspaceDatabase(workspaceId);
|
||||
db.update$.subscribe(onUpdate);
|
||||
const sub = dbSubjects.externalUpdate.subscribe(onExternalUpdate);
|
||||
db.applyUpdate(getTestUpdates(), 'external');
|
||||
expect(onUpdate).toHaveBeenCalled();
|
||||
expect(onExternalUpdate).toHaveBeenCalled();
|
||||
sub.unsubscribe();
|
||||
await db.destroy();
|
||||
});
|
||||
|
||||
test('on destroy, check if resources have been released', async () => {
|
||||
const { openWorkspaceDatabase } = await import('../workspace-db-adapter');
|
||||
const workspaceId = v4();
|
||||
const db = await openWorkspaceDatabase(workspaceId);
|
||||
const updateSub = {
|
||||
complete: vi.fn(),
|
||||
next: vi.fn(),
|
||||
};
|
||||
db.update$ = updateSub as any;
|
||||
await db.destroy();
|
||||
expect(db.db).toBe(null);
|
||||
expect(updateSub.complete).toHaveBeenCalled();
|
||||
});
|
||||
116
apps/electron/src/helper/db/base-db-adapter.ts
Normal file
116
apps/electron/src/helper/db/base-db-adapter.ts
Normal file
@@ -0,0 +1,116 @@
|
||||
import { SqliteConnection } from '@affine/native';
|
||||
|
||||
import { logger } from '../logger';
|
||||
|
||||
/**
|
||||
* A base class for SQLite DB adapter that provides basic methods around updates & blobs
|
||||
*/
|
||||
export abstract class BaseSQLiteAdapter {
|
||||
db: SqliteConnection | null = null;
|
||||
abstract role: string;
|
||||
|
||||
constructor(public readonly path: string) {}
|
||||
|
||||
async connectIfNeeded() {
|
||||
if (!this.db) {
|
||||
this.db = new SqliteConnection(this.path);
|
||||
await this.db.connect();
|
||||
logger.info(`[SQLiteAdapter:${this.role}]`, 'connected:', this.path);
|
||||
}
|
||||
return this.db;
|
||||
}
|
||||
|
||||
async destroy() {
|
||||
const { db } = this;
|
||||
this.db = null;
|
||||
// log after close will sometimes crash the app when quitting
|
||||
logger.info(`[SQLiteAdapter:${this.role}]`, 'destroyed:', this.path);
|
||||
await db?.close();
|
||||
}
|
||||
|
||||
async addBlob(key: string, data: Uint8Array) {
|
||||
try {
|
||||
if (!this.db) {
|
||||
logger.warn(`${this.path} is not connected`);
|
||||
return;
|
||||
}
|
||||
await this.db.addBlob(key, data);
|
||||
} catch (error) {
|
||||
logger.error('addBlob', error);
|
||||
}
|
||||
}
|
||||
|
||||
async getBlob(key: string) {
|
||||
try {
|
||||
if (!this.db) {
|
||||
logger.warn(`${this.path} is not connected`);
|
||||
return;
|
||||
}
|
||||
const blob = await this.db.getBlob(key);
|
||||
return blob?.data;
|
||||
} catch (error) {
|
||||
logger.error('getBlob', error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
async deleteBlob(key: string) {
|
||||
try {
|
||||
if (!this.db) {
|
||||
logger.warn(`${this.path} is not connected`);
|
||||
return;
|
||||
}
|
||||
await this.db.deleteBlob(key);
|
||||
} catch (error) {
|
||||
logger.error(`${this.path} delete blob failed`, error);
|
||||
}
|
||||
}
|
||||
|
||||
async getBlobKeys() {
|
||||
try {
|
||||
if (!this.db) {
|
||||
logger.warn(`${this.path} is not connected`);
|
||||
return [];
|
||||
}
|
||||
return await this.db.getBlobKeys();
|
||||
} catch (error) {
|
||||
logger.error(`getBlobKeys failed`, error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
async getUpdates() {
|
||||
try {
|
||||
if (!this.db) {
|
||||
logger.warn(`${this.path} is not connected`);
|
||||
return [];
|
||||
}
|
||||
return await this.db.getUpdates();
|
||||
} catch (error) {
|
||||
logger.error('getUpdates', error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
// add a single update to SQLite
|
||||
async addUpdateToSQLite(updates: Uint8Array[]) {
|
||||
// batch write instead write per key stroke?
|
||||
try {
|
||||
if (!this.db) {
|
||||
logger.warn(`${this.path} is not connected`);
|
||||
return;
|
||||
}
|
||||
const start = performance.now();
|
||||
await this.db.insertUpdates(updates);
|
||||
logger.debug(
|
||||
`[SQLiteAdapter][${this.role}] addUpdateToSQLite`,
|
||||
'length:',
|
||||
updates.length,
|
||||
performance.now() - start,
|
||||
'ms'
|
||||
);
|
||||
} catch (error) {
|
||||
logger.error('addUpdateToSQLite', this.path, error);
|
||||
}
|
||||
}
|
||||
}
|
||||
140
apps/electron/src/helper/db/ensure-db.ts
Normal file
140
apps/electron/src/helper/db/ensure-db.ts
Normal file
@@ -0,0 +1,140 @@
|
||||
import type { Subject } from 'rxjs';
|
||||
import { Observable } from 'rxjs';
|
||||
import {
|
||||
concat,
|
||||
defer,
|
||||
from,
|
||||
fromEvent,
|
||||
interval,
|
||||
lastValueFrom,
|
||||
merge,
|
||||
} from 'rxjs';
|
||||
import {
|
||||
concatMap,
|
||||
distinctUntilChanged,
|
||||
filter,
|
||||
ignoreElements,
|
||||
last,
|
||||
map,
|
||||
shareReplay,
|
||||
startWith,
|
||||
switchMap,
|
||||
take,
|
||||
takeUntil,
|
||||
tap,
|
||||
} from 'rxjs/operators';
|
||||
|
||||
import { logger } from '../logger';
|
||||
import { getWorkspaceMeta, workspaceSubjects } from '../workspace';
|
||||
import { SecondaryWorkspaceSQLiteDB } from './secondary-db';
|
||||
import type { WorkspaceSQLiteDB } from './workspace-db-adapter';
|
||||
import { openWorkspaceDatabase } from './workspace-db-adapter';
|
||||
|
||||
// export for testing
|
||||
export const db$Map = new Map<string, Observable<WorkspaceSQLiteDB>>();
|
||||
|
||||
// use defer to prevent `app` is undefined while running tests
|
||||
const beforeQuit$ = defer(() => fromEvent(process, 'beforeExit'));
|
||||
|
||||
// return a stream that emit a single event when the subject completes
|
||||
function completed<T>(subject: Subject<T>) {
|
||||
return new Observable(subscriber => {
|
||||
const sub = subject.subscribe({
|
||||
complete: () => {
|
||||
subscriber.next();
|
||||
subscriber.complete();
|
||||
},
|
||||
});
|
||||
return () => sub.unsubscribe();
|
||||
});
|
||||
}
|
||||
|
||||
function getWorkspaceDB$(id: string) {
|
||||
if (!db$Map.has(id)) {
|
||||
db$Map.set(
|
||||
id,
|
||||
from(openWorkspaceDatabase(id)).pipe(
|
||||
tap({
|
||||
next: db => {
|
||||
logger.info(
|
||||
'[ensureSQLiteDB] db connection established',
|
||||
db.workspaceId
|
||||
);
|
||||
},
|
||||
}),
|
||||
switchMap(db =>
|
||||
// takeUntil the polling stream, and then destroy the db
|
||||
concat(
|
||||
startPollingSecondaryDB(db).pipe(
|
||||
ignoreElements(),
|
||||
startWith(db),
|
||||
takeUntil(merge(beforeQuit$, completed(db.update$))),
|
||||
last(),
|
||||
tap({
|
||||
next() {
|
||||
logger.info(
|
||||
'[ensureSQLiteDB] polling secondary db complete',
|
||||
db.workspaceId
|
||||
);
|
||||
},
|
||||
})
|
||||
),
|
||||
defer(async () => {
|
||||
try {
|
||||
await db.destroy();
|
||||
db$Map.delete(id);
|
||||
return db;
|
||||
} catch (err) {
|
||||
logger.error('[ensureSQLiteDB] destroy db failed', err);
|
||||
throw err;
|
||||
}
|
||||
})
|
||||
).pipe(startWith(db))
|
||||
),
|
||||
shareReplay(1)
|
||||
)
|
||||
);
|
||||
}
|
||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||
return db$Map.get(id)!;
|
||||
}
|
||||
|
||||
function startPollingSecondaryDB(db: WorkspaceSQLiteDB) {
|
||||
return merge(
|
||||
getWorkspaceMeta(db.workspaceId),
|
||||
workspaceSubjects.meta.pipe(
|
||||
map(({ meta }) => meta),
|
||||
filter(meta => meta.id === db.workspaceId)
|
||||
)
|
||||
).pipe(
|
||||
map(meta => meta?.secondaryDBPath),
|
||||
filter((p): p is string => !!p),
|
||||
distinctUntilChanged(),
|
||||
switchMap(path => {
|
||||
// on secondary db path change, destroy the old db and create a new one
|
||||
const secondaryDB = new SecondaryWorkspaceSQLiteDB(path, db);
|
||||
return new Observable<SecondaryWorkspaceSQLiteDB>(subscriber => {
|
||||
subscriber.next(secondaryDB);
|
||||
return () => secondaryDB.destroy();
|
||||
});
|
||||
}),
|
||||
switchMap(secondaryDB => {
|
||||
return interval(300000).pipe(
|
||||
startWith(0),
|
||||
concatMap(() => secondaryDB.pull()),
|
||||
tap({
|
||||
error: err => {
|
||||
logger.error(`[ensureSQLiteDB] polling secondary db error`, err);
|
||||
},
|
||||
complete: () => {
|
||||
logger.info('[ensureSQLiteDB] polling secondary db complete');
|
||||
},
|
||||
})
|
||||
);
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
export function ensureSQLiteDB(id: string) {
|
||||
return lastValueFrom(getWorkspaceDB$(id).pipe(take(1)));
|
||||
}
|
||||
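ensure-db.ts caches one shared Observable<WorkspaceSQLiteDB> per workspace id and tears the connection down when the app is about to quit or when the primary db's update$ completes. A stripped-down sketch of the same caching idea, not part of the diff; names are illustrative and the real implementation also starts the secondary-db polling shown above:

import type { Observable } from 'rxjs';
import { from, lastValueFrom } from 'rxjs';
import { shareReplay, take } from 'rxjs/operators';

// `Resource` stands in for WorkspaceSQLiteDB; `open` for openWorkspaceDatabase
type Resource = { workspaceId: string };

const cache = new Map<string, Observable<Resource>>();

function getResource$(id: string, open: (id: string) => Promise<Resource>) {
  if (!cache.has(id)) {
    // shareReplay(1) lets every later subscriber reuse the already-opened resource
    cache.set(id, from(open(id)).pipe(shareReplay(1)));
  }
  // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
  return cache.get(id)!;
}

// mirrors ensureSQLiteDB: resolve the first cached value as a promise
const ensureResource = (id: string, open: (id: string) => Promise<Resource>) =>
  lastValueFrom(getResource$(id, open).pipe(take(1)));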
48 apps/electron/src/helper/db/index.ts Normal file
@@ -0,0 +1,48 @@
import { mainRPC } from '../main-rpc';
import type { MainEventRegister } from '../type';
import { ensureSQLiteDB } from './ensure-db';
import { dbSubjects } from './subjects';

export * from './ensure-db';
export * from './subjects';

export const dbHandlers = {
  getDocAsUpdates: async (id: string) => {
    const workspaceDB = await ensureSQLiteDB(id);
    return workspaceDB.getDocAsUpdates();
  },
  applyDocUpdate: async (id: string, update: Uint8Array) => {
    const workspaceDB = await ensureSQLiteDB(id);
    return workspaceDB.applyUpdate(update);
  },
  addBlob: async (workspaceId: string, key: string, data: Uint8Array) => {
    const workspaceDB = await ensureSQLiteDB(workspaceId);
    return workspaceDB.addBlob(key, data);
  },
  getBlob: async (workspaceId: string, key: string) => {
    const workspaceDB = await ensureSQLiteDB(workspaceId);
    return workspaceDB.getBlob(key);
  },
  deleteBlob: async (workspaceId: string, key: string) => {
    const workspaceDB = await ensureSQLiteDB(workspaceId);
    return workspaceDB.deleteBlob(key);
  },
  getBlobKeys: async (workspaceId: string) => {
    const workspaceDB = await ensureSQLiteDB(workspaceId);
    return workspaceDB.getBlobKeys();
  },
  getDefaultStorageLocation: async () => {
    return await mainRPC.getPath('sessionData');
  },
};

export const dbEvents = {
  onExternalUpdate: (
    fn: (update: { workspaceId: string; update: Uint8Array }) => void
  ) => {
    const sub = dbSubjects.externalUpdate.subscribe(fn);
    return () => {
      sub.unsubscribe();
    };
  },
} satisfies Record<string, MainEventRegister>;
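Each handler above resolves the workspace database lazily through ensureSQLiteDB, so the first call for a workspace is what actually opens the file. A hedged sketch of direct in-process usage, not part of the diff; in the app these functions are reached over the helper RPC bridge rather than called directly:

// illustrative only: the workspace id can be any string id
async function demo(workspaceId: string) {
  // first call opens the SQLite file; later calls reuse the cached connection
  const snapshot = await dbHandlers.getDocAsUpdates(workspaceId);
  // push a (here trivially re-applied) Yjs update back into the same workspace
  await dbHandlers.applyDocUpdate(workspaceId, snapshot);
  return dbHandlers.getBlobKeys(workspaceId);
}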
11 apps/electron/src/helper/db/merge-update.ts Normal file
@@ -0,0 +1,11 @@
import * as Y from 'yjs';

export function mergeUpdate(updates: Uint8Array[]) {
  const yDoc = new Y.Doc();
  Y.transact(yDoc, () => {
    for (const update of updates) {
      Y.applyUpdate(yDoc, update);
    }
  });
  return Y.encodeStateAsUpdate(yDoc);
}
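mergeUpdate squashes any number of incremental Yjs updates into a single state update by replaying them into a throwaway Y.Doc. A small sketch, not part of the diff, showing that the merged update reproduces the same document state (doc and text names are illustrative):

import * as Y from 'yjs';

const a = new Y.Doc();
const updates: Uint8Array[] = [];
a.on('update', update => updates.push(update));
a.getText('t').insert(0, 'hello');
a.getText('t').insert(5, ' world');

// one update instead of two, equivalent end state
const merged = mergeUpdate(updates);
const b = new Y.Doc();
Y.applyUpdate(b, merged);
console.log(b.getText('t').toString()); // 'hello world'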
215 apps/electron/src/helper/db/secondary-db.ts Normal file
@@ -0,0 +1,215 @@
|
||||
import assert from 'node:assert';
|
||||
|
||||
import type { SqliteConnection } from '@affine/native';
|
||||
import { debounce } from 'lodash-es';
|
||||
import * as Y from 'yjs';
|
||||
|
||||
import { logger } from '../logger';
|
||||
import type { YOrigin } from '../type';
|
||||
import { getWorkspaceMeta } from '../workspace';
|
||||
import { BaseSQLiteAdapter } from './base-db-adapter';
|
||||
import { mergeUpdate } from './merge-update';
|
||||
import type { WorkspaceSQLiteDB } from './workspace-db-adapter';
|
||||
|
||||
const FLUSH_WAIT_TIME = 5000;
|
||||
const FLUSH_MAX_WAIT_TIME = 10000;
|
||||
|
||||
export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
|
||||
role = 'secondary';
|
||||
yDoc = new Y.Doc();
|
||||
firstConnected = false;
|
||||
destroyed = false;
|
||||
|
||||
updateQueue: Uint8Array[] = [];
|
||||
|
||||
unsubscribers = new Set<() => void>();
|
||||
|
||||
constructor(
|
||||
public override path: string,
|
||||
public upstream: WorkspaceSQLiteDB
|
||||
) {
|
||||
super(path);
|
||||
this.setupAndListen();
|
||||
logger.debug('[SecondaryWorkspaceSQLiteDB] created', this.workspaceId);
|
||||
}
|
||||
|
||||
override async destroy() {
|
||||
await this.flushUpdateQueue();
|
||||
this.unsubscribers.forEach(unsub => unsub());
|
||||
this.yDoc.destroy();
|
||||
await super.destroy();
|
||||
this.destroyed = true;
|
||||
}
|
||||
|
||||
get workspaceId() {
|
||||
return this.upstream.workspaceId;
|
||||
}
|
||||
|
||||
// do not update db immediately, instead, push to a queue
|
||||
// and flush the queue at a later time
|
||||
async addUpdateToUpdateQueue(db: SqliteConnection, update: Uint8Array) {
|
||||
this.updateQueue.push(update);
|
||||
await this.debouncedFlush();
|
||||
}
|
||||
|
||||
async flushUpdateQueue() {
|
||||
if (this.destroyed) {
|
||||
return;
|
||||
}
|
||||
logger.debug(
|
||||
'flushUpdateQueue',
|
||||
this.workspaceId,
|
||||
'queue',
|
||||
this.updateQueue.length
|
||||
);
|
||||
const updates = [...this.updateQueue];
|
||||
this.updateQueue = [];
|
||||
await this.run(async () => {
|
||||
await this.addUpdateToSQLite(updates);
|
||||
});
|
||||
}
|
||||
|
||||
// flush after 5s, but will not wait for more than 10s
|
||||
debouncedFlush = debounce(this.flushUpdateQueue, FLUSH_WAIT_TIME, {
|
||||
maxWait: FLUSH_MAX_WAIT_TIME,
|
||||
});
|
||||
|
||||
runCounter = 0;
|
||||
|
||||
// wrap the fn with connect and close
|
||||
async run<T extends (...args: any[]) => any>(
|
||||
fn: T
|
||||
): Promise<
|
||||
(T extends (...args: any[]) => infer U ? Awaited<U> : unknown) | undefined
|
||||
> {
|
||||
try {
|
||||
if (this.destroyed) {
|
||||
return;
|
||||
}
|
||||
await this.connectIfNeeded();
|
||||
this.runCounter++;
|
||||
return await fn();
|
||||
} catch (err) {
|
||||
logger.error(err);
|
||||
throw err;
|
||||
} finally {
|
||||
this.runCounter--;
|
||||
if (this.runCounter === 0) {
|
||||
// just close db, but not the yDoc
|
||||
await super.destroy();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
setupAndListen() {
|
||||
if (this.firstConnected) {
|
||||
return;
|
||||
}
|
||||
this.firstConnected = true;
|
||||
|
||||
const onUpstreamUpdate = (update: Uint8Array, origin: YOrigin) => {
|
||||
if (origin === 'renderer') {
|
||||
// update to upstream yDoc should be replicated to self yDoc
|
||||
this.applyUpdate(update, 'upstream');
|
||||
}
|
||||
};
|
||||
|
||||
const onSelfUpdate = async (update: Uint8Array, origin: YOrigin) => {
|
||||
// for self update from upstream, we need to push it to external DB
|
||||
if (origin === 'upstream' && this.db) {
|
||||
await this.addUpdateToUpdateQueue(this.db, update);
|
||||
}
|
||||
|
||||
if (origin === 'self') {
|
||||
this.upstream.applyUpdate(update, 'external');
|
||||
}
|
||||
};
|
||||
|
||||
// listen to upstream update
|
||||
this.upstream.yDoc.on('update', onUpstreamUpdate);
|
||||
this.yDoc.on('update', onSelfUpdate);
|
||||
|
||||
this.unsubscribers.add(() => {
|
||||
this.upstream.yDoc.off('update', onUpstreamUpdate);
|
||||
this.yDoc.off('update', onSelfUpdate);
|
||||
});
|
||||
|
||||
this.run(() => {
|
||||
// apply all updates from upstream
|
||||
const upstreamUpdate = this.upstream.getDocAsUpdates();
|
||||
// to initialize the yDoc, we need to apply all updates from the db
|
||||
this.applyUpdate(upstreamUpdate, 'upstream');
|
||||
})
|
||||
.then(() => {
|
||||
logger.debug('run success');
|
||||
})
|
||||
.catch(err => {
|
||||
logger.error('run error', err);
|
||||
});
|
||||
}
|
||||
|
||||
applyUpdate = (data: Uint8Array, origin: YOrigin = 'upstream') => {
|
||||
Y.applyUpdate(this.yDoc, data, origin);
|
||||
};
|
||||
|
||||
// TODO: have a better solution to handle blobs
|
||||
async syncBlobs() {
|
||||
await this.run(async () => {
|
||||
// skip if upstream db is not connected (maybe it is already closed)
|
||||
const blobsKeys = await this.getBlobKeys();
|
||||
if (!this.upstream.db || this.upstream.db?.isClose) {
|
||||
return;
|
||||
}
|
||||
const upstreamBlobsKeys = await this.upstream.getBlobKeys();
|
||||
// put every missing blob to upstream
|
||||
for (const key of blobsKeys) {
|
||||
if (!upstreamBlobsKeys.includes(key)) {
|
||||
const blob = await this.getBlob(key);
|
||||
if (blob) {
|
||||
await this.upstream.addBlob(key, blob);
|
||||
logger.debug('syncBlobs', this.workspaceId, key);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* pull from external DB file and apply to embedded yDoc
|
||||
* workflow:
|
||||
* - connect to external db
|
||||
* - get updates
|
||||
* - apply updates to local yDoc
|
||||
* - get blobs and put new blobs to upstream
|
||||
* - disconnect
|
||||
*/
|
||||
async pull() {
|
||||
const start = performance.now();
|
||||
assert(this.upstream.db, 'upstream db should be connected');
|
||||
const updates = await this.run(async () => {
|
||||
// TODO: no need to get all updates, just get the latest ones (using a cursor, etc)?
|
||||
await this.syncBlobs();
|
||||
return (await this.getUpdates()).map(update => update.data);
|
||||
});
|
||||
|
||||
if (!updates || this.destroyed) {
|
||||
return;
|
||||
}
|
||||
|
||||
const merged = mergeUpdate(updates);
|
||||
this.applyUpdate(merged, 'self');
|
||||
|
||||
logger.debug(
|
||||
'pull external updates',
|
||||
this.path,
|
||||
updates.length,
|
||||
(performance.now() - start).toFixed(2),
|
||||
'ms'
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export async function getSecondaryWorkspaceDBPath(workspaceId: string) {
|
||||
const meta = await getWorkspaceMeta(workspaceId);
|
||||
return meta?.secondaryDBPath;
|
||||
}
|
||||
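The secondary db never writes per update: updates are pushed onto updateQueue and flushed by a lodash debounce with maxWait, i.e. at most 5s after the last queued update but never more than 10s after the first one. A minimal sketch of that batching pattern, not part of the diff; writeBatch is a hypothetical stand-in for addUpdateToSQLite:

import { debounce } from 'lodash-es';

declare function writeBatch(batch: Uint8Array[]): Promise<void>; // stand-in for addUpdateToSQLite

const queue: Uint8Array[] = [];

const flush = async () => {
  const batch = queue.splice(0, queue.length);
  if (batch.length) await writeBatch(batch);
};

// quiet for 5s => flush; a steady stream still flushes at least every 10s
const debouncedFlush = debounce(flush, 5000, { maxWait: 10000 });

function enqueue(update: Uint8Array) {
  queue.push(update);
  void debouncedFlush();
}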
5 apps/electron/src/helper/db/subjects.ts Normal file
@@ -0,0 +1,5 @@
import { Subject } from 'rxjs';

export const dbSubjects = {
  externalUpdate: new Subject<{ workspaceId: string; update: Uint8Array }>(),
};
102 apps/electron/src/helper/db/workspace-db-adapter.ts Normal file
@@ -0,0 +1,102 @@
|
||||
import { Subject } from 'rxjs';
|
||||
import * as Y from 'yjs';
|
||||
|
||||
import { logger } from '../logger';
|
||||
import type { YOrigin } from '../type';
|
||||
import { getWorkspaceMeta } from '../workspace';
|
||||
import { BaseSQLiteAdapter } from './base-db-adapter';
|
||||
import { mergeUpdate } from './merge-update';
|
||||
import { dbSubjects } from './subjects';
|
||||
|
||||
export class WorkspaceSQLiteDB extends BaseSQLiteAdapter {
|
||||
role = 'primary';
|
||||
yDoc = new Y.Doc();
|
||||
firstConnected = false;
|
||||
|
||||
update$ = new Subject<void>();
|
||||
|
||||
constructor(public override path: string, public workspaceId: string) {
|
||||
super(path);
|
||||
}
|
||||
|
||||
override async destroy() {
|
||||
await super.destroy();
|
||||
this.yDoc.destroy();
|
||||
|
||||
// when db is closed, we can safely remove it from ensure-db list
|
||||
this.update$.complete();
|
||||
this.firstConnected = false;
|
||||
}
|
||||
|
||||
getWorkspaceName = () => {
|
||||
return this.yDoc.getMap('space:meta').get('name') as string;
|
||||
};
|
||||
|
||||
async init() {
|
||||
const db = await super.connectIfNeeded();
|
||||
|
||||
if (!this.firstConnected) {
|
||||
this.yDoc.on('update', async (update: Uint8Array, origin: YOrigin) => {
|
||||
if (origin === 'renderer') {
|
||||
await this.addUpdateToSQLite([update]);
|
||||
} else if (origin === 'external') {
|
||||
dbSubjects.externalUpdate.next({
|
||||
workspaceId: this.workspaceId,
|
||||
update,
|
||||
});
|
||||
await this.addUpdateToSQLite([update]);
|
||||
logger.debug('external update', this.workspaceId);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
const updates = await this.getUpdates();
|
||||
const merged = mergeUpdate(updates.map(update => update.data));
|
||||
|
||||
// to initialize the yDoc, we need to apply all updates from the db
|
||||
this.applyUpdate(merged, 'self');
|
||||
|
||||
this.firstConnected = true;
|
||||
this.update$.next();
|
||||
|
||||
return db;
|
||||
}
|
||||
|
||||
getDocAsUpdates = () => {
|
||||
return Y.encodeStateAsUpdate(this.yDoc);
|
||||
};
|
||||
|
||||
// non-blocking and use yDoc to validate the update
|
||||
// after that, the update is added to the db
|
||||
applyUpdate = (data: Uint8Array, origin: YOrigin = 'renderer') => {
|
||||
// todo: trim the updates when the number of records is too large
|
||||
// 1. store the current ydoc state in the db
|
||||
// 2. then delete the old updates
|
||||
// yjs-idb will always trim the db for the first time after DB is loaded
|
||||
Y.applyUpdate(this.yDoc, data, origin);
|
||||
};
|
||||
|
||||
override async addBlob(key: string, value: Uint8Array) {
|
||||
this.update$.next();
|
||||
const res = await super.addBlob(key, value);
|
||||
return res;
|
||||
}
|
||||
|
||||
override async deleteBlob(key: string) {
|
||||
this.update$.next();
|
||||
await super.deleteBlob(key);
|
||||
}
|
||||
|
||||
override async addUpdateToSQLite(data: Uint8Array[]) {
|
||||
this.update$.next();
|
||||
await super.addUpdateToSQLite(data);
|
||||
}
|
||||
}
|
||||
|
||||
export async function openWorkspaceDatabase(workspaceId: string) {
|
||||
const meta = await getWorkspaceMeta(workspaceId);
|
||||
const db = new WorkspaceSQLiteDB(meta.mainDBPath, workspaceId);
|
||||
await db.init();
|
||||
logger.info(`openWorkspaceDatabase [${workspaceId}]`);
|
||||
return db;
|
||||
}
|
||||
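WorkspaceSQLiteDB.init seeds its in-memory Y.Doc from the merged SQLite updates and then persists anything tagged with the 'renderer' or 'external' origin. A hedged sketch, not part of the diff, of how a caller on the renderer side might hydrate its own doc and feed local edits back through applyDocUpdate; the `apis` bridge name is an assumption for illustration:

import * as Y from 'yjs';

// hypothetical preload bridge exposing the dbHandlers shown earlier
declare const apis: {
  db: {
    getDocAsUpdates(id: string): Promise<Uint8Array>;
    applyDocUpdate(id: string, update: Uint8Array): Promise<void>;
  };
};

async function hydrate(workspaceId: string) {
  const doc = new Y.Doc();
  // seed from the helper's SQLite-backed doc
  Y.applyUpdate(doc, await apis.db.getDocAsUpdates(workspaceId));
  // push local edits back; the helper applies them with origin 'renderer'
  doc.on('update', update => {
    void apis.db.applyDocUpdate(workspaceId, update);
  });
  return doc;
}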
@@ -1,26 +1,33 @@
|
||||
import path from 'node:path';
|
||||
|
||||
import { dialog, shell } from 'electron';
|
||||
import fs from 'fs-extra';
|
||||
import { nanoid } from 'nanoid';
|
||||
|
||||
import { appContext } from '../../context';
|
||||
import { logger } from '../../logger';
|
||||
import { ensureSQLiteDB, isRemoveOrMoveEvent } from '../db/ensure-db';
|
||||
import type { WorkspaceSQLiteDB } from '../db/sqlite';
|
||||
import { getWorkspaceDBPath, isValidDBFile } from '../db/sqlite';
|
||||
import { listWorkspaces } from '../workspace/workspace';
|
||||
import { ensureSQLiteDB } from '../db/ensure-db';
|
||||
import type { WorkspaceSQLiteDB } from '../db/workspace-db-adapter';
|
||||
import { logger } from '../logger';
|
||||
import { mainRPC } from '../main-rpc';
|
||||
import {
|
||||
getWorkspaceDBPath,
|
||||
getWorkspaceMeta,
|
||||
getWorkspacesBasePath,
|
||||
listWorkspaces,
|
||||
storeWorkspaceMeta,
|
||||
} from '../workspace';
|
||||
|
||||
// NOTE:
|
||||
// we are using native dialogs because HTML dialogs do not give full file paths
|
||||
|
||||
export async function revealDBFile(workspaceId: string) {
|
||||
const workspaceDB = await ensureSQLiteDB(workspaceId);
|
||||
shell.showItemInFolder(await fs.realpath(workspaceDB.path));
|
||||
const meta = await getWorkspaceMeta(workspaceId);
|
||||
if (!meta) {
|
||||
return;
|
||||
}
|
||||
await mainRPC.showItemInFolder(meta.secondaryDBPath ?? meta.mainDBPath);
|
||||
}
|
||||
|
||||
// provide a backdoor to set dialog path for testing in playwright
|
||||
interface FakeDialogResult {
|
||||
export interface FakeDialogResult {
|
||||
canceled?: boolean;
|
||||
filePath?: string;
|
||||
filePaths?: string[];
|
||||
@@ -54,12 +61,20 @@ const ErrorMessages = [
|
||||
|
||||
type ErrorMessage = (typeof ErrorMessages)[number];
|
||||
|
||||
interface SaveDBFileResult {
|
||||
export interface SaveDBFileResult {
|
||||
filePath?: string;
|
||||
canceled?: boolean;
|
||||
error?: ErrorMessage;
|
||||
}
|
||||
|
||||
const extension = 'affine';
|
||||
|
||||
function getDefaultDBFileName(name: string, id: string) {
|
||||
const fileName = `${name}_${id}.${extension}`;
|
||||
// make sure fileName is a valid file name
|
||||
return fileName.replace(/[/\\?%*:|"<>]/g, '-');
|
||||
}
|
||||
|
||||
/**
|
||||
* This function is called when the user clicks the "Save" button in the "Save Workspace" dialog.
|
||||
*
|
||||
@@ -72,12 +87,18 @@ export async function saveDBFileAs(
|
||||
const db = await ensureSQLiteDB(workspaceId);
|
||||
const ret =
|
||||
getFakedResult() ??
|
||||
(await dialog.showSaveDialog({
|
||||
(await mainRPC.showSaveDialog({
|
||||
properties: ['showOverwriteConfirmation'],
|
||||
title: 'Save Workspace',
|
||||
showsTagField: false,
|
||||
buttonLabel: 'Save',
|
||||
defaultPath: `${db.getWorkspaceName()}_${workspaceId}.db`,
|
||||
filters: [
|
||||
{
|
||||
extensions: [extension],
|
||||
name: '',
|
||||
},
|
||||
],
|
||||
defaultPath: getDefaultDBFileName(db.getWorkspaceName(), workspaceId),
|
||||
message: 'Save Workspace as a SQLite Database file',
|
||||
}));
|
||||
const filePath = ret.filePath;
|
||||
@@ -89,7 +110,9 @@ export async function saveDBFileAs(
|
||||
|
||||
await fs.copyFile(db.path, filePath);
|
||||
logger.log('saved', filePath);
|
||||
shell.showItemInFolder(filePath);
|
||||
mainRPC.showItemInFolder(filePath).catch(err => {
|
||||
console.error(err);
|
||||
});
|
||||
return { filePath };
|
||||
} catch (err) {
|
||||
logger.error('saveDBFileAs', err);
|
||||
@@ -99,7 +122,7 @@ export async function saveDBFileAs(
|
||||
}
|
||||
}
|
||||
|
||||
interface SelectDBFileLocationResult {
|
||||
export interface SelectDBFileLocationResult {
|
||||
filePath?: string;
|
||||
error?: ErrorMessage;
|
||||
canceled?: boolean;
|
||||
@@ -109,27 +132,20 @@ export async function selectDBFileLocation(): Promise<SelectDBFileLocationResult
|
||||
try {
|
||||
const ret =
|
||||
getFakedResult() ??
|
||||
(await dialog.showSaveDialog({
|
||||
properties: ['showOverwriteConfirmation'],
|
||||
title: 'Set database location',
|
||||
showsTagField: false,
|
||||
(await mainRPC.showOpenDialog({
|
||||
properties: ['openDirectory'],
|
||||
title: 'Set Workspace Storage Location',
|
||||
buttonLabel: 'Select',
|
||||
defaultPath: `workspace-storage.db`,
|
||||
defaultPath: await mainRPC.getPath('documents'),
|
||||
message: "Select a location to store the workspace's database file",
|
||||
}));
|
||||
const filePath = ret.filePath;
|
||||
if (ret.canceled || !filePath) {
|
||||
const dir = ret.filePaths?.[0];
|
||||
if (ret.canceled || !dir) {
|
||||
return {
|
||||
canceled: true,
|
||||
};
|
||||
}
|
||||
// the same db file cannot be loaded twice
|
||||
if (await dbFileAlreadyLoaded(filePath)) {
|
||||
return {
|
||||
error: 'DB_FILE_ALREADY_LOADED',
|
||||
};
|
||||
}
|
||||
return { filePath };
|
||||
return { filePath: dir };
|
||||
} catch (err) {
|
||||
logger.error('selectDBFileLocation', err);
|
||||
return {
|
||||
@@ -138,7 +154,7 @@ export async function selectDBFileLocation(): Promise<SelectDBFileLocationResult
|
||||
}
|
||||
}
|
||||
|
||||
interface LoadDBFileResult {
|
||||
export interface LoadDBFileResult {
|
||||
workspaceId?: string;
|
||||
error?: ErrorMessage;
|
||||
canceled?: boolean;
|
||||
@@ -162,7 +178,7 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
|
||||
try {
|
||||
const ret =
|
||||
getFakedResult() ??
|
||||
(await dialog.showOpenDialog({
|
||||
(await mainRPC.showOpenDialog({
|
||||
properties: ['openFile'],
|
||||
title: 'Load Workspace',
|
||||
buttonLabel: 'Load',
|
||||
@@ -170,10 +186,10 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
|
||||
{
|
||||
name: 'SQLite Database',
|
||||
// do we want to support other file format?
|
||||
extensions: ['db'],
|
||||
extensions: ['db', 'affine'],
|
||||
},
|
||||
],
|
||||
message: 'Load Workspace from a SQLite Database file',
|
||||
message: 'Load Workspace from an AFFiNE file',
|
||||
}));
|
||||
const filePath = ret.filePaths?.[0];
|
||||
if (ret.canceled || !filePath) {
|
||||
@@ -182,7 +198,7 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
|
||||
}
|
||||
|
||||
// the imported file should not be in app data dir
|
||||
if (filePath.startsWith(path.join(appContext.appDataPath, 'workspaces'))) {
|
||||
if (filePath.startsWith(await getWorkspacesBasePath())) {
|
||||
logger.warn('loadDBFile: db file in app data dir');
|
||||
return { error: 'DB_FILE_PATH_INVALID' };
|
||||
}
|
||||
@@ -192,19 +208,27 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
|
||||
return { error: 'DB_FILE_ALREADY_LOADED' };
|
||||
}
|
||||
|
||||
if (!isValidDBFile(filePath)) {
|
||||
const { SqliteConnection } = await import('@affine/native');
|
||||
|
||||
if (!(await SqliteConnection.validate(filePath))) {
|
||||
// TODO: report invalid db file error?
|
||||
return { error: 'DB_FILE_INVALID' }; // invalid db file
|
||||
}
|
||||
|
||||
// symlink the db file to a new workspace id
|
||||
// copy the db file to a new workspace id
|
||||
const workspaceId = nanoid(10);
|
||||
const linkedFilePath = await getWorkspaceDBPath(appContext, workspaceId);
|
||||
const internalFilePath = await getWorkspaceDBPath(workspaceId);
|
||||
|
||||
await fs.ensureDir(path.join(appContext.appDataPath, 'workspaces'));
|
||||
await fs.ensureDir(await getWorkspacesBasePath());
|
||||
|
||||
await fs.symlink(filePath, linkedFilePath, 'file');
|
||||
logger.info(`loadDBFile, symlink: ${filePath} -> ${linkedFilePath}`);
|
||||
await fs.copy(filePath, internalFilePath);
|
||||
logger.info(`loadDBFile, copy: ${filePath} -> ${internalFilePath}`);
|
||||
|
||||
await storeWorkspaceMeta(workspaceId, {
|
||||
id: workspaceId,
|
||||
mainDBPath: internalFilePath,
|
||||
secondaryDBPath: filePath,
|
||||
});
|
||||
|
||||
return { workspaceId };
|
||||
} catch (err) {
|
||||
@@ -215,7 +239,7 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
|
||||
}
|
||||
}
|
||||
|
||||
interface MoveDBFileResult {
|
||||
export interface MoveDBFileResult {
|
||||
filePath?: string;
|
||||
error?: ErrorMessage;
|
||||
canceled?: boolean;
|
||||
@@ -225,58 +249,50 @@ interface MoveDBFileResult {
|
||||
* This function is called when the user clicks the "Move" button in the "Move Workspace Storage" setting.
|
||||
*
|
||||
* It will
|
||||
* - move the source db file to a new location
|
||||
* - symlink the new location to the old db file
|
||||
* - copy the source db file to a new location
|
||||
* - remove the old db external file
|
||||
* - update the external db file path in the workspace meta
|
||||
* - return the new file path
|
||||
*/
|
||||
export async function moveDBFile(
|
||||
workspaceId: string,
|
||||
dbFileLocation?: string
|
||||
dbFileDir?: string
|
||||
): Promise<MoveDBFileResult> {
|
||||
let db: WorkspaceSQLiteDB | null = null;
|
||||
try {
|
||||
const { moveFile, FsWatcher } = await import('@affine/native');
|
||||
db = await ensureSQLiteDB(workspaceId);
|
||||
// get the real file path of db
|
||||
const realpath = await fs.realpath(db.path);
|
||||
const isLink = realpath !== db.path;
|
||||
const watcher = FsWatcher.watch(realpath, { recursive: false });
|
||||
const waitForRemove = new Promise<void>(resolve => {
|
||||
const subscription = watcher.subscribe(event => {
|
||||
if (isRemoveOrMoveEvent(event)) {
|
||||
subscription.unsubscribe();
|
||||
// resolve after FSWatcher in `database$` is fired
|
||||
setImmediate(() => {
|
||||
resolve();
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
const newFilePath =
|
||||
dbFileLocation ??
|
||||
const meta = await getWorkspaceMeta(workspaceId);
|
||||
|
||||
const oldDir = meta.secondaryDBPath
|
||||
? path.dirname(meta.secondaryDBPath)
|
||||
: null;
|
||||
const defaultDir = oldDir ?? (await mainRPC.getPath('documents'));
|
||||
|
||||
const newName = getDefaultDBFileName(db.getWorkspaceName(), workspaceId);
|
||||
|
||||
const newDirPath =
|
||||
dbFileDir ??
|
||||
(
|
||||
getFakedResult() ??
|
||||
(await dialog.showSaveDialog({
|
||||
properties: ['showOverwriteConfirmation'],
|
||||
(await mainRPC.showOpenDialog({
|
||||
properties: ['openDirectory'],
|
||||
title: 'Move Workspace Storage',
|
||||
showsTagField: false,
|
||||
buttonLabel: 'Save',
|
||||
defaultPath: realpath,
|
||||
buttonLabel: 'Move',
|
||||
defaultPath: defaultDir,
|
||||
message: 'Move Workspace storage file',
|
||||
}))
|
||||
).filePath;
|
||||
).filePaths?.[0];
|
||||
|
||||
// skips if
|
||||
// - user canceled the dialog
|
||||
// - user selected the same file
|
||||
// - user selected the same file in the link file in app data dir
|
||||
if (!newFilePath || newFilePath === realpath || db.path === newFilePath) {
|
||||
// - user selected the same dir
|
||||
if (!newDirPath || newDirPath === oldDir) {
|
||||
return {
|
||||
canceled: true,
|
||||
};
|
||||
}
|
||||
|
||||
db.db.close();
|
||||
const newFilePath = path.join(newDirPath, newName);
|
||||
|
||||
if (await fs.pathExists(newFilePath)) {
|
||||
return {
|
||||
@@ -284,30 +300,35 @@ export async function moveDBFile(
|
||||
};
|
||||
}
|
||||
|
||||
if (isLink) {
|
||||
// remove the old link to unblock new link
|
||||
await fs.unlink(db.path);
|
||||
logger.info(`[moveDBFile] copy ${meta.mainDBPath} -> ${newFilePath}`);
|
||||
|
||||
await fs.copy(meta.mainDBPath, newFilePath);
|
||||
|
||||
// remove the old db file, but we don't care if it fails
|
||||
if (meta.secondaryDBPath) {
|
||||
await fs
|
||||
.remove(meta.secondaryDBPath)
|
||||
.then(() => {
|
||||
logger.info(`[moveDBFile] removed ${meta.secondaryDBPath}`);
|
||||
})
|
||||
.catch(err => {
|
||||
logger.error(
|
||||
`[moveDBFile] remove ${meta.secondaryDBPath} failed`,
|
||||
err
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
logger.info(`[moveDBFile] move ${realpath} -> ${newFilePath}`);
|
||||
|
||||
await moveFile(realpath, newFilePath);
|
||||
|
||||
await fs.ensureSymlink(newFilePath, db.path, 'file');
|
||||
logger.info(`[moveDBFile] symlink: ${realpath} -> ${newFilePath}`);
|
||||
// wait for the file move event emits to the FileWatcher in database$ in ensure-db.ts
|
||||
// so that the db will be destroyed and we can call the `ensureSQLiteDB` in the next step
|
||||
// or the FileWatcher will continue listen on the `realpath` and emit file change events
|
||||
// then the database will reload while receiving these events; and the moved database file will be recreated while reloading database
|
||||
await waitForRemove;
|
||||
logger.info(`removed`);
|
||||
await ensureSQLiteDB(workspaceId);
|
||||
// update meta
|
||||
await storeWorkspaceMeta(workspaceId, {
|
||||
secondaryDBPath: newFilePath,
|
||||
});
|
||||
|
||||
return {
|
||||
filePath: newFilePath,
|
||||
};
|
||||
} catch (err) {
|
||||
db?.destroy();
|
||||
await db?.destroy();
|
||||
logger.error('[moveDBFile]', err);
|
||||
return {
|
||||
error: 'UNKNOWN_ERROR',
|
||||
@@ -316,8 +337,7 @@ export async function moveDBFile(
|
||||
}
|
||||
|
||||
async function dbFileAlreadyLoaded(path: string) {
|
||||
const meta = await listWorkspaces(appContext);
|
||||
const realpath = await fs.realpath(path);
|
||||
const paths = meta.map(m => m[1].realpath);
|
||||
return paths.includes(realpath);
|
||||
const meta = await listWorkspaces();
|
||||
const paths = meta.map(m => m[1].secondaryDBPath);
|
||||
return paths.includes(path);
|
||||
}
|
||||
@@ -1,4 +1,3 @@
|
||||
import type { NamespaceHandlers } from '../type';
|
||||
import {
|
||||
loadDBFile,
|
||||
moveDBFile,
|
||||
@@ -9,25 +8,24 @@ import {
|
||||
} from './dialog';
|
||||
|
||||
export const dialogHandlers = {
|
||||
revealDBFile: async (_, workspaceId: string) => {
|
||||
revealDBFile: async (workspaceId: string) => {
|
||||
return revealDBFile(workspaceId);
|
||||
},
|
||||
loadDBFile: async () => {
|
||||
return loadDBFile();
|
||||
},
|
||||
saveDBFileAs: async (_, workspaceId: string) => {
|
||||
saveDBFileAs: async (workspaceId: string) => {
|
||||
return saveDBFileAs(workspaceId);
|
||||
},
|
||||
moveDBFile: (_, workspaceId: string, dbFileLocation?: string) => {
|
||||
moveDBFile: (workspaceId: string, dbFileLocation?: string) => {
|
||||
return moveDBFile(workspaceId, dbFileLocation);
|
||||
},
|
||||
selectDBFileLocation: async () => {
|
||||
return selectDBFileLocation();
|
||||
},
|
||||
setFakeDialogResult: async (
|
||||
_,
|
||||
result: Parameters<typeof setFakeDialogResult>[0]
|
||||
) => {
|
||||
return setFakeDialogResult(result);
|
||||
},
|
||||
} satisfies NamespaceHandlers;
|
||||
};
|
||||
33 apps/electron/src/helper/exposed.ts Normal file
@@ -0,0 +1,33 @@
import { dbEvents, dbHandlers } from './db';
import { dialogHandlers } from './dialog';
import { workspaceEvents, workspaceHandlers } from './workspace';

export const handlers = {
  db: dbHandlers,
  workspace: workspaceHandlers,
  dialog: dialogHandlers,
};

export const events = {
  db: dbEvents,
  workspace: workspaceEvents,
};

export const getExposedMeta = () => {
  const handlersMeta = Object.entries(handlers).map(
    ([namespace, namespaceHandlers]) => {
      return [namespace, Object.keys(namespaceHandlers)] as [string, string[]];
    }
  );

  const eventsMeta = Object.entries(events).map(
    ([namespace, namespaceHandlers]) => {
      return [namespace, Object.keys(namespaceHandlers)] as [string, string[]];
    }
  );

  return {
    handlers: handlersMeta,
    events: eventsMeta,
  };
};
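getExposedMeta only ships names across the process boundary, so the renderer can build call proxies without importing any helper code. Based on the handlers and events defined in this diff, the returned value has roughly this shape (handler lists abbreviated for illustration):

// illustrative result of getExposedMeta()
const meta = {
  handlers: [
    ['db', ['getDocAsUpdates', 'applyDocUpdate', 'addBlob', 'getBlob' /* ... */]],
    ['workspace', ['list', 'delete', 'getMeta']],
    ['dialog', ['revealDBFile', 'loadDBFile', 'saveDBFileAs' /* ... */]],
  ],
  events: [
    ['db', ['onExternalUpdate']],
    ['workspace', ['onMetaChange']],
  ],
};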
88 apps/electron/src/helper/index.ts Normal file
@@ -0,0 +1,88 @@
|
||||
import type { EventBasedChannel } from 'async-call-rpc';
|
||||
import { AsyncCall } from 'async-call-rpc';
|
||||
|
||||
import { events, handlers } from './exposed';
|
||||
import { logger } from './logger';
|
||||
|
||||
const createMessagePortMainChannel = (
|
||||
connection: Electron.MessagePortMain
|
||||
): EventBasedChannel => {
|
||||
return {
|
||||
on(listener) {
|
||||
const f = (e: Electron.MessageEvent) => {
|
||||
listener(e.data);
|
||||
};
|
||||
connection.on('message', f);
|
||||
// MUST start the connection to receive messages
|
||||
connection.start();
|
||||
return () => {
|
||||
connection.off('message', f);
|
||||
};
|
||||
},
|
||||
send(data) {
|
||||
connection.postMessage(data);
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
function setupRendererConnection(rendererPort: Electron.MessagePortMain) {
|
||||
const flattenedHandlers = Object.entries(handlers).flatMap(
|
||||
([namespace, namespaceHandlers]) => {
|
||||
return Object.entries(namespaceHandlers).map(([name, handler]) => {
|
||||
const handlerWithLog = async (...args: any[]) => {
|
||||
try {
|
||||
const start = performance.now();
|
||||
const result = await handler(...args);
|
||||
logger.info(
|
||||
'[async-api]',
|
||||
`${namespace}.${name}`,
|
||||
args.filter(
|
||||
arg => typeof arg !== 'function' && typeof arg !== 'object'
|
||||
),
|
||||
'-',
|
||||
(performance.now() - start).toFixed(2),
|
||||
'ms'
|
||||
);
|
||||
return result;
|
||||
} catch (error) {
|
||||
logger.error('[async-api]', `${namespace}.${name}`, error);
|
||||
}
|
||||
};
|
||||
return [`${namespace}:${name}`, handlerWithLog];
|
||||
});
|
||||
}
|
||||
);
|
||||
const rpc = AsyncCall<PeersAPIs.RendererToHelper>(
|
||||
Object.fromEntries(flattenedHandlers),
|
||||
{
|
||||
channel: createMessagePortMainChannel(rendererPort),
|
||||
log: false,
|
||||
}
|
||||
);
|
||||
|
||||
for (const [namespace, namespaceEvents] of Object.entries(events)) {
|
||||
for (const [key, eventRegister] of Object.entries(namespaceEvents)) {
|
||||
const subscription = eventRegister((...args: any[]) => {
|
||||
const chan = `${namespace}:${key}`;
|
||||
rpc.postEvent(chan, ...args).catch(err => {
|
||||
console.error(err);
|
||||
});
|
||||
});
|
||||
process.on('exit', () => {
|
||||
subscription();
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function main() {
|
||||
process.parentPort.on('message', e => {
|
||||
if (e.data.channel === 'renderer-connect' && e.ports.length === 1) {
|
||||
const rendererPort = e.ports[0];
|
||||
setupRendererConnection(rendererPort);
|
||||
logger.info('[helper] renderer connected');
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
main();
|
||||
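The helper wraps an Electron.MessagePortMain in async-call-rpc's EventBasedChannel and registers every handler under a flat `namespace:name` key. The renderer end of the same pipe would wrap a regular web MessagePort the same way; a hedged sketch under that assumption (the port hand-off itself is not shown in this diff):

import { AsyncCall, type EventBasedChannel } from 'async-call-rpc';

// `port` is assumed to be the MessagePort the main process forwarded to the renderer
function createRendererChannel(port: MessagePort): EventBasedChannel {
  return {
    on(listener) {
      const f = (e: MessageEvent) => listener(e.data);
      port.addEventListener('message', f);
      port.start();
      return () => port.removeEventListener('message', f);
    },
    send(data) {
      port.postMessage(data);
    },
  };
}

// calls are addressed with the same flat keys the helper registered, e.g. 'db:getDocAsUpdates'
const helperRPC = (port: MessagePort) =>
  AsyncCall<Record<string, (...args: any[]) => Promise<any>>>(
    {},
    { channel: createRendererChannel(port), log: false }
  );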
3 apps/electron/src/helper/logger.ts Normal file
@@ -0,0 +1,3 @@
import log from 'electron-log';

export const logger = log.scope('helper');
33 apps/electron/src/helper/main-rpc.ts Normal file
@@ -0,0 +1,33 @@
import { AsyncCall, type EventBasedChannel } from 'async-call-rpc';

import { getExposedMeta } from './exposed';

function createMessagePortMainChannel(
  connection: Electron.ParentPort
): EventBasedChannel {
  return {
    on(listener) {
      const f = (e: Electron.MessageEvent) => {
        listener(e.data);
      };
      connection.on('message', f);
      return () => {
        connection.off('message', f);
      };
    },
    send(data) {
      connection.postMessage(data);
    },
  };
}

const helperToMainServer: PeersAPIs.HelperToMain = {
  getMeta: () => getExposedMeta(),
};

export const mainRPC = AsyncCall<PeersAPIs.MainToHelper>(helperToMainServer, {
  strict: {
    unknownMessage: false,
  },
  channel: createMessagePortMainChannel(process.parentPort),
});
9 apps/electron/src/helper/type.ts Normal file
@@ -0,0 +1,9 @@
export interface WorkspaceMeta {
  id: string;
  mainDBPath: string;
  secondaryDBPath?: string; // assume there will be only one
}

export type YOrigin = 'self' | 'external' | 'upstream' | 'renderer';

export type MainEventRegister = (...args: any[]) => () => void;
1 apps/electron/src/helper/workspace/__tests__/.gitignore vendored Normal file
@@ -0,0 +1 @@
tmp
142 apps/electron/src/helper/workspace/__tests__/handlers.spec.ts Normal file
@@ -0,0 +1,142 @@
|
||||
import path from 'node:path';
|
||||
|
||||
import fs from 'fs-extra';
|
||||
import { v4 } from 'uuid';
|
||||
import { afterEach, describe, expect, test, vi } from 'vitest';
|
||||
|
||||
const tmpDir = path.join(__dirname, 'tmp');
|
||||
const appDataPath = path.join(tmpDir, 'app-data');
|
||||
|
||||
vi.doMock('../../db/ensure-db', () => ({
|
||||
ensureSQLiteDB: async () => ({
|
||||
destroy: () => {},
|
||||
}),
|
||||
}));
|
||||
|
||||
vi.doMock('../../main-rpc', () => ({
|
||||
mainRPC: {
|
||||
getPath: async () => appDataPath,
|
||||
},
|
||||
}));
|
||||
|
||||
afterEach(async () => {
|
||||
await fs.remove(tmpDir);
|
||||
});
|
||||
|
||||
describe('list workspaces', () => {
|
||||
test('listWorkspaces (valid)', async () => {
|
||||
const { listWorkspaces } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(appDataPath, 'workspaces', workspaceId);
|
||||
const meta = {
|
||||
id: workspaceId,
|
||||
};
|
||||
await fs.ensureDir(workspacePath);
|
||||
await fs.writeJSON(path.join(workspacePath, 'meta.json'), meta);
|
||||
const workspaces = await listWorkspaces();
|
||||
expect(workspaces).toEqual([[workspaceId, meta]]);
|
||||
});
|
||||
|
||||
test('listWorkspaces (without meta json file)', async () => {
|
||||
const { listWorkspaces } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(appDataPath, 'workspaces', workspaceId);
|
||||
await fs.ensureDir(workspacePath);
|
||||
const workspaces = await listWorkspaces();
|
||||
expect(workspaces).toEqual([
|
||||
[
|
||||
workspaceId,
|
||||
// meta file will be created automatically
|
||||
{ id: workspaceId, mainDBPath: path.join(workspacePath, 'storage.db') },
|
||||
],
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('delete workspace', () => {
|
||||
test('deleteWorkspace', async () => {
|
||||
const { deleteWorkspace } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(appDataPath, 'workspaces', workspaceId);
|
||||
await fs.ensureDir(workspacePath);
|
||||
await deleteWorkspace(workspaceId);
|
||||
expect(await fs.pathExists(workspacePath)).toBe(false);
|
||||
// removed workspace will be moved to deleted-workspaces
|
||||
expect(
|
||||
await fs.pathExists(
|
||||
path.join(appDataPath, 'deleted-workspaces', workspaceId)
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getWorkspaceMeta', () => {
|
||||
test('can get meta', async () => {
|
||||
const { getWorkspaceMeta } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(appDataPath, 'workspaces', workspaceId);
|
||||
const meta = {
|
||||
id: workspaceId,
|
||||
};
|
||||
await fs.ensureDir(workspacePath);
|
||||
await fs.writeJSON(path.join(workspacePath, 'meta.json'), meta);
|
||||
expect(await getWorkspaceMeta(workspaceId)).toEqual(meta);
|
||||
});
|
||||
|
||||
test('can create meta if not exists', async () => {
|
||||
const { getWorkspaceMeta } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(appDataPath, 'workspaces', workspaceId);
|
||||
await fs.ensureDir(workspacePath);
|
||||
expect(await getWorkspaceMeta(workspaceId)).toEqual({
|
||||
id: workspaceId,
|
||||
mainDBPath: path.join(workspacePath, 'storage.db'),
|
||||
});
|
||||
expect(
|
||||
await fs.pathExists(path.join(workspacePath, 'meta.json'))
|
||||
).toBeTruthy();
|
||||
});
|
||||
|
||||
test('can migrate meta if db file is a link', async () => {
|
||||
const { getWorkspaceMeta } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(appDataPath, 'workspaces', workspaceId);
|
||||
await fs.ensureDir(workspacePath);
|
||||
const sourcePath = path.join(tmpDir, 'source.db');
|
||||
await fs.writeFile(sourcePath, 'test');
|
||||
|
||||
await fs.ensureSymlink(sourcePath, path.join(workspacePath, 'storage.db'));
|
||||
|
||||
expect(await getWorkspaceMeta(workspaceId)).toEqual({
|
||||
id: workspaceId,
|
||||
mainDBPath: path.join(workspacePath, 'storage.db'),
|
||||
secondaryDBPath: sourcePath,
|
||||
});
|
||||
|
||||
expect(
|
||||
await fs.pathExists(path.join(workspacePath, 'meta.json'))
|
||||
).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
test('storeWorkspaceMeta', async () => {
|
||||
const { storeWorkspaceMeta } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(appDataPath, 'workspaces', workspaceId);
|
||||
await fs.ensureDir(workspacePath);
|
||||
const meta = {
|
||||
id: workspaceId,
|
||||
mainDBPath: path.join(workspacePath, 'storage.db'),
|
||||
};
|
||||
await storeWorkspaceMeta(workspaceId, meta);
|
||||
expect(await fs.readJSON(path.join(workspacePath, 'meta.json'))).toEqual(
|
||||
meta
|
||||
);
|
||||
await storeWorkspaceMeta(workspaceId, {
|
||||
secondaryDBPath: path.join(tmpDir, 'test.db'),
|
||||
});
|
||||
expect(await fs.readJSON(path.join(workspacePath, 'meta.json'))).toEqual({
|
||||
...meta,
|
||||
secondaryDBPath: path.join(tmpDir, 'test.db'),
|
||||
});
|
||||
});
|
||||
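The spec relies on vi.doMock (which is not hoisted) plus dynamic import() inside each test, so the mocked main-rpc and ensure-db modules are in place before the handlers module is evaluated. The same ordering in isolation, as a sketch rather than part of the diff (mocked path value is illustrative):

import { expect, test, vi } from 'vitest';

vi.doMock('../../main-rpc', () => ({
  mainRPC: { getPath: async () => '/tmp/app-data' },
}));

test('uses the mocked main-rpc', async () => {
  // importing after doMock is what makes the mock take effect
  const { getWorkspacesBasePath } = await import('../handlers');
  // POSIX path shown; path.join is platform dependent
  expect(await getWorkspacesBasePath()).toBe('/tmp/app-data/workspaces');
});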
143 apps/electron/src/helper/workspace/handlers.ts Normal file
@@ -0,0 +1,143 @@
|
||||
import path from 'node:path';
|
||||
|
||||
import fs from 'fs-extra';
|
||||
|
||||
import { ensureSQLiteDB } from '../db/ensure-db';
|
||||
import { logger } from '../logger';
|
||||
import { mainRPC } from '../main-rpc';
|
||||
import type { WorkspaceMeta } from '../type';
|
||||
import { workspaceSubjects } from './subjects';
|
||||
|
||||
let _appDataPath = '';
|
||||
|
||||
async function getAppDataPath() {
|
||||
if (_appDataPath) {
|
||||
return _appDataPath;
|
||||
}
|
||||
_appDataPath = await mainRPC.getPath('sessionData');
|
||||
return _appDataPath;
|
||||
}
|
||||
|
||||
export async function listWorkspaces(): Promise<
|
||||
[workspaceId: string, meta: WorkspaceMeta][]
|
||||
> {
|
||||
const basePath = await getWorkspacesBasePath();
|
||||
try {
|
||||
await fs.ensureDir(basePath);
|
||||
const dirs = (
|
||||
await fs.readdir(basePath, {
|
||||
withFileTypes: true,
|
||||
})
|
||||
).filter(d => d.isDirectory());
|
||||
const metaList = (
|
||||
await Promise.all(
|
||||
dirs.map(async dir => {
|
||||
// ? shall we put all meta in a single file instead of one file per workspace?
|
||||
return await getWorkspaceMeta(dir.name);
|
||||
})
|
||||
)
|
||||
).filter((w): w is WorkspaceMeta => !!w);
|
||||
return metaList.map(meta => [meta.id, meta]);
|
||||
} catch (error) {
|
||||
logger.error('listWorkspaces', error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
export async function deleteWorkspace(id: string) {
|
||||
const basePath = await getWorkspaceBasePath(id);
|
||||
const movedPath = path.join(await getDeletedWorkspacesBasePath(), `${id}`);
|
||||
try {
|
||||
const db = await ensureSQLiteDB(id);
|
||||
await db.destroy();
|
||||
return await fs.move(basePath, movedPath, {
|
||||
overwrite: true,
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('deleteWorkspace', error);
|
||||
}
|
||||
}
|
||||
|
||||
export async function getWorkspacesBasePath() {
|
||||
return path.join(await getAppDataPath(), 'workspaces');
|
||||
}
|
||||
|
||||
export async function getWorkspaceBasePath(workspaceId: string) {
|
||||
return path.join(await getAppDataPath(), 'workspaces', workspaceId);
|
||||
}
|
||||
|
||||
async function getDeletedWorkspacesBasePath() {
|
||||
return path.join(await getAppDataPath(), 'deleted-workspaces');
|
||||
}
|
||||
|
||||
export async function getWorkspaceDBPath(workspaceId: string) {
|
||||
return path.join(await getWorkspaceBasePath(workspaceId), 'storage.db');
|
||||
}
|
||||
|
||||
export async function getWorkspaceMetaPath(workspaceId: string) {
|
||||
return path.join(await getWorkspaceBasePath(workspaceId), 'meta.json');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get workspace meta, create one if not exists
|
||||
* This function will also migrate the workspace if needed
|
||||
*/
|
||||
export async function getWorkspaceMeta(
|
||||
workspaceId: string
|
||||
): Promise<WorkspaceMeta> {
|
||||
try {
|
||||
const basePath = await getWorkspaceBasePath(workspaceId);
|
||||
const metaPath = await getWorkspaceMetaPath(workspaceId);
|
||||
if (!(await fs.exists(metaPath))) {
|
||||
// since no meta was found, we will migrate the symlinked db file if needed
|
||||
await fs.ensureDir(basePath);
|
||||
const dbPath = await getWorkspaceDBPath(workspaceId);
|
||||
|
||||
// todo: remove this after migration (in stable version)
|
||||
const realDBPath = (await fs.exists(dbPath))
|
||||
? await fs.realpath(dbPath)
|
||||
: dbPath;
|
||||
const isLink = realDBPath !== dbPath;
|
||||
if (isLink) {
|
||||
await fs.copy(realDBPath, dbPath);
|
||||
}
|
||||
// create one if not exists
|
||||
const meta = {
|
||||
id: workspaceId,
|
||||
mainDBPath: dbPath,
|
||||
secondaryDBPath: isLink ? realDBPath : undefined,
|
||||
};
|
||||
await fs.writeJSON(metaPath, meta);
|
||||
return meta;
|
||||
} else {
|
||||
const meta = await fs.readJSON(metaPath);
|
||||
return meta;
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error('getWorkspaceMeta failed', err);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
export async function storeWorkspaceMeta(
|
||||
workspaceId: string,
|
||||
meta: Partial<WorkspaceMeta>
|
||||
) {
|
||||
try {
|
||||
const basePath = await getWorkspaceBasePath(workspaceId);
|
||||
await fs.ensureDir(basePath);
|
||||
const metaPath = path.join(basePath, 'meta.json');
|
||||
const currentMeta = await getWorkspaceMeta(workspaceId);
|
||||
const newMeta = {
|
||||
...currentMeta,
|
||||
...meta,
|
||||
};
|
||||
await fs.writeJSON(metaPath, newMeta);
|
||||
workspaceSubjects.meta.next({
|
||||
workspaceId,
|
||||
meta: newMeta,
|
||||
});
|
||||
} catch (err) {
|
||||
logger.error('storeWorkspaceMeta failed', err);
|
||||
}
|
||||
}
|
||||
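After migration each workspace lives under <sessionData>/workspaces/<id>/ with a storage.db and a meta.json; when the old db file was a symlink, the link target is recorded as secondaryDBPath. An illustrative meta.json value, not taken from the diff (id and paths are examples only):

import type { WorkspaceMeta } from '../type';

// example WorkspaceMeta as written to workspaces/<id>/meta.json
const exampleMeta: WorkspaceMeta = {
  id: 'VO2K9m3almO',
  mainDBPath: '/home/user/.config/AFFiNE/workspaces/VO2K9m3almO/storage.db',
  // present only when the workspace also mirrors to an external .affine file
  secondaryDBPath: '/home/user/Documents/My Workspace_VO2K9m3almO.affine',
};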
25 apps/electron/src/helper/workspace/index.ts Normal file
@@ -0,0 +1,25 @@
import type { MainEventRegister, WorkspaceMeta } from '../type';
import { deleteWorkspace, getWorkspaceMeta, listWorkspaces } from './handlers';
import { workspaceSubjects } from './subjects';

export * from './handlers';
export * from './subjects';

export const workspaceEvents = {
  onMetaChange: (
    fn: (meta: { workspaceId: string; meta: WorkspaceMeta }) => void
  ) => {
    const sub = workspaceSubjects.meta.subscribe(fn);
    return () => {
      sub.unsubscribe();
    };
  },
} satisfies Record<string, MainEventRegister>;

export const workspaceHandlers = {
  list: async () => listWorkspaces(),
  delete: async (id: string) => deleteWorkspace(id),
  getMeta: async (id: string) => {
    return getWorkspaceMeta(id);
  },
};
7 apps/electron/src/helper/workspace/subjects.ts Normal file
@@ -0,0 +1,7 @@
import { Subject } from 'rxjs';

import type { WorkspaceMeta } from '../type';

export const workspaceSubjects = {
  meta: new Subject<{ workspaceId: string; meta: WorkspaceMeta }>(),
};
1 apps/electron/src/main/__tests__/.gitignore vendored Normal file
@@ -0,0 +1 @@
tmp
173 apps/electron/src/main/__tests__/integration.spec.ts Normal file
@@ -0,0 +1,173 @@
|
||||
import assert from 'node:assert';
|
||||
import path from 'node:path';
|
||||
import { setTimeout } from 'node:timers/promises';
|
||||
|
||||
import fs from 'fs-extra';
|
||||
import { afterEach, beforeEach, describe, expect, test, vi } from 'vitest';
|
||||
|
||||
import type { MainIPCHandlerMap } from '../exposed';
|
||||
|
||||
const registeredHandlers = new Map<
|
||||
string,
|
||||
((...args: any[]) => Promise<any>)[]
|
||||
>();
|
||||
|
||||
type WithoutFirstParameter<T> = T extends (_: any, ...args: infer P) => infer R
|
||||
? (...args: P) => R
|
||||
: T;
|
||||
|
||||
// common mock dispatcher for ipcMain.handle AND app.on
|
||||
// alternatively, we can use single parameter for T & F, eg, dispatch('workspace:list'),
|
||||
// however this is too hard to be typed correctly
|
||||
async function dispatch<
|
||||
T extends keyof MainIPCHandlerMap,
|
||||
F extends keyof MainIPCHandlerMap[T]
|
||||
>(
|
||||
namespace: T,
|
||||
functionName: F,
|
||||
...args: Parameters<WithoutFirstParameter<MainIPCHandlerMap[T][F]>>
|
||||
): // @ts-expect-error
|
||||
ReturnType<MainIPCHandlerMap[T][F]> {
|
||||
// @ts-expect-error
|
||||
const handlers = registeredHandlers.get(namespace + ':' + functionName);
|
||||
assert(handlers);
|
||||
|
||||
// we only care about the first handler here
|
||||
return await handlers[0](null, ...args);
|
||||
}
|
||||
|
||||
const SESSION_DATA_PATH = path.join(__dirname, './tmp', 'affine-test');
|
||||
const DOCUMENTS_PATH = path.join(__dirname, './tmp', 'affine-test-documents');
|
||||
|
||||
const browserWindow = {
|
||||
isDestroyed: () => {
|
||||
return false;
|
||||
},
|
||||
setWindowButtonVisibility: (_v: boolean) => {
|
||||
// will be stubbed later
|
||||
},
|
||||
webContents: {
|
||||
send: (_type: string, ..._args: any[]) => {
|
||||
// will be stubbed later
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const ipcMain = {
|
||||
handle: (key: string, callback: (...args: any[]) => Promise<any>) => {
|
||||
const handlers = registeredHandlers.get(key) || [];
|
||||
handlers.push(callback);
|
||||
registeredHandlers.set(key, handlers);
|
||||
},
|
||||
setMaxListeners: (_n: number) => {
|
||||
// noop
|
||||
},
|
||||
};
|
||||
|
||||
const nativeTheme = {
|
||||
themeSource: 'light',
|
||||
};
|
||||
|
||||
const electronModule = {
|
||||
app: {
|
||||
getPath: (name: string) => {
|
||||
if (name === 'sessionData') {
|
||||
return SESSION_DATA_PATH;
|
||||
} else if (name === 'documents') {
|
||||
return DOCUMENTS_PATH;
|
||||
}
|
||||
throw new Error('not implemented');
|
||||
},
|
||||
name: 'affine-test',
|
||||
on: (name: string, callback: (...args: any[]) => any) => {
|
||||
const handlers = registeredHandlers.get(name) || [];
|
||||
handlers.push(callback);
|
||||
registeredHandlers.set(name, handlers);
|
||||
},
|
||||
addListener: (...args: any[]) => {
|
||||
// @ts-expect-error
|
||||
electronModule.app.on(...args);
|
||||
},
|
||||
removeListener: () => {},
|
||||
},
|
||||
BrowserWindow: {
|
||||
getAllWindows: () => {
|
||||
return [browserWindow];
|
||||
},
|
||||
},
|
||||
nativeTheme: nativeTheme,
|
||||
ipcMain,
|
||||
shell: {} as Partial<Electron.Shell>,
|
||||
dialog: {} as Partial<Electron.Dialog>,
|
||||
};
|
||||
|
||||
// dynamically import handlers so that we can inject local variables to mocks
|
||||
vi.doMock('electron', () => {
|
||||
return electronModule;
|
||||
});
|
||||
|
||||
beforeEach(async () => {
|
||||
const { registerHandlers } = await import('../handlers');
|
||||
registerHandlers();
|
||||
|
||||
// should also register events
|
||||
const { registerEvents } = await import('../events');
|
||||
registerEvents();
|
||||
await fs.mkdirp(SESSION_DATA_PATH);
|
||||
|
||||
registeredHandlers.get('ready')?.forEach(fn => fn());
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
// reset registered handlers
|
||||
registeredHandlers.get('before-quit')?.forEach(fn => fn());
|
||||
// wait for the db to be closed on Windows
|
||||
if (process.platform === 'win32') {
|
||||
await setTimeout(200);
|
||||
}
|
||||
await fs.remove(SESSION_DATA_PATH);
|
||||
});
|
||||
|
||||
describe('UI handlers', () => {
|
||||
test('theme-change', async () => {
|
||||
await dispatch('ui', 'handleThemeChange', 'dark');
|
||||
expect(nativeTheme.themeSource).toBe('dark');
|
||||
await dispatch('ui', 'handleThemeChange', 'light');
|
||||
expect(nativeTheme.themeSource).toBe('light');
|
||||
});
|
||||
|
||||
test('sidebar-visibility-change (macOS)', async () => {
|
||||
vi.stubGlobal('process', { platform: 'darwin' });
|
||||
const setWindowButtonVisibility = vi.fn();
|
||||
browserWindow.setWindowButtonVisibility = setWindowButtonVisibility;
|
||||
await dispatch('ui', 'handleSidebarVisibilityChange', true);
|
||||
expect(setWindowButtonVisibility).toBeCalledWith(true);
|
||||
await dispatch('ui', 'handleSidebarVisibilityChange', false);
|
||||
expect(setWindowButtonVisibility).toBeCalledWith(false);
|
||||
vi.unstubAllGlobals();
|
||||
});
|
||||
|
||||
test('sidebar-visibility-change (non-macOS)', async () => {
|
||||
vi.stubGlobal('process', { platform: 'linux' });
|
||||
const setWindowButtonVisibility = vi.fn();
|
||||
browserWindow.setWindowButtonVisibility = setWindowButtonVisibility;
|
||||
await dispatch('ui', 'handleSidebarVisibilityChange', true);
|
||||
expect(setWindowButtonVisibility).not.toBeCalled();
|
||||
vi.unstubAllGlobals();
|
||||
});
|
||||
});
|
||||
|
||||
describe('applicationMenu', () => {
|
||||
// test some basic IPC events
|
||||
test('applicationMenu event', async () => {
|
||||
const { applicationMenuSubjects } = await import('../application-menu');
|
||||
const sendStub = vi.fn();
|
||||
browserWindow.webContents.send = sendStub;
|
||||
applicationMenuSubjects.newPageAction.next();
|
||||
expect(sendStub).toHaveBeenCalledWith(
|
||||
'applicationMenu:onNewPageAction',
|
||||
undefined
|
||||
);
|
||||
browserWindow.webContents.send = () => {};
|
||||
});
|
||||
});
|
||||
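The WithoutFirstParameter utility above is what lets the test's dispatch helper be typed with the renderer-facing signature while the real handler still receives the IPC event as its first argument. In isolation, as a sketch (the Handler type below is illustrative):

type WithoutFirstParameter<T> = T extends (_: any, ...args: infer P) => infer R
  ? (...args: P) => R
  : T;

// a main-process handler always takes the ipc event first ...
type Handler = (e: Electron.IpcMainInvokeEvent, workspaceId: string) => Promise<void>;

// ... but test code (and the renderer) only supplies the rest
type Exposed = WithoutFirstParameter<Handler>;
// => (workspaceId: string) => Promise<void>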
@@ -1,9 +1,9 @@
|
||||
import { app, Menu } from 'electron';
|
||||
|
||||
import { isMacOS } from '../../utils';
|
||||
import { subjects } from './events';
|
||||
import { checkForUpdatesAndNotify } from './handlers/updater';
|
||||
import { revealLogFile } from './logger';
|
||||
import { revealLogFile } from '../logger';
|
||||
import { checkForUpdatesAndNotify } from '../updater';
|
||||
import { isMacOS } from '../utils';
|
||||
import { applicationMenuSubjects } from './subject';
|
||||
|
||||
// Unique id for menuitems
|
||||
const MENUITEM_NEW_PAGE = 'affine:new-page';
|
||||
@@ -43,7 +43,7 @@ export function createApplicationMenu() {
|
||||
label: 'New Page',
|
||||
accelerator: isMac ? 'Cmd+N' : 'Ctrl+N',
|
||||
click: () => {
|
||||
subjects.applicationMenu.newPageAction.next();
|
||||
applicationMenuSubjects.newPageAction.next();
|
||||
},
|
||||
},
|
||||
{ type: 'separator' },
|
||||
@@ -117,9 +117,9 @@ export function createApplicationMenu() {
|
||||
},
|
||||
},
|
||||
{
|
||||
label: 'Open logs folder',
|
||||
label: 'Open log file',
|
||||
click: async () => {
|
||||
revealLogFile();
|
||||
await revealLogFile();
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -132,9 +132,9 @@ export function createApplicationMenu() {
|
||||
},
|
||||
];
|
||||
|
||||
// @ts-ignore The snippet is copied from Electron official docs.
|
||||
// It's working as expected. No idea why it contains type errors.
|
||||
// Just ignore for now.
|
||||
// @ts-expect-error: The snippet is copied from Electron official docs.
|
||||
// It's working as expected. No idea why it contains type errors.
|
||||
// Just ignore for now.
|
||||
const menu = Menu.buildFromTemplate(template);
|
||||
Menu.setApplicationMenu(menu);
|
||||
|
||||
@@ -1,10 +1,8 @@
|
||||
import { Subject } from 'rxjs';
|
||||
import type { MainEventRegister } from '../type';
|
||||
import { applicationMenuSubjects } from './subject';
|
||||
|
||||
import type { MainEventListener } from './type';
|
||||
|
||||
export const applicationMenuSubjects = {
|
||||
newPageAction: new Subject<void>(),
|
||||
};
|
||||
export * from './create';
|
||||
export * from './subject';
|
||||
|
||||
/**
|
||||
* Events triggered by application menu
|
||||
@@ -19,4 +17,4 @@ export const applicationMenuEvents = {
|
||||
sub.unsubscribe();
|
||||
};
|
||||
},
|
||||
} satisfies Record<string, MainEventListener>;
|
||||
} satisfies Record<string, MainEventRegister>;
|
||||
5 apps/electron/src/main/application-menu/subject.ts Normal file
@@ -0,0 +1,5 @@
import { Subject } from 'rxjs';

export const applicationMenuSubjects = {
  newPageAction: new Subject<void>(),
};
@@ -1,14 +1,12 @@
|
||||
import { app, BrowserWindow } from 'electron';
|
||||
|
||||
import { logger } from '../logger';
|
||||
import { applicationMenuEvents } from './application-menu';
|
||||
import { dbEvents } from './db';
|
||||
import { updaterEvents } from './updater';
|
||||
import { logger } from './logger';
|
||||
import { updaterEvents } from './updater/event';
|
||||
|
||||
export const allEvents = {
|
||||
db: dbEvents,
|
||||
updater: updaterEvents,
|
||||
applicationMenu: applicationMenuEvents,
|
||||
updater: updaterEvents,
|
||||
};
|
||||
|
||||
function getActiveWindows() {
|
||||
@@ -19,9 +17,18 @@ export function registerEvents() {
|
||||
// register events
|
||||
for (const [namespace, namespaceEvents] of Object.entries(allEvents)) {
|
||||
for (const [key, eventRegister] of Object.entries(namespaceEvents)) {
|
||||
const subscription = eventRegister((...args: any) => {
|
||||
const subscription = eventRegister((...args: any[]) => {
|
||||
const chan = `${namespace}:${key}`;
|
||||
logger.info('[ipc-event]', chan, args);
|
||||
logger.info(
|
||||
'[ipc-event]',
|
||||
chan,
|
||||
args.filter(
|
||||
a =>
|
||||
a !== undefined &&
|
||||
typeof a !== 'function' &&
|
||||
typeof a !== 'object'
|
||||
)
|
||||
);
|
||||
getActiveWindows().forEach(win => win.webContents.send(chan, ...args));
|
||||
});
|
||||
app.on('before-quit', () => {
|
||||
10
apps/electron/src/main/export/index.ts
Normal file
10
apps/electron/src/main/export/index.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
import type { NamespaceHandlers } from '../type';
|
||||
import { savePDFFileAs } from './pdf';
|
||||
|
||||
export const exportHandlers = {
|
||||
savePDFFileAs: async (_, title: string) => {
|
||||
return savePDFFileAs(title);
|
||||
},
|
||||
} satisfies NamespaceHandlers;
|
||||
|
||||
export * from './pdf';
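Since registerHandlers puts every namespace handler behind an `ipcMain.handle` channel and the preload (affine-apis.ts at the end of this diff) invokes them as `${namespace}:${name}`, the handler above ends up reachable as `export:savePDFFileAs`. A renderer-side sketch of calling it directly; in practice the generated `apis.export.savePDFFileAs` wrapper would be used instead:

// Renderer/preload-side sketch; the 'export:savePDFFileAs' channel name is implied by
// the `${namespace}:${name}` convention used in affine-apis.ts below, not spelled out here.
import { ipcRenderer } from 'electron';

async function exportCurrentPageAsPDF(pageTitle: string) {
  const result = await ipcRenderer.invoke('export:savePDFFileAs', pageTitle);
  if (result.canceled) return undefined;
  if (result.error) throw new Error(`export failed: ${result.error}`);
  return result.filePath as string;
}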
apps/electron/src/main/export/pdf.ts (new file, 61 lines)
@@ -0,0 +1,61 @@
import { BrowserWindow, dialog, shell } from 'electron';
import fs from 'fs-extra';

import { logger } from '../logger';
import type { ErrorMessage } from './utils';
import { getFakedResult } from './utils';

export interface SavePDFFileResult {
  filePath?: string;
  canceled?: boolean;
  error?: ErrorMessage;
}

/**
 * This function is called when the user clicks the "Export to PDF" button in the electron.
 *
 * It will just copy the file to the given path
 */
export async function savePDFFileAs(
  pageTitle: string
): Promise<SavePDFFileResult> {
  try {
    const ret =
      getFakedResult() ??
      (await dialog.showSaveDialog({
        properties: ['showOverwriteConfirmation'],
        title: 'Save PDF',
        showsTagField: false,
        buttonLabel: 'Save',
        defaultPath: `${pageTitle}.pdf`,
        message: 'Save Page as a PDF file',
      }));
    const filePath = ret.filePath;
    if (ret.canceled || !filePath) {
      return {
        canceled: true,
      };
    }

    await BrowserWindow.getFocusedWindow()
      ?.webContents.printToPDF({
        pageSize: 'A4',
        printBackground: true,
        landscape: false,
      })
      .then(data => {
        fs.writeFile(filePath, data, error => {
          if (error) throw error;
          logger.log(`Wrote PDF successfully to ${filePath}`);
        });
      });

    await shell.openPath(filePath);
    return { filePath };
  } catch (err) {
    logger.error('savePDFFileAs', err);
    return {
      error: 'UNKNOWN_ERROR',
    };
  }
}

apps/electron/src/main/export/utils.ts (new file, 24 lines)
@@ -0,0 +1,24 @@
// provide a backdoor to set dialog path for testing in playwright
interface FakeDialogResult {
  canceled?: boolean;
  filePath?: string;
  filePaths?: string[];
}
// result will be used in the next call to showOpenDialog
// if it is being read once, it will be reset to undefined
let fakeDialogResult: FakeDialogResult | undefined = undefined;
export function getFakedResult() {
  const result = fakeDialogResult;
  fakeDialogResult = undefined;
  return result;
}

export function setFakeDialogResult(result: FakeDialogResult | undefined) {
  fakeDialogResult = result;
  // for convenience, we will fill filePaths with filePath if it is not set
  if (result?.filePaths === undefined && result?.filePath !== undefined) {
    result.filePaths = [result.filePath];
  }
}
const ErrorMessages = ['FILE_ALREADY_EXISTS', 'UNKNOWN_ERROR'] as const;
export type ErrorMessage = (typeof ErrorMessages)[number];
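In a Playwright run the save dialog can be skipped entirely: something on the main-process side (this diff does not show the trigger, so the wiring below is hypothetical) seeds the fake result once, and the next call inside savePDFFileAs consumes it instead of opening dialog.showSaveDialog. A sketch:

// Main-process sketch: pre-seed the dialog result, then run the export.
// setFakeDialogResult/savePDFFileAs come from the two files above; the temp-path
// destination and the page title are purely illustrative.
import path from 'node:path';

import { app } from 'electron';

import { savePDFFileAs } from './pdf';
import { setFakeDialogResult } from './utils';

export async function exportWithoutDialogForTest() {
  setFakeDialogResult({
    filePath: path.join(app.getPath('temp'), 'test-page.pdf'),
  });
  // getFakedResult() inside savePDFFileAs reads the value once and resets it to undefined.
  return savePDFFileAs('test-page');
}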
apps/electron/src/main/exposed.ts (new file, 29 lines)
@@ -0,0 +1,29 @@
import { allEvents as events } from './events';
import { allHandlers as handlers } from './handlers';

// this will be used by preload script to expose all handlers and events to the renderer process
// - register in exposeInMainWorld in preload
// - provide type hints
export { events, handlers };

export const getExposedMeta = () => {
  const handlersMeta = Object.entries(handlers).map(
    ([namespace, namespaceHandlers]) => {
      return [namespace, Object.keys(namespaceHandlers)];
    }
  );

  const eventsMeta = Object.entries(events).map(
    ([namespace, namespaceHandlers]) => {
      return [namespace, Object.keys(namespaceHandlers)];
    }
  );

  return {
    handlers: handlersMeta,
    events: eventsMeta,
  };
};

export type MainIPCHandlerMap = typeof handlers;
export type MainIPCEventMap = typeof events;
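Because getExposedMeta keeps only namespace and function names, the result is JSON-serializable and can be handed to the preload through `additionalArguments` (see the main-window change later in this diff). For the maps defined in this diff it would look roughly like this (abridged; names not visible in the diff are left as placeholders):

// Approximate shape of getExposedMeta() for the handlers/events in this diff (abridged).
const exampleMeta = {
  handlers: [
    ['export', ['savePDFFileAs']],
    ['updater', ['currentVersion' /* , ... */]],
    // ['workspace', [...]], ['ui', [...]], ['db', [...]], ['dialog', [...]], ['debug', [...]]
  ],
  events: [
    ['applicationMenu', ['onNewPageAction']],
    // ['db', [...]], ['updater', [...]]
  ],
};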
@@ -1,20 +1,16 @@
import type {
  DebugHandlerManager,
  ExportHandlerManager,
  UIHandlerManager,
  UnwrapManagerHandlerToServerSide,
  UpdaterHandlerManager,
} from '@toeverything/infra';
import { ipcMain } from 'electron';

import { getLogFilePath, logger, revealLogFile } from '../logger';
import { dbHandlers } from './db';
import { dialogHandlers } from './dialog';
import { exportHandlers } from './export';
import { getLogFilePath, logger, revealLogFile } from './logger';
import { uiHandlers } from './ui';
import { updaterHandlers } from './updater';
import { workspaceHandlers } from './workspace';

type IsomorphicHandler = (
  e: Electron.IpcMainInvokeEvent,
  ...args: any[]
) => Promise<any>;

type NamespaceHandlers = {
  [key: string]: IsomorphicHandler;
};

export const debugHandlers = {
  revealLogFile: async () => {
@@ -25,15 +21,32 @@ export const debugHandlers = {
  },
};

type AllHandlers = {
  debug: UnwrapManagerHandlerToServerSide<
    Electron.IpcMainInvokeEvent,
    DebugHandlerManager
  >;
  export: UnwrapManagerHandlerToServerSide<
    Electron.IpcMainInvokeEvent,
    ExportHandlerManager
  >;
  ui: UnwrapManagerHandlerToServerSide<
    Electron.IpcMainInvokeEvent,
    UIHandlerManager
  >;
  updater: UnwrapManagerHandlerToServerSide<
    Electron.IpcMainInvokeEvent,
    UpdaterHandlerManager
  >;
};

// Note: all of these handlers will be the single-source-of-truth for the apis exposed to the renderer process
export const allHandlers = {
  workspace: workspaceHandlers,
  ui: uiHandlers,
  db: dbHandlers,
  dialog: dialogHandlers,
  debug: debugHandlers,
  ui: uiHandlers,
  export: exportHandlers,
  updater: updaterHandlers,
} satisfies Record<string, NamespaceHandlers>;
} satisfies AllHandlers;

export const registerHandlers = () => {
  // TODO: listen to namespace instead of individual event types
@@ -44,6 +57,7 @@ export const registerHandlers = () => {
      ipcMain.handle(chan, async (e, ...args) => {
        const start = performance.now();
        try {
          // @ts-expect-error - TODO: fix this
          const result = await handler(e, ...args);
          logger.info(
            '[ipc-api]',

apps/electron/src/main/helper-process.ts (new file, 111 lines)
@@ -0,0 +1,111 @@
import path from 'node:path';

import { type _AsyncVersionOf, AsyncCall } from 'async-call-rpc';
import {
  app,
  dialog,
  MessageChannelMain,
  shell,
  type UtilityProcess,
  utilityProcess,
  type WebContents,
} from 'electron';

import { logger } from './logger';
import { MessageEventChannel } from './utils';

const HELPER_PROCESS_PATH = path.join(__dirname, './helper.js');

function pickAndBind<T extends object, U extends keyof T>(
  obj: T,
  keys: U[]
): { [K in U]: T[K] } {
  return keys.reduce((acc, key) => {
    const prop = obj[key];
    acc[key] =
      typeof prop === 'function'
        ? // @ts-expect-error - a hack to bind the function
          prop.bind(obj)
        : prop;
    return acc;
  }, {} as any);
}

class HelperProcessManager {
  ready: Promise<void>;
  #process: UtilityProcess;

  // a rpc server for the main process -> helper process
  rpc?: _AsyncVersionOf<PeersAPIs.HelperToMain>;

  static instance = new HelperProcessManager();

  private constructor() {
    const helperProcess = utilityProcess.fork(HELPER_PROCESS_PATH);
    this.#process = helperProcess;
    this.ready = new Promise((resolve, reject) => {
      helperProcess.once('spawn', () => {
        try {
          this.#connectMain();
          resolve();
        } catch (err) {
          logger.error('[helper] connectMain error', err);
          reject(err);
        }
      });
    });

    app.on('before-quit', () => {
      this.#process.kill();
    });
  }

  // bridge renderer <-> helper process
  connectRenderer(renderer: WebContents) {
    // connect to the helper process
    const { port1: helperPort, port2: rendererPort } = new MessageChannelMain();
    this.#process.postMessage({ channel: 'renderer-connect' }, [helperPort]);
    renderer.postMessage('helper-connection', null, [rendererPort]);

    return () => {
      helperPort.close();
      rendererPort.close();
    };
  }

  // bridge main <-> helper process
  // also set up the RPC to the helper process
  #connectMain() {
    const dialogMethods = pickAndBind(dialog, [
      'showOpenDialog',
      'showSaveDialog',
    ]);
    const shellMethods = pickAndBind(shell, [
      'openExternal',
      'showItemInFolder',
    ]);
    const appMethods = pickAndBind(app, ['getPath']);

    const mainToHelperServer: PeersAPIs.MainToHelper = {
      ...dialogMethods,
      ...shellMethods,
      ...appMethods,
    };

    const server = AsyncCall<PeersAPIs.HelperToMain>(mainToHelperServer, {
      strict: {
        // the channel is shared for other purposes as well so that we do not want to
        // restrict to only JSONRPC messages
        unknownMessage: false,
      },
      channel: new MessageEventChannel(this.#process),
    });
    this.rpc = server;
  }
}

export async function ensureHelperProcess() {
  const helperProcessManager = HelperProcessManager.instance;
  await helperProcessManager.ready;
  return helperProcessManager;
}
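The manager is consumed in two places later in this diff: window creation awaits the helper and asks it for its exposed meta over the RPC, and every renderer gets bridged to the helper through connectRenderer. A condensed usage sketch, taken from the main-window changes below (error handling omitted):

// Condensed from the main-window changes later in this diff; illustrative only.
import type { BrowserWindow } from 'electron';

import { ensureHelperProcess } from './helper-process';

async function attachHelperTo(browserWindow: BrowserWindow) {
  const helperProcessManager = await ensureHelperProcess();

  // main -> helper RPC: fetch the helper's exposed handler/event names
  const helperExposedMeta = await helperProcessManager.rpc?.getMeta();

  // bridge renderer <-> helper over a MessageChannelMain port pair
  const unsub = helperProcessManager.connectRenderer(browserWindow.webContents);

  // the caller keeps `unsub` around and calls it when the window closes
  return { helperExposedMeta, unsub };
}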
@@ -2,14 +2,17 @@ import './security-restrictions';

import { app } from 'electron';

import { createApplicationMenu } from './application-menu';
import { createApplicationMenu } from './application-menu/create';
import { registerEvents } from './events';
import { registerHandlers } from './handlers';
import { registerUpdater } from './handlers/updater';
import { ensureHelperProcess } from './helper-process';
import { logger } from './logger';
import { restoreOrCreateWindow } from './main-window';
import { registerPlugin } from './plugin';
import { registerProtocol } from './protocol';
import { registerUpdater } from './updater';

if (require('electron-squirrel-startup')) app.quit();
// allow tests to overwrite app name through passing args
if (process.argv.includes('--app-name')) {
  const appNameIndex = process.argv.indexOf('--app-name');
@@ -28,7 +31,9 @@ if (!isSingleInstance) {
}

app.on('second-instance', () => {
  restoreOrCreateWindow();
  restoreOrCreateWindow().catch(e =>
    console.error('Failed to restore or create window:', e)
  );
});

app.on('open-url', async (_, _url) => {
@@ -55,20 +60,12 @@ app.on('activate', restoreOrCreateWindow);
app
  .whenReady()
  .then(registerProtocol)
  .then(registerPlugin)
  .then(registerHandlers)
  .then(registerEvents)
  .then(ensureHelperProcess)
  .then(restoreOrCreateWindow)
  .then(createApplicationMenu)
  .then()
  .then(registerUpdater)
  .catch(e => console.error('Failed create window:', e));
/**
 * Check new app version in production mode only
 */
// FIXME: add me back later
// if (import.meta.env.PROD) {
//   app
//     .whenReady()
//     .then(() => import('electron-updater'))
//     .then(({ autoUpdater }) => autoUpdater.checkForUpdatesAndNotify())
//     .catch(e => console.error('Failed check updates:', e));
// }

@@ -1,13 +1,14 @@
import { shell } from 'electron';
import log from 'electron-log';

export const logger = log;
export const logger = log.scope('main');
log.initialize();

export function getLogFilePath() {
  return log.transports.file.getFile().path;
}

export function revealLogFile() {
export async function revealLogFile() {
  const filePath = getLogFilePath();
  shell.showItemInFolder(filePath);
  return await shell.openPath(filePath);
}

@@ -1,9 +1,13 @@
import assert from 'node:assert';

import { BrowserWindow, nativeTheme } from 'electron';
import electronWindowState from 'electron-window-state';
import { join } from 'path';

import { isMacOS, isWindows } from '../../utils';
import { getExposedMeta } from './exposed';
import { ensureHelperProcess } from './helper-process';
import { logger } from './logger';
import { isMacOS, isWindows } from './utils';

const IS_DEV: boolean =
  process.env.NODE_ENV === 'development' && !process.env.CI;
@@ -17,6 +21,13 @@ async function createWindow() {
    defaultHeight: 800,
  });

  const helperProcessManager = await ensureHelperProcess();
  const helperExposedMeta = await helperProcessManager.rpc?.getMeta();

  assert(helperExposedMeta, 'helperExposedMeta should be defined');

  const mainExposedMeta = getExposedMeta();

  const browserWindow = new BrowserWindow({
    titleBarStyle: isMacOS()
      ? 'hiddenInset'
@@ -39,7 +50,12 @@ async function createWindow() {
      sandbox: false,
      webviewTag: false, // The webview tag is not recommended. Consider alternatives like iframe or Electron's BrowserView. https://www.electronjs.org/docs/latest/api/webview-tag#warning
      spellcheck: false, // FIXME: enable?
      preload: join(__dirname, '../preload/index.js'),
      preload: join(__dirname, './preload.js'),
      // serialize exposed meta that to be used in preload
      additionalArguments: [
        `--main-exposed-meta=` + JSON.stringify(mainExposedMeta),
        `--helper-exposed-meta=` + JSON.stringify(helperExposedMeta),
      ],
    },
  });

@@ -47,6 +63,8 @@ async function createWindow() {

  mainWindowState.manage(browserWindow);

  let helperConnectionUnsub: (() => void) | undefined;

  /**
   * If you install `show: true` then it can cause issues when trying to close the window.
   * Use `show: false` and listener events `ready-to-show` to fix these issues.
@@ -60,6 +78,9 @@ async function createWindow() {
    } else {
      browserWindow.show();
    }
    helperConnectionUnsub = helperProcessManager.connectRenderer(
      browserWindow.webContents
    );

    logger.info('main window is ready to show');

@@ -73,6 +94,7 @@ async function createWindow() {
  browserWindow.on('close', e => {
    e.preventDefault();
    browserWindow.destroy();
    helperConnectionUnsub?.();
    // TODO: gracefully close the app, for example, ask user to save unsaved changes
  });

apps/electron/src/main/plugin.ts (new file, 70 lines)
@@ -0,0 +1,70 @@
import { join, resolve } from 'node:path';
import { Worker } from 'node:worker_threads';

import { logger } from '@affine/electron/main/logger';
import { AsyncCall } from 'async-call-rpc';
import { ipcMain } from 'electron';

import { MessageEventChannel } from './utils';

declare global {
  // fixme(himself65):
  //  remove this when bookmark block plugin is migrated to plugin-infra
  // eslint-disable-next-line no-var
  var asyncCall: Record<string, (...args: any) => PromiseLike<any>>;
}

export function registerPlugin() {
  const pluginWorkerPath = join(__dirname, './workers/plugin.worker.js');
  const asyncCall = AsyncCall<
    Record<string, (...args: any) => PromiseLike<any>>
  >(
    {
      log: (...args: any[]) => {
        logger.log('Plugin Worker', ...args);
      },
    },
    {
      channel: new MessageEventChannel(new Worker(pluginWorkerPath)),
    }
  );
  globalThis.asyncCall = asyncCall;
  logger.info('import plugin manager');
  import('@toeverything/plugin-infra/manager')
    .then(({ rootStore, affinePluginsAtom }) => {
      logger.info('import plugin manager');
      const bookmarkPluginPath = join(
        process.env.PLUGIN_DIR ?? resolve(__dirname, './plugins'),
        './bookmark-block/index.mjs'
      );
      logger.info('bookmark plugin path:', bookmarkPluginPath);
      import('file://' + bookmarkPluginPath);
      let dispose: () => void = () => {
        // noop
      };
      rootStore.sub(affinePluginsAtom, () => {
        dispose();
        const plugins = rootStore.get(affinePluginsAtom);
        Object.values(plugins).forEach(plugin => {
          logger.info('register plugin', plugin.definition.id);
          plugin.definition.commands.forEach(command => {
            logger.info('register plugin command', command);
            ipcMain.handle(command, (event, ...args) =>
              asyncCall[command](...args)
            );
          });
        });
        dispose = () => {
          Object.values(plugins).forEach(plugin => {
            plugin.definition.commands.forEach(command => {
              logger.info('unregister plugin command', command);
              ipcMain.removeHandler(command);
            });
          });
        };
      });
    })
    .catch(error => {
      logger.error('import plugin manager error', error);
    });
}
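Because registerPlugin registers each plugin command verbatim with `ipcMain.handle`, a renderer can invoke a command by its fully-qualified id. A sketch using the bookmark command id that the UI handlers later in this diff refer to (whether callers go through this channel directly or through `ui.getBookmarkDataByLink` is up to the caller):

// Renderer-side sketch: invoke a plugin command registered by registerPlugin above.
// The command id is the one referenced by uiHandlers.getBookmarkDataByLink below.
import { ipcRenderer } from 'electron';

async function fetchBookmarkData(link: string) {
  return ipcRenderer.invoke(
    'com.blocksuite.bookmark-block.get-bookmark-data-by-link',
    link
  );
}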
@@ -16,7 +16,7 @@ protocol.registerSchemesAsPrivileged([

function toAbsolutePath(url: string) {
  let realpath = decodeURIComponent(url);
  const webStaticDir = join(__dirname, '../../../resources/web-static');
  const webStaticDir = join(__dirname, '../resources/web-static');
  if (url.startsWith('./')) {
    // if is a file type, load the file in resources
    if (url.split('/').at(-1)?.includes('.')) {
@@ -34,6 +34,7 @@ export function registerProtocol() {
    const url = request.url.replace(/^file:\/\//, '');
    const realpath = toAbsolutePath(url);
    callback(realpath);
    console.log('interceptFileProtocol realpath', request.url, realpath);
    return true;
  });

@@ -1,3 +1,5 @@
export type MainEventRegister = (...args: any[]) => () => void;

export type IsomorphicHandler = (
  e: Electron.IpcMainInvokeEvent,
  ...args: any[]
@@ -1,7 +1,7 @@
import { app, BrowserWindow, shell } from 'electron';
import { parse } from 'url';

import { logger } from '../../logger';
import { logger } from '../logger';

const redirectUri = 'https://affine.pro/client/auth-callback';

@@ -28,10 +28,12 @@ export const getExchangeTokenParams = (code: string) => {
};

export function getGoogleOauthCode() {
  shell.openExternal(oauthEndpoint);

  return new Promise<ReturnType<typeof getExchangeTokenParams>>(
    (resolve, reject) => {
      shell.openExternal(oauthEndpoint).catch(e => {
        logger.error('Failed to open external url', e);
        reject(e);
      });
      const handleOpenUrl = async (_: any, url: string) => {
        const mainWindow = BrowserWindow.getAllWindows().find(
          w => !w.isDestroyed()
@@ -1,7 +1,7 @@
import { app, BrowserWindow, nativeTheme } from 'electron';

import { isMacOS } from '../../../../utils';
import type { NamespaceHandlers } from '../type';
import { isMacOS } from '../utils';
import { getGoogleOauthCode } from './google-auth';

export const uiHandlers = {
@@ -39,4 +39,12 @@ export const uiHandlers = {
  getGoogleOauthCode: async () => {
    return getGoogleOauthCode();
  },
  /**
   * @deprecated Remove this when bookmark block plugin is migrated to plugin-infra
   */
  getBookmarkDataByLink: async (_, link: string) => {
    return globalThis.asyncCall[
      'com.blocksuite.bookmark-block.get-bookmark-data-by-link'
    ](link);
  },
} satisfies NamespaceHandlers;
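Through the preload bridge at the end of this diff, every handler above surfaces in the renderer as `apis.<namespace>.<name>()`, each call being forwarded as `ipcRenderer.invoke('ui:<name>', ...)`. A sketch; how `apis` actually reaches renderer code (contextBridge global, module import, ...) is not shown in this diff, so the `declare` below is an assumption:

// Renderer-side sketch using the preload-generated API map (see affine-apis.ts below).
declare const apis: Record<string, Record<string, (...args: any[]) => Promise<any>>>;

async function signInWithGoogle() {
  // forwarded by the preload as ipcRenderer.invoke('ui:getGoogleOauthCode')
  return apis.ui.getGoogleOauthCode();
}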
@@ -2,9 +2,9 @@ import { app } from 'electron';
import type { AppUpdater } from 'electron-updater';
import { z } from 'zod';

import { isMacOS } from '../../../../utils';
import { updaterSubjects } from '../../events/updater';
import { logger } from '../../logger';
import { logger } from '../logger';
import { isMacOS } from '../utils';
import { updaterSubjects } from './event';

export const ReleaseTypeSchema = z.enum([
  'stable',
@@ -29,24 +29,25 @@ export const quitAndInstall = async () => {
let lastCheckTime = 0;
export const checkForUpdatesAndNotify = async (force = true) => {
  if (!_autoUpdater) {
    return; // ?
    return void 0;
  }
  // check every 30 minutes (1800 seconds) at most
  if (force || lastCheckTime + 1000 * 1800 < Date.now()) {
    lastCheckTime = Date.now();
    return _autoUpdater.checkForUpdatesAndNotify();
    return await _autoUpdater.checkForUpdatesAndNotify();
  }
  return void 0;
};

export const registerUpdater = async () => {
  // require it will cause some side effects and will break generate-main-exposed-meta,
  // so we wrap it in a function
  // eslint-disable-next-line @typescript-eslint/no-var-requires
  const { autoUpdater } = require('electron-updater');

  _autoUpdater = autoUpdater;

  if (!_autoUpdater) {
  // skip auto update in dev mode
  if (!_autoUpdater || isDev) {
    return;
  }

@@ -68,7 +69,9 @@ export const registerUpdater = async () => {
  // register events for checkForUpdatesAndNotify
  _autoUpdater.on('update-available', info => {
    if (allowAutoUpdate) {
      _autoUpdater!.downloadUpdate();
      _autoUpdater?.downloadUpdate().catch(e => {
        logger.error('Failed to download update', e);
      });
      logger.info('Update available, downloading...', info);
    }
    updaterSubjects.updateAvailable.next({
@@ -1,8 +1,8 @@
import { BehaviorSubject, Subject } from 'rxjs';

import type { MainEventListener } from './type';
import type { MainEventRegister } from '../type';

interface UpdateMeta {
export interface UpdateMeta {
  version: string;
  allowAutoUpdate: boolean;
}
@@ -33,4 +33,4 @@ export const updaterEvents = {
      sub.unsubscribe();
    };
  },
} satisfies Record<string, MainEventListener>;
} satisfies Record<string, MainEventRegister>;
@@ -1,7 +1,7 @@
import { app } from 'electron';

import type { NamespaceHandlers } from '../type';
import { checkForUpdatesAndNotify, quitAndInstall } from './updater';
import { checkForUpdatesAndNotify, quitAndInstall } from './electron-updater';

export const updaterHandlers = {
  currentVersion: async () => {
@@ -15,4 +15,4 @@ export const updaterHandlers = {
  },
} satisfies NamespaceHandlers;

export * from './updater';
export * from './electron-updater';

apps/electron/src/main/utils.ts (new file, 40 lines)
@@ -0,0 +1,40 @@
import type { EventBasedChannel } from 'async-call-rpc';

export function getTime() {
  return new Date().getTime();
}

export const isMacOS = () => {
  return process.platform === 'darwin';
};

export const isWindows = () => {
  return process.platform === 'win32';
};

interface MessagePortLike {
  postMessage: (data: unknown) => void;
  addListener: (event: 'message', listener: (...args: any[]) => void) => void;
  removeListener: (
    event: 'message',
    listener: (...args: any[]) => void
  ) => void;
}

export class MessageEventChannel implements EventBasedChannel {
  constructor(private worker: MessagePortLike) {}

  on(listener: (data: unknown) => void) {
    const f = (data: unknown) => {
      listener(data);
    };
    this.worker.addListener('message', f);
    return () => {
      this.worker.removeListener('message', f);
    };
  }

  send(data: unknown) {
    this.worker.postMessage(data);
  }
}
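MessageEventChannel adapts anything with a Node-style `postMessage`/`addListener` pair (a UtilityProcess, a worker, a parentPort) into async-call-rpc's EventBasedChannel. A self-contained sketch that pairs the two ends of a `node:worker_threads` MessageChannel in one process, just to show the wiring; the `add` API is made up for the demo:

// Minimal in-process demo of MessageEventChannel + AsyncCall; illustrative only.
import { MessageChannel } from 'node:worker_threads';

import { AsyncCall } from 'async-call-rpc';

import { MessageEventChannel } from './utils';

const { port1, port2 } = new MessageChannel();

// "server" side: expose an implementation over port1
const impl = {
  add: async (a: number, b: number) => a + b,
};
AsyncCall(impl, { channel: new MessageEventChannel(port1) });

// "client" side: call it through port2
const rpc = AsyncCall<typeof impl>({}, { channel: new MessageEventChannel(port2) });
rpc.add(1, 2).then(sum => console.log(sum)); // logs 3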
apps/electron/src/main/workers/plugin.worker.ts (new file, 69 lines)
@@ -0,0 +1,69 @@
import { join, resolve } from 'node:path';
import { parentPort } from 'node:worker_threads';

import { AsyncCall } from 'async-call-rpc';

import { MessageEventChannel } from '../utils';

const commandProxy: Record<string, (...args: any[]) => Promise<any>> = {};

if (!parentPort) {
  throw new Error('parentPort is undefined');
}

const mainThread = AsyncCall<{
  log: (...args: any[]) => Promise<void>;
}>(commandProxy, {
  channel: new MessageEventChannel(parentPort),
});

globalThis.console.log = mainThread.log;
globalThis.console.error = mainThread.log;
globalThis.console.info = mainThread.log;
globalThis.console.debug = mainThread.log;
globalThis.console.warn = mainThread.log;

console.log('import plugin infra');

import('@toeverything/plugin-infra/manager')
  .then(({ rootStore, affinePluginsAtom }) => {
    const bookmarkPluginPath = join(
      process.env.PLUGIN_DIR ?? resolve(__dirname, '../plugins'),
      './bookmark-block/index.mjs'
    );

    console.log('import bookmark plugin', bookmarkPluginPath);

    import('file://' + bookmarkPluginPath).catch(console.log);
    rootStore.sub(affinePluginsAtom, () => {
      const plugins = rootStore.get(affinePluginsAtom);
      Object.values(plugins).forEach(plugin => {
        console.log('handle plugin', plugin.definition.id);
        if (plugin.serverAdapter) {
          try {
            plugin.serverAdapter({
              registerCommand: (command, fn) => {
                console.log('register command', command);
                commandProxy[command] = fn;
              },
              unregisterCommand: command => {
                console.log('unregister command', command);
                delete commandProxy[command];
              },
            });
          } catch (e) {
            console.log(
              'error when handle plugin',
              plugin.definition.id,
              `${e}`
            );
          }
        } else {
          console.log('no server adapter, skipping.');
        }
      });
    });
  })
  .catch(err => {
    console.error(err);
  });
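The worker only wires up commands for plugins that ship a serverAdapter. The adapter's own shape is not part of this diff, so the following is only an inferred illustration of what a plugin's server entry might do with the `registerCommand`/`unregisterCommand` callbacks shown above:

// Hypothetical plugin server entry; the export name and command id are made up.
// Only the registerCommand/unregisterCommand callback shapes come from the worker above.
type ServerAdapterContext = {
  registerCommand: (command: string, fn: (...args: any[]) => Promise<any>) => void;
  unregisterCommand: (command: string) => void;
};

export const serverAdapter = ({
  registerCommand,
  unregisterCommand,
}: ServerAdapterContext) => {
  registerCommand('com.example.demo-plugin.echo', async (text: string) => text);
  // returning a cleanup function mirrors the unregister path in the worker
  return () => unregisterCommand('com.example.demo-plugin.echo');
};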
apps/electron/src/preload/affine-apis.ts (new file, 193 lines)
@@ -0,0 +1,193 @@
// NOTE: we will generate preload types from this file
import { AsyncCall, type EventBasedChannel } from 'async-call-rpc';
import { ipcRenderer } from 'electron';
import { Subject } from 'rxjs';

type ExposedMeta = {
  handlers: [namespace: string, handlerNames: string[]][];
  events: [namespace: string, eventNames: string[]][];
};

export function getAffineAPIs() {
  const mainAPIs = getMainAPIs();
  const helperAPIs = getHelperAPIs();

  return {
    apis: {
      ...mainAPIs.apis,
      ...helperAPIs.apis,
    },
    events: {
      ...mainAPIs.events,
      ...helperAPIs.events,
    },
  };
}

export const appInfo = {
  electron: true,
};

function getMainAPIs() {
  const meta: ExposedMeta = (() => {
    const val = process.argv
      .find(arg => arg.startsWith('--main-exposed-meta='))
      ?.split('=')[1];

    return val ? JSON.parse(val) : null;
  })();

  // main handlers that can be invoked from the renderer process
  const apis: any = (() => {
    const { handlers: handlersMeta } = meta;

    const all = handlersMeta.map(([namespace, functionNames]) => {
      const namespaceApis = functionNames.map(name => {
        const channel = `${namespace}:${name}`;
        return [
          name,
          (...args: any[]) => {
            return ipcRenderer.invoke(channel, ...args);
          },
        ];
      });
      return [namespace, Object.fromEntries(namespaceApis)];
    });

    return Object.fromEntries(all);
  })();

  // main events that can be listened to from the renderer process
  const events: any = (() => {
    const { events: eventsMeta } = meta;

    // NOTE: ui may try to listen to a lot of the same events, so we increase the limit...
    ipcRenderer.setMaxListeners(100);

    const all = eventsMeta.map(([namespace, eventNames]) => {
      const namespaceEvents = eventNames.map(name => {
        const channel = `${namespace}:${name}`;
        return [
          name,
          (callback: (...args: any[]) => void) => {
            const fn: (
              event: Electron.IpcRendererEvent,
              ...args: any[]
            ) => void = (_, ...args) => {
              callback(...args);
            };
            ipcRenderer.on(channel, fn);
            return () => {
              ipcRenderer.off(channel, fn);
            };
          },
        ];
      });
      return [namespace, Object.fromEntries(namespaceEvents)];
    });
    return Object.fromEntries(all);
  })();

  return { apis, events };
}

const helperPort$ = new Promise<MessagePort>(resolve =>
  ipcRenderer.on('helper-connection', async e => {
    console.info('[preload] helper-connection', e);
    resolve(e.ports[0]);
  })
);

const createMessagePortChannel = (port: MessagePort): EventBasedChannel => {
  return {
    on(listener) {
      port.onmessage = e => {
        listener(e.data);
      };
      port.start();
      return () => {
        port.onmessage = null;
        port.close();
      };
    },
    send(data) {
      port.postMessage(data);
    },
  };
};

function getHelperAPIs() {
  const events$ = new Subject<{ channel: string; args: any[] }>();
  const meta: ExposedMeta = (() => {
    const val = process.argv
      .find(arg => arg.startsWith('--helper-exposed-meta='))
      ?.split('=')[1];

    return val ? JSON.parse(val) : null;
  })();

  const rendererToHelperServer: PeersAPIs.RendererToHelper = {
    postEvent: (channel, ...args) => {
      events$.next({ channel, args });
    },
  };

  const rpc = AsyncCall<PeersAPIs.HelperToRenderer>(rendererToHelperServer, {
    channel: helperPort$.then(helperPort =>
      createMessagePortChannel(helperPort)
    ),
    log: false,
  });

  const toHelperHandler = (namespace: string, name: string) => {
    return rpc[`${namespace}:${name}`];
  };

  const toHelperEventSubscriber = (namespace: string, name: string) => {
    return (callback: (...args: any[]) => void) => {
      const subscription = events$.subscribe(({ channel, args }) => {
        if (channel === `${namespace}:${name}`) {
          callback(...args);
        }
      });
      return () => {
        subscription.unsubscribe();
      };
    };
  };

  const setup = (meta: ExposedMeta) => {
    const { handlers: handlersMeta, events: eventsMeta } = meta;

    const helperHandlers = Object.fromEntries(
      handlersMeta.map(([namespace, functionNames]) => {
        return [
          namespace,
          Object.fromEntries(
            functionNames.map(name => {
              return [name, toHelperHandler(namespace, name)];
            })
          ),
        ];
      })
    );

    const helperEvents = Object.fromEntries(
      eventsMeta.map(([namespace, eventNames]) => {
        return [
          namespace,
          Object.fromEntries(
            eventNames.map(name => {
              return [name, toHelperEventSubscriber(namespace, name)];
            })
          ),
        ];
      })
    );
    return [helperHandlers, helperEvents];
  };

  const [apis, events] = setup(meta);

  return { apis, events };
}
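The preload entry is then expected to publish these objects to the renderer with `contextBridge.exposeInMainWorld` (the comment in exposed.ts earlier says as much); the entry file itself is not part of this diff, so the global names below are assumptions:

// Sketch of a preload entry built on the module above; the global names are assumptions.
import { contextBridge } from 'electron';

import { appInfo, getAffineAPIs } from './affine-apis';

const { apis, events } = getAffineAPIs();

contextBridge.exposeInMainWorld('apis', apis);
contextBridge.exposeInMainWorld('events', events);
contextBridge.exposeInMainWorld('appInfo', appInfo);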
Some files were not shown because too many files have changed in this diff.