Mirror of https://github.com/toeverything/AFFiNE.git (synced 2026-02-07 01:53:45 +00:00)
Compare commits
78 Commits
SHA1:
4d25a3f3fe
63b66497d6
2dcc8e2b87
5769425ec1
8c3d35ad56
928ae30474
804e233a7f
1fcdc0f856
b5f7a3177d
3d17c50777
c2f8005574
6ab79dfa69
c8a1391dd8
ef7fd194c4
72ef788927
3c5c6ef4e6
6c63fcdbc7
d8d46cb3a9
1b6e95479f
9aa211dc77
036559e165
eb1c4f7a07
a21067db17
af205cde7c
a0ee00a4b2
8cd5f81076
d83ef83d05
a2acb6cf9f
1e52c5fcfc
d436325a5c
9402c80133
0ade3e65ed
f971e56f15
9731dd3261
5c87af6113
3ac51e8bf1
c3bfc16d27
2ca5ad6509
f9045d357a
3dd89fc244
8cb095a28a
e6c3c6b5f7
5699c99bf6
47babe25b7
fff6ff9778
3343110aef
c4e9544b3f
cc1315ef12
d1505a6c94
b3aac46e38
a0e28152bc
05e45936b9
d273ee955b
28e05dc92c
cd5aec42a0
2352aa8c50
ee6860ed39
706f57f075
311dcd722a
4ef9093b5b
00489dc571
b7afdfc416
3490fa186c
9b721f7628
d3bafe135d
ea21ed6e0d
db4a0fd57c
7824d4c82d
5283010850
fdd93d5ed4
d3df703189
fb5dcb0065
69fb7a590d
79a2786816
5bb113a9a9
7e989ae8cb
3676d6c3f0
7cfb8b0171
@@ -6,3 +6,5 @@ storybook-static
affine-out
_next
lib
.eslintrc.js
packages/i18n/src/i18n-generated.ts
103 .eslintrc.js
@@ -1,3 +1,5 @@
const { resolve } = require('node:path');

const createPattern = packageName => [
{
group: ['**/dist', '**/dist/**'],
@@ -22,20 +24,25 @@ const createPattern = packageName => [
];

const allPackages = [
'cli',
'component',
'debug',
'env',
'graphql',
'hooks',
'i18n',
'jotai',
'native',
'plugin-infra',
'templates',
'theme',
'workspace',
'y-indexeddb',
'packages/cli',
'packages/component',
'packages/debug',
'packages/env',
'packages/graphql',
'packages/hooks',
'packages/i18n',
'packages/jotai',
'packages/native',
'packages/plugin-infra',
'packages/templates',
'packages/theme',
'packages/workspace',
'packages/y-indexeddb',
'apps/web',
'apps/server',
'apps/electron',
'plugins/copilot',
'plugins/bookmark-block',
];

/**
@@ -67,23 +74,28 @@ const config = {
},
ecmaVersion: 'latest',
sourceType: 'module',
project: resolve(__dirname, './tsconfig.eslint.json'),
},
plugins: [
'react',
'@typescript-eslint',
'simple-import-sort',
'sonarjs',
'import',
'unused-imports',
'unicorn',
],
rules: {
'array-callback-return': 'error',
'no-undef': 'off',
'no-empty': 'off',
'no-func-assign': 'off',
'no-cond-assign': 'off',
'no-constant-binary-expression': 'error',
'no-constructor-return': 'error',
'react/prop-types': 'off',
'@typescript-eslint/consistent-type-imports': 'error',
'@typescript-eslint/no-non-null-assertion': 'off',
'@typescript-eslint/no-non-null-assertion': 'error',
'@typescript-eslint/no-explicit-any': 'off',
'@typescript-eslint/no-empty-function': 'off',
'@typescript-eslint/no-unused-vars': [
@@ -97,7 +109,15 @@ const config = {
'unused-imports/no-unused-imports': 'error',
'simple-import-sort/imports': 'error',
'simple-import-sort/exports': 'error',
'@typescript-eslint/ban-ts-comment': 0,
'@typescript-eslint/ban-ts-comment': [
'error',
{
'ts-expect-error': 'allow-with-description',
'ts-ignore': true,
'ts-nocheck': true,
'ts-check': false,
},
],
'@typescript-eslint/no-restricted-imports': [
'error',
{
@@ -122,6 +142,21 @@ const config = {
ignore: ['^\\[[a-zA-Z0-9-_]+\\]\\.tsx$'],
},
],
'sonarjs/no-all-duplicated-branches': 'error',
'sonarjs/no-element-overwrite': 'error',
'sonarjs/no-empty-collection': 'error',
'sonarjs/no-extra-arguments': 'error',
'sonarjs/no-identical-conditions': 'error',
'sonarjs/no-identical-expressions': 'error',
'sonarjs/no-ignored-return': 'error',
'sonarjs/no-one-iteration-loop': 'error',
'sonarjs/no-use-of-empty-return-value': 'error',
'sonarjs/non-existent-operator': 'error',
'sonarjs/no-collapsible-if': 'error',
'sonarjs/no-same-line-conditional': 'error',
'sonarjs/no-duplicated-branches': 'error',
'sonarjs/no-collection-size-mischeck': 'error',
'sonarjs/no-useless-catch': 'error',
},
overrides: [
{
@@ -137,7 +172,10 @@ const config = {
},
},
...allPackages.map(pkg => ({
files: [`packages/${pkg}/src/**/*.ts`, `packages/${pkg}/src/**/*.tsx`],
files: [`${pkg}/src/**/*.ts`, `${pkg}/src/**/*.tsx`],
parserOptions: {
project: resolve(__dirname, './tsconfig.eslint.json'),
},
rules: {
'@typescript-eslint/no-restricted-imports': [
'error',
@@ -145,8 +183,39 @@ const config = {
patterns: createPattern(pkg),
},
],
'@typescript-eslint/no-floating-promises': [
'error',
{
ignoreVoid: false,
ignoreIIFE: false,
},
],
},
})),
{
files: [
'**/__tests__/**/*',
'**/*.stories.tsx',
'**/*.spec.ts',
'**/tests/**/*',
'scripts/**/*',
'**/benchmark/**/*',
'**/__debug__/**/*',
],
rules: {
'@typescript-eslint/no-non-null-assertion': 0,
'@typescript-eslint/ban-ts-comment': [
'error',
{
'ts-expect-error': false,
'ts-ignore': true,
'ts-nocheck': true,
'ts-check': false,
},
],
'@typescript-eslint/no-floating-promises': 0,
},
},
],
};
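For context on the stricter rules above, a short TypeScript sketch (file path, function names, and comments are illustrative, not part of this diff) of what the new per-package configuration reports versus allows:

```ts
// hypothetical packages/component/src/example.ts

// '@typescript-eslint/ban-ts-comment' with 'allow-with-description':
//   // @ts-expect-error                      -> reported (no description)
//   // @ts-expect-error: legacy typing gap   -> allowed

// '@typescript-eslint/no-non-null-assertion' is now an error in package sources:
export function firstChar(s?: string): string {
  // return s!.charAt(0);                     // reported: non-null assertion
  if (s === undefined) {
    throw new Error('missing string');        // allowed: explicit narrowing instead
  }
  return s.charAt(0);
}

// '@typescript-eslint/no-floating-promises' with ignoreVoid/ignoreIIFE set to false:
async function save(): Promise<void> {
  // persist something
}

export async function run(): Promise<void> {
  // save();                                  // reported: un-awaited promise
  await save();                               // allowed
}
```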
1 .github/CODEOWNERS vendored
@@ -1 +0,0 @@
**/en.json @JimmFly
14 .github/actions/build-rust/action.yml vendored
@@ -29,21 +29,23 @@ runs:
if: ${{ inputs.target != 'x86_64-unknown-linux-gnu' && inputs.target != 'aarch64-unknown-linux-gnu' }}
shell: bash
run: yarn workspace @affine/native build --target ${{ inputs.target }}
env:
CARGO_BUILD_INCREMENTAL: 'false'

- name: Build
if: ${{ inputs.target == 'x86_64-unknown-linux-gnu' }}
uses: addnab/docker-run-action@v3
with:
image: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian
options: --user 0:0 -e CARGO_BUILD_INCREMENTAL=false -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build
run: yarn workspace @affine/native build --target ${{ inputs.target }}
options: --user 0:0 -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build
run: >-
export CC=x86_64-unknown-linux-gnu-gcc &&
export CC_x86_64_unknown_linux_gnu=x86_64-unknown-linux-gnu-gcc &&
yarn workspace @affine/native build --target ${{ inputs.target }}

- name: Build
if: ${{ inputs.target == 'aarch64-unknown-linux-gnu' }}
uses: addnab/docker-run-action@v3
with:
image: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian-aarch64
options: --user 0:0 -e CARGO_BUILD_INCREMENTAL=false -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build
run: yarn workspace @affine/native build --target ${{ inputs.target }}
options: --user 0:0 -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build
run: >-
yarn workspace @affine/native build --target ${{ inputs.target }}
79 .github/workflows/build.yml vendored
@@ -23,6 +23,7 @@ on:
env:
DEBUG: napi:*
APP_NAME: affine
COVERAGE: true
MACOSX_DEPLOYMENT_TARGET: '10.13'

jobs:
@@ -51,15 +52,15 @@ jobs:
- uses: actions/checkout@v3
- name: Setup Node.js
uses: ./.github/actions/setup-node
- run: yarn build:storybook
- run: yarn nx build @affine/storybook
- name: Upload storybook artifact
uses: actions/upload-artifact@v3
with:
name: storybook
path: ./packages/component/storybook-static
path: ./packages/storybook/storybook-static
if-no-files-found: error

build:
build-web:
name: Build @affine/web
runs-on: ubuntu-latest
environment: development
@@ -76,22 +77,15 @@ jobs:
key: ${{ runner.os }}-nextjs-${{ hashFiles('**/yarn.lock') }}-${{ hashFiles('**.[jt]s', '**.[jt]sx') }}
restore-keys: |
${{ runner.os }}-nextjs-${{ hashFiles('**/yarn.lock') }}-
- name: Build
run: yarn build
- name: Build Web
run: yarn nx build @affine/web
env:
NEXT_PUBLIC_FIREBASE_API_KEY: ${{ secrets.NEXT_PUBLIC_FIREBASE_API_KEY }}
NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN: ${{ secrets.NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN }}
NEXT_PUBLIC_FIREBASE_PROJECT_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_PROJECT_ID }}
NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET: ${{ secrets.NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET }}
NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID }}
NEXT_PUBLIC_FIREBASE_APP_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_APP_ID }}
NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID }}
API_SERVER_PROFILE: local
ENABLE_DEBUG_PAGE: 1
ENABLE_PLUGIN: true
ENABLE_ALL_PAGE_FILTER: true
ENABLE_LEGACY_PROVIDER: true
COVERAGE: true
ENABLE_PRELOADING: false

- name: Upload artifact
uses: actions/upload-artifact@v3
@@ -100,26 +94,18 @@ jobs:
path: ./apps/web/.next
if-no-files-found: error

- name: Build @affine/web for desktop
run: yarn build
- name: Build Web (Desktop)
run: yarn nx build @affine/web
env:
NEXT_PUBLIC_FIREBASE_API_KEY: ${{ secrets.NEXT_PUBLIC_FIREBASE_API_KEY }}
NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN: ${{ secrets.NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN }}
NEXT_PUBLIC_FIREBASE_PROJECT_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_PROJECT_ID }}
NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET: ${{ secrets.NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET }}
NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID }}
NEXT_PUBLIC_FIREBASE_APP_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_APP_ID }}
NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID }}
API_SERVER_PROFILE: affine
ENABLE_DEBUG_PAGE: 1
ENABLE_PLUGIN: true
ENABLE_ALL_PAGE_FILTER: true
ENABLE_LEGACY_PROVIDER: false
COVERAGE: true
ENABLE_PRELOADING: false

- name: Export static resources
run: yarn export
working-directory: apps/web
run: yarn workspace @affine/web export

- name: Upload static resources artifact
uses: actions/upload-artifact@v3
@@ -168,8 +154,7 @@ jobs:
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Run server tests
run: yarn test:coverage
working-directory: apps/server
run: yarn nx test:coverage @affine/server
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Upload server test coverage results
@@ -196,19 +181,11 @@ jobs:
uses: actions/download-artifact@v3
with:
name: storybook
path: ./packages/component/storybook-static
path: ./packages/storybook/storybook-static
- name: Run storybook tests
working-directory: ./packages/component
working-directory: ./packages/storybook
run: |
yarn exec concurrently -k -s first -n "SB,TEST" -c "magenta,blue" "yarn exec serve ./storybook-static -l 6006" "yarn exec wait-on tcp:6006 && yarn test-storybook --coverage"
- name: Upload storybook test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./packages/component/coverage/storybook/coverage-storybook.json
flags: storybook-test
name: affine
fail_ci_if_error: true
yarn exec concurrently -k -s first -n "SB,TEST" -c "magenta,blue" "yarn exec serve ./storybook-static -l 6006" "yarn exec wait-on tcp:6006 && yarn test"

e2e-test:
name: E2E Test
@@ -218,7 +195,7 @@ jobs:
matrix:
shard: [1, 2, 3, 4]
environment: development
needs: [build, build-storybook]
needs: [build-web, build-storybook]
services:
octobase:
image: ghcr.io/toeverything/cloud-self-hosted:nightly-latest
@@ -245,14 +222,14 @@ jobs:
uses: actions/download-artifact@v3
with:
name: storybook
path: ./packages/component/storybook-static
path: ./packages/storybook/storybook-static

- name: Wait for Octobase Ready
run: |
node ./scripts/wait-3000-healthz.mjs

- name: Run playwright tests
run: yarn test --forbid-only --shard=${{ matrix.shard }}/${{ strategy.job-total }}
run: yarn e2e --forbid-only --shard=${{ matrix.shard }}/${{ strategy.job-total }}
env:
COVERAGE: true

@@ -276,7 +253,7 @@ jobs:
path: ./test-results
if-no-files-found: ignore

dekstop-test:
desktop-test:
name: Desktop Test
runs-on: ${{ matrix.spec.os }}
environment: development
@@ -313,7 +290,7 @@ jobs:
target: x86_64-pc-windows-msvc,
test: true,
}
needs: [build]
needs: [build-web]
steps:
- uses: actions/checkout@v3
- name: Setup Node.js
@@ -327,27 +304,17 @@ jobs:
- name: Run unit tests
if: ${{ matrix.spec.test }}
shell: bash
run: |
rm -rf apps/electron/node_modules/better-sqlite3/build
yarn --cwd apps/electron/node_modules/better-sqlite3 run install
yarn test:unit
run: yarn nx test @affine/monorepo
env:
NATIVE_TEST: 'true'
- name: Build layers
run: yarn workspace @affine/electron build-layers

run: yarn workspace @affine/electron build
- name: Download static resource artifact
uses: actions/download-artifact@v3
with:
name: next-js-static
path: ./apps/electron/resources/web-static

- name: Rebuild Electron dependences
shell: bash
run: |
rm -rf apps/electron/node_modules/better-sqlite3/build
yarn workspace @affine/electron rebuild:for-electron --arch=${{ matrix.spec.arch }}

- name: Run desktop tests
if: ${{ matrix.spec.test && matrix.spec.os == 'ubuntu-latest' }}
run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- yarn workspace @affine/electron test
@@ -407,7 +374,7 @@ jobs:
uses: ./.github/actions/setup-node

- name: Unit Test
run: yarn run test:unit:coverage
run: yarn nx test:coverage @affine/monorepo

- name: Upload unit test coverage results
uses: codecov/codecov-action@v3
21 .github/workflows/nightly-build.yml vendored
@@ -3,7 +3,10 @@ name: Build Canary Desktop App on Staging Branch
on:
push:
branches:
# 0.6.x-staging
- v[0-9]+.[0-9]+.x-staging
# 0.6.1-staging
- v[0-9]+.[0-9]+.[0-9]+-staging
paths-ignore:
- README.md
- .github/**
@@ -50,15 +53,6 @@ jobs:
working-directory: apps/electron
run: yarn generate-assets
env:
NEXT_PUBLIC_FIREBASE_API_KEY: ${{ secrets.NEXT_PUBLIC_FIREBASE_API_KEY }}
NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN: ${{ secrets.NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN }}
NEXT_PUBLIC_FIREBASE_PROJECT_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_PROJECT_ID }}
NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET: ${{ secrets.NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET }}
NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID }}
NEXT_PUBLIC_FIREBASE_APP_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_APP_ID }}
NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID }}
AFFINE_GOOGLE_CLIENT_ID: ${{ secrets.AFFINE_GOOGLE_CLIENT_ID }}
AFFINE_GOOGLE_CLIENT_SECRET: ${{ secrets.AFFINE_GOOGLE_CLIENT_SECRET }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
NEXT_PUBLIC_SENTRY_DSN: ${{ secrets.NEXT_PUBLIC_SENTRY_DSN }}
@@ -66,8 +60,8 @@ jobs:
API_SERVER_PROFILE: prod
ENABLE_TEST_PROPERTIES: false
ENABLE_IMAGE_PREVIEW_MODAL: false
RELEASE_VERSION: ${{ needs.set-build-version.outputs.version }}
ENABLE_BOOKMARK_OPERATION: true
RELEASE_VERSION: ${{ needs.set-build-version.outputs.version }}

- name: Upload Artifact (web-static)
uses: actions/upload-artifact@v3
@@ -128,14 +122,9 @@ jobs:
with:
name: before-make-web-static
path: apps/electron/resources/web-static
- name: Rebuild Electron dependences
shell: bash
run: |
rm -rf apps/electron/node_modules/better-sqlite3/build
yarn workspace @affine/electron rebuild:for-electron --arch=${{ matrix.spec.arch }}

- name: Build layers
run: yarn workspace @affine/electron build-layers
run: yarn workspace @affine/electron build

- name: Signing By Apple Developer ID
if: ${{ matrix.spec.platform == 'darwin' }}
20 .github/workflows/release-desktop-app.yml vendored
@@ -51,24 +51,14 @@ jobs:
- name: generate-assets
run: yarn workspace @affine/electron generate-assets
env:
NEXT_PUBLIC_FIREBASE_API_KEY: ${{ secrets.NEXT_PUBLIC_FIREBASE_API_KEY }}
NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN: ${{ secrets.NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN }}
NEXT_PUBLIC_FIREBASE_PROJECT_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_PROJECT_ID }}
NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET: ${{ secrets.NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET }}
NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID }}
NEXT_PUBLIC_FIREBASE_APP_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_APP_ID }}
NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID }}
AFFINE_GOOGLE_CLIENT_ID: ${{ secrets.AFFINE_GOOGLE_CLIENT_ID }}
AFFINE_GOOGLE_CLIENT_SECRET: ${{ secrets.AFFINE_GOOGLE_CLIENT_SECRET }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
NEXT_PUBLIC_SENTRY_DSN: ${{ secrets.NEXT_PUBLIC_SENTRY_DSN }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
API_SERVER_PROFILE: prod
ENABLE_TEST_PROPERTIES: false
ENABLE_IMAGE_PREVIEW_MODAL: false
RELEASE_VERSION: ${{ github.event.inputs.version }}
ENABLE_BOOKMARK_OPERATION: true
RELEASE_VERSION: ${{ github.event.inputs.version }}

- name: Upload Artifact (web-static)
uses: actions/upload-artifact@v3
@@ -126,14 +116,8 @@ jobs:
name: before-make-web-static
path: apps/electron/resources/web-static

- name: Rebuild Electron dependences
shell: bash
run: |
rm -rf apps/electron/node_modules/better-sqlite3/build
yarn workspace @affine/electron rebuild:for-electron --arch=${{ matrix.spec.arch }}

- name: Build layers
run: yarn workspace @affine/electron build-layers
run: yarn workspace @affine/electron build

- name: Signing By Apple Developer ID
if: ${{ matrix.spec.platform == 'darwin' }}
2 .gitignore vendored
@@ -66,9 +66,11 @@ i18n-generated.ts
# Cache
.eslintcache
next-env.d.ts
.rollup.cache

# Rust
target
*.node
tsconfig.node.tsbuildinfo
lib
affine.db
6 .vscode/settings.template.json vendored
@@ -34,9 +34,11 @@
"packages/**/*.spec.tsx",
"apps/web/**/*.spec.ts",
"apps/web/**/*.spec.tsx",
"apps/electron/layers/**/*.spec.ts",
"apps/electron/src/**/*.spec.ts",
"tests/unit/**/*.spec.ts",
"tests/unit/**/*.spec.tsx"
],
"deepscan.enable": true
"rust-analyzer.check.extraEnv": {
"DATABASE_URL": "sqlite:affine.db"
}
}
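The globs above appear to drive test discovery in the editor (likely the Vitest extension). As a point of reference only, a minimal sketch of the same include set in a root-level vitest.config.ts (a hypothetical file, not part of this diff):

```ts
import { defineConfig } from 'vitest/config';

export default defineConfig({
  test: {
    // mirrors the spec globs from the settings template above
    include: [
      'packages/**/*.spec.tsx',
      'apps/web/**/*.spec.ts',
      'apps/web/**/*.spec.tsx',
      'apps/electron/src/**/*.spec.ts',
      'tests/unit/**/*.spec.ts',
      'tests/unit/**/*.spec.tsx',
    ],
  },
});
```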
File diff suppressed because one or more lines are too long
@@ -16,4 +16,4 @@ plugins:
- path: .yarn/plugins/@yarnpkg/plugin-workspace-tools.cjs
spec: '@yarnpkg/plugin-workspace-tools'

yarnPath: .yarn/releases/yarn-3.5.0.cjs
yarnPath: .yarn/releases/yarn-3.6.0.cjs
1433 Cargo.lock generated
File diff suppressed because it is too large
@@ -1,5 +1,8 @@
[workspace]
members = ["./packages/native"]
members = ["./packages/native", "./packages/native/schema"]

[profile.dev.package.sqlx-macros]
opt-level = 3

[profile.release]
lto = true
@@ -83,7 +83,7 @@ Star us, and you will receive all releases notifications from GitHub without any

## Features

- **Hyper merged** — Write, draw and plan all at once. Assemble any blocks you love on any canvas you like to enjoy seamless transitions bewtween workflows with AFFiNE.
- **Hyper merged** — Write, draw and plan all at once. Assemble any blocks you love on any canvas you like to enjoy seamless transitions between workflows with AFFiNE.
- **Privacy focussed** — AFFiNE is built with your privacy in mind and is one of our key concerns. We want you to keep control of your data, allowing you to store it as you like, where you like while still being able to freely edit and view your data on-demand.
- **Offline-first** - With your privacy in mind we also decided to go offline-first. This means that AFFiNE can be used offline, whether you want to view or edit, with support for conflict-free merging when you are back online.
- **Clean, intuitive design** — With AFFiNE you can concentrate on editing with a clean and modern interface. Which is responsive, so it looks great on tablets too, and mobile support is coming in the future.
@@ -136,11 +136,12 @@ We would also like to give thanks to open-source projects that make AFFiNE possi
- [BlockSuite](https://github.com/toeverything/BlockSuite) - 💠 BlockSuite is the open-source collaborative editor project behind AFFiNE.
- [OctoBase](https://github.com/toeverything/OctoBase) - 🐙 OctoBase is the open-source database behind AFFiNE, local-first, yet collaborative. A light-weight, scalable, data engine written in Rust.
- [Yjs](https://github.com/yjs/yjs) & [Yrs](https://github.com/y-crdt/y-crdt) - Fundamental support of CRDTs for our implementation on state management and data sync.
- [Next.js](https://github.com/vercel/next.js) - The React Framework.
- [Electron](https://github.com/electron/electron) - Build cross-platform desktop apps with JavaScript, HTML, and CSS.
- [React](https://github.com/facebook/react) - View layer support and web GUI framework.
- [Rust](https://github.com/rust-lang/rust) - High performance language that extends the ability and availability of our real-time backend, OctoBase.
- [Jotai](https://github.com/pmndrs/jotai) - Primitive and flexible state management for React.
- [MUI](https://github.com/mui/material-ui) - Our most used graphic UI component library.
- [async-call-rpc](https://github.com/Jack-Works/async-call-rpc) - A lightweight JSON RPC client & server.
- Other upstream [dependencies](https://github.com/toeverything/AFFiNE/network/dependencies).

Thanks a lot to the community for providing such powerful and simple libraries, so that we can focus more on the implementation of the product logic, and we hope that in the future our projects will also provide a more easy-to-use knowledge base for everyone.
@@ -150,7 +151,7 @@ Thanks a lot to the community for providing such powerful and simple libraries,
We would like to express our gratitude to all the individuals who have already contributed to AFFiNE! If you have any AFFiNE-related project, documentation, tool or template, please feel free to contribute it by submitting a pull request to our curated list on GitHub: [awesome-affine](https://github.com/toeverything/awesome-affine).

<a href="https://github.com/toeverything/affine/graphs/contributors">
<img src="https://user-images.githubusercontent.com/5910926/240508358-93eddded-48a0-40cd-85e4-a1d172dbe1d9.svg" />
<img alt="contributors" src="https://opencollective.com/affine/contributors.svg?width=890&button=false" />
</a>

## Self-Host
1 apps/electron/.gitignore vendored
@@ -1,5 +1,6 @@
*.autogen.*
dist
e2e-dist-*

resources/web-static
@@ -17,22 +17,6 @@ yarn dev # or yarn prod for production build

## Troubleshooting

### better-sqlite3 error

When running tests or starting electron, you may encounter the following error:

> Error: The module 'apps/electron/node_modules/better-sqlite3/build/Release/better_sqlite3.node'

This is due to the fact that the `better-sqlite3` package is built for the Node.js version in Electron & in your machine. To fix this, run the following command based on different cases:

```sh
# for running unit tests, we are not using Electron's node:
yarn rebuild better-sqlite3

# for running Electron, we are using Electron's node:
yarn postinstall
```

## Credits

Most of the boilerplate code is generously borrowed from the following
@@ -1,4 +1,5 @@
/* eslint-disable @typescript-eslint/no-var-requires */

const { z } = require('zod');

const {
@@ -51,8 +52,6 @@ module.exports = {
teamId: process.env.APPLE_TEAM_ID,
}
: undefined,
// do we need the following line?
extraResource: ['./resources/app-update.yml'],
},
makers: [
{
@@ -104,6 +103,27 @@ module.exports = {
// so stable and canary will not share the same app data
packageJson.productName = productName;
},
prePackage: async () => {
const { rm, cp } = require('node:fs/promises');
const { resolve } = require('node:path');

await rm(
resolve(__dirname, './node_modules/@toeverything/plugin-infra'),
{
recursive: true,
force: true,
}
);

await cp(
resolve(__dirname, '../../packages/plugin-infra'),
resolve(__dirname, './node_modules/@toeverything/plugin-infra'),
{
recursive: true,
force: true,
}
);
},
generateAssets: async (_, platform, arch) => {
if (process.env.SKIP_GENERATE_ASSETS) {
return;
@@ -1,494 +0,0 @@
|
||||
import assert from 'node:assert';
|
||||
import path from 'node:path';
|
||||
|
||||
import fs from 'fs-extra';
|
||||
import { v4 } from 'uuid';
|
||||
import { afterEach, beforeEach, describe, expect, test, vi } from 'vitest';
|
||||
import * as Y from 'yjs';
|
||||
|
||||
import type { MainIPCHandlerMap } from '../exposed';
|
||||
|
||||
const registeredHandlers = new Map<
|
||||
string,
|
||||
((...args: any[]) => Promise<any>)[]
|
||||
>();
|
||||
|
||||
const delay = (ms: number) => new Promise(r => setTimeout(r, ms));
|
||||
|
||||
type WithoutFirstParameter<T> = T extends (_: any, ...args: infer P) => infer R
|
||||
? (...args: P) => R
|
||||
: T;
|
||||
|
||||
// common mock dispatcher for ipcMain.handle AND app.on
|
||||
// alternatively, we can use single parameter for T & F, eg, dispatch('workspace:list'),
|
||||
// however this is too hard to be typed correctly
|
||||
async function dispatch<
|
||||
T extends keyof MainIPCHandlerMap,
|
||||
F extends keyof MainIPCHandlerMap[T]
|
||||
>(
|
||||
namespace: T,
|
||||
functionName: F,
|
||||
// @ts-ignore
|
||||
...args: Parameters<WithoutFirstParameter<MainIPCHandlerMap[T][F]>>
|
||||
): // @ts-ignore
|
||||
ReturnType<MainIPCHandlerMap[T][F]> {
|
||||
// @ts-ignore
|
||||
const handlers = registeredHandlers.get(namespace + ':' + functionName);
|
||||
assert(handlers);
|
||||
|
||||
// we only care about the first handler here
|
||||
return await handlers[0](null, ...args);
|
||||
}
|
||||
|
||||
const SESSION_DATA_PATH = path.join(__dirname, './tmp', 'affine-test');
|
||||
const DOCUMENTS_PATH = path.join(__dirname, './tmp', 'affine-test-documents');
|
||||
|
||||
const browserWindow = {
|
||||
isDestroyed: () => {
|
||||
return false;
|
||||
},
|
||||
setWindowButtonVisibility: (_v: boolean) => {
|
||||
// will be stubbed later
|
||||
},
|
||||
webContents: {
|
||||
send: (_type: string, ..._args: any[]) => {
|
||||
// will be stubbed later
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const ipcMain = {
|
||||
handle: (key: string, callback: (...args: any[]) => Promise<any>) => {
|
||||
const handlers = registeredHandlers.get(key) || [];
|
||||
handlers.push(callback);
|
||||
registeredHandlers.set(key, handlers);
|
||||
},
|
||||
setMaxListeners: (_n: number) => {
|
||||
// noop
|
||||
},
|
||||
};
|
||||
|
||||
const nativeTheme = {
|
||||
themeSource: 'light',
|
||||
};
|
||||
|
||||
function compareBuffer(a: Uint8Array | null, b: Uint8Array | null) {
|
||||
if (
|
||||
(a === null && b === null) ||
|
||||
a === null ||
|
||||
b === null ||
|
||||
a.length !== b.length
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
for (let i = 0; i < a.length; i++) {
|
||||
if (a[i] !== b[i]) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
const electronModule = {
|
||||
app: {
|
||||
getPath: (name: string) => {
|
||||
if (name === 'sessionData') {
|
||||
return SESSION_DATA_PATH;
|
||||
} else if (name === 'documents') {
|
||||
return DOCUMENTS_PATH;
|
||||
}
|
||||
throw new Error('not implemented');
|
||||
},
|
||||
name: 'affine-test',
|
||||
on: (name: string, callback: (...args: any[]) => any) => {
|
||||
const handlers = registeredHandlers.get(name) || [];
|
||||
handlers.push(callback);
|
||||
registeredHandlers.set(name, handlers);
|
||||
},
|
||||
addEventListener: (...args: any[]) => {
|
||||
// @ts-ignore
|
||||
electronModule.app.on(...args);
|
||||
},
|
||||
removeEventListener: () => {},
|
||||
},
|
||||
BrowserWindow: {
|
||||
getAllWindows: () => {
|
||||
return [browserWindow];
|
||||
},
|
||||
},
|
||||
nativeTheme: nativeTheme,
|
||||
ipcMain,
|
||||
shell: {} as Partial<Electron.Shell>,
|
||||
dialog: {} as Partial<Electron.Dialog>,
|
||||
};
|
||||
|
||||
// dynamically import handlers so that we can inject local variables to mocks
|
||||
vi.doMock('electron', () => {
|
||||
return electronModule;
|
||||
});
|
||||
|
||||
beforeEach(async () => {
|
||||
const { registerHandlers } = await import('../handlers');
|
||||
registerHandlers();
|
||||
|
||||
// should also register events
|
||||
const { registerEvents } = await import('../events');
|
||||
registerEvents();
|
||||
await fs.mkdirp(SESSION_DATA_PATH);
|
||||
await import('../db/ensure-db');
|
||||
|
||||
registeredHandlers.get('ready')?.forEach(fn => fn());
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
// reset registered handlers
|
||||
registeredHandlers.get('before-quit')?.forEach(fn => fn());
|
||||
|
||||
await fs.remove(SESSION_DATA_PATH);
|
||||
});
|
||||
|
||||
describe('ensureSQLiteDB', () => {
|
||||
test('should create db file on connection if it does not exist', async () => {
|
||||
const id = v4();
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
const workspaceDB = await ensureSQLiteDB(id);
|
||||
const file = workspaceDB.path;
|
||||
const fileExists = await fs.pathExists(file);
|
||||
expect(fileExists).toBe(true);
|
||||
});
|
||||
|
||||
test('should emit the same db instance for the same id', async () => {
|
||||
const [id1, id2] = [v4(), v4()];
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
const workspaceDB1 = await ensureSQLiteDB(id1);
|
||||
const workspaceDB2 = await ensureSQLiteDB(id2);
|
||||
const workspaceDB3 = await ensureSQLiteDB(id1);
|
||||
expect(workspaceDB1).toBe(workspaceDB3);
|
||||
expect(workspaceDB1).not.toBe(workspaceDB2);
|
||||
});
|
||||
|
||||
test('when app quit, db should be closed', async () => {
|
||||
const id = v4();
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
const workspaceDB = await ensureSQLiteDB(id);
|
||||
const file = workspaceDB.path;
|
||||
const fileExists = await fs.pathExists(file);
|
||||
expect(fileExists).toBe(true);
|
||||
registeredHandlers.get('before-quit')?.forEach(fn => fn());
|
||||
await delay(100);
|
||||
expect(workspaceDB.db).toBe(null);
|
||||
});
|
||||
});
|
||||
|
||||
describe('workspace handlers', () => {
|
||||
test('list all workspace ids', async () => {
|
||||
const ids = [v4(), v4()];
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
await Promise.all(ids.map(id => ensureSQLiteDB(id)));
|
||||
const list = await dispatch('workspace', 'list');
|
||||
expect(list.map(([id]) => id).sort()).toEqual(ids.sort());
|
||||
});
|
||||
|
||||
test('delete workspace', async () => {
|
||||
const ids = [v4(), v4()];
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
const dbs = await Promise.all(ids.map(id => ensureSQLiteDB(id)));
|
||||
await dispatch('workspace', 'delete', ids[1]);
|
||||
const list = await dispatch('workspace', 'list');
|
||||
expect(list.map(([id]) => id)).toEqual([ids[0]]);
|
||||
// deleted db should be closed
|
||||
expect(dbs[1].db).toBe(null);
|
||||
});
|
||||
});
|
||||
|
||||
describe('UI handlers', () => {
|
||||
test('theme-change', async () => {
|
||||
await dispatch('ui', 'handleThemeChange', 'dark');
|
||||
expect(nativeTheme.themeSource).toBe('dark');
|
||||
await dispatch('ui', 'handleThemeChange', 'light');
|
||||
expect(nativeTheme.themeSource).toBe('light');
|
||||
});
|
||||
|
||||
test('sidebar-visibility-change (macOS)', async () => {
|
||||
vi.stubGlobal('process', { platform: 'darwin' });
|
||||
const setWindowButtonVisibility = vi.fn();
|
||||
browserWindow.setWindowButtonVisibility = setWindowButtonVisibility;
|
||||
await dispatch('ui', 'handleSidebarVisibilityChange', true);
|
||||
expect(setWindowButtonVisibility).toBeCalledWith(true);
|
||||
await dispatch('ui', 'handleSidebarVisibilityChange', false);
|
||||
expect(setWindowButtonVisibility).toBeCalledWith(false);
|
||||
vi.unstubAllGlobals();
|
||||
});
|
||||
|
||||
test('sidebar-visibility-change (non-macOS)', async () => {
|
||||
vi.stubGlobal('process', { platform: 'linux' });
|
||||
const setWindowButtonVisibility = vi.fn();
|
||||
browserWindow.setWindowButtonVisibility = setWindowButtonVisibility;
|
||||
await dispatch('ui', 'handleSidebarVisibilityChange', true);
|
||||
expect(setWindowButtonVisibility).not.toBeCalled();
|
||||
vi.unstubAllGlobals();
|
||||
});
|
||||
});
|
||||
|
||||
describe('db handlers', () => {
|
||||
test('apply doc and get doc updates', async () => {
|
||||
const workspaceId = v4();
|
||||
const bin = await dispatch('db', 'getDocAsUpdates', workspaceId);
|
||||
// ? is this a good test?
|
||||
expect(bin.every((byte: number) => byte === 0)).toBe(true);
|
||||
|
||||
const ydoc = new Y.Doc();
|
||||
const ytext = ydoc.getText('test');
|
||||
ytext.insert(0, 'hello world');
|
||||
const bin2 = Y.encodeStateAsUpdate(ydoc);
|
||||
|
||||
await dispatch('db', 'applyDocUpdate', workspaceId, bin2);
|
||||
|
||||
const bin3 = await dispatch('db', 'getDocAsUpdates', workspaceId);
|
||||
const ydoc2 = new Y.Doc();
|
||||
Y.applyUpdate(ydoc2, bin3);
|
||||
const ytext2 = ydoc2.getText('test');
|
||||
expect(ytext2.toString()).toBe('hello world');
|
||||
});
|
||||
|
||||
test('get non existent blob', async () => {
|
||||
const workspaceId = v4();
|
||||
const bin = await dispatch('db', 'getBlob', workspaceId, 'non-existent-id');
|
||||
expect(bin).toBeNull();
|
||||
});
|
||||
|
||||
test('list blobs (empty)', async () => {
|
||||
const workspaceId = v4();
|
||||
const list = await dispatch('db', 'getBlobKeys', workspaceId);
|
||||
expect(list).toEqual([]);
|
||||
});
|
||||
|
||||
test('CRUD blobs', async () => {
|
||||
const testBin = new Uint8Array([1, 2, 3, 4, 5]);
|
||||
const testBin2 = new Uint8Array([6, 7, 8, 9, 10]);
|
||||
const workspaceId = 'test-workspace-id';
|
||||
|
||||
// add blob
|
||||
await dispatch('db', 'addBlob', workspaceId, 'testBin', testBin);
|
||||
|
||||
// get blob
|
||||
expect(
|
||||
compareBuffer(
|
||||
await dispatch('db', 'getBlob', workspaceId, 'testBin'),
|
||||
testBin
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
// add another blob
|
||||
await dispatch('db', 'addBlob', workspaceId, 'testBin2', testBin2);
|
||||
expect(
|
||||
compareBuffer(
|
||||
await dispatch('db', 'getBlob', workspaceId, 'testBin2'),
|
||||
testBin2
|
||||
)
|
||||
).toBe(true);
|
||||
|
||||
// list blobs
|
||||
let lists = await dispatch('db', 'getBlobKeys', workspaceId);
|
||||
expect(lists).toHaveLength(2);
|
||||
expect(lists).toContain('testBin');
|
||||
expect(lists).toContain('testBin2');
|
||||
|
||||
// delete blob
|
||||
await dispatch('db', 'deleteBlob', workspaceId, 'testBin');
|
||||
lists = await dispatch('db', 'getBlobKeys', workspaceId);
|
||||
expect(lists).toEqual(['testBin2']);
|
||||
});
|
||||
});
|
||||
|
||||
describe('dialog handlers', () => {
|
||||
test('revealDBFile', async () => {
|
||||
const mockShowItemInFolder = vi.fn();
|
||||
electronModule.shell.showItemInFolder = mockShowItemInFolder;
|
||||
|
||||
const id = v4();
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
const db = await ensureSQLiteDB(id);
|
||||
|
||||
await dispatch('dialog', 'revealDBFile', id);
|
||||
expect(mockShowItemInFolder).toBeCalledWith(db.path);
|
||||
});
|
||||
|
||||
test('saveDBFileAs (skipped)', async () => {
|
||||
const mockShowSaveDialog = vi.fn(() => {
|
||||
return { filePath: undefined };
|
||||
}) as any;
|
||||
const mockShowItemInFolder = vi.fn();
|
||||
electronModule.dialog.showSaveDialog = mockShowSaveDialog;
|
||||
electronModule.shell.showItemInFolder = mockShowItemInFolder;
|
||||
|
||||
const id = v4();
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
await ensureSQLiteDB(id);
|
||||
|
||||
await dispatch('dialog', 'saveDBFileAs', id);
|
||||
expect(mockShowSaveDialog).toBeCalled();
|
||||
expect(mockShowItemInFolder).not.toBeCalled();
|
||||
electronModule.dialog = {};
|
||||
electronModule.shell = {};
|
||||
});
|
||||
|
||||
test('saveDBFileAs', async () => {
|
||||
const newSavedPath = path.join(SESSION_DATA_PATH, 'saved-to');
|
||||
const mockShowSaveDialog = vi.fn(() => {
|
||||
return { filePath: newSavedPath };
|
||||
}) as any;
|
||||
const mockShowItemInFolder = vi.fn();
|
||||
electronModule.dialog.showSaveDialog = mockShowSaveDialog;
|
||||
electronModule.shell.showItemInFolder = mockShowItemInFolder;
|
||||
|
||||
const id = v4();
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
await ensureSQLiteDB(id);
|
||||
|
||||
await dispatch('dialog', 'saveDBFileAs', id);
|
||||
expect(mockShowSaveDialog).toBeCalled();
|
||||
expect(mockShowItemInFolder).toBeCalledWith(newSavedPath);
|
||||
|
||||
// check if file is saved to new path
|
||||
expect(await fs.exists(newSavedPath)).toBe(true);
|
||||
});
|
||||
|
||||
test('loadDBFile (skipped)', async () => {
|
||||
const mockShowOpenDialog = vi.fn(() => {
|
||||
return { filePaths: undefined };
|
||||
}) as any;
|
||||
electronModule.dialog.showOpenDialog = mockShowOpenDialog;
|
||||
|
||||
const res = await dispatch('dialog', 'loadDBFile');
|
||||
expect(mockShowOpenDialog).toBeCalled();
|
||||
expect(res.canceled).toBe(true);
|
||||
});
|
||||
|
||||
test('loadDBFile (error, in app-data)', async () => {
|
||||
const mockShowOpenDialog = vi.fn(() => {
|
||||
return {
|
||||
filePaths: [path.join(SESSION_DATA_PATH, 'workspaces')],
|
||||
};
|
||||
}) as any;
|
||||
electronModule.dialog.showOpenDialog = mockShowOpenDialog;
|
||||
|
||||
const res = await dispatch('dialog', 'loadDBFile');
|
||||
expect(mockShowOpenDialog).toBeCalled();
|
||||
expect(res.error).toBe('DB_FILE_PATH_INVALID');
|
||||
});
|
||||
|
||||
test('loadDBFile (error, not a valid affine file)', async () => {
|
||||
// create a random db file
|
||||
const basePath = path.join(SESSION_DATA_PATH, 'random-path');
|
||||
const dbPath = path.join(basePath, 'xxx.affine');
|
||||
await fs.ensureDir(basePath);
|
||||
await fs.writeFile(dbPath, 'hello world');
|
||||
|
||||
const mockShowOpenDialog = vi.fn(() => {
|
||||
return { filePaths: [dbPath] };
|
||||
}) as any;
|
||||
electronModule.dialog.showOpenDialog = mockShowOpenDialog;
|
||||
|
||||
const res = await dispatch('dialog', 'loadDBFile');
|
||||
expect(mockShowOpenDialog).toBeCalled();
|
||||
expect(res.error).toBe('DB_FILE_INVALID');
|
||||
|
||||
electronModule.dialog = {};
|
||||
});
|
||||
|
||||
test('loadDBFile (correct)', async () => {
|
||||
// we use ensureSQLiteDB to create a valid db file
|
||||
const id = v4();
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
const db = await ensureSQLiteDB(id);
|
||||
|
||||
// copy db file to dbPath
|
||||
const basePath = path.join(SESSION_DATA_PATH, 'random-path');
|
||||
const clonedDBPath = path.join(basePath, 'xxx.affine');
|
||||
await fs.ensureDir(basePath);
|
||||
await fs.copyFile(db.path, clonedDBPath);
|
||||
|
||||
// delete workspace
|
||||
await dispatch('workspace', 'delete', id);
|
||||
|
||||
// try load originDBFilePath
|
||||
const mockShowOpenDialog = vi.fn(() => {
|
||||
return { filePaths: [clonedDBPath] };
|
||||
}) as any;
|
||||
electronModule.dialog.showOpenDialog = mockShowOpenDialog;
|
||||
|
||||
const res = await dispatch('dialog', 'loadDBFile');
|
||||
expect(mockShowOpenDialog).toBeCalled();
|
||||
const newId = res.workspaceId;
|
||||
|
||||
expect(newId).not.toBeUndefined();
|
||||
|
||||
assert(newId);
|
||||
|
||||
const meta = await dispatch('workspace', 'getMeta', newId);
|
||||
|
||||
expect(meta.secondaryDBPath).toBe(clonedDBPath);
|
||||
|
||||
// try load it again, will trigger error (db file already loaded)
|
||||
const res2 = await dispatch('dialog', 'loadDBFile');
|
||||
expect(res2.error).toBe('DB_FILE_ALREADY_LOADED');
|
||||
});
|
||||
|
||||
test('moveDBFile (valid)', async () => {
|
||||
const sendStub = vi.fn();
|
||||
browserWindow.webContents.send = sendStub;
|
||||
const newPath = path.join(SESSION_DATA_PATH, 'xxx');
|
||||
const showOpenDialog = vi.fn(() => {
|
||||
return { filePaths: [newPath] };
|
||||
}) as any;
|
||||
electronModule.dialog.showOpenDialog = showOpenDialog;
|
||||
|
||||
const id = v4();
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
const db = await ensureSQLiteDB(id);
|
||||
const res = await dispatch('dialog', 'moveDBFile', id);
|
||||
expect(showOpenDialog).toBeCalled();
|
||||
assert(res.filePath);
|
||||
expect(path.dirname(res.filePath)).toBe(newPath);
|
||||
expect(res.filePath.endsWith('.affine')).toBe(true);
|
||||
// should also send workspace meta change event
|
||||
expect(sendStub).toBeCalledWith('workspace:onMetaChange', {
|
||||
workspaceId: id,
|
||||
meta: { id, secondaryDBPath: res.filePath, mainDBPath: db.path },
|
||||
});
|
||||
electronModule.dialog = {};
|
||||
browserWindow.webContents.send = () => {};
|
||||
});
|
||||
|
||||
test('moveDBFile (canceled)', async () => {
|
||||
const showOpenDialog = vi.fn(() => {
|
||||
return { filePaths: null };
|
||||
}) as any;
|
||||
electronModule.dialog.showOpenDialog = showOpenDialog;
|
||||
|
||||
const id = v4();
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
await ensureSQLiteDB(id);
|
||||
|
||||
const res = await dispatch('dialog', 'moveDBFile', id);
|
||||
expect(showOpenDialog).toBeCalled();
|
||||
expect(res.filePath).toBe(undefined);
|
||||
electronModule.dialog = {};
|
||||
});
|
||||
});
|
||||
|
||||
describe('applicationMenu', () => {
|
||||
// test some basic IPC events
|
||||
test('applicationMenu event', async () => {
|
||||
const { applicationMenuSubjects } = await import('../application-menu');
|
||||
const sendStub = vi.fn();
|
||||
browserWindow.webContents.send = sendStub;
|
||||
applicationMenuSubjects.newPageAction.next();
|
||||
expect(sendStub).toHaveBeenCalledWith(
|
||||
'applicationMenu:onNewPageAction',
|
||||
undefined
|
||||
);
|
||||
browserWindow.webContents.send = () => {};
|
||||
});
|
||||
});
|
||||
@@ -1,12 +0,0 @@
import { app } from 'electron';

export const appContext = {
get appName() {
return app.name;
},
get appDataPath() {
return app.getPath('sessionData');
},
};

export type AppContext = typeof appContext;
@@ -1,152 +0,0 @@
|
||||
import assert from 'assert';
|
||||
import type { Database } from 'better-sqlite3';
|
||||
import sqlite from 'better-sqlite3';
|
||||
|
||||
import { logger } from '../logger';
|
||||
|
||||
const schemas = [
|
||||
`CREATE TABLE IF NOT EXISTS "updates" (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
data BLOB NOT NULL,
|
||||
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
|
||||
)`,
|
||||
`CREATE TABLE IF NOT EXISTS "blobs" (
|
||||
key TEXT PRIMARY KEY NOT NULL,
|
||||
data BLOB NOT NULL,
|
||||
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
|
||||
)`,
|
||||
];
|
||||
|
||||
interface UpdateRow {
|
||||
id: number;
|
||||
data: Buffer;
|
||||
timestamp: string;
|
||||
}
|
||||
|
||||
interface BlobRow {
|
||||
key: string;
|
||||
data: Buffer;
|
||||
timestamp: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* A base class for SQLite DB adapter that provides basic methods around updates & blobs
|
||||
*/
|
||||
export abstract class BaseSQLiteAdapter {
|
||||
db: Database | null = null;
|
||||
abstract role: string;
|
||||
|
||||
constructor(public path: string) {}
|
||||
|
||||
ensureTables() {
|
||||
assert(this.db, 'db is not connected');
|
||||
this.db.exec(schemas.join(';'));
|
||||
}
|
||||
|
||||
// todo: what if SQLite DB wrapper later is not sync?
|
||||
connect(): Database | undefined {
|
||||
if (this.db) {
|
||||
return this.db;
|
||||
}
|
||||
logger.log(`[SQLiteAdapter][${this.role}] open db`, this.path);
|
||||
const db = (this.db = sqlite(this.path));
|
||||
this.ensureTables();
|
||||
return db;
|
||||
}
|
||||
|
||||
destroy() {
|
||||
this.db?.close();
|
||||
this.db = null;
|
||||
}
|
||||
|
||||
addBlob(key: string, data: Uint8Array) {
|
||||
try {
|
||||
assert(this.db, 'db is not connected');
|
||||
const statement = this.db.prepare(
|
||||
'INSERT INTO blobs (key, data) VALUES (?, ?) ON CONFLICT(key) DO UPDATE SET data = ?'
|
||||
);
|
||||
statement.run(key, data, data);
|
||||
return key;
|
||||
} catch (error) {
|
||||
logger.error('addBlob', error);
|
||||
}
|
||||
}
|
||||
|
||||
getBlob(key: string) {
|
||||
try {
|
||||
assert(this.db, 'db is not connected');
|
||||
const statement = this.db.prepare('SELECT data FROM blobs WHERE key = ?');
|
||||
const row = statement.get(key) as BlobRow;
|
||||
if (!row) {
|
||||
return null;
|
||||
}
|
||||
return row.data;
|
||||
} catch (error) {
|
||||
logger.error('getBlob', error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
deleteBlob(key: string) {
|
||||
try {
|
||||
assert(this.db, 'db is not connected');
|
||||
const statement = this.db.prepare('DELETE FROM blobs WHERE key = ?');
|
||||
statement.run(key);
|
||||
} catch (error) {
|
||||
logger.error('deleteBlob', error);
|
||||
}
|
||||
}
|
||||
|
||||
getBlobKeys() {
|
||||
try {
|
||||
assert(this.db, 'db is not connected');
|
||||
const statement = this.db.prepare('SELECT key FROM blobs');
|
||||
const rows = statement.all() as BlobRow[];
|
||||
return rows.map(row => row.key);
|
||||
} catch (error) {
|
||||
logger.error('getBlobKeys', error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
getUpdates() {
|
||||
try {
|
||||
assert(this.db, 'db is not connected');
|
||||
const statement = this.db.prepare('SELECT * FROM updates');
|
||||
const rows = statement.all() as UpdateRow[];
|
||||
return rows;
|
||||
} catch (error) {
|
||||
logger.error('getUpdates', error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
// add a single update to SQLite
|
||||
addUpdateToSQLite(updates: Uint8Array[]) {
|
||||
// batch write instead write per key stroke?
|
||||
try {
|
||||
assert(this.db, 'db is not connected');
|
||||
const start = performance.now();
|
||||
const statement = this.db.prepare(
|
||||
'INSERT INTO updates (data) VALUES (?)'
|
||||
);
|
||||
const insertMany = this.db.transaction(updates => {
|
||||
for (const d of updates) {
|
||||
statement.run(d);
|
||||
}
|
||||
});
|
||||
|
||||
insertMany(updates);
|
||||
|
||||
logger.debug(
|
||||
`[SQLiteAdapter][${this.role}] addUpdateToSQLite`,
|
||||
'length:',
|
||||
updates.length,
|
||||
performance.now() - start,
|
||||
'ms'
|
||||
);
|
||||
} catch (error) {
|
||||
logger.error('addUpdateToSQLite', error);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,110 +0,0 @@
|
||||
import { app } from 'electron';
|
||||
import {
|
||||
defer,
|
||||
firstValueFrom,
|
||||
from,
|
||||
fromEvent,
|
||||
interval,
|
||||
merge,
|
||||
Observable,
|
||||
} from 'rxjs';
|
||||
import {
|
||||
distinctUntilChanged,
|
||||
filter,
|
||||
ignoreElements,
|
||||
last,
|
||||
map,
|
||||
shareReplay,
|
||||
startWith,
|
||||
switchMap,
|
||||
takeUntil,
|
||||
tap,
|
||||
} from 'rxjs/operators';
|
||||
|
||||
import { appContext } from '../context';
|
||||
import { logger } from '../logger';
|
||||
import { getWorkspaceMeta$ } from '../workspace';
|
||||
import { SecondaryWorkspaceSQLiteDB } from './secondary-db';
|
||||
import type { WorkspaceSQLiteDB } from './workspace-db-adapter';
|
||||
import { openWorkspaceDatabase } from './workspace-db-adapter';
|
||||
|
||||
const db$Map = new Map<string, Observable<WorkspaceSQLiteDB>>();
|
||||
|
||||
const beforeQuit$ = defer(() => fromEvent(app, 'before-quit'));
|
||||
|
||||
function getWorkspaceDB$(id: string) {
|
||||
if (!db$Map.has(id)) {
|
||||
db$Map.set(
|
||||
id,
|
||||
from(openWorkspaceDatabase(appContext, id)).pipe(
|
||||
shareReplay(1),
|
||||
switchMap(db => {
|
||||
return startPollingSecondaryDB(db).pipe(
|
||||
ignoreElements(),
|
||||
startWith(db),
|
||||
takeUntil(beforeQuit$),
|
||||
tap({
|
||||
complete: () => {
|
||||
logger.info('[ensureSQLiteDB] close db connection');
|
||||
db.destroy();
|
||||
db$Map.delete(id);
|
||||
},
|
||||
})
|
||||
);
|
||||
}),
|
||||
shareReplay(1)
|
||||
)
|
||||
);
|
||||
}
|
||||
return db$Map.get(id)!;
|
||||
}
|
||||
|
||||
function startPollingSecondaryDB(db: WorkspaceSQLiteDB) {
|
||||
const meta$ = getWorkspaceMeta$(db.workspaceId);
|
||||
const secondaryDB$ = meta$.pipe(
|
||||
map(meta => meta?.secondaryDBPath),
|
||||
distinctUntilChanged(),
|
||||
filter((p): p is string => !!p),
|
||||
switchMap(path => {
|
||||
return new Observable<SecondaryWorkspaceSQLiteDB>(observer => {
|
||||
const secondaryDB = new SecondaryWorkspaceSQLiteDB(path, db);
|
||||
observer.next(secondaryDB);
|
||||
return () => {
|
||||
logger.info(
|
||||
'[ensureSQLiteDB] close secondary db connection',
|
||||
secondaryDB.path
|
||||
);
|
||||
secondaryDB.destroy();
|
||||
};
|
||||
});
|
||||
}),
|
||||
takeUntil(db.update$.pipe(last())),
|
||||
shareReplay(1)
|
||||
);
|
||||
|
||||
const firstDelayedTick$ = defer(() => {
|
||||
return new Promise<number>(resolve =>
|
||||
setTimeout(() => {
|
||||
resolve(0);
|
||||
}, 1000)
|
||||
);
|
||||
});
|
||||
|
||||
// pull every 30 seconds
|
||||
const poll$ = merge(firstDelayedTick$, interval(30000)).pipe(
|
||||
switchMap(() => secondaryDB$),
|
||||
tap({
|
||||
next: secondaryDB => {
|
||||
secondaryDB.pull();
|
||||
},
|
||||
}),
|
||||
takeUntil(db.update$.pipe(last())),
|
||||
shareReplay(1)
|
||||
);
|
||||
|
||||
return poll$;
|
||||
}
|
||||
|
||||
export function ensureSQLiteDB(id: string) {
|
||||
return firstValueFrom(getWorkspaceDB$(id));
|
||||
}
|
||||
@@ -1,38 +0,0 @@
import type { Database } from 'better-sqlite3';
import sqlite from 'better-sqlite3';

import { logger } from '../logger';

export function isValidateDB(db: Database) {
// check if db has two tables, one for updates and one for blobs
const statement = db.prepare(
`SELECT name FROM sqlite_schema WHERE type='table'`
);
const rows = statement.all() as { name: string }[];
const tableNames = rows.map(row => row.name);
if (!tableNames.includes('updates') || !tableNames.includes('blobs')) {
return false;
}
}

export function isValidDBFile(path: string) {
let db: Database | null = null;
try {
db = sqlite(path);
// check if db has two tables, one for updates and one for blobs
const statement = db.prepare(
`SELECT name FROM sqlite_schema WHERE type='table'`
);
const rows = statement.all() as { name: string }[];
const tableNames = rows.map(row => row.name);
if (!tableNames.includes('updates') || !tableNames.includes('blobs')) {
return false;
}
return true;
} catch (error) {
logger.error('isValidDBFile', error);
return false;
} finally {
db?.close();
}
}
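A minimal sketch of how `isValidDBFile` gates a load-file flow, modelled on the `loadDBFile` dialog tests earlier in this compare view; the module path and handler shape are assumptions:

```ts
import { dialog } from 'electron';

import { isValidDBFile } from './db-file-utils'; // hypothetical module path

export async function pickWorkspaceDBFile() {
  const { filePaths } = await dialog.showOpenDialog({
    properties: ['openFile'],
    filters: [{ name: 'AFFiNE', extensions: ['affine'] }],
  });
  const filePath = filePaths?.[0];
  if (!filePath) {
    return { canceled: true };
  }
  if (!isValidDBFile(filePath)) {
    // mirrors the DB_FILE_INVALID error asserted in the tests above
    return { error: 'DB_FILE_INVALID' };
  }
  return { filePath };
}
```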
@@ -1,18 +0,0 @@
export type MainEventListener = (...args: any[]) => () => void;

export type IsomorphicHandler = (
e: Electron.IpcMainInvokeEvent,
...args: any[]
) => Promise<any>;

export type NamespaceHandlers = {
[key: string]: IsomorphicHandler;
};

export interface WorkspaceMeta {
id: string;
mainDBPath: string;
secondaryDBPath?: string; // assume there will be only one
}

export type YOrigin = 'self' | 'external' | 'upstream' | 'renderer';
@@ -1,49 +0,0 @@
import { BrowserWindow } from 'electron';

import type { GetHTMLOptions } from './types';

async function getHTMLFromWindow(win: BrowserWindow): Promise<string> {
return win.webContents
.executeJavaScript(`document.documentElement.outerHTML;`)
.then(html => html);
}

// For normal web pages, obtaining html can be done directly,
// but for some dynamic web pages, obtaining html should wait for the complete loading of web pages. shouldReGetHTML should be used to judge whether to obtain html again
export async function getHTMLByURL(
url: string,
options: GetHTMLOptions
): Promise<string> {
return new Promise(resolve => {
const { timeout = 10000, shouldReGetHTML } = options;
const window = new BrowserWindow({
show: false,
});
let html = '';
window.loadURL(url);

const timer = setTimeout(() => {
resolve(html);
window.close();
}, timeout);

async function loopHandle() {
html = await getHTMLFromWindow(window);
if (!shouldReGetHTML) {
return html;
}

if (await shouldReGetHTML(html)) {
setTimeout(loopHandle, 1000);
} else {
window.close();
clearTimeout(timer);
resolve(html);
}
}

window.webContents.on('did-finish-load', async () => {
loopHandle();
});
});
}
@@ -1,107 +0,0 @@
|
||||
import type { CheerioAPI, Element } from 'cheerio';
|
||||
import { load } from 'cheerio';
|
||||
|
||||
import type { Context, MetaData, Options, RuleSet } from './types';
|
||||
|
||||
export * from './types';
|
||||
|
||||
import { getHTMLByURL } from './get-html';
|
||||
import { metaDataRules } from './rules';
|
||||
import type { GetMetaDataOptions } from './types';
|
||||
|
||||
function runRule(ruleSet: RuleSet, $: CheerioAPI, context: Context) {
|
||||
let maxScore = 0;
|
||||
let value;
|
||||
|
||||
for (let currRule = 0; currRule < ruleSet.rules.length; currRule++) {
|
||||
const [query, handler] = ruleSet.rules[currRule];
|
||||
const elements = Array.from($(query));
|
||||
|
||||
if (elements.length) {
|
||||
for (const element of elements) {
|
||||
let score = ruleSet.rules.length - currRule;
|
||||
|
||||
if (ruleSet.scorer) {
|
||||
const newScore = ruleSet.scorer(element as Element, score);
|
||||
|
||||
if (newScore) {
|
||||
score = newScore;
|
||||
}
|
||||
}
|
||||
|
||||
if (score > maxScore) {
|
||||
maxScore = score;
|
||||
value = handler(element as Element);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (value) {
|
||||
if (ruleSet.processor) {
|
||||
value = ruleSet.processor(value, context);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
if (ruleSet.defaultValue) {
|
||||
return ruleSet.defaultValue(context);
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
async function getMetaDataByHTML(
|
||||
html: string,
|
||||
url: string,
|
||||
options: GetMetaDataOptions
|
||||
) {
|
||||
const { customRules = {} } = options;
|
||||
const rules: Record<string, RuleSet> = { ...metaDataRules };
|
||||
Object.keys(customRules).forEach((key: string) => {
|
||||
rules[key] = {
|
||||
rules: [...metaDataRules[key].rules, ...customRules[key].rules],
|
||||
defaultValue:
|
||||
customRules[key].defaultValue || metaDataRules[key].defaultValue,
|
||||
processor: customRules[key].processor || metaDataRules[key].processor,
|
||||
};
|
||||
});
|
||||
|
||||
const metadata: MetaData = {};
|
||||
const context: Context = {
|
||||
url,
|
||||
...options,
|
||||
};
|
||||
|
||||
const $ = load(html);
|
||||
|
||||
Object.keys(rules).forEach((key: string) => {
|
||||
const ruleSet = rules[key];
|
||||
metadata[key] = runRule(ruleSet, $, context) || undefined;
|
||||
});
|
||||
|
||||
return metadata;
|
||||
}
|
||||
|
||||
export async function getMetaData(url: string, options: Options = {}) {
|
||||
const { customRules, forceImageHttps, shouldReGetHTML, ...other } = options;
|
||||
const html = await getHTMLByURL(url, {
|
||||
...other,
|
||||
shouldReGetHTML: async html => {
|
||||
const meta = await getMetaDataByHTML(html, url, {
|
||||
customRules,
|
||||
forceImageHttps,
|
||||
});
|
||||
return shouldReGetHTML ? await shouldReGetHTML(meta) : false;
|
||||
},
|
||||
}).catch(() => {
|
||||
// TODO: report error
|
||||
return '';
|
||||
});
|
||||
|
||||
return await getMetaDataByHTML(html, url, {
|
||||
customRules,
|
||||
forceImageHttps,
|
||||
});
|
||||
}
|
||||
@@ -1,690 +0,0 @@
|
||||
import type { RuleSet } from './types';
|
||||
import { getProvider, makeUrlAbsolute, makeUrlSecure, parseUrl } from './utils';
|
||||
|
||||
export const metaDataRules: Record<string, RuleSet> = {
|
||||
title: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:title"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="og:title"][content]', element => element.attribs['content']],
|
||||
[
|
||||
'meta[property="twitter:title"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="twitter:title"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="parsely-title"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="parsely-title"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="sailthru.title"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="sailthru.title"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['title', (element: any) => element.text],
|
||||
],
|
||||
},
|
||||
description: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:description"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:description"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="description" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="description" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="sailthru.description"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="sailthru.description"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="twitter:description"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="twitter:description"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="summary" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="summary" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
],
|
||||
},
|
||||
language: {
|
||||
rules: [
|
||||
['html[lang]', element => element.attribs['lang']],
|
||||
[
|
||||
'meta[property="language" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="language" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="og:locale"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:locale"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
],
|
||||
processor: (language: any) => language.split('-')[0],
|
||||
},
|
||||
type: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:type"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="og:type"][content]', element => element.attribs['content']],
|
||||
[
|
||||
'meta[property="parsely-type"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="parsely-type"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="medium"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="medium"][content]', element => element.attribs['content']],
|
||||
],
|
||||
},
|
||||
url: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="og:url"][content]', element => element.attribs['content']],
|
||||
[
|
||||
'meta[property="al:web:url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="al:web:url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="parsely-link"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="parsely-link"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['a.amp-canurl', element => element.attribs['href']],
|
||||
['link[rel="canonical"][href]', element => element.attribs['href']],
|
||||
],
|
||||
defaultValue: context => context.url,
|
||||
processor: (url: any, context) => makeUrlAbsolute(context.url, url),
|
||||
},
|
||||
provider: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:site_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:site_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="publisher" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="publisher" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="application-name" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="application-name" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="al:android:app_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="al:android:app_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="al:iphone:app_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="al:iphone:app_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="al:ipad:app_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="al:ipad:app_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="al:ios:app_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="al:ios:app_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="twitter:app:name:iphone"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="twitter:app:name:iphone"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="twitter:app:name:ipad"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="twitter:app:name:ipad"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="twitter:app:name:googleplay"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="twitter:app:name:googleplay"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
],
|
||||
defaultValue: context => getProvider(parseUrl(context.url)),
|
||||
},
|
||||
keywords: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="keywords" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="keywords" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="parsely-tags"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="parsely-tags"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="sailthru.tags"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="sailthru.tags"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="article:tag" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="article:tag" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="book:tag" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="book:tag" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="topic" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="topic" i][content]', element => element.attribs['content']],
|
||||
],
|
||||
processor: (keywords: any) =>
|
||||
keywords.split(',').map((keyword: string) => keyword.trim()),
|
||||
},
|
||||
section: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="article:section"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="article:section"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="category"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="category"][content]', element => element.attribs['content']],
|
||||
],
|
||||
},
|
||||
author: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="author" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="author" i][content]', element => element.attribs['content']],
|
||||
[
|
||||
'meta[property="article:author"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="article:author"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="book:author"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="book:author"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="parsely-author"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="parsely-author"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="sailthru.author"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="sailthru.author"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['a[class*="author" i]', (element: any) => element.text],
|
||||
['[rel="author"]', (element: any) => element.text],
|
||||
[
|
||||
'meta[property="twitter:creator"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="twitter:creator"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="profile:username"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="profile:username"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
],
|
||||
},
|
||||
published: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="article:published_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="article:published_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="published_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="published_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="parsely-pub-date"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="parsely-pub-date"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="sailthru.date"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="sailthru.date"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="date" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="date" i][content]', element => element.attribs['content']],
|
||||
[
|
||||
'meta[property="release_date" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="release_date" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['time[datetime]', element => element.attribs['datetime']],
|
||||
['time[datetime][pubdate]', element => element.attribs['datetime']],
|
||||
],
|
||||
processor: (value: any) =>
|
||||
Date.parse(value.toString())
|
||||
? new Date(value.toString()).toISOString()
|
||||
: undefined,
|
||||
},
|
||||
modified: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:updated_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:updated_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="article:modified_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="article:modified_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="updated_time" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="updated_time" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="modified_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="modified_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="revised"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="revised"][content]', element => element.attribs['content']],
|
||||
],
|
||||
processor: (value: any) =>
|
||||
Date.parse(value.toString())
|
||||
? new Date(value.toString()).toISOString()
|
||||
: undefined,
|
||||
},
|
||||
robots: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="robots" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="robots" i][content]', element => element.attribs['content']],
|
||||
],
|
||||
processor: (keywords: any) =>
|
||||
keywords.split(',').map((keyword: string) => keyword.trim()),
|
||||
},
|
||||
copyright: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="copyright" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="copyright" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
],
|
||||
},
|
||||
email: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="email" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="email" i][content]', element => element.attribs['content']],
|
||||
[
|
||||
'meta[property="reply-to" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="reply-to" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
],
|
||||
},
|
||||
twitter: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="twitter:site"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="twitter:site"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
],
|
||||
},
|
||||
facebook: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="fb:pages"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="fb:pages"][content]', element => element.attribs['content']],
|
||||
],
|
||||
},
|
||||
image: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:image:secure_url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:image:secure_url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="og:image:url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:image:url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="og:image"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="og:image"][content]', element => element.attribs['content']],
|
||||
[
|
||||
'meta[property="twitter:image"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="twitter:image"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="twitter:image:src"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="twitter:image:src"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="thumbnail"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="thumbnail"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="parsely-image-url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="parsely-image-url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="sailthru.image.full"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="sailthru.image.full"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
],
|
||||
processor: (imageUrl: any, context) =>
|
||||
context.forceImageHttps === true
|
||||
? makeUrlSecure(makeUrlAbsolute(context.url, imageUrl))
|
||||
: makeUrlAbsolute(context.url, imageUrl),
|
||||
},
|
||||
icon: {
|
||||
rules: [
|
||||
[
|
||||
'link[rel="apple-touch-icon"][href]',
|
||||
element => element.attribs['href'],
|
||||
],
|
||||
[
|
||||
'link[rel="apple-touch-icon-precomposed"][href]',
|
||||
element => element.attribs['href'],
|
||||
],
|
||||
['link[rel="icon" i][href]', element => element.attribs['href']],
|
||||
['link[rel="fluid-icon"][href]', element => element.attribs['href']],
|
||||
['link[rel="shortcut icon"][href]', element => element.attribs['href']],
|
||||
['link[rel="Shortcut Icon"][href]', element => element.attribs['href']],
|
||||
['link[rel="mask-icon"][href]', element => element.attribs['href']],
|
||||
],
|
||||
scorer: element => {
|
||||
const sizes = element.attribs['sizes'];
|
||||
if (sizes) {
|
||||
const sizeMatches = sizes.match(/\d+/g);
|
||||
if (sizeMatches) {
|
||||
const parsed = parseInt(sizeMatches[0]);
|
||||
if (!isNaN(parsed)) {
|
||||
return parsed;
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
defaultValue: context => makeUrlAbsolute(context.url, '/favicon.ico'),
|
||||
processor: (iconUrl, context) =>
|
||||
context.forceImageHttps === true
|
||||
? makeUrlSecure(makeUrlAbsolute(context.url, iconUrl))
|
||||
: makeUrlAbsolute(context.url, iconUrl),
|
||||
},
|
||||
video: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:video:secure_url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:video:secure_url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="og:video:url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:video:url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="og:video"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="og:video"][content]', element => element.attribs['content']],
|
||||
],
|
||||
processor: (imageUrl: any, context) =>
|
||||
context.forceImageHttps === true
|
||||
? makeUrlSecure(makeUrlAbsolute(context.url, imageUrl))
|
||||
: makeUrlAbsolute(context.url, imageUrl),
|
||||
},
|
||||
audio: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:audio:secure_url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:audio:secure_url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="og:audio:url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:audio:url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="og:audio"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="og:audio"][content]', element => element.attribs['content']],
|
||||
],
|
||||
processor: (imageUrl: any, context) =>
|
||||
context.forceImageHttps === true
|
||||
? makeUrlSecure(makeUrlAbsolute(context.url, imageUrl))
|
||||
: makeUrlAbsolute(context.url, imageUrl),
|
||||
},
|
||||
};
|
||||
@@ -1,43 +0,0 @@
import type { Element } from 'cheerio';

export type MetaData = {
  title?: string;
  description?: string;
  icon?: string;
  image?: string;
  keywords?: string[];
  language?: string;
  type?: string;
  url?: string;
  provider?: string;

  [x: string]: string | string[] | undefined;
};

export type MetadataRule = [string, (el: Element) => string | null];

export type Context = {
  url: string;
} & GetMetaDataOptions;

export type RuleSet = {
  rules: MetadataRule[];
  defaultValue?: (context: Context) => string | string[];
  scorer?: (el: Element, score: any) => any;
  processor?: (input: any, context: Context) => any;
};

export type GetMetaDataOptions = {
  customRules?: Record<string, RuleSet>;
  forceImageHttps?: boolean;
};

export type GetHTMLOptions = {
  timeout?: number;
  shouldReGetHTML?: (currentHTML: string) => boolean | Promise<boolean>;
};

export type Options = {
  shouldReGetHTML?: (metaData: MetaData) => boolean | Promise<boolean>;
} & GetMetaDataOptions &
  Omit<GetHTMLOptions, 'shouldReGetHTML'>;
@@ -1,28 +0,0 @@
import urlparse from 'url';

export function makeUrlAbsolute(base: string, relative: string): string {
  const relativeParsed = urlparse.parse(relative);

  if (relativeParsed.host === null) {
    return urlparse.resolve(base, relative);
  }

  return relative;
}

export function makeUrlSecure(url: string): string {
  return url.replace(/^http:/, 'https:');
}

export function parseUrl(url: string): string {
  return urlparse.parse(url).hostname || '';
}

export function getProvider(host: string): string {
  return host
    .replace(/www[a-zA-Z0-9]*\./, '')
    .replace('.co.', '.')
    .split('.')
    .slice(0, -1)
    .join(' ');
}
@@ -1,11 +0,0 @@
export function getTime() {
  return new Date().getTime();
}

export const isMacOS = () => {
  return process.platform === 'darwin';
};

export const isWindows = () => {
  return process.platform === 'win32';
};
@@ -1,35 +0,0 @@
import path from 'node:path';
import { Worker } from 'node:worker_threads';

import { mergeUpdate } from './merge-update';

export function mergeUpdateWorker(updates: Uint8Array[]) {
  // fallback to main thread if worker is disabled (in vitest)
  if (process.env.USE_WORKER !== 'true') {
    return mergeUpdate(updates);
  }
  return new Promise<Uint8Array>((resolve, reject) => {
    // it is intended to have "./workers" in the path
    const workerFile = path.join(__dirname, './workers/merge-update.worker.js');

    // convert updates to SharedArrayBuffer[s]
    const sharedArrayBufferUpdates = updates.map(update => {
      const buffer = new SharedArrayBuffer(update.byteLength);
      const view = new Uint8Array(buffer);
      view.set(update);
      return view;
    });

    const worker = new Worker(workerFile, {
      workerData: sharedArrayBufferUpdates,
    });

    worker.on('message', resolve);
    worker.on('error', reject);
    worker.on('exit', code => {
      if (code !== 0) {
        reject(new Error(`Worker stopped with exit code ${code}`));
      }
    });
  });
}
@@ -1,14 +0,0 @@
import { parentPort, workerData } from 'node:worker_threads';

import { mergeUpdate } from './merge-update';

function getMergeUpdate(updates: Uint8Array[]) {
  const update = mergeUpdate(updates);
  const buffer = new SharedArrayBuffer(update.byteLength);
  const view = new Uint8Array(buffer);
  view.set(update);

  return update;
}

parentPort?.postMessage(getMergeUpdate(workerData));
@@ -1,44 +0,0 @@
import { merge } from 'rxjs';
import { filter, map } from 'rxjs/operators';

import { appContext } from '../context';
import type {
  MainEventListener,
  NamespaceHandlers,
  WorkspaceMeta,
} from '../type';
import { deleteWorkspace, getWorkspaceMeta, listWorkspaces } from './handlers';
import { workspaceSubjects } from './subjects';

export * from './handlers';
export * from './subjects';

export const workspaceEvents = {
  onMetaChange: (
    fn: (meta: { workspaceId: string; meta: WorkspaceMeta }) => void
  ) => {
    const sub = workspaceSubjects.meta.subscribe(fn);
    return () => {
      sub.unsubscribe();
    };
  },
} satisfies Record<string, MainEventListener>;

export const workspaceHandlers = {
  list: async () => listWorkspaces(appContext),
  delete: async (_, id: string) => deleteWorkspace(appContext, id),
  getMeta: async (_, id: string) => {
    return getWorkspaceMeta(appContext, id);
  },
} satisfies NamespaceHandlers;

// used internally. Get a stream of workspace id -> meta
export const getWorkspaceMeta$ = (workspaceId: string) => {
  return merge(
    getWorkspaceMeta(appContext, workspaceId),
    workspaceSubjects.meta.pipe(
      map(meta => meta.meta),
      filter(meta => meta.id === workspaceId)
    )
  );
};
apps/electron/layers/preload/preload.d.ts (vendored)
@@ -1,6 +0,0 @@
/* eslint-disable @typescript-eslint/consistent-type-imports */

declare interface Window {
  apis: import('./src/affine-apis').PreloadHandlers;
  events: import('./src/affine-apis').MainIPCEventMap;
}
@@ -1,97 +0,0 @@
|
||||
/* eslint-disable @typescript-eslint/no-var-requires */
|
||||
|
||||
// NOTE: we will generate preload types from this file
|
||||
import { ipcRenderer } from 'electron';
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-restricted-imports
|
||||
import type {
|
||||
MainIPCEventMap,
|
||||
MainIPCHandlerMap,
|
||||
} from '../../main/src/exposed';
|
||||
|
||||
type WithoutFirstParameter<T> = T extends (_: any, ...args: infer P) => infer R
|
||||
? (...args: P) => R
|
||||
: T;
|
||||
|
||||
type HandlersMap<N extends keyof MainIPCHandlerMap> = {
|
||||
[K in keyof MainIPCHandlerMap[N]]: WithoutFirstParameter<
|
||||
MainIPCHandlerMap[N][K]
|
||||
>;
|
||||
};
|
||||
|
||||
export type PreloadHandlers = {
|
||||
[N in keyof MainIPCHandlerMap]: HandlersMap<N>;
|
||||
};
|
||||
|
||||
type MainExposedMeta = {
|
||||
handlers: [namespace: string, handlerNames: string[]][];
|
||||
events: [namespace: string, eventNames: string[]][];
|
||||
};
|
||||
|
||||
const meta: MainExposedMeta = (() => {
|
||||
const val = process.argv
|
||||
.find(arg => arg.startsWith('--exposed-meta='))
|
||||
?.split('=')[1];
|
||||
|
||||
return val ? JSON.parse(val) : null;
|
||||
})();
|
||||
|
||||
// main handlers that can be invoked from the renderer process
|
||||
const apis: PreloadHandlers = (() => {
|
||||
const { handlers: handlersMeta } = meta;
|
||||
|
||||
const all = handlersMeta.map(([namespace, functionNames]) => {
|
||||
const namespaceApis = functionNames.map(name => {
|
||||
const channel = `${namespace}:${name}`;
|
||||
return [
|
||||
name,
|
||||
(...args: any[]) => {
|
||||
return ipcRenderer.invoke(channel, ...args);
|
||||
},
|
||||
];
|
||||
});
|
||||
return [namespace, Object.fromEntries(namespaceApis)];
|
||||
});
|
||||
|
||||
return Object.fromEntries(all);
|
||||
})();
|
||||
|
||||
// main events that can be listened to from the renderer process
|
||||
const events: MainIPCEventMap = (() => {
|
||||
const { events: eventsMeta } = meta;
|
||||
|
||||
// NOTE: ui may try to listen to a lot of the same events, so we increase the limit...
|
||||
ipcRenderer.setMaxListeners(100);
|
||||
|
||||
const all = eventsMeta.map(([namespace, eventNames]) => {
|
||||
const namespaceEvents = eventNames.map(name => {
|
||||
const channel = `${namespace}:${name}`;
|
||||
return [
|
||||
name,
|
||||
(callback: (...args: any[]) => void) => {
|
||||
const fn: (
|
||||
event: Electron.IpcRendererEvent,
|
||||
...args: any[]
|
||||
) => void = (_, ...args) => {
|
||||
callback(...args);
|
||||
};
|
||||
ipcRenderer.on(channel, fn);
|
||||
return () => {
|
||||
ipcRenderer.off(channel, fn);
|
||||
};
|
||||
},
|
||||
];
|
||||
});
|
||||
return [namespace, Object.fromEntries(namespaceEvents)];
|
||||
});
|
||||
return Object.fromEntries(all);
|
||||
})();
|
||||
|
||||
const appInfo = {
|
||||
electron: true,
|
||||
};
|
||||
|
||||
export { apis, appInfo, events };
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-restricted-imports
|
||||
export type { MainIPCEventMap } from '../../main/src/exposed';
|
||||
@@ -1,18 +0,0 @@
/**
 * @module preload
 */

import { contextBridge } from 'electron';

import * as affineApis from './affine-apis';

/**
 * The "Main World" is the JavaScript context that your main renderer code runs in.
 * By default, the page you load in your renderer executes code in this world.
 *
 * @see https://www.electronjs.org/docs/api/context-bridge
 */

contextBridge.exposeInMainWorld('apis', affineApis.apis);
contextBridge.exposeInMainWorld('events', affineApis.events);
contextBridge.exposeInMainWorld('appInfo', affineApis.appInfo);
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "@affine/electron",
|
||||
"private": true,
|
||||
"version": "0.6.0",
|
||||
"version": "0.6.1",
|
||||
"author": "affine",
|
||||
"repository": {
|
||||
"url": "https://github.com/toeverything/AFFiNE",
|
||||
@@ -10,24 +10,29 @@
|
||||
"description": "AFFiNE App",
|
||||
"homepage": "https://github.com/toeverything/AFFiNE",
|
||||
"scripts": {
|
||||
"dev": "yarn electron-rebuild && yarn cross-env DEV_SERVER_URL=http://localhost:8080 node scripts/dev.mjs",
|
||||
"watch": "yarn electron-rebuild && yarn cross-env DEV_SERVER_URL=http://localhost:8080 node scripts/dev.mjs --watch",
|
||||
"prod": "yarn electron-rebuild && yarn node scripts/dev.mjs",
|
||||
"build-layers": "zx scripts/build-layers.mjs",
|
||||
"dev": "yarn cross-env DEV_SERVER_URL=http://localhost:8080 node scripts/dev.mjs",
|
||||
"watch": "yarn cross-env DEV_SERVER_URL=http://localhost:8080 node scripts/dev.mjs --watch",
|
||||
"prod": "yarn node scripts/dev.mjs",
|
||||
"build": "zx scripts/build-layers.mjs",
|
||||
"generate-assets": "zx scripts/generate-assets.mjs",
|
||||
"package": "electron-forge package",
|
||||
"make": "electron-forge make",
|
||||
"rebuild:for-unit-test": "yarn rebuild better-sqlite3",
|
||||
"rebuild:for-electron": "yarn electron-rebuild",
|
||||
"test": "playwright test"
|
||||
"test": "DEBUG=pw:browser playwright test"
|
||||
},
|
||||
"config": {
|
||||
"forge": "./forge.config.js"
|
||||
},
|
||||
"main": "./dist/layers/main/index.js",
|
||||
"main": "./dist/main.js",
|
||||
"exports": {
|
||||
"./scripts/plugins/build-plugins.mjs": "./scripts/plugins/build-plugins.mjs"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@affine-test/kit": "workspace:*",
|
||||
"@affine/native": "workspace:*",
|
||||
"@blocksuite/blocks": "0.0.0-20230607055421-9b20fcaf-nightly",
|
||||
"@blocksuite/editor": "0.0.0-20230607055421-9b20fcaf-nightly",
|
||||
"@blocksuite/lit": "0.0.0-20230607055421-9b20fcaf-nightly",
|
||||
"@blocksuite/store": "0.0.0-20230607055421-9b20fcaf-nightly",
|
||||
"@electron-forge/cli": "^6.1.1",
|
||||
"@electron-forge/core": "^6.1.1",
|
||||
"@electron-forge/core-utils": "^6.1.1",
|
||||
@@ -36,29 +41,30 @@
|
||||
"@electron-forge/maker-squirrel": "^6.1.1",
|
||||
"@electron-forge/maker-zip": "^6.1.1",
|
||||
"@electron-forge/shared-types": "^6.1.1",
|
||||
"@electron/rebuild": "^3.2.13",
|
||||
"@electron/remote": "2.0.9",
|
||||
"@types/better-sqlite3": "^7.6.4",
|
||||
"@toeverything/infra": "workspace:*",
|
||||
"@types/fs-extra": "^11.0.1",
|
||||
"@types/uuid": "^9.0.1",
|
||||
"cross-env": "7.0.3",
|
||||
"electron": "25.0.0",
|
||||
"electron": "=25.0.1",
|
||||
"electron-log": "^5.0.0-beta.24",
|
||||
"electron-squirrel-startup": "1.0.0",
|
||||
"electron-window-state": "^5.0.3",
|
||||
"esbuild": "^0.17.19",
|
||||
"fs-extra": "^11.1.1",
|
||||
"jotai": "^2.1.1",
|
||||
"playwright": "=1.33.0",
|
||||
"ts-node": "^10.9.1",
|
||||
"undici": "^5.22.1",
|
||||
"uuid": "^9.0.0",
|
||||
"which": "^3.0.1",
|
||||
"zx": "^7.2.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"better-sqlite3": "^8.4.0",
|
||||
"cheerio": "^1.0.0-rc.12",
|
||||
"chokidar": "^3.5.3",
|
||||
"@toeverything/plugin-infra": "workspace:*",
|
||||
"async-call-rpc": "^6.3.1",
|
||||
"electron-updater": "^5.3.0",
|
||||
"link-preview-js": "^3.0.4",
|
||||
"lodash-es": "^4.17.21",
|
||||
"nanoid": "^4.0.2",
|
||||
"rxjs": "^7.8.1",
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
#!/usr/bin/env zx
|
||||
import 'zx/globals';
|
||||
|
||||
import { resolve } from 'node:path';
|
||||
|
||||
import { spawnSync } from 'child_process';
|
||||
import * as esbuild from 'esbuild';
|
||||
|
||||
import { config } from './common.mjs';
|
||||
import { config, rootDir } from './common.mjs';
|
||||
|
||||
const NODE_ENV =
|
||||
process.env.NODE_ENV === 'development' ? 'development' : 'production';
|
||||
@@ -15,12 +18,20 @@ if (process.platform === 'win32') {
|
||||
|
||||
async function buildLayers() {
|
||||
const common = config();
|
||||
await esbuild.build(common.preload);
|
||||
console.log('Build plugin infra');
|
||||
spawnSync('yarn', ['build'], {
|
||||
stdio: 'inherit',
|
||||
cwd: resolve(rootDir, './packages/plugin-infra'),
|
||||
});
|
||||
|
||||
console.log('Build plugins');
|
||||
await import('./plugins/build-plugins.mjs');
|
||||
|
||||
await esbuild.build(common.workers);
|
||||
await esbuild.build({
|
||||
...common.main,
|
||||
...common.layers,
|
||||
define: {
|
||||
...common.main.define,
|
||||
...common.define,
|
||||
'process.env.NODE_ENV': `"${NODE_ENV}"`,
|
||||
'process.env.BUILD_TYPE': `"${process.env.BUILD_TYPE || 'stable'}"`,
|
||||
},
|
||||
|
||||
@@ -2,7 +2,10 @@ import { resolve } from 'node:path';
|
||||
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
export const root = fileURLToPath(new URL('..', import.meta.url));
|
||||
export const electronDir = fileURLToPath(new URL('..', import.meta.url));
|
||||
|
||||
export const rootDir = resolve(electronDir, '..', '..');
|
||||
|
||||
export const NODE_MAJOR_VERSION = 18;
|
||||
|
||||
// hard-coded for now:
|
||||
@@ -15,7 +18,7 @@ const mode = (process.env.NODE_ENV = process.env.NODE_ENV || 'development');
|
||||
// List of env that will be replaced by esbuild
|
||||
const ENV_MACROS = ['AFFINE_GOOGLE_CLIENT_ID', 'AFFINE_GOOGLE_CLIENT_SECRET'];
|
||||
|
||||
/** @return {{main: import('esbuild').BuildOptions, preload: import('esbuild').BuildOptions}} */
|
||||
/** @return {{layers: import('esbuild').BuildOptions, workers: import('esbuild').BuildOptions}} */
|
||||
export const config = () => {
|
||||
const define = Object.fromEntries([
|
||||
...ENV_MACROS.map(key => [
|
||||
@@ -31,16 +34,18 @@ export const config = () => {
|
||||
}
|
||||
|
||||
return {
|
||||
main: {
|
||||
layers: {
|
||||
entryPoints: [
|
||||
resolve(root, './layers/main/src/index.ts'),
|
||||
resolve(root, './layers/main/src/workers/merge-update.worker.ts'),
|
||||
resolve(electronDir, './src/main/index.ts'),
|
||||
resolve(electronDir, './src/preload/index.ts'),
|
||||
resolve(electronDir, './src/helper/index.ts'),
|
||||
],
|
||||
outdir: resolve(root, './dist/layers/main'),
|
||||
entryNames: '[dir]',
|
||||
outdir: resolve(electronDir, './dist'),
|
||||
bundle: true,
|
||||
target: `node${NODE_MAJOR_VERSION}`,
|
||||
platform: 'node',
|
||||
external: ['electron', 'yjs', 'better-sqlite3', 'electron-updater'],
|
||||
external: ['electron', 'electron-updater', '@toeverything/plugin-infra'],
|
||||
define: define,
|
||||
format: 'cjs',
|
||||
loader: {
|
||||
@@ -49,14 +54,23 @@ export const config = () => {
|
||||
assetNames: '[name]',
|
||||
treeShaking: true,
|
||||
},
|
||||
preload: {
|
||||
entryPoints: [resolve(root, './layers/preload/src/index.ts')],
|
||||
outdir: resolve(root, './dist/layers/preload'),
|
||||
workers: {
|
||||
entryPoints: [
|
||||
resolve(electronDir, './src/main/workers/plugin.worker.ts'),
|
||||
],
|
||||
entryNames: '[dir]/[name]',
|
||||
outdir: resolve(electronDir, './dist/workers'),
|
||||
bundle: true,
|
||||
target: `node${NODE_MAJOR_VERSION}`,
|
||||
platform: 'node',
|
||||
external: ['electron'],
|
||||
external: ['@toeverything/plugin-infra', 'async-call-rpc'],
|
||||
define: define,
|
||||
format: 'cjs',
|
||||
loader: {
|
||||
'.node': 'copy',
|
||||
},
|
||||
assetNames: '[name]',
|
||||
treeShaking: true,
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
/* eslint-disable no-async-promise-executor */
|
||||
import { spawn } from 'node:child_process';
|
||||
import { readFileSync } from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import path, { resolve } from 'node:path';
|
||||
|
||||
import electronPath from 'electron';
|
||||
import * as esbuild from 'esbuild';
|
||||
import which from 'which';
|
||||
|
||||
import { config, root } from './common.mjs';
|
||||
import { config, electronDir, rootDir } from './common.mjs';
|
||||
|
||||
// this means we don't spawn electron windows, mainly for testing
|
||||
const watchMode = process.argv.includes('--watch');
|
||||
@@ -21,7 +22,10 @@ const stderrFilterPatterns = [
|
||||
|
||||
// these are set before calling `config`, so we have a chance to override them
|
||||
try {
|
||||
const devJson = readFileSync(path.resolve(root, './dev.json'), 'utf-8');
|
||||
const devJson = readFileSync(
|
||||
path.resolve(electronDir, './dev.json'),
|
||||
'utf-8'
|
||||
);
|
||||
const devEnv = JSON.parse(devJson);
|
||||
Object.assign(process.env, devEnv);
|
||||
} catch (err) {
|
||||
@@ -64,20 +68,29 @@ function spawnOrReloadElectron() {
|
||||
}
|
||||
|
||||
const common = config();
|
||||
const yarnPath = which.sync('yarn');
|
||||
async function watchPlugins() {
|
||||
spawn(yarnPath, ['dev'], {
|
||||
stdio: 'inherit',
|
||||
cwd: resolve(rootDir, './packages/plugin-infra'),
|
||||
});
|
||||
await import('./plugins/dev-plugins.mjs');
|
||||
}
|
||||
|
||||
function watchPreload() {
|
||||
async function watchLayers() {
|
||||
return new Promise(async resolve => {
|
||||
let initialBuild = false;
|
||||
const preloadBuild = await esbuild.context({
|
||||
...common.preload,
|
||||
|
||||
const buildContext = await esbuild.context({
|
||||
...common.layers,
|
||||
plugins: [
|
||||
...(common.preload.plugins ?? []),
|
||||
...(common.layers.plugins ?? []),
|
||||
{
|
||||
name: 'electron-dev:reload-app-on-preload-change',
|
||||
name: 'electron-dev:reload-app-on-layers-change',
|
||||
setup(build) {
|
||||
build.onEnd(() => {
|
||||
if (initialBuild) {
|
||||
console.log(`[preload] has changed, [re]launching electron...`);
|
||||
console.log(`[layers] has changed, [re]launching electron...`);
|
||||
spawnOrReloadElectron();
|
||||
} else {
|
||||
resolve();
|
||||
@@ -88,25 +101,24 @@ function watchPreload() {
|
||||
},
|
||||
],
|
||||
});
|
||||
// watch will trigger build.onEnd() on first run & on subsequent changes
|
||||
await preloadBuild.watch();
|
||||
await buildContext.watch();
|
||||
});
|
||||
}
|
||||
|
||||
async function watchMain() {
|
||||
async function watchWorkers() {
|
||||
return new Promise(async resolve => {
|
||||
let initialBuild = false;
|
||||
|
||||
const mainBuild = await esbuild.context({
|
||||
...common.main,
|
||||
const buildContext = await esbuild.context({
|
||||
...common.workers,
|
||||
plugins: [
|
||||
...(common.main.plugins ?? []),
|
||||
...(common.workers.plugins ?? []),
|
||||
{
|
||||
name: 'electron-dev:reload-app-on-main-change',
|
||||
name: 'electron-dev:reload-app-on-workers-change',
|
||||
setup(build) {
|
||||
build.onEnd(() => {
|
||||
if (initialBuild) {
|
||||
console.log(`[main] has changed, [re]launching electron...`);
|
||||
console.log(`[workers] has changed, [re]launching electron...`);
|
||||
spawnOrReloadElectron();
|
||||
} else {
|
||||
resolve();
|
||||
@@ -117,13 +129,14 @@ async function watchMain() {
|
||||
},
|
||||
],
|
||||
});
|
||||
await mainBuild.watch();
|
||||
await buildContext.watch();
|
||||
});
|
||||
}
|
||||
|
||||
async function main() {
|
||||
await watchMain();
|
||||
await watchPreload();
|
||||
await watchPlugins();
|
||||
await watchLayers();
|
||||
await watchWorkers();
|
||||
|
||||
if (watchMode) {
|
||||
console.log(`Watching for changes...`);
|
||||
|
||||
apps/electron/scripts/generate-assets.mjs (Normal file → Executable file)
@@ -32,10 +32,6 @@ if (releaseVersionEnv && electronPackageJson.version !== releaseVersionEnv) {
|
||||
}
|
||||
// copy web dist files to electron dist
|
||||
|
||||
// step 1: clean up
|
||||
await cleanup();
|
||||
echo('Clean up done');
|
||||
|
||||
if (process.platform === 'win32') {
|
||||
$.shell = 'powershell.exe';
|
||||
$.prefix = '';
|
||||
@@ -43,11 +39,11 @@ if (process.platform === 'win32') {
|
||||
|
||||
cd(repoRootDir);
|
||||
|
||||
// step 2: build web (nextjs) dist
|
||||
// step 1: build web (nextjs) dist
|
||||
if (!process.env.SKIP_WEB_BUILD) {
|
||||
process.env.ENABLE_LEGACY_PROVIDER = 'false';
|
||||
await $`yarn build`;
|
||||
await $`yarn export`;
|
||||
await $`yarn nx build @affine/web`;
|
||||
await $`yarn nx export @affine/web`;
|
||||
|
||||
// step 1.5: amend sourceMappingURL to allow debugging in devtools
|
||||
await glob('**/*.{js,css}', { cwd: affineWebOutDir }).then(files => {
|
||||
@@ -67,7 +63,7 @@ if (!process.env.SKIP_WEB_BUILD) {
|
||||
await fs.move(affineWebOutDir, publicAffineOutDir, { overwrite: true });
|
||||
}
|
||||
|
||||
// step 3: update app-updater.yml content with build type in resources folder
|
||||
// step 2: update app-updater.yml content with build type in resources folder
|
||||
if (process.env.BUILD_TYPE === 'internal') {
|
||||
const appUpdaterYml = path.join(publicDistDir, 'app-update.yml');
|
||||
const appUpdaterYmlContent = await fs.readFile(appUpdaterYml, 'utf-8');
|
||||
@@ -77,15 +73,3 @@ if (process.env.BUILD_TYPE === 'internal') {
|
||||
);
|
||||
await fs.writeFile(appUpdaterYml, newAppUpdaterYmlContent);
|
||||
}
|
||||
|
||||
/// --------
|
||||
/// --------
|
||||
/// --------
|
||||
async function cleanup() {
|
||||
if (!process.env.SKIP_WEB_BUILD) {
|
||||
await fs.emptyDir(publicAffineOutDir);
|
||||
}
|
||||
await fs.emptyDir(path.join(electronRootDir, 'layers', 'main', 'dist'));
|
||||
await fs.emptyDir(path.join(electronRootDir, 'layers', 'preload', 'dist'));
|
||||
await fs.remove(path.join(electronRootDir, 'out'));
|
||||
}
|
||||
|
||||
apps/electron/scripts/plugins/build-plugins.mjs (Executable file)
@@ -0,0 +1,20 @@
#!/usr/bin/env node
import { build } from 'esbuild';

import { definePluginServerConfig } from './utils.mjs';

await build({
  ...definePluginServerConfig('bookmark-block'),
  external: [
    // server.ts
    'link-preview-js',
    // ui.ts
    '@toeverything/plugin-infra',
    '@affine/component',
    '@blocksuite/store',
    '@blocksuite/blocks',
    'react',
    'react-dom',
    'foxact',
  ],
});
apps/electron/scripts/plugins/dev-plugins.mjs (Executable file)
@@ -0,0 +1,22 @@
#!/usr/bin/env node
import { context } from 'esbuild';

import { definePluginServerConfig } from './utils.mjs';

const plugin = await context({
  ...definePluginServerConfig('bookmark-block'),
  external: [
    // server.ts
    'link-preview-js',
    // ui.ts
    '@toeverything/plugin-infra',
    '@affine/component',
    '@blocksuite/store',
    '@blocksuite/blocks',
    'react',
    'react-dom',
    'foxact',
  ],
});

await plugin.watch();
apps/electron/scripts/plugins/utils.mjs (Normal file)
@@ -0,0 +1,34 @@
import { resolve } from 'node:path';
import { fileURLToPath } from 'node:url';

export const rootDir = fileURLToPath(new URL('../../../..', import.meta.url));
export const electronOutputDir = resolve(
  rootDir,
  'apps',
  'electron',
  'dist',
  'plugins'
);
export const pluginDir = resolve(rootDir, 'plugins');

/**
 *
 * @param pluginDirName {string}
 * @return {import('esbuild').BuildOptions}
 */
export function definePluginServerConfig(pluginDirName) {
  const pluginRootDir = resolve(pluginDir, pluginDirName);
  const mainEntryFile = resolve(pluginRootDir, 'src/index.ts');
  const serverOutputDir = resolve(electronOutputDir, pluginDirName);
  return {
    entryPoints: [mainEntryFile],
    platform: 'neutral',
    format: 'esm',
    outExtension: {
      '.js': '.mjs',
    },
    outdir: serverOutputDir,
    bundle: true,
    splitting: true,
  };
}
@@ -1,56 +1,26 @@
|
||||
import path from 'node:path';
|
||||
import { setTimeout } from 'node:timers/promises';
|
||||
|
||||
import fs from 'fs-extra';
|
||||
import { v4 } from 'uuid';
|
||||
import { afterEach, beforeEach, expect, test, vi } from 'vitest';
|
||||
|
||||
const tmpDir = path.join(__dirname, 'tmp');
|
||||
const appDataPath = path.join(tmpDir, 'app-data');
|
||||
|
||||
const registeredHandlers = new Map<
|
||||
string,
|
||||
((...args: any[]) => Promise<any>)[]
|
||||
>();
|
||||
|
||||
const SESSION_DATA_PATH = path.join(tmpDir, 'affine-test');
|
||||
const DOCUMENTS_PATH = path.join(tmpDir, 'affine-test-documents');
|
||||
|
||||
const electronModule = {
|
||||
app: {
|
||||
getPath: (name: string) => {
|
||||
if (name === 'sessionData') {
|
||||
return SESSION_DATA_PATH;
|
||||
} else if (name === 'documents') {
|
||||
return DOCUMENTS_PATH;
|
||||
}
|
||||
throw new Error('not implemented');
|
||||
},
|
||||
name: 'affine-test',
|
||||
on: (name: string, callback: (...args: any[]) => any) => {
|
||||
const handlers = registeredHandlers.get(name) || [];
|
||||
handlers.push(callback);
|
||||
registeredHandlers.set(name, handlers);
|
||||
},
|
||||
addEventListener: (...args: any[]) => {
|
||||
// @ts-ignore
|
||||
electronModule.app.on(...args);
|
||||
},
|
||||
removeEventListener: () => {},
|
||||
vi.doMock('../../main-rpc', () => ({
|
||||
mainRPC: {
|
||||
getPath: async () => appDataPath,
|
||||
},
|
||||
shell: {} as Partial<Electron.Shell>,
|
||||
dialog: {} as Partial<Electron.Dialog>,
|
||||
};
|
||||
|
||||
const runHandler = (key: string) => {
|
||||
registeredHandlers.get(key)?.forEach(handler => handler());
|
||||
};
|
||||
|
||||
// dynamically import handlers so that we can inject local variables to mocks
|
||||
vi.doMock('electron', () => {
|
||||
return electronModule;
|
||||
});
|
||||
}));
|
||||
|
||||
const constructorStub = vi.fn();
|
||||
const destroyStub = vi.fn();
|
||||
destroyStub.mockReturnValue(Promise.resolve());
|
||||
|
||||
function existProcess() {
|
||||
process.emit('beforeExit', 0);
|
||||
}
|
||||
|
||||
vi.doMock('../secondary-db', () => {
|
||||
return {
|
||||
@@ -59,6 +29,10 @@ vi.doMock('../secondary-db', () => {
|
||||
constructorStub(...args);
|
||||
}
|
||||
|
||||
connectIfNeeded = () => Promise.resolve();
|
||||
|
||||
pull = () => Promise.resolve();
|
||||
|
||||
destroy = destroyStub;
|
||||
},
|
||||
};
|
||||
@@ -69,7 +43,11 @@ beforeEach(() => {
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
runHandler('before-quit');
|
||||
existProcess();
|
||||
// wait for the db to be closed on Windows
|
||||
if (process.platform === 'win32') {
|
||||
await setTimeout(200);
|
||||
}
|
||||
await fs.remove(tmpDir);
|
||||
vi.useRealTimers();
|
||||
});
|
||||
@@ -98,42 +76,51 @@ test('db should be destroyed when app quits', async () => {
|
||||
expect(db0.db).not.toBeNull();
|
||||
expect(db1.db).not.toBeNull();
|
||||
|
||||
runHandler('before-quit');
|
||||
existProcess();
|
||||
|
||||
// wait the async `db.destroy()` to be called
|
||||
await setTimeout(100);
|
||||
|
||||
expect(db0.db).toBeNull();
|
||||
expect(db1.db).toBeNull();
|
||||
});
|
||||
|
||||
test('db should be removed in db$Map after destroyed', async () => {
|
||||
const { ensureSQLiteDB, db$Map } = await import('../ensure-db');
|
||||
const workspaceId = v4();
|
||||
const db = await ensureSQLiteDB(workspaceId);
|
||||
await db.destroy();
|
||||
await setTimeout(100);
|
||||
expect(db$Map.has(workspaceId)).toBe(false);
|
||||
});
|
||||
|
||||
test('if db has a secondary db path, we should also poll that', async () => {
|
||||
const { ensureSQLiteDB } = await import('../ensure-db');
|
||||
const { appContext } = await import('../../context');
|
||||
const { storeWorkspaceMeta } = await import('../../workspace');
|
||||
const workspaceId = v4();
|
||||
await storeWorkspaceMeta(appContext, workspaceId, {
|
||||
await storeWorkspaceMeta(workspaceId, {
|
||||
secondaryDBPath: path.join(tmpDir, 'secondary.db'),
|
||||
});
|
||||
|
||||
const db = await ensureSQLiteDB(workspaceId);
|
||||
|
||||
await vi.advanceTimersByTimeAsync(1500);
|
||||
|
||||
// not sure why but we still need to wait with real timer
|
||||
await new Promise(resolve => setTimeout(resolve, 100));
|
||||
await setTimeout(10);
|
||||
|
||||
expect(constructorStub).toBeCalledTimes(1);
|
||||
expect(constructorStub).toBeCalledWith(path.join(tmpDir, 'secondary.db'), db);
|
||||
|
||||
// if secondary meta is changed
|
||||
await storeWorkspaceMeta(appContext, workspaceId, {
|
||||
await storeWorkspaceMeta(workspaceId, {
|
||||
secondaryDBPath: path.join(tmpDir, 'secondary2.db'),
|
||||
});
|
||||
|
||||
await vi.advanceTimersByTimeAsync(1500);
|
||||
// wait the async `db.destroy()` to be called
|
||||
await setTimeout(100);
|
||||
expect(constructorStub).toBeCalledTimes(2);
|
||||
expect(destroyStub).toBeCalledTimes(1);
|
||||
|
||||
// if secondary meta is changed (but another workspace)
|
||||
await storeWorkspaceMeta(appContext, v4(), {
|
||||
await storeWorkspaceMeta(v4(), {
|
||||
secondaryDBPath: path.join(tmpDir, 'secondary3.db'),
|
||||
});
|
||||
await vi.advanceTimersByTimeAsync(1500);
|
||||
@@ -141,7 +128,7 @@ test('if db has a secondary db path, we should also poll that', async () => {
|
||||
expect(destroyStub).toBeCalledTimes(1);
|
||||
|
||||
// if primary is destroyed, secondary should also be destroyed
|
||||
db.destroy();
|
||||
await new Promise(resolve => setTimeout(resolve, 100));
|
||||
await db.destroy();
|
||||
await setTimeout(100);
|
||||
expect(destroyStub).toBeCalledTimes(2);
|
||||
});
|
||||
@@ -5,21 +5,19 @@ import { v4 } from 'uuid';
|
||||
import { afterEach, expect, test, vi } from 'vitest';
|
||||
import * as Y from 'yjs';
|
||||
|
||||
import type { AppContext } from '../../context';
|
||||
import { dbSubjects } from '../subjects';
|
||||
|
||||
const tmpDir = path.join(__dirname, 'tmp');
|
||||
const appDataPath = path.join(tmpDir, 'app-data');
|
||||
|
||||
const testAppContext: AppContext = {
|
||||
appDataPath: path.join(tmpDir, 'test-data'),
|
||||
appName: 'test',
|
||||
};
|
||||
vi.doMock('../../main-rpc', () => ({
|
||||
mainRPC: {
|
||||
getPath: async () => appDataPath,
|
||||
},
|
||||
}));
|
||||
|
||||
afterEach(async () => {
|
||||
if (process.platform !== 'win32') {
|
||||
// hmmm ....
|
||||
await fs.remove(tmpDir);
|
||||
}
|
||||
await fs.remove(tmpDir);
|
||||
});
|
||||
|
||||
function getTestUpdates() {
|
||||
@@ -33,14 +31,14 @@ function getTestUpdates() {
|
||||
test('can create new db file if not exists', async () => {
|
||||
const { openWorkspaceDatabase } = await import('../workspace-db-adapter');
|
||||
const workspaceId = v4();
|
||||
const db = await openWorkspaceDatabase(testAppContext, workspaceId);
|
||||
const db = await openWorkspaceDatabase(workspaceId);
|
||||
const dbPath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
appDataPath,
|
||||
`workspaces/${workspaceId}`,
|
||||
`storage.db`
|
||||
);
|
||||
expect(await fs.exists(dbPath)).toBe(true);
|
||||
db.destroy();
|
||||
await db.destroy();
|
||||
});
|
||||
|
||||
test('on applyUpdate (from self), will not trigger update', async () => {
|
||||
@@ -48,11 +46,11 @@ test('on applyUpdate (from self), will not trigger update', async () => {
|
||||
const workspaceId = v4();
|
||||
const onUpdate = vi.fn();
|
||||
|
||||
const db = await openWorkspaceDatabase(testAppContext, workspaceId);
|
||||
const db = await openWorkspaceDatabase(workspaceId);
|
||||
db.update$.subscribe(onUpdate);
|
||||
db.applyUpdate(getTestUpdates(), 'self');
|
||||
expect(onUpdate).not.toHaveBeenCalled();
|
||||
db.destroy();
|
||||
await db.destroy();
|
||||
});
|
||||
|
||||
test('on applyUpdate (from renderer), will trigger update', async () => {
|
||||
@@ -61,13 +59,13 @@ test('on applyUpdate (from renderer), will trigger update', async () => {
|
||||
const onUpdate = vi.fn();
|
||||
const onExternalUpdate = vi.fn();
|
||||
|
||||
const db = await openWorkspaceDatabase(testAppContext, workspaceId);
|
||||
const db = await openWorkspaceDatabase(workspaceId);
|
||||
db.update$.subscribe(onUpdate);
|
||||
const sub = dbSubjects.externalUpdate.subscribe(onExternalUpdate);
|
||||
db.applyUpdate(getTestUpdates(), 'renderer');
|
||||
expect(onUpdate).toHaveBeenCalled(); // not yet updated
|
||||
expect(onUpdate).toHaveBeenCalled();
|
||||
sub.unsubscribe();
|
||||
db.destroy();
|
||||
await db.destroy();
|
||||
});
|
||||
|
||||
test('on applyUpdate (from external), will trigger update & send external update event', async () => {
|
||||
@@ -76,26 +74,26 @@ test('on applyUpdate (from external), will trigger update & send external update
|
||||
const onUpdate = vi.fn();
|
||||
const onExternalUpdate = vi.fn();
|
||||
|
||||
const db = await openWorkspaceDatabase(testAppContext, workspaceId);
|
||||
const db = await openWorkspaceDatabase(workspaceId);
|
||||
db.update$.subscribe(onUpdate);
|
||||
const sub = dbSubjects.externalUpdate.subscribe(onExternalUpdate);
|
||||
db.applyUpdate(getTestUpdates(), 'external');
|
||||
expect(onUpdate).toHaveBeenCalled();
|
||||
expect(onExternalUpdate).toHaveBeenCalled();
|
||||
sub.unsubscribe();
|
||||
db.destroy();
|
||||
await db.destroy();
|
||||
});
|
||||
|
||||
test('on destroy, check if resources have been released', async () => {
|
||||
const { openWorkspaceDatabase } = await import('../workspace-db-adapter');
|
||||
const workspaceId = v4();
|
||||
const db = await openWorkspaceDatabase(testAppContext, workspaceId);
|
||||
const db = await openWorkspaceDatabase(workspaceId);
|
||||
const updateSub = {
|
||||
complete: vi.fn(),
|
||||
next: vi.fn(),
|
||||
};
|
||||
db.update$ = updateSub as any;
|
||||
db.destroy();
|
||||
await db.destroy();
|
||||
expect(db.db).toBe(null);
|
||||
expect(updateSub.complete).toHaveBeenCalled();
|
||||
});
|
||||
apps/electron/src/helper/db/base-db-adapter.ts (new file, 116 lines)
@@ -0,0 +1,116 @@
|
||||
import { SqliteConnection } from '@affine/native';
|
||||
|
||||
import { logger } from '../logger';
|
||||
|
||||
/**
|
||||
* A base class for SQLite DB adapter that provides basic methods around updates & blobs
|
||||
*/
|
||||
export abstract class BaseSQLiteAdapter {
|
||||
db: SqliteConnection | null = null;
|
||||
abstract role: string;
|
||||
|
||||
constructor(public readonly path: string) {}
|
||||
|
||||
async connectIfNeeded() {
|
||||
if (!this.db) {
|
||||
this.db = new SqliteConnection(this.path);
|
||||
await this.db.connect();
|
||||
logger.info(`[SQLiteAdapter:${this.role}]`, 'connected:', this.path);
|
||||
}
|
||||
return this.db;
|
||||
}
|
||||
|
||||
async destroy() {
|
||||
const { db } = this;
|
||||
this.db = null;
|
||||
// logging after close will sometimes crash the app when quitting
|
||||
logger.info(`[SQLiteAdapter:${this.role}]`, 'destroyed:', this.path);
|
||||
await db?.close();
|
||||
}
|
||||
|
||||
async addBlob(key: string, data: Uint8Array) {
|
||||
try {
|
||||
if (!this.db) {
|
||||
logger.warn(`${this.path} is not connected`);
|
||||
return;
|
||||
}
|
||||
await this.db.addBlob(key, data);
|
||||
} catch (error) {
|
||||
logger.error('addBlob', error);
|
||||
}
|
||||
}
|
||||
|
||||
async getBlob(key: string) {
|
||||
try {
|
||||
if (!this.db) {
|
||||
logger.warn(`${this.path} is not connected`);
|
||||
return;
|
||||
}
|
||||
const blob = await this.db.getBlob(key);
|
||||
return blob?.data;
|
||||
} catch (error) {
|
||||
logger.error('getBlob', error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
async deleteBlob(key: string) {
|
||||
try {
|
||||
if (!this.db) {
|
||||
logger.warn(`${this.path} is not connected`);
|
||||
return;
|
||||
}
|
||||
await this.db.deleteBlob(key);
|
||||
} catch (error) {
|
||||
logger.error(`${this.path} delete blob failed`, error);
|
||||
}
|
||||
}
|
||||
|
||||
async getBlobKeys() {
|
||||
try {
|
||||
if (!this.db) {
|
||||
logger.warn(`${this.path} is not connected`);
|
||||
return [];
|
||||
}
|
||||
return await this.db.getBlobKeys();
|
||||
} catch (error) {
|
||||
logger.error(`getBlobKeys failed`, error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
async getUpdates() {
|
||||
try {
|
||||
if (!this.db) {
|
||||
logger.warn(`${this.path} is not connected`);
|
||||
return [];
|
||||
}
|
||||
return await this.db.getUpdates();
|
||||
} catch (error) {
|
||||
logger.error('getUpdates', error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
// add a single update to SQLite
|
||||
async addUpdateToSQLite(updates: Uint8Array[]) {
|
||||
// batch writes instead of writing per keystroke?
|
||||
try {
|
||||
if (!this.db) {
|
||||
logger.warn(`${this.path} is not connected`);
|
||||
return;
|
||||
}
|
||||
const start = performance.now();
|
||||
await this.db.insertUpdates(updates);
|
||||
logger.debug(
|
||||
`[SQLiteAdapter][${this.role}] addUpdateToSQLite`,
|
||||
'length:',
|
||||
updates.length,
|
||||
performance.now() - start,
|
||||
'ms'
|
||||
);
|
||||
} catch (error) {
|
||||
logger.error('addUpdateToSQLite', this.path, error);
|
||||
}
|
||||
}
|
||||
}
|
||||
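As a quick orientation for the new file above: `BaseSQLiteAdapter` lazily opens a `SqliteConnection` and wraps blob/update access in try/catch. A minimal sketch of a hypothetical subclass (class and method names here are illustrative, not part of the commit):

```ts
import { BaseSQLiteAdapter } from './base-db-adapter';

// Hypothetical subclass for illustration only; `role` and method names are made up.
class ScratchSQLiteDB extends BaseSQLiteAdapter {
  role = 'scratch';

  async writeUpdate(update: Uint8Array) {
    // connectIfNeeded() opens the SqliteConnection on first use and reuses it afterwards
    await this.connectIfNeeded();
    await this.addUpdateToSQLite([update]);
  }
}
```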
apps/electron/src/helper/db/ensure-db.ts (new file, 140 lines)
@@ -0,0 +1,140 @@
|
||||
import type { Subject } from 'rxjs';
|
||||
import { Observable } from 'rxjs';
|
||||
import {
|
||||
concat,
|
||||
defer,
|
||||
from,
|
||||
fromEvent,
|
||||
interval,
|
||||
lastValueFrom,
|
||||
merge,
|
||||
} from 'rxjs';
|
||||
import {
|
||||
concatMap,
|
||||
distinctUntilChanged,
|
||||
filter,
|
||||
ignoreElements,
|
||||
last,
|
||||
map,
|
||||
shareReplay,
|
||||
startWith,
|
||||
switchMap,
|
||||
take,
|
||||
takeUntil,
|
||||
tap,
|
||||
} from 'rxjs/operators';
|
||||
|
||||
import { logger } from '../logger';
|
||||
import { getWorkspaceMeta, workspaceSubjects } from '../workspace';
|
||||
import { SecondaryWorkspaceSQLiteDB } from './secondary-db';
|
||||
import type { WorkspaceSQLiteDB } from './workspace-db-adapter';
|
||||
import { openWorkspaceDatabase } from './workspace-db-adapter';
|
||||
|
||||
// export for testing
|
||||
export const db$Map = new Map<string, Observable<WorkspaceSQLiteDB>>();
|
||||
|
||||
// use defer to prevent `app` from being undefined while running tests
|
||||
const beforeQuit$ = defer(() => fromEvent(process, 'beforeExit'));
|
||||
|
||||
// return a stream that emits a single event when the subject completes
|
||||
function completed<T>(subject: Subject<T>) {
|
||||
return new Observable(subscriber => {
|
||||
const sub = subject.subscribe({
|
||||
complete: () => {
|
||||
subscriber.next();
|
||||
subscriber.complete();
|
||||
},
|
||||
});
|
||||
return () => sub.unsubscribe();
|
||||
});
|
||||
}
|
||||
|
||||
function getWorkspaceDB$(id: string) {
|
||||
if (!db$Map.has(id)) {
|
||||
db$Map.set(
|
||||
id,
|
||||
from(openWorkspaceDatabase(id)).pipe(
|
||||
tap({
|
||||
next: db => {
|
||||
logger.info(
|
||||
'[ensureSQLiteDB] db connection established',
|
||||
db.workspaceId
|
||||
);
|
||||
},
|
||||
}),
|
||||
switchMap(db =>
|
||||
// takeUntil the polling stream, and then destroy the db
|
||||
concat(
|
||||
startPollingSecondaryDB(db).pipe(
|
||||
ignoreElements(),
|
||||
startWith(db),
|
||||
takeUntil(merge(beforeQuit$, completed(db.update$))),
|
||||
last(),
|
||||
tap({
|
||||
next() {
|
||||
logger.info(
|
||||
'[ensureSQLiteDB] polling secondary db complete',
|
||||
db.workspaceId
|
||||
);
|
||||
},
|
||||
})
|
||||
),
|
||||
defer(async () => {
|
||||
try {
|
||||
await db.destroy();
|
||||
db$Map.delete(id);
|
||||
return db;
|
||||
} catch (err) {
|
||||
logger.error('[ensureSQLiteDB] destroy db failed', err);
|
||||
throw err;
|
||||
}
|
||||
})
|
||||
).pipe(startWith(db))
|
||||
),
|
||||
shareReplay(1)
|
||||
)
|
||||
);
|
||||
}
|
||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||
return db$Map.get(id)!;
|
||||
}
|
||||
|
||||
function startPollingSecondaryDB(db: WorkspaceSQLiteDB) {
|
||||
return merge(
|
||||
getWorkspaceMeta(db.workspaceId),
|
||||
workspaceSubjects.meta.pipe(
|
||||
map(({ meta }) => meta),
|
||||
filter(meta => meta.id === db.workspaceId)
|
||||
)
|
||||
).pipe(
|
||||
map(meta => meta?.secondaryDBPath),
|
||||
filter((p): p is string => !!p),
|
||||
distinctUntilChanged(),
|
||||
switchMap(path => {
|
||||
// on secondary db path change, destroy the old db and create a new one
|
||||
const secondaryDB = new SecondaryWorkspaceSQLiteDB(path, db);
|
||||
return new Observable<SecondaryWorkspaceSQLiteDB>(subscriber => {
|
||||
subscriber.next(secondaryDB);
|
||||
return () => secondaryDB.destroy();
|
||||
});
|
||||
}),
|
||||
switchMap(secondaryDB => {
|
||||
return interval(300000).pipe(
|
||||
startWith(0),
|
||||
concatMap(() => secondaryDB.pull()),
|
||||
tap({
|
||||
error: err => {
|
||||
logger.error(`[ensureSQLiteDB] polling secondary db error`, err);
|
||||
},
|
||||
complete: () => {
|
||||
logger.info('[ensureSQLiteDB] polling secondary db complete');
|
||||
},
|
||||
})
|
||||
);
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
export function ensureSQLiteDB(id: string) {
|
||||
return lastValueFrom(getWorkspaceDB$(id).pipe(take(1)));
|
||||
}
|
||||
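To illustrate how the ensure-db module above is meant to be consumed, here is a sketch (under the assumption that the cached observable has not yet been torn down by `beforeQuit$` or by `db.update$` completing): repeated calls with the same workspace id resolve to the same `WorkspaceSQLiteDB`, because the observable is cached in `db$Map` and shared via `shareReplay(1)`.

```ts
import { ensureSQLiteDB } from './ensure-db';

// Sketch only: demonstrates the caching behavior, not production code.
async function demo(workspaceId: string) {
  const a = await ensureSQLiteDB(workspaceId);
  const b = await ensureSQLiteDB(workspaceId);
  console.assert(a === b, 'both calls share the db$Map entry');
}
```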
@@ -1,5 +1,5 @@
|
||||
import { appContext } from '../context';
|
||||
import type { MainEventListener, NamespaceHandlers } from '../type';
|
||||
import { mainRPC } from '../main-rpc';
|
||||
import type { MainEventRegister } from '../type';
|
||||
import { ensureSQLiteDB } from './ensure-db';
|
||||
import { dbSubjects } from './subjects';
|
||||
|
||||
@@ -7,34 +7,34 @@ export * from './ensure-db';
|
||||
export * from './subjects';
|
||||
|
||||
export const dbHandlers = {
|
||||
getDocAsUpdates: async (_, id: string) => {
|
||||
getDocAsUpdates: async (id: string) => {
|
||||
const workspaceDB = await ensureSQLiteDB(id);
|
||||
return workspaceDB.getDocAsUpdates();
|
||||
},
|
||||
applyDocUpdate: async (_, id: string, update: Uint8Array) => {
|
||||
applyDocUpdate: async (id: string, update: Uint8Array) => {
|
||||
const workspaceDB = await ensureSQLiteDB(id);
|
||||
return workspaceDB.applyUpdate(update);
|
||||
},
|
||||
addBlob: async (_, workspaceId: string, key: string, data: Uint8Array) => {
|
||||
addBlob: async (workspaceId: string, key: string, data: Uint8Array) => {
|
||||
const workspaceDB = await ensureSQLiteDB(workspaceId);
|
||||
return workspaceDB.addBlob(key, data);
|
||||
},
|
||||
getBlob: async (_, workspaceId: string, key: string) => {
|
||||
getBlob: async (workspaceId: string, key: string) => {
|
||||
const workspaceDB = await ensureSQLiteDB(workspaceId);
|
||||
return workspaceDB.getBlob(key);
|
||||
},
|
||||
deleteBlob: async (_, workspaceId: string, key: string) => {
|
||||
deleteBlob: async (workspaceId: string, key: string) => {
|
||||
const workspaceDB = await ensureSQLiteDB(workspaceId);
|
||||
return workspaceDB.deleteBlob(key);
|
||||
},
|
||||
getBlobKeys: async (_, workspaceId: string) => {
|
||||
getBlobKeys: async (workspaceId: string) => {
|
||||
const workspaceDB = await ensureSQLiteDB(workspaceId);
|
||||
return workspaceDB.getBlobKeys();
|
||||
},
|
||||
getDefaultStorageLocation: async () => {
|
||||
return appContext.appDataPath;
|
||||
return await mainRPC.getPath('sessionData');
|
||||
},
|
||||
} satisfies NamespaceHandlers;
|
||||
};
|
||||
|
||||
export const dbEvents = {
|
||||
onExternalUpdate: (
|
||||
@@ -45,4 +45,4 @@ export const dbEvents = {
|
||||
sub.unsubscribe();
|
||||
};
|
||||
},
|
||||
} satisfies Record<string, MainEventListener>;
|
||||
} satisfies Record<string, MainEventRegister>;
|
||||
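The hunk above drops the unused leading IPC event argument, so the db handlers now receive only their business arguments. An illustrative call (the workspace id is a placeholder):

```ts
// before: dbHandlers.getDocAsUpdates(event, workspaceId)
// after:
const updates = await dbHandlers.getDocAsUpdates('some-workspace-id');
```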
@@ -1,12 +1,14 @@
|
||||
import assert from 'node:assert';
|
||||
|
||||
import type { SqliteConnection } from '@affine/native';
|
||||
import { debounce } from 'lodash-es';
|
||||
import * as Y from 'yjs';
|
||||
|
||||
import type { AppContext } from '../context';
|
||||
import { logger } from '../logger';
|
||||
import type { YOrigin } from '../type';
|
||||
import { mergeUpdateWorker } from '../workers';
|
||||
import { getWorkspaceMeta } from '../workspace';
|
||||
import { BaseSQLiteAdapter } from './base-db-adapter';
|
||||
import { mergeUpdate } from './merge-update';
|
||||
import type { WorkspaceSQLiteDB } from './workspace-db-adapter';
|
||||
|
||||
const FLUSH_WAIT_TIME = 5000;
|
||||
@@ -16,6 +18,7 @@ export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
|
||||
role = 'secondary';
|
||||
yDoc = new Y.Doc();
|
||||
firstConnected = false;
|
||||
destroyed = false;
|
||||
|
||||
updateQueue: Uint8Array[] = [];
|
||||
|
||||
@@ -30,16 +33,12 @@ export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
|
||||
logger.debug('[SecondaryWorkspaceSQLiteDB] created', this.workspaceId);
|
||||
}
|
||||
|
||||
close() {
|
||||
this.db?.close();
|
||||
this.db = null;
|
||||
}
|
||||
|
||||
override destroy() {
|
||||
this.flushUpdateQueue();
|
||||
override async destroy() {
|
||||
await this.flushUpdateQueue();
|
||||
this.unsubscribers.forEach(unsub => unsub());
|
||||
this.db?.close();
|
||||
this.yDoc.destroy();
|
||||
await super.destroy();
|
||||
this.destroyed = true;
|
||||
}
|
||||
|
||||
get workspaceId() {
|
||||
@@ -48,12 +47,15 @@ export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
|
||||
|
||||
// do not update the db immediately; instead, push to a queue
// and flush the queue later
|
||||
addUpdateToUpdateQueue(update: Uint8Array) {
|
||||
async addUpdateToUpdateQueue(db: SqliteConnection, update: Uint8Array) {
|
||||
this.updateQueue.push(update);
|
||||
this.debouncedFlush();
|
||||
await this.debouncedFlush();
|
||||
}
|
||||
|
||||
flushUpdateQueue() {
|
||||
async flushUpdateQueue() {
|
||||
if (this.destroyed) {
|
||||
return;
|
||||
}
|
||||
logger.debug(
|
||||
'flushUpdateQueue',
|
||||
this.workspaceId,
|
||||
@@ -62,9 +64,9 @@ export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
|
||||
);
|
||||
const updates = [...this.updateQueue];
|
||||
this.updateQueue = [];
|
||||
this.connect();
|
||||
this.addUpdateToSQLite(updates);
|
||||
this.close();
|
||||
await this.run(async () => {
|
||||
await this.addUpdateToSQLite(updates);
|
||||
});
|
||||
}
|
||||
|
||||
// flush after 5s, but will not wait for more than 10s
|
||||
@@ -75,23 +77,29 @@ export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
|
||||
runCounter = 0;
|
||||
|
||||
// wrap the fn with connect and close
|
||||
// it only works for sync functions
|
||||
run = <T extends (...args: any[]) => any>(fn: T) => {
|
||||
async run<T extends (...args: any[]) => any>(
|
||||
fn: T
|
||||
): Promise<
|
||||
(T extends (...args: any[]) => infer U ? Awaited<U> : unknown) | undefined
|
||||
> {
|
||||
try {
|
||||
if (this.runCounter === 0) {
|
||||
this.connect();
|
||||
if (this.destroyed) {
|
||||
return;
|
||||
}
|
||||
await this.connectIfNeeded();
|
||||
this.runCounter++;
|
||||
return fn();
|
||||
return await fn();
|
||||
} catch (err) {
|
||||
logger.error(err);
|
||||
throw err;
|
||||
} finally {
|
||||
this.runCounter--;
|
||||
if (this.runCounter === 0) {
|
||||
this.close();
|
||||
// just close db, but not the yDoc
|
||||
await super.destroy();
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
setupAndListen() {
|
||||
if (this.firstConnected) {
|
||||
@@ -106,10 +114,10 @@ export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
|
||||
}
|
||||
};
|
||||
|
||||
const onSelfUpdate = (update: Uint8Array, origin: YOrigin) => {
|
||||
const onSelfUpdate = async (update: Uint8Array, origin: YOrigin) => {
|
||||
// for self update from upstream, we need to push it to external DB
|
||||
if (origin === 'upstream') {
|
||||
this.addUpdateToUpdateQueue(update);
|
||||
if (origin === 'upstream' && this.db) {
|
||||
await this.addUpdateToUpdateQueue(this.db, update);
|
||||
}
|
||||
|
||||
if (origin === 'self') {
|
||||
@@ -131,9 +139,13 @@ export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
|
||||
const upstreamUpdate = this.upstream.getDocAsUpdates();
|
||||
// to initialize the yDoc, we need to apply all updates from the db
|
||||
this.applyUpdate(upstreamUpdate, 'upstream');
|
||||
|
||||
this.pull();
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
logger.debug('run success');
|
||||
})
|
||||
.catch(err => {
|
||||
logger.error('run error', err);
|
||||
});
|
||||
}
|
||||
|
||||
applyUpdate = (data: Uint8Array, origin: YOrigin = 'upstream') => {
|
||||
@@ -141,17 +153,20 @@ export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
|
||||
};
|
||||
|
||||
// TODO: have a better solution to handle blobs
|
||||
syncBlobs() {
|
||||
this.run(() => {
|
||||
// pull blobs
|
||||
const blobsKeys = this.getBlobKeys();
|
||||
const upstreamBlobsKeys = this.upstream.getBlobKeys();
|
||||
async syncBlobs() {
|
||||
await this.run(async () => {
|
||||
// skip if upstream db is not connected (maybe it is already closed)
|
||||
const blobsKeys = await this.getBlobKeys();
|
||||
if (!this.upstream.db || this.upstream.db?.isClose) {
|
||||
return;
|
||||
}
|
||||
const upstreamBlobsKeys = await this.upstream.getBlobKeys();
|
||||
// put every missing blob to upstream
|
||||
for (const key of blobsKeys) {
|
||||
if (!upstreamBlobsKeys.includes(key)) {
|
||||
const blob = this.getBlob(key);
|
||||
const blob = await this.getBlob(key);
|
||||
if (blob) {
|
||||
this.upstream.addBlob(key, blob);
|
||||
await this.upstream.addBlob(key, blob);
|
||||
logger.debug('syncBlobs', this.workspaceId, key);
|
||||
}
|
||||
}
|
||||
@@ -170,13 +185,18 @@ export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
|
||||
*/
|
||||
async pull() {
|
||||
const start = performance.now();
|
||||
const updates = this.run(() => {
|
||||
assert(this.upstream.db, 'upstream db should be connected');
|
||||
const updates = await this.run(async () => {
|
||||
// TODO: no need to get all updates, just get the latest ones (using a cursor, etc)?
|
||||
this.syncBlobs();
|
||||
return this.getUpdates().map(update => update.data);
|
||||
await this.syncBlobs();
|
||||
return (await this.getUpdates()).map(update => update.data);
|
||||
});
|
||||
|
||||
const merged = await mergeUpdateWorker(updates);
|
||||
if (!updates || this.destroyed) {
|
||||
return;
|
||||
}
|
||||
|
||||
const merged = mergeUpdate(updates);
|
||||
this.applyUpdate(merged, 'self');
|
||||
|
||||
logger.debug(
|
||||
@@ -189,10 +209,7 @@ export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
|
||||
}
|
||||
}
|
||||
|
||||
export async function getSecondaryWorkspaceDBPath(
|
||||
context: AppContext,
|
||||
workspaceId: string
|
||||
) {
|
||||
const meta = await getWorkspaceMeta(context, workspaceId);
|
||||
export async function getSecondaryWorkspaceDBPath(workspaceId: string) {
|
||||
const meta = await getWorkspaceMeta(workspaceId);
|
||||
return meta?.secondaryDBPath;
|
||||
}
|
||||
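The reworked `run()` above keeps one connection open across nested calls and closes it when the outermost call finishes. A reduced sketch of that reference-counting pattern (names are illustrative; this is not the class from the diff):

```ts
class RefCountedConnection {
  private count = 0;

  async run<T>(fn: () => Promise<T>): Promise<T> {
    if (this.count === 0) {
      await this.open(); // only the first (outermost) caller connects
    }
    this.count++;
    try {
      return await fn();
    } finally {
      this.count--;
      if (this.count === 0) {
        await this.close(); // the last caller out closes the connection
      }
    }
  }

  private async open() {
    /* connect to the database here */
  }

  private async close() {
    /* release the connection here */
  }
}
```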
@@ -1,7 +1,5 @@
import { Subject } from 'rxjs';

export const dbSubjects = {
  // emit workspace id when the db file is missing
  fileMissing: new Subject<string>(),
  externalUpdate: new Subject<{ workspaceId: string; update: Uint8Array }>(),
};
|
||||
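For context on how `dbSubjects` is consumed (this mirrors `dbEvents.onExternalUpdate` earlier in the diff; the handler body is illustrative):

```ts
const sub = dbSubjects.externalUpdate.subscribe(({ workspaceId, update }) => {
  // e.g. forward the Yjs update to the renderer that has this workspace open
  console.log('external update for', workspaceId, update.byteLength, 'bytes');
});
// later, when the listener is no longer needed
sub.unsubscribe();
```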
@@ -1,13 +1,11 @@
|
||||
import type { Database } from 'better-sqlite3';
|
||||
import { Subject } from 'rxjs';
|
||||
import * as Y from 'yjs';
|
||||
|
||||
import type { AppContext } from '../context';
|
||||
import { logger } from '../logger';
|
||||
import type { YOrigin } from '../type';
|
||||
import { mergeUpdateWorker } from '../workers';
|
||||
import { getWorkspaceMeta } from '../workspace';
|
||||
import { BaseSQLiteAdapter } from './base-db-adapter';
|
||||
import { mergeUpdate } from './merge-update';
|
||||
import { dbSubjects } from './subjects';
|
||||
|
||||
export class WorkspaceSQLiteDB extends BaseSQLiteAdapter {
|
||||
@@ -21,39 +19,39 @@ export class WorkspaceSQLiteDB extends BaseSQLiteAdapter {
|
||||
super(path);
|
||||
}
|
||||
|
||||
override destroy() {
|
||||
this.db?.close();
|
||||
this.db = null;
|
||||
override async destroy() {
|
||||
await super.destroy();
|
||||
this.yDoc.destroy();
|
||||
|
||||
// when db is closed, we can safely remove it from ensure-db list
|
||||
this.update$.complete();
|
||||
this.firstConnected = false;
|
||||
}
|
||||
|
||||
getWorkspaceName = () => {
|
||||
return this.yDoc.getMap('space:meta').get('name') as string;
|
||||
};
|
||||
|
||||
async init(): Promise<Database | undefined> {
|
||||
const db = super.connect();
|
||||
async init() {
|
||||
const db = await super.connectIfNeeded();
|
||||
|
||||
if (!this.firstConnected) {
|
||||
this.yDoc.on('update', (update: Uint8Array, origin: YOrigin) => {
|
||||
this.yDoc.on('update', async (update: Uint8Array, origin: YOrigin) => {
|
||||
if (origin === 'renderer') {
|
||||
this.addUpdateToSQLite([update]);
|
||||
await this.addUpdateToSQLite([update]);
|
||||
} else if (origin === 'external') {
|
||||
this.addUpdateToSQLite([update]);
|
||||
logger.debug('external update', this.workspaceId);
|
||||
dbSubjects.externalUpdate.next({
|
||||
workspaceId: this.workspaceId,
|
||||
update,
|
||||
});
|
||||
await this.addUpdateToSQLite([update]);
|
||||
logger.debug('external update', this.workspaceId);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
const updates = this.getUpdates();
|
||||
const merged = await mergeUpdateWorker(updates.map(update => update.data));
|
||||
const updates = await this.getUpdates();
|
||||
const merged = mergeUpdate(updates.map(update => update.data));
|
||||
|
||||
// to initialize the yDoc, we need to apply all updates from the db
|
||||
this.applyUpdate(merged, 'self');
|
||||
@@ -78,29 +76,27 @@ export class WorkspaceSQLiteDB extends BaseSQLiteAdapter {
|
||||
Y.applyUpdate(this.yDoc, data, origin);
|
||||
};
|
||||
|
||||
override addBlob(key: string, value: Uint8Array) {
|
||||
const res = super.addBlob(key, value);
|
||||
override async addBlob(key: string, value: Uint8Array) {
|
||||
this.update$.next();
|
||||
const res = await super.addBlob(key, value);
|
||||
return res;
|
||||
}
|
||||
|
||||
override deleteBlob(key: string) {
|
||||
super.deleteBlob(key);
|
||||
override async deleteBlob(key: string) {
|
||||
this.update$.next();
|
||||
await super.deleteBlob(key);
|
||||
}
|
||||
|
||||
override addUpdateToSQLite(data: Uint8Array[]) {
|
||||
super.addUpdateToSQLite(data);
|
||||
override async addUpdateToSQLite(data: Uint8Array[]) {
|
||||
this.update$.next();
|
||||
await super.addUpdateToSQLite(data);
|
||||
}
|
||||
}
|
||||
|
||||
export async function openWorkspaceDatabase(
|
||||
context: AppContext,
|
||||
workspaceId: string
|
||||
) {
|
||||
const meta = await getWorkspaceMeta(context, workspaceId);
|
||||
export async function openWorkspaceDatabase(workspaceId: string) {
|
||||
const meta = await getWorkspaceMeta(workspaceId);
|
||||
const db = new WorkspaceSQLiteDB(meta.mainDBPath, workspaceId);
|
||||
await db.init();
|
||||
logger.info(`openWorkspaceDatabase [${workspaceId}]`);
|
||||
return db;
|
||||
}
|
||||
@@ -1,18 +1,16 @@
|
||||
import path from 'node:path';
|
||||
|
||||
import { app } from 'electron';
|
||||
import { dialog, shell } from 'electron';
|
||||
import fs from 'fs-extra';
|
||||
import { nanoid } from 'nanoid';
|
||||
|
||||
import { appContext } from '../context';
|
||||
import { ensureSQLiteDB } from '../db/ensure-db';
|
||||
import { isValidDBFile } from '../db/helper';
|
||||
import type { WorkspaceSQLiteDB } from '../db/workspace-db-adapter';
|
||||
import { logger } from '../logger';
|
||||
import { mainRPC } from '../main-rpc';
|
||||
import {
|
||||
getWorkspaceDBPath,
|
||||
getWorkspaceMeta,
|
||||
getWorkspacesBasePath,
|
||||
listWorkspaces,
|
||||
storeWorkspaceMeta,
|
||||
} from '../workspace';
|
||||
@@ -21,11 +19,11 @@ import {
|
||||
// we are using native dialogs because HTML dialogs do not give full file paths
|
||||
|
||||
export async function revealDBFile(workspaceId: string) {
|
||||
const meta = await getWorkspaceMeta(appContext, workspaceId);
|
||||
const meta = await getWorkspaceMeta(workspaceId);
|
||||
if (!meta) {
|
||||
return;
|
||||
}
|
||||
shell.showItemInFolder(meta.secondaryDBPath ?? meta.mainDBPath);
|
||||
await mainRPC.showItemInFolder(meta.secondaryDBPath ?? meta.mainDBPath);
|
||||
}
|
||||
|
||||
// provide a backdoor to set dialog path for testing in playwright
|
||||
@@ -89,7 +87,7 @@ export async function saveDBFileAs(
|
||||
const db = await ensureSQLiteDB(workspaceId);
|
||||
const ret =
|
||||
getFakedResult() ??
|
||||
(await dialog.showSaveDialog({
|
||||
(await mainRPC.showSaveDialog({
|
||||
properties: ['showOverwriteConfirmation'],
|
||||
title: 'Save Workspace',
|
||||
showsTagField: false,
|
||||
@@ -112,7 +110,9 @@ export async function saveDBFileAs(
|
||||
|
||||
await fs.copyFile(db.path, filePath);
|
||||
logger.log('saved', filePath);
|
||||
shell.showItemInFolder(filePath);
|
||||
mainRPC.showItemInFolder(filePath).catch(err => {
|
||||
console.error(err);
|
||||
});
|
||||
return { filePath };
|
||||
} catch (err) {
|
||||
logger.error('saveDBFileAs', err);
|
||||
@@ -132,11 +132,11 @@ export async function selectDBFileLocation(): Promise<SelectDBFileLocationResult
|
||||
try {
|
||||
const ret =
|
||||
getFakedResult() ??
|
||||
(await dialog.showOpenDialog({
|
||||
(await mainRPC.showOpenDialog({
|
||||
properties: ['openDirectory'],
|
||||
title: 'Set Workspace Storage Location',
|
||||
buttonLabel: 'Select',
|
||||
defaultPath: app.getPath('documents'),
|
||||
defaultPath: await mainRPC.getPath('documents'),
|
||||
message: "Select a location to store the workspace's database file",
|
||||
}));
|
||||
const dir = ret.filePaths?.[0];
|
||||
@@ -178,7 +178,7 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
|
||||
try {
|
||||
const ret =
|
||||
getFakedResult() ??
|
||||
(await dialog.showOpenDialog({
|
||||
(await mainRPC.showOpenDialog({
|
||||
properties: ['openFile'],
|
||||
title: 'Load Workspace',
|
||||
buttonLabel: 'Load',
|
||||
@@ -198,7 +198,7 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
|
||||
}
|
||||
|
||||
// the imported file should not be in app data dir
|
||||
if (filePath.startsWith(path.join(appContext.appDataPath, 'workspaces'))) {
|
||||
if (filePath.startsWith(await getWorkspacesBasePath())) {
|
||||
logger.warn('loadDBFile: db file in app data dir');
|
||||
return { error: 'DB_FILE_PATH_INVALID' };
|
||||
}
|
||||
@@ -208,21 +208,23 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
|
||||
return { error: 'DB_FILE_ALREADY_LOADED' };
|
||||
}
|
||||
|
||||
if (!isValidDBFile(filePath)) {
|
||||
const { SqliteConnection } = await import('@affine/native');
|
||||
|
||||
if (!(await SqliteConnection.validate(filePath))) {
|
||||
// TODO: report invalid db file error?
|
||||
return { error: 'DB_FILE_INVALID' }; // invalid db file
|
||||
}
|
||||
|
||||
// copy the db file to a new workspace id
|
||||
const workspaceId = nanoid(10);
|
||||
const internalFilePath = getWorkspaceDBPath(appContext, workspaceId);
|
||||
const internalFilePath = await getWorkspaceDBPath(workspaceId);
|
||||
|
||||
await fs.ensureDir(path.join(appContext.appDataPath, 'workspaces'));
|
||||
await fs.ensureDir(await getWorkspacesBasePath());
|
||||
|
||||
await fs.copy(filePath, internalFilePath);
|
||||
logger.info(`loadDBFile, copy: ${filePath} -> ${internalFilePath}`);
|
||||
|
||||
await storeWorkspaceMeta(appContext, workspaceId, {
|
||||
await storeWorkspaceMeta(workspaceId, {
|
||||
id: workspaceId,
|
||||
mainDBPath: internalFilePath,
|
||||
secondaryDBPath: filePath,
|
||||
@@ -259,13 +261,12 @@ export async function moveDBFile(
|
||||
let db: WorkspaceSQLiteDB | null = null;
|
||||
try {
|
||||
db = await ensureSQLiteDB(workspaceId);
|
||||
|
||||
const meta = await getWorkspaceMeta(appContext, workspaceId);
|
||||
const meta = await getWorkspaceMeta(workspaceId);
|
||||
|
||||
const oldDir = meta.secondaryDBPath
|
||||
? path.dirname(meta.secondaryDBPath)
|
||||
: null;
|
||||
const defaultDir = oldDir ?? app.getPath('documents');
|
||||
const defaultDir = oldDir ?? (await mainRPC.getPath('documents'));
|
||||
|
||||
const newName = getDefaultDBFileName(db.getWorkspaceName(), workspaceId);
|
||||
|
||||
@@ -273,7 +274,7 @@ export async function moveDBFile(
|
||||
dbFileDir ??
|
||||
(
|
||||
getFakedResult() ??
|
||||
(await dialog.showOpenDialog({
|
||||
(await mainRPC.showOpenDialog({
|
||||
properties: ['openDirectory'],
|
||||
title: 'Move Workspace Storage',
|
||||
buttonLabel: 'Move',
|
||||
@@ -305,11 +306,21 @@ export async function moveDBFile(
|
||||
|
||||
// remove the old db file, but we don't care if it fails
|
||||
if (meta.secondaryDBPath) {
|
||||
fs.remove(meta.secondaryDBPath);
|
||||
await fs
|
||||
.remove(meta.secondaryDBPath)
|
||||
.then(() => {
|
||||
logger.info(`[moveDBFile] removed ${meta.secondaryDBPath}`);
|
||||
})
|
||||
.catch(err => {
|
||||
logger.error(
|
||||
`[moveDBFile] remove ${meta.secondaryDBPath} failed`,
|
||||
err
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
// update meta
|
||||
await storeWorkspaceMeta(appContext, workspaceId, {
|
||||
await storeWorkspaceMeta(workspaceId, {
|
||||
secondaryDBPath: newFilePath,
|
||||
});
|
||||
|
||||
@@ -317,7 +328,7 @@ export async function moveDBFile(
|
||||
filePath: newFilePath,
|
||||
};
|
||||
} catch (err) {
|
||||
db?.destroy();
|
||||
await db?.destroy();
|
||||
logger.error('[moveDBFile]', err);
|
||||
return {
|
||||
error: 'UNKNOWN_ERROR',
|
||||
@@ -326,7 +337,7 @@ export async function moveDBFile(
|
||||
}
|
||||
|
||||
async function dbFileAlreadyLoaded(path: string) {
|
||||
const meta = await listWorkspaces(appContext);
|
||||
const meta = await listWorkspaces();
|
||||
const paths = meta.map(m => m[1].secondaryDBPath);
|
||||
return paths.includes(path);
|
||||
}
|
||||
@@ -1,4 +1,3 @@
|
||||
import type { NamespaceHandlers } from '../type';
|
||||
import {
|
||||
loadDBFile,
|
||||
moveDBFile,
|
||||
@@ -9,25 +8,24 @@ import {
|
||||
} from './dialog';
|
||||
|
||||
export const dialogHandlers = {
|
||||
revealDBFile: async (_, workspaceId: string) => {
|
||||
revealDBFile: async (workspaceId: string) => {
|
||||
return revealDBFile(workspaceId);
|
||||
},
|
||||
loadDBFile: async () => {
|
||||
return loadDBFile();
|
||||
},
|
||||
saveDBFileAs: async (_, workspaceId: string) => {
|
||||
saveDBFileAs: async (workspaceId: string) => {
|
||||
return saveDBFileAs(workspaceId);
|
||||
},
|
||||
moveDBFile: (_, workspaceId: string, dbFileLocation?: string) => {
|
||||
moveDBFile: (workspaceId: string, dbFileLocation?: string) => {
|
||||
return moveDBFile(workspaceId, dbFileLocation);
|
||||
},
|
||||
selectDBFileLocation: async () => {
|
||||
return selectDBFileLocation();
|
||||
},
|
||||
setFakeDialogResult: async (
|
||||
_,
|
||||
result: Parameters<typeof setFakeDialogResult>[0]
|
||||
) => {
|
||||
return setFakeDialogResult(result);
|
||||
},
|
||||
} satisfies NamespaceHandlers;
|
||||
};
|
||||
apps/electron/src/helper/exposed.ts (new file, 33 lines)
@@ -0,0 +1,33 @@
|
||||
import { dbEvents, dbHandlers } from './db';
|
||||
import { dialogHandlers } from './dialog';
|
||||
import { workspaceEvents, workspaceHandlers } from './workspace';
|
||||
|
||||
export const handlers = {
|
||||
db: dbHandlers,
|
||||
workspace: workspaceHandlers,
|
||||
dialog: dialogHandlers,
|
||||
};
|
||||
|
||||
export const events = {
|
||||
db: dbEvents,
|
||||
workspace: workspaceEvents,
|
||||
};
|
||||
|
||||
export const getExposedMeta = () => {
|
||||
const handlersMeta = Object.entries(handlers).map(
|
||||
([namespace, namespaceHandlers]) => {
|
||||
return [namespace, Object.keys(namespaceHandlers)] as [string, string[]];
|
||||
}
|
||||
);
|
||||
|
||||
const eventsMeta = Object.entries(events).map(
|
||||
([namespace, namespaceHandlers]) => {
|
||||
return [namespace, Object.keys(namespaceHandlers)] as [string, string[]];
|
||||
}
|
||||
);
|
||||
|
||||
return {
|
||||
handlers: handlersMeta,
|
||||
events: eventsMeta,
|
||||
};
|
||||
};
|
||||
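`getExposedMeta()` above reports only names, not implementations. Assuming the handlers and events registered in this diff, the returned value would look roughly like this (a sketch; the actual contents depend on the registered objects):

```ts
// Approximate shape, for illustration:
const meta = {
  handlers: [
    ['db', ['getDocAsUpdates', 'applyDocUpdate', 'addBlob', 'getBlob', 'deleteBlob', 'getBlobKeys', 'getDefaultStorageLocation']],
    ['workspace', ['list', 'delete', 'getMeta']],
    ['dialog', ['revealDBFile', 'loadDBFile', 'saveDBFileAs', 'moveDBFile', 'selectDBFileLocation', 'setFakeDialogResult']],
  ],
  events: [
    ['db', ['onExternalUpdate']],
    ['workspace', ['onMetaChange']],
  ],
};
```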
apps/electron/src/helper/index.ts (new file, 88 lines)
@@ -0,0 +1,88 @@
|
||||
import type { EventBasedChannel } from 'async-call-rpc';
|
||||
import { AsyncCall } from 'async-call-rpc';
|
||||
|
||||
import { events, handlers } from './exposed';
|
||||
import { logger } from './logger';
|
||||
|
||||
const createMessagePortMainChannel = (
|
||||
connection: Electron.MessagePortMain
|
||||
): EventBasedChannel => {
|
||||
return {
|
||||
on(listener) {
|
||||
const f = (e: Electron.MessageEvent) => {
|
||||
listener(e.data);
|
||||
};
|
||||
connection.on('message', f);
|
||||
// MUST start the connection to receive messages
|
||||
connection.start();
|
||||
return () => {
|
||||
connection.off('message', f);
|
||||
};
|
||||
},
|
||||
send(data) {
|
||||
connection.postMessage(data);
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
function setupRendererConnection(rendererPort: Electron.MessagePortMain) {
|
||||
const flattenedHandlers = Object.entries(handlers).flatMap(
|
||||
([namespace, namespaceHandlers]) => {
|
||||
return Object.entries(namespaceHandlers).map(([name, handler]) => {
|
||||
const handlerWithLog = async (...args: any[]) => {
|
||||
try {
|
||||
const start = performance.now();
|
||||
const result = await handler(...args);
|
||||
logger.info(
|
||||
'[async-api]',
|
||||
`${namespace}.${name}`,
|
||||
args.filter(
|
||||
arg => typeof arg !== 'function' && typeof arg !== 'object'
|
||||
),
|
||||
'-',
|
||||
(performance.now() - start).toFixed(2),
|
||||
'ms'
|
||||
);
|
||||
return result;
|
||||
} catch (error) {
|
||||
logger.error('[async-api]', `${namespace}.${name}`, error);
|
||||
}
|
||||
};
|
||||
return [`${namespace}:${name}`, handlerWithLog];
|
||||
});
|
||||
}
|
||||
);
|
||||
const rpc = AsyncCall<PeersAPIs.RendererToHelper>(
|
||||
Object.fromEntries(flattenedHandlers),
|
||||
{
|
||||
channel: createMessagePortMainChannel(rendererPort),
|
||||
log: false,
|
||||
}
|
||||
);
|
||||
|
||||
for (const [namespace, namespaceEvents] of Object.entries(events)) {
|
||||
for (const [key, eventRegister] of Object.entries(namespaceEvents)) {
|
||||
const subscription = eventRegister((...args: any[]) => {
|
||||
const chan = `${namespace}:${key}`;
|
||||
rpc.postEvent(chan, ...args).catch(err => {
|
||||
console.error(err);
|
||||
});
|
||||
});
|
||||
process.on('exit', () => {
|
||||
subscription();
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function main() {
|
||||
process.parentPort.on('message', e => {
|
||||
if (e.data.channel === 'renderer-connect' && e.ports.length === 1) {
|
||||
const rendererPort = e.ports[0];
|
||||
setupRendererConnection(rendererPort);
|
||||
logger.info('[helper] renderer connected');
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
main();
|
||||
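The helper entry point above only wires up its side of the MessagePort. A hedged sketch of the counterpart (e.g. in the preload/renderer) that talks to it: the channel naming follows the `namespace:name` convention used above, everything else here is illustrative.

```ts
import { AsyncCall } from 'async-call-rpc';

function connectToHelper(port: MessagePort) {
  const rpc = AsyncCall<Record<string, (...args: any[]) => Promise<any>>>(
    {
      // the helper calls postEvent(channel, ...args) for pushed events
      postEvent: async (channel: string, ...args: any[]) => {
        console.log('event', channel, args);
      },
    },
    {
      channel: {
        on(listener) {
          const f = (e: MessageEvent) => listener(e.data);
          port.addEventListener('message', f);
          port.start();
          return () => port.removeEventListener('message', f);
        },
        send(data) {
          port.postMessage(data);
        },
      },
      log: false,
    }
  );
  // handlers are exposed under flattened names, e.g.:
  // await rpc['db:getDocAsUpdates'](workspaceId)
  return rpc;
}
```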
apps/electron/src/helper/logger.ts (new file, 3 lines)
@@ -0,0 +1,3 @@
import log from 'electron-log';

export const logger = log.scope('helper');
apps/electron/src/helper/main-rpc.ts (new file, 33 lines)
@@ -0,0 +1,33 @@
|
||||
import { AsyncCall, type EventBasedChannel } from 'async-call-rpc';
|
||||
|
||||
import { getExposedMeta } from './exposed';
|
||||
|
||||
function createMessagePortMainChannel(
|
||||
connection: Electron.ParentPort
|
||||
): EventBasedChannel {
|
||||
return {
|
||||
on(listener) {
|
||||
const f = (e: Electron.MessageEvent) => {
|
||||
listener(e.data);
|
||||
};
|
||||
connection.on('message', f);
|
||||
return () => {
|
||||
connection.off('message', f);
|
||||
};
|
||||
},
|
||||
send(data) {
|
||||
connection.postMessage(data);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const helperToMainServer: PeersAPIs.HelperToMain = {
|
||||
getMeta: () => getExposedMeta(),
|
||||
};
|
||||
|
||||
export const mainRPC = AsyncCall<PeersAPIs.MainToHelper>(helperToMainServer, {
|
||||
strict: {
|
||||
unknownMessage: false,
|
||||
},
|
||||
channel: createMessagePortMainChannel(process.parentPort),
|
||||
});
|
||||
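A short usage note for the helper-to-main RPC defined above (assuming the main process exposes wrappers over `app.getPath` and shell/dialog, as the rest of this diff suggests):

```ts
// Illustrative calls; the exact MainToHelper surface is declared elsewhere in the app.
const sessionData = await mainRPC.getPath('sessionData');
await mainRPC.showItemInFolder(sessionData);
```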
apps/electron/src/helper/type.ts (new file, 9 lines)
@@ -0,0 +1,9 @@
export interface WorkspaceMeta {
  id: string;
  mainDBPath: string;
  secondaryDBPath?: string; // assume there will be only one
}

export type YOrigin = 'self' | 'external' | 'upstream' | 'renderer';

export type MainEventRegister = (...args: any[]) => () => void;
|
||||
@@ -4,18 +4,8 @@ import fs from 'fs-extra';
|
||||
import { v4 } from 'uuid';
|
||||
import { afterEach, describe, expect, test, vi } from 'vitest';
|
||||
|
||||
import type { AppContext } from '../../context';
|
||||
|
||||
const tmpDir = path.join(__dirname, 'tmp');
|
||||
|
||||
const testAppContext: AppContext = {
|
||||
appDataPath: path.join(tmpDir, 'test-data'),
|
||||
appName: 'test',
|
||||
};
|
||||
|
||||
vi.doMock('../../context', () => ({
|
||||
appContext: testAppContext,
|
||||
}));
|
||||
const appDataPath = path.join(tmpDir, 'app-data');
|
||||
|
||||
vi.doMock('../../db/ensure-db', () => ({
|
||||
ensureSQLiteDB: async () => ({
|
||||
@@ -23,6 +13,12 @@ vi.doMock('../../db/ensure-db', () => ({
|
||||
}),
|
||||
}));
|
||||
|
||||
vi.doMock('../../main-rpc', () => ({
|
||||
mainRPC: {
|
||||
getPath: async () => appDataPath,
|
||||
},
|
||||
}));
|
||||
|
||||
afterEach(async () => {
|
||||
await fs.remove(tmpDir);
|
||||
});
|
||||
@@ -31,30 +27,22 @@ describe('list workspaces', () => {
|
||||
test('listWorkspaces (valid)', async () => {
|
||||
const { listWorkspaces } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
'workspaces',
|
||||
workspaceId
|
||||
);
|
||||
const workspacePath = path.join(appDataPath, 'workspaces', workspaceId);
|
||||
const meta = {
|
||||
id: workspaceId,
|
||||
};
|
||||
await fs.ensureDir(workspacePath);
|
||||
await fs.writeJSON(path.join(workspacePath, 'meta.json'), meta);
|
||||
const workspaces = await listWorkspaces(testAppContext);
|
||||
const workspaces = await listWorkspaces();
|
||||
expect(workspaces).toEqual([[workspaceId, meta]]);
|
||||
});
|
||||
|
||||
test('listWorkspaces (without meta json file)', async () => {
|
||||
const { listWorkspaces } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
'workspaces',
|
||||
workspaceId
|
||||
);
|
||||
const workspacePath = path.join(appDataPath, 'workspaces', workspaceId);
|
||||
await fs.ensureDir(workspacePath);
|
||||
const workspaces = await listWorkspaces(testAppContext);
|
||||
const workspaces = await listWorkspaces();
|
||||
expect(workspaces).toEqual([
|
||||
[
|
||||
workspaceId,
|
||||
@@ -69,18 +57,14 @@ describe('delete workspace', () => {
|
||||
test('deleteWorkspace', async () => {
|
||||
const { deleteWorkspace } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
'workspaces',
|
||||
workspaceId
|
||||
);
|
||||
const workspacePath = path.join(appDataPath, 'workspaces', workspaceId);
|
||||
await fs.ensureDir(workspacePath);
|
||||
await deleteWorkspace(testAppContext, workspaceId);
|
||||
await deleteWorkspace(workspaceId);
|
||||
expect(await fs.pathExists(workspacePath)).toBe(false);
|
||||
// removed workspace will be moved to delete-workspaces
|
||||
// removed workspace will be moved to deleted-workspaces
|
||||
expect(
|
||||
await fs.pathExists(
|
||||
path.join(testAppContext.appDataPath, 'delete-workspaces', workspaceId)
|
||||
path.join(appDataPath, 'deleted-workspaces', workspaceId)
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
@@ -90,29 +74,21 @@ describe('getWorkspaceMeta', () => {
|
||||
test('can get meta', async () => {
|
||||
const { getWorkspaceMeta } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
'workspaces',
|
||||
workspaceId
|
||||
);
|
||||
const workspacePath = path.join(appDataPath, 'workspaces', workspaceId);
|
||||
const meta = {
|
||||
id: workspaceId,
|
||||
};
|
||||
await fs.ensureDir(workspacePath);
|
||||
await fs.writeJSON(path.join(workspacePath, 'meta.json'), meta);
|
||||
expect(await getWorkspaceMeta(testAppContext, workspaceId)).toEqual(meta);
|
||||
expect(await getWorkspaceMeta(workspaceId)).toEqual(meta);
|
||||
});
|
||||
|
||||
test('can create meta if not exists', async () => {
|
||||
const { getWorkspaceMeta } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
'workspaces',
|
||||
workspaceId
|
||||
);
|
||||
const workspacePath = path.join(appDataPath, 'workspaces', workspaceId);
|
||||
await fs.ensureDir(workspacePath);
|
||||
expect(await getWorkspaceMeta(testAppContext, workspaceId)).toEqual({
|
||||
expect(await getWorkspaceMeta(workspaceId)).toEqual({
|
||||
id: workspaceId,
|
||||
mainDBPath: path.join(workspacePath, 'storage.db'),
|
||||
});
|
||||
@@ -124,18 +100,14 @@ describe('getWorkspaceMeta', () => {
|
||||
test('can migrate meta if db file is a link', async () => {
|
||||
const { getWorkspaceMeta } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
'workspaces',
|
||||
workspaceId
|
||||
);
|
||||
const workspacePath = path.join(appDataPath, 'workspaces', workspaceId);
|
||||
await fs.ensureDir(workspacePath);
|
||||
const sourcePath = path.join(tmpDir, 'source.db');
|
||||
await fs.writeFile(sourcePath, 'test');
|
||||
|
||||
await fs.ensureSymlink(sourcePath, path.join(workspacePath, 'storage.db'));
|
||||
|
||||
expect(await getWorkspaceMeta(testAppContext, workspaceId)).toEqual({
|
||||
expect(await getWorkspaceMeta(workspaceId)).toEqual({
|
||||
id: workspaceId,
|
||||
mainDBPath: path.join(workspacePath, 'storage.db'),
|
||||
secondaryDBPath: sourcePath,
|
||||
@@ -150,21 +122,17 @@ describe('getWorkspaceMeta', () => {
|
||||
test('storeWorkspaceMeta', async () => {
|
||||
const { storeWorkspaceMeta } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
'workspaces',
|
||||
workspaceId
|
||||
);
|
||||
const workspacePath = path.join(appDataPath, 'workspaces', workspaceId);
|
||||
await fs.ensureDir(workspacePath);
|
||||
const meta = {
|
||||
id: workspaceId,
|
||||
mainDBPath: path.join(workspacePath, 'storage.db'),
|
||||
};
|
||||
await storeWorkspaceMeta(testAppContext, workspaceId, meta);
|
||||
await storeWorkspaceMeta(workspaceId, meta);
|
||||
expect(await fs.readJSON(path.join(workspacePath, 'meta.json'))).toEqual(
|
||||
meta
|
||||
);
|
||||
await storeWorkspaceMeta(testAppContext, workspaceId, {
|
||||
await storeWorkspaceMeta(workspaceId, {
|
||||
secondaryDBPath: path.join(tmpDir, 'test.db'),
|
||||
});
|
||||
expect(await fs.readJSON(path.join(workspacePath, 'meta.json'))).toEqual({
|
||||
@@ -172,37 +140,3 @@ test('storeWorkspaceMeta', async () => {
|
||||
secondaryDBPath: path.join(tmpDir, 'test.db'),
|
||||
});
|
||||
});
|
||||
|
||||
test('getWorkspaceMeta observable', async () => {
|
||||
const { storeWorkspaceMeta } = await import('../handlers');
|
||||
const { getWorkspaceMeta$ } = await import('../index');
|
||||
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
'workspaces',
|
||||
workspaceId
|
||||
);
|
||||
|
||||
const metaChange = vi.fn();
|
||||
|
||||
const meta$ = getWorkspaceMeta$(workspaceId);
|
||||
|
||||
meta$.subscribe(metaChange);
|
||||
await new Promise(resolve => setTimeout(resolve, 100));
|
||||
|
||||
expect(metaChange).toHaveBeenCalledWith({
|
||||
id: workspaceId,
|
||||
mainDBPath: path.join(workspacePath, 'storage.db'),
|
||||
});
|
||||
|
||||
await storeWorkspaceMeta(testAppContext, workspaceId, {
|
||||
secondaryDBPath: path.join(tmpDir, 'test.db'),
|
||||
});
|
||||
|
||||
expect(metaChange).toHaveBeenCalledWith({
|
||||
id: workspaceId,
|
||||
mainDBPath: path.join(workspacePath, 'storage.db'),
|
||||
secondaryDBPath: path.join(tmpDir, 'test.db'),
|
||||
});
|
||||
});
|
||||
@@ -2,26 +2,38 @@ import path from 'node:path';
|
||||
|
||||
import fs from 'fs-extra';
|
||||
|
||||
import { type AppContext } from '../context';
|
||||
import { ensureSQLiteDB } from '../db/ensure-db';
|
||||
import { logger } from '../logger';
|
||||
import { mainRPC } from '../main-rpc';
|
||||
import type { WorkspaceMeta } from '../type';
|
||||
import { workspaceSubjects } from './subjects';
|
||||
|
||||
export async function listWorkspaces(
|
||||
context: AppContext
|
||||
): Promise<[workspaceId: string, meta: WorkspaceMeta][]> {
|
||||
const basePath = getWorkspacesBasePath(context);
|
||||
let _appDataPath = '';
|
||||
|
||||
async function getAppDataPath() {
|
||||
if (_appDataPath) {
|
||||
return _appDataPath;
|
||||
}
|
||||
_appDataPath = await mainRPC.getPath('sessionData');
|
||||
return _appDataPath;
|
||||
}
|
||||
|
||||
export async function listWorkspaces(): Promise<
|
||||
[workspaceId: string, meta: WorkspaceMeta][]
|
||||
> {
|
||||
const basePath = await getWorkspacesBasePath();
|
||||
try {
|
||||
await fs.ensureDir(basePath);
|
||||
const dirs = await fs.readdir(basePath, {
|
||||
withFileTypes: true,
|
||||
});
|
||||
const dirs = (
|
||||
await fs.readdir(basePath, {
|
||||
withFileTypes: true,
|
||||
})
|
||||
).filter(d => d.isDirectory());
|
||||
const metaList = (
|
||||
await Promise.all(
|
||||
dirs.map(async dir => {
|
||||
// ? shall we put all meta in a single file instead of one file per workspace?
|
||||
return await getWorkspaceMeta(context, dir.name);
|
||||
return await getWorkspaceMeta(dir.name);
|
||||
})
|
||||
)
|
||||
).filter((w): w is WorkspaceMeta => !!w);
|
||||
@@ -32,16 +44,12 @@ export async function listWorkspaces(
|
||||
}
|
||||
}
|
||||
|
||||
export async function deleteWorkspace(context: AppContext, id: string) {
|
||||
const basePath = getWorkspaceBasePath(context, id);
|
||||
const movedPath = path.join(
|
||||
context.appDataPath,
|
||||
'delete-workspaces',
|
||||
`${id}`
|
||||
);
|
||||
export async function deleteWorkspace(id: string) {
|
||||
const basePath = await getWorkspaceBasePath(id);
|
||||
const movedPath = path.join(await getDeletedWorkspacesBasePath(), `${id}`);
|
||||
try {
|
||||
const db = await ensureSQLiteDB(id);
|
||||
db.destroy();
|
||||
await db.destroy();
|
||||
return await fs.move(basePath, movedPath, {
|
||||
overwrite: true,
|
||||
});
|
||||
@@ -50,22 +58,24 @@ export async function deleteWorkspace(context: AppContext, id: string) {
|
||||
}
|
||||
}
|
||||
|
||||
export function getWorkspacesBasePath(context: AppContext) {
|
||||
return path.join(context.appDataPath, 'workspaces');
|
||||
export async function getWorkspacesBasePath() {
|
||||
return path.join(await getAppDataPath(), 'workspaces');
|
||||
}
|
||||
|
||||
export function getWorkspaceBasePath(context: AppContext, workspaceId: string) {
|
||||
return path.join(context.appDataPath, 'workspaces', workspaceId);
|
||||
export async function getWorkspaceBasePath(workspaceId: string) {
|
||||
return path.join(await getAppDataPath(), 'workspaces', workspaceId);
|
||||
}
|
||||
|
||||
export function getWorkspaceDBPath(context: AppContext, workspaceId: string) {
|
||||
const basePath = getWorkspaceBasePath(context, workspaceId);
|
||||
return path.join(basePath, 'storage.db');
|
||||
async function getDeletedWorkspacesBasePath() {
|
||||
return path.join(await getAppDataPath(), 'deleted-workspaces');
|
||||
}
|
||||
|
||||
export function getWorkspaceMetaPath(context: AppContext, workspaceId: string) {
|
||||
const basePath = getWorkspaceBasePath(context, workspaceId);
|
||||
return path.join(basePath, 'meta.json');
|
||||
export async function getWorkspaceDBPath(workspaceId: string) {
|
||||
return path.join(await getWorkspaceBasePath(workspaceId), 'storage.db');
|
||||
}
|
||||
|
||||
export async function getWorkspaceMetaPath(workspaceId: string) {
|
||||
return path.join(await getWorkspaceBasePath(workspaceId), 'meta.json');
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -73,16 +83,15 @@ export function getWorkspaceMetaPath(context: AppContext, workspaceId: string) {
|
||||
* This function will also migrate the workspace if needed
|
||||
*/
|
||||
export async function getWorkspaceMeta(
|
||||
context: AppContext,
|
||||
workspaceId: string
|
||||
): Promise<WorkspaceMeta> {
|
||||
try {
|
||||
const basePath = getWorkspaceBasePath(context, workspaceId);
|
||||
const metaPath = getWorkspaceMetaPath(context, workspaceId);
|
||||
const basePath = await getWorkspaceBasePath(workspaceId);
|
||||
const metaPath = await getWorkspaceMetaPath(workspaceId);
|
||||
if (!(await fs.exists(metaPath))) {
|
||||
// since no meta was found, we will migrate the symlinked db file if needed
|
||||
await fs.ensureDir(basePath);
|
||||
const dbPath = getWorkspaceDBPath(context, workspaceId);
|
||||
const dbPath = await getWorkspaceDBPath(workspaceId);
|
||||
|
||||
// todo: remove this after migration (in stable version)
|
||||
const realDBPath = (await fs.exists(dbPath))
|
||||
@@ -111,15 +120,14 @@ export async function getWorkspaceMeta(
|
||||
}
|
||||
|
||||
export async function storeWorkspaceMeta(
|
||||
context: AppContext,
|
||||
workspaceId: string,
|
||||
meta: Partial<WorkspaceMeta>
|
||||
) {
|
||||
try {
|
||||
const basePath = getWorkspaceBasePath(context, workspaceId);
|
||||
const basePath = await getWorkspaceBasePath(workspaceId);
|
||||
await fs.ensureDir(basePath);
|
||||
const metaPath = path.join(basePath, 'meta.json');
|
||||
const currentMeta = await getWorkspaceMeta(context, workspaceId);
|
||||
const currentMeta = await getWorkspaceMeta(workspaceId);
|
||||
const newMeta = {
|
||||
...currentMeta,
|
||||
...meta,
|
||||
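To summarize the path helpers above, here is a hedged sketch of the on-disk layout they imply (the import path and the helper function are illustrative; paths are rooted at Electron's `sessionData` directory):

```ts
import { getWorkspaceDBPath, getWorkspaceMetaPath } from './handlers'; // assumed module path

// Layout implied by the helpers (illustrative):
//   <sessionData>/workspaces/<workspaceId>/storage.db   <- getWorkspaceDBPath()
//   <sessionData>/workspaces/<workspaceId>/meta.json    <- getWorkspaceMetaPath()
//   <sessionData>/deleted-workspaces/<workspaceId>/     <- deleteWorkspace() moves folders here
async function describeWorkspacePaths(workspaceId: string) {
  return {
    db: await getWorkspaceDBPath(workspaceId),
    meta: await getWorkspaceMetaPath(workspaceId),
  };
}
```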
apps/electron/src/helper/workspace/index.ts (new file, 25 lines)
@@ -0,0 +1,25 @@
|
||||
import type { MainEventRegister, WorkspaceMeta } from '../type';
|
||||
import { deleteWorkspace, getWorkspaceMeta, listWorkspaces } from './handlers';
|
||||
import { workspaceSubjects } from './subjects';
|
||||
|
||||
export * from './handlers';
|
||||
export * from './subjects';
|
||||
|
||||
export const workspaceEvents = {
|
||||
onMetaChange: (
|
||||
fn: (meta: { workspaceId: string; meta: WorkspaceMeta }) => void
|
||||
) => {
|
||||
const sub = workspaceSubjects.meta.subscribe(fn);
|
||||
return () => {
|
||||
sub.unsubscribe();
|
||||
};
|
||||
},
|
||||
} satisfies Record<string, MainEventRegister>;
|
||||
|
||||
export const workspaceHandlers = {
|
||||
list: async () => listWorkspaces(),
|
||||
delete: async (id: string) => deleteWorkspace(id),
|
||||
getMeta: async (id: string) => {
|
||||
return getWorkspaceMeta(id);
|
||||
},
|
||||
};
|
||||
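Since `MainEventRegister` returns an unsubscribe function, registering the event above looks roughly like this (the callback body is illustrative; `workspaceEvents` is the object defined in the file above):

```ts
const unsubscribe = workspaceEvents.onMetaChange(({ workspaceId, meta }) => {
  console.log('workspace meta changed', workspaceId, meta.secondaryDBPath);
});
// later, when tearing down:
unsubscribe();
```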
apps/electron/src/main/__tests__/integration.spec.ts (new file, 173 lines)
@@ -0,0 +1,173 @@
|
||||
import assert from 'node:assert';
|
||||
import path from 'node:path';
|
||||
import { setTimeout } from 'node:timers/promises';
|
||||
|
||||
import fs from 'fs-extra';
|
||||
import { afterEach, beforeEach, describe, expect, test, vi } from 'vitest';
|
||||
|
||||
import type { MainIPCHandlerMap } from '../exposed';
|
||||
|
||||
const registeredHandlers = new Map<
|
||||
string,
|
||||
((...args: any[]) => Promise<any>)[]
|
||||
>();
|
||||
|
||||
type WithoutFirstParameter<T> = T extends (_: any, ...args: infer P) => infer R
|
||||
? (...args: P) => R
|
||||
: T;
|
||||
|
||||
// common mock dispatcher for ipcMain.handle AND app.on
|
||||
// alternatively, we can use single parameter for T & F, eg, dispatch('workspace:list'),
|
||||
// however this is too hard to be typed correctly
|
||||
async function dispatch<
|
||||
T extends keyof MainIPCHandlerMap,
|
||||
F extends keyof MainIPCHandlerMap[T]
|
||||
>(
|
||||
namespace: T,
|
||||
functionName: F,
|
||||
...args: Parameters<WithoutFirstParameter<MainIPCHandlerMap[T][F]>>
|
||||
): // @ts-expect-error
|
||||
ReturnType<MainIPCHandlerMap[T][F]> {
|
||||
// @ts-expect-error
|
||||
const handlers = registeredHandlers.get(namespace + ':' + functionName);
|
||||
assert(handlers);
|
||||
|
||||
// we only care about the first handler here
|
||||
return await handlers[0](null, ...args);
|
||||
}
|
||||
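The `dispatch` helper above is how the tests below invoke the mocked IPC handlers; for example (typed against `MainIPCHandlerMap`, so namespace and function names are checked):

```ts
// e.g. in a test body:
const workspaces = await dispatch('workspace', 'list');
await dispatch('ui', 'handleThemeChange', 'dark');
```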
|
||||
const SESSION_DATA_PATH = path.join(__dirname, './tmp', 'affine-test');
|
||||
const DOCUMENTS_PATH = path.join(__dirname, './tmp', 'affine-test-documents');
|
||||
|
||||
const browserWindow = {
|
||||
isDestroyed: () => {
|
||||
return false;
|
||||
},
|
||||
setWindowButtonVisibility: (_v: boolean) => {
|
||||
// will be stubbed later
|
||||
},
|
||||
webContents: {
|
||||
send: (_type: string, ..._args: any[]) => {
|
||||
// will be stubbed later
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const ipcMain = {
|
||||
handle: (key: string, callback: (...args: any[]) => Promise<any>) => {
|
||||
const handlers = registeredHandlers.get(key) || [];
|
||||
handlers.push(callback);
|
||||
registeredHandlers.set(key, handlers);
|
||||
},
|
||||
setMaxListeners: (_n: number) => {
|
||||
// noop
|
||||
},
|
||||
};
|
||||
|
||||
const nativeTheme = {
|
||||
themeSource: 'light',
|
||||
};
|
||||
|
||||
const electronModule = {
|
||||
app: {
|
||||
getPath: (name: string) => {
|
||||
if (name === 'sessionData') {
|
||||
return SESSION_DATA_PATH;
|
||||
} else if (name === 'documents') {
|
||||
return DOCUMENTS_PATH;
|
||||
}
|
||||
throw new Error('not implemented');
|
||||
},
|
||||
name: 'affine-test',
|
||||
on: (name: string, callback: (...args: any[]) => any) => {
|
||||
const handlers = registeredHandlers.get(name) || [];
|
||||
handlers.push(callback);
|
||||
registeredHandlers.set(name, handlers);
|
||||
},
|
||||
addListener: (...args: any[]) => {
|
||||
// @ts-expect-error
|
||||
electronModule.app.on(...args);
|
||||
},
|
||||
removeListener: () => {},
|
||||
},
|
||||
BrowserWindow: {
|
||||
getAllWindows: () => {
|
||||
return [browserWindow];
|
||||
},
|
||||
},
|
||||
nativeTheme: nativeTheme,
|
||||
ipcMain,
|
||||
shell: {} as Partial<Electron.Shell>,
|
||||
dialog: {} as Partial<Electron.Dialog>,
|
||||
};
|
||||
|
||||
// dynamically import handlers so that we can inject local variables to mocks
vi.doMock('electron', () => {
  return electronModule;
});

beforeEach(async () => {
  const { registerHandlers } = await import('../handlers');
  registerHandlers();

  // should also register events
  const { registerEvents } = await import('../events');
  registerEvents();
  await fs.mkdirp(SESSION_DATA_PATH);

  registeredHandlers.get('ready')?.forEach(fn => fn());
});

afterEach(async () => {
  // reset registered handlers
  registeredHandlers.get('before-quit')?.forEach(fn => fn());
  // wait for the db to be closed on Windows
  if (process.platform === 'win32') {
    await setTimeout(200);
  }
  await fs.remove(SESSION_DATA_PATH);
});
describe('UI handlers', () => {
  test('theme-change', async () => {
    await dispatch('ui', 'handleThemeChange', 'dark');
    expect(nativeTheme.themeSource).toBe('dark');
    await dispatch('ui', 'handleThemeChange', 'light');
    expect(nativeTheme.themeSource).toBe('light');
  });

  test('sidebar-visibility-change (macOS)', async () => {
    vi.stubGlobal('process', { platform: 'darwin' });
    const setWindowButtonVisibility = vi.fn();
    browserWindow.setWindowButtonVisibility = setWindowButtonVisibility;
    await dispatch('ui', 'handleSidebarVisibilityChange', true);
    expect(setWindowButtonVisibility).toBeCalledWith(true);
    await dispatch('ui', 'handleSidebarVisibilityChange', false);
    expect(setWindowButtonVisibility).toBeCalledWith(false);
    vi.unstubAllGlobals();
  });

  test('sidebar-visibility-change (non-macOS)', async () => {
    vi.stubGlobal('process', { platform: 'linux' });
    const setWindowButtonVisibility = vi.fn();
    browserWindow.setWindowButtonVisibility = setWindowButtonVisibility;
    await dispatch('ui', 'handleSidebarVisibilityChange', true);
    expect(setWindowButtonVisibility).not.toBeCalled();
    vi.unstubAllGlobals();
  });
});

describe('applicationMenu', () => {
  // test some basic IPC events
  test('applicationMenu event', async () => {
    const { applicationMenuSubjects } = await import('../application-menu');
    const sendStub = vi.fn();
    browserWindow.webContents.send = sendStub;
    applicationMenuSubjects.newPageAction.next();
    expect(sendStub).toHaveBeenCalledWith(
      'applicationMenu:onNewPageAction',
      undefined
    );
    browserWindow.webContents.send = () => {};
  });
});
@@ -119,7 +119,7 @@ export function createApplicationMenu() {
|
||||
{
|
||||
label: 'Open log file',
|
||||
click: async () => {
|
||||
revealLogFile();
|
||||
await revealLogFile();
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -132,9 +132,9 @@ export function createApplicationMenu() {
|
||||
},
|
||||
];
|
||||
|
||||
// @ts-ignore The snippet is copied from Electron official docs.
|
||||
// It's working as expected. No idea why it contains type errors.
|
||||
// Just ignore for now.
|
||||
// @ts-expect-error: The snippet is copied from Electron official docs.
|
||||
// It's working as expected. No idea why it contains type errors.
|
||||
// Just ignore for now.
|
||||
const menu = Menu.buildFromTemplate(template);
|
||||
Menu.setApplicationMenu(menu);
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import type { MainEventListener } from '../type';
|
||||
import type { MainEventRegister } from '../type';
|
||||
import { applicationMenuSubjects } from './subject';
|
||||
|
||||
export * from './create';
|
||||
@@ -17,4 +17,4 @@ export const applicationMenuEvents = {
|
||||
sub.unsubscribe();
|
||||
};
|
||||
},
|
||||
} satisfies Record<string, MainEventListener>;
|
||||
} satisfies Record<string, MainEventRegister>;
|
||||
@@ -1,16 +1,12 @@
|
||||
import { app, BrowserWindow } from 'electron';
|
||||
|
||||
import { applicationMenuEvents } from './application-menu';
|
||||
import { dbEvents } from './db';
|
||||
import { logger } from './logger';
|
||||
import { updaterEvents } from './updater/event';
|
||||
import { workspaceEvents } from './workspace';
|
||||
|
||||
export const allEvents = {
|
||||
applicationMenu: applicationMenuEvents,
|
||||
db: dbEvents,
|
||||
updater: updaterEvents,
|
||||
workspace: workspaceEvents,
|
||||
};
|
||||
|
||||
function getActiveWindows() {
|
||||
@@ -50,7 +50,7 @@ export async function savePDFFileAs(
|
||||
});
|
||||
});
|
||||
|
||||
shell.openPath(filePath);
|
||||
await shell.openPath(filePath);
|
||||
return { filePath };
|
||||
} catch (err) {
|
||||
logger.error('savePDFFileAs', err);
|
||||
@@ -9,19 +9,13 @@ export { events, handlers };
|
||||
export const getExposedMeta = () => {
|
||||
const handlersMeta = Object.entries(handlers).map(
|
||||
([namespace, namespaceHandlers]) => {
|
||||
return [
|
||||
namespace,
|
||||
Object.keys(namespaceHandlers).map(handlerName => handlerName),
|
||||
];
|
||||
return [namespace, Object.keys(namespaceHandlers)];
|
||||
}
|
||||
);
|
||||
|
||||
const eventsMeta = Object.entries(events).map(
|
||||
([namespace, namespaceHandlers]) => {
|
||||
return [
|
||||
namespace,
|
||||
Object.keys(namespaceHandlers).map(handlerName => handlerName),
|
||||
];
|
||||
return [namespace, Object.keys(namespaceHandlers)];
|
||||
}
|
||||
);
|
||||
|
||||
@@ -32,5 +26,4 @@ export const getExposedMeta = () => {
|
||||
};
|
||||
|
||||
export type MainIPCHandlerMap = typeof handlers;
|
||||
|
||||
export type MainIPCEventMap = typeof events;
|
||||
@@ -1,13 +1,16 @@
|
||||
import type {
|
||||
DebugHandlerManager,
|
||||
ExportHandlerManager,
|
||||
UIHandlerManager,
|
||||
UnwrapManagerHandlerToServerSide,
|
||||
UpdaterHandlerManager,
|
||||
} from '@toeverything/infra';
|
||||
import { ipcMain } from 'electron';
|
||||
|
||||
import { dbHandlers } from './db';
|
||||
import { dialogHandlers } from './dialog';
|
||||
import { exportHandlers } from './export';
|
||||
import { getLogFilePath, logger, revealLogFile } from './logger';
|
||||
import type { NamespaceHandlers } from './type';
|
||||
import { uiHandlers } from './ui';
|
||||
import { updaterHandlers } from './updater';
|
||||
import { workspaceHandlers } from './workspace';
|
||||
|
||||
export const debugHandlers = {
|
||||
revealLogFile: async () => {
|
||||
@@ -18,16 +21,32 @@ export const debugHandlers = {
|
||||
},
|
||||
};
|
||||
|
||||
type AllHandlers = {
|
||||
debug: UnwrapManagerHandlerToServerSide<
|
||||
Electron.IpcMainInvokeEvent,
|
||||
DebugHandlerManager
|
||||
>;
|
||||
export: UnwrapManagerHandlerToServerSide<
|
||||
Electron.IpcMainInvokeEvent,
|
||||
ExportHandlerManager
|
||||
>;
|
||||
ui: UnwrapManagerHandlerToServerSide<
|
||||
Electron.IpcMainInvokeEvent,
|
||||
UIHandlerManager
|
||||
>;
|
||||
updater: UnwrapManagerHandlerToServerSide<
|
||||
Electron.IpcMainInvokeEvent,
|
||||
UpdaterHandlerManager
|
||||
>;
|
||||
};
|
||||
|
||||
// Note: all of these handlers will be the single-source-of-truth for the apis exposed to the renderer process
|
||||
export const allHandlers = {
|
||||
db: dbHandlers,
|
||||
debug: debugHandlers,
|
||||
dialog: dialogHandlers,
|
||||
ui: uiHandlers,
|
||||
export: exportHandlers,
|
||||
updater: updaterHandlers,
|
||||
workspace: workspaceHandlers,
|
||||
} satisfies Record<string, NamespaceHandlers>;
|
||||
} satisfies AllHandlers;
|
||||
|
||||
export const registerHandlers = () => {
|
||||
// TODO: listen to namespace instead of individual event types
|
||||
@@ -38,6 +57,7 @@ export const registerHandlers = () => {
|
||||
ipcMain.handle(chan, async (e, ...args) => {
|
||||
const start = performance.now();
|
||||
try {
|
||||
// @ts-expect-error - TODO: fix this
|
||||
const result = await handler(e, ...args);
|
||||
logger.info(
|
||||
'[ipc-api]',
|
||||
111
apps/electron/src/main/helper-process.ts
Normal file
@@ -0,0 +1,111 @@
|
||||
import path from 'node:path';
|
||||
|
||||
import { type _AsyncVersionOf, AsyncCall } from 'async-call-rpc';
|
||||
import {
|
||||
app,
|
||||
dialog,
|
||||
MessageChannelMain,
|
||||
shell,
|
||||
type UtilityProcess,
|
||||
utilityProcess,
|
||||
type WebContents,
|
||||
} from 'electron';
|
||||
|
||||
import { logger } from './logger';
|
||||
import { MessageEventChannel } from './utils';
|
||||
|
||||
const HELPER_PROCESS_PATH = path.join(__dirname, './helper.js');
|
||||
|
||||
function pickAndBind<T extends object, U extends keyof T>(
|
||||
obj: T,
|
||||
keys: U[]
|
||||
): { [K in U]: T[K] } {
|
||||
return keys.reduce((acc, key) => {
|
||||
const prop = obj[key];
|
||||
acc[key] =
|
||||
typeof prop === 'function'
|
||||
? // @ts-expect-error - a hack to bind the function
|
||||
prop.bind(obj)
|
||||
: prop;
|
||||
return acc;
|
||||
}, {} as any);
|
||||
}
|
||||
|
||||
class HelperProcessManager {
|
||||
ready: Promise<void>;
|
||||
#process: UtilityProcess;
|
||||
|
||||
// a rpc server for the main process -> helper process
|
||||
rpc?: _AsyncVersionOf<PeersAPIs.HelperToMain>;
|
||||
|
||||
static instance = new HelperProcessManager();
|
||||
|
||||
private constructor() {
|
||||
const helperProcess = utilityProcess.fork(HELPER_PROCESS_PATH);
|
||||
this.#process = helperProcess;
|
||||
this.ready = new Promise((resolve, reject) => {
|
||||
helperProcess.once('spawn', () => {
|
||||
try {
|
||||
this.#connectMain();
|
||||
resolve();
|
||||
} catch (err) {
|
||||
logger.error('[helper] connectMain error', err);
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
app.on('before-quit', () => {
|
||||
this.#process.kill();
|
||||
});
|
||||
}
|
||||
|
||||
// bridge renderer <-> helper process
|
||||
connectRenderer(renderer: WebContents) {
|
||||
// connect to the helper process
|
||||
const { port1: helperPort, port2: rendererPort } = new MessageChannelMain();
|
||||
this.#process.postMessage({ channel: 'renderer-connect' }, [helperPort]);
|
||||
renderer.postMessage('helper-connection', null, [rendererPort]);
|
||||
|
||||
return () => {
|
||||
helperPort.close();
|
||||
rendererPort.close();
|
||||
};
|
||||
}
|
||||
|
||||
// bridge main <-> helper process
|
||||
// also set up the RPC to the helper process
|
||||
#connectMain() {
|
||||
const dialogMethods = pickAndBind(dialog, [
|
||||
'showOpenDialog',
|
||||
'showSaveDialog',
|
||||
]);
|
||||
const shellMethods = pickAndBind(shell, [
|
||||
'openExternal',
|
||||
'showItemInFolder',
|
||||
]);
|
||||
const appMethods = pickAndBind(app, ['getPath']);
|
||||
|
||||
const mainToHelperServer: PeersAPIs.MainToHelper = {
|
||||
...dialogMethods,
|
||||
...shellMethods,
|
||||
...appMethods,
|
||||
};
|
||||
|
||||
const server = AsyncCall<PeersAPIs.HelperToMain>(mainToHelperServer, {
|
||||
strict: {
|
||||
// the channel is shared for other purposes as well, so we do not want to
|
||||
// restrict to only JSONRPC messages
|
||||
unknownMessage: false,
|
||||
},
|
||||
channel: new MessageEventChannel(this.#process),
|
||||
});
|
||||
this.rpc = server;
|
||||
}
|
||||
}
|
||||
|
||||
export async function ensureHelperProcess() {
|
||||
const helperProcessManager = HelperProcessManager.instance;
|
||||
await helperProcessManager.ready;
|
||||
return helperProcessManager;
|
||||
}
|
||||
@@ -5,8 +5,10 @@ import { app } from 'electron';
|
||||
import { createApplicationMenu } from './application-menu/create';
|
||||
import { registerEvents } from './events';
|
||||
import { registerHandlers } from './handlers';
|
||||
import { ensureHelperProcess } from './helper-process';
|
||||
import { logger } from './logger';
|
||||
import { restoreOrCreateWindow } from './main-window';
|
||||
import { registerPlugin } from './plugin';
|
||||
import { registerProtocol } from './protocol';
|
||||
import { registerUpdater } from './updater';
|
||||
|
||||
@@ -29,7 +31,9 @@ if (!isSingleInstance) {
|
||||
}
|
||||
|
||||
app.on('second-instance', () => {
|
||||
restoreOrCreateWindow();
|
||||
restoreOrCreateWindow().catch(e =>
|
||||
console.error('Failed to restore or create window:', e)
|
||||
);
|
||||
});
|
||||
|
||||
app.on('open-url', async (_, _url) => {
|
||||
@@ -56,9 +60,12 @@ app.on('activate', restoreOrCreateWindow);
|
||||
app
|
||||
.whenReady()
|
||||
.then(registerProtocol)
|
||||
.then(registerPlugin)
|
||||
.then(registerHandlers)
|
||||
.then(registerEvents)
|
||||
.then(ensureHelperProcess)
|
||||
.then(restoreOrCreateWindow)
|
||||
.then(createApplicationMenu)
|
||||
.then()
|
||||
.then(registerUpdater)
|
||||
.catch(e => console.error('Failed to create window:', e));
|
||||
@@ -1,7 +1,8 @@
|
||||
import { shell } from 'electron';
|
||||
import log from 'electron-log';
|
||||
|
||||
export const logger = log;
|
||||
export const logger = log.scope('main');
|
||||
log.initialize();
|
||||
|
||||
export function getLogFilePath() {
|
||||
return log.transports.file.getFile().path;
|
||||
@@ -1,8 +1,11 @@
|
||||
import assert from 'node:assert';
|
||||
|
||||
import { BrowserWindow, nativeTheme } from 'electron';
|
||||
import electronWindowState from 'electron-window-state';
|
||||
import { join } from 'path';
|
||||
|
||||
import { getExposedMeta } from './exposed';
|
||||
import { ensureHelperProcess } from './helper-process';
|
||||
import { logger } from './logger';
|
||||
import { isMacOS, isWindows } from './utils';
|
||||
|
||||
@@ -18,7 +21,12 @@ async function createWindow() {
|
||||
defaultHeight: 800,
|
||||
});
|
||||
|
||||
const exposedMeta = getExposedMeta();
|
||||
const helperProcessManager = await ensureHelperProcess();
|
||||
const helperExposedMeta = await helperProcessManager.rpc?.getMeta();
|
||||
|
||||
assert(helperExposedMeta, 'helperExposedMeta should be defined');
|
||||
|
||||
const mainExposedMeta = getExposedMeta();
|
||||
|
||||
const browserWindow = new BrowserWindow({
|
||||
titleBarStyle: isMacOS()
|
||||
@@ -42,9 +50,12 @@ async function createWindow() {
|
||||
sandbox: false,
|
||||
webviewTag: false, // The webview tag is not recommended. Consider alternatives like iframe or Electron's BrowserView. https://www.electronjs.org/docs/latest/api/webview-tag#warning
|
||||
spellcheck: false, // FIXME: enable?
|
||||
preload: join(__dirname, '../preload/index.js'),
|
||||
preload: join(__dirname, './preload.js'),
|
||||
// serialize exposed meta that to be used in preload
|
||||
additionalArguments: [`--exposed-meta=` + JSON.stringify(exposedMeta)],
|
||||
additionalArguments: [
|
||||
`--main-exposed-meta=` + JSON.stringify(mainExposedMeta),
|
||||
`--helper-exposed-meta=` + JSON.stringify(helperExposedMeta),
|
||||
],
|
||||
},
|
||||
});
|
||||
|
||||
@@ -52,6 +63,8 @@ async function createWindow() {
|
||||
|
||||
mainWindowState.manage(browserWindow);
|
||||
|
||||
let helperConnectionUnsub: (() => void) | undefined;
|
||||
|
||||
/**
|
||||
* If you install `show: true` then it can cause issues when trying to close the window.
|
||||
* Use `show: false` and listener events `ready-to-show` to fix these issues.
|
||||
@@ -65,6 +78,9 @@ async function createWindow() {
|
||||
} else {
|
||||
browserWindow.show();
|
||||
}
|
||||
helperConnectionUnsub = helperProcessManager.connectRenderer(
|
||||
browserWindow.webContents
|
||||
);
|
||||
|
||||
logger.info('main window is ready to show');
|
||||
|
||||
@@ -78,6 +94,7 @@ async function createWindow() {
|
||||
browserWindow.on('close', e => {
|
||||
e.preventDefault();
|
||||
browserWindow.destroy();
|
||||
helperConnectionUnsub?.();
|
||||
// TODO: gracefully close the app, for example, ask user to save unsaved changes
|
||||
});
|
||||
|
||||
70
apps/electron/src/main/plugin.ts
Normal file
@@ -0,0 +1,70 @@
|
||||
import { join, resolve } from 'node:path';
|
||||
import { Worker } from 'node:worker_threads';
|
||||
|
||||
import { logger } from '@affine/electron/main/logger';
|
||||
import { AsyncCall } from 'async-call-rpc';
|
||||
import { ipcMain } from 'electron';
|
||||
|
||||
import { MessageEventChannel } from './utils';
|
||||
|
||||
declare global {
|
||||
// fixme(himself65):
|
||||
// remove this when bookmark block plugin is migrated to plugin-infra
|
||||
// eslint-disable-next-line no-var
|
||||
var asyncCall: Record<string, (...args: any) => PromiseLike<any>>;
|
||||
}
|
||||
|
||||
export function registerPlugin() {
|
||||
const pluginWorkerPath = join(__dirname, './workers/plugin.worker.js');
|
||||
const asyncCall = AsyncCall<
|
||||
Record<string, (...args: any) => PromiseLike<any>>
|
||||
>(
|
||||
{
|
||||
log: (...args: any[]) => {
|
||||
logger.log('Plugin Worker', ...args);
|
||||
},
|
||||
},
|
||||
{
|
||||
channel: new MessageEventChannel(new Worker(pluginWorkerPath)),
|
||||
}
|
||||
);
|
||||
globalThis.asyncCall = asyncCall;
|
||||
logger.info('import plugin manager');
|
||||
import('@toeverything/plugin-infra/manager')
|
||||
.then(({ rootStore, affinePluginsAtom }) => {
|
||||
logger.info('import plugin manager');
|
||||
const bookmarkPluginPath = join(
|
||||
process.env.PLUGIN_DIR ?? resolve(__dirname, './plugins'),
|
||||
'./bookmark-block/index.mjs'
|
||||
);
|
||||
logger.info('bookmark plugin path:', bookmarkPluginPath);
|
||||
import('file://' + bookmarkPluginPath);
|
||||
let dispose: () => void = () => {
|
||||
// noop
|
||||
};
|
||||
rootStore.sub(affinePluginsAtom, () => {
|
||||
dispose();
|
||||
const plugins = rootStore.get(affinePluginsAtom);
|
||||
Object.values(plugins).forEach(plugin => {
|
||||
logger.info('register plugin', plugin.definition.id);
|
||||
plugin.definition.commands.forEach(command => {
|
||||
logger.info('register plugin command', command);
|
||||
ipcMain.handle(command, (event, ...args) =>
|
||||
asyncCall[command](...args)
|
||||
);
|
||||
});
|
||||
});
|
||||
dispose = () => {
|
||||
Object.values(plugins).forEach(plugin => {
|
||||
plugin.definition.commands.forEach(command => {
|
||||
logger.info('unregister plugin command', command);
|
||||
ipcMain.removeHandler(command);
|
||||
});
|
||||
});
|
||||
};
|
||||
});
|
||||
})
|
||||
.catch(error => {
|
||||
logger.error('import plugin manager error', error);
|
||||
});
|
||||
}
|
||||
@@ -16,7 +16,7 @@ protocol.registerSchemesAsPrivileged([
|
||||
|
||||
function toAbsolutePath(url: string) {
|
||||
let realpath = decodeURIComponent(url);
|
||||
const webStaticDir = join(__dirname, '../../../resources/web-static');
|
||||
const webStaticDir = join(__dirname, '../resources/web-static');
|
||||
if (url.startsWith('./')) {
|
||||
// if is a file type, load the file in resources
|
||||
if (url.split('/').at(-1)?.includes('.')) {
|
||||
@@ -34,6 +34,7 @@ export function registerProtocol() {
|
||||
const url = request.url.replace(/^file:\/\//, '');
|
||||
const realpath = toAbsolutePath(url);
|
||||
callback(realpath);
|
||||
console.log('interceptFileProtocol realpath', request.url, realpath);
|
||||
return true;
|
||||
});
|
||||
|
||||
10
apps/electron/src/main/type.ts
Normal file
@@ -0,0 +1,10 @@
export type MainEventRegister = (...args: any[]) => () => void;

export type IsomorphicHandler = (
  e: Electron.IpcMainInvokeEvent,
  ...args: any[]
) => Promise<any>;

export type NamespaceHandlers = {
  [key: string]: IsomorphicHandler;
};
@@ -28,10 +28,12 @@ export const getExchangeTokenParams = (code: string) => {
|
||||
};
|
||||
|
||||
export function getGoogleOauthCode() {
|
||||
shell.openExternal(oauthEndpoint);
|
||||
|
||||
return new Promise<ReturnType<typeof getExchangeTokenParams>>(
|
||||
(resolve, reject) => {
|
||||
shell.openExternal(oauthEndpoint).catch(e => {
|
||||
logger.error('Failed to open external url', e);
|
||||
reject(e);
|
||||
});
|
||||
const handleOpenUrl = async (_: any, url: string) => {
|
||||
const mainWindow = BrowserWindow.getAllWindows().find(
|
||||
w => !w.isDestroyed()
|
||||
@@ -2,7 +2,6 @@ import { app, BrowserWindow, nativeTheme } from 'electron';
|
||||
|
||||
import type { NamespaceHandlers } from '../type';
|
||||
import { isMacOS } from '../utils';
|
||||
import { getMetaData } from './get-meta-data';
|
||||
import { getGoogleOauthCode } from './google-auth';
|
||||
|
||||
export const uiHandlers = {
|
||||
@@ -40,11 +39,12 @@ export const uiHandlers = {
|
||||
getGoogleOauthCode: async () => {
|
||||
return getGoogleOauthCode();
|
||||
},
|
||||
getBookmarkDataByLink: async (_, url: string) => {
|
||||
return getMetaData(url, {
|
||||
shouldReGetHTML: metaData => {
|
||||
return !metaData.title && !metaData.description;
|
||||
},
|
||||
});
|
||||
/**
|
||||
* @deprecated Remove this when bookmark block plugin is migrated to plugin-infra
|
||||
*/
|
||||
getBookmarkDataByLink: async (_, link: string) => {
|
||||
return globalThis.asyncCall[
|
||||
'com.blocksuite.bookmark-block.get-bookmark-data-by-link'
|
||||
](link);
|
||||
},
|
||||
} satisfies NamespaceHandlers;
|
||||
@@ -29,13 +29,14 @@ export const quitAndInstall = async () => {
|
||||
let lastCheckTime = 0;
|
||||
export const checkForUpdatesAndNotify = async (force = true) => {
|
||||
if (!_autoUpdater) {
|
||||
return; // ?
|
||||
return void 0;
|
||||
}
|
||||
// check every 30 minutes (1800 seconds) at most
|
||||
if (force || lastCheckTime + 1000 * 1800 < Date.now()) {
|
||||
lastCheckTime = Date.now();
|
||||
return await _autoUpdater.checkForUpdatesAndNotify();
|
||||
}
|
||||
return void 0;
|
||||
};
|
||||
|
||||
export const registerUpdater = async () => {
|
||||
@@ -45,7 +46,8 @@ export const registerUpdater = async () => {
|
||||
|
||||
_autoUpdater = autoUpdater;
|
||||
|
||||
if (!_autoUpdater) {
|
||||
// skip auto update in dev mode
|
||||
if (!_autoUpdater || isDev) {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -67,7 +69,9 @@ export const registerUpdater = async () => {
|
||||
// register events for checkForUpdatesAndNotify
|
||||
_autoUpdater.on('update-available', info => {
|
||||
if (allowAutoUpdate) {
|
||||
_autoUpdater!.downloadUpdate();
|
||||
_autoUpdater?.downloadUpdate().catch(e => {
|
||||
logger.error('Failed to download update', e);
|
||||
});
|
||||
logger.info('Update available, downloading...', info);
|
||||
}
|
||||
updaterSubjects.updateAvailable.next({
|
||||
@@ -1,6 +1,6 @@
|
||||
import { BehaviorSubject, Subject } from 'rxjs';
|
||||
|
||||
import type { MainEventListener } from '../type';
|
||||
import type { MainEventRegister } from '../type';
|
||||
|
||||
export interface UpdateMeta {
|
||||
version: string;
|
||||
@@ -33,4 +33,4 @@ export const updaterEvents = {
|
||||
sub.unsubscribe();
|
||||
};
|
||||
},
|
||||
} satisfies Record<string, MainEventListener>;
|
||||
} satisfies Record<string, MainEventRegister>;
|
||||
40
apps/electron/src/main/utils.ts
Normal file
@@ -0,0 +1,40 @@
import type { EventBasedChannel } from 'async-call-rpc';

export function getTime() {
  return new Date().getTime();
}

export const isMacOS = () => {
  return process.platform === 'darwin';
};

export const isWindows = () => {
  return process.platform === 'win32';
};

interface MessagePortLike {
  postMessage: (data: unknown) => void;
  addListener: (event: 'message', listener: (...args: any[]) => void) => void;
  removeListener: (
    event: 'message',
    listener: (...args: any[]) => void
  ) => void;
}

export class MessageEventChannel implements EventBasedChannel {
  constructor(private worker: MessagePortLike) {}

  on(listener: (data: unknown) => void) {
    const f = (data: unknown) => {
      listener(data);
    };
    this.worker.addListener('message', f);
    return () => {
      this.worker.removeListener('message', f);
    };
  }

  send(data: unknown) {
    this.worker.postMessage(data);
  }
}
69
apps/electron/src/main/workers/plugin.worker.ts
Normal file
@@ -0,0 +1,69 @@
|
||||
import { join, resolve } from 'node:path';
|
||||
import { parentPort } from 'node:worker_threads';
|
||||
|
||||
import { AsyncCall } from 'async-call-rpc';
|
||||
|
||||
import { MessageEventChannel } from '../utils';
|
||||
|
||||
const commandProxy: Record<string, (...args: any[]) => Promise<any>> = {};
|
||||
|
||||
if (!parentPort) {
|
||||
throw new Error('parentPort is undefined');
|
||||
}
|
||||
|
||||
const mainThread = AsyncCall<{
|
||||
log: (...args: any[]) => Promise<void>;
|
||||
}>(commandProxy, {
|
||||
channel: new MessageEventChannel(parentPort),
|
||||
});
|
||||
|
||||
globalThis.console.log = mainThread.log;
|
||||
globalThis.console.error = mainThread.log;
|
||||
globalThis.console.info = mainThread.log;
|
||||
globalThis.console.debug = mainThread.log;
|
||||
globalThis.console.warn = mainThread.log;
|
||||
|
||||
console.log('import plugin infra');
|
||||
|
||||
import('@toeverything/plugin-infra/manager')
|
||||
.then(({ rootStore, affinePluginsAtom }) => {
|
||||
const bookmarkPluginPath = join(
|
||||
process.env.PLUGIN_DIR ?? resolve(__dirname, '../plugins'),
|
||||
'./bookmark-block/index.mjs'
|
||||
);
|
||||
|
||||
console.log('import bookmark plugin', bookmarkPluginPath);
|
||||
|
||||
import('file://' + bookmarkPluginPath).catch(console.log);
|
||||
rootStore.sub(affinePluginsAtom, () => {
|
||||
const plugins = rootStore.get(affinePluginsAtom);
|
||||
Object.values(plugins).forEach(plugin => {
|
||||
console.log('handle plugin', plugin.definition.id);
|
||||
if (plugin.serverAdapter) {
|
||||
try {
|
||||
plugin.serverAdapter({
|
||||
registerCommand: (command, fn) => {
|
||||
console.log('register command', command);
|
||||
commandProxy[command] = fn;
|
||||
},
|
||||
unregisterCommand: command => {
|
||||
console.log('unregister command', command);
|
||||
delete commandProxy[command];
|
||||
},
|
||||
});
|
||||
} catch (e) {
|
||||
console.log(
|
||||
'error when handle plugin',
|
||||
plugin.definition.id,
|
||||
`${e}`
|
||||
);
|
||||
}
|
||||
} else {
|
||||
console.log('no server adapter, skipping.');
|
||||
}
|
||||
});
|
||||
});
|
||||
})
|
||||
.catch(err => {
|
||||
console.error(err);
|
||||
});
|
||||
193
apps/electron/src/preload/affine-apis.ts
Normal file
@@ -0,0 +1,193 @@
|
||||
// NOTE: we will generate preload types from this file
|
||||
import { AsyncCall, type EventBasedChannel } from 'async-call-rpc';
|
||||
import { ipcRenderer } from 'electron';
|
||||
import { Subject } from 'rxjs';
|
||||
|
||||
type ExposedMeta = {
|
||||
handlers: [namespace: string, handlerNames: string[]][];
|
||||
events: [namespace: string, eventNames: string[]][];
|
||||
};
|
||||
|
||||
export function getAffineAPIs() {
|
||||
const mainAPIs = getMainAPIs();
|
||||
const helperAPIs = getHelperAPIs();
|
||||
|
||||
return {
|
||||
apis: {
|
||||
...mainAPIs.apis,
|
||||
...helperAPIs.apis,
|
||||
},
|
||||
events: {
|
||||
...mainAPIs.events,
|
||||
...helperAPIs.events,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export const appInfo = {
|
||||
electron: true,
|
||||
};
|
||||
|
||||
function getMainAPIs() {
|
||||
const meta: ExposedMeta = (() => {
|
||||
const val = process.argv
|
||||
.find(arg => arg.startsWith('--main-exposed-meta='))
|
||||
?.split('=')[1];
|
||||
|
||||
return val ? JSON.parse(val) : null;
|
||||
})();
|
||||
|
||||
// main handlers that can be invoked from the renderer process
|
||||
const apis: any = (() => {
|
||||
const { handlers: handlersMeta } = meta;
|
||||
|
||||
const all = handlersMeta.map(([namespace, functionNames]) => {
|
||||
const namespaceApis = functionNames.map(name => {
|
||||
const channel = `${namespace}:${name}`;
|
||||
return [
|
||||
name,
|
||||
(...args: any[]) => {
|
||||
return ipcRenderer.invoke(channel, ...args);
|
||||
},
|
||||
];
|
||||
});
|
||||
return [namespace, Object.fromEntries(namespaceApis)];
|
||||
});
|
||||
|
||||
return Object.fromEntries(all);
|
||||
})();
|
||||
|
||||
// main events that can be listened to from the renderer process
|
||||
const events: any = (() => {
|
||||
const { events: eventsMeta } = meta;
|
||||
|
||||
// NOTE: ui may try to listen to a lot of the same events, so we increase the limit...
|
||||
ipcRenderer.setMaxListeners(100);
|
||||
|
||||
const all = eventsMeta.map(([namespace, eventNames]) => {
|
||||
const namespaceEvents = eventNames.map(name => {
|
||||
const channel = `${namespace}:${name}`;
|
||||
return [
|
||||
name,
|
||||
(callback: (...args: any[]) => void) => {
|
||||
const fn: (
|
||||
event: Electron.IpcRendererEvent,
|
||||
...args: any[]
|
||||
) => void = (_, ...args) => {
|
||||
callback(...args);
|
||||
};
|
||||
ipcRenderer.on(channel, fn);
|
||||
return () => {
|
||||
ipcRenderer.off(channel, fn);
|
||||
};
|
||||
},
|
||||
];
|
||||
});
|
||||
return [namespace, Object.fromEntries(namespaceEvents)];
|
||||
});
|
||||
return Object.fromEntries(all);
|
||||
})();
|
||||
|
||||
return { apis, events };
|
||||
}
|
||||
|
||||
const helperPort$ = new Promise<MessagePort>(resolve =>
|
||||
ipcRenderer.on('helper-connection', async e => {
|
||||
console.info('[preload] helper-connection', e);
|
||||
resolve(e.ports[0]);
|
||||
})
|
||||
);
|
||||
|
||||
const createMessagePortChannel = (port: MessagePort): EventBasedChannel => {
|
||||
return {
|
||||
on(listener) {
|
||||
port.onmessage = e => {
|
||||
listener(e.data);
|
||||
};
|
||||
port.start();
|
||||
return () => {
|
||||
port.onmessage = null;
|
||||
port.close();
|
||||
};
|
||||
},
|
||||
send(data) {
|
||||
port.postMessage(data);
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
function getHelperAPIs() {
|
||||
const events$ = new Subject<{ channel: string; args: any[] }>();
|
||||
const meta: ExposedMeta = (() => {
|
||||
const val = process.argv
|
||||
.find(arg => arg.startsWith('--helper-exposed-meta='))
|
||||
?.split('=')[1];
|
||||
|
||||
return val ? JSON.parse(val) : null;
|
||||
})();
|
||||
|
||||
const rendererToHelperServer: PeersAPIs.RendererToHelper = {
|
||||
postEvent: (channel, ...args) => {
|
||||
events$.next({ channel, args });
|
||||
},
|
||||
};
|
||||
|
||||
const rpc = AsyncCall<PeersAPIs.HelperToRenderer>(rendererToHelperServer, {
|
||||
channel: helperPort$.then(helperPort =>
|
||||
createMessagePortChannel(helperPort)
|
||||
),
|
||||
log: false,
|
||||
});
|
||||
|
||||
const toHelperHandler = (namespace: string, name: string) => {
|
||||
return rpc[`${namespace}:${name}`];
|
||||
};
|
||||
|
||||
const toHelperEventSubscriber = (namespace: string, name: string) => {
|
||||
return (callback: (...args: any[]) => void) => {
|
||||
const subscription = events$.subscribe(({ channel, args }) => {
|
||||
if (channel === `${namespace}:${name}`) {
|
||||
callback(...args);
|
||||
}
|
||||
});
|
||||
return () => {
|
||||
subscription.unsubscribe();
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
const setup = (meta: ExposedMeta) => {
|
||||
const { handlers: handlersMeta, events: eventsMeta } = meta;
|
||||
|
||||
const helperHandlers = Object.fromEntries(
|
||||
handlersMeta.map(([namespace, functionNames]) => {
|
||||
return [
|
||||
namespace,
|
||||
Object.fromEntries(
|
||||
functionNames.map(name => {
|
||||
return [name, toHelperHandler(namespace, name)];
|
||||
})
|
||||
),
|
||||
];
|
||||
})
|
||||
);
|
||||
|
||||
const helperEvents = Object.fromEntries(
|
||||
eventsMeta.map(([namespace, eventNames]) => {
|
||||
return [
|
||||
namespace,
|
||||
Object.fromEntries(
|
||||
eventNames.map(name => {
|
||||
return [name, toHelperEventSubscriber(namespace, name)];
|
||||
})
|
||||
),
|
||||
];
|
||||
})
|
||||
);
|
||||
return [helperHandlers, helperEvents];
|
||||
};
|
||||
|
||||
const [apis, events] = setup(meta);
|
||||
|
||||
return { apis, events };
|
||||
}
|
||||
55
apps/electron/src/preload/bootstrap.ts
Normal file
@@ -0,0 +1,55 @@
|
||||
import { contextBridge, ipcRenderer } from 'electron';
|
||||
|
||||
(async () => {
|
||||
const { appInfo, getAffineAPIs } = await import('./affine-apis');
|
||||
const { apis, events } = getAffineAPIs();
|
||||
|
||||
contextBridge.exposeInMainWorld('appInfo', appInfo);
|
||||
contextBridge.exposeInMainWorld('apis', apis);
|
||||
contextBridge.exposeInMainWorld('events', events);
|
||||
|
||||
// Credit to microsoft/vscode
|
||||
const globals = {
|
||||
ipcRenderer: {
|
||||
send(channel: string, ...args: any[]) {
|
||||
ipcRenderer.send(channel, ...args);
|
||||
},
|
||||
|
||||
invoke(channel: string, ...args: any[]) {
|
||||
return ipcRenderer.invoke(channel, ...args);
|
||||
},
|
||||
|
||||
on(
|
||||
channel: string,
|
||||
listener: (event: Electron.IpcRendererEvent, ...args: any[]) => void
|
||||
) {
|
||||
ipcRenderer.on(channel, listener);
|
||||
return this;
|
||||
},
|
||||
|
||||
once(
|
||||
channel: string,
|
||||
listener: (event: Electron.IpcRendererEvent, ...args: any[]) => void
|
||||
) {
|
||||
ipcRenderer.once(channel, listener);
|
||||
return this;
|
||||
},
|
||||
|
||||
removeListener(
|
||||
channel: string,
|
||||
listener: (event: Electron.IpcRendererEvent, ...args: any[]) => void
|
||||
) {
|
||||
ipcRenderer.removeListener(channel, listener);
|
||||
return this;
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
try {
|
||||
contextBridge.exposeInMainWorld('affine', globals);
|
||||
} catch (error) {
|
||||
console.error('Failed to expose affine APIs to window object!', error);
|
||||
}
|
||||
})().catch(err => {
|
||||
console.error('Failed to bootstrap preload script!', err);
|
||||
});
|
||||
1
apps/electron/src/preload/index.ts
Normal file
@@ -0,0 +1 @@
import './bootstrap';
35
apps/electron/src/types.d.ts
vendored
Normal file
@@ -0,0 +1,35 @@
declare namespace PeersAPIs {
  import type { app, dialog, shell } from 'electron';

  interface ExposedMeta {
    handlers: [string, string[]][];
    events: [string, string[]][];
  }

  // render <-> helper
  interface RendererToHelper {
    postEvent: (channel: string, ...args: any[]) => void;
  }

  interface HelperToRenderer {
    [key: string]: (...args: any[]) => Promise<any>;
  }

  // helper <-> main
  interface HelperToMain {
    getMeta: () => ExposedMeta;
  }

  type MainToHelper = Pick<
    typeof dialog & typeof shell & typeof app,
    | 'showOpenDialog'
    | 'showSaveDialog'
    | 'openExternal'
    | 'showItemInFolder'
    | 'getPath'
  >;

  // render <-> main
  // these are handled via IPC
  // TODO: fix type
}
@@ -1,6 +1,3 @@
|
||||
// eslint-disable-next-line @typescript-eslint/triple-slash-reference
|
||||
/// <reference path="../layers/preload/preload.d.ts" />
|
||||
|
||||
/* eslint-disable no-empty-pattern */
|
||||
import crypto from 'node:crypto';
|
||||
import { join, resolve } from 'node:path';
|
||||
@@ -45,6 +42,7 @@ export const test = base.extend<{
|
||||
});
|
||||
}
|
||||
const logFilePath = await page.evaluate(async () => {
|
||||
// @ts-expect-error
|
||||
return window.apis?.debug.logFilePath();
|
||||
});
|
||||
// wait for blocksuite to be loaded
|
||||
@@ -82,8 +80,21 @@ export const test = base.extend<{
|
||||
// a random id to avoid conflicts between tests
|
||||
const id = generateUUID();
|
||||
const ext = process.platform === 'win32' ? '.cmd' : '';
|
||||
const dist = resolve(__dirname, '..', 'dist');
|
||||
const clonedDist = resolve(__dirname, '../e2e-dist-' + id);
|
||||
await fs.copy(dist, clonedDist);
|
||||
const packageJson = await fs.readJSON(
|
||||
resolve(__dirname, '..', 'package.json')
|
||||
);
|
||||
// overwrite the app name
|
||||
packageJson.name = 'affine-test-' + id;
|
||||
// overwrite the path to the main script
|
||||
packageJson.main = './main.js';
|
||||
// write to the cloned dist
|
||||
await fs.writeJSON(resolve(clonedDist, 'package.json'), packageJson);
|
||||
|
||||
const electronApp = await electron.launch({
|
||||
args: [resolve(__dirname, '..'), '--app-name', 'affine-test-' + id],
|
||||
args: [clonedDist],
|
||||
executablePath: resolve(
|
||||
__dirname,
|
||||
'..',
|
||||
@@ -97,11 +108,11 @@ export const test = base.extend<{
|
||||
colorScheme: 'light',
|
||||
});
|
||||
await use(electronApp);
|
||||
// FIXME: the following does not work well on CI
|
||||
// const sessionDataPath = await electronApp.evaluate(async ({ app }) => {
|
||||
// return app.getPath('sessionData');
|
||||
// });
|
||||
// await fs.rm(sessionDataPath, { recursive: true, force: true });
|
||||
try {
|
||||
await fs.rm(clonedDist, { recursive: true, force: true });
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
}
|
||||
},
|
||||
appInfo: async ({ electronApp }, use) => {
|
||||
const appInfo = await electronApp.evaluate(async ({ app }) => {
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
{
|
||||
"extends": "../../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"outDir": "./lib",
|
||||
"baseUrl": ".",
|
||||
"noEmit": true,
|
||||
"target": "ESNext"
|
||||
},
|
||||
"references": [{ "path": "../../../tests/kit" }],
|
||||
"include": ["**.spec.ts", "**.test.ts"]
|
||||
"include": ["**.spec.ts", "**.test.ts", "fixture.ts"],
|
||||
"exclude": ["lib"]
|
||||
}
|
||||
|
||||
@@ -27,6 +27,7 @@ test('move workspace db file', async ({ page, appInfo, workspace }) => {
|
||||
|
||||
// move db file to tmp folder
|
||||
await page.evaluate(tmpPath => {
|
||||
// @ts-expect-error
|
||||
window.apis?.dialog.setFakeDialogResult({
|
||||
filePath: tmpPath,
|
||||
});
|
||||
@@ -61,6 +62,7 @@ test('export then add', async ({ page, appInfo, workspace }) => {
|
||||
|
||||
// export db file to tmp folder
|
||||
await page.evaluate(tmpPath => {
|
||||
// @ts-expect-error
|
||||
window.apis?.dialog.setFakeDialogResult({
|
||||
filePath: tmpPath,
|
||||
});
|
||||
@@ -79,6 +81,7 @@ test('export then add', async ({ page, appInfo, workspace }) => {
|
||||
await page.getByTestId('add-or-new-workspace').click();
|
||||
|
||||
await page.evaluate(tmpPath => {
|
||||
// @ts-expect-error
|
||||
window.apis?.dialog.setFakeDialogResult({
|
||||
filePath: tmpPath,
|
||||
});
|
||||
|
||||
@@ -11,20 +11,27 @@
|
||||
"outDir": "dist",
|
||||
"moduleResolution": "node",
|
||||
"resolveJsonModule": true,
|
||||
"noImplicitOverride": true,
|
||||
"noEmit": false
|
||||
"noImplicitOverride": true
|
||||
},
|
||||
"include": ["**/*.ts", "**/*.tsx"],
|
||||
"include": ["./src"],
|
||||
"exclude": ["node_modules", "out", "dist"],
|
||||
"references": [
|
||||
{
|
||||
"path": "./tsconfig.node.json"
|
||||
"path": "../../packages/plugin-infra"
|
||||
},
|
||||
{
|
||||
"path": "../../packages/native"
|
||||
},
|
||||
{
|
||||
"path": "../../packages/env"
|
||||
"path": "../../packages/infra"
|
||||
},
|
||||
|
||||
// Tests
|
||||
{
|
||||
"path": "./tsconfig.node.json"
|
||||
},
|
||||
{
|
||||
"path": "./tests/tsconfig.json"
|
||||
},
|
||||
{ "path": "../../tests/kit" }
|
||||
],
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"extends": "./tsconfig.json",
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"composite": true,
|
||||
"target": "ESNext",
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.