Mirror of https://github.com/toeverything/AFFiNE.git (synced 2026-02-04 08:38:34 +00:00)

Compare commits: renovate/n ... v0.6.0-bet (200 commits)
Commits (SHA1):

4a41f74341, c0cf0d6e04, 33176c7c9e, 6a4c3ddf11, 6bd2e6a619, 07a502e840, 9c772bf153, df5a4a83bb,
2df606fb02, 45f831a73c, 9c6916426e, 2428611748, 60df372524, 00c5e323b3, 1521978bb5, f7f25af6b7,
acf6e94188, 7f15b06875, f1c1439b85, 35b0853b3f, bd3ea956e2, 341731bbfc, 8ba27b3d43, cede62db4c,
ace8aeefbd, 2fd94c44a2, fd7315989c, 7d961c67d8, e2a1c3daab, 877b78b0a3, ab56d15f7b, a4ae5e3141,
0ad15232c7, 0d20dc70af, 2d38f346eb, 88f13467ec, 75c4519829, 4a99376c21, bd4d02bcc7, 74476ca9f2,
83926c8061, e78fe306a2, 673d0f98b8, 405d31b0a6, 3bd0961c5b, 4457f3c565, 0b17e06aa3, 2c111ff7f9,
cc37dccec5, f6f89be3a2, 2a56786544, 5e953e6955, 588fcac81c, 817c59b00d, 9f2719b7bd, 0c1858141f,
e8e4f5ae1f, 61a167ed6e, 96d313d491, 975023463c, 752d9182b5, e82100c201, 92f91dee24, faaf4d60ee,
81ab927022, 2a95ddd63c, fd41659c4b, a1564eb638, 5eec494880, e074a7f450, c80203893c, bf9445c869,
f2a61c0a4e, 5c48363f50, a990d4a95e, 3ee1bbab71, 2a767af71d, 625fcec51c, ee8d50cb71, dfb135818a,
f63b78a355, cd2182bea8, 19bdeebadb, 65750d4ef8, 11e7a10e3d, 358d12f612, 0d4d7dca0a, 04d6849581,
d68d02503c, eddf171144, 75646d9667, 4802182f34, edac4b2629, 0e3d80940e, 2c0f799bc7, 001db68499,
2631deba23, 1ccb8a37ed, 9c4d21beb1, 6353babed4, ceca3a5518, 7f38cc03ef, 0bebe94a28, 1ed1f7c059,
3aab657545, ac2fc262b9, 6d5521981f, 60f2bfc9b2, 6076ee3620, 39dc71251b, f829c9b3b1, 48ef60cb0a,
341b18ded7, 2efafe5420, 02293325bc, fb5e027c61, debf8d170e, 97e88b3d8b, 05bf41501a, f2f5128783,
1363094ce6, 75c54f0af5, ec142a7189, 6f859967a9, bcee63175c, f62ca1822d, 684bbafbcf, 6cd0053b0c,
ccd3fb4925, d5c3d1b86a, 31e1575b5d, 403479996d, 19f7f591ce, 76289838d2, bb65262217, 877b87aae0,
0c5c1a5511, edda79c448, a4111f5550, e099734cc7, 26f3380c1a, 4874adbf3f, 943e6c59e3, c0d6b8c458,
26f5461f9a, 66303e5fd6, 337fe18d4c, cbcf8140e4, a998dc808a, 23f51a7ecc, ab8cdb4222, 5c6655ab0e,
9c6e687113, 25cf2e9ba0, 31bea47545, a34e2eb57d, 8527c5bfac, 599bf92c08, e8f70c6e45, c01f2d5eea,
581726ecc5, b15eae11cf, 1aef8862ad, 5fcaf7eef9, fac93b0328, 54b8b36618, 683343ad82, add5deae0f,
ec66b229fe, 5008958e84, 5516c215cd, 7c90417b2b, 1922c07c00, c61c1e10a0, df93a870af, 6ab51b6d54,
f25b75c0d8, 93521f434f, 20fb801ecd, 9902892615, f8e184a6c0, 66e1b5c537, 37512bc18f, 5ba4fb8d7c,
5f28afa5fe, 270c00f021, e69831636a, df60392c31, 58fa9d1fb8, b4981abe4f, 4c230843ed, c76bc34c6f,
8bbb9ca304, d9dbe64d9b, d389e2bc43, 64f4e634e8, cf6341d00b, aad711c115, f787d19696, a0a22f417a
@@ -17,7 +17,7 @@
  "hooks",
  "i18n",
  "jotai",
  "octobase-node",
  "native",
  "templates",
  "y-indexeddb",
  "debug",

@@ -5,3 +5,4 @@ out
storybook-static
affine-out
_next
lib
.eslintrc.js (51 changed lines)

@@ -1,3 +1,43 @@
const createPattern = packageName => [
  {
    group: ['**/dist', '**/dist/**'],
    message: 'Do not import from dist',
    allowTypeImports: false,
  },
  {
    group: ['**/src', '**/src/**'],
    message: 'Do not import from src',
    allowTypeImports: false,
  },
  {
    group: [`@affine/${packageName}`],
    message: 'Do not import package itself',
    allowTypeImports: false,
  },
  {
    group: [`@toeverything/${packageName}`],
    message: 'Do not import package itself',
    allowTypeImports: false,
  },
];

const allPackages = [
  'cli',
  'component',
  'debug',
  'env',
  'graphql',
  'hooks',
  'i18n',
  'jotai',
  'native',
  'plugin-infra',
  'templates',
  'theme',
  'workspace',
  'y-indexeddb',
];

/**
 * @type {import('eslint').Linter.Config}
 */

@@ -96,6 +136,17 @@ const config = {
      '@typescript-eslint/no-var-requires': 0,
    },
  },
  ...allPackages.map(pkg => ({
    files: [`packages/${pkg}/src/**/*.ts`, `packages/${pkg}/src/**/*.tsx`],
    rules: {
      '@typescript-eslint/no-restricted-imports': [
        'error',
        {
          patterns: createPattern(pkg),
        },
      ],
    },
  })),
],
};
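The new override applies createPattern(pkg) to every file under packages/<pkg>/src, so a package can no longer deep-import dist or src paths, and cannot import itself through its own package name. A minimal sketch of what the rule catches; the file path and the imported names below are hypothetical examples, not code from the repository:

```js
// Hypothetical file: packages/component/src/example.tsx
import { Button } from '@affine/component'; // flagged: "Do not import package itself"
import { config } from '@affine/env/dist/config'; // flagged: "Do not import from dist"
import { DebugLogger } from '@affine/debug'; // allowed: another package via its public entry point
```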
.github/CLA.md (1 changed line, vendored)

@@ -58,3 +58,4 @@ Example:
- Howard Do, @howarddo2208, 2023/04/20
- 三咲智子 Kevin Deng, @sxzz, 2023/04/21
- Moeyua, @moeyua, 2023/04/22
- Shishu, @shishudesu, 2023/05/19
.github/actions/build-rust/action.yml (49 changed lines, vendored, new file)

@@ -0,0 +1,49 @@
name: 'AFFiNE Rust build'
description: 'Rust build setup, including cache configuration'
inputs:
  target:
    description: 'Cargo target'
    required: true

runs:
  using: 'composite'
  steps:
    - name: Setup Rust
      uses: dtolnay/rust-toolchain@stable
      with:
        toolchain: stable
        targets: ${{ inputs.target }}

    - name: Cache cargo
      uses: actions/cache@v3
      with:
        path: |
          ~/.cargo/registry/index/
          ~/.cargo/registry/cache/
          ~/.cargo/git/db/
          .cargo-cache
          target/${{ inputs.target }}
        key: stable-${{ inputs.target }}-cargo-cache

    - name: Build
      if: ${{ inputs.target != 'x86_64-unknown-linux-gnu' && inputs.target != 'aarch64-unknown-linux-gnu' }}
      shell: bash
      run: yarn workspace @affine/native build --target ${{ inputs.target }}
      env:
        CARGO_BUILD_INCREMENTAL: 'false'

    - name: Build
      if: ${{ inputs.target == 'x86_64-unknown-linux-gnu' }}
      uses: addnab/docker-run-action@v3
      with:
        image: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian
        options: --user 0:0 -e CARGO_BUILD_INCREMENTAL=false -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build
        run: yarn workspace @affine/native build --target ${{ inputs.target }}

    - name: Build
      if: ${{ inputs.target == 'aarch64-unknown-linux-gnu' }}
      uses: addnab/docker-run-action@v3
      with:
        image: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian-aarch64
        options: --user 0:0 -e CARGO_BUILD_INCREMENTAL=false -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build
        run: yarn workspace @affine/native build --target ${{ inputs.target }}
.github/labeler.yml (6 changed lines, vendored)

@@ -8,11 +8,17 @@ test:
  - '**/tests/**/*'
  - '**/__tests__/**/*'

plugin:copilot:
  - 'plugins/copilot/**/*'

mod:dev:
  - 'scripts/**/*'
  - 'packages/cli/**/*'
  - 'packages/debug/**/*'

mod:plugin-infra:
  - 'packages/plugin-infra/**/*'

mod:workspace: 'packages/workspace/**/*'

mod:i18n: 'packages/i18n/**/*'
.github/workflows/add-to-project.yml (24 changed lines, vendored)

@@ -1,24 +0,0 @@
name: Add to GitHub projects

on:
  issues:
    types:
      - opened
  pull_request_target:
    types:
      - opened
      - reopened

jobs:
  add-to-project:
    name: Add issues and pull requests
    runs-on: ubuntu-latest
    steps:
      - uses: actions/add-to-project@v0.4.0
        with:
          # You can target a repository in a different organization
          # to the issue
          project-url: https://github.com/orgs/toeverything/projects/10
          github-token: ${{ secrets.ADD_TO_PROJECT_PAT }}
          # labeled: bug, needs-triage
          # label-operator: OR
.github/workflows/build.yml (145 changed lines, vendored)

@@ -4,9 +4,26 @@ on:
  push:
    branches:
      - master
      - v[0-9]+.[0-9]+.x-staging
      - v[0-9]+.[0-9]+.x
    paths-ignore:
      - README.md
      - .github/**
      - '!.github/workflows/build.yml'
  pull_request:
    branches:
      - master
      - v[0-9]+.[0-9]+.x-staging
      - v[0-9]+.[0-9]+.x
    paths-ignore:
      - README.md
      - .github/**
      - '!.github/workflows/build.yml'

env:
  DEBUG: napi:*
  APP_NAME: affine
  MACOSX_DEPLOYMENT_TARGET: '10.13'

jobs:
  lint:

@@ -18,7 +35,12 @@ jobs:
      - uses: actions/checkout@v3
      - name: Setup Node.js
        uses: ./.github/actions/setup-node
      - run: yarn lint --max-warnings=0
      - name: Run checks
        run: |
          yarn i18n-codegen gen
          yarn typecheck
          yarn lint --max-warnings=0
          yarn circular

  build-storybook:
    name: Build Storybook

@@ -37,23 +59,6 @@ jobs:
          path: ./packages/component/storybook-static
          if-no-files-found: error

  build-electron:
    name: Build @affine/electron
    runs-on: ubuntu-latest
    environment: development
    steps:
      - uses: actions/checkout@v3
      - name: Setup Node.js
        uses: ./.github/actions/setup-node
      - name: Build Electron
        working-directory: apps/electron
        run: yarn build-layers
      - name: Upload Ubuntu desktop artifact
        uses: actions/upload-artifact@v3
        with:
          name: affine-ubuntu
          path: ./apps/electron/dist

  build:
    name: Build @affine/web
    runs-on: ubuntu-latest

@@ -82,7 +87,9 @@ jobs:
          NEXT_PUBLIC_FIREBASE_APP_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_APP_ID }}
          NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID }}
          API_SERVER_PROFILE: local
          ENABLE_DEBUG_PAGE: true
          ENABLE_DEBUG_PAGE: 1
          ENABLE_PLUGIN: true
          ENABLE_ALL_PAGE_FILTER: true
          ENABLE_LEGACY_PROVIDER: true
          COVERAGE: true

@@ -104,7 +111,9 @@ jobs:
          NEXT_PUBLIC_FIREBASE_APP_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_APP_ID }}
          NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID }}
          API_SERVER_PROFILE: affine
          ENABLE_DEBUG_PAGE: true
          ENABLE_DEBUG_PAGE: 1
          ENABLE_PLUGIN: true
          ENABLE_ALL_PAGE_FILTER: true
          ENABLE_LEGACY_PROVIDER: false
          COVERAGE: true

@@ -261,7 +270,7 @@ jobs:

      - name: Upload test results
        if: ${{ failure() }}
        uses: actions/upload-artifact@v2
        uses: actions/upload-artifact@v3
        with:
          name: test-results-e2e-${{ matrix.shard }}
          path: ./test-results

@@ -269,28 +278,63 @@ jobs:

  dekstop-test:
    name: Desktop Test
    runs-on: ubuntu-latest
    runs-on: ${{ matrix.spec.os }}
    environment: development
    strategy:
      fail-fast: false
      # all combinations: macos-latest x64, macos-latest arm64, windows-latest x64, ubuntu-latest x64
      matrix:
        spec:
          - { os: macos-latest, platform: macos, arch: x64 }
          - { os: macos-latest, platform: macos, arch: arm64 }
          - { os: ubuntu-latest, platform: linux, arch: x64 }
          - { os: windows-latest, platform: windows, arch: x64 }
    needs: [build, build-electron]
          - {
              os: macos-latest,
              platform: macos,
              arch: x64,
              target: x86_64-apple-darwin,
              test: true,
            }
          - {
              os: macos-latest,
              platform: macos,
              arch: arm64,
              target: aarch64-apple-darwin,
              test: false,
            }
          - {
              os: ubuntu-latest,
              platform: linux,
              arch: x64,
              target: x86_64-unknown-linux-gnu,
              test: true,
            }
          - {
              os: windows-latest,
              platform: windows,
              arch: x64,
              target: x86_64-pc-windows-msvc,
              test: true,
            }
    needs: [build]
    steps:
      - uses: actions/checkout@v3
      - name: Setup Node.js
        uses: ./.github/actions/setup-node
        with:
          playwright-install: true
      - name: Download Ubuntu desktop artifact
        uses: actions/download-artifact@v3
      - name: Build AFFiNE native
        uses: ./.github/actions/build-rust
        with:
          name: affine-ubuntu
          path: ./apps/electron/dist
          target: ${{ matrix.spec.target }}
      - name: Run unit tests
        if: ${{ matrix.spec.test }}
        shell: bash
        run: |
          rm -rf apps/electron/node_modules/better-sqlite3/build
          yarn --cwd apps/electron/node_modules/better-sqlite3 run install
          yarn test:unit
        env:
          NATIVE_TEST: 'true'
      - name: Build layers
        run: yarn workspace @affine/electron build-layers

      - name: Download static resource artifact
        uses: actions/download-artifact@v3

@@ -299,18 +343,47 @@ jobs:
          path: ./apps/electron/resources/web-static

      - name: Rebuild Electron dependences
        run: yarn rebuild:for-electron
        working-directory: apps/electron
        shell: bash
        run: |
          rm -rf apps/electron/node_modules/better-sqlite3/build
          yarn workspace @affine/electron rebuild:for-electron --arch=${{ matrix.spec.arch }}

      - name: Run desktop tests
        run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- yarn test
        if: ${{ matrix.spec.test && matrix.spec.os == 'ubuntu-latest' }}
        run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- yarn workspace @affine/electron test
        env:
          COVERAGE: true

      - name: Run desktop tests
        if: ${{ matrix.spec.test && matrix.spec.os != 'ubuntu-latest' }}
        run: yarn workspace @affine/electron test
        env:
          COVERAGE: true

      - name: Collect code coverage report
        if: ${{ matrix.spec.test }}
        run: yarn exec nyc report -t .nyc_output --report-dir .coverage --reporter=lcov

      - name: Upload e2e test coverage results
        if: ${{ matrix.spec.test }}
        uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: ./.coverage/lcov.info
          flags: e2etest-${{ matrix.spec.os }}-${{ matrix.spec.arch }}
          name: affine
          fail_ci_if_error: true

      - name: Run desktop tests
        if: ${{ matrix.spec.test && matrix.spec.os != 'ubuntu-latest' }}
        run: yarn test
        working-directory: apps/electron

      - name: Upload test results
        if: ${{ failure() }}
        uses: actions/upload-artifact@v2
        uses: actions/upload-artifact@v3
        with:
          name: test-results-e2e-${{ matrix.shard }}
          name: test-results-e2e-${{ matrix.spec.os }}-${{ matrix.spec.arch }}
          path: ./test-results
          if-no-files-found: ignore
.github/workflows/languages-sync.yml (8 changed lines, vendored)

@@ -13,14 +13,6 @@ on:
      - '.github/workflows/languages-sync.yml'
  workflow_dispatch:

# Cancels all previous workflow runs for pull requests that have not completed.
# See https://docs.github.com/en/actions/using-jobs/using-concurrency
concurrency:
  # The concurrency group contains the workflow name and the branch name for
  # pull requests or the commit hash for any other events.
  group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.head_ref || github.sha }}
  cancel-in-progress: true

jobs:
  main:
    runs-on: ubuntu-latest
.github/workflows/nightly-build.yml (233 changed lines, vendored, new file)

@@ -0,0 +1,233 @@
name: Build Canary Desktop App on Staging Branch

on:
  push:
    branches:
      - v[0-9]+.[0-9]+.x-staging
    paths-ignore:
      - README.md
      - .github/**
      - '!.github/workflows/nightly-build.yml'

permissions:
  actions: write
  contents: write
  security-events: write

concurrency:
  # The concurrency group contains the workflow name and the branch name for
  # pull requests or the commit hash for any other events.
  group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.head_ref || github.sha }}
  cancel-in-progress: true

env:
  BUILD_TYPE: internal

jobs:
  set-build-version:
    runs-on: ubuntu-latest
    environment: production
    outputs:
      version: 0.0.0-${{ steps.version.outputs.version }}
    steps:
      - uses: actions/checkout@v3
      - uses: toeverything/set-build-version@latest
      - id: version
        run: echo ::set-output name=version::${{ env.BUILD_VERSION }}

  before-make:
    runs-on: ubuntu-latest
    environment: production
    needs:
      - set-build-version
    steps:
      - uses: actions/checkout@v3
      - name: Setup Node.js
        uses: ./.github/actions/setup-node
      - name: Replace Version
        run: ./scripts/set-version.sh ${{ needs.set-build-version.outputs.version }}
      - name: generate-assets
        working-directory: apps/electron
        run: yarn generate-assets
        env:
          NEXT_PUBLIC_FIREBASE_API_KEY: ${{ secrets.NEXT_PUBLIC_FIREBASE_API_KEY }}
          NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN: ${{ secrets.NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN }}
          NEXT_PUBLIC_FIREBASE_PROJECT_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_PROJECT_ID }}
          NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET: ${{ secrets.NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET }}
          NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID }}
          NEXT_PUBLIC_FIREBASE_APP_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_APP_ID }}
          NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID }}
          AFFINE_GOOGLE_CLIENT_ID: ${{ secrets.AFFINE_GOOGLE_CLIENT_ID }}
          AFFINE_GOOGLE_CLIENT_SECRET: ${{ secrets.AFFINE_GOOGLE_CLIENT_SECRET }}
          SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
          SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
          NEXT_PUBLIC_SENTRY_DSN: ${{ secrets.NEXT_PUBLIC_SENTRY_DSN }}
          SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
          API_SERVER_PROFILE: prod
          ENABLE_TEST_PROPERTIES: false
          ENABLE_IMAGE_PREVIEW_MODAL: false
          RELEASE_VERSION: ${{ needs.set-build-version.outputs.version }}
          ENABLE_BOOKMARK_OPERATION: true

      - name: Upload Artifact (web-static)
        uses: actions/upload-artifact@v3
        with:
          name: before-make-web-static
          path: apps/electron/resources/web-static

  make-distribution:
    environment: production
    strategy:
      # all combinations: macos-latest x64, macos-latest arm64, windows-latest x64, ubuntu-latest x64
      matrix:
        spec:
          - {
              os: macos-latest,
              platform: darwin,
              arch: x64,
              target: x86_64-apple-darwin,
            }
          - {
              os: macos-latest,
              platform: darwin,
              arch: arm64,
              target: aarch64-apple-darwin,
            }
          - {
              os: ubuntu-latest,
              platform: linux,
              arch: x64,
              target: x86_64-unknown-linux-gnu,
            }
          - {
              os: windows-latest,
              platform: win32,
              arch: x64,
              target: x86_64-pc-windows-msvc,
            }
    runs-on: ${{ matrix.spec.os }}
    needs:
      - before-make
      - set-build-version
    env:
      APPLE_ID: ${{ secrets.APPLE_ID }}
      APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
      APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
      SKIP_GENERATE_ASSETS: 1
    steps:
      - uses: actions/checkout@v3
      - name: Setup Node.js
        uses: ./.github/actions/setup-node
      - name: Build AFFiNE native
        uses: ./.github/actions/build-rust
        with:
          target: ${{ matrix.spec.target }}
      - name: Replace Version
        run: ./scripts/set-version.sh ${{ needs.set-build-version.outputs.version }}
      - uses: actions/download-artifact@v3
        with:
          name: before-make-web-static
          path: apps/electron/resources/web-static
      - name: Rebuild Electron dependences
        shell: bash
        run: |
          rm -rf apps/electron/node_modules/better-sqlite3/build
          yarn workspace @affine/electron rebuild:for-electron --arch=${{ matrix.spec.arch }}

      - name: Build layers
        run: yarn workspace @affine/electron build-layers

      - name: Signing By Apple Developer ID
        if: ${{ matrix.spec.platform == 'darwin' }}
        uses: apple-actions/import-codesign-certs@v2
        with:
          p12-file-base64: ${{ secrets.CERTIFICATES_P12 }}
          p12-password: ${{ secrets.CERTIFICATES_P12_PASSWORD }}

      - name: make
        run: yarn workspace @affine/electron make --platform=${{ matrix.spec.platform }} --arch=${{ matrix.spec.arch }}

      - name: Save artifacts (mac)
        if: ${{ matrix.spec.platform == 'darwin' }}
        run: |
          mkdir -p builds
          mv apps/electron/out/*/make/*.dmg ./builds/affine-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.dmg
          mv apps/electron/out/*/make/zip/darwin/${{ matrix.spec.arch }}/*.zip ./builds/affine-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.zip
      - name: Save artifacts (windows)
        if: ${{ matrix.spec.platform == 'win32' }}
        run: |
          mkdir -p builds
          mv apps/electron/out/*/make/zip/win32/x64/AFFiNE*-win32-x64-*.zip ./builds/affine-${{ env.BUILD_TYPE }}-windows-x64.zip
          mv apps/electron/out/*/make/squirrel.windows/x64/*.exe ./builds/affine-${{ env.BUILD_TYPE }}-windows-x64.exe
          mv apps/electron/out/*/make/squirrel.windows/x64/*.msi ./builds/affine-${{ env.BUILD_TYPE }}-windows-x64.msi
          mv apps/electron/out/*/make/squirrel.windows/x64/*.nupkg ./builds/affine-${{ env.BUILD_TYPE }}-windows-x64.nupkg

      - name: Save artifacts (linux)
        if: ${{ matrix.spec.platform == 'linux' }}
        run: |
          mkdir -p builds
          mv apps/electron/out/*/make/zip/linux/x64/*.zip ./builds/affine-${{ env.BUILD_TYPE }}-linux-x64.zip

      - name: Upload Artifact
        uses: actions/upload-artifact@v3
        with:
          name: affine-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}-builds
          path: builds

  release:
    needs:
      - make-distribution
      - set-build-version
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3
      - name: Download Artifacts (macos-x64)
        uses: actions/download-artifact@v3
        with:
          name: affine-darwin-x64-builds
          path: ./
      - name: Download Artifacts (macos-arm64)
        uses: actions/download-artifact@v3
        with:
          name: affine-darwin-arm64-builds
          path: ./
      - name: Download Artifacts (windows-x64)
        uses: actions/download-artifact@v3
        with:
          name: affine-win32-x64-builds
          path: ./
      - name: Download Artifacts (linux-x64)
        uses: actions/download-artifact@v3
        with:
          name: affine-linux-x64-builds
          path: ./
      - name: Setup Node.js
        uses: actions/setup-node@v3
        with:
          node-version: 18
      - name: Generate Release yml
        run: |
          cp ./apps/electron/scripts/generate-yml.js .
          node generate-yml.js
        env:
          RELEASE_VERSION: ${{ needs.set-build-version.outputs.version }}
      - name: Create Release Draft
        uses: softprops/action-gh-release@v1
        env:
          GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}
        with:
          repository: 'toeverything/AFFiNE-Releases'
          name: ${{ needs.set-build-version.outputs.version }}
          tag_name: ${{ needs.set-build-version.outputs.version }}
          prerelease: true
          files: |
            ./VERSION
            ./*.zip
            ./*.dmg
            ./*.exe
            ./*.nupkg
            ./RELEASES
            ./*.AppImage
            ./*.apk
            ./*.yml
.github/workflows/release-desktop-app.yml (92 changed lines, vendored)

@@ -36,6 +36,9 @@ concurrency:

env:
  BUILD_TYPE: ${{ github.event.inputs.build-type }}
  DEBUG: napi:*
  APP_NAME: affine
  MACOSX_DEPLOYMENT_TARGET: '10.13'

jobs:
  before-make:

@@ -46,8 +49,7 @@ jobs:
      - name: Setup Node.js
        uses: ./.github/actions/setup-node
      - name: generate-assets
        working-directory: apps/electron
        run: yarn generate-assets
        run: yarn workspace @affine/electron generate-assets
        env:
          NEXT_PUBLIC_FIREBASE_API_KEY: ${{ secrets.NEXT_PUBLIC_FIREBASE_API_KEY }}
          NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN: ${{ secrets.NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN }}

@@ -64,6 +66,9 @@ jobs:
          SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
          API_SERVER_PROFILE: prod
          ENABLE_TEST_PROPERTIES: false
          ENABLE_IMAGE_PREVIEW_MODAL: false
          RELEASE_VERSION: ${{ github.event.inputs.version }}
          ENABLE_BOOKMARK_OPERATION: true

      - name: Upload Artifact (web-static)
        uses: actions/upload-artifact@v3

@@ -71,28 +76,36 @@ jobs:
          name: before-make-web-static
          path: apps/electron/resources/web-static

      - name: Upload Artifact (electron dist)
        uses: actions/upload-artifact@v3
        with:
          name: before-make-electron-dist
          path: apps/electron/dist

      - name: Upload YML Build Script
        uses: actions/upload-artifact@v3
        with:
          name: release-yml-build-script
          path: apps/electron/scripts/generate-yml.js

  make-distribution:
    environment: ${{ github.ref_name == 'master' && 'production' || 'development' }}
    strategy:
      # all combinations: macos-latest x64, macos-latest arm64, windows-latest x64, ubuntu-latest x64
      matrix:
        spec:
          - { os: macos-latest, platform: macos, arch: x64 }
          - { os: macos-latest, platform: macos, arch: arm64 }
          - { os: ubuntu-latest, platform: linux, arch: x64 }
          - { os: windows-latest, platform: windows, arch: x64 }
          - {
              os: macos-latest,
              platform: darwin,
              arch: x64,
              target: x86_64-apple-darwin,
            }
          - {
              os: macos-latest,
              platform: darwin,
              arch: arm64,
              target: aarch64-apple-darwin,
            }
          - {
              os: ubuntu-latest,
              platform: linux,
              arch: x64,
              target: x86_64-unknown-linux-gnu,
            }
          - {
              os: windows-latest,
              platform: win32,
              arch: x64,
              target: x86_64-pc-windows-msvc,
            }
    runs-on: ${{ matrix.spec.os }}
    needs: before-make
    env:

@@ -104,34 +117,42 @@ jobs:
      - uses: actions/checkout@v3
      - name: Setup Node.js
        uses: ./.github/actions/setup-node
      - name: Build AFFiNE native
        uses: ./.github/actions/build-rust
        with:
          target: ${{ matrix.spec.target }}
      - uses: actions/download-artifact@v3
        with:
          name: before-make-web-static
          path: apps/electron/resources/web-static
      - uses: actions/download-artifact@v3
        with:
          name: before-make-electron-dist
          path: apps/electron/dist

      - name: Rebuild Electron dependences
        shell: bash
        run: |
          rm -rf apps/electron/node_modules/better-sqlite3/build
          yarn workspace @affine/electron rebuild:for-electron --arch=${{ matrix.spec.arch }}

      - name: Build layers
        run: yarn workspace @affine/electron build-layers

      - name: Signing By Apple Developer ID
        if: ${{ matrix.spec.platform == 'macos' }}
        if: ${{ matrix.spec.platform == 'darwin' }}
        uses: apple-actions/import-codesign-certs@v2
        with:
          p12-file-base64: ${{ secrets.CERTIFICATES_P12 }}
          p12-password: ${{ secrets.CERTIFICATES_P12_PASSWORD }}

      - name: make
        run: yarn make-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}
        working-directory: apps/electron
        run: yarn workspace @affine/electron make --platform=${{ matrix.spec.platform }} --arch=${{ matrix.spec.arch }}

      - name: Save artifacts (mac)
        if: ${{ matrix.spec.platform == 'macos' }}
        if: ${{ matrix.spec.platform == 'darwin' }}
        run: |
          mkdir -p builds
          mv apps/electron/out/*/make/*.dmg ./builds/affine-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.dmg
          mv apps/electron/out/*/make/zip/darwin/${{ matrix.spec.arch }}/*.zip ./builds/affine-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.zip
      - name: Save artifacts (windows)
        if: ${{ matrix.spec.platform == 'windows' }}
        if: ${{ matrix.spec.platform == 'win32' }}
        run: |
          mkdir -p builds
          mv apps/electron/out/*/make/zip/win32/x64/AFFiNE*-win32-x64-*.zip ./builds/affine-${{ env.BUILD_TYPE }}-windows-x64.zip

@@ -156,37 +177,36 @@ jobs:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3
      - name: Download Artifacts (macos-x64)
        uses: actions/download-artifact@v3
        with:
          name: affine-macos-x64-builds
          name: affine-darwin-x64-builds
          path: ./
      - name: Download Artifacts (macos-arm64)
        uses: actions/download-artifact@v3
        with:
          name: affine-macos-arm64-builds
          name: affine-darwin-arm64-builds
          path: ./
      - name: Download Artifacts (windows-x64)
        uses: actions/download-artifact@v3
        with:
          name: affine-windows-x64-builds
          name: affine-win32-x64-builds
          path: ./
      - name: Download Artifacts (linux-x64)
        uses: actions/download-artifact@v3
        with:
          name: affine-linux-x64-builds
          path: ./
      - name: Download Artifacts
        uses: actions/download-artifact@v3
        with:
          name: release-yml-build-script
          path: ./
      - uses: actions/setup-node@v3
        with:
          node-version: 18
      - name: Generate Release yml
        run: |
          RELEASE_VERSION=${{ github.event.inputs.version }} node generate-yml.js
          cp ./apps/electron/scripts/generate-yml.js .
          node generate-yml.js
        env:
          RELEASE_VERSION: ${{ github.event.inputs.version }}
      - name: Create Release Draft
        uses: softprops/action-gh-release@v1
        env:
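Both release workflows copy apps/electron/scripts/generate-yml.js next to the downloaded build artifacts and run it with RELEASE_VERSION set in the environment. The script itself is not included in this diff; the sketch below is only a guess at the general shape of such a release-metadata generator, and the file names, fields, and hashing it uses are assumptions rather than the repository's actual code.

```js
// Hypothetical sketch of a release-metadata generator, NOT the actual generate-yml.js.
// Assumes the packaged artifacts sit in the working directory and RELEASE_VERSION is set by the workflow.
const fs = require('node:fs');
const crypto = require('node:crypto');

const version = process.env.RELEASE_VERSION;
if (!version) throw new Error('RELEASE_VERSION is not set');

// Collect the packaged artifacts and record a checksum for each one.
const files = fs
  .readdirSync('.')
  .filter(name => /\.(zip|dmg|exe|nupkg)$/.test(name))
  .map(name => ({
    url: name,
    sha512: crypto.createHash('sha512').update(fs.readFileSync(name)).digest('base64'),
  }));

// Emit a minimal YAML document plus the VERSION file that the release step uploads.
const yml = [
  `version: ${version}`,
  'files:',
  ...files.map(f => `  - url: ${f.url}\n    sha512: ${f.sha512}`),
  `releaseDate: '${new Date().toISOString()}'`,
].join('\n');

fs.writeFileSync('latest.yml', yml);
fs.writeFileSync('VERSION', version);
```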
.gitignore (10 changed lines, vendored)

@@ -28,9 +28,9 @@ node_modules

# IDE - VSCode
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/settings.template.json
!.vscode/launch.template.json
!.vscode/extensions.json

# misc

@@ -66,3 +66,9 @@ i18n-generated.ts
# Cache
.eslintcache
next-env.d.ts

# Rust
target
*.node
tsconfig.node.tsbuildinfo
lib
@@ -1 +1,4 @@
pnpm-lock.yaml
target
lib
test-results
.taplo.toml (9 changed lines, new file)

@@ -0,0 +1,9 @@
exclude = ["node_modules/**/*.toml"]

[[rule]]
keys = ["dependencies", "*-dependencies"]

[rule.formatting]
align_entries = true
indent_tables = true
reorder_keys = true
@@ -6,6 +6,12 @@
      "name": "Run Dev",
      "request": "launch",
      "type": "node-terminal"
    },
    {
      "command": "yarn run dev:local",
      "name": "Run Dev Locally",
      "request": "launch",
      "type": "node-terminal"
    }
  ]
}

@@ -26,7 +26,6 @@
  "[toml]": {
    "editor.defaultFormatter": "tamasfe.even-better-toml"
  },
  "rust-analyzer.linkedProjects": ["packages/octobase-node/Cargo.toml"],
  "[typescriptreact]": {
    "editor.defaultFormatter": "esbenp.prettier-vscode"
  },

@@ -38,5 +37,6 @@
    "apps/electron/layers/**/*.spec.ts",
    "tests/unit/**/*.spec.ts",
    "tests/unit/**/*.spec.tsx"
  ]
  ],
  "deepscan.enable": true
}
Cargo.lock (785 changed lines, generated, new file)

@@ -0,0 +1,785 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3

The new lockfile pins the following packages; all non-workspace packages come from registry+https://github.com/rust-lang/crates.io-index with recorded checksums. The workspace crate affine_native 0.0.0 depends on anyhow, napi, napi-build, napi-derive, notify, once_cell, parking_lot, serde, serde_json, tokio, and uuid.

| Package | Version(s) |
|---|---|
| affine_native | 0.0.0 |
| aho-corasick | 1.0.1 |
| anyhow | 1.0.71 |
| autocfg | 1.1.0 |
| bitflags | 1.3.2, 2.2.1 |
| bytes | 1.4.0 |
| cfg-if | 1.0.0 |
| convert_case | 0.6.0 |
| crossbeam-channel | 0.5.8 |
| crossbeam-utils | 0.8.15 |
| ctor | 0.2.0 |
| filetime | 0.2.21 |
| fsevent-sys | 4.1.0 |
| getrandom | 0.2.9 |
| hermit-abi | 0.2.6 |
| inotify | 0.9.6 |
| inotify-sys | 0.1.5 |
| itoa | 1.0.6 |
| kqueue | 1.0.7 |
| kqueue-sys | 1.0.3 |
| libc | 0.2.144 |
| libloading | 0.7.4 |
| lock_api | 0.4.9 |
| log | 0.4.17 |
| memchr | 2.5.0 |
| mio | 0.8.6 |
| napi | 2.12.6 |
| napi-build | 2.0.1 |
| napi-derive | 2.12.5 |
| napi-derive-backend | 1.0.51 |
| napi-sys | 2.2.3 |
| notify | 6.0.0 |
| num_cpus | 1.15.0 |
| once_cell | 1.17.1 |
| parking_lot | 0.12.1 |
| parking_lot_core | 0.9.7 |
| pin-project-lite | 0.2.9 |
| ppv-lite86 | 0.2.17 |
| proc-macro2 | 1.0.56 |
| quote | 1.0.27 |
| rand | 0.8.5 |
| rand_chacha | 0.3.1 |
| rand_core | 0.6.4 |
| redox_syscall | 0.2.16 |
| regex | 1.8.1 |
| regex-syntax | 0.7.1 |
| ryu | 1.0.13 |
| same-file | 1.0.6 |
| scopeguard | 1.1.0 |
| semver | 1.0.17 |
| serde | 1.0.162 |
| serde_derive | 1.0.162 |
| serde_json | 1.0.96 |
| signal-hook-registry | 1.4.1 |
| smallvec | 1.10.0 |
| socket2 | 0.4.9 |
| syn | 1.0.109, 2.0.15 |
| tokio | 1.28.0 |
| tokio-macros | 2.1.0 |
| unicode-ident | 1.0.8 |
| unicode-segmentation | 1.10.1 |
| uuid | 1.3.2 |
| walkdir | 2.3.3 |
| wasi | 0.11.0+wasi-snapshot-preview1 |
| winapi | 0.3.9 |
| winapi-i686-pc-windows-gnu | 0.4.0 |
| winapi-util | 0.1.5 |
| winapi-x86_64-pc-windows-gnu | 0.4.0 |
| windows-sys | 0.45.0, 0.48.0 |
| windows-targets | 0.42.2, 0.48.0 |
| windows_aarch64_gnullvm | 0.42.2, 0.48.0 |
| windows_aarch64_msvc | 0.42.2, 0.48.0 |
| windows_i686_gnu | 0.42.2, 0.48.0 |
| windows_i686_msvc | 0.42.2, 0.48.0 |
| windows_x86_64_gnu | 0.42.2, 0.48.0 |
| windows_x86_64_gnullvm | 0.42.2, 0.48.0 |
| windows_x86_64_msvc | 0.42.2, 0.48.0 |
8
Cargo.toml
Normal file
@@ -0,0 +1,8 @@
[workspace]
members = ["./packages/native"]

[profile.release]
lto = true
codegen-units = 1
opt-level = 3
strip = "symbols"
20
README.md
@@ -24,12 +24,13 @@ See https://github.com/all-?/all-contributors/issues/361#issuecomment-637166066

<!-- ALL-CONTRIBUTORS-BADGE:END -->

[?style=flat-square&logoColor=white&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAACXBIWXMAAADAAAAAwAEwd99eAAABjElEQVRYhe1W0U3DMBB9RfyTDeoNyAYNG2QDOgJsECYgGxA26AZ4hIxgJqCZ4PjIGV+tUxK7raqiPsmKdXe5e3fOs7IiIlwSdxetfiNw7QRKAD0Ax/ssrI5QgQOw5v03AJOTJHcCL1x84LVmWzJyJlBg7P4BwCvb3pmIAbBPykZEqaulEU7YHNva1HypxUsKqIS9EvbynASs0n3ss+ciUIsuO8VvhL9emjdFBa3YO8XvALwpsZNYSqBB0PwUWgRZNksSL5GhlN0ngGd+dkpsD6AG8IGlslxwTh2fa09EBc3Dir32rRysuQlUAL54/wTAcpePPAXHPsOTGXhSEv69rAlYpZOt6DSO29J4D/TRRLJk6AvtaZSY9PkCFYVLqI9i/NF5YkkECgrXa6P4fVEn4iolrhNxRQqBZu7FqMNdZiMqAUPj2KdGZyicu1dHzlGqBHxn2sdTR53bmeJ+ebJd7LtXhGH4uQEwd0ttAPzMxGi5/6BdxTuMej41Bs59gGP+CU+Cq/4tvxH4HwR+Ab3Uqr/VGbqEAAAAAElFTkSuQmCC>)](https://app.affine.pro)
[?style=flat-square&logoColor=white&logo=affine>)](https://app.affine.pro)
[](https://affine.pro/download)
[](https://affine.pro/download)
[](https://affine.pro/download)
[](https://affine.pro/download)

[](https://github.com/toeverything/AFFiNE/releases/latest)
[![stars-icon]](https://github.com/toeverything/AFFiNE)
[![All Contributors][all-contributors-badge]](#contributors)
[![codecov]](https://codecov.io/gh/toeverything/AFFiNE)
@@ -44,7 +45,7 @@ See https://github.com/all-?/all-contributors/issues/361#issuecomment-637166066
---

<div align="center">
<a href="http://affine.pro"><img src="https://img.shields.io/badge/-AFFiNE-06449d?style=social&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAEsAAABLCAMAAAAPkIrYAAAAP1BMVEU8b9w8b9w+b947cNw7b9w6b908b909b9w8b9w7b9w8b9w7cN08b9w7b908b9w7b9w8b907cNw8b9w8b91HcEx3NJCJAAAAFXRSTlP/3QWSgA+lHPlu6Di4XtIrxk/xRADGudUoAAAB9UlEQVR42tWYwbKjIBREG0GJKkRj/v9bZ1ZvRC99rzib11tTB9qqnKoW3/+X38vy7ifzQ1b/wk/8Q1bCv3y6Z6wFh2x2llIRGB6xRhzz6p+wVhRJD1gRZZYHrADYSyqsjFPGZtYbuFESesUysZXlcMnYyJpxTW5keQh5N7G6CUJCE2uHFNfEGiBmbmB1H4jxDawNcqbuPmtAJTtj6RZ0lpIwiR5jNmgfNtHHwLXPWfFYcS2NMdxkjac/dNaNCJPo3yf9pFuseHbDrBsRFguGs8te8Q4rXzTjVSPCIHp3FePKWbzi30xE+4zlBMmoJaGLfpLUmAmLiN4Xyibahy76WZRQMLJ2WX27on2oFvQVac8yi4p+J2forA0V8W1c++AVS1f1H6p9KKLHxk9RWKmsyB+VLC76gV65DLjokdg5KmsEMXsiDwXWSmTc9ezSoKJHoi9zUVihbMHfQOSsXB7Mrz1S1huKPde69sEsiKgNt8hYTjiWlAyENeu7IFe1D15RSEBN+yCiXw17K1RZm/w7UtJVWYN8f1ZyLlkVb2bT4vIVVrINH1dqX2YttkHmIWsfVWs646wcRFYis6fIVGpfYq1kjpGSW8kSRD+xYSmXRM0Ang9eSZioVdy/5pWaLqzIRyIpuVxYozvGf1m67I7pf/s3UXv+AP61NI2Y+BbSAAAAAElFTkSuQmCC" height=25></a>
<a href="http://affine.pro"><img src="https://img.shields.io/badge/-AFFiNE-06449d?style=social&logo=affine" height=25></a>
<a href="https://community.affine.pro"><img src="https://img.shields.io/badge/-Community-424549?style=social&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAAXNJREFUWEftlitLRUEURtdVEVExWUx2qxgNVouoXYtNDP4Tw20WtftAsItZrHaTYBJREZ98MAc248wcZxi4CGfSeezHmm/23kyPAa/egPPTAXQK/FsFBP7ldVDRZoqcgO9I+2bHy3ZIJBfTCPCZM1tqAxwBmzUBrNQNbEx+5b0B5oEN4NCBrAMnMaiUAuPAs3HU82TLEZwBqwGbaJ4UgKQ8CFR6SoEl4LIWwCJwZQCegKkWBWLHVKSActvdzgG3DqitDf3/VQBskBDALrDnAKXUo3ueAF5KinAf2DKOmnzD7l214bdbA6hC1XHZNQa8hSBC0hwDa57xDHDvvvWB7ciOZoE79+8CWPbsBGc769eFxJdWIKcuyIdRoG3W7AAC1dJkHDIOo8B78+4rEBo8r4AkLFk6Jk3HaeDBBTgHVmIAfpJUz+cAFXVBreQCvQYW/lqEjV1NAMUMqpAaxQMHyDnjYtuS+0BxstwaqJooFqxToFPgB5FuPCEB6XK2AAAAAElFTkSuQmCC" height=25></a>
@@ -69,11 +70,11 @@ See https://github.com/all-?/all-contributors/issues/361#issuecomment-637166066

Before we tell you how to get started with AFFiNE, we'd like to shamelessly plug our awesome user and developer communities across [official social platforms](https://community.affine.pro/c/start-here/)! Once you’re familiar with using the software, maybe you will share your wisdom with others and even consider joining the [AFFiNE Ambassador program](https://community.affine.pro/c/start-here/affine-ambassador) to help spread AFFiNE to the world.

## Getting started & Stay tunned with us.
## Getting started & staying tuned with us.

⚠️ Please note that AFFiNE is still under active development and is not yet ready for production use. ⚠️

[](https://app.affine.pro) No installation or registration required! Head over to our website and try it out now.
[](https://app.affine.pro) No installation or registration required! Head over to our website and try it out now.

[](https://community.affine.pro) Our wonderful community, where you can meet and engage with the team, developers and other like-minded, enthusiastic users of AFFiNE.

@@ -119,6 +120,15 @@ If you have questions, you are welcome to contact us. One of the best places to
| [@toeverything/y-indexeddb](packages/y-indexeddb) | IndexedDB database adapter for Yjs | [](https://www.npmjs.com/package/@toeverything/y-indexeddb) |
| [@toeverything/theme](packages/theme) | AFFiNE theme | [](https://www.npmjs.com/package/@toeverything/theme) |

## Plugins

> Plugins are a way to extend the functionality of AFFiNE.

| Name | Description |
| ------------------------------------------------ | ----------------------------------------- |
| [@affine/bookmark-block](plugins/bookmark-block) | A block for bookmarking a website |
| [@affine/copilot](plugins/copilot) | AI Copilot that helps you with document writing |

## Thanks

We would also like to give thanks to open-source projects that make AFFiNE possible:
@@ -140,7 +150,7 @@ Thanks a lot to the community for providing such powerful and simple libraries,
We would like to express our gratitude to all the individuals who have already contributed to AFFiNE! If you have any AFFiNE-related project, documentation, tool or template, please feel free to contribute it by submitting a pull request to our curated list on GitHub: [awesome-affine](https://github.com/toeverything/awesome-affine).

<a href="https://github.com/toeverything/affine/graphs/contributors">
<img src="https://user-images.githubusercontent.com/5910926/233382206-312428ca-094a-4579-ae06-213961ed7eab.svg" />
<img src="https://user-images.githubusercontent.com/5910926/240508358-93eddded-48a0-40cd-85e4-a1d172dbe1d9.svg" />
</a>

## Self-Host

@@ -7,6 +7,7 @@ To run AFFiNE Desktop Client Application locally, run the following commands:
```sh
# in repo root
yarn install
yarn workspace @affine/native build
yarn dev

# in apps/electron

@@ -1,11 +1,16 @@
/* eslint-disable @typescript-eslint/no-var-requires */
const { z } = require('zod');

const {
utils: { fromBuildIdentifier },
} = require('@electron-forge/core');

const path = require('node:path');

const buildType = (process.env.BUILD_TYPE || 'stable').trim().toLowerCase();
const ReleaseTypeSchema = z.enum(['stable', 'beta', 'canary', 'internal']);

const envBuildType = (process.env.BUILD_TYPE || 'canary').trim().toLowerCase();
const buildType = ReleaseTypeSchema.parse(envBuildType);
const stableBuild = buildType === 'stable';
const productName = !stableBuild ? `AFFiNE-${buildType}` : 'AFFiNE';
const icoPath = !stableBuild
@@ -28,6 +33,7 @@ module.exports = {
packagerConfig: {
name: productName,
appBundleId: fromBuildIdentifier({
internal: 'pro.affine.internal',
canary: 'pro.affine.canary',
beta: 'pro.affine.beta',
stable: 'pro.affine.app',
@@ -88,7 +94,7 @@ module.exports = {
config: {
name: 'AFFiNE',
setupIcon: icoPath,
// loadingGif: './resources/icons/loading.gif',
loadingGif: './resources/icons/affine_installing.gif',
},
},
],

@@ -1,7 +0,0 @@
/* eslint-disable @typescript-eslint/consistent-type-imports */
// This file contains the main process events
// It will guide preload and main process on the correct event types and payloads

export type MainIPCHandlerMap = typeof import('./main/src/exposed').handlers;

export type MainIPCEventMap = typeof import('./main/src/exposed').events;
@@ -2,10 +2,11 @@ import assert from 'node:assert';
|
||||
import path from 'node:path';
|
||||
|
||||
import fs from 'fs-extra';
|
||||
import { v4 } from 'uuid';
|
||||
import { afterEach, beforeEach, describe, expect, test, vi } from 'vitest';
|
||||
import * as Y from 'yjs';
|
||||
|
||||
import type { MainIPCHandlerMap } from '../../../../constraints';
|
||||
import type { MainIPCHandlerMap } from '../exposed';
|
||||
|
||||
const registeredHandlers = new Map<
|
||||
string,
|
||||
@@ -40,6 +41,7 @@ ReturnType<MainIPCHandlerMap[T][F]> {
|
||||
}
|
||||
|
||||
const SESSION_DATA_PATH = path.join(__dirname, './tmp', 'affine-test');
|
||||
const DOCUMENTS_PATH = path.join(__dirname, './tmp', 'affine-test-documents');
|
||||
|
||||
const browserWindow = {
|
||||
isDestroyed: () => {
|
||||
@@ -61,6 +63,9 @@ const ipcMain = {
|
||||
handlers.push(callback);
|
||||
registeredHandlers.set(key, handlers);
|
||||
},
|
||||
setMaxListeners: (_n: number) => {
|
||||
// noop
|
||||
},
|
||||
};
|
||||
|
||||
const nativeTheme = {
|
||||
@@ -87,8 +92,12 @@ function compareBuffer(a: Uint8Array | null, b: Uint8Array | null) {
|
||||
const electronModule = {
|
||||
app: {
|
||||
getPath: (name: string) => {
|
||||
assert(name === 'sessionData');
|
||||
return SESSION_DATA_PATH;
|
||||
if (name === 'sessionData') {
|
||||
return SESSION_DATA_PATH;
|
||||
} else if (name === 'documents') {
|
||||
return DOCUMENTS_PATH;
|
||||
}
|
||||
throw new Error('not implemented');
|
||||
},
|
||||
name: 'affine-test',
|
||||
on: (name: string, callback: (...args: any[]) => any) => {
|
||||
@@ -96,6 +105,11 @@ const electronModule = {
|
||||
handlers.push(callback);
|
||||
registeredHandlers.set(name, handlers);
|
||||
},
|
||||
addEventListener: (...args: any[]) => {
|
||||
// @ts-ignore
|
||||
electronModule.app.on(...args);
|
||||
},
|
||||
removeEventListener: () => {},
|
||||
},
|
||||
BrowserWindow: {
|
||||
getAllWindows: () => {
|
||||
@@ -114,26 +128,28 @@ vi.doMock('electron', () => {
|
||||
});
|
||||
|
||||
beforeEach(async () => {
|
||||
const { registerHandlers } = await import('../register');
|
||||
const { registerHandlers } = await import('../handlers');
|
||||
registerHandlers();
|
||||
|
||||
// should also register events
|
||||
const { registerEvents } = await import('../../events');
|
||||
const { registerEvents } = await import('../events');
|
||||
registerEvents();
|
||||
await fs.mkdirp(SESSION_DATA_PATH);
|
||||
await import('../db/ensure-db');
|
||||
|
||||
registeredHandlers.get('ready')?.forEach(fn => fn());
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
const { cleanupSQLiteDBs } = await import('../db/ensure-db');
|
||||
await cleanupSQLiteDBs();
|
||||
await fs.remove(SESSION_DATA_PATH);
|
||||
|
||||
// reset registered handlers
|
||||
registeredHandlers.get('before-quit')?.forEach(fn => fn());
|
||||
|
||||
await fs.remove(SESSION_DATA_PATH);
|
||||
});
|
||||
|
||||
describe('ensureSQLiteDB', () => {
|
||||
test('should create db file on connection if it does not exist', async () => {
|
||||
const id = 'test-workspace-id';
|
||||
const id = v4();
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
const workspaceDB = await ensureSQLiteDB(id);
|
||||
const file = workspaceDB.path;
|
||||
@@ -141,75 +157,47 @@ describe('ensureSQLiteDB', () => {
|
||||
expect(fileExists).toBe(true);
|
||||
});
|
||||
|
||||
test('when db file is removed', async () => {
|
||||
// stub webContents.send
|
||||
const sendStub = vi.fn();
|
||||
browserWindow.webContents.send = sendStub;
|
||||
const id = 'test-workspace-id';
|
||||
test('should emit the same db instance for the same id', async () => {
|
||||
const [id1, id2] = [v4(), v4()];
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
let workspaceDB = await ensureSQLiteDB(id);
|
||||
const file = workspaceDB.path;
|
||||
const fileExists = await fs.pathExists(file);
|
||||
expect(fileExists).toBe(true);
|
||||
|
||||
await fs.remove(file);
|
||||
|
||||
// wait for 1000ms for file watcher to detect file removal
|
||||
await delay(2000);
|
||||
|
||||
expect(sendStub).toBeCalledWith('db:onDbFileMissing', id);
|
||||
|
||||
// ensureSQLiteDB should recreate the db file
|
||||
workspaceDB = await ensureSQLiteDB(id);
|
||||
const fileExists2 = await fs.pathExists(file);
|
||||
expect(fileExists2).toBe(true);
|
||||
const workspaceDB1 = await ensureSQLiteDB(id1);
|
||||
const workspaceDB2 = await ensureSQLiteDB(id2);
|
||||
const workspaceDB3 = await ensureSQLiteDB(id1);
|
||||
expect(workspaceDB1).toBe(workspaceDB3);
|
||||
expect(workspaceDB1).not.toBe(workspaceDB2);
|
||||
});
|
||||
|
||||
test('when db file is updated', async () => {
|
||||
// stub webContents.send
|
||||
const sendStub = vi.fn();
|
||||
browserWindow.webContents.send = sendStub;
|
||||
const id = 'test-workspace-id';
|
||||
test('when app quit, db should be closed', async () => {
|
||||
const id = v4();
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
const workspaceDB = await ensureSQLiteDB(id);
|
||||
const file = workspaceDB.path;
|
||||
const fileExists = await fs.pathExists(file);
|
||||
expect(fileExists).toBe(true);
|
||||
|
||||
// wait to make sure
|
||||
await delay(500);
|
||||
|
||||
// writes some data to the db file
|
||||
await fs.appendFile(file, 'random-data', { encoding: 'binary' });
|
||||
// write again
|
||||
await fs.appendFile(file, 'random-data', { encoding: 'binary' });
|
||||
|
||||
// wait for 200ms for file watcher to detect file change
|
||||
await delay(2000);
|
||||
|
||||
expect(sendStub).toBeCalledWith('db:onDbFileUpdate', id);
|
||||
|
||||
// should only call once for multiple writes
|
||||
expect(sendStub).toBeCalledTimes(1);
|
||||
registeredHandlers.get('before-quit')?.forEach(fn => fn());
|
||||
await delay(100);
|
||||
expect(workspaceDB.db).toBe(null);
|
||||
});
|
||||
});
|
||||
|
||||
describe('workspace handlers', () => {
|
||||
test('list all workspace ids', async () => {
|
||||
const ids = ['test-workspace-id', 'test-workspace-id-2'];
|
||||
const ids = [v4(), v4()];
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
await Promise.all(ids.map(id => ensureSQLiteDB(id)));
|
||||
const list = await dispatch('workspace', 'list');
|
||||
expect(list.map(([id]) => id)).toEqual(ids);
|
||||
expect(list.map(([id]) => id).sort()).toEqual(ids.sort());
|
||||
});
|
||||
|
||||
test('delete workspace', async () => {
|
||||
const ids = ['test-workspace-id', 'test-workspace-id-2'];
|
||||
const ids = [v4(), v4()];
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
await Promise.all(ids.map(id => ensureSQLiteDB(id)));
|
||||
await dispatch('workspace', 'delete', 'test-workspace-id-2');
|
||||
const dbs = await Promise.all(ids.map(id => ensureSQLiteDB(id)));
|
||||
await dispatch('workspace', 'delete', ids[1]);
|
||||
const list = await dispatch('workspace', 'list');
|
||||
expect(list.map(([id]) => id)).toEqual(['test-workspace-id']);
|
||||
expect(list.map(([id]) => id)).toEqual([ids[0]]);
|
||||
// deleted db should be closed
|
||||
expect(dbs[1].db).toBe(null);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -244,7 +232,7 @@ describe('UI handlers', () => {
|
||||
|
||||
describe('db handlers', () => {
|
||||
test('apply doc and get doc updates', async () => {
|
||||
const workspaceId = 'test-workspace-id';
|
||||
const workspaceId = v4();
|
||||
const bin = await dispatch('db', 'getDocAsUpdates', workspaceId);
|
||||
// ? is this a good test?
|
||||
expect(bin.every((byte: number) => byte === 0)).toBe(true);
|
||||
@@ -264,14 +252,14 @@ describe('db handlers', () => {
|
||||
});
|
||||
|
||||
test('get non existent blob', async () => {
|
||||
const workspaceId = 'test-workspace-id';
|
||||
const workspaceId = v4();
|
||||
const bin = await dispatch('db', 'getBlob', workspaceId, 'non-existent-id');
|
||||
expect(bin).toBeNull();
|
||||
});
|
||||
|
||||
test('list blobs (empty)', async () => {
|
||||
const workspaceId = 'test-workspace-id';
|
||||
const list = await dispatch('db', 'getPersistedBlobs', workspaceId);
|
||||
const workspaceId = v4();
|
||||
const list = await dispatch('db', 'getBlobKeys', workspaceId);
|
||||
expect(list).toEqual([]);
|
||||
});
|
||||
|
||||
@@ -301,14 +289,14 @@ describe('db handlers', () => {
|
||||
).toBe(true);
|
||||
|
||||
// list blobs
|
||||
let lists = await dispatch('db', 'getPersistedBlobs', workspaceId);
|
||||
let lists = await dispatch('db', 'getBlobKeys', workspaceId);
|
||||
expect(lists).toHaveLength(2);
|
||||
expect(lists).toContain('testBin');
|
||||
expect(lists).toContain('testBin2');
|
||||
|
||||
// delete blob
|
||||
await dispatch('db', 'deleteBlob', workspaceId, 'testBin');
|
||||
lists = await dispatch('db', 'getPersistedBlobs', workspaceId);
|
||||
lists = await dispatch('db', 'getBlobKeys', workspaceId);
|
||||
expect(lists).toEqual(['testBin2']);
|
||||
});
|
||||
});
|
||||
@@ -318,7 +306,7 @@ describe('dialog handlers', () => {
|
||||
const mockShowItemInFolder = vi.fn();
|
||||
electronModule.shell.showItemInFolder = mockShowItemInFolder;
|
||||
|
||||
const id = 'test-workspace-id';
|
||||
const id = v4();
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
const db = await ensureSQLiteDB(id);
|
||||
|
||||
@@ -334,13 +322,15 @@ describe('dialog handlers', () => {
|
||||
electronModule.dialog.showSaveDialog = mockShowSaveDialog;
|
||||
electronModule.shell.showItemInFolder = mockShowItemInFolder;
|
||||
|
||||
const id = 'test-workspace-id';
|
||||
const id = v4();
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
await ensureSQLiteDB(id);
|
||||
|
||||
await dispatch('dialog', 'saveDBFileAs', id);
|
||||
expect(mockShowSaveDialog).toBeCalled();
|
||||
expect(mockShowItemInFolder).not.toBeCalled();
|
||||
electronModule.dialog = {};
|
||||
electronModule.shell = {};
|
||||
});
|
||||
|
||||
test('saveDBFileAs', async () => {
|
||||
@@ -352,7 +342,7 @@ describe('dialog handlers', () => {
|
||||
electronModule.dialog.showSaveDialog = mockShowSaveDialog;
|
||||
electronModule.shell.showItemInFolder = mockShowItemInFolder;
|
||||
|
||||
const id = 'test-workspace-id';
|
||||
const id = v4();
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
await ensureSQLiteDB(id);
|
||||
|
||||
@@ -388,10 +378,10 @@ describe('dialog handlers', () => {
|
||||
expect(res.error).toBe('DB_FILE_PATH_INVALID');
|
||||
});
|
||||
|
||||
test('loadDBFile (error, not a valid db file)', async () => {
|
||||
test('loadDBFile (error, not a valid affine file)', async () => {
|
||||
// create a random db file
|
||||
const basePath = path.join(SESSION_DATA_PATH, 'random-path');
|
||||
const dbPath = path.join(basePath, 'xxx.db');
|
||||
const dbPath = path.join(basePath, 'xxx.affine');
|
||||
await fs.ensureDir(basePath);
|
||||
await fs.writeFile(dbPath, 'hello world');
|
||||
|
||||
@@ -403,70 +393,102 @@ describe('dialog handlers', () => {
|
||||
const res = await dispatch('dialog', 'loadDBFile');
|
||||
expect(mockShowOpenDialog).toBeCalled();
|
||||
expect(res.error).toBe('DB_FILE_INVALID');
|
||||
|
||||
electronModule.dialog = {};
|
||||
});
|
||||
|
||||
test('loadDBFile', async () => {
|
||||
test('loadDBFile (correct)', async () => {
|
||||
// we use ensureSQLiteDB to create a valid db file
|
||||
const id = 'test-workspace-id';
|
||||
const id = v4();
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
const db = await ensureSQLiteDB(id);
|
||||
|
||||
// copy db file to dbPath
|
||||
const basePath = path.join(SESSION_DATA_PATH, 'random-path');
|
||||
const originDBFilePath = path.join(basePath, 'xxx.db');
|
||||
const clonedDBPath = path.join(basePath, 'xxx.affine');
|
||||
await fs.ensureDir(basePath);
|
||||
await fs.copyFile(db.path, originDBFilePath);
|
||||
await fs.copyFile(db.path, clonedDBPath);
|
||||
|
||||
// remove db
|
||||
await fs.remove(db.path);
|
||||
// delete workspace
|
||||
await dispatch('workspace', 'delete', id);
|
||||
|
||||
// try load originDBFilePath
|
||||
const mockShowOpenDialog = vi.fn(() => {
|
||||
return { filePaths: [originDBFilePath] };
|
||||
return { filePaths: [clonedDBPath] };
|
||||
}) as any;
|
||||
electronModule.dialog.showOpenDialog = mockShowOpenDialog;
|
||||
|
||||
const res = await dispatch('dialog', 'loadDBFile');
|
||||
expect(mockShowOpenDialog).toBeCalled();
|
||||
expect(res.workspaceId).not.toBeUndefined();
|
||||
const newId = res.workspaceId;
|
||||
|
||||
const importedDb = await ensureSQLiteDB(res.workspaceId!);
|
||||
expect(await fs.realpath(importedDb.path)).toBe(originDBFilePath);
|
||||
expect(importedDb.path).not.toBe(originDBFilePath);
|
||||
expect(newId).not.toBeUndefined();
|
||||
|
||||
assert(newId);
|
||||
|
||||
const meta = await dispatch('workspace', 'getMeta', newId);
|
||||
|
||||
expect(meta.secondaryDBPath).toBe(clonedDBPath);
|
||||
|
||||
// try load it again, will trigger error (db file already loaded)
|
||||
const res2 = await dispatch('dialog', 'loadDBFile');
|
||||
expect(res2.error).toBe('DB_FILE_ALREADY_LOADED');
|
||||
});
|
||||
|
||||
test('moveDBFile', async () => {
|
||||
const newPath = path.join(SESSION_DATA_PATH, 'affine-test', 'xxx');
|
||||
const mockShowSaveDialog = vi.fn(() => {
|
||||
return { filePath: newPath };
|
||||
test('moveDBFile (valid)', async () => {
|
||||
const sendStub = vi.fn();
|
||||
browserWindow.webContents.send = sendStub;
|
||||
const newPath = path.join(SESSION_DATA_PATH, 'xxx');
|
||||
const showOpenDialog = vi.fn(() => {
|
||||
return { filePaths: [newPath] };
|
||||
}) as any;
|
||||
electronModule.dialog.showSaveDialog = mockShowSaveDialog;
|
||||
electronModule.dialog.showOpenDialog = showOpenDialog;
|
||||
|
||||
const id = 'test-workspace-id';
|
||||
const id = v4();
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
await ensureSQLiteDB(id);
|
||||
|
||||
const db = await ensureSQLiteDB(id);
|
||||
const res = await dispatch('dialog', 'moveDBFile', id);
|
||||
expect(mockShowSaveDialog).toBeCalled();
|
||||
expect(res.filePath).toBe(newPath);
|
||||
expect(showOpenDialog).toBeCalled();
|
||||
assert(res.filePath);
|
||||
expect(path.dirname(res.filePath)).toBe(newPath);
|
||||
expect(res.filePath.endsWith('.affine')).toBe(true);
|
||||
// should also send workspace meta change event
|
||||
expect(sendStub).toBeCalledWith('workspace:onMetaChange', {
|
||||
workspaceId: id,
|
||||
meta: { id, secondaryDBPath: res.filePath, mainDBPath: db.path },
|
||||
});
|
||||
electronModule.dialog = {};
|
||||
browserWindow.webContents.send = () => {};
|
||||
});
|
||||
|
||||
test('moveDBFile (skipped)', async () => {
|
||||
const mockShowSaveDialog = vi.fn(() => {
|
||||
return { filePath: null };
|
||||
test('moveDBFile (canceled)', async () => {
|
||||
const showOpenDialog = vi.fn(() => {
|
||||
return { filePaths: null };
|
||||
}) as any;
|
||||
electronModule.dialog.showSaveDialog = mockShowSaveDialog;
|
||||
electronModule.dialog.showOpenDialog = showOpenDialog;
|
||||
|
||||
const id = 'test-workspace-id';
|
||||
const id = v4();
|
||||
const { ensureSQLiteDB } = await import('../db/ensure-db');
|
||||
await ensureSQLiteDB(id);
|
||||
|
||||
const res = await dispatch('dialog', 'moveDBFile', id);
|
||||
expect(mockShowSaveDialog).toBeCalled();
|
||||
expect(showOpenDialog).toBeCalled();
|
||||
expect(res.filePath).toBe(undefined);
|
||||
electronModule.dialog = {};
|
||||
});
|
||||
});
|
||||
|
||||
describe('applicationMenu', () => {
|
||||
// test some basic IPC events
|
||||
test('applicationMenu event', async () => {
|
||||
const { applicationMenuSubjects } = await import('../application-menu');
|
||||
const sendStub = vi.fn();
|
||||
browserWindow.webContents.send = sendStub;
|
||||
applicationMenuSubjects.newPageAction.next();
|
||||
expect(sendStub).toHaveBeenCalledWith(
|
||||
'applicationMenu:onNewPageAction',
|
||||
undefined
|
||||
);
|
||||
browserWindow.webContents.send = () => {};
|
||||
});
|
||||
});
|
||||
142
apps/electron/layers/main/src/application-menu/create.ts
Normal file
@@ -0,0 +1,142 @@
|
||||
import { app, Menu } from 'electron';
|
||||
|
||||
import { revealLogFile } from '../logger';
|
||||
import { checkForUpdatesAndNotify } from '../updater';
|
||||
import { isMacOS } from '../utils';
|
||||
import { applicationMenuSubjects } from './subject';
|
||||
|
||||
// Unique id for menuitems
|
||||
const MENUITEM_NEW_PAGE = 'affine:new-page';
|
||||
|
||||
export function createApplicationMenu() {
|
||||
const isMac = isMacOS();
|
||||
|
||||
// Electron menu cannot be modified
|
||||
// You have to copy the complete default menu template even if you want to add a single custom item
|
||||
// See https://www.electronjs.org/docs/latest/api/menu#examples
|
||||
const template = [
|
||||
// { role: 'appMenu' }
|
||||
...(isMac
|
||||
? [
|
||||
{
|
||||
label: app.name,
|
||||
submenu: [
|
||||
{ role: 'about' },
|
||||
{ type: 'separator' },
|
||||
{ role: 'services' },
|
||||
{ type: 'separator' },
|
||||
{ role: 'hide' },
|
||||
{ role: 'hideOthers' },
|
||||
{ role: 'unhide' },
|
||||
{ type: 'separator' },
|
||||
{ role: 'quit' },
|
||||
],
|
||||
},
|
||||
]
|
||||
: []),
|
||||
// { role: 'fileMenu' }
|
||||
{
|
||||
label: 'File',
|
||||
submenu: [
|
||||
{
|
||||
id: MENUITEM_NEW_PAGE,
|
||||
label: 'New Page',
|
||||
accelerator: isMac ? 'Cmd+N' : 'Ctrl+N',
|
||||
click: () => {
|
||||
applicationMenuSubjects.newPageAction.next();
|
||||
},
|
||||
},
|
||||
{ type: 'separator' },
|
||||
isMac ? { role: 'close' } : { role: 'quit' },
|
||||
],
|
||||
},
|
||||
// { role: 'editMenu' }
|
||||
{
|
||||
label: 'Edit',
|
||||
submenu: [
|
||||
{ role: 'undo' },
|
||||
{ role: 'redo' },
|
||||
{ type: 'separator' },
|
||||
{ role: 'cut' },
|
||||
{ role: 'copy' },
|
||||
{ role: 'paste' },
|
||||
...(isMac
|
||||
? [
|
||||
{ role: 'pasteAndMatchStyle' },
|
||||
{ role: 'delete' },
|
||||
{ role: 'selectAll' },
|
||||
{ type: 'separator' },
|
||||
{
|
||||
label: 'Speech',
|
||||
submenu: [{ role: 'startSpeaking' }, { role: 'stopSpeaking' }],
|
||||
},
|
||||
]
|
||||
: [{ role: 'delete' }, { type: 'separator' }, { role: 'selectAll' }]),
|
||||
],
|
||||
},
|
||||
// { role: 'viewMenu' }
|
||||
{
|
||||
label: 'View',
|
||||
submenu: [
|
||||
{ role: 'reload' },
|
||||
{ role: 'forceReload' },
|
||||
{ role: 'toggleDevTools' },
|
||||
{ type: 'separator' },
|
||||
{ role: 'resetZoom' },
|
||||
{ role: 'zoomIn' },
|
||||
{ role: 'zoomOut' },
|
||||
{ type: 'separator' },
|
||||
{ role: 'togglefullscreen' },
|
||||
],
|
||||
},
|
||||
// { role: 'windowMenu' }
|
||||
{
|
||||
label: 'Window',
|
||||
submenu: [
|
||||
{ role: 'minimize' },
|
||||
{ role: 'zoom' },
|
||||
...(isMac
|
||||
? [
|
||||
{ type: 'separator' },
|
||||
{ role: 'front' },
|
||||
{ type: 'separator' },
|
||||
{ role: 'window' },
|
||||
]
|
||||
: [{ role: 'close' }]),
|
||||
],
|
||||
},
|
||||
{
|
||||
role: 'help',
|
||||
submenu: [
|
||||
{
|
||||
label: 'Learn More',
|
||||
click: async () => {
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const { shell } = require('electron');
|
||||
await shell.openExternal('https://affine.pro/');
|
||||
},
|
||||
},
|
||||
{
|
||||
label: 'Open log file',
|
||||
click: async () => {
|
||||
revealLogFile();
|
||||
},
|
||||
},
|
||||
{
|
||||
label: 'Check for Updates',
|
||||
click: async () => {
|
||||
await checkForUpdatesAndNotify(true);
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
// @ts-ignore The snippet is copied from Electron official docs.
|
||||
// It's working as expected. No idea why it contains type errors.
|
||||
// Just ignore for now.
|
||||
const menu = Menu.buildFromTemplate(template);
|
||||
Menu.setApplicationMenu(menu);
|
||||
|
||||
return menu;
|
||||
}
|
||||
20
apps/electron/layers/main/src/application-menu/index.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
import type { MainEventListener } from '../type';
|
||||
import { applicationMenuSubjects } from './subject';
|
||||
|
||||
export * from './create';
|
||||
export * from './subject';
|
||||
|
||||
/**
|
||||
* Events triggered by application menu
|
||||
*/
|
||||
export const applicationMenuEvents = {
|
||||
/**
|
||||
* File -> New Page
|
||||
*/
|
||||
onNewPageAction: (fn: () => void) => {
|
||||
const sub = applicationMenuSubjects.newPageAction.subscribe(fn);
|
||||
return () => {
|
||||
sub.unsubscribe();
|
||||
};
|
||||
},
|
||||
} satisfies Record<string, MainEventListener>;
|
||||
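For orientation, a minimal sketch of how the `applicationMenuEvents` export above might be consumed in the main process. This is illustrative only; the handler body and where the subscription lives are assumptions, not part of the diff.

```ts
import { applicationMenuEvents } from './application-menu';

// Listen for File -> New Page; onNewPageAction returns a disposer
// that removes the subscription again.
const unsubscribe = applicationMenuEvents.onNewPageAction(() => {
  // hypothetical handler: forward the action to the focused window
  console.log('application menu requested a new page');
});

// e.g. on window close / app quit
unsubscribe();
```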
@@ -0,0 +1,5 @@
|
||||
import { Subject } from 'rxjs';
|
||||
|
||||
export const applicationMenuSubjects = {
|
||||
newPageAction: new Subject<void>(),
|
||||
};
|
||||
1
apps/electron/layers/main/src/db/__tests__/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
tmp
|
||||
147
apps/electron/layers/main/src/db/__tests__/ensure-db.spec.ts
Normal file
@@ -0,0 +1,147 @@
|
||||
import path from 'node:path';
|
||||
|
||||
import fs from 'fs-extra';
|
||||
import { v4 } from 'uuid';
|
||||
import { afterEach, beforeEach, expect, test, vi } from 'vitest';
|
||||
|
||||
const tmpDir = path.join(__dirname, 'tmp');
|
||||
|
||||
const registeredHandlers = new Map<
|
||||
string,
|
||||
((...args: any[]) => Promise<any>)[]
|
||||
>();
|
||||
|
||||
const SESSION_DATA_PATH = path.join(tmpDir, 'affine-test');
|
||||
const DOCUMENTS_PATH = path.join(tmpDir, 'affine-test-documents');
|
||||
|
||||
const electronModule = {
|
||||
app: {
|
||||
getPath: (name: string) => {
|
||||
if (name === 'sessionData') {
|
||||
return SESSION_DATA_PATH;
|
||||
} else if (name === 'documents') {
|
||||
return DOCUMENTS_PATH;
|
||||
}
|
||||
throw new Error('not implemented');
|
||||
},
|
||||
name: 'affine-test',
|
||||
on: (name: string, callback: (...args: any[]) => any) => {
|
||||
const handlers = registeredHandlers.get(name) || [];
|
||||
handlers.push(callback);
|
||||
registeredHandlers.set(name, handlers);
|
||||
},
|
||||
addEventListener: (...args: any[]) => {
|
||||
// @ts-ignore
|
||||
electronModule.app.on(...args);
|
||||
},
|
||||
removeEventListener: () => {},
|
||||
},
|
||||
shell: {} as Partial<Electron.Shell>,
|
||||
dialog: {} as Partial<Electron.Dialog>,
|
||||
};
|
||||
|
||||
const runHandler = (key: string) => {
|
||||
registeredHandlers.get(key)?.forEach(handler => handler());
|
||||
};
|
||||
|
||||
// dynamically import handlers so that we can inject local variables to mocks
|
||||
vi.doMock('electron', () => {
|
||||
return electronModule;
|
||||
});
|
||||
|
||||
const constructorStub = vi.fn();
|
||||
const destroyStub = vi.fn();
|
||||
|
||||
vi.doMock('../secondary-db', () => {
|
||||
return {
|
||||
SecondaryWorkspaceSQLiteDB: class {
|
||||
constructor(...args: any[]) {
|
||||
constructorStub(...args);
|
||||
}
|
||||
|
||||
destroy = destroyStub;
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
vi.useFakeTimers({ shouldAdvanceTime: true });
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
runHandler('before-quit');
|
||||
await fs.remove(tmpDir);
|
||||
vi.useRealTimers();
|
||||
});
|
||||
|
||||
test('can get a valid WorkspaceSQLiteDB', async () => {
|
||||
const { ensureSQLiteDB } = await import('../ensure-db');
|
||||
const workspaceId = v4();
|
||||
const db0 = await ensureSQLiteDB(workspaceId);
|
||||
expect(db0).toBeDefined();
|
||||
expect(db0.workspaceId).toBe(workspaceId);
|
||||
|
||||
const db1 = await ensureSQLiteDB(v4());
|
||||
expect(db1).not.toBe(db0);
|
||||
expect(db1.workspaceId).not.toBe(db0.workspaceId);
|
||||
|
||||
// ensure that the db is cached
|
||||
expect(await ensureSQLiteDB(workspaceId)).toBe(db0);
|
||||
});
|
||||
|
||||
test('db should be destroyed when app quits', async () => {
|
||||
const { ensureSQLiteDB } = await import('../ensure-db');
|
||||
const workspaceId = v4();
|
||||
const db0 = await ensureSQLiteDB(workspaceId);
|
||||
const db1 = await ensureSQLiteDB(v4());
|
||||
|
||||
expect(db0.db).not.toBeNull();
|
||||
expect(db1.db).not.toBeNull();
|
||||
|
||||
runHandler('before-quit');
|
||||
|
||||
expect(db0.db).toBeNull();
|
||||
expect(db1.db).toBeNull();
|
||||
});
|
||||
|
||||
test('if db has a secondary db path, we should also poll that', async () => {
|
||||
const { ensureSQLiteDB } = await import('../ensure-db');
|
||||
const { appContext } = await import('../../context');
|
||||
const { storeWorkspaceMeta } = await import('../../workspace');
|
||||
const workspaceId = v4();
|
||||
await storeWorkspaceMeta(appContext, workspaceId, {
|
||||
secondaryDBPath: path.join(tmpDir, 'secondary.db'),
|
||||
});
|
||||
|
||||
const db = await ensureSQLiteDB(workspaceId);
|
||||
|
||||
await vi.advanceTimersByTimeAsync(1500);
|
||||
|
||||
// not sure why but we still need to wait with real timer
|
||||
await new Promise(resolve => setTimeout(resolve, 100));
|
||||
|
||||
expect(constructorStub).toBeCalledTimes(1);
|
||||
expect(constructorStub).toBeCalledWith(path.join(tmpDir, 'secondary.db'), db);
|
||||
|
||||
// if secondary meta is changed
|
||||
await storeWorkspaceMeta(appContext, workspaceId, {
|
||||
secondaryDBPath: path.join(tmpDir, 'secondary2.db'),
|
||||
});
|
||||
|
||||
await vi.advanceTimersByTimeAsync(1500);
|
||||
expect(constructorStub).toBeCalledTimes(2);
|
||||
expect(destroyStub).toBeCalledTimes(1);
|
||||
|
||||
// if secondary meta is changed (but another workspace)
|
||||
await storeWorkspaceMeta(appContext, v4(), {
|
||||
secondaryDBPath: path.join(tmpDir, 'secondary3.db'),
|
||||
});
|
||||
await vi.advanceTimersByTimeAsync(1500);
|
||||
expect(constructorStub).toBeCalledTimes(2);
|
||||
expect(destroyStub).toBeCalledTimes(1);
|
||||
|
||||
// if primary is destroyed, secondary should also be destroyed
|
||||
db.destroy();
|
||||
await new Promise(resolve => setTimeout(resolve, 100));
|
||||
expect(destroyStub).toBeCalledTimes(2);
|
||||
});
|
||||
@@ -0,0 +1,101 @@
|
||||
import path from 'node:path';
|
||||
|
||||
import fs from 'fs-extra';
|
||||
import { v4 } from 'uuid';
|
||||
import { afterEach, expect, test, vi } from 'vitest';
|
||||
import * as Y from 'yjs';
|
||||
|
||||
import type { AppContext } from '../../context';
|
||||
import { dbSubjects } from '../subjects';
|
||||
|
||||
const tmpDir = path.join(__dirname, 'tmp');
|
||||
|
||||
const testAppContext: AppContext = {
|
||||
appDataPath: path.join(tmpDir, 'test-data'),
|
||||
appName: 'test',
|
||||
};
|
||||
|
||||
afterEach(async () => {
|
||||
if (process.platform !== 'win32') {
|
||||
// hmmm ....
|
||||
await fs.remove(tmpDir);
|
||||
}
|
||||
});
|
||||
|
||||
function getTestUpdates() {
|
||||
const testYDoc = new Y.Doc();
|
||||
const yText = testYDoc.getText('test');
|
||||
yText.insert(0, 'hello');
|
||||
const updates = Y.encodeStateAsUpdate(testYDoc);
|
||||
|
||||
return updates;
|
||||
}
|
||||
test('can create new db file if not exists', async () => {
|
||||
const { openWorkspaceDatabase } = await import('../workspace-db-adapter');
|
||||
const workspaceId = v4();
|
||||
const db = await openWorkspaceDatabase(testAppContext, workspaceId);
|
||||
const dbPath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
`workspaces/${workspaceId}`,
|
||||
`storage.db`
|
||||
);
|
||||
expect(await fs.exists(dbPath)).toBe(true);
|
||||
db.destroy();
|
||||
});
|
||||
|
||||
test('on applyUpdate (from self), will not trigger update', async () => {
|
||||
const { openWorkspaceDatabase } = await import('../workspace-db-adapter');
|
||||
const workspaceId = v4();
|
||||
const onUpdate = vi.fn();
|
||||
|
||||
const db = await openWorkspaceDatabase(testAppContext, workspaceId);
|
||||
db.update$.subscribe(onUpdate);
|
||||
db.applyUpdate(getTestUpdates(), 'self');
|
||||
expect(onUpdate).not.toHaveBeenCalled();
|
||||
db.destroy();
|
||||
});
|
||||
|
||||
test('on applyUpdate (from renderer), will trigger update', async () => {
|
||||
const { openWorkspaceDatabase } = await import('../workspace-db-adapter');
|
||||
const workspaceId = v4();
|
||||
const onUpdate = vi.fn();
|
||||
const onExternalUpdate = vi.fn();
|
||||
|
||||
const db = await openWorkspaceDatabase(testAppContext, workspaceId);
|
||||
db.update$.subscribe(onUpdate);
|
||||
const sub = dbSubjects.externalUpdate.subscribe(onExternalUpdate);
|
||||
db.applyUpdate(getTestUpdates(), 'renderer');
|
||||
expect(onUpdate).toHaveBeenCalled();
|
||||
sub.unsubscribe();
|
||||
db.destroy();
|
||||
});
|
||||
|
||||
test('on applyUpdate (from external), will trigger update & send external update event', async () => {
|
||||
const { openWorkspaceDatabase } = await import('../workspace-db-adapter');
|
||||
const workspaceId = v4();
|
||||
const onUpdate = vi.fn();
|
||||
const onExternalUpdate = vi.fn();
|
||||
|
||||
const db = await openWorkspaceDatabase(testAppContext, workspaceId);
|
||||
db.update$.subscribe(onUpdate);
|
||||
const sub = dbSubjects.externalUpdate.subscribe(onExternalUpdate);
|
||||
db.applyUpdate(getTestUpdates(), 'external');
|
||||
expect(onUpdate).toHaveBeenCalled();
|
||||
expect(onExternalUpdate).toHaveBeenCalled();
|
||||
sub.unsubscribe();
|
||||
db.destroy();
|
||||
});
|
||||
|
||||
test('on destroy, check if resources have been released', async () => {
|
||||
const { openWorkspaceDatabase } = await import('../workspace-db-adapter');
|
||||
const workspaceId = v4();
|
||||
const db = await openWorkspaceDatabase(testAppContext, workspaceId);
|
||||
const updateSub = {
|
||||
complete: vi.fn(),
|
||||
next: vi.fn(),
|
||||
};
|
||||
db.update$ = updateSub as any;
|
||||
db.destroy();
|
||||
expect(db.db).toBe(null);
|
||||
expect(updateSub.complete).toHaveBeenCalled();
|
||||
});
|
||||
152
apps/electron/layers/main/src/db/base-db-adapter.ts
Normal file
@@ -0,0 +1,152 @@
|
||||
import assert from 'assert';
|
||||
import type { Database } from 'better-sqlite3';
|
||||
import sqlite from 'better-sqlite3';
|
||||
|
||||
import { logger } from '../logger';
|
||||
|
||||
const schemas = [
|
||||
`CREATE TABLE IF NOT EXISTS "updates" (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
data BLOB NOT NULL,
|
||||
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
|
||||
)`,
|
||||
`CREATE TABLE IF NOT EXISTS "blobs" (
|
||||
key TEXT PRIMARY KEY NOT NULL,
|
||||
data BLOB NOT NULL,
|
||||
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
|
||||
)`,
|
||||
];
|
||||
|
||||
interface UpdateRow {
|
||||
id: number;
|
||||
data: Buffer;
|
||||
timestamp: string;
|
||||
}
|
||||
|
||||
interface BlobRow {
|
||||
key: string;
|
||||
data: Buffer;
|
||||
timestamp: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* A base class for SQLite DB adapter that provides basic methods around updates & blobs
|
||||
*/
|
||||
export abstract class BaseSQLiteAdapter {
|
||||
db: Database | null = null;
|
||||
abstract role: string;
|
||||
|
||||
constructor(public path: string) {}
|
||||
|
||||
ensureTables() {
|
||||
assert(this.db, 'db is not connected');
|
||||
this.db.exec(schemas.join(';'));
|
||||
}
|
||||
|
||||
// todo: what if SQLite DB wrapper later is not sync?
|
||||
connect(): Database | undefined {
|
||||
if (this.db) {
|
||||
return this.db;
|
||||
}
|
||||
logger.log(`[SQLiteAdapter][${this.role}] open db`, this.path);
|
||||
const db = (this.db = sqlite(this.path));
|
||||
this.ensureTables();
|
||||
return db;
|
||||
}
|
||||
|
||||
destroy() {
|
||||
this.db?.close();
|
||||
this.db = null;
|
||||
}
|
||||
|
||||
addBlob(key: string, data: Uint8Array) {
|
||||
try {
|
||||
assert(this.db, 'db is not connected');
|
||||
const statement = this.db.prepare(
|
||||
'INSERT INTO blobs (key, data) VALUES (?, ?) ON CONFLICT(key) DO UPDATE SET data = ?'
|
||||
);
|
||||
statement.run(key, data, data);
|
||||
return key;
|
||||
} catch (error) {
|
||||
logger.error('addBlob', error);
|
||||
}
|
||||
}
|
||||
|
||||
getBlob(key: string) {
|
||||
try {
|
||||
assert(this.db, 'db is not connected');
|
||||
const statement = this.db.prepare('SELECT data FROM blobs WHERE key = ?');
|
||||
const row = statement.get(key) as BlobRow;
|
||||
if (!row) {
|
||||
return null;
|
||||
}
|
||||
return row.data;
|
||||
} catch (error) {
|
||||
logger.error('getBlob', error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
deleteBlob(key: string) {
|
||||
try {
|
||||
assert(this.db, 'db is not connected');
|
||||
const statement = this.db.prepare('DELETE FROM blobs WHERE key = ?');
|
||||
statement.run(key);
|
||||
} catch (error) {
|
||||
logger.error('deleteBlob', error);
|
||||
}
|
||||
}
|
||||
|
||||
getBlobKeys() {
|
||||
try {
|
||||
assert(this.db, 'db is not connected');
|
||||
const statement = this.db.prepare('SELECT key FROM blobs');
|
||||
const rows = statement.all() as BlobRow[];
|
||||
return rows.map(row => row.key);
|
||||
} catch (error) {
|
||||
logger.error('getBlobKeys', error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
getUpdates() {
|
||||
try {
|
||||
assert(this.db, 'db is not connected');
|
||||
const statement = this.db.prepare('SELECT * FROM updates');
|
||||
const rows = statement.all() as UpdateRow[];
|
||||
return rows;
|
||||
} catch (error) {
|
||||
logger.error('getUpdates', error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
// add one or more updates to SQLite
|
||||
addUpdateToSQLite(updates: Uint8Array[]) {
|
||||
// batch writes instead of writing on every keystroke?
|
||||
try {
|
||||
assert(this.db, 'db is not connected');
|
||||
const start = performance.now();
|
||||
const statement = this.db.prepare(
|
||||
'INSERT INTO updates (data) VALUES (?)'
|
||||
);
|
||||
const insertMany = this.db.transaction(updates => {
|
||||
for (const d of updates) {
|
||||
statement.run(d);
|
||||
}
|
||||
});
|
||||
|
||||
insertMany(updates);
|
||||
|
||||
logger.debug(
|
||||
`[SQLiteAdapter][${this.role}] addUpdateToSQLite`,
|
||||
'length:',
|
||||
updates.length,
|
||||
performance.now() - start,
|
||||
'ms'
|
||||
);
|
||||
} catch (error) {
|
||||
logger.error('addUpdateToSQLite', error);
|
||||
}
|
||||
}
|
||||
}
|
||||
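As a rough illustration of the adapter above (not part of the diff; the subclass here is hypothetical): a concrete adapter only has to supply a `role`, after which the shared blob and update helpers can be used between `connect()` and `destroy()`.

```ts
import { BaseSQLiteAdapter } from './base-db-adapter';

// hypothetical subclass, purely for illustration
class ScratchSQLiteDB extends BaseSQLiteAdapter {
  role = 'scratch';
}

const db = new ScratchSQLiteDB('/tmp/scratch.db');
db.connect(); // opens the file and creates the updates/blobs tables if missing
db.addBlob('icon', new Uint8Array([1, 2, 3]));
console.log(db.getBlobKeys()); // ['icon']
db.destroy(); // closes the underlying connection
```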
110
apps/electron/layers/main/src/db/ensure-db.ts
Normal file
@@ -0,0 +1,110 @@
|
||||
import { app } from 'electron';
|
||||
import {
|
||||
defer,
|
||||
firstValueFrom,
|
||||
from,
|
||||
fromEvent,
|
||||
interval,
|
||||
merge,
|
||||
Observable,
|
||||
} from 'rxjs';
|
||||
import {
|
||||
distinctUntilChanged,
|
||||
filter,
|
||||
ignoreElements,
|
||||
last,
|
||||
map,
|
||||
shareReplay,
|
||||
startWith,
|
||||
switchMap,
|
||||
takeUntil,
|
||||
tap,
|
||||
} from 'rxjs/operators';
|
||||
|
||||
import { appContext } from '../context';
|
||||
import { logger } from '../logger';
|
||||
import { getWorkspaceMeta$ } from '../workspace';
|
||||
import { SecondaryWorkspaceSQLiteDB } from './secondary-db';
|
||||
import type { WorkspaceSQLiteDB } from './workspace-db-adapter';
|
||||
import { openWorkspaceDatabase } from './workspace-db-adapter';
|
||||
|
||||
const db$Map = new Map<string, Observable<WorkspaceSQLiteDB>>();
|
||||
|
||||
const beforeQuit$ = defer(() => fromEvent(app, 'before-quit'));
|
||||
|
||||
function getWorkspaceDB$(id: string) {
|
||||
if (!db$Map.has(id)) {
|
||||
db$Map.set(
|
||||
id,
|
||||
from(openWorkspaceDatabase(appContext, id)).pipe(
|
||||
shareReplay(1),
|
||||
switchMap(db => {
|
||||
return startPollingSecondaryDB(db).pipe(
|
||||
ignoreElements(),
|
||||
startWith(db),
|
||||
takeUntil(beforeQuit$),
|
||||
tap({
|
||||
complete: () => {
|
||||
logger.info('[ensureSQLiteDB] close db connection');
|
||||
db.destroy();
|
||||
db$Map.delete(id);
|
||||
},
|
||||
})
|
||||
);
|
||||
}),
|
||||
shareReplay(1)
|
||||
)
|
||||
);
|
||||
}
|
||||
return db$Map.get(id)!;
|
||||
}
|
||||
|
||||
function startPollingSecondaryDB(db: WorkspaceSQLiteDB) {
|
||||
const meta$ = getWorkspaceMeta$(db.workspaceId);
|
||||
const secondaryDB$ = meta$.pipe(
|
||||
map(meta => meta?.secondaryDBPath),
|
||||
distinctUntilChanged(),
|
||||
filter((p): p is string => !!p),
|
||||
switchMap(path => {
|
||||
return new Observable<SecondaryWorkspaceSQLiteDB>(observer => {
|
||||
const secondaryDB = new SecondaryWorkspaceSQLiteDB(path, db);
|
||||
observer.next(secondaryDB);
|
||||
return () => {
|
||||
logger.info(
|
||||
'[ensureSQLiteDB] close secondary db connection',
|
||||
secondaryDB.path
|
||||
);
|
||||
secondaryDB.destroy();
|
||||
};
|
||||
});
|
||||
}),
|
||||
takeUntil(db.update$.pipe(last())),
|
||||
shareReplay(1)
|
||||
);
|
||||
|
||||
const firstDelayedTick$ = defer(() => {
|
||||
return new Promise<number>(resolve =>
|
||||
setTimeout(() => {
|
||||
resolve(0);
|
||||
}, 1000)
|
||||
);
|
||||
});
|
||||
|
||||
// pull every 30 seconds
|
||||
const poll$ = merge(firstDelayedTick$, interval(30000)).pipe(
|
||||
switchMap(() => secondaryDB$),
|
||||
tap({
|
||||
next: secondaryDB => {
|
||||
secondaryDB.pull();
|
||||
},
|
||||
}),
|
||||
takeUntil(db.update$.pipe(last())),
|
||||
shareReplay(1)
|
||||
);
|
||||
|
||||
return poll$;
|
||||
}
|
||||
|
||||
export function ensureSQLiteDB(id: string) {
|
||||
return firstValueFrom(getWorkspaceDB$(id));
|
||||
}
|
||||
38
apps/electron/layers/main/src/db/helper.ts
Normal file
@@ -0,0 +1,38 @@
|
||||
import type { Database } from 'better-sqlite3';
|
||||
import sqlite from 'better-sqlite3';
|
||||
|
||||
import { logger } from '../logger';
|
||||
|
||||
export function isValidateDB(db: Database) {
|
||||
// check if db has two tables, one for updates and one for blobs
|
||||
const statement = db.prepare(
|
||||
`SELECT name FROM sqlite_schema WHERE type='table'`
|
||||
);
|
||||
const rows = statement.all() as { name: string }[];
|
||||
const tableNames = rows.map(row => row.name);
|
||||
if (!tableNames.includes('updates') || !tableNames.includes('blobs')) {
|
||||
return false;
|
||||
}
return true;
}
|
||||
|
||||
export function isValidDBFile(path: string) {
|
||||
let db: Database | null = null;
|
||||
try {
|
||||
db = sqlite(path);
|
||||
// check if db has two tables, one for updates and one for blobs
|
||||
const statement = db.prepare(
|
||||
`SELECT name FROM sqlite_schema WHERE type='table'`
|
||||
);
|
||||
const rows = statement.all() as { name: string }[];
|
||||
const tableNames = rows.map(row => row.name);
|
||||
if (!tableNames.includes('updates') || !tableNames.includes('blobs')) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
} catch (error) {
|
||||
logger.error('isValidDBFile', error);
|
||||
return false;
|
||||
} finally {
|
||||
db?.close();
|
||||
}
|
||||
}
|
||||
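A short, hedged example of how `isValidDBFile` above is meant to be used when importing a database file; the surrounding error handling is an assumption that mirrors the `DB_FILE_INVALID` case exercised in the dialog handler tests earlier in this diff.

```ts
import { isValidDBFile } from './helper';

function importDBFile(filePath: string) {
  // reject files that do not contain the expected updates/blobs tables
  if (!isValidDBFile(filePath)) {
    return { error: 'DB_FILE_INVALID' as const };
  }
  // hypothetical continuation: copy the file into workspace storage
  return { error: undefined };
}
```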
@@ -1,6 +1,10 @@
|
||||
import { appContext } from '../../context';
|
||||
import type { NamespaceHandlers } from '../type';
|
||||
import { appContext } from '../context';
|
||||
import type { MainEventListener, NamespaceHandlers } from '../type';
|
||||
import { ensureSQLiteDB } from './ensure-db';
|
||||
import { dbSubjects } from './subjects';
|
||||
|
||||
export * from './ensure-db';
|
||||
export * from './subjects';
|
||||
|
||||
export const dbHandlers = {
|
||||
getDocAsUpdates: async (_, id: string) => {
|
||||
@@ -23,11 +27,22 @@ export const dbHandlers = {
|
||||
const workspaceDB = await ensureSQLiteDB(workspaceId);
|
||||
return workspaceDB.deleteBlob(key);
|
||||
},
|
||||
getPersistedBlobs: async (_, workspaceId: string) => {
|
||||
getBlobKeys: async (_, workspaceId: string) => {
|
||||
const workspaceDB = await ensureSQLiteDB(workspaceId);
|
||||
return workspaceDB.getPersistentBlobKeys();
|
||||
return workspaceDB.getBlobKeys();
|
||||
},
|
||||
getDefaultStorageLocation: async () => {
|
||||
return appContext.appDataPath;
|
||||
},
|
||||
} satisfies NamespaceHandlers;
|
||||
|
||||
export const dbEvents = {
|
||||
onExternalUpdate: (
|
||||
fn: (update: { workspaceId: string; update: Uint8Array }) => void
|
||||
) => {
|
||||
const sub = dbSubjects.externalUpdate.subscribe(fn);
|
||||
return () => {
|
||||
sub.unsubscribe();
|
||||
};
|
||||
},
|
||||
} satisfies Record<string, MainEventListener>;
|
||||
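A minimal usage sketch for the `dbEvents` export above (illustrative only; where the listener lives and what it does with the payload are assumptions): `onExternalUpdate` hands the subscriber each update together with its workspace id and returns a disposer.

```ts
import { dbEvents } from './db';

const stop = dbEvents.onExternalUpdate(({ workspaceId, update }) => {
  // hypothetical consumer: forward the merged update to the renderer process
  console.log(`external update for ${workspaceId}: ${update.byteLength} bytes`);
});

// detach when no longer needed
stop();
```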
198
apps/electron/layers/main/src/db/secondary-db.ts
Normal file
@@ -0,0 +1,198 @@
|
||||
import { debounce } from 'lodash-es';
|
||||
import * as Y from 'yjs';
|
||||
|
||||
import type { AppContext } from '../context';
|
||||
import { logger } from '../logger';
|
||||
import type { YOrigin } from '../type';
|
||||
import { mergeUpdateWorker } from '../workers';
|
||||
import { getWorkspaceMeta } from '../workspace';
|
||||
import { BaseSQLiteAdapter } from './base-db-adapter';
|
||||
import type { WorkspaceSQLiteDB } from './workspace-db-adapter';
|
||||
|
||||
const FLUSH_WAIT_TIME = 5000;
|
||||
const FLUSH_MAX_WAIT_TIME = 10000;
|
||||
|
||||
export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
|
||||
role = 'secondary';
|
||||
yDoc = new Y.Doc();
|
||||
firstConnected = false;
|
||||
|
||||
updateQueue: Uint8Array[] = [];
|
||||
|
||||
unsubscribers = new Set<() => void>();
|
||||
|
||||
constructor(
|
||||
public override path: string,
|
||||
public upstream: WorkspaceSQLiteDB
|
||||
) {
|
||||
super(path);
|
||||
this.setupAndListen();
|
||||
logger.debug('[SecondaryWorkspaceSQLiteDB] created', this.workspaceId);
|
||||
}
|
||||
|
||||
close() {
|
||||
this.db?.close();
|
||||
this.db = null;
|
||||
}
|
||||
|
||||
override destroy() {
|
||||
this.flushUpdateQueue();
|
||||
this.unsubscribers.forEach(unsub => unsub());
|
||||
this.db?.close();
|
||||
this.yDoc.destroy();
|
||||
}
|
||||
|
||||
get workspaceId() {
|
||||
return this.upstream.workspaceId;
|
||||
}
|
||||
|
||||
// do not update db immediately, instead, push to a queue
|
||||
// and flush the queue at a later time
|
||||
addUpdateToUpdateQueue(update: Uint8Array) {
|
||||
this.updateQueue.push(update);
|
||||
this.debouncedFlush();
|
||||
}
|
||||
|
||||
flushUpdateQueue() {
|
||||
logger.debug(
|
||||
'flushUpdateQueue',
|
||||
this.workspaceId,
|
||||
'queue',
|
||||
this.updateQueue.length
|
||||
);
|
||||
const updates = [...this.updateQueue];
|
||||
this.updateQueue = [];
|
||||
this.connect();
|
||||
this.addUpdateToSQLite(updates);
|
||||
this.close();
|
||||
}
|
||||
|
||||
// flush after 5s, but will not wait for more than 10s
|
||||
debouncedFlush = debounce(this.flushUpdateQueue, FLUSH_WAIT_TIME, {
|
||||
maxWait: FLUSH_MAX_WAIT_TIME,
|
||||
});
|
||||
|
||||
runCounter = 0;
|
||||
|
||||
// wrap the fn with connect and close
|
||||
// it only works for sync functions
|
||||
run = <T extends (...args: any[]) => any>(fn: T) => {
|
||||
try {
|
||||
if (this.runCounter === 0) {
|
||||
this.connect();
|
||||
}
|
||||
this.runCounter++;
|
||||
return fn();
|
||||
} catch (err) {
|
||||
logger.error(err);
|
||||
} finally {
|
||||
this.runCounter--;
|
||||
if (this.runCounter === 0) {
|
||||
this.close();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
setupAndListen() {
|
||||
if (this.firstConnected) {
|
||||
return;
|
||||
}
|
||||
this.firstConnected = true;
|
||||
|
||||
const onUpstreamUpdate = (update: Uint8Array, origin: YOrigin) => {
|
||||
if (origin === 'renderer') {
|
||||
// update to upstream yDoc should be replicated to self yDoc
|
||||
this.applyUpdate(update, 'upstream');
|
||||
}
|
||||
};
|
||||
|
||||
const onSelfUpdate = (update: Uint8Array, origin: YOrigin) => {
|
||||
// for self update from upstream, we need to push it to external DB
|
||||
if (origin === 'upstream') {
|
||||
this.addUpdateToUpdateQueue(update);
|
||||
}
|
||||
|
||||
if (origin === 'self') {
|
||||
this.upstream.applyUpdate(update, 'external');
|
||||
}
|
||||
};
|
||||
|
||||
// listen to upstream update
|
||||
this.upstream.yDoc.on('update', onUpstreamUpdate);
|
||||
this.yDoc.on('update', onSelfUpdate);
|
||||
|
||||
this.unsubscribers.add(() => {
|
||||
this.upstream.yDoc.off('update', onUpstreamUpdate);
|
||||
this.yDoc.off('update', onSelfUpdate);
|
||||
});
|
||||
|
||||
this.run(() => {
|
||||
// apply all updates from upstream
|
||||
const upstreamUpdate = this.upstream.getDocAsUpdates();
|
||||
// to initialize the yDoc, we need to apply all updates from the db
|
||||
this.applyUpdate(upstreamUpdate, 'upstream');
|
||||
|
||||
this.pull();
|
||||
});
|
||||
}
|
||||
|
||||
applyUpdate = (data: Uint8Array, origin: YOrigin = 'upstream') => {
|
||||
Y.applyUpdate(this.yDoc, data, origin);
|
||||
};
|
||||
|
||||
// TODO: have a better solution to handle blobs
|
||||
syncBlobs() {
|
||||
this.run(() => {
|
||||
// pull blobs
|
||||
const blobsKeys = this.getBlobKeys();
|
||||
const upstreamBlobsKeys = this.upstream.getBlobKeys();
|
||||
// put every missing blob to upstream
|
||||
for (const key of blobsKeys) {
|
||||
if (!upstreamBlobsKeys.includes(key)) {
|
||||
const blob = this.getBlob(key);
|
||||
if (blob) {
|
||||
this.upstream.addBlob(key, blob);
|
||||
logger.debug('syncBlobs', this.workspaceId, key);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* pull from external DB file and apply to embedded yDoc
|
||||
* workflow:
|
||||
* - connect to external db
|
||||
* - get updates
|
||||
* - apply updates to local yDoc
|
||||
* - get blobs and put new blobs to upstream
|
||||
* - disconnect
|
||||
*/
|
||||
async pull() {
|
||||
const start = performance.now();
|
||||
const updates = this.run(() => {
|
||||
// TODO: no need to get all updates, just get the latest ones (using a cursor, etc)?
|
||||
this.syncBlobs();
|
||||
return this.getUpdates().map(update => update.data);
|
||||
});
|
||||
|
||||
const merged = await mergeUpdateWorker(updates);
|
||||
this.applyUpdate(merged, 'self');
|
||||
|
||||
logger.debug(
|
||||
'pull external updates',
|
||||
this.path,
|
||||
updates.length,
|
||||
(performance.now() - start).toFixed(2),
|
||||
'ms'
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export async function getSecondaryWorkspaceDBPath(
|
||||
context: AppContext,
|
||||
workspaceId: string
|
||||
) {
|
||||
const meta = await getWorkspaceMeta(context, workspaceId);
|
||||
return meta?.secondaryDBPath;
|
||||
}
|
||||
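
The secondary adapter above deliberately batches writes: renderer updates land in an in-memory queue, and a debounced flush (5s after the last update, never more than 10s in total) opens the external file, writes the batch, and closes it again. Below is a self-contained sketch of that queue-and-flush pattern, assuming a lodash-style `debounce` with `maxWait`; the class and callback names are illustrative, not part of the diff.

import { debounce } from 'lodash-es';

const FLUSH_WAIT_TIME = 5 * 1000; // flush 5s after the last queued update
const FLUSH_MAX_WAIT_TIME = 10 * 1000; // but never defer a write longer than 10s

class UpdateQueue {
  private queue: Uint8Array[] = [];

  constructor(private readonly write: (updates: Uint8Array[]) => void) {}

  // callers push updates freely; the actual write is deferred and batched
  push(update: Uint8Array) {
    this.queue.push(update);
    this.debouncedFlush();
  }

  flush = () => {
    const updates = this.queue;
    this.queue = [];
    if (updates.length > 0) {
      this.write(updates);
    }
  };

  private debouncedFlush = debounce(this.flush, FLUSH_WAIT_TIME, {
    maxWait: FLUSH_MAX_WAIT_TIME,
  });
}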
apps/electron/layers/main/src/db/subjects.ts (new file, +7)
@@ -0,0 +1,7 @@
import { Subject } from 'rxjs';

export const dbSubjects = {
  // emit workspace id when the db file is missing
  fileMissing: new Subject<string>(),
  externalUpdate: new Subject<{ workspaceId: string; update: Uint8Array }>(),
};
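
`dbSubjects` is the small pub/sub seam between the db layer and the rest of the main process. A hedged usage sketch; the subscriber body is illustrative only.

import { dbSubjects } from './subjects';

// e.g. forward external file changes to interested listeners
const sub = dbSubjects.externalUpdate.subscribe(({ workspaceId, update }) => {
  console.log('external update for', workspaceId, 'bytes:', update.byteLength);
});

// rxjs subscriptions should be disposed when the listener goes away
sub.unsubscribe();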
apps/electron/layers/main/src/db/workspace-db-adapter.ts (new file, +106)
@@ -0,0 +1,106 @@
import type { Database } from 'better-sqlite3';
import { Subject } from 'rxjs';
import * as Y from 'yjs';

import type { AppContext } from '../context';
import { logger } from '../logger';
import type { YOrigin } from '../type';
import { mergeUpdateWorker } from '../workers';
import { getWorkspaceMeta } from '../workspace';
import { BaseSQLiteAdapter } from './base-db-adapter';
import { dbSubjects } from './subjects';

export class WorkspaceSQLiteDB extends BaseSQLiteAdapter {
  role = 'primary';
  yDoc = new Y.Doc();
  firstConnected = false;

  update$ = new Subject<void>();

  constructor(public override path: string, public workspaceId: string) {
    super(path);
  }

  override destroy() {
    this.db?.close();
    this.db = null;
    this.yDoc.destroy();

    // when db is closed, we can safely remove it from ensure-db list
    this.update$.complete();
  }

  getWorkspaceName = () => {
    return this.yDoc.getMap('space:meta').get('name') as string;
  };

  async init(): Promise<Database | undefined> {
    const db = super.connect();

    if (!this.firstConnected) {
      this.yDoc.on('update', (update: Uint8Array, origin: YOrigin) => {
        if (origin === 'renderer') {
          this.addUpdateToSQLite([update]);
        } else if (origin === 'external') {
          this.addUpdateToSQLite([update]);
          logger.debug('external update', this.workspaceId);
          dbSubjects.externalUpdate.next({
            workspaceId: this.workspaceId,
            update,
          });
        }
      });
    }

    const updates = this.getUpdates();
    const merged = await mergeUpdateWorker(updates.map(update => update.data));

    // to initialize the yDoc, we need to apply all updates from the db
    this.applyUpdate(merged, 'self');

    this.firstConnected = true;
    this.update$.next();

    return db;
  }

  getDocAsUpdates = () => {
    return Y.encodeStateAsUpdate(this.yDoc);
  };

  // non-blocking and use yDoc to validate the update
  // after that, the update is added to the db
  applyUpdate = (data: Uint8Array, origin: YOrigin = 'renderer') => {
    // todo: trim the updates when the number of records is too large
    // 1. store the current ydoc state in the db
    // 2. then delete the old updates
    // yjs-idb will always trim the db for the first time after DB is loaded
    Y.applyUpdate(this.yDoc, data, origin);
  };

  override addBlob(key: string, value: Uint8Array) {
    const res = super.addBlob(key, value);
    this.update$.next();
    return res;
  }

  override deleteBlob(key: string) {
    super.deleteBlob(key);
    this.update$.next();
  }

  override addUpdateToSQLite(data: Uint8Array[]) {
    super.addUpdateToSQLite(data);
    this.update$.next();
  }
}

export async function openWorkspaceDatabase(
  context: AppContext,
  workspaceId: string
) {
  const meta = await getWorkspaceMeta(context, workspaceId);
  const db = new WorkspaceSQLiteDB(meta.mainDBPath, workspaceId);
  await db.init();
  return db;
}
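
The primary adapter routes persistence by Yjs transaction origin: 'renderer' and 'external' updates are written to SQLite (and 'external' ones are additionally broadcast through `dbSubjects.externalUpdate`), while 'self' is only used to seed the in-memory doc from existing rows. A hedged usage sketch of the exported API; the scratch doc below just manufactures a valid Yjs update for illustration.

import * as Y from 'yjs';

import type { AppContext } from '../context';
import { openWorkspaceDatabase } from './workspace-db-adapter';

async function example(context: AppContext, workspaceId: string) {
  const db = await openWorkspaceDatabase(context, workspaceId);

  // produce a genuine Yjs update from a scratch doc, then feed it in as if it
  // came from the renderer: it is applied to the yDoc and persisted through
  // addUpdateToSQLite
  const scratch = new Y.Doc();
  scratch.getMap('space:meta').set('name', 'demo');
  db.applyUpdate(Y.encodeStateAsUpdate(scratch), 'renderer');

  // full state as one merged update, e.g. to seed a secondary .affine file
  const snapshot = db.getDocAsUpdates();
  console.log('snapshot bytes:', snapshot.byteLength);

  db.destroy();
}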
@@ -1,25 +1,35 @@
|
||||
import path from 'node:path';
|
||||
|
||||
import { app } from 'electron';
|
||||
import { dialog, shell } from 'electron';
|
||||
import fs from 'fs-extra';
|
||||
import { nanoid } from 'nanoid';
|
||||
|
||||
import { appContext } from '../../context';
|
||||
import { logger } from '../../logger';
|
||||
import { appContext } from '../context';
|
||||
import { ensureSQLiteDB } from '../db/ensure-db';
|
||||
import { getWorkspaceDBPath, isValidDBFile } from '../db/sqlite';
|
||||
import { listWorkspaces } from '../workspace/workspace';
|
||||
import { isValidDBFile } from '../db/helper';
|
||||
import type { WorkspaceSQLiteDB } from '../db/workspace-db-adapter';
|
||||
import { logger } from '../logger';
|
||||
import {
|
||||
getWorkspaceDBPath,
|
||||
getWorkspaceMeta,
|
||||
listWorkspaces,
|
||||
storeWorkspaceMeta,
|
||||
} from '../workspace';
|
||||
|
||||
// NOTE:
|
||||
// we are using native dialogs because HTML dialogs do not give full file paths
|
||||
|
||||
export async function revealDBFile(workspaceId: string) {
|
||||
const workspaceDB = await ensureSQLiteDB(workspaceId);
|
||||
shell.showItemInFolder(workspaceDB.path);
|
||||
const meta = await getWorkspaceMeta(appContext, workspaceId);
|
||||
if (!meta) {
|
||||
return;
|
||||
}
|
||||
shell.showItemInFolder(meta.secondaryDBPath ?? meta.mainDBPath);
|
||||
}
|
||||
|
||||
// provide a backdoor to set dialog path for testing in playwright
|
||||
interface FakeDialogResult {
|
||||
export interface FakeDialogResult {
|
||||
canceled?: boolean;
|
||||
filePath?: string;
|
||||
filePaths?: string[];
|
||||
@@ -47,17 +57,26 @@ const ErrorMessages = [
|
||||
'DB_FILE_ALREADY_LOADED',
|
||||
'DB_FILE_PATH_INVALID',
|
||||
'DB_FILE_INVALID',
|
||||
'FILE_ALREADY_EXISTS',
|
||||
'UNKNOWN_ERROR',
|
||||
] as const;
|
||||
|
||||
type ErrorMessage = (typeof ErrorMessages)[number];
|
||||
|
||||
interface SaveDBFileResult {
|
||||
export interface SaveDBFileResult {
|
||||
filePath?: string;
|
||||
canceled?: boolean;
|
||||
error?: ErrorMessage;
|
||||
}
|
||||
|
||||
const extension = 'affine';
|
||||
|
||||
function getDefaultDBFileName(name: string, id: string) {
|
||||
const fileName = `${name}_${id}.${extension}`;
|
||||
// make sure fileName is a valid file name
|
||||
return fileName.replace(/[/\\?%*:|"<>]/g, '-');
|
||||
}
|
||||
|
||||
/**
|
||||
* This function is called when the user clicks the "Save" button in the "Save Workspace" dialog.
|
||||
*
|
||||
@@ -75,7 +94,13 @@ export async function saveDBFileAs(
|
||||
title: 'Save Workspace',
|
||||
showsTagField: false,
|
||||
buttonLabel: 'Save',
|
||||
defaultPath: `${db.getWorkspaceName()}_${workspaceId}.db`,
|
||||
filters: [
|
||||
{
|
||||
extensions: [extension],
|
||||
name: '',
|
||||
},
|
||||
],
|
||||
defaultPath: getDefaultDBFileName(db.getWorkspaceName(), workspaceId),
|
||||
message: 'Save Workspace as a SQLite Database file',
|
||||
}));
|
||||
const filePath = ret.filePath;
|
||||
@@ -97,7 +122,7 @@ export async function saveDBFileAs(
|
||||
}
|
||||
}
|
||||
|
||||
interface SelectDBFileLocationResult {
|
||||
export interface SelectDBFileLocationResult {
|
||||
filePath?: string;
|
||||
error?: ErrorMessage;
|
||||
canceled?: boolean;
|
||||
@@ -107,27 +132,20 @@ export async function selectDBFileLocation(): Promise<SelectDBFileLocationResult
|
||||
try {
|
||||
const ret =
|
||||
getFakedResult() ??
|
||||
(await dialog.showSaveDialog({
|
||||
properties: ['showOverwriteConfirmation'],
|
||||
title: 'Set database location',
|
||||
showsTagField: false,
|
||||
(await dialog.showOpenDialog({
|
||||
properties: ['openDirectory'],
|
||||
title: 'Set Workspace Storage Location',
|
||||
buttonLabel: 'Select',
|
||||
defaultPath: `workspace-storage.db`,
|
||||
defaultPath: app.getPath('documents'),
|
||||
message: "Select a location to store the workspace's database file",
|
||||
}));
|
||||
const filePath = ret.filePath;
|
||||
if (ret.canceled || !filePath) {
|
||||
const dir = ret.filePaths?.[0];
|
||||
if (ret.canceled || !dir) {
|
||||
return {
|
||||
canceled: true,
|
||||
};
|
||||
}
|
||||
// the same db file cannot be loaded twice
|
||||
if (await dbFileAlreadyLoaded(filePath)) {
|
||||
return {
|
||||
error: 'DB_FILE_ALREADY_LOADED',
|
||||
};
|
||||
}
|
||||
return { filePath };
|
||||
return { filePath: dir };
|
||||
} catch (err) {
|
||||
logger.error('selectDBFileLocation', err);
|
||||
return {
|
||||
@@ -136,7 +154,7 @@ export async function selectDBFileLocation(): Promise<SelectDBFileLocationResult
|
||||
}
|
||||
}
|
||||
|
||||
interface LoadDBFileResult {
|
||||
export interface LoadDBFileResult {
|
||||
workspaceId?: string;
|
||||
error?: ErrorMessage;
|
||||
canceled?: boolean;
|
||||
@@ -168,10 +186,10 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
|
||||
{
|
||||
name: 'SQLite Database',
|
||||
// do we want to support other file format?
|
||||
extensions: ['db'],
|
||||
extensions: ['db', 'affine'],
|
||||
},
|
||||
],
|
||||
message: 'Load Workspace from a SQLite Database file',
|
||||
message: 'Load Workspace from a AFFiNE file',
|
||||
}));
|
||||
const filePath = ret.filePaths?.[0];
|
||||
if (ret.canceled || !filePath) {
|
||||
@@ -195,14 +213,20 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
|
||||
return { error: 'DB_FILE_INVALID' }; // invalid db file
|
||||
}
|
||||
|
||||
// symlink the db file to a new workspace id
|
||||
// copy the db file to a new workspace id
|
||||
const workspaceId = nanoid(10);
|
||||
const linkedFilePath = await getWorkspaceDBPath(appContext, workspaceId);
|
||||
const internalFilePath = getWorkspaceDBPath(appContext, workspaceId);
|
||||
|
||||
await fs.ensureDir(path.join(appContext.appDataPath, 'workspaces'));
|
||||
|
||||
await fs.symlink(filePath, linkedFilePath);
|
||||
logger.info(`loadDBFile, symlink: ${filePath} -> ${linkedFilePath}`);
|
||||
await fs.copy(filePath, internalFilePath);
|
||||
logger.info(`loadDBFile, copy: ${filePath} -> ${internalFilePath}`);
|
||||
|
||||
await storeWorkspaceMeta(appContext, workspaceId, {
|
||||
id: workspaceId,
|
||||
mainDBPath: internalFilePath,
|
||||
secondaryDBPath: filePath,
|
||||
});
|
||||
|
||||
return { workspaceId };
|
||||
} catch (err) {
|
||||
@@ -213,7 +237,7 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
|
||||
}
|
||||
}
|
||||
|
||||
interface MoveDBFileResult {
|
||||
export interface MoveDBFileResult {
|
||||
filePath?: string;
|
||||
error?: ErrorMessage;
|
||||
canceled?: boolean;
|
||||
@@ -223,62 +247,78 @@ interface MoveDBFileResult {
|
||||
* This function is called when the user clicks the "Move" button in the "Move Workspace Storage" setting.
|
||||
*
|
||||
* It will
|
||||
* - move the source db file to a new location
|
||||
* - symlink the new location to the old db file
|
||||
* - copy the source db file to a new location
|
||||
* - remove the old db external file
|
||||
* - update the external db file path in the workspace meta
|
||||
* - return the new file path
|
||||
*/
|
||||
export async function moveDBFile(
|
||||
workspaceId: string,
|
||||
dbFileLocation?: string
|
||||
dbFileDir?: string
|
||||
): Promise<MoveDBFileResult> {
|
||||
let db: WorkspaceSQLiteDB | null = null;
|
||||
try {
|
||||
const db = await ensureSQLiteDB(workspaceId);
|
||||
db = await ensureSQLiteDB(workspaceId);
|
||||
|
||||
// get the real file path of db
|
||||
const realpath = await fs.realpath(db.path);
|
||||
const isLink = realpath !== db.path;
|
||||
const meta = await getWorkspaceMeta(appContext, workspaceId);
|
||||
|
||||
const newFilePath =
|
||||
dbFileLocation ||
|
||||
const oldDir = meta.secondaryDBPath
|
||||
? path.dirname(meta.secondaryDBPath)
|
||||
: null;
|
||||
const defaultDir = oldDir ?? app.getPath('documents');
|
||||
|
||||
const newName = getDefaultDBFileName(db.getWorkspaceName(), workspaceId);
|
||||
|
||||
const newDirPath =
|
||||
dbFileDir ??
|
||||
(
|
||||
getFakedResult() ||
|
||||
(await dialog.showSaveDialog({
|
||||
properties: ['showOverwriteConfirmation'],
|
||||
getFakedResult() ??
|
||||
(await dialog.showOpenDialog({
|
||||
properties: ['openDirectory'],
|
||||
title: 'Move Workspace Storage',
|
||||
showsTagField: false,
|
||||
buttonLabel: 'Save',
|
||||
defaultPath: realpath,
|
||||
buttonLabel: 'Move',
|
||||
defaultPath: defaultDir,
|
||||
message: 'Move Workspace storage file',
|
||||
}))
|
||||
).filePath;
|
||||
).filePaths?.[0];
|
||||
|
||||
// skips if
|
||||
// - user canceled the dialog
|
||||
// - user selected the same file
|
||||
// - user selected the same file in the link file in app data dir
|
||||
if (!newFilePath || newFilePath === realpath || db.path === newFilePath) {
|
||||
// - user selected the same dir
|
||||
if (!newDirPath || newDirPath === oldDir) {
|
||||
return {
|
||||
canceled: true,
|
||||
};
|
||||
}
|
||||
|
||||
if (isLink) {
|
||||
// remove the old link to unblock new link
|
||||
await fs.unlink(db.path);
|
||||
const newFilePath = path.join(newDirPath, newName);
|
||||
|
||||
if (await fs.pathExists(newFilePath)) {
|
||||
return {
|
||||
error: 'FILE_ALREADY_EXISTS',
|
||||
};
|
||||
}
|
||||
|
||||
await fs.move(realpath, newFilePath, {
|
||||
overwrite: true,
|
||||
logger.info(`[moveDBFile] copy ${meta.mainDBPath} -> ${newFilePath}`);
|
||||
|
||||
await fs.copy(meta.mainDBPath, newFilePath);
|
||||
|
||||
// remove the old db file, but we don't care if it fails
|
||||
if (meta.secondaryDBPath) {
|
||||
fs.remove(meta.secondaryDBPath);
|
||||
}
|
||||
|
||||
// update meta
|
||||
await storeWorkspaceMeta(appContext, workspaceId, {
|
||||
secondaryDBPath: newFilePath,
|
||||
});
|
||||
|
||||
await fs.ensureSymlink(newFilePath, db.path);
|
||||
logger.info(`openMoveDBFileDialog symlink: ${realpath} -> ${newFilePath}`);
|
||||
db.reconnectDB();
|
||||
return {
|
||||
filePath: newFilePath,
|
||||
};
|
||||
} catch (err) {
|
||||
logger.error('moveDBFile', err);
|
||||
db?.destroy();
|
||||
logger.error('[moveDBFile]', err);
|
||||
return {
|
||||
error: 'UNKNOWN_ERROR',
|
||||
};
|
||||
@@ -287,7 +327,6 @@ export async function moveDBFile(
|
||||
|
||||
async function dbFileAlreadyLoaded(path: string) {
|
||||
const meta = await listWorkspaces(appContext);
|
||||
const realpath = await fs.realpath(path);
|
||||
const paths = meta.map(m => m[1].realpath);
|
||||
return paths.includes(realpath);
|
||||
const paths = meta.map(m => m[1].secondaryDBPath);
|
||||
return paths.includes(path);
|
||||
}
|
||||
@@ -18,7 +18,7 @@ export const dialogHandlers = {
  saveDBFileAs: async (_, workspaceId: string) => {
    return saveDBFileAs(workspaceId);
  },
  moveDBFile: async (_, workspaceId: string, dbFileLocation?: string) => {
  moveDBFile: (_, workspaceId: string, dbFileLocation?: string) => {
    return moveDBFile(workspaceId, dbFileLocation);
  },
  selectDBFileLocation: async () => {
@@ -1,12 +1,16 @@
import { app, BrowserWindow } from 'electron';

import { logger } from '../logger';
import { applicationMenuEvents } from './application-menu';
import { dbEvents } from './db';
import { updaterEvents } from './updater';
import { logger } from './logger';
import { updaterEvents } from './updater/event';
import { workspaceEvents } from './workspace';

export const allEvents = {
  applicationMenu: applicationMenuEvents,
  db: dbEvents,
  updater: updaterEvents,
  workspace: workspaceEvents,
};

function getActiveWindows() {
@@ -17,9 +21,18 @@ export function registerEvents() {
  // register events
  for (const [namespace, namespaceEvents] of Object.entries(allEvents)) {
    for (const [key, eventRegister] of Object.entries(namespaceEvents)) {
      const subscription = eventRegister((...args: any) => {
      const subscription = eventRegister((...args: any[]) => {
        const chan = `${namespace}:${key}`;
        logger.info('[ipc-event]', chan, args);
        logger.info(
          '[ipc-event]',
          chan,
          args.filter(
            a =>
              a !== undefined &&
              typeof a !== 'function' &&
              typeof a !== 'object'
          )
        );
        getActiveWindows().forEach(win => win.webContents.send(chan, ...args));
      });
      app.on('before-quit', () => {
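
Each registered main-process event is re-broadcast to every open window on a `${namespace}:${key}` channel. The preload/renderer side is not part of this diff, but a listener would conceptually look like the hedged sketch below; the concrete channel name is only an example of the naming convention, not a confirmed API.

import { ipcRenderer } from 'electron';

// runs in the preload script, where ipcRenderer is available
ipcRenderer.on('updater:onClientUpdateReady', (_event, ...args) => {
  // args are whatever the main-process subject emitted, e.g. a version meta
  console.log('[ipc-event] updater:onClientUpdateReady', args);
});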
@@ -1,26 +0,0 @@
import { Subject } from 'rxjs';

import type { MainEventListener } from './type';

export const dbSubjects = {
  // emit workspace ids
  dbFileMissing: new Subject<string>(),
  // emit workspace ids
  dbFileUpdate: new Subject<string>(),
};

export const dbEvents = {
  onDbFileMissing: (fn: (workspaceId: string) => void) => {
    const sub = dbSubjects.dbFileMissing.subscribe(fn);

    return () => {
      sub.unsubscribe();
    };
  },
  onDbFileUpdate: (fn: (workspaceId: string) => void) => {
    const sub = dbSubjects.dbFileUpdate.subscribe(fn);
    return () => {
      sub.unsubscribe();
    };
  },
} satisfies Record<string, MainEventListener>;
@@ -1,7 +0,0 @@
export * from './register';

import { dbSubjects } from './db';

export const subjects = {
  db: dbSubjects,
};
@@ -1 +0,0 @@
export type MainEventListener = (...args: any[]) => () => void;
@@ -1,21 +0,0 @@
import { Subject } from 'rxjs';

import type { MainEventListener } from './type';

interface UpdateMeta {
  version: string;
}

export const updaterSubjects = {
  // means it is ready for restart and install the new version
  clientUpdateReady: new Subject<UpdateMeta>(),
};

export const updaterEvents = {
  onClientUpdateReady: (fn: (versionMeta: UpdateMeta) => void) => {
    const sub = updaterSubjects.clientUpdateReady.subscribe(fn);
    return () => {
      sub.unsubscribe();
    };
  },
} satisfies Record<string, MainEventListener>;
apps/electron/layers/main/src/export/index.ts (new file, +10)
@@ -0,0 +1,10 @@
import type { NamespaceHandlers } from '../type';
import { savePDFFileAs } from './pdf';

export const exportHandlers = {
  savePDFFileAs: async (_, title: string) => {
    return savePDFFileAs(title);
  },
} satisfies NamespaceHandlers;

export * from './pdf';
apps/electron/layers/main/src/export/pdf.ts (new file, +61)
@@ -0,0 +1,61 @@
import { BrowserWindow, dialog, shell } from 'electron';
import fs from 'fs-extra';

import { logger } from '../logger';
import type { ErrorMessage } from './utils';
import { getFakedResult } from './utils';

export interface SavePDFFileResult {
  filePath?: string;
  canceled?: boolean;
  error?: ErrorMessage;
}

/**
 * This function is called when the user clicks the "Export to PDF" button in the electron.
 *
 * It will just copy the file to the given path
 */
export async function savePDFFileAs(
  pageTitle: string
): Promise<SavePDFFileResult> {
  try {
    const ret =
      getFakedResult() ??
      (await dialog.showSaveDialog({
        properties: ['showOverwriteConfirmation'],
        title: 'Save PDF',
        showsTagField: false,
        buttonLabel: 'Save',
        defaultPath: `${pageTitle}.pdf`,
        message: 'Save Page as a PDF file',
      }));
    const filePath = ret.filePath;
    if (ret.canceled || !filePath) {
      return {
        canceled: true,
      };
    }

    await BrowserWindow.getFocusedWindow()
      ?.webContents.printToPDF({
        pageSize: 'A4',
        printBackground: true,
        landscape: false,
      })
      .then(data => {
        fs.writeFile(filePath, data, error => {
          if (error) throw error;
          logger.log(`Wrote PDF successfully to ${filePath}`);
        });
      });

    shell.openPath(filePath);
    return { filePath };
  } catch (err) {
    logger.error('savePDFFileAs', err);
    return {
      error: 'UNKNOWN_ERROR',
    };
  }
}
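
With the `registerHandlers` loop later in this diff, the handler above becomes reachable from the renderer on the `export:savePDFFileAs` channel. A hedged sketch of the call; the real app goes through its generated preload bridge rather than touching `ipcRenderer` directly.

import { ipcRenderer } from 'electron';

async function exportCurrentPageAsPDF(pageTitle: string) {
  const result = await ipcRenderer.invoke('export:savePDFFileAs', pageTitle);
  if (result?.error) {
    console.error('export failed:', result.error);
  } else if (!result?.canceled) {
    console.log('PDF written to', result?.filePath);
  }
}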
apps/electron/layers/main/src/export/utils.ts (new file, +24)
@@ -0,0 +1,24 @@
// provide a backdoor to set dialog path for testing in playwright
interface FakeDialogResult {
  canceled?: boolean;
  filePath?: string;
  filePaths?: string[];
}
// result will be used in the next call to showOpenDialog
// if it is being read once, it will be reset to undefined
let fakeDialogResult: FakeDialogResult | undefined = undefined;
export function getFakedResult() {
  const result = fakeDialogResult;
  fakeDialogResult = undefined;
  return result;
}

export function setFakeDialogResult(result: FakeDialogResult | undefined) {
  fakeDialogResult = result;
  // for convenience, we will fill filePaths with filePath if it is not set
  if (result?.filePaths === undefined && result?.filePath !== undefined) {
    result.filePaths = [result.filePath];
  }
}
const ErrorMessages = ['FILE_ALREADY_EXISTS', 'UNKNOWN_ERROR'] as const;
export type ErrorMessage = (typeof ErrorMessages)[number];
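
The backdoor above lets end-to-end tests skip native dialogs entirely: a test primes one fake result, and the next dialog-using code path consumes it exactly once. A small sketch; how the test process reaches this setter (e.g. through a debug IPC handler) is an assumption and not shown in this diff.

import { getFakedResult, setFakeDialogResult } from './utils';

// prime the next dialog with a canned result
setFakeDialogResult({ filePath: '/tmp/out.pdf' });

// the next dialog-using code path consumes it exactly once...
console.log(getFakedResult()); // { filePath: '/tmp/out.pdf', filePaths: ['/tmp/out.pdf'] }
// ...and a second read falls back to the real native dialog
console.log(getFakedResult()); // undefined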
@@ -2,4 +2,35 @@ import { allEvents as events } from './events';
import { allHandlers as handlers } from './handlers';

// this will be used by preload script to expose all handlers and events to the renderer process
// - register in exposeInMainWorld in preload
// - provide type hints
export { events, handlers };

export const getExposedMeta = () => {
  const handlersMeta = Object.entries(handlers).map(
    ([namespace, namespaceHandlers]) => {
      return [
        namespace,
        Object.keys(namespaceHandlers).map(handlerName => handlerName),
      ];
    }
  );

  const eventsMeta = Object.entries(events).map(
    ([namespace, namespaceHandlers]) => {
      return [
        namespace,
        Object.keys(namespaceHandlers).map(handlerName => handlerName),
      ];
    }
  );

  return {
    handlers: handlersMeta,
    events: eventsMeta,
  };
};

export type MainIPCHandlerMap = typeof handlers;

export type MainIPCEventMap = typeof events;
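
`getExposedMeta()` is serialized into the window's `additionalArguments` as `--exposed-meta=...` (see the main-window change later in this diff), so the preload script can rebuild the list of invokable channels without importing main-process code. A hedged sketch of that consumption; the parsing details are an assumption, not the project's actual preload.

const raw = process.argv
  .find(arg => arg.startsWith('--exposed-meta='))
  ?.slice('--exposed-meta='.length);

const exposedMeta = raw
  ? (JSON.parse(raw) as {
      handlers: [namespace: string, handlerNames: string[]][];
      events: [namespace: string, eventNames: string[]][];
    })
  : { handlers: [], events: [] };

// e.g. rebuild the `${namespace}:${name}` channel names from it
for (const [namespace, names] of exposedMeta.handlers) {
  for (const name of names) {
    console.log('invokable channel:', `${namespace}:${name}`);
  }
}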
@@ -1,21 +1,14 @@
import { ipcMain } from 'electron';

import { getLogFilePath, logger, revealLogFile } from '../logger';
import { dbHandlers } from './db';
import { dialogHandlers } from './dialog';
import { exportHandlers } from './export';
import { getLogFilePath, logger, revealLogFile } from './logger';
import type { NamespaceHandlers } from './type';
import { uiHandlers } from './ui';
import { updaterHandlers } from './updater';
import { workspaceHandlers } from './workspace';

type IsomorphicHandler = (
  e: Electron.IpcMainInvokeEvent,
  ...args: any[]
) => Promise<any>;

type NamespaceHandlers = {
  [key: string]: IsomorphicHandler;
};

export const debugHandlers = {
  revealLogFile: async () => {
    return revealLogFile();
@@ -27,15 +20,18 @@ export const debugHandlers = {

// Note: all of these handlers will be the single-source-of-truth for the apis exposed to the renderer process
export const allHandlers = {
  workspace: workspaceHandlers,
  ui: uiHandlers,
  db: dbHandlers,
  dialog: dialogHandlers,
  debug: debugHandlers,
  dialog: dialogHandlers,
  ui: uiHandlers,
  export: exportHandlers,
  updater: updaterHandlers,
  workspace: workspaceHandlers,
} satisfies Record<string, NamespaceHandlers>;

export const registerHandlers = () => {
  // TODO: listen to namespace instead of individual event types
  ipcMain.setMaxListeners(100);
  for (const [namespace, namespaceHandlers] of Object.entries(allHandlers)) {
    for (const [key, handler] of Object.entries(namespaceHandlers)) {
      const chan = `${namespace}:${key}`;
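
The hunk is cut off right after `const chan = ...`; presumably the loop body wires each channel into `ipcMain.handle`. A generic sketch of that registration pattern, not necessarily the project's exact code.

import { ipcMain } from 'electron';

type Handler = (e: Electron.IpcMainInvokeEvent, ...args: any[]) => Promise<any>;

function registerNamespace(namespace: string, handlers: Record<string, Handler>) {
  for (const [key, handler] of Object.entries(handlers)) {
    const chan = `${namespace}:${key}`;
    // the renderer side reaches this via ipcRenderer.invoke(chan, ...args)
    ipcMain.handle(chan, (e, ...args) => handler(e, ...args));
  }
}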
@@ -1,89 +0,0 @@
|
||||
import { watch } from 'chokidar';
|
||||
|
||||
import { appContext } from '../../context';
|
||||
import { subjects } from '../../events';
|
||||
import { logger } from '../../logger';
|
||||
import { debounce, ts } from '../../utils';
|
||||
import type { WorkspaceSQLiteDB } from './sqlite';
|
||||
import { openWorkspaceDatabase } from './sqlite';
|
||||
|
||||
const dbMapping = new Map<string, Promise<WorkspaceSQLiteDB>>();
|
||||
const dbWatchers = new Map<string, () => void>();
|
||||
|
||||
// if we removed the file, we will stop watching it
|
||||
function startWatchingDBFile(db: WorkspaceSQLiteDB) {
|
||||
if (dbWatchers.has(db.workspaceId)) {
|
||||
return dbWatchers.get(db.workspaceId);
|
||||
}
|
||||
logger.info('watch db file', db.path);
|
||||
const watcher = watch(db.path);
|
||||
|
||||
const debounceOnChange = debounce(() => {
|
||||
logger.info(
|
||||
'db file changed on disk',
|
||||
db.workspaceId,
|
||||
ts() - db.lastUpdateTime,
|
||||
'ms'
|
||||
);
|
||||
// reconnect db
|
||||
db.reconnectDB();
|
||||
subjects.db.dbFileUpdate.next(db.workspaceId);
|
||||
}, 1000);
|
||||
|
||||
watcher.on('change', () => {
|
||||
const currentTime = ts();
|
||||
if (currentTime - db.lastUpdateTime > 100) {
|
||||
debounceOnChange();
|
||||
}
|
||||
});
|
||||
|
||||
dbWatchers.set(db.workspaceId, () => {
|
||||
watcher.close();
|
||||
});
|
||||
|
||||
// todo: there is still a possibility that the file is deleted
|
||||
// but we didn't get the event soon enough and another event tries to
|
||||
// access the db
|
||||
watcher.on('unlink', () => {
|
||||
logger.info('db file missing', db.workspaceId);
|
||||
subjects.db.dbFileMissing.next(db.workspaceId);
|
||||
// cleanup
|
||||
watcher.close().then(() => {
|
||||
db.destroy();
|
||||
dbWatchers.delete(db.workspaceId);
|
||||
dbMapping.delete(db.workspaceId);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export async function ensureSQLiteDB(id: string) {
|
||||
let workspaceDB = dbMapping.get(id);
|
||||
if (!workspaceDB) {
|
||||
logger.info('[ensureSQLiteDB] open db connection', id);
|
||||
workspaceDB = openWorkspaceDatabase(appContext, id);
|
||||
dbMapping.set(id, workspaceDB);
|
||||
startWatchingDBFile(await workspaceDB);
|
||||
}
|
||||
return await workspaceDB;
|
||||
}
|
||||
|
||||
export async function disconnectSQLiteDB(id: string) {
|
||||
const dbp = dbMapping.get(id);
|
||||
if (dbp) {
|
||||
const db = await dbp;
|
||||
logger.info('close db connection', id);
|
||||
db.destroy();
|
||||
dbWatchers.get(id)?.();
|
||||
dbWatchers.delete(id);
|
||||
dbMapping.delete(id);
|
||||
}
|
||||
}
|
||||
|
||||
export async function cleanupSQLiteDBs() {
|
||||
for (const [id] of dbMapping) {
|
||||
logger.info('close db connection', id);
|
||||
await disconnectSQLiteDB(id);
|
||||
}
|
||||
dbMapping.clear();
|
||||
dbWatchers.clear();
|
||||
}
|
||||
@@ -1,231 +0,0 @@
|
||||
import path from 'node:path';
|
||||
|
||||
import type { Database } from 'better-sqlite3';
|
||||
import sqlite from 'better-sqlite3';
|
||||
import fs from 'fs-extra';
|
||||
import * as Y from 'yjs';
|
||||
|
||||
import type { AppContext } from '../../context';
|
||||
import { logger } from '../../logger';
|
||||
import { ts } from '../../utils';
|
||||
|
||||
const schemas = [
|
||||
`CREATE TABLE IF NOT EXISTS "updates" (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
data BLOB NOT NULL,
|
||||
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
|
||||
)`,
|
||||
`CREATE TABLE IF NOT EXISTS "blobs" (
|
||||
key TEXT PRIMARY KEY NOT NULL,
|
||||
data BLOB NOT NULL,
|
||||
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
|
||||
)`,
|
||||
];
|
||||
|
||||
interface UpdateRow {
|
||||
id: number;
|
||||
data: Buffer;
|
||||
timestamp: string;
|
||||
}
|
||||
|
||||
interface BlobRow {
|
||||
key: string;
|
||||
data: Buffer;
|
||||
timestamp: string;
|
||||
}
|
||||
|
||||
const SQLITE_ORIGIN = Symbol('sqlite-origin');
|
||||
|
||||
export class WorkspaceSQLiteDB {
|
||||
db: Database;
|
||||
ydoc = new Y.Doc();
|
||||
firstConnect = false;
|
||||
lastUpdateTime = ts();
|
||||
|
||||
constructor(public path: string, public workspaceId: string) {
|
||||
this.db = this.reconnectDB();
|
||||
}
|
||||
|
||||
// release resources
|
||||
destroy = () => {
|
||||
this.db?.close();
|
||||
this.ydoc.destroy();
|
||||
};
|
||||
|
||||
getWorkspaceName = () => {
|
||||
return this.ydoc.getMap('space:meta').get('name') as string;
|
||||
};
|
||||
|
||||
reconnectDB = () => {
|
||||
logger.log('open db', this.workspaceId);
|
||||
if (this.db) {
|
||||
this.db.close();
|
||||
}
|
||||
|
||||
// use cached version?
|
||||
const db = (this.db = sqlite(this.path));
|
||||
db.exec(schemas.join(';'));
|
||||
|
||||
if (!this.firstConnect) {
|
||||
this.ydoc.on('update', (update: Uint8Array, origin) => {
|
||||
if (origin !== SQLITE_ORIGIN) {
|
||||
this.addUpdateToSQLite(update);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
Y.transact(this.ydoc, () => {
|
||||
const updates = this.getUpdates();
|
||||
updates.forEach(update => {
|
||||
// give SQLITE_ORIGIN to skip self update
|
||||
Y.applyUpdate(this.ydoc, update.data, SQLITE_ORIGIN);
|
||||
});
|
||||
});
|
||||
|
||||
this.lastUpdateTime = ts();
|
||||
|
||||
if (this.firstConnect) {
|
||||
logger.info('db reconnected', this.workspaceId);
|
||||
} else {
|
||||
logger.info('db connected', this.workspaceId);
|
||||
}
|
||||
|
||||
this.firstConnect = true;
|
||||
|
||||
return db;
|
||||
};
|
||||
|
||||
getDocAsUpdates = () => {
|
||||
return Y.encodeStateAsUpdate(this.ydoc);
|
||||
};
|
||||
|
||||
// non-blocking and use yDoc to validate the update
|
||||
// after that, the update is added to the db
|
||||
applyUpdate = (data: Uint8Array) => {
|
||||
Y.applyUpdate(this.ydoc, data);
|
||||
|
||||
// todo: trim the updates when the number of records is too large
|
||||
// 1. store the current ydoc state in the db
|
||||
// 2. then delete the old updates
|
||||
// yjs-idb will always trim the db for the first time after DB is loaded
|
||||
this.lastUpdateTime = ts();
|
||||
logger.debug('applyUpdate', this.workspaceId, this.lastUpdateTime);
|
||||
};
|
||||
|
||||
addBlob = (key: string, data: Uint8Array) => {
|
||||
this.lastUpdateTime = ts();
|
||||
try {
|
||||
const statement = this.db.prepare(
|
||||
'INSERT INTO blobs (key, data) VALUES (?, ?) ON CONFLICT(key) DO UPDATE SET data = ?'
|
||||
);
|
||||
statement.run(key, data, data);
|
||||
return key;
|
||||
} catch (error) {
|
||||
logger.error('addBlob', error);
|
||||
}
|
||||
};
|
||||
|
||||
getBlob = (key: string) => {
|
||||
try {
|
||||
const statement = this.db.prepare('SELECT data FROM blobs WHERE key = ?');
|
||||
const row = statement.get(key) as BlobRow;
|
||||
if (!row) {
|
||||
return null;
|
||||
}
|
||||
return row.data;
|
||||
} catch (error) {
|
||||
logger.error('getBlob', error);
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
deleteBlob = (key: string) => {
|
||||
this.lastUpdateTime = ts();
|
||||
try {
|
||||
const statement = this.db.prepare('DELETE FROM blobs WHERE key = ?');
|
||||
statement.run(key);
|
||||
} catch (error) {
|
||||
logger.error('deleteBlob', error);
|
||||
}
|
||||
};
|
||||
|
||||
getPersistentBlobKeys = () => {
|
||||
try {
|
||||
const statement = this.db.prepare('SELECT key FROM blobs');
|
||||
const rows = statement.all() as BlobRow[];
|
||||
return rows.map(row => row.key);
|
||||
} catch (error) {
|
||||
logger.error('getPersistentBlobKeys', error);
|
||||
return [];
|
||||
}
|
||||
};
|
||||
|
||||
private getUpdates = () => {
|
||||
try {
|
||||
const statement = this.db.prepare('SELECT * FROM updates');
|
||||
const rows = statement.all() as UpdateRow[];
|
||||
return rows;
|
||||
} catch (error) {
|
||||
logger.error('getUpdates', error);
|
||||
return [];
|
||||
}
|
||||
};
|
||||
|
||||
// batch write instead write per key stroke?
|
||||
private addUpdateToSQLite = (data: Uint8Array) => {
|
||||
try {
|
||||
const start = performance.now();
|
||||
const statement = this.db.prepare(
|
||||
'INSERT INTO updates (data) VALUES (?)'
|
||||
);
|
||||
statement.run(data);
|
||||
logger.debug(
|
||||
'addUpdateToSQLite',
|
||||
this.workspaceId,
|
||||
'length:',
|
||||
data.length,
|
||||
performance.now() - start,
|
||||
'ms'
|
||||
);
|
||||
} catch (error) {
|
||||
logger.error('addUpdateToSQLite', error);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
export async function getWorkspaceDBPath(
|
||||
context: AppContext,
|
||||
workspaceId: string
|
||||
) {
|
||||
const basePath = path.join(context.appDataPath, 'workspaces', workspaceId);
|
||||
await fs.ensureDir(basePath);
|
||||
return path.join(basePath, 'storage.db');
|
||||
}
|
||||
|
||||
export async function openWorkspaceDatabase(
|
||||
context: AppContext,
|
||||
workspaceId: string
|
||||
) {
|
||||
const dbPath = await getWorkspaceDBPath(context, workspaceId);
|
||||
return new WorkspaceSQLiteDB(dbPath, workspaceId);
|
||||
}
|
||||
|
||||
export function isValidDBFile(path: string) {
|
||||
try {
|
||||
const db = sqlite(path);
|
||||
// check if db has two tables, one for updates and onefor blobs
|
||||
const statement = db.prepare(
|
||||
`SELECT name FROM sqlite_schema WHERE type='table'`
|
||||
);
|
||||
const rows = statement.all() as { name: string }[];
|
||||
const tableNames = rows.map(row => row.name);
|
||||
if (!tableNames.includes('updates') || !tableNames.includes('blobs')) {
|
||||
return false;
|
||||
}
|
||||
db.close();
|
||||
return true;
|
||||
} catch (error) {
|
||||
logger.error('isValidDBFile', error);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -1 +0,0 @@
|
||||
export * from './register';
|
||||
@@ -1,8 +0,0 @@
|
||||
export type IsomorphicHandler = (
|
||||
e: Electron.IpcMainInvokeEvent,
|
||||
...args: any[]
|
||||
) => Promise<any>;
|
||||
|
||||
export type NamespaceHandlers = {
|
||||
[key: string]: IsomorphicHandler;
|
||||
};
|
||||
@@ -1,23 +0,0 @@
|
||||
import { BrowserWindow, nativeTheme } from 'electron';
|
||||
|
||||
import { isMacOS } from '../../../../utils';
|
||||
import type { NamespaceHandlers } from '../type';
|
||||
import { getGoogleOauthCode } from './google-auth';
|
||||
|
||||
export const uiHandlers = {
|
||||
handleThemeChange: async (_, theme: (typeof nativeTheme)['themeSource']) => {
|
||||
nativeTheme.themeSource = theme;
|
||||
},
|
||||
handleSidebarVisibilityChange: async (_, visible: boolean) => {
|
||||
if (isMacOS()) {
|
||||
const windows = BrowserWindow.getAllWindows();
|
||||
windows.forEach(w => {
|
||||
// hide window buttons when sidebar is not visible
|
||||
w.setWindowButtonVisibility(visible);
|
||||
});
|
||||
}
|
||||
},
|
||||
getGoogleOauthCode: async () => {
|
||||
return getGoogleOauthCode();
|
||||
},
|
||||
} satisfies NamespaceHandlers;
|
||||
@@ -1,10 +0,0 @@
|
||||
import type { NamespaceHandlers } from '../type';
|
||||
import { updateClient } from './updater';
|
||||
|
||||
export const updaterHandlers = {
|
||||
updateClient: async () => {
|
||||
return updateClient();
|
||||
},
|
||||
} satisfies NamespaceHandlers;
|
||||
|
||||
export * from './updater';
|
||||
@@ -1,69 +0,0 @@
|
||||
import type { AppUpdater } from 'electron-updater';
|
||||
|
||||
import { isMacOS } from '../../../../utils';
|
||||
import { updaterSubjects } from '../../events/updater';
|
||||
import { logger } from '../../logger';
|
||||
|
||||
const buildType = (process.env.BUILD_TYPE || 'canary').trim().toLowerCase();
|
||||
const mode = process.env.NODE_ENV;
|
||||
const isDev = mode === 'development';
|
||||
|
||||
let _autoUpdater: AppUpdater | null = null;
|
||||
|
||||
export const updateClient = async () => {
|
||||
_autoUpdater?.quitAndInstall();
|
||||
};
|
||||
|
||||
export const registerUpdater = async () => {
|
||||
// require it will cause some side effects and will break generate-main-exposed-meta,
|
||||
// so we wrap it in a function
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const { autoUpdater } = await import('electron-updater');
|
||||
|
||||
_autoUpdater = autoUpdater;
|
||||
|
||||
autoUpdater.autoDownload = false;
|
||||
autoUpdater.allowPrerelease = buildType !== 'stable';
|
||||
autoUpdater.autoInstallOnAppQuit = false;
|
||||
autoUpdater.autoRunAppAfterInstall = true;
|
||||
autoUpdater.setFeedURL({
|
||||
channel: buildType,
|
||||
provider: 'github',
|
||||
repo: 'AFFiNE',
|
||||
owner: 'toeverything',
|
||||
releaseType: buildType === 'stable' ? 'release' : 'prerelease',
|
||||
});
|
||||
|
||||
autoUpdater.autoDownload = false;
|
||||
autoUpdater.allowPrerelease = buildType !== 'stable';
|
||||
autoUpdater.autoInstallOnAppQuit = false;
|
||||
autoUpdater.autoRunAppAfterInstall = true;
|
||||
autoUpdater.setFeedURL({
|
||||
channel: buildType,
|
||||
provider: 'github',
|
||||
repo: 'AFFiNE',
|
||||
owner: 'toeverything',
|
||||
releaseType: buildType === 'stable' ? 'release' : 'prerelease',
|
||||
});
|
||||
|
||||
if (isMacOS()) {
|
||||
autoUpdater.on('update-available', () => {
|
||||
autoUpdater.downloadUpdate();
|
||||
logger.info('Update available, downloading...');
|
||||
});
|
||||
autoUpdater.on('download-progress', e => {
|
||||
logger.info(`Download progress: ${e.percent}`);
|
||||
});
|
||||
autoUpdater.on('update-downloaded', e => {
|
||||
updaterSubjects.clientUpdateReady.next({
|
||||
version: e.version,
|
||||
});
|
||||
logger.info('Update downloaded, ready to install');
|
||||
});
|
||||
autoUpdater.on('error', e => {
|
||||
logger.error('Error while updating client', e);
|
||||
});
|
||||
autoUpdater.forceDevUpdateConfig = isDev;
|
||||
await autoUpdater.checkForUpdatesAndNotify();
|
||||
}
|
||||
};
|
||||
@@ -1,8 +0,0 @@
|
||||
import { appContext } from '../../context';
|
||||
import type { NamespaceHandlers } from '../type';
|
||||
import { deleteWorkspace, listWorkspaces } from './workspace';
|
||||
|
||||
export const workspaceHandlers = {
|
||||
list: async () => listWorkspaces(appContext),
|
||||
delete: async (_, id: string) => deleteWorkspace(appContext, id),
|
||||
} satisfies NamespaceHandlers;
|
||||
@@ -1,60 +0,0 @@
|
||||
import path from 'node:path';
|
||||
|
||||
import fs from 'fs-extra';
|
||||
|
||||
import type { AppContext } from '../../context';
|
||||
import { logger } from '../../logger';
|
||||
|
||||
interface WorkspaceMeta {
|
||||
path: string;
|
||||
realpath: string;
|
||||
}
|
||||
|
||||
export async function listWorkspaces(
|
||||
context: AppContext
|
||||
): Promise<[workspaceId: string, meta: WorkspaceMeta][]> {
|
||||
const basePath = path.join(context.appDataPath, 'workspaces');
|
||||
try {
|
||||
await fs.ensureDir(basePath);
|
||||
const dirs = await fs.readdir(basePath, {
|
||||
withFileTypes: true,
|
||||
});
|
||||
|
||||
const meta = await Promise.all(
|
||||
dirs.map(async dir => {
|
||||
const dbFilePath = path.join(basePath, dir.name, 'storage.db');
|
||||
if (dir.isDirectory() && (await fs.exists(dbFilePath))) {
|
||||
// try read storage.db under it
|
||||
const realpath = await fs.realpath(dbFilePath);
|
||||
return [dir.name, { path: dbFilePath, realpath }] as [
|
||||
string,
|
||||
WorkspaceMeta
|
||||
];
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
return meta.filter((w): w is [string, WorkspaceMeta] => !!w);
|
||||
} catch (error) {
|
||||
logger.error('listWorkspaces', error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
export async function deleteWorkspace(context: AppContext, id: string) {
|
||||
const basePath = path.join(context.appDataPath, 'workspaces', id);
|
||||
const movedPath = path.join(
|
||||
context.appDataPath,
|
||||
'delete-workspaces',
|
||||
`${id}`
|
||||
);
|
||||
try {
|
||||
return await fs.move(basePath, movedPath, {
|
||||
overwrite: true,
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('deleteWorkspace', error);
|
||||
}
|
||||
}
|
||||
@@ -2,13 +2,15 @@ import './security-restrictions';

import { app } from 'electron';

import { createApplicationMenu } from './application-menu/create';
import { registerEvents } from './events';
import { registerHandlers } from './handlers';
import { registerUpdater } from './handlers/updater';
import { logger } from './logger';
import { restoreOrCreateWindow } from './main-window';
import { registerProtocol } from './protocol';
import { registerUpdater } from './updater';

if (require('electron-squirrel-startup')) app.quit();
// allow tests to overwrite app name through passing args
if (process.argv.includes('--app-name')) {
  const appNameIndex = process.argv.indexOf('--app-name');
@@ -57,16 +59,6 @@ app
  .then(registerHandlers)
  .then(registerEvents)
  .then(restoreOrCreateWindow)
  .then(createApplicationMenu)
  .then(registerUpdater)
  .catch(e => console.error('Failed create window:', e));
/**
 * Check new app version in production mode only
 */
// FIXME: add me back later
// if (import.meta.env.PROD) {
//   app
//     .whenReady()
//     .then(() => import('electron-updater'))
//     .then(({ autoUpdater }) => autoUpdater.checkForUpdatesAndNotify())
//     .catch(e => console.error('Failed check updates:', e));
// }
@@ -7,7 +7,7 @@ export function getLogFilePath() {
  return log.transports.file.getFile().path;
}

export function revealLogFile() {
export async function revealLogFile() {
  const filePath = getLogFilePath();
  shell.showItemInFolder(filePath);
  return await shell.openPath(filePath);
}
@@ -2,8 +2,9 @@ import { BrowserWindow, nativeTheme } from 'electron';
import electronWindowState from 'electron-window-state';
import { join } from 'path';

import { isMacOS } from '../../utils';
import { getExposedMeta } from './exposed';
import { logger } from './logger';
import { isMacOS, isWindows } from './utils';

const IS_DEV: boolean =
  process.env.NODE_ENV === 'development' && !process.env.CI;
@@ -17,14 +18,20 @@ async function createWindow() {
    defaultHeight: 800,
  });

  const exposedMeta = getExposedMeta();

  const browserWindow = new BrowserWindow({
    titleBarStyle: isMacOS() ? 'hiddenInset' : 'default',
    titleBarStyle: isMacOS()
      ? 'hiddenInset'
      : isWindows()
      ? 'hidden'
      : 'default',
    trafficLightPosition: { x: 24, y: 18 },
    x: mainWindowState.x,
    y: mainWindowState.y,
    width: mainWindowState.width,
    minWidth: 640,
    transparent: isMacOS(),
    minHeight: 480,
    visualEffectState: 'active',
    vibrancy: 'under-window',
    height: mainWindowState.height,
@@ -36,6 +43,8 @@ async function createWindow() {
      webviewTag: false, // The webview tag is not recommended. Consider alternatives like iframe or Electron's BrowserView. https://www.electronjs.org/docs/latest/api/webview-tag#warning
      spellcheck: false, // FIXME: enable?
      preload: join(__dirname, '../preload/index.js'),
      // serialize exposed meta that to be used in preload
      additionalArguments: [`--exposed-meta=` + JSON.stringify(exposedMeta)],
    },
  });
apps/electron/layers/main/src/type.ts (new file, +18)
@@ -0,0 +1,18 @@
export type MainEventListener = (...args: any[]) => () => void;

export type IsomorphicHandler = (
  e: Electron.IpcMainInvokeEvent,
  ...args: any[]
) => Promise<any>;

export type NamespaceHandlers = {
  [key: string]: IsomorphicHandler;
};

export interface WorkspaceMeta {
  id: string;
  mainDBPath: string;
  secondaryDBPath?: string; // assume there will be only one
}

export type YOrigin = 'self' | 'external' | 'upstream' | 'renderer';
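
These shared types tie the rest of the diff together: every `handlers/*` namespace must satisfy `NamespaceHandlers`, `WorkspaceMeta` is what `storeWorkspaceMeta`/`getWorkspaceMeta` persist, and `YOrigin` tags where a Yjs update came from. A hedged sketch of how they are consumed; the paths and ids are placeholders.

import type { NamespaceHandlers, WorkspaceMeta, YOrigin } from './type';

const meta: WorkspaceMeta = {
  id: 'abc123',
  mainDBPath: '/appData/workspaces/abc123/storage.db',
  // the external .affine copy, when the user has chosen one
  secondaryDBPath: '/Documents/Demo_abc123.affine',
};

// every namespace exported from handlers/* has to satisfy this shape
const demoHandlers = {
  echo: async (_e: Electron.IpcMainInvokeEvent, value: string) => value,
} satisfies NamespaceHandlers;

// origins distinguish where a Yjs update came from when routing persistence
const origin: YOrigin = 'renderer';

export { demoHandlers, meta, origin };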
apps/electron/layers/main/src/ui/get-meta-data/get-html.ts (new file, +49)
@@ -0,0 +1,49 @@
import { BrowserWindow } from 'electron';

import type { GetHTMLOptions } from './types';

async function getHTMLFromWindow(win: BrowserWindow): Promise<string> {
  return win.webContents
    .executeJavaScript(`document.documentElement.outerHTML;`)
    .then(html => html);
}

// For normal web pages, obtaining html can be done directly,
// but for some dynamic web pages, obtaining html should wait for the complete loading of web pages. shouldReGetHTML should be used to judge whether to obtain html again
export async function getHTMLByURL(
  url: string,
  options: GetHTMLOptions
): Promise<string> {
  return new Promise(resolve => {
    const { timeout = 10000, shouldReGetHTML } = options;
    const window = new BrowserWindow({
      show: false,
    });
    let html = '';
    window.loadURL(url);

    const timer = setTimeout(() => {
      resolve(html);
      window.close();
    }, timeout);

    async function loopHandle() {
      html = await getHTMLFromWindow(window);
      if (!shouldReGetHTML) {
        return html;
      }

      if (await shouldReGetHTML(html)) {
        setTimeout(loopHandle, 1000);
      } else {
        window.close();
        clearTimeout(timer);
        resolve(html);
      }
    }

    window.webContents.on('did-finish-load', async () => {
      loopHandle();
    });
  });
}
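
The helper above loads the URL in a hidden BrowserWindow and, while `shouldReGetHTML` keeps returning true, re-reads the DOM every second until the timeout fires. A hedged usage sketch; the predicate below (wait until a `<title>` shows up) is just an example.

import { getHTMLByURL } from './get-html';

async function fetchRenderedHTML(url: string) {
  return getHTMLByURL(url, {
    timeout: 10_000,
    // true => poll again in 1s; false => resolve with the current html
    shouldReGetHTML: async current => !/<title>[^<]+<\/title>/i.test(current),
  });
}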
apps/electron/layers/main/src/ui/get-meta-data/index.ts (new file, +107)
@@ -0,0 +1,107 @@
import type { CheerioAPI, Element } from 'cheerio';
import { load } from 'cheerio';

import type { Context, MetaData, Options, RuleSet } from './types';

export * from './types';

import { getHTMLByURL } from './get-html';
import { metaDataRules } from './rules';
import type { GetMetaDataOptions } from './types';

function runRule(ruleSet: RuleSet, $: CheerioAPI, context: Context) {
  let maxScore = 0;
  let value;

  for (let currRule = 0; currRule < ruleSet.rules.length; currRule++) {
    const [query, handler] = ruleSet.rules[currRule];
    const elements = Array.from($(query));

    if (elements.length) {
      for (const element of elements) {
        let score = ruleSet.rules.length - currRule;

        if (ruleSet.scorer) {
          const newScore = ruleSet.scorer(element as Element, score);

          if (newScore) {
            score = newScore;
          }
        }

        if (score > maxScore) {
          maxScore = score;
          value = handler(element as Element);
        }
      }
    }
  }

  if (value) {
    if (ruleSet.processor) {
      value = ruleSet.processor(value, context);
    }

    return value;
  }

  if (ruleSet.defaultValue) {
    return ruleSet.defaultValue(context);
  }

  return undefined;
}

async function getMetaDataByHTML(
  html: string,
  url: string,
  options: GetMetaDataOptions
) {
  const { customRules = {} } = options;
  const rules: Record<string, RuleSet> = { ...metaDataRules };
  Object.keys(customRules).forEach((key: string) => {
    rules[key] = {
      rules: [...metaDataRules[key].rules, ...customRules[key].rules],
      defaultValue:
        customRules[key].defaultValue || metaDataRules[key].defaultValue,
      processor: customRules[key].processor || metaDataRules[key].processor,
    };
  });

  const metadata: MetaData = {};
  const context: Context = {
    url,
    ...options,
  };

  const $ = load(html);

  Object.keys(rules).forEach((key: string) => {
    const ruleSet = rules[key];
    metadata[key] = runRule(ruleSet, $, context) || undefined;
  });

  return metadata;
}

export async function getMetaData(url: string, options: Options = {}) {
  const { customRules, forceImageHttps, shouldReGetHTML, ...other } = options;
  const html = await getHTMLByURL(url, {
    ...other,
    shouldReGetHTML: async html => {
      const meta = await getMetaDataByHTML(html, url, {
        customRules,
        forceImageHttps,
      });
      return shouldReGetHTML ? await shouldReGetHTML(meta) : false;
    },
  }).catch(() => {
    // TODO: report error
    return '';
  });

  return await getMetaDataByHTML(html, url, {
    customRules,
    forceImageHttps,
  });
}
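
Putting the two files together: `getMetaData` fetches the rendered HTML, runs every rule set against it, and lets callers extend a rule set through `customRules`. A hedged usage sketch; the extra selector and the exact shape accepted by `customRules` are assumptions based on the merge logic above.

import { getMetaData } from './index';

async function preview(url: string) {
  const meta = await getMetaData(url, {
    customRules: {
      // extend the built-in `title` rule set with one more selector,
      // mirroring the shape of the entries in rules.ts
      title: {
        rules: [
          [
            'meta[name="twitter:text:title"][content]',
            element => element.attribs['content'],
          ],
        ],
      },
    },
  });
  return { title: meta.title, description: meta.description };
}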
690
apps/electron/layers/main/src/ui/get-meta-data/rules.ts
Normal file
690
apps/electron/layers/main/src/ui/get-meta-data/rules.ts
Normal file
@@ -0,0 +1,690 @@
|
||||
import type { RuleSet } from './types';
|
||||
import { getProvider, makeUrlAbsolute, makeUrlSecure, parseUrl } from './utils';
|
||||
|
||||
export const metaDataRules: Record<string, RuleSet> = {
|
||||
title: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:title"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="og:title"][content]', element => element.attribs['content']],
|
||||
[
|
||||
'meta[property="twitter:title"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="twitter:title"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="parsely-title"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="parsely-title"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="sailthru.title"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="sailthru.title"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['title', (element: any) => element.text],
|
||||
],
|
||||
},
|
||||
description: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:description"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:description"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="description" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="description" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="sailthru.description"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="sailthru.description"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="twitter:description"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
        'meta[name="twitter:description"][content]',
        element => element.attribs['content'],
      ],
      ['meta[property="summary" i][content]', element => element.attribs['content']],
      ['meta[name="summary" i][content]', element => element.attribs['content']],
    ],
  },
  language: {
    rules: [
      ['html[lang]', element => element.attribs['lang']],
      ['meta[property="language" i][content]', element => element.attribs['content']],
      ['meta[name="language" i][content]', element => element.attribs['content']],
      ['meta[property="og:locale"][content]', element => element.attribs['content']],
      ['meta[name="og:locale"][content]', element => element.attribs['content']],
    ],
    processor: (language: any) => language.split('-')[0],
  },
  type: {
    rules: [
      ['meta[property="og:type"][content]', element => element.attribs['content']],
      ['meta[name="og:type"][content]', element => element.attribs['content']],
      ['meta[property="parsely-type"][content]', element => element.attribs['content']],
      ['meta[name="parsely-type"][content]', element => element.attribs['content']],
      ['meta[property="medium"][content]', element => element.attribs['content']],
      ['meta[name="medium"][content]', element => element.attribs['content']],
    ],
  },
  url: {
    rules: [
      ['meta[property="og:url"][content]', element => element.attribs['content']],
      ['meta[name="og:url"][content]', element => element.attribs['content']],
      ['meta[property="al:web:url"][content]', element => element.attribs['content']],
      ['meta[name="al:web:url"][content]', element => element.attribs['content']],
      ['meta[property="parsely-link"][content]', element => element.attribs['content']],
      ['meta[name="parsely-link"][content]', element => element.attribs['content']],
      ['a.amp-canurl', element => element.attribs['href']],
      ['link[rel="canonical"][href]', element => element.attribs['href']],
    ],
    defaultValue: context => context.url,
    processor: (url: any, context) => makeUrlAbsolute(context.url, url),
  },
  provider: {
    rules: [
      ['meta[property="og:site_name"][content]', element => element.attribs['content']],
      ['meta[name="og:site_name"][content]', element => element.attribs['content']],
      ['meta[property="publisher" i][content]', element => element.attribs['content']],
      ['meta[name="publisher" i][content]', element => element.attribs['content']],
      ['meta[property="application-name" i][content]', element => element.attribs['content']],
      ['meta[name="application-name" i][content]', element => element.attribs['content']],
      ['meta[property="al:android:app_name"][content]', element => element.attribs['content']],
      ['meta[name="al:android:app_name"][content]', element => element.attribs['content']],
      ['meta[property="al:iphone:app_name"][content]', element => element.attribs['content']],
      ['meta[name="al:iphone:app_name"][content]', element => element.attribs['content']],
      ['meta[property="al:ipad:app_name"][content]', element => element.attribs['content']],
      ['meta[name="al:ipad:app_name"][content]', element => element.attribs['content']],
      ['meta[property="al:ios:app_name"][content]', element => element.attribs['content']],
      ['meta[name="al:ios:app_name"][content]', element => element.attribs['content']],
      ['meta[property="twitter:app:name:iphone"][content]', element => element.attribs['content']],
      ['meta[name="twitter:app:name:iphone"][content]', element => element.attribs['content']],
      ['meta[property="twitter:app:name:ipad"][content]', element => element.attribs['content']],
      ['meta[name="twitter:app:name:ipad"][content]', element => element.attribs['content']],
      ['meta[property="twitter:app:name:googleplay"][content]', element => element.attribs['content']],
      ['meta[name="twitter:app:name:googleplay"][content]', element => element.attribs['content']],
    ],
    defaultValue: context => getProvider(parseUrl(context.url)),
  },
  keywords: {
    rules: [
      ['meta[property="keywords" i][content]', element => element.attribs['content']],
      ['meta[name="keywords" i][content]', element => element.attribs['content']],
      ['meta[property="parsely-tags"][content]', element => element.attribs['content']],
      ['meta[name="parsely-tags"][content]', element => element.attribs['content']],
      ['meta[property="sailthru.tags"][content]', element => element.attribs['content']],
      ['meta[name="sailthru.tags"][content]', element => element.attribs['content']],
      ['meta[property="article:tag" i][content]', element => element.attribs['content']],
      ['meta[name="article:tag" i][content]', element => element.attribs['content']],
      ['meta[property="book:tag" i][content]', element => element.attribs['content']],
      ['meta[name="book:tag" i][content]', element => element.attribs['content']],
      ['meta[property="topic" i][content]', element => element.attribs['content']],
      ['meta[name="topic" i][content]', element => element.attribs['content']],
    ],
    processor: (keywords: any) =>
      keywords.split(',').map((keyword: string) => keyword.trim()),
  },
  section: {
    rules: [
      ['meta[property="article:section"][content]', element => element.attribs['content']],
      ['meta[name="article:section"][content]', element => element.attribs['content']],
      ['meta[property="category"][content]', element => element.attribs['content']],
      ['meta[name="category"][content]', element => element.attribs['content']],
    ],
  },
  author: {
    rules: [
      ['meta[property="author" i][content]', element => element.attribs['content']],
      ['meta[name="author" i][content]', element => element.attribs['content']],
      ['meta[property="article:author"][content]', element => element.attribs['content']],
      ['meta[name="article:author"][content]', element => element.attribs['content']],
      ['meta[property="book:author"][content]', element => element.attribs['content']],
      ['meta[name="book:author"][content]', element => element.attribs['content']],
      ['meta[property="parsely-author"][content]', element => element.attribs['content']],
      ['meta[name="parsely-author"][content]', element => element.attribs['content']],
      ['meta[property="sailthru.author"][content]', element => element.attribs['content']],
      ['meta[name="sailthru.author"][content]', element => element.attribs['content']],
      ['a[class*="author" i]', (element: any) => element.text],
      ['[rel="author"]', (element: any) => element.text],
      ['meta[property="twitter:creator"][content]', element => element.attribs['content']],
      ['meta[name="twitter:creator"][content]', element => element.attribs['content']],
      ['meta[property="profile:username"][content]', element => element.attribs['content']],
      ['meta[name="profile:username"][content]', element => element.attribs['content']],
    ],
  },
  published: {
    rules: [
      ['meta[property="article:published_time"][content]', element => element.attribs['content']],
      ['meta[name="article:published_time"][content]', element => element.attribs['content']],
      ['meta[property="published_time"][content]', element => element.attribs['content']],
      ['meta[name="published_time"][content]', element => element.attribs['content']],
      ['meta[property="parsely-pub-date"][content]', element => element.attribs['content']],
      ['meta[name="parsely-pub-date"][content]', element => element.attribs['content']],
      ['meta[property="sailthru.date"][content]', element => element.attribs['content']],
      ['meta[name="sailthru.date"][content]', element => element.attribs['content']],
      ['meta[property="date" i][content]', element => element.attribs['content']],
      ['meta[name="date" i][content]', element => element.attribs['content']],
      ['meta[property="release_date" i][content]', element => element.attribs['content']],
      ['meta[name="release_date" i][content]', element => element.attribs['content']],
      ['time[datetime]', element => element.attribs['datetime']],
      ['time[datetime][pubdate]', element => element.attribs['datetime']],
    ],
    processor: (value: any) =>
      Date.parse(value.toString())
        ? new Date(value.toString()).toISOString()
        : undefined,
  },
  modified: {
    rules: [
      ['meta[property="og:updated_time"][content]', element => element.attribs['content']],
      ['meta[name="og:updated_time"][content]', element => element.attribs['content']],
      ['meta[property="article:modified_time"][content]', element => element.attribs['content']],
      ['meta[name="article:modified_time"][content]', element => element.attribs['content']],
      ['meta[property="updated_time" i][content]', element => element.attribs['content']],
      ['meta[name="updated_time" i][content]', element => element.attribs['content']],
      ['meta[property="modified_time"][content]', element => element.attribs['content']],
      ['meta[name="modified_time"][content]', element => element.attribs['content']],
      ['meta[property="revised"][content]', element => element.attribs['content']],
      ['meta[name="revised"][content]', element => element.attribs['content']],
    ],
    processor: (value: any) =>
      Date.parse(value.toString())
        ? new Date(value.toString()).toISOString()
        : undefined,
  },
  robots: {
    rules: [
      ['meta[property="robots" i][content]', element => element.attribs['content']],
      ['meta[name="robots" i][content]', element => element.attribs['content']],
    ],
    processor: (keywords: any) =>
      keywords.split(',').map((keyword: string) => keyword.trim()),
  },
  copyright: {
    rules: [
      ['meta[property="copyright" i][content]', element => element.attribs['content']],
      ['meta[name="copyright" i][content]', element => element.attribs['content']],
    ],
  },
  email: {
    rules: [
      ['meta[property="email" i][content]', element => element.attribs['content']],
      ['meta[name="email" i][content]', element => element.attribs['content']],
      ['meta[property="reply-to" i][content]', element => element.attribs['content']],
      ['meta[name="reply-to" i][content]', element => element.attribs['content']],
    ],
  },
  twitter: {
    rules: [
      ['meta[property="twitter:site"][content]', element => element.attribs['content']],
      ['meta[name="twitter:site"][content]', element => element.attribs['content']],
    ],
  },
  facebook: {
    rules: [
      ['meta[property="fb:pages"][content]', element => element.attribs['content']],
      ['meta[name="fb:pages"][content]', element => element.attribs['content']],
    ],
  },
  image: {
    rules: [
      ['meta[property="og:image:secure_url"][content]', element => element.attribs['content']],
      ['meta[name="og:image:secure_url"][content]', element => element.attribs['content']],
      ['meta[property="og:image:url"][content]', element => element.attribs['content']],
      ['meta[name="og:image:url"][content]', element => element.attribs['content']],
      ['meta[property="og:image"][content]', element => element.attribs['content']],
      ['meta[name="og:image"][content]', element => element.attribs['content']],
      ['meta[property="twitter:image"][content]', element => element.attribs['content']],
      ['meta[name="twitter:image"][content]', element => element.attribs['content']],
      ['meta[property="twitter:image:src"][content]', element => element.attribs['content']],
      ['meta[name="twitter:image:src"][content]', element => element.attribs['content']],
      ['meta[property="thumbnail"][content]', element => element.attribs['content']],
      ['meta[name="thumbnail"][content]', element => element.attribs['content']],
      ['meta[property="parsely-image-url"][content]', element => element.attribs['content']],
      ['meta[name="parsely-image-url"][content]', element => element.attribs['content']],
      ['meta[property="sailthru.image.full"][content]', element => element.attribs['content']],
      ['meta[name="sailthru.image.full"][content]', element => element.attribs['content']],
    ],
    processor: (imageUrl: any, context) =>
      context.forceImageHttps === true
        ? makeUrlSecure(makeUrlAbsolute(context.url, imageUrl))
        : makeUrlAbsolute(context.url, imageUrl),
  },
  icon: {
    rules: [
      ['link[rel="apple-touch-icon"][href]', element => element.attribs['href']],
      ['link[rel="apple-touch-icon-precomposed"][href]', element => element.attribs['href']],
      ['link[rel="icon" i][href]', element => element.attribs['href']],
      ['link[rel="fluid-icon"][href]', element => element.attribs['href']],
      ['link[rel="shortcut icon"][href]', element => element.attribs['href']],
      ['link[rel="Shortcut Icon"][href]', element => element.attribs['href']],
      ['link[rel="mask-icon"][href]', element => element.attribs['href']],
    ],
    scorer: element => {
      const sizes = element.attribs['sizes'];
      if (sizes) {
        const sizeMatches = sizes.match(/\d+/g);
        if (sizeMatches) {
          const parsed = parseInt(sizeMatches[0]);
          if (!isNaN(parsed)) {
            return parsed;
          }
        }
      }
    },
    defaultValue: context => makeUrlAbsolute(context.url, '/favicon.ico'),
    processor: (iconUrl, context) =>
      context.forceImageHttps === true
        ? makeUrlSecure(makeUrlAbsolute(context.url, iconUrl))
        : makeUrlAbsolute(context.url, iconUrl),
  },
  video: {
    rules: [
      ['meta[property="og:video:secure_url"][content]', element => element.attribs['content']],
      ['meta[name="og:video:secure_url"][content]', element => element.attribs['content']],
      ['meta[property="og:video:url"][content]', element => element.attribs['content']],
      ['meta[name="og:video:url"][content]', element => element.attribs['content']],
      ['meta[property="og:video"][content]', element => element.attribs['content']],
      ['meta[name="og:video"][content]', element => element.attribs['content']],
    ],
    processor: (videoUrl: any, context) =>
      context.forceImageHttps === true
        ? makeUrlSecure(makeUrlAbsolute(context.url, videoUrl))
        : makeUrlAbsolute(context.url, videoUrl),
  },
  audio: {
    rules: [
      ['meta[property="og:audio:secure_url"][content]', element => element.attribs['content']],
      ['meta[name="og:audio:secure_url"][content]', element => element.attribs['content']],
      ['meta[property="og:audio:url"][content]', element => element.attribs['content']],
      ['meta[name="og:audio:url"][content]', element => element.attribs['content']],
      ['meta[property="og:audio"][content]', element => element.attribs['content']],
      ['meta[name="og:audio"][content]', element => element.attribs['content']],
    ],
    processor: (audioUrl: any, context) =>
      context.forceImageHttps === true
        ? makeUrlSecure(makeUrlAbsolute(context.url, audioUrl))
        : makeUrlAbsolute(context.url, audioUrl),
  },
};
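Note: for orientation only, the following is a minimal sketch of how one of these RuleSets could be evaluated against fetched HTML with cheerio. The `runRuleSet` helper and its wiring are assumptions for illustration; the actual evaluator in this patch may also rank candidates with `scorer`, which is omitted here.

// Illustrative sketch, not the evaluator shipped in this diff.
import { load } from 'cheerio';
import type { Element } from 'cheerio';
import type { Context, RuleSet } from './types';

function runRuleSet(ruleSet: RuleSet, html: string, context: Context) {
  const $ = load(html);
  for (const [selector, extract] of ruleSet.rules) {
    // take the first matching element for this selector
    const element = $(selector).get(0) as Element | undefined;
    const value = element ? extract(element) : null;
    if (value) {
      // processor normalizes the raw attribute (absolute URL, ISO date, ...)
      return ruleSet.processor ? ruleSet.processor(value, context) : value;
    }
  }
  // fall back to the rule set's default (e.g. context.url, /favicon.ico)
  return ruleSet.defaultValue?.(context);
}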
43  apps/electron/layers/main/src/ui/get-meta-data/types.ts  Normal file
@@ -0,0 +1,43 @@
import type { Element } from 'cheerio';

export type MetaData = {
  title?: string;
  description?: string;
  icon?: string;
  image?: string;
  keywords?: string[];
  language?: string;
  type?: string;
  url?: string;
  provider?: string;

  [x: string]: string | string[] | undefined;
};

export type MetadataRule = [string, (el: Element) => string | null];

export type Context = {
  url: string;
} & GetMetaDataOptions;

export type RuleSet = {
  rules: MetadataRule[];
  defaultValue?: (context: Context) => string | string[];
  scorer?: (el: Element, score: any) => any;
  processor?: (input: any, context: Context) => any;
};

export type GetMetaDataOptions = {
  customRules?: Record<string, RuleSet>;
  forceImageHttps?: boolean;
};

export type GetHTMLOptions = {
  timeout?: number;
  shouldReGetHTML?: (currentHTML: string) => boolean | Promise<boolean>;
};

export type Options = {
  shouldReGetHTML?: (metaData: MetaData) => boolean | Promise<boolean>;
} & GetMetaDataOptions &
  Omit<GetHTMLOptions, 'shouldReGetHTML'>;
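Note: since `GetMetaDataOptions.customRules` takes the same RuleSet shape, a caller can bolt on a site-specific field. The field name and selector below are made up for illustration; nothing in this diff defines a `price` rule.

import type { GetMetaDataOptions } from './types';

// Hypothetical caller-supplied rule: read a product price out of a page.
const options: GetMetaDataOptions = {
  forceImageHttps: true,
  customRules: {
    price: {
      rules: [
        ['meta[property="product:price:amount"][content]', el => el.attribs['content']],
      ],
      processor: (value: any) => value.trim(),
    },
  },
};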
28  apps/electron/layers/main/src/ui/get-meta-data/utils.ts  Normal file
@@ -0,0 +1,28 @@
import urlparse from 'url';

export function makeUrlAbsolute(base: string, relative: string): string {
  const relativeParsed = urlparse.parse(relative);

  if (relativeParsed.host === null) {
    return urlparse.resolve(base, relative);
  }

  return relative;
}

export function makeUrlSecure(url: string): string {
  return url.replace(/^http:/, 'https:');
}

export function parseUrl(url: string): string {
  return urlparse.parse(url).hostname || '';
}

export function getProvider(host: string): string {
  return host
    .replace(/www[a-zA-Z0-9]*\./, '')
    .replace('.co.', '.')
    .split('.')
    .slice(0, -1)
    .join(' ');
}
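Note: a quick illustration of what these URL helpers produce. The example values follow the node `url` module's resolve/parse semantics; treat them as an informal sketch rather than test fixtures.

import { getProvider, makeUrlAbsolute, makeUrlSecure, parseUrl } from './utils';

const base = 'https://blog.example.co.uk/posts/1';
console.log(makeUrlAbsolute(base, '/favicon.ico')); // 'https://blog.example.co.uk/favicon.ico'
console.log(makeUrlAbsolute(base, 'https://cdn.example.com/a.png')); // unchanged: already absolute
console.log(makeUrlSecure('http://example.com/img.png')); // 'https://example.com/img.png'
console.log(getProvider(parseUrl(base))); // 'blog example' — hostname minus its last label, dots as spaces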
@@ -1,7 +1,7 @@
import { app, BrowserWindow, shell } from 'electron';
import { parse } from 'url';

import { logger } from '../../logger';
import { logger } from '../logger';

const redirectUri = 'https://affine.pro/client/auth-callback';
50  apps/electron/layers/main/src/ui/index.ts  Normal file
@@ -0,0 +1,50 @@
import { app, BrowserWindow, nativeTheme } from 'electron';

import type { NamespaceHandlers } from '../type';
import { isMacOS } from '../utils';
import { getMetaData } from './get-meta-data';
import { getGoogleOauthCode } from './google-auth';

export const uiHandlers = {
  handleThemeChange: async (_, theme: (typeof nativeTheme)['themeSource']) => {
    nativeTheme.themeSource = theme;
  },
  handleSidebarVisibilityChange: async (_, visible: boolean) => {
    if (isMacOS()) {
      const windows = BrowserWindow.getAllWindows();
      windows.forEach(w => {
        // hide window buttons when sidebar is not visible
        w.setWindowButtonVisibility(visible);
      });
    }
  },
  handleMinimizeApp: async () => {
    const windows = BrowserWindow.getAllWindows();
    windows.forEach(w => {
      w.minimize();
    });
  },
  handleMaximizeApp: async () => {
    const windows = BrowserWindow.getAllWindows();
    windows.forEach(w => {
      if (w.isMaximized()) {
        w.unmaximize();
      } else {
        w.maximize();
      }
    });
  },
  handleCloseApp: async () => {
    app.quit();
  },
  getGoogleOauthCode: async () => {
    return getGoogleOauthCode();
  },
  getBookmarkDataByLink: async (_, url: string) => {
    return getMetaData(url, {
      shouldReGetHTML: metaData => {
        return !metaData.title && !metaData.description;
      },
    });
  },
} satisfies NamespaceHandlers;
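Note: a renderer-side sketch of calling one of these handlers. It assumes the uiHandlers above are exposed under a `ui` namespace by the preload bridge shown later in this diff; the namespace name and the `window.apis` shape are inferred from that wiring, not confirmed here.

// Illustrative renderer code, assuming window.apis.ui maps to uiHandlers.
async function previewBookmark(url: string) {
  const meta = await window.apis.ui.getBookmarkDataByLink(url);
  console.log(meta?.title, meta?.description, meta?.icon);
}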
99  apps/electron/layers/main/src/updater/electron-updater.ts  Normal file
@@ -0,0 +1,99 @@
import { app } from 'electron';
import type { AppUpdater } from 'electron-updater';
import { z } from 'zod';

import { logger } from '../logger';
import { isMacOS } from '../utils';
import { updaterSubjects } from './event';

export const ReleaseTypeSchema = z.enum([
  'stable',
  'beta',
  'canary',
  'internal',
]);

export const envBuildType = (process.env.BUILD_TYPE || 'canary')
  .trim()
  .toLowerCase();
export const buildType = ReleaseTypeSchema.parse(envBuildType);
const mode = process.env.NODE_ENV;
const isDev = mode === 'development';

let _autoUpdater: AppUpdater | null = null;

export const quitAndInstall = async () => {
  _autoUpdater?.quitAndInstall();
};

let lastCheckTime = 0;
export const checkForUpdatesAndNotify = async (force = true) => {
  if (!_autoUpdater) {
    return; // ?
  }
  // check every 30 minutes (1800 seconds) at most
  if (force || lastCheckTime + 1000 * 1800 < Date.now()) {
    lastCheckTime = Date.now();
    return await _autoUpdater.checkForUpdatesAndNotify();
  }
};

export const registerUpdater = async () => {
  // so we wrap it in a function
  // eslint-disable-next-line @typescript-eslint/no-var-requires
  const { autoUpdater } = require('electron-updater');

  _autoUpdater = autoUpdater;

  if (!_autoUpdater) {
    return;
  }

  // TODO: support auto update on windows and linux
  const allowAutoUpdate = isMacOS();

  _autoUpdater.autoDownload = false;
  _autoUpdater.allowPrerelease = buildType !== 'stable';
  _autoUpdater.autoInstallOnAppQuit = false;
  _autoUpdater.autoRunAppAfterInstall = true;
  _autoUpdater.setFeedURL({
    channel: buildType,
    provider: 'github',
    repo: buildType !== 'internal' ? 'AFFiNE' : 'AFFiNE-Releases',
    owner: 'toeverything',
    releaseType: buildType === 'stable' ? 'release' : 'prerelease',
  });

  // register events for checkForUpdatesAndNotify
  _autoUpdater.on('update-available', info => {
    if (allowAutoUpdate) {
      _autoUpdater!.downloadUpdate();
      logger.info('Update available, downloading...', info);
    }
    updaterSubjects.updateAvailable.next({
      version: info.version,
      allowAutoUpdate,
    });
  });
  _autoUpdater.on('download-progress', e => {
    logger.info(`Download progress: ${e.percent}`);
    updaterSubjects.downloadProgress.next(e.percent);
  });
  _autoUpdater.on('update-downloaded', e => {
    updaterSubjects.updateReady.next({
      version: e.version,
      allowAutoUpdate,
    });
    // I guess we can skip it?
    // updaterSubjects.clientDownloadProgress.next(100);
    logger.info('Update downloaded, ready to install');
  });
  _autoUpdater.on('error', e => {
    logger.error('Error while updating client', e);
  });
  _autoUpdater.forceDevUpdateConfig = isDev;

  app.on('activate', async () => {
    await checkForUpdatesAndNotify(false);
  });
};
36  apps/electron/layers/main/src/updater/event.ts  Normal file
@@ -0,0 +1,36 @@
import { BehaviorSubject, Subject } from 'rxjs';

import type { MainEventListener } from '../type';

export interface UpdateMeta {
  version: string;
  allowAutoUpdate: boolean;
}

export const updaterSubjects = {
  updateAvailable: new Subject<UpdateMeta>(),
  // means it is ready for restart and install the new version
  updateReady: new Subject<UpdateMeta>(),
  downloadProgress: new BehaviorSubject<number>(0),
};

export const updaterEvents = {
  onUpdateAvailable: (fn: (versionMeta: UpdateMeta) => void) => {
    const sub = updaterSubjects.updateAvailable.subscribe(fn);
    return () => {
      sub.unsubscribe();
    };
  },
  onUpdateReady: (fn: (versionMeta: UpdateMeta) => void) => {
    const sub = updaterSubjects.updateReady.subscribe(fn);
    return () => {
      sub.unsubscribe();
    };
  },
  onDownloadProgress: (fn: (progress: number) => void) => {
    const sub = updaterSubjects.downloadProgress.subscribe(fn);
    return () => {
      sub.unsubscribe();
    };
  },
} satisfies Record<string, MainEventListener>;
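Note: a short usage sketch for these listener factories; the callbacks are illustrative. The part worth copying is the returned cleanup function, which keeps subscriptions from piling up across window lifecycles.

import { updaterEvents } from './event';

// Subscribe while, say, an "About" dialog is open...
const disposeProgress = updaterEvents.onDownloadProgress(percent => {
  console.log(`update download: ${percent.toFixed(1)}%`);
});
const disposeReady = updaterEvents.onUpdateReady(({ version }) => {
  console.log(`version ${version} downloaded, waiting for restart`);
});

// ...then tear down when the dialog closes.
disposeProgress();
disposeReady();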
18  apps/electron/layers/main/src/updater/index.ts  Normal file
@@ -0,0 +1,18 @@
import { app } from 'electron';

import type { NamespaceHandlers } from '../type';
import { checkForUpdatesAndNotify, quitAndInstall } from './electron-updater';

export const updaterHandlers = {
  currentVersion: async () => {
    return app.getVersion();
  },
  quitAndInstall: async () => {
    return quitAndInstall();
  },
  checkForUpdatesAndNotify: async () => {
    return checkForUpdatesAndNotify(true);
  },
} satisfies NamespaceHandlers;

export * from './electron-updater';
@@ -1,19 +1,11 @@
export function debounce<T extends (...args: any[]) => void>(
  fn: T,
  delay: number
) {
  let timeoutId: NodeJS.Timer | undefined;
  return (...args: Parameters<T>) => {
    if (timeoutId) {
      clearTimeout(timeoutId);
    }
    timeoutId = setTimeout(() => {
      fn(...args);
      timeoutId = undefined;
    }, delay);
  };
}

export function ts() {
export function getTime() {
  return new Date().getTime();
}

export const isMacOS = () => {
  return process.platform === 'darwin';
};

export const isWindows = () => {
  return process.platform === 'win32';
};
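Note: the debounce helper removed in the hunk above is a standard trailing-edge debounce. For reference, a minimal usage sketch; the callback and delay are illustrative only.

// Trailing-edge debounce: only the last call within the delay window fires.
const saveSoon = debounce((text: string) => {
  console.log('persisting', text.length, 'chars');
}, 300);

saveSoon('a');
saveSoon('ab');
saveSoon('abc'); // only this last call fires, roughly 300 ms after the burst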
35  apps/electron/layers/main/src/workers/index.ts  Normal file
@@ -0,0 +1,35 @@
import path from 'node:path';
import { Worker } from 'node:worker_threads';

import { mergeUpdate } from './merge-update';

export function mergeUpdateWorker(updates: Uint8Array[]) {
  // fallback to main thread if worker is disabled (in vitest)
  if (process.env.USE_WORKER !== 'true') {
    return mergeUpdate(updates);
  }
  return new Promise<Uint8Array>((resolve, reject) => {
    // it is intended to have "./workers" in the path
    const workerFile = path.join(__dirname, './workers/merge-update.worker.js');

    // convert updates to SharedArrayBuffer[s]
    const sharedArrayBufferUpdates = updates.map(update => {
      const buffer = new SharedArrayBuffer(update.byteLength);
      const view = new Uint8Array(buffer);
      view.set(update);
      return view;
    });

    const worker = new Worker(workerFile, {
      workerData: sharedArrayBufferUpdates,
    });

    worker.on('message', resolve);
    worker.on('error', reject);
    worker.on('exit', code => {
      if (code !== 0) {
        reject(new Error(`Worker stopped with exit code ${code}`));
      }
    });
  });
}
11  apps/electron/layers/main/src/workers/merge-update.ts  Normal file
@@ -0,0 +1,11 @@
import * as Y from 'yjs';

export function mergeUpdate(updates: Uint8Array[]) {
  const yDoc = new Y.Doc();
  Y.transact(yDoc, () => {
    for (const update of updates) {
      Y.applyUpdate(yDoc, update);
    }
  });
  return Y.encodeStateAsUpdate(yDoc);
}
14  apps/electron/layers/main/src/workers/merge-update.worker.ts  Normal file
@@ -0,0 +1,14 @@
import { parentPort, workerData } from 'node:worker_threads';

import { mergeUpdate } from './merge-update';

function getMergeUpdate(updates: Uint8Array[]) {
  const update = mergeUpdate(updates);
  const buffer = new SharedArrayBuffer(update.byteLength);
  const view = new Uint8Array(buffer);
  view.set(update);

  return update;
}

parentPort?.postMessage(getMergeUpdate(workerData));
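Note: a sketch of what feeding this worker looks like from the main process, using two independent Yjs docs as the update sources; the doc contents and the function wrapper are illustrative only.

import * as Y from 'yjs';
import { mergeUpdateWorker } from './index';

async function example() {
  const a = new Y.Doc();
  a.getText('t').insert(0, 'hello ');
  const b = new Y.Doc();
  b.getText('t').insert(0, 'world');

  // merge the two encoded updates into a single update, off the main thread
  const merged = await mergeUpdateWorker([
    Y.encodeStateAsUpdate(a),
    Y.encodeStateAsUpdate(b),
  ]);

  const check = new Y.Doc();
  Y.applyUpdate(check, merged);
  console.log(check.getText('t').toString()); // both edits present
}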
1  apps/electron/layers/main/src/workspace/__tests__/.gitignore  vendored  Normal file
@@ -0,0 +1 @@
tmp
@@ -0,0 +1,208 @@
import path from 'node:path';
|
||||
|
||||
import fs from 'fs-extra';
|
||||
import { v4 } from 'uuid';
|
||||
import { afterEach, describe, expect, test, vi } from 'vitest';
|
||||
|
||||
import type { AppContext } from '../../context';
|
||||
|
||||
const tmpDir = path.join(__dirname, 'tmp');
|
||||
|
||||
const testAppContext: AppContext = {
|
||||
appDataPath: path.join(tmpDir, 'test-data'),
|
||||
appName: 'test',
|
||||
};
|
||||
|
||||
vi.doMock('../../context', () => ({
|
||||
appContext: testAppContext,
|
||||
}));
|
||||
|
||||
vi.doMock('../../db/ensure-db', () => ({
|
||||
ensureSQLiteDB: async () => ({
|
||||
destroy: () => {},
|
||||
}),
|
||||
}));
|
||||
|
||||
afterEach(async () => {
|
||||
await fs.remove(tmpDir);
|
||||
});
|
||||
|
||||
describe('list workspaces', () => {
|
||||
test('listWorkspaces (valid)', async () => {
|
||||
const { listWorkspaces } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
'workspaces',
|
||||
workspaceId
|
||||
);
|
||||
const meta = {
|
||||
id: workspaceId,
|
||||
};
|
||||
await fs.ensureDir(workspacePath);
|
||||
await fs.writeJSON(path.join(workspacePath, 'meta.json'), meta);
|
||||
const workspaces = await listWorkspaces(testAppContext);
|
||||
expect(workspaces).toEqual([[workspaceId, meta]]);
|
||||
});
|
||||
|
||||
test('listWorkspaces (without meta json file)', async () => {
|
||||
const { listWorkspaces } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
'workspaces',
|
||||
workspaceId
|
||||
);
|
||||
await fs.ensureDir(workspacePath);
|
||||
const workspaces = await listWorkspaces(testAppContext);
|
||||
expect(workspaces).toEqual([
|
||||
[
|
||||
workspaceId,
|
||||
// meta file will be created automatically
|
||||
{ id: workspaceId, mainDBPath: path.join(workspacePath, 'storage.db') },
|
||||
],
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('delete workspace', () => {
|
||||
test('deleteWorkspace', async () => {
|
||||
const { deleteWorkspace } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
'workspaces',
|
||||
workspaceId
|
||||
);
|
||||
await fs.ensureDir(workspacePath);
|
||||
await deleteWorkspace(testAppContext, workspaceId);
|
||||
expect(await fs.pathExists(workspacePath)).toBe(false);
|
||||
// removed workspace will be moved to delete-workspaces
|
||||
expect(
|
||||
await fs.pathExists(
|
||||
path.join(testAppContext.appDataPath, 'delete-workspaces', workspaceId)
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getWorkspaceMeta', () => {
|
||||
test('can get meta', async () => {
|
||||
const { getWorkspaceMeta } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
'workspaces',
|
||||
workspaceId
|
||||
);
|
||||
const meta = {
|
||||
id: workspaceId,
|
||||
};
|
||||
await fs.ensureDir(workspacePath);
|
||||
await fs.writeJSON(path.join(workspacePath, 'meta.json'), meta);
|
||||
expect(await getWorkspaceMeta(testAppContext, workspaceId)).toEqual(meta);
|
||||
});
|
||||
|
||||
test('can create meta if not exists', async () => {
|
||||
const { getWorkspaceMeta } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
'workspaces',
|
||||
workspaceId
|
||||
);
|
||||
await fs.ensureDir(workspacePath);
|
||||
expect(await getWorkspaceMeta(testAppContext, workspaceId)).toEqual({
|
||||
id: workspaceId,
|
||||
mainDBPath: path.join(workspacePath, 'storage.db'),
|
||||
});
|
||||
expect(
|
||||
await fs.pathExists(path.join(workspacePath, 'meta.json'))
|
||||
).toBeTruthy();
|
||||
});
|
||||
|
||||
test('can migrate meta if db file is a link', async () => {
|
||||
const { getWorkspaceMeta } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
'workspaces',
|
||||
workspaceId
|
||||
);
|
||||
await fs.ensureDir(workspacePath);
|
||||
const sourcePath = path.join(tmpDir, 'source.db');
|
||||
await fs.writeFile(sourcePath, 'test');
|
||||
|
||||
await fs.ensureSymlink(sourcePath, path.join(workspacePath, 'storage.db'));
|
||||
|
||||
expect(await getWorkspaceMeta(testAppContext, workspaceId)).toEqual({
|
||||
id: workspaceId,
|
||||
mainDBPath: path.join(workspacePath, 'storage.db'),
|
||||
secondaryDBPath: sourcePath,
|
||||
});
|
||||
|
||||
expect(
|
||||
await fs.pathExists(path.join(workspacePath, 'meta.json'))
|
||||
).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
test('storeWorkspaceMeta', async () => {
|
||||
const { storeWorkspaceMeta } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
'workspaces',
|
||||
workspaceId
|
||||
);
|
||||
await fs.ensureDir(workspacePath);
|
||||
const meta = {
|
||||
id: workspaceId,
|
||||
mainDBPath: path.join(workspacePath, 'storage.db'),
|
||||
};
|
||||
await storeWorkspaceMeta(testAppContext, workspaceId, meta);
|
||||
expect(await fs.readJSON(path.join(workspacePath, 'meta.json'))).toEqual(
|
||||
meta
|
||||
);
|
||||
await storeWorkspaceMeta(testAppContext, workspaceId, {
|
||||
secondaryDBPath: path.join(tmpDir, 'test.db'),
|
||||
});
|
||||
expect(await fs.readJSON(path.join(workspacePath, 'meta.json'))).toEqual({
|
||||
...meta,
|
||||
secondaryDBPath: path.join(tmpDir, 'test.db'),
|
||||
});
|
||||
});
|
||||
|
||||
test('getWorkspaceMeta observable', async () => {
|
||||
const { storeWorkspaceMeta } = await import('../handlers');
|
||||
const { getWorkspaceMeta$ } = await import('../index');
|
||||
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
'workspaces',
|
||||
workspaceId
|
||||
);
|
||||
|
||||
const metaChange = vi.fn();
|
||||
|
||||
const meta$ = getWorkspaceMeta$(workspaceId);
|
||||
|
||||
meta$.subscribe(metaChange);
|
||||
await new Promise(resolve => setTimeout(resolve, 100));
|
||||
|
||||
expect(metaChange).toHaveBeenCalledWith({
|
||||
id: workspaceId,
|
||||
mainDBPath: path.join(workspacePath, 'storage.db'),
|
||||
});
|
||||
|
||||
await storeWorkspaceMeta(testAppContext, workspaceId, {
|
||||
secondaryDBPath: path.join(tmpDir, 'test.db'),
|
||||
});
|
||||
|
||||
expect(metaChange).toHaveBeenCalledWith({
|
||||
id: workspaceId,
|
||||
mainDBPath: path.join(workspacePath, 'storage.db'),
|
||||
secondaryDBPath: path.join(tmpDir, 'test.db'),
|
||||
});
|
||||
});
|
||||
135
apps/electron/layers/main/src/workspace/handlers.ts
Normal file
135
apps/electron/layers/main/src/workspace/handlers.ts
Normal file
@@ -0,0 +1,135 @@
|
||||
import path from 'node:path';
|
||||
|
||||
import fs from 'fs-extra';
|
||||
|
||||
import { type AppContext } from '../context';
|
||||
import { ensureSQLiteDB } from '../db/ensure-db';
|
||||
import { logger } from '../logger';
|
||||
import type { WorkspaceMeta } from '../type';
|
||||
import { workspaceSubjects } from './subjects';
|
||||
|
||||
export async function listWorkspaces(
|
||||
context: AppContext
|
||||
): Promise<[workspaceId: string, meta: WorkspaceMeta][]> {
|
||||
const basePath = getWorkspacesBasePath(context);
|
||||
try {
|
||||
await fs.ensureDir(basePath);
|
||||
const dirs = await fs.readdir(basePath, {
|
||||
withFileTypes: true,
|
||||
});
|
||||
const metaList = (
|
||||
await Promise.all(
|
||||
dirs.map(async dir => {
|
||||
// ? shall we put all meta in a single file instead of one file per workspace?
|
||||
return await getWorkspaceMeta(context, dir.name);
|
||||
})
|
||||
)
|
||||
).filter((w): w is WorkspaceMeta => !!w);
|
||||
return metaList.map(meta => [meta.id, meta]);
|
||||
} catch (error) {
|
||||
logger.error('listWorkspaces', error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
export async function deleteWorkspace(context: AppContext, id: string) {
|
||||
const basePath = getWorkspaceBasePath(context, id);
|
||||
const movedPath = path.join(
|
||||
context.appDataPath,
|
||||
'delete-workspaces',
|
||||
`${id}`
|
||||
);
|
||||
try {
|
||||
const db = await ensureSQLiteDB(id);
|
||||
db.destroy();
|
||||
return await fs.move(basePath, movedPath, {
|
||||
overwrite: true,
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('deleteWorkspace', error);
|
||||
}
|
||||
}
|
||||
|
||||
export function getWorkspacesBasePath(context: AppContext) {
|
||||
return path.join(context.appDataPath, 'workspaces');
|
||||
}
|
||||
|
||||
export function getWorkspaceBasePath(context: AppContext, workspaceId: string) {
|
||||
return path.join(context.appDataPath, 'workspaces', workspaceId);
|
||||
}
|
||||
|
||||
export function getWorkspaceDBPath(context: AppContext, workspaceId: string) {
|
||||
const basePath = getWorkspaceBasePath(context, workspaceId);
|
||||
return path.join(basePath, 'storage.db');
|
||||
}
|
||||
|
||||
export function getWorkspaceMetaPath(context: AppContext, workspaceId: string) {
|
||||
const basePath = getWorkspaceBasePath(context, workspaceId);
|
||||
return path.join(basePath, 'meta.json');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get workspace meta, create one if not exists
|
||||
* This function will also migrate the workspace if needed
|
||||
*/
|
||||
export async function getWorkspaceMeta(
|
||||
context: AppContext,
|
||||
workspaceId: string
|
||||
): Promise<WorkspaceMeta> {
|
||||
try {
|
||||
const basePath = getWorkspaceBasePath(context, workspaceId);
|
||||
const metaPath = getWorkspaceMetaPath(context, workspaceId);
|
||||
if (!(await fs.exists(metaPath))) {
|
||||
// since not meta is found, we will migrate symlinked db file if needed
|
||||
await fs.ensureDir(basePath);
|
||||
const dbPath = getWorkspaceDBPath(context, workspaceId);
|
||||
|
||||
// todo: remove this after migration (in stable version)
|
||||
const realDBPath = (await fs.exists(dbPath))
|
||||
? await fs.realpath(dbPath)
|
||||
: dbPath;
|
||||
const isLink = realDBPath !== dbPath;
|
||||
if (isLink) {
|
||||
await fs.copy(realDBPath, dbPath);
|
||||
}
|
||||
// create one if not exists
|
||||
const meta = {
|
||||
id: workspaceId,
|
||||
mainDBPath: dbPath,
|
||||
secondaryDBPath: isLink ? realDBPath : undefined,
|
||||
};
|
||||
await fs.writeJSON(metaPath, meta);
|
||||
return meta;
|
||||
} else {
|
||||
const meta = await fs.readJSON(metaPath);
|
||||
return meta;
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error('getWorkspaceMeta failed', err);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
export async function storeWorkspaceMeta(
|
||||
context: AppContext,
|
||||
workspaceId: string,
|
||||
meta: Partial<WorkspaceMeta>
|
||||
) {
|
||||
try {
|
||||
const basePath = getWorkspaceBasePath(context, workspaceId);
|
||||
await fs.ensureDir(basePath);
|
||||
const metaPath = path.join(basePath, 'meta.json');
|
||||
const currentMeta = await getWorkspaceMeta(context, workspaceId);
|
||||
const newMeta = {
|
||||
...currentMeta,
|
||||
...meta,
|
||||
};
|
||||
await fs.writeJSON(metaPath, newMeta);
|
||||
workspaceSubjects.meta.next({
|
||||
workspaceId,
|
||||
meta: newMeta,
|
||||
});
|
||||
} catch (err) {
|
||||
logger.error('storeWorkspaceMeta failed', err);
|
||||
}
|
||||
}
|
||||
44  apps/electron/layers/main/src/workspace/index.ts  Normal file
@@ -0,0 +1,44 @@
import { merge } from 'rxjs';
import { filter, map } from 'rxjs/operators';

import { appContext } from '../context';
import type {
  MainEventListener,
  NamespaceHandlers,
  WorkspaceMeta,
} from '../type';
import { deleteWorkspace, getWorkspaceMeta, listWorkspaces } from './handlers';
import { workspaceSubjects } from './subjects';

export * from './handlers';
export * from './subjects';

export const workspaceEvents = {
  onMetaChange: (
    fn: (meta: { workspaceId: string; meta: WorkspaceMeta }) => void
  ) => {
    const sub = workspaceSubjects.meta.subscribe(fn);
    return () => {
      sub.unsubscribe();
    };
  },
} satisfies Record<string, MainEventListener>;

export const workspaceHandlers = {
  list: async () => listWorkspaces(appContext),
  delete: async (_, id: string) => deleteWorkspace(appContext, id),
  getMeta: async (_, id: string) => {
    return getWorkspaceMeta(appContext, id);
  },
} satisfies NamespaceHandlers;

// used internally. Get a stream of workspace id -> meta
export const getWorkspaceMeta$ = (workspaceId: string) => {
  return merge(
    getWorkspaceMeta(appContext, workspaceId),
    workspaceSubjects.meta.pipe(
      map(meta => meta.meta),
      filter(meta => meta.id === workspaceId)
    )
  );
};
7  apps/electron/layers/main/src/workspace/subjects.ts  Normal file
@@ -0,0 +1,7 @@
import { Subject } from 'rxjs';

import type { WorkspaceMeta } from '../type';

export const workspaceSubjects = {
  meta: new Subject<{ workspaceId: string; meta: WorkspaceMeta }>(),
};
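Note: a small sketch of consuming the meta stream defined above. The workspace id and the logging callback are placeholders; the shape of the emissions follows the observable test earlier in this diff.

import { getWorkspaceMeta$ } from './index';

// Emits the current meta once, then again whenever storeWorkspaceMeta writes.
const subscription = getWorkspaceMeta$('my-workspace-id').subscribe(meta => {
  console.log('workspace db lives at', meta.mainDBPath);
});

// stop listening when the consumer (e.g. a window) goes away
subscription.unsubscribe();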
@@ -1,15 +0,0 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"module": "esnext",
|
||||
"target": "esnext",
|
||||
"sourceMap": false,
|
||||
"moduleResolution": "Node",
|
||||
"skipLibCheck": true,
|
||||
"strict": true,
|
||||
"isolatedModules": true,
|
||||
"allowSyntheticDefaultImports": true,
|
||||
|
||||
"types": ["node"]
|
||||
},
|
||||
"include": ["src/**/*.ts", "../../types/**/*.d.ts", "index.ts", "../utils.ts"]
|
||||
}
|
||||
7
apps/electron/layers/preload/preload.d.ts
vendored
7
apps/electron/layers/preload/preload.d.ts
vendored
@@ -1,7 +1,6 @@
|
||||
/* eslint-disable @typescript-eslint/consistent-type-imports */
|
||||
|
||||
interface Window {
|
||||
apis?: typeof import('./src/affine-apis').apis;
|
||||
events?: typeof import('./src/affine-apis').events;
|
||||
appInfo?: typeof import('./src/affine-apis').appInfo;
|
||||
declare interface Window {
|
||||
apis: import('./src/affine-apis').PreloadHandlers;
|
||||
events: import('./src/affine-apis').MainIPCEventMap;
|
||||
}
|
||||
|
||||
@@ -1,9 +1,13 @@
|
||||
/* eslint-disable @typescript-eslint/no-var-requires */
|
||||
// NOTE: we will generate preload types from this file
|
||||
|
||||
// NOTE: we will generate preload types from this file
|
||||
import { ipcRenderer } from 'electron';
|
||||
|
||||
import type { MainIPCEventMap, MainIPCHandlerMap } from '../../constraints';
|
||||
// eslint-disable-next-line @typescript-eslint/no-restricted-imports
|
||||
import type {
|
||||
MainIPCEventMap,
|
||||
MainIPCHandlerMap,
|
||||
} from '../../main/src/exposed';
|
||||
|
||||
type WithoutFirstParameter<T> = T extends (_: any, ...args: infer P) => infer R
|
||||
? (...args: P) => R
|
||||
@@ -15,7 +19,7 @@ type HandlersMap<N extends keyof MainIPCHandlerMap> = {
|
||||
>;
|
||||
};
|
||||
|
||||
type PreloadHandlers = {
|
||||
export type PreloadHandlers = {
|
||||
[N in keyof MainIPCHandlerMap]: HandlersMap<N>;
|
||||
};
|
||||
|
||||
@@ -24,17 +28,17 @@ type MainExposedMeta = {
|
||||
events: [namespace: string, eventNames: string[]][];
|
||||
};
|
||||
|
||||
const meta: MainExposedMeta = (() => {
|
||||
const val = process.argv
|
||||
.find(arg => arg.startsWith('--exposed-meta='))
|
||||
?.split('=')[1];
|
||||
|
||||
return val ? JSON.parse(val) : null;
|
||||
})();
|
||||
|
||||
// main handlers that can be invoked from the renderer process
|
||||
const apis: PreloadHandlers = (() => {
|
||||
// the following were generated by the build script
|
||||
// 1. bundle extra main/src/expose.ts entry
|
||||
// 2. use generate-main-exposed-meta.mjs to generate exposed-meta.js in dist
|
||||
//
|
||||
// we cannot directly import main/src/handlers.ts because it will be bundled into the preload bundle
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const {
|
||||
handlers: handlersMeta,
|
||||
}: MainExposedMeta = require('../main/exposed-meta');
|
||||
const { handlers: handlersMeta } = meta;
|
||||
|
||||
const all = handlersMeta.map(([namespace, functionNames]) => {
|
||||
const namespaceApis = functionNames.map(name => {
|
||||
@@ -54,9 +58,11 @@ const apis: PreloadHandlers = (() => {
|
||||
|
||||
// main events that can be listened to from the renderer process
|
||||
const events: MainIPCEventMap = (() => {
|
||||
const {
|
||||
events: eventsMeta,
|
||||
}: MainExposedMeta = require('../main/exposed-meta');
|
||||
const { events: eventsMeta } = meta;
|
||||
|
||||
// NOTE: ui may try to listen to a lot of the same events, so we increase the limit...
|
||||
ipcRenderer.setMaxListeners(100);
|
||||
|
||||
const all = eventsMeta.map(([namespace, eventNames]) => {
|
||||
const namespaceEvents = eventNames.map(name => {
|
||||
const channel = `${namespace}:${name}`;
|
||||
@@ -86,3 +92,6 @@ const appInfo = {
|
||||
};
|
||||
|
||||
export { apis, appInfo, events };
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-restricted-imports
|
||||
export type { MainIPCEventMap } from '../../main/src/exposed';
|
||||
|
||||
@@ -1,14 +0,0 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"module": "esnext",
|
||||
"target": "esnext",
|
||||
"sourceMap": false,
|
||||
"moduleResolution": "Node",
|
||||
"skipLibCheck": true,
|
||||
"strict": true,
|
||||
"isolatedModules": true,
|
||||
|
||||
"types": ["node"]
|
||||
},
|
||||
"include": ["src/**/*.ts", "../../types/**/*.d.ts"]
|
||||
}
|
||||
@@ -1,3 +0,0 @@
|
||||
export const isMacOS = () => {
|
||||
return process.platform === 'darwin';
|
||||
};
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "@affine/electron",
|
||||
"private": true,
|
||||
"version": "0.5.4-canary.30",
|
||||
"version": "0.6.0-beta.0",
|
||||
"author": "affine",
|
||||
"repository": {
|
||||
"url": "https://github.com/toeverything/AFFiNE",
|
||||
@@ -15,13 +15,8 @@
|
||||
"prod": "yarn electron-rebuild && yarn node scripts/dev.mjs",
|
||||
"build-layers": "zx scripts/build-layers.mjs",
|
||||
"generate-assets": "zx scripts/generate-assets.mjs",
|
||||
"generate-main-exposed-meta": "zx scripts/generate-main-exposed-meta.mjs",
|
||||
"package": "electron-forge package",
|
||||
"make": "electron-forge make",
|
||||
"make-macos-arm64": "electron-forge make --platform=darwin --arch=arm64",
|
||||
"make-macos-x64": "electron-forge make --platform=darwin --arch=x64",
|
||||
"make-windows-x64": "electron-forge make --platform=win32 --arch=x64",
|
||||
"make-linux-x64": "electron-forge make --platform=linux --arch=x64",
|
||||
"rebuild:for-unit-test": "yarn rebuild better-sqlite3",
|
||||
"rebuild:for-electron": "yarn electron-rebuild",
|
||||
"test": "playwright test"
|
||||
@@ -32,6 +27,7 @@
|
||||
"main": "./dist/layers/main/index.js",
|
||||
"devDependencies": {
|
||||
"@affine-test/kit": "workspace:*",
|
||||
"@affine/native": "workspace:*",
|
||||
"@electron-forge/cli": "^6.1.1",
|
||||
"@electron-forge/core": "^6.1.1",
|
||||
"@electron-forge/core-utils": "^6.1.1",
|
||||
@@ -44,22 +40,26 @@
|
||||
"@electron/remote": "2.0.9",
|
||||
"@types/better-sqlite3": "^7.6.4",
|
||||
"@types/fs-extra": "^11.0.1",
|
||||
"@types/uuid": "^9.0.1",
|
||||
"cross-env": "7.0.3",
|
||||
"electron": "24.2.0",
|
||||
"electron-log": "^5.0.0-beta.23",
|
||||
"electron": "25.0.0",
|
||||
"electron-log": "^5.0.0-beta.24",
|
||||
"electron-squirrel-startup": "1.0.0",
|
||||
"electron-window-state": "^5.0.3",
|
||||
"esbuild": "^0.17.18",
|
||||
"esbuild": "^0.17.19",
|
||||
"fs-extra": "^11.1.1",
|
||||
"playwright": "^1.33.0",
|
||||
"playwright": "=1.33.0",
|
||||
"ts-node": "^10.9.1",
|
||||
"undici": "^5.22.0",
|
||||
"undici": "^5.22.1",
|
||||
"uuid": "^9.0.0",
|
||||
"zx": "^7.2.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"better-sqlite3": "^8.3.0",
|
||||
"better-sqlite3": "^8.4.0",
|
||||
"cheerio": "^1.0.0-rc.12",
|
||||
"chokidar": "^3.5.3",
|
||||
"electron-updater": "^5.3.0",
|
||||
"lodash-es": "^4.17.21",
|
||||
"nanoid": "^4.0.2",
|
||||
"rxjs": "^7.8.1",
|
||||
"yjs": "^13.6.1"
|
||||
|
||||
BIN
apps/electron/resources/icons/affine_installing.gif
Normal file
BIN
apps/electron/resources/icons/affine_installing.gif
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 2.1 MiB |
BIN
apps/electron/resources/icons/icon_internal.icns
Normal file
BIN
apps/electron/resources/icons/icon_internal.icns
Normal file
Binary file not shown.
BIN
apps/electron/resources/icons/icon_internal.ico
Normal file
BIN
apps/electron/resources/icons/icon_internal.ico
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 85 KiB |
@@ -8,6 +8,11 @@ import { config } from './common.mjs';
|
||||
const NODE_ENV =
|
||||
process.env.NODE_ENV === 'development' ? 'development' : 'production';
|
||||
|
||||
if (process.platform === 'win32') {
|
||||
$.shell = true;
|
||||
$.prefix = '';
|
||||
}
|
||||
|
||||
async function buildLayers() {
|
||||
const common = config();
|
||||
await esbuild.build(common.preload);
|
||||
@@ -20,8 +25,6 @@ async function buildLayers() {
|
||||
'process.env.BUILD_TYPE': `"${process.env.BUILD_TYPE || 'stable'}"`,
|
||||
},
|
||||
});
|
||||
|
||||
await $`yarn workspace @affine/electron generate-main-exposed-meta`;
|
||||
}
|
||||
|
||||
await buildLayers();
|
||||
|
||||
@@ -12,16 +12,6 @@ const DEV_SERVER_URL = process.env.DEV_SERVER_URL;
|
||||
/** @type 'production' | 'development'' */
|
||||
const mode = (process.env.NODE_ENV = process.env.NODE_ENV || 'development');
|
||||
|
||||
const nativeNodeModulesPlugin = {
|
||||
name: 'native-node-modules',
|
||||
setup(build) {
|
||||
// Mark native Node.js modules as external
|
||||
build.onResolve({ filter: /\.node$/, namespace: 'file' }, args => {
|
||||
return { path: args.path, external: true };
|
||||
});
|
||||
},
|
||||
};
|
||||
|
||||
// List of env that will be replaced by esbuild
|
||||
const ENV_MACROS = ['AFFINE_GOOGLE_CLIENT_ID', 'AFFINE_GOOGLE_CLIENT_SECRET'];
|
||||
|
||||
@@ -33,6 +23,7 @@ export const config = () => {
|
||||
JSON.stringify(process.env[key] ?? ''),
|
||||
]),
|
||||
['process.env.NODE_ENV', `"${mode}"`],
|
||||
['process.env.USE_WORKER', '"true"'],
|
||||
]);
|
||||
|
||||
if (DEV_SERVER_URL) {
|
||||
@@ -43,16 +34,20 @@ export const config = () => {
|
||||
main: {
|
||||
entryPoints: [
|
||||
resolve(root, './layers/main/src/index.ts'),
|
||||
resolve(root, './layers/main/src/exposed.ts'),
|
||||
resolve(root, './layers/main/src/workers/merge-update.worker.ts'),
|
||||
],
|
||||
outdir: resolve(root, './dist/layers/main'),
|
||||
bundle: true,
|
||||
target: `node${NODE_MAJOR_VERSION}`,
|
||||
platform: 'node',
|
||||
external: ['electron', 'yjs', 'better-sqlite3', 'electron-updater'],
|
||||
plugins: [nativeNodeModulesPlugin],
|
||||
define: define,
|
||||
format: 'cjs',
|
||||
loader: {
|
||||
'.node': 'copy',
|
||||
},
|
||||
assetNames: '[name]',
|
||||
treeShaking: true,
|
||||
},
|
||||
preload: {
|
||||
entryPoints: [resolve(root, './layers/preload/src/index.ts')],
|
||||
@@ -60,8 +55,7 @@ export const config = () => {
|
||||
bundle: true,
|
||||
target: `node${NODE_MAJOR_VERSION}`,
|
||||
platform: 'node',
|
||||
external: ['electron', '../main/exposed-meta'],
|
||||
plugins: [nativeNodeModulesPlugin],
|
||||
external: ['electron'],
|
||||
define: define,
|
||||
},
|
||||
};
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/* eslint-disable no-async-promise-executor */
|
||||
import { execSync, spawn } from 'node:child_process';
|
||||
import { spawn } from 'node:child_process';
|
||||
import { readFileSync } from 'node:fs';
|
||||
import path from 'node:path';
|
||||
|
||||
@@ -105,8 +105,6 @@ async function watchMain() {
|
||||
name: 'electron-dev:reload-app-on-main-change',
|
||||
setup(build) {
|
||||
build.onEnd(() => {
|
||||
execSync('yarn generate-main-exposed-meta');
|
||||
|
||||
if (initialBuild) {
|
||||
console.log(`[main] has changed, [re]launching electron...`);
|
||||
spawnOrReloadElectron();
|
||||
|
||||
@@ -1,14 +1,18 @@
|
||||
#!/usr/bin/env zx
|
||||
import 'zx/globals';
|
||||
|
||||
import { createRequire } from 'node:module';
|
||||
import path from 'node:path';
|
||||
|
||||
const require = createRequire(import.meta.url);
|
||||
|
||||
const repoRootDir = path.join(__dirname, '..', '..', '..');
|
||||
const electronRootDir = path.join(__dirname, '..');
|
||||
const publicDistDir = path.join(electronRootDir, 'resources');
|
||||
const affineWebDir = path.join(repoRootDir, 'apps', 'web');
|
||||
const affineWebOutDir = path.join(affineWebDir, 'out');
|
||||
const publicAffineOutDir = path.join(publicDistDir, `web-static`);
|
||||
const releaseVersionEnv = process.env.RELEASE_VERSION || '';
|
||||
|
||||
console.log('build with following dir', {
|
||||
repoRootDir,
|
||||
@@ -19,9 +23,16 @@ console.log('build with following dir', {
|
||||
publicAffineOutDir,
|
||||
});
|
||||
|
||||
// step 0: check version match
|
||||
const electronPackageJson = require(`${electronRootDir}/package.json`);
|
||||
if (releaseVersionEnv && electronPackageJson.version !== releaseVersionEnv) {
|
||||
throw new Error(
|
||||
`Version mismatch, expected ${releaseVersionEnv} but got ${electronPackageJson.version}`
|
||||
);
|
||||
}
|
||||
// copy web dist files to electron dist
|
||||
|
||||
// step 0: clean up
|
||||
// step 1: clean up
|
||||
await cleanup();
|
||||
echo('Clean up done');
|
||||
|
||||
@@ -32,9 +43,6 @@ if (process.platform === 'win32') {
|
||||
|
||||
cd(repoRootDir);
|
||||
|
||||
// step 1: build electron resources
|
||||
await $`yarn workspace @affine/electron build-layers`;
|
||||
|
||||
// step 2: build web (nextjs) dist
|
||||
if (!process.env.SKIP_WEB_BUILD) {
|
||||
process.env.ENABLE_LEGACY_PROVIDER = 'false';
|
||||
@@ -59,6 +67,17 @@ if (!process.env.SKIP_WEB_BUILD) {
|
||||
await fs.move(affineWebOutDir, publicAffineOutDir, { overwrite: true });
|
||||
}
|
||||
|
||||
// step 3: update app-updater.yml content with build type in resources folder
|
||||
if (process.env.BUILD_TYPE === 'internal') {
|
||||
const appUpdaterYml = path.join(publicDistDir, 'app-update.yml');
|
||||
const appUpdaterYmlContent = await fs.readFile(appUpdaterYml, 'utf-8');
|
||||
const newAppUpdaterYmlContent = appUpdaterYmlContent.replace(
|
||||
'AFFiNE',
|
||||
'AFFiNE-Releases'
|
||||
);
|
||||
await fs.writeFile(appUpdaterYml, newAppUpdaterYmlContent);
|
||||
}
|
||||
|
||||
/// --------
|
||||
/// --------
|
||||
/// --------
|
||||
|
||||
@@ -1,40 +0,0 @@
|
||||
#!/usr/bin/env zx
|
||||
/* eslint-disable @typescript-eslint/no-restricted-imports */
|
||||
import 'zx/globals';
|
||||
|
||||
const mainDistDir = path.resolve(__dirname, '../dist/layers/main');
|
||||
|
||||
// be careful and avoid any side effects in
|
||||
const { handlers, events } = await import(
|
||||
path.resolve(mainDistDir, 'exposed.js')
|
||||
);
|
||||
|
||||
const handlersMeta = Object.entries(handlers).map(
|
||||
([namespace, namespaceHandlers]) => {
|
||||
return [
|
||||
namespace,
|
||||
Object.keys(namespaceHandlers).map(handlerName => handlerName),
|
||||
];
|
||||
}
|
||||
);
|
||||
|
||||
const eventsMeta = Object.entries(events).map(
|
||||
([namespace, namespaceHandlers]) => {
|
||||
return [
|
||||
namespace,
|
||||
Object.keys(namespaceHandlers).map(handlerName => handlerName),
|
||||
];
|
||||
}
|
||||
);
|
||||
|
||||
const meta = {
|
||||
handlers: handlersMeta,
|
||||
events: eventsMeta,
|
||||
};
|
||||
|
||||
await fs.writeFile(
|
||||
path.resolve(mainDistDir, 'exposed-meta.js'),
|
||||
`module.exports = ${JSON.stringify(meta)};`
|
||||
);
|
||||
|
||||
console.log('generate main exposed-meta.js done');
|
||||
@@ -1,3 +1,5 @@
import { platform } from 'node:os';

import { expect } from '@playwright/test';

import { test } from './fixture';
@@ -11,8 +13,74 @@ test('new page', async ({ page, workspace }) => {
  expect(flavour).toBe('local');
});

// macOS only
if (platform() === 'darwin') {
  test('app sidebar router forward/back', async ({ page }) => {
    await page.getByTestId('help-island').click();
    await page.getByTestId('easy-guide').click();
    await page.getByTestId('onboarding-modal-next-button').click();
    await page.getByTestId('onboarding-modal-close-button').click();
    {
      // create pages
      await page.waitForTimeout(500);
      await page.getByTestId('new-page-button').click({
        delay: 100,
      });
      await page.waitForSelector('v-line');
      await page.focus('.affine-default-page-block-title');
      await page.type('.affine-default-page-block-title', 'test1', {
        delay: 100,
      });
      await page.waitForTimeout(500);
      await page.getByTestId('new-page-button').click({
        delay: 100,
      });
      await page.waitForSelector('v-line');
      await page.focus('.affine-default-page-block-title');
      await page.type('.affine-default-page-block-title', 'test2', {
        delay: 100,
      });
      await page.waitForTimeout(500);
      await page.getByTestId('new-page-button').click({
        delay: 100,
      });
      await page.waitForSelector('v-line');
      await page.focus('.affine-default-page-block-title');
      await page.type('.affine-default-page-block-title', 'test3', {
        delay: 100,
      });
    }
    {
      const title = (await page
        .locator('.affine-default-page-block-title')
        .textContent()) as string;
      expect(title.trim()).toBe('test3');
    }

    await page.click('[data-testid="app-sidebar-arrow-button-back"]');
    await page.waitForTimeout(1000);
    await page.click('[data-testid="app-sidebar-arrow-button-back"]');
    await page.waitForTimeout(1000);
    {
      const title = (await page
        .locator('.affine-default-page-block-title')
        .textContent()) as string;
      expect(title.trim()).toBe('test1');
    }
    await page.click('[data-testid="app-sidebar-arrow-button-forward"]');
    await page.waitForTimeout(1000);
    await page.click('[data-testid="app-sidebar-arrow-button-forward"]');
    await page.waitForTimeout(1000);
    {
      const title = (await page
        .locator('.affine-default-page-block-title')
        .textContent()) as string;
      expect(title.trim()).toBe('test3');
    }
  });
}

test('app theme', async ({ page, electronApp }) => {
  await page.waitForSelector('v-line');
  const root = page.locator('html');
  {
    const themeMode = await root.evaluate(element =>
@@ -20,30 +88,25 @@ test('app theme', async ({ page, electronApp }) => {
    );
    expect(themeMode).toBe('light');

    // check if electron theme source is set to light
    const themeSource = await electronApp.evaluate(({ nativeTheme }) => {
      return nativeTheme.themeSource;
    const theme = await electronApp.evaluate(({ nativeTheme }) => {
      return nativeTheme.shouldUseDarkColors ? 'dark' : 'light';
    });

    expect(themeSource).toBe('light');
    expect(theme).toBe('light');
  }

  {
    await page.getByTestId('editor-option-menu').click();
    await page.getByTestId('change-theme-dark').click();
    await page.waitForTimeout(50);
    {
      const themeMode = await root.evaluate(element =>
        element.getAttribute('data-theme')
      );
      expect(themeMode).toBe('dark');
    }

    const themeSource = await electronApp.evaluate(({ nativeTheme }) => {
      return nativeTheme.themeSource;
    const themeMode = await root.evaluate(element =>
      element.getAttribute('data-theme')
    );
    expect(themeMode).toBe('dark');
    const theme = await electronApp.evaluate(({ nativeTheme }) => {
      return nativeTheme.shouldUseDarkColors ? 'dark' : 'light';
    });

    expect(themeSource).toBe('dark');
    expect(theme).toBe('dark');
  }
});

@@ -73,7 +136,7 @@ test('affine onboarding button', async ({ page }) => {
    '[data-testid=onboarding-modal-editing-video]'
  );
  expect(await editingVideo.isVisible()).toEqual(true);
  await page.getByTestId('onboarding-modal-ok-button').click();
  await page.getByTestId('onboarding-modal-close-button').click();

  expect(await onboardingModal.isVisible()).toEqual(false);
});
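
One observation on the sidebar test added above: it is wrapped in a plain `if (platform() === 'darwin')`, so on other platforms the test never registers at all. A hedged alternative (a sketch, not what this diff does) is Playwright's conditional skip, which keeps the test visible in reports as skipped:

// Illustrative alternative only – the diff gates registration with `if (platform() === 'darwin')`.
test('app sidebar router forward/back', async ({ page }) => {
  test.skip(platform() !== 'darwin', 'macOS only');
  // ...same steps as in the diff above, driving `page`...
});
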
@@ -3,9 +3,14 @@

/* eslint-disable no-empty-pattern */
import crypto from 'node:crypto';
import { resolve } from 'node:path';
import { join, resolve } from 'node:path';

import { test as base } from '@affine-test/kit/playwright';
import {
  enableCoverage,
  istanbulTempDir,
  test as base,
  testResultDir,
} from '@affine-test/kit/playwright';
import fs from 'fs-extra';
import type { ElectronApplication, Page } from 'playwright';
import { _electron as electron } from 'playwright';
@@ -42,7 +47,31 @@ export const test = base.extend<{
    const logFilePath = await page.evaluate(async () => {
      return window.apis?.debug.logFilePath();
    });
    // wait for blocksuite to be loaded
    await page.waitForSelector('v-line');
    if (enableCoverage) {
      await fs.promises.mkdir(istanbulTempDir, { recursive: true });
      await page.exposeFunction(
        'collectIstanbulCoverage',
        (coverageJSON?: string) => {
          if (coverageJSON)
            fs.writeFileSync(
              join(
                istanbulTempDir,
                `playwright_coverage_${generateUUID()}.json`
              ),
              coverageJSON
            );
        }
      );
    }
    await use(page);
    if (enableCoverage) {
      await page.evaluate(() =>
        // @ts-expect-error
        window.collectIstanbulCoverage(JSON.stringify(window.__coverage__))
      );
    }
    await page.close();
    if (logFilePath) {
      const logs = await fs.readFile(logFilePath, 'utf-8');
@@ -52,16 +81,27 @@ export const test = base.extend<{
  electronApp: async ({}, use) => {
    // a random id to avoid conflicts between tests
    const id = generateUUID();
    const ext = process.platform === 'win32' ? '.cmd' : '';
    const electronApp = await electron.launch({
      args: [resolve(__dirname, '..'), '--app-name', 'affine-test-' + id],
      executablePath: resolve(__dirname, '../node_modules/.bin/electron'),
      executablePath: resolve(
        __dirname,
        '..',
        'node_modules',
        '.bin',
        `electron${ext}`
      ),
      recordVideo: {
        dir: testResultDir,
      },
      colorScheme: 'light',
    });
    const sessionDataPath = await electronApp.evaluate(async ({ app }) => {
      return app.getPath('sessionData');
    });
    await use(electronApp);
    await fs.rm(sessionDataPath, { recursive: true, force: true });
    // FIXME: the following does not work well on CI
    // const sessionDataPath = await electronApp.evaluate(async ({ app }) => {
    //   return app.getPath('sessionData');
    // });
    // await fs.rm(sessionDataPath, { recursive: true, force: true });
  },
  appInfo: async ({ electronApp }, use) => {
    const appInfo = await electronApp.evaluate(async ({ app }) => {
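
As a usage note for the fixture changes above: a spec consumes the extended `test` exactly like the specs elsewhere in this diff; the snippet below is an illustrative sketch (the `isPackaged` assertion assumes the tests run against an unpackaged development build):

// Illustrative usage only, not part of the diff.
import { expect } from '@playwright/test';

import { test } from './fixture';

test('electron app boots', async ({ page, electronApp }) => {
  await page.waitForSelector('v-line'); // wait for the editor to load
  const isPackaged = await electronApp.evaluate(({ app }) => app.isPackaged);
  expect(isPackaged).toBe(false); // assumption: dev build, not a packaged artifact
});
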
@@ -1,7 +1,8 @@
import { execSync } from 'node:child_process';
import { join } from 'node:path';

export default async function () {
  execSync('yarn ts-node-esm scripts/', {
    cwd: path.join(__dirname, '..'),
    cwd: join(__dirname, '..'),
  });
}
@@ -2,7 +2,9 @@
  "extends": "../../../tsconfig.json",
  "compilerOptions": {
    "baseUrl": ".",
    "noEmit": true
    "noEmit": true,
    "target": "ESNext"
  },
  "references": [{ "path": "../../../tests/kit" }],
  "include": ["**.spec.ts", "**.test.ts"]
}
@@ -23,7 +23,7 @@ test('move workspace db file', async ({ page, appInfo, workspace }) => {
  // goto settings
  await settingButton.click();

  const tmpPath = path.join(appInfo.sessionData, w.id + '-tmp.db');
  const tmpPath = path.join(appInfo.sessionData, w.id + '-tmp-dir');

  // move db file to tmp folder
  await page.evaluate(tmpPath => {
@@ -36,6 +36,9 @@ test('move workspace db file', async ({ page, appInfo, workspace }) => {
  // check if db file exists
  await page.waitForSelector('text="Move folder success"');
  expect(await fs.exists(tmpPath)).toBe(true);
  // check if db file exists under tmpPath (a file ends with .affine)
  const files = await fs.readdir(tmpPath);
  expect(files.some(f => f.endsWith('.affine'))).toBe(true);
});

test('export then add', async ({ page, appInfo, workspace }) => {
@@ -56,7 +59,7 @@ test('export then add', async ({ page, appInfo, workspace }) => {

  const tmpPath = path.join(appInfo.sessionData, w.id + '-tmp.db');

  // move db file to tmp folder
  // export db file to tmp folder
  await page.evaluate(tmpPath => {
    window.apis?.dialog.setFakeDialogResult({
      filePath: tmpPath,
Some files were not shown because too many files have changed in this diff.