Mirror of https://github.com/toeverything/AFFiNE.git (synced 2026-02-10 19:38:39 +00:00)

Compare commits
171 Commits: v0.6.0-can ... v0.7.0-can
SHA1:
0d07ff2390, 42bab6990e, 89a566a645, 7af5bd3894, a57c27679d, 68a72b2dfc, 602f795133, 5df89a925b, 23126e1ff6, e1f715f837,
fbcaed40e7, 88757ce488, fc9462eee9, e1314730be, 36978dbed6, 53d1991211, 1ea445ab15, 78410f531a, 96c0321696, 0895f1fb30,
b6188f4b11, 2ed1a7b219, 9bee6bd5cc, 198f30c86d, 454f1887cf, 4e1e4e9435, f7768563e1, 6aa0e71b84, f1b3a10969, 90e70ed986,
094a479c2a, 20f1d487c8, 4c9bda1406, d5debc0bf5, f5aee7c360, 248cd9a8ab, 06abb702f5, ee289706ec, 6cbf310a5a, 855fd8a73a,
8dbd354659, 1c7ae04f4f, 0bb6e362bf, 617350fc7d, 2713340532, 31d552ab7e, e11326f05f, 6648fe4dcc, f669164674, c6d8904ca2,
8c5a1e2de3, 395414c336, 96f653ea19, fa089de40d, 4175f5391e, 61c417992a, befae6bc9b, e7eb13e966, 88eaaf9ce4, 20cc082a02,
402d12a0e1, 58ba11e13c, cb6ca52b03, cd2ab73e5d, b16e725514, 004fcc8e80, a01a3ef011, 20cf45270d, f3ac12254c, e0eb216b9b,
90afed1e74, 83d2ed8ace, a0b64ca3e3, b9fc5ad769, ef1a44a413, 798dc49da4, 902081d44e, 7969b73979, c8734bd6ee, 6d3c273ffd,
f4b3830a0e, 36534f1915, 7dcbe64d4e, 60057c666d, 60a83f4907, b11ce2c8d2, 3b8f2c1ac3, be065e2de3, 675c737e48, 1255384cab,
3d423c3299, ad4737850d, 9dcacd413c, c1998eddf3, db3f63e8f2, e562ca1011, f6adf93f90, 053eba5d98, 49f1ba676f, 48c109e149,
259d7988d9, 0a49258ddd, fd35d3427e, ef0a20b358, f01997f8ee, 281a068cfb, fe5be0cb47, 8aab1d6459, 2eaaeef4a7, 5fbfabb3b2,
ec64260b6a, 2e23a4830b, 41a3d6f62f, 752bc9ca0e, c08f6fdba4, b23b7e896b, d68b421a4b, 1f510799e2, 66ea97c7c9, ee300e7b60,
ef2d135e9b, c82fb89d57, 725bf63a32, c1ca578f7d, 530dd5ff7f, 11370bc07e, 1c53daf1c4, b2556db33b, 89310c9b97, 8e09af910f,
885aea3425, a616150f2d, d80dae8a89, 34ff08b92b, 2f7b51d7ff, b7cee3185e, 1c5455e6ed, 2d303fd5d3, fbe2543c03, d6b640726e,
f875b37641, 53c4fc6dfa, 6c310249d9, 02e1f528bf, c870104370, 627d8ef787, 5563823a7a, d6804bb0fd, 1350633690, 50196d8fde,
2e0ccb53ec, 1498ee405b, cb863c4afa, 2629d39501, 38305cd984, 93116c24f2, 017b9c8615, 9ce3a96862, a0ff520ba4, a8b8986d89,
8ffc096fee, 7e457f7b4c, aedf2d339e, ffd5ae52b3, 3093194da8, 68b4f792f0, e2c6e4f9fc, 9ff7dbffb7, 0c561da061, 06951319a6,
0bfcab4067
@@ -5,3 +5,4 @@ out
storybook-static
affine-out
_next
lib
51 .eslintrc.js

@@ -1,3 +1,43 @@
const createPattern = packageName => [
{
group: ['**/dist', '**/dist/**'],
message: 'Do not import from dist',
allowTypeImports: false,
},
{
group: ['**/src', '**/src/**'],
message: 'Do not import from src',
allowTypeImports: false,
},
{
group: [`@affine/${packageName}`],
message: 'Do not import package itself',
allowTypeImports: false,
},
{
group: [`@toeverything/${packageName}`],
message: 'Do not import package itself',
allowTypeImports: false,
},
];

const allPackages = [
'cli',
'component',
'debug',
'env',
'graphql',
'hooks',
'i18n',
'jotai',
'native',
'plugin-infra',
'templates',
'theme',
'workspace',
'y-indexeddb',
];

/**
 * @type {import('eslint').Linter.Config}
 */

@@ -96,6 +136,17 @@ const config = {
'@typescript-eslint/no-var-requires': 0,
},
},
...allPackages.map(pkg => ({
files: [`packages/${pkg}/src/**/*.ts`, `packages/${pkg}/src/**/*.tsx`],
rules: {
'@typescript-eslint/no-restricted-imports': [
'error',
{
patterns: createPattern(pkg),
},
],
},
})),
],
};
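A quick illustration of what this new per-package override enforces may help. The sketch below is hypothetical (the file path and imported names are made up, and it assumes sibling packages are published under the `@affine/` scope, as `createPattern` suggests); it shows the kinds of imports the rule would flag inside a package listed in `allPackages`, such as `hooks`:

```ts
// packages/hooks/src/example.ts — hypothetical file, only to illustrate the rule

// Flagged by the '**/dist' / '**/dist/**' pattern: "Do not import from dist".
import { parseDoc } from '@affine/component/dist/utils';

// Flagged by the '**/src' / '**/src/**' pattern: "Do not import from src".
import { helper } from '../../component/src/helper';

// Flagged by the `@affine/${packageName}` pattern: a package must not import itself.
import { useWorkspace } from '@affine/hooks';

// Allowed: importing another workspace package through its public entry point.
import { Button } from '@affine/component';
```

Because `allowTypeImports` is set to `false`, even `import type` statements matching these patterns would be rejected.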
1 .github/CLA.md vendored

@@ -58,3 +58,4 @@ Example:
- Howard Do, @howarddo2208, 2023/04/20
- 三咲智子 Kevin Deng, @sxzz, 2023/04/21
- Moeyua, @moeyua, 2023/04/22
- Shishu, @shishudesu, 2023/05/19
6 .github/actions/build-rust/action.yml vendored

@@ -29,13 +29,15 @@ runs:
if: ${{ inputs.target != 'x86_64-unknown-linux-gnu' && inputs.target != 'aarch64-unknown-linux-gnu' }}
shell: bash
run: yarn workspace @affine/native build --target ${{ inputs.target }}
env:
CARGO_BUILD_INCREMENTAL: 'false'

- name: Build
if: ${{ inputs.target == 'x86_64-unknown-linux-gnu' }}
uses: addnab/docker-run-action@v3
with:
image: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian
options: --user 0:0 -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build
options: --user 0:0 -e CARGO_BUILD_INCREMENTAL=false -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build
run: yarn workspace @affine/native build --target ${{ inputs.target }}

- name: Build
@@ -43,5 +45,5 @@ runs:
uses: addnab/docker-run-action@v3
with:
image: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian-aarch64
options: --user 0:0 -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build
options: --user 0:0 -e CARGO_BUILD_INCREMENTAL=false -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build
run: yarn workspace @affine/native build --target ${{ inputs.target }}
6 .github/labeler.yml vendored

@@ -8,11 +8,17 @@ test:
- '**/tests/**/*'
- '**/__tests__/**/*'

plugin:copilot:
- 'plugins/copilot/**/*'

mod:dev:
- 'scripts/**/*'
- 'packages/cli/**/*'
- 'packages/debug/**/*'

mod:plugin-infra:
- 'packages/plugin-infra/**/*'

mod:workspace: 'packages/workspace/**/*'

mod:i18n: 'packages/i18n/**/*'
24 .github/workflows/add-to-project.yml vendored

@@ -1,24 +0,0 @@
name: Add to GitHub projects

on:
issues:
types:
- opened
pull_request_target:
types:
- opened
- reopened

jobs:
add-to-project:
name: Add issues and pull requests
runs-on: ubuntu-latest
steps:
- uses: actions/add-to-project@v0.4.0
with:
# You can target a repository in a different organization
# to the issue
project-url: https://github.com/orgs/toeverything/projects/10
github-token: ${{ secrets.ADD_TO_PROJECT_PAT }}
# labeled: bug, needs-triage
# label-operator: OR
63 .github/workflows/build.yml vendored

@@ -21,7 +21,6 @@ on:
- '!.github/workflows/build.yml'

env:
CARGO_BUILD_INCREMENTAL: 'false'
DEBUG: napi:*
APP_NAME: affine
MACOSX_DEPLOYMENT_TARGET: '10.13'

@@ -36,7 +35,12 @@ jobs:
- uses: actions/checkout@v3
- name: Setup Node.js
uses: ./.github/actions/setup-node
- run: yarn lint --max-warnings=0
- name: Run checks
run: |
yarn i18n-codegen gen
yarn typecheck
yarn lint --max-warnings=0
yarn circular

build-storybook:
name: Build Storybook

@@ -55,23 +59,6 @@ jobs:
path: ./packages/component/storybook-static
if-no-files-found: error

build-electron:
name: Build @affine/electron
runs-on: ubuntu-latest
environment: development
steps:
- uses: actions/checkout@v3
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Electron
working-directory: apps/electron
run: yarn build-layers
- name: Upload Ubuntu desktop artifact
uses: actions/upload-artifact@v3
with:
name: affine-ubuntu
path: ./apps/electron/dist

build:
name: Build @affine/web
runs-on: ubuntu-latest

@@ -100,7 +87,9 @@ jobs:
NEXT_PUBLIC_FIREBASE_APP_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_APP_ID }}
NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID }}
API_SERVER_PROFILE: local
ENABLE_DEBUG_PAGE: true
ENABLE_DEBUG_PAGE: 1
ENABLE_PLUGIN: true
ENABLE_ALL_PAGE_FILTER: true
ENABLE_LEGACY_PROVIDER: true
COVERAGE: true

@@ -122,7 +111,9 @@ jobs:
NEXT_PUBLIC_FIREBASE_APP_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_APP_ID }}
NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID }}
API_SERVER_PROFILE: affine
ENABLE_DEBUG_PAGE: true
ENABLE_DEBUG_PAGE: 1
ENABLE_PLUGIN: true
ENABLE_ALL_PAGE_FILTER: true
ENABLE_LEGACY_PROVIDER: false
COVERAGE: true

@@ -322,7 +313,7 @@ jobs:
target: x86_64-pc-windows-msvc,
test: true,
}
needs: [build, build-electron]
needs: [build]
steps:
- uses: actions/checkout@v3
- name: Setup Node.js

@@ -333,11 +324,17 @@ jobs:
uses: ./.github/actions/build-rust
with:
target: ${{ matrix.spec.target }}
- name: Download Ubuntu desktop artifact
uses: actions/download-artifact@v3
with:
name: affine-ubuntu
path: ./apps/electron/dist
- name: Run unit tests
if: ${{ matrix.spec.test }}
shell: bash
run: |
rm -rf apps/electron/node_modules/better-sqlite3/build
yarn --cwd apps/electron/node_modules/better-sqlite3 run install
yarn test:unit
env:
NATIVE_TEST: 'true'
- name: Build layers
run: yarn workspace @affine/electron build-layers

- name: Download static resource artifact
uses: actions/download-artifact@v3

@@ -346,20 +343,20 @@ jobs:
path: ./apps/electron/resources/web-static

- name: Rebuild Electron dependences
run: yarn rebuild:for-electron
working-directory: apps/electron
shell: bash
run: |
rm -rf apps/electron/node_modules/better-sqlite3/build
yarn workspace @affine/electron rebuild:for-electron --arch=${{ matrix.spec.arch }}

- name: Run desktop tests
if: ${{ matrix.spec.test && matrix.spec.os == 'ubuntu-latest' }}
run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- yarn test
working-directory: apps/electron
run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- yarn workspace @affine/electron test
env:
COVERAGE: true

- name: Run desktop tests
if: ${{ matrix.spec.test && matrix.spec.os != 'ubuntu-latest' }}
run: yarn test
working-directory: apps/electron
run: yarn workspace @affine/electron test
env:
COVERAGE: true
8 .github/workflows/languages-sync.yml vendored

@@ -13,14 +13,6 @@ on:
- '.github/workflows/languages-sync.yml'
workflow_dispatch:

# Cancels all previous workflow runs for pull requests that have not completed.
# See https://docs.github.com/en/actions/using-jobs/using-concurrency
concurrency:
# The concurrency group contains the workflow name and the branch name for
# pull requests or the commit hash for any other events.
group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.head_ref || github.sha }}
cancel-in-progress: true

jobs:
main:
runs-on: ubuntu-latest
113 .github/workflows/nightly-build.yml vendored

@@ -22,18 +22,30 @@ concurrency:

env:
BUILD_TYPE: internal
RELEASE_VERSION: ${{ github.ref_name }}-${{ github.sha }}

jobs:
set-build-version:
runs-on: ubuntu-latest
environment: production
outputs:
version: 0.0.0-${{ steps.version.outputs.version }}
steps:
- uses: actions/checkout@v3
- uses: toeverything/set-build-version@latest
- id: version
run: echo ::set-output name=version::${{ env.BUILD_VERSION }}

before-make:
runs-on: ubuntu-latest
environment: production
needs:
- set-build-version
steps:
- uses: actions/checkout@v3
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Replace Version
run: ./scripts/set-version.sh ${{ env.RELEASE_VERSION }}
run: ./scripts/set-version.sh ${{ needs.set-build-version.outputs.version }}
- name: generate-assets
working-directory: apps/electron
run: yarn generate-assets

@@ -53,6 +65,9 @@ jobs:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
API_SERVER_PROFILE: prod
ENABLE_TEST_PROPERTIES: false
ENABLE_IMAGE_PREVIEW_MODAL: false
RELEASE_VERSION: ${{ needs.set-build-version.outputs.version }}
ENABLE_BOOKMARK_OPERATION: true

- name: Upload Artifact (web-static)
uses: actions/upload-artifact@v3

@@ -60,30 +75,40 @@ jobs:
name: before-make-web-static
path: apps/electron/resources/web-static

- name: Upload Artifact (electron dist)
uses: actions/upload-artifact@v3
with:
name: before-make-electron-dist
path: apps/electron/dist

- name: Upload YML Build Script
uses: actions/upload-artifact@v3
with:
name: release-yml-build-script
path: apps/electron/scripts/generate-yml.js

make-distribution:
environment: production
strategy:
# all combinations: macos-latest x64, macos-latest arm64, windows-latest x64, ubuntu-latest x64
matrix:
spec:
- { os: macos-latest, platform: macos, arch: x64 }
- { os: macos-latest, platform: macos, arch: arm64 }
- { os: ubuntu-latest, platform: linux, arch: x64 }
- { os: windows-latest, platform: windows, arch: x64 }
- {
os: macos-latest,
platform: darwin,
arch: x64,
target: x86_64-apple-darwin,
}
- {
os: macos-latest,
platform: darwin,
arch: arm64,
target: aarch64-apple-darwin,
}
- {
os: ubuntu-latest,
platform: linux,
arch: x64,
target: x86_64-unknown-linux-gnu,
}
- {
os: windows-latest,
platform: win32,
arch: x64,
target: x86_64-pc-windows-msvc,
}
runs-on: ${{ matrix.spec.os }}
needs: before-make
needs:
- before-make
- set-build-version
env:
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}

@@ -93,36 +118,43 @@ jobs:
- uses: actions/checkout@v3
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build AFFiNE native
uses: ./.github/actions/build-rust
with:
target: ${{ matrix.spec.target }}
- name: Replace Version
run: ./scripts/set-version.sh ${{ env.RELEASE_VERSION }}
run: ./scripts/set-version.sh ${{ needs.set-build-version.outputs.version }}
- uses: actions/download-artifact@v3
with:
name: before-make-web-static
path: apps/electron/resources/web-static
- uses: actions/download-artifact@v3
with:
name: before-make-electron-dist
path: apps/electron/dist
- name: Rebuild Electron dependences
shell: bash
run: |
rm -rf apps/electron/node_modules/better-sqlite3/build
yarn workspace @affine/electron rebuild:for-electron --arch=${{ matrix.spec.arch }}

- name: Build layers
run: yarn workspace @affine/electron build-layers

- name: Signing By Apple Developer ID
if: ${{ matrix.spec.platform == 'macos' }}
if: ${{ matrix.spec.platform == 'darwin' }}
uses: apple-actions/import-codesign-certs@v2
with:
p12-file-base64: ${{ secrets.CERTIFICATES_P12 }}
p12-password: ${{ secrets.CERTIFICATES_P12_PASSWORD }}

- name: make
run: yarn make-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}
working-directory: apps/electron
run: yarn workspace @affine/electron make --platform=${{ matrix.spec.platform }} --arch=${{ matrix.spec.arch }}

- name: Save artifacts (mac)
if: ${{ matrix.spec.platform == 'macos' }}
if: ${{ matrix.spec.platform == 'darwin' }}
run: |
mkdir -p builds
mv apps/electron/out/*/make/*.dmg ./builds/affine-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.dmg
mv apps/electron/out/*/make/zip/darwin/${{ matrix.spec.arch }}/*.zip ./builds/affine-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.zip
- name: Save artifacts (windows)
if: ${{ matrix.spec.platform == 'windows' }}
if: ${{ matrix.spec.platform == 'win32' }}
run: |
mkdir -p builds
mv apps/electron/out/*/make/zip/win32/x64/AFFiNE*-win32-x64-*.zip ./builds/affine-${{ env.BUILD_TYPE }}-windows-x64.zip

@@ -143,52 +175,51 @@ jobs:
path: builds

release:
needs: make-distribution
needs:
- make-distribution
- set-build-version
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v3
- name: Download Artifacts (macos-x64)
uses: actions/download-artifact@v3
with:
name: affine-macos-x64-builds
name: affine-darwin-x64-builds
path: ./
- name: Download Artifacts (macos-arm64)
uses: actions/download-artifact@v3
with:
name: affine-macos-arm64-builds
name: affine-darwin-arm64-builds
path: ./
- name: Download Artifacts (windows-x64)
uses: actions/download-artifact@v3
with:
name: affine-windows-x64-builds
name: affine-win32-x64-builds
path: ./
- name: Download Artifacts (linux-x64)
uses: actions/download-artifact@v3
with:
name: affine-linux-x64-builds
path: ./
- name: Download Artifacts
uses: actions/download-artifact@v3
with:
name: release-yml-build-script
path: ./
- name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: 18
- name: Generate Release yml
run: |
cp ./apps/electron/scripts/generate-yml.js .
node generate-yml.js
env:
RELEASE_VERSION: ${{ env.RELEASE_VERSION }}
RELEASE_VERSION: ${{ needs.set-build-version.outputs.version }}
- name: Create Release Draft
uses: softprops/action-gh-release@v1
env:
GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}
with:
repository: 'toeverything/AFFiNE-Releases'
name: ${{ env.RELEASE_VERSION }}
tag_name: ${{ env.RELEASE_VERSION }}
name: ${{ needs.set-build-version.outputs.version }}
tag_name: ${{ needs.set-build-version.outputs.version }}
prerelease: true
files: |
./VERSION
64 .github/workflows/release-desktop-app.yml vendored

@@ -36,7 +36,6 @@ concurrency:

env:
BUILD_TYPE: ${{ github.event.inputs.build-type }}
CARGO_BUILD_INCREMENTAL: 'false'
DEBUG: napi:*
APP_NAME: affine
MACOSX_DEPLOYMENT_TARGET: '10.13'

@@ -50,8 +49,7 @@ jobs:
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: generate-assets
working-directory: apps/electron
run: yarn generate-assets
run: yarn workspace @affine/electron generate-assets
env:
NEXT_PUBLIC_FIREBASE_API_KEY: ${{ secrets.NEXT_PUBLIC_FIREBASE_API_KEY }}
NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN: ${{ secrets.NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN }}

@@ -68,6 +66,9 @@ jobs:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
API_SERVER_PROFILE: prod
ENABLE_TEST_PROPERTIES: false
ENABLE_IMAGE_PREVIEW_MODAL: false
RELEASE_VERSION: ${{ github.event.inputs.version }}
ENABLE_BOOKMARK_OPERATION: true

- name: Upload Artifact (web-static)
uses: actions/upload-artifact@v3

@@ -75,18 +76,6 @@ jobs:
name: before-make-web-static
path: apps/electron/resources/web-static

- name: Upload Artifact (electron dist)
uses: actions/upload-artifact@v3
with:
name: before-make-electron-dist
path: apps/electron/dist

- name: Upload YML Build Script
uses: actions/upload-artifact@v3
with:
name: release-yml-build-script
path: apps/electron/scripts/generate-yml.js

make-distribution:
environment: ${{ github.ref_name == 'master' && 'production' || 'development' }}
strategy:

@@ -95,13 +84,13 @@ jobs:
spec:
- {
os: macos-latest,
platform: macos,
platform: darwin,
arch: x64,
target: x86_64-apple-darwin,
}
- {
os: macos-latest,
platform: macos,
platform: darwin,
arch: arm64,
target: aarch64-apple-darwin,
}

@@ -113,7 +102,7 @@ jobs:
}
- {
os: windows-latest,
platform: windows,
platform: win32,
arch: x64,
target: x86_64-pc-windows-msvc,
}

@@ -136,30 +125,34 @@ jobs:
with:
name: before-make-web-static
path: apps/electron/resources/web-static
- uses: actions/download-artifact@v3
with:
name: before-make-electron-dist
path: apps/electron/dist

- name: Rebuild Electron dependences
shell: bash
run: |
rm -rf apps/electron/node_modules/better-sqlite3/build
yarn workspace @affine/electron rebuild:for-electron --arch=${{ matrix.spec.arch }}

- name: Build layers
run: yarn workspace @affine/electron build-layers

- name: Signing By Apple Developer ID
if: ${{ matrix.spec.platform == 'macos' }}
if: ${{ matrix.spec.platform == 'darwin' }}
uses: apple-actions/import-codesign-certs@v2
with:
p12-file-base64: ${{ secrets.CERTIFICATES_P12 }}
p12-password: ${{ secrets.CERTIFICATES_P12_PASSWORD }}

- name: make
run: yarn make-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}
working-directory: apps/electron
run: yarn workspace @affine/electron make --platform=${{ matrix.spec.platform }} --arch=${{ matrix.spec.arch }}

- name: Save artifacts (mac)
if: ${{ matrix.spec.platform == 'macos' }}
if: ${{ matrix.spec.platform == 'darwin' }}
run: |
mkdir -p builds
mv apps/electron/out/*/make/*.dmg ./builds/affine-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.dmg
mv apps/electron/out/*/make/zip/darwin/${{ matrix.spec.arch }}/*.zip ./builds/affine-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.zip
- name: Save artifacts (windows)
if: ${{ matrix.spec.platform == 'windows' }}
if: ${{ matrix.spec.platform == 'win32' }}
run: |
mkdir -p builds
mv apps/electron/out/*/make/zip/win32/x64/AFFiNE*-win32-x64-*.zip ./builds/affine-${{ env.BUILD_TYPE }}-windows-x64.zip

@@ -184,37 +177,36 @@ jobs:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v3
- name: Download Artifacts (macos-x64)
uses: actions/download-artifact@v3
with:
name: affine-macos-x64-builds
name: affine-darwin-x64-builds
path: ./
- name: Download Artifacts (macos-arm64)
uses: actions/download-artifact@v3
with:
name: affine-macos-arm64-builds
name: affine-darwin-arm64-builds
path: ./
- name: Download Artifacts (windows-x64)
uses: actions/download-artifact@v3
with:
name: affine-windows-x64-builds
name: affine-win32-x64-builds
path: ./
- name: Download Artifacts (linux-x64)
uses: actions/download-artifact@v3
with:
name: affine-linux-x64-builds
path: ./
- name: Download Artifacts
uses: actions/download-artifact@v3
with:
name: release-yml-build-script
path: ./
- uses: actions/setup-node@v3
with:
node-version: 18
- name: Generate Release yml
run: |
RELEASE_VERSION=${{ github.event.inputs.version }} node generate-yml.js
cp ./apps/electron/scripts/generate-yml.js .
node generate-yml.js
env:
RELEASE_VERSION: ${{ github.event.inputs.version }}
- name: Create Release Draft
uses: softprops/action-gh-release@v1
env:
6 .gitignore vendored

@@ -28,9 +28,9 @@ node_modules

# IDE - VSCode
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/settings.template.json
!.vscode/launch.template.json
!.vscode/extensions.json

# misc

@@ -70,3 +70,5 @@ next-env.d.ts
# Rust
target
*.node
tsconfig.node.tsbuildinfo
lib
@@ -1 +1,4 @@
pnpm-lock.yaml
target
lib
test-results
@@ -6,6 +6,12 @@
"name": "Run Dev",
"request": "launch",
"type": "node-terminal"
},
{
"command": "yarn run dev:local",
"name": "Run Dev Locally",
"request": "launch",
"type": "node-terminal"
}
]
}
@@ -37,5 +37,6 @@
"apps/electron/layers/**/*.spec.ts",
"tests/unit/**/*.spec.ts",
"tests/unit/**/*.spec.tsx"
]
],
"deepscan.enable": true
}
65 Cargo.lock generated

@@ -11,6 +11,7 @@ dependencies = [
"napi-build",
"napi-derive",
"notify",
"once_cell",
"parking_lot",
"serde",
"serde_json",

@@ -51,6 +52,12 @@ version = "2.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24a6904aef64d73cf10ab17ebace7befb918b82164785cb89907993be7f83813"

[[package]]
name = "bytes"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be"

[[package]]
name = "cfg-if"
version = "1.0.0"

@@ -298,9 +305,9 @@ dependencies = [

[[package]]
name = "notify"
version = "5.1.0"
version = "6.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58ea850aa68a06e48fdb069c0ec44d0d64c8dbffa49bf3b6f7f0a901fdea1ba9"
checksum = "4d9ba6c734de18ca27c8cef5cd7058aa4ac9f63596131e4c7e41e579319032a2"
dependencies = [
"bitflags 1.3.2",
"crossbeam-channel",

@@ -312,7 +319,7 @@ dependencies = [
"mio",
"serde",
"walkdir",
"windows-sys 0.42.0",
"windows-sys 0.45.0",
]

[[package]]

@@ -498,12 +505,31 @@ dependencies = [
"serde",
]

[[package]]
name = "signal-hook-registry"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1"
dependencies = [
"libc",
]

[[package]]
name = "smallvec"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"

[[package]]
name = "socket2"
version = "0.4.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662"
dependencies = [
"libc",
"winapi",
]

[[package]]
name = "syn"
version = "1.0.109"

@@ -533,11 +559,29 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3c786bf8134e5a3a166db9b29ab8f48134739014a3eca7bc6bfa95d673b136f"
dependencies = [
"autocfg",
"bytes",
"libc",
"mio",
"num_cpus",
"parking_lot",
"pin-project-lite",
"signal-hook-registry",
"socket2",
"tokio-macros",
"windows-sys 0.48.0",
]

[[package]]
name = "tokio-macros"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.15",
]

[[package]]
name = "unicode-ident"
version = "1.0.8"

@@ -608,21 +652,6 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"

[[package]]
name = "windows-sys"
version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7"
dependencies = [
"windows_aarch64_gnullvm 0.42.2",
"windows_aarch64_msvc 0.42.2",
"windows_i686_gnu 0.42.2",
"windows_i686_msvc 0.42.2",
"windows_x86_64_gnu 0.42.2",
"windows_x86_64_gnullvm 0.42.2",
"windows_x86_64_msvc 0.42.2",
]

[[package]]
name = "windows-sys"
version = "0.45.0"
20 README.md

@@ -24,12 +24,13 @@ See https://github.com/all-?/all-contributors/issues/361#issuecomment-637166066

<!-- ALL-CONTRIBUTORS-BADGE:END -->

[?style=flat-square&logoColor=white&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAACXBIWXMAAADAAAAAwAEwd99eAAABjElEQVRYhe1W0U3DMBB9RfyTDeoNyAYNG2QDOgJsECYgGxA26AZ4hIxgJqCZ4PjIGV+tUxK7raqiPsmKdXe5e3fOs7IiIlwSdxetfiNw7QRKAD0Ax/ssrI5QgQOw5v03AJOTJHcCL1x84LVmWzJyJlBg7P4BwCvb3pmIAbBPykZEqaulEU7YHNva1HypxUsKqIS9EvbynASs0n3ss+ciUIsuO8VvhL9emjdFBa3YO8XvALwpsZNYSqBB0PwUWgRZNksSL5GhlN0ngGd+dkpsD6AG8IGlslxwTh2fa09EBc3Dir32rRysuQlUAL54/wTAcpePPAXHPsOTGXhSEv69rAlYpZOt6DSO29J4D/TRRLJk6AvtaZSY9PkCFYVLqI9i/NF5YkkECgrXa6P4fVEn4iolrhNxRQqBZu7FqMNdZiMqAUPj2KdGZyicu1dHzlGqBHxn2sdTR53bmeJ+ebJd7LtXhGH4uQEwd0ttAPzMxGi5/6BdxTuMej41Bs59gGP+CU+Cq/4tvxH4HwR+Ab3Uqr/VGbqEAAAAAElFTkSuQmCC>)](https://app.affine.pro)
[?style=flat-square&logoColor=white&logo=affine>)](https://app.affine.pro)
[](https://affine.pro/download)
[](https://affine.pro/download)
[](https://affine.pro/download)
[](https://affine.pro/download)

[](https://github.com/toeverything/AFFiNE/releases/latest)
[![stars-icon]](https://github.com/toeverything/AFFiNE)
[![All Contributors][all-contributors-badge]](#contributors)
[![codecov]](https://codecov.io/gh/toeverything/AFFiNE)

@@ -44,7 +45,7 @@ See https://github.com/all-?/all-contributors/issues/361#issuecomment-637166066
---

<div align="center">
<a href="http://affine.pro"><img src="https://img.shields.io/badge/-AFFiNE-06449d?style=social&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAEsAAABLCAMAAAAPkIrYAAAAP1BMVEU8b9w8b9w+b947cNw7b9w6b908b909b9w8b9w7b9w8b9w7cN08b9w7b908b9w7b9w8b907cNw8b9w8b91HcEx3NJCJAAAAFXRSTlP/3QWSgA+lHPlu6Di4XtIrxk/xRADGudUoAAAB9UlEQVR42tWYwbKjIBREG0GJKkRj/v9bZ1ZvRC99rzar11tTB9qqnKoW3/+X38vy7ifzQ1b/wk/8Q1bCv3y6Z6wFh2x2llIRGB6xRhzz6p+wVhRJD1gRZZYHrADYSyqsjFPGZtYbuFESesUysZXlcMnYyJpxTW5keQh5N7G6CUJCE2uHFNfEGiBmbmB1H4jxDawNcqbuPmtAJTtj6RZ0lpIwiR5jNmgfNtHHwLXPWfFYcS2NMdxkjac/dNaNCJPo3yf9pFuseHbDrBsRFguGs8te8Q4rXzTjVSPCIHp3FePKWbzi30xE+4zlBMmoJaGLfpLUmAmLiN4Xyibahy76WZRQMLJ2WX27on2oFvQVac8yi4p+J2forA0V8W1c++AVS1f1H6p9KKLHxk9RWKmsyB+VLC76gV65DLjokdg5KmsEMXsiDwXWSmTc9ezSoKJHoi9zUVihbMHfQOSsXB7Mrz1S1huKPde69sEsiKgNt8hYTjiWlAyENeu7IFe1D15RSEBN+yCiXw17K1RZm/w7UtJVWYN8f1ZyLlkVb2bT4vIVVrINH1dqX2YttkHmIWsfVWs646wcRFYis6fIVGpfYq1kjpGSW8kSRD+xYSmXRM0Ang9eSZioVdy/5pWaLqzIRyIpuVxYozvGf1m67I7pf/s3UXv+AP61NI2Y+BbSAAAAAElFTkSuQmCC" height=25></a>
<a href="http://affine.pro"><img src="https://img.shields.io/badge/-AFFiNE-06449d?style=social&logo=affine" height=25></a>

<a href="https://community.affine.pro"><img src="https://img.shields.io/badge/-Community-424549?style=social&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAAXNJREFUWEftlitLRUEURtdVEVExWUx2qxgNVouoXYtNDP4Tw20WtftAsItZrHaTYBJREZ98MAc248wcZxi4CGfSeezHmm/23kyPAa/egPPTAXQK/FsFBP7ldVDRZoqcgO9I+2bHy3ZIJBfTCPCZM1tqAxwBmzUBrNQNbEx+5b0B5oEN4NCBrAMnMaiUAuPAs3HU82TLEZwBqwGbaJ4UgKQ8CFR6SoEl4LIWwCJwZQCegKkWBWLHVKSActvdzgG3DqitDf3/VQBskBDALrDnAKXUo3ueAF5KinAf2DKOmnzD7l214bdbA6hC1XHZNQa8hSBC0hwDa57xDHDvvvWB7ciOZoE79+8CWPbsBGc769eFxJdWIKcuyIdRoG3W7AAC1dJkHDIOo8B78+4rEBo8r4AkLFk6Jk3HaeDBBTgHVmIAfpJUz+cAFXVBreQCvQYW/lqEjV1NAMUMqpAaxQMHyDnjYtuS+0BxstwaqJooFqxToFPgB5FuPCEB6XK2AAAAAElFTkSuQmCC" height=25></a>

@@ -69,11 +70,11 @@ See https://github.com/all-?/all-contributors/issues/361#issuecomment-637166066

Before we tell you how to get started with AFFiNE, we'd like to shamelessly plug our awesome user and developer communities across [official social platforms](https://community.affine.pro/c/start-here/)! Once you’re familiar with using the software, maybe you will share your wisdom with others and even consider joining the [AFFiNE Ambassador program](https://community.affine.pro/c/start-here/affine-ambassador) to help spread AFFiNE to the world.

## Getting started & Stay tunned with us.
## Getting started & staying tuned with us.

⚠️ Please note that AFFiNE is still under active development and is not yet ready for production use. ⚠️

[](https://app.affine.pro) No installation or registration required! Head over to our website and try it out now.
[](https://app.affine.pro) No installation or registration required! Head over to our website and try it out now.

[](https://community.affine.pro) Our wonderful community, where you can meet and engage with the team, developers and other like-minded enthusiastic user of AFFiNE.

@@ -119,6 +120,15 @@ If you have questions, you are welcome to contact us. One of the best places to
| [@toeverything/y-indexeddb](packages/y-indexeddb) | IndexedDB database adapter for Yjs | [](https://www.npmjs.com/package/@toeverything/y-indexeddb) |
| [@toeverything/theme](packages/theme) | AFFiNE theme | [](https://www.npmjs.com/package/@toeverything/theme) |

## Plugins

> Plugins are a way to extend the functionality of AFFiNE.

| Name | |
| ------------------------------------------------ | ----------------------------------------- |
| [@affine/bookmark-block](plugins/bookmark-block) | A block for bookmarking a website |
| [@affine/copilot](plugins/copilot) | AI Copilot that help you document writing |

## Thanks

We would also like to give thanks to open-source projects that make AFFiNE possible:

@@ -140,7 +150,7 @@ Thanks a lot to the community for providing such powerful and simple libraries,
We would like to express our gratitude to all the individuals who have already contributed to AFFiNE! If you have any AFFiNE-related project, documentation, tool or template, please feel free to contribute it by submitting a pull request to our curated list on GitHub: [awesome-affine](https://github.com/toeverything/awesome-affine).

<a href="https://github.com/toeverything/affine/graphs/contributors">
<img src="https://user-images.githubusercontent.com/5910926/237263745-36bb975d-83d6-4a7c-a152-d9ad020e5023.png" />
<img src="https://user-images.githubusercontent.com/5910926/240508358-93eddded-48a0-40cd-85e4-a1d172dbe1d9.svg" />
</a>

## Self-Host
@@ -7,6 +7,7 @@ To run AFFiNE Desktop Client Application locally, run the following commands:
```sh
# in repo root
yarn install
yarn workspace @affine/native build
yarn dev

# in apps/electron
@@ -94,7 +94,7 @@ module.exports = {
config: {
name: 'AFFiNE',
setupIcon: icoPath,
// loadingGif: './resources/icons/loading.gif',
loadingGif: './resources/icons/affine_installing.gif',
},
},
],
@@ -1,7 +0,0 @@
/* eslint-disable @typescript-eslint/consistent-type-imports */
// This file contains the main process events
// It will guide preload and main process on the correct event types and payloads

export type MainIPCHandlerMap = typeof import('./main/src/exposed').handlers;

export type MainIPCEventMap = typeof import('./main/src/exposed').events;
@@ -2,10 +2,11 @@ import assert from 'node:assert';
import path from 'node:path';

import fs from 'fs-extra';
import { v4 } from 'uuid';
import { afterEach, beforeEach, describe, expect, test, vi } from 'vitest';
import * as Y from 'yjs';

import type { MainIPCHandlerMap } from '../../../../constraints';
import type { MainIPCHandlerMap } from '../exposed';

const registeredHandlers = new Map<
string,

@@ -40,6 +41,7 @@ ReturnType<MainIPCHandlerMap[T][F]> {
}

const SESSION_DATA_PATH = path.join(__dirname, './tmp', 'affine-test');
const DOCUMENTS_PATH = path.join(__dirname, './tmp', 'affine-test-documents');

const browserWindow = {
isDestroyed: () => {

@@ -90,8 +92,12 @@ function compareBuffer(a: Uint8Array | null, b: Uint8Array | null) {
const electronModule = {
app: {
getPath: (name: string) => {
assert(name === 'sessionData');
return SESSION_DATA_PATH;
if (name === 'sessionData') {
return SESSION_DATA_PATH;
} else if (name === 'documents') {
return DOCUMENTS_PATH;
}
throw new Error('not implemented');
},
name: 'affine-test',
on: (name: string, callback: (...args: any[]) => any) => {

@@ -99,6 +105,11 @@ const electronModule = {
handlers.push(callback);
registeredHandlers.set(name, handlers);
},
addEventListener: (...args: any[]) => {
// @ts-ignore
electronModule.app.on(...args);
},
removeEventListener: () => {},
},
BrowserWindow: {
getAllWindows: () => {

@@ -117,26 +128,28 @@ vi.doMock('electron', () => {
});

beforeEach(async () => {
const { registerHandlers } = await import('../register');
const { registerHandlers } = await import('../handlers');
registerHandlers();

// should also register events
const { registerEvents } = await import('../../events');
const { registerEvents } = await import('../events');
registerEvents();
await fs.mkdirp(SESSION_DATA_PATH);
await import('../db/ensure-db');

registeredHandlers.get('ready')?.forEach(fn => fn());
});

afterEach(async () => {
const { cleanupSQLiteDBs } = await import('../db/ensure-db');
await cleanupSQLiteDBs();
await fs.remove(SESSION_DATA_PATH);

// reset registered handlers
registeredHandlers.get('before-quit')?.forEach(fn => fn());

await fs.remove(SESSION_DATA_PATH);
});

describe('ensureSQLiteDB', () => {
test('should create db file on connection if it does not exist', async () => {
const id = 'test-workspace-id';
const id = v4();
const { ensureSQLiteDB } = await import('../db/ensure-db');
const workspaceDB = await ensureSQLiteDB(id);
const file = workspaceDB.path;

@@ -144,72 +157,47 @@
expect(fileExists).toBe(true);
});

test('when db file is removed', async () => {
// stub webContents.send
const sendStub = vi.fn();
browserWindow.webContents.send = sendStub;
const id = 'test-workspace-id';
test('should emit the same db instance for the same id', async () => {
const [id1, id2] = [v4(), v4()];
const { ensureSQLiteDB } = await import('../db/ensure-db');
let workspaceDB = await ensureSQLiteDB(id);
const file = workspaceDB.path;
const fileExists = await fs.pathExists(file);
expect(fileExists).toBe(true);

await fs.remove(file);

// wait for 1000ms for file watcher to detect file removal
await delay(2000);

expect(sendStub).toBeCalledWith('db:onDBFileMissing', id);

// ensureSQLiteDB should recreate the db file
workspaceDB = await ensureSQLiteDB(id);
const fileExists2 = await fs.pathExists(file);
expect(fileExists2).toBe(true);
const workspaceDB1 = await ensureSQLiteDB(id1);
const workspaceDB2 = await ensureSQLiteDB(id2);
const workspaceDB3 = await ensureSQLiteDB(id1);
expect(workspaceDB1).toBe(workspaceDB3);
expect(workspaceDB1).not.toBe(workspaceDB2);
});

test('when db file is updated', async () => {
// stub webContents.send
const sendStub = vi.fn();
browserWindow.webContents.send = sendStub;
const id = 'test-workspace-id';
test('when app quit, db should be closed', async () => {
const id = v4();
const { ensureSQLiteDB } = await import('../db/ensure-db');
const workspaceDB = await ensureSQLiteDB(id);
const file = workspaceDB.path;
const fileExists = await fs.pathExists(file);
expect(fileExists).toBe(true);

// wait to make sure
await delay(500);

// writes some data to the db file
await fs.appendFile(file, 'random-data', { encoding: 'binary' });
// write again
await fs.appendFile(file, 'random-data', { encoding: 'binary' });

// wait for 200ms for file watcher to detect file change
await delay(2000);

expect(sendStub).toBeCalledWith('db:onDBFileUpdate', id);
registeredHandlers.get('before-quit')?.forEach(fn => fn());
await delay(100);
expect(workspaceDB.db).toBe(null);
});
});

describe('workspace handlers', () => {
test('list all workspace ids', async () => {
const ids = ['test-workspace-id', 'test-workspace-id-2'];
const ids = [v4(), v4()];
const { ensureSQLiteDB } = await import('../db/ensure-db');
await Promise.all(ids.map(id => ensureSQLiteDB(id)));
const list = await dispatch('workspace', 'list');
expect(list.map(([id]) => id)).toEqual(ids);
expect(list.map(([id]) => id).sort()).toEqual(ids.sort());
});

test('delete workspace', async () => {
const ids = ['test-workspace-id', 'test-workspace-id-2'];
const ids = [v4(), v4()];
const { ensureSQLiteDB } = await import('../db/ensure-db');
await Promise.all(ids.map(id => ensureSQLiteDB(id)));
await dispatch('workspace', 'delete', 'test-workspace-id-2');
const dbs = await Promise.all(ids.map(id => ensureSQLiteDB(id)));
await dispatch('workspace', 'delete', ids[1]);
const list = await dispatch('workspace', 'list');
expect(list.map(([id]) => id)).toEqual(['test-workspace-id']);
expect(list.map(([id]) => id)).toEqual([ids[0]]);
// deleted db should be closed
expect(dbs[1].db).toBe(null);
});
});
@@ -244,7 +232,7 @@ describe('UI handlers', () => {

describe('db handlers', () => {
test('apply doc and get doc updates', async () => {
const workspaceId = 'test-workspace-id';
const workspaceId = v4();
const bin = await dispatch('db', 'getDocAsUpdates', workspaceId);
// ? is this a good test?
expect(bin.every((byte: number) => byte === 0)).toBe(true);

@@ -264,14 +252,14 @@ describe('db handlers', () => {
});

test('get non existent blob', async () => {
const workspaceId = 'test-workspace-id';
const workspaceId = v4();
const bin = await dispatch('db', 'getBlob', workspaceId, 'non-existent-id');
expect(bin).toBeNull();
});

test('list blobs (empty)', async () => {
const workspaceId = 'test-workspace-id';
const list = await dispatch('db', 'getPersistedBlobs', workspaceId);
const workspaceId = v4();
const list = await dispatch('db', 'getBlobKeys', workspaceId);
expect(list).toEqual([]);
});

@@ -301,14 +289,14 @@ describe('db handlers', () => {
).toBe(true);

// list blobs
let lists = await dispatch('db', 'getPersistedBlobs', workspaceId);
let lists = await dispatch('db', 'getBlobKeys', workspaceId);
expect(lists).toHaveLength(2);
expect(lists).toContain('testBin');
expect(lists).toContain('testBin2');

// delete blob
await dispatch('db', 'deleteBlob', workspaceId, 'testBin');
lists = await dispatch('db', 'getPersistedBlobs', workspaceId);
lists = await dispatch('db', 'getBlobKeys', workspaceId);
expect(lists).toEqual(['testBin2']);
});
});

@@ -318,7 +306,7 @@ describe('dialog handlers', () => {
const mockShowItemInFolder = vi.fn();
electronModule.shell.showItemInFolder = mockShowItemInFolder;

const id = 'test-workspace-id';
const id = v4();
const { ensureSQLiteDB } = await import('../db/ensure-db');
const db = await ensureSQLiteDB(id);

@@ -334,13 +322,15 @@ describe('dialog handlers', () => {
electronModule.dialog.showSaveDialog = mockShowSaveDialog;
electronModule.shell.showItemInFolder = mockShowItemInFolder;

const id = 'test-workspace-id';
const id = v4();
const { ensureSQLiteDB } = await import('../db/ensure-db');
await ensureSQLiteDB(id);

await dispatch('dialog', 'saveDBFileAs', id);
expect(mockShowSaveDialog).toBeCalled();
expect(mockShowItemInFolder).not.toBeCalled();
electronModule.dialog = {};
electronModule.shell = {};
});

test('saveDBFileAs', async () => {

@@ -352,7 +342,7 @@ describe('dialog handlers', () => {
electronModule.dialog.showSaveDialog = mockShowSaveDialog;
electronModule.shell.showItemInFolder = mockShowItemInFolder;

const id = 'test-workspace-id';
const id = v4();
const { ensureSQLiteDB } = await import('../db/ensure-db');
await ensureSQLiteDB(id);

@@ -388,10 +378,10 @@ describe('dialog handlers', () => {
expect(res.error).toBe('DB_FILE_PATH_INVALID');
});

test('loadDBFile (error, not a valid db file)', async () => {
test('loadDBFile (error, not a valid affine file)', async () => {
// create a random db file
const basePath = path.join(SESSION_DATA_PATH, 'random-path');
const dbPath = path.join(basePath, 'xxx.db');
const dbPath = path.join(basePath, 'xxx.affine');
await fs.ensureDir(basePath);
await fs.writeFile(dbPath, 'hello world');

@@ -403,70 +393,102 @@ describe('dialog handlers', () => {
const res = await dispatch('dialog', 'loadDBFile');
expect(mockShowOpenDialog).toBeCalled();
expect(res.error).toBe('DB_FILE_INVALID');

electronModule.dialog = {};
});

test('loadDBFile', async () => {
test('loadDBFile (correct)', async () => {
// we use ensureSQLiteDB to create a valid db file
const id = 'test-workspace-id';
const id = v4();
const { ensureSQLiteDB } = await import('../db/ensure-db');
const db = await ensureSQLiteDB(id);

// copy db file to dbPath
const basePath = path.join(SESSION_DATA_PATH, 'random-path');
const originDBFilePath = path.join(basePath, 'xxx.db');
const clonedDBPath = path.join(basePath, 'xxx.affine');
await fs.ensureDir(basePath);
await fs.copyFile(db.path, originDBFilePath);
await fs.copyFile(db.path, clonedDBPath);

// remove db
await fs.remove(db.path);
// delete workspace
await dispatch('workspace', 'delete', id);

// try load originDBFilePath
const mockShowOpenDialog = vi.fn(() => {
return { filePaths: [originDBFilePath] };
return { filePaths: [clonedDBPath] };
}) as any;
electronModule.dialog.showOpenDialog = mockShowOpenDialog;

const res = await dispatch('dialog', 'loadDBFile');
expect(mockShowOpenDialog).toBeCalled();
expect(res.workspaceId).not.toBeUndefined();
const newId = res.workspaceId;

const importedDb = await ensureSQLiteDB(res.workspaceId!);
expect(await fs.realpath(importedDb.path)).toBe(originDBFilePath);
expect(importedDb.path).not.toBe(originDBFilePath);
expect(newId).not.toBeUndefined();

assert(newId);

const meta = await dispatch('workspace', 'getMeta', newId);

expect(meta.secondaryDBPath).toBe(clonedDBPath);

// try load it again, will trigger error (db file already loaded)
const res2 = await dispatch('dialog', 'loadDBFile');
expect(res2.error).toBe('DB_FILE_ALREADY_LOADED');
});

test('moveDBFile', async () => {
const newPath = path.join(SESSION_DATA_PATH, 'affine-test', 'xxx');
const mockShowSaveDialog = vi.fn(() => {
return { filePath: newPath };
test('moveDBFile (valid)', async () => {
const sendStub = vi.fn();
browserWindow.webContents.send = sendStub;
const newPath = path.join(SESSION_DATA_PATH, 'xxx');
const showOpenDialog = vi.fn(() => {
return { filePaths: [newPath] };
}) as any;
electronModule.dialog.showSaveDialog = mockShowSaveDialog;
electronModule.dialog.showOpenDialog = showOpenDialog;

const id = 'test-workspace-id';
const id = v4();
const { ensureSQLiteDB } = await import('../db/ensure-db');
await ensureSQLiteDB(id);

const db = await ensureSQLiteDB(id);
const res = await dispatch('dialog', 'moveDBFile', id);
expect(mockShowSaveDialog).toBeCalled();
expect(res.filePath).toBe(newPath);
expect(showOpenDialog).toBeCalled();
assert(res.filePath);
expect(path.dirname(res.filePath)).toBe(newPath);
expect(res.filePath.endsWith('.affine')).toBe(true);
// should also send workspace meta change event
expect(sendStub).toBeCalledWith('workspace:onMetaChange', {
workspaceId: id,
meta: { id, secondaryDBPath: res.filePath, mainDBPath: db.path },
});
electronModule.dialog = {};
browserWindow.webContents.send = () => {};
});

test('moveDBFile (skipped)', async () => {
const mockShowSaveDialog = vi.fn(() => {
return { filePath: null };
test('moveDBFile (canceled)', async () => {
const showOpenDialog = vi.fn(() => {
return { filePaths: null };
}) as any;
electronModule.dialog.showSaveDialog = mockShowSaveDialog;
electronModule.dialog.showOpenDialog = showOpenDialog;

const id = 'test-workspace-id';
const id = v4();
const { ensureSQLiteDB } = await import('../db/ensure-db');
await ensureSQLiteDB(id);

const res = await dispatch('dialog', 'moveDBFile', id);
expect(mockShowSaveDialog).toBeCalled();
expect(showOpenDialog).toBeCalled();
expect(res.filePath).toBe(undefined);
electronModule.dialog = {};
});
});

describe('applicationMenu', () => {
// test some basic IPC events
test('applicationMenu event', async () => {
const { applicationMenuSubjects } = await import('../application-menu');
const sendStub = vi.fn();
browserWindow.webContents.send = sendStub;
applicationMenuSubjects.newPageAction.next();
expect(sendStub).toHaveBeenCalledWith(
'applicationMenu:onNewPageAction',
undefined
);
browserWindow.webContents.send = () => {};
});
});
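The tests above call handlers through a small `dispatch(namespace, method, ...args)` helper typed against `MainIPCHandlerMap`, which is now imported from `../exposed` instead of the deleted `constraints.ts`. A minimal sketch of how such a helper can be typed follows; the handler registry and the channel-key format are assumptions for illustration, not the repository's exact implementation:

```ts
import type { MainIPCHandlerMap } from '../exposed';

// Resolve the return type of a handler while keeping the generic signature valid TS.
type HandlerResult<
  T extends keyof MainIPCHandlerMap,
  F extends keyof MainIPCHandlerMap[T]
> = MainIPCHandlerMap[T][F] extends (...args: any[]) => infer R ? R : never;

// Hypothetical registry; the real tests capture handlers from the mocked ipcMain instead.
const handlers = new Map<string, (...args: any[]) => any>();

function dispatch<
  T extends keyof MainIPCHandlerMap,
  F extends keyof MainIPCHandlerMap[T]
>(namespace: T, method: F, ...args: any[]): HandlerResult<T, F> {
  const key = `${String(namespace)}:${String(method)}`;
  const handler = handlers.get(key);
  if (!handler) {
    throw new Error(`no handler registered for ${key}`);
  }
  return handler(...args);
}
```

Typing the helper this way keeps test calls like `dispatch('workspace', 'list')` checked against the real handler signatures exposed by the main process.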
@@ -1,7 +1,9 @@
|
||||
import { app, Menu } from 'electron';
|
||||
|
||||
import { isMacOS } from '../../utils';
|
||||
import { subjects } from './events';
|
||||
import { revealLogFile } from '../logger';
|
||||
import { checkForUpdatesAndNotify } from '../updater';
|
||||
import { isMacOS } from '../utils';
|
||||
import { applicationMenuSubjects } from './subject';
|
||||
|
||||
// Unique id for menuitems
|
||||
const MENUITEM_NEW_PAGE = 'affine:new-page';
|
||||
@@ -41,7 +43,7 @@ export function createApplicationMenu() {
|
||||
label: 'New Page',
|
||||
accelerator: isMac ? 'Cmd+N' : 'Ctrl+N',
|
||||
click: () => {
|
||||
subjects.applicationMenu.newPageAction.next();
|
||||
applicationMenuSubjects.newPageAction.next();
|
||||
},
|
||||
},
|
||||
{ type: 'separator' },
|
||||
@@ -114,6 +116,18 @@ export function createApplicationMenu() {
|
||||
await shell.openExternal('https://affine.pro/');
|
||||
},
|
||||
},
|
||||
{
|
||||
label: 'Open log file',
|
||||
click: async () => {
|
||||
revealLogFile();
|
||||
},
|
||||
},
|
||||
{
|
||||
label: 'Check for Updates',
|
||||
click: async () => {
|
||||
await checkForUpdatesAndNotify(true);
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
@@ -1,10 +1,8 @@
|
||||
import { Subject } from 'rxjs';
|
||||
import type { MainEventListener } from '../type';
|
||||
import { applicationMenuSubjects } from './subject';
|
||||
|
||||
import type { MainEventListener } from './type';
|
||||
|
||||
export const applicationMenuSubjects = {
|
||||
newPageAction: new Subject<void>(),
|
||||
};
|
||||
export * from './create';
|
||||
export * from './subject';
|
||||
|
||||
/**
|
||||
* Events triggered by application menu
|
||||
@@ -0,0 +1,5 @@
import { Subject } from 'rxjs';

export const applicationMenuSubjects = {
  newPageAction: new Subject<void>(),
};
1
apps/electron/layers/main/src/db/__tests__/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
tmp
147
apps/electron/layers/main/src/db/__tests__/ensure-db.spec.ts
Normal file
@@ -0,0 +1,147 @@
|
||||
import path from 'node:path';
|
||||
|
||||
import fs from 'fs-extra';
|
||||
import { v4 } from 'uuid';
|
||||
import { afterEach, beforeEach, expect, test, vi } from 'vitest';
|
||||
|
||||
const tmpDir = path.join(__dirname, 'tmp');
|
||||
|
||||
const registeredHandlers = new Map<
|
||||
string,
|
||||
((...args: any[]) => Promise<any>)[]
|
||||
>();
|
||||
|
||||
const SESSION_DATA_PATH = path.join(tmpDir, 'affine-test');
|
||||
const DOCUMENTS_PATH = path.join(tmpDir, 'affine-test-documents');
|
||||
|
||||
const electronModule = {
|
||||
app: {
|
||||
getPath: (name: string) => {
|
||||
if (name === 'sessionData') {
|
||||
return SESSION_DATA_PATH;
|
||||
} else if (name === 'documents') {
|
||||
return DOCUMENTS_PATH;
|
||||
}
|
||||
throw new Error('not implemented');
|
||||
},
|
||||
name: 'affine-test',
|
||||
on: (name: string, callback: (...args: any[]) => any) => {
|
||||
const handlers = registeredHandlers.get(name) || [];
|
||||
handlers.push(callback);
|
||||
registeredHandlers.set(name, handlers);
|
||||
},
|
||||
addEventListener: (...args: any[]) => {
|
||||
// @ts-ignore
|
||||
electronModule.app.on(...args);
|
||||
},
|
||||
removeEventListener: () => {},
|
||||
},
|
||||
shell: {} as Partial<Electron.Shell>,
|
||||
dialog: {} as Partial<Electron.Dialog>,
|
||||
};
|
||||
|
||||
const runHandler = (key: string) => {
|
||||
registeredHandlers.get(key)?.forEach(handler => handler());
|
||||
};
|
||||
|
||||
// dynamically import handlers so that we can inject local variables to mocks
|
||||
vi.doMock('electron', () => {
|
||||
return electronModule;
|
||||
});
|
||||
|
||||
const constructorStub = vi.fn();
|
||||
const destroyStub = vi.fn();
|
||||
|
||||
vi.doMock('../secondary-db', () => {
|
||||
return {
|
||||
SecondaryWorkspaceSQLiteDB: class {
|
||||
constructor(...args: any[]) {
|
||||
constructorStub(...args);
|
||||
}
|
||||
|
||||
destroy = destroyStub;
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
vi.useFakeTimers({ shouldAdvanceTime: true });
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
runHandler('before-quit');
|
||||
await fs.remove(tmpDir);
|
||||
vi.useRealTimers();
|
||||
});
|
||||
|
||||
test('can get a valid WorkspaceSQLiteDB', async () => {
|
||||
const { ensureSQLiteDB } = await import('../ensure-db');
|
||||
const workspaceId = v4();
|
||||
const db0 = await ensureSQLiteDB(workspaceId);
|
||||
expect(db0).toBeDefined();
|
||||
expect(db0.workspaceId).toBe(workspaceId);
|
||||
|
||||
const db1 = await ensureSQLiteDB(v4());
|
||||
expect(db1).not.toBe(db0);
|
||||
expect(db1.workspaceId).not.toBe(db0.workspaceId);
|
||||
|
||||
// ensure that the db is cached
|
||||
expect(await ensureSQLiteDB(workspaceId)).toBe(db0);
|
||||
});
|
||||
|
||||
test('db should be destroyed when app quits', async () => {
|
||||
const { ensureSQLiteDB } = await import('../ensure-db');
|
||||
const workspaceId = v4();
|
||||
const db0 = await ensureSQLiteDB(workspaceId);
|
||||
const db1 = await ensureSQLiteDB(v4());
|
||||
|
||||
expect(db0.db).not.toBeNull();
|
||||
expect(db1.db).not.toBeNull();
|
||||
|
||||
runHandler('before-quit');
|
||||
|
||||
expect(db0.db).toBeNull();
|
||||
expect(db1.db).toBeNull();
|
||||
});
|
||||
|
||||
test('if db has a secondary db path, we should also poll that', async () => {
|
||||
const { ensureSQLiteDB } = await import('../ensure-db');
|
||||
const { appContext } = await import('../../context');
|
||||
const { storeWorkspaceMeta } = await import('../../workspace');
|
||||
const workspaceId = v4();
|
||||
await storeWorkspaceMeta(appContext, workspaceId, {
|
||||
secondaryDBPath: path.join(tmpDir, 'secondary.db'),
|
||||
});
|
||||
|
||||
const db = await ensureSQLiteDB(workspaceId);
|
||||
|
||||
await vi.advanceTimersByTimeAsync(1500);
|
||||
|
||||
// not sure why but we still need to wait with real timer
|
||||
await new Promise(resolve => setTimeout(resolve, 100));
|
||||
|
||||
expect(constructorStub).toBeCalledTimes(1);
|
||||
expect(constructorStub).toBeCalledWith(path.join(tmpDir, 'secondary.db'), db);
|
||||
|
||||
// if secondary meta is changed
|
||||
await storeWorkspaceMeta(appContext, workspaceId, {
|
||||
secondaryDBPath: path.join(tmpDir, 'secondary2.db'),
|
||||
});
|
||||
|
||||
await vi.advanceTimersByTimeAsync(1500);
|
||||
expect(constructorStub).toBeCalledTimes(2);
|
||||
expect(destroyStub).toBeCalledTimes(1);
|
||||
|
||||
// if secondary meta is changed (but another workspace)
|
||||
await storeWorkspaceMeta(appContext, v4(), {
|
||||
secondaryDBPath: path.join(tmpDir, 'secondary3.db'),
|
||||
});
|
||||
await vi.advanceTimersByTimeAsync(1500);
|
||||
expect(constructorStub).toBeCalledTimes(2);
|
||||
expect(destroyStub).toBeCalledTimes(1);
|
||||
|
||||
// if primary is destroyed, secondary should also be destroyed
|
||||
db.destroy();
|
||||
await new Promise(resolve => setTimeout(resolve, 100));
|
||||
expect(destroyStub).toBeCalledTimes(2);
|
||||
});
|
||||
@@ -0,0 +1,101 @@
|
||||
import path from 'node:path';
|
||||
|
||||
import fs from 'fs-extra';
|
||||
import { v4 } from 'uuid';
|
||||
import { afterEach, expect, test, vi } from 'vitest';
|
||||
import * as Y from 'yjs';
|
||||
|
||||
import type { AppContext } from '../../context';
|
||||
import { dbSubjects } from '../subjects';
|
||||
|
||||
const tmpDir = path.join(__dirname, 'tmp');
|
||||
|
||||
const testAppContext: AppContext = {
|
||||
appDataPath: path.join(tmpDir, 'test-data'),
|
||||
appName: 'test',
|
||||
};
|
||||
|
||||
afterEach(async () => {
|
||||
if (process.platform !== 'win32') {
|
||||
// hmmm ....
|
||||
await fs.remove(tmpDir);
|
||||
}
|
||||
});
|
||||
|
||||
function getTestUpdates() {
|
||||
const testYDoc = new Y.Doc();
|
||||
const yText = testYDoc.getText('test');
|
||||
yText.insert(0, 'hello');
|
||||
const updates = Y.encodeStateAsUpdate(testYDoc);
|
||||
|
||||
return updates;
|
||||
}
|
||||
test('can create new db file if not exists', async () => {
|
||||
const { openWorkspaceDatabase } = await import('../workspace-db-adapter');
|
||||
const workspaceId = v4();
|
||||
const db = await openWorkspaceDatabase(testAppContext, workspaceId);
|
||||
const dbPath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
`workspaces/${workspaceId}`,
|
||||
`storage.db`
|
||||
);
|
||||
expect(await fs.exists(dbPath)).toBe(true);
|
||||
db.destroy();
|
||||
});
|
||||
|
||||
test('on applyUpdate (from self), will not trigger update', async () => {
|
||||
const { openWorkspaceDatabase } = await import('../workspace-db-adapter');
|
||||
const workspaceId = v4();
|
||||
const onUpdate = vi.fn();
|
||||
|
||||
const db = await openWorkspaceDatabase(testAppContext, workspaceId);
|
||||
db.update$.subscribe(onUpdate);
|
||||
db.applyUpdate(getTestUpdates(), 'self');
|
||||
expect(onUpdate).not.toHaveBeenCalled();
|
||||
db.destroy();
|
||||
});
|
||||
|
||||
test('on applyUpdate (from renderer), will trigger update', async () => {
|
||||
const { openWorkspaceDatabase } = await import('../workspace-db-adapter');
|
||||
const workspaceId = v4();
|
||||
const onUpdate = vi.fn();
|
||||
const onExternalUpdate = vi.fn();
|
||||
|
||||
const db = await openWorkspaceDatabase(testAppContext, workspaceId);
|
||||
db.update$.subscribe(onUpdate);
|
||||
const sub = dbSubjects.externalUpdate.subscribe(onExternalUpdate);
|
||||
db.applyUpdate(getTestUpdates(), 'renderer');
|
||||
expect(onUpdate).toHaveBeenCalled(); // renderer updates are persisted, so update$ fires
|
||||
sub.unsubscribe();
|
||||
db.destroy();
|
||||
});
|
||||
|
||||
test('on applyUpdate (from external), will trigger update & send external update event', async () => {
|
||||
const { openWorkspaceDatabase } = await import('../workspace-db-adapter');
|
||||
const workspaceId = v4();
|
||||
const onUpdate = vi.fn();
|
||||
const onExternalUpdate = vi.fn();
|
||||
|
||||
const db = await openWorkspaceDatabase(testAppContext, workspaceId);
|
||||
db.update$.subscribe(onUpdate);
|
||||
const sub = dbSubjects.externalUpdate.subscribe(onExternalUpdate);
|
||||
db.applyUpdate(getTestUpdates(), 'external');
|
||||
expect(onUpdate).toHaveBeenCalled();
|
||||
expect(onExternalUpdate).toHaveBeenCalled();
|
||||
sub.unsubscribe();
|
||||
db.destroy();
|
||||
});
|
||||
|
||||
test('on destroy, check if resources have been released', async () => {
|
||||
const { openWorkspaceDatabase } = await import('../workspace-db-adapter');
|
||||
const workspaceId = v4();
|
||||
const db = await openWorkspaceDatabase(testAppContext, workspaceId);
|
||||
const updateSub = {
|
||||
complete: vi.fn(),
|
||||
next: vi.fn(),
|
||||
};
|
||||
db.update$ = updateSub as any;
|
||||
db.destroy();
|
||||
expect(db.db).toBe(null);
|
||||
expect(updateSub.complete).toHaveBeenCalled();
|
||||
});
|
||||
152
apps/electron/layers/main/src/db/base-db-adapter.ts
Normal file
@@ -0,0 +1,152 @@
|
||||
import assert from 'assert';
|
||||
import type { Database } from 'better-sqlite3';
|
||||
import sqlite from 'better-sqlite3';
|
||||
|
||||
import { logger } from '../logger';
|
||||
|
||||
const schemas = [
|
||||
`CREATE TABLE IF NOT EXISTS "updates" (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
data BLOB NOT NULL,
|
||||
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
|
||||
)`,
|
||||
`CREATE TABLE IF NOT EXISTS "blobs" (
|
||||
key TEXT PRIMARY KEY NOT NULL,
|
||||
data BLOB NOT NULL,
|
||||
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
|
||||
)`,
|
||||
];
|
||||
|
||||
interface UpdateRow {
|
||||
id: number;
|
||||
data: Buffer;
|
||||
timestamp: string;
|
||||
}
|
||||
|
||||
interface BlobRow {
|
||||
key: string;
|
||||
data: Buffer;
|
||||
timestamp: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* A base class for SQLite DB adapter that provides basic methods around updates & blobs
|
||||
*/
|
||||
export abstract class BaseSQLiteAdapter {
|
||||
db: Database | null = null;
|
||||
abstract role: string;
|
||||
|
||||
constructor(public path: string) {}
|
||||
|
||||
ensureTables() {
|
||||
assert(this.db, 'db is not connected');
|
||||
this.db.exec(schemas.join(';'));
|
||||
}
|
||||
|
||||
// todo: what if SQLite DB wrapper later is not sync?
|
||||
connect(): Database | undefined {
|
||||
if (this.db) {
|
||||
return this.db;
|
||||
}
|
||||
logger.log(`[SQLiteAdapter][${this.role}] open db`, this.path);
|
||||
const db = (this.db = sqlite(this.path));
|
||||
this.ensureTables();
|
||||
return db;
|
||||
}
|
||||
|
||||
destroy() {
|
||||
this.db?.close();
|
||||
this.db = null;
|
||||
}
|
||||
|
||||
addBlob(key: string, data: Uint8Array) {
|
||||
try {
|
||||
assert(this.db, 'db is not connected');
|
||||
const statement = this.db.prepare(
|
||||
'INSERT INTO blobs (key, data) VALUES (?, ?) ON CONFLICT(key) DO UPDATE SET data = ?'
|
||||
);
|
||||
statement.run(key, data, data);
|
||||
return key;
|
||||
} catch (error) {
|
||||
logger.error('addBlob', error);
|
||||
}
|
||||
}
|
||||
|
||||
getBlob(key: string) {
|
||||
try {
|
||||
assert(this.db, 'db is not connected');
|
||||
const statement = this.db.prepare('SELECT data FROM blobs WHERE key = ?');
|
||||
const row = statement.get(key) as BlobRow;
|
||||
if (!row) {
|
||||
return null;
|
||||
}
|
||||
return row.data;
|
||||
} catch (error) {
|
||||
logger.error('getBlob', error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
deleteBlob(key: string) {
|
||||
try {
|
||||
assert(this.db, 'db is not connected');
|
||||
const statement = this.db.prepare('DELETE FROM blobs WHERE key = ?');
|
||||
statement.run(key);
|
||||
} catch (error) {
|
||||
logger.error('deleteBlob', error);
|
||||
}
|
||||
}
|
||||
|
||||
getBlobKeys() {
|
||||
try {
|
||||
assert(this.db, 'db is not connected');
|
||||
const statement = this.db.prepare('SELECT key FROM blobs');
|
||||
const rows = statement.all() as BlobRow[];
|
||||
return rows.map(row => row.key);
|
||||
} catch (error) {
|
||||
logger.error('getBlobKeys', error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
getUpdates() {
|
||||
try {
|
||||
assert(this.db, 'db is not connected');
|
||||
const statement = this.db.prepare('SELECT * FROM updates');
|
||||
const rows = statement.all() as UpdateRow[];
|
||||
return rows;
|
||||
} catch (error) {
|
||||
logger.error('getUpdates', error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
// add a single update to SQLite
|
||||
addUpdateToSQLite(updates: Uint8Array[]) {
|
||||
// batch write instead write per key stroke?
|
||||
try {
|
||||
assert(this.db, 'db is not connected');
|
||||
const start = performance.now();
|
||||
const statement = this.db.prepare(
|
||||
'INSERT INTO updates (data) VALUES (?)'
|
||||
);
|
||||
const insertMany = this.db.transaction(updates => {
|
||||
for (const d of updates) {
|
||||
statement.run(d);
|
||||
}
|
||||
});
|
||||
|
||||
insertMany(updates);
|
||||
|
||||
logger.debug(
|
||||
`[SQLiteAdapter][${this.role}] addUpdateToSQLite`,
|
||||
'length:',
|
||||
updates.length,
|
||||
performance.now() - start,
|
||||
'ms'
|
||||
);
|
||||
} catch (error) {
|
||||
logger.error('addUpdateToSQLite', error);
|
||||
}
|
||||
}
|
||||
}
|
||||
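BaseSQLiteAdapter is what the primary and secondary adapters below share. As a rough illustration of the inherited surface (not part of the diff; the class name is made up), a subclass only needs to provide a role and can then lean on connect, the blob helpers and getUpdates:

import { BaseSQLiteAdapter } from './base-db-adapter';

// hypothetical read-only adapter, just to show what subclasses inherit
class InspectOnlySQLiteDB extends BaseSQLiteAdapter {
  role = 'inspect';

  dump() {
    this.connect(); // opens better-sqlite3 and applies the CREATE TABLE IF NOT EXISTS schemas
    const updates = this.getUpdates(); // [{ id, data, timestamp }, ...]
    const blobKeys = this.getBlobKeys();
    this.destroy(); // closes the connection again
    return { updateCount: updates.length, blobKeys };
  }
}

// usage: new InspectOnlySQLiteDB('/path/to/storage.db').dump()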
110
apps/electron/layers/main/src/db/ensure-db.ts
Normal file
@@ -0,0 +1,110 @@
|
||||
import { app } from 'electron';
|
||||
import {
|
||||
defer,
|
||||
firstValueFrom,
|
||||
from,
|
||||
fromEvent,
|
||||
interval,
|
||||
merge,
|
||||
Observable,
|
||||
} from 'rxjs';
|
||||
import {
|
||||
distinctUntilChanged,
|
||||
filter,
|
||||
ignoreElements,
|
||||
last,
|
||||
map,
|
||||
shareReplay,
|
||||
startWith,
|
||||
switchMap,
|
||||
takeUntil,
|
||||
tap,
|
||||
} from 'rxjs/operators';
|
||||
|
||||
import { appContext } from '../context';
|
||||
import { logger } from '../logger';
|
||||
import { getWorkspaceMeta$ } from '../workspace';
|
||||
import { SecondaryWorkspaceSQLiteDB } from './secondary-db';
|
||||
import type { WorkspaceSQLiteDB } from './workspace-db-adapter';
|
||||
import { openWorkspaceDatabase } from './workspace-db-adapter';
|
||||
|
||||
const db$Map = new Map<string, Observable<WorkspaceSQLiteDB>>();
|
||||
|
||||
const beforeQuit$ = defer(() => fromEvent(app, 'before-quit'));
|
||||
|
||||
function getWorkspaceDB$(id: string) {
|
||||
if (!db$Map.has(id)) {
|
||||
db$Map.set(
|
||||
id,
|
||||
from(openWorkspaceDatabase(appContext, id)).pipe(
|
||||
shareReplay(1),
|
||||
switchMap(db => {
|
||||
return startPollingSecondaryDB(db).pipe(
|
||||
ignoreElements(),
|
||||
startWith(db),
|
||||
takeUntil(beforeQuit$),
|
||||
tap({
|
||||
complete: () => {
|
||||
logger.info('[ensureSQLiteDB] close db connection');
|
||||
db.destroy();
|
||||
db$Map.delete(id);
|
||||
},
|
||||
})
|
||||
);
|
||||
}),
|
||||
shareReplay(1)
|
||||
)
|
||||
);
|
||||
}
|
||||
return db$Map.get(id)!;
|
||||
}
|
||||
|
||||
function startPollingSecondaryDB(db: WorkspaceSQLiteDB) {
|
||||
const meta$ = getWorkspaceMeta$(db.workspaceId);
|
||||
const secondaryDB$ = meta$.pipe(
|
||||
map(meta => meta?.secondaryDBPath),
|
||||
distinctUntilChanged(),
|
||||
filter((p): p is string => !!p),
|
||||
switchMap(path => {
|
||||
return new Observable<SecondaryWorkspaceSQLiteDB>(observer => {
|
||||
const secondaryDB = new SecondaryWorkspaceSQLiteDB(path, db);
|
||||
observer.next(secondaryDB);
|
||||
return () => {
|
||||
logger.info(
|
||||
'[ensureSQLiteDB] close secondary db connection',
|
||||
secondaryDB.path
|
||||
);
|
||||
secondaryDB.destroy();
|
||||
};
|
||||
});
|
||||
}),
|
||||
takeUntil(db.update$.pipe(last())),
|
||||
shareReplay(1)
|
||||
);
|
||||
|
||||
const firstDelayedTick$ = defer(() => {
|
||||
return new Promise<number>(resolve =>
|
||||
setTimeout(() => {
|
||||
resolve(0);
|
||||
}, 1000)
|
||||
);
|
||||
});
|
||||
|
||||
// pull every 30 seconds
|
||||
const poll$ = merge(firstDelayedTick$, interval(30000)).pipe(
|
||||
switchMap(() => secondaryDB$),
|
||||
tap({
|
||||
next: secondaryDB => {
|
||||
secondaryDB.pull();
|
||||
},
|
||||
}),
|
||||
takeUntil(db.update$.pipe(last())),
|
||||
shareReplay(1)
|
||||
);
|
||||
|
||||
return poll$;
|
||||
}
|
||||
|
||||
export function ensureSQLiteDB(id: string) {
|
||||
return firstValueFrom(getWorkspaceDB$(id));
|
||||
}
|
||||
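ensureSQLiteDB wraps the whole observable pipeline in a promise: the first call for a workspace id opens the database and starts polling the secondary copy, later calls resolve to the cached instance, and the before-quit stream tears everything down. A minimal consumer sketch; getWorkspaceSnapshot is an illustrative name, not from the diff:

import { ensureSQLiteDB } from '../db/ensure-db';

// repeated calls with the same id resolve to the same cached WorkspaceSQLiteDB
async function getWorkspaceSnapshot(workspaceId: string) {
  const db = await ensureSQLiteDB(workspaceId); // opens and caches on first call
  return db.getDocAsUpdates(); // Uint8Array snapshot of the workspace yDoc
}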
38
apps/electron/layers/main/src/db/helper.ts
Normal file
@@ -0,0 +1,38 @@
import type { Database } from 'better-sqlite3';
import sqlite from 'better-sqlite3';

import { logger } from '../logger';

export function isValidateDB(db: Database) {
  // check if db has two tables, one for updates and one for blobs
  const statement = db.prepare(
    `SELECT name FROM sqlite_schema WHERE type='table'`
  );
  const rows = statement.all() as { name: string }[];
  const tableNames = rows.map(row => row.name);
  if (!tableNames.includes('updates') || !tableNames.includes('blobs')) {
    return false;
  }
  return true;
}

export function isValidDBFile(path: string) {
  let db: Database | null = null;
  try {
    db = sqlite(path);
    // check if db has two tables, one for updates and one for blobs
    const statement = db.prepare(
      `SELECT name FROM sqlite_schema WHERE type='table'`
    );
    const rows = statement.all() as { name: string }[];
    const tableNames = rows.map(row => row.name);
    if (!tableNames.includes('updates') || !tableNames.includes('blobs')) {
      return false;
    }
    return true;
  } catch (error) {
    logger.error('isValidDBFile', error);
    return false;
  } finally {
    db?.close();
  }
}
@@ -1,8 +1,10 @@
|
||||
import fs from 'fs-extra';
|
||||
|
||||
import { appContext } from '../../context';
|
||||
import type { NamespaceHandlers } from '../type';
|
||||
import { appContext } from '../context';
|
||||
import type { MainEventListener, NamespaceHandlers } from '../type';
|
||||
import { ensureSQLiteDB } from './ensure-db';
|
||||
import { dbSubjects } from './subjects';
|
||||
|
||||
export * from './ensure-db';
|
||||
export * from './subjects';
|
||||
|
||||
export const dbHandlers = {
|
||||
getDocAsUpdates: async (_, id: string) => {
|
||||
@@ -25,18 +27,22 @@ export const dbHandlers = {
|
||||
const workspaceDB = await ensureSQLiteDB(workspaceId);
|
||||
return workspaceDB.deleteBlob(key);
|
||||
},
|
||||
getPersistedBlobs: async (_, workspaceId: string) => {
|
||||
getBlobKeys: async (_, workspaceId: string) => {
|
||||
const workspaceDB = await ensureSQLiteDB(workspaceId);
|
||||
return workspaceDB.getPersistentBlobKeys();
|
||||
return workspaceDB.getBlobKeys();
|
||||
},
|
||||
getDefaultStorageLocation: async () => {
|
||||
return appContext.appDataPath;
|
||||
},
|
||||
getDBFilePath: async (_, workspaceId: string) => {
|
||||
const workspaceDB = await ensureSQLiteDB(workspaceId);
|
||||
return {
|
||||
path: workspaceDB.path,
|
||||
realPath: await fs.realpath(workspaceDB.path),
|
||||
} satisfies NamespaceHandlers;
|
||||
|
||||
export const dbEvents = {
|
||||
onExternalUpdate: (
|
||||
fn: (update: { workspaceId: string; update: Uint8Array }) => void
|
||||
) => {
|
||||
const sub = dbSubjects.externalUpdate.subscribe(fn);
|
||||
return () => {
|
||||
sub.unsubscribe();
|
||||
};
|
||||
},
|
||||
} satisfies NamespaceHandlers;
|
||||
} satisfies Record<string, MainEventListener>;
|
||||
198
apps/electron/layers/main/src/db/secondary-db.ts
Normal file
@@ -0,0 +1,198 @@
|
||||
import { debounce } from 'lodash-es';
|
||||
import * as Y from 'yjs';
|
||||
|
||||
import type { AppContext } from '../context';
|
||||
import { logger } from '../logger';
|
||||
import type { YOrigin } from '../type';
|
||||
import { mergeUpdateWorker } from '../workers';
|
||||
import { getWorkspaceMeta } from '../workspace';
|
||||
import { BaseSQLiteAdapter } from './base-db-adapter';
|
||||
import type { WorkspaceSQLiteDB } from './workspace-db-adapter';
|
||||
|
||||
const FLUSH_WAIT_TIME = 5000;
|
||||
const FLUSH_MAX_WAIT_TIME = 10000;
|
||||
|
||||
export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
|
||||
role = 'secondary';
|
||||
yDoc = new Y.Doc();
|
||||
firstConnected = false;
|
||||
|
||||
updateQueue: Uint8Array[] = [];
|
||||
|
||||
unsubscribers = new Set<() => void>();
|
||||
|
||||
constructor(
|
||||
public override path: string,
|
||||
public upstream: WorkspaceSQLiteDB
|
||||
) {
|
||||
super(path);
|
||||
this.setupAndListen();
|
||||
logger.debug('[SecondaryWorkspaceSQLiteDB] created', this.workspaceId);
|
||||
}
|
||||
|
||||
close() {
|
||||
this.db?.close();
|
||||
this.db = null;
|
||||
}
|
||||
|
||||
override destroy() {
|
||||
this.flushUpdateQueue();
|
||||
this.unsubscribers.forEach(unsub => unsub());
|
||||
this.db?.close();
|
||||
this.yDoc.destroy();
|
||||
}
|
||||
|
||||
get workspaceId() {
|
||||
return this.upstream.workspaceId;
|
||||
}
|
||||
|
||||
// do not update db immediately, instead, push to a queue
|
||||
// and flush the queue in a future time
|
||||
addUpdateToUpdateQueue(update: Uint8Array) {
|
||||
this.updateQueue.push(update);
|
||||
this.debouncedFlush();
|
||||
}
|
||||
|
||||
flushUpdateQueue() {
|
||||
logger.debug(
|
||||
'flushUpdateQueue',
|
||||
this.workspaceId,
|
||||
'queue',
|
||||
this.updateQueue.length
|
||||
);
|
||||
const updates = [...this.updateQueue];
|
||||
this.updateQueue = [];
|
||||
this.connect();
|
||||
this.addUpdateToSQLite(updates);
|
||||
this.close();
|
||||
}
|
||||
|
||||
// flush after 5s, but will not wait for more than 10s
|
||||
debouncedFlush = debounce(this.flushUpdateQueue, FLUSH_WAIT_TIME, {
|
||||
maxWait: FLUSH_MAX_WAIT_TIME,
|
||||
});
|
||||
|
||||
runCounter = 0;
|
||||
|
||||
// wrap the fn with connect and close
|
||||
// it only works for sync functions
|
||||
run = <T extends (...args: any[]) => any>(fn: T) => {
|
||||
try {
|
||||
if (this.runCounter === 0) {
|
||||
this.connect();
|
||||
}
|
||||
this.runCounter++;
|
||||
return fn();
|
||||
} catch (err) {
|
||||
logger.error(err);
|
||||
} finally {
|
||||
this.runCounter--;
|
||||
if (this.runCounter === 0) {
|
||||
this.close();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
setupAndListen() {
|
||||
if (this.firstConnected) {
|
||||
return;
|
||||
}
|
||||
this.firstConnected = true;
|
||||
|
||||
const onUpstreamUpdate = (update: Uint8Array, origin: YOrigin) => {
|
||||
if (origin === 'renderer') {
|
||||
// update to upstream yDoc should be replicated to self yDoc
|
||||
this.applyUpdate(update, 'upstream');
|
||||
}
|
||||
};
|
||||
|
||||
const onSelfUpdate = (update: Uint8Array, origin: YOrigin) => {
|
||||
// for self update from upstream, we need to push it to external DB
|
||||
if (origin === 'upstream') {
|
||||
this.addUpdateToUpdateQueue(update);
|
||||
}
|
||||
|
||||
if (origin === 'self') {
|
||||
this.upstream.applyUpdate(update, 'external');
|
||||
}
|
||||
};
|
||||
|
||||
// listen to upstream update
|
||||
this.upstream.yDoc.on('update', onUpstreamUpdate);
|
||||
this.yDoc.on('update', onSelfUpdate);
|
||||
|
||||
this.unsubscribers.add(() => {
|
||||
this.upstream.yDoc.off('update', onUpstreamUpdate);
|
||||
this.yDoc.off('update', onSelfUpdate);
|
||||
});
|
||||
|
||||
this.run(() => {
|
||||
// apply all updates from upstream
|
||||
const upstreamUpdate = this.upstream.getDocAsUpdates();
|
||||
// to initialize the yDoc, we need to apply all updates from the db
|
||||
this.applyUpdate(upstreamUpdate, 'upstream');
|
||||
|
||||
this.pull();
|
||||
});
|
||||
}
|
||||
|
||||
applyUpdate = (data: Uint8Array, origin: YOrigin = 'upstream') => {
|
||||
Y.applyUpdate(this.yDoc, data, origin);
|
||||
};
|
||||
|
||||
// TODO: have a better solution to handle blobs
|
||||
syncBlobs() {
|
||||
this.run(() => {
|
||||
// pull blobs
|
||||
const blobsKeys = this.getBlobKeys();
|
||||
const upstreamBlobsKeys = this.upstream.getBlobKeys();
|
||||
// put every missing blob to upstream
|
||||
for (const key of blobsKeys) {
|
||||
if (!upstreamBlobsKeys.includes(key)) {
|
||||
const blob = this.getBlob(key);
|
||||
if (blob) {
|
||||
this.upstream.addBlob(key, blob);
|
||||
logger.debug('syncBlobs', this.workspaceId, key);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* pull from external DB file and apply to embedded yDoc
|
||||
* workflow:
|
||||
* - connect to external db
|
||||
* - get updates
|
||||
* - apply updates to local yDoc
|
||||
* - get blobs and put new blobs to upstream
|
||||
* - disconnect
|
||||
*/
|
||||
async pull() {
|
||||
const start = performance.now();
|
||||
const updates = this.run(() => {
|
||||
// TODO: no need to get all updates, just get the latest ones (using a cursor, etc)?
|
||||
this.syncBlobs();
|
||||
return this.getUpdates().map(update => update.data);
|
||||
});
|
||||
|
||||
const merged = await mergeUpdateWorker(updates);
|
||||
this.applyUpdate(merged, 'self');
|
||||
|
||||
logger.debug(
|
||||
'pull external updates',
|
||||
this.path,
|
||||
updates.length,
|
||||
(performance.now() - start).toFixed(2),
|
||||
'ms'
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export async function getSecondaryWorkspaceDBPath(
|
||||
context: AppContext,
|
||||
workspaceId: string
|
||||
) {
|
||||
const meta = await getWorkspaceMeta(context, workspaceId);
|
||||
return meta?.secondaryDBPath;
|
||||
}
|
||||
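The secondary DB deliberately keeps its SQLite connection short-lived: updates are queued and flushed by the lodash debounce above (after roughly 5s, at most 10s), with connect/close wrapped around each flush and pull. A rough wiring sketch, mirroring what ensure-db.ts does; attachSecondary and its arguments are illustrative, not part of the diff:

import { ensureSQLiteDB } from './ensure-db';
import { SecondaryWorkspaceSQLiteDB } from './secondary-db';

// hypothetical helper: attach an external .affine file as a secondary copy of a workspace
async function attachSecondary(workspaceId: string, externalPath: string) {
  const primary = await ensureSQLiteDB(workspaceId);
  // the constructor runs setupAndListen(), so renderer updates on the primary yDoc
  // are queued and flushed into the external file in the background
  const secondary = new SecondaryWorkspaceSQLiteDB(externalPath, primary);
  await secondary.pull(); // pull edits from the external file back into the primary
  return secondary; // callers should secondary.destroy() when detaching
}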
7
apps/electron/layers/main/src/db/subjects.ts
Normal file
@@ -0,0 +1,7 @@
import { Subject } from 'rxjs';

export const dbSubjects = {
  // emit workspace id when the db file is missing
  fileMissing: new Subject<string>(),
  externalUpdate: new Subject<{ workspaceId: string; update: Uint8Array }>(),
};
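A minimal sketch of consuming these subjects directly in the main process; it mirrors what dbEvents.onExternalUpdate in db/index.ts does before the update is forwarded to renderer windows:

import { dbSubjects } from './subjects';

const subscription = dbSubjects.externalUpdate.subscribe(({ workspaceId, update }) => {
  // an update arrived from outside the renderer, e.g. pulled from the secondary db file
  console.log('external update for', workspaceId, update.byteLength, 'bytes');
});
// later: subscription.unsubscribe();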
106
apps/electron/layers/main/src/db/workspace-db-adapter.ts
Normal file
@@ -0,0 +1,106 @@
|
||||
import type { Database } from 'better-sqlite3';
|
||||
import { Subject } from 'rxjs';
|
||||
import * as Y from 'yjs';
|
||||
|
||||
import type { AppContext } from '../context';
|
||||
import { logger } from '../logger';
|
||||
import type { YOrigin } from '../type';
|
||||
import { mergeUpdateWorker } from '../workers';
|
||||
import { getWorkspaceMeta } from '../workspace';
|
||||
import { BaseSQLiteAdapter } from './base-db-adapter';
|
||||
import { dbSubjects } from './subjects';
|
||||
|
||||
export class WorkspaceSQLiteDB extends BaseSQLiteAdapter {
|
||||
role = 'primary';
|
||||
yDoc = new Y.Doc();
|
||||
firstConnected = false;
|
||||
|
||||
update$ = new Subject<void>();
|
||||
|
||||
constructor(public override path: string, public workspaceId: string) {
|
||||
super(path);
|
||||
}
|
||||
|
||||
override destroy() {
|
||||
this.db?.close();
|
||||
this.db = null;
|
||||
this.yDoc.destroy();
|
||||
|
||||
// when db is closed, we can safely remove it from ensure-db list
|
||||
this.update$.complete();
|
||||
}
|
||||
|
||||
getWorkspaceName = () => {
|
||||
return this.yDoc.getMap('space:meta').get('name') as string;
|
||||
};
|
||||
|
||||
async init(): Promise<Database | undefined> {
|
||||
const db = super.connect();
|
||||
|
||||
if (!this.firstConnected) {
|
||||
this.yDoc.on('update', (update: Uint8Array, origin: YOrigin) => {
|
||||
if (origin === 'renderer') {
|
||||
this.addUpdateToSQLite([update]);
|
||||
} else if (origin === 'external') {
|
||||
this.addUpdateToSQLite([update]);
|
||||
logger.debug('external update', this.workspaceId);
|
||||
dbSubjects.externalUpdate.next({
|
||||
workspaceId: this.workspaceId,
|
||||
update,
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
const updates = this.getUpdates();
|
||||
const merged = await mergeUpdateWorker(updates.map(update => update.data));
|
||||
|
||||
// to initialize the yDoc, we need to apply all updates from the db
|
||||
this.applyUpdate(merged, 'self');
|
||||
|
||||
this.firstConnected = true;
|
||||
this.update$.next();
|
||||
|
||||
return db;
|
||||
}
|
||||
|
||||
getDocAsUpdates = () => {
|
||||
return Y.encodeStateAsUpdate(this.yDoc);
|
||||
};
|
||||
|
||||
// non-blocking and use yDoc to validate the update
|
||||
// after that, the update is added to the db
|
||||
applyUpdate = (data: Uint8Array, origin: YOrigin = 'renderer') => {
|
||||
// todo: trim the updates when the number of records is too large
|
||||
// 1. store the current ydoc state in the db
|
||||
// 2. then delete the old updates
|
||||
// yjs-idb will always trim the db for the first time after DB is loaded
|
||||
Y.applyUpdate(this.yDoc, data, origin);
|
||||
};
|
||||
|
||||
override addBlob(key: string, value: Uint8Array) {
|
||||
const res = super.addBlob(key, value);
|
||||
this.update$.next();
|
||||
return res;
|
||||
}
|
||||
|
||||
override deleteBlob(key: string) {
|
||||
super.deleteBlob(key);
|
||||
this.update$.next();
|
||||
}
|
||||
|
||||
override addUpdateToSQLite(data: Uint8Array[]) {
|
||||
super.addUpdateToSQLite(data);
|
||||
this.update$.next();
|
||||
}
|
||||
}
|
||||
|
||||
export async function openWorkspaceDatabase(
|
||||
context: AppContext,
|
||||
workspaceId: string
|
||||
) {
|
||||
const meta = await getWorkspaceMeta(context, workspaceId);
|
||||
const db = new WorkspaceSQLiteDB(meta.mainDBPath, workspaceId);
|
||||
await db.init();
|
||||
return db;
|
||||
}
|
||||
@@ -1,25 +1,35 @@
|
||||
import path from 'node:path';
|
||||
|
||||
import { app } from 'electron';
|
||||
import { dialog, shell } from 'electron';
|
||||
import fs from 'fs-extra';
|
||||
import { nanoid } from 'nanoid';
|
||||
|
||||
import { appContext } from '../../context';
|
||||
import { logger } from '../../logger';
|
||||
import { appContext } from '../context';
|
||||
import { ensureSQLiteDB } from '../db/ensure-db';
|
||||
import { getWorkspaceDBPath, isValidDBFile } from '../db/sqlite';
|
||||
import { listWorkspaces } from '../workspace/workspace';
|
||||
import { isValidDBFile } from '../db/helper';
|
||||
import type { WorkspaceSQLiteDB } from '../db/workspace-db-adapter';
|
||||
import { logger } from '../logger';
|
||||
import {
|
||||
getWorkspaceDBPath,
|
||||
getWorkspaceMeta,
|
||||
listWorkspaces,
|
||||
storeWorkspaceMeta,
|
||||
} from '../workspace';
|
||||
|
||||
// NOTE:
|
||||
// we are using native dialogs because HTML dialogs do not give full file paths
|
||||
|
||||
export async function revealDBFile(workspaceId: string) {
|
||||
const workspaceDB = await ensureSQLiteDB(workspaceId);
|
||||
shell.showItemInFolder(await fs.realpath(workspaceDB.path));
|
||||
const meta = await getWorkspaceMeta(appContext, workspaceId);
|
||||
if (!meta) {
|
||||
return;
|
||||
}
|
||||
shell.showItemInFolder(meta.secondaryDBPath ?? meta.mainDBPath);
|
||||
}
|
||||
|
||||
// provide a backdoor to set dialog path for testing in playwright
|
||||
interface FakeDialogResult {
|
||||
export interface FakeDialogResult {
|
||||
canceled?: boolean;
|
||||
filePath?: string;
|
||||
filePaths?: string[];
|
||||
@@ -53,12 +63,20 @@ const ErrorMessages = [
|
||||
|
||||
type ErrorMessage = (typeof ErrorMessages)[number];
|
||||
|
||||
interface SaveDBFileResult {
|
||||
export interface SaveDBFileResult {
|
||||
filePath?: string;
|
||||
canceled?: boolean;
|
||||
error?: ErrorMessage;
|
||||
}
|
||||
|
||||
const extension = 'affine';
|
||||
|
||||
function getDefaultDBFileName(name: string, id: string) {
|
||||
const fileName = `${name}_${id}.${extension}`;
|
||||
// make sure fileName is a valid file name
|
||||
return fileName.replace(/[/\\?%*:|"<>]/g, '-');
|
||||
}
|
||||
|
||||
/**
|
||||
* This function is called when the user clicks the "Save" button in the "Save Workspace" dialog.
|
||||
*
|
||||
@@ -76,7 +94,13 @@ export async function saveDBFileAs(
|
||||
title: 'Save Workspace',
|
||||
showsTagField: false,
|
||||
buttonLabel: 'Save',
|
||||
defaultPath: `${db.getWorkspaceName()}_${workspaceId}.db`,
|
||||
filters: [
|
||||
{
|
||||
extensions: [extension],
|
||||
name: '',
|
||||
},
|
||||
],
|
||||
defaultPath: getDefaultDBFileName(db.getWorkspaceName(), workspaceId),
|
||||
message: 'Save Workspace as a SQLite Database file',
|
||||
}));
|
||||
const filePath = ret.filePath;
|
||||
@@ -98,7 +122,7 @@ export async function saveDBFileAs(
|
||||
}
|
||||
}
|
||||
|
||||
interface SelectDBFileLocationResult {
|
||||
export interface SelectDBFileLocationResult {
|
||||
filePath?: string;
|
||||
error?: ErrorMessage;
|
||||
canceled?: boolean;
|
||||
@@ -108,27 +132,20 @@ export async function selectDBFileLocation(): Promise<SelectDBFileLocationResult
|
||||
try {
|
||||
const ret =
|
||||
getFakedResult() ??
|
||||
(await dialog.showSaveDialog({
|
||||
properties: ['showOverwriteConfirmation'],
|
||||
title: 'Set database location',
|
||||
showsTagField: false,
|
||||
(await dialog.showOpenDialog({
|
||||
properties: ['openDirectory'],
|
||||
title: 'Set Workspace Storage Location',
|
||||
buttonLabel: 'Select',
|
||||
defaultPath: `workspace-storage.db`,
|
||||
defaultPath: app.getPath('documents'),
|
||||
message: "Select a location to store the workspace's database file",
|
||||
}));
|
||||
const filePath = ret.filePath;
|
||||
if (ret.canceled || !filePath) {
|
||||
const dir = ret.filePaths?.[0];
|
||||
if (ret.canceled || !dir) {
|
||||
return {
|
||||
canceled: true,
|
||||
};
|
||||
}
|
||||
// the same db file cannot be loaded twice
|
||||
if (await dbFileAlreadyLoaded(filePath)) {
|
||||
return {
|
||||
error: 'DB_FILE_ALREADY_LOADED',
|
||||
};
|
||||
}
|
||||
return { filePath };
|
||||
return { filePath: dir };
|
||||
} catch (err) {
|
||||
logger.error('selectDBFileLocation', err);
|
||||
return {
|
||||
@@ -137,7 +154,7 @@ export async function selectDBFileLocation(): Promise<SelectDBFileLocationResult
|
||||
}
|
||||
}
|
||||
|
||||
interface LoadDBFileResult {
|
||||
export interface LoadDBFileResult {
|
||||
workspaceId?: string;
|
||||
error?: ErrorMessage;
|
||||
canceled?: boolean;
|
||||
@@ -169,10 +186,10 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
|
||||
{
|
||||
name: 'SQLite Database',
|
||||
// do we want to support other file format?
|
||||
extensions: ['db'],
|
||||
extensions: ['db', 'affine'],
|
||||
},
|
||||
],
|
||||
message: 'Load Workspace from a SQLite Database file',
|
||||
message: 'Load Workspace from an AFFiNE file',
|
||||
}));
|
||||
const filePath = ret.filePaths?.[0];
|
||||
if (ret.canceled || !filePath) {
|
||||
@@ -196,14 +213,20 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
|
||||
return { error: 'DB_FILE_INVALID' }; // invalid db file
|
||||
}
|
||||
|
||||
// symlink the db file to a new workspace id
|
||||
// copy the db file to a new workspace id
|
||||
const workspaceId = nanoid(10);
|
||||
const linkedFilePath = await getWorkspaceDBPath(appContext, workspaceId);
|
||||
const internalFilePath = getWorkspaceDBPath(appContext, workspaceId);
|
||||
|
||||
await fs.ensureDir(path.join(appContext.appDataPath, 'workspaces'));
|
||||
|
||||
await fs.symlink(filePath, linkedFilePath, 'file');
|
||||
logger.info(`loadDBFile, symlink: ${filePath} -> ${linkedFilePath}`);
|
||||
await fs.copy(filePath, internalFilePath);
|
||||
logger.info(`loadDBFile, copy: ${filePath} -> ${internalFilePath}`);
|
||||
|
||||
await storeWorkspaceMeta(appContext, workspaceId, {
|
||||
id: workspaceId,
|
||||
mainDBPath: internalFilePath,
|
||||
secondaryDBPath: filePath,
|
||||
});
|
||||
|
||||
return { workspaceId };
|
||||
} catch (err) {
|
||||
@@ -214,7 +237,7 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
|
||||
}
|
||||
}
|
||||
|
||||
interface MoveDBFileResult {
|
||||
export interface MoveDBFileResult {
|
||||
filePath?: string;
|
||||
error?: ErrorMessage;
|
||||
canceled?: boolean;
|
||||
@@ -224,71 +247,78 @@ interface MoveDBFileResult {
|
||||
* This function is called when the user clicks the "Move" button in the "Move Workspace Storage" setting.
|
||||
*
|
||||
* It will
|
||||
* - move the source db file to a new location
|
||||
* - symlink the new location to the old db file
|
||||
* - copy the source db file to a new location
|
||||
* - remove the old db external file
|
||||
* - update the external db file path in the workspace meta
|
||||
* - return the new file path
|
||||
*/
|
||||
export async function moveDBFile(
|
||||
workspaceId: string,
|
||||
dbFileLocation?: string
|
||||
dbFileDir?: string
|
||||
): Promise<MoveDBFileResult> {
|
||||
let db: WorkspaceSQLiteDB | null = null;
|
||||
try {
|
||||
const db = await ensureSQLiteDB(workspaceId);
|
||||
db = await ensureSQLiteDB(workspaceId);
|
||||
|
||||
// get the real file path of db
|
||||
const realpath = await fs.realpath(db.path);
|
||||
const isLink = realpath !== db.path;
|
||||
const meta = await getWorkspaceMeta(appContext, workspaceId);
|
||||
|
||||
const newFilePath =
|
||||
dbFileLocation ||
|
||||
const oldDir = meta.secondaryDBPath
|
||||
? path.dirname(meta.secondaryDBPath)
|
||||
: null;
|
||||
const defaultDir = oldDir ?? app.getPath('documents');
|
||||
|
||||
const newName = getDefaultDBFileName(db.getWorkspaceName(), workspaceId);
|
||||
|
||||
const newDirPath =
|
||||
dbFileDir ??
|
||||
(
|
||||
getFakedResult() ||
|
||||
(await dialog.showSaveDialog({
|
||||
properties: ['showOverwriteConfirmation'],
|
||||
getFakedResult() ??
|
||||
(await dialog.showOpenDialog({
|
||||
properties: ['openDirectory'],
|
||||
title: 'Move Workspace Storage',
|
||||
showsTagField: false,
|
||||
buttonLabel: 'Save',
|
||||
defaultPath: realpath,
|
||||
buttonLabel: 'Move',
|
||||
defaultPath: defaultDir,
|
||||
message: 'Move Workspace storage file',
|
||||
}))
|
||||
).filePath;
|
||||
).filePaths?.[0];
|
||||
|
||||
// skips if
|
||||
// - user canceled the dialog
|
||||
// - user selected the same file
|
||||
// - user selected the same file in the link file in app data dir
|
||||
if (!newFilePath || newFilePath === realpath || db.path === newFilePath) {
|
||||
// - user selected the same dir
|
||||
if (!newDirPath || newDirPath === oldDir) {
|
||||
return {
|
||||
canceled: true,
|
||||
};
|
||||
}
|
||||
|
||||
const newFilePath = path.join(newDirPath, newName);
|
||||
|
||||
if (await fs.pathExists(newFilePath)) {
|
||||
return {
|
||||
error: 'FILE_ALREADY_EXISTS',
|
||||
};
|
||||
}
|
||||
|
||||
db.db.close();
|
||||
logger.info(`[moveDBFile] copy ${meta.mainDBPath} -> ${newFilePath}`);
|
||||
|
||||
if (isLink) {
|
||||
// remove the old link to unblock new link
|
||||
await fs.unlink(db.path);
|
||||
await fs.copy(meta.mainDBPath, newFilePath);
|
||||
|
||||
// remove the old db file, but we don't care if it fails
|
||||
if (meta.secondaryDBPath) {
|
||||
fs.remove(meta.secondaryDBPath);
|
||||
}
|
||||
|
||||
await fs.move(realpath, newFilePath, {
|
||||
overwrite: true,
|
||||
// update meta
|
||||
await storeWorkspaceMeta(appContext, workspaceId, {
|
||||
secondaryDBPath: newFilePath,
|
||||
});
|
||||
|
||||
await fs.ensureSymlink(newFilePath, db.path, 'file');
|
||||
logger.info(`openMoveDBFileDialog symlink: ${realpath} -> ${newFilePath}`);
|
||||
db.reconnectDB();
|
||||
|
||||
return {
|
||||
filePath: newFilePath,
|
||||
};
|
||||
} catch (err) {
|
||||
logger.error('moveDBFile', err);
|
||||
db?.destroy();
|
||||
logger.error('[moveDBFile]', err);
|
||||
return {
|
||||
error: 'UNKNOWN_ERROR',
|
||||
};
|
||||
@@ -297,7 +327,6 @@ export async function moveDBFile(
|
||||
|
||||
async function dbFileAlreadyLoaded(path: string) {
|
||||
const meta = await listWorkspaces(appContext);
|
||||
const realpath = await fs.realpath(path);
|
||||
const paths = meta.map(m => m[1].realpath);
|
||||
return paths.includes(realpath);
|
||||
const paths = meta.map(m => m[1].secondaryDBPath);
|
||||
return paths.includes(path);
|
||||
}
|
||||
@@ -18,7 +18,7 @@ export const dialogHandlers = {
|
||||
saveDBFileAs: async (_, workspaceId: string) => {
|
||||
return saveDBFileAs(workspaceId);
|
||||
},
|
||||
moveDBFile: async (_, workspaceId: string, dbFileLocation?: string) => {
|
||||
moveDBFile: (_, workspaceId: string, dbFileLocation?: string) => {
|
||||
return moveDBFile(workspaceId, dbFileLocation);
|
||||
},
|
||||
selectDBFileLocation: async () => {
|
||||
@@ -1,14 +1,16 @@
|
||||
import { app, BrowserWindow } from 'electron';
|
||||
|
||||
import { logger } from '../logger';
|
||||
import { applicationMenuEvents } from './application-menu';
|
||||
import { dbEvents } from './db';
|
||||
import { updaterEvents } from './updater';
|
||||
import { logger } from './logger';
|
||||
import { updaterEvents } from './updater/event';
|
||||
import { workspaceEvents } from './workspace';
|
||||
|
||||
export const allEvents = {
|
||||
applicationMenu: applicationMenuEvents,
|
||||
db: dbEvents,
|
||||
updater: updaterEvents,
|
||||
applicationMenu: applicationMenuEvents,
|
||||
workspace: workspaceEvents,
|
||||
};
|
||||
|
||||
function getActiveWindows() {
|
||||
@@ -19,9 +21,18 @@ export function registerEvents() {
|
||||
// register events
|
||||
for (const [namespace, namespaceEvents] of Object.entries(allEvents)) {
|
||||
for (const [key, eventRegister] of Object.entries(namespaceEvents)) {
|
||||
const subscription = eventRegister((...args: any) => {
|
||||
const subscription = eventRegister((...args: any[]) => {
|
||||
const chan = `${namespace}:${key}`;
|
||||
logger.info('[ipc-event]', chan, args);
|
||||
logger.info(
|
||||
'[ipc-event]',
|
||||
chan,
|
||||
args.filter(
|
||||
a =>
|
||||
a !== undefined &&
|
||||
typeof a !== 'function' &&
|
||||
typeof a !== 'object'
|
||||
)
|
||||
);
|
||||
getActiveWindows().forEach(win => win.webContents.send(chan, ...args));
|
||||
});
|
||||
app.on('before-quit', () => {
|
||||
@@ -1,38 +0,0 @@
|
||||
import { Subject } from 'rxjs';
|
||||
|
||||
import type { MainEventListener } from './type';
|
||||
|
||||
interface DBFilePathMeta {
|
||||
workspaceId: string;
|
||||
path: string;
|
||||
realPath: string;
|
||||
}
|
||||
|
||||
export const dbSubjects = {
|
||||
// emit workspace ids
|
||||
dbFileMissing: new Subject<string>(),
|
||||
// emit workspace ids
|
||||
dbFileUpdate: new Subject<string>(),
|
||||
dbFilePathChange: new Subject<DBFilePathMeta>(),
|
||||
};
|
||||
|
||||
export const dbEvents = {
|
||||
onDBFileMissing: (fn: (workspaceId: string) => void) => {
|
||||
const sub = dbSubjects.dbFileMissing.subscribe(fn);
|
||||
return () => {
|
||||
sub.unsubscribe();
|
||||
};
|
||||
},
|
||||
onDBFileUpdate: (fn: (workspaceId: string) => void) => {
|
||||
const sub = dbSubjects.dbFileUpdate.subscribe(fn);
|
||||
return () => {
|
||||
sub.unsubscribe();
|
||||
};
|
||||
},
|
||||
onDBFilePathChange: (fn: (meta: DBFilePathMeta) => void) => {
|
||||
const sub = dbSubjects.dbFilePathChange.subscribe(fn);
|
||||
return () => {
|
||||
sub.unsubscribe();
|
||||
};
|
||||
},
|
||||
} satisfies Record<string, MainEventListener>;
|
||||
@@ -1,9 +0,0 @@
|
||||
export * from './register';
|
||||
|
||||
import { applicationMenuSubjects } from './application-menu';
|
||||
import { dbSubjects } from './db';
|
||||
|
||||
export const subjects = {
|
||||
db: dbSubjects,
|
||||
applicationMenu: applicationMenuSubjects,
|
||||
};
|
||||
@@ -1 +0,0 @@
|
||||
export type MainEventListener = (...args: any[]) => () => void;
|
||||
@@ -1,21 +0,0 @@
|
||||
import { Subject } from 'rxjs';
|
||||
|
||||
import type { MainEventListener } from './type';
|
||||
|
||||
interface UpdateMeta {
|
||||
version: string;
|
||||
}
|
||||
|
||||
export const updaterSubjects = {
|
||||
// means it is ready for restart and install the new version
|
||||
clientUpdateReady: new Subject<UpdateMeta>(),
|
||||
};
|
||||
|
||||
export const updaterEvents = {
|
||||
onClientUpdateReady: (fn: (versionMeta: UpdateMeta) => void) => {
|
||||
const sub = updaterSubjects.clientUpdateReady.subscribe(fn);
|
||||
return () => {
|
||||
sub.unsubscribe();
|
||||
};
|
||||
},
|
||||
} satisfies Record<string, MainEventListener>;
|
||||
10
apps/electron/layers/main/src/export/index.ts
Normal file
@@ -0,0 +1,10 @@
import type { NamespaceHandlers } from '../type';
import { savePDFFileAs } from './pdf';

export const exportHandlers = {
  savePDFFileAs: async (_, title: string) => {
    return savePDFFileAs(title);
  },
} satisfies NamespaceHandlers;

export * from './pdf';
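Like the other namespaces, exportHandlers is registered per namespace:key channel and exposed to the renderer through the preload's exposeInMainWorld. A rough renderer-side sketch of calling it; the apis global name is an assumption, only the result shape (SavePDFFileResult) comes from this diff:

// renderer-side sketch; `apis` is whatever name the preload exposes via exposeInMainWorld
const apis = (window as any).apis;
const result = await apis.export.savePDFFileAs('My Page Title');
if (result.error) {
  console.error('export failed:', result.error); // 'FILE_ALREADY_EXISTS' | 'UNKNOWN_ERROR'
} else if (!result.canceled) {
  console.log('PDF written to', result.filePath);
}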
61
apps/electron/layers/main/src/export/pdf.ts
Normal file
@@ -0,0 +1,61 @@
|
||||
import { BrowserWindow, dialog, shell } from 'electron';
|
||||
import fs from 'fs-extra';
|
||||
|
||||
import { logger } from '../logger';
|
||||
import type { ErrorMessage } from './utils';
|
||||
import { getFakedResult } from './utils';
|
||||
|
||||
export interface SavePDFFileResult {
|
||||
filePath?: string;
|
||||
canceled?: boolean;
|
||||
error?: ErrorMessage;
|
||||
}
|
||||
|
||||
/**
|
||||
* This function is called when the user clicks the "Export to PDF" button in the electron.
|
||||
*
|
||||
* It will just copy the file to the given path
|
||||
*/
|
||||
export async function savePDFFileAs(
|
||||
pageTitle: string
|
||||
): Promise<SavePDFFileResult> {
|
||||
try {
|
||||
const ret =
|
||||
getFakedResult() ??
|
||||
(await dialog.showSaveDialog({
|
||||
properties: ['showOverwriteConfirmation'],
|
||||
title: 'Save PDF',
|
||||
showsTagField: false,
|
||||
buttonLabel: 'Save',
|
||||
defaultPath: `${pageTitle}.pdf`,
|
||||
message: 'Save Page as a PDF file',
|
||||
}));
|
||||
const filePath = ret.filePath;
|
||||
if (ret.canceled || !filePath) {
|
||||
return {
|
||||
canceled: true,
|
||||
};
|
||||
}
|
||||
|
||||
await BrowserWindow.getFocusedWindow()
|
||||
?.webContents.printToPDF({
|
||||
pageSize: 'A4',
|
||||
printBackground: true,
|
||||
landscape: false,
|
||||
})
|
||||
.then(data => {
|
||||
fs.writeFile(filePath, data, error => {
|
||||
if (error) throw error;
|
||||
logger.log(`Wrote PDF successfully to ${filePath}`);
|
||||
});
|
||||
});
|
||||
|
||||
shell.openPath(filePath);
|
||||
return { filePath };
|
||||
} catch (err) {
|
||||
logger.error('savePDFFileAs', err);
|
||||
return {
|
||||
error: 'UNKNOWN_ERROR',
|
||||
};
|
||||
}
|
||||
}
|
||||
24
apps/electron/layers/main/src/export/utils.ts
Normal file
@@ -0,0 +1,24 @@
// provide a backdoor to set the dialog path for testing in playwright
interface FakeDialogResult {
  canceled?: boolean;
  filePath?: string;
  filePaths?: string[];
}
// the faked result will be consumed by the next dialog call;
// once it has been read, it is reset to undefined
let fakeDialogResult: FakeDialogResult | undefined = undefined;
export function getFakedResult() {
  const result = fakeDialogResult;
  fakeDialogResult = undefined;
  return result;
}

export function setFakeDialogResult(result: FakeDialogResult | undefined) {
  fakeDialogResult = result;
  // for convenience, fill filePaths with filePath if it is not set
  if (result?.filePaths === undefined && result?.filePath !== undefined) {
    result.filePaths = [result.filePath];
  }
}
const ErrorMessages = ['FILE_ALREADY_EXISTS', 'UNKNOWN_ERROR'] as const;
export type ErrorMessage = (typeof ErrorMessages)[number];
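A compact sketch of this backdoor in use from a vitest spec (electron's dialog and BrowserWindow would still need to be mocked, as in the dialog spec earlier in this diff; the import paths are illustrative):

import { expect, test } from 'vitest';

import { savePDFFileAs } from '../export/pdf';
import { setFakeDialogResult } from '../export/utils';

test('savePDFFileAs consumes a faked dialog result', async () => {
  // pretend the user already picked a location, so no native dialog is shown
  setFakeDialogResult({ filePath: '/tmp/out.pdf' });
  const res = await savePDFFileAs('My Page'); // reads the fake once, then resets it
  expect(res.filePath).toBe('/tmp/out.pdf');
});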
@@ -2,4 +2,35 @@ import { allEvents as events } from './events';
|
||||
import { allHandlers as handlers } from './handlers';
|
||||
|
||||
// this will be used by preload script to expose all handlers and events to the renderer process
|
||||
// - register in exposeInMainWorld in preload
|
||||
// - provide type hints
|
||||
export { events, handlers };
|
||||
|
||||
export const getExposedMeta = () => {
|
||||
const handlersMeta = Object.entries(handlers).map(
|
||||
([namespace, namespaceHandlers]) => {
|
||||
return [
|
||||
namespace,
|
||||
Object.keys(namespaceHandlers).map(handlerName => handlerName),
|
||||
];
|
||||
}
|
||||
);
|
||||
|
||||
const eventsMeta = Object.entries(events).map(
|
||||
([namespace, namespaceHandlers]) => {
|
||||
return [
|
||||
namespace,
|
||||
Object.keys(namespaceHandlers).map(handlerName => handlerName),
|
||||
];
|
||||
}
|
||||
);
|
||||
|
||||
return {
|
||||
handlers: handlersMeta,
|
||||
events: eventsMeta,
|
||||
};
|
||||
};
|
||||
|
||||
export type MainIPCHandlerMap = typeof handlers;
|
||||
|
||||
export type MainIPCEventMap = typeof events;
|
||||
|
||||
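getExposedMeta only carries names, not functions, so it can be passed across the context bridge. With the namespaces registered in this diff, its return value has roughly the following shape; the lists are abridged and illustrative, not exhaustive:

// illustrative shape of getExposedMeta()
const meta = {
  handlers: [
    ['db', ['getDocAsUpdates', 'getBlobKeys', 'getDefaultStorageLocation', 'getDBFilePath' /* ... */]],
    ['dialog', ['saveDBFileAs', 'moveDBFile', 'selectDBFileLocation' /* ... */]],
    ['export', ['savePDFFileAs']],
    // debug, ui, updater, workspace ...
  ],
  events: [
    ['db', ['onExternalUpdate']],
    ['applicationMenu', ['onNewPageAction']],
    ['updater', ['onClientUpdateReady']],
    // workspace ...
  ],
};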
@@ -1,21 +1,14 @@
|
||||
import { ipcMain } from 'electron';
|
||||
|
||||
import { getLogFilePath, logger, revealLogFile } from '../logger';
|
||||
import { dbHandlers } from './db';
|
||||
import { dialogHandlers } from './dialog';
|
||||
import { exportHandlers } from './export';
|
||||
import { getLogFilePath, logger, revealLogFile } from './logger';
|
||||
import type { NamespaceHandlers } from './type';
|
||||
import { uiHandlers } from './ui';
|
||||
import { updaterHandlers } from './updater';
|
||||
import { workspaceHandlers } from './workspace';
|
||||
|
||||
type IsomorphicHandler = (
|
||||
e: Electron.IpcMainInvokeEvent,
|
||||
...args: any[]
|
||||
) => Promise<any>;
|
||||
|
||||
type NamespaceHandlers = {
|
||||
[key: string]: IsomorphicHandler;
|
||||
};
|
||||
|
||||
export const debugHandlers = {
|
||||
revealLogFile: async () => {
|
||||
return revealLogFile();
|
||||
@@ -27,12 +20,13 @@ export const debugHandlers = {
|
||||
|
||||
// Note: all of these handlers will be the single-source-of-truth for the apis exposed to the renderer process
|
||||
export const allHandlers = {
|
||||
workspace: workspaceHandlers,
|
||||
ui: uiHandlers,
|
||||
db: dbHandlers,
|
||||
dialog: dialogHandlers,
|
||||
debug: debugHandlers,
|
||||
dialog: dialogHandlers,
|
||||
ui: uiHandlers,
|
||||
export: exportHandlers,
|
||||
updater: updaterHandlers,
|
||||
workspace: workspaceHandlers,
|
||||
} satisfies Record<string, NamespaceHandlers>;
|
||||
|
||||
export const registerHandlers = () => {
|
||||
@@ -1,94 +0,0 @@
|
||||
import { watch } from 'chokidar';
|
||||
import { app } from 'electron';
|
||||
|
||||
import { appContext } from '../../context';
|
||||
import { subjects } from '../../events';
|
||||
import { logger } from '../../logger';
|
||||
import { debounce, ts } from '../../utils';
|
||||
import type { WorkspaceSQLiteDB } from './sqlite';
|
||||
import { openWorkspaceDatabase } from './sqlite';
|
||||
|
||||
const dbMapping = new Map<string, Promise<WorkspaceSQLiteDB>>();
|
||||
const dbWatchers = new Map<string, () => void>();
|
||||
|
||||
// if we removed the file, we will stop watching it
|
||||
function startWatchingDBFile(db: WorkspaceSQLiteDB) {
|
||||
if (dbWatchers.has(db.workspaceId)) {
|
||||
return dbWatchers.get(db.workspaceId);
|
||||
}
|
||||
logger.info('watch db file', db.path);
|
||||
const watcher = watch(db.path);
|
||||
|
||||
const debounceOnChange = debounce(() => {
|
||||
logger.info(
|
||||
'db file changed on disk',
|
||||
db.workspaceId,
|
||||
ts() - db.lastUpdateTime,
|
||||
'ms'
|
||||
);
|
||||
// reconnect db
|
||||
db.reconnectDB();
|
||||
subjects.db.dbFileUpdate.next(db.workspaceId);
|
||||
}, 1000);
|
||||
|
||||
watcher.on('change', () => {
|
||||
const currentTime = ts();
|
||||
if (currentTime - db.lastUpdateTime > 100) {
|
||||
debounceOnChange();
|
||||
}
|
||||
});
|
||||
|
||||
dbWatchers.set(db.workspaceId, () => {
|
||||
watcher.close();
|
||||
});
|
||||
|
||||
// todo: there is still a possibility that the file is deleted
|
||||
// but we didn't get the event soon enough and another event tries to
|
||||
// access the db
|
||||
watcher.on('unlink', () => {
|
||||
logger.info('db file missing', db.workspaceId);
|
||||
subjects.db.dbFileMissing.next(db.workspaceId);
|
||||
// cleanup
|
||||
watcher.close().then(() => {
|
||||
db.destroy();
|
||||
dbWatchers.delete(db.workspaceId);
|
||||
dbMapping.delete(db.workspaceId);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export async function ensureSQLiteDB(id: string) {
|
||||
let workspaceDB = dbMapping.get(id);
|
||||
if (!workspaceDB) {
|
||||
logger.info('[ensureSQLiteDB] open db connection', id);
|
||||
workspaceDB = openWorkspaceDatabase(appContext, id);
|
||||
dbMapping.set(id, workspaceDB);
|
||||
startWatchingDBFile(await workspaceDB);
|
||||
}
|
||||
return await workspaceDB;
|
||||
}
|
||||
|
||||
export async function disconnectSQLiteDB(id: string) {
|
||||
const dbp = dbMapping.get(id);
|
||||
if (dbp) {
|
||||
const db = await dbp;
|
||||
logger.info('close db connection', id);
|
||||
db.destroy();
|
||||
dbWatchers.get(id)?.();
|
||||
dbWatchers.delete(id);
|
||||
dbMapping.delete(id);
|
||||
}
|
||||
}
|
||||
|
||||
export async function cleanupSQLiteDBs() {
|
||||
for (const [id] of dbMapping) {
|
||||
logger.info('close db connection', id);
|
||||
await disconnectSQLiteDB(id);
|
||||
}
|
||||
dbMapping.clear();
|
||||
dbWatchers.clear();
|
||||
}
|
||||
|
||||
app?.on('before-quit', async () => {
|
||||
await cleanupSQLiteDBs();
|
||||
});
|
||||
@@ -1,244 +0,0 @@
|
||||
import path from 'node:path';
|
||||
|
||||
import type { Database } from 'better-sqlite3';
|
||||
import sqlite from 'better-sqlite3';
|
||||
import fs from 'fs-extra';
|
||||
import * as Y from 'yjs';
|
||||
|
||||
import type { AppContext } from '../../context';
|
||||
import { dbSubjects } from '../../events/db';
|
||||
import { logger } from '../../logger';
|
||||
import { ts } from '../../utils';
|
||||
|
||||
const schemas = [
|
||||
`CREATE TABLE IF NOT EXISTS "updates" (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
data BLOB NOT NULL,
|
||||
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
|
||||
)`,
|
||||
`CREATE TABLE IF NOT EXISTS "blobs" (
|
||||
key TEXT PRIMARY KEY NOT NULL,
|
||||
data BLOB NOT NULL,
|
||||
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
|
||||
)`,
|
||||
];
|
||||
|
||||
interface UpdateRow {
|
||||
id: number;
|
||||
data: Buffer;
|
||||
timestamp: string;
|
||||
}
|
||||
|
||||
interface BlobRow {
|
||||
key: string;
|
||||
data: Buffer;
|
||||
timestamp: string;
|
||||
}
|
||||
|
||||
const SQLITE_ORIGIN = Symbol('sqlite-origin');
|
||||
|
||||
export class WorkspaceSQLiteDB {
|
||||
db: Database;
|
||||
ydoc = new Y.Doc();
|
||||
firstConnect = false;
|
||||
lastUpdateTime = ts();
|
||||
|
||||
constructor(public path: string, public workspaceId: string) {
|
||||
this.db = this.reconnectDB();
|
||||
}
|
||||
|
||||
// release resources
|
||||
destroy = () => {
|
||||
this.db?.close();
|
||||
this.ydoc.destroy();
|
||||
};
|
||||
|
||||
getWorkspaceName = () => {
|
||||
return this.ydoc.getMap('space:meta').get('name') as string;
|
||||
};
|
||||
|
||||
reconnectDB = () => {
|
||||
logger.log('open db', this.workspaceId);
|
||||
if (this.db) {
|
||||
this.db.close();
|
||||
}
|
||||
|
||||
fs.realpath(this.path)
|
||||
.then(realPath => {
|
||||
dbSubjects.dbFilePathChange.next({
|
||||
workspaceId: this.workspaceId,
|
||||
path: this.path,
|
||||
realPath,
|
||||
});
|
||||
})
|
||||
.catch(() => {
|
||||
// skip error
|
||||
});
|
||||
|
||||
// use cached version?
|
||||
const db = (this.db = sqlite(this.path));
|
||||
db.exec(schemas.join(';'));
|
||||
|
||||
if (!this.firstConnect) {
|
||||
this.ydoc.on('update', (update: Uint8Array, origin) => {
|
||||
if (origin !== SQLITE_ORIGIN) {
|
||||
this.addUpdateToSQLite(update);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
Y.transact(this.ydoc, () => {
|
||||
const updates = this.getUpdates();
|
||||
updates.forEach(update => {
|
||||
// give SQLITE_ORIGIN to skip self update
|
||||
Y.applyUpdate(this.ydoc, update.data, SQLITE_ORIGIN);
|
||||
});
|
||||
});
|
||||
|
||||
this.lastUpdateTime = ts();
|
||||
|
||||
if (this.firstConnect) {
|
||||
logger.info('db reconnected', this.workspaceId);
|
||||
} else {
|
||||
logger.info('db connected', this.workspaceId);
|
||||
}
|
||||
|
||||
this.firstConnect = true;
|
||||
|
||||
return db;
|
||||
};
|
||||
|
||||
getDocAsUpdates = () => {
|
||||
return Y.encodeStateAsUpdate(this.ydoc);
|
||||
};
|
||||
|
||||
// non-blocking; the yDoc is used to validate the update
// after that, the update is added to the db
|
||||
applyUpdate = (data: Uint8Array) => {
|
||||
Y.applyUpdate(this.ydoc, data);
|
||||
|
||||
// todo: trim the updates when the number of records is too large
|
||||
// 1. store the current ydoc state in the db
|
||||
// 2. then delete the old updates
|
||||
// yjs-idb will always trim the db for the first time after DB is loaded
|
||||
this.lastUpdateTime = ts();
|
||||
logger.debug('applyUpdate', this.workspaceId, this.lastUpdateTime);
|
||||
};
|
||||
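The TODO above could be implemented roughly as follows: collapse all stored update rows into one merged snapshot inside a single transaction. This is a hypothetical class-member sketch, not part of this diff.

// Hypothetical sketch of the TODO above: replace the updates table contents
// with a single row holding the current doc state.
private trimUpdates = () => {
  const merged = Y.encodeStateAsUpdate(this.ydoc);
  const run = this.db.transaction(() => {
    this.db.prepare('DELETE FROM updates').run();
    this.db.prepare('INSERT INTO updates (data) VALUES (?)').run(merged);
  });
  run();
};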
|
||||
addBlob = (key: string, data: Uint8Array) => {
|
||||
this.lastUpdateTime = ts();
|
||||
try {
|
||||
const statement = this.db.prepare(
|
||||
'INSERT INTO blobs (key, data) VALUES (?, ?) ON CONFLICT(key) DO UPDATE SET data = ?'
|
||||
);
|
||||
statement.run(key, data, data);
|
||||
return key;
|
||||
} catch (error) {
|
||||
logger.error('addBlob', error);
|
||||
}
|
||||
};
|
||||
|
||||
getBlob = (key: string) => {
|
||||
try {
|
||||
const statement = this.db.prepare('SELECT data FROM blobs WHERE key = ?');
|
||||
const row = statement.get(key) as BlobRow;
|
||||
if (!row) {
|
||||
return null;
|
||||
}
|
||||
return row.data;
|
||||
} catch (error) {
|
||||
logger.error('getBlob', error);
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
deleteBlob = (key: string) => {
|
||||
this.lastUpdateTime = ts();
|
||||
try {
|
||||
const statement = this.db.prepare('DELETE FROM blobs WHERE key = ?');
|
||||
statement.run(key);
|
||||
} catch (error) {
|
||||
logger.error('deleteBlob', error);
|
||||
}
|
||||
};
|
||||
|
||||
getPersistentBlobKeys = () => {
|
||||
try {
|
||||
const statement = this.db.prepare('SELECT key FROM blobs');
|
||||
const rows = statement.all() as BlobRow[];
|
||||
return rows.map(row => row.key);
|
||||
} catch (error) {
|
||||
logger.error('getPersistentBlobKeys', error);
|
||||
return [];
|
||||
}
|
||||
};
|
||||
|
||||
private getUpdates = () => {
|
||||
try {
|
||||
const statement = this.db.prepare('SELECT * FROM updates');
|
||||
const rows = statement.all() as UpdateRow[];
|
||||
return rows;
|
||||
} catch (error) {
|
||||
logger.error('getUpdates', error);
|
||||
return [];
|
||||
}
|
||||
};
|
||||
|
||||
// batch writes instead of a write per keystroke?
|
||||
private addUpdateToSQLite = (data: Uint8Array) => {
|
||||
try {
|
||||
const start = performance.now();
|
||||
const statement = this.db.prepare(
|
||||
'INSERT INTO updates (data) VALUES (?)'
|
||||
);
|
||||
statement.run(data);
|
||||
logger.debug(
|
||||
'addUpdateToSQLite',
|
||||
this.workspaceId,
|
||||
'length:',
|
||||
data.length,
|
||||
performance.now() - start,
|
||||
'ms'
|
||||
);
|
||||
} catch (error) {
|
||||
logger.error('addUpdateToSQLite', error);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
export async function getWorkspaceDBPath(
|
||||
context: AppContext,
|
||||
workspaceId: string
|
||||
) {
|
||||
const basePath = path.join(context.appDataPath, 'workspaces', workspaceId);
|
||||
await fs.ensureDir(basePath);
|
||||
return path.join(basePath, 'storage.db');
|
||||
}
|
||||
|
||||
export async function openWorkspaceDatabase(
|
||||
context: AppContext,
|
||||
workspaceId: string
|
||||
) {
|
||||
const dbPath = await getWorkspaceDBPath(context, workspaceId);
|
||||
return new WorkspaceSQLiteDB(dbPath, workspaceId);
|
||||
}
|
||||
|
||||
export function isValidDBFile(path: string) {
|
||||
try {
|
||||
const db = sqlite(path);
|
||||
// check if db has two tables, one for updates and one for blobs
|
||||
const statement = db.prepare(
|
||||
`SELECT name FROM sqlite_schema WHERE type='table'`
|
||||
);
|
||||
const rows = statement.all() as { name: string }[];
|
||||
const tableNames = rows.map(row => row.name);
|
||||
if (!tableNames.includes('updates') || !tableNames.includes('blobs')) {
|
||||
return false;
|
||||
}
|
||||
db.close();
|
||||
return true;
|
||||
} catch (error) {
|
||||
logger.error('isValidDBFile', error);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -1 +0,0 @@
export * from './register';
@@ -1,8 +0,0 @@
export type IsomorphicHandler = (
  e: Electron.IpcMainInvokeEvent,
  ...args: any[]
) => Promise<any>;

export type NamespaceHandlers = {
  [key: string]: IsomorphicHandler;
};
@@ -1,10 +0,0 @@
import type { NamespaceHandlers } from '../type';

export const updaterHandlers = {
  updateClient: async () => {
    const { updateClient } = await import('./updater');
    return updateClient();
  },
} satisfies NamespaceHandlers;

export * from './updater';
@@ -1,72 +0,0 @@
|
||||
import type { AppUpdater } from 'electron-updater';
|
||||
import { z } from 'zod';
|
||||
|
||||
import { isMacOS } from '../../../../utils';
|
||||
import { updaterSubjects } from '../../events/updater';
|
||||
import { logger } from '../../logger';
|
||||
|
||||
export const ReleaseTypeSchema = z.enum([
|
||||
'stable',
|
||||
'beta',
|
||||
'canary',
|
||||
'internal',
|
||||
]);
|
||||
|
||||
export const envBuildType = (process.env.BUILD_TYPE || 'canary')
|
||||
.trim()
|
||||
.toLowerCase();
|
||||
export const buildType = ReleaseTypeSchema.parse(envBuildType);
|
||||
const mode = process.env.NODE_ENV;
|
||||
const isDev = mode === 'development';
|
||||
|
||||
let _autoUpdater: AppUpdater | null = null;
|
||||
|
||||
export const updateClient = async () => {
|
||||
_autoUpdater?.quitAndInstall();
|
||||
};
|
||||
|
||||
export const registerUpdater = async () => {
|
||||
// require it will cause some side effects and will break generate-main-exposed-meta,
|
||||
// so we wrap it in a function
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const { autoUpdater } = require('electron-updater');
|
||||
|
||||
_autoUpdater = autoUpdater;
|
||||
|
||||
if (!_autoUpdater) {
|
||||
return;
|
||||
}
|
||||
|
||||
_autoUpdater.autoDownload = false;
|
||||
_autoUpdater.allowPrerelease = buildType !== 'stable';
|
||||
_autoUpdater.autoInstallOnAppQuit = false;
|
||||
_autoUpdater.autoRunAppAfterInstall = true;
|
||||
_autoUpdater.setFeedURL({
|
||||
channel: buildType,
|
||||
provider: 'github',
|
||||
repo: buildType !== 'internal' ? 'AFFiNE' : 'AFFiNE-Releases',
|
||||
owner: 'toeverything',
|
||||
releaseType: buildType === 'stable' ? 'release' : 'prerelease',
|
||||
});
|
||||
|
||||
if (isMacOS()) {
|
||||
_autoUpdater.on('update-available', () => {
|
||||
_autoUpdater!.downloadUpdate();
|
||||
logger.info('Update available, downloading...');
|
||||
});
|
||||
_autoUpdater.on('download-progress', e => {
|
||||
logger.info(`Download progress: ${e.percent}`);
|
||||
});
|
||||
_autoUpdater.on('update-downloaded', e => {
|
||||
updaterSubjects.clientUpdateReady.next({
|
||||
version: e.version,
|
||||
});
|
||||
logger.info('Update downloaded, ready to install');
|
||||
});
|
||||
_autoUpdater.on('error', e => {
|
||||
logger.error('Error while updating client', e);
|
||||
});
|
||||
_autoUpdater.forceDevUpdateConfig = isDev;
|
||||
await _autoUpdater.checkForUpdatesAndNotify();
|
||||
}
|
||||
};
|
||||
@@ -1,8 +0,0 @@
|
||||
import { appContext } from '../../context';
|
||||
import type { NamespaceHandlers } from '../type';
|
||||
import { deleteWorkspace, listWorkspaces } from './workspace';
|
||||
|
||||
export const workspaceHandlers = {
|
||||
list: async () => listWorkspaces(appContext),
|
||||
delete: async (_, id: string) => deleteWorkspace(appContext, id),
|
||||
} satisfies NamespaceHandlers;
|
||||
@@ -1,60 +0,0 @@
|
||||
import path from 'node:path';
|
||||
|
||||
import fs from 'fs-extra';
|
||||
|
||||
import type { AppContext } from '../../context';
|
||||
import { logger } from '../../logger';
|
||||
|
||||
interface WorkspaceMeta {
|
||||
path: string;
|
||||
realpath: string;
|
||||
}
|
||||
|
||||
export async function listWorkspaces(
|
||||
context: AppContext
|
||||
): Promise<[workspaceId: string, meta: WorkspaceMeta][]> {
|
||||
const basePath = path.join(context.appDataPath, 'workspaces');
|
||||
try {
|
||||
await fs.ensureDir(basePath);
|
||||
const dirs = await fs.readdir(basePath, {
|
||||
withFileTypes: true,
|
||||
});
|
||||
|
||||
const meta = await Promise.all(
|
||||
dirs.map(async dir => {
|
||||
const dbFilePath = path.join(basePath, dir.name, 'storage.db');
|
||||
if (dir.isDirectory() && (await fs.exists(dbFilePath))) {
|
||||
// try to read storage.db under it
|
||||
const realpath = await fs.realpath(dbFilePath);
|
||||
return [dir.name, { path: dbFilePath, realpath }] as [
|
||||
string,
|
||||
WorkspaceMeta
|
||||
];
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
return meta.filter((w): w is [string, WorkspaceMeta] => !!w);
|
||||
} catch (error) {
|
||||
logger.error('listWorkspaces', error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
export async function deleteWorkspace(context: AppContext, id: string) {
|
||||
const basePath = path.join(context.appDataPath, 'workspaces', id);
|
||||
const movedPath = path.join(
|
||||
context.appDataPath,
|
||||
'delete-workspaces',
|
||||
`${id}`
|
||||
);
|
||||
try {
|
||||
return await fs.move(basePath, movedPath, {
|
||||
overwrite: true,
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('deleteWorkspace', error);
|
||||
}
|
||||
}
|
||||
@@ -2,14 +2,15 @@ import './security-restrictions';
|
||||
|
||||
import { app } from 'electron';
|
||||
|
||||
import { createApplicationMenu } from './application-menu';
|
||||
import { createApplicationMenu } from './application-menu/create';
|
||||
import { registerEvents } from './events';
|
||||
import { registerHandlers } from './handlers';
|
||||
import { registerUpdater } from './handlers/updater';
|
||||
import { logger } from './logger';
|
||||
import { restoreOrCreateWindow } from './main-window';
|
||||
import { registerProtocol } from './protocol';
|
||||
import { registerUpdater } from './updater';
|
||||
|
||||
if (require('electron-squirrel-startup')) app.quit();
|
||||
// allow tests to overwrite app name through passing args
|
||||
if (process.argv.includes('--app-name')) {
|
||||
const appNameIndex = process.argv.indexOf('--app-name');
|
||||
@@ -61,14 +62,3 @@ app
|
||||
.then(createApplicationMenu)
|
||||
.then(registerUpdater)
|
||||
.catch(e => console.error('Failed create window:', e));
|
||||
/**
|
||||
* Check new app version in production mode only
|
||||
*/
|
||||
// FIXME: add me back later
|
||||
// if (import.meta.env.PROD) {
|
||||
// app
|
||||
// .whenReady()
|
||||
// .then(() => import('electron-updater'))
|
||||
// .then(({ autoUpdater }) => autoUpdater.checkForUpdatesAndNotify())
|
||||
// .catch(e => console.error('Failed check updates:', e));
|
||||
// }
|
||||
|
||||
@@ -7,7 +7,7 @@ export function getLogFilePath() {
  return log.transports.file.getFile().path;
}

export function revealLogFile() {
export async function revealLogFile() {
  const filePath = getLogFilePath();
  shell.showItemInFolder(filePath);
  return await shell.openPath(filePath);
}

@@ -2,8 +2,9 @@ import { BrowserWindow, nativeTheme } from 'electron';
|
||||
import electronWindowState from 'electron-window-state';
|
||||
import { join } from 'path';
|
||||
|
||||
import { isMacOS, isWindows } from '../../utils';
|
||||
import { getExposedMeta } from './exposed';
|
||||
import { logger } from './logger';
|
||||
import { isMacOS, isWindows } from './utils';
|
||||
|
||||
const IS_DEV: boolean =
|
||||
process.env.NODE_ENV === 'development' && !process.env.CI;
|
||||
@@ -17,6 +18,8 @@ async function createWindow() {
|
||||
defaultHeight: 800,
|
||||
});
|
||||
|
||||
const exposedMeta = getExposedMeta();
|
||||
|
||||
const browserWindow = new BrowserWindow({
|
||||
titleBarStyle: isMacOS()
|
||||
? 'hiddenInset'
|
||||
@@ -40,6 +43,8 @@ async function createWindow() {
|
||||
webviewTag: false, // The webview tag is not recommended. Consider alternatives like iframe or Electron's BrowserView. https://www.electronjs.org/docs/latest/api/webview-tag#warning
|
||||
spellcheck: false, // FIXME: enable?
|
||||
preload: join(__dirname, '../preload/index.js'),
|
||||
// serialize exposed meta to be used in preload
|
||||
additionalArguments: [`--exposed-meta=` + JSON.stringify(exposedMeta)],
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
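The `--exposed-meta` argument above carries serialized JSON; a preload script could recover it roughly as in this sketch (the function is hypothetical, not the repo's actual preload code).

// Hypothetical preload-side counterpart: find the injected argument and parse it.
function getExposedMetaFromArgv(): unknown | null {
  const prefix = '--exposed-meta=';
  const arg = process.argv.find(a => a.startsWith(prefix));
  return arg ? JSON.parse(arg.slice(prefix.length)) : null;
}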
apps/electron/layers/main/src/type.ts (new file, 18 lines)
@@ -0,0 +1,18 @@
|
||||
export type MainEventListener = (...args: any[]) => () => void;
|
||||
|
||||
export type IsomorphicHandler = (
|
||||
e: Electron.IpcMainInvokeEvent,
|
||||
...args: any[]
|
||||
) => Promise<any>;
|
||||
|
||||
export type NamespaceHandlers = {
|
||||
[key: string]: IsomorphicHandler;
|
||||
};
|
||||
|
||||
export interface WorkspaceMeta {
|
||||
id: string;
|
||||
mainDBPath: string;
|
||||
secondaryDBPath?: string; // assume there will be only one
|
||||
}
|
||||
|
||||
export type YOrigin = 'self' | 'external' | 'upstream' | 'renderer';
|
||||
apps/electron/layers/main/src/ui/get-meta-data/get-html.ts (new file, 49 lines)
@@ -0,0 +1,49 @@
|
||||
import { BrowserWindow } from 'electron';
|
||||
|
||||
import type { GetHTMLOptions } from './types';
|
||||
|
||||
async function getHTMLFromWindow(win: BrowserWindow): Promise<string> {
|
||||
return win.webContents
|
||||
.executeJavaScript(`document.documentElement.outerHTML;`)
|
||||
.then(html => html);
|
||||
}
|
||||
|
||||
// For normal web pages, the HTML can be read directly once the page loads,
// but dynamic web pages may need extra time to finish rendering.
// shouldReGetHTML is used to decide whether the HTML should be fetched again.
|
||||
export async function getHTMLByURL(
|
||||
url: string,
|
||||
options: GetHTMLOptions
|
||||
): Promise<string> {
|
||||
return new Promise(resolve => {
|
||||
const { timeout = 10000, shouldReGetHTML } = options;
|
||||
const window = new BrowserWindow({
|
||||
show: false,
|
||||
});
|
||||
let html = '';
|
||||
window.loadURL(url);
|
||||
|
||||
const timer = setTimeout(() => {
|
||||
resolve(html);
|
||||
window.close();
|
||||
}, timeout);
|
||||
|
||||
async function loopHandle() {
|
||||
html = await getHTMLFromWindow(window);
|
||||
if (!shouldReGetHTML) {
|
||||
return html;
|
||||
}
|
||||
|
||||
if (await shouldReGetHTML(html)) {
|
||||
setTimeout(loopHandle, 1000);
|
||||
} else {
|
||||
window.close();
|
||||
clearTimeout(timer);
|
||||
resolve(html);
|
||||
}
|
||||
}
|
||||
|
||||
window.webContents.on('did-finish-load', async () => {
|
||||
loopHandle();
|
||||
});
|
||||
});
|
||||
}
|
||||
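A small usage sketch of `getHTMLByURL`: keep re-fetching until the page has rendered a `<title>`, giving up after the timeout. The predicate and URL are illustrative only.

// Hypothetical usage: re-fetch the HTML while the page has no <title> yet.
import { getHTMLByURL } from './get-html';

const html = await getHTMLByURL('https://example.com', {
  timeout: 5000,
  shouldReGetHTML: current => !/<title>[^<]+<\/title>/i.test(current),
});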
apps/electron/layers/main/src/ui/get-meta-data/index.ts (new file, 107 lines)
@@ -0,0 +1,107 @@
|
||||
import type { CheerioAPI, Element } from 'cheerio';
|
||||
import { load } from 'cheerio';
|
||||
|
||||
import type { Context, MetaData, Options, RuleSet } from './types';
|
||||
|
||||
export * from './types';
|
||||
|
||||
import { getHTMLByURL } from './get-html';
|
||||
import { metaDataRules } from './rules';
|
||||
import type { GetMetaDataOptions } from './types';
|
||||
|
||||
function runRule(ruleSet: RuleSet, $: CheerioAPI, context: Context) {
|
||||
let maxScore = 0;
|
||||
let value;
|
||||
|
||||
for (let currRule = 0; currRule < ruleSet.rules.length; currRule++) {
|
||||
const [query, handler] = ruleSet.rules[currRule];
|
||||
const elements = Array.from($(query));
|
||||
|
||||
if (elements.length) {
|
||||
for (const element of elements) {
|
||||
let score = ruleSet.rules.length - currRule;
|
||||
|
||||
if (ruleSet.scorer) {
|
||||
const newScore = ruleSet.scorer(element as Element, score);
|
||||
|
||||
if (newScore) {
|
||||
score = newScore;
|
||||
}
|
||||
}
|
||||
|
||||
if (score > maxScore) {
|
||||
maxScore = score;
|
||||
value = handler(element as Element);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (value) {
|
||||
if (ruleSet.processor) {
|
||||
value = ruleSet.processor(value, context);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
if (ruleSet.defaultValue) {
|
||||
return ruleSet.defaultValue(context);
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
async function getMetaDataByHTML(
|
||||
html: string,
|
||||
url: string,
|
||||
options: GetMetaDataOptions
|
||||
) {
|
||||
const { customRules = {} } = options;
|
||||
const rules: Record<string, RuleSet> = { ...metaDataRules };
|
||||
Object.keys(customRules).forEach((key: string) => {
|
||||
rules[key] = {
|
||||
rules: [...metaDataRules[key].rules, ...customRules[key].rules],
|
||||
defaultValue:
|
||||
customRules[key].defaultValue || metaDataRules[key].defaultValue,
|
||||
processor: customRules[key].processor || metaDataRules[key].processor,
|
||||
};
|
||||
});
|
||||
|
||||
const metadata: MetaData = {};
|
||||
const context: Context = {
|
||||
url,
|
||||
...options,
|
||||
};
|
||||
|
||||
const $ = load(html);
|
||||
|
||||
Object.keys(rules).forEach((key: string) => {
|
||||
const ruleSet = rules[key];
|
||||
metadata[key] = runRule(ruleSet, $, context) || undefined;
|
||||
});
|
||||
|
||||
return metadata;
|
||||
}
|
||||
|
||||
export async function getMetaData(url: string, options: Options = {}) {
|
||||
const { customRules, forceImageHttps, shouldReGetHTML, ...other } = options;
|
||||
const html = await getHTMLByURL(url, {
|
||||
...other,
|
||||
shouldReGetHTML: async html => {
|
||||
const meta = await getMetaDataByHTML(html, url, {
|
||||
customRules,
|
||||
forceImageHttps,
|
||||
});
|
||||
return shouldReGetHTML ? await shouldReGetHTML(meta) : false;
|
||||
},
|
||||
}).catch(() => {
|
||||
// TODO: report error
|
||||
return '';
|
||||
});
|
||||
|
||||
return await getMetaDataByHTML(html, url, {
|
||||
customRules,
|
||||
forceImageHttps,
|
||||
});
|
||||
}
|
||||
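A usage sketch of `getMetaData`, mirroring the `getBookmarkDataByLink` handler later in this diff: retry while neither a title nor a description has been extracted. The URL and logging are illustrative.

// Hypothetical usage: retry until some usable metadata shows up.
import { getMetaData } from './get-meta-data';

const meta = await getMetaData('https://affine.pro', {
  forceImageHttps: true,
  shouldReGetHTML: metaData => !metaData.title && !metaData.description,
});
console.log(meta.title, meta.description, meta.icon);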
apps/electron/layers/main/src/ui/get-meta-data/rules.ts (new file, 690 lines)
@@ -0,0 +1,690 @@
|
||||
import type { RuleSet } from './types';
|
||||
import { getProvider, makeUrlAbsolute, makeUrlSecure, parseUrl } from './utils';
|
||||
|
||||
export const metaDataRules: Record<string, RuleSet> = {
|
||||
title: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:title"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="og:title"][content]', element => element.attribs['content']],
|
||||
[
|
||||
'meta[property="twitter:title"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="twitter:title"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="parsely-title"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="parsely-title"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="sailthru.title"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="sailthru.title"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['title', (element: any) => element.text],
|
||||
],
|
||||
},
|
||||
description: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:description"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:description"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="description" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="description" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="sailthru.description"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="sailthru.description"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="twitter:description"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="twitter:description"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="summary" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="summary" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
],
|
||||
},
|
||||
language: {
|
||||
rules: [
|
||||
['html[lang]', element => element.attribs['lang']],
|
||||
[
|
||||
'meta[property="language" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="language" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="og:locale"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:locale"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
],
|
||||
processor: (language: any) => language.split('-')[0],
|
||||
},
|
||||
type: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:type"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="og:type"][content]', element => element.attribs['content']],
|
||||
[
|
||||
'meta[property="parsely-type"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="parsely-type"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="medium"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="medium"][content]', element => element.attribs['content']],
|
||||
],
|
||||
},
|
||||
url: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="og:url"][content]', element => element.attribs['content']],
|
||||
[
|
||||
'meta[property="al:web:url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="al:web:url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="parsely-link"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="parsely-link"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['a.amp-canurl', element => element.attribs['href']],
|
||||
['link[rel="canonical"][href]', element => element.attribs['href']],
|
||||
],
|
||||
defaultValue: context => context.url,
|
||||
processor: (url: any, context) => makeUrlAbsolute(context.url, url),
|
||||
},
|
||||
provider: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:site_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:site_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="publisher" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="publisher" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="application-name" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="application-name" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="al:android:app_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="al:android:app_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="al:iphone:app_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="al:iphone:app_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="al:ipad:app_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="al:ipad:app_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="al:ios:app_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="al:ios:app_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="twitter:app:name:iphone"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="twitter:app:name:iphone"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="twitter:app:name:ipad"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="twitter:app:name:ipad"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="twitter:app:name:googleplay"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="twitter:app:name:googleplay"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
],
|
||||
defaultValue: context => getProvider(parseUrl(context.url)),
|
||||
},
|
||||
keywords: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="keywords" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="keywords" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="parsely-tags"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="parsely-tags"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="sailthru.tags"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="sailthru.tags"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="article:tag" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="article:tag" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="book:tag" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="book:tag" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="topic" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="topic" i][content]', element => element.attribs['content']],
|
||||
],
|
||||
processor: (keywords: any) =>
|
||||
keywords.split(',').map((keyword: string) => keyword.trim()),
|
||||
},
|
||||
section: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="article:section"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="article:section"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="category"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="category"][content]', element => element.attribs['content']],
|
||||
],
|
||||
},
|
||||
author: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="author" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="author" i][content]', element => element.attribs['content']],
|
||||
[
|
||||
'meta[property="article:author"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="article:author"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="book:author"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="book:author"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="parsely-author"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="parsely-author"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="sailthru.author"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="sailthru.author"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['a[class*="author" i]', (element: any) => element.text],
|
||||
['[rel="author"]', (element: any) => element.text],
|
||||
[
|
||||
'meta[property="twitter:creator"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="twitter:creator"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="profile:username"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="profile:username"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
],
|
||||
},
|
||||
published: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="article:published_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="article:published_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="published_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="published_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="parsely-pub-date"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="parsely-pub-date"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="sailthru.date"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="sailthru.date"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="date" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="date" i][content]', element => element.attribs['content']],
|
||||
[
|
||||
'meta[property="release_date" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="release_date" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['time[datetime]', element => element.attribs['datetime']],
|
||||
['time[datetime][pubdate]', element => element.attribs['datetime']],
|
||||
],
|
||||
processor: (value: any) =>
|
||||
Date.parse(value.toString())
|
||||
? new Date(value.toString()).toISOString()
|
||||
: undefined,
|
||||
},
|
||||
modified: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:updated_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:updated_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="article:modified_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="article:modified_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="updated_time" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="updated_time" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="modified_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="modified_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="revised"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="revised"][content]', element => element.attribs['content']],
|
||||
],
|
||||
processor: (value: any) =>
|
||||
Date.parse(value.toString())
|
||||
? new Date(value.toString()).toISOString()
|
||||
: undefined,
|
||||
},
|
||||
robots: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="robots" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="robots" i][content]', element => element.attribs['content']],
|
||||
],
|
||||
processor: (keywords: any) =>
|
||||
keywords.split(',').map((keyword: string) => keyword.trim()),
|
||||
},
|
||||
copyright: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="copyright" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="copyright" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
],
|
||||
},
|
||||
email: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="email" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="email" i][content]', element => element.attribs['content']],
|
||||
[
|
||||
'meta[property="reply-to" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="reply-to" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
],
|
||||
},
|
||||
twitter: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="twitter:site"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="twitter:site"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
],
|
||||
},
|
||||
facebook: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="fb:pages"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="fb:pages"][content]', element => element.attribs['content']],
|
||||
],
|
||||
},
|
||||
image: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:image:secure_url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:image:secure_url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="og:image:url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:image:url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="og:image"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="og:image"][content]', element => element.attribs['content']],
|
||||
[
|
||||
'meta[property="twitter:image"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="twitter:image"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="twitter:image:src"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="twitter:image:src"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="thumbnail"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="thumbnail"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="parsely-image-url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="parsely-image-url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="sailthru.image.full"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="sailthru.image.full"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
],
|
||||
processor: (imageUrl: any, context) =>
|
||||
context.forceImageHttps === true
|
||||
? makeUrlSecure(makeUrlAbsolute(context.url, imageUrl))
|
||||
: makeUrlAbsolute(context.url, imageUrl),
|
||||
},
|
||||
icon: {
|
||||
rules: [
|
||||
[
|
||||
'link[rel="apple-touch-icon"][href]',
|
||||
element => element.attribs['href'],
|
||||
],
|
||||
[
|
||||
'link[rel="apple-touch-icon-precomposed"][href]',
|
||||
element => element.attribs['href'],
|
||||
],
|
||||
['link[rel="icon" i][href]', element => element.attribs['href']],
|
||||
['link[rel="fluid-icon"][href]', element => element.attribs['href']],
|
||||
['link[rel="shortcut icon"][href]', element => element.attribs['href']],
|
||||
['link[rel="Shortcut Icon"][href]', element => element.attribs['href']],
|
||||
['link[rel="mask-icon"][href]', element => element.attribs['href']],
|
||||
],
|
||||
scorer: element => {
|
||||
const sizes = element.attribs['sizes'];
|
||||
if (sizes) {
|
||||
const sizeMatches = sizes.match(/\d+/g);
|
||||
if (sizeMatches) {
|
||||
const parsed = parseInt(sizeMatches[0]);
|
||||
if (!isNaN(parsed)) {
|
||||
return parsed;
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
defaultValue: context => makeUrlAbsolute(context.url, '/favicon.ico'),
|
||||
processor: (iconUrl, context) =>
|
||||
context.forceImageHttps === true
|
||||
? makeUrlSecure(makeUrlAbsolute(context.url, iconUrl))
|
||||
: makeUrlAbsolute(context.url, iconUrl),
|
||||
},
|
||||
video: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:video:secure_url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:video:secure_url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="og:video:url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:video:url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="og:video"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="og:video"][content]', element => element.attribs['content']],
|
||||
],
|
||||
processor: (imageUrl: any, context) =>
|
||||
context.forceImageHttps === true
|
||||
? makeUrlSecure(makeUrlAbsolute(context.url, imageUrl))
|
||||
: makeUrlAbsolute(context.url, imageUrl),
|
||||
},
|
||||
audio: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:audio:secure_url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:audio:secure_url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="og:audio:url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:audio:url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="og:audio"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="og:audio"][content]', element => element.attribs['content']],
|
||||
],
|
||||
processor: (imageUrl: any, context) =>
|
||||
context.forceImageHttps === true
|
||||
? makeUrlSecure(makeUrlAbsolute(context.url, imageUrl))
|
||||
: makeUrlAbsolute(context.url, imageUrl),
|
||||
},
|
||||
};
|
||||
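Because `getMetaDataByHTML` merges `customRules` on top of these defaults, a caller can extend a rule set without copying it. The extra selector below is hypothetical and follows the same handler shape as the built-in title rule.

// Hypothetical custom rule: also accept an <h1 class="post-title"> as the title.
import { getMetaData } from './get-meta-data';

const meta = await getMetaData('https://example.com/post', {
  customRules: {
    title: {
      rules: [['h1.post-title', (element: any) => element.text]],
    },
  },
});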
apps/electron/layers/main/src/ui/get-meta-data/types.ts (new file, 43 lines)
@@ -0,0 +1,43 @@
|
||||
import type { Element } from 'cheerio';
|
||||
|
||||
export type MetaData = {
|
||||
title?: string;
|
||||
description?: string;
|
||||
icon?: string;
|
||||
image?: string;
|
||||
keywords?: string[];
|
||||
language?: string;
|
||||
type?: string;
|
||||
url?: string;
|
||||
provider?: string;
|
||||
|
||||
[x: string]: string | string[] | undefined;
|
||||
};
|
||||
|
||||
export type MetadataRule = [string, (el: Element) => string | null];
|
||||
|
||||
export type Context = {
|
||||
url: string;
|
||||
} & GetMetaDataOptions;
|
||||
|
||||
export type RuleSet = {
|
||||
rules: MetadataRule[];
|
||||
defaultValue?: (context: Context) => string | string[];
|
||||
scorer?: (el: Element, score: any) => any;
|
||||
processor?: (input: any, context: Context) => any;
|
||||
};
|
||||
|
||||
export type GetMetaDataOptions = {
|
||||
customRules?: Record<string, RuleSet>;
|
||||
forceImageHttps?: boolean;
|
||||
};
|
||||
|
||||
export type GetHTMLOptions = {
|
||||
timeout?: number;
|
||||
shouldReGetHTML?: (currentHTML: string) => boolean | Promise<boolean>;
|
||||
};
|
||||
|
||||
export type Options = {
|
||||
shouldReGetHTML?: (metaData: MetaData) => boolean | Promise<boolean>;
|
||||
} & GetMetaDataOptions &
|
||||
Omit<GetHTMLOptions, 'shouldReGetHTML'>;
|
||||
apps/electron/layers/main/src/ui/get-meta-data/utils.ts (new file, 28 lines)
@@ -0,0 +1,28 @@
|
||||
import urlparse from 'url';
|
||||
|
||||
export function makeUrlAbsolute(base: string, relative: string): string {
|
||||
const relativeParsed = urlparse.parse(relative);
|
||||
|
||||
if (relativeParsed.host === null) {
|
||||
return urlparse.resolve(base, relative);
|
||||
}
|
||||
|
||||
return relative;
|
||||
}
|
||||
|
||||
export function makeUrlSecure(url: string): string {
|
||||
return url.replace(/^http:/, 'https:');
|
||||
}
|
||||
|
||||
export function parseUrl(url: string): string {
|
||||
return urlparse.parse(url).hostname || '';
|
||||
}
|
||||
|
||||
export function getProvider(host: string): string {
|
||||
return host
|
||||
.replace(/www[a-zA-Z0-9]*\./, '')
|
||||
.replace('.co.', '.')
|
||||
.split('.')
|
||||
.slice(0, -1)
|
||||
.join(' ');
|
||||
}
|
||||
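To make the URL helpers above concrete, a few expected results, worked out by hand from the code rather than taken from repo tests:

// Illustrative expectations for the helpers above:
makeUrlAbsolute('https://example.com/a/b', '/img.png'); // 'https://example.com/img.png'
makeUrlAbsolute('https://example.com/a/b', 'https://cdn.io/x.png'); // unchanged, host already set
makeUrlSecure('http://example.com/x.png'); // 'https://example.com/x.png'
parseUrl('https://blog.example.com/post'); // 'blog.example.com'
getProvider('www.example.co.uk'); // 'example'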
@@ -1,7 +1,7 @@
|
||||
import { app, BrowserWindow, shell } from 'electron';
|
||||
import { parse } from 'url';
|
||||
|
||||
import { logger } from '../../logger';
|
||||
import { logger } from '../logger';
|
||||
|
||||
const redirectUri = 'https://affine.pro/client/auth-callback';
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import { app, BrowserWindow, nativeTheme } from 'electron';
|
||||
|
||||
import { isMacOS } from '../../../../utils';
|
||||
import type { NamespaceHandlers } from '../type';
|
||||
import { isMacOS } from '../utils';
|
||||
import { getMetaData } from './get-meta-data';
|
||||
import { getGoogleOauthCode } from './google-auth';
|
||||
|
||||
export const uiHandlers = {
|
||||
@@ -39,4 +40,11 @@ export const uiHandlers = {
|
||||
getGoogleOauthCode: async () => {
|
||||
return getGoogleOauthCode();
|
||||
},
|
||||
getBookmarkDataByLink: async (_, url: string) => {
|
||||
return getMetaData(url, {
|
||||
shouldReGetHTML: metaData => {
|
||||
return !metaData.title && !metaData.description;
|
||||
},
|
||||
});
|
||||
},
|
||||
} satisfies NamespaceHandlers;
|
||||
apps/electron/layers/main/src/updater/electron-updater.ts (new file, 99 lines)
@@ -0,0 +1,99 @@
|
||||
import { app } from 'electron';
|
||||
import type { AppUpdater } from 'electron-updater';
|
||||
import { z } from 'zod';
|
||||
|
||||
import { logger } from '../logger';
|
||||
import { isMacOS } from '../utils';
|
||||
import { updaterSubjects } from './event';
|
||||
|
||||
export const ReleaseTypeSchema = z.enum([
|
||||
'stable',
|
||||
'beta',
|
||||
'canary',
|
||||
'internal',
|
||||
]);
|
||||
|
||||
export const envBuildType = (process.env.BUILD_TYPE || 'canary')
|
||||
.trim()
|
||||
.toLowerCase();
|
||||
export const buildType = ReleaseTypeSchema.parse(envBuildType);
|
||||
const mode = process.env.NODE_ENV;
|
||||
const isDev = mode === 'development';
|
||||
|
||||
let _autoUpdater: AppUpdater | null = null;
|
||||
|
||||
export const quitAndInstall = async () => {
|
||||
_autoUpdater?.quitAndInstall();
|
||||
};
|
||||
|
||||
let lastCheckTime = 0;
|
||||
export const checkForUpdatesAndNotify = async (force = true) => {
|
||||
if (!_autoUpdater) {
|
||||
return; // ?
|
||||
}
|
||||
// check every 30 minutes (1800 seconds) at most
|
||||
if (force || lastCheckTime + 1000 * 1800 < Date.now()) {
|
||||
lastCheckTime = Date.now();
|
||||
return await _autoUpdater.checkForUpdatesAndNotify();
|
||||
}
|
||||
};
|
||||
|
||||
export const registerUpdater = async () => {
|
||||
// requiring electron-updater eagerly causes side effects and breaks generate-main-exposed-meta,
// so we require it lazily inside this function
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const { autoUpdater } = require('electron-updater');
|
||||
|
||||
_autoUpdater = autoUpdater;
|
||||
|
||||
if (!_autoUpdater) {
|
||||
return;
|
||||
}
|
||||
|
||||
// TODO: support auto update on windows and linux
|
||||
const allowAutoUpdate = isMacOS();
|
||||
|
||||
_autoUpdater.autoDownload = false;
|
||||
_autoUpdater.allowPrerelease = buildType !== 'stable';
|
||||
_autoUpdater.autoInstallOnAppQuit = false;
|
||||
_autoUpdater.autoRunAppAfterInstall = true;
|
||||
_autoUpdater.setFeedURL({
|
||||
channel: buildType,
|
||||
provider: 'github',
|
||||
repo: buildType !== 'internal' ? 'AFFiNE' : 'AFFiNE-Releases',
|
||||
owner: 'toeverything',
|
||||
releaseType: buildType === 'stable' ? 'release' : 'prerelease',
|
||||
});
|
||||
|
||||
// register events for checkForUpdatesAndNotify
|
||||
_autoUpdater.on('update-available', info => {
|
||||
if (allowAutoUpdate) {
|
||||
_autoUpdater!.downloadUpdate();
|
||||
logger.info('Update available, downloading...', info);
|
||||
}
|
||||
updaterSubjects.updateAvailable.next({
|
||||
version: info.version,
|
||||
allowAutoUpdate,
|
||||
});
|
||||
});
|
||||
_autoUpdater.on('download-progress', e => {
|
||||
logger.info(`Download progress: ${e.percent}`);
|
||||
updaterSubjects.downloadProgress.next(e.percent);
|
||||
});
|
||||
_autoUpdater.on('update-downloaded', e => {
|
||||
updaterSubjects.updateReady.next({
|
||||
version: e.version,
|
||||
allowAutoUpdate,
|
||||
});
|
||||
// I guess we can skip it?
|
||||
// updaterSubjects.clientDownloadProgress.next(100);
|
||||
logger.info('Update downloaded, ready to install');
|
||||
});
|
||||
_autoUpdater.on('error', e => {
|
||||
logger.error('Error while updating client', e);
|
||||
});
|
||||
_autoUpdater.forceDevUpdateConfig = isDev;
|
||||
|
||||
app.on('activate', async () => {
|
||||
await checkForUpdatesAndNotify(false);
|
||||
});
|
||||
};
|
||||
apps/electron/layers/main/src/updater/event.ts (new file, 36 lines)
@@ -0,0 +1,36 @@
|
||||
import { BehaviorSubject, Subject } from 'rxjs';
|
||||
|
||||
import type { MainEventListener } from '../type';
|
||||
|
||||
export interface UpdateMeta {
|
||||
version: string;
|
||||
allowAutoUpdate: boolean;
|
||||
}
|
||||
|
||||
export const updaterSubjects = {
|
||||
// updateReady means the app is ready to restart and install the new version
|
||||
updateAvailable: new Subject<UpdateMeta>(),
|
||||
updateReady: new Subject<UpdateMeta>(),
|
||||
downloadProgress: new BehaviorSubject<number>(0),
|
||||
};
|
||||
|
||||
export const updaterEvents = {
|
||||
onUpdateAvailable: (fn: (versionMeta: UpdateMeta) => void) => {
|
||||
const sub = updaterSubjects.updateAvailable.subscribe(fn);
|
||||
return () => {
|
||||
sub.unsubscribe();
|
||||
};
|
||||
},
|
||||
onUpdateReady: (fn: (versionMeta: UpdateMeta) => void) => {
|
||||
const sub = updaterSubjects.updateReady.subscribe(fn);
|
||||
return () => {
|
||||
sub.unsubscribe();
|
||||
};
|
||||
},
|
||||
onDownloadProgress: (fn: (progress: number) => void) => {
|
||||
const sub = updaterSubjects.downloadProgress.subscribe(fn);
|
||||
return () => {
|
||||
sub.unsubscribe();
|
||||
};
|
||||
},
|
||||
} satisfies Record<string, MainEventListener>;
|
||||
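Each helper above returns an unsubscribe function, so a consumer can wire listeners and tear them down later. The consumer below is a sketch, not code from this diff.

// Hypothetical consumer of updaterEvents: log progress and keep the cleanup functions.
import { updaterEvents } from './event';

const unsubscribers = [
  updaterEvents.onUpdateAvailable(meta => console.log('available', meta.version)),
  updaterEvents.onDownloadProgress(p => console.log(`downloading ${p.toFixed(0)}%`)),
  updaterEvents.onUpdateReady(meta => console.log('ready to install', meta.version)),
];

// later, e.g. when shutting down:
unsubscribers.forEach(unsub => unsub());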
apps/electron/layers/main/src/updater/index.ts (new file, 18 lines)
@@ -0,0 +1,18 @@
|
||||
import { app } from 'electron';
|
||||
|
||||
import type { NamespaceHandlers } from '../type';
|
||||
import { checkForUpdatesAndNotify, quitAndInstall } from './electron-updater';
|
||||
|
||||
export const updaterHandlers = {
|
||||
currentVersion: async () => {
|
||||
return app.getVersion();
|
||||
},
|
||||
quitAndInstall: async () => {
|
||||
return quitAndInstall();
|
||||
},
|
||||
checkForUpdatesAndNotify: async () => {
|
||||
return checkForUpdatesAndNotify(true);
|
||||
},
|
||||
} satisfies NamespaceHandlers;
|
||||
|
||||
export * from './electron-updater';
|
||||
@@ -1,19 +1,11 @@
export function debounce<T extends (...args: any[]) => void>(
  fn: T,
  delay: number
) {
  let timeoutId: NodeJS.Timer | undefined;
  return (...args: Parameters<T>) => {
    if (timeoutId) {
      clearTimeout(timeoutId);
    }
    timeoutId = setTimeout(() => {
      fn(...args);
      timeoutId = undefined;
    }, delay);
  };
}

export function ts() {
export function getTime() {
  return new Date().getTime();
}

export const isMacOS = () => {
  return process.platform === 'darwin';
};

export const isWindows = () => {
  return process.platform === 'win32';
};

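For reference, the removed `debounce` helper above was a standard trailing-edge debounce; usage looked like this (illustrative only, since the helper is deleted in this diff).

// Illustrative use of the removed debounce helper: coalesce rapid calls.
const save = debounce((doc: string) => {
  console.log('persisting', doc.length, 'bytes');
}, 500);

save('a');
save('ab');
save('abc'); // only this last call runs, ~500ms after it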
apps/electron/layers/main/src/workers/index.ts (new file, 35 lines)
@@ -0,0 +1,35 @@
|
||||
import path from 'node:path';
|
||||
import { Worker } from 'node:worker_threads';
|
||||
|
||||
import { mergeUpdate } from './merge-update';
|
||||
|
||||
export function mergeUpdateWorker(updates: Uint8Array[]) {
|
||||
// fallback to main thread if worker is disabled (in vitest)
|
||||
if (process.env.USE_WORKER !== 'true') {
|
||||
return mergeUpdate(updates);
|
||||
}
|
||||
return new Promise<Uint8Array>((resolve, reject) => {
|
||||
// it is intended to have "./workers" in the path
|
||||
const workerFile = path.join(__dirname, './workers/merge-update.worker.js');
|
||||
|
||||
// convert updates to SharedArrayBuffer[s]
|
||||
const sharedArrayBufferUpdates = updates.map(update => {
|
||||
const buffer = new SharedArrayBuffer(update.byteLength);
|
||||
const view = new Uint8Array(buffer);
|
||||
view.set(update);
|
||||
return view;
|
||||
});
|
||||
|
||||
const worker = new Worker(workerFile, {
|
||||
workerData: sharedArrayBufferUpdates,
|
||||
});
|
||||
|
||||
worker.on('message', resolve);
|
||||
worker.on('error', reject);
|
||||
worker.on('exit', code => {
|
||||
if (code !== 0) {
|
||||
reject(new Error(`Worker stopped with exit code ${code}`));
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
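A sketch of how `mergeUpdateWorker` above might be called from the DB layer to squash pending Yjs updates off the main thread; the wrapper and import path are hypothetical.

// Hypothetical caller: squash pending update blobs into one snapshot.
import { mergeUpdateWorker } from './workers';

export async function squashUpdates(updates: Uint8Array[]): Promise<Uint8Array> {
  // falls back to merging on the main thread when USE_WORKER !== 'true'
  return mergeUpdateWorker(updates);
}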
apps/electron/layers/main/src/workers/merge-update.ts (new file, 11 lines)
@@ -0,0 +1,11 @@
import * as Y from 'yjs';

export function mergeUpdate(updates: Uint8Array[]) {
  const yDoc = new Y.Doc();
  Y.transact(yDoc, () => {
    for (const update of updates) {
      Y.applyUpdate(yDoc, update);
    }
  });
  return Y.encodeStateAsUpdate(yDoc);
}

apps/electron/layers/main/src/workers/merge-update.worker.ts (new file, 14 lines)
@@ -0,0 +1,14 @@
import { parentPort, workerData } from 'node:worker_threads';

import { mergeUpdate } from './merge-update';

function getMergeUpdate(updates: Uint8Array[]) {
  const update = mergeUpdate(updates);
  const buffer = new SharedArrayBuffer(update.byteLength);
  const view = new Uint8Array(buffer);
  view.set(update);

  return update;
}

parentPort?.postMessage(getMergeUpdate(workerData));
apps/electron/layers/main/src/workspace/__tests__/.gitignore (new file, vendored, 1 line)
@@ -0,0 +1 @@
|
||||
tmp
|
||||
@@ -0,0 +1,208 @@
|
||||
import path from 'node:path';
|
||||
|
||||
import fs from 'fs-extra';
|
||||
import { v4 } from 'uuid';
|
||||
import { afterEach, describe, expect, test, vi } from 'vitest';
|
||||
|
||||
import type { AppContext } from '../../context';
|
||||
|
||||
const tmpDir = path.join(__dirname, 'tmp');
|
||||
|
||||
const testAppContext: AppContext = {
|
||||
appDataPath: path.join(tmpDir, 'test-data'),
|
||||
appName: 'test',
|
||||
};
|
||||
|
||||
vi.doMock('../../context', () => ({
|
||||
appContext: testAppContext,
|
||||
}));
|
||||
|
||||
vi.doMock('../../db/ensure-db', () => ({
|
||||
ensureSQLiteDB: async () => ({
|
||||
destroy: () => {},
|
||||
}),
|
||||
}));
|
||||
|
||||
afterEach(async () => {
|
||||
await fs.remove(tmpDir);
|
||||
});
|
||||
|
||||
describe('list workspaces', () => {
|
||||
test('listWorkspaces (valid)', async () => {
|
||||
const { listWorkspaces } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
'workspaces',
|
||||
workspaceId
|
||||
);
|
||||
const meta = {
|
||||
id: workspaceId,
|
||||
};
|
||||
await fs.ensureDir(workspacePath);
|
||||
await fs.writeJSON(path.join(workspacePath, 'meta.json'), meta);
|
||||
const workspaces = await listWorkspaces(testAppContext);
|
||||
expect(workspaces).toEqual([[workspaceId, meta]]);
|
||||
});
|
||||
|
||||
test('listWorkspaces (without meta json file)', async () => {
|
||||
const { listWorkspaces } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
'workspaces',
|
||||
workspaceId
|
||||
);
|
||||
await fs.ensureDir(workspacePath);
|
||||
const workspaces = await listWorkspaces(testAppContext);
|
||||
expect(workspaces).toEqual([
|
||||
[
|
||||
workspaceId,
        // meta file will be created automatically
        { id: workspaceId, mainDBPath: path.join(workspacePath, 'storage.db') },
      ],
    ]);
  });
});

describe('delete workspace', () => {
  test('deleteWorkspace', async () => {
    const { deleteWorkspace } = await import('../handlers');
    const workspaceId = v4();
    const workspacePath = path.join(
      testAppContext.appDataPath,
      'workspaces',
      workspaceId
    );
    await fs.ensureDir(workspacePath);
    await deleteWorkspace(testAppContext, workspaceId);
    expect(await fs.pathExists(workspacePath)).toBe(false);
    // removed workspace will be moved to delete-workspaces
    expect(
      await fs.pathExists(
        path.join(testAppContext.appDataPath, 'delete-workspaces', workspaceId)
      )
    ).toBe(true);
  });
});

describe('getWorkspaceMeta', () => {
  test('can get meta', async () => {
    const { getWorkspaceMeta } = await import('../handlers');
    const workspaceId = v4();
    const workspacePath = path.join(
      testAppContext.appDataPath,
      'workspaces',
      workspaceId
    );
    const meta = {
      id: workspaceId,
    };
    await fs.ensureDir(workspacePath);
    await fs.writeJSON(path.join(workspacePath, 'meta.json'), meta);
    expect(await getWorkspaceMeta(testAppContext, workspaceId)).toEqual(meta);
  });

  test('can create meta if not exists', async () => {
    const { getWorkspaceMeta } = await import('../handlers');
    const workspaceId = v4();
    const workspacePath = path.join(
      testAppContext.appDataPath,
      'workspaces',
      workspaceId
    );
    await fs.ensureDir(workspacePath);
    expect(await getWorkspaceMeta(testAppContext, workspaceId)).toEqual({
      id: workspaceId,
      mainDBPath: path.join(workspacePath, 'storage.db'),
    });
    expect(
      await fs.pathExists(path.join(workspacePath, 'meta.json'))
    ).toBeTruthy();
  });

  test('can migrate meta if db file is a link', async () => {
    const { getWorkspaceMeta } = await import('../handlers');
    const workspaceId = v4();
    const workspacePath = path.join(
      testAppContext.appDataPath,
      'workspaces',
      workspaceId
    );
    await fs.ensureDir(workspacePath);
    const sourcePath = path.join(tmpDir, 'source.db');
    await fs.writeFile(sourcePath, 'test');

    await fs.ensureSymlink(sourcePath, path.join(workspacePath, 'storage.db'));

    expect(await getWorkspaceMeta(testAppContext, workspaceId)).toEqual({
      id: workspaceId,
      mainDBPath: path.join(workspacePath, 'storage.db'),
      secondaryDBPath: sourcePath,
    });

    expect(
      await fs.pathExists(path.join(workspacePath, 'meta.json'))
    ).toBeTruthy();
  });
});

test('storeWorkspaceMeta', async () => {
  const { storeWorkspaceMeta } = await import('../handlers');
  const workspaceId = v4();
  const workspacePath = path.join(
    testAppContext.appDataPath,
    'workspaces',
    workspaceId
  );
  await fs.ensureDir(workspacePath);
  const meta = {
    id: workspaceId,
    mainDBPath: path.join(workspacePath, 'storage.db'),
  };
  await storeWorkspaceMeta(testAppContext, workspaceId, meta);
  expect(await fs.readJSON(path.join(workspacePath, 'meta.json'))).toEqual(
    meta
  );
  await storeWorkspaceMeta(testAppContext, workspaceId, {
    secondaryDBPath: path.join(tmpDir, 'test.db'),
  });
  expect(await fs.readJSON(path.join(workspacePath, 'meta.json'))).toEqual({
    ...meta,
    secondaryDBPath: path.join(tmpDir, 'test.db'),
  });
});

test('getWorkspaceMeta observable', async () => {
  const { storeWorkspaceMeta } = await import('../handlers');
  const { getWorkspaceMeta$ } = await import('../index');

  const workspaceId = v4();
  const workspacePath = path.join(
    testAppContext.appDataPath,
    'workspaces',
    workspaceId
  );

  const metaChange = vi.fn();

  const meta$ = getWorkspaceMeta$(workspaceId);

  meta$.subscribe(metaChange);
  await new Promise(resolve => setTimeout(resolve, 100));

  expect(metaChange).toHaveBeenCalledWith({
    id: workspaceId,
    mainDBPath: path.join(workspacePath, 'storage.db'),
  });

  await storeWorkspaceMeta(testAppContext, workspaceId, {
    secondaryDBPath: path.join(tmpDir, 'test.db'),
  });

  expect(metaChange).toHaveBeenCalledWith({
    id: workspaceId,
    mainDBPath: path.join(workspacePath, 'storage.db'),
    secondaryDBPath: path.join(tmpDir, 'test.db'),
  });
});

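The spec fragment above leans on setup defined earlier in the file and not shown in this diff view: testAppContext, tmpDir, and the vitest/uuid imports. A minimal sketch of what that setup plausibly looks like; the names, hooks, and paths are assumptions, not the file's actual header:

// hypothetical header for the spec above; the real file may wire this differently
import os from 'node:os';
import path from 'node:path';

import fs from 'fs-extra';
import { v4 } from 'uuid';
import { afterEach, beforeEach, describe, expect, test, vi } from 'vitest';

let tmpDir: string;                          // scratch dir for fake db files
let testAppContext: { appDataPath: string }; // stand-in for the main-process AppContext

beforeEach(async () => {
  tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), 'affine-workspace-spec-'));
  testAppContext = { appDataPath: path.join(tmpDir, 'app-data') };
});

afterEach(async () => {
  await fs.remove(tmpDir);
});
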
apps/electron/layers/main/src/workspace/handlers.ts (new file, 135 lines)
@@ -0,0 +1,135 @@
import path from 'node:path';

import fs from 'fs-extra';

import { type AppContext } from '../context';
import { ensureSQLiteDB } from '../db/ensure-db';
import { logger } from '../logger';
import type { WorkspaceMeta } from '../type';
import { workspaceSubjects } from './subjects';

export async function listWorkspaces(
  context: AppContext
): Promise<[workspaceId: string, meta: WorkspaceMeta][]> {
  const basePath = getWorkspacesBasePath(context);
  try {
    await fs.ensureDir(basePath);
    const dirs = await fs.readdir(basePath, {
      withFileTypes: true,
    });
    const metaList = (
      await Promise.all(
        dirs.map(async dir => {
          // ? shall we put all meta in a single file instead of one file per workspace?
          return await getWorkspaceMeta(context, dir.name);
        })
      )
    ).filter((w): w is WorkspaceMeta => !!w);
    return metaList.map(meta => [meta.id, meta]);
  } catch (error) {
    logger.error('listWorkspaces', error);
    return [];
  }
}

export async function deleteWorkspace(context: AppContext, id: string) {
  const basePath = getWorkspaceBasePath(context, id);
  const movedPath = path.join(
    context.appDataPath,
    'delete-workspaces',
    `${id}`
  );
  try {
    const db = await ensureSQLiteDB(id);
    db.destroy();
    return await fs.move(basePath, movedPath, {
      overwrite: true,
    });
  } catch (error) {
    logger.error('deleteWorkspace', error);
  }
}

export function getWorkspacesBasePath(context: AppContext) {
  return path.join(context.appDataPath, 'workspaces');
}

export function getWorkspaceBasePath(context: AppContext, workspaceId: string) {
  return path.join(context.appDataPath, 'workspaces', workspaceId);
}

export function getWorkspaceDBPath(context: AppContext, workspaceId: string) {
  const basePath = getWorkspaceBasePath(context, workspaceId);
  return path.join(basePath, 'storage.db');
}

export function getWorkspaceMetaPath(context: AppContext, workspaceId: string) {
  const basePath = getWorkspaceBasePath(context, workspaceId);
  return path.join(basePath, 'meta.json');
}

/**
 * Get workspace meta, create one if not exists
 * This function will also migrate the workspace if needed
 */
export async function getWorkspaceMeta(
  context: AppContext,
  workspaceId: string
): Promise<WorkspaceMeta> {
  try {
    const basePath = getWorkspaceBasePath(context, workspaceId);
    const metaPath = getWorkspaceMetaPath(context, workspaceId);
    if (!(await fs.exists(metaPath))) {
      // since not meta is found, we will migrate symlinked db file if needed
      await fs.ensureDir(basePath);
      const dbPath = getWorkspaceDBPath(context, workspaceId);

      // todo: remove this after migration (in stable version)
      const realDBPath = (await fs.exists(dbPath))
        ? await fs.realpath(dbPath)
        : dbPath;
      const isLink = realDBPath !== dbPath;
      if (isLink) {
        await fs.copy(realDBPath, dbPath);
      }
      // create one if not exists
      const meta = {
        id: workspaceId,
        mainDBPath: dbPath,
        secondaryDBPath: isLink ? realDBPath : undefined,
      };
      await fs.writeJSON(metaPath, meta);
      return meta;
    } else {
      const meta = await fs.readJSON(metaPath);
      return meta;
    }
  } catch (err) {
    logger.error('getWorkspaceMeta failed', err);
    throw err;
  }
}

export async function storeWorkspaceMeta(
  context: AppContext,
  workspaceId: string,
  meta: Partial<WorkspaceMeta>
) {
  try {
    const basePath = getWorkspaceBasePath(context, workspaceId);
    await fs.ensureDir(basePath);
    const metaPath = path.join(basePath, 'meta.json');
    const currentMeta = await getWorkspaceMeta(context, workspaceId);
    const newMeta = {
      ...currentMeta,
      ...meta,
    };
    await fs.writeJSON(metaPath, newMeta);
    workspaceSubjects.meta.next({
      workspaceId,
      meta: newMeta,
    });
  } catch (err) {
    logger.error('storeWorkspaceMeta failed', err);
  }
}

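Taken together, handlers.ts is the main process's source of truth for per-workspace metadata: getWorkspaceMeta lazily creates meta.json (migrating a symlinked storage.db on the way), and storeWorkspaceMeta merges partial updates and notifies subscribers. A short sketch of the intended call pattern; the WorkspaceMeta and AppContext shapes below are inferred from the usage above, and the real definitions in ../type and ../context may carry more fields:

// shapes inferred from handlers.ts, for illustration only
type WorkspaceMeta = {
  id: string;
  mainDBPath: string;       // storage.db inside the workspace folder
  secondaryDBPath?: string; // original location of a migrated, symlinked db
};
type AppContext = { appDataPath: string };

// stand-ins for the exported functions defined above
declare function getWorkspaceMeta(ctx: AppContext, id: string): Promise<WorkspaceMeta>;
declare function storeWorkspaceMeta(
  ctx: AppContext,
  id: string,
  meta: Partial<WorkspaceMeta>
): Promise<void>;

async function exampleCaller(ctx: AppContext, workspaceId: string) {
  // reads meta.json, creating it (and migrating a symlinked storage.db) if missing
  const meta = await getWorkspaceMeta(ctx, workspaceId);

  // merges the partial update into meta.json and emits on workspaceSubjects.meta
  await storeWorkspaceMeta(ctx, workspaceId, {
    secondaryDBPath: '/path/chosen/by/the/user.db', // placeholder
  });

  return meta;
}
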
apps/electron/layers/main/src/workspace/index.ts (new file, 44 lines)
@@ -0,0 +1,44 @@
import { merge } from 'rxjs';
import { filter, map } from 'rxjs/operators';

import { appContext } from '../context';
import type {
  MainEventListener,
  NamespaceHandlers,
  WorkspaceMeta,
} from '../type';
import { deleteWorkspace, getWorkspaceMeta, listWorkspaces } from './handlers';
import { workspaceSubjects } from './subjects';

export * from './handlers';
export * from './subjects';

export const workspaceEvents = {
  onMetaChange: (
    fn: (meta: { workspaceId: string; meta: WorkspaceMeta }) => void
  ) => {
    const sub = workspaceSubjects.meta.subscribe(fn);
    return () => {
      sub.unsubscribe();
    };
  },
} satisfies Record<string, MainEventListener>;

export const workspaceHandlers = {
  list: async () => listWorkspaces(appContext),
  delete: async (_, id: string) => deleteWorkspace(appContext, id),
  getMeta: async (_, id: string) => {
    return getWorkspaceMeta(appContext, id);
  },
} satisfies NamespaceHandlers;

// used internally. Get a stream of workspace id -> meta
export const getWorkspaceMeta$ = (workspaceId: string) => {
  return merge(
    getWorkspaceMeta(appContext, workspaceId),
    workspaceSubjects.meta.pipe(
      map(meta => meta.meta),
      filter(meta => meta.id === workspaceId)
    )
  );
};

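getWorkspaceMeta$ merges a one-off getWorkspaceMeta read with the workspaceSubjects.meta stream, so a subscriber receives the current meta immediately and then every later change. A hedged sketch of a consumer (not code from the repository; the import path and tear-down timing are assumptions):

// hypothetical main-process consumer; the import path depends on where this
// runs relative to layers/main/src/workspace
import { getWorkspaceMeta$, workspaceEvents } from './workspace';

const someWorkspaceId = 'placeholder-workspace-id';

// current meta immediately, then every subsequent change for this workspace
const subscription = getWorkspaceMeta$(someWorkspaceId).subscribe(meta => {
  console.log('workspace meta is now', meta.mainDBPath, meta.secondaryDBPath);
});

// every workspace's meta changes, e.g. to forward over IPC to the renderer
const unsubscribe = workspaceEvents.onMetaChange(({ workspaceId, meta }) => {
  console.log('meta changed for', workspaceId, meta);
});

// tear down when the window closes or the app quits
subscription.unsubscribe();
unsubscribe();
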
apps/electron/layers/main/src/workspace/subjects.ts (new file, 7 lines)
@@ -0,0 +1,7 @@
import { Subject } from 'rxjs';

import type { WorkspaceMeta } from '../type';

export const workspaceSubjects = {
  meta: new Subject<{ workspaceId: string; meta: WorkspaceMeta }>(),
};

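One note on the rxjs semantics here (an observation about the library, not something stated in the diff): a plain Subject does not replay past values, so late subscribers only see emissions that happen after they subscribe. That is presumably why getWorkspaceMeta$ above merges a fresh read with the subject instead of exposing the subject alone. A tiny illustration:

import { BehaviorSubject, Subject } from 'rxjs';

const plain = new Subject<number>();
plain.next(1);
plain.subscribe(v => console.log('plain', v)); // never sees 1
plain.next(2);                                 // logs "plain 2"

// a BehaviorSubject replays the latest value to late subscribers, which is the
// effect getWorkspaceMeta$ recreates by merging an initial read with the subject
const replayed = new BehaviorSubject<number>(1);
replayed.subscribe(v => console.log('replayed', v)); // logs "replayed 1" immediately
replayed.next(2);                                    // logs "replayed 2"
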
apps/electron/layers/preload/preload.d.ts (vendored, 7 lines changed)
@@ -1,7 +1,6 @@
/* eslint-disable @typescript-eslint/consistent-type-imports */

interface Window {
apis?: typeof import('./src/affine-apis').apis;
events?: typeof import('./src/affine-apis').events;
appInfo?: typeof import('./src/affine-apis').appInfo;
declare interface Window {
apis: import('./src/affine-apis').PreloadHandlers;
events: import('./src/affine-apis').MainIPCEventMap;
}

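With the typing change above, window.apis and window.events are declared rather than optional in the renderer. A hypothetical call site is sketched below; the handler names match workspaceHandlers earlier in this diff, but the "workspace" namespace key is an assumption, since the registration site is not part of this excerpt:

// hypothetical renderer-side usage, assuming the workspace handlers are
// registered under a "workspace" namespace
async function logWorkspaceMeta(workspaceId: string) {
  // corresponds to workspaceHandlers.getMeta in the main process
  const meta = await window.apis.workspace.getMeta(workspaceId);
  console.log(meta.mainDBPath);

  // corresponds to workspaceHandlers.list
  const all = await window.apis.workspace.list();
  console.log(all.map(([id]) => id));
}
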
@@ -1,9 +1,13 @@
/* eslint-disable @typescript-eslint/no-var-requires */
// NOTE: we will generate preload types from this file

// NOTE: we will generate preload types from this file
import { ipcRenderer } from 'electron';

import type { MainIPCEventMap, MainIPCHandlerMap } from '../../constraints';
// eslint-disable-next-line @typescript-eslint/no-restricted-imports
import type {
MainIPCEventMap,
MainIPCHandlerMap,
} from '../../main/src/exposed';

type WithoutFirstParameter<T> = T extends (_: any, ...args: infer P) => infer R
? (...args: P) => R

@@ -15,7 +19,7 @@ type HandlersMap<N extends keyof MainIPCHandlerMap> = {
>;
};

type PreloadHandlers = {
export type PreloadHandlers = {
[N in keyof MainIPCHandlerMap]: HandlersMap<N>;
};

@@ -24,17 +28,17 @@ type MainExposedMeta = {
events: [namespace: string, eventNames: string[]][];
};

const meta: MainExposedMeta = (() => {
const val = process.argv
.find(arg => arg.startsWith('--exposed-meta='))
?.split('=')[1];

return val ? JSON.parse(val) : null;
})();

// main handlers that can be invoked from the renderer process
const apis: PreloadHandlers = (() => {
// the following were generated by the build script
// 1. bundle extra main/src/expose.ts entry
// 2. use generate-main-exposed-meta.mjs to generate exposed-meta.js in dist
//
// we cannot directly import main/src/handlers.ts because it will be bundled into the preload bundle
// eslint-disable-next-line @typescript-eslint/no-var-requires
const {
handlers: handlersMeta,
}: MainExposedMeta = require('../main/exposed-meta');
const { handlers: handlersMeta } = meta;

const all = handlersMeta.map(([namespace, functionNames]) => {
const namespaceApis = functionNames.map(name => {

@@ -54,9 +58,7 @@ const apis: PreloadHandlers = (() => {

// main events that can be listened to from the renderer process
const events: MainIPCEventMap = (() => {
const {
events: eventsMeta,
}: MainExposedMeta = require('../main/exposed-meta');
const { events: eventsMeta } = meta;

// NOTE: ui may try to listen to a lot of the same events, so we increase the limit...
ipcRenderer.setMaxListeners(100);

@@ -90,3 +92,6 @@ const appInfo = {
};

export { apis, appInfo, events };

// eslint-disable-next-line @typescript-eslint/no-restricted-imports
export type { MainIPCEventMap } from '../../main/src/exposed';

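As the hunks above show, the preload script now reads its exposed meta from a --exposed-meta= entry in process.argv instead of requiring a generated ../main/exposed-meta module. The main-process side of that handshake is not shown in this excerpt; a plausible sketch using Electron's webPreferences.additionalArguments, with the helper name and window options as assumptions:

// hypothetical main-process wiring for the --exposed-meta argument
import { BrowserWindow } from 'electron';

// shape mirrors MainExposedMeta in the preload script above
type MainExposedMeta = {
  handlers: [namespace: string, handlerNames: string[]][];
  events: [namespace: string, eventNames: string[]][];
};

function createWindow(exposedMeta: MainExposedMeta, preloadPath: string) {
  return new BrowserWindow({
    webPreferences: {
      preload: preloadPath,
      // appended to process.argv of the preload/renderer process,
      // which is where the preload script picks it up
      additionalArguments: [`--exposed-meta=${JSON.stringify(exposedMeta)}`],
    },
  });
}
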
@@ -1,7 +0,0 @@
export const isMacOS = () => {
return process.platform === 'darwin';
};

export const isWindows = () => {
return process.platform === 'win32';
};

@@ -1,7 +1,7 @@
{
"name": "@affine/electron",
"private": true,
"version": "0.6.0-canary.2",
"version": "0.7.0-canary.8",
"author": "affine",
"repository": {
"url": "https://github.com/toeverything/AFFiNE",

@@ -15,13 +15,8 @@
"prod": "yarn electron-rebuild && yarn node scripts/dev.mjs",
"build-layers": "zx scripts/build-layers.mjs",
"generate-assets": "zx scripts/generate-assets.mjs",
"generate-main-exposed-meta": "zx scripts/generate-main-exposed-meta.mjs",
"package": "electron-forge package",
"make": "electron-forge make",
"make-macos-arm64": "electron-forge make --platform=darwin --arch=arm64",
"make-macos-x64": "electron-forge make --platform=darwin --arch=x64",
"make-windows-x64": "electron-forge make --platform=win32 --arch=x64",
"make-linux-x64": "electron-forge make --platform=linux --arch=x64",
"rebuild:for-unit-test": "yarn rebuild better-sqlite3",
"rebuild:for-electron": "yarn electron-rebuild",
"test": "playwright test"

@@ -32,6 +27,7 @@
"main": "./dist/layers/main/index.js",
"devDependencies": {
"@affine-test/kit": "workspace:*",
"@affine/native": "workspace:*",
"@electron-forge/cli": "^6.1.1",
"@electron-forge/core": "^6.1.1",
"@electron-forge/core-utils": "^6.1.1",

@@ -44,22 +40,26 @@
"@electron/remote": "2.0.9",
"@types/better-sqlite3": "^7.6.4",
"@types/fs-extra": "^11.0.1",
"@types/uuid": "^9.0.1",
"cross-env": "7.0.3",
"electron": "24.3.0",
"electron-log": "^5.0.0-beta.23",
"electron": "25.0.0",
"electron-log": "^5.0.0-beta.24",
"electron-squirrel-startup": "1.0.0",
"electron-window-state": "^5.0.3",
"esbuild": "^0.17.19",
"fs-extra": "^11.1.1",
"playwright": "^1.33.0",
"playwright": "=1.33.0",
"ts-node": "^10.9.1",
"undici": "^5.22.1",
"uuid": "^9.0.0",
"zx": "^7.2.2"
},
"dependencies": {
"better-sqlite3": "^8.3.0",
"better-sqlite3": "^8.4.0",
"cheerio": "^1.0.0-rc.12",
"chokidar": "^3.5.3",
"electron-updater": "^5.3.0",
"lodash-es": "^4.17.21",
"nanoid": "^4.0.2",
"rxjs": "^7.8.1",
"yjs": "^13.6.1"

apps/electron/resources/icons/affine_installing.gif (new binary file, 2.1 MiB; not shown)
@@ -8,6 +8,11 @@ import { config } from './common.mjs';
const NODE_ENV =
process.env.NODE_ENV === 'development' ? 'development' : 'production';

if (process.platform === 'win32') {
$.shell = true;
$.prefix = '';
}

async function buildLayers() {
const common = config();
await esbuild.build(common.preload);

@@ -20,8 +25,6 @@ async function buildLayers() {
'process.env.BUILD_TYPE': `"${process.env.BUILD_TYPE || 'stable'}"`,
},
});

await $`yarn workspace @affine/electron generate-main-exposed-meta`;
}

await buildLayers();

@@ -12,16 +12,6 @@ const DEV_SERVER_URL = process.env.DEV_SERVER_URL;
/** @type 'production' | 'development'' */
const mode = (process.env.NODE_ENV = process.env.NODE_ENV || 'development');

const nativeNodeModulesPlugin = {
name: 'native-node-modules',
setup(build) {
// Mark native Node.js modules as external
build.onResolve({ filter: /\.node$/, namespace: 'file' }, args => {
return { path: args.path, external: true };
});
},
};

// List of env that will be replaced by esbuild
const ENV_MACROS = ['AFFINE_GOOGLE_CLIENT_ID', 'AFFINE_GOOGLE_CLIENT_SECRET'];

@@ -33,6 +23,7 @@ export const config = () => {
JSON.stringify(process.env[key] ?? ''),
]),
['process.env.NODE_ENV', `"${mode}"`],
['process.env.USE_WORKER', '"true"'],
]);

if (DEV_SERVER_URL) {

@@ -43,16 +34,20 @@ export const config = () => {
main: {
entryPoints: [
resolve(root, './layers/main/src/index.ts'),
resolve(root, './layers/main/src/exposed.ts'),
resolve(root, './layers/main/src/workers/merge-update.worker.ts'),
],
outdir: resolve(root, './dist/layers/main'),
bundle: true,
target: `node${NODE_MAJOR_VERSION}`,
platform: 'node',
external: ['electron', 'yjs', 'better-sqlite3', 'electron-updater'],
plugins: [nativeNodeModulesPlugin],
define: define,
format: 'cjs',
loader: {
'.node': 'copy',
},
assetNames: '[name]',
treeShaking: true,
},
preload: {
entryPoints: [resolve(root, './layers/preload/src/index.ts')],

@@ -60,8 +55,7 @@ export const config = () => {
bundle: true,
target: `node${NODE_MAJOR_VERSION}`,
platform: 'node',
external: ['electron', '../main/exposed-meta'],
plugins: [nativeNodeModulesPlugin],
external: ['electron'],
define: define,
},
};

@@ -1,5 +1,5 @@
/* eslint-disable no-async-promise-executor */
import { execSync, spawn } from 'node:child_process';
import { spawn } from 'node:child_process';
import { readFileSync } from 'node:fs';
import path from 'node:path';

@@ -105,8 +105,6 @@ async function watchMain() {
name: 'electron-dev:reload-app-on-main-change',
setup(build) {
build.onEnd(() => {
execSync('yarn generate-main-exposed-meta');

if (initialBuild) {
console.log(`[main] has changed, [re]launching electron...`);
spawnOrReloadElectron();

@@ -1,14 +1,18 @@
#!/usr/bin/env zx
import 'zx/globals';

import { createRequire } from 'node:module';
import path from 'node:path';

const require = createRequire(import.meta.url);

const repoRootDir = path.join(__dirname, '..', '..', '..');
const electronRootDir = path.join(__dirname, '..');
const publicDistDir = path.join(electronRootDir, 'resources');
const affineWebDir = path.join(repoRootDir, 'apps', 'web');
const affineWebOutDir = path.join(affineWebDir, 'out');
const publicAffineOutDir = path.join(publicDistDir, `web-static`);
const releaseVersionEnv = process.env.RELEASE_VERSION || '';

console.log('build with following dir', {
repoRootDir,

@@ -19,9 +23,16 @@ console.log('build with following dir', {
publicAffineOutDir,
});

// step 0: check version match
const electronPackageJson = require(`${electronRootDir}/package.json`);
if (releaseVersionEnv && electronPackageJson.version !== releaseVersionEnv) {
throw new Error(
`Version mismatch, expected ${releaseVersionEnv} but got ${electronPackageJson.version}`
);
}
// copy web dist files to electron dist

// step 0: clean up
// step 1: clean up
await cleanup();
echo('Clean up done');

@@ -32,9 +43,6 @@ if (process.platform === 'win32') {

cd(repoRootDir);

// step 1: build electron resources
await $`yarn workspace @affine/electron build-layers`;

// step 2: build web (nextjs) dist
if (!process.env.SKIP_WEB_BUILD) {
process.env.ENABLE_LEGACY_PROVIDER = 'false';

@@ -59,6 +67,17 @@ if (!process.env.SKIP_WEB_BUILD) {
await fs.move(affineWebOutDir, publicAffineOutDir, { overwrite: true });
}

// step 3: update app-updater.yml content with build type in resources folder
if (process.env.BUILD_TYPE === 'internal') {
const appUpdaterYml = path.join(publicDistDir, 'app-update.yml');
const appUpdaterYmlContent = await fs.readFile(appUpdaterYml, 'utf-8');
const newAppUpdaterYmlContent = appUpdaterYmlContent.replace(
'AFFiNE',
'AFFiNE-Releases'
);
await fs.writeFile(appUpdaterYml, newAppUpdaterYmlContent);
}

/// --------
/// --------
/// --------

@@ -1,40 +0,0 @@
#!/usr/bin/env zx
/* eslint-disable @typescript-eslint/no-restricted-imports */
import 'zx/globals';

const mainDistDir = path.resolve(__dirname, '../dist/layers/main');

// be careful and avoid any side effects in
const { handlers, events } = await import(
'file://' + path.resolve(mainDistDir, 'exposed.js')
);

const handlersMeta = Object.entries(handlers).map(
([namespace, namespaceHandlers]) => {
return [
namespace,
Object.keys(namespaceHandlers).map(handlerName => handlerName),
];
}
);

const eventsMeta = Object.entries(events).map(
([namespace, namespaceHandlers]) => {
return [
namespace,
Object.keys(namespaceHandlers).map(handlerName => handlerName),
];
}
);

const meta = {
handlers: handlersMeta,
events: eventsMeta,
};

await fs.writeFile(
path.resolve(mainDistDir, 'exposed-meta.js'),
`module.exports = ${JSON.stringify(meta)};`
);

console.log('generate main exposed-meta.js done');

@@ -2,7 +2,9 @@
"extends": "../../../tsconfig.json",
"compilerOptions": {
"baseUrl": ".",
"noEmit": true
"noEmit": true,
"target": "ESNext"
},
"references": [{ "path": "../../../tests/kit" }],
"include": ["**.spec.ts", "**.test.ts"]
}

@@ -23,7 +23,7 @@ test('move workspace db file', async ({ page, appInfo, workspace }) => {
// goto settings
await settingButton.click();

const tmpPath = path.join(appInfo.sessionData, w.id + '-tmp.db');
const tmpPath = path.join(appInfo.sessionData, w.id + '-tmp-dir');

// move db file to tmp folder
await page.evaluate(tmpPath => {

@@ -36,6 +36,9 @@ test('move workspace db file', async ({ page, appInfo, workspace }) => {
// check if db file exists
await page.waitForSelector('text="Move folder success"');
expect(await fs.exists(tmpPath)).toBe(true);
// check if db file exists under tmpPath (a file ends with .affine)
const files = await fs.readdir(tmpPath);
expect(files.some(f => f.endsWith('.affine'))).toBe(true);
});

test('export then add', async ({ page, appInfo, workspace }) => {

@@ -56,7 +59,7 @@ test('export then add', async ({ page, appInfo, workspace }) => {

const tmpPath = path.join(appInfo.sessionData, w.id + '-tmp.db');

// move db file to tmp folder
// export db file to tmp folder
await page.evaluate(tmpPath => {
window.apis?.dialog.setFakeDialogResult({
filePath: tmpPath,

@@ -1,6 +1,7 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"composite": true,
"skipLibCheck": true,
"target": "ESNext",
"module": "ESNext",

@@ -10,14 +11,22 @@
"outDir": "dist",
"moduleResolution": "node",
"resolveJsonModule": true,
"isolatedModules": true
"noImplicitOverride": true,
"noEmit": false
},
"include": ["**/*.ts", "**/*.tsx", "package.json"],
"exclude": ["out", "dist", "node_modules"],
"include": ["**/*.ts", "**/*.tsx"],
"exclude": ["node_modules", "out", "dist"],
"references": [
{
"path": "./tsconfig.node.json"
}
},
{
"path": "../../packages/native"
},
{
"path": "../../packages/env"
},
{ "path": "../../tests/kit" }
],
"ts-node": {
"esm": true,

@@ -1,11 +1,13 @@
{
"extends": "./tsconfig.json",
"compilerOptions": {
"composite": true,
"target": "ESNext",
"module": "ESNext",
"resolveJsonModule": true,
"moduleResolution": "Node",
"allowSyntheticDefaultImports": true
"allowSyntheticDefaultImports": true,
"noEmit": false
},
"include": ["./scripts", "package.json"]
"include": ["./scripts"]
}

@@ -1,7 +1,7 @@
{
"name": "@affine/server",
"private": true,
"version": "0.6.0-canary.2",
"version": "0.7.0-canary.8",
"description": "Affine Node.js server",
"type": "module",
"bin": {

@@ -15,36 +15,36 @@
},
"dependencies": {
"@apollo/server": "^4.7.1",
"@nestjs/apollo": "^11.0.5",
"@nestjs/common": "^9.4.0",
"@nestjs/core": "^9.4.0",
"@nestjs/graphql": "^11.0.5",
"@nestjs/platform-express": "^9.4.0",
"@nestjs/apollo": "^11.0.6",
"@nestjs/common": "^9.4.2",
"@nestjs/core": "^9.4.2",
"@nestjs/graphql": "^11.0.6",
"@nestjs/platform-express": "^9.4.2",
"@node-rs/bcrypt": "^1.7.1",
"@prisma/client": "^4.14.0",
"dotenv": "^16.0.3",
"@prisma/client": "^4.15.0",
"dotenv": "^16.1.1",
"express": "^4.18.2",
"graphql": "^16.6.0",
"graphql-type-json": "^0.3.2",
"jsonwebtoken": "^9.0.0",
"lodash-es": "^4.17.21",
"prisma": "^4.14.0",
"prisma": "^4.15.0",
"reflect-metadata": "^0.1.13",
"rxjs": "^7.8.1"
},
"devDependencies": {
"@nestjs/testing": "^9.4.0",
"@nestjs/testing": "^9.4.2",
"@types/express": "^4.17.17",
"@types/jsonwebtoken": "^9.0.2",
"@types/lodash-es": "^4.17.7",
"@types/node": "^18.16.9",
"@types/node": "^18.16.16",
"@types/supertest": "^2.0.12",
"c8": "^7.13.0",
"c8": "^7.14.0",
"nodemon": "^2.0.22",
"supertest": "^6.3.3",
"ts-node": "^10.9.1",
"typescript": "^5.0.4",
"vitest": "^0.31.0"
"vitest": "^0.31.2"
},
"nodemonConfig": {
"exec": "node",

@@ -10,7 +10,7 @@ declare global {
}
}

export const enum ExternalAccount {
export enum ExternalAccount {
github = 'github',
google = 'google',
firebase = 'firebase',

@@ -5,7 +5,8 @@
"module": "ESNext",
"resolveJsonModule": true,
"moduleResolution": "Node",
"allowSyntheticDefaultImports": true
"allowSyntheticDefaultImports": true,
"outDir": "dist/scripts"
},
"include": ["scripts", "package.json"]
}

@@ -10,7 +10,7 @@ First, run the development server:
pnpm run dev
```

Open [http://localhost:8080](http://localhost:3000) with your browser to see the result.
Open [http://localhost:8080](http://localhost:8080) with your browser to see the result.

You can start editing the page by modifying `src/pages/workspace/[workspaceId]/all.tsx`. The page auto-updates as you edit the file.

@@ -109,8 +109,10 @@ const nextConfig = {
'@affine/templates',
'@affine/workspace',
'@affine/jotai',
'@affine/copilot',
'@toeverything/hooks',
'@toeverything/y-indexeddb',
'@toeverything/plugin-infra',
],
publicRuntimeConfig: {
PROJECT_NAME: process.env.npm_package_name ?? 'AFFiNE',

@@ -1,17 +1,17 @@
{
"name": "@affine/web",
"private": true,
"version": "0.6.0-canary.2",
"version": "0.7.0-canary.8",
"scripts": {
"dev": "next dev",
"build": "next build",
"export": "next export",
"start": "next start",
"lint": "next lint"
"start": "next start"
},
"dependencies": {
"@affine-test/fixtures": "workspace:*",
"@affine/component": "workspace:*",
"@affine/copilot": "workspace:*",
"@affine/debug": "workspace:*",
"@affine/env": "workspace:*",
"@affine/graphql": "workspace:*",

@@ -19,34 +19,35 @@
"@affine/jotai": "workspace:*",
"@affine/templates": "workspace:*",
"@affine/workspace": "workspace:*",
"@blocksuite/blocks": "0.0.0-20230514141009-705c0fac-nightly",
"@blocksuite/editor": "0.0.0-20230514141009-705c0fac-nightly",
"@blocksuite/global": "0.0.0-20230514141009-705c0fac-nightly",
"@blocksuite/icons": "^2.1.16",
"@blocksuite/lit": "0.0.0-20230514141009-705c0fac-nightly",
"@blocksuite/store": "0.0.0-20230514141009-705c0fac-nightly",
"@blocksuite/blocks": "0.0.0-20230601122821-16196c35-nightly",
"@blocksuite/editor": "0.0.0-20230601122821-16196c35-nightly",
"@blocksuite/global": "0.0.0-20230601122821-16196c35-nightly",
"@blocksuite/icons": "^2.1.19",
"@blocksuite/lit": "0.0.0-20230601122821-16196c35-nightly",
"@blocksuite/store": "0.0.0-20230601122821-16196c35-nightly",
"@dnd-kit/core": "^6.0.8",
"@dnd-kit/sortable": "^7.0.2",
"@emotion/cache": "^11.11.0",
"@emotion/react": "^11.11.0",
"@emotion/server": "^11.11.0",
"@emotion/styled": "^11.11.0",
"@mui/material": "^5.13.0",
"@react-hookz/web": "^23.0.0",
"@sentry/nextjs": "^7.51.2",
"@mui/material": "^5.13.3",
"@react-hookz/web": "^23.0.1",
"@sentry/nextjs": "^7.53.1",
"@toeverything/hooks": "workspace:*",
"@toeverything/plugin-infra": "workspace:*",
"cmdk": "^0.2.0",
"css-spring": "^4.1.0",
"dayjs": "^1.11.7",
"graphql": "^16.6.0",
"jotai": "^2.1.0",
"jotai-devtools": "^0.5.3",
"lit": "^2.7.4",
"lottie-web": "^5.11.0",
"lottie-web": "^5.12.0",
"next-themes": "^0.2.1",
"react": "18.3.0-canary-16d053d59-20230506",
"react-dom": "18.3.0-canary-16d053d59-20230506",
"react-is": "^18.2.0",
"react-mosaic-component": "^6.0.1",
"rxjs": "^7.8.1",
"swr": "^2.1.5",
"y-protocols": "^1.0.5",

@@ -62,21 +63,21 @@
"@swc-jotai/react-refresh": "^0.0.8",
"@types/react": "^18.2.6",
"@types/react-dom": "^18.2.4",
"@types/webpack-env": "^1.18.0",
"@types/webpack-env": "^1.18.1",
"@vanilla-extract/css": "^1.11.0",
"@vanilla-extract/next-plugin": "^2.1.2",
"dotenv": "^16.0.3",
"eslint": "^8.40.0",
"eslint-config-next": "^13.4.2",
"@vanilla-extract/next-plugin": "=2.1.2",
"dotenv": "^16.1.1",
"eslint": "^8.41.0",
"eslint-config-next": "^13.4.4",
"eslint-plugin-unicorn": "^47.0.0",
"next": "^13.4.2",
"next": "=13.4.2",
"next-debug-local": "^0.1.5",
"next-router-mock": "^0.9.3",
"raw-loader": "^4.0.2",
"redux": "^4.2.1",
"swc-plugin-coverage-instrument": "^0.0.18",
"typescript": "^5.0.4",
"webpack": "^5.82.1"
"webpack": "^5.84.1"
},
"stableVersion": "0.0.0"
}

@@ -1,6 +1,5 @@
// @ts-check
import 'dotenv/config';

/**
* @type {import('@affine/env').BlockSuiteFeatureFlags}
*/

@@ -12,12 +11,19 @@ export const blockSuiteFeatureFlags = {
enable_drag_handle: true,
enable_surface: true,
enable_linked_page: true,
enable_bookmark_operation: process.env.ENABLE_BOOKMARK_OPERATION === 'true',
};

/**
* @type {import('@affine/env').BuildFlags}
*/
export const buildFlags = {
enablePlugin: process.env.ENABLE_PLUGIN === 'true',
enableAllPageFilter:
!!process.env.VERCEL ||
(process.env.ENABLE_ALL_PAGE_FILTER
? process.env.ENABLE_ALL_PAGE_FILTER === 'true'
: false),
enableImagePreviewModal: process.env.ENABLE_IMAGE_PREVIEW_MODAL
? process.env.ENABLE_IMAGE_PREVIEW_MODAL === 'true'
: true,

@@ -33,4 +39,6 @@ export const buildFlags = {
enableDebugPage: Boolean(
process.env.ENABLE_DEBUG_PAGE ?? process.env.NODE_ENV === 'development'
),
changelogUrl:
process.env.CHANGELOG_URL ?? 'http://affine.pro/blog/whats-new-affine-0601',
};

@@ -1,13 +1,13 @@
import { Unreachable } from '@affine/env/constant';
import { affineApis } from '@affine/workspace/affine/shared';
import { rootStore } from '@affine/workspace/atom';
import { createAffineProviders } from '@affine/workspace/providers';
import type { AffineLegacyCloudWorkspace } from '@affine/workspace/type';
import { WorkspaceFlavour } from '@affine/workspace/type';
import { createEmptyBlockSuiteWorkspace } from '@affine/workspace/utils';
import { assertExists } from '@blocksuite/store';

import { workspacesAtom } from '../../atoms';
import { createAffineProviders } from '../../blocksuite';
import { Unreachable } from '../../components/affine/affine-error-eoundary';
import { affineApis } from '../../shared/apis';

type Query = (typeof QueryKey)[keyof typeof QueryKey];

Some files were not shown because too many files have changed in this diff.