Mirror of https://github.com/toeverything/AFFiNE.git (synced 2026-02-05 17:13:43 +00:00)

Compare commits: 120 commits, v0.5.4-bet ... v0.6.0
Commits (SHA1):

a3bbd7e098  27fa3a5d76  b342cc604c  79ab2c1525  1e571089b3  4ab5457a44
2a31af0973  fad00c242b  f93db613f4  13e85f11f8  e1d87cf698  369282e29e
a018d50780  9379a0fb49  fb9d200dd3  7e8169c4b8  f18c164953  411593c8de
cfc3fbbb3f  589ae0a26c  4ced5a226d  cb914c0405  d30f99d8df  eb01c2e76f
72d4c785e5  151fb56281  f340e15987  f06f67e182  7050f41ba9  72066a6f54
52c1efee9e  3eef11416a  a84cfb80d1  840ce7d146  30cbde31cb  7c6d5adde5
dc33130c79  7ed3250042  4abe62c9e0  e4ba72853d  8281fa49c1  cffdd420e2
a7ac6562b0  e7e447d0e1  3a043f339c  0a4b2b9ca7  ece0853265  8b3c87cdfa
2ed8d63d8a  f7487ad037  bfe3b2242e  f0763337c4  540a93274a  2dff731965
319d71345d  a22c39f395  81e4122fc2  5832ba1f34  a9d417b3ce  8e6bb78bea
921f4c97d1  6d362f77ca  76a93cb859  87d03f6e17  bd6bde130b  b2da7cdff5
f982120a1f  099b84c383  5266f6ac13  4cfba64aa5  d16927c4ad  aa02779883
e734771beb  d2badccce3  581bc97896  5911b526e5  19a8d85924  c5c9723c48
604b0441b0  0444dd9264  823bcbb6fb  29bd170c2b  8b672064d0  3299d64488
e5b47c307e  93a584e4b9  cee829d08f  79116f06dd  460dc4d560  bd83f95745
40ee400285  f6da67df32  4948fb555c  b6fd58e0f5  f4057593af  d17a5c2784
cae3527c12  4c79a918b2  7e4edd2c65  01173babe6  569d71886c  03b4f78743
a7b3aacc28  3f95c3a654  3dcee2fa60  8450523f05  05ee884532  4c2d17b07f
abf57e5b45  7aef5de193  e765a7e831  99dc4fbf22  d905c9b5cf  41936393ea
d869ce1684  68a114c540  fb5e027c61  debf8d170e  97e88b3d8b  05bf41501a
@@ -5,3 +5,4 @@ out
storybook-static
affine-out
_next
lib

51  .eslintrc.js

@@ -1,3 +1,43 @@
const createPattern = packageName => [
  {
    group: ['**/dist', '**/dist/**'],
    message: 'Do not import from dist',
    allowTypeImports: false,
  },
  {
    group: ['**/src', '**/src/**'],
    message: 'Do not import from src',
    allowTypeImports: false,
  },
  {
    group: [`@affine/${packageName}`],
    message: 'Do not import package itself',
    allowTypeImports: false,
  },
  {
    group: [`@toeverything/${packageName}`],
    message: 'Do not import package itself',
    allowTypeImports: false,
  },
];

const allPackages = [
  'cli',
  'component',
  'debug',
  'env',
  'graphql',
  'hooks',
  'i18n',
  'jotai',
  'native',
  'plugin-infra',
  'templates',
  'theme',
  'workspace',
  'y-indexeddb',
];

/**
 * @type {import('eslint').Linter.Config}
 */

@@ -96,6 +136,17 @@ const config = {
      '@typescript-eslint/no-var-requires': 0,
    },
  },
  ...allPackages.map(pkg => ({
    files: [`packages/${pkg}/src/**/*.ts`, `packages/${pkg}/src/**/*.tsx`],
    rules: {
      '@typescript-eslint/no-restricted-imports': [
        'error',
        {
          patterns: createPattern(pkg),
        },
      ],
    },
  })),
  ],
};
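For illustration, a minimal sketch of the override object that the `allPackages.map(...)` expression above produces for a single entry; the package name `workspace` is just a sample pick from `allPackages`:

```ts
// Sketch only: the flat override generated for pkg = 'workspace' by the mapping above.
// It bans deep dist/src imports and self-imports through the package's published names.
const workspaceOverride = {
  files: ['packages/workspace/src/**/*.ts', 'packages/workspace/src/**/*.tsx'],
  rules: {
    '@typescript-eslint/no-restricted-imports': [
      'error',
      {
        patterns: [
          { group: ['**/dist', '**/dist/**'], message: 'Do not import from dist', allowTypeImports: false },
          { group: ['**/src', '**/src/**'], message: 'Do not import from src', allowTypeImports: false },
          { group: ['@affine/workspace'], message: 'Do not import package itself', allowTypeImports: false },
          { group: ['@toeverything/workspace'], message: 'Do not import package itself', allowTypeImports: false },
        ],
      },
    ],
  },
};
```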
1  .github/CLA.md  (vendored)

@@ -58,3 +58,4 @@ Example:
- Howard Do, @howarddo2208, 2023/04/20
- 三咲智子 Kevin Deng, @sxzz, 2023/04/21
- Moeyua, @moeyua, 2023/04/22
- Shishu, @shishudesu, 2023/05/19
6  .github/labeler.yml  (vendored)

@@ -8,11 +8,17 @@ test:
  - '**/tests/**/*'
  - '**/__tests__/**/*'

plugin:copilot:
  - 'plugins/copilot/**/*'

mod:dev:
  - 'scripts/**/*'
  - 'packages/cli/**/*'
  - 'packages/debug/**/*'

mod:plugin-infra:
  - 'packages/plugin-infra/**/*'

mod:workspace: 'packages/workspace/**/*'

mod:i18n: 'packages/i18n/**/*'
24  .github/workflows/add-to-project.yml  (vendored)

@@ -1,24 +0,0 @@
name: Add to GitHub projects

on:
  issues:
    types:
      - opened
  pull_request_target:
    types:
      - opened
      - reopened

jobs:
  add-to-project:
    name: Add issues and pull requests
    runs-on: ubuntu-latest
    steps:
      - uses: actions/add-to-project@v0.4.0
        with:
          # You can target a repository in a different organization
          # to the issue
          project-url: https://github.com/orgs/toeverything/projects/10
          github-token: ${{ secrets.ADD_TO_PROJECT_PAT }}
          # labeled: bug, needs-triage
          # label-operator: OR
15  .github/workflows/build.yml  (vendored)

@@ -35,7 +35,12 @@ jobs:
      - uses: actions/checkout@v3
      - name: Setup Node.js
        uses: ./.github/actions/setup-node
      - run: yarn lint --max-warnings=0
      - name: Run checks
        run: |
          yarn i18n-codegen gen
          yarn typecheck
          yarn lint --max-warnings=0
          yarn circular

  build-storybook:
    name: Build Storybook

@@ -82,7 +87,9 @@ jobs:
      NEXT_PUBLIC_FIREBASE_APP_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_APP_ID }}
      NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID }}
      API_SERVER_PROFILE: local
      ENABLE_DEBUG_PAGE: true
      ENABLE_DEBUG_PAGE: 1
      ENABLE_PLUGIN: true
      ENABLE_ALL_PAGE_FILTER: true
      ENABLE_LEGACY_PROVIDER: true
      COVERAGE: true

@@ -104,7 +111,9 @@ jobs:
      NEXT_PUBLIC_FIREBASE_APP_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_APP_ID }}
      NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID: ${{ secrets.NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID }}
      API_SERVER_PROFILE: affine
      ENABLE_DEBUG_PAGE: true
      ENABLE_DEBUG_PAGE: 1
      ENABLE_PLUGIN: true
      ENABLE_ALL_PAGE_FILTER: true
      ENABLE_LEGACY_PROVIDER: false
      COVERAGE: true
8  .github/workflows/languages-sync.yml  (vendored)

@@ -13,14 +13,6 @@ on:
      - '.github/workflows/languages-sync.yml'
  workflow_dispatch:

# Cancels all previous workflow runs for pull requests that have not completed.
# See https://docs.github.com/en/actions/using-jobs/using-concurrency
concurrency:
  # The concurrency group contains the workflow name and the branch name for
  # pull requests or the commit hash for any other events.
  group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.head_ref || github.sha }}
  cancel-in-progress: true

jobs:
  main:
    runs-on: ubuntu-latest
1  .github/workflows/nightly-build.yml  (vendored)

@@ -67,6 +67,7 @@ jobs:
          ENABLE_TEST_PROPERTIES: false
          ENABLE_IMAGE_PREVIEW_MODAL: false
          RELEASE_VERSION: ${{ needs.set-build-version.outputs.version }}
          ENABLE_BOOKMARK_OPERATION: true

      - name: Upload Artifact (web-static)
        uses: actions/upload-artifact@v3
1  .github/workflows/release-desktop-app.yml  (vendored)

@@ -68,6 +68,7 @@ jobs:
          ENABLE_TEST_PROPERTIES: false
          ENABLE_IMAGE_PREVIEW_MODAL: false
          RELEASE_VERSION: ${{ github.event.inputs.version }}
          ENABLE_BOOKMARK_OPERATION: true

      - name: Upload Artifact (web-static)
        uses: actions/upload-artifact@v3
6  .gitignore  (vendored)

@@ -28,9 +28,9 @@ node_modules

# IDE - VSCode
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/settings.template.json
!.vscode/launch.template.json
!.vscode/extensions.json

# misc

@@ -70,3 +70,5 @@ next-env.d.ts

# Rust
target
*.node
tsconfig.node.tsbuildinfo
lib

@@ -1 +1,4 @@
pnpm-lock.yaml
target
lib
test-results

@@ -6,6 +6,12 @@
      "name": "Run Dev",
      "request": "launch",
      "type": "node-terminal"
    },
    {
      "command": "yarn run dev:local",
      "name": "Run Dev Locally",
      "request": "launch",
      "type": "node-terminal"
    }
  ]
}
21  Cargo.lock  (generated)

@@ -305,9 +305,9 @@ dependencies = [

[[package]]
name = "notify"
version = "5.1.0"
version = "6.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58ea850aa68a06e48fdb069c0ec44d0d64c8dbffa49bf3b6f7f0a901fdea1ba9"
checksum = "4d9ba6c734de18ca27c8cef5cd7058aa4ac9f63596131e4c7e41e579319032a2"
dependencies = [
 "bitflags 1.3.2",
 "crossbeam-channel",

@@ -319,7 +319,7 @@ dependencies = [
 "mio",
 "serde",
 "walkdir",
 "windows-sys 0.42.0",
 "windows-sys 0.45.0",
]

[[package]]

@@ -652,21 +652,6 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"

[[package]]
name = "windows-sys"
version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7"
dependencies = [
 "windows_aarch64_gnullvm 0.42.2",
 "windows_aarch64_msvc 0.42.2",
 "windows_i686_gnu 0.42.2",
 "windows_i686_msvc 0.42.2",
 "windows_x86_64_gnu 0.42.2",
 "windows_x86_64_gnullvm 0.42.2",
 "windows_x86_64_msvc 0.42.2",
]

[[package]]
name = "windows-sys"
version = "0.45.0"
20  README.md

@@ -24,12 +24,13 @@ See https://github.com/all-?/all-contributors/issues/361#issuecomment-637166066

<!-- ALL-CONTRIBUTORS-BADGE:END -->
[?style=flat-square&logoColor=white&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAACXBIWXMAAADAAAAAwAEwd99eAAABjElEQVRYhe1W0U3DMBB9RfyTDeoNyAYNG2QDOgJsECYgGxA26AZ4hIxgJqCZ4PjIGV+tUxK7raqiPsmKdXe5e3fOs7IiIlwSdxetfiNw7QRKAD0Ax/ssrI5QgQOw5v03AJOTJHcCL1x84LVmWzJyJlBg7P4BwCvb3pmIAbBPykZEqaulEU7YHNva1HypxUsKqIS9EvbynASs0n3ss+ciUIsuO8VvhL9emjdFBa3YO8XvALwpsZNYSqBB0PwUWgRZNksSL5GhlN0ngGd+dkpsD6AG8IGlslxwTh2fa09EBc3Dir32rRysuQlUAL54/wTAcpePPAXHPsOTGXhSEv69rAlYpZOt6DSO29J4D/TRRLJk6AvtaZSY9PkCFYVLqI9i/NF5YkkECgrXa6P4fVEn4iolrhNxRQqBZu7FqMNdZiMqAUPj2KdGZyicu1dHzlGqBHxn2sdTR53bmeJ+ebJd7LtXhGH4uQEwd0ttAPzMxGi5/6BdxTuMej41Bs59gGP+CU+Cq/4tvxH4HwR+Ab3Uqr/VGbqEAAAAAElFTkSuQmCC>)](https://app.affine.pro)
[?style=flat-square&logoColor=white&logo=affine>)](https://app.affine.pro)
[](https://affine.pro/download)
[](https://affine.pro/download)
[](https://affine.pro/download)
[](https://affine.pro/download)

[](https://github.com/toeverything/AFFiNE/releases/latest)
[![stars-icon]](https://github.com/toeverything/AFFiNE)
[![All Contributors][all-contributors-badge]](#contributors)
[![codecov]](https://codecov.io/gh/toeverything/AFFiNE)

@@ -44,7 +45,7 @@ See https://github.com/all-?/all-contributors/issues/361#issuecomment-637166066

---

<div align="center">
<a href="http://affine.pro"><img src="https://img.shields.io/badge/-AFFiNE-06449d?style=social&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAEsAAABLCAMAAAAPkIrYAAAAP1BMVEU8b9w8b9w+b947cNw7b9w6b908b909b9w8b9w7b9w8b9w7cN08b9w7b908b9w7b9w8b907cNw8b9w8b91HcEx3NJCJAAAAFXRSTlP/3QWSgA+lHPlu6Di4XtIrxk/xRADGudUoAAAB9UlEQVR42tWYwbKjIBREG0GJKkRj/v9bZ1ZvRC99rzib11tTB9qqnKoW3/+X38vy7ifzQ1b/wk/8Q1bCv3y6Z6wFh2x2llIRGB6xRhzz6p+wVhRJD1gRZZYHrADYSyqsjFPGZtYbuFESesUysZXlcMnYyJpxTW5keQh5N7G6CUJCE2uHFNfEGiBmbmB1H4jxDawNcqbuPmtAJTtj6RZ0lpIwiR5jNmgfNtHHwLXPWfFYcS2NMdxkjac/dNaNCJPo3yf9pFuseHbDrBsRFguGs8te8Q4rXzTjVSPCIHp3FePKWbzi30xE+4zlBMmoJaGLfpLUmAmLiN4Xyibahy76WZRQMLJ2WX27on2oFvQVac8yi4p+J2forA0V8W1c++AVS1f1H6p9KKLHxk9RWKmsyB+VLC76gV65DLjokdg5KmsEMXsiDwXWSmTc9ezSoKJHoi9zUVihbMHfQOSsXB7Mrz1S1huKPde69sEsiKgNt8hYTjiWlAyENeu7IFe1D15RSEBN+yCiXw17K1RZm/w7UtJVWYN8f1ZyLlkVb2bT4vIVVrINH1dqX2YttkHmIWsfVWs646wcRFYis6fIVGpfYq1kjpGSW8kSRD+xYSmXRM0Ang9eSZioVdy/5pWaLqzIRyIpuVxYozvGf1m67I7pf/s3UXv+AP61NI2Y+BbSAAAAAElFTkSuQmCC" height=25></a>
<a href="http://affine.pro"><img src="https://img.shields.io/badge/-AFFiNE-06449d?style=social&logo=affine" height=25></a>
<a href="https://community.affine.pro"><img src="https://img.shields.io/badge/-Community-424549?style=social&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAAXNJREFUWEftlitLRUEURtdVEVExWUx2qxgNVouoXYtNDP4Tw20WtftAsItZrHaTYBJREZ98MAc248wcZxi4CGfSeezHmm/23kyPAa/egPPTAXQK/FsFBP7ldVDRZoqcgO9I+2bHy3ZIJBfTCPCZM1tqAxwBmzUBrNQNbEx+5b0B5oEN4NCBrAMnMaiUAuPAs3HU82TLEZwBqwGbaJ4UgKQ8CFR6SoEl4LIWwCJwZQCegKkWBWLHVKSActvdzgG3DqitDf3/VQBskBDALrDnAKXUo3ueAF5KinAf2DKOmnzD7l214bdbA6hC1XHZNQa8hSBC0hwDa57xDHDvvvWB7ciOZoE79+8CWPbsBGc769eFxJdWIKcuyIdRoG3W7AAC1dJkHDIOo8B78+4rEBo8r4AkLFk6Jk3HaeDBBTgHVmIAfpJUz+cAFXVBreQCvQYW/lqEjV1NAMUMqpAaxQMHyDnjYtuS+0BxstwaqJooFqxToFPgB5FuPCEB6XK2AAAAAElFTkSuQmCC" height=25></a>
@@ -69,11 +70,11 @@ See https://github.com/all-?/all-contributors/issues/361#issuecomment-637166066

Before we tell you how to get started with AFFiNE, we'd like to shamelessly plug our awesome user and developer communities across [official social platforms](https://community.affine.pro/c/start-here/)! Once you’re familiar with using the software, maybe you will share your wisdom with others and even consider joining the [AFFiNE Ambassador program](https://community.affine.pro/c/start-here/affine-ambassador) to help spread AFFiNE to the world.

## Getting started & Stay tunned with us.
## Getting started & staying tuned with us.

⚠️ Please note that AFFiNE is still under active development and is not yet ready for production use. ⚠️

[](https://app.affine.pro) No installation or registration required! Head over to our website and try it out now.
[](https://app.affine.pro) No installation or registration required! Head over to our website and try it out now.

[](https://community.affine.pro) Our wonderful community, where you can meet and engage with the team, developers and other like-minded enthusiastic user of AFFiNE.

@@ -119,6 +120,15 @@ If you have questions, you are welcome to contact us. One of the best places to
| [@toeverything/y-indexeddb](packages/y-indexeddb) | IndexedDB database adapter for Yjs | [](https://www.npmjs.com/package/@toeverything/y-indexeddb) |
| [@toeverything/theme](packages/theme) | AFFiNE theme | [](https://www.npmjs.com/package/@toeverything/theme) |

## Plugins

> Plugins are a way to extend the functionality of AFFiNE.

| Name | |
| ------------------------------------------------ | ----------------------------------------- |
| [@affine/bookmark-block](plugins/bookmark-block) | A block for bookmarking a website |
| [@affine/copilot](plugins/copilot) | AI Copilot that help you document writing |

## Thanks

We would also like to give thanks to open-source projects that make AFFiNE possible:

@@ -140,7 +150,7 @@ Thanks a lot to the community for providing such powerful and simple libraries,

We would like to express our gratitude to all the individuals who have already contributed to AFFiNE! If you have any AFFiNE-related project, documentation, tool or template, please feel free to contribute it by submitting a pull request to our curated list on GitHub: [awesome-affine](https://github.com/toeverything/awesome-affine).

<a href="https://github.com/toeverything/affine/graphs/contributors">
  <img src="https://user-images.githubusercontent.com/5910926/237263745-36bb975d-83d6-4a7c-a152-d9ad020e5023.png" />
  <img src="https://user-images.githubusercontent.com/5910926/240508358-93eddded-48a0-40cd-85e4-a1d172dbe1d9.svg" />
</a>

## Self-Host
@@ -7,6 +7,7 @@ To run AFFiNE Desktop Client Application locally, run the following commands:

```sh
# in repo root
yarn install
yarn workspace @affine/native build
yarn dev

# in apps/electron

@@ -94,7 +94,7 @@ module.exports = {
      config: {
        name: 'AFFiNE',
        setupIcon: icoPath,
        // loadingGif: './resources/icons/loading.gif',
        loadingGif: './resources/icons/affine_installing.gif',
      },
    },
  ],

@@ -1,7 +0,0 @@
/* eslint-disable @typescript-eslint/consistent-type-imports */
// This file contains the main process events
// It will guide preload and main process on the correct event types and payloads

export type MainIPCHandlerMap = typeof import('./main/src/exposed').handlers;

export type MainIPCEventMap = typeof import('./main/src/exposed').events;
@@ -2,12 +2,11 @@ import assert from 'node:assert';
import path from 'node:path';

import fs from 'fs-extra';
import type { Subscription } from 'rxjs';
import { v4 } from 'uuid';
import { afterEach, beforeEach, describe, expect, test, vi } from 'vitest';
import * as Y from 'yjs';

import type { MainIPCHandlerMap } from '../../../../constraints';
import type { MainIPCHandlerMap } from '../exposed';

const registeredHandlers = new Map<
  string,

@@ -42,6 +41,7 @@ ReturnType<MainIPCHandlerMap[T][F]> {
}

const SESSION_DATA_PATH = path.join(__dirname, './tmp', 'affine-test');
const DOCUMENTS_PATH = path.join(__dirname, './tmp', 'affine-test-documents');

const browserWindow = {
  isDestroyed: () => {

@@ -92,8 +92,12 @@ function compareBuffer(a: Uint8Array | null, b: Uint8Array | null) {
const electronModule = {
  app: {
    getPath: (name: string) => {
      assert(name === 'sessionData');
      return SESSION_DATA_PATH;
      if (name === 'sessionData') {
        return SESSION_DATA_PATH;
      } else if (name === 'documents') {
        return DOCUMENTS_PATH;
      }
      throw new Error('not implemented');
    },
    name: 'affine-test',
    on: (name: string, callback: (...args: any[]) => any) => {

@@ -123,27 +127,23 @@ vi.doMock('electron', () => {
  return electronModule;
});

let connectableSubscription: Subscription;

beforeEach(async () => {
  const { registerHandlers } = await import('../register');
  const { registerHandlers } = await import('../handlers');
  registerHandlers();

  // should also register events
  const { registerEvents } = await import('../../events');
  const { registerEvents } = await import('../events');
  registerEvents();
  await fs.mkdirp(SESSION_DATA_PATH);
  const { database$ } = await import('../db/ensure-db');
  await import('../db/ensure-db');

  connectableSubscription = database$.connect();
  registeredHandlers.get('ready')?.forEach(fn => fn());
});

afterEach(async () => {
  // reset registered handlers
  registeredHandlers.get('before-quit')?.forEach(fn => fn());

  connectableSubscription.unsubscribe();

  await fs.remove(SESSION_DATA_PATH);
});

@@ -157,55 +157,26 @@ describe('ensureSQLiteDB', () => {
    expect(fileExists).toBe(true);
  });

  test('when db file is removed', async () => {
    // stub webContents.send
    const sendSpy = vi.spyOn(browserWindow.webContents, 'send');
    const id = v4();
  test('should emit the same db instance for the same id', async () => {
    const [id1, id2] = [v4(), v4()];
    const { ensureSQLiteDB } = await import('../db/ensure-db');
    let workspaceDB = await ensureSQLiteDB(id);
    const file = workspaceDB.path;
    const fileExists = await fs.pathExists(file);
    expect(fileExists).toBe(true);

    // Can't remove file on Windows, because the sqlite is still holding the file handle
    if (process.platform === 'win32') {
      return;
    }

    await fs.remove(file);

    // wait for 2000ms for file watcher to detect file removal
    await delay(2000);

    expect(sendSpy).toBeCalledWith('db:onDBFileMissing', id);

    // ensureSQLiteDB should recreate the db file
    workspaceDB = await ensureSQLiteDB(id);
    const fileExists2 = await fs.pathExists(file);
    expect(fileExists2).toBe(true);
    sendSpy.mockRestore();
    const workspaceDB1 = await ensureSQLiteDB(id1);
    const workspaceDB2 = await ensureSQLiteDB(id2);
    const workspaceDB3 = await ensureSQLiteDB(id1);
    expect(workspaceDB1).toBe(workspaceDB3);
    expect(workspaceDB1).not.toBe(workspaceDB2);
  });

  test('when db file is updated', async () => {
  test('when app quit, db should be closed', async () => {
    const id = v4();
    const { ensureSQLiteDB } = await import('../db/ensure-db');
    const { dbSubjects } = await import('../../events/db');
    const workspaceDB = await ensureSQLiteDB(id);
    const file = workspaceDB.path;
    const fileExists = await fs.pathExists(file);
    expect(fileExists).toBe(true);
    const dbUpdateSpy = vi.spyOn(dbSubjects.dbFileUpdate, 'next');
    registeredHandlers.get('before-quit')?.forEach(fn => fn());
    await delay(100);
    // writes some data to the db file
    await fs.appendFile(file, 'random-data', { encoding: 'binary' });
    // write again
    await fs.appendFile(file, 'random-data', { encoding: 'binary' });

    // wait for 2000ms for file watcher to detect file change
    await delay(2000);

    expect(dbUpdateSpy).toBeCalledWith(id);
    dbUpdateSpy.mockRestore();
    expect(workspaceDB.db).toBe(null);
  });
});

@@ -219,16 +190,14 @@ describe('workspace handlers', () => {
  });

  test('delete workspace', async () => {
    // @TODO dispatch is hanging on Windows
    if (process.platform === 'win32') {
      return;
    }
    const ids = [v4(), v4()];
    const { ensureSQLiteDB } = await import('../db/ensure-db');
    await Promise.all(ids.map(id => ensureSQLiteDB(id)));
    const dbs = await Promise.all(ids.map(id => ensureSQLiteDB(id)));
    await dispatch('workspace', 'delete', ids[1]);
    const list = await dispatch('workspace', 'list');
    expect(list.map(([id]) => id)).toEqual([ids[0]]);
    // deleted db should be closed
    expect(dbs[1].db).toBe(null);
  });
});

@@ -290,7 +259,7 @@ describe('db handlers', () => {

  test('list blobs (empty)', async () => {
    const workspaceId = v4();
    const list = await dispatch('db', 'getPersistedBlobs', workspaceId);
    const list = await dispatch('db', 'getBlobKeys', workspaceId);
    expect(list).toEqual([]);
  });

@@ -320,14 +289,14 @@ describe('db handlers', () => {
    ).toBe(true);

    // list blobs
    let lists = await dispatch('db', 'getPersistedBlobs', workspaceId);
    let lists = await dispatch('db', 'getBlobKeys', workspaceId);
    expect(lists).toHaveLength(2);
    expect(lists).toContain('testBin');
    expect(lists).toContain('testBin2');

    // delete blob
    await dispatch('db', 'deleteBlob', workspaceId, 'testBin');
    lists = await dispatch('db', 'getPersistedBlobs', workspaceId);
    lists = await dispatch('db', 'getBlobKeys', workspaceId);
    expect(lists).toEqual(['testBin2']);
  });
});

@@ -409,10 +378,10 @@ describe('dialog handlers', () => {
    expect(res.error).toBe('DB_FILE_PATH_INVALID');
  });

  test('loadDBFile (error, not a valid db file)', async () => {
  test('loadDBFile (error, not a valid affine file)', async () => {
    // create a random db file
    const basePath = path.join(SESSION_DATA_PATH, 'random-path');
    const dbPath = path.join(basePath, 'xxx.db');
    const dbPath = path.join(basePath, 'xxx.affine');
    await fs.ensureDir(basePath);
    await fs.writeFile(dbPath, 'hello world');

@@ -428,7 +397,7 @@ describe('dialog handlers', () => {
    electronModule.dialog = {};
  });

  test('loadDBFile', async () => {
  test('loadDBFile (correct)', async () => {
    // we use ensureSQLiteDB to create a valid db file
    const id = v4();
    const { ensureSQLiteDB } = await import('../db/ensure-db');

@@ -436,66 +405,90 @@ describe('dialog handlers', () => {

    // copy db file to dbPath
    const basePath = path.join(SESSION_DATA_PATH, 'random-path');
    const originDBFilePath = path.join(basePath, 'xxx.db');
    const clonedDBPath = path.join(basePath, 'xxx.affine');
    await fs.ensureDir(basePath);
    await fs.copyFile(db.path, originDBFilePath);
    await fs.copyFile(db.path, clonedDBPath);

    // on Windows, we skip this test because we can't delete the db file
    if (process.platform === 'win32') {
      return;
    }

    // remove db
    await fs.remove(db.path);
    // delete workspace
    await dispatch('workspace', 'delete', id);

    // try load originDBFilePath
    const mockShowOpenDialog = vi.fn(() => {
      return { filePaths: [originDBFilePath] };
      return { filePaths: [clonedDBPath] };
    }) as any;
    electronModule.dialog.showOpenDialog = mockShowOpenDialog;

    const res = await dispatch('dialog', 'loadDBFile');
    expect(mockShowOpenDialog).toBeCalled();
    expect(res.workspaceId).not.toBeUndefined();
    const newId = res.workspaceId;

    const importedDb = await ensureSQLiteDB(res.workspaceId!);
    expect(await fs.realpath(importedDb.path)).toBe(originDBFilePath);
    expect(importedDb.path).not.toBe(originDBFilePath);
    expect(newId).not.toBeUndefined();

    assert(newId);

    const meta = await dispatch('workspace', 'getMeta', newId);

    expect(meta.secondaryDBPath).toBe(clonedDBPath);

    // try load it again, will trigger error (db file already loaded)
    const res2 = await dispatch('dialog', 'loadDBFile');
    expect(res2.error).toBe('DB_FILE_ALREADY_LOADED');
  });

  test('moveDBFile', async () => {
  test('moveDBFile (valid)', async () => {
    const sendStub = vi.fn();
    browserWindow.webContents.send = sendStub;
    const newPath = path.join(SESSION_DATA_PATH, 'xxx');
    const mockShowSaveDialog = vi.fn(() => {
      return { filePath: newPath };
    const showOpenDialog = vi.fn(() => {
      return { filePaths: [newPath] };
    }) as any;
    electronModule.dialog.showSaveDialog = mockShowSaveDialog;
    electronModule.dialog.showOpenDialog = showOpenDialog;

    const id = v4();
    const { ensureSQLiteDB } = await import('../db/ensure-db');
    await ensureSQLiteDB(id);
    const db = await ensureSQLiteDB(id);
    const res = await dispatch('dialog', 'moveDBFile', id);
    expect(mockShowSaveDialog).toBeCalled();
    expect(res.filePath).toBe(newPath);
    expect(showOpenDialog).toBeCalled();
    assert(res.filePath);
    expect(path.dirname(res.filePath)).toBe(newPath);
    expect(res.filePath.endsWith('.affine')).toBe(true);
    // should also send workspace meta change event
    expect(sendStub).toBeCalledWith('workspace:onMetaChange', {
      workspaceId: id,
      meta: { id, secondaryDBPath: res.filePath, mainDBPath: db.path },
    });
    electronModule.dialog = {};
    browserWindow.webContents.send = () => {};
  });

  test('moveDBFile (skipped)', async () => {
    const mockShowSaveDialog = vi.fn(() => {
      return { filePath: null };
  test('moveDBFile (canceled)', async () => {
    const showOpenDialog = vi.fn(() => {
      return { filePaths: null };
    }) as any;
    electronModule.dialog.showSaveDialog = mockShowSaveDialog;
    electronModule.dialog.showOpenDialog = showOpenDialog;

    const id = v4();
    const { ensureSQLiteDB } = await import('../db/ensure-db');
    await ensureSQLiteDB(id);

    const res = await dispatch('dialog', 'moveDBFile', id);
    expect(mockShowSaveDialog).toBeCalled();
    expect(showOpenDialog).toBeCalled();
    expect(res.filePath).toBe(undefined);
    electronModule.dialog = {};
  });
});

describe('applicationMenu', () => {
  // test some basic IPC events
  test('applicationMenu event', async () => {
    const { applicationMenuSubjects } = await import('../application-menu');
    const sendStub = vi.fn();
    browserWindow.webContents.send = sendStub;
    applicationMenuSubjects.newPageAction.next();
    expect(sendStub).toHaveBeenCalledWith(
      'applicationMenu:onNewPageAction',
      undefined
    );
    browserWindow.webContents.send = () => {};
  });
});
@@ -1,9 +1,9 @@
import { app, Menu } from 'electron';

import { isMacOS } from '../../utils';
import { subjects } from './events';
import { checkForUpdatesAndNotify } from './handlers/updater';
import { revealLogFile } from './logger';
import { revealLogFile } from '../logger';
import { checkForUpdatesAndNotify } from '../updater';
import { isMacOS } from '../utils';
import { applicationMenuSubjects } from './subject';

// Unique id for menuitems
const MENUITEM_NEW_PAGE = 'affine:new-page';

@@ -43,7 +43,7 @@ export function createApplicationMenu() {
          label: 'New Page',
          accelerator: isMac ? 'Cmd+N' : 'Ctrl+N',
          click: () => {
            subjects.applicationMenu.newPageAction.next();
            applicationMenuSubjects.newPageAction.next();
          },
        },
        { type: 'separator' },

@@ -117,7 +117,7 @@ export function createApplicationMenu() {
          },
        },
        {
          label: 'Open logs folder',
          label: 'Open log file',
          click: async () => {
            revealLogFile();
          },

@@ -1,10 +1,8 @@
import { Subject } from 'rxjs';
import type { MainEventListener } from '../type';
import { applicationMenuSubjects } from './subject';

import type { MainEventListener } from './type';

export const applicationMenuSubjects = {
  newPageAction: new Subject<void>(),
};
export * from './create';
export * from './subject';

/**
 * Events triggered by application menu

@@ -0,0 +1,5 @@
import { Subject } from 'rxjs';

export const applicationMenuSubjects = {
  newPageAction: new Subject<void>(),
};
1  apps/electron/layers/main/src/db/__tests__/.gitignore  (vendored, normal file)

@@ -0,0 +1 @@
tmp

147  apps/electron/layers/main/src/db/__tests__/ensure-db.spec.ts  (normal file)

@@ -0,0 +1,147 @@
import path from 'node:path';

import fs from 'fs-extra';
import { v4 } from 'uuid';
import { afterEach, beforeEach, expect, test, vi } from 'vitest';

const tmpDir = path.join(__dirname, 'tmp');

const registeredHandlers = new Map<
  string,
  ((...args: any[]) => Promise<any>)[]
>();

const SESSION_DATA_PATH = path.join(tmpDir, 'affine-test');
const DOCUMENTS_PATH = path.join(tmpDir, 'affine-test-documents');

const electronModule = {
  app: {
    getPath: (name: string) => {
      if (name === 'sessionData') {
        return SESSION_DATA_PATH;
      } else if (name === 'documents') {
        return DOCUMENTS_PATH;
      }
      throw new Error('not implemented');
    },
    name: 'affine-test',
    on: (name: string, callback: (...args: any[]) => any) => {
      const handlers = registeredHandlers.get(name) || [];
      handlers.push(callback);
      registeredHandlers.set(name, handlers);
    },
    addEventListener: (...args: any[]) => {
      // @ts-ignore
      electronModule.app.on(...args);
    },
    removeEventListener: () => {},
  },
  shell: {} as Partial<Electron.Shell>,
  dialog: {} as Partial<Electron.Dialog>,
};

const runHandler = (key: string) => {
  registeredHandlers.get(key)?.forEach(handler => handler());
};

// dynamically import handlers so that we can inject local variables to mocks
vi.doMock('electron', () => {
  return electronModule;
});

const constructorStub = vi.fn();
const destroyStub = vi.fn();

vi.doMock('../secondary-db', () => {
  return {
    SecondaryWorkspaceSQLiteDB: class {
      constructor(...args: any[]) {
        constructorStub(...args);
      }

      destroy = destroyStub;
    },
  };
});

beforeEach(() => {
  vi.useFakeTimers({ shouldAdvanceTime: true });
});

afterEach(async () => {
  runHandler('before-quit');
  await fs.remove(tmpDir);
  vi.useRealTimers();
});

test('can get a valid WorkspaceSQLiteDB', async () => {
  const { ensureSQLiteDB } = await import('../ensure-db');
  const workspaceId = v4();
  const db0 = await ensureSQLiteDB(workspaceId);
  expect(db0).toBeDefined();
  expect(db0.workspaceId).toBe(workspaceId);

  const db1 = await ensureSQLiteDB(v4());
  expect(db1).not.toBe(db0);
  expect(db1.workspaceId).not.toBe(db0.workspaceId);

  // ensure that the db is cached
  expect(await ensureSQLiteDB(workspaceId)).toBe(db0);
});

test('db should be destroyed when app quits', async () => {
  const { ensureSQLiteDB } = await import('../ensure-db');
  const workspaceId = v4();
  const db0 = await ensureSQLiteDB(workspaceId);
  const db1 = await ensureSQLiteDB(v4());

  expect(db0.db).not.toBeNull();
  expect(db1.db).not.toBeNull();

  runHandler('before-quit');

  expect(db0.db).toBeNull();
  expect(db1.db).toBeNull();
});

test('if db has a secondary db path, we should also poll that', async () => {
  const { ensureSQLiteDB } = await import('../ensure-db');
  const { appContext } = await import('../../context');
  const { storeWorkspaceMeta } = await import('../../workspace');
  const workspaceId = v4();
  await storeWorkspaceMeta(appContext, workspaceId, {
    secondaryDBPath: path.join(tmpDir, 'secondary.db'),
  });

  const db = await ensureSQLiteDB(workspaceId);

  await vi.advanceTimersByTimeAsync(1500);

  // not sure why but we still need to wait with real timer
  await new Promise(resolve => setTimeout(resolve, 100));

  expect(constructorStub).toBeCalledTimes(1);
  expect(constructorStub).toBeCalledWith(path.join(tmpDir, 'secondary.db'), db);

  // if secondary meta is changed
  await storeWorkspaceMeta(appContext, workspaceId, {
    secondaryDBPath: path.join(tmpDir, 'secondary2.db'),
  });

  await vi.advanceTimersByTimeAsync(1500);
  expect(constructorStub).toBeCalledTimes(2);
  expect(destroyStub).toBeCalledTimes(1);

  // if secondary meta is changed (but another workspace)
  await storeWorkspaceMeta(appContext, v4(), {
    secondaryDBPath: path.join(tmpDir, 'secondary3.db'),
  });
  await vi.advanceTimersByTimeAsync(1500);
  expect(constructorStub).toBeCalledTimes(2);
  expect(destroyStub).toBeCalledTimes(1);

  // if primary is destroyed, secondary should also be destroyed
  db.destroy();
  await new Promise(resolve => setTimeout(resolve, 100));
  expect(destroyStub).toBeCalledTimes(2);
});
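The spec above relies on a Vitest pattern worth calling out: `vi.doMock` is not hoisted, so the module under test has to be imported dynamically after the mock is registered (the file's own comment says as much). A minimal, self-contained sketch of that pattern; the mocked shape and the imported module are illustrative, not a definitive setup:

```ts
import { expect, test, vi } from 'vitest';

// vi.doMock only affects imports that happen *after* this call, which is why the
// specs above pull in '../ensure-db' with a dynamic import inside each test.
vi.doMock('electron', () => ({
  app: { getPath: () => '/tmp/fake', name: 'fake-app', on: () => {} },
}));

test('module sees the mocked electron', async () => {
  // dynamic import resolves against the registered mock above
  const { ensureSQLiteDB } = await import('../ensure-db');
  expect(ensureSQLiteDB).toBeTypeOf('function');
});
```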
@@ -0,0 +1,101 @@
import path from 'node:path';

import fs from 'fs-extra';
import { v4 } from 'uuid';
import { afterEach, expect, test, vi } from 'vitest';
import * as Y from 'yjs';

import type { AppContext } from '../../context';
import { dbSubjects } from '../subjects';

const tmpDir = path.join(__dirname, 'tmp');

const testAppContext: AppContext = {
  appDataPath: path.join(tmpDir, 'test-data'),
  appName: 'test',
};

afterEach(async () => {
  if (process.platform !== 'win32') {
    // hmmm ....
    await fs.remove(tmpDir);
  }
});

function getTestUpdates() {
  const testYDoc = new Y.Doc();
  const yText = testYDoc.getText('test');
  yText.insert(0, 'hello');
  const updates = Y.encodeStateAsUpdate(testYDoc);

  return updates;
}

test('can create new db file if not exists', async () => {
  const { openWorkspaceDatabase } = await import('../workspace-db-adapter');
  const workspaceId = v4();
  const db = await openWorkspaceDatabase(testAppContext, workspaceId);
  const dbPath = path.join(
    testAppContext.appDataPath,
    `workspaces/${workspaceId}`,
    `storage.db`
  );
  expect(await fs.exists(dbPath)).toBe(true);
  db.destroy();
});

test('on applyUpdate (from self), will not trigger update', async () => {
  const { openWorkspaceDatabase } = await import('../workspace-db-adapter');
  const workspaceId = v4();
  const onUpdate = vi.fn();

  const db = await openWorkspaceDatabase(testAppContext, workspaceId);
  db.update$.subscribe(onUpdate);
  db.applyUpdate(getTestUpdates(), 'self');
  expect(onUpdate).not.toHaveBeenCalled();
  db.destroy();
});

test('on applyUpdate (from renderer), will trigger update', async () => {
  const { openWorkspaceDatabase } = await import('../workspace-db-adapter');
  const workspaceId = v4();
  const onUpdate = vi.fn();
  const onExternalUpdate = vi.fn();

  const db = await openWorkspaceDatabase(testAppContext, workspaceId);
  db.update$.subscribe(onUpdate);
  const sub = dbSubjects.externalUpdate.subscribe(onExternalUpdate);
  db.applyUpdate(getTestUpdates(), 'renderer');
  expect(onUpdate).toHaveBeenCalled(); // not yet updated
  sub.unsubscribe();
  db.destroy();
});

test('on applyUpdate (from external), will trigger update & send external update event', async () => {
  const { openWorkspaceDatabase } = await import('../workspace-db-adapter');
  const workspaceId = v4();
  const onUpdate = vi.fn();
  const onExternalUpdate = vi.fn();

  const db = await openWorkspaceDatabase(testAppContext, workspaceId);
  db.update$.subscribe(onUpdate);
  const sub = dbSubjects.externalUpdate.subscribe(onExternalUpdate);
  db.applyUpdate(getTestUpdates(), 'external');
  expect(onUpdate).toHaveBeenCalled();
  expect(onExternalUpdate).toHaveBeenCalled();
  sub.unsubscribe();
  db.destroy();
});

test('on destroy, check if resources have been released', async () => {
  const { openWorkspaceDatabase } = await import('../workspace-db-adapter');
  const workspaceId = v4();
  const db = await openWorkspaceDatabase(testAppContext, workspaceId);
  const updateSub = {
    complete: vi.fn(),
    next: vi.fn(),
  };
  db.update$ = updateSub as any;
  db.destroy();
  expect(db.db).toBe(null);
  expect(updateSub.complete).toHaveBeenCalled();
});
152  apps/electron/layers/main/src/db/base-db-adapter.ts  (normal file)

@@ -0,0 +1,152 @@
import assert from 'assert';
import type { Database } from 'better-sqlite3';
import sqlite from 'better-sqlite3';

import { logger } from '../logger';

const schemas = [
  `CREATE TABLE IF NOT EXISTS "updates" (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    data BLOB NOT NULL,
    timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
  )`,
  `CREATE TABLE IF NOT EXISTS "blobs" (
    key TEXT PRIMARY KEY NOT NULL,
    data BLOB NOT NULL,
    timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
  )`,
];

interface UpdateRow {
  id: number;
  data: Buffer;
  timestamp: string;
}

interface BlobRow {
  key: string;
  data: Buffer;
  timestamp: string;
}

/**
 * A base class for SQLite DB adapter that provides basic methods around updates & blobs
 */
export abstract class BaseSQLiteAdapter {
  db: Database | null = null;
  abstract role: string;

  constructor(public path: string) {}

  ensureTables() {
    assert(this.db, 'db is not connected');
    this.db.exec(schemas.join(';'));
  }

  // todo: what if SQLite DB wrapper later is not sync?
  connect(): Database | undefined {
    if (this.db) {
      return this.db;
    }
    logger.log(`[SQLiteAdapter][${this.role}] open db`, this.path);
    const db = (this.db = sqlite(this.path));
    this.ensureTables();
    return db;
  }

  destroy() {
    this.db?.close();
    this.db = null;
  }

  addBlob(key: string, data: Uint8Array) {
    try {
      assert(this.db, 'db is not connected');
      const statement = this.db.prepare(
        'INSERT INTO blobs (key, data) VALUES (?, ?) ON CONFLICT(key) DO UPDATE SET data = ?'
      );
      statement.run(key, data, data);
      return key;
    } catch (error) {
      logger.error('addBlob', error);
    }
  }

  getBlob(key: string) {
    try {
      assert(this.db, 'db is not connected');
      const statement = this.db.prepare('SELECT data FROM blobs WHERE key = ?');
      const row = statement.get(key) as BlobRow;
      if (!row) {
        return null;
      }
      return row.data;
    } catch (error) {
      logger.error('getBlob', error);
      return null;
    }
  }

  deleteBlob(key: string) {
    try {
      assert(this.db, 'db is not connected');
      const statement = this.db.prepare('DELETE FROM blobs WHERE key = ?');
      statement.run(key);
    } catch (error) {
      logger.error('deleteBlob', error);
    }
  }

  getBlobKeys() {
    try {
      assert(this.db, 'db is not connected');
      const statement = this.db.prepare('SELECT key FROM blobs');
      const rows = statement.all() as BlobRow[];
      return rows.map(row => row.key);
    } catch (error) {
      logger.error('getBlobKeys', error);
      return [];
    }
  }

  getUpdates() {
    try {
      assert(this.db, 'db is not connected');
      const statement = this.db.prepare('SELECT * FROM updates');
      const rows = statement.all() as UpdateRow[];
      return rows;
    } catch (error) {
      logger.error('getUpdates', error);
      return [];
    }
  }

  // add a single update to SQLite
  addUpdateToSQLite(updates: Uint8Array[]) {
    // batch write instead write per key stroke?
    try {
      assert(this.db, 'db is not connected');
      const start = performance.now();
      const statement = this.db.prepare(
        'INSERT INTO updates (data) VALUES (?)'
      );
      const insertMany = this.db.transaction(updates => {
        for (const d of updates) {
          statement.run(d);
        }
      });

      insertMany(updates);

      logger.debug(
        `[SQLiteAdapter][${this.role}] addUpdateToSQLite`,
        'length:',
        updates.length,
        performance.now() - start,
        'ms'
      );
    } catch (error) {
      logger.error('addUpdateToSQLite', error);
    }
  }
}
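A quick sketch of how a concrete adapter uses the base class above; the subclass name, role string, and file path are hypothetical, while the method calls are the ones defined in the new file:

```ts
import { BaseSQLiteAdapter } from './base-db-adapter';

// Hypothetical concrete adapter, only to show the call flow of the base class.
class ScratchSQLiteDB extends BaseSQLiteAdapter {
  role = 'scratch';
}

const db = new ScratchSQLiteDB('/tmp/scratch.db'); // hypothetical path
db.connect();                                      // opens the file and ensures the updates/blobs tables
db.addBlob('avatar', new Uint8Array([1, 2, 3]));   // upsert into "blobs"
console.log(db.getBlobKeys());                     // -> ['avatar']
db.addUpdateToSQLite([new Uint8Array([0])]);       // batched insert into "updates" in one transaction
db.destroy();                                      // closes the handle and resets this.db to null
```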
110  apps/electron/layers/main/src/db/ensure-db.ts  (normal file)

@@ -0,0 +1,110 @@
import { app } from 'electron';
import {
  defer,
  firstValueFrom,
  from,
  fromEvent,
  interval,
  merge,
  Observable,
} from 'rxjs';
import {
  distinctUntilChanged,
  filter,
  ignoreElements,
  last,
  map,
  shareReplay,
  startWith,
  switchMap,
  takeUntil,
  tap,
} from 'rxjs/operators';

import { appContext } from '../context';
import { logger } from '../logger';
import { getWorkspaceMeta$ } from '../workspace';
import { SecondaryWorkspaceSQLiteDB } from './secondary-db';
import type { WorkspaceSQLiteDB } from './workspace-db-adapter';
import { openWorkspaceDatabase } from './workspace-db-adapter';

const db$Map = new Map<string, Observable<WorkspaceSQLiteDB>>();

const beforeQuit$ = defer(() => fromEvent(app, 'before-quit'));

function getWorkspaceDB$(id: string) {
  if (!db$Map.has(id)) {
    db$Map.set(
      id,
      from(openWorkspaceDatabase(appContext, id)).pipe(
        shareReplay(1),
        switchMap(db => {
          return startPollingSecondaryDB(db).pipe(
            ignoreElements(),
            startWith(db),
            takeUntil(beforeQuit$),
            tap({
              complete: () => {
                logger.info('[ensureSQLiteDB] close db connection');
                db.destroy();
                db$Map.delete(id);
              },
            })
          );
        }),
        shareReplay(1)
      )
    );
  }
  return db$Map.get(id)!;
}

function startPollingSecondaryDB(db: WorkspaceSQLiteDB) {
  const meta$ = getWorkspaceMeta$(db.workspaceId);
  const secondaryDB$ = meta$.pipe(
    map(meta => meta?.secondaryDBPath),
    distinctUntilChanged(),
    filter((p): p is string => !!p),
    switchMap(path => {
      return new Observable<SecondaryWorkspaceSQLiteDB>(observer => {
        const secondaryDB = new SecondaryWorkspaceSQLiteDB(path, db);
        observer.next(secondaryDB);
        return () => {
          logger.info(
            '[ensureSQLiteDB] close secondary db connection',
            secondaryDB.path
          );
          secondaryDB.destroy();
        };
      });
    }),
    takeUntil(db.update$.pipe(last())),
    shareReplay(1)
  );

  const firstDelayedTick$ = defer(() => {
    return new Promise<number>(resolve =>
      setTimeout(() => {
        resolve(0);
      }, 1000)
    );
  });

  // pull every 30 seconds
  const poll$ = merge(firstDelayedTick$, interval(30000)).pipe(
    switchMap(() => secondaryDB$),
    tap({
      next: secondaryDB => {
        secondaryDB.pull();
      },
    }),
    takeUntil(db.update$.pipe(last())),
    shareReplay(1)
  );

  return poll$;
}

export function ensureSQLiteDB(id: string) {
  return firstValueFrom(getWorkspaceDB$(id));
}
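Sketch of the calling pattern, mirroring how the new ensure-db.spec.ts above exercises `ensureSQLiteDB`; the top-level `await` is just for brevity:

```ts
import { v4 } from 'uuid';
import { ensureSQLiteDB } from './ensure-db';

const workspaceId = v4();
const db1 = await ensureSQLiteDB(workspaceId); // opens (or replays) the shared WorkspaceSQLiteDB observable
const db2 = await ensureSQLiteDB(workspaceId); // shareReplay(1) hands back the same cached instance
console.assert(db1 === db2);
// When the app emits 'before-quit', takeUntil(beforeQuit$) completes the stream,
// which destroys the db and removes its entry from db$Map.
```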
38  apps/electron/layers/main/src/db/helper.ts  (normal file)

@@ -0,0 +1,38 @@
import type { Database } from 'better-sqlite3';
import sqlite from 'better-sqlite3';

import { logger } from '../logger';

export function isValidateDB(db: Database) {
  // check if db has two tables, one for updates and one for blobs
  const statement = db.prepare(
    `SELECT name FROM sqlite_schema WHERE type='table'`
  );
  const rows = statement.all() as { name: string }[];
  const tableNames = rows.map(row => row.name);
  if (!tableNames.includes('updates') || !tableNames.includes('blobs')) {
    return false;
  }
}

export function isValidDBFile(path: string) {
  let db: Database | null = null;
  try {
    db = sqlite(path);
    // check if db has two tables, one for updates and one for blobs
    const statement = db.prepare(
      `SELECT name FROM sqlite_schema WHERE type='table'`
    );
    const rows = statement.all() as { name: string }[];
    const tableNames = rows.map(row => row.name);
    if (!tableNames.includes('updates') || !tableNames.includes('blobs')) {
      return false;
    }
    return true;
  } catch (error) {
    logger.error('isValidDBFile', error);
    return false;
  } finally {
    db?.close();
  }
}
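A small usage sketch of the validation helper; the file path is hypothetical:

```ts
import { isValidDBFile } from './helper';

// Returns false when the SQLite file cannot be opened or lacks the
// "updates"/"blobs" tables that the adapters above expect.
if (!isValidDBFile('/tmp/imported.affine')) {
  console.warn('not a valid AFFiNE workspace database');
}
```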
@@ -1,8 +1,10 @@
import fs from 'fs-extra';

import { appContext } from '../../context';
import type { NamespaceHandlers } from '../type';
import { appContext } from '../context';
import type { MainEventListener, NamespaceHandlers } from '../type';
import { ensureSQLiteDB } from './ensure-db';
import { dbSubjects } from './subjects';

export * from './ensure-db';
export * from './subjects';

export const dbHandlers = {
  getDocAsUpdates: async (_, id: string) => {

@@ -25,18 +27,22 @@ export const dbHandlers = {
    const workspaceDB = await ensureSQLiteDB(workspaceId);
    return workspaceDB.deleteBlob(key);
  },
  getPersistedBlobs: async (_, workspaceId: string) => {
  getBlobKeys: async (_, workspaceId: string) => {
    const workspaceDB = await ensureSQLiteDB(workspaceId);
    return workspaceDB.getPersistentBlobKeys();
    return workspaceDB.getBlobKeys();
  },
  getDefaultStorageLocation: async () => {
    return appContext.appDataPath;
  },
  getDBFilePath: async (_, workspaceId: string) => {
    const workspaceDB = await ensureSQLiteDB(workspaceId);
    return {
      path: workspaceDB.path,
      realPath: await fs.realpath(workspaceDB.path),
} satisfies NamespaceHandlers;

export const dbEvents = {
  onExternalUpdate: (
    fn: (update: { workspaceId: string; update: Uint8Array }) => void
  ) => {
    const sub = dbSubjects.externalUpdate.subscribe(fn);
    return () => {
      sub.unsubscribe();
    };
  },
} satisfies NamespaceHandlers;
} satisfies Record<string, MainEventListener>;
198
apps/electron/layers/main/src/db/secondary-db.ts
Normal file
198
apps/electron/layers/main/src/db/secondary-db.ts
Normal file
@@ -0,0 +1,198 @@
|
||||
import { debounce } from 'lodash-es';
|
||||
import * as Y from 'yjs';
|
||||
|
||||
import type { AppContext } from '../context';
|
||||
import { logger } from '../logger';
|
||||
import type { YOrigin } from '../type';
|
||||
import { mergeUpdateWorker } from '../workers';
|
||||
import { getWorkspaceMeta } from '../workspace';
|
||||
import { BaseSQLiteAdapter } from './base-db-adapter';
|
||||
import type { WorkspaceSQLiteDB } from './workspace-db-adapter';
|
||||
|
||||
const FLUSH_WAIT_TIME = 5000;
|
||||
const FLUSH_MAX_WAIT_TIME = 10000;
|
||||
|
||||
export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
|
||||
role = 'secondary';
|
||||
yDoc = new Y.Doc();
|
||||
firstConnected = false;
|
||||
|
||||
updateQueue: Uint8Array[] = [];
|
||||
|
||||
  unsubscribers = new Set<() => void>();

  constructor(
    public override path: string,
    public upstream: WorkspaceSQLiteDB
  ) {
    super(path);
    this.setupAndListen();
    logger.debug('[SecondaryWorkspaceSQLiteDB] created', this.workspaceId);
  }

  close() {
    this.db?.close();
    this.db = null;
  }

  override destroy() {
    this.flushUpdateQueue();
    this.unsubscribers.forEach(unsub => unsub());
    this.db?.close();
    this.yDoc.destroy();
  }

  get workspaceId() {
    return this.upstream.workspaceId;
  }

  // do not update db immediately, instead, push to a queue
  // and flush the queue in a future time
  addUpdateToUpdateQueue(update: Uint8Array) {
    this.updateQueue.push(update);
    this.debouncedFlush();
  }

  flushUpdateQueue() {
    logger.debug(
      'flushUpdateQueue',
      this.workspaceId,
      'queue',
      this.updateQueue.length
    );
    const updates = [...this.updateQueue];
    this.updateQueue = [];
    this.connect();
    this.addUpdateToSQLite(updates);
    this.close();
  }

  // flush after 5s, but will not wait for more than 10s
  debouncedFlush = debounce(this.flushUpdateQueue, FLUSH_WAIT_TIME, {
    maxWait: FLUSH_MAX_WAIT_TIME,
  });

  runCounter = 0;

  // wrap the fn with connect and close
  // it only works for sync functions
  run = <T extends (...args: any[]) => any>(fn: T) => {
    try {
      if (this.runCounter === 0) {
        this.connect();
      }
      this.runCounter++;
      return fn();
    } catch (err) {
      logger.error(err);
    } finally {
      this.runCounter--;
      if (this.runCounter === 0) {
        this.close();
      }
    }
  };

  setupAndListen() {
    if (this.firstConnected) {
      return;
    }
    this.firstConnected = true;

    const onUpstreamUpdate = (update: Uint8Array, origin: YOrigin) => {
      if (origin === 'renderer') {
        // update to upstream yDoc should be replicated to self yDoc
        this.applyUpdate(update, 'upstream');
      }
    };

    const onSelfUpdate = (update: Uint8Array, origin: YOrigin) => {
      // for self update from upstream, we need to push it to external DB
      if (origin === 'upstream') {
        this.addUpdateToUpdateQueue(update);
      }

      if (origin === 'self') {
        this.upstream.applyUpdate(update, 'external');
      }
    };

    // listen to upstream update
    this.upstream.yDoc.on('update', onUpstreamUpdate);
    this.yDoc.on('update', onSelfUpdate);

    this.unsubscribers.add(() => {
      this.upstream.yDoc.off('update', onUpstreamUpdate);
      this.yDoc.off('update', onSelfUpdate);
    });

    this.run(() => {
      // apply all updates from upstream
      const upstreamUpdate = this.upstream.getDocAsUpdates();
      // to initialize the yDoc, we need to apply all updates from the db
      this.applyUpdate(upstreamUpdate, 'upstream');

      this.pull();
    });
  }

  applyUpdate = (data: Uint8Array, origin: YOrigin = 'upstream') => {
    Y.applyUpdate(this.yDoc, data, origin);
  };

  // TODO: have a better solution to handle blobs
  syncBlobs() {
    this.run(() => {
      // pull blobs
      const blobsKeys = this.getBlobKeys();
      const upstreamBlobsKeys = this.upstream.getBlobKeys();
      // put every missing blob to upstream
      for (const key of blobsKeys) {
        if (!upstreamBlobsKeys.includes(key)) {
          const blob = this.getBlob(key);
          if (blob) {
            this.upstream.addBlob(key, blob);
            logger.debug('syncBlobs', this.workspaceId, key);
          }
        }
      }
    });
  }

  /**
   * pull from external DB file and apply to embedded yDoc
   * workflow:
   * - connect to external db
   * - get updates
   * - apply updates to local yDoc
   * - get blobs and put new blobs to upstream
   * - disconnect
   */
  async pull() {
    const start = performance.now();
    const updates = this.run(() => {
      // TODO: no need to get all updates, just get the latest ones (using a cursor, etc)?
      this.syncBlobs();
      return this.getUpdates().map(update => update.data);
    });

    const merged = await mergeUpdateWorker(updates);
    this.applyUpdate(merged, 'self');

    logger.debug(
      'pull external updates',
      this.path,
      updates.length,
      (performance.now() - start).toFixed(2),
      'ms'
    );
  }
}

export async function getSecondaryWorkspaceDBPath(
  context: AppContext,
  workspaceId: string
) {
  const meta = await getWorkspaceMeta(context, workspaceId);
  return meta?.secondaryDBPath;
}
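The batching behaviour above is described only by the inline comment ("flush after 5s, but will not wait for more than 10s"). A minimal standalone sketch of that pattern, assuming a lodash-style debounce with maxWait and the 5s/10s values implied by the comment (the lodash-es import and constant values are assumptions, not code from this commit):

// Sketch only, not the adapter itself.
import { debounce } from 'lodash-es';

const FLUSH_WAIT_TIME = 5000; // flush 5s after the last queued update...
const FLUSH_MAX_WAIT_TIME = 10000; // ...but never later than 10s after the first one

let queue: Uint8Array[] = [];

const flush = () => {
  const updates = queue;
  queue = [];
  // the real adapter does: connect(); addUpdateToSQLite(updates); close();
  console.log('flushed', updates.length, 'updates');
};

const debouncedFlush = debounce(flush, FLUSH_WAIT_TIME, {
  maxWait: FLUSH_MAX_WAIT_TIME,
});

export function addUpdate(update: Uint8Array) {
  queue.push(update);
  debouncedFlush();
}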
7  apps/electron/layers/main/src/db/subjects.ts  Normal file
@@ -0,0 +1,7 @@
import { Subject } from 'rxjs';

export const dbSubjects = {
  // emit workspace id when the db file is missing
  fileMissing: new Subject<string>(),
  externalUpdate: new Subject<{ workspaceId: string; update: Uint8Array }>(),
};
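These are plain rxjs subjects, so any main-process module can subscribe to them. An illustrative consumer (the handler body is hypothetical, not part of this commit):

import { dbSubjects } from './subjects';

const sub = dbSubjects.externalUpdate.subscribe(({ workspaceId, update }) => {
  console.log('external update for', workspaceId, update.byteLength, 'bytes');
});

// later, e.g. on app shutdown
sub.unsubscribe();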
106  apps/electron/layers/main/src/db/workspace-db-adapter.ts  Normal file
@@ -0,0 +1,106 @@
import type { Database } from 'better-sqlite3';
import { Subject } from 'rxjs';
import * as Y from 'yjs';

import type { AppContext } from '../context';
import { logger } from '../logger';
import type { YOrigin } from '../type';
import { mergeUpdateWorker } from '../workers';
import { getWorkspaceMeta } from '../workspace';
import { BaseSQLiteAdapter } from './base-db-adapter';
import { dbSubjects } from './subjects';

export class WorkspaceSQLiteDB extends BaseSQLiteAdapter {
  role = 'primary';
  yDoc = new Y.Doc();
  firstConnected = false;

  update$ = new Subject<void>();

  constructor(public override path: string, public workspaceId: string) {
    super(path);
  }

  override destroy() {
    this.db?.close();
    this.db = null;
    this.yDoc.destroy();

    // when db is closed, we can safely remove it from ensure-db list
    this.update$.complete();
  }

  getWorkspaceName = () => {
    return this.yDoc.getMap('space:meta').get('name') as string;
  };

  async init(): Promise<Database | undefined> {
    const db = super.connect();

    if (!this.firstConnected) {
      this.yDoc.on('update', (update: Uint8Array, origin: YOrigin) => {
        if (origin === 'renderer') {
          this.addUpdateToSQLite([update]);
        } else if (origin === 'external') {
          this.addUpdateToSQLite([update]);
          logger.debug('external update', this.workspaceId);
          dbSubjects.externalUpdate.next({
            workspaceId: this.workspaceId,
            update,
          });
        }
      });
    }

    const updates = this.getUpdates();
    const merged = await mergeUpdateWorker(updates.map(update => update.data));

    // to initialize the yDoc, we need to apply all updates from the db
    this.applyUpdate(merged, 'self');

    this.firstConnected = true;
    this.update$.next();

    return db;
  }

  getDocAsUpdates = () => {
    return Y.encodeStateAsUpdate(this.yDoc);
  };

  // non-blocking and use yDoc to validate the update
  // after that, the update is added to the db
  applyUpdate = (data: Uint8Array, origin: YOrigin = 'renderer') => {
    // todo: trim the updates when the number of records is too large
    // 1. store the current ydoc state in the db
    // 2. then delete the old updates
    // yjs-idb will always trim the db for the first time after DB is loaded
    Y.applyUpdate(this.yDoc, data, origin);
  };

  override addBlob(key: string, value: Uint8Array) {
    const res = super.addBlob(key, value);
    this.update$.next();
    return res;
  }

  override deleteBlob(key: string) {
    super.deleteBlob(key);
    this.update$.next();
  }

  override addUpdateToSQLite(data: Uint8Array[]) {
    super.addUpdateToSQLite(data);
    this.update$.next();
  }
}

export async function openWorkspaceDatabase(
  context: AppContext,
  workspaceId: string
) {
  const meta = await getWorkspaceMeta(context, workspaceId);
  const db = new WorkspaceSQLiteDB(meta.mainDBPath, workspaceId);
  await db.init();
  return db;
}
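Both adapters route every Y.Doc update by its origin tag ('renderer', 'self', 'upstream', 'external'), which is what keeps the primary and secondary docs from echoing updates back at each other. A minimal sketch of that idea with two in-memory docs (illustrative, not the production classes):

import * as Y from 'yjs';

const primary = new Y.Doc();
const secondary = new Y.Doc();

primary.on('update', (update: Uint8Array, origin: unknown) => {
  // only updates that originated from the renderer are pushed downstream,
  // and they arrive at the secondary doc tagged as 'upstream'
  if (origin === 'renderer') {
    Y.applyUpdate(secondary, update, 'upstream');
  }
});

secondary.on('update', (update: Uint8Array, origin: unknown) => {
  // updates tagged 'upstream' are NOT forwarded back, so there is no loop;
  // only updates applied on the secondary side with origin 'self' flow upstream
  if (origin === 'self') {
    Y.applyUpdate(primary, update, 'external');
  }
});

// simulate a renderer edit
const scratch = new Y.Doc();
scratch.getMap('space:meta').set('name', 'demo');
Y.applyUpdate(primary, Y.encodeStateAsUpdate(scratch), 'renderer');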
@@ -1,26 +1,35 @@
import path from 'node:path';

import { app } from 'electron';
import { dialog, shell } from 'electron';
import fs from 'fs-extra';
import { nanoid } from 'nanoid';

import { appContext } from '../../context';
import { logger } from '../../logger';
import { ensureSQLiteDB, isRemoveOrMoveEvent } from '../db/ensure-db';
import type { WorkspaceSQLiteDB } from '../db/sqlite';
import { getWorkspaceDBPath, isValidDBFile } from '../db/sqlite';
import { listWorkspaces } from '../workspace/workspace';
import { appContext } from '../context';
import { ensureSQLiteDB } from '../db/ensure-db';
import { isValidDBFile } from '../db/helper';
import type { WorkspaceSQLiteDB } from '../db/workspace-db-adapter';
import { logger } from '../logger';
import {
  getWorkspaceDBPath,
  getWorkspaceMeta,
  listWorkspaces,
  storeWorkspaceMeta,
} from '../workspace';

// NOTE:
// we are using native dialogs because HTML dialogs do not give full file paths

export async function revealDBFile(workspaceId: string) {
  const workspaceDB = await ensureSQLiteDB(workspaceId);
  shell.showItemInFolder(await fs.realpath(workspaceDB.path));
  const meta = await getWorkspaceMeta(appContext, workspaceId);
  if (!meta) {
    return;
  }
  shell.showItemInFolder(meta.secondaryDBPath ?? meta.mainDBPath);
}

// provide a backdoor to set dialog path for testing in playwright
interface FakeDialogResult {
export interface FakeDialogResult {
  canceled?: boolean;
  filePath?: string;
  filePaths?: string[];
@@ -54,12 +63,20 @@ const ErrorMessages = [

type ErrorMessage = (typeof ErrorMessages)[number];

interface SaveDBFileResult {
export interface SaveDBFileResult {
  filePath?: string;
  canceled?: boolean;
  error?: ErrorMessage;
}

const extension = 'affine';

function getDefaultDBFileName(name: string, id: string) {
  const fileName = `${name}_${id}.${extension}`;
  // make sure fileName is a valid file name
  return fileName.replace(/[/\\?%*:|"<>]/g, '-');
}

/**
 * This function is called when the user clicks the "Save" button in the "Save Workspace" dialog.
 *
@@ -77,7 +94,13 @@ export async function saveDBFileAs(
      title: 'Save Workspace',
      showsTagField: false,
      buttonLabel: 'Save',
      defaultPath: `${db.getWorkspaceName()}_${workspaceId}.db`,
      filters: [
        {
          extensions: [extension],
          name: '',
        },
      ],
      defaultPath: getDefaultDBFileName(db.getWorkspaceName(), workspaceId),
      message: 'Save Workspace as a SQLite Database file',
    }));
  const filePath = ret.filePath;
@@ -99,7 +122,7 @@ export async function saveDBFileAs(
  }
}

interface SelectDBFileLocationResult {
export interface SelectDBFileLocationResult {
  filePath?: string;
  error?: ErrorMessage;
  canceled?: boolean;
@@ -109,27 +132,20 @@ export async function selectDBFileLocation(): Promise<SelectDBFileLocationResult
  try {
    const ret =
      getFakedResult() ??
      (await dialog.showSaveDialog({
        properties: ['showOverwriteConfirmation'],
        title: 'Set database location',
        showsTagField: false,
      (await dialog.showOpenDialog({
        properties: ['openDirectory'],
        title: 'Set Workspace Storage Location',
        buttonLabel: 'Select',
        defaultPath: `workspace-storage.db`,
        defaultPath: app.getPath('documents'),
        message: "Select a location to store the workspace's database file",
      }));
    const filePath = ret.filePath;
    if (ret.canceled || !filePath) {
    const dir = ret.filePaths?.[0];
    if (ret.canceled || !dir) {
      return {
        canceled: true,
      };
    }
    // the same db file cannot be loaded twice
    if (await dbFileAlreadyLoaded(filePath)) {
      return {
        error: 'DB_FILE_ALREADY_LOADED',
      };
    }
    return { filePath };
    return { filePath: dir };
  } catch (err) {
    logger.error('selectDBFileLocation', err);
    return {
@@ -138,7 +154,7 @@ export async function selectDBFileLocation(): Promise<SelectDBFileLocationResult
  }
}

interface LoadDBFileResult {
export interface LoadDBFileResult {
  workspaceId?: string;
  error?: ErrorMessage;
  canceled?: boolean;
@@ -170,10 +186,10 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
        {
          name: 'SQLite Database',
          // do we want to support other file format?
          extensions: ['db'],
          extensions: ['db', 'affine'],
        },
      ],
      message: 'Load Workspace from a SQLite Database file',
      message: 'Load Workspace from a AFFiNE file',
    }));
    const filePath = ret.filePaths?.[0];
    if (ret.canceled || !filePath) {
@@ -197,14 +213,20 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
      return { error: 'DB_FILE_INVALID' }; // invalid db file
    }

    // symlink the db file to a new workspace id
    // copy the db file to a new workspace id
    const workspaceId = nanoid(10);
    const linkedFilePath = await getWorkspaceDBPath(appContext, workspaceId);
    const internalFilePath = getWorkspaceDBPath(appContext, workspaceId);

    await fs.ensureDir(path.join(appContext.appDataPath, 'workspaces'));

    await fs.symlink(filePath, linkedFilePath, 'file');
    logger.info(`loadDBFile, symlink: ${filePath} -> ${linkedFilePath}`);
    await fs.copy(filePath, internalFilePath);
    logger.info(`loadDBFile, copy: ${filePath} -> ${internalFilePath}`);

    await storeWorkspaceMeta(appContext, workspaceId, {
      id: workspaceId,
      mainDBPath: internalFilePath,
      secondaryDBPath: filePath,
    });

    return { workspaceId };
  } catch (err) {
@@ -215,7 +237,7 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
  }
}

interface MoveDBFileResult {
export interface MoveDBFileResult {
  filePath?: string;
  error?: ErrorMessage;
  canceled?: boolean;
@@ -225,58 +247,51 @@ interface MoveDBFileResult {
 * This function is called when the user clicks the "Move" button in the "Move Workspace Storage" setting.
 *
 * It will
 * - move the source db file to a new location
 * - symlink the new location to the old db file
 * - copy the source db file to a new location
 * - remove the old db external file
 * - update the external db file path in the workspace meta
 * - return the new file path
 */
export async function moveDBFile(
  workspaceId: string,
  dbFileLocation?: string
  dbFileDir?: string
): Promise<MoveDBFileResult> {
  let db: WorkspaceSQLiteDB | null = null;
  try {
    const { moveFile, FsWatcher } = await import('@affine/native');
    db = await ensureSQLiteDB(workspaceId);
    // get the real file path of db
    const realpath = await fs.realpath(db.path);
    const isLink = realpath !== db.path;
    const watcher = FsWatcher.watch(realpath, { recursive: false });
    const waitForRemove = new Promise<void>(resolve => {
      const subscription = watcher.subscribe(event => {
        if (isRemoveOrMoveEvent(event)) {
          subscription.unsubscribe();
          // resolve after FSWatcher in `database$` is fired
          setImmediate(() => {
            resolve();
          });
        }
      });
    });
    const newFilePath =
      dbFileLocation ??

    const meta = await getWorkspaceMeta(appContext, workspaceId);

    const oldDir = meta.secondaryDBPath
      ? path.dirname(meta.secondaryDBPath)
      : null;
    const defaultDir = oldDir ?? app.getPath('documents');

    const newName = getDefaultDBFileName(db.getWorkspaceName(), workspaceId);

    const newDirPath =
      dbFileDir ??
      (
        getFakedResult() ??
        (await dialog.showSaveDialog({
          properties: ['showOverwriteConfirmation'],
        (await dialog.showOpenDialog({
          properties: ['openDirectory'],
          title: 'Move Workspace Storage',
          showsTagField: false,
          buttonLabel: 'Save',
          defaultPath: realpath,
          buttonLabel: 'Move',
          defaultPath: defaultDir,
          message: 'Move Workspace storage file',
        }))
      ).filePath;
      ).filePaths?.[0];

    // skips if
    // - user canceled the dialog
    // - user selected the same file
    // - user selected the same file in the link file in app data dir
    if (!newFilePath || newFilePath === realpath || db.path === newFilePath) {
    // - user selected the same dir
    if (!newDirPath || newDirPath === oldDir) {
      return {
        canceled: true,
      };
    }

    db.db.close();
    const newFilePath = path.join(newDirPath, newName);

    if (await fs.pathExists(newFilePath)) {
      return {
@@ -284,24 +299,19 @@ export async function moveDBFile(
      };
    }

    if (isLink) {
      // remove the old link to unblock new link
      await fs.unlink(db.path);
    logger.info(`[moveDBFile] copy ${meta.mainDBPath} -> ${newFilePath}`);

    await fs.copy(meta.mainDBPath, newFilePath);

    // remove the old db file, but we don't care if it fails
    if (meta.secondaryDBPath) {
      fs.remove(meta.secondaryDBPath);
    }

    logger.info(`[moveDBFile] move ${realpath} -> ${newFilePath}`);

    await moveFile(realpath, newFilePath);

    await fs.ensureSymlink(newFilePath, db.path, 'file');
    logger.info(`[moveDBFile] symlink: ${realpath} -> ${newFilePath}`);
    // wait for the file move event emits to the FileWatcher in database$ in ensure-db.ts
    // so that the db will be destroyed and we can call the `ensureSQLiteDB` in the next step
    // or the FileWatcher will continue listen on the `realpath` and emit file change events
    // then the database will reload while receiving these events; and the moved database file will be recreated while reloading database
    await waitForRemove;
    logger.info(`removed`);
    await ensureSQLiteDB(workspaceId);
    // update meta
    await storeWorkspaceMeta(appContext, workspaceId, {
      secondaryDBPath: newFilePath,
    });

    return {
      filePath: newFilePath,
@@ -317,7 +327,6 @@ export async function moveDBFile(

async function dbFileAlreadyLoaded(path: string) {
  const meta = await listWorkspaces(appContext);
  const realpath = await fs.realpath(path);
  const paths = meta.map(m => m[1].realpath);
  return paths.includes(realpath);
  const paths = meta.map(m => m[1].secondaryDBPath);
  return paths.includes(path);
}
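Several hunks above replace dialog.showSaveDialog with dialog.showOpenDialog plus the 'openDirectory' property, which changes the result shape from a single filePath to a filePaths array. A small sketch of the directory-picker pattern the new code uses (illustrative, not the production function; option values mirror the diff above):

import { app, dialog } from 'electron';

async function pickStorageDir(): Promise<string | undefined> {
  const ret = await dialog.showOpenDialog({
    properties: ['openDirectory'],
    title: 'Set Workspace Storage Location',
    buttonLabel: 'Select',
    defaultPath: app.getPath('documents'),
  });
  // showOpenDialog resolves to { canceled, filePaths }, not { filePath }
  return ret.canceled ? undefined : ret.filePaths[0];
}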
@@ -1,14 +1,16 @@
import { app, BrowserWindow } from 'electron';

import { logger } from '../logger';
import { applicationMenuEvents } from './application-menu';
import { dbEvents } from './db';
import { updaterEvents } from './updater';
import { logger } from './logger';
import { updaterEvents } from './updater/event';
import { workspaceEvents } from './workspace';

export const allEvents = {
  applicationMenu: applicationMenuEvents,
  db: dbEvents,
  updater: updaterEvents,
  applicationMenu: applicationMenuEvents,
  workspace: workspaceEvents,
};

function getActiveWindows() {
@@ -19,9 +21,18 @@ export function registerEvents() {
  // register events
  for (const [namespace, namespaceEvents] of Object.entries(allEvents)) {
    for (const [key, eventRegister] of Object.entries(namespaceEvents)) {
      const subscription = eventRegister((...args: any) => {
      const subscription = eventRegister((...args: any[]) => {
        const chan = `${namespace}:${key}`;
        logger.info('[ipc-event]', chan, args);
        logger.info(
          '[ipc-event]',
          chan,
          args.filter(
            a =>
              a !== undefined &&
              typeof a !== 'function' &&
              typeof a !== 'object'
          )
        );
        getActiveWindows().forEach(win => win.webContents.send(chan, ...args));
      });
      app.on('before-quit', () => {
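Every registered event is re-broadcast to all open windows on the channel `${namespace}:${key}`. On the renderer (or preload) side, consuming such an event looks roughly like the sketch below; the channel name is hypothetical, the real keys come from allEvents:

// Renderer-side sketch; 'workspace:onMetaChange' is a made-up channel name.
import { ipcRenderer } from 'electron';

const channel = 'workspace:onMetaChange';

const listener = (_event: Electron.IpcRendererEvent, ...args: unknown[]) => {
  console.log('received', channel, args);
};

ipcRenderer.on(channel, listener);

// mirrors the unsubscribe functions that the main-process event registers return
ipcRenderer.removeListener(channel, listener);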
@@ -1,38 +0,0 @@
import { Subject } from 'rxjs';

import type { MainEventListener } from './type';

interface DBFilePathMeta {
  workspaceId: string;
  path: string;
  realPath: string;
}

export const dbSubjects = {
  // emit workspace ids
  dbFileMissing: new Subject<string>(),
  // emit workspace ids
  dbFileUpdate: new Subject<string>(),
  dbFilePathChange: new Subject<DBFilePathMeta>(),
};

export const dbEvents = {
  onDBFileMissing: (fn: (workspaceId: string) => void) => {
    const sub = dbSubjects.dbFileMissing.subscribe(fn);
    return () => {
      sub.unsubscribe();
    };
  },
  onDBFileUpdate: (fn: (workspaceId: string) => void) => {
    const sub = dbSubjects.dbFileUpdate.subscribe(fn);
    return () => {
      sub.unsubscribe();
    };
  },
  onDBFilePathChange: (fn: (meta: DBFilePathMeta) => void) => {
    const sub = dbSubjects.dbFilePathChange.subscribe(fn);
    return () => {
      sub.unsubscribe();
    };
  },
} satisfies Record<string, MainEventListener>;
@@ -1,9 +0,0 @@
export * from './register';

import { applicationMenuSubjects } from './application-menu';
import { dbSubjects } from './db';

export const subjects = {
  db: dbSubjects,
  applicationMenu: applicationMenuSubjects,
};
@@ -1 +0,0 @@
export type MainEventListener = (...args: any[]) => () => void;
10  apps/electron/layers/main/src/export/index.ts  Normal file
@@ -0,0 +1,10 @@
import type { NamespaceHandlers } from '../type';
import { savePDFFileAs } from './pdf';

export const exportHandlers = {
  savePDFFileAs: async (_, title: string) => {
    return savePDFFileAs(title);
  },
} satisfies NamespaceHandlers;

export * from './pdf';
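For context, a renderer reaches this handler through the IPC bridge; a hedged sketch of such a call follows. The 'export:savePDFFileAs' channel name follows the `${namespace}:${key}` convention used elsewhere in this diff, but the exact exposure mechanism (preload bridge) is not shown here, so both the channel and the direct ipcRenderer use are assumptions:

import { ipcRenderer } from 'electron';

async function exportCurrentPage(title: string) {
  // channel name assumed; not taken from this commit
  const result = await ipcRenderer.invoke('export:savePDFFileAs', title);
  console.log(result.filePath ?? result.error ?? 'canceled');
}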
61  apps/electron/layers/main/src/export/pdf.ts  Normal file
@@ -0,0 +1,61 @@
import { BrowserWindow, dialog, shell } from 'electron';
import fs from 'fs-extra';

import { logger } from '../logger';
import type { ErrorMessage } from './utils';
import { getFakedResult } from './utils';

export interface SavePDFFileResult {
  filePath?: string;
  canceled?: boolean;
  error?: ErrorMessage;
}

/**
 * This function is called when the user clicks the "Export to PDF" button in the electron.
 *
 * It will just copy the file to the given path
 */
export async function savePDFFileAs(
  pageTitle: string
): Promise<SavePDFFileResult> {
  try {
    const ret =
      getFakedResult() ??
      (await dialog.showSaveDialog({
        properties: ['showOverwriteConfirmation'],
        title: 'Save PDF',
        showsTagField: false,
        buttonLabel: 'Save',
        defaultPath: `${pageTitle}.pdf`,
        message: 'Save Page as a PDF file',
      }));
    const filePath = ret.filePath;
    if (ret.canceled || !filePath) {
      return {
        canceled: true,
      };
    }

    await BrowserWindow.getFocusedWindow()
      ?.webContents.printToPDF({
        pageSize: 'A4',
        printBackground: true,
        landscape: false,
      })
      .then(data => {
        fs.writeFile(filePath, data, error => {
          if (error) throw error;
          logger.log(`Wrote PDF successfully to ${filePath}`);
        });
      });

    shell.openPath(filePath);
    return { filePath };
  } catch (err) {
    logger.error('savePDFFileAs', err);
    return {
      error: 'UNKNOWN_ERROR',
    };
  }
}
24  apps/electron/layers/main/src/export/utils.ts  Normal file
@@ -0,0 +1,24 @@
// provide a backdoor to set dialog path for testing in playwright
interface FakeDialogResult {
  canceled?: boolean;
  filePath?: string;
  filePaths?: string[];
}
// result will be used in the next call to showOpenDialog
// if it is being read once, it will be reset to undefined
let fakeDialogResult: FakeDialogResult | undefined = undefined;
export function getFakedResult() {
  const result = fakeDialogResult;
  fakeDialogResult = undefined;
  return result;
}

export function setFakeDialogResult(result: FakeDialogResult | undefined) {
  fakeDialogResult = result;
  // for convenience, we will fill filePaths with filePath if it is not set
  if (result?.filePaths === undefined && result?.filePath !== undefined) {
    result.filePaths = [result.filePath];
  }
}
const ErrorMessages = ['FILE_ALREADY_EXISTS', 'UNKNOWN_ERROR'] as const;
export type ErrorMessage = (typeof ErrorMessages)[number];
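A sketch of how a test could use this backdoor. How a Playwright test actually reaches setFakeDialogResult in the packaged app (for example through a debug IPC handler) is not shown in this diff, so the direct imports below are illustrative only:

// Test-side sketch; still requires an Electron main-process environment.
import { setFakeDialogResult } from './utils';
import { savePDFFileAs } from './pdf';

async function fakeDialogExample() {
  // pre-seed the next dialog call so no native dialog opens during the test;
  // the faked result is consumed exactly once by getFakedResult()
  setFakeDialogResult({ filePath: '/tmp/my-page.pdf' });
  const result = await savePDFFileAs('my-page');
  console.log(result.filePath); // '/tmp/my-page.pdf'
}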
@@ -2,4 +2,35 @@ import { allEvents as events } from './events';
import { allHandlers as handlers } from './handlers';

// this will be used by preload script to expose all handlers and events to the renderer process
// - register in exposeInMainWorld in preload
// - provide type hints
export { events, handlers };

export const getExposedMeta = () => {
  const handlersMeta = Object.entries(handlers).map(
    ([namespace, namespaceHandlers]) => {
      return [
        namespace,
        Object.keys(namespaceHandlers).map(handlerName => handlerName),
      ];
    }
  );

  const eventsMeta = Object.entries(events).map(
    ([namespace, namespaceHandlers]) => {
      return [
        namespace,
        Object.keys(namespaceHandlers).map(handlerName => handlerName),
      ];
    }
  );

  return {
    handlers: handlersMeta,
    events: eventsMeta,
  };
};

export type MainIPCHandlerMap = typeof handlers;

export type MainIPCEventMap = typeof events;
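The meta returned by getExposedMeta is serialized into the BrowserWindow's additionalArguments as `--exposed-meta=...` (see the main-window change further down in this diff). How the preload script reads it back is not part of this diff; a hedged sketch of that side could look like this ('appInfo' is a made-up bridge name for illustration):

// Preload-side sketch (assumption): recover the serialized meta from argv.
import { contextBridge } from 'electron';

const prefix = '--exposed-meta=';
const arg = process.argv.find(a => a.startsWith(prefix));
const exposedMeta = arg
  ? JSON.parse(arg.slice(prefix.length))
  : { handlers: [], events: [] };

contextBridge.exposeInMainWorld('appInfo', { exposedMeta });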
@@ -1,21 +1,14 @@
import { ipcMain } from 'electron';

import { getLogFilePath, logger, revealLogFile } from '../logger';
import { dbHandlers } from './db';
import { dialogHandlers } from './dialog';
import { exportHandlers } from './export';
import { getLogFilePath, logger, revealLogFile } from './logger';
import type { NamespaceHandlers } from './type';
import { uiHandlers } from './ui';
import { updaterHandlers } from './updater';
import { workspaceHandlers } from './workspace';

type IsomorphicHandler = (
  e: Electron.IpcMainInvokeEvent,
  ...args: any[]
) => Promise<any>;

type NamespaceHandlers = {
  [key: string]: IsomorphicHandler;
};

export const debugHandlers = {
  revealLogFile: async () => {
    return revealLogFile();
@@ -27,12 +20,13 @@ export const debugHandlers = {

// Note: all of these handlers will be the single-source-of-truth for the apis exposed to the renderer process
export const allHandlers = {
  workspace: workspaceHandlers,
  ui: uiHandlers,
  db: dbHandlers,
  dialog: dialogHandlers,
  debug: debugHandlers,
  dialog: dialogHandlers,
  ui: uiHandlers,
  export: exportHandlers,
  updater: updaterHandlers,
  workspace: workspaceHandlers,
} satisfies Record<string, NamespaceHandlers>;

export const registerHandlers = () => {
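The body of registerHandlers is cut off by this hunk. Based on the namespaced layout and the `${namespace}:${key}` channel convention used for events above, the registration plausibly looks like the sketch below; this is an assumption, not the code from this commit:

// Sketch of namespaced ipcMain registration; not the actual registerHandlers body.
import { ipcMain } from 'electron';

type AnyHandler = (e: Electron.IpcMainInvokeEvent, ...args: any[]) => Promise<any>;

export function registerHandlersSketch(
  all: Record<string, Record<string, AnyHandler>>
) {
  for (const [namespace, namespaceHandlers] of Object.entries(all)) {
    for (const [key, handler] of Object.entries(namespaceHandlers)) {
      ipcMain.handle(`${namespace}:${key}`, (e, ...args) => handler(e, ...args));
    }
  }
}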
@@ -1,160 +0,0 @@
|
||||
import type { NotifyEvent } from '@affine/native/event';
|
||||
import { createFSWatcher } from '@affine/native/fs-watcher';
|
||||
import { app } from 'electron';
|
||||
import {
|
||||
connectable,
|
||||
defer,
|
||||
from,
|
||||
fromEvent,
|
||||
identity,
|
||||
lastValueFrom,
|
||||
Observable,
|
||||
ReplaySubject,
|
||||
Subject,
|
||||
} from 'rxjs';
|
||||
import {
|
||||
debounceTime,
|
||||
exhaustMap,
|
||||
filter,
|
||||
groupBy,
|
||||
ignoreElements,
|
||||
mergeMap,
|
||||
shareReplay,
|
||||
startWith,
|
||||
switchMap,
|
||||
take,
|
||||
takeUntil,
|
||||
tap,
|
||||
} from 'rxjs/operators';
|
||||
|
||||
import { appContext } from '../../context';
|
||||
import { subjects } from '../../events';
|
||||
import { logger } from '../../logger';
|
||||
import { ts } from '../../utils';
|
||||
import type { WorkspaceSQLiteDB } from './sqlite';
|
||||
import { openWorkspaceDatabase } from './sqlite';
|
||||
|
||||
const databaseInput$ = new Subject<string>();
|
||||
export const databaseConnector$ = new ReplaySubject<WorkspaceSQLiteDB>();
|
||||
|
||||
const groupedDatabaseInput$ = databaseInput$.pipe(groupBy(identity));
|
||||
|
||||
export const database$ = connectable(
|
||||
groupedDatabaseInput$.pipe(
|
||||
mergeMap(workspaceDatabase$ =>
|
||||
workspaceDatabase$.pipe(
|
||||
// only open the first db with the same workspaceId, and emit it to the downstream
|
||||
exhaustMap(workspaceId => {
|
||||
logger.info('[ensureSQLiteDB] open db connection', workspaceId);
|
||||
return from(openWorkspaceDatabase(appContext, workspaceId)).pipe(
|
||||
switchMap(db => {
|
||||
return startWatchingDBFile(db).pipe(
|
||||
// ignore all events and only emit the db to the downstream
|
||||
ignoreElements(),
|
||||
startWith(db)
|
||||
);
|
||||
})
|
||||
);
|
||||
}),
|
||||
shareReplay(1)
|
||||
)
|
||||
),
|
||||
tap({
|
||||
complete: () => {
|
||||
logger.info('[FSWatcher] close all watchers');
|
||||
createFSWatcher().close();
|
||||
},
|
||||
})
|
||||
),
|
||||
{
|
||||
connector: () => databaseConnector$,
|
||||
resetOnDisconnect: true,
|
||||
}
|
||||
);
|
||||
|
||||
export const databaseConnectableSubscription = database$.connect();
|
||||
|
||||
// 1. File delete
|
||||
// 2. File move
|
||||
// - on Linux, it's `type: { modify: { kind: 'rename', mode: 'from' } }`
|
||||
// - on Windows, it's `type: { remove: { kind: 'any' } }`
|
||||
// - on macOS, it's `type: { modify: { kind: 'rename', mode: 'any' } }`
|
||||
export function isRemoveOrMoveEvent(event: NotifyEvent) {
|
||||
return (
|
||||
typeof event.type === 'object' &&
|
||||
('remove' in event.type ||
|
||||
('modify' in event.type &&
|
||||
event.type.modify.kind === 'rename' &&
|
||||
(event.type.modify.mode === 'from' ||
|
||||
event.type.modify.mode === 'any')))
|
||||
);
|
||||
}
|
||||
|
||||
// if we removed the file, we will stop watching it
|
||||
function startWatchingDBFile(db: WorkspaceSQLiteDB) {
|
||||
const FSWatcher = createFSWatcher();
|
||||
return new Observable<NotifyEvent>(subscriber => {
|
||||
logger.info('[FSWatcher] start watching db file', db.workspaceId);
|
||||
const subscription = FSWatcher.watch(db.path, {
|
||||
recursive: false,
|
||||
}).subscribe(
|
||||
event => {
|
||||
logger.info('[FSWatcher]', event);
|
||||
subscriber.next(event);
|
||||
// remove file or move file, complete the observable and close db
|
||||
if (isRemoveOrMoveEvent(event)) {
|
||||
subscriber.complete();
|
||||
}
|
||||
},
|
||||
err => {
|
||||
subscriber.error(err);
|
||||
}
|
||||
);
|
||||
return () => {
|
||||
// destroy on unsubscribe
|
||||
logger.info('[FSWatcher] cleanup db file watcher', db.workspaceId);
|
||||
db.destroy();
|
||||
subscription.unsubscribe();
|
||||
};
|
||||
}).pipe(
|
||||
debounceTime(1000),
|
||||
filter(event => !isRemoveOrMoveEvent(event)),
|
||||
tap({
|
||||
next: () => {
|
||||
logger.info(
|
||||
'[FSWatcher] db file changed on disk',
|
||||
db.workspaceId,
|
||||
ts() - db.lastUpdateTime,
|
||||
'ms'
|
||||
);
|
||||
db.reconnectDB();
|
||||
subjects.db.dbFileUpdate.next(db.workspaceId);
|
||||
},
|
||||
complete: () => {
|
||||
// todo: there is still a possibility that the file is deleted
|
||||
// but we didn't get the event soon enough and another event tries to
|
||||
// access the db
|
||||
logger.info('[FSWatcher] db file missing', db.workspaceId);
|
||||
subjects.db.dbFileMissing.next(db.workspaceId);
|
||||
db.destroy();
|
||||
},
|
||||
}),
|
||||
takeUntil(defer(() => fromEvent(app, 'before-quit')))
|
||||
);
|
||||
}
|
||||
|
||||
export function ensureSQLiteDB(id: string) {
|
||||
const deferValue = lastValueFrom(
|
||||
database$.pipe(
|
||||
filter(db => db.workspaceId === id && db.db.open),
|
||||
take(1),
|
||||
tap({
|
||||
error: err => {
|
||||
logger.error('[ensureSQLiteDB] error', err);
|
||||
},
|
||||
})
|
||||
)
|
||||
);
|
||||
databaseInput$.next(id);
|
||||
return deferValue;
|
||||
}
|
||||
@@ -1,247 +0,0 @@
|
||||
import path from 'node:path';
|
||||
|
||||
import type { Database } from 'better-sqlite3';
|
||||
import sqlite from 'better-sqlite3';
|
||||
import fs from 'fs-extra';
|
||||
import * as Y from 'yjs';
|
||||
|
||||
import type { AppContext } from '../../context';
|
||||
import { dbSubjects } from '../../events/db';
|
||||
import { logger } from '../../logger';
|
||||
import { ts } from '../../utils';
|
||||
|
||||
const schemas = [
|
||||
`CREATE TABLE IF NOT EXISTS "updates" (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
data BLOB NOT NULL,
|
||||
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
|
||||
)`,
|
||||
`CREATE TABLE IF NOT EXISTS "blobs" (
|
||||
key TEXT PRIMARY KEY NOT NULL,
|
||||
data BLOB NOT NULL,
|
||||
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
|
||||
)`,
|
||||
];
|
||||
|
||||
interface UpdateRow {
|
||||
id: number;
|
||||
data: Buffer;
|
||||
timestamp: string;
|
||||
}
|
||||
|
||||
interface BlobRow {
|
||||
key: string;
|
||||
data: Buffer;
|
||||
timestamp: string;
|
||||
}
|
||||
|
||||
const SQLITE_ORIGIN = Symbol('sqlite-origin');
|
||||
|
||||
export class WorkspaceSQLiteDB {
|
||||
db: Database;
|
||||
ydoc = new Y.Doc();
|
||||
firstConnect = false;
|
||||
lastUpdateTime = ts();
|
||||
destroyed = false;
|
||||
|
||||
constructor(public path: string, public workspaceId: string) {
|
||||
this.db = this.reconnectDB();
|
||||
}
|
||||
|
||||
// release resources
|
||||
destroy = () => {
|
||||
this.db?.close();
|
||||
this.ydoc.destroy();
|
||||
};
|
||||
|
||||
getWorkspaceName = () => {
|
||||
return this.ydoc.getMap('space:meta').get('name') as string;
|
||||
};
|
||||
|
||||
reconnectDB = () => {
|
||||
logger.log('[WorkspaceSQLiteDB] open db', this.workspaceId);
|
||||
if (this.db) {
|
||||
this.db.close();
|
||||
}
|
||||
|
||||
fs.realpath(this.path)
|
||||
.then(realPath => {
|
||||
dbSubjects.dbFilePathChange.next({
|
||||
workspaceId: this.workspaceId,
|
||||
path: this.path,
|
||||
realPath,
|
||||
});
|
||||
})
|
||||
.catch(() => {
|
||||
// skip error
|
||||
});
|
||||
|
||||
// use cached version?
|
||||
const db = (this.db = sqlite(this.path));
|
||||
db.exec(schemas.join(';'));
|
||||
|
||||
if (!this.firstConnect) {
|
||||
this.ydoc.on('update', (update: Uint8Array, origin) => {
|
||||
if (origin !== SQLITE_ORIGIN) {
|
||||
this.addUpdateToSQLite(update);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
Y.transact(this.ydoc, () => {
|
||||
const updates = this.getUpdates();
|
||||
updates.forEach(update => {
|
||||
// give SQLITE_ORIGIN to skip self update
|
||||
Y.applyUpdate(this.ydoc, update.data, SQLITE_ORIGIN);
|
||||
});
|
||||
});
|
||||
|
||||
this.lastUpdateTime = ts();
|
||||
|
||||
if (this.firstConnect) {
|
||||
logger.info('db reconnected', this.workspaceId);
|
||||
} else {
|
||||
logger.info('db connected', this.workspaceId);
|
||||
}
|
||||
|
||||
this.firstConnect = true;
|
||||
|
||||
return db;
|
||||
};
|
||||
|
||||
getDocAsUpdates = () => {
|
||||
return Y.encodeStateAsUpdate(this.ydoc);
|
||||
};
|
||||
|
||||
// non-blocking and use yDoc to validate the update
|
||||
// after that, the update is added to the db
|
||||
applyUpdate = (data: Uint8Array) => {
|
||||
Y.applyUpdate(this.ydoc, data);
|
||||
|
||||
// todo: trim the updates when the number of records is too large
|
||||
// 1. store the current ydoc state in the db
|
||||
// 2. then delete the old updates
|
||||
// yjs-idb will always trim the db for the first time after DB is loaded
|
||||
this.lastUpdateTime = ts();
|
||||
logger.debug('applyUpdate', this.workspaceId, this.lastUpdateTime);
|
||||
};
|
||||
|
||||
addBlob = (key: string, data: Uint8Array) => {
|
||||
this.lastUpdateTime = ts();
|
||||
try {
|
||||
const statement = this.db.prepare(
|
||||
'INSERT INTO blobs (key, data) VALUES (?, ?) ON CONFLICT(key) DO UPDATE SET data = ?'
|
||||
);
|
||||
statement.run(key, data, data);
|
||||
return key;
|
||||
} catch (error) {
|
||||
logger.error('addBlob', error);
|
||||
}
|
||||
};
|
||||
|
||||
getBlob = (key: string) => {
|
||||
try {
|
||||
const statement = this.db.prepare('SELECT data FROM blobs WHERE key = ?');
|
||||
const row = statement.get(key) as BlobRow;
|
||||
if (!row) {
|
||||
return null;
|
||||
}
|
||||
return row.data;
|
||||
} catch (error) {
|
||||
logger.error('getBlob', error);
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
deleteBlob = (key: string) => {
|
||||
this.lastUpdateTime = ts();
|
||||
try {
|
||||
const statement = this.db.prepare('DELETE FROM blobs WHERE key = ?');
|
||||
statement.run(key);
|
||||
} catch (error) {
|
||||
logger.error('deleteBlob', error);
|
||||
}
|
||||
};
|
||||
|
||||
getPersistentBlobKeys = () => {
|
||||
try {
|
||||
const statement = this.db.prepare('SELECT key FROM blobs');
|
||||
const rows = statement.all() as BlobRow[];
|
||||
return rows.map(row => row.key);
|
||||
} catch (error) {
|
||||
logger.error('getPersistentBlobKeys', error);
|
||||
return [];
|
||||
}
|
||||
};
|
||||
|
||||
private getUpdates = () => {
|
||||
try {
|
||||
const statement = this.db.prepare('SELECT * FROM updates');
|
||||
const rows = statement.all() as UpdateRow[];
|
||||
return rows;
|
||||
} catch (error) {
|
||||
logger.error('getUpdates', error);
|
||||
return [];
|
||||
}
|
||||
};
|
||||
|
||||
// batch write instead write per key stroke?
|
||||
private addUpdateToSQLite = (data: Uint8Array) => {
|
||||
try {
|
||||
const start = performance.now();
|
||||
const statement = this.db.prepare(
|
||||
'INSERT INTO updates (data) VALUES (?)'
|
||||
);
|
||||
statement.run(data);
|
||||
logger.debug(
|
||||
'addUpdateToSQLite',
|
||||
this.workspaceId,
|
||||
'length:',
|
||||
data.length,
|
||||
performance.now() - start,
|
||||
'ms'
|
||||
);
|
||||
} catch (error) {
|
||||
logger.error('addUpdateToSQLite', error);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
export async function getWorkspaceDBPath(
|
||||
context: AppContext,
|
||||
workspaceId: string
|
||||
) {
|
||||
const basePath = path.join(context.appDataPath, 'workspaces', workspaceId);
|
||||
await fs.ensureDir(basePath);
|
||||
return path.join(basePath, 'storage.db');
|
||||
}
|
||||
|
||||
export async function openWorkspaceDatabase(
|
||||
context: AppContext,
|
||||
workspaceId: string
|
||||
) {
|
||||
const dbPath = await getWorkspaceDBPath(context, workspaceId);
|
||||
return new WorkspaceSQLiteDB(dbPath, workspaceId);
|
||||
}
|
||||
|
||||
export function isValidDBFile(path: string) {
|
||||
let db: Database | null = null;
|
||||
try {
|
||||
db = sqlite(path);
|
||||
// check if db has two tables, one for updates and onefor blobs
|
||||
const statement = db.prepare(
|
||||
`SELECT name FROM sqlite_schema WHERE type='table'`
|
||||
);
|
||||
const rows = statement.all() as { name: string }[];
|
||||
const tableNames = rows.map(row => row.name);
|
||||
if (!tableNames.includes('updates') || !tableNames.includes('blobs')) {
|
||||
return false;
|
||||
}
|
||||
db.close();
|
||||
return true;
|
||||
} catch (error) {
|
||||
logger.error('isValidDBFile', error);
|
||||
db?.close();
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -1 +0,0 @@
|
||||
export * from './register';
|
||||
@@ -1,8 +0,0 @@
|
||||
export type IsomorphicHandler = (
|
||||
e: Electron.IpcMainInvokeEvent,
|
||||
...args: any[]
|
||||
) => Promise<any>;
|
||||
|
||||
export type NamespaceHandlers = {
|
||||
[key: string]: IsomorphicHandler;
|
||||
};
|
||||
@@ -1,8 +0,0 @@
|
||||
import { appContext } from '../../context';
|
||||
import type { NamespaceHandlers } from '../type';
|
||||
import { deleteWorkspace, listWorkspaces } from './workspace';
|
||||
|
||||
export const workspaceHandlers = {
|
||||
list: async () => listWorkspaces(appContext),
|
||||
delete: async (_, id: string) => deleteWorkspace(appContext, id),
|
||||
} satisfies NamespaceHandlers;
|
||||
@@ -1,60 +0,0 @@
|
||||
import path from 'node:path';
|
||||
|
||||
import fs from 'fs-extra';
|
||||
|
||||
import type { AppContext } from '../../context';
|
||||
import { logger } from '../../logger';
|
||||
|
||||
interface WorkspaceMeta {
|
||||
path: string;
|
||||
realpath: string;
|
||||
}
|
||||
|
||||
export async function listWorkspaces(
|
||||
context: AppContext
|
||||
): Promise<[workspaceId: string, meta: WorkspaceMeta][]> {
|
||||
const basePath = path.join(context.appDataPath, 'workspaces');
|
||||
try {
|
||||
await fs.ensureDir(basePath);
|
||||
const dirs = await fs.readdir(basePath, {
|
||||
withFileTypes: true,
|
||||
});
|
||||
|
||||
const meta = await Promise.all(
|
||||
dirs.map(async dir => {
|
||||
const dbFilePath = path.join(basePath, dir.name, 'storage.db');
|
||||
if (dir.isDirectory() && (await fs.exists(dbFilePath))) {
|
||||
// try read storage.db under it
|
||||
const realpath = await fs.realpath(dbFilePath);
|
||||
return [dir.name, { path: dbFilePath, realpath }] as [
|
||||
string,
|
||||
WorkspaceMeta
|
||||
];
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
return meta.filter((w): w is [string, WorkspaceMeta] => !!w);
|
||||
} catch (error) {
|
||||
logger.error('listWorkspaces', error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
export async function deleteWorkspace(context: AppContext, id: string) {
|
||||
const basePath = path.join(context.appDataPath, 'workspaces', id);
|
||||
const movedPath = path.join(
|
||||
context.appDataPath,
|
||||
'delete-workspaces',
|
||||
`${id}`
|
||||
);
|
||||
try {
|
||||
return await fs.move(basePath, movedPath, {
|
||||
overwrite: true,
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('deleteWorkspace', error);
|
||||
}
|
||||
}
|
||||
@@ -2,14 +2,15 @@ import './security-restrictions';
|
||||
|
||||
import { app } from 'electron';
|
||||
|
||||
import { createApplicationMenu } from './application-menu';
|
||||
import { createApplicationMenu } from './application-menu/create';
|
||||
import { registerEvents } from './events';
|
||||
import { registerHandlers } from './handlers';
|
||||
import { registerUpdater } from './handlers/updater';
|
||||
import { logger } from './logger';
|
||||
import { restoreOrCreateWindow } from './main-window';
|
||||
import { registerProtocol } from './protocol';
|
||||
import { registerUpdater } from './updater';
|
||||
|
||||
if (require('electron-squirrel-startup')) app.quit();
|
||||
// allow tests to overwrite app name through passing args
|
||||
if (process.argv.includes('--app-name')) {
|
||||
const appNameIndex = process.argv.indexOf('--app-name');
|
||||
@@ -61,14 +62,3 @@ app
|
||||
.then(createApplicationMenu)
|
||||
.then(registerUpdater)
|
||||
.catch(e => console.error('Failed create window:', e));
|
||||
/**
|
||||
* Check new app version in production mode only
|
||||
*/
|
||||
// FIXME: add me back later
|
||||
// if (import.meta.env.PROD) {
|
||||
// app
|
||||
// .whenReady()
|
||||
// .then(() => import('electron-updater'))
|
||||
// .then(({ autoUpdater }) => autoUpdater.checkForUpdatesAndNotify())
|
||||
// .catch(e => console.error('Failed check updates:', e));
|
||||
// }
|
||||
|
||||
@@ -7,7 +7,7 @@ export function getLogFilePath() {
|
||||
return log.transports.file.getFile().path;
|
||||
}
|
||||
|
||||
export function revealLogFile() {
|
||||
export async function revealLogFile() {
|
||||
const filePath = getLogFilePath();
|
||||
shell.showItemInFolder(filePath);
|
||||
return await shell.openPath(filePath);
|
||||
}
|
||||
|
||||
@@ -2,8 +2,9 @@ import { BrowserWindow, nativeTheme } from 'electron';
|
||||
import electronWindowState from 'electron-window-state';
|
||||
import { join } from 'path';
|
||||
|
||||
import { isMacOS, isWindows } from '../../utils';
|
||||
import { getExposedMeta } from './exposed';
|
||||
import { logger } from './logger';
|
||||
import { isMacOS, isWindows } from './utils';
|
||||
|
||||
const IS_DEV: boolean =
|
||||
process.env.NODE_ENV === 'development' && !process.env.CI;
|
||||
@@ -17,6 +18,8 @@ async function createWindow() {
|
||||
defaultHeight: 800,
|
||||
});
|
||||
|
||||
const exposedMeta = getExposedMeta();
|
||||
|
||||
const browserWindow = new BrowserWindow({
|
||||
titleBarStyle: isMacOS()
|
||||
? 'hiddenInset'
|
||||
@@ -40,6 +43,8 @@ async function createWindow() {
|
||||
webviewTag: false, // The webview tag is not recommended. Consider alternatives like iframe or Electron's BrowserView. https://www.electronjs.org/docs/latest/api/webview-tag#warning
|
||||
spellcheck: false, // FIXME: enable?
|
||||
preload: join(__dirname, '../preload/index.js'),
|
||||
// serialize exposed meta that to be used in preload
|
||||
additionalArguments: [`--exposed-meta=` + JSON.stringify(exposedMeta)],
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
18
apps/electron/layers/main/src/type.ts
Normal file
18
apps/electron/layers/main/src/type.ts
Normal file
@@ -0,0 +1,18 @@
|
||||
export type MainEventListener = (...args: any[]) => () => void;
|
||||
|
||||
export type IsomorphicHandler = (
|
||||
e: Electron.IpcMainInvokeEvent,
|
||||
...args: any[]
|
||||
) => Promise<any>;
|
||||
|
||||
export type NamespaceHandlers = {
|
||||
[key: string]: IsomorphicHandler;
|
||||
};
|
||||
|
||||
export interface WorkspaceMeta {
|
||||
id: string;
|
||||
mainDBPath: string;
|
||||
secondaryDBPath?: string; // assume there will be only one
|
||||
}
|
||||
|
||||
export type YOrigin = 'self' | 'external' | 'upstream' | 'renderer';
|
||||
49
apps/electron/layers/main/src/ui/get-meta-data/get-html.ts
Normal file
49
apps/electron/layers/main/src/ui/get-meta-data/get-html.ts
Normal file
@@ -0,0 +1,49 @@
|
||||
import { BrowserWindow } from 'electron';
|
||||
|
||||
import type { GetHTMLOptions } from './types';
|
||||
|
||||
async function getHTMLFromWindow(win: BrowserWindow): Promise<string> {
|
||||
return win.webContents
|
||||
.executeJavaScript(`document.documentElement.outerHTML;`)
|
||||
.then(html => html);
|
||||
}
|
||||
|
||||
// For normal web pages, obtaining html can be done directly,
|
||||
// but for some dynamic web pages, obtaining html should wait for the complete loading of web pages. shouldReGetHTML should be used to judge whether to obtain html again
|
||||
export async function getHTMLByURL(
|
||||
url: string,
|
||||
options: GetHTMLOptions
|
||||
): Promise<string> {
|
||||
return new Promise(resolve => {
|
||||
const { timeout = 10000, shouldReGetHTML } = options;
|
||||
const window = new BrowserWindow({
|
||||
show: false,
|
||||
});
|
||||
let html = '';
|
||||
window.loadURL(url);
|
||||
|
||||
const timer = setTimeout(() => {
|
||||
resolve(html);
|
||||
window.close();
|
||||
}, timeout);
|
||||
|
||||
async function loopHandle() {
|
||||
html = await getHTMLFromWindow(window);
|
||||
if (!shouldReGetHTML) {
|
||||
return html;
|
||||
}
|
||||
|
||||
if (await shouldReGetHTML(html)) {
|
||||
setTimeout(loopHandle, 1000);
|
||||
} else {
|
||||
window.close();
|
||||
clearTimeout(timer);
|
||||
resolve(html);
|
||||
}
|
||||
}
|
||||
|
||||
window.webContents.on('did-finish-load', async () => {
|
||||
loopHandle();
|
||||
});
|
||||
});
|
||||
}
|
||||
107
apps/electron/layers/main/src/ui/get-meta-data/index.ts
Normal file
107
apps/electron/layers/main/src/ui/get-meta-data/index.ts
Normal file
@@ -0,0 +1,107 @@
|
||||
import type { CheerioAPI, Element } from 'cheerio';
|
||||
import { load } from 'cheerio';
|
||||
|
||||
import type { Context, MetaData, Options, RuleSet } from './types';
|
||||
|
||||
export * from './types';
|
||||
|
||||
import { getHTMLByURL } from './get-html';
|
||||
import { metaDataRules } from './rules';
|
||||
import type { GetMetaDataOptions } from './types';
|
||||
|
||||
function runRule(ruleSet: RuleSet, $: CheerioAPI, context: Context) {
|
||||
let maxScore = 0;
|
||||
let value;
|
||||
|
||||
for (let currRule = 0; currRule < ruleSet.rules.length; currRule++) {
|
||||
const [query, handler] = ruleSet.rules[currRule];
|
||||
const elements = Array.from($(query));
|
||||
|
||||
if (elements.length) {
|
||||
for (const element of elements) {
|
||||
let score = ruleSet.rules.length - currRule;
|
||||
|
||||
if (ruleSet.scorer) {
|
||||
const newScore = ruleSet.scorer(element as Element, score);
|
||||
|
||||
if (newScore) {
|
||||
score = newScore;
|
||||
}
|
||||
}
|
||||
|
||||
if (score > maxScore) {
|
||||
maxScore = score;
|
||||
value = handler(element as Element);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (value) {
|
||||
if (ruleSet.processor) {
|
||||
value = ruleSet.processor(value, context);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
if (ruleSet.defaultValue) {
|
||||
return ruleSet.defaultValue(context);
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
async function getMetaDataByHTML(
|
||||
html: string,
|
||||
url: string,
|
||||
options: GetMetaDataOptions
|
||||
) {
|
||||
const { customRules = {} } = options;
|
||||
const rules: Record<string, RuleSet> = { ...metaDataRules };
|
||||
Object.keys(customRules).forEach((key: string) => {
|
||||
rules[key] = {
|
||||
rules: [...metaDataRules[key].rules, ...customRules[key].rules],
|
||||
defaultValue:
|
||||
customRules[key].defaultValue || metaDataRules[key].defaultValue,
|
||||
processor: customRules[key].processor || metaDataRules[key].processor,
|
||||
};
|
||||
});
|
||||
|
||||
const metadata: MetaData = {};
|
||||
const context: Context = {
|
||||
url,
|
||||
...options,
|
||||
};
|
||||
|
||||
const $ = load(html);
|
||||
|
||||
Object.keys(rules).forEach((key: string) => {
|
||||
const ruleSet = rules[key];
|
||||
metadata[key] = runRule(ruleSet, $, context) || undefined;
|
||||
});
|
||||
|
||||
return metadata;
|
||||
}
|
||||
|
||||
export async function getMetaData(url: string, options: Options = {}) {
|
||||
const { customRules, forceImageHttps, shouldReGetHTML, ...other } = options;
|
||||
const html = await getHTMLByURL(url, {
|
||||
...other,
|
||||
shouldReGetHTML: async html => {
|
||||
const meta = await getMetaDataByHTML(html, url, {
|
||||
customRules,
|
||||
forceImageHttps,
|
||||
});
|
||||
return shouldReGetHTML ? await shouldReGetHTML(meta) : false;
|
||||
},
|
||||
}).catch(() => {
|
||||
// TODO: report error
|
||||
return '';
|
||||
});
|
||||
|
||||
return await getMetaDataByHTML(html, url, {
|
||||
customRules,
|
||||
forceImageHttps,
|
||||
});
|
||||
}
|
||||
690
apps/electron/layers/main/src/ui/get-meta-data/rules.ts
Normal file
690
apps/electron/layers/main/src/ui/get-meta-data/rules.ts
Normal file
@@ -0,0 +1,690 @@
|
||||
import type { RuleSet } from './types';
|
||||
import { getProvider, makeUrlAbsolute, makeUrlSecure, parseUrl } from './utils';
|
||||
|
||||
export const metaDataRules: Record<string, RuleSet> = {
|
||||
title: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:title"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="og:title"][content]', element => element.attribs['content']],
|
||||
[
|
||||
'meta[property="twitter:title"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="twitter:title"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="parsely-title"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="parsely-title"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="sailthru.title"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="sailthru.title"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['title', (element: any) => element.text],
|
||||
],
|
||||
},
|
||||
description: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:description"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:description"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="description" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="description" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="sailthru.description"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="sailthru.description"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="twitter:description"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="twitter:description"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="summary" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="summary" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
],
|
||||
},
|
||||
language: {
|
||||
rules: [
|
||||
['html[lang]', element => element.attribs['lang']],
|
||||
[
|
||||
'meta[property="language" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="language" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="og:locale"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:locale"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
],
|
||||
processor: (language: any) => language.split('-')[0],
|
||||
},
|
||||
type: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:type"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="og:type"][content]', element => element.attribs['content']],
|
||||
[
|
||||
'meta[property="parsely-type"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="parsely-type"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="medium"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="medium"][content]', element => element.attribs['content']],
|
||||
],
|
||||
},
|
||||
url: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="og:url"][content]', element => element.attribs['content']],
|
||||
[
|
||||
'meta[property="al:web:url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="al:web:url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="parsely-link"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="parsely-link"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['a.amp-canurl', element => element.attribs['href']],
|
||||
['link[rel="canonical"][href]', element => element.attribs['href']],
|
||||
],
|
||||
defaultValue: context => context.url,
|
||||
processor: (url: any, context) => makeUrlAbsolute(context.url, url),
|
||||
},
|
||||
provider: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:site_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:site_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="publisher" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="publisher" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="application-name" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="application-name" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="al:android:app_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="al:android:app_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="al:iphone:app_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="al:iphone:app_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="al:ipad:app_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="al:ipad:app_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="al:ios:app_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="al:ios:app_name"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="twitter:app:name:iphone"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="twitter:app:name:iphone"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="twitter:app:name:ipad"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="twitter:app:name:ipad"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="twitter:app:name:googleplay"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="twitter:app:name:googleplay"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
],
|
||||
defaultValue: context => getProvider(parseUrl(context.url)),
|
||||
},
|
||||
keywords: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="keywords" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="keywords" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="parsely-tags"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="parsely-tags"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="sailthru.tags"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="sailthru.tags"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="article:tag" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="article:tag" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="book:tag" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="book:tag" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="topic" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="topic" i][content]', element => element.attribs['content']],
|
||||
],
|
||||
processor: (keywords: any) =>
|
||||
keywords.split(',').map((keyword: string) => keyword.trim()),
|
||||
},
|
||||
section: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="article:section"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="article:section"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="category"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="category"][content]', element => element.attribs['content']],
|
||||
],
|
||||
},
|
||||
author: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="author" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="author" i][content]', element => element.attribs['content']],
|
||||
[
|
||||
'meta[property="article:author"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="article:author"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="book:author"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="book:author"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="parsely-author"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="parsely-author"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="sailthru.author"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="sailthru.author"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['a[class*="author" i]', (element: any) => element.text],
|
||||
['[rel="author"]', (element: any) => element.text],
|
||||
[
|
||||
'meta[property="twitter:creator"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="twitter:creator"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="profile:username"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="profile:username"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
],
|
||||
},
|
||||
published: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="article:published_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="article:published_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="published_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="published_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="parsely-pub-date"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="parsely-pub-date"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="sailthru.date"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="sailthru.date"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="date" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="date" i][content]', element => element.attribs['content']],
|
||||
[
|
||||
'meta[property="release_date" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="release_date" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['time[datetime]', element => element.attribs['datetime']],
|
||||
['time[datetime][pubdate]', element => element.attribs['datetime']],
|
||||
],
|
||||
processor: (value: any) =>
|
||||
Date.parse(value.toString())
|
||||
? new Date(value.toString()).toISOString()
|
||||
: undefined,
|
||||
},
|
||||
modified: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:updated_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:updated_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="article:modified_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="article:modified_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="updated_time" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="updated_time" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="modified_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="modified_time"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="revised"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="revised"][content]', element => element.attribs['content']],
|
||||
],
|
||||
processor: (value: any) =>
|
||||
Date.parse(value.toString())
|
||||
? new Date(value.toString()).toISOString()
|
||||
: undefined,
|
||||
},
|
||||
robots: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="robots" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="robots" i][content]', element => element.attribs['content']],
|
||||
],
|
||||
processor: (keywords: any) =>
|
||||
keywords.split(',').map((keyword: string) => keyword.trim()),
|
||||
},
|
||||
copyright: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="copyright" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="copyright" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
],
|
||||
},
|
||||
email: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="email" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="email" i][content]', element => element.attribs['content']],
|
||||
[
|
||||
'meta[property="reply-to" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="reply-to" i][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
],
|
||||
},
|
||||
twitter: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="twitter:site"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="twitter:site"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
],
|
||||
},
|
||||
facebook: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="fb:pages"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="fb:pages"][content]', element => element.attribs['content']],
|
||||
],
|
||||
},
|
||||
image: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:image:secure_url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:image:secure_url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="og:image:url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:image:url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="og:image"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="og:image"][content]', element => element.attribs['content']],
|
||||
[
|
||||
'meta[property="twitter:image"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="twitter:image"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="twitter:image:src"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="twitter:image:src"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="thumbnail"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="thumbnail"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="parsely-image-url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="parsely-image-url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="sailthru.image.full"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="sailthru.image.full"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
],
|
||||
processor: (imageUrl: any, context) =>
|
||||
context.forceImageHttps === true
|
||||
? makeUrlSecure(makeUrlAbsolute(context.url, imageUrl))
|
||||
: makeUrlAbsolute(context.url, imageUrl),
|
||||
},
|
||||
icon: {
|
||||
rules: [
|
||||
[
|
||||
'link[rel="apple-touch-icon"][href]',
|
||||
element => element.attribs['href'],
|
||||
],
|
||||
[
|
||||
'link[rel="apple-touch-icon-precomposed"][href]',
|
||||
element => element.attribs['href'],
|
||||
],
|
||||
['link[rel="icon" i][href]', element => element.attribs['href']],
|
||||
['link[rel="fluid-icon"][href]', element => element.attribs['href']],
|
||||
['link[rel="shortcut icon"][href]', element => element.attribs['href']],
|
||||
['link[rel="Shortcut Icon"][href]', element => element.attribs['href']],
|
||||
['link[rel="mask-icon"][href]', element => element.attribs['href']],
|
||||
],
|
||||
scorer: element => {
|
||||
const sizes = element.attribs['sizes'];
|
||||
if (sizes) {
|
||||
const sizeMatches = sizes.match(/\d+/g);
|
||||
if (sizeMatches) {
|
||||
const parsed = parseInt(sizeMatches[0]);
|
||||
if (!isNaN(parsed)) {
|
||||
return parsed;
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
defaultValue: context => makeUrlAbsolute(context.url, '/favicon.ico'),
|
||||
processor: (iconUrl, context) =>
|
||||
context.forceImageHttps === true
|
||||
? makeUrlSecure(makeUrlAbsolute(context.url, iconUrl))
|
||||
: makeUrlAbsolute(context.url, iconUrl),
|
||||
},
|
||||
video: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:video:secure_url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:video:secure_url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="og:video:url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:video:url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="og:video"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="og:video"][content]', element => element.attribs['content']],
|
||||
],
|
||||
processor: (imageUrl: any, context) =>
|
||||
context.forceImageHttps === true
|
||||
? makeUrlSecure(makeUrlAbsolute(context.url, imageUrl))
|
||||
: makeUrlAbsolute(context.url, imageUrl),
|
||||
},
|
||||
audio: {
|
||||
rules: [
|
||||
[
|
||||
'meta[property="og:audio:secure_url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:audio:secure_url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="og:audio:url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[name="og:audio:url"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
[
|
||||
'meta[property="og:audio"][content]',
|
||||
element => element.attribs['content'],
|
||||
],
|
||||
['meta[name="og:audio"][content]', element => element.attribs['content']],
|
||||
],
|
||||
processor: (imageUrl: any, context) =>
|
||||
context.forceImageHttps === true
|
||||
? makeUrlSecure(makeUrlAbsolute(context.url, imageUrl))
|
||||
: makeUrlAbsolute(context.url, imageUrl),
|
||||
},
|
||||
};
|
||||
apps/electron/layers/main/src/ui/get-meta-data/types.ts (new file, 43 lines)
@@ -0,0 +1,43 @@
import type { Element } from 'cheerio';

export type MetaData = {
  title?: string;
  description?: string;
  icon?: string;
  image?: string;
  keywords?: string[];
  language?: string;
  type?: string;
  url?: string;
  provider?: string;

  [x: string]: string | string[] | undefined;
};

export type MetadataRule = [string, (el: Element) => string | null];

export type Context = {
  url: string;
} & GetMetaDataOptions;

export type RuleSet = {
  rules: MetadataRule[];
  defaultValue?: (context: Context) => string | string[];
  scorer?: (el: Element, score: any) => any;
  processor?: (input: any, context: Context) => any;
};

export type GetMetaDataOptions = {
  customRules?: Record<string, RuleSet>;
  forceImageHttps?: boolean;
};

export type GetHTMLOptions = {
  timeout?: number;
  shouldReGetHTML?: (currentHTML: string) => boolean | Promise<boolean>;
};

export type Options = {
  shouldReGetHTML?: (metaData: MetaData) => boolean | Promise<boolean>;
} & GetMetaDataOptions &
  Omit<GetHTMLOptions, 'shouldReGetHTML'>;
apps/electron/layers/main/src/ui/get-meta-data/utils.ts (new file, 28 lines)
@@ -0,0 +1,28 @@
import urlparse from 'url';

export function makeUrlAbsolute(base: string, relative: string): string {
  const relativeParsed = urlparse.parse(relative);

  if (relativeParsed.host === null) {
    return urlparse.resolve(base, relative);
  }

  return relative;
}

export function makeUrlSecure(url: string): string {
  return url.replace(/^http:/, 'https:');
}

export function parseUrl(url: string): string {
  return urlparse.parse(url).hostname || '';
}

export function getProvider(host: string): string {
  return host
    .replace(/www[a-zA-Z0-9]*\./, '')
    .replace('.co.', '.')
    .split('.')
    .slice(0, -1)
    .join(' ');
}
|
||||
import { app, BrowserWindow, shell } from 'electron';
|
||||
import { parse } from 'url';
|
||||
|
||||
import { logger } from '../../logger';
|
||||
import { logger } from '../logger';
|
||||
|
||||
const redirectUri = 'https://affine.pro/client/auth-callback';
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import { app, BrowserWindow, nativeTheme } from 'electron';
|
||||
|
||||
import { isMacOS } from '../../../../utils';
|
||||
import type { NamespaceHandlers } from '../type';
|
||||
import { isMacOS } from '../utils';
|
||||
import { getMetaData } from './get-meta-data';
|
||||
import { getGoogleOauthCode } from './google-auth';
|
||||
|
||||
export const uiHandlers = {
|
||||
@@ -39,4 +40,11 @@ export const uiHandlers = {
|
||||
getGoogleOauthCode: async () => {
|
||||
return getGoogleOauthCode();
|
||||
},
|
||||
getBookmarkDataByLink: async (_, url: string) => {
|
||||
return getMetaData(url, {
|
||||
shouldReGetHTML: metaData => {
|
||||
return !metaData.title && !metaData.description;
|
||||
},
|
||||
});
|
||||
},
|
||||
} satisfies NamespaceHandlers;
|
||||
@@ -2,9 +2,9 @@ import { app } from 'electron';
|
||||
import type { AppUpdater } from 'electron-updater';
|
||||
import { z } from 'zod';
|
||||
|
||||
import { isMacOS } from '../../../../utils';
|
||||
import { updaterSubjects } from '../../events/updater';
|
||||
import { logger } from '../../logger';
|
||||
import { logger } from '../logger';
|
||||
import { isMacOS } from '../utils';
|
||||
import { updaterSubjects } from './event';
|
||||
|
||||
export const ReleaseTypeSchema = z.enum([
|
||||
'stable',
|
||||
@@ -34,12 +34,11 @@ export const checkForUpdatesAndNotify = async (force = true) => {
|
||||
// check every 30 minutes (1800 seconds) at most
|
||||
if (force || lastCheckTime + 1000 * 1800 < Date.now()) {
|
||||
lastCheckTime = Date.now();
|
||||
return _autoUpdater.checkForUpdatesAndNotify();
|
||||
return await _autoUpdater.checkForUpdatesAndNotify();
|
||||
}
|
||||
};
|
||||
|
||||
export const registerUpdater = async () => {
|
||||
// require it will cause some side effects and will break generate-main-exposed-meta,
|
||||
// so we wrap it in a function
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const { autoUpdater } = require('electron-updater');
|
||||
@@ -1,8 +1,8 @@
|
||||
import { BehaviorSubject, Subject } from 'rxjs';
|
||||
|
||||
import type { MainEventListener } from './type';
|
||||
import type { MainEventListener } from '../type';
|
||||
|
||||
interface UpdateMeta {
|
||||
export interface UpdateMeta {
|
||||
version: string;
|
||||
allowAutoUpdate: boolean;
|
||||
}
|
||||
@@ -1,7 +1,7 @@
|
||||
import { app } from 'electron';
|
||||
|
||||
import type { NamespaceHandlers } from '../type';
|
||||
import { checkForUpdatesAndNotify, quitAndInstall } from './updater';
|
||||
import { checkForUpdatesAndNotify, quitAndInstall } from './electron-updater';
|
||||
|
||||
export const updaterHandlers = {
|
||||
currentVersion: async () => {
|
||||
@@ -15,4 +15,4 @@ export const updaterHandlers = {
|
||||
},
|
||||
} satisfies NamespaceHandlers;
|
||||
|
||||
export * from './updater';
|
||||
export * from './electron-updater';
|
||||
@@ -1,19 +1,11 @@
|
||||
export function debounce<T extends (...args: any[]) => void>(
|
||||
fn: T,
|
||||
delay: number
|
||||
) {
|
||||
let timeoutId: NodeJS.Timer | undefined;
|
||||
return (...args: Parameters<T>) => {
|
||||
if (timeoutId) {
|
||||
clearTimeout(timeoutId);
|
||||
}
|
||||
timeoutId = setTimeout(() => {
|
||||
fn(...args);
|
||||
timeoutId = undefined;
|
||||
}, delay);
|
||||
};
|
||||
}
|
||||
|
||||
export function ts() {
|
||||
export function getTime() {
|
||||
return new Date().getTime();
|
||||
}
|
||||
|
||||
export const isMacOS = () => {
|
||||
return process.platform === 'darwin';
|
||||
};
|
||||
|
||||
export const isWindows = () => {
|
||||
return process.platform === 'win32';
|
||||
};
|
||||
|
||||
apps/electron/layers/main/src/workers/index.ts (new file, 35 lines)
@@ -0,0 +1,35 @@
import path from 'node:path';
import { Worker } from 'node:worker_threads';

import { mergeUpdate } from './merge-update';

export function mergeUpdateWorker(updates: Uint8Array[]) {
  // fallback to main thread if worker is disabled (in vitest)
  if (process.env.USE_WORKER !== 'true') {
    return mergeUpdate(updates);
  }
  return new Promise<Uint8Array>((resolve, reject) => {
    // it is intended to have "./workers" in the path
    const workerFile = path.join(__dirname, './workers/merge-update.worker.js');

    // convert updates to SharedArrayBuffer[s]
    const sharedArrayBufferUpdates = updates.map(update => {
      const buffer = new SharedArrayBuffer(update.byteLength);
      const view = new Uint8Array(buffer);
      view.set(update);
      return view;
    });

    const worker = new Worker(workerFile, {
      workerData: sharedArrayBufferUpdates,
    });

    worker.on('message', resolve);
    worker.on('error', reject);
    worker.on('exit', code => {
      if (code !== 0) {
        reject(new Error(`Worker stopped with exit code ${code}`));
      }
    });
  });
}
apps/electron/layers/main/src/workers/merge-update.ts (new file, 11 lines)
@@ -0,0 +1,11 @@
import * as Y from 'yjs';

export function mergeUpdate(updates: Uint8Array[]) {
  const yDoc = new Y.Doc();
  Y.transact(yDoc, () => {
    for (const update of updates) {
      Y.applyUpdate(yDoc, update);
    }
  });
  return Y.encodeStateAsUpdate(yDoc);
}
apps/electron/layers/main/src/workers/merge-update.worker.ts (new file, 14 lines)
@@ -0,0 +1,14 @@
import { parentPort, workerData } from 'node:worker_threads';

import { mergeUpdate } from './merge-update';

function getMergeUpdate(updates: Uint8Array[]) {
  const update = mergeUpdate(updates);
  const buffer = new SharedArrayBuffer(update.byteLength);
  const view = new Uint8Array(buffer);
  view.set(update);

  return update;
}

parentPort?.postMessage(getMergeUpdate(workerData));
apps/electron/layers/main/src/workspace/__tests__/.gitignore (new file, vendored, 1 line)
@@ -0,0 +1 @@
tmp
@@ -0,0 +1,208 @@
|
||||
import path from 'node:path';
|
||||
|
||||
import fs from 'fs-extra';
|
||||
import { v4 } from 'uuid';
|
||||
import { afterEach, describe, expect, test, vi } from 'vitest';
|
||||
|
||||
import type { AppContext } from '../../context';
|
||||
|
||||
const tmpDir = path.join(__dirname, 'tmp');
|
||||
|
||||
const testAppContext: AppContext = {
|
||||
appDataPath: path.join(tmpDir, 'test-data'),
|
||||
appName: 'test',
|
||||
};
|
||||
|
||||
vi.doMock('../../context', () => ({
|
||||
appContext: testAppContext,
|
||||
}));
|
||||
|
||||
vi.doMock('../../db/ensure-db', () => ({
|
||||
ensureSQLiteDB: async () => ({
|
||||
destroy: () => {},
|
||||
}),
|
||||
}));
|
||||
|
||||
afterEach(async () => {
|
||||
await fs.remove(tmpDir);
|
||||
});
|
||||
|
||||
describe('list workspaces', () => {
|
||||
test('listWorkspaces (valid)', async () => {
|
||||
const { listWorkspaces } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
'workspaces',
|
||||
workspaceId
|
||||
);
|
||||
const meta = {
|
||||
id: workspaceId,
|
||||
};
|
||||
await fs.ensureDir(workspacePath);
|
||||
await fs.writeJSON(path.join(workspacePath, 'meta.json'), meta);
|
||||
const workspaces = await listWorkspaces(testAppContext);
|
||||
expect(workspaces).toEqual([[workspaceId, meta]]);
|
||||
});
|
||||
|
||||
test('listWorkspaces (without meta json file)', async () => {
|
||||
const { listWorkspaces } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
'workspaces',
|
||||
workspaceId
|
||||
);
|
||||
await fs.ensureDir(workspacePath);
|
||||
const workspaces = await listWorkspaces(testAppContext);
|
||||
expect(workspaces).toEqual([
|
||||
[
|
||||
workspaceId,
|
||||
// meta file will be created automatically
|
||||
{ id: workspaceId, mainDBPath: path.join(workspacePath, 'storage.db') },
|
||||
],
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('delete workspace', () => {
|
||||
test('deleteWorkspace', async () => {
|
||||
const { deleteWorkspace } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
'workspaces',
|
||||
workspaceId
|
||||
);
|
||||
await fs.ensureDir(workspacePath);
|
||||
await deleteWorkspace(testAppContext, workspaceId);
|
||||
expect(await fs.pathExists(workspacePath)).toBe(false);
|
||||
// removed workspace will be moved to delete-workspaces
|
||||
expect(
|
||||
await fs.pathExists(
|
||||
path.join(testAppContext.appDataPath, 'delete-workspaces', workspaceId)
|
||||
)
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getWorkspaceMeta', () => {
|
||||
test('can get meta', async () => {
|
||||
const { getWorkspaceMeta } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
'workspaces',
|
||||
workspaceId
|
||||
);
|
||||
const meta = {
|
||||
id: workspaceId,
|
||||
};
|
||||
await fs.ensureDir(workspacePath);
|
||||
await fs.writeJSON(path.join(workspacePath, 'meta.json'), meta);
|
||||
expect(await getWorkspaceMeta(testAppContext, workspaceId)).toEqual(meta);
|
||||
});
|
||||
|
||||
test('can create meta if not exists', async () => {
|
||||
const { getWorkspaceMeta } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
'workspaces',
|
||||
workspaceId
|
||||
);
|
||||
await fs.ensureDir(workspacePath);
|
||||
expect(await getWorkspaceMeta(testAppContext, workspaceId)).toEqual({
|
||||
id: workspaceId,
|
||||
mainDBPath: path.join(workspacePath, 'storage.db'),
|
||||
});
|
||||
expect(
|
||||
await fs.pathExists(path.join(workspacePath, 'meta.json'))
|
||||
).toBeTruthy();
|
||||
});
|
||||
|
||||
test('can migrate meta if db file is a link', async () => {
|
||||
const { getWorkspaceMeta } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
'workspaces',
|
||||
workspaceId
|
||||
);
|
||||
await fs.ensureDir(workspacePath);
|
||||
const sourcePath = path.join(tmpDir, 'source.db');
|
||||
await fs.writeFile(sourcePath, 'test');
|
||||
|
||||
await fs.ensureSymlink(sourcePath, path.join(workspacePath, 'storage.db'));
|
||||
|
||||
expect(await getWorkspaceMeta(testAppContext, workspaceId)).toEqual({
|
||||
id: workspaceId,
|
||||
mainDBPath: path.join(workspacePath, 'storage.db'),
|
||||
secondaryDBPath: sourcePath,
|
||||
});
|
||||
|
||||
expect(
|
||||
await fs.pathExists(path.join(workspacePath, 'meta.json'))
|
||||
).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
test('storeWorkspaceMeta', async () => {
|
||||
const { storeWorkspaceMeta } = await import('../handlers');
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
'workspaces',
|
||||
workspaceId
|
||||
);
|
||||
await fs.ensureDir(workspacePath);
|
||||
const meta = {
|
||||
id: workspaceId,
|
||||
mainDBPath: path.join(workspacePath, 'storage.db'),
|
||||
};
|
||||
await storeWorkspaceMeta(testAppContext, workspaceId, meta);
|
||||
expect(await fs.readJSON(path.join(workspacePath, 'meta.json'))).toEqual(
|
||||
meta
|
||||
);
|
||||
await storeWorkspaceMeta(testAppContext, workspaceId, {
|
||||
secondaryDBPath: path.join(tmpDir, 'test.db'),
|
||||
});
|
||||
expect(await fs.readJSON(path.join(workspacePath, 'meta.json'))).toEqual({
|
||||
...meta,
|
||||
secondaryDBPath: path.join(tmpDir, 'test.db'),
|
||||
});
|
||||
});
|
||||
|
||||
test('getWorkspaceMeta observable', async () => {
|
||||
const { storeWorkspaceMeta } = await import('../handlers');
|
||||
const { getWorkspaceMeta$ } = await import('../index');
|
||||
|
||||
const workspaceId = v4();
|
||||
const workspacePath = path.join(
|
||||
testAppContext.appDataPath,
|
||||
'workspaces',
|
||||
workspaceId
|
||||
);
|
||||
|
||||
const metaChange = vi.fn();
|
||||
|
||||
const meta$ = getWorkspaceMeta$(workspaceId);
|
||||
|
||||
meta$.subscribe(metaChange);
|
||||
await new Promise(resolve => setTimeout(resolve, 100));
|
||||
|
||||
expect(metaChange).toHaveBeenCalledWith({
|
||||
id: workspaceId,
|
||||
mainDBPath: path.join(workspacePath, 'storage.db'),
|
||||
});
|
||||
|
||||
await storeWorkspaceMeta(testAppContext, workspaceId, {
|
||||
secondaryDBPath: path.join(tmpDir, 'test.db'),
|
||||
});
|
||||
|
||||
expect(metaChange).toHaveBeenCalledWith({
|
||||
id: workspaceId,
|
||||
mainDBPath: path.join(workspacePath, 'storage.db'),
|
||||
secondaryDBPath: path.join(tmpDir, 'test.db'),
|
||||
});
|
||||
});
|
||||
apps/electron/layers/main/src/workspace/handlers.ts (new file, 135 lines)
@@ -0,0 +1,135 @@
|
||||
import path from 'node:path';
|
||||
|
||||
import fs from 'fs-extra';
|
||||
|
||||
import { type AppContext } from '../context';
|
||||
import { ensureSQLiteDB } from '../db/ensure-db';
|
||||
import { logger } from '../logger';
|
||||
import type { WorkspaceMeta } from '../type';
|
||||
import { workspaceSubjects } from './subjects';
|
||||
|
||||
export async function listWorkspaces(
|
||||
context: AppContext
|
||||
): Promise<[workspaceId: string, meta: WorkspaceMeta][]> {
|
||||
const basePath = getWorkspacesBasePath(context);
|
||||
try {
|
||||
await fs.ensureDir(basePath);
|
||||
const dirs = await fs.readdir(basePath, {
|
||||
withFileTypes: true,
|
||||
});
|
||||
const metaList = (
|
||||
await Promise.all(
|
||||
dirs.map(async dir => {
|
||||
// ? shall we put all meta in a single file instead of one file per workspace?
|
||||
return await getWorkspaceMeta(context, dir.name);
|
||||
})
|
||||
)
|
||||
).filter((w): w is WorkspaceMeta => !!w);
|
||||
return metaList.map(meta => [meta.id, meta]);
|
||||
} catch (error) {
|
||||
logger.error('listWorkspaces', error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
export async function deleteWorkspace(context: AppContext, id: string) {
|
||||
const basePath = getWorkspaceBasePath(context, id);
|
||||
const movedPath = path.join(
|
||||
context.appDataPath,
|
||||
'delete-workspaces',
|
||||
`${id}`
|
||||
);
|
||||
try {
|
||||
const db = await ensureSQLiteDB(id);
|
||||
db.destroy();
|
||||
return await fs.move(basePath, movedPath, {
|
||||
overwrite: true,
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('deleteWorkspace', error);
|
||||
}
|
||||
}
|
||||
|
||||
export function getWorkspacesBasePath(context: AppContext) {
|
||||
return path.join(context.appDataPath, 'workspaces');
|
||||
}
|
||||
|
||||
export function getWorkspaceBasePath(context: AppContext, workspaceId: string) {
|
||||
return path.join(context.appDataPath, 'workspaces', workspaceId);
|
||||
}
|
||||
|
||||
export function getWorkspaceDBPath(context: AppContext, workspaceId: string) {
|
||||
const basePath = getWorkspaceBasePath(context, workspaceId);
|
||||
return path.join(basePath, 'storage.db');
|
||||
}
|
||||
|
||||
export function getWorkspaceMetaPath(context: AppContext, workspaceId: string) {
|
||||
const basePath = getWorkspaceBasePath(context, workspaceId);
|
||||
return path.join(basePath, 'meta.json');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get workspace meta, create one if not exists
|
||||
* This function will also migrate the workspace if needed
|
||||
*/
|
||||
export async function getWorkspaceMeta(
|
||||
context: AppContext,
|
||||
workspaceId: string
|
||||
): Promise<WorkspaceMeta> {
|
||||
try {
|
||||
const basePath = getWorkspaceBasePath(context, workspaceId);
|
||||
const metaPath = getWorkspaceMetaPath(context, workspaceId);
|
||||
if (!(await fs.exists(metaPath))) {
|
||||
// since not meta is found, we will migrate symlinked db file if needed
|
||||
await fs.ensureDir(basePath);
|
||||
const dbPath = getWorkspaceDBPath(context, workspaceId);
|
||||
|
||||
// todo: remove this after migration (in stable version)
|
||||
const realDBPath = (await fs.exists(dbPath))
|
||||
? await fs.realpath(dbPath)
|
||||
: dbPath;
|
||||
const isLink = realDBPath !== dbPath;
|
||||
if (isLink) {
|
||||
await fs.copy(realDBPath, dbPath);
|
||||
}
|
||||
// create one if not exists
|
||||
const meta = {
|
||||
id: workspaceId,
|
||||
mainDBPath: dbPath,
|
||||
secondaryDBPath: isLink ? realDBPath : undefined,
|
||||
};
|
||||
await fs.writeJSON(metaPath, meta);
|
||||
return meta;
|
||||
} else {
|
||||
const meta = await fs.readJSON(metaPath);
|
||||
return meta;
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error('getWorkspaceMeta failed', err);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
export async function storeWorkspaceMeta(
|
||||
context: AppContext,
|
||||
workspaceId: string,
|
||||
meta: Partial<WorkspaceMeta>
|
||||
) {
|
||||
try {
|
||||
const basePath = getWorkspaceBasePath(context, workspaceId);
|
||||
await fs.ensureDir(basePath);
|
||||
const metaPath = path.join(basePath, 'meta.json');
|
||||
const currentMeta = await getWorkspaceMeta(context, workspaceId);
|
||||
const newMeta = {
|
||||
...currentMeta,
|
||||
...meta,
|
||||
};
|
||||
await fs.writeJSON(metaPath, newMeta);
|
||||
workspaceSubjects.meta.next({
|
||||
workspaceId,
|
||||
meta: newMeta,
|
||||
});
|
||||
} catch (err) {
|
||||
logger.error('storeWorkspaceMeta failed', err);
|
||||
}
|
||||
}
|
||||
apps/electron/layers/main/src/workspace/index.ts (new file, 44 lines)
@@ -0,0 +1,44 @@
|
||||
import { merge } from 'rxjs';
|
||||
import { filter, map } from 'rxjs/operators';
|
||||
|
||||
import { appContext } from '../context';
|
||||
import type {
|
||||
MainEventListener,
|
||||
NamespaceHandlers,
|
||||
WorkspaceMeta,
|
||||
} from '../type';
|
||||
import { deleteWorkspace, getWorkspaceMeta, listWorkspaces } from './handlers';
|
||||
import { workspaceSubjects } from './subjects';
|
||||
|
||||
export * from './handlers';
|
||||
export * from './subjects';
|
||||
|
||||
export const workspaceEvents = {
|
||||
onMetaChange: (
|
||||
fn: (meta: { workspaceId: string; meta: WorkspaceMeta }) => void
|
||||
) => {
|
||||
const sub = workspaceSubjects.meta.subscribe(fn);
|
||||
return () => {
|
||||
sub.unsubscribe();
|
||||
};
|
||||
},
|
||||
} satisfies Record<string, MainEventListener>;
|
||||
|
||||
export const workspaceHandlers = {
|
||||
list: async () => listWorkspaces(appContext),
|
||||
delete: async (_, id: string) => deleteWorkspace(appContext, id),
|
||||
getMeta: async (_, id: string) => {
|
||||
return getWorkspaceMeta(appContext, id);
|
||||
},
|
||||
} satisfies NamespaceHandlers;
|
||||
|
||||
// used internally. Get a stream of workspace id -> meta
|
||||
export const getWorkspaceMeta$ = (workspaceId: string) => {
|
||||
return merge(
|
||||
getWorkspaceMeta(appContext, workspaceId),
|
||||
workspaceSubjects.meta.pipe(
|
||||
map(meta => meta.meta),
|
||||
filter(meta => meta.id === workspaceId)
|
||||
)
|
||||
);
|
||||
};
|
||||
apps/electron/layers/main/src/workspace/subjects.ts (new file, 7 lines)
@@ -0,0 +1,7 @@
import { Subject } from 'rxjs';

import type { WorkspaceMeta } from '../type';

export const workspaceSubjects = {
  meta: new Subject<{ workspaceId: string; meta: WorkspaceMeta }>(),
};
apps/electron/layers/preload/preload.d.ts (vendored, 7 changed lines)
@@ -1,7 +1,6 @@
|
||||
/* eslint-disable @typescript-eslint/consistent-type-imports */
|
||||
|
||||
interface Window {
|
||||
apis: typeof import('./src/affine-apis').apis;
|
||||
events: typeof import('./src/affine-apis').events;
|
||||
appInfo: typeof import('./src/affine-apis').appInfo;
|
||||
declare interface Window {
|
||||
apis: import('./src/affine-apis').PreloadHandlers;
|
||||
events: import('./src/affine-apis').MainIPCEventMap;
|
||||
}
|
||||
|
||||
@@ -1,9 +1,13 @@
|
||||
/* eslint-disable @typescript-eslint/no-var-requires */
|
||||
// NOTE: we will generate preload types from this file
|
||||
|
||||
// NOTE: we will generate preload types from this file
|
||||
import { ipcRenderer } from 'electron';
|
||||
|
||||
import type { MainIPCEventMap, MainIPCHandlerMap } from '../../constraints';
|
||||
// eslint-disable-next-line @typescript-eslint/no-restricted-imports
|
||||
import type {
|
||||
MainIPCEventMap,
|
||||
MainIPCHandlerMap,
|
||||
} from '../../main/src/exposed';
|
||||
|
||||
type WithoutFirstParameter<T> = T extends (_: any, ...args: infer P) => infer R
|
||||
? (...args: P) => R
|
||||
@@ -15,7 +19,7 @@ type HandlersMap<N extends keyof MainIPCHandlerMap> = {
|
||||
>;
|
||||
};
|
||||
|
||||
type PreloadHandlers = {
|
||||
export type PreloadHandlers = {
|
||||
[N in keyof MainIPCHandlerMap]: HandlersMap<N>;
|
||||
};
|
||||
|
||||
@@ -24,17 +28,17 @@ type MainExposedMeta = {
|
||||
events: [namespace: string, eventNames: string[]][];
|
||||
};
|
||||
|
||||
const meta: MainExposedMeta = (() => {
|
||||
const val = process.argv
|
||||
.find(arg => arg.startsWith('--exposed-meta='))
|
||||
?.split('=')[1];
|
||||
|
||||
return val ? JSON.parse(val) : null;
|
||||
})();
|
||||
|
||||
// main handlers that can be invoked from the renderer process
|
||||
const apis: PreloadHandlers = (() => {
|
||||
// the following were generated by the build script
|
||||
// 1. bundle extra main/src/expose.ts entry
|
||||
// 2. use generate-main-exposed-meta.mjs to generate exposed-meta.js in dist
|
||||
//
|
||||
// we cannot directly import main/src/handlers.ts because it will be bundled into the preload bundle
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const {
|
||||
handlers: handlersMeta,
|
||||
}: MainExposedMeta = require('../main/exposed-meta');
|
||||
const { handlers: handlersMeta } = meta;
|
||||
|
||||
const all = handlersMeta.map(([namespace, functionNames]) => {
|
||||
const namespaceApis = functionNames.map(name => {
|
||||
@@ -54,9 +58,7 @@ const apis: PreloadHandlers = (() => {
|
||||
|
||||
// main events that can be listened to from the renderer process
|
||||
const events: MainIPCEventMap = (() => {
|
||||
const {
|
||||
events: eventsMeta,
|
||||
}: MainExposedMeta = require('../main/exposed-meta');
|
||||
const { events: eventsMeta } = meta;
|
||||
|
||||
// NOTE: ui may try to listen to a lot of the same events, so we increase the limit...
|
||||
ipcRenderer.setMaxListeners(100);
|
||||
@@ -90,3 +92,6 @@ const appInfo = {
|
||||
};
|
||||
|
||||
export { apis, appInfo, events };
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-restricted-imports
|
||||
export type { MainIPCEventMap } from '../../main/src/exposed';
|
||||
|
||||
@@ -1,7 +0,0 @@
|
||||
export const isMacOS = () => {
|
||||
return process.platform === 'darwin';
|
||||
};
|
||||
|
||||
export const isWindows = () => {
|
||||
return process.platform === 'win32';
|
||||
};
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "@affine/electron",
|
||||
"private": true,
|
||||
"version": "0.5.4-beta.1",
|
||||
"version": "0.6.0",
|
||||
"author": "affine",
|
||||
"repository": {
|
||||
"url": "https://github.com/toeverything/AFFiNE",
|
||||
@@ -15,7 +15,6 @@
|
||||
"prod": "yarn electron-rebuild && yarn node scripts/dev.mjs",
|
||||
"build-layers": "zx scripts/build-layers.mjs",
|
||||
"generate-assets": "zx scripts/generate-assets.mjs",
|
||||
"generate-main-exposed-meta": "zx scripts/generate-main-exposed-meta.mjs",
|
||||
"package": "electron-forge package",
|
||||
"make": "electron-forge make",
|
||||
"rebuild:for-unit-test": "yarn rebuild better-sqlite3",
|
||||
@@ -43,22 +42,24 @@
|
||||
"@types/fs-extra": "^11.0.1",
|
||||
"@types/uuid": "^9.0.1",
|
||||
"cross-env": "7.0.3",
|
||||
"electron": "24.3.1",
|
||||
"electron": "25.0.0",
|
||||
"electron-log": "^5.0.0-beta.24",
|
||||
"electron-squirrel-startup": "1.0.0",
|
||||
"electron-window-state": "^5.0.3",
|
||||
"esbuild": "^0.17.19",
|
||||
"fs-extra": "^11.1.1",
|
||||
"playwright": "^1.33.0",
|
||||
"playwright": "=1.33.0",
|
||||
"ts-node": "^10.9.1",
|
||||
"undici": "^5.22.1",
|
||||
"uuid": "^9.0.0",
|
||||
"zx": "^7.2.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"better-sqlite3": "^8.3.0",
|
||||
"better-sqlite3": "^8.4.0",
|
||||
"cheerio": "^1.0.0-rc.12",
|
||||
"chokidar": "^3.5.3",
|
||||
"electron-updater": "^5.3.0",
|
||||
"lodash-es": "^4.17.21",
|
||||
"nanoid": "^4.0.2",
|
||||
"rxjs": "^7.8.1",
|
||||
"yjs": "^13.6.1"
|
||||
|
||||
apps/electron/resources/icons/affine_installing.gif (new binary file, 2.1 MiB; not shown)
@@ -25,8 +25,6 @@ async function buildLayers() {
|
||||
'process.env.BUILD_TYPE': `"${process.env.BUILD_TYPE || 'stable'}"`,
|
||||
},
|
||||
});
|
||||
|
||||
await $`yarn workspace @affine/electron generate-main-exposed-meta`;
|
||||
}
|
||||
|
||||
await buildLayers();
|
||||
|
||||
@@ -23,6 +23,7 @@ export const config = () => {
|
||||
JSON.stringify(process.env[key] ?? ''),
|
||||
]),
|
||||
['process.env.NODE_ENV', `"${mode}"`],
|
||||
['process.env.USE_WORKER', '"true"'],
|
||||
]);
|
||||
|
||||
if (DEV_SERVER_URL) {
|
||||
@@ -33,7 +34,7 @@ export const config = () => {
|
||||
main: {
|
||||
entryPoints: [
|
||||
resolve(root, './layers/main/src/index.ts'),
|
||||
resolve(root, './layers/main/src/exposed.ts'),
|
||||
resolve(root, './layers/main/src/workers/merge-update.worker.ts'),
|
||||
],
|
||||
outdir: resolve(root, './dist/layers/main'),
|
||||
bundle: true,
|
||||
@@ -54,7 +55,7 @@ export const config = () => {
|
||||
bundle: true,
|
||||
target: `node${NODE_MAJOR_VERSION}`,
|
||||
platform: 'node',
|
||||
external: ['electron', '../main/exposed-meta'],
|
||||
external: ['electron'],
|
||||
define: define,
|
||||
},
|
||||
};
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/* eslint-disable no-async-promise-executor */
|
||||
import { execSync, spawn } from 'node:child_process';
|
||||
import { spawn } from 'node:child_process';
|
||||
import { readFileSync } from 'node:fs';
|
||||
import path from 'node:path';
|
||||
|
||||
@@ -105,8 +105,6 @@ async function watchMain() {
|
||||
name: 'electron-dev:reload-app-on-main-change',
|
||||
setup(build) {
|
||||
build.onEnd(() => {
|
||||
execSync('yarn generate-main-exposed-meta');
|
||||
|
||||
if (initialBuild) {
|
||||
console.log(`[main] has changed, [re]launching electron...`);
|
||||
spawnOrReloadElectron();
|
||||
|
||||
@@ -1,36 +0,0 @@
|
||||
const mainDistDir = path.resolve(__dirname, '../dist/layers/main');
|
||||
|
||||
// be careful and avoid any side effects in
|
||||
const { handlers, events } = await import(
|
||||
'file://' + path.resolve(mainDistDir, 'exposed.js')
|
||||
);
|
||||
|
||||
const handlersMeta = Object.entries(handlers).map(
|
||||
([namespace, namespaceHandlers]) => {
|
||||
return [
|
||||
namespace,
|
||||
Object.keys(namespaceHandlers).map(handlerName => handlerName),
|
||||
];
|
||||
}
|
||||
);
|
||||
|
||||
const eventsMeta = Object.entries(events).map(
|
||||
([namespace, namespaceHandlers]) => {
|
||||
return [
|
||||
namespace,
|
||||
Object.keys(namespaceHandlers).map(handlerName => handlerName),
|
||||
];
|
||||
}
|
||||
);
|
||||
|
||||
const meta = {
|
||||
handlers: handlersMeta,
|
||||
events: eventsMeta,
|
||||
};
|
||||
|
||||
await fs.writeFile(
|
||||
path.resolve(mainDistDir, 'exposed-meta.js'),
|
||||
`module.exports = ${JSON.stringify(meta)};`
|
||||
);
|
||||
|
||||
console.log('generate main exposed-meta.js done');
|
||||
@@ -2,7 +2,9 @@
|
||||
"extends": "../../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"baseUrl": ".",
|
||||
"noEmit": true
|
||||
"noEmit": true,
|
||||
"target": "ESNext"
|
||||
},
|
||||
"references": [{ "path": "../../../tests/kit" }],
|
||||
"include": ["**.spec.ts", "**.test.ts"]
|
||||
}
|
||||
|
||||
@@ -23,7 +23,7 @@ test('move workspace db file', async ({ page, appInfo, workspace }) => {
|
||||
// goto settings
|
||||
await settingButton.click();
|
||||
|
||||
const tmpPath = path.join(appInfo.sessionData, w.id + '-tmp.db');
|
||||
const tmpPath = path.join(appInfo.sessionData, w.id + '-tmp-dir');
|
||||
|
||||
// move db file to tmp folder
|
||||
await page.evaluate(tmpPath => {
|
||||
@@ -36,6 +36,9 @@ test('move workspace db file', async ({ page, appInfo, workspace }) => {
|
||||
// check if db file exists
|
||||
await page.waitForSelector('text="Move folder success"');
|
||||
expect(await fs.exists(tmpPath)).toBe(true);
|
||||
// check if db file exists under tmpPath (a file ends with .affine)
|
||||
const files = await fs.readdir(tmpPath);
|
||||
expect(files.some(f => f.endsWith('.affine'))).toBe(true);
|
||||
});
|
||||
|
||||
test('export then add', async ({ page, appInfo, workspace }) => {
|
||||
@@ -56,7 +59,7 @@ test('export then add', async ({ page, appInfo, workspace }) => {
|
||||
|
||||
const tmpPath = path.join(appInfo.sessionData, w.id + '-tmp.db');
|
||||
|
||||
// move db file to tmp folder
|
||||
// export db file to tmp folder
|
||||
await page.evaluate(tmpPath => {
|
||||
window.apis?.dialog.setFakeDialogResult({
|
||||
filePath: tmpPath,
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"composite": true,
|
||||
"skipLibCheck": true,
|
||||
"target": "ESNext",
|
||||
"module": "ESNext",
|
||||
@@ -9,17 +10,23 @@
|
||||
"types": ["node"],
|
||||
"outDir": "dist",
|
||||
"moduleResolution": "node",
|
||||
"resolveJsonModule": true
|
||||
"resolveJsonModule": true,
|
||||
"noImplicitOverride": true,
|
||||
"noEmit": false
|
||||
},
|
||||
"include": ["**/*.ts", "**/*.tsx", "package.json"],
|
||||
"exclude": ["out", "dist", "node_modules"],
|
||||
"include": ["**/*.ts", "**/*.tsx"],
|
||||
"exclude": ["node_modules", "out", "dist"],
|
||||
"references": [
|
||||
{
|
||||
"path": "./tsconfig.node.json"
|
||||
},
|
||||
{
|
||||
"path": "../../packages/native"
|
||||
}
|
||||
},
|
||||
{
|
||||
"path": "../../packages/env"
|
||||
},
|
||||
{ "path": "../../tests/kit" }
|
||||
],
|
||||
"ts-node": {
|
||||
"esm": true,
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
{
|
||||
"extends": "./tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"composite": true,
|
||||
"target": "ESNext",
|
||||
"module": "ESNext",
|
||||
"resolveJsonModule": true,
|
||||
"moduleResolution": "Node",
|
||||
"allowSyntheticDefaultImports": true
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"noEmit": false
|
||||
},
|
||||
"include": ["./scripts", "package.json"]
|
||||
"include": ["./scripts"]
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "@affine/server",
|
||||
"private": true,
|
||||
"version": "0.5.4-beta.1",
|
||||
"version": "0.6.0",
|
||||
"description": "Affine Node.js server",
|
||||
"type": "module",
|
||||
"bin": {
|
||||
@@ -15,36 +15,36 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@apollo/server": "^4.7.1",
|
||||
"@nestjs/apollo": "^11.0.5",
|
||||
"@nestjs/common": "^9.4.1",
|
||||
"@nestjs/core": "^9.4.1",
|
||||
"@nestjs/graphql": "^11.0.5",
|
||||
"@nestjs/platform-express": "^9.4.1",
|
||||
"@nestjs/apollo": "^11.0.6",
|
||||
"@nestjs/common": "^9.4.2",
|
||||
"@nestjs/core": "^9.4.2",
|
||||
"@nestjs/graphql": "^11.0.6",
|
||||
"@nestjs/platform-express": "^9.4.2",
|
||||
"@node-rs/bcrypt": "^1.7.1",
|
||||
"@prisma/client": "^4.14.1",
|
||||
"dotenv": "^16.0.3",
|
||||
"@prisma/client": "^4.15.0",
|
||||
"dotenv": "^16.1.1",
|
||||
"express": "^4.18.2",
|
||||
"graphql": "^16.6.0",
|
||||
"graphql-type-json": "^0.3.2",
|
||||
"jsonwebtoken": "^9.0.0",
|
||||
"lodash-es": "^4.17.21",
|
||||
"prisma": "^4.14.1",
|
||||
"prisma": "^4.15.0",
|
||||
"reflect-metadata": "^0.1.13",
|
||||
"rxjs": "^7.8.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@nestjs/testing": "^9.4.1",
|
||||
"@nestjs/testing": "^9.4.2",
|
||||
"@types/express": "^4.17.17",
|
||||
"@types/jsonwebtoken": "^9.0.2",
|
||||
"@types/lodash-es": "^4.17.7",
|
||||
"@types/node": "^18.16.12",
|
||||
"@types/node": "^18.16.16",
|
||||
"@types/supertest": "^2.0.12",
|
||||
"c8": "^7.13.0",
|
||||
"c8": "^7.14.0",
|
||||
"nodemon": "^2.0.22",
|
||||
"supertest": "^6.3.3",
|
||||
"ts-node": "^10.9.1",
|
||||
"typescript": "^5.0.4",
|
||||
"vitest": "^0.31.1"
|
||||
"vitest": "^0.31.2"
|
||||
},
|
||||
"nodemonConfig": {
|
||||
"exec": "node",
|
||||
|
||||
@@ -10,7 +10,7 @@ declare global {
|
||||
}
|
||||
}
|
||||
|
||||
export const enum ExternalAccount {
|
||||
export enum ExternalAccount {
|
||||
github = 'github',
|
||||
google = 'google',
|
||||
firebase = 'firebase',
|
||||
|
||||
@@ -5,7 +5,8 @@
|
||||
"module": "ESNext",
|
||||
"resolveJsonModule": true,
|
||||
"moduleResolution": "Node",
|
||||
"allowSyntheticDefaultImports": true
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"outDir": "dist/scripts"
|
||||
},
|
||||
"include": ["scripts", "package.json"]
|
||||
}
|
||||
|
||||
@@ -10,7 +10,7 @@ First, run the development server:
|
||||
pnpm run dev
|
||||
```
|
||||
|
||||
Open [http://localhost:8080](http://localhost:3000) with your browser to see the result.
|
||||
Open [http://localhost:8080](http://localhost:8080) with your browser to see the result.
|
||||
|
||||
You can start editing the page by modifying `src/pages/workspace/[workspaceId]/all.tsx`. The page auto-updates as you edit the file.
|
||||
|
||||
|
||||
@@ -109,8 +109,10 @@ const nextConfig = {
|
||||
'@affine/templates',
|
||||
'@affine/workspace',
|
||||
'@affine/jotai',
|
||||
'@affine/copilot',
|
||||
'@toeverything/hooks',
|
||||
'@toeverything/y-indexeddb',
|
||||
'@toeverything/plugin-infra',
|
||||
],
|
||||
publicRuntimeConfig: {
|
||||
PROJECT_NAME: process.env.npm_package_name ?? 'AFFiNE',
|
||||
|
||||
@@ -1,17 +1,17 @@
|
||||
{
|
||||
"name": "@affine/web",
|
||||
"private": true,
|
||||
"version": "0.5.4-beta.1",
|
||||
"version": "0.6.0",
|
||||
"scripts": {
|
||||
"dev": "next dev",
|
||||
"build": "next build",
|
||||
"export": "next export",
|
||||
"start": "next start",
|
||||
"lint": "next lint"
|
||||
"start": "next start"
|
||||
},
|
||||
"dependencies": {
|
||||
"@affine-test/fixtures": "workspace:*",
|
||||
"@affine/component": "workspace:*",
|
||||
"@affine/copilot": "workspace:*",
|
||||
"@affine/debug": "workspace:*",
|
||||
"@affine/env": "workspace:*",
|
||||
"@affine/graphql": "workspace:*",
|
||||
@@ -19,34 +19,35 @@
|
||||
"@affine/jotai": "workspace:*",
|
||||
"@affine/templates": "workspace:*",
|
||||
"@affine/workspace": "workspace:*",
|
||||
"@blocksuite/blocks": "0.0.0-20230518051344-45970a96-nightly",
|
||||
"@blocksuite/editor": "0.0.0-20230518051344-45970a96-nightly",
|
||||
"@blocksuite/global": "0.0.0-20230518051344-45970a96-nightly",
|
||||
"@blocksuite/icons": "^2.1.16",
|
||||
"@blocksuite/lit": "0.0.0-20230518051344-45970a96-nightly",
|
||||
"@blocksuite/store": "0.0.0-20230518051344-45970a96-nightly",
|
||||
"@blocksuite/blocks": "0.0.0-20230601101714-0d24ba91-nightly",
|
||||
"@blocksuite/editor": "0.0.0-20230601101714-0d24ba91-nightly",
|
||||
"@blocksuite/global": "0.0.0-20230601101714-0d24ba91-nightly",
|
||||
"@blocksuite/icons": "^2.1.19",
|
||||
"@blocksuite/lit": "0.0.0-20230601101714-0d24ba91-nightly",
|
||||
"@blocksuite/store": "0.0.0-20230601101714-0d24ba91-nightly",
|
||||
"@dnd-kit/core": "^6.0.8",
|
||||
"@dnd-kit/sortable": "^7.0.2",
|
||||
"@emotion/cache": "^11.11.0",
|
||||
"@emotion/react": "^11.11.0",
|
||||
"@emotion/server": "^11.11.0",
|
||||
"@emotion/styled": "^11.11.0",
|
||||
"@mui/material": "^5.13.1",
|
||||
"@react-hookz/web": "^23.0.0",
|
||||
"@sentry/nextjs": "^7.52.1",
|
||||
"@mui/material": "^5.13.3",
|
||||
"@react-hookz/web": "^23.0.1",
|
||||
"@sentry/nextjs": "^7.53.1",
|
||||
"@toeverything/hooks": "workspace:*",
|
||||
"@toeverything/plugin-infra": "workspace:*",
|
||||
"cmdk": "^0.2.0",
|
||||
"css-spring": "^4.1.0",
|
||||
"dayjs": "^1.11.7",
|
||||
"graphql": "^16.6.0",
|
||||
"jotai": "^2.1.0",
|
||||
"jotai-devtools": "^0.5.3",
|
||||
"lit": "^2.7.4",
|
||||
"lottie-web": "^5.11.0",
|
||||
"lottie-web": "^5.12.0",
|
||||
"next-themes": "^0.2.1",
|
||||
"react": "18.3.0-canary-16d053d59-20230506",
|
||||
"react-dom": "18.3.0-canary-16d053d59-20230506",
|
||||
"react-is": "^18.2.0",
|
||||
"react-mosaic-component": "^6.0.1",
|
||||
"rxjs": "^7.8.1",
|
||||
"swr": "^2.1.5",
|
||||
"y-protocols": "^1.0.5",
|
||||
@@ -62,21 +63,21 @@
|
||||
"@swc-jotai/react-refresh": "^0.0.8",
|
||||
"@types/react": "^18.2.6",
|
||||
"@types/react-dom": "^18.2.4",
|
||||
"@types/webpack-env": "^1.18.0",
|
||||
"@types/webpack-env": "^1.18.1",
|
||||
"@vanilla-extract/css": "^1.11.0",
|
||||
"@vanilla-extract/next-plugin": "^2.1.2",
|
||||
"dotenv": "^16.0.3",
|
||||
"eslint": "^8.40.0",
|
||||
"eslint-config-next": "^13.4.2",
|
||||
"@vanilla-extract/next-plugin": "=2.1.2",
|
||||
"dotenv": "^16.1.1",
|
||||
"eslint": "^8.41.0",
|
||||
"eslint-config-next": "^13.4.4",
|
||||
"eslint-plugin-unicorn": "^47.0.0",
|
||||
"next": "^13.4.2",
|
||||
"next": "=13.4.2",
|
||||
"next-debug-local": "^0.1.5",
|
||||
"next-router-mock": "^0.9.3",
|
||||
"raw-loader": "^4.0.2",
|
||||
"redux": "^4.2.1",
|
||||
"swc-plugin-coverage-instrument": "^0.0.18",
|
||||
"typescript": "^5.0.4",
|
||||
"webpack": "^5.83.1"
|
||||
"webpack": "^5.84.1"
|
||||
},
|
||||
"stableVersion": "0.0.0"
|
||||
}
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
// @ts-check
|
||||
import 'dotenv/config';
|
||||
|
||||
/**
|
||||
* @type {import('@affine/env').BlockSuiteFeatureFlags}
|
||||
*/
|
||||
@@ -12,12 +11,19 @@ export const blockSuiteFeatureFlags = {
|
||||
enable_drag_handle: true,
|
||||
enable_surface: true,
|
||||
enable_linked_page: true,
|
||||
enable_bookmark_operation: process.env.ENABLE_BOOKMARK_OPERATION === 'true',
|
||||
};
|
||||
|
||||
/**
|
||||
* @type {import('@affine/env').BuildFlags}
|
||||
*/
|
||||
export const buildFlags = {
|
||||
enablePlugin: process.env.ENABLE_PLUGIN === 'true',
|
||||
enableAllPageFilter:
|
||||
!!process.env.VERCEL ||
|
||||
(process.env.ENABLE_ALL_PAGE_FILTER
|
||||
? process.env.ENABLE_ALL_PAGE_FILTER === 'true'
|
||||
: false),
|
||||
enableImagePreviewModal: process.env.ENABLE_IMAGE_PREVIEW_MODAL
|
||||
? process.env.ENABLE_IMAGE_PREVIEW_MODAL === 'true'
|
||||
: true,
|
||||
@@ -34,6 +40,5 @@ export const buildFlags = {
|
||||
process.env.ENABLE_DEBUG_PAGE ?? process.env.NODE_ENV === 'development'
|
||||
),
|
||||
changelogUrl:
|
||||
process.env.CHANGELOG_URL ??
|
||||
'https://affine.pro/blog/whats-new-affine-0518',
|
||||
process.env.CHANGELOG_URL ?? 'http://affine.pro/blog/whats-new-affine-0601',
|
||||
};
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
import { Unreachable } from '@affine/env/constant';
|
||||
import { affineApis } from '@affine/workspace/affine/shared';
|
||||
import { rootStore } from '@affine/workspace/atom';
|
||||
import { createAffineProviders } from '@affine/workspace/providers';
|
||||
import type { AffineLegacyCloudWorkspace } from '@affine/workspace/type';
|
||||
import { WorkspaceFlavour } from '@affine/workspace/type';
|
||||
import { createEmptyBlockSuiteWorkspace } from '@affine/workspace/utils';
|
||||
import { assertExists } from '@blocksuite/store';
|
||||
|
||||
import { workspacesAtom } from '../../atoms';
|
||||
import { createAffineProviders } from '../../blocksuite';
|
||||
import { affineApis } from '../../shared/apis';
|
||||
|
||||
type Query = (typeof QueryKey)[keyof typeof QueryKey];
|
||||
|
||||
@@ -1,18 +1,26 @@
import { AFFINE_STORAGE_KEY, config, prefixUrl } from '@affine/env';
/**
* This file has deprecated because we do not maintain legacy affine cloud,
* please use new affine cloud instead.
*/
import { AFFINE_STORAGE_KEY, config } from '@affine/env';
import { initPage } from '@affine/env/blocksuite';
import { PageNotFoundError } from '@affine/env/constant';
import { currentAffineUserAtom } from '@affine/workspace/affine/atom';
import {
clearLoginStorage,
createAffineAuth,
getLoginStorage,
isExpired,
parseIdToken,
setLoginStorage,
SignMethod,
} from '@affine/workspace/affine/login';
import { affineApis, affineAuth } from '@affine/workspace/affine/shared';
import { rootStore, rootWorkspacesMetadataAtom } from '@affine/workspace/atom';
import { createIndexedDBBackgroundProvider } from '@affine/workspace/providers';
import {
createAffineProviders,
createIndexedDBBackgroundProvider,
} from '@affine/workspace/providers';
import { createAffineDownloadProvider } from '@affine/workspace/providers';
import type { AffineLegacyCloudWorkspace } from '@affine/workspace/type';
import {
LoadPriority,
@@ -29,17 +37,17 @@ import { Suspense, useEffect } from 'react';
import { mutate } from 'swr';
import { z } from 'zod';

import { createAffineProviders } from '../../blocksuite';
import { createAffineDownloadProvider } from '../../blocksuite/providers/affine';
import { WorkspaceSettingDetail } from '../../components/affine/workspace-setting-detail';
import { BlockSuitePageList } from '../../components/blocksuite/block-suite-page-list';
import { PageDetailEditor } from '../../components/page-detail-editor';
import { PageLoading } from '../../components/pure/loading';
import { useAffineRefreshAuthToken } from '../../hooks/affine/use-affine-refresh-auth-token';
import { BlockSuiteWorkspace } from '../../shared';
import { affineApis } from '../../shared/apis';
import { toast } from '../../utils';
import type { WorkspaceAdapter } from '..';
import {
BlockSuitePageList,
PageDetailEditor,
WorkspaceHeader,
WorkspaceSettingDetail,
} from '../shared';
import type { WorkspaceAdapter } from '../type';
import { QueryKey } from './fetcher';

const storage = createJSONStorage(() => localStorage);
@@ -79,8 +87,6 @@ const getPersistenceAllWorkspace = () => {
return allWorkspaces;
};

export const affineAuth = createAffineAuth(prefixUrl);

function AuthContext({ children }: PropsWithChildren): ReactElement {
const login = useAffineRefreshAuthToken();

@@ -95,7 +101,7 @@ function AuthContext({ children }: PropsWithChildren): ReactElement {
return <>{children}</>;
}

export const AffinePlugin: WorkspaceAdapter<WorkspaceFlavour.AFFINE> = {
export const AffineAdapter: WorkspaceAdapter<WorkspaceFlavour.AFFINE> = {
releaseType: ReleaseType.STABLE,
flavour: WorkspaceFlavour.AFFINE,
loadPriority: LoadPriority.HIGH,
@@ -174,7 +180,7 @@ export const AffinePlugin: WorkspaceAdapter<WorkspaceFlavour.AFFINE> = {

await mutate(matcher => matcher === QueryKey.getWorkspaces);
// refresh the local storage
await AffinePlugin.CRUD.list();
await AffineAdapter.CRUD.list();
return id;
},
delete: async workspace => {
@@ -207,7 +213,7 @@ export const AffinePlugin: WorkspaceAdapter<WorkspaceFlavour.AFFINE> = {
return null;
}
const workspaces: AffineLegacyCloudWorkspace[] =
await AffinePlugin.CRUD.list();
await AffineAdapter.CRUD.list();
return (
workspaces.find(workspace => workspace.id === workspaceId) ?? null
);
@@ -310,7 +316,8 @@ export const AffinePlugin: WorkspaceAdapter<WorkspaceFlavour.AFFINE> = {
</Suspense>
);
},
PageDetail: ({ currentWorkspace, currentPageId }) => {
Header: WorkspaceHeader,
PageDetail: ({ currentWorkspace, currentPageId, onLoadEditor }) => {
const page = currentWorkspace.blockSuiteWorkspace.getPage(currentPageId);
if (!page) {
throw new PageNotFoundError(
@@ -324,13 +331,15 @@ export const AffinePlugin: WorkspaceAdapter<WorkspaceFlavour.AFFINE> = {
pageId={currentPageId}
workspace={currentWorkspace}
onInit={initPage}
onLoad={onLoadEditor}
/>
</>
);
},
PageList: ({ blockSuiteWorkspace, onOpenPage }) => {
PageList: ({ blockSuiteWorkspace, onOpenPage, view }) => {
return (
<BlockSuitePageList
view={view}
listType="all"
onOpenPage={onOpenPage}
blockSuiteWorkspace={blockSuiteWorkspace}
@@ -19,14 +19,17 @@ import { createEmptyBlockSuiteWorkspace } from '@affine/workspace/utils';
import { nanoid } from '@blocksuite/store';
import React from 'react';

import { WorkspaceSettingDetail } from '../../components/affine/workspace-setting-detail';
import { BlockSuitePageList } from '../../components/blocksuite/block-suite-page-list';
import { PageDetailEditor } from '../../components/page-detail-editor';
import type { WorkspaceAdapter } from '..';
import {
BlockSuitePageList,
PageDetailEditor,
WorkspaceHeader,
WorkspaceSettingDetail,
} from '../shared';
import type { WorkspaceAdapter } from '../type';

const logger = new DebugLogger('use-create-first-workspace');

export const LocalPlugin: WorkspaceAdapter<WorkspaceFlavour.LOCAL> = {
export const LocalAdapter: WorkspaceAdapter<WorkspaceFlavour.LOCAL> = {
releaseType: ReleaseType.STABLE,
flavour: WorkspaceFlavour.LOCAL,
loadPriority: LoadPriority.LOW,
@@ -59,10 +62,11 @@ export const LocalPlugin: WorkspaceAdapter<WorkspaceFlavour.LOCAL> = {
},
CRUD,
UI: {
Header: WorkspaceHeader,
Provider: ({ children }) => {
return <>{children}</>;
},
PageDetail: ({ currentWorkspace, currentPageId }) => {
PageDetail: ({ currentWorkspace, currentPageId, onLoadEditor }) => {
const page = currentWorkspace.blockSuiteWorkspace.getPage(currentPageId);
if (!page) {
throw new PageNotFoundError(
@@ -75,15 +79,17 @@ export const LocalPlugin: WorkspaceAdapter<WorkspaceFlavour.LOCAL> = {
<PageDetailEditor
pageId={currentPageId}
onInit={initPage}
onLoad={onLoadEditor}
workspace={currentWorkspace}
/>
</>
);
},
PageList: ({ blockSuiteWorkspace, onOpenPage }) => {
PageList: ({ blockSuiteWorkspace, onOpenPage, view }) => {
return (
<BlockSuitePageList
listType="all"
view={view}
onOpenPage={onOpenPage}
blockSuiteWorkspace={blockSuiteWorkspace}
/>
27 apps/web/src/adapters/shared.ts Normal file
@@ -0,0 +1,27 @@
import { lazy } from 'react';

export const WorkspaceSettingDetail = lazy(() =>
import('../components/affine/workspace-setting-detail').then(
({ WorkspaceSettingDetail }) => ({
default: WorkspaceSettingDetail,
})
)
);
export const BlockSuitePageList = lazy(() =>
import('../components/blocksuite/block-suite-page-list').then(
({ BlockSuitePageList }) => ({
default: BlockSuitePageList,
})
)
);
export const PageDetailEditor = lazy(() =>
import('../components/page-detail-editor').then(({ PageDetailEditor }) => ({
default: PageDetailEditor,
}))
);

export const WorkspaceHeader = lazy(() =>
import('../components/workspace-header').then(({ WorkspaceHeader }) => ({
default: WorkspaceHeader,
}))
);
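Editorial aside: the new shared.ts wraps each re-exported component in React.lazy, so an adapter only downloads a component's chunk when it is actually rendered, and anything returned by lazy() has to be mounted under a <Suspense> boundary (the affine adapter's </Suspense> above shows this in use). A minimal usage sketch (mine, not part of the diff; the PageLoading fallback and relative paths are assumptions based on paths appearing elsewhere in this diff):

import type { ComponentProps } from 'react';
import { Suspense } from 'react';

import { PageLoading } from '../components/pure/loading'; // assumed fallback component
import { WorkspaceHeader } from './shared';

// The chunk behind WorkspaceHeader is fetched on first render; the fallback
// is shown until the dynamic import resolves.
export const HeaderWithFallback = (props: ComponentProps<typeof WorkspaceHeader>) => (
  <Suspense fallback={<PageLoading />}>
    <WorkspaceHeader {...props} />
  </Suspense>
);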
21 apps/web/src/adapters/type.tsx Normal file
@@ -0,0 +1,21 @@
import type {
AppEvents,
WorkspaceCRUD,
WorkspaceUISchema,
} from '@affine/workspace/type';
import type {
LoadPriority,
ReleaseType,
WorkspaceFlavour,
} from '@affine/workspace/type';

export interface WorkspaceAdapter<Flavour extends WorkspaceFlavour> {
releaseType: ReleaseType;
flavour: Flavour;
// Plugin will be loaded according to the priority
loadPriority: LoadPriority;
Events: Partial<AppEvents>;
// Fetch necessary data for the first render
CRUD: WorkspaceCRUD<Flavour>;
UI: WorkspaceUISchema<Flavour>;
}
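Editorial aside: the Flavour type parameter is what keeps each adapter internally consistent, since the same flavour is threaded through CRUD and UI, a LOCAL adapter cannot be wired to AFFINE-typed implementations. A rough conformance sketch (illustration only; the real adapters are the AffineAdapter and LocalAdapter shown in this diff):

import {
  LoadPriority,
  ReleaseType,
  WorkspaceFlavour,
} from '@affine/workspace/type';

import type { WorkspaceAdapter } from './type';

// `declare` stands in for real CRUD/UI implementations; the point is only that
// both halves must be typed with the same flavour as the adapter itself.
declare const localCRUD: WorkspaceAdapter<WorkspaceFlavour.LOCAL>['CRUD'];
declare const localUI: WorkspaceAdapter<WorkspaceFlavour.LOCAL>['UI'];

const exampleLocalAdapter: WorkspaceAdapter<WorkspaceFlavour.LOCAL> = {
  releaseType: ReleaseType.STABLE,
  flavour: WorkspaceFlavour.LOCAL,
  loadPriority: LoadPriority.LOW,
  Events: {},
  CRUD: localCRUD,
  UI: localUI,
};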
@@ -1,35 +1,22 @@
import type {
AppEvents,
WorkspaceCRUD,
WorkspaceUISchema,
} from '@affine/workspace/type';
import { Unreachable } from '@affine/env';
import type { AppEvents, WorkspaceUISchema } from '@affine/workspace/type';
import {
LoadPriority,
ReleaseType,
WorkspaceFlavour,
} from '@affine/workspace/type';

import { AffinePlugin } from './affine';
import { LocalPlugin } from './local';

export interface WorkspaceAdapter<Flavour extends WorkspaceFlavour> {
releaseType: ReleaseType;
flavour: Flavour;
// Plugin will be loaded according to the priority
loadPriority: LoadPriority;
Events: Partial<AppEvents>;
// Fetch necessary data for the first render
CRUD: WorkspaceCRUD<Flavour>;
UI: WorkspaceUISchema<Flavour>;
}
import { AffineAdapter } from './affine';
import { LocalAdapter } from './local';
import type { WorkspaceAdapter } from './type';

const unimplemented = () => {
throw new Error('Not implemented');
};

export const WorkspaceAdapters = {
[WorkspaceFlavour.AFFINE]: AffinePlugin,
[WorkspaceFlavour.LOCAL]: LocalPlugin,
[WorkspaceFlavour.AFFINE]: AffineAdapter,
[WorkspaceFlavour.LOCAL]: LocalAdapter,
[WorkspaceFlavour.AFFINE_CLOUD]: {
releaseType: ReleaseType.UNRELEASED,
flavour: WorkspaceFlavour.AFFINE_CLOUD,
@@ -45,6 +32,7 @@ export const WorkspaceAdapters = {
// todo: implement this
UI: {
Provider: unimplemented,
Header: unimplemented,
PageDetail: unimplemented,
PageList: unimplemented,
SettingsDetail: unimplemented,
@@ -65,6 +53,7 @@ export const WorkspaceAdapters = {
// todo: implement this
UI: {
Provider: unimplemented,
Header: unimplemented,
PageDetail: unimplemented,
PageList: unimplemented,
SettingsDetail: unimplemented,
@@ -73,3 +62,13 @@ export const WorkspaceAdapters = {
} satisfies {
[Key in WorkspaceFlavour]: WorkspaceAdapter<Key>;
};

export function getUIAdapter<Flavour extends WorkspaceFlavour>(
flavour: Flavour
): WorkspaceUISchema<Flavour> {
const ui = WorkspaceAdapters[flavour].UI as WorkspaceUISchema<Flavour>;
if (!ui) {
throw new Unreachable();
}
return ui;
}
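Editorial aside: the new getUIAdapter gives callers a single flavour-aware entry point into the UI slots each adapter defines. A minimal consumption sketch (mine, not from the diff; the relative import path is an assumption, the function lives in apps/web/src/adapters in this diff):

import { WorkspaceFlavour } from '@affine/workspace/type';

import { getUIAdapter } from '../adapters'; // hypothetical caller location

// The returned object holds the flavour's UI slots (Header, PageDetail, PageList, ...).
const { PageList } = getUIAdapter(WorkspaceFlavour.LOCAL);

Per the adapter diffs above, PageList then expects blockSuiteWorkspace, onOpenPage and the new view prop.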
Some files were not shown because too many files have changed in this diff.