Compare commits

..

119 Commits

Author SHA1 Message Date
Alex Yang
2c772bd81b v0.7.0-canary.24 2023-06-29 18:50:48 +08:00
JimmFly
7f00011542 chore: update changelog link and remove obsolete changelog components (#2918) 2023-06-29 10:19:26 +00:00
Alex Yang
f76d8b8818 chore: bump blocksuite to 0.0.0-20230629084521-542de4e8-nightly (#2921) 2023-06-29 09:42:47 +00:00
Alex Yang
1d6b39dec9 ci: allow codecov upload failure (#2922) 2023-06-29 09:39:16 +00:00
Qi
5cfdf6c7e2 fix: a series of ui issues of new setting (#2920)
Co-authored-by: Alex Yang <himself65@outlook.com>
2023-06-29 09:25:42 +00:00
Alex Yang
8410d83744 refactor: rootWorkspacesMetadataAtom loading logic (#2882) 2023-06-29 08:48:12 +00:00
DarkSky
8a2dac9718 fix: incorrect formatting (#2917) 2023-06-29 08:25:43 +00:00
JimmFly
5ad2908760 chore: update translation (#2916)
Co-authored-by: zuozijian3720 <zuozijian1994@gmail.com>
2023-06-29 08:20:25 +00:00
Alex Yang
5b8771485e docs: add apps/README.md 2023-06-29 16:07:30 +08:00
Alex Yang
ed8480caf0 ci: split migration test 2023-06-29 15:11:16 +08:00
Alex Yang
42ef3c0fc2 test: migration test in real world (#2885) 2023-06-29 06:50:26 +00:00
Alex Yang
e08ee9b7ff ci: add prettier format check (#2908) 2023-06-29 04:13:35 +00:00
liuyi
2c95bfcc3d feat(storage): binding jwst storage to node (#2808) 2023-06-29 01:45:45 +00:00
Alex Yang
86616e152d build: disable sqlite provider in canary 2023-06-29 10:00:41 +08:00
Peng Xiao
b1f478ee5e fix: updater color updates (#2913) 2023-06-28 17:21:07 +00:00
DarkSky
6b0f9fbdad feat: add deployment guide & fix pod label (#2912) 2023-06-28 17:12:23 +00:00
Alex Yang
da3f2b784a ci: fix output variable 2023-06-29 01:20:35 +08:00
Alex Yang
acb140ab78 v0.7.0-canary.23 2023-06-29 00:40:50 +08:00
Alex Yang
0b74bd9bfe ci: use production environment 2023-06-29 00:40:50 +08:00
Alex Yang
acfc030d16 ci: fix package version output 2023-06-29 00:40:50 +08:00
Alex Yang
d0d04ce376 v0.7.0-canary.22 2023-06-29 00:27:17 +08:00
Alex Yang
2250f42d2a ci: fix tag version 2023-06-29 00:26:48 +08:00
Alex Yang
887434fea4 v0.7.0-canary.21 2023-06-29 00:23:06 +08:00
Alex Yang
9b817c4b79 ci: automatically build canary release (#2911) 2023-06-28 15:53:32 +00:00
Alex Yang
ea03bbfb2d ci: add codeql check to merge group (#2909) 2023-06-28 15:07:27 +00:00
Qi
db40cd35c6 feat: migrate workspace setting with new design to setting modal (#2900)
Co-authored-by: Alex Yang <himself65@outlook.com>
2023-06-28 14:45:33 +00:00
Alex Yang
aabac9e921 chore: bump typescript version (#2906) 2023-06-28 12:57:33 +00:00
Alex Yang
0a91c41e0a chore: codesandbox setup (#2907) 2023-06-28 12:32:56 +00:00
DarkSky
d6addc0d0b docs: improve helm ci & document (#2902) 2023-06-28 12:30:02 +00:00
Alex Yang
91d3b76be5 refactor(storybook): move to apps folder (#2901) 2023-06-28 12:29:52 +00:00
Alex Yang
3eed009270 feat: add rule 'sonarjs/no-identical-functions' (#2905) 2023-06-28 12:29:12 +00:00
Alex Yang
bc14d54cfa chore: update pre-commit hook (#2904) 2023-06-28 11:24:37 +00:00
Alex Yang
5496969e58 refactor: environment setup (#2898)
Co-authored-by: Simon He <57086651+Simon-He95@users.noreply.github.com>
2023-06-28 11:19:19 +00:00
Alex Yang
80c2a78273 fix(web): bypass adapter list error (#2903) 2023-06-28 11:06:13 +00:00
Alex Yang
92f378aefc test(server): watch mode (#2893) 2023-06-28 10:00:06 +00:00
Alex Yang
877ceee698 ci: enable merge group (#2899) 2023-06-28 09:56:02 +00:00
Alex Yang
7960b6a22e feat: update migration test page (#2871) 2023-06-28 16:46:08 +08:00
Alex Yang
fa45d8a718 build: unify build flags (#2891) 2023-06-28 16:45:05 +08:00
Alex Yang
87574c9993 build: fix i18n output (#2896) 2023-06-28 16:40:41 +08:00
Alex Yang
2dd62f7603 v0.7.0-canary.20 2023-06-28 16:03:21 +08:00
Peng Xiao
79b3b1dabc fix: disable sqlite provider (#2888)
Co-authored-by: Alex Yang <himself65@outlook.com>
2023-06-28 15:01:15 +08:00
regischen
fd0aa4a2ee fix: migration props:elements (#2889) 2023-06-28 14:24:14 +08:00
3720
da57fbeadd fix: database migration (#2887) 2023-06-28 13:44:11 +08:00
JimmFly
3f12e4925f style: remove switch button shadow (#2890) 2023-06-28 05:07:33 +00:00
Alex Yang
21cb05a30c build(web): fix debug local (#2886) 2023-06-28 11:43:13 +08:00
Kushagra Singh
7a8ff2c489 docs: update CLA.md (#2884) 2023-06-28 03:12:15 +08:00
Alex Yang
d108434881 fix: preloading page (#2876)
Co-authored-by: Mirone <Saul-Mirone@outlook.com>
2023-06-28 03:11:14 +08:00
Alex Yang
20fd9b6574 feat: upload 0.7.0-canary.18 static output (#2883) 2023-06-28 03:10:08 +08:00
Alex Yang
26ac56e163 test: remove deprecated test (#2880) 2023-06-28 00:53:04 +08:00
Alex Yang
78b74d5b15 feat(docs): update document (#2877) 2023-06-28 00:52:42 +08:00
DarkSky
1556167262 feat: add helm releaser (#2875) 2023-06-28 00:16:40 +08:00
Alex Yang
5186710f84 v0.7.0-canary.19 2023-06-27 23:13:01 +08:00
JimmFly
c7f25e8fe3 fix: incorrect scrollbar position when opening full width layout (#2869)
Co-authored-by: Alex Yang <himself65@outlook.com>
2023-06-27 14:15:34 +00:00
Alex Yang
ab703fe3ae test: fix image preview (#2874) 2023-06-27 21:05:24 +08:00
DarkSky
5275ff5493 feat: add affine cloud helm config (#2870) 2023-06-27 21:00:41 +08:00
Alex Yang
4fad21fe5c fix: remove preloading page (#2873) 2023-06-27 20:00:06 +08:00
JimmFly
eec2074b88 style: add hover style for filter item (#2868) 2023-06-27 19:02:46 +08:00
LongYinan
dd58b1bbf6 build: docker images (#2860) 2023-06-27 17:23:19 +08:00
Peng Xiao
05452bb297 feat: sqlite subdocument (#2816)
Co-authored-by: Alex Yang <himself65@outlook.com>
2023-06-27 07:40:37 +00:00
danielchim
4307e1eb6b fix: image preview (#2818) 2023-06-27 15:26:57 +08:00
JimmFly
10c7f93a85 style: add hover style for scrollbar (#2867) 2023-06-27 15:26:13 +08:00
Alex Yang
adca2a7225 chore: bump version (#2866) 2023-06-27 13:23:54 +08:00
Alex Yang
eb3f160a64 chore: bump version (#2862) 2023-06-27 11:32:24 +08:00
Alex Yang
53488a1498 build: remove image preview config (#2861) 2023-06-27 10:59:24 +08:00
DarkSky
d46b6c4863 feat: new workspace apis (#2825) 2023-06-26 22:12:58 +08:00
Alex Yang
e3ffd04804 feat(docs): bootstrapping using blocksuite (#2859) 2023-06-26 21:39:07 +08:00
Alex Yang
bddcfe1b8b chore: bump version (#2858) 2023-06-26 17:58:24 +08:00
Believerd
f18d07a4a2 docs: added note for building native dependencies (#2851) 2023-06-26 17:34:48 +08:00
Alex Yang
44166f7256 fix(docs): defer entry js (#2856) 2023-06-26 16:59:42 +08:00
Alex Yang
8e82d1e02c feat: support migration (#2852) 2023-06-26 15:55:44 +08:00
JimmFly
002e64c819 style: add scrollbar (#2826) 2023-06-26 15:12:44 +08:00
Alex Yang
773d92760e feat: add migration test page (#2855) 2023-06-26 13:42:07 +08:00
Mirone
84c8828e8c feat: add migration script for database (#2854) 2023-06-26 12:16:30 +08:00
himself65
fb6de18b2f feat(docs): update home page 2023-06-25 22:25:13 +08:00
Alex Yang
d525bd9113 feat: init @affine/docs (#2849) 2023-06-25 21:18:23 +08:00
Alex Yang
d8bb51a222 chore: bump version (#2848) 2023-06-25 18:37:20 +08:00
Alex Yang
da4d89275f chore: bump version (#2847) 2023-06-25 17:47:48 +08:00
Alex Yang
9bf5ea3e56 chore: bump version (#2846) 2023-06-25 17:16:03 +08:00
Alex Yang
7fcc5e599e feat!: upgrade blocksuite version (#2833) 2023-06-25 01:16:46 +08:00
Qi
aa86d3a2ee feat: new setting modal (#2834)
Co-authored-by: Alex Yang <himself65@outlook.com>
2023-06-21 19:57:59 +08:00
LongYinan
9a90ce694c chore(server): commit server generated gql file to prevent build fail (#2835) 2023-06-21 07:22:47 +00:00
LongYinan
9b3fa43b81 feat(server): auth server (#2773) 2023-06-21 06:08:32 +00:00
himself65
2698e7fd0d fix(y-indexeddb): remove .js suffix 2023-06-21 11:25:14 +08:00
Alex Yang
58fd3857c0 fix(y-indexeddb): test cases (#2832) 2023-06-20 22:01:47 +08:00
Peng Xiao
3755661ff6 fix: incorrect bookmark toast text (#2815)
Co-authored-by: Alex Yang <himself65@outlook.com>
2023-06-20 19:14:15 +08:00
himself65
c649995a7a build: update changelogUrl 2023-06-20 18:58:05 +08:00
himself65
448d3731e5 build: update nx.json 2023-06-20 18:08:48 +08:00
3720
019a2f57cb fix: e2e test (#2828)
Co-authored-by: Alex Yang <himself65@outlook.com>
2023-06-20 17:15:49 +08:00
himself65
6f9bb024be build: update nx.json 2023-06-20 13:41:58 +08:00
Mirone
707d585698 feat: add subdoc migration script (#2820)
Co-authored-by: himself65 <himself65@outlook.com>
2023-06-20 11:20:12 +08:00
Alex Yang
b73c75182f refactor: simplify rpc code (#2823) 2023-06-19 23:27:27 +08:00
Alex Yang
775d6212d3 build: fix nx config (#2824) 2023-06-19 23:07:40 +08:00
Alex Yang
357ff9853d fix: native build (#2822) 2023-06-19 18:21:34 +08:00
DarkSky
cd7892b7ed chore: update nx cloud read only token (#2821) 2023-06-19 17:38:26 +08:00
himself65
9512964366 build: fix publish.sh 2023-06-19 11:18:54 +08:00
Alex Yang
5473a12c0e build: fix nx output (#2817) 2023-06-19 11:13:59 +08:00
Alex Yang
c249cdf76a build: enable nx cloud (#2812) 2023-06-17 19:36:55 +08:00
Alex Yang
0e01094ffd ci: use nx on rust build (#2811) 2023-06-17 17:49:07 +08:00
Alex Yang
c68220166a feat(y-indexeddb): remove id (#2810) 2023-06-17 13:58:48 +08:00
Alex Yang
deeafb3a12 chore: bump version (#2799) 2023-06-17 13:52:07 +08:00
himself65
4d44542ca6 ci: update BUG-REPORT.yml 2023-06-16 15:18:18 +08:00
JimmFly
f83c92cb87 style: temporarily adjust the style of the co-pilot (#2804) 2023-06-16 15:12:59 +08:00
himself65
03c0b6b364 build: fix publish.sh 2023-06-16 15:00:06 +08:00
himself65
c5eb16139f build: update nx.json 2023-06-16 14:50:42 +08:00
himself65
bfb9e9b5c5 ci: update release-desktop-app.yml 2023-06-16 14:44:45 +08:00
himself65
af4de0b14f ci: update nightly-build.yml 2023-06-16 14:38:15 +08:00
Peng Xiao
0553ca3c02 docs: update building.md for electron (#2800) 2023-06-16 13:15:42 +08:00
himself65
9422b93857 fix(copilot): idb upgrade 2023-06-16 11:15:51 +08:00
himself65
951f5540a3 v0.7.0-canary.18 2023-06-15 21:56:22 +08:00
himself65
2b9929222c build: update change log url 2023-06-15 21:54:06 +08:00
Peng Xiao
b1c64a5f7e fix: missing updater (#2796) 2023-06-15 20:48:10 +08:00
Himself65
5164c8c1f9 chore: bump version (#2787) 2023-06-15 15:03:20 +08:00
himself65
18fad62f5c v0.7.0-canary.17 2023-06-15 14:34:11 +08:00
JimmFly
7eaff644e3 fix: electron cannot be started in Windows (#2784) 2023-06-15 06:16:27 +00:00
JimmFly
9fd4818d81 fix: window control not work (#2790) 2023-06-15 06:09:46 +00:00
himself65
d144c9f6f5 build(y-indexeddb): update package.json 2023-06-15 14:04:39 +08:00
JimmFly
a6752bb49c test: add basic test for link page and database (#2775) 2023-06-15 13:58:43 +08:00
danielchim
ae7da1b018 fix: image preview (#2786)
Co-authored-by: himself65 <himself65@outlook.com>
2023-06-15 13:55:17 +08:00
JimmFly
3819342ff2 fix: add guide to the other page (#2779) 2023-06-15 12:46:56 +08:00
429 changed files with 36087 additions and 13650 deletions

20
.codesandbox/task.json Normal file

@@ -0,0 +1,20 @@
{
"$schema": "https://codesandbox.io/schemas/tasks.json",
"setupTasks": [
{
"name": "Install Dependencies",
"command": "yarn install"
}
],
"tasks": {
"start-web": {
"name": "Start Web",
"command": "yarn nx dev @affine/web --port 8080",
"runAtStart": true,
"preview": {
"port": 8080
}
}
}
}


@@ -9,6 +9,7 @@
"server",
"web",
"docs",
"storybook",
"component",
"workspace",
"env",
@@ -21,7 +22,7 @@
"templates",
"y-indexeddb",
"debug",
"theme"
"storage"
]
]
}


@@ -41,6 +41,7 @@ const allPackages = [
'apps/web',
'apps/server',
'apps/electron',
'apps/storybook',
'plugins/copilot',
'plugins/bookmark-block',
];
@@ -64,6 +65,7 @@ const config = {
'plugin:react/recommended',
'plugin:react/jsx-runtime',
'plugin:@typescript-eslint/recommended',
'prettier',
],
parser: '@typescript-eslint/parser',
parserOptions: {
@@ -157,6 +159,7 @@ const config = {
'sonarjs/no-duplicated-branches': 'error',
'sonarjs/no-collection-size-mischeck': 'error',
'sonarjs/no-useless-catch': 'error',
'sonarjs/no-identical-functions': 'error',
},
overrides: [
{

1
.github/CLA.md vendored

@@ -59,3 +59,4 @@ Example:
- 三咲智子 Kevin Deng, @sxzz, 2023/04/21
- Moeyua, @moeyua, 2023/04/22
- Shishu, @shishudesu, 2023/05/19
- Kushagra Singh, @kush002, 2023/06/28


@@ -18,22 +18,22 @@ body:
- type: dropdown
id: version
attributes:
label: Version
description: What version of our software are you running?
label: Distribution version
description: What version of AFFiNE are you using?
options:
- app.affine.pro
- stage.affine.pro
- dev.affine.live
- affine-preview.vercel.app
- macOS x64
- macOS ARM 64
- Windows x64
- Linux
- Web (app.affine.pro)
- Web (stage.affine.pro)
- Web (dev.affine.live)
validations:
required: true
- type: dropdown
id: browsers
attributes:
label: What browsers are you seeing the problem on?
label: What browsers are you seeing the problem on, if you're using the web version?
multiple: true
options:
- Chrome


@@ -4,6 +4,9 @@ inputs:
target:
description: 'Cargo target'
required: true
nx_token:
description: 'Nx Cloud access token'
required: false
runs:
using: 'composite'
@@ -24,28 +27,32 @@ runs:
.cargo-cache
target/${{ inputs.target }}
key: stable-${{ inputs.target }}-cargo-cache
- name: Build
if: ${{ inputs.target != 'x86_64-unknown-linux-gnu' && inputs.target != 'aarch64-unknown-linux-gnu' }}
shell: bash
run: yarn workspace @affine/native build --target ${{ inputs.target }}
run: |
yarn nx build @affine/native --target ${{ inputs.target }}
env:
NX_CLOUD_ACCESS_TOKEN: ${{ inputs.nx_token }}
- name: Build
if: ${{ inputs.target == 'x86_64-unknown-linux-gnu' }}
uses: addnab/docker-run-action@v3
with:
image: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian
options: --user 0:0 -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build
run: >-
export CC=x86_64-unknown-linux-gnu-gcc &&
export CC_x86_64_unknown_linux_gnu=x86_64-unknown-linux-gnu-gcc &&
yarn workspace @affine/native build --target ${{ inputs.target }}
options: --user 0:0 -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build -e NX_CLOUD_ACCESS_TOKEN=${{ inputs.nx_token }}
run: |
export CC=x86_64-unknown-linux-gnu-gcc
export CC_x86_64_unknown_linux_gnu=x86_64-unknown-linux-gnu-gcc
yarn nx build @affine/native --target ${{ inputs.target }}
chmod -R 777 node_modules/.cache
- name: Build
if: ${{ inputs.target == 'aarch64-unknown-linux-gnu' }}
uses: addnab/docker-run-action@v3
with:
image: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian-aarch64
options: --user 0:0 -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build
run: >-
yarn workspace @affine/native build --target ${{ inputs.target }}
options: --user 0:0 -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build -e NX_CLOUD_ACCESS_TOKEN=${{ inputs.nx_token }}
run: |
yarn nx build @affine/native --target ${{ inputs.target }}
chmod -R 777 node_modules/.cache


@@ -82,7 +82,7 @@ runs:
id: playwright-version
if: ${{ inputs.playwright-install == 'true' }}
shell: bash
run: echo "version=$(yarn why --json @playwright/test | grep -h 'workspace:.' | jq --raw-output '.children[].locator' | sed -e 's/@playwright\/test@.*://')" >> $GITHUB_OUTPUT
run: echo "version=$(yarn why --json @playwright/test | grep -h 'workspace:.' | jq --raw-output '.children[].locator' | sed -e 's/@playwright\/test@.*://' | head -n 1)" >> $GITHUB_OUTPUT
# Attempt to restore the correct Playwright browser binaries based on the
# currently installed version of Playwright (The browser binary versions

31
.github/actions/setup-rust/action.yml vendored Normal file

@@ -0,0 +1,31 @@
name: 'AFFiNE Rust setup'
description: 'Rust setup, including cache configuration'
inputs:
target:
description: 'Cargo target'
required: true
toolchain:
description: 'Rustup toolchain'
required: false
default: 'stable'
runs:
using: 'composite'
steps:
- name: Setup Rust
uses: dtolnay/rust-toolchain@stable
with:
toolchain: ${{ inputs.toolchain }}
targets: ${{ inputs.target }}
- name: Cache cargo
uses: actions/cache@v3
with:
path: |
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
target/
key: cargo-cache-${{ runner.os }}-${{ inputs.toolchain }}-${{ hashFiles('**/Cargo.lock') }}
restore-keys: |
cargo-cache-${{ runner.os }}-${{ inputs.toolchain }}-


@@ -1,40 +0,0 @@
:80 {
root /* ./dist
file_server {
# precompressed br
}
encode {
zstd
gzip 9
}
header {
# 7 days
Cache-Control "public, max-age=86400, must-revalidate"
}
handle /api/* {
reverse_proxy {$API_SERVER} {
health_uri /api/healthz
@error status 500 502 503 503
handle_response @error {
root * /dist
rewrite * /50x.html
file_server
}
}
}
@notStatic {
not path /_next/static/*
}
handle @notStatic {
header {
Cache-Control "no-cache, no-store, must-revalidate"
}
try_files {path} /index.html
}
}


@@ -1,13 +0,0 @@
FROM node:16-alpine as relocate
WORKDIR /app
COPY ./apps/web/out ./dist
COPY ./.github/deployment/Caddyfile ./Caddyfile
FROM caddy:2.6.2-alpine
ARG API_SERVER
WORKDIR /app
COPY --from=relocate /app .
EXPOSE 80
ENV API_SERVER=$API_SERVER
CMD ["caddy", "run"]

11
.github/deployment/front/Dockerfile vendored Normal file

@@ -0,0 +1,11 @@
FROM openresty/openresty:1.21.4.1-0-buster
WORKDIR /app
COPY ./apps/web/out ./dist
COPY ./.github/deployment/front/nginx.conf /usr/local/openresty/nginx/conf/nginx.conf
COPY ./.github/deployment/front/affine.nginx.conf /etc/nginx/conf.d/affine.nginx.conf
RUN mkdir -p /var/log/nginx && \
rm /etc/nginx/conf.d/default.conf
EXPOSE 8080
CMD ["/usr/local/openresty/bin/openresty", "-g", "daemon off;"]


@@ -0,0 +1,13 @@
server {
listen 8080;
root /app/dist;
location / {
try_files $uri $uri/index.html $uri.html =404;
}
error_page 404 /404.html;
location = /404.html {
internal;
}
}

14
.github/deployment/front/nginx.conf vendored Normal file

@@ -0,0 +1,14 @@
worker_processes 4;
error_log /var/log/nginx/error.log warn;
pcre_jit on;
events {
worker_connections 1024;
}
http {
include mime.types;
log_format main '$remote_addr [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
access_log /var/log/nginx/access.log main;
include /etc/nginx/conf.d/*.conf;
}

10
.github/deployment/node/Dockerfile vendored Normal file

@@ -0,0 +1,10 @@
FROM node:18-bookworm-slim
COPY ./apps/server /app
WORKDIR /app
RUN apt-get update && \
apt-get install -y --no-install-recommends openssl && \
rm -rf /var/lib/apt/lists/*
CMD ["node", "--es-module-specifier-resolution=node", "./dist/index.js"]

1
.github/helm/affine-cloud/.gitignore vendored Normal file

@@ -0,0 +1 @@
charts/

23
.github/helm/affine-cloud/.helmignore vendored Normal file

@@ -0,0 +1,23 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*.orig
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/

6
.github/helm/affine-cloud/Chart.lock vendored Normal file

@@ -0,0 +1,6 @@
dependencies:
- name: postgresql
repository: https://charts.bitnami.com/bitnami
version: 12.5.8
digest: sha256:c91c0dc1370e879538dc9d6e435e731a726ef99d6a3b081372318483792b48a7
generated: "2023-06-27T18:34:12.683806+08:00"

12
.github/helm/affine-cloud/Chart.yaml vendored Normal file

@@ -0,0 +1,12 @@
apiVersion: v2
name: affine-cloud
description: A Helm chart for AFFiNE Cloud
type: application
version: 0.6.1
appVersion: '0.6.1'
dependencies:
- name: postgresql
version: 12.5.8
repository: https://charts.bitnami.com/bitnami

30
.github/helm/affine-cloud/readme.md vendored Normal file

@@ -0,0 +1,30 @@
# Helm Chart Configuration
The following tables list the configurable parameters of this Helm chart and their default values.
## AFFiNE Cloud Server parameters
| Parameter | Description | Default |
| ------------------------------ | -------------------------------------------------- | ------------------ |
| `affineCloud.tag` | The Docker tag of the AffineCloud image to be used | `'nightly-latest'` |
| `affineCloud.resources.cpu` | The CPU resources allocated for AffineCloud | `'250m'` |
| `affineCloud.resources.memory` | The memory resources allocated for AffineCloud | `'0.5Gi'` |
| `affineCloud.signKey` | The key used to sign the JWT tokens | `'c2VjcmV0'` |
| `affineCloud.service.type` | The type of the Kubernetes service | `'ClusterIP'` |
| `affineCloud.service.port` | The port of the Kubernetes service | `'http'` |
| `affineCloud.mail.account` | The email account used to send emails | `''` |
| `affineCloud.mail.password` | The password of the email account | `''` |
## PostgreSQL parameters
| Parameter | Description | Default |
| -------------------------------------------- | ------------------------------------------------------------------------------------- | ------------ |
| `postgresql.auth.username` | Username for the PostgreSQL database | `'affine'` |
| `postgresql.auth.password` | Password for the PostgreSQL database. Please change this for production environments. | `'password'` |
| `postgresql.auth.database` | The name of the default database that will be created on image startup | `'affine'` |
| `postgresql.primary.resources.limits.cpu` | The CPU resources allocated for the PostgreSQL primary node | `'500m'` |
| `postgresql.primary.resources.limits.memory` | The memory resources allocated for the PostgreSQL primary node | `'0.5Gi'` |
For more PostgreSQL parameters, please refer to: https://artifacthub.io/packages/helm/bitnami/postgresql
Please note that you should provide your own `postgresql.auth.password` for production environments; the default value is provided only for demonstration purposes.
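For illustration, a minimal install that overrides some of the parameters above might look like the following sketch. The release name `affine-cloud`, namespace `affine`, and the example key/password values are placeholders; note that values rendered into the Kubernetes Secret, such as `affineCloud.signKey`, must be base64-encoded because the Secret template writes them into its `data` field:
```sh
# add the repo for the postgresql dependency, then vendor it into charts/
helm repo add bitnami https://charts.bitnami.com/bitnami
helm dependency build .github/helm/affine-cloud

# install with a non-default image tag, sign key, and database password
helm install affine-cloud .github/helm/affine-cloud \
  --namespace affine --create-namespace \
  --set affineCloud.tag=nightly-latest \
  --set affineCloud.signKey="$(echo -n 'my-sign-key' | base64)" \
  --set postgresql.auth.password='a-strong-password'
```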


@@ -0,0 +1,51 @@
{{/*
Expand the name of the chart.
*/}}
{{- define "affine-cloud.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
If release name contains chart name it will be used as a full name.
*/}}
{{- define "affine-cloud.fullname" -}}
{{- if .Values.fullnameOverride }}
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- $name := default .Chart.Name .Values.nameOverride }}
{{- if contains $name .Release.Name }}
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
{{- end }}
{{- end }}
{{- end }}
{{/*
Create chart name and version as used by the chart label.
*/}}
{{- define "affine-cloud.chart" -}}
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Common labels
*/}}
{{- define "affine-cloud.labels" -}}
helm.sh/chart: {{ include "affine-cloud.chart" . }}
{{ include "affine-cloud.selectorLabels" . }}
{{- if .Chart.AppVersion }}
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
{{- end }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end }}
{{/*
Selector labels
*/}}
{{- define "affine-cloud.selectorLabels" -}}
app.kubernetes.io/name: {{ include "affine-cloud.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}


@@ -0,0 +1,51 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: "{{ include "affine-cloud.fullname" . }}"
labels:
{{- include "affine-cloud.labels" . | nindent 4 }}
spec:
replicas: 1
selector:
matchLabels:
{{- include "affine-cloud.selectorLabels" . | nindent 6 }}
strategy:
type: RollingUpdate
rollingUpdate:
maxUnavailable: 2
template:
metadata:
labels:
{{- include "affine-cloud.selectorLabels" . | nindent 8 }}
spec:
restartPolicy: Always
containers:
- name: affine-cloud
image: "ghcr.io/toeverything/cloud-self-hosted:{{ .Values.affineCloud.tag | default .Chart.AppVersion }}"
env:
- name: PG_USER
value: "{{ .Values.postgresql.auth.username }}"
- name: PG_PASS
value: "{{ .Values.postgresql.auth.password }}"
- name: PG_DATABASE
value: "{{ .Values.postgresql.auth.database }}"
- name: PG_HOST
value: "{{ .Values.postgresql.fullnameOverride | default (printf "%s-postgresql" .Release.Name) }}"
- name: DATABASE_URL
value: "{{ .Values.affineCloud.databaseUrl | default "postgresql://$(PG_USER):$(PG_PASS)@$(PG_HOST)/$(PG_DATABASE)" }}"
envFrom:
- secretRef:
name: affine-cloud-secret
ports:
- containerPort: 3000
livenessProbe:
httpGet:
path: /api/healthz
port: 3000
failureThreshold: 1
initialDelaySeconds: 10
periodSeconds: 10
resources:
limits:
cpu: "{{ .Values.affineCloud.resources.cpu }}"
memory: "{{ .Values.affineCloud.resources.memory }}"


@@ -0,0 +1,9 @@
apiVersion: v1
kind: Secret
metadata:
name: affine-cloud-secret
type: Opaque
data:
SIGN_KEY: "{{ .Values.affineCloud.signKey }}"
MAIL_ACCOUNT: "{{ .Values.affineCloud.mail.account }}"
MAIL_PASSWORD: "{{ .Values.affineCloud.mail.password }}"


@@ -0,0 +1,15 @@
apiVersion: v1
kind: Service
metadata:
name: "{{ include "affine-cloud.fullname" . }}"
labels:
{{- include "affine-cloud.labels" . | nindent 4 }}
spec:
type: "{{ .Values.affineCloud.service.type }}"
ports:
- name: http
protocol: TCP
port: {{ .Values.affineCloud.service.port }}
targetPort: 3000
selector:
{{- include "affine-cloud.selectorLabels" . | nindent 4 }}

30
.github/helm/affine-cloud/values.yaml vendored Normal file

@@ -0,0 +1,30 @@
affineCloud:
tag: 'canary-5e0d5e0cc65ea46f326fdde12658bfac59b38c9f-0949'
# databaseUrl: 'postgresql://affine:password@affine-cloud-postgresql:5432/affine'
signKey: TUFtdFdzQTJhdGJuem01TA==
mail:
account: ''
password: ''
service:
type: ClusterIP
port: 80
resources:
cpu: '250m'
memory: 0.5Gi
postgresql:
fullnameOverride: tcp-postgresql
auth:
# only for demo, please modify it at prod env
username: affine
password: password
database: affine
primary:
initdb:
scripts:
01-init.sql: |
CREATE DATABASE affine_binary;
GRANT ALL PRIVILEGES ON DATABASE affine_binary TO affine;
resources:
limits:
cpu: '500m'
memory: 0.5Gi

60
.github/helm/deployment_guide.md vendored Normal file

@@ -0,0 +1,60 @@
# Cluster Deployment Guide
This document provides a step-by-step guide for deploying the services in a Kubernetes cluster. It assumes that the reader already has a basic understanding of Kubernetes concepts and operations.
### 1. Configure Service Mesh (Optional)
A service mesh (such as Istio or Anthos Service Mesh) can optionally be used in the Kubernetes cluster to manage the network interactions of microservices. If a service mesh is already deployed on your cluster, or you do not need one, you can skip this step. This step assumes you are using Google Kubernetes Engine (GKE) and have already installed Anthos Service Mesh on your cluster; if you wish to use another ingress controller, please refer to its documentation.
To configure your kubectl context to interact with your Kubernetes cluster using the gcloud tool, execute the following commands:
```sh
export CLUSTER_NAME=your_cluster_name
export REGION=your_cluster_region
export PROJECT=your_project_id
gcloud container clusters get-credentials $CLUSTER_NAME --region $REGION --project $PROJECT
```
Replace `CLUSTER_NAME`, `REGION`, and `PROJECT` with the actual name, region, and project ID of your Kubernetes cluster. The command retrieves the access credentials for the cluster and automatically configures kubectl to use them.
Now, to enable sidecar injection for a specific Namespace, first set the `NAMESPACE` environment variable to your target Kubernetes Namespace. This example uses `prod`:
```sh
export NAMESPACE=prod
```
Then label the Namespace; this enables Istio to automatically inject the sidecar container into all new Pods created in it:
```sh
kubectl label namespace $NAMESPACE istio-injection- istio.io/rev=asm-managed --overwrite
```
Finally, restart the Deployments so that existing Pods are recreated and also receive the sidecar injection:
```sh
kubectl rollout restart deployment -n $NAMESPACE
```
### 2. Deploying the Application
Next, deploy the application to the cluster with Helm. First, set the relevant environment variables:
```sh
export NAMESPACE=prod
export RELEASE=affine-cloud-prod
export CHART_PATH=.github/helm/affine-cloud
```
- `NAMESPACE` should match the first step: your target Kubernetes Namespace.
- `RELEASE` is the name of your Helm release.
- `CHART_PATH` is the location of the Helm chart in your file system. (Avoid naming this variable `PATH`, as that would shadow the system `PATH` and break command lookup.)
Finally, use the `helm upgrade --install` command to deploy or upgrade the application:
```sh
helm upgrade --namespace $NAMESPACE --create-namespace --install $RELEASE $CHART_PATH
```
This command creates the Namespace if it does not already exist and deploys your Helm chart into it; if the release already exists, it is upgraded.
These are the complete steps for deploying the application in a Kubernetes cluster. Make sure all prerequisites are met before deploying, and ensure you have sufficient permissions to perform these operations in the cluster.
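As a quick sanity check after the deploy, you can watch the rollout and probe the `/api/healthz` liveness endpoint that the chart wires to port 3000. This is a sketch; it assumes the chart's default `fullname` naming, under which the Deployment and Service share the release name:
```sh
kubectl rollout status deployment/$RELEASE -n $NAMESPACE
kubectl get pods -n $NAMESPACE

# forward the service's named http port locally and probe the liveness endpoint
kubectl port-forward svc/$RELEASE 3000:http -n $NAMESPACE &
sleep 2
curl -f http://localhost:3000/api/healthz
```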

2
.github/helm/releaser.yaml vendored Normal file

@@ -0,0 +1,2 @@
owner: toeverything
git-repo: helm-charts

2
.github/labeler.yml vendored

@@ -48,3 +48,5 @@ app:web: 'apps/web/**/*'
app:electron: 'apps/electron/**/*'
app:server: 'apps/server/**/*'
app:docs: 'apps/docs/**/*'


@@ -10,7 +10,10 @@ on:
- README.md
- .github/**
- '!.github/workflows/build.yml'
- '!.github/actions/build-rust/action.yml'
- '!.github/actions/setup-node/action.yml'
pull_request:
merge_group:
branches:
- master
- v[0-9]+.[0-9]+.x-staging
@@ -19,12 +22,16 @@ on:
- README.md
- .github/**
- '!.github/workflows/build.yml'
- '!.github/actions/build-rust/action.yml'
- '!.github/actions/setup-node/action.yml'
env:
DEBUG: napi:*
BUILD_TYPE: canary
APP_NAME: affine
COVERAGE: true
MACOSX_DEPLOYMENT_TARGET: '10.13'
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
jobs:
lint:
@@ -36,12 +43,29 @@ jobs:
- uses: actions/checkout@v3
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Run checks
run: |
yarn i18n-codegen gen
yarn typecheck
yarn lint --max-warnings=0
yarn circular
- name: Run i18n codegen
run: yarn i18n-codegen gen
- name: Run Type Check
run: yarn typecheck
- name: Run ESLint
run: yarn lint --max-warnings=0 --cache
- name: Run Prettier
run: yarn prettier . --ignore-unknown --cache --check
- name: Run circular
run: yarn circular
build-docs:
name: Build Docs
runs-on: ubuntu-latest
environment: development
steps:
- uses: actions/checkout@v3
- name: Setup Node.js
uses: ./.github/actions/setup-node
- run: yarn nx build @affine/docs
env:
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
build-storybook:
name: Build Storybook
@@ -53,11 +77,13 @@ jobs:
- name: Setup Node.js
uses: ./.github/actions/setup-node
- run: yarn nx build @affine/storybook
env:
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Upload storybook artifact
uses: actions/upload-artifact@v3
with:
name: storybook
path: ./packages/storybook/storybook-static
path: ./apps/storybook/storybook-static
if-no-files-found: error
build-web:
@@ -69,24 +95,8 @@ jobs:
- uses: actions/checkout@v3
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Cache Next.js
uses: actions/cache@v3
with:
path: |
${{ github.workspace }}/apps/web/.next/cache
key: ${{ runner.os }}-nextjs-${{ hashFiles('**/yarn.lock') }}-${{ hashFiles('**.[jt]s', '**.[jt]sx') }}
restore-keys: |
${{ runner.os }}-nextjs-${{ hashFiles('**/yarn.lock') }}-
- name: Build Web
run: yarn nx build @affine/web
env:
API_SERVER_PROFILE: local
ENABLE_DEBUG_PAGE: 1
ENABLE_PLUGIN: true
ENABLE_ALL_PAGE_FILTER: true
ENABLE_LEGACY_PROVIDER: true
ENABLE_PRELOADING: false
- name: Upload artifact
uses: actions/upload-artifact@v3
with:
@@ -94,18 +104,18 @@ jobs:
path: ./apps/web/.next
if-no-files-found: error
- name: Build Web (Desktop)
run: yarn nx build @affine/web
env:
API_SERVER_PROFILE: affine
ENABLE_DEBUG_PAGE: 1
ENABLE_PLUGIN: true
ENABLE_ALL_PAGE_FILTER: true
ENABLE_LEGACY_PROVIDER: false
ENABLE_PRELOADING: false
build-web-desktop:
name: Build @affine/web (Desktop)
runs-on: ubuntu-latest
environment: development
steps:
- uses: actions/checkout@v3
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Export static resources
run: yarn workspace @affine/web export
run: yarn nx export @affine/web
- name: Upload static resources artifact
uses: actions/upload-artifact@v3
@@ -153,9 +163,14 @@ jobs:
working-directory: apps/server
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Setup Rust
uses: ./.github/actions/setup-rust
with:
target: 'x86_64-unknown-linux-gnu'
- name: Run server tests
run: yarn nx test:coverage @affine/server
env:
CARGO_TARGET_DIR: '${{ github.workspace }}/target'
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Upload server test coverage results
uses: codecov/codecov-action@v3
@@ -164,7 +179,7 @@ jobs:
files: ./apps/server/.coverage/lcov.info
flags: server-test
name: affine
fail_ci_if_error: true
fail_ci_if_error: false
storybook-test:
name: Storybook Test
@@ -181,9 +196,9 @@ jobs:
uses: actions/download-artifact@v3
with:
name: storybook
path: ./packages/storybook/storybook-static
path: ./apps/storybook/storybook-static
- name: Run storybook tests
working-directory: ./packages/storybook
working-directory: ./apps/storybook
run: |
yarn exec concurrently -k -s first -n "SB,TEST" -c "magenta,blue" "yarn exec serve ./storybook-static -l 6006" "yarn exec wait-on tcp:6006 && yarn test"
@@ -222,7 +237,7 @@ jobs:
uses: actions/download-artifact@v3
with:
name: storybook
path: ./packages/storybook/storybook-static
path: ./apps/storybook/storybook-static
- name: Wait for Octobase Ready
run: |
@@ -243,7 +258,7 @@ jobs:
files: ./.coverage/lcov.info
flags: e2etest
name: affine
fail_ci_if_error: true
fail_ci_if_error: false
- name: Upload test results
if: ${{ failure() }}
@@ -253,6 +268,45 @@ jobs:
path: ./test-results
if-no-files-found: ignore
e2e-migration-test:
name: E2E Migration Test
runs-on: ubuntu-latest
environment: development
needs: [build-web]
steps:
- uses: actions/checkout@v3
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
- name: Download next static
uses: actions/download-artifact@v3
with:
name: next-js-static
path: ./apps/web/out
- name: Unzip
run: yarn unzip
working-directory: ./tests/affine-legacy/0.7.0-canary.18
- name: Run legacy playwright tests
run: yarn e2e --forbid-only
working-directory: ./tests/affine-legacy/0.7.0-canary.18
- name: Run vitest
run: yarn test
working-directory: ./tests/affine-legacy/0.7.0-canary.18
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-migration
path: ./tests/affine-legacy/0.7.0-canary.18/test-results
if-no-files-found: ignore
desktop-test:
name: Desktop Test
runs-on: ${{ matrix.spec.os }}
@@ -290,7 +344,7 @@ jobs:
target: x86_64-pc-windows-msvc,
test: true,
}
needs: [build-web]
needs: [build-web-desktop]
steps:
- uses: actions/checkout@v3
- name: Setup Node.js
@@ -301,6 +355,7 @@ jobs:
uses: ./.github/actions/build-rust
with:
target: ${{ matrix.spec.target }}
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Run unit tests
if: ${{ matrix.spec.test }}
shell: bash
@@ -339,7 +394,7 @@ jobs:
files: ./.coverage/lcov.info
flags: e2etest-${{ matrix.spec.os }}-${{ matrix.spec.arch }}
name: affine
fail_ci_if_error: true
fail_ci_if_error: false
- name: Upload test results
if: ${{ failure() }}
@@ -378,4 +433,61 @@ jobs:
files: ./.coverage/store/lcov.info
flags: unittest
name: affine
fail_ci_if_error: true
fail_ci_if_error: false
build-docker:
if: github.ref == 'refs/heads/master'
name: Build Docker
needs:
- build-web-desktop
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Download next static
uses: actions/download-artifact@v3
with:
name: next-js-static
path: ./apps/web/out
- name: Setup Git short hash
run: |
echo "GIT_SHORT_HASH=$(git rev-parse --short HEAD)" >> "$GITHUB_ENV"
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
logout: false
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Build front Dockerfile
uses: docker/build-push-action@v4
with:
context: .
push: true
pull: true
platforms: linux/amd64,linux/arm64
provenance: true
file: .github/deployment/front/Dockerfile
tags: ghcr.io/toeverything/affine-front:${{ env.GIT_SHORT_HASH }},ghcr.io/toeverything/affine-front:latest
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
package-install: false
- name: Install Node.js dependencies
run: yarn workspaces focus @affine/server --production
- name: Build graphql Dockerfile
uses: docker/build-push-action@v4
with:
context: .
push: true
pull: true
platforms: linux/amd64,linux/arm64
provenance: true
file: .github/deployment/node/Dockerfile
tags: ghcr.io/toeverything/affine-graphql:${{ env.GIT_SHORT_HASH }},ghcr.io/toeverything/affine-graphql:latest

View File

@@ -15,6 +15,7 @@ on:
push:
branches: [master]
pull_request:
merge_group:
# The branches below must be a subset of the branches above
branches: [master]

65
.github/workflows/helm-releaser.yml vendored Normal file

@@ -0,0 +1,65 @@
name: Release Charts
on:
push:
branches: [master]
pull_request:
branches: [master]
jobs:
release:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Checkout Helm chart repo
uses: actions/checkout@v3
with:
repository: toeverything/helm-charts
path: .helm-chart-repo
ref: gh-pages
token: ${{ secrets.HELM_RELEASER_TOKEN }}
- name: Install Helm
uses: azure/setup-helm@v3
- name: Install chart releaser
run: |
set -e
arch="$(dpkg --print-architecture)"
curl -s https://api.github.com/repos/helm/chart-releaser/releases/latest \
| yq --indent 0 --no-colors --input-format json --unwrapScalar \
".assets[] | select(.name | test("\""^chart-releaser_.+_linux_${arch}\.tar\.gz$"\"")) | .browser_download_url" \
| xargs curl -SsL \
| tar zxf - -C /usr/local/bin
- name: Package charts
working-directory: .helm-chart-repo
run: |
mkdir -p .cr-index
helm repo add bitnami https://charts.bitnami.com/bitnami
helm repo update
helm dependencies build ../.github/helm/affine-cloud
cr package ../.github/helm/affine-cloud
- name: Package charts
if: github.ref == 'refs/heads/master'
working-directory: .helm-chart-repo
run: |
set -ex
git config --local user.name "$GITHUB_ACTOR"
git config --local user.email "$GITHUB_ACTOR@users.noreply.github.com"
owner=$(cut -d '/' -f 1 <<< '${{ github.repository }}')
repo=helm-charts
git_hash=$(git rev-parse HEAD)
cr upload --commit "$git_hash" \
--git-repo "$repo" --owner "$owner" \
--token '${{ secrets.HELM_RELEASER_TOKEN }}' \
--skip-existing
cr index --git-repo "$repo" --owner "$owner" \
--token '${{ secrets.HELM_RELEASER_TOKEN }}' \
--index-path .cr-index --push


@@ -6,11 +6,13 @@ on:
paths:
- 'packages/i18n/**'
- '.github/workflows/languages-sync.yml'
- '!.github/actions/setup-node/action.yml'
pull_request_target:
branches: ['master']
paths:
- 'packages/i18n/**'
- '.github/workflows/languages-sync.yml'
- '!.github/actions/setup-node/action.yml'
workflow_dispatch:
jobs:


@@ -3,11 +3,15 @@ name: Build Canary Desktop App on Staging Branch
on:
push:
branches:
# 0.6.x-staging
- v[0-9]+.[0-9]+.x-staging
# 0.6.1-staging
- v[0-9]+.[0-9]+.[0-9]+-staging
paths-ignore:
- README.md
- .github/**
- '!.github/workflows/nightly-build.yml'
- '!.github/actions/setup-node/action.yml'
permissions:
actions: write
@@ -56,8 +60,8 @@ jobs:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
API_SERVER_PROFILE: prod
ENABLE_TEST_PROPERTIES: false
ENABLE_IMAGE_PREVIEW_MODAL: true
ENABLE_BOOKMARK_OPERATION: true
ENABLE_SQLITE_PROVIDER: false
RELEASE_VERSION: ${{ needs.set-build-version.outputs.version }}
- name: Upload Artifact (web-static)
@@ -113,6 +117,7 @@ jobs:
uses: ./.github/actions/build-rust
with:
target: ${{ matrix.spec.target }}
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Replace Version
run: ./scripts/set-version.sh ${{ needs.set-build-version.outputs.version }}
- uses: actions/download-artifact@v3


@@ -1,6 +1,9 @@
name: Release Desktop App
on:
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+-canary.[0-9]+'
workflow_dispatch:
inputs:
version:
@@ -28,14 +31,8 @@ permissions:
contents: write
security-events: write
concurrency:
# The concurrency group contains the workflow name and the branch name for
# pull requests or the commit hash for any other events.
group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.head_ref || github.sha }}
cancel-in-progress: true
env:
BUILD_TYPE: ${{ github.event.inputs.build-type }}
BUILD_TYPE: ${{ github.event.inputs.build-type || (github.ref_type == 'tag' && contains(github.ref, 'canary') && 'canary') }}
DEBUG: napi:*
APP_NAME: affine
MACOSX_DEPLOYMENT_TARGET: '10.13'
@@ -43,22 +40,31 @@ env:
jobs:
before-make:
runs-on: ubuntu-latest
environment: ${{ github.ref_name == 'master' && 'production' || 'development' }}
environment: production
outputs:
RELEASE_VERSION: ${{ steps.get-canary-version.outputs.RELEASE_VERSION }}
steps:
- uses: actions/checkout@v3
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Get canary version
id: get-canary-version
if: ${{ github.ref_type == 'tag' }}
run: |
TAG_VERSION=${GITHUB_REF#refs/tags/v}
PACKAGE_VERSION=$(node -p "require('./apps/electron/package.json').version")
if [ "$TAG_VERSION" != "$PACKAGE_VERSION" ]; then
echo "Tag version ($TAG_VERSION) does not match package.json version ($PACKAGE_VERSION)"
exit 1
fi
echo "RELEASE_VERSION=$(node -p "require('./apps/electron/package.json').version")" >> $GITHUB_OUTPUT
- name: generate-assets
run: yarn workspace @affine/electron generate-assets
env:
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
API_SERVER_PROFILE: prod
ENABLE_TEST_PROPERTIES: false
ENABLE_IMAGE_PREVIEW_MODAL: true
ENABLE_BOOKMARK_OPERATION: true
RELEASE_VERSION: ${{ github.event.inputs.version }}
RELEASE_VERSION: ${{ github.event.inputs.version || steps.get-canary-version.outputs.RELEASE_VERSION }}
- name: Upload Artifact (web-static)
uses: actions/upload-artifact@v3
@@ -67,7 +73,7 @@ jobs:
path: apps/electron/resources/web-static
make-distribution:
environment: ${{ github.ref_name == 'master' && 'production' || 'development' }}
environment: production
strategy:
# all combinations: macos-latest x64, macos-latest arm64, windows-latest x64, ubuntu-latest x64
matrix:
@@ -111,6 +117,7 @@ jobs:
uses: ./.github/actions/build-rust
with:
target: ${{ matrix.spec.target }}
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- uses: actions/download-artifact@v3
with:
name: before-make-web-static
@@ -157,7 +164,7 @@ jobs:
path: builds
release:
needs: make-distribution
needs: [before-make, make-distribution]
runs-on: ubuntu-latest
steps:
@@ -190,16 +197,16 @@ jobs:
cp ./apps/electron/scripts/generate-yml.js .
node generate-yml.js
env:
RELEASE_VERSION: ${{ github.event.inputs.version }}
RELEASE_VERSION: ${{ github.event.inputs.version || needs.before-make.outputs.RELEASE_VERSION }}
- name: Create Release Draft
uses: softprops/action-gh-release@v1
env:
GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}
with:
name: Desktop APP ${{ github.event.inputs.version }}
body: 'TODO: Add release notes here'
draft: ${{ github.event.inputs.is-draft }}
prerelease: ${{ github.event.inputs.is-pre-release }}
name: ${{ github.event.inputs.version || needs.before-make.outputs.RELEASE_VERSION }}
body: ''
draft: ${{ github.event.inputs.is-draft || true }}
prerelease: ${{ github.event.inputs.is-pre-release || needs.before-make.outputs.version }}
files: |
./VERSION
./*.zip

1
.gitignore vendored

@@ -55,6 +55,7 @@ Thumbs.db
.history
.next
.vercel
out/
storybook-static
i18n-generated.ts


@@ -2,7 +2,10 @@
. "$(dirname -- "$0")/_/husky.sh"
# check lockfile is up to date
yarn install
yarn install --mode=update-lockfile
# lint staged files
yarn exec lint-staged
# type check
yarn typecheck


@@ -1,4 +1,12 @@
pnpm-lock.yaml
yarn.lock
target
lib
test-results
packages/i18n/src/i18n-generated.ts
packages/graphql/src/graphql/index.ts
.next
out
dist
.yarn
tests/affine-legacy/0.7.0-canary.18/static
.github/helm

25
apps/README.md Normal file

@@ -0,0 +1,25 @@
# Apps structure
> This is the structure of the `apps` directory.
## docs
AFFiNE Developer Documentation using [waku](https://github.com/dai-shi/waku).
## electron
> `web` needs to be built before electron.
AFFiNE Desktop (macOS, Linux, and Windows distributions) using [Electron](https://www.electronjs.org/).
## server
Server using [Nest.js](https://nestjs.com/).
## storybook
Component previews using [Storybook](https://storybook.js.org/).
## web
AFFiNE Core Application using [React.js](https://reactjs.org/).
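For orientation, the sketch below shows how these apps are commonly driven via the repo's nx targets (the same commands appear in the CI workflows and CodeSandbox config in this changeset; flags are illustrative):
```sh
yarn install                          # install workspace dependencies
yarn nx dev @affine/web --port 8080   # run the core web app locally
yarn nx build @affine/web             # build web first, then package electron
yarn nx build @affine/storybook       # build storybook
yarn nx build @affine/docs            # build the developer docs
```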

17
apps/docs/entries.ts Normal file

@@ -0,0 +1,17 @@
import { defineRouter } from 'waku/router/server';
export default defineRouter(
async id => {
switch (id) {
case 'index': {
const { default: AppCreator } = await import('./src/app.js');
return AppCreator(id);
}
default:
return null;
}
},
async () => {
return ['index'];
}
);

36
apps/docs/index.html Normal file

@@ -0,0 +1,36 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<title>AFFiNE Developer Documentation</title>
<style>
@keyframes spinner {
to {
transform: rotate(360deg);
}
}
.spinner {
width: 36px;
height: 36px;
margin: auto;
border: 2px solid #ddd;
border-top-color: #222;
border-radius: 50%;
animation: spinner 1s linear infinite;
}
#root > .spinner {
margin-top: calc(50% - 18px);
}
</style>
</head>
<body>
<!--placeholder1-->
<div id="root">
<div class="spinner"></div>
</div>
<!--/placeholder1-->
<script src="./src/index.tsx" defer type="module"></script>
<!--placeholder2-->
<!--/placeholder2-->
</body>
</html>

35
apps/docs/package.json Normal file

@@ -0,0 +1,35 @@
{
"name": "@affine/docs",
"version": "0.7.0-canary.24",
"type": "module",
"private": true,
"scripts": {
"dev": "waku dev",
"build": "waku build",
"build:vercel": "waku build && cp -Lr ./dist/.vercel/output ./.vercel/"
},
"dependencies": {
"@affine/component": "workspace:*",
"@blocksuite/block-std": "0.0.0-20230629084521-542de4e8-nightly",
"@blocksuite/blocks": "0.0.0-20230629084521-542de4e8-nightly",
"@blocksuite/editor": "0.0.0-20230629084521-542de4e8-nightly",
"@blocksuite/global": "0.0.0-20230629084521-542de4e8-nightly",
"@blocksuite/lit": "0.0.0-20230629084521-542de4e8-nightly",
"@blocksuite/store": "0.0.0-20230629084521-542de4e8-nightly",
"express": "^4.18.2",
"jotai": "^2.2.1",
"react": "18.3.0-canary-8ec962d82-20230623",
"react-dom": "18.3.0-canary-8ec962d82-20230623",
"react-server-dom-webpack": "18.3.0-canary-8ec962d82-20230623",
"waku": "0.12.1"
},
"devDependencies": {
"@types/react": "^18.2.14",
"@types/react-dom": "^18.2.6",
"@vanilla-extract/css": "^1.11.1",
"@vanilla-extract/vite-plugin": "^3.8.2",
"autoprefixer": "^10.4.14",
"tailwindcss": "^3.3.2",
"typescript": "^5.1.5"
}
}


@@ -0,0 +1,6 @@
export default {
plugins: {
tailwindcss: {},
autoprefixer: {},
},
};

44
apps/docs/src/app.tsx Normal file

@@ -0,0 +1,44 @@
/// <reference types="vite/client" />
'use server';
import { existsSync, readFileSync } from 'node:fs';
import { resolve } from 'node:path';
import { fileURLToPath } from 'node:url';
import type { ReactElement } from 'react';
import { lazy } from 'react';
import { Sidebar } from './components/sidebar/index.js';
import { saveFile } from './server-fns.js';
const Editor = lazy(() =>
import('./components/editor.js').then(({ Editor }) => ({ default: Editor }))
);
const __dirname = fileURLToPath(new URL('.', import.meta.url));
const AppCreator = (pathname: string) =>
function App(): ReactElement {
let path = resolve(__dirname, 'pages', 'binary');
if (!existsSync(path)) {
path = resolve(__dirname, '..', '..', 'src', 'pages', 'binary');
}
const buffer = [...readFileSync(path)];
return (
<div className="flex flex-col-reverse sm:flex-row h-screen">
<nav className="w-full sm:w-64">
<Sidebar />
</nav>
<main className="flex-1 p-6 w-full sm:w-[calc(100%-16rem)] overflow-scroll">
<Editor
workspaceId={pathname}
pageId="1"
onSave={saveFile}
binary={buffer}
/>
</main>
</div>
);
};
export default AppCreator;

11
apps/docs/src/atom.ts Normal file

@@ -0,0 +1,11 @@
import { __unstableSchemas, AffineSchemas } from '@blocksuite/blocks/models';
import { atom } from 'jotai/vanilla';
export const workspaceAtom = atom(async () => {
const { Workspace } = await import('@blocksuite/store');
return new Workspace({
id: 'test-workspace',
})
.register(AffineSchemas)
.register(__unstableSchemas);
});


@@ -0,0 +1,53 @@
'use client';
import '@blocksuite/editor/themes/affine.css';
import { BlockSuiteEditor } from '@affine/component/block-suite-editor';
import type { Page } from '@blocksuite/store';
import { useAtomValue } from 'jotai/react';
import type { ReactElement } from 'react';
import { use } from 'react';
import { applyUpdate } from 'yjs';
import { workspaceAtom } from '../atom.js';
export type EditorProps = {
workspaceId: string;
pageId: string;
binary?: number[];
onSave: (binary: any) => Promise<void>;
};
export const Editor = (props: EditorProps): ReactElement => {
const workspace = useAtomValue(workspaceAtom);
let page = workspace.getPage('page0') as Page;
if (!page) {
page = workspace.createPage({
id: 'page0',
});
}
if (props.binary && !page.root) {
use(
page.waitForLoaded().then(() => {
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
applyUpdate(page._ySpaceDoc, new Uint8Array(props.binary as number[]));
})
);
if (import.meta.env.MODE !== 'development') {
page.awarenessStore.setReadonly(page, true);
}
} else if (!page.root) {
use(
page.waitForLoaded().then(() => {
const pageBlockId = page.addBlock('affine:page', {
title: new page.Text(''),
});
page.addBlock('affine:surface', {}, pageBlockId);
const noteBlockId = page.addBlock('affine:note', {}, pageBlockId);
page.addBlock('affine:paragraph', {}, noteBlockId);
})
);
}
return <BlockSuiteEditor page={page} mode="page" onInit={() => {}} />;
};


@@ -0,0 +1,31 @@
'use server';
import { lazy } from 'react';
import { saveFile } from '../../server-fns.js';
const SaveToLocal = lazy(() =>
import('./save-to-local.js').then(({ SaveToLocal }) => ({
default: SaveToLocal,
}))
);
export const Sidebar = () => {
return (
<div
className="h-screen text-black overflow-y-auto"
style={{
backgroundColor: '#f9f7f7',
}}
>
<a href="/">
<div className="flex items-center justify-center h-16 font-bold">
AFFiNE
</div>
</a>
{import.meta.env.MODE === 'development' && (
<SaveToLocal saveFile={saveFile} />
)}
</div>
);
};


@@ -0,0 +1,28 @@
'use client';
import { assertExists } from '@blocksuite/global/utils';
import { useAtomValue } from 'jotai/react';
import { useCallback } from 'react';
import { encodeStateAsUpdate } from 'yjs';
import { workspaceAtom } from '../../atom.js';
type SaveToLocalProps = {
saveFile: (update: number[]) => void;
};
export const SaveToLocal = (props: SaveToLocalProps) => {
const workspace = useAtomValue(workspaceAtom);
const saveFile = props.saveFile;
const onSave = useCallback(() => {
const page = workspace.getPage('page0');
assertExists(page);
saveFile([...encodeStateAsUpdate(page.spaceDoc)]);
}, [saveFile, workspace]);
return (
<div>
<div className="flex items-center justify-center h-16 font-bold">
<button onClick={onSave}>Save to Local</button>
</div>
</div>
);
};

3
apps/docs/src/index.css Normal file

@@ -0,0 +1,3 @@
@tailwind base;
@tailwind components;
@tailwind utilities;

14
apps/docs/src/index.tsx Normal file

@@ -0,0 +1,14 @@
import '@blocksuite/editor/themes/affine.css';
import './index.css';
import { StrictMode } from 'react';
import { createRoot } from 'react-dom/client';
import { Router } from 'waku/router/client';
const root = createRoot(document.getElementById('root') as HTMLElement);
root.render(
<StrictMode>
<Router />
</StrictMode>
);

BIN
apps/docs/src/pages/binary Normal file

Binary file not shown.


@@ -0,0 +1,10 @@
'use server';
import { writeFile } from 'node:fs/promises';
import { fileURLToPath } from 'node:url';
const __dirname = fileURLToPath(new URL('.', import.meta.url));
export async function saveFile(binary: any) {
const data = new Uint8Array(binary);
await writeFile(__dirname + 'pages' + '/binary', data);
}


@@ -0,0 +1,8 @@
/** @type {import('tailwindcss').Config} */
export default {
content: ['./index.html', './src/**/*.{ts,tsx}'],
theme: {
extend: {},
},
plugins: [],
};

24
apps/docs/tsconfig.json Normal file

@@ -0,0 +1,24 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"moduleResolution": "Node16",
"strict": true,
"target": "esnext",
"downlevelIteration": true,
"esModuleInterop": true,
"module": "NodeNext",
"skipLibCheck": true,
"noUncheckedIndexedAccess": true,
"exactOptionalPropertyTypes": true,
"jsx": "react-jsx"
},
"include": ["src", "entries.ts"],
"references": [
{
"path": "./tsconfig.node.json"
},
{
"path": "../../packages/component"
}
]
}


@@ -0,0 +1,14 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"composite": true,
"target": "ESNext",
"module": "ESNext",
"resolveJsonModule": true,
"moduleResolution": "Node16",
"allowSyntheticDefaultImports": true,
"outDir": "dist/scripts",
"rootDir": "."
},
"include": ["vite.config.ts", "vite.prod.config.ts"]
}

13
apps/docs/vite.config.ts Normal file

@@ -0,0 +1,13 @@
import path from 'node:path';
import url from 'node:url';
import { vanillaExtractPlugin } from '@vanilla-extract/vite-plugin';
import { defineConfig } from 'waku/config';
export default defineConfig({
root: path.dirname(url.fileURLToPath(import.meta.url)),
plugins: [vanillaExtractPlugin()],
build: {
target: 'esnext',
},
});


@@ -52,6 +52,8 @@ module.exports = {
teamId: process.env.APPLE_TEAM_ID,
}
: undefined,
// We need the following line for updater
extraResource: ['./resources/app-update.yml'],
},
makers: [
{


@@ -1,7 +1,7 @@
{
"name": "@affine/electron",
"private": true,
"version": "0.7.0-canary.16",
"version": "0.7.0-canary.24",
"author": "affine",
"repository": {
"url": "https://github.com/toeverything/AFFiNE",
@@ -29,34 +29,35 @@
"devDependencies": {
"@affine-test/kit": "workspace:*",
"@affine/native": "workspace:*",
"@blocksuite/blocks": "0.0.0-20230607055421-9b20fcaf-nightly",
"@blocksuite/editor": "0.0.0-20230607055421-9b20fcaf-nightly",
"@blocksuite/lit": "0.0.0-20230607055421-9b20fcaf-nightly",
"@blocksuite/store": "0.0.0-20230607055421-9b20fcaf-nightly",
"@electron-forge/cli": "^6.1.1",
"@electron-forge/core": "^6.1.1",
"@electron-forge/core-utils": "^6.1.1",
"@electron-forge/maker-deb": "^6.1.1",
"@electron-forge/maker-dmg": "^6.1.1",
"@electron-forge/maker-squirrel": "^6.1.1",
"@electron-forge/maker-zip": "^6.1.1",
"@electron-forge/shared-types": "^6.1.1",
"@electron/remote": "2.0.9",
"@blocksuite/blocks": "0.0.0-20230629084521-542de4e8-nightly",
"@blocksuite/editor": "0.0.0-20230629084521-542de4e8-nightly",
"@blocksuite/lit": "0.0.0-20230629084521-542de4e8-nightly",
"@blocksuite/store": "0.0.0-20230629084521-542de4e8-nightly",
"@electron-forge/cli": "^6.2.1",
"@electron-forge/core": "^6.2.1",
"@electron-forge/core-utils": "^6.2.1",
"@electron-forge/maker-deb": "^6.2.1",
"@electron-forge/maker-dmg": "^6.2.1",
"@electron-forge/maker-squirrel": "^6.2.1",
"@electron-forge/maker-zip": "^6.2.1",
"@electron-forge/shared-types": "^6.2.1",
"@electron/remote": "2.0.10",
"@toeverything/infra": "workspace:*",
"@types/fs-extra": "^11.0.1",
"@types/uuid": "^9.0.1",
"@types/uuid": "^9.0.2",
"cross-env": "7.0.3",
"electron": "=25.0.1",
"electron": "^25.2.0",
"electron-log": "^5.0.0-beta.24",
"electron-squirrel-startup": "1.0.0",
"electron-window-state": "^5.0.3",
"esbuild": "^0.17.19",
"esbuild": "^0.18.9",
"fs-extra": "^11.1.1",
"jotai": "^2.1.1",
"jotai": "^2.2.1",
"playwright": "=1.33.0",
"ts-node": "^10.9.1",
"undici": "^5.22.1",
"uuid": "^9.0.0",
"which": "^3.0.1",
"zx": "^7.2.2"
},
"dependencies": {
@@ -67,7 +68,7 @@
"lodash-es": "^4.17.21",
"nanoid": "^4.0.2",
"rxjs": "^7.8.1",
"yjs": "^13.6.1"
"yjs": "^13.6.6"
},
"build": {
"protocols": [

View File

@@ -12,6 +12,7 @@ import type { PlaywrightTestConfig } from '@playwright/test';
*/
const config: PlaywrightTestConfig = {
testDir: './tests',
testIgnore: '**/lib/**',
fullyParallel: true,
timeout: process.env.CI ? 50_000 : 30_000,
use: {

View File

@@ -5,6 +5,7 @@ import path, { resolve } from 'node:path';
import electronPath from 'electron';
import * as esbuild from 'esbuild';
import which from 'which';
import { config, electronDir, rootDir } from './common.mjs';
@@ -67,9 +68,9 @@ function spawnOrReloadElectron() {
}
const common = config();
const yarnPath = which.sync('yarn');
async function watchPlugins() {
spawn('yarn', ['dev'], {
spawn(yarnPath, ['dev'], {
stdio: 'inherit',
cwd: resolve(rootDir, './packages/plugin-infra'),
});

View File

@@ -20,14 +20,31 @@ afterEach(async () => {
await fs.remove(tmpDir);
});
let testYDoc: Y.Doc;
let testYSubDoc: Y.Doc;
function getTestUpdates() {
const testYDoc = new Y.Doc();
testYDoc = new Y.Doc();
const yText = testYDoc.getText('test');
yText.insert(0, 'hello');
testYSubDoc = new Y.Doc();
testYDoc.getMap('subdocs').set('test-subdoc', testYSubDoc);
const updates = Y.encodeStateAsUpdate(testYDoc);
return updates;
}
function getTestSubDocUpdates() {
const yText = testYSubDoc.getText('test');
yText.insert(0, 'hello');
const updates = Y.encodeStateAsUpdate(testYSubDoc);
return updates;
}
test('can create new db file if not exists', async () => {
const { openWorkspaceDatabase } = await import('../workspace-db-adapter');
const workspaceId = v4();
@@ -68,6 +85,31 @@ test('on applyUpdate (from renderer), will trigger update', async () => {
await db.destroy();
});
test('on applyUpdate (from renderer, subdoc), will trigger update', async () => {
const { openWorkspaceDatabase } = await import('../workspace-db-adapter');
const workspaceId = v4();
const onUpdate = vi.fn();
const insertUpdates = vi.fn();
const db = await openWorkspaceDatabase(workspaceId);
db.applyUpdate(getTestUpdates(), 'renderer');
db.db!.insertUpdates = insertUpdates;
db.update$.subscribe(onUpdate);
const subdocUpdates = getTestSubDocUpdates();
db.applyUpdate(subdocUpdates, 'renderer', testYSubDoc.guid);
expect(onUpdate).toHaveBeenCalled();
expect(insertUpdates).toHaveBeenCalledWith([
{
docId: testYSubDoc.guid,
data: subdocUpdates,
},
]);
await db.destroy();
});
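
The subdoc assertions above lean on a Yjs behavior worth spelling out: a subdoc is a full `Y.Doc` stored inside the root doc, addressed by its own `guid`, and its updates are encoded separately from the root's. A minimal sketch using only `yjs`:

```ts
import * as Y from 'yjs';

const root = new Y.Doc();
const subdoc = new Y.Doc();
root.getMap('subdocs').set('my-subdoc', subdoc);

subdoc.getText('test').insert(0, 'hello');

// Each doc encodes its own updates; the root update only references the
// subdoc and does not contain its content.
const rootUpdate = Y.encodeStateAsUpdate(root);
const subdocUpdate = Y.encodeStateAsUpdate(subdoc);
console.log(subdoc.guid, rootUpdate.byteLength, subdocUpdate.byteLength);
```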
test('on applyUpdate (from external), will trigger update & send external update event', async () => {
const { openWorkspaceDatabase } = await import('../workspace-db-adapter');
const workspaceId = v4();

View File

@@ -1,4 +1,4 @@
import { SqliteConnection } from '@affine/native';
import { type InsertRow, SqliteConnection } from '@affine/native';
import { logger } from '../logger';
@@ -79,21 +79,34 @@ export abstract class BaseSQLiteAdapter {
}
}
async getUpdates() {
async getUpdates(docId?: string) {
try {
if (!this.db) {
logger.warn(`${this.path} is not connected`);
return [];
}
return await this.db.getUpdates();
return await this.db.getUpdates(docId);
} catch (error) {
logger.error('getUpdates', error);
return [];
}
}
async getAllUpdates() {
try {
if (!this.db) {
logger.warn(`${this.path} is not connected`);
return [];
}
return await this.db.getAllUpdates();
} catch (error) {
logger.error('getAllUpdates', error);
return [];
}
}
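
For readers following the types: judging from the `insertUpdates` expectation in the test earlier in this diff, `InsertRow` from `@affine/native` pairs an encoded update with an optional doc id, roughly:

```ts
// Assumed shape, inferred from how insertUpdates is called in the tests;
// the authoritative definition lives in @affine/native.
interface InsertRow {
  docId?: string; // undefined means the root workspace doc
  data: Uint8Array; // an encoded Yjs update
}
```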
// add a single update to SQLite
async addUpdateToSQLite(updates: Uint8Array[]) {
async addUpdateToSQLite(updates: InsertRow[]) {
// batch writes instead of writing per keystroke?
try {
if (!this.db) {

View File

@@ -7,13 +7,17 @@ export * from './ensure-db';
export * from './subjects';
export const dbHandlers = {
getDocAsUpdates: async (id: string) => {
const workspaceDB = await ensureSQLiteDB(id);
return workspaceDB.getDocAsUpdates();
getDocAsUpdates: async (workspaceId: string, subdocId?: string) => {
const workspaceDB = await ensureSQLiteDB(workspaceId);
return workspaceDB.getDocAsUpdates(subdocId);
},
applyDocUpdate: async (id: string, update: Uint8Array) => {
const workspaceDB = await ensureSQLiteDB(id);
return workspaceDB.applyUpdate(update);
applyDocUpdate: async (
workspaceId: string,
update: Uint8Array,
subdocId?: string
) => {
const workspaceDB = await ensureSQLiteDB(workspaceId);
return workspaceDB.applyUpdate(update, 'renderer', subdocId);
},
addBlob: async (workspaceId: string, key: string, data: Uint8Array) => {
const workspaceDB = await ensureSQLiteDB(workspaceId);
@@ -38,7 +42,11 @@ export const dbHandlers = {
export const dbEvents = {
onExternalUpdate: (
fn: (update: { workspaceId: string; update: Uint8Array }) => void
fn: (update: {
workspaceId: string;
update: Uint8Array;
docId?: string;
}) => void
) => {
const sub = dbSubjects.externalUpdate.subscribe(fn);
return () => {

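On the renderer side these handlers arrive through the preload bridge; a hedged sketch of what a caller does with the new subdoc-aware signatures (`apis` is a stand-in for whatever object the preload script actually exposes, not a real export):

```ts
// Hypothetical renderer-side usage of the handlers above.
declare const apis: {
  db: {
    getDocAsUpdates(
      workspaceId: string,
      subdocId?: string
    ): Promise<Uint8Array | null>;
    applyDocUpdate(
      workspaceId: string,
      update: Uint8Array,
      subdocId?: string
    ): Promise<void>;
  };
};

async function loadSubdoc(workspaceId: string, subdocId: string) {
  // Root doc and subdocs are now fetched and written separately.
  const update = await apis.db.getDocAsUpdates(workspaceId, subdocId);
  // apply `update` to a local Y.Doc here; local edits flow back via
  // apis.db.applyDocUpdate(workspaceId, localUpdate, subdocId).
  return update;
}
```
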
View File

@@ -1,6 +1,6 @@
import assert from 'node:assert';
import type { SqliteConnection } from '@affine/native';
import type { InsertRow } from '@affine/native';
import { debounce } from 'lodash-es';
import * as Y from 'yjs';
@@ -8,19 +8,19 @@ import { logger } from '../logger';
import type { YOrigin } from '../type';
import { getWorkspaceMeta } from '../workspace';
import { BaseSQLiteAdapter } from './base-db-adapter';
import { mergeUpdate } from './merge-update';
import type { WorkspaceSQLiteDB } from './workspace-db-adapter';
const FLUSH_WAIT_TIME = 5000;
const FLUSH_MAX_WAIT_TIME = 10000;
// todo: trim db when it is too big
export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
role = 'secondary';
yDoc = new Y.Doc();
firstConnected = false;
destroyed = false;
updateQueue: Uint8Array[] = [];
updateQueue: { data: Uint8Array; docId?: string }[] = [];
unsubscribers = new Set<() => void>();
@@ -29,10 +29,23 @@ export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
public upstream: WorkspaceSQLiteDB
) {
super(path);
this.setupAndListen();
this.init();
logger.debug('[SecondaryWorkspaceSQLiteDB] created', this.workspaceId);
}
getDoc(docId?: string) {
if (!docId) {
return this.yDoc;
}
// this should be pretty fast and we don't need to cache it
for (const subdoc of this.yDoc.subdocs) {
if (subdoc.guid === docId) {
return subdoc;
}
}
return null;
}
override async destroy() {
await this.flushUpdateQueue();
this.unsubscribers.forEach(unsub => unsub());
@@ -47,7 +60,7 @@ export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
// do not update db immediately, instead, push to a queue
// and flush the queue in a future time
async addUpdateToUpdateQueue(db: SqliteConnection, update: Uint8Array) {
async addUpdateToUpdateQueue(update: InsertRow) {
this.updateQueue.push(update);
await this.debouncedFlush();
}
@@ -101,55 +114,82 @@ export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
}
}
setupAndListen() {
if (this.firstConnected) {
setupListener(docId?: string) {
const doc = this.getDoc(docId);
if (!doc) {
return;
}
this.firstConnected = true;
const onUpstreamUpdate = (update: Uint8Array, origin: YOrigin) => {
if (origin === 'renderer') {
// update to upstream yDoc should be replicated to self yDoc
this.applyUpdate(update, 'upstream');
this.applyUpdate(update, 'upstream', docId);
}
};
const onSelfUpdate = async (update: Uint8Array, origin: YOrigin) => {
// for self update from upstream, we need to push it to external DB
if (origin === 'upstream' && this.db) {
await this.addUpdateToUpdateQueue(this.db, update);
if (origin === 'upstream') {
await this.addUpdateToUpdateQueue({
data: update,
docId,
});
}
if (origin === 'self') {
this.upstream.applyUpdate(update, 'external');
this.upstream.applyUpdate(update, 'external', docId);
}
};
const onSubdocs = ({ added }: { added: Set<Y.Doc> }) => {
added.forEach(subdoc => {
this.setupListener(subdoc.guid);
});
};
// listen to upstream update
this.upstream.yDoc.on('update', onUpstreamUpdate);
this.yDoc.on('update', onSelfUpdate);
this.yDoc.on('subdocs', onSubdocs);
this.unsubscribers.add(() => {
this.upstream.yDoc.off('update', onUpstreamUpdate);
this.yDoc.off('update', onSelfUpdate);
this.yDoc.off('subdocs', onSubdocs);
});
this.run(() => {
// apply all updates from upstream
const upstreamUpdate = this.upstream.getDocAsUpdates();
// to initialize the yDoc, we need to apply all updates from the db
this.applyUpdate(upstreamUpdate, 'upstream');
})
.then(() => {
logger.debug('run success');
})
.catch(err => {
logger.error('run error', err);
});
}
applyUpdate = (data: Uint8Array, origin: YOrigin = 'upstream') => {
Y.applyUpdate(this.yDoc, data, origin);
init() {
if (this.firstConnected) {
return;
}
this.firstConnected = true;
this.setupListener();
// apply all updates from upstream
// we assume here that the upstream yDoc is already synced
const syncUpstreamDoc = (docId?: string) => {
const update = this.upstream.getDocAsUpdates(docId);
if (update) {
this.applyUpdate(update, 'upstream');
}
};
syncUpstreamDoc();
this.upstream.yDoc.subdocs.forEach(subdoc => {
syncUpstreamDoc(subdoc.guid);
});
}
applyUpdate = (
data: Uint8Array,
origin: YOrigin = 'upstream',
docId?: string
) => {
const doc = this.getDoc(docId);
if (doc) {
Y.applyUpdate(doc, data, origin);
} else {
logger.warn('applyUpdate: doc not found', docId);
}
};
// TODO: have a better solution to handle blobs
@@ -186,23 +226,33 @@ export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
async pull() {
const start = performance.now();
assert(this.upstream.db, 'upstream db should be connected');
const updates = await this.run(async () => {
const rows = await this.run(async () => {
// TODO: no need to get all updates, just get the latest ones (using a cursor, etc)?
await this.syncBlobs();
return (await this.getUpdates()).map(update => update.data);
return await this.getAllUpdates();
});
if (!updates || this.destroyed) {
if (!rows || this.destroyed) {
return;
}
const merged = mergeUpdate(updates);
this.applyUpdate(merged, 'self');
// apply root doc first
rows.forEach(row => {
if (!row.docId) {
this.applyUpdate(row.data, 'self');
}
});
rows.forEach(row => {
if (row.docId) {
this.applyUpdate(row.data, 'self', row.docId);
}
});
logger.debug(
'pull external updates',
this.path,
updates.length,
rows.length,
(performance.now() - start).toFixed(2),
'ms'
);

View File

@@ -1,5 +1,9 @@
import { Subject } from 'rxjs';
export const dbSubjects = {
externalUpdate: new Subject<{ workspaceId: string; update: Uint8Array }>(),
externalUpdate: new Subject<{
workspaceId: string;
update: Uint8Array;
docId?: string;
}>(),
};

View File

@@ -1,3 +1,5 @@
import type { InsertRow } from '@affine/native';
import { debounce } from 'lodash-es';
import { Subject } from 'rxjs';
import * as Y from 'yjs';
@@ -5,9 +7,10 @@ import { logger } from '../logger';
import type { YOrigin } from '../type';
import { getWorkspaceMeta } from '../workspace';
import { BaseSQLiteAdapter } from './base-db-adapter';
import { mergeUpdate } from './merge-update';
import { dbSubjects } from './subjects';
const TRIM_SIZE = 500;
export class WorkspaceSQLiteDB extends BaseSQLiteAdapter {
role = 'primary';
yDoc = new Y.Doc();
@@ -28,33 +31,76 @@ export class WorkspaceSQLiteDB extends BaseSQLiteAdapter {
this.firstConnected = false;
}
getDoc(docId?: string) {
if (!docId) {
return this.yDoc;
}
// this should be pretty fast and we don't need to cache it
for (const subdoc of this.yDoc.subdocs) {
if (subdoc.guid === docId) {
return subdoc;
}
}
return null;
}
getWorkspaceName = () => {
return this.yDoc.getMap('space:meta').get('name') as string;
return this.yDoc.getMap('meta').get('name') as string;
};
setupListener(docId?: string) {
const doc = this.getDoc(docId);
if (doc) {
const onUpdate = async (update: Uint8Array, origin: YOrigin) => {
const insertRows = [{ data: update, docId }];
if (origin === 'renderer') {
await this.addUpdateToSQLite(insertRows);
} else if (origin === 'external') {
dbSubjects.externalUpdate.next({
workspaceId: this.workspaceId,
update,
docId,
});
await this.addUpdateToSQLite(insertRows);
logger.debug('external update', this.workspaceId);
}
};
const onSubdocs = ({ added }: { added: Set<Y.Doc> }) => {
added.forEach(subdoc => {
this.setupListener(subdoc.guid);
});
};
doc.on('update', onUpdate);
doc.on('subdocs', onSubdocs);
} else {
logger.error('setupListener: doc not found', docId);
}
}
async init() {
const db = await super.connectIfNeeded();
if (!this.firstConnected) {
this.yDoc.on('update', async (update: Uint8Array, origin: YOrigin) => {
if (origin === 'renderer') {
await this.addUpdateToSQLite([update]);
} else if (origin === 'external') {
dbSubjects.externalUpdate.next({
workspaceId: this.workspaceId,
update,
});
await this.addUpdateToSQLite([update]);
logger.debug('external update', this.workspaceId);
}
});
this.setupListener();
}
const updates = await this.getUpdates();
const merged = mergeUpdate(updates.map(update => update.data));
const updates = await this.getAllUpdates();
// to initialize the yDoc, we need to apply all updates from the db
this.applyUpdate(merged, 'self');
// apply root first (without ID).
// subdocs become available after the root update is applied
updates.forEach(update => {
if (!update.docId) {
this.applyUpdate(update.data, 'self');
}
});
// then, for all subdocs, apply the updates
updates.forEach(update => {
if (update.docId) {
this.applyUpdate(update.data, 'self', update.docId);
}
});
this.firstConnected = true;
this.update$.next();
@@ -62,18 +108,32 @@ export class WorkspaceSQLiteDB extends BaseSQLiteAdapter {
return db;
}
getDocAsUpdates = () => {
return Y.encodeStateAsUpdate(this.yDoc);
// unlike getUpdates, this returns the whole doc state encoded as a single update
getDocAsUpdates = (docId?: string) => {
const doc = docId ? this.getDoc(docId) : this.yDoc;
if (doc) {
return Y.encodeStateAsUpdate(doc);
}
return null;
};
// non-blocking and use yDoc to validate the update
// after that, the update is added to the db
applyUpdate = (data: Uint8Array, origin: YOrigin = 'renderer') => {
applyUpdate = (
data: Uint8Array,
origin: YOrigin = 'renderer',
docId?: string
) => {
// todo: trim the updates when the number of records is too large
// 1. store the current ydoc state in the db
// 2. then delete the old updates
// yjs-idb will always trim the db for the first time after DB is loaded
Y.applyUpdate(this.yDoc, data, origin);
const doc = this.getDoc(docId);
if (doc) {
Y.applyUpdate(doc, data, origin);
} else {
logger.warn('applyUpdate: doc not found', docId);
}
};
override async addBlob(key: string, value: Uint8Array) {
@@ -87,10 +147,30 @@ export class WorkspaceSQLiteDB extends BaseSQLiteAdapter {
await super.deleteBlob(key);
}
override async addUpdateToSQLite(data: Uint8Array[]) {
override async addUpdateToSQLite(data: InsertRow[]) {
this.update$.next();
data.forEach(row => {
this.trimWhenNecessary(row.docId)?.catch(err => {
logger.error('trimWhenNecessary failed', err);
});
});
await super.addUpdateToSQLite(data);
}
trimWhenNecessary = debounce(async (docId?: string) => {
if (this.firstConnected) {
const count = (await this.db?.getUpdatesCount(docId)) ?? 0;
if (count > TRIM_SIZE) {
logger.debug(`trim ${this.workspaceId}:${docId} ${count}`);
const update = this.getDocAsUpdates(docId);
if (update) {
const insertRows = [{ data: update, docId }];
await this.db?.replaceUpdates(docId, insertRows);
logger.debug(`trim ${this.workspaceId}:${docId} successfully`);
}
}
}
}, 1000);
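
The debounced trim is effectively compaction: once a doc accumulates more than `TRIM_SIZE` (500) update rows, the whole doc state is re-encoded as one update and swapped in for the old rows. Conceptually:

```ts
import * as Y from 'yjs';

// Sketch of the compaction above, not the real @affine/native API.
declare const doc: Y.Doc;

// N incremental rows for this doc are replaced by a single snapshot row:
const snapshot = Y.encodeStateAsUpdate(doc);
const rows = [{ docId: doc.guid, data: snapshot }];
// replaceUpdates(docId, rows) then deletes the old rows and inserts the
// snapshot in their place, keeping the table bounded.
console.log(rows.length, snapshot.byteLength);
```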
}
export async function openWorkspaceDatabase(workspaceId: string) {

View File

@@ -1,30 +1,8 @@
import type { EventBasedChannel } from 'async-call-rpc';
import { AsyncCall } from 'async-call-rpc';
import { events, handlers } from './exposed';
import { logger } from './logger';
const createMessagePortMainChannel = (
connection: Electron.MessagePortMain
): EventBasedChannel => {
return {
on(listener) {
const f = (e: Electron.MessageEvent) => {
listener(e.data);
};
connection.on('message', f);
// MUST start the connection to receive messages
connection.start();
return () => {
connection.off('message', f);
};
},
send(data) {
connection.postMessage(data);
},
};
};
function setupRendererConnection(rendererPort: Electron.MessagePortMain) {
const flattenedHandlers = Object.entries(handlers).flatMap(
([namespace, namespaceHandlers]) => {
@@ -55,7 +33,22 @@ function setupRendererConnection(rendererPort: Electron.MessagePortMain) {
const rpc = AsyncCall<PeersAPIs.RendererToHelper>(
Object.fromEntries(flattenedHandlers),
{
channel: createMessagePortMainChannel(rendererPort),
channel: {
on(listener) {
const f = (e: Electron.MessageEvent) => {
listener(e.data);
};
rendererPort.on('message', f);
// MUST start the connection to receive messages
rendererPort.start();
return () => {
rendererPort.off('message', f);
};
},
send(data) {
rendererPort.postMessage(data);
},
},
log: false,
}
);

View File

@@ -1,26 +1,7 @@
import { AsyncCall, type EventBasedChannel } from 'async-call-rpc';
import { AsyncCall } from 'async-call-rpc';
import { getExposedMeta } from './exposed';
function createMessagePortMainChannel(
connection: Electron.ParentPort
): EventBasedChannel {
return {
on(listener) {
const f = (e: Electron.MessageEvent) => {
listener(e.data);
};
connection.on('message', f);
return () => {
connection.off('message', f);
};
},
send(data) {
connection.postMessage(data);
},
};
}
const helperToMainServer: PeersAPIs.HelperToMain = {
getMeta: () => getExposedMeta(),
};
@@ -29,5 +10,18 @@ export const mainRPC = AsyncCall<PeersAPIs.MainToHelper>(helperToMainServer, {
strict: {
unknownMessage: false,
},
channel: createMessagePortMainChannel(process.parentPort),
channel: {
on(listener) {
const f = (e: Electron.MessageEvent) => {
listener(e.data);
};
process.parentPort.on('message', f);
return () => {
process.parentPort.off('message', f);
};
},
send(data) {
process.parentPort.postMessage(data);
},
},
});

View File

@@ -92,7 +92,7 @@ class HelperProcessManager {
...appMethods,
};
const server = AsyncCall<PeersAPIs.HelperToMain>(mainToHelperServer, {
this.rpc = AsyncCall<PeersAPIs.HelperToMain>(mainToHelperServer, {
strict: {
// the channel is shared for other purposes as well, so we do not want to
// restrict it to JSON-RPC messages only
@@ -100,7 +100,6 @@ class HelperProcessManager {
},
channel: new MessageEventChannel(this.#process),
});
this.rpc = server;
}
}

View File

@@ -66,6 +66,5 @@ app
.then(ensureHelperProcess)
.then(restoreOrCreateWindow)
.then(createApplicationMenu)
.then()
.then(registerUpdater)
.catch(e => console.error('Failed create window:', e));

View File

@@ -38,7 +38,7 @@ export function registerPlugin() {
'./bookmark-block/index.mjs'
);
logger.info('bookmark plugin path:', bookmarkPluginPath);
import(bookmarkPluginPath);
import('file://' + bookmarkPluginPath);
let dispose: () => void = () => {
// noop
};
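
The `'file://' + path` concatenation works for POSIX paths but breaks on Windows drive letters, where a raw backslash path is not a valid URL. Node ships a helper for exactly this, so a safer spelling of the same import would be:

```ts
import { pathToFileURL } from 'node:url';

declare const bookmarkPluginPath: string;

// Same intent as import('file://' + bookmarkPluginPath), but yields a
// correct file: URL on every platform, including Windows.
await import(pathToFileURL(bookmarkPluginPath).href);
```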

View File

@@ -34,7 +34,6 @@ export function registerProtocol() {
const url = request.url.replace(/^file:\/\//, '');
const realpath = toAbsolutePath(url);
callback(realpath);
console.log('interceptFileProtocol realpath', request.url, realpath);
return true;
});

View File

@@ -34,7 +34,7 @@ import('@toeverything/plugin-infra/manager')
console.log('import bookmark plugin', bookmarkPluginPath);
import(bookmarkPluginPath).catch(console.log);
import('file://' + bookmarkPluginPath).catch(console.log);
rootStore.sub(affinePluginsAtom, () => {
const plugins = rootStore.get(affinePluginsAtom);
Object.values(plugins).forEach(plugin => {

View File

@@ -5,7 +5,7 @@ import fs from 'fs-extra';
import { test } from './fixture';
test('check workspace has a DB file', async ({ appInfo, workspace }) => {
test.skip('check workspace has a DB file', async ({ appInfo, workspace }) => {
const w = await workspace.current();
const dbPath = path.join(
appInfo.sessionData,
@@ -17,7 +17,7 @@ test('check workspace has a DB file', async ({ appInfo, workspace }) => {
expect(await fs.exists(dbPath)).toBe(true);
});
test('move workspace db file', async ({ page, appInfo, workspace }) => {
test.skip('move workspace db file', async ({ page, appInfo, workspace }) => {
const w = await workspace.current();
const settingButton = page.getByTestId('slider-bar-workspace-setting-button');
// goto settings
@@ -42,7 +42,7 @@ test('move workspace db file', async ({ page, appInfo, workspace }) => {
expect(files.some(f => f.endsWith('.affine'))).toBe(true);
});
test('export then add', async ({ page, appInfo, workspace }) => {
test.skip('export then add', async ({ page, appInfo, workspace }) => {
const w = await workspace.current();
const settingButton = page.getByTestId('slider-bar-workspace-setting-button');
// goto settings

View File

@@ -1,2 +1 @@
.env
src/schema.gql

View File

@@ -1,3 +1,23 @@
# Server
The latest server code of AFFiNE is at https://github.com/toeverything/OctoBase/tree/master/apps/cloud
## Get started
### Install dependencies
```bash
yarn
```
### Build Native binding
```bash
yarn workspace @affine/storage build
```
### Run server
```bash
yarn dev
```
Now you can access the server's GraphQL endpoint at http://localhost:3000/graphql
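
A quick smoke test against that endpoint (any GraphQL client works; this sketch uses plain `fetch` and the port mentioned above):

```ts
// Minimal smoke test for the local GraphQL endpoint.
const res = await fetch('http://localhost:3000/graphql', {
  method: 'POST',
  headers: { 'content-type': 'application/json' },
  body: JSON.stringify({ query: '{ __typename }' }),
});
console.log(await res.json()); // expected: { data: { __typename: 'Query' } }
```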

View File

@@ -0,0 +1,77 @@
/*
Warnings:
- You are about to drop the column `avatar_url` on the `users` table. All the data in the column will be lost.
- You are about to drop the column `fulfilled` on the `users` table. All the data in the column will be lost.
- You are about to drop the column `token_nonce` on the `users` table. All the data in the column will be lost.
- You are about to drop the `connected_accounts` table. If the table is not empty, all the data it contains will be lost.
*/
-- DropForeignKey
ALTER TABLE "connected_accounts" DROP CONSTRAINT "connected_accounts_user_id_fkey";
-- AlterTable
ALTER TABLE "users" DROP COLUMN "avatar_url",
DROP COLUMN "fulfilled",
DROP COLUMN "token_nonce",
ADD COLUMN "email_verified" TIMESTAMP(3),
ADD COLUMN "image" VARCHAR,
ALTER COLUMN "name" SET DATA TYPE TEXT,
ALTER COLUMN "email" DROP NOT NULL,
ALTER COLUMN "email" SET DATA TYPE TEXT;
-- DropTable
DROP TABLE "connected_accounts";
-- CreateTable
CREATE TABLE "accounts" (
"id" TEXT NOT NULL,
"user_id" TEXT NOT NULL,
"type" TEXT NOT NULL,
"provider" TEXT NOT NULL,
"provider_account_id" TEXT NOT NULL,
"refresh_token" TEXT,
"access_token" TEXT,
"expires_at" INTEGER,
"token_type" TEXT,
"scope" TEXT,
"id_token" TEXT,
"session_state" TEXT,
CONSTRAINT "accounts_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "sessions" (
"id" TEXT NOT NULL,
"session_token" TEXT NOT NULL,
"user_id" TEXT NOT NULL,
"expires" TIMESTAMP(3) NOT NULL,
CONSTRAINT "sessions_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "verificationtokens" (
"identifier" TEXT NOT NULL,
"token" TEXT NOT NULL,
"expires" TIMESTAMP(3) NOT NULL
);
-- CreateIndex
CREATE UNIQUE INDEX "accounts_provider_provider_account_id_key" ON "accounts"("provider", "provider_account_id");
-- CreateIndex
CREATE UNIQUE INDEX "sessions_session_token_key" ON "sessions"("session_token");
-- CreateIndex
CREATE UNIQUE INDEX "verificationtokens_token_key" ON "verificationtokens"("token");
-- CreateIndex
CREATE UNIQUE INDEX "verificationtokens_identifier_token_key" ON "verificationtokens"("identifier", "token");
-- AddForeignKey
ALTER TABLE "accounts" ADD CONSTRAINT "accounts_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "sessions" ADD CONSTRAINT "sessions_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;

View File

@@ -0,0 +1,52 @@
-- CreateTable
CREATE TABLE "blobs" (
"hash" VARCHAR NOT NULL,
"workspace_id" VARCHAR NOT NULL,
"blob" BYTEA NOT NULL,
"length" INTEGER NOT NULL,
"created_at" TIMESTAMPTZ(6) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "blobs_pkey" PRIMARY KEY ("hash")
);
-- CreateTable
CREATE TABLE "optimized_blobs" (
"hash" VARCHAR NOT NULL,
"workspace_id" VARCHAR NOT NULL,
"params" VARCHAR NOT NULL,
"blob" BYTEA NOT NULL,
"length" INTEGER NOT NULL,
"created_at" TIMESTAMPTZ(6) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "optimized_blobs_pkey" PRIMARY KEY ("hash")
);
-- CreateTable
CREATE TABLE "docs" (
"id" SERIAL NOT NULL,
"workspace_id" VARCHAR NOT NULL,
"guid" VARCHAR NOT NULL,
"is_workspace" BOOLEAN NOT NULL DEFAULT true,
"blob" BYTEA NOT NULL,
"created_at" TIMESTAMPTZ(6) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "docs_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE UNIQUE INDEX "blobs_workspace_id_hash_key" ON "blobs"("workspace_id", "hash");
-- CreateIndex
CREATE UNIQUE INDEX "optimized_blobs_workspace_id_hash_params_key" ON "optimized_blobs"("workspace_id", "hash", "params");
-- CreateIndex
CREATE INDEX "docs_workspace_id_guid_idx" ON "docs"("workspace_id", "guid");
-- AddForeignKey
ALTER TABLE "blobs" ADD CONSTRAINT "blobs_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "optimized_blobs" ADD CONSTRAINT "optimized_blobs_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "docs" ADD CONSTRAINT "docs_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;

View File

@@ -1,7 +1,7 @@
{
"name": "@affine/server",
"private": true,
"version": "0.7.0-canary.16",
"version": "0.7.0-canary.24",
"description": "Affine Node.js server",
"type": "module",
"bin": {
@@ -10,41 +10,48 @@
"scripts": {
"dev": "nodemon ./src/index.ts",
"test": "yarn exec ts-node-esm ./scripts/run-test.ts all",
"test:watch": "yarn exec ts-node-esm ./scripts/run-test.ts all --watch",
"test:coverage": "c8 yarn exec ts-node-esm ./scripts/run-test.ts all",
"postinstall": "prisma generate"
},
"dependencies": {
"@apollo/server": "^4.7.1",
"@nestjs/apollo": "^11.0.6",
"@nestjs/common": "^9.4.2",
"@nestjs/core": "^9.4.2",
"@nestjs/graphql": "^11.0.6",
"@nestjs/platform-express": "^9.4.2",
"@node-rs/bcrypt": "^1.7.1",
"@prisma/client": "^4.15.0",
"dotenv": "^16.1.4",
"@affine/storage": "workspace:*",
"@apollo/server": "^4.7.4",
"@auth/prisma-adapter": "^1.0.0",
"@aws-sdk/client-s3": "^3.359.0",
"@nestjs/apollo": "^12.0.3",
"@nestjs/common": "^10.0.3",
"@nestjs/core": "^10.0.3",
"@nestjs/graphql": "^12.0.3",
"@nestjs/platform-express": "^10.0.3",
"@node-rs/argon2": "^1.5.0",
"@node-rs/crc32": "^1.7.0",
"@node-rs/jsonwebtoken": "^0.2.0",
"@prisma/client": "^4.16.1",
"dotenv": "^16.3.1",
"express": "^4.18.2",
"graphql": "^16.6.0",
"graphql": "^16.7.1",
"graphql-type-json": "^0.3.2",
"jsonwebtoken": "^9.0.0",
"graphql-upload": "^16.0.2",
"lodash-es": "^4.17.21",
"prisma": "^4.15.0",
"next-auth": "^4.22.1",
"parse-duration": "^1.1.0",
"prisma": "^4.16.1",
"reflect-metadata": "^0.1.13",
"rxjs": "^7.8.1"
},
"devDependencies": {
"@nestjs/testing": "^9.4.2",
"@napi-rs/image": "^1.6.1",
"@nestjs/testing": "^10.0.3",
"@types/express": "^4.17.17",
"@types/jsonwebtoken": "^9.0.2",
"@types/lodash-es": "^4.17.7",
"@types/node": "^18.16.16",
"@types/node": "^18.16.18",
"@types/supertest": "^2.0.12",
"c8": "^7.14.0",
"c8": "^8.0.0",
"nodemon": "^2.0.22",
"supertest": "^6.3.3",
"ts-node": "^10.9.1",
"typescript": "^5.1.3",
"vitest": "^0.31.4"
"typescript": "^5.1.5"
},
"nodemonConfig": {
"exec": "node",
@@ -81,5 +88,8 @@
"**/*.spec.ts"
]
},
"stableVersion": "0.5.3"
"stableVersion": "0.5.3",
"installConfig": {
"hoistingLimits": "workspaces"
}
}

View File

@@ -1,52 +1,24 @@
generator client {
provider = "prisma-client-js"
}
datasource db {
provider = "postgresql"
url = env("DATABASE_URL")
}
generator client {
provider = "prisma-client-js"
}
model User {
id String @id @default(uuid()) @db.VarChar
name String @db.VarChar
email String @unique @db.VarChar
tokenNonce Int @default(0) @map("token_nonce") @db.SmallInt
avatarUrl String? @map("avatar_url") @db.VarChar
/// Available if user signed up through OAuth providers
password String? @db.VarChar
/// User may be created by an email collaboration invitation before signup.
/// We will pre-create a user entity in such scenarios but leave fulfilled as false until they sign up.
/// This implementation is convenient for handling unregistered user permissions
fulfilled Boolean @default(true)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
connectedAccounts ConnectedAccount[]
workspaces UserWorkspacePermission[]
@@map("users")
}
model Workspace {
id String @id @default(uuid()) @db.VarChar
public Boolean
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
users UserWorkspacePermission[]
id String @id @default(uuid()) @db.VarChar
public Boolean
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
users UserWorkspacePermission[]
blobs Blob[]
docs Doc[]
optimizedBlobs OptimizedBlob[]
@@map("workspaces")
}
model ConnectedAccount {
id String @id @default(uuid()) @db.VarChar
userId String @map("user_id")
/// the general provider name, e.g. google, github, facebook
provider String @db.VarChar
/// the user id provided by OAuth providers, or other user identitive credential like `username` provided by GitHub
providerUserId String @unique @map("provider_user_id") @db.VarChar
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
@@map("connected_accounts")
}
model UserWorkspacePermission {
id String @id @default(uuid()) @db.VarChar
workspaceId String @map("workspace_id") @db.VarChar
@@ -56,8 +28,105 @@ model UserWorkspacePermission {
/// Whether the permission invitation is accepted by the user
accepted Boolean @default(false)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
@@map("user_workspace_permissions")
}
model User {
id String @id @default(uuid()) @db.VarChar
name String
email String? @unique
emailVerified DateTime? @map("email_verified")
// image field is for the next-auth
avatarUrl String? @map("image") @db.VarChar
accounts Account[]
sessions Session[]
workspaces UserWorkspacePermission[]
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
/// Not available if user signed up through OAuth providers
password String? @db.VarChar
@@map("users")
}
model Account {
id String @id @default(cuid())
userId String @map("user_id")
type String
provider String
providerAccountId String @map("provider_account_id")
refresh_token String? @db.Text
access_token String? @db.Text
expires_at Int?
token_type String?
scope String?
id_token String? @db.Text
session_state String?
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
@@unique([provider, providerAccountId])
@@map("accounts")
}
model Session {
id String @id @default(cuid())
sessionToken String @unique @map("session_token")
userId String @map("user_id")
expires DateTime
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
@@map("sessions")
}
model VerificationToken {
identifier String
token String @unique
expires DateTime
@@unique([identifier, token])
@@map("verificationtokens")
}
model Blob {
hash String @id @default(uuid()) @db.VarChar
workspaceId String @map("workspace_id") @db.VarChar
blob Bytes @db.ByteA
length Int
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
@@unique([workspaceId, hash])
@@map("blobs")
}
model OptimizedBlob {
hash String @id @default(uuid()) @db.VarChar
workspaceId String @map("workspace_id") @db.VarChar
params String @db.VarChar
blob Bytes @db.ByteA
length Int
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
@@unique([workspaceId, hash, params])
@@map("optimized_blobs")
}
model Doc {
id Int @id @default(autoincrement()) @db.Integer
workspaceId String @map("workspace_id") @db.VarChar
guid String @db.VarChar
is_workspace Boolean @default(true) @db.Boolean
blob Bytes @db.ByteA
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
@@index([workspaceId, guid])
@@map("docs")
}
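
With the generated client, the new `Doc` rows are looked up per workspace and per Yjs guid, which is what the `@@index([workspaceId, guid])` above is for. A hedged sketch:

```ts
import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

// Fetch all stored update blobs for one Yjs doc in a workspace,
// oldest first, so they can be replayed in order.
async function getDocBlobs(workspaceId: string, guid: string) {
  return prisma.doc.findMany({
    where: { workspaceId, guid },
    orderBy: { createdAt: 'asc' },
    select: { blob: true },
  });
}
```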

View File

@@ -1,7 +1,5 @@
import crypto from 'node:crypto';
import { genSalt } from '@node-rs/bcrypt';
const { privateKey, publicKey } = crypto.generateKeyPairSync('ec', {
namedCurve: 'prime256v1',
publicKeyEncoding: {
@@ -14,6 +12,5 @@ const { privateKey, publicKey } = crypto.generateKeyPairSync('ec', {
},
});
console.log('Salt:\n', await genSalt(10));
console.log('ECDSA Public Key:\n', publicKey);
console.log('ECDSA Private Key:\n', privateKey);

View File

@@ -12,7 +12,13 @@ const root = fileURLToPath(new URL('..', import.meta.url));
const testDir = resolve(root, 'src', 'tests');
const files = await readdir(testDir);
const sharedArgs = [...pkg.nodemonConfig.nodeArgs, '--test'];
const watchMode = process.argv.includes('--watch');
const sharedArgs = [
...pkg.nodemonConfig.nodeArgs,
'--test',
watchMode ? '--watch' : '',
].filter(Boolean); // drop the empty placeholder when not in watch mode
const env = {
PATH: process.env.PATH,

View File

@@ -1,16 +1,17 @@
/// <reference types="./global.d.ts" />
import { Module } from '@nestjs/common';
import { ConfigModule } from './config';
import { GqlModule } from './graphql.module';
import { BusinessModules } from './modules';
import { PrismaModule } from './prisma';
import { StorageModule } from './storage';
@Module({
imports: [
PrismaModule,
GqlModule,
ConfigModule.forRoot(),
StorageModule.forRoot(),
...BusinessModules,
],
})

View File

@@ -132,6 +132,13 @@ export interface AFFiNEConfig {
*/
get origin(): string;
/**
* the database config
*/
db: {
url: string;
};
/**
* the apollo driver config
*/
@@ -163,6 +170,12 @@ export interface AFFiNEConfig {
* }
*/
config: Record<string, string>;
/**
* Only used when `enable` is `false`
*/
fs: {
path: string;
};
};
/**
@@ -172,11 +185,16 @@ export interface AFFiNEConfig {
/**
* Application access token expiration time
*/
readonly accessTokenExpiresIn: string;
readonly accessTokenExpiresIn: number;
/**
* Application refresh token expiration time
*/
readonly refreshTokenExpiresIn: string;
readonly refreshTokenExpiresIn: number;
/**
* Add some leeway (in seconds) to the exp and nbf validation to account for clock skew.
* Defaults to 60 if omitted.
*/
readonly leeway: number;
/**
* Application public key
*
@@ -195,6 +213,10 @@ export interface AFFiNEConfig {
* whether allow user to signup by oauth providers
*/
enableOauth: boolean;
/**
* NEXTAUTH_SECRET
*/
nextAuthSecret: string;
/**
* all available oauth providers
*/

View File

@@ -1,5 +1,10 @@
/// <reference types="../global.d.ts" />
import { homedir } from 'node:os';
import { join } from 'node:path';
import parse from 'parse-duration';
import pkg from '../../package.json' assert { type: 'json' };
import type { AFFiNEConfig } from './def';
@@ -47,6 +52,9 @@ export const getDefaultAFFiNEConfig: () => AFFiNEConfig = () => ({
get baseUrl() {
return `${this.origin}${this.path}`;
},
db: {
url: '',
},
graphql: {
buildSchemaOptions: {
numberScalarMode: 'integer',
@@ -56,16 +64,25 @@ export const getDefaultAFFiNEConfig: () => AFFiNEConfig = () => ({
debug: true,
},
auth: {
accessTokenExpiresIn: '1h',
refreshTokenExpiresIn: '7d',
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
accessTokenExpiresIn: parse('1h')! / 1000,
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
refreshTokenExpiresIn: parse('7d')! / 1000,
leeway: 60,
publicKey: examplePublicKey,
privateKey: examplePrivateKey,
enableSignup: true,
enableOauth: false,
nextAuthSecret: '',
oauthProviders: {},
},
objectStorage: {
enable: false,
config: {},
fs: {
path: join(homedir(), '.affine-storage'),
},
},
});
export { registerEnvs } from './env';
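
`parse-duration` returns milliseconds, so the division by 1000 turns the human-readable strings into the seconds-based lifetimes the JWT code expects:

```ts
import parse from 'parse-duration';

// parse() returns milliseconds (or undefined for unparseable input).
parse('1h'); // 3_600_000
parse('1h')! / 1000; // 3600 seconds   -> accessTokenExpiresIn
parse('7d')! / 1000; // 604800 seconds -> refreshTokenExpiresIn
```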

View File

@@ -2,14 +2,16 @@ import { set } from 'lodash-es';
import { parseEnvValue } from './def';
for (const env in AFFiNE.ENV_MAP) {
const config = AFFiNE.ENV_MAP[env];
const [path, value] =
typeof config === 'string'
? [config, process.env[env]]
: [config[0], parseEnvValue(process.env[env], config[1])];
export function registerEnvs() {
for (const env in globalThis.AFFiNE.ENV_MAP) {
const config = globalThis.AFFiNE.ENV_MAP[env];
const [path, value] =
typeof config === 'string'
? [config, process.env[env]]
: [config[0], parseEnvValue(process.env[env], config[1])];
if (typeof value !== 'undefined') {
set(globalThis.AFFiNE, path, process.env[env]);
if (typeof value !== 'undefined') {
set(globalThis.AFFiNE, path, value);
}
}
}

View File

@@ -1,9 +1,12 @@
// eslint-disable-next-line simple-import-sort/imports
import type { DynamicModule, FactoryProvider } from '@nestjs/common';
import { merge } from 'lodash-es';
import type { DeepPartial } from '../utils/types';
import type { AFFiNEConfig } from './def';
import '../prelude';
type ConstructorOf<T> = {
new (): T;
};
@@ -37,11 +40,14 @@ function createConfigProvider(
provide: Config,
useFactory: () => {
const wrapper = new Config();
const config = merge({}, AFFiNE, override);
const config = merge({}, globalThis.AFFiNE, override);
const proxy: Config = new Proxy(wrapper, {
get: (_target, property: keyof Config) => {
const desc = Object.getOwnPropertyDescriptor(AFFiNE, property);
const desc = Object.getOwnPropertyDescriptor(
globalThis.AFFiNE,
property
);
if (desc?.get) {
return desc.get.call(proxy);
}

View File

@@ -16,6 +16,9 @@ import { Config } from './config';
return {
...config.graphql,
path: `${config.path}/graphql`,
csrfPrevention: {
requestHeaders: ['content-type'],
},
autoSchemaFile: join(
fileURLToPath(import.meta.url),
'..',

View File

@@ -1,9 +1,12 @@
import './prelude';
/// <reference types="./global.d.ts" />
import { NestFactory } from '@nestjs/core';
import type { NestExpressApplication } from '@nestjs/platform-express';
import { static as staticMiddleware } from 'express';
// @ts-expect-error graphql-upload is not typed
import graphqlUploadExpress from 'graphql-upload/graphqlUploadExpress.mjs';
import { AppModule } from './app';
import { Config } from './config';
const app = await NestFactory.create<NestExpressApplication>(AppModule, {
cors: {
@@ -12,14 +15,27 @@ const app = await NestFactory.create<NestExpressApplication>(AppModule, {
? ['https://affine-preview.vercel.app']
: ['http://localhost:8080'],
methods: ['GET', 'POST', 'PUT', 'DELETE', 'OPTIONS'],
allowedHeaders: '*',
allowedHeaders: ['x-operation-name', 'x-definition-name'],
},
bodyParser: true,
});
app.use(
graphqlUploadExpress({
maxFileSize: 10 * 1024 * 1024,
maxFiles: 5,
})
);
const host = process.env.HOST ?? 'localhost';
const port = process.env.PORT ?? 3010;
const config = app.get(Config);
if (!config.objectStorage.enable) {
app.use('/assets', staticMiddleware(config.objectStorage.fs.path));
}
await app.listen(port, host);
console.log(`Listening on http://${host}:${port}`);

View File

@@ -49,10 +49,17 @@ class AuthGuard implements CanActivate {
if (!token) {
return false;
}
const [type, jwt] = token.split(' ') ?? [];
const claims = this.auth.verify(token);
req.user = await this.prisma.user.findUnique({ where: { id: claims.id } });
return !!req.user;
if (type === 'Bearer') {
const claims = await this.auth.verify(jwt);
req.user = await this.prisma.user.findUnique({
where: { id: claims.id },
});
return !!req.user;
}
return false;
}
}
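
Clients therefore must send the token with the `Bearer` scheme; anything else now fails before verification. A hedged client-side sketch (URL and query are illustrative):

```ts
async function whoAmI(token: string) {
  const res = await fetch('http://localhost:3000/graphql', {
    method: 'POST',
    headers: {
      'content-type': 'application/json',
      // The guard splits on the space and only accepts the 'Bearer' type.
      authorization: `Bearer ${token}`,
    },
    body: JSON.stringify({ query: '{ __typename }' }),
  });
  return res.json();
}
```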

View File

@@ -1,5 +1,6 @@
import { Global, Module } from '@nestjs/common';
import { NextAuthController } from './next-auth.controller';
import { AuthResolver } from './resolver';
import { AuthService } from './service';
@@ -7,6 +8,9 @@ import { AuthService } from './service';
@Module({
providers: [AuthService, AuthResolver],
exports: [AuthService],
controllers: [NextAuthController],
})
export class AuthModule {}
export * from './guard';
export { TokenType } from './resolver';

View File

@@ -0,0 +1,158 @@
import { randomUUID } from 'node:crypto';
import { PrismaAdapter } from '@auth/prisma-adapter';
import {
BadRequestException,
Controller,
Get,
Next,
Post,
Query,
Req,
Res,
} from '@nestjs/common';
import { Algorithm, sign, verify as jwtVerify } from '@node-rs/jsonwebtoken';
import type { NextFunction, Request, Response } from 'express';
import type { AuthAction, AuthOptions } from 'next-auth';
import { AuthHandler } from 'next-auth/core';
import Github from 'next-auth/providers/github';
import Google from 'next-auth/providers/google';
import { Config } from '../../config';
import { PrismaService } from '../../prisma/service';
import { getUtcTimestamp, type UserClaim } from './service';
const BASE_URL = '/api/auth/';
@Controller(BASE_URL)
export class NextAuthController {
private readonly nextAuthOptions: AuthOptions;
constructor(readonly config: Config, readonly prisma: PrismaService) {
this.nextAuthOptions = {
providers: [],
// @ts-expect-error Third-party library type mismatch
adapter: PrismaAdapter(prisma),
};
if (config.auth.oauthProviders.github) {
this.nextAuthOptions.providers.push(
Github({
clientId: config.auth.oauthProviders.github.clientId,
clientSecret: config.auth.oauthProviders.github.clientSecret,
})
);
}
if (config.auth.oauthProviders.google) {
this.nextAuthOptions.providers.push(
Google({
clientId: config.auth.oauthProviders.google.clientId,
clientSecret: config.auth.oauthProviders.google.clientSecret,
})
);
}
this.nextAuthOptions.jwt = {
encode: async ({ token, maxAge }) => {
if (!token?.email) {
throw new BadRequestException('Missing email in jwt token');
}
const user = await this.prisma.user.findFirstOrThrow({
where: {
email: token.email,
},
});
const now = getUtcTimestamp();
return sign(
{
data: {
id: user.id,
name: user.name,
email: user.email,
createdAt: user.createdAt.toISOString(),
},
iat: now,
exp: now + (maxAge ?? config.auth.accessTokenExpiresIn),
iss: this.config.serverId,
sub: user.id,
aud: user.name,
jti: randomUUID({
disableEntropyCache: true,
}),
},
this.config.auth.privateKey,
{
algorithm: Algorithm.ES256,
}
);
},
decode: async ({ token }) => {
if (!token) {
return null;
}
const { name, email, id } = (
await jwtVerify(token, this.config.auth.publicKey, {
algorithms: [Algorithm.ES256],
iss: [this.config.serverId],
leeway: this.config.auth.leeway,
requiredSpecClaims: ['exp', 'iat', 'iss', 'sub'],
})
).data as UserClaim;
return {
name,
email,
sub: id,
};
},
};
this.nextAuthOptions.secret ??= config.auth.nextAuthSecret;
}
@Get()
@Post()
async auth(
@Req() req: Request,
@Res() res: Response,
@Query() query: Record<string, any>,
@Next() next: NextFunction
) {
const nextauth = req.url // start with request url
.slice(BASE_URL.length) // make relative to baseUrl
.replace(/\?.*/, '') // remove query part, use only path part
.split('/') as AuthAction[]; // as array of strings;
const { status, headers, body, redirect, cookies } = await AuthHandler({
req: {
body: req.body,
query: query,
method: req.method,
action: nextauth[0],
providerId: nextauth[1],
error: query.error ?? nextauth[1],
cookies: req.cookies,
},
options: this.nextAuthOptions,
});
if (status) {
res.status(status);
}
if (headers) {
for (const { key, value } of headers) {
res.setHeader(key, value);
}
}
if (cookies) {
for (const cookie of cookies) {
res.cookie(cookie.name, cookie.value, cookie.options);
}
}
if (redirect) {
res.redirect(redirect);
} else if (typeof body === 'string') {
res.send(body);
} else if (body && typeof body === 'object') {
res.json(body);
} else {
next();
}
}
}

View File

@@ -40,6 +40,18 @@ export class AuthResolver {
};
}
@Mutation(() => UserType)
async register(
@Context() ctx: { req: Request },
@Args('name') name: string,
@Args('email') email: string,
@Args('password') password: string
) {
const user = await this.auth.register(name, email, password);
ctx.req.user = user;
return user;
}
@Mutation(() => UserType)
async signIn(
@Context() ctx: { req: Request },

View File

@@ -1,44 +1,88 @@
import { randomUUID } from 'node:crypto';
import {
BadRequestException,
Injectable,
InternalServerErrorException,
UnauthorizedException,
} from '@nestjs/common';
import { compare, hash } from '@node-rs/bcrypt';
import { hash, verify } from '@node-rs/argon2';
import { Algorithm, sign, verify as jwtVerify } from '@node-rs/jsonwebtoken';
import type { User } from '@prisma/client';
import jwt from 'jsonwebtoken';
import { Config } from '../../config';
import { PrismaService } from '../../prisma';
type UserClaim = Pick<User, 'id' | 'name' | 'email'>;
export type UserClaim = Pick<User, 'id' | 'name' | 'email' | 'createdAt'>;
export const getUtcTimestamp = () => Math.floor(new Date().getTime() / 1000);
@Injectable()
export class AuthService {
constructor(private config: Config, private prisma: PrismaService) {}
sign(user: UserClaim) {
return jwt.sign(user, this.config.auth.privateKey, {
algorithm: 'ES256',
subject: user.id,
issuer: this.config.serverId,
expiresIn: this.config.auth.accessTokenExpiresIn,
});
const now = getUtcTimestamp();
return sign(
{
data: {
id: user.id,
name: user.name,
email: user.email,
createdAt: user.createdAt.toISOString(),
},
iat: now,
exp: now + this.config.auth.accessTokenExpiresIn,
iss: this.config.serverId,
sub: user.id,
aud: user.name,
jti: randomUUID({
disableEntropyCache: true,
}),
},
this.config.auth.privateKey,
{
algorithm: Algorithm.ES256,
}
);
}
refresh(user: UserClaim) {
return jwt.sign(user, this.config.auth.privateKey, {
algorithm: 'ES256',
subject: user.id,
issuer: this.config.serverId,
expiresIn: this.config.auth.refreshTokenExpiresIn,
});
const now = getUtcTimestamp();
return sign(
{
data: {
id: user.id,
name: user.name,
email: user.email,
createdAt: user.createdAt.toISOString(),
},
exp: now + this.config.auth.refreshTokenExpiresIn,
iat: now,
iss: this.config.serverId,
sub: user.id,
aud: user.name,
jti: randomUUID({
disableEntropyCache: true,
}),
},
this.config.auth.privateKey,
{
algorithm: Algorithm.ES256,
}
);
}
verify(token: string) {
async verify(token: string) {
try {
return jwt.verify(token, this.config.auth.publicKey, {
algorithms: ['ES256'],
}) as UserClaim;
return (
await jwtVerify(token, this.config.auth.publicKey, {
algorithms: [Algorithm.ES256],
iss: [this.config.serverId],
leeway: this.config.auth.leeway,
requiredSpecClaims: ['exp', 'iat', 'iss', 'sub'],
})
).data as UserClaim;
} catch (e) {
throw new UnauthorizedException('Invalid token');
}
@@ -58,9 +102,13 @@ export class AuthService {
if (!user.password) {
throw new BadRequestException('User has no password');
}
const equal = await compare(password, user.password);
let equal = false;
try {
equal = await verify(user.password, password);
} catch (e) {
console.error(e);
throw new InternalServerErrorException(e, 'Verify password failed');
}
if (!equal) {
throw new UnauthorizedException('Invalid password');
}
@@ -69,8 +117,6 @@ export class AuthService {
}
async register(name: string, email: string, password: string): Promise<User> {
const hashedPassword = await hash(password);
const user = await this.prisma.user.findFirst({
where: {
email,
@@ -81,6 +127,8 @@ export class AuthService {
throw new BadRequestException('Email already exists');
}
const hashedPassword = await hash(password);
return this.prisma.user.create({
data: {
name,

View File

@@ -0,0 +1,23 @@
import { createWriteStream } from 'node:fs';
import { mkdir } from 'node:fs/promises';
import { join } from 'node:path';
import { pipeline } from 'node:stream/promises';
import { Injectable } from '@nestjs/common';
import { Config } from '../../config';
import { FileUpload } from '../../types';
@Injectable()
export class FSService {
constructor(private readonly config: Config) {}
async writeFile(key: string, file: FileUpload) {
const dest = this.config.objectStorage.fs.path;
await mkdir(dest, { recursive: true });
const destFile = join(dest, key);
await pipeline(file.createReadStream(), createWriteStream(destFile));
return `/assets/${key}`;
}
}

View File

@@ -0,0 +1,11 @@
import { Module } from '@nestjs/common';
import { FSService } from './fs';
import { S3 } from './s3';
import { StorageService } from './storage.service';
@Module({
providers: [S3, StorageService, FSService],
exports: [StorageService],
})
export class StorageModule {}

View File

@@ -0,0 +1,15 @@
import { S3Client } from '@aws-sdk/client-s3';
import { FactoryProvider } from '@nestjs/common';
import { Config } from '../../config';
export const S3_SERVICE = Symbol('S3_SERVICE');
export const S3: FactoryProvider<S3Client> = {
provide: S3_SERVICE,
useFactory: (config: Config) => {
const s3 = new S3Client(config.objectStorage.config);
return s3;
},
inject: [Config],
};

View File

@@ -0,0 +1,31 @@
import { PutObjectCommand, S3Client } from '@aws-sdk/client-s3';
import { Inject, Injectable } from '@nestjs/common';
import { Config } from '../../config';
import { FileUpload } from '../../types';
import { FSService } from './fs';
import { S3_SERVICE } from './s3';
@Injectable()
export class StorageService {
constructor(
@Inject(S3_SERVICE) private readonly s3: S3Client,
private readonly fs: FSService,
private readonly config: Config
) {}
async uploadFile(key: string, file: FileUpload) {
if (this.config.objectStorage.enable) {
await this.s3.send(
new PutObjectCommand({
Body: file.createReadStream(),
Bucket: this.config.objectStorage.config.bucket,
Key: key,
})
);
return `https://avatar.affineassets.com/${key}`;
} else {
return this.fs.writeFile(key, file);
}
}
}

View File

@@ -1,8 +1,12 @@
import { Module } from '@nestjs/common';
import { StorageModule } from '../storage';
import { UserResolver } from './resolver';
@Module({
imports: [StorageModule],
providers: [UserResolver],
})
export class UsersModule {}
export { UserType } from './resolver';

View File

@@ -1,7 +1,21 @@
import { Args, Field, ID, ObjectType, Query, Resolver } from '@nestjs/graphql';
import { BadRequestException } from '@nestjs/common';
import {
Args,
Field,
ID,
Mutation,
ObjectType,
Query,
Resolver,
} from '@nestjs/graphql';
import type { User } from '@prisma/client';
// @ts-expect-error graphql-upload is not typed
import GraphQLUpload from 'graphql-upload/GraphQLUpload.mjs';
import { PrismaService } from '../../prisma/service';
import type { FileUpload } from '../../types';
import { Auth } from '../auth/guard';
import { StorageService } from '../storage/storage.service';
@ObjectType()
export class UserType implements Partial<User> {
@@ -21,9 +35,13 @@ export class UserType implements Partial<User> {
createdAt!: Date;
}
@Auth()
@Resolver(() => UserType)
export class UserResolver {
constructor(private readonly prisma: PrismaService) {}
constructor(
private readonly prisma: PrismaService,
private readonly storage: StorageService
) {}
@Query(() => UserType, {
name: 'user',
@@ -34,4 +52,24 @@ export class UserResolver {
where: { email },
});
}
@Mutation(() => UserType, {
name: 'uploadAvatar',
description: 'Upload user avatar',
})
async uploadAvatar(
@Args('id') id: string,
@Args({ name: 'avatar', type: () => GraphQLUpload })
avatar: FileUpload
) {
const user = await this.prisma.user.findUnique({ where: { id } });
if (!user) {
throw new BadRequestException(`User ${id} not found`);
}
const url = await this.storage.uploadFile(`${id}-avatar`, avatar);
return this.prisma.user.update({
where: { id },
data: { avatarUrl: url },
});
}
}
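
`graphql-upload` implements the GraphQL multipart request spec, so calling `uploadAvatar` from a client means sending a multipart form instead of plain JSON. A hedged sketch with `fetch` and `FormData` (endpoint and selection set are illustrative; Apollo's CSRF settings may additionally require a preflight header):

```ts
// Client-side upload following the GraphQL multipart request spec.
async function uploadAvatar(userId: string, file: Blob) {
  const form = new FormData();
  form.append(
    'operations',
    JSON.stringify({
      query: `mutation ($id: String!, $avatar: Upload!) {
        uploadAvatar(id: $id, avatar: $avatar) { id }
      }`,
      variables: { id: userId, avatar: null },
    })
  );
  // Map multipart field "0" onto the null placeholder above.
  form.append('map', JSON.stringify({ 0: ['variables.avatar'] }));
  form.append('0', file);

  const res = await fetch('http://localhost:3000/graphql', {
    method: 'POST',
    body: form,
  });
  return res.json();
}
```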

View File

@@ -0,0 +1,27 @@
import { Storage } from '@affine/storage';
import { Controller, Get, NotFoundException, Param, Res } from '@nestjs/common';
import type { Response } from 'express';
@Controller('/api/workspaces')
export class WorkspacesController {
constructor(private readonly storage: Storage) {}
@Get('/:id/blobs/:name')
async blob(
@Param('id') workspaceId: string,
@Param('name') name: string,
@Res() res: Response
) {
const blob = await this.storage.blob(workspaceId, name);
if (!blob) {
throw new NotFoundException('Blob not found');
}
res.setHeader('content-type', blob.contentType);
res.setHeader('last-modified', blob.lastModified);
res.setHeader('content-length', blob.size);
res.send(blob.data);
}
}
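
The blob route is a plain REST endpoint, so no GraphQL client is involved; fetching one blob looks like this (ids are placeholders):

```ts
const res = await fetch(
  'http://localhost:3000/api/workspaces/my-workspace/blobs/my-blob'
);
if (res.ok) {
  const data = new Uint8Array(await res.arrayBuffer());
  console.log(res.headers.get('content-type'), data.byteLength);
}
```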

View File

@@ -1,10 +1,11 @@
import { Module } from '@nestjs/common';
import { WorkspacesController } from './controller';
import { PermissionService } from './permission';
import { WorkspaceResolver } from './resolver';
@Module({
providers: [WorkspaceResolver, PermissionService],
providers: [WorkspaceResolver, PermissionService],
controllers: [WorkspacesController],
exports: [PermissionService],
})
export class WorkspaceModule {}

Some files were not shown because too many files have changed in this diff.