mirror of
https://github.com/toeverything/AFFiNE.git
synced 2026-02-08 18:43:46 +00:00
Compare commits
453 Commits
v0.15.6
...
v0.17.0-be
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
bffc294620 | ||
|
|
be4df0f8ac | ||
|
|
64d2b926a2 | ||
|
|
a027cef457 | ||
|
|
7d93fae1dd | ||
|
|
702c03539f | ||
|
|
52cd6102cb | ||
|
|
74cd175d37 | ||
|
|
9cbe416c2c | ||
|
|
f4db4058f8 | ||
|
|
8be67dce82 | ||
|
|
017e89f458 | ||
|
|
d3f50a2e38 | ||
|
|
d0010a1d11 | ||
|
|
10fe79a9a8 | ||
|
|
4bbb94c32d | ||
|
|
5083fe3cfd | ||
|
|
de1a51bf7c | ||
|
|
7621758c76 | ||
|
|
f7816a5b28 | ||
|
|
5c67f98e90 | ||
|
|
d5ecf503c2 | ||
|
|
2a2a969394 | ||
|
|
691bfed185 | ||
|
|
3837c84791 | ||
|
|
059030fbeb | ||
|
|
19a7d1eb92 | ||
|
|
f452414952 | ||
|
|
03c2051926 | ||
|
|
73dd1d3326 | ||
|
|
24acce2eac | ||
|
|
b711b087c5 | ||
|
|
23748c83a1 | ||
|
|
7c3666ad6d | ||
|
|
03a071e4cb | ||
|
|
2bdf8c6ef3 | ||
|
|
c6840c8674 | ||
|
|
5f3dd867ad | ||
|
|
b680c1839b | ||
|
|
1cac2f6ccd | ||
|
|
06552a1120 | ||
|
|
416faba2bc | ||
|
|
93ae724c66 | ||
|
|
989e8830f7 | ||
|
|
0ae5673aaa | ||
|
|
51bc40d2a8 | ||
|
|
2524491bd1 | ||
|
|
53886a7cd3 | ||
|
|
f688c057eb | ||
|
|
eb16c273ee | ||
|
|
01b2339173 | ||
|
|
2799e8cd43 | ||
|
|
fb64bc7e55 | ||
|
|
51f3566bec | ||
|
|
01e6370dd2 | ||
|
|
2ac803c73f | ||
|
|
0d6f468385 | ||
|
|
ec3b97d069 | ||
|
|
7dde509970 | ||
|
|
681f4561fb | ||
|
|
d00f86c3ce | ||
|
|
ae3b13bfb4 | ||
|
|
aad442e73d | ||
|
|
4595df9b0e | ||
|
|
106f332c19 | ||
|
|
98d9295259 | ||
|
|
e7b53641d7 | ||
|
|
41f9149be6 | ||
|
|
935771c8a8 | ||
|
|
52c9da67f0 | ||
|
|
e33aa35f7e | ||
|
|
8b0afd6eeb | ||
|
|
821de0a3bb | ||
|
|
bc306faa2c | ||
|
|
bd8c844e75 | ||
|
|
abbc6aef09 | ||
|
|
3f324a5af9 | ||
|
|
7b9d9a1ca6 | ||
|
|
12d73ee290 | ||
|
|
d819a26bc1 | ||
|
|
a6484018ef | ||
|
|
f9d0a348c4 | ||
|
|
c3ae219992 | ||
|
|
ad110078ac | ||
|
|
bea3d42f40 | ||
|
|
02f0d7aa08 | ||
|
|
197996de31 | ||
|
|
392fef663e | ||
|
|
130e6ab4a3 | ||
|
|
2e37ee0e33 | ||
|
|
e1310b65cd | ||
|
|
87da792c4c | ||
|
|
56f4634c1f | ||
|
|
61e37d8873 | ||
|
|
41d35fdafd | ||
|
|
a992376053 | ||
|
|
d93d39e29d | ||
|
|
d9cedf89e1 | ||
|
|
a802dc4fd6 | ||
|
|
4caf32629a | ||
|
|
697f2c6520 | ||
|
|
df34e2cdf7 | ||
|
|
592997b65b | ||
|
|
69892655a1 | ||
|
|
f99988afa6 | ||
|
|
4ff6fbd052 | ||
|
|
f544e69d02 | ||
|
|
adf314d594 | ||
|
|
7ae141bd9e | ||
|
|
f8e6f1f2b5 | ||
|
|
f1bb1fc9b8 | ||
|
|
5e8683c9be | ||
|
|
3ce92f2abc | ||
|
|
db76780bc9 | ||
|
|
f37051dc87 | ||
|
|
b96ad57568 | ||
|
|
4ec45a247e | ||
|
|
dde45748d9 | ||
|
|
06685683ae | ||
|
|
65a87196d5 | ||
|
|
09ab922572 | ||
|
|
c53adbc7e8 | ||
|
|
03b2cda845 | ||
|
|
3e810eb043 | ||
|
|
b8f07ce3fc | ||
|
|
10a066a52a | ||
|
|
6557b5d4b6 | ||
|
|
67c9c7bcc5 | ||
|
|
e44a9483c2 | ||
|
|
ecf50a4dad | ||
|
|
0209e3fa76 | ||
|
|
9ea4aaaf37 | ||
|
|
611925fa10 | ||
|
|
bc86f0a672 | ||
|
|
3c37006657 | ||
|
|
dbcfd24ed8 | ||
|
|
14066965fa | ||
|
|
01c9d1758e | ||
|
|
130dc2bd8b | ||
|
|
442a843257 | ||
|
|
0b3c7d1407 | ||
|
|
49c8a25fce | ||
|
|
920afa7bf1 | ||
|
|
b57388fd85 | ||
|
|
5e555b3807 | ||
|
|
3b727ef40a | ||
|
|
4bc4a58a30 | ||
|
|
2f02f0da2b | ||
|
|
592150638e | ||
|
|
20174b9cbe | ||
|
|
b333cde336 | ||
|
|
03c4d56a91 | ||
|
|
2e2a3af967 | ||
|
|
5acf1b5309 | ||
|
|
3db95bafa2 | ||
|
|
cf086e4018 | ||
|
|
483fcfb2c7 | ||
|
|
b6a3697793 | ||
|
|
23b0db36b9 | ||
|
|
182b2fd62d | ||
|
|
a43c34feec | ||
|
|
c9a1a8c3b2 | ||
|
|
a49b92e4c8 | ||
|
|
cfe0677b84 | ||
|
|
b6f46e01c5 | ||
|
|
e20bdbf925 | ||
|
|
6f9f579e5d | ||
|
|
682a01e441 | ||
|
|
6b0c398ae5 | ||
|
|
152815c175 | ||
|
|
9d42db56ca | ||
|
|
f3930a9262 | ||
|
|
10bea3922e | ||
|
|
4a89b1a5dd | ||
|
|
4916eea24f | ||
|
|
cfac3ebf1f | ||
|
|
e0a91f63d3 | ||
|
|
23c73243ab | ||
|
|
f324fa4719 | ||
|
|
c822594882 | ||
|
|
83716c2fd9 | ||
|
|
620d20710a | ||
|
|
cfc367efe7 | ||
|
|
69c507fded | ||
|
|
b57ce4646f | ||
|
|
dba024d561 | ||
|
|
e26ba48a45 | ||
|
|
e53dde7944 | ||
|
|
624f3514fc | ||
|
|
ba8958f39b | ||
|
|
9af0e53ae2 | ||
|
|
5a2f93f035 | ||
|
|
9192ac4420 | ||
|
|
57449c1530 | ||
|
|
89537e6892 | ||
|
|
50948318e0 | ||
|
|
0504d0b0ff | ||
|
|
339c39c1ec | ||
|
|
cd3924b8fc | ||
|
|
42b5ef7a8b | ||
|
|
ad42418089 | ||
|
|
99e70c91e8 | ||
|
|
05ac3dbdcb | ||
|
|
994b539507 | ||
|
|
d5edadabe6 | ||
|
|
05247bb24e | ||
|
|
f69f026ac3 | ||
|
|
015247345c | ||
|
|
0ba516866f | ||
|
|
7afba6b8b5 | ||
|
|
08cc15a55c | ||
|
|
24c34eb3fc | ||
|
|
ba5ba71f35 | ||
|
|
d4065fee78 | ||
|
|
d86f7f41dc | ||
|
|
171a974904 | ||
|
|
0ec1995add | ||
|
|
6dea831d8a | ||
|
|
b214003968 | ||
|
|
bf6e36de37 | ||
|
|
7f7c0519a0 | ||
|
|
83a9beed83 | ||
|
|
1db6b9fe3b | ||
|
|
ccf225c8f9 | ||
|
|
dc519348c5 | ||
|
|
10f4eaf2bd | ||
|
|
d365494fef | ||
|
|
69c64b2fc2 | ||
|
|
dc41ffbe2f | ||
|
|
9037e6695e | ||
|
|
6228b27271 | ||
|
|
75e02bb088 | ||
|
|
4ac9bd7790 | ||
|
|
a6169ab26a | ||
|
|
d82f4b5461 | ||
|
|
a579cc7716 | ||
|
|
b993ab04df | ||
|
|
eef9afd3ed | ||
|
|
06d5d9719c | ||
|
|
f8e51112aa | ||
|
|
e8d5692062 | ||
|
|
d2b0ee40a8 | ||
|
|
3ad5170b71 | ||
|
|
8209e84842 | ||
|
|
fc19180451 | ||
|
|
009b5353b1 | ||
|
|
4beedaa22c | ||
|
|
26fd9a4a1c | ||
|
|
b2c00a2618 | ||
|
|
85637156f6 | ||
|
|
c006f3f0af | ||
|
|
7efc87b6d3 | ||
|
|
450106ea54 | ||
|
|
ffc12176c9 | ||
|
|
3d4fbcaebc | ||
|
|
8db37e9bbf | ||
|
|
7fca13076a | ||
|
|
fd6e198295 | ||
|
|
b71945c29f | ||
|
|
6ef5675be1 | ||
|
|
c7aabd3a8d | ||
|
|
03fd23de39 | ||
|
|
f2eafc374c | ||
|
|
83244f0201 | ||
|
|
f62d30527b | ||
|
|
025abc6169 | ||
|
|
58b43582e1 | ||
|
|
ff68efb206 | ||
|
|
c8f4766ceb | ||
|
|
d968cfe425 | ||
|
|
2f0e39b702 | ||
|
|
4e03edba44 | ||
|
|
00ee2a8852 | ||
|
|
75a308ac79 | ||
|
|
f35dc744dd | ||
|
|
ae9381c36d | ||
|
|
e1087a0c7b | ||
|
|
eb01e76426 | ||
|
|
67dce9c97a | ||
|
|
7edd78884e | ||
|
|
74025fc85e | ||
|
|
b5e543c406 | ||
|
|
352ceca94b | ||
|
|
f3855c57b4 | ||
|
|
f6279ee47f | ||
|
|
aee24ffb31 | ||
|
|
96fed60655 | ||
|
|
dd74cfea14 | ||
|
|
c2cf331ff7 | ||
|
|
744cc542de | ||
|
|
601f5fef95 | ||
|
|
14669b9ced | ||
|
|
5872b884a5 | ||
|
|
d0f1bb24fd | ||
|
|
7373e174db | ||
|
|
cc09085dc2 | ||
|
|
f93743dae6 | ||
|
|
de7933c1dd | ||
|
|
ca7c221d23 | ||
|
|
873e6faef2 | ||
|
|
5938d8b259 | ||
|
|
cd4e462d8c | ||
|
|
a03831f2a2 | ||
|
|
0d7de67e01 | ||
|
|
0acc1bd9e8 | ||
|
|
e6e9f7d4c7 | ||
|
|
9f57ed5e84 | ||
|
|
9cc976ce2e | ||
|
|
6d253c0600 | ||
|
|
73a6723d15 | ||
|
|
5050418c1a | ||
|
|
5ab1210c9c | ||
|
|
51848ff6c3 | ||
|
|
5f52547d9e | ||
|
|
561fa46232 | ||
|
|
7a66212568 | ||
|
|
51ca7657d8 | ||
|
|
bd31c8388c | ||
|
|
545bd032a7 | ||
|
|
e3878ae8bf | ||
|
|
c0c5c83dad | ||
|
|
cbdcfdc2d8 | ||
|
|
741ff2379e | ||
|
|
9307acf0de | ||
|
|
0468355593 | ||
|
|
249f3471c9 | ||
|
|
3d855647c7 | ||
|
|
10deed94e3 | ||
|
|
f108b95704 | ||
|
|
ad26102815 | ||
|
|
05448f50af | ||
|
|
e54be7dc02 | ||
|
|
94c5effdd5 | ||
|
|
62fc7e2f4d | ||
|
|
f7798a00c1 | ||
|
|
854718db0e | ||
|
|
2cfe9e8b9e | ||
|
|
bfff10e25e | ||
|
|
4719ffadc6 | ||
|
|
07409b8a91 | ||
|
|
e60b2d64e5 | ||
|
|
8816d2a639 | ||
|
|
553fbed60f | ||
|
|
bb767a6cdc | ||
|
|
33fc00f8c7 | ||
|
|
3a0241340c | ||
|
|
2093685385 | ||
|
|
10e78d617e | ||
|
|
49529b7e63 | ||
|
|
48e17fad02 | ||
|
|
4ec89ebd69 | ||
|
|
c6d4985cba | ||
|
|
280e24934a | ||
|
|
6b8f99c013 | ||
|
|
812fdd27b5 | ||
|
|
e1e1b29afb | ||
|
|
52a95af828 | ||
|
|
ede576061d | ||
|
|
083123cdfb | ||
|
|
12a2f929f8 | ||
|
|
c1b26473a9 | ||
|
|
c7217ed443 | ||
|
|
cd823fe118 | ||
|
|
b343f975fb | ||
|
|
ab92efcfc0 | ||
|
|
ea7066d02c | ||
|
|
d80c80ecdd | ||
|
|
482b5da02f | ||
|
|
fcf0ecbaa2 | ||
|
|
67248316bd | ||
|
|
dd47c14c65 | ||
|
|
63e8729da4 | ||
|
|
d769c8bb87 | ||
|
|
f052547b78 | ||
|
|
4a2d400087 | ||
|
|
157cc97a65 | ||
|
|
1efc1d0f5b | ||
|
|
622715d2f3 | ||
|
|
1b4d65fd64 | ||
|
|
a0cbf05da8 | ||
|
|
0472ffe569 | ||
|
|
c5cf8480fc | ||
|
|
ab11f09b83 | ||
|
|
5c62a2b2f5 | ||
|
|
b9c0119d2c | ||
|
|
214f5fa94d | ||
|
|
e6f0847ec3 | ||
|
|
94a55cde62 | ||
|
|
1575472a3f | ||
|
|
6bc5337307 | ||
|
|
3eb09cde5e | ||
|
|
5207e7abfc | ||
|
|
fcc42104fa | ||
|
|
c63d007571 | ||
|
|
2a2a19fec7 | ||
|
|
1306a3be61 | ||
|
|
3f0e4c04d7 | ||
|
|
54da85ec62 | ||
|
|
b26b0c3a22 | ||
|
|
470262d400 | ||
|
|
cb0d91facd | ||
|
|
0617061c5b | ||
|
|
8646221ee8 | ||
|
|
22c36102b9 | ||
|
|
a714961b20 | ||
|
|
549e7befed | ||
|
|
11a2dc7d7f | ||
|
|
662a3d4b76 | ||
|
|
dd6901fe15 | ||
|
|
2b42f84815 | ||
|
|
8f60626291 | ||
|
|
1871c15cd0 | ||
|
|
20c4224e2d | ||
|
|
25b74467ce | ||
|
|
9d446469f8 | ||
|
|
98281a6394 | ||
|
|
6ca7c41861 | ||
|
|
b1380ce81f | ||
|
|
091f5eec01 | ||
|
|
f89945e730 | ||
|
|
0dbed968a0 | ||
|
|
b0ad36425d | ||
|
|
dddbfe6473 | ||
|
|
14fbeb7879 | ||
|
|
dc7eeedb24 | ||
|
|
d7cc546f58 | ||
|
|
386d766597 | ||
|
|
7d7399a9eb | ||
|
|
55db9f9719 | ||
|
|
e3c3d1ac69 | ||
|
|
bd0279730c | ||
|
|
988f3a39f8 | ||
|
|
f65380f847 | ||
|
|
a62b7f0024 | ||
|
|
4512a1a91d | ||
|
|
af7d44164c | ||
|
|
6dbcb62da7 | ||
|
|
239de4c283 | ||
|
|
544236f1a0 | ||
|
|
145872b9f4 | ||
|
|
90c00b6db9 | ||
|
|
585003640f | ||
|
|
9440dc8dd5 | ||
|
|
9fe77baf05 | ||
|
|
133888d760 | ||
|
|
9160469a18 | ||
|
|
71ddb1f841 | ||
|
|
4f718cffbf | ||
|
|
b9d84fe007 | ||
|
|
ad970837ec | ||
|
|
d168128174 | ||
|
|
2919d4912c | ||
|
|
dcb9d75db7 |
@@ -34,8 +34,8 @@ const createPattern = packageName => [
|
||||
{
|
||||
group: ['@affine/env/constant'],
|
||||
message:
|
||||
'Do not import from @affine/env/constant. Use `environment.isDesktop` instead',
|
||||
importNames: ['isDesktop'],
|
||||
'Do not import from @affine/env/constant. Use `environment.isElectron` instead',
|
||||
importNames: ['isElectron'],
|
||||
},
|
||||
];
|
||||
|
||||
@@ -247,7 +247,8 @@ const config = {
|
||||
'react-hooks/exhaustive-deps': [
|
||||
'warn',
|
||||
{
|
||||
additionalHooks: '(useAsyncCallback|useDraggable|useDropTarget)',
|
||||
additionalHooks:
|
||||
'(useAsyncCallback|useCatchEventCallback|useDraggable|useDropTarget|useRefEffect)',
|
||||
},
|
||||
],
|
||||
},
|
||||
|
||||
36
.github/actions/cluster-auth/action.yml
vendored
Normal file
36
.github/actions/cluster-auth/action.yml
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
name: 'Auth to Cluster'
|
||||
description: 'Auth to the GCP Cluster'
|
||||
inputs:
|
||||
gcp-project-number:
|
||||
description: 'GCP project number'
|
||||
required: true
|
||||
gcp-project-id:
|
||||
description: 'GCP project id'
|
||||
required: true
|
||||
service-account:
|
||||
description: 'Service account'
|
||||
cluster-name:
|
||||
description: 'Cluster name'
|
||||
cluster-location:
|
||||
description: 'Cluster location'
|
||||
|
||||
runs:
|
||||
using: 'composite'
|
||||
steps:
|
||||
- id: auth
|
||||
uses: google-github-actions/auth@v2
|
||||
with:
|
||||
workload_identity_provider: 'projects/${{ inputs.gcp-project-number }}/locations/global/workloadIdentityPools/github-actions/providers/github-actions-helm-deploy'
|
||||
service_account: '${{ inputs.service-account }}'
|
||||
token_format: 'access_token'
|
||||
project_id: '${{ inputs.gcp-project-id }}'
|
||||
|
||||
- name: 'Setup gcloud cli'
|
||||
uses: 'google-github-actions/setup-gcloud@v2'
|
||||
with:
|
||||
install_components: 'gke-gcloud-auth-plugin'
|
||||
|
||||
- id: get-gke-credentials
|
||||
shell: bash
|
||||
run: |
|
||||
gcloud container clusters get-credentials ${{ inputs.cluster-name }} --region ${{ inputs.cluster-location }} --project ${{ inputs.gcp-project-id }}
|
||||
24
.github/actions/deploy/action.yml
vendored
24
.github/actions/deploy/action.yml
vendored
@@ -24,24 +24,14 @@ runs:
|
||||
shell: bash
|
||||
run: |
|
||||
echo "GIT_SHORT_HASH=$(git rev-parse --short HEAD)" >> "$GITHUB_ENV"
|
||||
- uses: azure/setup-helm@v4
|
||||
- id: auth
|
||||
uses: google-github-actions/auth@v2
|
||||
- name: 'Auth to cluster'
|
||||
uses: './.github/actions/cluster-auth'
|
||||
with:
|
||||
workload_identity_provider: 'projects/${{ inputs.gcp-project-number }}/locations/global/workloadIdentityPools/github-actions/providers/github-actions-helm-deploy'
|
||||
service_account: '${{ inputs.service-account }}'
|
||||
token_format: 'access_token'
|
||||
project_id: '${{ inputs.gcp-project-id }}'
|
||||
|
||||
- name: 'Setup gcloud cli'
|
||||
uses: 'google-github-actions/setup-gcloud@v2'
|
||||
with:
|
||||
install_components: 'gke-gcloud-auth-plugin'
|
||||
|
||||
- id: get-gke-credentials
|
||||
shell: bash
|
||||
run: |
|
||||
gcloud container clusters get-credentials ${{ inputs.cluster-name }} --region ${{ inputs.cluster-location }} --project ${{ inputs.gcp-project-id }}
|
||||
gcp-project-number: '${{ inputs.gcp-project-number }}'
|
||||
gcp-project-id: '${{ inputs.gcp-project-id }}'
|
||||
service-account: '${{ inputs.service-account }}'
|
||||
cluster-name: '${{ inputs.cluster-name }}'
|
||||
cluster-location: '${{ inputs.cluster-location }}'
|
||||
|
||||
- name: Deploy
|
||||
shell: bash
|
||||
|
||||
2
.github/actions/setup-node/action.yml
vendored
2
.github/actions/setup-node/action.yml
vendored
@@ -156,7 +156,7 @@ runs:
|
||||
- name: Install Playwright's dependencies
|
||||
shell: bash
|
||||
if: inputs.playwright-install == 'true'
|
||||
run: yarn playwright install --with-deps chromium
|
||||
run: yarn playwright install --with-deps chromium webkit
|
||||
env:
|
||||
PLAYWRIGHT_BROWSERS_PATH: ${{ github.workspace }}/node_modules/.cache/ms-playwright
|
||||
|
||||
|
||||
2
.github/actions/setup-version/action.yml
vendored
2
.github/actions/setup-version/action.yml
vendored
@@ -17,7 +17,7 @@ runs:
|
||||
PACKAGE_VERSION=$(node -p "require('./package.json').version")
|
||||
TIME_VERSION=$(date +%Y%m%d%H%M)
|
||||
GIT_SHORT_HASH=$(git rev-parse --short HEAD)
|
||||
APP_VERSION=$PACKAGE_VERSION-nightly-$TIME_VERSION-$GIT_SHORT_HASH
|
||||
APP_VERSION=$PACKAGE_VERSION-nightly-$GIT_SHORT_HASH
|
||||
fi
|
||||
echo $APP_VERSION
|
||||
echo "APP_VERSION=$APP_VERSION" >> "$GITHUB_OUTPUT"
|
||||
|
||||
3
.github/deployment/front/Dockerfile
vendored
3
.github/deployment/front/Dockerfile
vendored
@@ -1,7 +1,8 @@
|
||||
FROM openresty/openresty:1.25.3.1-0-buster
|
||||
FROM openresty/openresty:1.25.3.2-0-buster
|
||||
WORKDIR /app
|
||||
COPY ./packages/frontend/web/dist ./dist
|
||||
COPY ./packages/frontend/admin/dist ./admin
|
||||
COPY ./packages/frontend/mobile/dist ./mobile
|
||||
COPY ./.github/deployment/front/nginx.conf /usr/local/openresty/nginx/conf/nginx.conf
|
||||
COPY ./.github/deployment/front/affine.nginx.conf /etc/nginx/conf.d/affine.nginx.conf
|
||||
|
||||
|
||||
20
.github/deployment/front/affine.nginx.conf
vendored
20
.github/deployment/front/affine.nginx.conf
vendored
@@ -11,10 +11,28 @@ server {
|
||||
try_files $uri $uri/ =404;
|
||||
}
|
||||
|
||||
set $app_root_path /app/dist/;
|
||||
set $mobile_root /app/dist/;
|
||||
set_by_lua $affine_env 'return os.getenv("AFFINE_ENV")';
|
||||
|
||||
if ($affine_env = "dev") {
|
||||
set $mobile_root /app/mobile/;
|
||||
}
|
||||
|
||||
# https://gist.github.com/mariusom/6683dc52b1cad1a1f372e908bdb209d0
|
||||
if ($http_user_agent ~* "(android|bb\d+|meego).+mobile|avantgo|bada\/|blackberry|blazer|compal|elaine|fennec|hiptop|iemobile|ip(hone|od)|iris|kindle|lge |maemo|midp|mmp|mobile.+firefox|netfront|opera m(ob|in)i|palm( os)?|phone|p(ixi|re)\/|plucker|pocket|psp|series(4|6)0|symbian|treo|up\.(browser|link)|vodafone|wap|windows ce|xda|xiino") {
|
||||
set $app_root_path $mobile_root;
|
||||
}
|
||||
|
||||
if ($http_user_agent ~* "^(1207|6310|6590|3gso|4thp|50[1-6]i|770s|802s|a wa|abac|ac(er|oo|s\-)|ai(ko|rn)|al(av|ca|co)|amoi|an(ex|ny|yw)|aptu|ar(ch|go)|as(te|us)|attw|au(di|\-m|r |s )|avan|be(ck|ll|nq)|bi(lb|rd)|bl(ac|az)|br(e|v)w|bumb|bw\-(n|u)|c55\/|capi|ccwa|cdm\-|cell|chtm|cldc|cmd\-|co(mp|nd)|craw|da(it|ll|ng)|dbte|dc\-s|devi|dica|dmob|do(c|p)o|ds(12|\-d)|el(49|ai)|em(l2|ul)|er(ic|k0)|esl8|ez([4-7]0|os|wa|ze)|fetc|fly(\-|_)|g1 u|g560|gene|gf\-5|g\-mo|go(\.w|od)|gr(ad|un)|haie|hcit|hd\-(m|p|t)|hei\-|hi(pt|ta)|hp( i|ip)|hs\-c|ht(c(\-| |_|a|g|p|s|t)|tp)|hu(aw|tc)|i\-(20|go|ma)|i230|iac( |\-|\/)|ibro|idea|ig01|ikom|im1k|inno|ipaq|iris|ja(t|v)a|jbro|jemu|jigs|kddi|keji|kgt( |\/)|klon|kpt |kwc\-|kyo(c|k)|le(no|xi)|lg( g|\/(k|l|u)|50|54|\-[a-w])|libw|lynx|m1\-w|m3ga|m50\/|ma(te|ui|xo)|mc(01|21|ca)|m\-cr|me(rc|ri)|mi(o8|oa|ts)|mmef|mo(01|02|bi|de|do|t(\-| |o|v)|zz)|mt(50|p1|v )|mwbp|mywa|n10[0-2]|n20[2-3]|n30(0|2)|n50(0|2|5)|n7(0(0|1)|10)|ne((c|m)\-|on|tf|wf|wg|wt)|nok(6|i)|nzph|o2im|op(ti|wv)|oran|owg1|p800|pan(a|d|t)|pdxg|pg(13|\-([1-8]|c))|phil|pire|pl(ay|uc)|pn\-2|po(ck|rt|se)|prox|psio|pt\-g|qa\-a|qc(07|12|21|32|60|\-[2-7]|i\-)|qtek|r380|r600|raks|rim9|ro(ve|zo)|s55\/|sa(ge|ma|mm|ms|ny|va)|sc(01|h\-|oo|p\-)|sdk\/|se(c(\-|0|1)|47|mc|nd|ri)|sgh\-|shar|sie(\-|m)|sk\-0|sl(45|id)|sm(al|ar|b3|it|t5)|so(ft|ny)|sp(01|h\-|v\-|v )|sy(01|mb)|t2(18|50)|t6(00|10|18)|ta(gt|lk)|tcl\-|tdg\-|tel(i|m)|tim\-|t\-mo|to(pl|sh)|ts(70|m\-|m3|m5)|tx\-9|up(\.b|g1|si)|utst|v400|v750|veri|vi(rg|te)|vk(40|5[0-3]|\-v)|vm40|voda|vulc|vx(52|53|60|61|70|80|81|83|85|98)|w3c(\-| )|webc|whit|wi(g |nc|nw)|wmlb|wonu|x700|yas\-|your|zeto|zte\-)") {
|
||||
set $app_root_path $mobile_root;
|
||||
}
|
||||
|
||||
location / {
|
||||
root /app/dist/;
|
||||
root $app_root_path;
|
||||
index index.html;
|
||||
try_files $uri $uri/ /index.html;
|
||||
add_header Cache-Control "private, no-cache, no-store, max-age=0, must-revalidate";
|
||||
}
|
||||
|
||||
error_page 404 /404.html;
|
||||
|
||||
17
.github/deployment/front/nginx.conf
vendored
17
.github/deployment/front/nginx.conf
vendored
@@ -1,14 +1,15 @@
|
||||
worker_processes 4;
|
||||
worker_processes 4;
|
||||
error_log /var/log/nginx/error.log warn;
|
||||
pcre_jit on;
|
||||
env AFFINE_ENV;
|
||||
events {
|
||||
worker_connections 1024;
|
||||
worker_connections 1024;
|
||||
}
|
||||
http {
|
||||
include mime.types;
|
||||
log_format main '$remote_addr [$time_local] "$request" '
|
||||
'$status $body_bytes_sent "$http_referer" '
|
||||
'"$http_user_agent" "$http_x_forwarded_for"';
|
||||
access_log /var/log/nginx/access.log main;
|
||||
include /etc/nginx/conf.d/*.conf;
|
||||
include mime.types;
|
||||
log_format main '$remote_addr [$time_local] "$request" '
|
||||
'$status $body_bytes_sent "$http_referer" '
|
||||
'"$http_user_agent" "$http_x_forwarded_for"';
|
||||
access_log /var/log/nginx/access.log main;
|
||||
include /etc/nginx/conf.d/*.conf;
|
||||
}
|
||||
|
||||
2
.github/helm/affine/Chart.yaml
vendored
2
.github/helm/affine/Chart.yaml
vendored
@@ -3,4 +3,4 @@ name: affine
|
||||
description: AFFiNE cloud chart
|
||||
type: application
|
||||
version: 0.0.0
|
||||
appVersion: "0.15.0"
|
||||
appVersion: "0.16.0"
|
||||
|
||||
@@ -3,7 +3,7 @@ name: graphql
|
||||
description: AFFiNE GraphQL server
|
||||
type: application
|
||||
version: 0.0.0
|
||||
appVersion: "0.15.0"
|
||||
appVersion: "0.16.0"
|
||||
dependencies:
|
||||
- name: gcloud-sql-proxy
|
||||
version: 0.0.0
|
||||
|
||||
@@ -77,8 +77,6 @@ spec:
|
||||
value: "{{ .Values.app.https }}"
|
||||
- name: ENABLE_R2_OBJECT_STORAGE
|
||||
value: "{{ .Values.app.objectStorage.r2.enabled }}"
|
||||
- name: ENABLE_CAPTCHA
|
||||
value: "{{ .Values.app.captcha.enabled }}"
|
||||
- name: FEATURES_EARLY_ACCESS_PREVIEW
|
||||
value: "{{ .Values.app.features.earlyAccessPreview }}"
|
||||
- name: FEATURES_SYNC_CLIENT_VERSION_CHECK
|
||||
@@ -204,12 +202,12 @@ spec:
|
||||
protocol: TCP
|
||||
livenessProbe:
|
||||
httpGet:
|
||||
path: /
|
||||
path: /info
|
||||
port: http
|
||||
initialDelaySeconds: {{ .Values.probe.initialDelaySeconds }}
|
||||
readinessProbe:
|
||||
httpGet:
|
||||
path: /
|
||||
path: /info
|
||||
port: http
|
||||
initialDelaySeconds: {{ .Values.probe.initialDelaySeconds }}
|
||||
resources:
|
||||
|
||||
@@ -4,6 +4,10 @@ metadata:
|
||||
name: {{ include "graphql.fullname" . }}
|
||||
labels:
|
||||
{{- include "graphql.labels" . | nindent 4 }}
|
||||
{{- with .Values.service.annotations }}
|
||||
annotations:
|
||||
{{- toYaml . | nindent 4 }}
|
||||
{{- end }}
|
||||
spec:
|
||||
type: {{ .Values.service.type }}
|
||||
ports:
|
||||
|
||||
2
.github/helm/affine/charts/sync/Chart.yaml
vendored
2
.github/helm/affine/charts/sync/Chart.yaml
vendored
@@ -3,7 +3,7 @@ name: sync
|
||||
description: AFFiNE Sync Server
|
||||
type: application
|
||||
version: 0.0.0
|
||||
appVersion: "0.15.0"
|
||||
appVersion: "0.16.0"
|
||||
dependencies:
|
||||
- name: gcloud-sql-proxy
|
||||
version: 0.0.0
|
||||
|
||||
@@ -27,6 +27,9 @@ spec:
|
||||
- name: {{ .Chart.Name }}
|
||||
image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
|
||||
imagePullPolicy: {{ .Values.image.pullPolicy }}
|
||||
env:
|
||||
- name: AFFINE_ENV
|
||||
value: "{{ .Release.Namespace }}"
|
||||
ports:
|
||||
- name: http
|
||||
containerPort: {{ .Values.service.port }}
|
||||
|
||||
7
.github/helm/affine/templates/ingress.yaml
vendored
7
.github/helm/affine/templates/ingress.yaml
vendored
@@ -60,13 +60,6 @@ spec:
|
||||
name: affine-graphql
|
||||
port:
|
||||
number: {{ .Values.graphql.service.port }}
|
||||
- path: /oauth
|
||||
pathType: Prefix
|
||||
backend:
|
||||
service:
|
||||
name: affine-graphql
|
||||
port:
|
||||
number: {{ .Values.graphql.service.port }}
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
backend:
|
||||
|
||||
4
.github/helm/affine/values.yaml
vendored
4
.github/helm/affine/values.yaml
vendored
@@ -36,14 +36,14 @@ graphql:
|
||||
type: ClusterIP
|
||||
port: 3000
|
||||
annotations:
|
||||
cloud.google.com/backend-config: '{"default": "affine-backendconfig"}'
|
||||
cloud.google.com/backend-config: '{"default": "affine-api-backendconfig"}'
|
||||
|
||||
sync:
|
||||
service:
|
||||
type: ClusterIP
|
||||
port: 3010
|
||||
annotations:
|
||||
cloud.google.com/backend-config: '{"default": "affine-backendconfig"}'
|
||||
cloud.google.com/backend-config: '{"default": "affine-api-backendconfig"}'
|
||||
|
||||
web:
|
||||
service:
|
||||
|
||||
10
.github/helm/separate-config/backend-config.yaml
vendored
Normal file
10
.github/helm/separate-config/backend-config.yaml
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
apiVersion: cloud.google.com/v1
|
||||
kind: BackendConfig
|
||||
metadata:
|
||||
name: "affine-api-backendconfig"
|
||||
spec:
|
||||
healthCheck:
|
||||
timeoutSec: 1
|
||||
type: HTTP
|
||||
requestPath: /info
|
||||
|
||||
3
.github/renovate.json
vendored
3
.github/renovate.json
vendored
@@ -27,7 +27,8 @@
|
||||
"matchPackagePatterns": ["^@blocksuite"],
|
||||
"excludePackageNames": ["@blocksuite/icons"],
|
||||
"rangeStrategy": "replace",
|
||||
"followTag": "canary"
|
||||
"followTag": "canary",
|
||||
"enabled": false
|
||||
},
|
||||
{
|
||||
"groupName": "all non-major dependencies",
|
||||
|
||||
35
.github/workflows/build-test.yml
vendored
35
.github/workflows/build-test.yml
vendored
@@ -143,6 +143,36 @@ jobs:
|
||||
path: ./test-results
|
||||
if-no-files-found: ignore
|
||||
|
||||
e2e-mobile-test:
|
||||
name: E2E Mobile Test
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
DISTRIBUTION: mobile
|
||||
IN_CI_TEST: true
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
shard: [1, 2, 3, 4, 5]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Node.js
|
||||
uses: ./.github/actions/setup-node
|
||||
with:
|
||||
playwright-install: true
|
||||
electron-install: false
|
||||
full-cache: true
|
||||
|
||||
- name: Run playwright tests
|
||||
run: yarn workspace @affine-test/affine-mobile e2e --forbid-only --shard=${{ matrix.shard }}/${{ strategy.job-total }}
|
||||
|
||||
- name: Upload test results
|
||||
if: ${{ failure() }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: test-results-e2e-mobile-${{ matrix.shard }}
|
||||
path: ./test-results
|
||||
if-no-files-found: ignore
|
||||
|
||||
e2e-migration-test:
|
||||
name: E2E Migration Test
|
||||
runs-on: ubuntu-latest
|
||||
@@ -180,7 +210,7 @@ jobs:
|
||||
- name: Setup Node.js
|
||||
uses: ./.github/actions/setup-node
|
||||
with:
|
||||
electron-install: false
|
||||
electron-install: true
|
||||
full-cache: true
|
||||
|
||||
- name: Download affine.linux-x64-gnu.node
|
||||
@@ -443,6 +473,8 @@ jobs:
|
||||
${{ matrix.tests.script }}
|
||||
env:
|
||||
DEV_SERVER_URL: http://localhost:8080
|
||||
COPILOT_OPENAI_API_KEY: 1
|
||||
COPILOT_FAL_API_KEY: 1
|
||||
|
||||
- name: Upload test results
|
||||
if: ${{ failure() }}
|
||||
@@ -569,6 +601,7 @@ jobs:
|
||||
- lint
|
||||
- check-yarn-binary
|
||||
- e2e-test
|
||||
- e2e-mobile-test
|
||||
- e2e-migration-test
|
||||
- unit-test
|
||||
- server-test
|
||||
|
||||
177
.github/workflows/deploy.yml
vendored
177
.github/workflows/deploy.yml
vendored
@@ -21,6 +21,47 @@ permissions:
|
||||
packages: 'write'
|
||||
|
||||
jobs:
|
||||
output-prev-version:
|
||||
name: Output previous version
|
||||
runs-on: ubuntu-latest
|
||||
environment: ${{ github.event.inputs.flavor }}
|
||||
outputs:
|
||||
prev: ${{ steps.print.outputs.version }}
|
||||
namespace: ${{ steps.print.outputs.namespace }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Auth to Cluster
|
||||
uses: './.github/actions/cluster-auth'
|
||||
with:
|
||||
gcp-project-number: ${{ secrets.GCP_PROJECT_NUMBER }}
|
||||
gcp-project-id: ${{ secrets.GCP_PROJECT_ID }}
|
||||
service-account: ${{ secrets.GCP_HELM_DEPLOY_SERVICE_ACCOUNT }}
|
||||
cluster-name: ${{ secrets.GCP_CLUSTER_NAME }}
|
||||
cluster-location: ${{ secrets.GCP_CLUSTER_LOCATION }}
|
||||
- name: Output previous version
|
||||
id: print
|
||||
run: |
|
||||
namespace=""
|
||||
if [ "${{ github.event.inputs.flavor }}" = "canary" ]; then
|
||||
namespace="dev"
|
||||
elif [ "${{ github.event.inputs.flavor }}" = "beta" ]; then
|
||||
namespace="beta"
|
||||
elif [ "${{ github.event.inputs.flavor }}" = "stable" ]; then
|
||||
namespace="production"
|
||||
else
|
||||
echo "Invalid flavor: ${{ github.event.inputs.flavor }}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Namespace set to: $namespace"
|
||||
|
||||
# Get the previous version from the deployment
|
||||
prev_version=$(kubectl get deployment -n $namespace affine-graphql -o=jsonpath='{.spec.template.spec.containers[0].image}' | awk -F '-' '{print $3}')
|
||||
|
||||
echo "Previous version: $prev_version"
|
||||
echo "version=$prev_version" >> $GITHUB_OUTPUT
|
||||
echo "namesapce=$namespace" >> $GITHUB_OUTPUT
|
||||
|
||||
build-server-image:
|
||||
name: Build Server Image
|
||||
uses: ./.github/workflows/build-server-image.yml
|
||||
@@ -48,6 +89,7 @@ jobs:
|
||||
CAPTCHA_SITE_KEY: ${{ secrets.CAPTCHA_SITE_KEY }}
|
||||
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
|
||||
SENTRY_PROJECT: 'affine-web'
|
||||
SENTRY_RELEASE: ${{ steps.version.outputs.APP_VERSION }}
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
|
||||
PERFSEE_TOKEN: ${{ secrets.PERFSEE_TOKEN }}
|
||||
@@ -70,7 +112,7 @@ jobs:
|
||||
uses: ./.github/actions/setup-version
|
||||
- name: Setup Node.js
|
||||
uses: ./.github/actions/setup-node
|
||||
- name: Build Core
|
||||
- name: Build Admin
|
||||
run: yarn nx build @affine/admin --skip-nx-cache
|
||||
env:
|
||||
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
|
||||
@@ -91,12 +133,45 @@ jobs:
|
||||
path: ./packages/frontend/admin/dist
|
||||
if-no-files-found: error
|
||||
|
||||
build-mobile:
|
||||
name: Build @affine/mobile
|
||||
runs-on: ubuntu-latest
|
||||
environment: ${{ github.event.inputs.flavor }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Version
|
||||
id: version
|
||||
uses: ./.github/actions/setup-version
|
||||
- name: Setup Node.js
|
||||
uses: ./.github/actions/setup-node
|
||||
- name: Build Mobile
|
||||
run: yarn nx build @affine/mobile --skip-nx-cache
|
||||
env:
|
||||
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
|
||||
R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
|
||||
R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
|
||||
BUILD_TYPE: ${{ github.event.inputs.flavor }}
|
||||
CAPTCHA_SITE_KEY: ${{ secrets.CAPTCHA_SITE_KEY }}
|
||||
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
|
||||
SENTRY_PROJECT: 'affine-mobile'
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
|
||||
PERFSEE_TOKEN: ${{ secrets.PERFSEE_TOKEN }}
|
||||
MIXPANEL_TOKEN: ${{ secrets.MIXPANEL_TOKEN }}
|
||||
- name: Upload mobile artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: mobile
|
||||
path: ./packages/frontend/mobile/dist
|
||||
if-no-files-found: error
|
||||
|
||||
build-frontend-image:
|
||||
name: Build Frontend Image
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- build-web
|
||||
- build-admin
|
||||
- build-mobile
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Download web artifact
|
||||
@@ -109,6 +184,11 @@ jobs:
|
||||
with:
|
||||
name: admin
|
||||
path: ./packages/frontend/admin/dist
|
||||
- name: Download mobile artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: mobile
|
||||
path: ./packages/frontend/mobile/dist
|
||||
- name: Setup env
|
||||
run: |
|
||||
echo "GIT_SHORT_HASH=$(git rev-parse --short HEAD)" >> "$GITHUB_ENV"
|
||||
@@ -192,3 +272,98 @@ jobs:
|
||||
STRIPE_API_KEY: ${{ secrets.STRIPE_API_KEY }}
|
||||
STRIPE_WEBHOOK_KEY: ${{ secrets.STRIPE_WEBHOOK_KEY }}
|
||||
STATIC_IP_NAME: ${{ secrets.STATIC_IP_NAME }}
|
||||
|
||||
deploy-done:
|
||||
needs:
|
||||
- output-prev-version
|
||||
- build-web
|
||||
- build-admin
|
||||
- build-mobile
|
||||
- build-frontend-image
|
||||
- build-server-image
|
||||
- deploy
|
||||
if: always()
|
||||
runs-on: ubuntu-latest
|
||||
name: Post deploy message
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
repository: toeverything/blocksuite
|
||||
path: blocksuite
|
||||
fetch-depth: 0
|
||||
fetch-tags: true
|
||||
- name: Setup Node.js
|
||||
uses: ./.github/actions/setup-node
|
||||
with:
|
||||
extra-flags: 'workspaces focus @affine/changelog'
|
||||
electron-install: false
|
||||
- name: Output deployed info
|
||||
if: ${{ always() && !contains(needs.*.result, 'failure') && !contains(needs.*.result, 'cancelled') }}
|
||||
id: set_info
|
||||
run: |
|
||||
if [ "${{ github.event.inputs.flavor }}" = "canary" ]; then
|
||||
echo "deployed_url=https://affine.fail" >> $GITHUB_OUTPUT
|
||||
elif [ "${{ github.event.inputs.flavor }}" = "beta" ]; then
|
||||
echo "deployed_url=https://insider.affine.pro" >> $GITHUB_OUTPUT
|
||||
elif [ "${{ github.event.inputs.flavor }}" = "stable" ]; then
|
||||
echo "deployed_url=https://app.affine.pro" >> $GITHUB_OUTPUT
|
||||
else
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
|
||||
- name: Post Success event to a Slack channel
|
||||
if: ${{ always() && !contains(needs.*.result, 'failure') && !contains(needs.*.result, 'cancelled') }}
|
||||
run: node ./tools/changelog/index.js
|
||||
env:
|
||||
CHANNEL_ID: ${{ secrets.RELEASE_SLACK_CHNNEL_ID }}
|
||||
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
|
||||
DEPLOYED_URL: ${{ steps.set_info.outputs.deployed_url }}
|
||||
PREV_VERSION: ${{ needs.output-prev-version.outputs.prev }}
|
||||
NAMESPACE: ${{ needs.output-prev-version.outputs.namespace }}
|
||||
DEPLOYMENT: 'SERVER'
|
||||
FLAVOR: ${{ github.event.inputs.flavor }}
|
||||
BLOCKSUITE_REPO_PATH: ${{ github.workspace }}/blocksuite
|
||||
- name: Post Failed event to a Slack channel
|
||||
id: failed-slack
|
||||
uses: slackapi/slack-github-action@v1.27.0
|
||||
if: ${{ always() && contains(needs.*.result, 'failure') }}
|
||||
with:
|
||||
channel-id: ${{ secrets.RELEASE_SLACK_CHNNEL_ID }}
|
||||
payload: |
|
||||
{
|
||||
"blocks": [
|
||||
{
|
||||
"type": "section",
|
||||
"text": {
|
||||
"text": "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Backend deploy failed `${{ github.event.inputs.flavor }}`>",
|
||||
"type": "mrkdwn"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
env:
|
||||
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
|
||||
- name: Post Cancel event to a Slack channel
|
||||
id: cancel-slack
|
||||
uses: slackapi/slack-github-action@v1.27.0
|
||||
if: ${{ always() && contains(needs.*.result, 'cancelled') && !contains(needs.*.result, 'failure') }}
|
||||
with:
|
||||
channel-id: ${{ secrets.RELEASE_SLACK_CHNNEL_ID }}
|
||||
payload: |
|
||||
{
|
||||
"blocks": [
|
||||
{
|
||||
"type": "section",
|
||||
"text": {
|
||||
"text": "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Backend deploy cancelled `${{ github.event.inputs.flavor }}`>",
|
||||
"type": "mrkdwn"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
env:
|
||||
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
|
||||
|
||||
29
.github/workflows/release-desktop.yml
vendored
29
.github/workflows/release-desktop.yml
vendored
@@ -27,6 +27,8 @@ permissions:
|
||||
actions: write
|
||||
contents: write
|
||||
security-events: write
|
||||
id-token: write
|
||||
attestations: write
|
||||
|
||||
env:
|
||||
BUILD_TYPE: ${{ github.event.inputs.build-type }}
|
||||
@@ -56,6 +58,7 @@ jobs:
|
||||
SENTRY_PROJECT: 'affine'
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
|
||||
SENTRY_RELEASE: ${{ steps.version.outputs.APP_VERSION }}
|
||||
RELEASE_VERSION: ${{ steps.version.outputs.APP_VERSION }}
|
||||
SKIP_NX_CACHE: 'true'
|
||||
MIXPANEL_TOKEN: ${{ secrets.MIXPANEL_TOKEN }}
|
||||
@@ -158,6 +161,20 @@ jobs:
|
||||
mv packages/frontend/electron/out/*/make/zip/linux/x64/*.zip ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-linux-x64.zip
|
||||
mv packages/frontend/electron/out/*/make/*.AppImage ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-linux-x64.appimage
|
||||
|
||||
- uses: actions/attest-build-provenance@v1
|
||||
if: ${{ matrix.spec.platform == 'darwin' }}
|
||||
with:
|
||||
subject-path: |
|
||||
./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.zip
|
||||
./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.dmg
|
||||
|
||||
- uses: actions/attest-build-provenance@v1
|
||||
if: ${{ matrix.spec.platform == 'linux' }}
|
||||
with:
|
||||
subject-path: |
|
||||
./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-linux-x64.zip
|
||||
./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-linux-x64.appimage
|
||||
|
||||
- name: Upload Artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
@@ -254,6 +271,9 @@ jobs:
|
||||
FILES_TO_BE_SIGNED: ${{ steps.get_files_to_be_signed.outputs.FILES_TO_BE_SIGNED }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Version
|
||||
id: version
|
||||
uses: ./.github/actions/setup-version
|
||||
- name: Setup Node.js
|
||||
timeout-minutes: 10
|
||||
uses: ./.github/actions/setup-node
|
||||
@@ -324,6 +344,13 @@ jobs:
|
||||
mv packages/frontend/electron/out/*/make/squirrel.windows/x64/*.exe ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-x64.exe
|
||||
mv packages/frontend/electron/out/*/make/nsis.windows/x64/*.exe ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-x64.nsis.exe
|
||||
|
||||
- uses: actions/attest-build-provenance@v1
|
||||
with:
|
||||
subject-path: |
|
||||
./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-x64.zip
|
||||
./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-x64.exe
|
||||
./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-x64.nsis.exe
|
||||
|
||||
- name: Upload Artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
@@ -364,7 +391,7 @@ jobs:
|
||||
path: ./
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 18
|
||||
node-version: 20
|
||||
- name: Generate Release yml
|
||||
run: |
|
||||
node ./packages/frontend/electron/scripts/generate-yml.js
|
||||
|
||||
894
.yarn/releases/yarn-4.3.1.cjs
vendored
894
.yarn/releases/yarn-4.3.1.cjs
vendored
File diff suppressed because one or more lines are too long
925
.yarn/releases/yarn-4.4.1.cjs
vendored
Executable file
925
.yarn/releases/yarn-4.4.1.cjs
vendored
Executable file
File diff suppressed because one or more lines are too long
@@ -12,4 +12,4 @@ npmPublishAccess: public
|
||||
|
||||
npmPublishRegistry: "https://registry.npmjs.org"
|
||||
|
||||
yarnPath: .yarn/releases/yarn-4.3.1.cjs
|
||||
yarnPath: .yarn/releases/yarn-4.4.1.cjs
|
||||
|
||||
619
Cargo.lock
generated
619
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -18,7 +18,7 @@ rand = "0.8"
|
||||
serde = "1"
|
||||
serde_json = "1"
|
||||
sha3 = "0.10"
|
||||
sqlx = { version = "0.7", default-features = false, features = ["chrono", "macros", "migrate", "runtime-tokio", "sqlite", "tls-rustls"] }
|
||||
sqlx = { version = "0.8", default-features = false, features = ["chrono", "macros", "migrate", "runtime-tokio", "sqlite", "tls-rustls"] }
|
||||
tiktoken-rs = "0.5"
|
||||
tokio = "1.37"
|
||||
uuid = "1.8"
|
||||
|
||||
@@ -23,7 +23,7 @@
|
||||
|
||||
<div align="center">
|
||||
<a href="https://affine.pro">Home Page</a> |
|
||||
<a href="https://discord.com/invite/yz6tGVsf5p">Discord</a> |
|
||||
<a href="https://discord.gg/whd5mjYqVw">Discord</a> |
|
||||
<a href="https://app.affine.pro">Live Demo</a> |
|
||||
<a href="https://affine.pro/blog/">Blog</a> |
|
||||
<a href="https://docs.affine.pro/docs/">Documentation</a>
|
||||
@@ -176,6 +176,12 @@ Thanks to [Chromatic](https://www.chromatic.com/) for providing the visual testi
|
||||
|
||||
## License
|
||||
|
||||
### Editions
|
||||
|
||||
- AFFiNE Community Edition (CE) is the current available version, it's free for self-host under the MIT license.
|
||||
|
||||
- AFFiNE Enterprise Edition (EE) is yet to be published, it will have more advanced features and enterprise-oriented offerings, including but not exclusive to rebranding and SSO, advanced admin and audit, etc., you may refer to https://affine.pro/pricing for more information
|
||||
|
||||
See [LICENSE] for details.
|
||||
|
||||
[all-contributors-badge]: https://img.shields.io/github/contributors/toeverything/AFFiNE
|
||||
|
||||
@@ -19,5 +19,5 @@
|
||||
],
|
||||
"ext": "ts,md,json"
|
||||
},
|
||||
"version": "0.15.0"
|
||||
"version": "0.16.0"
|
||||
}
|
||||
|
||||
28
package.json
28
package.json
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@affine/monorepo",
|
||||
"version": "0.15.0",
|
||||
"version": "0.16.0",
|
||||
"private": true,
|
||||
"author": "toeverything",
|
||||
"license": "MIT",
|
||||
@@ -56,26 +56,26 @@
|
||||
"@affine/cli": "workspace:*",
|
||||
"@commitlint/cli": "^19.2.1",
|
||||
"@commitlint/config-conventional": "^19.1.0",
|
||||
"@faker-js/faker": "^8.4.1",
|
||||
"@faker-js/faker": "^9.0.0",
|
||||
"@istanbuljs/schema": "^0.1.3",
|
||||
"@magic-works/i18n-codegen": "^0.6.0",
|
||||
"@nx/vite": "19.4.3",
|
||||
"@playwright/test": "=1.44.1",
|
||||
"@nx/vite": "^19.5.3",
|
||||
"@playwright/test": "=1.46.1",
|
||||
"@taplo/cli": "^0.7.0",
|
||||
"@testing-library/react": "^16.0.0",
|
||||
"@toeverything/infra": "workspace:*",
|
||||
"@types/affine__env": "workspace:*",
|
||||
"@types/eslint": "^8.56.7",
|
||||
"@types/eslint": "^9.0.0",
|
||||
"@types/node": "^20.12.7",
|
||||
"@typescript-eslint/eslint-plugin": "^7.6.0",
|
||||
"@typescript-eslint/parser": "^7.6.0",
|
||||
"@vanilla-extract/vite-plugin": "^4.0.7",
|
||||
"@vanilla-extract/webpack-plugin": "^2.3.7",
|
||||
"@vitejs/plugin-react-swc": "^3.6.0",
|
||||
"@vitest/coverage-istanbul": "1.6.0",
|
||||
"@vitest/ui": "1.6.0",
|
||||
"@vitest/coverage-istanbul": "2.0.5",
|
||||
"@vitest/ui": "2.0.5",
|
||||
"cross-env": "^7.0.3",
|
||||
"electron": "~30.2.0",
|
||||
"electron": "^31.0.0",
|
||||
"eslint": "^8.57.0",
|
||||
"eslint-config-prettier": "^9.1.0",
|
||||
"eslint-plugin-import-x": "^0.5.0",
|
||||
@@ -88,15 +88,15 @@
|
||||
"eslint-plugin-unused-imports": "^3.1.0",
|
||||
"eslint-plugin-vue": "^9.24.1",
|
||||
"fake-indexeddb": "6.0.0",
|
||||
"happy-dom": "^14.7.1",
|
||||
"happy-dom": "^15.0.0",
|
||||
"husky": "^9.0.11",
|
||||
"lint-staged": "^15.2.2",
|
||||
"msw": "^2.3.0",
|
||||
"nanoid": "^5.0.7",
|
||||
"nx": "^19.0.0",
|
||||
"nyc": "^17.0.0",
|
||||
"oxlint": "0.6.0",
|
||||
"prettier": "^3.2.5",
|
||||
"oxlint": "0.9.2",
|
||||
"prettier": "^3.3.3",
|
||||
"semver": "^7.6.0",
|
||||
"serve": "^14.2.1",
|
||||
"string-width": "^7.1.0",
|
||||
@@ -106,11 +106,11 @@
|
||||
"vite": "^5.2.8",
|
||||
"vite-plugin-istanbul": "^6.0.0",
|
||||
"vite-plugin-static-copy": "^1.0.2",
|
||||
"vitest": "1.6.0",
|
||||
"vitest": "2.0.5",
|
||||
"vitest-fetch-mock": "^0.3.0",
|
||||
"vitest-mock-extended": "^1.3.1"
|
||||
"vitest-mock-extended": "^2.0.0"
|
||||
},
|
||||
"packageManager": "yarn@4.3.1",
|
||||
"packageManager": "yarn@4.4.1",
|
||||
"resolutions": {
|
||||
"array-buffer-byte-length": "npm:@nolyfill/array-buffer-byte-length@latest",
|
||||
"array-includes": "npm:@nolyfill/array-includes@latest",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@affine/server-native",
|
||||
"version": "0.15.0",
|
||||
"version": "0.16.0",
|
||||
"engines": {
|
||||
"node": ">= 10.16.0 < 11 || >= 11.8.0"
|
||||
},
|
||||
@@ -33,7 +33,7 @@
|
||||
"build:debug": "napi build"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@napi-rs/cli": "3.0.0-alpha.60",
|
||||
"@napi-rs/cli": "3.0.0-alpha.62",
|
||||
"lib0": "^0.2.93",
|
||||
"nx": "^19.0.0",
|
||||
"nx-cloud": "^19.0.0",
|
||||
|
||||
1
packages/backend/server/.gitignore
vendored
1
packages/backend/server/.gitignore
vendored
@@ -1 +1,2 @@
|
||||
.env
|
||||
static/
|
||||
|
||||
@@ -0,0 +1,146 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "_data_migrations" ALTER COLUMN "id" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "started_at" SET DATA TYPE TIMESTAMP(3),
|
||||
ALTER COLUMN "finished_at" SET DATA TYPE TIMESTAMP(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "ai_prompts_messages" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "ai_prompts_metadata" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "ai_sessions_messages" ALTER COLUMN "id" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "session_id" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3),
|
||||
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMP(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "ai_sessions_metadata" ALTER COLUMN "id" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "user_id" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "workspace_id" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "doc_id" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3),
|
||||
ALTER COLUMN "deleted_at" SET DATA TYPE TIMESTAMP(3),
|
||||
ALTER COLUMN "parent_session_id" SET DATA TYPE VARCHAR;
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "app_runtime_settings" ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMP(3),
|
||||
ALTER COLUMN "deleted_at" SET DATA TYPE TIMESTAMP(3),
|
||||
ALTER COLUMN "last_updated_by" SET DATA TYPE VARCHAR;
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "features" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "multiple_users_sessions" ALTER COLUMN "id" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "expires_at" SET DATA TYPE TIMESTAMP(3),
|
||||
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "snapshot_histories"
|
||||
ALTER COLUMN "workspace_id" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "guid" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "timestamp" SET DATA TYPE TIMESTAMP(3),
|
||||
ALTER COLUMN "expired_at" SET DATA TYPE TIMESTAMP(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "snapshots" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3),
|
||||
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMP(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "updates" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "user_connected_accounts" ALTER COLUMN "id" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "user_id" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "expires_at" SET DATA TYPE TIMESTAMP(3),
|
||||
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3),
|
||||
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMP(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "user_features" ALTER COLUMN "user_id" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3),
|
||||
ALTER COLUMN "expired_at" SET DATA TYPE TIMESTAMP(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "user_invoices" ALTER COLUMN "user_id" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3),
|
||||
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMP(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "user_sessions" ALTER COLUMN "id" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "session_id" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "user_id" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "expires_at" SET DATA TYPE TIMESTAMP(3),
|
||||
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "user_stripe_customers" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "user_subscriptions" ALTER COLUMN "user_id" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "start" SET DATA TYPE TIMESTAMP(3),
|
||||
ALTER COLUMN "end" SET DATA TYPE TIMESTAMP(3),
|
||||
ALTER COLUMN "next_bill_at" SET DATA TYPE TIMESTAMP(3),
|
||||
ALTER COLUMN "canceled_at" SET DATA TYPE TIMESTAMP(3),
|
||||
ALTER COLUMN "trial_start" SET DATA TYPE TIMESTAMP(3),
|
||||
ALTER COLUMN "trial_end" SET DATA TYPE TIMESTAMP(3),
|
||||
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3),
|
||||
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMP(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "users" ALTER COLUMN "name" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "email" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "verification_tokens" ALTER COLUMN "token" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "expiresAt" SET DATA TYPE TIMESTAMP(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "workspace_features" ALTER COLUMN "workspace_id" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3),
|
||||
ALTER COLUMN "expired_at" SET DATA TYPE TIMESTAMP(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "workspace_page_user_permissions"
|
||||
ALTER COLUMN "id" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "workspace_id" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "page_id" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "user_id" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "workspace_pages" ALTER COLUMN "workspace_id" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "page_id" SET DATA TYPE VARCHAR;
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "workspace_user_permissions" ALTER COLUMN "id" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "workspace_id" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "user_id" SET DATA TYPE VARCHAR,
|
||||
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "workspaces" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
|
||||
|
||||
-- DropTable
|
||||
DROP TABLE "accounts";
|
||||
|
||||
-- DropTable
|
||||
DROP TABLE "blobs";
|
||||
|
||||
-- DropTable
|
||||
DROP TABLE "new_features_waiting_list";
|
||||
|
||||
-- DropTable
|
||||
DROP TABLE "optimized_blobs";
|
||||
|
||||
-- DropTable
|
||||
DROP TABLE "sessions";
|
||||
|
||||
-- DropTable
|
||||
DROP TABLE "user_workspace_permissions";
|
||||
|
||||
-- DropTable
|
||||
DROP TABLE "verificationtokens";
|
||||
@@ -0,0 +1,95 @@
|
||||
/*
|
||||
Warnings:
|
||||
|
||||
- The primary key for the `snapshot_histories` table will be changed. If it partially fails, the table could be left without primary key constraint.
|
||||
|
||||
*/
|
||||
-- AlterTable
|
||||
ALTER TABLE "_data_migrations" ALTER COLUMN "started_at" SET DATA TYPE TIMESTAMPTZ(3),
|
||||
ALTER COLUMN "finished_at" SET DATA TYPE TIMESTAMPTZ(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "ai_prompts_messages" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "ai_prompts_metadata" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "ai_sessions_messages" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3),
|
||||
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMPTZ(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "ai_sessions_metadata" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3),
|
||||
ALTER COLUMN "deleted_at" SET DATA TYPE TIMESTAMPTZ(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "app_runtime_settings" ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMPTZ(3),
|
||||
ALTER COLUMN "deleted_at" SET DATA TYPE TIMESTAMPTZ(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "features" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "multiple_users_sessions" ALTER COLUMN "expires_at" SET DATA TYPE TIMESTAMPTZ(3),
|
||||
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "snapshot_histories" ALTER COLUMN "timestamp" SET DATA TYPE TIMESTAMPTZ(3),
|
||||
ALTER COLUMN "expired_at" SET DATA TYPE TIMESTAMPTZ(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "snapshots" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3),
|
||||
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMPTZ(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "updates" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "user_connected_accounts" ALTER COLUMN "expires_at" SET DATA TYPE TIMESTAMPTZ(3),
|
||||
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3),
|
||||
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMPTZ(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "user_features" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3),
|
||||
ALTER COLUMN "expired_at" SET DATA TYPE TIMESTAMPTZ(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "user_invoices" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3),
|
||||
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMPTZ(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "user_sessions" ALTER COLUMN "expires_at" SET DATA TYPE TIMESTAMPTZ(3),
|
||||
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "user_stripe_customers" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "user_subscriptions" ALTER COLUMN "start" SET DATA TYPE TIMESTAMPTZ(3),
|
||||
ALTER COLUMN "end" SET DATA TYPE TIMESTAMPTZ(3),
|
||||
ALTER COLUMN "next_bill_at" SET DATA TYPE TIMESTAMPTZ(3),
|
||||
ALTER COLUMN "canceled_at" SET DATA TYPE TIMESTAMPTZ(3),
|
||||
ALTER COLUMN "trial_start" SET DATA TYPE TIMESTAMPTZ(3),
|
||||
ALTER COLUMN "trial_end" SET DATA TYPE TIMESTAMPTZ(3),
|
||||
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3),
|
||||
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMPTZ(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "users" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3),
|
||||
ALTER COLUMN "email_verified" SET DATA TYPE TIMESTAMPTZ(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "verification_tokens" ALTER COLUMN "expiresAt" SET DATA TYPE TIMESTAMPTZ(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "workspace_features" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3),
|
||||
ALTER COLUMN "expired_at" SET DATA TYPE TIMESTAMPTZ(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "workspace_page_user_permissions" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "workspace_user_permissions" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "workspaces" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);
|
||||
@@ -0,0 +1,3 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "ai_prompts_metadata" ADD COLUMN "modified" BOOLEAN NOT NULL DEFAULT false,
|
||||
ADD COLUMN "updated_at" TIMESTAMPTZ(3) NOT NULL DEFAULT CURRENT_TIMESTAMP;
|
||||
@@ -0,0 +1,13 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "user_snapshots" (
|
||||
"user_id" VARCHAR NOT NULL,
|
||||
"id" VARCHAR NOT NULL,
|
||||
"blob" BYTEA NOT NULL,
|
||||
"created_at" TIMESTAMPTZ(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updated_at" TIMESTAMPTZ(3) NOT NULL,
|
||||
|
||||
CONSTRAINT "user_snapshots_pkey" PRIMARY KEY ("user_id","id")
|
||||
);
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "user_snapshots" ADD CONSTRAINT "user_snapshots_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
@@ -0,0 +1,14 @@
|
||||
/*
|
||||
Warnings:
|
||||
|
||||
- The primary key for the `updates` table will be changed. If it partially fails, the table could be left without primary key constraint.
|
||||
|
||||
*/
|
||||
-- AlterTable
|
||||
ALTER TABLE "snapshots" ALTER COLUMN "seq" DROP NOT NULL;
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "updates" DROP CONSTRAINT "updates_pkey",
|
||||
ALTER COLUMN "created_at" DROP DEFAULT,
|
||||
ALTER COLUMN "seq" DROP NOT NULL,
|
||||
ADD CONSTRAINT "updates_pkey" PRIMARY KEY ("workspace_id", "guid", "created_at");
|
||||
@@ -0,0 +1,21 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "snapshot_histories" ADD COLUMN "created_by" VARCHAR;
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "snapshots" ADD COLUMN "created_by" VARCHAR,
|
||||
ADD COLUMN "updated_by" VARCHAR;
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "updates" ADD COLUMN "created_by" VARCHAR;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "snapshots" ADD CONSTRAINT "snapshots_created_by_fkey" FOREIGN KEY ("created_by") REFERENCES "users"("id") ON DELETE SET NULL ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "snapshots" ADD CONSTRAINT "snapshots_updated_by_fkey" FOREIGN KEY ("updated_by") REFERENCES "users"("id") ON DELETE SET NULL ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "updates" ADD CONSTRAINT "updates_created_by_fkey" FOREIGN KEY ("created_by") REFERENCES "users"("id") ON DELETE SET NULL ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "snapshot_histories" ADD CONSTRAINT "snapshot_histories_created_by_fkey" FOREIGN KEY ("created_by") REFERENCES "users"("id") ON DELETE SET NULL ON UPDATE CASCADE;
|
||||
@@ -1,32 +1,31 @@
|
||||
{
|
||||
"name": "@affine/server",
|
||||
"private": true,
|
||||
"version": "0.15.0",
|
||||
"version": "0.16.0",
|
||||
"description": "Affine Node.js server",
|
||||
"type": "module",
|
||||
"bin": {
|
||||
"run-test": "./scripts/run-test.ts"
|
||||
},
|
||||
"scripts": {
|
||||
"run:script": "node --import ./scripts/register.js",
|
||||
"build": "tsc",
|
||||
"start": "yarn run:script ./src/index.ts",
|
||||
"start": "node --loader ts-node/esm/transpile-only.mjs ./src/index.ts",
|
||||
"dev": "nodemon ./src/index.ts",
|
||||
"test": "ava --concurrency 1 --serial",
|
||||
"test:coverage": "c8 ava --concurrency 1 --serial",
|
||||
"postinstall": "prisma generate",
|
||||
"data-migration": "yarn run:script ./src/data/index.ts",
|
||||
"predeploy": "yarn prisma migrate deploy && yarn run:script ./dist/data/index.js run",
|
||||
"db:upgrade": "yarn prisma migrate deploy && yarn data-migration run"
|
||||
"data-migration": "node --loader ts-node/esm/transpile-only.mjs ./src/data/index.ts",
|
||||
"predeploy": "yarn prisma migrate deploy && node --import ./scripts/register.js ./dist/data/index.js run",
|
||||
"predeploy:ts": "yarn prisma migrate deploy && node --loader ts-node/esm/transpile-only.mjs ./src/data/index.ts run"
|
||||
},
|
||||
"dependencies": {
|
||||
"@apollo/server": "^4.10.2",
|
||||
"@aws-sdk/client-s3": "^3.552.0",
|
||||
"@fal-ai/serverless-client": "^0.13.0",
|
||||
"@aws-sdk/client-s3": "^3.620.0",
|
||||
"@fal-ai/serverless-client": "^0.14.0",
|
||||
"@google-cloud/opentelemetry-cloud-monitoring-exporter": "^0.19.0",
|
||||
"@google-cloud/opentelemetry-cloud-trace-exporter": "^2.2.0",
|
||||
"@google-cloud/opentelemetry-resource-util": "^2.2.0",
|
||||
"@keyv/redis": "^2.8.4",
|
||||
"@keyv/redis": "^3.0.0",
|
||||
"@nestjs/apollo": "^12.1.0",
|
||||
"@nestjs/common": "^10.3.7",
|
||||
"@nestjs/core": "^10.3.7",
|
||||
@@ -35,26 +34,25 @@
|
||||
"@nestjs/platform-express": "^10.3.7",
|
||||
"@nestjs/platform-socket.io": "^10.3.7",
|
||||
"@nestjs/schedule": "^4.0.1",
|
||||
"@nestjs/serve-static": "^4.0.2",
|
||||
"@nestjs/throttler": "5.2.0",
|
||||
"@nestjs/throttler": "6.2.1",
|
||||
"@nestjs/websockets": "^10.3.7",
|
||||
"@node-rs/argon2": "^1.8.0",
|
||||
"@node-rs/crc32": "^1.10.0",
|
||||
"@node-rs/jsonwebtoken": "^0.5.2",
|
||||
"@opentelemetry/api": "^1.9.0",
|
||||
"@opentelemetry/core": "^1.25.0",
|
||||
"@opentelemetry/exporter-metrics-otlp-proto": "^0.52.0",
|
||||
"@opentelemetry/exporter-trace-otlp-proto": "^0.52.0",
|
||||
"@opentelemetry/exporter-prometheus": "^0.53.0",
|
||||
"@opentelemetry/exporter-zipkin": "^1.25.0",
|
||||
"@opentelemetry/host-metrics": "^0.35.2",
|
||||
"@opentelemetry/instrumentation": "^0.52.0",
|
||||
"@opentelemetry/instrumentation-graphql": "^0.42.0",
|
||||
"@opentelemetry/instrumentation-http": "^0.52.0",
|
||||
"@opentelemetry/instrumentation-ioredis": "^0.42.0",
|
||||
"@opentelemetry/instrumentation-nestjs-core": "^0.39.0",
|
||||
"@opentelemetry/instrumentation-socket.io": "^0.41.0",
|
||||
"@opentelemetry/instrumentation": "^0.53.0",
|
||||
"@opentelemetry/instrumentation-graphql": "^0.43.0",
|
||||
"@opentelemetry/instrumentation-http": "^0.53.0",
|
||||
"@opentelemetry/instrumentation-ioredis": "^0.43.0",
|
||||
"@opentelemetry/instrumentation-nestjs-core": "^0.40.0",
|
||||
"@opentelemetry/instrumentation-socket.io": "^0.42.0",
|
||||
"@opentelemetry/resources": "^1.25.0",
|
||||
"@opentelemetry/sdk-metrics": "^1.25.0",
|
||||
"@opentelemetry/sdk-node": "^0.52.0",
|
||||
"@opentelemetry/sdk-node": "^0.53.0",
|
||||
"@opentelemetry/sdk-trace-node": "^1.25.0",
|
||||
"@opentelemetry/semantic-conventions": "^1.25.0",
|
||||
"@prisma/client": "^5.15.0",
|
||||
@@ -72,13 +70,13 @@
|
||||
"graphql-upload": "^16.0.2",
|
||||
"html-validate": "^8.20.1",
|
||||
"ioredis": "^5.3.2",
|
||||
"keyv": "^4.5.4",
|
||||
"keyv": "^5.0.0",
|
||||
"lodash-es": "^4.17.21",
|
||||
"mixpanel": "^0.18.0",
|
||||
"mustache": "^4.2.0",
|
||||
"nanoid": "^5.0.7",
|
||||
"nest-commander": "^3.12.5",
|
||||
"nestjs-throttler-storage-redis": "^0.4.1",
|
||||
"nestjs-throttler-storage-redis": "^0.5.0",
|
||||
"nodemailer": "^6.9.13",
|
||||
"on-headers": "^1.0.2",
|
||||
"openai": "^4.33.0",
|
||||
@@ -139,6 +137,7 @@
|
||||
],
|
||||
"watchMode": {
|
||||
"ignoreChanges": [
|
||||
"static/**",
|
||||
"**/*.gen.*"
|
||||
]
|
||||
},
|
||||
@@ -166,8 +165,9 @@
|
||||
"exec": "node",
|
||||
"script": "./src/index.ts",
|
||||
"nodeArgs": [
|
||||
"--import",
|
||||
"./scripts/register.js"
|
||||
"--loader",
|
||||
"ts-node/esm.mjs",
|
||||
"--es-module-specifier-resolution=node"
|
||||
],
|
||||
"ignore": [
|
||||
"**/__tests__/**",
|
||||
|
||||
@@ -11,18 +11,18 @@ datasource db {
|
||||
|
||||
model User {
|
||||
id String @id @default(uuid()) @db.VarChar
|
||||
name String
|
||||
email String @unique
|
||||
emailVerifiedAt DateTime? @map("email_verified")
|
||||
name String @db.VarChar
|
||||
email String @unique @db.VarChar
|
||||
emailVerifiedAt DateTime? @map("email_verified") @db.Timestamptz(3)
|
||||
avatarUrl String? @map("avatar_url") @db.VarChar
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
|
||||
/// Not available if user signed up through OAuth providers
|
||||
password String? @db.VarChar
|
||||
/// Indicate whether the user finished the signup progress.
|
||||
/// for example, the value will be false if user never registered and invited into a workspace by others.
|
||||
registered Boolean @default(true)
|
||||
|
||||
features UserFeatures[]
|
||||
features UserFeature[]
|
||||
customer UserStripeCustomer?
|
||||
subscriptions UserSubscription[]
|
||||
invoices UserInvoice[]
|
||||
@@ -32,22 +32,27 @@ model User {
|
||||
sessions UserSession[]
|
||||
aiSessions AiSession[]
|
||||
updatedRuntimeConfigs RuntimeConfig[]
|
||||
userSnapshots UserSnapshot[]
|
||||
createdSnapshot Snapshot[] @relation("createdSnapshot")
|
||||
updatedSnapshot Snapshot[] @relation("updatedSnapshot")
|
||||
createdUpdate Update[] @relation("createdUpdate")
|
||||
createdHistory SnapshotHistory[] @relation("createdHistory")
|
||||
|
||||
@@index([email])
|
||||
@@map("users")
|
||||
}
|
||||
|
||||
model ConnectedAccount {
|
||||
id String @id @default(uuid()) @db.VarChar(36)
|
||||
userId String @map("user_id") @db.VarChar(36)
|
||||
id String @id @default(uuid()) @db.VarChar
|
||||
userId String @map("user_id") @db.VarChar
|
||||
provider String @db.VarChar
|
||||
providerAccountId String @map("provider_account_id") @db.VarChar
|
||||
scope String? @db.Text
|
||||
accessToken String? @map("access_token") @db.Text
|
||||
refreshToken String? @map("refresh_token") @db.Text
|
||||
expiresAt DateTime? @map("expires_at") @db.Timestamptz(6)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(6)
|
||||
expiresAt DateTime? @map("expires_at") @db.Timestamptz(3)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
|
||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(3)
|
||||
|
||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@ -57,21 +62,22 @@ model ConnectedAccount {
|
||||
}
|
||||
|
||||
model Session {
|
||||
id String @id @default(uuid()) @db.VarChar(36)
|
||||
expiresAt DateTime? @map("expires_at") @db.Timestamptz(6)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
|
||||
id String @id @default(uuid()) @db.VarChar
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
|
||||
userSessions UserSession[]
|
||||
|
||||
// @deprecated use [UserSession.expiresAt]
|
||||
deprecated_expiresAt DateTime? @map("expires_at") @db.Timestamptz(3)
|
||||
|
||||
@@map("multiple_users_sessions")
|
||||
}
|
||||
|
||||
model UserSession {
|
||||
id String @id @default(uuid()) @db.VarChar(36)
|
||||
sessionId String @map("session_id") @db.VarChar(36)
|
||||
userId String @map("user_id") @db.VarChar(36)
|
||||
expiresAt DateTime? @map("expires_at") @db.Timestamptz(6)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
id String @id @default(uuid()) @db.VarChar
|
||||
sessionId String @map("session_id") @db.VarChar
|
||||
userId String @map("user_id") @db.VarChar
|
||||
expiresAt DateTime? @map("expires_at") @db.Timestamptz(3)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
|
||||
|
||||
session Session @relation(fields: [sessionId], references: [id], onDelete: Cascade)
|
||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
@@ -81,10 +87,10 @@ model UserSession {
|
||||
}
|
||||
|
||||
model VerificationToken {
|
||||
token String @db.VarChar(36)
|
||||
token String @db.VarChar
|
||||
type Int @db.SmallInt
|
||||
credential String? @db.Text
|
||||
expiresAt DateTime @db.Timestamptz(6)
|
||||
expiresAt DateTime @db.Timestamptz(3)
|
||||
|
||||
@@unique([type, token])
|
||||
@@map("verification_tokens")
|
||||
@@ -93,12 +99,12 @@ model VerificationToken {
|
||||
model Workspace {
|
||||
id String @id @default(uuid()) @db.VarChar
|
||||
public Boolean
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
|
||||
|
||||
pages WorkspacePage[]
|
||||
permissions WorkspaceUserPermission[]
|
||||
pagePermissions WorkspacePageUserPermission[]
|
||||
features WorkspaceFeatures[]
|
||||
features WorkspaceFeature[]
|
||||
|
||||
@@map("workspaces")
|
||||
}
|
||||
@@ -109,8 +115,8 @@ model Workspace {
|
||||
// Only the ones that have ever changed will have records here,
|
||||
// and for others we will make sure it's has a default value return in our bussiness logic.
|
||||
model WorkspacePage {
|
||||
workspaceId String @map("workspace_id") @db.VarChar(36)
|
||||
pageId String @map("page_id") @db.VarChar(36)
|
||||
workspaceId String @map("workspace_id") @db.VarChar
|
||||
pageId String @map("page_id") @db.VarChar
|
||||
public Boolean @default(false)
|
||||
// Page/Edgeless
|
||||
mode Int @default(0) @db.SmallInt
|
||||
@@ -121,31 +127,15 @@ model WorkspacePage {
|
||||
@@map("workspace_pages")
|
||||
}
|
||||
|
||||
// @deprecated, use WorkspaceUserPermission
|
||||
model DeprecatedUserWorkspacePermission {
|
||||
model WorkspaceUserPermission {
|
||||
id String @id @default(uuid()) @db.VarChar
|
||||
workspaceId String @map("workspace_id") @db.VarChar
|
||||
subPageId String? @map("sub_page_id") @db.VarChar
|
||||
userId String? @map("entity_id") @db.VarChar
|
||||
/// Read/Write/Admin/Owner
|
||||
type Int @db.SmallInt
|
||||
/// Whether the permission invitation is accepted by the user
|
||||
accepted Boolean @default(false)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
|
||||
@@unique([workspaceId, subPageId, userId])
|
||||
@@map("user_workspace_permissions")
|
||||
}
|
||||
|
||||
model WorkspaceUserPermission {
|
||||
id String @id @default(uuid()) @db.VarChar(36)
|
||||
workspaceId String @map("workspace_id") @db.VarChar(36)
|
||||
userId String @map("user_id") @db.VarChar(36)
|
||||
userId String @map("user_id") @db.VarChar
|
||||
// Read/Write
|
||||
type Int @db.SmallInt
|
||||
/// Whether the permission invitation is accepted by the user
|
||||
accepted Boolean @default(false)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
|
||||
|
||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
|
||||
@@ -155,15 +145,15 @@ model WorkspaceUserPermission {
|
||||
}
|
||||
|
||||
model WorkspacePageUserPermission {
|
||||
id String @id @default(uuid()) @db.VarChar(36)
|
||||
workspaceId String @map("workspace_id") @db.VarChar(36)
|
||||
pageId String @map("page_id") @db.VarChar(36)
|
||||
userId String @map("user_id") @db.VarChar(36)
|
||||
id String @id @default(uuid()) @db.VarChar
|
||||
workspaceId String @map("workspace_id") @db.VarChar
|
||||
pageId String @map("page_id") @db.VarChar
|
||||
userId String @map("user_id") @db.VarChar
|
||||
// Read/Write
|
||||
type Int @db.SmallInt
|
||||
/// Whether the permission invitation is accepted by the user
|
||||
accepted Boolean @default(false)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
|
||||
|
||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
|
||||
@@ -176,9 +166,9 @@ model WorkspacePageUserPermission {
|
||||
// for example:
|
||||
// - early access is a feature that allow some users to access the insider version
|
||||
// - pro plan is a quota that allow some users access to more resources after they pay
|
||||
model UserFeatures {
|
||||
model UserFeature {
|
||||
id Int @id @default(autoincrement())
|
||||
userId String @map("user_id") @db.VarChar(36)
|
||||
userId String @map("user_id") @db.VarChar
|
||||
featureId Int @map("feature_id") @db.Integer
|
||||
|
||||
// we will record the reason why the feature is enabled/disabled
|
||||
@@ -186,16 +176,16 @@ model UserFeatures {
|
||||
// - pro_plan_v1: "user buy the pro plan"
|
||||
reason String @db.VarChar
|
||||
// record the quota enabled time
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
|
||||
// record the quota expired time, pay plan is a subscription, so it will expired
|
||||
expiredAt DateTime? @map("expired_at") @db.Timestamptz(6)
|
||||
expiredAt DateTime? @map("expired_at") @db.Timestamptz(3)
|
||||
// whether the feature is activated
|
||||
// for example:
|
||||
// - if we switch the user to another plan, we will set the old plan to deactivated, but dont delete it
|
||||
activated Boolean @default(false)
|
||||
|
||||
feature Features @relation(fields: [featureId], references: [id], onDelete: Cascade)
|
||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
feature Feature @relation(fields: [featureId], references: [id], onDelete: Cascade)
|
||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@index([userId])
|
||||
@@map("user_features")
|
||||
@@ -204,9 +194,9 @@ model UserFeatures {
|
||||
// feature gates is a way to enable/disable features for a workspace
|
||||
// for example:
|
||||
// - copilet is a feature that allow some users in a workspace to access the copilet feature
|
||||
model WorkspaceFeatures {
|
||||
model WorkspaceFeature {
|
||||
id Int @id @default(autoincrement())
|
||||
workspaceId String @map("workspace_id") @db.VarChar(36)
|
||||
workspaceId String @map("workspace_id") @db.VarChar
|
||||
featureId Int @map("feature_id") @db.Integer
|
||||
|
||||
// we will record the reason why the feature is enabled/disabled
|
||||
@@ -214,21 +204,21 @@ model WorkspaceFeatures {
|
||||
// - copilet_v1: "owner buy the copilet feature package"
|
||||
reason String @db.VarChar
|
||||
// record the feature enabled time
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
|
||||
// record the quota expired time, pay plan is a subscription, so it will expired
|
||||
expiredAt DateTime? @map("expired_at") @db.Timestamptz(6)
|
||||
expiredAt DateTime? @map("expired_at") @db.Timestamptz(3)
|
||||
// whether the feature is activated
|
||||
// for example:
|
||||
// - if owner unsubscribe a feature package, we will set the feature to deactivated, but dont delete it
|
||||
activated Boolean @default(false)
|
||||
|
||||
feature Features @relation(fields: [featureId], references: [id], onDelete: Cascade)
|
||||
feature Feature @relation(fields: [featureId], references: [id], onDelete: Cascade)
|
||||
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@map("workspace_features")
|
||||
}
|
||||
|
||||
model Features {
|
||||
model Feature {
|
||||
id Int @id @default(autoincrement())
|
||||
feature String @db.VarChar
|
||||
version Int @default(0) @db.Integer
|
||||
@@ -236,135 +226,98 @@ model Features {
|
||||
type Int @db.Integer
|
||||
// configs, define by feature conntroller
|
||||
configs Json @db.Json
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
|
||||
|
||||
UserFeatureGates UserFeatures[]
|
||||
WorkspaceFeatures WorkspaceFeatures[]
|
||||
UserFeatureGates UserFeature[]
|
||||
WorkspaceFeatures WorkspaceFeature[]
|
||||
|
||||
@@unique([feature, version])
|
||||
@@map("features")
|
||||
}
|
||||
|
||||
model DeprecatedNextAuthAccount {
|
||||
id String @id @default(cuid())
|
||||
userId String @map("user_id")
|
||||
type String
|
||||
provider String
|
||||
providerAccountId String @map("provider_account_id")
|
||||
refresh_token String? @db.Text
|
||||
access_token String? @db.Text
|
||||
expires_at Int?
|
||||
token_type String?
|
||||
scope String?
|
||||
id_token String? @db.Text
|
||||
session_state String?
|
||||
|
||||
@@unique([provider, providerAccountId])
|
||||
@@map("accounts")
|
||||
}
|
||||
|
||||
model DeprecatedNextAuthSession {
|
||||
id String @id @default(cuid())
|
||||
sessionToken String @unique @map("session_token")
|
||||
userId String @map("user_id")
|
||||
expires DateTime
|
||||
|
||||
@@map("sessions")
|
||||
}
|
||||
|
||||
model DeprecatedNextAuthVerificationToken {
|
||||
identifier String
|
||||
token String @unique
|
||||
expires DateTime
|
||||
|
||||
@@unique([identifier, token])
|
||||
@@map("verificationtokens")
|
||||
}
|
||||
|
||||
// deprecated, use [ObjectStorage]
|
||||
model Blob {
|
||||
id Int @id @default(autoincrement()) @db.Integer
|
||||
hash String @db.VarChar
|
||||
workspaceId String @map("workspace_id") @db.VarChar
|
||||
blob Bytes @db.ByteA
|
||||
length BigInt
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
// not for keeping, but for snapshot history
|
||||
deletedAt DateTime? @map("deleted_at") @db.Timestamptz(6)
|
||||
|
||||
@@unique([workspaceId, hash])
|
||||
@@map("blobs")
|
||||
}
|
||||
|
||||
// deprecated, use [ObjectStorage]
|
||||
model OptimizedBlob {
|
||||
id Int @id @default(autoincrement()) @db.Integer
|
||||
hash String @db.VarChar
|
||||
workspaceId String @map("workspace_id") @db.VarChar
|
||||
params String @db.VarChar
|
||||
blob Bytes @db.ByteA
|
||||
length BigInt
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
// not for keeping, but for snapshot history
|
||||
deletedAt DateTime? @map("deleted_at") @db.Timestamptz(6)
|
||||
|
||||
@@unique([workspaceId, hash, params])
|
||||
@@map("optimized_blobs")
|
||||
}
|
||||
|
||||
// the latest snapshot of each doc that we've seen
|
||||
// Snapshot + Updates are the latest state of the doc
|
||||
model Snapshot {
|
||||
workspaceId String @map("workspace_id") @db.VarChar
|
||||
id String @default(uuid()) @map("guid") @db.VarChar
|
||||
blob Bytes @db.ByteA
|
||||
seq Int @default(0) @db.Integer
|
||||
state Bytes? @db.ByteA
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
|
||||
// the `updated_at` field will not record the time of record changed,
|
||||
// but the created time of last seen update that has been merged into snapshot.
|
||||
updatedAt DateTime @map("updated_at") @db.Timestamptz(6)
|
||||
updatedAt DateTime @map("updated_at") @db.Timestamptz(3)
|
||||
createdBy String? @map("created_by") @db.VarChar
|
||||
updatedBy String? @map("updated_by") @db.VarChar
|
||||
|
||||
// should not delete origin snapshot even if user is deleted
|
||||
// we only delete the snapshot if the workspace is deleted
|
||||
createdByUser User? @relation(name: "createdSnapshot", fields: [createdBy], references: [id], onDelete: SetNull)
|
||||
updatedByUser User? @relation(name: "updatedSnapshot", fields: [updatedBy], references: [id], onDelete: SetNull)
|
||||
|
||||
// @deprecated use updatedAt only
|
||||
seq Int? @default(0) @db.Integer
|
||||
|
||||
// we need to clear all hanging updates and snapshots before enable the foreign key on workspaceId
|
||||
// workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@id([id, workspaceId])
|
||||
@@map("snapshots")
|
||||
}
|
||||
|
||||
// user snapshots are special snapshots for user storage like personal app settings, distinguished from workspace snapshots
|
||||
// basically they share the same structure with workspace snapshots
|
||||
// but for convenience, we don't fork the updates queue and hisotry for user snapshots, until we have to
|
||||
// which means all operation on user snapshot will happen in-pace
|
||||
model UserSnapshot {
|
||||
userId String @map("user_id") @db.VarChar
|
||||
id String @map("id") @db.VarChar
|
||||
blob Bytes @db.ByteA
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
|
||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(3)
|
||||
|
||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@id([userId, id])
|
||||
@@map("user_snapshots")
|
||||
}
|
||||
|
||||
model Update {
|
||||
workspaceId String @map("workspace_id") @db.VarChar
|
||||
id String @map("guid") @db.VarChar
|
||||
seq Int @db.Integer
|
||||
blob Bytes @db.ByteA
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
createdAt DateTime @map("created_at") @db.Timestamptz(3)
|
||||
createdBy String? @map("created_by") @db.VarChar
|
||||
|
||||
@@id([workspaceId, id, seq])
|
||||
// will delete createor record if createor's account is deleted
|
||||
createdByUser User? @relation(name: "createdUpdate", fields: [createdBy], references: [id], onDelete: SetNull)
|
||||
|
||||
// @deprecated use createdAt only
|
||||
seq Int? @db.Integer
|
||||
|
||||
@@id([workspaceId, id, createdAt])
|
||||
@@map("updates")
|
||||
}
|
||||
|
||||
model SnapshotHistory {
|
||||
workspaceId String @map("workspace_id") @db.VarChar(36)
|
||||
id String @map("guid") @db.VarChar(36)
|
||||
timestamp DateTime @db.Timestamptz(6)
|
||||
workspaceId String @map("workspace_id") @db.VarChar
|
||||
id String @map("guid") @db.VarChar
|
||||
timestamp DateTime @db.Timestamptz(3)
|
||||
blob Bytes @db.ByteA
|
||||
state Bytes? @db.ByteA
|
||||
expiredAt DateTime @map("expired_at") @db.Timestamptz(6)
|
||||
expiredAt DateTime @map("expired_at") @db.Timestamptz(3)
|
||||
createdBy String? @map("created_by") @db.VarChar
|
||||
|
||||
// will delete createor record if creator's account is deleted
|
||||
createdByUser User? @relation(name: "createdHistory", fields: [createdBy], references: [id], onDelete: SetNull)
|
||||
|
||||
@@id([workspaceId, id, timestamp])
|
||||
@@map("snapshot_histories")
|
||||
}
|
||||
|
||||
model NewFeaturesWaitingList {
|
||||
id String @id @default(uuid()) @db.VarChar
|
||||
email String @unique
|
||||
type Int @db.SmallInt
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
|
||||
@@map("new_features_waiting_list")
|
||||
}
|
||||
|
||||
model UserStripeCustomer {
|
||||
userId String @id @map("user_id") @db.VarChar
|
||||
stripeCustomerId String @unique @map("stripe_customer_id") @db.VarChar
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
|
||||
|
||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@ -373,7 +326,7 @@ model UserStripeCustomer {
|
||||
|
||||
model UserSubscription {
|
||||
id Int @id @default(autoincrement()) @db.Integer
|
||||
userId String @map("user_id") @db.VarChar(36)
|
||||
userId String @map("user_id") @db.VarChar
|
||||
plan String @db.VarChar(20)
|
||||
// yearly/monthly
|
||||
recurring String @db.VarChar(20)
|
||||
@@ -382,21 +335,21 @@ model UserSubscription {
|
||||
// subscription.status, active/past_due/canceled/unpaid...
|
||||
status String @db.VarChar(20)
|
||||
// subscription.current_period_start
|
||||
start DateTime @map("start") @db.Timestamptz(6)
|
||||
start DateTime @map("start") @db.Timestamptz(3)
|
||||
// subscription.current_period_end, null for lifetime payment
|
||||
end DateTime? @map("end") @db.Timestamptz(6)
|
||||
end DateTime? @map("end") @db.Timestamptz(3)
|
||||
// subscription.billing_cycle_anchor
|
||||
nextBillAt DateTime? @map("next_bill_at") @db.Timestamptz(6)
|
||||
nextBillAt DateTime? @map("next_bill_at") @db.Timestamptz(3)
|
||||
// subscription.canceled_at
|
||||
canceledAt DateTime? @map("canceled_at") @db.Timestamptz(6)
|
||||
canceledAt DateTime? @map("canceled_at") @db.Timestamptz(3)
|
||||
// subscription.trial_start
|
||||
trialStart DateTime? @map("trial_start") @db.Timestamptz(6)
|
||||
trialStart DateTime? @map("trial_start") @db.Timestamptz(3)
|
||||
// subscription.trial_end
|
||||
trialEnd DateTime? @map("trial_end") @db.Timestamptz(6)
|
||||
trialEnd DateTime? @map("trial_end") @db.Timestamptz(3)
|
||||
stripeScheduleId String? @map("stripe_schedule_id") @db.VarChar
|
||||
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(6)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
|
||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(3)
|
||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@unique([userId, plan])
|
||||
@@ -405,7 +358,7 @@ model UserSubscription {
|
||||
|
||||
model UserInvoice {
|
||||
id Int @id @default(autoincrement()) @db.Integer
|
||||
userId String @map("user_id") @db.VarChar(36)
|
||||
userId String @map("user_id") @db.VarChar
|
||||
stripeInvoiceId String @unique @map("stripe_invoice_id")
|
||||
currency String @db.VarChar(3)
|
||||
// CNY 12.50 stored as 1250
|
||||
@@ -413,8 +366,8 @@ model UserInvoice {
|
||||
status String @db.VarChar(20)
|
||||
plan String @db.VarChar(20)
|
||||
recurring String @db.VarChar(20)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(6)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
|
||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(3)
|
||||
// billing reason
|
||||
reason String @db.VarChar
|
||||
lastPaymentError String? @map("last_payment_error") @db.Text
|
||||
@@ -442,7 +395,7 @@ model AiPromptMessage {
|
||||
content String @db.Text
|
||||
attachments Json? @db.Json
|
||||
params Json? @db.Json
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
|
||||
|
||||
prompt AiPrompt @relation(fields: [promptId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@ -458,7 +411,10 @@ model AiPrompt {
|
||||
action String? @db.VarChar
|
||||
model String @db.VarChar
|
||||
config Json? @db.Json
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
|
||||
updatedAt DateTime @default(now()) @map("updated_at") @db.Timestamptz(3)
|
||||
// whether the prompt is modified by the admin panel
|
||||
modified Boolean @default(false)
|
||||
|
||||
messages AiPromptMessage[]
|
||||
sessions AiSession[]
|
||||
@@ -467,14 +423,14 @@ model AiPrompt {
|
||||
}
|
||||
|
||||
model AiSessionMessage {
|
||||
id String @id @default(uuid()) @db.VarChar(36)
|
||||
sessionId String @map("session_id") @db.VarChar(36)
|
||||
id String @id @default(uuid()) @db.VarChar
|
||||
sessionId String @map("session_id") @db.VarChar
|
||||
role AiPromptRole
|
||||
content String @db.Text
|
||||
attachments Json? @db.Json
|
||||
params Json? @db.Json
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(6)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
|
||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(3)
|
||||
|
||||
session AiSession @relation(fields: [sessionId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@ -482,17 +438,17 @@ model AiSessionMessage {
|
||||
}
|
||||
|
||||
model AiSession {
|
||||
id String @id @default(uuid()) @db.VarChar(36)
|
||||
userId String @map("user_id") @db.VarChar(36)
|
||||
workspaceId String @map("workspace_id") @db.VarChar(36)
|
||||
docId String @map("doc_id") @db.VarChar(36)
|
||||
id String @id @default(uuid()) @db.VarChar
|
||||
userId String @map("user_id") @db.VarChar
|
||||
workspaceId String @map("workspace_id") @db.VarChar
|
||||
docId String @map("doc_id") @db.VarChar
|
||||
promptName String @map("prompt_name") @db.VarChar(32)
|
||||
// the session id of the parent session if this session is a forked session
|
||||
parentSessionId String? @map("parent_session_id") @db.VarChar(36)
|
||||
parentSessionId String? @map("parent_session_id") @db.VarChar
|
||||
messageCost Int @default(0)
|
||||
tokenCost Int @default(0)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
deletedAt DateTime? @map("deleted_at") @db.Timestamptz(6)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
|
||||
deletedAt DateTime? @map("deleted_at") @db.Timestamptz(3)
|
||||
|
||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
prompt AiPrompt @relation(fields: [promptName], references: [name], onDelete: Cascade)
|
||||
@@ -502,10 +458,10 @@ model AiSession {
|
||||
}
|
||||
|
||||
model DataMigration {
|
||||
id String @id @default(uuid()) @db.VarChar(36)
|
||||
id String @id @default(uuid()) @db.VarChar
|
||||
name String @db.VarChar
|
||||
startedAt DateTime @default(now()) @map("started_at") @db.Timestamptz(6)
|
||||
finishedAt DateTime? @map("finished_at") @db.Timestamptz(6)
|
||||
startedAt DateTime @default(now()) @map("started_at") @db.Timestamptz(3)
|
||||
finishedAt DateTime? @map("finished_at") @db.Timestamptz(3)
|
||||
|
||||
@@map("_data_migrations")
|
||||
}
|
||||
@@ -525,9 +481,9 @@ model RuntimeConfig {
|
||||
key String @db.VarChar
|
||||
value Json @db.Json
|
||||
description String @db.Text
|
||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(6)
|
||||
deletedAt DateTime? @map("deleted_at") @db.Timestamptz(6)
|
||||
lastUpdatedBy String? @map("last_updated_by") @db.VarChar(36)
|
||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(3)
|
||||
deletedAt DateTime? @map("deleted_at") @db.Timestamptz(3)
|
||||
lastUpdatedBy String? @map("last_updated_by") @db.VarChar
|
||||
|
||||
lastUpdatedByUser User? @relation(fields: [lastUpdatedBy], references: [id])
|
||||
|
||||
|
||||
@@ -1,32 +1,11 @@
|
||||
import * as otel from '@opentelemetry/instrumentation/hook.mjs';
|
||||
import { createEsmHooks, register } from 'ts-node';
|
||||
import { create, createEsmHooks } from 'ts-node';
|
||||
|
||||
const service = register({
|
||||
const service = create({
|
||||
experimentalSpecifierResolution: 'node',
|
||||
transpileOnly: true,
|
||||
logError: true,
|
||||
skipProject: true,
|
||||
});
|
||||
const hooks = createEsmHooks(service);
|
||||
|
||||
/**
|
||||
* @type {import('ts-node').NodeLoaderHooksAPI2}
|
||||
|
||||
*/
|
||||
const ts = createEsmHooks(service);
|
||||
|
||||
/**
|
||||
* @type {import('ts-node').NodeLoaderHooksAPI2.ResolveHook}
|
||||
*/
|
||||
export const resolve = (specifier, context, defaultResolver) => {
|
||||
return ts.resolve(specifier, context, (s, c) => {
|
||||
return otel.resolve(s, c, defaultResolver);
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* @type {import('ts-node').NodeLoaderHooksAPI2.LoadHook}
|
||||
*/
|
||||
export const load = async (url, context, defaultLoader) => {
|
||||
return await otel.load(url, context, (u, c) => {
|
||||
return ts.load(u, c, defaultLoader);
|
||||
});
|
||||
};
|
||||
export const resolve = hooks.resolve;
|
||||
|
||||
@@ -3,7 +3,7 @@ import { Controller, Get } from '@nestjs/common';
|
||||
import { Public } from './core/auth';
|
||||
import { Config, SkipThrottle } from './fundamentals';
|
||||
|
||||
@Controller('/')
|
||||
@Controller('/info')
|
||||
export class AppController {
|
||||
constructor(private readonly config: Config) {}
|
||||
|
||||
@@ -15,7 +15,7 @@ export class AppController {
|
||||
compatibility: this.config.version,
|
||||
message: `AFFiNE ${this.config.version} Server`,
|
||||
type: this.config.type,
|
||||
flavor: this.config.flavor,
|
||||
flavor: this.config.flavor.type,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
import { join } from 'node:path';
|
||||
|
||||
import {
|
||||
DynamicModule,
|
||||
ForwardReference,
|
||||
@@ -7,15 +5,16 @@ import {
|
||||
Module,
|
||||
} from '@nestjs/common';
|
||||
import { ScheduleModule } from '@nestjs/schedule';
|
||||
import { ServeStaticModule } from '@nestjs/serve-static';
|
||||
import { get } from 'lodash-es';
|
||||
|
||||
import { AppController } from './app.controller';
|
||||
import { AuthModule } from './core/auth';
|
||||
import { ADD_ENABLED_FEATURES, ServerConfigModule } from './core/config';
|
||||
import { DocModule } from './core/doc';
|
||||
import { DocStorageModule } from './core/doc';
|
||||
import { FeatureModule } from './core/features';
|
||||
import { PermissionModule } from './core/permission';
|
||||
import { QuotaModule } from './core/quota';
|
||||
import { SelfhostModule } from './core/selfhost';
|
||||
import { StorageModule } from './core/storage';
|
||||
import { SyncModule } from './core/sync';
|
||||
import { UserModule } from './core/user';
|
||||
@@ -136,7 +135,7 @@ export class AppModuleBuilder {
|
||||
compile() {
|
||||
@Module({
|
||||
imports: this.modules,
|
||||
controllers: this.config.isSelfhosted ? [] : [AppController],
|
||||
controllers: [AppController],
|
||||
})
|
||||
class AppModule {}
|
||||
|
||||
@@ -144,49 +143,37 @@ export class AppModuleBuilder {
|
||||
}
|
||||
}
|
||||
|
||||
function buildAppModule() {
|
||||
export function buildAppModule() {
|
||||
AFFiNE = mergeConfigOverride(AFFiNE);
|
||||
const factor = new AppModuleBuilder(AFFiNE);
|
||||
|
||||
factor
|
||||
// common fundamental modules
|
||||
// basic
|
||||
.use(...FunctionalityModules)
|
||||
.useIf(config => config.flavor.sync, WebSocketModule)
|
||||
|
||||
// auth
|
||||
.use(AuthModule)
|
||||
.use(UserModule, AuthModule, PermissionModule)
|
||||
|
||||
// business modules
|
||||
.use(DocModule)
|
||||
.use(DocStorageModule)
|
||||
|
||||
// sync server only
|
||||
.useIf(config => config.flavor.sync, WebSocketModule, SyncModule)
|
||||
.useIf(config => config.flavor.sync, SyncModule)
|
||||
|
||||
// graphql server only
|
||||
.useIf(
|
||||
config => config.flavor.graphql,
|
||||
ServerConfigModule,
|
||||
GqlModule,
|
||||
StorageModule,
|
||||
UserModule,
|
||||
ServerConfigModule,
|
||||
WorkspaceModule,
|
||||
FeatureModule,
|
||||
QuotaModule
|
||||
)
|
||||
|
||||
// self hosted server only
|
||||
.useIf(
|
||||
config => config.isSelfhosted,
|
||||
ServeStaticModule.forRoot({
|
||||
rootPath: join('/app', 'static'),
|
||||
exclude: ['/admin*'],
|
||||
})
|
||||
)
|
||||
.useIf(
|
||||
config => config.isSelfhosted,
|
||||
ServeStaticModule.forRoot({
|
||||
rootPath: join('/app', 'static', 'admin'),
|
||||
serveRoot: '/admin',
|
||||
})
|
||||
);
|
||||
.useIf(config => config.isSelfhosted, SelfhostModule);
|
||||
|
||||
// plugin modules
|
||||
ENABLED_PLUGINS.forEach(name => {
|
||||
|
||||
@@ -29,7 +29,7 @@ export async function createApp() {
|
||||
graphqlUploadExpress({
|
||||
// TODO(@darkskygit): dynamic limit by quota maybe?
|
||||
maxFileSize: 100 * 1024 * 1024,
|
||||
maxFiles: 5,
|
||||
maxFiles: 32,
|
||||
})
|
||||
);
|
||||
|
||||
|
||||
@@ -12,6 +12,7 @@ AFFiNE.ENV_MAP = {
|
||||
MAILER_PASSWORD: 'mailer.auth.pass',
|
||||
MAILER_SENDER: 'mailer.from.address',
|
||||
MAILER_SECURE: ['mailer.secure', 'boolean'],
|
||||
DATABASE_URL: 'database.datasourceUrl',
|
||||
OAUTH_GOOGLE_CLIENT_ID: 'plugins.oauth.providers.google.clientId',
|
||||
OAUTH_GOOGLE_CLIENT_SECRET: 'plugins.oauth.providers.google.clientSecret',
|
||||
OAUTH_GITHUB_CLIENT_ID: 'plugins.oauth.providers.github.clientId',
|
||||
@@ -24,6 +25,7 @@ AFFiNE.ENV_MAP = {
|
||||
OAUTH_OIDC_CLAIM_MAP_EMAIL: 'plugins.oauth.providers.oidc.args.claim_email',
|
||||
OAUTH_OIDC_CLAIM_MAP_NAME: 'plugins.oauth.providers.oidc.args.claim_name',
|
||||
METRICS_CUSTOMER_IO_TOKEN: ['metrics.customerIo.token', 'string'],
|
||||
CAPTCHA_TURNSTILE_SECRET: ['plugins.captcha.turnstile.secret', 'string'],
|
||||
COPILOT_OPENAI_API_KEY: 'plugins.copilot.openai.apiKey',
|
||||
COPILOT_FAL_API_KEY: 'plugins.copilot.fal.apiKey',
|
||||
COPILOT_UNSPLASH_API_KEY: 'plugins.copilot.unsplashKey',
|
||||
|
||||
@@ -71,6 +71,14 @@ AFFiNE.use('payment', {
|
||||
});
|
||||
AFFiNE.use('oauth');
|
||||
|
||||
/* Captcha Plugin Default Config */
|
||||
AFFiNE.use('captcha', {
|
||||
turnstile: {},
|
||||
challenge: {
|
||||
bits: 20,
|
||||
},
|
||||
});
|
||||
|
||||
if (AFFiNE.deploy) {
|
||||
AFFiNE.mailer = {
|
||||
service: 'gmail',
|
||||
|
||||
@@ -95,6 +95,15 @@ AFFiNE.server.port = 3010;
|
||||
// });
|
||||
//
|
||||
//
|
||||
// /* Captcha Plugin Default Config */
|
||||
// AFFiNE.plugins.use('captcha', {
|
||||
// turnstile: {},
|
||||
// challenge: {
|
||||
// bits: 20,
|
||||
// },
|
||||
// });
|
||||
//
|
||||
//
|
||||
// /* Cloudflare R2 Plugin */
|
||||
// /* Enable if you choose to store workspace blobs or user avatars in Cloudflare R2 Storage Service */
|
||||
// AFFiNE.use('cloudflare-r2', {
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
import { randomUUID } from 'node:crypto';
|
||||
|
||||
import {
|
||||
Body,
|
||||
Controller,
|
||||
@@ -18,26 +16,34 @@ import {
|
||||
EarlyAccessRequired,
|
||||
EmailTokenNotFound,
|
||||
InternalServerError,
|
||||
InvalidEmail,
|
||||
InvalidEmailToken,
|
||||
SignUpForbidden,
|
||||
Throttle,
|
||||
URLHelper,
|
||||
UseNamedGuard,
|
||||
} from '../../fundamentals';
|
||||
import { UserService } from '../user';
|
||||
import { validators } from '../utils/validators';
|
||||
import { CurrentUser } from './current-user';
|
||||
import { Public } from './guard';
|
||||
import { AuthService, parseAuthUserSeqNum } from './service';
|
||||
import { AuthService } from './service';
|
||||
import { CurrentUser, Session } from './session';
|
||||
import { TokenService, TokenType } from './token';
|
||||
|
||||
class SignInCredential {
|
||||
email!: string;
|
||||
password?: string;
|
||||
interface PreflightResponse {
|
||||
registered: boolean;
|
||||
hasPassword: boolean;
|
||||
}
|
||||
|
||||
class MagicLinkCredential {
|
||||
email!: string;
|
||||
token!: string;
|
||||
interface SignInCredential {
|
||||
email: string;
|
||||
password?: string;
|
||||
callbackUrl?: string;
|
||||
}
|
||||
|
||||
interface MagicLinkCredential {
|
||||
email: string;
|
||||
token: string;
|
||||
}
|
||||
|
||||
@Throttle('strict')
|
||||
@@ -52,13 +58,44 @@ export class AuthController {
|
||||
) {}
|
||||
|
||||
@Public()
|
||||
@Post('/preflight')
|
||||
async preflight(
|
||||
@Body() params?: { email: string }
|
||||
): Promise<PreflightResponse> {
|
||||
if (!params?.email) {
|
||||
throw new InvalidEmail();
|
||||
}
|
||||
validators.assertValidEmail(params.email);
|
||||
|
||||
const user = await this.user.findUserWithHashedPasswordByEmail(
|
||||
params.email
|
||||
);
|
||||
|
||||
if (!user) {
|
||||
return {
|
||||
registered: false,
|
||||
hasPassword: false,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
registered: user.registered,
|
||||
hasPassword: !!user.password,
|
||||
};
|
||||
}
|
||||
|
||||
@Public()
|
||||
@UseNamedGuard('captcha')
|
||||
@Post('/sign-in')
|
||||
@Header('content-type', 'application/json')
|
||||
async signIn(
|
||||
@Req() req: Request,
|
||||
@Res() res: Response,
|
||||
@Body() credential: SignInCredential,
|
||||
@Query('redirect_uri') redirectUri = this.url.home
|
||||
/**
|
||||
* @deprecated
|
||||
*/
|
||||
@Query('redirect_uri') redirectUri?: string
|
||||
) {
|
||||
validators.assertValidEmail(credential.email);
|
||||
const canSignIn = await this.auth.canSignIn(credential.email);
|
||||
@@ -67,80 +104,86 @@ export class AuthController {
|
||||
}
|
||||
|
||||
if (credential.password) {
|
||||
const user = await this.auth.signIn(
|
||||
await this.passwordSignIn(
|
||||
req,
|
||||
res,
|
||||
credential.email,
|
||||
credential.password
|
||||
);
|
||||
|
||||
await this.auth.setCookie(req, res, user);
|
||||
res.status(HttpStatus.OK).send(user);
|
||||
} else {
|
||||
// send email magic link
|
||||
const user = await this.user.findUserByEmail(credential.email);
|
||||
if (!user) {
|
||||
const allowSignup = await this.config.runtime.fetch('auth/allowSignup');
|
||||
if (!allowSignup) {
|
||||
throw new SignUpForbidden();
|
||||
}
|
||||
}
|
||||
|
||||
const result = await this.sendSignInEmail(
|
||||
{ email: credential.email, signUp: !user },
|
||||
await this.sendMagicLink(
|
||||
req,
|
||||
res,
|
||||
credential.email,
|
||||
credential.callbackUrl,
|
||||
redirectUri
|
||||
);
|
||||
|
||||
if (result.rejected.length) {
|
||||
throw new InternalServerError('Failed to send sign-in email.');
|
||||
}
|
||||
|
||||
res.status(HttpStatus.OK).send({
|
||||
email: credential.email,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async sendSignInEmail(
|
||||
{ email, signUp }: { email: string; signUp: boolean },
|
||||
redirectUri: string
|
||||
async passwordSignIn(
|
||||
req: Request,
|
||||
res: Response,
|
||||
email: string,
|
||||
password: string
|
||||
) {
|
||||
const user = await this.auth.signIn(email, password);
|
||||
|
||||
await this.auth.setCookies(req, res, user.id);
|
||||
res.status(HttpStatus.OK).send(user);
|
||||
}
|
||||
|
||||
async sendMagicLink(
|
||||
_req: Request,
|
||||
res: Response,
|
||||
email: string,
|
||||
callbackUrl = '/magic-link',
|
||||
redirectUrl?: string
|
||||
) {
|
||||
// send email magic link
|
||||
const user = await this.user.findUserByEmail(email);
|
||||
if (!user) {
|
||||
const allowSignup = await this.config.runtime.fetch('auth/allowSignup');
|
||||
if (!allowSignup) {
|
||||
throw new SignUpForbidden();
|
||||
}
|
||||
}
|
||||
|
||||
const token = await this.token.createToken(TokenType.SignIn, email);
|
||||
|
||||
const magicLink = this.url.link('/magic-link', {
|
||||
const magicLink = this.url.link(callbackUrl, {
|
||||
token,
|
||||
email,
|
||||
redirect_uri: redirectUri,
|
||||
...(redirectUrl
|
||||
? {
|
||||
redirect_uri: redirectUrl,
|
||||
}
|
||||
: {}),
|
||||
});
|
||||
|
||||
const result = await this.auth.sendSignInEmail(email, magicLink, signUp);
|
||||
const result = await this.auth.sendSignInEmail(email, magicLink, !user);
|
||||
|
||||
return result;
|
||||
if (result.rejected.length) {
|
||||
throw new InternalServerError('Failed to send sign-in email.');
|
||||
}
|
||||
|
||||
res.status(HttpStatus.OK).send({
|
||||
email: email,
|
||||
});
|
||||
}
|
||||
|
||||
@Get('/sign-out')
|
||||
async signOut(
|
||||
@Req() req: Request,
|
||||
@Res() res: Response,
|
||||
@Query('redirect_uri') redirectUri?: string
|
||||
@Session() session: Session,
|
||||
@Body() { all }: { all: boolean }
|
||||
) {
|
||||
const session = await this.auth.signOut(
|
||||
req.cookies[AuthService.sessionCookieName],
|
||||
parseAuthUserSeqNum(req.headers[AuthService.authUserSeqHeaderName])
|
||||
await this.auth.signOut(
|
||||
session.sessionId,
|
||||
all ? undefined : session.userId
|
||||
);
|
||||
|
||||
if (session) {
|
||||
res.cookie(AuthService.sessionCookieName, session.id, {
|
||||
expires: session.expiresAt ?? void 0, // expiredAt is `string | null`
|
||||
...this.auth.cookieOptions,
|
||||
});
|
||||
} else {
|
||||
res.clearCookie(AuthService.sessionCookieName);
|
||||
}
|
||||
|
||||
if (redirectUri) {
|
||||
return this.url.safeRedirect(res, redirectUri);
|
||||
} else {
|
||||
return res.send(null);
|
||||
}
|
||||
res.status(HttpStatus.OK).send({});
|
||||
}
|
||||
|
||||
@Public()
|
||||
@@ -156,11 +199,11 @@ export class AuthController {
|
||||
|
||||
validators.assertValidEmail(email);
|
||||
|
||||
const valid = await this.token.verifyToken(TokenType.SignIn, token, {
|
||||
const tokenRecord = await this.token.verifyToken(TokenType.SignIn, token, {
|
||||
credential: email,
|
||||
});
|
||||
|
||||
if (!valid) {
|
||||
if (!tokenRecord) {
|
||||
throw new InvalidEmailToken();
|
||||
}
|
||||
|
||||
@@ -169,9 +212,8 @@ export class AuthController {
|
||||
registered: true,
|
||||
});
|
||||
|
||||
await this.auth.setCookie(req, res, user);
|
||||
|
||||
res.send({ id: user.id, email: user.email, name: user.name });
|
||||
await this.auth.setCookies(req, res, user.id);
|
||||
res.send({ id: user.id });
|
||||
}
|
||||
|
||||
@Throttle('default', { limit: 1200 })
|
||||
@@ -198,14 +240,4 @@ export class AuthController {
|
||||
users: await this.auth.getUserList(token),
|
||||
};
|
||||
}
|
||||
|
||||
@Public()
|
||||
@Get('/challenge')
|
||||
async challenge() {
|
||||
// TODO(@darksky): impl in following PR
|
||||
return {
|
||||
challenge: randomUUID(),
|
||||
resource: randomUUID(),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,24 +1,23 @@
|
||||
import type {
|
||||
CanActivate,
|
||||
ExecutionContext,
|
||||
FactoryProvider,
|
||||
OnModuleInit,
|
||||
} from '@nestjs/common';
|
||||
import { Injectable, SetMetadata, UseGuards } from '@nestjs/common';
|
||||
import { Injectable, SetMetadata } from '@nestjs/common';
|
||||
import { ModuleRef, Reflector } from '@nestjs/core';
|
||||
import type { Request } from 'express';
|
||||
|
||||
import {
|
||||
AuthenticationRequired,
|
||||
Config,
|
||||
getRequestResponseFromContext,
|
||||
mapAnyError,
|
||||
parseCookies,
|
||||
} from '../../fundamentals';
|
||||
import { AuthService, parseAuthUserSeqNum } from './service';
|
||||
|
||||
function extractTokenFromHeader(authorization: string) {
|
||||
if (!/^Bearer\s/i.test(authorization)) {
|
||||
return;
|
||||
}
|
||||
|
||||
return authorization.substring(7);
|
||||
}
|
||||
import { WEBSOCKET_OPTIONS } from '../../fundamentals/websocket';
|
||||
import { AuthService } from './service';
|
||||
import { Session } from './session';
|
||||
|
||||
const PUBLIC_ENTRYPOINT_SYMBOL = Symbol('public');
|
||||
|
||||
@@ -38,37 +37,9 @@ export class AuthGuard implements CanActivate, OnModuleInit {
|
||||
async canActivate(context: ExecutionContext) {
|
||||
const { req, res } = getRequestResponseFromContext(context);
|
||||
|
||||
// check cookie
|
||||
let sessionToken: string | undefined =
|
||||
req.cookies[AuthService.sessionCookieName];
|
||||
|
||||
if (!sessionToken && req.headers.authorization) {
|
||||
sessionToken = extractTokenFromHeader(req.headers.authorization);
|
||||
}
|
||||
|
||||
if (sessionToken) {
|
||||
const userSeq = parseAuthUserSeqNum(
|
||||
req.headers[AuthService.authUserSeqHeaderName]
|
||||
);
|
||||
|
||||
const { user, expiresAt } = await this.auth.getUser(
|
||||
sessionToken,
|
||||
userSeq
|
||||
);
|
||||
if (res && user && expiresAt) {
|
||||
await this.auth.refreshUserSessionIfNeeded(
|
||||
req,
|
||||
res,
|
||||
sessionToken,
|
||||
user.id,
|
||||
expiresAt
|
||||
);
|
||||
}
|
||||
|
||||
if (user) {
|
||||
req.sid = sessionToken;
|
||||
req.user = user;
|
||||
}
|
||||
const userSession = await this.signIn(req);
|
||||
if (res && userSession && userSession.expiresAt) {
|
||||
await this.auth.refreshUserSessionIfNeeded(res, userSession);
|
||||
}
|
||||
|
||||
// api is public
|
||||
@@ -81,33 +52,70 @@ export class AuthGuard implements CanActivate, OnModuleInit {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!req.user) {
|
||||
if (!userSession) {
|
||||
throw new AuthenticationRequired();
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
async signIn(req: Request): Promise<Session | null> {
|
||||
if (req.session) {
|
||||
return req.session;
|
||||
}
|
||||
|
||||
// compatibility with websocket request
|
||||
parseCookies(req);
|
||||
|
||||
// TODO(@forehalo): a cache for user session
|
||||
const userSession = await this.auth.getUserSessionFromRequest(req);
|
||||
|
||||
if (userSession) {
|
||||
req.session = {
|
||||
...userSession.session,
|
||||
user: userSession.user,
|
||||
};
|
||||
|
||||
return req.session;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This guard is used to protect routes/queries/mutations that require a user to be logged in.
|
||||
*
|
||||
* The `@CurrentUser()` parameter decorator used in a `Auth` guarded queries would always give us the user because the `Auth` guard will
|
||||
* fast throw if user is not logged in.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* ```typescript
|
||||
* \@Auth()
|
||||
* \@Query(() => UserType)
|
||||
* user(@CurrentUser() user: CurrentUser) {
|
||||
* return user;
|
||||
* }
|
||||
* ```
|
||||
* Mark api to be public accessible
|
||||
*/
|
||||
export const Auth = () => {
|
||||
return UseGuards(AuthGuard);
|
||||
};
|
||||
|
||||
// api is public accessible
|
||||
export const Public = () => SetMetadata(PUBLIC_ENTRYPOINT_SYMBOL, true);
|
||||
|
||||
export const AuthWebsocketOptionsProvider: FactoryProvider = {
|
||||
provide: WEBSOCKET_OPTIONS,
|
||||
useFactory: (config: Config, guard: AuthGuard) => {
|
||||
return {
|
||||
...config.websocket,
|
||||
allowRequest: async (
|
||||
req: any,
|
||||
pass: (err: string | null | undefined, success: boolean) => void
|
||||
) => {
|
||||
if (!config.websocket.requireAuthentication) {
|
||||
return pass(null, true);
|
||||
}
|
||||
|
||||
try {
|
||||
const authentication = await guard.signIn(req);
|
||||
|
||||
if (authentication) {
|
||||
return pass(null, true);
|
||||
} else {
|
||||
return pass('unauthenticated', false);
|
||||
}
|
||||
} catch (e) {
|
||||
const error = mapAnyError(e);
|
||||
error.log('Websocket');
|
||||
return pass('unauthenticated', false);
|
||||
}
|
||||
},
|
||||
};
|
||||
},
|
||||
inject: [Config, AuthGuard],
|
||||
};
|
||||
|
||||
@@ -6,15 +6,21 @@ import { FeatureModule } from '../features';
|
||||
import { QuotaModule } from '../quota';
|
||||
import { UserModule } from '../user';
|
||||
import { AuthController } from './controller';
|
||||
import { AuthGuard } from './guard';
|
||||
import { AuthGuard, AuthWebsocketOptionsProvider } from './guard';
|
||||
import { AuthResolver } from './resolver';
|
||||
import { AuthService } from './service';
|
||||
import { TokenService, TokenType } from './token';
|
||||
|
||||
@Module({
|
||||
imports: [FeatureModule, UserModule, QuotaModule],
|
||||
providers: [AuthService, AuthResolver, TokenService, AuthGuard],
|
||||
exports: [AuthService, AuthGuard],
|
||||
providers: [
|
||||
AuthService,
|
||||
AuthResolver,
|
||||
TokenService,
|
||||
AuthGuard,
|
||||
AuthWebsocketOptionsProvider,
|
||||
],
|
||||
exports: [AuthService, AuthGuard, AuthWebsocketOptionsProvider, TokenService],
|
||||
controllers: [AuthController],
|
||||
})
|
||||
export class AuthModule {}
|
||||
@@ -22,4 +28,4 @@ export class AuthModule {}
|
||||
export * from './guard';
|
||||
export { ClientTokenType } from './resolver';
|
||||
export { AuthService, TokenService, TokenType };
|
||||
export * from './current-user';
|
||||
export * from './session';
|
||||
|
||||
@@ -11,22 +11,23 @@ import {
|
||||
|
||||
import {
|
||||
ActionForbidden,
|
||||
Config,
|
||||
EmailAlreadyUsed,
|
||||
EmailTokenNotFound,
|
||||
EmailVerificationRequired,
|
||||
InvalidEmailToken,
|
||||
LinkExpired,
|
||||
SameEmailProvided,
|
||||
SkipThrottle,
|
||||
Throttle,
|
||||
URLHelper,
|
||||
} from '../../fundamentals';
|
||||
import { Admin } from '../common';
|
||||
import { UserService } from '../user';
|
||||
import { UserType } from '../user/types';
|
||||
import { validators } from '../utils/validators';
|
||||
import { CurrentUser } from './current-user';
|
||||
import { Public } from './guard';
|
||||
import { AuthService } from './service';
|
||||
import { CurrentUser } from './session';
|
||||
import { TokenService, TokenType } from './token';
|
||||
|
||||
@ObjectType('tokenType')
|
||||
@@ -45,7 +46,6 @@ export class ClientTokenType {
|
||||
@Resolver(() => UserType)
|
||||
export class AuthResolver {
|
||||
constructor(
|
||||
private readonly config: Config,
|
||||
private readonly url: URLHelper,
|
||||
private readonly auth: AuthService,
|
||||
private readonly user: UserService,
|
||||
@@ -65,7 +65,7 @@ export class AuthResolver {
|
||||
|
||||
@ResolveField(() => ClientTokenType, {
|
||||
name: 'token',
|
||||
deprecationReason: 'use [/api/auth/authorize]',
|
||||
deprecationReason: 'use [/api/auth/sign-in?native=true] instead',
|
||||
})
|
||||
async clientToken(
|
||||
@CurrentUser() currentUser: CurrentUser,
|
||||
@@ -75,39 +75,32 @@ export class AuthResolver {
|
||||
throw new ActionForbidden();
|
||||
}
|
||||
|
||||
const session = await this.auth.createUserSession(
|
||||
user,
|
||||
undefined,
|
||||
this.config.auth.accessToken.ttl
|
||||
);
|
||||
const userSession = await this.auth.createUserSession(user.id);
|
||||
|
||||
return {
|
||||
sessionToken: session.sessionId,
|
||||
token: session.sessionId,
|
||||
sessionToken: userSession.sessionId,
|
||||
token: userSession.sessionId,
|
||||
refresh: '',
|
||||
};
|
||||
}
|
||||
|
||||
@Mutation(() => UserType)
|
||||
@Public()
|
||||
@Mutation(() => Boolean)
|
||||
async changePassword(
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@Args('token') token: string,
|
||||
@Args('newPassword') newPassword: string
|
||||
@Args('newPassword') newPassword: string,
|
||||
@Args('userId', { type: () => String, nullable: true }) userId?: string
|
||||
) {
|
||||
const config = await this.config.runtime.fetchAll({
|
||||
'auth/password.max': true,
|
||||
'auth/password.min': true,
|
||||
});
|
||||
validators.assertValidPassword(newPassword, {
|
||||
min: config['auth/password.min'],
|
||||
max: config['auth/password.max'],
|
||||
});
|
||||
if (!userId) {
|
||||
throw new LinkExpired();
|
||||
}
|
||||
|
||||
// NOTE: Set & Change password are using the same token type.
|
||||
const valid = await this.token.verifyToken(
|
||||
TokenType.ChangePassword,
|
||||
token,
|
||||
{
|
||||
credential: user.id,
|
||||
credential: userId,
|
||||
}
|
||||
);
|
||||
|
||||
@@ -115,10 +108,10 @@ export class AuthResolver {
|
||||
throw new InvalidEmailToken();
|
||||
}
|
||||
|
||||
await this.auth.changePassword(user.id, newPassword);
|
||||
await this.auth.revokeUserSessions(user.id);
|
||||
await this.auth.changePassword(userId, newPassword);
|
||||
await this.auth.revokeUserSessions(userId);
|
||||
|
||||
return user;
|
||||
return true;
|
||||
}
|
||||
|
||||
@Mutation(() => UserType)
|
||||
@@ -127,7 +120,6 @@ export class AuthResolver {
|
||||
@Args('token') token: string,
|
||||
@Args('email') email: string
|
||||
) {
|
||||
validators.assertValidEmail(email);
|
||||
// @see [sendChangeEmail]
|
||||
const valid = await this.token.verifyToken(TokenType.VerifyEmail, token, {
|
||||
credential: user.id,
|
||||
@@ -150,8 +142,11 @@ export class AuthResolver {
|
||||
async sendChangePasswordEmail(
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@Args('callbackUrl') callbackUrl: string,
|
||||
// @deprecated
|
||||
@Args('email', { nullable: true }) _email?: string
|
||||
@Args('email', {
|
||||
nullable: true,
|
||||
deprecationReason: 'fetched from signed in user',
|
||||
})
|
||||
_email?: string
|
||||
) {
|
||||
if (!user.emailVerified) {
|
||||
throw new EmailVerificationRequired();
|
||||
@@ -162,7 +157,7 @@ export class AuthResolver {
|
||||
user.id
|
||||
);
|
||||
|
||||
const url = this.url.link(callbackUrl, { token });
|
||||
const url = this.url.link(callbackUrl, { userId: user.id, token });
|
||||
|
||||
const res = await this.auth.sendChangePasswordEmail(user.email, url);
|
||||
|
||||
@@ -173,21 +168,13 @@ export class AuthResolver {
|
||||
async sendSetPasswordEmail(
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@Args('callbackUrl') callbackUrl: string,
|
||||
@Args('email', { nullable: true }) _email?: string
|
||||
@Args('email', {
|
||||
nullable: true,
|
||||
deprecationReason: 'fetched from signed in user',
|
||||
})
|
||||
_email?: string
|
||||
) {
|
||||
if (!user.emailVerified) {
|
||||
throw new EmailVerificationRequired();
|
||||
}
|
||||
|
||||
const token = await this.token.createToken(
|
||||
TokenType.ChangePassword,
|
||||
user.id
|
||||
);
|
||||
|
||||
const url = this.url.link(callbackUrl, { token });
|
||||
|
||||
const res = await this.auth.sendSetPasswordEmail(user.email, url);
|
||||
return !res.rejected.length;
|
||||
return this.sendChangePasswordEmail(user, callbackUrl);
|
||||
}
|
||||
|
||||
// The change email step is:
|
||||
@@ -291,4 +278,20 @@ export class AuthResolver {
|
||||
|
||||
return emailVerifiedAt !== null;
|
||||
}
|
||||
|
||||
@Admin()
|
||||
@Mutation(() => String, {
|
||||
description: 'Create change password url',
|
||||
})
|
||||
async createChangePasswordUrl(
|
||||
@Args('userId') userId: string,
|
||||
@Args('callbackUrl') callbackUrl: string
|
||||
): Promise<string> {
|
||||
const token = await this.token.createToken(
|
||||
TokenType.ChangePassword,
|
||||
userId
|
||||
);
|
||||
|
||||
return this.url.link(callbackUrl, { userId, token });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,46 +1,16 @@
|
||||
import { Injectable, OnApplicationBootstrap } from '@nestjs/common';
|
||||
import { Cron, CronExpression } from '@nestjs/schedule';
|
||||
import type { User } from '@prisma/client';
|
||||
import type { User, UserSession } from '@prisma/client';
|
||||
import { PrismaClient } from '@prisma/client';
|
||||
import type { CookieOptions, Request, Response } from 'express';
|
||||
import { assign, omit } from 'lodash-es';
|
||||
import { assign, pick } from 'lodash-es';
|
||||
|
||||
import {
|
||||
Config,
|
||||
CryptoHelper,
|
||||
EmailAlreadyUsed,
|
||||
MailService,
|
||||
WrongSignInCredentials,
|
||||
WrongSignInMethod,
|
||||
} from '../../fundamentals';
|
||||
import { Config, MailService, SignUpForbidden } from '../../fundamentals';
|
||||
import { FeatureManagementService } from '../features/management';
|
||||
import { QuotaService } from '../quota/service';
|
||||
import { QuotaType } from '../quota/types';
|
||||
import { UserService } from '../user/service';
|
||||
import type { CurrentUser } from './current-user';
|
||||
|
||||
export function parseAuthUserSeqNum(value: any) {
|
||||
let seq: number = 0;
|
||||
switch (typeof value) {
|
||||
case 'number': {
|
||||
seq = value;
|
||||
break;
|
||||
}
|
||||
case 'string': {
|
||||
const result = value.match(/^([\d{0, 10}])$/);
|
||||
if (result?.[1]) {
|
||||
seq = Number(result[1]);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
default: {
|
||||
seq = 0;
|
||||
}
|
||||
}
|
||||
|
||||
return Math.max(0, seq);
|
||||
}
|
||||
import type { CurrentUser } from './session';
|
||||
|
||||
export function sessionUser(
|
||||
user: Pick<
|
||||
@@ -48,13 +18,19 @@ export function sessionUser(
|
||||
'id' | 'email' | 'avatarUrl' | 'name' | 'emailVerifiedAt'
|
||||
> & { password?: string | null }
|
||||
): CurrentUser {
|
||||
return assign(
|
||||
omit(user, 'password', 'registered', 'emailVerifiedAt', 'createdAt'),
|
||||
{
|
||||
hasPassword: user.password !== null,
|
||||
emailVerified: user.emailVerifiedAt !== null,
|
||||
}
|
||||
);
|
||||
// use pick to avoid unexpected fields
|
||||
return assign(pick(user, 'id', 'email', 'avatarUrl', 'name'), {
|
||||
hasPassword: user.password !== null,
|
||||
emailVerified: user.emailVerifiedAt !== null,
|
||||
});
|
||||
}
|
||||
|
||||
function extractTokenFromHeader(authorization: string) {
|
||||
if (!/^Bearer\s/i.test(authorization)) {
|
||||
return;
|
||||
}
|
||||
|
||||
return authorization.substring(7);
|
||||
}
|
||||
|
||||
@Injectable()
|
||||
@@ -66,7 +42,7 @@ export class AuthService implements OnApplicationBootstrap {
|
||||
secure: this.config.server.https,
|
||||
};
|
||||
static readonly sessionCookieName = 'affine_session';
|
||||
static readonly authUserSeqHeaderName = 'x-auth-user';
|
||||
static readonly userCookieName = 'affine_user_id';
|
||||
|
||||
constructor(
|
||||
private readonly config: Config,
|
||||
@@ -74,26 +50,25 @@ export class AuthService implements OnApplicationBootstrap {
|
||||
private readonly mailer: MailService,
|
||||
private readonly feature: FeatureManagementService,
|
||||
private readonly quota: QuotaService,
|
||||
private readonly user: UserService,
|
||||
private readonly crypto: CryptoHelper
|
||||
private readonly user: UserService
|
||||
) {}
|
||||
|
||||
async onApplicationBootstrap() {
|
||||
if (this.config.node.dev) {
|
||||
try {
|
||||
const [email, name, pwd] = ['dev@affine.pro', 'Dev User', 'dev'];
|
||||
const [email, name, password] = ['dev@affine.pro', 'Dev User', 'dev'];
|
||||
let devUser = await this.user.findUserByEmail(email);
|
||||
if (!devUser) {
|
||||
devUser = await this.user.createUser({
|
||||
devUser = await this.user.createUser_without_verification({
|
||||
email,
|
||||
name,
|
||||
password: await this.crypto.encryptPassword(pwd),
|
||||
password,
|
||||
});
|
||||
}
|
||||
await this.quota.switchUserQuota(devUser.id, QuotaType.ProPlanV1);
|
||||
await this.feature.addAdmin(devUser.id);
|
||||
await this.feature.addCopilot(devUser.id);
|
||||
} catch (e) {
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
@@ -103,187 +78,173 @@ export class AuthService implements OnApplicationBootstrap {
|
||||
return this.feature.canEarlyAccess(email);
|
||||
}
|
||||
|
||||
async signUp(
|
||||
name: string,
|
||||
email: string,
|
||||
password: string
|
||||
): Promise<CurrentUser> {
|
||||
const user = await this.user.findUserByEmail(email);
|
||||
|
||||
if (user) {
|
||||
throw new EmailAlreadyUsed();
|
||||
/**
|
||||
* This is a test only helper to quickly signup a user, do not use in production
|
||||
*/
|
||||
async signUp(email: string, password: string): Promise<CurrentUser> {
|
||||
if (!this.config.node.test) {
|
||||
throw new SignUpForbidden(
|
||||
'sign up helper is forbidden for non-test environment'
|
||||
);
|
||||
}
|
||||
|
||||
const hashedPassword = await this.crypto.encryptPassword(password);
|
||||
|
||||
return this.user
|
||||
.createUser({
|
||||
name,
|
||||
.createUser_without_verification({
|
||||
email,
|
||||
password: hashedPassword,
|
||||
password,
|
||||
})
|
||||
.then(sessionUser);
|
||||
}
|
||||
|
||||
async signIn(email: string, password: string) {
|
||||
const user = await this.user.findUserWithHashedPasswordByEmail(email);
|
||||
|
||||
if (!user) {
|
||||
throw new WrongSignInCredentials();
|
||||
}
|
||||
|
||||
if (!user.password) {
|
||||
throw new WrongSignInMethod();
|
||||
}
|
||||
|
||||
const passwordMatches = await this.crypto.verifyPassword(
|
||||
password,
|
||||
user.password
|
||||
);
|
||||
|
||||
if (!passwordMatches) {
|
||||
throw new WrongSignInCredentials();
|
||||
}
|
||||
|
||||
return sessionUser(user);
|
||||
async signIn(email: string, password: string): Promise<CurrentUser> {
|
||||
return this.user.signIn(email, password).then(sessionUser);
|
||||
}
|
||||
|
||||
async getUser(
|
||||
token: string,
|
||||
seq = 0
|
||||
): Promise<{ user: CurrentUser | null; expiresAt: Date | null }> {
|
||||
const session = await this.getSession(token);
|
||||
async signOut(sessionId: string, userId?: string) {
|
||||
// sign out all users in the session
|
||||
if (!userId) {
|
||||
await this.db.session.deleteMany({
|
||||
where: {
|
||||
id: sessionId,
|
||||
},
|
||||
});
|
||||
} else {
|
||||
await this.db.userSession.deleteMany({
|
||||
where: {
|
||||
sessionId,
|
||||
userId,
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async getUserSession(
|
||||
sessionId: string,
|
||||
userId?: string
|
||||
): Promise<{ user: CurrentUser; session: UserSession } | null> {
|
||||
const userSession = await this.db.userSession.findFirst({
|
||||
where: {
|
||||
sessionId,
|
||||
userId,
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
sessionId: true,
|
||||
userId: true,
|
||||
createdAt: true,
|
||||
expiresAt: true,
|
||||
user: true,
|
||||
},
|
||||
orderBy: {
|
||||
createdAt: 'asc',
|
||||
},
|
||||
});
|
||||
|
||||
// no such session
|
||||
if (!session) {
|
||||
return { user: null, expiresAt: null };
|
||||
}
|
||||
|
||||
const userSession = session.userSessions.at(seq);
|
||||
|
||||
// no such user session
|
||||
if (!userSession) {
|
||||
return { user: null, expiresAt: null };
|
||||
return null;
|
||||
}
|
||||
|
||||
// user session expired
|
||||
if (userSession.expiresAt && userSession.expiresAt <= new Date()) {
|
||||
return { user: null, expiresAt: null };
|
||||
}
|
||||
|
||||
const user = await this.db.user.findUnique({
|
||||
where: { id: userSession.userId },
|
||||
});
|
||||
|
||||
if (!user) {
|
||||
return { user: null, expiresAt: null };
|
||||
}
|
||||
|
||||
return { user: sessionUser(user), expiresAt: userSession.expiresAt };
|
||||
}
|
||||
|
||||
async getUserList(token: string) {
|
||||
const session = await this.getSession(token);
|
||||
|
||||
if (!session || !session.userSessions.length) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const users = await this.db.user.findMany({
|
||||
where: {
|
||||
id: {
|
||||
in: session.userSessions.map(({ userId }) => userId),
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// TODO(@forehalo): need to separate expired session, same for [getUser]
|
||||
// Session
|
||||
// | { user: LimitedUser { email, avatarUrl }, expired: true }
|
||||
// | { user: User, expired: false }
|
||||
return session.userSessions
|
||||
.map(userSession => {
|
||||
// keep users in the same order as userSessions
|
||||
const user = users.find(({ id }) => id === userSession.userId);
|
||||
if (!user) {
|
||||
return null;
|
||||
}
|
||||
return sessionUser(user);
|
||||
})
|
||||
.filter(Boolean) as CurrentUser[];
|
||||
}
|
||||
|
||||
async signOut(token: string, seq = 0) {
|
||||
const session = await this.getSession(token);
|
||||
|
||||
if (session) {
|
||||
// overflow the logged in user
|
||||
if (session.userSessions.length <= seq) {
|
||||
return session;
|
||||
}
|
||||
|
||||
await this.db.userSession.deleteMany({
|
||||
where: { id: session.userSessions[seq].id },
|
||||
});
|
||||
|
||||
// no more user session active, delete the whole session
|
||||
if (session.userSessions.length === 1) {
|
||||
await this.db.session.delete({ where: { id: session.id } });
|
||||
return null;
|
||||
}
|
||||
|
||||
return session;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
async getSession(token: string) {
|
||||
if (!token) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return this.db.$transaction(async tx => {
|
||||
const session = await tx.session.findUnique({
|
||||
return { user: sessionUser(userSession.user), session: userSession };
|
||||
}
|
||||
|
||||
async createUserSession(
|
||||
userId: string,
|
||||
sessionId?: string,
|
||||
ttl = this.config.auth.session.ttl
|
||||
) {
|
||||
// check whether given session is valid
|
||||
if (sessionId) {
|
||||
const session = await this.db.session.findFirst({
|
||||
where: {
|
||||
id: token,
|
||||
},
|
||||
include: {
|
||||
userSessions: {
|
||||
orderBy: {
|
||||
createdAt: 'asc',
|
||||
},
|
||||
},
|
||||
id: sessionId,
|
||||
},
|
||||
});
|
||||
|
||||
if (!session) {
|
||||
return null;
|
||||
sessionId = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
if (session.expiresAt && session.expiresAt <= new Date()) {
|
||||
await tx.session.delete({
|
||||
where: {
|
||||
id: session.id,
|
||||
if (!sessionId) {
|
||||
const session = await this.createSession();
|
||||
sessionId = session.id;
|
||||
}
|
||||
|
||||
const expiresAt = new Date(Date.now() + ttl * 1000);
|
||||
|
||||
return this.db.userSession.upsert({
|
||||
where: {
|
||||
sessionId_userId: {
|
||||
sessionId,
|
||||
userId,
|
||||
},
|
||||
},
|
||||
update: {
|
||||
expiresAt,
|
||||
},
|
||||
create: {
|
||||
sessionId,
|
||||
userId,
|
||||
expiresAt,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async getUserList(sessionId: string) {
|
||||
const sessions = await this.db.userSession.findMany({
|
||||
where: {
|
||||
sessionId,
|
||||
OR: [
|
||||
{
|
||||
expiresAt: null,
|
||||
},
|
||||
});
|
||||
{
|
||||
expiresAt: {
|
||||
gt: new Date(),
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
include: {
|
||||
user: true,
|
||||
},
|
||||
orderBy: {
|
||||
createdAt: 'asc',
|
||||
},
|
||||
});
|
||||
|
||||
return null;
|
||||
}
|
||||
return sessions.map(({ user }) => sessionUser(user));
|
||||
}
|
||||
|
||||
return session;
|
||||
async createSession() {
|
||||
return this.db.session.create({
|
||||
data: {},
|
||||
});
|
||||
}
|
||||
|
||||
async getSession(sessionId: string) {
|
||||
return this.db.session.findFirst({
|
||||
where: {
|
||||
id: sessionId,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async refreshUserSessionIfNeeded(
|
||||
_req: Request,
|
||||
res: Response,
|
||||
sessionId: string,
|
||||
userId: string,
|
||||
expiresAt: Date,
|
||||
session: UserSession,
|
||||
ttr = this.config.auth.session.ttr
|
||||
): Promise<boolean> {
|
||||
if (expiresAt && expiresAt.getTime() - Date.now() > ttr * 1000) {
|
||||
if (
|
||||
session.expiresAt &&
|
||||
session.expiresAt.getTime() - Date.now() > ttr * 1000
|
||||
) {
|
||||
// no need to refresh
|
||||
return false;
|
||||
}
|
||||
@@ -294,17 +255,14 @@ export class AuthService implements OnApplicationBootstrap {
|
||||
|
||||
await this.db.userSession.update({
|
||||
where: {
|
||||
sessionId_userId: {
|
||||
sessionId,
|
||||
userId,
|
||||
},
|
||||
id: session.id,
|
||||
},
|
||||
data: {
|
||||
expiresAt: newExpiresAt,
|
||||
},
|
||||
});
|
||||
|
||||
res.cookie(AuthService.sessionCookieName, sessionId, {
|
||||
res.cookie(AuthService.sessionCookieName, session.sessionId, {
|
||||
expires: newExpiresAt,
|
||||
...this.cookieOptions,
|
||||
});
|
||||
@@ -312,78 +270,70 @@ export class AuthService implements OnApplicationBootstrap {
|
||||
return true;
|
||||
}
|
||||
|
||||
async createUserSession(
|
||||
user: { id: string },
|
||||
existingSession?: string,
|
||||
ttl = this.config.auth.session.ttl
|
||||
) {
|
||||
const session = existingSession
|
||||
? await this.getSession(existingSession)
|
||||
: null;
|
||||
|
||||
const expiresAt = new Date(Date.now() + ttl * 1000);
|
||||
if (session) {
|
||||
return this.db.userSession.upsert({
|
||||
where: {
|
||||
sessionId_userId: {
|
||||
sessionId: session.id,
|
||||
userId: user.id,
|
||||
},
|
||||
},
|
||||
update: {
|
||||
expiresAt,
|
||||
},
|
||||
create: {
|
||||
sessionId: session.id,
|
||||
userId: user.id,
|
||||
expiresAt,
|
||||
},
|
||||
});
|
||||
} else {
|
||||
return this.db.userSession.create({
|
||||
data: {
|
||||
expiresAt,
|
||||
session: {
|
||||
create: {},
|
||||
},
|
||||
user: {
|
||||
connect: {
|
||||
id: user.id,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async revokeUserSessions(userId: string, sessionId?: string) {
|
||||
async revokeUserSessions(userId: string) {
|
||||
return this.db.userSession.deleteMany({
|
||||
where: {
|
||||
userId,
|
||||
sessionId,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async setCookie(_req: Request, res: Response, user: { id: string }) {
|
||||
const session = await this.createUserSession(
|
||||
user
|
||||
// TODO(@forehalo): enable multi user session
|
||||
// req.cookies[AuthService.sessionCookieName]
|
||||
);
|
||||
getSessionOptionsFromRequest(req: Request) {
|
||||
let sessionId: string | undefined =
|
||||
req.cookies[AuthService.sessionCookieName];
|
||||
|
||||
res.cookie(AuthService.sessionCookieName, session.sessionId, {
|
||||
expires: session.expiresAt ?? void 0,
|
||||
if (!sessionId && req.headers.authorization) {
|
||||
sessionId = extractTokenFromHeader(req.headers.authorization);
|
||||
}
|
||||
|
||||
const userId: string | undefined =
|
||||
req.cookies[AuthService.userCookieName] ||
|
||||
req.headers[AuthService.userCookieName];
|
||||
|
||||
return {
|
||||
sessionId,
|
||||
userId,
|
||||
};
|
||||
}
|
||||
|
||||
async setCookies(req: Request, res: Response, userId: string) {
|
||||
const { sessionId } = this.getSessionOptionsFromRequest(req);
|
||||
|
||||
const userSession = await this.createUserSession(userId, sessionId);
|
||||
|
||||
res.cookie(AuthService.sessionCookieName, userSession.sessionId, {
|
||||
...this.cookieOptions,
|
||||
expires: userSession.expiresAt ?? void 0,
|
||||
});
|
||||
|
||||
this.setUserCookie(res, userId);
|
||||
}
|
||||
|
||||
setUserCookie(res: Response, userId: string) {
|
||||
res.cookie(AuthService.userCookieName, userId, {
|
||||
...this.cookieOptions,
|
||||
// user cookie is client readable & writable for fast user switch if there are multiple users in one session
|
||||
// it safe to be non-secure & non-httpOnly because server will validate it by `cookie[AuthService.sessionCookieName]`
|
||||
httpOnly: false,
|
||||
secure: false,
|
||||
});
|
||||
}
|
||||
|
||||
async getUserSessionFromRequest(req: Request) {
|
||||
const { sessionId, userId } = this.getSessionOptionsFromRequest(req);
|
||||
|
||||
if (!sessionId) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return this.getUserSession(sessionId, userId);
|
||||
}
|
||||
|
||||
async changePassword(
|
||||
id: string,
|
||||
newPassword: string
|
||||
): Promise<Omit<User, 'password'>> {
|
||||
const hashedPassword = await this.crypto.encryptPassword(newPassword);
|
||||
return this.user.updateUser(id, { password: hashedPassword });
|
||||
return this.user.updateUser(id, { password: newPassword });
|
||||
}
|
||||
|
||||
async changeEmail(
|
||||
@@ -425,24 +375,16 @@ export class AuthService implements OnApplicationBootstrap {
|
||||
|
||||
async sendSignInEmail(email: string, link: string, signUp: boolean) {
|
||||
return signUp
|
||||
? await this.mailer.sendSignUpMail(link.toString(), {
|
||||
? await this.mailer.sendSignUpMail(link, {
|
||||
to: email,
|
||||
})
|
||||
: await this.mailer.sendSignInMail(link.toString(), {
|
||||
: await this.mailer.sendSignInMail(link, {
|
||||
to: email,
|
||||
});
|
||||
}
|
||||
|
||||
@Cron(CronExpression.EVERY_DAY_AT_MIDNIGHT)
|
||||
async cleanExpiredSessions() {
|
||||
await this.db.session.deleteMany({
|
||||
where: {
|
||||
expiresAt: {
|
||||
lte: new Date(),
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
await this.db.userSession.deleteMany({
|
||||
where: {
|
||||
expiresAt: {
|
||||
|
||||
@@ -1,13 +1,9 @@
|
||||
import type { ExecutionContext } from '@nestjs/common';
|
||||
import { createParamDecorator } from '@nestjs/common';
|
||||
import { User } from '@prisma/client';
|
||||
import { User, UserSession } from '@prisma/client';
|
||||
|
||||
import { getRequestResponseFromContext } from '../../fundamentals';
|
||||
|
||||
function getUserFromContext(context: ExecutionContext) {
|
||||
return getRequestResponseFromContext(context).req.user;
|
||||
}
|
||||
|
||||
/**
|
||||
* Used to fetch current user from the request context.
|
||||
*
|
||||
@@ -44,7 +40,7 @@ function getUserFromContext(context: ExecutionContext) {
|
||||
// eslint-disable-next-line no-redeclare
|
||||
export const CurrentUser = createParamDecorator(
|
||||
(_: unknown, context: ExecutionContext) => {
|
||||
return getUserFromContext(context);
|
||||
return getRequestResponseFromContext(context).req.session?.user;
|
||||
}
|
||||
);
|
||||
|
||||
@@ -53,3 +49,15 @@ export interface CurrentUser
|
||||
hasPassword: boolean | null;
|
||||
emailVerified: boolean;
|
||||
}
|
||||
|
||||
// interface and variable don't conflict
|
||||
// eslint-disable-next-line no-redeclare
|
||||
export const Session = createParamDecorator(
|
||||
(_: unknown, context: ExecutionContext) => {
|
||||
return getRequestResponseFromContext(context).req.session;
|
||||
}
|
||||
);
|
||||
|
||||
export type Session = UserSession & {
|
||||
user: CurrentUser;
|
||||
};
|
||||
@@ -69,13 +69,9 @@ export class TokenService {
|
||||
const valid =
|
||||
!expired && (!record.credential || record.credential === credential);
|
||||
|
||||
if ((expired || valid) && !keep) {
|
||||
const deleted = await this.db.verificationToken.deleteMany({
|
||||
where: {
|
||||
token,
|
||||
type,
|
||||
},
|
||||
});
|
||||
// always revoke expired token
|
||||
if (expired || (valid && !keep)) {
|
||||
const deleted = await this.revokeToken(type, token);
|
||||
|
||||
// already deleted, means token has been used
|
||||
if (!deleted.count) {
|
||||
@@ -86,6 +82,15 @@ export class TokenService {
|
||||
return valid ? record : null;
|
||||
}
|
||||
|
||||
async revokeToken(type: TokenType, token: string) {
|
||||
return await this.db.verificationToken.deleteMany({
|
||||
where: {
|
||||
token,
|
||||
type,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
@Cron(CronExpression.EVERY_DAY_AT_MIDNIGHT)
|
||||
async cleanExpiredTokens() {
|
||||
await this.db.verificationToken.deleteMany({
|
||||
|
||||
@@ -25,8 +25,8 @@ export class AdminGuard implements CanActivate, OnModuleInit {
|
||||
async canActivate(context: ExecutionContext) {
|
||||
const { req } = getRequestResponseFromContext(context);
|
||||
let allow = false;
|
||||
if (req.user) {
|
||||
allow = await this.feature.isAdmin(req.user.id);
|
||||
if (req.session) {
|
||||
allow = await this.feature.isAdmin(req.session.user.id);
|
||||
}
|
||||
|
||||
if (!allow) {
|
||||
|
||||
@@ -2,11 +2,25 @@ import './config';
|
||||
|
||||
import { Module } from '@nestjs/common';
|
||||
|
||||
import { ServerConfigResolver, ServerRuntimeConfigResolver } from './resolver';
|
||||
import {
|
||||
ServerConfigResolver,
|
||||
ServerFeatureConfigResolver,
|
||||
ServerRuntimeConfigResolver,
|
||||
ServerServiceConfigResolver,
|
||||
} from './resolver';
|
||||
import { ServerService } from './service';
|
||||
|
||||
@Module({
|
||||
providers: [ServerConfigResolver, ServerRuntimeConfigResolver],
|
||||
providers: [
|
||||
ServerService,
|
||||
ServerConfigResolver,
|
||||
ServerFeatureConfigResolver,
|
||||
ServerRuntimeConfigResolver,
|
||||
ServerServiceConfigResolver,
|
||||
],
|
||||
exports: [ServerService],
|
||||
})
|
||||
export class ServerConfigModule {}
|
||||
export { ADD_ENABLED_FEATURES, ServerConfigType } from './resolver';
|
||||
export { ServerService };
|
||||
export { ADD_ENABLED_FEATURES } from './server-feature';
|
||||
export { ServerFeature } from './types';
|
||||
|
||||
@@ -12,24 +12,15 @@ import {
|
||||
import { RuntimeConfig, RuntimeConfigType } from '@prisma/client';
|
||||
import { GraphQLJSON, GraphQLJSONObject } from 'graphql-scalars';
|
||||
|
||||
import { Config, DeploymentType, URLHelper } from '../../fundamentals';
|
||||
import { Config, URLHelper } from '../../fundamentals';
|
||||
import { Public } from '../auth';
|
||||
import { Admin } from '../common';
|
||||
import { FeatureType } from '../features';
|
||||
import { AvailableUserFeatureConfig } from '../features/resolver';
|
||||
import { ServerFlags } from './config';
|
||||
import { ServerFeature } from './types';
|
||||
|
||||
const ENABLED_FEATURES: Set<ServerFeature> = new Set();
|
||||
export function ADD_ENABLED_FEATURES(feature: ServerFeature) {
|
||||
ENABLED_FEATURES.add(feature);
|
||||
}
|
||||
|
||||
registerEnumType(ServerFeature, {
|
||||
name: 'ServerFeature',
|
||||
});
|
||||
|
||||
registerEnumType(DeploymentType, {
|
||||
name: 'ServerDeploymentType',
|
||||
});
|
||||
import { ENABLED_FEATURES } from './server-feature';
|
||||
import { ServerService } from './service';
|
||||
import { ServerConfigType } from './types';
|
||||
|
||||
@ObjectType()
|
||||
export class PasswordLimitsType {
|
||||
@@ -45,36 +36,6 @@ export class CredentialsRequirementType {
|
||||
password!: PasswordLimitsType;
|
||||
}
|
||||
|
||||
@ObjectType()
|
||||
export class ServerConfigType {
|
||||
@Field({
|
||||
description:
|
||||
'server identical name could be shown as badge on user interface',
|
||||
})
|
||||
name!: string;
|
||||
|
||||
@Field({ description: 'server version' })
|
||||
version!: string;
|
||||
|
||||
@Field({ description: 'server base url' })
|
||||
baseUrl!: string;
|
||||
|
||||
@Field(() => DeploymentType, { description: 'server type' })
|
||||
type!: DeploymentType;
|
||||
|
||||
/**
|
||||
* @deprecated
|
||||
*/
|
||||
@Field({ description: 'server flavor', deprecationReason: 'use `features`' })
|
||||
flavor!: string;
|
||||
|
||||
@Field(() => [ServerFeature], { description: 'enabled server features' })
|
||||
features!: ServerFeature[];
|
||||
|
||||
@Field({ description: 'enable telemetry' })
|
||||
enableTelemetry!: boolean;
|
||||
}
|
||||
|
||||
registerEnumType(RuntimeConfigType, {
|
||||
name: 'RuntimeConfigType',
|
||||
});
|
||||
@@ -115,7 +76,8 @@ export class ServerFlagsType implements ServerFlags {
|
||||
export class ServerConfigResolver {
|
||||
constructor(
|
||||
private readonly config: Config,
|
||||
private readonly url: URLHelper
|
||||
private readonly url: URLHelper,
|
||||
private readonly server: ServerService
|
||||
) {}
|
||||
|
||||
@Public()
|
||||
@@ -165,13 +127,65 @@ export class ServerConfigResolver {
|
||||
return flags;
|
||||
}, {} as ServerFlagsType);
|
||||
}
|
||||
|
||||
@ResolveField(() => Boolean, {
|
||||
description: 'whether server has been initialized',
|
||||
})
|
||||
async initialized() {
|
||||
return this.server.initialized();
|
||||
}
|
||||
}
|
||||
|
||||
@Resolver(() => ServerConfigType)
|
||||
export class ServerFeatureConfigResolver extends AvailableUserFeatureConfig {
|
||||
constructor(config: Config) {
|
||||
super(config);
|
||||
}
|
||||
|
||||
@ResolveField(() => [FeatureType], {
|
||||
description: 'Features for user that can be configured',
|
||||
})
|
||||
override availableUserFeatures() {
|
||||
return super.availableUserFeatures();
|
||||
}
|
||||
}
|
||||
|
||||
@ObjectType()
|
||||
class ServerServiceConfig {
|
||||
@Field()
|
||||
name!: string;
|
||||
|
||||
@Field(() => GraphQLJSONObject)
|
||||
config!: any;
|
||||
}
|
||||
|
||||
interface ServerServeConfig {
|
||||
https: boolean;
|
||||
host: string;
|
||||
port: number;
|
||||
externalUrl: string;
|
||||
}
|
||||
|
||||
interface ServerMailerConfig {
|
||||
host?: string | null;
|
||||
port?: number | null;
|
||||
secure?: boolean | null;
|
||||
service?: string | null;
|
||||
sender?: string | null;
|
||||
}
|
||||
|
||||
interface ServerDatabaseConfig {
|
||||
host: string;
|
||||
port: number;
|
||||
user?: string | null;
|
||||
database: string;
|
||||
}
|
||||
|
||||
@Admin()
|
||||
@Resolver(() => ServerRuntimeConfigType)
|
||||
export class ServerRuntimeConfigResolver {
|
||||
constructor(private readonly config: Config) {}
|
||||
|
||||
@Admin()
|
||||
@Query(() => [ServerRuntimeConfigType], {
|
||||
description: 'get all server runtime configurable settings',
|
||||
})
|
||||
@@ -179,7 +193,6 @@ export class ServerRuntimeConfigResolver {
|
||||
return this.config.runtime.list();
|
||||
}
|
||||
|
||||
@Admin()
|
||||
@Mutation(() => ServerRuntimeConfigType, {
|
||||
description: 'update server runtime configurable setting',
|
||||
})
|
||||
@@ -190,7 +203,6 @@ export class ServerRuntimeConfigResolver {
|
||||
return await this.config.runtime.set(id as any, value);
|
||||
}
|
||||
|
||||
@Admin()
|
||||
@Mutation(() => [ServerRuntimeConfigType], {
|
||||
description: 'update multiple server runtime configurable settings',
|
||||
})
|
||||
@@ -205,3 +217,57 @@ export class ServerRuntimeConfigResolver {
|
||||
return results;
|
||||
}
|
||||
}
|
||||
|
||||
@Admin()
|
||||
@Resolver(() => ServerServiceConfig)
|
||||
export class ServerServiceConfigResolver {
|
||||
constructor(private readonly config: Config) {}
|
||||
|
||||
@Query(() => [ServerServiceConfig])
|
||||
serverServiceConfigs() {
|
||||
return [
|
||||
{
|
||||
name: 'server',
|
||||
config: this.serve(),
|
||||
},
|
||||
{
|
||||
name: 'mailer',
|
||||
config: this.mail(),
|
||||
},
|
||||
{
|
||||
name: 'database',
|
||||
config: this.database(),
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
serve(): ServerServeConfig {
|
||||
return this.config.server;
|
||||
}
|
||||
|
||||
mail(): ServerMailerConfig {
|
||||
const sender =
|
||||
typeof this.config.mailer.from === 'string'
|
||||
? this.config.mailer.from
|
||||
: this.config.mailer.from?.address;
|
||||
|
||||
return {
|
||||
host: this.config.mailer.host,
|
||||
port: this.config.mailer.port,
|
||||
secure: this.config.mailer.secure,
|
||||
service: this.config.mailer.service,
|
||||
sender,
|
||||
};
|
||||
}
|
||||
|
||||
database(): ServerDatabaseConfig {
|
||||
const url = new URL(this.config.database.datasourceUrl);
|
||||
|
||||
return {
|
||||
host: url.hostname,
|
||||
port: Number(url.port),
|
||||
user: url.username,
|
||||
database: url.pathname.slice(1) ?? url.username,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,7 @@
|
||||
import { ServerFeature } from './types';
|
||||
|
||||
export const ENABLED_FEATURES: Set<ServerFeature> = new Set();
|
||||
export function ADD_ENABLED_FEATURES(feature: ServerFeature) {
|
||||
ENABLED_FEATURES.add(feature);
|
||||
}
|
||||
export { ServerFeature };
|
||||
17
packages/backend/server/src/core/config/service.ts
Normal file
17
packages/backend/server/src/core/config/service.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { PrismaClient } from '@prisma/client';
|
||||
|
||||
@Injectable()
|
||||
export class ServerService {
|
||||
private _initialized: boolean | null = null;
|
||||
constructor(private readonly db: PrismaClient) {}
|
||||
|
||||
async initialized() {
|
||||
if (!this._initialized) {
|
||||
const userCount = await this.db.user.count();
|
||||
this._initialized = userCount > 0;
|
||||
}
|
||||
|
||||
return this._initialized;
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,48 @@
|
||||
import { Field, ObjectType, registerEnumType } from '@nestjs/graphql';
|
||||
|
||||
import { DeploymentType } from '../../fundamentals';
|
||||
|
||||
export enum ServerFeature {
|
||||
Captcha = 'captcha',
|
||||
Copilot = 'copilot',
|
||||
Payment = 'payment',
|
||||
OAuth = 'oauth',
|
||||
}
|
||||
|
||||
registerEnumType(ServerFeature, {
|
||||
name: 'ServerFeature',
|
||||
});
|
||||
|
||||
registerEnumType(DeploymentType, {
|
||||
name: 'ServerDeploymentType',
|
||||
});
|
||||
|
||||
@ObjectType()
|
||||
export class ServerConfigType {
|
||||
@Field({
|
||||
description:
|
||||
'server identical name could be shown as badge on user interface',
|
||||
})
|
||||
name!: string;
|
||||
|
||||
@Field({ description: 'server version' })
|
||||
version!: string;
|
||||
|
||||
@Field({ description: 'server base url' })
|
||||
baseUrl!: string;
|
||||
|
||||
@Field(() => DeploymentType, { description: 'server type' })
|
||||
type!: DeploymentType;
|
||||
|
||||
/**
|
||||
* @deprecated
|
||||
*/
|
||||
@Field({ description: 'server flavor', deprecationReason: 'use `features`' })
|
||||
flavor!: string;
|
||||
|
||||
@Field(() => [ServerFeature], { description: 'enabled server features' })
|
||||
features!: ServerFeature[];
|
||||
|
||||
@Field({ description: 'enable telemetry' })
|
||||
enableTelemetry!: boolean;
|
||||
}
|
||||
|
||||
186
packages/backend/server/src/core/doc/adapters/userspace.ts
Normal file
186
packages/backend/server/src/core/doc/adapters/userspace.ts
Normal file
@@ -0,0 +1,186 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { PrismaClient } from '@prisma/client';
|
||||
|
||||
import { Mutex } from '../../../fundamentals';
|
||||
import { DocStorageOptions } from '../options';
|
||||
import { DocRecord, DocStorageAdapter } from '../storage';
|
||||
|
||||
@Injectable()
|
||||
export class PgUserspaceDocStorageAdapter extends DocStorageAdapter {
|
||||
constructor(
|
||||
private readonly db: PrismaClient,
|
||||
private readonly mutex: Mutex,
|
||||
options: DocStorageOptions
|
||||
) {
|
||||
super(options);
|
||||
}
|
||||
|
||||
// no updates queue for userspace, directly merge them inplace
|
||||
// no history record for userspace
|
||||
protected async getDocUpdates() {
|
||||
return [];
|
||||
}
|
||||
|
||||
protected async markUpdatesMerged() {
|
||||
return 0;
|
||||
}
|
||||
|
||||
async listDocHistories() {
|
||||
return [];
|
||||
}
|
||||
|
||||
async getDocHistory() {
|
||||
return null;
|
||||
}
|
||||
|
||||
protected async createDocHistory() {
|
||||
return false;
|
||||
}
|
||||
|
||||
override async rollbackDoc() {
|
||||
return;
|
||||
}
|
||||
|
||||
override async getDoc(spaceId: string, docId: string) {
|
||||
return this.getDocSnapshot(spaceId, docId);
|
||||
}
|
||||
|
||||
async pushDocUpdates(
|
||||
userId: string,
|
||||
docId: string,
|
||||
updates: Uint8Array[],
|
||||
editorId?: string
|
||||
) {
|
||||
if (!updates.length) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
await using _lock = await this.lockDocForUpdate(userId, docId);
|
||||
const snapshot = await this.getDocSnapshot(userId, docId);
|
||||
const now = Date.now();
|
||||
const pendings = updates.map((update, i) => ({
|
||||
bin: update,
|
||||
timestamp: now + i,
|
||||
}));
|
||||
|
||||
const { timestamp, bin } = await this.squash(
|
||||
snapshot ? [snapshot, ...pendings] : pendings
|
||||
);
|
||||
|
||||
await this.setDocSnapshot({
|
||||
spaceId: userId,
|
||||
docId,
|
||||
bin,
|
||||
timestamp,
|
||||
editor: editorId,
|
||||
});
|
||||
|
||||
return timestamp;
|
||||
}
|
||||
|
||||
async deleteDoc(userId: string, docId: string) {
|
||||
await this.db.userSnapshot.deleteMany({
|
||||
where: {
|
||||
userId,
|
||||
id: docId,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async deleteSpace(userId: string) {
|
||||
await this.db.userSnapshot.deleteMany({
|
||||
where: {
|
||||
userId,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async getSpaceDocTimestamps(userId: string, after?: number) {
|
||||
const snapshots = await this.db.userSnapshot.findMany({
|
||||
select: {
|
||||
id: true,
|
||||
updatedAt: true,
|
||||
},
|
||||
where: {
|
||||
userId,
|
||||
...(after
|
||||
? {
|
||||
updatedAt: {
|
||||
gt: new Date(after),
|
||||
},
|
||||
}
|
||||
: {}),
|
||||
},
|
||||
});
|
||||
|
||||
const result: Record<string, number> = {};
|
||||
|
||||
snapshots.forEach(s => {
|
||||
result[s.id] = s.updatedAt.getTime();
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
protected async getDocSnapshot(userId: string, docId: string) {
|
||||
const snapshot = await this.db.userSnapshot.findUnique({
|
||||
where: {
|
||||
userId_id: {
|
||||
userId,
|
||||
id: docId,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
if (!snapshot) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
spaceId: userId,
|
||||
docId,
|
||||
bin: snapshot.blob,
|
||||
timestamp: snapshot.updatedAt.getTime(),
|
||||
editor: snapshot.userId,
|
||||
};
|
||||
}
|
||||
|
||||
protected async setDocSnapshot(snapshot: DocRecord) {
|
||||
// we always get lock before writing to user snapshot table,
|
||||
// so a simple upsert without testing on updatedAt is safe
|
||||
await this.db.userSnapshot.upsert({
|
||||
where: {
|
||||
userId_id: {
|
||||
userId: snapshot.spaceId,
|
||||
id: snapshot.docId,
|
||||
},
|
||||
},
|
||||
update: {
|
||||
blob: Buffer.from(snapshot.bin),
|
||||
updatedAt: new Date(snapshot.timestamp),
|
||||
},
|
||||
create: {
|
||||
userId: snapshot.spaceId,
|
||||
id: snapshot.docId,
|
||||
blob: Buffer.from(snapshot.bin),
|
||||
createdAt: new Date(snapshot.timestamp),
|
||||
updatedAt: new Date(snapshot.timestamp),
|
||||
},
|
||||
});
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
protected override async lockDocForUpdate(
|
||||
workspaceId: string,
|
||||
docId: string
|
||||
) {
|
||||
const lock = await this.mutex.lock(`userspace:${workspaceId}:${docId}`);
|
||||
|
||||
if (!lock) {
|
||||
throw new Error('Too many concurrent writings');
|
||||
}
|
||||
|
||||
return lock;
|
||||
}
|
||||
}
|
||||
618
packages/backend/server/src/core/doc/adapters/workspace.ts
Normal file
618
packages/backend/server/src/core/doc/adapters/workspace.ts
Normal file
@@ -0,0 +1,618 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { PrismaClient } from '@prisma/client';
|
||||
import { chunk } from 'lodash-es';
|
||||
|
||||
import {
|
||||
Cache,
|
||||
DocHistoryNotFound,
|
||||
DocNotFound,
|
||||
FailedToSaveUpdates,
|
||||
FailedToUpsertSnapshot,
|
||||
metrics,
|
||||
Mutex,
|
||||
} from '../../../fundamentals';
|
||||
import { retryable } from '../../../fundamentals/utils/promise';
|
||||
import { DocStorageOptions } from '../options';
|
||||
import {
|
||||
DocRecord,
|
||||
DocStorageAdapter,
|
||||
DocUpdate,
|
||||
HistoryFilter,
|
||||
} from '../storage';
|
||||
|
||||
const UPDATES_QUEUE_CACHE_KEY = 'doc:manager:updates';
|
||||
|
||||
@Injectable()
|
||||
export class PgWorkspaceDocStorageAdapter extends DocStorageAdapter {
|
||||
private readonly logger = new Logger(PgWorkspaceDocStorageAdapter.name);
|
||||
|
||||
constructor(
|
||||
private readonly db: PrismaClient,
|
||||
private readonly mutex: Mutex,
|
||||
private readonly cache: Cache,
|
||||
protected override readonly options: DocStorageOptions
|
||||
) {
|
||||
super(options);
|
||||
}
|
||||
|
||||
async pushDocUpdates(
|
||||
workspaceId: string,
|
||||
docId: string,
|
||||
updates: Uint8Array[],
|
||||
editorId?: string
|
||||
) {
|
||||
if (!updates.length) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
let pendings = updates;
|
||||
let done = 0;
|
||||
let timestamp = Date.now();
|
||||
try {
|
||||
await retryable(async () => {
|
||||
if (done !== 0) {
|
||||
pendings = pendings.slice(done);
|
||||
}
|
||||
|
||||
// TODO(@forehalo): remove in next release
|
||||
const lastSeq = await this.getUpdateSeq(
|
||||
workspaceId,
|
||||
docId,
|
||||
updates.length
|
||||
);
|
||||
|
||||
let turn = 0;
|
||||
const batchCount = 10;
|
||||
for (const batch of chunk(pendings, batchCount)) {
|
||||
const now = Date.now();
|
||||
await this.db.update.createMany({
|
||||
data: batch.map((update, i) => {
|
||||
const subSeq = turn * batchCount + i + 1;
|
||||
// `seq` is the last seq num of the batch
|
||||
// example for 11 batched updates, start from seq num 20
|
||||
// seq for first update in the batch should be:
|
||||
// 31 - 11 + subSeq(0 * 10 + 0 + 1) = 21
|
||||
// ^ last seq num ^ updates.length ^ turn ^ batchCount ^i
|
||||
const seq = lastSeq - updates.length + subSeq;
|
||||
const createdAt = now + subSeq;
|
||||
timestamp = Math.max(timestamp, createdAt);
|
||||
|
||||
return {
|
||||
workspaceId,
|
||||
id: docId,
|
||||
blob: Buffer.from(update),
|
||||
seq,
|
||||
createdAt: new Date(createdAt),
|
||||
createdBy: editorId || null,
|
||||
};
|
||||
}),
|
||||
});
|
||||
turn++;
|
||||
done += batch.length;
|
||||
await this.updateCachedUpdatesCount(workspaceId, docId, batch.length);
|
||||
}
|
||||
});
|
||||
} catch (e) {
|
||||
this.logger.error('Failed to insert doc updates', e);
|
||||
metrics.doc.counter('doc_update_insert_failed').add(1);
|
||||
throw new FailedToSaveUpdates();
|
||||
}
|
||||
|
||||
return timestamp;
|
||||
}
|
||||
|
||||
protected async getDocUpdates(workspaceId: string, docId: string) {
|
||||
const rows = await this.db.update.findMany({
|
||||
where: {
|
||||
workspaceId,
|
||||
id: docId,
|
||||
},
|
||||
orderBy: {
|
||||
createdAt: 'asc',
|
||||
},
|
||||
});
|
||||
|
||||
return rows.map(row => ({
|
||||
bin: row.blob,
|
||||
timestamp: row.createdAt.getTime(),
|
||||
editor: row.createdBy || undefined,
|
||||
}));
|
||||
}
|
||||
|
||||
async deleteDoc(workspaceId: string, docId: string) {
|
||||
const ident = { where: { workspaceId, id: docId } };
|
||||
await this.db.$transaction([
|
||||
this.db.snapshot.deleteMany(ident),
|
||||
this.db.update.deleteMany(ident),
|
||||
this.db.snapshotHistory.deleteMany(ident),
|
||||
]);
|
||||
}
|
||||
|
||||
async deleteSpace(workspaceId: string) {
|
||||
const ident = { where: { workspaceId } };
|
||||
await this.db.$transaction([
|
||||
this.db.workspace.deleteMany({
|
||||
where: {
|
||||
id: workspaceId,
|
||||
},
|
||||
}),
|
||||
this.db.snapshot.deleteMany(ident),
|
||||
this.db.update.deleteMany(ident),
|
||||
this.db.snapshotHistory.deleteMany(ident),
|
||||
]);
|
||||
}
|
||||
|
||||
async getSpaceDocTimestamps(workspaceId: string, after?: number) {
|
||||
const snapshots = await this.db.snapshot.findMany({
|
||||
select: {
|
||||
id: true,
|
||||
updatedAt: true,
|
||||
},
|
||||
where: {
|
||||
workspaceId,
|
||||
...(after
|
||||
? {
|
||||
updatedAt: {
|
||||
gt: new Date(after),
|
||||
},
|
||||
}
|
||||
: {}),
|
||||
},
|
||||
});
|
||||
|
||||
const updates = await this.db.update.groupBy({
|
||||
where: {
|
||||
workspaceId,
|
||||
...(after
|
||||
? {
|
||||
createdAt: {
|
||||
gt: new Date(after),
|
||||
},
|
||||
}
|
||||
: {}),
|
||||
},
|
||||
by: ['id'],
|
||||
_max: {
|
||||
createdAt: true,
|
||||
},
|
||||
});
|
||||
|
||||
const result: Record<string, number> = {};
|
||||
|
||||
snapshots.forEach(s => {
|
||||
result[s.id] = s.updatedAt.getTime();
|
||||
});
|
||||
|
||||
updates.forEach(u => {
|
||||
if (u._max.createdAt) {
|
||||
result[u.id] = u._max.createdAt.getTime();
|
||||
}
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
protected async markUpdatesMerged(
|
||||
workspaceId: string,
|
||||
docId: string,
|
||||
updates: DocUpdate[]
|
||||
) {
|
||||
const result = await this.db.update.deleteMany({
|
||||
where: {
|
||||
workspaceId,
|
||||
id: docId,
|
||||
createdAt: {
|
||||
in: updates.map(u => new Date(u.timestamp)),
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
await this.updateCachedUpdatesCount(workspaceId, docId, -result.count);
|
||||
return result.count;
|
||||
}
|
||||
|
||||
async listDocHistories(
|
||||
workspaceId: string,
|
||||
docId: string,
|
||||
query: HistoryFilter
|
||||
) {
|
||||
const histories = await this.db.snapshotHistory.findMany({
|
||||
select: {
|
||||
timestamp: true,
|
||||
createdByUser: {
|
||||
select: {
|
||||
name: true,
|
||||
avatarUrl: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
where: {
|
||||
workspaceId,
|
||||
id: docId,
|
||||
timestamp: {
|
||||
lt: query.before ? new Date(query.before) : new Date(),
|
||||
},
|
||||
},
|
||||
orderBy: {
|
||||
timestamp: 'desc',
|
||||
},
|
||||
take: query.limit,
|
||||
});
|
||||
|
||||
return histories.map(h => ({
|
||||
timestamp: h.timestamp.getTime(),
|
||||
editor: h.createdByUser,
|
||||
}));
|
||||
}
|
||||
|
||||
async getDocHistory(workspaceId: string, docId: string, timestamp: number) {
|
||||
const history = await this.db.snapshotHistory.findUnique({
|
||||
where: {
|
||||
workspaceId_id_timestamp: {
|
||||
workspaceId,
|
||||
id: docId,
|
||||
timestamp: new Date(timestamp),
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
if (!history) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
spaceId: workspaceId,
|
||||
docId,
|
||||
bin: history.blob,
|
||||
timestamp,
|
||||
editor: history.createdBy || undefined,
|
||||
};
|
||||
}
|
||||
|
||||
override async rollbackDoc(
|
||||
spaceId: string,
|
||||
docId: string,
|
||||
timestamp: number,
|
||||
editorId?: string
|
||||
): Promise<void> {
|
||||
await using _lock = await this.lockDocForUpdate(spaceId, docId);
|
||||
const toSnapshot = await this.getDocHistory(spaceId, docId, timestamp);
|
||||
if (!toSnapshot) {
|
||||
throw new DocHistoryNotFound({ spaceId, docId, timestamp });
|
||||
}
|
||||
|
||||
const fromSnapshot = await this.getDocSnapshot(spaceId, docId);
|
||||
|
||||
if (!fromSnapshot) {
|
||||
throw new DocNotFound({ spaceId, docId });
|
||||
}
|
||||
|
||||
// force create a new history record after rollback
|
||||
await this.createDocHistory(
|
||||
{
|
||||
...fromSnapshot,
|
||||
// override the editor to the one who requested the rollback
|
||||
editor: editorId,
|
||||
},
|
||||
true
|
||||
);
|
||||
// WARN:
|
||||
// we should never do the snapshot updating in recovering,
|
||||
// which is not the solution in CRDT.
|
||||
// let user revert in client and update the data in sync system
|
||||
// const change = this.generateChangeUpdate(fromSnapshot.bin, toSnapshot.bin);
|
||||
// await this.pushDocUpdates(spaceId, docId, [change]);
|
||||
|
||||
metrics.doc
|
||||
.counter('history_recovered_counter', {
|
||||
description: 'How many times history recovered request happened',
|
||||
})
|
||||
.add(1);
|
||||
}
|
||||
|
||||
protected async createDocHistory(snapshot: DocRecord, force = false) {
|
||||
const last = await this.lastDocHistory(snapshot.spaceId, snapshot.docId);
|
||||
|
||||
let shouldCreateHistory = false;
|
||||
|
||||
if (!last) {
|
||||
// never created
|
||||
shouldCreateHistory = true;
|
||||
} else {
|
||||
const lastHistoryTimestamp = last.timestamp.getTime();
|
||||
if (lastHistoryTimestamp === snapshot.timestamp) {
|
||||
// no change
|
||||
shouldCreateHistory = false;
|
||||
} else if (
|
||||
// force
|
||||
force ||
|
||||
// last history created before interval in configs
|
||||
lastHistoryTimestamp <
|
||||
snapshot.timestamp - this.options.historyMinInterval(snapshot.spaceId)
|
||||
) {
|
||||
shouldCreateHistory = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (shouldCreateHistory) {
|
||||
if (this.isEmptyBin(snapshot.bin)) {
|
||||
this.logger.debug(
|
||||
`Doc is empty, skip creating history record for ${snapshot.docId} in workspace ${snapshot.spaceId}`
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
await this.db.snapshotHistory
|
||||
.create({
|
||||
select: {
|
||||
timestamp: true,
|
||||
},
|
||||
data: {
|
||||
workspaceId: snapshot.spaceId,
|
||||
id: snapshot.docId,
|
||||
timestamp: new Date(snapshot.timestamp),
|
||||
blob: Buffer.from(snapshot.bin),
|
||||
createdBy: snapshot.editor,
|
||||
expiredAt: new Date(
|
||||
Date.now() + (await this.options.historyMaxAge(snapshot.spaceId))
|
||||
),
|
||||
},
|
||||
})
|
||||
.catch(() => {
|
||||
// safe to ignore
|
||||
// only happens when duplicated history record created in multi processes
|
||||
});
|
||||
|
||||
metrics.doc
|
||||
.counter('history_created_counter', {
|
||||
description: 'How many times the snapshot history created',
|
||||
})
|
||||
.add(1);
|
||||
this.logger.debug(
|
||||
`History created for ${snapshot.docId} in workspace ${snapshot.spaceId}.`
|
||||
);
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
protected async getDocSnapshot(workspaceId: string, docId: string) {
|
||||
const snapshot = await this.db.snapshot.findUnique({
|
||||
where: {
|
||||
id_workspaceId: {
|
||||
workspaceId,
|
||||
id: docId,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
if (!snapshot) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
spaceId: workspaceId,
|
||||
docId,
|
||||
bin: snapshot.blob,
|
||||
timestamp: snapshot.updatedAt.getTime(),
|
||||
// creator and editor may null if their account is deleted
|
||||
editor: snapshot.updatedBy || snapshot.createdBy || undefined,
|
||||
};
|
||||
}
|
||||
|
||||
protected async setDocSnapshot(snapshot: DocRecord) {
|
||||
const { spaceId, docId, bin, timestamp } = snapshot;
|
||||
|
||||
if (this.isEmptyBin(bin)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const updatedAt = new Date(timestamp);
|
||||
|
||||
// CONCERNS:
|
||||
// i. Because we save the real user's last seen action time as `updatedAt`,
|
||||
// it's possible to simply compare the `updatedAt` to determine if the snapshot is older than the one we are going to save.
|
||||
//
|
||||
// ii. Prisma doesn't support `upsert` with additional `where` condition along side unique constraint.
|
||||
// In our case, we need to manually check the `updatedAt` to avoid overriding the newer snapshot.
|
||||
// where: { id_workspaceId: {}, updatedAt: { lt: updatedAt } }
|
||||
// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
try {
|
||||
const result: { updatedAt: Date }[] = await this.db.$queryRaw`
|
||||
INSERT INTO "snapshots" ("workspace_id", "guid", "blob", "created_at", "updated_at", "created_by", "updated_by")
|
||||
VALUES (${spaceId}, ${docId}, ${bin}, DEFAULT, ${updatedAt}, ${snapshot.editor}, ${snapshot.editor})
|
||||
ON CONFLICT ("workspace_id", "guid")
|
||||
DO UPDATE SET "blob" = ${bin}, "updated_at" = ${updatedAt}, "updated_by" = ${snapshot.editor}
|
||||
WHERE "snapshots"."workspace_id" = ${spaceId} AND "snapshots"."guid" = ${docId} AND "snapshots"."updated_at" <= ${updatedAt}
|
||||
RETURNING "snapshots"."workspace_id" as "workspaceId", "snapshots"."guid" as "id", "snapshots"."updated_at" as "updatedAt"
|
||||
`;
|
||||
|
||||
// const result = await this.db.snapshot.upsert({
|
||||
// select: {
|
||||
// updatedAt: true,
|
||||
// seq: true,
|
||||
// },
|
||||
// where: {
|
||||
// id_workspaceId: {
|
||||
// workspaceId,
|
||||
// id: guid,
|
||||
// },
|
||||
// ⬇️ NOT SUPPORTED BY PRISMA YET
|
||||
// updatedAt: {
|
||||
// lt: updatedAt,
|
||||
// },
|
||||
// },
|
||||
// update: {
|
||||
// blob,
|
||||
// state,
|
||||
// updatedAt,
|
||||
// },
|
||||
// create: {
|
||||
// workspaceId,
|
||||
// id: guid,
|
||||
// blob,
|
||||
// state,
|
||||
// updatedAt,
|
||||
// seq,
|
||||
// },
|
||||
// });
|
||||
|
||||
// if the condition `snapshot.updatedAt > updatedAt` is true, by which means the snapshot has already been updated by other process,
|
||||
// the updates has been applied to current `doc` must have been seen by the other process as well.
|
||||
// The `updatedSnapshot` will be `undefined` in this case.
|
||||
const updatedSnapshot = result.at(0);
|
||||
return !!updatedSnapshot;
|
||||
} catch (e) {
|
||||
metrics.doc.counter('snapshot_upsert_failed').add(1);
|
||||
this.logger.error('Failed to upsert snapshot', e);
|
||||
throw new FailedToUpsertSnapshot();
|
||||
}
|
||||
}
|
||||
|
||||
protected override async lockDocForUpdate(
|
||||
workspaceId: string,
|
||||
docId: string
|
||||
) {
|
||||
const lock = await this.mutex.lock(`doc:update:${workspaceId}:${docId}`);
|
||||
|
||||
if (!lock) {
|
||||
throw new Error('Too many concurrent writings');
|
||||
}
|
||||
|
||||
return lock;
|
||||
}
|
||||
|
||||
protected async lastDocHistory(workspaceId: string, id: string) {
|
||||
return this.db.snapshotHistory.findFirst({
|
||||
where: {
|
||||
workspaceId,
|
||||
id,
|
||||
},
|
||||
select: {
|
||||
timestamp: true,
|
||||
state: true,
|
||||
},
|
||||
orderBy: {
|
||||
timestamp: 'desc',
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// for auto merging
|
||||
async randomDoc() {
|
||||
const key = await this.cache.mapRandomKey(UPDATES_QUEUE_CACHE_KEY);
|
||||
|
||||
if (key) {
|
||||
const cachedCount = await this.cache.mapIncrease(
|
||||
UPDATES_QUEUE_CACHE_KEY,
|
||||
key,
|
||||
0
|
||||
);
|
||||
|
||||
if (cachedCount > 0) {
|
||||
const [workspaceId, id] = key.split('::');
|
||||
const count = await this.db.update.count({
|
||||
where: {
|
||||
workspaceId,
|
||||
id,
|
||||
},
|
||||
});
|
||||
|
||||
// FIXME(@forehalo): somehow the update count in cache is not accurate
|
||||
if (count === 0) {
|
||||
metrics.doc
|
||||
.counter('doc_update_count_inconsistent_with_cache')
|
||||
.add(1);
|
||||
await this.cache.mapDelete(UPDATES_QUEUE_CACHE_KEY, key);
|
||||
return null;
|
||||
}
|
||||
|
||||
return { workspaceId, docId: id };
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private async updateCachedUpdatesCount(
|
||||
workspaceId: string,
|
||||
guid: string,
|
||||
count: number
|
||||
) {
|
||||
const result = await this.cache.mapIncrease(
|
||||
UPDATES_QUEUE_CACHE_KEY,
|
||||
`${workspaceId}::${guid}`,
|
||||
count
|
||||
);
|
||||
|
||||
if (result <= 0) {
|
||||
await this.cache.mapDelete(
|
||||
UPDATES_QUEUE_CACHE_KEY,
|
||||
`${workspaceId}::${guid}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated
|
||||
*/
|
||||
private readonly seqMap = new Map<string, number>();
|
||||
/**
|
||||
*
|
||||
* @deprecated updates do not rely on seq number anymore
|
||||
*
|
||||
* keep in next release to avoid downtime when upgrading instances
|
||||
*/
|
||||
private async getUpdateSeq(workspaceId: string, guid: string, batch = 1) {
|
||||
const MAX_SEQ_NUM = 0x3fffffff; // u31
|
||||
|
||||
try {
|
||||
const { seq } = await this.db.snapshot.update({
|
||||
select: {
|
||||
seq: true,
|
||||
},
|
||||
where: {
|
||||
id_workspaceId: {
|
||||
workspaceId,
|
||||
id: guid,
|
||||
},
|
||||
},
|
||||
data: {
|
||||
seq: {
|
||||
increment: batch,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
if (!seq) {
|
||||
return batch;
|
||||
}
|
||||
|
||||
// reset
|
||||
if (seq >= MAX_SEQ_NUM) {
|
||||
await this.db.snapshot.update({
|
||||
select: {
|
||||
seq: true,
|
||||
},
|
||||
where: {
|
||||
id_workspaceId: {
|
||||
workspaceId,
|
||||
id: guid,
|
||||
},
|
||||
},
|
||||
data: {
|
||||
seq: 0,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
return seq;
|
||||
} catch {
|
||||
// not existing snapshot just count it from 1
|
||||
const last = this.seqMap.get(workspaceId + guid) ?? 0;
|
||||
this.seqMap.set(workspaceId + guid, last + batch);
|
||||
return last + batch;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,266 +0,0 @@
|
||||
import { isDeepStrictEqual } from 'node:util';
|
||||
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { Cron, CronExpression } from '@nestjs/schedule';
|
||||
import { PrismaClient } from '@prisma/client';
|
||||
|
||||
import type { EventPayload } from '../../fundamentals';
|
||||
import {
|
||||
Config,
|
||||
DocHistoryNotFound,
|
||||
DocNotFound,
|
||||
metrics,
|
||||
OnEvent,
|
||||
WorkspaceNotFound,
|
||||
} from '../../fundamentals';
|
||||
import { QuotaService } from '../quota';
|
||||
import { Permission } from '../workspaces/types';
|
||||
import { isEmptyBuffer } from './manager';
|
||||
|
||||
@Injectable()
|
||||
export class DocHistoryManager {
|
||||
private readonly logger = new Logger(DocHistoryManager.name);
|
||||
constructor(
|
||||
private readonly config: Config,
|
||||
private readonly db: PrismaClient,
|
||||
private readonly quota: QuotaService
|
||||
) {}
|
||||
|
||||
@OnEvent('workspace.deleted')
|
||||
onWorkspaceDeleted(workspaceId: EventPayload<'workspace.deleted'>) {
|
||||
return this.db.snapshotHistory.deleteMany({
|
||||
where: {
|
||||
workspaceId,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
@OnEvent('snapshot.deleted')
|
||||
onSnapshotDeleted({ workspaceId, id }: EventPayload<'snapshot.deleted'>) {
|
||||
return this.db.snapshotHistory.deleteMany({
|
||||
where: {
|
||||
workspaceId,
|
||||
id,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
@OnEvent('snapshot.updated')
|
||||
async onDocUpdated(
|
||||
{ workspaceId, id, previous }: EventPayload<'snapshot.updated'>,
|
||||
forceCreate = false
|
||||
) {
|
||||
const last = await this.last(workspaceId, id);
|
||||
|
||||
let shouldCreateHistory = false;
|
||||
|
||||
if (!last) {
|
||||
// never created
|
||||
shouldCreateHistory = true;
|
||||
} else if (last.timestamp === previous.updatedAt) {
|
||||
// no change
|
||||
shouldCreateHistory = false;
|
||||
} else if (
|
||||
// force
|
||||
forceCreate ||
|
||||
// last history created before interval in configs
|
||||
last.timestamp.getTime() <
|
||||
previous.updatedAt.getTime() - this.config.doc.history.interval
|
||||
) {
|
||||
shouldCreateHistory = true;
|
||||
}
|
||||
|
||||
if (shouldCreateHistory) {
|
||||
// skip the history recording when no actual update on snapshot happended
|
||||
if (last && isDeepStrictEqual(last.state, previous.state)) {
|
||||
this.logger.debug(
|
||||
`State matches, skip creating history record for ${id} in workspace ${workspaceId}`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (isEmptyBuffer(previous.blob)) {
|
||||
this.logger.debug(
|
||||
`Doc is empty, skip creating history record for ${id} in workspace ${workspaceId}`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
await this.db.snapshotHistory
|
||||
.create({
|
||||
select: {
|
||||
timestamp: true,
|
||||
},
|
||||
data: {
|
||||
workspaceId,
|
||||
id,
|
||||
timestamp: previous.updatedAt,
|
||||
blob: previous.blob,
|
||||
state: previous.state,
|
||||
expiredAt: await this.getExpiredDateFromNow(workspaceId),
|
||||
},
|
||||
})
|
||||
.catch(() => {
|
||||
// safe to ignore
|
||||
// only happens when duplicated history record created in multi processes
|
||||
});
|
||||
metrics.doc
|
||||
.counter('history_created_counter', {
|
||||
description: 'How many times the snapshot history created',
|
||||
})
|
||||
.add(1);
|
||||
this.logger.debug(
|
||||
`History created for ${id} in workspace ${workspaceId}.`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
async list(
|
||||
workspaceId: string,
|
||||
id: string,
|
||||
before: Date = new Date(),
|
||||
take: number = 10
|
||||
) {
|
||||
return this.db.snapshotHistory.findMany({
|
||||
select: {
|
||||
timestamp: true,
|
||||
},
|
||||
where: {
|
||||
workspaceId,
|
||||
id,
|
||||
timestamp: {
|
||||
lt: before,
|
||||
},
|
||||
// only include the ones has not expired
|
||||
expiredAt: {
|
||||
gt: new Date(),
|
||||
},
|
||||
},
|
||||
orderBy: {
|
||||
timestamp: 'desc',
|
||||
},
|
||||
take,
|
||||
});
|
||||
}
|
||||
|
||||
async count(workspaceId: string, id: string) {
|
||||
return this.db.snapshotHistory.count({
|
||||
where: {
|
||||
workspaceId,
|
||||
id,
|
||||
expiredAt: {
|
||||
gt: new Date(),
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async get(workspaceId: string, id: string, timestamp: Date) {
|
||||
return this.db.snapshotHistory.findUnique({
|
||||
where: {
|
||||
workspaceId_id_timestamp: {
|
||||
workspaceId,
|
||||
id,
|
||||
timestamp,
|
||||
},
|
||||
expiredAt: {
|
||||
gt: new Date(),
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async last(workspaceId: string, id: string) {
|
||||
return this.db.snapshotHistory.findFirst({
|
||||
where: {
|
||||
workspaceId,
|
||||
id,
|
||||
},
|
||||
select: {
|
||||
timestamp: true,
|
||||
state: true,
|
||||
},
|
||||
orderBy: {
|
||||
timestamp: 'desc',
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async recover(workspaceId: string, id: string, timestamp: Date) {
|
||||
const history = await this.db.snapshotHistory.findUnique({
|
||||
where: {
|
||||
workspaceId_id_timestamp: {
|
||||
workspaceId,
|
||||
id,
|
||||
timestamp,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
if (!history) {
|
||||
throw new DocHistoryNotFound({
|
||||
workspaceId,
|
||||
docId: id,
|
||||
timestamp: timestamp.getTime(),
|
||||
});
|
||||
}
|
||||
|
||||
const oldSnapshot = await this.db.snapshot.findUnique({
|
||||
where: {
|
||||
id_workspaceId: {
|
||||
id,
|
||||
workspaceId,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
if (!oldSnapshot) {
|
||||
throw new DocNotFound({ workspaceId, docId: id });
|
||||
}
|
||||
|
||||
// save old snapshot as one history record
|
||||
await this.onDocUpdated({ workspaceId, id, previous: oldSnapshot }, true);
|
||||
// WARN:
|
||||
// we should never do the snapshot updating in recovering,
|
||||
// which is not the solution in CRDT.
|
||||
// let user revert in client and update the data in sync system
|
||||
// `await this.db.snapshot.update();`
|
||||
metrics.doc
|
||||
.counter('history_recovered_counter', {
|
||||
description: 'How many times history recovered request happened',
|
||||
})
|
||||
.add(1);
|
||||
|
||||
return history.timestamp;
|
||||
}
|
||||
|
||||
async getExpiredDateFromNow(workspaceId: string) {
|
||||
const permission = await this.db.workspaceUserPermission.findFirst({
|
||||
select: {
|
||||
userId: true,
|
||||
},
|
||||
where: {
|
||||
workspaceId,
|
||||
type: Permission.Owner,
|
||||
},
|
||||
});
|
||||
|
||||
if (!permission) {
|
||||
throw new WorkspaceNotFound({ workspaceId });
|
||||
}
|
||||
|
||||
const quota = await this.quota.getUserQuota(permission.userId);
|
||||
return quota.feature.historyPeriodFromNow;
|
||||
}
|
||||
|
||||
@Cron(CronExpression.EVERY_DAY_AT_MIDNIGHT /* everyday at 12am */)
|
||||
async cleanupExpiredHistory() {
|
||||
await this.db.snapshotHistory.deleteMany({
|
||||
where: {
|
||||
expiredAt: {
|
||||
lte: new Date(),
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -2,15 +2,24 @@ import './config';
|
||||
|
||||
import { Module } from '@nestjs/common';
|
||||
|
||||
import { PermissionModule } from '../permission';
|
||||
import { QuotaModule } from '../quota';
|
||||
import { DocHistoryManager } from './history';
|
||||
import { DocManager } from './manager';
|
||||
import { PgUserspaceDocStorageAdapter } from './adapters/userspace';
|
||||
import { PgWorkspaceDocStorageAdapter } from './adapters/workspace';
|
||||
import { DocStorageCronJob } from './job';
|
||||
import { DocStorageOptions } from './options';
|
||||
|
||||
@Module({
|
||||
imports: [QuotaModule],
|
||||
providers: [DocManager, DocHistoryManager],
|
||||
exports: [DocManager, DocHistoryManager],
|
||||
imports: [QuotaModule, PermissionModule],
|
||||
providers: [
|
||||
DocStorageOptions,
|
||||
PgWorkspaceDocStorageAdapter,
|
||||
PgUserspaceDocStorageAdapter,
|
||||
DocStorageCronJob,
|
||||
],
|
||||
exports: [PgWorkspaceDocStorageAdapter, PgUserspaceDocStorageAdapter],
|
||||
})
|
||||
export class DocModule {}
|
||||
export class DocStorageModule {}
|
||||
export { PgUserspaceDocStorageAdapter, PgWorkspaceDocStorageAdapter };
|
||||
|
||||
export { DocHistoryManager, DocManager };
|
||||
export { DocStorageAdapter, type Editor } from './storage';
|
||||
|
||||
76
packages/backend/server/src/core/doc/job.ts
Normal file
76
packages/backend/server/src/core/doc/job.ts
Normal file
@@ -0,0 +1,76 @@
|
||||
import { Injectable, Logger, OnModuleInit } from '@nestjs/common';
|
||||
import { Cron, CronExpression, SchedulerRegistry } from '@nestjs/schedule';
|
||||
import { PrismaClient } from '@prisma/client';
|
||||
|
||||
import { CallTimer, Config, metrics } from '../../fundamentals';
|
||||
import { PgWorkspaceDocStorageAdapter } from './adapters/workspace';
|
||||
|
||||
@Injectable()
|
||||
export class DocStorageCronJob implements OnModuleInit {
|
||||
private readonly logger = new Logger(DocStorageCronJob.name);
|
||||
private busy = false;
|
||||
|
||||
constructor(
|
||||
private readonly registry: SchedulerRegistry,
|
||||
private readonly config: Config,
|
||||
private readonly db: PrismaClient,
|
||||
private readonly workspace: PgWorkspaceDocStorageAdapter
|
||||
) {}
|
||||
|
||||
onModuleInit() {
|
||||
if (this.config.doc.manager.enableUpdateAutoMerging) {
|
||||
this.registry.addInterval(
|
||||
this.autoMergePendingDocUpdates.name,
|
||||
// scheduler registry will clean up the interval when the app is stopped
|
||||
setInterval(() => {
|
||||
if (this.busy) {
|
||||
return;
|
||||
}
|
||||
this.busy = true;
|
||||
this.autoMergePendingDocUpdates()
|
||||
.catch(() => {
|
||||
/* never fail */
|
||||
})
|
||||
.finally(() => {
|
||||
this.busy = false;
|
||||
});
|
||||
}, this.config.doc.manager.updatePollInterval)
|
||||
);
|
||||
|
||||
this.logger.log('Updates pending queue auto merging cron started');
|
||||
}
|
||||
}
|
||||
|
||||
@CallTimer('doc', 'auto_merge_pending_doc_updates')
|
||||
async autoMergePendingDocUpdates() {
|
||||
try {
|
||||
const randomDoc = await this.workspace.randomDoc();
|
||||
if (!randomDoc) {
|
||||
return;
|
||||
}
|
||||
|
||||
await this.workspace.getDoc(randomDoc.workspaceId, randomDoc.docId);
|
||||
} catch (e) {
|
||||
metrics.doc.counter('auto_merge_pending_doc_updates_error').add(1);
|
||||
this.logger.error('Failed to auto merge pending doc updates', e);
|
||||
}
|
||||
}
|
||||
|
||||
@Cron(CronExpression.EVERY_DAY_AT_MIDNIGHT /* everyday at 12am */)
|
||||
async cleanupExpiredHistory() {
|
||||
await this.db.snapshotHistory.deleteMany({
|
||||
where: {
|
||||
expiredAt: {
|
||||
lte: new Date(),
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
@Cron(CronExpression.EVERY_MINUTE)
|
||||
async reportUpdatesQueueCount() {
|
||||
metrics.doc
|
||||
.gauge('updates_queue_count')
|
||||
.record(await this.db.update.count());
|
||||
}
|
||||
}
|
||||
@@ -1,853 +0,0 @@
|
||||
import {
|
||||
Injectable,
|
||||
Logger,
|
||||
OnModuleDestroy,
|
||||
OnModuleInit,
|
||||
} from '@nestjs/common';
|
||||
import { Cron, CronExpression } from '@nestjs/schedule';
|
||||
import { PrismaClient, Snapshot, Update } from '@prisma/client';
|
||||
import { chunk } from 'lodash-es';
|
||||
import { defer, retry } from 'rxjs';
|
||||
import {
|
||||
applyUpdate,
|
||||
Doc,
|
||||
encodeStateAsUpdate,
|
||||
encodeStateVector,
|
||||
transact,
|
||||
} from 'yjs';
|
||||
|
||||
import type { EventPayload } from '../../fundamentals';
|
||||
import {
|
||||
Cache,
|
||||
CallTimer,
|
||||
Config,
|
||||
EventEmitter,
|
||||
mergeUpdatesInApplyWay as jwstMergeUpdates,
|
||||
metrics,
|
||||
OnEvent,
|
||||
} from '../../fundamentals';
|
||||
|
||||
function compare(yBinary: Buffer, jwstBinary: Buffer, strict = false): boolean {
|
||||
if (yBinary.equals(jwstBinary)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (strict) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const doc = new Doc();
|
||||
applyUpdate(doc, jwstBinary);
|
||||
|
||||
const yBinary2 = Buffer.from(encodeStateAsUpdate(doc));
|
||||
|
||||
return compare(yBinary, yBinary2, true);
|
||||
}
|
||||
|
||||
export function isEmptyBuffer(buf: Buffer): boolean {
|
||||
return (
|
||||
buf.length === 0 ||
|
||||
// 0x0000
|
||||
(buf.length === 2 && buf[0] === 0 && buf[1] === 0)
|
||||
);
|
||||
}
|
||||
|
||||
const MAX_SEQ_NUM = 0x3fffffff; // u31
|
||||
const UPDATES_QUEUE_CACHE_KEY = 'doc:manager:updates';
|
||||
|
||||
interface DocResponse {
|
||||
doc: Doc;
|
||||
timestamp: number;
|
||||
}
|
||||
|
||||
interface BinaryResponse {
|
||||
binary: Buffer;
|
||||
timestamp: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Since we can't directly save all client updates into database, in which way the database will overload,
|
||||
* we need to buffer the updates and merge them to reduce db write.
|
||||
*
|
||||
* And also, if a new client join, it would be nice to see the latest doc asap,
|
||||
* so we need to at least store a snapshot of the doc and return quickly,
|
||||
* along side all the updates that have not been applies to that snapshot(timestamp).
|
||||
*/
|
||||
@Injectable()
|
||||
export class DocManager implements OnModuleInit, OnModuleDestroy {
|
||||
private readonly logger = new Logger(DocManager.name);
|
||||
private job: NodeJS.Timeout | null = null;
|
||||
private readonly seqMap = new Map<string, number>();
|
||||
private busy = false;
|
||||
|
||||
constructor(
|
||||
private readonly db: PrismaClient,
|
||||
private readonly config: Config,
|
||||
private readonly cache: Cache,
|
||||
private readonly event: EventEmitter
|
||||
) {}
|
||||
|
||||
onModuleInit() {
|
||||
if (this.config.doc.manager.enableUpdateAutoMerging) {
|
||||
this.logger.log('Use Database');
|
||||
this.setup();
|
||||
}
|
||||
}
|
||||
|
||||
onModuleDestroy() {
|
||||
this.destroy();
|
||||
}
|
||||
|
||||
@CallTimer('doc', 'yjs_recover_updates_to_doc')
|
||||
private recoverDoc(...updates: Buffer[]): Promise<Doc> {
|
||||
const doc = new Doc();
|
||||
const chunks = chunk(updates, 10);
|
||||
|
||||
return new Promise(resolve => {
|
||||
const next = () => {
|
||||
const updates = chunks.shift();
|
||||
if (updates?.length) {
|
||||
transact(doc, () => {
|
||||
updates.forEach(u => {
|
||||
try {
|
||||
applyUpdate(doc, u);
|
||||
} catch (e) {
|
||||
this.logger.error('Failed to apply update', e);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// avoid applying too many updates in single round which will take the whole cpu time like dead lock
|
||||
setImmediate(() => {
|
||||
next();
|
||||
});
|
||||
} else {
|
||||
resolve(doc);
|
||||
}
|
||||
};
|
||||
|
||||
next();
|
||||
});
|
||||
}
|
||||
|
||||
private async applyUpdates(guid: string, ...updates: Buffer[]): Promise<Doc> {
|
||||
const doc = await this.recoverDoc(...updates);
|
||||
|
||||
const useYocto = await this.config.runtime.fetch(
|
||||
'doc/experimentalMergeWithYOcto'
|
||||
);
|
||||
// test jwst codec
|
||||
if (useYocto) {
|
||||
metrics.jwst.counter('codec_merge_counter').add(1);
|
||||
const yjsResult = Buffer.from(encodeStateAsUpdate(doc));
|
||||
let log = false;
|
||||
try {
|
||||
const jwstResult = jwstMergeUpdates(updates);
|
||||
if (!compare(yjsResult, jwstResult)) {
|
||||
metrics.jwst.counter('codec_not_match').add(1);
|
||||
this.logger.warn(
|
||||
`jwst codec result doesn't match yjs codec result for: ${guid}`
|
||||
);
|
||||
log = true;
|
||||
if (this.config.node.dev) {
|
||||
this.logger.warn(`Expected:\n ${yjsResult.toString('hex')}`);
|
||||
this.logger.warn(`Result:\n ${jwstResult.toString('hex')}`);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
metrics.jwst.counter('codec_fails_counter').add(1);
|
||||
this.logger.warn(`jwst apply update failed for ${guid}: ${e}`);
|
||||
log = true;
|
||||
} finally {
|
||||
if (log && this.config.node.dev) {
|
||||
this.logger.warn(
|
||||
`Updates: ${updates.map(u => u.toString('hex')).join('\n')}`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return doc;
|
||||
}
|
||||
|
||||
/**
|
||||
* setup pending update processing loop
|
||||
*/
|
||||
setup() {
|
||||
this.job = setInterval(() => {
|
||||
if (!this.busy) {
|
||||
this.busy = true;
|
||||
this.autoSquash()
|
||||
.catch(() => {
|
||||
/* we handle all errors in work itself */
|
||||
})
|
||||
.finally(() => {
|
||||
this.busy = false;
|
||||
});
|
||||
}
|
||||
}, this.config.doc.manager.updatePollInterval);
|
||||
|
||||
this.logger.log('Automation started');
|
||||
}
|
||||
|
||||
/**
|
||||
* stop pending update processing loop
|
||||
*/
|
||||
destroy() {
|
||||
if (this.job) {
|
||||
clearInterval(this.job);
|
||||
this.job = null;
|
||||
this.logger.log('Automation stopped');
|
||||
}
|
||||
}
|
||||
|
||||
@OnEvent('workspace.deleted')
|
||||
async onWorkspaceDeleted(workspaceId: string) {
|
||||
await this.db.snapshot.deleteMany({
|
||||
where: {
|
||||
workspaceId,
|
||||
},
|
||||
});
|
||||
await this.db.update.deleteMany({
|
||||
where: {
|
||||
workspaceId,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
@OnEvent('snapshot.deleted')
|
||||
async onSnapshotDeleted({
|
||||
id,
|
||||
workspaceId,
|
||||
}: EventPayload<'snapshot.deleted'>) {
|
||||
await this.db.update.deleteMany({
|
||||
where: {
|
||||
id,
|
||||
workspaceId,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* add update to manager for later processing.
|
||||
*/
|
||||
async push(
|
||||
workspaceId: string,
|
||||
guid: string,
|
||||
update: Buffer,
|
||||
retryTimes = 10
|
||||
) {
|
||||
const timestamp = await new Promise<number>((resolve, reject) => {
|
||||
defer(async () => {
|
||||
const seq = await this.getUpdateSeq(workspaceId, guid);
|
||||
const { createdAt } = await this.db.update.create({
|
||||
select: {
|
||||
createdAt: true,
|
||||
},
|
||||
data: {
|
||||
workspaceId,
|
||||
id: guid,
|
||||
seq,
|
||||
blob: update,
|
||||
},
|
||||
});
|
||||
|
||||
return createdAt.getTime();
|
||||
})
|
||||
.pipe(retry(retryTimes)) // retry until seq num not conflict
|
||||
.subscribe({
|
||||
next: timestamp => {
|
||||
this.logger.debug(
|
||||
`pushed 1 update for ${guid} in workspace ${workspaceId}`
|
||||
);
|
||||
resolve(timestamp);
|
||||
},
|
||||
error: e => {
|
||||
this.logger.error('Failed to push updates', e);
|
||||
reject(new Error('Failed to push update'));
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
await this.updateCachedUpdatesCount(workspaceId, guid, 1);
|
||||
|
||||
return timestamp;
|
||||
}
|
||||
|
||||
async batchPush(
|
||||
workspaceId: string,
|
||||
guid: string,
|
||||
updates: Buffer[],
|
||||
retryTimes = 10
|
||||
) {
|
||||
const timestamp = await new Promise<number>((resolve, reject) => {
|
||||
defer(async () => {
|
||||
const lastSeq = await this.getUpdateSeq(
|
||||
workspaceId,
|
||||
guid,
|
||||
updates.length
|
||||
);
|
||||
const now = Date.now();
|
||||
let timestamp = now;
|
||||
let turn = 0;
|
||||
const batchCount = 10;
|
||||
for (const batch of chunk(updates, batchCount)) {
|
||||
await this.db.update.createMany({
|
||||
data: batch.map((update, i) => {
|
||||
const subSeq = turn * batchCount + i + 1;
|
||||
// `seq` is the last seq num of the batch
|
||||
// example for 11 batched updates, start from seq num 20
|
||||
// seq for first update in the batch should be:
|
||||
// 31 - 11 + subSeq(0 * 10 + 0 + 1) = 21
|
||||
// ^ last seq num ^ updates.length ^ turn ^ batchCount ^i
|
||||
const seq = lastSeq - updates.length + subSeq;
|
||||
const createdAt = now + subSeq;
|
||||
timestamp = Math.max(timestamp, createdAt);
|
||||
|
||||
return {
|
||||
workspaceId,
|
||||
id: guid,
|
||||
blob: update,
|
||||
seq,
|
||||
createdAt: new Date(createdAt), // make sure the updates can be ordered by create time
|
||||
};
|
||||
}),
|
||||
});
|
||||
turn++;
|
||||
}
|
||||
|
||||
return timestamp;
|
||||
})
|
||||
.pipe(retry(retryTimes)) // retry until seq num not conflict
|
||||
.subscribe({
|
||||
next: timestamp => {
|
||||
this.logger.debug(
|
||||
`pushed ${updates.length} updates for ${guid} in workspace ${workspaceId}`
|
||||
);
|
||||
resolve(timestamp);
|
||||
},
|
||||
error: e => {
|
||||
this.logger.error('Failed to push updates', e);
|
||||
reject(new Error('Failed to push update'));
|
||||
},
|
||||
});
|
||||
});
|
||||
await this.updateCachedUpdatesCount(workspaceId, guid, updates.length);
|
||||
|
||||
return timestamp;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get latest timestamp of all docs in the workspace.
|
||||
*/
|
||||
@CallTimer('doc', 'get_doc_timestamps')
|
||||
async getDocTimestamps(workspaceId: string, after: number | undefined = 0) {
|
||||
const snapshots = await this.db.snapshot.findMany({
|
||||
where: {
|
||||
workspaceId,
|
||||
updatedAt: {
|
||||
gt: new Date(after),
|
||||
},
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
updatedAt: true,
|
||||
},
|
||||
});
|
||||
|
||||
const updates = await this.db.update.groupBy({
|
||||
where: {
|
||||
workspaceId,
|
||||
createdAt: {
|
||||
gt: new Date(after),
|
||||
},
|
||||
},
|
||||
by: ['id'],
|
||||
_max: {
|
||||
createdAt: true,
|
||||
},
|
||||
});
|
||||
|
||||
const result: Record<string, number> = {};
|
||||
|
||||
snapshots.forEach(s => {
|
||||
result[s.id] = s.updatedAt.getTime();
|
||||
});
|
||||
|
||||
updates.forEach(u => {
|
||||
if (u._max.createdAt) {
|
||||
result[u.id] = u._max.createdAt.getTime();
|
||||
}
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* get the latest doc with all update applied.
|
||||
*/
|
||||
async get(workspaceId: string, guid: string): Promise<DocResponse | null> {
|
||||
const result = await this._get(workspaceId, guid);
|
||||
if (result) {
|
||||
if ('doc' in result) {
|
||||
return result;
|
||||
} else {
|
||||
const doc = await this.recoverDoc(result.binary);
|
||||
|
||||
return {
|
||||
doc,
|
||||
timestamp: result.timestamp,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* get the latest doc binary with all update applied.
|
||||
*/
|
||||
async getBinary(
|
||||
workspaceId: string,
|
||||
guid: string
|
||||
): Promise<BinaryResponse | null> {
|
||||
const result = await this._get(workspaceId, guid);
|
||||
if (result) {
|
||||
if ('doc' in result) {
|
||||
return {
|
||||
binary: Buffer.from(encodeStateAsUpdate(result.doc)),
|
||||
timestamp: result.timestamp,
|
||||
};
|
||||
} else {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* get the latest doc state vector with all update applied.
|
||||
*/
|
||||
async getDocState(
|
||||
workspaceId: string,
|
||||
guid: string
|
||||
): Promise<BinaryResponse | null> {
|
||||
const snapshot = await this.getSnapshot(workspaceId, guid);
|
||||
const updates = await this.getUpdates(workspaceId, guid);
|
||||
|
||||
if (updates.length) {
|
||||
const { doc, timestamp } = await this.squash(snapshot, updates);
|
||||
return {
|
||||
binary: Buffer.from(encodeStateVector(doc)),
|
||||
timestamp,
|
||||
};
|
||||
}
|
||||
|
||||
return snapshot?.state
|
||||
? {
|
||||
binary: snapshot.state,
|
||||
timestamp: snapshot.updatedAt.getTime(),
|
||||
}
|
||||
: null;
|
||||
}
|
||||
|
||||
/**
|
||||
* get the snapshot of the doc we've seen.
|
||||
*/
|
||||
async getSnapshot(workspaceId: string, guid: string) {
|
||||
return this.db.snapshot.findUnique({
|
||||
where: {
|
||||
id_workspaceId: {
|
||||
workspaceId,
|
||||
id: guid,
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* get pending updates
|
||||
*/
|
||||
async getUpdates(workspaceId: string, guid: string) {
|
||||
const updates = await this.db.update.findMany({
|
||||
where: {
|
||||
workspaceId,
|
||||
id: guid,
|
||||
},
|
||||
// take it ease, we don't want to overload db and or cpu
|
||||
// if we limit the taken number here,
|
||||
// user will never see the latest doc if there are too many updates pending to be merged.
|
||||
take: this.config.doc.manager.maxUpdatesPullCount,
|
||||
});
|
||||
|
||||
// perf(memory): avoid sorting in db
|
||||
return updates.sort((a, b) => (a.createdAt < b.createdAt ? -1 : 1));
|
||||
}
|
||||
|
||||
/**
|
||||
* apply pending updates to snapshot
|
||||
*/
|
||||
private async autoSquash() {
|
||||
// find the first update and batch process updates with same id
|
||||
const candidate = await this.getAutoSquashCandidate();
|
||||
|
||||
// no pending updates
|
||||
if (!candidate) {
|
||||
return;
|
||||
}
|
||||
|
||||
const { id, workspaceId } = candidate;
|
||||
|
||||
await this.lockUpdatesForAutoSquash(workspaceId, id, async () => {
|
||||
try {
|
||||
await this._get(workspaceId, id);
|
||||
} catch (e) {
|
||||
this.logger.error(
|
||||
`Failed to apply updates for workspace: ${workspaceId}, guid: ${id}`
|
||||
);
|
||||
this.logger.error(e);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private async getAutoSquashCandidate() {
|
||||
const cache = await this.getAutoSquashCandidateFromCache();
|
||||
|
||||
if (cache) {
|
||||
return cache;
|
||||
}
|
||||
|
||||
return this.db.update.findFirst({
|
||||
select: {
|
||||
id: true,
|
||||
workspaceId: true,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns whether the snapshot is updated to the latest, `undefined` means the doc to be upserted is outdated.
|
||||
*/
|
||||
@CallTimer('doc', 'upsert')
|
||||
private async upsert(
|
||||
workspaceId: string,
|
||||
guid: string,
|
||||
doc: Doc,
|
||||
// we always delay the snapshot update to avoid db overload,
|
||||
// so the value of auto updated `updatedAt` by db will never be accurate to user's real action time
|
||||
updatedAt: Date,
|
||||
seq: number
|
||||
) {
|
||||
const blob = Buffer.from(encodeStateAsUpdate(doc));
|
||||
|
||||
if (isEmptyBuffer(blob)) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const state = Buffer.from(encodeStateVector(doc));
|
||||
|
||||
// CONCERNS:
|
||||
// i. Because we save the real user's last seen action time as `updatedAt`,
|
||||
// it's possible to simply compare the `updatedAt` to determine if the snapshot is older than the one we are going to save.
|
||||
//
|
||||
// ii. Prisma doesn't support `upsert` with additional `where` condition along side unique constraint.
|
||||
// In our case, we need to manually check the `updatedAt` to avoid overriding the newer snapshot.
|
||||
// where: { id_workspaceId: {}, updatedAt: { lt: updatedAt } }
|
||||
// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
//
|
||||
// iii. Only set the seq number when creating the snapshot.
|
||||
// For updating scenario, the seq number will be updated when updates pushed to db.
|
||||
try {
|
||||
const result: { updatedAt: Date }[] = await this.db.$queryRaw`
|
||||
INSERT INTO "snapshots" ("workspace_id", "guid", "blob", "state", "seq", "created_at", "updated_at")
|
||||
VALUES (${workspaceId}, ${guid}, ${blob}, ${state}, ${seq}, DEFAULT, ${updatedAt})
|
||||
ON CONFLICT ("workspace_id", "guid")
|
||||
DO UPDATE SET "blob" = ${blob}, "state" = ${state}, "updated_at" = ${updatedAt}, "seq" = ${seq}
|
||||
WHERE "snapshots"."workspace_id" = ${workspaceId} AND "snapshots"."guid" = ${guid} AND "snapshots"."updated_at" <= ${updatedAt}
|
||||
RETURNING "snapshots"."workspace_id" as "workspaceId", "snapshots"."guid" as "id", "snapshots"."updated_at" as "updatedAt"
|
||||
`;
|
||||
|
||||
// const result = await this.db.snapshot.upsert({
|
||||
// select: {
|
||||
// updatedAt: true,
|
||||
// seq: true,
|
||||
// },
|
||||
// where: {
|
||||
// id_workspaceId: {
|
||||
// workspaceId,
|
||||
// id: guid,
|
||||
// },
|
||||
// ⬇️ NOT SUPPORTED BY PRISMA YET
|
||||
// updatedAt: {
|
||||
// lt: updatedAt,
|
||||
// },
|
||||
// },
|
||||
// update: {
|
||||
// blob,
|
||||
// state,
|
||||
// updatedAt,
|
||||
// },
|
||||
// create: {
|
||||
// workspaceId,
|
||||
// id: guid,
|
||||
// blob,
|
||||
// state,
|
||||
// updatedAt,
|
||||
// seq,
|
||||
// },
|
||||
// });
|
||||
|
||||
// if the condition `snapshot.updatedAt > updatedAt` is true, by which means the snapshot has already been updated by other process,
|
||||
// the updates has been applied to current `doc` must have been seen by the other process as well.
|
||||
// The `updatedSnapshot` will be `undefined` in this case.
|
||||
const updatedSnapshot = result.at(0);
|
||||
|
||||
if (!updatedSnapshot) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return true;
|
||||
} catch (e) {
|
||||
this.logger.error('Failed to upsert snapshot', e);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private async _get(
|
||||
workspaceId: string,
|
||||
guid: string
|
||||
): Promise<DocResponse | BinaryResponse | null> {
|
||||
const snapshot = await this.getSnapshot(workspaceId, guid);
|
||||
const updates = await this.getUpdates(workspaceId, guid);
|
||||
|
||||
if (updates.length) {
|
||||
return this.squash(snapshot, updates);
|
||||
}
|
||||
|
||||
return snapshot
|
||||
? { binary: snapshot.blob, timestamp: snapshot.updatedAt.getTime() }
|
||||
: null;
|
||||
}
|
||||
|
||||
  /**
   * Squash updates into a single update and save it as snapshot,
   * and delete the updates records at the same time.
   *
   * The squashed doc inherits the timestamp (`createdAt`) and `seq` of the
   * LAST update in the batch, so concurrency checks in `upsert` compare
   * against the newest change we have seen.
   */
  @CallTimer('doc', 'squash')
  private async squash(
    snapshot: Snapshot | null,
    updates: Update[]
  ): Promise<DocResponse> {
    if (!updates.length) {
      throw new Error('No updates to squash');
    }

    const last = updates[updates.length - 1];
    const { id, workspaceId } = last;

    // merge: start from the existing snapshot blob (or an empty yjs update,
    // encoded as [0, 0]) and replay every pending update on top of it
    const doc = await this.applyUpdates(
      id,
      snapshot ? snapshot.blob : Buffer.from([0, 0]),
      ...updates.map(u => u.blob)
    );

    // `done` is tri-state: true = saved, undefined = another process already
    // saved a newer snapshot, false = unexpected failure (see below)
    const done = await this.upsert(
      workspaceId,
      id,
      doc,
      last.createdAt,
      last.seq
    );

    if (done) {
      // notify listeners with the PREVIOUS snapshot so they can diff/archive it
      if (snapshot) {
        this.event.emit('snapshot.updated', {
          id,
          workspaceId,
          previous: {
            blob: snapshot.blob,
            state: snapshot.state,
            updatedAt: snapshot.updatedAt,
          },
        });
      }

      this.logger.debug(
        `Squashed ${updates.length} updates for ${id} in workspace ${workspaceId}`
      );
    }

    // we will keep the updates only if the upsert failed on unknown reason
    // `done === undefined` means the updates is outdated(have already been merged by other process), safe to be deleted
    // `done === true` means the upsert is successful, safe to be deleted
    if (done !== false) {
      // always delete updates
      // the upsert will return false if the state is not newer, so we don't need to worry about it
      const { count } = await this.db.update.deleteMany({
        where: {
          id,
          workspaceId,
          seq: {
            in: updates.map(u => u.seq),
          },
        },
      });

      // decrement the cached pending-updates counter by what we deleted
      await this.updateCachedUpdatesCount(workspaceId, id, -count);
    }

    return { doc, timestamp: last.createdAt.getTime() };
  }
|
||||
|
||||
  /**
   * Atomically reserve the next `batch` sequence numbers for a doc by
   * incrementing the snapshot row's `seq` counter.
   *
   * Returns the counter value AFTER the increment (the last number of the
   * reserved batch).
   */
  private async getUpdateSeq(workspaceId: string, guid: string, batch = 1) {
    try {
      const { seq } = await this.db.snapshot.update({
        select: {
          seq: true,
        },
        where: {
          id_workspaceId: {
            workspaceId,
            id: guid,
          },
        },
        data: {
          seq: {
            increment: batch,
          },
        },
      });

      // reset
      // NOTE(review): the read-then-write reset is not atomic — a concurrent
      // increment between these two statements could be lost; presumably
      // acceptable because MAX_SEQ_NUM is hit rarely — confirm.
      if (seq >= MAX_SEQ_NUM) {
        await this.db.snapshot.update({
          select: {
            seq: true,
          },
          where: {
            id_workspaceId: {
              workspaceId,
              id: guid,
            },
          },
          data: {
            seq: 0,
          },
        });
      }

      return seq;
    } catch {
      // not existing snapshot just count it from 1
      // NOTE(review): this catch swallows ALL errors, not only "row not
      // found"; a transient DB failure also falls into the in-memory
      // counter path — confirm that is intended.
      const last = this.seqMap.get(workspaceId + guid) ?? 0;
      this.seqMap.set(workspaceId + guid, last + batch);
      return last + batch;
    }
  }
|
||||
|
||||
private async updateCachedUpdatesCount(
|
||||
workspaceId: string,
|
||||
guid: string,
|
||||
count: number
|
||||
) {
|
||||
const result = await this.cache.mapIncrease(
|
||||
UPDATES_QUEUE_CACHE_KEY,
|
||||
`${workspaceId}::${guid}`,
|
||||
count
|
||||
);
|
||||
|
||||
if (result <= 0) {
|
||||
await this.cache.mapDelete(
|
||||
UPDATES_QUEUE_CACHE_KEY,
|
||||
`${workspaceId}::${guid}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
private async getAutoSquashCandidateFromCache() {
|
||||
const key = await this.cache.mapRandomKey(UPDATES_QUEUE_CACHE_KEY);
|
||||
|
||||
if (key) {
|
||||
const cachedCount = await this.cache.mapIncrease(
|
||||
UPDATES_QUEUE_CACHE_KEY,
|
||||
key,
|
||||
0
|
||||
);
|
||||
|
||||
if (cachedCount > 0) {
|
||||
const [workspaceId, id] = key.split('::');
|
||||
const count = await this.db.update.count({
|
||||
where: {
|
||||
workspaceId,
|
||||
id,
|
||||
},
|
||||
});
|
||||
|
||||
// FIXME(@forehalo): somehow the update count in cache is not accurate
|
||||
if (count === 0) {
|
||||
await this.cache.mapDelete(UPDATES_QUEUE_CACHE_KEY, key);
|
||||
|
||||
return null;
|
||||
}
|
||||
return { id, workspaceId };
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private async doWithLock<T>(
|
||||
lockScope: string,
|
||||
lockResource: string,
|
||||
job: () => Promise<T>
|
||||
) {
|
||||
const lock = `lock:${lockScope}:${lockResource}`;
|
||||
const acquired = await this.cache.setnx(lock, 1, {
|
||||
ttl: 60 * 1000,
|
||||
});
|
||||
metrics.doc.counter('lock').add(1, { scope: lockScope });
|
||||
|
||||
if (!acquired) {
|
||||
metrics.doc.counter('lock_failed').add(1, { scope: lockScope });
|
||||
return;
|
||||
}
|
||||
metrics.doc.counter('lock_required').add(1, { scope: lockScope });
|
||||
|
||||
try {
|
||||
return await job();
|
||||
} finally {
|
||||
await this.cache
|
||||
.delete(lock)
|
||||
.then(() => {
|
||||
metrics.doc.counter('lock_released').add(1, { scope: lockScope });
|
||||
})
|
||||
.catch(e => {
|
||||
metrics.doc
|
||||
.counter('lock_release_failed')
|
||||
.add(1, { scope: lockScope });
|
||||
// safe, the lock will be expired when ttl ends
|
||||
this.logger.error(`Failed to release lock ${lock}`, e);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
private async lockUpdatesForAutoSquash<T>(
|
||||
workspaceId: string,
|
||||
guid: string,
|
||||
job: () => Promise<T>
|
||||
) {
|
||||
return this.doWithLock(
|
||||
'doc:manager:updates',
|
||||
`${workspaceId}::${guid}`,
|
||||
job
|
||||
);
|
||||
}
|
||||
|
||||
@Cron(CronExpression.EVERY_MINUTE)
|
||||
async reportUpdatesQueueCount() {
|
||||
metrics.doc
|
||||
.gauge('updates_queue_count')
|
||||
.record(await this.db.update.count());
|
||||
}
|
||||
}
|
||||
130
packages/backend/server/src/core/doc/options.ts
Normal file
130
packages/backend/server/src/core/doc/options.ts
Normal file
@@ -0,0 +1,130 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { chunk } from 'lodash-es';
|
||||
import * as Y from 'yjs';
|
||||
|
||||
import {
|
||||
CallTimer,
|
||||
Config,
|
||||
mergeUpdatesInApplyWay as yotcoMergeUpdates,
|
||||
metrics,
|
||||
} from '../../fundamentals';
|
||||
import { PermissionService } from '../permission';
|
||||
import { QuotaService } from '../quota';
|
||||
import { DocStorageOptions as IDocStorageOptions } from './storage';
|
||||
|
||||
function compare(yBinary: Buffer, jwstBinary: Buffer, strict = false): boolean {
|
||||
if (yBinary.equals(jwstBinary)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (strict) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const doc = new Y.Doc();
|
||||
Y.applyUpdate(doc, jwstBinary);
|
||||
|
||||
const yBinary2 = Buffer.from(Y.encodeStateAsUpdate(doc));
|
||||
|
||||
return compare(yBinary, yBinary2, true);
|
||||
}
|
||||
|
||||
/**
 * Runtime-configurable policies for doc storage: how updates are merged,
 * and per-workspace history retention derived from the owner's quota.
 */
@Injectable()
export class DocStorageOptions implements IDocStorageOptions {
  private readonly logger = new Logger('DocStorageOptions');

  constructor(
    private readonly config: Config,
    private readonly permission: PermissionService,
    private readonly quota: QuotaService
  ) {}

  /**
   * Merge a list of yjs updates into one binary.
   *
   * When the `doc/experimentalMergeWithYOcto` runtime flag is on, the merge
   * is computed with yjs (the returned value is ALWAYS the yjs result) and
   * additionally cross-checked against the experimental yocto codec, with
   * metrics/log output on any mismatch. Otherwise falls back to plain
   * `Y.mergeUpdates`.
   */
  mergeUpdates = async (updates: Uint8Array[]) => {
    const useYocto = await this.config.runtime.fetch(
      'doc/experimentalMergeWithYOcto'
    );

    if (useYocto) {
      // replay all updates into a doc (chunked, see recoverDoc) and encode
      const doc = await this.recoverDoc(updates);

      metrics.jwst.counter('codec_merge_counter').add(1);
      const yjsResult = Buffer.from(Y.encodeStateAsUpdate(doc));
      let log = false;
      try {
        // cross-check the yocto codec against the yjs result
        const yocto = yotcoMergeUpdates(updates.map(Buffer.from));
        if (!compare(yjsResult, yocto)) {
          metrics.jwst.counter('codec_not_match').add(1);
          this.logger.warn(`yocto codec result doesn't match yjs codec result`);
          log = true;
          if (this.config.node.dev) {
            this.logger.warn(`Expected:\n ${yjsResult.toString('hex')}`);
            this.logger.warn(`Result:\n ${yocto.toString('hex')}`);
          }
        }
      } catch (e) {
        // yocto failure is non-fatal: we only record it and keep the yjs result
        metrics.jwst.counter('codec_fails_counter').add(1);
        this.logger.warn(`jwst apply update failed: ${e}`);
        log = true;
      }

      // dump the raw updates in dev mode so mismatches can be reproduced
      if (log && this.config.node.dev) {
        this.logger.warn(
          `Updates: ${updates.map(u => Buffer.from(u).toString('hex')).join('\n')}`
        );
      }

      return yjsResult;
    } else {
      return this.simpleMergeUpdates(updates);
    }
  };

  /**
   * History retention window for a space, derived from the space owner's
   * quota feature (`historyPeriod`).
   */
  historyMaxAge = async (spaceId: string) => {
    const owner = await this.permission.getWorkspaceOwner(spaceId);
    const quota = await this.quota.getUserQuota(owner.id);
    return quota.feature.historyPeriod;
  };

  // minimum interval between two history records; static config, same for
  // every space (the parameter is unused by design)
  historyMinInterval = (_spaceId: string) => {
    return this.config.doc.history.interval;
  };

  /** Plain yjs merge without the yocto cross-check. */
  @CallTimer('doc', 'yjs_merge_updates')
  private simpleMergeUpdates(updates: Uint8Array[]) {
    return Y.mergeUpdates(updates);
  }

  /**
   * Apply updates to a fresh Y.Doc in chunks of 10, yielding to the event
   * loop between chunks so a huge doc cannot monopolize the CPU.
   *
   * NOTE(review): the `setImmediate` continuations run AFTER the
   * `Y.transact` callback has returned, so later chunks are applied outside
   * the original transaction scope — confirm this is intended.
   */
  @CallTimer('doc', 'yjs_recover_updates_to_doc')
  private recoverDoc(updates: Uint8Array[]): Promise<Y.Doc> {
    const doc = new Y.Doc();
    const chunks = chunk(updates, 10);
    let i = 0;

    return new Promise(resolve => {
      Y.transact(doc, () => {
        const next = () => {
          const updates = chunks.at(i++);

          if (updates?.length) {
            updates.forEach(u => {
              try {
                Y.applyUpdate(doc, u);
              } catch (e) {
                // a single corrupt update is skipped rather than failing the
                // whole recovery
                this.logger.error('Failed to apply update', e);
              }
            });

            // avoid applying too many updates in single round which will take the whole cpu time like dead lock
            setImmediate(() => {
              next();
            });
          } else {
            resolve(doc);
          }
        };

        next();
      });
    });
  }
}
|
||||
16
packages/backend/server/src/core/doc/storage/blob.ts
Normal file
16
packages/backend/server/src/core/doc/storage/blob.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
import { Connection } from './connection';
|
||||
|
||||
// Options bag for blob storage implementations; currently empty, kept as an
// extension point.
export interface BlobStorageOptions {}

// A stored binary object: its key within the space, raw bytes, and MIME type.
export interface Blob {
  key: string;
  bin: Uint8Array;
  mimeType: string;
}

/**
 * Contract for space-scoped blob storage backends (database, S3, …).
 * Extends Connection so implementations share the connect/disconnect
 * lifecycle.
 */
export abstract class BlobStorageAdapter extends Connection {
  // fetch one blob by key, or null when absent
  abstract getBlob(spaceId: string, key: string): Promise<Blob | null>;
  // store a blob and return its key
  abstract setBlob(spaceId: string, blob: Blob): Promise<string>;
  // delete a blob; resolves to whether anything was removed
  abstract deleteBlob(spaceId: string, key: string): Promise<boolean>;
  // NOTE(review): return type looks like it should be Promise<Blob[]> — a
  // "list" API returning a single Blob is almost certainly a typo, but
  // changing the abstract signature would break implementers; confirm.
  abstract listBlobs(spaceId: string): Promise<Blob>;
}
|
||||
11
packages/backend/server/src/core/doc/storage/connection.ts
Normal file
11
packages/backend/server/src/core/doc/storage/connection.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
export class Connection {
|
||||
protected connected: boolean = false;
|
||||
connect(): Promise<void> {
|
||||
this.connected = true;
|
||||
return Promise.resolve();
|
||||
}
|
||||
disconnect(): Promise<void> {
|
||||
this.connected = false;
|
||||
return Promise.resolve();
|
||||
}
|
||||
}
|
||||
216
packages/backend/server/src/core/doc/storage/doc.ts
Normal file
216
packages/backend/server/src/core/doc/storage/doc.ts
Normal file
@@ -0,0 +1,216 @@
|
||||
import {
|
||||
applyUpdate,
|
||||
Doc,
|
||||
encodeStateAsUpdate,
|
||||
encodeStateVector,
|
||||
mergeUpdates,
|
||||
UndoManager,
|
||||
} from 'yjs';
|
||||
|
||||
import { CallTimer } from '../../../fundamentals';
|
||||
import { Connection } from './connection';
|
||||
import { SingletonLocker } from './lock';
|
||||
|
||||
export interface DocRecord {
|
||||
spaceId: string;
|
||||
docId: string;
|
||||
bin: Uint8Array;
|
||||
timestamp: number;
|
||||
editor?: string;
|
||||
}
|
||||
|
||||
export interface DocUpdate {
|
||||
bin: Uint8Array;
|
||||
timestamp: number;
|
||||
editor?: string;
|
||||
}
|
||||
|
||||
export interface HistoryFilter {
|
||||
before?: number;
|
||||
limit?: number;
|
||||
}
|
||||
|
||||
export interface Editor {
|
||||
name: string;
|
||||
avatarUrl: string | null;
|
||||
}
|
||||
|
||||
export interface DocStorageOptions {
|
||||
mergeUpdates?: (updates: Uint8Array[]) => Promise<Uint8Array> | Uint8Array;
|
||||
}
|
||||
|
||||
/**
 * Backend-agnostic doc storage: merges pending updates into snapshots under
 * a per-doc lock, manages history records, and supports rollback to an
 * earlier history entry. Concrete backends implement the abstract CRUD
 * primitives.
 */
export abstract class DocStorageAdapter extends Connection {
  // in-process lock keyed by `workspace:{spaceId}:update` + docId
  private readonly locker = new SingletonLocker();

  constructor(
    protected readonly options: DocStorageOptions = {
      mergeUpdates,
    }
  ) {
    super();
  }

  // open apis
  // Returns true for binaries that encode "nothing": empty buffer, the
  // one-byte empty state vector (0x0), or the two-byte empty update (0x00).
  isEmptyBin(bin: Uint8Array): boolean {
    return (
      bin.length === 0 ||
      // 0x0 for state vector
      (bin.length === 1 && bin[0] === 0) ||
      // 0x00 for update
      (bin.length === 2 && bin[0] === 0 && bin[1] === 0)
    );
  }

  /**
   * Return the full current state of a doc. When pending updates exist they
   * are squashed into the snapshot, the new snapshot is persisted, the old
   * one is archived to history, and the updates are marked merged — all
   * under the per-doc lock.
   */
  async getDoc(spaceId: string, docId: string): Promise<DocRecord | null> {
    // `await using` releases the lock when this scope exits, even on throw
    await using _lock = await this.lockDocForUpdate(spaceId, docId);

    const snapshot = await this.getDocSnapshot(spaceId, docId);
    const updates = await this.getDocUpdates(spaceId, docId);

    if (updates.length) {
      // the old snapshot participates in the merge like any other update
      const { timestamp, bin, editor } = await this.squash(
        snapshot ? [snapshot, ...updates] : updates
      );

      const newSnapshot = {
        spaceId: spaceId,
        docId,
        bin,
        timestamp,
        editor,
      };

      const success = await this.setDocSnapshot(newSnapshot);

      // if there is old snapshot, create a new history record
      if (success && snapshot) {
        await this.createDocHistory(snapshot);
      }

      // always mark updates as merged unless throws
      await this.markUpdatesMerged(spaceId, docId, updates);

      return newSnapshot;
    }

    return snapshot;
  }

  // append raw updates for a doc; resolves to the number stored
  abstract pushDocUpdates(
    spaceId: string,
    docId: string,
    updates: Uint8Array[],
    editorId?: string
  ): Promise<number>;

  abstract deleteDoc(spaceId: string, docId: string): Promise<void>;
  abstract deleteSpace(spaceId: string): Promise<void>;
  /**
   * Roll a doc back to the history record at `timestamp` by pushing a
   * compensating update (see generateChangeUpdate) rather than overwriting
   * the snapshot, so the rollback itself is an ordinary, syncable update.
   */
  async rollbackDoc(
    spaceId: string,
    docId: string,
    timestamp: number,
    editorId?: string
  ): Promise<void> {
    await using _lock = await this.lockDocForUpdate(spaceId, docId);
    const toSnapshot = await this.getDocHistory(spaceId, docId, timestamp);
    if (!toSnapshot) {
      throw new Error('Can not find the version to rollback to.');
    }

    const fromSnapshot = await this.getDocSnapshot(spaceId, docId);

    if (!fromSnapshot) {
      throw new Error('Can not find the current version of the doc.');
    }

    const change = this.generateChangeUpdate(fromSnapshot.bin, toSnapshot.bin);
    await this.pushDocUpdates(spaceId, docId, [change], editorId);
    // force create a new history record after rollback
    await this.createDocHistory(fromSnapshot, true);
  }

  // map of docId -> last-updated timestamp for a space, optionally only
  // entries newer than `after`
  abstract getSpaceDocTimestamps(
    spaceId: string,
    after?: number
  ): Promise<Record<string, number> | null>;
  abstract listDocHistories(
    spaceId: string,
    docId: string,
    query: { skip?: number; limit?: number }
  ): Promise<{ timestamp: number; editor: Editor | null }[]>;
  abstract getDocHistory(
    spaceId: string,
    docId: string,
    timestamp: number
  ): Promise<DocRecord | null>;

  // api for internal usage
  protected abstract getDocSnapshot(
    spaceId: string,
    docId: string
  ): Promise<DocRecord | null>;
  protected abstract setDocSnapshot(snapshot: DocRecord): Promise<boolean>;
  protected abstract getDocUpdates(
    spaceId: string,
    docId: string
  ): Promise<DocUpdate[]>;
  protected abstract markUpdatesMerged(
    spaceId: string,
    docId: string,
    updates: DocUpdate[]
  ): Promise<number>;

  protected abstract createDocHistory(
    snapshot: DocRecord,
    force?: boolean
  ): Promise<boolean>;

  /**
   * Merge a batch of updates into one. The result inherits the timestamp
   * and editor of the LAST update in the batch.
   */
  @CallTimer('doc', 'squash')
  protected async squash(updates: DocUpdate[]): Promise<DocUpdate> {
    const merge = this.options?.mergeUpdates ?? mergeUpdates;
    const lastUpdate = updates.at(-1);
    if (!lastUpdate) {
      throw new Error('No updates to be squashed.');
    }

    // fast return
    if (updates.length === 1) {
      return lastUpdate;
    }

    const finalUpdate = await merge(updates.map(u => u.bin));

    return {
      bin: finalUpdate,
      timestamp: lastUpdate.timestamp,
      editor: lastUpdate.editor,
    };
  }

  protected async lockDocForUpdate(
    spaceId: string,
    docId: string
  ): Promise<AsyncDisposable> {
    return this.locker.lock(`workspace:${spaceId}:update`, docId);
  }

  /**
   * Build an update that, applied to `newerBin`'s state, transforms it into
   * `olderBin`'s state: apply the newer→older diff to the older doc, undo
   * it via UndoManager, and encode the undo relative to the newer state.
   */
  protected generateChangeUpdate(newerBin: Uint8Array, olderBin: Uint8Array) {
    const newerDoc = new Doc();
    applyUpdate(newerDoc, newerBin);
    const olderDoc = new Doc();
    applyUpdate(olderDoc, olderBin);

    const newerState = encodeStateVector(newerDoc);
    const olderState = encodeStateVector(olderDoc);

    // everything the newer doc has that the older doc lacks
    const diff = encodeStateAsUpdate(newerDoc, olderState);

    const undoManager = new UndoManager(Array.from(newerDoc.share.values()));

    applyUpdate(olderDoc, diff);

    // revert the just-applied diff, producing tombstones for the newer edits
    undoManager.undo();

    return encodeStateAsUpdate(olderDoc, newerState);
  }
}
|
||||
33
packages/backend/server/src/core/doc/storage/index.ts
Normal file
33
packages/backend/server/src/core/doc/storage/index.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
// TODO(@forehalo): share with frontend
|
||||
import type { BlobStorageAdapter } from './blob';
|
||||
import { Connection } from './connection';
|
||||
import type { DocStorageAdapter } from './doc';
|
||||
|
||||
export class SpaceStorage extends Connection {
|
||||
constructor(
|
||||
public readonly doc: DocStorageAdapter,
|
||||
public readonly blob: BlobStorageAdapter
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
override async connect() {
|
||||
await this.doc.connect();
|
||||
await this.blob.connect();
|
||||
}
|
||||
|
||||
override async disconnect() {
|
||||
await this.doc.disconnect();
|
||||
await this.blob.disconnect();
|
||||
}
|
||||
}
|
||||
|
||||
export { BlobStorageAdapter, type BlobStorageOptions } from './blob';
|
||||
export {
|
||||
type DocRecord,
|
||||
DocStorageAdapter,
|
||||
type DocStorageOptions,
|
||||
type DocUpdate,
|
||||
type Editor,
|
||||
type HistoryFilter,
|
||||
} from './doc';
|
||||
42
packages/backend/server/src/core/doc/storage/lock.ts
Normal file
42
packages/backend/server/src/core/doc/storage/lock.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
export interface Locker {
|
||||
lock(domain: string, resource: string): Promise<Lock>;
|
||||
}
|
||||
|
||||
export class SingletonLocker implements Locker {
|
||||
lockedResource = new Map<string, Lock>();
|
||||
constructor() {}
|
||||
|
||||
async lock(domain: string, resource: string) {
|
||||
let lock = this.lockedResource.get(`${domain}:${resource}`);
|
||||
|
||||
if (!lock) {
|
||||
lock = new Lock();
|
||||
}
|
||||
|
||||
await lock.acquire();
|
||||
|
||||
return lock;
|
||||
}
|
||||
}
|
||||
|
||||
export class Lock {
|
||||
private inner: Promise<void> = Promise.resolve();
|
||||
private release: () => void = () => {};
|
||||
|
||||
async acquire() {
|
||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||
let release: () => void = null!;
|
||||
const nextLock = new Promise<void>(resolve => {
|
||||
release = resolve;
|
||||
});
|
||||
|
||||
await this.inner;
|
||||
this.inner = nextLock;
|
||||
this.release = release;
|
||||
}
|
||||
|
||||
[Symbol.asyncDispose]() {
|
||||
this.release();
|
||||
return Promise.resolve();
|
||||
}
|
||||
}
|
||||
@@ -32,7 +32,7 @@ export async function getFeature(prisma: PrismaTransaction, featureId: number) {
|
||||
return cachedFeature;
|
||||
}
|
||||
|
||||
const feature = await prisma.features.findFirst({
|
||||
const feature = await prisma.feature.findFirst({
|
||||
where: {
|
||||
id: featureId,
|
||||
},
|
||||
|
||||
@@ -2,7 +2,10 @@ import { Module } from '@nestjs/common';
|
||||
|
||||
import { UserModule } from '../user';
|
||||
import { EarlyAccessType, FeatureManagementService } from './management';
|
||||
import { FeatureManagementResolver } from './resolver';
|
||||
import {
|
||||
AdminFeatureManagementResolver,
|
||||
FeatureManagementResolver,
|
||||
} from './resolver';
|
||||
import { FeatureService } from './service';
|
||||
|
||||
/**
|
||||
@@ -17,6 +20,7 @@ import { FeatureService } from './service';
|
||||
FeatureService,
|
||||
FeatureManagementService,
|
||||
FeatureManagementResolver,
|
||||
AdminFeatureManagementResolver,
|
||||
],
|
||||
exports: [FeatureService, FeatureManagementService],
|
||||
})
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
|
||||
import { Config } from '../../fundamentals';
|
||||
import { Config, type EventPayload, OnEvent } from '../../fundamentals';
|
||||
import { UserService } from '../user/service';
|
||||
import { FeatureService } from './service';
|
||||
import { FeatureType } from './types';
|
||||
@@ -167,4 +167,9 @@ export class FeatureManagementService {
|
||||
async listFeatureWorkspaces(feature: FeatureType) {
|
||||
return this.feature.listFeatureWorkspaces(feature);
|
||||
}
|
||||
|
||||
@OnEvent('user.admin.created')
|
||||
async onAdminUserCreated({ id }: EventPayload<'user.admin.created'>) {
|
||||
await this.addAdmin(id);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,21 +1,18 @@
|
||||
import {
|
||||
Args,
|
||||
Context,
|
||||
Int,
|
||||
Mutation,
|
||||
Parent,
|
||||
Query,
|
||||
registerEnumType,
|
||||
ResolveField,
|
||||
Resolver,
|
||||
} from '@nestjs/graphql';
|
||||
import { difference } from 'lodash-es';
|
||||
|
||||
import { UserNotFound } from '../../fundamentals';
|
||||
import { sessionUser } from '../auth/service';
|
||||
import { Config } from '../../fundamentals';
|
||||
import { Admin } from '../common';
|
||||
import { UserService } from '../user/service';
|
||||
import { UserType } from '../user/types';
|
||||
import { EarlyAccessType, FeatureManagementService } from './management';
|
||||
import { FeatureService } from './service';
|
||||
import { FeatureType } from './types';
|
||||
|
||||
registerEnumType(EarlyAccessType, {
|
||||
@@ -24,10 +21,7 @@ registerEnumType(EarlyAccessType, {
|
||||
|
||||
@Resolver(() => UserType)
|
||||
export class FeatureManagementResolver {
|
||||
constructor(
|
||||
private readonly users: UserService,
|
||||
private readonly feature: FeatureManagementService
|
||||
) {}
|
||||
constructor(private readonly feature: FeatureManagementService) {}
|
||||
|
||||
@ResolveField(() => [FeatureType], {
|
||||
name: 'features',
|
||||
@@ -36,57 +30,48 @@ export class FeatureManagementResolver {
|
||||
async userFeatures(@Parent() user: UserType) {
|
||||
return this.feature.getActivatedUserFeatures(user.id);
|
||||
}
|
||||
}
|
||||
|
||||
@Admin()
|
||||
@Mutation(() => Int)
|
||||
async addToEarlyAccess(
|
||||
@Args('email') email: string,
|
||||
@Args({ name: 'type', type: () => EarlyAccessType }) type: EarlyAccessType
|
||||
): Promise<number> {
|
||||
const user = await this.users.findUserByEmail(email);
|
||||
if (user) {
|
||||
return this.feature.addEarlyAccess(user.id, type);
|
||||
} else {
|
||||
const user = await this.users.createAnonymousUser(email, {
|
||||
registered: false,
|
||||
});
|
||||
return this.feature.addEarlyAccess(user.id, type);
|
||||
}
|
||||
}
|
||||
export class AvailableUserFeatureConfig {
|
||||
constructor(private readonly config: Config) {}
|
||||
|
||||
@Admin()
|
||||
@Mutation(() => Int)
|
||||
async removeEarlyAccess(@Args('email') email: string): Promise<number> {
|
||||
const user = await this.users.findUserByEmail(email);
|
||||
if (!user) {
|
||||
throw new UserNotFound();
|
||||
}
|
||||
return this.feature.removeEarlyAccess(user.id);
|
||||
}
|
||||
|
||||
@Admin()
|
||||
@Query(() => [UserType])
|
||||
async earlyAccessUsers(
|
||||
@Context() ctx: { isAdminQuery: boolean }
|
||||
): Promise<UserType[]> {
|
||||
// allow query other user's subscription
|
||||
ctx.isAdminQuery = true;
|
||||
return this.feature.listEarlyAccess().then(users => {
|
||||
return users.map(sessionUser);
|
||||
});
|
||||
}
|
||||
|
||||
@Admin()
|
||||
@Mutation(() => Boolean)
|
||||
async addAdminister(@Args('email') email: string): Promise<boolean> {
|
||||
const user = await this.users.findUserByEmail(email);
|
||||
|
||||
if (!user) {
|
||||
throw new UserNotFound();
|
||||
}
|
||||
|
||||
await this.feature.addAdmin(user.id);
|
||||
|
||||
return true;
|
||||
async availableUserFeatures() {
|
||||
return this.config.isSelfhosted
|
||||
? [FeatureType.Admin]
|
||||
: [FeatureType.EarlyAccess, FeatureType.AIEarlyAccess, FeatureType.Admin];
|
||||
}
|
||||
}
|
||||
|
||||
@Admin()
|
||||
@Resolver(() => Boolean)
|
||||
export class AdminFeatureManagementResolver extends AvailableUserFeatureConfig {
|
||||
constructor(
|
||||
config: Config,
|
||||
private readonly feature: FeatureService
|
||||
) {
|
||||
super(config);
|
||||
}
|
||||
|
||||
@Mutation(() => [FeatureType], {
|
||||
description: 'update user enabled feature',
|
||||
})
|
||||
async updateUserFeatures(
|
||||
@Args('id') id: string,
|
||||
@Args({ name: 'features', type: () => [FeatureType] })
|
||||
features: FeatureType[]
|
||||
) {
|
||||
const configurableFeatures = await this.availableUserFeatures();
|
||||
|
||||
const removed = difference(configurableFeatures, features);
|
||||
await Promise.all(
|
||||
features.map(feature =>
|
||||
this.feature.addUserFeature(id, feature, 'admin panel')
|
||||
)
|
||||
);
|
||||
await Promise.all(
|
||||
removed.map(feature => this.feature.removeUserFeature(id, feature))
|
||||
);
|
||||
|
||||
return features;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { PrismaClient } from '@prisma/client';
|
||||
|
||||
import { CannotDeleteAllAdminAccount } from '../../fundamentals';
|
||||
import { WorkspaceType } from '../workspaces/types';
|
||||
import { FeatureConfigType, getFeature } from './feature';
|
||||
import { FeatureKind, FeatureType } from './types';
|
||||
@@ -10,7 +11,7 @@ export class FeatureService {
|
||||
constructor(private readonly prisma: PrismaClient) {}
|
||||
|
||||
async getFeature<F extends FeatureType>(feature: F) {
|
||||
const data = await this.prisma.features.findFirst({
|
||||
const data = await this.prisma.feature.findFirst({
|
||||
where: {
|
||||
feature,
|
||||
type: FeatureKind.Feature,
|
||||
@@ -36,7 +37,7 @@ export class FeatureService {
|
||||
expiredAt?: Date | string
|
||||
) {
|
||||
return this.prisma.$transaction(async tx => {
|
||||
const latestFlag = await tx.userFeatures.findFirst({
|
||||
const latestFlag = await tx.userFeature.findFirst({
|
||||
where: {
|
||||
userId,
|
||||
feature: {
|
||||
@@ -53,7 +54,7 @@ export class FeatureService {
|
||||
if (latestFlag) {
|
||||
return latestFlag.id;
|
||||
} else {
|
||||
const featureId = await tx.features
|
||||
const featureId = await tx.feature
|
||||
.findFirst({
|
||||
where: { feature, type: FeatureKind.Feature },
|
||||
orderBy: { version: 'desc' },
|
||||
@@ -65,7 +66,7 @@ export class FeatureService {
|
||||
throw new Error(`Feature ${feature} not found`);
|
||||
}
|
||||
|
||||
return tx.userFeatures
|
||||
return tx.userFeature
|
||||
.create({
|
||||
data: {
|
||||
reason,
|
||||
@@ -81,7 +82,10 @@ export class FeatureService {
|
||||
}
|
||||
|
||||
async removeUserFeature(userId: string, feature: FeatureType) {
|
||||
return this.prisma.userFeatures
|
||||
if (feature === FeatureType.Admin) {
|
||||
await this.ensureNotLastAdmin(userId);
|
||||
}
|
||||
return this.prisma.userFeature
|
||||
.updateMany({
|
||||
where: {
|
||||
userId,
|
||||
@@ -98,13 +102,27 @@ export class FeatureService {
|
||||
.then(r => r.count);
|
||||
}
|
||||
|
||||
async ensureNotLastAdmin(userId: string) {
|
||||
const count = await this.prisma.userFeature.count({
|
||||
where: {
|
||||
userId: { not: userId },
|
||||
feature: { feature: FeatureType.Admin, type: FeatureKind.Feature },
|
||||
activated: true,
|
||||
},
|
||||
});
|
||||
|
||||
if (count === 0) {
|
||||
throw new CannotDeleteAllAdminAccount();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* get user's features, will included inactivated features
|
||||
* @param userId user id
|
||||
* @returns list of features
|
||||
*/
|
||||
async getUserFeatures(userId: string) {
|
||||
const features = await this.prisma.userFeatures.findMany({
|
||||
const features = await this.prisma.userFeature.findMany({
|
||||
where: {
|
||||
userId,
|
||||
feature: { type: FeatureKind.Feature },
|
||||
@@ -129,7 +147,7 @@ export class FeatureService {
|
||||
}
|
||||
|
||||
async getActivatedUserFeatures(userId: string) {
|
||||
const features = await this.prisma.userFeatures.findMany({
|
||||
const features = await this.prisma.userFeature.findMany({
|
||||
where: {
|
||||
userId,
|
||||
feature: { type: FeatureKind.Feature },
|
||||
@@ -156,7 +174,7 @@ export class FeatureService {
|
||||
}
|
||||
|
||||
async listFeatureUsers(feature: FeatureType) {
|
||||
return this.prisma.userFeatures
|
||||
return this.prisma.userFeature
|
||||
.findMany({
|
||||
where: {
|
||||
activated: true,
|
||||
@@ -182,7 +200,7 @@ export class FeatureService {
|
||||
}
|
||||
|
||||
async hasUserFeature(userId: string, feature: FeatureType) {
|
||||
return this.prisma.userFeatures
|
||||
return this.prisma.userFeature
|
||||
.count({
|
||||
where: {
|
||||
userId,
|
||||
@@ -206,7 +224,7 @@ export class FeatureService {
|
||||
expiredAt?: Date | string
|
||||
) {
|
||||
return this.prisma.$transaction(async tx => {
|
||||
const latestFlag = await tx.workspaceFeatures.findFirst({
|
||||
const latestFlag = await tx.workspaceFeature.findFirst({
|
||||
where: {
|
||||
workspaceId,
|
||||
feature: {
|
||||
@@ -223,7 +241,7 @@ export class FeatureService {
|
||||
return latestFlag.id;
|
||||
} else {
|
||||
// use latest version of feature
|
||||
const featureId = await tx.features
|
||||
const featureId = await tx.feature
|
||||
.findFirst({
|
||||
where: { feature, type: FeatureKind.Feature },
|
||||
select: { id: true },
|
||||
@@ -235,7 +253,7 @@ export class FeatureService {
|
||||
throw new Error(`Feature ${feature} not found`);
|
||||
}
|
||||
|
||||
return tx.workspaceFeatures
|
||||
return tx.workspaceFeature
|
||||
.create({
|
||||
data: {
|
||||
reason,
|
||||
@@ -251,7 +269,7 @@ export class FeatureService {
|
||||
}
|
||||
|
||||
async removeWorkspaceFeature(workspaceId: string, feature: FeatureType) {
|
||||
return this.prisma.workspaceFeatures
|
||||
return this.prisma.workspaceFeature
|
||||
.updateMany({
|
||||
where: {
|
||||
workspaceId,
|
||||
@@ -274,7 +292,7 @@ export class FeatureService {
|
||||
* @returns list of features
|
||||
*/
|
||||
async getWorkspaceFeatures(workspaceId: string) {
|
||||
const features = await this.prisma.workspaceFeatures.findMany({
|
||||
const features = await this.prisma.workspaceFeature.findMany({
|
||||
where: {
|
||||
workspace: { id: workspaceId },
|
||||
feature: {
|
||||
@@ -301,7 +319,7 @@ export class FeatureService {
|
||||
}
|
||||
|
||||
async listFeatureWorkspaces(feature: FeatureType): Promise<WorkspaceType[]> {
|
||||
return this.prisma.workspaceFeatures
|
||||
return this.prisma.workspaceFeature
|
||||
.findMany({
|
||||
where: {
|
||||
activated: true,
|
||||
@@ -324,7 +342,7 @@ export class FeatureService {
|
||||
}
|
||||
|
||||
async hasWorkspaceFeature(workspaceId: string, feature: FeatureType) {
|
||||
return this.prisma.workspaceFeatures
|
||||
return this.prisma.workspaceFeature
|
||||
.count({
|
||||
where: {
|
||||
workspaceId,
|
||||
|
||||
12
packages/backend/server/src/core/permission/index.ts
Normal file
12
packages/backend/server/src/core/permission/index.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
|
||||
import { PermissionService } from './service';
|
||||
|
||||
@Module({
|
||||
providers: [PermissionService],
|
||||
exports: [PermissionService],
|
||||
})
|
||||
export class PermissionModule {}
|
||||
|
||||
export { PermissionService } from './service';
|
||||
export { Permission, PublicPageMode } from './types';
|
||||
@@ -2,13 +2,12 @@ import { Injectable } from '@nestjs/common';
|
||||
import type { Prisma } from '@prisma/client';
|
||||
import { PrismaClient } from '@prisma/client';
|
||||
|
||||
import { DocAccessDenied, WorkspaceAccessDenied } from '../../fundamentals';
|
||||
import { Permission } from './types';
|
||||
|
||||
export enum PublicPageMode {
|
||||
Page,
|
||||
Edgeless,
|
||||
}
|
||||
import {
|
||||
DocAccessDenied,
|
||||
SpaceAccessDenied,
|
||||
SpaceOwnerNotFound,
|
||||
} from '../../fundamentals';
|
||||
import { Permission, PublicPageMode } from './types';
|
||||
|
||||
@Injectable()
|
||||
export class PermissionService {
|
||||
@@ -59,7 +58,7 @@ export class PermissionService {
|
||||
}
|
||||
|
||||
async getWorkspaceOwner(workspaceId: string) {
|
||||
return this.prisma.workspaceUserPermission.findFirstOrThrow({
|
||||
const owner = await this.prisma.workspaceUserPermission.findFirst({
|
||||
where: {
|
||||
workspaceId,
|
||||
type: Permission.Owner,
|
||||
@@ -68,6 +67,20 @@ export class PermissionService {
|
||||
user: true,
|
||||
},
|
||||
});
|
||||
|
||||
if (!owner) {
|
||||
throw new SpaceOwnerNotFound({ spaceId: workspaceId });
|
||||
}
|
||||
|
||||
return owner.user;
|
||||
}
|
||||
|
||||
async getWorkspaceMemberCount(workspaceId: string) {
|
||||
return this.prisma.workspaceUserPermission.count({
|
||||
where: {
|
||||
workspaceId,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async tryGetWorkspaceOwner(workspaceId: string) {
|
||||
@@ -152,7 +165,7 @@ export class PermissionService {
|
||||
permission: Permission = Permission.Read
|
||||
) {
|
||||
if (!(await this.tryCheckWorkspace(ws, user, permission))) {
|
||||
throw new WorkspaceAccessDenied({ workspaceId: ws });
|
||||
throw new SpaceAccessDenied({ spaceId: ws });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -195,6 +208,17 @@ export class PermissionService {
|
||||
return false;
|
||||
}
|
||||
|
||||
async allowUrlPreview(ws: string) {
|
||||
const count = await this.prisma.workspace.count({
|
||||
where: {
|
||||
id: ws,
|
||||
public: true,
|
||||
},
|
||||
});
|
||||
|
||||
return count > 0;
|
||||
}
|
||||
|
||||
async grant(
|
||||
ws: string,
|
||||
user: string,
|
||||
@@ -324,7 +348,7 @@ export class PermissionService {
|
||||
permission = Permission.Read
|
||||
) {
|
||||
if (!(await this.tryCheckPage(ws, page, user, permission))) {
|
||||
throw new DocAccessDenied({ workspaceId: ws, docId: page });
|
||||
throw new DocAccessDenied({ spaceId: ws, docId: page });
|
||||
}
|
||||
}
|
||||
|
||||
11
packages/backend/server/src/core/permission/types.ts
Normal file
11
packages/backend/server/src/core/permission/types.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
export enum Permission {
|
||||
Read = 0,
|
||||
Write = 1,
|
||||
Admin = 10,
|
||||
Owner = 99,
|
||||
}
|
||||
|
||||
export enum PublicPageMode {
|
||||
Page,
|
||||
Edgeless,
|
||||
}
|
||||
@@ -1,8 +1,8 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
|
||||
import { FeatureModule } from '../features';
|
||||
import { PermissionModule } from '../permission';
|
||||
import { StorageModule } from '../storage';
|
||||
import { PermissionService } from '../workspaces/permission';
|
||||
import { QuotaManagementResolver } from './resolver';
|
||||
import { QuotaService } from './service';
|
||||
import { QuotaManagementService } from './storage';
|
||||
@@ -14,13 +14,8 @@ import { QuotaManagementService } from './storage';
|
||||
* - quota statistics
|
||||
*/
|
||||
@Module({
|
||||
imports: [FeatureModule, StorageModule],
|
||||
providers: [
|
||||
PermissionService,
|
||||
QuotaService,
|
||||
QuotaManagementResolver,
|
||||
QuotaManagementService,
|
||||
],
|
||||
imports: [FeatureModule, StorageModule, PermissionModule],
|
||||
providers: [QuotaService, QuotaManagementResolver, QuotaManagementService],
|
||||
exports: [QuotaService, QuotaManagementService],
|
||||
})
|
||||
export class QuotaModule {}
|
||||
|
||||
@@ -13,7 +13,7 @@ export class QuotaConfig {
|
||||
return cachedQuota;
|
||||
}
|
||||
|
||||
const quota = await tx.features.findFirst({
|
||||
const quota = await tx.feature.findFirst({
|
||||
where: {
|
||||
id: featureId,
|
||||
},
|
||||
@@ -71,10 +71,6 @@ export class QuotaConfig {
|
||||
return this.config.configs.historyPeriod;
|
||||
}
|
||||
|
||||
get historyPeriodFromNow() {
|
||||
return new Date(Date.now() + this.historyPeriod);
|
||||
}
|
||||
|
||||
get memberLimit() {
|
||||
return this.config.configs.memberLimit;
|
||||
}
|
||||
|
||||
@@ -7,7 +7,7 @@ import {
|
||||
} from '@nestjs/graphql';
|
||||
import { SafeIntResolver } from 'graphql-scalars';
|
||||
|
||||
import { CurrentUser } from '../auth/current-user';
|
||||
import { CurrentUser } from '../auth/session';
|
||||
import { EarlyAccessType } from '../features';
|
||||
import { UserType } from '../user';
|
||||
import { QuotaService } from './service';
|
||||
|
||||
@@ -17,7 +17,7 @@ export class QuotaService {
|
||||
|
||||
// get activated user quota
|
||||
async getUserQuota(userId: string) {
|
||||
const quota = await this.prisma.userFeatures.findFirst({
|
||||
const quota = await this.prisma.userFeature.findFirst({
|
||||
where: {
|
||||
userId,
|
||||
feature: {
|
||||
@@ -44,7 +44,7 @@ export class QuotaService {
|
||||
|
||||
// get user all quota records
|
||||
async getUserQuotas(userId: string) {
|
||||
const quotas = await this.prisma.userFeatures.findMany({
|
||||
const quotas = await this.prisma.userFeature.findMany({
|
||||
where: {
|
||||
userId,
|
||||
feature: {
|
||||
@@ -58,6 +58,9 @@ export class QuotaService {
|
||||
expiredAt: true,
|
||||
featureId: true,
|
||||
},
|
||||
orderBy: {
|
||||
id: 'asc',
|
||||
},
|
||||
});
|
||||
const configs = await Promise.all(
|
||||
quotas.map(async quota => {
|
||||
@@ -66,7 +69,7 @@ export class QuotaService {
|
||||
...quota,
|
||||
feature: await QuotaConfig.get(this.prisma, quota.featureId),
|
||||
};
|
||||
} catch (_) {}
|
||||
} catch {}
|
||||
return null as unknown as typeof quota & {
|
||||
feature: QuotaConfig;
|
||||
};
|
||||
@@ -92,7 +95,7 @@ export class QuotaService {
|
||||
return;
|
||||
}
|
||||
|
||||
const featureId = await tx.features
|
||||
const featureId = await tx.feature
|
||||
.findFirst({
|
||||
where: { feature: quota, type: FeatureKind.Quota },
|
||||
select: { id: true },
|
||||
@@ -105,7 +108,7 @@ export class QuotaService {
|
||||
}
|
||||
|
||||
// we will deactivate all exists quota for this user
|
||||
await tx.userFeatures.updateMany({
|
||||
await tx.userFeature.updateMany({
|
||||
where: {
|
||||
id: undefined,
|
||||
userId,
|
||||
@@ -118,7 +121,7 @@ export class QuotaService {
|
||||
},
|
||||
});
|
||||
|
||||
await tx.userFeatures.create({
|
||||
await tx.userFeature.create({
|
||||
data: {
|
||||
userId,
|
||||
featureId,
|
||||
@@ -133,7 +136,7 @@ export class QuotaService {
|
||||
async hasQuota(userId: string, quota: QuotaType, tx?: PrismaTransaction) {
|
||||
const executor = tx ?? this.prisma;
|
||||
|
||||
return executor.userFeatures
|
||||
return executor.userFeature
|
||||
.count({
|
||||
where: {
|
||||
userId,
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
|
||||
import { WorkspaceOwnerNotFound } from '../../fundamentals';
|
||||
import { FeatureService, FeatureType } from '../features';
|
||||
import { PermissionService } from '../permission';
|
||||
import { WorkspaceBlobStorage } from '../storage';
|
||||
import { PermissionService } from '../workspaces/permission';
|
||||
import { OneGB } from './constant';
|
||||
import { QuotaService } from './service';
|
||||
import { formatSize, QuotaQueryType } from './types';
|
||||
@@ -113,9 +112,9 @@ export class QuotaManagementService {
|
||||
// get workspace's owner quota and total size of used
|
||||
// quota was apply to owner's account
|
||||
async getWorkspaceUsage(workspaceId: string): Promise<QuotaBusinessType> {
|
||||
const { user: owner } =
|
||||
await this.permissions.getWorkspaceOwner(workspaceId);
|
||||
if (!owner) throw new WorkspaceOwnerNotFound({ workspaceId });
|
||||
const owner = await this.permissions.getWorkspaceOwner(workspaceId);
|
||||
const memberCount =
|
||||
await this.permissions.getWorkspaceMemberCount(workspaceId);
|
||||
const {
|
||||
feature: {
|
||||
name,
|
||||
@@ -148,6 +147,7 @@ export class QuotaManagementService {
|
||||
humanReadable,
|
||||
usedSize,
|
||||
unlimited,
|
||||
memberCount,
|
||||
};
|
||||
|
||||
if (quota.unlimited) {
|
||||
|
||||
@@ -87,6 +87,9 @@ export class QuotaQueryType {
|
||||
@Field(() => SafeIntResolver)
|
||||
memberLimit!: number;
|
||||
|
||||
@Field(() => SafeIntResolver)
|
||||
memberCount!: number;
|
||||
|
||||
@Field(() => SafeIntResolver)
|
||||
storageQuota!: number;
|
||||
|
||||
|
||||
66
packages/backend/server/src/core/selfhost/controller.ts
Normal file
66
packages/backend/server/src/core/selfhost/controller.ts
Normal file
@@ -0,0 +1,66 @@
|
||||
import { Body, Controller, Post, Req, Res } from '@nestjs/common';
|
||||
import type { Request, Response } from 'express';
|
||||
|
||||
import {
|
||||
ActionForbidden,
|
||||
EventEmitter,
|
||||
InternalServerError,
|
||||
Mutex,
|
||||
PasswordRequired,
|
||||
} from '../../fundamentals';
|
||||
import { AuthService, Public } from '../auth';
|
||||
import { ServerService } from '../config';
|
||||
import { UserService } from '../user/service';
|
||||
|
||||
interface CreateUserInput {
|
||||
email: string;
|
||||
password: string;
|
||||
}
|
||||
|
||||
@Controller('/api/setup')
|
||||
export class CustomSetupController {
|
||||
constructor(
|
||||
private readonly user: UserService,
|
||||
private readonly auth: AuthService,
|
||||
private readonly event: EventEmitter,
|
||||
private readonly mutex: Mutex,
|
||||
private readonly server: ServerService
|
||||
) {}
|
||||
|
||||
@Public()
|
||||
@Post('/create-admin-user')
|
||||
async createAdmin(
|
||||
@Req() req: Request,
|
||||
@Res() res: Response,
|
||||
@Body() input: CreateUserInput
|
||||
) {
|
||||
if (!input.password) {
|
||||
throw new PasswordRequired();
|
||||
}
|
||||
|
||||
await using lock = await this.mutex.lock('createFirstAdmin');
|
||||
|
||||
if (!lock) {
|
||||
throw new InternalServerError();
|
||||
}
|
||||
|
||||
if (await this.server.initialized()) {
|
||||
throw new ActionForbidden('First user already created');
|
||||
}
|
||||
|
||||
const user = await this.user.createUser({
|
||||
email: input.email,
|
||||
password: input.password,
|
||||
registered: true,
|
||||
});
|
||||
|
||||
try {
|
||||
await this.event.emitAsync('user.admin.created', user);
|
||||
await this.auth.setCookies(req, res, user.id);
|
||||
res.send({ id: user.id, email: user.email, name: user.name });
|
||||
} catch (e) {
|
||||
await this.user.deleteUser(user.id);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
104
packages/backend/server/src/core/selfhost/index.ts
Normal file
104
packages/backend/server/src/core/selfhost/index.ts
Normal file
@@ -0,0 +1,104 @@
|
||||
import { join } from 'node:path';
|
||||
|
||||
import {
|
||||
Injectable,
|
||||
Module,
|
||||
NestMiddleware,
|
||||
OnModuleInit,
|
||||
} from '@nestjs/common';
|
||||
import { HttpAdapterHost } from '@nestjs/core';
|
||||
import type { Application, Request, Response } from 'express';
|
||||
import { static as serveStatic } from 'express';
|
||||
|
||||
import { Config } from '../../fundamentals';
|
||||
import { AuthModule } from '../auth';
|
||||
import { ServerConfigModule, ServerService } from '../config';
|
||||
import { UserModule } from '../user';
|
||||
import { CustomSetupController } from './controller';
|
||||
|
||||
@Injectable()
|
||||
export class SetupMiddleware implements NestMiddleware {
|
||||
constructor(private readonly server: ServerService) {}
|
||||
|
||||
use = (req: Request, res: Response, next: (error?: Error | any) => void) => {
|
||||
// never throw
|
||||
// eslint-disable-next-line @typescript-eslint/no-floating-promises
|
||||
this.server
|
||||
.initialized()
|
||||
.then(initialized => {
|
||||
// Redirect to setup page if not initialized
|
||||
if (!initialized && req.path !== '/admin/setup') {
|
||||
res.redirect('/admin/setup');
|
||||
return;
|
||||
}
|
||||
|
||||
// redirect to admin page if initialized
|
||||
if (initialized && req.path === '/admin/setup') {
|
||||
res.redirect('/admin');
|
||||
return;
|
||||
}
|
||||
|
||||
next();
|
||||
})
|
||||
.catch(() => {
|
||||
next();
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
@Module({
|
||||
imports: [AuthModule, UserModule, ServerConfigModule],
|
||||
providers: [SetupMiddleware],
|
||||
controllers: [CustomSetupController],
|
||||
})
|
||||
export class SelfhostModule implements OnModuleInit {
|
||||
constructor(
|
||||
private readonly config: Config,
|
||||
private readonly adapterHost: HttpAdapterHost,
|
||||
private readonly check: SetupMiddleware
|
||||
) {}
|
||||
|
||||
onModuleInit() {
|
||||
const staticPath = join(this.config.projectRoot, 'static');
|
||||
// in command line mode
|
||||
if (!this.adapterHost.httpAdapter) {
|
||||
return;
|
||||
}
|
||||
|
||||
const app = this.adapterHost.httpAdapter.getInstance<Application>();
|
||||
const basePath = this.config.server.path;
|
||||
|
||||
app.get(basePath + '/admin/index.html', (_req, res) => {
|
||||
res.redirect(basePath + '/admin');
|
||||
});
|
||||
app.use(
|
||||
basePath + '/admin',
|
||||
serveStatic(join(staticPath, 'admin'), {
|
||||
redirect: false,
|
||||
index: false,
|
||||
})
|
||||
);
|
||||
|
||||
app.get(
|
||||
[basePath + '/admin', basePath + '/admin/*'],
|
||||
this.check.use,
|
||||
(_req, res) => {
|
||||
res.sendFile(join(staticPath, 'admin', 'index.html'));
|
||||
}
|
||||
);
|
||||
|
||||
app.get(basePath + '/index.html', (_req, res) => {
|
||||
res.redirect(basePath);
|
||||
});
|
||||
app.use(
|
||||
basePath,
|
||||
serveStatic(staticPath, {
|
||||
redirect: false,
|
||||
index: false,
|
||||
})
|
||||
);
|
||||
app.get('*', this.check.use, (_req, res) => {
|
||||
res.sendFile(join(staticPath, 'index.html'));
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1,333 +0,0 @@
|
||||
import { applyDecorators, Logger } from '@nestjs/common';
|
||||
import {
|
||||
ConnectedSocket,
|
||||
MessageBody,
|
||||
OnGatewayConnection,
|
||||
OnGatewayDisconnect,
|
||||
SubscribeMessage as RawSubscribeMessage,
|
||||
WebSocketGateway,
|
||||
WebSocketServer,
|
||||
} from '@nestjs/websockets';
|
||||
import { Server, Socket } from 'socket.io';
|
||||
import { encodeStateAsUpdate, encodeStateVector } from 'yjs';
|
||||
|
||||
import {
|
||||
CallTimer,
|
||||
Config,
|
||||
DocNotFound,
|
||||
GatewayErrorWrapper,
|
||||
metrics,
|
||||
NotInWorkspace,
|
||||
VersionRejected,
|
||||
WorkspaceAccessDenied,
|
||||
} from '../../../fundamentals';
|
||||
import { Auth, CurrentUser } from '../../auth';
|
||||
import { DocManager } from '../../doc';
|
||||
import { DocID } from '../../utils/doc';
|
||||
import { PermissionService } from '../../workspaces/permission';
|
||||
import { Permission } from '../../workspaces/types';
|
||||
|
||||
const SubscribeMessage = (event: string) =>
|
||||
applyDecorators(
|
||||
GatewayErrorWrapper(event),
|
||||
CallTimer('socketio', 'event_duration', { event }),
|
||||
RawSubscribeMessage(event)
|
||||
);
|
||||
|
||||
type EventResponse<Data = any> = Data extends never
|
||||
? {
|
||||
data?: never;
|
||||
}
|
||||
: {
|
||||
data: Data;
|
||||
};
|
||||
|
||||
function Sync(workspaceId: string): `${string}:sync` {
|
||||
return `${workspaceId}:sync`;
|
||||
}
|
||||
|
||||
function Awareness(workspaceId: string): `${string}:awareness` {
|
||||
return `${workspaceId}:awareness`;
|
||||
}
|
||||
|
||||
@WebSocketGateway({
|
||||
cors: !AFFiNE.node.prod,
|
||||
transports: ['websocket'],
|
||||
// see: https://socket.io/docs/v4/server-options/#maxhttpbuffersize
|
||||
maxHttpBufferSize: 1e8, // 100 MB
|
||||
})
|
||||
export class EventsGateway implements OnGatewayConnection, OnGatewayDisconnect {
|
||||
protected logger = new Logger(EventsGateway.name);
|
||||
private connectionCount = 0;
|
||||
|
||||
constructor(
|
||||
private readonly config: Config,
|
||||
private readonly docManager: DocManager,
|
||||
private readonly permissions: PermissionService
|
||||
) {}
|
||||
|
||||
@WebSocketServer()
|
||||
server!: Server;
|
||||
|
||||
handleConnection() {
|
||||
this.connectionCount++;
|
||||
metrics.socketio.gauge('realtime_connections').record(this.connectionCount);
|
||||
}
|
||||
|
||||
handleDisconnect() {
|
||||
this.connectionCount--;
|
||||
metrics.socketio.gauge('realtime_connections').record(this.connectionCount);
|
||||
}
|
||||
|
||||
async assertVersion(client: Socket, version?: string) {
|
||||
const shouldCheckClientVersion = await this.config.runtime.fetch(
|
||||
'flags/syncClientVersionCheck'
|
||||
);
|
||||
if (
|
||||
// @todo(@darkskygit): remove this flag after 0.12 goes stable
|
||||
shouldCheckClientVersion &&
|
||||
version !== AFFiNE.version
|
||||
) {
|
||||
client.emit('server-version-rejected', {
|
||||
currentVersion: version,
|
||||
requiredVersion: AFFiNE.version,
|
||||
reason: `Client version${
|
||||
version ? ` ${version}` : ''
|
||||
} is outdated, please update to ${AFFiNE.version}`,
|
||||
});
|
||||
|
||||
throw new VersionRejected({
|
||||
version: version || 'unknown',
|
||||
serverVersion: AFFiNE.version,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async joinWorkspace(
|
||||
client: Socket,
|
||||
room: `${string}:${'sync' | 'awareness'}`
|
||||
) {
|
||||
await client.join(room);
|
||||
}
|
||||
|
||||
async leaveWorkspace(
|
||||
client: Socket,
|
||||
room: `${string}:${'sync' | 'awareness'}`
|
||||
) {
|
||||
await client.leave(room);
|
||||
}
|
||||
|
||||
assertInWorkspace(client: Socket, room: `${string}:${'sync' | 'awareness'}`) {
|
||||
if (!client.rooms.has(room)) {
|
||||
throw new NotInWorkspace({ workspaceId: room.split(':')[0] });
|
||||
}
|
||||
}
|
||||
|
||||
async assertWorkspaceAccessible(
|
||||
workspaceId: string,
|
||||
userId: string,
|
||||
permission: Permission = Permission.Read
|
||||
) {
|
||||
if (
|
||||
!(await this.permissions.isWorkspaceMember(
|
||||
workspaceId,
|
||||
userId,
|
||||
permission
|
||||
))
|
||||
) {
|
||||
throw new WorkspaceAccessDenied({ workspaceId });
|
||||
}
|
||||
}
|
||||
|
||||
@Auth()
|
||||
@SubscribeMessage('client-handshake-sync')
|
||||
async handleClientHandshakeSync(
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@MessageBody('workspaceId') workspaceId: string,
|
||||
@MessageBody('version') version: string | undefined,
|
||||
@ConnectedSocket() client: Socket
|
||||
): Promise<EventResponse<{ clientId: string }>> {
|
||||
await this.assertVersion(client, version);
|
||||
await this.assertWorkspaceAccessible(
|
||||
workspaceId,
|
||||
user.id,
|
||||
Permission.Write
|
||||
);
|
||||
|
||||
await this.joinWorkspace(client, Sync(workspaceId));
|
||||
return {
|
||||
data: {
|
||||
clientId: client.id,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@Auth()
|
||||
@SubscribeMessage('client-handshake-awareness')
|
||||
async handleClientHandshakeAwareness(
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@MessageBody('workspaceId') workspaceId: string,
|
||||
@MessageBody('version') version: string | undefined,
|
||||
@ConnectedSocket() client: Socket
|
||||
): Promise<EventResponse<{ clientId: string }>> {
|
||||
await this.assertVersion(client, version);
|
||||
await this.assertWorkspaceAccessible(
|
||||
workspaceId,
|
||||
user.id,
|
||||
Permission.Write
|
||||
);
|
||||
|
||||
await this.joinWorkspace(client, Awareness(workspaceId));
|
||||
return {
|
||||
data: {
|
||||
clientId: client.id,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@SubscribeMessage('client-leave-sync')
|
||||
async handleLeaveSync(
|
||||
@MessageBody() workspaceId: string,
|
||||
@ConnectedSocket() client: Socket
|
||||
): Promise<EventResponse> {
|
||||
this.assertInWorkspace(client, Sync(workspaceId));
|
||||
await this.leaveWorkspace(client, Sync(workspaceId));
|
||||
return {};
|
||||
}
|
||||
|
||||
@SubscribeMessage('client-leave-awareness')
|
||||
async handleLeaveAwareness(
|
||||
@MessageBody() workspaceId: string,
|
||||
@ConnectedSocket() client: Socket
|
||||
): Promise<EventResponse> {
|
||||
this.assertInWorkspace(client, Awareness(workspaceId));
|
||||
await this.leaveWorkspace(client, Awareness(workspaceId));
|
||||
return {};
|
||||
}
|
||||
|
||||
@SubscribeMessage('client-pre-sync')
|
||||
async loadDocStats(
|
||||
@ConnectedSocket() client: Socket,
|
||||
@MessageBody()
|
||||
{ workspaceId, timestamp }: { workspaceId: string; timestamp?: number }
|
||||
): Promise<EventResponse<Record<string, number>>> {
|
||||
this.assertInWorkspace(client, Sync(workspaceId));
|
||||
|
||||
const stats = await this.docManager.getDocTimestamps(
|
||||
workspaceId,
|
||||
timestamp
|
||||
);
|
||||
|
||||
return {
|
||||
data: stats,
|
||||
};
|
||||
}
|
||||
|
||||
@SubscribeMessage('client-update-v2')
|
||||
async handleClientUpdateV2(
|
||||
@MessageBody()
|
||||
{
|
||||
workspaceId,
|
||||
guid,
|
||||
updates,
|
||||
}: {
|
||||
workspaceId: string;
|
||||
guid: string;
|
||||
updates: string[];
|
||||
},
|
||||
@ConnectedSocket() client: Socket
|
||||
): Promise<EventResponse<{ accepted: true; timestamp?: number }>> {
|
||||
this.assertInWorkspace(client, Sync(workspaceId));
|
||||
|
||||
const docId = new DocID(guid, workspaceId);
|
||||
const buffers = updates.map(update => Buffer.from(update, 'base64'));
|
||||
const timestamp = await this.docManager.batchPush(
|
||||
docId.workspace,
|
||||
docId.guid,
|
||||
buffers
|
||||
);
|
||||
|
||||
client
|
||||
.to(Sync(workspaceId))
|
||||
.emit('server-updates', { workspaceId, guid, updates, timestamp });
|
||||
|
||||
return {
|
||||
data: {
|
||||
accepted: true,
|
||||
timestamp,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@SubscribeMessage('doc-load-v2')
|
||||
async loadDocV2(
|
||||
@ConnectedSocket() client: Socket,
|
||||
@MessageBody()
|
||||
{
|
||||
workspaceId,
|
||||
guid,
|
||||
stateVector,
|
||||
}: {
|
||||
workspaceId: string;
|
||||
guid: string;
|
||||
stateVector?: string;
|
||||
}
|
||||
): Promise<
|
||||
EventResponse<{ missing: string; state?: string; timestamp: number }>
|
||||
> {
|
||||
this.assertInWorkspace(client, Sync(workspaceId));
|
||||
|
||||
const docId = new DocID(guid, workspaceId);
|
||||
const res = await this.docManager.get(docId.workspace, docId.guid);
|
||||
|
||||
if (!res) {
|
||||
throw new DocNotFound({ workspaceId, docId: docId.guid });
|
||||
}
|
||||
|
||||
const missing = Buffer.from(
|
||||
encodeStateAsUpdate(
|
||||
res.doc,
|
||||
stateVector ? Buffer.from(stateVector, 'base64') : undefined
|
||||
)
|
||||
).toString('base64');
|
||||
const state = Buffer.from(encodeStateVector(res.doc)).toString('base64');
|
||||
|
||||
return {
|
||||
data: {
|
||||
missing,
|
||||
state,
|
||||
timestamp: res.timestamp,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@SubscribeMessage('awareness-init')
|
||||
async handleInitAwareness(
|
||||
@MessageBody() workspaceId: string,
|
||||
@ConnectedSocket() client: Socket
|
||||
): Promise<EventResponse<{ clientId: string }>> {
|
||||
this.assertInWorkspace(client, Awareness(workspaceId));
|
||||
client.to(Awareness(workspaceId)).emit('new-client-awareness-init');
|
||||
return {
|
||||
data: {
|
||||
clientId: client.id,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@SubscribeMessage('awareness-update')
|
||||
async handleHelpGatheringAwareness(
|
||||
@MessageBody()
|
||||
{
|
||||
workspaceId,
|
||||
awarenessUpdate,
|
||||
}: { workspaceId: string; awarenessUpdate: string },
|
||||
@ConnectedSocket() client: Socket
|
||||
): Promise<EventResponse> {
|
||||
this.assertInWorkspace(client, Awareness(workspaceId));
|
||||
client
|
||||
.to(Awareness(workspaceId))
|
||||
.emit('server-awareness-broadcast', { workspaceId, awarenessUpdate });
|
||||
return {};
|
||||
}
|
||||
}
|
||||
@@ -1,11 +0,0 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
|
||||
import { DocModule } from '../../doc';
|
||||
import { PermissionService } from '../../workspaces/permission';
|
||||
import { EventsGateway } from './events.gateway';
|
||||
|
||||
@Module({
|
||||
imports: [DocModule],
|
||||
providers: [EventsGateway, PermissionService],
|
||||
})
|
||||
export class EventsModule {}
|
||||
665
packages/backend/server/src/core/sync/gateway.ts
Normal file
665
packages/backend/server/src/core/sync/gateway.ts
Normal file
@@ -0,0 +1,665 @@
|
||||
import { applyDecorators, Logger } from '@nestjs/common';
|
||||
import {
|
||||
ConnectedSocket,
|
||||
MessageBody,
|
||||
OnGatewayConnection,
|
||||
OnGatewayDisconnect,
|
||||
SubscribeMessage as RawSubscribeMessage,
|
||||
WebSocketGateway,
|
||||
} from '@nestjs/websockets';
|
||||
import { Socket } from 'socket.io';
|
||||
import { diffUpdate, encodeStateVectorFromUpdate } from 'yjs';
|
||||
|
||||
import {
|
||||
AlreadyInSpace,
|
||||
CallTimer,
|
||||
Config,
|
||||
DocNotFound,
|
||||
GatewayErrorWrapper,
|
||||
metrics,
|
||||
NotInSpace,
|
||||
SpaceAccessDenied,
|
||||
VersionRejected,
|
||||
} from '../../fundamentals';
|
||||
import { CurrentUser } from '../auth';
|
||||
import {
|
||||
DocStorageAdapter,
|
||||
PgUserspaceDocStorageAdapter,
|
||||
PgWorkspaceDocStorageAdapter,
|
||||
} from '../doc';
|
||||
import { Permission, PermissionService } from '../permission';
|
||||
import { DocID } from '../utils/doc';
|
||||
|
||||
const SubscribeMessage = (event: string) =>
|
||||
applyDecorators(
|
||||
GatewayErrorWrapper(event),
|
||||
CallTimer('socketio', 'event_duration', { event }),
|
||||
RawSubscribeMessage(event)
|
||||
);
|
||||
|
||||
type EventResponse<Data = any> = Data extends never
|
||||
? {
|
||||
data?: never;
|
||||
}
|
||||
: {
|
||||
data: Data;
|
||||
};
|
||||
|
||||
type RoomType = 'sync' | `${string}:awareness`;
|
||||
|
||||
function Room(
|
||||
spaceId: string,
|
||||
type: RoomType = 'sync'
|
||||
): `${string}:${RoomType}` {
|
||||
return `${spaceId}:${type}`;
|
||||
}
|
||||
|
||||
enum SpaceType {
|
||||
Workspace = 'workspace',
|
||||
Userspace = 'userspace',
|
||||
}
|
||||
|
||||
interface JoinSpaceMessage {
|
||||
spaceType: SpaceType;
|
||||
spaceId: string;
|
||||
clientVersion: string;
|
||||
}
|
||||
|
||||
interface JoinSpaceAwarenessMessage {
|
||||
spaceType: SpaceType;
|
||||
spaceId: string;
|
||||
docId: string;
|
||||
clientVersion: string;
|
||||
}
|
||||
|
||||
interface LeaveSpaceMessage {
|
||||
spaceType: SpaceType;
|
||||
spaceId: string;
|
||||
}
|
||||
|
||||
interface LeaveSpaceAwarenessMessage {
|
||||
spaceType: SpaceType;
|
||||
spaceId: string;
|
||||
docId: string;
|
||||
}
|
||||
|
||||
interface PushDocUpdatesMessage {
|
||||
spaceType: SpaceType;
|
||||
spaceId: string;
|
||||
docId: string;
|
||||
updates: string[];
|
||||
}
|
||||
|
||||
interface LoadDocMessage {
|
||||
spaceType: SpaceType;
|
||||
spaceId: string;
|
||||
docId: string;
|
||||
stateVector?: string;
|
||||
}
|
||||
|
||||
interface LoadDocTimestampsMessage {
|
||||
spaceType: SpaceType;
|
||||
spaceId: string;
|
||||
timestamp?: number;
|
||||
}
|
||||
|
||||
interface LoadSpaceAwarenessesMessage {
|
||||
spaceType: SpaceType;
|
||||
spaceId: string;
|
||||
docId: string;
|
||||
}
|
||||
interface UpdateAwarenessMessage {
|
||||
spaceType: SpaceType;
|
||||
spaceId: string;
|
||||
docId: string;
|
||||
awarenessUpdate: string;
|
||||
}
|
||||
@WebSocketGateway()
|
||||
export class SpaceSyncGateway
|
||||
implements OnGatewayConnection, OnGatewayDisconnect
|
||||
{
|
||||
protected logger = new Logger(SpaceSyncGateway.name);
|
||||
|
||||
private connectionCount = 0;
|
||||
|
||||
constructor(
|
||||
private readonly config: Config,
|
||||
private readonly permissions: PermissionService,
|
||||
private readonly workspace: PgWorkspaceDocStorageAdapter,
|
||||
private readonly userspace: PgUserspaceDocStorageAdapter
|
||||
) {}
|
||||
|
||||
handleConnection() {
|
||||
this.connectionCount++;
|
||||
metrics.socketio.gauge('realtime_connections').record(this.connectionCount);
|
||||
}
|
||||
|
||||
handleDisconnect() {
|
||||
this.connectionCount--;
|
||||
metrics.socketio.gauge('realtime_connections').record(this.connectionCount);
|
||||
}
|
||||
|
||||
selectAdapter(client: Socket, spaceType: SpaceType): SyncSocketAdapter {
|
||||
let adapters: Record<SpaceType, SyncSocketAdapter> = (client as any)
|
||||
.affineSyncAdapters;
|
||||
|
||||
if (!adapters) {
|
||||
const workspace = new WorkspaceSyncAdapter(
|
||||
client,
|
||||
this.workspace,
|
||||
this.permissions
|
||||
);
|
||||
const userspace = new UserspaceSyncAdapter(client, this.userspace);
|
||||
|
||||
adapters = { workspace, userspace };
|
||||
(client as any).affineSyncAdapters = adapters;
|
||||
}
|
||||
|
||||
return adapters[spaceType];
|
||||
}
|
||||
|
||||
async assertVersion(client: Socket, version?: string) {
|
||||
const shouldCheckClientVersion = await this.config.runtime.fetch(
|
||||
'flags/syncClientVersionCheck'
|
||||
);
|
||||
if (
|
||||
// @todo(@darkskygit): remove this flag after 0.12 goes stable
|
||||
shouldCheckClientVersion &&
|
||||
version !== AFFiNE.version
|
||||
) {
|
||||
client.emit('server-version-rejected', {
|
||||
currentVersion: version,
|
||||
requiredVersion: AFFiNE.version,
|
||||
reason: `Client version${
|
||||
version ? ` ${version}` : ''
|
||||
} is outdated, please update to ${AFFiNE.version}`,
|
||||
});
|
||||
|
||||
throw new VersionRejected({
|
||||
version: version || 'unknown',
|
||||
serverVersion: AFFiNE.version,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async joinWorkspace(
|
||||
client: Socket,
|
||||
room: `${string}:${'sync' | 'awareness'}`
|
||||
) {
|
||||
await client.join(room);
|
||||
}
|
||||
|
||||
async leaveWorkspace(
|
||||
client: Socket,
|
||||
room: `${string}:${'sync' | 'awareness'}`
|
||||
) {
|
||||
await client.leave(room);
|
||||
}
|
||||
|
||||
assertInWorkspace(client: Socket, room: `${string}:${'sync' | 'awareness'}`) {
|
||||
if (!client.rooms.has(room)) {
|
||||
throw new NotInSpace({ spaceId: room.split(':')[0] });
|
||||
}
|
||||
}
|
||||
|
||||
// v3
|
||||
@SubscribeMessage('space:join')
|
||||
async onJoinSpace(
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@ConnectedSocket() client: Socket,
|
||||
@MessageBody()
|
||||
{ spaceType, spaceId, clientVersion }: JoinSpaceMessage
|
||||
): Promise<EventResponse<{ clientId: string; success: true }>> {
|
||||
await this.assertVersion(client, clientVersion);
|
||||
|
||||
await this.selectAdapter(client, spaceType).join(user.id, spaceId);
|
||||
|
||||
return { data: { clientId: client.id, success: true } };
|
||||
}
|
||||
|
||||
@SubscribeMessage('space:leave')
|
||||
async onLeaveSpace(
|
||||
@ConnectedSocket() client: Socket,
|
||||
@MessageBody() { spaceType, spaceId }: LeaveSpaceMessage
|
||||
): Promise<EventResponse<{ clientId: string; success: true }>> {
|
||||
await this.selectAdapter(client, spaceType).leave(spaceId);
|
||||
|
||||
return { data: { clientId: client.id, success: true } };
|
||||
}
|
||||
|
||||
@SubscribeMessage('space:load-doc')
|
||||
async onLoadSpaceDoc(
|
||||
@ConnectedSocket() client: Socket,
|
||||
@MessageBody()
|
||||
{ spaceType, spaceId, docId, stateVector }: LoadDocMessage
|
||||
): Promise<
|
||||
EventResponse<{ missing: string; state?: string; timestamp: number }>
|
||||
> {
|
||||
const adapter = this.selectAdapter(client, spaceType);
|
||||
adapter.assertIn(spaceId);
|
||||
|
||||
const doc = await adapter.get(spaceId, docId);
|
||||
|
||||
if (!doc) {
|
||||
throw new DocNotFound({ spaceId, docId });
|
||||
}
|
||||
|
||||
const missing = Buffer.from(
|
||||
stateVector
|
||||
? diffUpdate(doc.bin, Buffer.from(stateVector, 'base64'))
|
||||
: doc.bin
|
||||
).toString('base64');
|
||||
|
||||
const state = Buffer.from(encodeStateVectorFromUpdate(doc.bin)).toString(
|
||||
'base64'
|
||||
);
|
||||
|
||||
return {
|
||||
data: {
|
||||
missing,
|
||||
state,
|
||||
timestamp: doc.timestamp,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@SubscribeMessage('space:push-doc-updates')
|
||||
async onReceiveDocUpdates(
|
||||
@ConnectedSocket() client: Socket,
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@MessageBody()
|
||||
message: PushDocUpdatesMessage
|
||||
): Promise<EventResponse<{ accepted: true; timestamp?: number }>> {
|
||||
const { spaceType, spaceId, docId, updates } = message;
|
||||
const adapter = this.selectAdapter(client, spaceType);
|
||||
|
||||
// TODO(@forehalo): we might need to check write permission before push updates
|
||||
const timestamp = await adapter.push(
|
||||
spaceId,
|
||||
docId,
|
||||
updates.map(update => Buffer.from(update, 'base64')),
|
||||
user.id
|
||||
);
|
||||
|
||||
// could be put in [adapter.push]
|
||||
// but the event should be kept away from adapter
|
||||
// so
|
||||
client
|
||||
.to(adapter.room(spaceId))
|
||||
.emit('space:broadcast-doc-updates', { ...message, timestamp });
|
||||
|
||||
// TODO(@forehalo): remove backward compatibility
|
||||
if (spaceType === SpaceType.Workspace) {
|
||||
const id = new DocID(docId, spaceId);
|
||||
client.to(adapter.room(spaceId)).emit('server-updates', {
|
||||
workspaceId: spaceId,
|
||||
guid: id.guid,
|
||||
updates,
|
||||
timestamp,
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
data: {
|
||||
accepted: true,
|
||||
timestamp,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@SubscribeMessage('space:load-doc-timestamps')
|
||||
async onLoadDocTimestamps(
|
||||
@ConnectedSocket() client: Socket,
|
||||
@MessageBody()
|
||||
{ spaceType, spaceId, timestamp }: LoadDocTimestampsMessage
|
||||
): Promise<EventResponse<Record<string, number>>> {
|
||||
const adapter = this.selectAdapter(client, spaceType);
|
||||
|
||||
const stats = await adapter.getTimestamps(spaceId, timestamp);
|
||||
|
||||
return {
|
||||
data: stats ?? {},
|
||||
};
|
||||
}
|
||||
|
||||
@SubscribeMessage('space:join-awareness')
|
||||
async onJoinAwareness(
|
||||
@ConnectedSocket() client: Socket,
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@MessageBody()
|
||||
{ spaceType, spaceId, docId, clientVersion }: JoinSpaceAwarenessMessage
|
||||
) {
|
||||
await this.assertVersion(client, clientVersion);
|
||||
|
||||
await this.selectAdapter(client, spaceType).join(
|
||||
user.id,
|
||||
spaceId,
|
||||
`${docId}:awareness`
|
||||
);
|
||||
|
||||
return { data: { clientId: client.id, success: true } };
|
||||
}
|
||||
|
||||
@SubscribeMessage('space:leave-awareness')
|
||||
async onLeaveAwareness(
|
||||
@ConnectedSocket() client: Socket,
|
||||
@MessageBody()
|
||||
{ spaceType, spaceId, docId }: LeaveSpaceAwarenessMessage
|
||||
) {
|
||||
await this.selectAdapter(client, spaceType).leave(
|
||||
spaceId,
|
||||
`${docId}:awareness`
|
||||
);
|
||||
|
||||
return { data: { clientId: client.id, success: true } };
|
||||
}
|
||||
|
||||
@SubscribeMessage('space:load-awarenesses')
|
||||
async onLoadAwareness(
|
||||
@ConnectedSocket() client: Socket,
|
||||
@MessageBody()
|
||||
{ spaceType, spaceId, docId }: LoadSpaceAwarenessesMessage
|
||||
) {
|
||||
const adapter = this.selectAdapter(client, spaceType);
|
||||
|
||||
const roomType = `${docId}:awareness` as const;
|
||||
adapter.assertIn(spaceId, roomType);
|
||||
client
|
||||
.to(adapter.room(spaceId, roomType))
|
||||
.emit('space:collect-awareness', { spaceType, spaceId, docId });
|
||||
|
||||
// TODO(@forehalo): remove backward compatibility
|
||||
if (spaceType === SpaceType.Workspace) {
|
||||
client
|
||||
.to(adapter.room(spaceId, roomType))
|
||||
.emit('new-client-awareness-init');
|
||||
}
|
||||
|
||||
return { data: { clientId: client.id } };
|
||||
}
|
||||
|
||||
@SubscribeMessage('space:update-awareness')
|
||||
async onUpdateAwareness(
|
||||
@ConnectedSocket() client: Socket,
|
||||
@MessageBody() message: UpdateAwarenessMessage
|
||||
) {
|
||||
const { spaceType, spaceId, docId } = message;
|
||||
const adapter = this.selectAdapter(client, spaceType);
|
||||
|
||||
const roomType = `${docId}:awareness` as const;
|
||||
adapter.assertIn(spaceId, roomType);
|
||||
client
|
||||
.to(adapter.room(spaceId, roomType))
|
||||
.emit('space:broadcast-awareness-update', message);
|
||||
|
||||
// TODO(@forehalo): remove backward compatibility
|
||||
if (spaceType === SpaceType.Workspace) {
|
||||
client
|
||||
.to(adapter.room(spaceId, roomType))
|
||||
.emit('server-awareness-broadcast', {
|
||||
workspaceId: spaceId,
|
||||
awarenessUpdate: message.awarenessUpdate,
|
||||
});
|
||||
}
|
||||
|
||||
return {};
|
||||
}
|
||||
|
||||
// TODO(@forehalo): remove
|
||||
// deprecated section
|
||||
@SubscribeMessage('client-handshake-sync')
|
||||
async handleClientHandshakeSync(
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@MessageBody('workspaceId') workspaceId: string,
|
||||
@MessageBody('version') version: string,
|
||||
@ConnectedSocket() client: Socket
|
||||
): Promise<EventResponse<{ clientId: string }>> {
|
||||
await this.assertVersion(client, version);
|
||||
|
||||
return this.onJoinSpace(user, client, {
|
||||
spaceType: SpaceType.Workspace,
|
||||
spaceId: workspaceId,
|
||||
clientVersion: version,
|
||||
});
|
||||
}
|
||||
|
||||
@SubscribeMessage('client-leave-sync')
|
||||
async handleLeaveSync(
|
||||
@MessageBody() workspaceId: string,
|
||||
@ConnectedSocket() client: Socket
|
||||
): Promise<EventResponse> {
|
||||
return this.onLeaveSpace(client, {
|
||||
spaceType: SpaceType.Workspace,
|
||||
spaceId: workspaceId,
|
||||
});
|
||||
}
|
||||
|
||||
@SubscribeMessage('client-pre-sync')
|
||||
async loadDocStats(
|
||||
@ConnectedSocket() client: Socket,
|
||||
@MessageBody()
|
||||
{ workspaceId, timestamp }: { workspaceId: string; timestamp?: number }
|
||||
): Promise<EventResponse<Record<string, number>>> {
|
||||
return this.onLoadDocTimestamps(client, {
|
||||
spaceType: SpaceType.Workspace,
|
||||
spaceId: workspaceId,
|
||||
timestamp,
|
||||
});
|
||||
}
|
||||
|
||||
@SubscribeMessage('client-update-v2')
|
||||
async handleClientUpdateV2(
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@MessageBody()
|
||||
{
|
||||
workspaceId,
|
||||
guid,
|
||||
updates,
|
||||
}: {
|
||||
workspaceId: string;
|
||||
guid: string;
|
||||
updates: string[];
|
||||
},
|
||||
@ConnectedSocket() client: Socket
|
||||
): Promise<EventResponse<{ accepted: true; timestamp?: number }>> {
|
||||
return this.onReceiveDocUpdates(client, user, {
|
||||
spaceType: SpaceType.Workspace,
|
||||
spaceId: workspaceId,
|
||||
docId: guid,
|
||||
updates,
|
||||
});
|
||||
}
|
||||
|
||||
@SubscribeMessage('doc-load-v2')
|
||||
async loadDocV2(
|
||||
@ConnectedSocket() client: Socket,
|
||||
@MessageBody()
|
||||
{
|
||||
workspaceId,
|
||||
guid,
|
||||
stateVector,
|
||||
}: {
|
||||
workspaceId: string;
|
||||
guid: string;
|
||||
stateVector?: string;
|
||||
}
|
||||
): Promise<
|
||||
EventResponse<{ missing: string; state?: string; timestamp: number }>
|
||||
> {
|
||||
return this.onLoadSpaceDoc(client, {
|
||||
spaceType: SpaceType.Workspace,
|
||||
spaceId: workspaceId,
|
||||
docId: guid,
|
||||
stateVector,
|
||||
});
|
||||
}
|
||||
|
||||
@SubscribeMessage('client-handshake-awareness')
|
||||
async handleClientHandshakeAwareness(
|
||||
@ConnectedSocket() client: Socket,
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@MessageBody('workspaceId') workspaceId: string,
|
||||
@MessageBody('version') version: string
|
||||
): Promise<EventResponse<{ clientId: string }>> {
|
||||
return this.onJoinAwareness(client, user, {
|
||||
spaceType: SpaceType.Workspace,
|
||||
spaceId: workspaceId,
|
||||
docId: workspaceId,
|
||||
clientVersion: version,
|
||||
});
|
||||
}
|
||||
|
||||
@SubscribeMessage('client-leave-awareness')
|
||||
async handleLeaveAwareness(
|
||||
@MessageBody() workspaceId: string,
|
||||
@ConnectedSocket() client: Socket
|
||||
): Promise<EventResponse> {
|
||||
return this.onLeaveAwareness(client, {
|
||||
spaceType: SpaceType.Workspace,
|
||||
spaceId: workspaceId,
|
||||
docId: workspaceId,
|
||||
});
|
||||
}
|
||||
|
||||
@SubscribeMessage('awareness-init')
|
||||
async handleInitAwareness(
|
||||
@MessageBody() workspaceId: string,
|
||||
@ConnectedSocket() client: Socket
|
||||
): Promise<EventResponse<{ clientId: string }>> {
|
||||
return this.onLoadAwareness(client, {
|
||||
spaceType: SpaceType.Workspace,
|
||||
spaceId: workspaceId,
|
||||
docId: workspaceId,
|
||||
});
|
||||
}
|
||||
|
||||
@SubscribeMessage('awareness-update')
|
||||
async handleHelpGatheringAwareness(
|
||||
@MessageBody()
|
||||
{
|
||||
workspaceId,
|
||||
awarenessUpdate,
|
||||
}: { workspaceId: string; awarenessUpdate: string },
|
||||
@ConnectedSocket() client: Socket
|
||||
): Promise<EventResponse> {
|
||||
return this.onUpdateAwareness(client, {
|
||||
spaceType: SpaceType.Workspace,
|
||||
spaceId: workspaceId,
|
||||
docId: workspaceId,
|
||||
awarenessUpdate,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
abstract class SyncSocketAdapter {
|
||||
constructor(
|
||||
private readonly spaceType: SpaceType,
|
||||
public readonly client: Socket,
|
||||
public readonly storage: DocStorageAdapter
|
||||
) {}
|
||||
|
||||
room(spaceId: string, roomType: RoomType = 'sync') {
|
||||
return `${this.spaceType}:${Room(spaceId, roomType)}`;
|
||||
}
|
||||
|
||||
async join(userId: string, spaceId: string, roomType: RoomType = 'sync') {
|
||||
this.assertNotIn(spaceId, roomType);
|
||||
await this.assertAccessible(spaceId, userId, Permission.Read);
|
||||
return this.client.join(this.room(spaceId, roomType));
|
||||
}
|
||||
|
||||
async leave(spaceId: string, roomType: RoomType = 'sync') {
|
||||
this.assertIn(spaceId, roomType);
|
||||
return this.client.leave(this.room(spaceId, roomType));
|
||||
}
|
||||
|
||||
in(spaceId: string, roomType: RoomType = 'sync') {
|
||||
return this.client.rooms.has(this.room(spaceId, roomType));
|
||||
}
|
||||
|
||||
assertNotIn(spaceId: string, roomType: RoomType = 'sync') {
|
||||
if (this.client.rooms.has(this.room(spaceId, roomType))) {
|
||||
throw new AlreadyInSpace({ spaceId });
|
||||
}
|
||||
}
|
||||
|
||||
assertIn(spaceId: string, roomType: RoomType = 'sync') {
|
||||
if (!this.client.rooms.has(this.room(spaceId, roomType))) {
|
||||
throw new NotInSpace({ spaceId });
|
||||
}
|
||||
}
|
||||
|
||||
abstract assertAccessible(
|
||||
spaceId: string,
|
||||
userId: string,
|
||||
permission?: Permission
|
||||
): Promise<void>;
|
||||
|
||||
push(spaceId: string, docId: string, updates: Buffer[], editorId: string) {
|
||||
this.assertIn(spaceId);
|
||||
return this.storage.pushDocUpdates(spaceId, docId, updates, editorId);
|
||||
}
|
||||
|
||||
get(spaceId: string, docId: string) {
|
||||
this.assertIn(spaceId);
|
||||
return this.storage.getDoc(spaceId, docId);
|
||||
}
|
||||
|
||||
getTimestamps(spaceId: string, timestamp?: number) {
|
||||
this.assertIn(spaceId);
|
||||
return this.storage.getSpaceDocTimestamps(spaceId, timestamp);
|
||||
}
|
||||
}
|
||||
|
||||
class WorkspaceSyncAdapter extends SyncSocketAdapter {
|
||||
constructor(
|
||||
client: Socket,
|
||||
storage: DocStorageAdapter,
|
||||
private readonly permission: PermissionService
|
||||
) {
|
||||
super(SpaceType.Workspace, client, storage);
|
||||
}
|
||||
|
||||
override push(
|
||||
spaceId: string,
|
||||
docId: string,
|
||||
updates: Buffer[],
|
||||
editorId: string
|
||||
) {
|
||||
const id = new DocID(docId, spaceId);
|
||||
return super.push(spaceId, id.guid, updates, editorId);
|
||||
}
|
||||
|
||||
override get(spaceId: string, docId: string) {
|
||||
const id = new DocID(docId, spaceId);
|
||||
return this.storage.getDoc(spaceId, id.guid);
|
||||
}
|
||||
|
||||
async assertAccessible(
|
||||
spaceId: string,
|
||||
userId: string,
|
||||
permission: Permission = Permission.Read
|
||||
) {
|
||||
if (
|
||||
!(await this.permission.isWorkspaceMember(spaceId, userId, permission))
|
||||
) {
|
||||
throw new SpaceAccessDenied({ spaceId });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class UserspaceSyncAdapter extends SyncSocketAdapter {
|
||||
constructor(client: Socket, storage: DocStorageAdapter) {
|
||||
super(SpaceType.Userspace, client, storage);
|
||||
}
|
||||
|
||||
async assertAccessible(
|
||||
spaceId: string,
|
||||
userId: string,
|
||||
_permission: Permission = Permission.Read
|
||||
) {
|
||||
if (spaceId !== userId) {
|
||||
throw new SpaceAccessDenied({ spaceId });
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,8 +1,11 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
|
||||
import { EventsModule } from './events/events.module';
|
||||
import { DocStorageModule } from '../doc';
|
||||
import { PermissionModule } from '../permission';
|
||||
import { SpaceSyncGateway } from './gateway';
|
||||
|
||||
@Module({
|
||||
imports: [EventsModule],
|
||||
imports: [DocStorageModule, PermissionModule],
|
||||
providers: [SpaceSyncGateway],
|
||||
})
|
||||
export class SyncModule {}
|
||||
|
||||
@@ -13,21 +13,21 @@ import GraphQLUpload from 'graphql-upload/GraphQLUpload.mjs';
|
||||
import { isNil, omitBy } from 'lodash-es';
|
||||
|
||||
import {
|
||||
Config,
|
||||
CryptoHelper,
|
||||
CannotDeleteOwnAccount,
|
||||
type FileUpload,
|
||||
Throttle,
|
||||
UserNotFound,
|
||||
} from '../../fundamentals';
|
||||
import { CurrentUser } from '../auth/current-user';
|
||||
import { Public } from '../auth/guard';
|
||||
import { sessionUser } from '../auth/service';
|
||||
import { CurrentUser } from '../auth/session';
|
||||
import { Admin } from '../common';
|
||||
import { AvatarStorage } from '../storage';
|
||||
import { validators } from '../utils/validators';
|
||||
import { UserService } from './service';
|
||||
import {
|
||||
DeleteAccount,
|
||||
ManageUserInput,
|
||||
RemoveAvatar,
|
||||
UpdateUserInput,
|
||||
UserOrLimitedUser,
|
||||
@@ -167,9 +167,6 @@ class CreateUserInput {
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
name!: string | null;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
password!: string | null;
|
||||
}
|
||||
|
||||
@Admin()
|
||||
@@ -177,11 +174,16 @@ class CreateUserInput {
|
||||
export class UserManagementResolver {
|
||||
constructor(
|
||||
private readonly db: PrismaClient,
|
||||
private readonly user: UserService,
|
||||
private readonly crypto: CryptoHelper,
|
||||
private readonly config: Config
|
||||
private readonly user: UserService
|
||||
) {}
|
||||
|
||||
@Query(() => Int, {
|
||||
description: 'Get users count',
|
||||
})
|
||||
async usersCount(): Promise<number> {
|
||||
return this.db.user.count();
|
||||
}
|
||||
|
||||
@Query(() => [UserType], {
|
||||
description: 'List registered users',
|
||||
})
|
||||
@@ -216,28 +218,34 @@ export class UserManagementResolver {
|
||||
return sessionUser(user);
|
||||
}
|
||||
|
||||
@Query(() => UserType, {
|
||||
name: 'userByEmail',
|
||||
description: 'Get user by email for admin',
|
||||
nullable: true,
|
||||
})
|
||||
async getUserByEmail(@Args('email') email: string) {
|
||||
const user = await this.db.user.findUnique({
|
||||
select: { ...this.user.defaultUserSelect, password: true },
|
||||
where: {
|
||||
email,
|
||||
},
|
||||
});
|
||||
|
||||
if (!user) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return sessionUser(user);
|
||||
}
|
||||
|
||||
@Mutation(() => UserType, {
|
||||
description: 'Create a new user',
|
||||
})
|
||||
async createUser(
|
||||
@Args({ name: 'input', type: () => CreateUserInput }) input: CreateUserInput
|
||||
) {
|
||||
validators.assertValidEmail(input.email);
|
||||
if (input.password) {
|
||||
const config = await this.config.runtime.fetchAll({
|
||||
'auth/password.max': true,
|
||||
'auth/password.min': true,
|
||||
});
|
||||
validators.assertValidPassword(input.password, {
|
||||
max: config['auth/password.max'],
|
||||
min: config['auth/password.min'],
|
||||
});
|
||||
}
|
||||
|
||||
const { id } = await this.user.createAnonymousUser(input.email, {
|
||||
password: input.password
|
||||
? await this.crypto.encryptPassword(input.password)
|
||||
: undefined,
|
||||
const { id } = await this.user.createUser({
|
||||
email: input.email,
|
||||
registered: true,
|
||||
});
|
||||
|
||||
@@ -248,8 +256,42 @@ export class UserManagementResolver {
|
||||
@Mutation(() => DeleteAccount, {
|
||||
description: 'Delete a user account',
|
||||
})
|
||||
async deleteUser(@Args('id') id: string): Promise<DeleteAccount> {
|
||||
async deleteUser(
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@Args('id') id: string
|
||||
): Promise<DeleteAccount> {
|
||||
if (user.id === id) {
|
||||
throw new CannotDeleteOwnAccount();
|
||||
}
|
||||
await this.user.deleteUser(id);
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
@Mutation(() => UserType, {
|
||||
description: 'Update a user',
|
||||
})
|
||||
async updateUser(
|
||||
@Args('id') id: string,
|
||||
@Args('input') input: ManageUserInput
|
||||
): Promise<UserType> {
|
||||
const user = await this.db.user.findUnique({
|
||||
where: { id },
|
||||
});
|
||||
|
||||
if (!user) {
|
||||
throw new UserNotFound();
|
||||
}
|
||||
|
||||
input = omitBy(input, isNil);
|
||||
if (Object.keys(input).length === 0) {
|
||||
return sessionUser(user);
|
||||
}
|
||||
|
||||
return sessionUser(
|
||||
await this.user.updateUser(user.id, {
|
||||
email: input.email,
|
||||
name: input.name,
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,12 +3,18 @@ import { Prisma, PrismaClient } from '@prisma/client';
|
||||
|
||||
import {
|
||||
Config,
|
||||
CryptoHelper,
|
||||
EmailAlreadyUsed,
|
||||
EventEmitter,
|
||||
type EventPayload,
|
||||
OnEvent,
|
||||
WrongSignInCredentials,
|
||||
WrongSignInMethod,
|
||||
} from '../../fundamentals';
|
||||
import { Quota_FreePlanV1_1 } from '../quota/schema';
|
||||
import { validators } from '../utils/validators';
|
||||
|
||||
type CreateUserInput = Omit<Prisma.UserCreateInput, 'name'> & { name?: string };
|
||||
|
||||
@Injectable()
|
||||
export class UserService {
|
||||
@@ -26,6 +32,7 @@ export class UserService {
|
||||
|
||||
constructor(
|
||||
private readonly config: Config,
|
||||
private readonly crypto: CryptoHelper,
|
||||
private readonly prisma: PrismaClient,
|
||||
private readonly emitter: EventEmitter
|
||||
) {}
|
||||
@@ -35,7 +42,7 @@ export class UserService {
|
||||
name: 'Unnamed',
|
||||
features: {
|
||||
create: {
|
||||
reason: 'created by invite sign up',
|
||||
reason: 'sign up',
|
||||
activated: true,
|
||||
feature: {
|
||||
connect: {
|
||||
@@ -47,7 +54,38 @@ export class UserService {
|
||||
};
|
||||
}
|
||||
|
||||
async createUser(data: Prisma.UserCreateInput) {
|
||||
async createUser(data: CreateUserInput) {
|
||||
validators.assertValidEmail(data.email);
|
||||
|
||||
if (data.password) {
|
||||
const config = await this.config.runtime.fetchAll({
|
||||
'auth/password.max': true,
|
||||
'auth/password.min': true,
|
||||
});
|
||||
validators.assertValidPassword(data.password, {
|
||||
max: config['auth/password.max'],
|
||||
min: config['auth/password.min'],
|
||||
});
|
||||
}
|
||||
|
||||
return this.createUser_without_verification(data);
|
||||
}
|
||||
|
||||
async createUser_without_verification(data: CreateUserInput) {
|
||||
const user = await this.findUserByEmail(data.email);
|
||||
|
||||
if (user) {
|
||||
throw new EmailAlreadyUsed();
|
||||
}
|
||||
|
||||
if (data.password) {
|
||||
data.password = await this.crypto.encryptPassword(data.password);
|
||||
}
|
||||
|
||||
if (!data.name) {
|
||||
data.name = data.email.split('@')[0];
|
||||
}
|
||||
|
||||
return this.prisma.user.create({
|
||||
select: this.defaultUserSelect,
|
||||
data: {
|
||||
@@ -57,23 +95,6 @@ export class UserService {
|
||||
});
|
||||
}
|
||||
|
||||
async createAnonymousUser(
|
||||
email: string,
|
||||
data?: Partial<Prisma.UserCreateInput>
|
||||
) {
|
||||
const user = await this.findUserByEmail(email);
|
||||
|
||||
if (user) {
|
||||
throw new EmailAlreadyUsed();
|
||||
}
|
||||
|
||||
return this.createUser({
|
||||
email,
|
||||
name: email.split('@')[0],
|
||||
...data,
|
||||
});
|
||||
}
|
||||
|
||||
async findUserById(id: string) {
|
||||
return this.prisma.user
|
||||
.findUnique({
|
||||
@@ -86,6 +107,7 @@ export class UserService {
|
||||
}
|
||||
|
||||
async findUserByEmail(email: string) {
|
||||
validators.assertValidEmail(email);
|
||||
return this.prisma.user.findFirst({
|
||||
where: {
|
||||
email: {
|
||||
@@ -101,6 +123,7 @@ export class UserService {
|
||||
* supposed to be used only for `Credential SignIn`
|
||||
*/
|
||||
async findUserWithHashedPasswordByEmail(email: string) {
|
||||
validators.assertValidEmail(email);
|
||||
return this.prisma.user.findFirst({
|
||||
where: {
|
||||
email: {
|
||||
@@ -111,22 +134,32 @@ export class UserService {
|
||||
});
|
||||
}
|
||||
|
||||
async findOrCreateUser(
|
||||
email: string,
|
||||
data?: Partial<Prisma.UserCreateInput>
|
||||
) {
|
||||
const user = await this.findUserByEmail(email);
|
||||
if (user) {
|
||||
return user;
|
||||
async signIn(email: string, password: string) {
|
||||
const user = await this.findUserWithHashedPasswordByEmail(email);
|
||||
|
||||
if (!user) {
|
||||
throw new WrongSignInCredentials();
|
||||
}
|
||||
return this.createAnonymousUser(email, data);
|
||||
|
||||
if (!user.password) {
|
||||
throw new WrongSignInMethod();
|
||||
}
|
||||
|
||||
const passwordMatches = await this.crypto.verifyPassword(
|
||||
password,
|
||||
user.password
|
||||
);
|
||||
|
||||
if (!passwordMatches) {
|
||||
throw new WrongSignInCredentials();
|
||||
}
|
||||
|
||||
return user;
|
||||
}
|
||||
|
||||
async fulfillUser(
|
||||
email: string,
|
||||
data: Partial<
|
||||
Pick<Prisma.UserCreateInput, 'emailVerifiedAt' | 'registered'>
|
||||
>
|
||||
data: Omit<Partial<Prisma.UserCreateInput>, 'id'>
|
||||
) {
|
||||
const user = await this.findUserByEmail(email);
|
||||
if (!user) {
|
||||
@@ -146,7 +179,6 @@ export class UserService {
|
||||
|
||||
if (Object.keys(data).length) {
|
||||
return await this.prisma.user.update({
|
||||
select: this.defaultUserSelect,
|
||||
where: { id: user.id },
|
||||
data,
|
||||
});
|
||||
@@ -160,9 +192,38 @@ export class UserService {
|
||||
|
||||
async updateUser(
|
||||
id: string,
|
||||
data: Prisma.UserUpdateInput,
|
||||
data: Omit<Partial<Prisma.UserCreateInput>, 'id'>,
|
||||
select: Prisma.UserSelect = this.defaultUserSelect
|
||||
) {
|
||||
if (data.password) {
|
||||
const config = await this.config.runtime.fetchAll({
|
||||
'auth/password.max': true,
|
||||
'auth/password.min': true,
|
||||
});
|
||||
validators.assertValidPassword(data.password, {
|
||||
max: config['auth/password.max'],
|
||||
min: config['auth/password.min'],
|
||||
});
|
||||
|
||||
data.password = await this.crypto.encryptPassword(data.password);
|
||||
}
|
||||
|
||||
if (data.email) {
|
||||
validators.assertValidEmail(data.email);
|
||||
const emailTaken = await this.prisma.user.count({
|
||||
where: {
|
||||
email: data.email,
|
||||
id: {
|
||||
not: id,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
if (emailTaken) {
|
||||
throw new EmailAlreadyUsed();
|
||||
}
|
||||
}
|
||||
|
||||
const user = await this.prisma.user.update({ where: { id }, data, select });
|
||||
|
||||
this.emitter.emit('user.updated', user);
|
||||
|
||||
@@ -7,7 +7,8 @@ import {
|
||||
} from '@nestjs/graphql';
|
||||
import type { User } from '@prisma/client';
|
||||
|
||||
import { CurrentUser } from '../auth/current-user';
|
||||
import type { Payload } from '../../fundamentals/event/def';
|
||||
import { type CurrentUser } from '../auth/session';
|
||||
|
||||
@ObjectType()
|
||||
export class UserType implements CurrentUser {
|
||||
@@ -81,3 +82,20 @@ export class UpdateUserInput implements Partial<User> {
|
||||
@Field({ description: 'User name', nullable: true })
|
||||
name?: string;
|
||||
}
|
||||
|
||||
@InputType()
|
||||
export class ManageUserInput {
|
||||
@Field({ description: 'User email', nullable: true })
|
||||
email?: string;
|
||||
|
||||
@Field({ description: 'User name', nullable: true })
|
||||
name?: string;
|
||||
}
|
||||
|
||||
declare module '../../fundamentals/event/def' {
|
||||
interface UserEvents {
|
||||
admin: {
|
||||
created: Payload<{ id: string }>;
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -21,7 +21,7 @@ export class DocID {
|
||||
static parse(raw: string): DocID | null {
|
||||
try {
|
||||
return new DocID(raw);
|
||||
} catch (e) {
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -12,11 +12,10 @@ import {
|
||||
InvalidHistoryTimestamp,
|
||||
} from '../../fundamentals';
|
||||
import { CurrentUser, Public } from '../auth';
|
||||
import { DocHistoryManager, DocManager } from '../doc';
|
||||
import { PgWorkspaceDocStorageAdapter } from '../doc';
|
||||
import { Permission, PermissionService, PublicPageMode } from '../permission';
|
||||
import { WorkspaceBlobStorage } from '../storage';
|
||||
import { DocID } from '../utils/doc';
|
||||
import { PermissionService, PublicPageMode } from './permission';
|
||||
import { Permission } from './types';
|
||||
|
||||
@Controller('/api/workspaces')
|
||||
export class WorkspacesController {
|
||||
@@ -24,8 +23,7 @@ export class WorkspacesController {
|
||||
constructor(
|
||||
private readonly storage: WorkspaceBlobStorage,
|
||||
private readonly permission: PermissionService,
|
||||
private readonly docManager: DocManager,
|
||||
private readonly historyManager: DocHistoryManager,
|
||||
private readonly workspace: PgWorkspaceDocStorageAdapter,
|
||||
private readonly prisma: PrismaClient
|
||||
) {}
|
||||
|
||||
@@ -57,7 +55,7 @@ export class WorkspacesController {
|
||||
|
||||
if (!body) {
|
||||
throw new BlobNotFound({
|
||||
workspaceId,
|
||||
spaceId: workspaceId,
|
||||
blobId: name,
|
||||
});
|
||||
}
|
||||
@@ -97,14 +95,14 @@ export class WorkspacesController {
|
||||
throw new AccessDenied();
|
||||
}
|
||||
|
||||
const binResponse = await this.docManager.getBinary(
|
||||
const binResponse = await this.workspace.getDoc(
|
||||
docId.workspace,
|
||||
docId.guid
|
||||
);
|
||||
|
||||
if (!binResponse) {
|
||||
throw new DocNotFound({
|
||||
workspaceId: docId.workspace,
|
||||
spaceId: docId.workspace,
|
||||
docId: docId.guid,
|
||||
});
|
||||
}
|
||||
@@ -126,7 +124,7 @@ export class WorkspacesController {
|
||||
}
|
||||
|
||||
res.setHeader('content-type', 'application/octet-stream');
|
||||
res.send(binResponse.binary);
|
||||
res.send(binResponse.bin);
|
||||
}
|
||||
|
||||
@Get('/:id/docs/:guid/histories/:timestamp')
|
||||
@@ -142,7 +140,7 @@ export class WorkspacesController {
|
||||
let ts;
|
||||
try {
|
||||
ts = new Date(timestamp);
|
||||
} catch (e) {
|
||||
} catch {
|
||||
throw new InvalidHistoryTimestamp({ timestamp });
|
||||
}
|
||||
|
||||
@@ -153,19 +151,19 @@ export class WorkspacesController {
|
||||
Permission.Write
|
||||
);
|
||||
|
||||
const history = await this.historyManager.get(
|
||||
const history = await this.workspace.getDocHistory(
|
||||
docId.workspace,
|
||||
docId.guid,
|
||||
ts
|
||||
ts.getTime()
|
||||
);
|
||||
|
||||
if (history) {
|
||||
res.setHeader('content-type', 'application/octet-stream');
|
||||
res.setHeader('cache-control', 'private, max-age=2592000, immutable');
|
||||
res.send(history.blob);
|
||||
res.send(history.bin);
|
||||
} else {
|
||||
throw new DocHistoryNotFound({
|
||||
workspaceId: docId.workspace,
|
||||
spaceId: docId.workspace,
|
||||
docId: guid,
|
||||
timestamp: ts.getTime(),
|
||||
});
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
|
||||
import { DocModule } from '../doc';
|
||||
import { DocStorageModule } from '../doc';
|
||||
import { FeatureModule } from '../features';
|
||||
import { PermissionModule } from '../permission';
|
||||
import { QuotaModule } from '../quota';
|
||||
import { StorageModule } from '../storage';
|
||||
import { UserModule } from '../user';
|
||||
import { WorkspacesController } from './controller';
|
||||
import { WorkspaceManagementResolver } from './management';
|
||||
import { PermissionService } from './permission';
|
||||
import {
|
||||
DocHistoryResolver,
|
||||
PagePermissionResolver,
|
||||
@@ -16,17 +16,22 @@ import {
|
||||
} from './resolvers';
|
||||
|
||||
@Module({
|
||||
imports: [DocModule, FeatureModule, QuotaModule, StorageModule, UserModule],
|
||||
imports: [
|
||||
DocStorageModule,
|
||||
FeatureModule,
|
||||
QuotaModule,
|
||||
StorageModule,
|
||||
UserModule,
|
||||
PermissionModule,
|
||||
],
|
||||
controllers: [WorkspacesController],
|
||||
providers: [
|
||||
WorkspaceResolver,
|
||||
WorkspaceManagementResolver,
|
||||
PermissionService,
|
||||
PagePermissionResolver,
|
||||
DocHistoryResolver,
|
||||
WorkspaceBlobResolver,
|
||||
],
|
||||
exports: [PermissionService],
|
||||
})
|
||||
export class WorkspaceModule {}
|
||||
|
||||
|
||||
@@ -12,7 +12,7 @@ import { ActionForbidden } from '../../fundamentals';
|
||||
import { CurrentUser } from '../auth';
|
||||
import { Admin } from '../common';
|
||||
import { FeatureManagementService, FeatureType } from '../features';
|
||||
import { PermissionService } from './permission';
|
||||
import { PermissionService } from '../permission';
|
||||
import { WorkspaceType } from './types';
|
||||
|
||||
@Resolver(() => WorkspaceType)
|
||||
@@ -61,7 +61,7 @@ export class WorkspaceManagementResolver {
|
||||
|
||||
const owner = await this.permission.getWorkspaceOwner(workspaceId);
|
||||
const availableFeatures = await this.availableFeatures(user);
|
||||
if (owner.user.id !== user.id || !availableFeatures.includes(feature)) {
|
||||
if (owner.id !== user.id || !availableFeatures.includes(feature)) {
|
||||
throw new ActionForbidden();
|
||||
}
|
||||
|
||||
|
||||
@@ -19,10 +19,10 @@ import {
|
||||
PreventCache,
|
||||
} from '../../../fundamentals';
|
||||
import { CurrentUser } from '../../auth';
|
||||
import { Permission, PermissionService } from '../../permission';
|
||||
import { QuotaManagementService } from '../../quota';
|
||||
import { WorkspaceBlobStorage } from '../../storage';
|
||||
import { PermissionService } from '../permission';
|
||||
import { Permission, WorkspaceBlobSizes, WorkspaceType } from '../types';
|
||||
import { WorkspaceBlobSizes, WorkspaceType } from '../types';
|
||||
|
||||
@UseGuards(CloudThrottlerGuard)
|
||||
@Resolver(() => WorkspaceType)
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user