mirror of
https://github.com/toeverything/AFFiNE.git
synced 2026-02-06 09:33:45 +00:00
Compare commits
215 Commits
v0.16.2
...
v0.17.0-be
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
bffc294620 | ||
|
|
be4df0f8ac | ||
|
|
64d2b926a2 | ||
|
|
a027cef457 | ||
|
|
7d93fae1dd | ||
|
|
702c03539f | ||
|
|
52cd6102cb | ||
|
|
74cd175d37 | ||
|
|
9cbe416c2c | ||
|
|
f4db4058f8 | ||
|
|
8be67dce82 | ||
|
|
017e89f458 | ||
|
|
d3f50a2e38 | ||
|
|
d0010a1d11 | ||
|
|
10fe79a9a8 | ||
|
|
4bbb94c32d | ||
|
|
5083fe3cfd | ||
|
|
de1a51bf7c | ||
|
|
7621758c76 | ||
|
|
f7816a5b28 | ||
|
|
5c67f98e90 | ||
|
|
d5ecf503c2 | ||
|
|
2a2a969394 | ||
|
|
691bfed185 | ||
|
|
3837c84791 | ||
|
|
059030fbeb | ||
|
|
19a7d1eb92 | ||
|
|
f452414952 | ||
|
|
03c2051926 | ||
|
|
73dd1d3326 | ||
|
|
24acce2eac | ||
|
|
b711b087c5 | ||
|
|
23748c83a1 | ||
|
|
7c3666ad6d | ||
|
|
03a071e4cb | ||
|
|
2bdf8c6ef3 | ||
|
|
c6840c8674 | ||
|
|
5f3dd867ad | ||
|
|
b680c1839b | ||
|
|
1cac2f6ccd | ||
|
|
06552a1120 | ||
|
|
416faba2bc | ||
|
|
93ae724c66 | ||
|
|
989e8830f7 | ||
|
|
0ae5673aaa | ||
|
|
51bc40d2a8 | ||
|
|
2524491bd1 | ||
|
|
53886a7cd3 | ||
|
|
f688c057eb | ||
|
|
eb16c273ee | ||
|
|
01b2339173 | ||
|
|
2799e8cd43 | ||
|
|
fb64bc7e55 | ||
|
|
51f3566bec | ||
|
|
01e6370dd2 | ||
|
|
2ac803c73f | ||
|
|
0d6f468385 | ||
|
|
ec3b97d069 | ||
|
|
7dde509970 | ||
|
|
681f4561fb | ||
|
|
d00f86c3ce | ||
|
|
ae3b13bfb4 | ||
|
|
aad442e73d | ||
|
|
4595df9b0e | ||
|
|
106f332c19 | ||
|
|
98d9295259 | ||
|
|
e7b53641d7 | ||
|
|
41f9149be6 | ||
|
|
935771c8a8 | ||
|
|
52c9da67f0 | ||
|
|
e33aa35f7e | ||
|
|
8b0afd6eeb | ||
|
|
821de0a3bb | ||
|
|
bc306faa2c | ||
|
|
bd8c844e75 | ||
|
|
abbc6aef09 | ||
|
|
3f324a5af9 | ||
|
|
7b9d9a1ca6 | ||
|
|
12d73ee290 | ||
|
|
d819a26bc1 | ||
|
|
a6484018ef | ||
|
|
f9d0a348c4 | ||
|
|
c3ae219992 | ||
|
|
ad110078ac | ||
|
|
bea3d42f40 | ||
|
|
02f0d7aa08 | ||
|
|
197996de31 | ||
|
|
392fef663e | ||
|
|
130e6ab4a3 | ||
|
|
2e37ee0e33 | ||
|
|
e1310b65cd | ||
|
|
87da792c4c | ||
|
|
56f4634c1f | ||
|
|
61e37d8873 | ||
|
|
41d35fdafd | ||
|
|
a992376053 | ||
|
|
d93d39e29d | ||
|
|
d9cedf89e1 | ||
|
|
a802dc4fd6 | ||
|
|
4caf32629a | ||
|
|
697f2c6520 | ||
|
|
df34e2cdf7 | ||
|
|
592997b65b | ||
|
|
69892655a1 | ||
|
|
f99988afa6 | ||
|
|
4ff6fbd052 | ||
|
|
f544e69d02 | ||
|
|
adf314d594 | ||
|
|
7ae141bd9e | ||
|
|
f8e6f1f2b5 | ||
|
|
f1bb1fc9b8 | ||
|
|
5e8683c9be | ||
|
|
3ce92f2abc | ||
|
|
db76780bc9 | ||
|
|
f37051dc87 | ||
|
|
b96ad57568 | ||
|
|
4ec45a247e | ||
|
|
dde45748d9 | ||
|
|
06685683ae | ||
|
|
65a87196d5 | ||
|
|
09ab922572 | ||
|
|
c53adbc7e8 | ||
|
|
03b2cda845 | ||
|
|
3e810eb043 | ||
|
|
b8f07ce3fc | ||
|
|
10a066a52a | ||
|
|
6557b5d4b6 | ||
|
|
67c9c7bcc5 | ||
|
|
e44a9483c2 | ||
|
|
ecf50a4dad | ||
|
|
0209e3fa76 | ||
|
|
9ea4aaaf37 | ||
|
|
611925fa10 | ||
|
|
bc86f0a672 | ||
|
|
3c37006657 | ||
|
|
dbcfd24ed8 | ||
|
|
14066965fa | ||
|
|
01c9d1758e | ||
|
|
130dc2bd8b | ||
|
|
442a843257 | ||
|
|
0b3c7d1407 | ||
|
|
49c8a25fce | ||
|
|
920afa7bf1 | ||
|
|
b57388fd85 | ||
|
|
5e555b3807 | ||
|
|
3b727ef40a | ||
|
|
4bc4a58a30 | ||
|
|
2f02f0da2b | ||
|
|
592150638e | ||
|
|
20174b9cbe | ||
|
|
b333cde336 | ||
|
|
03c4d56a91 | ||
|
|
2e2a3af967 | ||
|
|
5acf1b5309 | ||
|
|
3db95bafa2 | ||
|
|
cf086e4018 | ||
|
|
483fcfb2c7 | ||
|
|
b6a3697793 | ||
|
|
23b0db36b9 | ||
|
|
182b2fd62d | ||
|
|
a43c34feec | ||
|
|
c9a1a8c3b2 | ||
|
|
a49b92e4c8 | ||
|
|
cfe0677b84 | ||
|
|
b6f46e01c5 | ||
|
|
e20bdbf925 | ||
|
|
6f9f579e5d | ||
|
|
682a01e441 | ||
|
|
6b0c398ae5 | ||
|
|
152815c175 | ||
|
|
9d42db56ca | ||
|
|
f3930a9262 | ||
|
|
10bea3922e | ||
|
|
4a89b1a5dd | ||
|
|
4916eea24f | ||
|
|
cfac3ebf1f | ||
|
|
e0a91f63d3 | ||
|
|
23c73243ab | ||
|
|
f324fa4719 | ||
|
|
c822594882 | ||
|
|
83716c2fd9 | ||
|
|
620d20710a | ||
|
|
cfc367efe7 | ||
|
|
69c507fded | ||
|
|
b57ce4646f | ||
|
|
dba024d561 | ||
|
|
e26ba48a45 | ||
|
|
e53dde7944 | ||
|
|
624f3514fc | ||
|
|
ba8958f39b | ||
|
|
9af0e53ae2 | ||
|
|
5a2f93f035 | ||
|
|
9192ac4420 | ||
|
|
57449c1530 | ||
|
|
89537e6892 | ||
|
|
50948318e0 | ||
|
|
0504d0b0ff | ||
|
|
339c39c1ec | ||
|
|
cd3924b8fc | ||
|
|
42b5ef7a8b | ||
|
|
ad42418089 | ||
|
|
99e70c91e8 | ||
|
|
05ac3dbdcb | ||
|
|
994b539507 | ||
|
|
d5edadabe6 | ||
|
|
05247bb24e | ||
|
|
f69f026ac3 | ||
|
|
015247345c | ||
|
|
0ba516866f | ||
|
|
7afba6b8b5 | ||
|
|
08cc15a55c | ||
|
|
24c34eb3fc | ||
|
|
ba5ba71f35 | ||
|
|
d4065fee78 | ||
|
|
d86f7f41dc |
@@ -34,8 +34,8 @@ const createPattern = packageName => [
|
||||
{
|
||||
group: ['@affine/env/constant'],
|
||||
message:
|
||||
'Do not import from @affine/env/constant. Use `environment.isDesktop` instead',
|
||||
importNames: ['isDesktop'],
|
||||
'Do not import from @affine/env/constant. Use `environment.isElectron` instead',
|
||||
importNames: ['isElectron'],
|
||||
},
|
||||
];
|
||||
|
||||
@@ -248,7 +248,7 @@ const config = {
|
||||
'warn',
|
||||
{
|
||||
additionalHooks:
|
||||
'(useAsyncCallback|useCatchEventCallback|useDraggable|useDropTarget)',
|
||||
'(useAsyncCallback|useCatchEventCallback|useDraggable|useDropTarget|useRefEffect)',
|
||||
},
|
||||
],
|
||||
},
|
||||
|
||||
36
.github/actions/cluster-auth/action.yml
vendored
Normal file
36
.github/actions/cluster-auth/action.yml
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
name: 'Auth to Cluster'
|
||||
description: 'Auth to the GCP Cluster'
|
||||
inputs:
|
||||
gcp-project-number:
|
||||
description: 'GCP project number'
|
||||
required: true
|
||||
gcp-project-id:
|
||||
description: 'GCP project id'
|
||||
required: true
|
||||
service-account:
|
||||
description: 'Service account'
|
||||
cluster-name:
|
||||
description: 'Cluster name'
|
||||
cluster-location:
|
||||
description: 'Cluster location'
|
||||
|
||||
runs:
|
||||
using: 'composite'
|
||||
steps:
|
||||
- id: auth
|
||||
uses: google-github-actions/auth@v2
|
||||
with:
|
||||
workload_identity_provider: 'projects/${{ inputs.gcp-project-number }}/locations/global/workloadIdentityPools/github-actions/providers/github-actions-helm-deploy'
|
||||
service_account: '${{ inputs.service-account }}'
|
||||
token_format: 'access_token'
|
||||
project_id: '${{ inputs.gcp-project-id }}'
|
||||
|
||||
- name: 'Setup gcloud cli'
|
||||
uses: 'google-github-actions/setup-gcloud@v2'
|
||||
with:
|
||||
install_components: 'gke-gcloud-auth-plugin'
|
||||
|
||||
- id: get-gke-credentials
|
||||
shell: bash
|
||||
run: |
|
||||
gcloud container clusters get-credentials ${{ inputs.cluster-name }} --region ${{ inputs.cluster-location }} --project ${{ inputs.gcp-project-id }}
|
||||
24
.github/actions/deploy/action.yml
vendored
24
.github/actions/deploy/action.yml
vendored
@@ -24,24 +24,14 @@ runs:
|
||||
shell: bash
|
||||
run: |
|
||||
echo "GIT_SHORT_HASH=$(git rev-parse --short HEAD)" >> "$GITHUB_ENV"
|
||||
- uses: azure/setup-helm@v4
|
||||
- id: auth
|
||||
uses: google-github-actions/auth@v2
|
||||
- name: 'Auth to cluster'
|
||||
uses: './.github/actions/cluster-auth'
|
||||
with:
|
||||
workload_identity_provider: 'projects/${{ inputs.gcp-project-number }}/locations/global/workloadIdentityPools/github-actions/providers/github-actions-helm-deploy'
|
||||
service_account: '${{ inputs.service-account }}'
|
||||
token_format: 'access_token'
|
||||
project_id: '${{ inputs.gcp-project-id }}'
|
||||
|
||||
- name: 'Setup gcloud cli'
|
||||
uses: 'google-github-actions/setup-gcloud@v2'
|
||||
with:
|
||||
install_components: 'gke-gcloud-auth-plugin'
|
||||
|
||||
- id: get-gke-credentials
|
||||
shell: bash
|
||||
run: |
|
||||
gcloud container clusters get-credentials ${{ inputs.cluster-name }} --region ${{ inputs.cluster-location }} --project ${{ inputs.gcp-project-id }}
|
||||
gcp-project-number: '${{ inputs.gcp-project-number }}'
|
||||
gcp-project-id: '${{ inputs.gcp-project-id }}'
|
||||
service-account: '${{ inputs.service-account }}'
|
||||
cluster-name: '${{ inputs.cluster-name }}'
|
||||
cluster-location: '${{ inputs.cluster-location }}'
|
||||
|
||||
- name: Deploy
|
||||
shell: bash
|
||||
|
||||
2
.github/actions/setup-node/action.yml
vendored
2
.github/actions/setup-node/action.yml
vendored
@@ -156,7 +156,7 @@ runs:
|
||||
- name: Install Playwright's dependencies
|
||||
shell: bash
|
||||
if: inputs.playwright-install == 'true'
|
||||
run: yarn playwright install --with-deps chromium
|
||||
run: yarn playwright install --with-deps chromium webkit
|
||||
env:
|
||||
PLAYWRIGHT_BROWSERS_PATH: ${{ github.workspace }}/node_modules/.cache/ms-playwright
|
||||
|
||||
|
||||
3
.github/deployment/front/Dockerfile
vendored
3
.github/deployment/front/Dockerfile
vendored
@@ -1,7 +1,8 @@
|
||||
FROM openresty/openresty:1.25.3.1-0-buster
|
||||
FROM openresty/openresty:1.25.3.2-0-buster
|
||||
WORKDIR /app
|
||||
COPY ./packages/frontend/web/dist ./dist
|
||||
COPY ./packages/frontend/admin/dist ./admin
|
||||
COPY ./packages/frontend/mobile/dist ./mobile
|
||||
COPY ./.github/deployment/front/nginx.conf /usr/local/openresty/nginx/conf/nginx.conf
|
||||
COPY ./.github/deployment/front/affine.nginx.conf /etc/nginx/conf.d/affine.nginx.conf
|
||||
|
||||
|
||||
20
.github/deployment/front/affine.nginx.conf
vendored
20
.github/deployment/front/affine.nginx.conf
vendored
@@ -11,10 +11,28 @@ server {
|
||||
try_files $uri $uri/ =404;
|
||||
}
|
||||
|
||||
set $app_root_path /app/dist/;
|
||||
set $mobile_root /app/dist/;
|
||||
set_by_lua $affine_env 'return os.getenv("AFFINE_ENV")';
|
||||
|
||||
if ($affine_env = "dev") {
|
||||
set $mobile_root /app/mobile/;
|
||||
}
|
||||
|
||||
# https://gist.github.com/mariusom/6683dc52b1cad1a1f372e908bdb209d0
|
||||
if ($http_user_agent ~* "(android|bb\d+|meego).+mobile|avantgo|bada\/|blackberry|blazer|compal|elaine|fennec|hiptop|iemobile|ip(hone|od)|iris|kindle|lge |maemo|midp|mmp|mobile.+firefox|netfront|opera m(ob|in)i|palm( os)?|phone|p(ixi|re)\/|plucker|pocket|psp|series(4|6)0|symbian|treo|up\.(browser|link)|vodafone|wap|windows ce|xda|xiino") {
|
||||
set $app_root_path $mobile_root;
|
||||
}
|
||||
|
||||
if ($http_user_agent ~* "^(1207|6310|6590|3gso|4thp|50[1-6]i|770s|802s|a wa|abac|ac(er|oo|s\-)|ai(ko|rn)|al(av|ca|co)|amoi|an(ex|ny|yw)|aptu|ar(ch|go)|as(te|us)|attw|au(di|\-m|r |s )|avan|be(ck|ll|nq)|bi(lb|rd)|bl(ac|az)|br(e|v)w|bumb|bw\-(n|u)|c55\/|capi|ccwa|cdm\-|cell|chtm|cldc|cmd\-|co(mp|nd)|craw|da(it|ll|ng)|dbte|dc\-s|devi|dica|dmob|do(c|p)o|ds(12|\-d)|el(49|ai)|em(l2|ul)|er(ic|k0)|esl8|ez([4-7]0|os|wa|ze)|fetc|fly(\-|_)|g1 u|g560|gene|gf\-5|g\-mo|go(\.w|od)|gr(ad|un)|haie|hcit|hd\-(m|p|t)|hei\-|hi(pt|ta)|hp( i|ip)|hs\-c|ht(c(\-| |_|a|g|p|s|t)|tp)|hu(aw|tc)|i\-(20|go|ma)|i230|iac( |\-|\/)|ibro|idea|ig01|ikom|im1k|inno|ipaq|iris|ja(t|v)a|jbro|jemu|jigs|kddi|keji|kgt( |\/)|klon|kpt |kwc\-|kyo(c|k)|le(no|xi)|lg( g|\/(k|l|u)|50|54|\-[a-w])|libw|lynx|m1\-w|m3ga|m50\/|ma(te|ui|xo)|mc(01|21|ca)|m\-cr|me(rc|ri)|mi(o8|oa|ts)|mmef|mo(01|02|bi|de|do|t(\-| |o|v)|zz)|mt(50|p1|v )|mwbp|mywa|n10[0-2]|n20[2-3]|n30(0|2)|n50(0|2|5)|n7(0(0|1)|10)|ne((c|m)\-|on|tf|wf|wg|wt)|nok(6|i)|nzph|o2im|op(ti|wv)|oran|owg1|p800|pan(a|d|t)|pdxg|pg(13|\-([1-8]|c))|phil|pire|pl(ay|uc)|pn\-2|po(ck|rt|se)|prox|psio|pt\-g|qa\-a|qc(07|12|21|32|60|\-[2-7]|i\-)|qtek|r380|r600|raks|rim9|ro(ve|zo)|s55\/|sa(ge|ma|mm|ms|ny|va)|sc(01|h\-|oo|p\-)|sdk\/|se(c(\-|0|1)|47|mc|nd|ri)|sgh\-|shar|sie(\-|m)|sk\-0|sl(45|id)|sm(al|ar|b3|it|t5)|so(ft|ny)|sp(01|h\-|v\-|v )|sy(01|mb)|t2(18|50)|t6(00|10|18)|ta(gt|lk)|tcl\-|tdg\-|tel(i|m)|tim\-|t\-mo|to(pl|sh)|ts(70|m\-|m3|m5)|tx\-9|up(\.b|g1|si)|utst|v400|v750|veri|vi(rg|te)|vk(40|5[0-3]|\-v)|vm40|voda|vulc|vx(52|53|60|61|70|80|81|83|85|98)|w3c(\-| )|webc|whit|wi(g |nc|nw)|wmlb|wonu|x700|yas\-|your|zeto|zte\-)") {
|
||||
set $app_root_path $mobile_root;
|
||||
}
|
||||
|
||||
location / {
|
||||
root /app/dist/;
|
||||
root $app_root_path;
|
||||
index index.html;
|
||||
try_files $uri $uri/ /index.html;
|
||||
add_header Cache-Control "private, no-cache, no-store, max-age=0, must-revalidate";
|
||||
}
|
||||
|
||||
error_page 404 /404.html;
|
||||
|
||||
17
.github/deployment/front/nginx.conf
vendored
17
.github/deployment/front/nginx.conf
vendored
@@ -1,14 +1,15 @@
|
||||
worker_processes 4;
|
||||
worker_processes 4;
|
||||
error_log /var/log/nginx/error.log warn;
|
||||
pcre_jit on;
|
||||
env AFFINE_ENV;
|
||||
events {
|
||||
worker_connections 1024;
|
||||
worker_connections 1024;
|
||||
}
|
||||
http {
|
||||
include mime.types;
|
||||
log_format main '$remote_addr [$time_local] "$request" '
|
||||
'$status $body_bytes_sent "$http_referer" '
|
||||
'"$http_user_agent" "$http_x_forwarded_for"';
|
||||
access_log /var/log/nginx/access.log main;
|
||||
include /etc/nginx/conf.d/*.conf;
|
||||
include mime.types;
|
||||
log_format main '$remote_addr [$time_local] "$request" '
|
||||
'$status $body_bytes_sent "$http_referer" '
|
||||
'"$http_user_agent" "$http_x_forwarded_for"';
|
||||
access_log /var/log/nginx/access.log main;
|
||||
include /etc/nginx/conf.d/*.conf;
|
||||
}
|
||||
|
||||
@@ -77,8 +77,6 @@ spec:
|
||||
value: "{{ .Values.app.https }}"
|
||||
- name: ENABLE_R2_OBJECT_STORAGE
|
||||
value: "{{ .Values.app.objectStorage.r2.enabled }}"
|
||||
- name: ENABLE_CAPTCHA
|
||||
value: "{{ .Values.app.captcha.enabled }}"
|
||||
- name: FEATURES_EARLY_ACCESS_PREVIEW
|
||||
value: "{{ .Values.app.features.earlyAccessPreview }}"
|
||||
- name: FEATURES_SYNC_CLIENT_VERSION_CHECK
|
||||
@@ -204,12 +202,12 @@ spec:
|
||||
protocol: TCP
|
||||
livenessProbe:
|
||||
httpGet:
|
||||
path: /
|
||||
path: /info
|
||||
port: http
|
||||
initialDelaySeconds: {{ .Values.probe.initialDelaySeconds }}
|
||||
readinessProbe:
|
||||
httpGet:
|
||||
path: /
|
||||
path: /info
|
||||
port: http
|
||||
initialDelaySeconds: {{ .Values.probe.initialDelaySeconds }}
|
||||
resources:
|
||||
|
||||
@@ -4,6 +4,10 @@ metadata:
|
||||
name: {{ include "graphql.fullname" . }}
|
||||
labels:
|
||||
{{- include "graphql.labels" . | nindent 4 }}
|
||||
{{- with .Values.service.annotations }}
|
||||
annotations:
|
||||
{{- toYaml . | nindent 4 }}
|
||||
{{- end }}
|
||||
spec:
|
||||
type: {{ .Values.service.type }}
|
||||
ports:
|
||||
|
||||
@@ -27,6 +27,9 @@ spec:
|
||||
- name: {{ .Chart.Name }}
|
||||
image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
|
||||
imagePullPolicy: {{ .Values.image.pullPolicy }}
|
||||
env:
|
||||
- name: AFFINE_ENV
|
||||
value: "{{ .Release.Namespace }}"
|
||||
ports:
|
||||
- name: http
|
||||
containerPort: {{ .Values.service.port }}
|
||||
|
||||
7
.github/helm/affine/templates/ingress.yaml
vendored
7
.github/helm/affine/templates/ingress.yaml
vendored
@@ -60,13 +60,6 @@ spec:
|
||||
name: affine-graphql
|
||||
port:
|
||||
number: {{ .Values.graphql.service.port }}
|
||||
- path: /oauth
|
||||
pathType: Prefix
|
||||
backend:
|
||||
service:
|
||||
name: affine-graphql
|
||||
port:
|
||||
number: {{ .Values.graphql.service.port }}
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
backend:
|
||||
|
||||
4
.github/helm/affine/values.yaml
vendored
4
.github/helm/affine/values.yaml
vendored
@@ -36,14 +36,14 @@ graphql:
|
||||
type: ClusterIP
|
||||
port: 3000
|
||||
annotations:
|
||||
cloud.google.com/backend-config: '{"default": "affine-backendconfig"}'
|
||||
cloud.google.com/backend-config: '{"default": "affine-api-backendconfig"}'
|
||||
|
||||
sync:
|
||||
service:
|
||||
type: ClusterIP
|
||||
port: 3010
|
||||
annotations:
|
||||
cloud.google.com/backend-config: '{"default": "affine-backendconfig"}'
|
||||
cloud.google.com/backend-config: '{"default": "affine-api-backendconfig"}'
|
||||
|
||||
web:
|
||||
service:
|
||||
|
||||
10
.github/helm/separate-config/backend-config.yaml
vendored
Normal file
10
.github/helm/separate-config/backend-config.yaml
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
apiVersion: cloud.google.com/v1
|
||||
kind: BackendConfig
|
||||
metadata:
|
||||
name: "affine-api-backendconfig"
|
||||
spec:
|
||||
healthCheck:
|
||||
timeoutSec: 1
|
||||
type: HTTP
|
||||
requestPath: /info
|
||||
|
||||
33
.github/workflows/build-test.yml
vendored
33
.github/workflows/build-test.yml
vendored
@@ -143,6 +143,36 @@ jobs:
|
||||
path: ./test-results
|
||||
if-no-files-found: ignore
|
||||
|
||||
e2e-mobile-test:
|
||||
name: E2E Mobile Test
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
DISTRIBUTION: mobile
|
||||
IN_CI_TEST: true
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
shard: [1, 2, 3, 4, 5]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Node.js
|
||||
uses: ./.github/actions/setup-node
|
||||
with:
|
||||
playwright-install: true
|
||||
electron-install: false
|
||||
full-cache: true
|
||||
|
||||
- name: Run playwright tests
|
||||
run: yarn workspace @affine-test/affine-mobile e2e --forbid-only --shard=${{ matrix.shard }}/${{ strategy.job-total }}
|
||||
|
||||
- name: Upload test results
|
||||
if: ${{ failure() }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: test-results-e2e-mobile-${{ matrix.shard }}
|
||||
path: ./test-results
|
||||
if-no-files-found: ignore
|
||||
|
||||
e2e-migration-test:
|
||||
name: E2E Migration Test
|
||||
runs-on: ubuntu-latest
|
||||
@@ -180,7 +210,7 @@ jobs:
|
||||
- name: Setup Node.js
|
||||
uses: ./.github/actions/setup-node
|
||||
with:
|
||||
electron-install: false
|
||||
electron-install: true
|
||||
full-cache: true
|
||||
|
||||
- name: Download affine.linux-x64-gnu.node
|
||||
@@ -571,6 +601,7 @@ jobs:
|
||||
- lint
|
||||
- check-yarn-binary
|
||||
- e2e-test
|
||||
- e2e-mobile-test
|
||||
- e2e-migration-test
|
||||
- unit-test
|
||||
- server-test
|
||||
|
||||
176
.github/workflows/deploy.yml
vendored
176
.github/workflows/deploy.yml
vendored
@@ -21,6 +21,47 @@ permissions:
|
||||
packages: 'write'
|
||||
|
||||
jobs:
|
||||
output-prev-version:
|
||||
name: Output previous version
|
||||
runs-on: ubuntu-latest
|
||||
environment: ${{ github.event.inputs.flavor }}
|
||||
outputs:
|
||||
prev: ${{ steps.print.outputs.version }}
|
||||
namespace: ${{ steps.print.outputs.namespace }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Auth to Cluster
|
||||
uses: './.github/actions/cluster-auth'
|
||||
with:
|
||||
gcp-project-number: ${{ secrets.GCP_PROJECT_NUMBER }}
|
||||
gcp-project-id: ${{ secrets.GCP_PROJECT_ID }}
|
||||
service-account: ${{ secrets.GCP_HELM_DEPLOY_SERVICE_ACCOUNT }}
|
||||
cluster-name: ${{ secrets.GCP_CLUSTER_NAME }}
|
||||
cluster-location: ${{ secrets.GCP_CLUSTER_LOCATION }}
|
||||
- name: Output previous version
|
||||
id: print
|
||||
run: |
|
||||
namespace=""
|
||||
if [ "${{ github.event.inputs.flavor }}" = "canary" ]; then
|
||||
namespace="dev"
|
||||
elif [ "${{ github.event.inputs.flavor }}" = "beta" ]; then
|
||||
namespace="beta"
|
||||
elif [ "${{ github.event.inputs.flavor }}" = "stable" ]; then
|
||||
namespace="production"
|
||||
else
|
||||
echo "Invalid flavor: ${{ github.event.inputs.flavor }}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Namespace set to: $namespace"
|
||||
|
||||
# Get the previous version from the deployment
|
||||
prev_version=$(kubectl get deployment -n $namespace affine-graphql -o=jsonpath='{.spec.template.spec.containers[0].image}' | awk -F '-' '{print $3}')
|
||||
|
||||
echo "Previous version: $prev_version"
|
||||
echo "version=$prev_version" >> $GITHUB_OUTPUT
|
||||
echo "namesapce=$namespace" >> $GITHUB_OUTPUT
|
||||
|
||||
build-server-image:
|
||||
name: Build Server Image
|
||||
uses: ./.github/workflows/build-server-image.yml
|
||||
@@ -71,7 +112,7 @@ jobs:
|
||||
uses: ./.github/actions/setup-version
|
||||
- name: Setup Node.js
|
||||
uses: ./.github/actions/setup-node
|
||||
- name: Build Core
|
||||
- name: Build Admin
|
||||
run: yarn nx build @affine/admin --skip-nx-cache
|
||||
env:
|
||||
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
|
||||
@@ -92,12 +133,45 @@ jobs:
|
||||
path: ./packages/frontend/admin/dist
|
||||
if-no-files-found: error
|
||||
|
||||
build-mobile:
|
||||
name: Build @affine/mobile
|
||||
runs-on: ubuntu-latest
|
||||
environment: ${{ github.event.inputs.flavor }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Version
|
||||
id: version
|
||||
uses: ./.github/actions/setup-version
|
||||
- name: Setup Node.js
|
||||
uses: ./.github/actions/setup-node
|
||||
- name: Build Mobile
|
||||
run: yarn nx build @affine/mobile --skip-nx-cache
|
||||
env:
|
||||
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
|
||||
R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
|
||||
R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
|
||||
BUILD_TYPE: ${{ github.event.inputs.flavor }}
|
||||
CAPTCHA_SITE_KEY: ${{ secrets.CAPTCHA_SITE_KEY }}
|
||||
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
|
||||
SENTRY_PROJECT: 'affine-mobile'
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
|
||||
PERFSEE_TOKEN: ${{ secrets.PERFSEE_TOKEN }}
|
||||
MIXPANEL_TOKEN: ${{ secrets.MIXPANEL_TOKEN }}
|
||||
- name: Upload mobile artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: mobile
|
||||
path: ./packages/frontend/mobile/dist
|
||||
if-no-files-found: error
|
||||
|
||||
build-frontend-image:
|
||||
name: Build Frontend Image
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- build-web
|
||||
- build-admin
|
||||
- build-mobile
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Download web artifact
|
||||
@@ -110,6 +184,11 @@ jobs:
|
||||
with:
|
||||
name: admin
|
||||
path: ./packages/frontend/admin/dist
|
||||
- name: Download mobile artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: mobile
|
||||
path: ./packages/frontend/mobile/dist
|
||||
- name: Setup env
|
||||
run: |
|
||||
echo "GIT_SHORT_HASH=$(git rev-parse --short HEAD)" >> "$GITHUB_ENV"
|
||||
@@ -193,3 +272,98 @@ jobs:
|
||||
STRIPE_API_KEY: ${{ secrets.STRIPE_API_KEY }}
|
||||
STRIPE_WEBHOOK_KEY: ${{ secrets.STRIPE_WEBHOOK_KEY }}
|
||||
STATIC_IP_NAME: ${{ secrets.STATIC_IP_NAME }}
|
||||
|
||||
deploy-done:
|
||||
needs:
|
||||
- output-prev-version
|
||||
- build-web
|
||||
- build-admin
|
||||
- build-mobile
|
||||
- build-frontend-image
|
||||
- build-server-image
|
||||
- deploy
|
||||
if: always()
|
||||
runs-on: ubuntu-latest
|
||||
name: Post deploy message
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
repository: toeverything/blocksuite
|
||||
path: blocksuite
|
||||
fetch-depth: 0
|
||||
fetch-tags: true
|
||||
- name: Setup Node.js
|
||||
uses: ./.github/actions/setup-node
|
||||
with:
|
||||
extra-flags: 'workspaces focus @affine/changelog'
|
||||
electron-install: false
|
||||
- name: Output deployed info
|
||||
if: ${{ always() && !contains(needs.*.result, 'failure') && !contains(needs.*.result, 'cancelled') }}
|
||||
id: set_info
|
||||
run: |
|
||||
if [ "${{ github.event.inputs.flavor }}" = "canary" ]; then
|
||||
echo "deployed_url=https://affine.fail" >> $GITHUB_OUTPUT
|
||||
elif [ "${{ github.event.inputs.flavor }}" = "beta" ]; then
|
||||
echo "deployed_url=https://insider.affine.pro" >> $GITHUB_OUTPUT
|
||||
elif [ "${{ github.event.inputs.flavor }}" = "stable" ]; then
|
||||
echo "deployed_url=https://app.affine.pro" >> $GITHUB_OUTPUT
|
||||
else
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
|
||||
- name: Post Success event to a Slack channel
|
||||
if: ${{ always() && !contains(needs.*.result, 'failure') && !contains(needs.*.result, 'cancelled') }}
|
||||
run: node ./tools/changelog/index.js
|
||||
env:
|
||||
CHANNEL_ID: ${{ secrets.RELEASE_SLACK_CHNNEL_ID }}
|
||||
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
|
||||
DEPLOYED_URL: ${{ steps.set_info.outputs.deployed_url }}
|
||||
PREV_VERSION: ${{ needs.output-prev-version.outputs.prev }}
|
||||
NAMESPACE: ${{ needs.output-prev-version.outputs.namespace }}
|
||||
DEPLOYMENT: 'SERVER'
|
||||
FLAVOR: ${{ github.event.inputs.flavor }}
|
||||
BLOCKSUITE_REPO_PATH: ${{ github.workspace }}/blocksuite
|
||||
- name: Post Failed event to a Slack channel
|
||||
id: failed-slack
|
||||
uses: slackapi/slack-github-action@v1.27.0
|
||||
if: ${{ always() && contains(needs.*.result, 'failure') }}
|
||||
with:
|
||||
channel-id: ${{ secrets.RELEASE_SLACK_CHNNEL_ID }}
|
||||
payload: |
|
||||
{
|
||||
"blocks": [
|
||||
{
|
||||
"type": "section",
|
||||
"text": {
|
||||
"text": "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Backend deploy failed `${{ github.event.inputs.flavor }}`>",
|
||||
"type": "mrkdwn"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
env:
|
||||
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
|
||||
- name: Post Cancel event to a Slack channel
|
||||
id: cancel-slack
|
||||
uses: slackapi/slack-github-action@v1.27.0
|
||||
if: ${{ always() && contains(needs.*.result, 'cancelled') && !contains(needs.*.result, 'failure') }}
|
||||
with:
|
||||
channel-id: ${{ secrets.RELEASE_SLACK_CHNNEL_ID }}
|
||||
payload: |
|
||||
{
|
||||
"blocks": [
|
||||
{
|
||||
"type": "section",
|
||||
"text": {
|
||||
"text": "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Backend deploy cancelled `${{ github.event.inputs.flavor }}`>",
|
||||
"type": "mrkdwn"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
env:
|
||||
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
|
||||
|
||||
894
.yarn/releases/yarn-4.3.1.cjs
vendored
894
.yarn/releases/yarn-4.3.1.cjs
vendored
File diff suppressed because one or more lines are too long
925
.yarn/releases/yarn-4.4.1.cjs
vendored
Executable file
925
.yarn/releases/yarn-4.4.1.cjs
vendored
Executable file
File diff suppressed because one or more lines are too long
@@ -12,4 +12,4 @@ npmPublishAccess: public
|
||||
|
||||
npmPublishRegistry: "https://registry.npmjs.org"
|
||||
|
||||
yarnPath: .yarn/releases/yarn-4.3.1.cjs
|
||||
yarnPath: .yarn/releases/yarn-4.4.1.cjs
|
||||
|
||||
619
Cargo.lock
generated
619
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -18,7 +18,7 @@ rand = "0.8"
|
||||
serde = "1"
|
||||
serde_json = "1"
|
||||
sha3 = "0.10"
|
||||
sqlx = { version = "0.7", default-features = false, features = ["chrono", "macros", "migrate", "runtime-tokio", "sqlite", "tls-rustls"] }
|
||||
sqlx = { version = "0.8", default-features = false, features = ["chrono", "macros", "migrate", "runtime-tokio", "sqlite", "tls-rustls"] }
|
||||
tiktoken-rs = "0.5"
|
||||
tokio = "1.37"
|
||||
uuid = "1.8"
|
||||
|
||||
@@ -176,6 +176,12 @@ Thanks to [Chromatic](https://www.chromatic.com/) for providing the visual testi
|
||||
|
||||
## License
|
||||
|
||||
### Editions
|
||||
|
||||
- AFFiNE Community Edition (CE) is the current available version, it's free for self-host under the MIT license.
|
||||
|
||||
- AFFiNE Enterprise Edition (EE) is yet to be published, it will have more advanced features and enterprise-oriented offerings, including but not exclusive to rebranding and SSO, advanced admin and audit, etc., you may refer to https://affine.pro/pricing for more information
|
||||
|
||||
See [LICENSE] for details.
|
||||
|
||||
[all-contributors-badge]: https://img.shields.io/github/contributors/toeverything/AFFiNE
|
||||
|
||||
22
package.json
22
package.json
@@ -56,26 +56,26 @@
|
||||
"@affine/cli": "workspace:*",
|
||||
"@commitlint/cli": "^19.2.1",
|
||||
"@commitlint/config-conventional": "^19.1.0",
|
||||
"@faker-js/faker": "^8.4.1",
|
||||
"@faker-js/faker": "^9.0.0",
|
||||
"@istanbuljs/schema": "^0.1.3",
|
||||
"@magic-works/i18n-codegen": "^0.6.0",
|
||||
"@nx/vite": "^19.5.3",
|
||||
"@playwright/test": "=1.44.1",
|
||||
"@playwright/test": "=1.46.1",
|
||||
"@taplo/cli": "^0.7.0",
|
||||
"@testing-library/react": "^16.0.0",
|
||||
"@toeverything/infra": "workspace:*",
|
||||
"@types/affine__env": "workspace:*",
|
||||
"@types/eslint": "^8.56.7",
|
||||
"@types/eslint": "^9.0.0",
|
||||
"@types/node": "^20.12.7",
|
||||
"@typescript-eslint/eslint-plugin": "^7.6.0",
|
||||
"@typescript-eslint/parser": "^7.6.0",
|
||||
"@vanilla-extract/vite-plugin": "^4.0.7",
|
||||
"@vanilla-extract/webpack-plugin": "^2.3.7",
|
||||
"@vitejs/plugin-react-swc": "^3.6.0",
|
||||
"@vitest/coverage-istanbul": "1.6.0",
|
||||
"@vitest/ui": "1.6.0",
|
||||
"@vitest/coverage-istanbul": "2.0.5",
|
||||
"@vitest/ui": "2.0.5",
|
||||
"cross-env": "^7.0.3",
|
||||
"electron": "~30.2.0",
|
||||
"electron": "^31.0.0",
|
||||
"eslint": "^8.57.0",
|
||||
"eslint-config-prettier": "^9.1.0",
|
||||
"eslint-plugin-import-x": "^0.5.0",
|
||||
@@ -88,15 +88,15 @@
|
||||
"eslint-plugin-unused-imports": "^3.1.0",
|
||||
"eslint-plugin-vue": "^9.24.1",
|
||||
"fake-indexeddb": "6.0.0",
|
||||
"happy-dom": "^14.7.1",
|
||||
"happy-dom": "^15.0.0",
|
||||
"husky": "^9.0.11",
|
||||
"lint-staged": "^15.2.2",
|
||||
"msw": "^2.3.0",
|
||||
"nanoid": "^5.0.7",
|
||||
"nx": "^19.0.0",
|
||||
"nyc": "^17.0.0",
|
||||
"oxlint": "0.7.0",
|
||||
"prettier": "^3.2.5",
|
||||
"oxlint": "0.9.2",
|
||||
"prettier": "^3.3.3",
|
||||
"semver": "^7.6.0",
|
||||
"serve": "^14.2.1",
|
||||
"string-width": "^7.1.0",
|
||||
@@ -106,11 +106,11 @@
|
||||
"vite": "^5.2.8",
|
||||
"vite-plugin-istanbul": "^6.0.0",
|
||||
"vite-plugin-static-copy": "^1.0.2",
|
||||
"vitest": "1.6.0",
|
||||
"vitest": "2.0.5",
|
||||
"vitest-fetch-mock": "^0.3.0",
|
||||
"vitest-mock-extended": "^2.0.0"
|
||||
},
|
||||
"packageManager": "yarn@4.3.1",
|
||||
"packageManager": "yarn@4.4.1",
|
||||
"resolutions": {
|
||||
"array-buffer-byte-length": "npm:@nolyfill/array-buffer-byte-length@latest",
|
||||
"array-includes": "npm:@nolyfill/array-includes@latest",
|
||||
|
||||
@@ -33,7 +33,7 @@
|
||||
"build:debug": "napi build"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@napi-rs/cli": "3.0.0-alpha.60",
|
||||
"@napi-rs/cli": "3.0.0-alpha.62",
|
||||
"lib0": "^0.2.93",
|
||||
"nx": "^19.0.0",
|
||||
"nx-cloud": "^19.0.0",
|
||||
|
||||
1
packages/backend/server/.gitignore
vendored
1
packages/backend/server/.gitignore
vendored
@@ -1 +1,2 @@
|
||||
.env
|
||||
static/
|
||||
|
||||
@@ -0,0 +1,3 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "ai_prompts_metadata" ADD COLUMN "modified" BOOLEAN NOT NULL DEFAULT false,
|
||||
ADD COLUMN "updated_at" TIMESTAMPTZ(3) NOT NULL DEFAULT CURRENT_TIMESTAMP;
|
||||
@@ -0,0 +1,13 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "user_snapshots" (
|
||||
"user_id" VARCHAR NOT NULL,
|
||||
"id" VARCHAR NOT NULL,
|
||||
"blob" BYTEA NOT NULL,
|
||||
"created_at" TIMESTAMPTZ(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updated_at" TIMESTAMPTZ(3) NOT NULL,
|
||||
|
||||
CONSTRAINT "user_snapshots_pkey" PRIMARY KEY ("user_id","id")
|
||||
);
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "user_snapshots" ADD CONSTRAINT "user_snapshots_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
@@ -0,0 +1,14 @@
|
||||
/*
|
||||
Warnings:
|
||||
|
||||
- The primary key for the `updates` table will be changed. If it partially fails, the table could be left without primary key constraint.
|
||||
|
||||
*/
|
||||
-- AlterTable
|
||||
ALTER TABLE "snapshots" ALTER COLUMN "seq" DROP NOT NULL;
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "updates" DROP CONSTRAINT "updates_pkey",
|
||||
ALTER COLUMN "created_at" DROP DEFAULT,
|
||||
ALTER COLUMN "seq" DROP NOT NULL,
|
||||
ADD CONSTRAINT "updates_pkey" PRIMARY KEY ("workspace_id", "guid", "created_at");
|
||||
@@ -0,0 +1,21 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "snapshot_histories" ADD COLUMN "created_by" VARCHAR;
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "snapshots" ADD COLUMN "created_by" VARCHAR,
|
||||
ADD COLUMN "updated_by" VARCHAR;
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "updates" ADD COLUMN "created_by" VARCHAR;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "snapshots" ADD CONSTRAINT "snapshots_created_by_fkey" FOREIGN KEY ("created_by") REFERENCES "users"("id") ON DELETE SET NULL ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "snapshots" ADD CONSTRAINT "snapshots_updated_by_fkey" FOREIGN KEY ("updated_by") REFERENCES "users"("id") ON DELETE SET NULL ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "updates" ADD CONSTRAINT "updates_created_by_fkey" FOREIGN KEY ("created_by") REFERENCES "users"("id") ON DELETE SET NULL ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "snapshot_histories" ADD CONSTRAINT "snapshot_histories_created_by_fkey" FOREIGN KEY ("created_by") REFERENCES "users"("id") ON DELETE SET NULL ON UPDATE CASCADE;
|
||||
@@ -21,11 +21,11 @@
|
||||
"dependencies": {
|
||||
"@apollo/server": "^4.10.2",
|
||||
"@aws-sdk/client-s3": "^3.620.0",
|
||||
"@fal-ai/serverless-client": "^0.13.0",
|
||||
"@fal-ai/serverless-client": "^0.14.0",
|
||||
"@google-cloud/opentelemetry-cloud-monitoring-exporter": "^0.19.0",
|
||||
"@google-cloud/opentelemetry-cloud-trace-exporter": "^2.2.0",
|
||||
"@google-cloud/opentelemetry-resource-util": "^2.2.0",
|
||||
"@keyv/redis": "^2.8.4",
|
||||
"@keyv/redis": "^3.0.0",
|
||||
"@nestjs/apollo": "^12.1.0",
|
||||
"@nestjs/common": "^10.3.7",
|
||||
"@nestjs/core": "^10.3.7",
|
||||
@@ -34,26 +34,25 @@
|
||||
"@nestjs/platform-express": "^10.3.7",
|
||||
"@nestjs/platform-socket.io": "^10.3.7",
|
||||
"@nestjs/schedule": "^4.0.1",
|
||||
"@nestjs/serve-static": "^4.0.2",
|
||||
"@nestjs/throttler": "5.2.0",
|
||||
"@nestjs/throttler": "6.2.1",
|
||||
"@nestjs/websockets": "^10.3.7",
|
||||
"@node-rs/argon2": "^1.8.0",
|
||||
"@node-rs/crc32": "^1.10.0",
|
||||
"@node-rs/jsonwebtoken": "^0.5.2",
|
||||
"@opentelemetry/api": "^1.9.0",
|
||||
"@opentelemetry/core": "^1.25.0",
|
||||
"@opentelemetry/exporter-prometheus": "^0.52.0",
|
||||
"@opentelemetry/exporter-prometheus": "^0.53.0",
|
||||
"@opentelemetry/exporter-zipkin": "^1.25.0",
|
||||
"@opentelemetry/host-metrics": "^0.35.2",
|
||||
"@opentelemetry/instrumentation": "^0.52.0",
|
||||
"@opentelemetry/instrumentation-graphql": "^0.42.0",
|
||||
"@opentelemetry/instrumentation-http": "^0.52.0",
|
||||
"@opentelemetry/instrumentation-ioredis": "^0.42.0",
|
||||
"@opentelemetry/instrumentation-nestjs-core": "^0.39.0",
|
||||
"@opentelemetry/instrumentation-socket.io": "^0.41.0",
|
||||
"@opentelemetry/instrumentation": "^0.53.0",
|
||||
"@opentelemetry/instrumentation-graphql": "^0.43.0",
|
||||
"@opentelemetry/instrumentation-http": "^0.53.0",
|
||||
"@opentelemetry/instrumentation-ioredis": "^0.43.0",
|
||||
"@opentelemetry/instrumentation-nestjs-core": "^0.40.0",
|
||||
"@opentelemetry/instrumentation-socket.io": "^0.42.0",
|
||||
"@opentelemetry/resources": "^1.25.0",
|
||||
"@opentelemetry/sdk-metrics": "^1.25.0",
|
||||
"@opentelemetry/sdk-node": "^0.52.0",
|
||||
"@opentelemetry/sdk-node": "^0.53.0",
|
||||
"@opentelemetry/sdk-trace-node": "^1.25.0",
|
||||
"@opentelemetry/semantic-conventions": "^1.25.0",
|
||||
"@prisma/client": "^5.15.0",
|
||||
@@ -71,13 +70,13 @@
|
||||
"graphql-upload": "^16.0.2",
|
||||
"html-validate": "^8.20.1",
|
||||
"ioredis": "^5.3.2",
|
||||
"keyv": "^4.5.4",
|
||||
"keyv": "^5.0.0",
|
||||
"lodash-es": "^4.17.21",
|
||||
"mixpanel": "^0.18.0",
|
||||
"mustache": "^4.2.0",
|
||||
"nanoid": "^5.0.7",
|
||||
"nest-commander": "^3.12.5",
|
||||
"nestjs-throttler-storage-redis": "^0.4.1",
|
||||
"nestjs-throttler-storage-redis": "^0.5.0",
|
||||
"nodemailer": "^6.9.13",
|
||||
"on-headers": "^1.0.2",
|
||||
"openai": "^4.33.0",
|
||||
@@ -138,6 +137,7 @@
|
||||
],
|
||||
"watchMode": {
|
||||
"ignoreChanges": [
|
||||
"static/**",
|
||||
"**/*.gen.*"
|
||||
]
|
||||
},
|
||||
|
||||
@@ -32,6 +32,11 @@ model User {
|
||||
sessions UserSession[]
|
||||
aiSessions AiSession[]
|
||||
updatedRuntimeConfigs RuntimeConfig[]
|
||||
userSnapshots UserSnapshot[]
|
||||
createdSnapshot Snapshot[] @relation("createdSnapshot")
|
||||
updatedSnapshot Snapshot[] @relation("updatedSnapshot")
|
||||
createdUpdate Update[] @relation("createdUpdate")
|
||||
createdHistory SnapshotHistory[] @relation("createdHistory")
|
||||
|
||||
@@index([email])
|
||||
@@map("users")
|
||||
@@ -57,12 +62,13 @@ model ConnectedAccount {
|
||||
}
|
||||
|
||||
model Session {
|
||||
id String @id @default(uuid()) @db.VarChar
|
||||
expiresAt DateTime? @map("expires_at") @db.Timestamptz(3)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
|
||||
|
||||
id String @id @default(uuid()) @db.VarChar
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
|
||||
userSessions UserSession[]
|
||||
|
||||
// @deprecated use [UserSession.expiresAt]
|
||||
deprecated_expiresAt DateTime? @map("expires_at") @db.Timestamptz(3)
|
||||
|
||||
@@map("multiple_users_sessions")
|
||||
}
|
||||
|
||||
@@ -235,25 +241,60 @@ model Snapshot {
|
||||
workspaceId String @map("workspace_id") @db.VarChar
|
||||
id String @default(uuid()) @map("guid") @db.VarChar
|
||||
blob Bytes @db.ByteA
|
||||
seq Int @default(0) @db.Integer
|
||||
state Bytes? @db.ByteA
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
|
||||
// the `updated_at` field will not record the time of record changed,
|
||||
// but the created time of last seen update that has been merged into snapshot.
|
||||
updatedAt DateTime @map("updated_at") @db.Timestamptz(3)
|
||||
createdBy String? @map("created_by") @db.VarChar
|
||||
updatedBy String? @map("updated_by") @db.VarChar
|
||||
|
||||
// should not delete origin snapshot even if user is deleted
|
||||
// we only delete the snapshot if the workspace is deleted
|
||||
createdByUser User? @relation(name: "createdSnapshot", fields: [createdBy], references: [id], onDelete: SetNull)
|
||||
updatedByUser User? @relation(name: "updatedSnapshot", fields: [updatedBy], references: [id], onDelete: SetNull)
|
||||
|
||||
// @deprecated use updatedAt only
|
||||
seq Int? @default(0) @db.Integer
|
||||
|
||||
// we need to clear all hanging updates and snapshots before enable the foreign key on workspaceId
|
||||
// workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@id([id, workspaceId])
|
||||
@@map("snapshots")
|
||||
}
|
||||
|
||||
// user snapshots are special snapshots for user storage like personal app settings, distinguished from workspace snapshots
|
||||
// basically they share the same structure with workspace snapshots
|
||||
// but for convenience, we don't fork the updates queue and hisotry for user snapshots, until we have to
|
||||
// which means all operation on user snapshot will happen in-pace
|
||||
model UserSnapshot {
|
||||
userId String @map("user_id") @db.VarChar
|
||||
id String @map("id") @db.VarChar
|
||||
blob Bytes @db.ByteA
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
|
||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(3)
|
||||
|
||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@id([userId, id])
|
||||
@@map("user_snapshots")
|
||||
}
|
||||
|
||||
model Update {
|
||||
workspaceId String @map("workspace_id") @db.VarChar
|
||||
id String @map("guid") @db.VarChar
|
||||
seq Int @db.Integer
|
||||
blob Bytes @db.ByteA
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
|
||||
createdAt DateTime @map("created_at") @db.Timestamptz(3)
|
||||
createdBy String? @map("created_by") @db.VarChar
|
||||
|
||||
@@id([workspaceId, id, seq])
|
||||
// will delete createor record if createor's account is deleted
|
||||
createdByUser User? @relation(name: "createdUpdate", fields: [createdBy], references: [id], onDelete: SetNull)
|
||||
|
||||
// @deprecated use createdAt only
|
||||
seq Int? @db.Integer
|
||||
|
||||
@@id([workspaceId, id, createdAt])
|
||||
@@map("updates")
|
||||
}
|
||||
|
||||
@@ -264,6 +305,10 @@ model SnapshotHistory {
|
||||
blob Bytes @db.ByteA
|
||||
state Bytes? @db.ByteA
|
||||
expiredAt DateTime @map("expired_at") @db.Timestamptz(3)
|
||||
createdBy String? @map("created_by") @db.VarChar
|
||||
|
||||
// will delete createor record if creator's account is deleted
|
||||
createdByUser User? @relation(name: "createdHistory", fields: [createdBy], references: [id], onDelete: SetNull)
|
||||
|
||||
@@id([workspaceId, id, timestamp])
|
||||
@@map("snapshot_histories")
|
||||
@@ -367,6 +412,9 @@ model AiPrompt {
|
||||
model String @db.VarChar
|
||||
config Json? @db.Json
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
|
||||
updatedAt DateTime @default(now()) @map("updated_at") @db.Timestamptz(3)
|
||||
// whether the prompt is modified by the admin panel
|
||||
modified Boolean @default(false)
|
||||
|
||||
messages AiPromptMessage[]
|
||||
sessions AiSession[]
|
||||
|
||||
@@ -3,7 +3,7 @@ import { Controller, Get } from '@nestjs/common';
|
||||
import { Public } from './core/auth';
|
||||
import { Config, SkipThrottle } from './fundamentals';
|
||||
|
||||
@Controller('/')
|
||||
@Controller('/info')
|
||||
export class AppController {
|
||||
constructor(private readonly config: Config) {}
|
||||
|
||||
@@ -15,7 +15,7 @@ export class AppController {
|
||||
compatibility: this.config.version,
|
||||
message: `AFFiNE ${this.config.version} Server`,
|
||||
type: this.config.type,
|
||||
flavor: this.config.flavor,
|
||||
flavor: this.config.flavor.type,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
import { join } from 'node:path';
|
||||
|
||||
import {
|
||||
DynamicModule,
|
||||
ForwardReference,
|
||||
@@ -7,16 +5,16 @@ import {
|
||||
Module,
|
||||
} from '@nestjs/common';
|
||||
import { ScheduleModule } from '@nestjs/schedule';
|
||||
import { ServeStaticModule } from '@nestjs/serve-static';
|
||||
import { get } from 'lodash-es';
|
||||
|
||||
import { AppController } from './app.controller';
|
||||
import { AuthModule } from './core/auth';
|
||||
import { ADD_ENABLED_FEATURES, ServerConfigModule } from './core/config';
|
||||
import { DocModule } from './core/doc';
|
||||
import { DocStorageModule } from './core/doc';
|
||||
import { FeatureModule } from './core/features';
|
||||
import { PermissionModule } from './core/permission';
|
||||
import { QuotaModule } from './core/quota';
|
||||
import { CustomSetupModule } from './core/setup';
|
||||
import { SelfhostModule } from './core/selfhost';
|
||||
import { StorageModule } from './core/storage';
|
||||
import { SyncModule } from './core/sync';
|
||||
import { UserModule } from './core/user';
|
||||
@@ -137,7 +135,7 @@ export class AppModuleBuilder {
|
||||
compile() {
|
||||
@Module({
|
||||
imports: this.modules,
|
||||
controllers: this.config.isSelfhosted ? [] : [AppController],
|
||||
controllers: [AppController],
|
||||
})
|
||||
class AppModule {}
|
||||
|
||||
@@ -145,20 +143,20 @@ export class AppModuleBuilder {
|
||||
}
|
||||
}
|
||||
|
||||
function buildAppModule() {
|
||||
export function buildAppModule() {
|
||||
AFFiNE = mergeConfigOverride(AFFiNE);
|
||||
const factor = new AppModuleBuilder(AFFiNE);
|
||||
|
||||
factor
|
||||
// common fundamental modules
|
||||
// basic
|
||||
.use(...FunctionalityModules)
|
||||
.useIf(config => config.flavor.sync, WebSocketModule)
|
||||
|
||||
// auth
|
||||
.use(UserModule, AuthModule)
|
||||
.use(UserModule, AuthModule, PermissionModule)
|
||||
|
||||
// business modules
|
||||
.use(DocModule)
|
||||
.use(DocStorageModule)
|
||||
|
||||
// sync server only
|
||||
.useIf(config => config.flavor.sync, SyncModule)
|
||||
@@ -166,27 +164,16 @@ function buildAppModule() {
|
||||
// graphql server only
|
||||
.useIf(
|
||||
config => config.flavor.graphql,
|
||||
ServerConfigModule,
|
||||
GqlModule,
|
||||
StorageModule,
|
||||
ServerConfigModule,
|
||||
WorkspaceModule,
|
||||
FeatureModule,
|
||||
QuotaModule
|
||||
)
|
||||
|
||||
// self hosted server only
|
||||
.useIf(
|
||||
config => config.isSelfhosted,
|
||||
CustomSetupModule,
|
||||
ServeStaticModule.forRoot({
|
||||
rootPath: join('/app', 'static'),
|
||||
exclude: ['/admin*'],
|
||||
}),
|
||||
ServeStaticModule.forRoot({
|
||||
rootPath: join('/app', 'static', 'admin'),
|
||||
serveRoot: '/admin',
|
||||
})
|
||||
);
|
||||
.useIf(config => config.isSelfhosted, SelfhostModule);
|
||||
|
||||
// plugin modules
|
||||
ENABLED_PLUGINS.forEach(name => {
|
||||
|
||||
@@ -25,6 +25,7 @@ AFFiNE.ENV_MAP = {
|
||||
OAUTH_OIDC_CLAIM_MAP_EMAIL: 'plugins.oauth.providers.oidc.args.claim_email',
|
||||
OAUTH_OIDC_CLAIM_MAP_NAME: 'plugins.oauth.providers.oidc.args.claim_name',
|
||||
METRICS_CUSTOMER_IO_TOKEN: ['metrics.customerIo.token', 'string'],
|
||||
CAPTCHA_TURNSTILE_SECRET: ['plugins.captcha.turnstile.secret', 'string'],
|
||||
COPILOT_OPENAI_API_KEY: 'plugins.copilot.openai.apiKey',
|
||||
COPILOT_FAL_API_KEY: 'plugins.copilot.fal.apiKey',
|
||||
COPILOT_UNSPLASH_API_KEY: 'plugins.copilot.unsplashKey',
|
||||
|
||||
@@ -71,6 +71,14 @@ AFFiNE.use('payment', {
|
||||
});
|
||||
AFFiNE.use('oauth');
|
||||
|
||||
/* Captcha Plugin Default Config */
|
||||
AFFiNE.use('captcha', {
|
||||
turnstile: {},
|
||||
challenge: {
|
||||
bits: 20,
|
||||
},
|
||||
});
|
||||
|
||||
if (AFFiNE.deploy) {
|
||||
AFFiNE.mailer = {
|
||||
service: 'gmail',
|
||||
|
||||
@@ -95,6 +95,15 @@ AFFiNE.server.port = 3010;
|
||||
// });
|
||||
//
|
||||
//
|
||||
// /* Captcha Plugin Default Config */
|
||||
// AFFiNE.plugins.use('captcha', {
|
||||
// turnstile: {},
|
||||
// challenge: {
|
||||
// bits: 20,
|
||||
// },
|
||||
// });
|
||||
//
|
||||
//
|
||||
// /* Cloudflare R2 Plugin */
|
||||
// /* Enable if you choose to store workspace blobs or user avatars in Cloudflare R2 Storage Service */
|
||||
// AFFiNE.use('cloudflare-r2', {
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
import { randomUUID } from 'node:crypto';
|
||||
|
||||
import {
|
||||
Body,
|
||||
Controller,
|
||||
@@ -18,26 +16,34 @@ import {
|
||||
EarlyAccessRequired,
|
||||
EmailTokenNotFound,
|
||||
InternalServerError,
|
||||
InvalidEmail,
|
||||
InvalidEmailToken,
|
||||
SignUpForbidden,
|
||||
Throttle,
|
||||
URLHelper,
|
||||
UseNamedGuard,
|
||||
} from '../../fundamentals';
|
||||
import { UserService } from '../user';
|
||||
import { validators } from '../utils/validators';
|
||||
import { CurrentUser } from './current-user';
|
||||
import { Public } from './guard';
|
||||
import { AuthService, parseAuthUserSeqNum } from './service';
|
||||
import { AuthService } from './service';
|
||||
import { CurrentUser, Session } from './session';
|
||||
import { TokenService, TokenType } from './token';
|
||||
|
||||
class SignInCredential {
|
||||
email!: string;
|
||||
password?: string;
|
||||
interface PreflightResponse {
|
||||
registered: boolean;
|
||||
hasPassword: boolean;
|
||||
}
|
||||
|
||||
class MagicLinkCredential {
|
||||
email!: string;
|
||||
token!: string;
|
||||
interface SignInCredential {
|
||||
email: string;
|
||||
password?: string;
|
||||
callbackUrl?: string;
|
||||
}
|
||||
|
||||
interface MagicLinkCredential {
|
||||
email: string;
|
||||
token: string;
|
||||
}
|
||||
|
||||
@Throttle('strict')
|
||||
@@ -52,13 +58,44 @@ export class AuthController {
|
||||
) {}
|
||||
|
||||
@Public()
|
||||
@Post('/preflight')
|
||||
async preflight(
|
||||
@Body() params?: { email: string }
|
||||
): Promise<PreflightResponse> {
|
||||
if (!params?.email) {
|
||||
throw new InvalidEmail();
|
||||
}
|
||||
validators.assertValidEmail(params.email);
|
||||
|
||||
const user = await this.user.findUserWithHashedPasswordByEmail(
|
||||
params.email
|
||||
);
|
||||
|
||||
if (!user) {
|
||||
return {
|
||||
registered: false,
|
||||
hasPassword: false,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
registered: user.registered,
|
||||
hasPassword: !!user.password,
|
||||
};
|
||||
}
|
||||
|
||||
@Public()
|
||||
@UseNamedGuard('captcha')
|
||||
@Post('/sign-in')
|
||||
@Header('content-type', 'application/json')
|
||||
async signIn(
|
||||
@Req() req: Request,
|
||||
@Res() res: Response,
|
||||
@Body() credential: SignInCredential,
|
||||
@Query('redirect_uri') redirectUri = this.url.home
|
||||
/**
|
||||
* @deprecated
|
||||
*/
|
||||
@Query('redirect_uri') redirectUri?: string
|
||||
) {
|
||||
validators.assertValidEmail(credential.email);
|
||||
const canSignIn = await this.auth.canSignIn(credential.email);
|
||||
@@ -67,80 +104,86 @@ export class AuthController {
|
||||
}
|
||||
|
||||
if (credential.password) {
|
||||
const user = await this.auth.signIn(
|
||||
await this.passwordSignIn(
|
||||
req,
|
||||
res,
|
||||
credential.email,
|
||||
credential.password
|
||||
);
|
||||
|
||||
await this.auth.setCookie(req, res, user);
|
||||
res.status(HttpStatus.OK).send(user);
|
||||
} else {
|
||||
// send email magic link
|
||||
const user = await this.user.findUserByEmail(credential.email);
|
||||
if (!user) {
|
||||
const allowSignup = await this.config.runtime.fetch('auth/allowSignup');
|
||||
if (!allowSignup) {
|
||||
throw new SignUpForbidden();
|
||||
}
|
||||
}
|
||||
|
||||
const result = await this.sendSignInEmail(
|
||||
{ email: credential.email, signUp: !user },
|
||||
await this.sendMagicLink(
|
||||
req,
|
||||
res,
|
||||
credential.email,
|
||||
credential.callbackUrl,
|
||||
redirectUri
|
||||
);
|
||||
|
||||
if (result.rejected.length) {
|
||||
throw new InternalServerError('Failed to send sign-in email.');
|
||||
}
|
||||
|
||||
res.status(HttpStatus.OK).send({
|
||||
email: credential.email,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async sendSignInEmail(
|
||||
{ email, signUp }: { email: string; signUp: boolean },
|
||||
redirectUri: string
|
||||
async passwordSignIn(
|
||||
req: Request,
|
||||
res: Response,
|
||||
email: string,
|
||||
password: string
|
||||
) {
|
||||
const user = await this.auth.signIn(email, password);
|
||||
|
||||
await this.auth.setCookies(req, res, user.id);
|
||||
res.status(HttpStatus.OK).send(user);
|
||||
}
|
||||
|
||||
async sendMagicLink(
|
||||
_req: Request,
|
||||
res: Response,
|
||||
email: string,
|
||||
callbackUrl = '/magic-link',
|
||||
redirectUrl?: string
|
||||
) {
|
||||
// send email magic link
|
||||
const user = await this.user.findUserByEmail(email);
|
||||
if (!user) {
|
||||
const allowSignup = await this.config.runtime.fetch('auth/allowSignup');
|
||||
if (!allowSignup) {
|
||||
throw new SignUpForbidden();
|
||||
}
|
||||
}
|
||||
|
||||
const token = await this.token.createToken(TokenType.SignIn, email);
|
||||
|
||||
const magicLink = this.url.link('/magic-link', {
|
||||
const magicLink = this.url.link(callbackUrl, {
|
||||
token,
|
||||
email,
|
||||
redirect_uri: redirectUri,
|
||||
...(redirectUrl
|
||||
? {
|
||||
redirect_uri: redirectUrl,
|
||||
}
|
||||
: {}),
|
||||
});
|
||||
|
||||
const result = await this.auth.sendSignInEmail(email, magicLink, signUp);
|
||||
const result = await this.auth.sendSignInEmail(email, magicLink, !user);
|
||||
|
||||
return result;
|
||||
if (result.rejected.length) {
|
||||
throw new InternalServerError('Failed to send sign-in email.');
|
||||
}
|
||||
|
||||
res.status(HttpStatus.OK).send({
|
||||
email: email,
|
||||
});
|
||||
}
|
||||
|
||||
@Get('/sign-out')
|
||||
async signOut(
|
||||
@Req() req: Request,
|
||||
@Res() res: Response,
|
||||
@Query('redirect_uri') redirectUri?: string
|
||||
@Session() session: Session,
|
||||
@Body() { all }: { all: boolean }
|
||||
) {
|
||||
const session = await this.auth.signOut(
|
||||
req.cookies[AuthService.sessionCookieName],
|
||||
parseAuthUserSeqNum(req.headers[AuthService.authUserSeqHeaderName])
|
||||
await this.auth.signOut(
|
||||
session.sessionId,
|
||||
all ? undefined : session.userId
|
||||
);
|
||||
|
||||
if (session) {
|
||||
res.cookie(AuthService.sessionCookieName, session.id, {
|
||||
expires: session.expiresAt ?? void 0, // expiredAt is `string | null`
|
||||
...this.auth.cookieOptions,
|
||||
});
|
||||
} else {
|
||||
res.clearCookie(AuthService.sessionCookieName);
|
||||
}
|
||||
|
||||
if (redirectUri) {
|
||||
return this.url.safeRedirect(res, redirectUri);
|
||||
} else {
|
||||
return res.send(null);
|
||||
}
|
||||
res.status(HttpStatus.OK).send({});
|
||||
}
|
||||
|
||||
@Public()
|
||||
@@ -156,11 +199,11 @@ export class AuthController {
|
||||
|
||||
validators.assertValidEmail(email);
|
||||
|
||||
const valid = await this.token.verifyToken(TokenType.SignIn, token, {
|
||||
const tokenRecord = await this.token.verifyToken(TokenType.SignIn, token, {
|
||||
credential: email,
|
||||
});
|
||||
|
||||
if (!valid) {
|
||||
if (!tokenRecord) {
|
||||
throw new InvalidEmailToken();
|
||||
}
|
||||
|
||||
@@ -169,9 +212,8 @@ export class AuthController {
|
||||
registered: true,
|
||||
});
|
||||
|
||||
await this.auth.setCookie(req, res, user);
|
||||
|
||||
res.send({ id: user.id, email: user.email, name: user.name });
|
||||
await this.auth.setCookies(req, res, user.id);
|
||||
res.send({ id: user.id });
|
||||
}
|
||||
|
||||
@Throttle('default', { limit: 1200 })
|
||||
@@ -198,14 +240,4 @@ export class AuthController {
|
||||
users: await this.auth.getUserList(token),
|
||||
};
|
||||
}
|
||||
|
||||
@Public()
|
||||
@Get('/challenge')
|
||||
async challenge() {
|
||||
// TODO(@darksky): impl in following PR
|
||||
return {
|
||||
challenge: randomUUID(),
|
||||
resource: randomUUID(),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,7 +4,7 @@ import type {
|
||||
FactoryProvider,
|
||||
OnModuleInit,
|
||||
} from '@nestjs/common';
|
||||
import { Injectable, SetMetadata, UseGuards } from '@nestjs/common';
|
||||
import { Injectable, SetMetadata } from '@nestjs/common';
|
||||
import { ModuleRef, Reflector } from '@nestjs/core';
|
||||
import type { Request } from 'express';
|
||||
|
||||
@@ -16,16 +16,8 @@ import {
|
||||
parseCookies,
|
||||
} from '../../fundamentals';
|
||||
import { WEBSOCKET_OPTIONS } from '../../fundamentals/websocket';
|
||||
import { CurrentUser, UserSession } from './current-user';
|
||||
import { AuthService, parseAuthUserSeqNum } from './service';
|
||||
|
||||
function extractTokenFromHeader(authorization: string) {
|
||||
if (!/^Bearer\s/i.test(authorization)) {
|
||||
return;
|
||||
}
|
||||
|
||||
return authorization.substring(7);
|
||||
}
|
||||
import { AuthService } from './service';
|
||||
import { Session } from './session';
|
||||
|
||||
const PUBLIC_ENTRYPOINT_SYMBOL = Symbol('public');
|
||||
|
||||
@@ -46,8 +38,8 @@ export class AuthGuard implements CanActivate, OnModuleInit {
|
||||
const { req, res } = getRequestResponseFromContext(context);
|
||||
|
||||
const userSession = await this.signIn(req);
|
||||
if (res && userSession && userSession.session.expiresAt) {
|
||||
await this.auth.refreshUserSessionIfNeeded(req, res, userSession.session);
|
||||
if (res && userSession && userSession.expiresAt) {
|
||||
await this.auth.refreshUserSessionIfNeeded(res, userSession);
|
||||
}
|
||||
|
||||
// api is public
|
||||
@@ -60,43 +52,31 @@ export class AuthGuard implements CanActivate, OnModuleInit {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!req.user) {
|
||||
if (!userSession) {
|
||||
throw new AuthenticationRequired();
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
async signIn(
|
||||
req: Request
|
||||
): Promise<{ user: CurrentUser; session: UserSession } | null> {
|
||||
if (req.user && req.session) {
|
||||
return {
|
||||
user: req.user,
|
||||
session: req.session,
|
||||
};
|
||||
async signIn(req: Request): Promise<Session | null> {
|
||||
if (req.session) {
|
||||
return req.session;
|
||||
}
|
||||
|
||||
// compatibility with websocket request
|
||||
parseCookies(req);
|
||||
let sessionToken: string | undefined =
|
||||
req.cookies[AuthService.sessionCookieName];
|
||||
|
||||
if (!sessionToken && req.headers.authorization) {
|
||||
sessionToken = extractTokenFromHeader(req.headers.authorization);
|
||||
}
|
||||
// TODO(@forehalo): a cache for user session
|
||||
const userSession = await this.auth.getUserSessionFromRequest(req);
|
||||
|
||||
if (sessionToken) {
|
||||
const userSeq = parseAuthUserSeqNum(
|
||||
req.headers[AuthService.authUserSeqHeaderName]
|
||||
);
|
||||
if (userSession) {
|
||||
req.session = {
|
||||
...userSession.session,
|
||||
user: userSession.user,
|
||||
};
|
||||
|
||||
const userSession = await this.auth.getUserSession(sessionToken, userSeq);
|
||||
|
||||
if (userSession) {
|
||||
req.session = userSession.session;
|
||||
req.user = userSession.user;
|
||||
}
|
||||
|
||||
return userSession;
|
||||
return req.session;
|
||||
}
|
||||
|
||||
return null;
|
||||
@@ -104,26 +84,8 @@ export class AuthGuard implements CanActivate, OnModuleInit {
|
||||
}
|
||||
|
||||
/**
|
||||
* This guard is used to protect routes/queries/mutations that require a user to be logged in.
|
||||
*
|
||||
* The `@CurrentUser()` parameter decorator used in a `Auth` guarded queries would always give us the user because the `Auth` guard will
|
||||
* fast throw if user is not logged in.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* ```typescript
|
||||
* \@Auth()
|
||||
* \@Query(() => UserType)
|
||||
* user(@CurrentUser() user: CurrentUser) {
|
||||
* return user;
|
||||
* }
|
||||
* ```
|
||||
* Mark api to be public accessible
|
||||
*/
|
||||
export const Auth = () => {
|
||||
return UseGuards(AuthGuard);
|
||||
};
|
||||
|
||||
// api is public accessible
|
||||
export const Public = () => SetMetadata(PUBLIC_ENTRYPOINT_SYMBOL, true);
|
||||
|
||||
export const AuthWebsocketOptionsProvider: FactoryProvider = {
|
||||
|
||||
@@ -20,7 +20,7 @@ import { TokenService, TokenType } from './token';
|
||||
AuthGuard,
|
||||
AuthWebsocketOptionsProvider,
|
||||
],
|
||||
exports: [AuthService, AuthGuard, AuthWebsocketOptionsProvider],
|
||||
exports: [AuthService, AuthGuard, AuthWebsocketOptionsProvider, TokenService],
|
||||
controllers: [AuthController],
|
||||
})
|
||||
export class AuthModule {}
|
||||
@@ -28,4 +28,4 @@ export class AuthModule {}
|
||||
export * from './guard';
|
||||
export { ClientTokenType } from './resolver';
|
||||
export { AuthService, TokenService, TokenType };
|
||||
export * from './current-user';
|
||||
export * from './session';
|
||||
|
||||
@@ -11,11 +11,11 @@ import {
|
||||
|
||||
import {
|
||||
ActionForbidden,
|
||||
Config,
|
||||
EmailAlreadyUsed,
|
||||
EmailTokenNotFound,
|
||||
EmailVerificationRequired,
|
||||
InvalidEmailToken,
|
||||
LinkExpired,
|
||||
SameEmailProvided,
|
||||
SkipThrottle,
|
||||
Throttle,
|
||||
@@ -25,9 +25,9 @@ import { Admin } from '../common';
|
||||
import { UserService } from '../user';
|
||||
import { UserType } from '../user/types';
|
||||
import { validators } from '../utils/validators';
|
||||
import { CurrentUser } from './current-user';
|
||||
import { Public } from './guard';
|
||||
import { AuthService } from './service';
|
||||
import { CurrentUser } from './session';
|
||||
import { TokenService, TokenType } from './token';
|
||||
|
||||
@ObjectType('tokenType')
|
||||
@@ -46,7 +46,6 @@ export class ClientTokenType {
|
||||
@Resolver(() => UserType)
|
||||
export class AuthResolver {
|
||||
constructor(
|
||||
private readonly config: Config,
|
||||
private readonly url: URLHelper,
|
||||
private readonly auth: AuthService,
|
||||
private readonly user: UserService,
|
||||
@@ -66,7 +65,7 @@ export class AuthResolver {
|
||||
|
||||
@ResolveField(() => ClientTokenType, {
|
||||
name: 'token',
|
||||
deprecationReason: 'use [/api/auth/authorize]',
|
||||
deprecationReason: 'use [/api/auth/sign-in?native=true] instead',
|
||||
})
|
||||
async clientToken(
|
||||
@CurrentUser() currentUser: CurrentUser,
|
||||
@@ -76,39 +75,32 @@ export class AuthResolver {
|
||||
throw new ActionForbidden();
|
||||
}
|
||||
|
||||
const session = await this.auth.createUserSession(
|
||||
user,
|
||||
undefined,
|
||||
this.config.auth.accessToken.ttl
|
||||
);
|
||||
const userSession = await this.auth.createUserSession(user.id);
|
||||
|
||||
return {
|
||||
sessionToken: session.sessionId,
|
||||
token: session.sessionId,
|
||||
sessionToken: userSession.sessionId,
|
||||
token: userSession.sessionId,
|
||||
refresh: '',
|
||||
};
|
||||
}
|
||||
|
||||
@Mutation(() => UserType)
|
||||
@Public()
|
||||
@Mutation(() => Boolean)
|
||||
async changePassword(
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@Args('token') token: string,
|
||||
@Args('newPassword') newPassword: string
|
||||
@Args('newPassword') newPassword: string,
|
||||
@Args('userId', { type: () => String, nullable: true }) userId?: string
|
||||
) {
|
||||
const config = await this.config.runtime.fetchAll({
|
||||
'auth/password.max': true,
|
||||
'auth/password.min': true,
|
||||
});
|
||||
validators.assertValidPassword(newPassword, {
|
||||
min: config['auth/password.min'],
|
||||
max: config['auth/password.max'],
|
||||
});
|
||||
if (!userId) {
|
||||
throw new LinkExpired();
|
||||
}
|
||||
|
||||
// NOTE: Set & Change password are using the same token type.
|
||||
const valid = await this.token.verifyToken(
|
||||
TokenType.ChangePassword,
|
||||
token,
|
||||
{
|
||||
credential: user.id,
|
||||
credential: userId,
|
||||
}
|
||||
);
|
||||
|
||||
@@ -116,10 +108,10 @@ export class AuthResolver {
|
||||
throw new InvalidEmailToken();
|
||||
}
|
||||
|
||||
await this.auth.changePassword(user.id, newPassword);
|
||||
await this.auth.revokeUserSessions(user.id);
|
||||
await this.auth.changePassword(userId, newPassword);
|
||||
await this.auth.revokeUserSessions(userId);
|
||||
|
||||
return user;
|
||||
return true;
|
||||
}
|
||||
|
||||
@Mutation(() => UserType)
|
||||
@@ -128,7 +120,6 @@ export class AuthResolver {
|
||||
@Args('token') token: string,
|
||||
@Args('email') email: string
|
||||
) {
|
||||
validators.assertValidEmail(email);
|
||||
// @see [sendChangeEmail]
|
||||
const valid = await this.token.verifyToken(TokenType.VerifyEmail, token, {
|
||||
credential: user.id,
|
||||
@@ -151,8 +142,11 @@ export class AuthResolver {
|
||||
async sendChangePasswordEmail(
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@Args('callbackUrl') callbackUrl: string,
|
||||
// @deprecated
|
||||
@Args('email', { nullable: true }) _email?: string
|
||||
@Args('email', {
|
||||
nullable: true,
|
||||
deprecationReason: 'fetched from signed in user',
|
||||
})
|
||||
_email?: string
|
||||
) {
|
||||
if (!user.emailVerified) {
|
||||
throw new EmailVerificationRequired();
|
||||
@@ -163,7 +157,7 @@ export class AuthResolver {
|
||||
user.id
|
||||
);
|
||||
|
||||
const url = this.url.link(callbackUrl, { token });
|
||||
const url = this.url.link(callbackUrl, { userId: user.id, token });
|
||||
|
||||
const res = await this.auth.sendChangePasswordEmail(user.email, url);
|
||||
|
||||
@@ -174,21 +168,13 @@ export class AuthResolver {
|
||||
async sendSetPasswordEmail(
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@Args('callbackUrl') callbackUrl: string,
|
||||
@Args('email', { nullable: true }) _email?: string
|
||||
@Args('email', {
|
||||
nullable: true,
|
||||
deprecationReason: 'fetched from signed in user',
|
||||
})
|
||||
_email?: string
|
||||
) {
|
||||
if (!user.emailVerified) {
|
||||
throw new EmailVerificationRequired();
|
||||
}
|
||||
|
||||
const token = await this.token.createToken(
|
||||
TokenType.ChangePassword,
|
||||
user.id
|
||||
);
|
||||
|
||||
const url = this.url.link(callbackUrl, { token });
|
||||
|
||||
const res = await this.auth.sendSetPasswordEmail(user.email, url);
|
||||
return !res.rejected.length;
|
||||
return this.sendChangePasswordEmail(user, callbackUrl);
|
||||
}
|
||||
|
||||
// The change email step is:
|
||||
@@ -305,6 +291,7 @@ export class AuthResolver {
|
||||
TokenType.ChangePassword,
|
||||
userId
|
||||
);
|
||||
return this.url.link(callbackUrl, { token });
|
||||
|
||||
return this.url.link(callbackUrl, { userId, token });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,35 +5,12 @@ import { PrismaClient } from '@prisma/client';
|
||||
import type { CookieOptions, Request, Response } from 'express';
|
||||
import { assign, pick } from 'lodash-es';
|
||||
|
||||
import { Config, EmailAlreadyUsed, MailService } from '../../fundamentals';
|
||||
import { Config, MailService, SignUpForbidden } from '../../fundamentals';
|
||||
import { FeatureManagementService } from '../features/management';
|
||||
import { QuotaService } from '../quota/service';
|
||||
import { QuotaType } from '../quota/types';
|
||||
import { UserService } from '../user/service';
|
||||
import type { CurrentUser } from './current-user';
|
||||
|
||||
export function parseAuthUserSeqNum(value: any) {
|
||||
let seq: number = 0;
|
||||
switch (typeof value) {
|
||||
case 'number': {
|
||||
seq = value;
|
||||
break;
|
||||
}
|
||||
case 'string': {
|
||||
const result = value.match(/^([\d{0, 10}])$/);
|
||||
if (result?.[1]) {
|
||||
seq = Number(result[1]);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
default: {
|
||||
seq = 0;
|
||||
}
|
||||
}
|
||||
|
||||
return Math.max(0, seq);
|
||||
}
|
||||
import type { CurrentUser } from './session';
|
||||
|
||||
export function sessionUser(
|
||||
user: Pick<
|
||||
@@ -48,6 +25,14 @@ export function sessionUser(
|
||||
});
|
||||
}
|
||||
|
||||
function extractTokenFromHeader(authorization: string) {
|
||||
if (!/^Bearer\s/i.test(authorization)) {
|
||||
return;
|
||||
}
|
||||
|
||||
return authorization.substring(7);
|
||||
}
|
||||
|
||||
@Injectable()
|
||||
export class AuthService implements OnApplicationBootstrap {
|
||||
readonly cookieOptions: CookieOptions = {
|
||||
@@ -57,7 +42,7 @@ export class AuthService implements OnApplicationBootstrap {
|
||||
secure: this.config.server.https,
|
||||
};
|
||||
static readonly sessionCookieName = 'affine_session';
|
||||
static readonly authUserSeqHeaderName = 'x-auth-user';
|
||||
static readonly userCookieName = 'affine_user_id';
|
||||
|
||||
constructor(
|
||||
private readonly config: Config,
|
||||
@@ -83,7 +68,7 @@ export class AuthService implements OnApplicationBootstrap {
|
||||
await this.quota.switchUserQuota(devUser.id, QuotaType.ProPlanV1);
|
||||
await this.feature.addAdmin(devUser.id);
|
||||
await this.feature.addCopilot(devUser.id);
|
||||
} catch (e) {
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
@@ -93,46 +78,69 @@ export class AuthService implements OnApplicationBootstrap {
|
||||
return this.feature.canEarlyAccess(email);
|
||||
}
|
||||
|
||||
async signUp(
|
||||
name: string,
|
||||
email: string,
|
||||
password: string
|
||||
): Promise<CurrentUser> {
|
||||
const user = await this.user.findUserByEmail(email);
|
||||
|
||||
if (user) {
|
||||
throw new EmailAlreadyUsed();
|
||||
/**
|
||||
* This is a test only helper to quickly signup a user, do not use in production
|
||||
*/
|
||||
async signUp(email: string, password: string): Promise<CurrentUser> {
|
||||
if (!this.config.node.test) {
|
||||
throw new SignUpForbidden(
|
||||
'sign up helper is forbidden for non-test environment'
|
||||
);
|
||||
}
|
||||
|
||||
return this.user
|
||||
.createUser({
|
||||
name,
|
||||
.createUser_without_verification({
|
||||
email,
|
||||
password,
|
||||
})
|
||||
.then(sessionUser);
|
||||
}
|
||||
|
||||
async signIn(email: string, password: string) {
|
||||
const user = await this.user.signIn(email, password);
|
||||
async signIn(email: string, password: string): Promise<CurrentUser> {
|
||||
return this.user.signIn(email, password).then(sessionUser);
|
||||
}
|
||||
|
||||
return sessionUser(user);
|
||||
async signOut(sessionId: string, userId?: string) {
|
||||
// sign out all users in the session
|
||||
if (!userId) {
|
||||
await this.db.session.deleteMany({
|
||||
where: {
|
||||
id: sessionId,
|
||||
},
|
||||
});
|
||||
} else {
|
||||
await this.db.userSession.deleteMany({
|
||||
where: {
|
||||
sessionId,
|
||||
userId,
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async getUserSession(
|
||||
token: string,
|
||||
seq = 0
|
||||
sessionId: string,
|
||||
userId?: string
|
||||
): Promise<{ user: CurrentUser; session: UserSession } | null> {
|
||||
const session = await this.getSession(token);
|
||||
const userSession = await this.db.userSession.findFirst({
|
||||
where: {
|
||||
sessionId,
|
||||
userId,
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
sessionId: true,
|
||||
userId: true,
|
||||
createdAt: true,
|
||||
expiresAt: true,
|
||||
user: true,
|
||||
},
|
||||
orderBy: {
|
||||
createdAt: 'asc',
|
||||
},
|
||||
});
|
||||
|
||||
// no such session
|
||||
if (!session) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const userSession = session.userSessions.at(seq);
|
||||
|
||||
// no such user session
|
||||
if (!userSession) {
|
||||
return null;
|
||||
}
|
||||
@@ -142,112 +150,93 @@ export class AuthService implements OnApplicationBootstrap {
|
||||
return null;
|
||||
}
|
||||
|
||||
const user = await this.db.user.findUnique({
|
||||
where: { id: userSession.userId },
|
||||
});
|
||||
|
||||
if (!user) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return { user: sessionUser(user), session: userSession };
|
||||
return { user: sessionUser(userSession.user), session: userSession };
|
||||
}
|
||||
|
||||
async getUserList(token: string) {
|
||||
const session = await this.getSession(token);
|
||||
|
||||
if (!session || !session.userSessions.length) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const users = await this.db.user.findMany({
|
||||
where: {
|
||||
id: {
|
||||
in: session.userSessions.map(({ userId }) => userId),
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// TODO(@forehalo): need to separate expired session, same for [getUser]
|
||||
// Session
|
||||
// | { user: LimitedUser { email, avatarUrl }, expired: true }
|
||||
// | { user: User, expired: false }
|
||||
return session.userSessions
|
||||
.map(userSession => {
|
||||
// keep users in the same order as userSessions
|
||||
const user = users.find(({ id }) => id === userSession.userId);
|
||||
if (!user) {
|
||||
return null;
|
||||
}
|
||||
return sessionUser(user);
|
||||
})
|
||||
.filter(Boolean) as CurrentUser[];
|
||||
}
|
||||
|
||||
async signOut(token: string, seq = 0) {
|
||||
const session = await this.getSession(token);
|
||||
|
||||
if (session) {
|
||||
// overflow the logged in user
|
||||
if (session.userSessions.length <= seq) {
|
||||
return session;
|
||||
}
|
||||
|
||||
await this.db.userSession.deleteMany({
|
||||
where: { id: session.userSessions[seq].id },
|
||||
});
|
||||
|
||||
// no more user session active, delete the whole session
|
||||
if (session.userSessions.length === 1) {
|
||||
await this.db.session.delete({ where: { id: session.id } });
|
||||
return null;
|
||||
}
|
||||
|
||||
return session;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
async getSession(token: string) {
|
||||
if (!token) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return this.db.$transaction(async tx => {
|
||||
const session = await tx.session.findUnique({
|
||||
async createUserSession(
|
||||
userId: string,
|
||||
sessionId?: string,
|
||||
ttl = this.config.auth.session.ttl
|
||||
) {
|
||||
// check whether given session is valid
|
||||
if (sessionId) {
|
||||
const session = await this.db.session.findFirst({
|
||||
where: {
|
||||
id: token,
|
||||
},
|
||||
include: {
|
||||
userSessions: {
|
||||
orderBy: {
|
||||
createdAt: 'asc',
|
||||
},
|
||||
},
|
||||
id: sessionId,
|
||||
},
|
||||
});
|
||||
|
||||
if (!session) {
|
||||
return null;
|
||||
sessionId = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
if (session.expiresAt && session.expiresAt <= new Date()) {
|
||||
await tx.session.delete({
|
||||
where: {
|
||||
id: session.id,
|
||||
if (!sessionId) {
|
||||
const session = await this.createSession();
|
||||
sessionId = session.id;
|
||||
}
|
||||
|
||||
const expiresAt = new Date(Date.now() + ttl * 1000);
|
||||
|
||||
return this.db.userSession.upsert({
|
||||
where: {
|
||||
sessionId_userId: {
|
||||
sessionId,
|
||||
userId,
|
||||
},
|
||||
},
|
||||
update: {
|
||||
expiresAt,
|
||||
},
|
||||
create: {
|
||||
sessionId,
|
||||
userId,
|
||||
expiresAt,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async getUserList(sessionId: string) {
|
||||
const sessions = await this.db.userSession.findMany({
|
||||
where: {
|
||||
sessionId,
|
||||
OR: [
|
||||
{
|
||||
expiresAt: null,
|
||||
},
|
||||
});
|
||||
{
|
||||
expiresAt: {
|
||||
gt: new Date(),
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
include: {
|
||||
user: true,
|
||||
},
|
||||
orderBy: {
|
||||
createdAt: 'asc',
|
||||
},
|
||||
});
|
||||
|
||||
return null;
|
||||
}
|
||||
return sessions.map(({ user }) => sessionUser(user));
|
||||
}
|
||||
|
||||
return session;
|
||||
async createSession() {
|
||||
return this.db.session.create({
|
||||
data: {},
|
||||
});
|
||||
}
|
||||
|
||||
async getSession(sessionId: string) {
|
||||
return this.db.session.findFirst({
|
||||
where: {
|
||||
id: sessionId,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async refreshUserSessionIfNeeded(
|
||||
_req: Request,
|
||||
res: Response,
|
||||
session: UserSession,
|
||||
ttr = this.config.auth.session.ttr
|
||||
@@ -281,70 +270,63 @@ export class AuthService implements OnApplicationBootstrap {
|
||||
return true;
|
||||
}
|
||||
|
||||
async createUserSession(
|
||||
user: { id: string },
|
||||
existingSession?: string,
|
||||
ttl = this.config.auth.session.ttl
|
||||
) {
|
||||
const session = existingSession
|
||||
? await this.getSession(existingSession)
|
||||
: null;
|
||||
|
||||
const expiresAt = new Date(Date.now() + ttl * 1000);
|
||||
if (session) {
|
||||
return this.db.userSession.upsert({
|
||||
where: {
|
||||
sessionId_userId: {
|
||||
sessionId: session.id,
|
||||
userId: user.id,
|
||||
},
|
||||
},
|
||||
update: {
|
||||
expiresAt,
|
||||
},
|
||||
create: {
|
||||
sessionId: session.id,
|
||||
userId: user.id,
|
||||
expiresAt,
|
||||
},
|
||||
});
|
||||
} else {
|
||||
return this.db.userSession.create({
|
||||
data: {
|
||||
expiresAt,
|
||||
session: {
|
||||
create: {},
|
||||
},
|
||||
user: {
|
||||
connect: {
|
||||
id: user.id,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async revokeUserSessions(userId: string, sessionId?: string) {
|
||||
async revokeUserSessions(userId: string) {
|
||||
return this.db.userSession.deleteMany({
|
||||
where: {
|
||||
userId,
|
||||
sessionId,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async setCookie(_req: Request, res: Response, user: { id: string }) {
|
||||
const session = await this.createUserSession(
|
||||
user
|
||||
// TODO(@forehalo): enable multi user session
|
||||
// req.cookies[AuthService.sessionCookieName]
|
||||
);
|
||||
getSessionOptionsFromRequest(req: Request) {
|
||||
let sessionId: string | undefined =
|
||||
req.cookies[AuthService.sessionCookieName];
|
||||
|
||||
res.cookie(AuthService.sessionCookieName, session.sessionId, {
|
||||
expires: session.expiresAt ?? void 0,
|
||||
if (!sessionId && req.headers.authorization) {
|
||||
sessionId = extractTokenFromHeader(req.headers.authorization);
|
||||
}
|
||||
|
||||
const userId: string | undefined =
|
||||
req.cookies[AuthService.userCookieName] ||
|
||||
req.headers[AuthService.userCookieName];
|
||||
|
||||
return {
|
||||
sessionId,
|
||||
userId,
|
||||
};
|
||||
}
|
||||
|
||||
async setCookies(req: Request, res: Response, userId: string) {
|
||||
const { sessionId } = this.getSessionOptionsFromRequest(req);
|
||||
|
||||
const userSession = await this.createUserSession(userId, sessionId);
|
||||
|
||||
res.cookie(AuthService.sessionCookieName, userSession.sessionId, {
|
||||
...this.cookieOptions,
|
||||
expires: userSession.expiresAt ?? void 0,
|
||||
});
|
||||
|
||||
this.setUserCookie(res, userId);
|
||||
}
|
||||
|
||||
setUserCookie(res: Response, userId: string) {
|
||||
res.cookie(AuthService.userCookieName, userId, {
|
||||
...this.cookieOptions,
|
||||
// user cookie is client readable & writable for fast user switch if there are multiple users in one session
|
||||
// it safe to be non-secure & non-httpOnly because server will validate it by `cookie[AuthService.sessionCookieName]`
|
||||
httpOnly: false,
|
||||
secure: false,
|
||||
});
|
||||
}
|
||||
|
||||
async getUserSessionFromRequest(req: Request) {
|
||||
const { sessionId, userId } = this.getSessionOptionsFromRequest(req);
|
||||
|
||||
if (!sessionId) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return this.getUserSession(sessionId, userId);
|
||||
}
|
||||
|
||||
async changePassword(
|
||||
@@ -393,24 +375,16 @@ export class AuthService implements OnApplicationBootstrap {
|
||||
|
||||
async sendSignInEmail(email: string, link: string, signUp: boolean) {
|
||||
return signUp
|
||||
? await this.mailer.sendSignUpMail(link.toString(), {
|
||||
? await this.mailer.sendSignUpMail(link, {
|
||||
to: email,
|
||||
})
|
||||
: await this.mailer.sendSignInMail(link.toString(), {
|
||||
: await this.mailer.sendSignInMail(link, {
|
||||
to: email,
|
||||
});
|
||||
}
|
||||
|
||||
@Cron(CronExpression.EVERY_DAY_AT_MIDNIGHT)
|
||||
async cleanExpiredSessions() {
|
||||
await this.db.session.deleteMany({
|
||||
where: {
|
||||
expiresAt: {
|
||||
lte: new Date(),
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
await this.db.userSession.deleteMany({
|
||||
where: {
|
||||
expiresAt: {
|
||||
|
||||
@@ -4,10 +4,6 @@ import { User, UserSession } from '@prisma/client';
|
||||
|
||||
import { getRequestResponseFromContext } from '../../fundamentals';
|
||||
|
||||
function getUserFromContext(context: ExecutionContext) {
|
||||
return getRequestResponseFromContext(context).req.user;
|
||||
}
|
||||
|
||||
/**
|
||||
* Used to fetch current user from the request context.
|
||||
*
|
||||
@@ -44,7 +40,7 @@ function getUserFromContext(context: ExecutionContext) {
|
||||
// eslint-disable-next-line no-redeclare
|
||||
export const CurrentUser = createParamDecorator(
|
||||
(_: unknown, context: ExecutionContext) => {
|
||||
return getUserFromContext(context);
|
||||
return getRequestResponseFromContext(context).req.session?.user;
|
||||
}
|
||||
);
|
||||
|
||||
@@ -54,4 +50,14 @@ export interface CurrentUser
|
||||
emailVerified: boolean;
|
||||
}
|
||||
|
||||
export { type UserSession };
|
||||
// interface and variable don't conflict
|
||||
// eslint-disable-next-line no-redeclare
|
||||
export const Session = createParamDecorator(
|
||||
(_: unknown, context: ExecutionContext) => {
|
||||
return getRequestResponseFromContext(context).req.session;
|
||||
}
|
||||
);
|
||||
|
||||
export type Session = UserSession & {
|
||||
user: CurrentUser;
|
||||
};
|
||||
@@ -69,13 +69,9 @@ export class TokenService {
|
||||
const valid =
|
||||
!expired && (!record.credential || record.credential === credential);
|
||||
|
||||
if ((expired || valid) && !keep) {
|
||||
const deleted = await this.db.verificationToken.deleteMany({
|
||||
where: {
|
||||
token,
|
||||
type,
|
||||
},
|
||||
});
|
||||
// always revoke expired token
|
||||
if (expired || (valid && !keep)) {
|
||||
const deleted = await this.revokeToken(type, token);
|
||||
|
||||
// already deleted, means token has been used
|
||||
if (!deleted.count) {
|
||||
@@ -86,6 +82,15 @@ export class TokenService {
|
||||
return valid ? record : null;
|
||||
}
|
||||
|
||||
async revokeToken(type: TokenType, token: string) {
|
||||
return await this.db.verificationToken.deleteMany({
|
||||
where: {
|
||||
token,
|
||||
type,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
@Cron(CronExpression.EVERY_DAY_AT_MIDNIGHT)
|
||||
async cleanExpiredTokens() {
|
||||
await this.db.verificationToken.deleteMany({
|
||||
|
||||
@@ -25,8 +25,8 @@ export class AdminGuard implements CanActivate, OnModuleInit {
|
||||
async canActivate(context: ExecutionContext) {
|
||||
const { req } = getRequestResponseFromContext(context);
|
||||
let allow = false;
|
||||
if (req.user) {
|
||||
allow = await this.feature.isAdmin(req.user.id);
|
||||
if (req.session) {
|
||||
allow = await this.feature.isAdmin(req.session.user.id);
|
||||
}
|
||||
|
||||
if (!allow) {
|
||||
|
||||
@@ -4,17 +4,23 @@ import { Module } from '@nestjs/common';
|
||||
|
||||
import {
|
||||
ServerConfigResolver,
|
||||
ServerFeatureConfigResolver,
|
||||
ServerRuntimeConfigResolver,
|
||||
ServerServiceConfigResolver,
|
||||
} from './resolver';
|
||||
import { ServerService } from './service';
|
||||
|
||||
@Module({
|
||||
providers: [
|
||||
ServerService,
|
||||
ServerConfigResolver,
|
||||
ServerFeatureConfigResolver,
|
||||
ServerRuntimeConfigResolver,
|
||||
ServerServiceConfigResolver,
|
||||
],
|
||||
exports: [ServerService],
|
||||
})
|
||||
export class ServerConfigModule {}
|
||||
export { ServerService };
|
||||
export { ADD_ENABLED_FEATURES } from './server-feature';
|
||||
export { ServerFeature } from './types';
|
||||
|
||||
@@ -9,7 +9,7 @@ import {
|
||||
ResolveField,
|
||||
Resolver,
|
||||
} from '@nestjs/graphql';
|
||||
import { PrismaClient, RuntimeConfig, RuntimeConfigType } from '@prisma/client';
|
||||
import { RuntimeConfig, RuntimeConfigType } from '@prisma/client';
|
||||
import { GraphQLJSON, GraphQLJSONObject } from 'graphql-scalars';
|
||||
|
||||
import { Config, URLHelper } from '../../fundamentals';
|
||||
@@ -19,6 +19,7 @@ import { FeatureType } from '../features';
|
||||
import { AvailableUserFeatureConfig } from '../features/resolver';
|
||||
import { ServerFlags } from './config';
|
||||
import { ENABLED_FEATURES } from './server-feature';
|
||||
import { ServerService } from './service';
|
||||
import { ServerConfigType } from './types';
|
||||
|
||||
@ObjectType()
|
||||
@@ -76,7 +77,7 @@ export class ServerConfigResolver {
|
||||
constructor(
|
||||
private readonly config: Config,
|
||||
private readonly url: URLHelper,
|
||||
private readonly db: PrismaClient
|
||||
private readonly server: ServerService
|
||||
) {}
|
||||
|
||||
@Public()
|
||||
@@ -131,7 +132,7 @@ export class ServerConfigResolver {
|
||||
description: 'whether server has been initialized',
|
||||
})
|
||||
async initialized() {
|
||||
return (await this.db.user.count()) > 0;
|
||||
return this.server.initialized();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
17
packages/backend/server/src/core/config/service.ts
Normal file
17
packages/backend/server/src/core/config/service.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { PrismaClient } from '@prisma/client';
|
||||
|
||||
@Injectable()
|
||||
export class ServerService {
|
||||
private _initialized: boolean | null = null;
|
||||
constructor(private readonly db: PrismaClient) {}
|
||||
|
||||
async initialized() {
|
||||
if (!this._initialized) {
|
||||
const userCount = await this.db.user.count();
|
||||
this._initialized = userCount > 0;
|
||||
}
|
||||
|
||||
return this._initialized;
|
||||
}
|
||||
}
|
||||
@@ -3,6 +3,7 @@ import { Field, ObjectType, registerEnumType } from '@nestjs/graphql';
|
||||
import { DeploymentType } from '../../fundamentals';
|
||||
|
||||
export enum ServerFeature {
|
||||
Captcha = 'captcha',
|
||||
Copilot = 'copilot',
|
||||
Payment = 'payment',
|
||||
OAuth = 'oauth',
|
||||
|
||||
186
packages/backend/server/src/core/doc/adapters/userspace.ts
Normal file
186
packages/backend/server/src/core/doc/adapters/userspace.ts
Normal file
@@ -0,0 +1,186 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { PrismaClient } from '@prisma/client';
|
||||
|
||||
import { Mutex } from '../../../fundamentals';
|
||||
import { DocStorageOptions } from '../options';
|
||||
import { DocRecord, DocStorageAdapter } from '../storage';
|
||||
|
||||
@Injectable()
|
||||
export class PgUserspaceDocStorageAdapter extends DocStorageAdapter {
|
||||
constructor(
|
||||
private readonly db: PrismaClient,
|
||||
private readonly mutex: Mutex,
|
||||
options: DocStorageOptions
|
||||
) {
|
||||
super(options);
|
||||
}
|
||||
|
||||
// no updates queue for userspace, directly merge them inplace
|
||||
// no history record for userspace
|
||||
protected async getDocUpdates() {
|
||||
return [];
|
||||
}
|
||||
|
||||
protected async markUpdatesMerged() {
|
||||
return 0;
|
||||
}
|
||||
|
||||
async listDocHistories() {
|
||||
return [];
|
||||
}
|
||||
|
||||
async getDocHistory() {
|
||||
return null;
|
||||
}
|
||||
|
||||
protected async createDocHistory() {
|
||||
return false;
|
||||
}
|
||||
|
||||
override async rollbackDoc() {
|
||||
return;
|
||||
}
|
||||
|
||||
override async getDoc(spaceId: string, docId: string) {
|
||||
return this.getDocSnapshot(spaceId, docId);
|
||||
}
|
||||
|
||||
async pushDocUpdates(
|
||||
userId: string,
|
||||
docId: string,
|
||||
updates: Uint8Array[],
|
||||
editorId?: string
|
||||
) {
|
||||
if (!updates.length) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
await using _lock = await this.lockDocForUpdate(userId, docId);
|
||||
const snapshot = await this.getDocSnapshot(userId, docId);
|
||||
const now = Date.now();
|
||||
const pendings = updates.map((update, i) => ({
|
||||
bin: update,
|
||||
timestamp: now + i,
|
||||
}));
|
||||
|
||||
const { timestamp, bin } = await this.squash(
|
||||
snapshot ? [snapshot, ...pendings] : pendings
|
||||
);
|
||||
|
||||
await this.setDocSnapshot({
|
||||
spaceId: userId,
|
||||
docId,
|
||||
bin,
|
||||
timestamp,
|
||||
editor: editorId,
|
||||
});
|
||||
|
||||
return timestamp;
|
||||
}
|
||||
|
||||
async deleteDoc(userId: string, docId: string) {
|
||||
await this.db.userSnapshot.deleteMany({
|
||||
where: {
|
||||
userId,
|
||||
id: docId,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async deleteSpace(userId: string) {
|
||||
await this.db.userSnapshot.deleteMany({
|
||||
where: {
|
||||
userId,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async getSpaceDocTimestamps(userId: string, after?: number) {
|
||||
const snapshots = await this.db.userSnapshot.findMany({
|
||||
select: {
|
||||
id: true,
|
||||
updatedAt: true,
|
||||
},
|
||||
where: {
|
||||
userId,
|
||||
...(after
|
||||
? {
|
||||
updatedAt: {
|
||||
gt: new Date(after),
|
||||
},
|
||||
}
|
||||
: {}),
|
||||
},
|
||||
});
|
||||
|
||||
const result: Record<string, number> = {};
|
||||
|
||||
snapshots.forEach(s => {
|
||||
result[s.id] = s.updatedAt.getTime();
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
protected async getDocSnapshot(userId: string, docId: string) {
|
||||
const snapshot = await this.db.userSnapshot.findUnique({
|
||||
where: {
|
||||
userId_id: {
|
||||
userId,
|
||||
id: docId,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
if (!snapshot) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
spaceId: userId,
|
||||
docId,
|
||||
bin: snapshot.blob,
|
||||
timestamp: snapshot.updatedAt.getTime(),
|
||||
editor: snapshot.userId,
|
||||
};
|
||||
}
|
||||
|
||||
protected async setDocSnapshot(snapshot: DocRecord) {
|
||||
// we always get lock before writing to user snapshot table,
|
||||
// so a simple upsert without testing on updatedAt is safe
|
||||
await this.db.userSnapshot.upsert({
|
||||
where: {
|
||||
userId_id: {
|
||||
userId: snapshot.spaceId,
|
||||
id: snapshot.docId,
|
||||
},
|
||||
},
|
||||
update: {
|
||||
blob: Buffer.from(snapshot.bin),
|
||||
updatedAt: new Date(snapshot.timestamp),
|
||||
},
|
||||
create: {
|
||||
userId: snapshot.spaceId,
|
||||
id: snapshot.docId,
|
||||
blob: Buffer.from(snapshot.bin),
|
||||
createdAt: new Date(snapshot.timestamp),
|
||||
updatedAt: new Date(snapshot.timestamp),
|
||||
},
|
||||
});
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
protected override async lockDocForUpdate(
|
||||
workspaceId: string,
|
||||
docId: string
|
||||
) {
|
||||
const lock = await this.mutex.lock(`userspace:${workspaceId}:${docId}`);
|
||||
|
||||
if (!lock) {
|
||||
throw new Error('Too many concurrent writings');
|
||||
}
|
||||
|
||||
return lock;
|
||||
}
|
||||
}
|
||||
618
packages/backend/server/src/core/doc/adapters/workspace.ts
Normal file
618
packages/backend/server/src/core/doc/adapters/workspace.ts
Normal file
@@ -0,0 +1,618 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { PrismaClient } from '@prisma/client';
|
||||
import { chunk } from 'lodash-es';
|
||||
|
||||
import {
|
||||
Cache,
|
||||
DocHistoryNotFound,
|
||||
DocNotFound,
|
||||
FailedToSaveUpdates,
|
||||
FailedToUpsertSnapshot,
|
||||
metrics,
|
||||
Mutex,
|
||||
} from '../../../fundamentals';
|
||||
import { retryable } from '../../../fundamentals/utils/promise';
|
||||
import { DocStorageOptions } from '../options';
|
||||
import {
|
||||
DocRecord,
|
||||
DocStorageAdapter,
|
||||
DocUpdate,
|
||||
HistoryFilter,
|
||||
} from '../storage';
|
||||
|
||||
const UPDATES_QUEUE_CACHE_KEY = 'doc:manager:updates';
|
||||
|
||||
@Injectable()
|
||||
export class PgWorkspaceDocStorageAdapter extends DocStorageAdapter {
|
||||
private readonly logger = new Logger(PgWorkspaceDocStorageAdapter.name);
|
||||
|
||||
constructor(
|
||||
private readonly db: PrismaClient,
|
||||
private readonly mutex: Mutex,
|
||||
private readonly cache: Cache,
|
||||
protected override readonly options: DocStorageOptions
|
||||
) {
|
||||
super(options);
|
||||
}
|
||||
|
||||
async pushDocUpdates(
|
||||
workspaceId: string,
|
||||
docId: string,
|
||||
updates: Uint8Array[],
|
||||
editorId?: string
|
||||
) {
|
||||
if (!updates.length) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
let pendings = updates;
|
||||
let done = 0;
|
||||
let timestamp = Date.now();
|
||||
try {
|
||||
await retryable(async () => {
|
||||
if (done !== 0) {
|
||||
pendings = pendings.slice(done);
|
||||
}
|
||||
|
||||
// TODO(@forehalo): remove in next release
|
||||
const lastSeq = await this.getUpdateSeq(
|
||||
workspaceId,
|
||||
docId,
|
||||
updates.length
|
||||
);
|
||||
|
||||
let turn = 0;
|
||||
const batchCount = 10;
|
||||
for (const batch of chunk(pendings, batchCount)) {
|
||||
const now = Date.now();
|
||||
await this.db.update.createMany({
|
||||
data: batch.map((update, i) => {
|
||||
const subSeq = turn * batchCount + i + 1;
|
||||
// `seq` is the last seq num of the batch
|
||||
// example for 11 batched updates, start from seq num 20
|
||||
// seq for first update in the batch should be:
|
||||
// 31 - 11 + subSeq(0 * 10 + 0 + 1) = 21
|
||||
// ^ last seq num ^ updates.length ^ turn ^ batchCount ^i
|
||||
const seq = lastSeq - updates.length + subSeq;
|
||||
const createdAt = now + subSeq;
|
||||
timestamp = Math.max(timestamp, createdAt);
|
||||
|
||||
return {
|
||||
workspaceId,
|
||||
id: docId,
|
||||
blob: Buffer.from(update),
|
||||
seq,
|
||||
createdAt: new Date(createdAt),
|
||||
createdBy: editorId || null,
|
||||
};
|
||||
}),
|
||||
});
|
||||
turn++;
|
||||
done += batch.length;
|
||||
await this.updateCachedUpdatesCount(workspaceId, docId, batch.length);
|
||||
}
|
||||
});
|
||||
} catch (e) {
|
||||
this.logger.error('Failed to insert doc updates', e);
|
||||
metrics.doc.counter('doc_update_insert_failed').add(1);
|
||||
throw new FailedToSaveUpdates();
|
||||
}
|
||||
|
||||
return timestamp;
|
||||
}
|
||||
|
||||
protected async getDocUpdates(workspaceId: string, docId: string) {
|
||||
const rows = await this.db.update.findMany({
|
||||
where: {
|
||||
workspaceId,
|
||||
id: docId,
|
||||
},
|
||||
orderBy: {
|
||||
createdAt: 'asc',
|
||||
},
|
||||
});
|
||||
|
||||
return rows.map(row => ({
|
||||
bin: row.blob,
|
||||
timestamp: row.createdAt.getTime(),
|
||||
editor: row.createdBy || undefined,
|
||||
}));
|
||||
}
|
||||
|
||||
async deleteDoc(workspaceId: string, docId: string) {
|
||||
const ident = { where: { workspaceId, id: docId } };
|
||||
await this.db.$transaction([
|
||||
this.db.snapshot.deleteMany(ident),
|
||||
this.db.update.deleteMany(ident),
|
||||
this.db.snapshotHistory.deleteMany(ident),
|
||||
]);
|
||||
}
|
||||
|
||||
async deleteSpace(workspaceId: string) {
|
||||
const ident = { where: { workspaceId } };
|
||||
await this.db.$transaction([
|
||||
this.db.workspace.deleteMany({
|
||||
where: {
|
||||
id: workspaceId,
|
||||
},
|
||||
}),
|
||||
this.db.snapshot.deleteMany(ident),
|
||||
this.db.update.deleteMany(ident),
|
||||
this.db.snapshotHistory.deleteMany(ident),
|
||||
]);
|
||||
}
|
||||
|
||||
async getSpaceDocTimestamps(workspaceId: string, after?: number) {
|
||||
const snapshots = await this.db.snapshot.findMany({
|
||||
select: {
|
||||
id: true,
|
||||
updatedAt: true,
|
||||
},
|
||||
where: {
|
||||
workspaceId,
|
||||
...(after
|
||||
? {
|
||||
updatedAt: {
|
||||
gt: new Date(after),
|
||||
},
|
||||
}
|
||||
: {}),
|
||||
},
|
||||
});
|
||||
|
||||
const updates = await this.db.update.groupBy({
|
||||
where: {
|
||||
workspaceId,
|
||||
...(after
|
||||
? {
|
||||
createdAt: {
|
||||
gt: new Date(after),
|
||||
},
|
||||
}
|
||||
: {}),
|
||||
},
|
||||
by: ['id'],
|
||||
_max: {
|
||||
createdAt: true,
|
||||
},
|
||||
});
|
||||
|
||||
const result: Record<string, number> = {};
|
||||
|
||||
snapshots.forEach(s => {
|
||||
result[s.id] = s.updatedAt.getTime();
|
||||
});
|
||||
|
||||
updates.forEach(u => {
|
||||
if (u._max.createdAt) {
|
||||
result[u.id] = u._max.createdAt.getTime();
|
||||
}
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
protected async markUpdatesMerged(
|
||||
workspaceId: string,
|
||||
docId: string,
|
||||
updates: DocUpdate[]
|
||||
) {
|
||||
const result = await this.db.update.deleteMany({
|
||||
where: {
|
||||
workspaceId,
|
||||
id: docId,
|
||||
createdAt: {
|
||||
in: updates.map(u => new Date(u.timestamp)),
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
await this.updateCachedUpdatesCount(workspaceId, docId, -result.count);
|
||||
return result.count;
|
||||
}
|
||||
|
||||
async listDocHistories(
|
||||
workspaceId: string,
|
||||
docId: string,
|
||||
query: HistoryFilter
|
||||
) {
|
||||
const histories = await this.db.snapshotHistory.findMany({
|
||||
select: {
|
||||
timestamp: true,
|
||||
createdByUser: {
|
||||
select: {
|
||||
name: true,
|
||||
avatarUrl: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
where: {
|
||||
workspaceId,
|
||||
id: docId,
|
||||
timestamp: {
|
||||
lt: query.before ? new Date(query.before) : new Date(),
|
||||
},
|
||||
},
|
||||
orderBy: {
|
||||
timestamp: 'desc',
|
||||
},
|
||||
take: query.limit,
|
||||
});
|
||||
|
||||
return histories.map(h => ({
|
||||
timestamp: h.timestamp.getTime(),
|
||||
editor: h.createdByUser,
|
||||
}));
|
||||
}
|
||||
|
||||
async getDocHistory(workspaceId: string, docId: string, timestamp: number) {
|
||||
const history = await this.db.snapshotHistory.findUnique({
|
||||
where: {
|
||||
workspaceId_id_timestamp: {
|
||||
workspaceId,
|
||||
id: docId,
|
||||
timestamp: new Date(timestamp),
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
if (!history) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
spaceId: workspaceId,
|
||||
docId,
|
||||
bin: history.blob,
|
||||
timestamp,
|
||||
editor: history.createdBy || undefined,
|
||||
};
|
||||
}
|
||||
|
||||
override async rollbackDoc(
|
||||
spaceId: string,
|
||||
docId: string,
|
||||
timestamp: number,
|
||||
editorId?: string
|
||||
): Promise<void> {
|
||||
await using _lock = await this.lockDocForUpdate(spaceId, docId);
|
||||
const toSnapshot = await this.getDocHistory(spaceId, docId, timestamp);
|
||||
if (!toSnapshot) {
|
||||
throw new DocHistoryNotFound({ spaceId, docId, timestamp });
|
||||
}
|
||||
|
||||
const fromSnapshot = await this.getDocSnapshot(spaceId, docId);
|
||||
|
||||
if (!fromSnapshot) {
|
||||
throw new DocNotFound({ spaceId, docId });
|
||||
}
|
||||
|
||||
// force create a new history record after rollback
|
||||
await this.createDocHistory(
|
||||
{
|
||||
...fromSnapshot,
|
||||
// override the editor to the one who requested the rollback
|
||||
editor: editorId,
|
||||
},
|
||||
true
|
||||
);
|
||||
// WARN:
|
||||
// we should never do the snapshot updating in recovering,
|
||||
// which is not the solution in CRDT.
|
||||
// let user revert in client and update the data in sync system
|
||||
// const change = this.generateChangeUpdate(fromSnapshot.bin, toSnapshot.bin);
|
||||
// await this.pushDocUpdates(spaceId, docId, [change]);
|
||||
|
||||
metrics.doc
|
||||
.counter('history_recovered_counter', {
|
||||
description: 'How many times history recovered request happened',
|
||||
})
|
||||
.add(1);
|
||||
}
|
||||
|
||||
/**
 * Create a history record for the given snapshot, subject to throttling.
 *
 * A record is written when:
 *  - no history exists yet for the doc, or
 *  - `force` is true, or
 *  - the last history is older than the configured minimum interval
 *    (`options.historyMinInterval`) relative to the snapshot timestamp.
 * A snapshot whose timestamp equals the last history's is treated as
 * "no change" and skipped; empty doc binaries are also skipped.
 *
 * @param snapshot the doc record to archive
 * @param force bypass the min-interval throttle (used by rollback)
 * @returns `true` when a history record was created, `false` otherwise
 */
protected async createDocHistory(snapshot: DocRecord, force = false) {
  const last = await this.lastDocHistory(snapshot.spaceId, snapshot.docId);

  let shouldCreateHistory = false;

  if (!last) {
    // never created
    shouldCreateHistory = true;
  } else {
    const lastHistoryTimestamp = last.timestamp.getTime();
    if (lastHistoryTimestamp === snapshot.timestamp) {
      // no change
      shouldCreateHistory = false;
    } else if (
      // force
      force ||
      // last history created before interval in configs
      lastHistoryTimestamp <
        snapshot.timestamp - this.options.historyMinInterval(snapshot.spaceId)
    ) {
      shouldCreateHistory = true;
    }
  }

  if (shouldCreateHistory) {
    if (this.isEmptyBin(snapshot.bin)) {
      this.logger.debug(
        `Doc is empty, skip creating history record for ${snapshot.docId} in workspace ${snapshot.spaceId}`
      );
      return false;
    }

    await this.db.snapshotHistory
      .create({
        select: {
          timestamp: true,
        },
        data: {
          workspaceId: snapshot.spaceId,
          id: snapshot.docId,
          timestamp: new Date(snapshot.timestamp),
          blob: Buffer.from(snapshot.bin),
          createdBy: snapshot.editor,
          // retention window is quota-driven and may differ per workspace
          expiredAt: new Date(
            Date.now() + (await this.options.historyMaxAge(snapshot.spaceId))
          ),
        },
      })
      .catch(() => {
        // safe to ignore
        // only happens when duplicated history record created in multi processes
      });

    metrics.doc
      .counter('history_created_counter', {
        description: 'How many times the snapshot history created',
      })
      .add(1);
    this.logger.debug(
      `History created for ${snapshot.docId} in workspace ${snapshot.spaceId}.`
    );
    return true;
  }

  return false;
}
|
||||
|
||||
/**
 * Load the current snapshot of a doc and map it to the storage-neutral
 * record shape.
 *
 * @returns the doc record, or `null` when the doc has never been persisted
 */
protected async getDocSnapshot(workspaceId: string, docId: string) {
  const row = await this.db.snapshot.findUnique({
    where: {
      id_workspaceId: {
        workspaceId,
        id: docId,
      },
    },
  });

  if (!row) {
    return null;
  }

  // creator and editor may null if their account is deleted
  const editor = row.updatedBy || row.createdBy || undefined;

  return {
    spaceId: workspaceId,
    docId,
    bin: row.blob,
    timestamp: row.updatedAt.getTime(),
    editor,
  };
}
|
||||
|
||||
/**
 * Persist a doc snapshot, refusing to overwrite a newer one.
 *
 * Uses a raw `INSERT ... ON CONFLICT DO UPDATE` so the update can carry an
 * extra `updated_at <= :updatedAt` guard, which Prisma's `upsert` cannot
 * express (see CONCERNS below).
 *
 * @param snapshot record to persist (`spaceId`, `docId`, `bin`, `timestamp`,
 *   optional `editor`)
 * @returns `true` when the row was inserted or updated; `false` when the bin
 *   is empty or a newer snapshot already exists (the guarded upsert matched
 *   no row)
 * @throws FailedToUpsertSnapshot when the underlying query fails
 */
protected async setDocSnapshot(snapshot: DocRecord) {
  const { spaceId, docId, bin, timestamp } = snapshot;

  if (this.isEmptyBin(bin)) {
    return false;
  }

  const updatedAt = new Date(timestamp);

  // CONCERNS:
  // i. Because we save the real user's last seen action time as `updatedAt`,
  // it's possible to simply compare the `updatedAt` to determine if the snapshot is older than the one we are going to save.
  //
  // ii. Prisma doesn't support `upsert` with additional `where` condition along side unique constraint.
  // In our case, we need to manually check the `updatedAt` to avoid overriding the newer snapshot.
  // where: { id_workspaceId: {}, updatedAt: { lt: updatedAt } }
  //                              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  try {
    const result: { updatedAt: Date }[] = await this.db.$queryRaw`
      INSERT INTO "snapshots" ("workspace_id", "guid", "blob", "created_at", "updated_at", "created_by", "updated_by")
      VALUES (${spaceId}, ${docId}, ${bin}, DEFAULT, ${updatedAt}, ${snapshot.editor}, ${snapshot.editor})
      ON CONFLICT ("workspace_id", "guid")
      DO UPDATE SET "blob" = ${bin}, "updated_at" = ${updatedAt}, "updated_by" = ${snapshot.editor}
      WHERE "snapshots"."workspace_id" = ${spaceId} AND "snapshots"."guid" = ${docId} AND "snapshots"."updated_at" <= ${updatedAt}
      RETURNING "snapshots"."workspace_id" as "workspaceId", "snapshots"."guid" as "id", "snapshots"."updated_at" as "updatedAt"
    `;

    // const result = await this.db.snapshot.upsert({
    //   select: {
    //     updatedAt: true,
    //     seq: true,
    //   },
    //   where: {
    //     id_workspaceId: {
    //       workspaceId,
    //       id: guid,
    //     },
    //     ⬇️ NOT SUPPORTED BY PRISMA YET
    //     updatedAt: {
    //       lt: updatedAt,
    //     },
    //   },
    //   update: {
    //     blob,
    //     state,
    //     updatedAt,
    //   },
    //   create: {
    //     workspaceId,
    //     id: guid,
    //     blob,
    //     state,
    //     updatedAt,
    //     seq,
    //   },
    // });

    // if the condition `snapshot.updatedAt > updatedAt` is true, by which means the snapshot has already been updated by other process,
    // the updates has been applied to current `doc` must have been seen by the other process as well.
    // The `updatedSnapshot` will be `undefined` in this case.
    const updatedSnapshot = result.at(0);
    return !!updatedSnapshot;
  } catch (e) {
    metrics.doc.counter('snapshot_upsert_failed').add(1);
    this.logger.error('Failed to upsert snapshot', e);
    throw new FailedToUpsertSnapshot();
  }
}
|
||||
|
||||
/**
 * Acquire the distributed mutex guarding writes to a single doc.
 *
 * @throws Error when the lock cannot be obtained (write contention too high)
 * @returns the acquired lock handle
 */
protected override async lockDocForUpdate(
  workspaceId: string,
  docId: string
) {
  const key = `doc:update:${workspaceId}:${docId}`;
  const lock = await this.mutex.lock(key);

  if (!lock) {
    throw new Error('Too many concurrent writings');
  }

  return lock;
}
|
||||
|
||||
/**
 * Fetch the most recent history record of a doc (timestamp and state only).
 *
 * @returns the newest history row, or `null` when the doc has no history yet
 */
protected async lastDocHistory(workspaceId: string, id: string) {
  return this.db.snapshotHistory.findFirst({
    select: {
      timestamp: true,
      state: true,
    },
    orderBy: {
      timestamp: 'desc',
    },
    where: {
      workspaceId,
      id,
    },
  });
}
|
||||
|
||||
// for auto merging
/**
 * Pick a random doc from the pending-updates queue cache for background
 * merging.
 *
 * Cross-checks the cached counter against the real `update` row count and
 * evicts stale cache entries (counter > 0 but no db rows).
 *
 * @returns `{ workspaceId, docId }` of a doc with pending updates, or `null`
 *   when the queue is empty or the picked entry was stale
 */
async randomDoc() {
  const key = await this.cache.mapRandomKey(UPDATES_QUEUE_CACHE_KEY);

  if (key) {
    // increase-by-0 is a plain read of the cached pending-updates counter
    const cachedCount = await this.cache.mapIncrease(
      UPDATES_QUEUE_CACHE_KEY,
      key,
      0
    );

    if (cachedCount > 0) {
      // cache field format: `${workspaceId}::${docId}`
      const [workspaceId, id] = key.split('::');
      const count = await this.db.update.count({
        where: {
          workspaceId,
          id,
        },
      });

      // FIXME(@forehalo): somehow the update count in cache is not accurate
      if (count === 0) {
        metrics.doc
          .counter('doc_update_count_inconsistent_with_cache')
          .add(1);
        // drop the stale entry so it is not picked again
        await this.cache.mapDelete(UPDATES_QUEUE_CACHE_KEY, key);
        return null;
      }

      return { workspaceId, docId: id };
    }
  }

  return null;
}
|
||||
|
||||
/**
 * Adjust the cached pending-updates counter for a doc in the merge queue.
 *
 * Increments (or decrements, with a negative `count`) the per-doc entry in
 * the shared updates-queue cache map, and removes the entry entirely once the
 * counter drops to zero or below so `randomDoc()` stops picking the doc up.
 *
 * @param workspaceId workspace the doc belongs to
 * @param guid doc identifier
 * @param count delta to apply to the cached counter
 */
private async updateCachedUpdatesCount(
  workspaceId: string,
  guid: string,
  count: number
) {
  // single source of truth for the map field key
  // (previously duplicated as two inline template literals)
  const field = `${workspaceId}::${guid}`;

  const result = await this.cache.mapIncrease(
    UPDATES_QUEUE_CACHE_KEY,
    field,
    count
  );

  if (result <= 0) {
    await this.cache.mapDelete(UPDATES_QUEUE_CACHE_KEY, field);
  }
}
|
||||
|
||||
/**
 * @deprecated
 */
// fallback per-doc counter, used only when the snapshot row does not exist yet
private readonly seqMap = new Map<string, number>();
/**
 *
 * @deprecated updates do not rely on seq number anymore
 *
 * keep in next release to avoid downtime when upgrading instances
 *
 * Atomically reserves `batch` sequence numbers on the doc's snapshot row and
 * returns the last reserved number. Falls back to the in-memory `seqMap`
 * when the snapshot row does not exist (the Prisma update throws).
 */
private async getUpdateSeq(workspaceId: string, guid: string, batch = 1) {
  const MAX_SEQ_NUM = 0x3fffffff; // u31

  try {
    // atomic read-modify-write: bump `seq` on the snapshot row and read it back
    const { seq } = await this.db.snapshot.update({
      select: {
        seq: true,
      },
      where: {
        id_workspaceId: {
          workspaceId,
          id: guid,
        },
      },
      data: {
        seq: {
          increment: batch,
        },
      },
    });

    // NOTE(review): this also triggers when the incremented seq is 0 —
    // presumably treated the same as "no seq column value"; confirm intended
    if (!seq) {
      return batch;
    }

    // reset
    if (seq >= MAX_SEQ_NUM) {
      await this.db.snapshot.update({
        select: {
          seq: true,
        },
        where: {
          id_workspaceId: {
            workspaceId,
            id: guid,
          },
        },
        data: {
          seq: 0,
        },
      });
    }

    return seq;
  } catch {
    // not existing snapshot just count it from 1
    const last = this.seqMap.get(workspaceId + guid) ?? 0;
    this.seqMap.set(workspaceId + guid, last + batch);
    return last + batch;
  }
}
|
||||
}
|
||||
@@ -1,266 +0,0 @@
|
||||
import { isDeepStrictEqual } from 'node:util';
|
||||
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { Cron, CronExpression } from '@nestjs/schedule';
|
||||
import { PrismaClient } from '@prisma/client';
|
||||
|
||||
import type { EventPayload } from '../../fundamentals';
|
||||
import {
|
||||
Config,
|
||||
DocHistoryNotFound,
|
||||
DocNotFound,
|
||||
metrics,
|
||||
OnEvent,
|
||||
WorkspaceNotFound,
|
||||
} from '../../fundamentals';
|
||||
import { QuotaService } from '../quota';
|
||||
import { Permission } from '../workspaces/types';
|
||||
import { isEmptyBuffer } from './manager';
|
||||
|
||||
@Injectable()
|
||||
export class DocHistoryManager {
|
||||
private readonly logger = new Logger(DocHistoryManager.name);
|
||||
constructor(
|
||||
private readonly config: Config,
|
||||
private readonly db: PrismaClient,
|
||||
private readonly quota: QuotaService
|
||||
) {}
|
||||
|
||||
@OnEvent('workspace.deleted')
|
||||
onWorkspaceDeleted(workspaceId: EventPayload<'workspace.deleted'>) {
|
||||
return this.db.snapshotHistory.deleteMany({
|
||||
where: {
|
||||
workspaceId,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
@OnEvent('snapshot.deleted')
|
||||
onSnapshotDeleted({ workspaceId, id }: EventPayload<'snapshot.deleted'>) {
|
||||
return this.db.snapshotHistory.deleteMany({
|
||||
where: {
|
||||
workspaceId,
|
||||
id,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
@OnEvent('snapshot.updated')
|
||||
async onDocUpdated(
|
||||
{ workspaceId, id, previous }: EventPayload<'snapshot.updated'>,
|
||||
forceCreate = false
|
||||
) {
|
||||
const last = await this.last(workspaceId, id);
|
||||
|
||||
let shouldCreateHistory = false;
|
||||
|
||||
if (!last) {
|
||||
// never created
|
||||
shouldCreateHistory = true;
|
||||
} else if (last.timestamp === previous.updatedAt) {
|
||||
// no change
|
||||
shouldCreateHistory = false;
|
||||
} else if (
|
||||
// force
|
||||
forceCreate ||
|
||||
// last history created before interval in configs
|
||||
last.timestamp.getTime() <
|
||||
previous.updatedAt.getTime() - this.config.doc.history.interval
|
||||
) {
|
||||
shouldCreateHistory = true;
|
||||
}
|
||||
|
||||
if (shouldCreateHistory) {
|
||||
// skip the history recording when no actual update on snapshot happended
|
||||
if (last && isDeepStrictEqual(last.state, previous.state)) {
|
||||
this.logger.debug(
|
||||
`State matches, skip creating history record for ${id} in workspace ${workspaceId}`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (isEmptyBuffer(previous.blob)) {
|
||||
this.logger.debug(
|
||||
`Doc is empty, skip creating history record for ${id} in workspace ${workspaceId}`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
await this.db.snapshotHistory
|
||||
.create({
|
||||
select: {
|
||||
timestamp: true,
|
||||
},
|
||||
data: {
|
||||
workspaceId,
|
||||
id,
|
||||
timestamp: previous.updatedAt,
|
||||
blob: previous.blob,
|
||||
state: previous.state,
|
||||
expiredAt: await this.getExpiredDateFromNow(workspaceId),
|
||||
},
|
||||
})
|
||||
.catch(() => {
|
||||
// safe to ignore
|
||||
// only happens when duplicated history record created in multi processes
|
||||
});
|
||||
metrics.doc
|
||||
.counter('history_created_counter', {
|
||||
description: 'How many times the snapshot history created',
|
||||
})
|
||||
.add(1);
|
||||
this.logger.debug(
|
||||
`History created for ${id} in workspace ${workspaceId}.`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
async list(
|
||||
workspaceId: string,
|
||||
id: string,
|
||||
before: Date = new Date(),
|
||||
take: number = 10
|
||||
) {
|
||||
return this.db.snapshotHistory.findMany({
|
||||
select: {
|
||||
timestamp: true,
|
||||
},
|
||||
where: {
|
||||
workspaceId,
|
||||
id,
|
||||
timestamp: {
|
||||
lt: before,
|
||||
},
|
||||
// only include the ones has not expired
|
||||
expiredAt: {
|
||||
gt: new Date(),
|
||||
},
|
||||
},
|
||||
orderBy: {
|
||||
timestamp: 'desc',
|
||||
},
|
||||
take,
|
||||
});
|
||||
}
|
||||
|
||||
async count(workspaceId: string, id: string) {
|
||||
return this.db.snapshotHistory.count({
|
||||
where: {
|
||||
workspaceId,
|
||||
id,
|
||||
expiredAt: {
|
||||
gt: new Date(),
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async get(workspaceId: string, id: string, timestamp: Date) {
|
||||
return this.db.snapshotHistory.findUnique({
|
||||
where: {
|
||||
workspaceId_id_timestamp: {
|
||||
workspaceId,
|
||||
id,
|
||||
timestamp,
|
||||
},
|
||||
expiredAt: {
|
||||
gt: new Date(),
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async last(workspaceId: string, id: string) {
|
||||
return this.db.snapshotHistory.findFirst({
|
||||
where: {
|
||||
workspaceId,
|
||||
id,
|
||||
},
|
||||
select: {
|
||||
timestamp: true,
|
||||
state: true,
|
||||
},
|
||||
orderBy: {
|
||||
timestamp: 'desc',
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async recover(workspaceId: string, id: string, timestamp: Date) {
|
||||
const history = await this.db.snapshotHistory.findUnique({
|
||||
where: {
|
||||
workspaceId_id_timestamp: {
|
||||
workspaceId,
|
||||
id,
|
||||
timestamp,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
if (!history) {
|
||||
throw new DocHistoryNotFound({
|
||||
workspaceId,
|
||||
docId: id,
|
||||
timestamp: timestamp.getTime(),
|
||||
});
|
||||
}
|
||||
|
||||
const oldSnapshot = await this.db.snapshot.findUnique({
|
||||
where: {
|
||||
id_workspaceId: {
|
||||
id,
|
||||
workspaceId,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
if (!oldSnapshot) {
|
||||
throw new DocNotFound({ workspaceId, docId: id });
|
||||
}
|
||||
|
||||
// save old snapshot as one history record
|
||||
await this.onDocUpdated({ workspaceId, id, previous: oldSnapshot }, true);
|
||||
// WARN:
|
||||
// we should never do the snapshot updating in recovering,
|
||||
// which is not the solution in CRDT.
|
||||
// let user revert in client and update the data in sync system
|
||||
// `await this.db.snapshot.update();`
|
||||
metrics.doc
|
||||
.counter('history_recovered_counter', {
|
||||
description: 'How many times history recovered request happened',
|
||||
})
|
||||
.add(1);
|
||||
|
||||
return history.timestamp;
|
||||
}
|
||||
|
||||
async getExpiredDateFromNow(workspaceId: string) {
|
||||
const permission = await this.db.workspaceUserPermission.findFirst({
|
||||
select: {
|
||||
userId: true,
|
||||
},
|
||||
where: {
|
||||
workspaceId,
|
||||
type: Permission.Owner,
|
||||
},
|
||||
});
|
||||
|
||||
if (!permission) {
|
||||
throw new WorkspaceNotFound({ workspaceId });
|
||||
}
|
||||
|
||||
const quota = await this.quota.getUserQuota(permission.userId);
|
||||
return quota.feature.historyPeriodFromNow;
|
||||
}
|
||||
|
||||
@Cron(CronExpression.EVERY_DAY_AT_MIDNIGHT /* everyday at 12am */)
|
||||
async cleanupExpiredHistory() {
|
||||
await this.db.snapshotHistory.deleteMany({
|
||||
where: {
|
||||
expiredAt: {
|
||||
lte: new Date(),
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -2,15 +2,24 @@ import './config';
|
||||
|
||||
import { Module } from '@nestjs/common';
|
||||
|
||||
import { PermissionModule } from '../permission';
|
||||
import { QuotaModule } from '../quota';
|
||||
import { DocHistoryManager } from './history';
|
||||
import { DocManager } from './manager';
|
||||
import { PgUserspaceDocStorageAdapter } from './adapters/userspace';
|
||||
import { PgWorkspaceDocStorageAdapter } from './adapters/workspace';
|
||||
import { DocStorageCronJob } from './job';
|
||||
import { DocStorageOptions } from './options';
|
||||
|
||||
@Module({
|
||||
imports: [QuotaModule],
|
||||
providers: [DocManager, DocHistoryManager],
|
||||
exports: [DocManager, DocHistoryManager],
|
||||
imports: [QuotaModule, PermissionModule],
|
||||
providers: [
|
||||
DocStorageOptions,
|
||||
PgWorkspaceDocStorageAdapter,
|
||||
PgUserspaceDocStorageAdapter,
|
||||
DocStorageCronJob,
|
||||
],
|
||||
exports: [PgWorkspaceDocStorageAdapter, PgUserspaceDocStorageAdapter],
|
||||
})
|
||||
export class DocModule {}
|
||||
export class DocStorageModule {}
|
||||
export { PgUserspaceDocStorageAdapter, PgWorkspaceDocStorageAdapter };
|
||||
|
||||
export { DocHistoryManager, DocManager };
|
||||
export { DocStorageAdapter, type Editor } from './storage';
|
||||
|
||||
76
packages/backend/server/src/core/doc/job.ts
Normal file
76
packages/backend/server/src/core/doc/job.ts
Normal file
@@ -0,0 +1,76 @@
|
||||
import { Injectable, Logger, OnModuleInit } from '@nestjs/common';
|
||||
import { Cron, CronExpression, SchedulerRegistry } from '@nestjs/schedule';
|
||||
import { PrismaClient } from '@prisma/client';
|
||||
|
||||
import { CallTimer, Config, metrics } from '../../fundamentals';
|
||||
import { PgWorkspaceDocStorageAdapter } from './adapters/workspace';
|
||||
|
||||
@Injectable()
/**
 * Background jobs for the doc storage layer: periodic auto-merge of pending
 * doc updates, nightly cleanup of expired history records, and a per-minute
 * gauge of the pending-updates queue size.
 */
export class DocStorageCronJob implements OnModuleInit {
  private readonly logger = new Logger(DocStorageCronJob.name);
  // guards against overlapping auto-merge runs when one run takes longer
  // than the poll interval
  private busy = false;

  constructor(
    private readonly registry: SchedulerRegistry,
    private readonly config: Config,
    private readonly db: PrismaClient,
    private readonly workspace: PgWorkspaceDocStorageAdapter
  ) {}

  // Starts the auto-merge polling loop when enabled in config.
  onModuleInit() {
    if (this.config.doc.manager.enableUpdateAutoMerging) {
      this.registry.addInterval(
        this.autoMergePendingDocUpdates.name,
        // scheduler registry will clean up the interval when the app is stopped
        setInterval(() => {
          if (this.busy) {
            return;
          }
          this.busy = true;
          this.autoMergePendingDocUpdates()
            .catch(() => {
              /* never fail */
            })
            .finally(() => {
              this.busy = false;
            });
        }, this.config.doc.manager.updatePollInterval)
      );

      this.logger.log('Updates pending queue auto merging cron started');
    }
  }

  // Picks one random doc with pending updates and forces a merge by reading
  // it (getDoc applies and squashes pending updates). Errors are counted and
  // logged, never rethrown.
  @CallTimer('doc', 'auto_merge_pending_doc_updates')
  async autoMergePendingDocUpdates() {
    try {
      const randomDoc = await this.workspace.randomDoc();
      if (!randomDoc) {
        return;
      }

      await this.workspace.getDoc(randomDoc.workspaceId, randomDoc.docId);
    } catch (e) {
      metrics.doc.counter('auto_merge_pending_doc_updates_error').add(1);
      this.logger.error('Failed to auto merge pending doc updates', e);
    }
  }

  // Deletes history records past their retention window.
  @Cron(CronExpression.EVERY_DAY_AT_MIDNIGHT /* everyday at 12am */)
  async cleanupExpiredHistory() {
    await this.db.snapshotHistory.deleteMany({
      where: {
        expiredAt: {
          lte: new Date(),
        },
      },
    });
  }

  // Reports the total number of pending update rows as a gauge metric.
  @Cron(CronExpression.EVERY_MINUTE)
  async reportUpdatesQueueCount() {
    metrics.doc
      .gauge('updates_queue_count')
      .record(await this.db.update.count());
  }
}
|
||||
@@ -1,853 +0,0 @@
|
||||
import {
|
||||
Injectable,
|
||||
Logger,
|
||||
OnModuleDestroy,
|
||||
OnModuleInit,
|
||||
} from '@nestjs/common';
|
||||
import { Cron, CronExpression } from '@nestjs/schedule';
|
||||
import { PrismaClient, Snapshot, Update } from '@prisma/client';
|
||||
import { chunk } from 'lodash-es';
|
||||
import { defer, retry } from 'rxjs';
|
||||
import {
|
||||
applyUpdate,
|
||||
Doc,
|
||||
encodeStateAsUpdate,
|
||||
encodeStateVector,
|
||||
transact,
|
||||
} from 'yjs';
|
||||
|
||||
import type { EventPayload } from '../../fundamentals';
|
||||
import {
|
||||
Cache,
|
||||
CallTimer,
|
||||
Config,
|
||||
EventEmitter,
|
||||
mergeUpdatesInApplyWay as jwstMergeUpdates,
|
||||
metrics,
|
||||
OnEvent,
|
||||
} from '../../fundamentals';
|
||||
|
||||
function compare(yBinary: Buffer, jwstBinary: Buffer, strict = false): boolean {
|
||||
if (yBinary.equals(jwstBinary)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (strict) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const doc = new Doc();
|
||||
applyUpdate(doc, jwstBinary);
|
||||
|
||||
const yBinary2 = Buffer.from(encodeStateAsUpdate(doc));
|
||||
|
||||
return compare(yBinary, yBinary2, true);
|
||||
}
|
||||
|
||||
export function isEmptyBuffer(buf: Buffer): boolean {
|
||||
return (
|
||||
buf.length === 0 ||
|
||||
// 0x0000
|
||||
(buf.length === 2 && buf[0] === 0 && buf[1] === 0)
|
||||
);
|
||||
}
|
||||
|
||||
const MAX_SEQ_NUM = 0x3fffffff; // u31
|
||||
const UPDATES_QUEUE_CACHE_KEY = 'doc:manager:updates';
|
||||
|
||||
interface DocResponse {
|
||||
doc: Doc;
|
||||
timestamp: number;
|
||||
}
|
||||
|
||||
interface BinaryResponse {
|
||||
binary: Buffer;
|
||||
timestamp: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Since we can't directly save all client updates into database, in which way the database will overload,
|
||||
* we need to buffer the updates and merge them to reduce db write.
|
||||
*
|
||||
* And also, if a new client join, it would be nice to see the latest doc asap,
|
||||
* so we need to at least store a snapshot of the doc and return quickly,
|
||||
* along side all the updates that have not been applies to that snapshot(timestamp).
|
||||
*/
|
||||
@Injectable()
|
||||
export class DocManager implements OnModuleInit, OnModuleDestroy {
|
||||
private readonly logger = new Logger(DocManager.name);
|
||||
private job: NodeJS.Timeout | null = null;
|
||||
private readonly seqMap = new Map<string, number>();
|
||||
private busy = false;
|
||||
|
||||
constructor(
|
||||
private readonly db: PrismaClient,
|
||||
private readonly config: Config,
|
||||
private readonly cache: Cache,
|
||||
private readonly event: EventEmitter
|
||||
) {}
|
||||
|
||||
onModuleInit() {
|
||||
if (this.config.doc.manager.enableUpdateAutoMerging) {
|
||||
this.logger.log('Use Database');
|
||||
this.setup();
|
||||
}
|
||||
}
|
||||
|
||||
onModuleDestroy() {
|
||||
this.destroy();
|
||||
}
|
||||
|
||||
@CallTimer('doc', 'yjs_recover_updates_to_doc')
|
||||
private recoverDoc(...updates: Buffer[]): Promise<Doc> {
|
||||
const doc = new Doc();
|
||||
const chunks = chunk(updates, 10);
|
||||
|
||||
return new Promise(resolve => {
|
||||
const next = () => {
|
||||
const updates = chunks.shift();
|
||||
if (updates?.length) {
|
||||
transact(doc, () => {
|
||||
updates.forEach(u => {
|
||||
try {
|
||||
applyUpdate(doc, u);
|
||||
} catch (e) {
|
||||
this.logger.error('Failed to apply update', e);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// avoid applying too many updates in single round which will take the whole cpu time like dead lock
|
||||
setImmediate(() => {
|
||||
next();
|
||||
});
|
||||
} else {
|
||||
resolve(doc);
|
||||
}
|
||||
};
|
||||
|
||||
next();
|
||||
});
|
||||
}
|
||||
|
||||
private async applyUpdates(guid: string, ...updates: Buffer[]): Promise<Doc> {
|
||||
const doc = await this.recoverDoc(...updates);
|
||||
|
||||
const useYocto = await this.config.runtime.fetch(
|
||||
'doc/experimentalMergeWithYOcto'
|
||||
);
|
||||
// test jwst codec
|
||||
if (useYocto) {
|
||||
metrics.jwst.counter('codec_merge_counter').add(1);
|
||||
const yjsResult = Buffer.from(encodeStateAsUpdate(doc));
|
||||
let log = false;
|
||||
try {
|
||||
const jwstResult = jwstMergeUpdates(updates);
|
||||
if (!compare(yjsResult, jwstResult)) {
|
||||
metrics.jwst.counter('codec_not_match').add(1);
|
||||
this.logger.warn(
|
||||
`jwst codec result doesn't match yjs codec result for: ${guid}`
|
||||
);
|
||||
log = true;
|
||||
if (this.config.node.dev) {
|
||||
this.logger.warn(`Expected:\n ${yjsResult.toString('hex')}`);
|
||||
this.logger.warn(`Result:\n ${jwstResult.toString('hex')}`);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
metrics.jwst.counter('codec_fails_counter').add(1);
|
||||
this.logger.warn(`jwst apply update failed for ${guid}: ${e}`);
|
||||
log = true;
|
||||
} finally {
|
||||
if (log && this.config.node.dev) {
|
||||
this.logger.warn(
|
||||
`Updates: ${updates.map(u => u.toString('hex')).join('\n')}`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return doc;
|
||||
}
|
||||
|
||||
/**
|
||||
* setup pending update processing loop
|
||||
*/
|
||||
setup() {
|
||||
this.job = setInterval(() => {
|
||||
if (!this.busy) {
|
||||
this.busy = true;
|
||||
this.autoSquash()
|
||||
.catch(() => {
|
||||
/* we handle all errors in work itself */
|
||||
})
|
||||
.finally(() => {
|
||||
this.busy = false;
|
||||
});
|
||||
}
|
||||
}, this.config.doc.manager.updatePollInterval);
|
||||
|
||||
this.logger.log('Automation started');
|
||||
}
|
||||
|
||||
/**
|
||||
* stop pending update processing loop
|
||||
*/
|
||||
destroy() {
|
||||
if (this.job) {
|
||||
clearInterval(this.job);
|
||||
this.job = null;
|
||||
this.logger.log('Automation stopped');
|
||||
}
|
||||
}
|
||||
|
||||
@OnEvent('workspace.deleted')
|
||||
async onWorkspaceDeleted(workspaceId: string) {
|
||||
await this.db.snapshot.deleteMany({
|
||||
where: {
|
||||
workspaceId,
|
||||
},
|
||||
});
|
||||
await this.db.update.deleteMany({
|
||||
where: {
|
||||
workspaceId,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
@OnEvent('snapshot.deleted')
|
||||
async onSnapshotDeleted({
|
||||
id,
|
||||
workspaceId,
|
||||
}: EventPayload<'snapshot.deleted'>) {
|
||||
await this.db.update.deleteMany({
|
||||
where: {
|
||||
id,
|
||||
workspaceId,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* add update to manager for later processing.
|
||||
*/
|
||||
async push(
|
||||
workspaceId: string,
|
||||
guid: string,
|
||||
update: Buffer,
|
||||
retryTimes = 10
|
||||
) {
|
||||
const timestamp = await new Promise<number>((resolve, reject) => {
|
||||
defer(async () => {
|
||||
const seq = await this.getUpdateSeq(workspaceId, guid);
|
||||
const { createdAt } = await this.db.update.create({
|
||||
select: {
|
||||
createdAt: true,
|
||||
},
|
||||
data: {
|
||||
workspaceId,
|
||||
id: guid,
|
||||
seq,
|
||||
blob: update,
|
||||
},
|
||||
});
|
||||
|
||||
return createdAt.getTime();
|
||||
})
|
||||
.pipe(retry(retryTimes)) // retry until seq num not conflict
|
||||
.subscribe({
|
||||
next: timestamp => {
|
||||
this.logger.debug(
|
||||
`pushed 1 update for ${guid} in workspace ${workspaceId}`
|
||||
);
|
||||
resolve(timestamp);
|
||||
},
|
||||
error: e => {
|
||||
this.logger.error('Failed to push updates', e);
|
||||
reject(new Error('Failed to push update'));
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
await this.updateCachedUpdatesCount(workspaceId, guid, 1);
|
||||
|
||||
return timestamp;
|
||||
}
|
||||
|
||||
async batchPush(
|
||||
workspaceId: string,
|
||||
guid: string,
|
||||
updates: Buffer[],
|
||||
retryTimes = 10
|
||||
) {
|
||||
const timestamp = await new Promise<number>((resolve, reject) => {
|
||||
defer(async () => {
|
||||
const lastSeq = await this.getUpdateSeq(
|
||||
workspaceId,
|
||||
guid,
|
||||
updates.length
|
||||
);
|
||||
const now = Date.now();
|
||||
let timestamp = now;
|
||||
let turn = 0;
|
||||
const batchCount = 10;
|
||||
for (const batch of chunk(updates, batchCount)) {
|
||||
await this.db.update.createMany({
|
||||
data: batch.map((update, i) => {
|
||||
const subSeq = turn * batchCount + i + 1;
|
||||
// `seq` is the last seq num of the batch
|
||||
// example for 11 batched updates, start from seq num 20
|
||||
// seq for first update in the batch should be:
|
||||
// 31 - 11 + subSeq(0 * 10 + 0 + 1) = 21
|
||||
// ^ last seq num ^ updates.length ^ turn ^ batchCount ^i
|
||||
const seq = lastSeq - updates.length + subSeq;
|
||||
const createdAt = now + subSeq;
|
||||
timestamp = Math.max(timestamp, createdAt);
|
||||
|
||||
return {
|
||||
workspaceId,
|
||||
id: guid,
|
||||
blob: update,
|
||||
seq,
|
||||
createdAt: new Date(createdAt), // make sure the updates can be ordered by create time
|
||||
};
|
||||
}),
|
||||
});
|
||||
turn++;
|
||||
}
|
||||
|
||||
return timestamp;
|
||||
})
|
||||
.pipe(retry(retryTimes)) // retry until seq num not conflict
|
||||
.subscribe({
|
||||
next: timestamp => {
|
||||
this.logger.debug(
|
||||
`pushed ${updates.length} updates for ${guid} in workspace ${workspaceId}`
|
||||
);
|
||||
resolve(timestamp);
|
||||
},
|
||||
error: e => {
|
||||
this.logger.error('Failed to push updates', e);
|
||||
reject(new Error('Failed to push update'));
|
||||
},
|
||||
});
|
||||
});
|
||||
await this.updateCachedUpdatesCount(workspaceId, guid, updates.length);
|
||||
|
||||
return timestamp;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get latest timestamp of all docs in the workspace.
|
||||
*/
|
||||
@CallTimer('doc', 'get_doc_timestamps')
|
||||
async getDocTimestamps(workspaceId: string, after: number | undefined = 0) {
|
||||
const snapshots = await this.db.snapshot.findMany({
|
||||
where: {
|
||||
workspaceId,
|
||||
updatedAt: {
|
||||
gt: new Date(after),
|
||||
},
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
updatedAt: true,
|
||||
},
|
||||
});
|
||||
|
||||
const updates = await this.db.update.groupBy({
|
||||
where: {
|
||||
workspaceId,
|
||||
createdAt: {
|
||||
gt: new Date(after),
|
||||
},
|
||||
},
|
||||
by: ['id'],
|
||||
_max: {
|
||||
createdAt: true,
|
||||
},
|
||||
});
|
||||
|
||||
const result: Record<string, number> = {};
|
||||
|
||||
snapshots.forEach(s => {
|
||||
result[s.id] = s.updatedAt.getTime();
|
||||
});
|
||||
|
||||
updates.forEach(u => {
|
||||
if (u._max.createdAt) {
|
||||
result[u.id] = u._max.createdAt.getTime();
|
||||
}
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* get the latest doc with all update applied.
|
||||
*/
|
||||
async get(workspaceId: string, guid: string): Promise<DocResponse | null> {
|
||||
const result = await this._get(workspaceId, guid);
|
||||
if (result) {
|
||||
if ('doc' in result) {
|
||||
return result;
|
||||
} else {
|
||||
const doc = await this.recoverDoc(result.binary);
|
||||
|
||||
return {
|
||||
doc,
|
||||
timestamp: result.timestamp,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* get the latest doc binary with all update applied.
|
||||
*/
|
||||
async getBinary(
|
||||
workspaceId: string,
|
||||
guid: string
|
||||
): Promise<BinaryResponse | null> {
|
||||
const result = await this._get(workspaceId, guid);
|
||||
if (result) {
|
||||
if ('doc' in result) {
|
||||
return {
|
||||
binary: Buffer.from(encodeStateAsUpdate(result.doc)),
|
||||
timestamp: result.timestamp,
|
||||
};
|
||||
} else {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* get the latest doc state vector with all update applied.
|
||||
*/
|
||||
async getDocState(
|
||||
workspaceId: string,
|
||||
guid: string
|
||||
): Promise<BinaryResponse | null> {
|
||||
const snapshot = await this.getSnapshot(workspaceId, guid);
|
||||
const updates = await this.getUpdates(workspaceId, guid);
|
||||
|
||||
if (updates.length) {
|
||||
const { doc, timestamp } = await this.squash(snapshot, updates);
|
||||
return {
|
||||
binary: Buffer.from(encodeStateVector(doc)),
|
||||
timestamp,
|
||||
};
|
||||
}
|
||||
|
||||
return snapshot?.state
|
||||
? {
|
||||
binary: snapshot.state,
|
||||
timestamp: snapshot.updatedAt.getTime(),
|
||||
}
|
||||
: null;
|
||||
}
|
||||
|
||||
/**
|
||||
* get the snapshot of the doc we've seen.
|
||||
*/
|
||||
async getSnapshot(workspaceId: string, guid: string) {
|
||||
return this.db.snapshot.findUnique({
|
||||
where: {
|
||||
id_workspaceId: {
|
||||
workspaceId,
|
||||
id: guid,
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* get pending updates
|
||||
*/
|
||||
async getUpdates(workspaceId: string, guid: string) {
|
||||
const updates = await this.db.update.findMany({
|
||||
where: {
|
||||
workspaceId,
|
||||
id: guid,
|
||||
},
|
||||
// take it ease, we don't want to overload db and or cpu
|
||||
// if we limit the taken number here,
|
||||
// user will never see the latest doc if there are too many updates pending to be merged.
|
||||
take: this.config.doc.manager.maxUpdatesPullCount,
|
||||
});
|
||||
|
||||
// perf(memory): avoid sorting in db
|
||||
return updates.sort((a, b) => (a.createdAt < b.createdAt ? -1 : 1));
|
||||
}
|
||||
|
||||
/**
|
||||
* apply pending updates to snapshot
|
||||
*/
|
||||
private async autoSquash() {
|
||||
// find the first update and batch process updates with same id
|
||||
const candidate = await this.getAutoSquashCandidate();
|
||||
|
||||
// no pending updates
|
||||
if (!candidate) {
|
||||
return;
|
||||
}
|
||||
|
||||
const { id, workspaceId } = candidate;
|
||||
|
||||
await this.lockUpdatesForAutoSquash(workspaceId, id, async () => {
|
||||
try {
|
||||
await this._get(workspaceId, id);
|
||||
} catch (e) {
|
||||
this.logger.error(
|
||||
`Failed to apply updates for workspace: ${workspaceId}, guid: ${id}`
|
||||
);
|
||||
this.logger.error(e);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private async getAutoSquashCandidate() {
|
||||
const cache = await this.getAutoSquashCandidateFromCache();
|
||||
|
||||
if (cache) {
|
||||
return cache;
|
||||
}
|
||||
|
||||
return this.db.update.findFirst({
|
||||
select: {
|
||||
id: true,
|
||||
workspaceId: true,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns whether the snapshot is updated to the latest, `undefined` means the doc to be upserted is outdated.
|
||||
*/
|
||||
@CallTimer('doc', 'upsert')
|
||||
private async upsert(
|
||||
workspaceId: string,
|
||||
guid: string,
|
||||
doc: Doc,
|
||||
// we always delay the snapshot update to avoid db overload,
|
||||
// so the value of auto updated `updatedAt` by db will never be accurate to user's real action time
|
||||
updatedAt: Date,
|
||||
seq: number
|
||||
) {
|
||||
const blob = Buffer.from(encodeStateAsUpdate(doc));
|
||||
|
||||
if (isEmptyBuffer(blob)) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const state = Buffer.from(encodeStateVector(doc));
|
||||
|
||||
// CONCERNS:
|
||||
// i. Because we save the real user's last seen action time as `updatedAt`,
|
||||
// it's possible to simply compare the `updatedAt` to determine if the snapshot is older than the one we are going to save.
|
||||
//
|
||||
// ii. Prisma doesn't support `upsert` with additional `where` condition along side unique constraint.
|
||||
// In our case, we need to manually check the `updatedAt` to avoid overriding the newer snapshot.
|
||||
// where: { id_workspaceId: {}, updatedAt: { lt: updatedAt } }
|
||||
// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
//
|
||||
// iii. Only set the seq number when creating the snapshot.
|
||||
// For updating scenario, the seq number will be updated when updates pushed to db.
|
||||
try {
|
||||
const result: { updatedAt: Date }[] = await this.db.$queryRaw`
|
||||
INSERT INTO "snapshots" ("workspace_id", "guid", "blob", "state", "seq", "created_at", "updated_at")
|
||||
VALUES (${workspaceId}, ${guid}, ${blob}, ${state}, ${seq}, DEFAULT, ${updatedAt})
|
||||
ON CONFLICT ("workspace_id", "guid")
|
||||
DO UPDATE SET "blob" = ${blob}, "state" = ${state}, "updated_at" = ${updatedAt}, "seq" = ${seq}
|
||||
WHERE "snapshots"."workspace_id" = ${workspaceId} AND "snapshots"."guid" = ${guid} AND "snapshots"."updated_at" <= ${updatedAt}
|
||||
RETURNING "snapshots"."workspace_id" as "workspaceId", "snapshots"."guid" as "id", "snapshots"."updated_at" as "updatedAt"
|
||||
`;
|
||||
|
||||
// const result = await this.db.snapshot.upsert({
|
||||
// select: {
|
||||
// updatedAt: true,
|
||||
// seq: true,
|
||||
// },
|
||||
// where: {
|
||||
// id_workspaceId: {
|
||||
// workspaceId,
|
||||
// id: guid,
|
||||
// },
|
||||
// ⬇️ NOT SUPPORTED BY PRISMA YET
|
||||
// updatedAt: {
|
||||
// lt: updatedAt,
|
||||
// },
|
||||
// },
|
||||
// update: {
|
||||
// blob,
|
||||
// state,
|
||||
// updatedAt,
|
||||
// },
|
||||
// create: {
|
||||
// workspaceId,
|
||||
// id: guid,
|
||||
// blob,
|
||||
// state,
|
||||
// updatedAt,
|
||||
// seq,
|
||||
// },
|
||||
// });
|
||||
|
||||
// if the condition `snapshot.updatedAt > updatedAt` is true, by which means the snapshot has already been updated by other process,
|
||||
// the updates has been applied to current `doc` must have been seen by the other process as well.
|
||||
// The `updatedSnapshot` will be `undefined` in this case.
|
||||
const updatedSnapshot = result.at(0);
|
||||
|
||||
if (!updatedSnapshot) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return true;
|
||||
} catch (e) {
|
||||
this.logger.error('Failed to upsert snapshot', e);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private async _get(
|
||||
workspaceId: string,
|
||||
guid: string
|
||||
): Promise<DocResponse | BinaryResponse | null> {
|
||||
const snapshot = await this.getSnapshot(workspaceId, guid);
|
||||
const updates = await this.getUpdates(workspaceId, guid);
|
||||
|
||||
if (updates.length) {
|
||||
return this.squash(snapshot, updates);
|
||||
}
|
||||
|
||||
return snapshot
|
||||
? { binary: snapshot.blob, timestamp: snapshot.updatedAt.getTime() }
|
||||
: null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Squash updates into a single update and save it as snapshot,
|
||||
* and delete the updates records at the same time.
|
||||
*/
|
||||
@CallTimer('doc', 'squash')
|
||||
private async squash(
|
||||
snapshot: Snapshot | null,
|
||||
updates: Update[]
|
||||
): Promise<DocResponse> {
|
||||
if (!updates.length) {
|
||||
throw new Error('No updates to squash');
|
||||
}
|
||||
|
||||
const last = updates[updates.length - 1];
|
||||
const { id, workspaceId } = last;
|
||||
|
||||
const doc = await this.applyUpdates(
|
||||
id,
|
||||
snapshot ? snapshot.blob : Buffer.from([0, 0]),
|
||||
...updates.map(u => u.blob)
|
||||
);
|
||||
|
||||
const done = await this.upsert(
|
||||
workspaceId,
|
||||
id,
|
||||
doc,
|
||||
last.createdAt,
|
||||
last.seq
|
||||
);
|
||||
|
||||
if (done) {
|
||||
if (snapshot) {
|
||||
this.event.emit('snapshot.updated', {
|
||||
id,
|
||||
workspaceId,
|
||||
previous: {
|
||||
blob: snapshot.blob,
|
||||
state: snapshot.state,
|
||||
updatedAt: snapshot.updatedAt,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
this.logger.debug(
|
||||
`Squashed ${updates.length} updates for ${id} in workspace ${workspaceId}`
|
||||
);
|
||||
}
|
||||
|
||||
// we will keep the updates only if the upsert failed on unknown reason
|
||||
// `done === undefined` means the updates is outdated(have already been merged by other process), safe to be deleted
|
||||
// `done === true` means the upsert is successful, safe to be deleted
|
||||
if (done !== false) {
|
||||
// always delete updates
|
||||
// the upsert will return false if the state is not newer, so we don't need to worry about it
|
||||
const { count } = await this.db.update.deleteMany({
|
||||
where: {
|
||||
id,
|
||||
workspaceId,
|
||||
seq: {
|
||||
in: updates.map(u => u.seq),
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
await this.updateCachedUpdatesCount(workspaceId, id, -count);
|
||||
}
|
||||
|
||||
return { doc, timestamp: last.createdAt.getTime() };
|
||||
}
|
||||
|
||||
private async getUpdateSeq(workspaceId: string, guid: string, batch = 1) {
|
||||
try {
|
||||
const { seq } = await this.db.snapshot.update({
|
||||
select: {
|
||||
seq: true,
|
||||
},
|
||||
where: {
|
||||
id_workspaceId: {
|
||||
workspaceId,
|
||||
id: guid,
|
||||
},
|
||||
},
|
||||
data: {
|
||||
seq: {
|
||||
increment: batch,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// reset
|
||||
if (seq >= MAX_SEQ_NUM) {
|
||||
await this.db.snapshot.update({
|
||||
select: {
|
||||
seq: true,
|
||||
},
|
||||
where: {
|
||||
id_workspaceId: {
|
||||
workspaceId,
|
||||
id: guid,
|
||||
},
|
||||
},
|
||||
data: {
|
||||
seq: 0,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
return seq;
|
||||
} catch {
|
||||
// not existing snapshot just count it from 1
|
||||
const last = this.seqMap.get(workspaceId + guid) ?? 0;
|
||||
this.seqMap.set(workspaceId + guid, last + batch);
|
||||
return last + batch;
|
||||
}
|
||||
}
|
||||
|
||||
private async updateCachedUpdatesCount(
|
||||
workspaceId: string,
|
||||
guid: string,
|
||||
count: number
|
||||
) {
|
||||
const result = await this.cache.mapIncrease(
|
||||
UPDATES_QUEUE_CACHE_KEY,
|
||||
`${workspaceId}::${guid}`,
|
||||
count
|
||||
);
|
||||
|
||||
if (result <= 0) {
|
||||
await this.cache.mapDelete(
|
||||
UPDATES_QUEUE_CACHE_KEY,
|
||||
`${workspaceId}::${guid}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
private async getAutoSquashCandidateFromCache() {
|
||||
const key = await this.cache.mapRandomKey(UPDATES_QUEUE_CACHE_KEY);
|
||||
|
||||
if (key) {
|
||||
const cachedCount = await this.cache.mapIncrease(
|
||||
UPDATES_QUEUE_CACHE_KEY,
|
||||
key,
|
||||
0
|
||||
);
|
||||
|
||||
if (cachedCount > 0) {
|
||||
const [workspaceId, id] = key.split('::');
|
||||
const count = await this.db.update.count({
|
||||
where: {
|
||||
workspaceId,
|
||||
id,
|
||||
},
|
||||
});
|
||||
|
||||
// FIXME(@forehalo): somehow the update count in cache is not accurate
|
||||
if (count === 0) {
|
||||
await this.cache.mapDelete(UPDATES_QUEUE_CACHE_KEY, key);
|
||||
|
||||
return null;
|
||||
}
|
||||
return { id, workspaceId };
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private async doWithLock<T>(
|
||||
lockScope: string,
|
||||
lockResource: string,
|
||||
job: () => Promise<T>
|
||||
) {
|
||||
const lock = `lock:${lockScope}:${lockResource}`;
|
||||
const acquired = await this.cache.setnx(lock, 1, {
|
||||
ttl: 60 * 1000,
|
||||
});
|
||||
metrics.doc.counter('lock').add(1, { scope: lockScope });
|
||||
|
||||
if (!acquired) {
|
||||
metrics.doc.counter('lock_failed').add(1, { scope: lockScope });
|
||||
return;
|
||||
}
|
||||
metrics.doc.counter('lock_required').add(1, { scope: lockScope });
|
||||
|
||||
try {
|
||||
return await job();
|
||||
} finally {
|
||||
await this.cache
|
||||
.delete(lock)
|
||||
.then(() => {
|
||||
metrics.doc.counter('lock_released').add(1, { scope: lockScope });
|
||||
})
|
||||
.catch(e => {
|
||||
metrics.doc
|
||||
.counter('lock_release_failed')
|
||||
.add(1, { scope: lockScope });
|
||||
// safe, the lock will be expired when ttl ends
|
||||
this.logger.error(`Failed to release lock ${lock}`, e);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
private async lockUpdatesForAutoSquash<T>(
|
||||
workspaceId: string,
|
||||
guid: string,
|
||||
job: () => Promise<T>
|
||||
) {
|
||||
return this.doWithLock(
|
||||
'doc:manager:updates',
|
||||
`${workspaceId}::${guid}`,
|
||||
job
|
||||
);
|
||||
}
|
||||
|
||||
@Cron(CronExpression.EVERY_MINUTE)
|
||||
async reportUpdatesQueueCount() {
|
||||
metrics.doc
|
||||
.gauge('updates_queue_count')
|
||||
.record(await this.db.update.count());
|
||||
}
|
||||
}
|
||||
130
packages/backend/server/src/core/doc/options.ts
Normal file
130
packages/backend/server/src/core/doc/options.ts
Normal file
@@ -0,0 +1,130 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { chunk } from 'lodash-es';
|
||||
import * as Y from 'yjs';
|
||||
|
||||
import {
|
||||
CallTimer,
|
||||
Config,
|
||||
mergeUpdatesInApplyWay as yotcoMergeUpdates,
|
||||
metrics,
|
||||
} from '../../fundamentals';
|
||||
import { PermissionService } from '../permission';
|
||||
import { QuotaService } from '../quota';
|
||||
import { DocStorageOptions as IDocStorageOptions } from './storage';
|
||||
|
||||
function compare(yBinary: Buffer, jwstBinary: Buffer, strict = false): boolean {
|
||||
if (yBinary.equals(jwstBinary)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (strict) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const doc = new Y.Doc();
|
||||
Y.applyUpdate(doc, jwstBinary);
|
||||
|
||||
const yBinary2 = Buffer.from(Y.encodeStateAsUpdate(doc));
|
||||
|
||||
return compare(yBinary, yBinary2, true);
|
||||
}
|
||||
|
||||
@Injectable()
|
||||
export class DocStorageOptions implements IDocStorageOptions {
|
||||
private readonly logger = new Logger('DocStorageOptions');
|
||||
|
||||
constructor(
|
||||
private readonly config: Config,
|
||||
private readonly permission: PermissionService,
|
||||
private readonly quota: QuotaService
|
||||
) {}
|
||||
|
||||
mergeUpdates = async (updates: Uint8Array[]) => {
|
||||
const useYocto = await this.config.runtime.fetch(
|
||||
'doc/experimentalMergeWithYOcto'
|
||||
);
|
||||
|
||||
if (useYocto) {
|
||||
const doc = await this.recoverDoc(updates);
|
||||
|
||||
metrics.jwst.counter('codec_merge_counter').add(1);
|
||||
const yjsResult = Buffer.from(Y.encodeStateAsUpdate(doc));
|
||||
let log = false;
|
||||
try {
|
||||
const yocto = yotcoMergeUpdates(updates.map(Buffer.from));
|
||||
if (!compare(yjsResult, yocto)) {
|
||||
metrics.jwst.counter('codec_not_match').add(1);
|
||||
this.logger.warn(`yocto codec result doesn't match yjs codec result`);
|
||||
log = true;
|
||||
if (this.config.node.dev) {
|
||||
this.logger.warn(`Expected:\n ${yjsResult.toString('hex')}`);
|
||||
this.logger.warn(`Result:\n ${yocto.toString('hex')}`);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
metrics.jwst.counter('codec_fails_counter').add(1);
|
||||
this.logger.warn(`jwst apply update failed: ${e}`);
|
||||
log = true;
|
||||
}
|
||||
|
||||
if (log && this.config.node.dev) {
|
||||
this.logger.warn(
|
||||
`Updates: ${updates.map(u => Buffer.from(u).toString('hex')).join('\n')}`
|
||||
);
|
||||
}
|
||||
|
||||
return yjsResult;
|
||||
} else {
|
||||
return this.simpleMergeUpdates(updates);
|
||||
}
|
||||
};
|
||||
|
||||
historyMaxAge = async (spaceId: string) => {
|
||||
const owner = await this.permission.getWorkspaceOwner(spaceId);
|
||||
const quota = await this.quota.getUserQuota(owner.id);
|
||||
return quota.feature.historyPeriod;
|
||||
};
|
||||
|
||||
historyMinInterval = (_spaceId: string) => {
|
||||
return this.config.doc.history.interval;
|
||||
};
|
||||
|
||||
@CallTimer('doc', 'yjs_merge_updates')
|
||||
private simpleMergeUpdates(updates: Uint8Array[]) {
|
||||
return Y.mergeUpdates(updates);
|
||||
}
|
||||
|
||||
@CallTimer('doc', 'yjs_recover_updates_to_doc')
|
||||
private recoverDoc(updates: Uint8Array[]): Promise<Y.Doc> {
|
||||
const doc = new Y.Doc();
|
||||
const chunks = chunk(updates, 10);
|
||||
let i = 0;
|
||||
|
||||
return new Promise(resolve => {
|
||||
Y.transact(doc, () => {
|
||||
const next = () => {
|
||||
const updates = chunks.at(i++);
|
||||
|
||||
if (updates?.length) {
|
||||
updates.forEach(u => {
|
||||
try {
|
||||
Y.applyUpdate(doc, u);
|
||||
} catch (e) {
|
||||
this.logger.error('Failed to apply update', e);
|
||||
}
|
||||
});
|
||||
|
||||
// avoid applying too many updates in single round which will take the whole cpu time like dead lock
|
||||
setImmediate(() => {
|
||||
next();
|
||||
});
|
||||
} else {
|
||||
resolve(doc);
|
||||
}
|
||||
};
|
||||
|
||||
next();
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
16
packages/backend/server/src/core/doc/storage/blob.ts
Normal file
16
packages/backend/server/src/core/doc/storage/blob.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
import { Connection } from './connection';
|
||||
|
||||
export interface BlobStorageOptions {}
|
||||
|
||||
export interface Blob {
|
||||
key: string;
|
||||
bin: Uint8Array;
|
||||
mimeType: string;
|
||||
}
|
||||
|
||||
export abstract class BlobStorageAdapter extends Connection {
|
||||
abstract getBlob(spaceId: string, key: string): Promise<Blob | null>;
|
||||
abstract setBlob(spaceId: string, blob: Blob): Promise<string>;
|
||||
abstract deleteBlob(spaceId: string, key: string): Promise<boolean>;
|
||||
abstract listBlobs(spaceId: string): Promise<Blob>;
|
||||
}
|
||||
11
packages/backend/server/src/core/doc/storage/connection.ts
Normal file
11
packages/backend/server/src/core/doc/storage/connection.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
export class Connection {
|
||||
protected connected: boolean = false;
|
||||
connect(): Promise<void> {
|
||||
this.connected = true;
|
||||
return Promise.resolve();
|
||||
}
|
||||
disconnect(): Promise<void> {
|
||||
this.connected = false;
|
||||
return Promise.resolve();
|
||||
}
|
||||
}
|
||||
216
packages/backend/server/src/core/doc/storage/doc.ts
Normal file
216
packages/backend/server/src/core/doc/storage/doc.ts
Normal file
@@ -0,0 +1,216 @@
|
||||
import {
|
||||
applyUpdate,
|
||||
Doc,
|
||||
encodeStateAsUpdate,
|
||||
encodeStateVector,
|
||||
mergeUpdates,
|
||||
UndoManager,
|
||||
} from 'yjs';
|
||||
|
||||
import { CallTimer } from '../../../fundamentals';
|
||||
import { Connection } from './connection';
|
||||
import { SingletonLocker } from './lock';
|
||||
|
||||
export interface DocRecord {
|
||||
spaceId: string;
|
||||
docId: string;
|
||||
bin: Uint8Array;
|
||||
timestamp: number;
|
||||
editor?: string;
|
||||
}
|
||||
|
||||
export interface DocUpdate {
|
||||
bin: Uint8Array;
|
||||
timestamp: number;
|
||||
editor?: string;
|
||||
}
|
||||
|
||||
export interface HistoryFilter {
|
||||
before?: number;
|
||||
limit?: number;
|
||||
}
|
||||
|
||||
export interface Editor {
|
||||
name: string;
|
||||
avatarUrl: string | null;
|
||||
}
|
||||
|
||||
export interface DocStorageOptions {
|
||||
mergeUpdates?: (updates: Uint8Array[]) => Promise<Uint8Array> | Uint8Array;
|
||||
}
|
||||
|
||||
export abstract class DocStorageAdapter extends Connection {
|
||||
private readonly locker = new SingletonLocker();
|
||||
|
||||
constructor(
|
||||
protected readonly options: DocStorageOptions = {
|
||||
mergeUpdates,
|
||||
}
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
// open apis
|
||||
isEmptyBin(bin: Uint8Array): boolean {
|
||||
return (
|
||||
bin.length === 0 ||
|
||||
// 0x0 for state vector
|
||||
(bin.length === 1 && bin[0] === 0) ||
|
||||
// 0x00 for update
|
||||
(bin.length === 2 && bin[0] === 0 && bin[1] === 0)
|
||||
);
|
||||
}
|
||||
|
||||
async getDoc(spaceId: string, docId: string): Promise<DocRecord | null> {
|
||||
await using _lock = await this.lockDocForUpdate(spaceId, docId);
|
||||
|
||||
const snapshot = await this.getDocSnapshot(spaceId, docId);
|
||||
const updates = await this.getDocUpdates(spaceId, docId);
|
||||
|
||||
if (updates.length) {
|
||||
const { timestamp, bin, editor } = await this.squash(
|
||||
snapshot ? [snapshot, ...updates] : updates
|
||||
);
|
||||
|
||||
const newSnapshot = {
|
||||
spaceId: spaceId,
|
||||
docId,
|
||||
bin,
|
||||
timestamp,
|
||||
editor,
|
||||
};
|
||||
|
||||
const success = await this.setDocSnapshot(newSnapshot);
|
||||
|
||||
// if there is old snapshot, create a new history record
|
||||
if (success && snapshot) {
|
||||
await this.createDocHistory(snapshot);
|
||||
}
|
||||
|
||||
// always mark updates as merged unless throws
|
||||
await this.markUpdatesMerged(spaceId, docId, updates);
|
||||
|
||||
return newSnapshot;
|
||||
}
|
||||
|
||||
return snapshot;
|
||||
}
|
||||
|
||||
abstract pushDocUpdates(
|
||||
spaceId: string,
|
||||
docId: string,
|
||||
updates: Uint8Array[],
|
||||
editorId?: string
|
||||
): Promise<number>;
|
||||
|
||||
abstract deleteDoc(spaceId: string, docId: string): Promise<void>;
|
||||
abstract deleteSpace(spaceId: string): Promise<void>;
|
||||
async rollbackDoc(
|
||||
spaceId: string,
|
||||
docId: string,
|
||||
timestamp: number,
|
||||
editorId?: string
|
||||
): Promise<void> {
|
||||
await using _lock = await this.lockDocForUpdate(spaceId, docId);
|
||||
const toSnapshot = await this.getDocHistory(spaceId, docId, timestamp);
|
||||
if (!toSnapshot) {
|
||||
throw new Error('Can not find the version to rollback to.');
|
||||
}
|
||||
|
||||
const fromSnapshot = await this.getDocSnapshot(spaceId, docId);
|
||||
|
||||
if (!fromSnapshot) {
|
||||
throw new Error('Can not find the current version of the doc.');
|
||||
}
|
||||
|
||||
const change = this.generateChangeUpdate(fromSnapshot.bin, toSnapshot.bin);
|
||||
await this.pushDocUpdates(spaceId, docId, [change], editorId);
|
||||
// force create a new history record after rollback
|
||||
await this.createDocHistory(fromSnapshot, true);
|
||||
}
|
||||
|
||||
abstract getSpaceDocTimestamps(
|
||||
spaceId: string,
|
||||
after?: number
|
||||
): Promise<Record<string, number> | null>;
|
||||
abstract listDocHistories(
|
||||
spaceId: string,
|
||||
docId: string,
|
||||
query: { skip?: number; limit?: number }
|
||||
): Promise<{ timestamp: number; editor: Editor | null }[]>;
|
||||
abstract getDocHistory(
|
||||
spaceId: string,
|
||||
docId: string,
|
||||
timestamp: number
|
||||
): Promise<DocRecord | null>;
|
||||
|
||||
// api for internal usage
|
||||
protected abstract getDocSnapshot(
|
||||
spaceId: string,
|
||||
docId: string
|
||||
): Promise<DocRecord | null>;
|
||||
protected abstract setDocSnapshot(snapshot: DocRecord): Promise<boolean>;
|
||||
protected abstract getDocUpdates(
|
||||
spaceId: string,
|
||||
docId: string
|
||||
): Promise<DocUpdate[]>;
|
||||
protected abstract markUpdatesMerged(
|
||||
spaceId: string,
|
||||
docId: string,
|
||||
updates: DocUpdate[]
|
||||
): Promise<number>;
|
||||
|
||||
protected abstract createDocHistory(
|
||||
snapshot: DocRecord,
|
||||
force?: boolean
|
||||
): Promise<boolean>;
|
||||
|
||||
@CallTimer('doc', 'squash')
|
||||
protected async squash(updates: DocUpdate[]): Promise<DocUpdate> {
|
||||
const merge = this.options?.mergeUpdates ?? mergeUpdates;
|
||||
const lastUpdate = updates.at(-1);
|
||||
if (!lastUpdate) {
|
||||
throw new Error('No updates to be squashed.');
|
||||
}
|
||||
|
||||
// fast return
|
||||
if (updates.length === 1) {
|
||||
return lastUpdate;
|
||||
}
|
||||
|
||||
const finalUpdate = await merge(updates.map(u => u.bin));
|
||||
|
||||
return {
|
||||
bin: finalUpdate,
|
||||
timestamp: lastUpdate.timestamp,
|
||||
editor: lastUpdate.editor,
|
||||
};
|
||||
}
|
||||
|
||||
protected async lockDocForUpdate(
|
||||
spaceId: string,
|
||||
docId: string
|
||||
): Promise<AsyncDisposable> {
|
||||
return this.locker.lock(`workspace:${spaceId}:update`, docId);
|
||||
}
|
||||
|
||||
protected generateChangeUpdate(newerBin: Uint8Array, olderBin: Uint8Array) {
|
||||
const newerDoc = new Doc();
|
||||
applyUpdate(newerDoc, newerBin);
|
||||
const olderDoc = new Doc();
|
||||
applyUpdate(olderDoc, olderBin);
|
||||
|
||||
const newerState = encodeStateVector(newerDoc);
|
||||
const olderState = encodeStateVector(olderDoc);
|
||||
|
||||
const diff = encodeStateAsUpdate(newerDoc, olderState);
|
||||
|
||||
const undoManager = new UndoManager(Array.from(newerDoc.share.values()));
|
||||
|
||||
applyUpdate(olderDoc, diff);
|
||||
|
||||
undoManager.undo();
|
||||
|
||||
return encodeStateAsUpdate(olderDoc, newerState);
|
||||
}
|
||||
}
|
||||
33
packages/backend/server/src/core/doc/storage/index.ts
Normal file
33
packages/backend/server/src/core/doc/storage/index.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
// TODO(@forehalo): share with frontend
|
||||
import type { BlobStorageAdapter } from './blob';
|
||||
import { Connection } from './connection';
|
||||
import type { DocStorageAdapter } from './doc';
|
||||
|
||||
export class SpaceStorage extends Connection {
|
||||
constructor(
|
||||
public readonly doc: DocStorageAdapter,
|
||||
public readonly blob: BlobStorageAdapter
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
override async connect() {
|
||||
await this.doc.connect();
|
||||
await this.blob.connect();
|
||||
}
|
||||
|
||||
override async disconnect() {
|
||||
await this.doc.disconnect();
|
||||
await this.blob.disconnect();
|
||||
}
|
||||
}
|
||||
|
||||
export { BlobStorageAdapter, type BlobStorageOptions } from './blob';
|
||||
export {
|
||||
type DocRecord,
|
||||
DocStorageAdapter,
|
||||
type DocStorageOptions,
|
||||
type DocUpdate,
|
||||
type Editor,
|
||||
type HistoryFilter,
|
||||
} from './doc';
|
||||
42
packages/backend/server/src/core/doc/storage/lock.ts
Normal file
42
packages/backend/server/src/core/doc/storage/lock.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
export interface Locker {
|
||||
lock(domain: string, resource: string): Promise<Lock>;
|
||||
}
|
||||
|
||||
export class SingletonLocker implements Locker {
|
||||
lockedResource = new Map<string, Lock>();
|
||||
constructor() {}
|
||||
|
||||
async lock(domain: string, resource: string) {
|
||||
let lock = this.lockedResource.get(`${domain}:${resource}`);
|
||||
|
||||
if (!lock) {
|
||||
lock = new Lock();
|
||||
}
|
||||
|
||||
await lock.acquire();
|
||||
|
||||
return lock;
|
||||
}
|
||||
}
|
||||
|
||||
export class Lock {
|
||||
private inner: Promise<void> = Promise.resolve();
|
||||
private release: () => void = () => {};
|
||||
|
||||
async acquire() {
|
||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||
let release: () => void = null!;
|
||||
const nextLock = new Promise<void>(resolve => {
|
||||
release = resolve;
|
||||
});
|
||||
|
||||
await this.inner;
|
||||
this.inner = nextLock;
|
||||
this.release = release;
|
||||
}
|
||||
|
||||
[Symbol.asyncDispose]() {
|
||||
this.release();
|
||||
return Promise.resolve();
|
||||
}
|
||||
}
|
||||
12
packages/backend/server/src/core/permission/index.ts
Normal file
12
packages/backend/server/src/core/permission/index.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
|
||||
import { PermissionService } from './service';
|
||||
|
||||
@Module({
|
||||
providers: [PermissionService],
|
||||
exports: [PermissionService],
|
||||
})
|
||||
export class PermissionModule {}
|
||||
|
||||
export { PermissionService } from './service';
|
||||
export { Permission, PublicPageMode } from './types';
|
||||
@@ -2,13 +2,12 @@ import { Injectable } from '@nestjs/common';
|
||||
import type { Prisma } from '@prisma/client';
|
||||
import { PrismaClient } from '@prisma/client';
|
||||
|
||||
import { DocAccessDenied, WorkspaceAccessDenied } from '../../fundamentals';
|
||||
import { Permission } from './types';
|
||||
|
||||
export enum PublicPageMode {
|
||||
Page,
|
||||
Edgeless,
|
||||
}
|
||||
import {
|
||||
DocAccessDenied,
|
||||
SpaceAccessDenied,
|
||||
SpaceOwnerNotFound,
|
||||
} from '../../fundamentals';
|
||||
import { Permission, PublicPageMode } from './types';
|
||||
|
||||
@Injectable()
|
||||
export class PermissionService {
|
||||
@@ -59,7 +58,7 @@ export class PermissionService {
|
||||
}
|
||||
|
||||
async getWorkspaceOwner(workspaceId: string) {
|
||||
return this.prisma.workspaceUserPermission.findFirstOrThrow({
|
||||
const owner = await this.prisma.workspaceUserPermission.findFirst({
|
||||
where: {
|
||||
workspaceId,
|
||||
type: Permission.Owner,
|
||||
@@ -68,6 +67,20 @@ export class PermissionService {
|
||||
user: true,
|
||||
},
|
||||
});
|
||||
|
||||
if (!owner) {
|
||||
throw new SpaceOwnerNotFound({ spaceId: workspaceId });
|
||||
}
|
||||
|
||||
return owner.user;
|
||||
}
|
||||
|
||||
async getWorkspaceMemberCount(workspaceId: string) {
|
||||
return this.prisma.workspaceUserPermission.count({
|
||||
where: {
|
||||
workspaceId,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async tryGetWorkspaceOwner(workspaceId: string) {
|
||||
@@ -152,7 +165,7 @@ export class PermissionService {
|
||||
permission: Permission = Permission.Read
|
||||
) {
|
||||
if (!(await this.tryCheckWorkspace(ws, user, permission))) {
|
||||
throw new WorkspaceAccessDenied({ workspaceId: ws });
|
||||
throw new SpaceAccessDenied({ spaceId: ws });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -195,6 +208,17 @@ export class PermissionService {
|
||||
return false;
|
||||
}
|
||||
|
||||
async allowUrlPreview(ws: string) {
|
||||
const count = await this.prisma.workspace.count({
|
||||
where: {
|
||||
id: ws,
|
||||
public: true,
|
||||
},
|
||||
});
|
||||
|
||||
return count > 0;
|
||||
}
|
||||
|
||||
async grant(
|
||||
ws: string,
|
||||
user: string,
|
||||
@@ -324,7 +348,7 @@ export class PermissionService {
|
||||
permission = Permission.Read
|
||||
) {
|
||||
if (!(await this.tryCheckPage(ws, page, user, permission))) {
|
||||
throw new DocAccessDenied({ workspaceId: ws, docId: page });
|
||||
throw new DocAccessDenied({ spaceId: ws, docId: page });
|
||||
}
|
||||
}
|
||||
|
||||
11
packages/backend/server/src/core/permission/types.ts
Normal file
11
packages/backend/server/src/core/permission/types.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
export enum Permission {
|
||||
Read = 0,
|
||||
Write = 1,
|
||||
Admin = 10,
|
||||
Owner = 99,
|
||||
}
|
||||
|
||||
export enum PublicPageMode {
|
||||
Page,
|
||||
Edgeless,
|
||||
}
|
||||
@@ -1,8 +1,8 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
|
||||
import { FeatureModule } from '../features';
|
||||
import { PermissionModule } from '../permission';
|
||||
import { StorageModule } from '../storage';
|
||||
import { PermissionService } from '../workspaces/permission';
|
||||
import { QuotaManagementResolver } from './resolver';
|
||||
import { QuotaService } from './service';
|
||||
import { QuotaManagementService } from './storage';
|
||||
@@ -14,13 +14,8 @@ import { QuotaManagementService } from './storage';
|
||||
* - quota statistics
|
||||
*/
|
||||
@Module({
|
||||
imports: [FeatureModule, StorageModule],
|
||||
providers: [
|
||||
PermissionService,
|
||||
QuotaService,
|
||||
QuotaManagementResolver,
|
||||
QuotaManagementService,
|
||||
],
|
||||
imports: [FeatureModule, StorageModule, PermissionModule],
|
||||
providers: [QuotaService, QuotaManagementResolver, QuotaManagementService],
|
||||
exports: [QuotaService, QuotaManagementService],
|
||||
})
|
||||
export class QuotaModule {}
|
||||
|
||||
@@ -71,10 +71,6 @@ export class QuotaConfig {
|
||||
return this.config.configs.historyPeriod;
|
||||
}
|
||||
|
||||
get historyPeriodFromNow() {
|
||||
return new Date(Date.now() + this.historyPeriod);
|
||||
}
|
||||
|
||||
get memberLimit() {
|
||||
return this.config.configs.memberLimit;
|
||||
}
|
||||
|
||||
@@ -7,7 +7,7 @@ import {
|
||||
} from '@nestjs/graphql';
|
||||
import { SafeIntResolver } from 'graphql-scalars';
|
||||
|
||||
import { CurrentUser } from '../auth/current-user';
|
||||
import { CurrentUser } from '../auth/session';
|
||||
import { EarlyAccessType } from '../features';
|
||||
import { UserType } from '../user';
|
||||
import { QuotaService } from './service';
|
||||
|
||||
@@ -69,7 +69,7 @@ export class QuotaService {
|
||||
...quota,
|
||||
feature: await QuotaConfig.get(this.prisma, quota.featureId),
|
||||
};
|
||||
} catch (_) {}
|
||||
} catch {}
|
||||
return null as unknown as typeof quota & {
|
||||
feature: QuotaConfig;
|
||||
};
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
|
||||
import { WorkspaceOwnerNotFound } from '../../fundamentals';
|
||||
import { FeatureService, FeatureType } from '../features';
|
||||
import { PermissionService } from '../permission';
|
||||
import { WorkspaceBlobStorage } from '../storage';
|
||||
import { PermissionService } from '../workspaces/permission';
|
||||
import { OneGB } from './constant';
|
||||
import { QuotaService } from './service';
|
||||
import { formatSize, QuotaQueryType } from './types';
|
||||
@@ -113,9 +112,9 @@ export class QuotaManagementService {
|
||||
// get workspace's owner quota and total size of used
|
||||
// quota was apply to owner's account
|
||||
async getWorkspaceUsage(workspaceId: string): Promise<QuotaBusinessType> {
|
||||
const { user: owner } =
|
||||
await this.permissions.getWorkspaceOwner(workspaceId);
|
||||
if (!owner) throw new WorkspaceOwnerNotFound({ workspaceId });
|
||||
const owner = await this.permissions.getWorkspaceOwner(workspaceId);
|
||||
const memberCount =
|
||||
await this.permissions.getWorkspaceMemberCount(workspaceId);
|
||||
const {
|
||||
feature: {
|
||||
name,
|
||||
@@ -148,6 +147,7 @@ export class QuotaManagementService {
|
||||
humanReadable,
|
||||
usedSize,
|
||||
unlimited,
|
||||
memberCount,
|
||||
};
|
||||
|
||||
if (quota.unlimited) {
|
||||
|
||||
@@ -87,6 +87,9 @@ export class QuotaQueryType {
|
||||
@Field(() => SafeIntResolver)
|
||||
memberLimit!: number;
|
||||
|
||||
@Field(() => SafeIntResolver)
|
||||
memberCount!: number;
|
||||
|
||||
@Field(() => SafeIntResolver)
|
||||
storageQuota!: number;
|
||||
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
import { Body, Controller, Post, Req, Res } from '@nestjs/common';
|
||||
import { PrismaClient } from '@prisma/client';
|
||||
import type { Request, Response } from 'express';
|
||||
|
||||
import {
|
||||
ActionForbidden,
|
||||
EventEmitter,
|
||||
InternalServerError,
|
||||
MutexService,
|
||||
Mutex,
|
||||
PasswordRequired,
|
||||
} from '../../fundamentals';
|
||||
import { AuthService, Public } from '../auth';
|
||||
import { ServerService } from '../config';
|
||||
import { UserService } from '../user/service';
|
||||
|
||||
interface CreateUserInput {
|
||||
@@ -20,11 +20,11 @@ interface CreateUserInput {
|
||||
@Controller('/api/setup')
|
||||
export class CustomSetupController {
|
||||
constructor(
|
||||
private readonly db: PrismaClient,
|
||||
private readonly user: UserService,
|
||||
private readonly auth: AuthService,
|
||||
private readonly event: EventEmitter,
|
||||
private readonly mutex: MutexService
|
||||
private readonly mutex: Mutex,
|
||||
private readonly server: ServerService
|
||||
) {}
|
||||
|
||||
@Public()
|
||||
@@ -44,7 +44,7 @@ export class CustomSetupController {
|
||||
throw new InternalServerError();
|
||||
}
|
||||
|
||||
if ((await this.db.user.count()) > 0) {
|
||||
if (await this.server.initialized()) {
|
||||
throw new ActionForbidden('First user already created');
|
||||
}
|
||||
|
||||
@@ -56,7 +56,7 @@ export class CustomSetupController {
|
||||
|
||||
try {
|
||||
await this.event.emitAsync('user.admin.created', user);
|
||||
await this.auth.setCookie(req, res, user);
|
||||
await this.auth.setCookies(req, res, user.id);
|
||||
res.send({ id: user.id, email: user.email, name: user.name });
|
||||
} catch (e) {
|
||||
await this.user.deleteUser(user.id);
|
||||
104
packages/backend/server/src/core/selfhost/index.ts
Normal file
104
packages/backend/server/src/core/selfhost/index.ts
Normal file
@@ -0,0 +1,104 @@
|
||||
import { join } from 'node:path';
|
||||
|
||||
import {
|
||||
Injectable,
|
||||
Module,
|
||||
NestMiddleware,
|
||||
OnModuleInit,
|
||||
} from '@nestjs/common';
|
||||
import { HttpAdapterHost } from '@nestjs/core';
|
||||
import type { Application, Request, Response } from 'express';
|
||||
import { static as serveStatic } from 'express';
|
||||
|
||||
import { Config } from '../../fundamentals';
|
||||
import { AuthModule } from '../auth';
|
||||
import { ServerConfigModule, ServerService } from '../config';
|
||||
import { UserModule } from '../user';
|
||||
import { CustomSetupController } from './controller';
|
||||
|
||||
@Injectable()
|
||||
export class SetupMiddleware implements NestMiddleware {
|
||||
constructor(private readonly server: ServerService) {}
|
||||
|
||||
use = (req: Request, res: Response, next: (error?: Error | any) => void) => {
|
||||
// never throw
|
||||
// eslint-disable-next-line @typescript-eslint/no-floating-promises
|
||||
this.server
|
||||
.initialized()
|
||||
.then(initialized => {
|
||||
// Redirect to setup page if not initialized
|
||||
if (!initialized && req.path !== '/admin/setup') {
|
||||
res.redirect('/admin/setup');
|
||||
return;
|
||||
}
|
||||
|
||||
// redirect to admin page if initialized
|
||||
if (initialized && req.path === '/admin/setup') {
|
||||
res.redirect('/admin');
|
||||
return;
|
||||
}
|
||||
|
||||
next();
|
||||
})
|
||||
.catch(() => {
|
||||
next();
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
@Module({
|
||||
imports: [AuthModule, UserModule, ServerConfigModule],
|
||||
providers: [SetupMiddleware],
|
||||
controllers: [CustomSetupController],
|
||||
})
|
||||
export class SelfhostModule implements OnModuleInit {
|
||||
constructor(
|
||||
private readonly config: Config,
|
||||
private readonly adapterHost: HttpAdapterHost,
|
||||
private readonly check: SetupMiddleware
|
||||
) {}
|
||||
|
||||
onModuleInit() {
|
||||
const staticPath = join(this.config.projectRoot, 'static');
|
||||
// in command line mode
|
||||
if (!this.adapterHost.httpAdapter) {
|
||||
return;
|
||||
}
|
||||
|
||||
const app = this.adapterHost.httpAdapter.getInstance<Application>();
|
||||
const basePath = this.config.server.path;
|
||||
|
||||
app.get(basePath + '/admin/index.html', (_req, res) => {
|
||||
res.redirect(basePath + '/admin');
|
||||
});
|
||||
app.use(
|
||||
basePath + '/admin',
|
||||
serveStatic(join(staticPath, 'admin'), {
|
||||
redirect: false,
|
||||
index: false,
|
||||
})
|
||||
);
|
||||
|
||||
app.get(
|
||||
[basePath + '/admin', basePath + '/admin/*'],
|
||||
this.check.use,
|
||||
(_req, res) => {
|
||||
res.sendFile(join(staticPath, 'admin', 'index.html'));
|
||||
}
|
||||
);
|
||||
|
||||
app.get(basePath + '/index.html', (_req, res) => {
|
||||
res.redirect(basePath);
|
||||
});
|
||||
app.use(
|
||||
basePath,
|
||||
serveStatic(staticPath, {
|
||||
redirect: false,
|
||||
index: false,
|
||||
})
|
||||
);
|
||||
app.get('*', this.check.use, (_req, res) => {
|
||||
res.sendFile(join(staticPath, 'index.html'));
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1,11 +0,0 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
|
||||
import { AuthModule } from '../auth';
|
||||
import { UserModule } from '../user';
|
||||
import { CustomSetupController } from './controller';
|
||||
|
||||
@Module({
|
||||
imports: [AuthModule, UserModule],
|
||||
controllers: [CustomSetupController],
|
||||
})
|
||||
export class CustomSetupModule {}
|
||||
@@ -1,328 +0,0 @@
|
||||
import { applyDecorators, Logger } from '@nestjs/common';
|
||||
import {
|
||||
ConnectedSocket,
|
||||
MessageBody,
|
||||
OnGatewayConnection,
|
||||
OnGatewayDisconnect,
|
||||
SubscribeMessage as RawSubscribeMessage,
|
||||
WebSocketGateway,
|
||||
WebSocketServer,
|
||||
} from '@nestjs/websockets';
|
||||
import { Server, Socket } from 'socket.io';
|
||||
import { encodeStateAsUpdate, encodeStateVector } from 'yjs';
|
||||
|
||||
import {
|
||||
CallTimer,
|
||||
Config,
|
||||
DocNotFound,
|
||||
GatewayErrorWrapper,
|
||||
metrics,
|
||||
NotInWorkspace,
|
||||
VersionRejected,
|
||||
WorkspaceAccessDenied,
|
||||
} from '../../../fundamentals';
|
||||
import { Auth, CurrentUser } from '../../auth';
|
||||
import { DocManager } from '../../doc';
|
||||
import { DocID } from '../../utils/doc';
|
||||
import { PermissionService } from '../../workspaces/permission';
|
||||
import { Permission } from '../../workspaces/types';
|
||||
|
||||
const SubscribeMessage = (event: string) =>
|
||||
applyDecorators(
|
||||
GatewayErrorWrapper(event),
|
||||
CallTimer('socketio', 'event_duration', { event }),
|
||||
RawSubscribeMessage(event)
|
||||
);
|
||||
|
||||
type EventResponse<Data = any> = Data extends never
|
||||
? {
|
||||
data?: never;
|
||||
}
|
||||
: {
|
||||
data: Data;
|
||||
};
|
||||
|
||||
function Sync(workspaceId: string): `${string}:sync` {
|
||||
return `${workspaceId}:sync`;
|
||||
}
|
||||
|
||||
function Awareness(workspaceId: string): `${string}:awareness` {
|
||||
return `${workspaceId}:awareness`;
|
||||
}
|
||||
|
||||
@WebSocketGateway()
|
||||
export class EventsGateway implements OnGatewayConnection, OnGatewayDisconnect {
|
||||
protected logger = new Logger(EventsGateway.name);
|
||||
private connectionCount = 0;
|
||||
|
||||
constructor(
|
||||
private readonly config: Config,
|
||||
private readonly docManager: DocManager,
|
||||
private readonly permissions: PermissionService
|
||||
) {}
|
||||
|
||||
@WebSocketServer()
|
||||
server!: Server;
|
||||
|
||||
handleConnection() {
|
||||
this.connectionCount++;
|
||||
metrics.socketio.gauge('realtime_connections').record(this.connectionCount);
|
||||
}
|
||||
|
||||
handleDisconnect() {
|
||||
this.connectionCount--;
|
||||
metrics.socketio.gauge('realtime_connections').record(this.connectionCount);
|
||||
}
|
||||
|
||||
async assertVersion(client: Socket, version?: string) {
|
||||
const shouldCheckClientVersion = await this.config.runtime.fetch(
|
||||
'flags/syncClientVersionCheck'
|
||||
);
|
||||
if (
|
||||
// @todo(@darkskygit): remove this flag after 0.12 goes stable
|
||||
shouldCheckClientVersion &&
|
||||
version !== AFFiNE.version
|
||||
) {
|
||||
client.emit('server-version-rejected', {
|
||||
currentVersion: version,
|
||||
requiredVersion: AFFiNE.version,
|
||||
reason: `Client version${
|
||||
version ? ` ${version}` : ''
|
||||
} is outdated, please update to ${AFFiNE.version}`,
|
||||
});
|
||||
|
||||
throw new VersionRejected({
|
||||
version: version || 'unknown',
|
||||
serverVersion: AFFiNE.version,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async joinWorkspace(
|
||||
client: Socket,
|
||||
room: `${string}:${'sync' | 'awareness'}`
|
||||
) {
|
||||
await client.join(room);
|
||||
}
|
||||
|
||||
async leaveWorkspace(
|
||||
client: Socket,
|
||||
room: `${string}:${'sync' | 'awareness'}`
|
||||
) {
|
||||
await client.leave(room);
|
||||
}
|
||||
|
||||
assertInWorkspace(client: Socket, room: `${string}:${'sync' | 'awareness'}`) {
|
||||
if (!client.rooms.has(room)) {
|
||||
throw new NotInWorkspace({ workspaceId: room.split(':')[0] });
|
||||
}
|
||||
}
|
||||
|
||||
async assertWorkspaceAccessible(
|
||||
workspaceId: string,
|
||||
userId: string,
|
||||
permission: Permission = Permission.Read
|
||||
) {
|
||||
if (
|
||||
!(await this.permissions.isWorkspaceMember(
|
||||
workspaceId,
|
||||
userId,
|
||||
permission
|
||||
))
|
||||
) {
|
||||
throw new WorkspaceAccessDenied({ workspaceId });
|
||||
}
|
||||
}
|
||||
|
||||
@Auth()
|
||||
@SubscribeMessage('client-handshake-sync')
|
||||
async handleClientHandshakeSync(
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@MessageBody('workspaceId') workspaceId: string,
|
||||
@MessageBody('version') version: string | undefined,
|
||||
@ConnectedSocket() client: Socket
|
||||
): Promise<EventResponse<{ clientId: string }>> {
|
||||
await this.assertVersion(client, version);
|
||||
await this.assertWorkspaceAccessible(
|
||||
workspaceId,
|
||||
user.id,
|
||||
Permission.Write
|
||||
);
|
||||
|
||||
await this.joinWorkspace(client, Sync(workspaceId));
|
||||
return {
|
||||
data: {
|
||||
clientId: client.id,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@Auth()
|
||||
@SubscribeMessage('client-handshake-awareness')
|
||||
async handleClientHandshakeAwareness(
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@MessageBody('workspaceId') workspaceId: string,
|
||||
@MessageBody('version') version: string | undefined,
|
||||
@ConnectedSocket() client: Socket
|
||||
): Promise<EventResponse<{ clientId: string }>> {
|
||||
await this.assertVersion(client, version);
|
||||
await this.assertWorkspaceAccessible(
|
||||
workspaceId,
|
||||
user.id,
|
||||
Permission.Write
|
||||
);
|
||||
|
||||
await this.joinWorkspace(client, Awareness(workspaceId));
|
||||
return {
|
||||
data: {
|
||||
clientId: client.id,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@SubscribeMessage('client-leave-sync')
|
||||
async handleLeaveSync(
|
||||
@MessageBody() workspaceId: string,
|
||||
@ConnectedSocket() client: Socket
|
||||
): Promise<EventResponse> {
|
||||
this.assertInWorkspace(client, Sync(workspaceId));
|
||||
await this.leaveWorkspace(client, Sync(workspaceId));
|
||||
return {};
|
||||
}
|
||||
|
||||
@SubscribeMessage('client-leave-awareness')
|
||||
async handleLeaveAwareness(
|
||||
@MessageBody() workspaceId: string,
|
||||
@ConnectedSocket() client: Socket
|
||||
): Promise<EventResponse> {
|
||||
this.assertInWorkspace(client, Awareness(workspaceId));
|
||||
await this.leaveWorkspace(client, Awareness(workspaceId));
|
||||
return {};
|
||||
}
|
||||
|
||||
@SubscribeMessage('client-pre-sync')
|
||||
async loadDocStats(
|
||||
@ConnectedSocket() client: Socket,
|
||||
@MessageBody()
|
||||
{ workspaceId, timestamp }: { workspaceId: string; timestamp?: number }
|
||||
): Promise<EventResponse<Record<string, number>>> {
|
||||
this.assertInWorkspace(client, Sync(workspaceId));
|
||||
|
||||
const stats = await this.docManager.getDocTimestamps(
|
||||
workspaceId,
|
||||
timestamp
|
||||
);
|
||||
|
||||
return {
|
||||
data: stats,
|
||||
};
|
||||
}
|
||||
|
||||
@SubscribeMessage('client-update-v2')
|
||||
async handleClientUpdateV2(
|
||||
@MessageBody()
|
||||
{
|
||||
workspaceId,
|
||||
guid,
|
||||
updates,
|
||||
}: {
|
||||
workspaceId: string;
|
||||
guid: string;
|
||||
updates: string[];
|
||||
},
|
||||
@ConnectedSocket() client: Socket
|
||||
): Promise<EventResponse<{ accepted: true; timestamp?: number }>> {
|
||||
this.assertInWorkspace(client, Sync(workspaceId));
|
||||
|
||||
const docId = new DocID(guid, workspaceId);
|
||||
const buffers = updates.map(update => Buffer.from(update, 'base64'));
|
||||
const timestamp = await this.docManager.batchPush(
|
||||
docId.workspace,
|
||||
docId.guid,
|
||||
buffers
|
||||
);
|
||||
|
||||
client
|
||||
.to(Sync(workspaceId))
|
||||
.emit('server-updates', { workspaceId, guid, updates, timestamp });
|
||||
|
||||
return {
|
||||
data: {
|
||||
accepted: true,
|
||||
timestamp,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@SubscribeMessage('doc-load-v2')
|
||||
async loadDocV2(
|
||||
@ConnectedSocket() client: Socket,
|
||||
@MessageBody()
|
||||
{
|
||||
workspaceId,
|
||||
guid,
|
||||
stateVector,
|
||||
}: {
|
||||
workspaceId: string;
|
||||
guid: string;
|
||||
stateVector?: string;
|
||||
}
|
||||
): Promise<
|
||||
EventResponse<{ missing: string; state?: string; timestamp: number }>
|
||||
> {
|
||||
this.assertInWorkspace(client, Sync(workspaceId));
|
||||
|
||||
const docId = new DocID(guid, workspaceId);
|
||||
const res = await this.docManager.get(docId.workspace, docId.guid);
|
||||
|
||||
if (!res) {
|
||||
throw new DocNotFound({ workspaceId, docId: docId.guid });
|
||||
}
|
||||
|
||||
const missing = Buffer.from(
|
||||
encodeStateAsUpdate(
|
||||
res.doc,
|
||||
stateVector ? Buffer.from(stateVector, 'base64') : undefined
|
||||
)
|
||||
).toString('base64');
|
||||
const state = Buffer.from(encodeStateVector(res.doc)).toString('base64');
|
||||
|
||||
return {
|
||||
data: {
|
||||
missing,
|
||||
state,
|
||||
timestamp: res.timestamp,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@SubscribeMessage('awareness-init')
|
||||
async handleInitAwareness(
|
||||
@MessageBody() workspaceId: string,
|
||||
@ConnectedSocket() client: Socket
|
||||
): Promise<EventResponse<{ clientId: string }>> {
|
||||
this.assertInWorkspace(client, Awareness(workspaceId));
|
||||
client.to(Awareness(workspaceId)).emit('new-client-awareness-init');
|
||||
return {
|
||||
data: {
|
||||
clientId: client.id,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@SubscribeMessage('awareness-update')
|
||||
async handleHelpGatheringAwareness(
|
||||
@MessageBody()
|
||||
{
|
||||
workspaceId,
|
||||
awarenessUpdate,
|
||||
}: { workspaceId: string; awarenessUpdate: string },
|
||||
@ConnectedSocket() client: Socket
|
||||
): Promise<EventResponse> {
|
||||
this.assertInWorkspace(client, Awareness(workspaceId));
|
||||
client
|
||||
.to(Awareness(workspaceId))
|
||||
.emit('server-awareness-broadcast', { workspaceId, awarenessUpdate });
|
||||
return {};
|
||||
}
|
||||
}
|
||||
@@ -1,11 +0,0 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
|
||||
import { DocModule } from '../../doc';
|
||||
import { PermissionService } from '../../workspaces/permission';
|
||||
import { EventsGateway } from './events.gateway';
|
||||
|
||||
@Module({
|
||||
imports: [DocModule],
|
||||
providers: [EventsGateway, PermissionService],
|
||||
})
|
||||
export class EventsModule {}
|
||||
665
packages/backend/server/src/core/sync/gateway.ts
Normal file
665
packages/backend/server/src/core/sync/gateway.ts
Normal file
@@ -0,0 +1,665 @@
|
||||
import { applyDecorators, Logger } from '@nestjs/common';
|
||||
import {
|
||||
ConnectedSocket,
|
||||
MessageBody,
|
||||
OnGatewayConnection,
|
||||
OnGatewayDisconnect,
|
||||
SubscribeMessage as RawSubscribeMessage,
|
||||
WebSocketGateway,
|
||||
} from '@nestjs/websockets';
|
||||
import { Socket } from 'socket.io';
|
||||
import { diffUpdate, encodeStateVectorFromUpdate } from 'yjs';
|
||||
|
||||
import {
|
||||
AlreadyInSpace,
|
||||
CallTimer,
|
||||
Config,
|
||||
DocNotFound,
|
||||
GatewayErrorWrapper,
|
||||
metrics,
|
||||
NotInSpace,
|
||||
SpaceAccessDenied,
|
||||
VersionRejected,
|
||||
} from '../../fundamentals';
|
||||
import { CurrentUser } from '../auth';
|
||||
import {
|
||||
DocStorageAdapter,
|
||||
PgUserspaceDocStorageAdapter,
|
||||
PgWorkspaceDocStorageAdapter,
|
||||
} from '../doc';
|
||||
import { Permission, PermissionService } from '../permission';
|
||||
import { DocID } from '../utils/doc';
|
||||
|
||||
const SubscribeMessage = (event: string) =>
|
||||
applyDecorators(
|
||||
GatewayErrorWrapper(event),
|
||||
CallTimer('socketio', 'event_duration', { event }),
|
||||
RawSubscribeMessage(event)
|
||||
);
|
||||
|
||||
type EventResponse<Data = any> = Data extends never
|
||||
? {
|
||||
data?: never;
|
||||
}
|
||||
: {
|
||||
data: Data;
|
||||
};
|
||||
|
||||
type RoomType = 'sync' | `${string}:awareness`;
|
||||
|
||||
function Room(
|
||||
spaceId: string,
|
||||
type: RoomType = 'sync'
|
||||
): `${string}:${RoomType}` {
|
||||
return `${spaceId}:${type}`;
|
||||
}
|
||||
|
||||
enum SpaceType {
|
||||
Workspace = 'workspace',
|
||||
Userspace = 'userspace',
|
||||
}
|
||||
|
||||
interface JoinSpaceMessage {
|
||||
spaceType: SpaceType;
|
||||
spaceId: string;
|
||||
clientVersion: string;
|
||||
}
|
||||
|
||||
interface JoinSpaceAwarenessMessage {
|
||||
spaceType: SpaceType;
|
||||
spaceId: string;
|
||||
docId: string;
|
||||
clientVersion: string;
|
||||
}
|
||||
|
||||
interface LeaveSpaceMessage {
|
||||
spaceType: SpaceType;
|
||||
spaceId: string;
|
||||
}
|
||||
|
||||
interface LeaveSpaceAwarenessMessage {
|
||||
spaceType: SpaceType;
|
||||
spaceId: string;
|
||||
docId: string;
|
||||
}
|
||||
|
||||
interface PushDocUpdatesMessage {
|
||||
spaceType: SpaceType;
|
||||
spaceId: string;
|
||||
docId: string;
|
||||
updates: string[];
|
||||
}
|
||||
|
||||
interface LoadDocMessage {
|
||||
spaceType: SpaceType;
|
||||
spaceId: string;
|
||||
docId: string;
|
||||
stateVector?: string;
|
||||
}
|
||||
|
||||
interface LoadDocTimestampsMessage {
|
||||
spaceType: SpaceType;
|
||||
spaceId: string;
|
||||
timestamp?: number;
|
||||
}
|
||||
|
||||
interface LoadSpaceAwarenessesMessage {
|
||||
spaceType: SpaceType;
|
||||
spaceId: string;
|
||||
docId: string;
|
||||
}
|
||||
interface UpdateAwarenessMessage {
|
||||
spaceType: SpaceType;
|
||||
spaceId: string;
|
||||
docId: string;
|
||||
awarenessUpdate: string;
|
||||
}
|
||||
@WebSocketGateway()
|
||||
export class SpaceSyncGateway
|
||||
implements OnGatewayConnection, OnGatewayDisconnect
|
||||
{
|
||||
protected logger = new Logger(SpaceSyncGateway.name);
|
||||
|
||||
private connectionCount = 0;
|
||||
|
||||
constructor(
|
||||
private readonly config: Config,
|
||||
private readonly permissions: PermissionService,
|
||||
private readonly workspace: PgWorkspaceDocStorageAdapter,
|
||||
private readonly userspace: PgUserspaceDocStorageAdapter
|
||||
) {}
|
||||
|
||||
handleConnection() {
|
||||
this.connectionCount++;
|
||||
metrics.socketio.gauge('realtime_connections').record(this.connectionCount);
|
||||
}
|
||||
|
||||
handleDisconnect() {
|
||||
this.connectionCount--;
|
||||
metrics.socketio.gauge('realtime_connections').record(this.connectionCount);
|
||||
}
|
||||
|
||||
selectAdapter(client: Socket, spaceType: SpaceType): SyncSocketAdapter {
|
||||
let adapters: Record<SpaceType, SyncSocketAdapter> = (client as any)
|
||||
.affineSyncAdapters;
|
||||
|
||||
if (!adapters) {
|
||||
const workspace = new WorkspaceSyncAdapter(
|
||||
client,
|
||||
this.workspace,
|
||||
this.permissions
|
||||
);
|
||||
const userspace = new UserspaceSyncAdapter(client, this.userspace);
|
||||
|
||||
adapters = { workspace, userspace };
|
||||
(client as any).affineSyncAdapters = adapters;
|
||||
}
|
||||
|
||||
return adapters[spaceType];
|
||||
}
|
||||
|
||||
async assertVersion(client: Socket, version?: string) {
|
||||
const shouldCheckClientVersion = await this.config.runtime.fetch(
|
||||
'flags/syncClientVersionCheck'
|
||||
);
|
||||
if (
|
||||
// @todo(@darkskygit): remove this flag after 0.12 goes stable
|
||||
shouldCheckClientVersion &&
|
||||
version !== AFFiNE.version
|
||||
) {
|
||||
client.emit('server-version-rejected', {
|
||||
currentVersion: version,
|
||||
requiredVersion: AFFiNE.version,
|
||||
reason: `Client version${
|
||||
version ? ` ${version}` : ''
|
||||
} is outdated, please update to ${AFFiNE.version}`,
|
||||
});
|
||||
|
||||
throw new VersionRejected({
|
||||
version: version || 'unknown',
|
||||
serverVersion: AFFiNE.version,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async joinWorkspace(
|
||||
client: Socket,
|
||||
room: `${string}:${'sync' | 'awareness'}`
|
||||
) {
|
||||
await client.join(room);
|
||||
}
|
||||
|
||||
async leaveWorkspace(
|
||||
client: Socket,
|
||||
room: `${string}:${'sync' | 'awareness'}`
|
||||
) {
|
||||
await client.leave(room);
|
||||
}
|
||||
|
||||
assertInWorkspace(client: Socket, room: `${string}:${'sync' | 'awareness'}`) {
|
||||
if (!client.rooms.has(room)) {
|
||||
throw new NotInSpace({ spaceId: room.split(':')[0] });
|
||||
}
|
||||
}
|
||||
|
||||
// v3
|
||||
@SubscribeMessage('space:join')
|
||||
async onJoinSpace(
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@ConnectedSocket() client: Socket,
|
||||
@MessageBody()
|
||||
{ spaceType, spaceId, clientVersion }: JoinSpaceMessage
|
||||
): Promise<EventResponse<{ clientId: string; success: true }>> {
|
||||
await this.assertVersion(client, clientVersion);
|
||||
|
||||
await this.selectAdapter(client, spaceType).join(user.id, spaceId);
|
||||
|
||||
return { data: { clientId: client.id, success: true } };
|
||||
}
|
||||
|
||||
@SubscribeMessage('space:leave')
|
||||
async onLeaveSpace(
|
||||
@ConnectedSocket() client: Socket,
|
||||
@MessageBody() { spaceType, spaceId }: LeaveSpaceMessage
|
||||
): Promise<EventResponse<{ clientId: string; success: true }>> {
|
||||
await this.selectAdapter(client, spaceType).leave(spaceId);
|
||||
|
||||
return { data: { clientId: client.id, success: true } };
|
||||
}
|
||||
|
||||
@SubscribeMessage('space:load-doc')
|
||||
async onLoadSpaceDoc(
|
||||
@ConnectedSocket() client: Socket,
|
||||
@MessageBody()
|
||||
{ spaceType, spaceId, docId, stateVector }: LoadDocMessage
|
||||
): Promise<
|
||||
EventResponse<{ missing: string; state?: string; timestamp: number }>
|
||||
> {
|
||||
const adapter = this.selectAdapter(client, spaceType);
|
||||
adapter.assertIn(spaceId);
|
||||
|
||||
const doc = await adapter.get(spaceId, docId);
|
||||
|
||||
if (!doc) {
|
||||
throw new DocNotFound({ spaceId, docId });
|
||||
}
|
||||
|
||||
const missing = Buffer.from(
|
||||
stateVector
|
||||
? diffUpdate(doc.bin, Buffer.from(stateVector, 'base64'))
|
||||
: doc.bin
|
||||
).toString('base64');
|
||||
|
||||
const state = Buffer.from(encodeStateVectorFromUpdate(doc.bin)).toString(
|
||||
'base64'
|
||||
);
|
||||
|
||||
return {
|
||||
data: {
|
||||
missing,
|
||||
state,
|
||||
timestamp: doc.timestamp,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@SubscribeMessage('space:push-doc-updates')
|
||||
async onReceiveDocUpdates(
|
||||
@ConnectedSocket() client: Socket,
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@MessageBody()
|
||||
message: PushDocUpdatesMessage
|
||||
): Promise<EventResponse<{ accepted: true; timestamp?: number }>> {
|
||||
const { spaceType, spaceId, docId, updates } = message;
|
||||
const adapter = this.selectAdapter(client, spaceType);
|
||||
|
||||
// TODO(@forehalo): we might need to check write permission before push updates
|
||||
const timestamp = await adapter.push(
|
||||
spaceId,
|
||||
docId,
|
||||
updates.map(update => Buffer.from(update, 'base64')),
|
||||
user.id
|
||||
);
|
||||
|
||||
// could be put in [adapter.push]
|
||||
// but the event should be kept away from adapter
|
||||
// so
|
||||
client
|
||||
.to(adapter.room(spaceId))
|
||||
.emit('space:broadcast-doc-updates', { ...message, timestamp });
|
||||
|
||||
// TODO(@forehalo): remove backward compatibility
|
||||
if (spaceType === SpaceType.Workspace) {
|
||||
const id = new DocID(docId, spaceId);
|
||||
client.to(adapter.room(spaceId)).emit('server-updates', {
|
||||
workspaceId: spaceId,
|
||||
guid: id.guid,
|
||||
updates,
|
||||
timestamp,
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
data: {
|
||||
accepted: true,
|
||||
timestamp,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@SubscribeMessage('space:load-doc-timestamps')
|
||||
async onLoadDocTimestamps(
|
||||
@ConnectedSocket() client: Socket,
|
||||
@MessageBody()
|
||||
{ spaceType, spaceId, timestamp }: LoadDocTimestampsMessage
|
||||
): Promise<EventResponse<Record<string, number>>> {
|
||||
const adapter = this.selectAdapter(client, spaceType);
|
||||
|
||||
const stats = await adapter.getTimestamps(spaceId, timestamp);
|
||||
|
||||
return {
|
||||
data: stats ?? {},
|
||||
};
|
||||
}
|
||||
|
||||
@SubscribeMessage('space:join-awareness')
|
||||
async onJoinAwareness(
|
||||
@ConnectedSocket() client: Socket,
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@MessageBody()
|
||||
{ spaceType, spaceId, docId, clientVersion }: JoinSpaceAwarenessMessage
|
||||
) {
|
||||
await this.assertVersion(client, clientVersion);
|
||||
|
||||
await this.selectAdapter(client, spaceType).join(
|
||||
user.id,
|
||||
spaceId,
|
||||
`${docId}:awareness`
|
||||
);
|
||||
|
||||
return { data: { clientId: client.id, success: true } };
|
||||
}
|
||||
|
||||
@SubscribeMessage('space:leave-awareness')
|
||||
async onLeaveAwareness(
|
||||
@ConnectedSocket() client: Socket,
|
||||
@MessageBody()
|
||||
{ spaceType, spaceId, docId }: LeaveSpaceAwarenessMessage
|
||||
) {
|
||||
await this.selectAdapter(client, spaceType).leave(
|
||||
spaceId,
|
||||
`${docId}:awareness`
|
||||
);
|
||||
|
||||
return { data: { clientId: client.id, success: true } };
|
||||
}
|
||||
|
||||
@SubscribeMessage('space:load-awarenesses')
|
||||
async onLoadAwareness(
|
||||
@ConnectedSocket() client: Socket,
|
||||
@MessageBody()
|
||||
{ spaceType, spaceId, docId }: LoadSpaceAwarenessesMessage
|
||||
) {
|
||||
const adapter = this.selectAdapter(client, spaceType);
|
||||
|
||||
const roomType = `${docId}:awareness` as const;
|
||||
adapter.assertIn(spaceId, roomType);
|
||||
client
|
||||
.to(adapter.room(spaceId, roomType))
|
||||
.emit('space:collect-awareness', { spaceType, spaceId, docId });
|
||||
|
||||
// TODO(@forehalo): remove backward compatibility
|
||||
if (spaceType === SpaceType.Workspace) {
|
||||
client
|
||||
.to(adapter.room(spaceId, roomType))
|
||||
.emit('new-client-awareness-init');
|
||||
}
|
||||
|
||||
return { data: { clientId: client.id } };
|
||||
}
|
||||
|
||||
@SubscribeMessage('space:update-awareness')
|
||||
async onUpdateAwareness(
|
||||
@ConnectedSocket() client: Socket,
|
||||
@MessageBody() message: UpdateAwarenessMessage
|
||||
) {
|
||||
const { spaceType, spaceId, docId } = message;
|
||||
const adapter = this.selectAdapter(client, spaceType);
|
||||
|
||||
const roomType = `${docId}:awareness` as const;
|
||||
adapter.assertIn(spaceId, roomType);
|
||||
client
|
||||
.to(adapter.room(spaceId, roomType))
|
||||
.emit('space:broadcast-awareness-update', message);
|
||||
|
||||
// TODO(@forehalo): remove backward compatibility
|
||||
if (spaceType === SpaceType.Workspace) {
|
||||
client
|
||||
.to(adapter.room(spaceId, roomType))
|
||||
.emit('server-awareness-broadcast', {
|
||||
workspaceId: spaceId,
|
||||
awarenessUpdate: message.awarenessUpdate,
|
||||
});
|
||||
}
|
||||
|
||||
return {};
|
||||
}
|
||||
|
||||
// TODO(@forehalo): remove
|
||||
// deprecated section
|
||||
@SubscribeMessage('client-handshake-sync')
|
||||
async handleClientHandshakeSync(
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@MessageBody('workspaceId') workspaceId: string,
|
||||
@MessageBody('version') version: string,
|
||||
@ConnectedSocket() client: Socket
|
||||
): Promise<EventResponse<{ clientId: string }>> {
|
||||
await this.assertVersion(client, version);
|
||||
|
||||
return this.onJoinSpace(user, client, {
|
||||
spaceType: SpaceType.Workspace,
|
||||
spaceId: workspaceId,
|
||||
clientVersion: version,
|
||||
});
|
||||
}
|
||||
|
||||
@SubscribeMessage('client-leave-sync')
|
||||
async handleLeaveSync(
|
||||
@MessageBody() workspaceId: string,
|
||||
@ConnectedSocket() client: Socket
|
||||
): Promise<EventResponse> {
|
||||
return this.onLeaveSpace(client, {
|
||||
spaceType: SpaceType.Workspace,
|
||||
spaceId: workspaceId,
|
||||
});
|
||||
}
|
||||
|
||||
@SubscribeMessage('client-pre-sync')
|
||||
async loadDocStats(
|
||||
@ConnectedSocket() client: Socket,
|
||||
@MessageBody()
|
||||
{ workspaceId, timestamp }: { workspaceId: string; timestamp?: number }
|
||||
): Promise<EventResponse<Record<string, number>>> {
|
||||
return this.onLoadDocTimestamps(client, {
|
||||
spaceType: SpaceType.Workspace,
|
||||
spaceId: workspaceId,
|
||||
timestamp,
|
||||
});
|
||||
}
|
||||
|
||||
@SubscribeMessage('client-update-v2')
|
||||
async handleClientUpdateV2(
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@MessageBody()
|
||||
{
|
||||
workspaceId,
|
||||
guid,
|
||||
updates,
|
||||
}: {
|
||||
workspaceId: string;
|
||||
guid: string;
|
||||
updates: string[];
|
||||
},
|
||||
@ConnectedSocket() client: Socket
|
||||
): Promise<EventResponse<{ accepted: true; timestamp?: number }>> {
|
||||
return this.onReceiveDocUpdates(client, user, {
|
||||
spaceType: SpaceType.Workspace,
|
||||
spaceId: workspaceId,
|
||||
docId: guid,
|
||||
updates,
|
||||
});
|
||||
}
|
||||
|
||||
@SubscribeMessage('doc-load-v2')
|
||||
async loadDocV2(
|
||||
@ConnectedSocket() client: Socket,
|
||||
@MessageBody()
|
||||
{
|
||||
workspaceId,
|
||||
guid,
|
||||
stateVector,
|
||||
}: {
|
||||
workspaceId: string;
|
||||
guid: string;
|
||||
stateVector?: string;
|
||||
}
|
||||
): Promise<
|
||||
EventResponse<{ missing: string; state?: string; timestamp: number }>
|
||||
> {
|
||||
return this.onLoadSpaceDoc(client, {
|
||||
spaceType: SpaceType.Workspace,
|
||||
spaceId: workspaceId,
|
||||
docId: guid,
|
||||
stateVector,
|
||||
});
|
||||
}
|
||||
|
||||
@SubscribeMessage('client-handshake-awareness')
|
||||
async handleClientHandshakeAwareness(
|
||||
@ConnectedSocket() client: Socket,
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@MessageBody('workspaceId') workspaceId: string,
|
||||
@MessageBody('version') version: string
|
||||
): Promise<EventResponse<{ clientId: string }>> {
|
||||
return this.onJoinAwareness(client, user, {
|
||||
spaceType: SpaceType.Workspace,
|
||||
spaceId: workspaceId,
|
||||
docId: workspaceId,
|
||||
clientVersion: version,
|
||||
});
|
||||
}
|
||||
|
||||
@SubscribeMessage('client-leave-awareness')
|
||||
async handleLeaveAwareness(
|
||||
@MessageBody() workspaceId: string,
|
||||
@ConnectedSocket() client: Socket
|
||||
): Promise<EventResponse> {
|
||||
return this.onLeaveAwareness(client, {
|
||||
spaceType: SpaceType.Workspace,
|
||||
spaceId: workspaceId,
|
||||
docId: workspaceId,
|
||||
});
|
||||
}
|
||||
|
||||
@SubscribeMessage('awareness-init')
|
||||
async handleInitAwareness(
|
||||
@MessageBody() workspaceId: string,
|
||||
@ConnectedSocket() client: Socket
|
||||
): Promise<EventResponse<{ clientId: string }>> {
|
||||
return this.onLoadAwareness(client, {
|
||||
spaceType: SpaceType.Workspace,
|
||||
spaceId: workspaceId,
|
||||
docId: workspaceId,
|
||||
});
|
||||
}
|
||||
|
||||
@SubscribeMessage('awareness-update')
|
||||
async handleHelpGatheringAwareness(
|
||||
@MessageBody()
|
||||
{
|
||||
workspaceId,
|
||||
awarenessUpdate,
|
||||
}: { workspaceId: string; awarenessUpdate: string },
|
||||
@ConnectedSocket() client: Socket
|
||||
): Promise<EventResponse> {
|
||||
return this.onUpdateAwareness(client, {
|
||||
spaceType: SpaceType.Workspace,
|
||||
spaceId: workspaceId,
|
||||
docId: workspaceId,
|
||||
awarenessUpdate,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
abstract class SyncSocketAdapter {
|
||||
constructor(
|
||||
private readonly spaceType: SpaceType,
|
||||
public readonly client: Socket,
|
||||
public readonly storage: DocStorageAdapter
|
||||
) {}
|
||||
|
||||
room(spaceId: string, roomType: RoomType = 'sync') {
|
||||
return `${this.spaceType}:${Room(spaceId, roomType)}`;
|
||||
}
|
||||
|
||||
async join(userId: string, spaceId: string, roomType: RoomType = 'sync') {
|
||||
this.assertNotIn(spaceId, roomType);
|
||||
await this.assertAccessible(spaceId, userId, Permission.Read);
|
||||
return this.client.join(this.room(spaceId, roomType));
|
||||
}
|
||||
|
||||
async leave(spaceId: string, roomType: RoomType = 'sync') {
|
||||
this.assertIn(spaceId, roomType);
|
||||
return this.client.leave(this.room(spaceId, roomType));
|
||||
}
|
||||
|
||||
in(spaceId: string, roomType: RoomType = 'sync') {
|
||||
return this.client.rooms.has(this.room(spaceId, roomType));
|
||||
}
|
||||
|
||||
assertNotIn(spaceId: string, roomType: RoomType = 'sync') {
|
||||
if (this.client.rooms.has(this.room(spaceId, roomType))) {
|
||||
throw new AlreadyInSpace({ spaceId });
|
||||
}
|
||||
}
|
||||
|
||||
assertIn(spaceId: string, roomType: RoomType = 'sync') {
|
||||
if (!this.client.rooms.has(this.room(spaceId, roomType))) {
|
||||
throw new NotInSpace({ spaceId });
|
||||
}
|
||||
}
|
||||
|
||||
abstract assertAccessible(
|
||||
spaceId: string,
|
||||
userId: string,
|
||||
permission?: Permission
|
||||
): Promise<void>;
|
||||
|
||||
push(spaceId: string, docId: string, updates: Buffer[], editorId: string) {
|
||||
this.assertIn(spaceId);
|
||||
return this.storage.pushDocUpdates(spaceId, docId, updates, editorId);
|
||||
}
|
||||
|
||||
get(spaceId: string, docId: string) {
|
||||
this.assertIn(spaceId);
|
||||
return this.storage.getDoc(spaceId, docId);
|
||||
}
|
||||
|
||||
getTimestamps(spaceId: string, timestamp?: number) {
|
||||
this.assertIn(spaceId);
|
||||
return this.storage.getSpaceDocTimestamps(spaceId, timestamp);
|
||||
}
|
||||
}
|
||||
|
||||
class WorkspaceSyncAdapter extends SyncSocketAdapter {
|
||||
constructor(
|
||||
client: Socket,
|
||||
storage: DocStorageAdapter,
|
||||
private readonly permission: PermissionService
|
||||
) {
|
||||
super(SpaceType.Workspace, client, storage);
|
||||
}
|
||||
|
||||
override push(
|
||||
spaceId: string,
|
||||
docId: string,
|
||||
updates: Buffer[],
|
||||
editorId: string
|
||||
) {
|
||||
const id = new DocID(docId, spaceId);
|
||||
return super.push(spaceId, id.guid, updates, editorId);
|
||||
}
|
||||
|
||||
override get(spaceId: string, docId: string) {
|
||||
const id = new DocID(docId, spaceId);
|
||||
return this.storage.getDoc(spaceId, id.guid);
|
||||
}
|
||||
|
||||
async assertAccessible(
|
||||
spaceId: string,
|
||||
userId: string,
|
||||
permission: Permission = Permission.Read
|
||||
) {
|
||||
if (
|
||||
!(await this.permission.isWorkspaceMember(spaceId, userId, permission))
|
||||
) {
|
||||
throw new SpaceAccessDenied({ spaceId });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class UserspaceSyncAdapter extends SyncSocketAdapter {
|
||||
constructor(client: Socket, storage: DocStorageAdapter) {
|
||||
super(SpaceType.Userspace, client, storage);
|
||||
}
|
||||
|
||||
async assertAccessible(
|
||||
spaceId: string,
|
||||
userId: string,
|
||||
_permission: Permission = Permission.Read
|
||||
) {
|
||||
if (spaceId !== userId) {
|
||||
throw new SpaceAccessDenied({ spaceId });
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,8 +1,11 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
|
||||
import { EventsModule } from './events/events.module';
|
||||
import { DocStorageModule } from '../doc';
|
||||
import { PermissionModule } from '../permission';
|
||||
import { SpaceSyncGateway } from './gateway';
|
||||
|
||||
@Module({
|
||||
imports: [EventsModule],
|
||||
imports: [DocStorageModule, PermissionModule],
|
||||
providers: [SpaceSyncGateway],
|
||||
})
|
||||
export class SyncModule {}
|
||||
|
||||
@@ -18,9 +18,9 @@ import {
|
||||
Throttle,
|
||||
UserNotFound,
|
||||
} from '../../fundamentals';
|
||||
import { CurrentUser } from '../auth/current-user';
|
||||
import { Public } from '../auth/guard';
|
||||
import { sessionUser } from '../auth/service';
|
||||
import { CurrentUser } from '../auth/session';
|
||||
import { Admin } from '../common';
|
||||
import { AvatarStorage } from '../storage';
|
||||
import { validators } from '../utils/validators';
|
||||
|
||||
@@ -56,11 +56,6 @@ export class UserService {
|
||||
|
||||
async createUser(data: CreateUserInput) {
|
||||
validators.assertValidEmail(data.email);
|
||||
const user = await this.findUserByEmail(data.email);
|
||||
|
||||
if (user) {
|
||||
throw new EmailAlreadyUsed();
|
||||
}
|
||||
|
||||
if (data.password) {
|
||||
const config = await this.config.runtime.fetchAll({
|
||||
@@ -77,6 +72,12 @@ export class UserService {
|
||||
}
|
||||
|
||||
async createUser_without_verification(data: CreateUserInput) {
|
||||
const user = await this.findUserByEmail(data.email);
|
||||
|
||||
if (user) {
|
||||
throw new EmailAlreadyUsed();
|
||||
}
|
||||
|
||||
if (data.password) {
|
||||
data.password = await this.crypto.encryptPassword(data.password);
|
||||
}
|
||||
@@ -158,9 +159,7 @@ export class UserService {
|
||||
|
||||
async fulfillUser(
|
||||
email: string,
|
||||
data: Partial<
|
||||
Pick<Prisma.UserCreateInput, 'emailVerifiedAt' | 'registered'>
|
||||
>
|
||||
data: Omit<Partial<Prisma.UserCreateInput>, 'id'>
|
||||
) {
|
||||
const user = await this.findUserByEmail(email);
|
||||
if (!user) {
|
||||
@@ -180,7 +179,6 @@ export class UserService {
|
||||
|
||||
if (Object.keys(data).length) {
|
||||
return await this.prisma.user.update({
|
||||
select: this.defaultUserSelect,
|
||||
where: { id: user.id },
|
||||
data,
|
||||
});
|
||||
|
||||
@@ -8,7 +8,7 @@ import {
|
||||
import type { User } from '@prisma/client';
|
||||
|
||||
import type { Payload } from '../../fundamentals/event/def';
|
||||
import { CurrentUser } from '../auth/current-user';
|
||||
import { type CurrentUser } from '../auth/session';
|
||||
|
||||
@ObjectType()
|
||||
export class UserType implements CurrentUser {
|
||||
|
||||
@@ -21,7 +21,7 @@ export class DocID {
|
||||
static parse(raw: string): DocID | null {
|
||||
try {
|
||||
return new DocID(raw);
|
||||
} catch (e) {
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -12,11 +12,10 @@ import {
|
||||
InvalidHistoryTimestamp,
|
||||
} from '../../fundamentals';
|
||||
import { CurrentUser, Public } from '../auth';
|
||||
import { DocHistoryManager, DocManager } from '../doc';
|
||||
import { PgWorkspaceDocStorageAdapter } from '../doc';
|
||||
import { Permission, PermissionService, PublicPageMode } from '../permission';
|
||||
import { WorkspaceBlobStorage } from '../storage';
|
||||
import { DocID } from '../utils/doc';
|
||||
import { PermissionService, PublicPageMode } from './permission';
|
||||
import { Permission } from './types';
|
||||
|
||||
@Controller('/api/workspaces')
|
||||
export class WorkspacesController {
|
||||
@@ -24,8 +23,7 @@ export class WorkspacesController {
|
||||
constructor(
|
||||
private readonly storage: WorkspaceBlobStorage,
|
||||
private readonly permission: PermissionService,
|
||||
private readonly docManager: DocManager,
|
||||
private readonly historyManager: DocHistoryManager,
|
||||
private readonly workspace: PgWorkspaceDocStorageAdapter,
|
||||
private readonly prisma: PrismaClient
|
||||
) {}
|
||||
|
||||
@@ -57,7 +55,7 @@ export class WorkspacesController {
|
||||
|
||||
if (!body) {
|
||||
throw new BlobNotFound({
|
||||
workspaceId,
|
||||
spaceId: workspaceId,
|
||||
blobId: name,
|
||||
});
|
||||
}
|
||||
@@ -97,14 +95,14 @@ export class WorkspacesController {
|
||||
throw new AccessDenied();
|
||||
}
|
||||
|
||||
const binResponse = await this.docManager.getBinary(
|
||||
const binResponse = await this.workspace.getDoc(
|
||||
docId.workspace,
|
||||
docId.guid
|
||||
);
|
||||
|
||||
if (!binResponse) {
|
||||
throw new DocNotFound({
|
||||
workspaceId: docId.workspace,
|
||||
spaceId: docId.workspace,
|
||||
docId: docId.guid,
|
||||
});
|
||||
}
|
||||
@@ -126,7 +124,7 @@ export class WorkspacesController {
|
||||
}
|
||||
|
||||
res.setHeader('content-type', 'application/octet-stream');
|
||||
res.send(binResponse.binary);
|
||||
res.send(binResponse.bin);
|
||||
}
|
||||
|
||||
@Get('/:id/docs/:guid/histories/:timestamp')
|
||||
@@ -142,7 +140,7 @@ export class WorkspacesController {
|
||||
let ts;
|
||||
try {
|
||||
ts = new Date(timestamp);
|
||||
} catch (e) {
|
||||
} catch {
|
||||
throw new InvalidHistoryTimestamp({ timestamp });
|
||||
}
|
||||
|
||||
@@ -153,19 +151,19 @@ export class WorkspacesController {
|
||||
Permission.Write
|
||||
);
|
||||
|
||||
const history = await this.historyManager.get(
|
||||
const history = await this.workspace.getDocHistory(
|
||||
docId.workspace,
|
||||
docId.guid,
|
||||
ts
|
||||
ts.getTime()
|
||||
);
|
||||
|
||||
if (history) {
|
||||
res.setHeader('content-type', 'application/octet-stream');
|
||||
res.setHeader('cache-control', 'private, max-age=2592000, immutable');
|
||||
res.send(history.blob);
|
||||
res.send(history.bin);
|
||||
} else {
|
||||
throw new DocHistoryNotFound({
|
||||
workspaceId: docId.workspace,
|
||||
spaceId: docId.workspace,
|
||||
docId: guid,
|
||||
timestamp: ts.getTime(),
|
||||
});
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
|
||||
import { DocModule } from '../doc';
|
||||
import { DocStorageModule } from '../doc';
|
||||
import { FeatureModule } from '../features';
|
||||
import { PermissionModule } from '../permission';
|
||||
import { QuotaModule } from '../quota';
|
||||
import { StorageModule } from '../storage';
|
||||
import { UserModule } from '../user';
|
||||
import { WorkspacesController } from './controller';
|
||||
import { WorkspaceManagementResolver } from './management';
|
||||
import { PermissionService } from './permission';
|
||||
import {
|
||||
DocHistoryResolver,
|
||||
PagePermissionResolver,
|
||||
@@ -16,17 +16,22 @@ import {
|
||||
} from './resolvers';
|
||||
|
||||
@Module({
|
||||
imports: [DocModule, FeatureModule, QuotaModule, StorageModule, UserModule],
|
||||
imports: [
|
||||
DocStorageModule,
|
||||
FeatureModule,
|
||||
QuotaModule,
|
||||
StorageModule,
|
||||
UserModule,
|
||||
PermissionModule,
|
||||
],
|
||||
controllers: [WorkspacesController],
|
||||
providers: [
|
||||
WorkspaceResolver,
|
||||
WorkspaceManagementResolver,
|
||||
PermissionService,
|
||||
PagePermissionResolver,
|
||||
DocHistoryResolver,
|
||||
WorkspaceBlobResolver,
|
||||
],
|
||||
exports: [PermissionService],
|
||||
})
|
||||
export class WorkspaceModule {}
|
||||
|
||||
|
||||
@@ -12,7 +12,7 @@ import { ActionForbidden } from '../../fundamentals';
|
||||
import { CurrentUser } from '../auth';
|
||||
import { Admin } from '../common';
|
||||
import { FeatureManagementService, FeatureType } from '../features';
|
||||
import { PermissionService } from './permission';
|
||||
import { PermissionService } from '../permission';
|
||||
import { WorkspaceType } from './types';
|
||||
|
||||
@Resolver(() => WorkspaceType)
|
||||
@@ -61,7 +61,7 @@ export class WorkspaceManagementResolver {
|
||||
|
||||
const owner = await this.permission.getWorkspaceOwner(workspaceId);
|
||||
const availableFeatures = await this.availableFeatures(user);
|
||||
if (owner.user.id !== user.id || !availableFeatures.includes(feature)) {
|
||||
if (owner.id !== user.id || !availableFeatures.includes(feature)) {
|
||||
throw new ActionForbidden();
|
||||
}
|
||||
|
||||
|
||||
@@ -19,10 +19,10 @@ import {
|
||||
PreventCache,
|
||||
} from '../../../fundamentals';
|
||||
import { CurrentUser } from '../../auth';
|
||||
import { Permission, PermissionService } from '../../permission';
|
||||
import { QuotaManagementService } from '../../quota';
|
||||
import { WorkspaceBlobStorage } from '../../storage';
|
||||
import { PermissionService } from '../permission';
|
||||
import { Permission, WorkspaceBlobSizes, WorkspaceType } from '../types';
|
||||
import { WorkspaceBlobSizes, WorkspaceType } from '../types';
|
||||
|
||||
@UseGuards(CloudThrottlerGuard)
|
||||
@Resolver(() => WorkspaceType)
|
||||
|
||||
@@ -12,10 +12,11 @@ import {
|
||||
import type { SnapshotHistory } from '@prisma/client';
|
||||
|
||||
import { CurrentUser } from '../../auth';
|
||||
import { DocHistoryManager } from '../../doc';
|
||||
import { PgWorkspaceDocStorageAdapter } from '../../doc';
|
||||
import { Permission, PermissionService } from '../../permission';
|
||||
import { DocID } from '../../utils/doc';
|
||||
import { PermissionService } from '../permission';
|
||||
import { Permission, WorkspaceType } from '../types';
|
||||
import { WorkspaceType } from '../types';
|
||||
import { EditorType } from './workspace';
|
||||
|
||||
@ObjectType()
|
||||
class DocHistoryType implements Partial<SnapshotHistory> {
|
||||
@@ -27,12 +28,15 @@ class DocHistoryType implements Partial<SnapshotHistory> {
|
||||
|
||||
@Field(() => GraphQLISODateTime)
|
||||
timestamp!: Date;
|
||||
|
||||
@Field(() => EditorType, { nullable: true })
|
||||
editor!: EditorType | null;
|
||||
}
|
||||
|
||||
@Resolver(() => WorkspaceType)
|
||||
export class DocHistoryResolver {
|
||||
constructor(
|
||||
private readonly historyManager: DocHistoryManager,
|
||||
private readonly workspace: PgWorkspaceDocStorageAdapter,
|
||||
private readonly permission: PermissionService
|
||||
) {}
|
||||
|
||||
@@ -47,17 +51,20 @@ export class DocHistoryResolver {
|
||||
): Promise<DocHistoryType[]> {
|
||||
const docId = new DocID(guid, workspace.id);
|
||||
|
||||
return this.historyManager
|
||||
.list(workspace.id, docId.guid, timestamp, take)
|
||||
.then(rows =>
|
||||
rows.map(({ timestamp }) => {
|
||||
return {
|
||||
workspaceId: workspace.id,
|
||||
id: docId.guid,
|
||||
timestamp,
|
||||
};
|
||||
})
|
||||
);
|
||||
const histories = await this.workspace.listDocHistories(
|
||||
workspace.id,
|
||||
docId.guid,
|
||||
{ before: timestamp.getTime(), limit: take }
|
||||
);
|
||||
|
||||
return histories.map(history => {
|
||||
return {
|
||||
workspaceId: workspace.id,
|
||||
id: docId.guid,
|
||||
timestamp: new Date(history.timestamp),
|
||||
editor: history.editor,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
@Mutation(() => Date)
|
||||
@@ -76,6 +83,13 @@ export class DocHistoryResolver {
|
||||
Permission.Write
|
||||
);
|
||||
|
||||
return this.historyManager.recover(docId.workspace, docId.guid, timestamp);
|
||||
await this.workspace.rollbackDoc(
|
||||
docId.workspace,
|
||||
docId.guid,
|
||||
timestamp.getTime(),
|
||||
user.id
|
||||
);
|
||||
|
||||
return timestamp;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,9 +17,13 @@ import {
|
||||
PageIsNotPublic,
|
||||
} from '../../../fundamentals';
|
||||
import { CurrentUser } from '../../auth';
|
||||
import {
|
||||
Permission,
|
||||
PermissionService,
|
||||
PublicPageMode,
|
||||
} from '../../permission';
|
||||
import { DocID } from '../../utils/doc';
|
||||
import { PermissionService, PublicPageMode } from '../permission';
|
||||
import { Permission, WorkspaceType } from '../types';
|
||||
import { WorkspaceType } from '../types';
|
||||
|
||||
registerEnumType(PublicPageMode, {
|
||||
name: 'PublicPageMode',
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
import { Logger } from '@nestjs/common';
|
||||
import {
|
||||
Args,
|
||||
Field,
|
||||
Int,
|
||||
Mutation,
|
||||
ObjectType,
|
||||
Parent,
|
||||
Query,
|
||||
ResolveField,
|
||||
@@ -15,33 +17,57 @@ import { applyUpdate, Doc } from 'yjs';
|
||||
|
||||
import type { FileUpload } from '../../../fundamentals';
|
||||
import {
|
||||
CantChangeWorkspaceOwner,
|
||||
CantChangeSpaceOwner,
|
||||
DocNotFound,
|
||||
EventEmitter,
|
||||
InternalServerError,
|
||||
MailService,
|
||||
MemberQuotaExceeded,
|
||||
MutexService,
|
||||
RequestMutex,
|
||||
SpaceAccessDenied,
|
||||
SpaceNotFound,
|
||||
Throttle,
|
||||
TooManyRequest,
|
||||
UserNotFound,
|
||||
WorkspaceAccessDenied,
|
||||
WorkspaceNotFound,
|
||||
WorkspaceOwnerNotFound,
|
||||
} from '../../../fundamentals';
|
||||
import { CurrentUser, Public } from '../../auth';
|
||||
import type { Editor } from '../../doc';
|
||||
import { Permission, PermissionService } from '../../permission';
|
||||
import { QuotaManagementService, QuotaQueryType } from '../../quota';
|
||||
import { WorkspaceBlobStorage } from '../../storage';
|
||||
import { UserService, UserType } from '../../user';
|
||||
import { PermissionService } from '../permission';
|
||||
import {
|
||||
InvitationType,
|
||||
InviteUserType,
|
||||
Permission,
|
||||
UpdateWorkspaceInput,
|
||||
WorkspaceType,
|
||||
} from '../types';
|
||||
import { defaultWorkspaceAvatar } from '../utils';
|
||||
|
||||
@ObjectType()
|
||||
export class EditorType implements Partial<Editor> {
|
||||
@Field()
|
||||
name!: string;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
avatarUrl!: string | null;
|
||||
}
|
||||
|
||||
@ObjectType()
|
||||
class WorkspacePageMeta {
|
||||
@Field(() => Date)
|
||||
createdAt!: Date;
|
||||
|
||||
@Field(() => Date)
|
||||
updatedAt!: Date;
|
||||
|
||||
@Field(() => EditorType, { nullable: true })
|
||||
createdBy!: EditorType | null;
|
||||
|
||||
@Field(() => EditorType, { nullable: true })
|
||||
updatedBy!: EditorType | null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Workspace resolver
|
||||
* Public apis rate limit: 10 req/m
|
||||
@@ -59,7 +85,7 @@ export class WorkspaceResolver {
|
||||
private readonly users: UserService,
|
||||
private readonly event: EventEmitter,
|
||||
private readonly blobStorage: WorkspaceBlobStorage,
|
||||
private readonly mutex: MutexService
|
||||
private readonly mutex: RequestMutex
|
||||
) {}
|
||||
|
||||
@ResolveField(() => Permission, {
|
||||
@@ -78,7 +104,7 @@ export class WorkspaceResolver {
|
||||
const permission = await this.permissions.get(workspace.id, user.id);
|
||||
|
||||
if (!permission) {
|
||||
throw new WorkspaceAccessDenied({ workspaceId: workspace.id });
|
||||
throw new SpaceAccessDenied({ spaceId: workspace.id });
|
||||
}
|
||||
|
||||
return permission;
|
||||
@@ -89,11 +115,22 @@ export class WorkspaceResolver {
|
||||
complexity: 2,
|
||||
})
|
||||
memberCount(@Parent() workspace: WorkspaceType) {
|
||||
return this.prisma.workspaceUserPermission.count({
|
||||
where: {
|
||||
workspaceId: workspace.id,
|
||||
},
|
||||
});
|
||||
return this.permissions.getWorkspaceMemberCount(workspace.id);
|
||||
}
|
||||
|
||||
@ResolveField(() => Boolean, {
|
||||
description: 'is current workspace initialized',
|
||||
complexity: 2,
|
||||
})
|
||||
async initialized(@Parent() workspace: WorkspaceType) {
|
||||
return this.prisma.snapshot
|
||||
.count({
|
||||
where: {
|
||||
id: workspace.id,
|
||||
workspaceId: workspace.id,
|
||||
},
|
||||
})
|
||||
.then(count => count > 0);
|
||||
}
|
||||
|
||||
@ResolveField(() => UserType, {
|
||||
@@ -101,9 +138,7 @@ export class WorkspaceResolver {
|
||||
complexity: 2,
|
||||
})
|
||||
async owner(@Parent() workspace: WorkspaceType) {
|
||||
const data = await this.permissions.getWorkspaceOwner(workspace.id);
|
||||
|
||||
return data.user;
|
||||
return this.permissions.getWorkspaceOwner(workspace.id);
|
||||
}
|
||||
|
||||
@ResolveField(() => [InviteUserType], {
|
||||
@@ -144,6 +179,35 @@ export class WorkspaceResolver {
|
||||
}));
|
||||
}
|
||||
|
||||
@ResolveField(() => WorkspacePageMeta, {
|
||||
description: 'Cloud page metadata of workspace',
|
||||
complexity: 2,
|
||||
})
|
||||
async pageMeta(
|
||||
@Parent() workspace: WorkspaceType,
|
||||
@Args('pageId') pageId: string
|
||||
) {
|
||||
const metadata = await this.prisma.snapshot.findFirst({
|
||||
where: { workspaceId: workspace.id, id: pageId },
|
||||
select: {
|
||||
createdAt: true,
|
||||
updatedAt: true,
|
||||
createdByUser: { select: { name: true, avatarUrl: true } },
|
||||
updatedByUser: { select: { name: true, avatarUrl: true } },
|
||||
},
|
||||
});
|
||||
if (!metadata) {
|
||||
throw new DocNotFound({ spaceId: workspace.id, docId: pageId });
|
||||
}
|
||||
|
||||
return {
|
||||
createdAt: metadata.createdAt,
|
||||
updatedAt: metadata.updatedAt,
|
||||
createdBy: metadata.createdByUser || null,
|
||||
updatedBy: metadata.updatedByUser || null,
|
||||
};
|
||||
}
|
||||
|
||||
@ResolveField(() => QuotaQueryType, {
|
||||
name: 'quota',
|
||||
description: 'quota of workspace',
|
||||
@@ -197,7 +261,7 @@ export class WorkspaceResolver {
|
||||
const workspace = await this.prisma.workspace.findUnique({ where: { id } });
|
||||
|
||||
if (!workspace) {
|
||||
throw new WorkspaceNotFound({ workspaceId: id });
|
||||
throw new SpaceNotFound({ spaceId: id });
|
||||
}
|
||||
|
||||
return workspace;
|
||||
@@ -308,7 +372,7 @@ export class WorkspaceResolver {
|
||||
);
|
||||
|
||||
if (permission === Permission.Owner) {
|
||||
throw new CantChangeWorkspaceOwner();
|
||||
throw new CantChangeSpaceOwner();
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -320,13 +384,8 @@ export class WorkspaceResolver {
|
||||
}
|
||||
|
||||
// member limit check
|
||||
const [memberCount, quota] = await Promise.all([
|
||||
this.prisma.workspaceUserPermission.count({
|
||||
where: { workspaceId },
|
||||
}),
|
||||
this.quota.getWorkspaceUsage(workspaceId),
|
||||
]);
|
||||
if (memberCount >= quota.memberLimit) {
|
||||
const quota = await this.quota.getWorkspaceUsage(workspaceId);
|
||||
if (quota.memberCount >= quota.memberLimit) {
|
||||
return new MemberQuotaExceeded();
|
||||
}
|
||||
|
||||
@@ -449,7 +508,7 @@ export class WorkspaceResolver {
|
||||
avatar: avatar || defaultWorkspaceAvatar,
|
||||
id: workspaceId,
|
||||
},
|
||||
user: owner.user,
|
||||
user: owner,
|
||||
invitee: invitee.user,
|
||||
};
|
||||
}
|
||||
@@ -507,12 +566,8 @@ export class WorkspaceResolver {
|
||||
|
||||
const owner = await this.permissions.getWorkspaceOwner(workspaceId);
|
||||
|
||||
if (!owner.user) {
|
||||
throw new WorkspaceOwnerNotFound({ workspaceId: workspaceId });
|
||||
}
|
||||
|
||||
if (sendLeaveMail) {
|
||||
await this.mailer.sendLeaveWorkspaceEmail(owner.user.email, {
|
||||
await this.mailer.sendLeaveWorkspaceEmail(owner.email, {
|
||||
workspaceName,
|
||||
inviteeName: user.name,
|
||||
});
|
||||
|
||||
@@ -11,15 +11,9 @@ import {
|
||||
import type { Workspace } from '@prisma/client';
|
||||
import { SafeIntResolver } from 'graphql-scalars';
|
||||
|
||||
import { Permission } from '../permission';
|
||||
import { UserType } from '../user/types';
|
||||
|
||||
export enum Permission {
|
||||
Read = 0,
|
||||
Write = 1,
|
||||
Admin = 10,
|
||||
Owner = 99,
|
||||
}
|
||||
|
||||
registerEnumType(Permission, {
|
||||
name: 'Permission',
|
||||
description: 'User permission in workspace',
|
||||
|
||||
@@ -1,17 +1,17 @@
|
||||
import Keyv from 'keyv';
|
||||
import Keyv, { KeyvOptions } from 'keyv';
|
||||
|
||||
import type { Cache, CacheSetOptions } from './def';
|
||||
|
||||
export class LocalCache implements Cache {
|
||||
private readonly kv: Keyv;
|
||||
|
||||
constructor(opts: Keyv.Options<any> = {}) {
|
||||
constructor(opts: KeyvOptions = {}) {
|
||||
this.kv = new Keyv(opts);
|
||||
}
|
||||
|
||||
// standard operation
|
||||
async get<T = unknown>(key: string): Promise<T | undefined> {
|
||||
return this.kv.get(key).catch(() => undefined);
|
||||
return this.kv.get<T>(key).catch(() => undefined);
|
||||
}
|
||||
|
||||
async set<T = unknown>(
|
||||
@@ -74,14 +74,14 @@ export class LocalCache implements Cache {
|
||||
|
||||
// list operations
|
||||
private async getArray<T = unknown>(key: string) {
|
||||
const raw = await this.kv.get(key, { raw: true });
|
||||
const raw = await this.kv.get<T[]>(key, { raw: true });
|
||||
if (raw && !Array.isArray(raw.value)) {
|
||||
throw new Error(
|
||||
`Expect an Array keyed by ${key}, but found ${raw.value}`
|
||||
);
|
||||
}
|
||||
|
||||
return raw as Keyv.DeserializedData<T[]>;
|
||||
return raw;
|
||||
}
|
||||
|
||||
private async setArray<T = unknown>(
|
||||
@@ -97,7 +97,9 @@ export class LocalCache implements Cache {
|
||||
let ttl: number | undefined = undefined;
|
||||
const raw = await this.getArray(key);
|
||||
if (raw) {
|
||||
list = raw.value;
|
||||
if (raw.value) {
|
||||
list = raw.value;
|
||||
}
|
||||
if (raw.expires) {
|
||||
ttl = raw.expires - Date.now();
|
||||
}
|
||||
@@ -112,7 +114,9 @@ export class LocalCache implements Cache {
|
||||
let ttl: number | undefined = undefined;
|
||||
const raw = await this.getArray(key);
|
||||
if (raw) {
|
||||
list = raw.value;
|
||||
if (raw.value) {
|
||||
list = raw.value;
|
||||
}
|
||||
if (raw.expires) {
|
||||
ttl = raw.expires - Date.now();
|
||||
}
|
||||
@@ -123,7 +127,7 @@ export class LocalCache implements Cache {
|
||||
}
|
||||
|
||||
async len(key: string): Promise<number> {
|
||||
return this.getArray(key).then(v => v?.value.length ?? 0);
|
||||
return this.getArray<any[]>(key).then(v => v?.value?.length ?? 0);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -147,7 +151,7 @@ export class LocalCache implements Cache {
|
||||
|
||||
private async trim<T = unknown>(key: string, start: number, end: number) {
|
||||
const raw = await this.getArray<T>(key);
|
||||
if (raw) {
|
||||
if (raw && raw.value) {
|
||||
start = (raw.value.length + start) % raw.value.length;
|
||||
// make negative end index work, and end indice is inclusive
|
||||
end = ((raw.value.length + end) % raw.value.length) + 1;
|
||||
@@ -173,7 +177,7 @@ export class LocalCache implements Cache {
|
||||
|
||||
// map operations
|
||||
private async getMap<T = unknown>(map: string) {
|
||||
const raw = await this.kv.get(map, { raw: true });
|
||||
const raw = await this.kv.get<Record<string, T>>(map, { raw: true });
|
||||
|
||||
if (raw) {
|
||||
if (typeof raw.value !== 'object') {
|
||||
@@ -187,7 +191,7 @@ export class LocalCache implements Cache {
|
||||
}
|
||||
}
|
||||
|
||||
return raw as Keyv.DeserializedData<Record<string, T>>;
|
||||
return raw;
|
||||
}
|
||||
|
||||
private async setMap<T = unknown>(
|
||||
@@ -263,7 +267,7 @@ export class LocalCache implements Cache {
|
||||
|
||||
async mapKeys(map: string): Promise<string[]> {
|
||||
const raw = await this.getMap(map);
|
||||
if (raw) {
|
||||
if (raw?.value) {
|
||||
return Object.keys(raw.value);
|
||||
}
|
||||
|
||||
@@ -277,6 +281,6 @@ export class LocalCache implements Cache {
|
||||
|
||||
async mapLen(map: string): Promise<number> {
|
||||
const raw = await this.getMap(map);
|
||||
return raw ? Object.keys(raw.value).length : 0;
|
||||
return raw?.value ? Object.keys(raw.value).length : 0;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,6 +17,7 @@ export interface PreDefinedAFFiNEConfig {
|
||||
ENV_MAP: Record<string, ConfigPaths | [ConfigPaths, EnvConfigType?]>;
|
||||
serverId: string;
|
||||
serverName: string;
|
||||
readonly projectRoot: string;
|
||||
readonly AFFINE_ENV: AFFINE_ENV;
|
||||
readonly NODE_ENV: NODE_ENV;
|
||||
readonly version: string;
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
import { resolve } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
|
||||
import pkg from '../../../package.json' assert { type: 'json' };
|
||||
import {
|
||||
AFFINE_ENV,
|
||||
@@ -62,6 +65,7 @@ function getPredefinedAFFiNEConfig(): PreDefinedAFFiNEConfig {
|
||||
affine,
|
||||
node,
|
||||
deploy: !node.dev && !node.test,
|
||||
projectRoot: resolve(fileURLToPath(import.meta.url), '../../../../'),
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -198,6 +198,10 @@ export const USER_FRIENDLY_ERRORS = {
|
||||
type: 'too_many_requests',
|
||||
message: 'Too many requests.',
|
||||
},
|
||||
not_found: {
|
||||
type: 'resource_not_found',
|
||||
message: 'Resource not found.',
|
||||
},
|
||||
|
||||
// User Errors
|
||||
user_not_found: {
|
||||
@@ -279,6 +283,10 @@ export const USER_FRIENDLY_ERRORS = {
|
||||
type: 'invalid_input',
|
||||
message: 'An invalid email token provided.',
|
||||
},
|
||||
link_expired: {
|
||||
type: 'bad_request',
|
||||
message: 'The link has expired.',
|
||||
},
|
||||
|
||||
// Authentication & Permission Errors
|
||||
authentication_required: {
|
||||
@@ -298,45 +306,49 @@ export const USER_FRIENDLY_ERRORS = {
|
||||
message: 'You must verify your email before accessing this resource.',
|
||||
},
|
||||
|
||||
// Workspace & Doc & Sync errors
|
||||
workspace_not_found: {
|
||||
// Workspace & Userspace & Doc & Sync errors
|
||||
space_not_found: {
|
||||
type: 'resource_not_found',
|
||||
args: { workspaceId: 'string' },
|
||||
message: ({ workspaceId }) => `Workspace ${workspaceId} not found.`,
|
||||
args: { spaceId: 'string' },
|
||||
message: ({ spaceId }) => `Space ${spaceId} not found.`,
|
||||
},
|
||||
not_in_workspace: {
|
||||
not_in_space: {
|
||||
type: 'action_forbidden',
|
||||
args: { workspaceId: 'string' },
|
||||
message: ({ workspaceId }) =>
|
||||
`You should join in workspace ${workspaceId} before broadcasting messages.`,
|
||||
args: { spaceId: 'string' },
|
||||
message: ({ spaceId }) =>
|
||||
`You should join in Space ${spaceId} before broadcasting messages.`,
|
||||
},
|
||||
workspace_access_denied: {
|
||||
already_in_space: {
|
||||
type: 'action_forbidden',
|
||||
args: { spaceId: 'string' },
|
||||
message: ({ spaceId }) => `You have already joined in Space ${spaceId}.`,
|
||||
},
|
||||
space_access_denied: {
|
||||
type: 'no_permission',
|
||||
args: { workspaceId: 'string' },
|
||||
message: ({ workspaceId }) =>
|
||||
`You do not have permission to access workspace ${workspaceId}.`,
|
||||
args: { spaceId: 'string' },
|
||||
message: ({ spaceId }) =>
|
||||
`You do not have permission to access Space ${spaceId}.`,
|
||||
},
|
||||
workspace_owner_not_found: {
|
||||
space_owner_not_found: {
|
||||
type: 'internal_server_error',
|
||||
args: { workspaceId: 'string' },
|
||||
message: ({ workspaceId }) =>
|
||||
`Owner of workspace ${workspaceId} not found.`,
|
||||
args: { spaceId: 'string' },
|
||||
message: ({ spaceId }) => `Owner of Space ${spaceId} not found.`,
|
||||
},
|
||||
cant_change_workspace_owner: {
|
||||
cant_change_space_owner: {
|
||||
type: 'action_forbidden',
|
||||
message: 'You are not allowed to change the owner of a workspace.',
|
||||
message: 'You are not allowed to change the owner of a Space.',
|
||||
},
|
||||
doc_not_found: {
|
||||
type: 'resource_not_found',
|
||||
args: { workspaceId: 'string', docId: 'string' },
|
||||
message: ({ workspaceId, docId }) =>
|
||||
`Doc ${docId} under workspace ${workspaceId} not found.`,
|
||||
args: { spaceId: 'string', docId: 'string' },
|
||||
message: ({ spaceId, docId }) =>
|
||||
`Doc ${docId} under Space ${spaceId} not found.`,
|
||||
},
|
||||
doc_access_denied: {
|
||||
type: 'no_permission',
|
||||
args: { workspaceId: 'string', docId: 'string' },
|
||||
message: ({ workspaceId, docId }) =>
|
||||
`You do not have permission to access doc ${docId} under workspace ${workspaceId}.`,
|
||||
args: { spaceId: 'string', docId: 'string' },
|
||||
message: ({ spaceId, docId }) =>
|
||||
`You do not have permission to access doc ${docId} under Space ${spaceId}.`,
|
||||
},
|
||||
version_rejected: {
|
||||
type: 'action_forbidden',
|
||||
@@ -351,28 +363,36 @@ export const USER_FRIENDLY_ERRORS = {
|
||||
},
|
||||
doc_history_not_found: {
|
||||
type: 'resource_not_found',
|
||||
args: { workspaceId: 'string', docId: 'string', timestamp: 'number' },
|
||||
message: ({ workspaceId, docId, timestamp }) =>
|
||||
`History of ${docId} at ${timestamp} under workspace ${workspaceId}.`,
|
||||
args: { spaceId: 'string', docId: 'string', timestamp: 'number' },
|
||||
message: ({ spaceId, docId, timestamp }) =>
|
||||
`History of ${docId} at ${timestamp} under Space ${spaceId}.`,
|
||||
},
|
||||
blob_not_found: {
|
||||
type: 'resource_not_found',
|
||||
args: { workspaceId: 'string', blobId: 'string' },
|
||||
message: ({ workspaceId, blobId }) =>
|
||||
`Blob ${blobId} not found in workspace ${workspaceId}.`,
|
||||
args: { spaceId: 'string', blobId: 'string' },
|
||||
message: ({ spaceId, blobId }) =>
|
||||
`Blob ${blobId} not found in Space ${spaceId}.`,
|
||||
},
|
||||
expect_to_publish_page: {
|
||||
type: 'invalid_input',
|
||||
message: 'Expected to publish a page, not a workspace.',
|
||||
message: 'Expected to publish a page, not a Space.',
|
||||
},
|
||||
expect_to_revoke_public_page: {
|
||||
type: 'invalid_input',
|
||||
message: 'Expected to revoke a public page, not a workspace.',
|
||||
message: 'Expected to revoke a public page, not a Space.',
|
||||
},
|
||||
page_is_not_public: {
|
||||
type: 'bad_request',
|
||||
message: 'Page is not public.',
|
||||
},
|
||||
failed_to_save_updates: {
|
||||
type: 'internal_server_error',
|
||||
message: 'Failed to store doc updates.',
|
||||
},
|
||||
failed_to_upsert_snapshot: {
|
||||
type: 'internal_server_error',
|
||||
message: 'Failed to store doc snapshot.',
|
||||
},
|
||||
|
||||
// Subscription Errors
|
||||
failed_to_checkout: {
|
||||
@@ -506,4 +526,10 @@ export const USER_FRIENDLY_ERRORS = {
|
||||
type: 'action_forbidden',
|
||||
message: 'Cannot delete own account.',
|
||||
},
|
||||
|
||||
// captcha errors
|
||||
captcha_verification_failed: {
|
||||
type: 'bad_request',
|
||||
message: 'Captcha verification failed.',
|
||||
},
|
||||
} satisfies Record<string, UserFriendlyErrorOptions>;
|
||||
|
||||
@@ -16,6 +16,12 @@ export class TooManyRequest extends UserFriendlyError {
|
||||
}
|
||||
}
|
||||
|
||||
export class NotFound extends UserFriendlyError {
|
||||
constructor(message?: string) {
|
||||
super('resource_not_found', 'not_found', message);
|
||||
}
|
||||
}
|
||||
|
||||
export class UserNotFound extends UserFriendlyError {
|
||||
constructor(message?: string) {
|
||||
super('resource_not_found', 'user_not_found', message);
|
||||
@@ -137,6 +143,12 @@ export class InvalidEmailToken extends UserFriendlyError {
|
||||
}
|
||||
}
|
||||
|
||||
export class LinkExpired extends UserFriendlyError {
|
||||
constructor(message?: string) {
|
||||
super('bad_request', 'link_expired', message);
|
||||
}
|
||||
}
|
||||
|
||||
export class AuthenticationRequired extends UserFriendlyError {
|
||||
constructor(message?: string) {
|
||||
super('authentication_required', 'authentication_required', message);
|
||||
@@ -161,54 +173,64 @@ export class EmailVerificationRequired extends UserFriendlyError {
|
||||
}
|
||||
}
|
||||
@ObjectType()
|
||||
class WorkspaceNotFoundDataType {
|
||||
@Field() workspaceId!: string
|
||||
class SpaceNotFoundDataType {
|
||||
@Field() spaceId!: string
|
||||
}
|
||||
|
||||
export class WorkspaceNotFound extends UserFriendlyError {
|
||||
constructor(args: WorkspaceNotFoundDataType, message?: string | ((args: WorkspaceNotFoundDataType) => string)) {
|
||||
super('resource_not_found', 'workspace_not_found', message, args);
|
||||
export class SpaceNotFound extends UserFriendlyError {
|
||||
constructor(args: SpaceNotFoundDataType, message?: string | ((args: SpaceNotFoundDataType) => string)) {
|
||||
super('resource_not_found', 'space_not_found', message, args);
|
||||
}
|
||||
}
|
||||
@ObjectType()
|
||||
class NotInWorkspaceDataType {
|
||||
@Field() workspaceId!: string
|
||||
class NotInSpaceDataType {
|
||||
@Field() spaceId!: string
|
||||
}
|
||||
|
||||
export class NotInWorkspace extends UserFriendlyError {
|
||||
constructor(args: NotInWorkspaceDataType, message?: string | ((args: NotInWorkspaceDataType) => string)) {
|
||||
super('action_forbidden', 'not_in_workspace', message, args);
|
||||
export class NotInSpace extends UserFriendlyError {
|
||||
constructor(args: NotInSpaceDataType, message?: string | ((args: NotInSpaceDataType) => string)) {
|
||||
super('action_forbidden', 'not_in_space', message, args);
|
||||
}
|
||||
}
|
||||
@ObjectType()
|
||||
class WorkspaceAccessDeniedDataType {
|
||||
@Field() workspaceId!: string
|
||||
class AlreadyInSpaceDataType {
|
||||
@Field() spaceId!: string
|
||||
}
|
||||
|
||||
export class WorkspaceAccessDenied extends UserFriendlyError {
|
||||
constructor(args: WorkspaceAccessDeniedDataType, message?: string | ((args: WorkspaceAccessDeniedDataType) => string)) {
|
||||
super('no_permission', 'workspace_access_denied', message, args);
|
||||
export class AlreadyInSpace extends UserFriendlyError {
|
||||
constructor(args: AlreadyInSpaceDataType, message?: string | ((args: AlreadyInSpaceDataType) => string)) {
|
||||
super('action_forbidden', 'already_in_space', message, args);
|
||||
}
|
||||
}
|
||||
@ObjectType()
|
||||
class WorkspaceOwnerNotFoundDataType {
|
||||
@Field() workspaceId!: string
|
||||
class SpaceAccessDeniedDataType {
|
||||
@Field() spaceId!: string
|
||||
}
|
||||
|
||||
export class WorkspaceOwnerNotFound extends UserFriendlyError {
|
||||
constructor(args: WorkspaceOwnerNotFoundDataType, message?: string | ((args: WorkspaceOwnerNotFoundDataType) => string)) {
|
||||
super('internal_server_error', 'workspace_owner_not_found', message, args);
|
||||
export class SpaceAccessDenied extends UserFriendlyError {
|
||||
constructor(args: SpaceAccessDeniedDataType, message?: string | ((args: SpaceAccessDeniedDataType) => string)) {
|
||||
super('no_permission', 'space_access_denied', message, args);
|
||||
}
|
||||
}
|
||||
@ObjectType()
|
||||
class SpaceOwnerNotFoundDataType {
|
||||
@Field() spaceId!: string
|
||||
}
|
||||
|
||||
export class SpaceOwnerNotFound extends UserFriendlyError {
|
||||
constructor(args: SpaceOwnerNotFoundDataType, message?: string | ((args: SpaceOwnerNotFoundDataType) => string)) {
|
||||
super('internal_server_error', 'space_owner_not_found', message, args);
|
||||
}
|
||||
}
|
||||
|
||||
export class CantChangeWorkspaceOwner extends UserFriendlyError {
|
||||
export class CantChangeSpaceOwner extends UserFriendlyError {
|
||||
constructor(message?: string) {
|
||||
super('action_forbidden', 'cant_change_workspace_owner', message);
|
||||
super('action_forbidden', 'cant_change_space_owner', message);
|
||||
}
|
||||
}
|
||||
@ObjectType()
|
||||
class DocNotFoundDataType {
|
||||
@Field() workspaceId!: string
|
||||
@Field() spaceId!: string
|
||||
@Field() docId!: string
|
||||
}
|
||||
|
||||
@@ -219,7 +241,7 @@ export class DocNotFound extends UserFriendlyError {
|
||||
}
|
||||
@ObjectType()
|
||||
class DocAccessDeniedDataType {
|
||||
@Field() workspaceId!: string
|
||||
@Field() spaceId!: string
|
||||
@Field() docId!: string
|
||||
}
|
||||
|
||||
@@ -251,7 +273,7 @@ export class InvalidHistoryTimestamp extends UserFriendlyError {
|
||||
}
|
||||
@ObjectType()
|
||||
class DocHistoryNotFoundDataType {
|
||||
@Field() workspaceId!: string
|
||||
@Field() spaceId!: string
|
||||
@Field() docId!: string
|
||||
@Field() timestamp!: number
|
||||
}
|
||||
@@ -263,7 +285,7 @@ export class DocHistoryNotFound extends UserFriendlyError {
|
||||
}
|
||||
@ObjectType()
|
||||
class BlobNotFoundDataType {
|
||||
@Field() workspaceId!: string
|
||||
@Field() spaceId!: string
|
||||
@Field() blobId!: string
|
||||
}
|
||||
|
||||
@@ -291,6 +313,18 @@ export class PageIsNotPublic extends UserFriendlyError {
|
||||
}
|
||||
}
|
||||
|
||||
export class FailedToSaveUpdates extends UserFriendlyError {
|
||||
constructor(message?: string) {
|
||||
super('internal_server_error', 'failed_to_save_updates', message);
|
||||
}
|
||||
}
|
||||
|
||||
export class FailedToUpsertSnapshot extends UserFriendlyError {
|
||||
constructor(message?: string) {
|
||||
super('internal_server_error', 'failed_to_upsert_snapshot', message);
|
||||
}
|
||||
}
|
||||
|
||||
export class FailedToCheckout extends UserFriendlyError {
|
||||
constructor(message?: string) {
|
||||
super('internal_server_error', 'failed_to_checkout', message);
|
||||
@@ -499,9 +533,16 @@ export class CannotDeleteOwnAccount extends UserFriendlyError {
|
||||
super('action_forbidden', 'cannot_delete_own_account', message);
|
||||
}
|
||||
}
|
||||
|
||||
export class CaptchaVerificationFailed extends UserFriendlyError {
|
||||
constructor(message?: string) {
|
||||
super('bad_request', 'captcha_verification_failed', message);
|
||||
}
|
||||
}
|
||||
export enum ErrorNames {
|
||||
INTERNAL_SERVER_ERROR,
|
||||
TOO_MANY_REQUEST,
|
||||
NOT_FOUND,
|
||||
USER_NOT_FOUND,
|
||||
USER_AVATAR_NOT_FOUND,
|
||||
EMAIL_ALREADY_USED,
|
||||
@@ -520,15 +561,17 @@ export enum ErrorNames {
|
||||
SIGN_UP_FORBIDDEN,
|
||||
EMAIL_TOKEN_NOT_FOUND,
|
||||
INVALID_EMAIL_TOKEN,
|
||||
LINK_EXPIRED,
|
||||
AUTHENTICATION_REQUIRED,
|
||||
ACTION_FORBIDDEN,
|
||||
ACCESS_DENIED,
|
||||
EMAIL_VERIFICATION_REQUIRED,
|
||||
WORKSPACE_NOT_FOUND,
|
||||
NOT_IN_WORKSPACE,
|
||||
WORKSPACE_ACCESS_DENIED,
|
||||
WORKSPACE_OWNER_NOT_FOUND,
|
||||
CANT_CHANGE_WORKSPACE_OWNER,
|
||||
SPACE_NOT_FOUND,
|
||||
NOT_IN_SPACE,
|
||||
ALREADY_IN_SPACE,
|
||||
SPACE_ACCESS_DENIED,
|
||||
SPACE_OWNER_NOT_FOUND,
|
||||
CANT_CHANGE_SPACE_OWNER,
|
||||
DOC_NOT_FOUND,
|
||||
DOC_ACCESS_DENIED,
|
||||
VERSION_REJECTED,
|
||||
@@ -538,6 +581,8 @@ export enum ErrorNames {
|
||||
EXPECT_TO_PUBLISH_PAGE,
|
||||
EXPECT_TO_REVOKE_PUBLIC_PAGE,
|
||||
PAGE_IS_NOT_PUBLIC,
|
||||
FAILED_TO_SAVE_UPDATES,
|
||||
FAILED_TO_UPSERT_SNAPSHOT,
|
||||
FAILED_TO_CHECKOUT,
|
||||
SUBSCRIPTION_ALREADY_EXISTS,
|
||||
SUBSCRIPTION_NOT_EXISTS,
|
||||
@@ -565,7 +610,8 @@ export enum ErrorNames {
|
||||
INVALID_RUNTIME_CONFIG_TYPE,
|
||||
MAILER_SERVICE_IS_NOT_CONFIGURED,
|
||||
CANNOT_DELETE_ALL_ADMIN_ACCOUNT,
|
||||
CANNOT_DELETE_OWN_ACCOUNT
|
||||
CANNOT_DELETE_OWN_ACCOUNT,
|
||||
CAPTCHA_VERIFICATION_FAILED
|
||||
}
|
||||
registerEnumType(ErrorNames, {
|
||||
name: 'ErrorNames'
|
||||
@@ -574,5 +620,5 @@ registerEnumType(ErrorNames, {
|
||||
export const ErrorDataUnionType = createUnionType({
|
||||
name: 'ErrorDataUnion',
|
||||
types: () =>
|
||||
[UnknownOauthProviderDataType, MissingOauthQueryParameterDataType, InvalidPasswordLengthDataType, WorkspaceNotFoundDataType, NotInWorkspaceDataType, WorkspaceAccessDeniedDataType, WorkspaceOwnerNotFoundDataType, DocNotFoundDataType, DocAccessDeniedDataType, VersionRejectedDataType, InvalidHistoryTimestampDataType, DocHistoryNotFoundDataType, BlobNotFoundDataType, SubscriptionAlreadyExistsDataType, SubscriptionNotExistsDataType, SameSubscriptionRecurringDataType, SubscriptionPlanNotFoundDataType, CopilotMessageNotFoundDataType, CopilotPromptNotFoundDataType, CopilotProviderSideErrorDataType, RuntimeConfigNotFoundDataType, InvalidRuntimeConfigTypeDataType] as const,
|
||||
[UnknownOauthProviderDataType, MissingOauthQueryParameterDataType, InvalidPasswordLengthDataType, SpaceNotFoundDataType, NotInSpaceDataType, AlreadyInSpaceDataType, SpaceAccessDeniedDataType, SpaceOwnerNotFoundDataType, DocNotFoundDataType, DocAccessDeniedDataType, VersionRejectedDataType, InvalidHistoryTimestampDataType, DocHistoryNotFoundDataType, BlobNotFoundDataType, SubscriptionAlreadyExistsDataType, SubscriptionNotExistsDataType, SameSubscriptionRecurringDataType, SubscriptionPlanNotFoundDataType, CopilotMessageNotFoundDataType, CopilotPromptNotFoundDataType, CopilotProviderSideErrorDataType, RuntimeConfigNotFoundDataType, InvalidRuntimeConfigTypeDataType] as const,
|
||||
});
|
||||
|
||||
@@ -40,5 +40,3 @@ export class ErrorModule implements OnModuleInit {
|
||||
|
||||
export { UserFriendlyError } from './def';
|
||||
export * from './errors.gen';
|
||||
export * from './payment-required';
|
||||
export * from './too-many-requests';
|
||||
|
||||
@@ -1,10 +0,0 @@
|
||||
import { HttpException, HttpStatus } from '@nestjs/common';
|
||||
|
||||
export class PaymentRequiredException extends HttpException {
|
||||
constructor(desc?: string, code: string = 'Payment Required') {
|
||||
super(
|
||||
HttpException.createBody(desc ?? code, code, HttpStatus.PAYMENT_REQUIRED),
|
||||
HttpStatus.PAYMENT_REQUIRED
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -1,14 +0,0 @@
|
||||
import { HttpException, HttpStatus } from '@nestjs/common';
|
||||
|
||||
export class TooManyRequestsException extends HttpException {
|
||||
constructor(desc?: string, code: string = 'Too Many Requests') {
|
||||
super(
|
||||
HttpException.createBody(
|
||||
desc ?? code,
|
||||
code,
|
||||
HttpStatus.TOO_MANY_REQUESTS
|
||||
),
|
||||
HttpStatus.TOO_MANY_REQUESTS
|
||||
);
|
||||
}
|
||||
}
|
||||
50
packages/backend/server/src/fundamentals/guard/guard.ts
Normal file
50
packages/backend/server/src/fundamentals/guard/guard.ts
Normal file
@@ -0,0 +1,50 @@
|
||||
import {
|
||||
applyDecorators,
|
||||
CanActivate,
|
||||
ExecutionContext,
|
||||
Injectable,
|
||||
SetMetadata,
|
||||
UseGuards,
|
||||
} from '@nestjs/common';
|
||||
import { Reflector } from '@nestjs/core';
|
||||
|
||||
import { GUARD_PROVIDER, NamedGuards } from './provider';
|
||||
|
||||
const BasicGuardSymbol = Symbol('BasicGuard');
|
||||
|
||||
@Injectable()
|
||||
export class BasicGuard implements CanActivate {
|
||||
constructor(private readonly reflector: Reflector) {}
|
||||
|
||||
async canActivate(context: ExecutionContext) {
|
||||
// get registered guard name
|
||||
const providerName = this.reflector.get<string>(
|
||||
BasicGuardSymbol,
|
||||
context.getHandler()
|
||||
);
|
||||
|
||||
const provider = GUARD_PROVIDER[providerName as NamedGuards];
|
||||
if (provider) {
|
||||
return await provider.canActivate(context);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This guard is used to protect routes/queries/mutations that use a registered guard
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* ```typescript
|
||||
* \@UseNamedGuard('captcha') // use captcha guard
|
||||
* \@Auth()
|
||||
* \@Query(() => UserType)
|
||||
* user(@CurrentUser() user: CurrentUser) {
|
||||
* return user;
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
export const UseNamedGuard = (name: NamedGuards) =>
|
||||
applyDecorators(UseGuards(BasicGuard), SetMetadata(BasicGuardSymbol, name));
|
||||
2
packages/backend/server/src/fundamentals/guard/index.ts
Normal file
2
packages/backend/server/src/fundamentals/guard/index.ts
Normal file
@@ -0,0 +1,2 @@
|
||||
export { UseNamedGuard } from './guard';
|
||||
export { GuardProvider, type RegisterGuardName } from './provider';
|
||||
26
packages/backend/server/src/fundamentals/guard/provider.ts
Normal file
26
packages/backend/server/src/fundamentals/guard/provider.ts
Normal file
@@ -0,0 +1,26 @@
|
||||
import {
|
||||
CanActivate,
|
||||
ExecutionContext,
|
||||
Injectable,
|
||||
Logger,
|
||||
OnModuleInit,
|
||||
} from '@nestjs/common';
|
||||
|
||||
export interface RegisterGuardName {}
|
||||
|
||||
export type NamedGuards = keyof RegisterGuardName;
|
||||
|
||||
export const GUARD_PROVIDER: Partial<Record<NamedGuards, GuardProvider>> = {};
|
||||
|
||||
@Injectable()
|
||||
export abstract class GuardProvider implements OnModuleInit, CanActivate {
|
||||
private readonly logger = new Logger(GuardProvider.name);
|
||||
abstract name: NamedGuards;
|
||||
|
||||
onModuleInit() {
|
||||
GUARD_PROVIDER[this.name] = this;
|
||||
this.logger.log(`Guard provider [${this.name}] registered`);
|
||||
}
|
||||
|
||||
abstract canActivate(context: ExecutionContext): boolean | Promise<boolean>;
|
||||
}
|
||||
@@ -89,7 +89,7 @@ export class URLHelper {
|
||||
if (!['http:', 'https:'].includes(url.protocol)) return false;
|
||||
if (!url.hostname) return false;
|
||||
return true;
|
||||
} catch (_) {
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user