Compare commits

..

1 Commit

Author SHA1 Message Date
Peng Xiao 31071c8308 fix(electron): electron cmd+r issue 2024-07-31 07:18:42 +00:00
1723 changed files with 42558 additions and 64894 deletions

View File

@@ -1,4 +1,5 @@
CHANGELOG_URL=
ENABLE_PRELOADING=
ENABLE_NEW_SETTING_UNSTABLE_API=
ENABLE_CAPTCHA=
CAPTCHA_SITE_KEY=

View File

@@ -34,8 +34,8 @@ const createPattern = packageName => [
{
group: ['@affine/env/constant'],
message:
'Do not import from @affine/env/constant. Use `BUILD_CONFIG.isElectron` instead',
importNames: ['isElectron'],
'Do not import from @affine/env/constant. Use `environment.isDesktop` instead',
importNames: ['isDesktop'],
},
];
@@ -43,14 +43,11 @@ const allPackages = [
'packages/backend/server',
'packages/frontend/component',
'packages/frontend/core',
'packages/frontend/apps/electron',
'packages/frontend/apps/web',
'packages/frontend/apps/mobile',
'packages/frontend/electron',
'packages/frontend/graphql',
'packages/frontend/i18n',
'packages/frontend/native',
'packages/frontend/templates',
'packages/frontend/track',
'packages/common/debug',
'packages/common/env',
'packages/common/infra',
@@ -250,8 +247,7 @@ const config = {
'react-hooks/exhaustive-deps': [
'warn',
{
additionalHooks:
'(useAsyncCallback|useCatchEventCallback|useDraggable|useDropTarget|useRefEffect)',
additionalHooks: '(useAsyncCallback|useDraggable|useDropTarget)',
},
],
},

View File

@@ -1,36 +0,0 @@
name: 'Auth to Cluster'
description: 'Auth to the GCP Cluster'
inputs:
gcp-project-number:
description: 'GCP project number'
required: true
gcp-project-id:
description: 'GCP project id'
required: true
service-account:
description: 'Service account'
cluster-name:
description: 'Cluster name'
cluster-location:
description: 'Cluster location'
runs:
using: 'composite'
steps:
- id: auth
uses: google-github-actions/auth@v2
with:
workload_identity_provider: 'projects/${{ inputs.gcp-project-number }}/locations/global/workloadIdentityPools/github-actions/providers/github-actions-helm-deploy'
service_account: '${{ inputs.service-account }}'
token_format: 'access_token'
project_id: '${{ inputs.gcp-project-id }}'
- name: 'Setup gcloud cli'
uses: 'google-github-actions/setup-gcloud@v2'
with:
install_components: 'gke-gcloud-auth-plugin'
- id: get-gke-credentials
shell: bash
run: |
gcloud container clusters get-credentials ${{ inputs.cluster-name }} --region ${{ inputs.cluster-location }} --project ${{ inputs.gcp-project-id }}
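
For context, a minimal sketch of how a workflow step invoked this composite action before it was removed (the same call shape appears in the release workflow further down):

    - name: Auth to Cluster
      uses: './.github/actions/cluster-auth'
      with:
        gcp-project-number: ${{ secrets.GCP_PROJECT_NUMBER }}
        gcp-project-id: ${{ secrets.GCP_PROJECT_ID }}
        service-account: ${{ secrets.GCP_HELM_DEPLOY_SERVICE_ACCOUNT }}
        cluster-name: ${{ secrets.GCP_CLUSTER_NAME }}
        cluster-location: ${{ secrets.GCP_CLUSTER_LOCATION }}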

View File

@@ -24,14 +24,24 @@ runs:
shell: bash
run: |
echo "GIT_SHORT_HASH=$(git rev-parse --short HEAD)" >> "$GITHUB_ENV"
- name: 'Auth to cluster'
uses: './.github/actions/cluster-auth'
- uses: azure/setup-helm@v4
- id: auth
uses: google-github-actions/auth@v2
with:
gcp-project-number: '${{ inputs.gcp-project-number }}'
gcp-project-id: '${{ inputs.gcp-project-id }}'
service-account: '${{ inputs.service-account }}'
cluster-name: '${{ inputs.cluster-name }}'
cluster-location: '${{ inputs.cluster-location }}'
workload_identity_provider: 'projects/${{ inputs.gcp-project-number }}/locations/global/workloadIdentityPools/github-actions/providers/github-actions-helm-deploy'
service_account: '${{ inputs.service-account }}'
token_format: 'access_token'
project_id: '${{ inputs.gcp-project-id }}'
- name: 'Setup gcloud cli'
uses: 'google-github-actions/setup-gcloud@v2'
with:
install_components: 'gke-gcloud-auth-plugin'
- id: get-gke-credentials
shell: bash
run: |
gcloud container clusters get-credentials ${{ inputs.cluster-name }} --region ${{ inputs.cluster-location }} --project ${{ inputs.gcp-project-id }}
- name: Deploy
shell: bash

View File

@@ -90,14 +90,9 @@ const createHelmCommand = ({ isDryRun }) => {
const deployCommand = [
`helm upgrade --install affine .github/helm/affine`,
`--namespace ${namespace}`,
`--set-string global.app.buildType="${buildType}"`,
`--set global.ingress.enabled=true`,
`--set-json global.ingress.annotations=\"{ \\"kubernetes.io/ingress.class\\": \\"gce\\", \\"kubernetes.io/ingress.allow-http\\": \\"true\\", \\"kubernetes.io/ingress.global-static-ip-name\\": \\"${STATIC_IP_NAME}\\" }\"`,
`--set-string global.ingress.host="${host}"`,
`--set global.objectStorage.r2.enabled=true`,
`--set-string global.objectStorage.r2.accountId="${R2_ACCOUNT_ID}"`,
`--set-string global.objectStorage.r2.accessKeyId="${R2_ACCESS_KEY_ID}"`,
`--set-string global.objectStorage.r2.secretAccessKey="${R2_SECRET_ACCESS_KEY}"`,
`--set-string global.version="${APP_VERSION}"`,
...redisAndPostgres,
`--set web.replicaCount=${webReplicaCount}`,
@@ -111,6 +106,10 @@ const createHelmCommand = ({ isDryRun }) => {
`--set-string graphql.app.copilot.openai.key="${COPILOT_OPENAI_API_KEY}"`,
`--set-string graphql.app.copilot.fal.key="${COPILOT_FAL_API_KEY}"`,
`--set-string graphql.app.copilot.unsplash.key="${COPILOT_UNSPLASH_API_KEY}"`,
`--set graphql.app.objectStorage.r2.enabled=true`,
`--set-string graphql.app.objectStorage.r2.accountId="${R2_ACCOUNT_ID}"`,
`--set-string graphql.app.objectStorage.r2.accessKeyId="${R2_ACCESS_KEY_ID}"`,
`--set-string graphql.app.objectStorage.r2.secretAccessKey="${R2_SECRET_ACCESS_KEY}"`,
`--set-string graphql.app.mailer.sender="${MAILER_SENDER}"`,
`--set-string graphql.app.mailer.user="${MAILER_USER}"`,
`--set-string graphql.app.mailer.password="${MAILER_PASSWORD}"`,
@@ -126,8 +125,6 @@ const createHelmCommand = ({ isDryRun }) => {
`--set graphql.app.features.syncClientVersionCheck=true`,
`--set sync.replicaCount=${syncReplicaCount}`,
`--set-string sync.image.tag="${imageTag}"`,
`--set-string renderer.image.tag="${imageTag}"`,
`--set renderer.app.host=${host}`,
...serviceAnnotations,
`--timeout 10m`,
flag,
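
For readability, the escaped --set-json flag above is equivalent to this values snippet (a sketch; ${STATIC_IP_NAME} is interpolated by the deploy script):

    global:
      ingress:
        enabled: true
        annotations:
          kubernetes.io/ingress.class: gce
          kubernetes.io/ingress.allow-http: "true"
          kubernetes.io/ingress.global-static-ip-name: ${STATIC_IP_NAME}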

View File

@@ -156,7 +156,7 @@ runs:
- name: Install Playwright's dependencies
shell: bash
if: inputs.playwright-install == 'true'
run: yarn playwright install --with-deps chromium webkit
run: yarn playwright install --with-deps chromium
env:
PLAYWRIGHT_BROWSERS_PATH: ${{ github.workspace }}/node_modules/.cache/ms-playwright

View File

@@ -17,7 +17,7 @@ runs:
PACKAGE_VERSION=$(node -p "require('./package.json').version")
TIME_VERSION=$(date +%Y%m%d%H%M)
GIT_SHORT_HASH=$(git rev-parse --short HEAD)
APP_VERSION=$PACKAGE_VERSION-nightly-$GIT_SHORT_HASH
APP_VERSION=$PACKAGE_VERSION-$GIT_SHORT_HASH
fi
echo $APP_VERSION
echo "APP_VERSION=$APP_VERSION" >> "$GITHUB_OUTPUT"

View File

@@ -1,8 +1,7 @@
FROM openresty/openresty:1.25.3.2-0-buster
FROM openresty/openresty:1.25.3.1-0-buster
WORKDIR /app
COPY ./packages/frontend/apps/web/dist ./dist
COPY ./packages/frontend/web/dist ./dist
COPY ./packages/frontend/admin/dist ./admin
COPY ./packages/frontend/apps/mobile/dist ./mobile
COPY ./.github/deployment/front/nginx.conf /usr/local/openresty/nginx/conf/nginx.conf
COPY ./.github/deployment/front/affine.nginx.conf /etc/nginx/conf.d/affine.nginx.conf

View File

@@ -6,33 +6,15 @@ server {
try_files $uri/index.html $uri/ $uri /admin/index.html;
}
set $app_root_path /app/dist/;
set $mobile_root /app/dist/;
set_by_lua $affine_env 'return os.getenv("AFFINE_ENV")';
if ($affine_env = "dev") {
set $mobile_root /app/mobile/;
}
# https://gist.github.com/mariusom/6683dc52b1cad1a1f372e908bdb209d0
if ($http_user_agent ~* "(android|bb\d+|meego).+mobile|avantgo|bada\/|blackberry|blazer|compal|elaine|fennec|hiptop|iemobile|ip(hone|od)|iris|kindle|lge |maemo|midp|mmp|mobile.+firefox|netfront|opera m(ob|in)i|palm( os)?|phone|p(ixi|re)\/|plucker|pocket|psp|series(4|6)0|symbian|treo|up\.(browser|link)|vodafone|wap|windows ce|xda|xiino") {
set $app_root_path $mobile_root;
}
if ($http_user_agent ~* "^(1207|6310|6590|3gso|4thp|50[1-6]i|770s|802s|a wa|abac|ac(er|oo|s\-)|ai(ko|rn)|al(av|ca|co)|amoi|an(ex|ny|yw)|aptu|ar(ch|go)|as(te|us)|attw|au(di|\-m|r |s )|avan|be(ck|ll|nq)|bi(lb|rd)|bl(ac|az)|br(e|v)w|bumb|bw\-(n|u)|c55\/|capi|ccwa|cdm\-|cell|chtm|cldc|cmd\-|co(mp|nd)|craw|da(it|ll|ng)|dbte|dc\-s|devi|dica|dmob|do(c|p)o|ds(12|\-d)|el(49|ai)|em(l2|ul)|er(ic|k0)|esl8|ez([4-7]0|os|wa|ze)|fetc|fly(\-|_)|g1 u|g560|gene|gf\-5|g\-mo|go(\.w|od)|gr(ad|un)|haie|hcit|hd\-(m|p|t)|hei\-|hi(pt|ta)|hp( i|ip)|hs\-c|ht(c(\-| |_|a|g|p|s|t)|tp)|hu(aw|tc)|i\-(20|go|ma)|i230|iac( |\-|\/)|ibro|idea|ig01|ikom|im1k|inno|ipaq|iris|ja(t|v)a|jbro|jemu|jigs|kddi|keji|kgt( |\/)|klon|kpt |kwc\-|kyo(c|k)|le(no|xi)|lg( g|\/(k|l|u)|50|54|\-[a-w])|libw|lynx|m1\-w|m3ga|m50\/|ma(te|ui|xo)|mc(01|21|ca)|m\-cr|me(rc|ri)|mi(o8|oa|ts)|mmef|mo(01|02|bi|de|do|t(\-| |o|v)|zz)|mt(50|p1|v )|mwbp|mywa|n10[0-2]|n20[2-3]|n30(0|2)|n50(0|2|5)|n7(0(0|1)|10)|ne((c|m)\-|on|tf|wf|wg|wt)|nok(6|i)|nzph|o2im|op(ti|wv)|oran|owg1|p800|pan(a|d|t)|pdxg|pg(13|\-([1-8]|c))|phil|pire|pl(ay|uc)|pn\-2|po(ck|rt|se)|prox|psio|pt\-g|qa\-a|qc(07|12|21|32|60|\-[2-7]|i\-)|qtek|r380|r600|raks|rim9|ro(ve|zo)|s55\/|sa(ge|ma|mm|ms|ny|va)|sc(01|h\-|oo|p\-)|sdk\/|se(c(\-|0|1)|47|mc|nd|ri)|sgh\-|shar|sie(\-|m)|sk\-0|sl(45|id)|sm(al|ar|b3|it|t5)|so(ft|ny)|sp(01|h\-|v\-|v )|sy(01|mb)|t2(18|50)|t6(00|10|18)|ta(gt|lk)|tcl\-|tdg\-|tel(i|m)|tim\-|t\-mo|to(pl|sh)|ts(70|m\-|m3|m5)|tx\-9|up(\.b|g1|si)|utst|v400|v750|veri|vi(rg|te)|vk(40|5[0-3]|\-v)|vm40|voda|vulc|vx(52|53|60|61|70|80|81|83|85|98)|w3c(\-| )|webc|whit|wi(g |nc|nw)|wmlb|wonu|x700|yas\-|your|zeto|zte\-)") {
set $app_root_path $mobile_root;
}
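# Sketch of the routing above: with AFFINE_ENV=dev and a mobile user agent,
# $app_root_path resolves to /app/mobile/; in every other case both variables
# remain /app/dist/ and the desktop bundle is served.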
location ~ ^/(_plugin|assets|imgs|js|plugins|static)/ {
root $app_root_path;
root /app/dist/;
try_files $uri $uri/ =404;
}
location / {
root $app_root_path;
root /app/dist/;
index index.html;
try_files $uri $uri/ /index.html;
add_header Cache-Control "private, no-cache, no-store, max-age=0, must-revalidate";
}
error_page 404 /404.html;

View File

@@ -1,15 +1,14 @@
worker_processes 4;
worker_processes 4;
error_log /var/log/nginx/error.log warn;
pcre_jit on;
env AFFINE_ENV;
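# Note: the env directive above whitelists AFFINE_ENV for nginx worker
# processes, which is what lets set_by_lua's os.getenv("AFFINE_ENV") call in
# affine.nginx.conf (earlier in this diff) read it.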
events {
worker_connections 1024;
worker_connections 1024;
}
http {
include mime.types;
log_format main '$remote_addr [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
access_log /var/log/nginx/access.log main;
include /etc/nginx/conf.d/*.conf;
include mime.types;
log_format main '$remote_addr [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
access_log /var/log/nginx/access.log main;
include /etc/nginx/conf.d/*.conf;
}

View File

@@ -1,9 +1,8 @@
FROM node:20-bookworm-slim
COPY ./packages/backend/server /app
COPY ./packages/frontend/apps/web/dist /app/static
COPY ./packages/frontend/web/dist /app/static
COPY ./packages/frontend/admin/dist /app/static/admin
COPY ./packages/frontend/apps/mobile/dist /app/static/mobile
WORKDIR /app
RUN apt-get update && \

View File

@@ -28,6 +28,8 @@ services:
- REDIS_SERVER_HOST=redis
- DATABASE_URL=postgres://affine:affine@postgres:5432/affine
- NODE_ENV=production
- AFFINE_ADMIN_EMAIL=${AFFINE_ADMIN_EMAIL}
- AFFINE_ADMIN_PASSWORD=${AFFINE_ADMIN_PASSWORD}
# Telemetry allows us to collect data on how you use AFFiNE. This data helps us improve the app and provide better features.
# Uncomment the next line if you wish to opt out of telemetry.
# - TELEMETRY_ENABLE=false
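
A sketch of the .env file docker compose reads for the two new variables (hypothetical placeholder values; the compose-default .env location is assumed, not shown in this diff):

    AFFINE_ADMIN_EMAIL=admin@example.com
    AFFINE_ADMIN_PASSWORD=change-me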

View File

@@ -3,4 +3,4 @@ name: affine
description: AFFiNE cloud chart
type: application
version: 0.0.0
appVersion: "0.17.0"
appVersion: "0.15.0"

View File

@@ -3,7 +3,7 @@ name: graphql
description: AFFiNE GraphQL server
type: application
version: 0.0.0
appVersion: "0.17.0"
appVersion: "0.15.0"
dependencies:
- name: gcloud-sql-proxy
version: 0.0.0

View File

@@ -76,7 +76,9 @@ spec:
- name: AFFINE_SERVER_HTTPS
value: "{{ .Values.app.https }}"
- name: ENABLE_R2_OBJECT_STORAGE
value: "{{ .Values.global.objectStorage.r2.enabled }}"
value: "{{ .Values.app.objectStorage.r2.enabled }}"
- name: ENABLE_CAPTCHA
value: "{{ .Values.app.captcha.enabled }}"
- name: FEATURES_EARLY_ACCESS_PREVIEW
value: "{{ .Values.app.features.earlyAccessPreview }}"
- name: FEATURES_SYNC_CLIENT_VERSION_CHECK
@@ -122,21 +124,21 @@ spec:
- name: DOC_MERGE_USE_JWST_CODEC
value: "true"
{{ end }}
{{ if .Values.global.objectStorage.r2.enabled }}
{{ if .Values.app.objectStorage.r2.enabled }}
- name: R2_OBJECT_STORAGE_ACCOUNT_ID
valueFrom:
secretKeyRef:
name: "{{ .Values.global.objectStorage.r2.secretName }}"
name: "{{ .Values.app.objectStorage.r2.secretName }}"
key: accountId
- name: R2_OBJECT_STORAGE_ACCESS_KEY_ID
valueFrom:
secretKeyRef:
name: "{{ .Values.global.objectStorage.r2.secretName }}"
name: "{{ .Values.app.objectStorage.r2.secretName }}"
key: accessKeyId
- name: R2_OBJECT_STORAGE_SECRET_ACCESS_KEY
valueFrom:
secretKeyRef:
name: "{{ .Values.global.objectStorage.r2.secretName }}"
name: "{{ .Values.app.objectStorage.r2.secretName }}"
key: secretAccessKey
{{ end }}
{{ if .Values.app.captcha.enabled }}
@@ -202,12 +204,12 @@ spec:
protocol: TCP
livenessProbe:
httpGet:
path: /info
path: /
port: http
initialDelaySeconds: {{ .Values.probe.initialDelaySeconds }}
readinessProbe:
httpGet:
path: /info
path: /
port: http
initialDelaySeconds: {{ .Values.probe.initialDelaySeconds }}
resources:

View File

@@ -37,21 +37,21 @@ spec:
- name: DATABASE_URL
value: postgres://{{ .Values.global.database.user }}:$(DATABASE_PASSWORD)@{{ .Values.global.database.gcloud.cloudSqlInternal }}:{{ .Values.global.database.port }}/{{ .Values.global.database.name }}
{{ end }}
{{ if .Values.global.objectStorage.r2.enabled }}
{{ if .Values.app.objectStorage.r2.enabled }}
- name: R2_OBJECT_STORAGE_ACCOUNT_ID
valueFrom:
secretKeyRef:
name: "{{ .Values.global.objectStorage.r2.secretName }}"
name: "{{ .Values.app.objectStorage.r2.secretName }}"
key: accountId
- name: R2_OBJECT_STORAGE_ACCESS_KEY_ID
valueFrom:
secretKeyRef:
name: "{{ .Values.global.objectStorage.r2.secretName }}"
name: "{{ .Values.app.objectStorage.r2.secretName }}"
key: accessKeyId
- name: R2_OBJECT_STORAGE_SECRET_ACCESS_KEY
valueFrom:
secretKeyRef:
name: "{{ .Values.global.objectStorage.r2.secretName }}"
name: "{{ .Values.app.objectStorage.r2.secretName }}"
key: secretAccessKey
{{ end }}
resources:

View File

@@ -0,0 +1,11 @@
{{- if .Values.app.objectStorage.r2.enabled -}}
apiVersion: v1
kind: Secret
metadata:
name: "{{ .Values.app.objectStorage.r2.secretName }}"
type: Opaque
data:
accountId: {{ .Values.app.objectStorage.r2.accountId | b64enc }}
accessKeyId: {{ .Values.app.objectStorage.r2.accessKeyId | b64enc }}
secretAccessKey: {{ .Values.app.objectStorage.r2.secretAccessKey | b64enc }}
{{- end }}
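
Rendered with r2 enabled and, say, accountId "myaccount" (a sketch; the default secretName "r2" comes from the values change below, and the other keys follow the same b64enc pattern):

    apiVersion: v1
    kind: Secret
    metadata:
      name: "r2"
    type: Opaque
    data:
      accountId: bXlhY2NvdW50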

View File

@@ -4,10 +4,6 @@ metadata:
name: {{ include "graphql.fullname" . }}
labels:
{{- include "graphql.labels" . | nindent 4 }}
{{- with .Values.service.annotations }}
annotations:
{{- toYaml . | nindent 4 }}
{{- end }}
spec:
type: {{ .Values.service.type }}
ports:

View File

@@ -29,7 +29,14 @@ app:
secretName: copilot
openai:
key: ''
oauth:
objectStorage:
r2:
enabled: false
secretName: r2
accountId: ''
accessKeyId: ''
secretAccessKey: ''
oauth:
google:
enabled: false
secretName: oauth-google

View File

@@ -1,11 +0,0 @@
apiVersion: v2
name: renderer
description: AFFiNE renderer server
type: application
version: 0.0.0
appVersion: "0.16.0"
dependencies:
- name: gcloud-sql-proxy
version: 0.0.0
repository: "file://../gcloud-sql-proxy"
condition: .global.database.gcloud.enabled

View File

@@ -1,16 +0,0 @@
1. Get the application URL by running these commands:
{{- if contains "NodePort" .Values.service.type }}
export NODE_PORT=$(kubectl get --namespace {{ .Release.Namespace }} -o jsonpath="{.spec.ports[0].nodePort}" services {{ include "renderer.fullname" . }})
export NODE_IP=$(kubectl get nodes --namespace {{ .Release.Namespace }} -o jsonpath="{.items[0].status.addresses[0].address}")
echo http://$NODE_IP:$NODE_PORT
{{- else if contains "LoadBalancer" .Values.service.type }}
NOTE: It may take a few minutes for the LoadBalancer IP to be available.
You can watch its status by running 'kubectl get --namespace {{ .Release.Namespace }} svc -w {{ include "renderer.fullname" . }}'
export SERVICE_IP=$(kubectl get svc --namespace {{ .Release.Namespace }} {{ include "renderer.fullname" . }} --template "{{"{{ range (index .status.loadBalancer.ingress 0) }}{{.}}{{ end }}"}}")
echo http://$SERVICE_IP:{{ .Values.service.port }}
{{- else if contains "ClusterIP" .Values.service.type }}
export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "renderer.name" . }},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}")
export CONTAINER_PORT=$(kubectl get pod --namespace {{ .Release.Namespace }} $POD_NAME -o jsonpath="{.spec.containers[0].ports[0].containerPort}")
echo "Visit http://127.0.0.1:8080 to use your application"
kubectl --namespace {{ .Release.Namespace }} port-forward $POD_NAME 8080:$CONTAINER_PORT
{{- end }}

View File

@@ -1,63 +0,0 @@
{{/*
Expand the name of the chart.
*/}}
{{- define "renderer.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
If release name contains chart name it will be used as a full name.
*/}}
{{- define "renderer.fullname" -}}
{{- if .Values.fullnameOverride }}
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- $name := default .Chart.Name .Values.nameOverride }}
{{- if contains $name .Release.Name }}
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
{{- end }}
{{- end }}
{{- end }}
{{/*
Create chart name and version as used by the chart label.
*/}}
{{- define "renderer.chart" -}}
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Common labels
*/}}
{{- define "renderer.labels" -}}
helm.sh/chart: {{ include "renderer.chart" . }}
{{ include "renderer.selectorLabels" . }}
{{- if .Chart.AppVersion }}
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
{{- end }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
monitoring: enabled
{{- end }}
{{/*
Selector labels
*/}}
{{- define "renderer.selectorLabels" -}}
app.kubernetes.io/name: {{ include "renderer.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}
{{/*
Create the name of the service account to use
*/}}
{{- define "renderer.serviceAccountName" -}}
{{- if .Values.serviceAccount.create }}
{{- default (include "renderer.fullname" .) .Values.serviceAccount.name }}
{{- else }}
{{- default "default" .Values.serviceAccount.name }}
{{- end }}
{{- end }}

View File

@@ -1,124 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: {{ include "renderer.fullname" . }}
labels:
{{- include "renderer.labels" . | nindent 4 }}
spec:
replicas: {{ .Values.replicaCount }}
selector:
matchLabels:
{{- include "renderer.selectorLabels" . | nindent 6 }}
template:
metadata:
{{- with .Values.podAnnotations }}
annotations:
{{- toYaml . | nindent 8 }}
{{- end }}
labels:
{{- include "renderer.selectorLabels" . | nindent 8 }}
spec:
{{- with .Values.imagePullSecrets }}
imagePullSecrets:
{{- toYaml . | nindent 8 }}
{{- end }}
serviceAccountName: {{ include "renderer.serviceAccountName" . }}
containers:
- name: {{ .Chart.Name }}
image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
imagePullPolicy: {{ .Values.image.pullPolicy }}
env:
- name: AFFINE_PRIVATE_KEY
valueFrom:
secretKeyRef:
name: "{{ .Values.global.secret.secretName }}"
key: key
- name: NODE_ENV
value: "{{ .Values.env }}"
- name: NODE_OPTIONS
value: "--max-old-space-size=4096"
- name: NO_COLOR
value: "1"
- name: DEPLOYMENT_TYPE
value: "affine"
- name: SERVER_FLAVOR
value: "renderer"
- name: AFFINE_ENV
value: "{{ .Release.Namespace }}"
- name: DATABASE_PASSWORD
valueFrom:
secretKeyRef:
name: pg-postgresql
key: postgres-password
- name: DATABASE_URL
value: postgres://{{ .Values.global.database.user }}:$(DATABASE_PASSWORD)@{{ .Values.global.database.url }}:{{ .Values.global.database.port }}/{{ .Values.global.database.name }}
- name: REDIS_SERVER_ENABLED
value: "true"
- name: REDIS_SERVER_HOST
value: "{{ .Values.global.redis.host }}"
- name: REDIS_SERVER_PORT
value: "{{ .Values.global.redis.port }}"
- name: REDIS_SERVER_USER
value: "{{ .Values.global.redis.username }}"
- name: REDIS_SERVER_PASSWORD
valueFrom:
secretKeyRef:
name: redis
key: redis-password
- name: REDIS_SERVER_DATABASE
value: "{{ .Values.global.redis.database }}"
- name: AFFINE_SERVER_PORT
value: "{{ .Values.service.port }}"
- name: AFFINE_SERVER_SUB_PATH
value: "{{ .Values.app.path }}"
- name: AFFINE_SERVER_HOST
value: "{{ .Values.app.host }}"
- name: AFFINE_SERVER_HTTPS
value: "{{ .Values.app.https }}"
- name: ENABLE_R2_OBJECT_STORAGE
value: "{{ .Values.global.objectStorage.r2.enabled }}"
{{ if .Values.global.objectStorage.r2.enabled }}
- name: R2_OBJECT_STORAGE_ACCOUNT_ID
valueFrom:
secretKeyRef:
name: "{{ .Values.global.objectStorage.r2.secretName }}"
key: accountId
- name: R2_OBJECT_STORAGE_ACCESS_KEY_ID
valueFrom:
secretKeyRef:
name: "{{ .Values.global.objectStorage.r2.secretName }}"
key: accessKeyId
- name: R2_OBJECT_STORAGE_SECRET_ACCESS_KEY
valueFrom:
secretKeyRef:
name: "{{ .Values.global.objectStorage.r2.secretName }}"
key: secretAccessKey
{{ end }}
ports:
- name: http
containerPort: {{ .Values.service.port }}
protocol: TCP
livenessProbe:
httpGet:
path: /info
port: http
initialDelaySeconds: {{ .Values.probe.initialDelaySeconds }}
readinessProbe:
httpGet:
path: /info
port: http
initialDelaySeconds: {{ .Values.probe.initialDelaySeconds }}
resources:
{{- toYaml .Values.resources | nindent 12 }}
{{- with .Values.nodeSelector }}
nodeSelector:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.tolerations }}
tolerations:
{{- toYaml . | nindent 8 }}
{{- end }}

View File

@@ -1,19 +0,0 @@
apiVersion: v1
kind: Service
metadata:
name: {{ include "graphql.fullname" . }}
labels:
{{- include "graphql.labels" . | nindent 4 }}
{{- with .Values.service.annotations }}
annotations:
{{- toYaml . | nindent 4 }}
{{- end }}
spec:
type: {{ .Values.service.type }}
ports:
- port: {{ .Values.service.port }}
targetPort: http
protocol: TCP
name: http
selector:
{{- include "graphql.selectorLabels" . | nindent 4 }}

View File

@@ -1,12 +0,0 @@
{{- if .Values.serviceAccount.create -}}
apiVersion: v1
kind: ServiceAccount
metadata:
name: {{ include "graphql.serviceAccountName" . }}
labels:
{{- include "graphql.labels" . | nindent 4 }}
{{- with .Values.serviceAccount.annotations }}
annotations:
{{- toYaml . | nindent 4 }}
{{- end }}
{{- end }}

View File

@@ -1,15 +0,0 @@
apiVersion: v1
kind: Pod
metadata:
name: "{{ include "renderer.fullname" . }}-test-connection"
labels:
{{- include "renderer.labels" . | nindent 4 }}
annotations:
"helm.sh/hook": test
spec:
containers:
- name: wget
image: busybox
command: ['wget']
args: ['{{ include "renderer.fullname" . }}:{{ .Values.service.port }}']
restartPolicy: Never

View File

@@ -1,38 +0,0 @@
replicaCount: 1
image:
repository: ghcr.io/toeverything/affine-graphql
pullPolicy: IfNotPresent
tag: ''
imagePullSecrets: []
nameOverride: ''
fullnameOverride: ''
# map to NODE_ENV environment variable
env: 'production'
app:
# AFFINE_SERVER_SUB_PATH
path: ''
# AFFINE_SERVER_HOST
host: '0.0.0.0'
https: true
serviceAccount:
create: true
annotations: {}
name: 'affine-renderer'
podAnnotations: {}
podSecurityContext:
fsGroup: 2000
resources:
requests:
cpu: '4'
memory: 4Gi
probe:
initialDelaySeconds: 20
nodeSelector: {}
tolerations: []
affinity: {}

View File

@@ -3,7 +3,7 @@ name: sync
description: AFFiNE Sync Server
type: application
version: 0.0.0
appVersion: "0.17.0"
appVersion: "0.15.0"
dependencies:
- name: gcloud-sql-proxy
version: 0.0.0

View File

@@ -27,9 +27,6 @@ spec:
- name: {{ .Chart.Name }}
image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
imagePullPolicy: {{ .Values.image.pullPolicy }}
env:
- name: AFFINE_ENV
value: "{{ .Release.Namespace }}"
ports:
- name: http
containerPort: {{ .Values.service.port }}

View File

@@ -1,9 +0,0 @@
apiVersion: v1
kind: ConfigMap
metadata:
name: {{ .Release.Name }}-runtime-config
data:
web-assets-manifest: |-
{{ .Files.Get "web-assets-manifest.json" | nindent 4 }}
mobile-assets-manifest: |-
{{ .Files.Get "mobile-assets-manifest.json" | nindent 4 }}

View File

@@ -60,13 +60,13 @@ spec:
name: affine-graphql
port:
number: {{ .Values.graphql.service.port }}
- path: /workspace
- path: /oauth
pathType: Prefix
backend:
service:
name: affine-renderer
name: affine-graphql
port:
number: {{ .Values.renderer.service.port }}
number: {{ .Values.graphql.service.port }}
- path: /
pathType: Prefix
backend:
@@ -74,4 +74,11 @@ spec:
name: affine-web
port:
number: {{ .Values.web.service.port }}
- path: /js/worker.(.+).js
pathType: ImplementationSpecific
backend:
service:
name: affine-web
port:
number: {{ .Values.web.service.port }}
{{- end }}

View File

@@ -1,11 +0,0 @@
{{- if .Values.global.objectStorage.r2.enabled -}}
apiVersion: v1
kind: Secret
metadata:
name: "{{ .Values.global.objectStorage.r2.secretName }}"
type: Opaque
data:
accountId: {{ .Values.global.objectStorage.r2.accountId | b64enc }}
accessKeyId: {{ .Values.global.objectStorage.r2.accessKeyId | b64enc }}
secretAccessKey: {{ .Values.global.objectStorage.r2.secretAccessKey | b64enc }}
{{- end }}

View File

@@ -1,6 +1,4 @@
global:
app:
buildType: 'stable'
ingress:
enabled: false
className: ''
@@ -30,13 +28,6 @@ global:
username: ''
password: ''
database: 0
objectStorage:
r2:
enabled: false
secretName: r2
accountId: ''
accessKeyId: ''
secretAccessKey: ''
gke:
enabled: true
@@ -45,21 +36,14 @@ graphql:
type: ClusterIP
port: 3000
annotations:
cloud.google.com/backend-config: '{"default": "affine-api-backendconfig"}'
cloud.google.com/backend-config: '{"default": "affine-backendconfig"}'
sync:
service:
type: ClusterIP
port: 3010
annotations:
cloud.google.com/backend-config: '{"default": "affine-api-backendconfig"}'
renderer:
service:
type: ClusterIP
port: 3000
annotations:
cloud.google.com/backend-config: '{"default": "affine-api-backendconfig"}'
cloud.google.com/backend-config: '{"default": "affine-backendconfig"}'
web:
service:

View File

@@ -1,10 +0,0 @@
apiVersion: cloud.google.com/v1
kind: BackendConfig
metadata:
name: "affine-api-backendconfig"
spec:
healthCheck:
timeoutSec: 1
type: HTTP
requestPath: /info

.github/labeler.yml vendored
View File

@@ -77,7 +77,7 @@ app:core:
app:electron:
- changed-files:
- any-glob-to-any-file:
- 'packages/frontend/apps/electron/**/*'
- 'packages/frontend/electron/**/*'
app:server:
- changed-files:

View File

@@ -23,11 +23,12 @@
"groupName": "oxlint"
},
{
"groupName": "blocksuite",
"groupName": "blocksuite-canary",
"matchPackagePatterns": ["^@blocksuite"],
"excludePackageNames": ["@blocksuite/icons"],
"rangeStrategy": "replace",
"changelogUrl": "https://github.com/toeverything/blocksuite/blob/master/packages/blocks/CHANGELOG.md"
"followTag": "canary",
"enabled": false
},
{
"groupName": "all non-major dependencies",
@@ -40,10 +41,6 @@
"groupName": "rust toolchain",
"matchManagers": ["custom.regex"],
"matchDepNames": ["rustc"]
},
{
"groupName": "nestjs",
"matchPackagePatterns": ["^@nestjs"]
}
],
"commitMessagePrefix": "chore: ",

View File

@@ -20,6 +20,6 @@ permissions:
jobs:
build-image:
name: Build Image
uses: ./.github/workflows/build-images.yml
uses: ./.github/workflows/build-server-image.yml
with:
flavor: ${{ github.event.inputs.flavor }}

View File

@@ -6,6 +6,11 @@ on:
flavor:
type: string
required: true
workflow_dispatch:
inputs:
flavor:
type: string
required: false
env:
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
@@ -38,103 +43,6 @@ jobs:
path: ./packages/backend/server/dist
if-no-files-found: error
build-web:
name: Build @affine/web
runs-on: ubuntu-latest
environment: ${{ github.event.inputs.flavor }}
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Core
run: yarn nx build @affine/web --skip-nx-cache
env:
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
BUILD_TYPE: ${{ github.event.inputs.flavor }}
CAPTCHA_SITE_KEY: ${{ secrets.CAPTCHA_SITE_KEY }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: 'affine-web'
SENTRY_RELEASE: ${{ steps.version.outputs.APP_VERSION }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
PERFSEE_TOKEN: ${{ secrets.PERFSEE_TOKEN }}
MIXPANEL_TOKEN: ${{ secrets.MIXPANEL_TOKEN }}
- name: Upload web artifact
uses: actions/upload-artifact@v4
with:
name: web
path: ./packages/frontend/apps/web/dist
if-no-files-found: error
build-admin:
name: Build @affine/admin
runs-on: ubuntu-latest
environment: ${{ github.event.inputs.flavor }}
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Admin
run: yarn nx build @affine/admin --skip-nx-cache
env:
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
BUILD_TYPE: ${{ github.event.inputs.flavor }}
CAPTCHA_SITE_KEY: ${{ secrets.CAPTCHA_SITE_KEY }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: 'affine-admin'
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
PERFSEE_TOKEN: ${{ secrets.PERFSEE_TOKEN }}
MIXPANEL_TOKEN: ${{ secrets.MIXPANEL_TOKEN }}
- name: Upload admin artifact
uses: actions/upload-artifact@v4
with:
name: admin
path: ./packages/frontend/admin/dist
if-no-files-found: error
build-mobile:
name: Build @affine/mobile
runs-on: ubuntu-latest
environment: ${{ github.event.inputs.flavor }}
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Mobile
run: yarn nx build @affine/mobile --skip-nx-cache
env:
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
BUILD_TYPE: ${{ github.event.inputs.flavor }}
CAPTCHA_SITE_KEY: ${{ secrets.CAPTCHA_SITE_KEY }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: 'affine-mobile'
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
PERFSEE_TOKEN: ${{ secrets.PERFSEE_TOKEN }}
MIXPANEL_TOKEN: ${{ secrets.MIXPANEL_TOKEN }}
- name: Upload mobile artifact
uses: actions/upload-artifact@v4
with:
name: mobile
path: ./packages/frontend/apps/mobile/dist
if-no-files-found: error
build-web-selfhost:
name: Build @affine/web selfhost
runs-on: ubuntu-latest
@@ -159,32 +67,7 @@ jobs:
uses: actions/upload-artifact@v4
with:
name: selfhost-web
path: ./packages/frontend/apps/web/dist
if-no-files-found: error
build-mobile-selfhost:
name: Build @affine/mobile selfhost
runs-on: ubuntu-latest
environment: ${{ github.event.inputs.flavor }}
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Mobile
run: yarn nx build @affine/mobile --skip-nx-cache
env:
BUILD_TYPE: ${{ github.event.inputs.flavor }}
PUBLIC_PATH: '/'
SELF_HOSTED: true
MIXPANEL_TOKEN: ${{ secrets.MIXPANEL_TOKEN }}
- name: Upload mobile artifact
uses: actions/upload-artifact@v4
with:
name: selfhost-mobile
path: ./packages/frontend/apps/mobile/dist
path: ./packages/frontend/web/dist
if-no-files-found: error
build-admin-selfhost:
@@ -198,7 +81,7 @@ jobs:
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build admin
- name: Build Core
run: yarn nx build @affine/admin --skip-nx-cache
env:
BUILD_TYPE: ${{ github.event.inputs.flavor }}
@@ -248,16 +131,12 @@ jobs:
path: ./packages/backend/native/server-native.node
if-no-files-found: error
build-images:
name: Build Images
build-docker:
name: Build Docker
runs-on: ubuntu-latest
needs:
- build-server
- build-web
- build-mobile
- build-admin
- build-web-selfhost
- build-mobile-selfhost
- build-admin-selfhost
- build-server-native
steps:
@@ -316,41 +195,17 @@ jobs:
registry-url: https://npm.pkg.github.com
scope: '@toeverything'
- name: Download web artifact
uses: actions/download-artifact@v4
with:
name: web
path: ./packages/frontend/apps/web/dist
- name: Download mobile artifact
uses: actions/download-artifact@v4
with:
name: mobile
path: ./packages/frontend/apps/mobile/dist
- name: Download admin artifact
uses: actions/download-artifact@v4
with:
name: admin
path: ./packages/frontend/admin/dist
- name: Download selfhost web artifact
uses: actions/download-artifact@v4
with:
name: selfhost-web
path: ./packages/frontend/apps/web/dist/selfhost
- name: Download selfhost mobile artifact
uses: actions/download-artifact@v4
with:
name: selfhost-mobile
path: ./packages/frontend/apps/mobile/dist/selfhost
path: ./packages/frontend/web/dist
- name: Download selfhost admin artifact
uses: actions/download-artifact@v4
with:
name: selfhost-admin
path: ./packages/frontend/admin/dist/selfhost
path: ./packages/frontend/admin/dist
- name: Install Node.js dependencies
run: |
@@ -365,17 +220,6 @@ jobs:
id: version
uses: ./.github/actions/setup-version
- name: Build front Dockerfile
uses: docker/build-push-action@v6
with:
context: .
push: true
pull: true
platforms: linux/amd64,linux/arm64
provenance: true
file: .github/deployment/front/Dockerfile
tags: ghcr.io/toeverything/affine-front:${{env.RELEASE_FLAVOR}}-${{ env.GIT_SHORT_HASH }},ghcr.io/toeverything/affine-front:${{env.RELEASE_FLAVOR}}
- name: Build graphql Dockerfile
uses: docker/build-push-action@v6
with:

View File

@@ -117,7 +117,7 @@ jobs:
name: E2E Test
runs-on: ubuntu-latest
env:
DISTRIBUTION: web
DISTRIBUTION: browser
IN_CI_TEST: true
strategy:
fail-fast: false
@@ -143,41 +143,11 @@ jobs:
path: ./test-results
if-no-files-found: ignore
e2e-mobile-test:
name: E2E Mobile Test
runs-on: ubuntu-latest
env:
DISTRIBUTION: mobile
IN_CI_TEST: true
strategy:
fail-fast: false
matrix:
shard: [1, 2, 3, 4, 5]
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
electron-install: false
full-cache: true
- name: Run playwright tests
run: yarn workspace @affine-test/affine-mobile e2e --forbid-only --shard=${{ matrix.shard }}/${{ strategy.job-total }}
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v4
with:
name: test-results-e2e-mobile-${{ matrix.shard }}
path: ./test-results
if-no-files-found: ignore
e2e-migration-test:
name: E2E Migration Test
runs-on: ubuntu-latest
env:
DISTRIBUTION: web
DISTRIBUTION: browser
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
@@ -204,13 +174,13 @@ jobs:
needs:
- build-native
env:
DISTRIBUTION: web
DISTRIBUTION: browser
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: true
electron-install: false
full-cache: true
- name: Download affine.linux-x64-gnu.node
@@ -296,8 +266,8 @@ jobs:
path: ./packages/backend/native/server-native.node
if-no-files-found: error
build-electron-renderer:
name: Build @affine/electron renderer
build-web:
name: Build @affine/web
runs-on: ubuntu-latest
steps:
@@ -307,13 +277,13 @@ jobs:
with:
electron-install: false
full-cache: true
- name: Build Electron renderer
- name: Build Web
# always skip cache because its fast, and cache configuration is always changing
run: yarn build
run: yarn nx build @affine/web --skip-nx-cache
env:
DISTRIBUTION: desktop
DISTRIBUTION: 'desktop'
- name: zip web
run: tar -czf dist.tar.gz --directory=packages/frontend/apps/electron/renderer/dist .
run: tar -czf dist.tar.gz --directory=packages/frontend/electron/renderer/dist .
- name: Upload web artifact
uses: actions/upload-artifact@v4
with:
@@ -327,7 +297,7 @@ jobs:
needs: build-server-native
env:
NODE_ENV: test
DISTRIBUTION: web
DISTRIBUTION: browser
services:
postgres:
image: postgres
@@ -396,7 +366,7 @@ jobs:
name: ${{ matrix.tests.name }}
runs-on: ubuntu-latest
env:
DISTRIBUTION: web
DISTRIBUTION: browser
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
IN_CI_TEST: true
strategy:
@@ -520,7 +490,7 @@ jobs:
test: true,
}
needs:
- build-electron-renderer
- build-web
- build-native
steps:
- uses: actions/checkout@v4
@@ -554,7 +524,7 @@ jobs:
- name: Download web artifact
uses: ./.github/actions/download-web
with:
path: packages/frontend/apps/electron/resources/web-static
path: packages/frontend/electron/resources/web-static
- name: Build Desktop Layers
run: yarn workspace @affine/electron build
@@ -601,11 +571,8 @@ jobs:
- lint
- check-yarn-binary
- e2e-test
- e2e-mobile-test
- e2e-migration-test
- unit-test
- build-native
- build-server-native
- server-test
- server-e2e-test
- desktop-test

View File

@@ -21,60 +21,133 @@ permissions:
packages: 'write'
jobs:
output-prev-version:
name: Output previous version
runs-on: ubuntu-latest
environment: ${{ github.event.inputs.flavor }}
outputs:
prev: ${{ steps.print.outputs.version }}
namespace: ${{ steps.print.outputs.namespace }}
steps:
- uses: actions/checkout@v4
- name: Auth to Cluster
uses: './.github/actions/cluster-auth'
with:
gcp-project-number: ${{ secrets.GCP_PROJECT_NUMBER }}
gcp-project-id: ${{ secrets.GCP_PROJECT_ID }}
service-account: ${{ secrets.GCP_HELM_DEPLOY_SERVICE_ACCOUNT }}
cluster-name: ${{ secrets.GCP_CLUSTER_NAME }}
cluster-location: ${{ secrets.GCP_CLUSTER_LOCATION }}
- name: Output previous version
id: print
run: |
namespace=""
if [ "${{ github.event.inputs.flavor }}" = "canary" ]; then
namespace="dev"
elif [ "${{ github.event.inputs.flavor }}" = "beta" ]; then
namespace="beta"
elif [ "${{ github.event.inputs.flavor }}" = "stable" ]; then
namespace="production"
else
echo "Invalid flavor: ${{ github.event.inputs.flavor }}"
exit 1
fi
echo "Namespace set to: $namespace"
# Get the previous version from the deployment
prev_version=$(kubectl get deployment -n $namespace affine-graphql -o=jsonpath='{.spec.template.spec.containers[0].image}' | awk -F '-' '{print $3}')
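# Sketch (hypothetical tag): for an image like
# ghcr.io/toeverything/affine-graphql:stable-abc1234, awk -F '-' splits on
# every hyphen, so $3 is "abc1234", the short hash of the previous deploy.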
echo "Previous version: $prev_version"
echo "version=$prev_version" >> $GITHUB_OUTPUT
echo "namesapce=$namespace" >> $GITHUB_OUTPUT
build-images:
name: Build Images
uses: ./.github/workflows/build-images.yml
secrets: inherit
build-server-image:
name: Build Server Image
uses: ./.github/workflows/build-server-image.yml
with:
flavor: ${{ github.event.inputs.flavor }}
build-web:
name: Build @affine/web
runs-on: ubuntu-latest
environment: ${{ github.event.inputs.flavor }}
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Core
run: yarn nx build @affine/web --skip-nx-cache
env:
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
BUILD_TYPE: ${{ github.event.inputs.flavor }}
CAPTCHA_SITE_KEY: ${{ secrets.CAPTCHA_SITE_KEY }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: 'affine-web'
SENTRY_RELEASE: ${{ steps.version.outputs.APP_VERSION }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
PERFSEE_TOKEN: ${{ secrets.PERFSEE_TOKEN }}
MIXPANEL_TOKEN: ${{ secrets.MIXPANEL_TOKEN }}
- name: Upload web artifact
uses: actions/upload-artifact@v4
with:
name: web
path: ./packages/frontend/web/dist
if-no-files-found: error
build-admin:
name: Build @affine/admin
runs-on: ubuntu-latest
environment: ${{ github.event.inputs.flavor }}
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Core
run: yarn nx build @affine/admin --skip-nx-cache
env:
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
BUILD_TYPE: ${{ github.event.inputs.flavor }}
CAPTCHA_SITE_KEY: ${{ secrets.CAPTCHA_SITE_KEY }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: 'affine-admin'
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
PERFSEE_TOKEN: ${{ secrets.PERFSEE_TOKEN }}
MIXPANEL_TOKEN: ${{ secrets.MIXPANEL_TOKEN }}
- name: Upload admin artifact
uses: actions/upload-artifact@v4
with:
name: admin
path: ./packages/frontend/admin/dist
if-no-files-found: error
build-frontend-image:
name: Build Frontend Image
runs-on: ubuntu-latest
needs:
- build-web
- build-admin
steps:
- uses: actions/checkout@v4
- name: Download web artifact
uses: actions/download-artifact@v4
with:
name: web
path: ./packages/frontend/web/dist
- name: Download admin artifact
uses: actions/download-artifact@v4
with:
name: admin
path: ./packages/frontend/admin/dist
- name: Setup env
run: |
echo "GIT_SHORT_HASH=$(git rev-parse --short HEAD)" >> "$GITHUB_ENV"
if [ -z "${{ inputs.flavor }}" ]
then
echo "RELEASE_FLAVOR=canary" >> "$GITHUB_ENV"
else
echo "RELEASE_FLAVOR=${{ inputs.flavor }}" >> "$GITHUB_ENV"
fi
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
logout: false
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build front Dockerfile
uses: docker/build-push-action@v6
with:
context: .
push: true
pull: true
platforms: linux/amd64,linux/arm64
provenance: true
file: .github/deployment/front/Dockerfile
tags: ghcr.io/toeverything/affine-front:${{env.RELEASE_FLAVOR}}-${{ env.GIT_SHORT_HASH }},ghcr.io/toeverything/affine-front:${{env.RELEASE_FLAVOR}}
deploy:
name: Deploy to cluster
if: ${{ github.event_name == 'workflow_dispatch' }}
environment: ${{ github.event.inputs.flavor }}
needs:
- build-images
- build-frontend-image
- build-server-image
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
@@ -120,94 +193,3 @@ jobs:
STRIPE_API_KEY: ${{ secrets.STRIPE_API_KEY }}
STRIPE_WEBHOOK_KEY: ${{ secrets.STRIPE_WEBHOOK_KEY }}
STATIC_IP_NAME: ${{ secrets.STATIC_IP_NAME }}
deploy-done:
needs:
- output-prev-version
- build-images
- deploy
if: always()
runs-on: ubuntu-latest
name: Post deploy message
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: actions/checkout@v4
with:
repository: toeverything/blocksuite
path: blocksuite
fetch-depth: 0
fetch-tags: true
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
extra-flags: 'workspaces focus @affine/changelog'
electron-install: false
- name: Output deployed info
if: ${{ always() && !contains(needs.*.result, 'failure') && !contains(needs.*.result, 'cancelled') }}
id: set_info
run: |
if [ "${{ github.event.inputs.flavor }}" = "canary" ]; then
echo "deployed_url=https://affine.fail" >> $GITHUB_OUTPUT
elif [ "${{ github.event.inputs.flavor }}" = "beta" ]; then
echo "deployed_url=https://insider.affine.pro" >> $GITHUB_OUTPUT
elif [ "${{ github.event.inputs.flavor }}" = "stable" ]; then
echo "deployed_url=https://app.affine.pro" >> $GITHUB_OUTPUT
else
exit 1
fi
env:
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
- name: Post Success event to a Slack channel
if: ${{ always() && !contains(needs.*.result, 'failure') && !contains(needs.*.result, 'cancelled') }}
run: node ./tools/changelog/index.js
env:
CHANNEL_ID: ${{ secrets.RELEASE_SLACK_CHNNEL_ID }}
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
DEPLOYED_URL: ${{ steps.set_info.outputs.deployed_url }}
PREV_VERSION: ${{ needs.output-prev-version.outputs.prev }}
NAMESPACE: ${{ needs.output-prev-version.outputs.namespace }}
DEPLOYMENT: 'SERVER'
FLAVOR: ${{ github.event.inputs.flavor }}
BLOCKSUITE_REPO_PATH: ${{ github.workspace }}/blocksuite
- name: Post Failed event to a Slack channel
id: failed-slack
uses: slackapi/slack-github-action@v1.27.0
if: ${{ always() && contains(needs.*.result, 'failure') }}
with:
channel-id: ${{ secrets.RELEASE_SLACK_CHNNEL_ID }}
payload: |
{
"blocks": [
{
"type": "section",
"text": {
"text": "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Backend deploy failed `${{ github.event.inputs.flavor }}`>",
"type": "mrkdwn"
}
}
]
}
env:
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
- name: Post Cancel event to a Slack channel
id: cancel-slack
uses: slackapi/slack-github-action@v1.27.0
if: ${{ always() && contains(needs.*.result, 'cancelled') && !contains(needs.*.result, 'failure') }}
with:
channel-id: ${{ secrets.RELEASE_SLACK_CHNNEL_ID }}
payload: |
{
"blocks": [
{
"type": "section",
"text": {
"text": "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Backend deploy cancelled `${{ github.event.inputs.flavor }}`>",
"type": "mrkdwn"
}
}
]
}
env:
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}

.github/workflows/languages-sync.yml vendored Normal file
View File

@@ -0,0 +1,35 @@
name: Languages Sync
on:
push:
branches: ['canary']
paths:
- 'packages/frontend/i18n/**'
- '.github/workflows/languages-sync.yml'
- '!.github/actions/setup-node/action.yml'
pull_request_target:
branches: ['canary']
paths:
- 'packages/frontend/i18n/**'
- '.github/workflows/languages-sync.yml'
- '!.github/actions/setup-node/action.yml'
workflow_dispatch:
jobs:
main:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Check Language Key
if: github.ref != 'refs/heads/canary'
run: yarn workspace @affine/i18n run sync-languages:check
env:
TOLGEE_API_KEY: ${{ secrets.TOLGEE_API_KEY }}
- name: Sync Languages
if: github.ref == 'refs/heads/canary'
run: yarn workspace @affine/i18n run sync-languages
env:
TOLGEE_API_KEY: ${{ secrets.TOLGEE_API_KEY }}

View File

@@ -67,7 +67,7 @@ jobs:
uses: actions/upload-artifact@v4
with:
name: web
path: packages/frontend/apps/electron/resources/web-static
path: packages/frontend/electron/resources/web-static
make-distribution:
strategy:
@@ -119,7 +119,7 @@ jobs:
- uses: actions/download-artifact@v4
with:
name: web
path: packages/frontend/apps/electron/resources/web-static
path: packages/frontend/electron/resources/web-static
- name: Build Desktop Layers
run: yarn workspace @affine/electron build
@@ -146,20 +146,20 @@ jobs:
- name: signing DMG
if: ${{ matrix.spec.platform == 'darwin' }}
run: |
codesign --force --sign "Developer ID Application: TOEVERYTHING PTE. LTD." packages/frontend/apps/electron/out/${{ env.BUILD_TYPE }}/make/AFFiNE.dmg
codesign --force --sign "Developer ID Application: TOEVERYTHING PTE. LTD." packages/frontend/electron/out/${{ env.BUILD_TYPE }}/make/AFFiNE.dmg
- name: Save artifacts (mac)
if: ${{ matrix.spec.platform == 'darwin' }}
run: |
mkdir -p builds
mv packages/frontend/apps/electron/out/*/make/*.dmg ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.dmg
mv packages/frontend/apps/electron/out/*/make/zip/darwin/${{ matrix.spec.arch }}/*.zip ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.zip
mv packages/frontend/electron/out/*/make/*.dmg ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.dmg
mv packages/frontend/electron/out/*/make/zip/darwin/${{ matrix.spec.arch }}/*.zip ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.zip
- name: Save artifacts (linux)
if: ${{ matrix.spec.platform == 'linux' }}
run: |
mkdir -p builds
mv packages/frontend/apps/electron/out/*/make/zip/linux/x64/*.zip ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-linux-x64.zip
mv packages/frontend/apps/electron/out/*/make/*.AppImage ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-linux-x64.appimage
mv packages/frontend/electron/out/*/make/zip/linux/x64/*.zip ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-linux-x64.zip
mv packages/frontend/electron/out/*/make/*.AppImage ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-linux-x64.appimage
- uses: actions/attest-build-provenance@v1
if: ${{ matrix.spec.platform == 'darwin' }}
@@ -181,7 +181,7 @@ jobs:
name: affine-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}-builds
path: builds
package-distribution-windows:
make-distribution-windows-skip-signing:
strategy:
matrix:
spec:
@@ -191,8 +191,6 @@ jobs:
target: x86_64-pc-windows-msvc
runs-on: ${{ matrix.spec.runner }}
needs: before-make
outputs:
FILES_TO_BE_SIGNED: ${{ steps.get_files_to_be_signed.outputs.FILES_TO_BE_SIGNED }}
env:
SKIP_GENERATE_ASSETS: 1
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
@@ -221,7 +219,7 @@ jobs:
- uses: actions/download-artifact@v4
with:
name: web
path: packages/frontend/apps/electron/resources/web-static
path: packages/frontend/electron/resources/web-static
- name: Build Desktop Layers
run: yarn workspace @affine/electron build
@@ -232,117 +230,18 @@ jobs:
SKIP_WEB_BUILD: 1
HOIST_NODE_MODULES: 1
- name: get all files to be signed
id: get_files_to_be_signed
run: |
Set-Variable -Name FILES_TO_BE_SIGNED -Value ((Get-ChildItem -Path packages/frontend/apps/electron/out -Recurse -File | Where-Object { $_.Extension -in @(".exe", ".node", ".dll", ".msi") } | ForEach-Object { '"' + $_.FullName.Replace((Get-Location).Path + '\packages\frontend\apps\electron\out\', '') + '"' }) -join ' ')
"FILES_TO_BE_SIGNED=$FILES_TO_BE_SIGNED" >> $env:GITHUB_OUTPUT
echo $FILES_TO_BE_SIGNED
- name: Zip artifacts for faster upload
run: Compress-Archive -CompressionLevel Fastest -Path packages/frontend/apps/electron/out/* -DestinationPath archive.zip
- name: Save packaged artifacts for signing
uses: actions/upload-artifact@v4
with:
name: packaged-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}
path: |
archive.zip
!**/*.map
sign-packaged-artifacts-windows:
needs: package-distribution-windows
uses: ./.github/workflows/windows-signer.yml
with:
files: ${{ needs.package-distribution-windows.outputs.FILES_TO_BE_SIGNED }}
artifact-name: packaged-win32-x64
make-windows-installer:
needs: sign-packaged-artifacts-windows
strategy:
matrix:
spec:
- runner: windows-latest
platform: win32
arch: x64
target: x86_64-pc-windows-msvc
runs-on: ${{ matrix.spec.runner }}
outputs:
FILES_TO_BE_SIGNED: ${{ steps.get_files_to_be_signed.outputs.FILES_TO_BE_SIGNED }}
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
timeout-minutes: 10
uses: ./.github/actions/setup-node
with:
extra-flags: workspaces focus @affine/electron @affine/monorepo
hard-link-nm: false
nmHoistingLimits: workspaces
- name: Download and overwrite packaged artifacts
uses: actions/download-artifact@v4
with:
name: signed-packaged-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}
path: .
- name: unzip file
run: Expand-Archive -Path signed.zip -DestinationPath packages/frontend/apps/electron/out
- name: Make squirrel.windows installer
run: yarn workspace @affine/electron make-squirrel --platform=${{ matrix.spec.platform }} --arch=${{ matrix.spec.arch }}
- name: Make nsis.windows installer
run: yarn workspace @affine/electron make-nsis --platform=${{ matrix.spec.platform }} --arch=${{ matrix.spec.arch }}
- name: Zip artifacts for faster upload
run: Compress-Archive -CompressionLevel Fastest -Path packages/frontend/apps/electron/out/${{ env.BUILD_TYPE }}/make/* -DestinationPath archive.zip
- name: get all files to be signed
id: get_files_to_be_signed
run: |
Set-Variable -Name FILES_TO_BE_SIGNED -Value ((Get-ChildItem -Path packages/frontend/apps/electron/out/${{ env.BUILD_TYPE }}/make -Recurse -File | Where-Object { $_.Extension -in @(".exe", ".node", ".dll", ".msi") } | ForEach-Object { '"' + $_.FullName.Replace((Get-Location).Path + '\packages\frontend\apps\electron\out\${{ env.BUILD_TYPE }}\make\', '') + '"' }) -join ' ')
"FILES_TO_BE_SIGNED=$FILES_TO_BE_SIGNED" >> $env:GITHUB_OUTPUT
echo $FILES_TO_BE_SIGNED
- name: Save installer for signing
uses: actions/upload-artifact@v4
with:
name: installer-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}
path: archive.zip
sign-installer-artifacts-windows:
needs: make-windows-installer
uses: ./.github/workflows/windows-signer.yml
with:
files: ${{ needs.make-windows-installer.outputs.FILES_TO_BE_SIGNED }}
artifact-name: installer-win32-x64
finalize-installer-windows:
needs: [sign-installer-artifacts-windows, before-make]
strategy:
matrix:
spec:
- runner: windows-latest
platform: win32
arch: x64
target: x86_64-pc-windows-msvc
runs-on: ${{ matrix.spec.runner }}
steps:
- name: Download and overwrite installer artifacts
uses: actions/download-artifact@v4
with:
name: signed-installer-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}
path: .
- name: unzip file
run: Expand-Archive -Path signed.zip -DestinationPath packages/frontend/apps/electron/out/${{ env.BUILD_TYPE }}/make
- name: Save artifacts
run: |
mkdir -p builds
mv packages/frontend/apps/electron/out/*/make/zip/win32/x64/AFFiNE*-win32-x64-*.zip ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-x64.zip
mv packages/frontend/apps/electron/out/*/make/squirrel.windows/x64/*.exe ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-x64.exe
mv packages/frontend/apps/electron/out/*/make/nsis.windows/x64/*.exe ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-x64.nsis.exe
mv packages/frontend/electron/out/*/make/zip/win32/x64/AFFiNE*-win32-x64-*.zip ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-x64.zip
mv packages/frontend/electron/out/*/make/squirrel.windows/x64/*.exe ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-x64.exe
mv packages/frontend/electron/out/*/make/nsis.windows/x64/*.exe ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-x64.nsis.exe
- uses: actions/attest-build-provenance@v1
with:
@@ -357,8 +256,180 @@ jobs:
name: affine-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}-builds
path: builds
# package-distribution-windows:
# strategy:
# matrix:
# spec:
# - runner: windows-latest
# platform: win32
# arch: x64
# target: x86_64-pc-windows-msvc
# runs-on: ${{ matrix.spec.runner }}
# needs: before-make
# outputs:
# FILES_TO_BE_SIGNED: ${{ steps.get_files_to_be_signed.outputs.FILES_TO_BE_SIGNED }}
# env:
# SKIP_GENERATE_ASSETS: 1
# SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
# SENTRY_PROJECT: 'affine'
# SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
# SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
# MIXPANEL_TOKEN: ${{ secrets.MIXPANEL_TOKEN }}
# steps:
# - uses: actions/checkout@v4
# - name: Setup Version
# id: version
# uses: ./.github/actions/setup-version
# - name: Setup Node.js
# timeout-minutes: 10
# uses: ./.github/actions/setup-node
# with:
# extra-flags: workspaces focus @affine/electron @affine/monorepo
# hard-link-nm: false
# nmHoistingLimits: workspaces
# - name: Build AFFiNE native
# uses: ./.github/actions/build-rust
# with:
# target: ${{ matrix.spec.target }}
# package: '@affine/native'
# nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
# - uses: actions/download-artifact@v4
# with:
# name: web
# path: packages/frontend/electron/resources/web-static
# - name: Build Desktop Layers
# run: yarn workspace @affine/electron build
# - name: package
# run: yarn workspace @affine/electron package --platform=${{ matrix.spec.platform }} --arch=${{ matrix.spec.arch }}
# env:
# SKIP_WEB_BUILD: 1
# HOIST_NODE_MODULES: 1
# - name: get all files to be signed
# id: get_files_to_be_signed
# run: |
# Set-Variable -Name FILES_TO_BE_SIGNED -Value ((Get-ChildItem -Path packages/frontend/electron/out -Recurse -File | Where-Object { $_.Extension -in @(".exe", ".node", ".dll", ".msi") } | ForEach-Object { '"' + $_.FullName.Replace((Get-Location).Path + '\packages\frontend\electron\out\', '') + '"' }) -join ' ')
# "FILES_TO_BE_SIGNED=$FILES_TO_BE_SIGNED" >> $env:GITHUB_OUTPUT
# echo $FILES_TO_BE_SIGNED
# - name: Zip artifacts for faster upload
# run: Compress-Archive -CompressionLevel Fastest -Path packages/frontend/electron/out/* -DestinationPath archive.zip
# - name: Save packaged artifacts for signing
# uses: actions/upload-artifact@v4
# with:
# name: packaged-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}
# path: |
# archive.zip
# !**/*.map
# sign-packaged-artifacts-windows:
# needs: package-distribution-windows
# uses: ./.github/workflows/windows-signer.yml
# with:
# files: ${{ needs.package-distribution-windows.outputs.FILES_TO_BE_SIGNED }}
# artifact-name: packaged-win32-x64
# make-windows-installer:
# needs: sign-packaged-artifacts-windows
# strategy:
# matrix:
# spec:
# - runner: windows-latest
# platform: win32
# arch: x64
# target: x86_64-pc-windows-msvc
# runs-on: ${{ matrix.spec.runner }}
# outputs:
# FILES_TO_BE_SIGNED: ${{ steps.get_files_to_be_signed.outputs.FILES_TO_BE_SIGNED }}
# steps:
# - uses: actions/checkout@v4
# - name: Setup Version
# id: version
# uses: ./.github/actions/setup-version
# - name: Setup Node.js
# timeout-minutes: 10
# uses: ./.github/actions/setup-node
# with:
# extra-flags: workspaces focus @affine/electron @affine/monorepo
# hard-link-nm: false
# nmHoistingLimits: workspaces
# - name: Download and overwrite packaged artifacts
# uses: actions/download-artifact@v4
# with:
# name: signed-packaged-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}
# path: .
# - name: unzip file
# run: Expand-Archive -Path signed.zip -DestinationPath packages/frontend/electron/out
# - name: Make squirrel.windows installer
# run: yarn workspace @affine/electron make-squirrel --platform=${{ matrix.spec.platform }} --arch=${{ matrix.spec.arch }}
# - name: Make nsis.windows installer
# run: yarn workspace @affine/electron make-nsis --platform=${{ matrix.spec.platform }} --arch=${{ matrix.spec.arch }}
# - name: Zip artifacts for faster upload
# run: Compress-Archive -CompressionLevel Fastest -Path packages/frontend/electron/out/${{ env.BUILD_TYPE }}/make/* -DestinationPath archive.zip
# - name: get all files to be signed
# id: get_files_to_be_signed
# run: |
# Set-Variable -Name FILES_TO_BE_SIGNED -Value ((Get-ChildItem -Path packages/frontend/electron/out/${{ env.BUILD_TYPE }}/make -Recurse -File | Where-Object { $_.Extension -in @(".exe", ".node", ".dll", ".msi") } | ForEach-Object { '"' + $_.FullName.Replace((Get-Location).Path + '\packages\frontend\electron\out\${{ env.BUILD_TYPE }}\make\', '') + '"' }) -join ' ')
# "FILES_TO_BE_SIGNED=$FILES_TO_BE_SIGNED" >> $env:GITHUB_OUTPUT
# echo $FILES_TO_BE_SIGNED
# - name: Save installer for signing
# uses: actions/upload-artifact@v4
# with:
# name: installer-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}
# path: archive.zip
# sign-installer-artifacts-windows:
# needs: make-windows-installer
# uses: ./.github/workflows/windows-signer.yml
# with:
# files: ${{ needs.make-windows-installer.outputs.FILES_TO_BE_SIGNED }}
# artifact-name: installer-win32-x64
# finalize-installer-windows:
# needs: [sign-installer-artifacts-windows, before-make]
# strategy:
# matrix:
# spec:
# - runner: windows-latest
# platform: win32
# arch: x64
# target: x86_64-pc-windows-msvc
# runs-on: ${{ matrix.spec.runner }}
# steps:
# - name: Download and overwrite installer artifacts
# uses: actions/download-artifact@v4
# with:
# name: signed-installer-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}
# path: .
# - name: unzip file
# run: Expand-Archive -Path signed.zip -DestinationPath packages/frontend/electron/out/${{ env.BUILD_TYPE }}/make
# - name: Save artifacts
# run: |
# mkdir -p builds
# mv packages/frontend/electron/out/*/make/zip/win32/x64/AFFiNE*-win32-x64-*.zip ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-x64.zip
# mv packages/frontend/electron/out/*/make/squirrel.windows/x64/*.exe ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-x64.exe
# mv packages/frontend/electron/out/*/make/nsis.windows/x64/*.exe ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-x64.nsis.exe
# - name: Upload Artifact
# uses: actions/upload-artifact@v4
# with:
# name: affine-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}-builds
# path: builds
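(Editor's note: the save-artifacts steps above, commented and active alike, reduce to renaming forge outputs into predictable release names. A hedged Node/TypeScript sketch of that renaming, with paths and env names mirrored from the workflow; the `glob` dependency is an assumption, not something this hunk shows.)

// Hedged sketch of the artifact-renaming step; not the actual release script.
import { mkdirSync, renameSync } from 'node:fs';
import { globSync } from 'glob'; // assumed dependency

const version = process.env.RELEASE_VERSION ?? '0.0.0';
const buildType = process.env.BUILD_TYPE ?? 'canary';
mkdirSync('builds', { recursive: true });

const targets: Array<[pattern: string, ext: string]> = [
  ['packages/frontend/electron/out/*/make/zip/win32/x64/AFFiNE*-win32-x64-*.zip', 'zip'],
  ['packages/frontend/electron/out/*/make/squirrel.windows/x64/*.exe', 'exe'],
  ['packages/frontend/electron/out/*/make/nsis.windows/x64/*.exe', 'nsis.exe'],
];

for (const [pattern, ext] of targets) {
  // One match per pattern is expected; later matches would overwrite.
  for (const file of globSync(pattern)) {
    renameSync(file, `builds/affine-${version}-${buildType}-windows-x64.${ext}`);
  }
}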
release:
needs: [before-make, make-distribution, finalize-installer-windows]
needs:
- before-make
- make-distribution
- make-distribution-windows-skip-signing
runs-on: ubuntu-latest
steps:
@@ -394,7 +465,7 @@ jobs:
node-version: 20
- name: Generate Release yml
run: |
node ./packages/frontend/apps/electron/scripts/generate-yml.js
node ./packages/frontend/electron/scripts/generate-yml.js
env:
RELEASE_VERSION: ${{ needs.before-make.outputs.RELEASE_VERSION }}
- name: Create Release Draft

View File

@@ -1,42 +0,0 @@
name: Sync I18n with Crowdin
on:
push:
branches:
- canary
paths:
- 'packages/frontend/i18n/**'
workflow_dispatch:
jobs:
synchronize-with-crowdin:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Crowdin action
uses: crowdin/github-action@v2
with:
upload_sources: true
upload_translations: true
download_translations: true
auto_approve_imported: true
import_eq_suggestions: true
export_only_approved: true
skip_untranslated_strings: true
localization_branch_name: l10n_crowdin_translations
create_pull_request: true
pull_request_title: 'New Crowdin Translations'
pull_request_body: 'New Crowdin translations by [Crowdin GH Action](https://github.com/crowdin/github-action)'
pull_request_base_branch_name: 'canary'
config: packages/frontend/i18n/crowdin.yml
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CROWDIN_PROJECT_ID: ${{ secrets.CROWDIN_PROJECT_ID }}
CROWDIN_PERSONAL_TOKEN: ${{ secrets.CROWDIN_PERSONAL_TOKEN }}

.nvmrc
View File

@@ -1 +1 @@
20.17.0
20.15.1

.yarn/releases/yarn-4.3.1.cjs vendored Executable file

File diff suppressed because one or more lines are too long


View File

@@ -12,4 +12,4 @@ npmPublishAccess: public
npmPublishRegistry: "https://registry.npmjs.org"
yarnPath: .yarn/releases/yarn-4.5.0.cjs
yarnPath: .yarn/releases/yarn-4.3.1.cjs

Cargo.lock generated

File diff suppressed because it is too large

View File

@@ -3,27 +3,26 @@ members = ["./packages/backend/native", "./packages/frontend/native", "./packag
resolver = "2"
[workspace.dependencies]
anyhow = "1"
chrono = "0.4"
dotenv = "0.15"
file-format = { version = "0.25", features = ["reader"] }
mimalloc = "0.1"
napi = { version = "3.0.0-alpha.12", features = ["async", "chrono_date", "error_anyhow", "napi9", "serde"] }
napi-build = { version = "2" }
napi-derive = { version = "3.0.0-alpha.12" }
notify = { version = "6", features = ["serde"] }
once_cell = "1"
parking_lot = "0.12"
rand = "0.8"
serde = "1"
serde_json = "1"
sha3 = "0.10"
sqlx = { version = "0.8", default-features = false, features = ["chrono", "macros", "migrate", "runtime-tokio", "sqlite", "tls-rustls"] }
tiktoken-rs = "0.5"
tokio = "1.37"
uuid = "1.8"
v_htmlescape = "0.15"
y-octo = { git = "https://github.com/y-crdt/y-octo.git", branch = "main" }
anyhow = "1"
chrono = "0.4"
dotenv = "0.15"
file-format = { version = "0.25", features = ["reader"] }
mimalloc = "0.1"
napi = { version = "3.0.0-alpha.1", features = ["async", "chrono_date", "error_anyhow", "napi9", "serde"] }
napi-build = { version = "2" }
napi-derive = { version = "3.0.0-alpha.1" }
notify = { version = "6", features = ["serde"] }
once_cell = "1"
parking_lot = "0.12"
rand = "0.8"
serde = "1"
serde_json = "1"
sha3 = "0.10"
sqlx = { version = "0.7", default-features = false, features = ["chrono", "macros", "migrate", "runtime-tokio", "sqlite", "tls-rustls"] }
tiktoken-rs = "0.5"
tokio = "1.37"
uuid = "1.8"
y-octo = { git = "https://github.com/y-crdt/y-octo.git", branch = "main" }
[profile.dev.package.sqlx-macros]
opt-level = 3

View File

@@ -23,7 +23,7 @@
<div align="center">
<a href="https://affine.pro">Home Page</a> |
<a href="https://discord.gg/whd5mjYqVw">Discord</a> |
<a href="https://discord.com/invite/yz6tGVsf5p">Discord</a> |
<a href="https://app.affine.pro">Live Demo</a> |
<a href="https://affine.pro/blog/">Blog</a> |
<a href="https://docs.affine.pro/docs/">Documentation</a>
@@ -176,12 +176,6 @@ Thanks to [Chromatic](https://www.chromatic.com/) for providing the visual testi
## License
### Editions
- AFFiNE Community Edition (CE) is the currently available version; it's free to self-host under the MIT license.
- AFFiNE Enterprise Edition (EE) is yet to be published. It will have more advanced features and enterprise-oriented offerings, including but not limited to rebranding, SSO, and advanced admin and audit capabilities; see https://affine.pro/pricing for more information.
See [LICENSE] for details.
[all-contributors-badge]: https://img.shields.io/github/contributors/toeverything/AFFiNE

View File

@@ -19,5 +19,5 @@
],
"ext": "ts,md,json"
},
"version": "0.17.0"
"version": "0.15.0"
}

nx.json
View File

@@ -81,6 +81,9 @@
"test": {
"outputs": ["{workspaceRoot}/.nyc_output"],
"inputs": [
{
"env": "ENABLE_PRELOADING"
},
{
"env": "COVERAGE"
}
@@ -89,6 +92,9 @@
"test:ui": {
"outputs": ["{workspaceRoot}/.nyc_output"],
"inputs": [
{
"env": "ENABLE_PRELOADING"
},
{
"env": "COVERAGE"
}
@@ -96,7 +102,11 @@
},
"test:coverage": {
"outputs": ["{workspaceRoot}/.nyc_output"],
"inputs": []
"inputs": [
{
"env": "ENABLE_PRELOADING"
}
]
}
}
}

View File

@@ -1,13 +1,12 @@
{
"name": "@affine/monorepo",
"version": "0.17.0",
"version": "0.15.0",
"private": true,
"author": "toeverything",
"license": "MIT",
"workspaces": [
".",
"packages/*/*",
"packages/frontend/apps/*",
"tools/*",
"docs/reference",
"tools/@types/*",
@@ -19,8 +18,8 @@
},
"scripts": {
"dev": "yarn workspace @affine/cli dev",
"build": "yarn workspace @affine/cli bundle",
"dev:electron": "yarn workspace @affine/electron dev",
"build": "yarn nx build @affine/web",
"build:electron": "yarn nx build @affine/electron",
"build:server-native": "yarn nx run-many -t build -p @affine/server-native",
"start:web-static": "yarn workspace @affine/web static-server",
@@ -53,23 +52,30 @@
]
},
"devDependencies": {
"@affine-test/kit": "workspace:*",
"@affine/cli": "workspace:*",
"@faker-js/faker": "^9.0.0",
"@commitlint/cli": "^19.2.1",
"@commitlint/config-conventional": "^19.1.0",
"@faker-js/faker": "^8.4.1",
"@istanbuljs/schema": "^0.1.3",
"@magic-works/i18n-codegen": "^0.6.0",
"@playwright/test": "=1.47.2",
"@nx/vite": "^19.5.3",
"@playwright/test": "=1.44.1",
"@taplo/cli": "^0.7.0",
"@testing-library/react": "^16.0.0",
"@toeverything/infra": "workspace:*",
"@types/affine__env": "workspace:*",
"@types/eslint": "^9.0.0",
"@types/eslint": "^8.56.7",
"@types/node": "^20.12.7",
"@typescript-eslint/eslint-plugin": "^7.6.0",
"@typescript-eslint/parser": "^7.6.0",
"@vanilla-extract/vite-plugin": "^4.0.7",
"@vitest/coverage-istanbul": "2.1.1",
"@vitest/ui": "2.1.1",
"@vanilla-extract/webpack-plugin": "^2.3.7",
"@vitejs/plugin-react-swc": "^3.6.0",
"@vitest/coverage-istanbul": "1.6.0",
"@vitest/ui": "1.6.0",
"cross-env": "^7.0.3",
"electron": "^32.0.0",
"electron": "~30.2.0",
"eslint": "^8.57.0",
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-import-x": "^0.5.0",
@@ -80,21 +86,31 @@
"eslint-plugin-sonarjs": "^0.25.1",
"eslint-plugin-unicorn": "^52.0.0",
"eslint-plugin-unused-imports": "^3.1.0",
"happy-dom": "^15.0.0",
"eslint-plugin-vue": "^9.24.1",
"fake-indexeddb": "6.0.0",
"happy-dom": "^14.7.1",
"husky": "^9.0.11",
"lint-staged": "^15.2.2",
"msw": "^2.3.0",
"nanoid": "^5.0.7",
"nx": "^19.0.0",
"oxlint": "0.9.6",
"prettier": "^3.3.3",
"nyc": "^17.0.0",
"oxlint": "0.6.1",
"prettier": "^3.2.5",
"semver": "^7.6.0",
"serve": "^14.2.1",
"string-width": "^7.1.0",
"ts-node": "^10.9.2",
"typescript": "^5.4.5",
"unplugin-swc": "^1.4.5",
"vite": "^5.2.8",
"vitest": "2.1.1"
"vite-plugin-istanbul": "^6.0.0",
"vite-plugin-static-copy": "^1.0.2",
"vitest": "1.6.0",
"vitest-fetch-mock": "^0.3.0",
"vitest-mock-extended": "^1.3.1"
},
"packageManager": "yarn@4.5.0",
"packageManager": "yarn@4.3.1",
"resolutions": {
"array-buffer-byte-length": "npm:@nolyfill/array-buffer-byte-length@latest",
"array-includes": "npm:@nolyfill/array-includes@latest",
@@ -151,7 +167,7 @@
"unbox-primitive": "npm:@nolyfill/unbox-primitive@latest",
"which-boxed-primitive": "npm:@nolyfill/which-boxed-primitive@latest",
"which-typed-array": "npm:@nolyfill/which-typed-array@latest",
"@reforged/maker-appimage/@electron-forge/maker-base": "7.5.0",
"@reforged/maker-appimage/@electron-forge/maker-base": "7.4.0",
"macos-alias": "npm:@napi-rs/macos-alias@0.0.4",
"fs-xattr": "npm:@napi-rs/xattr@latest"
}

View File

@@ -7,15 +7,14 @@ version = "1.0.0"
crate-type = ["cdylib"]
[dependencies]
chrono = { workspace = true }
file-format = { workspace = true }
napi = { workspace = true }
napi-derive = { workspace = true }
rand = { workspace = true }
sha3 = { workspace = true }
tiktoken-rs = { workspace = true }
v_htmlescape = { workspace = true }
y-octo = { workspace = true }
chrono = { workspace = true }
file-format = { workspace = true }
napi = { workspace = true }
napi-derive = { workspace = true }
rand = { workspace = true }
sha3 = { workspace = true }
tiktoken-rs = { workspace = true }
y-octo = { workspace = true }
[target.'cfg(not(target_os = "linux"))'.dependencies]
mimalloc = { workspace = true }

View File

@@ -8,8 +8,6 @@ export declare function fromModelName(modelName: string): Tokenizer | null
export declare function getMime(input: Uint8Array): string
export declare function htmlSanitize(input: string): string
/**
* Merge updates the same way `Y.applyUpdate(doc, update)` does and return
* the resulting binary.
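(Editor's note: the doc comment above describes the native merge-updates binding; a hedged sketch of the same behavior using the yjs API it references — illustrative only, the real implementation is native code.)

import * as Y from 'yjs';

// Apply every update to a fresh doc, then re-encode the merged state.
function mergeUpdates(updates: Uint8Array[]): Uint8Array {
  const doc = new Y.Doc();
  for (const update of updates) {
    Y.applyUpdate(doc, update);
  }
  return Y.encodeStateAsUpdate(doc);
}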

View File

@@ -11,4 +11,3 @@ export const mintChallengeResponse = binding.mintChallengeResponse;
export const getMime = binding.getMime;
export const Tokenizer = binding.Tokenizer;
export const fromModelName = binding.fromModelName;
export const htmlSanitize = binding.htmlSanitize;

View File

@@ -1,6 +1,6 @@
{
"name": "@affine/server-native",
"version": "0.17.0",
"version": "0.15.0",
"engines": {
"node": ">= 10.16.0 < 11 || >= 11.8.0"
},
@@ -33,7 +33,7 @@
"build:debug": "napi build"
},
"devDependencies": {
"@napi-rs/cli": "3.0.0-alpha.62",
"@napi-rs/cli": "3.0.0-alpha.60",
"lib0": "^0.2.93",
"nx": "^19.0.0",
"nx-cloud": "^19.0.0",

View File

@@ -12,12 +12,12 @@
"script": "build"
},
"inputs": [
{ "fileset": "{workspaceRoot}/rust-toolchain.toml" },
{ "fileset": "{workspaceRoot}/Cargo.lock" },
{ "fileset": "{workspaceRoot}/packages/backend/native/**/*.rs" },
{ "fileset": "{workspaceRoot}/packages/backend/native/Cargo.toml" }
{ "runtime": "rustc --version" },
{ "runtime": "node -v" },
{ "runtime": "clang --version" },
{ "runtime": "cargo tree" }
],
"outputs": ["{projectRoot}/*.node"]
"outputs": ["{projectRoot}/*.node", "{workspaceRoot}/*.node"]
}
}
}

View File

@@ -1,4 +0,0 @@
#[napi]
pub fn html_sanitize(input: String) -> String {
v_htmlescape::escape(&input).to_string()
}

View File

@@ -2,7 +2,6 @@
pub mod file_type;
pub mod hashcash;
pub mod html_sanitize;
pub mod tiktoken;
use std::fmt::{Debug, Display};

View File

@@ -1,2 +1 @@
.env
static/

View File

@@ -1,146 +0,0 @@
-- AlterTable
ALTER TABLE "_data_migrations" ALTER COLUMN "id" SET DATA TYPE VARCHAR,
ALTER COLUMN "started_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "finished_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "ai_prompts_messages" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "ai_prompts_metadata" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "ai_sessions_messages" ALTER COLUMN "id" SET DATA TYPE VARCHAR,
ALTER COLUMN "session_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "ai_sessions_metadata" ALTER COLUMN "id" SET DATA TYPE VARCHAR,
ALTER COLUMN "user_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "workspace_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "doc_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "deleted_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "parent_session_id" SET DATA TYPE VARCHAR;
-- AlterTable
ALTER TABLE "app_runtime_settings" ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "deleted_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "last_updated_by" SET DATA TYPE VARCHAR;
-- AlterTable
ALTER TABLE "features" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "multiple_users_sessions" ALTER COLUMN "id" SET DATA TYPE VARCHAR,
ALTER COLUMN "expires_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "snapshot_histories"
ALTER COLUMN "workspace_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "guid" SET DATA TYPE VARCHAR,
ALTER COLUMN "timestamp" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "expired_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "snapshots" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "updates" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "user_connected_accounts" ALTER COLUMN "id" SET DATA TYPE VARCHAR,
ALTER COLUMN "user_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "expires_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "user_features" ALTER COLUMN "user_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "expired_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "user_invoices" ALTER COLUMN "user_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "user_sessions" ALTER COLUMN "id" SET DATA TYPE VARCHAR,
ALTER COLUMN "session_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "user_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "expires_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "user_stripe_customers" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "user_subscriptions" ALTER COLUMN "user_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "start" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "end" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "next_bill_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "canceled_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "trial_start" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "trial_end" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "users" ALTER COLUMN "name" SET DATA TYPE VARCHAR,
ALTER COLUMN "email" SET DATA TYPE VARCHAR,
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "verification_tokens" ALTER COLUMN "token" SET DATA TYPE VARCHAR,
ALTER COLUMN "expiresAt" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "workspace_features" ALTER COLUMN "workspace_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "expired_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "workspace_page_user_permissions"
ALTER COLUMN "id" SET DATA TYPE VARCHAR,
ALTER COLUMN "workspace_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "page_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "user_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "workspace_pages" ALTER COLUMN "workspace_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "page_id" SET DATA TYPE VARCHAR;
-- AlterTable
ALTER TABLE "workspace_user_permissions" ALTER COLUMN "id" SET DATA TYPE VARCHAR,
ALTER COLUMN "workspace_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "user_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "workspaces" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
-- DropTable
DROP TABLE "accounts";
-- DropTable
DROP TABLE "blobs";
-- DropTable
DROP TABLE "new_features_waiting_list";
-- DropTable
DROP TABLE "optimized_blobs";
-- DropTable
DROP TABLE "sessions";
-- DropTable
DROP TABLE "user_workspace_permissions";
-- DropTable
DROP TABLE "verificationtokens";

View File

@@ -1,95 +0,0 @@
/*
Warnings:
- The primary key for the `snapshot_histories` table will be changed. If it partially fails, the table could be left without primary key constraint.
*/
-- AlterTable
ALTER TABLE "_data_migrations" ALTER COLUMN "started_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "finished_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "ai_prompts_messages" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "ai_prompts_metadata" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "ai_sessions_messages" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "ai_sessions_metadata" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "deleted_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "app_runtime_settings" ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "deleted_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "features" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "multiple_users_sessions" ALTER COLUMN "expires_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "snapshot_histories" ALTER COLUMN "timestamp" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "expired_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "snapshots" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "updates" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "user_connected_accounts" ALTER COLUMN "expires_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "user_features" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "expired_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "user_invoices" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "user_sessions" ALTER COLUMN "expires_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "user_stripe_customers" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "user_subscriptions" ALTER COLUMN "start" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "end" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "next_bill_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "canceled_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "trial_start" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "trial_end" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "users" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "email_verified" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "verification_tokens" ALTER COLUMN "expiresAt" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "workspace_features" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "expired_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "workspace_page_user_permissions" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "workspace_user_permissions" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "workspaces" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);

View File

@@ -1,3 +0,0 @@
-- AlterTable
ALTER TABLE "ai_prompts_metadata" ADD COLUMN "modified" BOOLEAN NOT NULL DEFAULT false,
ADD COLUMN "updated_at" TIMESTAMPTZ(3) NOT NULL DEFAULT CURRENT_TIMESTAMP;

View File

@@ -1,13 +0,0 @@
-- CreateTable
CREATE TABLE "user_snapshots" (
"user_id" VARCHAR NOT NULL,
"id" VARCHAR NOT NULL,
"blob" BYTEA NOT NULL,
"created_at" TIMESTAMPTZ(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updated_at" TIMESTAMPTZ(3) NOT NULL,
CONSTRAINT "user_snapshots_pkey" PRIMARY KEY ("user_id","id")
);
-- AddForeignKey
ALTER TABLE "user_snapshots" ADD CONSTRAINT "user_snapshots_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;

View File

@@ -1,14 +0,0 @@
/*
Warnings:
- The primary key for the `updates` table will be changed. If it partially fails, the table could be left without primary key constraint.
*/
-- AlterTable
ALTER TABLE "snapshots" ALTER COLUMN "seq" DROP NOT NULL;
-- AlterTable
ALTER TABLE "updates" DROP CONSTRAINT "updates_pkey",
ALTER COLUMN "created_at" DROP DEFAULT,
ALTER COLUMN "seq" DROP NOT NULL,
ADD CONSTRAINT "updates_pkey" PRIMARY KEY ("workspace_id", "guid", "created_at");

View File

@@ -1,21 +0,0 @@
-- AlterTable
ALTER TABLE "snapshot_histories" ADD COLUMN "created_by" VARCHAR;
-- AlterTable
ALTER TABLE "snapshots" ADD COLUMN "created_by" VARCHAR,
ADD COLUMN "updated_by" VARCHAR;
-- AlterTable
ALTER TABLE "updates" ADD COLUMN "created_by" VARCHAR;
-- AddForeignKey
ALTER TABLE "snapshots" ADD CONSTRAINT "snapshots_created_by_fkey" FOREIGN KEY ("created_by") REFERENCES "users"("id") ON DELETE SET NULL ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "snapshots" ADD CONSTRAINT "snapshots_updated_by_fkey" FOREIGN KEY ("updated_by") REFERENCES "users"("id") ON DELETE SET NULL ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "updates" ADD CONSTRAINT "updates_created_by_fkey" FOREIGN KEY ("created_by") REFERENCES "users"("id") ON DELETE SET NULL ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "snapshot_histories" ADD CONSTRAINT "snapshot_histories_created_by_fkey" FOREIGN KEY ("created_by") REFERENCES "users"("id") ON DELETE SET NULL ON UPDATE CASCADE;

View File

@@ -1,2 +0,0 @@
-- AlterTable
ALTER TABLE "workspaces" ADD COLUMN "enable_url_preview" BOOLEAN NOT NULL DEFAULT false;

View File

@@ -1,2 +0,0 @@
-- CreateIndex
CREATE INDEX "workspace_user_permissions_user_id_idx" ON "workspace_user_permissions"("user_id");

View File

@@ -1,12 +0,0 @@
/*
Warnings:
- The primary key for the `snapshots` table will be changed. If it partially fails, the table could be left without primary key constraint.
*/
-- AlterTable
ALTER TABLE "snapshots" DROP CONSTRAINT "snapshots_pkey",
ADD CONSTRAINT "snapshots_pkey" PRIMARY KEY ("workspace_id", "guid");
-- CreateIndex
CREATE INDEX "snapshots_workspace_id_updated_at_idx" ON "snapshots"("workspace_id", "updated_at");

View File

@@ -1,8 +0,0 @@
-- CreateIndex
CREATE INDEX "ai_sessions_messages_session_id_idx" ON "ai_sessions_messages"("session_id");
-- CreateIndex
CREATE INDEX "ai_sessions_metadata_user_id_idx" ON "ai_sessions_metadata"("user_id");
-- CreateIndex
CREATE INDEX "ai_sessions_metadata_user_id_workspace_id_idx" ON "ai_sessions_metadata"("user_id", "workspace_id");

View File

@@ -1,2 +0,0 @@
-- CreateIndex
CREATE INDEX "users_email_lowercase_idx" ON "users"(lower("email"))

View File

@@ -1,7 +1,7 @@
{
"name": "@affine/server",
"private": true,
"version": "0.17.0",
"version": "0.15.0",
"description": "Affine Node.js server",
"type": "module",
"bin": {
@@ -21,10 +21,11 @@
"dependencies": {
"@apollo/server": "^4.10.2",
"@aws-sdk/client-s3": "^3.620.0",
"@fal-ai/serverless-client": "^0.14.0",
"@fal-ai/serverless-client": "^0.13.0",
"@google-cloud/opentelemetry-cloud-monitoring-exporter": "^0.19.0",
"@google-cloud/opentelemetry-cloud-trace-exporter": "^2.2.0",
"@google-cloud/opentelemetry-resource-util": "^2.2.0",
"@keyv/redis": "^2.8.4",
"@nestjs/apollo": "^12.1.0",
"@nestjs/common": "^10.3.7",
"@nestjs/core": "^10.3.7",
@@ -33,24 +34,26 @@
"@nestjs/platform-express": "^10.3.7",
"@nestjs/platform-socket.io": "^10.3.7",
"@nestjs/schedule": "^4.0.1",
"@nestjs/throttler": "6.2.1",
"@nestjs/serve-static": "^4.0.2",
"@nestjs/throttler": "5.2.0",
"@nestjs/websockets": "^10.3.7",
"@node-rs/argon2": "^1.8.0",
"@node-rs/crc32": "^1.10.0",
"@node-rs/jsonwebtoken": "^0.5.2",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/core": "^1.25.0",
"@opentelemetry/exporter-prometheus": "^0.53.0",
"@opentelemetry/exporter-prometheus": "^0.52.0",
"@opentelemetry/exporter-zipkin": "^1.25.0",
"@opentelemetry/host-metrics": "^0.35.2",
"@opentelemetry/instrumentation": "^0.53.0",
"@opentelemetry/instrumentation-graphql": "^0.43.0",
"@opentelemetry/instrumentation-http": "^0.53.0",
"@opentelemetry/instrumentation-ioredis": "^0.43.0",
"@opentelemetry/instrumentation-nestjs-core": "^0.40.0",
"@opentelemetry/instrumentation-socket.io": "^0.42.0",
"@opentelemetry/instrumentation": "^0.52.0",
"@opentelemetry/instrumentation-graphql": "^0.42.0",
"@opentelemetry/instrumentation-http": "^0.52.0",
"@opentelemetry/instrumentation-ioredis": "^0.42.0",
"@opentelemetry/instrumentation-nestjs-core": "^0.39.0",
"@opentelemetry/instrumentation-socket.io": "^0.41.0",
"@opentelemetry/resources": "^1.25.0",
"@opentelemetry/sdk-metrics": "^1.25.0",
"@opentelemetry/sdk-node": "^0.53.0",
"@opentelemetry/sdk-node": "^0.52.0",
"@opentelemetry/sdk-trace-node": "^1.25.0",
"@opentelemetry/semantic-conventions": "^1.25.0",
"@prisma/client": "^5.15.0",
@@ -58,42 +61,50 @@
"@socket.io/redis-adapter": "^8.3.0",
"cookie-parser": "^1.4.6",
"dotenv": "^16.4.5",
"dotenv-cli": "^7.4.1",
"express": "^4.19.2",
"fast-xml-parser": "^4.4.0",
"get-stream": "^9.0.1",
"graphql": "^16.8.1",
"graphql-scalars": "^1.23.0",
"graphql-type-json": "^0.3.2",
"graphql-upload": "^16.0.2",
"html-validate": "^8.20.1",
"ioredis": "^5.3.2",
"is-mobile": "^4.0.0",
"keyv": "^5.0.0",
"keyv": "^4.5.4",
"lodash-es": "^4.17.21",
"mixpanel": "^0.18.0",
"mustache": "^4.2.0",
"nanoid": "^5.0.7",
"nest-commander": "^3.12.5",
"nestjs-throttler-storage-redis": "^0.5.0",
"nestjs-throttler-storage-redis": "^0.4.1",
"nodemailer": "^6.9.13",
"on-headers": "^1.0.2",
"openai": "^4.33.0",
"parse-duration": "^1.1.0",
"piscina": "^4.5.1",
"pretty-time": "^1.1.0",
"prisma": "^5.12.1",
"prom-client": "^15.1.1",
"reflect-metadata": "^0.2.2",
"rxjs": "^7.8.1",
"semver": "^7.6.0",
"ses": "^1.4.1",
"socket.io": "^4.7.5",
"stripe": "^16.0.0",
"ts-node": "^10.9.2",
"typescript": "^5.4.5",
"ws": "^8.16.0",
"yjs": "patch:yjs@npm%3A13.6.18#~/.yarn/patches/yjs-npm-13.6.18-ad0d5f7c43.patch",
"zod": "^3.22.4"
},
"devDependencies": {
"@affine-test/kit": "workspace:*",
"@affine/server-native": "workspace:*",
"@napi-rs/image": "^1.9.1",
"@nestjs/testing": "^10.3.7",
"@types/cookie-parser": "^1.4.7",
"@types/engine.io": "^3.1.10",
"@types/express": "^4.17.21",
"@types/graphql-upload": "^16.0.7",
"@types/keyv": "^4.2.0",
@@ -103,12 +114,14 @@
"@types/node": "^20.12.7",
"@types/nodemailer": "^6.4.14",
"@types/on-headers": "^1.0.3",
"@types/pretty-time": "^1.1.5",
"@types/sinon": "^17.0.3",
"@types/supertest": "^6.0.2",
"@types/ws": "^8.5.10",
"ava": "^6.1.2",
"c8": "^10.0.0",
"nodemon": "^3.1.0",
"sinon": "^19.0.0",
"sinon": "^18.0.0",
"supertest": "^7.0.0"
},
"ava": {
@@ -125,7 +138,6 @@
],
"watchMode": {
"ignoreChanges": [
"static/**",
"**/*.gen.*"
]
},

View File

@@ -11,18 +11,18 @@ datasource db {
model User {
id String @id @default(uuid()) @db.VarChar
name String @db.VarChar
email String @unique @db.VarChar
emailVerifiedAt DateTime? @map("email_verified") @db.Timestamptz(3)
name String
email String @unique
emailVerifiedAt DateTime? @map("email_verified")
avatarUrl String? @map("avatar_url") @db.VarChar
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
/// Not available if user signed up through OAuth providers
password String? @db.VarChar
/// Indicates whether the user finished the signup process.
/// for example, the value will be false if the user never registered and was invited into a workspace by others.
registered Boolean @default(true)
features UserFeature[]
features UserFeatures[]
customer UserStripeCustomer?
subscriptions UserSubscription[]
invoices UserInvoice[]
@@ -32,27 +32,22 @@ model User {
sessions UserSession[]
aiSessions AiSession[]
updatedRuntimeConfigs RuntimeConfig[]
userSnapshots UserSnapshot[]
createdSnapshot Snapshot[] @relation("createdSnapshot")
updatedSnapshot Snapshot[] @relation("updatedSnapshot")
createdUpdate Update[] @relation("createdUpdate")
createdHistory SnapshotHistory[] @relation("createdHistory")
@@index([email])
@@map("users")
}
model ConnectedAccount {
id String @id @default(uuid()) @db.VarChar
userId String @map("user_id") @db.VarChar
id String @id @default(uuid()) @db.VarChar(36)
userId String @map("user_id") @db.VarChar(36)
provider String @db.VarChar
providerAccountId String @map("provider_account_id") @db.VarChar
scope String? @db.Text
accessToken String? @map("access_token") @db.Text
refreshToken String? @map("refresh_token") @db.Text
expiresAt DateTime? @map("expires_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(3)
expiresAt DateTime? @map("expires_at") @db.Timestamptz(6)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(6)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
@@ -62,22 +57,21 @@ model ConnectedAccount {
}
model Session {
id String @id @default(uuid()) @db.VarChar
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
userSessions UserSession[]
id String @id @default(uuid()) @db.VarChar(36)
expiresAt DateTime? @map("expires_at") @db.Timestamptz(6)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
// @deprecated use [UserSession.expiresAt]
deprecated_expiresAt DateTime? @map("expires_at") @db.Timestamptz(3)
userSessions UserSession[]
@@map("multiple_users_sessions")
}
model UserSession {
id String @id @default(uuid()) @db.VarChar
sessionId String @map("session_id") @db.VarChar
userId String @map("user_id") @db.VarChar
expiresAt DateTime? @map("expires_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
id String @id @default(uuid()) @db.VarChar(36)
sessionId String @map("session_id") @db.VarChar(36)
userId String @map("user_id") @db.VarChar(36)
expiresAt DateTime? @map("expires_at") @db.Timestamptz(6)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
session Session @relation(fields: [sessionId], references: [id], onDelete: Cascade)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
@@ -87,25 +81,24 @@ model UserSession {
}
model VerificationToken {
token String @db.VarChar
token String @db.VarChar(36)
type Int @db.SmallInt
credential String? @db.Text
expiresAt DateTime @db.Timestamptz(3)
expiresAt DateTime @db.Timestamptz(6)
@@unique([type, token])
@@map("verification_tokens")
}
model Workspace {
id String @id @default(uuid()) @db.VarChar
public Boolean
enableUrlPreview Boolean @default(false) @map("enable_url_preview")
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
id String @id @default(uuid()) @db.VarChar
public Boolean
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
pages WorkspacePage[]
permissions WorkspaceUserPermission[]
pagePermissions WorkspacePageUserPermission[]
features WorkspaceFeature[]
features WorkspaceFeatures[]
@@map("workspaces")
}
@@ -116,8 +109,8 @@ model Workspace {
// Only the ones that have ever changed will have records here,
// and for others we will make sure it has a default value returned in our business logic.
model WorkspacePage {
workspaceId String @map("workspace_id") @db.VarChar
pageId String @map("page_id") @db.VarChar
workspaceId String @map("workspace_id") @db.VarChar(36)
pageId String @map("page_id") @db.VarChar(36)
public Boolean @default(false)
// Page/Edgeless
mode Int @default(0) @db.SmallInt
@@ -128,35 +121,49 @@ model WorkspacePage {
@@map("workspace_pages")
}
model WorkspaceUserPermission {
// @deprecated, use WorkspaceUserPermission
model DeprecatedUserWorkspacePermission {
id String @id @default(uuid()) @db.VarChar
workspaceId String @map("workspace_id") @db.VarChar
userId String @map("user_id") @db.VarChar
subPageId String? @map("sub_page_id") @db.VarChar
userId String? @map("entity_id") @db.VarChar
/// Read/Write/Admin/Owner
type Int @db.SmallInt
/// Whether the permission invitation is accepted by the user
accepted Boolean @default(false)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
@@unique([workspaceId, subPageId, userId])
@@map("user_workspace_permissions")
}
model WorkspaceUserPermission {
id String @id @default(uuid()) @db.VarChar(36)
workspaceId String @map("workspace_id") @db.VarChar(36)
userId String @map("user_id") @db.VarChar(36)
// Read/Write
type Int @db.SmallInt
/// Whether the permission invitation is accepted by the user
accepted Boolean @default(false)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
@@unique([workspaceId, userId])
// optimize for querying user's workspace permissions
@@index(userId)
@@map("workspace_user_permissions")
}
model WorkspacePageUserPermission {
id String @id @default(uuid()) @db.VarChar
workspaceId String @map("workspace_id") @db.VarChar
pageId String @map("page_id") @db.VarChar
userId String @map("user_id") @db.VarChar
id String @id @default(uuid()) @db.VarChar(36)
workspaceId String @map("workspace_id") @db.VarChar(36)
pageId String @map("page_id") @db.VarChar(36)
userId String @map("user_id") @db.VarChar(36)
// Read/Write
type Int @db.SmallInt
/// Whether the permission invitation is accepted by the user
accepted Boolean @default(false)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
@@ -169,9 +176,9 @@ model WorkspacePageUserPermission {
// for example:
// - early access is a feature that allows some users to access the insider version
// - pro plan is a quota that allows some users access to more resources after they pay
model UserFeature {
model UserFeatures {
id Int @id @default(autoincrement())
userId String @map("user_id") @db.VarChar
userId String @map("user_id") @db.VarChar(36)
featureId Int @map("feature_id") @db.Integer
// we will record the reason why the feature is enabled/disabled
@@ -179,16 +186,16 @@ model UserFeature {
// - pro_plan_v1: "user buy the pro plan"
reason String @db.VarChar
// record the quota enabled time
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
// record the quota expired time, pay plan is a subscription, so it will expired
expiredAt DateTime? @map("expired_at") @db.Timestamptz(3)
expiredAt DateTime? @map("expired_at") @db.Timestamptz(6)
// whether the feature is activated
// for example:
// - if we switch the user to another plan, we will set the old plan to deactivated, but don't delete it
activated Boolean @default(false)
feature Feature @relation(fields: [featureId], references: [id], onDelete: Cascade)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
feature Features @relation(fields: [featureId], references: [id], onDelete: Cascade)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
@@index([userId])
@@map("user_features")
@@ -197,9 +204,9 @@ model UserFeature {
// feature gates are a way to enable/disable features for a workspace
// for example:
// - copilot is a feature that allows some users in a workspace to access the copilot feature
model WorkspaceFeature {
model WorkspaceFeatures {
id Int @id @default(autoincrement())
workspaceId String @map("workspace_id") @db.VarChar
workspaceId String @map("workspace_id") @db.VarChar(36)
featureId Int @map("feature_id") @db.Integer
// we will record the reason why the feature is enabled/disabled
@@ -207,21 +214,21 @@ model WorkspaceFeature {
// - copilet_v1: "owner buy the copilet feature package"
reason String @db.VarChar
// record the feature enabled time
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
// record the quota expired time, pay plan is a subscription, so it will expired
expiredAt DateTime? @map("expired_at") @db.Timestamptz(3)
expiredAt DateTime? @map("expired_at") @db.Timestamptz(6)
// whether the feature is activated
// for example:
// - if the owner unsubscribes from a feature package, we will set the feature to deactivated, but don't delete it
activated Boolean @default(false)
feature Feature @relation(fields: [featureId], references: [id], onDelete: Cascade)
feature Features @relation(fields: [featureId], references: [id], onDelete: Cascade)
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
@@map("workspace_features")
}
model Feature {
model Features {
id Int @id @default(autoincrement())
feature String @db.VarChar
version Int @default(0) @db.Integer
@@ -229,99 +236,135 @@ model Feature {
type Int @db.Integer
// configs, defined by feature controller
configs Json @db.Json
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
UserFeatureGates UserFeature[]
WorkspaceFeatures WorkspaceFeature[]
UserFeatureGates UserFeatures[]
WorkspaceFeatures WorkspaceFeatures[]
@@unique([feature, version])
@@map("features")
}
model DeprecatedNextAuthAccount {
id String @id @default(cuid())
userId String @map("user_id")
type String
provider String
providerAccountId String @map("provider_account_id")
refresh_token String? @db.Text
access_token String? @db.Text
expires_at Int?
token_type String?
scope String?
id_token String? @db.Text
session_state String?
@@unique([provider, providerAccountId])
@@map("accounts")
}
model DeprecatedNextAuthSession {
id String @id @default(cuid())
sessionToken String @unique @map("session_token")
userId String @map("user_id")
expires DateTime
@@map("sessions")
}
model DeprecatedNextAuthVerificationToken {
identifier String
token String @unique
expires DateTime
@@unique([identifier, token])
@@map("verificationtokens")
}
// deprecated, use [ObjectStorage]
model Blob {
id Int @id @default(autoincrement()) @db.Integer
hash String @db.VarChar
workspaceId String @map("workspace_id") @db.VarChar
blob Bytes @db.ByteA
length BigInt
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
// not for keeping, but for snapshot history
deletedAt DateTime? @map("deleted_at") @db.Timestamptz(6)
@@unique([workspaceId, hash])
@@map("blobs")
}
// deprecated, use [ObjectStorage]
model OptimizedBlob {
id Int @id @default(autoincrement()) @db.Integer
hash String @db.VarChar
workspaceId String @map("workspace_id") @db.VarChar
params String @db.VarChar
blob Bytes @db.ByteA
length BigInt
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
// not for keeping, but for snapshot history
deletedAt DateTime? @map("deleted_at") @db.Timestamptz(6)
@@unique([workspaceId, hash, params])
@@map("optimized_blobs")
}
// the latest snapshot of each doc that we've seen
// Snapshot + Updates are the latest state of the doc
model Snapshot {
workspaceId String @map("workspace_id") @db.VarChar
id String @default(uuid()) @map("guid") @db.VarChar
blob Bytes @db.ByteA
seq Int @default(0) @db.Integer
state Bytes? @db.ByteA
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
// the `updated_at` field does not record when the row itself changed,
// but the creation time of the last seen update that has been merged into the snapshot.
updatedAt DateTime @map("updated_at") @db.Timestamptz(3)
createdBy String? @map("created_by") @db.VarChar
updatedBy String? @map("updated_by") @db.VarChar
updatedAt DateTime @map("updated_at") @db.Timestamptz(6)
// should not delete the original snapshot even if the user is deleted
// we only delete the snapshot if the workspace is deleted
createdByUser User? @relation(name: "createdSnapshot", fields: [createdBy], references: [id], onDelete: SetNull)
updatedByUser User? @relation(name: "updatedSnapshot", fields: [updatedBy], references: [id], onDelete: SetNull)
// @deprecated use updatedAt only
seq Int? @default(0) @db.Integer
// we need to clear all hanging updates and snapshots before enabling the foreign key on workspaceId
// workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
@@id([workspaceId, id])
@@index([workspaceId, updatedAt])
@@id([id, workspaceId])
@@map("snapshots")
}
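(Editor's note: the "Snapshot + Updates are the latest state of the doc" invariant above can be pictured with yjs directly. Hedged sketch; the server's actual merge path lives in native code.)

import * as Y from 'yjs';

// Rebuild the latest doc state from a stored snapshot plus pending updates.
function latestDocState(snapshot: Uint8Array, updates: Uint8Array[]): Uint8Array {
  const doc = new Y.Doc();
  Y.applyUpdate(doc, snapshot);
  for (const update of updates) {
    Y.applyUpdate(doc, update);
  }
  return Y.encodeStateAsUpdate(doc);
}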
// user snapshots are special snapshots for user storage like personal app settings, distinguished from workspace snapshots
// basically they share the same structure with workspace snapshots
// but for convenience, we don't fork the updates queue and history for user snapshots until we have to
// which means all operations on user snapshots will happen in place
model UserSnapshot {
userId String @map("user_id") @db.VarChar
id String @map("id") @db.VarChar
blob Bytes @db.ByteA
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(3)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
@@id([userId, id])
@@map("user_snapshots")
}
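(Editor's note: because user snapshots are updated in place, with no updates queue or history, a write is just an upsert on the composite key. Hedged Prisma sketch, assuming default codegen names for `@@id([userId, id])`.)

import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

// In-place write: create the row on first save, overwrite the blob afterwards.
async function saveUserSnapshot(userId: string, id: string, blob: Buffer) {
  return prisma.userSnapshot.upsert({
    where: { userId_id: { userId, id } }, // default name for @@id([userId, id])
    create: { userId, id, blob },
    update: { blob },
  });
}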
model Update {
workspaceId String @map("workspace_id") @db.VarChar
id String @map("guid") @db.VarChar
seq Int @db.Integer
blob Bytes @db.ByteA
createdAt DateTime @map("created_at") @db.Timestamptz(3)
createdBy String? @map("created_by") @db.VarChar
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
// will null the creator reference if the creator's account is deleted
createdByUser User? @relation(name: "createdUpdate", fields: [createdBy], references: [id], onDelete: SetNull)
// @deprecated use createdAt only
seq Int? @db.Integer
@@id([workspaceId, id, createdAt])
@@id([workspaceId, id, seq])
@@map("updates")
}
model SnapshotHistory {
workspaceId String @map("workspace_id") @db.VarChar
id String @map("guid") @db.VarChar
timestamp DateTime @db.Timestamptz(3)
workspaceId String @map("workspace_id") @db.VarChar(36)
id String @map("guid") @db.VarChar(36)
timestamp DateTime @db.Timestamptz(6)
blob Bytes @db.ByteA
state Bytes? @db.ByteA
expiredAt DateTime @map("expired_at") @db.Timestamptz(3)
createdBy String? @map("created_by") @db.VarChar
// will null the creator reference if the creator's account is deleted
createdByUser User? @relation(name: "createdHistory", fields: [createdBy], references: [id], onDelete: SetNull)
expiredAt DateTime @map("expired_at") @db.Timestamptz(6)
@@id([workspaceId, id, timestamp])
@@map("snapshot_histories")
}
model NewFeaturesWaitingList {
id String @id @default(uuid()) @db.VarChar
email String @unique
type Int @db.SmallInt
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
@@map("new_features_waiting_list")
}
model UserStripeCustomer {
userId String @id @map("user_id") @db.VarChar
stripeCustomerId String @unique @map("stripe_customer_id") @db.VarChar
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
@@ -330,7 +373,7 @@ model UserStripeCustomer {
model UserSubscription {
id Int @id @default(autoincrement()) @db.Integer
userId String @map("user_id") @db.VarChar
userId String @map("user_id") @db.VarChar(36)
plan String @db.VarChar(20)
// yearly/monthly
recurring String @db.VarChar(20)
@@ -339,21 +382,21 @@ model UserSubscription {
// subscription.status, active/past_due/canceled/unpaid...
status String @db.VarChar(20)
// subscription.current_period_start
start DateTime @map("start") @db.Timestamptz(3)
start DateTime @map("start") @db.Timestamptz(6)
// subscription.current_period_end, null for lifetime payment
end DateTime? @map("end") @db.Timestamptz(3)
end DateTime? @map("end") @db.Timestamptz(6)
// subscription.billing_cycle_anchor
nextBillAt DateTime? @map("next_bill_at") @db.Timestamptz(3)
nextBillAt DateTime? @map("next_bill_at") @db.Timestamptz(6)
// subscription.canceled_at
canceledAt DateTime? @map("canceled_at") @db.Timestamptz(3)
canceledAt DateTime? @map("canceled_at") @db.Timestamptz(6)
// subscription.trial_start
trialStart DateTime? @map("trial_start") @db.Timestamptz(3)
trialStart DateTime? @map("trial_start") @db.Timestamptz(6)
// subscription.trial_end
trialEnd DateTime? @map("trial_end") @db.Timestamptz(3)
trialEnd DateTime? @map("trial_end") @db.Timestamptz(6)
stripeScheduleId String? @map("stripe_schedule_id") @db.VarChar
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(6)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
@@unique([userId, plan])
@@ -362,7 +405,7 @@ model UserSubscription {
model UserInvoice {
id Int @id @default(autoincrement()) @db.Integer
userId String @map("user_id") @db.VarChar
userId String @map("user_id") @db.VarChar(36)
stripeInvoiceId String @unique @map("stripe_invoice_id")
currency String @db.VarChar(3)
// CNY 12.50 stored as 1250
@@ -370,8 +413,8 @@ model UserInvoice {
status String @db.VarChar(20)
plan String @db.VarChar(20)
recurring String @db.VarChar(20)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(6)
// billing reason
reason String @db.VarChar
lastPaymentError String? @map("last_payment_error") @db.Text
@@ -399,7 +442,7 @@ model AiPromptMessage {
content String @db.Text
attachments Json? @db.Json
params Json? @db.Json
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
prompt AiPrompt @relation(fields: [promptId], references: [id], onDelete: Cascade)
@@ -415,10 +458,7 @@ model AiPrompt {
action String? @db.VarChar
model String @db.VarChar
config Json? @db.Json
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
updatedAt DateTime @default(now()) @map("updated_at") @db.Timestamptz(3)
// whether the prompt is modified by the admin panel
modified Boolean @default(false)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
messages AiPromptMessage[]
sessions AiSession[]
@@ -427,48 +467,45 @@ model AiPrompt {
}
model AiSessionMessage {
id String @id @default(uuid()) @db.VarChar
sessionId String @map("session_id") @db.VarChar
id String @id @default(uuid()) @db.VarChar(36)
sessionId String @map("session_id") @db.VarChar(36)
role AiPromptRole
content String @db.Text
attachments Json? @db.Json
params Json? @db.Json
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(6)
session AiSession @relation(fields: [sessionId], references: [id], onDelete: Cascade)
@@index([sessionId])
@@map("ai_sessions_messages")
}
model AiSession {
id String @id @default(uuid()) @db.VarChar
userId String @map("user_id") @db.VarChar
workspaceId String @map("workspace_id") @db.VarChar
docId String @map("doc_id") @db.VarChar
id String @id @default(uuid()) @db.VarChar(36)
userId String @map("user_id") @db.VarChar(36)
workspaceId String @map("workspace_id") @db.VarChar(36)
docId String @map("doc_id") @db.VarChar(36)
promptName String @map("prompt_name") @db.VarChar(32)
// the session id of the parent session if this session is a forked session
parentSessionId String? @map("parent_session_id") @db.VarChar
parentSessionId String? @map("parent_session_id") @db.VarChar(36)
messageCost Int @default(0)
tokenCost Int @default(0)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
deletedAt DateTime? @map("deleted_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
deletedAt DateTime? @map("deleted_at") @db.Timestamptz(6)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
prompt AiPrompt @relation(fields: [promptName], references: [name], onDelete: Cascade)
messages AiSessionMessage[]
@@index([userId])
@@index([userId, workspaceId])
@@map("ai_sessions_metadata")
}
model DataMigration {
id String @id @default(uuid()) @db.VarChar
id String @id @default(uuid()) @db.VarChar(36)
name String @db.VarChar
startedAt DateTime @default(now()) @map("started_at") @db.Timestamptz(3)
finishedAt DateTime? @map("finished_at") @db.Timestamptz(3)
startedAt DateTime @default(now()) @map("started_at") @db.Timestamptz(6)
finishedAt DateTime? @map("finished_at") @db.Timestamptz(6)
@@map("_data_migrations")
}
@@ -488,9 +525,9 @@ model RuntimeConfig {
key String @db.VarChar
value Json @db.Json
description String @db.Text
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(3)
deletedAt DateTime? @map("deleted_at") @db.Timestamptz(3)
lastUpdatedBy String? @map("last_updated_by") @db.VarChar
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(6)
deletedAt DateTime? @map("deleted_at") @db.Timestamptz(6)
lastUpdatedBy String? @map("last_updated_by") @db.VarChar(36)
lastUpdatedByUser User? @relation(fields: [lastUpdatedBy], references: [id])

View File

@@ -3,7 +3,7 @@ import { Controller, Get } from '@nestjs/common';
import { Public } from './core/auth';
import { Config, SkipThrottle } from './fundamentals';
@Controller('/info')
@Controller('/')
export class AppController {
constructor(private readonly config: Config) {}
@@ -15,7 +15,7 @@ export class AppController {
compatibility: this.config.version,
message: `AFFiNE ${this.config.version} Server`,
type: this.config.type,
flavor: this.config.flavor.type,
flavor: this.config.flavor,
};
}
}
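(Editor's note: a hedged client-side view of the controller above; the route is `/info` on one side of this diff and `/` on the other, and the field values are illustrative.)

// Illustrative call; base URL and route depend on deployment and diff side.
const res = await fetch('/info');
const info: {
  compatibility: string;
  message: string;
  type: string;
  flavor: string;
} = await res.json();
console.log(info.message); // e.g. "AFFiNE 0.15.0 Server"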

View File

@@ -1,3 +1,5 @@
import { join } from 'node:path';
import {
DynamicModule,
ForwardReference,
@@ -5,17 +7,16 @@ import {
Module,
} from '@nestjs/common';
import { ScheduleModule } from '@nestjs/schedule';
import { ServeStaticModule } from '@nestjs/serve-static';
import { get } from 'lodash-es';
import { AppController } from './app.controller';
import { AuthModule } from './core/auth';
import { ADD_ENABLED_FEATURES, ServerConfigModule } from './core/config';
import { DocStorageModule } from './core/doc';
import { DocRendererModule } from './core/doc-renderer';
import { DocModule } from './core/doc';
import { FeatureModule } from './core/features';
import { PermissionModule } from './core/permission';
import { QuotaModule } from './core/quota';
import { SelfhostModule } from './core/selfhost';
import { CustomSetupModule } from './core/setup';
import { StorageModule } from './core/storage';
import { SyncModule } from './core/sync';
import { UserModule } from './core/user';
@@ -43,6 +44,7 @@ import { ENABLED_PLUGINS } from './plugins/registry';
export const FunctionalityModules = [
ConfigModule.forRoot(),
ScheduleModule.forRoot(),
EventModule,
CacheModule,
MutexModule,
@@ -135,7 +137,7 @@ export class AppModuleBuilder {
compile() {
@Module({
imports: this.modules,
controllers: [AppController],
controllers: this.config.isSelfhosted ? [] : [AppController],
})
class AppModule {}
@@ -143,37 +145,47 @@ export class AppModuleBuilder {
}
}
export function buildAppModule() {
function buildAppModule() {
AFFiNE = mergeConfigOverride(AFFiNE);
const factor = new AppModuleBuilder(AFFiNE);
factor
// basic
// common fundamental modules
.use(...FunctionalityModules)
.useIf(config => config.flavor.sync, WebSocketModule)
// auth
.use(UserModule, AuthModule, PermissionModule)
.use(AuthModule)
// business modules
.use(FeatureModule, QuotaModule, DocStorageModule)
.use(DocModule)
// sync server only
.useIf(config => config.flavor.sync, SyncModule)
.useIf(config => config.flavor.sync, WebSocketModule, SyncModule)
// graphql server only
.useIf(
config => config.flavor.graphql,
ScheduleModule.forRoot(),
ServerConfigModule,
GqlModule,
StorageModule,
ServerConfigModule,
WorkspaceModule
UserModule,
WorkspaceModule,
FeatureModule,
QuotaModule
)
// self hosted server only
.useIf(config => config.isSelfhosted, SelfhostModule)
.useIf(config => config.flavor.renderer, DocRendererModule);
.useIf(
config => config.isSelfhosted,
CustomSetupModule,
ServeStaticModule.forRoot({
rootPath: join('/app', 'static'),
exclude: ['/admin*'],
}),
ServeStaticModule.forRoot({
rootPath: join('/app', 'static', 'admin'),
serveRoot: '/admin',
})
);
// plugin modules
ENABLED_PLUGINS.forEach(name => {
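The `use`/`useIf` chain above decides which Nest modules load for each deployment flavor. A reduced, self-contained sketch of that builder pattern (the `AppConfig` shape and names here are stand-ins, not the real types):

```ts
// Minimal sketch of the conditional module registration used by AppModuleBuilder.
type Flavor = { graphql: boolean; sync: boolean };
type AppConfig = { flavor: Flavor; isSelfhosted: boolean };

class ModuleBuilder {
  private readonly modules: unknown[] = [];
  constructor(private readonly config: AppConfig) {}

  use(...modules: unknown[]): this {
    this.modules.push(...modules);
    return this;
  }

  // register modules only when the predicate holds for the current config
  useIf(predicate: (config: AppConfig) => boolean, ...modules: unknown[]): this {
    return predicate(this.config) ? this.use(...modules) : this;
  }
}
```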

View File

@@ -29,7 +29,7 @@ export async function createApp() {
graphqlUploadExpress({
// TODO(@darkskygit): dynamic limit by quota maybe?
maxFileSize: 100 * 1024 * 1024,
maxFiles: 32,
maxFiles: 5,
})
);
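A standalone sketch of the tightened limits above: at most 5 files per request, each no larger than 100 MiB. Note the import path is version-dependent; newer `graphql-upload` releases only expose deep ESM paths such as `graphql-upload/graphqlUploadExpress.mjs`:

```ts
import express from 'express';
// older graphql-upload versions export this from the package root:
import { graphqlUploadExpress } from 'graphql-upload';

const app = express();
app.use(
  graphqlUploadExpress({
    maxFileSize: 100 * 1024 * 1024, // bytes
    maxFiles: 5, // requests carrying more files fail with an upload error
  })
);
```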

View File

@@ -25,7 +25,6 @@ AFFiNE.ENV_MAP = {
OAUTH_OIDC_CLAIM_MAP_EMAIL: 'plugins.oauth.providers.oidc.args.claim_email',
OAUTH_OIDC_CLAIM_MAP_NAME: 'plugins.oauth.providers.oidc.args.claim_name',
METRICS_CUSTOMER_IO_TOKEN: ['metrics.customerIo.token', 'string'],
CAPTCHA_TURNSTILE_SECRET: ['plugins.captcha.turnstile.secret', 'string'],
COPILOT_OPENAI_API_KEY: 'plugins.copilot.openai.apiKey',
COPILOT_FAL_API_KEY: 'plugins.copilot.fal.apiKey',
COPILOT_UNSPLASH_API_KEY: 'plugins.copilot.unsplashKey',
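The `ENV_MAP` entries edited above follow a small convention: a bare string is a config path, and a `[path, type]` tuple also tells the loader how to coerce the raw environment value. A hedged sketch of how such a map can be applied (the `applyEnv` helper is illustrative, not the project's actual loader):

```ts
type EnvMapEntry = string | [path: string, type: 'string' | 'boolean' | 'int'];

const ENV_MAP: Record<string, EnvMapEntry> = {
  COPILOT_OPENAI_API_KEY: 'plugins.copilot.openai.apiKey',
  METRICS_CUSTOMER_IO_TOKEN: ['metrics.customerIo.token', 'string'],
};

function applyEnv(
  env: Record<string, string | undefined>,
  set: (path: string, value: unknown) => void
) {
  for (const [name, entry] of Object.entries(ENV_MAP)) {
    const raw = env[name];
    if (raw === undefined) continue;
    const [path, type] =
      typeof entry === 'string' ? [entry, 'string' as const] : entry;
    // coerce the raw string according to the declared type
    set(
      path,
      type === 'boolean' ? raw === 'true' : type === 'int' ? Number.parseInt(raw, 10) : raw
    );
  }
}
```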

View File

@@ -71,14 +71,6 @@ AFFiNE.use('payment', {
});
AFFiNE.use('oauth');
/* Captcha Plugin Default Config */
AFFiNE.use('captcha', {
turnstile: {},
challenge: {
bits: 20,
},
});
if (AFFiNE.deploy) {
AFFiNE.mailer = {
service: 'gmail',

View File

@@ -95,15 +95,6 @@ AFFiNE.server.port = 3010;
// });
//
//
// /* Captcha Plugin Default Config */
// AFFiNE.plugins.use('captcha', {
// turnstile: {},
// challenge: {
// bits: 20,
// },
// });
//
//
// /* Cloudflare R2 Plugin */
// /* Enable if you choose to store workspace blobs or user avatars in Cloudflare R2 Storage Service */
// AFFiNE.use('cloudflare-r2', {

View File

@@ -1,3 +1,5 @@
import { randomUUID } from 'node:crypto';
import {
Body,
Controller,
@@ -16,34 +18,26 @@ import {
EarlyAccessRequired,
EmailTokenNotFound,
InternalServerError,
InvalidEmail,
InvalidEmailToken,
SignUpForbidden,
Throttle,
URLHelper,
UseNamedGuard,
} from '../../fundamentals';
import { UserService } from '../user';
import { validators } from '../utils/validators';
import { CurrentUser } from './current-user';
import { Public } from './guard';
import { AuthService } from './service';
import { CurrentUser, Session } from './session';
import { AuthService, parseAuthUserSeqNum } from './service';
import { TokenService, TokenType } from './token';
interface PreflightResponse {
registered: boolean;
hasPassword: boolean;
}
interface SignInCredential {
email: string;
class SignInCredential {
email!: string;
password?: string;
callbackUrl?: string;
}
interface MagicLinkCredential {
email: string;
token: string;
class MagicLinkCredential {
email!: string;
token!: string;
}
@Throttle('strict')
@@ -58,44 +52,13 @@ export class AuthController {
) {}
@Public()
@Post('/preflight')
async preflight(
@Body() params?: { email: string }
): Promise<PreflightResponse> {
if (!params?.email) {
throw new InvalidEmail();
}
validators.assertValidEmail(params.email);
const user = await this.user.findUserWithHashedPasswordByEmail(
params.email
);
if (!user) {
return {
registered: false,
hasPassword: false,
};
}
return {
registered: user.registered,
hasPassword: !!user.password,
};
}
@Public()
@UseNamedGuard('captcha')
@Post('/sign-in')
@Header('content-type', 'application/json')
async signIn(
@Req() req: Request,
@Res() res: Response,
@Body() credential: SignInCredential,
/**
* @deprecated
*/
@Query('redirect_uri') redirectUri?: string
@Query('redirect_uri') redirectUri = this.url.home
) {
validators.assertValidEmail(credential.email);
const canSignIn = await this.auth.canSignIn(credential.email);
@@ -104,90 +67,80 @@ export class AuthController {
}
if (credential.password) {
await this.passwordSignIn(
req,
res,
const user = await this.auth.signIn(
credential.email,
credential.password
);
await this.auth.setCookie(req, res, user);
res.status(HttpStatus.OK).send(user);
} else {
await this.sendMagicLink(
req,
res,
credential.email,
credential.callbackUrl,
// send email magic link
const user = await this.user.findUserByEmail(credential.email);
if (!user) {
const allowSignup = await this.config.runtime.fetch('auth/allowSignup');
if (!allowSignup) {
throw new SignUpForbidden();
}
}
const result = await this.sendSignInEmail(
{ email: credential.email, signUp: !user },
redirectUri
);
}
}
async passwordSignIn(
req: Request,
res: Response,
email: string,
password: string
) {
const user = await this.auth.signIn(email, password);
await this.auth.setCookies(req, res, user.id);
res.status(HttpStatus.OK).send(user);
}
async sendMagicLink(
_req: Request,
res: Response,
email: string,
callbackUrl = '/magic-link',
redirectUrl?: string
) {
// send email magic link
const user = await this.user.findUserByEmail(email);
if (!user) {
const allowSignup = await this.config.runtime.fetch('auth/allowSignup');
if (!allowSignup) {
throw new SignUpForbidden();
if (result.rejected.length) {
throw new InternalServerError('Failed to send sign-in email.');
}
}
res.status(HttpStatus.OK).send({
email: credential.email,
});
}
}
async sendSignInEmail(
{ email, signUp }: { email: string; signUp: boolean },
redirectUri: string
) {
const token = await this.token.createToken(TokenType.SignIn, email);
const magicLink = this.url.link(callbackUrl, {
const magicLink = this.url.link('/magic-link', {
token,
email,
...(redirectUrl
? {
redirect_uri: redirectUrl,
}
: {}),
redirect_uri: redirectUri,
});
const result = await this.auth.sendSignInEmail(email, magicLink, !user);
const result = await this.auth.sendSignInEmail(email, magicLink, signUp);
if (result.rejected.length) {
throw new InternalServerError('Failed to send sign-in email.');
}
res.status(HttpStatus.OK).send({
email: email,
});
return result;
}
@Public()
@Get('/sign-out')
async signOut(
@Req() req: Request,
@Res() res: Response,
@Session() session: Session | undefined,
@Query('user_id') userId: string | undefined
@Query('redirect_uri') redirectUri?: string
) {
if (!session) {
res.status(HttpStatus.OK).send({});
return;
const session = await this.auth.signOut(
req.cookies[AuthService.sessionCookieName],
parseAuthUserSeqNum(req.headers[AuthService.authUserSeqHeaderName])
);
if (session) {
res.cookie(AuthService.sessionCookieName, session.id, {
expires: session.expiresAt ?? void 0, // expiresAt is `Date | null`
...this.auth.cookieOptions,
});
} else {
res.clearCookie(AuthService.sessionCookieName);
}
await this.auth.signOut(session.sessionId, userId);
await this.auth.refreshCookies(res, session.sessionId);
res.status(HttpStatus.OK).send({});
if (redirectUri) {
return this.url.safeRedirect(res, redirectUri);
} else {
return res.send(null);
}
}
@Public()
@@ -203,11 +156,11 @@ export class AuthController {
validators.assertValidEmail(email);
const tokenRecord = await this.token.verifyToken(TokenType.SignIn, token, {
const valid = await this.token.verifyToken(TokenType.SignIn, token, {
credential: email,
});
if (!tokenRecord) {
if (!valid) {
throw new InvalidEmailToken();
}
@@ -216,8 +169,9 @@ export class AuthController {
registered: true,
});
await this.auth.setCookies(req, res, user.id);
res.send({ id: user.id });
await this.auth.setCookie(req, res, user);
res.send({ id: user.id, email: user.email, name: user.name });
}
@Throttle('default', { limit: 1200 })
@@ -244,4 +198,14 @@ export class AuthController {
users: await this.auth.getUserList(token),
};
}
@Public()
@Get('/challenge')
async challenge() {
// TODO(@darksky): impl in following PR
return {
challenge: randomUUID(),
resource: randomUUID(),
};
}
}
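A hedged sketch of a client calling the reshaped sign-in route above. The `/api/auth` mount point is an assumption taken from the deprecation note elsewhere in this diff (`/api/auth/sign-in?native=true`):

```ts
async function signIn(email: string, password?: string) {
  const res = await fetch('/api/auth/sign-in', {
    method: 'POST',
    headers: { 'content-type': 'application/json' },
    body: JSON.stringify(password ? { email, password } : { email }),
  });
  if (!res.ok) throw new Error(`sign-in failed: ${res.status}`);
  // with a password: the session user; without one: `{ email }` after the
  // magic-link email has been sent
  return res.json();
}
```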

View File

@@ -1,9 +1,13 @@
import type { ExecutionContext } from '@nestjs/common';
import { createParamDecorator } from '@nestjs/common';
import { User, UserSession } from '@prisma/client';
import { User } from '@prisma/client';
import { getRequestResponseFromContext } from '../../fundamentals';
function getUserFromContext(context: ExecutionContext) {
return getRequestResponseFromContext(context).req.user;
}
/**
* Used to fetch current user from the request context.
*
@@ -40,7 +44,7 @@ import { getRequestResponseFromContext } from '../../fundamentals';
// eslint-disable-next-line no-redeclare
export const CurrentUser = createParamDecorator(
(_: unknown, context: ExecutionContext) => {
return getRequestResponseFromContext(context).req.session?.user;
return getUserFromContext(context);
}
);
@@ -49,15 +53,3 @@ export interface CurrentUser
hasPassword: boolean | null;
emailVerified: boolean;
}
// interface and variable don't conflict
// eslint-disable-next-line no-redeclare
export const Session = createParamDecorator(
(_: unknown, context: ExecutionContext) => {
return getRequestResponseFromContext(context).req.session;
}
);
export type Session = UserSession & {
user: CurrentUser;
};
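Typical consumption of the decorator above, mirroring how the auth resolver later in this diff uses it (a sketch; the import paths follow the layout visible in this diff):

```ts
import { Query, Resolver } from '@nestjs/graphql';
import { CurrentUser } from './current-user';
import { UserType } from '../user/types';

@Resolver(() => UserType)
export class MeResolver {
  @Query(() => UserType)
  me(@CurrentUser() user: CurrentUser) {
    // non-null on guarded endpoints; may be undefined on @Public ones
    return user;
  }
}
```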

View File

@@ -1,23 +1,24 @@
import type {
CanActivate,
ExecutionContext,
FactoryProvider,
OnModuleInit,
} from '@nestjs/common';
import { Injectable, SetMetadata } from '@nestjs/common';
import { Injectable, SetMetadata, UseGuards } from '@nestjs/common';
import { ModuleRef, Reflector } from '@nestjs/core';
import type { Request, Response } from 'express';
import {
AuthenticationRequired,
Config,
getRequestResponseFromContext,
mapAnyError,
parseCookies,
} from '../../fundamentals';
import { WEBSOCKET_OPTIONS } from '../../fundamentals/websocket';
import { AuthService } from './service';
import { Session } from './session';
import { AuthService, parseAuthUserSeqNum } from './service';
function extractTokenFromHeader(authorization: string) {
if (!/^Bearer\s/i.test(authorization)) {
return;
}
return authorization.substring(7);
}
const PUBLIC_ENTRYPOINT_SYMBOL = Symbol('public');
@@ -37,9 +38,37 @@ export class AuthGuard implements CanActivate, OnModuleInit {
async canActivate(context: ExecutionContext) {
const { req, res } = getRequestResponseFromContext(context);
const userSession = await this.signIn(req, res);
if (res && userSession && userSession.expiresAt) {
await this.auth.refreshUserSessionIfNeeded(res, userSession);
// check cookie
let sessionToken: string | undefined =
req.cookies[AuthService.sessionCookieName];
if (!sessionToken && req.headers.authorization) {
sessionToken = extractTokenFromHeader(req.headers.authorization);
}
if (sessionToken) {
const userSeq = parseAuthUserSeqNum(
req.headers[AuthService.authUserSeqHeaderName]
);
const { user, expiresAt } = await this.auth.getUser(
sessionToken,
userSeq
);
if (res && user && expiresAt) {
await this.auth.refreshUserSessionIfNeeded(
req,
res,
sessionToken,
user.id,
expiresAt
);
}
if (user) {
req.sid = sessionToken;
req.user = user;
}
}
// api is public
@@ -52,70 +81,33 @@ export class AuthGuard implements CanActivate, OnModuleInit {
return true;
}
if (!userSession) {
if (!req.user) {
throw new AuthenticationRequired();
}
return true;
}
async signIn(req: Request, res?: Response): Promise<Session | null> {
if (req.session) {
return req.session;
}
// compatibility with websocket request
parseCookies(req);
// TODO(@forehalo): a cache for user session
const userSession = await this.auth.getUserSessionFromRequest(req, res);
if (userSession) {
req.session = {
...userSession.session,
user: userSession.user,
};
return req.session;
}
return null;
}
}
/**
* Mark api to be public accessible
* This guard is used to protect routes/queries/mutations that require a user to be logged in.
*
 * The `@CurrentUser()` parameter decorator used in `Auth`-guarded queries will always give us the user, because the `Auth` guard
 * fails fast if the user is not logged in.
*
* @example
*
* ```typescript
* \@Auth()
* \@Query(() => UserType)
* user(@CurrentUser() user: CurrentUser) {
* return user;
* }
* ```
*/
export const Public = () => SetMetadata(PUBLIC_ENTRYPOINT_SYMBOL, true);
export const AuthWebsocketOptionsProvider: FactoryProvider = {
provide: WEBSOCKET_OPTIONS,
useFactory: (config: Config, guard: AuthGuard) => {
return {
...config.websocket,
allowRequest: async (
req: any,
pass: (err: string | null | undefined, success: boolean) => void
) => {
if (!config.websocket.requireAuthentication) {
return pass(null, true);
}
try {
const authentication = await guard.signIn(req);
if (authentication) {
return pass(null, true);
} else {
return pass('unauthenticated', false);
}
} catch (e) {
const error = mapAnyError(e);
error.log('Websocket');
return pass('unauthenticated', false);
}
},
};
},
inject: [Config, AuthGuard],
export const Auth = () => {
return UseGuards(AuthGuard);
};
// api is public accessible
export const Public = () => SetMetadata(PUBLIC_ENTRYPOINT_SYMBOL, true);
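Expected behavior of `extractTokenFromHeader` above, assuming the helper were exported for testing (a sketch derived directly from the regex and `substring(7)`):

```ts
extractTokenFromHeader('Bearer abc123'); // 'abc123'
extractTokenFromHeader('bearer abc123'); // 'abc123' (prefix match is case-insensitive)
extractTokenFromHeader('Basic abc123'); // undefined (not a bearer token)
```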

View File

@@ -6,21 +6,15 @@ import { FeatureModule } from '../features';
import { QuotaModule } from '../quota';
import { UserModule } from '../user';
import { AuthController } from './controller';
import { AuthGuard, AuthWebsocketOptionsProvider } from './guard';
import { AuthGuard } from './guard';
import { AuthResolver } from './resolver';
import { AuthService } from './service';
import { TokenService, TokenType } from './token';
@Module({
imports: [FeatureModule, UserModule, QuotaModule],
providers: [
AuthService,
AuthResolver,
TokenService,
AuthGuard,
AuthWebsocketOptionsProvider,
],
exports: [AuthService, AuthGuard, AuthWebsocketOptionsProvider, TokenService],
providers: [AuthService, AuthResolver, TokenService, AuthGuard],
exports: [AuthService, AuthGuard],
controllers: [AuthController],
})
export class AuthModule {}
@@ -28,4 +22,4 @@ export class AuthModule {}
export * from './guard';
export { ClientTokenType } from './resolver';
export { AuthService, TokenService, TokenType };
export * from './session';
export * from './current-user';

View File

@@ -11,23 +11,22 @@ import {
import {
ActionForbidden,
Config,
EmailAlreadyUsed,
EmailTokenNotFound,
EmailVerificationRequired,
InvalidEmailToken,
LinkExpired,
SameEmailProvided,
SkipThrottle,
Throttle,
URLHelper,
} from '../../fundamentals';
import { Admin } from '../common';
import { UserService } from '../user';
import { UserType } from '../user/types';
import { validators } from '../utils/validators';
import { CurrentUser } from './current-user';
import { Public } from './guard';
import { AuthService } from './service';
import { CurrentUser } from './session';
import { TokenService, TokenType } from './token';
@ObjectType('tokenType')
@@ -46,6 +45,7 @@ export class ClientTokenType {
@Resolver(() => UserType)
export class AuthResolver {
constructor(
private readonly config: Config,
private readonly url: URLHelper,
private readonly auth: AuthService,
private readonly user: UserService,
@@ -65,7 +65,7 @@ export class AuthResolver {
@ResolveField(() => ClientTokenType, {
name: 'token',
deprecationReason: 'use [/api/auth/sign-in?native=true] instead',
deprecationReason: 'use [/api/auth/authorize]',
})
async clientToken(
@CurrentUser() currentUser: CurrentUser,
@@ -75,32 +75,39 @@ export class AuthResolver {
throw new ActionForbidden();
}
const userSession = await this.auth.createUserSession(user.id);
const session = await this.auth.createUserSession(
user,
undefined,
this.config.auth.accessToken.ttl
);
return {
sessionToken: userSession.sessionId,
token: userSession.sessionId,
sessionToken: session.sessionId,
token: session.sessionId,
refresh: '',
};
}
@Public()
@Mutation(() => Boolean)
@Mutation(() => UserType)
async changePassword(
@CurrentUser() user: CurrentUser,
@Args('token') token: string,
@Args('newPassword') newPassword: string,
@Args('userId', { type: () => String, nullable: true }) userId?: string
@Args('newPassword') newPassword: string
) {
if (!userId) {
throw new LinkExpired();
}
const config = await this.config.runtime.fetchAll({
'auth/password.max': true,
'auth/password.min': true,
});
validators.assertValidPassword(newPassword, {
min: config['auth/password.min'],
max: config['auth/password.max'],
});
// NOTE: Set & Change password use the same token type.
const valid = await this.token.verifyToken(
TokenType.ChangePassword,
token,
{
credential: userId,
credential: user.id,
}
);
@@ -108,10 +115,10 @@ export class AuthResolver {
throw new InvalidEmailToken();
}
await this.auth.changePassword(userId, newPassword);
await this.auth.revokeUserSessions(userId);
await this.auth.changePassword(user.id, newPassword);
await this.auth.revokeUserSessions(user.id);
return true;
return user;
}
@Mutation(() => UserType)
@@ -120,6 +127,7 @@ export class AuthResolver {
@Args('token') token: string,
@Args('email') email: string
) {
validators.assertValidEmail(email);
// @see [sendChangeEmail]
const valid = await this.token.verifyToken(TokenType.VerifyEmail, token, {
credential: user.id,
@@ -142,11 +150,8 @@ export class AuthResolver {
async sendChangePasswordEmail(
@CurrentUser() user: CurrentUser,
@Args('callbackUrl') callbackUrl: string,
@Args('email', {
nullable: true,
deprecationReason: 'fetched from signed in user',
})
_email?: string
// @deprecated
@Args('email', { nullable: true }) _email?: string
) {
if (!user.emailVerified) {
throw new EmailVerificationRequired();
@@ -157,7 +162,7 @@ export class AuthResolver {
user.id
);
const url = this.url.link(callbackUrl, { userId: user.id, token });
const url = this.url.link(callbackUrl, { token });
const res = await this.auth.sendChangePasswordEmail(user.email, url);
@@ -168,13 +173,21 @@ export class AuthResolver {
async sendSetPasswordEmail(
@CurrentUser() user: CurrentUser,
@Args('callbackUrl') callbackUrl: string,
@Args('email', {
nullable: true,
deprecationReason: 'fetched from signed in user',
})
_email?: string
@Args('email', { nullable: true }) _email?: string
) {
return this.sendChangePasswordEmail(user, callbackUrl);
if (!user.emailVerified) {
throw new EmailVerificationRequired();
}
const token = await this.token.createToken(
TokenType.ChangePassword,
user.id
);
const url = this.url.link(callbackUrl, { token });
const res = await this.auth.sendSetPasswordEmail(user.email, url);
return !res.rejected.length;
}
// The change email step is:
@@ -278,20 +291,4 @@ export class AuthResolver {
return emailVerifiedAt !== null;
}
@Admin()
@Mutation(() => String, {
description: 'Create change password url',
})
async createChangePasswordUrl(
@Args('userId') userId: string,
@Args('callbackUrl') callbackUrl: string
): Promise<string> {
const token = await this.token.createToken(
TokenType.ChangePassword,
userId
);
return this.url.link(callbackUrl, { userId, token });
}
}
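The reshaped `changePassword` mutation as a client would now call it, given the new signature above (token plus new password, returning `UserType`). A sketch; the selection set is illustrative:

```ts
const CHANGE_PASSWORD = /* GraphQL */ `
  mutation ChangePassword($token: String!, $newPassword: String!) {
    changePassword(token: $token, newPassword: $newPassword) {
      id
      name
      email
    }
  }
`;
```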

View File

@@ -1,16 +1,39 @@
import { Injectable, OnApplicationBootstrap } from '@nestjs/common';
import { Cron, CronExpression } from '@nestjs/schedule';
import type { User, UserSession } from '@prisma/client';
import type { User } from '@prisma/client';
import { PrismaClient } from '@prisma/client';
import type { CookieOptions, Request, Response } from 'express';
import { assign, pick } from 'lodash-es';
import { assign, omit } from 'lodash-es';
import { Config, MailService, SignUpForbidden } from '../../fundamentals';
import { Config, EmailAlreadyUsed, MailService } from '../../fundamentals';
import { FeatureManagementService } from '../features/management';
import { QuotaService } from '../quota/service';
import { QuotaType } from '../quota/types';
import { UserService } from '../user/service';
import type { CurrentUser } from './session';
import type { CurrentUser } from './current-user';
export function parseAuthUserSeqNum(value: any) {
let seq: number = 0;
switch (typeof value) {
case 'number': {
seq = value;
break;
}
case 'string': {
const result = value.match(/^(\d{1,10})$/);
if (result?.[1]) {
seq = Number(result[1]);
}
break;
}
default: {
seq = 0;
}
}
return Math.max(0, seq);
}
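// parseAuthUserSeqNum behavior (sketch): parseAuthUserSeqNum(2) === 2,
// parseAuthUserSeqNum('3') === 3, parseAuthUserSeqNum('x') === 0 (fallback),
// and negative numbers clamp to 0 via Math.max.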
export function sessionUser(
user: Pick<
@@ -18,19 +41,13 @@ export function sessionUser(
'id' | 'email' | 'avatarUrl' | 'name' | 'emailVerifiedAt'
> & { password?: string | null }
): CurrentUser {
// use pick to avoid unexpected fields
return assign(pick(user, 'id', 'email', 'avatarUrl', 'name'), {
hasPassword: user.password !== null,
emailVerified: user.emailVerifiedAt !== null,
});
}
function extractTokenFromHeader(authorization: string) {
if (!/^Bearer\s/i.test(authorization)) {
return;
}
return authorization.substring(7);
return assign(
omit(user, 'password', 'registered', 'emailVerifiedAt', 'createdAt'),
{
hasPassword: user.password !== null,
emailVerified: user.emailVerifiedAt !== null,
}
);
}
@Injectable()
@@ -42,7 +59,7 @@ export class AuthService implements OnApplicationBootstrap {
secure: this.config.server.https,
};
static readonly sessionCookieName = 'affine_session';
static readonly userCookieName = 'affine_user_id';
static readonly authUserSeqHeaderName = 'x-auth-user';
constructor(
private readonly config: Config,
@@ -68,7 +85,7 @@ export class AuthService implements OnApplicationBootstrap {
await this.quota.switchUserQuota(devUser.id, QuotaType.ProPlanV1);
await this.feature.addAdmin(devUser.id);
await this.feature.addCopilot(devUser.id);
} catch {
} catch (e) {
// ignore
}
}
@@ -78,183 +95,168 @@ export class AuthService implements OnApplicationBootstrap {
return this.feature.canEarlyAccess(email);
}
/**
 * This is a test-only helper to quickly sign up a user; do not use in production
*/
async signUp(email: string, password: string): Promise<CurrentUser> {
if (!this.config.node.test) {
throw new SignUpForbidden(
'sign up helper is forbidden for non-test environment'
);
async signUp(
name: string,
email: string,
password: string
): Promise<CurrentUser> {
const user = await this.user.findUserByEmail(email);
if (user) {
throw new EmailAlreadyUsed();
}
return this.user
.createUser_without_verification({
.createUser({
name,
email,
password,
})
.then(sessionUser);
}
async signIn(email: string, password: string): Promise<CurrentUser> {
return this.user.signIn(email, password).then(sessionUser);
async signIn(email: string, password: string) {
const user = await this.user.signIn(email, password);
return sessionUser(user);
}
async signOut(sessionId: string, userId?: string) {
// sign out all users in the session
if (!userId) {
await this.db.session.deleteMany({
where: {
id: sessionId,
},
});
} else {
await this.db.userSession.deleteMany({
where: {
sessionId,
userId,
},
});
}
}
async getUser(
token: string,
seq = 0
): Promise<{ user: CurrentUser | null; expiresAt: Date | null }> {
const session = await this.getSession(token);
async getUserSession(
sessionId: string,
userId?: string
): Promise<{ user: CurrentUser; session: UserSession } | null> {
const sessions = await this.getUserSessions(sessionId);
if (!sessions.length) {
return null;
// no such session
if (!session) {
return { user: null, expiresAt: null };
}
let userSession: UserSession | undefined;
const userSession = session.userSessions.at(seq);
// try to read from the user-provided cookies.userId
if (userId) {
userSession = sessions.find(s => s.userId === userId);
}
// fallback to the first valid session if user provided userId is invalid
// no such user session
if (!userSession) {
// checked
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
userSession = sessions.at(-1)!;
return { user: null, expiresAt: null };
}
const user = await this.user.findUserById(userSession.userId);
// user session expired
if (userSession.expiresAt && userSession.expiresAt <= new Date()) {
return { user: null, expiresAt: null };
}
const user = await this.db.user.findUnique({
where: { id: userSession.userId },
});
if (!user) {
return { user: null, expiresAt: null };
}
return { user: sessionUser(user), expiresAt: userSession.expiresAt };
}
async getUserList(token: string) {
const session = await this.getSession(token);
if (!session || !session.userSessions.length) {
return [];
}
const users = await this.db.user.findMany({
where: {
id: {
in: session.userSessions.map(({ userId }) => userId),
},
},
});
// TODO(@forehalo): need to separate expired session, same for [getUser]
// Session
// | { user: LimitedUser { email, avatarUrl }, expired: true }
// | { user: User, expired: false }
return session.userSessions
.map(userSession => {
// keep users in the same order as userSessions
const user = users.find(({ id }) => id === userSession.userId);
if (!user) {
return null;
}
return sessionUser(user);
})
.filter(Boolean) as CurrentUser[];
}
async signOut(token: string, seq = 0) {
const session = await this.getSession(token);
if (session) {
// seq points past the signed-in users in this session; nothing to sign out
if (session.userSessions.length <= seq) {
return session;
}
await this.db.userSession.deleteMany({
where: { id: session.userSessions[seq].id },
});
// no more user session active, delete the whole session
if (session.userSessions.length === 1) {
await this.db.session.delete({ where: { id: session.id } });
return null;
}
return session;
}
return null;
}
async getSession(token: string) {
if (!token) {
return null;
}
return { user: sessionUser(user), session: userSession };
}
async getUserSessions(sessionId: string) {
return this.db.userSession.findMany({
where: {
sessionId,
OR: [{ expiresAt: { gt: new Date() } }, { expiresAt: null }],
},
orderBy: {
createdAt: 'asc',
},
});
}
async createUserSession(
userId: string,
sessionId?: string,
ttl = this.config.auth.session.ttl
) {
// check whether given session is valid
if (sessionId) {
const session = await this.db.session.findFirst({
return this.db.$transaction(async tx => {
const session = await tx.session.findUnique({
where: {
id: sessionId,
id: token,
},
include: {
userSessions: {
orderBy: {
createdAt: 'asc',
},
},
},
});
if (!session) {
sessionId = undefined;
return null;
}
}
if (!sessionId) {
const session = await this.createSession();
sessionId = session.id;
}
const expiresAt = new Date(Date.now() + ttl * 1000);
return this.db.userSession.upsert({
where: {
sessionId_userId: {
sessionId,
userId,
},
},
update: {
expiresAt,
},
create: {
sessionId,
userId,
expiresAt,
},
});
}
async getUserList(sessionId: string) {
const sessions = await this.db.userSession.findMany({
where: {
sessionId,
OR: [
{
expiresAt: null,
if (session.expiresAt && session.expiresAt <= new Date()) {
await tx.session.delete({
where: {
id: session.id,
},
{
expiresAt: {
gt: new Date(),
},
},
],
},
include: {
user: true,
},
orderBy: {
createdAt: 'asc',
},
});
});
return sessions.map(({ user }) => sessionUser(user));
}
return null;
}
async createSession() {
return this.db.session.create({
data: {},
});
}
async getSession(sessionId: string) {
return this.db.session.findFirst({
where: {
id: sessionId,
},
return session;
});
}
async refreshUserSessionIfNeeded(
_req: Request,
res: Response,
session: UserSession,
sessionId: string,
userId: string,
expiresAt: Date,
ttr = this.config.auth.session.ttr
): Promise<boolean> {
if (
session.expiresAt &&
session.expiresAt.getTime() - Date.now() > ttr * 1000
) {
if (expiresAt && expiresAt.getTime() - Date.now() > ttr * 1000) {
// no need to refresh
return false;
}
@@ -265,14 +267,17 @@ export class AuthService implements OnApplicationBootstrap {
await this.db.userSession.update({
where: {
id: session.id,
sessionId_userId: {
sessionId,
userId,
},
},
data: {
expiresAt: newExpiresAt,
},
});
res.cookie(AuthService.sessionCookieName, session.sessionId, {
res.cookie(AuthService.sessionCookieName, sessionId, {
expires: newExpiresAt,
...this.cookieOptions,
});
@@ -280,96 +285,70 @@ export class AuthService implements OnApplicationBootstrap {
return true;
}
async revokeUserSessions(userId: string) {
async createUserSession(
user: { id: string },
existingSession?: string,
ttl = this.config.auth.session.ttl
) {
const session = existingSession
? await this.getSession(existingSession)
: null;
const expiresAt = new Date(Date.now() + ttl * 1000);
if (session) {
return this.db.userSession.upsert({
where: {
sessionId_userId: {
sessionId: session.id,
userId: user.id,
},
},
update: {
expiresAt,
},
create: {
sessionId: session.id,
userId: user.id,
expiresAt,
},
});
} else {
return this.db.userSession.create({
data: {
expiresAt,
session: {
create: {},
},
user: {
connect: {
id: user.id,
},
},
},
});
}
}
async revokeUserSessions(userId: string, sessionId?: string) {
return this.db.userSession.deleteMany({
where: {
userId,
sessionId,
},
});
}
getSessionOptionsFromRequest(req: Request) {
let sessionId: string | undefined =
req.cookies[AuthService.sessionCookieName];
async setCookie(_req: Request, res: Response, user: { id: string }) {
const session = await this.createUserSession(
user
// TODO(@forehalo): enable multi user session
// req.cookies[AuthService.sessionCookieName]
);
if (!sessionId && req.headers.authorization) {
sessionId = extractTokenFromHeader(req.headers.authorization);
}
const userId: string | undefined =
req.cookies[AuthService.userCookieName] ||
req.headers[AuthService.userCookieName];
return {
sessionId,
userId,
};
}
async setCookies(req: Request, res: Response, userId: string) {
const { sessionId } = this.getSessionOptionsFromRequest(req);
const userSession = await this.createUserSession(userId, sessionId);
res.cookie(AuthService.sessionCookieName, userSession.sessionId, {
res.cookie(AuthService.sessionCookieName, session.sessionId, {
expires: session.expiresAt ?? void 0,
...this.cookieOptions,
expires: userSession.expiresAt ?? void 0,
});
this.setUserCookie(res, userId);
}
async refreshCookies(res: Response, sessionId?: string) {
if (sessionId) {
const users = await this.getUserList(sessionId);
const candidateUser = users.at(-1);
if (candidateUser) {
this.setUserCookie(res, candidateUser.id);
return;
}
}
this.clearCookies(res);
}
private clearCookies(res: Response<any, Record<string, any>>) {
res.clearCookie(AuthService.sessionCookieName);
res.clearCookie(AuthService.userCookieName);
}
setUserCookie(res: Response, userId: string) {
res.cookie(AuthService.userCookieName, userId, {
...this.cookieOptions,
// user cookie is client readable & writable for fast user switching when there are multiple users in one session
// it's safe to be non-secure & non-httpOnly because the server validates it against `cookie[AuthService.sessionCookieName]`
httpOnly: false,
secure: false,
});
}
async getUserSessionFromRequest(req: Request, res?: Response) {
const { sessionId, userId } = this.getSessionOptionsFromRequest(req);
if (!sessionId) {
return null;
}
const session = await this.getUserSession(sessionId, userId);
if (res) {
if (session) {
// set user id cookie for fast authentication
if (!userId || userId !== session.user.id) {
this.setUserCookie(res, session.user.id);
}
} else if (sessionId) {
// clear invalid cookies.session and cookies.userId
this.clearCookies(res);
}
}
return session;
}
async changePassword(
@@ -418,16 +397,24 @@ export class AuthService implements OnApplicationBootstrap {
async sendSignInEmail(email: string, link: string, signUp: boolean) {
return signUp
? await this.mailer.sendSignUpMail(link, {
? await this.mailer.sendSignUpMail(link.toString(), {
to: email,
})
: await this.mailer.sendSignInMail(link, {
: await this.mailer.sendSignInMail(link.toString(), {
to: email,
});
}
@Cron(CronExpression.EVERY_DAY_AT_MIDNIGHT)
async cleanExpiredSessions() {
await this.db.session.deleteMany({
where: {
expiresAt: {
lte: new Date(),
},
},
});
await this.db.userSession.deleteMany({
where: {
expiresAt: {

View File

@@ -40,48 +40,6 @@ export class TokenService {
return this.crypto.encrypt(token);
}
/**
* get token by type
*
* token will be revoked if expired or keep is not set
*/
async getToken(type: TokenType, token: string, keep?: boolean) {
token = this.crypto.decrypt(token);
const record = await this.db.verificationToken.findUnique({
where: {
type_token: {
token,
type,
},
},
});
if (!record) {
return null;
}
const expired = record.expiresAt <= new Date();
// always revoke expired token
if (expired || !keep) {
const deleted = await this.revokeToken(type, token);
// already deleted, means token has been used
if (!deleted.count) {
return null;
}
}
return !expired ? record : null;
}
/**
* get token and verify credential
*
* if credential is not provided, it will be failed
*
* token will be revoked if expired or keep is not set
*/
async verifyToken(
type: TokenType,
token: string,
@@ -111,9 +69,13 @@ export class TokenService {
const valid =
!expired && (!record.credential || record.credential === credential);
// always revoke expired token
if (expired || (valid && !keep)) {
const deleted = await this.revokeToken(type, token);
if ((expired || valid) && !keep) {
const deleted = await this.db.verificationToken.deleteMany({
where: {
token,
type,
},
});
// already deleted, means token has been used
if (!deleted.count) {
@@ -124,15 +86,6 @@ export class TokenService {
return valid ? record : null;
}
async revokeToken(type: TokenType, token: string) {
return await this.db.verificationToken.deleteMany({
where: {
token,
type,
},
});
}
@Cron(CronExpression.EVERY_DAY_AT_MIDNIGHT)
async cleanExpiredTokens() {
await this.db.verificationToken.deleteMany({
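A hedged sketch of the one-shot semantics encoded above: with `keep` unset, a successful verification also revokes the token, so a second attempt with the same token fails. `tokenService` is assumed to be an injected `TokenService` instance:

```ts
const token = await tokenService.createToken(TokenType.SignIn, 'user@example.com');

const first = await tokenService.verifyToken(TokenType.SignIn, token, {
  credential: 'user@example.com',
}); // the token record
const second = await tokenService.verifyToken(TokenType.SignIn, token, {
  credential: 'user@example.com',
}); // null: the record was already deleted by the first call
```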

View File

@@ -25,8 +25,8 @@ export class AdminGuard implements CanActivate, OnModuleInit {
async canActivate(context: ExecutionContext) {
const { req } = getRequestResponseFromContext(context);
let allow = false;
if (req.session) {
allow = await this.feature.isAdmin(req.session.user.id);
if (req.user) {
allow = await this.feature.isAdmin(req.user.id);
}
if (!allow) {

View File

@@ -4,23 +4,17 @@ import { Module } from '@nestjs/common';
import {
ServerConfigResolver,
ServerFeatureConfigResolver,
ServerRuntimeConfigResolver,
ServerServiceConfigResolver,
} from './resolver';
import { ServerService } from './service';
@Module({
providers: [
ServerService,
ServerConfigResolver,
ServerFeatureConfigResolver,
ServerRuntimeConfigResolver,
ServerServiceConfigResolver,
],
exports: [ServerService],
})
export class ServerConfigModule {}
export { ServerService };
export { ADD_ENABLED_FEATURES } from './server-feature';
export { ADD_ENABLED_FEATURES, ServerConfigType } from './resolver';
export { ServerFeature } from './types';

View File

@@ -9,18 +9,27 @@ import {
ResolveField,
Resolver,
} from '@nestjs/graphql';
import { RuntimeConfig, RuntimeConfigType } from '@prisma/client';
import { PrismaClient, RuntimeConfig, RuntimeConfigType } from '@prisma/client';
import { GraphQLJSON, GraphQLJSONObject } from 'graphql-scalars';
import { Config, URLHelper } from '../../fundamentals';
import { Config, DeploymentType, URLHelper } from '../../fundamentals';
import { Public } from '../auth';
import { Admin } from '../common';
import { FeatureType } from '../features';
import { AvailableUserFeatureConfig } from '../features/resolver';
import { ServerFlags } from './config';
import { ENABLED_FEATURES } from './server-feature';
import { ServerService } from './service';
import { ServerConfigType } from './types';
import { ServerFeature } from './types';
const ENABLED_FEATURES: Set<ServerFeature> = new Set();
export function ADD_ENABLED_FEATURES(feature: ServerFeature) {
ENABLED_FEATURES.add(feature);
}
registerEnumType(ServerFeature, {
name: 'ServerFeature',
});
registerEnumType(DeploymentType, {
name: 'ServerDeploymentType',
});
@ObjectType()
export class PasswordLimitsType {
@@ -36,6 +45,36 @@ export class CredentialsRequirementType {
password!: PasswordLimitsType;
}
@ObjectType()
export class ServerConfigType {
@Field({
description:
'server identity name, shown as a badge on the user interface',
})
name!: string;
@Field({ description: 'server version' })
version!: string;
@Field({ description: 'server base url' })
baseUrl!: string;
@Field(() => DeploymentType, { description: 'server type' })
type!: DeploymentType;
/**
* @deprecated
*/
@Field({ description: 'server flavor', deprecationReason: 'use `features`' })
flavor!: string;
@Field(() => [ServerFeature], { description: 'enabled server features' })
features!: ServerFeature[];
@Field({ description: 'enable telemetry' })
enableTelemetry!: boolean;
}
registerEnumType(RuntimeConfigType, {
name: 'RuntimeConfigType',
});
@@ -77,7 +116,7 @@ export class ServerConfigResolver {
constructor(
private readonly config: Config,
private readonly url: URLHelper,
private readonly server: ServerService
private readonly db: PrismaClient
) {}
@Public()
@@ -132,21 +171,7 @@ export class ServerConfigResolver {
description: 'whether server has been initialized',
})
async initialized() {
return this.server.initialized();
}
}
@Resolver(() => ServerConfigType)
export class ServerFeatureConfigResolver extends AvailableUserFeatureConfig {
constructor(config: Config) {
super(config);
}
@ResolveField(() => [FeatureType], {
description: 'Features for user that can be configured',
})
override availableUserFeatures() {
return super.availableUserFeatures();
return (await this.db.user.count()) > 0;
}
}
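Querying the consolidated config type above from a client (a sketch; the root field name `serverConfig` is an assumption, while the selections are taken from `ServerConfigType` in this hunk):

```ts
const SERVER_CONFIG = /* GraphQL */ `
  query ServerConfig {
    serverConfig {
      name
      version
      baseUrl
      type
      features
      enableTelemetry
      initialized
    }
  }
`;
```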

View File

@@ -1,7 +0,0 @@
import { ServerFeature } from './types';
export const ENABLED_FEATURES: Set<ServerFeature> = new Set();
export function ADD_ENABLED_FEATURES(feature: ServerFeature) {
ENABLED_FEATURES.add(feature);
}
export { ServerFeature };

View File

@@ -1,17 +0,0 @@
import { Injectable } from '@nestjs/common';
import { PrismaClient } from '@prisma/client';
@Injectable()
export class ServerService {
private _initialized: boolean | null = null;
constructor(private readonly db: PrismaClient) {}
async initialized() {
if (!this._initialized) {
const userCount = await this.db.user.count();
this._initialized = userCount > 0;
}
return this._initialized;
}
}

View File

@@ -1,48 +1,5 @@
import { Field, ObjectType, registerEnumType } from '@nestjs/graphql';
import { DeploymentType } from '../../fundamentals';
export enum ServerFeature {
Captcha = 'captcha',
Copilot = 'copilot',
Payment = 'payment',
OAuth = 'oauth',
}
registerEnumType(ServerFeature, {
name: 'ServerFeature',
});
registerEnumType(DeploymentType, {
name: 'ServerDeploymentType',
});
@ObjectType()
export class ServerConfigType {
@Field({
description:
'server identity name, shown as a badge on the user interface',
})
name!: string;
@Field({ description: 'server version' })
version!: string;
@Field({ description: 'server base url' })
baseUrl!: string;
@Field(() => DeploymentType, { description: 'server type' })
type!: DeploymentType;
/**
* @deprecated
*/
@Field({ description: 'server flavor', deprecationReason: 'use `features`' })
flavor!: string;
@Field(() => [ServerFeature], { description: 'enabled server features' })
features!: ServerFeature[];
@Field({ description: 'enable telemetry' })
enableTelemetry!: boolean;
}

View File

@@ -1,231 +0,0 @@
import { readFileSync } from 'node:fs';
import { join } from 'node:path';
import { Controller, Get, Logger, Req, Res } from '@nestjs/common';
import type { Request, Response } from 'express';
import isMobile from 'is-mobile';
import { Config, metrics, URLHelper } from '../../fundamentals';
import { htmlSanitize } from '../../native';
import { Public } from '../auth';
import { PermissionService } from '../permission';
import { DocContentService } from './service';
interface RenderOptions {
title: string;
summary: string;
avatar?: string;
}
interface HtmlAssets {
html: string;
css: string[];
js: string[];
publicPath: string;
gitHash: string;
description: string;
}
const defaultAssets: HtmlAssets = {
html: '',
css: [],
js: [],
publicPath: '/',
gitHash: '',
description: '',
};
// TODO(@forehalo): reuse routes with frontend
const staticPaths = new Set([
'all',
'home',
'search',
'collection',
'tag',
'trash',
]);
@Controller('/workspace')
export class DocRendererController {
private readonly logger = new Logger(DocRendererController.name);
private readonly webAssets: HtmlAssets = defaultAssets;
private readonly mobileAssets: HtmlAssets = defaultAssets;
constructor(
private readonly doc: DocContentService,
private readonly permission: PermissionService,
private readonly config: Config,
private readonly url: URLHelper
) {
this.webAssets = this.readHtmlAssets(
join(
this.config.projectRoot,
this.config.isSelfhosted ? 'static/selfhost' : 'static'
)
);
this.mobileAssets = this.readHtmlAssets(
join(
this.config.projectRoot,
this.config.isSelfhosted ? 'static/mobile/selfhost' : 'static/mobile'
)
);
}
@Public()
@Get('/*')
async render(@Req() req: Request, @Res() res: Response) {
const assets: HtmlAssets =
this.config.affine.canary &&
isMobile({
ua: req.headers['user-agent'] ?? undefined,
})
? this.mobileAssets
: this.webAssets;
let opts: RenderOptions | null = null;
// /workspace/:workspaceId/{:docId | staticPaths}
const [, , workspaceId, subPath, ...restPaths] = req.path.split('/');
// /:workspaceId/:docId
if (workspaceId && !staticPaths.has(subPath) && restPaths.length === 0) {
try {
opts =
workspaceId === subPath
? await this.getWorkspaceContent(workspaceId)
: await this.getPageContent(workspaceId, subPath);
metrics.doc.counter('render').add(1);
} catch (e) {
this.logger.error('failed to render page', e);
}
}
res.setHeader('Content-Type', 'text/html');
if (!opts) {
res.setHeader('X-Robots-Tag', 'noindex');
}
res.send(this._render(opts, assets));
}
private async getPageContent(
workspaceId: string,
docId: string
): Promise<RenderOptions | null> {
let allowUrlPreview = await this.permission.isPublicPage(
workspaceId,
docId
);
if (!allowUrlPreview) {
// if page is private, but workspace url preview is on
allowUrlPreview = await this.permission.allowUrlPreview(workspaceId);
}
if (allowUrlPreview) {
return this.doc.getPageContent(workspaceId, docId);
}
return null;
}
private async getWorkspaceContent(
workspaceId: string
): Promise<RenderOptions | null> {
const allowUrlPreview = await this.permission.allowUrlPreview(workspaceId);
if (allowUrlPreview) {
const workspaceContent = await this.doc.getWorkspaceContent(workspaceId);
if (workspaceContent) {
return {
title: workspaceContent.name,
summary: '',
avatar: workspaceContent.avatarKey
? this.url.link(
`/api/workspaces/${workspaceId}/blobs/${workspaceContent.avatarKey}`
)
: undefined,
};
}
}
return null;
}
_render(opts: RenderOptions | null, assets: HtmlAssets): string {
if (!opts && assets.html) {
return assets.html;
}
const title = opts?.title
? htmlSanitize(`${opts.title} | AFFiNE`)
: 'AFFiNE';
const summary = opts ? htmlSanitize(opts.summary) : assets.description;
const image = opts?.avatar ?? 'https://affine.pro/og.jpeg';
// TODO(@forehalo): parse assets/index.html
return `<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<meta
name="viewport"
content="width=device-width, initial-scale=1, maximum-scale=1"
/>
<meta name="mobile-web-app-capable" content="yes" />
<meta name="apple-mobile-web-app-capable" content="yes" />
<meta
name="apple-mobile-web-app-status-bar-style"
content="black-translucent"
/>
<title>${title}</title>
<meta name="theme-color" content="#fafafa" />
<link rel="preconnect" href="${assets.publicPath}">
<link rel="manifest" href="/manifest.json" />
<link rel="apple-touch-icon" sizes="180x180" href="/apple-touch-icon.png" />
<link rel="icon" sizes="192x192" href="/favicon-192.png" />
<link rel="shortcut icon" href="/favicon.ico" />
<meta name="emotion-insertion-point" content="" />
${!opts ? '<meta name="robots" content="noindex, nofollow" />' : ''}
<meta
name="twitter:title"
content="${title}"
/>
<meta name="twitter:description" content="${summary}" />
<meta name="twitter:site" content="@AffineOfficial" />
<meta name="twitter:image" content="${image}" />
<meta property="og:title" content="${title}" />
<meta property="og:description" content="${summary}" />
<meta property="og:image" content="${image}" />
${assets.css.map(url => `<link rel="stylesheet" href="${url}" />`).join('\n')}
</head>
<body>
<div id="app" data-version="${assets.gitHash}"></div>
${assets.js.map(url => `<script src="${url}"></script>`).join('\n')}
</body>
</html>
`;
}
/**
* Should only be called at startup time
*/
private readHtmlAssets(path: string): HtmlAssets {
const manifestPath = join(path, 'assets-manifest.json');
const htmlPath = join(path, 'index.html');
try {
const assets = JSON.parse(readFileSync(manifestPath, 'utf-8'));
assets.html = readFileSync(htmlPath, 'utf-8');
return assets;
} catch (e) {
if (this.config.node.prod) {
throw e;
} else {
return defaultAssets;
}
}
}
}
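What the route parsing in the removed `render()` method did, reduced to one line (a sketch): for `/workspace/<workspaceId>/<docId>`, the leading empty segment and the literal `workspace` are skipped by the two holes in the destructuring pattern.

```ts
const [, , workspaceId, subPath, ...restPaths] =
  '/workspace/ws-1/doc-1'.split('/');
// workspaceId === 'ws-1', subPath === 'doc-1', restPaths.length === 0
```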

View File

@@ -1,16 +0,0 @@
import { Module } from '@nestjs/common';
import { DocStorageModule } from '../doc';
import { PermissionModule } from '../permission';
import { DocRendererController } from './controller';
import { DocContentService } from './service';
@Module({
imports: [DocStorageModule, PermissionModule],
providers: [DocContentService],
controllers: [DocRendererController],
exports: [DocContentService],
})
export class DocRendererModule {}
export { DocContentService };

View File

@@ -1,92 +0,0 @@
import { Injectable } from '@nestjs/common';
import { applyUpdate, Doc } from 'yjs';
import { Cache, type EventPayload, OnEvent } from '../../fundamentals';
import { PgWorkspaceDocStorageAdapter } from '../doc';
import {
type PageDocContent,
parsePageDoc,
parseWorkspaceDoc,
type WorkspaceDocContent,
} from '../utils/blocksuite';
@Injectable()
export class DocContentService {
constructor(
private readonly cache: Cache,
private readonly workspace: PgWorkspaceDocStorageAdapter
) {}
async getPageContent(
workspaceId: string,
guid: string
): Promise<PageDocContent | null> {
const cacheKey = `workspace:${workspaceId}:doc:${guid}:content`;
const cachedResult = await this.cache.get<PageDocContent>(cacheKey);
if (cachedResult) {
return cachedResult;
}
const docRecord = await this.workspace.getDoc(workspaceId, guid);
if (!docRecord) {
return null;
}
const doc = new Doc();
applyUpdate(doc, docRecord.bin);
const content = parsePageDoc(doc);
if (content) {
await this.cache.set(cacheKey, content, {
ttl:
7 *
24 *
60 *
60 *
1000 /* TODO(@forehalo): we need time constants helper */,
});
}
return content;
}
async getWorkspaceContent(
workspaceId: string
): Promise<WorkspaceDocContent | null> {
const cacheKey = `workspace:${workspaceId}:content`;
const cachedResult = await this.cache.get<WorkspaceDocContent>(cacheKey);
if (cachedResult) {
return cachedResult;
}
const docRecord = await this.workspace.getDoc(workspaceId, workspaceId);
if (!docRecord) {
return null;
}
const doc = new Doc();
applyUpdate(doc, docRecord.bin);
const content = parseWorkspaceDoc(doc);
if (content) {
await this.cache.set(cacheKey, content);
}
return content;
}
@OnEvent('snapshot.updated')
async markDocContentCacheStale({
workspaceId,
id,
}: EventPayload<'snapshot.updated'>) {
const key =
workspaceId === id
? `workspace:${workspaceId}:content`
: `workspace:${workspaceId}:doc:${id}:content`;
await this.cache.delete(key);
}
}
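The removed service above is a cache-aside pattern with event-driven invalidation. Its core, reduced to a standalone sketch (`DocCache` is a stand-in for the injected `Cache`):

```ts
type DocCache<T> = {
  get(key: string): Promise<T | null>;
  set(key: string, value: T, opts?: { ttl?: number }): Promise<void>;
};

async function getCachedContent<T>(
  cache: DocCache<T>,
  key: string,
  load: () => Promise<T | null>
): Promise<T | null> {
  const hit = await cache.get(key);
  if (hit) return hit;
  const value = await load();
  if (value) {
    // one week, mirroring the TTL in the removed getPageContent
    await cache.set(key, value, { ttl: 7 * 24 * 60 * 60 * 1000 });
  }
  return value;
}
```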

View File

@@ -1,186 +0,0 @@
import { Injectable } from '@nestjs/common';
import { PrismaClient } from '@prisma/client';
import { Mutex } from '../../../fundamentals';
import { DocStorageOptions } from '../options';
import { DocRecord, DocStorageAdapter } from '../storage';
@Injectable()
export class PgUserspaceDocStorageAdapter extends DocStorageAdapter {
constructor(
private readonly db: PrismaClient,
private readonly mutex: Mutex,
options: DocStorageOptions
) {
super(options);
}
// no updates queue for userspace; merge updates directly in place
// no history record for userspace
protected async getDocUpdates() {
return [];
}
protected async markUpdatesMerged() {
return 0;
}
async listDocHistories() {
return [];
}
async getDocHistory() {
return null;
}
protected async createDocHistory() {
return false;
}
override async rollbackDoc() {
return;
}
override async getDoc(spaceId: string, docId: string) {
return this.getDocSnapshot(spaceId, docId);
}
async pushDocUpdates(
userId: string,
docId: string,
updates: Uint8Array[],
editorId?: string
) {
if (!updates.length) {
return 0;
}
await using _lock = await this.lockDocForUpdate(userId, docId);
const snapshot = await this.getDocSnapshot(userId, docId);
const now = Date.now();
const pendings = updates.map((update, i) => ({
bin: update,
timestamp: now + i,
}));
const { timestamp, bin } = await this.squash(
snapshot ? [snapshot, ...pendings] : pendings
);
await this.setDocSnapshot({
spaceId: userId,
docId,
bin,
timestamp,
editor: editorId,
});
return timestamp;
}
async deleteDoc(userId: string, docId: string) {
await this.db.userSnapshot.deleteMany({
where: {
userId,
id: docId,
},
});
}
async deleteSpace(userId: string) {
await this.db.userSnapshot.deleteMany({
where: {
userId,
},
});
}
async getSpaceDocTimestamps(userId: string, after?: number) {
const snapshots = await this.db.userSnapshot.findMany({
select: {
id: true,
updatedAt: true,
},
where: {
userId,
...(after
? {
updatedAt: {
gt: new Date(after),
},
}
: {}),
},
});
const result: Record<string, number> = {};
snapshots.forEach(s => {
result[s.id] = s.updatedAt.getTime();
});
return result;
}
protected async getDocSnapshot(userId: string, docId: string) {
const snapshot = await this.db.userSnapshot.findUnique({
where: {
userId_id: {
userId,
id: docId,
},
},
});
if (!snapshot) {
return null;
}
return {
spaceId: userId,
docId,
bin: snapshot.blob,
timestamp: snapshot.updatedAt.getTime(),
editor: snapshot.userId,
};
}
protected async setDocSnapshot(snapshot: DocRecord) {
// we always take the lock before writing to the user snapshot table,
// so a simple upsert without checking updatedAt is safe
await this.db.userSnapshot.upsert({
where: {
userId_id: {
userId: snapshot.spaceId,
id: snapshot.docId,
},
},
update: {
blob: Buffer.from(snapshot.bin),
updatedAt: new Date(snapshot.timestamp),
},
create: {
userId: snapshot.spaceId,
id: snapshot.docId,
blob: Buffer.from(snapshot.bin),
createdAt: new Date(snapshot.timestamp),
updatedAt: new Date(snapshot.timestamp),
},
});
return true;
}
protected override async lockDocForUpdate(
workspaceId: string,
docId: string
) {
const lock = await this.mutex.lock(`userspace:${workspaceId}:${docId}`);
if (!lock) {
throw new Error('Too many concurrent writings');
}
return lock;
}
}
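A sketch of what "merge updates directly in place" means for the userspace adapter above: fold the stored snapshot (if any) and the incoming updates into one binary before writing it back. The real `squash` in `DocStorageAdapter` also tracks timestamps, so this is only the shape of the operation:

```ts
import { mergeUpdates } from 'yjs';

function squashInPlace(
  snapshot: Uint8Array | null,
  updates: Uint8Array[]
): Uint8Array {
  // yjs can merge a list of update binaries into one equivalent update
  return mergeUpdates(snapshot ? [snapshot, ...updates] : updates);
}
```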

View File

@@ -1,632 +0,0 @@
import { Injectable, Logger } from '@nestjs/common';
import { PrismaClient } from '@prisma/client';
import { chunk } from 'lodash-es';
import {
Cache,
DocHistoryNotFound,
DocNotFound,
EventEmitter,
FailedToSaveUpdates,
FailedToUpsertSnapshot,
metrics,
Mutex,
} from '../../../fundamentals';
import { retryable } from '../../../fundamentals/utils/promise';
import { DocStorageOptions } from '../options';
import {
DocRecord,
DocStorageAdapter,
DocUpdate,
HistoryFilter,
} from '../storage';
const UPDATES_QUEUE_CACHE_KEY = 'doc:manager:updates';
@Injectable()
export class PgWorkspaceDocStorageAdapter extends DocStorageAdapter {
private readonly logger = new Logger(PgWorkspaceDocStorageAdapter.name);
constructor(
private readonly db: PrismaClient,
private readonly mutex: Mutex,
private readonly cache: Cache,
private readonly event: EventEmitter,
protected override readonly options: DocStorageOptions
) {
super(options);
}
async pushDocUpdates(
workspaceId: string,
docId: string,
updates: Uint8Array[],
editorId?: string
) {
if (!updates.length) {
return 0;
}
let pendings = updates;
let done = 0;
let timestamp = Date.now();
try {
await retryable(async () => {
if (done !== 0) {
pendings = pendings.slice(done);
}
// TODO(@forehalo): remove in next release
const lastSeq = await this.getUpdateSeq(
workspaceId,
docId,
updates.length
);
let turn = 0;
const batchCount = 10;
for (const batch of chunk(pendings, batchCount)) {
const now = Date.now();
await this.db.update.createMany({
data: batch.map((update, i) => {
const subSeq = turn * batchCount + i + 1;
// `lastSeq` is the seq num reserved for the final update of this push
// example: 11 batched updates pushed when the last stored seq num is 20,
// so lastSeq = 31; the seq for the first update in the first batch is:
// 31 - 11 + subSeq(0 * 10 + 0 + 1) = 21
// ^ lastSeq   ^ updates.length   ^ subSeq = turn * batchCount + i + 1
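// note (sketch): since subSeq runs from 1 to updates.length across all
// batches, the final update is assigned exactly seq = lastSeq, matching
// the value returned by getUpdateSeq above.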
const seq = lastSeq - updates.length + subSeq;
const createdAt = now + subSeq;
timestamp = Math.max(timestamp, createdAt);
return {
workspaceId,
id: docId,
blob: Buffer.from(update),
seq,
createdAt: new Date(createdAt),
createdBy: editorId || null,
};
}),
});
turn++;
done += batch.length;
await this.updateCachedUpdatesCount(workspaceId, docId, batch.length);
}
});
} catch (e) {
this.logger.error('Failed to insert doc updates', e);
metrics.doc.counter('doc_update_insert_failed').add(1);
throw new FailedToSaveUpdates();
}
return timestamp;
}
protected async getDocUpdates(workspaceId: string, docId: string) {
const rows = await this.db.update.findMany({
where: {
workspaceId,
id: docId,
},
orderBy: {
createdAt: 'asc',
},
});
return rows.map(row => ({
bin: row.blob,
timestamp: row.createdAt.getTime(),
editor: row.createdBy || undefined,
}));
}
async deleteDoc(workspaceId: string, docId: string) {
const ident = { where: { workspaceId, id: docId } };
await this.db.$transaction([
this.db.snapshot.deleteMany(ident),
this.db.update.deleteMany(ident),
this.db.snapshotHistory.deleteMany(ident),
]);
}
async deleteSpace(workspaceId: string) {
const ident = { where: { workspaceId } };
await this.db.$transaction([
this.db.snapshot.deleteMany(ident),
this.db.update.deleteMany(ident),
this.db.snapshotHistory.deleteMany(ident),
]);
}
async getSpaceDocTimestamps(workspaceId: string, after?: number) {
const snapshots = await this.db.snapshot.findMany({
select: {
id: true,
updatedAt: true,
},
where: {
workspaceId,
...(after
? {
updatedAt: {
gt: new Date(after),
},
}
: {}),
},
});
const updates = await this.db.update.groupBy({
where: {
workspaceId,
...(after
? {
// [createdAt] in updates table is indexed, so it's fast
createdAt: {
gt: new Date(after),
},
}
: {}),
},
by: ['id'],
_max: {
createdAt: true,
},
});
const result: Record<string, number> = {};
snapshots.forEach(s => {
result[s.id] = s.updatedAt.getTime();
});
updates.forEach(u => {
if (u._max.createdAt) {
result[u.id] = u._max.createdAt.getTime();
}
});
return result;
}
protected async markUpdatesMerged(
workspaceId: string,
docId: string,
updates: DocUpdate[]
) {
const result = await this.db.update.deleteMany({
where: {
workspaceId,
id: docId,
createdAt: {
in: updates.map(u => new Date(u.timestamp)),
},
},
});
await this.updateCachedUpdatesCount(workspaceId, docId, -result.count);
return result.count;
}
async listDocHistories(
workspaceId: string,
docId: string,
query: HistoryFilter
) {
const histories = await this.db.snapshotHistory.findMany({
select: {
timestamp: true,
createdByUser: {
select: {
name: true,
avatarUrl: true,
},
},
},
where: {
workspaceId,
id: docId,
timestamp: {
lt: query.before ? new Date(query.before) : new Date(),
},
},
orderBy: {
timestamp: 'desc',
},
take: query.limit,
});
return histories.map(h => ({
timestamp: h.timestamp.getTime(),
editor: h.createdByUser,
}));
}
async getDocHistory(workspaceId: string, docId: string, timestamp: number) {
const history = await this.db.snapshotHistory.findUnique({
where: {
workspaceId_id_timestamp: {
workspaceId,
id: docId,
timestamp: new Date(timestamp),
},
},
});
if (!history) {
return null;
}
return {
spaceId: workspaceId,
docId,
bin: history.blob,
timestamp,
editor: history.createdBy || undefined,
};
}
override async rollbackDoc(
spaceId: string,
docId: string,
timestamp: number,
editorId?: string
): Promise<void> {
await using _lock = await this.lockDocForUpdate(spaceId, docId);
const toSnapshot = await this.getDocHistory(spaceId, docId, timestamp);
if (!toSnapshot) {
throw new DocHistoryNotFound({ spaceId, docId, timestamp });
}
const fromSnapshot = await this.getDocSnapshot(spaceId, docId);
if (!fromSnapshot) {
throw new DocNotFound({ spaceId, docId });
}
// force create a new history record after rollback
await this.createDocHistory(
{
...fromSnapshot,
// override the editor to the one who requested the rollback
editor: editorId,
},
true
);
// WARN:
// we should never update the snapshot while recovering history,
// which is not how CRDT state should be resolved.
// let the user revert on the client and propagate the change through the sync system
// const change = this.generateChangeUpdate(fromSnapshot.bin, toSnapshot.bin);
// await this.pushDocUpdates(spaceId, docId, [change]);
metrics.doc
.counter('history_recovered_counter', {
description: 'How many times history recovered request happened',
})
.add(1);
}
protected async createDocHistory(snapshot: DocRecord, force = false) {
const last = await this.lastDocHistory(snapshot.spaceId, snapshot.docId);
let shouldCreateHistory = false;
if (!last) {
// never created
shouldCreateHistory = true;
} else {
const lastHistoryTimestamp = last.timestamp.getTime();
if (lastHistoryTimestamp === snapshot.timestamp) {
// no change
shouldCreateHistory = false;
} else if (
// force
force ||
// last history created before interval in configs
lastHistoryTimestamp <
snapshot.timestamp - this.options.historyMinInterval(snapshot.spaceId)
) {
shouldCreateHistory = true;
}
}
if (shouldCreateHistory) {
if (this.isEmptyBin(snapshot.bin)) {
this.logger.debug(
`Doc is empty, skip creating history record for ${snapshot.docId} in workspace ${snapshot.spaceId}`
);
return false;
}
const historyMaxAge = await this.options
.historyMaxAge(snapshot.spaceId)
.catch(
() =>
0 /* edgecase: user deleted but owned workspaces not handled correctly */
);
if (historyMaxAge === 0) {
return false;
}
await this.db.snapshotHistory
.create({
select: {
timestamp: true,
},
data: {
workspaceId: snapshot.spaceId,
id: snapshot.docId,
timestamp: new Date(snapshot.timestamp),
blob: Buffer.from(snapshot.bin),
createdBy: snapshot.editor,
expiredAt: new Date(Date.now() + historyMaxAge),
},
})
.catch(() => {
// safe to ignore
// only happens when a duplicate history record is created by concurrent processes
});
metrics.doc
.counter('history_created_counter', {
description: 'How many snapshot history records were created',
})
.add(1);
this.logger.debug(
`History created for ${snapshot.docId} in workspace ${snapshot.spaceId}.`
);
return true;
}
return false;
}
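/**
 * Load the merged snapshot of a doc, or `null` if it has never been
 * snapshotted. `timestamp` reflects the snapshot's `updatedAt`.
 */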
protected async getDocSnapshot(workspaceId: string, docId: string) {
const snapshot = await this.db.snapshot.findUnique({
where: {
workspaceId_id: {
workspaceId,
id: docId,
},
},
});
if (!snapshot) {
return null;
}
return {
spaceId: workspaceId,
docId,
bin: snapshot.blob,
timestamp: snapshot.updatedAt.getTime(),
// creator and editor may be null if their accounts have been deleted
editor: snapshot.updatedBy || snapshot.createdBy || undefined,
};
}
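/**
 * Persist a snapshot with an optimistic timestamp guard: the raw upsert
 * below only overwrites the row when the stored `updated_at` is not newer
 * than the incoming timestamp. Returns `true` when the row was written.
 */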
protected async setDocSnapshot(snapshot: DocRecord) {
const { spaceId, docId, bin, timestamp } = snapshot;
if (this.isEmptyBin(bin)) {
return false;
}
const updatedAt = new Date(timestamp);
// CONCERNS:
// i. Because we store the real user's last action time as `updatedAt`,
//    comparing `updatedAt` is enough to tell whether the stored snapshot is older than the one we are about to save.
//
// ii. Prisma doesn't support `upsert` with an additional `where` condition alongside the unique constraint.
//     In our case, we need to check `updatedAt` manually to avoid overwriting a newer snapshot.
//     where: { workspaceId_id: {}, updatedAt: { lt: updatedAt } }
//                                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
try {
const result: { updatedAt: Date }[] = await this.db.$queryRaw`
INSERT INTO "snapshots" ("workspace_id", "guid", "blob", "created_at", "updated_at", "created_by", "updated_by")
VALUES (${spaceId}, ${docId}, ${bin}, DEFAULT, ${updatedAt}, ${snapshot.editor}, ${snapshot.editor})
ON CONFLICT ("workspace_id", "guid")
DO UPDATE SET "blob" = ${bin}, "updated_at" = ${updatedAt}, "updated_by" = ${snapshot.editor}
WHERE "snapshots"."workspace_id" = ${spaceId} AND "snapshots"."guid" = ${docId} AND "snapshots"."updated_at" <= ${updatedAt}
RETURNING "snapshots"."workspace_id" as "workspaceId", "snapshots"."guid" as "id", "snapshots"."updated_at" as "updatedAt"
`;
// const result = await this.db.snapshot.upsert({
// select: {
// updatedAt: true,
// seq: true,
// },
// where: {
// workspaceId_id: {
// workspaceId,
// id: guid,
// },
// ⬇️ NOT SUPPORTED BY PRISMA YET
// updatedAt: {
// lt: updatedAt,
// },
// },
// update: {
// blob,
// state,
// updatedAt,
// },
// create: {
// workspaceId,
// id: guid,
// blob,
// state,
// updatedAt,
// seq,
// },
// });
// If the condition `snapshot.updatedAt > updatedAt` holds, the snapshot has already been updated by another process,
// and the updates applied to the current `doc` must have been seen by that process as well.
// `updatedSnapshot` will be `undefined` in this case.
const updatedSnapshot = result.at(0);
if (updatedSnapshot) {
this.event.emit('snapshot.updated', {
workspaceId: snapshot.spaceId,
id: snapshot.docId,
});
}
return !!updatedSnapshot;
} catch (e) {
metrics.doc.counter('snapshot_upsert_failed').add(1);
this.logger.error('Failed to upsert snapshot', e);
throw new FailedToUpsertSnapshot();
}
}
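/**
 * Acquire a distributed mutex on the doc so only one process merges its
 * updates at a time. The returned lock is released via `await using`.
 */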
protected override async lockDocForUpdate(
workspaceId: string,
docId: string
) {
const lock = await this.mutex.lock(`doc:update:${workspaceId}:${docId}`);
if (!lock) {
throw new Error('Too many concurrent writes');
}
return lock;
}
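/**
 * Fetch the most recent history record (timestamp and stored state) of a
 * doc; `createDocHistory` uses it to decide whether a new record is due.
 */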
protected async lastDocHistory(workspaceId: string, id: string) {
return this.db.snapshotHistory.findFirst({
where: {
workspaceId,
id,
},
select: {
timestamp: true,
state: true,
},
orderBy: {
timestamp: 'desc',
},
});
}
/**
 * Pick a random doc that still has pending updates queued in the cache,
 * used by the auto-merge job. Returns `null` when nothing is pending.
 */
async randomDoc() {
const key = await this.cache.mapRandomKey(UPDATES_QUEUE_CACHE_KEY);
if (key) {
const cachedCount = await this.cache.mapIncrease(
UPDATES_QUEUE_CACHE_KEY,
key,
0
);
if (cachedCount > 0) {
const [workspaceId, id] = key.split('::');
const count = await this.db.update.count({
where: {
workspaceId,
id,
},
});
// FIXME(@forehalo): somehow the update count in cache is not accurate
if (count === 0) {
metrics.doc
.counter('doc_update_count_inconsistent_with_cache')
.add(1);
await this.cache.mapDelete(UPDATES_QUEUE_CACHE_KEY, key);
return null;
}
return { workspaceId, docId: id };
}
}
return null;
}
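/**
 * Adjust the cached pending-updates counter for a doc; the cache entry is
 * dropped once the counter reaches zero.
 */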
private async updateCachedUpdatesCount(
workspaceId: string,
guid: string,
count: number
) {
const result = await this.cache.mapIncrease(
UPDATES_QUEUE_CACHE_KEY,
`${workspaceId}::${guid}`,
count
);
if (result <= 0) {
await this.cache.mapDelete(
UPDATES_QUEUE_CACHE_KEY,
`${workspaceId}::${guid}`
);
}
}
/**
* @deprecated
*/
private readonly seqMap = new Map<string, number>();
/**
 * @deprecated updates no longer rely on a seq number
 *
 * Kept for one more release to avoid downtime when upgrading instances.
 */
private async getUpdateSeq(workspaceId: string, guid: string, batch = 1) {
const MAX_SEQ_NUM = 0x3fffffff; // u31
try {
const { seq } = await this.db.snapshot.update({
select: {
seq: true,
},
where: {
workspaceId_id: {
workspaceId,
id: guid,
},
},
data: {
seq: {
increment: batch,
},
},
});
if (!seq) {
return batch;
}
// reset the counter before it overflows u31
if (seq >= MAX_SEQ_NUM) {
await this.db.snapshot.update({
select: {
seq: true,
},
where: {
workspaceId_id: {
workspaceId,
id: guid,
},
},
data: {
seq: 0,
},
});
}
return seq;
} catch {
// snapshot doesn't exist yet; fall back to a local in-memory counter
const last = this.seqMap.get(workspaceId + guid) ?? 0;
this.seqMap.set(workspaceId + guid, last + batch);
return last + batch;
}
}
}

Some files were not shown because too many files have changed in this diff