Compare commits

1 commit

Author: Peng Xiao
SHA1: 31071c8308
Message: fix(electron): electron cmd+r issue
Date: 2024-07-31 07:18:42 +00:00

1312 changed files with 29235 additions and 53468 deletions

View File

@@ -34,8 +34,8 @@ const createPattern = packageName => [
{
group: ['@affine/env/constant'],
message:
'Do not import from @affine/env/constant. Use `environment.isElectron` instead',
importNames: ['isElectron'],
'Do not import from @affine/env/constant. Use `environment.isDesktop` instead',
importNames: ['isDesktop'],
},
];
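
One side of this hunk points callers at `environment.isElectron`, the other at `environment.isDesktop`; either way, the rule replaces a compile-time import with a runtime flag. A minimal sketch of the flagged call site, assuming the shape of the `environment` global (not confirmed by this diff):

// Disallowed by the rule above:
// import { isDesktop } from '@affine/env/constant';

// Preferred, per the lint message (shape of `environment` is an assumption):
declare const environment: { isDesktop: boolean };

if (environment.isDesktop) {
  // desktop-only behavior, e.g. registering native menu handlers
}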
@@ -247,8 +247,7 @@ const config = {
'react-hooks/exhaustive-deps': [
'warn',
{
additionalHooks:
'(useAsyncCallback|useCatchEventCallback|useDraggable|useDropTarget|useRefEffect)',
additionalHooks: '(useAsyncCallback|useDraggable|useDropTarget)',
},
],
},
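
Whichever set of hook names is active, `additionalHooks` extends react-hooks/exhaustive-deps to custom hooks that take a dependency array. A hypothetical hook matching the `useAsyncCallback` pattern above (illustrative, not the repo's implementation):

import { useCallback } from 'react';

// Because 'useAsyncCallback' matches the additionalHooks pattern, the lint
// rule checks its dependency array exactly as it would for useCallback.
export function useAsyncCallback<A extends unknown[]>(
  fn: (...args: A) => Promise<void>,
  deps: unknown[]
): (...args: A) => void {
  return useCallback((...args: A) => {
    void fn(...args).catch(console.error);
  }, deps); // exhaustive-deps warns here if `deps` omits a captured value
}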

View File

@@ -1,36 +0,0 @@
name: 'Auth to Cluster'
description: 'Auth to the GCP Cluster'
inputs:
gcp-project-number:
description: 'GCP project number'
required: true
gcp-project-id:
description: 'GCP project id'
required: true
service-account:
description: 'Service account'
cluster-name:
description: 'Cluster name'
cluster-location:
description: 'Cluster location'
runs:
using: 'composite'
steps:
- id: auth
uses: google-github-actions/auth@v2
with:
workload_identity_provider: 'projects/${{ inputs.gcp-project-number }}/locations/global/workloadIdentityPools/github-actions/providers/github-actions-helm-deploy'
service_account: '${{ inputs.service-account }}'
token_format: 'access_token'
project_id: '${{ inputs.gcp-project-id }}'
- name: 'Setup gcloud cli'
uses: 'google-github-actions/setup-gcloud@v2'
with:
install_components: 'gke-gcloud-auth-plugin'
- id: get-gke-credentials
shell: bash
run: |
gcloud container clusters get-credentials ${{ inputs.cluster-name }} --region ${{ inputs.cluster-location }} --project ${{ inputs.gcp-project-id }}

View File

@@ -24,14 +24,24 @@ runs:
shell: bash
run: |
echo "GIT_SHORT_HASH=$(git rev-parse --short HEAD)" >> "$GITHUB_ENV"
- name: 'Auth to cluster'
uses: './.github/actions/cluster-auth'
- uses: azure/setup-helm@v4
- id: auth
uses: google-github-actions/auth@v2
with:
gcp-project-number: '${{ inputs.gcp-project-number }}'
gcp-project-id: '${{ inputs.gcp-project-id }}'
service-account: '${{ inputs.service-account }}'
cluster-name: '${{ inputs.cluster-name }}'
cluster-location: '${{ inputs.cluster-location }}'
workload_identity_provider: 'projects/${{ inputs.gcp-project-number }}/locations/global/workloadIdentityPools/github-actions/providers/github-actions-helm-deploy'
service_account: '${{ inputs.service-account }}'
token_format: 'access_token'
project_id: '${{ inputs.gcp-project-id }}'
- name: 'Setup gcloud cli'
uses: 'google-github-actions/setup-gcloud@v2'
with:
install_components: 'gke-gcloud-auth-plugin'
- id: get-gke-credentials
shell: bash
run: |
gcloud container clusters get-credentials ${{ inputs.cluster-name }} --region ${{ inputs.cluster-location }} --project ${{ inputs.gcp-project-id }}
- name: Deploy
shell: bash

View File

@@ -156,7 +156,7 @@ runs:
- name: Install Playwright's dependencies
shell: bash
if: inputs.playwright-install == 'true'
run: yarn playwright install --with-deps chromium webkit
run: yarn playwright install --with-deps chromium
env:
PLAYWRIGHT_BROWSERS_PATH: ${{ github.workspace }}/node_modules/.cache/ms-playwright
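
With webkit dropped from the install step, only Chromium is available at test time. A minimal playwright.config.ts consistent with that (the file contents are an assumption, not the repo's actual config):

import { defineConfig, devices } from '@playwright/test';

export default defineConfig({
  projects: [
    // Only Chromium is installed by the CI step above, so it is the only project.
    { name: 'chromium', use: { ...devices['Desktop Chrome'] } },
  ],
});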

View File

@@ -17,7 +17,7 @@ runs:
PACKAGE_VERSION=$(node -p "require('./package.json').version")
TIME_VERSION=$(date +%Y%m%d%H%M)
GIT_SHORT_HASH=$(git rev-parse --short HEAD)
APP_VERSION=$PACKAGE_VERSION-nightly-$GIT_SHORT_HASH
APP_VERSION=$PACKAGE_VERSION-$GIT_SHORT_HASH
fi
echo $APP_VERSION
echo "APP_VERSION=$APP_VERSION" >> "$GITHUB_OUTPUT"

View File

@@ -1,8 +1,7 @@
FROM openresty/openresty:1.25.3.2-0-buster
FROM openresty/openresty:1.25.3.1-0-buster
WORKDIR /app
COPY ./packages/frontend/web/dist ./dist
COPY ./packages/frontend/admin/dist ./admin
COPY ./packages/frontend/mobile/dist ./mobile
COPY ./.github/deployment/front/nginx.conf /usr/local/openresty/nginx/conf/nginx.conf
COPY ./.github/deployment/front/affine.nginx.conf /etc/nginx/conf.d/affine.nginx.conf

View File

@@ -11,28 +11,10 @@ server {
try_files $uri $uri/ =404;
}
set $app_root_path /app/dist/;
set $mobile_root /app/dist/;
set_by_lua $affine_env 'return os.getenv("AFFINE_ENV")';
if ($affine_env = "dev") {
set $mobile_root /app/mobile/;
}
# https://gist.github.com/mariusom/6683dc52b1cad1a1f372e908bdb209d0
if ($http_user_agent ~* "(android|bb\d+|meego).+mobile|avantgo|bada\/|blackberry|blazer|compal|elaine|fennec|hiptop|iemobile|ip(hone|od)|iris|kindle|lge |maemo|midp|mmp|mobile.+firefox|netfront|opera m(ob|in)i|palm( os)?|phone|p(ixi|re)\/|plucker|pocket|psp|series(4|6)0|symbian|treo|up\.(browser|link)|vodafone|wap|windows ce|xda|xiino") {
set $app_root_path $mobile_root;
}
if ($http_user_agent ~* "^(1207|6310|6590|3gso|4thp|50[1-6]i|770s|802s|a wa|abac|ac(er|oo|s\-)|ai(ko|rn)|al(av|ca|co)|amoi|an(ex|ny|yw)|aptu|ar(ch|go)|as(te|us)|attw|au(di|\-m|r |s )|avan|be(ck|ll|nq)|bi(lb|rd)|bl(ac|az)|br(e|v)w|bumb|bw\-(n|u)|c55\/|capi|ccwa|cdm\-|cell|chtm|cldc|cmd\-|co(mp|nd)|craw|da(it|ll|ng)|dbte|dc\-s|devi|dica|dmob|do(c|p)o|ds(12|\-d)|el(49|ai)|em(l2|ul)|er(ic|k0)|esl8|ez([4-7]0|os|wa|ze)|fetc|fly(\-|_)|g1 u|g560|gene|gf\-5|g\-mo|go(\.w|od)|gr(ad|un)|haie|hcit|hd\-(m|p|t)|hei\-|hi(pt|ta)|hp( i|ip)|hs\-c|ht(c(\-| |_|a|g|p|s|t)|tp)|hu(aw|tc)|i\-(20|go|ma)|i230|iac( |\-|\/)|ibro|idea|ig01|ikom|im1k|inno|ipaq|iris|ja(t|v)a|jbro|jemu|jigs|kddi|keji|kgt( |\/)|klon|kpt |kwc\-|kyo(c|k)|le(no|xi)|lg( g|\/(k|l|u)|50|54|\-[a-w])|libw|lynx|m1\-w|m3ga|m50\/|ma(te|ui|xo)|mc(01|21|ca)|m\-cr|me(rc|ri)|mi(o8|oa|ts)|mmef|mo(01|02|bi|de|do|t(\-| |o|v)|zz)|mt(50|p1|v )|mwbp|mywa|n10[0-2]|n20[2-3]|n30(0|2)|n50(0|2|5)|n7(0(0|1)|10)|ne((c|m)\-|on|tf|wf|wg|wt)|nok(6|i)|nzph|o2im|op(ti|wv)|oran|owg1|p800|pan(a|d|t)|pdxg|pg(13|\-([1-8]|c))|phil|pire|pl(ay|uc)|pn\-2|po(ck|rt|se)|prox|psio|pt\-g|qa\-a|qc(07|12|21|32|60|\-[2-7]|i\-)|qtek|r380|r600|raks|rim9|ro(ve|zo)|s55\/|sa(ge|ma|mm|ms|ny|va)|sc(01|h\-|oo|p\-)|sdk\/|se(c(\-|0|1)|47|mc|nd|ri)|sgh\-|shar|sie(\-|m)|sk\-0|sl(45|id)|sm(al|ar|b3|it|t5)|so(ft|ny)|sp(01|h\-|v\-|v )|sy(01|mb)|t2(18|50)|t6(00|10|18)|ta(gt|lk)|tcl\-|tdg\-|tel(i|m)|tim\-|t\-mo|to(pl|sh)|ts(70|m\-|m3|m5)|tx\-9|up(\.b|g1|si)|utst|v400|v750|veri|vi(rg|te)|vk(40|5[0-3]|\-v)|vm40|voda|vulc|vx(52|53|60|61|70|80|81|83|85|98)|w3c(\-| )|webc|whit|wi(g |nc|nw)|wmlb|wonu|x700|yas\-|your|zeto|zte\-)") {
set $app_root_path $mobile_root;
}
location / {
root $app_root_path;
root /app/dist/;
index index.html;
try_files $uri $uri/ /index.html;
add_header Cache-Control "private, no-cache, no-store, max-age=0, must-revalidate";
}
error_page 404 /404.html;
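
The removed block sniffed the User-Agent header and pointed $app_root_path at the mobile bundle for matching devices. An abbreviated TypeScript rendering of that classification (the regex below is a small, hypothetical subset of the two patterns above):

// Small, illustrative subset of the removed nginx user-agent patterns.
const MOBILE_UA = /(android.+mobile|ip(hone|od)|iemobile|blackberry|opera m(ob|in)i)/i;

function rootPathFor(userAgent: string, affineEnv: string): string {
  // Mirrors: $mobile_root defaults to /app/dist/ and becomes /app/mobile/ in dev.
  const mobileRoot = affineEnv === 'dev' ? '/app/mobile/' : '/app/dist/';
  return MOBILE_UA.test(userAgent) ? mobileRoot : '/app/dist/';
}

rootPathFor('Mozilla/5.0 (iPhone; CPU iPhone OS 17_0 like Mac OS X)', 'dev'); // '/app/mobile/'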

View File

@@ -1,15 +1,14 @@
worker_processes 4;
worker_processes 4;
error_log /var/log/nginx/error.log warn;
pcre_jit on;
env AFFINE_ENV;
events {
worker_connections 1024;
worker_connections 1024;
}
http {
include mime.types;
log_format main '$remote_addr [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
access_log /var/log/nginx/access.log main;
include /etc/nginx/conf.d/*.conf;
include mime.types;
log_format main '$remote_addr [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
access_log /var/log/nginx/access.log main;
include /etc/nginx/conf.d/*.conf;
}

View File

@@ -3,4 +3,4 @@ name: affine
description: AFFiNE cloud chart
type: application
version: 0.0.0
appVersion: "0.16.0"
appVersion: "0.15.0"

View File

@@ -3,7 +3,7 @@ name: graphql
description: AFFiNE GraphQL server
type: application
version: 0.0.0
appVersion: "0.16.0"
appVersion: "0.15.0"
dependencies:
- name: gcloud-sql-proxy
version: 0.0.0

View File

@@ -77,6 +77,8 @@ spec:
value: "{{ .Values.app.https }}"
- name: ENABLE_R2_OBJECT_STORAGE
value: "{{ .Values.app.objectStorage.r2.enabled }}"
- name: ENABLE_CAPTCHA
value: "{{ .Values.app.captcha.enabled }}"
- name: FEATURES_EARLY_ACCESS_PREVIEW
value: "{{ .Values.app.features.earlyAccessPreview }}"
- name: FEATURES_SYNC_CLIENT_VERSION_CHECK
@@ -202,12 +204,12 @@ spec:
protocol: TCP
livenessProbe:
httpGet:
path: /info
path: /
port: http
initialDelaySeconds: {{ .Values.probe.initialDelaySeconds }}
readinessProbe:
httpGet:
path: /info
path: /
port: http
initialDelaySeconds: {{ .Values.probe.initialDelaySeconds }}
resources:
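
The probes move between the dedicated /info endpoint and the root path. A hedged sketch of what the kubelet effectively does on each check (service name and port are taken from the chart values shown later in this diff; treat them as assumptions):

// Kubernetes performs an HTTP GET and treats a 2xx/3xx response as healthy.
// Old target: GET /info  ->  New target: GET /
const res = await fetch('http://affine-graphql:3000/');
if (!(res.status >= 200 && res.status < 400)) {
  throw new Error(`probe failed: ${res.status}`);
}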

View File

@@ -4,10 +4,6 @@ metadata:
name: {{ include "graphql.fullname" . }}
labels:
{{- include "graphql.labels" . | nindent 4 }}
{{- with .Values.service.annotations }}
annotations:
{{- toYaml . | nindent 4 }}
{{- end }}
spec:
type: {{ .Values.service.type }}
ports:

View File

@@ -3,7 +3,7 @@ name: sync
description: AFFiNE Sync Server
type: application
version: 0.0.0
appVersion: "0.16.0"
appVersion: "0.15.0"
dependencies:
- name: gcloud-sql-proxy
version: 0.0.0

View File

@@ -27,9 +27,6 @@ spec:
- name: {{ .Chart.Name }}
image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
imagePullPolicy: {{ .Values.image.pullPolicy }}
env:
- name: AFFINE_ENV
value: "{{ .Release.Namespace }}"
ports:
- name: http
containerPort: {{ .Values.service.port }}

View File

@@ -60,6 +60,13 @@ spec:
name: affine-graphql
port:
number: {{ .Values.graphql.service.port }}
- path: /oauth
pathType: Prefix
backend:
service:
name: affine-graphql
port:
number: {{ .Values.graphql.service.port }}
- path: /
pathType: Prefix
backend:
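
The /oauth rule routes that prefix to the same affine-graphql service as the other API paths, ahead of the catch-all / rule. Illustratively (host and path are hypothetical):

// With the Prefix rule above, any request under /oauth is routed to
// affine-graphql:<graphql.service.port> rather than the web frontend.
await fetch('https://app.example.com/oauth/callback?code=abc123'); // hypothetical host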

View File

@@ -36,14 +36,14 @@ graphql:
type: ClusterIP
port: 3000
annotations:
cloud.google.com/backend-config: '{"default": "affine-api-backendconfig"}'
cloud.google.com/backend-config: '{"default": "affine-backendconfig"}'
sync:
service:
type: ClusterIP
port: 3010
annotations:
cloud.google.com/backend-config: '{"default": "affine-api-backendconfig"}'
cloud.google.com/backend-config: '{"default": "affine-backendconfig"}'
web:
service:

View File

@@ -1,10 +0,0 @@
apiVersion: cloud.google.com/v1
kind: BackendConfig
metadata:
name: "affine-api-backendconfig"
spec:
healthCheck:
timeoutSec: 1
type: HTTP
requestPath: /info
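
The deleted BackendConfig told Google Cloud's load balancer to health-check backends with a 1-second HTTP GET against /info. Roughly, per backend (the pod address is hypothetical):

// Equivalent of the deleted health check: HTTP GET /info with a 1s timeout.
const controller = new AbortController();
const timer = setTimeout(() => controller.abort(), 1_000); // timeoutSec: 1
try {
  const res = await fetch('http://10.0.0.12:3000/info', { signal: controller.signal }); // hypothetical pod IP
  console.log(res.ok ? 'healthy' : 'unhealthy');
} finally {
  clearTimeout(timer);
}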

View File

@@ -143,36 +143,6 @@ jobs:
path: ./test-results
if-no-files-found: ignore
e2e-mobile-test:
name: E2E Mobile Test
runs-on: ubuntu-latest
env:
DISTRIBUTION: mobile
IN_CI_TEST: true
strategy:
fail-fast: false
matrix:
shard: [1, 2, 3, 4, 5]
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
electron-install: false
full-cache: true
- name: Run playwright tests
run: yarn workspace @affine-test/affine-mobile e2e --forbid-only --shard=${{ matrix.shard }}/${{ strategy.job-total }}
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v4
with:
name: test-results-e2e-mobile-${{ matrix.shard }}
path: ./test-results
if-no-files-found: ignore
e2e-migration-test:
name: E2E Migration Test
runs-on: ubuntu-latest
@@ -210,7 +180,7 @@ jobs:
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: true
electron-install: false
full-cache: true
- name: Download affine.linux-x64-gnu.node
@@ -601,7 +571,6 @@ jobs:
- lint
- check-yarn-binary
- e2e-test
- e2e-mobile-test
- e2e-migration-test
- unit-test
- server-test

View File

@@ -21,47 +21,6 @@ permissions:
packages: 'write'
jobs:
output-prev-version:
name: Output previous version
runs-on: ubuntu-latest
environment: ${{ github.event.inputs.flavor }}
outputs:
prev: ${{ steps.print.outputs.version }}
namespace: ${{ steps.print.outputs.namespace }}
steps:
- uses: actions/checkout@v4
- name: Auth to Cluster
uses: './.github/actions/cluster-auth'
with:
gcp-project-number: ${{ secrets.GCP_PROJECT_NUMBER }}
gcp-project-id: ${{ secrets.GCP_PROJECT_ID }}
service-account: ${{ secrets.GCP_HELM_DEPLOY_SERVICE_ACCOUNT }}
cluster-name: ${{ secrets.GCP_CLUSTER_NAME }}
cluster-location: ${{ secrets.GCP_CLUSTER_LOCATION }}
- name: Output previous version
id: print
run: |
namespace=""
if [ "${{ github.event.inputs.flavor }}" = "canary" ]; then
namespace="dev"
elif [ "${{ github.event.inputs.flavor }}" = "beta" ]; then
namespace="beta"
elif [ "${{ github.event.inputs.flavor }}" = "stable" ]; then
namespace="production"
else
echo "Invalid flavor: ${{ github.event.inputs.flavor }}"
exit 1
fi
echo "Namespace set to: $namespace"
# Get the previous version from the deployment
prev_version=$(kubectl get deployment -n $namespace affine-graphql -o=jsonpath='{.spec.template.spec.containers[0].image}' | awk -F '-' '{print $3}')
echo "Previous version: $prev_version"
echo "version=$prev_version" >> $GITHUB_OUTPUT
echo "namesapce=$namespace" >> $GITHUB_OUTPUT
build-server-image:
name: Build Server Image
uses: ./.github/workflows/build-server-image.yml
@@ -112,7 +71,7 @@ jobs:
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Admin
- name: Build Core
run: yarn nx build @affine/admin --skip-nx-cache
env:
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
@@ -133,45 +92,12 @@ jobs:
path: ./packages/frontend/admin/dist
if-no-files-found: error
build-mobile:
name: Build @affine/mobile
runs-on: ubuntu-latest
environment: ${{ github.event.inputs.flavor }}
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Mobile
run: yarn nx build @affine/mobile --skip-nx-cache
env:
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
BUILD_TYPE: ${{ github.event.inputs.flavor }}
CAPTCHA_SITE_KEY: ${{ secrets.CAPTCHA_SITE_KEY }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: 'affine-mobile'
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
PERFSEE_TOKEN: ${{ secrets.PERFSEE_TOKEN }}
MIXPANEL_TOKEN: ${{ secrets.MIXPANEL_TOKEN }}
- name: Upload mobile artifact
uses: actions/upload-artifact@v4
with:
name: mobile
path: ./packages/frontend/mobile/dist
if-no-files-found: error
build-frontend-image:
name: Build Frontend Image
runs-on: ubuntu-latest
needs:
- build-web
- build-admin
- build-mobile
steps:
- uses: actions/checkout@v4
- name: Download web artifact
@@ -184,11 +110,6 @@ jobs:
with:
name: admin
path: ./packages/frontend/admin/dist
- name: Download mobile artifact
uses: actions/download-artifact@v4
with:
name: mobile
path: ./packages/frontend/mobile/dist
- name: Setup env
run: |
echo "GIT_SHORT_HASH=$(git rev-parse --short HEAD)" >> "$GITHUB_ENV"
@@ -272,98 +193,3 @@ jobs:
STRIPE_API_KEY: ${{ secrets.STRIPE_API_KEY }}
STRIPE_WEBHOOK_KEY: ${{ secrets.STRIPE_WEBHOOK_KEY }}
STATIC_IP_NAME: ${{ secrets.STATIC_IP_NAME }}
deploy-done:
needs:
- output-prev-version
- build-web
- build-admin
- build-mobile
- build-frontend-image
- build-server-image
- deploy
if: always()
runs-on: ubuntu-latest
name: Post deploy message
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: actions/checkout@v4
with:
repository: toeverything/blocksuite
path: blocksuite
fetch-depth: 0
fetch-tags: true
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
extra-flags: 'workspaces focus @affine/changelog'
electron-install: false
- name: Output deployed info
if: ${{ always() && !contains(needs.*.result, 'failure') && !contains(needs.*.result, 'cancelled') }}
id: set_info
run: |
if [ "${{ github.event.inputs.flavor }}" = "canary" ]; then
echo "deployed_url=https://affine.fail" >> $GITHUB_OUTPUT
elif [ "${{ github.event.inputs.flavor }}" = "beta" ]; then
echo "deployed_url=https://insider.affine.pro" >> $GITHUB_OUTPUT
elif [ "${{ github.event.inputs.flavor }}" = "stable" ]; then
echo "deployed_url=https://app.affine.pro" >> $GITHUB_OUTPUT
else
exit 1
fi
env:
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
- name: Post Success event to a Slack channel
if: ${{ always() && !contains(needs.*.result, 'failure') && !contains(needs.*.result, 'cancelled') }}
run: node ./tools/changelog/index.js
env:
CHANNEL_ID: ${{ secrets.RELEASE_SLACK_CHNNEL_ID }}
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
DEPLOYED_URL: ${{ steps.set_info.outputs.deployed_url }}
PREV_VERSION: ${{ needs.output-prev-version.outputs.prev }}
NAMESPACE: ${{ needs.output-prev-version.outputs.namespace }}
DEPLOYMENT: 'SERVER'
FLAVOR: ${{ github.event.inputs.flavor }}
BLOCKSUITE_REPO_PATH: ${{ github.workspace }}/blocksuite
- name: Post Failed event to a Slack channel
id: failed-slack
uses: slackapi/slack-github-action@v1.27.0
if: ${{ always() && contains(needs.*.result, 'failure') }}
with:
channel-id: ${{ secrets.RELEASE_SLACK_CHNNEL_ID }}
payload: |
{
"blocks": [
{
"type": "section",
"text": {
"text": "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Backend deploy failed `${{ github.event.inputs.flavor }}`>",
"type": "mrkdwn"
}
}
]
}
env:
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
- name: Post Cancel event to a Slack channel
id: cancel-slack
uses: slackapi/slack-github-action@v1.27.0
if: ${{ always() && contains(needs.*.result, 'cancelled') && !contains(needs.*.result, 'failure') }}
with:
channel-id: ${{ secrets.RELEASE_SLACK_CHNNEL_ID }}
payload: |
{
"blocks": [
{
"type": "section",
"text": {
"text": "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Backend deploy cancelled `${{ github.event.inputs.flavor }}`>",
"type": "mrkdwn"
}
}
]
}
env:
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}

View File

@@ -181,7 +181,7 @@ jobs:
name: affine-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}-builds
path: builds
package-distribution-windows:
make-distribution-windows-skip-signing:
strategy:
matrix:
spec:
@@ -191,8 +191,6 @@ jobs:
target: x86_64-pc-windows-msvc
runs-on: ${{ matrix.spec.runner }}
needs: before-make
outputs:
FILES_TO_BE_SIGNED: ${{ steps.get_files_to_be_signed.outputs.FILES_TO_BE_SIGNED }}
env:
SKIP_GENERATE_ASSETS: 1
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
@@ -232,111 +230,12 @@ jobs:
SKIP_WEB_BUILD: 1
HOIST_NODE_MODULES: 1
- name: get all files to be signed
id: get_files_to_be_signed
run: |
Set-Variable -Name FILES_TO_BE_SIGNED -Value ((Get-ChildItem -Path packages/frontend/electron/out -Recurse -File | Where-Object { $_.Extension -in @(".exe", ".node", ".dll", ".msi") } | ForEach-Object { '"' + $_.FullName.Replace((Get-Location).Path + '\packages\frontend\electron\out\', '') + '"' }) -join ' ')
"FILES_TO_BE_SIGNED=$FILES_TO_BE_SIGNED" >> $env:GITHUB_OUTPUT
echo $FILES_TO_BE_SIGNED
- name: Zip artifacts for faster upload
run: Compress-Archive -CompressionLevel Fastest -Path packages/frontend/electron/out/* -DestinationPath archive.zip
- name: Save packaged artifacts for signing
uses: actions/upload-artifact@v4
with:
name: packaged-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}
path: |
archive.zip
!**/*.map
sign-packaged-artifacts-windows:
needs: package-distribution-windows
uses: ./.github/workflows/windows-signer.yml
with:
files: ${{ needs.package-distribution-windows.outputs.FILES_TO_BE_SIGNED }}
artifact-name: packaged-win32-x64
make-windows-installer:
needs: sign-packaged-artifacts-windows
strategy:
matrix:
spec:
- runner: windows-latest
platform: win32
arch: x64
target: x86_64-pc-windows-msvc
runs-on: ${{ matrix.spec.runner }}
outputs:
FILES_TO_BE_SIGNED: ${{ steps.get_files_to_be_signed.outputs.FILES_TO_BE_SIGNED }}
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
timeout-minutes: 10
uses: ./.github/actions/setup-node
with:
extra-flags: workspaces focus @affine/electron @affine/monorepo
hard-link-nm: false
nmHoistingLimits: workspaces
- name: Download and overwrite packaged artifacts
uses: actions/download-artifact@v4
with:
name: signed-packaged-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}
path: .
- name: unzip file
run: Expand-Archive -Path signed.zip -DestinationPath packages/frontend/electron/out
- name: Make squirrel.windows installer
run: yarn workspace @affine/electron make-squirrel --platform=${{ matrix.spec.platform }} --arch=${{ matrix.spec.arch }}
- name: Make nsis.windows installer
run: yarn workspace @affine/electron make-nsis --platform=${{ matrix.spec.platform }} --arch=${{ matrix.spec.arch }}
- name: Zip artifacts for faster upload
run: Compress-Archive -CompressionLevel Fastest -Path packages/frontend/electron/out/${{ env.BUILD_TYPE }}/make/* -DestinationPath archive.zip
- name: get all files to be signed
id: get_files_to_be_signed
run: |
Set-Variable -Name FILES_TO_BE_SIGNED -Value ((Get-ChildItem -Path packages/frontend/electron/out/${{ env.BUILD_TYPE }}/make -Recurse -File | Where-Object { $_.Extension -in @(".exe", ".node", ".dll", ".msi") } | ForEach-Object { '"' + $_.FullName.Replace((Get-Location).Path + '\packages\frontend\electron\out\${{ env.BUILD_TYPE }}\make\', '') + '"' }) -join ' ')
"FILES_TO_BE_SIGNED=$FILES_TO_BE_SIGNED" >> $env:GITHUB_OUTPUT
echo $FILES_TO_BE_SIGNED
- name: Save installer for signing
uses: actions/upload-artifact@v4
with:
name: installer-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}
path: archive.zip
sign-installer-artifacts-windows:
needs: make-windows-installer
uses: ./.github/workflows/windows-signer.yml
with:
files: ${{ needs.make-windows-installer.outputs.FILES_TO_BE_SIGNED }}
artifact-name: installer-win32-x64
finalize-installer-windows:
needs: [sign-installer-artifacts-windows, before-make]
strategy:
matrix:
spec:
- runner: windows-latest
platform: win32
arch: x64
target: x86_64-pc-windows-msvc
runs-on: ${{ matrix.spec.runner }}
steps:
- name: Download and overwrite installer artifacts
uses: actions/download-artifact@v4
with:
name: signed-installer-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}
path: .
- name: unzip file
run: Expand-Archive -Path signed.zip -DestinationPath packages/frontend/electron/out/${{ env.BUILD_TYPE }}/make
- name: Save artifacts
run: |
mkdir -p builds
@@ -357,8 +256,180 @@ jobs:
name: affine-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}-builds
path: builds
# package-distribution-windows:
# strategy:
# matrix:
# spec:
# - runner: windows-latest
# platform: win32
# arch: x64
# target: x86_64-pc-windows-msvc
# runs-on: ${{ matrix.spec.runner }}
# needs: before-make
# outputs:
# FILES_TO_BE_SIGNED: ${{ steps.get_files_to_be_signed.outputs.FILES_TO_BE_SIGNED }}
# env:
# SKIP_GENERATE_ASSETS: 1
# SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
# SENTRY_PROJECT: 'affine'
# SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
# SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
# MIXPANEL_TOKEN: ${{ secrets.MIXPANEL_TOKEN }}
# steps:
# - uses: actions/checkout@v4
# - name: Setup Version
# id: version
# uses: ./.github/actions/setup-version
# - name: Setup Node.js
# timeout-minutes: 10
# uses: ./.github/actions/setup-node
# with:
# extra-flags: workspaces focus @affine/electron @affine/monorepo
# hard-link-nm: false
# nmHoistingLimits: workspaces
# - name: Build AFFiNE native
# uses: ./.github/actions/build-rust
# with:
# target: ${{ matrix.spec.target }}
# package: '@affine/native'
# nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
# - uses: actions/download-artifact@v4
# with:
# name: web
# path: packages/frontend/electron/resources/web-static
# - name: Build Desktop Layers
# run: yarn workspace @affine/electron build
# - name: package
# run: yarn workspace @affine/electron package --platform=${{ matrix.spec.platform }} --arch=${{ matrix.spec.arch }}
# env:
# SKIP_WEB_BUILD: 1
# HOIST_NODE_MODULES: 1
# - name: get all files to be signed
# id: get_files_to_be_signed
# run: |
# Set-Variable -Name FILES_TO_BE_SIGNED -Value ((Get-ChildItem -Path packages/frontend/electron/out -Recurse -File | Where-Object { $_.Extension -in @(".exe", ".node", ".dll", ".msi") } | ForEach-Object { '"' + $_.FullName.Replace((Get-Location).Path + '\packages\frontend\electron\out\', '') + '"' }) -join ' ')
# "FILES_TO_BE_SIGNED=$FILES_TO_BE_SIGNED" >> $env:GITHUB_OUTPUT
# echo $FILES_TO_BE_SIGNED
# - name: Zip artifacts for faster upload
# run: Compress-Archive -CompressionLevel Fastest -Path packages/frontend/electron/out/* -DestinationPath archive.zip
# - name: Save packaged artifacts for signing
# uses: actions/upload-artifact@v4
# with:
# name: packaged-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}
# path: |
# archive.zip
# !**/*.map
# sign-packaged-artifacts-windows:
# needs: package-distribution-windows
# uses: ./.github/workflows/windows-signer.yml
# with:
# files: ${{ needs.package-distribution-windows.outputs.FILES_TO_BE_SIGNED }}
# artifact-name: packaged-win32-x64
# make-windows-installer:
# needs: sign-packaged-artifacts-windows
# strategy:
# matrix:
# spec:
# - runner: windows-latest
# platform: win32
# arch: x64
# target: x86_64-pc-windows-msvc
# runs-on: ${{ matrix.spec.runner }}
# outputs:
# FILES_TO_BE_SIGNED: ${{ steps.get_files_to_be_signed.outputs.FILES_TO_BE_SIGNED }}
# steps:
# - uses: actions/checkout@v4
# - name: Setup Version
# id: version
# uses: ./.github/actions/setup-version
# - name: Setup Node.js
# timeout-minutes: 10
# uses: ./.github/actions/setup-node
# with:
# extra-flags: workspaces focus @affine/electron @affine/monorepo
# hard-link-nm: false
# nmHoistingLimits: workspaces
# - name: Download and overwrite packaged artifacts
# uses: actions/download-artifact@v4
# with:
# name: signed-packaged-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}
# path: .
# - name: unzip file
# run: Expand-Archive -Path signed.zip -DestinationPath packages/frontend/electron/out
# - name: Make squirrel.windows installer
# run: yarn workspace @affine/electron make-squirrel --platform=${{ matrix.spec.platform }} --arch=${{ matrix.spec.arch }}
# - name: Make nsis.windows installer
# run: yarn workspace @affine/electron make-nsis --platform=${{ matrix.spec.platform }} --arch=${{ matrix.spec.arch }}
# - name: Zip artifacts for faster upload
# run: Compress-Archive -CompressionLevel Fastest -Path packages/frontend/electron/out/${{ env.BUILD_TYPE }}/make/* -DestinationPath archive.zip
# - name: get all files to be signed
# id: get_files_to_be_signed
# run: |
# Set-Variable -Name FILES_TO_BE_SIGNED -Value ((Get-ChildItem -Path packages/frontend/electron/out/${{ env.BUILD_TYPE }}/make -Recurse -File | Where-Object { $_.Extension -in @(".exe", ".node", ".dll", ".msi") } | ForEach-Object { '"' + $_.FullName.Replace((Get-Location).Path + '\packages\frontend\electron\out\${{ env.BUILD_TYPE }}\make\', '') + '"' }) -join ' ')
# "FILES_TO_BE_SIGNED=$FILES_TO_BE_SIGNED" >> $env:GITHUB_OUTPUT
# echo $FILES_TO_BE_SIGNED
# - name: Save installer for signing
# uses: actions/upload-artifact@v4
# with:
# name: installer-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}
# path: archive.zip
# sign-installer-artifacts-windows:
# needs: make-windows-installer
# uses: ./.github/workflows/windows-signer.yml
# with:
# files: ${{ needs.make-windows-installer.outputs.FILES_TO_BE_SIGNED }}
# artifact-name: installer-win32-x64
# finalize-installer-windows:
# needs: [sign-installer-artifacts-windows, before-make]
# strategy:
# matrix:
# spec:
# - runner: windows-latest
# platform: win32
# arch: x64
# target: x86_64-pc-windows-msvc
# runs-on: ${{ matrix.spec.runner }}
# steps:
# - name: Download and overwrite installer artifacts
# uses: actions/download-artifact@v4
# with:
# name: signed-installer-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}
# path: .
# - name: unzip file
# run: Expand-Archive -Path signed.zip -DestinationPath packages/frontend/electron/out/${{ env.BUILD_TYPE }}/make
# - name: Save artifacts
# run: |
# mkdir -p builds
# mv packages/frontend/electron/out/*/make/zip/win32/x64/AFFiNE*-win32-x64-*.zip ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-x64.zip
# mv packages/frontend/electron/out/*/make/squirrel.windows/x64/*.exe ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-x64.exe
# mv packages/frontend/electron/out/*/make/nsis.windows/x64/*.exe ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-x64.nsis.exe
# - name: Upload Artifact
# uses: actions/upload-artifact@v4
# with:
# name: affine-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}-builds
# path: builds
release:
needs: [before-make, make-distribution, finalize-installer-windows]
needs:
- before-make
- make-distribution
- make-distribution-windows-skip-signing
runs-on: ubuntu-latest
steps:

.nvmrc (2 changed lines)
View File

@@ -1 +1 @@
20.17.0
20.15.1

.yarn/releases/yarn-4.3.1.cjs (vendored executable file, 894 changed lines)

File diff suppressed because one or more lines are too long

View File

@@ -12,4 +12,4 @@ npmPublishAccess: public
npmPublishRegistry: "https://registry.npmjs.org"
yarnPath: .yarn/releases/yarn-4.4.1.cjs
yarnPath: .yarn/releases/yarn-4.3.1.cjs

Cargo.lock (generated file, 617 changed lines)

File diff suppressed because it is too large

View File

@@ -18,7 +18,7 @@ rand = "0.8"
serde = "1"
serde_json = "1"
sha3 = "0.10"
sqlx = { version = "0.8", default-features = false, features = ["chrono", "macros", "migrate", "runtime-tokio", "sqlite", "tls-rustls"] }
sqlx = { version = "0.7", default-features = false, features = ["chrono", "macros", "migrate", "runtime-tokio", "sqlite", "tls-rustls"] }
tiktoken-rs = "0.5"
tokio = "1.37"
uuid = "1.8"

View File

@@ -23,7 +23,7 @@
<div align="center">
<a href="https://affine.pro">Home Page</a> |
<a href="https://discord.gg/whd5mjYqVw">Discord</a> |
<a href="https://discord.com/invite/yz6tGVsf5p">Discord</a> |
<a href="https://app.affine.pro">Live Demo</a> |
<a href="https://affine.pro/blog/">Blog</a> |
<a href="https://docs.affine.pro/docs/">Documentation</a>
@@ -176,12 +176,6 @@ Thanks to [Chromatic](https://www.chromatic.com/) for providing the visual testi
## License
### Editions
- AFFiNE Community Edition (CE) is the currently available version; it is free to self-host under the MIT license.
- AFFiNE Enterprise Edition (EE) is yet to be published; it will have more advanced features and enterprise-oriented offerings, including but not limited to rebranding, SSO, and advanced admin and audit capabilities. You may refer to https://affine.pro/pricing for more information.
See [LICENSE] for details.
[all-contributors-badge]: https://img.shields.io/github/contributors/toeverything/AFFiNE

View File

@@ -19,5 +19,5 @@
],
"ext": "ts,md,json"
},
"version": "0.16.0"
"version": "0.15.0"
}

View File

@@ -1,6 +1,6 @@
{
"name": "@affine/monorepo",
"version": "0.16.0",
"version": "0.15.0",
"private": true,
"author": "toeverything",
"license": "MIT",
@@ -56,26 +56,26 @@
"@affine/cli": "workspace:*",
"@commitlint/cli": "^19.2.1",
"@commitlint/config-conventional": "^19.1.0",
"@faker-js/faker": "^9.0.0",
"@faker-js/faker": "^8.4.1",
"@istanbuljs/schema": "^0.1.3",
"@magic-works/i18n-codegen": "^0.6.0",
"@nx/vite": "^19.5.3",
"@playwright/test": "=1.46.1",
"@playwright/test": "=1.44.1",
"@taplo/cli": "^0.7.0",
"@testing-library/react": "^16.0.0",
"@toeverything/infra": "workspace:*",
"@types/affine__env": "workspace:*",
"@types/eslint": "^9.0.0",
"@types/eslint": "^8.56.7",
"@types/node": "^20.12.7",
"@typescript-eslint/eslint-plugin": "^7.6.0",
"@typescript-eslint/parser": "^7.6.0",
"@vanilla-extract/vite-plugin": "^4.0.7",
"@vanilla-extract/webpack-plugin": "^2.3.7",
"@vitejs/plugin-react-swc": "^3.6.0",
"@vitest/coverage-istanbul": "2.0.5",
"@vitest/ui": "2.0.5",
"@vitest/coverage-istanbul": "1.6.0",
"@vitest/ui": "1.6.0",
"cross-env": "^7.0.3",
"electron": "^31.0.0",
"electron": "~30.2.0",
"eslint": "^8.57.0",
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-import-x": "^0.5.0",
@@ -88,15 +88,15 @@
"eslint-plugin-unused-imports": "^3.1.0",
"eslint-plugin-vue": "^9.24.1",
"fake-indexeddb": "6.0.0",
"happy-dom": "^15.0.0",
"happy-dom": "^14.7.1",
"husky": "^9.0.11",
"lint-staged": "^15.2.2",
"msw": "^2.3.0",
"nanoid": "^5.0.7",
"nx": "^19.0.0",
"nyc": "^17.0.0",
"oxlint": "0.9.2",
"prettier": "^3.3.3",
"oxlint": "0.6.1",
"prettier": "^3.2.5",
"semver": "^7.6.0",
"serve": "^14.2.1",
"string-width": "^7.1.0",
@@ -106,11 +106,11 @@
"vite": "^5.2.8",
"vite-plugin-istanbul": "^6.0.0",
"vite-plugin-static-copy": "^1.0.2",
"vitest": "2.0.5",
"vitest": "1.6.0",
"vitest-fetch-mock": "^0.3.0",
"vitest-mock-extended": "^2.0.0"
"vitest-mock-extended": "^1.3.1"
},
"packageManager": "yarn@4.4.1",
"packageManager": "yarn@4.3.1",
"resolutions": {
"array-buffer-byte-length": "npm:@nolyfill/array-buffer-byte-length@latest",
"array-includes": "npm:@nolyfill/array-includes@latest",

View File

@@ -1,6 +1,6 @@
{
"name": "@affine/server-native",
"version": "0.16.0",
"version": "0.15.0",
"engines": {
"node": ">= 10.16.0 < 11 || >= 11.8.0"
},
@@ -33,7 +33,7 @@
"build:debug": "napi build"
},
"devDependencies": {
"@napi-rs/cli": "3.0.0-alpha.62",
"@napi-rs/cli": "3.0.0-alpha.60",
"lib0": "^0.2.93",
"nx": "^19.0.0",
"nx-cloud": "^19.0.0",

View File

@@ -1,2 +1 @@
.env
static/

View File

@@ -1,146 +0,0 @@
-- AlterTable
ALTER TABLE "_data_migrations" ALTER COLUMN "id" SET DATA TYPE VARCHAR,
ALTER COLUMN "started_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "finished_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "ai_prompts_messages" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "ai_prompts_metadata" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "ai_sessions_messages" ALTER COLUMN "id" SET DATA TYPE VARCHAR,
ALTER COLUMN "session_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "ai_sessions_metadata" ALTER COLUMN "id" SET DATA TYPE VARCHAR,
ALTER COLUMN "user_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "workspace_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "doc_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "deleted_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "parent_session_id" SET DATA TYPE VARCHAR;
-- AlterTable
ALTER TABLE "app_runtime_settings" ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "deleted_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "last_updated_by" SET DATA TYPE VARCHAR;
-- AlterTable
ALTER TABLE "features" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "multiple_users_sessions" ALTER COLUMN "id" SET DATA TYPE VARCHAR,
ALTER COLUMN "expires_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "snapshot_histories"
ALTER COLUMN "workspace_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "guid" SET DATA TYPE VARCHAR,
ALTER COLUMN "timestamp" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "expired_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "snapshots" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "updates" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "user_connected_accounts" ALTER COLUMN "id" SET DATA TYPE VARCHAR,
ALTER COLUMN "user_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "expires_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "user_features" ALTER COLUMN "user_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "expired_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "user_invoices" ALTER COLUMN "user_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "user_sessions" ALTER COLUMN "id" SET DATA TYPE VARCHAR,
ALTER COLUMN "session_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "user_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "expires_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "user_stripe_customers" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "user_subscriptions" ALTER COLUMN "user_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "start" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "end" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "next_bill_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "canceled_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "trial_start" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "trial_end" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "users" ALTER COLUMN "name" SET DATA TYPE VARCHAR,
ALTER COLUMN "email" SET DATA TYPE VARCHAR,
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "verification_tokens" ALTER COLUMN "token" SET DATA TYPE VARCHAR,
ALTER COLUMN "expiresAt" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "workspace_features" ALTER COLUMN "workspace_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3),
ALTER COLUMN "expired_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "workspace_page_user_permissions"
ALTER COLUMN "id" SET DATA TYPE VARCHAR,
ALTER COLUMN "workspace_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "page_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "user_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "workspace_pages" ALTER COLUMN "workspace_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "page_id" SET DATA TYPE VARCHAR;
-- AlterTable
ALTER TABLE "workspace_user_permissions" ALTER COLUMN "id" SET DATA TYPE VARCHAR,
ALTER COLUMN "workspace_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "user_id" SET DATA TYPE VARCHAR,
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
-- AlterTable
ALTER TABLE "workspaces" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMP(3);
-- DropTable
DROP TABLE "accounts";
-- DropTable
DROP TABLE "blobs";
-- DropTable
DROP TABLE "new_features_waiting_list";
-- DropTable
DROP TABLE "optimized_blobs";
-- DropTable
DROP TABLE "sessions";
-- DropTable
DROP TABLE "user_workspace_permissions";
-- DropTable
DROP TABLE "verificationtokens";

View File

@@ -1,95 +0,0 @@
/*
Warnings:
- The primary key for the `snapshot_histories` table will be changed. If it partially fails, the table could be left without primary key constraint.
*/
-- AlterTable
ALTER TABLE "_data_migrations" ALTER COLUMN "started_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "finished_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "ai_prompts_messages" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "ai_prompts_metadata" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "ai_sessions_messages" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "ai_sessions_metadata" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "deleted_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "app_runtime_settings" ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "deleted_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "features" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "multiple_users_sessions" ALTER COLUMN "expires_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "snapshot_histories" ALTER COLUMN "timestamp" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "expired_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "snapshots" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "updates" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "user_connected_accounts" ALTER COLUMN "expires_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "user_features" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "expired_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "user_invoices" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "user_sessions" ALTER COLUMN "expires_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "user_stripe_customers" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "user_subscriptions" ALTER COLUMN "start" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "end" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "next_bill_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "canceled_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "trial_start" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "trial_end" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "updated_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "users" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "email_verified" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "verification_tokens" ALTER COLUMN "expiresAt" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "workspace_features" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3),
ALTER COLUMN "expired_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "workspace_page_user_permissions" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "workspace_user_permissions" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);
-- AlterTable
ALTER TABLE "workspaces" ALTER COLUMN "created_at" SET DATA TYPE TIMESTAMPTZ(3);

View File

@@ -1,3 +0,0 @@
-- AlterTable
ALTER TABLE "ai_prompts_metadata" ADD COLUMN "modified" BOOLEAN NOT NULL DEFAULT false,
ADD COLUMN "updated_at" TIMESTAMPTZ(3) NOT NULL DEFAULT CURRENT_TIMESTAMP;

View File

@@ -1,13 +0,0 @@
-- CreateTable
CREATE TABLE "user_snapshots" (
"user_id" VARCHAR NOT NULL,
"id" VARCHAR NOT NULL,
"blob" BYTEA NOT NULL,
"created_at" TIMESTAMPTZ(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updated_at" TIMESTAMPTZ(3) NOT NULL,
CONSTRAINT "user_snapshots_pkey" PRIMARY KEY ("user_id","id")
);
-- AddForeignKey
ALTER TABLE "user_snapshots" ADD CONSTRAINT "user_snapshots_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;

View File

@@ -1,14 +0,0 @@
/*
Warnings:
- The primary key for the `updates` table will be changed. If it partially fails, the table could be left without primary key constraint.
*/
-- AlterTable
ALTER TABLE "snapshots" ALTER COLUMN "seq" DROP NOT NULL;
-- AlterTable
ALTER TABLE "updates" DROP CONSTRAINT "updates_pkey",
ALTER COLUMN "created_at" DROP DEFAULT,
ALTER COLUMN "seq" DROP NOT NULL,
ADD CONSTRAINT "updates_pkey" PRIMARY KEY ("workspace_id", "guid", "created_at");

View File

@@ -1,21 +0,0 @@
-- AlterTable
ALTER TABLE "snapshot_histories" ADD COLUMN "created_by" VARCHAR;
-- AlterTable
ALTER TABLE "snapshots" ADD COLUMN "created_by" VARCHAR,
ADD COLUMN "updated_by" VARCHAR;
-- AlterTable
ALTER TABLE "updates" ADD COLUMN "created_by" VARCHAR;
-- AddForeignKey
ALTER TABLE "snapshots" ADD CONSTRAINT "snapshots_created_by_fkey" FOREIGN KEY ("created_by") REFERENCES "users"("id") ON DELETE SET NULL ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "snapshots" ADD CONSTRAINT "snapshots_updated_by_fkey" FOREIGN KEY ("updated_by") REFERENCES "users"("id") ON DELETE SET NULL ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "updates" ADD CONSTRAINT "updates_created_by_fkey" FOREIGN KEY ("created_by") REFERENCES "users"("id") ON DELETE SET NULL ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "snapshot_histories" ADD CONSTRAINT "snapshot_histories_created_by_fkey" FOREIGN KEY ("created_by") REFERENCES "users"("id") ON DELETE SET NULL ON UPDATE CASCADE;

View File

@@ -1,7 +1,7 @@
{
"name": "@affine/server",
"private": true,
"version": "0.16.0",
"version": "0.15.0",
"description": "Affine Node.js server",
"type": "module",
"bin": {
@@ -21,11 +21,11 @@
"dependencies": {
"@apollo/server": "^4.10.2",
"@aws-sdk/client-s3": "^3.620.0",
"@fal-ai/serverless-client": "^0.14.0",
"@fal-ai/serverless-client": "^0.13.0",
"@google-cloud/opentelemetry-cloud-monitoring-exporter": "^0.19.0",
"@google-cloud/opentelemetry-cloud-trace-exporter": "^2.2.0",
"@google-cloud/opentelemetry-resource-util": "^2.2.0",
"@keyv/redis": "^3.0.0",
"@keyv/redis": "^2.8.4",
"@nestjs/apollo": "^12.1.0",
"@nestjs/common": "^10.3.7",
"@nestjs/core": "^10.3.7",
@@ -34,25 +34,26 @@
"@nestjs/platform-express": "^10.3.7",
"@nestjs/platform-socket.io": "^10.3.7",
"@nestjs/schedule": "^4.0.1",
"@nestjs/throttler": "6.2.1",
"@nestjs/serve-static": "^4.0.2",
"@nestjs/throttler": "5.2.0",
"@nestjs/websockets": "^10.3.7",
"@node-rs/argon2": "^1.8.0",
"@node-rs/crc32": "^1.10.0",
"@node-rs/jsonwebtoken": "^0.5.2",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/core": "^1.25.0",
"@opentelemetry/exporter-prometheus": "^0.53.0",
"@opentelemetry/exporter-prometheus": "^0.52.0",
"@opentelemetry/exporter-zipkin": "^1.25.0",
"@opentelemetry/host-metrics": "^0.35.2",
"@opentelemetry/instrumentation": "^0.53.0",
"@opentelemetry/instrumentation-graphql": "^0.43.0",
"@opentelemetry/instrumentation-http": "^0.53.0",
"@opentelemetry/instrumentation-ioredis": "^0.43.0",
"@opentelemetry/instrumentation-nestjs-core": "^0.40.0",
"@opentelemetry/instrumentation-socket.io": "^0.42.0",
"@opentelemetry/instrumentation": "^0.52.0",
"@opentelemetry/instrumentation-graphql": "^0.42.0",
"@opentelemetry/instrumentation-http": "^0.52.0",
"@opentelemetry/instrumentation-ioredis": "^0.42.0",
"@opentelemetry/instrumentation-nestjs-core": "^0.39.0",
"@opentelemetry/instrumentation-socket.io": "^0.41.0",
"@opentelemetry/resources": "^1.25.0",
"@opentelemetry/sdk-metrics": "^1.25.0",
"@opentelemetry/sdk-node": "^0.53.0",
"@opentelemetry/sdk-node": "^0.52.0",
"@opentelemetry/sdk-trace-node": "^1.25.0",
"@opentelemetry/semantic-conventions": "^1.25.0",
"@prisma/client": "^5.15.0",
@@ -70,13 +71,13 @@
"graphql-upload": "^16.0.2",
"html-validate": "^8.20.1",
"ioredis": "^5.3.2",
"keyv": "^5.0.0",
"keyv": "^4.5.4",
"lodash-es": "^4.17.21",
"mixpanel": "^0.18.0",
"mustache": "^4.2.0",
"nanoid": "^5.0.7",
"nest-commander": "^3.12.5",
"nestjs-throttler-storage-redis": "^0.5.0",
"nestjs-throttler-storage-redis": "^0.4.1",
"nodemailer": "^6.9.13",
"on-headers": "^1.0.2",
"openai": "^4.33.0",
@@ -137,7 +138,6 @@
],
"watchMode": {
"ignoreChanges": [
"static/**",
"**/*.gen.*"
]
},

View File

@@ -11,18 +11,18 @@ datasource db {
model User {
id String @id @default(uuid()) @db.VarChar
name String @db.VarChar
email String @unique @db.VarChar
emailVerifiedAt DateTime? @map("email_verified") @db.Timestamptz(3)
name String
email String @unique
emailVerifiedAt DateTime? @map("email_verified")
avatarUrl String? @map("avatar_url") @db.VarChar
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
/// Not available if user signed up through OAuth providers
password String? @db.VarChar
/// Indicates whether the user finished the signup process.
/// For example, the value will be false if the user never registered and was invited into a workspace by others.
registered Boolean @default(true)
features UserFeature[]
features UserFeatures[]
customer UserStripeCustomer?
subscriptions UserSubscription[]
invoices UserInvoice[]
@@ -32,27 +32,22 @@ model User {
sessions UserSession[]
aiSessions AiSession[]
updatedRuntimeConfigs RuntimeConfig[]
userSnapshots UserSnapshot[]
createdSnapshot Snapshot[] @relation("createdSnapshot")
updatedSnapshot Snapshot[] @relation("updatedSnapshot")
createdUpdate Update[] @relation("createdUpdate")
createdHistory SnapshotHistory[] @relation("createdHistory")
@@index([email])
@@map("users")
}
model ConnectedAccount {
id String @id @default(uuid()) @db.VarChar
userId String @map("user_id") @db.VarChar
id String @id @default(uuid()) @db.VarChar(36)
userId String @map("user_id") @db.VarChar(36)
provider String @db.VarChar
providerAccountId String @map("provider_account_id") @db.VarChar
scope String? @db.Text
accessToken String? @map("access_token") @db.Text
refreshToken String? @map("refresh_token") @db.Text
expiresAt DateTime? @map("expires_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(3)
expiresAt DateTime? @map("expires_at") @db.Timestamptz(6)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(6)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
@@ -62,22 +57,21 @@ model ConnectedAccount {
}
model Session {
id String @id @default(uuid()) @db.VarChar
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
userSessions UserSession[]
id String @id @default(uuid()) @db.VarChar(36)
expiresAt DateTime? @map("expires_at") @db.Timestamptz(6)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
// @deprecated use [UserSession.expiresAt]
deprecated_expiresAt DateTime? @map("expires_at") @db.Timestamptz(3)
userSessions UserSession[]
@@map("multiple_users_sessions")
}
model UserSession {
id String @id @default(uuid()) @db.VarChar
sessionId String @map("session_id") @db.VarChar
userId String @map("user_id") @db.VarChar
expiresAt DateTime? @map("expires_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
id String @id @default(uuid()) @db.VarChar(36)
sessionId String @map("session_id") @db.VarChar(36)
userId String @map("user_id") @db.VarChar(36)
expiresAt DateTime? @map("expires_at") @db.Timestamptz(6)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
session Session @relation(fields: [sessionId], references: [id], onDelete: Cascade)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
@@ -87,10 +81,10 @@ model UserSession {
}
model VerificationToken {
token String @db.VarChar
token String @db.VarChar(36)
type Int @db.SmallInt
credential String? @db.Text
expiresAt DateTime @db.Timestamptz(3)
expiresAt DateTime @db.Timestamptz(6)
@@unique([type, token])
@@map("verification_tokens")
@@ -99,12 +93,12 @@ model VerificationToken {
model Workspace {
id String @id @default(uuid()) @db.VarChar
public Boolean
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
pages WorkspacePage[]
permissions WorkspaceUserPermission[]
pagePermissions WorkspacePageUserPermission[]
features WorkspaceFeature[]
features WorkspaceFeatures[]
@@map("workspaces")
}
@@ -115,8 +109,8 @@ model Workspace {
// Only the ones that have ever changed will have records here,
// and for others we will make sure it has a default value returned in our business logic.
model WorkspacePage {
workspaceId String @map("workspace_id") @db.VarChar
pageId String @map("page_id") @db.VarChar
workspaceId String @map("workspace_id") @db.VarChar(36)
pageId String @map("page_id") @db.VarChar(36)
public Boolean @default(false)
// Page/Edgeless
mode Int @default(0) @db.SmallInt
@@ -127,15 +121,31 @@ model WorkspacePage {
@@map("workspace_pages")
}
model WorkspaceUserPermission {
// @deprecated, use WorkspaceUserPermission
model DeprecatedUserWorkspacePermission {
id String @id @default(uuid()) @db.VarChar
workspaceId String @map("workspace_id") @db.VarChar
userId String @map("user_id") @db.VarChar
subPageId String? @map("sub_page_id") @db.VarChar
userId String? @map("entity_id") @db.VarChar
/// Read/Write/Admin/Owner
type Int @db.SmallInt
/// Whether the permission invitation is accepted by the user
accepted Boolean @default(false)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
@@unique([workspaceId, subPageId, userId])
@@map("user_workspace_permissions")
}
model WorkspaceUserPermission {
id String @id @default(uuid()) @db.VarChar(36)
workspaceId String @map("workspace_id") @db.VarChar(36)
userId String @map("user_id") @db.VarChar(36)
// Read/Write
type Int @db.SmallInt
/// Whether the permission invitation is accepted by the user
accepted Boolean @default(false)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
@@ -145,15 +155,15 @@ model WorkspaceUserPermission {
}
model WorkspacePageUserPermission {
id String @id @default(uuid()) @db.VarChar
workspaceId String @map("workspace_id") @db.VarChar
pageId String @map("page_id") @db.VarChar
userId String @map("user_id") @db.VarChar
id String @id @default(uuid()) @db.VarChar(36)
workspaceId String @map("workspace_id") @db.VarChar(36)
pageId String @map("page_id") @db.VarChar(36)
userId String @map("user_id") @db.VarChar(36)
// Read/Write
type Int @db.SmallInt
/// Whether the permission invitation is accepted by the user
accepted Boolean @default(false)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
@@ -166,9 +176,9 @@ model WorkspacePageUserPermission {
// for example:
// - early access is a feature that allows some users to access the insider version
// - pro plan is a quota that allows some users access to more resources after they pay
model UserFeature {
model UserFeatures {
id Int @id @default(autoincrement())
userId String @map("user_id") @db.VarChar
userId String @map("user_id") @db.VarChar(36)
featureId Int @map("feature_id") @db.Integer
// we will record the reason why the feature is enabled/disabled
@@ -176,16 +186,16 @@ model UserFeature {
// - pro_plan_v1: "user buy the pro plan"
reason String @db.VarChar
// record the quota enabled time
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
// record the quota expired time, pay plan is a subscription, so it will expired
expiredAt DateTime? @map("expired_at") @db.Timestamptz(3)
expiredAt DateTime? @map("expired_at") @db.Timestamptz(6)
// whether the feature is activated
// for example:
// - if we switch the user to another plan, we will set the old plan to deactivated, but don't delete it
activated Boolean @default(false)
feature Feature @relation(fields: [featureId], references: [id], onDelete: Cascade)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
feature Features @relation(fields: [featureId], references: [id], onDelete: Cascade)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
@@index([userId])
@@map("user_features")
@@ -194,9 +204,9 @@ model UserFeature {
// feature gates are a way to enable/disable features for a workspace
// for example:
// - copilot is a feature that allows some users in a workspace to access the copilot feature
model WorkspaceFeature {
model WorkspaceFeatures {
id Int @id @default(autoincrement())
workspaceId String @map("workspace_id") @db.VarChar
workspaceId String @map("workspace_id") @db.VarChar(36)
featureId Int @map("feature_id") @db.Integer
// we will record the reason why the feature is enabled/disabled
@@ -204,21 +214,21 @@ model WorkspaceFeature {
// - copilet_v1: "owner buy the copilet feature package"
reason String @db.VarChar
// record the feature enabled time
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
// record the quota expired time, pay plan is a subscription, so it will expired
expiredAt DateTime? @map("expired_at") @db.Timestamptz(3)
expiredAt DateTime? @map("expired_at") @db.Timestamptz(6)
// whether the feature is activated
// for example:
// - if the owner unsubscribes a feature package, we will set the feature to deactivated, but don't delete it
activated Boolean @default(false)
feature Feature @relation(fields: [featureId], references: [id], onDelete: Cascade)
feature Features @relation(fields: [featureId], references: [id], onDelete: Cascade)
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
@@map("workspace_features")
}
model Feature {
model Features {
id Int @id @default(autoincrement())
feature String @db.VarChar
version Int @default(0) @db.Integer
@@ -226,98 +236,135 @@ model Feature {
type Int @db.Integer
// configs, defined by the feature controller
configs Json @db.Json
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
UserFeatureGates UserFeature[]
WorkspaceFeatures WorkspaceFeature[]
UserFeatureGates UserFeatures[]
WorkspaceFeatures WorkspaceFeatures[]
@@unique([feature, version])
@@map("features")
}
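
Because `(feature, version)` is unique, the current definition of a feature is the row with the highest version; a lookup sketch reusing the assumed client above (feature key illustrative):

const latest = await db.features.findFirst({
  where: { feature: 'early_access' },
  orderBy: { version: 'desc' },
});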
model DeprecatedNextAuthAccount {
id String @id @default(cuid())
userId String @map("user_id")
type String
provider String
providerAccountId String @map("provider_account_id")
refresh_token String? @db.Text
access_token String? @db.Text
expires_at Int?
token_type String?
scope String?
id_token String? @db.Text
session_state String?
@@unique([provider, providerAccountId])
@@map("accounts")
}
model DeprecatedNextAuthSession {
id String @id @default(cuid())
sessionToken String @unique @map("session_token")
userId String @map("user_id")
expires DateTime
@@map("sessions")
}
model DeprecatedNextAuthVerificationToken {
identifier String
token String @unique
expires DateTime
@@unique([identifier, token])
@@map("verificationtokens")
}
// deprecated, use [ObjectStorage]
model Blob {
id Int @id @default(autoincrement()) @db.Integer
hash String @db.VarChar
workspaceId String @map("workspace_id") @db.VarChar
blob Bytes @db.ByteA
length BigInt
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
// not for keeping, but for snapshot history
deletedAt DateTime? @map("deleted_at") @db.Timestamptz(6)
@@unique([workspaceId, hash])
@@map("blobs")
}
// deprecated, use [ObjectStorage]
model OptimizedBlob {
id Int @id @default(autoincrement()) @db.Integer
hash String @db.VarChar
workspaceId String @map("workspace_id") @db.VarChar
params String @db.VarChar
blob Bytes @db.ByteA
length BigInt
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
// not for keeping, but for snapshot history
deletedAt DateTime? @map("deleted_at") @db.Timestamptz(6)
@@unique([workspaceId, hash, params])
@@map("optimized_blobs")
}
// the latest snapshot of each doc that we've seen
// Snapshot + Updates are the latest state of the doc
model Snapshot {
workspaceId String @map("workspace_id") @db.VarChar
id String @default(uuid()) @map("guid") @db.VarChar
blob Bytes @db.ByteA
seq Int @default(0) @db.Integer
state Bytes? @db.ByteA
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
// the `updated_at` field does not record when the row last changed,
// but the creation time of the last seen update that has been merged into the snapshot.
updatedAt DateTime @map("updated_at") @db.Timestamptz(3)
createdBy String? @map("created_by") @db.VarChar
updatedBy String? @map("updated_by") @db.VarChar
// should not delete the original snapshot even if the user is deleted
// we only delete the snapshot if the workspace is deleted
createdByUser User? @relation(name: "createdSnapshot", fields: [createdBy], references: [id], onDelete: SetNull)
updatedByUser User? @relation(name: "updatedSnapshot", fields: [updatedBy], references: [id], onDelete: SetNull)
// @deprecated use updatedAt only
seq Int? @default(0) @db.Integer
// we need to clear all hanging updates and snapshots before enabling the foreign key on workspaceId
// workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
updatedAt DateTime @map("updated_at") @db.Timestamptz(6)
@@id([id, workspaceId])
@@map("snapshots")
}
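
Since `updatedAt` carries no `@updatedAt` attribute, the merger must set it explicitly to the creation time of the last update folded into the snapshot; a sketch reusing the assumed client above (`workspaceId`, `docId`, `mergedBin`, and `lastMerged` are illustrative):

await db.snapshot.update({
  where: { id_workspaceId: { id: docId, workspaceId } },
  data: { blob: mergedBin, updatedAt: lastMerged.createdAt },
});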
// user snapshots are special snapshots for user storage, like personal app settings, distinguished from workspace snapshots
// they share the same structure as workspace snapshots,
// but for convenience we don't fork the updates queue and history for user snapshots until we have to,
// which means all operations on a user snapshot happen in place
model UserSnapshot {
userId String @map("user_id") @db.VarChar
id String @map("id") @db.VarChar
blob Bytes @db.ByteA
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(3)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
@@id([userId, id])
@@map("user_snapshots")
}
model Update {
workspaceId String @map("workspace_id") @db.VarChar
id String @map("guid") @db.VarChar
seq Int @db.Integer
blob Bytes @db.ByteA
createdAt DateTime @map("created_at") @db.Timestamptz(3)
createdBy String? @map("created_by") @db.VarChar
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
// clear the creator reference if the creator's account is deleted
createdByUser User? @relation(name: "createdUpdate", fields: [createdBy], references: [id], onDelete: SetNull)
// @deprecated use createdAt only
seq Int? @db.Integer
@@id([workspaceId, id, createdAt])
@@id([workspaceId, id, seq])
@@map("updates")
}
model SnapshotHistory {
workspaceId String @map("workspace_id") @db.VarChar
id String @map("guid") @db.VarChar
timestamp DateTime @db.Timestamptz(3)
workspaceId String @map("workspace_id") @db.VarChar(36)
id String @map("guid") @db.VarChar(36)
timestamp DateTime @db.Timestamptz(6)
blob Bytes @db.ByteA
state Bytes? @db.ByteA
expiredAt DateTime @map("expired_at") @db.Timestamptz(3)
createdBy String? @map("created_by") @db.VarChar
// clear the creator reference if the creator's account is deleted
createdByUser User? @relation(name: "createdHistory", fields: [createdBy], references: [id], onDelete: SetNull)
expiredAt DateTime @map("expired_at") @db.Timestamptz(6)
@@id([workspaceId, id, timestamp])
@@map("snapshot_histories")
}
model NewFeaturesWaitingList {
id String @id @default(uuid()) @db.VarChar
email String @unique
type Int @db.SmallInt
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
@@map("new_features_waiting_list")
}
model UserStripeCustomer {
userId String @id @map("user_id") @db.VarChar
stripeCustomerId String @unique @map("stripe_customer_id") @db.VarChar
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
@@ -326,7 +373,7 @@ model UserStripeCustomer {
model UserSubscription {
id Int @id @default(autoincrement()) @db.Integer
userId String @map("user_id") @db.VarChar
userId String @map("user_id") @db.VarChar(36)
plan String @db.VarChar(20)
// yearly/monthly
recurring String @db.VarChar(20)
@@ -335,21 +382,21 @@ model UserSubscription {
// subscription.status, active/past_due/canceled/unpaid...
status String @db.VarChar(20)
// subscription.current_period_start
start DateTime @map("start") @db.Timestamptz(3)
start DateTime @map("start") @db.Timestamptz(6)
// subscription.current_period_end, null for lifetime payment
end DateTime? @map("end") @db.Timestamptz(3)
end DateTime? @map("end") @db.Timestamptz(6)
// subscription.billing_cycle_anchor
nextBillAt DateTime? @map("next_bill_at") @db.Timestamptz(3)
nextBillAt DateTime? @map("next_bill_at") @db.Timestamptz(6)
// subscription.canceled_at
canceledAt DateTime? @map("canceled_at") @db.Timestamptz(3)
canceledAt DateTime? @map("canceled_at") @db.Timestamptz(6)
// subscription.trial_start
trialStart DateTime? @map("trial_start") @db.Timestamptz(3)
trialStart DateTime? @map("trial_start") @db.Timestamptz(6)
// subscription.trial_end
trialEnd DateTime? @map("trial_end") @db.Timestamptz(3)
trialEnd DateTime? @map("trial_end") @db.Timestamptz(6)
stripeScheduleId String? @map("stripe_schedule_id") @db.VarChar
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(6)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
@@unique([userId, plan])
@@ -358,7 +405,7 @@ model UserSubscription {
model UserInvoice {
id Int @id @default(autoincrement()) @db.Integer
userId String @map("user_id") @db.VarChar
userId String @map("user_id") @db.VarChar(36)
stripeInvoiceId String @unique @map("stripe_invoice_id")
currency String @db.VarChar(3)
// CNY 12.50 stored as 1250
@@ -366,8 +413,8 @@ model UserInvoice {
status String @db.VarChar(20)
plan String @db.VarChar(20)
recurring String @db.VarChar(20)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(6)
// billing reason
reason String @db.VarChar
lastPaymentError String? @map("last_payment_error") @db.Text
@@ -395,7 +442,7 @@ model AiPromptMessage {
content String @db.Text
attachments Json? @db.Json
params Json? @db.Json
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
prompt AiPrompt @relation(fields: [promptId], references: [id], onDelete: Cascade)
@@ -411,10 +458,7 @@ model AiPrompt {
action String? @db.VarChar
model String @db.VarChar
config Json? @db.Json
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
updatedAt DateTime @default(now()) @map("updated_at") @db.Timestamptz(3)
// whether the prompt is modified by the admin panel
modified Boolean @default(false)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
messages AiPromptMessage[]
sessions AiSession[]
@@ -423,14 +467,14 @@ model AiPrompt {
}
model AiSessionMessage {
id String @id @default(uuid()) @db.VarChar
sessionId String @map("session_id") @db.VarChar
id String @id @default(uuid()) @db.VarChar(36)
sessionId String @map("session_id") @db.VarChar(36)
role AiPromptRole
content String @db.Text
attachments Json? @db.Json
params Json? @db.Json
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(6)
session AiSession @relation(fields: [sessionId], references: [id], onDelete: Cascade)
@@ -438,17 +482,17 @@ model AiSessionMessage {
}
model AiSession {
id String @id @default(uuid()) @db.VarChar
userId String @map("user_id") @db.VarChar
workspaceId String @map("workspace_id") @db.VarChar
docId String @map("doc_id") @db.VarChar
id String @id @default(uuid()) @db.VarChar(36)
userId String @map("user_id") @db.VarChar(36)
workspaceId String @map("workspace_id") @db.VarChar(36)
docId String @map("doc_id") @db.VarChar(36)
promptName String @map("prompt_name") @db.VarChar(32)
// the session id of the parent session if this session is a forked session
parentSessionId String? @map("parent_session_id") @db.VarChar
parentSessionId String? @map("parent_session_id") @db.VarChar(36)
messageCost Int @default(0)
tokenCost Int @default(0)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
deletedAt DateTime? @map("deleted_at") @db.Timestamptz(3)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
deletedAt DateTime? @map("deleted_at") @db.Timestamptz(6)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
prompt AiPrompt @relation(fields: [promptName], references: [name], onDelete: Cascade)
@@ -458,10 +502,10 @@ model AiSession {
}
model DataMigration {
id String @id @default(uuid()) @db.VarChar
id String @id @default(uuid()) @db.VarChar(36)
name String @db.VarChar
startedAt DateTime @default(now()) @map("started_at") @db.Timestamptz(3)
finishedAt DateTime? @map("finished_at") @db.Timestamptz(3)
startedAt DateTime @default(now()) @map("started_at") @db.Timestamptz(6)
finishedAt DateTime? @map("finished_at") @db.Timestamptz(6)
@@map("_data_migrations")
}
@@ -481,9 +525,9 @@ model RuntimeConfig {
key String @db.VarChar
value Json @db.Json
description String @db.Text
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(3)
deletedAt DateTime? @map("deleted_at") @db.Timestamptz(3)
lastUpdatedBy String? @map("last_updated_by") @db.VarChar
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(6)
deletedAt DateTime? @map("deleted_at") @db.Timestamptz(6)
lastUpdatedBy String? @map("last_updated_by") @db.VarChar(36)
lastUpdatedByUser User? @relation(fields: [lastUpdatedBy], references: [id])

View File

@@ -3,7 +3,7 @@ import { Controller, Get } from '@nestjs/common';
import { Public } from './core/auth';
import { Config, SkipThrottle } from './fundamentals';
@Controller('/info')
@Controller('/')
export class AppController {
constructor(private readonly config: Config) {}
@@ -15,7 +15,7 @@ export class AppController {
compatibility: this.config.version,
message: `AFFiNE ${this.config.version} Server`,
type: this.config.type,
flavor: this.config.flavor.type,
flavor: this.config.flavor,
};
}
}
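
A quick way to inspect the payload this controller returns, assuming the `/info` mount shown on one side of this diff (values illustrative):

const info = await fetch('/info').then(res => res.json());
// e.g. { compatibility: '0.15.0', message: 'AFFiNE 0.15.0 Server', type: 'affine', flavor: 'graphql' }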

View File

@@ -1,3 +1,5 @@
import { join } from 'node:path';
import {
DynamicModule,
ForwardReference,
@@ -5,16 +7,16 @@ import {
Module,
} from '@nestjs/common';
import { ScheduleModule } from '@nestjs/schedule';
import { ServeStaticModule } from '@nestjs/serve-static';
import { get } from 'lodash-es';
import { AppController } from './app.controller';
import { AuthModule } from './core/auth';
import { ADD_ENABLED_FEATURES, ServerConfigModule } from './core/config';
import { DocStorageModule } from './core/doc';
import { DocModule } from './core/doc';
import { FeatureModule } from './core/features';
import { PermissionModule } from './core/permission';
import { QuotaModule } from './core/quota';
import { SelfhostModule } from './core/selfhost';
import { CustomSetupModule } from './core/setup';
import { StorageModule } from './core/storage';
import { SyncModule } from './core/sync';
import { UserModule } from './core/user';
@@ -135,7 +137,7 @@ export class AppModuleBuilder {
compile() {
@Module({
imports: this.modules,
controllers: [AppController],
controllers: this.config.isSelfhosted ? [] : [AppController],
})
class AppModule {}
@@ -143,37 +145,47 @@ export class AppModuleBuilder {
}
}
export function buildAppModule() {
function buildAppModule() {
AFFiNE = mergeConfigOverride(AFFiNE);
const factor = new AppModuleBuilder(AFFiNE);
factor
// basic
// common fundamental modules
.use(...FunctionalityModules)
.useIf(config => config.flavor.sync, WebSocketModule)
// auth
.use(UserModule, AuthModule, PermissionModule)
.use(AuthModule)
// business modules
.use(DocStorageModule)
.use(DocModule)
// sync server only
.useIf(config => config.flavor.sync, SyncModule)
.useIf(config => config.flavor.sync, WebSocketModule, SyncModule)
// graphql server only
.useIf(
config => config.flavor.graphql,
ServerConfigModule,
GqlModule,
StorageModule,
ServerConfigModule,
UserModule,
WorkspaceModule,
FeatureModule,
QuotaModule
)
// self hosted server only
.useIf(config => config.isSelfhosted, SelfhostModule);
.useIf(
config => config.isSelfhosted,
CustomSetupModule,
ServeStaticModule.forRoot({
rootPath: join('/app', 'static'),
exclude: ['/admin*'],
}),
ServeStaticModule.forRoot({
rootPath: join('/app', 'static', 'admin'),
serveRoot: '/admin',
})
);
// plugin modules
ENABLED_PLUGINS.forEach(name => {

View File

@@ -29,7 +29,7 @@ export async function createApp() {
graphqlUploadExpress({
// TODO(@darkskygit): dynamic limit by quota maybe?
maxFileSize: 100 * 1024 * 1024,
maxFiles: 32,
maxFiles: 5,
})
);

View File

@@ -25,7 +25,6 @@ AFFiNE.ENV_MAP = {
OAUTH_OIDC_CLAIM_MAP_EMAIL: 'plugins.oauth.providers.oidc.args.claim_email',
OAUTH_OIDC_CLAIM_MAP_NAME: 'plugins.oauth.providers.oidc.args.claim_name',
METRICS_CUSTOMER_IO_TOKEN: ['metrics.customerIo.token', 'string'],
CAPTCHA_TURNSTILE_SECRET: ['plugins.captcha.turnstile.secret', 'string'],
COPILOT_OPENAI_API_KEY: 'plugins.copilot.openai.apiKey',
COPILOT_FAL_API_KEY: 'plugins.copilot.fal.apiKey',
COPILOT_UNSPLASH_API_KEY: 'plugins.copilot.unsplashKey',

View File

@@ -71,14 +71,6 @@ AFFiNE.use('payment', {
});
AFFiNE.use('oauth');
/* Captcha Plugin Default Config */
AFFiNE.use('captcha', {
turnstile: {},
challenge: {
bits: 20,
},
});
if (AFFiNE.deploy) {
AFFiNE.mailer = {
service: 'gmail',

View File

@@ -95,15 +95,6 @@ AFFiNE.server.port = 3010;
// });
//
//
// /* Captcha Plugin Default Config */
// AFFiNE.plugins.use('captcha', {
// turnstile: {},
// challenge: {
// bits: 20,
// },
// });
//
//
// /* Cloudflare R2 Plugin */
// /* Enable if you choose to store workspace blobs or user avatars in Cloudflare R2 Storage Service */
// AFFiNE.use('cloudflare-r2', {

View File

@@ -1,3 +1,5 @@
import { randomUUID } from 'node:crypto';
import {
Body,
Controller,
@@ -16,34 +18,26 @@ import {
EarlyAccessRequired,
EmailTokenNotFound,
InternalServerError,
InvalidEmail,
InvalidEmailToken,
SignUpForbidden,
Throttle,
URLHelper,
UseNamedGuard,
} from '../../fundamentals';
import { UserService } from '../user';
import { validators } from '../utils/validators';
import { CurrentUser } from './current-user';
import { Public } from './guard';
import { AuthService } from './service';
import { CurrentUser, Session } from './session';
import { AuthService, parseAuthUserSeqNum } from './service';
import { TokenService, TokenType } from './token';
interface PreflightResponse {
registered: boolean;
hasPassword: boolean;
}
interface SignInCredential {
email: string;
class SignInCredential {
email!: string;
password?: string;
callbackUrl?: string;
}
interface MagicLinkCredential {
email: string;
token: string;
class MagicLinkCredential {
email!: string;
token!: string;
}
@Throttle('strict')
@@ -58,44 +52,13 @@ export class AuthController {
) {}
@Public()
@Post('/preflight')
async preflight(
@Body() params?: { email: string }
): Promise<PreflightResponse> {
if (!params?.email) {
throw new InvalidEmail();
}
validators.assertValidEmail(params.email);
const user = await this.user.findUserWithHashedPasswordByEmail(
params.email
);
if (!user) {
return {
registered: false,
hasPassword: false,
};
}
return {
registered: user.registered,
hasPassword: !!user.password,
};
}
@Public()
@UseNamedGuard('captcha')
@Post('/sign-in')
@Header('content-type', 'application/json')
async signIn(
@Req() req: Request,
@Res() res: Response,
@Body() credential: SignInCredential,
/**
* @deprecated
*/
@Query('redirect_uri') redirectUri?: string
@Query('redirect_uri') redirectUri = this.url.home
) {
validators.assertValidEmail(credential.email);
const canSignIn = await this.auth.canSignIn(credential.email);
@@ -104,86 +67,80 @@ export class AuthController {
}
if (credential.password) {
await this.passwordSignIn(
req,
res,
const user = await this.auth.signIn(
credential.email,
credential.password
);
await this.auth.setCookie(req, res, user);
res.status(HttpStatus.OK).send(user);
} else {
await this.sendMagicLink(
req,
res,
credential.email,
credential.callbackUrl,
// send email magic link
const user = await this.user.findUserByEmail(credential.email);
if (!user) {
const allowSignup = await this.config.runtime.fetch('auth/allowSignup');
if (!allowSignup) {
throw new SignUpForbidden();
}
}
const result = await this.sendSignInEmail(
{ email: credential.email, signUp: !user },
redirectUri
);
}
}
async passwordSignIn(
req: Request,
res: Response,
email: string,
password: string
) {
const user = await this.auth.signIn(email, password);
await this.auth.setCookies(req, res, user.id);
res.status(HttpStatus.OK).send(user);
}
async sendMagicLink(
_req: Request,
res: Response,
email: string,
callbackUrl = '/magic-link',
redirectUrl?: string
) {
// send email magic link
const user = await this.user.findUserByEmail(email);
if (!user) {
const allowSignup = await this.config.runtime.fetch('auth/allowSignup');
if (!allowSignup) {
throw new SignUpForbidden();
if (result.rejected.length) {
throw new InternalServerError('Failed to send sign-in email.');
}
}
res.status(HttpStatus.OK).send({
email: credential.email,
});
}
}
async sendSignInEmail(
{ email, signUp }: { email: string; signUp: boolean },
redirectUri: string
) {
const token = await this.token.createToken(TokenType.SignIn, email);
const magicLink = this.url.link(callbackUrl, {
const magicLink = this.url.link('/magic-link', {
token,
email,
...(redirectUrl
? {
redirect_uri: redirectUrl,
}
: {}),
redirect_uri: redirectUri,
});
const result = await this.auth.sendSignInEmail(email, magicLink, !user);
const result = await this.auth.sendSignInEmail(email, magicLink, signUp);
if (result.rejected.length) {
throw new InternalServerError('Failed to send sign-in email.');
}
res.status(HttpStatus.OK).send({
email: email,
});
return result;
}
@Get('/sign-out')
async signOut(
@Req() req: Request,
@Res() res: Response,
@Session() session: Session,
@Body() { all }: { all: boolean }
@Query('redirect_uri') redirectUri?: string
) {
await this.auth.signOut(
session.sessionId,
all ? undefined : session.userId
const session = await this.auth.signOut(
req.cookies[AuthService.sessionCookieName],
parseAuthUserSeqNum(req.headers[AuthService.authUserSeqHeaderName])
);
res.status(HttpStatus.OK).send({});
if (session) {
res.cookie(AuthService.sessionCookieName, session.id, {
expires: session.expiresAt ?? void 0, // expiresAt is `Date | null`
...this.auth.cookieOptions,
});
} else {
res.clearCookie(AuthService.sessionCookieName);
}
if (redirectUri) {
return this.url.safeRedirect(res, redirectUri);
} else {
return res.send(null);
}
}
@Public()
@@ -199,11 +156,11 @@ export class AuthController {
validators.assertValidEmail(email);
const tokenRecord = await this.token.verifyToken(TokenType.SignIn, token, {
const valid = await this.token.verifyToken(TokenType.SignIn, token, {
credential: email,
});
if (!tokenRecord) {
if (!valid) {
throw new InvalidEmailToken();
}
@@ -212,8 +169,9 @@ export class AuthController {
registered: true,
});
await this.auth.setCookies(req, res, user.id);
res.send({ id: user.id });
await this.auth.setCookie(req, res, user);
res.send({ id: user.id, email: user.email, name: user.name });
}
@Throttle('default', { limit: 1200 })
@@ -240,4 +198,14 @@ export class AuthController {
users: await this.auth.getUserList(token),
};
}
@Public()
@Get('/challenge')
async challenge() {
// TODO(@darksky): impl in following PR
return {
challenge: randomUUID(),
resource: randomUUID(),
};
}
}
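
A sketch of the password path of this sign-in endpoint from a client, assuming the controller is mounted at `/api/auth` as referenced elsewhere in this diff (credentials illustrative):

const res = await fetch('/api/auth/sign-in', {
  method: 'POST',
  headers: { 'content-type': 'application/json' },
  body: JSON.stringify({ email: 'user@example.com', password: 'secret' }),
});
// 200: the body is the session user and the session cookie is set;
// omitting `password` sends a magic-link email instead.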

View File

@@ -1,9 +1,13 @@
import type { ExecutionContext } from '@nestjs/common';
import { createParamDecorator } from '@nestjs/common';
import { User, UserSession } from '@prisma/client';
import { User } from '@prisma/client';
import { getRequestResponseFromContext } from '../../fundamentals';
function getUserFromContext(context: ExecutionContext) {
return getRequestResponseFromContext(context).req.user;
}
/**
* Used to fetch current user from the request context.
*
@@ -40,7 +44,7 @@ import { getRequestResponseFromContext } from '../../fundamentals';
// eslint-disable-next-line no-redeclare
export const CurrentUser = createParamDecorator(
(_: unknown, context: ExecutionContext) => {
return getRequestResponseFromContext(context).req.session?.user;
return getUserFromContext(context);
}
);
@@ -49,15 +53,3 @@ export interface CurrentUser
hasPassword: boolean | null;
emailVerified: boolean;
}
// interface and variable don't conflict
// eslint-disable-next-line no-redeclare
export const Session = createParamDecorator(
(_: unknown, context: ExecutionContext) => {
return getRequestResponseFromContext(context).req.session;
}
);
export type Session = UserSession & {
user: CurrentUser;
};

View File

@@ -1,23 +1,24 @@
import type {
CanActivate,
ExecutionContext,
FactoryProvider,
OnModuleInit,
} from '@nestjs/common';
import { Injectable, SetMetadata } from '@nestjs/common';
import { Injectable, SetMetadata, UseGuards } from '@nestjs/common';
import { ModuleRef, Reflector } from '@nestjs/core';
import type { Request } from 'express';
import {
AuthenticationRequired,
Config,
getRequestResponseFromContext,
mapAnyError,
parseCookies,
} from '../../fundamentals';
import { WEBSOCKET_OPTIONS } from '../../fundamentals/websocket';
import { AuthService } from './service';
import { Session } from './session';
import { AuthService, parseAuthUserSeqNum } from './service';
function extractTokenFromHeader(authorization: string) {
if (!/^Bearer\s/i.test(authorization)) {
return;
}
return authorization.substring(7);
}
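// Illustrative usage: extractTokenFromHeader('Bearer abc123') === 'abc123';
// an authorization header that does not start with "Bearer " yields undefined.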
const PUBLIC_ENTRYPOINT_SYMBOL = Symbol('public');
@@ -37,9 +38,37 @@ export class AuthGuard implements CanActivate, OnModuleInit {
async canActivate(context: ExecutionContext) {
const { req, res } = getRequestResponseFromContext(context);
const userSession = await this.signIn(req);
if (res && userSession && userSession.expiresAt) {
await this.auth.refreshUserSessionIfNeeded(res, userSession);
// check cookie
let sessionToken: string | undefined =
req.cookies[AuthService.sessionCookieName];
if (!sessionToken && req.headers.authorization) {
sessionToken = extractTokenFromHeader(req.headers.authorization);
}
if (sessionToken) {
const userSeq = parseAuthUserSeqNum(
req.headers[AuthService.authUserSeqHeaderName]
);
const { user, expiresAt } = await this.auth.getUser(
sessionToken,
userSeq
);
if (res && user && expiresAt) {
await this.auth.refreshUserSessionIfNeeded(
req,
res,
sessionToken,
user.id,
expiresAt
);
}
if (user) {
req.sid = sessionToken;
req.user = user;
}
}
// api is public
@@ -52,70 +81,33 @@ export class AuthGuard implements CanActivate, OnModuleInit {
return true;
}
if (!userSession) {
if (!req.user) {
throw new AuthenticationRequired();
}
return true;
}
async signIn(req: Request): Promise<Session | null> {
if (req.session) {
return req.session;
}
// compatibility with websocket request
parseCookies(req);
// TODO(@forehalo): a cache for user session
const userSession = await this.auth.getUserSessionFromRequest(req);
if (userSession) {
req.session = {
...userSession.session,
user: userSession.user,
};
return req.session;
}
return null;
}
}
/**
* Mark api to be public accessible
* This guard is used to protect routes/queries/mutations that require a user to be logged in.
*
 * The `@CurrentUser()` parameter decorator used in `Auth`-guarded queries will always give us the user because the `Auth` guard
 * fast-throws if the user is not logged in.
*
* @example
*
* ```typescript
* \@Auth()
* \@Query(() => UserType)
* user(@CurrentUser() user: CurrentUser) {
* return user;
* }
* ```
*/
export const Public = () => SetMetadata(PUBLIC_ENTRYPOINT_SYMBOL, true);
export const AuthWebsocketOptionsProvider: FactoryProvider = {
provide: WEBSOCKET_OPTIONS,
useFactory: (config: Config, guard: AuthGuard) => {
return {
...config.websocket,
allowRequest: async (
req: any,
pass: (err: string | null | undefined, success: boolean) => void
) => {
if (!config.websocket.requireAuthentication) {
return pass(null, true);
}
try {
const authentication = await guard.signIn(req);
if (authentication) {
return pass(null, true);
} else {
return pass('unauthenticated', false);
}
} catch (e) {
const error = mapAnyError(e);
error.log('Websocket');
return pass('unauthenticated', false);
}
},
};
},
inject: [Config, AuthGuard],
export const Auth = () => {
return UseGuards(AuthGuard);
};
// api is publicly accessible
export const Public = () => SetMetadata(PUBLIC_ENTRYPOINT_SYMBOL, true);

View File

@@ -6,21 +6,15 @@ import { FeatureModule } from '../features';
import { QuotaModule } from '../quota';
import { UserModule } from '../user';
import { AuthController } from './controller';
import { AuthGuard, AuthWebsocketOptionsProvider } from './guard';
import { AuthGuard } from './guard';
import { AuthResolver } from './resolver';
import { AuthService } from './service';
import { TokenService, TokenType } from './token';
@Module({
imports: [FeatureModule, UserModule, QuotaModule],
providers: [
AuthService,
AuthResolver,
TokenService,
AuthGuard,
AuthWebsocketOptionsProvider,
],
exports: [AuthService, AuthGuard, AuthWebsocketOptionsProvider, TokenService],
providers: [AuthService, AuthResolver, TokenService, AuthGuard],
exports: [AuthService, AuthGuard],
controllers: [AuthController],
})
export class AuthModule {}
@@ -28,4 +22,4 @@ export class AuthModule {}
export * from './guard';
export { ClientTokenType } from './resolver';
export { AuthService, TokenService, TokenType };
export * from './session';
export * from './current-user';

View File

@@ -11,23 +11,22 @@ import {
import {
ActionForbidden,
Config,
EmailAlreadyUsed,
EmailTokenNotFound,
EmailVerificationRequired,
InvalidEmailToken,
LinkExpired,
SameEmailProvided,
SkipThrottle,
Throttle,
URLHelper,
} from '../../fundamentals';
import { Admin } from '../common';
import { UserService } from '../user';
import { UserType } from '../user/types';
import { validators } from '../utils/validators';
import { CurrentUser } from './current-user';
import { Public } from './guard';
import { AuthService } from './service';
import { CurrentUser } from './session';
import { TokenService, TokenType } from './token';
@ObjectType('tokenType')
@@ -46,6 +45,7 @@ export class ClientTokenType {
@Resolver(() => UserType)
export class AuthResolver {
constructor(
private readonly config: Config,
private readonly url: URLHelper,
private readonly auth: AuthService,
private readonly user: UserService,
@@ -65,7 +65,7 @@ export class AuthResolver {
@ResolveField(() => ClientTokenType, {
name: 'token',
deprecationReason: 'use [/api/auth/sign-in?native=true] instead',
deprecationReason: 'use [/api/auth/authorize]',
})
async clientToken(
@CurrentUser() currentUser: CurrentUser,
@@ -75,32 +75,39 @@ export class AuthResolver {
throw new ActionForbidden();
}
const userSession = await this.auth.createUserSession(user.id);
const session = await this.auth.createUserSession(
user,
undefined,
this.config.auth.accessToken.ttl
);
return {
sessionToken: userSession.sessionId,
token: userSession.sessionId,
sessionToken: session.sessionId,
token: session.sessionId,
refresh: '',
};
}
@Public()
@Mutation(() => Boolean)
@Mutation(() => UserType)
async changePassword(
@CurrentUser() user: CurrentUser,
@Args('token') token: string,
@Args('newPassword') newPassword: string,
@Args('userId', { type: () => String, nullable: true }) userId?: string
@Args('newPassword') newPassword: string
) {
if (!userId) {
throw new LinkExpired();
}
const config = await this.config.runtime.fetchAll({
'auth/password.max': true,
'auth/password.min': true,
});
validators.assertValidPassword(newPassword, {
min: config['auth/password.min'],
max: config['auth/password.max'],
});
// NOTE: Set & Change password are using the same token type.
const valid = await this.token.verifyToken(
TokenType.ChangePassword,
token,
{
credential: userId,
credential: user.id,
}
);
@@ -108,10 +115,10 @@ export class AuthResolver {
throw new InvalidEmailToken();
}
await this.auth.changePassword(userId, newPassword);
await this.auth.revokeUserSessions(userId);
await this.auth.changePassword(user.id, newPassword);
await this.auth.revokeUserSessions(user.id);
return true;
return user;
}
@Mutation(() => UserType)
@@ -120,6 +127,7 @@ export class AuthResolver {
@Args('token') token: string,
@Args('email') email: string
) {
validators.assertValidEmail(email);
// @see [sendChangeEmail]
const valid = await this.token.verifyToken(TokenType.VerifyEmail, token, {
credential: user.id,
@@ -142,11 +150,8 @@ export class AuthResolver {
async sendChangePasswordEmail(
@CurrentUser() user: CurrentUser,
@Args('callbackUrl') callbackUrl: string,
@Args('email', {
nullable: true,
deprecationReason: 'fetched from signed in user',
})
_email?: string
// @deprecated
@Args('email', { nullable: true }) _email?: string
) {
if (!user.emailVerified) {
throw new EmailVerificationRequired();
@@ -157,7 +162,7 @@ export class AuthResolver {
user.id
);
const url = this.url.link(callbackUrl, { userId: user.id, token });
const url = this.url.link(callbackUrl, { token });
const res = await this.auth.sendChangePasswordEmail(user.email, url);
@@ -168,13 +173,21 @@ export class AuthResolver {
async sendSetPasswordEmail(
@CurrentUser() user: CurrentUser,
@Args('callbackUrl') callbackUrl: string,
@Args('email', {
nullable: true,
deprecationReason: 'fetched from signed in user',
})
_email?: string
@Args('email', { nullable: true }) _email?: string
) {
return this.sendChangePasswordEmail(user, callbackUrl);
if (!user.emailVerified) {
throw new EmailVerificationRequired();
}
const token = await this.token.createToken(
TokenType.ChangePassword,
user.id
);
const url = this.url.link(callbackUrl, { token });
const res = await this.auth.sendSetPasswordEmail(user.email, url);
return !res.rejected.length;
}
// The change email step is:
@@ -278,20 +291,4 @@ export class AuthResolver {
return emailVerifiedAt !== null;
}
@Admin()
@Mutation(() => String, {
description: 'Create change password url',
})
async createChangePasswordUrl(
@Args('userId') userId: string,
@Args('callbackUrl') callbackUrl: string
): Promise<string> {
const token = await this.token.createToken(
TokenType.ChangePassword,
userId
);
return this.url.link(callbackUrl, { userId, token });
}
}

View File

@@ -1,16 +1,39 @@
import { Injectable, OnApplicationBootstrap } from '@nestjs/common';
import { Cron, CronExpression } from '@nestjs/schedule';
import type { User, UserSession } from '@prisma/client';
import type { User } from '@prisma/client';
import { PrismaClient } from '@prisma/client';
import type { CookieOptions, Request, Response } from 'express';
import { assign, pick } from 'lodash-es';
import { assign, omit } from 'lodash-es';
import { Config, MailService, SignUpForbidden } from '../../fundamentals';
import { Config, EmailAlreadyUsed, MailService } from '../../fundamentals';
import { FeatureManagementService } from '../features/management';
import { QuotaService } from '../quota/service';
import { QuotaType } from '../quota/types';
import { UserService } from '../user/service';
import type { CurrentUser } from './session';
import type { CurrentUser } from './current-user';
export function parseAuthUserSeqNum(value: any) {
let seq: number = 0;
switch (typeof value) {
case 'number': {
seq = value;
break;
}
case 'string': {
      const result = value.match(/^(\d{1,10})$/);
if (result?.[1]) {
seq = Number(result[1]);
}
break;
}
default: {
seq = 0;
}
}
return Math.max(0, seq);
}
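// Illustrative behavior: parseAuthUserSeqNum(1) === 1; parseAuthUserSeqNum('2') === 2;
// parseAuthUserSeqNum('abc') === 0; parseAuthUserSeqNum(undefined) === 0.
// The seq selects which signed-in account of a shared session to act as.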
export function sessionUser(
user: Pick<
@@ -18,19 +41,13 @@ export function sessionUser(
'id' | 'email' | 'avatarUrl' | 'name' | 'emailVerifiedAt'
> & { password?: string | null }
): CurrentUser {
// use pick to avoid unexpected fields
return assign(pick(user, 'id', 'email', 'avatarUrl', 'name'), {
hasPassword: user.password !== null,
emailVerified: user.emailVerifiedAt !== null,
});
}
function extractTokenFromHeader(authorization: string) {
if (!/^Bearer\s/i.test(authorization)) {
return;
}
return authorization.substring(7);
return assign(
omit(user, 'password', 'registered', 'emailVerifiedAt', 'createdAt'),
{
hasPassword: user.password !== null,
emailVerified: user.emailVerifiedAt !== null,
}
);
}
@Injectable()
@@ -42,7 +59,7 @@ export class AuthService implements OnApplicationBootstrap {
secure: this.config.server.https,
};
static readonly sessionCookieName = 'affine_session';
static readonly userCookieName = 'affine_user_id';
static readonly authUserSeqHeaderName = 'x-auth-user';
constructor(
private readonly config: Config,
@@ -68,7 +85,7 @@ export class AuthService implements OnApplicationBootstrap {
await this.quota.switchUserQuota(devUser.id, QuotaType.ProPlanV1);
await this.feature.addAdmin(devUser.id);
await this.feature.addCopilot(devUser.id);
} catch {
} catch (e) {
// ignore
}
}
@@ -78,173 +95,168 @@ export class AuthService implements OnApplicationBootstrap {
return this.feature.canEarlyAccess(email);
}
/**
* This is a test only helper to quickly signup a user, do not use in production
*/
async signUp(email: string, password: string): Promise<CurrentUser> {
if (!this.config.node.test) {
throw new SignUpForbidden(
'sign up helper is forbidden for non-test environment'
);
async signUp(
name: string,
email: string,
password: string
): Promise<CurrentUser> {
const user = await this.user.findUserByEmail(email);
if (user) {
throw new EmailAlreadyUsed();
}
return this.user
.createUser_without_verification({
.createUser({
name,
email,
password,
})
.then(sessionUser);
}
async signIn(email: string, password: string): Promise<CurrentUser> {
return this.user.signIn(email, password).then(sessionUser);
async signIn(email: string, password: string) {
const user = await this.user.signIn(email, password);
return sessionUser(user);
}
async signOut(sessionId: string, userId?: string) {
// sign out all users in the session
if (!userId) {
await this.db.session.deleteMany({
where: {
id: sessionId,
},
});
} else {
await this.db.userSession.deleteMany({
where: {
sessionId,
userId,
},
});
}
}
async getUserSession(
sessionId: string,
userId?: string
): Promise<{ user: CurrentUser; session: UserSession } | null> {
const userSession = await this.db.userSession.findFirst({
where: {
sessionId,
userId,
},
select: {
id: true,
sessionId: true,
userId: true,
createdAt: true,
expiresAt: true,
user: true,
},
orderBy: {
createdAt: 'asc',
},
});
async getUser(
token: string,
seq = 0
): Promise<{ user: CurrentUser | null; expiresAt: Date | null }> {
const session = await this.getSession(token);
// no such session
if (!session) {
return { user: null, expiresAt: null };
}
const userSession = session.userSessions.at(seq);
// no such user session
if (!userSession) {
return null;
return { user: null, expiresAt: null };
}
// user session expired
if (userSession.expiresAt && userSession.expiresAt <= new Date()) {
return { user: null, expiresAt: null };
}
const user = await this.db.user.findUnique({
where: { id: userSession.userId },
});
if (!user) {
return { user: null, expiresAt: null };
}
return { user: sessionUser(user), expiresAt: userSession.expiresAt };
}
async getUserList(token: string) {
const session = await this.getSession(token);
if (!session || !session.userSessions.length) {
return [];
}
const users = await this.db.user.findMany({
where: {
id: {
in: session.userSessions.map(({ userId }) => userId),
},
},
});
// TODO(@forehalo): need to separate expired session, same for [getUser]
// Session
// | { user: LimitedUser { email, avatarUrl }, expired: true }
// | { user: User, expired: false }
return session.userSessions
.map(userSession => {
// keep users in the same order as userSessions
const user = users.find(({ id }) => id === userSession.userId);
if (!user) {
return null;
}
return sessionUser(user);
})
.filter(Boolean) as CurrentUser[];
}
async signOut(token: string, seq = 0) {
const session = await this.getSession(token);
if (session) {
// seq is out of range of this session's logged-in users
if (session.userSessions.length <= seq) {
return session;
}
await this.db.userSession.deleteMany({
where: { id: session.userSessions[seq].id },
});
// no more user session active, delete the whole session
if (session.userSessions.length === 1) {
await this.db.session.delete({ where: { id: session.id } });
return null;
}
return session;
}
return null;
}
async getSession(token: string) {
if (!token) {
return null;
}
return { user: sessionUser(userSession.user), session: userSession };
}
async createUserSession(
userId: string,
sessionId?: string,
ttl = this.config.auth.session.ttl
) {
// check whether given session is valid
if (sessionId) {
const session = await this.db.session.findFirst({
return this.db.$transaction(async tx => {
const session = await tx.session.findUnique({
where: {
id: sessionId,
id: token,
},
include: {
userSessions: {
orderBy: {
createdAt: 'asc',
},
},
},
});
if (!session) {
sessionId = undefined;
return null;
}
}
if (!sessionId) {
const session = await this.createSession();
sessionId = session.id;
}
const expiresAt = new Date(Date.now() + ttl * 1000);
return this.db.userSession.upsert({
where: {
sessionId_userId: {
sessionId,
userId,
},
},
update: {
expiresAt,
},
create: {
sessionId,
userId,
expiresAt,
},
});
}
async getUserList(sessionId: string) {
const sessions = await this.db.userSession.findMany({
where: {
sessionId,
OR: [
{
expiresAt: null,
if (session.expiresAt && session.expiresAt <= new Date()) {
await tx.session.delete({
where: {
id: session.id,
},
{
expiresAt: {
gt: new Date(),
},
},
],
},
include: {
user: true,
},
orderBy: {
createdAt: 'asc',
},
});
});
return sessions.map(({ user }) => sessionUser(user));
}
return null;
}
async createSession() {
return this.db.session.create({
data: {},
});
}
async getSession(sessionId: string) {
return this.db.session.findFirst({
where: {
id: sessionId,
},
return session;
});
}
async refreshUserSessionIfNeeded(
_req: Request,
res: Response,
session: UserSession,
sessionId: string,
userId: string,
expiresAt: Date,
ttr = this.config.auth.session.ttr
): Promise<boolean> {
if (
session.expiresAt &&
session.expiresAt.getTime() - Date.now() > ttr * 1000
) {
if (expiresAt && expiresAt.getTime() - Date.now() > ttr * 1000) {
// no need to refresh
return false;
}
@@ -255,14 +267,17 @@ export class AuthService implements OnApplicationBootstrap {
await this.db.userSession.update({
where: {
id: session.id,
sessionId_userId: {
sessionId,
userId,
},
},
data: {
expiresAt: newExpiresAt,
},
});
res.cookie(AuthService.sessionCookieName, session.sessionId, {
res.cookie(AuthService.sessionCookieName, sessionId, {
expires: newExpiresAt,
...this.cookieOptions,
});
@@ -270,63 +285,70 @@ export class AuthService implements OnApplicationBootstrap {
return true;
}
async revokeUserSessions(userId: string) {
async createUserSession(
user: { id: string },
existingSession?: string,
ttl = this.config.auth.session.ttl
) {
const session = existingSession
? await this.getSession(existingSession)
: null;
const expiresAt = new Date(Date.now() + ttl * 1000);
if (session) {
return this.db.userSession.upsert({
where: {
sessionId_userId: {
sessionId: session.id,
userId: user.id,
},
},
update: {
expiresAt,
},
create: {
sessionId: session.id,
userId: user.id,
expiresAt,
},
});
} else {
return this.db.userSession.create({
data: {
expiresAt,
session: {
create: {},
},
user: {
connect: {
id: user.id,
},
},
},
});
}
}
async revokeUserSessions(userId: string, sessionId?: string) {
return this.db.userSession.deleteMany({
where: {
userId,
sessionId,
},
});
}
getSessionOptionsFromRequest(req: Request) {
let sessionId: string | undefined =
req.cookies[AuthService.sessionCookieName];
async setCookie(_req: Request, res: Response, user: { id: string }) {
const session = await this.createUserSession(
user
// TODO(@forehalo): enable multi user session
// req.cookies[AuthService.sessionCookieName]
);
if (!sessionId && req.headers.authorization) {
sessionId = extractTokenFromHeader(req.headers.authorization);
}
const userId: string | undefined =
req.cookies[AuthService.userCookieName] ||
req.headers[AuthService.userCookieName];
return {
sessionId,
userId,
};
}
async setCookies(req: Request, res: Response, userId: string) {
const { sessionId } = this.getSessionOptionsFromRequest(req);
const userSession = await this.createUserSession(userId, sessionId);
res.cookie(AuthService.sessionCookieName, userSession.sessionId, {
res.cookie(AuthService.sessionCookieName, session.sessionId, {
expires: session.expiresAt ?? void 0,
...this.cookieOptions,
expires: userSession.expiresAt ?? void 0,
});
this.setUserCookie(res, userId);
}
setUserCookie(res: Response, userId: string) {
res.cookie(AuthService.userCookieName, userId, {
...this.cookieOptions,
// user cookie is client readable & writable for fast user switching when there are multiple users in one session
// it is safe to be non-secure & non-httpOnly because the server validates it against `cookie[AuthService.sessionCookieName]`
httpOnly: false,
secure: false,
});
}
async getUserSessionFromRequest(req: Request) {
const { sessionId, userId } = this.getSessionOptionsFromRequest(req);
if (!sessionId) {
return null;
}
return this.getUserSession(sessionId, userId);
}
async changePassword(
@@ -375,16 +397,24 @@ export class AuthService implements OnApplicationBootstrap {
async sendSignInEmail(email: string, link: string, signUp: boolean) {
return signUp
? await this.mailer.sendSignUpMail(link, {
? await this.mailer.sendSignUpMail(link.toString(), {
to: email,
})
: await this.mailer.sendSignInMail(link, {
: await this.mailer.sendSignInMail(link.toString(), {
to: email,
});
}
@Cron(CronExpression.EVERY_DAY_AT_MIDNIGHT)
async cleanExpiredSessions() {
await this.db.session.deleteMany({
where: {
expiresAt: {
lte: new Date(),
},
},
});
await this.db.userSession.deleteMany({
where: {
expiresAt: {

View File

@@ -69,9 +69,13 @@ export class TokenService {
const valid =
!expired && (!record.credential || record.credential === credential);
// always revoke expired token
if (expired || (valid && !keep)) {
const deleted = await this.revokeToken(type, token);
if ((expired || valid) && !keep) {
const deleted = await this.db.verificationToken.deleteMany({
where: {
token,
type,
},
});
// already deleted, which means the token has been used
if (!deleted.count) {
@@ -82,15 +86,6 @@ export class TokenService {
return valid ? record : null;
}
async revokeToken(type: TokenType, token: string) {
return await this.db.verificationToken.deleteMany({
where: {
token,
type,
},
});
}
@Cron(CronExpression.EVERY_DAY_AT_MIDNIGHT)
async cleanExpiredTokens() {
await this.db.verificationToken.deleteMany({

View File

@@ -25,8 +25,8 @@ export class AdminGuard implements CanActivate, OnModuleInit {
async canActivate(context: ExecutionContext) {
const { req } = getRequestResponseFromContext(context);
let allow = false;
if (req.session) {
allow = await this.feature.isAdmin(req.session.user.id);
if (req.user) {
allow = await this.feature.isAdmin(req.user.id);
}
if (!allow) {

View File

@@ -4,23 +4,17 @@ import { Module } from '@nestjs/common';
import {
ServerConfigResolver,
ServerFeatureConfigResolver,
ServerRuntimeConfigResolver,
ServerServiceConfigResolver,
} from './resolver';
import { ServerService } from './service';
@Module({
providers: [
ServerService,
ServerConfigResolver,
ServerFeatureConfigResolver,
ServerRuntimeConfigResolver,
ServerServiceConfigResolver,
],
exports: [ServerService],
})
export class ServerConfigModule {}
export { ServerService };
export { ADD_ENABLED_FEATURES } from './server-feature';
export { ADD_ENABLED_FEATURES, ServerConfigType } from './resolver';
export { ServerFeature } from './types';

View File

@@ -9,18 +9,27 @@ import {
ResolveField,
Resolver,
} from '@nestjs/graphql';
import { RuntimeConfig, RuntimeConfigType } from '@prisma/client';
import { PrismaClient, RuntimeConfig, RuntimeConfigType } from '@prisma/client';
import { GraphQLJSON, GraphQLJSONObject } from 'graphql-scalars';
import { Config, URLHelper } from '../../fundamentals';
import { Config, DeploymentType, URLHelper } from '../../fundamentals';
import { Public } from '../auth';
import { Admin } from '../common';
import { FeatureType } from '../features';
import { AvailableUserFeatureConfig } from '../features/resolver';
import { ServerFlags } from './config';
import { ENABLED_FEATURES } from './server-feature';
import { ServerService } from './service';
import { ServerConfigType } from './types';
import { ServerFeature } from './types';
const ENABLED_FEATURES: Set<ServerFeature> = new Set();
export function ADD_ENABLED_FEATURES(feature: ServerFeature) {
ENABLED_FEATURES.add(feature);
}
registerEnumType(ServerFeature, {
name: 'ServerFeature',
});
registerEnumType(DeploymentType, {
name: 'ServerDeploymentType',
});
@ObjectType()
export class PasswordLimitsType {
@@ -36,6 +45,36 @@ export class CredentialsRequirementType {
password!: PasswordLimitsType;
}
@ObjectType()
export class ServerConfigType {
@Field({
description:
'server identical name could be shown as badge on user interface',
})
name!: string;
@Field({ description: 'server version' })
version!: string;
@Field({ description: 'server base url' })
baseUrl!: string;
@Field(() => DeploymentType, { description: 'server type' })
type!: DeploymentType;
/**
* @deprecated
*/
@Field({ description: 'server flavor', deprecationReason: 'use `features`' })
flavor!: string;
@Field(() => [ServerFeature], { description: 'enabled server features' })
features!: ServerFeature[];
@Field({ description: 'enable telemetry' })
enableTelemetry!: boolean;
}
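
A sketch of querying this type over the GraphQL endpoint; the `serverConfig` root field name is an assumption based on the resolver below:

const { data } = await fetch('/graphql', {
  method: 'POST',
  headers: { 'content-type': 'application/json' },
  body: JSON.stringify({
    query: '{ serverConfig { name version baseUrl type features enableTelemetry } }',
  }),
}).then(res => res.json());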
registerEnumType(RuntimeConfigType, {
name: 'RuntimeConfigType',
});
@@ -77,7 +116,7 @@ export class ServerConfigResolver {
constructor(
private readonly config: Config,
private readonly url: URLHelper,
private readonly server: ServerService
private readonly db: PrismaClient
) {}
@Public()
@@ -132,21 +171,7 @@ export class ServerConfigResolver {
description: 'whether server has been initialized',
})
async initialized() {
return this.server.initialized();
}
}
@Resolver(() => ServerConfigType)
export class ServerFeatureConfigResolver extends AvailableUserFeatureConfig {
constructor(config: Config) {
super(config);
}
@ResolveField(() => [FeatureType], {
description: 'Features for user that can be configured',
})
override availableUserFeatures() {
return super.availableUserFeatures();
return (await this.db.user.count()) > 0;
}
}

View File

@@ -1,7 +0,0 @@
import { ServerFeature } from './types';
export const ENABLED_FEATURES: Set<ServerFeature> = new Set();
export function ADD_ENABLED_FEATURES(feature: ServerFeature) {
ENABLED_FEATURES.add(feature);
}
export { ServerFeature };

View File

@@ -1,17 +0,0 @@
import { Injectable } from '@nestjs/common';
import { PrismaClient } from '@prisma/client';
@Injectable()
export class ServerService {
private _initialized: boolean | null = null;
constructor(private readonly db: PrismaClient) {}
async initialized() {
if (!this._initialized) {
const userCount = await this.db.user.count();
this._initialized = userCount > 0;
}
return this._initialized;
}
}

View File

@@ -1,48 +1,5 @@
import { Field, ObjectType, registerEnumType } from '@nestjs/graphql';
import { DeploymentType } from '../../fundamentals';
export enum ServerFeature {
Captcha = 'captcha',
Copilot = 'copilot',
Payment = 'payment',
OAuth = 'oauth',
}
registerEnumType(ServerFeature, {
name: 'ServerFeature',
});
registerEnumType(DeploymentType, {
name: 'ServerDeploymentType',
});
@ObjectType()
export class ServerConfigType {
@Field({
description:
'server identical name could be shown as badge on user interface',
})
name!: string;
@Field({ description: 'server version' })
version!: string;
@Field({ description: 'server base url' })
baseUrl!: string;
@Field(() => DeploymentType, { description: 'server type' })
type!: DeploymentType;
/**
* @deprecated
*/
@Field({ description: 'server flavor', deprecationReason: 'use `features`' })
flavor!: string;
@Field(() => [ServerFeature], { description: 'enabled server features' })
features!: ServerFeature[];
@Field({ description: 'enable telemetry' })
enableTelemetry!: boolean;
}

View File

@@ -1,186 +0,0 @@
import { Injectable } from '@nestjs/common';
import { PrismaClient } from '@prisma/client';
import { Mutex } from '../../../fundamentals';
import { DocStorageOptions } from '../options';
import { DocRecord, DocStorageAdapter } from '../storage';
@Injectable()
export class PgUserspaceDocStorageAdapter extends DocStorageAdapter {
constructor(
private readonly db: PrismaClient,
private readonly mutex: Mutex,
options: DocStorageOptions
) {
super(options);
}
// no updates queue for userspace; merge updates directly in place
// no history record for userspace
protected async getDocUpdates() {
return [];
}
protected async markUpdatesMerged() {
return 0;
}
async listDocHistories() {
return [];
}
async getDocHistory() {
return null;
}
protected async createDocHistory() {
return false;
}
override async rollbackDoc() {
return;
}
override async getDoc(spaceId: string, docId: string) {
return this.getDocSnapshot(spaceId, docId);
}
async pushDocUpdates(
userId: string,
docId: string,
updates: Uint8Array[],
editorId?: string
) {
if (!updates.length) {
return 0;
}
await using _lock = await this.lockDocForUpdate(userId, docId);
const snapshot = await this.getDocSnapshot(userId, docId);
const now = Date.now();
const pendings = updates.map((update, i) => ({
bin: update,
timestamp: now + i,
}));
const { timestamp, bin } = await this.squash(
snapshot ? [snapshot, ...pendings] : pendings
);
await this.setDocSnapshot({
spaceId: userId,
docId,
bin,
timestamp,
editor: editorId,
});
return timestamp;
}
async deleteDoc(userId: string, docId: string) {
await this.db.userSnapshot.deleteMany({
where: {
userId,
id: docId,
},
});
}
async deleteSpace(userId: string) {
await this.db.userSnapshot.deleteMany({
where: {
userId,
},
});
}
async getSpaceDocTimestamps(userId: string, after?: number) {
const snapshots = await this.db.userSnapshot.findMany({
select: {
id: true,
updatedAt: true,
},
where: {
userId,
...(after
? {
updatedAt: {
gt: new Date(after),
},
}
: {}),
},
});
const result: Record<string, number> = {};
snapshots.forEach(s => {
result[s.id] = s.updatedAt.getTime();
});
return result;
}
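// Illustrative use in incremental sync: ask only for docs changed after a checkpoint.
// const changed = await adapter.getSpaceDocTimestamps(userId, lastSyncedAt);
// // e.g. { 'doc-a': 1719999999000, 'doc-b': 1720000123456 }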
protected async getDocSnapshot(userId: string, docId: string) {
const snapshot = await this.db.userSnapshot.findUnique({
where: {
userId_id: {
userId,
id: docId,
},
},
});
if (!snapshot) {
return null;
}
return {
spaceId: userId,
docId,
bin: snapshot.blob,
timestamp: snapshot.updatedAt.getTime(),
editor: snapshot.userId,
};
}
protected async setDocSnapshot(snapshot: DocRecord) {
// we always acquire the lock before writing to the user snapshot table,
// so a simple upsert without checking updatedAt is safe
await this.db.userSnapshot.upsert({
where: {
userId_id: {
userId: snapshot.spaceId,
id: snapshot.docId,
},
},
update: {
blob: Buffer.from(snapshot.bin),
updatedAt: new Date(snapshot.timestamp),
},
create: {
userId: snapshot.spaceId,
id: snapshot.docId,
blob: Buffer.from(snapshot.bin),
createdAt: new Date(snapshot.timestamp),
updatedAt: new Date(snapshot.timestamp),
},
});
return true;
}
protected override async lockDocForUpdate(
workspaceId: string,
docId: string
) {
const lock = await this.mutex.lock(`userspace:${workspaceId}:${docId}`);
if (!lock) {
throw new Error('Too many concurrent writings');
}
return lock;
}
}

View File

@@ -1,618 +0,0 @@
import { Injectable, Logger } from '@nestjs/common';
import { PrismaClient } from '@prisma/client';
import { chunk } from 'lodash-es';
import {
Cache,
DocHistoryNotFound,
DocNotFound,
FailedToSaveUpdates,
FailedToUpsertSnapshot,
metrics,
Mutex,
} from '../../../fundamentals';
import { retryable } from '../../../fundamentals/utils/promise';
import { DocStorageOptions } from '../options';
import {
DocRecord,
DocStorageAdapter,
DocUpdate,
HistoryFilter,
} from '../storage';
const UPDATES_QUEUE_CACHE_KEY = 'doc:manager:updates';
@Injectable()
export class PgWorkspaceDocStorageAdapter extends DocStorageAdapter {
private readonly logger = new Logger(PgWorkspaceDocStorageAdapter.name);
constructor(
private readonly db: PrismaClient,
private readonly mutex: Mutex,
private readonly cache: Cache,
protected override readonly options: DocStorageOptions
) {
super(options);
}
async pushDocUpdates(
workspaceId: string,
docId: string,
updates: Uint8Array[],
editorId?: string
) {
if (!updates.length) {
return 0;
}
let pendings = updates;
let done = 0;
let timestamp = Date.now();
try {
await retryable(async () => {
if (done !== 0) {
pendings = pendings.slice(done);
}
// TODO(@forehalo): remove in next release
const lastSeq = await this.getUpdateSeq(
workspaceId,
docId,
updates.length
);
let turn = 0;
const batchCount = 10;
for (const batch of chunk(pendings, batchCount)) {
const now = Date.now();
await this.db.update.createMany({
data: batch.map((update, i) => {
const subSeq = turn * batchCount + i + 1;
// `lastSeq` is the seq num reserved for the last update of this push
// example: pushing 11 updates when the stored seq was 20 reserves lastSeq = 31,
// so the seq for the first update in the first batch is:
// 31 - 11 + subSeq(0 * 10 + 0 + 1) = 21
// ^ lastSeq ^ updates.length ^ turn ^ batchCount ^ i
const seq = lastSeq - updates.length + subSeq;
const createdAt = now + subSeq;
timestamp = Math.max(timestamp, createdAt);
return {
workspaceId,
id: docId,
blob: Buffer.from(update),
seq,
createdAt: new Date(createdAt),
createdBy: editorId || null,
};
}),
});
turn++;
done += batch.length;
await this.updateCachedUpdatesCount(workspaceId, docId, batch.length);
}
});
} catch (e) {
this.logger.error('Failed to insert doc updates', e);
metrics.doc.counter('doc_update_insert_failed').add(1);
throw new FailedToSaveUpdates();
}
return timestamp;
}
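// Worked example of the seq math above (numbers illustrative): pushing 11 updates
// when the stored seq was 20 reserves lastSeq = 31; with batchCount = 10, the first
// batch gets seq 21..30 (turn 0, subSeq 1..10) and the second batch gets seq 31
// (turn 1, subSeq 11).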
protected async getDocUpdates(workspaceId: string, docId: string) {
const rows = await this.db.update.findMany({
where: {
workspaceId,
id: docId,
},
orderBy: {
createdAt: 'asc',
},
});
return rows.map(row => ({
bin: row.blob,
timestamp: row.createdAt.getTime(),
editor: row.createdBy || undefined,
}));
}
async deleteDoc(workspaceId: string, docId: string) {
const ident = { where: { workspaceId, id: docId } };
await this.db.$transaction([
this.db.snapshot.deleteMany(ident),
this.db.update.deleteMany(ident),
this.db.snapshotHistory.deleteMany(ident),
]);
}
async deleteSpace(workspaceId: string) {
const ident = { where: { workspaceId } };
await this.db.$transaction([
this.db.workspace.deleteMany({
where: {
id: workspaceId,
},
}),
this.db.snapshot.deleteMany(ident),
this.db.update.deleteMany(ident),
this.db.snapshotHistory.deleteMany(ident),
]);
}
async getSpaceDocTimestamps(workspaceId: string, after?: number) {
const snapshots = await this.db.snapshot.findMany({
select: {
id: true,
updatedAt: true,
},
where: {
workspaceId,
...(after
? {
updatedAt: {
gt: new Date(after),
},
}
: {}),
},
});
const updates = await this.db.update.groupBy({
where: {
workspaceId,
...(after
? {
createdAt: {
gt: new Date(after),
},
}
: {}),
},
by: ['id'],
_max: {
createdAt: true,
},
});
const result: Record<string, number> = {};
snapshots.forEach(s => {
result[s.id] = s.updatedAt.getTime();
});
updates.forEach(u => {
if (u._max.createdAt) {
result[u.id] = u._max.createdAt.getTime();
}
});
return result;
}
protected async markUpdatesMerged(
workspaceId: string,
docId: string,
updates: DocUpdate[]
) {
const result = await this.db.update.deleteMany({
where: {
workspaceId,
id: docId,
createdAt: {
in: updates.map(u => new Date(u.timestamp)),
},
},
});
await this.updateCachedUpdatesCount(workspaceId, docId, -result.count);
return result.count;
}
async listDocHistories(
workspaceId: string,
docId: string,
query: HistoryFilter
) {
const histories = await this.db.snapshotHistory.findMany({
select: {
timestamp: true,
createdByUser: {
select: {
name: true,
avatarUrl: true,
},
},
},
where: {
workspaceId,
id: docId,
timestamp: {
lt: query.before ? new Date(query.before) : new Date(),
},
},
orderBy: {
timestamp: 'desc',
},
take: query.limit,
});
return histories.map(h => ({
timestamp: h.timestamp.getTime(),
editor: h.createdByUser,
}));
}
async getDocHistory(workspaceId: string, docId: string, timestamp: number) {
const history = await this.db.snapshotHistory.findUnique({
where: {
workspaceId_id_timestamp: {
workspaceId,
id: docId,
timestamp: new Date(timestamp),
},
},
});
if (!history) {
return null;
}
return {
spaceId: workspaceId,
docId,
bin: history.blob,
timestamp,
editor: history.createdBy || undefined,
};
}
override async rollbackDoc(
spaceId: string,
docId: string,
timestamp: number,
editorId?: string
): Promise<void> {
await using _lock = await this.lockDocForUpdate(spaceId, docId);
const toSnapshot = await this.getDocHistory(spaceId, docId, timestamp);
if (!toSnapshot) {
throw new DocHistoryNotFound({ spaceId, docId, timestamp });
}
const fromSnapshot = await this.getDocSnapshot(spaceId, docId);
if (!fromSnapshot) {
throw new DocNotFound({ spaceId, docId });
}
// force create a new history record after rollback
await this.createDocHistory(
{
...fromSnapshot,
// override the editor to the one who requested the rollback
editor: editorId,
},
true
);
// WARN:
// we should never update the snapshot itself while recovering,
// that is not how recovery works in a CRDT.
// let the user revert on the client and push the data back through the sync system
// const change = this.generateChangeUpdate(fromSnapshot.bin, toSnapshot.bin);
// await this.pushDocUpdates(spaceId, docId, [change]);
metrics.doc
.counter('history_recovered_counter', {
description: 'How many times history recovered request happened',
})
.add(1);
}
protected async createDocHistory(snapshot: DocRecord, force = false) {
const last = await this.lastDocHistory(snapshot.spaceId, snapshot.docId);
let shouldCreateHistory = false;
if (!last) {
// never created
shouldCreateHistory = true;
} else {
const lastHistoryTimestamp = last.timestamp.getTime();
if (lastHistoryTimestamp === snapshot.timestamp) {
// no change
shouldCreateHistory = false;
} else if (
// force
force ||
// last history created before interval in configs
lastHistoryTimestamp <
snapshot.timestamp - this.options.historyMinInterval(snapshot.spaceId)
) {
shouldCreateHistory = true;
}
}
if (shouldCreateHistory) {
if (this.isEmptyBin(snapshot.bin)) {
this.logger.debug(
`Doc is empty, skip creating history record for ${snapshot.docId} in workspace ${snapshot.spaceId}`
);
return false;
}
await this.db.snapshotHistory
.create({
select: {
timestamp: true,
},
data: {
workspaceId: snapshot.spaceId,
id: snapshot.docId,
timestamp: new Date(snapshot.timestamp),
blob: Buffer.from(snapshot.bin),
createdBy: snapshot.editor,
expiredAt: new Date(
Date.now() + (await this.options.historyMaxAge(snapshot.spaceId))
),
},
})
.catch(() => {
// safe to ignore
// only happens when a duplicated history record is created by multiple processes
});
metrics.doc
.counter('history_created_counter', {
description: 'How many times the snapshot history created',
})
.add(1);
this.logger.debug(
`History created for ${snapshot.docId} in workspace ${snapshot.spaceId}.`
);
return true;
}
return false;
}
protected async getDocSnapshot(workspaceId: string, docId: string) {
const snapshot = await this.db.snapshot.findUnique({
where: {
id_workspaceId: {
workspaceId,
id: docId,
},
},
});
if (!snapshot) {
return null;
}
return {
spaceId: workspaceId,
docId,
bin: snapshot.blob,
timestamp: snapshot.updatedAt.getTime(),
// creator and editor may be null if their account is deleted
editor: snapshot.updatedBy || snapshot.createdBy || undefined,
};
}
protected async setDocSnapshot(snapshot: DocRecord) {
const { spaceId, docId, bin, timestamp } = snapshot;
if (this.isEmptyBin(bin)) {
return false;
}
const updatedAt = new Date(timestamp);
// CONCERNS:
// i. Because we save the real user's last seen action time as `updatedAt`,
// it's possible to simply compare the `updatedAt` to determine if the snapshot is older than the one we are going to save.
//
// ii. Prisma doesn't support `upsert` with an additional `where` condition alongside the unique constraint.
// In our case, we need to manually check the `updatedAt` to avoid overriding the newer snapshot.
// where: { id_workspaceId: {}, updatedAt: { lt: updatedAt } }
// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
try {
const result: { updatedAt: Date }[] = await this.db.$queryRaw`
INSERT INTO "snapshots" ("workspace_id", "guid", "blob", "created_at", "updated_at", "created_by", "updated_by")
VALUES (${spaceId}, ${docId}, ${bin}, DEFAULT, ${updatedAt}, ${snapshot.editor}, ${snapshot.editor})
ON CONFLICT ("workspace_id", "guid")
DO UPDATE SET "blob" = ${bin}, "updated_at" = ${updatedAt}, "updated_by" = ${snapshot.editor}
WHERE "snapshots"."workspace_id" = ${spaceId} AND "snapshots"."guid" = ${docId} AND "snapshots"."updated_at" <= ${updatedAt}
RETURNING "snapshots"."workspace_id" as "workspaceId", "snapshots"."guid" as "id", "snapshots"."updated_at" as "updatedAt"
`;
// const result = await this.db.snapshot.upsert({
// select: {
// updatedAt: true,
// seq: true,
// },
// where: {
// id_workspaceId: {
// workspaceId,
// id: guid,
// },
// ⬇️ NOT SUPPORTED BY PRISMA YET
// updatedAt: {
// lt: updatedAt,
// },
// },
// update: {
// blob,
// state,
// updatedAt,
// },
// create: {
// workspaceId,
// id: guid,
// blob,
// state,
// updatedAt,
// seq,
// },
// });
// if the condition `snapshot.updatedAt > updatedAt` holds, the snapshot has already been updated by another process,
// and the updates applied to the current `doc` must have been seen by that process as well.
// `updatedSnapshot` will be `undefined` in this case.
const updatedSnapshot = result.at(0);
return !!updatedSnapshot;
} catch (e) {
metrics.doc.counter('snapshot_upsert_failed').add(1);
this.logger.error('Failed to upsert snapshot', e);
throw new FailedToUpsertSnapshot();
}
}
protected override async lockDocForUpdate(
workspaceId: string,
docId: string
) {
const lock = await this.mutex.lock(`doc:update:${workspaceId}:${docId}`);
if (!lock) {
throw new Error('Too many concurrent writes');
}
return lock;
}
protected async lastDocHistory(workspaceId: string, id: string) {
return this.db.snapshotHistory.findFirst({
where: {
workspaceId,
id,
},
select: {
timestamp: true,
state: true,
},
orderBy: {
timestamp: 'desc',
},
});
}
// for auto merging
async randomDoc() {
const key = await this.cache.mapRandomKey(UPDATES_QUEUE_CACHE_KEY);
if (key) {
const cachedCount = await this.cache.mapIncrease(
UPDATES_QUEUE_CACHE_KEY,
key,
0
);
if (cachedCount > 0) {
const [workspaceId, id] = key.split('::');
const count = await this.db.update.count({
where: {
workspaceId,
id,
},
});
// FIXME(@forehalo): somehow the update count in cache is not accurate
if (count === 0) {
metrics.doc
.counter('doc_update_count_inconsistent_with_cache')
.add(1);
await this.cache.mapDelete(UPDATES_QUEUE_CACHE_KEY, key);
return null;
}
return { workspaceId, docId: id };
}
}
return null;
}
private async updateCachedUpdatesCount(
workspaceId: string,
guid: string,
count: number
) {
const result = await this.cache.mapIncrease(
UPDATES_QUEUE_CACHE_KEY,
`${workspaceId}::${guid}`,
count
);
if (result <= 0) {
await this.cache.mapDelete(
UPDATES_QUEUE_CACHE_KEY,
`${workspaceId}::${guid}`
);
}
}
/**
* @deprecated
*/
private readonly seqMap = new Map<string, number>();
/**
*
* @deprecated updates do not rely on seq number anymore
*
* keep in next release to avoid downtime when upgrading instances
*/
private async getUpdateSeq(workspaceId: string, guid: string, batch = 1) {
const MAX_SEQ_NUM = 0x3fffffff; // u31
try {
const { seq } = await this.db.snapshot.update({
select: {
seq: true,
},
where: {
id_workspaceId: {
workspaceId,
id: guid,
},
},
data: {
seq: {
increment: batch,
},
},
});
if (!seq) {
return batch;
}
// reset
if (seq >= MAX_SEQ_NUM) {
await this.db.snapshot.update({
select: {
seq: true,
},
where: {
id_workspaceId: {
workspaceId,
id: guid,
},
},
data: {
seq: 0,
},
});
}
return seq;
} catch {
// snapshot doesn't exist yet, just count from 1
const last = this.seqMap.get(workspaceId + guid) ?? 0;
this.seqMap.set(workspaceId + guid, last + batch);
return last + batch;
}
}
}
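// Read-path sketch (illustrative, not part of the diff; ids are placeholders):
// getDoc() is inherited from DocStorageAdapter and squashes the rows written
// by pushDocUpdates into a fresh snapshot while holding the
// `doc:update:<workspaceId>:<docId>` mutex.
async function pushThenRead(
  adapter: PgWorkspaceDocStorageAdapter,
  updates: Uint8Array[]
) {
  const pushedAt = await adapter.pushDocUpdates('ws-id', 'doc-id', updates);
  const record = await adapter.getDoc('ws-id', 'doc-id'); // merge + persist
  return record && record.timestamp >= pushedAt ? record : null;
}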

View File

@@ -0,0 +1,266 @@
import { isDeepStrictEqual } from 'node:util';
import { Injectable, Logger } from '@nestjs/common';
import { Cron, CronExpression } from '@nestjs/schedule';
import { PrismaClient } from '@prisma/client';
import type { EventPayload } from '../../fundamentals';
import {
Config,
DocHistoryNotFound,
DocNotFound,
metrics,
OnEvent,
WorkspaceNotFound,
} from '../../fundamentals';
import { QuotaService } from '../quota';
import { Permission } from '../workspaces/types';
import { isEmptyBuffer } from './manager';
@Injectable()
export class DocHistoryManager {
private readonly logger = new Logger(DocHistoryManager.name);
constructor(
private readonly config: Config,
private readonly db: PrismaClient,
private readonly quota: QuotaService
) {}
@OnEvent('workspace.deleted')
onWorkspaceDeleted(workspaceId: EventPayload<'workspace.deleted'>) {
return this.db.snapshotHistory.deleteMany({
where: {
workspaceId,
},
});
}
@OnEvent('snapshot.deleted')
onSnapshotDeleted({ workspaceId, id }: EventPayload<'snapshot.deleted'>) {
return this.db.snapshotHistory.deleteMany({
where: {
workspaceId,
id,
},
});
}
@OnEvent('snapshot.updated')
async onDocUpdated(
{ workspaceId, id, previous }: EventPayload<'snapshot.updated'>,
forceCreate = false
) {
const last = await this.last(workspaceId, id);
let shouldCreateHistory = false;
if (!last) {
// never created
shouldCreateHistory = true;
} else if (last.timestamp === previous.updatedAt) {
// no change
shouldCreateHistory = false;
} else if (
// force
forceCreate ||
// last history created before interval in configs
last.timestamp.getTime() <
previous.updatedAt.getTime() - this.config.doc.history.interval
) {
shouldCreateHistory = true;
}
if (shouldCreateHistory) {
// skip history recording when no actual update happened on the snapshot
if (last && isDeepStrictEqual(last.state, previous.state)) {
this.logger.debug(
`State matches, skip creating history record for ${id} in workspace ${workspaceId}`
);
return;
}
if (isEmptyBuffer(previous.blob)) {
this.logger.debug(
`Doc is empty, skip creating history record for ${id} in workspace ${workspaceId}`
);
return;
}
await this.db.snapshotHistory
.create({
select: {
timestamp: true,
},
data: {
workspaceId,
id,
timestamp: previous.updatedAt,
blob: previous.blob,
state: previous.state,
expiredAt: await this.getExpiredDateFromNow(workspaceId),
},
})
.catch(() => {
// safe to ignore
// only happens when a duplicated history record is created by multiple processes
});
metrics.doc
.counter('history_created_counter', {
description: 'How many times the snapshot history created',
})
.add(1);
this.logger.debug(
`History created for ${id} in workspace ${workspaceId}.`
);
}
}
async list(
workspaceId: string,
id: string,
before: Date = new Date(),
take: number = 10
) {
return this.db.snapshotHistory.findMany({
select: {
timestamp: true,
},
where: {
workspaceId,
id,
timestamp: {
lt: before,
},
// only include the ones that have not expired
expiredAt: {
gt: new Date(),
},
},
orderBy: {
timestamp: 'desc',
},
take,
});
}
async count(workspaceId: string, id: string) {
return this.db.snapshotHistory.count({
where: {
workspaceId,
id,
expiredAt: {
gt: new Date(),
},
},
});
}
async get(workspaceId: string, id: string, timestamp: Date) {
return this.db.snapshotHistory.findUnique({
where: {
workspaceId_id_timestamp: {
workspaceId,
id,
timestamp,
},
expiredAt: {
gt: new Date(),
},
},
});
}
async last(workspaceId: string, id: string) {
return this.db.snapshotHistory.findFirst({
where: {
workspaceId,
id,
},
select: {
timestamp: true,
state: true,
},
orderBy: {
timestamp: 'desc',
},
});
}
async recover(workspaceId: string, id: string, timestamp: Date) {
const history = await this.db.snapshotHistory.findUnique({
where: {
workspaceId_id_timestamp: {
workspaceId,
id,
timestamp,
},
},
});
if (!history) {
throw new DocHistoryNotFound({
workspaceId,
docId: id,
timestamp: timestamp.getTime(),
});
}
const oldSnapshot = await this.db.snapshot.findUnique({
where: {
id_workspaceId: {
id,
workspaceId,
},
},
});
if (!oldSnapshot) {
throw new DocNotFound({ workspaceId, docId: id });
}
// save old snapshot as one history record
await this.onDocUpdated({ workspaceId, id, previous: oldSnapshot }, true);
// WARN:
// we should never update the snapshot itself while recovering,
// that is not how recovery works in a CRDT.
// let the user revert on the client and push the data back through the sync system
// `await this.db.snapshot.update();`
metrics.doc
.counter('history_recovered_counter', {
description: 'How many times history recovered request happened',
})
.add(1);
return history.timestamp;
}
async getExpiredDateFromNow(workspaceId: string) {
const permission = await this.db.workspaceUserPermission.findFirst({
select: {
userId: true,
},
where: {
workspaceId,
type: Permission.Owner,
},
});
if (!permission) {
throw new WorkspaceNotFound({ workspaceId });
}
const quota = await this.quota.getUserQuota(permission.userId);
return quota.feature.historyPeriodFromNow;
}
@Cron(CronExpression.EVERY_DAY_AT_MIDNIGHT /* every day at 12am */)
async cleanupExpiredHistory() {
await this.db.snapshotHistory.deleteMany({
where: {
expiredAt: {
lte: new Date(),
},
},
});
}
}
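// Usage sketch (illustrative, not part of the diff; `history` is assumed to
// be injected): restore a doc to its most recent history entry. Note that
// recover() only snapshots the current state and returns the target
// timestamp; per the WARN above, the actual data revert still happens
// client-side through the sync system.
async function restoreLatest(
  history: DocHistoryManager,
  workspaceId: string,
  docId: string
) {
  const [latest] = await history.list(workspaceId, docId, new Date(), 1);
  if (!latest) return null;
  return history.recover(workspaceId, docId, latest.timestamp);
}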

View File

@@ -2,24 +2,15 @@ import './config';
import { Module } from '@nestjs/common';
import { PermissionModule } from '../permission';
import { QuotaModule } from '../quota';
import { PgUserspaceDocStorageAdapter } from './adapters/userspace';
import { PgWorkspaceDocStorageAdapter } from './adapters/workspace';
import { DocStorageCronJob } from './job';
import { DocStorageOptions } from './options';
import { DocHistoryManager } from './history';
import { DocManager } from './manager';
@Module({
imports: [QuotaModule, PermissionModule],
providers: [
DocStorageOptions,
PgWorkspaceDocStorageAdapter,
PgUserspaceDocStorageAdapter,
DocStorageCronJob,
],
exports: [PgWorkspaceDocStorageAdapter, PgUserspaceDocStorageAdapter],
imports: [QuotaModule],
providers: [DocManager, DocHistoryManager],
exports: [DocManager, DocHistoryManager],
})
export class DocStorageModule {}
export { PgUserspaceDocStorageAdapter, PgWorkspaceDocStorageAdapter };
export class DocModule {}
export { DocStorageAdapter, type Editor } from './storage';
export { DocHistoryManager, DocManager };

View File

@@ -1,76 +0,0 @@
import { Injectable, Logger, OnModuleInit } from '@nestjs/common';
import { Cron, CronExpression, SchedulerRegistry } from '@nestjs/schedule';
import { PrismaClient } from '@prisma/client';
import { CallTimer, Config, metrics } from '../../fundamentals';
import { PgWorkspaceDocStorageAdapter } from './adapters/workspace';
@Injectable()
export class DocStorageCronJob implements OnModuleInit {
private readonly logger = new Logger(DocStorageCronJob.name);
private busy = false;
constructor(
private readonly registry: SchedulerRegistry,
private readonly config: Config,
private readonly db: PrismaClient,
private readonly workspace: PgWorkspaceDocStorageAdapter
) {}
onModuleInit() {
if (this.config.doc.manager.enableUpdateAutoMerging) {
this.registry.addInterval(
this.autoMergePendingDocUpdates.name,
// scheduler registry will clean up the interval when the app is stopped
setInterval(() => {
if (this.busy) {
return;
}
this.busy = true;
this.autoMergePendingDocUpdates()
.catch(() => {
/* never fail */
})
.finally(() => {
this.busy = false;
});
}, this.config.doc.manager.updatePollInterval)
);
this.logger.log('Updates pending queue auto merging cron started');
}
}
@CallTimer('doc', 'auto_merge_pending_doc_updates')
async autoMergePendingDocUpdates() {
try {
const randomDoc = await this.workspace.randomDoc();
if (!randomDoc) {
return;
}
await this.workspace.getDoc(randomDoc.workspaceId, randomDoc.docId);
} catch (e) {
metrics.doc.counter('auto_merge_pending_doc_updates_error').add(1);
this.logger.error('Failed to auto merge pending doc updates', e);
}
}
@Cron(CronExpression.EVERY_DAY_AT_MIDNIGHT /* every day at 12am */)
async cleanupExpiredHistory() {
await this.db.snapshotHistory.deleteMany({
where: {
expiredAt: {
lte: new Date(),
},
},
});
}
@Cron(CronExpression.EVERY_MINUTE)
async reportUpdatesQueueCount() {
metrics.doc
.gauge('updates_queue_count')
.record(await this.db.update.count());
}
}

View File

@@ -0,0 +1,853 @@
import {
Injectable,
Logger,
OnModuleDestroy,
OnModuleInit,
} from '@nestjs/common';
import { Cron, CronExpression } from '@nestjs/schedule';
import { PrismaClient, Snapshot, Update } from '@prisma/client';
import { chunk } from 'lodash-es';
import { defer, retry } from 'rxjs';
import {
applyUpdate,
Doc,
encodeStateAsUpdate,
encodeStateVector,
transact,
} from 'yjs';
import type { EventPayload } from '../../fundamentals';
import {
Cache,
CallTimer,
Config,
EventEmitter,
mergeUpdatesInApplyWay as jwstMergeUpdates,
metrics,
OnEvent,
} from '../../fundamentals';
function compare(yBinary: Buffer, jwstBinary: Buffer, strict = false): boolean {
if (yBinary.equals(jwstBinary)) {
return true;
}
if (strict) {
return false;
}
const doc = new Doc();
applyUpdate(doc, jwstBinary);
const yBinary2 = Buffer.from(encodeStateAsUpdate(doc));
return compare(yBinary, yBinary2, true);
}
export function isEmptyBuffer(buf: Buffer): boolean {
return (
buf.length === 0 ||
// 0x0000
(buf.length === 2 && buf[0] === 0 && buf[1] === 0)
);
}
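// Quick illustration of the lenient fallback (uses only the yjs imports at the
// top of this file; illustrative, not part of the diff): two binaries that are
// not byte-identical can still encode the same document once re-encoded.
function demoLenientCompare() {
  const d = new Doc();
  d.getText('t').insert(0, 'x');
  const a = Buffer.from(encodeStateAsUpdate(d));
  const replayed = new Doc();
  applyUpdate(replayed, a);
  const b = Buffer.from(encodeStateAsUpdate(replayed));
  console.log(compare(a, b)); // true even when !a.equals(b)
  console.log(isEmptyBuffer(Buffer.from([0, 0]))); // true: the empty update
}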
const MAX_SEQ_NUM = 0x3fffffff; // u31
const UPDATES_QUEUE_CACHE_KEY = 'doc:manager:updates';
interface DocResponse {
doc: Doc;
timestamp: number;
}
interface BinaryResponse {
binary: Buffer;
timestamp: number;
}
/**
 * Since we can't directly save every client update into the database without overloading it,
 * we need to buffer the updates and merge them to reduce db writes.
 *
 * Also, when a new client joins, it would be nice to see the latest doc asap,
 * so we need to at least store a snapshot of the doc and return it quickly,
 * alongside all the updates that have not yet been applied to that snapshot(timestamp).
 */
@Injectable()
export class DocManager implements OnModuleInit, OnModuleDestroy {
private readonly logger = new Logger(DocManager.name);
private job: NodeJS.Timeout | null = null;
private readonly seqMap = new Map<string, number>();
private busy = false;
constructor(
private readonly db: PrismaClient,
private readonly config: Config,
private readonly cache: Cache,
private readonly event: EventEmitter
) {}
onModuleInit() {
if (this.config.doc.manager.enableUpdateAutoMerging) {
this.logger.log('Use Database');
this.setup();
}
}
onModuleDestroy() {
this.destroy();
}
@CallTimer('doc', 'yjs_recover_updates_to_doc')
private recoverDoc(...updates: Buffer[]): Promise<Doc> {
const doc = new Doc();
const chunks = chunk(updates, 10);
return new Promise(resolve => {
const next = () => {
const updates = chunks.shift();
if (updates?.length) {
transact(doc, () => {
updates.forEach(u => {
try {
applyUpdate(doc, u);
} catch (e) {
this.logger.error('Failed to apply update', e);
}
});
});
// avoid applying too many updates in a single round, which would hog the whole cpu like a deadlock
setImmediate(() => {
next();
});
} else {
resolve(doc);
}
};
next();
});
}
private async applyUpdates(guid: string, ...updates: Buffer[]): Promise<Doc> {
const doc = await this.recoverDoc(...updates);
const useYocto = await this.config.runtime.fetch(
'doc/experimentalMergeWithYOcto'
);
// test jwst codec
if (useYocto) {
metrics.jwst.counter('codec_merge_counter').add(1);
const yjsResult = Buffer.from(encodeStateAsUpdate(doc));
let log = false;
try {
const jwstResult = jwstMergeUpdates(updates);
if (!compare(yjsResult, jwstResult)) {
metrics.jwst.counter('codec_not_match').add(1);
this.logger.warn(
`jwst codec result doesn't match yjs codec result for: ${guid}`
);
log = true;
if (this.config.node.dev) {
this.logger.warn(`Expected:\n ${yjsResult.toString('hex')}`);
this.logger.warn(`Result:\n ${jwstResult.toString('hex')}`);
}
}
} catch (e) {
metrics.jwst.counter('codec_fails_counter').add(1);
this.logger.warn(`jwst apply update failed for ${guid}: ${e}`);
log = true;
} finally {
if (log && this.config.node.dev) {
this.logger.warn(
`Updates: ${updates.map(u => u.toString('hex')).join('\n')}`
);
}
}
}
return doc;
}
/**
* setup pending update processing loop
*/
setup() {
this.job = setInterval(() => {
if (!this.busy) {
this.busy = true;
this.autoSquash()
.catch(() => {
/* we handle all errors in the work itself */
})
.finally(() => {
this.busy = false;
});
}
}, this.config.doc.manager.updatePollInterval);
this.logger.log('Automation started');
}
/**
* stop pending update processing loop
*/
destroy() {
if (this.job) {
clearInterval(this.job);
this.job = null;
this.logger.log('Automation stopped');
}
}
@OnEvent('workspace.deleted')
async onWorkspaceDeleted(workspaceId: string) {
await this.db.snapshot.deleteMany({
where: {
workspaceId,
},
});
await this.db.update.deleteMany({
where: {
workspaceId,
},
});
}
@OnEvent('snapshot.deleted')
async onSnapshotDeleted({
id,
workspaceId,
}: EventPayload<'snapshot.deleted'>) {
await this.db.update.deleteMany({
where: {
id,
workspaceId,
},
});
}
/**
* add update to manager for later processing.
*/
async push(
workspaceId: string,
guid: string,
update: Buffer,
retryTimes = 10
) {
const timestamp = await new Promise<number>((resolve, reject) => {
defer(async () => {
const seq = await this.getUpdateSeq(workspaceId, guid);
const { createdAt } = await this.db.update.create({
select: {
createdAt: true,
},
data: {
workspaceId,
id: guid,
seq,
blob: update,
},
});
return createdAt.getTime();
})
.pipe(retry(retryTimes)) // retry until the seq num no longer conflicts
.subscribe({
next: timestamp => {
this.logger.debug(
`pushed 1 update for ${guid} in workspace ${workspaceId}`
);
resolve(timestamp);
},
error: e => {
this.logger.error('Failed to push updates', e);
reject(new Error('Failed to push update'));
},
});
});
await this.updateCachedUpdatesCount(workspaceId, guid, 1);
return timestamp;
}
async batchPush(
workspaceId: string,
guid: string,
updates: Buffer[],
retryTimes = 10
) {
const timestamp = await new Promise<number>((resolve, reject) => {
defer(async () => {
const lastSeq = await this.getUpdateSeq(
workspaceId,
guid,
updates.length
);
const now = Date.now();
let timestamp = now;
let turn = 0;
const batchCount = 10;
for (const batch of chunk(updates, batchCount)) {
await this.db.update.createMany({
data: batch.map((update, i) => {
const subSeq = turn * batchCount + i + 1;
// `lastSeq` is the seq num reserved for the final update of the whole push
// e.g. for 11 batched updates pushed after a previous last seq of 20, lastSeq is 31;
// the seq for the first update in the first batch is:
// 31 - 11 + subSeq(0 * 10 + 0 + 1) = 21
// ^ lastSeq ^ updates.length ^ turn ^ batchCount ^ i
const seq = lastSeq - updates.length + subSeq;
const createdAt = now + subSeq;
timestamp = Math.max(timestamp, createdAt);
return {
workspaceId,
id: guid,
blob: update,
seq,
createdAt: new Date(createdAt), // make sure the updates can be ordered by create time
};
}),
});
turn++;
}
return timestamp;
})
.pipe(retry(retryTimes)) // retry until the seq num no longer conflicts
.subscribe({
next: timestamp => {
this.logger.debug(
`pushed ${updates.length} updates for ${guid} in workspace ${workspaceId}`
);
resolve(timestamp);
},
error: e => {
this.logger.error('Failed to push updates', e);
reject(new Error('Failed to push update'));
},
});
});
await this.updateCachedUpdatesCount(workspaceId, guid, updates.length);
return timestamp;
}
/**
* Get latest timestamp of all docs in the workspace.
*/
@CallTimer('doc', 'get_doc_timestamps')
async getDocTimestamps(workspaceId: string, after: number | undefined = 0) {
const snapshots = await this.db.snapshot.findMany({
where: {
workspaceId,
updatedAt: {
gt: new Date(after),
},
},
select: {
id: true,
updatedAt: true,
},
});
const updates = await this.db.update.groupBy({
where: {
workspaceId,
createdAt: {
gt: new Date(after),
},
},
by: ['id'],
_max: {
createdAt: true,
},
});
const result: Record<string, number> = {};
snapshots.forEach(s => {
result[s.id] = s.updatedAt.getTime();
});
updates.forEach(u => {
if (u._max.createdAt) {
result[u.id] = u._max.createdAt.getTime();
}
});
return result;
}
/**
* get the latest doc with all update applied.
*/
async get(workspaceId: string, guid: string): Promise<DocResponse | null> {
const result = await this._get(workspaceId, guid);
if (result) {
if ('doc' in result) {
return result;
} else {
const doc = await this.recoverDoc(result.binary);
return {
doc,
timestamp: result.timestamp,
};
}
}
return null;
}
/**
* get the latest doc binary with all update applied.
*/
async getBinary(
workspaceId: string,
guid: string
): Promise<BinaryResponse | null> {
const result = await this._get(workspaceId, guid);
if (result) {
if ('doc' in result) {
return {
binary: Buffer.from(encodeStateAsUpdate(result.doc)),
timestamp: result.timestamp,
};
} else {
return result;
}
}
return null;
}
/**
* get the latest doc state vector with all update applied.
*/
async getDocState(
workspaceId: string,
guid: string
): Promise<BinaryResponse | null> {
const snapshot = await this.getSnapshot(workspaceId, guid);
const updates = await this.getUpdates(workspaceId, guid);
if (updates.length) {
const { doc, timestamp } = await this.squash(snapshot, updates);
return {
binary: Buffer.from(encodeStateVector(doc)),
timestamp,
};
}
return snapshot?.state
? {
binary: snapshot.state,
timestamp: snapshot.updatedAt.getTime(),
}
: null;
}
/**
* get the snapshot of the doc we've seen.
*/
async getSnapshot(workspaceId: string, guid: string) {
return this.db.snapshot.findUnique({
where: {
id_workspaceId: {
workspaceId,
id: guid,
},
},
});
}
/**
* get pending updates
*/
async getUpdates(workspaceId: string, guid: string) {
const updates = await this.db.update.findMany({
where: {
workspaceId,
id: guid,
},
// take it easy, we don't want to overload the db or the cpu.
// note the trade-off: because we limit the number taken here,
// the user will never see the latest doc while too many updates are pending to be merged.
take: this.config.doc.manager.maxUpdatesPullCount,
});
// perf(memory): avoid sorting in db
return updates.sort((a, b) => (a.createdAt < b.createdAt ? -1 : 1));
}
/**
* apply pending updates to snapshot
*/
private async autoSquash() {
// find the first update and batch process updates with the same id
const candidate = await this.getAutoSquashCandidate();
// no pending updates
if (!candidate) {
return;
}
const { id, workspaceId } = candidate;
await this.lockUpdatesForAutoSquash(workspaceId, id, async () => {
try {
await this._get(workspaceId, id);
} catch (e) {
this.logger.error(
`Failed to apply updates for workspace: ${workspaceId}, guid: ${id}`
);
this.logger.error(e);
}
});
}
private async getAutoSquashCandidate() {
const cache = await this.getAutoSquashCandidateFromCache();
if (cache) {
return cache;
}
return this.db.update.findFirst({
select: {
id: true,
workspaceId: true,
},
});
}
/**
* @returns whether the snapshot was updated to the latest; `undefined` means the doc to be upserted is outdated.
*/
@CallTimer('doc', 'upsert')
private async upsert(
workspaceId: string,
guid: string,
doc: Doc,
// we always delay the snapshot update to avoid db overload,
// so the db's auto-updated `updatedAt` will never accurately reflect the user's real action time
updatedAt: Date,
seq: number
) {
const blob = Buffer.from(encodeStateAsUpdate(doc));
if (isEmptyBuffer(blob)) {
return undefined;
}
const state = Buffer.from(encodeStateVector(doc));
// CONCERNS:
// i. Because we save the real user's last seen action time as `updatedAt`,
// it's possible to simply compare the `updatedAt` to determine if the snapshot is older than the one we are going to save.
//
// ii. Prisma doesn't support `upsert` with an additional `where` condition alongside the unique constraint.
// In our case, we need to manually check the `updatedAt` to avoid overriding the newer snapshot.
// where: { id_workspaceId: {}, updatedAt: { lt: updatedAt } }
// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
//
// iii. Only set the seq number when creating the snapshot.
// For the update scenario, the seq number is bumped when updates are pushed to the db.
try {
const result: { updatedAt: Date }[] = await this.db.$queryRaw`
INSERT INTO "snapshots" ("workspace_id", "guid", "blob", "state", "seq", "created_at", "updated_at")
VALUES (${workspaceId}, ${guid}, ${blob}, ${state}, ${seq}, DEFAULT, ${updatedAt})
ON CONFLICT ("workspace_id", "guid")
DO UPDATE SET "blob" = ${blob}, "state" = ${state}, "updated_at" = ${updatedAt}, "seq" = ${seq}
WHERE "snapshots"."workspace_id" = ${workspaceId} AND "snapshots"."guid" = ${guid} AND "snapshots"."updated_at" <= ${updatedAt}
RETURNING "snapshots"."workspace_id" as "workspaceId", "snapshots"."guid" as "id", "snapshots"."updated_at" as "updatedAt"
`;
// const result = await this.db.snapshot.upsert({
// select: {
// updatedAt: true,
// seq: true,
// },
// where: {
// id_workspaceId: {
// workspaceId,
// id: guid,
// },
// ⬇️ NOT SUPPORTED BY PRISMA YET
// updatedAt: {
// lt: updatedAt,
// },
// },
// update: {
// blob,
// state,
// updatedAt,
// },
// create: {
// workspaceId,
// id: guid,
// blob,
// state,
// updatedAt,
// seq,
// },
// });
// if the condition `snapshot.updatedAt > updatedAt` holds, the snapshot has already been updated by another process,
// and the updates applied to the current `doc` must have been seen by that process as well.
// `updatedSnapshot` will be `undefined` in this case.
const updatedSnapshot = result.at(0);
if (!updatedSnapshot) {
return undefined;
}
return true;
} catch (e) {
this.logger.error('Failed to upsert snapshot', e);
return false;
}
}
private async _get(
workspaceId: string,
guid: string
): Promise<DocResponse | BinaryResponse | null> {
const snapshot = await this.getSnapshot(workspaceId, guid);
const updates = await this.getUpdates(workspaceId, guid);
if (updates.length) {
return this.squash(snapshot, updates);
}
return snapshot
? { binary: snapshot.blob, timestamp: snapshot.updatedAt.getTime() }
: null;
}
/**
* Squash updates into a single update and save it as a snapshot,
* deleting the merged update records at the same time.
*/
@CallTimer('doc', 'squash')
private async squash(
snapshot: Snapshot | null,
updates: Update[]
): Promise<DocResponse> {
if (!updates.length) {
throw new Error('No updates to squash');
}
const last = updates[updates.length - 1];
const { id, workspaceId } = last;
const doc = await this.applyUpdates(
id,
snapshot ? snapshot.blob : Buffer.from([0, 0]),
...updates.map(u => u.blob)
);
const done = await this.upsert(
workspaceId,
id,
doc,
last.createdAt,
last.seq
);
if (done) {
if (snapshot) {
this.event.emit('snapshot.updated', {
id,
workspaceId,
previous: {
blob: snapshot.blob,
state: snapshot.state,
updatedAt: snapshot.updatedAt,
},
});
}
this.logger.debug(
`Squashed ${updates.length} updates for ${id} in workspace ${workspaceId}`
);
}
// we keep the updates only if the upsert failed for an unknown reason
// `done === undefined` means the updates are outdated (already merged by another process), safe to delete
// `done === true` means the upsert succeeded, also safe to delete
if (done !== false) {
// always delete the updates;
// the upsert returns `undefined` (not `false`) when the state is not newer, so nothing is lost here
const { count } = await this.db.update.deleteMany({
where: {
id,
workspaceId,
seq: {
in: updates.map(u => u.seq),
},
},
});
await this.updateCachedUpdatesCount(workspaceId, id, -count);
}
return { doc, timestamp: last.createdAt.getTime() };
}
private async getUpdateSeq(workspaceId: string, guid: string, batch = 1) {
try {
const { seq } = await this.db.snapshot.update({
select: {
seq: true,
},
where: {
id_workspaceId: {
workspaceId,
id: guid,
},
},
data: {
seq: {
increment: batch,
},
},
});
// reset
if (seq >= MAX_SEQ_NUM) {
await this.db.snapshot.update({
select: {
seq: true,
},
where: {
id_workspaceId: {
workspaceId,
id: guid,
},
},
data: {
seq: 0,
},
});
}
return seq;
} catch {
// snapshot doesn't exist yet, just count from 1
const last = this.seqMap.get(workspaceId + guid) ?? 0;
this.seqMap.set(workspaceId + guid, last + batch);
return last + batch;
}
}
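// Illustrative wraparound check (hypothetical helper, not part of the diff):
// once the stored seq crosses MAX_SEQ_NUM it is reset to 0, but the crossing
// call still returns the large value, so the in-flight push stays ordered.
private static demoSeqWraparound() {
  let stored = MAX_SEQ_NUM - 1;
  const batch = 2;
  stored += batch; // what the `increment: batch` update does in the db
  const returned = stored; // 0x40000000, still monotonic for this push
  if (stored >= MAX_SEQ_NUM) stored = 0; // the reset applied for later pushes
  return { returned, stored }; // { returned: 0x40000000, stored: 0 }
}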
private async updateCachedUpdatesCount(
workspaceId: string,
guid: string,
count: number
) {
const result = await this.cache.mapIncrease(
UPDATES_QUEUE_CACHE_KEY,
`${workspaceId}::${guid}`,
count
);
if (result <= 0) {
await this.cache.mapDelete(
UPDATES_QUEUE_CACHE_KEY,
`${workspaceId}::${guid}`
);
}
}
private async getAutoSquashCandidateFromCache() {
const key = await this.cache.mapRandomKey(UPDATES_QUEUE_CACHE_KEY);
if (key) {
const cachedCount = await this.cache.mapIncrease(
UPDATES_QUEUE_CACHE_KEY,
key,
0
);
if (cachedCount > 0) {
const [workspaceId, id] = key.split('::');
const count = await this.db.update.count({
where: {
workspaceId,
id,
},
});
// FIXME(@forehalo): somehow the update count in cache is not accurate
if (count === 0) {
await this.cache.mapDelete(UPDATES_QUEUE_CACHE_KEY, key);
return null;
}
return { id, workspaceId };
}
}
return null;
}
private async doWithLock<T>(
lockScope: string,
lockResource: string,
job: () => Promise<T>
) {
const lock = `lock:${lockScope}:${lockResource}`;
const acquired = await this.cache.setnx(lock, 1, {
ttl: 60 * 1000,
});
metrics.doc.counter('lock').add(1, { scope: lockScope });
if (!acquired) {
metrics.doc.counter('lock_failed').add(1, { scope: lockScope });
return;
}
metrics.doc.counter('lock_required').add(1, { scope: lockScope });
try {
return await job();
} finally {
await this.cache
.delete(lock)
.then(() => {
metrics.doc.counter('lock_released').add(1, { scope: lockScope });
})
.catch(e => {
metrics.doc
.counter('lock_release_failed')
.add(1, { scope: lockScope });
// safe, the lock will expire when the ttl ends
this.logger.error(`Failed to release lock ${lock}`, e);
});
}
}
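// Call-site sketch (hypothetical method, not part of the diff): the job runs
// only on the instance that won the `setnx`; everyone else gets `undefined`
// back, and the 60s ttl bounds the damage if a process dies before `finally`.
private async countUpdatesExclusively(workspaceId: string, guid: string) {
  return this.doWithLock('doc:manager:example', `${workspaceId}::${guid}`, () =>
    this.db.update.count({ where: { workspaceId, id: guid } })
  );
}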
private async lockUpdatesForAutoSquash<T>(
workspaceId: string,
guid: string,
job: () => Promise<T>
) {
return this.doWithLock(
'doc:manager:updates',
`${workspaceId}::${guid}`,
job
);
}
@Cron(CronExpression.EVERY_MINUTE)
async reportUpdatesQueueCount() {
metrics.doc
.gauge('updates_queue_count')
.record(await this.db.update.count());
}
}
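// End-to-end usage sketch (illustrative, not part of the diff; `manager` is
// assumed to be injected by Nest and the ids are placeholders): buffer one
// update, then read back the merged binary, which triggers squash-on-read.
async function pushAndRead(manager: DocManager) {
  const doc = new Doc();
  doc.getText('t').insert(0, 'hi');
  const update = Buffer.from(encodeStateAsUpdate(doc));
  await manager.push('workspace-id', 'doc-guid', update); // queued in `updates`
  const merged = await manager.getBinary('workspace-id', 'doc-guid');
  return merged?.timestamp;
}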

View File

@@ -1,130 +0,0 @@
import { Injectable, Logger } from '@nestjs/common';
import { chunk } from 'lodash-es';
import * as Y from 'yjs';
import {
CallTimer,
Config,
mergeUpdatesInApplyWay as yotcoMergeUpdates,
metrics,
} from '../../fundamentals';
import { PermissionService } from '../permission';
import { QuotaService } from '../quota';
import { DocStorageOptions as IDocStorageOptions } from './storage';
function compare(yBinary: Buffer, jwstBinary: Buffer, strict = false): boolean {
if (yBinary.equals(jwstBinary)) {
return true;
}
if (strict) {
return false;
}
const doc = new Y.Doc();
Y.applyUpdate(doc, jwstBinary);
const yBinary2 = Buffer.from(Y.encodeStateAsUpdate(doc));
return compare(yBinary, yBinary2, true);
}
@Injectable()
export class DocStorageOptions implements IDocStorageOptions {
private readonly logger = new Logger('DocStorageOptions');
constructor(
private readonly config: Config,
private readonly permission: PermissionService,
private readonly quota: QuotaService
) {}
mergeUpdates = async (updates: Uint8Array[]) => {
const useYocto = await this.config.runtime.fetch(
'doc/experimentalMergeWithYOcto'
);
if (useYocto) {
const doc = await this.recoverDoc(updates);
metrics.jwst.counter('codec_merge_counter').add(1);
const yjsResult = Buffer.from(Y.encodeStateAsUpdate(doc));
let log = false;
try {
const yocto = yotcoMergeUpdates(updates.map(Buffer.from));
if (!compare(yjsResult, yocto)) {
metrics.jwst.counter('codec_not_match').add(1);
this.logger.warn(`yocto codec result doesn't match yjs codec result`);
log = true;
if (this.config.node.dev) {
this.logger.warn(`Expected:\n ${yjsResult.toString('hex')}`);
this.logger.warn(`Result:\n ${yocto.toString('hex')}`);
}
}
} catch (e) {
metrics.jwst.counter('codec_fails_counter').add(1);
this.logger.warn(`jwst apply update failed: ${e}`);
log = true;
}
if (log && this.config.node.dev) {
this.logger.warn(
`Updates: ${updates.map(u => Buffer.from(u).toString('hex')).join('\n')}`
);
}
return yjsResult;
} else {
return this.simpleMergeUpdates(updates);
}
};
historyMaxAge = async (spaceId: string) => {
const owner = await this.permission.getWorkspaceOwner(spaceId);
const quota = await this.quota.getUserQuota(owner.id);
return quota.feature.historyPeriod;
};
historyMinInterval = (_spaceId: string) => {
return this.config.doc.history.interval;
};
@CallTimer('doc', 'yjs_merge_updates')
private simpleMergeUpdates(updates: Uint8Array[]) {
return Y.mergeUpdates(updates);
}
@CallTimer('doc', 'yjs_recover_updates_to_doc')
private recoverDoc(updates: Uint8Array[]): Promise<Y.Doc> {
const doc = new Y.Doc();
const chunks = chunk(updates, 10);
let i = 0;
return new Promise(resolve => {
Y.transact(doc, () => {
const next = () => {
const updates = chunks.at(i++);
if (updates?.length) {
updates.forEach(u => {
try {
Y.applyUpdate(doc, u);
} catch (e) {
this.logger.error('Failed to apply update', e);
}
});
// avoid applying too many updates in a single round, which would hog the whole cpu like a deadlock
setImmediate(() => {
next();
});
} else {
resolve(doc);
}
};
next();
});
});
}
}

View File

@@ -1,16 +0,0 @@
import { Connection } from './connection';
export interface BlobStorageOptions {}
export interface Blob {
key: string;
bin: Uint8Array;
mimeType: string;
}
export abstract class BlobStorageAdapter extends Connection {
abstract getBlob(spaceId: string, key: string): Promise<Blob | null>;
abstract setBlob(spaceId: string, blob: Blob): Promise<string>;
abstract deleteBlob(spaceId: string, key: string): Promise<boolean>;
abstract listBlobs(spaceId: string): Promise<Blob[]>;
}
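// A minimal in-memory implementation sketch (not part of the diff), the kind
// of thing a test might use; it assumes the `Promise<Blob[]>` signature above.
export class InMemoryBlobStorageAdapter extends BlobStorageAdapter {
  private readonly blobs = new Map<string, Blob>();
  async getBlob(spaceId: string, key: string) {
    return this.blobs.get(`${spaceId}:${key}`) ?? null;
  }
  async setBlob(spaceId: string, blob: Blob) {
    this.blobs.set(`${spaceId}:${blob.key}`, blob);
    return blob.key;
  }
  async deleteBlob(spaceId: string, key: string) {
    return this.blobs.delete(`${spaceId}:${key}`);
  }
  async listBlobs(spaceId: string) {
    const prefix = `${spaceId}:`;
    return [...this.blobs.entries()]
      .filter(([k]) => k.startsWith(prefix))
      .map(([, v]) => v);
  }
}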

View File

@@ -1,11 +0,0 @@
export class Connection {
protected connected: boolean = false;
connect(): Promise<void> {
this.connected = true;
return Promise.resolve();
}
disconnect(): Promise<void> {
this.connected = false;
return Promise.resolve();
}
}

View File

@@ -1,216 +0,0 @@
import {
applyUpdate,
Doc,
encodeStateAsUpdate,
encodeStateVector,
mergeUpdates,
UndoManager,
} from 'yjs';
import { CallTimer } from '../../../fundamentals';
import { Connection } from './connection';
import { SingletonLocker } from './lock';
export interface DocRecord {
spaceId: string;
docId: string;
bin: Uint8Array;
timestamp: number;
editor?: string;
}
export interface DocUpdate {
bin: Uint8Array;
timestamp: number;
editor?: string;
}
export interface HistoryFilter {
before?: number;
limit?: number;
}
export interface Editor {
name: string;
avatarUrl: string | null;
}
export interface DocStorageOptions {
mergeUpdates?: (updates: Uint8Array[]) => Promise<Uint8Array> | Uint8Array;
}
export abstract class DocStorageAdapter extends Connection {
private readonly locker = new SingletonLocker();
constructor(
protected readonly options: DocStorageOptions = {
mergeUpdates,
}
) {
super();
}
// open apis
isEmptyBin(bin: Uint8Array): boolean {
return (
bin.length === 0 ||
// 0x0 for state vector
(bin.length === 1 && bin[0] === 0) ||
// 0x00 for update
(bin.length === 2 && bin[0] === 0 && bin[1] === 0)
);
}
async getDoc(spaceId: string, docId: string): Promise<DocRecord | null> {
await using _lock = await this.lockDocForUpdate(spaceId, docId);
const snapshot = await this.getDocSnapshot(spaceId, docId);
const updates = await this.getDocUpdates(spaceId, docId);
if (updates.length) {
const { timestamp, bin, editor } = await this.squash(
snapshot ? [snapshot, ...updates] : updates
);
const newSnapshot = {
spaceId: spaceId,
docId,
bin,
timestamp,
editor,
};
const success = await this.setDocSnapshot(newSnapshot);
// if there is an old snapshot, create a new history record
if (success && snapshot) {
await this.createDocHistory(snapshot);
}
// always mark updates as merged unless an error is thrown
await this.markUpdatesMerged(spaceId, docId, updates);
return newSnapshot;
}
return snapshot;
}
abstract pushDocUpdates(
spaceId: string,
docId: string,
updates: Uint8Array[],
editorId?: string
): Promise<number>;
abstract deleteDoc(spaceId: string, docId: string): Promise<void>;
abstract deleteSpace(spaceId: string): Promise<void>;
async rollbackDoc(
spaceId: string,
docId: string,
timestamp: number,
editorId?: string
): Promise<void> {
await using _lock = await this.lockDocForUpdate(spaceId, docId);
const toSnapshot = await this.getDocHistory(spaceId, docId, timestamp);
if (!toSnapshot) {
throw new Error('Cannot find the version to roll back to.');
}
const fromSnapshot = await this.getDocSnapshot(spaceId, docId);
if (!fromSnapshot) {
throw new Error('Cannot find the current version of the doc.');
}
const change = this.generateChangeUpdate(fromSnapshot.bin, toSnapshot.bin);
await this.pushDocUpdates(spaceId, docId, [change], editorId);
// force create a new history record after rollback
await this.createDocHistory(fromSnapshot, true);
}
abstract getSpaceDocTimestamps(
spaceId: string,
after?: number
): Promise<Record<string, number> | null>;
abstract listDocHistories(
spaceId: string,
docId: string,
query: HistoryFilter
): Promise<{ timestamp: number; editor: Editor | null }[]>;
abstract getDocHistory(
spaceId: string,
docId: string,
timestamp: number
): Promise<DocRecord | null>;
// api for internal usage
protected abstract getDocSnapshot(
spaceId: string,
docId: string
): Promise<DocRecord | null>;
protected abstract setDocSnapshot(snapshot: DocRecord): Promise<boolean>;
protected abstract getDocUpdates(
spaceId: string,
docId: string
): Promise<DocUpdate[]>;
protected abstract markUpdatesMerged(
spaceId: string,
docId: string,
updates: DocUpdate[]
): Promise<number>;
protected abstract createDocHistory(
snapshot: DocRecord,
force?: boolean
): Promise<boolean>;
@CallTimer('doc', 'squash')
protected async squash(updates: DocUpdate[]): Promise<DocUpdate> {
const merge = this.options?.mergeUpdates ?? mergeUpdates;
const lastUpdate = updates.at(-1);
if (!lastUpdate) {
throw new Error('No updates to be squashed.');
}
// fast return
if (updates.length === 1) {
return lastUpdate;
}
const finalUpdate = await merge(updates.map(u => u.bin));
return {
bin: finalUpdate,
timestamp: lastUpdate.timestamp,
editor: lastUpdate.editor,
};
}
protected async lockDocForUpdate(
spaceId: string,
docId: string
): Promise<AsyncDisposable> {
return this.locker.lock(`workspace:${spaceId}:update`, docId);
}
protected generateChangeUpdate(newerBin: Uint8Array, olderBin: Uint8Array) {
const newerDoc = new Doc();
applyUpdate(newerDoc, newerBin);
const olderDoc = new Doc();
applyUpdate(olderDoc, olderBin);
const newerState = encodeStateVector(newerDoc);
const olderState = encodeStateVector(olderDoc);
const diff = encodeStateAsUpdate(newerDoc, olderState);
// track the older doc, so the diff applied to it below can be undone
const undoManager = new UndoManager(Array.from(olderDoc.share.values()));
applyUpdate(olderDoc, diff);
undoManager.undo();
return encodeStateAsUpdate(olderDoc, newerState);
}
}
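// Sketch of what generateChangeUpdate yields (illustrative, not part of the
// diff): an update that, applied on top of the newer doc, reverts the newer
// edits back to the older state, so a rollback travels through the normal
// update pipeline instead of rewriting the snapshot. Reaching the protected
// helper via element access is a test-only trick assumed here.
function demoRollbackUpdate(adapter: DocStorageAdapter) {
  const older = new Doc();
  older.getText('t').insert(0, 'v1');
  const newer = new Doc();
  applyUpdate(newer, encodeStateAsUpdate(older));
  newer.getText('t').insert(2, ' plus v2 edits');
  const change = adapter['generateChangeUpdate'](
    encodeStateAsUpdate(newer),
    encodeStateAsUpdate(older)
  );
  applyUpdate(newer, change);
  return newer.getText('t').toString(); // back to 'v1'
}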

View File

@@ -1,33 +0,0 @@
// TODO(@forehalo): share with frontend
import type { BlobStorageAdapter } from './blob';
import { Connection } from './connection';
import type { DocStorageAdapter } from './doc';
export class SpaceStorage extends Connection {
constructor(
public readonly doc: DocStorageAdapter,
public readonly blob: BlobStorageAdapter
) {
super();
}
override async connect() {
await this.doc.connect();
await this.blob.connect();
}
override async disconnect() {
await this.doc.disconnect();
await this.blob.disconnect();
}
}
export { BlobStorageAdapter, type BlobStorageOptions } from './blob';
export {
type DocRecord,
DocStorageAdapter,
type DocStorageOptions,
type DocUpdate,
type Editor,
type HistoryFilter,
} from './doc';

View File

@@ -1,42 +0,0 @@
export interface Locker {
lock(domain: string, resource: string): Promise<Lock>;
}
export class SingletonLocker implements Locker {
lockedResource = new Map<string, Lock>();
constructor() {}
async lock(domain: string, resource: string) {
let lock = this.lockedResource.get(`${domain}:${resource}`);
if (!lock) {
lock = new Lock();
// remember the lock so later callers for the same resource share it
this.lockedResource.set(`${domain}:${resource}`, lock);
}
await lock.acquire();
return lock;
}
}
export class Lock {
private inner: Promise<void> = Promise.resolve();
private release: () => void = () => {};
async acquire() {
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
let release: () => void = null!;
const nextLock = new Promise<void>(resolve => {
release = resolve;
});
await this.inner;
this.inner = nextLock;
this.release = release;
}
[Symbol.asyncDispose]() {
this.release();
return Promise.resolve();
}
}
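// Usage sketch (illustrative, not part of the diff): acquire() chains
// promises, so each caller waits for the previous holder's async dispose, and
// `await using` releases the lock automatically at scope exit.
async function withDocLock(locker: SingletonLocker) {
  await using _lock = await locker.lock('workspace:ws-id:update', 'doc-id');
  // ...exclusive work here; the lock is released when this scope exits
}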

View File

@@ -32,7 +32,7 @@ export async function getFeature(prisma: PrismaTransaction, featureId: number) {
return cachedFeature;
}
const feature = await prisma.feature.findFirst({
const feature = await prisma.features.findFirst({
where: {
id: featureId,
},

View File

@@ -2,10 +2,7 @@ import { Module } from '@nestjs/common';
import { UserModule } from '../user';
import { EarlyAccessType, FeatureManagementService } from './management';
import {
AdminFeatureManagementResolver,
FeatureManagementResolver,
} from './resolver';
import { FeatureManagementResolver } from './resolver';
import { FeatureService } from './service';
/**
@@ -20,7 +17,6 @@ import { FeatureService } from './service';
FeatureService,
FeatureManagementService,
FeatureManagementResolver,
AdminFeatureManagementResolver,
],
exports: [FeatureService, FeatureManagementService],
})

View File

@@ -1,18 +1,21 @@
import {
Args,
Context,
Int,
Mutation,
Parent,
Query,
registerEnumType,
ResolveField,
Resolver,
} from '@nestjs/graphql';
import { difference } from 'lodash-es';
import { Config } from '../../fundamentals';
import { UserNotFound } from '../../fundamentals';
import { sessionUser } from '../auth/service';
import { Admin } from '../common';
import { UserService } from '../user/service';
import { UserType } from '../user/types';
import { EarlyAccessType, FeatureManagementService } from './management';
import { FeatureService } from './service';
import { FeatureType } from './types';
registerEnumType(EarlyAccessType, {
@@ -21,7 +24,10 @@ registerEnumType(EarlyAccessType, {
@Resolver(() => UserType)
export class FeatureManagementResolver {
constructor(private readonly feature: FeatureManagementService) {}
constructor(
private readonly users: UserService,
private readonly feature: FeatureManagementService
) {}
@ResolveField(() => [FeatureType], {
name: 'features',
@@ -30,48 +36,58 @@ export class FeatureManagementResolver {
async userFeatures(@Parent() user: UserType) {
return this.feature.getActivatedUserFeatures(user.id);
}
}
export class AvailableUserFeatureConfig {
constructor(private readonly config: Config) {}
async availableUserFeatures() {
return this.config.isSelfhosted
? [FeatureType.Admin]
: [FeatureType.EarlyAccess, FeatureType.AIEarlyAccess, FeatureType.Admin];
}
}
@Admin()
@Resolver(() => Boolean)
export class AdminFeatureManagementResolver extends AvailableUserFeatureConfig {
constructor(
config: Config,
private readonly feature: FeatureService
) {
super(config);
@Admin()
@Mutation(() => Int)
async addToEarlyAccess(
@Args('email') email: string,
@Args({ name: 'type', type: () => EarlyAccessType }) type: EarlyAccessType
): Promise<number> {
const user = await this.users.findUserByEmail(email);
if (user) {
return this.feature.addEarlyAccess(user.id, type);
} else {
const user = await this.users.createUser({
email,
registered: false,
});
return this.feature.addEarlyAccess(user.id, type);
}
}
@Mutation(() => [FeatureType], {
description: 'update user enabled feature',
})
async updateUserFeatures(
@Args('id') id: string,
@Args({ name: 'features', type: () => [FeatureType] })
features: FeatureType[]
) {
const configurableFeatures = await this.availableUserFeatures();
@Admin()
@Mutation(() => Int)
async removeEarlyAccess(@Args('email') email: string): Promise<number> {
const user = await this.users.findUserByEmail(email);
if (!user) {
throw new UserNotFound();
}
return this.feature.removeEarlyAccess(user.id);
}
const removed = difference(configurableFeatures, features);
await Promise.all(
features.map(feature =>
this.feature.addUserFeature(id, feature, 'admin panel')
)
);
await Promise.all(
removed.map(feature => this.feature.removeUserFeature(id, feature))
);
@Admin()
@Query(() => [UserType])
async earlyAccessUsers(
@Context() ctx: { isAdminQuery: boolean }
): Promise<UserType[]> {
// allow querying other users' subscriptions
ctx.isAdminQuery = true;
return this.feature.listEarlyAccess().then(users => {
return users.map(sessionUser);
});
}
return features;
@Admin()
@Mutation(() => Boolean)
async addAdminister(@Args('email') email: string): Promise<boolean> {
const user = await this.users.findUserByEmail(email);
if (!user) {
throw new UserNotFound();
}
await this.feature.addAdmin(user.id);
return true;
}
}

View File

@@ -1,7 +1,6 @@
import { Injectable } from '@nestjs/common';
import { PrismaClient } from '@prisma/client';
import { CannotDeleteAllAdminAccount } from '../../fundamentals';
import { WorkspaceType } from '../workspaces/types';
import { FeatureConfigType, getFeature } from './feature';
import { FeatureKind, FeatureType } from './types';
@@ -11,7 +10,7 @@ export class FeatureService {
constructor(private readonly prisma: PrismaClient) {}
async getFeature<F extends FeatureType>(feature: F) {
const data = await this.prisma.feature.findFirst({
const data = await this.prisma.features.findFirst({
where: {
feature,
type: FeatureKind.Feature,
@@ -37,7 +36,7 @@ export class FeatureService {
expiredAt?: Date | string
) {
return this.prisma.$transaction(async tx => {
const latestFlag = await tx.userFeature.findFirst({
const latestFlag = await tx.userFeatures.findFirst({
where: {
userId,
feature: {
@@ -54,7 +53,7 @@ export class FeatureService {
if (latestFlag) {
return latestFlag.id;
} else {
const featureId = await tx.feature
const featureId = await tx.features
.findFirst({
where: { feature, type: FeatureKind.Feature },
orderBy: { version: 'desc' },
@@ -66,7 +65,7 @@ export class FeatureService {
throw new Error(`Feature ${feature} not found`);
}
return tx.userFeature
return tx.userFeatures
.create({
data: {
reason,
@@ -82,10 +81,7 @@ export class FeatureService {
}
async removeUserFeature(userId: string, feature: FeatureType) {
if (feature === FeatureType.Admin) {
await this.ensureNotLastAdmin(userId);
}
return this.prisma.userFeature
return this.prisma.userFeatures
.updateMany({
where: {
userId,
@@ -102,27 +98,13 @@ export class FeatureService {
.then(r => r.count);
}
async ensureNotLastAdmin(userId: string) {
const count = await this.prisma.userFeature.count({
where: {
userId: { not: userId },
feature: { feature: FeatureType.Admin, type: FeatureKind.Feature },
activated: true,
},
});
if (count === 0) {
throw new CannotDeleteAllAdminAccount();
}
}
/**
* get user's features, including inactivated features
* @param userId user id
* @returns list of features
*/
async getUserFeatures(userId: string) {
const features = await this.prisma.userFeature.findMany({
const features = await this.prisma.userFeatures.findMany({
where: {
userId,
feature: { type: FeatureKind.Feature },
@@ -147,7 +129,7 @@ export class FeatureService {
}
async getActivatedUserFeatures(userId: string) {
const features = await this.prisma.userFeature.findMany({
const features = await this.prisma.userFeatures.findMany({
where: {
userId,
feature: { type: FeatureKind.Feature },
@@ -174,7 +156,7 @@ export class FeatureService {
}
async listFeatureUsers(feature: FeatureType) {
return this.prisma.userFeature
return this.prisma.userFeatures
.findMany({
where: {
activated: true,
@@ -200,7 +182,7 @@ export class FeatureService {
}
async hasUserFeature(userId: string, feature: FeatureType) {
return this.prisma.userFeature
return this.prisma.userFeatures
.count({
where: {
userId,
@@ -224,7 +206,7 @@ export class FeatureService {
expiredAt?: Date | string
) {
return this.prisma.$transaction(async tx => {
const latestFlag = await tx.workspaceFeature.findFirst({
const latestFlag = await tx.workspaceFeatures.findFirst({
where: {
workspaceId,
feature: {
@@ -241,7 +223,7 @@ export class FeatureService {
return latestFlag.id;
} else {
// use latest version of feature
const featureId = await tx.feature
const featureId = await tx.features
.findFirst({
where: { feature, type: FeatureKind.Feature },
select: { id: true },
@@ -253,7 +235,7 @@ export class FeatureService {
throw new Error(`Feature ${feature} not found`);
}
return tx.workspaceFeature
return tx.workspaceFeatures
.create({
data: {
reason,
@@ -269,7 +251,7 @@ export class FeatureService {
}
async removeWorkspaceFeature(workspaceId: string, feature: FeatureType) {
return this.prisma.workspaceFeature
return this.prisma.workspaceFeatures
.updateMany({
where: {
workspaceId,
@@ -292,7 +274,7 @@ export class FeatureService {
* @returns list of features
*/
async getWorkspaceFeatures(workspaceId: string) {
const features = await this.prisma.workspaceFeature.findMany({
const features = await this.prisma.workspaceFeatures.findMany({
where: {
workspace: { id: workspaceId },
feature: {
@@ -319,7 +301,7 @@ export class FeatureService {
}
async listFeatureWorkspaces(feature: FeatureType): Promise<WorkspaceType[]> {
return this.prisma.workspaceFeature
return this.prisma.workspaceFeatures
.findMany({
where: {
activated: true,
@@ -342,7 +324,7 @@ export class FeatureService {
}
async hasWorkspaceFeature(workspaceId: string, feature: FeatureType) {
return this.prisma.workspaceFeature
return this.prisma.workspaceFeatures
.count({
where: {
workspaceId,

View File

@@ -1,12 +0,0 @@
import { Module } from '@nestjs/common';
import { PermissionService } from './service';
@Module({
providers: [PermissionService],
exports: [PermissionService],
})
export class PermissionModule {}
export { PermissionService } from './service';
export { Permission, PublicPageMode } from './types';

View File

@@ -1,11 +0,0 @@
export enum Permission {
Read = 0,
Write = 1,
Admin = 10,
Owner = 99,
}
export enum PublicPageMode {
Page,
Edgeless,
}

View File

@@ -1,8 +1,8 @@
import { Module } from '@nestjs/common';
import { FeatureModule } from '../features';
import { PermissionModule } from '../permission';
import { StorageModule } from '../storage';
import { PermissionService } from '../workspaces/permission';
import { QuotaManagementResolver } from './resolver';
import { QuotaService } from './service';
import { QuotaManagementService } from './storage';
@@ -14,8 +14,13 @@ import { QuotaManagementService } from './storage';
* - quota statistics
*/
@Module({
imports: [FeatureModule, StorageModule, PermissionModule],
providers: [QuotaService, QuotaManagementResolver, QuotaManagementService],
imports: [FeatureModule, StorageModule],
providers: [
PermissionService,
QuotaService,
QuotaManagementResolver,
QuotaManagementService,
],
exports: [QuotaService, QuotaManagementService],
})
export class QuotaModule {}

View File

@@ -13,7 +13,7 @@ export class QuotaConfig {
return cachedQuota;
}
const quota = await tx.feature.findFirst({
const quota = await tx.features.findFirst({
where: {
id: featureId,
},
@@ -71,6 +71,10 @@ export class QuotaConfig {
return this.config.configs.historyPeriod;
}
get historyPeriodFromNow() {
return new Date(Date.now() + this.historyPeriod);
}
get memberLimit() {
return this.config.configs.memberLimit;
}

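The historyPeriodFromNow getter added above turns the relative historyPeriod into an absolute cutoff date. A worked example, assuming the period is stored in milliseconds:

const historyPeriod = 7 * 24 * 60 * 60 * 1000; // e.g. 7 days, unit assumed
const historyPeriodFromNow = new Date(Date.now() + historyPeriod);
// e.g. used as the expiry for a history entry created now (assumed)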
View File

@@ -7,7 +7,7 @@ import {
} from '@nestjs/graphql';
import { SafeIntResolver } from 'graphql-scalars';
import { CurrentUser } from '../auth/session';
import { CurrentUser } from '../auth/current-user';
import { EarlyAccessType } from '../features';
import { UserType } from '../user';
import { QuotaService } from './service';

View File

@@ -17,7 +17,7 @@ export class QuotaService {
// get activated user quota
async getUserQuota(userId: string) {
const quota = await this.prisma.userFeature.findFirst({
const quota = await this.prisma.userFeatures.findFirst({
where: {
userId,
feature: {
@@ -44,7 +44,7 @@ export class QuotaService {
// get all quota records for the user
async getUserQuotas(userId: string) {
const quotas = await this.prisma.userFeature.findMany({
const quotas = await this.prisma.userFeatures.findMany({
where: {
userId,
feature: {
@@ -58,9 +58,6 @@ export class QuotaService {
expiredAt: true,
featureId: true,
},
orderBy: {
id: 'asc',
},
});
const configs = await Promise.all(
quotas.map(async quota => {
@@ -69,7 +66,7 @@ export class QuotaService {
...quota,
feature: await QuotaConfig.get(this.prisma, quota.featureId),
};
} catch {}
} catch (_) {}
return null as unknown as typeof quota & {
feature: QuotaConfig;
};
@@ -95,7 +92,7 @@ export class QuotaService {
return;
}
const featureId = await tx.feature
const featureId = await tx.features
.findFirst({
where: { feature: quota, type: FeatureKind.Quota },
select: { id: true },
@@ -108,7 +105,7 @@ export class QuotaService {
}
// we will deactivate all existing quotas for this user
await tx.userFeature.updateMany({
await tx.userFeatures.updateMany({
where: {
id: undefined,
userId,
@@ -121,7 +118,7 @@ export class QuotaService {
},
});
await tx.userFeature.create({
await tx.userFeatures.create({
data: {
userId,
featureId,
@@ -136,7 +133,7 @@ export class QuotaService {
async hasQuota(userId: string, quota: QuotaType, tx?: PrismaTransaction) {
const executor = tx ?? this.prisma;
return executor.userFeature
return executor.userFeatures
.count({
where: {
userId,

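The quota-switching hunks above follow a deactivate-then-create pattern inside one transaction: resolve the latest feature row, switch off every activated quota row for the user, then create a single replacement. A condensed, hypothetical sketch — the updateMany filter and the create payload are assumed where the hunks truncate them:

import { PrismaClient } from '@prisma/client';

enum FeatureKind { Feature = 0, Quota = 1 } // numeric values assumed

async function switchUserQuota(
  prisma: PrismaClient,
  userId: string,
  quota: string,
  reason: string,
  expiredAt?: Date
) {
  return prisma.$transaction(async tx => {
    // resolve the latest feature row for this quota
    const feature = await tx.features.findFirst({
      where: { feature: quota, type: FeatureKind.Quota },
      select: { id: true },
    });
    if (!feature) throw new Error(`Quota ${quota} not found`);

    // deactivate all existing quotas for the user...
    await tx.userFeatures.updateMany({
      where: { userId, activated: true, feature: { type: FeatureKind.Quota } }, // assumed
      data: { activated: false },
    });

    // ...then activate exactly one replacement
    await tx.userFeatures.create({
      data: { userId, featureId: feature.id, reason, expiredAt, activated: true }, // partly assumed
    });
  });
}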
View File

@@ -1,8 +1,9 @@
import { Injectable, Logger } from '@nestjs/common';
import { WorkspaceOwnerNotFound } from '../../fundamentals';
import { FeatureService, FeatureType } from '../features';
import { PermissionService } from '../permission';
import { WorkspaceBlobStorage } from '../storage';
import { PermissionService } from '../workspaces/permission';
import { OneGB } from './constant';
import { QuotaService } from './service';
import { formatSize, QuotaQueryType } from './types';
@@ -112,9 +113,9 @@ export class QuotaManagementService {
// get the workspace owner's quota and the total size used
// the quota is applied to the owner's account
async getWorkspaceUsage(workspaceId: string): Promise<QuotaBusinessType> {
const owner = await this.permissions.getWorkspaceOwner(workspaceId);
const memberCount =
await this.permissions.getWorkspaceMemberCount(workspaceId);
const { user: owner } =
await this.permissions.getWorkspaceOwner(workspaceId);
if (!owner) throw new WorkspaceOwnerNotFound({ workspaceId });
const {
feature: {
name,
@@ -147,7 +148,6 @@ export class QuotaManagementService {
humanReadable,
usedSize,
unlimited,
memberCount,
};
if (quota.unlimited) {

View File

@@ -87,9 +87,6 @@ export class QuotaQueryType {
@Field(() => SafeIntResolver)
memberLimit!: number;
@Field(() => SafeIntResolver)
memberCount!: number;
@Field(() => SafeIntResolver)
storageQuota!: number;

View File

@@ -1,104 +0,0 @@
import { join } from 'node:path';
import {
Injectable,
Module,
NestMiddleware,
OnModuleInit,
} from '@nestjs/common';
import { HttpAdapterHost } from '@nestjs/core';
import type { Application, Request, Response } from 'express';
import { static as serveStatic } from 'express';
import { Config } from '../../fundamentals';
import { AuthModule } from '../auth';
import { ServerConfigModule, ServerService } from '../config';
import { UserModule } from '../user';
import { CustomSetupController } from './controller';
@Injectable()
export class SetupMiddleware implements NestMiddleware {
constructor(private readonly server: ServerService) {}
use = (req: Request, res: Response, next: (error?: Error | any) => void) => {
// never throw
// eslint-disable-next-line @typescript-eslint/no-floating-promises
this.server
.initialized()
.then(initialized => {
// Redirect to setup page if not initialized
if (!initialized && req.path !== '/admin/setup') {
res.redirect('/admin/setup');
return;
}
// Redirect to admin page if initialized
if (initialized && req.path === '/admin/setup') {
res.redirect('/admin');
return;
}
next();
})
.catch(() => {
next();
});
};
}
@Module({
imports: [AuthModule, UserModule, ServerConfigModule],
providers: [SetupMiddleware],
controllers: [CustomSetupController],
})
export class SelfhostModule implements OnModuleInit {
constructor(
private readonly config: Config,
private readonly adapterHost: HttpAdapterHost,
private readonly check: SetupMiddleware
) {}
onModuleInit() {
const staticPath = join(this.config.projectRoot, 'static');
// no HTTP adapter when running in command-line mode
if (!this.adapterHost.httpAdapter) {
return;
}
const app = this.adapterHost.httpAdapter.getInstance<Application>();
const basePath = this.config.server.path;
app.get(basePath + '/admin/index.html', (_req, res) => {
res.redirect(basePath + '/admin');
});
app.use(
basePath + '/admin',
serveStatic(join(staticPath, 'admin'), {
redirect: false,
index: false,
})
);
app.get(
[basePath + '/admin', basePath + '/admin/*'],
this.check.use,
(_req, res) => {
res.sendFile(join(staticPath, 'admin', 'index.html'));
}
);
app.get(basePath + '/index.html', (_req, res) => {
res.redirect(basePath);
});
app.use(
basePath,
serveStatic(staticPath, {
redirect: false,
index: false,
})
);
app.get('*', this.check.use, (_req, res) => {
res.sendFile(join(staticPath, 'index.html'));
});
}
}

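The SetupMiddleware above reduces to a three-way decision on (initialized, path); everything else is plumbing. A pure sketch of that routing table:

// Returns a redirect target, or null to fall through to next().
function setupRedirect(initialized: boolean, path: string): string | null {
  if (!initialized && path !== '/admin/setup') return '/admin/setup'; // force first-run setup
  if (initialized && path === '/admin/setup') return '/admin'; // setup already done
  return null;
}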
View File

@@ -1,15 +1,15 @@
import { Body, Controller, Post, Req, Res } from '@nestjs/common';
import { PrismaClient } from '@prisma/client';
import type { Request, Response } from 'express';
import {
ActionForbidden,
EventEmitter,
InternalServerError,
Mutex,
MutexService,
PasswordRequired,
} from '../../fundamentals';
import { AuthService, Public } from '../auth';
import { ServerService } from '../config';
import { UserService } from '../user/service';
interface CreateUserInput {
@@ -20,11 +20,11 @@ interface CreateUserInput {
@Controller('/api/setup')
export class CustomSetupController {
constructor(
private readonly db: PrismaClient,
private readonly user: UserService,
private readonly auth: AuthService,
private readonly event: EventEmitter,
private readonly mutex: Mutex,
private readonly server: ServerService
private readonly mutex: MutexService
) {}
@Public()
@@ -44,7 +44,7 @@ export class CustomSetupController {
throw new InternalServerError();
}
if (await this.server.initialized()) {
if ((await this.db.user.count()) > 0) {
throw new ActionForbidden('First user already created');
}
@@ -56,7 +56,7 @@ export class CustomSetupController {
try {
await this.event.emitAsync('user.admin.created', user);
await this.auth.setCookies(req, res, user.id);
await this.auth.setCookie(req, res, user);
res.send({ id: user.id, email: user.email, name: user.name });
} catch (e) {
await this.user.deleteUser(user.id);

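A hypothetical first-run request against the controller above; the hunk truncates CreateUserInput, so the email/password field names are assumed:

const res = await fetch('/api/setup', {
  method: 'POST',
  headers: { 'content-type': 'application/json' },
  body: JSON.stringify({ email: 'admin@example.com', password: 'initial-password' }),
});
// on success the server sets the auth cookie and echoes { id, email, name };
// a second call fails with ActionForbidden('First user already created')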
View File

@@ -0,0 +1,11 @@
import { Module } from '@nestjs/common';
import { AuthModule } from '../auth';
import { UserModule } from '../user';
import { CustomSetupController } from './controller';
@Module({
imports: [AuthModule, UserModule],
controllers: [CustomSetupController],
})
export class CustomSetupModule {}

View File

@@ -0,0 +1,333 @@
import { applyDecorators, Logger } from '@nestjs/common';
import {
ConnectedSocket,
MessageBody,
OnGatewayConnection,
OnGatewayDisconnect,
SubscribeMessage as RawSubscribeMessage,
WebSocketGateway,
WebSocketServer,
} from '@nestjs/websockets';
import { Server, Socket } from 'socket.io';
import { encodeStateAsUpdate, encodeStateVector } from 'yjs';
import {
CallTimer,
Config,
DocNotFound,
GatewayErrorWrapper,
metrics,
NotInWorkspace,
VersionRejected,
WorkspaceAccessDenied,
} from '../../../fundamentals';
import { Auth, CurrentUser } from '../../auth';
import { DocManager } from '../../doc';
import { DocID } from '../../utils/doc';
import { PermissionService } from '../../workspaces/permission';
import { Permission } from '../../workspaces/types';
const SubscribeMessage = (event: string) =>
applyDecorators(
GatewayErrorWrapper(event),
CallTimer('socketio', 'event_duration', { event }),
RawSubscribeMessage(event)
);
type EventResponse<Data = any> = Data extends never
? {
data?: never;
}
: {
data: Data;
};
function Sync(workspaceId: string): `${string}:sync` {
return `${workspaceId}:sync`;
}
function Awareness(workspaceId: string): `${string}:awareness` {
return `${workspaceId}:awareness`;
}
@WebSocketGateway({
cors: !AFFiNE.node.prod,
transports: ['websocket'],
// see: https://socket.io/docs/v4/server-options/#maxhttpbuffersize
maxHttpBufferSize: 1e8, // 100 MB
})
export class EventsGateway implements OnGatewayConnection, OnGatewayDisconnect {
protected logger = new Logger(EventsGateway.name);
private connectionCount = 0;
constructor(
private readonly config: Config,
private readonly docManager: DocManager,
private readonly permissions: PermissionService
) {}
@WebSocketServer()
server!: Server;
handleConnection() {
this.connectionCount++;
metrics.socketio.gauge('realtime_connections').record(this.connectionCount);
}
handleDisconnect() {
this.connectionCount--;
metrics.socketio.gauge('realtime_connections').record(this.connectionCount);
}
async assertVersion(client: Socket, version?: string) {
const shouldCheckClientVersion = await this.config.runtime.fetch(
'flags/syncClientVersionCheck'
);
if (
// @todo(@darkskygit): remove this flag after 0.12 goes stable
shouldCheckClientVersion &&
version !== AFFiNE.version
) {
client.emit('server-version-rejected', {
currentVersion: version,
requiredVersion: AFFiNE.version,
reason: `Client version${
version ? ` ${version}` : ''
} is outdated; please update to ${AFFiNE.version}`,
});
throw new VersionRejected({
version: version || 'unknown',
serverVersion: AFFiNE.version,
});
}
}
async joinWorkspace(
client: Socket,
room: `${string}:${'sync' | 'awareness'}`
) {
await client.join(room);
}
async leaveWorkspace(
client: Socket,
room: `${string}:${'sync' | 'awareness'}`
) {
await client.leave(room);
}
assertInWorkspace(client: Socket, room: `${string}:${'sync' | 'awareness'}`) {
if (!client.rooms.has(room)) {
throw new NotInWorkspace({ workspaceId: room.split(':')[0] });
}
}
async assertWorkspaceAccessible(
workspaceId: string,
userId: string,
permission: Permission = Permission.Read
) {
if (
!(await this.permissions.isWorkspaceMember(
workspaceId,
userId,
permission
))
) {
throw new WorkspaceAccessDenied({ workspaceId });
}
}
@Auth()
@SubscribeMessage('client-handshake-sync')
async handleClientHandshakeSync(
@CurrentUser() user: CurrentUser,
@MessageBody('workspaceId') workspaceId: string,
@MessageBody('version') version: string | undefined,
@ConnectedSocket() client: Socket
): Promise<EventResponse<{ clientId: string }>> {
await this.assertVersion(client, version);
await this.assertWorkspaceAccessible(
workspaceId,
user.id,
Permission.Write
);
await this.joinWorkspace(client, Sync(workspaceId));
return {
data: {
clientId: client.id,
},
};
}
@Auth()
@SubscribeMessage('client-handshake-awareness')
async handleClientHandshakeAwareness(
@CurrentUser() user: CurrentUser,
@MessageBody('workspaceId') workspaceId: string,
@MessageBody('version') version: string | undefined,
@ConnectedSocket() client: Socket
): Promise<EventResponse<{ clientId: string }>> {
await this.assertVersion(client, version);
await this.assertWorkspaceAccessible(
workspaceId,
user.id,
Permission.Write
);
await this.joinWorkspace(client, Awareness(workspaceId));
return {
data: {
clientId: client.id,
},
};
}
@SubscribeMessage('client-leave-sync')
async handleLeaveSync(
@MessageBody() workspaceId: string,
@ConnectedSocket() client: Socket
): Promise<EventResponse> {
this.assertInWorkspace(client, Sync(workspaceId));
await this.leaveWorkspace(client, Sync(workspaceId));
return {};
}
@SubscribeMessage('client-leave-awareness')
async handleLeaveAwareness(
@MessageBody() workspaceId: string,
@ConnectedSocket() client: Socket
): Promise<EventResponse> {
this.assertInWorkspace(client, Awareness(workspaceId));
await this.leaveWorkspace(client, Awareness(workspaceId));
return {};
}
@SubscribeMessage('client-pre-sync')
async loadDocStats(
@ConnectedSocket() client: Socket,
@MessageBody()
{ workspaceId, timestamp }: { workspaceId: string; timestamp?: number }
): Promise<EventResponse<Record<string, number>>> {
this.assertInWorkspace(client, Sync(workspaceId));
const stats = await this.docManager.getDocTimestamps(
workspaceId,
timestamp
);
return {
data: stats,
};
}
@SubscribeMessage('client-update-v2')
async handleClientUpdateV2(
@MessageBody()
{
workspaceId,
guid,
updates,
}: {
workspaceId: string;
guid: string;
updates: string[];
},
@ConnectedSocket() client: Socket
): Promise<EventResponse<{ accepted: true; timestamp?: number }>> {
this.assertInWorkspace(client, Sync(workspaceId));
const docId = new DocID(guid, workspaceId);
const buffers = updates.map(update => Buffer.from(update, 'base64'));
const timestamp = await this.docManager.batchPush(
docId.workspace,
docId.guid,
buffers
);
client
.to(Sync(workspaceId))
.emit('server-updates', { workspaceId, guid, updates, timestamp });
return {
data: {
accepted: true,
timestamp,
},
};
}
@SubscribeMessage('doc-load-v2')
async loadDocV2(
@ConnectedSocket() client: Socket,
@MessageBody()
{
workspaceId,
guid,
stateVector,
}: {
workspaceId: string;
guid: string;
stateVector?: string;
}
): Promise<
EventResponse<{ missing: string; state?: string; timestamp: number }>
> {
this.assertInWorkspace(client, Sync(workspaceId));
const docId = new DocID(guid, workspaceId);
const res = await this.docManager.get(docId.workspace, docId.guid);
if (!res) {
throw new DocNotFound({ workspaceId, docId: docId.guid });
}
const missing = Buffer.from(
encodeStateAsUpdate(
res.doc,
stateVector ? Buffer.from(stateVector, 'base64') : undefined
)
).toString('base64');
const state = Buffer.from(encodeStateVector(res.doc)).toString('base64');
return {
data: {
missing,
state,
timestamp: res.timestamp,
},
};
}
@SubscribeMessage('awareness-init')
async handleInitAwareness(
@MessageBody() workspaceId: string,
@ConnectedSocket() client: Socket
): Promise<EventResponse<{ clientId: string }>> {
this.assertInWorkspace(client, Awareness(workspaceId));
client.to(Awareness(workspaceId)).emit('new-client-awareness-init');
return {
data: {
clientId: client.id,
},
};
}
@SubscribeMessage('awareness-update')
async handleHelpGatheringAwareness(
@MessageBody()
{
workspaceId,
awarenessUpdate,
}: { workspaceId: string; awarenessUpdate: string },
@ConnectedSocket() client: Socket
): Promise<EventResponse> {
this.assertInWorkspace(client, Awareness(workspaceId));
client
.to(Awareness(workspaceId))
.emit('server-awareness-broadcast', { workspaceId, awarenessUpdate });
return {};
}
}

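Every handler in the gateway above acks with an EventResponse. A hypothetical Node-side socket.io client for the handshake and update events; the payload shapes come from the @MessageBody decorators, while the ack shape is assumed:

import { io } from 'socket.io-client';

const socket = io('https://example.com', { transports: ['websocket'] });

socket.emit(
  'client-handshake-sync',
  { workspaceId: 'ws-1', version: '0.12.0' },
  (res: { data?: { clientId: string } }) => {
    // now in the `ws-1:sync` room and will receive 'server-updates'
  }
);

// updates travel base64-encoded, matching Buffer.from(update, 'base64') above
const update = new Uint8Array([0, 1, 2]); // stand-in for a yjs update
socket.emit('client-update-v2', {
  workspaceId: 'ws-1',
  guid: 'doc-1',
  updates: [Buffer.from(update).toString('base64')],
});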
View File

@@ -0,0 +1,11 @@
import { Module } from '@nestjs/common';
import { DocModule } from '../../doc';
import { PermissionService } from '../../workspaces/permission';
import { EventsGateway } from './events.gateway';
@Module({
imports: [DocModule],
providers: [EventsGateway, PermissionService],
})
export class EventsModule {}

View File

@@ -1,665 +0,0 @@
import { applyDecorators, Logger } from '@nestjs/common';
import {
ConnectedSocket,
MessageBody,
OnGatewayConnection,
OnGatewayDisconnect,
SubscribeMessage as RawSubscribeMessage,
WebSocketGateway,
} from '@nestjs/websockets';
import { Socket } from 'socket.io';
import { diffUpdate, encodeStateVectorFromUpdate } from 'yjs';
import {
AlreadyInSpace,
CallTimer,
Config,
DocNotFound,
GatewayErrorWrapper,
metrics,
NotInSpace,
SpaceAccessDenied,
VersionRejected,
} from '../../fundamentals';
import { CurrentUser } from '../auth';
import {
DocStorageAdapter,
PgUserspaceDocStorageAdapter,
PgWorkspaceDocStorageAdapter,
} from '../doc';
import { Permission, PermissionService } from '../permission';
import { DocID } from '../utils/doc';
const SubscribeMessage = (event: string) =>
applyDecorators(
GatewayErrorWrapper(event),
CallTimer('socketio', 'event_duration', { event }),
RawSubscribeMessage(event)
);
type EventResponse<Data = any> = Data extends never
? {
data?: never;
}
: {
data: Data;
};
type RoomType = 'sync' | `${string}:awareness`;
function Room(
spaceId: string,
type: RoomType = 'sync'
): `${string}:${RoomType}` {
return `${spaceId}:${type}`;
}
enum SpaceType {
Workspace = 'workspace',
Userspace = 'userspace',
}
interface JoinSpaceMessage {
spaceType: SpaceType;
spaceId: string;
clientVersion: string;
}
interface JoinSpaceAwarenessMessage {
spaceType: SpaceType;
spaceId: string;
docId: string;
clientVersion: string;
}
interface LeaveSpaceMessage {
spaceType: SpaceType;
spaceId: string;
}
interface LeaveSpaceAwarenessMessage {
spaceType: SpaceType;
spaceId: string;
docId: string;
}
interface PushDocUpdatesMessage {
spaceType: SpaceType;
spaceId: string;
docId: string;
updates: string[];
}
interface LoadDocMessage {
spaceType: SpaceType;
spaceId: string;
docId: string;
stateVector?: string;
}
interface LoadDocTimestampsMessage {
spaceType: SpaceType;
spaceId: string;
timestamp?: number;
}
interface LoadSpaceAwarenessesMessage {
spaceType: SpaceType;
spaceId: string;
docId: string;
}
interface UpdateAwarenessMessage {
spaceType: SpaceType;
spaceId: string;
docId: string;
awarenessUpdate: string;
}
@WebSocketGateway()
export class SpaceSyncGateway
implements OnGatewayConnection, OnGatewayDisconnect
{
protected logger = new Logger(SpaceSyncGateway.name);
private connectionCount = 0;
constructor(
private readonly config: Config,
private readonly permissions: PermissionService,
private readonly workspace: PgWorkspaceDocStorageAdapter,
private readonly userspace: PgUserspaceDocStorageAdapter
) {}
handleConnection() {
this.connectionCount++;
metrics.socketio.gauge('realtime_connections').record(this.connectionCount);
}
handleDisconnect() {
this.connectionCount--;
metrics.socketio.gauge('realtime_connections').record(this.connectionCount);
}
selectAdapter(client: Socket, spaceType: SpaceType): SyncSocketAdapter {
let adapters: Record<SpaceType, SyncSocketAdapter> = (client as any)
.affineSyncAdapters;
if (!adapters) {
const workspace = new WorkspaceSyncAdapter(
client,
this.workspace,
this.permissions
);
const userspace = new UserspaceSyncAdapter(client, this.userspace);
adapters = { workspace, userspace };
(client as any).affineSyncAdapters = adapters;
}
return adapters[spaceType];
}
async assertVersion(client: Socket, version?: string) {
const shouldCheckClientVersion = await this.config.runtime.fetch(
'flags/syncClientVersionCheck'
);
if (
// @todo(@darkskygit): remove this flag after 0.12 goes stable
shouldCheckClientVersion &&
version !== AFFiNE.version
) {
client.emit('server-version-rejected', {
currentVersion: version,
requiredVersion: AFFiNE.version,
reason: `Client version${
version ? ` ${version}` : ''
} is outdated; please update to ${AFFiNE.version}`,
});
throw new VersionRejected({
version: version || 'unknown',
serverVersion: AFFiNE.version,
});
}
}
async joinWorkspace(
client: Socket,
room: `${string}:${'sync' | 'awareness'}`
) {
await client.join(room);
}
async leaveWorkspace(
client: Socket,
room: `${string}:${'sync' | 'awareness'}`
) {
await client.leave(room);
}
assertInWorkspace(client: Socket, room: `${string}:${'sync' | 'awareness'}`) {
if (!client.rooms.has(room)) {
throw new NotInSpace({ spaceId: room.split(':')[0] });
}
}
// v3
@SubscribeMessage('space:join')
async onJoinSpace(
@CurrentUser() user: CurrentUser,
@ConnectedSocket() client: Socket,
@MessageBody()
{ spaceType, spaceId, clientVersion }: JoinSpaceMessage
): Promise<EventResponse<{ clientId: string; success: true }>> {
await this.assertVersion(client, clientVersion);
await this.selectAdapter(client, spaceType).join(user.id, spaceId);
return { data: { clientId: client.id, success: true } };
}
@SubscribeMessage('space:leave')
async onLeaveSpace(
@ConnectedSocket() client: Socket,
@MessageBody() { spaceType, spaceId }: LeaveSpaceMessage
): Promise<EventResponse<{ clientId: string; success: true }>> {
await this.selectAdapter(client, spaceType).leave(spaceId);
return { data: { clientId: client.id, success: true } };
}
@SubscribeMessage('space:load-doc')
async onLoadSpaceDoc(
@ConnectedSocket() client: Socket,
@MessageBody()
{ spaceType, spaceId, docId, stateVector }: LoadDocMessage
): Promise<
EventResponse<{ missing: string; state?: string; timestamp: number }>
> {
const adapter = this.selectAdapter(client, spaceType);
adapter.assertIn(spaceId);
const doc = await adapter.get(spaceId, docId);
if (!doc) {
throw new DocNotFound({ spaceId, docId });
}
const missing = Buffer.from(
stateVector
? diffUpdate(doc.bin, Buffer.from(stateVector, 'base64'))
: doc.bin
).toString('base64');
const state = Buffer.from(encodeStateVectorFromUpdate(doc.bin)).toString(
'base64'
);
return {
data: {
missing,
state,
timestamp: doc.timestamp,
},
};
}
@SubscribeMessage('space:push-doc-updates')
async onReceiveDocUpdates(
@ConnectedSocket() client: Socket,
@CurrentUser() user: CurrentUser,
@MessageBody()
message: PushDocUpdatesMessage
): Promise<EventResponse<{ accepted: true; timestamp?: number }>> {
const { spaceType, spaceId, docId, updates } = message;
const adapter = this.selectAdapter(client, spaceType);
// TODO(@forehalo): we might need to check write permission before pushing updates
const timestamp = await adapter.push(
spaceId,
docId,
updates.map(update => Buffer.from(update, 'base64')),
user.id
);
// this could live in [adapter.push],
// but the broadcast event should stay out of the adapter,
// so it is emitted here instead
client
.to(adapter.room(spaceId))
.emit('space:broadcast-doc-updates', { ...message, timestamp });
// TODO(@forehalo): remove backward compatibility
if (spaceType === SpaceType.Workspace) {
const id = new DocID(docId, spaceId);
client.to(adapter.room(spaceId)).emit('server-updates', {
workspaceId: spaceId,
guid: id.guid,
updates,
timestamp,
});
}
return {
data: {
accepted: true,
timestamp,
},
};
}
@SubscribeMessage('space:load-doc-timestamps')
async onLoadDocTimestamps(
@ConnectedSocket() client: Socket,
@MessageBody()
{ spaceType, spaceId, timestamp }: LoadDocTimestampsMessage
): Promise<EventResponse<Record<string, number>>> {
const adapter = this.selectAdapter(client, spaceType);
const stats = await adapter.getTimestamps(spaceId, timestamp);
return {
data: stats ?? {},
};
}
@SubscribeMessage('space:join-awareness')
async onJoinAwareness(
@ConnectedSocket() client: Socket,
@CurrentUser() user: CurrentUser,
@MessageBody()
{ spaceType, spaceId, docId, clientVersion }: JoinSpaceAwarenessMessage
) {
await this.assertVersion(client, clientVersion);
await this.selectAdapter(client, spaceType).join(
user.id,
spaceId,
`${docId}:awareness`
);
return { data: { clientId: client.id, success: true } };
}
@SubscribeMessage('space:leave-awareness')
async onLeaveAwareness(
@ConnectedSocket() client: Socket,
@MessageBody()
{ spaceType, spaceId, docId }: LeaveSpaceAwarenessMessage
) {
await this.selectAdapter(client, spaceType).leave(
spaceId,
`${docId}:awareness`
);
return { data: { clientId: client.id, success: true } };
}
@SubscribeMessage('space:load-awarenesses')
async onLoadAwareness(
@ConnectedSocket() client: Socket,
@MessageBody()
{ spaceType, spaceId, docId }: LoadSpaceAwarenessesMessage
) {
const adapter = this.selectAdapter(client, spaceType);
const roomType = `${docId}:awareness` as const;
adapter.assertIn(spaceId, roomType);
client
.to(adapter.room(spaceId, roomType))
.emit('space:collect-awareness', { spaceType, spaceId, docId });
// TODO(@forehalo): remove backward compatibility
if (spaceType === SpaceType.Workspace) {
client
.to(adapter.room(spaceId, roomType))
.emit('new-client-awareness-init');
}
return { data: { clientId: client.id } };
}
@SubscribeMessage('space:update-awareness')
async onUpdateAwareness(
@ConnectedSocket() client: Socket,
@MessageBody() message: UpdateAwarenessMessage
) {
const { spaceType, spaceId, docId } = message;
const adapter = this.selectAdapter(client, spaceType);
const roomType = `${docId}:awareness` as const;
adapter.assertIn(spaceId, roomType);
client
.to(adapter.room(spaceId, roomType))
.emit('space:broadcast-awareness-update', message);
// TODO(@forehalo): remove backward compatibility
if (spaceType === SpaceType.Workspace) {
client
.to(adapter.room(spaceId, roomType))
.emit('server-awareness-broadcast', {
workspaceId: spaceId,
awarenessUpdate: message.awarenessUpdate,
});
}
return {};
}
// TODO(@forehalo): remove
// deprecated section
@SubscribeMessage('client-handshake-sync')
async handleClientHandshakeSync(
@CurrentUser() user: CurrentUser,
@MessageBody('workspaceId') workspaceId: string,
@MessageBody('version') version: string,
@ConnectedSocket() client: Socket
): Promise<EventResponse<{ clientId: string }>> {
await this.assertVersion(client, version);
return this.onJoinSpace(user, client, {
spaceType: SpaceType.Workspace,
spaceId: workspaceId,
clientVersion: version,
});
}
@SubscribeMessage('client-leave-sync')
async handleLeaveSync(
@MessageBody() workspaceId: string,
@ConnectedSocket() client: Socket
): Promise<EventResponse> {
return this.onLeaveSpace(client, {
spaceType: SpaceType.Workspace,
spaceId: workspaceId,
});
}
@SubscribeMessage('client-pre-sync')
async loadDocStats(
@ConnectedSocket() client: Socket,
@MessageBody()
{ workspaceId, timestamp }: { workspaceId: string; timestamp?: number }
): Promise<EventResponse<Record<string, number>>> {
return this.onLoadDocTimestamps(client, {
spaceType: SpaceType.Workspace,
spaceId: workspaceId,
timestamp,
});
}
@SubscribeMessage('client-update-v2')
async handleClientUpdateV2(
@CurrentUser() user: CurrentUser,
@MessageBody()
{
workspaceId,
guid,
updates,
}: {
workspaceId: string;
guid: string;
updates: string[];
},
@ConnectedSocket() client: Socket
): Promise<EventResponse<{ accepted: true; timestamp?: number }>> {
return this.onReceiveDocUpdates(client, user, {
spaceType: SpaceType.Workspace,
spaceId: workspaceId,
docId: guid,
updates,
});
}
@SubscribeMessage('doc-load-v2')
async loadDocV2(
@ConnectedSocket() client: Socket,
@MessageBody()
{
workspaceId,
guid,
stateVector,
}: {
workspaceId: string;
guid: string;
stateVector?: string;
}
): Promise<
EventResponse<{ missing: string; state?: string; timestamp: number }>
> {
return this.onLoadSpaceDoc(client, {
spaceType: SpaceType.Workspace,
spaceId: workspaceId,
docId: guid,
stateVector,
});
}
@SubscribeMessage('client-handshake-awareness')
async handleClientHandshakeAwareness(
@ConnectedSocket() client: Socket,
@CurrentUser() user: CurrentUser,
@MessageBody('workspaceId') workspaceId: string,
@MessageBody('version') version: string
): Promise<EventResponse<{ clientId: string }>> {
return this.onJoinAwareness(client, user, {
spaceType: SpaceType.Workspace,
spaceId: workspaceId,
docId: workspaceId,
clientVersion: version,
});
}
@SubscribeMessage('client-leave-awareness')
async handleLeaveAwareness(
@MessageBody() workspaceId: string,
@ConnectedSocket() client: Socket
): Promise<EventResponse> {
return this.onLeaveAwareness(client, {
spaceType: SpaceType.Workspace,
spaceId: workspaceId,
docId: workspaceId,
});
}
@SubscribeMessage('awareness-init')
async handleInitAwareness(
@MessageBody() workspaceId: string,
@ConnectedSocket() client: Socket
): Promise<EventResponse<{ clientId: string }>> {
return this.onLoadAwareness(client, {
spaceType: SpaceType.Workspace,
spaceId: workspaceId,
docId: workspaceId,
});
}
@SubscribeMessage('awareness-update')
async handleHelpGatheringAwareness(
@MessageBody()
{
workspaceId,
awarenessUpdate,
}: { workspaceId: string; awarenessUpdate: string },
@ConnectedSocket() client: Socket
): Promise<EventResponse> {
return this.onUpdateAwareness(client, {
spaceType: SpaceType.Workspace,
spaceId: workspaceId,
docId: workspaceId,
awarenessUpdate,
});
}
}
abstract class SyncSocketAdapter {
constructor(
private readonly spaceType: SpaceType,
public readonly client: Socket,
public readonly storage: DocStorageAdapter
) {}
room(spaceId: string, roomType: RoomType = 'sync') {
return `${this.spaceType}:${Room(spaceId, roomType)}`;
}
async join(userId: string, spaceId: string, roomType: RoomType = 'sync') {
this.assertNotIn(spaceId, roomType);
await this.assertAccessible(spaceId, userId, Permission.Read);
return this.client.join(this.room(spaceId, roomType));
}
async leave(spaceId: string, roomType: RoomType = 'sync') {
this.assertIn(spaceId, roomType);
return this.client.leave(this.room(spaceId, roomType));
}
in(spaceId: string, roomType: RoomType = 'sync') {
return this.client.rooms.has(this.room(spaceId, roomType));
}
assertNotIn(spaceId: string, roomType: RoomType = 'sync') {
if (this.client.rooms.has(this.room(spaceId, roomType))) {
throw new AlreadyInSpace({ spaceId });
}
}
assertIn(spaceId: string, roomType: RoomType = 'sync') {
if (!this.client.rooms.has(this.room(spaceId, roomType))) {
throw new NotInSpace({ spaceId });
}
}
abstract assertAccessible(
spaceId: string,
userId: string,
permission?: Permission
): Promise<void>;
push(spaceId: string, docId: string, updates: Buffer[], editorId: string) {
this.assertIn(spaceId);
return this.storage.pushDocUpdates(spaceId, docId, updates, editorId);
}
get(spaceId: string, docId: string) {
this.assertIn(spaceId);
return this.storage.getDoc(spaceId, docId);
}
getTimestamps(spaceId: string, timestamp?: number) {
this.assertIn(spaceId);
return this.storage.getSpaceDocTimestamps(spaceId, timestamp);
}
}
class WorkspaceSyncAdapter extends SyncSocketAdapter {
constructor(
client: Socket,
storage: DocStorageAdapter,
private readonly permission: PermissionService
) {
super(SpaceType.Workspace, client, storage);
}
override push(
spaceId: string,
docId: string,
updates: Buffer[],
editorId: string
) {
const id = new DocID(docId, spaceId);
return super.push(spaceId, id.guid, updates, editorId);
}
override get(spaceId: string, docId: string) {
const id = new DocID(docId, spaceId);
return this.storage.getDoc(spaceId, id.guid);
}
async assertAccessible(
spaceId: string,
userId: string,
permission: Permission = Permission.Read
) {
if (
!(await this.permission.isWorkspaceMember(spaceId, userId, permission))
) {
throw new SpaceAccessDenied({ spaceId });
}
}
}
class UserspaceSyncAdapter extends SyncSocketAdapter {
constructor(client: Socket, storage: DocStorageAdapter) {
super(SpaceType.Userspace, client, storage);
}
async assertAccessible(
spaceId: string,
userId: string,
_permission: Permission = Permission.Read
) {
if (spaceId !== userId) {
throw new SpaceAccessDenied({ spaceId });
}
}
}

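The file above (removed in this diff) defines the generic v3 space protocol: rooms are keyed `${spaceType}:${spaceId}:sync`, and the deprecated workspace events are shimmed onto the space handlers. A hypothetical client flow, with payload shapes taken from the message interfaces in this file:

import { io } from 'socket.io-client';

const socket = io('https://example.com', { transports: ['websocket'] });
const ack = (res: unknown) => console.log(res);

socket.emit(
  'space:join',
  { spaceType: 'workspace', spaceId: 'ws-1', clientVersion: '0.15.0' }, // version string assumed
  ack
);

socket.emit(
  'space:load-doc',
  // stateVector (base64) may be included to receive only the missing diff
  { spaceType: 'workspace', spaceId: 'ws-1', docId: 'doc-1' },
  ack
);

const update64 = Buffer.from(new Uint8Array([0])).toString('base64'); // stand-in update
socket.emit(
  'space:push-doc-updates',
  { spaceType: 'workspace', spaceId: 'ws-1', docId: 'doc-1', updates: [update64] },
  ack
);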
View File

@@ -1,11 +1,8 @@
import { Module } from '@nestjs/common';
import { DocStorageModule } from '../doc';
import { PermissionModule } from '../permission';
import { SpaceSyncGateway } from './gateway';
import { EventsModule } from './events/events.module';
@Module({
imports: [DocStorageModule, PermissionModule],
providers: [SpaceSyncGateway],
imports: [EventsModule],
})
export class SyncModule {}

View File

@@ -12,22 +12,16 @@ import { PrismaClient } from '@prisma/client';
import GraphQLUpload from 'graphql-upload/GraphQLUpload.mjs';
import { isNil, omitBy } from 'lodash-es';
import {
CannotDeleteOwnAccount,
type FileUpload,
Throttle,
UserNotFound,
} from '../../fundamentals';
import { type FileUpload, Throttle, UserNotFound } from '../../fundamentals';
import { CurrentUser } from '../auth/current-user';
import { Public } from '../auth/guard';
import { sessionUser } from '../auth/service';
import { CurrentUser } from '../auth/session';
import { Admin } from '../common';
import { AvatarStorage } from '../storage';
import { validators } from '../utils/validators';
import { UserService } from './service';
import {
DeleteAccount,
ManageUserInput,
RemoveAvatar,
UpdateUserInput,
UserOrLimitedUser,
@@ -167,6 +161,9 @@ class CreateUserInput {
@Field(() => String, { nullable: true })
name!: string | null;
@Field(() => String, { nullable: true })
password!: string | null;
}
@Admin()
@@ -177,13 +174,6 @@ export class UserManagementResolver {
private readonly user: UserService
) {}
@Query(() => Int, {
description: 'Get users count',
})
async usersCount(): Promise<number> {
return this.db.user.count();
}
@Query(() => [UserType], {
description: 'List registered users',
})
@@ -218,26 +208,6 @@ export class UserManagementResolver {
return sessionUser(user);
}
@Query(() => UserType, {
name: 'userByEmail',
description: 'Get user by email for admin',
nullable: true,
})
async getUserByEmail(@Args('email') email: string) {
const user = await this.db.user.findUnique({
select: { ...this.user.defaultUserSelect, password: true },
where: {
email,
},
});
if (!user) {
return null;
}
return sessionUser(user);
}
@Mutation(() => UserType, {
description: 'Create a new user',
})
@@ -246,6 +216,7 @@ export class UserManagementResolver {
) {
const { id } = await this.user.createUser({
email: input.email,
password: input.password,
registered: true,
});
@@ -256,42 +227,8 @@ export class UserManagementResolver {
@Mutation(() => DeleteAccount, {
description: 'Delete a user account',
})
async deleteUser(
@CurrentUser() user: CurrentUser,
@Args('id') id: string
): Promise<DeleteAccount> {
if (user.id === id) {
throw new CannotDeleteOwnAccount();
}
async deleteUser(@Args('id') id: string): Promise<DeleteAccount> {
await this.user.deleteUser(id);
return { success: true };
}
@Mutation(() => UserType, {
description: 'Update a user',
})
async updateUser(
@Args('id') id: string,
@Args('input') input: ManageUserInput
): Promise<UserType> {
const user = await this.db.user.findUnique({
where: { id },
});
if (!user) {
throw new UserNotFound();
}
input = omitBy(input, isNil);
if (Object.keys(input).length === 0) {
return sessionUser(user);
}
return sessionUser(
await this.user.updateUser(user.id, {
email: input.email,
name: input.name,
})
);
}
}

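The management resolver above is admin-gated and, on one side of this diff, accepts a password at creation. A hypothetical GraphQL call — the mutation and argument names are assumed from the resolver method, and /graphql is the assumed endpoint:

const mutation = `
  mutation {
    createUser(input: { email: "new@example.com", password: "secret" }) {
      id
      email
    }
  }
`;

await fetch('/graphql', {
  method: 'POST',
  headers: { 'content-type': 'application/json' },
  body: JSON.stringify({ query: mutation }),
});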
View File

@@ -56,6 +56,11 @@ export class UserService {
async createUser(data: CreateUserInput) {
validators.assertValidEmail(data.email);
const user = await this.findUserByEmail(data.email);
if (user) {
throw new EmailAlreadyUsed();
}
if (data.password) {
const config = await this.config.runtime.fetchAll({
@@ -72,12 +77,6 @@ export class UserService {
}
async createUser_without_verification(data: CreateUserInput) {
const user = await this.findUserByEmail(data.email);
if (user) {
throw new EmailAlreadyUsed();
}
if (data.password) {
data.password = await this.crypto.encryptPassword(data.password);
}
@@ -159,7 +158,9 @@ export class UserService {
async fulfillUser(
email: string,
data: Omit<Partial<Prisma.UserCreateInput>, 'id'>
data: Partial<
Pick<Prisma.UserCreateInput, 'emailVerifiedAt' | 'registered'>
>
) {
const user = await this.findUserByEmail(email);
if (!user) {
@@ -179,6 +180,7 @@ export class UserService {
if (Object.keys(data).length) {
return await this.prisma.user.update({
select: this.defaultUserSelect,
where: { id: user.id },
data,
});
@@ -192,7 +194,9 @@ export class UserService {
async updateUser(
id: string,
data: Omit<Partial<Prisma.UserCreateInput>, 'id'>,
data: Omit<Prisma.UserUpdateInput, 'password'> & {
password?: string | null;
},
select: Prisma.UserSelect = this.defaultUserSelect
) {
if (data.password) {
@@ -207,23 +211,6 @@ export class UserService {
data.password = await this.crypto.encryptPassword(data.password);
}
if (data.email) {
validators.assertValidEmail(data.email);
const emailTaken = await this.prisma.user.count({
where: {
email: data.email,
id: {
not: id,
},
},
});
if (emailTaken) {
throw new EmailAlreadyUsed();
}
}
const user = await this.prisma.user.update({ where: { id }, data, select });
this.emitter.emit('user.updated', user);

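One side of the fulfillUser hunk narrows data from arbitrary Prisma user fields to just emailVerifiedAt and registered, so callers can no longer slip in other column writes. Under that signature (users being an injected UserService):

// OK: both fields are inside the narrowed Pick
await users.fulfillUser('new@example.com', {
  emailVerifiedAt: new Date(),
  registered: true,
});

// no longer compiles: 'password' is outside the Pick
// await users.fulfillUser('new@example.com', { password: 'x' });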
View File

@@ -8,7 +8,7 @@ import {
import type { User } from '@prisma/client';
import type { Payload } from '../../fundamentals/event/def';
import { type CurrentUser } from '../auth/session';
import { CurrentUser } from '../auth/current-user';
@ObjectType()
export class UserType implements CurrentUser {
@@ -83,15 +83,6 @@ export class UpdateUserInput implements Partial<User> {
name?: string;
}
@InputType()
export class ManageUserInput {
@Field({ description: 'User email', nullable: true })
email?: string;
@Field({ description: 'User name', nullable: true })
name?: string;
}
declare module '../../fundamentals/event/def' {
interface UserEvents {
admin: {

View File

@@ -21,7 +21,7 @@ export class DocID {
static parse(raw: string): DocID | null {
try {
return new DocID(raw);
} catch {
} catch (e) {
return null;
}
}

View File

@@ -12,10 +12,11 @@ import {
InvalidHistoryTimestamp,
} from '../../fundamentals';
import { CurrentUser, Public } from '../auth';
import { PgWorkspaceDocStorageAdapter } from '../doc';
import { Permission, PermissionService, PublicPageMode } from '../permission';
import { DocHistoryManager, DocManager } from '../doc';
import { WorkspaceBlobStorage } from '../storage';
import { DocID } from '../utils/doc';
import { PermissionService, PublicPageMode } from './permission';
import { Permission } from './types';
@Controller('/api/workspaces')
export class WorkspacesController {
@@ -23,7 +24,8 @@ export class WorkspacesController {
constructor(
private readonly storage: WorkspaceBlobStorage,
private readonly permission: PermissionService,
private readonly workspace: PgWorkspaceDocStorageAdapter,
private readonly docManager: DocManager,
private readonly historyManager: DocHistoryManager,
private readonly prisma: PrismaClient
) {}
@@ -55,7 +57,7 @@ export class WorkspacesController {
if (!body) {
throw new BlobNotFound({
spaceId: workspaceId,
workspaceId,
blobId: name,
});
}
@@ -95,14 +97,14 @@ export class WorkspacesController {
throw new AccessDenied();
}
const binResponse = await this.workspace.getDoc(
const binResponse = await this.docManager.getBinary(
docId.workspace,
docId.guid
);
if (!binResponse) {
throw new DocNotFound({
spaceId: docId.workspace,
workspaceId: docId.workspace,
docId: docId.guid,
});
}
@@ -124,7 +126,7 @@ export class WorkspacesController {
}
res.setHeader('content-type', 'application/octet-stream');
res.send(binResponse.bin);
res.send(binResponse.binary);
}
@Get('/:id/docs/:guid/histories/:timestamp')
@@ -140,7 +142,7 @@ export class WorkspacesController {
let ts;
try {
ts = new Date(timestamp);
} catch {
} catch (e) {
throw new InvalidHistoryTimestamp({ timestamp });
}
@@ -151,19 +153,19 @@ export class WorkspacesController {
Permission.Write
);
const history = await this.workspace.getDocHistory(
const history = await this.historyManager.get(
docId.workspace,
docId.guid,
ts.getTime()
ts
);
if (history) {
res.setHeader('content-type', 'application/octet-stream');
res.setHeader('cache-control', 'private, max-age=2592000, immutable');
res.send(history.bin);
res.send(history.blob);
} else {
throw new DocHistoryNotFound({
spaceId: docId.workspace,
workspaceId: docId.workspace,
docId: guid,
timestamp: ts.getTime(),
});

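Putting the controller routes above together: the history route is visible in the @Get decorator, while the blob route shape is assumed from the BlobNotFound payload:

const workspaceId = 'ws-1', guid = 'doc-1', name = 'blob-1'; // placeholders

const blob = await fetch(`/api/workspaces/${workspaceId}/blobs/${name}`); // route assumed

// the timestamp is parsed with new Date(timestamp), so an ISO string works
const history = await fetch(
  `/api/workspaces/${workspaceId}/docs/${guid}/histories/${new Date().toISOString()}`
);
// 200 => application/octet-stream snapshot; otherwise DocHistoryNotFound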
View File

@@ -1,13 +1,13 @@
import { Module } from '@nestjs/common';
import { DocStorageModule } from '../doc';
import { DocModule } from '../doc';
import { FeatureModule } from '../features';
import { PermissionModule } from '../permission';
import { QuotaModule } from '../quota';
import { StorageModule } from '../storage';
import { UserModule } from '../user';
import { WorkspacesController } from './controller';
import { WorkspaceManagementResolver } from './management';
import { PermissionService } from './permission';
import {
DocHistoryResolver,
PagePermissionResolver,
@@ -16,22 +16,17 @@ import {
} from './resolvers';
@Module({
imports: [
DocStorageModule,
FeatureModule,
QuotaModule,
StorageModule,
UserModule,
PermissionModule,
],
imports: [DocModule, FeatureModule, QuotaModule, StorageModule, UserModule],
controllers: [WorkspacesController],
providers: [
WorkspaceResolver,
WorkspaceManagementResolver,
PermissionService,
PagePermissionResolver,
DocHistoryResolver,
WorkspaceBlobResolver,
],
exports: [PermissionService],
})
export class WorkspaceModule {}

View File

@@ -12,7 +12,7 @@ import { ActionForbidden } from '../../fundamentals';
import { CurrentUser } from '../auth';
import { Admin } from '../common';
import { FeatureManagementService, FeatureType } from '../features';
import { PermissionService } from '../permission';
import { PermissionService } from './permission';
import { WorkspaceType } from './types';
@Resolver(() => WorkspaceType)
@@ -61,7 +61,7 @@ export class WorkspaceManagementResolver {
const owner = await this.permission.getWorkspaceOwner(workspaceId);
const availableFeatures = await this.availableFeatures(user);
if (owner.id !== user.id || !availableFeatures.includes(feature)) {
if (owner.user.id !== user.id || !availableFeatures.includes(feature)) {
throw new ActionForbidden();
}

View File

@@ -2,12 +2,13 @@ import { Injectable } from '@nestjs/common';
import type { Prisma } from '@prisma/client';
import { PrismaClient } from '@prisma/client';
import {
DocAccessDenied,
SpaceAccessDenied,
SpaceOwnerNotFound,
} from '../../fundamentals';
import { Permission, PublicPageMode } from './types';
import { DocAccessDenied, WorkspaceAccessDenied } from '../../fundamentals';
import { Permission } from './types';
export enum PublicPageMode {
Page,
Edgeless,
}
@Injectable()
export class PermissionService {
@@ -58,7 +59,7 @@ export class PermissionService {
}
async getWorkspaceOwner(workspaceId: string) {
const owner = await this.prisma.workspaceUserPermission.findFirst({
return this.prisma.workspaceUserPermission.findFirstOrThrow({
where: {
workspaceId,
type: Permission.Owner,
@@ -67,20 +68,6 @@ export class PermissionService {
user: true,
},
});
if (!owner) {
throw new SpaceOwnerNotFound({ spaceId: workspaceId });
}
return owner.user;
}
async getWorkspaceMemberCount(workspaceId: string) {
return this.prisma.workspaceUserPermission.count({
where: {
workspaceId,
},
});
}
async tryGetWorkspaceOwner(workspaceId: string) {
@@ -165,7 +152,7 @@ export class PermissionService {
permission: Permission = Permission.Read
) {
if (!(await this.tryCheckWorkspace(ws, user, permission))) {
throw new SpaceAccessDenied({ spaceId: ws });
throw new WorkspaceAccessDenied({ workspaceId: ws });
}
}
@@ -208,17 +195,6 @@ export class PermissionService {
return false;
}
async allowUrlPreview(ws: string) {
const count = await this.prisma.workspace.count({
where: {
id: ws,
public: true,
},
});
return count > 0;
}
async grant(
ws: string,
user: string,
@@ -348,7 +324,7 @@ export class PermissionService {
permission = Permission.Read
) {
if (!(await this.tryCheckPage(ws, page, user, permission))) {
throw new DocAccessDenied({ spaceId: ws, docId: page });
throw new DocAccessDenied({ workspaceId: ws, docId: page });
}
}

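The service above pairs a boolean tryCheck* with a throwing variant for each resource, so callers choose by whether denial is an error path. A sketch, with permissions as an injected PermissionService and the throwing wrapper named checkPage for illustration (its name is truncated in the hunk):

const workspaceId = 'ws-1', pageId = 'page-1', userId = 'user-1'; // placeholders

// branch on access
if (await permissions.tryCheckPage(workspaceId, pageId, userId, Permission.Read)) {
  // render the page
}

// or assert access and let the access-denied error propagate
await permissions.checkPage(workspaceId, pageId, userId, Permission.Write);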
View File

@@ -19,10 +19,10 @@ import {
PreventCache,
} from '../../../fundamentals';
import { CurrentUser } from '../../auth';
import { Permission, PermissionService } from '../../permission';
import { QuotaManagementService } from '../../quota';
import { WorkspaceBlobStorage } from '../../storage';
import { WorkspaceBlobSizes, WorkspaceType } from '../types';
import { PermissionService } from '../permission';
import { Permission, WorkspaceBlobSizes, WorkspaceType } from '../types';
@UseGuards(CloudThrottlerGuard)
@Resolver(() => WorkspaceType)

View File

@@ -12,11 +12,10 @@ import {
import type { SnapshotHistory } from '@prisma/client';
import { CurrentUser } from '../../auth';
import { PgWorkspaceDocStorageAdapter } from '../../doc';
import { Permission, PermissionService } from '../../permission';
import { DocHistoryManager } from '../../doc';
import { DocID } from '../../utils/doc';
import { WorkspaceType } from '../types';
import { EditorType } from './workspace';
import { PermissionService } from '../permission';
import { Permission, WorkspaceType } from '../types';
@ObjectType()
class DocHistoryType implements Partial<SnapshotHistory> {
@@ -28,15 +27,12 @@ class DocHistoryType implements Partial<SnapshotHistory> {
@Field(() => GraphQLISODateTime)
timestamp!: Date;
@Field(() => EditorType, { nullable: true })
editor!: EditorType | null;
}
@Resolver(() => WorkspaceType)
export class DocHistoryResolver {
constructor(
private readonly workspace: PgWorkspaceDocStorageAdapter,
private readonly historyManager: DocHistoryManager,
private readonly permission: PermissionService
) {}
@@ -51,20 +47,17 @@ export class DocHistoryResolver {
): Promise<DocHistoryType[]> {
const docId = new DocID(guid, workspace.id);
const histories = await this.workspace.listDocHistories(
workspace.id,
docId.guid,
{ before: timestamp.getTime(), limit: take }
);
return histories.map(history => {
return {
workspaceId: workspace.id,
id: docId.guid,
timestamp: new Date(history.timestamp),
editor: history.editor,
};
});
return this.historyManager
.list(workspace.id, docId.guid, timestamp, take)
.then(rows =>
rows.map(({ timestamp }) => {
return {
workspaceId: workspace.id,
id: docId.guid,
timestamp,
};
})
);
}
@Mutation(() => Date)
@@ -83,13 +76,6 @@ export class DocHistoryResolver {
Permission.Write
);
await this.workspace.rollbackDoc(
docId.workspace,
docId.guid,
timestamp.getTime(),
user.id
);
return timestamp;
return this.historyManager.recover(docId.workspace, docId.guid, timestamp);
}
}

View File

@@ -17,13 +17,9 @@ import {
PageIsNotPublic,
} from '../../../fundamentals';
import { CurrentUser } from '../../auth';
import {
Permission,
PermissionService,
PublicPageMode,
} from '../../permission';
import { DocID } from '../../utils/doc';
import { WorkspaceType } from '../types';
import { PermissionService, PublicPageMode } from '../permission';
import { Permission, WorkspaceType } from '../types';
registerEnumType(PublicPageMode, {
name: 'PublicPageMode',

Some files were not shown because too many files have changed in this diff Show More