Compare commits


1 Commit

Author: 李华桥
SHA1: 48cf392a45
Message: chore: v0.10.4-canary.1
Date: 2023-12-08 13:32:23 +08:00
567 changed files with 13019 additions and 19643 deletions

View File

@@ -32,9 +32,7 @@ runs:
- name: Set CC
if: ${{ contains(inputs.target, 'linux') && inputs.package != '@affine/native' }}
shell: bash
run: |
echo "CC=clang" >> "$GITHUB_ENV"
echo "TARGET_CC=clang" >> "$GITHUB_ENV"
run: echo "CC=clang" >> "$GITHUB_ENV"
- name: Cache cargo
uses: actions/cache@v3
@@ -52,4 +50,3 @@ runs:
yarn workspace ${{ inputs.package }} nx build ${{ inputs.package }} --target ${{ inputs.target }} --use-napi-cross
env:
NX_CLOUD_ACCESS_TOKEN: ${{ inputs.nx_token }}
DEBUG: 'napi:*'

View File

@@ -34,7 +34,7 @@ runs:
project_id: '${{ inputs.gcp-project-id }}'
- name: 'Setup gcloud cli'
uses: 'google-github-actions/setup-gcloud@v2'
uses: 'google-github-actions/setup-gcloud@v1'
with:
install_components: 'gke-gcloud-auth-plugin'

View File

@@ -1,7 +1,6 @@
import { execSync } from 'node:child_process';
const {
APP_VERSION,
BUILD_TYPE,
DEPLOY_HOST,
CANARY_DEPLOY_HOST,
@@ -28,7 +27,6 @@ const {
REDIS_PASSWORD,
STRIPE_API_KEY,
STRIPE_WEBHOOK_KEY,
STATIC_IP_NAME,
} = process.env;
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
@@ -36,13 +34,17 @@ const buildType = BUILD_TYPE || 'canary';
const isProduction = buildType === 'stable';
const isBeta = buildType === 'beta';
const isInternal = buildType === 'internal';
const createHelmCommand = ({ isDryRun }) => {
const flag = isDryRun ? '--dry-run' : '--atomic';
const imageTag = `${buildType}-${GIT_SHORT_HASH}`;
const staticIpName = isProduction
? 'affine-cluster-production'
: isBeta
? 'affine-cluster-beta'
: 'affine-cluster-dev';
const redisAndPostgres =
isProduction || isBeta || isInternal
isProduction || isBeta
? [
`--set-string global.database.url=${DATABASE_URL}`,
`--set-string global.database.user=${DATABASE_USERNAME}`,
@@ -56,34 +58,27 @@ const createHelmCommand = ({ isDryRun }) => {
]
: [];
const serviceAnnotations =
isProduction || isBeta || isInternal
isProduction || isBeta
? [
`--set-json web.service.annotations=\"{ \\"cloud.google.com/neg\\": \\"{\\\\\\"ingress\\\\\\": true}\\" }\"`,
`--set-json graphql.serviceAccount.annotations=\"{ \\"iam.gke.io/gcp-service-account\\": \\"${CLOUD_SQL_IAM_ACCOUNT}\\" }\"`,
`--set-json graphql.service.annotations=\"{ \\"cloud.google.com/neg\\": \\"{\\\\\\"ingress\\\\\\": true}\\" }\"`,
`--set-json sync.serviceAccount.annotations=\"{ \\"iam.gke.io/gcp-service-account\\": \\"${CLOUD_SQL_IAM_ACCOUNT}\\" }\"`,
`--set-json sync.service.annotations=\"{ \\"cloud.google.com/neg\\": \\"{\\\\\\"ingress\\\\\\": true}\\" }\"`,
`--set-json cloud-sql-proxy.serviceAccount.annotations=\"{ \\"iam.gke.io/gcp-service-account\\": \\"${CLOUD_SQL_IAM_ACCOUNT}\\" }\"`,
`--set-json cloud-sql-proxy.nodeSelector=\"{ \\"iam.gke.io/gke-metadata-server-enabled\\": \\"true\\" }\"`,
]
: [];
const webReplicaCount = isProduction ? 3 : isBeta ? 2 : 2;
const graphqlReplicaCount = isProduction ? 10 : isBeta ? 5 : 2;
const syncReplicaCount = isProduction ? 10 : isBeta ? 5 : 2;
const namespace = isProduction
? 'production'
: isBeta
? 'beta'
: isInternal
? 'internal'
: 'dev';
const namespace = isProduction ? 'production' : isBeta ? 'beta' : 'dev';
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
const host = DEPLOY_HOST || CANARY_DEPLOY_HOST;
const deployCommand = [
`helm upgrade --install affine .github/helm/affine`,
`--namespace ${namespace}`,
`--set global.ingress.enabled=true`,
`--set-json global.ingress.annotations=\"{ \\"kubernetes.io/ingress.class\\": \\"gce\\", \\"kubernetes.io/ingress.allow-http\\": \\"true\\", \\"kubernetes.io/ingress.global-static-ip-name\\": \\"${STATIC_IP_NAME}\\" }\"`,
`--set-json global.ingress.annotations=\"{ \\"kubernetes.io/ingress.class\\": \\"gce\\", \\"kubernetes.io/ingress.allow-http\\": \\"true\\", \\"kubernetes.io/ingress.global-static-ip-name\\": \\"${staticIpName}\\" }\"`,
`--set-string global.ingress.host="${host}"`,
`--set-string global.version="${APP_VERSION}"`,
...redisAndPostgres,
`--set web.replicaCount=${webReplicaCount}`,
`--set-string web.image.tag="${imageTag}"`,
@@ -110,7 +105,7 @@ const createHelmCommand = ({ isDryRun }) => {
`--set sync.replicaCount=${syncReplicaCount}`,
`--set-string sync.image.tag="${imageTag}"`,
...serviceAnnotations,
`--timeout 10m`,
`--version "0.0.0-${buildType}.${GIT_SHORT_HASH}" --timeout 10m`,
flag,
].join(' ');
return deployCommand;
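Read together, the hunks above boil down to one selection problem: the build type picks the namespace, the static IP name, the image tag and the replica counts. A minimal sketch of that selection in its three-way form, using only values visible in the diff; it is illustrative and not part of the commit:
// Illustrative sketch only; not part of the commit above.
type BuildType = 'stable' | 'beta' | 'canary';

function deployTargets(buildType: BuildType, gitShortHash: string) {
  const isProduction = buildType === 'stable';
  const isBeta = buildType === 'beta';
  return {
    namespace: isProduction ? 'production' : isBeta ? 'beta' : 'dev',
    staticIpName: isProduction
      ? 'affine-cluster-production'
      : isBeta
        ? 'affine-cluster-beta'
        : 'affine-cluster-dev',
    imageTag: `${buildType}-${gitShortHash}`,
    graphqlReplicas: isProduction ? 10 : isBeta ? 5 : 2,
  };
}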

View File

@@ -3,4 +3,4 @@ name: affine
description: AFFiNE cloud chart
type: application
version: 0.0.0
appVersion: "0.11.0-beta.1"
appVersion: '0.7.0-canary.18'

View File

@@ -1,23 +0,0 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*.orig
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/

View File

@@ -1,6 +0,0 @@
apiVersion: v2
name: cloud-sql-proxy
description: Google Cloud SQL Proxy
type: application
version: 0.0.0
appVersion: "2.8.1"

View File

@@ -1,18 +0,0 @@
{{- if .Values.global.database.gcloud.enabled -}}
1. Get the application URL by running these commands:
{{- if contains "NodePort" .Values.service.type }}
export NODE_PORT=$(kubectl get --namespace {{ .Release.Namespace }} -o jsonpath="{.spec.ports[0].nodePort}" services {{ include "gcloud-sql-proxy.fullname" . }})
export NODE_IP=$(kubectl get nodes --namespace {{ .Release.Namespace }} -o jsonpath="{.items[0].status.addresses[0].address}")
echo http://$NODE_IP:$NODE_PORT
{{- else if contains "LoadBalancer" .Values.service.type }}
NOTE: It may take a few minutes for the LoadBalancer IP to be available.
You can watch its status by running 'kubectl get --namespace {{ .Release.Namespace }} svc -w {{ include "gcloud-sql-proxy.fullname" . }}'
export SERVICE_IP=$(kubectl get svc --namespace {{ .Release.Namespace }} {{ include "gcloud-sql-proxy.fullname" . }} --template "{{"{{ range (index .status.loadBalancer.ingress 0) }}{{.}}{{ end }}"}}")
echo http://$SERVICE_IP:{{ .Values.service.port }}
{{- else if contains "ClusterIP" .Values.service.type }}
export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "gcloud-sql-proxy.name" . }},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}")
export CONTAINER_PORT=$(kubectl get pod --namespace {{ .Release.Namespace }} $POD_NAME -o jsonpath="{.spec.containers[0].ports[0].containerPort}")
echo "Visit http://127.0.0.1:8080 to use your application"
kubectl --namespace {{ .Release.Namespace }} port-forward $POD_NAME 8080:$CONTAINER_PORT
{{- end }}
{{- end }}

View File

@@ -1,62 +0,0 @@
{{/*
Expand the name of the chart.
*/}}
{{- define "gcloud-sql-proxy.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
If release name contains chart name it will be used as a full name.
*/}}
{{- define "gcloud-sql-proxy.fullname" -}}
{{- if .Values.fullnameOverride }}
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- $name := default .Chart.Name .Values.nameOverride }}
{{- if contains $name .Release.Name }}
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
{{- end }}
{{- end }}
{{- end }}
{{/*
Create chart name and version as used by the chart label.
*/}}
{{- define "gcloud-sql-proxy.chart" -}}
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Common labels
*/}}
{{- define "gcloud-sql-proxy.labels" -}}
helm.sh/chart: {{ include "gcloud-sql-proxy.chart" . }}
{{ include "gcloud-sql-proxy.selectorLabels" . }}
{{- if .Chart.AppVersion }}
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
{{- end }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end }}
{{/*
Selector labels
*/}}
{{- define "gcloud-sql-proxy.selectorLabels" -}}
app.kubernetes.io/name: {{ include "gcloud-sql-proxy.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}
{{/*
Create the name of the service account to use
*/}}
{{- define "gcloud-sql-proxy.serviceAccountName" -}}
{{- if .Values.serviceAccount.create }}
{{- default (include "gcloud-sql-proxy.fullname" .) .Values.serviceAccount.name }}
{{- else }}
{{- default "default" .Values.serviceAccount.name }}
{{- end }}
{{- end }}

View File

@@ -1,132 +0,0 @@
{{- if .Values.global.database.gcloud.enabled -}}
apiVersion: apps/v1
kind: Deployment
metadata:
name: {{ include "gcloud-sql-proxy.fullname" . }}
labels:
{{- include "gcloud-sql-proxy.labels" . | nindent 4 }}
spec:
replicas: {{ .Values.replicaCount }}
selector:
matchLabels:
{{- include "gcloud-sql-proxy.selectorLabels" . | nindent 6 }}
template:
metadata:
{{- with .Values.podAnnotations }}
annotations:
{{- toYaml . | nindent 8 }}
{{- end }}
labels:
{{- include "gcloud-sql-proxy.labels" . | nindent 8 }}
{{- with .Values.podLabels }}
{{- toYaml . | nindent 8 }}
{{- end }}
spec:
{{- with .Values.imagePullSecrets }}
imagePullSecrets:
{{- toYaml . | nindent 8 }}
{{- end }}
serviceAccountName: {{ include "gcloud-sql-proxy.serviceAccountName" . }}
securityContext:
{{- toYaml .Values.podSecurityContext | nindent 8 }}
containers:
- name: {{ .Chart.Name }}
securityContext:
{{- toYaml .Values.securityContext | nindent 12 }}
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: File
image: "{{ .Values.image.repository }}:{{ .Chart.AppVersion }}"
imagePullPolicy: {{ .Values.image.pullPolicy }}
args:
- "--address"
- "0.0.0.0"
- "--structured-logs"
- "--auto-iam-authn"
- "{{ .Values.global.database.gcloud.connectionName }}"
env:
# Enable HTTP healthchecks on port 9801. This enables /liveness,
# /readiness and /startup health check endpoints. Listen for
# connections on any interface (0.0.0.0) so that the
# k8s management components can reach these endpoints.
- name: CSQL_PROXY_HEALTH_CHECK
value: "true"
- name: CSQL_PROXY_HTTP_PORT
value: "9801"
- name: CSQL_PROXY_HTTP_ADDRESS
value: 0.0.0.0
ports:
- name: cloud-sql-proxy
containerPort: {{ .Values.global.database.gcloud.proxyPort }}
protocol: TCP
- containerPort: 9801
protocol: TCP
# The /startup probe returns OK when the proxy is ready to receive
# connections from the application. In this example, k8s will check
# once a second for 60 seconds.
startupProbe:
failureThreshold: 60
httpGet:
path: /startup
port: 9801
scheme: HTTP
periodSeconds: 1
successThreshold: 1
timeoutSeconds: 10
# The /liveness probe returns OK as soon as the proxy application has
# begun its startup process and continues to return OK until the
# process stops.
livenessProbe:
failureThreshold: 3
httpGet:
path: /liveness
port: 9801
scheme: HTTP
# The probe will be checked every 10 seconds.
periodSeconds: 10
# Number of times the probe is allowed to fail before the transition
# from healthy to failure state.
#
# If periodSeconds = 60, 5 tries will result in five minutes of
# checks. The proxy starts to refresh a certificate five minutes
# before its expiration. If those five minutes lapse without a
# successful refresh, the liveness probe will fail and the pod will be
# restarted.
successThreshold: 1
# The probe will fail if it does not respond in 10 seconds
timeoutSeconds: 10
readinessProbe:
# The /readiness probe returns OK when the proxy can establish
# new connections to its databases.
httpGet:
path: /readiness
port: 9801
initialDelaySeconds: 10
periodSeconds: 10
timeoutSeconds: 10
# Number of times the probe must report success to transition from failure to healthy state.
# Defaults to 1 for readiness probe.
successThreshold: 1
failureThreshold: 6
resources:
{{- toYaml .Values.resources | nindent 12 }}
{{- with .Values.volumeMounts }}
volumeMounts:
{{- toYaml . | nindent 12 }}
{{- end }}
{{- with .Values.volumes }}
volumes:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.nodeSelector }}
nodeSelector:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.tolerations }}
tolerations:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- end }}

View File

@@ -1,17 +0,0 @@
{{- if .Values.global.database.gcloud.enabled -}}
apiVersion: v1
kind: Service
metadata:
name: {{ include "gcloud-sql-proxy.fullname" . }}
labels:
{{- include "gcloud-sql-proxy.labels" . | nindent 4 }}
spec:
type: {{ .Values.service.type }}
ports:
- port: {{ .Values.global.database.port }}
targetPort: cloud-sql-proxy
protocol: TCP
name: cloud-sql-proxy
selector:
{{- include "gcloud-sql-proxy.selectorLabels" . | nindent 4 }}
{{- end }}

View File

@@ -1,15 +0,0 @@
{{- if .Values.global.database.gcloud.enabled -}}
{{- if .Values.serviceAccount.create -}}
apiVersion: v1
kind: ServiceAccount
metadata:
name: {{ include "gcloud-sql-proxy.serviceAccountName" . }}
labels:
{{- include "gcloud-sql-proxy.labels" . | nindent 4 }}
{{- with .Values.serviceAccount.annotations }}
annotations:
{{- toYaml . | nindent 4 }}
{{- end }}
automountServiceAccountToken: {{ .Values.serviceAccount.automount }}
{{- end }}
{{- end }}

View File

@@ -1,17 +0,0 @@
{{- if .Values.global.database.gcloud.enabled -}}
apiVersion: v1
kind: Pod
metadata:
name: "{{ include "gcloud-sql-proxy.fullname" . }}-test-connection"
labels:
{{- include "gcloud-sql-proxy.labels" . | nindent 4 }}
annotations:
"helm.sh/hook": test
spec:
containers:
- name: wget
image: busybox
command: ['wget']
args: ['{{ include "gcloud-sql-proxy.fullname" . }}:{{ .Values.service.port }}']
restartPolicy: Never
{{- end }}

View File

@@ -1,40 +0,0 @@
replicaCount: 3
image:
# the tag is defined as chart appVersion.
repository: gcr.io/cloud-sql-connectors/cloud-sql-proxy
pullPolicy: IfNotPresent
imagePullSecrets: []
nameOverride: ""
fullnameOverride: ""
serviceAccount:
create: true
automount: true
annotations: {}
name: ""
podAnnotations: {}
podLabels: {}
podSecurityContext:
fsGroup: 2000
securityContext:
runAsNonRoot: true
service:
type: ClusterIP
port: 5432
resources:
limits:
memory: "4Gi"
cpu: "2"
volumes: []
volumeMounts: []
nodeSelector: {}
tolerations: []
affinity: {}

View File

@@ -3,9 +3,4 @@ name: graphql
description: AFFiNE GraphQL server
type: application
version: 0.0.0
appVersion: "0.11.0-beta.1"
dependencies:
- name: gcloud-sql-proxy
version: 0.0.0
repository: "file://../gcloud-sql-proxy"
condition: .global.database.gcloud.enabled
appVersion: '0.7.0-canary.18'

View File

@@ -189,6 +189,20 @@ spec:
initialDelaySeconds: {{ .Values.probe.initialDelaySeconds }}
resources:
{{- toYaml .Values.resources | nindent 12 }}
{{ if .Values.global.database.gcloud.enabled }}
- name: cloud-sql-proxy
image: gcr.io/cloud-sql-connectors/cloud-sql-proxy:2.6.0
args:
- "--structured-logs"
- "--auto-iam-authn"
- "{{ .Values.global.database.gcloud.connectionName }}"
securityContext:
runAsNonRoot: true
resources:
requests:
memory: "2Gi"
cpu: "1"
{{ end }}
{{- with .Values.nodeSelector }}
nodeSelector:
{{- toYaml . | nindent 8 }}

View File

@@ -1,11 +1,6 @@
apiVersion: v2
name: sync
description: AFFiNE Sync Server
description: A Helm chart for Kubernetes
type: application
version: 0.0.0
appVersion: "0.11.0-beta.1"
dependencies:
- name: gcloud-sql-proxy
version: 0.0.0
repository: "file://../gcloud-sql-proxy"
condition: .global.database.gcloud.enabled
appVersion: "0.7.0-canary.18"

View File

@@ -82,6 +82,20 @@ spec:
initialDelaySeconds: {{ .Values.probe.initialDelaySeconds }}
resources:
{{- toYaml .Values.resources | nindent 12 }}
{{ if .Values.global.database.gcloud.enabled }}
- name: cloud-sql-proxy
image: gcr.io/cloud-sql-connectors/cloud-sql-proxy:2.6.0
args:
- "--structured-logs"
- "--auto-iam-authn"
- "{{ .Values.global.database.gcloud.connectionName }}"
securityContext:
runAsNonRoot: true
resources:
requests:
memory: "2Gi"
cpu: "1"
{{ end }}
{{- with .Values.nodeSelector }}
nodeSelector:
{{- toYaml . | nindent 8 }}

View File

@@ -16,8 +16,6 @@ global:
cloudSqlInternal: ''
connectionName: ''
serviceAccount: ''
cloudProxyReplicas: 3
proxyPort: '5432'
redis:
enabled: true
host: 'redis-master'

View File

@@ -45,7 +45,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -58,7 +58,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v3
uses: github/codeql-action/autobuild@v2
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -71,7 +71,7 @@ jobs:
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
uses: github/codeql-action/analyze@v2
lint:
name: Lint
runs-on: ubuntu-latest
@@ -80,7 +80,7 @@ jobs:
- uses: actions/checkout@v4
- name: Run oxlint
# oxlint is fast, so wrong code will fail quickly
run: yarn dlx $(node -e "console.log(require('./package.json').scripts['lint:ox'].replace('oxlint', 'oxlint@' + require('./package.json').devDependencies.oxlint))")
run: yarn dlx $(node -e "console.log(require('./package.json').scripts['lint:ox'])")
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
@@ -374,9 +374,7 @@ jobs:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Run init-db script
run: |
yarn workspace @affine/server data-migration run
yarn workspace @affine/server exec node --loader ts-node/esm/transpile-only ./scripts/init-db.ts
run: yarn workspace @affine/server exec node --loader ts-node/esm/transpile-only ./scripts/init-db.ts
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
@@ -466,9 +464,7 @@ jobs:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Run init-db script
run: |
yarn workspace @affine/server data-migration run
yarn workspace @affine/server exec node --loader ts-node/esm/transpile-only ./scripts/init-db.ts
run: yarn workspace @affine/server exec node --loader ts-node/esm/transpile-only ./scripts/init-db.ts
- name: Download storage.node
uses: actions/download-artifact@v3
with:
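Aside on the lint:ox step above: the longer run line pins oxlint to the version declared in devDependencies by rewriting the script string before passing it to yarn dlx. A rough Node sketch of what that one-liner evaluates to, with the script text abbreviated and the version taken from the package.json hunk further down in this compare:
// Illustrative only; not part of the workflow above.
const pkg = {
  scripts: { 'lint:ox': 'oxlint --deny-warnings -D correctness -D nursery' }, // abbreviated
  devDependencies: { oxlint: '0.0.21' },
};
const pinned = pkg.scripts['lint:ox'].replace(
  'oxlint',
  'oxlint@' + pkg.devDependencies.oxlint
);
console.log(pinned); // oxlint@0.0.21 --deny-warnings -D correctness -D nursery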

View File

@@ -4,7 +4,7 @@ on:
workflow_dispatch:
inputs:
flavor:
description: 'Build type (canary, beta, or stable or internal)'
description: 'Build type (canary, beta, or stable)'
type: string
default: canary
@@ -69,7 +69,7 @@ jobs:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Rust
- name: Setup Rust
uses: ./.github/actions/build-rust
with:
target: 'x86_64-unknown-linux-gnu'
@@ -90,7 +90,7 @@ jobs:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Rust
- name: Setup Rust
uses: ./.github/actions/build-rust
with:
target: 'aarch64-unknown-linux-gnu'
@@ -207,12 +207,6 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: setup deploy version
id: version
run: |
export APP_VERSION=`node -e "console.log(require('./package.json').version)"`
echo $APP_VERSION
echo "APP_VERSION=$APP_VERSION" >> "$GITHUB_OUTPUT"
- name: Deploy to ${{ github.event.inputs.flavor }}
uses: ./.github/actions/deploy
with:
@@ -223,7 +217,6 @@ jobs:
cluster-name: ${{ secrets.GCP_CLUSTER_NAME }}
cluster-location: ${{ secrets.GCP_CLUSTER_LOCATION }}
env:
APP_VERSION: ${{ steps.version.outputs.APP_VERSION }}
DEPLOY_HOST: ${{ secrets.DEPLOY_HOST }}
CANARY_DEPLOY_HOST: ${{ secrets.CANARY_DEPLOY_HOST }}
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
@@ -249,4 +242,3 @@ jobs:
CLOUD_SQL_IAM_ACCOUNT: ${{ secrets.CLOUD_SQL_IAM_ACCOUNT }}
STRIPE_API_KEY: ${{ secrets.STRIPE_API_KEY }}
STRIPE_WEBHOOK_KEY: ${{ secrets.STRIPE_WEBHOOK_KEY }}
STATIC_IP_NAME: ${{ secrets.STATIC_IP_NAME }}

View File

@@ -231,11 +231,6 @@ jobs:
node ./packages/frontend/electron/scripts/generate-yml.js
env:
RELEASE_VERSION: ${{ needs.set-build-version.outputs.version }}
- name: Generate SHA512 checksums
run: |
sha512sum *-linux-* > SHA512SUMS.txt
sha512sum *-macos-* >> SHA512SUMS.txt
sha512sum *-windows-* >> SHA512SUMS.txt
- name: Create Release Draft
uses: softprops/action-gh-release@v1
env:
@@ -246,7 +241,6 @@ jobs:
tag_name: ${{ needs.set-build-version.outputs.version }}
prerelease: true
files: |
./SHA512SUMS.txt
./VERSION
./*.zip
./*.dmg

View File

@@ -11,7 +11,7 @@ jobs:
deploy:
runs-on: ubuntu-latest
name: Deploy
environment: stable
environment: production
steps:
- uses: actions/checkout@v4
- name: Publish

View File

@@ -15,7 +15,6 @@ packages/backend/server/src/schema.gql
packages/frontend/i18n/src/i18n-generated.ts
packages/frontend/graphql/src/graphql/index.ts
tests/affine-legacy/**/static
.yarnrc.yml
# auto-generated by NAPI-RS
# fixme(@joooye34): need script to check and generate ignore list here

File diff suppressed because one or more lines are too long

View File

@@ -6,10 +6,10 @@ nmMode: hardlinks-local
nodeLinker: node-modules
npmAuthToken: "${NPM_TOKEN:-NONE}"
npmAuthToken: '${NPM_TOKEN:-NONE}'
npmPublishAccess: public
npmPublishRegistry: "https://registry.npmjs.org"
npmPublishRegistry: 'https://registry.npmjs.org'
yarnPath: .yarn/releases/yarn-4.0.2.cjs
yarnPath: .yarn/releases/yarn-4.0.1.cjs

Cargo.lock generated
View File

@@ -3318,18 +3318,18 @@ dependencies = [
[[package]]
name = "zerocopy"
version = "0.7.31"
version = "0.7.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1c4061bedbb353041c12f413700357bec76df2c7e2ca8e4df8bac24c6bf68e3d"
checksum = "e97e415490559a91254a2979b4829267a57d2fcd741a98eee8b722fb57289aa0"
dependencies = [
"zerocopy-derive",
]
[[package]]
name = "zerocopy-derive"
version = "0.7.31"
version = "0.7.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3c129550b3e6de3fd0ba67ba5c81818f9805e58b8d7fee80a3a59d2c9fc601a"
checksum = "dd7e48ccf166952882ca8bd778a43502c64f33bf94c12ebe2a7f08e5a0f6689f"
dependencies = [
"proc-macro2",
"quote",

View File

@@ -227,7 +227,7 @@ See [LICENSE] for details.
[jobs available]: ./docs/jobs.md
[latest packages]: https://github.com/toeverything/AFFiNE/pkgs/container/affine-self-hosted
[contributor license agreement]: https://github.com/toeverything/affine/edit/canary/.github/CLA.md
[rust-version-icon]: https://img.shields.io/badge/Rust-1.74.1-dea584
[rust-version-icon]: https://img.shields.io/badge/Rust-1.74.0-dea584
[stars-icon]: https://img.shields.io/github/stars/toeverything/AFFiNE.svg?style=flat&logo=github&colorB=red&label=stars
[codecov]: https://codecov.io/gh/toeverything/affine/branch/canary/graphs/badge.svg?branch=canary
[node-version-icon]: https://img.shields.io/badge/node-%3E=18.16.1-success

View File

@@ -1,6 +1,6 @@
{
"name": "@affine/monorepo",
"version": "0.11.0-beta.1",
"version": "0.10.4-canary.1",
"private": true,
"author": "toeverything",
"license": "MIT",
@@ -33,7 +33,7 @@
"lint:eslint:fix": "yarn lint:eslint --fix",
"lint:prettier": "prettier --ignore-unknown --cache --check .",
"lint:prettier:fix": "prettier --ignore-unknown --cache --write .",
"lint:ox": "oxlint --deny-warnings -D correctness -D nursery -D prefer-array-some -D no-useless-promise-resolve-reject -A no-undef -A consistent-type-exports -A default -A named -A ban-ts-comment",
"lint:ox": "oxlint --deny-warnings --import-plugin -D correctness -D nursery -D prefer-array-some -D no-useless-promise-resolve-reject -A no-undef -A consistent-type-exports -A default -A named -A ban-ts-comment",
"lint": "yarn lint:eslint && yarn lint:prettier",
"lint:fix": "yarn lint:eslint:fix && yarn lint:prettier:fix",
"test": "vitest --run",
@@ -78,8 +78,8 @@
"@vanilla-extract/vite-plugin": "^3.9.2",
"@vanilla-extract/webpack-plugin": "^2.3.1",
"@vitejs/plugin-react-swc": "^3.5.0",
"@vitest/coverage-istanbul": "1.0.4",
"@vitest/ui": "1.0.4",
"@vitest/coverage-istanbul": "0.34.6",
"@vitest/ui": "0.34.6",
"electron": "^27.1.0",
"eslint": "^8.54.0",
"eslint-config-prettier": "^9.0.0",
@@ -100,24 +100,24 @@
"nx": "^17.1.3",
"nx-cloud": "^16.5.2",
"nyc": "^15.1.0",
"oxlint": "0.0.21",
"oxlint": "^0.0.18",
"prettier": "^3.1.0",
"semver": "^7.5.4",
"serve": "^14.2.1",
"string-width": "^7.0.0",
"ts-node": "^10.9.1",
"typescript": "^5.3.2",
"vite": "^5.0.6",
"vite": "^5.0.1",
"vite-plugin-istanbul": "^5.0.0",
"vite-plugin-static-copy": "^1.0.0",
"vite-plugin-static-copy": "^0.17.1",
"vite-tsconfig-paths": "^4.2.1",
"vitest": "1.0.4",
"vitest": "0.34.6",
"vitest-fetch-mock": "^0.2.2",
"vitest-mock-extended": "^1.3.1"
},
"packageManager": "yarn@4.0.2",
"packageManager": "yarn@4.0.1",
"resolutions": {
"vite": "^5.0.6",
"vite": "^4.4.11",
"array-buffer-byte-length": "npm:@nolyfill/array-buffer-byte-length@latest",
"array-includes": "npm:@nolyfill/array-includes@latest",
"array.prototype.flat": "npm:@nolyfill/array.prototype.flat@latest",
@@ -174,7 +174,7 @@
"which-boxed-primitive": "npm:@nolyfill/which-boxed-primitive@latest",
"which-typed-array": "npm:@nolyfill/which-typed-array@latest",
"next-auth@^4.24.5": "patch:next-auth@npm%3A4.24.5#~/.yarn/patches/next-auth-npm-4.24.5-8428e11927.patch",
"@reforged/maker-appimage/@electron-forge/maker-base": "7.2.0",
"@reforged/maker-appimage/@electron-forge/maker-base": "7.1.0",
"macos-alias": "npm:macos-alias-building@latest",
"fs-xattr": "npm:@napi-rs/xattr@latest"
}

View File

@@ -1,45 +0,0 @@
/*
Warnings:
- You are about to drop the `user_feature_gates` table. If the table is not empty, all the data it contains will be lost.
*/
-- DropForeignKey
ALTER TABLE "user_feature_gates" DROP CONSTRAINT "user_feature_gates_user_id_fkey";
-- DropTable
DROP TABLE "user_feature_gates";
-- CreateTable
CREATE TABLE "user_features" (
"id" SERIAL NOT NULL,
"user_id" VARCHAR(36) NOT NULL,
"feature_id" INTEGER NOT NULL,
"reason" VARCHAR NOT NULL,
"created_at" TIMESTAMPTZ(6) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"expired_at" TIMESTAMPTZ(6),
"activated" BOOLEAN NOT NULL DEFAULT false,
CONSTRAINT "user_features_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "features" (
"id" SERIAL NOT NULL,
"feature" VARCHAR NOT NULL,
"version" INTEGER NOT NULL DEFAULT 0,
"type" INTEGER NOT NULL,
"configs" JSON NOT NULL,
"created_at" TIMESTAMPTZ(6) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "features_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE UNIQUE INDEX "features_feature_version_key" ON "features"("feature", "version");
-- AddForeignKey
ALTER TABLE "user_features" ADD CONSTRAINT "user_features_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "user_features" ADD CONSTRAINT "user_features_feature_id_fkey" FOREIGN KEY ("feature_id") REFERENCES "features"("id") ON DELETE CASCADE ON UPDATE CASCADE;

View File

@@ -1,7 +1,7 @@
{
"name": "@affine/server",
"private": true,
"version": "0.11.0-beta.1",
"version": "0.10.4-canary.1",
"description": "Affine Node.js server",
"type": "module",
"bin": {
@@ -41,7 +41,7 @@
"@opentelemetry/core": "^1.18.1",
"@opentelemetry/exporter-prometheus": "^0.45.1",
"@opentelemetry/exporter-zipkin": "^1.18.1",
"@opentelemetry/host-metrics": "^0.34.0",
"@opentelemetry/host-metrics": "^0.33.2",
"@opentelemetry/instrumentation": "^0.45.1",
"@opentelemetry/instrumentation-graphql": "^0.36.0",
"@opentelemetry/instrumentation-http": "^0.45.1",
@@ -82,8 +82,7 @@
"socket.io": "^4.7.2",
"stripe": "^14.5.0",
"ws": "^8.14.2",
"yjs": "^13.6.10",
"zod": "^3.22.4"
"yjs": "^13.6.10"
},
"devDependencies": {
"@affine-test/kit": "workspace:*",
@@ -136,8 +135,7 @@
"ENABLE_LOCAL_EMAIL": "true",
"OAUTH_EMAIL_LOGIN": "noreply@toeverything.info",
"OAUTH_EMAIL_PASSWORD": "affine",
"OAUTH_EMAIL_SENDER": "noreply@toeverything.info",
"FEATURES_EARLY_ACCESS_PREVIEW": "false"
"OAUTH_EMAIL_SENDER": "noreply@toeverything.info"
}
},
"nodemonConfig": {

View File

@@ -22,7 +22,7 @@ model User {
accounts Account[]
sessions Session[]
features UserFeatures[]
features UserFeatureGates[]
customer UserStripeCustomer?
subscription UserSubscription?
invoices UserInvoice[]
@@ -113,48 +113,15 @@ model WorkspacePageUserPermission {
@@map("workspace_page_user_permissions")
}
// feature gates are a way to enable/disable features for a user
// for example:
// - early access is a feature that allows some users to access the insider version
// - pro plan is a quota that allows some users access to more resources after they pay
model UserFeatures {
id Int @id @default(autoincrement())
userId String @map("user_id") @db.VarChar(36)
featureId Int @map("feature_id") @db.Integer
// we will record the reason why the feature is enabled/disabled
// for example:
// - pro_plan_v1: "user bought the pro plan"
reason String @db.VarChar
// record the quota enabled time
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
// record the quota expiry time; a paid plan is a subscription, so it will expire
expiredAt DateTime? @map("expired_at") @db.Timestamptz(6)
// whether the feature is activated
// for example:
// - if we switch the user to another plan, we will set the old plan to deactivated, but don't delete it
activated Boolean @default(false)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
feature Features @relation(fields: [featureId], references: [id], onDelete: Cascade)
@@map("user_features")
}
model Features {
id Int @id @default(autoincrement())
model UserFeatureGates {
id String @id @default(uuid()) @db.VarChar
userId String @map("user_id") @db.VarChar
feature String @db.VarChar
version Int @default(0) @db.Integer
// 0: feature, 1: quota
type Int @db.Integer
// configs, defined by the feature controller
configs Json @db.Json
reason String @db.VarChar
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
UserFeatureGates UserFeatures[]
@@unique([feature, version])
@@map("features")
@@map("user_feature_gates")
}
model Account {

View File

@@ -8,20 +8,6 @@ async function main() {
data: {
...userA,
password: await hash(userA.password),
features: {
create: {
reason: 'created by api sign up',
activated: true,
feature: {
connect: {
feature_version: {
feature: 'free_plan_v1',
version: 1,
},
},
},
},
},
},
});
}

View File

@@ -1,8 +1,7 @@
import { Module } from '@nestjs/common';
import { APP_INTERCEPTOR } from '@nestjs/core';
import { AppController } from './app.controller';
import { CacheInterceptor, CacheModule } from './cache';
import { CacheModule } from './cache';
import { ConfigModule } from './config';
import { EventModule } from './event';
import { BusinessModules } from './modules';
@@ -24,12 +23,6 @@ const BasicModules = [
];
@Module({
providers: [
{
provide: APP_INTERCEPTOR,
useClass: CacheInterceptor,
},
],
imports: [...BasicModules, ...BusinessModules],
controllers: [AppController],
})

View File

@@ -22,5 +22,3 @@ const CacheProvider: FactoryProvider = {
})
export class CacheModule {}
export { LocalCache as Cache };
export { CacheInterceptor, MakeCache, PreventCache } from './interceptor';

View File

@@ -1,99 +0,0 @@
import {
CallHandler,
ExecutionContext,
Injectable,
Logger,
NestInterceptor,
SetMetadata,
} from '@nestjs/common';
import { Reflector } from '@nestjs/core';
import { GqlContextType, GqlExecutionContext } from '@nestjs/graphql';
import { mergeMap, Observable, of } from 'rxjs';
import { LocalCache } from './cache';
export const MakeCache = (key: string[], args?: string[]) =>
SetMetadata('cacheKey', [key, args]);
export const PreventCache = (key: string[], args?: string[]) =>
SetMetadata('preventCache', [key, args]);
type CacheConfig = [string[], string[]?];
@Injectable()
export class CacheInterceptor implements NestInterceptor {
private readonly logger = new Logger(CacheInterceptor.name);
constructor(
private readonly reflector: Reflector,
private readonly cache: LocalCache
) {}
async intercept(
ctx: ExecutionContext,
next: CallHandler<any>
): Promise<Observable<any>> {
const key = this.reflector.get<CacheConfig | undefined>(
'cacheKey',
ctx.getHandler()
);
const preventKey = this.reflector.get<CacheConfig | undefined>(
'preventCache',
ctx.getHandler()
);
if (preventKey) {
this.logger.debug(`prevent cache: ${JSON.stringify(preventKey)}`);
const key = await this.getCacheKey(ctx, preventKey);
if (key) {
await this.cache.delete(key);
}
return next.handle();
} else if (!key) {
return next.handle();
}
const cacheKey = await this.getCacheKey(ctx, key);
if (!cacheKey) {
return next.handle();
}
const cachedData = await this.cache.get(cacheKey);
if (cachedData) {
this.logger.debug('cache hit', cacheKey, cachedData);
return of(cachedData);
} else {
return next.handle().pipe(
mergeMap(async result => {
this.logger.debug('cache miss', cacheKey, result);
await this.cache.set(cacheKey, result);
return result;
})
);
}
}
private async getCacheKey(
ctx: ExecutionContext,
config: CacheConfig
): Promise<string | null> {
const [key, params] = config;
if (!params) {
return key.join(':');
} else if (ctx.getType<GqlContextType>() === 'graphql') {
const args = GqlExecutionContext.create(ctx).getArgs();
const cacheKey = params
.map(name => args[name])
.filter(v => v)
.join(':');
if (cacheKey) {
return [...key, cacheKey].join(':');
} else {
return key.join(':');
}
}
return null;
}
}
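For reference, the metadata set by MakeCache/PreventCache above is what the interceptor's Reflector lookups read back. A hypothetical resolver using them (the resolver name, query names and the '../cache' import path are invented for illustration):
// Illustrative only; not part of the commit above.
import { Args, Query, Resolver } from '@nestjs/graphql';
import { MakeCache, PreventCache } from '../cache';

@Resolver()
export class WorkspaceResolver {
  // Cached under "workspace:<id>"; the interceptor returns the cached value on a hit.
  @MakeCache(['workspace'], ['id'])
  @Query(() => String)
  workspaceName(@Args('id') id: string): string {
    return `name-of-${id}`;
  }

  // Deletes the same "workspace:<id>" entry before the handler runs.
  @PreventCache(['workspace'], ['id'])
  @Query(() => Boolean)
  refreshWorkspace(@Args('id') id: string): boolean {
    return true;
  }
}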

View File

@@ -16,8 +16,6 @@ export enum ExternalAccount {
firebase = 'firebase',
}
export type ServerFlavor = 'allinone' | 'graphql' | 'sync' | 'selfhosted';
type EnvConfigType = 'string' | 'int' | 'float' | 'boolean';
type ConfigPaths = LeafPaths<
Omit<
@@ -188,6 +186,11 @@ export interface AFFiNEConfig {
fs: {
path: string;
};
/**
* default storage quota
* @default 10 * 1024 * 1024 * 1024 (10GB)
*/
quota: number;
};
/**
@@ -342,11 +345,6 @@ export interface AFFiNEConfig {
doc: {
manager: {
/**
* Whether auto merge updates into doc snapshot.
*/
enableUpdateAutoMerging: boolean;
/**
* How often the [DocManager] will start a new turn of merging pending updates into doc snapshot.
*

View File

@@ -7,12 +7,9 @@ import { join } from 'node:path';
import parse from 'parse-duration';
import pkg from '../../package.json' assert { type: 'json' };
import type { AFFiNEConfig, ServerFlavor } from './def';
import type { AFFiNEConfig } from './def';
import { applyEnvToConfig } from './env';
export const SERVER_FLAVOR = (process.env.SERVER_FLAVOR ??
'allinone') as ServerFlavor;
// Don't use this in production
export const examplePrivateKey = `-----BEGIN EC PRIVATE KEY-----
MHcCAQEEIEtyAJLIULkphVhqXqxk4Nr8Ggty3XLwUJWBxzAWCWTMoAoGCCqGSM49
@@ -58,6 +55,7 @@ export const getDefaultAFFiNEConfig: () => AFFiNEConfig = () => {
AFFINE_SERVER_HOST: 'host',
AFFINE_SERVER_SUB_PATH: 'path',
AFFINE_ENV: 'affineEnv',
AFFINE_FREE_USER_QUOTA: 'objectStorage.quota',
DATABASE_URL: 'db.url',
ENABLE_R2_OBJECT_STORAGE: ['objectStorage.r2.enabled', 'boolean'],
R2_OBJECT_STORAGE_ACCOUNT_ID: 'objectStorage.r2.accountId',
@@ -191,6 +189,8 @@ export const getDefaultAFFiNEConfig: () => AFFiNEConfig = () => {
fs: {
path: join(homedir(), '.affine-storage'),
},
// 10GB
quota: 10 * 1024 * 1024 * 1024,
},
rateLimiter: {
ttl: 60,
@@ -206,7 +206,6 @@ export const getDefaultAFFiNEConfig: () => AFFiNEConfig = () => {
},
doc: {
manager: {
enableUpdateAutoMerging: SERVER_FLAVOR !== 'sync',
updatePollInterval: 3000,
experimentalMergeWithJwstCodec: false,
},
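The AFFINE_* table above maps environment variable names to dotted config paths, optionally tagged with a type such as 'boolean'. The real applyEnvToConfig is not shown in this diff; the sketch below is only an assumption about how such a table could be applied:
// Illustrative only; not the actual applyEnvToConfig implementation.
function setByPath(config: Record<string, any>, path: string, value: unknown) {
  const keys = path.split('.');
  const last = keys.pop()!;
  let node = config;
  for (const key of keys) node = node[key] ??= {};
  node[last] = value;
}

const config: Record<string, any> = {};
// Plain-string entries copy the raw env value, e.g. AFFINE_FREE_USER_QUOTA -> objectStorage.quota.
setByPath(config, 'objectStorage.quota', process.env.AFFINE_FREE_USER_QUOTA);
// Entries tagged 'boolean' coerce the raw string first, e.g. ENABLE_R2_OBJECT_STORAGE.
setByPath(config, 'objectStorage.r2.enabled', process.env.ENABLE_R2_OBJECT_STORAGE === 'true');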

View File

@@ -73,4 +73,3 @@ export class ConfigModule {
}
export type { AFFiNEConfig } from './def';
export { SERVER_FLAVOR } from './default';

View File

@@ -14,7 +14,7 @@ interface Migration {
down: (db: PrismaService) => Promise<void>;
}
export async function collectMigrations(): Promise<Migration[]> {
async function collectMigrations(): Promise<Migration[]> {
const folder = join(fileURLToPath(import.meta.url), '../../migrations');
const migrationFiles = readdirSync(folder)
@@ -64,8 +64,35 @@ export class RunCommand extends CommandRunner {
continue;
}
await this.runMigration(migration);
this.logger.log(`Running ${migration.name}...`);
const record = await this.db.dataMigration.create({
data: {
name: migration.name,
startedAt: new Date(),
},
});
try {
await migration.up(this.db);
} catch (e) {
await this.db.dataMigration.delete({
where: {
id: record.id,
},
});
await migration.down(this.db);
this.logger.error('Failed to run data migration', e);
process.exit(1);
}
await this.db.dataMigration.update({
where: {
id: record.id,
},
data: {
finishedAt: new Date(),
},
});
done.push(migration);
}
@@ -74,56 +101,6 @@ export class RunCommand extends CommandRunner {
this.logger.log(`${migration.name}`);
});
}
async runOne(name: string) {
const migrations = await collectMigrations();
const migration = migrations.find(m => m.name === name);
if (!migration) {
throw new Error(`Unknown migration name: ${name}.`);
}
const exists = await this.db.dataMigration.count({
where: {
name: migration.name,
},
});
if (exists) return;
await this.runMigration(migration);
}
private async runMigration(migration: Migration) {
this.logger.log(`Running ${migration.name}...`);
const record = await this.db.dataMigration.create({
data: {
name: migration.name,
startedAt: new Date(),
},
});
try {
await migration.up(this.db);
} catch (e) {
await this.db.dataMigration.delete({
where: {
id: record.id,
},
});
await migration.down(this.db);
this.logger.error('Failed to run data migration', e);
process.exit(1);
}
await this.db.dataMigration.update({
where: {
id: record.id,
},
data: {
finishedAt: new Date(),
},
});
}
}
@Command({

View File

@@ -1,122 +0,0 @@
import { Prisma } from '@prisma/client';
import {
CommonFeature,
FeatureKind,
Features,
FeatureType,
} from '../../modules/features';
import { Quotas } from '../../modules/quota/schema';
import { PrismaService } from '../../prisma';
export class UserFeaturesInit1698652531198 {
// do the migration
static async up(db: PrismaService) {
// upgrade features from lower version to higher version
for (const feature of Features) {
await upsertFeature(db, feature);
}
await migrateNewFeatureTable(db);
for (const quota of Quotas) {
await upsertFeature(db, quota);
}
}
// revert the migration
static async down(_db: PrismaService) {
// TODO: revert the migration
}
}
// upgrade features from lower version to higher version
async function upsertFeature(
db: PrismaService,
feature: CommonFeature
): Promise<void> {
const hasEqualOrGreaterVersion =
(await db.features.count({
where: {
feature: feature.feature,
version: {
gte: feature.version,
},
},
})) > 0;
// will not update an existing version
if (!hasEqualOrGreaterVersion) {
await db.features.create({
data: {
feature: feature.feature,
type: feature.type,
version: feature.version,
configs: feature.configs as Prisma.InputJsonValue,
},
});
}
}
async function migrateNewFeatureTable(prisma: PrismaService) {
const waitingList = await prisma.newFeaturesWaitingList.findMany();
for (const oldUser of waitingList) {
const user = await prisma.user.findFirst({
where: {
email: oldUser.email,
},
});
if (user) {
const hasEarlyAccess = await prisma.userFeatures.count({
where: {
userId: user.id,
feature: {
feature: FeatureType.EarlyAccess,
},
activated: true,
},
});
if (hasEarlyAccess === 0) {
await prisma.$transaction(async tx => {
const latestFlag = await tx.userFeatures.findFirst({
where: {
userId: user.id,
feature: {
feature: FeatureType.EarlyAccess,
type: FeatureKind.Feature,
},
activated: true,
},
orderBy: {
createdAt: 'desc',
},
});
if (latestFlag) {
return latestFlag.id;
} else {
return tx.userFeatures
.create({
data: {
reason: 'Early access user',
activated: true,
user: {
connect: {
id: user.id,
},
},
feature: {
connect: {
feature_version: {
feature: FeatureType.EarlyAccess,
version: 1,
},
type: FeatureKind.Feature,
},
},
},
})
.then(r => r.id);
}
});
}
}
}
}

View File

@@ -1,37 +0,0 @@
import { QuotaType } from '../../modules/quota/types';
import { PrismaService } from '../../prisma';
export class OldUserFeature1702620653283 {
// do the migration
static async up(db: PrismaService) {
await db.$transaction(async tx => {
const latestFreePlan = await tx.features.findFirstOrThrow({
where: { feature: QuotaType.FreePlanV1 },
orderBy: { version: 'desc' },
select: { id: true },
});
// find all users that don't have any features
const userIds = await db.user.findMany({
where: { NOT: { features: { some: { NOT: { id: { gt: 0 } } } } } },
select: { id: true },
});
console.log(`migrating ${userIds.join('|')} users`);
await tx.userFeatures.createMany({
data: userIds.map(({ id: userId }) => ({
userId,
featureId: latestFreePlan.id,
reason: 'old user feature migration',
activated: true,
})),
});
});
}
// revert the migration
// WARN: this will drop all user features
static async down(db: PrismaService) {
await db.userFeatures.deleteMany({});
}
}

View File

@@ -28,10 +28,12 @@ export class EventEmitter {
}
}
export const OnEvent = RawOnEvent as (
export const OnEvent = (
event: Event,
opts?: Parameters<typeof RawOnEvent>[1]
) => MethodDecorator;
) => {
return RawOnEvent(event, opts);
};
@Global()
@Module({

View File

@@ -29,7 +29,6 @@ import { GQLLoggerPlugin } from './graphql/logger-plugin';
context: ({ req, res }: { req: Request; res: Response }) => ({
req,
res,
isAdminQuery: false,
}),
plugins: [new GQLLoggerPlugin()],
};

View File

@@ -72,11 +72,15 @@ export class MailService {
invitationInfo.workspace.name
}</span></p><p style="margin-top:8px;margin-bottom:0;">Click button to join this workspace</p>`;
const subContent =
'Currently, AFFiNE Cloud is in the early access stage. Only Early Access Sponsors can register and log in to AFFiNE Cloud. <a href="https://community.affine.pro/c/insider-general/" style="color: #1e67af" >Please click here for more information.</a>';
const html = emailTemplate({
title: 'You are invited!',
content,
buttonContent: 'Accept & Join',
buttonUrl,
subContent,
});
return this.sendMail({

View File

@@ -11,8 +11,8 @@ import Google from 'next-auth/providers/google';
import { Config } from '../../config';
import { PrismaService } from '../../prisma';
import { SessionService } from '../../session';
import { FeatureType } from '../features';
import { Quota_FreePlanV1 } from '../quota';
import { NewFeaturesKind } from '../users/types';
import { isStaff } from '../users/utils';
import { MailService } from './mailer';
import {
decode,
@@ -44,17 +44,6 @@ export const NextAuthOptionsProvider: FactoryProvider<NextAuthOptions> = {
email: data.email,
avatarUrl: '',
emailVerified: data.emailVerified,
features: {
create: {
reason: 'created by email sign up',
activated: true,
feature: {
connect: {
feature_version: Quota_FreePlanV1,
},
},
},
},
};
if (data.email && !data.name) {
userData.name = data.email.split('@')[0];
@@ -234,23 +223,18 @@ export const NextAuthOptionsProvider: FactoryProvider<NextAuthOptions> = {
}
const email = profile?.email ?? user.email;
if (email) {
// FIXME: cannot inject FeatureManagementService here
// it will cause prisma.account to be undefined
// then prismaAdapter.getUserByAccount will throw error
if (email.endsWith('@toeverything.info')) return true;
return prisma.userFeatures
.count({
if (isStaff(email)) {
return true;
}
return prisma.newFeaturesWaitingList
.findUnique({
where: {
user: {
email,
},
feature: {
feature: FeatureType.EarlyAccess,
},
activated: true,
email,
type: NewFeaturesKind.EarlyAccess,
},
})
.then(count => count > 0);
.then(user => !!user)
.catch(() => false);
}
return false;
},
@@ -258,10 +242,6 @@ export const NextAuthOptionsProvider: FactoryProvider<NextAuthOptions> = {
return url;
},
};
nextAuthOptions.pages = {
newUser: '/auth/onboarding',
};
return nextAuthOptions;
},
inject: [Config, PrismaService, MailService, SessionService],

View File

@@ -19,7 +19,7 @@ import { nanoid } from 'nanoid';
import { Config } from '../../config';
import { SessionService } from '../../session';
import { CloudThrottlerGuard, Throttle } from '../../throttler';
import { UserType } from '../users';
import { UserType } from '../users/resolver';
import { Auth, CurrentUser } from './guard';
import { AuthService } from './service';

View File

@@ -14,7 +14,6 @@ import { nanoid } from 'nanoid';
import { Config } from '../../config';
import { PrismaService } from '../../prisma';
import { verifyChallengeResponse } from '../../storage';
import { Quota_FreePlanV1 } from '../quota';
import { MailService } from './mailer';
export type UserClaim = Pick<
@@ -191,17 +190,6 @@ export class AuthService {
name,
email,
password: hashedPassword,
features: {
create: {
reason: 'created by api sign up',
activated: true,
feature: {
connect: {
feature_version: Quota_FreePlanV1,
},
},
},
},
},
});
}
@@ -221,17 +209,6 @@ export class AuthService {
data: {
name: 'Unnamed',
email,
features: {
create: {
reason: 'created by invite sign up',
activated: true,
feature: {
connect: {
feature_version: Quota_FreePlanV1,
},
},
},
},
},
});
}
@@ -281,7 +258,6 @@ export class AuthService {
},
});
}
async changeEmail(id: string, newEmail: string): Promise<User> {
const user = await this.prisma.user.findUnique({
where: {

View File

@@ -1,7 +1,7 @@
import { Module } from '@nestjs/common';
import { Field, ObjectType, Query } from '@nestjs/graphql';
import { SERVER_FLAVOR } from '../config';
export const { SERVER_FLAVOR } = process.env;
@ObjectType()
export class ServerConfigType {
@@ -19,7 +19,7 @@ export class ServerConfigResolver {
serverConfig(): ServerConfigType {
return {
version: AFFiNE.version,
flavor: SERVER_FLAVOR,
flavor: SERVER_FLAVOR || 'allinone',
};
}
}

View File

@@ -7,7 +7,7 @@ import { Config } from '../../config';
import { type EventPayload, OnEvent } from '../../event';
import { metrics } from '../../metrics';
import { PrismaService } from '../../prisma';
import { QuotaService } from '../quota';
import { SubscriptionStatus } from '../payment/service';
import { Permission } from '../workspaces/types';
import { isEmptyBuffer } from './manager';
@@ -16,8 +16,7 @@ export class DocHistoryManager {
private readonly logger = new Logger(DocHistoryManager.name);
constructor(
private readonly config: Config,
private readonly db: PrismaService,
private readonly quota: QuotaService
private readonly db: PrismaService
) {}
@OnEvent('workspace.deleted')
@@ -223,6 +222,9 @@ export class DocHistoryManager {
return history.timestamp;
}
/**
* @todo(@darkskygit) refactor with [Usage Control] system
*/
async getExpiredDateFromNow(workspaceId: string) {
const permission = await this.db.workspaceUserPermission.findFirst({
select: {
@@ -239,8 +241,25 @@ export class DocHistoryManager {
throw new Error('Workspace owner not found');
}
const quota = await this.quota.getUserQuota(permission.userId);
return quota.feature.historyPeriodFromNow;
const sub = await this.db.userSubscription.findFirst({
select: {
id: true,
},
where: {
userId: permission.userId,
status: SubscriptionStatus.Active,
},
});
return new Date(
Date.now() +
1000 *
60 *
60 *
24 *
// 30 days for subscription user, 7 days for free user
(sub ? 30 : 7)
);
}
@Cron(CronExpression.EVERY_DAY_AT_MIDNIGHT /* everyday at 12am */)

View File

@@ -1,14 +1,38 @@
import { Module } from '@nestjs/common';
import { DynamicModule } from '@nestjs/common';
import { QuotaModule } from '../quota';
import { DocHistoryManager } from './history';
import { DocManager } from './manager';
@Module({
imports: [QuotaModule],
providers: [DocManager, DocHistoryManager],
exports: [DocManager, DocHistoryManager],
})
export class DocModule {}
export class DocModule {
/**
* @param automation whether enable update merging automation logic
*/
private static defModule(automation = true): DynamicModule {
return {
module: DocModule,
providers: [
{
provide: 'DOC_MANAGER_AUTOMATION',
useValue: automation,
},
DocManager,
DocHistoryManager,
],
exports: [DocManager, DocHistoryManager],
};
}
static forRoot() {
return this.defModule();
}
static forSync(): DynamicModule {
return this.defModule(false);
}
static forFeature(): DynamicModule {
return this.defModule(false);
}
}
export { DocHistoryManager, DocManager };
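In the dynamic-module variant above, the automation flag reaches DocManager through the 'DOC_MANAGER_AUTOMATION' injection token. A minimal sketch of how a consumer picks a variant, mirroring the SERVER_FLAVOR switch elsewhere in this compare (the wrapper module name is invented):
// Illustrative only; not part of the commit above.
import { Module } from '@nestjs/common';
import { DocModule } from './doc';

// The sync flavor runs without update-merging automation; everything else enables it.
const flavor = process.env.SERVER_FLAVOR ?? 'allinone';

@Module({
  imports: [flavor === 'sync' ? DocModule.forSync() : DocModule.forRoot()],
})
export class AppDocModule {}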

View File

@@ -1,4 +1,5 @@
import {
Inject,
Injectable,
Logger,
OnModuleDestroy,
@@ -96,6 +97,8 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
private busy = false;
constructor(
@Inject('DOC_MANAGER_AUTOMATION')
private readonly automation: boolean,
private readonly db: PrismaService,
private readonly config: Config,
private readonly cache: Cache,
@@ -103,7 +106,7 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
) {}
onModuleInit() {
if (this.config.doc.manager.enableUpdateAutoMerging) {
if (this.automation) {
this.logger.log('Use Database');
this.setup();
}
@@ -461,9 +464,6 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
workspaceId: string,
guid: string,
doc: Doc,
// we always delay the snapshot update to avoid db overload,
// so the value of `updatedAt` will not be accurate to user's real action time
updatedAt: Date,
initialSeq?: number
) {
return this.lockSnapshotForUpsert(workspaceId, guid, async () => {
@@ -502,7 +502,6 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
data: {
blob,
state,
updatedAt,
},
});
@@ -522,8 +521,6 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
blob,
state,
seq: initialSeq,
createdAt: updatedAt,
updatedAt,
},
});
@@ -568,45 +565,38 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
...updates.map(u => u.blob)
);
const done = await this.upsert(
workspaceId,
id,
doc,
last.createdAt,
last.seq
);
await this.upsert(workspaceId, id, doc, last.seq);
if (snapshot) {
this.event.emit('snapshot.updated', {
id,
workspaceId,
previous: {
blob: snapshot.blob,
state: snapshot.state,
updatedAt: snapshot.updatedAt,
},
});
}
const done = await this.upsert(workspaceId, id, doc, last.seq);
if (done) {
if (snapshot) {
this.event.emit('snapshot.updated', {
id,
workspaceId,
previous: {
blob: snapshot.blob,
state: snapshot.state,
updatedAt: snapshot.updatedAt,
},
});
}
this.logger.debug(
`Squashed ${updates.length} updates for ${id} in workspace ${workspaceId}`
);
}
// always delete updates
// the upsert will return false if the state is not newer, so we don't need to worry about it
const { count } = await this.db.update.deleteMany({
where: {
id,
workspaceId,
seq: {
in: updates.map(u => u.seq),
await this.db.update.deleteMany({
where: {
id,
workspaceId,
seq: {
in: updates.map(u => u.seq),
},
},
},
});
});
await this.updateCachedUpdatesCount(workspaceId, id, -count);
await this.updateCachedUpdatesCount(workspaceId, id, -updates.length);
}
return doc;
}

View File

@@ -1,78 +0,0 @@
import { PrismaService } from '../../prisma';
import { Feature, FeatureSchema, FeatureType } from './types';
class FeatureConfig {
readonly config: Feature;
constructor(data: any) {
const config = FeatureSchema.safeParse(data);
if (config.success) {
this.config = config.data;
} else {
throw new Error(`Invalid quota config: ${config.error.message}`);
}
}
/// feature name of quota
get name() {
return this.config.feature;
}
}
export class EarlyAccessFeatureConfig extends FeatureConfig {
constructor(data: any) {
super(data);
if (this.config.feature !== FeatureType.EarlyAccess) {
throw new Error('Invalid feature config: type is not EarlyAccess');
}
}
checkWhiteList(email: string) {
for (const domain in this.config.configs.whitelist) {
if (email.endsWith(domain)) {
return true;
}
}
return false;
}
}
const FeatureConfigMap = {
[FeatureType.EarlyAccess]: EarlyAccessFeatureConfig,
};
const FeatureCache = new Map<
number,
InstanceType<(typeof FeatureConfigMap)[FeatureType]>
>();
export async function getFeature(prisma: PrismaService, featureId: number) {
const cachedQuota = FeatureCache.get(featureId);
if (cachedQuota) {
return cachedQuota;
}
const feature = await prisma.features.findFirst({
where: {
id: featureId,
},
});
if (!feature) {
// this should be unreachable
throw new Error(`Quota config ${featureId} not found`);
}
const ConfigClass = FeatureConfigMap[feature.feature as FeatureType];
if (!ConfigClass) {
throw new Error(`Feature config ${featureId} not found`);
}
const config = new ConfigClass(feature);
// we always edit quota config as a new quota config
// so we can cache it by featureId
FeatureCache.set(featureId, config);
return config;
}

View File

@@ -1,21 +0,0 @@
import { Module } from '@nestjs/common';
import { PrismaService } from '../../prisma';
import { FeatureManagementService } from './management';
import { FeatureService } from './service';
/**
* The Feature module provides per-user feature flag management.
* includes:
* - feature query/update/permit
* - feature statistics
*/
@Module({
providers: [FeatureService, FeatureManagementService],
exports: [FeatureService, FeatureManagementService],
})
export class FeatureModule {}
export { type CommonFeature, commonFeatureSchema } from './types';
export { FeatureKind, Features, FeatureType } from './types';
export { FeatureManagementService, FeatureService, PrismaService };

View File

@@ -1,89 +0,0 @@
import { Injectable, Logger, OnModuleInit } from '@nestjs/common';
import { Config } from '../../config';
import { PrismaService } from '../../prisma';
import { EarlyAccessFeatureConfig } from './feature';
import { FeatureService } from './service';
import { FeatureType } from './types';
enum NewFeaturesKind {
EarlyAccess,
}
@Injectable()
export class FeatureManagementService implements OnModuleInit {
protected logger = new Logger(FeatureManagementService.name);
private earlyAccessFeature?: EarlyAccessFeatureConfig;
constructor(
private readonly feature: FeatureService,
private readonly prisma: PrismaService,
private readonly config: Config
) {}
async onModuleInit() {
this.earlyAccessFeature = await this.feature.getFeature(
FeatureType.EarlyAccess
);
}
// ======== Admin ========
// todo(@darkskygit): replace this with abac
isStaff(email: string) {
return this.earlyAccessFeature?.checkWhiteList(email) ?? false;
}
// ======== Early Access ========
async addEarlyAccess(userId: string) {
return this.feature.addUserFeature(
userId,
FeatureType.EarlyAccess,
1,
'Early access user'
);
}
async removeEarlyAccess(userId: string) {
return this.feature.removeUserFeature(userId, FeatureType.EarlyAccess);
}
async listEarlyAccess() {
return this.feature.listFeatureUsers(FeatureType.EarlyAccess);
}
/// check early access by email
async canEarlyAccess(email: string) {
if (this.config.featureFlags.earlyAccessPreview && !this.isStaff(email)) {
const user = await this.prisma.user.findFirst({
where: {
email,
},
});
if (user) {
const canEarlyAccess = await this.feature
.hasFeature(user.id, FeatureType.EarlyAccess)
.catch(() => false);
if (canEarlyAccess) {
return true;
}
// TODO: Outdated, switch to feature gates
const oldCanEarlyAccess = await this.prisma.newFeaturesWaitingList
.findUnique({
where: { email, type: NewFeaturesKind.EarlyAccess },
})
.then(x => !!x)
.catch(() => false);
if (oldCanEarlyAccess) {
this.logger.warn(
`User ${email} has early access in old table but not in new table`
);
}
return oldCanEarlyAccess;
}
return false;
} else {
return true;
}
}
}

View File

@@ -1,184 +0,0 @@
import { Injectable } from '@nestjs/common';
import { PrismaService } from '../../prisma';
import { UserType } from '../users/types';
import { getFeature } from './feature';
import { FeatureKind, FeatureType } from './types';
@Injectable()
export class FeatureService {
constructor(private readonly prisma: PrismaService) {}
async getFeaturesVersion() {
const features = await this.prisma.features.findMany({
where: {
type: FeatureKind.Feature,
},
select: {
feature: true,
version: true,
},
});
return features.reduce(
(acc, feature) => {
acc[feature.feature] = feature.version;
return acc;
},
{} as Record<string, number>
);
}
async getFeature(feature: FeatureType) {
const data = await this.prisma.features.findFirst({
where: {
feature,
type: FeatureKind.Feature,
},
select: { id: true },
orderBy: {
version: 'desc',
},
});
if (data) {
return getFeature(this.prisma, data.id);
}
return undefined;
}
async addUserFeature(
userId: string,
feature: FeatureType,
version: number,
reason: string,
expiredAt?: Date | string
) {
return this.prisma.$transaction(async tx => {
const latestFlag = await tx.userFeatures.findFirst({
where: {
userId,
feature: {
feature,
type: FeatureKind.Feature,
},
activated: true,
},
orderBy: {
createdAt: 'desc',
},
});
if (latestFlag) {
return latestFlag.id;
} else {
return tx.userFeatures
.create({
data: {
reason,
expiredAt,
activated: true,
user: {
connect: {
id: userId,
},
},
feature: {
connect: {
feature_version: {
feature,
version,
},
type: FeatureKind.Feature,
},
},
},
})
.then(r => r.id);
}
});
}
async removeUserFeature(userId: string, feature: FeatureType) {
return this.prisma.userFeatures
.updateMany({
where: {
userId,
feature: {
feature,
type: FeatureKind.Feature,
},
activated: true,
},
data: {
activated: false,
},
})
.then(r => r.count);
}
async getUserFeatures(userId: string) {
const features = await this.prisma.userFeatures.findMany({
where: {
user: { id: userId },
feature: {
type: FeatureKind.Feature,
},
},
select: {
activated: true,
reason: true,
createdAt: true,
expiredAt: true,
featureId: true,
},
});
const configs = await Promise.all(
features.map(async feature => ({
...feature,
feature: await getFeature(this.prisma, feature.featureId),
}))
);
return configs.filter(feature => !!feature.feature);
}
async listFeatureUsers(feature: FeatureType): Promise<UserType[]> {
return this.prisma.userFeatures
.findMany({
where: {
activated: true,
feature: {
feature: feature,
type: FeatureKind.Feature,
},
},
select: {
user: {
select: {
id: true,
name: true,
avatarUrl: true,
email: true,
emailVerified: true,
createdAt: true,
},
},
},
})
.then(users => users.map(user => user.user));
}
async hasFeature(userId: string, feature: FeatureType) {
return this.prisma.userFeatures
.count({
where: {
userId,
activated: true,
feature: {
feature,
type: FeatureKind.Feature,
},
},
})
.then(count => count > 0);
}
}

View File

@@ -1,65 +0,0 @@
import { URL } from 'node:url';
import { z } from 'zod';
/// ======== common schema ========
export enum FeatureKind {
Feature,
Quota,
}
export const commonFeatureSchema = z.object({
feature: z.string(),
type: z.nativeEnum(FeatureKind),
version: z.number(),
configs: z.unknown(),
});
export type CommonFeature = z.infer<typeof commonFeatureSchema>;
/// ======== feature define ========
export enum FeatureType {
EarlyAccess = 'early_access',
}
function checkHostname(host: string) {
try {
return new URL(`https://${host}`).hostname === host;
} catch (_) {
return false;
}
}
const featureEarlyAccess = z.object({
feature: z.literal(FeatureType.EarlyAccess),
configs: z.object({
whitelist: z
.string()
.startsWith('@')
.refine(domain => checkHostname(domain.slice(1)))
.array(),
}),
});
export const Features: Feature[] = [
{
feature: FeatureType.EarlyAccess,
type: FeatureKind.Feature,
version: 1,
configs: {
whitelist: ['@toeverything.info'],
},
},
];
/// ======== schema infer ========
export const FeatureSchema = commonFeatureSchema
.extend({
type: z.literal(FeatureKind.Feature),
})
.and(z.discriminatedUnion('feature', [featureEarlyAccess]));
export type Feature = z.infer<typeof FeatureSchema>;
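// A minimal sketch, not part of this changeset: because FeatureSchema above is a
// zod schema, a config read from the database can be validated before use. The
// object below mirrors the seeded early-access feature; the import path assumes
// this file is `types.ts` in the features module.
import { FeatureKind, FeatureSchema, FeatureType } from './types';

const parsed = FeatureSchema.safeParse({
  feature: FeatureType.EarlyAccess,
  type: FeatureKind.Feature,
  version: 1,
  configs: { whitelist: ['@toeverything.info'] },
});

if (parsed.success) {
  // the refine() above has already verified each entry is '@' plus a valid hostname
  console.log('early access whitelist:', parsed.data.configs.whitelist);
} else {
  console.error('invalid feature config:', parsed.error.message);
}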

View File

@@ -1,12 +1,10 @@
import { DynamicModule, Type } from '@nestjs/common';
import { ScheduleModule } from '@nestjs/schedule';
import { SERVER_FLAVOR } from '../config';
import { GqlModule } from '../graphql.module';
import { ServerConfigModule } from './config';
import { SERVER_FLAVOR, ServerConfigModule } from './config';
import { DocModule } from './doc';
import { PaymentModule } from './payment';
import { QuotaModule } from './quota';
import { SelfHostedModule } from './self-hosted';
import { SyncModule } from './sync';
import { UsersModule } from './users';
@@ -16,7 +14,7 @@ const BusinessModules: (Type | DynamicModule)[] = [];
switch (SERVER_FLAVOR) {
case 'sync':
BusinessModules.push(SyncModule, DocModule);
BusinessModules.push(SyncModule, DocModule.forSync());
break;
case 'selfhosted':
BusinessModules.push(
@@ -27,7 +25,7 @@ switch (SERVER_FLAVOR) {
WorkspaceModule,
UsersModule,
SyncModule,
DocModule
DocModule.forRoot()
);
break;
case 'graphql':
@@ -37,9 +35,8 @@ switch (SERVER_FLAVOR) {
GqlModule,
WorkspaceModule,
UsersModule,
DocModule,
PaymentModule,
QuotaModule
DocModule.forRoot(),
PaymentModule
);
break;
case 'allinone':
@@ -50,9 +47,8 @@ switch (SERVER_FLAVOR) {
GqlModule,
WorkspaceModule,
UsersModule,
QuotaModule,
SyncModule,
DocModule,
DocModule.forRoot(),
PaymentModule
);
break;

View File

@@ -1,7 +1,6 @@
import { Module } from '@nestjs/common';
import { FeatureModule } from '../features';
import { QuotaModule } from '../quota';
import { UsersModule } from '../users';
import { SubscriptionResolver, UserSubscriptionResolver } from './resolver';
import { ScheduleManager } from './schedule';
import { SubscriptionService } from './service';
@@ -9,7 +8,7 @@ import { StripeProvider } from './stripe';
import { StripeWebhook } from './webhook';
@Module({
imports: [FeatureModule, QuotaModule],
imports: [UsersModule],
providers: [
ScheduleManager,
StripeProvider,

View File

@@ -1,7 +1,6 @@
import { HttpStatus } from '@nestjs/common';
import {
Args,
Context,
Field,
Int,
Mutation,
@@ -255,13 +254,8 @@ export class UserSubscriptionResolver {
constructor(private readonly db: PrismaService) {}
@ResolveField(() => UserSubscriptionType, { nullable: true })
async subscription(
@Context() ctx: { isAdminQuery: boolean },
@CurrentUser() me: User,
@Parent() user: User
) {
// allow an admin to query another user's subscription
if (!ctx.isAdminQuery && me.id !== user.id) {
async subscription(@CurrentUser() me: User, @Parent() user: User) {
if (me.id !== user.id) {
throw new GraphQLError(
'You are not allowed to access this subscription',
{

View File

@@ -11,8 +11,7 @@ import Stripe from 'stripe';
import { Config } from '../../config';
import { PrismaService } from '../../prisma';
import { FeatureManagementService } from '../features';
import { QuotaService, QuotaType } from '../quota';
import { UsersService } from '../users';
import { ScheduleManager } from './schedule';
const OnEvent = (
@@ -61,11 +60,6 @@ export enum SubscriptionStatus {
Trialing = 'trialing',
}
const SubscriptionActivated: Stripe.Subscription.Status[] = [
SubscriptionStatus.Active,
SubscriptionStatus.Trialing,
];
export enum InvoiceStatus {
Draft = 'draft',
Open = 'open',
@@ -88,9 +82,8 @@ export class SubscriptionService {
config: Config,
private readonly stripe: Stripe,
private readonly db: PrismaService,
private readonly scheduleManager: ScheduleManager,
private readonly features: FeatureManagementService,
private readonly quota: QuotaService
private readonly user: UsersService,
private readonly scheduleManager: ScheduleManager
) {
this.paymentConfig = config.payment;
@@ -478,16 +471,6 @@ export class SubscriptionService {
}
}
private getPlanQuota(plan: SubscriptionPlan) {
if (plan === SubscriptionPlan.Free) {
return QuotaType.FreePlanV1;
} else if (plan === SubscriptionPlan.Pro) {
return QuotaType.ProPlanV1;
} else {
throw new Error(`Unknown plan: ${plan}`);
}
}
private async saveSubscription(
user: User,
subscription: Stripe.Subscription,
@@ -500,28 +483,23 @@ export class SubscriptionService {
subscription = await this.stripe.subscriptions.retrieve(subscription.id);
}
// get next bill date from upcoming invoice
// see https://stripe.com/docs/api/invoices/upcoming
let nextBillAt: Date | null = null;
if (
(subscription.status === SubscriptionStatus.Active ||
subscription.status === SubscriptionStatus.Trialing) &&
!subscription.canceled_at
) {
nextBillAt = new Date(subscription.current_period_end * 1000);
}
const price = subscription.items.data[0].price;
if (!price.lookup_key) {
throw new Error('Unexpected subscription with no key');
}
const [plan, recurring] = decodeLookupKey(price.lookup_key);
const planActivated = SubscriptionActivated.includes(subscription.status);
let nextBillAt: Date | null = null;
if (planActivated) {
// update the user's quota if the plan is activated
await this.quota.switchUserQuota(user.id, this.getPlanQuota(plan));
// get next bill date from upcoming invoice
// see https://stripe.com/docs/api/invoices/upcoming
if (!subscription.canceled_at) {
nextBillAt = new Date(subscription.current_period_end * 1000);
}
} else {
// switch to free plan if subscription is canceled
await this.quota.switchUserQuota(user.id, QuotaType.FreePlanV1);
}
const commonData = {
start: new Date(subscription.current_period_start * 1000),
@@ -680,7 +658,7 @@ export class SubscriptionService {
user: User,
couponType: CouponType
): Promise<string | null> {
const earlyAccess = await this.features.canEarlyAccess(user.email);
const earlyAccess = await this.user.isEarlyAccessUser(user.email);
if (earlyAccess) {
try {
const coupon = await this.stripe.coupons.retrieve(couponType);

View File

@@ -1,5 +0,0 @@
export const OneKB = 1024;
export const OneMB = OneKB * OneKB;
export const OneGB = OneKB * OneMB;
export const OneDay = 1000 * 60 * 60 * 24;
export const ByteUnit = ['B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'];

View File

@@ -1,21 +0,0 @@
import { Module } from '@nestjs/common';
import { PermissionService } from '../workspaces/permission';
import { QuotaService } from './service';
import { QuotaManagementService } from './storage';
/**
* The quota module provides per-user quota management.
* It includes:
* - quota query/update/permit
* - quota statistics
*/
@Module({
providers: [PermissionService, QuotaService, QuotaManagementService],
exports: [QuotaService, QuotaManagementService],
})
export class QuotaModule {}
export { QuotaManagementService, QuotaService };
export { Quota_FreePlanV1, Quota_ProPlanV1, Quotas } from './schema';
export { QuotaType } from './types';

View File

@@ -1,81 +0,0 @@
import { PrismaService } from '../../prisma';
import { formatDate, formatSize, Quota, QuotaSchema } from './types';
const QuotaCache = new Map<number, QuotaConfig>();
export class QuotaConfig {
readonly config: Quota;
static async get(prisma: PrismaService, featureId: number) {
const cachedQuota = QuotaCache.get(featureId);
if (cachedQuota) {
return cachedQuota;
}
const quota = await prisma.features.findFirst({
where: {
id: featureId,
},
});
if (!quota) {
throw new Error(`Quota config ${featureId} not found`);
}
const config = new QuotaConfig(quota);
// editing a quota config always creates a new config row,
// so it is safe to cache it by featureId
QuotaCache.set(featureId, config);
return config;
}
private constructor(data: any) {
const config = QuotaSchema.safeParse(data);
if (config.success) {
this.config = config.data;
} else {
throw new Error(
`Invalid quota config: ${config.error.message}, ${JSON.stringify(
data
)})}`
);
}
}
/// feature name of quota
get name() {
return this.config.feature;
}
get blobLimit() {
return this.config.configs.blobLimit;
}
get storageQuota() {
return this.config.configs.storageQuota;
}
get historyPeriod() {
return this.config.configs.historyPeriod;
}
get historyPeriodFromNow() {
return new Date(Date.now() + this.historyPeriod);
}
get memberLimit() {
return this.config.configs.memberLimit;
}
get humanReadable() {
return {
name: this.config.configs.name,
blobLimit: formatSize(this.blobLimit),
storageQuota: formatSize(this.storageQuota),
historyPeriod: formatDate(this.historyPeriod),
memberLimit: this.memberLimit.toString(),
};
}
}

View File

@@ -1,50 +0,0 @@
import { FeatureKind } from '../features';
import { OneDay, OneGB, OneMB } from './constant';
import { Quota, QuotaType } from './types';
export const Quotas: Quota[] = [
{
feature: QuotaType.FreePlanV1,
type: FeatureKind.Quota,
version: 1,
configs: {
// quota name
name: 'Free',
// single blob limit 10MB
blobLimit: 10 * OneMB,
// total blob limit 10GB
storageQuota: 10 * OneGB,
// history period of validity 7 days
historyPeriod: 7 * OneDay,
// member limit 3
memberLimit: 3,
},
},
{
feature: QuotaType.ProPlanV1,
type: FeatureKind.Quota,
version: 1,
configs: {
// quota name
name: 'Pro',
// single blob limit 100MB
blobLimit: 100 * OneMB,
// total blob limit 100GB
storageQuota: 100 * OneGB,
// history period of validity 30 days
historyPeriod: 30 * OneDay,
// member limit 10
memberLimit: 10,
},
},
];
export const Quota_FreePlanV1 = {
feature: Quotas[0].feature,
version: Quotas[0].version,
};
export const Quota_ProPlanV1 = {
feature: Quotas[1].feature,
version: Quotas[1].version,
};

View File

@@ -1,147 +0,0 @@
import { Injectable } from '@nestjs/common';
import { PrismaService } from '../../prisma';
import { FeatureKind } from '../features';
import { QuotaConfig } from './quota';
import { QuotaType } from './types';
@Injectable()
export class QuotaService {
constructor(private readonly prisma: PrismaService) {}
// get the user's currently activated quota
async getUserQuota(userId: string) {
const quota = await this.prisma.userFeatures.findFirst({
where: {
user: {
id: userId,
},
feature: {
type: FeatureKind.Quota,
},
activated: true,
},
select: {
reason: true,
createdAt: true,
expiredAt: true,
featureId: true,
},
});
if (!quota) {
// this should be unreachable
throw new Error(`User ${userId} has no quota`);
}
const feature = await QuotaConfig.get(this.prisma, quota.featureId);
return { ...quota, feature };
}
// get all quota records of the user
async getUserQuotas(userId: string) {
const quotas = await this.prisma.userFeatures.findMany({
where: {
user: {
id: userId,
},
feature: {
type: FeatureKind.Quota,
},
},
select: {
activated: true,
reason: true,
createdAt: true,
expiredAt: true,
featureId: true,
},
});
const configs = await Promise.all(
quotas.map(async quota => {
try {
return {
...quota,
feature: await QuotaConfig.get(this.prisma, quota.featureId),
};
} catch (_) {}
return null as unknown as typeof quota & {
feature: QuotaConfig;
};
})
);
return configs.filter(quota => !!quota);
}
// switch user to a new quota
// currently each user can only have one quota
async switchUserQuota(
userId: string,
quota: QuotaType,
reason?: string,
expiredAt?: Date
) {
await this.prisma.$transaction(async tx => {
const latestPlanVersion = await tx.features.aggregate({
where: {
feature: quota,
},
_max: {
version: true,
},
});
// deactivate all existing quotas for this user
await tx.userFeatures.updateMany({
where: {
id: undefined,
userId,
feature: {
type: FeatureKind.Quota,
},
},
data: {
activated: false,
},
});
await tx.userFeatures.create({
data: {
user: {
connect: {
id: userId,
},
},
feature: {
connect: {
feature_version: {
feature: quota,
version: latestPlanVersion._max.version || 1,
},
type: FeatureKind.Quota,
},
},
reason: reason ?? 'switch quota',
activated: true,
expiredAt,
},
});
});
}
async hasQuota(userId: string, quota: QuotaType) {
return this.prisma.userFeatures
.count({
where: {
userId,
feature: {
feature: quota,
type: FeatureKind.Quota,
},
activated: true,
},
})
.then(count => count > 0);
}
}
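// A minimal sketch, not part of this changeset, of moving a user from the free
// plan to the pro plan with QuotaService above. The QuotaUpgrader class is
// hypothetical; only hasQuota/switchUserQuota/getUserQuota from this file are used.
import { Injectable } from '@nestjs/common';

import { QuotaService } from './service';
import { QuotaType } from './types';

@Injectable()
export class QuotaUpgrader {
  constructor(private readonly quota: QuotaService) {}

  async upgradeToPro(userId: string) {
    // switchUserQuota deactivates every existing quota row before adding the new one
    if (await this.quota.hasQuota(userId, QuotaType.FreePlanV1)) {
      await this.quota.switchUserQuota(userId, QuotaType.ProPlanV1, 'manual upgrade');
    }
    return this.quota.getUserQuota(userId);
  }
}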

View File

@@ -1,54 +0,0 @@
import type { Storage } from '@affine/storage';
import { Inject, Injectable, NotFoundException } from '@nestjs/common';
import { StorageProvide } from '../../storage';
import { PermissionService } from '../workspaces/permission';
import { QuotaService } from './service';
@Injectable()
export class QuotaManagementService {
constructor(
private readonly quota: QuotaService,
private readonly permissions: PermissionService,
@Inject(StorageProvide) private readonly storage: Storage
) {}
async getUserQuota(userId: string) {
const quota = await this.quota.getUserQuota(userId);
return {
name: quota.feature.name,
reason: quota.reason,
createAt: quota.createdAt,
expiredAt: quota.expiredAt,
blobLimit: quota.feature.blobLimit,
storageQuota: quota.feature.storageQuota,
};
}
// TODO: lazy calculation, needs to be optimized with a cache
async getUserUsage(userId: string) {
const workspaces = await this.permissions.getOwnedWorkspaces(userId);
return this.storage.blobsSize(workspaces);
}
// get the workspace owner's quota and the total size already used
// the quota is applied to the owner's account
async getWorkspaceUsage(workspaceId: string) {
const { user: owner } =
await this.permissions.getWorkspaceOwner(workspaceId);
if (!owner) throw new NotFoundException('Workspace owner not found');
const { storageQuota } = await this.getUserQuota(owner.id);
// total size used across all workspaces owned by this user
const usageSize = await this.getUserUsage(owner.id);
return { quota: storageQuota, size: usageSize };
}
async checkBlobQuota(workspaceId: string, size: number) {
const { quota, size: usageSize } =
await this.getWorkspaceUsage(workspaceId);
return quota - (size + usageSize);
}
}
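// A minimal sketch, not part of this changeset: checkBlobQuota above returns the
// remaining capacity (quota - (incoming + already used)), so a caller can reject an
// upload when the result is negative. The assertBlobFits helper is hypothetical.
import { ForbiddenException } from '@nestjs/common';

import { QuotaManagementService } from './storage';

export async function assertBlobFits(
  quota: QuotaManagementService,
  workspaceId: string,
  incomingSize: number
) {
  const remaining = await quota.checkBlobQuota(workspaceId, incomingSize);
  if (remaining < 0) {
    throw new ForbiddenException('storage size limit exceeded');
  }
}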

View File

@@ -1,50 +0,0 @@
import { z } from 'zod';
import { commonFeatureSchema, FeatureKind } from '../features';
import { ByteUnit, OneDay, OneKB } from './constant';
/// ======== quota define ========
export enum QuotaType {
FreePlanV1 = 'free_plan_v1',
ProPlanV1 = 'pro_plan_v1',
}
const quotaPlan = z.object({
feature: z.enum([QuotaType.FreePlanV1, QuotaType.ProPlanV1]),
configs: z.object({
name: z.string(),
blobLimit: z.number().positive().int(),
storageQuota: z.number().positive().int(),
historyPeriod: z.number().positive().int(),
memberLimit: z.number().positive().int(),
}),
});
/// ======== schema infer ========
export const QuotaSchema = commonFeatureSchema
.extend({
type: z.literal(FeatureKind.Quota),
})
.and(z.discriminatedUnion('feature', [quotaPlan]));
export type Quota = z.infer<typeof QuotaSchema>;
/// ======== utils ========
export function formatSize(bytes: number, decimals: number = 2): string {
if (bytes === 0) return '0 B';
const dm = decimals < 0 ? 0 : decimals;
const i = Math.floor(Math.log(bytes) / Math.log(OneKB));
return (
parseFloat((bytes / Math.pow(OneKB, i)).toFixed(dm)) + ' ' + ByteUnit[i]
);
}
export function formatDate(ms: number): string {
return `${(ms / OneDay).toFixed(0)} days`;
}
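// A quick sanity check, not part of this changeset, of the helpers above against
// the seeded free plan (10MB blob limit, 10GB storage, 7 day history). The import
// paths assume this file is `types.ts` next to `constant.ts` in the quota module.
import { OneDay, OneGB, OneMB } from './constant';
import { formatDate, formatSize } from './types';

console.log(formatSize(10 * OneMB)); // "10 MB"
console.log(formatSize(10 * OneGB)); // "10 GB"
console.log(formatDate(7 * OneDay)); // "7 days"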

View File

@@ -113,69 +113,11 @@ export class EventsGateway implements OnGatewayConnection, OnGatewayDisconnect {
metrics.socketio.gauge('realtime_connections').record(this.connectionCount);
}
@Auth()
@SubscribeMessage('client-handshake-sync')
async handleClientHandshakeSync(
@CurrentUser() user: UserType,
@MessageBody() workspaceId: string,
@ConnectedSocket() client: Socket
): Promise<EventResponse<{ clientId: string }>> {
const canWrite = await this.permissions.tryCheckWorkspace(
workspaceId,
user.id,
Permission.Write
);
if (canWrite) {
await client.join(`${workspaceId}:sync`);
return {
data: {
clientId: client.id,
},
};
} else {
return {
error: new AccessDeniedError(workspaceId),
};
}
}
@Auth()
@SubscribeMessage('client-handshake-awareness')
async handleClientHandshakeAwareness(
@CurrentUser() user: UserType,
@MessageBody() workspaceId: string,
@ConnectedSocket() client: Socket
): Promise<EventResponse<{ clientId: string }>> {
const canWrite = await this.permissions.tryCheckWorkspace(
workspaceId,
user.id,
Permission.Write
);
if (canWrite) {
await client.join(`${workspaceId}:awareness`);
return {
data: {
clientId: client.id,
},
};
} else {
return {
error: new AccessDeniedError(workspaceId),
};
}
}
/**
* @deprecated use `client-handshake-sync` and `client-handshake-awareness` instead
*/
@Auth()
@SubscribeMessage('client-handshake')
async handleClientHandShake(
@CurrentUser() user: UserType,
@MessageBody()
workspaceId: string,
@MessageBody() workspaceId: string,
@ConnectedSocket() client: Socket
): Promise<EventResponse<{ clientId: string }>> {
const canWrite = await this.permissions.tryCheckWorkspace(
@@ -185,7 +127,7 @@ export class EventsGateway implements OnGatewayConnection, OnGatewayDisconnect {
);
if (canWrite) {
await client.join([`${workspaceId}:sync`, `${workspaceId}:awareness`]);
await client.join(workspaceId);
return {
data: {
clientId: client.id,
@@ -198,51 +140,19 @@ export class EventsGateway implements OnGatewayConnection, OnGatewayDisconnect {
}
}
@SubscribeMessage('client-leave-sync')
async handleLeaveSync(
@MessageBody() workspaceId: string,
@ConnectedSocket() client: Socket
): Promise<EventResponse> {
if (client.rooms.has(`${workspaceId}:sync`)) {
await client.leave(`${workspaceId}:sync`);
return {};
} else {
return {
error: new NotInWorkspaceError(workspaceId),
};
}
}
@SubscribeMessage('client-leave-awareness')
async handleLeaveAwareness(
@MessageBody() workspaceId: string,
@ConnectedSocket() client: Socket
): Promise<EventResponse> {
if (client.rooms.has(`${workspaceId}:awareness`)) {
await client.leave(`${workspaceId}:awareness`);
return {};
} else {
return {
error: new NotInWorkspaceError(workspaceId),
};
}
}
/**
* @deprecated use `client-leave-sync` and `client-leave-awareness` instead
*/
@SubscribeMessage('client-leave')
async handleClientLeave(
@MessageBody() workspaceId: string,
@ConnectedSocket() client: Socket
): Promise<EventResponse> {
if (client.rooms.has(`${workspaceId}:sync`)) {
await client.leave(`${workspaceId}:sync`);
if (client.rooms.has(workspaceId)) {
await client.leave(workspaceId);
return {};
} else {
return {
error: new NotInWorkspaceError(workspaceId),
};
}
if (client.rooms.has(`${workspaceId}:awareness`)) {
await client.leave(`${workspaceId}:awareness`);
}
return {};
}
/**
@@ -265,7 +175,7 @@ export class EventsGateway implements OnGatewayConnection, OnGatewayDisconnect {
},
@ConnectedSocket() client: Socket
) {
if (!client.rooms.has(`${workspaceId}:sync`)) {
if (!client.rooms.has(workspaceId)) {
this.logger.verbose(
`Client ${client.id} tried to push update to workspace ${workspaceId} without joining it first`
);
@@ -275,12 +185,12 @@ export class EventsGateway implements OnGatewayConnection, OnGatewayDisconnect {
const docId = new DocID(guid, workspaceId);
client
.to(`${docId.workspace}:sync`)
.to(docId.workspace)
.emit('server-update', { workspaceId, guid, update });
// broadcast to all clients on newer versions that only listen to `server-updates`
client
.to(`${docId.workspace}:sync`)
.to(docId.workspace)
.emit('server-updates', { workspaceId, guid, updates: [update] });
const buf = Buffer.from(update, 'base64');
@@ -309,7 +219,7 @@ export class EventsGateway implements OnGatewayConnection, OnGatewayDisconnect {
stateVector?: string;
}
): Promise<{ missing: string; state?: string } | false> {
if (!client.rooms.has(`${workspaceId}:sync`)) {
if (!client.rooms.has(workspaceId)) {
const canRead = await this.permissions.tryCheckWorkspace(
workspaceId,
user.id
@@ -354,7 +264,7 @@ export class EventsGateway implements OnGatewayConnection, OnGatewayDisconnect {
},
@ConnectedSocket() client: Socket
): Promise<EventResponse<{ accepted: true }>> {
if (!client.rooms.has(`${workspaceId}:sync`)) {
if (!client.rooms.has(workspaceId)) {
return {
error: new NotInWorkspaceError(workspaceId),
};
@@ -362,7 +272,7 @@ export class EventsGateway implements OnGatewayConnection, OnGatewayDisconnect {
const docId = new DocID(guid, workspaceId);
client
.to(`${docId.workspace}:sync`)
.to(docId.workspace)
.emit('server-updates', { workspaceId, guid, updates });
const buffers = updates.map(update => Buffer.from(update, 'base64'));
@@ -391,7 +301,7 @@ export class EventsGateway implements OnGatewayConnection, OnGatewayDisconnect {
stateVector?: string;
}
): Promise<EventResponse<{ missing: string; state?: string }>> {
if (!client.rooms.has(`${workspaceId}:sync`)) {
if (!client.rooms.has(workspaceId)) {
const canRead = await this.permissions.tryCheckWorkspace(
workspaceId,
user.id
@@ -433,8 +343,8 @@ export class EventsGateway implements OnGatewayConnection, OnGatewayDisconnect {
@MessageBody() workspaceId: string,
@ConnectedSocket() client: Socket
): Promise<EventResponse<{ clientId: string }>> {
if (client.rooms.has(`${workspaceId}:awareness`)) {
client.to(`${workspaceId}:awareness`).emit('new-client-awareness-init');
if (client.rooms.has(workspaceId)) {
client.to(workspaceId).emit('new-client-awareness-init');
return {
data: {
clientId: client.id,
@@ -452,9 +362,9 @@ export class EventsGateway implements OnGatewayConnection, OnGatewayDisconnect {
@MessageBody() message: { workspaceId: string; awarenessUpdate: string },
@ConnectedSocket() client: Socket
): Promise<EventResponse> {
if (client.rooms.has(`${message.workspaceId}:awareness`)) {
if (client.rooms.has(message.workspaceId)) {
client
.to(`${message.workspaceId}:awareness`)
.to(message.workspaceId)
.emit('server-awareness-broadcast', message);
return {};
} else {

View File

@@ -5,7 +5,7 @@ import { PermissionService } from '../../workspaces/permission';
import { EventsGateway } from './events.gateway';
@Module({
imports: [DocModule],
imports: [DocModule.forFeature()],
providers: [EventsGateway, PermissionService],
})
export class EventsModule {}

View File

@@ -0,0 +1,42 @@
type FeatureEarlyAccessPreview = {
whitelist: RegExp[];
};
type FeatureStorageLimit = {
storageQuota: number;
};
type UserFeatureGate = {
earlyAccessPreview: FeatureEarlyAccessPreview;
freeUser: FeatureStorageLimit;
proUser: FeatureStorageLimit;
};
const UserLevel = {
freeUser: {
storageQuota: 10 * 1024 * 1024 * 1024,
},
proUser: {
storageQuota: 100 * 1024 * 1024 * 1024,
},
} satisfies Pick<UserFeatureGate, 'freeUser' | 'proUser'>;
export function getStorageQuota(features: string[]) {
for (const feature of features) {
if (feature in UserLevel) {
return UserLevel[feature as keyof typeof UserLevel].storageQuota;
}
}
return null;
}
const UserType = {
earlyAccessPreview: {
whitelist: [/@toeverything\.info$/],
},
} satisfies Pick<UserFeatureGate, 'earlyAccessPreview'>;
export const FeatureGates = {
...UserType,
...UserLevel,
} satisfies UserFeatureGate;
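// A minimal sketch, not part of this changeset: getStorageQuota above scans a
// user's feature names and returns the byte quota of the first matching tier, or
// null when none match (callers fall back to config.objectStorage.quota). The
// feature arrays below are illustrative values only.
import { getStorageQuota } from './gates';

console.log(getStorageQuota(['proUser'])); // 107374182400 (100GB)
console.log(getStorageQuota(['early_access', 'freeUser'])); // 10737418240 (10GB)
console.log(getStorageQuota([])); // null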

View File

@@ -1,17 +1,15 @@
import { Module } from '@nestjs/common';
import { FeatureModule } from '../features';
import { QuotaModule } from '../quota';
import { StorageModule } from '../storage';
import { UserResolver } from './resolver';
import { UsersService } from './users';
@Module({
imports: [StorageModule, FeatureModule, QuotaModule],
imports: [StorageModule],
providers: [UserResolver, UsersService],
exports: [UsersService],
})
export class UsersModule {}
export { UserType } from './types';
export { UserType } from './resolver';
export { UsersService } from './users';

View File

@@ -6,10 +6,13 @@ import {
} from '@nestjs/common';
import {
Args,
Context,
Field,
ID,
Int,
Mutation,
ObjectType,
Query,
registerEnumType,
ResolveField,
Resolver,
} from '@nestjs/graphql';
@@ -21,12 +24,60 @@ import { PrismaService } from '../../prisma/service';
import { CloudThrottlerGuard, Throttle } from '../../throttler';
import type { FileUpload } from '../../types';
import { Auth, CurrentUser, Public, Publicable } from '../auth/guard';
import { AuthService } from '../auth/service';
import { FeatureManagementService } from '../features';
import { QuotaService } from '../quota';
import { StorageService } from '../storage/storage.service';
import { DeleteAccount, RemoveAvatar, UserQuotaType, UserType } from './types';
import { NewFeaturesKind } from './types';
import { UsersService } from './users';
import { isStaff } from './utils';
registerEnumType(NewFeaturesKind, {
name: 'NewFeaturesKind',
});
@ObjectType()
export class UserType implements Partial<User> {
@Field(() => ID)
id!: string;
@Field({ description: 'User name' })
name!: string;
@Field({ description: 'User email' })
email!: string;
@Field(() => String, { description: 'User avatar url', nullable: true })
avatarUrl: string | null = null;
@Field(() => Date, { description: 'User email verified', nullable: true })
emailVerified: Date | null = null;
@Field({ description: 'User created date', nullable: true })
createdAt!: Date;
@Field(() => Boolean, {
description: 'User password has been set',
nullable: true,
})
hasPassword?: boolean;
}
@ObjectType()
export class DeleteAccount {
@Field()
success!: boolean;
}
@ObjectType()
export class RemoveAvatar {
@Field()
success!: boolean;
}
@ObjectType()
export class AddToNewFeaturesWaitingList {
@Field()
email!: string;
@Field(() => NewFeaturesKind, { description: 'New features kind' })
type!: NewFeaturesKind;
}
/**
* User resolver
@@ -37,12 +88,9 @@ import { UsersService } from './users';
@Resolver(() => UserType)
export class UserResolver {
constructor(
private readonly auth: AuthService,
private readonly prisma: PrismaService,
private readonly storage: StorageService,
private readonly users: UsersService,
private readonly feature: FeatureManagementService,
private readonly quota: QuotaService
private readonly users: UsersService
) {}
@Throttle({
@@ -90,7 +138,7 @@ export class UserResolver {
})
@Public()
async user(@Args('email') email: string) {
if (!(await this.feature.canEarlyAccess(email))) {
if (!(await this.users.canEarlyAccess(email))) {
return new GraphQLError(
`You don't have early access permission\nVisit https://community.affine.pro/c/insider-general/ for more information`,
{
@@ -110,14 +158,6 @@ export class UserResolver {
return user;
}
@Throttle({ default: { limit: 10, ttl: 60 } })
@ResolveField(() => UserQuotaType, { name: 'quota', nullable: true })
async getQuota(@CurrentUser() me: User) {
const quota = await this.quota.getUserQuota(me.id);
return quota.feature;
}
@Throttle({ default: { limit: 10, ttl: 60 } })
@ResolveField(() => Int, {
name: 'invoiceCount',
@@ -193,60 +233,27 @@ export class UserResolver {
ttl: 60,
},
})
@Mutation(() => Int)
async addToEarlyAccess(
@CurrentUser() currentUser: UserType,
@Mutation(() => AddToNewFeaturesWaitingList)
async addToNewFeaturesWaitingList(
@CurrentUser() user: UserType,
@Args('type', {
type: () => NewFeaturesKind,
})
type: NewFeaturesKind,
@Args('email') email: string
): Promise<number> {
if (!this.feature.isStaff(currentUser.email)) {
): Promise<AddToNewFeaturesWaitingList> {
if (!isStaff(user.email)) {
throw new ForbiddenException('You are not allowed to do this');
}
const user = await this.users.findUserByEmail(email);
if (user) {
return this.feature.addEarlyAccess(user.id);
} else {
const user = await this.auth.createAnonymousUser(email);
return this.feature.addEarlyAccess(user.id);
}
}
@Throttle({
default: {
limit: 10,
ttl: 60,
},
})
@Mutation(() => Int)
async removeEarlyAccess(
@CurrentUser() currentUser: UserType,
@Args('email') email: string
): Promise<number> {
if (!this.feature.isStaff(currentUser.email)) {
throw new ForbiddenException('You are not allowed to do this');
}
const user = await this.users.findUserByEmail(email);
if (!user) {
throw new BadRequestException(`User ${email} not found`);
}
return this.feature.removeEarlyAccess(user.id);
}
@Throttle({
default: {
limit: 10,
ttl: 60,
},
})
@Query(() => [UserType])
async earlyAccessUsers(
@Context() ctx: { isAdminQuery: boolean },
@CurrentUser() user: UserType
): Promise<UserType[]> {
if (!this.feature.isStaff(user.email)) {
throw new ForbiddenException('You are not allowed to do this');
}
// allow querying another user's subscription
ctx.isAdminQuery = true;
return this.feature.listEarlyAccess();
await this.prisma.newFeaturesWaitingList.create({
data: {
email,
type,
},
});
return {
email,
type,
};
}
}

View File

@@ -1,79 +1,3 @@
import { Field, Float, ID, ObjectType } from '@nestjs/graphql';
import type { User } from '@prisma/client';
@ObjectType('UserQuotaHumanReadable')
export class UserQuotaHumanReadableType {
@Field({ name: 'name' })
name!: string;
@Field({ name: 'blobLimit' })
blobLimit!: string;
@Field({ name: 'storageQuota' })
storageQuota!: string;
@Field({ name: 'historyPeriod' })
historyPeriod!: string;
@Field({ name: 'memberLimit' })
memberLimit!: string;
}
@ObjectType('UserQuota')
export class UserQuotaType {
@Field({ name: 'name' })
name!: string;
@Field(() => Float, { name: 'blobLimit' })
blobLimit!: number;
@Field(() => Float, { name: 'storageQuota' })
storageQuota!: number;
@Field(() => Float, { name: 'historyPeriod' })
historyPeriod!: number;
@Field({ name: 'memberLimit' })
memberLimit!: number;
@Field({ name: 'humanReadable' })
humanReadable!: UserQuotaHumanReadableType;
}
@ObjectType()
export class UserType implements Partial<User> {
@Field(() => ID)
id!: string;
@Field({ description: 'User name' })
name!: string;
@Field({ description: 'User email' })
email!: string;
@Field(() => String, { description: 'User avatar url', nullable: true })
avatarUrl: string | null = null;
@Field(() => Date, { description: 'User email verified', nullable: true })
emailVerified: Date | null = null;
@Field({ description: 'User created date', nullable: true })
createdAt!: Date;
@Field(() => Boolean, {
description: 'User password has been set',
nullable: true,
})
hasPassword?: boolean;
}
@ObjectType()
export class DeleteAccount {
@Field()
success!: boolean;
}
@ObjectType()
export class RemoveAvatar {
@Field()
success!: boolean;
export enum NewFeaturesKind {
EarlyAccess,
}

View File

@@ -1,10 +1,51 @@
import { Injectable } from '@nestjs/common';
import { Config } from '../../config';
import { PrismaService } from '../../prisma';
import { getStorageQuota } from './gates';
import { NewFeaturesKind } from './types';
import { isStaff } from './utils';
@Injectable()
export class UsersService {
constructor(private readonly prisma: PrismaService) {}
constructor(
private readonly prisma: PrismaService,
private readonly config: Config
) {}
async canEarlyAccess(email: string) {
if (this.config.featureFlags.earlyAccessPreview && !isStaff(email)) {
return this.isEarlyAccessUser(email);
} else {
return true;
}
}
async isEarlyAccessUser(email: string) {
return this.prisma.newFeaturesWaitingList
.count({
where: { email, type: NewFeaturesKind.EarlyAccess },
})
.then(count => count > 0)
.catch(() => false);
}
async getStorageQuotaById(id: string) {
const features = await this.prisma.user
.findUnique({
where: { id },
select: {
features: {
select: {
feature: true,
},
},
},
})
.then(user => user?.features.map(f => f.feature) ?? []);
return getStorageQuota(features) || this.config.objectStorage.quota;
}
async findUserByEmail(email: string) {
return this.prisma.user

View File

@@ -0,0 +1,3 @@
export function isStaff(email: string) {
return email.endsWith('@toeverything.info');
}

View File

@@ -1,7 +1,6 @@
import { Module } from '@nestjs/common';
import { DocModule } from '../doc';
import { QuotaModule } from '../quota';
import { UsersService } from '../users';
import { WorkspacesController } from './controller';
import { DocHistoryResolver } from './history.resolver';
@@ -9,7 +8,7 @@ import { PermissionService } from './permission';
import { PagePermissionResolver, WorkspaceResolver } from './resolver';
@Module({
imports: [DocModule, QuotaModule],
imports: [DocModule.forFeature()],
controllers: [WorkspacesController],
providers: [
WorkspaceResolver,

View File

@@ -26,18 +26,6 @@ export class PermissionService {
return data?.type as Permission;
}
async getOwnedWorkspaces(userId: string) {
return this.prisma.workspaceUserPermission
.findMany({
where: {
userId,
accepted: true,
type: Permission.Owner,
},
})
.then(data => data.map(({ workspaceId }) => workspaceId));
}
async getWorkspaceOwner(workspaceId: string) {
return this.prisma.workspaceUserPermission.findFirstOrThrow({
where: {

View File

@@ -33,7 +33,6 @@ import type {
import GraphQLUpload from 'graphql-upload/GraphQLUpload.mjs';
import { applyUpdate, Doc } from 'yjs';
import { MakeCache, PreventCache } from '../../cache';
import { EventEmitter } from '../../event';
import { PrismaService } from '../../prisma';
import { StorageProvide } from '../../storage';
@@ -43,8 +42,8 @@ import { DocID } from '../../utils/doc';
import { Auth, CurrentUser, Public } from '../auth';
import { MailService } from '../auth/mailer';
import { AuthService } from '../auth/service';
import { QuotaManagementService } from '../quota';
import { UsersService, UserType } from '../users';
import { UsersService } from '../users';
import { UserType } from '../users/resolver';
import { PermissionService, PublicPageMode } from './permission';
import { Permission } from './types';
import { defaultWorkspaceAvatar } from './utils';
@@ -149,7 +148,6 @@ export class WorkspaceResolver {
private readonly permissions: PermissionService,
private readonly users: UsersService,
private readonly event: EventEmitter,
private readonly quota: QuotaManagementService,
@Inject(StorageProvide) private readonly storage: Storage
) {}
@@ -235,14 +233,6 @@ export class WorkspaceResolver {
}));
}
@ResolveField(() => Int, {
description: 'Blobs size of workspace',
complexity: 2,
})
async blobsSize(@Parent() workspace: WorkspaceType) {
return this.storage.blobsSize([workspace.id]);
}
@Query(() => Boolean, {
description: 'Get is owner of workspace',
complexity: 2,
@@ -657,7 +647,6 @@ export class WorkspaceResolver {
@Query(() => [String], {
description: 'List blobs of workspace',
})
@MakeCache(['blobs'], ['workspaceId'])
async listBlobs(
@CurrentUser() user: UserType,
@Args('workspaceId') workspaceId: string
@@ -667,9 +656,36 @@ export class WorkspaceResolver {
return this.storage.listBlobs(workspaceId);
}
@Query(() => WorkspaceBlobSizes)
async collectBlobSizes(
@CurrentUser() user: UserType,
@Args('workspaceId') workspaceId: string
) {
await this.permissions.checkWorkspace(workspaceId, user.id);
return this.storage.blobsSize([workspaceId]).then(size => ({ size }));
}
@Query(() => WorkspaceBlobSizes)
async collectAllBlobSizes(@CurrentUser() user: UserType) {
const size = await this.quota.getUserUsage(user.id);
const workspaces = await this.prisma.workspaceUserPermission
.findMany({
where: {
userId: user.id,
accepted: true,
type: Permission.Owner,
},
select: {
workspace: {
select: {
id: true,
},
},
},
})
.then(data => data.map(({ workspace }) => workspace.id));
const size = await this.storage.blobsSize(workspaces);
return { size };
}
@@ -677,7 +693,7 @@ export class WorkspaceResolver {
async checkBlobSize(
@CurrentUser() user: UserType,
@Args('workspaceId') workspaceId: string,
@Args('size', { type: () => Float }) blobSize: number
@Args('size', { type: () => Float }) size: number
) {
const canWrite = await this.permissions.tryCheckWorkspace(
workspaceId,
@@ -685,14 +701,18 @@ export class WorkspaceResolver {
Permission.Write
);
if (canWrite) {
const size = await this.quota.checkBlobQuota(workspaceId, blobSize);
return { size };
const { user } = await this.permissions.getWorkspaceOwner(workspaceId);
if (user) {
const quota = await this.users.getStorageQuotaById(user.id);
const { size: currentSize } = await this.collectAllBlobSizes(user);
return { size: quota - (size + currentSize) };
}
}
return false;
}
@Mutation(() => String)
@PreventCache(['blobs'], ['workspaceId'])
async setBlob(
@CurrentUser() user: UserType,
@Args('workspaceId') workspaceId: string,
@@ -705,12 +725,14 @@ export class WorkspaceResolver {
Permission.Write
);
const { quota, size } = await this.quota.getWorkspaceUsage(workspaceId);
// the quota is applied to the owner's account
const { user: owner } =
await this.permissions.getWorkspaceOwner(workspaceId);
if (!owner) return new NotFoundException('Workspace owner not found');
const quota = await this.users.getStorageQuotaById(owner.id);
const { size } = await this.collectAllBlobSizes(owner);
const checkExceeded = (recvSize: number) => {
if (!quota) {
throw new ForbiddenException('cannot find user quota');
}
if (size + recvSize > quota) {
this.logger.log(
`storage size limit exceeded: ${size + recvSize} > ${quota}`
@@ -752,7 +774,6 @@ export class WorkspaceResolver {
}
@Mutation(() => Boolean)
@PreventCache(['blobs'], ['workspaceId'])
async deleteBlob(
@CurrentUser() user: UserType,
@Args('workspaceId') workspaceId: string,

View File

@@ -10,23 +10,6 @@ type ServerConfigType {
flavor: String!
}
type UserQuotaHumanReadable {
name: String!
blobLimit: String!
storageQuota: String!
historyPeriod: String!
memberLimit: String!
}
type UserQuota {
name: String!
blobLimit: Float!
storageQuota: Float!
historyPeriod: Float!
memberLimit: Int!
humanReadable: UserQuotaHumanReadable!
}
type UserType {
id: ID!
@@ -48,7 +31,6 @@ type UserType {
"""User password has been set"""
hasPassword: Boolean
token: TokenType!
quota: UserQuota
"""Get user invoice count"""
invoiceCount: Int!
@@ -69,6 +51,17 @@ type RemoveAvatar {
success: Boolean!
}
type AddToNewFeaturesWaitingList {
email: String!
"""New features kind"""
type: NewFeaturesKind!
}
enum NewFeaturesKind {
EarlyAccess
}
type TokenType {
token: String!
refresh: String!
@@ -203,9 +196,6 @@ type WorkspaceType {
"""Owner of workspace"""
owner: UserType!
"""Blobs size of workspace"""
blobsSize: Int!
"""Shared pages of workspace"""
sharedPages: [String!]! @deprecated(reason: "use WorkspaceType.publicPages")
@@ -279,6 +269,7 @@ type Query {
"""List blobs of workspace"""
listBlobs(workspaceId: String!): [String!]!
collectBlobSizes(workspaceId: String!): WorkspaceBlobSizes!
collectAllBlobSizes: WorkspaceBlobSizes!
checkBlobSize(workspaceId: String!, size: Float!): WorkspaceBlobSizes!
@@ -287,7 +278,6 @@ type Query {
"""Get user by email"""
user(email: String!): UserType
earlyAccessUsers: [UserType!]!
prices: [SubscriptionPrice!]!
}
@@ -325,8 +315,7 @@ type Mutation {
"""Remove user avatar"""
removeAvatar: RemoveAvatar!
deleteAccount: DeleteAccount!
addToEarlyAccess(email: String!): Int!
removeEarlyAccess(email: String!): Int!
addToNewFeaturesWaitingList(type: NewFeaturesKind!, email: String!): AddToNewFeaturesWaitingList!
"""Create a subscription checkout link of stripe"""
checkout(recurring: SubscriptionRecurring!, idempotencyKey: String!): String!
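// A minimal sketch, not part of this changeset: the schema above replaces the
// per-user quota fields with blob-size queries. This shows how the new
// collectAllBlobSizes query could be called in the same style as the e2e utils in
// this changeset; `app` and `token` are assumed to come from the test setup.
import type { INestApplication } from '@nestjs/common';
import request from 'supertest';

export async function collectAllBlobSizes(app: INestApplication, token: string) {
  const res = await request(app.getHttpServer())
    .post('/graphql')
    .auth(token, { type: 'bearer' })
    .set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
    .send({
      query: `
        query {
          collectAllBlobSizes {
            size
          }
        }
      `,
    })
    .expect(200);
  return res.body.data.collectAllBlobSizes.size as number;
}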

View File

@@ -9,13 +9,14 @@ import {
import Redis from 'ioredis';
import { ThrottlerStorageRedisService } from 'nestjs-throttler-storage-redis';
import { Config } from './config';
import { Config, ConfigModule } from './config';
import { getRequestResponseFromContext } from './utils/nestjs';
@Global()
@Module({
imports: [
ThrottlerModule.forRootAsync({
imports: [ConfigModule],
inject: [Config],
useFactory: (config: Config): ThrottlerModuleOptions => {
const options: ThrottlerModuleOptions = {

View File

@@ -12,7 +12,6 @@ import graphqlUploadExpress from 'graphql-upload/graphqlUploadExpress.mjs';
import request from 'supertest';
import { AppModule } from '../src/app';
import { FeatureManagementService } from '../src/modules/features';
import { PrismaService } from '../src/prisma/service';
const gql = '/graphql';
@@ -46,13 +45,6 @@ class FakePrisma {
},
};
}
get newFeaturesWaitingList() {
return {
async findUnique() {
return null;
},
};
}
}
test.beforeEach(async t => {
@@ -61,8 +53,6 @@ test.beforeEach(async t => {
})
.overrideProvider(PrismaService)
.useClass(FakePrisma)
.overrideProvider(FeatureManagementService)
.useValue({ canEarlyAccess: () => true })
.compile();
t.context.app = module.createNestApplication({
cors: true,

View File

@@ -9,13 +9,11 @@ import ava, { type TestFn } from 'ava';
import graphqlUploadExpress from 'graphql-upload/graphqlUploadExpress.mjs';
import { AppModule } from '../src/app';
import { RevertCommand, RunCommand } from '../src/data/commands/run';
import { MailService } from '../src/modules/auth/mailer';
import { AuthService } from '../src/modules/auth/service';
import {
changeEmail,
createWorkspace,
initFeatureConfigs,
sendChangeEmail,
sendVerifyChangeEmail,
signUp,
@@ -39,7 +37,6 @@ test.beforeEach(async t => {
await client.$disconnect();
const module = await Test.createTestingModule({
imports: [AppModule],
providers: [RevertCommand, RunCommand],
}).compile();
const app = module.createNestApplication();
app.use(
@@ -55,9 +52,6 @@ test.beforeEach(async t => {
t.context.app = app;
t.context.auth = auth;
t.context.mail = mail;
// init features
await initFeatureConfigs(module);
});
test.afterEach(async t => {

View File

@@ -4,7 +4,6 @@ import { PrismaClient } from '@prisma/client';
import test from 'ava';
import { ConfigModule } from '../src/config';
import { RevertCommand, RunCommand } from '../src/data/commands/run';
import { GqlModule } from '../src/graphql.module';
import { AuthModule } from '../src/modules/auth';
import { AuthResolver } from '../src/modules/auth/resolver';
@@ -12,7 +11,6 @@ import { AuthService } from '../src/modules/auth/service';
import { PrismaModule } from '../src/prisma';
import { mintChallengeResponse, verifyChallengeResponse } from '../src/storage';
import { RateLimiterModule } from '../src/throttler';
import { initFeatureConfigs } from './utils';
let authService: AuthService;
let authResolver: AuthResolver;
@@ -42,15 +40,10 @@ test.beforeEach(async () => {
GqlModule,
AuthModule,
RateLimiterModule,
RevertCommand,
RunCommand,
],
}).compile();
authService = module.get(AuthService);
authResolver = module.get(AuthResolver);
// init features
await initFeatureConfigs(module);
});
test.afterEach.always(async () => {

View File

@@ -14,16 +14,10 @@ import {
import { CacheModule } from '../src/cache';
import { Config, ConfigModule } from '../src/config';
import {
collectMigrations,
RevertCommand,
RunCommand,
} from '../src/data/commands/run';
import { EventModule } from '../src/event';
import { DocManager, DocModule } from '../src/modules/doc';
import { QuotaModule } from '../src/modules/quota';
import { PrismaModule, PrismaService } from '../src/prisma';
import { FakeStorageModule, flushDB } from './utils';
import { flushDB } from './utils';
const createModule = () => {
return Test.createTestingModule({
@@ -31,12 +25,8 @@ const createModule = () => {
PrismaModule,
CacheModule,
EventModule,
QuotaModule,
FakeStorageModule.forRoot(),
ConfigModule.forRoot(),
DocModule,
RevertCommand,
RunCommand,
DocModule.forRoot(),
],
}).compile();
};
@@ -55,13 +45,6 @@ test.beforeEach(async () => {
app = m.createNestApplication();
app.enableShutdownHooks();
await app.init();
// init features
const run = m.get(RunCommand);
const revert = m.get(RevertCommand);
const migrations = await collectMigrations();
await Promise.allSettled(migrations.map(m => revert.run([m.name])));
await run.run();
});
test.afterEach.always(async () => {

View File

@@ -6,7 +6,6 @@ import request from 'supertest';
import { AppModule } from '../src/app';
import { ExceptionLogger } from '../src/middleware/exception-logger';
import { FeatureManagementService } from '../src/modules/features';
import { PrismaService } from '../src/prisma';
const gql = '/graphql';
@@ -39,8 +38,6 @@ test.beforeEach(async () => {
})
.overrideProvider(PrismaService)
.useClass(FakePrisma)
.overrideProvider(FeatureManagementService)
.useValue({})
.compile();
app = module.createNestApplication({
cors: true,

View File

@@ -1,143 +0,0 @@
/// <reference types="../src/global.d.ts" />
import { Test, TestingModule } from '@nestjs/testing';
import { PrismaClient } from '@prisma/client';
import ava, { type TestFn } from 'ava';
import { ConfigModule } from '../src/config';
import { RevertCommand, RunCommand } from '../src/data/commands/run';
import { AuthModule } from '../src/modules/auth';
import { AuthService } from '../src/modules/auth/service';
import {
FeatureManagementService,
FeatureModule,
FeatureService,
FeatureType,
} from '../src/modules/features';
import { PrismaModule } from '../src/prisma';
import { RateLimiterModule } from '../src/throttler';
import { initFeatureConfigs } from './utils';
const test = ava as TestFn<{
auth: AuthService;
feature: FeatureService;
early_access: FeatureManagementService;
app: TestingModule;
}>;
// cleanup database before each test
test.beforeEach(async () => {
const client = new PrismaClient();
await client.$connect();
await client.user.deleteMany({});
await client.$disconnect();
});
test.beforeEach(async t => {
const module = await Test.createTestingModule({
imports: [
ConfigModule.forRoot({
auth: {
accessTokenExpiresIn: 1,
refreshTokenExpiresIn: 1,
leeway: 1,
},
host: 'example.org',
https: true,
featureFlags: {
earlyAccessPreview: true,
},
}),
PrismaModule,
AuthModule,
FeatureModule,
RateLimiterModule,
RevertCommand,
RunCommand,
],
}).compile();
t.context.app = module;
t.context.auth = module.get(AuthService);
t.context.feature = module.get(FeatureService);
t.context.early_access = module.get(FeatureManagementService);
// init features
await initFeatureConfigs(module);
});
test.afterEach.always(async t => {
await t.context.app.close();
});
test('should be able to set feature', async t => {
const { auth, feature } = t.context;
const u1 = await auth.signUp('DarkSky', 'darksky@example.org', '123456');
const f1 = await feature.getUserFeatures(u1.id);
t.is(f1.length, 0, 'should be empty');
await feature.addUserFeature(u1.id, FeatureType.EarlyAccess, 1, 'test');
const f2 = await feature.getUserFeatures(u1.id);
t.is(f2.length, 1, 'should have 1 feature');
t.is(f2[0].feature.name, FeatureType.EarlyAccess, 'should be early access');
});
test('should be able to check early access', async t => {
const { auth, feature, early_access } = t.context;
const u1 = await auth.signUp('DarkSky', 'darksky@example.org', '123456');
const f1 = await early_access.canEarlyAccess(u1.email);
t.false(f1, 'should not have early access');
await early_access.addEarlyAccess(u1.id);
const f2 = await early_access.canEarlyAccess(u1.email);
t.true(f2, 'should have early access');
const f3 = await feature.listFeatureUsers(FeatureType.EarlyAccess);
t.is(f3.length, 1, 'should have 1 user');
t.is(f3[0].id, u1.id, 'should be the same user');
});
test('should be able to revert early access', async t => {
const { auth, feature, early_access } = t.context;
const u1 = await auth.signUp('DarkSky', 'darksky@example.org', '123456');
const f1 = await early_access.canEarlyAccess(u1.email);
t.false(f1, 'should not have early access');
await early_access.addEarlyAccess(u1.id);
const f2 = await early_access.canEarlyAccess(u1.email);
t.true(f2, 'should have early access');
const q1 = await early_access.listEarlyAccess();
t.is(q1.length, 1, 'should have 1 user');
t.is(q1[0].id, u1.id, 'should be the same user');
await early_access.removeEarlyAccess(u1.id);
const f3 = await early_access.canEarlyAccess(u1.email);
t.false(f3, 'should not have early access');
const q2 = await early_access.listEarlyAccess();
t.is(q2.length, 0, 'should have no user');
const q3 = await feature.getUserFeatures(u1.id);
t.is(q3.length, 1, 'should have 1 feature');
t.is(q3[0].feature.name, FeatureType.EarlyAccess, 'should be early access');
t.is(q3[0].activated, false, 'should be deactivated');
});
test('should be the same instance after resetting the feature', async t => {
const { auth, feature, early_access } = t.context;
const u1 = await auth.signUp('DarkSky', 'darksky@example.org', '123456');
await early_access.addEarlyAccess(u1.id);
const f1 = (await feature.getUserFeatures(u1.id))[0];
await early_access.removeEarlyAccess(u1.id);
await early_access.addEarlyAccess(u1.id);
const f2 = (await feature.getUserFeatures(u1.id))[1];
t.is(f1.feature, f2.feature, 'should be same instance');
});

View File

@@ -8,9 +8,8 @@ import * as Sinon from 'sinon';
import { ConfigModule } from '../src/config';
import type { EventPayload } from '../src/event';
import { DocHistoryManager } from '../src/modules/doc';
import { QuotaModule } from '../src/modules/quota';
import { PrismaModule, PrismaService } from '../src/prisma';
import { FakeStorageModule, flushDB } from './utils';
import { flushDB } from './utils';
let app: INestApplication;
let m: TestingModule;
@@ -21,13 +20,7 @@ let db: PrismaService;
test.beforeEach(async () => {
await flushDB();
m = await Test.createTestingModule({
imports: [
PrismaModule,
QuotaModule,
FakeStorageModule.forRoot(),
ScheduleModule.forRoot(),
ConfigModule.forRoot(),
],
imports: [PrismaModule, ScheduleModule.forRoot(), ConfigModule.forRoot()],
providers: [DocHistoryManager],
}).compile();
@@ -284,8 +277,8 @@ test('should be able to recover from history', async t => {
t.is(history2.timestamp.getTime(), snapshot.updatedAt.getTime());
// new history data is force-created with the snapshot state from before the recovery
t.deepEqual(history2.blob, Buffer.from([1, 1]));
t.deepEqual(history2.state, Buffer.from([1, 1]));
t.deepEqual(history2?.blob, Buffer.from([1, 1]));
t.deepEqual(history2?.state, Buffer.from([1, 1]));
});
test('should be able to cleanup expired history', async t => {

View File

@@ -11,13 +11,11 @@ import { PrismaClient } from '@prisma/client';
import ava, { type TestFn } from 'ava';
import { ConfigModule } from '../src/config';
import { RevertCommand, RunCommand } from '../src/data/commands/run';
import { GqlModule } from '../src/graphql.module';
import { AuthModule } from '../src/modules/auth';
import { AuthService } from '../src/modules/auth/service';
import { PrismaModule } from '../src/prisma';
import { RateLimiterModule } from '../src/throttler';
import { initFeatureConfigs } from './utils';
const test = ava as TestFn<{
auth: AuthService;
@@ -47,12 +45,8 @@ test.beforeEach(async t => {
AuthModule,
RateLimiterModule,
],
providers: [RevertCommand, RunCommand],
}).compile();
t.context.auth = t.context.module.get(AuthService);
// init features
await initFeatureConfigs(t.context.module);
});
test.afterEach.always(async t => {

View File

@@ -9,7 +9,6 @@ import graphqlUploadExpress from 'graphql-upload/graphqlUploadExpress.mjs';
import { AppModule } from '../src/app';
import { MailService } from '../src/modules/auth/mailer';
import { FeatureManagementService } from '../src/modules/features';
import { PrismaService } from '../src/prisma';
import { createWorkspace, getInviteInfo, inviteUser, signUp } from './utils';
@@ -101,8 +100,6 @@ test.beforeEach(async t => {
})
.overrideProvider(PrismaService)
.useValue(FakePrisma)
.overrideProvider(FeatureManagementService)
.useValue({})
.compile();
const app = module.createNestApplication();
app.use(

View File

@@ -1,133 +0,0 @@
/// <reference types="../src/global.d.ts" />
import { Test, TestingModule } from '@nestjs/testing';
import { PrismaClient } from '@prisma/client';
import ava, { type TestFn } from 'ava';
import { ConfigModule } from '../src/config';
import { RevertCommand, RunCommand } from '../src/data/commands/run';
import { AuthModule } from '../src/modules/auth';
import { AuthService } from '../src/modules/auth/service';
import {
QuotaManagementService,
QuotaModule,
Quotas,
QuotaService,
QuotaType,
} from '../src/modules/quota';
import { PrismaModule } from '../src/prisma';
import { RateLimiterModule } from '../src/throttler';
import { FakeStorageModule, initFeatureConfigs } from './utils';
const test = ava as TestFn<{
auth: AuthService;
quota: QuotaService;
storageQuota: QuotaManagementService;
app: TestingModule;
}>;
// cleanup database before each test
test.beforeEach(async () => {
const client = new PrismaClient();
await client.$connect();
await client.user.deleteMany({});
await client.$disconnect();
});
test.beforeEach(async t => {
const module = await Test.createTestingModule({
imports: [
ConfigModule.forRoot({
auth: {
accessTokenExpiresIn: 1,
refreshTokenExpiresIn: 1,
leeway: 1,
},
host: 'example.org',
https: true,
}),
PrismaModule,
AuthModule,
QuotaModule,
FakeStorageModule.forRoot(),
RateLimiterModule,
RevertCommand,
RunCommand,
],
}).compile();
const quota = module.get(QuotaService);
const storageQuota = module.get(QuotaManagementService);
const auth = module.get(AuthService);
t.context.app = module;
t.context.quota = quota;
t.context.storageQuota = storageQuota;
t.context.auth = auth;
// init features
await initFeatureConfigs(module);
});
test.afterEach.always(async t => {
await t.context.app.close();
});
test('should be able to set quota', async t => {
const { auth, quota } = t.context;
const u1 = await auth.signUp('DarkSky', 'darksky@example.org', '123456');
const q1 = await quota.getUserQuota(u1.id);
t.truthy(q1, 'should have quota');
t.is(q1?.feature.name, QuotaType.FreePlanV1, 'should be free plan');
await quota.switchUserQuota(u1.id, QuotaType.ProPlanV1);
const q2 = await quota.getUserQuota(u1.id);
t.is(q2?.feature.name, QuotaType.ProPlanV1, 'should be pro plan');
const fail = quota.switchUserQuota(u1.id, 'not_exists_plan_v1' as QuotaType);
await t.throwsAsync(fail, { instanceOf: Error }, 'should throw error');
});
test('should be able to check storage quota', async t => {
const { auth, quota, storageQuota } = t.context;
const u1 = await auth.signUp('DarkSky', 'darksky@example.org', '123456');
const q1 = await storageQuota.getUserQuota(u1.id);
t.is(q1?.blobLimit, Quotas[0].configs.blobLimit, 'should be free plan');
t.is(q1?.storageQuota, Quotas[0].configs.storageQuota, 'should be free plan');
await quota.switchUserQuota(u1.id, QuotaType.ProPlanV1);
const q2 = await storageQuota.getUserQuota(u1.id);
t.is(q2?.blobLimit, Quotas[1].configs.blobLimit, 'should be pro plan');
t.is(q2?.storageQuota, Quotas[1].configs.storageQuota, 'should be pro plan');
});
test('should be able to revert quota', async t => {
const { auth, quota, storageQuota } = t.context;
const u1 = await auth.signUp('DarkSky', 'darksky@example.org', '123456');
const q1 = await storageQuota.getUserQuota(u1.id);
t.is(q1?.blobLimit, Quotas[0].configs.blobLimit, 'should be free plan');
t.is(q1?.storageQuota, Quotas[0].configs.storageQuota, 'should be free plan');
await quota.switchUserQuota(u1.id, QuotaType.ProPlanV1);
const q2 = await storageQuota.getUserQuota(u1.id);
t.is(q2?.blobLimit, Quotas[1].configs.blobLimit, 'should be pro plan');
t.is(q2?.storageQuota, Quotas[1].configs.storageQuota, 'should be pro plan');
await quota.switchUserQuota(u1.id, QuotaType.FreePlanV1);
const q3 = await storageQuota.getUserQuota(u1.id);
t.is(q3?.blobLimit, Quotas[0].configs.blobLimit, 'should be free plan');
const quotas = await quota.getUserQuotas(u1.id);
t.is(quotas.length, 3, 'should have 3 quotas');
t.is(quotas[0].feature.name, QuotaType.FreePlanV1, 'should be free plan');
t.is(quotas[1].feature.name, QuotaType.ProPlanV1, 'should be pro plan');
t.is(quotas[2].feature.name, QuotaType.FreePlanV1, 'should be free plan');
t.is(quotas[0].activated, false, 'should be deactivated');
t.is(quotas[1].activated, false, 'should be deactivated');
t.is(quotas[2].activated, true, 'should be activated');
});
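A minimal sketch of the history shape the last test asserts: getUserQuotas keeps every switch in order, and only the most recent entry stays activated. The object shape below is a simplified stand-in for the real return type, using the QuotaType values already imported by this spec.
// simplified stand-in for the entries returned by quota.getUserQuotas(u1.id)
const expectedHistory = [
  { plan: QuotaType.FreePlanV1, activated: false }, // initial free plan, superseded
  { plan: QuotaType.ProPlanV1, activated: false },  // pro plan, superseded by the revert
  { plan: QuotaType.FreePlanV1, activated: true },  // current plan after the revert
];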

View File

@@ -6,8 +6,7 @@ import graphqlUploadExpress from 'graphql-upload/graphqlUploadExpress.mjs';
import request from 'supertest';
import { AppModule } from '../src/app';
import { RevertCommand, RunCommand } from '../src/data/commands/run';
import { currentUser, initFeatureConfigs, signUp } from './utils';
import { currentUser, signUp } from './utils';
let app: INestApplication;
@@ -22,7 +21,6 @@ test.beforeEach(async () => {
test.beforeEach(async () => {
const module = await Test.createTestingModule({
imports: [AppModule],
providers: [RevertCommand, RunCommand],
}).compile();
app = module.createNestApplication();
app.use(
@@ -32,9 +30,6 @@ test.beforeEach(async () => {
})
);
await app.init();
// init features
await initFeatureConfigs(module);
});
test.afterEach.always(async () => {

View File

@@ -0,0 +1,587 @@
import { randomUUID } from 'node:crypto';
import type { INestApplication } from '@nestjs/common';
import { hashSync } from '@node-rs/argon2';
import { PrismaClient, type User } from '@prisma/client';
import request from 'supertest';
import type { TokenType } from '../src/modules/auth';
import type { UserType } from '../src/modules/users';
import type { InvitationType, WorkspaceType } from '../src/modules/workspaces';
const gql = '/graphql';
async function signUp(
app: INestApplication,
name: string,
email: string,
password: string
): Promise<UserType & { token: TokenType }> {
const res = await request(app.getHttpServer())
.post(gql)
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
mutation {
signUp(name: "${name}", email: "${email}", password: "${password}") {
id, name, email, token { token }
}
}
`,
})
.expect(200);
return res.body.data.signUp;
}
async function currentUser(app: INestApplication, token: string) {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
query {
currentUser {
id, name, email, emailVerified, avatarUrl, createdAt, hasPassword,
token { token }
}
}
`,
})
.expect(200);
return res.body.data.currentUser;
}
async function createWorkspace(
app: INestApplication,
token: string
): Promise<WorkspaceType> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.field(
'operations',
JSON.stringify({
name: 'createWorkspace',
query: `mutation createWorkspace($init: Upload!) {
createWorkspace(init: $init) {
id
}
}`,
variables: { init: null },
})
)
.field('map', JSON.stringify({ '0': ['variables.init'] }))
.attach('0', Buffer.from([0, 0]), 'init.data')
.expect(200);
return res.body.data.createWorkspace;
}
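// createWorkspace (and setBlob below) follow the GraphQL multipart request
// convention handled by graphql-upload: `operations` holds the mutation with a
// null placeholder for the Upload variable, `map` binds attached part "0" to
// variables.init, and the binary payload is attached under that key.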
export async function getWorkspacePublicPages(
app: INestApplication,
token: string,
workspaceId: string
) {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
query {
workspace(id: "${workspaceId}") {
publicPages {
id
mode
}
}
}
`,
})
.expect(200);
return res.body.data.workspace.publicPages;
}
async function getWorkspace(
app: INestApplication,
token: string,
workspaceId: string,
skip = 0,
take = 8
): Promise<WorkspaceType> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
query {
workspace(id: "${workspaceId}") {
id, members(skip: ${skip}, take: ${take}) { id, name, email, permission, inviteId }
}
}
`,
})
.expect(200);
return res.body.data.workspace;
}
async function getPublicWorkspace(
app: INestApplication,
workspaceId: string
): Promise<WorkspaceType> {
const res = await request(app.getHttpServer())
.post(gql)
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
query {
publicWorkspace(id: "${workspaceId}") {
id
}
}
`,
})
.expect(200);
return res.body.data.publicWorkspace;
}
async function updateWorkspace(
app: INestApplication,
token: string,
workspaceId: string,
isPublic: boolean
): Promise<boolean> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
mutation {
updateWorkspace(input: { id: "${workspaceId}", public: ${isPublic} }) {
public
}
}
`,
})
.expect(200);
return res.body.data.updateWorkspace.public;
}
async function inviteUser(
app: INestApplication,
token: string,
workspaceId: string,
email: string,
permission: string,
sendInviteMail = false
): Promise<string> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
mutation {
invite(workspaceId: "${workspaceId}", email: "${email}", permission: ${permission}, sendInviteMail: ${sendInviteMail})
}
`,
})
.expect(200);
return res.body.data.invite;
}
async function acceptInviteById(
app: INestApplication,
workspaceId: string,
inviteId: string,
sendAcceptMail = false
): Promise<boolean> {
const res = await request(app.getHttpServer())
.post(gql)
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
mutation {
acceptInviteById(workspaceId: "${workspaceId}", inviteId: "${inviteId}", sendAcceptMail: ${sendAcceptMail})
}
`,
})
.expect(200);
return res.body.data.acceptInviteById;
}
async function leaveWorkspace(
app: INestApplication,
token: string,
workspaceId: string,
sendLeaveMail = false
): Promise<boolean> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
mutation {
leaveWorkspace(workspaceId: "${workspaceId}", workspaceName: "test workspace", sendLeaveMail: ${sendLeaveMail})
}
`,
})
.expect(200);
return res.body.data.leaveWorkspace;
}
async function revokeUser(
app: INestApplication,
token: string,
workspaceId: string,
userId: string
): Promise<boolean> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
mutation {
revoke(workspaceId: "${workspaceId}", userId: "${userId}")
}
`,
})
.expect(200);
return res.body.data.revoke;
}
async function publishPage(
app: INestApplication,
token: string,
workspaceId: string,
pageId: string
) {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
mutation {
publishPage(workspaceId: "${workspaceId}", pageId: "${pageId}") {
id
mode
}
}
`,
})
.expect(200);
return res.body.errors?.[0]?.message || res.body.data?.publishPage;
}
async function revokePublicPage(
app: INestApplication,
token: string,
workspaceId: string,
pageId: string
) {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
mutation {
revokePublicPage(workspaceId: "${workspaceId}", pageId: "${pageId}") {
id
mode
public
}
}
`,
})
.expect(200);
return res.body.errors?.[0]?.message || res.body.data?.revokePublicPage;
}
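// Unlike the other helpers, publishPage and revokePublicPage fall back to the
// first GraphQL error message, so tests can assert on rejected mutations as
// well as on successful results.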
async function listBlobs(
app: INestApplication,
token: string,
workspaceId: string
): Promise<string[]> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
query {
listBlobs(workspaceId: "${workspaceId}")
}
`,
})
.expect(200);
return res.body.data.listBlobs;
}
async function collectBlobSizes(
app: INestApplication,
token: string,
workspaceId: string
): Promise<number> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.send({
query: `
query {
collectBlobSizes(workspaceId: "${workspaceId}") {
size
}
}
`,
})
.expect(200);
return res.body.data.collectBlobSizes.size;
}
async function collectAllBlobSizes(
app: INestApplication,
token: string
): Promise<number> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.send({
query: `
query {
collectAllBlobSizes {
size
}
}
`,
})
.expect(200);
return res.body.data.collectAllBlobSizes.size;
}
async function checkBlobSize(
app: INestApplication,
token: string,
workspaceId: string,
size: number
): Promise<number> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.send({
query: `query checkBlobSize($workspaceId: String!, $size: Float!) {
checkBlobSize(workspaceId: $workspaceId, size: $size) {
size
}
}`,
variables: { workspaceId, size },
})
.expect(200);
return res.body.data.checkBlobSize.size;
}
async function setBlob(
app: INestApplication,
token: string,
workspaceId: string,
buffer: Buffer
): Promise<string> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.field(
'operations',
JSON.stringify({
name: 'setBlob',
query: `mutation setBlob($blob: Upload!) {
setBlob(workspaceId: "${workspaceId}", blob: $blob)
}`,
variables: { blob: null },
})
)
.field('map', JSON.stringify({ '0': ['variables.blob'] }))
.attach('0', buffer, 'blob.data')
.expect(200);
return res.body.data.setBlob;
}
async function flushDB() {
const client = new PrismaClient();
await client.$connect();
const result: { tablename: string }[] =
await client.$queryRaw`SELECT tablename
FROM pg_catalog.pg_tables
WHERE schemaname != 'pg_catalog'
AND schemaname != 'information_schema'`;
// remove all table data
await client.$executeRawUnsafe(
`TRUNCATE TABLE ${result
.map(({ tablename }) => tablename)
.filter(name => !name.includes('migrations'))
.join(', ')}`
);
await client.$disconnect();
}
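// With, say, `users`, `workspaces` and `_prisma_migrations` present, the
// statement built above is roughly `TRUNCATE TABLE users, workspaces`; the
// migrations table is filtered out so the schema history survives the wipe.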
async function getInviteInfo(
app: INestApplication,
token: string,
inviteId: string
): Promise<InvitationType> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
query {
getInviteInfo(inviteId: "${inviteId}") {
workspace {
id
name
avatar
}
user {
id
name
avatarUrl
}
}
}
`,
})
.expect(200);
return res.body.data.getInviteInfo;
}
async function sendChangeEmail(
app: INestApplication,
userToken: string,
email: string,
callbackUrl: string
): Promise<boolean> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(userToken, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
mutation {
sendChangeEmail(email: "${email}", callbackUrl: "${callbackUrl}")
}
`,
})
.expect(200);
return res.body.data.sendChangeEmail;
}
async function sendVerifyChangeEmail(
app: INestApplication,
userToken: string,
token: string,
email: string,
callbackUrl: string
): Promise<boolean> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(userToken, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
mutation {
sendVerifyChangeEmail(token:"${token}", email: "${email}", callbackUrl: "${callbackUrl}")
}
`,
})
.expect(200);
return res.body.data.sendVerifyChangeEmail;
}
async function changeEmail(
app: INestApplication,
userToken: string,
token: string
): Promise<UserType & { token: TokenType }> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(userToken, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
mutation {
changeEmail(token: "${token}") {
id
name
avatarUrl
email
}
}
`,
})
.expect(200);
return res.body.data.changeEmail;
}
export class FakePrisma {
fakeUser: User = {
id: randomUUID(),
name: 'Alex Yang',
avatarUrl: '',
email: 'alex.yang@example.org',
password: hashSync('123456'),
emailVerified: new Date(),
createdAt: new Date(),
};
get user() {
// eslint-disable-next-line @typescript-eslint/no-this-alias
const prisma = this;
return {
async findFirst() {
return prisma.fakeUser;
},
async findUnique() {
return this.findFirst();
},
async update() {
return this.findFirst();
},
};
}
}
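// FakePrisma stands in for PrismaClient in unit tests: every user query
// resolves to the same in-memory record, whose password is pre-hashed with
// argon2 so verification of '123456' can succeed without touching a database.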
export {
acceptInviteById,
changeEmail,
checkBlobSize,
collectAllBlobSizes,
collectBlobSizes,
createWorkspace,
currentUser,
flushDB,
getInviteInfo,
getPublicWorkspace,
getWorkspace,
inviteUser,
leaveWorkspace,
listBlobs,
publishPage,
revokePublicPage,
revokeUser,
sendChangeEmail,
sendVerifyChangeEmail,
setBlob,
signUp,
updateWorkspace,
};
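A short usage sketch showing how these helpers compose in an e2e spec. It assumes an app built from AppModule as in the specs above, and the 'Admin' permission value is an assumed member of the Permission enum, not something shown in this diff.
async function exampleInviteFlow(app: INestApplication) {
  // two accounts: a workspace owner and an invitee
  const owner = await signUp(app, 'owner', 'owner@example.org', '123456');
  const member = await signUp(app, 'member', 'member@example.org', '123456');
  // the owner creates a workspace and invites the second user
  const workspace = await createWorkspace(app, owner.token.token);
  const inviteId = await inviteUser(
    app, owner.token.token, workspace.id, member.email, 'Admin'
  );
  await acceptInviteById(app, workspace.id, inviteId);
  // upload a two-byte blob and read the workspace blob list back
  await setBlob(app, owner.token.token, workspace.id, Buffer.from([0, 0]));
  return listBlobs(app, owner.token.token, workspace.id);
}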

View File

@@ -1,112 +0,0 @@
import type { INestApplication } from '@nestjs/common';
import request from 'supertest';
import { gql } from './common';
export async function listBlobs(
app: INestApplication,
token: string,
workspaceId: string
): Promise<string[]> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
query {
listBlobs(workspaceId: "${workspaceId}")
}
`,
})
.expect(200);
return res.body.data.listBlobs;
}
export async function getWorkspaceBlobsSize(
app: INestApplication,
token: string,
workspaceId: string
): Promise<number> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.send({
query: `
query {
workspace(id: "${workspaceId}") {
blobsSize
}
}
`,
})
.expect(200);
return res.body.data.workspace.blobsSize;
}
export async function collectAllBlobSizes(
app: INestApplication,
token: string
): Promise<number> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.send({
query: `
query {
collectAllBlobSizes {
size
}
}
`,
})
.expect(200);
return res.body.data.collectAllBlobSizes.size;
}
export async function checkBlobSize(
app: INestApplication,
token: string,
workspaceId: string,
size: number
): Promise<number> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.send({
query: `query checkBlobSize($workspaceId: String!, $size: Float!) {
checkBlobSize(workspaceId: $workspaceId, size: $size) {
size
}
}`,
variables: { workspaceId, size },
})
.expect(200);
return res.body.data.checkBlobSize.size;
}
export async function setBlob(
app: INestApplication,
token: string,
workspaceId: string,
buffer: Buffer
): Promise<string> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.field(
'operations',
JSON.stringify({
name: 'setBlob',
query: `mutation setBlob($blob: Upload!) {
setBlob(workspaceId: "${workspaceId}", blob: $blob)
}`,
variables: { blob: null },
})
)
.field('map', JSON.stringify({ '0': ['variables.blob'] }))
.attach('0', buffer, 'blob.data')
.expect(200);
return res.body.data.setBlob;
}

View File

@@ -1 +0,0 @@
export const gql = '/graphql';

View File

@@ -1,5 +0,0 @@
export * from './blobs';
export * from './invite';
export * from './user';
export * from './utils';
export * from './workspace';

View File

@@ -1,121 +0,0 @@
import type { INestApplication } from '@nestjs/common';
import request from 'supertest';
import type { InvitationType } from '../../src/modules/workspaces';
import { gql } from './common';
export async function inviteUser(
app: INestApplication,
token: string,
workspaceId: string,
email: string,
permission: string,
sendInviteMail = false
): Promise<string> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
mutation {
invite(workspaceId: "${workspaceId}", email: "${email}", permission: ${permission}, sendInviteMail: ${sendInviteMail})
}
`,
})
.expect(200);
return res.body.data.invite;
}
export async function acceptInviteById(
app: INestApplication,
workspaceId: string,
inviteId: string,
sendAcceptMail = false
): Promise<boolean> {
const res = await request(app.getHttpServer())
.post(gql)
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
mutation {
acceptInviteById(workspaceId: "${workspaceId}", inviteId: "${inviteId}", sendAcceptMail: ${sendAcceptMail})
}
`,
})
.expect(200);
return res.body.data.acceptInviteById;
}
export async function leaveWorkspace(
app: INestApplication,
token: string,
workspaceId: string,
sendLeaveMail = false
): Promise<boolean> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
mutation {
leaveWorkspace(workspaceId: "${workspaceId}", workspaceName: "test workspace", sendLeaveMail: ${sendLeaveMail})
}
`,
})
.expect(200);
return res.body.data.leaveWorkspace;
}
export async function revokeUser(
app: INestApplication,
token: string,
workspaceId: string,
userId: string
): Promise<boolean> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
mutation {
revoke(workspaceId: "${workspaceId}", userId: "${userId}")
}
`,
})
.expect(200);
return res.body.data.revoke;
}
export async function getInviteInfo(
app: INestApplication,
token: string,
inviteId: string
): Promise<InvitationType> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
query {
getInviteInfo(inviteId: "${inviteId}") {
workspace {
id
name
avatar
}
user {
id
name
avatarUrl
}
}
}
`,
})
.expect(200);
return res.body.data.getInviteInfo;
}

View File

@@ -1,117 +0,0 @@
import type { INestApplication } from '@nestjs/common';
import request from 'supertest';
import type { TokenType } from '../../src/modules/auth';
import type { UserType } from '../../src/modules/users';
import { gql } from './common';
export async function signUp(
app: INestApplication,
name: string,
email: string,
password: string
): Promise<UserType & { token: TokenType }> {
const res = await request(app.getHttpServer())
.post(gql)
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
mutation {
signUp(name: "${name}", email: "${email}", password: "${password}") {
id, name, email, token { token }
}
}
`,
})
.expect(200);
return res.body.data.signUp;
}
export async function currentUser(app: INestApplication, token: string) {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
query {
currentUser {
id, name, email, emailVerified, avatarUrl, createdAt, hasPassword,
token { token }
}
}
`,
})
.expect(200);
return res.body.data.currentUser;
}
export async function sendChangeEmail(
app: INestApplication,
userToken: string,
email: string,
callbackUrl: string
): Promise<boolean> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(userToken, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
mutation {
sendChangeEmail(email: "${email}", callbackUrl: "${callbackUrl}")
}
`,
})
.expect(200);
return res.body.data.sendChangeEmail;
}
export async function sendVerifyChangeEmail(
app: INestApplication,
userToken: string,
token: string,
email: string,
callbackUrl: string
): Promise<boolean> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(userToken, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
mutation {
sendVerifyChangeEmail(token:"${token}", email: "${email}", callbackUrl: "${callbackUrl}")
}
`,
})
.expect(200);
return res.body.data.sendVerifyChangeEmail;
}
export async function changeEmail(
app: INestApplication,
userToken: string,
token: string
): Promise<UserType & { token: TokenType }> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(userToken, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
mutation {
changeEmail(token: "${token}") {
id
name
avatarUrl
email
}
}
`,
})
.expect(200);
return res.body.data.changeEmail;
}

View File

@@ -1,82 +0,0 @@
import { randomUUID } from 'node:crypto';
import type { DynamicModule, FactoryProvider } from '@nestjs/common';
import { TestingModule } from '@nestjs/testing';
import { hashSync } from '@node-rs/argon2';
import { PrismaClient, type User } from '@prisma/client';
import { RevertCommand, RunCommand } from '../../src/data/commands/run';
import { StorageProvide } from '../../src/storage';
export async function flushDB() {
const client = new PrismaClient();
await client.$connect();
const result: { tablename: string }[] =
await client.$queryRaw`SELECT tablename
FROM pg_catalog.pg_tables
WHERE schemaname != 'pg_catalog'
AND schemaname != 'information_schema'`;
// remove all table data
await client.$executeRawUnsafe(
`TRUNCATE TABLE ${result
.map(({ tablename }) => tablename)
.filter(name => !name.includes('migrations'))
.join(', ')}`
);
await client.$disconnect();
}
export class FakePrisma {
fakeUser: User = {
id: randomUUID(),
name: 'Alex Yang',
avatarUrl: '',
email: 'alex.yang@example.org',
password: hashSync('123456'),
emailVerified: new Date(),
createdAt: new Date(),
};
get user() {
// eslint-disable-next-line @typescript-eslint/no-this-alias
const prisma = this;
return {
async findFirst() {
return prisma.fakeUser;
},
async findUnique() {
return this.findFirst();
},
async update() {
return this.findFirst();
},
};
}
}
export class FakeStorageModule {
static forRoot(): DynamicModule {
const storageProvider: FactoryProvider = {
provide: StorageProvide,
useFactory: async () => {
return null;
},
};
return {
global: true,
module: FakeStorageModule,
providers: [storageProvider],
exports: [storageProvider],
};
}
}
export async function initFeatureConfigs(module: TestingModule) {
const run = module.get(RunCommand);
const revert = module.get(RevertCommand);
await Promise.allSettled([revert.run(['UserFeaturesInit1698652531198'])]);
await run.runOne('UserFeaturesInit1698652531198');
}

View File

@@ -1,172 +0,0 @@
import type { INestApplication } from '@nestjs/common';
import request from 'supertest';
import type { WorkspaceType } from '../../src/modules/workspaces';
import { gql } from './common';
export async function createWorkspace(
app: INestApplication,
token: string
): Promise<WorkspaceType> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.field(
'operations',
JSON.stringify({
name: 'createWorkspace',
query: `mutation createWorkspace($init: Upload!) {
createWorkspace(init: $init) {
id
}
}`,
variables: { init: null },
})
)
.field('map', JSON.stringify({ '0': ['variables.init'] }))
.attach('0', Buffer.from([0, 0]), 'init.data')
.expect(200);
return res.body.data.createWorkspace;
}
export async function getWorkspacePublicPages(
app: INestApplication,
token: string,
workspaceId: string
) {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
query {
workspace(id: "${workspaceId}") {
publicPages {
id
mode
}
}
}
`,
})
.expect(200);
return res.body.data.workspace.publicPages;
}
export async function getWorkspace(
app: INestApplication,
token: string,
workspaceId: string,
skip = 0,
take = 8
): Promise<WorkspaceType> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
query {
workspace(id: "${workspaceId}") {
id, members(skip: ${skip}, take: ${take}) { id, name, email, permission, inviteId }
}
}
`,
})
.expect(200);
return res.body.data.workspace;
}
export async function getPublicWorkspace(
app: INestApplication,
workspaceId: string
): Promise<WorkspaceType> {
const res = await request(app.getHttpServer())
.post(gql)
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
query {
publicWorkspace(id: "${workspaceId}") {
id
}
}
`,
})
.expect(200);
return res.body.data.publicWorkspace;
}
export async function updateWorkspace(
app: INestApplication,
token: string,
workspaceId: string,
isPublic: boolean
): Promise<boolean> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
mutation {
updateWorkspace(input: { id: "${workspaceId}", public: ${isPublic} }) {
public
}
}
`,
})
.expect(200);
return res.body.data.updateWorkspace.public;
}
export async function publishPage(
app: INestApplication,
token: string,
workspaceId: string,
pageId: string
) {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
mutation {
publishPage(workspaceId: "${workspaceId}", pageId: "${pageId}") {
id
mode
}
}
`,
})
.expect(200);
return res.body.errors?.[0]?.message || res.body.data?.publishPage;
}
export async function revokePublicPage(
app: INestApplication,
token: string,
workspaceId: string,
pageId: string
) {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.send({
query: `
mutation {
revokePublicPage(workspaceId: "${workspaceId}", pageId: "${pageId}") {
id
mode
public
}
}
`,
})
.expect(200);
return res.body.errors?.[0]?.message || res.body.data?.revokePublicPage;
}

View File

@@ -6,21 +6,17 @@ import graphqlUploadExpress from 'graphql-upload/graphqlUploadExpress.mjs';
import request from 'supertest';
import { AppModule } from '../src/app';
import { RevertCommand, RunCommand } from '../src/data/commands/run';
import { QuotaService, QuotaType } from '../src/modules/quota';
import {
checkBlobSize,
collectAllBlobSizes,
collectBlobSizes,
createWorkspace,
getWorkspaceBlobsSize,
initFeatureConfigs,
listBlobs,
setBlob,
signUp,
} from './utils';
let app: INestApplication;
let quota: QuotaService;
const client = new PrismaClient();
@@ -37,7 +33,6 @@ test.beforeEach(async () => {
test.beforeEach(async () => {
const module = await Test.createTestingModule({
imports: [AppModule],
providers: [RevertCommand, RunCommand],
}).compile();
app = module.createNestApplication();
app.use(
@@ -46,11 +41,6 @@ test.beforeEach(async () => {
maxFiles: 5,
})
);
quota = module.get(QuotaService);
// init features
await initFeatureConfigs(module);
await app.init();
});
@@ -113,7 +103,7 @@ test('should calc blobs size', async t => {
const buffer2 = Buffer.from([0, 1]);
await setBlob(app, u1.token.token, workspace.id, buffer2);
const size = await getWorkspaceBlobsSize(app, u1.token.token, workspace.id);
const size = await collectBlobSizes(app, u1.token.token, workspace.id);
t.is(size, 4, 'failed to collect blob sizes');
});
@@ -153,39 +143,3 @@ test('should calc all blobs size', async t => {
);
t.is(size2, -1, 'failed to check blob size');
});
test('should be able calc quota after switch plan', async t => {
const u1 = await signUp(app, 'darksky', 'darksky@affine.pro', '1');
const workspace1 = await createWorkspace(app, u1.token.token);
const buffer1 = Buffer.from([0, 0]);
await setBlob(app, u1.token.token, workspace1.id, buffer1);
const buffer2 = Buffer.from([0, 1]);
await setBlob(app, u1.token.token, workspace1.id, buffer2);
const workspace2 = await createWorkspace(app, u1.token.token);
const buffer3 = Buffer.from([0, 0]);
await setBlob(app, u1.token.token, workspace2.id, buffer3);
const buffer4 = Buffer.from([0, 1]);
await setBlob(app, u1.token.token, workspace2.id, buffer4);
const size1 = await checkBlobSize(
app,
u1.token.token,
workspace1.id,
10 * 1024 * 1024 * 1024 - 8
);
t.is(size1, 0, 'failed to check free plan blob size');
quota.switchUserQuota(u1.id, QuotaType.ProPlanV1);
const size2 = await checkBlobSize(
app,
u1.token.token,
workspace1.id,
100 * 1024 * 1024 * 1024 - 8
);
t.is(size2, 0, 'failed to check pro plan blob size');
});
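The magic numbers in the removed test above read as follows; the 10 GiB and 100 GiB limits are inferred from the assertions, not from the plan definitions. Four two-byte blobs leave 8 bytes already counted against the user across the two workspaces, so requesting exactly the limit minus 8 appears to fit with nothing to spare (result 0), while the earlier assertion of -1 appears to mean the request would exceed the quota.
// inferred from the assertions above, not from the plan definitions
const used = 4 * 2;                             // four two-byte blobs already stored
const freeFit = 10 * 1024 * 1024 * 1024 - used;  // request that exactly fills the free plan
const proFit = 100 * 1024 * 1024 * 1024 - used;  // exactly fills the pro plan after the switch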

Some files were not shown because too many files have changed in this diff.