Compare commits


30 Commits

Author SHA1 Message Date
李华桥
b20e91bee0 v0.10.3-beta.1 2023-11-25 14:14:40 +08:00
李华桥
9a4e5ec8c3 Merge branch 'canary' into stable 2023-11-25 14:14:14 +08:00
李华桥
2019838ae7 v0.10.3-beta.0 2023-11-24 11:39:23 +08:00
李华桥
30ff25f400 Merge branch 'canary' into stable 2023-11-23 23:40:32 +08:00
李华桥
e766208c18 chore: reset merge wrong codes 2023-11-23 22:53:06 +08:00
李华桥
8742f28148 Merge branch 'canary' into stable 2023-11-23 21:31:42 +08:00
LongYinan
cd291bb60e build: remove useless source-map-loader to speedup webpack (#4910) 2023-11-20 10:52:28 +08:00
LongYinan
62c0efcfd1 fix(core): handle the getSession network error properly (#4909)
If the network is offline or an API error happens, the `session` returned by the `useSession` hook will be null, so we can't assume it is non-null.

There should be the following changes:
1. Create a page in the ErrorBoundary to let the user refetch the session.
2. The `SessionProvider` stops pulling a new session once the session is null; we need to figure out a way to pull a new session when the network is back or the user clicks the refetch button.
2023-11-17 16:50:48 +08:00
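A minimal sketch of the idea (the `useSession` return shape and `refetch` helper here are assumptions for illustration, not AFFiNE's actual API): guard rendering when the session is null and give the user a way to refetch it.

```tsx
import { useCallback, useState, type ReactNode } from 'react';

// Hypothetical hook shape: the commit only states that `session` can be null
// when the network is offline or the session API errors out.
declare function useSession(): {
  session: { user: { id: string } } | null;
  refetch: () => Promise<void>;
};

// Error-boundary-style guard: never assume the session is non-null; offer a
// manual refetch path for when the network comes back.
export function SessionGuard({ children }: { children: ReactNode }) {
  const { session, refetch } = useSession();
  const [retrying, setRetrying] = useState(false);

  const retry = useCallback(async () => {
    setRetrying(true);
    try {
      await refetch(); // pull a fresh session on user action
    } finally {
      setRetrying(false);
    }
  }, [refetch]);

  if (!session) {
    return (
      <button onClick={retry} disabled={retrying}>
        {retrying ? 'Retrying…' : 'Reload session'}
      </button>
    );
  }
  return <>{children}</>;
}
```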
liuyi
87248b3337 fix(server): all viewers can share public link (#4968) 2023-11-17 12:34:15 +08:00
Joooye_34
00c940f7df chore: bump affine version to 0.10.2 (#4959) 2023-11-16 15:48:37 +08:00
Flrande
931b459fbd chore: bump blocksuite (#4958) 2023-11-16 14:27:39 +08:00
LongYinan
51e71f4a0a ci: prevent error if rust build is cached by nx (#4951)
If the Rust build was cached by nx, only the output file will be present. The chmod command will fail in this case, for example: https://github.com/toeverything/AFFiNE/actions/runs/6874496337/job/18697360212
2023-11-16 10:31:51 +08:00
Peng Xiao
9b631f2328 fix(infra): page id compat fix for page ids in workspace.meta (#4950)
Since we strip `page:` from the keys of the workspace doc's `spaces`, we should also strip the prefix from the ids in `meta.pages`.
2023-11-15 17:36:08 +08:00
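Not the PR's code, just a sketch of the compat step described above, assuming a simple `PageMeta` shape: strip the legacy `page:` prefix from ids recorded in `meta.pages` so they match the already-stripped keys of the workspace doc's `spaces`.

```ts
// Illustrative only: PageMeta is an assumed shape, not AFFiNE's real type.
interface PageMeta {
  id: string;
  title?: string;
}

// Strip the legacy "page:" prefix so meta.pages ids line up with doc.spaces keys.
export function normalizePageMetaIds(pages: PageMeta[]): PageMeta[] {
  return pages.map(page =>
    page.id.startsWith('page:')
      ? { ...page, id: page.id.slice('page:'.length) }
      : page
  );
}
```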
LongYinan
01f481a9b6 ci: only disable postinstall on macOS in nightly desktop build (#4938) 2023-11-14 23:00:30 +08:00
Joooye_34
0177ab5c87 fix(infra): workspace migration without blockVersions (#4936) 2023-11-14 14:38:11 +01:00
Peng Xiao
4db35d341c perf(component): use png instead of svg for rendering noise svg (#4935) 2023-11-14 11:52:51 +00:00
DarkSky
3c4a803c97 fix: change password token check (#4934) (#4932) 2023-11-14 11:15:54 +00:00
LongYinan
05154dc7ca ci: disable postinstall in nightly desktop build (#4930)
Should be part of https://github.com/toeverything/AFFiNE/pull/4885
2023-11-14 14:13:55 +08:00
Peng Xiao
c90b477f60 fix(core): change server url of stable to insider (#4902) (#4926) 2023-11-14 12:05:52 +08:00
李华桥
6f18ddbe85 v0.10.1 2023-11-13 19:49:26 +08:00
LongYinan
dde779a71d test(e2e): add subdoc migration test (#4921)
test(e2e): add subdoc migration test

fix: remove .only
2023-11-13 18:00:40 +08:00
Peng Xiao
bd9f66fbc7 fix(infra): compatibility fix for space prefix (#4912)
It seems there are some cases where [this upstream PR](https://github.com/toeverything/blocksuite/pull/4747) causes data loss.

For historical reasons, the page id could be different from its doc id.
It might be caused by the subdoc migration in the following (not 100% sure if every white screen issue is caused by it): 0714c12703/packages/common/infra/src/blocksuite/index.ts (L538-L540)

In version 0.10, the page id in `spaces` no longer has the prefix `space:`.
The data flow for fetching a doc's updates is:
- page id in `meta.pages` -> find `${page-id}` in `doc.spaces` -> `doc` -> `doc.guid`;
  if `doc` is not found in `doc.spaces`, a new doc will be created and its `doc.guid` is the same as its pageId
- because of the guid logic change, a doc that was previously prefixed with `space:` will not be found in `doc.spaces`
- when fetching the rows of this doc using the doc id === page id,
  it will return EMPTY since there are no updates associated with the page id

The fix provided in this PR patches the `spaces` field of the root doc so that after 0.10 the page doc can still be found in the `spaces` map. It applies to both the idb & sqlite datasources.

Special thanks to @lawvs for the db file used in the investigation!
2023-11-13 17:57:56 +08:00
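A rough sketch of the compat lookup the message describes, written against a hypothetical datasource interface (the real fix patches the idb & sqlite datasources): if no updates are found under the bare page id, fall back to the legacy `space:`-prefixed guid.

```ts
// Hypothetical datasource interface for illustration; not AFFiNE's actual API.
interface DocDataSource {
  queryDocState(guid: string): Promise<Uint8Array | null>;
}

// Compat lookup: pre-0.10 data may still key this doc's updates by "space:<id>".
export async function loadDocWithLegacyFallback(
  datasource: DocDataSource,
  pageId: string
): Promise<Uint8Array | null> {
  const updates = await datasource.queryDocState(pageId);
  if (updates && updates.length > 0) {
    return updates;
  }
  return datasource.queryDocState(`space:${pageId}`);
}
```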
liuyi
92f1f40bfa fix(server): wrap updates applying in a transaction (#4922) 2023-11-13 08:49:30 +00:00
LongYinan
48dc1049b3 Merge pull request #4913 from toeverything/darksky/cleanup-depolyment
chore: cleanup deployment
2023-11-12 11:20:02 +08:00
DarkSky
9add530370 chore: cleanup deployment 2023-11-12 11:03:25 +08:00
LongYinan
b77460d871 Merge pull request #4908 from toeverything/61/hotfix-websocket-payload
fix(server): increase server acceptable websocket payload size
2023-11-10 22:01:48 +08:00
forehalo
42db41776b fix(server): increase server acceptable websocket payload size 2023-11-10 21:31:45 +08:00
李华桥
075439c74f fix(core): change server url of stable to insider 2023-11-10 18:32:53 +08:00
Yifeng Wang
fc6c553ece chore: bump theme (#4904)
Co-authored-by: 李华桥 <joooye1991@gmail.com>
2023-11-10 15:40:38 +08:00
Joooye_34
59cb3d5df1 fix(core): change server url of stable to insider (#4902) 2023-11-10 14:50:57 +08:00
884 changed files with 19196 additions and 33169 deletions

View File

@@ -126,8 +126,6 @@ const config = {
'no-cond-assign': 'off',
'no-constant-binary-expression': 'error',
'no-constructor-return': 'error',
'no-self-compare': 'error',
eqeqeq: ['error', 'always', { null: 'ignore' }],
'react/prop-types': 'off',
'react/jsx-no-useless-fragment': 'error',
'@typescript-eslint/consistent-type-imports': 'error',
@@ -135,9 +133,6 @@ const config = {
'@typescript-eslint/no-explicit-any': 'off',
'@typescript-eslint/no-empty-function': 'off',
'@typescript-eslint/await-thenable': 'error',
'@typescript-eslint/require-array-sort-compare': 'error',
'@typescript-eslint/unified-signatures': 'error',
'@typescript-eslint/prefer-for-of': 'error',
'@typescript-eslint/no-unused-vars': [
'error',
{
@@ -209,17 +204,6 @@ const config = {
},
],
'unicorn/no-unnecessary-await': 'error',
'unicorn/no-useless-fallback-in-spread': 'error',
'unicorn/prefer-dom-node-dataset': 'error',
'unicorn/prefer-dom-node-append': 'error',
'unicorn/prefer-dom-node-remove': 'error',
'unicorn/prefer-array-some': 'error',
'unicorn/prefer-date-now': 'error',
'unicorn/prefer-blob-reading-methods': 'error',
'unicorn/no-typeof-undefined': 'error',
'unicorn/no-useless-promise-resolve-reject': 'error',
'unicorn/no-new-array': 'error',
'unicorn/new-for-builtins': 'error',
'sonarjs/no-all-duplicated-branches': 'error',
'sonarjs/no-element-overwrite': 'error',
'sonarjs/no-empty-collection': 'error',
@@ -270,7 +254,6 @@ const config = {
},
],
'@typescript-eslint/no-misused-promises': ['error'],
'@typescript-eslint/prefer-readonly': 'error',
'i/no-extraneous-dependencies': ['error'],
'react-hooks/exhaustive-deps': [
'warn',

View File

@@ -14,28 +14,14 @@ inputs:
runs:
using: 'composite'
steps:
- name: Print rustup toolchain version
shell: bash
id: rustup-version
run: |
export RUST_TOOLCHAIN_VERSION="$(grep 'channel' rust-toolchain.toml | head -1 | awk -F '"' '{print $2}')"
echo "Rust toolchain version: $RUST_TOOLCHAIN_VERSION"
echo "RUST_TOOLCHAIN_VERSION=$RUST_TOOLCHAIN_VERSION" >> "$GITHUB_OUTPUT"
- name: Setup Rust
uses: dtolnay/rust-toolchain@stable
with:
toolchain: '${{ steps.rustup-version.outputs.RUST_TOOLCHAIN_VERSION }}'
toolchain: stable
targets: ${{ inputs.target }}
env:
CARGO_INCREMENTAL: '1'
- name: Set CC
if: ${{ contains(inputs.target, 'linux') && inputs.package != '@affine/native' }}
shell: bash
run: |
echo "CC=clang" >> "$GITHUB_ENV"
echo "TARGET_CC=clang" >> "$GITHUB_ENV"
- name: Cache cargo
uses: actions/cache@v3
with:
@@ -43,13 +29,47 @@ runs:
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
~/.napi-rs
.cargo-cache
target/${{ inputs.target }}
key: stable-${{ inputs.target }}-cargo-cache
- name: Build
if: ${{ inputs.target != 'x86_64-unknown-linux-gnu' && inputs.target != 'aarch64-unknown-linux-gnu' }}
shell: bash
run: |
yarn workspace ${{ inputs.package }} nx build ${{ inputs.package }} --target ${{ inputs.target }} --use-napi-cross
yarn workspace ${{ inputs.package }} nx build ${{ inputs.package }} --target ${{ inputs.target }}
env:
NX_CLOUD_ACCESS_TOKEN: ${{ inputs.nx_token }}
DEBUG: 'napi:*'
- name: Build
if: ${{ inputs.target == 'x86_64-unknown-linux-gnu' }}
uses: addnab/docker-run-action@v3
with:
image: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian
options: --user 0:0 -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build -e NX_CLOUD_ACCESS_TOKEN=${{ inputs.nx_token }}
run: |
export CC=x86_64-unknown-linux-gnu-gcc
export CC_x86_64_unknown_linux_gnu=x86_64-unknown-linux-gnu-gcc
export RUSTFLAGS="-C debuginfo=1"
yarn workspace ${{ inputs.package }} nx build ${{ inputs.package }} --target ${{ inputs.target }}
if [ -d "node_modules/.cache" ]; then
chmod -R 777 node_modules/.cache
fi
if [ -d "target" ]; then
chmod -R 777 target;
fi
- name: Build
if: ${{ inputs.target == 'aarch64-unknown-linux-gnu' }}
uses: addnab/docker-run-action@v3
with:
image: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian-aarch64
options: --user 0:0 -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build -e NX_CLOUD_ACCESS_TOKEN=${{ inputs.nx_token }}
run: |
export RUSTFLAGS="-C debuginfo=1"
yarn workspace ${{ inputs.package }} nx build ${{ inputs.package }} --target ${{ inputs.target }}
if [ -d "node_modules/.cache" ]; then
chmod -R 777 node_modules/.cache
fi
if [ -d "target" ]; then
chmod -R 777 target;
fi

View File

@@ -26,7 +26,7 @@ runs:
echo "GIT_SHORT_HASH=$(git rev-parse --short HEAD)" >> "$GITHUB_ENV"
- uses: azure/setup-helm@v3
- id: auth
uses: google-github-actions/auth@v2
uses: google-github-actions/auth@v1
with:
workload_identity_provider: 'projects/${{ inputs.gcp-project-number }}/locations/global/workloadIdentityPools/github-actions/providers/github-actions-helm-deploy'
service_account: '${{ inputs.service-account }}'
@@ -34,7 +34,7 @@ runs:
project_id: '${{ inputs.gcp-project-id }}'
- name: 'Setup gcloud cli'
uses: 'google-github-actions/setup-gcloud@v2'
uses: 'google-github-actions/setup-gcloud@v1'
with:
install_components: 'gke-gcloud-auth-plugin'

View File

@@ -1,7 +1,6 @@
import { execSync } from 'node:child_process';
const {
APP_VERSION,
BUILD_TYPE,
DEPLOY_HOST,
CANARY_DEPLOY_HOST,
@@ -28,7 +27,6 @@ const {
REDIS_PASSWORD,
STRIPE_API_KEY,
STRIPE_WEBHOOK_KEY,
STATIC_IP_NAME,
} = process.env;
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
@@ -36,13 +34,17 @@ const buildType = BUILD_TYPE || 'canary';
const isProduction = buildType === 'stable';
const isBeta = buildType === 'beta';
const isInternal = buildType === 'internal';
const createHelmCommand = ({ isDryRun }) => {
const flag = isDryRun ? '--dry-run' : '--atomic';
const imageTag = `${buildType}-${GIT_SHORT_HASH}`;
const staticIpName = isProduction
? 'affine-cluster-production'
: isBeta
? 'affine-cluster-beta'
: 'affine-cluster-dev';
const redisAndPostgres =
isProduction || isBeta || isInternal
isProduction || isBeta
? [
`--set-string global.database.url=${DATABASE_URL}`,
`--set-string global.database.user=${DATABASE_USERNAME}`,
@@ -56,34 +58,27 @@ const createHelmCommand = ({ isDryRun }) => {
]
: [];
const serviceAnnotations =
isProduction || isBeta || isInternal
isProduction || isBeta
? [
`--set-json web.service.annotations=\"{ \\"cloud.google.com/neg\\": \\"{\\\\\\"ingress\\\\\\": true}\\" }\"`,
`--set-json graphql.serviceAccount.annotations=\"{ \\"iam.gke.io/gcp-service-account\\": \\"${CLOUD_SQL_IAM_ACCOUNT}\\" }\"`,
`--set-json graphql.service.annotations=\"{ \\"cloud.google.com/neg\\": \\"{\\\\\\"ingress\\\\\\": true}\\" }\"`,
`--set-json sync.serviceAccount.annotations=\"{ \\"iam.gke.io/gcp-service-account\\": \\"${CLOUD_SQL_IAM_ACCOUNT}\\" }\"`,
`--set-json sync.service.annotations=\"{ \\"cloud.google.com/neg\\": \\"{\\\\\\"ingress\\\\\\": true}\\" }\"`,
`--set-json cloud-sql-proxy.serviceAccount.annotations=\"{ \\"iam.gke.io/gcp-service-account\\": \\"${CLOUD_SQL_IAM_ACCOUNT}\\" }\"`,
`--set-json cloud-sql-proxy.nodeSelector=\"{ \\"iam.gke.io/gke-metadata-server-enabled\\": \\"true\\" }\"`,
]
: [];
const webReplicaCount = isProduction ? 3 : isBeta ? 2 : 2;
const graphqlReplicaCount = isProduction ? 10 : isBeta ? 5 : 2;
const syncReplicaCount = isProduction ? 10 : isBeta ? 5 : 2;
const namespace = isProduction
? 'production'
: isBeta
? 'beta'
: isInternal
? 'internal'
: 'dev';
const namespace = isProduction ? 'production' : isBeta ? 'beta' : 'dev';
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
const host = DEPLOY_HOST || CANARY_DEPLOY_HOST;
const deployCommand = [
`helm upgrade --install affine .github/helm/affine`,
`--namespace ${namespace}`,
`--set global.ingress.enabled=true`,
`--set-json global.ingress.annotations=\"{ \\"kubernetes.io/ingress.class\\": \\"gce\\", \\"kubernetes.io/ingress.allow-http\\": \\"true\\", \\"kubernetes.io/ingress.global-static-ip-name\\": \\"${STATIC_IP_NAME}\\" }\"`,
`--set-json global.ingress.annotations=\"{ \\"kubernetes.io/ingress.class\\": \\"gce\\", \\"kubernetes.io/ingress.allow-http\\": \\"true\\", \\"kubernetes.io/ingress.global-static-ip-name\\": \\"${staticIpName}\\" }\"`,
`--set-string global.ingress.host="${host}"`,
`--set-string global.version="${APP_VERSION}"`,
...redisAndPostgres,
`--set web.replicaCount=${webReplicaCount}`,
`--set-string web.image.tag="${imageTag}"`,
@@ -110,7 +105,7 @@ const createHelmCommand = ({ isDryRun }) => {
`--set sync.replicaCount=${syncReplicaCount}`,
`--set-string sync.image.tag="${imageTag}"`,
...serviceAnnotations,
`--timeout 10m`,
`--version "0.0.0-${buildType}.${GIT_SHORT_HASH}" --timeout 10m`,
flag,
].join(' ');
return deployCommand;

View File

@@ -1,22 +0,0 @@
name: 'Download core artifacts'
description: 'Download core artifacts and extract to dist'
inputs:
path:
description: 'Path to extract'
required: true
runs:
using: 'composite'
steps:
- name: Download tar.gz
uses: actions/download-artifact@v3
with:
name: core
path: .
- name: Extract core artifacts
shell: bash
run: |
mkdir -p ${{ inputs.path }}
tar -xvf dist.tar.gz --directory ${{ inputs.path }}
rm dist.tar.gz

View File

@@ -36,19 +36,17 @@ inputs:
description: 'Set enableScripts in .yarnrc.yml'
required: false
default: 'true'
full-cache:
description: 'Full installation cache'
required: false
runs:
using: 'composite'
steps:
- name: Setup Node.js
uses: actions/setup-node@v4
uses: actions/setup-node@v3
with:
node-version-file: '.nvmrc'
registry-url: https://npm.pkg.github.com
scope: '@toeverything'
cache: 'yarn'
- name: Set nmMode
if: ${{ inputs.hard-link-nm == 'false' }}
@@ -65,48 +63,6 @@ runs:
shell: bash
run: yarn config set enableScripts false
- name: Set yarn global cache path
shell: bash
id: yarn-cache
run: node -e "const p = $(yarn config cacheFolder --json).effective; console.log('yarn_global_cache=' + p)" >> $GITHUB_OUTPUT
- name: Cache non-full yarn cache on Linux
uses: actions/cache@v3
if: ${{ inputs.full-cache != 'true' && runner.os == 'Linux' }}
with:
path: |
node_modules
${{ steps.yarn-cache.outputs.yarn_global_cache }}
key: node_modules-cache-${{ github.job }}-${{ runner.os }}
# The network performance on macOS is very poor
# and the decompression performance on Windows is very terrible
# so we reduce the number of cached files on non-Linux systems by removing node_modules from the cache path.
- name: Cache non-full yarn cache on non-Linux
uses: actions/cache@v3
if: ${{ inputs.full-cache != 'true' && runner.os != 'Linux' }}
with:
path: |
${{ steps.yarn-cache.outputs.yarn_global_cache }}
key: node_modules-cache-${{ github.job }}-${{ runner.os }}
- name: Cache full yarn cache on Linux
uses: actions/cache@v3
if: ${{ inputs.full-cache == 'true' && runner.os == 'Linux' }}
with:
path: |
node_modules
${{ steps.yarn-cache.outputs.yarn_global_cache }}
key: node_modules-cache-full-${{ runner.os }}
- name: Cache full yarn cache on non-Linux
uses: actions/cache@v3
if: ${{ inputs.full-cache == 'true' && runner.os != 'Linux' }}
with:
path: |
${{ steps.yarn-cache.outputs.yarn_global_cache }}
key: node_modules-cache-full-${{ runner.os }}
- name: yarn install
if: ${{ inputs.package-install == 'true' }}
continue-on-error: true
@@ -146,8 +102,8 @@ runs:
id: playwright-cache
if: ${{ inputs.playwright-install == 'true' }}
with:
path: ${{ github.workspace }}/node_modules/.cache/ms-playwright
key: '${{ runner.os }}-playwright-${{ steps.playwright-version.outputs.version }}'
path: '~/.cache/ms-playwright'
key: '${{ runner.os }}-${{ runner.arch }}-playwright-${{ steps.playwright-version.outputs.version }}'
# As a fallback, if the Playwright version has changed, try use the
# most recently cached version. There's a good chance that at least one
# of the browser binary versions haven't been updated, so Playwright can
@@ -157,7 +113,7 @@ runs:
# date cache, but still let Playwright decide if it needs to download
# new binaries or not.
restore-keys: |
${{ runner.os }}-playwright-
${{ runner.os }}-${{ runner.arch }}-playwright-
# If the Playwright browser binaries weren't able to be restored, we tell
# playwright to install everything for us.
@@ -165,8 +121,6 @@ runs:
shell: bash
if: inputs.playwright-install == 'true'
run: yarn playwright install --with-deps chromium
env:
PLAYWRIGHT_BROWSERS_PATH: ${{ github.workspace }}/node_modules/.cache/ms-playwright
- name: Get installed Electron version
id: electron-version
@@ -180,16 +134,16 @@ runs:
if: ${{ inputs.electron-install == 'true' }}
with:
path: 'node_modules/.cache/electron'
key: '${{ runner.os }}-electron-${{ steps.electron-version.outputs.version }}'
key: '${{ runner.os }}-{{ runner.arch }}-electron-${{ steps.electron-version.outputs.version }}'
restore-keys: |
${{ runner.os }}-electron-
${{ runner.os }}-{{ runner.arch }}-electron-
- name: Install Electron binary
shell: bash
if: inputs.electron-install == 'true'
run: node ./node_modules/electron/install.js
env:
electron_config_cache: ./node_modules/.cache/electron
ELECTRON_OVERRIDE_DIST_PATH: ./node_modules/.cache/electron
- name: Build Infra
shell: bash

View File

@@ -1,20 +0,0 @@
name: Setup Version
description: 'Setup Version'
runs:
using: 'composite'
steps:
- name: 'Write Version'
id: version
shell: bash
run: |
if [ "${{ github.ref_type }}" == "tag" ]; then
APP_VERSION=$(echo "${{ github.ref_name }}" | sed 's/^v//')
else
PACKAGE_VERSION=$(node -p "require('./package.json').version")
TIME_VERSION=$(date +%Y%m%d%H%M)
GIT_SHORT_HASH=$(git rev-parse --short HEAD)
APP_VERSION=$PACKAGE_VERSION-nightly-$TIME_VERSION-$GIT_SHORT_HASH
fi
echo $APP_VERSION
echo "APP_VERSION=$APP_VERSION" >> "$GITHUB_OUTPUT"
./scripts/set-version.sh $APP_VERSION

.github/dependabot.yml
View File

@@ -0,0 +1,31 @@
version: 2
updates:
- package-ecosystem: 'npm'
directory: '/'
groups:
all-npm-dependencies:
patterns:
- '*'
schedule:
interval: 'weekly'
versioning-strategy: increase
commit-message:
prefix: 'chore'
- package-ecosystem: 'cargo'
directory: '/'
schedule:
interval: 'weekly'
versioning-strategy: auto
commit-message:
prefix: 'chore'
groups:
all-cargo-dependencies:
patterns:
- '*'
- package-ecosystem: 'github-actions'
directory: '/'
schedule:
interval: 'daily'
commit-message:
prefix: 'ci'

View File

@@ -1,6 +1,6 @@
dependencies:
- name: postgresql
repository: https://charts.bitnami.com/bitnami
version: 13.2.23
digest: sha256:5b64538509bd067bb0f67bf082847a2c5d66dc37d0b9d7948a40405d9c446400
generated: "2023-12-05T03:04:57.997927753Z"
version: 12.5.8
digest: sha256:c91c0dc1370e879538dc9d6e435e731a726ef99d6a3b081372318483792b48a7
generated: "2023-06-27T18:34:12.683806+08:00"

View File

@@ -8,5 +8,5 @@ appVersion: '0.6.1'
dependencies:
- name: postgresql
version: 13.2.23
version: 12.5.8
repository: https://charts.bitnami.com/bitnami

View File

@@ -3,4 +3,4 @@ name: affine
description: AFFiNE cloud chart
type: application
version: 0.0.0
appVersion: "0.11.0"
appVersion: '0.7.0-canary.18'

View File

@@ -1,23 +0,0 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*.orig
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/

View File

@@ -1,6 +0,0 @@
apiVersion: v2
name: cloud-sql-proxy
description: Google Cloud SQL Proxy
type: application
version: 0.0.0
appVersion: "2.8.1"

View File

@@ -1,18 +0,0 @@
{{- if .Values.global.database.gcloud.enabled -}}
1. Get the application URL by running these commands:
{{- if contains "NodePort" .Values.service.type }}
export NODE_PORT=$(kubectl get --namespace {{ .Release.Namespace }} -o jsonpath="{.spec.ports[0].nodePort}" services {{ include "gcloud-sql-proxy.fullname" . }})
export NODE_IP=$(kubectl get nodes --namespace {{ .Release.Namespace }} -o jsonpath="{.items[0].status.addresses[0].address}")
echo http://$NODE_IP:$NODE_PORT
{{- else if contains "LoadBalancer" .Values.service.type }}
NOTE: It may take a few minutes for the LoadBalancer IP to be available.
You can watch the status of by running 'kubectl get --namespace {{ .Release.Namespace }} svc -w {{ include "gcloud-sql-proxy.fullname" . }}'
export SERVICE_IP=$(kubectl get svc --namespace {{ .Release.Namespace }} {{ include "gcloud-sql-proxy.fullname" . }} --template "{{"{{ range (index .status.loadBalancer.ingress 0) }}{{.}}{{ end }}"}}")
echo http://$SERVICE_IP:{{ .Values.service.port }}
{{- else if contains "ClusterIP" .Values.service.type }}
export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "gcloud-sql-proxy.name" . }},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}")
export CONTAINER_PORT=$(kubectl get pod --namespace {{ .Release.Namespace }} $POD_NAME -o jsonpath="{.spec.containers[0].ports[0].containerPort}")
echo "Visit http://127.0.0.1:8080 to use your application"
kubectl --namespace {{ .Release.Namespace }} port-forward $POD_NAME 8080:$CONTAINER_PORT
{{- end }}
{{- end }}

View File

@@ -1,62 +0,0 @@
{{/*
Expand the name of the chart.
*/}}
{{- define "gcloud-sql-proxy.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
If release name contains chart name it will be used as a full name.
*/}}
{{- define "gcloud-sql-proxy.fullname" -}}
{{- if .Values.fullnameOverride }}
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- $name := default .Chart.Name .Values.nameOverride }}
{{- if contains $name .Release.Name }}
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
{{- end }}
{{- end }}
{{- end }}
{{/*
Create chart name and version as used by the chart label.
*/}}
{{- define "gcloud-sql-proxy.chart" -}}
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Common labels
*/}}
{{- define "gcloud-sql-proxy.labels" -}}
helm.sh/chart: {{ include "gcloud-sql-proxy.chart" . }}
{{ include "gcloud-sql-proxy.selectorLabels" . }}
{{- if .Chart.AppVersion }}
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
{{- end }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end }}
{{/*
Selector labels
*/}}
{{- define "gcloud-sql-proxy.selectorLabels" -}}
app.kubernetes.io/name: {{ include "gcloud-sql-proxy.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}
{{/*
Create the name of the service account to use
*/}}
{{- define "gcloud-sql-proxy.serviceAccountName" -}}
{{- if .Values.serviceAccount.create }}
{{- default (include "gcloud-sql-proxy.fullname" .) .Values.serviceAccount.name }}
{{- else }}
{{- default "default" .Values.serviceAccount.name }}
{{- end }}
{{- end }}

View File

@@ -1,132 +0,0 @@
{{- if .Values.global.database.gcloud.enabled -}}
apiVersion: apps/v1
kind: Deployment
metadata:
name: {{ include "gcloud-sql-proxy.fullname" . }}
labels:
{{- include "gcloud-sql-proxy.labels" . | nindent 4 }}
spec:
replicas: {{ .Values.replicaCount }}
selector:
matchLabels:
{{- include "gcloud-sql-proxy.selectorLabels" . | nindent 6 }}
template:
metadata:
{{- with .Values.podAnnotations }}
annotations:
{{- toYaml . | nindent 8 }}
{{- end }}
labels:
{{- include "gcloud-sql-proxy.labels" . | nindent 8 }}
{{- with .Values.podLabels }}
{{- toYaml . | nindent 8 }}
{{- end }}
spec:
{{- with .Values.imagePullSecrets }}
imagePullSecrets:
{{- toYaml . | nindent 8 }}
{{- end }}
serviceAccountName: {{ include "gcloud-sql-proxy.serviceAccountName" . }}
securityContext:
{{- toYaml .Values.podSecurityContext | nindent 8 }}
containers:
- name: {{ .Chart.Name }}
securityContext:
{{- toYaml .Values.securityContext | nindent 12 }}
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: File
image: "{{ .Values.image.repository }}:{{ .Chart.AppVersion }}"
imagePullPolicy: {{ .Values.image.pullPolicy }}
args:
- "--address"
- "0.0.0.0"
- "--structured-logs"
- "--auto-iam-authn"
- "{{ .Values.global.database.gcloud.connectionName }}"
env:
# Enable HTTP healthchecks on port 9801. This enables /liveness,
# /readiness and /startup health check endpoints. Allow connections
# listen for connections on any interface (0.0.0.0) so that the
# k8s management components can reach these endpoints.
- name: CSQL_PROXY_HEALTH_CHECK
value: "true"
- name: CSQL_PROXY_HTTP_PORT
value: "9801"
- name: CSQL_PROXY_HTTP_ADDRESS
value: 0.0.0.0
ports:
- name: cloud-sql-proxy
containerPort: {{ .Values.global.database.gcloud.proxyPort }}
protocol: TCP
- containerPort: 9801
protocol: TCP
# The /startup probe returns OK when the proxy is ready to receive
# connections from the application. In this example, k8s will check
# once a second for 60 seconds.
startupProbe:
failureThreshold: 60
httpGet:
path: /startup
port: 9801
scheme: HTTP
periodSeconds: 1
successThreshold: 1
timeoutSeconds: 10
# The /liveness probe returns OK as soon as the proxy application has
# begun its startup process and continues to return OK until the
# process stops.
livenessProbe:
failureThreshold: 3
httpGet:
path: /liveness
port: 9801
scheme: HTTP
# The probe will be checked every 10 seconds.
periodSeconds: 10
# Number of times the probe is allowed to fail before the transition
# from healthy to failure state.
#
# If periodSeconds = 60, 5 tries will result in five minutes of
# checks. The proxy starts to refresh a certificate five minutes
# before its expiration. If those five minutes lapse without a
# successful refresh, the liveness probe will fail and the pod will be
# restarted.
successThreshold: 1
# The probe will fail if it does not respond in 10 seconds
timeoutSeconds: 10
readinessProbe:
# The /readiness probe returns OK when the proxy can establish
# a new connections to its databases.
httpGet:
path: /readiness
port: 9801
initialDelaySeconds: 10
periodSeconds: 10
timeoutSeconds: 10
# Number of times the probe must report success to transition from failure to healthy state.
# Defaults to 1 for readiness probe.
successThreshold: 1
failureThreshold: 6
resources:
{{- toYaml .Values.resources | nindent 12 }}
{{- with .Values.volumeMounts }}
volumeMounts:
{{- toYaml . | nindent 12 }}
{{- end }}
{{- with .Values.volumes }}
volumes:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.nodeSelector }}
nodeSelector:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.tolerations }}
tolerations:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- end }}

View File

@@ -1,17 +0,0 @@
{{- if .Values.global.database.gcloud.enabled -}}
apiVersion: v1
kind: Service
metadata:
name: {{ include "gcloud-sql-proxy.fullname" . }}
labels:
{{- include "gcloud-sql-proxy.labels" . | nindent 4 }}
spec:
type: {{ .Values.service.type }}
ports:
- port: {{ .Values.global.database.port }}
targetPort: cloud-sql-proxy
protocol: TCP
name: cloud-sql-proxy
selector:
{{- include "gcloud-sql-proxy.selectorLabels" . | nindent 4 }}
{{- end }}

View File

@@ -1,15 +0,0 @@
{{- if .Values.global.database.gcloud.enabled -}}
{{- if .Values.serviceAccount.create -}}
apiVersion: v1
kind: ServiceAccount
metadata:
name: {{ include "gcloud-sql-proxy.serviceAccountName" . }}
labels:
{{- include "gcloud-sql-proxy.labels" . | nindent 4 }}
{{- with .Values.serviceAccount.annotations }}
annotations:
{{- toYaml . | nindent 4 }}
{{- end }}
automountServiceAccountToken: {{ .Values.serviceAccount.automount }}
{{- end }}
{{- end }}

View File

@@ -1,17 +0,0 @@
{{- if .Values.global.database.gcloud.enabled -}}
apiVersion: v1
kind: Pod
metadata:
name: "{{ include "gcloud-sql-proxy.fullname" . }}-test-connection"
labels:
{{- include "gcloud-sql-proxy.labels" . | nindent 4 }}
annotations:
"helm.sh/hook": test
spec:
containers:
- name: wget
image: busybox
command: ['wget']
args: ['{{ include "gcloud-sql-proxy.fullname" . }}:{{ .Values.service.port }}']
restartPolicy: Never
{{- end }}

View File

@@ -1,40 +0,0 @@
replicaCount: 3
image:
# the tag is defined as chart appVersion.
repository: gcr.io/cloud-sql-connectors/cloud-sql-proxy
pullPolicy: IfNotPresent
imagePullSecrets: []
nameOverride: ""
fullnameOverride: ""
serviceAccount:
create: true
automount: true
annotations: {}
name: ""
podAnnotations: {}
podLabels: {}
podSecurityContext:
fsGroup: 2000
securityContext:
runAsNonRoot: true
service:
type: ClusterIP
port: 5432
resources:
limits:
memory: "4Gi"
cpu: "2"
volumes: []
volumeMounts: []
nodeSelector: {}
tolerations: []
affinity: {}

View File

@@ -3,9 +3,4 @@ name: graphql
description: AFFiNE GraphQL server
type: application
version: 0.0.0
appVersion: "0.11.0"
dependencies:
- name: gcloud-sql-proxy
version: 0.0.0
repository: "file://../gcloud-sql-proxy"
condition: .global.database.gcloud.enabled
appVersion: '0.7.0-canary.18'

View File

@@ -189,6 +189,20 @@ spec:
initialDelaySeconds: {{ .Values.probe.initialDelaySeconds }}
resources:
{{- toYaml .Values.resources | nindent 12 }}
{{ if .Values.global.database.gcloud.enabled }}
- name: cloud-sql-proxy
image: gcr.io/cloud-sql-connectors/cloud-sql-proxy:2.6.0
args:
- "--structured-logs"
- "--auto-iam-authn"
- "{{ .Values.global.database.gcloud.connectionName }}"
securityContext:
runAsNonRoot: true
resources:
requests:
memory: "2Gi"
cpu: "1"
{{ end }}
{{- with .Values.nodeSelector }}
nodeSelector:
{{- toYaml . | nindent 8 }}

View File

@@ -1,11 +1,6 @@
apiVersion: v2
name: sync
description: AFFiNE Sync Server
description: A Helm chart for Kubernetes
type: application
version: 0.0.0
appVersion: "0.11.0"
dependencies:
- name: gcloud-sql-proxy
version: 0.0.0
repository: "file://../gcloud-sql-proxy"
condition: .global.database.gcloud.enabled
appVersion: "0.7.0-canary.18"

View File

@@ -82,6 +82,20 @@ spec:
initialDelaySeconds: {{ .Values.probe.initialDelaySeconds }}
resources:
{{- toYaml .Values.resources | nindent 12 }}
{{ if .Values.global.database.gcloud.enabled }}
- name: cloud-sql-proxy
image: gcr.io/cloud-sql-connectors/cloud-sql-proxy:2.6.0
args:
- "--structured-logs"
- "--auto-iam-authn"
- "{{ .Values.global.database.gcloud.connectionName }}"
securityContext:
runAsNonRoot: true
resources:
requests:
memory: "2Gi"
cpu: "1"
{{ end }}
{{- with .Values.nodeSelector }}
nodeSelector:
{{- toYaml . | nindent 8 }}

View File

@@ -16,8 +16,6 @@ global:
cloudSqlInternal: ''
connectionName: ''
serviceAccount: ''
cloudProxyReplicas: 3
proxyPort: '5432'
redis:
enabled: true
host: 'redis-master'

.github/labeler.yml
View File

@@ -1,115 +1,62 @@
docs:
- changed-files:
- any-glob-to-any-file:
- 'docs/**/*'
- '**/README.md'
- 'packages/frontend/templates/**/*'
- 'docs/**/*'
- '**/README.md'
- 'packages/frontend/templates/**/*'
test:
- changed-files:
- any-glob-to-any-file:
- 'tests/**/*'
- '**/tests/**/*'
- '**/__tests__/**/*'
- 'tests/**/*'
- '**/tests/**/*'
- '**/__tests__/**/*'
mod:dev:
- changed-files:
- any-glob-to-any-file:
- 'scripts/**/*'
- 'tools/cli/**/*'
- 'packages/common/debug/**/*'
- 'scripts/**/*'
- 'tools/cli/**/*'
- 'packages/common/debug/**/*'
mod:plugin:
- changed-files:
- any-glob-to-any-file:
- 'packages/plugins/**/*'
- 'packages/plugins/**/*'
plugin:copilot:
- changed-files:
- any-glob-to-any-file:
- 'packages/plugins/copilot/**/*'
- 'packages/plugins/copilot/**/*'
mod:infra:
- changed-files:
- any-glob-to-any-file:
- 'packages/common/infra/**/*'
- 'packages/common/infra/**/*'
mod:sdk:
- changed-files:
- any-glob-to-any-file:
- 'packages/common/sdk/**/*'
- 'packages/common/sdk/**/*'
mod:plugin-cli:
- changed-files:
- any-glob-to-any-file:
- 'tools/plugin-cli/**/*'
- 'tools/plugin-cli/**/*'
mod:workspace:
- changed-files:
- any-glob-to-any-file:
- 'packages/frontend/workspace/**/*'
mod:workspace: 'packages/frontend/workspace/**/*'
mod:i18n:
- changed-files:
- any-glob-to-any-file:
- 'packages/frontend/i18n/**/*'
mod:i18n: 'packages/frontend/i18n/**/*'
mod:env:
- changed-files:
- any-glob-to-any-file:
- 'packages/common/env/**/*'
mod:env: 'packages/common/env/**/*'
mod:hooks:
- changed-files:
- any-glob-to-any-file:
- 'packages/frontend/hooks/**/*'
mod:hooks: 'packages/frontend/hooks/**/*'
mod:component:
- changed-files:
- any-glob-to-any-file:
- 'packages/frontend/component/**/*'
mod:component: 'packages/frontend/component/**/*'
mod:storage:
- changed-files:
- any-glob-to-any-file:
- 'packages/backend/storage/**/*'
mod:storage: 'packages/backend/storage/**/*'
mod:native:
- changed-files:
- any-glob-to-any-file:
- 'packages/frontend/native/**/*'
mod:native: 'packages/frontend/native/**/*'
mod:store:
- changed-files:
- any-glob-to-any-file:
- '**/atoms/**/*'
- '**/atoms/**/*'
rust:
- changed-files:
- any-glob-to-any-file:
- '**/*.rs'
- '**/Cargo.toml'
- '**/Cargo.lock'
- '**/rust-toolchain'
- '**/rust-toolchain.toml'
- '**/rustfmt.toml'
- '**/*.rs'
- '**/Cargo.toml'
- '**/Cargo.lock'
- '**/rust-toolchain'
- '**/rust-toolchain.toml'
- '**/rustfmt.toml'
package:y-indexeddb:
- changed-files:
- any-glob-to-any-file:
- 'packages/common/y-indexeddb/**/*'
package:y-indexeddb: 'packages/common/y-indexeddb/**/*'
app:core:
- changed-files:
- any-glob-to-any-file:
- 'packages/frontend/core/**/*'
app:core: 'packages/frontend/core/**/*'
app:electron:
- changed-files:
- any-glob-to-any-file:
- 'packages/frontend/electron/**/*'
app:electron: 'packages/frontend/electron/**/*'
app:server:
- changed-files:
- any-glob-to-any-file:
- 'packages/backend/server/**/*'
app:server: 'packages/backend/server/**/*'

.github/renovate.json
View File

@@ -1,53 +0,0 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": [
"config:base",
"group:allNonMajor",
":preserveSemverRanges",
":disablePeerDependencies"
],
"labels": ["dependencies"],
"packageRules": [
{
"matchPackageNames": ["napi", "napi-build", "napi-derive"],
"groupName": "napi-rs"
},
{
"matchPackagePatterns": ["^eslint", "^@typescript-eslint"],
"groupName": "linter"
},
{
"matchPackagePatterns": ["^@nestjs"],
"groupName": "nestjs"
},
{
"matchPackagePatterns": ["^@opentelemetry"],
"groupName": "opentelemetry"
},
{
"matchPackageNames": [
"@prisma/client",
"@prisma/instrumentation",
"prisma"
],
"groupName": "prisma"
},
{
"matchPackagePatterns": ["^@electron-forge"],
"groupName": "electron-forge"
},
{
"matchPackagePatterns": ["^@blocksuite"],
"excludePackageNames": ["@blocksuite/icons"],
"followTag": "nightly"
}
],
"commitMessagePrefix": "chore: ",
"commitMessageAction": "bump up",
"commitMessageTopic": "{{depName}} version",
"ignoreDeps": [],
"lockFileMaintenance": {
"enabled": true,
"extends": ["schedule:weekly"]
}
}

View File

@@ -9,5 +9,4 @@ jobs:
pull-requests: write
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/labeler@v5
- uses: actions/labeler@v4

.github/workflows/build-desktop.yml
View File

@@ -0,0 +1,190 @@
name: Build(Desktop) & Test
on:
push:
branches:
- canary
- v[0-9]+.[0-9]+.x-staging
- v[0-9]+.[0-9]+.x
paths-ignore:
- README.md
- .github/**
- '!.github/workflows/build-desktop.yml'
- '!.github/actions/build-rust/action.yml'
- '!.github/actions/setup-node/action.yml'
pull_request:
merge_group:
branches:
- canary
- v[0-9]+.[0-9]+.x-staging
- v[0-9]+.[0-9]+.x
paths-ignore:
- README.md
- .github/**
- '!.github/workflows/build-desktop.yml'
- '!.github/actions/build-rust/action.yml'
- '!.github/actions/setup-node/action.yml'
env:
DEBUG: napi:*
BUILD_TYPE: canary
APP_NAME: affine
COVERAGE: true
DISTRIBUTION: desktop
MACOSX_DEPLOYMENT_TARGET: '10.13'
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
jobs:
build-core:
name: Build @affine/core
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
- name: Build Core
run: yarn nx build @affine/core
- name: Upload core artifact
uses: actions/upload-artifact@v3
with:
name: core
path: ./packages/frontend/core/dist
if-no-files-found: error
build-native:
name: Build Native
runs-on: ubuntu-latest
needs: build-core
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build AFFiNE native
uses: ./.github/actions/build-rust
with:
target: x86_64-unknown-linux-gnu
package: '@affine/native'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Run tests
run: yarn test
working-directory: ./packages/frontend/native
desktop-test:
name: Desktop Test
runs-on: ${{ matrix.spec.os }}
strategy:
fail-fast: false
# all combinations: macos-latest x64, macos-latest arm64, windows-latest x64, ubuntu-latest x64
matrix:
spec:
- {
os: macos-latest,
platform: macos,
arch: x64,
target: x86_64-apple-darwin,
test: true,
}
- {
os: macos-latest,
platform: macos,
arch: arm64,
target: aarch64-apple-darwin,
test: false,
}
- {
os: ubuntu-latest,
platform: linux,
arch: x64,
target: x86_64-unknown-linux-gnu,
test: true,
}
- {
os: windows-latest,
platform: windows,
arch: x64,
target: x86_64-pc-windows-msvc,
test: true,
}
needs: build-core
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
timeout-minutes: 10
with:
extra-flags: workspaces focus @affine/electron @affine/monorepo @affine-test/affine-desktop
playwright-install: true
hard-link-nm: false
enableScripts: false
- name: Build AFFiNE native
uses: ./.github/actions/build-rust
with:
target: ${{ matrix.spec.target }}
package: '@affine/native'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Run unit tests
if: ${{ matrix.spec.test }}
shell: bash
run: yarn vitest
working-directory: packages/frontend/electron
- name: Download core artifact
uses: actions/download-artifact@v3
with:
name: core
path: packages/frontend/electron/resources/web-static
- name: Build Desktop Layers
run: yarn workspace @affine/electron build
- name: Run desktop tests
if: ${{ matrix.spec.test && matrix.spec.os == 'ubuntu-latest' }}
run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- yarn workspace @affine-test/affine-desktop e2e
env:
COVERAGE: true
- name: Run desktop tests
if: ${{ matrix.spec.test && matrix.spec.os != 'ubuntu-latest' }}
run: yarn workspace @affine-test/affine-desktop e2e
env:
COVERAGE: true
- name: Make bundle
if: ${{ matrix.spec.os == 'macos-latest' && matrix.spec.arch == 'arm64' }}
env:
SKIP_BUNDLE: true
SKIP_WEB_BUILD: true
HOIST_NODE_MODULES: 1
run: yarn workspace @affine/electron package --platform=darwin --arch=arm64
- name: Output check
if: ${{ matrix.spec.os == 'macos-latest' && matrix.spec.arch == 'arm64' }}
run: |
yarn workspace @affine/electron ts-node ./scripts/macos-arm64-output-check.ts
- name: Collect code coverage report
if: ${{ matrix.spec.test }}
run: yarn exec nyc report -t .nyc_output --report-dir .coverage --reporter=lcov
- name: Upload e2e test coverage results
if: ${{ matrix.spec.test }}
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./.coverage/lcov.info
flags: e2etest-${{ matrix.spec.os }}-${{ matrix.spec.arch }}
name: affine
fail_ci_if_error: false
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-${{ matrix.spec.os }}-${{ matrix.spec.arch }}
path: ./test-results
if-no-files-found: ignore

.github/workflows/build-server.yml
View File

@@ -0,0 +1,311 @@
name: Build(Server) & Test
on:
push:
branches:
- canary
- v[0-9]+.[0-9]+.x-staging
- v[0-9]+.[0-9]+.x
paths-ignore:
- README.md
- .github/**
- '!.github/workflows/build-server.yml'
- '!.github/actions/build-rust/action.yml'
- '!.github/actions/setup-node/action.yml'
pull_request:
merge_group:
branches:
- canary
- v[0-9]+.[0-9]+.x-staging
- v[0-9]+.[0-9]+.x
paths-ignore:
- README.md
- .github/**
- '!.github/workflows/build-server.yml'
- '!.github/actions/build-rust/action.yml'
- '!.github/actions/setup-node/action.yml'
env:
DEBUG: napi:*
BUILD_TYPE: canary
APP_NAME: affine
COVERAGE: true
DISTRIBUTION: browser
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
jobs:
build-storage:
name: Build Storage
runs-on: ubuntu-latest
env:
RUSTFLAGS: '-C debuginfo=1'
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
extra-flags: workspaces focus @affine/storage
electron-install: false
build-infra: false
build-plugins: false
- name: Build Rust
uses: ./.github/actions/build-rust
with:
target: 'x86_64-unknown-linux-gnu'
package: '@affine/storage'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Upload storage.node
uses: actions/upload-artifact@v3
with:
name: storage.node
path: ./packages/backend/storage/storage.node
if-no-files-found: error
server-test:
name: Server Test
runs-on: ubuntu-latest
needs: build-storage
services:
postgres:
image: postgres
env:
POSTGRES_PASSWORD: affine
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
mailer:
image: mailhog/mailhog
ports:
- 1025:1025
- 8025:8025
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
- name: Initialize database
run: |
psql -h localhost -U postgres -c "CREATE DATABASE affine;"
psql -h localhost -U postgres -c "CREATE USER affine WITH PASSWORD 'affine';"
psql -h localhost -U postgres -c "ALTER USER affine WITH SUPERUSER;"
env:
PGPASSWORD: affine
- name: Generate prisma client
run: |
yarn workspace @affine/server exec prisma generate
yarn workspace @affine/server exec prisma db push
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Run init-db script
run: yarn workspace @affine/server exec ts-node ./scripts/init-db.ts
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Download storage.node
uses: actions/download-artifact@v3
with:
name: storage.node
path: ./packages/backend/server
- name: Run server tests
run: yarn workspace @affine/server test:coverage
env:
CARGO_TARGET_DIR: '${{ github.workspace }}/target'
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Upload server test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./packages/backend/server/.coverage/lcov.info
flags: server-test
name: affine
fail_ci_if_error: false
server-e2e-test:
name: Server E2E Test
runs-on: ubuntu-latest
needs: build-storage
services:
postgres:
image: postgres
env:
POSTGRES_PASSWORD: affine
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
mailer:
image: mailhog/mailhog
ports:
- 1025:1025
- 8025:8025
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
- name: Initialize database
run: |
psql -h localhost -U postgres -c "CREATE DATABASE affine;"
psql -h localhost -U postgres -c "CREATE USER affine WITH PASSWORD 'affine';"
psql -h localhost -U postgres -c "ALTER USER affine WITH SUPERUSER;"
env:
PGPASSWORD: affine
- name: Generate prisma client
run: |
yarn workspace @affine/server exec prisma generate
yarn workspace @affine/server exec prisma db push
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Run init-db script
run: yarn workspace @affine/server exec ts-node ./scripts/init-db.ts
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Download storage.node
uses: actions/download-artifact@v3
with:
name: storage.node
path: ./packages/backend/server
- name: Run playwright tests
run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- yarn workspace @affine-test/affine-cloud e2e --forbid-only
env:
COVERAGE: true
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Collect code coverage report
run: yarn exec nyc report -t .nyc_output --report-dir .coverage --reporter=lcov
- name: Upload e2e test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./.coverage/lcov.info
flags: server-e2etest
name: affine
fail_ci_if_error: false
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-server
path: ./tests/affine-cloud/test-results
if-no-files-found: ignore
server-desktop-e2e-test:
name: Server Desktop E2E Test
runs-on: ubuntu-latest
needs: build-storage
services:
postgres:
image: postgres
env:
POSTGRES_PASSWORD: affine
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
mailer:
image: mailhog/mailhog
ports:
- 1025:1025
- 8025:8025
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
hard-link-nm: false
- name: Build AFFiNE native
uses: ./.github/actions/build-rust
with:
target: x86_64-unknown-linux-gnu
package: '@affine/native'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Initialize database
run: |
psql -h localhost -U postgres -c "CREATE DATABASE affine;"
psql -h localhost -U postgres -c "CREATE USER affine WITH PASSWORD 'affine';"
psql -h localhost -U postgres -c "ALTER USER affine WITH SUPERUSER;"
env:
PGPASSWORD: affine
- name: Generate prisma client
run: |
yarn workspace @affine/server exec prisma generate
yarn workspace @affine/server prisma db push
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Run init-db script
run: yarn workspace @affine/server exec ts-node ./scripts/init-db.ts
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Download storage.node
uses: actions/download-artifact@v3
with:
name: storage.node
path: ./packages/backend/server
- name: Build Plugins
run: yarn run build:plugins
- name: Build Desktop Layers
run: yarn workspace @affine/electron build:dev
- name: Run playwright tests
run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" yarn workspace @affine-test/affine-desktop-cloud e2e
env:
COVERAGE: true
DEV_SERVER_URL: http://localhost:8080
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
ENABLE_LOCAL_EMAIL: true
- name: Collect code coverage report
run: yarn exec nyc report -t .nyc_output --report-dir .coverage --reporter=lcov
- name: Upload e2e test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./.coverage/lcov.info
flags: server-e2etest
name: affine
fail_ci_if_error: false
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-server
path: ./tests/affine-cloud/test-results
if-no-files-found: ignore

View File

@@ -1,603 +0,0 @@
name: Build & Test
on:
push:
branches:
- canary
- v[0-9]+.[0-9]+.x-staging
- v[0-9]+.[0-9]+.x
paths-ignore:
- README.md
pull_request:
env:
DEBUG: napi:*
BUILD_TYPE: canary
APP_NAME: affine
AFFINE_ENV: dev
COVERAGE: true
MACOSX_DEPLOYMENT_TARGET: '10.13'
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
PLAYWRIGHT_BROWSERS_PATH: ${{ github.workspace }}/node_modules/.cache/ms-playwright
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: ['javascript', 'typescript']
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- name: Checkout repository
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v3
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# If the Autobuild fails above, remove it and uncomment the following three lines.
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
# - run: |
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
lint:
name: Lint
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Run oxlint
# oxlint is fast, so wrong code will fail quickly
run: yarn dlx $(node -e "console.log(require('./package.json').scripts['lint:ox'].replace('oxlint', 'oxlint@' + require('./package.json').devDependencies.oxlint))")
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
full-cache: true
- name: Run i18n codegen
run: yarn i18n-codegen gen
- name: Run ESLint
run: yarn lint:eslint --max-warnings=0
- name: Run Prettier
# Set nmMode in `actions/setup-node` will modify the .yarnrc.yml
run: |
git checkout .yarnrc.yml
yarn lint:prettier
- name: Run Type Check
run: yarn typecheck
check-yarn-binary:
name: Check yarn binary
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Run check
run: |
yarn set version $(node -e "console.log(require('./package.json').packageManager.split('@')[1])")
git diff --exit-code
e2e-plugin-test:
name: E2E Plugin Test
runs-on: ubuntu-latest
env:
DISTRIBUTION: browser
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
electron-install: false
full-cache: true
- name: Run playwright tests
run: yarn e2e --forbid-only
working-directory: tests/affine-plugin
env:
COVERAGE: true
- name: Collect code coverage report
run: yarn exec nyc report -t .nyc_output --report-dir .coverage --reporter=lcov
- name: Upload e2e test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./.coverage/lcov.info
flags: e2e-plugin-test
name: affine
fail_ci_if_error: false
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-plugin
path: ./test-results
if-no-files-found: ignore
e2e-test:
name: E2E Test
runs-on: ubuntu-latest
env:
DISTRIBUTION: browser
strategy:
fail-fast: false
matrix:
shard: [1, 2, 3, 4, 5]
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
electron-install: false
full-cache: true
- name: Run playwright tests
run: yarn workspace @affine-test/affine-local e2e --forbid-only --shard=${{ matrix.shard }}/${{ strategy.job-total }}
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-${{ matrix.shard }}
path: ./test-results
if-no-files-found: ignore
e2e-migration-test:
name: E2E Migration Test
runs-on: ubuntu-latest
env:
DISTRIBUTION: browser
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
electron-install: false
full-cache: true
- name: Run playwright tests
run: yarn workspace @affine-test/affine-migration e2e --forbid-only
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-migration
path: ./tests/affine-migration/test-results
if-no-files-found: ignore
unit-test:
name: Unit Test
runs-on: ubuntu-latest
needs:
- build-native
env:
DISTRIBUTION: browser
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
full-cache: true
- name: Download affine.linux-x64-gnu.node
uses: actions/download-artifact@v3
with:
name: affine.linux-x64-gnu.node
path: ./packages/frontend/native
- name: Unit Test
run: yarn nx test:coverage @affine/monorepo
- name: Upload unit test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./.coverage/store/lcov.info
flags: unittest
name: affine
fail_ci_if_error: false
build-native:
name: Build AFFiNE native (${{ matrix.spec.target }})
runs-on: ${{ matrix.spec.os }}
env:
CARGO_PROFILE_RELEASE_DEBUG: '1'
strategy:
fail-fast: false
matrix:
spec:
- { os: ubuntu-latest, target: x86_64-unknown-linux-gnu }
- { os: windows-latest, target: x86_64-pc-windows-msvc }
- { os: macos-latest, target: x86_64-apple-darwin }
- { os: macos-latest, target: aarch64-apple-darwin }
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
extra-flags: workspaces focus @affine/native
electron-install: false
build-infra: false
build-plugins: false
- name: Setup filename
id: filename
shell: bash
run: |
export PLATFORM_ARCH_ABI=$(node -e "console.log(require('@napi-rs/cli').parseTriple('${{ matrix.spec.target }}').platformArchABI)")
echo "filename=affine.$PLATFORM_ARCH_ABI.node" >> "$GITHUB_OUTPUT"
- name: Build AFFiNE native
uses: ./.github/actions/build-rust
with:
target: ${{ matrix.spec.target }}
package: '@affine/native'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Upload ${{ steps.filename.outputs.filename }}
uses: actions/upload-artifact@v3
with:
name: ${{ steps.filename.outputs.filename }}
path: ./packages/frontend/native/${{ steps.filename.outputs.filename }}
if-no-files-found: error
build-storage:
name: Build Storage
runs-on: ubuntu-latest
env:
CARGO_PROFILE_RELEASE_DEBUG: '1'
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
extra-flags: workspaces focus @affine/storage
electron-install: false
build-infra: false
build-plugins: false
- name: Build Rust
uses: ./.github/actions/build-rust
with:
target: 'x86_64-unknown-linux-gnu'
package: '@affine/storage'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Upload storage.node
uses: actions/upload-artifact@v3
with:
name: storage.node
path: ./packages/backend/storage/storage.node
if-no-files-found: error
build-core:
name: Build @affine/core
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
build-plugins: false
full-cache: true
- name: Build Core
# always skip cache because its fast, and cache configuration is always changing
run: yarn nx build @affine/core --skip-nx-cache
- name: zip core
run: tar -czf dist.tar.gz --directory=packages/frontend/core/dist .
- name: Upload core artifact
uses: actions/upload-artifact@v3
with:
name: core
path: dist.tar.gz
if-no-files-found: error
server-test:
name: Server Test
runs-on: ubuntu-latest
needs: build-storage
env:
DISTRIBUTION: browser
services:
postgres:
image: postgres
env:
POSTGRES_PASSWORD: affine
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
mailer:
image: mailhog/mailhog
ports:
- 1025:1025
- 8025:8025
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
full-cache: true
- name: Initialize database
run: |
psql -h localhost -U postgres -c "CREATE DATABASE affine;"
psql -h localhost -U postgres -c "CREATE USER affine WITH PASSWORD 'affine';"
psql -h localhost -U postgres -c "ALTER USER affine WITH SUPERUSER;"
env:
PGPASSWORD: affine
- name: Generate prisma client
run: |
yarn workspace @affine/server exec prisma generate
yarn workspace @affine/server exec prisma db push
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Run init-db script
run: |
yarn workspace @affine/server data-migration run
yarn workspace @affine/server exec node --loader ts-node/esm/transpile-only ./scripts/init-db.ts
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Download storage.node
uses: actions/download-artifact@v3
with:
name: storage.node
path: ./packages/backend/server
- name: Run server tests
run: yarn workspace @affine/server test:coverage
env:
CARGO_TARGET_DIR: '${{ github.workspace }}/target'
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Upload server test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./packages/backend/server/.coverage/lcov.info
flags: server-test
name: affine
fail_ci_if_error: false
server-e2e-test:
name: ${{ matrix.tests.name }}
runs-on: ubuntu-latest
env:
DISTRIBUTION: browser
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
strategy:
fail-fast: false
matrix:
tests:
- name: 'Server E2E Test 1/3'
script: yarn workspace @affine-test/affine-cloud e2e --forbid-only --shard=1/3
- name: 'Server E2E Test 2/3'
script: yarn workspace @affine-test/affine-cloud e2e --forbid-only --shard=2/3
- name: 'Server E2E Test 3/3'
script: yarn workspace @affine-test/affine-cloud e2e --forbid-only --shard=3/3
- name: 'Server Desktop E2E Test'
script: |
yarn workspace @affine/electron build:dev
xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- yarn workspace @affine-test/affine-desktop-cloud e2e
needs:
- build-storage
- build-native
services:
postgres:
image: postgres
env:
POSTGRES_PASSWORD: affine
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
mailer:
image: mailhog/mailhog
ports:
- 1025:1025
- 8025:8025
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
hard-link-nm: false
- name: Initialize database
run: |
psql -h localhost -U postgres -c "CREATE DATABASE affine;"
psql -h localhost -U postgres -c "CREATE USER affine WITH PASSWORD 'affine';"
psql -h localhost -U postgres -c "ALTER USER affine WITH SUPERUSER;"
env:
PGPASSWORD: affine
- name: Generate prisma client
run: |
yarn workspace @affine/server exec prisma generate
yarn workspace @affine/server exec prisma db push
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Run init-db script
run: |
yarn workspace @affine/server data-migration run
yarn workspace @affine/server exec node --loader ts-node/esm/transpile-only ./scripts/init-db.ts
- name: Download storage.node
uses: actions/download-artifact@v3
with:
name: storage.node
path: ./packages/backend/server
- name: Download affine.linux-x64-gnu.node
uses: actions/download-artifact@v3
with:
name: affine.linux-x64-gnu.node
path: ./packages/frontend/native
- name: ${{ matrix.tests.name }}
run: |
${{ matrix.tests.script }}
env:
DEV_SERVER_URL: http://localhost:8080
ENABLE_LOCAL_EMAIL: true
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-server
path: ./tests/affine-cloud/test-results
if-no-files-found: ignore
desktop-test:
name: Desktop Test (${{ matrix.spec.os }}, ${{ matrix.spec.platform }}, ${{ matrix.spec.arch }}, ${{ matrix.spec.target }}, ${{ matrix.spec.test }})
runs-on: ${{ matrix.spec.os }}
strategy:
fail-fast: false
# all combinations: macos-latest x64, macos-latest arm64, windows-latest x64, ubuntu-latest x64
matrix:
spec:
- {
os: macos-latest,
platform: macos,
arch: x64,
target: x86_64-apple-darwin,
test: true,
}
- {
os: macos-latest,
platform: macos,
arch: arm64,
target: aarch64-apple-darwin,
test: false,
}
- {
os: ubuntu-latest,
platform: linux,
arch: x64,
target: x86_64-unknown-linux-gnu,
test: true,
}
- {
os: windows-latest,
platform: windows,
arch: x64,
target: x86_64-pc-windows-msvc,
test: true,
}
needs:
- build-core
- build-native
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
timeout-minutes: 10
with:
extra-flags: workspaces focus @affine/electron @affine/monorepo @affine-test/affine-desktop
playwright-install: true
hard-link-nm: false
enableScripts: false
- name: Setup filename
id: filename
shell: bash
run: |
export PLATFORM_ARCH_ABI=$(node -e "console.log(require('@napi-rs/cli').parseTriple('${{ matrix.spec.target }}').platformArchABI)")
echo "filename=affine.$PLATFORM_ARCH_ABI.node" >> "$GITHUB_OUTPUT"
- name: Download ${{ steps.filename.outputs.filename }}
uses: actions/download-artifact@v3
with:
name: ${{ steps.filename.outputs.filename }}
path: ./packages/frontend/native
- name: Run unit tests
if: ${{ matrix.spec.test }}
shell: bash
run: yarn vitest
working-directory: packages/frontend/electron
- name: Download core artifact
uses: ./.github/actions/download-core
with:
path: packages/frontend/electron/resources/web-static
- name: Build Desktop Layers
run: yarn workspace @affine/electron build
- name: Run desktop tests
if: ${{ matrix.spec.test && matrix.spec.os == 'ubuntu-latest' }}
run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- yarn workspace @affine-test/affine-desktop e2e
- name: Run desktop tests
if: ${{ matrix.spec.test && matrix.spec.os != 'ubuntu-latest' }}
run: yarn workspace @affine-test/affine-desktop e2e
- name: Make bundle
if: ${{ matrix.spec.os == 'macos-latest' && matrix.spec.arch == 'arm64' }}
env:
SKIP_BUNDLE: true
SKIP_WEB_BUILD: true
HOIST_NODE_MODULES: 1
run: yarn workspace @affine/electron package --platform=darwin --arch=arm64
- name: Output check
if: ${{ matrix.spec.os == 'macos-latest' && matrix.spec.arch == 'arm64' }}
run: |
yarn workspace @affine/electron exec node --loader ts-node/esm/transpile-only ./scripts/macos-arm64-output-check.ts
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-${{ matrix.spec.os }}-${{ matrix.spec.arch }}
path: ./test-results
if-no-files-found: ignore
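Note on the "Setup filename" steps in the jobs above: the native artifact name is derived from the Rust target triple with @napi-rs/cli, which is available once the setup-node step has installed dependencies. A minimal local reproduction of that step might look like this; for the Linux target the printed value matches the affine.linux-x64-gnu.node artifact consumed by the unit-test job:

// Derive the native addon filename the same way the "Setup filename" step does.
const { parseTriple } = require('@napi-rs/cli');
const abi = parseTriple('x86_64-unknown-linux-gnu').platformArchABI;
console.log(`affine.${abi}.node`); // -> affine.linux-x64-gnu.node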

201
.github/workflows/build.yml vendored Normal file

@@ -0,0 +1,201 @@
name: Build & Test
on:
push:
branches:
- canary
- v[0-9]+.[0-9]+.x-staging
- v[0-9]+.[0-9]+.x
paths-ignore:
- README.md
- .github/**
- '!.github/workflows/build.yml'
- '!.github/actions/build-rust/action.yml'
- '!.github/actions/setup-node/action.yml'
pull_request:
merge_group:
branches:
- canary
- v[0-9]+.[0-9]+.x-staging
- v[0-9]+.[0-9]+.x
paths-ignore:
- README.md
- .github/**
- '!.github/workflows/build.yml'
- '!.github/actions/build-rust/action.yml'
- '!.github/actions/setup-node/action.yml'
env:
DEBUG: napi:*
BUILD_TYPE: canary
APP_NAME: affine
AFFINE_ENV: dev
COVERAGE: true
DISTRIBUTION: browser
MACOSX_DEPLOYMENT_TARGET: '10.13'
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
jobs:
lint:
name: Lint
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Run oxlint
# oxlint is fast, so wrong code will fail quickly
run: yarn dlx oxlint@latest .
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
- name: Run i18n codegen
run: yarn i18n-codegen gen
- name: Run ESLint
run: yarn lint:eslint --max-warnings=0
- name: Run Prettier
# Setting nmMode in `actions/setup-node` will modify the .yarnrc.yml
run: |
git checkout .yarnrc.yml
yarn lint:prettier
- name: Run circular
run: yarn circular
- name: Run Type Check
run: yarn typecheck
check-yarn-binary:
name: Check yarn binary
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Run check
run: |
yarn set version $(node -e "console.log(require('./package.json').packageManager.split('@')[1])")
git diff --exit-code
e2e-plugin-test:
name: E2E Plugin Test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
electron-install: false
- name: Run playwright tests
run: yarn e2e --forbid-only
working-directory: tests/affine-plugin
env:
COVERAGE: true
- name: Collect code coverage report
run: yarn exec nyc report -t .nyc_output --report-dir .coverage --reporter=lcov
- name: Upload e2e test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./.coverage/lcov.info
flags: e2e-plugin-test
name: affine
fail_ci_if_error: false
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-plugin
path: ./test-results
if-no-files-found: ignore
e2e-test:
name: E2E Test
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
shard: [1, 2, 3, 4, 5]
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
electron-install: false
- name: Run playwright tests
run: yarn e2e --forbid-only --shard=${{ matrix.shard }}/${{ strategy.job-total }}
working-directory: tests/affine-local
env:
COVERAGE: true
- name: Collect code coverage report
run: yarn exec nyc report -t .nyc_output --report-dir .coverage --reporter=lcov
- name: Upload e2e test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./.coverage/lcov.info
flags: e2etest
name: affine
fail_ci_if_error: false
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-${{ matrix.shard }}
path: ./test-results
if-no-files-found: ignore
e2e-migration-test:
name: E2E Migration Test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
electron-install: false
- name: Run playwright tests
run: yarn workspace @affine-test/affine-migration e2e --forbid-only
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-migration
path: ./tests/affine-migration/test-results
if-no-files-found: ignore
unit-test:
name: Unit Test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
- name: Build AFFiNE native
uses: ./.github/actions/build-rust
with:
target: x86_64-unknown-linux-gnu
package: '@affine/native'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Unit Test
run: yarn nx test:coverage @affine/monorepo
- name: Upload unit test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./.coverage/store/lcov.info
flags: unittest
name: affine
fail_ci_if_error: false
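The e2e-test job above splits the Playwright suite with --shard=<n>/<total> across a 5-way matrix. As a rough sketch of what an n/total shard means (illustrative round-robin partitioning only; Playwright's own distribution of tests may differ):

// Illustrative only: shard n of `total` keeps every total-th test, starting at index n - 1.
function shardTests<T>(tests: T[], shard: number, total: number): T[] {
  return tests.filter((_, index) => index % total === shard - 1);
}
// With the 5-way matrix above, shard 2/5 keeps the tests at indexes 1, 6, 11, ...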

36
.github/workflows/cache-cleanup.yml vendored Normal file

@@ -0,0 +1,36 @@
# https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#force-deleting-cache-entries
name: Cleanup caches for closed branches
on:
pull_request:
types:
- closed
workflow_dispatch:
jobs:
cleanup:
runs-on: ubuntu-latest
steps:
- name: Check out code
uses: actions/checkout@v4
- name: Cleanup
run: |
gh extension install actions/gh-actions-cache
REPO=${{ github.repository }}
BRANCH="refs/pull/${{ github.event.pull_request.number }}/merge"
echo "Fetching list of cache key"
cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH | cut -f 1 )
## Setting this to not fail the workflow while deleting cache keys.
set +e
echo "Deleting caches..."
for cacheKey in $cacheKeysForPR
do
gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
done
echo "Done"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

18
.github/workflows/cancel.yml vendored Normal file

@@ -0,0 +1,18 @@
name: Cancel
on:
pull_request_target:
types:
- edited
- synchronize
jobs:
cancel:
name: 'Cancel Previous Runs'
runs-on: ubuntu-latest
timeout-minutes: 2
steps:
- uses: styfle/cancel-workflow-action@0.12.0
with:
# See https://api.github.com/repos/toeverything/AFFiNE/actions/workflows
workflow_id: 44038251, 61883931, 65188160, 66789140
access_token: ${{ github.token }}
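The workflow_id values above are hard-coded; they come from the REST endpoint referenced in the comment. A small script to re-check that list (assuming Node 18+ so fetch is global) could be:

// List the repository's workflow IDs so the hard-coded list above can be kept in sync.
async function listWorkflowIds() {
  const res = await fetch(
    'https://api.github.com/repos/toeverything/AFFiNE/actions/workflows',
    { headers: { accept: 'application/vnd.github+json' } }
  );
  const { workflows } = await res.json();
  for (const wf of workflows) {
    console.log(wf.id, wf.name, wf.path);
  }
}
listWorkflowIds().catch(err => {
  console.error(err);
  process.exit(1);
});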

70
.github/workflows/codeql.yml vendored Normal file

@@ -0,0 +1,70 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: 'CodeQL'
on:
push:
branches: [canary]
pull_request:
merge_group:
# The branches below must be a subset of the branches above
branches: [canary]
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: ['javascript']
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- name: Checkout repository
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# If the Autobuild fails above, remove it and uncomment the following three lines.
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
# - run: |
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2

View File

@@ -4,15 +4,12 @@ on:
workflow_dispatch:
inputs:
flavor:
description: 'Select what enverionment to deploy to'
type: choice
description: 'Build type (canary, beta, or stable)'
type: string
default: canary
options:
- canary
- beta
- stable
- internal
env:
BUILD_TYPE: canary
APP_NAME: affine
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
@@ -20,10 +17,9 @@ jobs:
build-server:
name: Build Server
runs-on: ubuntu-latest
environment: ${{ github.event.inputs.flavor }}
steps:
- uses: actions/checkout@v4
- name: Setup Version
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
@@ -39,29 +35,23 @@ jobs:
build-core:
name: Build @affine/core
runs-on: ubuntu-latest
environment: ${{ github.event.inputs.flavor }}
steps:
- uses: actions/checkout@v4
- name: Setup Version
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Plugins
run: yarn run build:plugins
- name: Build Core
run: yarn nx build @affine/core --skip-nx-cache
run: yarn nx build @affine/core
env:
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
BUILD_TYPE: ${{ github.event.inputs.flavor }}
BUILD_TYPE_OVERRIDE: ${{ github.event.inputs.flavor }}
SHOULD_REPORT_TRACE: true
TRACE_REPORT_ENDPOINT: ${{ secrets.TRACE_REPORT_ENDPOINT }}
CAPTCHA_SITE_KEY: ${{ secrets.CAPTCHA_SITE_KEY }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
- name: Upload core artifact
uses: actions/upload-artifact@v3
with:
@@ -75,11 +65,9 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Setup Version
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Rust
- name: Setup Rust
uses: ./.github/actions/build-rust
with:
target: 'x86_64-unknown-linux-gnu'
@@ -98,11 +86,9 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Setup Version
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Rust
- name: Setup Rust
uses: ./.github/actions/build-rust
with:
target: 'aarch64-unknown-linux-gnu'
@@ -219,9 +205,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Version
uses: ./.github/actions/setup-version
- name: Deploy to ${{ github.event.inputs.flavor }}
- name: Deploy to dev
uses: ./.github/actions/deploy
with:
build-type: ${{ github.event.inputs.flavor }}
@@ -231,7 +215,6 @@ jobs:
cluster-name: ${{ secrets.GCP_CLUSTER_NAME }}
cluster-location: ${{ secrets.GCP_CLUSTER_LOCATION }}
env:
APP_VERSION: ${{ steps.version.outputs.APP_VERSION }}
DEPLOY_HOST: ${{ secrets.DEPLOY_HOST }}
CANARY_DEPLOY_HOST: ${{ secrets.CANARY_DEPLOY_HOST }}
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
@@ -257,4 +240,3 @@ jobs:
CLOUD_SQL_IAM_ACCOUNT: ${{ secrets.CLOUD_SQL_IAM_ACCOUNT }}
STRIPE_API_KEY: ${{ secrets.STRIPE_API_KEY }}
STRIPE_WEBHOOK_KEY: ${{ secrets.STRIPE_WEBHOOK_KEY }}
STATIC_IP_NAME: ${{ secrets.STATIC_IP_NAME }}

View File

@@ -65,15 +65,14 @@ jobs:
- name: Replace Version
run: ./scripts/set-version.sh ${{ needs.set-build-version.outputs.version }}
- name: generate-assets
run: yarn workspace @affine/electron generate-assets
working-directory: packages/frontend/electron
run: yarn generate-assets
env:
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
NEXT_PUBLIC_SENTRY_DSN: ${{ secrets.NEXT_PUBLIC_SENTRY_DSN }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
RELEASE_VERSION: ${{ needs.set-build-version.outputs.version }}
SKIP_PLUGIN_BUILD: 'true'
SKIP_NX_CACHE: 'true'
- name: Upload core artifact
uses: actions/upload-artifact@v3
@@ -231,11 +230,6 @@ jobs:
node ./packages/frontend/electron/scripts/generate-yml.js
env:
RELEASE_VERSION: ${{ needs.set-build-version.outputs.version }}
- name: Generate SHA512 checksums
run: |
sha512sum *-linux-* > SHA512SUMS.txt
sha512sum *-macos-* >> SHA512SUMS.txt
sha512sum *-windows-* >> SHA512SUMS.txt
- name: Create Release Draft
uses: softprops/action-gh-release@v1
env:
@@ -246,7 +240,6 @@ jobs:
tag_name: ${{ needs.set-build-version.outputs.version }}
prerelease: true
files: |
./SHA512SUMS.txt
./VERSION
./*.zip
./*.dmg

View File

@@ -19,10 +19,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
uses: ./.github/actions/setup-node
with:
cache: 'yarn'
node-version-file: '.nvmrc'
- name: Install dependencies
run: yarn workspaces focus @affine/commitlint-config
- run: echo "${{ github.event.pull_request.title }}" | yarn workspace @affine/commitlint-config commitlint -g ./.commitlintrc.json
electron-install: false
- run: echo "${{ github.event.pull_request.title }}" | yarn dlx commitlint -g ./.commitlintrc.json

View File

@@ -40,7 +40,6 @@ env:
jobs:
before-make:
runs-on: ubuntu-latest
environment: ${{ github.event.inputs.build-type || (github.ref_type == 'tag' && contains(github.ref, 'canary') && 'canary') }}
outputs:
RELEASE_VERSION: ${{ steps.get-canary-version.outputs.RELEASE_VERSION }}
steps:
@@ -66,10 +65,8 @@ jobs:
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
RELEASE_VERSION: ${{ github.event.inputs.version || steps.get-canary-version.outputs.RELEASE_VERSION }}
SKIP_PLUGIN_BUILD: 'true'
SKIP_NX_CACHE: 'true'
- name: Upload core artifact
uses: actions/upload-artifact@v3

165
.github/workflows/release.yml vendored Normal file

@@ -0,0 +1,165 @@
name: Release
on:
push:
branches:
- canary
env:
BUILD_TYPE: stable
APP_NAME: affine
COVERAGE: false
DISTRIBUTION: browser
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
jobs:
release:
name: Try publishing npm@latest release
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Try publishing to NPM
run: ./scripts/publish.sh
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
build-core:
name: Build @affine/core
runs-on: ubuntu-latest
environment: development
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Plugins
run: yarn run build:plugins
- name: Build Core
run: yarn nx build @affine/core
- name: Upload core artifact
uses: actions/upload-artifact@v3
with:
name: core
path: ./packages/frontend/core/dist
if-no-files-found: error
build-server:
name: Build Server
runs-on: ubuntu-latest
environment: development
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
- name: Build Server
run: yarn nx build @affine/server
- name: Upload server dist
uses: actions/upload-artifact@v3
with:
name: server-dist
path: ./packages/backend/server/dist
if-no-files-found: error
build-storage:
name: Build Storage
runs-on: ubuntu-latest
env:
RUSTFLAGS: '-C debuginfo=1'
environment: development
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Setup Rust
uses: ./.github/actions/build-rust
with:
target: 'x86_64-unknown-linux-gnu'
package: '@affine/storage'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Upload storage.node
uses: actions/upload-artifact@v3
with:
name: storage.node
path: ./packages/backend/storage/storage.node
if-no-files-found: error
build-docker:
if: github.ref == 'refs/heads/canary'
name: Build Docker
runs-on: ubuntu-latest
needs:
- build-server
- build-core
- build-storage
steps:
- uses: actions/checkout@v4
- name: Download core artifact
uses: actions/download-artifact@v3
with:
name: core
path: ./packages/frontend/core/dist
- name: Download server dist
uses: actions/download-artifact@v3
with:
name: server-dist
path: ./packages/backend/server/dist
- name: Download storage.node
uses: actions/download-artifact@v3
with:
name: storage.node
path: ./packages/backend/server
- name: Setup Git short hash
run: |
echo "GIT_SHORT_HASH=$(git rev-parse --short HEAD)" >> "$GITHUB_ENV"
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
logout: false
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build front Dockerfile
uses: docker/build-push-action@v5
with:
context: .
push: true
pull: true
platforms: linux/amd64,linux/arm64
provenance: true
file: .github/deployment/front/Dockerfile
tags: ghcr.io/toeverything/affine-front:${{ env.GIT_SHORT_HASH }},ghcr.io/toeverything/affine-front:latest
# setup node without cache configuration
# Prisma cache is not compatible with docker build cache
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version-file: '.nvmrc'
registry-url: https://npm.pkg.github.com
scope: '@toeverything'
- name: Install Node.js dependencies
run: yarn workspaces focus @affine/server --production
- name: Generate Prisma client
run: yarn workspace @affine/server prisma generate
- name: Build graphql Dockerfile
uses: docker/build-push-action@v5
with:
context: .
push: true
pull: true
platforms: linux/amd64,linux/arm64
provenance: true
file: .github/deployment/node/Dockerfile
tags: ghcr.io/toeverything/affine-graphql:${{ env.GIT_SHORT_HASH }},ghcr.io/toeverything/affine-graphql:latest

View File

@@ -11,7 +11,7 @@ jobs:
deploy:
runs-on: ubuntu-latest
name: Deploy
environment: stable
environment: production
steps:
- uses: actions/checkout@v4
- name: Publish

View File

@@ -1,4 +1,23 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
yarn lint-staged && yarn lint:ox
# check lockfile is up to date
yarn install --mode=skip-build --inline-builds --immutable
# build infra code
yarn -T run build:infra
# generate prisma client type
yarn workspace @affine/server prisma generate
# generate i18n
yarn i18n-codegen gen
# lint staged files
yarn exec lint-staged
# type check
yarn typecheck
# circular dependency check
yarn circular

View File

@@ -15,7 +15,6 @@ packages/backend/server/src/schema.gql
packages/frontend/i18n/src/i18n-generated.ts
packages/frontend/graphql/src/graphql/index.ts
tests/affine-legacy/**/static
.yarnrc.yml
# auto-generated by NAPI-RS
# fixme(@joooye34): need script to check and generate ignore list here

File diff suppressed because one or more lines are too long

View File

@@ -6,10 +6,10 @@ nmMode: hardlinks-local
nodeLinker: node-modules
npmAuthToken: "${NPM_TOKEN:-NONE}"
npmAuthToken: '${NPM_TOKEN:-NONE}'
npmPublishAccess: public
npmPublishRegistry: "https://registry.npmjs.org"
npmPublishRegistry: 'https://registry.npmjs.org'
yarnPath: .yarn/releases/yarn-4.0.2.cjs
yarnPath: .yarn/releases/yarn-4.0.1.cjs

144
Cargo.lock generated

@@ -83,15 +83,14 @@ dependencies = [
[[package]]
name = "ahash"
version = "0.8.6"
version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a"
checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f"
dependencies = [
"cfg-if",
"getrandom",
"once_cell",
"version_check",
"zerocopy",
]
[[package]]
@@ -241,16 +240,6 @@ dependencies = [
"num-traits",
]
[[package]]
name = "atomic-write-file"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c232177ba50b16fe7a4588495bd474a62a9e45a8e4ca6fd7d0b7ac29d164631e"
dependencies = [
"nix",
"rand",
]
[[package]]
name = "autocfg"
version = "1.1.0"
@@ -952,7 +941,7 @@ version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e"
dependencies = [
"ahash 0.8.6",
"ahash 0.8.3",
]
[[package]]
@@ -961,7 +950,7 @@ version = "0.14.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f93e7192158dbcda357bdec5fb5788eebf8bbac027f3f33e719d29135ae84156"
dependencies = [
"ahash 0.8.6",
"ahash 0.8.3",
"allocator-api2",
]
@@ -1123,7 +1112,7 @@ dependencies = [
[[package]]
name = "jwst-codec"
version = "0.1.0"
source = "git+https://github.com/toeverything/OctoBase.git?rev=49a6b7a#49a6b7af25ce1fe54e8383e10980e9536821d286"
source = "git+https://github.com/toeverything/OctoBase.git?rev=aad9e5b#aad9e5b7e9d6f479e6cf7555f5845bbbaaadbc66"
dependencies = [
"arbitrary",
"bitvec",
@@ -1144,7 +1133,7 @@ dependencies = [
[[package]]
name = "jwst-core"
version = "0.1.0"
source = "git+https://github.com/toeverything/OctoBase.git?rev=49a6b7a#49a6b7af25ce1fe54e8383e10980e9536821d286"
source = "git+https://github.com/toeverything/OctoBase.git?rev=aad9e5b#aad9e5b7e9d6f479e6cf7555f5845bbbaaadbc66"
dependencies = [
"async-trait",
"base64",
@@ -1162,7 +1151,7 @@ dependencies = [
[[package]]
name = "jwst-logger"
version = "0.1.0"
source = "git+https://github.com/toeverything/OctoBase.git?rev=49a6b7a#49a6b7af25ce1fe54e8383e10980e9536821d286"
source = "git+https://github.com/toeverything/OctoBase.git?rev=aad9e5b#aad9e5b7e9d6f479e6cf7555f5845bbbaaadbc66"
dependencies = [
"chrono",
"nu-ansi-term 0.49.0",
@@ -1175,7 +1164,7 @@ dependencies = [
[[package]]
name = "jwst-storage"
version = "0.1.0"
source = "git+https://github.com/toeverything/OctoBase.git?rev=49a6b7a#49a6b7af25ce1fe54e8383e10980e9536821d286"
source = "git+https://github.com/toeverything/OctoBase.git?rev=aad9e5b#aad9e5b7e9d6f479e6cf7555f5845bbbaaadbc66"
dependencies = [
"anyhow",
"async-trait",
@@ -1200,7 +1189,7 @@ dependencies = [
[[package]]
name = "jwst-storage-migration"
version = "0.1.0"
source = "git+https://github.com/toeverything/OctoBase.git?rev=49a6b7a#49a6b7af25ce1fe54e8383e10980e9536821d286"
source = "git+https://github.com/toeverything/OctoBase.git?rev=aad9e5b#aad9e5b7e9d6f479e6cf7555f5845bbbaaadbc66"
dependencies = [
"sea-orm-migration",
"tokio",
@@ -1268,9 +1257,9 @@ checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058"
[[package]]
name = "libsqlite3-sys"
version = "0.27.0"
version = "0.26.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf4e226dcd58b4be396f7bd3c20da8fdee2911400705297ba7d2d7cc2c30f716"
checksum = "afc22eff61b133b115c6e8c74e818c628d6d5e7a502afea6f64dee076dd94326"
dependencies = [
"cc",
"pkg-config",
@@ -1348,15 +1337,6 @@ version = "2.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167"
[[package]]
name = "memoffset"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4"
dependencies = [
"autocfg",
]
[[package]]
name = "minimal-lexical"
version = "0.2.1"
@@ -1395,9 +1375,9 @@ dependencies = [
[[package]]
name = "napi"
version = "2.14.1"
version = "2.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1133249c46e92da921bafc8aba4912bf84d6c475f7625183772ed2d0844dc3a7"
checksum = "f9d90182620f32fe34b6ac9b52cba898af26e94c7f5abc01eb4094c417ae2e6c"
dependencies = [
"anyhow",
"bitflags 2.4.1",
@@ -1419,9 +1399,9 @@ checksum = "d4b4532cf86bfef556348ac65e561e3123879f0e7566cca6d43a6ff5326f13df"
[[package]]
name = "napi-derive"
version = "2.14.2"
version = "2.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0cca5738c6e81eb5ffd2c8ff2b4f05ece9c4c60c7e2b36cec6524492cf7f330"
checksum = "3619fa472d23cd5af94d63a2bae454a77a8863251f40230fbf59ce20eafa8a86"
dependencies = [
"cfg-if",
"convert_case",
@@ -1433,9 +1413,9 @@ dependencies = [
[[package]]
name = "napi-derive-backend"
version = "1.0.55"
version = "1.0.54"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35960e5f33228192a9b661447d0dfe8f5a3790ff5b4058c4d67680ded4f65b91"
checksum = "ecd3ea4b54020c73d591a49cd192f6334c5f37f71a63ead54dbc851fa991ef00"
dependencies = [
"convert_case",
"once_cell",
@@ -1455,19 +1435,6 @@ dependencies = [
"libloading",
]
[[package]]
name = "nix"
version = "0.26.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b"
dependencies = [
"bitflags 1.3.2",
"cfg-if",
"libc",
"memoffset",
"pin-utils",
]
[[package]]
name = "no-std-compat"
version = "0.4.1"
@@ -2325,18 +2292,18 @@ checksum = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090"
[[package]]
name = "serde"
version = "1.0.193"
version = "1.0.192"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89"
checksum = "bca2a08484b285dcb282d0f67b26cadc0df8b19f8c12502c13d966bf9482f001"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.193"
version = "1.0.192"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3"
checksum = "d6c7207fbec9faa48073f3e3074cbe553af6ea512d7c21ba46e434e70ea9fbc1"
dependencies = [
"proc-macro2",
"quote",
@@ -2483,9 +2450,9 @@ dependencies = [
[[package]]
name = "sqlx"
version = "0.7.3"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dba03c279da73694ef99763320dea58b51095dfe87d001b1d4b5fe78ba8763cf"
checksum = "0e50c216e3624ec8e7ecd14c6a6a6370aad6ee5d8cfc3ab30b5162eeeef2ed33"
dependencies = [
"sqlx-core",
"sqlx-macros",
@@ -2496,11 +2463,11 @@ dependencies = [
[[package]]
name = "sqlx-core"
version = "0.7.3"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d84b0a3c3739e220d94b3239fd69fb1f74bc36e16643423bd99de3b43c21bfbd"
checksum = "8d6753e460c998bbd4cd8c6f0ed9a64346fcca0723d6e75e52fdc351c5d2169d"
dependencies = [
"ahash 0.8.6",
"ahash 0.8.3",
"atoi",
"bigdecimal",
"byteorder",
@@ -2544,9 +2511,9 @@ dependencies = [
[[package]]
name = "sqlx-macros"
version = "0.7.3"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89961c00dc4d7dffb7aee214964b065072bff69e36ddb9e2c107541f75e4f2a5"
checksum = "9a793bb3ba331ec8359c1853bd39eed32cdd7baaf22c35ccf5c92a7e8d1189ec"
dependencies = [
"proc-macro2",
"quote",
@@ -2557,11 +2524,10 @@ dependencies = [
[[package]]
name = "sqlx-macros-core"
version = "0.7.3"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0bd4519486723648186a08785143599760f7cc81c52334a55d6a83ea1e20841"
checksum = "0a4ee1e104e00dedb6aa5ffdd1343107b0a4702e862a84320ee7cc74782d96fc"
dependencies = [
"atomic-write-file",
"dotenvy",
"either",
"heck",
@@ -2584,9 +2550,9 @@ dependencies = [
[[package]]
name = "sqlx-mysql"
version = "0.7.3"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e37195395df71fd068f6e2082247891bc11e3289624bbc776a0cdfa1ca7f1ea4"
checksum = "864b869fdf56263f4c95c45483191ea0af340f9f3e3e7b4d57a61c7c87a970db"
dependencies = [
"atoi",
"base64",
@@ -2631,9 +2597,9 @@ dependencies = [
[[package]]
name = "sqlx-postgres"
version = "0.7.3"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6ac0ac3b7ccd10cc96c7ab29791a7dd236bd94021f31eec7ba3d46a74aa1c24"
checksum = "eb7ae0e6a97fb3ba33b23ac2671a5ce6e3cabe003f451abd5a56e7951d975624"
dependencies = [
"atoi",
"base64",
@@ -2676,9 +2642,9 @@ dependencies = [
[[package]]
name = "sqlx-sqlite"
version = "0.7.3"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "210976b7d948c7ba9fced8ca835b11cbb2d677c59c79de41ac0d397e14547490"
checksum = "d59dc83cf45d89c555a577694534fcd1b55c545a816c816ce51f20bbe56a4f3f"
dependencies = [
"atoi",
"chrono",
@@ -2696,7 +2662,6 @@ dependencies = [
"time",
"tracing",
"url",
"urlencoding",
"uuid",
]
@@ -3059,12 +3024,6 @@ dependencies = [
"percent-encoding",
]
[[package]]
name = "urlencoding"
version = "2.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da"
[[package]]
name = "utf8parse"
version = "0.2.1"
@@ -3073,9 +3032,9 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
[[package]]
name = "uuid"
version = "1.6.1"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e395fcf16a7a3d8127ec99782007af141946b4795001f876d54fb0d55978560"
checksum = "c58fe91d841bc04822c9801002db4ea904b9e4b8e6bbad25127b46eff8dc516b"
dependencies = [
"getrandom",
"rand",
@@ -3182,9 +3141,12 @@ dependencies = [
[[package]]
name = "webpki-roots"
version = "0.25.3"
version = "0.24.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1778a42e8b3b90bff8d0f5032bf22250792889a5cdc752aa0020c84abe3aaf10"
checksum = "b291546d5d9d1eab74f069c77749f2cb8504a12caa20f0f2de93ddbf6f411888"
dependencies = [
"rustls-webpki",
]
[[package]]
name = "whoami"
@@ -3316,26 +3278,6 @@ dependencies = [
"tap",
]
[[package]]
name = "zerocopy"
version = "0.7.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1c4061bedbb353041c12f413700357bec76df2c7e2ca8e4df8bac24c6bf68e3d"
dependencies = [
"zerocopy-derive",
]
[[package]]
name = "zerocopy-derive"
version = "0.7.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3c129550b3e6de3fd0ba67ba5c81818f9805e58b8d7fee80a3a59d2c9fc601a"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.38",
]
[[package]]
name = "zeroize"
version = "1.6.0"

View File

@@ -107,11 +107,12 @@ If you have questions, you are welcome to contact us. One of the best places to
## Ecosystem
| Name | | |
| -------------------------------------------------------- | ---------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------- |
| [@affine/component](packages/frontend/component) | AFFiNE Component Resources | [![](https://img.shields.io/codecov/c/github/toeverything/affine?style=flat-square)](https://affine-storybook.vercel.app/) |
| [@toeverything/y-indexeddb](packages/common/y-indexeddb) | IndexedDB database adapter for Yjs | [![](https://img.shields.io/npm/dm/@toeverything/y-indexeddb?style=flat-square&color=eee)](https://www.npmjs.com/package/@toeverything/y-indexeddb) |
| [@toeverything/theme](packages/common/theme) | AFFiNE theme | [![](https://img.shields.io/npm/dm/@toeverything/theme?style=flat-square&color=eee)](https://www.npmjs.com/package/@toeverything/theme) |
| Name | | |
| ----------------------------------------------------------------------------------------------- | --------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------- |
| [@toeverything/component](https://github.com/toeverything/design/tree/main/packages/components) | Toeverything Shared Component Resources | |
| [@affine/component](packages/frontend/component) | AFFiNE Component Resources | [![](https://img.shields.io/codecov/c/github/toeverything/affine?style=flat-square)](https://affine-storybook.vercel.app/) |
| [@toeverything/y-indexeddb](packages/common/y-indexeddb) | IndexedDB database adapter for Yjs | [![](https://img.shields.io/npm/dm/@toeverything/y-indexeddb?style=flat-square&color=eee)](https://www.npmjs.com/package/@toeverything/y-indexeddb) |
| [@toeverything/theme](packages/common/theme) | AFFiNE theme | [![](https://img.shields.io/npm/dm/@toeverything/theme?style=flat-square&color=eee)](https://www.npmjs.com/package/@toeverything/theme) |
## Plugins
@@ -227,7 +228,7 @@ See [LICENSE] for details.
[jobs available]: ./docs/jobs.md
[latest packages]: https://github.com/toeverything/AFFiNE/pkgs/container/affine-self-hosted
[contributor license agreement]: https://github.com/toeverything/affine/edit/canary/.github/CLA.md
[rust-version-icon]: https://img.shields.io/badge/Rust-1.74.1-dea584
[rust-version-icon]: https://img.shields.io/badge/Rust-1.71.0-dea584
[stars-icon]: https://img.shields.io/github/stars/toeverything/AFFiNE.svg?style=flat&logo=github&colorB=red&label=stars
[codecov]: https://codecov.io/gh/toeverything/affine/branch/canary/graphs/badge.svg?branch=canary
[node-version-icon]: https://img.shields.io/badge/node-%3E=18.16.1-success

View File

@@ -1,28 +0,0 @@
# Issues Triaging
When we receive your issue, we will first triage it. Triaging an issue usually takes around one business day but may take longer. The goal of triaging is to give you a clear understanding of what will happen to your issue. For example, once your feature request has been triaged, you know whether we plan to tackle it or whether we will wait to hear what the broader community thinks about the request.
Here are issue states and their descriptions:
| State | Description |
| ------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| Untriaged | The team has not yet reviewed the issue. We usually do it within one business day. |
| As designed | The behavior described in the issue is intentional. If you find it seriously disruptive or if we've misunderstood you, please let us know in the issue's comments section. |
| Blocked | We can't work on this issue until another one (linked) is resolved. |
| Can't Reproduce | We have been unable to reproduce the issue on our side. It could be flaky or fixed already, or we may not have had all the details we needed. If you're still experiencing the issue and have any further details, please share them. |
| Duplicate | The issue is the same (or has the same cause) as another one (linked). |
| Fixed | If the issue was a bug, it's been fixed; if it was a missing feature, it's been implemented. |
| Fixed In Branch | If the issue was a bug, it's been fixed; if it was a missing feature, it's been implemented; the changes are now in a separate branch and haven't been merged into the default branch yet. |
| In Progress | We're currently working on the issue. |
| Incomplete | Unfortunately we don't have enough information to proceed. If you're willing to share any further details about the issue, please do so in the comments. |
| Obsolete | The part of the product that was causing this issue has been removed or significantly reworked since it was created. |
| Upvoting | We are currently evaluating demand for the issue and checking whether it requires complicated or risky changes. Please leave a vote or comment if you think it should be prioritized. |
| Open | We want to implement the fix or feature in the near future. We cant promise it will appear in the next public release, but its on our short list. |
| Shelved | We have reviewed the issue and decided that, even though it has merit, we cannot currently include it in our near-term plan. |
| Third Party Problem | The issue is caused by a third party. We've done our best to inform them about it. |
| To be Discussed | We need some time to discuss the issue. |
| To Reproduce | We will try to find the steps needed to reproduce the issue on our side. |
| Under Investigation | We've triaged the issue, but now we need to investigate it more thoroughly. This may require processing additional information like logs or dumps. |
| Waiting for Info | We've requested additional information from the person who created the issue and are waiting for them to get back to us. |
| Declined | We've reviewed the suggestion and, while we appreciate its value, we unfortunately do not have the resources to implement it. |
| Answered | The issue actually turned out to be a question or a misunderstanding, and it has been answered or resolved. |

View File

@@ -56,7 +56,7 @@
"env": "SENTRY_AUTH_TOKEN"
},
{
"env": "SENTRY_DSN"
"env": "NEXT_PUBLIC_SENTRY_DSN"
},
{
"env": "DISTRIBUTION"

View File

@@ -1,6 +1,6 @@
{
"name": "@affine/monorepo",
"version": "0.11.0",
"version": "0.10.3-beta.1",
"private": true,
"author": "toeverything",
"license": "MIT",
@@ -33,12 +33,12 @@
"lint:eslint:fix": "yarn lint:eslint --fix",
"lint:prettier": "prettier --ignore-unknown --cache --check .",
"lint:prettier:fix": "prettier --ignore-unknown --cache --write .",
"lint:ox": "oxlint --deny-warnings -D correctness -D nursery -D prefer-array-some -D no-useless-promise-resolve-reject -A no-undef -A consistent-type-exports -A default -A named -A ban-ts-comment",
"lint": "yarn lint:eslint && yarn lint:prettier",
"lint:fix": "yarn lint:eslint:fix && yarn lint:prettier:fix",
"test": "vitest --run",
"test:ui": "vitest --ui",
"test:coverage": "vitest run --coverage",
"circular": "madge --circular --ts-config ./tsconfig.json ./packages/frontend/core/src/pages/**/*.tsx ./packages/frontend/core/src/index.tsx ./packages/frontend/electron/src/*/index.ts",
"typecheck": "tsc -b tsconfig.json --diagnostics",
"postinstall": "node ./scripts/check-version.mjs && yarn i18n-codegen gen && yarn husky install"
},
@@ -49,10 +49,8 @@
"eslint --cache --fix"
],
"*.toml": [
"prettier --ignore-unknown --write",
"taplo format"
],
"*.rs": [
"cargo fmt --"
]
},
"devDependencies": {
@@ -73,17 +71,18 @@
"@types/affine__env": "workspace:*",
"@types/eslint": "^8.44.7",
"@types/node": "^20.9.3",
"@typescript-eslint/eslint-plugin": "^6.13.1",
"@typescript-eslint/parser": "^6.13.1",
"@typescript-eslint/eslint-plugin": "^6.12.0",
"@typescript-eslint/parser": "^6.12.0",
"@vanilla-extract/vite-plugin": "^3.9.2",
"@vanilla-extract/webpack-plugin": "^2.3.1",
"@vitejs/plugin-react-swc": "^3.5.0",
"@vitest/coverage-istanbul": "1.0.4",
"@vitest/ui": "1.0.4",
"@vitest/coverage-istanbul": "0.34.6",
"@vitest/ui": "0.34.6",
"electron": "^27.1.0",
"eslint": "^8.54.0",
"eslint-config-prettier": "^9.0.0",
"eslint-plugin-i": "^2.29.0",
"eslint-plugin-prettier": "^5.0.1",
"eslint-plugin-react": "^7.33.2",
"eslint-plugin-react-hooks": "^4.6.0",
"eslint-plugin-simple-import-sort": "^10.0.0",
@@ -95,29 +94,29 @@
"happy-dom": "^12.10.3",
"husky": "^8.0.3",
"lint-staged": "^15.1.0",
"madge": "^6.1.0",
"msw": "^2.0.8",
"nanoid": "^5.0.3",
"nx": "^17.1.3",
"nx-cloud": "^16.5.2",
"nyc": "^15.1.0",
"oxlint": "0.0.21",
"prettier": "^3.1.0",
"semver": "^7.5.4",
"serve": "^14.2.1",
"string-width": "^7.0.0",
"ts-node": "^10.9.1",
"typescript": "^5.3.2",
"vite": "^5.0.6",
"vite": "^5.0.1",
"vite-plugin-istanbul": "^5.0.0",
"vite-plugin-static-copy": "^1.0.0",
"vite-plugin-static-copy": "^0.17.1",
"vite-tsconfig-paths": "^4.2.1",
"vitest": "1.0.4",
"vitest": "0.34.6",
"vitest-fetch-mock": "^0.2.2",
"vitest-mock-extended": "^1.3.1"
},
"packageManager": "yarn@4.0.2",
"packageManager": "yarn@4.0.1",
"resolutions": {
"vite": "^5.0.6",
"vite": "^4.4.11",
"array-buffer-byte-length": "npm:@nolyfill/array-buffer-byte-length@latest",
"array-includes": "npm:@nolyfill/array-includes@latest",
"array.prototype.flat": "npm:@nolyfill/array.prototype.flat@latest",
@@ -174,7 +173,7 @@
"which-boxed-primitive": "npm:@nolyfill/which-boxed-primitive@latest",
"which-typed-array": "npm:@nolyfill/which-typed-array@latest",
"next-auth@^4.24.5": "patch:next-auth@npm%3A4.24.5#~/.yarn/patches/next-auth-npm-4.24.5-8428e11927.patch",
"@reforged/maker-appimage/@electron-forge/maker-base": "7.2.0",
"@reforged/maker-appimage/@electron-forge/maker-base": "7.1.0",
"macos-alias": "npm:macos-alias-building@latest",
"fs-xattr": "npm:@napi-rs/xattr@latest"
}

View File

@@ -1,45 +0,0 @@
/*
Warnings:
- You are about to drop the `user_feature_gates` table. If the table is not empty, all the data it contains will be lost.
*/
-- DropForeignKey
ALTER TABLE "user_feature_gates" DROP CONSTRAINT "user_feature_gates_user_id_fkey";
-- DropTable
DROP TABLE "user_feature_gates";
-- CreateTable
CREATE TABLE "user_features" (
"id" SERIAL NOT NULL,
"user_id" VARCHAR(36) NOT NULL,
"feature_id" INTEGER NOT NULL,
"reason" VARCHAR NOT NULL,
"created_at" TIMESTAMPTZ(6) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"expired_at" TIMESTAMPTZ(6),
"activated" BOOLEAN NOT NULL DEFAULT false,
CONSTRAINT "user_features_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "features" (
"id" SERIAL NOT NULL,
"feature" VARCHAR NOT NULL,
"version" INTEGER NOT NULL DEFAULT 0,
"type" INTEGER NOT NULL,
"configs" JSON NOT NULL,
"created_at" TIMESTAMPTZ(6) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "features_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE UNIQUE INDEX "features_feature_version_key" ON "features"("feature", "version");
-- AddForeignKey
ALTER TABLE "user_features" ADD CONSTRAINT "user_features_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "user_features" ADD CONSTRAINT "user_features_feature_id_fkey" FOREIGN KEY ("feature_id") REFERENCES "features"("id") ON DELETE CASCADE ON UPDATE CASCADE;

View File

@@ -1,2 +0,0 @@
-- AlterTable
ALTER TABLE "optimized_blobs" ADD COLUMN "deleted_at" TIMESTAMPTZ(6);

View File

@@ -1,7 +1,7 @@
{
"name": "@affine/server",
"private": true,
"version": "0.11.0",
"version": "0.10.3-beta.1",
"description": "Affine Node.js server",
"type": "module",
"bin": {
@@ -41,14 +41,13 @@
"@opentelemetry/core": "^1.18.1",
"@opentelemetry/exporter-prometheus": "^0.45.1",
"@opentelemetry/exporter-zipkin": "^1.18.1",
"@opentelemetry/host-metrics": "^0.34.0",
"@opentelemetry/host-metrics": "^0.33.2",
"@opentelemetry/instrumentation": "^0.45.1",
"@opentelemetry/instrumentation-graphql": "^0.36.0",
"@opentelemetry/instrumentation-http": "^0.45.1",
"@opentelemetry/instrumentation-ioredis": "^0.36.0",
"@opentelemetry/instrumentation-ioredis": "^0.35.3",
"@opentelemetry/instrumentation-nestjs-core": "^0.33.3",
"@opentelemetry/instrumentation-socket.io": "^0.34.3",
"@opentelemetry/resources": "^1.18.1",
"@opentelemetry/sdk-metrics": "^1.18.1",
"@opentelemetry/sdk-node": "^0.45.1",
"@opentelemetry/sdk-trace-node": "^1.18.1",
@@ -82,8 +81,7 @@
"socket.io": "^4.7.2",
"stripe": "^14.5.0",
"ws": "^8.14.2",
"yjs": "^13.6.10",
"zod": "^3.22.4"
"yjs": "^13.6.10"
},
"devDependencies": {
"@affine-test/kit": "workspace:*",
@@ -103,7 +101,7 @@
"@types/sinon": "^17.0.2",
"@types/supertest": "^2.0.16",
"@types/ws": "^8.5.10",
"ava": "^6.0.0",
"ava": "^5.3.1",
"c8": "^8.0.1",
"nodemon": "^3.0.1",
"sinon": "^17.0.1",
@@ -136,8 +134,7 @@
"ENABLE_LOCAL_EMAIL": "true",
"OAUTH_EMAIL_LOGIN": "noreply@toeverything.info",
"OAUTH_EMAIL_PASSWORD": "affine",
"OAUTH_EMAIL_SENDER": "noreply@toeverything.info",
"FEATURES_EARLY_ACCESS_PREVIEW": "false"
"OAUTH_EMAIL_SENDER": "noreply@toeverything.info"
}
},
"nodemonConfig": {

View File

@@ -22,7 +22,7 @@ model User {
accounts Account[]
sessions Session[]
features UserFeatures[]
features UserFeatureGates[]
customer UserStripeCustomer?
subscription UserSubscription?
invoices UserInvoice[]
@@ -113,48 +113,15 @@ model WorkspacePageUserPermission {
@@map("workspace_page_user_permissions")
}
// feature gates is a way to enable/disable features for a user
// for example:
// - early access is a feature that allow some users to access the insider version
// - pro plan is a quota that allow some users access to more resources after they pay
model UserFeatures {
id Int @id @default(autoincrement())
userId String @map("user_id") @db.VarChar(36)
featureId Int @map("feature_id") @db.Integer
// we will record the reason why the feature is enabled/disabled
// for example:
// - pro_plan_v1: "user buy the pro plan"
reason String @db.VarChar
// record the quota enabled time
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
// record the quota expired time, pay plan is a subscription, so it will expired
expiredAt DateTime? @map("expired_at") @db.Timestamptz(6)
// whether the feature is activated
// for example:
// - if we switch the user to another plan, we will set the old plan to deactivated, but don't delete it
activated Boolean @default(false)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
feature Features @relation(fields: [featureId], references: [id], onDelete: Cascade)
@@map("user_features")
}
model Features {
id Int @id @default(autoincrement())
model UserFeatureGates {
id String @id @default(uuid()) @db.VarChar
userId String @map("user_id") @db.VarChar
feature String @db.VarChar
version Int @default(0) @db.Integer
// 0: feature, 1: quota
type Int @db.Integer
// configs, defined by the feature controller
configs Json @db.Json
reason String @db.VarChar
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
UserFeatureGates UserFeatures[]
@@unique([feature, version])
@@map("features")
@@map("user_feature_gates")
}
model Account {
@@ -211,15 +178,13 @@ model Blob {
}
model OptimizedBlob {
id Int @id @default(autoincrement()) @db.Integer
hash String @db.VarChar
workspaceId String @map("workspace_id") @db.VarChar
params String @db.VarChar
blob Bytes @db.ByteA
id Int @id @default(autoincrement()) @db.Integer
hash String @db.VarChar
workspaceId String @map("workspace_id") @db.VarChar
params String @db.VarChar
blob Bytes @db.ByteA
length BigInt
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
// not for keeping, but for snapshot history
deletedAt DateTime? @map("deleted_at") @db.Timestamptz(6)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
@@unique([workspaceId, hash, params])
@@map("optimized_blobs")

View File

@@ -8,20 +8,6 @@ async function main() {
data: {
...userA,
password: await hash(userA.password),
features: {
create: {
reason: 'created by api sign up',
activated: true,
feature: {
connect: {
feature_version: {
feature: 'free_plan_v1',
version: 1,
},
},
},
},
},
},
});
}
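For context on the seed block removed here: on the canary side of this diff a user's entitlement is an activated UserFeatures row linked to a Features row (see the schema hunk above). A sketch of the matching read with the Prisma client, using the free_plan_v1 name from the seed (illustrative only, not code from the repository):

import { PrismaClient } from '@prisma/client';

// Check whether a user currently has an activated free-plan feature row.
async function hasFreePlan(db: PrismaClient, userId: string): Promise<boolean> {
  const row = await db.userFeatures.findFirst({
    where: {
      userId,
      activated: true,
      feature: { feature: 'free_plan_v1' },
    },
  });
  return row !== null;
}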

View File

@@ -1,10 +1,8 @@
import { Module } from '@nestjs/common';
import { APP_INTERCEPTOR } from '@nestjs/core';
import { AppController } from './app.controller';
import { CacheInterceptor, CacheModule } from './cache';
import { CacheModule } from './cache';
import { ConfigModule } from './config';
import { EventModule } from './event';
import { BusinessModules } from './modules';
import { AuthModule } from './modules/auth';
import { PrismaModule } from './prisma';
@@ -16,7 +14,6 @@ const BasicModules = [
PrismaModule,
ConfigModule.forRoot(),
CacheModule,
EventModule,
StorageModule.forRoot(),
SessionModule,
RateLimiterModule,
@@ -24,12 +21,6 @@ const BasicModules = [
];
@Module({
providers: [
{
provide: APP_INTERCEPTOR,
useClass: CacheInterceptor,
},
],
imports: [...BasicModules, ...BusinessModules],
controllers: [AppController],
})

View File

@@ -22,5 +22,3 @@ const CacheProvider: FactoryProvider = {
})
export class CacheModule {}
export { LocalCache as Cache };
export { CacheInterceptor, MakeCache, PreventCache } from './interceptor';

View File

@@ -1,99 +0,0 @@
import {
CallHandler,
ExecutionContext,
Injectable,
Logger,
NestInterceptor,
SetMetadata,
} from '@nestjs/common';
import { Reflector } from '@nestjs/core';
import { GqlContextType, GqlExecutionContext } from '@nestjs/graphql';
import { mergeMap, Observable, of } from 'rxjs';
import { LocalCache } from './cache';
export const MakeCache = (key: string[], args?: string[]) =>
SetMetadata('cacheKey', [key, args]);
export const PreventCache = (key: string[], args?: string[]) =>
SetMetadata('preventCache', [key, args]);
type CacheConfig = [string[], string[]?];
@Injectable()
export class CacheInterceptor implements NestInterceptor {
private readonly logger = new Logger(CacheInterceptor.name);
constructor(
private readonly reflector: Reflector,
private readonly cache: LocalCache
) {}
async intercept(
ctx: ExecutionContext,
next: CallHandler<any>
): Promise<Observable<any>> {
const key = this.reflector.get<CacheConfig | undefined>(
'cacheKey',
ctx.getHandler()
);
const preventKey = this.reflector.get<CacheConfig | undefined>(
'preventCache',
ctx.getHandler()
);
if (preventKey) {
this.logger.debug(`prevent cache: ${JSON.stringify(preventKey)}`);
const key = await this.getCacheKey(ctx, preventKey);
if (key) {
await this.cache.delete(key);
}
return next.handle();
} else if (!key) {
return next.handle();
}
const cacheKey = await this.getCacheKey(ctx, key);
if (!cacheKey) {
return next.handle();
}
const cachedData = await this.cache.get(cacheKey);
if (cachedData) {
this.logger.debug('cache hit', cacheKey, cachedData);
return of(cachedData);
} else {
return next.handle().pipe(
mergeMap(async result => {
this.logger.debug('cache miss', cacheKey, result);
await this.cache.set(cacheKey, result);
return result;
})
);
}
}
private async getCacheKey(
ctx: ExecutionContext,
config: CacheConfig
): Promise<string | null> {
const [key, params] = config;
if (!params) {
return key.join(':');
} else if (ctx.getType<GqlContextType>() === 'graphql') {
const args = GqlExecutionContext.create(ctx).getArgs();
const cacheKey = params
.map(name => args[name])
.filter(v => v)
.join(':');
if (cacheKey) {
return [...key, cacheKey].join(':');
} else {
return key.join(':');
}
}
return null;
}
}
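The decorators removed above were applied per resolver; a hypothetical NestJS GraphQL resolver using them (the resolver, its argument, and the import path are made up for illustration) would have looked roughly like this, caching reads under "workspace:<id>" and clearing that entry on writes:

import { Args, Mutation, Query, Resolver } from '@nestjs/graphql';
import { MakeCache, PreventCache } from '../cache'; // hypothetical path

@Resolver()
export class WorkspaceResolver {
  // Results are cached by CacheInterceptor under the key "workspace:<id>".
  @MakeCache(['workspace'], ['id'])
  @Query(() => String)
  async workspace(@Args('id') id: string) {
    return id; // placeholder body
  }

  // A successful mutation deletes the cached "workspace:<id>" entry.
  @PreventCache(['workspace'], ['id'])
  @Mutation(() => String)
  async updateWorkspace(@Args('id') id: string) {
    return id; // placeholder body
  }
}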

View File

@@ -16,8 +16,6 @@ export enum ExternalAccount {
firebase = 'firebase',
}
export type ServerFlavor = 'allinone' | 'graphql' | 'sync' | 'selfhosted';
type EnvConfigType = 'string' | 'int' | 'float' | 'boolean';
type ConfigPaths = LeafPaths<
Omit<
@@ -52,7 +50,7 @@ function boolean(value: string) {
}
export function parseEnvValue(value: string | undefined, type?: EnvConfigType) {
if (value === undefined) {
if (typeof value === 'undefined') {
return;
}
@@ -188,6 +186,11 @@ export interface AFFiNEConfig {
fs: {
path: string;
};
/**
* default storage quota
* @default 10 * 1024 * 1024 * 1024 (10GB)
*/
quota: number;
};
/**
@@ -342,11 +345,6 @@ export interface AFFiNEConfig {
doc: {
manager: {
/**
* Whether auto merge updates into doc snapshot.
*/
enableUpdateAutoMerging: boolean;
/**
* How often the [DocManager] will start a new turn of merging pending updates into doc snapshot.
*

View File

@@ -7,12 +7,9 @@ import { join } from 'node:path';
import parse from 'parse-duration';
import pkg from '../../package.json' assert { type: 'json' };
import type { AFFiNEConfig, ServerFlavor } from './def';
import type { AFFiNEConfig } from './def';
import { applyEnvToConfig } from './env';
export const SERVER_FLAVOR = (process.env.SERVER_FLAVOR ??
'allinone') as ServerFlavor;
// Don't use this in production
export const examplePrivateKey = `-----BEGIN EC PRIVATE KEY-----
MHcCAQEEIEtyAJLIULkphVhqXqxk4Nr8Ggty3XLwUJWBxzAWCWTMoAoGCCqGSM49
@@ -58,6 +55,7 @@ export const getDefaultAFFiNEConfig: () => AFFiNEConfig = () => {
AFFINE_SERVER_HOST: 'host',
AFFINE_SERVER_SUB_PATH: 'path',
AFFINE_ENV: 'affineEnv',
AFFINE_FREE_USER_QUOTA: 'objectStorage.quota',
DATABASE_URL: 'db.url',
ENABLE_R2_OBJECT_STORAGE: ['objectStorage.r2.enabled', 'boolean'],
R2_OBJECT_STORAGE_ACCOUNT_ID: 'objectStorage.r2.accountId',
@@ -191,6 +189,8 @@ export const getDefaultAFFiNEConfig: () => AFFiNEConfig = () => {
fs: {
path: join(homedir(), '.affine-storage'),
},
// 10GB
quota: 10 * 1024 * 1024 * 1024,
},
rateLimiter: {
ttl: 60,
@@ -206,7 +206,6 @@ export const getDefaultAFFiNEConfig: () => AFFiNEConfig = () => {
},
doc: {
manager: {
enableUpdateAutoMerging: SERVER_FLAVOR !== 'sync',
updatePollInterval: 3000,
experimentalMergeWithJwstCodec: false,
},

View File

@@ -10,7 +10,7 @@ export function applyEnvToConfig(rawConfig: AFFiNEConfig) {
? [config, process.env[env]]
: [config[0], parseEnvValue(process.env[env], config[1])];
if (value !== undefined) {
if (typeof value !== 'undefined') {
set(rawConfig, path, value);
}
}
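
A rough sketch of how the mapping above plays out at startup (environment values and the enclosing script are illustrative; `getDefaultAFFiNEConfig` and `applyEnvToConfig` are the exports shown in this diff):

import { applyEnvToConfig } from './env';
import { getDefaultAFFiNEConfig } from './default';

// AFFINE_SERVER_HOST maps to the plain string path 'host', while
// ENABLE_R2_OBJECT_STORAGE carries an explicit 'boolean' type,
// so parseEnvValue coerces it before `set` writes it into the config tree.
process.env.AFFINE_SERVER_HOST = 'affine.example.com';
process.env.ENABLE_R2_OBJECT_STORAGE = 'true';

const config = getDefaultAFFiNEConfig();
applyEnvToConfig(config);
// config.host === 'affine.example.com'
// config.objectStorage.r2.enabled === true (a real boolean, not the string 'true')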

View File

@@ -73,4 +73,3 @@ export class ConfigModule {
}
export type { AFFiNEConfig } from './def';
export { SERVER_FLAVOR } from './default';

View File

@@ -14,7 +14,7 @@ interface Migration {
down: (db: PrismaService) => Promise<void>;
}
export async function collectMigrations(): Promise<Migration[]> {
async function collectMigrations(): Promise<Migration[]> {
const folder = join(fileURLToPath(import.meta.url), '../../migrations');
const migrationFiles = readdirSync(folder)
@@ -64,8 +64,35 @@ export class RunCommand extends CommandRunner {
continue;
}
await this.runMigration(migration);
this.logger.log(`Running ${migration.name}...`);
const record = await this.db.dataMigration.create({
data: {
name: migration.name,
startedAt: new Date(),
},
});
try {
await migration.up(this.db);
} catch (e) {
await this.db.dataMigration.delete({
where: {
id: record.id,
},
});
await migration.down(this.db);
this.logger.error('Failed to run data migration', e);
process.exit(1);
}
await this.db.dataMigration.update({
where: {
id: record.id,
},
data: {
finishedAt: new Date(),
},
});
done.push(migration);
}
@@ -74,56 +101,6 @@ export class RunCommand extends CommandRunner {
this.logger.log(`${migration.name}`);
});
}
async runOne(name: string) {
const migrations = await collectMigrations();
const migration = migrations.find(m => m.name === name);
if (!migration) {
throw new Error(`Unknown migration name: ${name}.`);
}
const exists = await this.db.dataMigration.count({
where: {
name: migration.name,
},
});
if (exists) return;
await this.runMigration(migration);
}
private async runMigration(migration: Migration) {
this.logger.log(`Running ${migration.name}...`);
const record = await this.db.dataMigration.create({
data: {
name: migration.name,
startedAt: new Date(),
},
});
try {
await migration.up(this.db);
} catch (e) {
await this.db.dataMigration.delete({
where: {
id: record.id,
},
});
await migration.down(this.db);
this.logger.error('Failed to run data migration', e);
process.exit(1);
}
await this.db.dataMigration.update({
where: {
id: record.id,
},
data: {
finishedAt: new Date(),
},
});
}
}
@Command({

View File

@@ -1,122 +0,0 @@
import { Prisma } from '@prisma/client';
import {
CommonFeature,
FeatureKind,
Features,
FeatureType,
} from '../../modules/features';
import { Quotas } from '../../modules/quota/schema';
import { PrismaService } from '../../prisma';
export class UserFeaturesInit1698652531198 {
// do the migration
static async up(db: PrismaService) {
// upgrade features from lower version to higher version
for (const feature of Features) {
await upsertFeature(db, feature);
}
await migrateNewFeatureTable(db);
for (const quota of Quotas) {
await upsertFeature(db, quota);
}
}
// revert the migration
static async down(_db: PrismaService) {
// TODO: revert the migration
}
}
// upgrade features from lower version to higher version
async function upsertFeature(
db: PrismaService,
feature: CommonFeature
): Promise<void> {
const hasEqualOrGreaterVersion =
(await db.features.count({
where: {
feature: feature.feature,
version: {
gte: feature.version,
},
},
})) > 0;
// will not update an existing version
if (!hasEqualOrGreaterVersion) {
await db.features.create({
data: {
feature: feature.feature,
type: feature.type,
version: feature.version,
configs: feature.configs as Prisma.InputJsonValue,
},
});
}
}
async function migrateNewFeatureTable(prisma: PrismaService) {
const waitingList = await prisma.newFeaturesWaitingList.findMany();
for (const oldUser of waitingList) {
const user = await prisma.user.findFirst({
where: {
email: oldUser.email,
},
});
if (user) {
const hasEarlyAccess = await prisma.userFeatures.count({
where: {
userId: user.id,
feature: {
feature: FeatureType.EarlyAccess,
},
activated: true,
},
});
if (hasEarlyAccess === 0) {
await prisma.$transaction(async tx => {
const latestFlag = await tx.userFeatures.findFirst({
where: {
userId: user.id,
feature: {
feature: FeatureType.EarlyAccess,
type: FeatureKind.Feature,
},
activated: true,
},
orderBy: {
createdAt: 'desc',
},
});
if (latestFlag) {
return latestFlag.id;
} else {
return tx.userFeatures
.create({
data: {
reason: 'Early access user',
activated: true,
user: {
connect: {
id: user.id,
},
},
feature: {
connect: {
feature_version: {
feature: FeatureType.EarlyAccess,
version: 1,
},
type: FeatureKind.Feature,
},
},
},
})
.then(r => r.id);
}
});
}
}
}
}

View File

@@ -1,37 +0,0 @@
import { QuotaType } from '../../modules/quota/types';
import { PrismaService } from '../../prisma';
export class OldUserFeature1702620653283 {
// do the migration
static async up(db: PrismaService) {
await db.$transaction(async tx => {
const latestFreePlan = await tx.features.findFirstOrThrow({
where: { feature: QuotaType.FreePlanV1 },
orderBy: { version: 'desc' },
select: { id: true },
});
// find all users that don't have any features
const userIds = await db.user.findMany({
where: { NOT: { features: { some: { NOT: { id: { gt: 0 } } } } } },
select: { id: true },
});
console.log(`migrating ${userIds.join('|')} users`);
await tx.userFeatures.createMany({
data: userIds.map(({ id: userId }) => ({
userId,
featureId: latestFreePlan.id,
reason: 'old user feature migration',
activated: true,
})),
});
});
}
// revert the migration
// WARN: this will drop all user features
static async down(db: PrismaService) {
await db.userFeatures.deleteMany({});
}
}

View File

@@ -1,23 +0,0 @@
import type { Snapshot, Workspace } from '@prisma/client';
import { Flatten, Payload } from './types';
interface EventDefinitions {
workspace: {
deleted: Payload<Workspace['id']>;
};
snapshot: {
updated: Payload<
Pick<Snapshot, 'id' | 'workspaceId'> & {
previous: Pick<Snapshot, 'blob' | 'state' | 'updatedAt'>;
}
>;
deleted: Payload<Pick<Snapshot, 'id' | 'workspaceId'>>;
};
}
export type EventKV = Flatten<EventDefinitions>;
export type Event = keyof EventKV;
export type EventPayload<E extends Event> = EventKV[E];

View File

@@ -1,43 +0,0 @@
import { Global, Injectable, Module } from '@nestjs/common';
import {
EventEmitter2,
EventEmitterModule,
OnEvent as RawOnEvent,
} from '@nestjs/event-emitter';
import type { Event, EventPayload } from './events';
@Injectable()
export class EventEmitter {
constructor(private readonly emitter: EventEmitter2) {}
emit<E extends Event>(event: E, payload: EventPayload<E>) {
return this.emitter.emit(event, payload);
}
emitAsync<E extends Event>(event: E, payload: EventPayload<E>) {
return this.emitter.emitAsync(event, payload);
}
on<E extends Event>(event: E, handler: (payload: EventPayload<E>) => void) {
return this.emitter.on(event, handler);
}
once<E extends Event>(event: E, handler: (payload: EventPayload<E>) => void) {
return this.emitter.once(event, handler);
}
}
export const OnEvent = RawOnEvent as (
event: Event,
opts?: Parameters<typeof RawOnEvent>[1]
) => MethodDecorator;
@Global()
@Module({
imports: [EventEmitterModule.forRoot()],
providers: [EventEmitter],
exports: [EventEmitter],
})
export class EventModule {}
export { EventPayload };
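
A short consumption sketch for the typed wrapper (hypothetical service; the event names and payload shapes come from the events definition above, and the '../../event' import path matches the consumers later in this diff):

import { Injectable } from '@nestjs/common';
import { EventEmitter, type EventPayload, OnEvent } from '../../event';

@Injectable()
export class WorkspaceCleanupExample {
  constructor(private readonly event: EventEmitter) {}

  async deleteWorkspace(workspaceId: string) {
    // payload is typed as Workspace['id'], i.e. a plain string
    this.event.emit('workspace.deleted', workspaceId);
  }

  // handler payload is typed as Pick<Snapshot, 'id' | 'workspaceId'>
  @OnEvent('snapshot.deleted')
  onSnapshotDeleted(payload: EventPayload<'snapshot.deleted'>) {
    console.log(`snapshot ${payload.id} removed from ${payload.workspaceId}`);
  }
}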

View File

@@ -1,33 +0,0 @@
export type Payload<T> = {
__payload: true;
data: T;
};
export type Join<A extends string, B extends string> = A extends ''
? B
: `${A}.${B}`;
export type PathType<T, Path extends string> = string extends Path
? unknown
: Path extends keyof T
? T[Path]
: Path extends `${infer K}.${infer R}`
? K extends keyof T
? PathType<T[K], R>
: unknown
: unknown;
export type Leaves<T, P extends string = ''> = T extends Payload<any>
? P
: T extends Record<string, any>
? {
[K in keyof T]: K extends string ? Leaves<T[K], Join<P, K>> : never;
}[keyof T]
: never;
export type Flatten<T> = Leaves<T> extends infer R
? {
// @ts-expect-error yo, ts can't make it
[K in R]: PathType<T, K> extends Payload<infer U> ? U : never;
}
: never;
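
To make the type gymnastics above concrete, this is roughly the shape `Flatten<EventDefinitions>` resolves to for the definitions shown earlier (a hand-expanded sketch, not code from the repository):

// Leaves<EventDefinitions> = 'workspace.deleted' | 'snapshot.updated' | 'snapshot.deleted'
// Flatten then maps each dotted key to the T captured by its Payload<T>:
type FlattenedExample = {
  'workspace.deleted': Workspace['id'];
  'snapshot.updated': Pick<Snapshot, 'id' | 'workspaceId'> & {
    previous: Pick<Snapshot, 'blob' | 'state' | 'updatedAt'>;
  };
  'snapshot.deleted': Pick<Snapshot, 'id' | 'workspaceId'>;
};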

View File

@@ -29,7 +29,6 @@ import { GQLLoggerPlugin } from './graphql/logger-plugin';
context: ({ req, res }: { req: Request; res: Response }) => ({
req,
res,
isAdminQuery: false,
}),
plugins: [new GQLLoggerPlugin()],
};

View File

@@ -20,7 +20,7 @@ export class GQLLoggerPlugin implements ApolloServerPlugin {
const res = reqContext.contextValue.req.res as Response;
const operation = reqContext.request.operationName;
metrics.gql.counter('query_counter').add(1, { operation });
metrics().gqlRequest.add(1, { operation });
const start = Date.now();
return Promise.resolve({
@@ -30,9 +30,7 @@ export class GQLLoggerPlugin implements ApolloServerPlugin {
'Server-Timing',
`gql;dur=${costInMilliseconds};desc="GraphQL"`
);
metrics.gql
.histogram('query_duration')
.record(costInMilliseconds, { operation });
metrics().gqlTimer.record(costInMilliseconds, { operation });
return Promise.resolve();
},
didEncounterErrors: () => {
@@ -41,9 +39,7 @@ export class GQLLoggerPlugin implements ApolloServerPlugin {
'Server-Timing',
`gql;dur=${costInMilliseconds};desc="GraphQL ${operation}"`
);
metrics.gql
.histogram('query_duration')
.record(costInMilliseconds, { operation });
metrics().gqlTimer.record(costInMilliseconds, { operation });
return Promise.resolve();
},
});

View File

@@ -1,129 +1,76 @@
import {
Attributes,
Counter,
Histogram,
Meter,
MetricOptions,
} from '@opentelemetry/api';
import opentelemetry, { Attributes, Observable } from '@opentelemetry/api';
import { getMeter } from './opentelemetry';
type MetricType = 'counter' | 'gauge' | 'histogram';
type Metric<T extends MetricType> = T extends 'counter'
? Counter
: T extends 'gauge'
? Histogram
: T extends 'histogram'
? Histogram
: never;
export type ScopedMetrics = {
[T in MetricType]: (name: string, opts?: MetricOptions) => Metric<T>;
};
type MetricCreators = {
[T in MetricType]: (
meter: Meter,
name: string,
opts?: MetricOptions
) => Metric<T>;
};
export type KnownMetricScopes =
| 'socketio'
| 'gql'
| 'jwst'
| 'auth'
| 'controllers'
| 'doc';
const metricCreators: MetricCreators = {
counter(meter: Meter, name: string, opts?: MetricOptions) {
return meter.createCounter(name, opts);
},
gauge(meter: Meter, name: string, opts?: MetricOptions) {
let value: any;
let attrs: Attributes | undefined;
const ob = meter.createObservableGauge(name, opts);
ob.addCallback(result => {
result.observe(value, attrs);
});
return {
record: (newValue, newAttrs) => {
value = newValue;
attrs = newAttrs;
},
} satisfies Histogram;
},
histogram(meter: Meter, name: string, opts?: MetricOptions) {
return meter.createHistogram(name, opts);
},
};
const scopes = new Map<string, ScopedMetrics>();
function make(scope: string) {
const meter = getMeter();
const metrics = new Map<string, { type: MetricType; metric: any }>();
const prefix = scope + '/';
function getOrCreate<T extends MetricType>(
type: T,
name: string,
opts?: MetricOptions
): Metric<T> {
name = prefix + name;
const metric = metrics.get(name);
if (metric) {
if (type !== metric.type) {
throw new Error(
`Metric ${name} has already been registered as ${metric.type}, but was requested as ${type} again.`
);
}
return metric.metric;
} else {
const metric = metricCreators[type](meter, name, opts);
metrics.set(name, { type, metric });
return metric;
}
}
return {
counter(name, opts) {
return getOrCreate('counter', name, opts);
},
gauge(name, opts) {
return getOrCreate('gauge', name, opts);
},
histogram(name, opts) {
return getOrCreate('histogram', name, opts);
},
} satisfies ScopedMetrics;
interface AsyncMetric {
ob: Observable;
get value(): any;
get attrs(): Attributes | undefined;
}
/**
* @example
*
* ```
* metrics.scope.counter('example_count').add(1, {
* attr1: 'example-event'
* })
* ```
*/
export const metrics = new Proxy<Record<KnownMetricScopes, ScopedMetrics>>(
// @ts-expect-error proxied
{},
{
get(_, scopeName: string) {
let scope = scopes.get(scopeName);
if (!scope) {
scope = make(scopeName);
scopes.set(scopeName, scope);
}
let _metrics: ReturnType<typeof createBusinessMetrics> | undefined = undefined;
return scope;
},
export function getMeter(name = 'business') {
return opentelemetry.metrics.getMeter(name);
}
function createBusinessMetrics() {
const meter = getMeter();
const asyncMetrics: AsyncMetric[] = [];
function createGauge(name: string) {
let value: any;
let attrs: Attributes | undefined;
const ob = meter.createObservableGauge(name);
asyncMetrics.push({
ob,
get value() {
return value;
},
get attrs() {
return attrs;
},
});
return (newValue: any, newAttrs?: Attributes) => {
value = newValue;
attrs = newAttrs;
};
}
);
const metrics = {
socketIOConnectionGauge: createGauge('socket_io_connection'),
gqlRequest: meter.createCounter('gql_request'),
gqlError: meter.createCounter('gql_error'),
gqlTimer: meter.createHistogram('gql_timer'),
jwstCodecMerge: meter.createCounter('jwst_codec_merge'),
jwstCodecDidnotMatch: meter.createCounter('jwst_codec_didnot_match'),
jwstCodecFail: meter.createCounter('jwst_codec_fail'),
authCounter: meter.createCounter('auth'),
authFailCounter: meter.createCounter('auth_fail'),
docHistoryCounter: meter.createCounter('doc_history_created'),
docRecoverCounter: meter.createCounter('doc_history_recovered'),
};
meter.addBatchObservableCallback(
result => {
asyncMetrics.forEach(metric => {
result.observe(metric.ob, metric.value, metric.attrs);
});
},
asyncMetrics.map(({ ob }) => ob)
);
return metrics;
}
export function registerBusinessMetrics() {
if (!_metrics) {
_metrics = createBusinessMetrics();
}
return _metrics;
}
export const metrics = registerBusinessMetrics;
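
A minimal sketch of how the refactored metrics are consumed elsewhere in this diff (attribute values are invented; the instrument names match the createBusinessMetrics map above):

import { metrics } from './metrics';

// counters and histograms are plain OpenTelemetry instruments
metrics().gqlRequest.add(1, { operation: 'getWorkspace' });
metrics().gqlTimer.record(42, { operation: 'getWorkspace' });

// gauges are set through the closure returned by createGauge; the batch
// observable callback reports the latest value on every collection cycle
metrics().socketIOConnectionGauge(128, { flavor: 'sync' });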

View File

@@ -1,6 +1,5 @@
import { MetricExporter } from '@google-cloud/opentelemetry-cloud-monitoring-exporter';
import { TraceExporter } from '@google-cloud/opentelemetry-cloud-trace-exporter';
import { metrics } from '@opentelemetry/api';
import {
CompositePropagator,
W3CBaggagePropagator,
@@ -17,8 +16,6 @@ import { NestInstrumentation } from '@opentelemetry/instrumentation-nestjs-core'
import { SocketIoInstrumentation } from '@opentelemetry/instrumentation-socket.io';
import {
ConsoleMetricExporter,
type MeterProvider,
MetricProducer,
MetricReader,
PeriodicExportingMetricReader,
} from '@opentelemetry/sdk-metrics';
@@ -27,11 +24,10 @@ import {
BatchSpanProcessor,
ConsoleSpanExporter,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-node';
import { PrismaInstrumentation } from '@prisma/instrumentation';
import { PrismaMetricProducer } from './prisma';
import { registerBusinessMetrics } from './metrics';
abstract class OpentelemetryFactor {
abstract getMetricReader(): MetricReader;
@@ -48,14 +44,9 @@ abstract class OpentelemetryFactor {
];
}
getMetricsProducers(): MetricProducer[] {
return [new PrismaMetricProducer()];
}
create() {
const traceExporter = this.getSpanExporter();
return new NodeSDK({
sampler: new TraceIdRatioBasedSampler(0.1),
traceExporter,
metricReader: this.getMetricReader(),
spanProcessor: new BatchSpanProcessor(traceExporter),
@@ -75,11 +66,8 @@ class GCloudOpentelemetryFactor extends OpentelemetryFactor {
override getMetricReader(): MetricReader {
return new PeriodicExportingMetricReader({
exportIntervalMillis: 30000,
exportTimeoutMillis: 10000,
exporter: new MetricExporter({
prefix: 'custom.googleapis.com',
}),
metricProducers: this.getMetricsProducers(),
exportTimeoutMillis: 60000,
exporter: new MetricExporter(),
});
}
@@ -90,9 +78,7 @@ class GCloudOpentelemetryFactor extends OpentelemetryFactor {
class LocalOpentelemetryFactor extends OpentelemetryFactor {
override getMetricReader(): MetricReader {
return new PrometheusExporter({
metricProducers: this.getMetricsProducers(),
});
return new PrometheusExporter();
}
override getSpanExporter(): SpanExporter {
@@ -104,7 +90,6 @@ class DebugOpentelemetryFactor extends OpentelemetryFactor {
override getMetricReader(): MetricReader {
return new PeriodicExportingMetricReader({
exporter: new ConsoleMetricExporter(),
metricProducers: this.getMetricsProducers(),
});
}
@@ -126,30 +111,9 @@ function createSDK() {
return factor?.create();
}
let OPENTELEMETRY_STARTED = false;
function ensureStarted() {
if (!OPENTELEMETRY_STARTED) {
OPENTELEMETRY_STARTED = true;
start();
}
}
function getMeterProvider() {
ensureStarted();
return metrics.getMeterProvider();
}
function registerCustomMetrics() {
const hostMetricsMonitoring = new HostMetrics({
name: 'instance-host-metrics',
meterProvider: getMeterProvider() as MeterProvider,
});
hostMetricsMonitoring.start();
}
export function getMeter(name = 'business') {
return getMeterProvider().getMeter(name);
const host = new HostMetrics({ name: 'instance-host-metrics' });
host.start();
}
export function start() {
@@ -158,5 +122,6 @@ export function start() {
if (sdk) {
sdk.start();
registerCustomMetrics();
registerBusinessMetrics();
}
}

View File

@@ -1,132 +0,0 @@
import { HrTime, ValueType } from '@opentelemetry/api';
import { hrTime } from '@opentelemetry/core';
import { Resource } from '@opentelemetry/resources';
import {
AggregationTemporality,
CollectionResult,
DataPointType,
InstrumentType,
MetricProducer,
ScopeMetrics,
} from '@opentelemetry/sdk-metrics';
import { PrismaService } from '../prisma';
function transformPrismaKey(key: string) {
// replace the first '_' with '/' to form a scope prefix
// example: prisma_client_query_duration_seconds_sum -> prisma/client_query_duration_seconds_sum
return key.replace(/_/, '/');
}
export class PrismaMetricProducer implements MetricProducer {
private readonly startTime: HrTime = hrTime();
async collect(): Promise<CollectionResult> {
const result: CollectionResult = {
resourceMetrics: {
resource: Resource.EMPTY,
scopeMetrics: [],
},
errors: [],
};
if (!PrismaService.INSTANCE) {
return result;
}
const prisma = PrismaService.INSTANCE;
const endTime = hrTime();
const metrics = await prisma.$metrics.json();
const scopeMetrics: ScopeMetrics = {
scope: {
name: '',
},
metrics: [],
};
for (const counter of metrics.counters) {
scopeMetrics.metrics.push({
descriptor: {
name: transformPrismaKey(counter.key),
description: counter.description,
unit: '1',
type: InstrumentType.COUNTER,
valueType: ValueType.INT,
},
dataPointType: DataPointType.SUM,
aggregationTemporality: AggregationTemporality.CUMULATIVE,
dataPoints: [
{
startTime: this.startTime,
endTime: endTime,
value: counter.value,
attributes: counter.labels,
},
],
isMonotonic: true,
});
}
for (const gauge of metrics.gauges) {
scopeMetrics.metrics.push({
descriptor: {
name: transformPrismaKey(gauge.key),
description: gauge.description,
unit: '1',
type: InstrumentType.UP_DOWN_COUNTER,
valueType: ValueType.INT,
},
dataPointType: DataPointType.GAUGE,
aggregationTemporality: AggregationTemporality.CUMULATIVE,
dataPoints: [
{
startTime: this.startTime,
endTime: endTime,
value: gauge.value,
attributes: gauge.labels,
},
],
});
}
for (const histogram of metrics.histograms) {
const boundaries = [];
const counts = [];
for (const [boundary, count] of histogram.value.buckets) {
boundaries.push(boundary);
counts.push(count);
}
scopeMetrics.metrics.push({
descriptor: {
name: transformPrismaKey(histogram.key),
description: histogram.description,
unit: 'ms',
type: InstrumentType.HISTOGRAM,
valueType: ValueType.DOUBLE,
},
dataPointType: DataPointType.HISTOGRAM,
aggregationTemporality: AggregationTemporality.CUMULATIVE,
dataPoints: [
{
startTime: this.startTime,
endTime: endTime,
value: {
buckets: {
boundaries,
counts,
},
count: histogram.value.count,
sum: histogram.value.sum,
},
attributes: histogram.labels,
},
],
});
}
result.resourceMetrics.scopeMetrics.push(scopeMetrics);
return result;
}
}

View File

@@ -1,9 +1,8 @@
import { Attributes } from '@opentelemetry/api';
import { KnownMetricScopes, metrics } from './metrics';
import { getMeter } from './metrics';
export const CallTimer = (
scope: KnownMetricScopes,
name: string,
attrs?: Attributes
): MethodDecorator => {
@@ -19,11 +18,9 @@ export const CallTimer = (
}
desc.value = function (...args: any[]) {
const timer = metrics[scope].histogram(name, {
const timer = getMeter().createHistogram(name, {
description: `function call time costs of ${name}`,
unit: 'ms',
});
const start = Date.now();
const end = () => {
@@ -51,7 +48,6 @@ export const CallTimer = (
};
export const CallCounter = (
scope: KnownMetricScopes,
name: string,
attrs?: Attributes
): MethodDecorator => {
@@ -67,7 +63,7 @@ export const CallCounter = (
}
desc.value = function (...args: any[]) {
const count = metrics[scope].counter(name, {
const count = getMeter().createCounter(name, {
description: `function call counter of ${name}`,
});
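
A rough usage sketch for the decorators after this refactor (hypothetical service, metric names and import path; assumes the scope parameter removed above is gone, leaving only the metric name and optional attributes):

import { CallCounter, CallTimer } from './utils';

class DocJobsExample {
  // records call duration into a 'doc_squash' histogram (unit: ms)
  @CallTimer('doc_squash', { flavor: 'graphql' })
  async squash() {
    // ...
  }

  // increments a 'doc_apply_update' counter on every call
  @CallCounter('doc_apply_update')
  async applyUpdate() {
    // ...
  }
}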

View File

@@ -14,7 +14,7 @@ const TrivialExceptions = [NotFoundException];
@Catch()
export class ExceptionLogger implements ExceptionFilter {
private readonly logger = new Logger('ExceptionLogger');
private logger = new Logger('ExceptionLogger');
catch(exception: Error, host: ArgumentsHost) {
// with useGlobalFilters, the context is always HTTP

View File

@@ -53,8 +53,8 @@ class AuthGuard implements CanActivate {
constructor(
@Inject(NextAuthOptionsProvide)
private readonly nextAuthOptions: NextAuthOptions,
private readonly auth: AuthService,
private readonly prisma: PrismaService,
private auth: AuthService,
private prisma: PrismaService,
private readonly reflector: Reflector
) {}

View File

@@ -72,11 +72,15 @@ export class MailService {
invitationInfo.workspace.name
}</span></p><p style="margin-top:8px;margin-bottom:0;">Click button to join this workspace</p>`;
const subContent =
'Currently, AFFiNE Cloud is in the early access stage. Only Early Access Sponsors can register and log in to AFFiNE Cloud. <a href="https://community.affine.pro/c/insider-general/" style="color: #1e67af" >Please click here for more information.</a>';
const html = emailTemplate({
title: 'You are invited!',
content,
buttonContent: 'Accept & Join',
buttonUrl,
subContent,
});
return this.sendMail({

View File

@@ -11,8 +11,8 @@ import Google from 'next-auth/providers/google';
import { Config } from '../../config';
import { PrismaService } from '../../prisma';
import { SessionService } from '../../session';
import { FeatureType } from '../features';
import { Quota_FreePlanV1 } from '../quota';
import { NewFeaturesKind } from '../users/types';
import { isStaff } from '../users/utils';
import { MailService } from './mailer';
import {
decode,
@@ -44,17 +44,6 @@ export const NextAuthOptionsProvider: FactoryProvider<NextAuthOptions> = {
email: data.email,
avatarUrl: '',
emailVerified: data.emailVerified,
features: {
create: {
reason: 'created by email sign up',
activated: true,
feature: {
connect: {
feature_version: Quota_FreePlanV1,
},
},
},
},
};
if (data.email && !data.name) {
userData.name = data.email.split('@')[0];
@@ -234,23 +223,18 @@ export const NextAuthOptionsProvider: FactoryProvider<NextAuthOptions> = {
}
const email = profile?.email ?? user.email;
if (email) {
// FIXME: cannot inject FeatureManagementService here
// it will cause prisma.account to be undefined
// then prismaAdapter.getUserByAccount will throw error
if (email.endsWith('@toeverything.info')) return true;
return prisma.userFeatures
.count({
if (isStaff(email)) {
return true;
}
return prisma.newFeaturesWaitingList
.findUnique({
where: {
user: {
email,
},
feature: {
feature: FeatureType.EarlyAccess,
},
activated: true,
email,
type: NewFeaturesKind.EarlyAccess,
},
})
.then(count => count > 0);
.then(user => !!user)
.catch(() => false);
}
return false;
},
@@ -258,10 +242,6 @@ export const NextAuthOptionsProvider: FactoryProvider<NextAuthOptions> = {
return url;
},
};
nextAuthOptions.pages = {
newUser: '/auth/onboarding',
};
return nextAuthOptions;
},
inject: [Config, PrismaService, MailService, SessionService],

View File

@@ -89,13 +89,12 @@ export class NextAuthController {
res.redirect(`/signin${query}`);
return;
}
metrics().authCounter.add(1);
const [action, providerId] = req.url // start with request url
.slice(BASE_URL.length) // make relative to baseUrl
.replace(/\?.*/, '') // remove query part, use only path part
.split('/') as [AuthAction, string]; // as array of strings;
metrics.auth.counter('call_counter').add(1, { action, providerId });
const credentialsSignIn =
req.method === 'POST' && providerId === 'credentials';
let userId: string | undefined;
@@ -127,9 +126,7 @@ export class NextAuthController {
const options = this.nextAuthOptions;
if (req.method === 'POST' && action === 'session') {
if (typeof req.body !== 'object' || typeof req.body.data !== 'object') {
metrics.auth
.counter('call_fails_counter')
.add(1, { reason: 'invalid_session_data' });
metrics().authFailCounter.add(1, { reason: 'invalid_session_data' });
throw new BadRequestException(`Invalid new session data`);
}
const user = await this.updateSession(req, req.body.data);
@@ -212,10 +209,9 @@ export class NextAuthController {
if (redirect?.endsWith('api/auth/error?error=AccessDenied')) {
this.logger.log(`Early access redirect headers: ${req.headers}`);
metrics.auth
.counter('call_fails_counter')
.add(1, { reason: 'no_early_access_permission' });
metrics().authFailCounter.add(1, {
reason: 'no_early_access_permission',
});
if (
!req.headers?.referer ||
checkUrlOrigin(req.headers.referer, 'https://accounts.google.com')

View File

@@ -19,7 +19,7 @@ import { nanoid } from 'nanoid';
import { Config } from '../../config';
import { SessionService } from '../../session';
import { CloudThrottlerGuard, Throttle } from '../../throttler';
import { UserType } from '../users';
import { UserType } from '../users/resolver';
import { Auth, CurrentUser } from './guard';
import { AuthService } from './service';

View File

@@ -14,7 +14,6 @@ import { nanoid } from 'nanoid';
import { Config } from '../../config';
import { PrismaService } from '../../prisma';
import { verifyChallengeResponse } from '../../storage';
import { Quota_FreePlanV1 } from '../quota';
import { MailService } from './mailer';
export type UserClaim = Pick<
@@ -24,14 +23,14 @@ export type UserClaim = Pick<
hasPassword?: boolean;
};
export const getUtcTimestamp = () => Math.floor(Date.now() / 1000);
export const getUtcTimestamp = () => Math.floor(new Date().getTime() / 1000);
@Injectable()
export class AuthService {
constructor(
private readonly config: Config,
private readonly prisma: PrismaService,
private readonly mailer: MailService
private config: Config,
private prisma: PrismaService,
private mailer: MailService
) {}
sign(user: UserClaim) {
@@ -191,17 +190,6 @@ export class AuthService {
name,
email,
password: hashedPassword,
features: {
create: {
reason: 'created by api sign up',
activated: true,
feature: {
connect: {
feature_version: Quota_FreePlanV1,
},
},
},
},
},
});
}
@@ -221,17 +209,6 @@ export class AuthService {
data: {
name: 'Unnamed',
email,
features: {
create: {
reason: 'created by invite sign up',
activated: true,
feature: {
connect: {
feature_version: Quota_FreePlanV1,
},
},
},
},
},
});
}
@@ -281,7 +258,6 @@ export class AuthService {
},
});
}
async changeEmail(id: string, newEmail: string): Promise<User> {
const user = await this.prisma.user.findUnique({
where: {

View File

@@ -1,30 +0,0 @@
import { Module } from '@nestjs/common';
import { Field, ObjectType, Query } from '@nestjs/graphql';
import { SERVER_FLAVOR } from '../config';
@ObjectType()
export class ServerConfigType {
@Field({ description: 'server version' })
version!: string;
@Field({ description: 'server flavor' })
flavor!: string;
}
export class ServerConfigResolver {
@Query(() => ServerConfigType, {
description: 'server config',
})
serverConfig(): ServerConfigType {
return {
version: AFFiNE.version,
flavor: SERVER_FLAVOR,
};
}
}
@Module({
providers: [ServerConfigResolver],
})
export class ServerConfigModule {}

View File

@@ -1,57 +1,34 @@
import { isDeepStrictEqual } from 'node:util';
import { Injectable, Logger } from '@nestjs/common';
import { OnEvent } from '@nestjs/event-emitter';
import { Cron, CronExpression } from '@nestjs/schedule';
import type { Snapshot } from '@prisma/client';
import { Config } from '../../config';
import { type EventPayload, OnEvent } from '../../event';
import { metrics } from '../../metrics';
import { PrismaService } from '../../prisma';
import { QuotaService } from '../quota';
import { SubscriptionStatus } from '../payment/service';
import { Permission } from '../workspaces/types';
import { isEmptyBuffer } from './manager';
@Injectable()
export class DocHistoryManager {
private readonly logger = new Logger(DocHistoryManager.name);
constructor(
private readonly config: Config,
private readonly db: PrismaService,
private readonly quota: QuotaService
private readonly db: PrismaService
) {}
@OnEvent('workspace.deleted')
onWorkspaceDeleted(workspaceId: EventPayload<'workspace.deleted'>) {
return this.db.snapshotHistory.deleteMany({
where: {
workspaceId,
},
});
}
@OnEvent('snapshot.deleted')
onSnapshotDeleted({ workspaceId, id }: EventPayload<'snapshot.deleted'>) {
return this.db.snapshotHistory.deleteMany({
where: {
workspaceId,
id,
},
});
}
@OnEvent('snapshot.updated')
async onDocUpdated(
{ workspaceId, id, previous }: EventPayload<'snapshot.updated'>,
forceCreate = false
) {
const last = await this.last(workspaceId, id);
@OnEvent('doc:manager:snapshot:beforeUpdate')
async onDocUpdated(snapshot: Snapshot, forceCreate = false) {
const last = await this.last(snapshot.workspaceId, snapshot.id);
let shouldCreateHistory = false;
if (!last) {
// never created
shouldCreateHistory = true;
} else if (last.timestamp === previous.updatedAt) {
} else if (last.timestamp === snapshot.updatedAt) {
// no change
shouldCreateHistory = false;
} else if (
@@ -59,23 +36,16 @@ export class DocHistoryManager {
forceCreate ||
// last history created before interval in configs
last.timestamp.getTime() <
previous.updatedAt.getTime() - this.config.doc.history.interval
snapshot.updatedAt.getTime() - this.config.doc.history.interval
) {
shouldCreateHistory = true;
}
if (shouldCreateHistory) {
// skip the history recording when no actual update on the snapshot happened
if (last && isDeepStrictEqual(last.state, previous.state)) {
if (last && isDeepStrictEqual(last.state, snapshot.state)) {
this.logger.debug(
`State matches, skip creating history record for ${id} in workspace ${workspaceId}`
);
return;
}
if (isEmptyBuffer(previous.blob)) {
this.logger.debug(
`Doc is empty, skip creating history record for ${id} in workspace ${workspaceId}`
`State matches, skip creating history record for ${snapshot.id} in workspace ${snapshot.workspaceId}`
);
return;
}
@@ -86,24 +56,22 @@ export class DocHistoryManager {
timestamp: true,
},
data: {
workspaceId,
id,
timestamp: previous.updatedAt,
blob: previous.blob,
state: previous.state,
expiredAt: await this.getExpiredDateFromNow(workspaceId),
workspaceId: snapshot.workspaceId,
id: snapshot.id,
timestamp: snapshot.updatedAt,
blob: snapshot.blob,
state: snapshot.state,
expiredAt: await this.getExpiredDateFromNow(snapshot.workspaceId),
},
})
.catch(() => {
// safe to ignore
// only happens when a duplicated history record is created across multiple processes
});
metrics.doc
.counter('history_created_counter', {
description: 'How many times the snapshot history created',
})
.add(1);
this.logger.log(`History created for ${id} in workspace ${workspaceId}.`);
metrics().docHistoryCounter.add(1, {});
this.logger.log(
`History created for ${snapshot.id} in workspace ${snapshot.workspaceId}.`
);
}
}
@@ -121,7 +89,7 @@ export class DocHistoryManager {
workspaceId,
id,
timestamp: {
lt: before,
lte: before,
},
// only include the ones has not expired
expiredAt: {
@@ -208,21 +176,20 @@ export class DocHistoryManager {
}
// save old snapshot as one history record
await this.onDocUpdated({ workspaceId, id, previous: oldSnapshot }, true);
await this.onDocUpdated(oldSnapshot, true);
// WARN:
// we should never update the snapshot while recovering history,
// since that is not how CRDTs resolve state.
// let the user revert on the client and propagate the change through the sync system
// `await this.db.snapshot.update();`
metrics.doc
.counter('history_recovered_counter', {
description: 'How many times history recovered request happened',
})
.add(1);
metrics().docRecoverCounter.add(1, {});
return history.timestamp;
}
/**
* @todo(@darkskygit) refactor with [Usage Control] system
*/
async getExpiredDateFromNow(workspaceId: string) {
const permission = await this.db.workspaceUserPermission.findFirst({
select: {
@@ -239,8 +206,25 @@ export class DocHistoryManager {
throw new Error('Workspace owner not found');
}
const quota = await this.quota.getUserQuota(permission.userId);
return quota.feature.historyPeriodFromNow;
const sub = await this.db.userSubscription.findFirst({
select: {
id: true,
},
where: {
userId: permission.userId,
status: SubscriptionStatus.Active,
},
});
return new Date(
Date.now() +
1000 *
60 *
60 *
24 *
// 30 days for subscription user, 7 days for free user
(sub ? 30 : 7)
);
}
@Cron(CronExpression.EVERY_DAY_AT_MIDNIGHT /* everyday at 12am */)

View File

@@ -1,14 +1,38 @@
import { Module } from '@nestjs/common';
import { DynamicModule } from '@nestjs/common';
import { QuotaModule } from '../quota';
import { DocHistoryManager } from './history';
import { DocManager } from './manager';
@Module({
imports: [QuotaModule],
providers: [DocManager, DocHistoryManager],
exports: [DocManager, DocHistoryManager],
})
export class DocModule {}
export class DocModule {
/**
* @param automation whether to enable the update merging automation logic
*/
private static defModule(automation = true): DynamicModule {
return {
module: DocModule,
providers: [
{
provide: 'DOC_MANAGER_AUTOMATION',
useValue: automation,
},
DocManager,
DocHistoryManager,
],
exports: [DocManager, DocHistoryManager],
};
}
static forRoot() {
return this.defModule();
}
static forSync(): DynamicModule {
return this.defModule(false);
}
static forFeature(): DynamicModule {
return this.defModule(false);
}
}
export { DocHistoryManager, DocManager };
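
The app module further down in this diff selects between these factories per server flavor; a condensed sketch of that wiring (module list trimmed for illustration):

// sync flavor: background update merging disabled
BusinessModules.push(SyncModule, DocModule.forSync());

// graphql / allinone flavors: merging automation enabled
BusinessModules.push(GqlModule, DocModule.forRoot(), PaymentModule);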

View File

@@ -1,15 +1,16 @@
import {
Inject,
Injectable,
Logger,
OnModuleDestroy,
OnModuleInit,
} from '@nestjs/common';
import { EventEmitter2 } from '@nestjs/event-emitter';
import { Snapshot, Update } from '@prisma/client';
import { chunk } from 'lodash-es';
import { defer, retry } from 'rxjs';
import {
applyUpdate,
decodeStateVector,
Doc,
encodeStateAsUpdate,
encodeStateVector,
@@ -18,7 +19,6 @@ import {
import { Cache } from '../../cache';
import { Config } from '../../config';
import { EventEmitter, type EventPayload, OnEvent } from '../../event';
import { metrics } from '../../metrics/metrics';
import { PrismaService } from '../../prisma';
import { mergeUpdatesInApplyWay as jwstMergeUpdates } from '../../storage';
@@ -40,39 +40,9 @@ function compare(yBinary: Buffer, jwstBinary: Buffer, strict = false): boolean {
return compare(yBinary, yBinary2, true);
}
/**
* Detect whether rhs state is newer than lhs state.
*
* How could we tell a state is newer:
*
* i. if the rhs state vector contains more clients, rhs is newer
* ii. if the sizes are equal, compare each client's clock; rhs is newer if it has a later clock for a client lhs knows
*/
function isStateNewer(lhs: Buffer, rhs: Buffer): boolean {
const lhsVector = decodeStateVector(lhs);
const rhsVector = decodeStateVector(rhs);
if (lhsVector.size < rhsVector.size) {
return true;
}
for (const [client, state] of lhsVector) {
const rstate = rhsVector.get(client);
if (!rstate) {
return false;
}
if (state < rstate) {
return true;
}
}
return false;
}
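
A tiny worked example of that comparison rule (state vectors written as plain client → clock maps; purely illustrative, not code from the repository):

// rhs knows an extra client, so its vector is larger -> rhs is newer
// lhs: { 1: 5 }            rhs: { 1: 5, 2: 3 }
//
// equal size, but rhs has seen later updates from client 1 -> rhs is newer
// lhs: { 1: 5 }            rhs: { 1: 9 }
//
// rhs is missing client 2 that lhs knows about -> rhs is NOT newer
// lhs: { 1: 5, 2: 3 }      rhs: { 1: 5 }
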
export function isEmptyBuffer(buf: Buffer): boolean {
function isEmptyBuffer(buf: Buffer): boolean {
return (
buf.length === 0 ||
buf.length == 0 ||
// 0x0000
(buf.length === 2 && buf[0] === 0 && buf[1] === 0)
);
@@ -90,20 +60,22 @@ const MAX_SEQ_NUM = 0x3fffffff; // u31
*/
@Injectable()
export class DocManager implements OnModuleInit, OnModuleDestroy {
private readonly logger = new Logger(DocManager.name);
private logger = new Logger(DocManager.name);
private job: NodeJS.Timeout | null = null;
private readonly seqMap = new Map<string, number>();
private seqMap = new Map<string, number>();
private busy = false;
constructor(
@Inject('DOC_MANAGER_AUTOMATION')
private readonly automation: boolean,
private readonly db: PrismaService,
private readonly config: Config,
private readonly cache: Cache,
private readonly event: EventEmitter
private readonly event: EventEmitter2
) {}
onModuleInit() {
if (this.config.doc.manager.enableUpdateAutoMerging) {
if (this.automation) {
this.logger.log('Use Database');
this.setup();
}
@@ -153,13 +125,13 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
this.config.doc.manager.experimentalMergeWithJwstCodec &&
updates.length < 100 /* avoid overloading */
) {
metrics.jwst.counter('codec_merge_counter').add(1);
metrics().jwstCodecMerge.add(1);
const yjsResult = Buffer.from(encodeStateAsUpdate(doc));
let log = false;
try {
const jwstResult = jwstMergeUpdates(updates);
if (!compare(yjsResult, jwstResult)) {
metrics.jwst.counter('codec_not_match').add(1);
metrics().jwstCodecDidnotMatch.add(1);
this.logger.warn(
`jwst codec result doesn't match yjs codec result for: ${guid}`
);
@@ -170,7 +142,7 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
}
}
} catch (e) {
metrics.jwst.counter('codec_fails_counter').add(1);
metrics().jwstCodecFail.add(1);
this.logger.warn(`jwst apply update failed for ${guid}: ${e}`);
log = true;
} finally {
@@ -221,33 +193,6 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
}
}
@OnEvent('workspace.deleted')
async onWorkspaceDeleted(workspaceId: string) {
await this.db.snapshot.deleteMany({
where: {
workspaceId,
},
});
await this.db.update.deleteMany({
where: {
workspaceId,
},
});
}
@OnEvent('snapshot.deleted')
async onSnapshotDeleted({
id,
workspaceId,
}: EventPayload<'snapshot.deleted'>) {
await this.db.update.deleteMany({
where: {
id,
workspaceId,
},
});
}
/**
* add update to manager for later processing.
*/
@@ -429,17 +374,23 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
}
const { id, workspaceId } = candidate;
// acquire lock
const ok = await this.lockUpdatesForAutoSquash(workspaceId, id);
await this.lockUpdatesForAutoSquash(workspaceId, id, async () => {
try {
await this._get(workspaceId, id);
} catch (e) {
this.logger.error(
`Failed to apply updates for workspace: ${workspaceId}, guid: ${id}`
);
this.logger.error(e);
}
});
if (!ok) {
return;
}
try {
await this._get(workspaceId, id);
} catch (e) {
this.logger.error(
`Failed to apply updates for workspace: ${workspaceId}, guid: ${id}`
);
this.logger.error(e);
} finally {
await this.unlockUpdatesForAutoSquash(workspaceId, id);
}
}
private async getAutoSquashCandidate() {
@@ -461,75 +412,36 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
workspaceId: string,
guid: string,
doc: Doc,
// we always delay the snapshot update to avoid db overload,
// so the value of `updatedAt` will not be accurate to the user's real action time
updatedAt: Date,
initialSeq?: number
) {
return this.lockSnapshotForUpsert(workspaceId, guid, async () => {
const blob = Buffer.from(encodeStateAsUpdate(doc));
const blob = Buffer.from(encodeStateAsUpdate(doc));
const state = Buffer.from(encodeStateVector(doc));
if (isEmptyBuffer(blob)) {
return false;
}
if (isEmptyBuffer(blob)) {
return;
}
const state = Buffer.from(encodeStateVector(doc));
return await this.db.$transaction(async db => {
const snapshot = await db.snapshot.findUnique({
where: {
id_workspaceId: {
id: guid,
workspaceId,
},
},
});
// update
if (snapshot) {
// only update if state is newer
if (isStateNewer(snapshot.state ?? Buffer.from([0]), state)) {
await db.snapshot.update({
select: {
seq: true,
},
where: {
id_workspaceId: {
workspaceId,
id: guid,
},
},
data: {
blob,
state,
updatedAt,
},
});
return true;
} else {
return false;
}
} else {
// create
await db.snapshot.create({
select: {
seq: true,
},
data: {
id: guid,
workspaceId,
blob,
state,
seq: initialSeq,
createdAt: updatedAt,
updatedAt,
},
});
return true;
}
});
await this.db.snapshot.upsert({
select: {
seq: true,
},
where: {
id_workspaceId: {
id: guid,
workspaceId,
},
},
create: {
id: guid,
workspaceId,
blob,
state,
seq: initialSeq,
},
update: {
blob,
state,
},
});
}
@@ -568,35 +480,15 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
...updates.map(u => u.blob)
);
const done = await this.upsert(
workspaceId,
id,
doc,
last.createdAt,
last.seq
);
if (done) {
if (snapshot) {
this.event.emit('snapshot.updated', {
id,
workspaceId,
previous: {
blob: snapshot.blob,
state: snapshot.state,
updatedAt: snapshot.updatedAt,
},
});
}
this.logger.debug(
`Squashed ${updates.length} updates for ${id} in workspace ${workspaceId}`
);
if (snapshot) {
this.event.emit('doc:manager:snapshot:beforeUpdate', snapshot);
}
// always delete updates
// the upsert will return false if the state is not newer, so we don't need to worry about it
const { count } = await this.db.update.deleteMany({
await this.upsert(workspaceId, id, doc, last.seq);
this.logger.debug(
`Squashed ${updates.length} updates for ${id} in workspace ${workspaceId}`
);
await this.db.update.deleteMany({
where: {
id,
workspaceId,
@@ -606,8 +498,7 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
},
});
await this.updateCachedUpdatesCount(workspaceId, id, -count);
await this.updateCachedUpdatesCount(workspaceId, id, -updates.length);
return doc;
}
@@ -690,44 +581,22 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
return null;
}
private async doWithLock<T>(lock: string, job: () => Promise<T>) {
const acquired = await this.cache.setnx(lock, 1, {
ttl: 60 * 1000,
});
if (!acquired) {
return;
}
try {
return await job();
} finally {
await this.cache.delete(lock).catch(e => {
// safe, the lock will be expired when ttl ends
this.logger.error(`Failed to release lock ${lock}`, e);
});
}
}
private async lockUpdatesForAutoSquash<T>(
workspaceId: string,
guid: string,
job: () => Promise<T>
) {
return this.doWithLock(
private async lockUpdatesForAutoSquash(workspaceId: string, guid: string) {
return this.cache.setnx(
`doc:manager:updates-lock:${workspaceId}::${guid}`,
job
1,
{
ttl: 60 * 1000,
}
);
}
async lockSnapshotForUpsert<T>(
workspaceId: string,
guid: string,
job: () => Promise<T>
) {
return this.doWithLock(
`doc:manager:snapshot-lock:${workspaceId}::${guid}`,
job
);
private async unlockUpdatesForAutoSquash(workspaceId: string, guid: string) {
return this.cache
.delete(`doc:manager:updates-lock:${workspaceId}::${guid}`)
.catch(e => {
// safe, the lock will be expired when ttl ends
this.logger.error('Failed to release updates lock', e);
});
}
}

View File

@@ -1,78 +0,0 @@
import { PrismaService } from '../../prisma';
import { Feature, FeatureSchema, FeatureType } from './types';
class FeatureConfig {
readonly config: Feature;
constructor(data: any) {
const config = FeatureSchema.safeParse(data);
if (config.success) {
this.config = config.data;
} else {
throw new Error(`Invalid quota config: ${config.error.message}`);
}
}
/// feature name of quota
get name() {
return this.config.feature;
}
}
export class EarlyAccessFeatureConfig extends FeatureConfig {
constructor(data: any) {
super(data);
if (this.config.feature !== FeatureType.EarlyAccess) {
throw new Error('Invalid feature config: type is not EarlyAccess');
}
}
checkWhiteList(email: string) {
for (const domain in this.config.configs.whitelist) {
if (email.endsWith(domain)) {
return true;
}
}
return false;
}
}
const FeatureConfigMap = {
[FeatureType.EarlyAccess]: EarlyAccessFeatureConfig,
};
const FeatureCache = new Map<
number,
InstanceType<(typeof FeatureConfigMap)[FeatureType]>
>();
export async function getFeature(prisma: PrismaService, featureId: number) {
const cachedQuota = FeatureCache.get(featureId);
if (cachedQuota) {
return cachedQuota;
}
const feature = await prisma.features.findFirst({
where: {
id: featureId,
},
});
if (!feature) {
// this should be unreachable
throw new Error(`Quota config ${featureId} not found`);
}
const ConfigClass = FeatureConfigMap[feature.feature as FeatureType];
if (!ConfigClass) {
throw new Error(`Feature config ${featureId} not found`);
}
const config = new ConfigClass(feature);
// we always edit quota config as a new quota config
// so we can cache it by featureId
FeatureCache.set(featureId, config);
return config;
}

View File

@@ -1,21 +0,0 @@
import { Module } from '@nestjs/common';
import { PrismaService } from '../../prisma';
import { FeatureManagementService } from './management';
import { FeatureService } from './service';
/**
* Feature module provides per-user feature flag management.
* includes:
* - feature query/update/permit
* - feature statistics
*/
@Module({
providers: [FeatureService, FeatureManagementService],
exports: [FeatureService, FeatureManagementService],
})
export class FeatureModule {}
export { type CommonFeature, commonFeatureSchema } from './types';
export { FeatureKind, Features, FeatureType } from './types';
export { FeatureManagementService, FeatureService, PrismaService };

View File

@@ -1,89 +0,0 @@
import { Injectable, Logger, OnModuleInit } from '@nestjs/common';
import { Config } from '../../config';
import { PrismaService } from '../../prisma';
import { EarlyAccessFeatureConfig } from './feature';
import { FeatureService } from './service';
import { FeatureType } from './types';
enum NewFeaturesKind {
EarlyAccess,
}
@Injectable()
export class FeatureManagementService implements OnModuleInit {
protected logger = new Logger(FeatureManagementService.name);
private earlyAccessFeature?: EarlyAccessFeatureConfig;
constructor(
private readonly feature: FeatureService,
private readonly prisma: PrismaService,
private readonly config: Config
) {}
async onModuleInit() {
this.earlyAccessFeature = await this.feature.getFeature(
FeatureType.EarlyAccess
);
}
// ======== Admin ========
// todo(@darkskygit): replace this with abac
isStaff(email: string) {
return this.earlyAccessFeature?.checkWhiteList(email) ?? false;
}
// ======== Early Access ========
async addEarlyAccess(userId: string) {
return this.feature.addUserFeature(
userId,
FeatureType.EarlyAccess,
1,
'Early access user'
);
}
async removeEarlyAccess(userId: string) {
return this.feature.removeUserFeature(userId, FeatureType.EarlyAccess);
}
async listEarlyAccess() {
return this.feature.listFeatureUsers(FeatureType.EarlyAccess);
}
/// check early access by email
async canEarlyAccess(email: string) {
if (this.config.featureFlags.earlyAccessPreview && !this.isStaff(email)) {
const user = await this.prisma.user.findFirst({
where: {
email,
},
});
if (user) {
const canEarlyAccess = await this.feature
.hasFeature(user.id, FeatureType.EarlyAccess)
.catch(() => false);
if (canEarlyAccess) {
return true;
}
// TODO: Outdated, switch to feature gates
const oldCanEarlyAccess = await this.prisma.newFeaturesWaitingList
.findUnique({
where: { email, type: NewFeaturesKind.EarlyAccess },
})
.then(x => !!x)
.catch(() => false);
if (oldCanEarlyAccess) {
this.logger.warn(
`User ${email} has early access in old table but not in new table`
);
}
return oldCanEarlyAccess;
}
return false;
} else {
return true;
}
}
}

View File

@@ -1,184 +0,0 @@
import { Injectable } from '@nestjs/common';
import { PrismaService } from '../../prisma';
import { UserType } from '../users/types';
import { getFeature } from './feature';
import { FeatureKind, FeatureType } from './types';
@Injectable()
export class FeatureService {
constructor(private readonly prisma: PrismaService) {}
async getFeaturesVersion() {
const features = await this.prisma.features.findMany({
where: {
type: FeatureKind.Feature,
},
select: {
feature: true,
version: true,
},
});
return features.reduce(
(acc, feature) => {
acc[feature.feature] = feature.version;
return acc;
},
{} as Record<string, number>
);
}
async getFeature(feature: FeatureType) {
const data = await this.prisma.features.findFirst({
where: {
feature,
type: FeatureKind.Feature,
},
select: { id: true },
orderBy: {
version: 'desc',
},
});
if (data) {
return getFeature(this.prisma, data.id);
}
return undefined;
}
async addUserFeature(
userId: string,
feature: FeatureType,
version: number,
reason: string,
expiredAt?: Date | string
) {
return this.prisma.$transaction(async tx => {
const latestFlag = await tx.userFeatures.findFirst({
where: {
userId,
feature: {
feature,
type: FeatureKind.Feature,
},
activated: true,
},
orderBy: {
createdAt: 'desc',
},
});
if (latestFlag) {
return latestFlag.id;
} else {
return tx.userFeatures
.create({
data: {
reason,
expiredAt,
activated: true,
user: {
connect: {
id: userId,
},
},
feature: {
connect: {
feature_version: {
feature,
version,
},
type: FeatureKind.Feature,
},
},
},
})
.then(r => r.id);
}
});
}
async removeUserFeature(userId: string, feature: FeatureType) {
return this.prisma.userFeatures
.updateMany({
where: {
userId,
feature: {
feature,
type: FeatureKind.Feature,
},
activated: true,
},
data: {
activated: false,
},
})
.then(r => r.count);
}
async getUserFeatures(userId: string) {
const features = await this.prisma.userFeatures.findMany({
where: {
user: { id: userId },
feature: {
type: FeatureKind.Feature,
},
},
select: {
activated: true,
reason: true,
createdAt: true,
expiredAt: true,
featureId: true,
},
});
const configs = await Promise.all(
features.map(async feature => ({
...feature,
feature: await getFeature(this.prisma, feature.featureId),
}))
);
return configs.filter(feature => !!feature.feature);
}
async listFeatureUsers(feature: FeatureType): Promise<UserType[]> {
return this.prisma.userFeatures
.findMany({
where: {
activated: true,
feature: {
feature: feature,
type: FeatureKind.Feature,
},
},
select: {
user: {
select: {
id: true,
name: true,
avatarUrl: true,
email: true,
emailVerified: true,
createdAt: true,
},
},
},
})
.then(users => users.map(user => user.user));
}
async hasFeature(userId: string, feature: FeatureType) {
return this.prisma.userFeatures
.count({
where: {
userId,
activated: true,
feature: {
feature,
type: FeatureKind.Feature,
},
},
})
.then(count => count > 0);
}
}

View File

@@ -1,65 +0,0 @@
import { URL } from 'node:url';
import { z } from 'zod';
/// ======== common schema ========
export enum FeatureKind {
Feature,
Quota,
}
export const commonFeatureSchema = z.object({
feature: z.string(),
type: z.nativeEnum(FeatureKind),
version: z.number(),
configs: z.unknown(),
});
export type CommonFeature = z.infer<typeof commonFeatureSchema>;
/// ======== feature define ========
export enum FeatureType {
EarlyAccess = 'early_access',
}
function checkHostname(host: string) {
try {
return new URL(`https://${host}`).hostname === host;
} catch (_) {
return false;
}
}
const featureEarlyAccess = z.object({
feature: z.literal(FeatureType.EarlyAccess),
configs: z.object({
whitelist: z
.string()
.startsWith('@')
.refine(domain => checkHostname(domain.slice(1)))
.array(),
}),
});
export const Features: Feature[] = [
{
feature: FeatureType.EarlyAccess,
type: FeatureKind.Feature,
version: 1,
configs: {
whitelist: ['@toeverything.info'],
},
},
];
/// ======== schema infer ========
export const FeatureSchema = commonFeatureSchema
.extend({
type: z.literal(FeatureKind.Feature),
})
.and(z.discriminatedUnion('feature', [featureEarlyAccess]));
export type Feature = z.infer<typeof FeatureSchema>;
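
A small sketch of a record that passes this schema (the whitelist entry mirrors the Features array above; the parse call itself is illustrative):

const candidate = {
  feature: FeatureType.EarlyAccess,
  type: FeatureKind.Feature,
  version: 1,
  configs: {
    // each entry must start with '@' and be followed by a valid hostname
    whitelist: ['@toeverything.info'],
  },
};

const parsed = FeatureSchema.safeParse(candidate);
// parsed.success === true; an entry such as 'not-a-domain' would fail validation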

View File

@@ -1,61 +1,51 @@
import { DynamicModule, Type } from '@nestjs/common';
import { EventEmitterModule } from '@nestjs/event-emitter';
import { ScheduleModule } from '@nestjs/schedule';
import { SERVER_FLAVOR } from '../config';
import { GqlModule } from '../graphql.module';
import { ServerConfigModule } from './config';
import { AuthModule } from './auth';
import { DocModule } from './doc';
import { PaymentModule } from './payment';
import { QuotaModule } from './quota';
import { SelfHostedModule } from './self-hosted';
import { SyncModule } from './sync';
import { UsersModule } from './users';
import { WorkspaceModule } from './workspaces';
const BusinessModules: (Type | DynamicModule)[] = [];
const { SERVER_FLAVOR } = process.env;
const BusinessModules: (Type | DynamicModule)[] = [
EventEmitterModule.forRoot({
global: true,
}),
];
switch (SERVER_FLAVOR) {
case 'sync':
BusinessModules.push(SyncModule, DocModule);
break;
case 'selfhosted':
BusinessModules.push(
ServerConfigModule,
SelfHostedModule,
ScheduleModule.forRoot(),
GqlModule,
WorkspaceModule,
UsersModule,
SyncModule,
DocModule
);
BusinessModules.push(SyncModule, DocModule.forSync());
break;
case 'graphql':
BusinessModules.push(
ServerConfigModule,
ScheduleModule.forRoot(),
GqlModule,
WorkspaceModule,
UsersModule,
DocModule,
PaymentModule,
QuotaModule
AuthModule,
DocModule.forRoot(),
PaymentModule
);
break;
case 'allinone':
default:
BusinessModules.push(
ServerConfigModule,
ScheduleModule.forRoot(),
GqlModule,
WorkspaceModule,
UsersModule,
QuotaModule,
AuthModule,
SyncModule,
DocModule,
DocModule.forRoot(),
PaymentModule
);
break;
}
export { BusinessModules, SERVER_FLAVOR };
export { BusinessModules };

View File

@@ -1,7 +1,6 @@
import { Module } from '@nestjs/common';
import { FeatureModule } from '../features';
import { QuotaModule } from '../quota';
import { UsersModule } from '../users';
import { SubscriptionResolver, UserSubscriptionResolver } from './resolver';
import { ScheduleManager } from './schedule';
import { SubscriptionService } from './service';
@@ -9,7 +8,7 @@ import { StripeProvider } from './stripe';
import { StripeWebhook } from './webhook';
@Module({
imports: [FeatureModule, QuotaModule],
imports: [UsersModule],
providers: [
ScheduleManager,
StripeProvider,

View File

@@ -1,7 +1,6 @@
import { HttpStatus } from '@nestjs/common';
import {
Args,
Context,
Field,
Int,
Mutation,
@@ -53,7 +52,7 @@ class SubscriptionPrice {
}
@ObjectType('UserSubscription')
export class UserSubscriptionType implements Partial<UserSubscription> {
class UserSubscriptionType implements Partial<UserSubscription> {
@Field({ name: 'id' })
stripeSubscriptionId!: string;
@@ -255,13 +254,8 @@ export class UserSubscriptionResolver {
constructor(private readonly db: PrismaService) {}
@ResolveField(() => UserSubscriptionType, { nullable: true })
async subscription(
@Context() ctx: { isAdminQuery: boolean },
@CurrentUser() me: User,
@Parent() user: User
) {
// allow admin to query other user's subscription
if (!ctx.isAdminQuery && me.id !== user.id) {
async subscription(@CurrentUser() me: User, @Parent() user: User) {
if (me.id !== user.id) {
throw new GraphQLError(
'You are not allowed to access this subscription',
{

View File

@@ -11,8 +11,7 @@ import Stripe from 'stripe';
import { Config } from '../../config';
import { PrismaService } from '../../prisma';
import { FeatureManagementService } from '../features';
import { QuotaService, QuotaType } from '../quota';
import { UsersService } from '../users';
import { ScheduleManager } from './schedule';
const OnEvent = (
@@ -31,7 +30,6 @@ export enum SubscriptionPlan {
Pro = 'pro',
Team = 'team',
Enterprise = 'enterprise',
SelfHosted = 'selfhosted',
}
export function encodeLookupKey(
@@ -61,11 +59,6 @@ export enum SubscriptionStatus {
Trialing = 'trialing',
}
const SubscriptionActivated: Stripe.Subscription.Status[] = [
SubscriptionStatus.Active,
SubscriptionStatus.Trialing,
];
export enum InvoiceStatus {
Draft = 'draft',
Open = 'open',
@@ -88,9 +81,8 @@ export class SubscriptionService {
config: Config,
private readonly stripe: Stripe,
private readonly db: PrismaService,
private readonly scheduleManager: ScheduleManager,
private readonly features: FeatureManagementService,
private readonly quota: QuotaService
private readonly user: UsersService,
private readonly scheduleManager: ScheduleManager
) {
this.paymentConfig = config.payment;
@@ -478,16 +470,6 @@ export class SubscriptionService {
}
}
private getPlanQuota(plan: SubscriptionPlan) {
if (plan === SubscriptionPlan.Free) {
return QuotaType.FreePlanV1;
} else if (plan === SubscriptionPlan.Pro) {
return QuotaType.ProPlanV1;
} else {
throw new Error(`Unknown plan: ${plan}`);
}
}
private async saveSubscription(
user: User,
subscription: Stripe.Subscription,
@@ -500,28 +482,23 @@ export class SubscriptionService {
subscription = await this.stripe.subscriptions.retrieve(subscription.id);
}
// get next bill date from upcoming invoice
// see https://stripe.com/docs/api/invoices/upcoming
let nextBillAt: Date | null = null;
if (
(subscription.status === SubscriptionStatus.Active ||
subscription.status === SubscriptionStatus.Trialing) &&
!subscription.canceled_at
) {
nextBillAt = new Date(subscription.current_period_end * 1000);
}
const price = subscription.items.data[0].price;
if (!price.lookup_key) {
throw new Error('Unexpected subscription with no key');
}
const [plan, recurring] = decodeLookupKey(price.lookup_key);
const planActivated = SubscriptionActivated.includes(subscription.status);
let nextBillAt: Date | null = null;
if (planActivated) {
// update user's quota if plan activated
await this.quota.switchUserQuota(user.id, this.getPlanQuota(plan));
// get next bill date from upcoming invoice
// see https://stripe.com/docs/api/invoices/upcoming
if (!subscription.canceled_at) {
nextBillAt = new Date(subscription.current_period_end * 1000);
}
} else {
// switch to free plan if subscription is canceled
await this.quota.switchUserQuota(user.id, QuotaType.FreePlanV1);
}
const commonData = {
start: new Date(subscription.current_period_start * 1000),
@@ -680,7 +657,7 @@ export class SubscriptionService {
user: User,
couponType: CouponType
): Promise<string | null> {
const earlyAccess = await this.features.canEarlyAccess(user.email);
const earlyAccess = await this.user.isEarlyAccessUser(user.email);
if (earlyAccess) {
try {
const coupon = await this.stripe.coupons.retrieve(couponType);
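
Both sides of the saveSubscription hunk above compute the next bill date only for an activated, non-canceled subscription. A standalone sketch of that rule follows; the helper name is made up for illustration, while the Stripe fields (`status`, `canceled_at`, and `current_period_end`, a unix timestamp in seconds) are the real API shape.

// Sketch only; nextBillDate is not a method of SubscriptionService.
import type Stripe from 'stripe';

const SubscriptionActivated: Stripe.Subscription.Status[] = ['active', 'trialing'];

function nextBillDate(subscription: Stripe.Subscription): Date | null {
  if (
    SubscriptionActivated.includes(subscription.status) &&
    !subscription.canceled_at
  ) {
    // Stripe reports period boundaries in seconds; Date expects milliseconds.
    return new Date(subscription.current_period_end * 1000);
  }
  return null;
}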

View File

@@ -1,5 +0,0 @@
export const OneKB = 1024;
export const OneMB = OneKB * OneKB;
export const OneGB = OneKB * OneMB;
export const OneDay = 1000 * 60 * 60 * 24;
export const ByteUnit = ['B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'];
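
These constants feed the `formatSize`/`formatDate` helpers referenced by the quota config below. As a rough illustration only (the real helpers live in `./types` and may differ), a size formatter over `ByteUnit` could look like this:

// Assumed implementation, shown for context only.
const ByteUnit = ['B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'];

function formatSize(bytes: number): string {
  let unit = 0;
  let size = bytes;
  while (size >= 1024 && unit < ByteUnit.length - 1) {
    size /= 1024;
    unit += 1;
  }
  return `${Number(size.toFixed(1))} ${ByteUnit[unit]}`;
}

// e.g. formatSize(10 * 1024 * 1024) === '10 MB'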

View File

@@ -1,21 +0,0 @@
import { Module } from '@nestjs/common';
import { PermissionService } from '../workspaces/permission';
import { QuotaService } from './service';
import { QuotaManagementService } from './storage';
/**
* Quota module provides per-user quota management.
* includes:
* - quota query/update/permit
* - quota statistics
*/
@Module({
providers: [PermissionService, QuotaService, QuotaManagementService],
exports: [QuotaService, QuotaManagementService],
})
export class QuotaModule {}
export { QuotaManagementService, QuotaService };
export { Quota_FreePlanV1, Quota_ProPlanV1, Quotas } from './schema';
export { QuotaType } from './types';

View File

@@ -1,81 +0,0 @@
import { PrismaService } from '../../prisma';
import { formatDate, formatSize, Quota, QuotaSchema } from './types';
const QuotaCache = new Map<number, QuotaConfig>();
export class QuotaConfig {
readonly config: Quota;
static async get(prisma: PrismaService, featureId: number) {
const cachedQuota = QuotaCache.get(featureId);
if (cachedQuota) {
return cachedQuota;
}
const quota = await prisma.features.findFirst({
where: {
id: featureId,
},
});
if (!quota) {
throw new Error(`Quota config ${featureId} not found`);
}
const config = new QuotaConfig(quota);
// an edit to a quota config always creates a new config record,
// so it is safe to cache configs by featureId
QuotaCache.set(featureId, config);
return config;
}
private constructor(data: any) {
const config = QuotaSchema.safeParse(data);
if (config.success) {
this.config = config.data;
} else {
throw new Error(
`Invalid quota config: ${config.error.message}, ${JSON.stringify(data)}`
);
}
}
/// feature name of quota
get name() {
return this.config.feature;
}
get blobLimit() {
return this.config.configs.blobLimit;
}
get storageQuota() {
return this.config.configs.storageQuota;
}
get historyPeriod() {
return this.config.configs.historyPeriod;
}
get historyPeriodFromNow() {
return new Date(Date.now() + this.historyPeriod);
}
get memberLimit() {
return this.config.configs.memberLimit;
}
get humanReadable() {
return {
name: this.config.configs.name,
blobLimit: formatSize(this.blobLimit),
storageQuota: formatSize(this.storageQuota),
historyPeriod: formatDate(this.historyPeriod),
memberLimit: this.memberLimit.toString(),
};
}
}
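
A hypothetical consumer of the deleted `QuotaConfig`, only to show the lookup-and-cache flow; `prisma` and `featureId` are placeholders and the printed values are examples, not fixtures from the repository.

// Hypothetical usage (inside an async context); not part of the diff.
const freePlan = await QuotaConfig.get(prisma, featureId);
console.log(freePlan.humanReadable);
// e.g. { name: 'Free', blobLimit: '10 MB', storageQuota: '10 GB', ... }

// A second lookup with the same featureId is served from QuotaCache.
const again = await QuotaConfig.get(prisma, featureId);
console.assert(again === freePlan);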

View File

@@ -1,50 +0,0 @@
import { FeatureKind } from '../features';
import { OneDay, OneGB, OneMB } from './constant';
import { Quota, QuotaType } from './types';
export const Quotas: Quota[] = [
{
feature: QuotaType.FreePlanV1,
type: FeatureKind.Quota,
version: 1,
configs: {
// quota name
name: 'Free',
// single blob limit 10MB
blobLimit: 10 * OneMB,
// total blob limit 10GB
storageQuota: 10 * OneGB,
// history period of validity 7 days
historyPeriod: 7 * OneDay,
// member limit 3
memberLimit: 3,
},
},
{
feature: QuotaType.ProPlanV1,
type: FeatureKind.Quota,
version: 1,
configs: {
// quota name
name: 'Pro',
// single blob limit 100MB
blobLimit: 100 * OneMB,
// total blob limit 100GB
storageQuota: 100 * OneGB,
// history period of validity 30 days
historyPeriod: 30 * OneDay,
// member limit 10
memberLimit: 10,
},
},
];
export const Quota_FreePlanV1 = {
feature: Quotas[0].feature,
version: Quotas[0].version,
};
export const Quota_ProPlanV1 = {
feature: Quotas[1].feature,
version: Quotas[1].version,
};
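
For a sense of scale, the limits above expand to the following raw values (plain arithmetic on the constants from `./constant`, no new data):

// Free plan: blobLimit     = 10 * OneMB   = 10,485,760 bytes     (10 MB per blob)
//            storageQuota  = 10 * OneGB   = 10,737,418,240 bytes (10 GB total)
//            historyPeriod = 7 * OneDay   = 604,800,000 ms       (7 days)
// Pro plan:  blobLimit     = 100 * OneMB, storageQuota = 100 * OneGB,
//            historyPeriod = 30 * OneDay  = 2,592,000,000 ms     (30 days)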

View File

@@ -1,147 +0,0 @@
import { Injectable } from '@nestjs/common';
import { PrismaService } from '../../prisma';
import { FeatureKind } from '../features';
import { QuotaConfig } from './quota';
import { QuotaType } from './types';
@Injectable()
export class QuotaService {
constructor(private readonly prisma: PrismaService) {}
// get the currently activated quota of a user
async getUserQuota(userId: string) {
const quota = await this.prisma.userFeatures.findFirst({
where: {
user: {
id: userId,
},
feature: {
type: FeatureKind.Quota,
},
activated: true,
},
select: {
reason: true,
createdAt: true,
expiredAt: true,
featureId: true,
},
});
if (!quota) {
// this should be unreachable
throw new Error(`User ${userId} has no quota`);
}
const feature = await QuotaConfig.get(this.prisma, quota.featureId);
return { ...quota, feature };
}
// get all quota records of a user
async getUserQuotas(userId: string) {
const quotas = await this.prisma.userFeatures.findMany({
where: {
user: {
id: userId,
},
feature: {
type: FeatureKind.Quota,
},
},
select: {
activated: true,
reason: true,
createdAt: true,
expiredAt: true,
featureId: true,
},
});
const configs = await Promise.all(
quotas.map(async quota => {
try {
return {
...quota,
feature: await QuotaConfig.get(this.prisma, quota.featureId),
};
} catch (_) {}
return null as unknown as typeof quota & {
feature: QuotaConfig;
};
})
);
return configs.filter(quota => !!quota);
}
// switch user to a new quota
// currently each user can only have one quota
async switchUserQuota(
userId: string,
quota: QuotaType,
reason?: string,
expiredAt?: Date
) {
await this.prisma.$transaction(async tx => {
const latestPlanVersion = await tx.features.aggregate({
where: {
feature: quota,
},
_max: {
version: true,
},
});
// deactivate all existing quotas for this user
await tx.userFeatures.updateMany({
where: {
id: undefined,
userId,
feature: {
type: FeatureKind.Quota,
},
},
data: {
activated: false,
},
});
await tx.userFeatures.create({
data: {
user: {
connect: {
id: userId,
},
},
feature: {
connect: {
feature_version: {
feature: quota,
version: latestPlanVersion._max.version || 1,
},
type: FeatureKind.Quota,
},
},
reason: reason ?? 'switch quota',
activated: true,
expiredAt,
},
});
});
}
async hasQuota(userId: string, quota: QuotaType) {
return this.prisma.userFeatures
.count({
where: {
userId,
feature: {
feature: quota,
type: FeatureKind.Quota,
},
activated: true,
},
})
.then(count => count > 0);
}
}
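
A hypothetical consumer of the deleted `QuotaService`, showing the switch-then-read flow; `ExampleQuotaConsumer`, the import path, and the reason string are made up, while the method names and `QuotaType` value come from the code above.

// Hypothetical usage sketch; not part of the diff.
import { Injectable } from '@nestjs/common';
import { QuotaService, QuotaType } from '../quota'; // per the quota index above

@Injectable()
export class ExampleQuotaConsumer {
  constructor(private readonly quota: QuotaService) {}

  async upgradeToPro(userId: string) {
    // Deactivates any existing quota rows, then activates the latest ProPlanV1.
    await this.quota.switchUserQuota(userId, QuotaType.ProPlanV1, 'upgrade to pro');
    // Reads back the activated quota together with its parsed QuotaConfig.
    const { feature } = await this.quota.getUserQuota(userId);
    return feature.humanReadable;
  }
}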

Some files were not shown because too many files have changed in this diff.