Compare commits

..

2 Commits

Author  SHA1  Message  Date
DarkSky  ae4c201e40  fix: use secure websocket (#5297)  2023-12-14 11:28:25 +08:00
DarkSky  6724e5a537  feat: only follow serverUrlPrefix at redirect to client (#5295)  2023-12-14 11:28:09 +08:00
1153 changed files with 41200 additions and 63905 deletions


@@ -23,7 +23,10 @@
"y-provider",
"debug",
"storage",
"infra"
"infra",
"plugin-cli",
"sdk",
"plugin"
]
]
}


@@ -11,4 +11,6 @@ e2e-dist-*
static
web-static
public
packages/common/sdk/src/*.d.ts
packages/common/sdk/src/*.js
packages/frontend/i18n/src/i18n-generated.ts


@@ -61,6 +61,7 @@ const allPackages = [
'packages/frontend/core',
'packages/frontend/electron',
'packages/frontend/graphql',
'packages/frontend/hooks',
'packages/frontend/i18n',
'packages/frontend/native',
'packages/frontend/templates',
@@ -68,8 +69,10 @@ const allPackages = [
'packages/common/debug',
'packages/common/env',
'packages/common/infra',
'packages/common/sdk',
'packages/common/theme',
'packages/common/y-indexeddb',
'packages/plugins/copilot',
'tools/cli',
'tests/storybook',
];
@@ -123,8 +126,6 @@ const config = {
'no-cond-assign': 'off',
'no-constant-binary-expression': 'error',
'no-constructor-return': 'error',
'no-self-compare': 'error',
eqeqeq: ['error', 'always', { null: 'ignore' }],
'react/prop-types': 'off',
'react/jsx-no-useless-fragment': 'error',
'@typescript-eslint/consistent-type-imports': 'error',
@@ -132,9 +133,6 @@ const config = {
'@typescript-eslint/no-explicit-any': 'off',
'@typescript-eslint/no-empty-function': 'off',
'@typescript-eslint/await-thenable': 'error',
'@typescript-eslint/require-array-sort-compare': 'error',
'@typescript-eslint/unified-signatures': 'error',
'@typescript-eslint/prefer-for-of': 'error',
'@typescript-eslint/no-unused-vars': [
'error',
{
@@ -206,17 +204,6 @@ const config = {
},
],
'unicorn/no-unnecessary-await': 'error',
'unicorn/no-useless-fallback-in-spread': 'error',
'unicorn/prefer-dom-node-dataset': 'error',
'unicorn/prefer-dom-node-append': 'error',
'unicorn/prefer-dom-node-remove': 'error',
'unicorn/prefer-array-some': 'error',
'unicorn/prefer-date-now': 'error',
'unicorn/prefer-blob-reading-methods': 'error',
'unicorn/no-typeof-undefined': 'error',
'unicorn/no-useless-promise-resolve-reject': 'error',
'unicorn/no-new-array': 'error',
'unicorn/new-for-builtins': 'error',
'sonarjs/no-all-duplicated-branches': 'error',
'sonarjs/no-element-overwrite': 'error',
'sonarjs/no-empty-collection': 'error',
@@ -267,7 +254,6 @@ const config = {
},
],
'@typescript-eslint/no-misused-promises': ['error'],
'@typescript-eslint/prefer-readonly': 'error',
'i/no-extraneous-dependencies': ['error'],
'react-hooks/exhaustive-deps': [
'warn',

.github/CLA.md

@@ -33,6 +33,32 @@ You accept and agree to the following terms and conditions for your past, presen
9. This Agreement will be governed by the laws of Republic of Singapore without reference to conflict of laws principles.
## How To Sign
## List of Contributors
Visit https://cla-assistant.io/toeverything/AFFiNE and sign it.
The below-signed are contributors to a code repository that is part of the project named "AFFiNE". Each below-signed contributor has read, understood, and agrees to the terms above in the section within this document entitled "AFFiNE Contributor License Agreement" as of the date beside their real name (or entity name) and GitHub account name.
---
<!--
Example:
- Dark Sky, @darkskygit, 2022/07/22
-->
- Dark Sky, @darkskygit, 2022/07/22
- Lin Onetwo, @linonetwo, 2022/02/14
- zqran, @zqran, 2023/02/17
- Alessio Gravili, @AlessioGr, 2023/03/04
- Victor Nanka, @victornanka, 2023/03/09
- Aditya Sharma, @adityash1, 2023/03/21
- Fangdun Tsai, @fundon, 2023/03/21
- Zhilin Liu, @lzlme, 2023/04/09
- Skye Sun, @skyesun, 2023/04/14
- Jordy Delgado, @Jdelgad8, 2023/04/17
- Howard Do, @howarddo2208, 2023/04/20
- 三咲智子 Kevin Deng, @sxzz, 2023/04/21
- Moeyua, @moeyua, 2023/04/22
- Shishu, @shishudesu, 2023/05/19
- Kushagra Singh, @kush002, 2023/06/28
- Sarvesh Kumar, @sarvesh521, 2023/08/25
- 微扰理论 Qinghao Huang, @wfnuser, 2023/09/29


@@ -14,28 +14,14 @@ inputs:
runs:
using: 'composite'
steps:
- name: Print rustup toolchain version
shell: bash
id: rustup-version
run: |
export RUST_TOOLCHAIN_VERSION="$(grep 'channel' rust-toolchain.toml | head -1 | awk -F '"' '{print $2}')"
echo "Rust toolchain version: $RUST_TOOLCHAIN_VERSION"
echo "RUST_TOOLCHAIN_VERSION=$RUST_TOOLCHAIN_VERSION" >> "$GITHUB_OUTPUT"
- name: Setup Rust
uses: dtolnay/rust-toolchain@stable
with:
toolchain: '${{ steps.rustup-version.outputs.RUST_TOOLCHAIN_VERSION }}'
toolchain: stable
targets: ${{ inputs.target }}
env:
CARGO_INCREMENTAL: '1'
- name: Set CC
if: ${{ contains(inputs.target, 'linux') && inputs.package != '@affine/native' }}
shell: bash
run: |
echo "CC=clang" >> "$GITHUB_ENV"
echo "TARGET_CC=clang" >> "$GITHUB_ENV"
- name: Cache cargo
uses: actions/cache@v3
with:
@@ -43,13 +29,47 @@ runs:
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
~/.napi-rs
.cargo-cache
target/${{ inputs.target }}
key: stable-${{ inputs.target }}-cargo-cache
- name: Build
if: ${{ inputs.target != 'x86_64-unknown-linux-gnu' && inputs.target != 'aarch64-unknown-linux-gnu' }}
shell: bash
run: |
yarn workspace ${{ inputs.package }} nx build ${{ inputs.package }} --target ${{ inputs.target }} --use-napi-cross
yarn workspace ${{ inputs.package }} nx build ${{ inputs.package }} --target ${{ inputs.target }}
env:
NX_CLOUD_ACCESS_TOKEN: ${{ inputs.nx_token }}
DEBUG: 'napi:*'
- name: Build
if: ${{ inputs.target == 'x86_64-unknown-linux-gnu' }}
uses: addnab/docker-run-action@v3
with:
image: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian
options: --user 0:0 -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build -e NX_CLOUD_ACCESS_TOKEN=${{ inputs.nx_token }}
run: |
export CC=x86_64-unknown-linux-gnu-gcc
export CC_x86_64_unknown_linux_gnu=x86_64-unknown-linux-gnu-gcc
export RUSTFLAGS="-C debuginfo=1"
yarn workspace ${{ inputs.package }} nx build ${{ inputs.package }} --target ${{ inputs.target }}
if [ -d "node_modules/.cache" ]; then
chmod -R 777 node_modules/.cache
fi
if [ -d "target" ]; then
chmod -R 777 target;
fi
- name: Build
if: ${{ inputs.target == 'aarch64-unknown-linux-gnu' }}
uses: addnab/docker-run-action@v3
with:
image: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian-aarch64
options: --user 0:0 -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build -e NX_CLOUD_ACCESS_TOKEN=${{ inputs.nx_token }}
run: |
export RUSTFLAGS="-C debuginfo=1"
yarn workspace ${{ inputs.package }} nx build ${{ inputs.package }} --target ${{ inputs.target }}
if [ -d "node_modules/.cache" ]; then
chmod -R 777 node_modules/.cache
fi
if [ -d "target" ]; then
chmod -R 777 target;
fi


@@ -26,7 +26,7 @@ runs:
echo "GIT_SHORT_HASH=$(git rev-parse --short HEAD)" >> "$GITHUB_ENV"
- uses: azure/setup-helm@v3
- id: auth
uses: google-github-actions/auth@v2
uses: google-github-actions/auth@v1
with:
workload_identity_provider: 'projects/${{ inputs.gcp-project-number }}/locations/global/workloadIdentityPools/github-actions/providers/github-actions-helm-deploy'
service_account: '${{ inputs.service-account }}'
@@ -34,7 +34,7 @@ runs:
project_id: '${{ inputs.gcp-project-id }}'
- name: 'Setup gcloud cli'
uses: 'google-github-actions/setup-gcloud@v2'
uses: 'google-github-actions/setup-gcloud@v1'
with:
install_components: 'gke-gcloud-auth-plugin'


@@ -1,7 +1,6 @@
import { execSync } from 'node:child_process';
const {
APP_VERSION,
BUILD_TYPE,
DEPLOY_HOST,
CANARY_DEPLOY_HOST,
@@ -13,6 +12,7 @@ const {
R2_ACCOUNT_ID,
R2_ACCESS_KEY_ID,
R2_SECRET_ACCESS_KEY,
R2_BUCKET,
ENABLE_CAPTCHA,
CAPTCHA_TURNSTILE_SECRET,
OAUTH_EMAIL_SENDER,
@@ -27,7 +27,6 @@ const {
REDIS_PASSWORD,
STRIPE_API_KEY,
STRIPE_WEBHOOK_KEY,
STATIC_IP_NAME,
} = process.env;
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
@@ -35,13 +34,17 @@ const buildType = BUILD_TYPE || 'canary';
const isProduction = buildType === 'stable';
const isBeta = buildType === 'beta';
const isInternal = buildType === 'internal';
const createHelmCommand = ({ isDryRun }) => {
const flag = isDryRun ? '--dry-run' : '--atomic';
const imageTag = `${buildType}-${GIT_SHORT_HASH}`;
const staticIpName = isProduction
? 'affine-cluster-production'
: isBeta
? 'affine-cluster-beta'
: 'affine-cluster-dev';
const redisAndPostgres =
isProduction || isBeta || isInternal
isProduction || isBeta
? [
`--set-string global.database.url=${DATABASE_URL}`,
`--set-string global.database.user=${DATABASE_USERNAME}`,
@@ -55,34 +58,27 @@ const createHelmCommand = ({ isDryRun }) => {
]
: [];
const serviceAnnotations =
isProduction || isBeta || isInternal
isProduction || isBeta
? [
`--set-json web.service.annotations=\"{ \\"cloud.google.com/neg\\": \\"{\\\\\\"ingress\\\\\\": true}\\" }\"`,
`--set-json graphql.serviceAccount.annotations=\"{ \\"iam.gke.io/gcp-service-account\\": \\"${CLOUD_SQL_IAM_ACCOUNT}\\" }\"`,
`--set-json graphql.service.annotations=\"{ \\"cloud.google.com/neg\\": \\"{\\\\\\"ingress\\\\\\": true}\\" }\"`,
`--set-json sync.serviceAccount.annotations=\"{ \\"iam.gke.io/gcp-service-account\\": \\"${CLOUD_SQL_IAM_ACCOUNT}\\" }\"`,
`--set-json sync.service.annotations=\"{ \\"cloud.google.com/neg\\": \\"{\\\\\\"ingress\\\\\\": true}\\" }\"`,
`--set-json cloud-sql-proxy.serviceAccount.annotations=\"{ \\"iam.gke.io/gcp-service-account\\": \\"${CLOUD_SQL_IAM_ACCOUNT}\\" }\"`,
`--set-json cloud-sql-proxy.nodeSelector=\"{ \\"iam.gke.io/gke-metadata-server-enabled\\": \\"true\\" }\"`,
]
: [];
const webReplicaCount = isProduction ? 3 : isBeta ? 2 : 2;
const graphqlReplicaCount = isProduction ? 10 : isBeta ? 5 : 2;
const syncReplicaCount = isProduction ? 10 : isBeta ? 5 : 2;
const namespace = isProduction
? 'production'
: isBeta
? 'beta'
: isInternal
? 'internal'
: 'dev';
const namespace = isProduction ? 'production' : isBeta ? 'beta' : 'dev';
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
const host = DEPLOY_HOST || CANARY_DEPLOY_HOST;
const deployCommand = [
`helm upgrade --install affine .github/helm/affine`,
`--namespace ${namespace}`,
`--set global.ingress.enabled=true`,
`--set-json global.ingress.annotations=\"{ \\"kubernetes.io/ingress.class\\": \\"gce\\", \\"kubernetes.io/ingress.allow-http\\": \\"true\\", \\"kubernetes.io/ingress.global-static-ip-name\\": \\"${STATIC_IP_NAME}\\" }\"`,
`--set-json global.ingress.annotations=\"{ \\"kubernetes.io/ingress.class\\": \\"gce\\", \\"kubernetes.io/ingress.allow-http\\": \\"true\\", \\"kubernetes.io/ingress.global-static-ip-name\\": \\"${staticIpName}\\" }\"`,
`--set-string global.ingress.host="${host}"`,
`--set-string global.version="${APP_VERSION}"`,
...redisAndPostgres,
`--set web.replicaCount=${webReplicaCount}`,
`--set-string web.image.tag="${imageTag}"`,
@@ -95,6 +91,7 @@ const createHelmCommand = ({ isDryRun }) => {
`--set-string graphql.app.objectStorage.r2.accountId="${R2_ACCOUNT_ID}"`,
`--set-string graphql.app.objectStorage.r2.accessKeyId="${R2_ACCESS_KEY_ID}"`,
`--set-string graphql.app.objectStorage.r2.secretAccessKey="${R2_SECRET_ACCESS_KEY}"`,
`--set-string graphql.app.objectStorage.r2.bucket="${R2_BUCKET}"`,
`--set-string graphql.app.oauth.email.sender="${OAUTH_EMAIL_SENDER}"`,
`--set-string graphql.app.oauth.email.login="${OAUTH_EMAIL_LOGIN}"`,
`--set-string graphql.app.oauth.email.password="${OAUTH_EMAIL_PASSWORD}"`,
@@ -108,7 +105,7 @@ const createHelmCommand = ({ isDryRun }) => {
`--set sync.replicaCount=${syncReplicaCount}`,
`--set-string sync.image.tag="${imageTag}"`,
...serviceAnnotations,
`--timeout 10m`,
`--version "0.0.0-${buildType}.${GIT_SHORT_HASH}" --timeout 10m`,
flag,
].join(' ');
return deployCommand;


@@ -1,22 +0,0 @@
name: 'Download core artifacts'
description: 'Download core artifacts and extract to dist'
inputs:
path:
description: 'Path to extract'
required: true
runs:
using: 'composite'
steps:
- name: Download tar.gz
uses: actions/download-artifact@v4
with:
name: core
path: .
- name: Extract core artifacts
shell: bash
run: |
mkdir -p ${{ inputs.path }}
tar -xvf dist.tar.gz --directory ${{ inputs.path }}
rm dist.tar.gz


@@ -21,6 +21,14 @@ inputs:
description: 'set nmMode to hardlinks-local in .yarnrc.yml'
required: false
default: 'true'
build-infra:
description: 'Build infra'
required: false
default: 'true'
build-plugins:
description: 'Build plugins'
required: false
default: 'true'
nmHoistingLimits:
description: 'Set nmHoistingLimits in .yarnrc.yml'
required: false
@@ -28,19 +36,17 @@ inputs:
description: 'Set enableScripts in .yarnrc.yml'
required: false
default: 'true'
full-cache:
description: 'Full installation cache'
required: false
runs:
using: 'composite'
steps:
- name: Setup Node.js
uses: actions/setup-node@v4
uses: actions/setup-node@v3
with:
node-version-file: '.nvmrc'
registry-url: https://npm.pkg.github.com
scope: '@toeverything'
cache: 'yarn'
- name: Set nmMode
if: ${{ inputs.hard-link-nm == 'false' }}
@@ -57,48 +63,6 @@ runs:
shell: bash
run: yarn config set enableScripts false
- name: Set yarn global cache path
shell: bash
id: yarn-cache
run: node -e "const p = $(yarn config cacheFolder --json).effective; console.log('yarn_global_cache=' + p)" >> $GITHUB_OUTPUT
- name: Cache non-full yarn cache on Linux
uses: actions/cache@v3
if: ${{ inputs.full-cache != 'true' && runner.os == 'Linux' }}
with:
path: |
node_modules
${{ steps.yarn-cache.outputs.yarn_global_cache }}
key: node_modules-cache-${{ github.job }}-${{ runner.os }}
# The network performance on macOS is very poor
# and the decompression performance on Windows is terrible,
# so we reduce the number of cached files on non-Linux systems by removing node_modules from the cache path.
- name: Cache non-full yarn cache on non-Linux
uses: actions/cache@v3
if: ${{ inputs.full-cache != 'true' && runner.os != 'Linux' }}
with:
path: |
${{ steps.yarn-cache.outputs.yarn_global_cache }}
key: node_modules-cache-${{ github.job }}-${{ runner.os }}
- name: Cache full yarn cache on Linux
uses: actions/cache@v3
if: ${{ inputs.full-cache == 'true' && runner.os == 'Linux' }}
with:
path: |
node_modules
${{ steps.yarn-cache.outputs.yarn_global_cache }}
key: node_modules-cache-full-${{ runner.os }}
- name: Cache full yarn cache on non-Linux
uses: actions/cache@v3
if: ${{ inputs.full-cache == 'true' && runner.os != 'Linux' }}
with:
path: |
${{ steps.yarn-cache.outputs.yarn_global_cache }}
key: node_modules-cache-full-${{ runner.os }}
- name: yarn install
if: ${{ inputs.package-install == 'true' }}
continue-on-error: true
@@ -138,8 +102,8 @@ runs:
id: playwright-cache
if: ${{ inputs.playwright-install == 'true' }}
with:
path: ${{ github.workspace }}/node_modules/.cache/ms-playwright
key: '${{ runner.os }}-playwright-${{ steps.playwright-version.outputs.version }}'
path: '~/.cache/ms-playwright'
key: '${{ runner.os }}-${{ runner.arch }}-playwright-${{ steps.playwright-version.outputs.version }}'
# As a fallback, if the Playwright version has changed, try to use the
# most recently cached version. There's a good chance that at least one
# of the browser binary versions hasn't been updated, so Playwright can
@@ -149,7 +113,7 @@ runs:
# date cache, but still let Playwright decide if it needs to download
# new binaries or not.
restore-keys: |
${{ runner.os }}-playwright-
${{ runner.os }}-${{ runner.arch }}-playwright-
# If the Playwright browser binaries weren't able to be restored, we tell
# playwright to install everything for us.
@@ -157,8 +121,6 @@ runs:
shell: bash
if: inputs.playwright-install == 'true'
run: yarn playwright install --with-deps chromium
env:
PLAYWRIGHT_BROWSERS_PATH: ${{ github.workspace }}/node_modules/.cache/ms-playwright
- name: Get installed Electron version
id: electron-version
@@ -172,13 +134,23 @@ runs:
if: ${{ inputs.electron-install == 'true' }}
with:
path: 'node_modules/.cache/electron'
key: '${{ runner.os }}-electron-${{ steps.electron-version.outputs.version }}'
key: '${{ runner.os }}-{{ runner.arch }}-electron-${{ steps.electron-version.outputs.version }}'
restore-keys: |
${{ runner.os }}-electron-
${{ runner.os }}-{{ runner.arch }}-electron-
- name: Install Electron binary
shell: bash
if: inputs.electron-install == 'true'
run: node ./node_modules/electron/install.js
env:
electron_config_cache: ./node_modules/.cache/electron
ELECTRON_OVERRIDE_DIST_PATH: ./node_modules/.cache/electron
- name: Build Infra
shell: bash
if: inputs.build-infra == 'true'
run: yarn run build:infra
- name: Build Plugins
if: inputs.build-plugins == 'true'
shell: bash
run: yarn run build:plugins


@@ -1,24 +0,0 @@
name: Setup Version
description: 'Setup Version'
outputs:
APP_VERSION:
description: 'App Version'
value: ${{ steps.version.outputs.APP_VERSION }}
runs:
using: 'composite'
steps:
- name: 'Write Version'
id: version
shell: bash
run: |
if [ "${{ github.ref_type }}" == "tag" ]; then
APP_VERSION=$(echo "${{ github.ref_name }}" | sed 's/^v//')
else
PACKAGE_VERSION=$(node -p "require('./package.json').version")
TIME_VERSION=$(date +%Y%m%d%H%M)
GIT_SHORT_HASH=$(git rev-parse --short HEAD)
APP_VERSION=$PACKAGE_VERSION-nightly-$TIME_VERSION-$GIT_SHORT_HASH
fi
echo $APP_VERSION
echo "APP_VERSION=$APP_VERSION" >> "$GITHUB_OUTPUT"
./scripts/set-version.sh $APP_VERSION

.github/dependabot.yml

@@ -0,0 +1,31 @@
version: 2
updates:
- package-ecosystem: 'npm'
directory: '/'
groups:
all-npm-dependencies:
patterns:
- '*'
schedule:
interval: 'weekly'
versioning-strategy: increase
commit-message:
prefix: 'chore'
- package-ecosystem: 'cargo'
directory: '/'
schedule:
interval: 'weekly'
versioning-strategy: auto
commit-message:
prefix: 'chore'
groups:
all-cargo-dependencies:
patterns:
- '*'
- package-ecosystem: 'github-actions'
directory: '/'
schedule:
interval: 'daily'
commit-message:
prefix: 'ci'


@@ -1,4 +1,4 @@
FROM openresty/openresty:1.21.4.3-0-buster
FROM openresty/openresty:1.21.4.1-0-buster
WORKDIR /app
COPY ./packages/frontend/core/dist ./dist
COPY ./.github/deployment/front/nginx.conf /usr/local/openresty/nginx/conf/nginx.conf

.github/helm/affine-cloud/.gitignore

@@ -0,0 +1 @@
charts/


@@ -20,4 +20,4 @@
.project
.idea/
*.tmproj
.vscode/
.vscode/

.github/helm/affine-cloud/Chart.lock

@@ -0,0 +1,6 @@
dependencies:
- name: postgresql
repository: https://charts.bitnami.com/bitnami
version: 12.5.8
digest: sha256:c91c0dc1370e879538dc9d6e435e731a726ef99d6a3b081372318483792b48a7
generated: "2023-06-27T18:34:12.683806+08:00"

.github/helm/affine-cloud/Chart.yaml

@@ -0,0 +1,12 @@
apiVersion: v2
name: affine-cloud
description: A Helm chart for AFFiNE Cloud
type: application
version: 0.6.1
appVersion: '0.6.1'
dependencies:
- name: postgresql
version: 12.5.8
repository: https://charts.bitnami.com/bitnami

.github/helm/affine-cloud/readme.md

@@ -0,0 +1,30 @@
# Helm Chart Configuration
The following table lists the configurable parameters of this Helm chart and their default values.
## AFFiNE Cloud Server parameters
| Parameter | Description | Default |
| ------------------------------ | -------------------------------------------------- | ------------------ |
| `affineCloud.tag` | The Docker tag of the AffineCloud image to be used | `'nightly-latest'` |
| `affineCloud.resources.cpu` | The CPU resources allocated for AffineCloud | `'250m'` |
| `affineCloud.resources.memory` | The memory resources allocated for AffineCloud | `'0.5Gi'` |
| `affineCloud.signKey` | The key used to sign the JWT tokens | `'c2VjcmV0'` |
| `affineCloud.service.type` | The type of the Kubernetes service | `'ClusterIP'` |
| `affineCloud.service.port` | The port of the Kubernetes service | `'http'` |
| `affineCloud.mail.account` | The email account used to send emails | `''` |
| `affineCloud.mail.password` | The password of the email account | `''` |
## PostgreSQL parameters
| Parameter | Description | Default |
| -------------------------------------------- | ------------------------------------------------------------------------------------- | ------------ |
| `postgresql.auth.username` | Username for the PostgreSQL database | `'affine'` |
| `postgresql.auth.password` | Password for the PostgreSQL database. Please change this for production environments. | `'password'` |
| `postgresql.auth.database` | The name of the default database that will be created on image startup | `'affine'` |
| `postgresql.primary.resources.limits.cpu` | The CPU resources allocated for the PostgreSQL primary node | `'500m'` |
| `postgresql.primary.resources.limits.memory` | The memory resources allocated for the PostgreSQL primary node | `'0.5Gi'` |
For more PostgreSQL parameters, please refer to: https://artifacthub.io/packages/helm/bitnami/postgresql
Please note that you should provide your own `postgresql.auth.password` for production environments; the default value is provided only for demonstration purposes.
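As a rough usage sketch (not part of the chart itself), the documented parameters can be overridden on the command line when installing the chart from this repository; the release name and namespace below are arbitrary examples:
```sh
# Minimal sketch: install the affine-cloud chart and override a few documented values.
# The release name "affine-cloud" and namespace "affine" are hypothetical examples.
helm upgrade --install affine-cloud .github/helm/affine-cloud \
  --namespace affine --create-namespace \
  --set affineCloud.tag=nightly-latest \
  --set affineCloud.resources.cpu=500m \
  --set postgresql.auth.password=change-me-for-production
```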


@@ -1,7 +1,7 @@
{{/*
Expand the name of the chart.
*/}}
{{- define "gcloud-sql-proxy.name" -}}
{{- define "affine-cloud.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
{{- end }}
@@ -10,7 +10,7 @@ Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
If release name contains chart name it will be used as a full name.
*/}}
{{- define "gcloud-sql-proxy.fullname" -}}
{{- define "affine-cloud.fullname" -}}
{{- if .Values.fullnameOverride }}
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
{{- else }}
@@ -26,16 +26,16 @@ If release name contains chart name it will be used as a full name.
{{/*
Create chart name and version as used by the chart label.
*/}}
{{- define "gcloud-sql-proxy.chart" -}}
{{- define "affine-cloud.chart" -}}
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Common labels
*/}}
{{- define "gcloud-sql-proxy.labels" -}}
helm.sh/chart: {{ include "gcloud-sql-proxy.chart" . }}
{{ include "gcloud-sql-proxy.selectorLabels" . }}
{{- define "affine-cloud.labels" -}}
helm.sh/chart: {{ include "affine-cloud.chart" . }}
{{ include "affine-cloud.selectorLabels" . }}
{{- if .Chart.AppVersion }}
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
{{- end }}
@@ -45,18 +45,7 @@ app.kubernetes.io/managed-by: {{ .Release.Service }}
{{/*
Selector labels
*/}}
{{- define "gcloud-sql-proxy.selectorLabels" -}}
app.kubernetes.io/name: {{ include "gcloud-sql-proxy.name" . }}
{{- define "affine-cloud.selectorLabels" -}}
app.kubernetes.io/name: {{ include "affine-cloud.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}
{{/*
Create the name of the service account to use
*/}}
{{- define "gcloud-sql-proxy.serviceAccountName" -}}
{{- if .Values.serviceAccount.create }}
{{- default (include "gcloud-sql-proxy.fullname" .) .Values.serviceAccount.name }}
{{- else }}
{{- default "default" .Values.serviceAccount.name }}
{{- end }}
{{- end }}


@@ -0,0 +1,51 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: "{{ include "affine-cloud.fullname" . }}"
labels:
{{- include "affine-cloud.labels" . | nindent 4 }}
spec:
replicas: 1
selector:
matchLabels:
{{- include "affine-cloud.selectorLabels" . | nindent 6 }}
strategy:
type: RollingUpdate
rollingUpdate:
maxUnavailable: 2
template:
metadata:
labels:
{{- include "affine-cloud.selectorLabels" . | nindent 8 }}
spec:
restartPolicy: Always
containers:
- name: affine-cloud
image: "ghcr.io/toeverything/cloud-self-hosted:{{ .Values.affineCloud.tag | default .Chart.AppVersion }}"
env:
- name: PG_USER
value: "{{ .Values.postgresql.auth.username }}"
- name: PG_PASS
value: "{{ .Values.postgresql.auth.password }}"
- name: PG_DATABASE
value: "{{ .Values.postgresql.auth.database }}"
- name: PG_HOST
value: "{{ .Values.postgresql.fullnameOverride | default (printf "%s-postgresql" .Release.Name) }}"
- name: DATABASE_URL
value: "{{ .Values.affineCloud.databaseUrl | default "postgresql://$(PG_USER):$(PG_PASS)@$(PG_HOST)/$(PG_DATABASE)" }}"
envFrom:
- secretRef:
name: affine-cloud-secret
ports:
- containerPort: 3000
livenessProbe:
httpGet:
path: /api/healthz
port: 3000
failureThreshold: 1
initialDelaySeconds: 10
periodSeconds: 10
resources:
limits:
cpu: "{{ .Values.affineCloud.resources.cpu }}"
memory: "{{ .Values.affineCloud.resources.memory }}"


@@ -0,0 +1,9 @@
apiVersion: v1
kind: Secret
metadata:
name: affine-cloud-secret
type: Opaque
data:
SIGN_KEY: "{{ .Values.affineCloud.signKey }}"
MAIL_ACCOUNT: "{{ .Values.affineCloud.mail.account }}"
MAIL_PASSWORD: "{{ .Values.affineCloud.mail.password }}"


@@ -0,0 +1,15 @@
apiVersion: v1
kind: Service
metadata:
name: "{{ include "affine-cloud.fullname" . }}"
labels:
{{- include "affine-cloud.labels" . | nindent 4 }}
spec:
type: "{{ .Values.affineCloud.service.type }}"
ports:
- name: http
protocol: TCP
port: {{ .Values.affineCloud.service.port }}
targetPort: 3000
selector:
{{- include "affine-cloud.selectorLabels" . | nindent 4 }}

.github/helm/affine-cloud/values.yaml

@@ -0,0 +1,30 @@
affineCloud:
tag: 'canary-5e0d5e0cc65ea46f326fdde12658bfac59b38c9f-0949'
# databaseUrl: 'postgresql://affine:password@affine-cloud-postgresql:5432/affine'
signKey: TUFtdFdzQTJhdGJuem01TA==
mail:
account: ''
password: ''
service:
type: ClusterIP
port: 80
resources:
cpu: '250m'
memory: 0.5Gi
postgresql:
fullnameOverride: tcp-postgresql
auth:
# only for demo, please change this in production environments
username: affine
password: password
database: affine
primary:
initdb:
scripts:
01-init.sql: |
CREATE DATABASE affine_binary;
GRANT ALL PRIVILEGES ON DATABASE affine_binary TO affine;
resources:
limits:
cpu: '500m'
memory: 0.5Gi


@@ -3,4 +3,4 @@ name: affine
description: AFFiNE cloud chart
type: application
version: 0.0.0
appVersion: "0.11.0"
appVersion: '0.7.0-canary.18'


@@ -1,6 +0,0 @@
apiVersion: v2
name: cloud-sql-proxy
description: Google Cloud SQL Proxy
type: application
version: 0.0.0
appVersion: "2.8.1"


@@ -1,18 +0,0 @@
{{- if .Values.global.database.gcloud.enabled -}}
1. Get the application URL by running these commands:
{{- if contains "NodePort" .Values.service.type }}
export NODE_PORT=$(kubectl get --namespace {{ .Release.Namespace }} -o jsonpath="{.spec.ports[0].nodePort}" services {{ include "gcloud-sql-proxy.fullname" . }})
export NODE_IP=$(kubectl get nodes --namespace {{ .Release.Namespace }} -o jsonpath="{.items[0].status.addresses[0].address}")
echo http://$NODE_IP:$NODE_PORT
{{- else if contains "LoadBalancer" .Values.service.type }}
NOTE: It may take a few minutes for the LoadBalancer IP to be available.
You can watch the status of by running 'kubectl get --namespace {{ .Release.Namespace }} svc -w {{ include "gcloud-sql-proxy.fullname" . }}'
export SERVICE_IP=$(kubectl get svc --namespace {{ .Release.Namespace }} {{ include "gcloud-sql-proxy.fullname" . }} --template "{{"{{ range (index .status.loadBalancer.ingress 0) }}{{.}}{{ end }}"}}")
echo http://$SERVICE_IP:{{ .Values.service.port }}
{{- else if contains "ClusterIP" .Values.service.type }}
export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "gcloud-sql-proxy.name" . }},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}")
export CONTAINER_PORT=$(kubectl get pod --namespace {{ .Release.Namespace }} $POD_NAME -o jsonpath="{.spec.containers[0].ports[0].containerPort}")
echo "Visit http://127.0.0.1:8080 to use your application"
kubectl --namespace {{ .Release.Namespace }} port-forward $POD_NAME 8080:$CONTAINER_PORT
{{- end }}
{{- end }}


@@ -1,132 +0,0 @@
{{- if .Values.global.database.gcloud.enabled -}}
apiVersion: apps/v1
kind: Deployment
metadata:
name: {{ include "gcloud-sql-proxy.fullname" . }}
labels:
{{- include "gcloud-sql-proxy.labels" . | nindent 4 }}
spec:
replicas: {{ .Values.replicaCount }}
selector:
matchLabels:
{{- include "gcloud-sql-proxy.selectorLabels" . | nindent 6 }}
template:
metadata:
{{- with .Values.podAnnotations }}
annotations:
{{- toYaml . | nindent 8 }}
{{- end }}
labels:
{{- include "gcloud-sql-proxy.labels" . | nindent 8 }}
{{- with .Values.podLabels }}
{{- toYaml . | nindent 8 }}
{{- end }}
spec:
{{- with .Values.imagePullSecrets }}
imagePullSecrets:
{{- toYaml . | nindent 8 }}
{{- end }}
serviceAccountName: {{ include "gcloud-sql-proxy.serviceAccountName" . }}
securityContext:
{{- toYaml .Values.podSecurityContext | nindent 8 }}
containers:
- name: {{ .Chart.Name }}
securityContext:
{{- toYaml .Values.securityContext | nindent 12 }}
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: File
image: "{{ .Values.image.repository }}:{{ .Chart.AppVersion }}"
imagePullPolicy: {{ .Values.image.pullPolicy }}
args:
- "--address"
- "0.0.0.0"
- "--structured-logs"
- "--auto-iam-authn"
- "{{ .Values.global.database.gcloud.connectionName }}"
env:
# Enable HTTP healthchecks on port 9801. This enables /liveness,
# /readiness and /startup health check endpoints. Listen for connections
# on any interface (0.0.0.0) so that the k8s management components can
# reach these endpoints.
- name: CSQL_PROXY_HEALTH_CHECK
value: "true"
- name: CSQL_PROXY_HTTP_PORT
value: "9801"
- name: CSQL_PROXY_HTTP_ADDRESS
value: 0.0.0.0
ports:
- name: cloud-sql-proxy
containerPort: {{ .Values.global.database.gcloud.proxyPort }}
protocol: TCP
- containerPort: 9801
protocol: TCP
# The /startup probe returns OK when the proxy is ready to receive
# connections from the application. In this example, k8s will check
# once a second for 60 seconds.
startupProbe:
failureThreshold: 60
httpGet:
path: /startup
port: 9801
scheme: HTTP
periodSeconds: 1
successThreshold: 1
timeoutSeconds: 10
# The /liveness probe returns OK as soon as the proxy application has
# begun its startup process and continues to return OK until the
# process stops.
livenessProbe:
failureThreshold: 3
httpGet:
path: /liveness
port: 9801
scheme: HTTP
# The probe will be checked every 10 seconds.
periodSeconds: 10
# Number of times the probe is allowed to fail before the transition
# from healthy to failure state.
#
# If periodSeconds = 60, 5 tries will result in five minutes of
# checks. The proxy starts to refresh a certificate five minutes
# before its expiration. If those five minutes lapse without a
# successful refresh, the liveness probe will fail and the pod will be
# restarted.
successThreshold: 1
# The probe will fail if it does not respond in 10 seconds
timeoutSeconds: 10
readinessProbe:
# The /readiness probe returns OK when the proxy can establish
# new connections to its databases.
httpGet:
path: /readiness
port: 9801
initialDelaySeconds: 10
periodSeconds: 10
timeoutSeconds: 10
# Number of times the probe must report success to transition from failure to healthy state.
# Defaults to 1 for readiness probe.
successThreshold: 1
failureThreshold: 6
resources:
{{- toYaml .Values.resources | nindent 12 }}
{{- with .Values.volumeMounts }}
volumeMounts:
{{- toYaml . | nindent 12 }}
{{- end }}
{{- with .Values.volumes }}
volumes:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.nodeSelector }}
nodeSelector:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.tolerations }}
tolerations:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- end }}


@@ -1,17 +0,0 @@
{{- if .Values.global.database.gcloud.enabled -}}
apiVersion: v1
kind: Service
metadata:
name: {{ include "gcloud-sql-proxy.fullname" . }}
labels:
{{- include "gcloud-sql-proxy.labels" . | nindent 4 }}
spec:
type: {{ .Values.service.type }}
ports:
- port: {{ .Values.global.database.port }}
targetPort: cloud-sql-proxy
protocol: TCP
name: cloud-sql-proxy
selector:
{{- include "gcloud-sql-proxy.selectorLabels" . | nindent 4 }}
{{- end }}


@@ -1,15 +0,0 @@
{{- if .Values.global.database.gcloud.enabled -}}
{{- if .Values.serviceAccount.create -}}
apiVersion: v1
kind: ServiceAccount
metadata:
name: {{ include "gcloud-sql-proxy.serviceAccountName" . }}
labels:
{{- include "gcloud-sql-proxy.labels" . | nindent 4 }}
{{- with .Values.serviceAccount.annotations }}
annotations:
{{- toYaml . | nindent 4 }}
{{- end }}
automountServiceAccountToken: {{ .Values.serviceAccount.automount }}
{{- end }}
{{- end }}


@@ -1,17 +0,0 @@
{{- if .Values.global.database.gcloud.enabled -}}
apiVersion: v1
kind: Pod
metadata:
name: "{{ include "gcloud-sql-proxy.fullname" . }}-test-connection"
labels:
{{- include "gcloud-sql-proxy.labels" . | nindent 4 }}
annotations:
"helm.sh/hook": test
spec:
containers:
- name: wget
image: busybox
command: ['wget']
args: ['{{ include "gcloud-sql-proxy.fullname" . }}:{{ .Values.service.port }}']
restartPolicy: Never
{{- end }}


@@ -1,40 +0,0 @@
replicaCount: 3
image:
# the tag is defined as chart appVersion.
repository: gcr.io/cloud-sql-connectors/cloud-sql-proxy
pullPolicy: IfNotPresent
imagePullSecrets: []
nameOverride: ""
fullnameOverride: ""
serviceAccount:
create: true
automount: true
annotations: {}
name: ""
podAnnotations: {}
podLabels: {}
podSecurityContext:
fsGroup: 2000
securityContext:
runAsNonRoot: true
service:
type: ClusterIP
port: 5432
resources:
limits:
memory: "4Gi"
cpu: "2"
volumes: []
volumeMounts: []
nodeSelector: {}
tolerations: []
affinity: {}


@@ -3,9 +3,4 @@ name: graphql
description: AFFiNE GraphQL server
type: application
version: 0.0.0
appVersion: "0.11.0"
dependencies:
- name: gcloud-sql-proxy
version: 0.0.0
repository: "file://../gcloud-sql-proxy"
condition: .global.database.gcloud.enabled
appVersion: '0.7.0-canary.18'


@@ -136,6 +136,11 @@ spec:
secretKeyRef:
name: "{{ .Values.app.objectStorage.r2.secretName }}"
key: secretAccessKey
- name: R2_OBJECT_STORAGE_BUCKET
valueFrom:
secretKeyRef:
name: "{{ .Values.app.objectStorage.r2.secretName }}"
key: bucket
{{ end }}
{{ if .Values.app.captcha.enabled }}
- name: CAPTCHA_TURNSTILE_SECRET
@@ -184,6 +189,20 @@ spec:
initialDelaySeconds: {{ .Values.probe.initialDelaySeconds }}
resources:
{{- toYaml .Values.resources | nindent 12 }}
{{ if .Values.global.database.gcloud.enabled }}
- name: cloud-sql-proxy
image: gcr.io/cloud-sql-connectors/cloud-sql-proxy:2.6.0
args:
- "--structured-logs"
- "--auto-iam-authn"
- "{{ .Values.global.database.gcloud.connectionName }}"
securityContext:
runAsNonRoot: true
resources:
requests:
memory: "2Gi"
cpu: "1"
{{ end }}
{{- with .Values.nodeSelector }}
nodeSelector:
{{- toYaml . | nindent 8 }}


@@ -35,23 +35,6 @@ spec:
- name: DATABASE_URL
value: postgres://{{ .Values.global.database.user }}:$(DATABASE_PASSWORD)@{{ .Values.global.database.gcloud.cloudSqlInternal }}:{{ .Values.global.database.port }}/{{ .Values.global.database.name }}
{{ end }}
{{ if .Values.app.objectStorage.r2.enabled }}
- name: R2_OBJECT_STORAGE_ACCOUNT_ID
valueFrom:
secretKeyRef:
name: "{{ .Values.app.objectStorage.r2.secretName }}"
key: accountId
- name: R2_OBJECT_STORAGE_ACCESS_KEY_ID
valueFrom:
secretKeyRef:
name: "{{ .Values.app.objectStorage.r2.secretName }}"
key: accessKeyId
- name: R2_OBJECT_STORAGE_SECRET_ACCESS_KEY
valueFrom:
secretKeyRef:
name: "{{ .Values.app.objectStorage.r2.secretName }}"
key: secretAccessKey
{{ end }}
resources:
requests:
cpu: '100m'


@@ -8,4 +8,5 @@ data:
accountId: {{ .Values.app.objectStorage.r2.accountId | b64enc }}
accessKeyId: {{ .Values.app.objectStorage.r2.accessKeyId | b64enc }}
secretAccessKey: {{ .Values.app.objectStorage.r2.secretAccessKey | b64enc }}
bucket: {{ .Values.app.objectStorage.r2.bucket | b64enc }}
{{- end }}


@@ -34,6 +34,7 @@ app:
accountId: ''
accessKeyId: ''
secretAccessKey: ''
bucket: ''
oauth:
email:
secretName: 'oauth-email'


@@ -1,11 +1,6 @@
apiVersion: v2
name: sync
description: AFFiNE Sync Server
description: A Helm chart for Kubernetes
type: application
version: 0.0.0
appVersion: "0.11.0"
dependencies:
- name: gcloud-sql-proxy
version: 0.0.0
repository: "file://../gcloud-sql-proxy"
condition: .global.database.gcloud.enabled
appVersion: "0.7.0-canary.18"


@@ -82,6 +82,20 @@ spec:
initialDelaySeconds: {{ .Values.probe.initialDelaySeconds }}
resources:
{{- toYaml .Values.resources | nindent 12 }}
{{ if .Values.global.database.gcloud.enabled }}
- name: cloud-sql-proxy
image: gcr.io/cloud-sql-connectors/cloud-sql-proxy:2.6.0
args:
- "--structured-logs"
- "--auto-iam-authn"
- "{{ .Values.global.database.gcloud.connectionName }}"
securityContext:
runAsNonRoot: true
resources:
requests:
memory: "2Gi"
cpu: "1"
{{ end }}
{{- with .Values.nodeSelector }}
nodeSelector:
{{- toYaml . | nindent 8 }}


@@ -16,8 +16,6 @@ global:
cloudSqlInternal: ''
connectionName: ''
serviceAccount: ''
cloudProxyReplicas: 3
proxyPort: '5432'
redis:
enabled: true
host: 'redis-master'

.github/helm/deployment_guide.md

@@ -0,0 +1,60 @@
# Cluster Deployment Guide
This document provides a step-by-step guide for developers on how to deploy services in a Kubernetes cluster. The following content assumes that the reader already has a basic understanding of Kubernetes concepts and operations.
### 1. Configure Service Mesh (Optional)
In the Kubernetes cluster, we optionally use a Service Mesh (such as Istio or Anthos Service Mesh) to manage the network interactions of microservices. If a Service Mesh is already deployed on your cluster, or you do not need one, you can skip this step. In this step, we assume that you are using Google Kubernetes Engine (GKE) and have already installed Anthos Service Mesh on your cluster; if you wish to use another Ingress Controller, please refer to the relevant documentation.
To configure your kubectl context to interact with your Kubernetes cluster using the gcloud tool, you need to execute the following commands:
```sh
export CLUSTER_NAME=your_cluster_name
export REGION=your_cluster_region
export PROJECT=your_project_id
gcloud container clusters get-credentials $CLUSTER_NAME --region $REGION --project $PROJECT
```
In this command, you should replace `CLUSTER_NAME`, `REGION` and `PROJECT` with the actual name, region and project id of your Kubernetes cluster. This command retrieves the access credentials for your Kubernetes cluster and automatically configures kubectl to use these credentials.
Now, to enable Service Mesh injection for a specific Namespace, first set the environment variable `NAMESPACE` to your target Kubernetes Namespace. In this example, we use `prod` as the target Namespace:
```sh
export NAMESPACE=prod
```
Then, we label the Namespace which will enable Istio to automatically inject the sidecar container for all new Pods under this Namespace:
```sh
kubectl label namespace $NAMESPACE istio-injection- istio.io/rev=asm-managed --overwrite
```
Finally, we trigger a Kubernetes Deployment restart so that existing Pods also receive the injected sidecar container:
```sh
kubectl rollout restart deployment -n $NAMESPACE
```
### 2. Deploying the Application
Next, we will deploy our application in the Kubernetes cluster through Helm. First, set relevant environment variables:
```sh
export NAMESPACE=prod
export RELEASE=affine-cloud-prod
export CHART_PATH=.github/helm/affine-cloud
```
- `NAMESPACE` should be consistent with the first step, indicating your target Kubernetes Namespace.
- `RELEASE` is the name of your Helm release.
- `CHART_PATH` is the location of your Helm chart on your file system (we avoid reusing `PATH`, which would override the shell's executable search path).
Finally, use the `helm upgrade --install` command to deploy or upgrade your application:
```sh
helm upgrade --namespace $NAMESPACE --create-namespace --install $RELEASE $CHART_PATH
```
This command creates the Namespace if it doesn't already exist and deploys your Helm chart into it. If the release already exists, it will be upgraded.
The above are the complete steps for deploying an application in a Kubernetes cluster. Make sure all prerequisites are met before deploying, and also ensure that you have the correct permissions for operations in Kubernetes.
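As an optional follow-up (a sketch reusing the `NAMESPACE` and `RELEASE` variables defined above), you can confirm that the release is healthy before finishing:
```sh
# Sketch: inspect the release and watch the Pods come up in the target Namespace.
helm status $RELEASE --namespace $NAMESPACE
kubectl get pods --namespace $NAMESPACE --watch
```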

.github/labeler.yml

@@ -1,100 +1,62 @@
docs:
- changed-files:
- any-glob-to-any-file:
- 'docs/**/*'
- '**/README.md'
- 'packages/frontend/templates/**/*'
- 'docs/**/*'
- '**/README.md'
- 'packages/frontend/templates/**/*'
test:
- changed-files:
- any-glob-to-any-file:
- 'tests/**/*'
- '**/tests/**/*'
- '**/__tests__/**/*'
- 'tests/**/*'
- '**/tests/**/*'
- '**/__tests__/**/*'
mod:dev:
- changed-files:
- any-glob-to-any-file:
- 'scripts/**/*'
- 'tools/cli/**/*'
- 'packages/common/debug/**/*'
- 'scripts/**/*'
- 'tools/cli/**/*'
- 'packages/common/debug/**/*'
mod:plugin:
- 'packages/plugins/**/*'
plugin:copilot:
- 'packages/plugins/copilot/**/*'
mod:infra:
- changed-files:
- any-glob-to-any-file:
- 'packages/common/infra/**/*'
- 'packages/common/infra/**/*'
mod:sdk:
- 'packages/common/sdk/**/*'
mod:plugin-cli:
- changed-files:
- any-glob-to-any-file:
- 'tools/plugin-cli/**/*'
- 'tools/plugin-cli/**/*'
mod:workspace:
- changed-files:
- any-glob-to-any-file:
- 'packages/common/workspace/**/*'
mod:workspace: 'packages/frontend/workspace/**/*'
mod:workspace-impl:
- changed-files:
- any-glob-to-any-file:
- 'packages/frontend/workspace-impl/**/*'
mod:i18n: 'packages/frontend/i18n/**/*'
mod:i18n:
- changed-files:
- any-glob-to-any-file:
- 'packages/frontend/i18n/**/*'
mod:env: 'packages/common/env/**/*'
mod:env:
- changed-files:
- any-glob-to-any-file:
- 'packages/common/env/**/*'
mod:hooks: 'packages/frontend/hooks/**/*'
mod:component:
- changed-files:
- any-glob-to-any-file:
- 'packages/frontend/component/**/*'
mod:component: 'packages/frontend/component/**/*'
mod:storage:
- changed-files:
- any-glob-to-any-file:
- 'packages/backend/storage/**/*'
mod:storage: 'packages/backend/storage/**/*'
mod:native:
- changed-files:
- any-glob-to-any-file:
- 'packages/frontend/native/**/*'
mod:native: 'packages/frontend/native/**/*'
mod:store:
- changed-files:
- any-glob-to-any-file:
- '**/atoms/**/*'
- '**/atoms/**/*'
rust:
- changed-files:
- any-glob-to-any-file:
- '**/*.rs'
- '**/Cargo.toml'
- '**/Cargo.lock'
- '**/rust-toolchain'
- '**/rust-toolchain.toml'
- '**/rustfmt.toml'
- '**/*.rs'
- '**/Cargo.toml'
- '**/Cargo.lock'
- '**/rust-toolchain'
- '**/rust-toolchain.toml'
- '**/rustfmt.toml'
package:y-indexeddb:
- changed-files:
- any-glob-to-any-file:
- 'packages/common/y-indexeddb/**/*'
package:y-indexeddb: 'packages/common/y-indexeddb/**/*'
app:core:
- changed-files:
- any-glob-to-any-file:
- 'packages/frontend/core/**/*'
app:core: 'packages/frontend/core/**/*'
app:electron:
- changed-files:
- any-glob-to-any-file:
- 'packages/frontend/electron/**/*'
app:electron: 'packages/frontend/electron/**/*'
app:server:
- changed-files:
- any-glob-to-any-file:
- 'packages/backend/server/**/*'
app:server: 'packages/backend/server/**/*'

.github/renovate.json

@@ -1,77 +0,0 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": ["config:recommended", ":disablePeerDependencies"],
"labels": ["dependencies"],
"ignorePaths": [
"**/node_modules/**",
"**/bower_components/**",
"**/vendor/**",
"**/examples/**",
"**/__tests__/**",
"**/test/**",
"**/__fixtures__/**"
],
"packageRules": [
{
"matchPackageNames": ["napi", "napi-build", "napi-derive"],
"rangeStrategy": "replace",
"groupName": "napi-rs"
},
{
"matchPackagePatterns": ["^eslint", "^@typescript-eslint"],
"rangeStrategy": "replace",
"groupName": "linter"
},
{
"matchPackagePatterns": ["^@nestjs"],
"rangeStrategy": "replace",
"groupName": "nestjs"
},
{
"matchPackagePatterns": ["^@opentelemetry"],
"rangeStrategy": "replace",
"groupName": "opentelemetry"
},
{
"matchPackageNames": [
"@prisma/client",
"@prisma/instrumentation",
"prisma"
],
"rangeStrategy": "replace",
"groupName": "prisma"
},
{
"matchPackagePatterns": ["^@electron-forge"],
"rangeStrategy": "replace",
"groupName": "electron-forge"
},
{
"groupName": "blocksuite-nightly",
"matchPackagePatterns": ["^@blocksuite"],
"excludePackageNames": ["@blocksuite/icons"],
"rangeStrategy": "replace",
"followTag": "nightly"
},
{
"groupName": "all non-major dependencies",
"groupSlug": "all-minor-patch",
"matchPackagePatterns": ["*"],
"excludePackagePatterns": ["^@blocksuite/"],
"matchUpdateTypes": ["minor", "patch"]
},
{
"matchPackagePatterns": ["*"],
"rangeStrategy": "replace",
"excludePackagePatterns": ["^@blocksuite/"]
}
],
"commitMessagePrefix": "chore: ",
"commitMessageAction": "bump up",
"commitMessageTopic": "{{depName}} version",
"ignoreDeps": [],
"lockFileMaintenance": {
"enabled": true,
"extends": ["schedule:weekly"]
}
}


@@ -9,5 +9,4 @@ jobs:
pull-requests: write
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/labeler@v5
- uses: actions/labeler@v4

.github/workflows/build-desktop.yml

@@ -0,0 +1,190 @@
name: Build(Desktop) & Test
on:
push:
branches:
- canary
- v[0-9]+.[0-9]+.x-staging
- v[0-9]+.[0-9]+.x
paths-ignore:
- README.md
- .github/**
- '!.github/workflows/build-desktop.yml'
- '!.github/actions/build-rust/action.yml'
- '!.github/actions/setup-node/action.yml'
pull_request:
merge_group:
branches:
- canary
- v[0-9]+.[0-9]+.x-staging
- v[0-9]+.[0-9]+.x
paths-ignore:
- README.md
- .github/**
- '!.github/workflows/build-desktop.yml'
- '!.github/actions/build-rust/action.yml'
- '!.github/actions/setup-node/action.yml'
env:
DEBUG: napi:*
BUILD_TYPE: canary
APP_NAME: affine
COVERAGE: true
DISTRIBUTION: desktop
MACOSX_DEPLOYMENT_TARGET: '10.13'
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
jobs:
build-core:
name: Build @affine/core
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
- name: Build Core
run: yarn nx build @affine/core
- name: Upload core artifact
uses: actions/upload-artifact@v3
with:
name: core
path: ./packages/frontend/core/dist
if-no-files-found: error
build-native:
name: Build Native
runs-on: ubuntu-latest
needs: build-core
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build AFFiNE native
uses: ./.github/actions/build-rust
with:
target: x86_64-unknown-linux-gnu
package: '@affine/native'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Run tests
run: yarn test
working-directory: ./packages/frontend/native
desktop-test:
name: Desktop Test
runs-on: ${{ matrix.spec.os }}
strategy:
fail-fast: false
# all combinations: macos-latest x64, macos-latest arm64, windows-latest x64, ubuntu-latest x64
matrix:
spec:
- {
os: macos-latest,
platform: macos,
arch: x64,
target: x86_64-apple-darwin,
test: true,
}
- {
os: macos-latest,
platform: macos,
arch: arm64,
target: aarch64-apple-darwin,
test: false,
}
- {
os: ubuntu-latest,
platform: linux,
arch: x64,
target: x86_64-unknown-linux-gnu,
test: true,
}
- {
os: windows-latest,
platform: windows,
arch: x64,
target: x86_64-pc-windows-msvc,
test: true,
}
needs: build-core
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
timeout-minutes: 10
with:
extra-flags: workspaces focus @affine/electron @affine/monorepo @affine-test/affine-desktop
playwright-install: true
hard-link-nm: false
enableScripts: false
- name: Build AFFiNE native
uses: ./.github/actions/build-rust
with:
target: ${{ matrix.spec.target }}
package: '@affine/native'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Run unit tests
if: ${{ matrix.spec.test }}
shell: bash
run: yarn vitest
working-directory: packages/frontend/electron
- name: Download core artifact
uses: actions/download-artifact@v3
with:
name: core
path: packages/frontend/electron/resources/web-static
- name: Build Desktop Layers
run: yarn workspace @affine/electron build
- name: Run desktop tests
if: ${{ matrix.spec.test && matrix.spec.os == 'ubuntu-latest' }}
run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- yarn workspace @affine-test/affine-desktop e2e
env:
COVERAGE: true
- name: Run desktop tests
if: ${{ matrix.spec.test && matrix.spec.os != 'ubuntu-latest' }}
run: yarn workspace @affine-test/affine-desktop e2e
env:
COVERAGE: true
- name: Make bundle
if: ${{ matrix.spec.os == 'macos-latest' && matrix.spec.arch == 'arm64' }}
env:
SKIP_BUNDLE: true
SKIP_WEB_BUILD: true
HOIST_NODE_MODULES: 1
run: yarn workspace @affine/electron package --platform=darwin --arch=arm64
- name: Output check
if: ${{ matrix.spec.os == 'macos-latest' && matrix.spec.arch == 'arm64' }}
run: |
yarn workspace @affine/electron ts-node ./scripts/macos-arm64-output-check.ts
- name: Collect code coverage report
if: ${{ matrix.spec.test }}
run: yarn exec nyc report -t .nyc_output --report-dir .coverage --reporter=lcov
- name: Upload e2e test coverage results
if: ${{ matrix.spec.test }}
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./.coverage/lcov.info
flags: e2etest-${{ matrix.spec.os }}-${{ matrix.spec.arch }}
name: affine
fail_ci_if_error: false
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-${{ matrix.spec.os }}-${{ matrix.spec.arch }}
path: ./test-results
if-no-files-found: ignore

.github/workflows/build-server.yml

@@ -0,0 +1,311 @@
name: Build(Server) & Test
on:
push:
branches:
- canary
- v[0-9]+.[0-9]+.x-staging
- v[0-9]+.[0-9]+.x
paths-ignore:
- README.md
- .github/**
- '!.github/workflows/build-server.yml'
- '!.github/actions/build-rust/action.yml'
- '!.github/actions/setup-node/action.yml'
pull_request:
merge_group:
branches:
- canary
- v[0-9]+.[0-9]+.x-staging
- v[0-9]+.[0-9]+.x
paths-ignore:
- README.md
- .github/**
- '!.github/workflows/build-server.yml'
- '!.github/actions/build-rust/action.yml'
- '!.github/actions/setup-node/action.yml'
env:
DEBUG: napi:*
BUILD_TYPE: canary
APP_NAME: affine
COVERAGE: true
DISTRIBUTION: browser
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
jobs:
build-storage:
name: Build Storage
runs-on: ubuntu-latest
env:
RUSTFLAGS: '-C debuginfo=1'
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
extra-flags: workspaces focus @affine/storage
electron-install: false
build-infra: false
build-plugins: false
- name: Build Rust
uses: ./.github/actions/build-rust
with:
target: 'x86_64-unknown-linux-gnu'
package: '@affine/storage'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Upload storage.node
uses: actions/upload-artifact@v3
with:
name: storage.node
path: ./packages/backend/storage/storage.node
if-no-files-found: error
server-test:
name: Server Test
runs-on: ubuntu-latest
needs: build-storage
services:
postgres:
image: postgres
env:
POSTGRES_PASSWORD: affine
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
mailer:
image: mailhog/mailhog
ports:
- 1025:1025
- 8025:8025
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
- name: Initialize database
run: |
psql -h localhost -U postgres -c "CREATE DATABASE affine;"
psql -h localhost -U postgres -c "CREATE USER affine WITH PASSWORD 'affine';"
psql -h localhost -U postgres -c "ALTER USER affine WITH SUPERUSER;"
env:
PGPASSWORD: affine
- name: Generate prisma client
run: |
yarn workspace @affine/server exec prisma generate
yarn workspace @affine/server exec prisma db push
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Run init-db script
run: yarn workspace @affine/server exec ts-node ./scripts/init-db.ts
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Download storage.node
uses: actions/download-artifact@v3
with:
name: storage.node
path: ./packages/backend/server
- name: Run server tests
run: yarn workspace @affine/server test:coverage
env:
CARGO_TARGET_DIR: '${{ github.workspace }}/target'
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Upload server test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./packages/backend/server/.coverage/lcov.info
flags: server-test
name: affine
fail_ci_if_error: false
server-e2e-test:
name: Server E2E Test
runs-on: ubuntu-latest
needs: build-storage
services:
postgres:
image: postgres
env:
POSTGRES_PASSWORD: affine
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
mailer:
image: mailhog/mailhog
ports:
- 1025:1025
- 8025:8025
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
- name: Initialize database
run: |
psql -h localhost -U postgres -c "CREATE DATABASE affine;"
psql -h localhost -U postgres -c "CREATE USER affine WITH PASSWORD 'affine';"
psql -h localhost -U postgres -c "ALTER USER affine WITH SUPERUSER;"
env:
PGPASSWORD: affine
- name: Generate prisma client
run: |
yarn workspace @affine/server exec prisma generate
yarn workspace @affine/server exec prisma db push
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Run init-db script
run: yarn workspace @affine/server exec ts-node ./scripts/init-db.ts
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Download storage.node
uses: actions/download-artifact@v3
with:
name: storage.node
path: ./packages/backend/server
- name: Run playwright tests
run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- yarn workspace @affine-test/affine-cloud e2e --forbid-only
env:
COVERAGE: true
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Collect code coverage report
run: yarn exec nyc report -t .nyc_output --report-dir .coverage --reporter=lcov
- name: Upload e2e test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./.coverage/lcov.info
flags: server-e2etest
name: affine
fail_ci_if_error: false
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-server
path: ./tests/affine-cloud/test-results
if-no-files-found: ignore
server-desktop-e2e-test:
name: Server Desktop E2E Test
runs-on: ubuntu-latest
needs: build-storage
services:
postgres:
image: postgres
env:
POSTGRES_PASSWORD: affine
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
mailer:
image: mailhog/mailhog
ports:
- 1025:1025
- 8025:8025
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
hard-link-nm: false
- name: Build AFFiNE native
uses: ./.github/actions/build-rust
with:
target: x86_64-unknown-linux-gnu
package: '@affine/native'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Initialize database
run: |
psql -h localhost -U postgres -c "CREATE DATABASE affine;"
psql -h localhost -U postgres -c "CREATE USER affine WITH PASSWORD 'affine';"
psql -h localhost -U postgres -c "ALTER USER affine WITH SUPERUSER;"
env:
PGPASSWORD: affine
- name: Generate prisma client
run: |
yarn workspace @affine/server exec prisma generate
yarn workspace @affine/server prisma db push
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Run init-db script
run: yarn workspace @affine/server exec ts-node ./scripts/init-db.ts
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Download storage.node
uses: actions/download-artifact@v3
with:
name: storage.node
path: ./packages/backend/server
- name: Build Plugins
run: yarn run build:plugins
- name: Build Desktop Layers
run: yarn workspace @affine/electron build:dev
- name: Run playwright tests
run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" yarn workspace @affine-test/affine-desktop-cloud e2e
env:
COVERAGE: true
DEV_SERVER_URL: http://localhost:8080
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
ENABLE_LOCAL_EMAIL: true
- name: Collect code coverage report
run: yarn exec nyc report -t .nyc_output --report-dir .coverage --reporter=lcov
- name: Upload e2e test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./.coverage/lcov.info
flags: server-e2etest
name: affine
fail_ci_if_error: false
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-server
path: ./tests/affine-cloud/test-results
if-no-files-found: ignore
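Note: the Postgres/Prisma bootstrap that both jobs above perform can be reproduced locally with the same commands, for example when debugging a failing cloud e2e run. A minimal sketch, assuming a local Postgres on localhost:5432 whose postgres superuser password is affine (matching the service container used here):

    # create the database and user the server expects
    export PGPASSWORD=affine
    psql -h localhost -U postgres -c "CREATE DATABASE affine;"
    psql -h localhost -U postgres -c "CREATE USER affine WITH PASSWORD 'affine';"
    psql -h localhost -U postgres -c "ALTER USER affine WITH SUPERUSER;"
    # generate the Prisma client, push the schema, then seed initial data
    export DATABASE_URL=postgresql://affine:affine@localhost:5432/affine
    yarn workspace @affine/server exec prisma generate
    yarn workspace @affine/server exec prisma db push
    yarn workspace @affine/server exec ts-node ./scripts/init-db.ts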

@@ -1,560 +0,0 @@
name: Build & Test
on:
push:
branches:
- canary
- v[0-9]+.[0-9]+.x-staging
- v[0-9]+.[0-9]+.x
paths-ignore:
- README.md
pull_request:
env:
DEBUG: napi:*
BUILD_TYPE: canary
APP_NAME: affine
AFFINE_ENV: dev
COVERAGE: true
MACOSX_DEPLOYMENT_TARGET: '10.13'
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
PLAYWRIGHT_BROWSERS_PATH: ${{ github.workspace }}/node_modules/.cache/ms-playwright
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: ['javascript', 'typescript']
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- name: Checkout repository
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v3
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# If the Autobuild fails above, remove it and uncomment the following three lines.
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
# - run: |
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
lint:
name: Lint
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Run oxlint
# oxlint is fast, so wrong code will fail quickly
run: yarn dlx $(node -e "console.log(require('./package.json').scripts['lint:ox'].replace('oxlint', 'oxlint@' + require('./package.json').devDependencies.oxlint))")
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
full-cache: true
- name: Run i18n codegen
run: yarn i18n-codegen gen
- name: Run ESLint
run: yarn lint:eslint --max-warnings=0
- name: Run Prettier
# Set nmMode in `actions/setup-node` will modify the .yarnrc.yml
run: |
git checkout .yarnrc.yml
yarn lint:prettier
- name: Run Type Check
run: yarn typecheck
check-yarn-binary:
name: Check yarn binary
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Run check
run: |
yarn set version $(node -e "console.log(require('./package.json').packageManager.split('@')[1])")
git diff --exit-code
e2e-test:
name: E2E Test
runs-on: ubuntu-latest
env:
DISTRIBUTION: browser
strategy:
fail-fast: false
matrix:
shard: [1, 2, 3, 4, 5]
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
electron-install: false
full-cache: true
- name: Run playwright tests
run: yarn workspace @affine-test/affine-local e2e --forbid-only --shard=${{ matrix.shard }}/${{ strategy.job-total }}
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v4
with:
name: test-results-e2e-${{ matrix.shard }}
path: ./test-results
if-no-files-found: ignore
e2e-migration-test:
name: E2E Migration Test
runs-on: ubuntu-latest
env:
DISTRIBUTION: browser
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
electron-install: false
full-cache: true
- name: Run playwright tests
run: yarn workspace @affine-test/affine-migration e2e --forbid-only
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v4
with:
name: test-results-e2e-migration
path: ./tests/affine-migration/test-results
if-no-files-found: ignore
unit-test:
name: Unit Test
runs-on: ubuntu-latest
needs:
- build-native
env:
DISTRIBUTION: browser
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
full-cache: true
- name: Download affine.linux-x64-gnu.node
uses: actions/download-artifact@v4
with:
name: affine.linux-x64-gnu.node
path: ./packages/frontend/native
- name: Unit Test
run: yarn nx test:coverage @affine/monorepo
- name: Upload unit test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./.coverage/store/lcov.info
flags: unittest
name: affine
fail_ci_if_error: false
build-native:
name: Build AFFiNE native (${{ matrix.spec.target }})
runs-on: ${{ matrix.spec.os }}
env:
CARGO_PROFILE_RELEASE_DEBUG: '1'
strategy:
fail-fast: false
matrix:
spec:
- { os: ubuntu-latest, target: x86_64-unknown-linux-gnu }
- { os: windows-latest, target: x86_64-pc-windows-msvc }
- { os: macos-latest, target: x86_64-apple-darwin }
- { os: macos-latest, target: aarch64-apple-darwin }
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
extra-flags: workspaces focus @affine/native
electron-install: false
- name: Setup filename
id: filename
shell: bash
run: |
export PLATFORM_ARCH_ABI=$(node -e "console.log(require('@napi-rs/cli').parseTriple('${{ matrix.spec.target }}').platformArchABI)")
echo "filename=affine.$PLATFORM_ARCH_ABI.node" >> "$GITHUB_OUTPUT"
- name: Build AFFiNE native
uses: ./.github/actions/build-rust
with:
target: ${{ matrix.spec.target }}
package: '@affine/native'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Upload ${{ steps.filename.outputs.filename }}
uses: actions/upload-artifact@v4
with:
name: ${{ steps.filename.outputs.filename }}
path: ./packages/frontend/native/${{ steps.filename.outputs.filename }}
if-no-files-found: error
build-storage:
name: Build Storage
runs-on: ubuntu-latest
env:
CARGO_PROFILE_RELEASE_DEBUG: '1'
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
extra-flags: workspaces focus @affine/storage
electron-install: false
- name: Build Rust
uses: ./.github/actions/build-rust
with:
target: 'x86_64-unknown-linux-gnu'
package: '@affine/storage'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Upload storage.node
uses: actions/upload-artifact@v4
with:
name: storage.node
path: ./packages/backend/storage/storage.node
if-no-files-found: error
build-core:
name: Build @affine/core
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
full-cache: true
- name: Build Core
# always skip cache because its fast, and cache configuration is always changing
run: yarn nx build @affine/core --skip-nx-cache
- name: zip core
run: tar -czf dist.tar.gz --directory=packages/frontend/core/dist .
- name: Upload core artifact
uses: actions/upload-artifact@v4
with:
name: core
path: dist.tar.gz
if-no-files-found: error
server-test:
name: Server Test
runs-on: ubuntu-latest
needs: build-storage
env:
DISTRIBUTION: browser
services:
postgres:
image: postgres
env:
POSTGRES_PASSWORD: affine
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
mailer:
image: mailhog/mailhog
ports:
- 1025:1025
- 8025:8025
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
full-cache: true
- name: Download storage.node
uses: actions/download-artifact@v4
with:
name: storage.node
path: ./packages/backend/server
- name: Initialize database
run: |
psql -h localhost -U postgres -c "CREATE DATABASE affine;"
psql -h localhost -U postgres -c "CREATE USER affine WITH PASSWORD 'affine';"
psql -h localhost -U postgres -c "ALTER USER affine WITH SUPERUSER;"
env:
PGPASSWORD: affine
- name: Generate prisma client
run: |
yarn workspace @affine/server exec prisma generate
yarn workspace @affine/server exec prisma db push
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Run init-db script
run: |
yarn workspace @affine/server data-migration run
yarn workspace @affine/server exec node --loader ts-node/esm/transpile-only ./scripts/init-db.ts
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Run server tests
run: yarn workspace @affine/server test:coverage
env:
CARGO_TARGET_DIR: '${{ github.workspace }}/target'
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Upload server test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./packages/backend/server/.coverage/lcov.info
flags: server-test
name: affine
fail_ci_if_error: false
server-e2e-test:
name: ${{ matrix.tests.name }}
runs-on: ubuntu-latest
env:
DISTRIBUTION: browser
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
strategy:
fail-fast: false
matrix:
tests:
- name: 'Server E2E Test 1/3'
script: yarn workspace @affine-test/affine-cloud e2e --forbid-only --shard=1/3
- name: 'Server E2E Test 2/3'
script: yarn workspace @affine-test/affine-cloud e2e --forbid-only --shard=2/3
- name: 'Server E2E Test 3/3'
script: yarn workspace @affine-test/affine-cloud e2e --forbid-only --shard=3/3
- name: 'Server Desktop E2E Test'
script: |
yarn workspace @affine/electron build:dev
xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- yarn workspace @affine-test/affine-desktop-cloud e2e
needs:
- build-storage
- build-native
services:
postgres:
image: postgres
env:
POSTGRES_PASSWORD: affine
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
mailer:
image: mailhog/mailhog
ports:
- 1025:1025
- 8025:8025
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
hard-link-nm: false
- name: Download storage.node
uses: actions/download-artifact@v4
with:
name: storage.node
path: ./packages/backend/server
- name: Download affine.linux-x64-gnu.node
uses: actions/download-artifact@v4
with:
name: affine.linux-x64-gnu.node
path: ./packages/frontend/native
- name: Initialize database
run: |
psql -h localhost -U postgres -c "CREATE DATABASE affine;"
psql -h localhost -U postgres -c "CREATE USER affine WITH PASSWORD 'affine';"
psql -h localhost -U postgres -c "ALTER USER affine WITH SUPERUSER;"
env:
PGPASSWORD: affine
- name: Generate prisma client
run: |
yarn workspace @affine/server exec prisma generate
yarn workspace @affine/server exec prisma db push
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Run init-db script
run: |
yarn workspace @affine/server data-migration run
yarn workspace @affine/server exec node --loader ts-node/esm/transpile-only ./scripts/init-db.ts
- name: ${{ matrix.tests.name }}
run: |
${{ matrix.tests.script }}
env:
DEV_SERVER_URL: http://localhost:8080
ENABLE_LOCAL_EMAIL: true
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v4
with:
name: test-results-e2e-server
path: ./tests/affine-cloud/test-results
if-no-files-found: ignore
desktop-test:
name: Desktop Test (${{ matrix.spec.os }}, ${{ matrix.spec.platform }}, ${{ matrix.spec.arch }}, ${{ matrix.spec.target }}, ${{ matrix.spec.test }})
runs-on: ${{ matrix.spec.os }}
strategy:
fail-fast: false
# all combinations: macos-latest x64, macos-latest arm64, windows-latest x64, ubuntu-latest x64
matrix:
spec:
- {
os: macos-latest,
platform: macos,
arch: x64,
target: x86_64-apple-darwin,
test: true,
}
- {
os: macos-latest,
platform: macos,
arch: arm64,
target: aarch64-apple-darwin,
test: false,
}
- {
os: ubuntu-latest,
platform: linux,
arch: x64,
target: x86_64-unknown-linux-gnu,
test: true,
}
- {
os: windows-latest,
platform: windows,
arch: x64,
target: x86_64-pc-windows-msvc,
test: true,
}
needs:
- build-core
- build-native
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
timeout-minutes: 10
with:
extra-flags: workspaces focus @affine/electron @affine/monorepo @affine-test/affine-desktop
playwright-install: true
hard-link-nm: false
enableScripts: false
- name: Setup filename
id: filename
shell: bash
run: |
export PLATFORM_ARCH_ABI=$(node -e "console.log(require('@napi-rs/cli').parseTriple('${{ matrix.spec.target }}').platformArchABI)")
echo "filename=affine.$PLATFORM_ARCH_ABI.node" >> "$GITHUB_OUTPUT"
- name: Download ${{ steps.filename.outputs.filename }}
uses: actions/download-artifact@v4
with:
name: ${{ steps.filename.outputs.filename }}
path: ./packages/frontend/native
- name: Run unit tests
if: ${{ matrix.spec.test }}
shell: bash
run: yarn workspace @affine/electron vitest
- name: Download core artifact
uses: ./.github/actions/download-core
with:
path: packages/frontend/electron/resources/web-static
- name: Build Desktop Layers
run: yarn workspace @affine/electron build
- name: Run desktop tests
if: ${{ matrix.spec.test && matrix.spec.os == 'ubuntu-latest' }}
run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- yarn workspace @affine-test/affine-desktop e2e
- name: Run desktop tests
if: ${{ matrix.spec.test && matrix.spec.os != 'ubuntu-latest' }}
run: yarn workspace @affine-test/affine-desktop e2e
- name: Make bundle
if: ${{ matrix.spec.os == 'macos-latest' && matrix.spec.arch == 'arm64' }}
env:
SKIP_BUNDLE: true
SKIP_WEB_BUILD: true
HOIST_NODE_MODULES: 1
run: yarn workspace @affine/electron package --platform=darwin --arch=arm64
- name: Output check
if: ${{ matrix.spec.os == 'macos-latest' && matrix.spec.arch == 'arm64' }}
run: |
yarn workspace @affine/electron exec node --loader ts-node/esm/transpile-only ./scripts/macos-arm64-output-check.ts
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v4
with:
name: test-results-e2e-${{ matrix.spec.os }}-${{ matrix.spec.arch }}
path: ./test-results
if-no-files-found: ignore
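Note: the "Setup filename" steps above derive the native artifact name from the Rust target triple via @napi-rs/cli. A sketch of what that resolves to for the Linux target, assuming @napi-rs/cli is resolvable from the repo root:

    # parseTriple maps a Rust target triple to NAPI's platform-arch-ABI suffix
    node -e "console.log(require('@napi-rs/cli').parseTriple('x86_64-unknown-linux-gnu').platformArchABI)"
    # prints linux-x64-gnu, so the uploaded artifact is affine.linux-x64-gnu.node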

.github/workflows/build.yml (new file, 201 lines)
@@ -0,0 +1,201 @@
name: Build & Test
on:
push:
branches:
- canary
- v[0-9]+.[0-9]+.x-staging
- v[0-9]+.[0-9]+.x
paths-ignore:
- README.md
- .github/**
- '!.github/workflows/build.yml'
- '!.github/actions/build-rust/action.yml'
- '!.github/actions/setup-node/action.yml'
pull_request:
merge_group:
branches:
- canary
- v[0-9]+.[0-9]+.x-staging
- v[0-9]+.[0-9]+.x
paths-ignore:
- README.md
- .github/**
- '!.github/workflows/build.yml'
- '!.github/actions/build-rust/action.yml'
- '!.github/actions/setup-node/action.yml'
env:
DEBUG: napi:*
BUILD_TYPE: canary
APP_NAME: affine
AFFINE_ENV: dev
COVERAGE: true
DISTRIBUTION: browser
MACOSX_DEPLOYMENT_TARGET: '10.13'
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
jobs:
lint:
name: Lint
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Run oxlint
# oxlint is fast, so wrong code will fail quickly
run: yarn dlx oxlint@latest .
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
- name: Run i18n codegen
run: yarn i18n-codegen gen
- name: Run ESLint
run: yarn lint:eslint --max-warnings=0
- name: Run Prettier
# Set nmMode in `actions/setup-node` will modify the .yarnrc.yml
run: |
git checkout .yarnrc.yml
yarn lint:prettier
- name: Run circular
run: yarn circular
- name: Run Type Check
run: yarn typecheck
check-yarn-binary:
name: Check yarn binary
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Run check
run: |
yarn set version $(node -e "console.log(require('./package.json').packageManager.split('@')[1])")
git diff --exit-code
e2e-plugin-test:
name: E2E Plugin Test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
electron-install: false
- name: Run playwright tests
run: yarn e2e --forbid-only
working-directory: tests/affine-plugin
env:
COVERAGE: true
- name: Collect code coverage report
run: yarn exec nyc report -t .nyc_output --report-dir .coverage --reporter=lcov
- name: Upload e2e test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./.coverage/lcov.info
flags: e2e-plugin-test
name: affine
fail_ci_if_error: false
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-plugin
path: ./test-results
if-no-files-found: ignore
e2e-test:
name: E2E Test
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
shard: [1, 2, 3, 4, 5]
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
electron-install: false
- name: Run playwright tests
run: yarn e2e --forbid-only --shard=${{ matrix.shard }}/${{ strategy.job-total }}
working-directory: tests/affine-local
env:
COVERAGE: true
- name: Collect code coverage report
run: yarn exec nyc report -t .nyc_output --report-dir .coverage --reporter=lcov
- name: Upload e2e test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./.coverage/lcov.info
flags: e2etest
name: affine
fail_ci_if_error: false
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-${{ matrix.shard }}
path: ./test-results
if-no-files-found: ignore
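Note: strategy.job-total resolves to 5 for the shard matrix above, so each runner executes one slice of the Playwright suite. As a sketch, the third runner's command effectively expands to:

    # Playwright splits the test files into 5 roughly equal shards and this runner executes only the 3rd
    yarn e2e --forbid-only --shard=3/5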
e2e-migration-test:
name: E2E Migration Test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
electron-install: false
- name: Run playwright tests
run: yarn workspace @affine-test/affine-migration e2e --forbid-only
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-migration
path: ./tests/affine-migration/test-results
if-no-files-found: ignore
unit-test:
name: Unit Test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
- name: Build AFFiNE native
uses: ./.github/actions/build-rust
with:
target: x86_64-unknown-linux-gnu
package: '@affine/native'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Unit Test
run: yarn nx test:coverage @affine/monorepo
- name: Upload unit test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./.coverage/store/lcov.info
flags: unittest
name: affine
fail_ci_if_error: false

.github/workflows/cache-cleanup.yml (new file, 36 lines)
@@ -0,0 +1,36 @@
# https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#force-deleting-cache-entries
name: Cleanup caches for closed branches
on:
pull_request:
types:
- closed
workflow_dispatch:
jobs:
cleanup:
runs-on: ubuntu-latest
steps:
- name: Check out code
uses: actions/checkout@v4
- name: Cleanup
run: |
gh extension install actions/gh-actions-cache
REPO=${{ github.repository }}
BRANCH="refs/pull/${{ github.event.pull_request.number }}/merge"
echo "Fetching list of cache key"
cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH | cut -f 1 )
## Setting this to not fail the workflow while deleting cache keys.
set +e
echo "Deleting caches..."
for cacheKey in $cacheKeysForPR
do
gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
done
echo "Done"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
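Note: the same cleanup can be performed from a terminal with the gh CLI; a sketch, assuming an authenticated gh session and an illustrative PR number (1234):

    gh extension install actions/gh-actions-cache
    # list the cache keys created for the PR's merge ref, then delete them one by one
    gh actions-cache list -R toeverything/AFFiNE -B refs/pull/1234/merge | cut -f 1 |
      while read -r key; do
        gh actions-cache delete "$key" -R toeverything/AFFiNE -B refs/pull/1234/merge --confirm
      done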

.github/workflows/cancel.yml (new file, 18 lines)
@@ -0,0 +1,18 @@
name: Cancel
on:
pull_request_target:
types:
- edited
- synchronize
jobs:
cancel:
name: 'Cancel Previous Runs'
runs-on: ubuntu-latest
timeout-minutes: 2
steps:
- uses: styfle/cancel-workflow-action@0.12.0
with:
# See https://api.github.com/repos/toeverything/AFFiNE/actions/workflows
workflow_id: 44038251, 61883931, 65188160, 66789140
access_token: ${{ github.token }}

.github/workflows/codeql.yml (new file, 70 lines)
@@ -0,0 +1,70 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: 'CodeQL'
on:
push:
branches: [canary]
pull_request:
merge_group:
# The branches below must be a subset of the branches above
branches: [canary]
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: ['javascript']
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- name: Checkout repository
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# If the Autobuild fails above, remove it and uncomment the following three lines.
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
# - run: |
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2

@@ -1,27 +0,0 @@
name: Deploy Automatically
on:
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+-canary.[0-9]+'
schedule:
- cron: '0 9 * * *'
jobs:
dispatch-deploy:
runs-on: ubuntu-latest
name: Setup Deploy
steps:
- name: dispatch deploy by tag
if: ${{ github.event_name == 'push' }}
uses: benc-uk/workflow-dispatch@v1
with:
workflow: deploy.yml
inputs: '{ "flavor": "canary" }'
- name: dispatch deploy by schedule
if: ${{ github.event_name == 'schedule' }}
uses: benc-uk/workflow-dispatch@v1
with:
workflow: deploy.yml
inputs: '{ "flavor": "canary" }'
ref: canary

@@ -4,15 +4,12 @@ on:
workflow_dispatch:
inputs:
flavor:
description: 'Select what enverionment to deploy to'
type: choice
description: 'Build type (canary, beta, or stable)'
type: string
default: canary
options:
- canary
- beta
- stable
- internal
env:
BUILD_TYPE: canary
APP_NAME: affine
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
@@ -20,11 +17,9 @@ jobs:
build-server:
name: Build Server
runs-on: ubuntu-latest
environment: ${{ github.event.inputs.flavor }}
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
@@ -32,7 +27,7 @@ jobs:
- name: Build Server
run: yarn workspace @affine/server build
- name: Upload server dist
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: server-dist
path: ./packages/backend/server/dist
@@ -43,18 +38,17 @@ jobs:
environment: ${{ github.event.inputs.flavor }}
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Plugins
run: yarn run build:plugins
- name: Build Core
run: yarn nx build @affine/core --skip-nx-cache
run: yarn nx build @affine/core
env:
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
BUILD_TYPE: ${{ github.event.inputs.flavor }}
BUILD_TYPE_OVERRIDE: ${{ github.event.inputs.flavor }}
SHOULD_REPORT_TRACE: true
TRACE_REPORT_ENDPOINT: ${{ secrets.TRACE_REPORT_ENDPOINT }}
CAPTCHA_SITE_KEY: ${{ secrets.CAPTCHA_SITE_KEY }}
@@ -63,7 +57,7 @@ jobs:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
- name: Upload core artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: core
path: ./packages/frontend/core/dist
@@ -75,19 +69,16 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Rust
- name: Setup Rust
uses: ./.github/actions/build-rust
with:
target: 'x86_64-unknown-linux-gnu'
package: '@affine/storage'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Upload storage.node
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: storage.node
path: ./packages/backend/storage/storage.node
@@ -99,19 +90,16 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Rust
- name: Setup Rust
uses: ./.github/actions/build-rust
with:
target: 'aarch64-unknown-linux-gnu'
package: '@affine/storage'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Upload storage.node
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: storage.arm64.node
path: ./packages/backend/storage/storage.node
@@ -128,22 +116,22 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Download core artifact
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: core
path: ./packages/frontend/core/dist
- name: Download server dist
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: server-dist
path: ./packages/backend/server/dist
- name: Download storage.node
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: storage.node
path: ./packages/backend/server
- name: Download storage.node arm64
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: storage.arm64.node
path: ./packages/backend/storage
@@ -221,10 +209,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Deploy to ${{ github.event.inputs.flavor }}
- name: Deploy to dev
uses: ./.github/actions/deploy
with:
build-type: ${{ github.event.inputs.flavor }}
@@ -234,12 +219,12 @@ jobs:
cluster-name: ${{ secrets.GCP_CLUSTER_NAME }}
cluster-location: ${{ secrets.GCP_CLUSTER_LOCATION }}
env:
APP_VERSION: ${{ steps.version.outputs.APP_VERSION }}
DEPLOY_HOST: ${{ secrets.DEPLOY_HOST }}
CANARY_DEPLOY_HOST: ${{ secrets.CANARY_DEPLOY_HOST }}
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
R2_BUCKET: ${{ secrets.R2_BUCKET }}
ENABLE_CAPTCHA: true
CAPTCHA_TURNSTILE_SECRET: ${{ secrets.CAPTCHA_TURNSTILE_SECRET }}
OAUTH_EMAIL_SENDER: ${{ secrets.OAUTH_EMAIL_SENDER }}
@@ -259,4 +244,3 @@ jobs:
CLOUD_SQL_IAM_ACCOUNT: ${{ secrets.CLOUD_SQL_IAM_ACCOUNT }}
STRIPE_API_KEY: ${{ secrets.STRIPE_API_KEY }}
STRIPE_WEBHOOK_KEY: ${{ secrets.STRIPE_WEBHOOK_KEY }}
STATIC_IP_NAME: ${{ secrets.STATIC_IP_NAME }}

.github/workflows/dispatch-deploy.yml (new file, 30 lines)
@@ -0,0 +1,30 @@
name: Dispatch Deploy by tag
on:
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+-canary.[0-9]+'
jobs:
dispatch-deploy-by-tag:
runs-on: ubuntu-latest
name: Setup deploy environment
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
extra-flags: 'workspaces focus @affine/monorepo'
hard-link-nm: false
electron-install: false
build-infra: false
build-plugins: false
- name: Setup output value
id: flavor
run: |
node -e "const env = require('semver').parse('${{ github.ref_name }}').prerelease[0] ?? 'stable'; console.log(`flavor=${env}`)" >> "$GITHUB_OUTPUT"
- name: dispatch deploy
uses: benc-uk/workflow-dispatch@v1
with:
workflow: deploy.yml
inputs: '{ "flavor": "${{ steps.flavor.outputs.flavor }}" }'

.github/workflows/nightly-build.yml (new file, 251 lines)
@@ -0,0 +1,251 @@
name: Build Canary Desktop App on Staging Branch
on:
workflow_dispatch:
inputs:
channel_override:
description: 'channel type (canary, beta, or stable)'
type: choice
default: beta
options:
- canary
- beta
- stable
push:
branches:
# 0.6.x-staging
- v[0-9]+.[0-9]+.x-staging
# 0.6.1-staging
- v[0-9]+.[0-9]+.[0-9]+-staging
paths-ignore:
- README.md
- .github/**
- '!.github/workflows/nightly-build.yml'
- '!.github/actions/build-rust/action.yml'
- '!.github/actions/setup-node/action.yml'
permissions:
actions: write
contents: write
security-events: write
concurrency:
# The concurrency group contains the workflow name and the branch name for
# pull requests or the commit hash for any other events.
group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.head_ref || github.sha }}
cancel-in-progress: true
env:
# BUILD_TYPE => app icon, app name, etc
BUILD_TYPE: internal
# BUILD_TYPE_OVERRIDE => channel type (canary, beta, or stable) - get the channel type (the api configs)
BUILD_TYPE_OVERRIDE: ${{ github.event.inputs.channel_override || 'beta' }}
jobs:
set-build-version:
runs-on: ubuntu-latest
outputs:
version: 0.0.0-internal.${{ steps.version.outputs.version }}
steps:
- uses: actions/checkout@v4
- uses: toeverything/set-build-version@latest
- id: version
run: echo ::set-output name=version::${{ env.BUILD_VERSION }}
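Note: the ::set-output workflow command used in the step above is deprecated on current GitHub runners; the pattern used elsewhere in these workflows appends to the GITHUB_OUTPUT file instead. A sketch of the equivalent step body:

    # expose the job output "version" without the deprecated ::set-output command
    echo "version=${BUILD_VERSION}" >> "$GITHUB_OUTPUT"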
before-make:
runs-on: ubuntu-latest
needs:
- set-build-version
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Setup @sentry/cli
uses: ./.github/actions/setup-sentry
- name: Replace Version
run: ./scripts/set-version.sh ${{ needs.set-build-version.outputs.version }}
- name: generate-assets
working-directory: packages/frontend/electron
run: yarn generate-assets
env:
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
RELEASE_VERSION: ${{ needs.set-build-version.outputs.version }}
- name: Upload core artifact
uses: actions/upload-artifact@v3
with:
name: core
path: packages/frontend/electron/resources/web-static
make-distribution:
strategy:
# all combinations: macos-latest x64, macos-latest arm64, ubuntu-latest x64
# For windows, we need a separate approach
matrix:
spec:
- runner: macos-latest
platform: darwin
arch: x64
target: x86_64-apple-darwin
- runner: macos-latest
platform: darwin
arch: arm64
target: aarch64-apple-darwin
- runner: ubuntu-latest
platform: linux
arch: x64
target: x86_64-unknown-linux-gnu
- runner: windows-latest
platform: win32
arch: x64
target: x86_64-pc-windows-msvc
runs-on: ${{ matrix.spec.runner }}
needs:
- before-make
- set-build-version
env:
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
SKIP_GENERATE_ASSETS: 1
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
timeout-minutes: 10
if: ${{ matrix.spec.platform == 'darwin' }}
uses: ./.github/actions/setup-node
with:
extra-flags: workspaces focus @affine/electron @affine/monorepo
hard-link-nm: false
build-plugins: false
nmHoistingLimits: workspaces
enableScripts: false
- name: Setup Node.js
timeout-minutes: 10
if: ${{ matrix.spec.platform != 'darwin' }}
uses: ./.github/actions/setup-node
with:
extra-flags: workspaces focus @affine/electron @affine/monorepo
hard-link-nm: false
build-plugins: false
nmHoistingLimits: workspaces
- name: Build AFFiNE native
uses: ./.github/actions/build-rust
with:
target: ${{ matrix.spec.target }}
package: '@affine/native'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Replace Version
run: ./scripts/set-version.sh ${{ needs.set-build-version.outputs.version }}
- uses: actions/download-artifact@v3
with:
name: core
path: packages/frontend/electron/resources/web-static
- name: Build Plugins
run: yarn run build:plugins
- name: Build Desktop Layers
run: yarn workspace @affine/electron build
- name: Signing By Apple Developer ID
if: ${{ matrix.spec.platform == 'darwin' }}
uses: apple-actions/import-codesign-certs@v2
with:
p12-file-base64: ${{ secrets.CERTIFICATES_P12 }}
p12-password: ${{ secrets.CERTIFICATES_P12_PASSWORD }}
- name: make
run: yarn workspace @affine/electron make --platform=${{ matrix.spec.platform }} --arch=${{ matrix.spec.arch }}
env:
SKIP_PLUGIN_BUILD: 1
SKIP_WEB_BUILD: 1
HOIST_NODE_MODULES: 1
- name: Save artifacts (mac)
if: ${{ matrix.spec.platform == 'darwin' }}
run: |
mkdir -p builds
mv packages/frontend/electron/out/*/make/*.dmg ./builds/affine-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.dmg
mv packages/frontend/electron/out/*/make/zip/darwin/${{ matrix.spec.arch }}/*.zip ./builds/affine-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.zip
- name: Save artifacts (windows)
if: ${{ matrix.spec.platform == 'win32' }}
run: |
mkdir -p builds
mv packages/frontend/electron/out/*/make/zip/win32/x64/AFFiNE*-win32-x64-*.zip ./builds/affine-${{ env.BUILD_TYPE }}-windows-x64.zip
mv packages/frontend/electron/out/*/make/squirrel.windows/x64/*.exe ./builds/affine-${{ env.BUILD_TYPE }}-windows-x64.exe
mv packages/frontend/electron/out/*/make/squirrel.windows/x64/*.msi ./builds/affine-${{ env.BUILD_TYPE }}-windows-x64.msi
mv packages/frontend/electron/out/*/make/squirrel.windows/x64/*.nupkg ./builds/affine-${{ env.BUILD_TYPE }}-windows-x64.nupkg
- name: Save artifacts (linux)
if: ${{ matrix.spec.platform == 'linux' }}
run: |
mkdir -p builds
mv packages/frontend/electron/out/*/make/zip/linux/x64/*.zip ./builds/affine-${{ env.BUILD_TYPE }}-linux-x64.zip
mv packages/frontend/electron/out/*/make/AppImage/x64/*.AppImage ./builds/affine-${{ env.BUILD_TYPE }}-linux-x64.AppImage
- name: Upload Artifact
uses: actions/upload-artifact@v3
with:
name: affine-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}-builds
path: builds
release:
needs:
- make-distribution
- set-build-version
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Download Artifacts (macos-x64)
uses: actions/download-artifact@v3
with:
name: affine-darwin-x64-builds
path: ./
- name: Download Artifacts (macos-arm64)
uses: actions/download-artifact@v3
with:
name: affine-darwin-arm64-builds
path: ./
- name: Download Artifacts (windows-x64)
uses: actions/download-artifact@v3
with:
name: affine-win32-x64-builds
path: ./
- name: Download Artifacts (linux-x64)
uses: actions/download-artifact@v3
with:
name: affine-linux-x64-builds
path: ./
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 18
- name: Generate Release yml
run: |
node ./packages/frontend/electron/scripts/generate-yml.js
env:
RELEASE_VERSION: ${{ needs.set-build-version.outputs.version }}
- name: Create Release Draft
uses: softprops/action-gh-release@v1
env:
GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}
with:
repository: 'toeverything/AFFiNE-Releases'
name: ${{ needs.set-build-version.outputs.version }}
tag_name: ${{ needs.set-build-version.outputs.version }}
prerelease: true
files: |
./VERSION
./*.zip
./*.dmg
./*.exe
./*.nupkg
./RELEASES
./*.AppImage
./*.apk
./*.yml

@@ -19,10 +19,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
uses: ./.github/actions/setup-node
with:
cache: 'yarn'
node-version-file: '.nvmrc'
- name: Install dependencies
run: yarn workspaces focus @affine/commitlint-config
- run: echo "${{ github.event.pull_request.title }}" | yarn workspace @affine/commitlint-config commitlint -g ./.commitlintrc.json
electron-install: false
- run: echo "${{ github.event.pull_request.title }}" | yarn dlx commitlint -g ./.commitlintrc.json

@@ -32,6 +32,8 @@ jobs:
uses: ./.github/actions/setup-node
with:
electron-install: false
- name: Build Plugins
run: yarn run build:plugins
- uses: chromaui/action-next@v1
with:
workingDir: tests/storybook
@@ -42,7 +44,7 @@ jobs:
env:
CHROMATIC_PROJECT_TOKEN: ${{ secrets.CHROMATIC_PROJECT_TOKEN }}
NODE_OPTIONS: ${{ env.NODE_OPTIONS }}
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
if: always()
with:
name: chromatic-build-artifacts-${{ github.run_id }}

@@ -1,17 +1,15 @@
name: Release Desktop App
on:
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+-canary.[0-9]+'
workflow_dispatch:
inputs:
build-type:
description: 'Build Type'
type: choice
version:
description: App Version
required: true
default: canary
options:
- canary
- beta
- stable
default: 0.0.0
is-draft:
description: 'Draft Release?'
type: boolean
@@ -22,6 +20,11 @@ on:
type: boolean
required: true
default: true
build-type:
description: 'Build Type (canary, beta or stable)'
type: string
required: true
default: canary
permissions:
actions: write
@@ -29,7 +32,7 @@ permissions:
security-events: write
env:
BUILD_TYPE: ${{ github.event.inputs.build-type }}
BUILD_TYPE: ${{ github.event.inputs.build-type || (github.ref_type == 'tag' && contains(github.ref, 'canary') && 'canary') }}
DEBUG: napi:*
APP_NAME: affine
MACOSX_DEPLOYMENT_TARGET: '10.13'
@@ -37,18 +40,26 @@ env:
jobs:
before-make:
runs-on: ubuntu-latest
environment: ${{ github.event.inputs.build-type }}
environment: ${{ github.event.inputs.build-type || (github.ref_type == 'tag' && contains(github.ref, 'canary') && 'canary') }}
outputs:
RELEASE_VERSION: ${{ steps.version.outputs.APP_VERSION }}
RELEASE_VERSION: ${{ steps.get-canary-version.outputs.RELEASE_VERSION }}
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Setup @sentry/cli
uses: ./.github/actions/setup-sentry
- name: Get canary version
id: get-canary-version
if: ${{ github.ref_type == 'tag' }}
run: |
TAG_VERSION=${GITHUB_REF#refs/tags/v}
PACKAGE_VERSION=$(node -p "require('./packages/frontend/electron/package.json').version")
if [ "$TAG_VERSION" != "$PACKAGE_VERSION" ]; then
echo "Tag version ($TAG_VERSION) does not match package.json version ($PACKAGE_VERSION)"
exit 1
fi
echo "RELEASE_VERSION=$(node -p "require('./packages/frontend/electron/package.json').version")" >> $GITHUB_OUTPUT
- name: generate-assets
run: yarn workspace @affine/electron generate-assets
env:
@@ -56,12 +67,11 @@ jobs:
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
RELEASE_VERSION: ${{ steps.version.outputs.APP_VERSION }}
RELEASE_VERSION: ${{ github.event.inputs.version || steps.get-canary-version.outputs.RELEASE_VERSION }}
SKIP_PLUGIN_BUILD: 'true'
SKIP_NX_CACHE: 'true'
- name: Upload core artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: core
path: packages/frontend/electron/resources/web-static
@@ -93,15 +103,13 @@ jobs:
SKIP_GENERATE_ASSETS: 1
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
timeout-minutes: 10
uses: ./.github/actions/setup-node
with:
extra-flags: workspaces focus @affine/electron @affine/monorepo
hard-link-nm: false
build-plugins: false
nmHoistingLimits: workspaces
enableScripts: false
- name: Build AFFiNE native
@@ -110,7 +118,7 @@ jobs:
target: ${{ matrix.spec.target }}
package: '@affine/native'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- uses: actions/download-artifact@v4
- uses: actions/download-artifact@v3
with:
name: core
path: packages/frontend/electron/resources/web-static
@@ -146,7 +154,7 @@ jobs:
mv packages/frontend/electron/out/*/make/AppImage/x64/*.AppImage ./builds/affine-${{ env.BUILD_TYPE }}-linux-x64.AppImage
- name: Upload Artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: affine-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}-builds
path: builds
@@ -169,15 +177,13 @@ jobs:
SKIP_GENERATE_ASSETS: 1
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
timeout-minutes: 10
uses: ./.github/actions/setup-node
with:
extra-flags: workspaces focus @affine/electron @affine/monorepo
hard-link-nm: false
build-plugins: false
nmHoistingLimits: workspaces
- name: Build AFFiNE native
uses: ./.github/actions/build-rust
@@ -185,11 +191,14 @@ jobs:
target: ${{ matrix.spec.target }}
package: '@affine/native'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- uses: actions/download-artifact@v4
- uses: actions/download-artifact@v3
with:
name: core
path: packages/frontend/electron/resources/web-static
- name: Build Plugins
run: yarn run build:plugins
- name: Build Desktop Layers
run: yarn workspace @affine/electron build
@@ -211,7 +220,7 @@ jobs:
run: Compress-Archive -CompressionLevel Fastest -Path packages/frontend/electron/out/* -DestinationPath archive.zip
- name: Save packaged artifacts for signing
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: packaged-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}
path: |
@@ -245,7 +254,7 @@ jobs:
timeout-minutes: 10
uses: ./.github/actions/setup-node
- name: Download and overwrite packaged artifacts
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: signed-packaged-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}
path: .
@@ -266,7 +275,7 @@ jobs:
echo $FILES_TO_BE_SIGNED
- name: Save installer for signing
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: installer-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}
path: archive.zip
@@ -292,7 +301,7 @@ jobs:
runs-on: ${{ matrix.spec.runner }}
steps:
- name: Download and overwrite installer artifacts
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: signed-installer-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}
path: .
@@ -307,7 +316,7 @@ jobs:
mv packages/frontend/electron/out/*/make/squirrel.windows/x64/*.msi ./builds/affine-${{ env.BUILD_TYPE }}-windows-x64.msi
- name: Upload Artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: affine-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}-builds
path: builds
@@ -318,29 +327,29 @@ jobs:
steps:
- uses: actions/checkout@v4
- uses: actions/download-artifact@v4
- uses: actions/download-artifact@v3
with:
name: core
path: web-static
- name: Zip web-static
run: zip -r web-static.zip web-static
- name: Download Artifacts (macos-x64)
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: affine-darwin-x64-builds
path: ./
- name: Download Artifacts (macos-arm64)
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: affine-darwin-arm64-builds
path: ./
- name: Download Artifacts (windows-x64)
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: affine-win32-x64-builds
path: ./
- name: Download Artifacts (linux-x64)
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: affine-linux-x64-builds
path: ./
@@ -351,37 +360,14 @@ jobs:
run: |
node ./packages/frontend/electron/scripts/generate-yml.js
env:
RELEASE_VERSION: ${{ needs.before-make.outputs.RELEASE_VERSION }}
RELEASE_VERSION: ${{ github.event.inputs.version || needs.before-make.outputs.RELEASE_VERSION }}
- name: Create Release Draft
if: ${{ github.ref_type == 'tag' }}
uses: softprops/action-gh-release@v1
with:
name: ${{ needs.before-make.outputs.RELEASE_VERSION }}
name: ${{ github.event.inputs.version || needs.before-make.outputs.RELEASE_VERSION }}
body: ''
draft: ${{ github.event.inputs.is-draft }}
prerelease: ${{ github.event.inputs.is-pre-release }}
files: |
./VERSION
./*.zip
./*.dmg
./*.exe
./*.AppImage
./*.apk
./*.yml
- name: Create Nightly Release Draft
if: ${{ github.ref_type == 'branch' }}
uses: softprops/action-gh-release@v1
env:
GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}
with:
# Temporarily, treat release from branch as nightly release, artifact saved to AFFiNE-Releases.
# Need to improve internal build and nightly release logic.
repository: 'toeverything/AFFiNE-Releases'
name: ${{ needs.before-make.outputs.RELEASE_VERSION }}
tag_name: ${{ needs.before-make.outputs.RELEASE_VERSION }}
body: ''
draft: false
prerelease: true
draft: ${{ github.event.inputs.is-draft || true }}
prerelease: ${{ github.event.inputs.is-pre-release || needs.before-make.outputs.version }}
files: |
./VERSION
./*.zip

@@ -1,27 +0,0 @@
name: Release Desktop Automatically
on:
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+-canary.[0-9]+'
schedule:
- cron: '0 9 * * *'
jobs:
dispatch-release-desktop:
runs-on: ubuntu-latest
name: Setup Release Desktop
steps:
- name: dispatch desktop release by tag
if: ${{ github.event_name == 'push' }}
uses: benc-uk/workflow-dispatch@v1
with:
workflow: release-desktop.yml
inputs: '{ "build-type": "canary", "is-draft": false, "is-pre-release": true }'
- name: dispatch desktop release by schedule
if: ${{ github.event_name == 'schedule' }}
uses: benc-uk/workflow-dispatch@v1
with:
workflow: release-desktop.yml
inputs: '{ "build-type": "canary", "is-draft": false, "is-pre-release": true }'
ref: canary

.github/workflows/release.yml (new file, 165 lines)
@@ -0,0 +1,165 @@
name: Release
on:
push:
branches:
- canary
env:
BUILD_TYPE: stable
APP_NAME: affine
COVERAGE: false
DISTRIBUTION: browser
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
jobs:
release:
name: Try publishing npm@latest release
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Try publishing to NPM
run: ./scripts/publish.sh
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
build-core:
name: Build @affine/core
runs-on: ubuntu-latest
environment: development
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Plugins
run: yarn run build:plugins
- name: Build Core
run: yarn nx build @affine/core
- name: Upload core artifact
uses: actions/upload-artifact@v3
with:
name: core
path: ./packages/frontend/core/dist
if-no-files-found: error
build-server:
name: Build Server
runs-on: ubuntu-latest
environment: development
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
- name: Build Server
run: yarn nx build @affine/server
- name: Upload server dist
uses: actions/upload-artifact@v3
with:
name: server-dist
path: ./packages/backend/server/dist
if-no-files-found: error
build-storage:
name: Build Storage
runs-on: ubuntu-latest
env:
RUSTFLAGS: '-C debuginfo=1'
environment: development
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Setup Rust
uses: ./.github/actions/build-rust
with:
target: 'x86_64-unknown-linux-gnu'
package: '@affine/storage'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Upload storage.node
uses: actions/upload-artifact@v3
with:
name: storage.node
path: ./packages/backend/storage/storage.node
if-no-files-found: error
build-docker:
if: github.ref == 'refs/heads/canary'
name: Build Docker
runs-on: ubuntu-latest
needs:
- build-server
- build-core
- build-storage
steps:
- uses: actions/checkout@v4
- name: Download core artifact
uses: actions/download-artifact@v3
with:
name: core
path: ./packages/frontend/core/dist
- name: Download server dist
uses: actions/download-artifact@v3
with:
name: server-dist
path: ./packages/backend/server/dist
- name: Download storage.node
uses: actions/download-artifact@v3
with:
name: storage.node
path: ./packages/backend/server
- name: Setup Git short hash
run: |
echo "GIT_SHORT_HASH=$(git rev-parse --short HEAD)" >> "$GITHUB_ENV"
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
logout: false
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build front Dockerfile
uses: docker/build-push-action@v5
with:
context: .
push: true
pull: true
platforms: linux/amd64,linux/arm64
provenance: true
file: .github/deployment/front/Dockerfile
tags: ghcr.io/toeverything/affine-front:${{ env.GIT_SHORT_HASH }},ghcr.io/toeverything/affine-front:latest
# setup node without cache configuration
# Prisma cache is not compatible with docker build cache
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version-file: '.nvmrc'
registry-url: https://npm.pkg.github.com
scope: '@toeverything'
- name: Install Node.js dependencies
run: yarn workspaces focus @affine/server --production
- name: Generate Prisma client
run: yarn workspace @affine/server prisma generate
- name: Build graphql Dockerfile
uses: docker/build-push-action@v5
with:
context: .
push: true
pull: true
platforms: linux/amd64,linux/arm64
provenance: true
file: .github/deployment/node/Dockerfile
tags: ghcr.io/toeverything/affine-graphql:${{ env.GIT_SHORT_HASH }},ghcr.io/toeverything/affine-graphql:latest
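Note: roughly the same multi-arch front image can be built outside CI with docker buildx; a sketch, assuming a ghcr.io login with push rights and an illustrative tag (local-test):

    # create and select a buildx builder, then build and push for both architectures
    docker buildx create --use
    docker buildx build \
      --platform linux/amd64,linux/arm64 \
      -f .github/deployment/front/Dockerfile \
      -t ghcr.io/toeverything/affine-front:local-test \
      --push .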

@@ -14,7 +14,7 @@ jobs:
env:
ARCHIVE_DIR: ${{ github.run_id }}-${{ github.run_attempt }}-${{ inputs.artifact-name }}
steps:
- uses: actions/download-artifact@v4
- uses: actions/download-artifact@v3
with:
name: ${{ inputs.artifact-name }}
path: ${{ env.ARCHIVE_DIR }}
@@ -36,7 +36,7 @@ jobs:
cd ${{ env.ARCHIVE_DIR }}
7za a signed.zip .\out\*
- name: upload
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: signed-${{ inputs.artifact-name }}
path: ${{ env.ARCHIVE_DIR }}/signed.zip

@@ -11,11 +11,11 @@ jobs:
deploy:
runs-on: ubuntu-latest
name: Deploy
environment: stable
environment: production
steps:
- uses: actions/checkout@v4
- name: Publish
uses: cloudflare/wrangler-action@v3.4.0
uses: cloudflare/wrangler-action@v3.3.2
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}

@@ -1,4 +1,23 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
yarn lint-staged && yarn lint:ox
# check lockfile is up to date
yarn install --mode=skip-build --inline-builds --immutable
# build infra code
yarn -T run build:infra
# generate prisma client type
yarn workspace @affine/server prisma generate
# generate i18n
yarn i18n-codegen gen
# lint staged files
yarn exec lint-staged
# type check
yarn typecheck
# circular dependency check
yarn circular

@@ -15,9 +15,6 @@ packages/backend/server/src/schema.gql
packages/frontend/i18n/src/i18n-generated.ts
packages/frontend/graphql/src/graphql/index.ts
tests/affine-legacy/**/static
.yarnrc.yml
packages/frontend/templates/templates.gen.ts
packages/frontend/templates/onboarding
# auto-generated by NAPI-RS
# fixme(@joooye34): need script to check and generate ignore list here

File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -6,10 +6,10 @@ nmMode: hardlinks-local
nodeLinker: node-modules
npmAuthToken: "${NPM_TOKEN:-NONE}"
npmAuthToken: '${NPM_TOKEN:-NONE}'
npmPublishAccess: public
npmPublishRegistry: "https://registry.npmjs.org"
npmPublishRegistry: 'https://registry.npmjs.org'
yarnPath: .yarn/releases/yarn-4.0.2.cjs
yarnPath: .yarn/releases/yarn-4.0.1.cjs

Cargo.lock (generated, 154 lines changed)
@@ -83,15 +83,14 @@ dependencies = [
[[package]]
name = "ahash"
version = "0.8.6"
version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a"
checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f"
dependencies = [
"cfg-if",
"getrandom",
"once_cell",
"version_check",
"zerocopy",
]
[[package]]
@@ -241,16 +240,6 @@ dependencies = [
"num-traits",
]
[[package]]
name = "atomic-write-file"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c232177ba50b16fe7a4588495bd474a62a9e45a8e4ca6fd7d0b7ac29d164631e"
dependencies = [
"nix",
"rand",
]
[[package]]
name = "autocfg"
version = "1.1.0"
@@ -952,7 +941,7 @@ version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e"
dependencies = [
"ahash 0.8.6",
"ahash 0.8.3",
]
[[package]]
@@ -961,7 +950,7 @@ version = "0.14.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f93e7192158dbcda357bdec5fb5788eebf8bbac027f3f33e719d29135ae84156"
dependencies = [
"ahash 0.8.6",
"ahash 0.8.3",
"allocator-api2",
]
@@ -1123,7 +1112,7 @@ dependencies = [
[[package]]
name = "jwst-codec"
version = "0.1.0"
source = "git+https://github.com/toeverything/OctoBase.git?rev=49a6b7a#49a6b7af25ce1fe54e8383e10980e9536821d286"
source = "git+https://github.com/toeverything/OctoBase.git?rev=aad9e5b#aad9e5b7e9d6f479e6cf7555f5845bbbaaadbc66"
dependencies = [
"arbitrary",
"bitvec",
@@ -1144,7 +1133,7 @@ dependencies = [
[[package]]
name = "jwst-core"
version = "0.1.0"
source = "git+https://github.com/toeverything/OctoBase.git?rev=49a6b7a#49a6b7af25ce1fe54e8383e10980e9536821d286"
source = "git+https://github.com/toeverything/OctoBase.git?rev=aad9e5b#aad9e5b7e9d6f479e6cf7555f5845bbbaaadbc66"
dependencies = [
"async-trait",
"base64",
@@ -1162,7 +1151,7 @@ dependencies = [
[[package]]
name = "jwst-logger"
version = "0.1.0"
source = "git+https://github.com/toeverything/OctoBase.git?rev=49a6b7a#49a6b7af25ce1fe54e8383e10980e9536821d286"
source = "git+https://github.com/toeverything/OctoBase.git?rev=aad9e5b#aad9e5b7e9d6f479e6cf7555f5845bbbaaadbc66"
dependencies = [
"chrono",
"nu-ansi-term 0.49.0",
@@ -1175,7 +1164,7 @@ dependencies = [
[[package]]
name = "jwst-storage"
version = "0.1.0"
source = "git+https://github.com/toeverything/OctoBase.git?rev=49a6b7a#49a6b7af25ce1fe54e8383e10980e9536821d286"
source = "git+https://github.com/toeverything/OctoBase.git?rev=aad9e5b#aad9e5b7e9d6f479e6cf7555f5845bbbaaadbc66"
dependencies = [
"anyhow",
"async-trait",
@@ -1200,7 +1189,7 @@ dependencies = [
[[package]]
name = "jwst-storage-migration"
version = "0.1.0"
source = "git+https://github.com/toeverything/OctoBase.git?rev=49a6b7a#49a6b7af25ce1fe54e8383e10980e9536821d286"
source = "git+https://github.com/toeverything/OctoBase.git?rev=aad9e5b#aad9e5b7e9d6f479e6cf7555f5845bbbaaadbc66"
dependencies = [
"sea-orm-migration",
"tokio",
@@ -1268,9 +1257,9 @@ checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058"
[[package]]
name = "libsqlite3-sys"
version = "0.27.0"
version = "0.26.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf4e226dcd58b4be396f7bd3c20da8fdee2911400705297ba7d2d7cc2c30f716"
checksum = "afc22eff61b133b115c6e8c74e818c628d6d5e7a502afea6f64dee076dd94326"
dependencies = [
"cc",
"pkg-config",
@@ -1348,15 +1337,6 @@ version = "2.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167"
[[package]]
name = "memoffset"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4"
dependencies = [
"autocfg",
]
[[package]]
name = "minimal-lexical"
version = "0.2.1"
@@ -1395,9 +1375,9 @@ dependencies = [
[[package]]
name = "napi"
version = "2.14.1"
version = "2.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1133249c46e92da921bafc8aba4912bf84d6c475f7625183772ed2d0844dc3a7"
checksum = "f9d90182620f32fe34b6ac9b52cba898af26e94c7f5abc01eb4094c417ae2e6c"
dependencies = [
"anyhow",
"bitflags 2.4.1",
@@ -1419,9 +1399,9 @@ checksum = "d4b4532cf86bfef556348ac65e561e3123879f0e7566cca6d43a6ff5326f13df"
[[package]]
name = "napi-derive"
version = "2.14.2"
version = "2.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0cca5738c6e81eb5ffd2c8ff2b4f05ece9c4c60c7e2b36cec6524492cf7f330"
checksum = "3619fa472d23cd5af94d63a2bae454a77a8863251f40230fbf59ce20eafa8a86"
dependencies = [
"cfg-if",
"convert_case",
@@ -1433,9 +1413,9 @@ dependencies = [
[[package]]
name = "napi-derive-backend"
version = "1.0.55"
version = "1.0.54"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35960e5f33228192a9b661447d0dfe8f5a3790ff5b4058c4d67680ded4f65b91"
checksum = "ecd3ea4b54020c73d591a49cd192f6334c5f37f71a63ead54dbc851fa991ef00"
dependencies = [
"convert_case",
"once_cell",
@@ -1455,19 +1435,6 @@ dependencies = [
"libloading",
]
[[package]]
name = "nix"
version = "0.26.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b"
dependencies = [
"bitflags 1.3.2",
"cfg-if",
"libc",
"memoffset",
"pin-utils",
]
[[package]]
name = "no-std-compat"
version = "0.4.1"
@@ -2024,14 +1991,16 @@ dependencies = [
[[package]]
name = "rsa"
version = "0.9.6"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d0e5124fcb30e76a7e79bfee683a2746db83784b86289f6251b54b7950a0dfc"
checksum = "6ab43bb47d23c1a631b4b680199a45255dce26fa9ab2fa902581f624ff13e6a8"
dependencies = [
"byteorder",
"const-oid",
"digest",
"num-bigint-dig",
"num-integer",
"num-iter",
"num-traits",
"pkcs1",
"pkcs8",
@@ -2323,18 +2292,18 @@ checksum = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090"
[[package]]
name = "serde"
version = "1.0.193"
version = "1.0.192"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89"
checksum = "bca2a08484b285dcb282d0f67b26cadc0df8b19f8c12502c13d966bf9482f001"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.193"
version = "1.0.192"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3"
checksum = "d6c7207fbec9faa48073f3e3074cbe553af6ea512d7c21ba46e434e70ea9fbc1"
dependencies = [
"proc-macro2",
"quote",
@@ -2460,9 +2429,9 @@ dependencies = [
[[package]]
name = "spki"
version = "0.7.3"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d"
checksum = "9d1e996ef02c474957d681f1b05213dfb0abab947b446a62d37770b23500184a"
dependencies = [
"base64ct",
"der",
@@ -2481,9 +2450,9 @@ dependencies = [
[[package]]
name = "sqlx"
version = "0.7.3"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dba03c279da73694ef99763320dea58b51095dfe87d001b1d4b5fe78ba8763cf"
checksum = "0e50c216e3624ec8e7ecd14c6a6a6370aad6ee5d8cfc3ab30b5162eeeef2ed33"
dependencies = [
"sqlx-core",
"sqlx-macros",
@@ -2494,11 +2463,11 @@ dependencies = [
[[package]]
name = "sqlx-core"
version = "0.7.3"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d84b0a3c3739e220d94b3239fd69fb1f74bc36e16643423bd99de3b43c21bfbd"
checksum = "8d6753e460c998bbd4cd8c6f0ed9a64346fcca0723d6e75e52fdc351c5d2169d"
dependencies = [
"ahash 0.8.6",
"ahash 0.8.3",
"atoi",
"bigdecimal",
"byteorder",
@@ -2542,9 +2511,9 @@ dependencies = [
[[package]]
name = "sqlx-macros"
version = "0.7.3"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89961c00dc4d7dffb7aee214964b065072bff69e36ddb9e2c107541f75e4f2a5"
checksum = "9a793bb3ba331ec8359c1853bd39eed32cdd7baaf22c35ccf5c92a7e8d1189ec"
dependencies = [
"proc-macro2",
"quote",
@@ -2555,11 +2524,10 @@ dependencies = [
[[package]]
name = "sqlx-macros-core"
version = "0.7.3"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0bd4519486723648186a08785143599760f7cc81c52334a55d6a83ea1e20841"
checksum = "0a4ee1e104e00dedb6aa5ffdd1343107b0a4702e862a84320ee7cc74782d96fc"
dependencies = [
"atomic-write-file",
"dotenvy",
"either",
"heck",
@@ -2582,9 +2550,9 @@ dependencies = [
[[package]]
name = "sqlx-mysql"
version = "0.7.3"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e37195395df71fd068f6e2082247891bc11e3289624bbc776a0cdfa1ca7f1ea4"
checksum = "864b869fdf56263f4c95c45483191ea0af340f9f3e3e7b4d57a61c7c87a970db"
dependencies = [
"atoi",
"base64",
@@ -2629,9 +2597,9 @@ dependencies = [
[[package]]
name = "sqlx-postgres"
version = "0.7.3"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6ac0ac3b7ccd10cc96c7ab29791a7dd236bd94021f31eec7ba3d46a74aa1c24"
checksum = "eb7ae0e6a97fb3ba33b23ac2671a5ce6e3cabe003f451abd5a56e7951d975624"
dependencies = [
"atoi",
"base64",
@@ -2674,9 +2642,9 @@ dependencies = [
[[package]]
name = "sqlx-sqlite"
version = "0.7.3"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "210976b7d948c7ba9fced8ca835b11cbb2d677c59c79de41ac0d397e14547490"
checksum = "d59dc83cf45d89c555a577694534fcd1b55c545a816c816ce51f20bbe56a4f3f"
dependencies = [
"atoi",
"chrono",
@@ -2694,7 +2662,6 @@ dependencies = [
"time",
"tracing",
"url",
"urlencoding",
"uuid",
]
@@ -3057,12 +3024,6 @@ dependencies = [
"percent-encoding",
]
[[package]]
name = "urlencoding"
version = "2.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da"
[[package]]
name = "utf8parse"
version = "0.2.1"
@@ -3071,9 +3032,9 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
[[package]]
name = "uuid"
version = "1.6.1"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e395fcf16a7a3d8127ec99782007af141946b4795001f876d54fb0d55978560"
checksum = "c58fe91d841bc04822c9801002db4ea904b9e4b8e6bbad25127b46eff8dc516b"
dependencies = [
"getrandom",
"rand",
@@ -3180,9 +3141,12 @@ dependencies = [
[[package]]
name = "webpki-roots"
version = "0.25.3"
version = "0.24.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1778a42e8b3b90bff8d0f5032bf22250792889a5cdc752aa0020c84abe3aaf10"
checksum = "b291546d5d9d1eab74f069c77749f2cb8504a12caa20f0f2de93ddbf6f411888"
dependencies = [
"rustls-webpki",
]
[[package]]
name = "whoami"
@@ -3314,26 +3278,6 @@ dependencies = [
"tap",
]
[[package]]
name = "zerocopy"
version = "0.7.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1c4061bedbb353041c12f413700357bec76df2c7e2ca8e4df8bac24c6bf68e3d"
dependencies = [
"zerocopy-derive",
]
[[package]]
name = "zerocopy-derive"
version = "0.7.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3c129550b3e6de3fd0ba67ba5c81818f9805e58b8d7fee80a3a59d2c9fc601a"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.38",
]
[[package]]
name = "zeroize"
version = "1.6.0"

View File

@@ -107,11 +107,27 @@ If you have questions, you are welcome to contact us. One of the best places to
## Ecosystem
| Name | | |
| -------------------------------------------------------- | ---------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------- |
| [@affine/component](packages/frontend/component) | AFFiNE Component Resources | [![](https://img.shields.io/codecov/c/github/toeverything/affine?style=flat-square)](https://affine-storybook.vercel.app/) |
| [@toeverything/y-indexeddb](packages/common/y-indexeddb) | IndexedDB database adapter for Yjs | [![](https://img.shields.io/npm/dm/@toeverything/y-indexeddb?style=flat-square&color=eee)](https://www.npmjs.com/package/@toeverything/y-indexeddb) |
| [@toeverything/theme](packages/common/theme) | AFFiNE theme | [![](https://img.shields.io/npm/dm/@toeverything/theme?style=flat-square&color=eee)](https://www.npmjs.com/package/@toeverything/theme) |
| Name | | |
| ----------------------------------------------------------------------------------------------- | --------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------- |
| [@toeverything/component](https://github.com/toeverything/design/tree/main/packages/components) | Toeverything Shared Component Resources | |
| [@affine/component](packages/frontend/component) | AFFiNE Component Resources | [![](https://img.shields.io/codecov/c/github/toeverything/affine?style=flat-square)](https://affine-storybook.vercel.app/) |
| [@toeverything/y-indexeddb](packages/common/y-indexeddb) | IndexedDB database adapter for Yjs | [![](https://img.shields.io/npm/dm/@toeverything/y-indexeddb?style=flat-square&color=eee)](https://www.npmjs.com/package/@toeverything/y-indexeddb) |
| [@toeverything/theme](packages/common/theme) | AFFiNE theme | [![](https://img.shields.io/npm/dm/@toeverything/theme?style=flat-square&color=eee)](https://www.npmjs.com/package/@toeverything/theme) |
## Plugins
> Plugins are a way to extend the functionality of AFFiNE. You can use plugins to add new blocks, new features, and even new ways to edit content.
>
> (Currently, the plugin system is under heavy development. You will see the plugin system in the canary release; a hypothetical entry-point sketch follows the table below.)
- [@affine/sdk](./packages/common/sdk) - SDK for developing plugins
- [@affine/plugin-cli](./tools/plugin-cli) - CLI for developing plugins
| Official Plugin | Description | Status |
| ---------------------------------------------------------------- | ----------------------------------------- | ------ |
| [@affine/copilot-plugin](./packages/plugins/copilot) | AI Copilot that helps you with document writing | 🚧 |
| [@affine/image-preview-plugin](./packages/plugins/image-preview) | Component for previewing an image | ✅ |
| [@affine/outline](./packages/plugins/outline) | Outline for your document | ✅ |
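
The plugin packages above are the best reference for real entry points. As a purely hypothetical illustration of the workflow the SDK and CLI enable, a plugin package roughly boils down to a module that registers itself with the host app; `definePlugin` and `PluginContext` below are assumed names for illustration only, not the confirmed `@affine/sdk` API.

```ts
// Hypothetical sketch only: `definePlugin` and `PluginContext` are assumed
// names; check @affine/sdk for the actual exports.
import { definePlugin, type PluginContext } from '@affine/sdk';

export default definePlugin({
  name: 'my-hello-plugin',
  // called once when AFFiNE activates the plugin
  entry(context: PluginContext) {
    console.log('hello from a plugin', context);
  },
});
```

The `@affine/plugin-cli` listed above is intended for developing such packages; its exact commands are not shown in this diff.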
## Upstreams
@@ -212,7 +228,7 @@ See [LICENSE] for details.
[jobs available]: ./docs/jobs.md
[latest packages]: https://github.com/toeverything/AFFiNE/pkgs/container/affine-self-hosted
[contributor license agreement]: https://github.com/toeverything/affine/edit/canary/.github/CLA.md
[rust-version-icon]: https://img.shields.io/badge/Rust-1.75.0-dea584
[rust-version-icon]: https://img.shields.io/badge/Rust-1.71.0-dea584
[stars-icon]: https://img.shields.io/github/stars/toeverything/AFFiNE.svg?style=flat&logo=github&colorB=red&label=stars
[codecov]: https://codecov.io/gh/toeverything/affine/branch/canary/graphs/badge.svg?branch=canary
[node-version-icon]: https://img.shields.io/badge/node-%3E=18.16.1-success

View File

@@ -57,29 +57,6 @@ corepack prepare yarn@stable --activate
yarn install
```
### Clone repository
#### Linux & MacOS
```sh
git clone https://github.com/toeverything/AFFiNE
```
#### Windows
In our codebase, we use symbolic links. Due to the security design of Windows, the creation of symbolic links requires administrator privileges. This is part of the security policy settings of Windows, and more information can be found at [Security Policy Settings for Creating Symbolic Links](https://learn.microsoft.com/en-us/windows/security/threat-protection/security-policy-settings/create-symbolic-links).
For detailed guidance on enabling this feature, please refer to the official documentation: [Enable Developer Mode on Windows](https://learn.microsoft.com/en-us/windows/apps/get-started/enable-your-device-for-development).
Once Developer Mode is enabled, execute the following command with administrator privileges:
```sh
# Enable symbolic links
git config --global core.symlinks true
# Clone the repository; this also needs to be run with administrator privileges
git clone https://github.com/toeverything/AFFiNE
```
### Build Native Dependencies
Run the following script. It will build the native module at [`/packages/frontend/native`](/packages/frontend/native) and build the Node.js bindings using [NAPI.rs](https://napi.rs/).
@@ -90,6 +67,18 @@ Note: use `strip` from system instead of `binutils` if you are running MacOS. [s
yarn workspace @affine/native build
```
### Build Infra
```sh
yarn run build:infra
```
### Build Plugins
```sh
yarn run build:plugins
```
### Build Server Dependencies
```sh
@@ -113,7 +102,7 @@ yarn test
### E2E Test
```shell
# there are `affine-local`, `affine-migration`, `affine-local`, `affine-prototype` e2e tests,
# there are `affine-local`, `affine-migration`, `affine-local`, `affine-plugin`, `affine-prototype` e2e tests,
# which are run under different situations.
cd tests/affine-local
yarn e2e

View File

@@ -17,6 +17,7 @@ The codebase is organized as follows:
- `packages/` contains all code running in production.
- `backend/` contains backend code; more information can be found at <https://github.com/toeverything/OctoBase>.
- `frontend/` contains frontend code, including the web app, the electron app and business libraries.
- `plugins/` contains all built-in plugins.
- `common` contains isomorphic code or basic libraries without business logic.
- `tools/` contains tools to help developing or CI, not used in production.
- `tests/` contains testings across different libraries, including e2e testings and integration testings.

View File

@@ -1,28 +0,0 @@
# Issue Triaging
When we receive your issue, we will first triage it. Triaging an issue usually takes around one business day but may take longer. The goal of triaging is to provide you with a clear understanding of what will happen to your issue. For example, after your feature request has been triaged, you will know whether we plan to tackle the issue or whether we'll wait to hear what the broader community thinks about this request.
Here are issue states and their descriptions:
| State | Description |
| ------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| Untriaged | The team has not yet reviewed the issue. We usually do it within one business day. |
| As designed | The behavior described in the issue is intentional. If you find it seriously disruptive or if we've misunderstood you, please let us know in the issue's comments section. |
| Blocked | We can't work on this issue until another one (linked) is resolved. |
| Can't Reproduce | We have been unable to reproduce the issue on our side. It could be flaky or fixed already, or we may not have had all the details we needed. If you're still experiencing the issue and have any further details, please share them. |
| Duplicate | The issue is the same (or has the same cause) as another one (linked). |
| Fixed | If the issue was a bug, it's been fixed; if it was a missing feature, it's been implemented. |
| Fixed In Branch | If the issue was a bug, it's been fixed; if it was a missing feature, it's been implemented; the changes are now in a separate branch and haven't been merged into the default branch yet. |
| In Progress | We're currently working on the issue. |
| Incomplete | Unfortunately we don't have enough information to proceed. If you're willing to share any further details about the issue, please do so in the comments. |
| Obsolete | The part of the product that was causing this issue has been removed or significantly reworked since it was created. |
| Upvoting | We are currently evaluating demand for the issue and checking whether it requires complicated or risky changes. Please leave a vote or comment if you think it should be prioritized. |
| Open | We want to implement the fix or feature in the near future. We can't promise it will appear in the next public release, but it's on our short list. |
| Shelved | We have reviewed the issue and decided that, even though it has merit, we cannot currently include it in our near-term plan. |
| Third Party Problem | The issue is caused by a third party. We've done our best to inform them about it. |
| To be Discussed | We need some time to discuss the issue. |
| To Reproduce | We will try to find the steps needed to reproduce the issue on our side. |
| Under Investigation | We've triaged the issue, but now we need to investigate it more thoroughly. This may require processing additional information like logs or dumps. |
| Waiting for Info | We've requested additional information from the person who created the issue and are waiting for them to get back to us. |
| Declined | We've reviewed the suggestion and, while we appreciate its value, we unfortunately do not have the resources to implement it. |
| Answered | The issue actually turned out to be a question or a misunderstanding, and it has been answered or resolved. |

View File

@@ -1,23 +0,0 @@
{
"name": "@affine/docs",
"type": "module",
"private": true,
"scripts": {
"build": "typedoc --options ../../typedoc.json",
"dev": "nodemon --exec 'typedoc --options ../../typedoc.json' & serve dist/"
},
"devDependencies": {
"nodemon": "^3.0.1",
"serve": "^14.2.1",
"typedoc": "^0.25.4"
},
"nodemonConfig": {
"watch": [
"./readme.md",
"../../packages/*/*/src/*.ts",
"../../**/typedoc{.base,}.json"
],
"ext": "ts,md,json"
},
"version": "0.10.3-canary.2"
}

View File

@@ -1,7 +0,0 @@
Welcome to AFFiNE development reference.
This document is intended for developers who want to contribute to AFFiNE. It contains information about the architecture of AFFiNE, how to build it, and how to contribute to it.
### The Infrastructure of AFFiNE
see {@link @toeverything/infra!}

View File

@@ -1,6 +1,6 @@
{
"name": "@affine/monorepo",
"version": "0.11.0",
"version": "0.10.3",
"private": true,
"author": "toeverything",
"license": "MIT",
@@ -8,9 +8,10 @@
".",
"packages/*/*",
"tools/*",
"docs/reference",
"!tools/@types",
"tools/@types/*",
"tests/*",
"!tests/affine-legacy",
"tests/affine-legacy/*"
],
"engines": {
@@ -22,6 +23,8 @@
"build": "yarn nx build @affine/core",
"build:electron": "yarn nx build @affine/electron",
"build:storage": "yarn nx run-many -t build -p @affine/storage",
"build:infra": "yarn nx run-many -t build --projects=tag:infra",
"build:plugins": "yarn nx run-many -t build --projects=tag:plugin",
"build:storybook": "yarn nx build @affine/storybook",
"start:web-static": "yarn workspace @affine/core static-server",
"start:storybook": "yarn exec serve tests/storybook/storybook-static -l 6006",
@@ -30,12 +33,12 @@
"lint:eslint:fix": "yarn lint:eslint --fix",
"lint:prettier": "prettier --ignore-unknown --cache --check .",
"lint:prettier:fix": "prettier --ignore-unknown --cache --write .",
"lint:ox": "oxlint --import-plugin --deny-warnings -D correctness -D nursery -D prefer-array-some -D no-useless-promise-resolve-reject -D perf -A no-undef -A consistent-type-exports -A default -A named -A ban-ts-comment -A export",
"lint": "yarn lint:eslint && yarn lint:prettier",
"lint:fix": "yarn lint:eslint:fix && yarn lint:prettier:fix",
"test": "vitest --run",
"test:ui": "vitest --ui",
"test:coverage": "vitest run --coverage",
"circular": "madge --circular --ts-config ./tsconfig.json ./packages/frontend/core/src/pages/**/*.tsx ./packages/frontend/core/src/index.tsx ./packages/frontend/electron/src/*/index.ts",
"typecheck": "tsc -b tsconfig.json --diagnostics",
"postinstall": "node ./scripts/check-version.mjs && yarn i18n-codegen gen && yarn husky install"
},
@@ -46,21 +49,20 @@
"eslint --cache --fix"
],
"*.toml": [
"prettier --ignore-unknown --write",
"taplo format"
],
"*.rs": [
"cargo fmt --"
]
},
"devDependencies": {
"@affine-test/kit": "workspace:*",
"@affine/cli": "workspace:*",
"@affine/plugin-cli": "workspace:*",
"@commitlint/cli": "^18.4.3",
"@commitlint/config-conventional": "^18.4.3",
"@faker-js/faker": "^8.3.1",
"@istanbuljs/schema": "^0.1.3",
"@magic-works/i18n-codegen": "^0.5.0",
"@nx/vite": "17.2.8",
"@nx/vite": "17.1.3",
"@perfsee/sdk": "^1.9.0",
"@playwright/test": "^1.40.0",
"@taplo/cli": "^0.5.2",
@@ -69,51 +71,52 @@
"@types/affine__env": "workspace:*",
"@types/eslint": "^8.44.7",
"@types/node": "^20.9.3",
"@typescript-eslint/eslint-plugin": "^6.13.1",
"@typescript-eslint/parser": "^6.13.1",
"@typescript-eslint/eslint-plugin": "^6.12.0",
"@typescript-eslint/parser": "^6.12.0",
"@vanilla-extract/vite-plugin": "^3.9.2",
"@vanilla-extract/webpack-plugin": "^2.3.1",
"@vitejs/plugin-react-swc": "^3.5.0",
"@vitest/coverage-istanbul": "1.1.3",
"@vitest/ui": "1.1.3",
"@vitest/coverage-istanbul": "0.34.6",
"@vitest/ui": "0.34.6",
"electron": "^27.1.0",
"eslint": "^8.54.0",
"eslint-config-prettier": "^9.0.0",
"eslint-plugin-i": "^2.29.0",
"eslint-plugin-prettier": "^5.0.1",
"eslint-plugin-react": "^7.33.2",
"eslint-plugin-react-hooks": "^4.6.0",
"eslint-plugin-simple-import-sort": "^10.0.0",
"eslint-plugin-sonarjs": "^0.23.0",
"eslint-plugin-unicorn": "^50.0.0",
"eslint-plugin-unicorn": "^49.0.0",
"eslint-plugin-unused-imports": "^3.0.0",
"eslint-plugin-vue": "^9.18.1",
"fake-indexeddb": "5.0.2",
"fake-indexeddb": "5.0.1",
"happy-dom": "^12.10.3",
"husky": "^8.0.3",
"lint-staged": "^15.1.0",
"madge": "^6.1.0",
"msw": "^2.0.8",
"nanoid": "^5.0.3",
"nx": "^17.1.3",
"nx-cloud": "^16.5.2",
"nyc": "^15.1.0",
"oxlint": "0.0.22",
"prettier": "^3.1.0",
"semver": "^7.5.4",
"serve": "^14.2.1",
"string-width": "^7.0.0",
"ts-node": "^10.9.1",
"typescript": "^5.3.2",
"vite": "^5.0.6",
"vite": "^5.0.1",
"vite-plugin-istanbul": "^5.0.0",
"vite-plugin-static-copy": "^1.0.0",
"vite-plugin-static-copy": "^0.17.1",
"vite-tsconfig-paths": "^4.2.1",
"vitest": "1.1.3",
"vitest": "0.34.6",
"vitest-fetch-mock": "^0.2.2",
"vitest-mock-extended": "^1.3.1"
},
"packageManager": "yarn@4.0.2",
"packageManager": "yarn@4.0.1",
"resolutions": {
"vite": "^5.0.6",
"vite": "^4.4.11",
"array-buffer-byte-length": "npm:@nolyfill/array-buffer-byte-length@latest",
"array-includes": "npm:@nolyfill/array-includes@latest",
"array.prototype.flat": "npm:@nolyfill/array.prototype.flat@latest",
@@ -170,9 +173,8 @@
"which-boxed-primitive": "npm:@nolyfill/which-boxed-primitive@latest",
"which-typed-array": "npm:@nolyfill/which-typed-array@latest",
"next-auth@^4.24.5": "patch:next-auth@npm%3A4.24.5#~/.yarn/patches/next-auth-npm-4.24.5-8428e11927.patch",
"@reforged/maker-appimage/@electron-forge/maker-base": "7.2.0",
"@reforged/maker-appimage/@electron-forge/maker-base": "7.1.0",
"macos-alias": "npm:macos-alias-building@latest",
"fs-xattr": "npm:@napi-rs/xattr@latest",
"@radix-ui/react-dialog": "npm:@radix-ui/react-dialog@latest"
"fs-xattr": "npm:@napi-rs/xattr@latest"
}
}

View File

@@ -1,45 +0,0 @@
/*
Warnings:
- You are about to drop the `user_feature_gates` table. If the table is not empty, all the data it contains will be lost.
*/
-- DropForeignKey
ALTER TABLE "user_feature_gates" DROP CONSTRAINT "user_feature_gates_user_id_fkey";
-- DropTable
DROP TABLE "user_feature_gates";
-- CreateTable
CREATE TABLE "user_features" (
"id" SERIAL NOT NULL,
"user_id" VARCHAR(36) NOT NULL,
"feature_id" INTEGER NOT NULL,
"reason" VARCHAR NOT NULL,
"created_at" TIMESTAMPTZ(6) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"expired_at" TIMESTAMPTZ(6),
"activated" BOOLEAN NOT NULL DEFAULT false,
CONSTRAINT "user_features_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "features" (
"id" SERIAL NOT NULL,
"feature" VARCHAR NOT NULL,
"version" INTEGER NOT NULL DEFAULT 0,
"type" INTEGER NOT NULL,
"configs" JSON NOT NULL,
"created_at" TIMESTAMPTZ(6) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "features_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE UNIQUE INDEX "features_feature_version_key" ON "features"("feature", "version");
-- AddForeignKey
ALTER TABLE "user_features" ADD CONSTRAINT "user_features_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "user_features" ADD CONSTRAINT "user_features_feature_id_fkey" FOREIGN KEY ("feature_id") REFERENCES "features"("id") ON DELETE CASCADE ON UPDATE CASCADE;

View File

@@ -1,2 +0,0 @@
-- AlterTable
ALTER TABLE "optimized_blobs" ADD COLUMN "deleted_at" TIMESTAMPTZ(6);

View File

@@ -1,18 +0,0 @@
-- CreateTable
CREATE TABLE "workspace_features" (
"id" SERIAL NOT NULL,
"workspace_id" VARCHAR(36) NOT NULL,
"feature_id" INTEGER NOT NULL,
"reason" VARCHAR NOT NULL,
"created_at" TIMESTAMPTZ(6) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"expired_at" TIMESTAMPTZ(6),
"activated" BOOLEAN NOT NULL DEFAULT false,
CONSTRAINT "workspace_features_pkey" PRIMARY KEY ("id")
);
-- AddForeignKey
ALTER TABLE "workspace_features" ADD CONSTRAINT "workspace_features_feature_id_fkey" FOREIGN KEY ("feature_id") REFERENCES "features"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "workspace_features" ADD CONSTRAINT "workspace_features_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;

View File

@@ -1,7 +1,7 @@
{
"name": "@affine/server",
"private": true,
"version": "0.11.0",
"version": "0.10.3",
"description": "Affine Node.js server",
"type": "module",
"bin": {
@@ -9,18 +9,18 @@
},
"scripts": {
"build": "tsc",
"start": "node --loader ts-node/esm/transpile-only.mjs --es-module-specifier-resolution node ./src/index.ts",
"start": "node --loader ts-node/esm.mjs --es-module-specifier-resolution node ./src/index.ts",
"dev": "nodemon ./src/index.ts",
"test": "ava --concurrency 1 --serial",
"test:coverage": "c8 ava --concurrency 1 --serial",
"postinstall": "prisma generate",
"data-migration": "node --loader ts-node/esm/transpile-only.mjs --es-module-specifier-resolution node ./src/data/app.ts",
"data-migration": "node --loader ts-node/esm.mjs --es-module-specifier-resolution node ./src/data/app.ts",
"predeploy": "yarn prisma migrate deploy && node --es-module-specifier-resolution node ./dist/data/app.js run"
},
"dependencies": {
"@apollo/server": "^4.9.5",
"@auth/prisma-adapter": "^1.0.7",
"@aws-sdk/client-s3": "^3.499.0",
"@aws-sdk/client-s3": "^3.454.0",
"@google-cloud/opentelemetry-cloud-monitoring-exporter": "^0.17.0",
"@google-cloud/opentelemetry-cloud-trace-exporter": "^2.1.0",
"@keyv/redis": "^2.8.0",
@@ -36,29 +36,28 @@
"@nestjs/websockets": "^10.2.10",
"@node-rs/argon2": "^1.5.2",
"@node-rs/crc32": "^1.7.2",
"@node-rs/jsonwebtoken": "^0.3.0",
"@node-rs/jsonwebtoken": "^0.2.3",
"@opentelemetry/api": "^1.7.0",
"@opentelemetry/core": "^1.19.0",
"@opentelemetry/exporter-prometheus": "^0.46.0",
"@opentelemetry/exporter-zipkin": "^1.19.0",
"@opentelemetry/host-metrics": "^0.34.0",
"@opentelemetry/instrumentation": "^0.46.0",
"@opentelemetry/core": "^1.18.1",
"@opentelemetry/exporter-prometheus": "^0.45.1",
"@opentelemetry/exporter-zipkin": "^1.18.1",
"@opentelemetry/host-metrics": "^0.33.2",
"@opentelemetry/instrumentation": "^0.45.1",
"@opentelemetry/instrumentation-graphql": "^0.36.0",
"@opentelemetry/instrumentation-http": "^0.46.0",
"@opentelemetry/instrumentation-ioredis": "^0.36.0",
"@opentelemetry/instrumentation-http": "^0.45.1",
"@opentelemetry/instrumentation-ioredis": "^0.35.3",
"@opentelemetry/instrumentation-nestjs-core": "^0.33.3",
"@opentelemetry/instrumentation-socket.io": "^0.35.0",
"@opentelemetry/resources": "^1.19.0",
"@opentelemetry/sdk-metrics": "^1.19.0",
"@opentelemetry/sdk-node": "^0.46.0",
"@opentelemetry/sdk-trace-node": "^1.19.0",
"@prisma/client": "^5.7.1",
"@prisma/instrumentation": "^5.7.1",
"@opentelemetry/instrumentation-socket.io": "^0.34.3",
"@opentelemetry/sdk-metrics": "^1.18.1",
"@opentelemetry/sdk-node": "^0.45.1",
"@opentelemetry/sdk-trace-node": "^1.18.1",
"@prisma/client": "^5.6.0",
"@prisma/instrumentation": "^5.6.0",
"@socket.io/redis-adapter": "^8.2.1",
"cookie-parser": "^1.4.6",
"dotenv": "^16.3.1",
"express": "^4.18.2",
"file-type": "^19.0.0",
"file-type": "^18.7.0",
"get-stream": "^8.0.1",
"graphql": "^16.8.1",
"graphql-type-json": "^0.3.2",
@@ -74,16 +73,15 @@
"on-headers": "^1.0.2",
"parse-duration": "^1.1.0",
"pretty-time": "^1.1.0",
"prisma": "^5.7.1",
"prisma": "^5.6.0",
"prom-client": "^15.0.0",
"reflect-metadata": "^0.2.0",
"reflect-metadata": "^0.1.13",
"rxjs": "^7.8.1",
"semver": "^7.5.4",
"socket.io": "^4.7.2",
"stripe": "^14.5.0",
"ws": "^8.14.2",
"yjs": "^13.6.10",
"zod": "^3.22.4"
"yjs": "^13.6.10"
},
"devDependencies": {
"@affine-test/kit": "workspace:*",
@@ -101,10 +99,10 @@
"@types/on-headers": "^1.0.3",
"@types/pretty-time": "^1.1.5",
"@types/sinon": "^17.0.2",
"@types/supertest": "^6.0.0",
"@types/supertest": "^2.0.16",
"@types/ws": "^8.5.10",
"ava": "^6.0.0",
"c8": "^9.0.0",
"ava": "^5.3.1",
"c8": "^8.0.1",
"nodemon": "^3.0.1",
"sinon": "^17.0.1",
"supertest": "^6.3.3",
@@ -136,8 +134,7 @@
"ENABLE_LOCAL_EMAIL": "true",
"OAUTH_EMAIL_LOGIN": "noreply@toeverything.info",
"OAUTH_EMAIL_PASSWORD": "affine",
"OAUTH_EMAIL_SENDER": "noreply@toeverything.info",
"FEATURES_EARLY_ACCESS_PREVIEW": "false"
"OAUTH_EMAIL_SENDER": "noreply@toeverything.info"
}
},
"nodemonConfig": {

View File

@@ -1,7 +1,7 @@
generator client {
provider = "prisma-client-js"
binaryTargets = ["native", "debian-openssl-3.0.x", "linux-arm64-openssl-3.0.x"]
previewFeatures = ["metrics", "tracing", "relationJoins", "nativeDistinct"]
previewFeatures = ["metrics", "tracing"]
}
datasource db {
@@ -22,7 +22,7 @@ model User {
accounts Account[]
sessions Session[]
features UserFeatures[]
features UserFeatureGates[]
customer UserStripeCustomer?
subscription UserSubscription?
invoices UserInvoice[]
@@ -40,7 +40,6 @@ model Workspace {
pages WorkspacePage[]
permissions WorkspaceUserPermission[]
pagePermissions WorkspacePageUserPermission[]
features WorkspaceFeatures[]
@@map("workspaces")
}
@@ -114,76 +113,15 @@ model WorkspacePageUserPermission {
@@map("workspace_page_user_permissions")
}
// feature gates are a way to enable/disable features for a user
// for example:
// - early access is a feature that allows some users to access the insider version
// - pro plan is a quota that allows some users access to more resources after they pay
model UserFeatures {
id Int @id @default(autoincrement())
userId String @map("user_id") @db.VarChar(36)
featureId Int @map("feature_id") @db.Integer
// we will record the reason why the feature is enabled/disabled
// for example:
// - pro_plan_v1: "user buys the pro plan"
reason String @db.VarChar
// record the quota enabled time
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
// record the quota expiration time; a paid plan is a subscription, so it will expire
expiredAt DateTime? @map("expired_at") @db.Timestamptz(6)
// whether the feature is activated
// for example:
// - if we switch the user to another plan, we will mark the old plan as deactivated, but don't delete it
activated Boolean @default(false)
feature Features @relation(fields: [featureId], references: [id], onDelete: Cascade)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
@@map("user_features")
}
// feature gates are a way to enable/disable features for a workspace
// for example:
// - copilot is a feature that allows some users in a workspace to access the copilot feature
model WorkspaceFeatures {
id Int @id @default(autoincrement())
workspaceId String @map("workspace_id") @db.VarChar(36)
featureId Int @map("feature_id") @db.Integer
// we will record the reason why the feature is enabled/disabled
// for example:
// - copilot_v1: "owner buys the copilot feature package"
reason String @db.VarChar
// record the feature enabled time
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
// record the quota expiration time; a paid plan is a subscription, so it will expire
expiredAt DateTime? @map("expired_at") @db.Timestamptz(6)
// whether the feature is activated
// for example:
// - if the owner unsubscribes from a feature package, we will mark the feature as deactivated, but don't delete it
activated Boolean @default(false)
feature Features @relation(fields: [featureId], references: [id], onDelete: Cascade)
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
@@map("workspace_features")
}
model Features {
id Int @id @default(autoincrement())
model UserFeatureGates {
id String @id @default(uuid()) @db.VarChar
userId String @map("user_id") @db.VarChar
feature String @db.VarChar
version Int @default(0) @db.Integer
// 0: feature, 1: quota
type Int @db.Integer
// configs, defined by the feature controller
configs Json @db.Json
reason String @db.VarChar
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
UserFeatureGates UserFeatures[]
WorkspaceFeatures WorkspaceFeatures[]
@@unique([feature, version])
@@map("features")
@@map("user_feature_gates")
}
model Account {
@@ -225,7 +163,6 @@ model VerificationToken {
@@map("verificationtokens")
}
// deprecated, use [ObjectStorage]
model Blob {
id Int @id @default(autoincrement()) @db.Integer
hash String @db.VarChar
@@ -240,17 +177,14 @@ model Blob {
@@map("blobs")
}
// deprecated, use [ObjectStorage]
model OptimizedBlob {
id Int @id @default(autoincrement()) @db.Integer
hash String @db.VarChar
workspaceId String @map("workspace_id") @db.VarChar
params String @db.VarChar
blob Bytes @db.ByteA
id Int @id @default(autoincrement()) @db.Integer
hash String @db.VarChar
workspaceId String @map("workspace_id") @db.VarChar
params String @db.VarChar
blob Bytes @db.ByteA
length BigInt
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
// not for keeping, but for snapshot history
deletedAt DateTime? @map("deleted_at") @db.Timestamptz(6)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
@@unique([workspaceId, hash, params])
@@map("optimized_blobs")

View File

@@ -8,20 +8,6 @@ async function main() {
data: {
...userA,
password: await hash(userA.password),
features: {
create: {
reason: 'created by api sign up',
activated: true,
feature: {
connect: {
feature_version: {
feature: 'free_plan_v1',
version: 1,
},
},
},
},
},
},
});
}

View File

@@ -1,25 +0,0 @@
/* eslint-disable @typescript-eslint/no-non-null-assertion */
// Custom configurations
const env = process.env;
const node = AFFiNE.node;
// TODO: maybe separate config overriding in `affine.[env].config`?
if (node.prod && env.R2_OBJECT_STORAGE_ACCOUNT_ID) {
AFFiNE.storage.providers.r2 = {
accountId: env.R2_OBJECT_STORAGE_ACCOUNT_ID,
credentials: {
accessKeyId: env.R2_OBJECT_STORAGE_ACCESS_KEY_ID!,
secretAccessKey: env.R2_OBJECT_STORAGE_SECRET_ACCESS_KEY!,
},
};
AFFiNE.storage.storages.avatar.provider = 'r2';
AFFiNE.storage.storages.avatar.bucket = 'account-avatar';
AFFiNE.storage.storages.avatar.publicLinkFactory = key =>
`https://avatar.affineassets.com/${key}`;
AFFiNE.storage.storages.blob.provider = 'r2';
AFFiNE.storage.storages.blob.bucket = `workspace-blobs-${
AFFiNE.affine.canary ? 'canary' : 'prod'
}`;
}

View File

@@ -1,3 +0,0 @@
import { getDefaultAFFiNEConfig } from './config/default';
globalThis.AFFiNE = getDefaultAFFiNEConfig();

View File

@@ -1,33 +1,26 @@
import { Module } from '@nestjs/common';
import { APP_INTERCEPTOR } from '@nestjs/core';
import { AppController } from './app.controller';
import { CacheInterceptor, CacheModule } from './cache';
import { CacheModule } from './cache';
import { ConfigModule } from './config';
import { EventModule } from './event';
import { BusinessModules } from './modules';
import { AuthModule } from './modules/auth';
import { PrismaModule } from './prisma';
import { SessionModule } from './session';
import { StorageModule } from './storage';
import { RateLimiterModule } from './throttler';
const BasicModules = [
PrismaModule,
ConfigModule.forRoot(),
CacheModule,
EventModule,
StorageModule.forRoot(),
SessionModule,
RateLimiterModule,
AuthModule,
];
@Module({
providers: [
{
provide: APP_INTERCEPTOR,
useClass: CacheInterceptor,
},
],
imports: [...BasicModules, ...BusinessModules],
controllers: [AppController],
})

View File

@@ -22,5 +22,3 @@ const CacheProvider: FactoryProvider = {
})
export class CacheModule {}
export { LocalCache as Cache };
export { CacheInterceptor, MakeCache, PreventCache } from './interceptor';

View File

@@ -1,99 +0,0 @@
import {
CallHandler,
ExecutionContext,
Injectable,
Logger,
NestInterceptor,
SetMetadata,
} from '@nestjs/common';
import { Reflector } from '@nestjs/core';
import { GqlContextType, GqlExecutionContext } from '@nestjs/graphql';
import { mergeMap, Observable, of } from 'rxjs';
import { LocalCache } from './cache';
export const MakeCache = (key: string[], args?: string[]) =>
SetMetadata('cacheKey', [key, args]);
export const PreventCache = (key: string[], args?: string[]) =>
SetMetadata('preventCache', [key, args]);
type CacheConfig = [string[], string[]?];
@Injectable()
export class CacheInterceptor implements NestInterceptor {
private readonly logger = new Logger(CacheInterceptor.name);
constructor(
private readonly reflector: Reflector,
private readonly cache: LocalCache
) {}
async intercept(
ctx: ExecutionContext,
next: CallHandler<any>
): Promise<Observable<any>> {
const key = this.reflector.get<CacheConfig | undefined>(
'cacheKey',
ctx.getHandler()
);
const preventKey = this.reflector.get<CacheConfig | undefined>(
'preventCache',
ctx.getHandler()
);
if (preventKey) {
this.logger.debug(`prevent cache: ${JSON.stringify(preventKey)}`);
const key = await this.getCacheKey(ctx, preventKey);
if (key) {
await this.cache.delete(key);
}
return next.handle();
} else if (!key) {
return next.handle();
}
const cacheKey = await this.getCacheKey(ctx, key);
if (!cacheKey) {
return next.handle();
}
const cachedData = await this.cache.get(cacheKey);
if (cachedData) {
this.logger.debug('cache hit', cacheKey, cachedData);
return of(cachedData);
} else {
return next.handle().pipe(
mergeMap(async result => {
this.logger.debug('cache miss', cacheKey, result);
await this.cache.set(cacheKey, result);
return result;
})
);
}
}
private async getCacheKey(
ctx: ExecutionContext,
config: CacheConfig
): Promise<string | null> {
const [key, params] = config;
if (!params) {
return key.join(':');
} else if (ctx.getType<GqlContextType>() === 'graphql') {
const args = GqlExecutionContext.create(ctx).getArgs();
const cacheKey = params
.map(name => args[name])
.filter(v => v)
.join(':');
if (cacheKey) {
return [...key, cacheKey].join(':');
} else {
return key.join(':');
}
}
return null;
}
}
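
For reference, this is how the removed `MakeCache`/`PreventCache` decorators were meant to be attached to handlers. A minimal sketch, assuming a GraphQL resolver in the same NestJS server and the interceptor registered as a global `APP_INTERCEPTOR` (as the removed `app.module.ts` change did); the resolver, field and lookup below are made up for illustration.

```ts
import { Args, Query, Resolver } from '@nestjs/graphql';

// Illustrative import path; the decorators are re-exported from the cache module.
import { MakeCache, PreventCache } from '../cache';

@Resolver()
export class WorkspaceTitleResolver {
  // Cached under the key `workspace-title:<workspaceId>`: the interceptor reads
  // the `workspaceId` GraphQL argument named in the second parameter.
  @MakeCache(['workspace-title'], ['workspaceId'])
  @Query(() => String)
  async workspaceTitle(@Args('workspaceId') workspaceId: string): Promise<string> {
    return `title-of-${workspaceId}`; // stand-in for a real lookup
  }

  // Handlers tagged with PreventCache delete the same key before running,
  // so the next cached read is recomputed.
  @PreventCache(['workspace-title'], ['workspaceId'])
  @Query(() => String)
  async refreshWorkspaceTitle(@Args('workspaceId') workspaceId: string): Promise<string> {
    return `title-of-${workspaceId}`;
  }
}
```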

View File

@@ -1,7 +1,6 @@
import type { ApolloDriverConfig } from '@nestjs/apollo';
import type { LeafPaths } from '../utils/types';
import type { AFFiNEStorageConfig } from './storage';
declare global {
// eslint-disable-next-line @typescript-eslint/no-namespace
@@ -17,8 +16,6 @@ export enum ExternalAccount {
firebase = 'firebase',
}
export type ServerFlavor = 'allinone' | 'graphql' | 'sync' | 'selfhosted';
type EnvConfigType = 'string' | 'int' | 'float' | 'boolean';
type ConfigPaths = LeafPaths<
Omit<
@@ -53,7 +50,7 @@ function boolean(value: string) {
}
export function parseEnvValue(value: string | undefined, type?: EnvConfigType) {
if (value === undefined) {
if (typeof value === 'undefined') {
return;
}
@@ -166,11 +163,35 @@ export interface AFFiNEConfig {
featureFlags: {
earlyAccessPreview: boolean;
};
/**
* Configuration for Object Storage, which defines how blobs and avatar assets are stored.
* object storage Config
*
* all artifacts and logs will be stored on instance disk,
* and cannot be shared between instances if not configured
*/
storage: AFFiNEStorageConfig;
objectStorage: {
/**
* whether use remote object storage
*/
r2: {
enabled: boolean;
accountId: string;
bucket: string;
accessKeyId: string;
secretAccessKey: string;
};
/**
* Only used when `enabled` is `false`
*/
fs: {
path: string;
};
/**
* default storage quota
* @default 10 * 1024 * 1024 * 1024 (10GB)
*/
quota: number;
};
/**
* Rate limiter config
@@ -324,11 +345,6 @@ export interface AFFiNEConfig {
doc: {
manager: {
/**
* Whether to automatically merge updates into the doc snapshot.
*/
enableUpdateAutoMerging: boolean;
/**
* How often the [DocManager] will start a new turn of merging pending updates into doc snapshot.
*

View File

@@ -1,16 +1,14 @@
/// <reference types="../global.d.ts" />
import { createPrivateKey, createPublicKey } from 'node:crypto';
import { homedir } from 'node:os';
import { join } from 'node:path';
import parse from 'parse-duration';
import pkg from '../../package.json' assert { type: 'json' };
import type { AFFiNEConfig, ServerFlavor } from './def';
import type { AFFiNEConfig } from './def';
import { applyEnvToConfig } from './env';
import { getDefaultAFFiNEStorageConfig } from './storage';
export const SERVER_FLAVOR = (process.env.SERVER_FLAVOR ??
'allinone') as ServerFlavor;
// Don't use this in production
export const examplePrivateKey = `-----BEGIN EC PRIVATE KEY-----
@@ -57,7 +55,13 @@ export const getDefaultAFFiNEConfig: () => AFFiNEConfig = () => {
AFFINE_SERVER_HOST: 'host',
AFFINE_SERVER_SUB_PATH: 'path',
AFFINE_ENV: 'affineEnv',
AFFINE_FREE_USER_QUOTA: 'objectStorage.quota',
DATABASE_URL: 'db.url',
ENABLE_R2_OBJECT_STORAGE: ['objectStorage.r2.enabled', 'boolean'],
R2_OBJECT_STORAGE_ACCOUNT_ID: 'objectStorage.r2.accountId',
R2_OBJECT_STORAGE_ACCESS_KEY_ID: 'objectStorage.r2.accessKeyId',
R2_OBJECT_STORAGE_SECRET_ACCESS_KEY: 'objectStorage.r2.secretAccessKey',
R2_OBJECT_STORAGE_BUCKET: 'objectStorage.r2.bucket',
ENABLE_CAPTCHA: ['auth.captcha.enable', 'boolean'],
CAPTCHA_TURNSTILE_SECRET: ['auth.captcha.turnstile.secret', 'string'],
OAUTH_GOOGLE_ENABLED: ['auth.oauthProviders.google.enabled', 'boolean'],
@@ -174,7 +178,20 @@ export const getDefaultAFFiNEConfig: () => AFFiNEConfig = () => {
password: '',
},
},
storage: getDefaultAFFiNEStorageConfig(),
objectStorage: {
r2: {
enabled: false,
bucket: '',
accountId: '',
accessKeyId: '',
secretAccessKey: '',
},
fs: {
path: join(homedir(), '.affine-storage'),
},
// 10GB
quota: 10 * 1024 * 1024 * 1024,
},
rateLimiter: {
ttl: 60,
limit: 60,
@@ -189,7 +206,6 @@ export const getDefaultAFFiNEConfig: () => AFFiNEConfig = () => {
},
doc: {
manager: {
enableUpdateAutoMerging: SERVER_FLAVOR !== 'sync',
updatePollInterval: 3000,
experimentalMergeWithJwstCodec: false,
},

View File

@@ -10,7 +10,7 @@ export function applyEnvToConfig(rawConfig: AFFiNEConfig) {
? [config, process.env[env]]
: [config[0], parseEnvValue(process.env[env], config[1])];
if (value !== undefined) {
if (typeof value !== 'undefined') {
set(rawConfig, path, value);
}
}

View File

@@ -73,5 +73,3 @@ export class ConfigModule {
}
export type { AFFiNEConfig } from './def';
export { SERVER_FLAVOR } from './default';
export * from './storage';

View File

@@ -1,59 +0,0 @@
import { homedir } from 'node:os';
import { join } from 'node:path';
import { S3ClientConfigType } from '@aws-sdk/client-s3';
export type StorageProviderType = 'fs' | 'r2' | 's3';
export interface FsStorageConfig {
path: string;
}
export type R2StorageConfig = S3ClientConfigType & {
accountId: string;
};
export type S3StorageConfig = S3ClientConfigType;
export type StorageTargetConfig<Ext = unknown> = {
provider: StorageProviderType;
bucket: string;
} & Ext;
export interface AFFiNEStorageConfig {
/**
* All providers for object storage
*
* Support different providers for different usage at the same time.
*/
providers: {
fs?: FsStorageConfig;
s3?: S3StorageConfig;
r2?: R2StorageConfig;
};
storages: {
avatar: StorageTargetConfig<{ publicLinkFactory: (key: string) => string }>;
blob: StorageTargetConfig;
};
}
export type StorageProviders = AFFiNEStorageConfig['providers'];
export type Storages = keyof AFFiNEStorageConfig['storages'];
export function getDefaultAFFiNEStorageConfig(): AFFiNEStorageConfig {
return {
providers: {
fs: {
path: join(homedir(), '.affine/storage'),
},
},
storages: {
avatar: {
provider: 'fs',
bucket: 'avatars',
publicLinkFactory: key => `/api/avatars/${key}`,
},
blob: {
provider: 'fs',
bucket: 'blobs',
},
},
};
}
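
The removed custom-config snippet earlier in this diff (the one that enables R2 in production) shows an override built on this shape. As a complementary sketch for a self-hosted setup, assuming an S3-compatible endpoint and the same global `AFFiNE` config object; bucket names, region and env var names below are placeholders.

```ts
// Sketch of a custom config override, mirroring the R2 example above.
AFFiNE.storage.providers.s3 = {
  region: 'us-east-1',
  credentials: {
    accessKeyId: process.env.S3_ACCESS_KEY_ID!,
    secretAccessKey: process.env.S3_SECRET_ACCESS_KEY!,
  },
};

AFFiNE.storage.storages.blob.provider = 's3';
AFFiNE.storage.storages.blob.bucket = 'affine-workspace-blobs';

AFFiNE.storage.storages.avatar.provider = 's3';
AFFiNE.storage.storages.avatar.bucket = 'affine-avatars';
AFFiNE.storage.storages.avatar.publicLinkFactory = key =>
  `https://assets.example.com/${key}`;
```

Providers that are not overridden simply keep the `fs` defaults above, which store blobs and avatars on the instance disk.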

View File

@@ -1,32 +1,18 @@
import { Logger, Module } from '@nestjs/common';
import { CommandFactory } from 'nest-commander';
import { AppModule as BusinessAppModule } from '../app';
import { ConfigModule } from '../config';
import { PrismaModule } from '../prisma';
import { CreateCommand, NameQuestion } from './commands/create';
import { RevertCommand, RunCommand } from './commands/run';
@Module({
imports: [
ConfigModule.forRoot({
doc: {
manager: {
enableUpdateAutoMerging: false,
},
},
}),
BusinessAppModule,
],
imports: [PrismaModule],
providers: [NameQuestion, CreateCommand, RunCommand, RevertCommand],
})
class AppModule {}
async function bootstrap() {
await CommandFactory.run(AppModule, new Logger()).catch(e => {
console.error(e);
process.exit(1);
});
process.exit(0);
await CommandFactory.run(AppModule, new Logger());
}
await bootstrap();

View File

@@ -3,7 +3,6 @@ import { join } from 'node:path';
import { fileURLToPath } from 'node:url';
import { Logger } from '@nestjs/common';
import { ModuleRef } from '@nestjs/core';
import { Command, CommandRunner } from 'nest-commander';
import { PrismaService } from '../../prisma';
@@ -11,11 +10,11 @@ import { PrismaService } from '../../prisma';
interface Migration {
file: string;
name: string;
up: (db: PrismaService, injector: ModuleRef) => Promise<void>;
down: (db: PrismaService, injector: ModuleRef) => Promise<void>;
up: (db: PrismaService) => Promise<void>;
down: (db: PrismaService) => Promise<void>;
}
export async function collectMigrations(): Promise<Migration[]> {
async function collectMigrations(): Promise<Migration[]> {
const folder = join(fileURLToPath(import.meta.url), '../../migrations');
const migrationFiles = readdirSync(folder)
@@ -47,10 +46,7 @@ export async function collectMigrations(): Promise<Migration[]> {
})
export class RunCommand extends CommandRunner {
logger = new Logger(RunCommand.name);
constructor(
private readonly db: PrismaService,
private readonly injector: ModuleRef
) {
constructor(private readonly db: PrismaService) {
super();
}
@@ -68,8 +64,35 @@ export class RunCommand extends CommandRunner {
continue;
}
await this.runMigration(migration);
this.logger.log(`Running ${migration.name}...`);
const record = await this.db.dataMigration.create({
data: {
name: migration.name,
startedAt: new Date(),
},
});
try {
await migration.up(this.db);
} catch (e) {
await this.db.dataMigration.delete({
where: {
id: record.id,
},
});
await migration.down(this.db);
this.logger.error('Failed to run data migration', e);
process.exit(1);
}
await this.db.dataMigration.update({
where: {
id: record.id,
},
data: {
finishedAt: new Date(),
},
});
done.push(migration);
}
@@ -78,56 +101,6 @@ export class RunCommand extends CommandRunner {
this.logger.log(`${migration.name}`);
});
}
async runOne(name: string) {
const migrations = await collectMigrations();
const migration = migrations.find(m => m.name === name);
if (!migration) {
throw new Error(`Unknown migration name: ${name}.`);
}
const exists = await this.db.dataMigration.count({
where: {
name: migration.name,
},
});
if (exists) return;
await this.runMigration(migration);
}
private async runMigration(migration: Migration) {
this.logger.log(`Running ${migration.name}...`);
const record = await this.db.dataMigration.create({
data: {
name: migration.name,
startedAt: new Date(),
},
});
try {
await migration.up(this.db, this.injector);
} catch (e) {
await this.db.dataMigration.delete({
where: {
id: record.id,
},
});
await migration.down(this.db, this.injector);
this.logger.error('Failed to run data migration', e);
process.exit(1);
}
await this.db.dataMigration.update({
where: {
id: record.id,
},
data: {
finishedAt: new Date(),
},
});
}
}
@Command({
@@ -138,10 +111,7 @@ export class RunCommand extends CommandRunner {
export class RevertCommand extends CommandRunner {
logger = new Logger(RevertCommand.name);
constructor(
private readonly db: PrismaService,
private readonly injector: ModuleRef
) {
constructor(private readonly db: PrismaService) {
super();
}
@@ -175,7 +145,7 @@ export class RevertCommand extends CommandRunner {
try {
this.logger.log(`Reverting ${name}...`);
await migration.down(this.db, this.injector);
await migration.down(this.db);
this.logger.log('Done reverting');
} catch (e) {
this.logger.error(`Failed to revert data migration ${name}`, e);

View File

@@ -1,24 +0,0 @@
import { Features } from '../../modules/features';
import { Quotas } from '../../modules/quota/schema';
import { PrismaService } from '../../prisma';
import { migrateNewFeatureTable, upsertFeature } from './utils/user-features';
export class UserFeaturesInit1698652531198 {
// do the migration
static async up(db: PrismaService) {
// upgrade features from lower version to higher version
for (const feature of Features) {
await upsertFeature(db, feature);
}
await migrateNewFeatureTable(db);
for (const quota of Quotas) {
await upsertFeature(db, quota);
}
}
// revert the migration
static async down(_db: PrismaService) {
// TODO: revert the migration
}
}

View File

@@ -1,37 +0,0 @@
import { QuotaType } from '../../modules/quota/types';
import { PrismaService } from '../../prisma';
export class OldUserFeature1702620653283 {
// do the migration
static async up(db: PrismaService) {
await db.$transaction(async tx => {
const latestFreePlan = await tx.features.findFirstOrThrow({
where: { feature: QuotaType.FreePlanV1 },
orderBy: { version: 'desc' },
select: { id: true },
});
// find all users that don't have any features
const userIds = await db.user.findMany({
where: { NOT: { features: { some: { NOT: { id: { gt: 0 } } } } } },
select: { id: true },
});
console.log(`migrating ${userIds.map(({ id }) => id).join('|')} users`);
await tx.userFeatures.createMany({
data: userIds.map(({ id: userId }) => ({
userId,
featureId: latestFreePlan.id,
reason: 'old user feature migration',
activated: true,
})),
});
});
}
// revert the migration
// WARN: this will drop all user features
static async down(db: PrismaService) {
await db.userFeatures.deleteMany({});
}
}

View File

@@ -1,31 +0,0 @@
import type { UserType } from '../../modules/users';
import { PrismaService } from '../../prisma';
export class UnamedAccount1703756315970 {
// do the migration
static async up(db: PrismaService) {
await db.$transaction(async tx => {
// only find users with empty names
const users = await db.$queryRaw<
UserType[]
>`SELECT * FROM users WHERE name ~ E'^[\\s\\u2000-\\u200F]*$';`;
console.log(
`renaming ${users.map(({ email }) => email).join('|')} users`
);
await Promise.all(
users.map(({ id, email }) =>
tx.user.update({
where: { id },
data: {
name: email.split('@')[0],
},
})
)
);
});
}
// revert the migration
static async down(_db: PrismaService) {}
}

View File

@@ -1,38 +0,0 @@
import { ModuleRef } from '@nestjs/core';
import { WorkspaceBlobStorage } from '../../modules/storage';
import { PrismaService } from '../../prisma';
export class WorkspaceBlobs1703828796699 {
// do the migration
static async up(db: PrismaService, injector: ModuleRef) {
const blobStorage = injector.get(WorkspaceBlobStorage, { strict: false });
let hasMore = true;
let turn = 0;
const eachTurnCount = 50;
while (hasMore) {
const blobs = await db.blob.findMany({
skip: turn * eachTurnCount,
take: eachTurnCount,
orderBy: {
createdAt: 'asc',
},
});
hasMore = blobs.length === eachTurnCount;
turn += 1;
await Promise.all(
blobs.map(async ({ workspaceId, hash, blob }) =>
blobStorage.put(workspaceId, hash, blob)
)
);
}
}
// revert the migration
static async down(_db: PrismaService) {
// old data kept, no need to downgrade the migration
}
}

View File

@@ -1,16 +0,0 @@
import { Features } from '../../modules/features';
import { PrismaService } from '../../prisma';
import { upsertFeature } from './utils/user-features';
export class RefreshUserFeatures1704352562369 {
// do the migration
static async up(db: PrismaService) {
// add early access v2 & copilot feature
for (const feature of Features) {
await upsertFeature(db, feature);
}
}
// revert the migration
static async down(_db: PrismaService) {}
}

View File

@@ -1,100 +0,0 @@
import { Prisma } from '@prisma/client';
import {
CommonFeature,
FeatureKind,
FeatureType,
} from '../../../modules/features';
import { PrismaService } from '../../../prisma';
// upgrade features from lower version to higher version
export async function upsertFeature(
db: PrismaService,
feature: CommonFeature
): Promise<void> {
const hasEqualOrGreaterVersion =
(await db.features.count({
where: {
feature: feature.feature,
version: {
gte: feature.version,
},
},
})) > 0;
// will not update an existing version
if (!hasEqualOrGreaterVersion) {
await db.features.create({
data: {
feature: feature.feature,
type: feature.type,
version: feature.version,
configs: feature.configs as Prisma.InputJsonValue,
},
});
}
}
export async function migrateNewFeatureTable(prisma: PrismaService) {
const waitingList = await prisma.newFeaturesWaitingList.findMany();
for (const oldUser of waitingList) {
const user = await prisma.user.findFirst({
where: {
email: oldUser.email,
},
});
if (user) {
const hasEarlyAccess = await prisma.userFeatures.count({
where: {
userId: user.id,
feature: {
feature: FeatureType.EarlyAccess,
},
activated: true,
},
});
if (hasEarlyAccess === 0) {
await prisma.$transaction(async tx => {
const latestFlag = await tx.userFeatures.findFirst({
where: {
userId: user.id,
feature: {
feature: FeatureType.EarlyAccess,
type: FeatureKind.Feature,
},
activated: true,
},
orderBy: {
createdAt: 'desc',
},
});
if (latestFlag) {
return latestFlag.id;
} else {
return tx.userFeatures
.create({
data: {
reason: 'Early access user',
activated: true,
user: {
connect: {
id: user.id,
},
},
feature: {
connect: {
feature_version: {
feature: FeatureType.EarlyAccess,
version: 1,
},
type: FeatureKind.Feature,
},
},
},
})
.then(r => r.id);
}
});
}
}
}
}

View File

@@ -1,33 +0,0 @@
import type { Snapshot, User, Workspace } from '@prisma/client';
import { Flatten, Payload } from './types';
interface EventDefinitions {
workspace: {
deleted: Payload<Workspace['id']>;
blob: {
deleted: Payload<{
workspaceId: Workspace['id'];
name: string;
}>;
};
};
snapshot: {
updated: Payload<
Pick<Snapshot, 'id' | 'workspaceId'> & {
previous: Pick<Snapshot, 'blob' | 'state' | 'updatedAt'>;
}
>;
deleted: Payload<Pick<Snapshot, 'id' | 'workspaceId'>>;
};
user: {
deleted: Payload<User>;
};
}
export type EventKV = Flatten<EventDefinitions>;
export type Event = keyof EventKV;
export type EventPayload<E extends Event> = EventKV[E];

View File

@@ -1,43 +0,0 @@
import { Global, Injectable, Module } from '@nestjs/common';
import {
EventEmitter2,
EventEmitterModule,
OnEvent as RawOnEvent,
} from '@nestjs/event-emitter';
import type { Event, EventPayload } from './events';
@Injectable()
export class EventEmitter {
constructor(private readonly emitter: EventEmitter2) {}
emit<E extends Event>(event: E, payload: EventPayload<E>) {
return this.emitter.emit(event, payload);
}
emitAsync<E extends Event>(event: E, payload: EventPayload<E>) {
return this.emitter.emitAsync(event, payload);
}
on<E extends Event>(event: E, handler: (payload: EventPayload<E>) => void) {
return this.emitter.on(event, handler);
}
once<E extends Event>(event: E, handler: (payload: EventPayload<E>) => void) {
return this.emitter.once(event, handler);
}
}
export const OnEvent = RawOnEvent as (
event: Event,
opts?: Parameters<typeof RawOnEvent>[1]
) => MethodDecorator;
@Global()
@Module({
imports: [EventEmitterModule.forRoot()],
providers: [EventEmitter],
exports: [EventEmitter],
})
export class EventModule {}
export { EventPayload };

View File

@@ -1,33 +0,0 @@
export type Payload<T> = {
__payload: true;
data: T;
};
export type Join<A extends string, B extends string> = A extends ''
? B
: `${A}.${B}`;
export type PathType<T, Path extends string> = string extends Path
? unknown
: Path extends keyof T
? T[Path]
: Path extends `${infer K}.${infer R}`
? K extends keyof T
? PathType<T[K], R>
: unknown
: unknown;
export type Leaves<T, P extends string = ''> = T extends Payload<any>
? P
: T extends Record<string, any>
? {
[K in keyof T]: K extends string ? Leaves<T[K], Join<P, K>> : never;
}[keyof T]
: never;
export type Flatten<T> = Leaves<T> extends infer R
? {
// @ts-expect-error yo, ts can't make it
[K in R]: PathType<T, K> extends Payload<infer U> ? U : never;
}
: never;
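
Combined with the `events.ts` definitions above, these helpers flatten the nested `EventDefinitions` tree into dot-separated event names paired with their unwrapped payload types. A small sketch of what that gives call sites (the import path and function name are illustrative):

```ts
import { EventEmitter } from '../event';

function wireBlobCleanup(emitter: EventEmitter) {
  // 'workspace.blob.deleted' comes from Flatten<EventDefinitions>; the payload
  // is inferred as roughly { workspaceId: string; name: string }.
  emitter.on('workspace.blob.deleted', payload => {
    console.log(`blob ${payload.name} deleted from ${payload.workspaceId}`);
  });

  // 'workspace.deleted' carries just the workspace id, per Payload<Workspace['id']>.
  emitter.emit('workspace.deleted', 'some-workspace-id');

  // A typo in the event name or a mismatched payload shape is a compile-time error.
}
```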

Some files were not shown because too many files have changed in this diff Show More