Compare commits


2 Commits

Author   | SHA1       | Message                                                         | Date
DarkSky  | ae4c201e40 | fix: use secure websocket (#5297)                               | 2023-12-14 11:28:25 +08:00
DarkSky  | 6724e5a537 | feat: only follow serverUrlPrefix at redirect to client (#5295) | 2023-12-14 11:28:09 +08:00
535 changed files with 7394 additions and 14550 deletions


@@ -126,8 +126,6 @@ const config = {
'no-cond-assign': 'off',
'no-constant-binary-expression': 'error',
'no-constructor-return': 'error',
'no-self-compare': 'error',
eqeqeq: ['error', 'always', { null: 'ignore' }],
'react/prop-types': 'off',
'react/jsx-no-useless-fragment': 'error',
'@typescript-eslint/consistent-type-imports': 'error',
@@ -135,9 +133,6 @@ const config = {
'@typescript-eslint/no-explicit-any': 'off',
'@typescript-eslint/no-empty-function': 'off',
'@typescript-eslint/await-thenable': 'error',
'@typescript-eslint/require-array-sort-compare': 'error',
'@typescript-eslint/unified-signatures': 'error',
'@typescript-eslint/prefer-for-of': 'error',
'@typescript-eslint/no-unused-vars': [
'error',
{
@@ -209,17 +204,6 @@ const config = {
},
],
'unicorn/no-unnecessary-await': 'error',
'unicorn/no-useless-fallback-in-spread': 'error',
'unicorn/prefer-dom-node-dataset': 'error',
'unicorn/prefer-dom-node-append': 'error',
'unicorn/prefer-dom-node-remove': 'error',
'unicorn/prefer-array-some': 'error',
'unicorn/prefer-date-now': 'error',
'unicorn/prefer-blob-reading-methods': 'error',
'unicorn/no-typeof-undefined': 'error',
'unicorn/no-useless-promise-resolve-reject': 'error',
'unicorn/no-new-array': 'error',
'unicorn/new-for-builtins': 'error',
'sonarjs/no-all-duplicated-branches': 'error',
'sonarjs/no-element-overwrite': 'error',
'sonarjs/no-empty-collection': 'error',
@@ -270,7 +254,6 @@ const config = {
},
],
'@typescript-eslint/no-misused-promises': ['error'],
'@typescript-eslint/prefer-readonly': 'error',
'i/no-extraneous-dependencies': ['error'],
'react-hooks/exhaustive-deps': [
'warn',


@@ -14,28 +14,14 @@ inputs:
runs:
using: 'composite'
steps:
- name: Print rustup toolchain version
shell: bash
id: rustup-version
run: |
export RUST_TOOLCHAIN_VERSION="$(grep 'channel' rust-toolchain.toml | head -1 | awk -F '"' '{print $2}')"
echo "Rust toolchain version: $RUST_TOOLCHAIN_VERSION"
echo "RUST_TOOLCHAIN_VERSION=$RUST_TOOLCHAIN_VERSION" >> "$GITHUB_OUTPUT"
- name: Setup Rust
uses: dtolnay/rust-toolchain@stable
with:
toolchain: '${{ steps.rustup-version.outputs.RUST_TOOLCHAIN_VERSION }}'
toolchain: stable
targets: ${{ inputs.target }}
env:
CARGO_INCREMENTAL: '1'
- name: Set CC
if: ${{ contains(inputs.target, 'linux') && inputs.package != '@affine/native' }}
shell: bash
run: |
echo "CC=clang" >> "$GITHUB_ENV"
echo "TARGET_CC=clang" >> "$GITHUB_ENV"
- name: Cache cargo
uses: actions/cache@v3
with:
@@ -43,13 +29,47 @@ runs:
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
~/.napi-rs
.cargo-cache
target/${{ inputs.target }}
key: stable-${{ inputs.target }}-cargo-cache
- name: Build
if: ${{ inputs.target != 'x86_64-unknown-linux-gnu' && inputs.target != 'aarch64-unknown-linux-gnu' }}
shell: bash
run: |
yarn workspace ${{ inputs.package }} nx build ${{ inputs.package }} --target ${{ inputs.target }} --use-napi-cross
yarn workspace ${{ inputs.package }} nx build ${{ inputs.package }} --target ${{ inputs.target }}
env:
NX_CLOUD_ACCESS_TOKEN: ${{ inputs.nx_token }}
DEBUG: 'napi:*'
- name: Build
if: ${{ inputs.target == 'x86_64-unknown-linux-gnu' }}
uses: addnab/docker-run-action@v3
with:
image: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian
options: --user 0:0 -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build -e NX_CLOUD_ACCESS_TOKEN=${{ inputs.nx_token }}
run: |
export CC=x86_64-unknown-linux-gnu-gcc
export CC_x86_64_unknown_linux_gnu=x86_64-unknown-linux-gnu-gcc
export RUSTFLAGS="-C debuginfo=1"
yarn workspace ${{ inputs.package }} nx build ${{ inputs.package }} --target ${{ inputs.target }}
if [ -d "node_modules/.cache" ]; then
chmod -R 777 node_modules/.cache
fi
if [ -d "target" ]; then
chmod -R 777 target;
fi
- name: Build
if: ${{ inputs.target == 'aarch64-unknown-linux-gnu' }}
uses: addnab/docker-run-action@v3
with:
image: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian-aarch64
options: --user 0:0 -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build -e NX_CLOUD_ACCESS_TOKEN=${{ inputs.nx_token }}
run: |
export RUSTFLAGS="-C debuginfo=1"
yarn workspace ${{ inputs.package }} nx build ${{ inputs.package }} --target ${{ inputs.target }}
if [ -d "node_modules/.cache" ]; then
chmod -R 777 node_modules/.cache
fi
if [ -d "target" ]; then
chmod -R 777 target;
fi

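Note: the step removed above pins the Rust toolchain by parsing the channel field out of rust-toolchain.toml instead of floating on stable. A minimal standalone sketch of that pattern, assembled from the lines in this diff (the channel value in the comment is illustrative):

- name: Print rustup toolchain version
  id: rustup-version
  shell: bash
  run: |
    # rust-toolchain.toml carries a line like: channel = "1.74.1"
    export RUST_TOOLCHAIN_VERSION="$(grep 'channel' rust-toolchain.toml | head -1 | awk -F '"' '{print $2}')"
    echo "RUST_TOOLCHAIN_VERSION=$RUST_TOOLCHAIN_VERSION" >> "$GITHUB_OUTPUT"
- name: Setup Rust
  uses: dtolnay/rust-toolchain@stable
  with:
    # the explicit input overrides the @stable tag with the pinned version
    toolchain: '${{ steps.rustup-version.outputs.RUST_TOOLCHAIN_VERSION }}'
    targets: ${{ inputs.target }}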

@@ -26,7 +26,7 @@ runs:
echo "GIT_SHORT_HASH=$(git rev-parse --short HEAD)" >> "$GITHUB_ENV"
- uses: azure/setup-helm@v3
- id: auth
uses: google-github-actions/auth@v2
uses: google-github-actions/auth@v1
with:
workload_identity_provider: 'projects/${{ inputs.gcp-project-number }}/locations/global/workloadIdentityPools/github-actions/providers/github-actions-helm-deploy'
service_account: '${{ inputs.service-account }}'
@@ -34,7 +34,7 @@ runs:
project_id: '${{ inputs.gcp-project-id }}'
- name: 'Setup gcloud cli'
uses: 'google-github-actions/setup-gcloud@v2'
uses: 'google-github-actions/setup-gcloud@v1'
with:
install_components: 'gke-gcloud-auth-plugin'


@@ -1,7 +1,6 @@
import { execSync } from 'node:child_process';
const {
APP_VERSION,
BUILD_TYPE,
DEPLOY_HOST,
CANARY_DEPLOY_HOST,
@@ -80,7 +79,6 @@ const createHelmCommand = ({ isDryRun }) => {
`--set global.ingress.enabled=true`,
`--set-json global.ingress.annotations=\"{ \\"kubernetes.io/ingress.class\\": \\"gce\\", \\"kubernetes.io/ingress.allow-http\\": \\"true\\", \\"kubernetes.io/ingress.global-static-ip-name\\": \\"${staticIpName}\\" }\"`,
`--set-string global.ingress.host="${host}"`,
`--set-string global.version="${APP_VERSION}"`,
...redisAndPostgres,
`--set web.replicaCount=${webReplicaCount}`,
`--set-string web.image.tag="${imageTag}"`,
@@ -107,7 +105,7 @@ const createHelmCommand = ({ isDryRun }) => {
`--set sync.replicaCount=${syncReplicaCount}`,
`--set-string sync.image.tag="${imageTag}"`,
...serviceAnnotations,
`--timeout 10m`,
`--version "0.0.0-${buildType}.${GIT_SHORT_HASH}" --timeout 10m`,
flag,
].join(' ');
return deployCommand;


@@ -1,22 +0,0 @@
name: 'Download core artifacts'
description: 'Download core artifacts and extract to dist'
inputs:
path:
description: 'Path to extract'
required: true
runs:
using: 'composite'
steps:
- name: Download tar.gz
uses: actions/download-artifact@v3
with:
name: core
path: .
- name: Extract core artifacts
shell: bash
run: |
mkdir -p ${{ inputs.path }}
tar -xvf dist.tar.gz --directory ${{ inputs.path }}
rm dist.tar.gz

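For context, this deleted composite action was consumed like so (this exact usage still appears in the old Build & Test workflow further down, in its desktop-test job):

- name: Download core artifact
  uses: ./.github/actions/download-core
  with:
    path: packages/frontend/electron/resources/web-static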

@@ -36,19 +36,17 @@ inputs:
description: 'Set enableScripts in .yarnrc.yml'
required: false
default: 'true'
full-cache:
description: 'Full installation cache'
required: false
runs:
using: 'composite'
steps:
- name: Setup Node.js
uses: actions/setup-node@v4
uses: actions/setup-node@v3
with:
node-version-file: '.nvmrc'
registry-url: https://npm.pkg.github.com
scope: '@toeverything'
cache: 'yarn'
- name: Set nmMode
if: ${{ inputs.hard-link-nm == 'false' }}
@@ -65,48 +63,6 @@ runs:
shell: bash
run: yarn config set enableScripts false
- name: Set yarn global cache path
shell: bash
id: yarn-cache
run: node -e "const p = $(yarn config cacheFolder --json).effective; console.log('yarn_global_cache=' + p)" >> $GITHUB_OUTPUT
- name: Cache non-full yarn cache on Linux
uses: actions/cache@v3
if: ${{ inputs.full-cache != 'true' && runner.os == 'Linux' }}
with:
path: |
node_modules
${{ steps.yarn-cache.outputs.yarn_global_cache }}
key: node_modules-cache-${{ github.job }}-${{ runner.os }}
# Network performance on macOS is very poor,
# and decompression performance on Windows is terrible,
# so we reduce the number of cached files on non-Linux systems by removing node_modules from the cache path.
- name: Cache non-full yarn cache on non-Linux
uses: actions/cache@v3
if: ${{ inputs.full-cache != 'true' && runner.os != 'Linux' }}
with:
path: |
${{ steps.yarn-cache.outputs.yarn_global_cache }}
key: node_modules-cache-${{ github.job }}-${{ runner.os }}
- name: Cache full yarn cache on Linux
uses: actions/cache@v3
if: ${{ inputs.full-cache == 'true' && runner.os == 'Linux' }}
with:
path: |
node_modules
${{ steps.yarn-cache.outputs.yarn_global_cache }}
key: node_modules-cache-full-${{ runner.os }}
- name: Cache full yarn cache on non-Linux
uses: actions/cache@v3
if: ${{ inputs.full-cache == 'true' && runner.os != 'Linux' }}
with:
path: |
${{ steps.yarn-cache.outputs.yarn_global_cache }}
key: node_modules-cache-full-${{ runner.os }}
- name: yarn install
if: ${{ inputs.package-install == 'true' }}
continue-on-error: true
@@ -146,8 +102,8 @@ runs:
id: playwright-cache
if: ${{ inputs.playwright-install == 'true' }}
with:
path: ${{ github.workspace }}/node_modules/.cache/ms-playwright
key: '${{ runner.os }}-playwright-${{ steps.playwright-version.outputs.version }}'
path: '~/.cache/ms-playwright'
key: '${{ runner.os }}-${{ runner.arch }}-playwright-${{ steps.playwright-version.outputs.version }}'
# As a fallback, if the Playwright version has changed, try to use the
# most recently cached version. There's a good chance that at least one
# of the browser binary versions hasn't been updated, so Playwright can
@@ -157,7 +113,7 @@ runs:
# date cache, but still let Playwright decide if it needs to download
# new binaries or not.
restore-keys: |
${{ runner.os }}-playwright-
${{ runner.os }}-${{ runner.arch }}-playwright-
# If the Playwright browser binaries weren't able to be restored, we tell
# playwright to install everything for us.
@@ -165,8 +121,6 @@ runs:
shell: bash
if: inputs.playwright-install == 'true'
run: yarn playwright install --with-deps chromium
env:
PLAYWRIGHT_BROWSERS_PATH: ${{ github.workspace }}/node_modules/.cache/ms-playwright
- name: Get installed Electron version
id: electron-version
@@ -180,16 +134,16 @@ runs:
if: ${{ inputs.electron-install == 'true' }}
with:
path: 'node_modules/.cache/electron'
key: '${{ runner.os }}-electron-${{ steps.electron-version.outputs.version }}'
key: '${{ runner.os }}-${{ runner.arch }}-electron-${{ steps.electron-version.outputs.version }}'
restore-keys: |
${{ runner.os }}-electron-
${{ runner.os }}-${{ runner.arch }}-electron-
- name: Install Electron binary
shell: bash
if: inputs.electron-install == 'true'
run: node ./node_modules/electron/install.js
env:
electron_config_cache: ./node_modules/.cache/electron
ELECTRON_OVERRIDE_DIST_PATH: ./node_modules/.cache/electron
- name: Build Infra
shell: bash

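The Playwright and Electron cache steps above lean on actions/cache fallback semantics: an exact key match restores without re-saving, while a restore-keys prefix match restores the newest partially matching entry and saves a fresh cache when the job ends. A minimal sketch using the key shape from this diff (the cache-hit guard on the install step is illustrative, not part of this change):

- uses: actions/cache@v3
  id: playwright-cache
  with:
    path: '~/.cache/ms-playwright'
    # exact hit: browsers for this exact Playwright version are already cached
    key: '${{ runner.os }}-${{ runner.arch }}-playwright-${{ steps.playwright-version.outputs.version }}'
    # prefix fallback: reuse the newest cache from any Playwright version on this OS/arch
    restore-keys: |
      ${{ runner.os }}-${{ runner.arch }}-playwright-
- name: Install Playwright browsers
  if: steps.playwright-cache.outputs.cache-hit != 'true'
  run: yarn playwright install --with-deps chromium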
.github/dependabot.yml

@@ -0,0 +1,31 @@
version: 2
updates:
- package-ecosystem: 'npm'
directory: '/'
groups:
all-npm-dependencies:
patterns:
- '*'
schedule:
interval: 'weekly'
versioning-strategy: increase
commit-message:
prefix: 'chore'
- package-ecosystem: 'cargo'
directory: '/'
schedule:
interval: 'weekly'
versioning-strategy: auto
commit-message:
prefix: 'chore'
groups:
all-cargo-dependencies:
patterns:
- '*'
- package-ecosystem: 'github-actions'
directory: '/'
schedule:
interval: 'daily'
commit-message:
prefix: 'ci'


@@ -1,6 +1,6 @@
dependencies:
- name: postgresql
repository: https://charts.bitnami.com/bitnami
version: 13.2.23
digest: sha256:5b64538509bd067bb0f67bf082847a2c5d66dc37d0b9d7948a40405d9c446400
generated: "2023-12-05T03:04:57.997927753Z"
version: 12.5.8
digest: sha256:c91c0dc1370e879538dc9d6e435e731a726ef99d6a3b081372318483792b48a7
generated: "2023-06-27T18:34:12.683806+08:00"


@@ -8,5 +8,5 @@ appVersion: '0.6.1'
dependencies:
- name: postgresql
version: 13.2.23
version: 12.5.8
repository: https://charts.bitnami.com/bitnami


@@ -3,4 +3,4 @@ name: affine
description: AFFiNE cloud chart
type: application
version: 0.0.0
appVersion: "0.10.4-beta.2"
appVersion: '0.7.0-canary.18'


@@ -3,4 +3,4 @@ name: graphql
description: AFFiNE GraphQL server
type: application
version: 0.0.0
appVersion: "0.10.4-beta.2"
appVersion: '0.7.0-canary.18'


@@ -3,4 +3,4 @@ name: sync
description: A Helm chart for Kubernetes
type: application
version: 0.0.0
appVersion: "0.10.4-beta.2"
appVersion: "0.7.0-canary.18"

.github/labeler.yml

@@ -1,115 +1,62 @@
docs:
- changed-files:
- any-glob-to-any-file:
- 'docs/**/*'
- '**/README.md'
- 'packages/frontend/templates/**/*'
- 'docs/**/*'
- '**/README.md'
- 'packages/frontend/templates/**/*'
test:
- changed-files:
- any-glob-to-any-file:
- 'tests/**/*'
- '**/tests/**/*'
- '**/__tests__/**/*'
- 'tests/**/*'
- '**/tests/**/*'
- '**/__tests__/**/*'
mod:dev:
- changed-files:
- any-glob-to-any-file:
- 'scripts/**/*'
- 'tools/cli/**/*'
- 'packages/common/debug/**/*'
- 'scripts/**/*'
- 'tools/cli/**/*'
- 'packages/common/debug/**/*'
mod:plugin:
- changed-files:
- any-glob-to-any-file:
- 'packages/plugins/**/*'
- 'packages/plugins/**/*'
plugin:copilot:
- changed-files:
- any-glob-to-any-file:
- 'packages/plugins/copilot/**/*'
- 'packages/plugins/copilot/**/*'
mod:infra:
- changed-files:
- any-glob-to-any-file:
- 'packages/common/infra/**/*'
- 'packages/common/infra/**/*'
mod:sdk:
- changed-files:
- any-glob-to-any-file:
- 'packages/common/sdk/**/*'
- 'packages/common/sdk/**/*'
mod:plugin-cli:
- changed-files:
- any-glob-to-any-file:
- 'tools/plugin-cli/**/*'
- 'tools/plugin-cli/**/*'
mod:workspace:
- changed-files:
- any-glob-to-any-file:
- 'packages/frontend/workspace/**/*'
mod:workspace: 'packages/frontend/workspace/**/*'
mod:i18n:
- changed-files:
- any-glob-to-any-file:
- 'packages/frontend/i18n/**/*'
mod:i18n: 'packages/frontend/i18n/**/*'
mod:env:
- changed-files:
- any-glob-to-any-file:
- 'packages/common/env/**/*'
mod:env: 'packages/common/env/**/*'
mod:hooks:
- changed-files:
- any-glob-to-any-file:
- 'packages/frontend/hooks/**/*'
mod:hooks: 'packages/frontend/hooks/**/*'
mod:component:
- changed-files:
- any-glob-to-any-file:
- 'packages/frontend/component/**/*'
mod:component: 'packages/frontend/component/**/*'
mod:storage:
- changed-files:
- any-glob-to-any-file:
- 'packages/backend/storage/**/*'
mod:storage: 'packages/backend/storage/**/*'
mod:native:
- changed-files:
- any-glob-to-any-file:
- 'packages/frontend/native/**/*'
mod:native: 'packages/frontend/native/**/*'
mod:store:
- changed-files:
- any-glob-to-any-file:
- '**/atoms/**/*'
- '**/atoms/**/*'
rust:
- changed-files:
- any-glob-to-any-file:
- '**/*.rs'
- '**/Cargo.toml'
- '**/Cargo.lock'
- '**/rust-toolchain'
- '**/rust-toolchain.toml'
- '**/rustfmt.toml'
- '**/*.rs'
- '**/Cargo.toml'
- '**/Cargo.lock'
- '**/rust-toolchain'
- '**/rust-toolchain.toml'
- '**/rustfmt.toml'
package:y-indexeddb:
- changed-files:
- any-glob-to-any-file:
- 'packages/common/y-indexeddb/**/*'
package:y-indexeddb: 'packages/common/y-indexeddb/**/*'
app:core:
- changed-files:
- any-glob-to-any-file:
- 'packages/frontend/core/**/*'
app:core: 'packages/frontend/core/**/*'
app:electron:
- changed-files:
- any-glob-to-any-file:
- 'packages/frontend/electron/**/*'
app:electron: 'packages/frontend/electron/**/*'
app:server:
- changed-files:
- any-glob-to-any-file:
- 'packages/backend/server/**/*'
app:server: 'packages/backend/server/**/*'

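The churn in this file is a schema change rather than a glob change: actions/labeler v5 nests globs under changed-files / any-glob-to-any-file, while v4 takes bare glob lists (the workflow hunk below pins labeler@v4 to match). Side by side for a single label:

# labeler v5 schema (the side being removed)
docs:
  - changed-files:
      - any-glob-to-any-file:
          - 'docs/**/*'

# labeler v4 schema (the side being kept)
docs:
  - 'docs/**/*'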
.github/renovate.json

@@ -1,53 +0,0 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": [
"config:base",
"group:allNonMajor",
":preserveSemverRanges",
":disablePeerDependencies"
],
"labels": ["dependencies"],
"packageRules": [
{
"matchPackageNames": ["napi", "napi-build", "napi-derive"],
"groupName": "napi-rs"
},
{
"matchPackagePatterns": ["^eslint", "^@typescript-eslint"],
"groupName": "linter"
},
{
"matchPackagePatterns": ["^@nestjs"],
"groupName": "nestjs"
},
{
"matchPackagePatterns": ["^@opentelemetry"],
"groupName": "opentelemetry"
},
{
"matchPackageNames": [
"@prisma/client",
"@prisma/instrumentation",
"prisma"
],
"groupName": "prisma"
},
{
"matchPackagePatterns": ["^@electron-forge"],
"groupName": "electron-forge"
},
{
"matchPackagePatterns": ["^@blocksuite"],
"excludePackageNames": ["@blocksuite/icons"],
"followTag": "nightly"
}
],
"commitMessagePrefix": "chore: ",
"commitMessageAction": "bump up",
"commitMessageTopic": "{{depName}} version",
"ignoreDeps": [],
"lockFileMaintenance": {
"enabled": true,
"extends": ["schedule:weekly"]
}
}


@@ -9,5 +9,4 @@ jobs:
pull-requests: write
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/labeler@v5
- uses: actions/labeler@v4

.github/workflows/build-desktop.yml

@@ -0,0 +1,190 @@
name: Build(Desktop) & Test
on:
push:
branches:
- canary
- v[0-9]+.[0-9]+.x-staging
- v[0-9]+.[0-9]+.x
paths-ignore:
- README.md
- .github/**
- '!.github/workflows/build-desktop.yml'
- '!.github/actions/build-rust/action.yml'
- '!.github/actions/setup-node/action.yml'
pull_request:
merge_group:
branches:
- canary
- v[0-9]+.[0-9]+.x-staging
- v[0-9]+.[0-9]+.x
paths-ignore:
- README.md
- .github/**
- '!.github/workflows/build-desktop.yml'
- '!.github/actions/build-rust/action.yml'
- '!.github/actions/setup-node/action.yml'
env:
DEBUG: napi:*
BUILD_TYPE: canary
APP_NAME: affine
COVERAGE: true
DISTRIBUTION: desktop
MACOSX_DEPLOYMENT_TARGET: '10.13'
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
jobs:
build-core:
name: Build @affine/core
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
- name: Build Core
run: yarn nx build @affine/core
- name: Upload core artifact
uses: actions/upload-artifact@v3
with:
name: core
path: ./packages/frontend/core/dist
if-no-files-found: error
build-native:
name: Build Native
runs-on: ubuntu-latest
needs: build-core
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build AFFiNE native
uses: ./.github/actions/build-rust
with:
target: x86_64-unknown-linux-gnu
package: '@affine/native'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Run tests
run: yarn test
working-directory: ./packages/frontend/native
desktop-test:
name: Desktop Test
runs-on: ${{ matrix.spec.os }}
strategy:
fail-fast: false
# all combinations: macos-latest x64, macos-latest arm64, windows-latest x64, ubuntu-latest x64
matrix:
spec:
- {
os: macos-latest,
platform: macos,
arch: x64,
target: x86_64-apple-darwin,
test: true,
}
- {
os: macos-latest,
platform: macos,
arch: arm64,
target: aarch64-apple-darwin,
test: false,
}
- {
os: ubuntu-latest,
platform: linux,
arch: x64,
target: x86_64-unknown-linux-gnu,
test: true,
}
- {
os: windows-latest,
platform: windows,
arch: x64,
target: x86_64-pc-windows-msvc,
test: true,
}
needs: build-core
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
timeout-minutes: 10
with:
extra-flags: workspaces focus @affine/electron @affine/monorepo @affine-test/affine-desktop
playwright-install: true
hard-link-nm: false
enableScripts: false
- name: Build AFFiNE native
uses: ./.github/actions/build-rust
with:
target: ${{ matrix.spec.target }}
package: '@affine/native'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Run unit tests
if: ${{ matrix.spec.test }}
shell: bash
run: yarn vitest
working-directory: packages/frontend/electron
- name: Download core artifact
uses: actions/download-artifact@v3
with:
name: core
path: packages/frontend/electron/resources/web-static
- name: Build Desktop Layers
run: yarn workspace @affine/electron build
- name: Run desktop tests
if: ${{ matrix.spec.test && matrix.spec.os == 'ubuntu-latest' }}
run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- yarn workspace @affine-test/affine-desktop e2e
env:
COVERAGE: true
- name: Run desktop tests
if: ${{ matrix.spec.test && matrix.spec.os != 'ubuntu-latest' }}
run: yarn workspace @affine-test/affine-desktop e2e
env:
COVERAGE: true
- name: Make bundle
if: ${{ matrix.spec.os == 'macos-latest' && matrix.spec.arch == 'arm64' }}
env:
SKIP_BUNDLE: true
SKIP_WEB_BUILD: true
HOIST_NODE_MODULES: 1
run: yarn workspace @affine/electron package --platform=darwin --arch=arm64
- name: Output check
if: ${{ matrix.spec.os == 'macos-latest' && matrix.spec.arch == 'arm64' }}
run: |
yarn workspace @affine/electron ts-node ./scripts/macos-arm64-output-check.ts
- name: Collect code coverage report
if: ${{ matrix.spec.test }}
run: yarn exec nyc report -t .nyc_output --report-dir .coverage --reporter=lcov
- name: Upload e2e test coverage results
if: ${{ matrix.spec.test }}
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./.coverage/lcov.info
flags: e2etest-${{ matrix.spec.os }}-${{ matrix.spec.arch }}
name: affine
fail_ci_if_error: false
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-${{ matrix.spec.os }}-${{ matrix.spec.arch }}
path: ./test-results
if-no-files-found: ignore

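Note the trigger block at the top of this workflow: paths-ignore lists .github/** but then re-includes specific files with a leading '!'. The intent is that documentation and unrelated CI changes skip this workflow, while edits to the workflow itself or to the composite actions it calls still trigger a run. Trimmed sketch of that pattern:

on:
  push:
    paths-ignore:
      - README.md
      - .github/**                                # skip CI-internal changes...
      - '!.github/workflows/build-desktop.yml'    # ...except this workflow itself
      - '!.github/actions/build-rust/action.yml'  # ...and the composite actions it uses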
.github/workflows/build-server.yml

@@ -0,0 +1,311 @@
name: Build(Server) & Test
on:
push:
branches:
- canary
- v[0-9]+.[0-9]+.x-staging
- v[0-9]+.[0-9]+.x
paths-ignore:
- README.md
- .github/**
- '!.github/workflows/build-server.yml'
- '!.github/actions/build-rust/action.yml'
- '!.github/actions/setup-node/action.yml'
pull_request:
merge_group:
branches:
- canary
- v[0-9]+.[0-9]+.x-staging
- v[0-9]+.[0-9]+.x
paths-ignore:
- README.md
- .github/**
- '!.github/workflows/build-server.yml'
- '!.github/actions/build-rust/action.yml'
- '!.github/actions/setup-node/action.yml'
env:
DEBUG: napi:*
BUILD_TYPE: canary
APP_NAME: affine
COVERAGE: true
DISTRIBUTION: browser
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
jobs:
build-storage:
name: Build Storage
runs-on: ubuntu-latest
env:
RUSTFLAGS: '-C debuginfo=1'
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
extra-flags: workspaces focus @affine/storage
electron-install: false
build-infra: false
build-plugins: false
- name: Build Rust
uses: ./.github/actions/build-rust
with:
target: 'x86_64-unknown-linux-gnu'
package: '@affine/storage'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Upload storage.node
uses: actions/upload-artifact@v3
with:
name: storage.node
path: ./packages/backend/storage/storage.node
if-no-files-found: error
server-test:
name: Server Test
runs-on: ubuntu-latest
needs: build-storage
services:
postgres:
image: postgres
env:
POSTGRES_PASSWORD: affine
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
mailer:
image: mailhog/mailhog
ports:
- 1025:1025
- 8025:8025
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
- name: Initialize database
run: |
psql -h localhost -U postgres -c "CREATE DATABASE affine;"
psql -h localhost -U postgres -c "CREATE USER affine WITH PASSWORD 'affine';"
psql -h localhost -U postgres -c "ALTER USER affine WITH SUPERUSER;"
env:
PGPASSWORD: affine
- name: Generate prisma client
run: |
yarn workspace @affine/server exec prisma generate
yarn workspace @affine/server exec prisma db push
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Run init-db script
run: yarn workspace @affine/server exec ts-node ./scripts/init-db.ts
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Download storage.node
uses: actions/download-artifact@v3
with:
name: storage.node
path: ./packages/backend/server
- name: Run server tests
run: yarn workspace @affine/server test:coverage
env:
CARGO_TARGET_DIR: '${{ github.workspace }}/target'
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Upload server test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./packages/backend/server/.coverage/lcov.info
flags: server-test
name: affine
fail_ci_if_error: false
server-e2e-test:
name: Server E2E Test
runs-on: ubuntu-latest
needs: build-storage
services:
postgres:
image: postgres
env:
POSTGRES_PASSWORD: affine
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
mailer:
image: mailhog/mailhog
ports:
- 1025:1025
- 8025:8025
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
- name: Initialize database
run: |
psql -h localhost -U postgres -c "CREATE DATABASE affine;"
psql -h localhost -U postgres -c "CREATE USER affine WITH PASSWORD 'affine';"
psql -h localhost -U postgres -c "ALTER USER affine WITH SUPERUSER;"
env:
PGPASSWORD: affine
- name: Generate prisma client
run: |
yarn workspace @affine/server exec prisma generate
yarn workspace @affine/server exec prisma db push
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Run init-db script
run: yarn workspace @affine/server exec ts-node ./scripts/init-db.ts
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Download storage.node
uses: actions/download-artifact@v3
with:
name: storage.node
path: ./packages/backend/server
- name: Run playwright tests
run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- yarn workspace @affine-test/affine-cloud e2e --forbid-only
env:
COVERAGE: true
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Collect code coverage report
run: yarn exec nyc report -t .nyc_output --report-dir .coverage --reporter=lcov
- name: Upload e2e test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./.coverage/lcov.info
flags: server-e2etest
name: affine
fail_ci_if_error: false
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-server
path: ./tests/affine-cloud/test-results
if-no-files-found: ignore
server-desktop-e2e-test:
name: Server Desktop E2E Test
runs-on: ubuntu-latest
needs: build-storage
services:
postgres:
image: postgres
env:
POSTGRES_PASSWORD: affine
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
mailer:
image: mailhog/mailhog
ports:
- 1025:1025
- 8025:8025
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
hard-link-nm: false
- name: Build AFFiNE native
uses: ./.github/actions/build-rust
with:
target: x86_64-unknown-linux-gnu
package: '@affine/native'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Initialize database
run: |
psql -h localhost -U postgres -c "CREATE DATABASE affine;"
psql -h localhost -U postgres -c "CREATE USER affine WITH PASSWORD 'affine';"
psql -h localhost -U postgres -c "ALTER USER affine WITH SUPERUSER;"
env:
PGPASSWORD: affine
- name: Generate prisma client
run: |
yarn workspace @affine/server exec prisma generate
yarn workspace @affine/server prisma db push
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Run init-db script
run: yarn workspace @affine/server exec ts-node ./scripts/init-db.ts
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Download storage.node
uses: actions/download-artifact@v3
with:
name: storage.node
path: ./packages/backend/server
- name: Build Plugins
run: yarn run build:plugins
- name: Build Desktop Layers
run: yarn workspace @affine/electron build:dev
- name: Run playwright tests
run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" yarn workspace @affine-test/affine-desktop-cloud e2e
env:
COVERAGE: true
DEV_SERVER_URL: http://localhost:8080
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
ENABLE_LOCAL_EMAIL: true
- name: Collect code coverage report
run: yarn exec nyc report -t .nyc_output --report-dir .coverage --reporter=lcov
- name: Upload e2e test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./.coverage/lcov.info
flags: server-e2etest
name: affine
fail_ci_if_error: false
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-server
path: ./tests/affine-cloud/test-results
if-no-files-found: ignore

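All three jobs in this workflow repeat one database bootstrap: a postgres service container gated on pg_isready, role/database creation over psql, then Prisma client generation and schema push against the same DATABASE_URL (the mailhog service supplies SMTP on port 1025 and a web UI on 8025 for the email tests). Condensed sketch of the recurring shape:

services:
  postgres:
    image: postgres
    env:
      POSTGRES_PASSWORD: affine
    # the job waits until pg_isready reports healthy
    options: >-
      --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
    ports:
      - 5432:5432
steps:
  - name: Initialize database
    run: psql -h localhost -U postgres -c "CREATE DATABASE affine;"
    env:
      PGPASSWORD: affine
  - name: Push Prisma schema
    run: yarn workspace @affine/server exec prisma db push
    env:
      DATABASE_URL: postgresql://affine:affine@localhost:5432/affine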

@@ -1,599 +0,0 @@
name: Build & Test
on:
push:
branches:
- canary
- v[0-9]+.[0-9]+.x-staging
- v[0-9]+.[0-9]+.x
paths-ignore:
- README.md
pull_request:
env:
DEBUG: napi:*
BUILD_TYPE: canary
APP_NAME: affine
AFFINE_ENV: dev
COVERAGE: true
MACOSX_DEPLOYMENT_TARGET: '10.13'
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
PLAYWRIGHT_BROWSERS_PATH: ${{ github.workspace }}/node_modules/.cache/ms-playwright
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: ['javascript', 'typescript']
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- name: Checkout repository
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# If the Autobuild fails above, remove it, uncomment the following three lines, and
# modify them (or add more) to build your code if your project requires it; please refer to the EXAMPLE below for guidance.
# - run: |
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
lint:
name: Lint
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Run oxlint
# oxlint is fast, so obviously broken code fails quickly
run: yarn dlx $(node -e "console.log(require('./package.json').scripts['lint:ox'])")
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
full-cache: true
- name: Run i18n codegen
run: yarn i18n-codegen gen
- name: Run ESLint
run: yarn lint:eslint --max-warnings=0
- name: Run Prettier
# Setting nmMode in `actions/setup-node` modifies the .yarnrc.yml
run: |
git checkout .yarnrc.yml
yarn lint:prettier
- name: Run Type Check
run: yarn typecheck
check-yarn-binary:
name: Check yarn binary
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Run check
run: |
yarn set version $(node -e "console.log(require('./package.json').packageManager.split('@')[1])")
git diff --exit-code
e2e-plugin-test:
name: E2E Plugin Test
runs-on: ubuntu-latest
env:
DISTRIBUTION: browser
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
electron-install: false
full-cache: true
- name: Run playwright tests
run: yarn e2e --forbid-only
working-directory: tests/affine-plugin
env:
COVERAGE: true
- name: Collect code coverage report
run: yarn exec nyc report -t .nyc_output --report-dir .coverage --reporter=lcov
- name: Upload e2e test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./.coverage/lcov.info
flags: e2e-plugin-test
name: affine
fail_ci_if_error: false
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-plugin
path: ./test-results
if-no-files-found: ignore
e2e-test:
name: E2E Test
runs-on: ubuntu-latest
env:
DISTRIBUTION: browser
strategy:
fail-fast: false
matrix:
shard: [1, 2, 3, 4, 5]
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
electron-install: false
full-cache: true
- name: Run playwright tests
run: yarn workspace @affine-test/affine-local e2e --forbid-only --shard=${{ matrix.shard }}/${{ strategy.job-total }}
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-${{ matrix.shard }}
path: ./test-results
if-no-files-found: ignore
e2e-migration-test:
name: E2E Migration Test
runs-on: ubuntu-latest
env:
DISTRIBUTION: browser
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
electron-install: false
full-cache: true
- name: Run playwright tests
run: yarn workspace @affine-test/affine-migration e2e --forbid-only
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-migration
path: ./tests/affine-migration/test-results
if-no-files-found: ignore
unit-test:
name: Unit Test
runs-on: ubuntu-latest
needs:
- build-native
env:
DISTRIBUTION: browser
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
full-cache: true
- name: Download affine.linux-x64-gnu.node
uses: actions/download-artifact@v3
with:
name: affine.linux-x64-gnu.node
path: ./packages/frontend/native
- name: Unit Test
run: yarn nx test:coverage @affine/monorepo
- name: Upload unit test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./.coverage/store/lcov.info
flags: unittest
name: affine
fail_ci_if_error: false
build-native:
name: Build AFFiNE native (${{ matrix.spec.target }})
runs-on: ${{ matrix.spec.os }}
env:
CARGO_PROFILE_RELEASE_DEBUG: '1'
strategy:
fail-fast: false
matrix:
spec:
- { os: ubuntu-latest, target: x86_64-unknown-linux-gnu }
- { os: windows-latest, target: x86_64-pc-windows-msvc }
- { os: macos-latest, target: x86_64-apple-darwin }
- { os: macos-latest, target: aarch64-apple-darwin }
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
extra-flags: workspaces focus @affine/native
electron-install: false
build-infra: false
build-plugins: false
- name: Setup filename
id: filename
shell: bash
run: |
export PLATFORM_ARCH_ABI=$(node -e "console.log(require('@napi-rs/cli').parseTriple('${{ matrix.spec.target }}').platformArchABI)")
echo "filename=affine.$PLATFORM_ARCH_ABI.node" >> "$GITHUB_OUTPUT"
- name: Build AFFiNE native
uses: ./.github/actions/build-rust
with:
target: ${{ matrix.spec.target }}
package: '@affine/native'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Upload ${{ steps.filename.outputs.filename }}
uses: actions/upload-artifact@v3
with:
name: ${{ steps.filename.outputs.filename }}
path: ./packages/frontend/native/${{ steps.filename.outputs.filename }}
if-no-files-found: error
build-storage:
name: Build Storage
runs-on: ubuntu-latest
env:
CARGO_PROFILE_RELEASE_DEBUG: '1'
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
extra-flags: workspaces focus @affine/storage
electron-install: false
build-infra: false
build-plugins: false
- name: Build Rust
uses: ./.github/actions/build-rust
with:
target: 'x86_64-unknown-linux-gnu'
package: '@affine/storage'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Upload storage.node
uses: actions/upload-artifact@v3
with:
name: storage.node
path: ./packages/backend/storage/storage.node
if-no-files-found: error
build-core:
name: Build @affine/core
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
build-plugins: false
full-cache: true
- name: Build Core
# always skip the cache: the build is fast, and the cache configuration is always changing
run: yarn nx build @affine/core --skip-nx-cache
- name: zip core
run: tar -czf dist.tar.gz --directory=packages/frontend/core/dist .
- name: Upload core artifact
uses: actions/upload-artifact@v3
with:
name: core
path: dist.tar.gz
if-no-files-found: error
server-test:
name: Server Test
runs-on: ubuntu-latest
needs: build-storage
env:
DISTRIBUTION: browser
services:
postgres:
image: postgres
env:
POSTGRES_PASSWORD: affine
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
mailer:
image: mailhog/mailhog
ports:
- 1025:1025
- 8025:8025
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
full-cache: true
- name: Initialize database
run: |
psql -h localhost -U postgres -c "CREATE DATABASE affine;"
psql -h localhost -U postgres -c "CREATE USER affine WITH PASSWORD 'affine';"
psql -h localhost -U postgres -c "ALTER USER affine WITH SUPERUSER;"
env:
PGPASSWORD: affine
- name: Generate prisma client
run: |
yarn workspace @affine/server exec prisma generate
yarn workspace @affine/server exec prisma db push
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Run init-db script
run: yarn workspace @affine/server exec node --loader ts-node/esm/transpile-only ./scripts/init-db.ts
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Download storage.node
uses: actions/download-artifact@v3
with:
name: storage.node
path: ./packages/backend/server
- name: Run server tests
run: yarn workspace @affine/server test:coverage
env:
CARGO_TARGET_DIR: '${{ github.workspace }}/target'
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Upload server test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./packages/backend/server/.coverage/lcov.info
flags: server-test
name: affine
fail_ci_if_error: false
server-e2e-test:
name: ${{ matrix.tests.name }}
runs-on: ubuntu-latest
env:
DISTRIBUTION: browser
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
strategy:
fail-fast: false
matrix:
tests:
- name: 'Server E2E Test 1/3'
script: yarn workspace @affine-test/affine-cloud e2e --forbid-only --shard=1/3
- name: 'Server E2E Test 2/3'
script: yarn workspace @affine-test/affine-cloud e2e --forbid-only --shard=2/3
- name: 'Server E2E Test 3/3'
script: yarn workspace @affine-test/affine-cloud e2e --forbid-only --shard=3/3
- name: 'Server Desktop E2E Test'
script: |
yarn workspace @affine/electron build:dev
xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- yarn workspace @affine-test/affine-desktop-cloud e2e
needs:
- build-storage
- build-native
services:
postgres:
image: postgres
env:
POSTGRES_PASSWORD: affine
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
mailer:
image: mailhog/mailhog
ports:
- 1025:1025
- 8025:8025
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
hard-link-nm: false
- name: Initialize database
run: |
psql -h localhost -U postgres -c "CREATE DATABASE affine;"
psql -h localhost -U postgres -c "CREATE USER affine WITH PASSWORD 'affine';"
psql -h localhost -U postgres -c "ALTER USER affine WITH SUPERUSER;"
env:
PGPASSWORD: affine
- name: Generate prisma client
run: |
yarn workspace @affine/server exec prisma generate
yarn workspace @affine/server exec prisma db push
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Run init-db script
run: yarn workspace @affine/server exec node --loader ts-node/esm/transpile-only ./scripts/init-db.ts
- name: Download storage.node
uses: actions/download-artifact@v3
with:
name: storage.node
path: ./packages/backend/server
- name: Download affine.linux-x64-gnu.node
uses: actions/download-artifact@v3
with:
name: affine.linux-x64-gnu.node
path: ./packages/frontend/native
- name: ${{ matrix.tests.name }}
run: |
${{ matrix.tests.script }}
env:
DEV_SERVER_URL: http://localhost:8080
ENABLE_LOCAL_EMAIL: true
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-server
path: ./tests/affine-cloud/test-results
if-no-files-found: ignore
desktop-test:
name: Desktop Test (${{ matrix.spec.os }}, ${{ matrix.spec.platform }}, ${{ matrix.spec.arch }}, ${{ matrix.spec.target }}, ${{ matrix.spec.test }})
runs-on: ${{ matrix.spec.os }}
strategy:
fail-fast: false
# all combinations: macos-latest x64, macos-latest arm64, windows-latest x64, ubuntu-latest x64
matrix:
spec:
- {
os: macos-latest,
platform: macos,
arch: x64,
target: x86_64-apple-darwin,
test: true,
}
- {
os: macos-latest,
platform: macos,
arch: arm64,
target: aarch64-apple-darwin,
test: false,
}
- {
os: ubuntu-latest,
platform: linux,
arch: x64,
target: x86_64-unknown-linux-gnu,
test: true,
}
- {
os: windows-latest,
platform: windows,
arch: x64,
target: x86_64-pc-windows-msvc,
test: true,
}
needs:
- build-core
- build-native
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
timeout-minutes: 10
with:
extra-flags: workspaces focus @affine/electron @affine/monorepo @affine-test/affine-desktop
playwright-install: true
hard-link-nm: false
enableScripts: false
- name: Setup filename
id: filename
shell: bash
run: |
export PLATFORM_ARCH_ABI=$(node -e "console.log(require('@napi-rs/cli').parseTriple('${{ matrix.spec.target }}').platformArchABI)")
echo "filename=affine.$PLATFORM_ARCH_ABI.node" >> "$GITHUB_OUTPUT"
- name: Download ${{ steps.filename.outputs.filename }}
uses: actions/download-artifact@v3
with:
name: ${{ steps.filename.outputs.filename }}
path: ./packages/frontend/native
- name: Run unit tests
if: ${{ matrix.spec.test }}
shell: bash
run: yarn vitest
working-directory: packages/frontend/electron
- name: Download core artifact
uses: ./.github/actions/download-core
with:
path: packages/frontend/electron/resources/web-static
- name: Build Desktop Layers
run: yarn workspace @affine/electron build
- name: Run desktop tests
if: ${{ matrix.spec.test && matrix.spec.os == 'ubuntu-latest' }}
run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- yarn workspace @affine-test/affine-desktop e2e
- name: Run desktop tests
if: ${{ matrix.spec.test && matrix.spec.os != 'ubuntu-latest' }}
run: yarn workspace @affine-test/affine-desktop e2e
- name: Make bundle
if: ${{ matrix.spec.os == 'macos-latest' && matrix.spec.arch == 'arm64' }}
env:
SKIP_BUNDLE: true
SKIP_WEB_BUILD: true
HOIST_NODE_MODULES: 1
run: yarn workspace @affine/electron package --platform=darwin --arch=arm64
- name: Output check
if: ${{ matrix.spec.os == 'macos-latest' && matrix.spec.arch == 'arm64' }}
run: |
yarn workspace @affine/electron exec node --loader ts-node/esm/transpile-only ./scripts/macos-arm64-output-check.ts
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-${{ matrix.spec.os }}-${{ matrix.spec.arch }}
path: ./test-results
if-no-files-found: ignore

.github/workflows/build.yml

@@ -0,0 +1,201 @@
name: Build & Test
on:
push:
branches:
- canary
- v[0-9]+.[0-9]+.x-staging
- v[0-9]+.[0-9]+.x
paths-ignore:
- README.md
- .github/**
- '!.github/workflows/build.yml'
- '!.github/actions/build-rust/action.yml'
- '!.github/actions/setup-node/action.yml'
pull_request:
merge_group:
branches:
- canary
- v[0-9]+.[0-9]+.x-staging
- v[0-9]+.[0-9]+.x
paths-ignore:
- README.md
- .github/**
- '!.github/workflows/build.yml'
- '!.github/actions/build-rust/action.yml'
- '!.github/actions/setup-node/action.yml'
env:
DEBUG: napi:*
BUILD_TYPE: canary
APP_NAME: affine
AFFINE_ENV: dev
COVERAGE: true
DISTRIBUTION: browser
MACOSX_DEPLOYMENT_TARGET: '10.13'
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
jobs:
lint:
name: Lint
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Run oxlint
# oxlint is fast, so obviously broken code fails quickly
run: yarn dlx oxlint@latest .
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
- name: Run i18n codegen
run: yarn i18n-codegen gen
- name: Run ESLint
run: yarn lint:eslint --max-warnings=0
- name: Run Prettier
# Setting nmMode in `actions/setup-node` modifies the .yarnrc.yml
run: |
git checkout .yarnrc.yml
yarn lint:prettier
- name: Run circular
run: yarn circular
- name: Run Type Check
run: yarn typecheck
check-yarn-binary:
name: Check yarn binary
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Run check
run: |
yarn set version $(node -e "console.log(require('./package.json').packageManager.split('@')[1])")
git diff --exit-code
e2e-plugin-test:
name: E2E Plugin Test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
electron-install: false
- name: Run playwright tests
run: yarn e2e --forbid-only
working-directory: tests/affine-plugin
env:
COVERAGE: true
- name: Collect code coverage report
run: yarn exec nyc report -t .nyc_output --report-dir .coverage --reporter=lcov
- name: Upload e2e test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./.coverage/lcov.info
flags: e2e-plugin-test
name: affine
fail_ci_if_error: false
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-plugin
path: ./test-results
if-no-files-found: ignore
e2e-test:
name: E2E Test
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
shard: [1, 2, 3, 4, 5]
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
electron-install: false
- name: Run playwright tests
run: yarn e2e --forbid-only --shard=${{ matrix.shard }}/${{ strategy.job-total }}
working-directory: tests/affine-local
env:
COVERAGE: true
- name: Collect code coverage report
run: yarn exec nyc report -t .nyc_output --report-dir .coverage --reporter=lcov
- name: Upload e2e test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./.coverage/lcov.info
flags: e2etest
name: affine
fail_ci_if_error: false
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-${{ matrix.shard }}
path: ./test-results
if-no-files-found: ignore
e2e-migration-test:
name: E2E Migration Test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
electron-install: false
- name: Run playwright tests
run: yarn workspace @affine-test/affine-migration e2e --forbid-only
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-migration
path: ./tests/affine-migration/test-results
if-no-files-found: ignore
unit-test:
name: Unit Test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
- name: Build AFFiNE native
uses: ./.github/actions/build-rust
with:
target: x86_64-unknown-linux-gnu
package: '@affine/native'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Unit Test
run: yarn nx test:coverage @affine/monorepo
- name: Upload unit test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./.coverage/store/lcov.info
flags: unittest
name: affine
fail_ci_if_error: false

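The e2e-test job above fans Playwright out over five parallel runners: the matrix spawns one job per shard value and strategy.job-total supplies the denominator, so each job runs a disjoint fifth of the suite. Trimmed sketch:

strategy:
  fail-fast: false
  matrix:
    shard: [1, 2, 3, 4, 5]
steps:
  # expands to e.g. --shard=3/5 in the third job
  - run: yarn e2e --forbid-only --shard=${{ matrix.shard }}/${{ strategy.job-total }}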
.github/workflows/cache-cleanup.yml

@@ -0,0 +1,36 @@
# https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#force-deleting-cache-entries
name: Cleanup caches for closed branches
on:
pull_request:
types:
- closed
workflow_dispatch:
jobs:
cleanup:
runs-on: ubuntu-latest
steps:
- name: Check out code
uses: actions/checkout@v4
- name: Cleanup
run: |
gh extension install actions/gh-actions-cache
REPO=${{ github.repository }}
BRANCH="refs/pull/${{ github.event.pull_request.number }}/merge"
echo "Fetching list of cache key"
cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH | cut -f 1 )
## Setting this to not fail the workflow while deleting cache keys.
set +e
echo "Deleting caches..."
for cacheKey in $cacheKeysForPR
do
gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
done
echo "Done"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

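The BRANCH value is the load-bearing detail here: caches created by pull_request runs are scoped to the PR's merge ref, refs/pull/<number>/merge, so that is the ref the cleanup must target. A one-step sketch of inspecting such a ref by hand (the PR number is illustrative):

- name: List caches for one PR ref
  run: gh actions-cache list -R toeverything/AFFiNE -B refs/pull/1234/merge
  env:
    GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}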
.github/workflows/cancel.yml

@@ -0,0 +1,18 @@
name: Cancel
on:
pull_request_target:
types:
- edited
- synchronize
jobs:
cancel:
name: 'Cancel Previous Runs'
runs-on: ubuntu-latest
timeout-minutes: 2
steps:
- uses: styfle/cancel-workflow-action@0.12.0
with:
# See https://api.github.com/repos/toeverything/AFFiNE/actions/workflows
workflow_id: 44038251, 61883931, 65188160, 66789140
access_token: ${{ github.token }}

.github/workflows/codeql.yml

@@ -0,0 +1,70 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: 'CodeQL'
on:
push:
branches: [canary]
pull_request:
merge_group:
# The branches below must be a subset of the branches above
branches: [canary]
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: ['javascript']
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- name: Checkout repository
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# If the Autobuild fails above, remove it, uncomment the following three lines, and
# modify them (or add more) to build your code if your project requires it; please refer to the EXAMPLE below for guidance.
# - run: |
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2


@@ -9,6 +9,7 @@ on:
default: canary
env:
BUILD_TYPE: canary
APP_NAME: affine
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
@@ -16,6 +17,7 @@ jobs:
build-server:
name: Build Server
runs-on: ubuntu-latest
environment: ${{ github.event.inputs.flavor }}
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
@@ -41,12 +43,12 @@ jobs:
- name: Build Plugins
run: yarn run build:plugins
- name: Build Core
run: yarn nx build @affine/core --skip-nx-cache
run: yarn nx build @affine/core
env:
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
BUILD_TYPE: ${{ github.event.inputs.flavor }}
BUILD_TYPE_OVERRIDE: ${{ github.event.inputs.flavor }}
SHOULD_REPORT_TRACE: true
TRACE_REPORT_ENDPOINT: ${{ secrets.TRACE_REPORT_ENDPOINT }}
CAPTCHA_SITE_KEY: ${{ secrets.CAPTCHA_SITE_KEY }}
@@ -69,7 +71,7 @@ jobs:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Rust
- name: Setup Rust
uses: ./.github/actions/build-rust
with:
target: 'x86_64-unknown-linux-gnu'
@@ -90,7 +92,7 @@ jobs:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Rust
- name: Setup Rust
uses: ./.github/actions/build-rust
with:
target: 'aarch64-unknown-linux-gnu'
@@ -207,13 +209,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: setup deploy version
id: version
run: |
export APP_VERSION=`node -e "console.log(require('./package.json').version)"`
echo $APP_VERSION
echo "APP_VERSION=$APP_VERSION" >> "$GITHUB_OUTPUT"
- name: Deploy to ${{ github.event.inputs.flavor }}
- name: Deploy to dev
uses: ./.github/actions/deploy
with:
build-type: ${{ github.event.inputs.flavor }}
@@ -223,7 +219,6 @@ jobs:
cluster-name: ${{ secrets.GCP_CLUSTER_NAME }}
cluster-location: ${{ secrets.GCP_CLUSTER_LOCATION }}
env:
APP_VERSION: ${{ steps.version.outputs.APP_VERSION }}
DEPLOY_HOST: ${{ secrets.DEPLOY_HOST }}
CANARY_DEPLOY_HOST: ${{ secrets.CANARY_DEPLOY_HOST }}
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}


@@ -65,15 +65,14 @@ jobs:
- name: Replace Version
run: ./scripts/set-version.sh ${{ needs.set-build-version.outputs.version }}
- name: generate-assets
run: yarn workspace @affine/electron generate-assets
working-directory: packages/frontend/electron
run: yarn generate-assets
env:
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
RELEASE_VERSION: ${{ needs.set-build-version.outputs.version }}
SKIP_PLUGIN_BUILD: 'true'
SKIP_NX_CACHE: 'true'
- name: Upload core artifact
uses: actions/upload-artifact@v3
@@ -231,11 +230,6 @@ jobs:
node ./packages/frontend/electron/scripts/generate-yml.js
env:
RELEASE_VERSION: ${{ needs.set-build-version.outputs.version }}
- name: Generate SHA512 checksums
run: |
sha512sum *-linux-* > SHA512SUMS.txt
sha512sum *-macos-* >> SHA512SUMS.txt
sha512sum *-windows-* >> SHA512SUMS.txt
- name: Create Release Draft
uses: softprops/action-gh-release@v1
env:
@@ -246,7 +240,6 @@ jobs:
tag_name: ${{ needs.set-build-version.outputs.version }}
prerelease: true
files: |
./SHA512SUMS.txt
./VERSION
./*.zip
./*.dmg

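The removed step above concatenated per-platform SHA-512 sums into a single SHA512SUMS.txt attached to the release draft. One manifest lets a user verify whichever installer they downloaded, roughly as follows (a sketch, not part of this diff; --ignore-missing skips manifest entries for files not present locally):

- name: Verify a downloaded installer against the manifest
  run: sha512sum --check --ignore-missing SHA512SUMS.txt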

@@ -19,10 +19,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
uses: ./.github/actions/setup-node
with:
cache: 'yarn'
node-version-file: '.nvmrc'
- name: Install dependencies
run: yarn workspaces focus @affine/commitlint-config
- run: echo "${{ github.event.pull_request.title }}" | yarn workspace @affine/commitlint-config commitlint -g ./.commitlintrc.json
electron-install: false
- run: echo "${{ github.event.pull_request.title }}" | yarn dlx commitlint -g ./.commitlintrc.json


@@ -69,7 +69,6 @@ jobs:
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
RELEASE_VERSION: ${{ github.event.inputs.version || steps.get-canary-version.outputs.RELEASE_VERSION }}
SKIP_PLUGIN_BUILD: 'true'
SKIP_NX_CACHE: 'true'
- name: Upload core artifact
uses: actions/upload-artifact@v3

.github/workflows/release.yml

@@ -0,0 +1,165 @@
name: Release
on:
push:
branches:
- canary
env:
BUILD_TYPE: stable
APP_NAME: affine
COVERAGE: false
DISTRIBUTION: browser
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
jobs:
release:
name: Try publishing npm@latest release
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Try publishing to NPM
run: ./scripts/publish.sh
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
build-core:
name: Build @affine/core
runs-on: ubuntu-latest
environment: development
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Plugins
run: yarn run build:plugins
- name: Build Core
run: yarn nx build @affine/core
- name: Upload core artifact
uses: actions/upload-artifact@v3
with:
name: core
path: ./packages/frontend/core/dist
if-no-files-found: error
build-server:
name: Build Server
runs-on: ubuntu-latest
environment: development
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
- name: Build Server
run: yarn nx build @affine/server
- name: Upload server dist
uses: actions/upload-artifact@v3
with:
name: server-dist
path: ./packages/backend/server/dist
if-no-files-found: error
build-storage:
name: Build Storage
runs-on: ubuntu-latest
env:
RUSTFLAGS: '-C debuginfo=1'
environment: development
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Setup Rust
uses: ./.github/actions/build-rust
with:
target: 'x86_64-unknown-linux-gnu'
package: '@affine/storage'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Upload storage.node
uses: actions/upload-artifact@v3
with:
name: storage.node
path: ./packages/backend/storage/storage.node
if-no-files-found: error
build-docker:
if: github.ref == 'refs/heads/canary'
name: Build Docker
runs-on: ubuntu-latest
needs:
- build-server
- build-core
- build-storage
steps:
- uses: actions/checkout@v4
- name: Download core artifact
uses: actions/download-artifact@v3
with:
name: core
path: ./packages/frontend/core/dist
- name: Download server dist
uses: actions/download-artifact@v3
with:
name: server-dist
path: ./packages/backend/server/dist
- name: Download storage.node
uses: actions/download-artifact@v3
with:
name: storage.node
path: ./packages/backend/server
- name: Setup Git short hash
run: |
echo "GIT_SHORT_HASH=$(git rev-parse --short HEAD)" >> "$GITHUB_ENV"
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
logout: false
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build front Dockerfile
uses: docker/build-push-action@v5
with:
context: .
push: true
pull: true
platforms: linux/amd64,linux/arm64
provenance: true
file: .github/deployment/front/Dockerfile
tags: ghcr.io/toeverything/affine-front:${{ env.GIT_SHORT_HASH }},ghcr.io/toeverything/affine-front:latest
# setup node without cache configuration
# Prisma cache is not compatible with docker build cache
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version-file: '.nvmrc'
registry-url: https://npm.pkg.github.com
scope: '@toeverything'
- name: Install Node.js dependencies
run: yarn workspaces focus @affine/server --production
- name: Generate Prisma client
run: yarn workspace @affine/server prisma generate
- name: Build graphql Dockerfile
uses: docker/build-push-action@v5
with:
context: .
push: true
pull: true
platforms: linux/amd64,linux/arm64
provenance: true
file: .github/deployment/node/Dockerfile
tags: ghcr.io/toeverything/affine-graphql:${{ env.GIT_SHORT_HASH }},ghcr.io/toeverything/affine-graphql:latest

View File

@@ -1,4 +1,23 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
yarn lint-staged && yarn lint:ox
# check lockfile is up to date
yarn install --mode=skip-build --inline-builds --immutable
# build infra code
yarn -T run build:infra
# generate prisma client type
yarn workspace @affine/server prisma generate
# generate i18n
yarn i18n-codegen gen
# lint staged files
yarn exec lint-staged
# type check
yarn typecheck
# circular dependency check
yarn circular

View File

@@ -15,7 +15,6 @@ packages/backend/server/src/schema.gql
packages/frontend/i18n/src/i18n-generated.ts
packages/frontend/graphql/src/graphql/index.ts
tests/affine-legacy/**/static
.yarnrc.yml
# auto-generated by NAPI-RS
# fixme(@joooye34): need script to check and generate ignore list here

File diff suppressed because one or more lines are too long

View File

@@ -6,10 +6,10 @@ nmMode: hardlinks-local
nodeLinker: node-modules
npmAuthToken: "${NPM_TOKEN:-NONE}"
npmAuthToken: '${NPM_TOKEN:-NONE}'
npmPublishAccess: public
npmPublishRegistry: "https://registry.npmjs.org"
npmPublishRegistry: 'https://registry.npmjs.org'
yarnPath: .yarn/releases/yarn-4.0.2.cjs
yarnPath: .yarn/releases/yarn-4.0.1.cjs

144
Cargo.lock generated
View File

@@ -83,15 +83,14 @@ dependencies = [
[[package]]
name = "ahash"
version = "0.8.6"
version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a"
checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f"
dependencies = [
"cfg-if",
"getrandom",
"once_cell",
"version_check",
"zerocopy",
]
[[package]]
@@ -241,16 +240,6 @@ dependencies = [
"num-traits",
]
[[package]]
name = "atomic-write-file"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c232177ba50b16fe7a4588495bd474a62a9e45a8e4ca6fd7d0b7ac29d164631e"
dependencies = [
"nix",
"rand",
]
[[package]]
name = "autocfg"
version = "1.1.0"
@@ -952,7 +941,7 @@ version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e"
dependencies = [
"ahash 0.8.6",
"ahash 0.8.3",
]
[[package]]
@@ -961,7 +950,7 @@ version = "0.14.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f93e7192158dbcda357bdec5fb5788eebf8bbac027f3f33e719d29135ae84156"
dependencies = [
"ahash 0.8.6",
"ahash 0.8.3",
"allocator-api2",
]
@@ -1123,7 +1112,7 @@ dependencies = [
[[package]]
name = "jwst-codec"
version = "0.1.0"
source = "git+https://github.com/toeverything/OctoBase.git?rev=49a6b7a#49a6b7af25ce1fe54e8383e10980e9536821d286"
source = "git+https://github.com/toeverything/OctoBase.git?rev=aad9e5b#aad9e5b7e9d6f479e6cf7555f5845bbbaaadbc66"
dependencies = [
"arbitrary",
"bitvec",
@@ -1144,7 +1133,7 @@ dependencies = [
[[package]]
name = "jwst-core"
version = "0.1.0"
source = "git+https://github.com/toeverything/OctoBase.git?rev=49a6b7a#49a6b7af25ce1fe54e8383e10980e9536821d286"
source = "git+https://github.com/toeverything/OctoBase.git?rev=aad9e5b#aad9e5b7e9d6f479e6cf7555f5845bbbaaadbc66"
dependencies = [
"async-trait",
"base64",
@@ -1162,7 +1151,7 @@ dependencies = [
[[package]]
name = "jwst-logger"
version = "0.1.0"
source = "git+https://github.com/toeverything/OctoBase.git?rev=49a6b7a#49a6b7af25ce1fe54e8383e10980e9536821d286"
source = "git+https://github.com/toeverything/OctoBase.git?rev=aad9e5b#aad9e5b7e9d6f479e6cf7555f5845bbbaaadbc66"
dependencies = [
"chrono",
"nu-ansi-term 0.49.0",
@@ -1175,7 +1164,7 @@ dependencies = [
[[package]]
name = "jwst-storage"
version = "0.1.0"
source = "git+https://github.com/toeverything/OctoBase.git?rev=49a6b7a#49a6b7af25ce1fe54e8383e10980e9536821d286"
source = "git+https://github.com/toeverything/OctoBase.git?rev=aad9e5b#aad9e5b7e9d6f479e6cf7555f5845bbbaaadbc66"
dependencies = [
"anyhow",
"async-trait",
@@ -1200,7 +1189,7 @@ dependencies = [
[[package]]
name = "jwst-storage-migration"
version = "0.1.0"
source = "git+https://github.com/toeverything/OctoBase.git?rev=49a6b7a#49a6b7af25ce1fe54e8383e10980e9536821d286"
source = "git+https://github.com/toeverything/OctoBase.git?rev=aad9e5b#aad9e5b7e9d6f479e6cf7555f5845bbbaaadbc66"
dependencies = [
"sea-orm-migration",
"tokio",
@@ -1268,9 +1257,9 @@ checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058"
[[package]]
name = "libsqlite3-sys"
version = "0.27.0"
version = "0.26.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf4e226dcd58b4be396f7bd3c20da8fdee2911400705297ba7d2d7cc2c30f716"
checksum = "afc22eff61b133b115c6e8c74e818c628d6d5e7a502afea6f64dee076dd94326"
dependencies = [
"cc",
"pkg-config",
@@ -1348,15 +1337,6 @@ version = "2.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167"
[[package]]
name = "memoffset"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4"
dependencies = [
"autocfg",
]
[[package]]
name = "minimal-lexical"
version = "0.2.1"
@@ -1395,9 +1375,9 @@ dependencies = [
[[package]]
name = "napi"
version = "2.14.1"
version = "2.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1133249c46e92da921bafc8aba4912bf84d6c475f7625183772ed2d0844dc3a7"
checksum = "f9d90182620f32fe34b6ac9b52cba898af26e94c7f5abc01eb4094c417ae2e6c"
dependencies = [
"anyhow",
"bitflags 2.4.1",
@@ -1419,9 +1399,9 @@ checksum = "d4b4532cf86bfef556348ac65e561e3123879f0e7566cca6d43a6ff5326f13df"
[[package]]
name = "napi-derive"
version = "2.14.2"
version = "2.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0cca5738c6e81eb5ffd2c8ff2b4f05ece9c4c60c7e2b36cec6524492cf7f330"
checksum = "3619fa472d23cd5af94d63a2bae454a77a8863251f40230fbf59ce20eafa8a86"
dependencies = [
"cfg-if",
"convert_case",
@@ -1433,9 +1413,9 @@ dependencies = [
[[package]]
name = "napi-derive-backend"
version = "1.0.55"
version = "1.0.54"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35960e5f33228192a9b661447d0dfe8f5a3790ff5b4058c4d67680ded4f65b91"
checksum = "ecd3ea4b54020c73d591a49cd192f6334c5f37f71a63ead54dbc851fa991ef00"
dependencies = [
"convert_case",
"once_cell",
@@ -1455,19 +1435,6 @@ dependencies = [
"libloading",
]
[[package]]
name = "nix"
version = "0.26.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b"
dependencies = [
"bitflags 1.3.2",
"cfg-if",
"libc",
"memoffset",
"pin-utils",
]
[[package]]
name = "no-std-compat"
version = "0.4.1"
@@ -2325,18 +2292,18 @@ checksum = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090"
[[package]]
name = "serde"
version = "1.0.193"
version = "1.0.192"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89"
checksum = "bca2a08484b285dcb282d0f67b26cadc0df8b19f8c12502c13d966bf9482f001"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.193"
version = "1.0.192"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3"
checksum = "d6c7207fbec9faa48073f3e3074cbe553af6ea512d7c21ba46e434e70ea9fbc1"
dependencies = [
"proc-macro2",
"quote",
@@ -2483,9 +2450,9 @@ dependencies = [
[[package]]
name = "sqlx"
version = "0.7.3"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dba03c279da73694ef99763320dea58b51095dfe87d001b1d4b5fe78ba8763cf"
checksum = "0e50c216e3624ec8e7ecd14c6a6a6370aad6ee5d8cfc3ab30b5162eeeef2ed33"
dependencies = [
"sqlx-core",
"sqlx-macros",
@@ -2496,11 +2463,11 @@ dependencies = [
[[package]]
name = "sqlx-core"
version = "0.7.3"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d84b0a3c3739e220d94b3239fd69fb1f74bc36e16643423bd99de3b43c21bfbd"
checksum = "8d6753e460c998bbd4cd8c6f0ed9a64346fcca0723d6e75e52fdc351c5d2169d"
dependencies = [
"ahash 0.8.6",
"ahash 0.8.3",
"atoi",
"bigdecimal",
"byteorder",
@@ -2544,9 +2511,9 @@ dependencies = [
[[package]]
name = "sqlx-macros"
version = "0.7.3"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89961c00dc4d7dffb7aee214964b065072bff69e36ddb9e2c107541f75e4f2a5"
checksum = "9a793bb3ba331ec8359c1853bd39eed32cdd7baaf22c35ccf5c92a7e8d1189ec"
dependencies = [
"proc-macro2",
"quote",
@@ -2557,11 +2524,10 @@ dependencies = [
[[package]]
name = "sqlx-macros-core"
version = "0.7.3"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0bd4519486723648186a08785143599760f7cc81c52334a55d6a83ea1e20841"
checksum = "0a4ee1e104e00dedb6aa5ffdd1343107b0a4702e862a84320ee7cc74782d96fc"
dependencies = [
"atomic-write-file",
"dotenvy",
"either",
"heck",
@@ -2584,9 +2550,9 @@ dependencies = [
[[package]]
name = "sqlx-mysql"
version = "0.7.3"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e37195395df71fd068f6e2082247891bc11e3289624bbc776a0cdfa1ca7f1ea4"
checksum = "864b869fdf56263f4c95c45483191ea0af340f9f3e3e7b4d57a61c7c87a970db"
dependencies = [
"atoi",
"base64",
@@ -2631,9 +2597,9 @@ dependencies = [
[[package]]
name = "sqlx-postgres"
version = "0.7.3"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6ac0ac3b7ccd10cc96c7ab29791a7dd236bd94021f31eec7ba3d46a74aa1c24"
checksum = "eb7ae0e6a97fb3ba33b23ac2671a5ce6e3cabe003f451abd5a56e7951d975624"
dependencies = [
"atoi",
"base64",
@@ -2676,9 +2642,9 @@ dependencies = [
[[package]]
name = "sqlx-sqlite"
version = "0.7.3"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "210976b7d948c7ba9fced8ca835b11cbb2d677c59c79de41ac0d397e14547490"
checksum = "d59dc83cf45d89c555a577694534fcd1b55c545a816c816ce51f20bbe56a4f3f"
dependencies = [
"atoi",
"chrono",
@@ -2696,7 +2662,6 @@ dependencies = [
"time",
"tracing",
"url",
"urlencoding",
"uuid",
]
@@ -3059,12 +3024,6 @@ dependencies = [
"percent-encoding",
]
[[package]]
name = "urlencoding"
version = "2.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da"
[[package]]
name = "utf8parse"
version = "0.2.1"
@@ -3073,9 +3032,9 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
[[package]]
name = "uuid"
version = "1.6.1"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e395fcf16a7a3d8127ec99782007af141946b4795001f876d54fb0d55978560"
checksum = "c58fe91d841bc04822c9801002db4ea904b9e4b8e6bbad25127b46eff8dc516b"
dependencies = [
"getrandom",
"rand",
@@ -3182,9 +3141,12 @@ dependencies = [
[[package]]
name = "webpki-roots"
version = "0.25.3"
version = "0.24.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1778a42e8b3b90bff8d0f5032bf22250792889a5cdc752aa0020c84abe3aaf10"
checksum = "b291546d5d9d1eab74f069c77749f2cb8504a12caa20f0f2de93ddbf6f411888"
dependencies = [
"rustls-webpki",
]
[[package]]
name = "whoami"
@@ -3316,26 +3278,6 @@ dependencies = [
"tap",
]
[[package]]
name = "zerocopy"
version = "0.7.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e97e415490559a91254a2979b4829267a57d2fcd741a98eee8b722fb57289aa0"
dependencies = [
"zerocopy-derive",
]
[[package]]
name = "zerocopy-derive"
version = "0.7.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd7e48ccf166952882ca8bd778a43502c64f33bf94c12ebe2a7f08e5a0f6689f"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.38",
]
[[package]]
name = "zeroize"
version = "1.6.0"

View File

@@ -107,11 +107,12 @@ If you have questions, you are welcome to contact us. One of the best places to
## Ecosystem
| Name | | |
| -------------------------------------------------------- | ---------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------- |
| [@affine/component](packages/frontend/component) | AFFiNE Component Resources | [![](https://img.shields.io/codecov/c/github/toeverything/affine?style=flat-square)](https://affine-storybook.vercel.app/) |
| [@toeverything/y-indexeddb](packages/common/y-indexeddb) | IndexedDB database adapter for Yjs | [![](https://img.shields.io/npm/dm/@toeverything/y-indexeddb?style=flat-square&color=eee)](https://www.npmjs.com/package/@toeverything/y-indexeddb) |
| [@toeverything/theme](packages/common/theme) | AFFiNE theme | [![](https://img.shields.io/npm/dm/@toeverything/theme?style=flat-square&color=eee)](https://www.npmjs.com/package/@toeverything/theme) |
| Name | | |
| ----------------------------------------------------------------------------------------------- | --------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------- |
| [@toeverything/component](https://github.com/toeverything/design/tree/main/packages/components) | Toeverything Shared Component Resources | |
| [@affine/component](packages/frontend/component) | AFFiNE Component Resources | [![](https://img.shields.io/codecov/c/github/toeverything/affine?style=flat-square)](https://affine-storybook.vercel.app/) |
| [@toeverything/y-indexeddb](packages/common/y-indexeddb) | IndexedDB database adapter for Yjs | [![](https://img.shields.io/npm/dm/@toeverything/y-indexeddb?style=flat-square&color=eee)](https://www.npmjs.com/package/@toeverything/y-indexeddb) |
| [@toeverything/theme](packages/common/theme) | AFFiNE theme | [![](https://img.shields.io/npm/dm/@toeverything/theme?style=flat-square&color=eee)](https://www.npmjs.com/package/@toeverything/theme) |
## Plugins
@@ -227,7 +228,7 @@ See [LICENSE] for details.
[jobs available]: ./docs/jobs.md
[latest packages]: https://github.com/toeverything/AFFiNE/pkgs/container/affine-self-hosted
[contributor license agreement]: https://github.com/toeverything/affine/edit/canary/.github/CLA.md
[rust-version-icon]: https://img.shields.io/badge/Rust-1.74.1-dea584
[rust-version-icon]: https://img.shields.io/badge/Rust-1.71.0-dea584
[stars-icon]: https://img.shields.io/github/stars/toeverything/AFFiNE.svg?style=flat&logo=github&colorB=red&label=stars
[codecov]: https://codecov.io/gh/toeverything/affine/branch/canary/graphs/badge.svg?branch=canary
[node-version-icon]: https://img.shields.io/badge/node-%3E=18.16.1-success

View File

@@ -1,28 +0,0 @@
# Issues Triaging
When we receive your issue, we will first triage it. Triaging an issue usually takes around one business day but may take longer. The goal of triaging is to provide you with a clear understanding of what will happen to your issue. For example, after your feature request has been triaged, you know whether we plan to tackle the issue or whether we'll wait to hear what the broader community thinks about this request.
Here are issue states and their descriptions:
| State | Description |
| ------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| Untriaged | The team has not yet reviewed the issue. We usually do it within one business day. |
| As designed | The behavior described in the issue is intentional. If you find it seriously disruptive or if we've misunderstood you, please let us know in the issue's comments section. |
| Blocked | We can't work on this issue until another one (linked) is resolved. |
| Can't Reproduce | We have been unable to reproduce the issue on our side. It could be flaky or fixed already, or we may not have had all the details we needed. If you're still experiencing the issue and have any further details, please share them. |
| Duplicate | The issue is the same (or has the same cause) as another one (linked). |
| Fixed | If the issue was a bug, it's been fixed; if it was a missing feature, it's been implemented. |
| Fixed In Branch | If the issue was a bug, it's been fixed; if it was a missing feature, it's been implemented; the changes are now in a separate branch and haven't been merged into the default branch yet. |
| In Progress | We're currently working on the issue. |
| Incomplete | Unfortunately we don't have enough information to proceed. If you're willing to share any further details about the issue, please do so in the comments. |
| Obsolete | The part of the product that was causing this issue has been removed or significantly reworked since it was created. |
| Upvoting | We are currently evaluating demand for the issue and checking whether it requires complicated or risky changes. Please leave a vote or comment if you think it should be prioritized. |
| Open | We want to implement the fix or feature in the near future. We can't promise it will appear in the next public release, but it's on our short list. |
| Shelved | We have reviewed the issue and decided that, even though it has merit, we cannot currently include it in our near-term plan. |
| Third Party Problem | The issue is caused by a third party. We've done our best to inform them about it. |
| To be Discussed | We need some time to discuss the issue. |
| To Reproduce | We will try to find the steps needed to reproduce the issue on our side. |
| Under Investigation | We've triaged the issue, but now we need to investigate it more thoroughly. This may require processing additional information like logs or dumps. |
| Waiting for Info | We've requested additional information from the person who created the issue and are waiting for them to get back to us. |
| Declined | We've reviewed the suggestion and, while we appreciate its value, we unfortunately do not have the resources to implement it. |
| Answered | The issue actually turned out to be a question or a misunderstanding, and it has been answered or resolved. |

View File

@@ -1,6 +1,6 @@
{
"name": "@affine/monorepo",
"version": "0.10.4-beta.2",
"version": "0.10.3",
"private": true,
"author": "toeverything",
"license": "MIT",
@@ -33,12 +33,12 @@
"lint:eslint:fix": "yarn lint:eslint --fix",
"lint:prettier": "prettier --ignore-unknown --cache --check .",
"lint:prettier:fix": "prettier --ignore-unknown --cache --write .",
"lint:ox": "oxlint --deny-warnings --import-plugin -D correctness -D nursery -D prefer-array-some -D no-useless-promise-resolve-reject -A no-undef -A consistent-type-exports -A default -A named -A ban-ts-comment",
"lint": "yarn lint:eslint && yarn lint:prettier",
"lint:fix": "yarn lint:eslint:fix && yarn lint:prettier:fix",
"test": "vitest --run",
"test:ui": "vitest --ui",
"test:coverage": "vitest run --coverage",
"circular": "madge --circular --ts-config ./tsconfig.json ./packages/frontend/core/src/pages/**/*.tsx ./packages/frontend/core/src/index.tsx ./packages/frontend/electron/src/*/index.ts",
"typecheck": "tsc -b tsconfig.json --diagnostics",
"postinstall": "node ./scripts/check-version.mjs && yarn i18n-codegen gen && yarn husky install"
},
@@ -49,10 +49,8 @@
"eslint --cache --fix"
],
"*.toml": [
"prettier --ignore-unknown --write",
"taplo format"
],
"*.rs": [
"cargo fmt --"
]
},
"devDependencies": {
@@ -73,8 +71,8 @@
"@types/affine__env": "workspace:*",
"@types/eslint": "^8.44.7",
"@types/node": "^20.9.3",
"@typescript-eslint/eslint-plugin": "^6.13.1",
"@typescript-eslint/parser": "^6.13.1",
"@typescript-eslint/eslint-plugin": "^6.12.0",
"@typescript-eslint/parser": "^6.12.0",
"@vanilla-extract/vite-plugin": "^3.9.2",
"@vanilla-extract/webpack-plugin": "^2.3.1",
"@vitejs/plugin-react-swc": "^3.5.0",
@@ -84,6 +82,7 @@
"eslint": "^8.54.0",
"eslint-config-prettier": "^9.0.0",
"eslint-plugin-i": "^2.29.0",
"eslint-plugin-prettier": "^5.0.1",
"eslint-plugin-react": "^7.33.2",
"eslint-plugin-react-hooks": "^4.6.0",
"eslint-plugin-simple-import-sort": "^10.0.0",
@@ -95,29 +94,29 @@
"happy-dom": "^12.10.3",
"husky": "^8.0.3",
"lint-staged": "^15.1.0",
"madge": "^6.1.0",
"msw": "^2.0.8",
"nanoid": "^5.0.3",
"nx": "^17.1.3",
"nx-cloud": "^16.5.2",
"nyc": "^15.1.0",
"oxlint": "^0.0.18",
"prettier": "^3.1.0",
"semver": "^7.5.4",
"serve": "^14.2.1",
"string-width": "^7.0.0",
"ts-node": "^10.9.1",
"typescript": "^5.3.2",
"vite": "^5.0.6",
"vite": "^5.0.1",
"vite-plugin-istanbul": "^5.0.0",
"vite-plugin-static-copy": "^1.0.0",
"vite-plugin-static-copy": "^0.17.1",
"vite-tsconfig-paths": "^4.2.1",
"vitest": "0.34.6",
"vitest-fetch-mock": "^0.2.2",
"vitest-mock-extended": "^1.3.1"
},
"packageManager": "yarn@4.0.2",
"packageManager": "yarn@4.0.1",
"resolutions": {
"vite": "^5.0.6",
"vite": "^4.4.11",
"array-buffer-byte-length": "npm:@nolyfill/array-buffer-byte-length@latest",
"array-includes": "npm:@nolyfill/array-includes@latest",
"array.prototype.flat": "npm:@nolyfill/array.prototype.flat@latest",
@@ -174,7 +173,7 @@
"which-boxed-primitive": "npm:@nolyfill/which-boxed-primitive@latest",
"which-typed-array": "npm:@nolyfill/which-typed-array@latest",
"next-auth@^4.24.5": "patch:next-auth@npm%3A4.24.5#~/.yarn/patches/next-auth-npm-4.24.5-8428e11927.patch",
"@reforged/maker-appimage/@electron-forge/maker-base": "7.2.0",
"@reforged/maker-appimage/@electron-forge/maker-base": "7.1.0",
"macos-alias": "npm:macos-alias-building@latest",
"fs-xattr": "npm:@napi-rs/xattr@latest"
}

View File

@@ -1,2 +0,0 @@
-- AlterTable
ALTER TABLE "optimized_blobs" ADD COLUMN "deleted_at" TIMESTAMPTZ(6);

View File

@@ -1,7 +1,7 @@
{
"name": "@affine/server",
"private": true,
"version": "0.10.4-beta.2",
"version": "0.10.3",
"description": "Affine Node.js server",
"type": "module",
"bin": {
@@ -41,14 +41,13 @@
"@opentelemetry/core": "^1.18.1",
"@opentelemetry/exporter-prometheus": "^0.45.1",
"@opentelemetry/exporter-zipkin": "^1.18.1",
"@opentelemetry/host-metrics": "^0.34.0",
"@opentelemetry/host-metrics": "^0.33.2",
"@opentelemetry/instrumentation": "^0.45.1",
"@opentelemetry/instrumentation-graphql": "^0.36.0",
"@opentelemetry/instrumentation-http": "^0.45.1",
"@opentelemetry/instrumentation-ioredis": "^0.36.0",
"@opentelemetry/instrumentation-ioredis": "^0.35.3",
"@opentelemetry/instrumentation-nestjs-core": "^0.33.3",
"@opentelemetry/instrumentation-socket.io": "^0.34.3",
"@opentelemetry/resources": "^1.18.1",
"@opentelemetry/sdk-metrics": "^1.18.1",
"@opentelemetry/sdk-node": "^0.45.1",
"@opentelemetry/sdk-trace-node": "^1.18.1",
@@ -102,7 +101,7 @@
"@types/sinon": "^17.0.2",
"@types/supertest": "^2.0.16",
"@types/ws": "^8.5.10",
"ava": "^6.0.0",
"ava": "^5.3.1",
"c8": "^8.0.1",
"nodemon": "^3.0.1",
"sinon": "^17.0.1",

View File

@@ -178,15 +178,13 @@ model Blob {
}
model OptimizedBlob {
id Int @id @default(autoincrement()) @db.Integer
hash String @db.VarChar
workspaceId String @map("workspace_id") @db.VarChar
params String @db.VarChar
blob Bytes @db.ByteA
id Int @id @default(autoincrement()) @db.Integer
hash String @db.VarChar
workspaceId String @map("workspace_id") @db.VarChar
params String @db.VarChar
blob Bytes @db.ByteA
length BigInt
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
// not for keeping, but for snapshot history
deletedAt DateTime? @map("deleted_at") @db.Timestamptz(6)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
@@unique([workspaceId, hash, params])
@@map("optimized_blobs")

View File

@@ -3,7 +3,6 @@ import { Module } from '@nestjs/common';
import { AppController } from './app.controller';
import { CacheModule } from './cache';
import { ConfigModule } from './config';
import { EventModule } from './event';
import { BusinessModules } from './modules';
import { AuthModule } from './modules/auth';
import { PrismaModule } from './prisma';
@@ -15,7 +14,6 @@ const BasicModules = [
PrismaModule,
ConfigModule.forRoot(),
CacheModule,
EventModule,
StorageModule.forRoot(),
SessionModule,
RateLimiterModule,

View File

@@ -50,7 +50,7 @@ function boolean(value: string) {
}
export function parseEnvValue(value: string | undefined, type?: EnvConfigType) {
if (value === undefined) {
if (typeof value === 'undefined') {
return;
}

View File

@@ -10,7 +10,7 @@ export function applyEnvToConfig(rawConfig: AFFiNEConfig) {
? [config, process.env[env]]
: [config[0], parseEnvValue(process.env[env], config[1])];
if (value !== undefined) {
if (typeof value !== 'undefined') {
set(rawConfig, path, value);
}
}

View File

@@ -1,23 +0,0 @@
import type { Snapshot, Workspace } from '@prisma/client';
import { Flatten, Payload } from './types';
interface EventDefinitions {
workspace: {
deleted: Payload<Workspace['id']>;
};
snapshot: {
updated: Payload<
Pick<Snapshot, 'id' | 'workspaceId'> & {
previous: Pick<Snapshot, 'blob' | 'state' | 'updatedAt'>;
}
>;
deleted: Payload<Pick<Snapshot, 'id' | 'workspaceId'>>;
};
}
export type EventKV = Flatten<EventDefinitions>;
export type Event = keyof EventKV;
export type EventPayload<E extends Event> = EventKV[E];

View File

@@ -1,45 +0,0 @@
import { Global, Injectable, Module } from '@nestjs/common';
import {
EventEmitter2,
EventEmitterModule,
OnEvent as RawOnEvent,
} from '@nestjs/event-emitter';
import type { Event, EventPayload } from './events';
@Injectable()
export class EventEmitter {
constructor(private readonly emitter: EventEmitter2) {}
emit<E extends Event>(event: E, payload: EventPayload<E>) {
return this.emitter.emit(event, payload);
}
emitAsync<E extends Event>(event: E, payload: EventPayload<E>) {
return this.emitter.emitAsync(event, payload);
}
on<E extends Event>(event: E, handler: (payload: EventPayload<E>) => void) {
return this.emitter.on(event, handler);
}
once<E extends Event>(event: E, handler: (payload: EventPayload<E>) => void) {
return this.emitter.once(event, handler);
}
}
export const OnEvent = (
event: Event,
opts?: Parameters<typeof RawOnEvent>[1]
) => {
return RawOnEvent(event, opts);
};
@Global()
@Module({
imports: [EventEmitterModule.forRoot()],
providers: [EventEmitter],
exports: [EventEmitter],
})
export class EventModule {}
export { EventPayload };
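As a brief usage sketch (hypothetical call sites, not part of the compare; the event names and payload shapes follow the EventDefinitions in events.ts above, and `event` stands for an injected EventEmitter instance):
// emit is constrained to declared keys and their Payload data
event.emit('workspace.deleted', workspace.id);
// handler payloads are inferred: Pick<Snapshot, 'id' | 'workspaceId'> here
event.on('snapshot.deleted', ({ id, workspaceId }) => {
  console.log(`snapshot ${id} deleted in workspace ${workspaceId}`);
});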

View File

@@ -1,33 +0,0 @@
export type Payload<T> = {
__payload: true;
data: T;
};
export type Join<A extends string, B extends string> = A extends ''
? B
: `${A}.${B}`;
export type PathType<T, Path extends string> = string extends Path
? unknown
: Path extends keyof T
? T[Path]
: Path extends `${infer K}.${infer R}`
? K extends keyof T
? PathType<T[K], R>
: unknown
: unknown;
export type Leaves<T, P extends string = ''> = T extends Payload<any>
? P
: T extends Record<string, any>
? {
[K in keyof T]: K extends string ? Leaves<T[K], Join<P, K>> : never;
}[keyof T]
: never;
export type Flatten<T> = Leaves<T> extends infer R
? {
// @ts-expect-error yo, ts can't make it
[K in R]: PathType<T, K> extends Payload<infer U> ? U : never;
}
: never;
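As an illustration (not part of the compare), applying these helpers to the EventDefinitions from events.ts flattens the nested map into dotted leaf keys whose values are the unwrapped Payload data, roughly:
// hypothetical expansion of Flatten<EventDefinitions>
type ExampleEventKV = {
  'workspace.deleted': Workspace['id'];
  'snapshot.updated': Pick<Snapshot, 'id' | 'workspaceId'> & {
    previous: Pick<Snapshot, 'blob' | 'state' | 'updatedAt'>;
  };
  'snapshot.deleted': Pick<Snapshot, 'id' | 'workspaceId'>;
};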

View File

@@ -20,7 +20,7 @@ export class GQLLoggerPlugin implements ApolloServerPlugin {
const res = reqContext.contextValue.req.res as Response;
const operation = reqContext.request.operationName;
metrics.gql.counter('query_counter').add(1, { operation });
metrics().gqlRequest.add(1, { operation });
const start = Date.now();
return Promise.resolve({
@@ -30,9 +30,7 @@ export class GQLLoggerPlugin implements ApolloServerPlugin {
'Server-Timing',
`gql;dur=${costInMilliseconds};desc="GraphQL"`
);
metrics.gql
.histogram('query_duration')
.record(costInMilliseconds, { operation });
metrics().gqlTimer.record(costInMilliseconds, { operation });
return Promise.resolve();
},
didEncounterErrors: () => {
@@ -41,9 +39,7 @@ export class GQLLoggerPlugin implements ApolloServerPlugin {
'Server-Timing',
`gql;dur=${costInMilliseconds};desc="GraphQL ${operation}"`
);
metrics.gql
.histogram('query_duration')
.record(costInMilliseconds, { operation });
metrics().gqlTimer.record(costInMilliseconds, { operation });
return Promise.resolve();
},
});
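For context, the Server-Timing header set in both callbacks is what browser devtools surface as backend timing; a response from the success path would carry roughly (duration value hypothetical):
// Server-Timing: gql;dur=42;desc="GraphQL"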

View File

@@ -1,129 +1,76 @@
import {
Attributes,
Counter,
Histogram,
Meter,
MetricOptions,
} from '@opentelemetry/api';
import opentelemetry, { Attributes, Observable } from '@opentelemetry/api';
import { getMeter } from './opentelemetry';
type MetricType = 'counter' | 'gauge' | 'histogram';
type Metric<T extends MetricType> = T extends 'counter'
? Counter
: T extends 'gauge'
? Histogram
: T extends 'histogram'
? Histogram
: never;
export type ScopedMetrics = {
[T in MetricType]: (name: string, opts?: MetricOptions) => Metric<T>;
};
type MetricCreators = {
[T in MetricType]: (
meter: Meter,
name: string,
opts?: MetricOptions
) => Metric<T>;
};
export type KnownMetricScopes =
| 'socketio'
| 'gql'
| 'jwst'
| 'auth'
| 'controllers'
| 'doc';
const metricCreators: MetricCreators = {
counter(meter: Meter, name: string, opts?: MetricOptions) {
return meter.createCounter(name, opts);
},
gauge(meter: Meter, name: string, opts?: MetricOptions) {
let value: any;
let attrs: Attributes | undefined;
const ob = meter.createObservableGauge(name, opts);
ob.addCallback(result => {
result.observe(value, attrs);
});
return {
record: (newValue, newAttrs) => {
value = newValue;
attrs = newAttrs;
},
} satisfies Histogram;
},
histogram(meter: Meter, name: string, opts?: MetricOptions) {
return meter.createHistogram(name, opts);
},
};
const scopes = new Map<string, ScopedMetrics>();
function make(scope: string) {
const meter = getMeter();
const metrics = new Map<string, { type: MetricType; metric: any }>();
const prefix = scope + '/';
function getOrCreate<T extends MetricType>(
type: T,
name: string,
opts?: MetricOptions
): Metric<T> {
name = prefix + name;
const metric = metrics.get(name);
if (metric) {
if (type !== metric.type) {
throw new Error(
`Metric ${name} has already been registered as ${metric.type} mode, but get as ${type} again.`
);
}
return metric.metric;
} else {
const metric = metricCreators[type](meter, name, opts);
metrics.set(name, { type, metric });
return metric;
}
}
return {
counter(name, opts) {
return getOrCreate('counter', name, opts);
},
gauge(name, opts) {
return getOrCreate('gauge', name, opts);
},
histogram(name, opts) {
return getOrCreate('histogram', name, opts);
},
} satisfies ScopedMetrics;
interface AsyncMetric {
ob: Observable;
get value(): any;
get attrs(): Attributes | undefined;
}
/**
* @example
*
* ```
* metrics.scope.counter('example_count').add(1, {
* attr1: 'example-event'
* })
* ```
*/
export const metrics = new Proxy<Record<KnownMetricScopes, ScopedMetrics>>(
// @ts-expect-error proxied
{},
{
get(_, scopeName: string) {
let scope = scopes.get(scopeName);
if (!scope) {
scope = make(scopeName);
scopes.set(scopeName, scope);
}
let _metrics: ReturnType<typeof createBusinessMetrics> | undefined = undefined;
return scope;
},
export function getMeter(name = 'business') {
return opentelemetry.metrics.getMeter(name);
}
function createBusinessMetrics() {
const meter = getMeter();
const asyncMetrics: AsyncMetric[] = [];
function createGauge(name: string) {
let value: any;
let attrs: Attributes | undefined;
const ob = meter.createObservableGauge(name);
asyncMetrics.push({
ob,
get value() {
return value;
},
get attrs() {
return attrs;
},
});
return (newValue: any, newAttrs?: Attributes) => {
value = newValue;
attrs = newAttrs;
};
}
);
const metrics = {
socketIOConnectionGauge: createGauge('socket_io_connection'),
gqlRequest: meter.createCounter('gql_request'),
gqlError: meter.createCounter('gql_error'),
gqlTimer: meter.createHistogram('gql_timer'),
jwstCodecMerge: meter.createCounter('jwst_codec_merge'),
jwstCodecDidnotMatch: meter.createCounter('jwst_codec_didnot_match'),
jwstCodecFail: meter.createCounter('jwst_codec_fail'),
authCounter: meter.createCounter('auth'),
authFailCounter: meter.createCounter('auth_fail'),
docHistoryCounter: meter.createCounter('doc_history_created'),
docRecoverCounter: meter.createCounter('doc_history_recovered'),
};
meter.addBatchObservableCallback(
result => {
asyncMetrics.forEach(metric => {
result.observe(metric.ob, metric.value, metric.attrs);
});
},
asyncMetrics.map(({ ob }) => ob)
);
return metrics;
}
export function registerBusinessMetrics() {
if (!_metrics) {
_metrics = createBusinessMetrics();
}
return _metrics;
}
export const metrics = registerBusinessMetrics;
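This API change is what drives the call-site edits throughout the rest of this compare. The two forms, as they appear verbatim in the GQLLoggerPlugin diff above:
// scoped-proxy form (one side of the compare):
metrics.gql.counter('query_counter').add(1, { operation });
// fixed-registry form (the other side):
metrics().gqlRequest.add(1, { operation });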

View File

@@ -1,6 +1,5 @@
import { MetricExporter } from '@google-cloud/opentelemetry-cloud-monitoring-exporter';
import { TraceExporter } from '@google-cloud/opentelemetry-cloud-trace-exporter';
import { metrics } from '@opentelemetry/api';
import {
CompositePropagator,
W3CBaggagePropagator,
@@ -17,8 +16,6 @@ import { NestInstrumentation } from '@opentelemetry/instrumentation-nestjs-core'
import { SocketIoInstrumentation } from '@opentelemetry/instrumentation-socket.io';
import {
ConsoleMetricExporter,
type MeterProvider,
MetricProducer,
MetricReader,
PeriodicExportingMetricReader,
} from '@opentelemetry/sdk-metrics';
@@ -27,11 +24,10 @@ import {
BatchSpanProcessor,
ConsoleSpanExporter,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-node';
import { PrismaInstrumentation } from '@prisma/instrumentation';
import { PrismaMetricProducer } from './prisma';
import { registerBusinessMetrics } from './metrics';
abstract class OpentelemetryFactor {
abstract getMetricReader(): MetricReader;
@@ -48,14 +44,9 @@ abstract class OpentelemetryFactor {
];
}
getMetricsProducers(): MetricProducer[] {
return [new PrismaMetricProducer()];
}
create() {
const traceExporter = this.getSpanExporter();
return new NodeSDK({
sampler: new TraceIdRatioBasedSampler(0.1),
traceExporter,
metricReader: this.getMetricReader(),
spanProcessor: new BatchSpanProcessor(traceExporter),
@@ -76,10 +67,7 @@ class GCloudOpentelemetryFactor extends OpentelemetryFactor {
return new PeriodicExportingMetricReader({
exportIntervalMillis: 30000,
exportTimeoutMillis: 10000,
exporter: new MetricExporter({
prefix: 'custom.googleapis.com',
}),
metricProducers: this.getMetricsProducers(),
exporter: new MetricExporter(),
});
}
@@ -90,9 +78,7 @@ class GCloudOpentelemetryFactor extends OpentelemetryFactor {
class LocalOpentelemetryFactor extends OpentelemetryFactor {
override getMetricReader(): MetricReader {
return new PrometheusExporter({
metricProducers: this.getMetricsProducers(),
});
return new PrometheusExporter();
}
override getSpanExporter(): SpanExporter {
@@ -104,7 +90,6 @@ class DebugOpentelemetryFactor extends OpentelemetryFactor {
override getMetricReader(): MetricReader {
return new PeriodicExportingMetricReader({
exporter: new ConsoleMetricExporter(),
metricProducers: this.getMetricsProducers(),
});
}
@@ -126,30 +111,9 @@ function createSDK() {
return factor?.create();
}
let OPENTELEMETRY_STARTED = false;
function ensureStarted() {
if (!OPENTELEMETRY_STARTED) {
OPENTELEMETRY_STARTED = true;
start();
}
}
function getMeterProvider() {
ensureStarted();
return metrics.getMeterProvider();
}
function registerCustomMetrics() {
const hostMetricsMonitoring = new HostMetrics({
name: 'instance-host-metrics',
meterProvider: getMeterProvider() as MeterProvider,
});
hostMetricsMonitoring.start();
}
export function getMeter(name = 'business') {
return getMeterProvider().getMeter(name);
const host = new HostMetrics({ name: 'instance-host-metrics' });
host.start();
}
export function start() {
@@ -158,5 +122,6 @@ export function start() {
if (sdk) {
sdk.start();
registerCustomMetrics();
registerBusinessMetrics();
}
}

View File

@@ -1,132 +0,0 @@
import { HrTime, ValueType } from '@opentelemetry/api';
import { hrTime } from '@opentelemetry/core';
import { Resource } from '@opentelemetry/resources';
import {
AggregationTemporality,
CollectionResult,
DataPointType,
InstrumentType,
MetricProducer,
ScopeMetrics,
} from '@opentelemetry/sdk-metrics';
import { PrismaService } from '../prisma';
function transformPrismaKey(key: string) {
// replace the first '_' with '/' as a scope prefix
// example: prisma_client_query_duration_seconds_sum -> prisma/client_query_duration_seconds_sum
return key.replace(/_/, '/');
}
export class PrismaMetricProducer implements MetricProducer {
private readonly startTime: HrTime = hrTime();
async collect(): Promise<CollectionResult> {
const result: CollectionResult = {
resourceMetrics: {
resource: Resource.EMPTY,
scopeMetrics: [],
},
errors: [],
};
if (!PrismaService.INSTANCE) {
return result;
}
const prisma = PrismaService.INSTANCE;
const endTime = hrTime();
const metrics = await prisma.$metrics.json();
const scopeMetrics: ScopeMetrics = {
scope: {
name: '',
},
metrics: [],
};
for (const counter of metrics.counters) {
scopeMetrics.metrics.push({
descriptor: {
name: transformPrismaKey(counter.key),
description: counter.description,
unit: '1',
type: InstrumentType.COUNTER,
valueType: ValueType.INT,
},
dataPointType: DataPointType.SUM,
aggregationTemporality: AggregationTemporality.CUMULATIVE,
dataPoints: [
{
startTime: this.startTime,
endTime: endTime,
value: counter.value,
attributes: counter.labels,
},
],
isMonotonic: true,
});
}
for (const gauge of metrics.gauges) {
scopeMetrics.metrics.push({
descriptor: {
name: transformPrismaKey(gauge.key),
description: gauge.description,
unit: '1',
type: InstrumentType.UP_DOWN_COUNTER,
valueType: ValueType.INT,
},
dataPointType: DataPointType.GAUGE,
aggregationTemporality: AggregationTemporality.CUMULATIVE,
dataPoints: [
{
startTime: this.startTime,
endTime: endTime,
value: gauge.value,
attributes: gauge.labels,
},
],
});
}
for (const histogram of metrics.histograms) {
const boundaries = [];
const counts = [];
for (const [boundary, count] of histogram.value.buckets) {
boundaries.push(boundary);
counts.push(count);
}
scopeMetrics.metrics.push({
descriptor: {
name: transformPrismaKey(histogram.key),
description: histogram.description,
unit: 'ms',
type: InstrumentType.HISTOGRAM,
valueType: ValueType.DOUBLE,
},
dataPointType: DataPointType.HISTOGRAM,
aggregationTemporality: AggregationTemporality.CUMULATIVE,
dataPoints: [
{
startTime: this.startTime,
endTime: endTime,
value: {
buckets: {
boundaries,
counts,
},
count: histogram.value.count,
sum: histogram.value.sum,
},
attributes: histogram.labels,
},
],
});
}
result.resourceMetrics.scopeMetrics.push(scopeMetrics);
return result;
}
}

View File

@@ -1,9 +1,8 @@
import { Attributes } from '@opentelemetry/api';
import { KnownMetricScopes, metrics } from './metrics';
import { getMeter } from './metrics';
export const CallTimer = (
scope: KnownMetricScopes,
name: string,
attrs?: Attributes
): MethodDecorator => {
@@ -19,11 +18,9 @@ export const CallTimer = (
}
desc.value = function (...args: any[]) {
const timer = metrics[scope].histogram(name, {
const timer = getMeter().createHistogram(name, {
description: `function call time costs of ${name}`,
unit: 'ms',
});
const start = Date.now();
const end = () => {
@@ -51,7 +48,6 @@ export const CallTimer = (
};
export const CallCounter = (
scope: KnownMetricScopes,
name: string,
attrs?: Attributes
): MethodDecorator => {
@@ -67,7 +63,7 @@ export const CallCounter = (
}
desc.value = function (...args: any[]) {
const count = metrics[scope].counter(name, {
const count = getMeter().createCounter(name, {
description: `function call counter of ${name}`,
});

View File

@@ -14,7 +14,7 @@ const TrivialExceptions = [NotFoundException];
@Catch()
export class ExceptionLogger implements ExceptionFilter {
private readonly logger = new Logger('ExceptionLogger');
private logger = new Logger('ExceptionLogger');
catch(exception: Error, host: ArgumentsHost) {
// with useGlobalFilters, the context is always HTTP

View File

@@ -53,8 +53,8 @@ class AuthGuard implements CanActivate {
constructor(
@Inject(NextAuthOptionsProvide)
private readonly nextAuthOptions: NextAuthOptions,
private readonly auth: AuthService,
private readonly prisma: PrismaService,
private auth: AuthService,
private prisma: PrismaService,
private readonly reflector: Reflector
) {}

View File

@@ -89,13 +89,12 @@ export class NextAuthController {
res.redirect(`/signin${query}`);
return;
}
metrics().authCounter.add(1);
const [action, providerId] = req.url // start with request url
.slice(BASE_URL.length) // make relative to baseUrl
.replace(/\?.*/, '') // remove query part, use only path part
.split('/') as [AuthAction, string]; // as array of strings;
metrics.auth.counter('call_counter').add(1, { action, providerId });
const credentialsSignIn =
req.method === 'POST' && providerId === 'credentials';
let userId: string | undefined;
@@ -127,9 +126,7 @@ export class NextAuthController {
const options = this.nextAuthOptions;
if (req.method === 'POST' && action === 'session') {
if (typeof req.body !== 'object' || typeof req.body.data !== 'object') {
metrics.auth
.counter('call_fails_counter')
.add(1, { reason: 'invalid_session_data' });
metrics().authFailCounter.add(1, { reason: 'invalid_session_data' });
throw new BadRequestException(`Invalid new session data`);
}
const user = await this.updateSession(req, req.body.data);
@@ -212,10 +209,9 @@ export class NextAuthController {
if (redirect?.endsWith('api/auth/error?error=AccessDenied')) {
this.logger.log(`Early access redirect headers: ${req.headers}`);
metrics.auth
.counter('call_fails_counter')
.add(1, { reason: 'no_early_access_permission' });
metrics().authFailCounter.add(1, {
reason: 'no_early_access_permission',
});
if (
!req.headers?.referer ||
checkUrlOrigin(req.headers.referer, 'https://accounts.google.com')

View File

@@ -23,14 +23,14 @@ export type UserClaim = Pick<
hasPassword?: boolean;
};
export const getUtcTimestamp = () => Math.floor(Date.now() / 1000);
export const getUtcTimestamp = () => Math.floor(new Date().getTime() / 1000);
@Injectable()
export class AuthService {
constructor(
private readonly config: Config,
private readonly prisma: PrismaService,
private readonly mailer: MailService
private config: Config,
private prisma: PrismaService,
private mailer: MailService
) {}
sign(user: UserClaim) {

View File

@@ -1,30 +0,0 @@
import { Module } from '@nestjs/common';
import { Field, ObjectType, Query } from '@nestjs/graphql';
export const { SERVER_FLAVOR } = process.env;
@ObjectType()
export class ServerConfigType {
@Field({ description: 'server version' })
version!: string;
@Field({ description: 'server flavor' })
flavor!: string;
}
export class ServerConfigResolver {
@Query(() => ServerConfigType, {
description: 'server config',
})
serverConfig(): ServerConfigType {
return {
version: AFFiNE.version,
flavor: SERVER_FLAVOR || 'allinone',
};
}
}
@Module({
providers: [ServerConfigResolver],
})
export class ServerConfigModule {}

View File

@@ -1,15 +1,15 @@
import { isDeepStrictEqual } from 'node:util';
import { Injectable, Logger } from '@nestjs/common';
import { OnEvent } from '@nestjs/event-emitter';
import { Cron, CronExpression } from '@nestjs/schedule';
import type { Snapshot } from '@prisma/client';
import { Config } from '../../config';
import { type EventPayload, OnEvent } from '../../event';
import { metrics } from '../../metrics';
import { PrismaService } from '../../prisma';
import { SubscriptionStatus } from '../payment/service';
import { Permission } from '../workspaces/types';
import { isEmptyBuffer } from './manager';
@Injectable()
export class DocHistoryManager {
@@ -19,38 +19,16 @@ export class DocHistoryManager {
private readonly db: PrismaService
) {}
@OnEvent('workspace.deleted')
onWorkspaceDeleted(workspaceId: EventPayload<'workspace.deleted'>) {
return this.db.snapshotHistory.deleteMany({
where: {
workspaceId,
},
});
}
@OnEvent('snapshot.deleted')
onSnapshotDeleted({ workspaceId, id }: EventPayload<'snapshot.deleted'>) {
return this.db.snapshotHistory.deleteMany({
where: {
workspaceId,
id,
},
});
}
@OnEvent('snapshot.updated')
async onDocUpdated(
{ workspaceId, id, previous }: EventPayload<'snapshot.updated'>,
forceCreate = false
) {
const last = await this.last(workspaceId, id);
@OnEvent('doc:manager:snapshot:beforeUpdate')
async onDocUpdated(snapshot: Snapshot, forceCreate = false) {
const last = await this.last(snapshot.workspaceId, snapshot.id);
let shouldCreateHistory = false;
if (!last) {
// never created
shouldCreateHistory = true;
} else if (last.timestamp === previous.updatedAt) {
} else if (last.timestamp === snapshot.updatedAt) {
// no change
shouldCreateHistory = false;
} else if (
@@ -58,23 +36,16 @@ export class DocHistoryManager {
forceCreate ||
// last history created before interval in configs
last.timestamp.getTime() <
previous.updatedAt.getTime() - this.config.doc.history.interval
snapshot.updatedAt.getTime() - this.config.doc.history.interval
) {
shouldCreateHistory = true;
}
if (shouldCreateHistory) {
// skip the history recording when no actual update to the snapshot happened
if (last && isDeepStrictEqual(last.state, previous.state)) {
if (last && isDeepStrictEqual(last.state, snapshot.state)) {
this.logger.debug(
`State matches, skip creating history record for ${id} in workspace ${workspaceId}`
);
return;
}
if (isEmptyBuffer(previous.blob)) {
this.logger.debug(
`Doc is empty, skip creating history record for ${id} in workspace ${workspaceId}`
`State matches, skip creating history record for ${snapshot.id} in workspace ${snapshot.workspaceId}`
);
return;
}
@@ -85,24 +56,22 @@ export class DocHistoryManager {
timestamp: true,
},
data: {
workspaceId,
id,
timestamp: previous.updatedAt,
blob: previous.blob,
state: previous.state,
expiredAt: await this.getExpiredDateFromNow(workspaceId),
workspaceId: snapshot.workspaceId,
id: snapshot.id,
timestamp: snapshot.updatedAt,
blob: snapshot.blob,
state: snapshot.state,
expiredAt: await this.getExpiredDateFromNow(snapshot.workspaceId),
},
})
.catch(() => {
// safe to ignore
// only happens when duplicated history record created in multi processes
});
metrics.doc
.counter('history_created_counter', {
description: 'How many times the snapshot history created',
})
.add(1);
this.logger.log(`History created for ${id} in workspace ${workspaceId}.`);
metrics().docHistoryCounter.add(1, {});
this.logger.log(
`History created for ${snapshot.id} in workspace ${snapshot.workspaceId}.`
);
}
}
@@ -120,7 +89,7 @@ export class DocHistoryManager {
workspaceId,
id,
timestamp: {
lt: before,
lte: before,
},
// only include the ones has not expired
expiredAt: {
@@ -207,17 +176,13 @@ export class DocHistoryManager {
}
// save old snapshot as one history record
await this.onDocUpdated({ workspaceId, id, previous: oldSnapshot }, true);
await this.onDocUpdated(oldSnapshot, true);
// WARN:
// we should never update the snapshot while recovering,
// since that is not how conflicts are resolved in a CRDT.
// let the user revert in the client and update the data through the sync system
// `await this.db.snapshot.update();`
metrics.doc
.counter('history_recovered_counter', {
description: 'How many times history recovered request happened',
})
.add(1);
metrics().docRecoverCounter.add(1, {});
return history.timestamp;
}

View File

@@ -5,12 +5,12 @@ import {
OnModuleDestroy,
OnModuleInit,
} from '@nestjs/common';
import { EventEmitter2 } from '@nestjs/event-emitter';
import { Snapshot, Update } from '@prisma/client';
import { chunk } from 'lodash-es';
import { defer, retry } from 'rxjs';
import {
applyUpdate,
decodeStateVector,
Doc,
encodeStateAsUpdate,
encodeStateVector,
@@ -19,7 +19,6 @@ import {
import { Cache } from '../../cache';
import { Config } from '../../config';
import { EventEmitter, type EventPayload, OnEvent } from '../../event';
import { metrics } from '../../metrics/metrics';
import { PrismaService } from '../../prisma';
import { mergeUpdatesInApplyWay as jwstMergeUpdates } from '../../storage';
@@ -41,39 +40,9 @@ function compare(yBinary: Buffer, jwstBinary: Buffer, strict = false): boolean {
return compare(yBinary, yBinary2, true);
}
/**
* Detect whether rhs state is newer than lhs state.
*
* How we can tell a state is newer:
*
* i. if the state vector size is larger, it's newer
* ii. if the state vector sizes are the same, compare each client's state
*/
function isStateNewer(lhs: Buffer, rhs: Buffer): boolean {
const lhsVector = decodeStateVector(lhs);
const rhsVector = decodeStateVector(rhs);
if (lhsVector.size < rhsVector.size) {
return true;
}
for (const [client, state] of lhsVector) {
const rstate = rhsVector.get(client);
if (!rstate) {
return false;
}
if (state < rstate) {
return true;
}
}
return false;
}
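A minimal standalone sketch of the same ordering, using a hypothetical helper over plain Maps of clientID -> clock to make the heuristic concrete (not part of the compare):
// mirrors isStateNewer above, minus the decodeStateVector step
function isNewerSketch(
  lhs: Map<number, number>, // clientID -> clock
  rhs: Map<number, number>
): boolean {
  if (lhs.size < rhs.size) {
    return true; // rhs has seen more clients
  }
  for (const [client, clock] of lhs) {
    const rclock = rhs.get(client);
    if (rclock === undefined) {
      return false; // rhs is missing a client lhs already knows
    }
    if (clock < rclock) {
      return true; // rhs advanced this client's clock
    }
  }
  return false; // identical, or lhs is ahead
}
// isNewerSketch(new Map([[1, 5]]), new Map([[1, 5], [2, 3]])) === true
// isNewerSketch(new Map([[1, 5], [2, 3]]), new Map([[1, 5]])) === false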
export function isEmptyBuffer(buf: Buffer): boolean {
function isEmptyBuffer(buf: Buffer): boolean {
return (
buf.length === 0 ||
buf.length == 0 ||
// 0x0000
(buf.length === 2 && buf[0] === 0 && buf[1] === 0)
);
@@ -91,9 +60,9 @@ const MAX_SEQ_NUM = 0x3fffffff; // u31
*/
@Injectable()
export class DocManager implements OnModuleInit, OnModuleDestroy {
private readonly logger = new Logger(DocManager.name);
private logger = new Logger(DocManager.name);
private job: NodeJS.Timeout | null = null;
private readonly seqMap = new Map<string, number>();
private seqMap = new Map<string, number>();
private busy = false;
constructor(
@@ -102,7 +71,7 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
private readonly db: PrismaService,
private readonly config: Config,
private readonly cache: Cache,
private readonly event: EventEmitter
private readonly event: EventEmitter2
) {}
onModuleInit() {
@@ -156,13 +125,13 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
this.config.doc.manager.experimentalMergeWithJwstCodec &&
updates.length < 100 /* avoid overloading */
) {
metrics.jwst.counter('codec_merge_counter').add(1);
metrics().jwstCodecMerge.add(1);
const yjsResult = Buffer.from(encodeStateAsUpdate(doc));
let log = false;
try {
const jwstResult = jwstMergeUpdates(updates);
if (!compare(yjsResult, jwstResult)) {
metrics.jwst.counter('codec_not_match').add(1);
metrics().jwstCodecDidnotMatch.add(1);
this.logger.warn(
`jwst codec result doesn't match yjs codec result for: ${guid}`
);
@@ -173,7 +142,7 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
}
}
} catch (e) {
metrics.jwst.counter('codec_fails_counter').add(1);
metrics().jwstCodecFail.add(1);
this.logger.warn(`jwst apply update failed for ${guid}: ${e}`);
log = true;
} finally {
@@ -224,33 +193,6 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
}
}
@OnEvent('workspace.deleted')
async onWorkspaceDeleted(workspaceId: string) {
await this.db.snapshot.deleteMany({
where: {
workspaceId,
},
});
await this.db.update.deleteMany({
where: {
workspaceId,
},
});
}
@OnEvent('snapshot.deleted')
async onSnapshotDeleted({
id,
workspaceId,
}: EventPayload<'snapshot.deleted'>) {
await this.db.update.deleteMany({
where: {
id,
workspaceId,
},
});
}
/**
* add update to manager for later processing.
*/
@@ -432,17 +374,23 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
}
const { id, workspaceId } = candidate;
// acquire lock
const ok = await this.lockUpdatesForAutoSquash(workspaceId, id);
await this.lockUpdatesForAutoSquash(workspaceId, id, async () => {
try {
await this._get(workspaceId, id);
} catch (e) {
this.logger.error(
`Failed to apply updates for workspace: ${workspaceId}, guid: ${id}`
);
this.logger.error(e);
}
});
if (!ok) {
return;
}
try {
await this._get(workspaceId, id);
} catch (e) {
this.logger.error(
`Failed to apply updates for workspace: ${workspaceId}, guid: ${id}`
);
this.logger.error(e);
} finally {
await this.unlockUpdatesForAutoSquash(workspaceId, id);
}
}
private async getAutoSquashCandidate() {
@@ -466,67 +414,34 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
doc: Doc,
initialSeq?: number
) {
return this.lockSnapshotForUpsert(workspaceId, guid, async () => {
const blob = Buffer.from(encodeStateAsUpdate(doc));
const blob = Buffer.from(encodeStateAsUpdate(doc));
const state = Buffer.from(encodeStateVector(doc));
if (isEmptyBuffer(blob)) {
return false;
}
if (isEmptyBuffer(blob)) {
return;
}
const state = Buffer.from(encodeStateVector(doc));
return await this.db.$transaction(async db => {
const snapshot = await db.snapshot.findUnique({
where: {
id_workspaceId: {
id: guid,
workspaceId,
},
},
});
// update
if (snapshot) {
// only update if state is newer
if (isStateNewer(snapshot.state ?? Buffer.from([0]), state)) {
await db.snapshot.update({
select: {
seq: true,
},
where: {
id_workspaceId: {
workspaceId,
id: guid,
},
},
data: {
blob,
state,
},
});
return true;
} else {
return false;
}
} else {
// create
await db.snapshot.create({
select: {
seq: true,
},
data: {
id: guid,
workspaceId,
blob,
state,
seq: initialSeq,
},
});
return true;
}
});
await this.db.snapshot.upsert({
select: {
seq: true,
},
where: {
id_workspaceId: {
id: guid,
workspaceId,
},
},
create: {
id: guid,
workspaceId,
blob,
state,
seq: initialSeq,
},
update: {
blob,
state,
},
});
}
@@ -565,29 +480,15 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
...updates.map(u => u.blob)
);
const done = await this.upsert(workspaceId, id, doc, last.seq);
if (done) {
if (snapshot) {
this.event.emit('snapshot.updated', {
id,
workspaceId,
previous: {
blob: snapshot.blob,
state: snapshot.state,
updatedAt: snapshot.updatedAt,
},
});
}
this.logger.debug(
`Squashed ${updates.length} updates for ${id} in workspace ${workspaceId}`
);
if (snapshot) {
this.event.emit('doc:manager:snapshot:beforeUpdate', snapshot);
}
// always delete updates
// the upsert will return false if the state is not newer, so we don't need to worry about it
const { count } = await this.db.update.deleteMany({
await this.upsert(workspaceId, id, doc, last.seq);
this.logger.debug(
`Squashed ${updates.length} updates for ${id} in workspace ${workspaceId}`
);
await this.db.update.deleteMany({
where: {
id,
workspaceId,
@@ -597,8 +498,7 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
},
});
await this.updateCachedUpdatesCount(workspaceId, id, -count);
await this.updateCachedUpdatesCount(workspaceId, id, -updates.length);
return doc;
}
@@ -681,44 +581,22 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
return null;
}
private async doWithLock<T>(lock: string, job: () => Promise<T>) {
const acquired = await this.cache.setnx(lock, 1, {
ttl: 60 * 1000,
});
if (!acquired) {
return;
}
try {
return await job();
} finally {
await this.cache.delete(lock).catch(e => {
// safe, the lock will expire when the TTL ends
this.logger.error(`Failed to release lock ${lock}`, e);
});
}
}
private async lockUpdatesForAutoSquash<T>(
workspaceId: string,
guid: string,
job: () => Promise<T>
) {
return this.doWithLock(
private async lockUpdatesForAutoSquash(workspaceId: string, guid: string) {
return this.cache.setnx(
`doc:manager:updates-lock:${workspaceId}::${guid}`,
job
1,
{
ttl: 60 * 1000,
}
);
}
async lockSnapshotForUpsert<T>(
workspaceId: string,
guid: string,
job: () => Promise<T>
) {
return this.doWithLock(
`doc:manager:snapshot-lock:${workspaceId}::${guid}`,
job
);
private async unlockUpdatesForAutoSquash(workspaceId: string, guid: string) {
return this.cache
.delete(`doc:manager:updates-lock:${workspaceId}::${guid}`)
.catch(e => {
// safe, the lock will expire when the TTL ends
this.logger.error('Failed to release updates lock', e);
});
}
}
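A condensed sketch of the setnx-based locking both variants above rely on, assuming a cache that exposes `setnx(key, value, { ttl })` and `delete(key)` exactly as shown in the diff; the TTL bounds how long a crashed lock holder can block other nodes:

interface LockCache {
  setnx(key: string, value: number, opts: { ttl: number }): Promise<boolean>;
  delete(key: string): Promise<unknown>;
}

async function doWithLockSketch<T>(
  cache: LockCache,
  lock: string,
  job: () => Promise<T>
): Promise<T | undefined> {
  const acquired = await cache.setnx(lock, 1, { ttl: 60 * 1000 });
  if (!acquired) {
    // another node holds the lock; skip this round
    return undefined;
  }
  try {
    return await job();
  } finally {
    // best-effort release; if this fails, the lock expires with the TTL
    await cache.delete(lock).catch(() => undefined);
  }
}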

View File

@@ -1,40 +1,34 @@
import { DynamicModule, Type } from '@nestjs/common';
import { EventEmitterModule } from '@nestjs/event-emitter';
import { ScheduleModule } from '@nestjs/schedule';
import { GqlModule } from '../graphql.module';
import { SERVER_FLAVOR, ServerConfigModule } from './config';
import { AuthModule } from './auth';
import { DocModule } from './doc';
import { PaymentModule } from './payment';
import { SelfHostedModule } from './self-hosted';
import { SyncModule } from './sync';
import { UsersModule } from './users';
import { WorkspaceModule } from './workspaces';
const BusinessModules: (Type | DynamicModule)[] = [];
const { SERVER_FLAVOR } = process.env;
const BusinessModules: (Type | DynamicModule)[] = [
EventEmitterModule.forRoot({
global: true,
}),
];
switch (SERVER_FLAVOR) {
case 'sync':
BusinessModules.push(SyncModule, DocModule.forSync());
break;
case 'selfhosted':
BusinessModules.push(
ServerConfigModule,
SelfHostedModule,
ScheduleModule.forRoot(),
GqlModule,
WorkspaceModule,
UsersModule,
SyncModule,
DocModule.forRoot()
);
break;
case 'graphql':
BusinessModules.push(
ServerConfigModule,
ScheduleModule.forRoot(),
GqlModule,
WorkspaceModule,
UsersModule,
AuthModule,
DocModule.forRoot(),
PaymentModule
);
@@ -42,11 +36,11 @@ switch (SERVER_FLAVOR) {
case 'allinone':
default:
BusinessModules.push(
ServerConfigModule,
ScheduleModule.forRoot(),
GqlModule,
WorkspaceModule,
UsersModule,
AuthModule,
SyncModule,
DocModule.forRoot(),
PaymentModule
@@ -54,4 +48,4 @@ switch (SERVER_FLAVOR) {
break;
}
export { BusinessModules, SERVER_FLAVOR };
export { BusinessModules };
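Illustrative only: the flavor driving the switch above is read from the environment at process startup, so one build can serve different deployments; 'allinone' mirrors the default branch of the switch:

const SERVER_FLAVOR = process.env.SERVER_FLAVOR ?? 'allinone';
const syncOnly = SERVER_FLAVOR === 'sync';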

View File

@@ -52,7 +52,7 @@ class SubscriptionPrice {
}
@ObjectType('UserSubscription')
export class UserSubscriptionType implements Partial<UserSubscription> {
class UserSubscriptionType implements Partial<UserSubscription> {
@Field({ name: 'id' })
stripeSubscriptionId!: string;

View File

@@ -30,7 +30,6 @@ export enum SubscriptionPlan {
Pro = 'pro',
Team = 'team',
Enterprise = 'enterprise',
SelfHosted = 'selfhosted',
}
export function encodeLookupKey(

View File

@@ -1,38 +0,0 @@
import { Module } from '@nestjs/common';
import { ResolveField, Resolver } from '@nestjs/graphql';
import { UserSubscriptionType } from './payment/resolver';
import {
SubscriptionPlan,
SubscriptionRecurring,
SubscriptionStatus,
} from './payment/service';
import { UserType } from './users';
const YEAR = 1000 * 60 * 60 * 24 * 30 * 12;
@Resolver(() => UserType)
export class SelfHostedDummyResolver {
private readonly start = new Date();
private readonly end = new Date(Number(this.start) + YEAR);
constructor() {}
@ResolveField(() => UserSubscriptionType)
async subscription() {
return {
stripeSubscriptionId: 'dummy',
plan: SubscriptionPlan.SelfHosted,
recurring: SubscriptionRecurring.Yearly,
status: SubscriptionStatus.Active,
start: this.start,
end: this.end,
createdAt: this.start,
updatedAt: this.start,
};
}
}
@Module({
providers: [SelfHostedDummyResolver],
})
export class SelfHostedModule {}

View File

@@ -25,6 +25,7 @@ import {
EventError,
InternalError,
NotInWorkspaceError,
WorkspaceNotFoundError,
} from './error';
export const GatewayErrorWrapper = (): MethodDecorator => {
@@ -44,7 +45,6 @@ export const GatewayErrorWrapper = (): MethodDecorator => {
try {
result = originalMethod.apply(this, args);
} catch (e) {
metrics.socketio.counter('unhandled_errors').add(1);
return {
error: new InternalError(e as Error),
};
@@ -52,7 +52,6 @@ export const GatewayErrorWrapper = (): MethodDecorator => {
if (result instanceof Promise) {
return result.catch(e => {
metrics.socketio.counter('unhandled_errors').add(1);
return {
error: new InternalError(e),
};
@@ -69,7 +68,7 @@ export const GatewayErrorWrapper = (): MethodDecorator => {
const SubscribeMessage = (event: string) =>
applyDecorators(
GatewayErrorWrapper(),
CallTimer('socketio', 'event_duration', { event }),
CallTimer('socket_io_event_duration', { event }),
RawSubscribeMessage(event)
);
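A condensed sketch of the error-wrapping decorator pattern above: synchronous throws and rejected promises are both converted into an `{ error }` result so gateway handlers never leak raw exceptions; the metrics counter is omitted here for brevity:

function gatewayErrorWrapperSketch(): MethodDecorator {
  return (_target, _key, descriptor: PropertyDescriptor) => {
    const original = descriptor.value;
    descriptor.value = function (this: unknown, ...args: unknown[]) {
      let result;
      try {
        result = original.apply(this, args);
      } catch (e) {
        // synchronous throw
        return { error: e as Error };
      }
      if (result instanceof Promise) {
        // async rejection
        return result.catch((e: Error) => ({ error: e }));
      }
      return result;
    };
    return descriptor;
  };
}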
@@ -105,12 +104,12 @@ export class EventsGateway implements OnGatewayConnection, OnGatewayDisconnect {
handleConnection() {
this.connectionCount++;
metrics.socketio.gauge('realtime_connections').record(this.connectionCount);
metrics().socketIOConnectionGauge(this.connectionCount);
}
handleDisconnect() {
this.connectionCount--;
metrics.socketio.gauge('realtime_connections').record(this.connectionCount);
metrics().socketIOConnectionGauge(this.connectionCount);
}
@Auth()
@@ -318,7 +317,9 @@ export class EventsGateway implements OnGatewayConnection, OnGatewayDisconnect {
if (!doc) {
return {
error: new DocNotFoundError(workspaceId, docId.guid),
error: docId.isWorkspace
? new WorkspaceNotFoundError(workspaceId)
: new DocNotFoundError(workspaceId, docId.guid),
};
}

View File

@@ -34,7 +34,7 @@ export class WorkspacesController {
//
// NOTE: because GraphQL can't represent a File, we have to use the REST API to get blobs
@Get('/:id/blobs/:name')
@CallTimer('controllers', 'workspace_get_blob')
@CallTimer('doc_controller', { method: 'get_blob' })
async blob(
@Param('id') workspaceId: string,
@Param('name') name: string,
@@ -59,7 +59,7 @@ export class WorkspacesController {
@Get('/:id/docs/:guid')
@Auth()
@Publicable()
@CallTimer('controllers', 'workspace_get_doc')
@CallTimer('doc_controller', { method: 'get_doc' })
async doc(
@CurrentUser() user: UserType | undefined,
@Param('id') ws: string,
@@ -106,7 +106,7 @@ export class WorkspacesController {
@Get('/:id/docs/:guid/histories/:timestamp')
@Auth()
@CallTimer('controllers', 'workspace_get_history')
@CallTimer('doc_controller', { method: 'get_history' })
async history(
@CurrentUser() user: UserType,
@Param('id') ws: string,

View File

@@ -33,7 +33,6 @@ import type {
import GraphQLUpload from 'graphql-upload/GraphQLUpload.mjs';
import { applyUpdate, Doc } from 'yjs';
import { EventEmitter } from '../../event';
import { PrismaService } from '../../prisma';
import { StorageProvide } from '../../storage';
import { CloudThrottlerGuard, Throttle } from '../../throttler';
@@ -147,7 +146,6 @@ export class WorkspaceResolver {
private readonly prisma: PrismaService,
private readonly permissions: PermissionService,
private readonly users: UsersService,
private readonly event: EventEmitter,
@Inject(StorageProvide) private readonly storage: Storage
) {}
@@ -310,11 +308,22 @@ export class WorkspaceResolver {
})
async createWorkspace(
@CurrentUser() user: UserType,
// we no longer support initializing a workspace with a preload file
// use the sync system to upload it once the workspace is created
@Args({ name: 'init', type: () => GraphQLUpload, nullable: true })
init: FileUpload | null
@Args({ name: 'init', type: () => GraphQLUpload })
update: FileUpload
) {
// convert stream to buffer
const buffer = await new Promise<Buffer>((resolve, reject) => {
const stream = update.createReadStream();
const chunks: Uint8Array[] = [];
stream.on('data', chunk => {
chunks.push(chunk);
});
stream.on('error', reject);
stream.on('end', () => {
resolve(Buffer.concat(chunks));
});
});
const workspace = await this.prisma.workspace.create({
data: {
public: false,
@@ -332,31 +341,14 @@ export class WorkspaceResolver {
},
});
if (init) {
// convert stream to buffer
const buffer = await new Promise<Buffer>(resolve => {
const stream = init.createReadStream();
const chunks: Uint8Array[] = [];
stream.on('data', chunk => {
chunks.push(chunk);
});
stream.on('error', () => {
resolve(Buffer.from([]));
});
stream.on('end', () => {
resolve(Buffer.concat(chunks));
});
if (buffer.length) {
await this.prisma.snapshot.create({
data: {
id: workspace.id,
workspaceId: workspace.id,
blob: buffer,
},
});
if (buffer.length) {
await this.prisma.snapshot.create({
data: {
id: workspace.id,
workspaceId: workspace.id,
blob: buffer,
},
});
}
}
return workspace;
@@ -390,7 +382,18 @@ export class WorkspaceResolver {
},
});
this.event.emit('workspace.deleted', id);
await this.prisma.$transaction([
this.prisma.update.deleteMany({
where: {
workspaceId: id,
},
}),
this.prisma.snapshot.deleteMany({
where: {
workspaceId: id,
},
}),
]);
return true;
}
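A self-contained sketch of the stream-to-buffer conversion used in `createWorkspace` above; this variant rejects on stream errors, whereas the older code in the diff resolved with an empty buffer instead:

import type { Readable } from 'node:stream';

function streamToBuffer(stream: Readable): Promise<Buffer> {
  return new Promise((resolve, reject) => {
    const chunks: Uint8Array[] = [];
    stream.on('data', chunk => chunks.push(chunk as Uint8Array));
    stream.on('error', reject);
    stream.on('end', () => resolve(Buffer.concat(chunks)));
  });
}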

View File

@@ -7,13 +7,6 @@ export class PrismaService
extends PrismaClient
implements OnModuleInit, OnModuleDestroy
{
static INSTANCE: PrismaService | null = null;
constructor() {
super();
PrismaService.INSTANCE = this;
}
async onModuleInit() {
await this.$connect();
}

View File

@@ -2,14 +2,6 @@
# THIS FILE WAS AUTOMATICALLY GENERATED (DO NOT MODIFY)
# ------------------------------------------------------
type ServerConfigType {
"""server version"""
version: String!
"""server flavor"""
flavor: String!
}
type UserType {
id: ID!
@@ -81,7 +73,6 @@ enum SubscriptionPlan {
Pro
Team
Enterprise
SelfHosted
}
type UserSubscription {
@@ -249,9 +240,6 @@ type DocHistoryType {
}
type Query {
"""server config"""
serverConfig: ServerConfigType!
"""Get is owner of workspace"""
isOwner(workspaceId: String!): Boolean!
@@ -292,7 +280,7 @@ type Mutation {
sendVerifyChangeEmail(token: String!, email: String!, callbackUrl: String!): Boolean!
"""Create a new workspace"""
createWorkspace(init: Upload): WorkspaceType!
createWorkspace(init: Upload!): WorkspaceType!
"""Update workspace"""
updateWorkspace(input: UpdateWorkspaceInput!): WorkspaceType!

View File

@@ -16,7 +16,7 @@ export class DocID {
raw: string;
workspace: string;
variant: DocVariant;
private readonly sub: string | null;
private sub: string | null;
static parse(raw: string): DocID | null {
try {

View File

@@ -1,20 +1,15 @@
import { mock } from 'node:test';
import type { INestApplication } from '@nestjs/common';
import { EventEmitterModule } from '@nestjs/event-emitter';
import { Test, TestingModule } from '@nestjs/testing';
import test from 'ava';
import { register } from 'prom-client';
import * as Sinon from 'sinon';
import {
applyUpdate,
decodeStateVector,
Doc as YDoc,
encodeStateAsUpdate,
} from 'yjs';
import { Doc as YDoc, encodeStateAsUpdate } from 'yjs';
import { CacheModule } from '../src/cache';
import { Config, ConfigModule } from '../src/config';
import { EventModule } from '../src/event';
import { DocManager, DocModule } from '../src/modules/doc';
import { PrismaModule, PrismaService } from '../src/prisma';
import { flushDB } from './utils';
@@ -24,7 +19,7 @@ const createModule = () => {
imports: [
PrismaModule,
CacheModule,
EventModule,
EventEmitterModule.forRoot(),
ConfigModule.forRoot(),
DocModule.forRoot(),
],
@@ -288,73 +283,3 @@ test('should throw if meet max retry times', async t => {
);
t.is(stub.callCount, 5);
});
test('should not update snapshot if state is outdated', async t => {
const db = m.get(PrismaService);
const manager = m.get(DocManager);
await db.snapshot.create({
data: {
id: '2',
workspaceId: '2',
blob: Buffer.from([0, 0]),
seq: 1,
},
});
const doc = new YDoc();
const text = doc.getText('content');
const updates: Buffer[] = [];
doc.on('update', update => {
updates.push(Buffer.from(update));
});
text.insert(0, 'hello');
text.insert(5, 'world');
text.insert(5, ' ');
await Promise.all(updates.map(update => manager.push('2', '2', update)));
const updateWith3Records = await manager.getUpdates('2', '2');
text.insert(11, '!');
await manager.push('2', '2', updates[3]);
const updateWith4Records = await manager.getUpdates('2', '2');
// Simulation:
// Node A gets 3 updates and starts squashing at time 1, finishing at time 10
// Node B gets 4 updates and starts squashing at time 3, finishing at time 8
// Node B finishes the squash first and updates the snapshot
// Node A finishes later and would overwrite the snapshot with an outdated state
// Time: ---------------------->
// A: ^get ^upsert
// B: ^get ^upsert
//
// We should avoid such a situation
// @ts-expect-error private
await manager.squash(updateWith4Records, null);
// @ts-expect-error private
await manager.squash(updateWith3Records, null);
const result = await db.snapshot.findUnique({
where: {
id_workspaceId: {
id: '2',
workspaceId: '2',
},
},
});
if (!result) {
t.fail('snapshot not found');
return;
}
const state = decodeStateVector(result.state!);
t.is(state.get(doc.clientID), 12);
const d = new YDoc();
applyUpdate(d, result.blob!);
const dtext = d.getText('content');
t.is(dtext.toString(), 'hello world!');
});

View File

@@ -17,7 +17,7 @@ class FakePrisma {
get workspace() {
return {
async findUnique() {
throw new Error('exception from graphql');
throw Error('exception from graphql');
},
};
}

View File

@@ -6,7 +6,6 @@ import test from 'ava';
import * as Sinon from 'sinon';
import { ConfigModule } from '../src/config';
import type { EventPayload } from '../src/event';
import { DocHistoryManager } from '../src/modules/doc';
import { PrismaModule, PrismaService } from '../src/prisma';
import { flushDB } from './utils';
@@ -42,28 +41,21 @@ test.afterEach(async () => {
const snapshot: Snapshot = {
workspaceId: '1',
id: 'doc1',
blob: Buffer.from([1, 0]),
state: Buffer.from([0]),
blob: Buffer.from([0, 0]),
state: Buffer.from([0, 0]),
seq: 0,
updatedAt: new Date(),
createdAt: new Date(),
};
function getEventData(
timestamp: Date = new Date()
): EventPayload<'snapshot.updated'> {
return {
workspaceId: snapshot.workspaceId,
id: snapshot.id,
previous: { ...snapshot, updatedAt: timestamp },
};
}
test('should create doc history if never created before', async t => {
Sinon.stub(manager, 'last').resolves(null);
const timestamp = new Date();
await manager.onDocUpdated(getEventData(timestamp));
await manager.onDocUpdated({
...snapshot,
updatedAt: timestamp,
});
const history = await db.snapshotHistory.findFirst({
where: {
@@ -80,7 +72,10 @@ test('should not create history if timestamp equals to last record', async t =>
const timestamp = new Date();
Sinon.stub(manager, 'last').resolves({ timestamp, state: null });
await manager.onDocUpdated(getEventData(timestamp));
await manager.onDocUpdated({
...snapshot,
updatedAt: timestamp,
});
const history = await db.snapshotHistory.findFirst({
where: {
@@ -99,7 +94,10 @@ test('should not create history if state equals to last record', async t => {
state: snapshot.state,
});
await manager.onDocUpdated(getEventData(timestamp));
await manager.onDocUpdated({
...snapshot,
updatedAt: timestamp,
});
const history = await db.snapshotHistory.findFirst({
where: {
@@ -118,7 +116,10 @@ test('should not create history if time diff is less than interval config', asyn
state: Buffer.from([0, 1]),
});
await manager.onDocUpdated(getEventData(timestamp));
await manager.onDocUpdated({
...snapshot,
updatedAt: timestamp,
});
const history = await db.snapshotHistory.findFirst({
where: {
@@ -137,7 +138,10 @@ test('should create history if time diff is larger than interval config and stat
state: Buffer.from([0, 1]),
});
await manager.onDocUpdated(getEventData(timestamp));
await manager.onDocUpdated({
...snapshot,
updatedAt: timestamp,
});
const history = await db.snapshotHistory.findFirst({
where: {
@@ -156,7 +160,13 @@ test('should create history with force flag even if time diff in small', async t
state: Buffer.from([0, 1]),
});
await manager.onDocUpdated(getEventData(timestamp), true);
await manager.onDocUpdated(
{
...snapshot,
updatedAt: timestamp,
},
true
);
const history = await db.snapshotHistory.findFirst({
where: {
@@ -173,30 +183,26 @@ test('should correctly list all history records', async t => {
// insert expired data
await db.snapshotHistory.createMany({
data: Array.from({ length: 10 })
.fill(0)
.map((_, i) => ({
workspaceId: snapshot.workspaceId,
id: snapshot.id,
blob: snapshot.blob,
state: snapshot.state,
timestamp: new Date(timestamp - 10 - i),
expiredAt: new Date(timestamp - 1),
})),
data: new Array(10).fill(0).map((_, i) => ({
workspaceId: snapshot.workspaceId,
id: snapshot.id,
blob: snapshot.blob,
state: snapshot.state,
timestamp: new Date(timestamp - 10 - i),
expiredAt: new Date(timestamp - 1),
})),
});
// insert available data
await db.snapshotHistory.createMany({
data: Array.from({ length: 10 })
.fill(0)
.map((_, i) => ({
workspaceId: snapshot.workspaceId,
id: snapshot.id,
blob: snapshot.blob,
state: snapshot.state,
timestamp: new Date(timestamp + i),
expiredAt: new Date(timestamp + 1000),
})),
data: new Array(10).fill(0).map((_, i) => ({
workspaceId: snapshot.workspaceId,
id: snapshot.id,
blob: snapshot.blob,
state: snapshot.state,
timestamp: new Date(timestamp + i),
expiredAt: new Date(timestamp + 1000),
})),
});
const list = await manager.list(
@@ -214,7 +220,13 @@ test('should correctly list all history records', async t => {
test('should be able to get history data', async t => {
const timestamp = new Date();
await manager.onDocUpdated(getEventData(timestamp), true);
await manager.onDocUpdated(
{
...snapshot,
updatedAt: timestamp,
},
true
);
const history = await manager.get(
snapshot.workspaceId,
@@ -231,16 +243,14 @@ test('should be able to get last history record', async t => {
// insert available data
await db.snapshotHistory.createMany({
data: Array.from({ length: 10 })
.fill(0)
.map((_, i) => ({
workspaceId: snapshot.workspaceId,
id: snapshot.id,
blob: snapshot.blob,
state: snapshot.state,
timestamp: new Date(timestamp + i),
expiredAt: new Date(timestamp + 1000),
})),
data: new Array(10).fill(0).map((_, i) => ({
workspaceId: snapshot.workspaceId,
id: snapshot.id,
blob: snapshot.blob,
state: snapshot.state,
timestamp: new Date(timestamp + i),
expiredAt: new Date(timestamp + 1000),
})),
});
const history = await manager.last(snapshot.workspaceId, snapshot.id);
@@ -258,7 +268,10 @@ test('should be able to recover from history', async t => {
},
});
const history1Timestamp = snapshot.updatedAt.getTime() - 10;
await manager.onDocUpdated(getEventData(new Date(history1Timestamp)));
await manager.onDocUpdated({
...snapshot,
updatedAt: new Date(history1Timestamp),
});
await manager.recover(
snapshot.workspaceId,
@@ -286,30 +299,26 @@ test('should be able to cleanup expired history', async t => {
// insert expired data
await db.snapshotHistory.createMany({
data: Array.from({ length: 10 })
.fill(0)
.map((_, i) => ({
workspaceId: snapshot.workspaceId,
id: snapshot.id,
blob: snapshot.blob,
state: snapshot.state,
timestamp: new Date(timestamp - 10 - i),
expiredAt: new Date(timestamp - 1),
})),
data: new Array(10).fill(0).map((_, i) => ({
workspaceId: snapshot.workspaceId,
id: snapshot.id,
blob: snapshot.blob,
state: snapshot.state,
timestamp: new Date(timestamp - 10 - i),
expiredAt: new Date(timestamp - 1),
})),
});
// insert available data
await db.snapshotHistory.createMany({
data: Array.from({ length: 10 })
.fill(0)
.map((_, i) => ({
workspaceId: snapshot.workspaceId,
id: snapshot.id,
blob: snapshot.blob,
state: snapshot.state,
timestamp: new Date(timestamp + i),
expiredAt: new Date(timestamp + 1000),
})),
data: new Array(10).fill(0).map((_, i) => ({
workspaceId: snapshot.workspaceId,
id: snapshot.id,
blob: snapshot.blob,
state: snapshot.state,
timestamp: new Date(timestamp + i),
expiredAt: new Date(timestamp + 1000),
})),
});
let count = await db.snapshotHistory.count();

View File

@@ -65,7 +65,9 @@ test.afterEach.always(async t => {
test('should get blob size limit', async t => {
const { resolver } = t.context;
fakeUserService.getStorageQuotaById.resolves(100 * 1024 * 1024 * 1024);
fakeUserService.getStorageQuotaById.returns(
Promise.resolve(100 * 1024 * 1024 * 1024)
);
const res = await resolver.checkBlobSize(new FakePrisma().fakeUser, '', 100);
t.not(res, false);
// @ts-expect-error

View File

@@ -8,9 +8,9 @@ crate-type = ["cdylib"]
[dependencies]
chrono = "0.4"
jwst-codec = { git = "https://github.com/toeverything/OctoBase.git", rev = "49a6b7a" }
jwst-core = { git = "https://github.com/toeverything/OctoBase.git", rev = "49a6b7a" }
jwst-storage = { git = "https://github.com/toeverything/OctoBase.git", rev = "49a6b7a" }
jwst-codec = { git = "https://github.com/toeverything/OctoBase.git", rev = "aad9e5b" }
jwst-core = { git = "https://github.com/toeverything/OctoBase.git", rev = "aad9e5b" }
jwst-storage = { git = "https://github.com/toeverything/OctoBase.git", rev = "aad9e5b" }
napi = { version = "2", default-features = false, features = [
"napi5",
"async",

View File

@@ -1,6 +1,21 @@
/* auto-generated by NAPI-RS */
/* tslint:disable */
/* eslint-disable */
/* auto-generated by NAPI-RS */
export function verifyChallengeResponse(response: string, bits: number, resource: string): Promise<boolean>
export function mintChallengeResponse(resource: string, bits?: number | undefined | null): Promise<string>
export interface Blob {
contentType: string
lastModified: string
size: number
data: Buffer
}
/**
 * Merge updates the same way `Y.applyUpdate(doc, update)` would and return
 * the resulting binary.
*/
export function mergeUpdatesInApplyWay(updates: Array<Buffer>): Buffer
export class Storage {
/** Create a storage instance and establish a connection to the persist store. */
static connect(database: string, debugOnlyAutoMigrate?: boolean | undefined | null): Promise<Storage>
@@ -15,21 +30,3 @@ export class Storage {
/** Workspace size taken by blobs. */
blobsSize(workspaces: Array<string>): Promise<number>
}
export interface Blob {
contentType: string
lastModified: string
size: number
data: Buffer
}
/**
 * Merge updates the same way `Y.applyUpdate(doc, update)` would and return
 * the resulting binary.
*/
export function mergeUpdatesInApplyWay(updates: Array<Buffer>): Buffer
export function mintChallengeResponse(resource: string, bits?: number | undefined | null): Promise<string>
export function verifyChallengeResponse(response: string, bits: number, resource: string): Promise<boolean>
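Reference behavior for `mergeUpdatesInApplyWay` declared above, written against plain Yjs: apply every update to a fresh Doc and re-encode it. The native binding is expected to produce an equivalent binary, so this JS version is only a sketch for comparison or testing:

import { applyUpdate, Doc, encodeStateAsUpdate } from 'yjs';

function mergeUpdatesReference(updates: Uint8Array[]): Buffer {
  const doc = new Doc();
  for (const update of updates) {
    applyUpdate(doc, update);
  }
  return Buffer.from(encodeStateAsUpdate(doc));
}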

View File

@@ -1,6 +1,6 @@
{
"name": "@affine/storage",
"version": "0.10.4-beta.2",
"version": "0.10.3",
"engines": {
"node": ">= 10.16.0 < 11 || >= 11.8.0"
},
@@ -16,26 +16,27 @@
}
},
"napi": {
"binaryName": "storage",
"name": "storage",
"targets": [
"aarch64-apple-darwin",
"aarch64-unknown-linux-gnu",
"aarch64-pc-windows-msvc",
"x86_64-apple-darwin",
"x86_64-pc-windows-msvc",
"x86_64-unknown-linux-gnu"
"x86_64-unknown-linux-gnu",
"universal-apple-darwin"
]
},
"scripts": {
"test": "node --test ./__tests__/**/*.spec.js",
"build": "napi build --release --strip --no-const-enum",
"build": "napi build --release --strip",
"build:debug": "napi build",
"prepublishOnly": "napi prepublish -t npm",
"artifacts": "napi artifacts",
"version": "napi version"
},
"devDependencies": {
"@napi-rs/cli": "3.0.0-alpha.15",
"@napi-rs/cli": "^2.16.5",
"lib0": "^0.2.87",
"nx": "^17.1.3",
"nx-cloud": "^16.5.2",

View File

@@ -8,5 +8,5 @@
"react": "18.2.0",
"react-dom": "18.2.0"
},
"version": "0.10.4-beta.2"
"version": "0.10.3"
}

View File

@@ -9,5 +9,5 @@
"@types/debug": "^4.1.9",
"vitest": "0.34.6"
},
"version": "0.10.4-beta.2"
"version": "0.10.3"
}

View File

@@ -24,7 +24,7 @@ if (typeof window !== 'undefined') {
}
export class DebugLogger {
private readonly _debug: debug.Debugger;
private _debug: debug.Debugger;
constructor(namespace: string) {
this._debug = debug(namespace);

View File

@@ -3,8 +3,8 @@
"private": true,
"type": "module",
"devDependencies": {
"@blocksuite/global": "0.11.0-nightly-202312070955-2b5bb47",
"@blocksuite/store": "0.11.0-nightly-202312070955-2b5bb47",
"@blocksuite/global": "0.0.0-20231122113751-6bf81eb3-nightly",
"@blocksuite/store": "0.0.0-20231122113751-6bf81eb3-nightly",
"react": "18.2.0",
"react-dom": "18.2.0",
"vitest": "0.34.6",
@@ -21,10 +21,11 @@
},
"peerDependencies": {
"@affine/templates": "workspace:*",
"@blocksuite/global": "0.0.0-20230409084303-221991d4-nightly"
"@blocksuite/global": "0.0.0-20230409084303-221991d4-nightly",
"@toeverything/infra": "workspace:*"
},
"dependencies": {
"lit": "^3.0.2"
},
"version": "0.10.4-beta.2"
"version": "0.10.3"
}

View File

@@ -6,9 +6,12 @@ import { isDesktop, isServer } from './constant.js';
import { UaHelper } from './ua-helper.js';
export const blockSuiteFeatureFlags = z.object({
enable_transformer_clipboard: z.boolean(),
enable_expand_database_block: z.boolean(),
enable_bultin_ledits: z.boolean(),
enable_set_remote_flag: z.boolean(),
enable_block_hub: z.boolean(),
enable_toggle_block: z.boolean(),
enable_bookmark_operation: z.boolean(),
enable_note_index: z.boolean(),
});
export const runtimeFlagsSchema = z.object({
@@ -18,7 +21,6 @@ export const runtimeFlagsSchema = z.object({
enableBroadcastChannelProvider: z.boolean(),
enableDebugPage: z.boolean(),
changelogUrl: z.string(),
downloadUrl: z.string(),
// see: tools/workers
imageProxyUrl: z.string(),
enablePreloading: z.boolean(),
@@ -30,19 +32,12 @@ export const runtimeFlagsSchema = z.object({
enableCaptcha: z.boolean(),
enableEnhanceShareMode: z.boolean(),
enablePayment: z.boolean(),
enablePageHistory: z.boolean(),
// this is for the electron app
serverUrlPrefix: z.string(),
enableMoveDatabase: z.boolean(),
editorFlags: blockSuiteFeatureFlags,
appVersion: z.string(),
editorVersion: z.string(),
appBuildType: z.union([
z.literal('stable'),
z.literal('beta'),
z.literal('internal'),
z.literal('canary'),
]),
});
export type BlockSuiteFeatureFlags = z.infer<typeof blockSuiteFeatureFlags>;
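A small sketch of how schemas like `runtimeFlagsSchema` above are consumed: `parse` validates the injected runtime config at startup and throws on any missing or mistyped field; the two keys are taken from the diff, while the literal values are hypothetical:

import { z } from 'zod';

const exampleFlags = z.object({
  enableBroadcastChannelProvider: z.boolean(),
  changelogUrl: z.string(),
});

// throws a ZodError if the shape does not match
const parsed = exampleFlags.parse({
  enableBroadcastChannelProvider: true,
  changelogUrl: 'https://example.com/changelog', // hypothetical value
});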

View File

@@ -1,7 +1,7 @@
import { assertExists } from '@blocksuite/global/utils';
export class UaHelper {
private readonly uaMap;
private uaMap;
public isLinux = false;
public isMacOs = false;
public isSafari = false;

View File

@@ -1,3 +1,5 @@
import type { EditorContainer } from '@blocksuite/editor';
import type { Page } from '@blocksuite/store';
import type {
ActiveDocProvider,
PassiveDocProvider,
@@ -109,7 +111,7 @@ export const settingPanel = {
Export: 'export',
Sync: 'sync',
} as const;
export const settingPanelValues = Object.values(settingPanel);
export const settingPanelValues = [...Object.values(settingPanel)] as const;
export type SettingPanel = (typeof settingPanel)[keyof typeof settingPanel];
// built-in workspaces
@@ -132,6 +134,18 @@ type UIBaseProps<_Flavour extends keyof WorkspaceRegistry> = {
currentWorkspaceId: string;
};
export type WorkspaceHeaderProps<Flavour extends keyof WorkspaceRegistry> =
UIBaseProps<Flavour> & {
rightSlot?: ReactNode;
currentEntry:
| {
subPath: WorkspaceSubPath;
}
| {
pageId: string;
};
};
type NewSettingProps<Flavour extends keyof WorkspaceRegistry> =
UIBaseProps<Flavour> & {
onDeleteLocalWorkspace: () => void;
@@ -147,11 +161,18 @@ type NewSettingProps<Flavour extends keyof WorkspaceRegistry> =
) => void;
};
type PageDetailProps<Flavour extends keyof WorkspaceRegistry> =
UIBaseProps<Flavour> & {
currentPageId: string;
onLoadEditor: (page: Page, editor: EditorContainer) => () => void;
};
interface FC<P> {
(props: P): ReactNode;
}
export interface WorkspaceUISchema<Flavour extends keyof WorkspaceRegistry> {
PageDetail: FC<PageDetailProps<Flavour>>;
NewSettingsDetail: FC<NewSettingProps<Flavour>>;
Provider: FC<PropsWithChildren>;
LoginCard?: FC<object>;

View File

@@ -7,6 +7,9 @@
"outDir": "lib"
},
"references": [
{
"path": "../infra"
},
{
"path": "../../../tests/fixtures"
}

View File

@@ -54,12 +54,10 @@
"dev": "vite build --watch"
},
"dependencies": {
"@affine/debug": "workspace:*",
"@affine/env": "workspace:*",
"@affine/sdk": "workspace:*",
"@blocksuite/blocks": "0.11.0-nightly-202312070955-2b5bb47",
"@blocksuite/global": "0.11.0-nightly-202312070955-2b5bb47",
"@blocksuite/store": "0.11.0-nightly-202312070955-2b5bb47",
"@blocksuite/blocks": "0.0.0-20231122113751-6bf81eb3-nightly",
"@blocksuite/global": "0.0.0-20231122113751-6bf81eb3-nightly",
"@blocksuite/store": "0.0.0-20231122113751-6bf81eb3-nightly",
"jotai": "^2.5.1",
"jotai-effect": "^0.2.3",
"tinykeys": "^2.1.0",
@@ -68,22 +66,22 @@
"devDependencies": {
"@affine-test/fixtures": "workspace:*",
"@affine/templates": "workspace:*",
"@blocksuite/lit": "0.11.0-nightly-202312070955-2b5bb47",
"@blocksuite/presets": "0.11.0-nightly-202312070955-2b5bb47",
"@blocksuite/editor": "0.0.0-20231122113751-6bf81eb3-nightly",
"@blocksuite/lit": "0.0.0-20231122113751-6bf81eb3-nightly",
"@testing-library/react": "^14.0.0",
"async-call-rpc": "^6.3.1",
"electron": "link:../../frontend/electron/node_modules/electron",
"nanoid": "^5.0.3",
"react": "^18.2.0",
"rxjs": "^7.8.1",
"vite": "^5.0.6",
"vite": "^4.4.11",
"vite-plugin-dts": "3.6.0",
"vitest": "0.34.6",
"yjs": "^13.6.10"
},
"peerDependencies": {
"@affine/templates": "*",
"@blocksuite/presets": "*",
"@blocksuite/editor": "*",
"async-call-rpc": "*",
"electron": "*",
"react": "*",
@@ -93,7 +91,7 @@
"@affine/templates": {
"optional": true
},
"@blocksuite/presets": {
"@blocksuite/editor": {
"optional": true
},
"async-call-rpc": {
@@ -109,5 +107,5 @@
"optional": true
}
},
"version": "0.10.4-beta.2"
"version": "0.10.3"
}

View File

@@ -1,5 +1,34 @@
import type { ExpectedLayout } from '@affine/sdk/entry';
import { atom } from 'jotai';
import { assertExists } from '@blocksuite/global/utils';
import type { Workspace } from '@blocksuite/store';
import { atom, createStore } from 'jotai/vanilla';
import { getBlockSuiteWorkspaceAtom } from './__internal__/workspace';
// global store
let rootStore = createStore();
export function getCurrentStore() {
return rootStore;
}
/**
* @internal do not use this function unless you know what you are doing
*/
export function _setCurrentStore(store: ReturnType<typeof createStore>) {
rootStore = store;
}
export const loadedPluginNameAtom = atom<string[]>([]);
export const currentWorkspaceIdAtom = atom<string | null>(null);
export const currentPageIdAtom = atom<string | null>(null);
export const currentWorkspaceAtom = atom<Promise<Workspace>>(async get => {
const workspaceId = get(currentWorkspaceIdAtom);
assertExists(workspaceId);
const [currentWorkspaceAtom] = getBlockSuiteWorkspaceAtom(workspaceId);
return get(currentWorkspaceAtom);
});
const contentLayoutBaseAtom = atom<ExpectedLayout>('editor');
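Illustrative usage of the module-level root store defined above, assuming `getCurrentStore` and the `atom` import from this file; `exampleAtom` is hypothetical. The point of the pattern is reading and writing atoms outside React components:

const exampleAtom = atom<number>(0); // hypothetical atom
const store = getCurrentStore();
store.set(exampleAtom, 42);
console.log(store.get(exampleAtom)); // 42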

View File

@@ -1,8 +0,0 @@
import { atom } from 'jotai';
export const loadedPluginNameAtom = atom<string[]>([]);
export * from './layout';
export * from './root-store';
export * from './settings';
export * from './workspace';

View File

@@ -1,15 +0,0 @@
import { createStore } from 'jotai';
// global store
let rootStore = createStore();
export function getCurrentStore() {
return rootStore;
}
/**
* @internal do not use this function unless you know what you are doing
*/
export function _setCurrentStore(store: ReturnType<typeof createStore>) {
rootStore = store;
}

View File

@@ -1,14 +0,0 @@
import { assertExists } from '@blocksuite/global/utils';
import type { Workspace } from '@blocksuite/store';
import { atom } from 'jotai';
import { getBlockSuiteWorkspaceAtom } from '../__internal__/workspace';
export const currentWorkspaceIdAtom = atom<string | null>(null);
export const currentPageIdAtom = atom<string | null>(null);
export const currentWorkspaceAtom = atom<Promise<Workspace>>(async get => {
const workspaceId = get(currentWorkspaceIdAtom);
assertExists(workspaceId);
const [currentWorkspaceAtom] = getBlockSuiteWorkspaceAtom(workspaceId);
return get(currentWorkspaceAtom);
});

View File

@@ -3,11 +3,8 @@ import type { Page, PageMeta, Workspace } from '@blocksuite/store';
import type { createStore, WritableAtom } from 'jotai/vanilla';
import { nanoid } from 'nanoid';
import { checkWorkspaceCompatibility, MigrationPoint } from '..';
import { migratePages } from '../migration/blocksuite';
import {
checkWorkspaceCompatibility,
MigrationPoint,
} from '../migration/workspace';
export async function initEmptyPage(page: Page, title?: string) {
await page.load(() => {

View File

@@ -50,7 +50,7 @@ function migrateDatabase(data: YMap<unknown>) {
update: (cell: { id: string; value: unknown }) => void
) => {
Object.values(cells).forEach(row => {
if (row[id] !== null && row[id] !== undefined) {
if (row[id] != null) {
update(row[id]);
}
});

View File

@@ -1,4 +1,3 @@
import { DebugLogger } from '@affine/debug';
// @ts-expect-error upstream type is wrong
import { tinykeys } from 'tinykeys';
@@ -8,14 +7,12 @@ import {
createAffineCommand,
} from './command';
const commandLogger = new DebugLogger('command:registry');
export const AffineCommandRegistry = new (class {
readonly commands: Map<string, AffineCommand> = new Map();
register(options: AffineCommandOptions) {
if (this.commands.has(options.id)) {
commandLogger.warn(`Command ${options.id} already registered.`);
console.warn(`Command ${options.id} already registered.`);
return () => {};
}
const command = createAffineCommand(options);
@@ -41,17 +38,17 @@ export const AffineCommandRegistry = new (class {
});
}
commandLogger.debug(`Registered command ${command.id}`);
console.debug(`Registered command ${command.id}`);
return () => {
unsubKb?.();
this.commands.delete(command.id);
commandLogger.debug(`Unregistered command ${command.id}`);
console.debug(`Unregistered command ${command.id}`);
};
}
get(id: string): AffineCommand | undefined {
if (!this.commands.has(id)) {
commandLogger.warn(`Command ${id} not registered.`);
console.warn(`Command ${id} not registered.`);
return undefined;
}
return this.commands.get(id);

View File

@@ -45,8 +45,8 @@ export abstract class HandlerManager<
Handlers extends Record<string, PrimitiveHandlers>,
> {
static instance: HandlerManager<string, Record<string, PrimitiveHandlers>>;
private readonly _app: App<Namespace, Handlers>;
private readonly _namespace: Namespace;
private _app: App<Namespace, Handlers>;
private _namespace: Namespace;
private _handlers: Handlers;
constructor() {
@@ -190,18 +190,10 @@ export interface UpdateMeta {
allowAutoUpdate: boolean;
}
export type UpdaterConfig = {
autoCheckUpdate: boolean;
autoDownloadUpdate: boolean;
};
export type UpdaterHandlers = {
currentVersion: () => Promise<string>;
quitAndInstall: () => Promise<void>;
downloadUpdate: () => Promise<void>;
getConfig: () => Promise<UpdaterConfig>;
setConfig: (newConfig: Partial<UpdaterConfig>) => Promise<void>;
checkForUpdates: () => Promise<{ version: string } | null>;
checkForUpdatesAndNotify: () => Promise<{ version: string } | null>;
};
export type WorkspaceHandlers = {

View File

@@ -11,12 +11,6 @@
{
"path": "../sdk"
},
{
"path": "../env"
},
{
"path": "../debug"
},
{
"path": "./tsconfig.node.json"
}

View File

@@ -13,7 +13,7 @@ export default defineConfig({
entry: {
blocksuite: resolve(root, 'src/blocksuite/index.ts'),
index: resolve(root, 'src/index.ts'),
atom: resolve(root, 'src/atom/index.ts'),
atom: resolve(root, 'src/atom.ts'),
command: resolve(root, 'src/command/index.ts'),
type: resolve(root, 'src/type.ts'),
'core/event-emitter': resolve(root, 'src/core/event-emitter.ts'),

View File

@@ -1,6 +1,6 @@
{
"name": "@affine/sdk",
"version": "0.10.4-beta.2",
"version": "0.10.3",
"type": "module",
"scripts": {
"build": "vite build",
@@ -22,16 +22,16 @@
"dist"
],
"dependencies": {
"@blocksuite/block-std": "0.11.0-nightly-202312070955-2b5bb47",
"@blocksuite/blocks": "0.11.0-nightly-202312070955-2b5bb47",
"@blocksuite/global": "0.11.0-nightly-202312070955-2b5bb47",
"@blocksuite/presets": "0.11.0-nightly-202312070955-2b5bb47",
"@blocksuite/store": "0.11.0-nightly-202312070955-2b5bb47",
"@blocksuite/block-std": "0.0.0-20231122113751-6bf81eb3-nightly",
"@blocksuite/blocks": "0.0.0-20231122113751-6bf81eb3-nightly",
"@blocksuite/editor": "0.0.0-20231122113751-6bf81eb3-nightly",
"@blocksuite/global": "0.0.0-20231122113751-6bf81eb3-nightly",
"@blocksuite/store": "0.0.0-20231122113751-6bf81eb3-nightly",
"jotai": "^2.5.1",
"zod": "^3.22.4"
},
"devDependencies": {
"vite": "^5.0.6",
"vite": "^4.4.11",
"vite-plugin-dts": "3.6.0"
}
}

View File

@@ -1,5 +1,5 @@
import type { BaseSelection } from '@blocksuite/block-std';
import type { EditorContainer } from '@blocksuite/presets';
import type { EditorContainer } from '@blocksuite/editor';
import type { Page } from '@blocksuite/store';
import type { Workspace } from '@blocksuite/store';
import type { Atom, getDefaultStore } from 'jotai/vanilla';

View File

@@ -1,7 +1,7 @@
{
"name": "@toeverything/y-indexeddb",
"type": "module",
"version": "0.10.4-beta.2",
"version": "0.10.3",
"description": "IndexedDB database adapter for Yjs",
"repository": "toeverything/AFFiNE",
"author": "toeverything",
@@ -32,15 +32,15 @@
}
},
"dependencies": {
"idb": "^8.0.0",
"idb": "^7.1.1",
"nanoid": "^5.0.3",
"y-provider": "workspace:*"
},
"devDependencies": {
"@blocksuite/blocks": "0.11.0-nightly-202312070955-2b5bb47",
"@blocksuite/store": "0.11.0-nightly-202312070955-2b5bb47",
"@blocksuite/blocks": "0.0.0-20231122113751-6bf81eb3-nightly",
"@blocksuite/store": "0.0.0-20231122113751-6bf81eb3-nightly",
"fake-indexeddb": "^5.0.0",
"vite": "^5.0.6",
"vite": "^4.4.11",
"vite-plugin-dts": "3.6.0",
"vitest": "0.34.6",
"y-indexeddb": "^9.0.11",

View File

@@ -65,7 +65,7 @@ beforeEach(() => {
id = nanoid();
workspace = new Workspace({
id,
isSSR: true,
schema,
});
vi.useFakeTimers({ toFake: ['requestIdleCallback'] });
@@ -383,7 +383,7 @@ describe('subDoc', () => {
{
const newWorkspace = new Workspace({
id,
isSSR: true,
schema,
});
const provider = createIndexedDBProvider(newWorkspace.doc, rootDBName);
@@ -423,7 +423,7 @@ describe('utils', () => {
expect(update).toBeInstanceOf(Uint8Array);
const newWorkspace = new Workspace({
id,
isSSR: true,
schema,
});
applyUpdate(newWorkspace.doc, update);

View File

@@ -1,7 +1,7 @@
{
"name": "y-provider",
"type": "module",
"version": "0.10.4-beta.2",
"version": "0.10.3",
"description": "Yjs provider protocol for multi document support",
"exports": {
".": "./src/index.ts"
@@ -24,8 +24,8 @@
"build": "vite build"
},
"devDependencies": {
"@blocksuite/store": "0.11.0-nightly-202312070955-2b5bb47",
"vite": "^5.0.6",
"@blocksuite/store": "0.0.0-20231122113751-6bf81eb3-nightly",
"vite": "^4.4.11",
"vite-plugin-dts": "3.6.0",
"vitest": "0.34.6",
"yjs": "^13.6.10"

View File

@@ -1,54 +0,0 @@
import { StorybookConfig } from '@storybook/react-vite';
import { vanillaExtractPlugin } from '@vanilla-extract/vite-plugin';
import { fileURLToPath } from 'url';
import { mergeConfig } from 'vite';
import tsconfigPaths from 'vite-tsconfig-paths';
import { getRuntimeConfig } from '../../core/.webpack/runtime-config';
export default {
stories: ['../src/ui/**/*.stories.@(js|jsx|ts|tsx|mdx)'],
addons: [
'@storybook/addon-links',
'@storybook/addon-essentials',
'@storybook/addon-interactions',
'@storybook/addon-mdx-gfm',
'storybook-dark-mode',
],
framework: {
name: '@storybook/react-vite',
options: {},
},
features: {
storyStoreV7: true,
},
docs: {
autodocs: true,
},
async viteFinal(config, _options) {
return mergeConfig(config, {
plugins: [
vanillaExtractPlugin(),
tsconfigPaths({
root: fileURLToPath(new URL('../../../../', import.meta.url)),
ignoreConfigErrors: true,
}),
],
define: {
'process.on': '(() => void 0)',
'process.env': {},
'process.env.COVERAGE': JSON.stringify(!!process.env.COVERAGE),
'process.env.SHOULD_REPORT_TRACE': `${Boolean(
process.env.SHOULD_REPORT_TRACE === 'true'
)}`,
'process.env.TRACE_REPORT_ENDPOINT': `"${process.env.TRACE_REPORT_ENDPOINT}"`,
'process.env.CAPTCHA_SITE_KEY': `"${process.env.CAPTCHA_SITE_KEY}"`,
runtimeConfig: getRuntimeConfig({
distribution: 'browser',
mode: 'development',
channel: 'canary',
coverage: false,
}),
},
});
},
} satisfies StorybookConfig;

View File

@@ -1,27 +0,0 @@
import { darkCssVariables, lightCssVariables } from '@toeverything/theme';
import { globalStyle } from '@vanilla-extract/css';
globalStyle('*', {
margin: 0,
padding: 0,
});
globalStyle('body', {
color: 'var(--affine-text-primary-color)',
fontFamily: 'var(--affine-font-family)',
fontSize: 'var(--affine-font-base)',
lineHeight: 'var(--affine-font-height)',
backgroundColor: 'var(--affine-background-primary-color)',
});
globalStyle('html', {
vars: lightCssVariables,
});
globalStyle('html[data-theme="dark"]', {
vars: darkCssVariables,
});
globalStyle('.docs-story', {
backgroundColor: 'var(--affine-background-primary-color)',
});

Some files were not shown because too many files have changed in this diff.