Merge branch 'canary' into fix/issue-11515

This commit is contained in:
DarkSky
2026-03-11 15:08:09 +08:00
committed by GitHub
324 changed files with 15723 additions and 9621 deletions

View File

@@ -19,3 +19,8 @@ rustflags = [
# pthread_key_create() destructors and segfault after a DSO unloading
[target.'cfg(all(target_env = "gnu", not(target_os = "windows")))']
rustflags = ["-C", "link-args=-Wl,-z,nodelete"]
# Temporary local llm_adapter override.
# Uncomment when verifying AFFiNE against the sibling llm_adapter workspace.
# [patch.crates-io]
# llm_adapter = { path = "../llm_adapter" }

View File

@@ -197,8 +197,8 @@
"properties": {
"SMTP.name": {
"type": "string",
"description": "Name of the email server (e.g. your domain name)\n@default \"AFFiNE Server\"\n@environment `MAILER_SERVERNAME`",
"default": "AFFiNE Server"
"description": "Hostname used for SMTP HELO/EHLO (e.g. mail.example.com). Leave empty to use the system hostname.\n@default \"\"\n@environment `MAILER_SERVERNAME`",
"default": ""
},
"SMTP.host": {
"type": "string",
@@ -237,8 +237,8 @@
},
"fallbackSMTP.name": {
"type": "string",
"description": "Name of the fallback email server (e.g. your domain name)\n@default \"AFFiNE Server\"",
"default": "AFFiNE Server"
"description": "Hostname used for fallback SMTP HELO/EHLO (e.g. mail.example.com). Leave empty to use the system hostname.\n@default \"\"",
"default": ""
},
"fallbackSMTP.host": {
"type": "string",
@@ -971,7 +971,7 @@
},
"scenarios": {
"type": "object",
"description": "Use custom models in scenarios and override default settings.\n@default {\"override_enabled\":false,\"scenarios\":{\"audio_transcribing\":\"gemini-2.5-flash\",\"chat\":\"gemini-2.5-flash\",\"embedding\":\"gemini-embedding-001\",\"image\":\"gpt-image-1\",\"rerank\":\"gpt-4.1\",\"coding\":\"claude-sonnet-4-5@20250929\",\"complex_text_generation\":\"gpt-4o-2024-08-06\",\"quick_decision_making\":\"gpt-5-mini\",\"quick_text_generation\":\"gemini-2.5-flash\",\"polish_and_summarize\":\"gemini-2.5-flash\"}}",
"description": "Use custom models in scenarios and override default settings.\n@default {\"override_enabled\":false,\"scenarios\":{\"audio_transcribing\":\"gemini-2.5-flash\",\"chat\":\"gemini-2.5-flash\",\"embedding\":\"gemini-embedding-001\",\"image\":\"gpt-image-1\",\"coding\":\"claude-sonnet-4-5@20250929\",\"complex_text_generation\":\"gpt-5-mini\",\"quick_decision_making\":\"gpt-5-mini\",\"quick_text_generation\":\"gemini-2.5-flash\",\"polish_and_summarize\":\"gemini-2.5-flash\"}}",
"default": {
"override_enabled": false,
"scenarios": {
@@ -979,15 +979,24 @@
"chat": "gemini-2.5-flash",
"embedding": "gemini-embedding-001",
"image": "gpt-image-1",
"rerank": "gpt-4.1",
"coding": "claude-sonnet-4-5@20250929",
"complex_text_generation": "gpt-4o-2024-08-06",
"complex_text_generation": "gpt-5-mini",
"quick_decision_making": "gpt-5-mini",
"quick_text_generation": "gemini-2.5-flash",
"polish_and_summarize": "gemini-2.5-flash"
}
}
},
"providers.profiles": {
"type": "array",
"description": "The profile list for copilot providers.\n@default []",
"default": []
},
"providers.defaults": {
"type": "object",
"description": "The default provider ids for model output types and global fallback.\n@default {}",
"default": {}
},
"providers.openai": {
"type": "object",
"description": "The config for the openai provider.\n@default {\"apiKey\":\"\",\"baseURL\":\"https://api.openai.com/v1\"}\n@link https://github.com/openai/openai-node",

View File

@@ -50,8 +50,14 @@ runs:
# https://github.com/tree-sitter/tree-sitter/issues/4186
# pass -D_BSD_SOURCE to clang to fix the tree-sitter build issue
run: |
echo "CC=clang -D_BSD_SOURCE" >> "$GITHUB_ENV"
echo "TARGET_CC=clang -D_BSD_SOURCE" >> "$GITHUB_ENV"
if [[ "${{ inputs.target }}" == "aarch64-unknown-linux-gnu" ]]; then
# napi cross-toolchain 1.0.3 headers miss AT_HWCAP2 in elf.h
echo "CC=clang -D_BSD_SOURCE -DAT_HWCAP2=26" >> "$GITHUB_ENV"
echo "TARGET_CC=clang -D_BSD_SOURCE -DAT_HWCAP2=26" >> "$GITHUB_ENV"
else
echo "CC=clang -D_BSD_SOURCE" >> "$GITHUB_ENV"
echo "TARGET_CC=clang -D_BSD_SOURCE" >> "$GITHUB_ENV"
fi
- name: Cache cargo
uses: Swatinem/rust-cache@v2

View File

@@ -53,7 +53,7 @@ runs:
fi
- name: Setup Node.js
uses: actions/setup-node@v4
uses: actions/setup-node@v6
with:
node-version-file: '.nvmrc'
registry-url: https://npm.pkg.github.com
@@ -93,7 +93,7 @@ runs:
run: node -e "const p = $(yarn config cacheFolder --json).effective; console.log('yarn_global_cache=' + p)" >> $GITHUB_OUTPUT
- name: Cache non-full yarn cache on Linux
uses: actions/cache@v4
uses: actions/cache@v5
if: ${{ inputs.full-cache != 'true' && runner.os == 'Linux' }}
with:
path: |
@@ -105,7 +105,7 @@ runs:
# and the decompression performance on Windows is very terrible
# so we reduce the number of cached files on non-Linux systems by remove node_modules from cache path.
- name: Cache non-full yarn cache on non-Linux
uses: actions/cache@v4
uses: actions/cache@v5
if: ${{ inputs.full-cache != 'true' && runner.os != 'Linux' }}
with:
path: |
@@ -113,7 +113,7 @@ runs:
key: node_modules-cache-${{ github.job }}-${{ runner.os }}-${{ runner.arch }}-${{ steps.system-info.outputs.name }}-${{ steps.system-info.outputs.release }}-${{ steps.system-info.outputs.version }}
- name: Cache full yarn cache on Linux
uses: actions/cache@v4
uses: actions/cache@v5
if: ${{ inputs.full-cache == 'true' && runner.os == 'Linux' }}
with:
path: |
@@ -122,7 +122,7 @@ runs:
key: node_modules-cache-full-${{ runner.os }}-${{ runner.arch }}-${{ steps.system-info.outputs.name }}-${{ steps.system-info.outputs.release }}-${{ steps.system-info.outputs.version }}
- name: Cache full yarn cache on non-Linux
uses: actions/cache@v4
uses: actions/cache@v5
if: ${{ inputs.full-cache == 'true' && runner.os != 'Linux' }}
with:
path: |
@@ -154,7 +154,7 @@ runs:
# Note: Playwright's cache directory is hard coded because that's what it
# says to do in the docs. There doesn't appear to be a command that prints
# it out for us.
- uses: actions/cache@v4
- uses: actions/cache@v5
id: playwright-cache
if: ${{ inputs.playwright-install == 'true' }}
with:
@@ -189,7 +189,7 @@ runs:
run: |
echo "version=$(yarn why --json electron | grep -h 'workspace:.' | jq --raw-output '.children[].locator' | sed -e 's/@playwright\/test@.*://' | head -n 1)" >> $GITHUB_OUTPUT
- uses: actions/cache@v4
- uses: actions/cache@v5
id: electron-cache
if: ${{ inputs.electron-install == 'true' }}
with:

View File

@@ -31,10 +31,10 @@ podSecurityContext:
resources:
limits:
cpu: '1'
memory: 4Gi
memory: 6Gi
requests:
cpu: '1'
memory: 2Gi
memory: 4Gi
probe:
initialDelaySeconds: 20

View File

@@ -13,5 +13,5 @@ jobs:
pull-requests: write
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/labeler@v5
- uses: actions/checkout@v6
- uses: actions/labeler@v6

View File

@@ -24,7 +24,7 @@ jobs:
runs-on: ubuntu-latest
environment: ${{ inputs.build-type }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Version
uses: ./.github/actions/setup-version
with:
@@ -57,7 +57,7 @@ jobs:
runs-on: ubuntu-latest
environment: ${{ inputs.build-type }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Version
uses: ./.github/actions/setup-version
with:
@@ -89,7 +89,7 @@ jobs:
runs-on: ubuntu-latest
environment: ${{ inputs.build-type }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Version
uses: ./.github/actions/setup-version
with:
@@ -118,7 +118,7 @@ jobs:
build-server-native:
name: Build Server native - ${{ matrix.targets.name }}
runs-on: ubuntu-latest
runs-on: ubuntu-22.04
environment: ${{ inputs.build-type }}
strategy:
fail-fast: false
@@ -132,7 +132,7 @@ jobs:
file: server-native.armv7.node
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Version
uses: ./.github/actions/setup-version
with:
@@ -166,7 +166,7 @@ jobs:
needs:
- build-server-native
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Version
uses: ./.github/actions/setup-version
with:
@@ -202,7 +202,7 @@ jobs:
- build-mobile
- build-admin
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Download server dist
uses: actions/download-artifact@v4
with:
@@ -222,7 +222,7 @@ jobs:
# setup node without cache configuration
# Prisma cache is not compatible with docker build cache
- name: Setup Node.js
uses: actions/setup-node@v4
uses: actions/setup-node@v6
with:
node-version-file: '.nvmrc'
registry-url: https://npm.pkg.github.com

View File

@@ -46,7 +46,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v4
uses: actions/checkout@v6
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
@@ -67,9 +67,9 @@ jobs:
name: Lint
runs-on: ubuntu-24.04-arm
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Go (for actionlint)
uses: actions/setup-go@v5
uses: actions/setup-go@v6
with:
go-version: 'stable'
- name: Install actionlint
@@ -111,7 +111,7 @@ jobs:
env:
NODE_OPTIONS: --max-old-space-size=14384
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
@@ -138,7 +138,7 @@ jobs:
outputs:
run-rust: ${{ steps.rust-filter.outputs.rust }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- uses: dorny/paths-filter@v3
id: rust-filter
@@ -159,7 +159,7 @@ jobs:
needs:
- rust-test-filter
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- uses: ./.github/actions/build-rust
with:
target: x86_64-unknown-linux-gnu
@@ -182,7 +182,7 @@ jobs:
needs:
- build-server-native
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
@@ -212,7 +212,7 @@ jobs:
name: Check yarn binary
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Run check
run: |
set -euo pipefail
@@ -226,9 +226,9 @@ jobs:
strategy:
fail-fast: false
matrix:
shard: [1, 2]
shard: [1, 2, 3, 4, 5]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
@@ -256,7 +256,7 @@ jobs:
name: E2E BlockSuite Cross Browser Test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
@@ -282,52 +282,6 @@ jobs:
path: ./test-results
if-no-files-found: ignore
bundler-matrix:
name: Bundler Matrix (${{ matrix.bundler }})
runs-on: ubuntu-24.04-arm
strategy:
fail-fast: false
matrix:
bundler: [webpack, rspack]
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: false
electron-install: false
full-cache: true
- name: Run frontend build matrix
env:
AFFINE_BUNDLER: ${{ matrix.bundler }}
run: |
set -euo pipefail
packages=(
"@affine/web"
"@affine/mobile"
"@affine/ios"
"@affine/android"
"@affine/admin"
"@affine/electron-renderer"
)
summary="test-results-bundler-${AFFINE_BUNDLER}.txt"
: > "$summary"
for pkg in "${packages[@]}"; do
start=$(date +%s)
yarn affine "$pkg" build
end=$(date +%s)
echo "${pkg},$((end-start))" >> "$summary"
done
- name: Upload bundler timing
if: always()
uses: actions/upload-artifact@v4
with:
name: test-results-bundler-${{ matrix.bundler }}
path: ./test-results-bundler-${{ matrix.bundler }}.txt
if-no-files-found: ignore
e2e-test:
name: E2E Test
runs-on: ubuntu-24.04-arm
@@ -340,7 +294,7 @@ jobs:
matrix:
shard: [1, 2, 3, 4, 5]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
@@ -372,7 +326,7 @@ jobs:
matrix:
shard: [1, 2]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
@@ -402,9 +356,9 @@ jobs:
strategy:
fail-fast: false
matrix:
shard: [1, 2, 3]
shard: [1, 2, 3, 4, 5]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
@@ -437,7 +391,7 @@ jobs:
env:
CARGO_PROFILE_RELEASE_DEBUG: '1'
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
@@ -476,7 +430,7 @@ jobs:
- { os: macos-latest, target: aarch64-apple-darwin }
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
@@ -517,7 +471,7 @@ jobs:
- { os: windows-latest, target: aarch64-pc-windows-msvc }
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- uses: samypr100/setup-dev-drive@v3
with:
workspace-copy: true
@@ -557,7 +511,7 @@ jobs:
env:
CARGO_PROFILE_RELEASE_DEBUG: '1'
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
@@ -580,7 +534,7 @@ jobs:
name: Build @affine/electron renderer
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
@@ -607,7 +561,7 @@ jobs:
needs:
- build-native-linux
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
@@ -661,7 +615,7 @@ jobs:
ports:
- 9308:9308
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Node.js
uses: ./.github/actions/setup-node
@@ -742,7 +696,7 @@ jobs:
stack-version: 9.0.1
security-enabled: false
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Node.js
uses: ./.github/actions/setup-node
@@ -805,7 +759,7 @@ jobs:
ports:
- 9308:9308
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Node.js
uses: ./.github/actions/setup-node
@@ -846,7 +800,7 @@ jobs:
CARGO_TERM_COLOR: always
MIRIFLAGS: -Zmiri-backtrace=full -Zmiri-tree-borrows
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Rust
uses: dtolnay/rust-toolchain@stable
@@ -874,7 +828,7 @@ jobs:
RUST_BACKTRACE: full
CARGO_TERM_COLOR: always
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Rust
uses: dtolnay/rust-toolchain@stable
@@ -898,7 +852,7 @@ jobs:
env:
CARGO_TERM_COLOR: always
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Rust
uses: dtolnay/rust-toolchain@stable
@@ -937,7 +891,7 @@ jobs:
env:
CARGO_TERM_COLOR: always
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Rust
uses: ./.github/actions/build-rust
with:
@@ -960,7 +914,7 @@ jobs:
run-api: ${{ steps.decision.outputs.run_api }}
run-e2e: ${{ steps.decision.outputs.run_e2e }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- uses: dorny/paths-filter@v3
id: copilot-filter
@@ -1029,7 +983,7 @@ jobs:
ports:
- 9308:9308
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Node.js
uses: ./.github/actions/setup-node
@@ -1102,7 +1056,7 @@ jobs:
ports:
- 9308:9308
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Node.js
uses: ./.github/actions/setup-node
@@ -1185,7 +1139,7 @@ jobs:
ports:
- 9308:9308
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Node.js
uses: ./.github/actions/setup-node
@@ -1266,7 +1220,7 @@ jobs:
test: true,
}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Node.js
uses: ./.github/actions/setup-node
timeout-minutes: 10

View File

@@ -10,7 +10,7 @@ jobs:
env:
CARGO_PROFILE_RELEASE_DEBUG: '1'
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
@@ -64,7 +64,7 @@ jobs:
ports:
- 9308:9308
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Node.js
uses: ./.github/actions/setup-node
@@ -134,7 +134,7 @@ jobs:
ports:
- 9308:9308
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Node.js
uses: ./.github/actions/setup-node
@@ -167,7 +167,7 @@ jobs:
runs-on: ubuntu-latest
name: Post test result message
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
with:
fetch-depth: 0
- name: Setup Node.js

View File

@@ -18,9 +18,9 @@ jobs:
runs-on: ubuntu-latest
if: ${{ github.event.action != 'edited' || github.event.changes.title != null }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Node.js
uses: actions/setup-node@v4
uses: actions/setup-node@v6
with:
cache: 'yarn'
node-version-file: '.nvmrc'

View File

@@ -35,7 +35,7 @@ jobs:
- build-images
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Deploy to ${{ inputs.build-type }}
uses: ./.github/actions/deploy
with:

View File

@@ -69,7 +69,7 @@ jobs:
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
SENTRY_RELEASE: ${{ inputs.app_version }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Version
uses: ./.github/actions/setup-version
@@ -101,7 +101,7 @@ jobs:
- name: Signing By Apple Developer ID
if: ${{ inputs.platform == 'darwin' && inputs.apple_codesign }}
uses: apple-actions/import-codesign-certs@v5
uses: apple-actions/import-codesign-certs@v6
with:
p12-file-base64: ${{ secrets.CERTIFICATES_P12 }}
p12-password: ${{ secrets.CERTIFICATES_P12_PASSWORD }}
@@ -178,14 +178,14 @@ jobs:
mv packages/frontend/apps/electron/out/*/make/deb/${{ inputs.arch }}/*.deb ./builds/affine-${{ env.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-linux-${{ inputs.arch }}.deb
mv packages/frontend/apps/electron/out/*/make/flatpak/*/*.flatpak ./builds/affine-${{ env.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-linux-${{ inputs.arch }}.flatpak
- uses: actions/attest-build-provenance@v2
- uses: actions/attest-build-provenance@v4
if: ${{ inputs.platform == 'darwin' }}
with:
subject-path: |
./builds/affine-${{ env.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-macos-${{ inputs.arch }}.zip
./builds/affine-${{ env.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-macos-${{ inputs.arch }}.dmg
- uses: actions/attest-build-provenance@v2
- uses: actions/attest-build-provenance@v4
if: ${{ inputs.platform == 'linux' }}
with:
subject-path: |

View File

@@ -48,7 +48,7 @@ jobs:
runs-on: ubuntu-latest
environment: ${{ inputs.build-type }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Version
uses: ./.github/actions/setup-version
with:
@@ -187,7 +187,7 @@ jobs:
FILES_TO_BE_SIGNED_x64: ${{ steps.get_files_to_be_signed.outputs.FILES_TO_BE_SIGNED_x64 }}
FILES_TO_BE_SIGNED_arm64: ${{ steps.get_files_to_be_signed.outputs.FILES_TO_BE_SIGNED_arm64 }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Version
uses: ./.github/actions/setup-version
with:
@@ -344,7 +344,7 @@ jobs:
mv packages/frontend/apps/electron/out/*/make/squirrel.windows/${{ matrix.spec.arch }}/*.exe ./builds/affine-${{ env.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-${{ matrix.spec.arch }}.exe
mv packages/frontend/apps/electron/out/*/make/nsis.windows/${{ matrix.spec.arch }}/*.exe ./builds/affine-${{ env.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-${{ matrix.spec.arch }}.nsis.exe
- uses: actions/attest-build-provenance@v2
- uses: actions/attest-build-provenance@v4
with:
subject-path: |
./builds/affine-${{ env.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-${{ matrix.spec.arch }}.zip
@@ -369,7 +369,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Download Artifacts (macos-x64)
uses: actions/download-artifact@v4
with:
@@ -395,7 +395,7 @@ jobs:
with:
name: affine-linux-x64-builds
path: ./release
- uses: actions/setup-node@v4
- uses: actions/setup-node@v6
with:
node-version: 20
- name: Copy Selfhost Release Files

View File

@@ -26,7 +26,7 @@ jobs:
runs-on: ubuntu-latest
environment: ${{ inputs.build-type }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Version
uses: ./.github/actions/setup-version
with:
@@ -54,7 +54,7 @@ jobs:
build-android-web:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Version
uses: ./.github/actions/setup-version
with:
@@ -83,7 +83,7 @@ jobs:
needs:
- build-ios-web
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Version
uses: ./.github/actions/setup-version
with:
@@ -114,7 +114,7 @@ jobs:
- name: Cap sync
run: yarn workspace @affine/ios sync
- name: Signing By Apple Developer ID
uses: apple-actions/import-codesign-certs@v5
uses: apple-actions/import-codesign-certs@v6
id: import-codesign-certs
with:
p12-file-base64: ${{ secrets.CERTIFICATES_P12_MOBILE }}
@@ -147,7 +147,7 @@ jobs:
needs:
- build-android-web
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Setup Version
uses: ./.github/actions/setup-version
with:
@@ -180,7 +180,7 @@ jobs:
no-build: 'true'
- name: Cap sync
run: yarn workspace @affine/android cap sync
- uses: actions/setup-python@v5
- uses: actions/setup-python@v6
with:
python-version: '3.13'
- name: Auth gcloud
@@ -192,7 +192,7 @@ jobs:
token_format: 'access_token'
project_id: '${{ secrets.GCP_PROJECT_ID }}'
access_token_scopes: 'https://www.googleapis.com/auth/androidpublisher'
- uses: actions/setup-java@v4
- uses: actions/setup-java@v5
with:
distribution: 'temurin'
java-version: '21'

View File

@@ -55,7 +55,7 @@ jobs:
GIT_SHORT_HASH: ${{ steps.prepare.outputs.GIT_SHORT_HASH }}
BUILD_TYPE: ${{ steps.prepare.outputs.BUILD_TYPE }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Prepare Release
id: prepare
uses: ./.github/actions/prepare-release
@@ -72,7 +72,7 @@ jobs:
steps:
- name: Decide whether to release
id: decide
uses: actions/github-script@v7
uses: actions/github-script@v8
with:
script: |
const buildType = '${{ needs.prepare.outputs.BUILD_TYPE }}'

1
.gitignore vendored
View File

@@ -48,6 +48,7 @@ testem.log
/typings
tsconfig.tsbuildinfo
.context
/*.md
# System Files
.DS_Store

2
.nvmrc
View File

@@ -1 +1 @@
22.22.0
22.22.1

327
Cargo.lock generated
View File

@@ -178,9 +178,15 @@ name = "affine_server_native"
version = "1.0.0"
dependencies = [
"affine_common",
"anyhow",
"chrono",
"file-format",
"image",
"infer",
"libwebp-sys",
"little_exif",
"llm_adapter",
"matroska",
"mimalloc",
"mp4parse",
"napi",
@@ -188,6 +194,8 @@ dependencies = [
"napi-derive",
"rand 0.9.2",
"rayon",
"serde",
"serde_json",
"sha3",
"tiktoken-rs",
"tokio",
@@ -232,6 +240,21 @@ dependencies = [
"memchr",
]
[[package]]
name = "alloc-no-stdlib"
version = "2.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3"
[[package]]
name = "alloc-stdlib"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece"
dependencies = [
"alloc-no-stdlib",
]
[[package]]
name = "allocator-api2"
version = "0.2.21"
@@ -245,7 +268,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed7572b7ba83a31e20d1b48970ee402d2e3e0537dcfe0a3ff4d6eb7508617d43"
dependencies = [
"alsa-sys",
"bitflags 2.10.0",
"bitflags 2.11.0",
"cfg-if",
"libc",
]
@@ -533,7 +556,7 @@ version = "0.72.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "993776b509cfb49c750f11b8f07a46fa23e0a1386ffc01fb1e7d343efc387895"
dependencies = [
"bitflags 2.10.0",
"bitflags 2.11.0",
"cexpr",
"clang-sys",
"itertools 0.13.0",
@@ -583,9 +606,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
version = "2.10.0"
version = "2.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3"
checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af"
dependencies = [
"serde_core",
]
@@ -599,6 +622,15 @@ dependencies = [
"cfg-if",
]
[[package]]
name = "bitstream-io"
version = "3.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "680575de65ce8b916b82a447458b94a48776707d9c2681a9d8da351c06886a1f"
dependencies = [
"core2",
]
[[package]]
name = "block-buffer"
version = "0.10.4"
@@ -641,6 +673,27 @@ version = "0.9.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "473976d7a8620bb1e06dcdd184407c2363fe4fec8e983ee03ed9197222634a31"
[[package]]
name = "brotli"
version = "8.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4bd8b9603c7aa97359dbd97ecf258968c95f3adddd6db2f7e7a5bef101c84560"
dependencies = [
"alloc-no-stdlib",
"alloc-stdlib",
"brotli-decompressor",
]
[[package]]
name = "brotli-decompressor"
version = "5.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "874bb8112abecc98cbd6d81ea4fa7e94fb9449648c93cc89aa40c81c24d7de03"
dependencies = [
"alloc-no-stdlib",
"alloc-stdlib",
]
[[package]]
name = "bstr"
version = "1.12.1"
@@ -676,6 +729,12 @@ version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
[[package]]
name = "byteorder-lite"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f1fe948ff07f4bd06c30984e69f5b4899c516a3ef74f34df92a2df2ab535495"
[[package]]
name = "bytes"
version = "1.11.1"
@@ -904,6 +963,12 @@ version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d"
[[package]]
name = "color_quant"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b"
[[package]]
name = "colorchoice"
version = "1.0.4"
@@ -983,7 +1048,7 @@ version = "0.24.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fa95a34622365fa5bbf40b20b75dba8dfa8c94c734aea8ac9a5ca38af14316f1"
dependencies = [
"bitflags 2.10.0",
"bitflags 2.11.0",
"core-foundation",
"core-graphics-types",
"foreign-types",
@@ -996,7 +1061,7 @@ version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "064badf302c3194842cf2c5d61f56cc88e54a759313879cdf03abdd27d0c3b97"
dependencies = [
"bitflags 2.10.0",
"bitflags 2.11.0",
"core-foundation",
"core-graphics-types",
"foreign-types",
@@ -1009,7 +1074,7 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d44a101f213f6c4cdc1853d4b78aef6db6bdfa3468798cc1d9912f4735013eb"
dependencies = [
"bitflags 2.10.0",
"bitflags 2.11.0",
"core-foundation",
"libc",
]
@@ -1379,7 +1444,7 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89a09f22a6c6069a18470eb92d2298acf25463f14256d24778e1230d789a2aec"
dependencies = [
"bitflags 2.10.0",
"bitflags 2.11.0",
"block2",
"libc",
"objc2",
@@ -1562,6 +1627,15 @@ dependencies = [
"getrandom 0.2.16",
]
[[package]]
name = "fdeflate"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e6853b52649d4ac5c0bd02320cddc5ba956bdb407c4b75a2c6b75bf51500f8c"
dependencies = [
"simd-adler32",
]
[[package]]
name = "file-format"
version = "0.28.0"
@@ -1833,6 +1907,16 @@ dependencies = [
"wasip2",
]
[[package]]
name = "gif"
version = "0.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f5df2ba84018d80c213569363bdcd0c64e6933c67fe4c1d60ecf822971a3c35e"
dependencies = [
"color_quant",
"weezl",
]
[[package]]
name = "glob"
version = "0.3.3"
@@ -1880,7 +1964,7 @@ version = "1.41.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0c43e7c3212bd992c11b6b9796563388170950521ae8487f5cdf6f6e792f1c8"
dependencies = [
"bitflags 2.10.0",
"bitflags 2.11.0",
"proc-macro2",
"quote",
"syn 1.0.109",
@@ -2003,6 +2087,22 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "http"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a"
dependencies = [
"bytes",
"itoa",
]
[[package]]
name = "httparse"
version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87"
[[package]]
name = "iana-time-zone"
version = "0.1.64"
@@ -2175,6 +2275,34 @@ dependencies = [
"icu_properties",
]
[[package]]
name = "image"
version = "0.25.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6506c6c10786659413faa717ceebcb8f70731c0a60cbae39795fdf114519c1a"
dependencies = [
"bytemuck",
"byteorder-lite",
"color_quant",
"gif",
"image-webp",
"moxcms",
"num-traits",
"png",
"zune-core",
"zune-jpeg",
]
[[package]]
name = "image-webp"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "525e9ff3e1a4be2fbea1fdf0e98686a6d98b4d8f937e1bf7402245af1909e8c3"
dependencies = [
"byteorder-lite",
"quick-error 2.0.1",
]
[[package]]
name = "include-flate"
version = "0.3.1"
@@ -2376,9 +2504,9 @@ dependencies = [
[[package]]
name = "keccak"
version = "0.1.5"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654"
checksum = "cb26cec98cce3a3d96cbb7bced3c4b16e3d13f27ec56dbd62cbc8f39cfb9d653"
dependencies = [
"cpufeatures",
]
@@ -2490,7 +2618,7 @@ version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d0b95e02c851351f877147b7deea7b1afb1df71b63aa5f8270716e0c5720616"
dependencies = [
"bitflags 2.10.0",
"bitflags 2.11.0",
"libc",
"redox_syscall 0.7.0",
]
@@ -2506,6 +2634,17 @@ dependencies = [
"vcpkg",
]
[[package]]
name = "libwebp-sys"
version = "0.14.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "375ca3fbd6d89769361c5d505c9da676eb4128ee471b9fd763144d377a2d30e6"
dependencies = [
"cc",
"glob",
"pkg-config",
]
[[package]]
name = "linux-raw-sys"
version = "0.11.0"
@@ -2518,6 +2657,33 @@ version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77"
[[package]]
name = "little_exif"
version = "0.6.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "21eeb58b22d31be8dc5c625004fcd4b9b385cd3c05df575f523bcca382c51122"
dependencies = [
"brotli",
"crc",
"log",
"miniz_oxide",
"paste",
"quick-xml",
]
[[package]]
name = "llm_adapter"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e98485dda5180cc89b993a001688bed93307be6bd8fedcde445b69bbca4f554d"
dependencies = [
"base64",
"serde",
"serde_json",
"thiserror 2.0.17",
"ureq",
]
[[package]]
name = "lock_api"
version = "0.4.14"
@@ -2555,7 +2721,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59fa2559e99ba0f26a12458aabc754432c805bbb8cba516c427825a997af1fb7"
dependencies = [
"aes",
"bitflags 2.10.0",
"bitflags 2.11.0",
"cbc",
"ecb",
"encoding_rs",
@@ -2642,6 +2808,16 @@ dependencies = [
"regex-automata",
]
[[package]]
name = "matroska"
version = "0.30.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fde85cd7fb5cf875c4a46fac0cbd6567d413bea2538cef6788e3a0e52a902b45"
dependencies = [
"bitstream-io",
"phf 0.11.3",
]
[[package]]
name = "md-5"
version = "0.10.6"
@@ -2711,6 +2887,16 @@ dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "moxcms"
version = "0.7.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac9557c559cd6fc9867e122e20d2cbefc9ca29d80d027a8e39310920ed2f0a97"
dependencies = [
"num-traits",
"pxfm",
]
[[package]]
name = "mp4parse"
version = "0.17.0"
@@ -2741,7 +2927,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "000f205daae6646003fdc38517be6232af2b150bad4b67bdaf4c5aadb119d738"
dependencies = [
"anyhow",
"bitflags 2.10.0",
"bitflags 2.11.0",
"chrono",
"ctor",
"futures",
@@ -2801,7 +2987,7 @@ version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2076a31b7010b17a38c01907c45b945e8f11495ee4dd588309718901b1f7a5b7"
dependencies = [
"bitflags 2.10.0",
"bitflags 2.11.0",
"jni-sys",
"log",
"ndk-sys",
@@ -2836,7 +3022,7 @@ version = "0.30.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6"
dependencies = [
"bitflags 2.10.0",
"bitflags 2.11.0",
"cfg-if",
"cfg_aliases",
"libc",
@@ -3000,7 +3186,7 @@ version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a180dd8642fa45cdb7dd721cd4c11b1cadd4929ce112ebd8b9f5803cc79d536"
dependencies = [
"bitflags 2.10.0",
"bitflags 2.11.0",
"dispatch2",
"objc2",
]
@@ -3017,7 +3203,7 @@ version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3e0adef53c21f888deb4fa59fc59f7eb17404926ee8a6f59f5df0fd7f9f3272"
dependencies = [
"bitflags 2.10.0",
"bitflags 2.11.0",
"block2",
"libc",
"objc2",
@@ -3123,6 +3309,12 @@ dependencies = [
"windows-link 0.2.1",
]
[[package]]
name = "paste"
version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
[[package]]
name = "path-ext"
version = "0.1.2"
@@ -3369,6 +3561,19 @@ dependencies = [
"plotters-backend",
]
[[package]]
name = "png"
version = "0.18.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "60769b8b31b2a9f263dae2776c37b1b28ae246943cf719eb6946a1db05128a61"
dependencies = [
"bitflags 2.11.0",
"crc32fast",
"fdeflate",
"flate2",
"miniz_oxide",
]
[[package]]
name = "pom"
version = "1.1.0"
@@ -3469,7 +3674,7 @@ checksum = "bee689443a2bd0a16ab0348b52ee43e3b2d1b1f931c8aa5c9f8de4c86fbe8c40"
dependencies = [
"bit-set 0.8.0",
"bit-vec 0.8.0",
"bitflags 2.10.0",
"bitflags 2.11.0",
"num-traits",
"rand 0.9.2",
"rand_chacha 0.9.0",
@@ -3497,7 +3702,7 @@ version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e8bbe1a966bd2f362681a44f6edce3c2310ac21e4d5067a6e7ec396297a6ea0"
dependencies = [
"bitflags 2.10.0",
"bitflags 2.11.0",
"getopts",
"memchr",
"pulldown-cmark-escape",
@@ -3510,12 +3715,33 @@ version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "007d8adb5ddab6f8e3f491ac63566a7d5002cc7ed73901f72057943fa71ae1ae"
[[package]]
name = "pxfm"
version = "0.1.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5a041e753da8b807c9255f28de81879c78c876392ff2469cde94799b2896b9d"
[[package]]
name = "quick-error"
version = "1.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
[[package]]
name = "quick-error"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3"
[[package]]
name = "quick-xml"
version = "0.37.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "331e97a1af0bf59823e6eadffe373d7b27f485be8748f71471c662c1f269b7fb"
dependencies = [
"memchr",
]
[[package]]
name = "quote"
version = "1.0.43"
@@ -3663,7 +3889,7 @@ version = "0.5.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d"
dependencies = [
"bitflags 2.10.0",
"bitflags 2.11.0",
]
[[package]]
@@ -3672,7 +3898,7 @@ version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49f3fe0889e69e2ae9e41f4d6c4c0181701d00e4697b356fb1f74173a5e0ee27"
dependencies = [
"bitflags 2.10.0",
"bitflags 2.11.0",
]
[[package]]
@@ -3831,7 +4057,7 @@ version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34"
dependencies = [
"bitflags 2.10.0",
"bitflags 2.11.0",
"errno",
"libc",
"linux-raw-sys",
@@ -3844,6 +4070,7 @@ version = "0.23.36"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c665f33d38cea657d9614f766881e4d510e0eda4239891eea56b4cadcf01801b"
dependencies = [
"log",
"once_cell",
"ring",
"rustls-pki-types",
@@ -3885,7 +4112,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc6bf79ff24e648f6da1f8d1f011e9cac26491b619e6b9280f2b47f1774e6ee2"
dependencies = [
"fnv",
"quick-error",
"quick-error 1.2.3",
"tempfile",
"wait-timeout",
]
@@ -4269,7 +4496,7 @@ checksum = "aa003f0038df784eb8fecbbac13affe3da23b45194bd57dba231c8f48199c526"
dependencies = [
"atoi",
"base64",
"bitflags 2.10.0",
"bitflags 2.11.0",
"byteorder",
"bytes",
"chrono",
@@ -4312,7 +4539,7 @@ checksum = "db58fcd5a53cf07c184b154801ff91347e4c30d17a3562a635ff028ad5deda46"
dependencies = [
"atoi",
"base64",
"bitflags 2.10.0",
"bitflags 2.11.0",
"byteorder",
"chrono",
"crc",
@@ -4912,9 +5139,9 @@ dependencies = [
[[package]]
name = "toml_parser"
version = "1.0.6+spec-1.1.0"
version = "1.0.9+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3198b4b0a8e11f09dd03e133c0280504d0801269e9afa46362ffde1cbeebf44"
checksum = "702d4415e08923e7e1ef96cd5727c0dfed80b4d2fa25db9647fe5eb6f7c5a4c4"
dependencies = [
"winnow",
]
@@ -5345,6 +5572,35 @@ version = "0.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d49784317cd0d1ee7ec5c716dd598ec5b4483ea832a2dced265471cc0f690ae"
[[package]]
name = "ureq"
version = "3.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fdc97a28575b85cfedf2a7e7d3cc64b3e11bd8ac766666318003abbacc7a21fc"
dependencies = [
"base64",
"flate2",
"log",
"percent-encoding",
"rustls",
"rustls-pki-types",
"ureq-proto",
"utf-8",
"webpki-roots 1.0.5",
]
[[package]]
name = "ureq-proto"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d81f9efa9df032be5934a46a068815a10a042b494b6a58cb0a1a97bb5467ed6f"
dependencies = [
"base64",
"http",
"httparse",
"log",
]
[[package]]
name = "url"
version = "2.5.8"
@@ -6310,3 +6566,18 @@ dependencies = [
"cc",
"pkg-config",
]
[[package]]
name = "zune-core"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb8a0807f7c01457d0379ba880ba6322660448ddebc890ce29bb64da71fb40f9"
[[package]]
name = "zune-jpeg"
version = "0.5.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "410e9ecef634c709e3831c2cfdb8d9c32164fae1c67496d5b68fff728eec37fe"
dependencies = [
"zune-core",
]

View File

@@ -40,13 +40,24 @@ resolver = "3"
dotenvy = "0.15"
file-format = { version = "0.28", features = ["reader"] }
homedir = "0.3"
image = { version = "0.25.9", default-features = false, features = [
"bmp",
"gif",
"jpeg",
"png",
"webp",
] }
infer = { version = "0.19.0" }
lasso = { version = "0.7", features = ["multi-threaded"] }
lib0 = { version = "0.16", features = ["lib0-serde"] }
libc = "0.2"
libwebp-sys = "0.14.2"
little_exif = "0.6.23"
llm_adapter = { version = "0.1.3", default-features = false }
log = "0.4"
loom = { version = "0.7", features = ["checkpoint"] }
lru = "0.16"
matroska = "0.30"
memory-indexer = "0.3.0"
mimalloc = "0.1"
mp4parse = "0.17"

View File

@@ -23,4 +23,6 @@ We welcome you to provide us with bug reports via an email at [security@toevery
Since we are an open source project, we also welcome you to provide corresponding fix PRs; we will determine specific rewards based on the evaluation results.
Due to limited resources, we do not accept and will not review any AI-generated security reports.
If the vulnerability is caused by a library we depend on, we encourage you to submit a security report to the corresponding dependent library at the same time to benefit more users.

View File

@@ -300,6 +300,6 @@
"devDependencies": {
"@vanilla-extract/vite-plugin": "^5.0.0",
"msw": "^2.12.4",
"vitest": "^3.2.4"
"vitest": "^4.0.18"
}
}

View File

@@ -11,7 +11,7 @@ export default defineConfig({
include: ['src/__tests__/**/*.unit.spec.ts'],
testTimeout: 1000,
coverage: {
provider: 'istanbul', // or 'c8'
provider: 'istanbul',
reporter: ['lcov'],
reportsDirectory: '../../../.coverage/blocksuite-affine',
},

View File

@@ -31,7 +31,8 @@
"zod": "^3.25.76"
},
"devDependencies": {
"vitest": "^3.2.4"
"@vitest/browser-playwright": "^4.0.18",
"vitest": "^4.0.18"
},
"exports": {
".": "./src/index.ts",

View File

@@ -108,7 +108,9 @@ export class BookmarkBlockComponent extends CaptionedBlockComponent<BookmarkBloc
}
/**
 * Open the bookmarked URL in a new tab.
 *
 * Guards against a missing link, and passes 'noopener,noreferrer' so the
 * opened page cannot reach back through `window.opener`
 * (reverse-tabnabbing protection).
 */
open = () => {
  const link = this.link;
  if (!link) return;
  // Single guarded open. The previous unguarded `window.open(this.link, '_blank')`
  // that ran before the guard would open a second tab (and could open
  // "undefined" when the link was empty) — removed.
  window.open(link, '_blank', 'noopener,noreferrer');
};
refreshData = () => {

View File

@@ -1,3 +1,4 @@
import { playwright } from '@vitest/browser-playwright';
import { defineConfig } from 'vitest/config';
export default defineConfig({
@@ -8,10 +9,9 @@ export default defineConfig({
browser: {
enabled: true,
headless: true,
name: 'chromium',
provider: 'playwright',
instances: [{ browser: 'chromium' }],
provider: playwright(),
isolate: false,
providerOptions: {},
},
include: ['src/__tests__/**/*.unit.spec.ts'],
testTimeout: 500,

View File

@@ -45,8 +45,10 @@ export class AffineCodeUnit extends ShadowlessElement {
if (!codeBlock || !vElement) return plainContent;
const tokens = codeBlock.highlightTokens$.value;
if (tokens.length === 0) return plainContent;
const line = tokens[vElement.lineIndex];
if (!line) return plainContent;
// copy the tokens to avoid modifying the original tokens
const lineTokens = structuredClone(tokens[vElement.lineIndex]);
const lineTokens = structuredClone(line);
if (lineTokens.length === 0) return plainContent;
const startOffset = vElement.startOffset;

View File

@@ -35,7 +35,7 @@
"zod": "^3.25.76"
},
"devDependencies": {
"vitest": "^3.2.4"
"vitest": "^4.0.18"
},
"exports": {
".": "./src/index.ts",

View File

@@ -35,7 +35,7 @@
"zod": "^3.25.76"
},
"devDependencies": {
"vitest": "^3.2.4"
"vitest": "^4.0.18"
},
"exports": {
".": "./src/index.ts",

View File

@@ -31,7 +31,7 @@
"zod": "^3.25.76"
},
"devDependencies": {
"vitest": "^3.2.4"
"vitest": "^4.0.18"
},
"exports": {
".": "./src/index.ts",

View File

@@ -221,6 +221,12 @@ export class EdgelessNoteBlockComponent extends toGfxBlockComponent(
}
}
/**
 * Total CSS scale for this edgeless note: the scale computed by the base
 * gfx block multiplied by the note's own `edgeless.scale` model prop
 * (treated as 1 when unset).
 */
override getCSSScaleVal(): number {
const baseScale = super.getCSSScaleVal();
// Note-specific scale stored on the model; defaults to 1 when absent.
const extraScale = this.model.props.edgeless?.scale ?? 1;
return baseScale * extraScale;
}
override getRenderingRect() {
const { xywh, edgeless } = this.model.props;
const { collapse, scale = 1 } = edgeless;
@@ -255,7 +261,6 @@ export class EdgelessNoteBlockComponent extends toGfxBlockComponent(
const style = {
borderRadius: borderRadius + 'px',
transform: `scale(${scale})`,
};
const extra = this._editing ? ACTIVE_NOTE_EXTRA_PADDING : 0;
@@ -454,6 +459,28 @@ export const EdgelessNoteInteraction =
return;
}
let isClickOnTitle = false;
const titleRect = view
.querySelector('edgeless-page-block-title')
?.getBoundingClientRect();
if (titleRect) {
const titleBound = new Bound(
titleRect.x,
titleRect.y,
titleRect.width,
titleRect.height
);
if (titleBound.isPointInBound([e.clientX, e.clientY])) {
isClickOnTitle = true;
}
}
if (isClickOnTitle) {
handleNativeRangeAtPoint(e.clientX, e.clientY);
return;
}
if (model.children.length === 0) {
const blockId = std.store.addBlock(
'affine:paragraph',

View File

@@ -22,6 +22,7 @@ import {
FrameBlockModel,
ImageBlockModel,
isExternalEmbedModel,
MindmapElementModel,
NoteBlockModel,
ParagraphBlockModel,
} from '@blocksuite/affine-model';
@@ -401,7 +402,17 @@ function reorderElements(
) {
if (!models.length) return;
for (const model of models) {
const normalizedModels = Array.from(
new Map(
models.map(model => {
const reorderTarget =
model.group instanceof MindmapElementModel ? model.group : model;
return [reorderTarget.id, reorderTarget];
})
).values()
);
for (const model of normalizedModels) {
const index = ctx.gfx.layer.getReorderedIndex(model, type);
// block should be updated in transaction

View File

@@ -33,7 +33,7 @@
"zod": "^3.25.76"
},
"devDependencies": {
"vitest": "^3.2.4"
"vitest": "^4.0.18"
},
"exports": {
".": "./src/index.ts",

View File

@@ -2,16 +2,24 @@ import { type Color, ColorScheme } from '@blocksuite/affine-model';
import { FeatureFlagService } from '@blocksuite/affine-shared/services';
import { requestConnectedFrame } from '@blocksuite/affine-shared/utils';
import { DisposableGroup } from '@blocksuite/global/disposable';
import type { IBound } from '@blocksuite/global/gfx';
import { getBoundWithRotation, intersects } from '@blocksuite/global/gfx';
import {
Bound,
getBoundWithRotation,
type IBound,
intersects,
} from '@blocksuite/global/gfx';
import type { BlockStdScope } from '@blocksuite/std';
import type {
GfxCompatibleInterface,
GfxController,
GfxLocalElementModel,
GridManager,
LayerManager,
SurfaceBlockModel,
Viewport,
} from '@blocksuite/std/gfx';
import { GfxControllerIdentifier } from '@blocksuite/std/gfx';
import { effect } from '@preact/signals-core';
import last from 'lodash-es/last';
import { Subject } from 'rxjs';
@@ -40,11 +48,82 @@ type RendererOptions = {
surfaceModel: SurfaceBlockModel;
};
export type CanvasRenderPassMetrics = {
overlayCount: number;
placeholderElementCount: number;
renderByBoundCallCount: number;
renderedElementCount: number;
visibleElementCount: number;
};
export type CanvasMemorySnapshot = {
bytes: number;
datasetLayerId: string | null;
height: number;
kind: 'main' | 'stacking';
width: number;
zIndex: string;
};
export type CanvasRendererDebugMetrics = {
canvasLayerCount: number;
canvasMemoryBytes: number;
canvasMemorySnapshots: CanvasMemorySnapshot[];
canvasMemoryMegabytes: number;
canvasPixelCount: number;
coalescedRefreshCount: number;
dirtyLayerRenderCount: number;
fallbackElementCount: number;
lastRenderDurationMs: number;
lastRenderMetrics: CanvasRenderPassMetrics;
maxRenderDurationMs: number;
pooledStackingCanvasCount: number;
refreshCount: number;
renderCount: number;
stackingCanvasCount: number;
totalLayerCount: number;
totalRenderDurationMs: number;
visibleStackingCanvasCount: number;
};
type MutableCanvasRendererDebugMetrics = Omit<
CanvasRendererDebugMetrics,
| 'canvasLayerCount'
| 'canvasMemoryBytes'
| 'canvasMemoryMegabytes'
| 'canvasPixelCount'
| 'canvasMemorySnapshots'
| 'pooledStackingCanvasCount'
| 'stackingCanvasCount'
| 'totalLayerCount'
| 'visibleStackingCanvasCount'
>;
type RenderPassStats = CanvasRenderPassMetrics;
type StackingCanvasState = {
bound: Bound | null;
layerId: string | null;
};
type RefreshTarget =
| { type: 'all' }
| { type: 'main' }
| { type: 'element'; element: SurfaceElementModel | GfxLocalElementModel }
| {
type: 'elements';
elements: Array<SurfaceElementModel | GfxLocalElementModel>;
};
const STACKING_CANVAS_PADDING = 32;
export class CanvasRenderer {
private _container!: HTMLElement;
private readonly _disposables = new DisposableGroup();
private readonly _gfx: GfxController;
private readonly _turboEnabled: () => boolean;
private readonly _overlays = new Set<Overlay>();
@@ -53,6 +132,37 @@ export class CanvasRenderer {
private _stackingCanvas: HTMLCanvasElement[] = [];
private readonly _stackingCanvasPool: HTMLCanvasElement[] = [];
private readonly _stackingCanvasState = new WeakMap<
HTMLCanvasElement,
StackingCanvasState
>();
private readonly _dirtyStackingCanvasIndexes = new Set<number>();
private _mainCanvasDirty = true;
private _needsFullRender = true;
private _debugMetrics: MutableCanvasRendererDebugMetrics = {
refreshCount: 0,
coalescedRefreshCount: 0,
renderCount: 0,
totalRenderDurationMs: 0,
lastRenderDurationMs: 0,
maxRenderDurationMs: 0,
lastRenderMetrics: {
renderByBoundCallCount: 0,
visibleElementCount: 0,
renderedElementCount: 0,
placeholderElementCount: 0,
overlayCount: 0,
},
dirtyLayerRenderCount: 0,
fallbackElementCount: 0,
};
canvas: HTMLCanvasElement;
ctx: CanvasRenderingContext2D;
@@ -89,6 +199,7 @@ export class CanvasRenderer {
this.layerManager = options.layerManager;
this.grid = options.gridManager;
this.provider = options.provider ?? {};
this._gfx = this.std.get(GfxControllerIdentifier);
this._turboEnabled = () => {
const featureFlagService = options.std.get(FeatureFlagService);
@@ -132,15 +243,199 @@ export class CanvasRenderer {
};
}
/**
 * Position and size a stacking canvas so that it covers `bound`
 * (model-space) within the current viewport, and record the applied bound
 * in `_stackingCanvasState`.
 *
 * A null/degenerate bound hides the canvas and zeroes its backing store
 * (freeing its pixel memory). Otherwise the canvas is placed via a CSS
 * transform and its backing store is sized at `dpr` resolution.
 * Every style write is guarded by an equality check to avoid redundant
 * DOM mutations.
 */
private _applyStackingCanvasLayout(
canvas: HTMLCanvasElement,
bound: Bound | null,
dpr = window.devicePixelRatio
) {
// Fetch (or lazily create) the per-canvas bookkeeping entry.
const state =
this._stackingCanvasState.get(canvas) ??
({
bound: null,
layerId: canvas.dataset.layerId ?? null,
} satisfies StackingCanvasState);
// Degenerate bound: hide the canvas and release its backing store.
// Setting width/height to 0 discards the pixel buffer.
if (!bound || bound.w <= 0 || bound.h <= 0) {
canvas.style.display = 'none';
canvas.style.left = '0px';
canvas.style.top = '0px';
canvas.style.width = '0px';
canvas.style.height = '0px';
canvas.style.transform = '';
canvas.width = 0;
canvas.height = 0;
state.bound = null;
state.layerId = canvas.dataset.layerId ?? null;
this._stackingCanvasState.set(canvas, state);
return;
}
const { viewportBounds, zoom, viewScale } = this.viewport;
// CSS size of the canvas in screen pixels (model units * zoom).
const width = bound.w * zoom;
const height = bound.h * zoom;
// Screen offset of the bound relative to the viewport origin.
const left = (bound.x - viewportBounds.x) * zoom;
const top = (bound.y - viewportBounds.y) * zoom;
// Backing-store size at device resolution; clamped to at least 1px so
// the 2D context stays usable.
const actualWidth = Math.max(1, Math.ceil(width * dpr));
const actualHeight = Math.max(1, Math.ceil(height * dpr));
// NOTE(review): the 1/viewScale factor presumably compensates for a
// scale applied on an ancestor element — confirm against the container.
const transform = `translate(${left}px, ${top}px) scale(${1 / viewScale})`;
// Write-only-on-change guards below avoid style invalidation churn.
if (canvas.style.display !== 'block') {
canvas.style.display = 'block';
}
if (canvas.style.left !== '0px') {
canvas.style.left = '0px';
}
if (canvas.style.top !== '0px') {
canvas.style.top = '0px';
}
if (canvas.style.width !== `${width}px`) {
canvas.style.width = `${width}px`;
}
if (canvas.style.height !== `${height}px`) {
canvas.style.height = `${height}px`;
}
if (canvas.style.transform !== transform) {
canvas.style.transform = transform;
}
if (canvas.style.transformOrigin !== 'top left') {
canvas.style.transformOrigin = 'top left';
}
// Resizing the backing store clears the canvas content, so only resize
// when the size actually changed.
if (canvas.width !== actualWidth) {
canvas.width = actualWidth;
}
if (canvas.height !== actualHeight) {
canvas.height = actualHeight;
}
// Persist the applied layout for the next pass (used by
// _getResolvedStackingCanvasBound while dragging).
state.bound = bound;
state.layerId = canvas.dataset.layerId ?? null;
this._stackingCanvasState.set(canvas, state);
}
/**
 * Intersect `bound` with `viewportBounds`.
 * Returns the overlapping region as a new Bound, or null when the two
 * bounds do not overlap (zero or negative intersection area).
 */
private _clampBoundToViewport(bound: Bound, viewportBounds: Bound) {
  const left = Math.max(bound.x, viewportBounds.x);
  const top = Math.max(bound.y, viewportBounds.y);
  const right = Math.min(bound.maxX, viewportBounds.maxX);
  const bottom = Math.min(bound.maxY, viewportBounds.maxY);
  return right > left && bottom > top
    ? new Bound(left, top, right - left, bottom - top)
    : null;
}
/**
 * Obtain a canvas for a stacking layer, preferring the reuse pool.
 * `onCreated` is invoked only when a genuinely new canvas element is
 * created, not for pooled ones.
 */
private _createCanvasForLayer(
  onCreated?: (canvas: HTMLCanvasElement) => void
) {
  const pooled = this._stackingCanvasPool.pop();
  if (pooled) return pooled;
  const fresh = document.createElement('canvas');
  onCreated?.(fresh);
  return fresh;
}
/**
 * Locate the canvas-layer index containing `element` (matched by id).
 * Returns null when the element belongs to no canvas layer.
 */
private _findLayerIndexByElement(
  element: SurfaceElementModel | GfxLocalElementModel
) {
  const layers = this.layerManager.getCanvasLayers();
  for (let i = 0; i < layers.length; i++) {
    if (layers[i].elements.some(candidate => candidate.id === element.id)) {
      return i;
    }
  }
  return null;
}
/**
 * Compute the model-space bound a stacking canvas must cover for a layer:
 * the union of the rotated bounds of all visible elements that intersect
 * the viewport, padded by STACKING_CANVAS_PADDING and clamped back to the
 * viewport. Returns null when no visible element intersects the viewport.
 */
private _getLayerRenderBound(
elements: SurfaceElementModel[],
viewportBounds: Bound
) {
let layerBound: Bound | null = null;
for (const element of elements) {
// `display` defaults to true when unset; `hidden` always wins.
const display = (element.display ?? true) && !element.hidden;
if (!display) {
continue;
}
// Rotation-aware AABB of the element.
const elementBound = Bound.from(getBoundWithRotation(element));
// Off-screen elements do not grow the layer bound.
if (!intersects(elementBound, viewportBounds)) {
continue;
}
layerBound = layerBound ? layerBound.unite(elementBound) : elementBound;
}
if (!layerBound) {
return null;
}
// Pad so strokes/shadows near the edge are not clipped, then clamp so
// the canvas never exceeds the viewport.
return this._clampBoundToViewport(
layerBound.expand(STACKING_CANVAS_PADDING),
viewportBounds
);
}
/**
 * Resolve the bound to apply to a stacking canvas. Outside of a drag the
 * freshly computed bound is used as-is; while a drag gesture is active the
 * bound is united with the canvas's previous bound so the canvas does not
 * shrink (and re-clear) mid-gesture.
 */
private _getResolvedStackingCanvasBound(
  canvas: HTMLCanvasElement,
  bound: Bound | null
) {
  if (bound === null) return bound;
  if (!this._gfx.tool.dragging$.peek()) return bound;
  const previous = this._stackingCanvasState.get(canvas)?.bound;
  return previous ? previous.unite(bound) : bound;
}
/**
 * Mark render state dirty for the given refresh target.
 *
 * - 'all': schedules a full render and discards finer-grained dirty info.
 * - 'main': marks only the fallback/main canvas dirty.
 * - 'element'/'elements': marks just the stacking canvases owning those
 *   elements; elements without a backing stacking canvas fall back to
 *   dirtying the main canvas.
 *
 * Once a full render is pending, narrower invalidations are no-ops since
 * the full render supersedes them.
 */
private _invalidate(target: RefreshTarget = { type: 'all' }) {
if (target.type === 'all') {
this._needsFullRender = true;
this._mainCanvasDirty = true;
// Per-layer dirty indexes are redundant under a full render.
this._dirtyStackingCanvasIndexes.clear();
return;
}
// A pending full render already covers any narrower target.
if (this._needsFullRender) {
return;
}
if (target.type === 'main') {
this._mainCanvasDirty = true;
return;
}
const elements =
target.type === 'element' ? [target.element] : target.elements;
for (const element of elements) {
const layerIndex = this._findLayerIndexByElement(element);
// No dedicated stacking canvas for this element: it is drawn on the
// main canvas, so dirty that instead.
if (layerIndex === null || layerIndex >= this._stackingCanvas.length) {
this._mainCanvasDirty = true;
continue;
}
this._dirtyStackingCanvasIndexes.add(layerIndex);
}
}
/**
 * Scrub a canvas before returning it to the reuse pool: detach it from its
 * layer id, then hide it and drop its backing store via the null-bound
 * layout path (which also resets its recorded state).
 */
private _resetPooledCanvas(canvas: HTMLCanvasElement) {
canvas.dataset.layerId = '';
this._applyStackingCanvasLayout(canvas, null);
}
private _initStackingCanvas(onCreated?: (canvas: HTMLCanvasElement) => void) {
const layer = this.layerManager;
const updateStackingCanvasSize = (canvases: HTMLCanvasElement[]) => {
this._stackingCanvas = canvases;
const sizeUpdater = this._canvasSizeUpdater();
canvases.filter(sizeUpdater.filter).forEach(sizeUpdater.update);
};
const updateStackingCanvas = () => {
/**
* we already have a main canvas, so the last layer should be skipped
@@ -159,11 +454,7 @@ export class CanvasRenderer {
const created = i < currentCanvases.length;
const canvas = created
? currentCanvases[i]
: document.createElement('canvas');
if (!created) {
onCreated?.(canvas);
}
: this._createCanvasForLayer(onCreated);
canvas.dataset.layerId = `[${layer.indexes[0]}--${layer.indexes[1]}]`;
canvas.style.zIndex = layer.zIndex.toString();
@@ -171,7 +462,6 @@ export class CanvasRenderer {
}
this._stackingCanvas = canvases;
updateStackingCanvasSize(canvases);
if (currentCanvases.length !== canvases.length) {
const diff = canvases.length - currentCanvases.length;
@@ -189,12 +479,16 @@ export class CanvasRenderer {
payload.added = canvases.slice(-diff);
} else {
payload.removed = currentCanvases.slice(diff);
payload.removed.forEach(canvas => {
this._resetPooledCanvas(canvas);
this._stackingCanvasPool.push(canvas);
});
}
this.stackingCanvasUpdated.next(payload);
}
this.refresh();
this.refresh({ type: 'all' });
};
this._disposables.add(
@@ -211,7 +505,7 @@ export class CanvasRenderer {
this._disposables.add(
this.viewport.viewportUpdated.subscribe(() => {
this.refresh();
this.refresh({ type: 'all' });
})
);
@@ -222,7 +516,6 @@ export class CanvasRenderer {
sizeUpdatedRafId = null;
this._resetSize();
this._render();
this.refresh();
}, this._container);
})
);
@@ -233,69 +526,212 @@ export class CanvasRenderer {
if (this.usePlaceholder !== shouldRenderPlaceholders) {
this.usePlaceholder = shouldRenderPlaceholders;
this.refresh();
this.refresh({ type: 'all' });
}
})
);
let wasDragging = false;
this._disposables.add(
effect(() => {
const isDragging = this._gfx.tool.dragging$.value;
if (wasDragging && !isDragging) {
this.refresh({ type: 'all' });
}
wasDragging = isDragging;
})
);
this.usePlaceholder = false;
}
/**
 * Build a zeroed counter set for a single render pass.
 */
private _createRenderPassStats(): RenderPassStats {
  const stats: RenderPassStats = {
    renderByBoundCallCount: 0,
    visibleElementCount: 0,
    renderedElementCount: 0,
    placeholderElementCount: 0,
    overlayCount: 0,
  };
  return stats;
}
/**
 * Capture size/memory info for the main canvas (index 0) and every
 * stacking canvas. `bytes` assumes 4 bytes per pixel (RGBA backing store).
 */
private _getCanvasMemorySnapshots(): CanvasMemorySnapshot[] {
  const allCanvases = [this.canvas, ...this._stackingCanvas];
  return allCanvases.map((canvas, index) => ({
    kind: index === 0 ? 'main' : 'stacking',
    width: canvas.width,
    height: canvas.height,
    bytes: canvas.width * canvas.height * 4,
    zIndex: canvas.style.zIndex,
    datasetLayerId: canvas.dataset.layerId ?? null,
  }));
}
private _render() {
const renderStart = performance.now();
const { viewportBounds, zoom } = this.viewport;
const { ctx } = this;
const dpr = window.devicePixelRatio;
const scale = zoom * dpr;
const matrix = new DOMMatrix().scaleSelf(scale);
const renderStats = this._createRenderPassStats();
const fullRender = this._needsFullRender;
const stackingIndexesToRender = fullRender
? this._stackingCanvas.map((_, idx) => idx)
: [...this._dirtyStackingCanvasIndexes];
/**
* if a layer does not have a corresponding canvas
* its element will be add to this array and drawing on the
* main canvas
*/
let fallbackElement: SurfaceElementModel[] = [];
const allCanvasLayers = this.layerManager.getCanvasLayers();
const viewportBound = Bound.from(viewportBounds);
this.layerManager.getCanvasLayers().forEach((layer, idx) => {
if (!this._stackingCanvas[idx]) {
fallbackElement = fallbackElement.concat(layer.elements);
return;
for (const idx of stackingIndexesToRender) {
const layer = allCanvasLayers[idx];
const canvas = this._stackingCanvas[idx];
if (!layer || !canvas) {
continue;
}
const canvas = this._stackingCanvas[idx];
const ctx = canvas.getContext('2d') as CanvasRenderingContext2D;
const rc = new RoughCanvas(ctx.canvas);
const layerRenderBound = this._getLayerRenderBound(
layer.elements,
viewportBound
);
const resolvedLayerRenderBound = this._getResolvedStackingCanvasBound(
canvas,
layerRenderBound
);
ctx.clearRect(0, 0, canvas.width, canvas.height);
this._applyStackingCanvasLayout(canvas, resolvedLayerRenderBound);
if (
!resolvedLayerRenderBound ||
canvas.width === 0 ||
canvas.height === 0
) {
continue;
}
const layerCtx = canvas.getContext('2d') as CanvasRenderingContext2D;
const layerRc = new RoughCanvas(layerCtx.canvas);
layerCtx.clearRect(0, 0, canvas.width, canvas.height);
layerCtx.save();
layerCtx.setTransform(matrix);
this._renderByBound(
layerCtx,
matrix,
layerRc,
resolvedLayerRenderBound,
layer.elements,
false,
renderStats
);
}
if (fullRender || this._mainCanvasDirty) {
allCanvasLayers.forEach((layer, idx) => {
if (!this._stackingCanvas[idx]) {
fallbackElement = fallbackElement.concat(layer.elements);
}
});
ctx.clearRect(0, 0, this.canvas.width, this.canvas.height);
ctx.save();
ctx.setTransform(matrix);
this._renderByBound(ctx, matrix, rc, viewportBounds, layer.elements);
});
this._renderByBound(
ctx,
matrix,
new RoughCanvas(ctx.canvas),
viewportBounds,
fallbackElement,
true,
renderStats
);
}
ctx.clearRect(0, 0, this.canvas.width, this.canvas.height);
ctx.save();
ctx.setTransform(matrix);
this._renderByBound(
ctx,
matrix,
new RoughCanvas(ctx.canvas),
viewportBounds,
fallbackElement,
true
const canvasMemorySnapshots = this._getCanvasMemorySnapshots();
const canvasMemoryBytes = canvasMemorySnapshots.reduce(
(sum, snapshot) => sum + snapshot.bytes,
0
);
const layerTypes = this.layerManager.layers.map(layer => layer.type);
const renderDurationMs = performance.now() - renderStart;
this._debugMetrics.renderCount += 1;
this._debugMetrics.totalRenderDurationMs += renderDurationMs;
this._debugMetrics.lastRenderDurationMs = renderDurationMs;
this._debugMetrics.maxRenderDurationMs = Math.max(
this._debugMetrics.maxRenderDurationMs,
renderDurationMs
);
this._debugMetrics.lastRenderMetrics = renderStats;
this._debugMetrics.fallbackElementCount = fallbackElement.length;
this._debugMetrics.dirtyLayerRenderCount = stackingIndexesToRender.length;
this._lastDebugSnapshot = {
canvasMemorySnapshots,
canvasMemoryBytes,
canvasPixelCount: canvasMemorySnapshots.reduce(
(sum, snapshot) => sum + snapshot.width * snapshot.height,
0
),
stackingCanvasCount: this._stackingCanvas.length,
canvasLayerCount: layerTypes.filter(type => type === 'canvas').length,
totalLayerCount: layerTypes.length,
pooledStackingCanvasCount: this._stackingCanvasPool.length,
visibleStackingCanvasCount: this._stackingCanvas.filter(
canvas => canvas.width > 0 && canvas.height > 0
).length,
};
this._needsFullRender = false;
this._mainCanvasDirty = false;
this._dirtyStackingCanvasIndexes.clear();
}
// Canvas/layer bookkeeping captured at the end of the most recent render
// pass; merged into the result of getDebugMetrics(). Starts zeroed until
// the first render completes.
private _lastDebugSnapshot: Pick<
CanvasRendererDebugMetrics,
| 'canvasMemoryBytes'
| 'canvasMemorySnapshots'
| 'canvasPixelCount'
| 'canvasLayerCount'
| 'pooledStackingCanvasCount'
| 'stackingCanvasCount'
| 'totalLayerCount'
| 'visibleStackingCanvasCount'
> = {
canvasMemoryBytes: 0,
canvasMemorySnapshots: [],
canvasPixelCount: 0,
canvasLayerCount: 0,
pooledStackingCanvasCount: 0,
stackingCanvasCount: 0,
totalLayerCount: 0,
visibleStackingCanvasCount: 0,
};
private _renderByBound(
ctx: CanvasRenderingContext2D | null,
matrix: DOMMatrix,
rc: RoughCanvas,
bound: IBound,
surfaceElements?: SurfaceElementModel[],
overLay: boolean = false
overLay: boolean = false,
renderStats?: RenderPassStats
) {
if (!ctx) return;
renderStats && (renderStats.renderByBoundCallCount += 1);
const elements =
surfaceElements ??
(this.grid.search(bound, {
@@ -305,10 +741,12 @@ export class CanvasRenderer {
for (const element of elements) {
const display = (element.display ?? true) && !element.hidden;
if (display && intersects(getBoundWithRotation(element), bound)) {
renderStats && (renderStats.visibleElementCount += 1);
if (
this.usePlaceholder &&
!(element as GfxCompatibleInterface).forceFullRender
) {
renderStats && (renderStats.placeholderElementCount += 1);
ctx.save();
ctx.fillStyle = 'rgba(200, 200, 200, 0.5)';
const drawX = element.x - bound.x;
@@ -316,6 +754,7 @@ export class CanvasRenderer {
ctx.fillRect(drawX, drawY, element.w, element.h);
ctx.restore();
} else {
renderStats && (renderStats.renderedElementCount += 1);
ctx.save();
const renderFn = this.std.getOptional<ElementRenderer>(
ElementRendererIdentifier(element.type)
@@ -333,6 +772,7 @@ export class CanvasRenderer {
}
if (overLay) {
renderStats && (renderStats.overlayCount += this._overlays.size);
for (const overlay of this._overlays) {
ctx.save();
ctx.translate(-bound.x, -bound.y);
@@ -348,33 +788,38 @@ export class CanvasRenderer {
const sizeUpdater = this._canvasSizeUpdater();
sizeUpdater.update(this.canvas);
this._stackingCanvas.forEach(sizeUpdater.update);
this.refresh();
this._invalidate({ type: 'all' });
}
private _watchSurface(surfaceModel: SurfaceBlockModel) {
this._disposables.add(
surfaceModel.elementAdded.subscribe(() => this.refresh())
surfaceModel.elementAdded.subscribe(() => this.refresh({ type: 'all' }))
);
this._disposables.add(
surfaceModel.elementRemoved.subscribe(() => this.refresh())
surfaceModel.elementRemoved.subscribe(() => this.refresh({ type: 'all' }))
);
this._disposables.add(
surfaceModel.localElementAdded.subscribe(() => this.refresh())
surfaceModel.localElementAdded.subscribe(() =>
this.refresh({ type: 'all' })
)
);
this._disposables.add(
surfaceModel.localElementDeleted.subscribe(() => this.refresh())
surfaceModel.localElementDeleted.subscribe(() =>
this.refresh({ type: 'all' })
)
);
this._disposables.add(
surfaceModel.localElementUpdated.subscribe(() => this.refresh())
surfaceModel.localElementUpdated.subscribe(({ model }) => {
this.refresh({ type: 'element', element: model });
})
);
this._disposables.add(
surfaceModel.elementUpdated.subscribe(payload => {
// ignore externalXYWH update cause it's updated by the renderer
if (payload.props['externalXYWH']) return;
this.refresh();
const element = surfaceModel.getElementById(payload.id);
this.refresh(element ? { type: 'element', element } : { type: 'all' });
})
);
}
@@ -382,7 +827,7 @@ export class CanvasRenderer {
addOverlay(overlay: Overlay) {
overlay.setRenderer(this);
this._overlays.add(overlay);
this.refresh();
this.refresh({ type: 'main' });
}
/**
@@ -394,7 +839,7 @@ export class CanvasRenderer {
container.append(this.canvas);
this._resetSize();
this.refresh();
this.refresh({ type: 'all' });
}
dispose(): void {
@@ -453,8 +898,46 @@ export class CanvasRenderer {
return this.provider.getPropertyValue?.(property) ?? '';
}
refresh() {
if (this._refreshRafId !== null) return;
/**
 * Combine the live debug counters with the snapshot captured by the last
 * completed render pass, deriving the megabyte figure from the tracked
 * byte count.
 */
getDebugMetrics(): CanvasRendererDebugMetrics {
  const snapshot = this._lastDebugSnapshot;
  return {
    ...this._debugMetrics,
    ...snapshot,
    canvasMemoryMegabytes: snapshot.canvasMemoryBytes / (1024 * 1024),
  };
}
/**
 * Return every cumulative debug counter and the last-render snapshot to
 * the zeroed state they hold at construction time.
 */
resetDebugMetrics() {
  const zeroedMetrics: MutableCanvasRendererDebugMetrics = {
    refreshCount: 0,
    coalescedRefreshCount: 0,
    renderCount: 0,
    totalRenderDurationMs: 0,
    lastRenderDurationMs: 0,
    maxRenderDurationMs: 0,
    lastRenderMetrics: this._createRenderPassStats(),
    dirtyLayerRenderCount: 0,
    fallbackElementCount: 0,
  };
  const zeroedSnapshot = {
    canvasMemoryBytes: 0,
    canvasMemorySnapshots: [],
    canvasPixelCount: 0,
    canvasLayerCount: 0,
    pooledStackingCanvasCount: 0,
    stackingCanvasCount: 0,
    totalLayerCount: 0,
    visibleStackingCanvasCount: 0,
  };
  this._debugMetrics = zeroedMetrics;
  this._lastDebugSnapshot = zeroedSnapshot;
}
refresh(target: RefreshTarget = { type: 'all' }) {
this._debugMetrics.refreshCount += 1;
this._invalidate(target);
if (this._refreshRafId !== null) {
this._debugMetrics.coalescedRefreshCount += 1;
return;
}
this._refreshRafId = requestConnectedFrame(() => {
this._refreshRafId = null;
@@ -469,6 +952,6 @@ export class CanvasRenderer {
overlay.setRenderer(null);
this._overlays.delete(overlay);
this.refresh();
this.refresh({ type: 'main' });
}
}

View File

@@ -354,30 +354,37 @@ export class DomRenderer {
this._disposables.add(
surfaceModel.elementAdded.subscribe(payload => {
this._markElementDirty(payload.id, UpdateType.ELEMENT_ADDED);
this._markViewportDirty();
this.refresh();
})
);
this._disposables.add(
surfaceModel.elementRemoved.subscribe(payload => {
this._markElementDirty(payload.id, UpdateType.ELEMENT_REMOVED);
this._markViewportDirty();
this.refresh();
})
);
this._disposables.add(
surfaceModel.localElementAdded.subscribe(payload => {
this._markElementDirty(payload.id, UpdateType.ELEMENT_ADDED);
this._markViewportDirty();
this.refresh();
})
);
this._disposables.add(
surfaceModel.localElementDeleted.subscribe(payload => {
this._markElementDirty(payload.id, UpdateType.ELEMENT_REMOVED);
this._markViewportDirty();
this.refresh();
})
);
this._disposables.add(
surfaceModel.localElementUpdated.subscribe(payload => {
this._markElementDirty(payload.model.id, UpdateType.ELEMENT_UPDATED);
if (payload.props['index'] || payload.props['groupId']) {
this._markViewportDirty();
}
this.refresh();
})
);
@@ -387,6 +394,9 @@ export class DomRenderer {
// ignore externalXYWH update cause it's updated by the renderer
if (payload.props['externalXYWH']) return;
this._markElementDirty(payload.id, UpdateType.ELEMENT_UPDATED);
if (payload.props['index'] || payload.props['childIds']) {
this._markViewportDirty();
}
this.refresh();
})
);

View File

@@ -19,7 +19,7 @@
"@blocksuite/sync": "workspace:*",
"@floating-ui/dom": "^1.6.13",
"@lit/context": "^1.1.2",
"@lottiefiles/dotlottie-wc": "^0.5.0",
"@lottiefiles/dotlottie-wc": "^0.9.4",
"@preact/signals-core": "^1.8.0",
"@toeverything/theme": "^1.1.23",
"@types/hast": "^3.0.4",

View File

@@ -1,4 +1,8 @@
import { getHostName } from '@blocksuite/affine-shared/utils';
import {
getHostName,
isValidUrl,
normalizeUrl,
} from '@blocksuite/affine-shared/utils';
import { PropTypes, requiredProperties } from '@blocksuite/std';
import { css, LitElement } from 'lit';
import { property } from 'lit/decorators.js';
@@ -44,15 +48,27 @@ export class LinkPreview extends LitElement {
override render() {
const { url } = this;
const normalizedUrl = normalizeUrl(url);
const safeUrl =
normalizedUrl && isValidUrl(normalizedUrl) ? normalizedUrl : null;
const hostName = getHostName(safeUrl ?? url);
if (!safeUrl) {
return html`
<span class="affine-link-preview">
<span>${hostName}</span>
</span>
`;
}
return html`
<a
class="affine-link-preview"
rel="noopener noreferrer"
target="_blank"
href=${url}
href=${safeUrl}
>
<span>${getHostName(url)}</span>
<span>${hostName}</span>
</a>
`;
}

View File

@@ -32,7 +32,7 @@
"zod": "^3.25.76"
},
"devDependencies": {
"vitest": "^3.2.4"
"vitest": "^4.0.18"
},
"exports": {
".": "./src/index.ts",

View File

@@ -15,7 +15,7 @@
"zod": "^3.25.76"
},
"devDependencies": {
"vitest": "^3.2.4"
"vitest": "^4.0.18"
},
"exports": {
".": "./src/index.ts"

View File

@@ -8,7 +8,7 @@ export default defineConfig({
include: ['src/__tests__/**/*.unit.spec.ts'],
testTimeout: 500,
coverage: {
provider: 'istanbul', // or 'c8'
provider: 'istanbul',
reporter: ['lcov'],
reportsDirectory: '../../../.coverage/ext-loader',
},

View File

@@ -5,6 +5,8 @@ import {
import type { BrushElementModel } from '@blocksuite/affine-model';
import { DefaultTheme } from '@blocksuite/affine-model';
import { renderBrushLikeDom } from './shared';
export const BrushDomRendererExtension = DomElementRendererExtension(
'brush',
(
@@ -12,58 +14,11 @@ export const BrushDomRendererExtension = DomElementRendererExtension(
domElement: HTMLElement,
renderer: DomRenderer
) => {
const { zoom } = renderer.viewport;
const [, , w, h] = model.deserializedXYWH;
// Early return if invalid dimensions
if (w <= 0 || h <= 0) {
return;
}
// Early return if no commands
if (!model.commands) {
return;
}
// Clear previous content
domElement.innerHTML = '';
// Get color value
const color = renderer.getColorValue(model.color, DefaultTheme.black, true);
// Create SVG element
const svg = document.createElementNS('http://www.w3.org/2000/svg', 'svg');
svg.style.position = 'absolute';
svg.style.left = '0';
svg.style.top = '0';
svg.style.width = `${w * zoom}px`;
svg.style.height = `${h * zoom}px`;
svg.style.overflow = 'visible';
svg.style.pointerEvents = 'none';
svg.setAttribute('viewBox', `0 0 ${w} ${h}`);
// Apply rotation transform
if (model.rotate !== 0) {
svg.style.transform = `rotate(${model.rotate}deg)`;
svg.style.transformOrigin = 'center';
}
// Create path element for the brush stroke
const pathElement = document.createElementNS(
'http://www.w3.org/2000/svg',
'path'
);
pathElement.setAttribute('d', model.commands);
pathElement.setAttribute('fill', color);
pathElement.setAttribute('stroke', 'none');
svg.append(pathElement);
domElement.replaceChildren(svg);
// Set element size and position
domElement.style.width = `${w * zoom}px`;
domElement.style.height = `${h * zoom}px`;
domElement.style.overflow = 'visible';
domElement.style.pointerEvents = 'none';
renderBrushLikeDom({
model,
domElement,
renderer,
color: renderer.getColorValue(model.color, DefaultTheme.black, true),
});
}
);

View File

@@ -5,6 +5,8 @@ import {
import type { HighlighterElementModel } from '@blocksuite/affine-model';
import { DefaultTheme } from '@blocksuite/affine-model';
import { renderBrushLikeDom } from './shared';
export const HighlighterDomRendererExtension = DomElementRendererExtension(
'highlighter',
(
@@ -12,62 +14,15 @@ export const HighlighterDomRendererExtension = DomElementRendererExtension(
domElement: HTMLElement,
renderer: DomRenderer
) => {
const { zoom } = renderer.viewport;
const [, , w, h] = model.deserializedXYWH;
// Early return if invalid dimensions
if (w <= 0 || h <= 0) {
return;
}
// Early return if no commands
if (!model.commands) {
return;
}
// Clear previous content
domElement.innerHTML = '';
// Get color value
const color = renderer.getColorValue(
model.color,
DefaultTheme.hightlighterColor,
true
);
// Create SVG element
const svg = document.createElementNS('http://www.w3.org/2000/svg', 'svg');
svg.style.position = 'absolute';
svg.style.left = '0';
svg.style.top = '0';
svg.style.width = `${w * zoom}px`;
svg.style.height = `${h * zoom}px`;
svg.style.overflow = 'visible';
svg.style.pointerEvents = 'none';
svg.setAttribute('viewBox', `0 0 ${w} ${h}`);
// Apply rotation transform
if (model.rotate !== 0) {
svg.style.transform = `rotate(${model.rotate}deg)`;
svg.style.transformOrigin = 'center';
}
// Create path element for the highlighter stroke
const pathElement = document.createElementNS(
'http://www.w3.org/2000/svg',
'path'
);
pathElement.setAttribute('d', model.commands);
pathElement.setAttribute('fill', color);
pathElement.setAttribute('stroke', 'none');
svg.append(pathElement);
domElement.replaceChildren(svg);
// Set element size and position
domElement.style.width = `${w * zoom}px`;
domElement.style.height = `${h * zoom}px`;
domElement.style.overflow = 'visible';
domElement.style.pointerEvents = 'none';
renderBrushLikeDom({
model,
domElement,
renderer,
color: renderer.getColorValue(
model.color,
DefaultTheme.hightlighterColor,
true
),
});
}
);

View File

@@ -0,0 +1,82 @@
import type { DomRenderer } from '@blocksuite/affine-block-surface';
import type {
BrushElementModel,
HighlighterElementModel,
} from '@blocksuite/affine-model';
const SVG_NS = 'http://www.w3.org/2000/svg';
type BrushLikeModel = BrushElementModel | HighlighterElementModel;
type RetainedBrushDom = {
path: SVGPathElement;
svg: SVGSVGElement;
};
const retainedBrushDom = new WeakMap<HTMLElement, RetainedBrushDom>();
/**
 * Drops the cached SVG/path pair for `host` and empties its children so the
 * next render rebuilds the retained DOM from scratch.
 */
function clearBrushLikeDom(host: HTMLElement) {
  host.replaceChildren();
  retainedBrushDom.delete(host);
}
/**
 * Returns the cached `<svg>`/`<path>` pair for `host`. When no pair exists
 * yet, a fresh one is created, mounted as the host's only child, cached in
 * the WeakMap, and returned.
 */
function getRetainedBrushDom(host: HTMLElement) {
  const cached = retainedBrushDom.get(host);
  if (cached) return cached;

  // Build the overlay SVG once; per-frame values (size, viewBox, transform)
  // are applied by the render function on every pass.
  const svg = document.createElementNS(SVG_NS, 'svg');
  Object.assign(svg.style, {
    position: 'absolute',
    left: '0',
    top: '0',
    overflow: 'visible',
    pointerEvents: 'none',
  });

  const path = document.createElementNS(SVG_NS, 'path');
  path.setAttribute('stroke', 'none');
  svg.append(path);

  const entry = { svg, path };
  retainedBrushDom.set(host, entry);
  host.replaceChildren(svg);
  return entry;
}
/**
 * Renders a brush or highlighter element into its retained DOM node.
 *
 * Reuses the cached SVG/path pair for `domElement` (creating it on first
 * use) instead of rebuilding the subtree on every frame; when the element
 * has no drawable content the retained DOM is torn down instead.
 *
 * @param color    resolved CSS color for the stroke fill
 * @param domElement host element owned by the DOM renderer
 * @param model    brush-like element model providing geometry and path data
 * @param renderer DOM renderer (source of the current viewport zoom)
 */
export function renderBrushLikeDom({
  color,
  domElement,
  model,
  renderer,
}: {
  color: string;
  domElement: HTMLElement;
  model: BrushLikeModel;
  renderer: DomRenderer;
}) {
  const zoom = renderer.viewport.zoom;
  const [, , width, height] = model.deserializedXYWH;

  // Degenerate size or no path commands: nothing to draw.
  if (width <= 0 || height <= 0 || !model.commands) {
    clearBrushLikeDom(domElement);
    return;
  }

  const { path, svg } = getRetainedBrushDom(domElement);
  const cssWidth = `${width * zoom}px`;
  const cssHeight = `${height * zoom}px`;

  svg.style.width = cssWidth;
  svg.style.height = cssHeight;
  if (model.rotate === 0) {
    // Clear any transform left over from a previously rotated state.
    svg.style.transform = '';
    svg.style.transformOrigin = '';
  } else {
    svg.style.transform = `rotate(${model.rotate}deg)`;
    svg.style.transformOrigin = 'center';
  }
  // viewBox is in unscaled model units; CSS size applies the zoom.
  svg.setAttribute('viewBox', `0 0 ${width} ${height}`);

  path.setAttribute('d', model.commands);
  path.setAttribute('fill', color);

  domElement.style.width = cssWidth;
  domElement.style.height = cssHeight;
  domElement.style.overflow = 'visible';
  domElement.style.pointerEvents = 'none';
}

View File

@@ -14,6 +14,8 @@ import { PointLocation, SVGPathBuilder } from '@blocksuite/global/gfx';
import { isConnectorWithLabel } from '../connector-manager';
import { DEFAULT_ARROW_SIZE } from './utils';
const SVG_NS = 'http://www.w3.org/2000/svg';
interface PathBounds {
minX: number;
minY: number;
@@ -21,6 +23,15 @@ interface PathBounds {
maxY: number;
}
type RetainedConnectorDom = {
defs: SVGDefsElement;
label: HTMLDivElement | null;
path: SVGPathElement;
svg: SVGSVGElement;
};
const retainedConnectorDom = new WeakMap<HTMLElement, RetainedConnectorDom>();
function calculatePathBounds(path: PointLocation[]): PathBounds {
if (path.length === 0) {
return { minX: 0, minY: 0, maxX: 0, maxY: 0 };
@@ -81,10 +92,7 @@ function createArrowMarker(
strokeWidth: number,
isStart: boolean = false
): SVGMarkerElement {
const marker = document.createElementNS(
'http://www.w3.org/2000/svg',
'marker'
);
const marker = document.createElementNS(SVG_NS, 'marker');
const size = DEFAULT_ARROW_SIZE * (strokeWidth / 2);
marker.id = id;
@@ -98,10 +106,7 @@ function createArrowMarker(
switch (style) {
case 'Arrow': {
const path = document.createElementNS(
'http://www.w3.org/2000/svg',
'path'
);
const path = document.createElementNS(SVG_NS, 'path');
path.setAttribute(
'd',
isStart ? 'M 20 5 L 10 10 L 20 15 Z' : 'M 0 5 L 10 10 L 0 15 Z'
@@ -112,10 +117,7 @@ function createArrowMarker(
break;
}
case 'Triangle': {
const path = document.createElementNS(
'http://www.w3.org/2000/svg',
'path'
);
const path = document.createElementNS(SVG_NS, 'path');
path.setAttribute(
'd',
isStart ? 'M 20 7 L 12 10 L 20 13 Z' : 'M 0 7 L 8 10 L 0 13 Z'
@@ -126,10 +128,7 @@ function createArrowMarker(
break;
}
case 'Circle': {
const circle = document.createElementNS(
'http://www.w3.org/2000/svg',
'circle'
);
const circle = document.createElementNS(SVG_NS, 'circle');
circle.setAttribute('cx', '10');
circle.setAttribute('cy', '10');
circle.setAttribute('r', '4');
@@ -139,10 +138,7 @@ function createArrowMarker(
break;
}
case 'Diamond': {
const path = document.createElementNS(
'http://www.w3.org/2000/svg',
'path'
);
const path = document.createElementNS(SVG_NS, 'path');
path.setAttribute('d', 'M 10 6 L 14 10 L 10 14 L 6 10 Z');
path.setAttribute('fill', color);
path.setAttribute('stroke', color);
@@ -154,13 +150,64 @@ function createArrowMarker(
return marker;
}
function clearRetainedConnectorDom(element: HTMLElement) {
retainedConnectorDom.delete(element);
element.replaceChildren();
}
function getRetainedConnectorDom(element: HTMLElement): RetainedConnectorDom {
const existing = retainedConnectorDom.get(element);
if (existing) {
return existing;
}
const svg = document.createElementNS(SVG_NS, 'svg');
svg.style.position = 'absolute';
svg.style.overflow = 'visible';
svg.style.pointerEvents = 'none';
const defs = document.createElementNS(SVG_NS, 'defs');
const path = document.createElementNS(SVG_NS, 'path');
path.setAttribute('fill', 'none');
path.setAttribute('stroke-linecap', 'round');
path.setAttribute('stroke-linejoin', 'round');
svg.append(defs, path);
element.replaceChildren(svg);
const retained = {
svg,
defs,
path,
label: null,
};
retainedConnectorDom.set(element, retained);
return retained;
}
function getOrCreateLabelElement(retained: RetainedConnectorDom) {
if (retained.label) {
return retained.label;
}
const label = document.createElement('div');
retained.svg.insertAdjacentElement('afterend', label);
retained.label = label;
return label;
}
function renderConnectorLabel(
model: ConnectorElementModel,
container: HTMLElement,
retained: RetainedConnectorDom,
renderer: DomRenderer,
zoom: number
) {
if (!isConnectorWithLabel(model) || !model.labelXYWH) {
retained.label?.remove();
retained.label = null;
return;
}
@@ -176,8 +223,7 @@ function renderConnectorLabel(
},
} = model;
// Create label element
const labelElement = document.createElement('div');
const labelElement = getOrCreateLabelElement(retained);
labelElement.style.position = 'absolute';
labelElement.style.left = `${lx * zoom}px`;
labelElement.style.top = `${ly * zoom}px`;
@@ -210,11 +256,7 @@ function renderConnectorLabel(
labelElement.style.wordWrap = 'break-word';
// Add text content
if (model.text) {
labelElement.textContent = model.text.toString();
}
container.append(labelElement);
labelElement.textContent = model.text ? model.text.toString() : '';
}
/**
@@ -241,14 +283,13 @@ export const connectorBaseDomRenderer = (
stroke,
} = model;
// Clear previous content
element.innerHTML = '';
// Early return if no path points
if (!points || points.length < 2) {
clearRetainedConnectorDom(element);
return;
}
const retained = getRetainedConnectorDom(element);
// Calculate bounds for the SVG viewBox
const pathBounds = calculatePathBounds(points);
const padding = Math.max(strokeWidth * 2, 20); // Add padding for arrows
@@ -257,8 +298,7 @@ export const connectorBaseDomRenderer = (
const offsetX = pathBounds.minX - padding;
const offsetY = pathBounds.minY - padding;
// Create SVG element
const svg = document.createElementNS('http://www.w3.org/2000/svg', 'svg');
const { defs, path, svg } = retained;
svg.style.position = 'absolute';
svg.style.left = `${offsetX * zoom}px`;
svg.style.top = `${offsetY * zoom}px`;
@@ -268,49 +308,43 @@ export const connectorBaseDomRenderer = (
svg.style.pointerEvents = 'none';
svg.setAttribute('viewBox', `0 0 ${svgWidth / zoom} ${svgHeight / zoom}`);
// Create defs for markers
const defs = document.createElementNS('http://www.w3.org/2000/svg', 'defs');
svg.append(defs);
const strokeColor = renderer.getColorValue(
stroke,
DefaultTheme.connectorColor,
true
);
// Create markers for endpoints
const markers: SVGMarkerElement[] = [];
let startMarkerId = '';
let endMarkerId = '';
if (frontEndpointStyle !== 'None') {
startMarkerId = `start-marker-${model.id}`;
const startMarker = createArrowMarker(
startMarkerId,
frontEndpointStyle,
strokeColor,
strokeWidth,
true
markers.push(
createArrowMarker(
startMarkerId,
frontEndpointStyle,
strokeColor,
strokeWidth,
true
)
);
defs.append(startMarker);
}
if (rearEndpointStyle !== 'None') {
endMarkerId = `end-marker-${model.id}`;
const endMarker = createArrowMarker(
endMarkerId,
rearEndpointStyle,
strokeColor,
strokeWidth,
false
markers.push(
createArrowMarker(
endMarkerId,
rearEndpointStyle,
strokeColor,
strokeWidth,
false
)
);
defs.append(endMarker);
}
// Create path element
const pathElement = document.createElementNS(
'http://www.w3.org/2000/svg',
'path'
);
defs.replaceChildren(...markers);
// Adjust points relative to the SVG coordinate system
const adjustedPoints = points.map(point => {
@@ -334,29 +368,25 @@ export const connectorBaseDomRenderer = (
});
const pathData = createConnectorPath(adjustedPoints, mode);
pathElement.setAttribute('d', pathData);
pathElement.setAttribute('stroke', strokeColor);
pathElement.setAttribute('stroke-width', String(strokeWidth));
pathElement.setAttribute('fill', 'none');
pathElement.setAttribute('stroke-linecap', 'round');
pathElement.setAttribute('stroke-linejoin', 'round');
// Apply stroke style
path.setAttribute('d', pathData);
path.setAttribute('stroke', strokeColor);
path.setAttribute('stroke-width', String(strokeWidth));
if (strokeStyle === 'dash') {
pathElement.setAttribute('stroke-dasharray', '12,12');
path.setAttribute('stroke-dasharray', '12,12');
} else {
path.removeAttribute('stroke-dasharray');
}
// Apply markers
if (startMarkerId) {
pathElement.setAttribute('marker-start', `url(#${startMarkerId})`);
path.setAttribute('marker-start', `url(#${startMarkerId})`);
} else {
path.removeAttribute('marker-start');
}
if (endMarkerId) {
pathElement.setAttribute('marker-end', `url(#${endMarkerId})`);
path.setAttribute('marker-end', `url(#${endMarkerId})`);
} else {
path.removeAttribute('marker-end');
}
svg.append(pathElement);
element.append(svg);
// Set element size and position
element.style.width = `${model.w * zoom}px`;
element.style.height = `${model.h * zoom}px`;
@@ -370,7 +400,11 @@ export const connectorDomRenderer = (
renderer: DomRenderer
): void => {
connectorBaseDomRenderer(model, element, renderer);
renderConnectorLabel(model, element, renderer, renderer.viewport.zoom);
const retained = retainedConnectorDom.get(element);
if (!retained) return;
renderConnectorLabel(model, retained, renderer, renderer.viewport.zoom);
};
/**

View File

@@ -34,7 +34,7 @@
"zod": "^3.25.76"
},
"devDependencies": {
"vitest": "^3.2.4"
"vitest": "^4.0.18"
},
"exports": {
".": "./src/index.ts",

View File

@@ -32,7 +32,7 @@
"zod": "^3.25.76"
},
"devDependencies": {
"vitest": "^3.2.4"
"vitest": "^4.0.18"
},
"exports": {
".": "./src/index.ts",

View File

@@ -6,6 +6,37 @@ import { SVGShapeBuilder } from '@blocksuite/global/gfx';
import { manageClassNames, setStyles } from './utils';
const SVG_NS = 'http://www.w3.org/2000/svg';
type RetainedShapeDom = {
polygon: SVGPolygonElement | null;
svg: SVGSVGElement | null;
text: HTMLDivElement | null;
};
type RetainedShapeSvg = {
polygon: SVGPolygonElement;
svg: SVGSVGElement;
};
const retainedShapeDom = new WeakMap<HTMLElement, RetainedShapeDom>();
function getRetainedShapeDom(element: HTMLElement): RetainedShapeDom {
const existing = retainedShapeDom.get(element);
if (existing) {
return existing;
}
const retained = {
svg: null,
polygon: null,
text: null,
};
retainedShapeDom.set(element, retained);
return retained;
}
function applyShapeSpecificStyles(
model: ShapeElementModel,
element: HTMLElement,
@@ -14,10 +45,6 @@ function applyShapeSpecificStyles(
// Reset properties that might be set by different shape types
element.style.removeProperty('clip-path');
element.style.removeProperty('border-radius');
// Clear DOM for shapes that don't use SVG, or if type changes from SVG-based to non-SVG-based
if (model.shapeType !== 'diamond' && model.shapeType !== 'triangle') {
while (element.firstChild) element.firstChild.remove();
}
switch (model.shapeType) {
case 'rect': {
@@ -42,6 +69,54 @@ function applyShapeSpecificStyles(
// No 'else' needed to clear styles, as they are reset at the beginning of the function.
}
function getOrCreateSvg(
retained: RetainedShapeDom,
element: HTMLElement
): RetainedShapeSvg {
if (retained.svg && retained.polygon) {
return {
svg: retained.svg,
polygon: retained.polygon,
};
}
const svg = document.createElementNS(SVG_NS, 'svg');
svg.setAttribute('width', '100%');
svg.setAttribute('height', '100%');
svg.setAttribute('preserveAspectRatio', 'none');
const polygon = document.createElementNS(SVG_NS, 'polygon');
svg.append(polygon);
retained.svg = svg;
retained.polygon = polygon;
element.prepend(svg);
return { svg, polygon };
}
function removeSvg(retained: RetainedShapeDom) {
retained.svg?.remove();
retained.svg = null;
retained.polygon = null;
}
function getOrCreateText(retained: RetainedShapeDom, element: HTMLElement) {
if (retained.text) {
return retained.text;
}
const text = document.createElement('div');
retained.text = text;
element.append(text);
return text;
}
function removeText(retained: RetainedShapeDom) {
retained.text?.remove();
retained.text = null;
}
function applyBorderStyles(
model: ShapeElementModel,
element: HTMLElement,
@@ -99,8 +174,7 @@ export const shapeDomRenderer = (
const { zoom } = renderer.viewport;
const unscaledWidth = model.w;
const unscaledHeight = model.h;
const newChildren: Element[] = [];
const retained = getRetainedShapeDom(element);
const fillColor = renderer.getColorValue(
model.fillColor,
@@ -124,6 +198,7 @@ export const shapeDomRenderer = (
// For diamond and triangle, fill and border are handled by inline SVG
element.style.border = 'none'; // Ensure no standard CSS border interferes
element.style.backgroundColor = 'transparent'; // Host element is transparent
const { polygon, svg } = getOrCreateSvg(retained, element);
const strokeW = model.strokeWidth;
@@ -155,37 +230,30 @@ export const shapeDomRenderer = (
// Determine fill color
const finalFillColor = model.filled ? fillColor : 'transparent';
// Build SVG safely with DOM-API
const SVG_NS = 'http://www.w3.org/2000/svg';
const svg = document.createElementNS(SVG_NS, 'svg');
svg.setAttribute('width', '100%');
svg.setAttribute('height', '100%');
svg.setAttribute('viewBox', `0 0 ${unscaledWidth} ${unscaledHeight}`);
svg.setAttribute('preserveAspectRatio', 'none');
const polygon = document.createElementNS(SVG_NS, 'polygon');
polygon.setAttribute('points', svgPoints);
polygon.setAttribute('fill', finalFillColor);
polygon.setAttribute('stroke', finalStrokeColor);
polygon.setAttribute('stroke-width', String(strokeW));
if (finalStrokeDasharray !== 'none') {
polygon.setAttribute('stroke-dasharray', finalStrokeDasharray);
} else {
polygon.removeAttribute('stroke-dasharray');
}
svg.append(polygon);
newChildren.push(svg);
} else {
// Standard rendering for other shapes (e.g., rect, ellipse)
// innerHTML was already cleared by applyShapeSpecificStyles if necessary
removeSvg(retained);
element.style.backgroundColor = model.filled ? fillColor : 'transparent';
applyBorderStyles(model, element, strokeColor, zoom); // Uses standard CSS border
}
if (model.textDisplay && model.text) {
const str = model.text.toString();
const textElement = document.createElement('div');
const textElement = getOrCreateText(retained, element);
if (isRTL(str)) {
textElement.dir = 'rtl';
} else {
textElement.removeAttribute('dir');
}
textElement.style.position = 'absolute';
textElement.style.inset = '0';
@@ -210,12 +278,10 @@ export const shapeDomRenderer = (
true
);
textElement.textContent = str;
newChildren.push(textElement);
} else {
removeText(retained);
}
// Replace existing children to avoid memory leaks
element.replaceChildren(...newChildren);
applyTransformStyles(model, element);
manageClassNames(model, element);

View File

@@ -29,7 +29,7 @@
"zod": "^3.25.76"
},
"devDependencies": {
"vitest": "^3.2.4"
"vitest": "^4.0.18"
},
"exports": {
".": "./src/index.ts",

View File

@@ -34,7 +34,8 @@
"zod": "^3.25.76"
},
"devDependencies": {
"vitest": "^3.2.4"
"@vitest/browser-playwright": "^4.0.18",
"vitest": "^4.0.18"
},
"exports": {
".": "./src/index.ts",

View File

@@ -4,6 +4,7 @@ import type { FootNote } from '@blocksuite/affine-model';
import { CitationProvider } from '@blocksuite/affine-shared/services';
import { unsafeCSSVarV2 } from '@blocksuite/affine-shared/theme';
import type { AffineTextAttributes } from '@blocksuite/affine-shared/types';
import { isValidUrl, normalizeUrl } from '@blocksuite/affine-shared/utils';
import { WithDisposable } from '@blocksuite/global/lit';
import {
BlockSelection,
@@ -152,7 +153,9 @@ export class AffineFootnoteNode extends WithDisposable(ShadowlessElement) {
};
private readonly _handleUrlReference = (url: string) => {
window.open(url, '_blank');
const normalizedUrl = normalizeUrl(url);
if (!normalizedUrl || !isValidUrl(normalizedUrl)) return;
window.open(normalizedUrl, '_blank', 'noopener,noreferrer');
};
private readonly _updateFootnoteAttributes = (footnote: FootNote) => {

View File

@@ -1,3 +1,4 @@
import { playwright } from '@vitest/browser-playwright';
import { defineConfig } from 'vitest/config';
export default defineConfig({
@@ -8,10 +9,9 @@ export default defineConfig({
browser: {
enabled: true,
headless: true,
name: 'chromium',
provider: 'playwright',
instances: [{ browser: 'chromium' }],
provider: playwright(),
isolate: false,
providerOptions: {},
},
include: ['src/__tests__/**/*.unit.spec.ts'],
testTimeout: 500,

View File

@@ -177,6 +177,11 @@ export class ConnectorElementModel extends GfxPrimitiveElementModel<ConnectorEle
override getNearestPoint(point: IVec): IVec {
const { mode, absolutePath: path } = this;
if (path.length === 0) {
const { x, y } = this;
return [x, y];
}
if (mode === ConnectorMode.Straight) {
const first = path[0];
const last = path[path.length - 1];
@@ -213,6 +218,10 @@ export class ConnectorElementModel extends GfxPrimitiveElementModel<ConnectorEle
h = bounds.h;
}
if (path.length === 0) {
return 0.5;
}
point[0] = Vec.clamp(point[0], x, x + w);
point[1] = Vec.clamp(point[1], y, y + h);
@@ -258,6 +267,10 @@ export class ConnectorElementModel extends GfxPrimitiveElementModel<ConnectorEle
h = bounds.h;
}
if (path.length === 0) {
return [x + w / 2, y + h / 2];
}
if (mode === ConnectorMode.Orthogonal) {
const points = path.map<IVec>(p => [p[0], p[1]]);
const point = Polyline.pointAt(points, offsetDistance);
@@ -300,6 +313,10 @@ export class ConnectorElementModel extends GfxPrimitiveElementModel<ConnectorEle
const { mode, strokeWidth, absolutePath: path } = this;
if (path.length === 0) {
return false;
}
const point =
mode === ConnectorMode.Curve
? getBezierNearestPoint(getBezierParameters(path), currentPoint)

View File

@@ -74,7 +74,7 @@
],
"devDependencies": {
"@types/pdfmake": "^0.2.12",
"vitest": "^3.2.4"
"vitest": "^4.0.18"
},
"version": "0.26.3"
}

View File

@@ -0,0 +1,108 @@
import {
beforeEach,
describe,
expect,
it,
type MockInstance,
vi,
} from 'vitest';
import * as PointToRangeUtils from '../../utils/dom/point-to-range';
import { handleNativeRangeAtPoint } from '../../utils/dom/point-to-range';
// Unit tests for handleNativeRangeAtPoint. The DOM entry points that touch
// browser-only APIs are reached through the `api` seam exported by
// point-to-range, so they can be spied on / mocked here.
describe('test handleNativeRangeAtPoint', () => {
let caretRangeFromPointSpy: MockInstance<
(clientX: number, clientY: number) => Range | null
>;
let resetNativeSelectionSpy: MockInstance<(range: Range | null) => void>;
beforeEach(() => {
// Fresh spies per test; mockReturnValue is set inside each test case.
caretRangeFromPointSpy = vi.spyOn(
PointToRangeUtils.api,
'caretRangeFromPoint'
);
resetNativeSelectionSpy = vi.spyOn(
PointToRangeUtils.api,
'resetNativeSelection'
);
});
// No range at the point: the selection must be left alone entirely.
it('does nothing if caretRangeFromPoint returns null', () => {
caretRangeFromPointSpy.mockReturnValue(null);
handleNativeRangeAtPoint(10, 10);
expect(resetNativeSelectionSpy).not.toHaveBeenCalled();
});
// A caret already inside a Text node needs no normalization.
it('keeps range untouched if startContainer is a Text node', () => {
const div = document.createElement('div');
div.textContent = 'hello';
const text = div.firstChild!;
const range = document.createRange();
range.setStart(text, 2);
range.collapse(true);
caretRangeFromPointSpy.mockReturnValue(range);
handleNativeRangeAtPoint(10, 10);
expect(range.startContainer).toBe(text);
expect(range.startOffset).toBe(2);
expect(resetNativeSelectionSpy).toHaveBeenCalled();
});
// Caret on the element (offset 1 = after the text child) should be moved
// to the end of that text child.
it('moves caret into direct text child when clicking element', () => {
const div = document.createElement('div');
div.append('hello');
const range = document.createRange();
range.setStart(div, 1);
range.collapse(true);
caretRangeFromPointSpy.mockReturnValue(range);
handleNativeRangeAtPoint(10, 10);
expect(range.startContainer.nodeType).toBe(Node.TEXT_NODE);
expect(range.startContainer.textContent).toBe('hello');
expect(range.startOffset).toBe(5);
expect(resetNativeSelectionSpy).toHaveBeenCalled();
});
// The caret should land in the LAST meaningful text node of the element
// before the offset ('c' inside the second span).
it('moves caret to last meaningful text inside nested element', () => {
const div = document.createElement('div');
div.innerHTML = `<span>a</span><span><em>b</em>c</span>`;
const range = document.createRange();
range.setStart(div, 2);
range.collapse(true);
caretRangeFromPointSpy.mockReturnValue(range);
handleNativeRangeAtPoint(10, 10);
expect(range.startContainer.nodeType).toBe(Node.TEXT_NODE);
expect(range.startContainer.textContent).toBe('c');
expect(range.startOffset).toBe(1);
expect(resetNativeSelectionSpy).toHaveBeenCalled();
});
// Offset points at an empty span; normalization must fall back to scanning
// the whole startContainer and find the 'ok' text in the sibling span.
it('falls back to searching startContainer when offset element has no text', () => {
const div = document.createElement('div');
div.innerHTML = `<span></span><span>ok</span>`;
const range = document.createRange();
range.setStart(div, 1);
range.collapse(true);
caretRangeFromPointSpy.mockReturnValue(range);
handleNativeRangeAtPoint(10, 10);
expect(range.startContainer.textContent).toBe('ok');
expect(range.startOffset).toBe(2);
expect(resetNativeSelectionSpy).toHaveBeenCalled();
});
});

View File

@@ -88,11 +88,73 @@ export function getCurrentNativeRange(selection = window.getSelection()) {
return selection.getRangeAt(0);
}
// Indirection seam over the DOM-touching helpers so unit tests can spy on
// or mock them (see the point-to-range unit tests); production code calls
// through `api.*` instead of the bare functions.
export const api = {
caretRangeFromPoint,
resetNativeSelection,
};
/**
 * Places the native selection caret at client coordinates (x, y).
 *
 * Resolves a caret range at the point, normalizes it so it starts inside a
 * text node where possible, then applies it as the native selection. A miss
 * (no range at the point) leaves the current selection untouched.
 */
export function handleNativeRangeAtPoint(x: number, y: number) {
  const range = api.caretRangeFromPoint(x, y);
  if (!range) return;
  normalizeCaretRange(range);
  // Only sync the native selection when the caret landed on an actual node
  // (i.e. the click hit rich text).
  if (range.startContainer instanceof Node) {
    api.resetNativeSelection(range);
  }
}
/**
 * Returns the last text node under `node` whose content is non-whitespace,
 * or null when no such text node exists.
 *
 * Fixes vs. original: the filter's truthy check followed by a redundant
 * optional chain (`node.textContent && node.textContent?.trim()...`) is
 * collapsed into a single expression; the callback parameter no longer
 * shadows the outer `node`; and `last` carries an explicit `Node | null`
 * annotation so the reassignment type-checks under strictNullChecks.
 */
function lastMeaningfulTextNode(node: Node): Node | null {
  const walker = document.createTreeWalker(node, NodeFilter.SHOW_TEXT, {
    acceptNode(candidate) {
      // Accept only text nodes with visible (non-whitespace) content.
      return candidate.textContent?.trim()
        ? NodeFilter.FILTER_ACCEPT
        : NodeFilter.FILTER_REJECT;
    },
  });
  // Walk forward through all accepted text nodes, remembering the last one.
  let last: Node | null = null;
  while (walker.nextNode()) {
    last = walker.currentNode;
  }
  return last;
}
/**
 * Moves a collapsed caret range that starts on an *element* node into the
 * nearest meaningful text node, mutating `range` in place.
 *
 * Resolution order:
 *   1. the child node just before `startOffset` (or the first child when
 *      the offset is 0) — if it is a text node, the caret goes to its end
 *      (or start when offset is 0);
 *   2. the last non-whitespace text node inside that child;
 *   3. the last non-whitespace text node anywhere under `startContainer`.
 * When no text is found the range is left untouched.
 */
function normalizeCaretRange(range: Range) {
let { startContainer, startOffset } = range;
// Already inside a text node: nothing to normalize.
if (startContainer.nodeType === Node.TEXT_NODE) return;
// Try to find text in the element at `startOffset`
const offsetEl =
startOffset > 0
? startContainer.childNodes[startOffset - 1]
: startContainer.childNodes[0];
if (offsetEl) {
if (offsetEl.nodeType === Node.TEXT_NODE) {
// Offset > 0 means the caret sits after this child: snap to its end.
// Offset 0 means the caret precedes it: snap to its start.
range.setStart(
offsetEl,
startOffset > 0 ? (offsetEl.textContent?.length ?? 0) : 0
);
range.collapse(true);
return;
}
const text = lastMeaningfulTextNode(offsetEl);
if (text) {
range.setStart(text, text.textContent?.length ?? 0);
range.collapse(true);
return;
}
}
// Fallback, try to find text in startContainer
const text = lastMeaningfulTextNode(startContainer);
if (text) {
range.setStart(text, text.textContent?.length ?? 0);
range.collapse(true);
return;
}
}

View File

@@ -49,6 +49,9 @@ export async function printToPdf(
--affine-background-primary: #fff !important;
--affine-background-secondary: #fff !important;
--affine-background-tertiary: #fff !important;
--affine-background-code-block: #f5f5f5 !important;
--affine-quote-color: #e3e3e3 !important;
--affine-border-color: #e3e3e3 !important;
}
body, [data-theme='dark'] {
color: #000 !important;

View File

@@ -24,6 +24,11 @@ const toURL = (str: string) => {
}
};
const hasAllowedScheme = (url: URL) => {
const protocol = url.protocol.slice(0, -1).toLowerCase();
return ALLOWED_SCHEMES.has(protocol);
};
function resolveURL(str: string, baseUrl: string, padded = false) {
const url = toURL(str);
if (!url) return null;
@@ -61,6 +66,7 @@ export function normalizeUrl(str: string) {
// Formatted
if (url) {
if (!hasAllowedScheme(url)) return '';
if (!str.endsWith('/') && url.href.endsWith('/')) {
return url.href.substring(0, url.href.length - 1);
}

View File

@@ -9,7 +9,7 @@ export default defineConfig({
include: ['src/__tests__/**/*.unit.spec.ts'],
testTimeout: 1000,
coverage: {
provider: 'istanbul', // or 'c8'
provider: 'istanbul',
reporter: ['lcov'],
reportsDirectory: '../../../.coverage/affine-shared',
},

View File

@@ -62,7 +62,7 @@
"zod": "^3.25.76"
},
"devDependencies": {
"vitest": "^3.2.4"
"vitest": "^4.0.18"
},
"version": "0.26.3"
}

View File

@@ -0,0 +1,22 @@
import { describe, expect, test } from 'vitest';
import { getBezierParameters } from '../gfx/curve.js';
import { PointLocation } from '../gfx/model/index.js';
describe('getBezierParameters', () => {
  test('should handle empty path', () => {
    // An empty path must not throw and must fall back to four default points.
    const fallback = [
      new PointLocation(),
      new PointLocation(),
      new PointLocation(),
      new PointLocation(),
    ];
    expect(() => getBezierParameters([])).not.toThrow();
    expect(getBezierParameters([])).toEqual(fallback);
  });

  test('should handle single-point path', () => {
    // A degenerate one-point path collapses all four control points onto it.
    const point = new PointLocation([10, 20]);
    const params = getBezierParameters([point]);
    expect(params).toEqual([point, point, point, point]);
  });
});

View File

@@ -142,6 +142,11 @@ export function getBezierNearestPoint(
export function getBezierParameters(
points: PointLocation[]
): BezierCurveParameters {
if (points.length === 0) {
const point = new PointLocation();
return [point, point, point, point];
}
// Fallback for degenerate Bezier curve (all points are at the same position)
if (points.length === 1) {
const point = points[0];

View File

@@ -5,7 +5,7 @@ export default defineConfig({
include: ['src/__tests__/**/*.unit.spec.ts'],
testTimeout: 500,
coverage: {
provider: 'istanbul', // or 'c8'
provider: 'istanbul',
reporter: ['lcov'],
reportsDirectory: '../../../.coverage/global',
},

View File

@@ -33,7 +33,8 @@
"zod": "^3.25.76"
},
"devDependencies": {
"vitest": "^3.2.4"
"@vitest/browser-playwright": "^4.0.18",
"vitest": "^4.0.18"
},
"exports": {
".": "./src/index.ts",

View File

@@ -596,7 +596,7 @@ export class LayerManager extends GfxExtension {
private _updateLayer(
element: GfxModel | GfxLocalElementModel,
props?: Record<string, unknown>,
oldValues?: Record<string, unknown>
_oldValues?: Record<string, unknown>
) {
const modelType = this._getModelType(element);
const isLocalElem = element instanceof GfxLocalElementModel;
@@ -613,16 +613,7 @@ export class LayerManager extends GfxExtension {
};
if (shouldUpdateGroupChildren) {
const group = element as GfxModel & GfxGroupCompatibleInterface;
const oldChildIds = childIdsChanged
? Array.isArray(oldValues?.['childIds'])
? (oldValues['childIds'] as string[])
: this._groupChildSnapshot.get(group.id)
: undefined;
const relatedElements = this._getRelatedGroupElements(group, oldChildIds);
this._refreshElementsInLayer(relatedElements);
this._syncGroupChildSnapshot(group);
this._reset();
return true;
}

View File

@@ -31,6 +31,13 @@ function updateTransform(element: GfxBlockComponent) {
element.style.transform = element.getCSSTransform();
}
/** Sync the host element's CSS z-index with the model's layer order, skipping no-op writes. */
function updateZIndex(element: GfxBlockComponent) {
  const next = element.toZIndex();
  if (element.style.zIndex === next) return;
  element.style.zIndex = next;
}
function updateBlockVisibility(view: GfxBlockComponent) {
if (view.transformState$.value === 'active') {
view.style.visibility = 'visible';
@@ -58,14 +65,22 @@ function handleGfxConnection(instance: GfxBlockComponent) {
instance.store.slots.blockUpdated.subscribe(({ type, id }) => {
if (id === instance.model.id && type === 'update') {
updateTransform(instance);
updateZIndex(instance);
}
})
);
instance.disposables.add(
instance.gfx.layer.slots.layerUpdated.subscribe(() => {
updateZIndex(instance);
})
);
instance.disposables.add(
effect(() => {
updateBlockVisibility(instance);
updateTransform(instance);
updateZIndex(instance);
})
);
}
@@ -105,17 +120,23 @@ export abstract class GfxBlockComponent<
onBoxSelected(_: BoxSelectionContext) {}
getCSSScaleVal(): number {
const viewport = this.gfx.viewport;
const { zoom, viewScale } = viewport;
return zoom / viewScale;
}
getCSSTransform() {
const viewport = this.gfx.viewport;
const { translateX, translateY, zoom } = viewport;
const { translateX, translateY, zoom, viewScale } = viewport;
const bound = Bound.deserialize(this.model.xywh);
const scaledX = bound.x * zoom;
const scaledY = bound.y * zoom;
const scaledX = (bound.x * zoom) / viewScale;
const scaledY = (bound.y * zoom) / viewScale;
const deltaX = scaledX - bound.x;
const deltaY = scaledY - bound.y;
return `translate(${translateX + deltaX}px, ${translateY + deltaY}px) scale(${zoom})`;
return `translate(${translateX / viewScale + deltaX}px, ${translateY / viewScale + deltaY}px) scale(${this.getCSSScaleVal()})`;
}
getRenderingRect() {
@@ -219,18 +240,12 @@ export function toGfxBlockComponent<
handleGfxConnection(this);
}
// eslint-disable-next-line sonarjs/no-identical-functions
getCSSScaleVal(): number {
return GfxBlockComponent.prototype.getCSSScaleVal.call(this);
}
getCSSTransform() {
const viewport = this.gfx.viewport;
const { translateX, translateY, zoom } = viewport;
const bound = Bound.deserialize(this.model.xywh);
const scaledX = bound.x * zoom;
const scaledY = bound.y * zoom;
const deltaX = scaledX - bound.x;
const deltaY = scaledY - bound.y;
return `translate(${translateX + deltaX}px, ${translateY + deltaY}px) scale(${zoom})`;
return GfxBlockComponent.prototype.getCSSTransform.call(this);
}
// eslint-disable-next-line sonarjs/no-identical-functions

View File

@@ -1,3 +1,4 @@
import { playwright } from '@vitest/browser-playwright';
import { defineConfig } from 'vitest/config';
export default defineConfig({
@@ -8,15 +9,14 @@ export default defineConfig({
browser: {
enabled: true,
headless: true,
name: 'chromium',
provider: 'playwright',
instances: [{ browser: 'chromium' }],
provider: playwright(),
isolate: false,
providerOptions: {},
},
include: ['src/__tests__/**/*.unit.spec.ts'],
testTimeout: 500,
coverage: {
provider: 'istanbul', // or 'c8'
provider: 'istanbul',
reporter: ['lcov'],
reportsDirectory: '../../../.coverage/std',
},

View File

@@ -29,7 +29,7 @@
"devDependencies": {
"@types/lodash.clonedeep": "^4.5.9",
"@types/lodash.merge": "^4.6.9",
"vitest": "^3.2.4"
"vitest": "^4.0.18"
},
"exports": {
".": "./src/index.ts",

View File

@@ -7,15 +7,11 @@ export * from './transformer';
export { type IdGenerator, nanoid, uuidv4 } from './utils/id-generator';
export * from './yjs';
const env = (
typeof globalThis !== 'undefined'
? globalThis
: typeof window !== 'undefined'
? window
: typeof global !== 'undefined'
? global
: {}
) as Record<string, boolean>;
const env = (typeof globalThis !== 'undefined'
? globalThis
: typeof window !== 'undefined'
? window
: {}) as unknown as Record<string, boolean>;
const importIdentifier = '__ $BLOCKSUITE_STORE$ __';
if (env[importIdentifier] === true) {

View File

@@ -8,7 +8,7 @@ export default defineConfig({
include: ['src/__tests__/**/*.unit.spec.ts'],
testTimeout: 500,
coverage: {
provider: 'istanbul', // or 'c8'
provider: 'istanbul',
reporter: ['lcov'],
reportsDirectory: '../../../.coverage/store',
},

View File

@@ -19,7 +19,7 @@
"y-protocols": "^1.0.6"
},
"devDependencies": {
"vitest": "^3.2.4"
"vitest": "^4.0.18"
},
"peerDependencies": {
"yjs": "*"

View File

@@ -5,7 +5,7 @@ export default defineConfig({
include: ['src/__tests__/**/*.unit.spec.ts'],
testTimeout: 500,
coverage: {
provider: 'istanbul', // or 'c8'
provider: 'istanbul',
reporter: ['lcov'],
reportsDirectory: '../../../.coverage/sync',
},

View File

@@ -6,7 +6,7 @@
"dev": "vite",
"build": "tsc",
"test:unit": "vitest --browser.headless --run",
"test:debug": "PWDEBUG=1 npx vitest"
"test:debug": "PWDEBUG=1 npx vitest --browser.headless=false"
},
"sideEffects": false,
"keywords": [],
@@ -17,7 +17,7 @@
"@blocksuite/icons": "^2.2.17",
"@floating-ui/dom": "^1.6.13",
"@lit/context": "^1.1.3",
"@lottiefiles/dotlottie-wc": "^0.5.0",
"@lottiefiles/dotlottie-wc": "^0.9.4",
"@preact/signals-core": "^1.8.0",
"@toeverything/theme": "^1.1.23",
"@vanilla-extract/css": "^1.17.0",
@@ -41,10 +41,11 @@
],
"devDependencies": {
"@vanilla-extract/vite-plugin": "^5.0.0",
"@vitest/browser-playwright": "^4.0.18",
"vite": "^7.2.7",
"vite-plugin-istanbul": "^7.2.1",
"vite-plugin-wasm": "^3.5.0",
"vitest": "^3.2.4"
"vitest": "^4.0.18"
},
"version": "0.26.3"
}

View File

@@ -6,6 +6,7 @@ import type {
import { ungroupCommand } from '@blocksuite/affine/gfx/group';
import type {
GroupElementModel,
MindmapElementModel,
NoteBlockModel,
} from '@blocksuite/affine/model';
import { generateKeyBetween } from '@blocksuite/affine/std/gfx';
@@ -253,6 +254,40 @@ test('blocks should rerender when their z-index changed', async () => {
assertBlocksContent();
});
test('block host z-index should update after reordering', async () => {
  // Two notes; `frontId` is added last, so it initially sits on top.
  const backId = addNote(doc);
  const frontId = addNote(doc);

  await wait();

  // Resolve the rendered host element for a block id inside the gfx viewport.
  const getBlockHost = (id: string) =>
    document.querySelector<HTMLElement>(
      `affine-edgeless-root gfx-viewport > [data-block-id="${id}"]`
    );

  const backHost = getBlockHost(backId);
  const frontHost = getBlockHost(frontId);

  expect(backHost).not.toBeNull();
  expect(frontHost).not.toBeNull();
  // Initial stacking: the earlier note renders below the later one.
  expect(Number(backHost!.style.zIndex)).toBeLessThan(
    Number(frontHost!.style.zIndex)
  );

  // Reorder the back note to the front; the DOM z-index must follow.
  service.crud.updateElement(backId, {
    index: service.layer.getReorderedIndex(
      service.crud.getElementById(backId)!,
      'front'
    ),
  });

  await wait();

  expect(Number(backHost!.style.zIndex)).toBeGreaterThan(
    Number(frontHost!.style.zIndex)
  );
});
describe('layer reorder functionality', () => {
let ids: string[] = [];
@@ -428,14 +463,17 @@ describe('group related functionality', () => {
const elements = [
service.crud.addElement('shape', {
shapeType: 'rect',
xywh: '[0,0,100,100]',
})!,
addNote(doc),
service.crud.addElement('shape', {
shapeType: 'rect',
xywh: '[120,0,100,100]',
})!,
addNote(doc),
service.crud.addElement('shape', {
shapeType: 'rect',
xywh: '[240,0,100,100]',
})!,
];
@@ -528,6 +566,35 @@ describe('group related functionality', () => {
expect(service.layer.layers[1].elements[0]).toBe(group);
});
test("change mindmap index should update its nodes' layer", async () => {
  const noteId = addNote(doc);
  // Mindmap with a root and one child; created after the note, so it
  // starts above the note in the layer order.
  const mindmapId = service.crud.addElement('mindmap', {
    children: {
      text: 'root',
      children: [{ text: 'child' }],
    },
  })!;

  await wait();

  const note = service.crud.getElementById(noteId)!;
  const mindmap = service.crud.getElementById(
    mindmapId
  )! as MindmapElementModel;
  const root = mindmap.tree.element;

  expect(service.layer.getZIndex(root)).toBeGreaterThan(
    service.layer.getZIndex(note)
  );

  // Moving the mindmap container to the back must carry its nodes along.
  mindmap.index = service.layer.getReorderedIndex(mindmap, 'back');

  await wait();

  expect(service.layer.getZIndex(root)).toBeLessThan(
    service.layer.getZIndex(note)
  );
});
test('should keep relative index order of elements after group, ungroup, undo, redo', () => {
const edgeless = getDocRootBlock(doc, editor, 'edgeless');
const elementIds = [
@@ -769,6 +836,7 @@ test('indexed canvas should be inserted into edgeless portal when switch to edge
service.crud.addElement('shape', {
shapeType: 'rect',
xywh: '[0,0,100,100]',
})!;
addNote(doc);
@@ -777,6 +845,7 @@ test('indexed canvas should be inserted into edgeless portal when switch to edge
service.crud.addElement('shape', {
shapeType: 'rect',
xywh: '[120,0,100,100]',
})!;
editor.mode = 'page';
@@ -792,10 +861,10 @@ test('indexed canvas should be inserted into edgeless portal when switch to edge
'.indexable-canvas'
)[0] as HTMLCanvasElement;
expect(indexedCanvas.width).toBe(
expect(indexedCanvas.width).toBeLessThanOrEqual(
(surface.renderer as CanvasRenderer).canvas.width
);
expect(indexedCanvas.height).toBe(
expect(indexedCanvas.height).toBeLessThanOrEqual(
(surface.renderer as CanvasRenderer).canvas.height
);
expect(indexedCanvas.width).not.toBe(0);

View File

@@ -1,4 +1,5 @@
import { vanillaExtractPlugin } from '@vanilla-extract/vite-plugin';
import { playwright } from '@vitest/browser-playwright';
import { defineConfig } from 'vitest/config';
export default defineConfig(_configEnv =>
@@ -18,13 +19,13 @@ export default defineConfig(_configEnv =>
retry: process.env.CI === 'true' ? 3 : 0,
browser: {
enabled: true,
headless: process.env.CI === 'true',
headless: true,
instances: [
{ browser: 'chromium' },
{ browser: 'firefox' },
{ browser: 'webkit' },
],
provider: 'playwright',
provider: playwright(),
isolate: false,
viewport: {
width: 1024,
@@ -32,16 +33,13 @@ export default defineConfig(_configEnv =>
},
},
coverage: {
provider: 'istanbul', // or 'c8'
provider: 'istanbul',
reporter: ['lcov'],
reportsDirectory: '../../.coverage/integration-test',
},
deps: {
interopDefault: true,
},
testTransformMode: {
web: ['src/__tests__/**/*.spec.ts'],
},
},
})
);

View File

@@ -22,7 +22,7 @@
"af": "r affine.ts",
"dev": "yarn affine dev",
"build": "yarn affine build",
"lint:eslint": "cross-env NODE_OPTIONS=\"--max-old-space-size=8192\" eslint --report-unused-disable-directives-severity=off . --cache",
"lint:eslint": "cross-env NODE_OPTIONS=\"--max-old-space-size=16384\" eslint --report-unused-disable-directives-severity=off . --cache",
"lint:eslint:fix": "yarn lint:eslint --fix --fix-type problem,suggestion,layout",
"lint:prettier": "prettier --ignore-unknown --cache --check .",
"lint:prettier:fix": "prettier --ignore-unknown --cache --write .",
@@ -56,7 +56,7 @@
"@faker-js/faker": "^10.1.0",
"@istanbuljs/schema": "^0.1.3",
"@magic-works/i18n-codegen": "^0.6.1",
"@playwright/test": "=1.52.0",
"@playwright/test": "=1.58.2",
"@smarttools/eslint-plugin-rxjs": "^1.0.8",
"@taplo/cli": "^0.7.0",
"@toeverything/infra": "workspace:*",
@@ -64,9 +64,9 @@
"@types/node": "^22.0.0",
"@typescript-eslint/parser": "^8.55.0",
"@vanilla-extract/vite-plugin": "^5.0.0",
"@vitest/browser": "^3.2.4",
"@vitest/coverage-istanbul": "^3.2.4",
"@vitest/ui": "^3.2.4",
"@vitest/browser": "^4.0.18",
"@vitest/coverage-istanbul": "^4.0.18",
"@vitest/ui": "^4.0.18",
"cross-env": "^10.1.0",
"electron": "^39.0.0",
"eslint": "^9.39.2",
@@ -90,7 +90,7 @@
"typescript-eslint": "^8.55.0",
"unplugin-swc": "^1.5.9",
"vite": "^7.2.7",
"vitest": "^3.2.4"
"vitest": "^4.0.18"
},
"packageManager": "yarn@4.12.0",
"resolutions": {

View File

@@ -14,13 +14,23 @@ affine_common = { workspace = true, features = [
"napi",
"ydoc-loader",
] }
anyhow = { workspace = true }
chrono = { workspace = true }
file-format = { workspace = true }
image = { workspace = true }
infer = { workspace = true }
libwebp-sys = { workspace = true }
little_exif = { workspace = true }
llm_adapter = { workspace = true, default-features = false, features = [
"ureq-client",
] }
matroska = { workspace = true }
mp4parse = { workspace = true }
napi = { workspace = true, features = ["async"] }
napi-derive = { workspace = true }
rand = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
sha3 = { workspace = true }
tiktoken-rs = { workspace = true }
v_htmlescape = { workspace = true }

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -1,5 +1,9 @@
/* auto-generated by NAPI-RS */
/* eslint-disable */
export declare class LlmStreamHandle {
abort(): void
}
export declare class Tokenizer {
count(content: string, allowedSpecial?: Array<string> | undefined | null): number
}
@@ -46,6 +50,16 @@ export declare function getMime(input: Uint8Array): string
export declare function htmlSanitize(input: string): string
export declare function llmDispatch(protocol: string, backendConfigJson: string, requestJson: string): string
export declare function llmDispatchStream(protocol: string, backendConfigJson: string, requestJson: string, callback: ((err: Error | null, arg: string) => void)): LlmStreamHandle
export declare function llmEmbeddingDispatch(protocol: string, backendConfigJson: string, requestJson: string): string
export declare function llmRerankDispatch(protocol: string, backendConfigJson: string, requestJson: string): string
export declare function llmStructuredDispatch(protocol: string, backendConfigJson: string, requestJson: string): string
/**
* Merge updates in form like `Y.applyUpdate(doc, update)` way and return the
* result binary.
@@ -75,6 +89,8 @@ export interface NativeCrawlResult {
export interface NativeMarkdownResult {
title: string
markdown: string
knownUnsupportedBlocks: Array<string>
unknownBlocks: Array<string>
}
export interface NativePageDocContent {
@@ -102,6 +118,8 @@ export declare function parsePageDoc(docBin: Buffer, maxSummaryLength?: number |
export declare function parseWorkspaceDoc(docBin: Buffer): NativeWorkspaceDocContent | null
export declare function processImage(input: Buffer, maxEdge: number, keepExif: boolean): Promise<Buffer>
export declare function readAllDocIdsFromRootDoc(docBin: Buffer, includeTrash?: boolean | undefined | null): Array<string>
/**

View File

@@ -9,6 +9,8 @@ use napi_derive::napi;
pub struct NativeMarkdownResult {
pub title: String,
pub markdown: String,
pub known_unsupported_blocks: Vec<String>,
pub unknown_blocks: Vec<String>,
}
impl From<MarkdownResult> for NativeMarkdownResult {
@@ -16,6 +18,8 @@ impl From<MarkdownResult> for NativeMarkdownResult {
Self {
title: result.title,
markdown: result.markdown,
known_unsupported_blocks: result.known_unsupported_blocks,
unknown_blocks: result.unknown_blocks,
}
}
}

View File

@@ -1,3 +1,4 @@
use matroska::Matroska;
use mp4parse::{TrackType, read_mp4};
use napi_derive::napi;
@@ -8,7 +9,13 @@ pub fn get_mime(input: &[u8]) -> String {
} else {
file_format::FileFormat::from_bytes(input).media_type().to_string()
};
if mimetype == "video/mp4" {
if let Some(container) = matroska_container_kind(input).or(match mimetype.as_str() {
"video/webm" | "application/webm" => Some(ContainerKind::WebM),
"video/x-matroska" | "application/x-matroska" => Some(ContainerKind::Matroska),
_ => None,
}) {
detect_matroska_flavor(input, container, &mimetype)
} else if mimetype == "video/mp4" {
detect_mp4_flavor(input)
} else {
mimetype
@@ -37,3 +44,68 @@ fn detect_mp4_flavor(input: &[u8]) -> String {
Err(_) => "video/mp4".to_string(),
}
}
/// Which EBML-based container a byte stream was identified as.
#[derive(Clone, Copy)]
enum ContainerKind {
    WebM,
    Matroska,
}

impl ContainerKind {
    /// MIME type reported when the container holds audio but no video.
    fn audio_mime(&self) -> &'static str {
        if matches!(self, Self::WebM) {
            "audio/webm"
        } else {
            "audio/x-matroska"
        }
    }
}
/// Refine the MIME type of a Matroska/WebM stream.
///
/// Parses `input` with the `matroska` crate; when the file has audio tracks
/// but no video tracks it returns the container-specific audio MIME
/// (`audio/webm` or `audio/x-matroska`). Any other track mix — or a parse
/// failure — returns the caller-provided `fallback` MIME unchanged.
fn detect_matroska_flavor(input: &[u8], container: ContainerKind, fallback: &str) -> String {
    match Matroska::open(std::io::Cursor::new(input)) {
        Ok(file) => {
            let has_video = file.video_tracks().next().is_some();
            let has_audio = file.audio_tracks().next().is_some();
            if !has_video && has_audio {
                container.audio_mime().to_string()
            } else {
                fallback.to_string()
            }
        }
        // Parsing errors are non-fatal: keep the originally detected MIME.
        Err(_) => fallback.to_string(),
    }
}
/// Heuristically classify the container by scanning the first 1 KiB for the
/// ASCII DocType markers "webm" or "matroska" (case-insensitive).
/// Returns `None` when neither marker is found.
fn matroska_container_kind(input: &[u8]) -> Option<ContainerKind> {
    let probe_len = input.len().min(1024);
    let header = &input[..probe_len];
    let contains = |needle: &[u8]| {
        header
            .windows(needle.len())
            .any(|window| window.eq_ignore_ascii_case(needle))
    };
    if contains(b"webm") {
        return Some(ContainerKind::WebM);
    }
    if contains(b"matroska") {
        return Some(ContainerKind::Matroska);
    }
    None
}
#[cfg(test)]
mod tests {
    use super::*;

    // Small fixture files checked into the repository.
    const AUDIO_ONLY_WEBM: &[u8] = include_bytes!("../fixtures/audio-only.webm");
    const AUDIO_VIDEO_WEBM: &[u8] = include_bytes!("../fixtures/audio-video.webm");
    const AUDIO_ONLY_MATROSKA: &[u8] = include_bytes!("../fixtures/audio-only.mka");

    /// WebM containing only audio tracks is reported as `audio/webm`.
    #[test]
    fn detects_audio_only_webm_as_audio() {
        assert_eq!(get_mime(AUDIO_ONLY_WEBM), "audio/webm");
    }

    /// WebM with a video track keeps its video MIME type.
    #[test]
    fn preserves_video_webm() {
        assert_eq!(get_mime(AUDIO_VIDEO_WEBM), "video/webm");
    }

    /// Audio-only Matroska (.mka) is reported with the Matroska audio MIME.
    #[test]
    fn detects_audio_only_matroska_as_audio() {
        assert_eq!(get_mime(AUDIO_ONLY_MATROSKA), "audio/x-matroska");
    }
}

View File

@@ -0,0 +1,353 @@
use std::io::Cursor;
use anyhow::{Context, Result as AnyResult, bail};
use image::{
AnimationDecoder, DynamicImage, ImageDecoder, ImageFormat, ImageReader,
codecs::{gif::GifDecoder, png::PngDecoder, webp::WebPDecoder},
imageops::FilterType,
metadata::Orientation,
};
use libwebp_sys::{
WEBP_MUX_ABI_VERSION, WebPData, WebPDataClear, WebPDataInit, WebPEncodeRGBA, WebPFree, WebPMuxAssemble,
WebPMuxCreateInternal, WebPMuxDelete, WebPMuxError, WebPMuxSetChunk,
};
use little_exif::{exif_tag::ExifTag, filetype::FileExtension, metadata::Metadata};
use napi::{
Env, Error, Result, Status, Task,
bindgen_prelude::{AsyncTask, Buffer},
};
use napi_derive::napi;
const WEBP_QUALITY: f32 = 80.0;
const MAX_IMAGE_DIMENSION: u32 = 16_384;
const MAX_IMAGE_PIXELS: u64 = 40_000_000;
/// Background task that runs `process_image_inner` off the JS thread.
pub struct AsyncProcessImageTask {
    // Raw bytes of the source image.
    input: Vec<u8>,
    // Longest edge allowed in the output; larger images are downscaled.
    max_edge: u32,
    // Whether EXIF metadata (minus orientation) is copied into the output.
    keep_exif: bool,
}

#[napi]
impl Task for AsyncProcessImageTask {
    type Output = Vec<u8>;
    type JsValue = Buffer;

    /// Runs on the worker thread; decode/encode failures surface to JS as
    /// `InvalidArg` errors.
    fn compute(&mut self) -> Result<Self::Output> {
        process_image_inner(&self.input, self.max_edge, self.keep_exif)
            .map_err(|error| Error::new(Status::InvalidArg, error.to_string()))
    }

    /// Converts the encoded bytes into a Node `Buffer` on the JS thread.
    fn resolve(&mut self, _: Env, output: Self::Output) -> Result<Self::JsValue> {
        Ok(output.into())
    }
}
/// Exposed to JS as `processImage(input, maxEdge, keepExif): Promise<Buffer>`.
///
/// Copies the input buffer and schedules the CPU-heavy conversion as an
/// async NAPI task so the event loop is not blocked.
#[napi]
pub fn process_image(input: Buffer, max_edge: u32, keep_exif: bool) -> AsyncTask<AsyncProcessImageTask> {
    AsyncTask::new(AsyncProcessImageTask {
        input: input.to_vec(),
        max_edge,
        keep_exif,
    })
}
/// Decode `input`, bake in its EXIF orientation, shrink it so the longest
/// edge is at most `max_edge`, and re-encode it as lossy WebP.
///
/// Errors on `max_edge == 0`, unsupported formats, corrupt data, or images
/// exceeding the dimension/pixel limits. With `keep_exif`, EXIF metadata
/// (except the orientation tag) is carried over into the output.
fn process_image_inner(input: &[u8], max_edge: u32, keep_exif: bool) -> AnyResult<Vec<u8>> {
    if max_edge == 0 {
        bail!("max_edge must be greater than 0");
    }

    let format = image::guess_format(input).context("unsupported image format")?;
    // Probe declared dimensions before the full decode so oversized images
    // are rejected without allocating their pixel data.
    let (width, height) = read_dimensions(input, format)?;
    validate_dimensions(width, height)?;

    let mut image = decode_image(input, format)?;
    let orientation = read_orientation(input, format)?;
    // Apply the EXIF orientation to the pixels themselves.
    image.apply_orientation(orientation);

    if image.width().max(image.height()) > max_edge {
        // `resize` preserves aspect ratio within a max_edge x max_edge box.
        image = image.resize(max_edge, max_edge, FilterType::Lanczos3);
    }

    let mut output = encode_webp_lossy(&image.into_rgba8())?;
    if keep_exif {
        preserve_exif(input, format, &mut output)?;
    }
    Ok(output)
}
/// Probe the declared (width, height) of `input` without decoding pixels.
fn read_dimensions(input: &[u8], format: ImageFormat) -> AnyResult<(u32, u32)> {
    let reader = ImageReader::with_format(Cursor::new(input), format);
    reader.into_dimensions().context("failed to decode image")
}
/// Reject images whose declared dimensions are degenerate or exceed the
/// per-edge (`MAX_IMAGE_DIMENSION`) or total-pixel (`MAX_IMAGE_PIXELS`)
/// limits, before any pixel data is decoded.
fn validate_dimensions(width: u32, height: u32) -> AnyResult<()> {
    if width == 0 || height == 0 {
        bail!("failed to decode image");
    }
    let longest_edge = width.max(height);
    if longest_edge > MAX_IMAGE_DIMENSION {
        bail!("image dimensions exceed limit");
    }
    // Widen before multiplying so the pixel count cannot overflow u32.
    let pixel_count = u64::from(width) * u64::from(height);
    if pixel_count > MAX_IMAGE_PIXELS {
        bail!("image pixel count exceeds limit");
    }
    Ok(())
}
/// Fully decode `input` into a `DynamicImage`, taking only the first frame
/// of animated formats (GIF, APNG, animated WebP).
fn decode_image(input: &[u8], format: ImageFormat) -> AnyResult<DynamicImage> {
    Ok(match format {
        ImageFormat::Gif => {
            // GIFs are decoded frame-wise; keep the first frame only.
            let decoder = GifDecoder::new(Cursor::new(input)).context("failed to decode image")?;
            let frame = decoder
                .into_frames()
                .next()
                .transpose()
                .context("failed to decode image")?
                .context("image does not contain any frames")?;
            DynamicImage::ImageRgba8(frame.into_buffer())
        }
        ImageFormat::Png => {
            let decoder = PngDecoder::new(Cursor::new(input)).context("failed to decode image")?;
            if decoder.is_apng().context("failed to decode image")? {
                // Animated PNG: extract the first frame.
                let frame = decoder
                    .apng()
                    .context("failed to decode image")?
                    .into_frames()
                    .next()
                    .transpose()
                    .context("failed to decode image")?
                    .context("image does not contain any frames")?;
                DynamicImage::ImageRgba8(frame.into_buffer())
            } else {
                // Still PNG: decode directly.
                DynamicImage::from_decoder(decoder).context("failed to decode image")?
            }
        }
        ImageFormat::WebP => {
            // Handles both still and animated WebP; first frame wins.
            let decoder = WebPDecoder::new(Cursor::new(input)).context("failed to decode image")?;
            let frame = decoder
                .into_frames()
                .next()
                .transpose()
                .context("failed to decode image")?
                .context("image does not contain any frames")?;
            DynamicImage::ImageRgba8(frame.into_buffer())
        }
        _ => {
            // All other (still) formats go through the generic reader.
            let reader = ImageReader::with_format(Cursor::new(input), format);
            let decoder = reader.into_decoder().context("failed to decode image")?;
            DynamicImage::from_decoder(decoder).context("failed to decode image")?
        }
    })
}
/// Read the EXIF orientation of `input`, constructing a format-specific
/// decoder for the formats that carry orientation in their own metadata.
fn read_orientation(input: &[u8], format: ImageFormat) -> AnyResult<Orientation> {
    Ok(match format {
        ImageFormat::Gif => GifDecoder::new(Cursor::new(input))
            .context("failed to decode image")?
            .orientation()
            .context("failed to decode image")?,
        ImageFormat::Png => PngDecoder::new(Cursor::new(input))
            .context("failed to decode image")?
            .orientation()
            .context("failed to decode image")?,
        ImageFormat::WebP => WebPDecoder::new(Cursor::new(input))
            .context("failed to decode image")?
            .orientation()
            .context("failed to decode image")?,
        // Other formats: let the generic reader pick a decoder.
        _ => ImageReader::with_format(Cursor::new(input), format)
            .into_decoder()
            .context("failed to decode image")?
            .orientation()
            .context("failed to decode image")?,
    })
}
/// Encode an RGBA image as lossy WebP at `WEBP_QUALITY` via the libwebp FFI.
///
/// Copies libwebp's output into a Rust-owned `Vec` and frees the native
/// buffer before returning.
fn encode_webp_lossy(image: &image::RgbaImage) -> AnyResult<Vec<u8>> {
    let width = i32::try_from(image.width()).context("image width is too large")?;
    let height = i32::try_from(image.height()).context("image height is too large")?;
    // Row stride in bytes: 4 bytes (RGBA) per pixel.
    let stride = width.checked_mul(4).context("image width is too large")?;
    let mut output = std::ptr::null_mut();
    // SAFETY: `RgbaImage` stores width * height * 4 contiguous bytes.
    let encoded_len = unsafe { WebPEncodeRGBA(image.as_ptr(), width, height, stride, WEBP_QUALITY, &mut output) };
    if output.is_null() || encoded_len == 0 {
        // NOTE(review): if libwebp could return len == 0 with a non-null
        // buffer, this early return would leak it — confirm against libwebp docs.
        bail!("failed to encode webp");
    }
    let encoded = unsafe { std::slice::from_raw_parts(output, encoded_len) }.to_vec();
    unsafe {
        WebPFree(output.cast());
    }
    Ok(encoded)
}
/// Copy EXIF metadata from the source image into the freshly encoded WebP in
/// `output`, dropping the orientation tag (the pixels were already rotated).
///
/// A no-op for formats without an EXIF mapping, unreadable metadata, or
/// images whose metadata becomes empty after the orientation tag is removed.
fn preserve_exif(input: &[u8], format: ImageFormat, output: &mut Vec<u8>) -> AnyResult<()> {
    let Some(file_type) = map_exif_file_type(format) else {
        return Ok(());
    };
    let input = input.to_vec();
    // Unreadable metadata is not fatal — just skip preservation.
    let Ok(mut metadata) = Metadata::new_from_vec(&input, file_type) else {
        return Ok(());
    };
    // Orientation was baked into the pixels; remove the tag so viewers
    // don't rotate the image a second time.
    metadata.remove_tag(ExifTag::Orientation(vec![1]));
    // Nothing left worth preserving once every IFD is empty.
    if !metadata.get_ifds().iter().any(|ifd| !ifd.get_tags().is_empty()) {
        return Ok(());
    }
    let encoded_metadata = metadata.encode().context("failed to preserve exif metadata")?;
    let source = WebPData {
        bytes: output.as_ptr(),
        size: output.len(),
    };
    let exif = WebPData {
        bytes: encoded_metadata.as_ptr(),
        size: encoded_metadata.len(),
    };
    let mut assembled = WebPData::default();
    // Second argument 1 asks the mux to copy the bitstream data.
    let mux = unsafe { WebPMuxCreateInternal(&source, 1, WEBP_MUX_ABI_VERSION as _) };
    if mux.is_null() {
        bail!("failed to preserve exif metadata");
    }
    // Closure scopes the fallible mux calls so the cleanup below always runs.
    let encoded = (|| -> AnyResult<Vec<u8>> {
        if unsafe { WebPMuxSetChunk(mux, c"EXIF".as_ptr(), &exif, 1) } != WebPMuxError::WEBP_MUX_OK {
            bail!("failed to preserve exif metadata");
        }
        WebPDataInit(&mut assembled);
        if unsafe { WebPMuxAssemble(mux, &mut assembled) } != WebPMuxError::WEBP_MUX_OK {
            bail!("failed to preserve exif metadata");
        }
        Ok(unsafe { std::slice::from_raw_parts(assembled.bytes, assembled.size) }.to_vec())
    })();
    // Free native resources before propagating any error.
    unsafe {
        WebPDataClear(&mut assembled);
        WebPMuxDelete(mux);
    }
    *output = encoded?;
    Ok(())
}
/// Map an `image` crate format to the `little_exif` file type it corresponds
/// to; `None` for formats without EXIF support.
fn map_exif_file_type(format: ImageFormat) -> Option<FileExtension> {
    let file_type = match format {
        ImageFormat::Jpeg => FileExtension::JPEG,
        ImageFormat::Png => FileExtension::PNG { as_zTXt_chunk: true },
        ImageFormat::Tiff => FileExtension::TIFF,
        ImageFormat::WebP => FileExtension::WEBP,
        _ => return None,
    };
    Some(file_type)
}
#[cfg(test)]
mod tests {
    use image::{ExtendedColorType, GenericImageView, ImageEncoder, codecs::png::PngEncoder};

    use super::*;

    /// Encode a solid-red RGBA PNG of the given size as a test fixture.
    fn encode_png(width: u32, height: u32) -> Vec<u8> {
        let image = image::RgbaImage::from_pixel(width, height, image::Rgba([255, 0, 0, 255]));
        let mut encoded = Vec::new();
        PngEncoder::new(&mut encoded)
            .write_image(image.as_raw(), width, height, ExtendedColorType::Rgba8)
            .unwrap();
        encoded
    }

    /// Build only the 54-byte BMP header declaring the given dimensions —
    /// enough for dimension probing, with no pixel data attached.
    fn encode_bmp_header(width: u32, height: u32) -> Vec<u8> {
        let mut encoded = Vec::with_capacity(54);
        encoded.extend_from_slice(b"BM"); // file signature
        encoded.extend_from_slice(&(54u32).to_le_bytes()); // file size (header only)
        encoded.extend_from_slice(&0u16.to_le_bytes()); // reserved
        encoded.extend_from_slice(&0u16.to_le_bytes()); // reserved
        encoded.extend_from_slice(&(54u32).to_le_bytes()); // pixel data offset
        encoded.extend_from_slice(&(40u32).to_le_bytes()); // info header size
        encoded.extend_from_slice(&(width as i32).to_le_bytes());
        encoded.extend_from_slice(&(height as i32).to_le_bytes());
        encoded.extend_from_slice(&1u16.to_le_bytes()); // color planes
        encoded.extend_from_slice(&24u16.to_le_bytes()); // bits per pixel
        encoded.extend_from_slice(&0u32.to_le_bytes()); // compression
        encoded.extend_from_slice(&0u32.to_le_bytes()); // image size
        encoded.extend_from_slice(&0u32.to_le_bytes()); // x resolution
        encoded.extend_from_slice(&0u32.to_le_bytes()); // y resolution
        encoded.extend_from_slice(&0u32.to_le_bytes()); // colors used
        encoded.extend_from_slice(&0u32.to_le_bytes()); // important colors
        encoded
    }

    /// Images already within `max_edge` keep their original dimensions and
    /// are re-encoded as WebP.
    #[test]
    fn process_image_keeps_small_dimensions() {
        let png = encode_png(8, 6);
        let output = process_image_inner(&png, 512, false).unwrap();
        let format = image::guess_format(&output).unwrap();
        assert_eq!(format, ImageFormat::WebP);
        let decoded = image::load_from_memory(&output).unwrap();
        assert_eq!(decoded.dimensions(), (8, 6));
    }

    /// Oversized images shrink so the longest edge equals `max_edge`, with
    /// the aspect ratio preserved.
    #[test]
    fn process_image_scales_down_large_dimensions() {
        let png = encode_png(1024, 256);
        let output = process_image_inner(&png, 512, false).unwrap();
        let decoded = image::load_from_memory(&output).unwrap();
        assert_eq!(decoded.dimensions(), (512, 128));
    }

    /// `keep_exif` copies metadata into the WebP but strips the
    /// orientation tag (it was already applied to the pixels).
    #[test]
    fn process_image_preserves_exif_without_orientation() {
        let png = encode_png(8, 8);
        let mut png_with_exif = png.clone();
        let mut metadata = Metadata::new();
        metadata.set_tag(ExifTag::ImageDescription("copilot".to_string()));
        metadata.set_tag(ExifTag::Orientation(vec![6]));
        metadata
            .write_to_vec(&mut png_with_exif, FileExtension::PNG { as_zTXt_chunk: true })
            .unwrap();
        let output = process_image_inner(&png_with_exif, 512, true).unwrap();
        let decoded_metadata = Metadata::new_from_vec(&output, FileExtension::WEBP).unwrap();
        assert!(
            decoded_metadata
                .get_tag(&ExifTag::ImageDescription(String::new()))
                .next()
                .is_some()
        );
        assert!(
            decoded_metadata
                .get_tag(&ExifTag::Orientation(vec![1]))
                .next()
                .is_none()
        );
    }

    /// Non-image bytes fail fast with a clear error.
    #[test]
    fn process_image_rejects_invalid_input() {
        let error = process_image_inner(b"not-an-image", 512, false).unwrap_err();
        assert_eq!(error.to_string(), "unsupported image format");
    }

    /// Dimension limits are enforced from the header alone, before decoding
    /// any pixel data.
    #[test]
    fn process_image_rejects_images_over_dimension_limit_before_decode() {
        let bmp = encode_bmp_header(MAX_IMAGE_DIMENSION + 1, 1);
        let error = process_image_inner(&bmp, 512, false).unwrap_err();
        assert_eq!(error.to_string(), "image dimensions exceed limit");
    }
}

View File

@@ -7,6 +7,8 @@ pub mod doc_loader;
pub mod file_type;
pub mod hashcash;
pub mod html_sanitize;
pub mod image;
pub mod llm;
pub mod tiktoken;
use affine_common::napi_utils::map_napi_err;

View File

@@ -0,0 +1,414 @@
use std::sync::{
Arc,
atomic::{AtomicBool, Ordering},
};
use llm_adapter::{
backend::{
BackendConfig, BackendError, BackendProtocol, DefaultHttpClient, dispatch_embedding_request, dispatch_request,
dispatch_rerank_request, dispatch_stream_events_with, dispatch_structured_request,
},
core::{CoreRequest, EmbeddingRequest, RerankRequest, StreamEvent, StructuredRequest},
middleware::{
MiddlewareConfig, PipelineContext, RequestMiddleware, StreamMiddleware, citation_indexing, clamp_max_tokens,
normalize_messages, run_request_middleware_chain, run_stream_middleware_chain, stream_event_normalize,
tool_schema_rewrite,
},
};
use napi::{
Error, Result, Status,
threadsafe_function::{ThreadsafeFunction, ThreadsafeFunctionCallMode},
};
use serde::Deserialize;
// Sentinel strings used by the streaming dispatch path (consumed by the
// stream functions defined later in this module).
pub const STREAM_END_MARKER: &str = "__AFFINE_LLM_STREAM_END__";
const STREAM_ABORTED_REASON: &str = "__AFFINE_LLM_STREAM_ABORTED__";
const STREAM_CALLBACK_DISPATCH_FAILED_REASON: &str = "__AFFINE_LLM_STREAM_CALLBACK_DISPATCH_FAILED__";

/// Middleware selection carried inside a dispatch payload: named request-
/// and stream-middleware chains plus their shared configuration.
/// `#[serde(default)]` makes every field optional in the incoming JSON.
#[derive(Debug, Clone, Default, Deserialize)]
#[serde(default)]
struct LlmMiddlewarePayload {
    request: Vec<String>,
    stream: Vec<String>,
    config: MiddlewareConfig,
}

/// JSON payload for `llm_dispatch`: the `CoreRequest` fields flattened at
/// the top level, plus an optional `middleware` section.
#[derive(Debug, Clone, Deserialize)]
struct LlmDispatchPayload {
    #[serde(flatten)]
    request: CoreRequest,
    #[serde(default)]
    middleware: LlmMiddlewarePayload,
}

/// JSON payload for `llm_structured_dispatch`: a flattened
/// `StructuredRequest` plus an optional `middleware` section.
#[derive(Debug, Clone, Deserialize)]
struct LlmStructuredDispatchPayload {
    #[serde(flatten)]
    request: StructuredRequest,
    #[serde(default)]
    middleware: LlmMiddlewarePayload,
}

/// JSON payload for `llm_rerank_dispatch`: a flattened `RerankRequest`
/// (no middleware support on the rerank path).
#[derive(Debug, Clone, Deserialize)]
struct LlmRerankDispatchPayload {
    #[serde(flatten)]
    request: RerankRequest,
}
/// JS-visible handle for an in-flight streaming dispatch.
///
/// Wraps an atomic flag shared with the code driving the stream; `abort()`
/// sets it so the stream can stop at its next check.
#[napi]
pub struct LlmStreamHandle {
    aborted: Arc<AtomicBool>,
}

#[napi]
impl LlmStreamHandle {
    /// Request cancellation of the associated stream.
    #[napi]
    pub fn abort(&self) {
        self.aborted.store(true, Ordering::SeqCst);
    }
}
/// Synchronous (non-streaming) LLM request dispatch exposed to JS.
///
/// `protocol` names the backend protocol, `backend_config_json` is a
/// serialized `BackendConfig`, and `request_json` is an `LlmDispatchPayload`
/// (core request plus optional middleware). Request middlewares are applied
/// before dispatch; the backend response is returned as a JSON string.
/// JSON and backend failures are converted to NAPI errors via
/// `map_json_error` / `map_backend_error`.
#[napi(catch_unwind)]
pub fn llm_dispatch(protocol: String, backend_config_json: String, request_json: String) -> Result<String> {
    let protocol = parse_protocol(&protocol)?;
    let config: BackendConfig = serde_json::from_str(&backend_config_json).map_err(map_json_error)?;
    let payload: LlmDispatchPayload = serde_json::from_str(&request_json).map_err(map_json_error)?;
    let request = apply_request_middlewares(payload.request, &payload.middleware)?;
    let response =
        dispatch_request(&DefaultHttpClient::default(), &config, protocol, &request).map_err(map_backend_error)?;
    serde_json::to_string(&response).map_err(map_json_error)
}
#[napi(catch_unwind)]
pub fn llm_structured_dispatch(protocol: String, backend_config_json: String, request_json: String) -> Result<String> {
let protocol = parse_protocol(&protocol)?;
let config: BackendConfig = serde_json::from_str(&backend_config_json).map_err(map_json_error)?;
let payload: LlmStructuredDispatchPayload = serde_json::from_str(&request_json).map_err(map_json_error)?;
let request = apply_structured_request_middlewares(payload.request, &payload.middleware)?;
let response = dispatch_structured_request(&DefaultHttpClient::default(), &config, protocol, &request)
.map_err(map_backend_error)?;
serde_json::to_string(&response).map_err(map_json_error)
}
/// Embedding dispatch exposed to Node.
///
/// Parses the backend config and embedding request JSON, dispatches to the
/// selected protocol and returns the embedding response serialized as JSON.
/// Embedding requests do not run through the middleware chain.
#[napi(catch_unwind)]
pub fn llm_embedding_dispatch(protocol: String, backend_config_json: String, request_json: String) -> Result<String> {
  let resolved_protocol = parse_protocol(&protocol)?;
  let backend: BackendConfig = serde_json::from_str(&backend_config_json).map_err(map_json_error)?;
  let embedding_request: EmbeddingRequest = serde_json::from_str(&request_json).map_err(map_json_error)?;
  let client = DefaultHttpClient::default();
  let response = dispatch_embedding_request(&client, &backend, resolved_protocol, &embedding_request)
    .map_err(map_backend_error)?;
  serde_json::to_string(&response).map_err(map_json_error)
}
#[napi(catch_unwind)]
pub fn llm_rerank_dispatch(protocol: String, backend_config_json: String, request_json: String) -> Result<String> {
let protocol = parse_protocol(&protocol)?;
let config: BackendConfig = serde_json::from_str(&backend_config_json).map_err(map_json_error)?;
let payload: LlmRerankDispatchPayload = serde_json::from_str(&request_json).map_err(map_json_error)?;
let response = dispatch_rerank_request(&DefaultHttpClient::default(), &config, protocol, &payload.request)
.map_err(map_backend_error)?;
serde_json::to_string(&response).map_err(map_json_error)
}
/// Streaming dispatch exposed to Node.
///
/// Spawns a worker thread that forwards backend stream events through the
/// threadsafe `callback`, one JSON-encoded event per call, and returns a
/// handle whose `abort()` stops the stream cooperatively.
///
/// Callback protocol:
/// - each middleware-processed event is emitted as a JSON string;
/// - a genuine dispatch failure is emitted as a JSON `error` event;
/// - `STREAM_END_MARKER` is always sent last — except when the callback
///   itself failed to accept an event, in which case the consumer is
///   assumed gone and nothing more is sent.
#[napi(catch_unwind)]
pub fn llm_dispatch_stream(
  protocol: String,
  backend_config_json: String,
  request_json: String,
  callback: ThreadsafeFunction<String, ()>,
) -> Result<LlmStreamHandle> {
  let protocol = parse_protocol(&protocol)?;
  let config: BackendConfig = serde_json::from_str(&backend_config_json).map_err(map_json_error)?;
  let payload: LlmDispatchPayload = serde_json::from_str(&request_json).map_err(map_json_error)?;
  // Request middlewares run before the worker is spawned, so invalid
  // middleware names surface as a synchronous JS exception, not a stream error.
  let request = apply_request_middlewares(payload.request, &payload.middleware)?;
  let middleware = payload.middleware.clone();
  let aborted = Arc::new(AtomicBool::new(false));
  let aborted_in_worker = aborted.clone();
  std::thread::spawn(move || {
    // Stream middleware resolution happens on the worker; a failure is
    // reported as an error event followed by the end marker.
    let chain = match resolve_stream_chain(&middleware.stream) {
      Ok(chain) => chain,
      Err(error) => {
        emit_error_event(&callback, error.reason.clone(), "middleware_error");
        let _ = callback.call(
          Ok(STREAM_END_MARKER.to_string()),
          ThreadsafeFunctionCallMode::NonBlocking,
        );
        return;
      }
    };
    let mut pipeline = StreamPipeline::new(chain, middleware.config.clone());
    let mut aborted_by_user = false;
    let mut callback_dispatch_failed = false;
    let result = dispatch_stream_events_with(&DefaultHttpClient::default(), &config, protocol, &request, |event| {
      // Abort is checked once per incoming event; returning a sentinel
      // error unwinds the backend dispatch loop.
      if aborted_in_worker.load(Ordering::Relaxed) {
        aborted_by_user = true;
        return Err(BackendError::Http(STREAM_ABORTED_REASON.to_string()));
      }
      for event in pipeline.process(event) {
        let status = emit_stream_event(&callback, &event);
        if status != Status::Ok {
          // The JS side can no longer accept calls; stop dispatching.
          callback_dispatch_failed = true;
          return Err(BackendError::Http(format!(
            "{STREAM_CALLBACK_DISPATCH_FAILED_REASON}:{status}"
          )));
        }
      }
      Ok(())
    });
    // Flush events the middlewares buffered past the last backend event,
    // still honoring aborts and callback failures between each one.
    if !aborted_by_user {
      for event in pipeline.finish() {
        if aborted_in_worker.load(Ordering::Relaxed) {
          aborted_by_user = true;
          break;
        }
        if emit_stream_event(&callback, &event) != Status::Ok {
          callback_dispatch_failed = true;
          break;
        }
      }
    }
    // Surface genuine dispatch failures only: user aborts and the two
    // sentinel errors injected above are intentional control flow.
    if let Err(error) = result
      && !aborted_by_user
      && !callback_dispatch_failed
      && !is_abort_error(&error)
      && !is_callback_dispatch_failed_error(&error)
    {
      emit_error_event(&callback, error.to_string(), "dispatch_error");
    }
    if !callback_dispatch_failed {
      let _ = callback.call(
        Ok(STREAM_END_MARKER.to_string()),
        ThreadsafeFunctionCallMode::NonBlocking,
      );
    }
  });
  Ok(LlmStreamHandle { aborted })
}
/// Resolve the named request middlewares and run them over `request`.
/// Returns `InvalidArg` if any middleware name is unknown.
fn apply_request_middlewares(request: CoreRequest, middleware: &LlmMiddlewarePayload) -> Result<CoreRequest> {
  let chain = resolve_request_chain(&middleware.request)?;
  Ok(run_request_middleware_chain(request, &middleware.config, &chain))
}
/// Run request middlewares over a structured request.
///
/// The structured request is lowered to a `CoreRequest`, piped through the
/// same chain as plain dispatches, then re-assembled. Returns `InvalidArg`
/// if the middlewares left no response schema, which structured dispatch
/// requires.
fn apply_structured_request_middlewares(
  request: StructuredRequest,
  middleware: &LlmMiddlewarePayload,
) -> Result<StructuredRequest> {
  let mut core = request.as_core_request();
  core = apply_request_middlewares(core, middleware)?;
  Ok(StructuredRequest {
    model: core.model,
    messages: core.messages,
    schema: core
      .response_schema
      .ok_or_else(|| Error::new(Status::InvalidArg, "Structured request schema is required"))?,
    max_tokens: core.max_tokens,
    temperature: core.temperature,
    reasoning: core.reasoning,
    // `strict` and `response_mime_type` have no core-request equivalent, so
    // they bypass the middleware chain and are carried over verbatim.
    strict: request.strict,
    response_mime_type: request.response_mime_type,
  })
}
/// Stateful wrapper that runs each stream event through the middleware
/// chain while keeping one `PipelineContext` alive across events.
#[derive(Clone)]
struct StreamPipeline {
  // Middlewares applied in order to every event.
  chain: Vec<StreamMiddleware>,
  config: MiddlewareConfig,
  // Mutable state shared across events (pending deltas, queued events).
  context: PipelineContext,
}
impl StreamPipeline {
  /// Create a pipeline with a fresh, empty context.
  fn new(chain: Vec<StreamMiddleware>, config: MiddlewareConfig) -> Self {
    Self {
      chain,
      config,
      context: PipelineContext::default(),
    }
  }
  /// Process one incoming event; may yield zero or more output events.
  fn process(&mut self, event: StreamEvent) -> Vec<StreamEvent> {
    run_stream_middleware_chain(event, &mut self.context, &self.config, &self.chain)
  }
  /// Flush whatever the middlewares buffered; call once after the last event.
  fn finish(&mut self) -> Vec<StreamEvent> {
    self.context.flush_pending_deltas();
    self.context.drain_queued_events()
  }
}
fn emit_stream_event(callback: &ThreadsafeFunction<String, ()>, event: &StreamEvent) -> Status {
let value = serde_json::to_string(event).unwrap_or_else(|error| {
serde_json::json!({
"type": "error",
"message": format!("failed to serialize stream event: {error}"),
})
.to_string()
});
callback.call(Ok(value), ThreadsafeFunctionCallMode::NonBlocking)
}
fn emit_error_event(callback: &ThreadsafeFunction<String, ()>, message: String, code: &str) {
let error_event = serde_json::to_string(&StreamEvent::Error {
message: message.clone(),
code: Some(code.to_string()),
})
.unwrap_or_else(|_| {
serde_json::json!({
"type": "error",
"message": message,
"code": code,
})
.to_string()
});
let _ = callback.call(Ok(error_event), ThreadsafeFunctionCallMode::NonBlocking);
}
fn is_abort_error(error: &BackendError) -> bool {
matches!(
error,
BackendError::Http(reason) if reason == STREAM_ABORTED_REASON
)
}
fn is_callback_dispatch_failed_error(error: &BackendError) -> bool {
matches!(
error,
BackendError::Http(reason) if reason.starts_with(STREAM_CALLBACK_DISPATCH_FAILED_REASON)
)
}
fn resolve_request_chain(request: &[String]) -> Result<Vec<RequestMiddleware>> {
if request.is_empty() {
return Ok(vec![normalize_messages, tool_schema_rewrite]);
}
request
.iter()
.map(|name| match name.as_str() {
"normalize_messages" => Ok(normalize_messages as RequestMiddleware),
"clamp_max_tokens" => Ok(clamp_max_tokens as RequestMiddleware),
"tool_schema_rewrite" => Ok(tool_schema_rewrite as RequestMiddleware),
_ => Err(Error::new(
Status::InvalidArg,
format!("Unsupported request middleware: {name}"),
)),
})
.collect()
}
fn resolve_stream_chain(stream: &[String]) -> Result<Vec<StreamMiddleware>> {
if stream.is_empty() {
return Ok(vec![stream_event_normalize, citation_indexing]);
}
stream
.iter()
.map(|name| match name.as_str() {
"stream_event_normalize" => Ok(stream_event_normalize as StreamMiddleware),
"citation_indexing" => Ok(citation_indexing as StreamMiddleware),
_ => Err(Error::new(
Status::InvalidArg,
format!("Unsupported stream middleware: {name}"),
)),
})
.collect()
}
fn parse_protocol(protocol: &str) -> Result<BackendProtocol> {
match protocol {
"openai_chat" | "openai-chat" | "openai_chat_completions" | "chat-completions" | "chat_completions" => {
Ok(BackendProtocol::OpenaiChatCompletions)
}
"openai_responses" | "openai-responses" | "responses" => Ok(BackendProtocol::OpenaiResponses),
"anthropic" | "anthropic_messages" | "anthropic-messages" => Ok(BackendProtocol::AnthropicMessages),
"gemini" | "gemini_generate_content" | "gemini-generate-content" => Ok(BackendProtocol::GeminiGenerateContent),
other => Err(Error::new(
Status::InvalidArg,
format!("Unsupported llm backend protocol: {other}"),
)),
}
}
fn map_json_error(error: serde_json::Error) -> Error {
Error::new(Status::InvalidArg, format!("Invalid JSON payload: {error}"))
}
fn map_backend_error(error: BackendError) -> Error {
Error::new(Status::GenericFailure, error.to_string())
}
// Unit tests for protocol parsing, error mapping and middleware resolution.
// Dispatch paths that require a live backend are covered by the JS test suite.
#[cfg(test)]
mod tests {
  use super::*;
  // Every documented alias for each protocol family should resolve.
  #[test]
  fn should_parse_supported_protocol_aliases() {
    assert!(parse_protocol("openai_chat").is_ok());
    assert!(parse_protocol("chat-completions").is_ok());
    assert!(parse_protocol("responses").is_ok());
    assert!(parse_protocol("anthropic").is_ok());
    assert!(parse_protocol("gemini").is_ok());
  }
  // Unknown protocols must fail with InvalidArg and a descriptive reason.
  #[test]
  fn should_reject_unsupported_protocol() {
    let error = parse_protocol("unknown").unwrap_err();
    assert_eq!(error.status, Status::InvalidArg);
    assert!(error.reason.contains("Unsupported llm backend protocol"));
  }
  // Malformed backend-config JSON must be rejected before any dispatch.
  #[test]
  fn llm_dispatch_should_reject_invalid_backend_json() {
    let error = llm_dispatch("openai_chat".to_string(), "{".to_string(), "{}".to_string()).unwrap_err();
    assert_eq!(error.status, Status::InvalidArg);
    assert!(error.reason.contains("Invalid JSON payload"));
  }
  // JSON parse errors map to InvalidArg with a stable message prefix.
  #[test]
  fn map_json_error_should_use_invalid_arg_status() {
    let parse_error = serde_json::from_str::<serde_json::Value>("{").unwrap_err();
    let error = map_json_error(parse_error);
    assert_eq!(error.status, Status::InvalidArg);
    assert!(error.reason.contains("Invalid JSON payload"));
  }
  // clamp_max_tokens is opt-in (not in the default chain) but resolvable by name.
  #[test]
  fn resolve_request_chain_should_support_clamp_max_tokens() {
    let chain = resolve_request_chain(&["normalize_messages".to_string(), "clamp_max_tokens".to_string()]).unwrap();
    assert_eq!(chain.len(), 2);
  }
  #[test]
  fn resolve_request_chain_should_reject_unknown_middleware() {
    let error = resolve_request_chain(&["unknown".to_string()]).unwrap_err();
    assert_eq!(error.status, Status::InvalidArg);
    assert!(error.reason.contains("Unsupported request middleware"));
  }
  #[test]
  fn resolve_stream_chain_should_reject_unknown_middleware() {
    let error = resolve_stream_chain(&["unknown".to_string()]).unwrap_err();
    assert_eq!(error.status, Status::InvalidArg);
    assert!(error.reason.contains("Unsupported stream middleware"));
  }
}

View File

@@ -6,6 +6,7 @@
# MAILER_HOST=127.0.0.1
# MAILER_PORT=1025
# MAILER_SERVERNAME="mail.example.com"
# MAILER_SENDER="noreply@toeverything.info"
# MAILER_USER="noreply@toeverything.info"
# MAILER_PASSWORD="affine"

View File

@@ -4,17 +4,14 @@
"version": "0.26.3",
"description": "Affine Node.js server",
"type": "module",
"bin": {
"run-test": "./scripts/run-test.ts"
},
"scripts": {
"build": "affine bundle -p @affine/server",
"dev": "nodemon ./src/index.ts",
"dev:mail": "email dev -d src/mails",
"test": "ava --concurrency 1 --serial",
"test:copilot": "ava \"src/__tests__/copilot-*.spec.ts\"",
"test:copilot": "ava \"src/__tests__/copilot/copilot-*.spec.ts\"",
"test:coverage": "c8 ava --concurrency 1 --serial",
"test:copilot:coverage": "c8 ava --timeout=5m \"src/__tests__/copilot-*.spec.ts\"",
"test:copilot:coverage": "c8 ava --timeout=5m \"src/__tests__/copilot/copilot-*.spec.ts\"",
"e2e": "cross-env TEST_MODE=e2e ava --serial",
"e2e:coverage": "cross-env TEST_MODE=e2e c8 ava --serial",
"data-migration": "cross-env NODE_ENV=development SERVER_FLAVOR=script r ./src/index.ts",
@@ -28,19 +25,12 @@
"dependencies": {
"@affine/s3-compat": "workspace:*",
"@affine/server-native": "workspace:*",
"@ai-sdk/anthropic": "^2.0.54",
"@ai-sdk/google": "^2.0.45",
"@ai-sdk/google-vertex": "^3.0.88",
"@ai-sdk/openai": "^2.0.80",
"@ai-sdk/openai-compatible": "^1.0.28",
"@ai-sdk/perplexity": "^2.0.21",
"@apollo/server": "^4.13.0",
"@fal-ai/serverless-client": "^0.15.0",
"@google-cloud/opentelemetry-cloud-trace-exporter": "^3.0.0",
"@google-cloud/opentelemetry-resource-util": "^3.0.0",
"@modelcontextprotocol/sdk": "^1.26.0",
"@nestjs-cls/transactional": "^2.7.0",
"@nestjs-cls/transactional-adapter-prisma": "^1.2.24",
"@nestjs-cls/transactional": "^3.2.0",
"@nestjs-cls/transactional-adapter-prisma": "^1.3.4",
"@nestjs/apollo": "^13.0.4",
"@nestjs/bullmq": "^11.0.4",
"@nestjs/common": "^11.0.21",
@@ -55,18 +45,18 @@
"@node-rs/crc32": "^1.10.6",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/core": "^2.2.0",
"@opentelemetry/exporter-prometheus": "^0.211.0",
"@opentelemetry/exporter-prometheus": "^0.212.0",
"@opentelemetry/exporter-zipkin": "^2.2.0",
"@opentelemetry/host-metrics": "^0.38.0",
"@opentelemetry/instrumentation": "^0.211.0",
"@opentelemetry/instrumentation-graphql": "^0.58.0",
"@opentelemetry/instrumentation-http": "^0.211.0",
"@opentelemetry/instrumentation-ioredis": "^0.59.0",
"@opentelemetry/instrumentation-nestjs-core": "^0.57.0",
"@opentelemetry/instrumentation-socket.io": "^0.57.0",
"@opentelemetry/instrumentation": "^0.212.0",
"@opentelemetry/instrumentation-graphql": "^0.60.0",
"@opentelemetry/instrumentation-http": "^0.212.0",
"@opentelemetry/instrumentation-ioredis": "^0.60.0",
"@opentelemetry/instrumentation-nestjs-core": "^0.58.0",
"@opentelemetry/instrumentation-socket.io": "^0.59.0",
"@opentelemetry/resources": "^2.2.0",
"@opentelemetry/sdk-metrics": "^2.2.0",
"@opentelemetry/sdk-node": "^0.211.0",
"@opentelemetry/sdk-node": "^0.212.0",
"@opentelemetry/sdk-trace-node": "^2.2.0",
"@opentelemetry/semantic-conventions": "^1.38.0",
"@prisma/client": "^6.6.0",
@@ -74,7 +64,6 @@
"@queuedash/api": "^3.16.0",
"@react-email/components": "^0.5.7",
"@socket.io/redis-adapter": "^8.3.0",
"ai": "^5.0.118",
"bullmq": "^5.40.2",
"cookie-parser": "^1.4.7",
"cross-env": "^10.1.0",
@@ -126,7 +115,6 @@
"@faker-js/faker": "^10.1.0",
"@nestjs/swagger": "^11.2.0",
"@nestjs/testing": "patch:@nestjs/testing@npm%3A10.4.15#~/.yarn/patches/@nestjs-testing-npm-10.4.15-d591a1705a.patch",
"@react-email/preview-server": "^4.3.2",
"@types/cookie-parser": "^1.4.8",
"@types/express": "^5.0.1",
"@types/express-serve-static-core": "^5.0.6",
@@ -142,8 +130,8 @@
"@types/react-dom": "^19.0.2",
"@types/semver": "^7.5.8",
"@types/sinon": "^21.0.0",
"@types/supertest": "^6.0.2",
"ava": "^6.4.0",
"@types/supertest": "^7.0.0",
"ava": "^7.0.0",
"c8": "^10.1.3",
"nodemon": "^3.1.14",
"react-email": "^4.3.2",

View File

@@ -43,7 +43,9 @@ Generated by [AVA](https://avajs.dev).
> Snapshot 5
Buffer @Uint8Array [
66616b65 20696d61 6765
89504e47 0d0a1a0a 0000000d 49484452 00000001 00000001 08040000 00b51c0c
02000000 0b494441 5478da63 fcff1f00 03030200 efa37c9f 00000000 49454e44
ae426082
]
## should preview link

View File

@@ -12,12 +12,12 @@ Generated by [AVA](https://avajs.dev).
{
messages: [
{
content: 'generate text to text',
content: 'generate text to text stream',
role: 'assistant',
},
],
pinned: false,
tokens: 8,
tokens: 10,
},
]
@@ -27,12 +27,12 @@ Generated by [AVA](https://avajs.dev).
{
messages: [
{
content: 'generate text to text',
content: 'generate text to text stream',
role: 'assistant',
},
],
pinned: false,
tokens: 8,
tokens: 10,
},
]

View File

@@ -4,31 +4,31 @@ import type { ExecutionContext, TestFn } from 'ava';
import ava from 'ava';
import { z } from 'zod';
import { ServerFeature, ServerService } from '../core';
import { AuthService } from '../core/auth';
import { QuotaModule } from '../core/quota';
import { Models } from '../models';
import { CopilotModule } from '../plugins/copilot';
import { prompts, PromptService } from '../plugins/copilot/prompt';
import { ServerFeature, ServerService } from '../../core';
import { AuthService } from '../../core/auth';
import { QuotaModule } from '../../core/quota';
import { Models } from '../../models';
import { CopilotModule } from '../../plugins/copilot';
import { prompts, PromptService } from '../../plugins/copilot/prompt';
import {
CopilotProviderFactory,
CopilotProviderType,
StreamObject,
StreamObjectSchema,
} from '../plugins/copilot/providers';
import { TranscriptionResponseSchema } from '../plugins/copilot/transcript/types';
} from '../../plugins/copilot/providers';
import { TranscriptionResponseSchema } from '../../plugins/copilot/transcript/types';
import {
CopilotChatTextExecutor,
CopilotWorkflowService,
GraphExecutorState,
} from '../plugins/copilot/workflow';
} from '../../plugins/copilot/workflow';
import {
CopilotChatImageExecutor,
CopilotCheckHtmlExecutor,
CopilotCheckJsonExecutor,
} from '../plugins/copilot/workflow/executor';
import { createTestingModule, TestingModule } from './utils';
import { TestAssets } from './utils/copilot';
} from '../../plugins/copilot/workflow/executor';
import { createTestingModule, TestingModule } from '../utils';
import { TestAssets } from '../utils/copilot';
type Tester = {
auth: AuthService;
@@ -118,7 +118,6 @@ test.serial.before(async t => {
enabled: true,
scenarios: {
image: 'flux-1/schnell',
rerank: 'gpt-5-mini',
complex_text_generation: 'gpt-5-mini',
coding: 'gpt-5-mini',
quick_decision_making: 'gpt-5-mini',
@@ -226,6 +225,20 @@ const checkStreamObjects = (result: string) => {
}
};
const parseStreamObjects = (result: string): StreamObject[] => {
const streamObjects = JSON.parse(result);
return z.array(StreamObjectSchema).parse(streamObjects);
};
const getStreamObjectText = (result: string) =>
parseStreamObjects(result)
.filter(
(chunk): chunk is Extract<StreamObject, { type: 'text-delta' }> =>
chunk.type === 'text-delta'
)
.map(chunk => chunk.textDelta)
.join('');
const retry = async (
action: string,
t: ExecutionContext<Tester>,
@@ -445,6 +458,49 @@ The term **“CRDT”** was first introduced by Marc Shapiro, Nuno Preguiça, Ca
},
type: 'object' as const,
},
{
name: 'Gemini native text',
promptName: ['Chat With AFFiNE AI'],
messages: [
{
role: 'user' as const,
content:
'In one short sentence, explain what AFFiNE AI is and mention AFFiNE by name.',
},
],
config: { model: 'gemini-2.5-flash' },
verifier: (t: ExecutionContext<Tester>, result: string) => {
assertNotWrappedInCodeBlock(t, result);
t.assert(
result.toLowerCase().includes('affine'),
'should mention AFFiNE'
);
},
prefer: CopilotProviderType.Gemini,
type: 'text' as const,
},
{
name: 'Gemini native stream objects',
promptName: ['Chat With AFFiNE AI'],
messages: [
{
role: 'user' as const,
content:
'Respond with one short sentence about AFFiNE AI and mention AFFiNE by name.',
},
],
config: { model: 'gemini-2.5-flash' },
verifier: (t: ExecutionContext<Tester>, result: string) => {
t.truthy(checkStreamObjects(result), 'should be valid stream objects');
const assembledText = getStreamObjectText(result);
t.assert(
assembledText.toLowerCase().includes('affine'),
'should mention AFFiNE'
);
},
prefer: CopilotProviderType.Gemini,
type: 'object' as const,
},
{
name: 'Should transcribe short audio',
promptName: ['Transcript audio'],
@@ -717,14 +773,13 @@ for (const {
const { factory, prompt: promptService } = t.context;
const prompt = (await promptService.get(promptName))!;
t.truthy(prompt, 'should have prompt');
const provider = (await factory.getProviderByModel(prompt.model, {
const finalConfig = Object.assign({}, prompt.config, config);
const modelId = finalConfig.model || prompt.model;
const provider = (await factory.getProviderByModel(modelId, {
prefer,
}))!;
t.truthy(provider, 'should have provider');
await retry(`action: ${promptName}`, t, async t => {
const finalConfig = Object.assign({}, prompt.config, config);
const modelId = finalConfig.model || prompt.model;
switch (type) {
case 'text': {
const result = await provider.text(
@@ -892,7 +947,7 @@ test(
'should be able to rerank message chunks',
runIfCopilotConfigured,
async t => {
const { factory, prompt } = t.context;
const { factory } = t.context;
await retry('rerank', t, async t => {
const query = 'Is this content relevant to programming?';
@@ -909,14 +964,18 @@ test(
'The stock market is experiencing significant fluctuations.',
];
const p = (await prompt.get('Rerank results'))!;
t.assert(p, 'should have prompt for rerank');
const provider = (await factory.getProviderByModel(p.model))!;
const provider = (await factory.getProviderByModel('gpt-5.2'))!;
t.assert(provider, 'should have provider for rerank');
const scores = await provider.rerank(
{ modelId: p.model },
embeddings.map(e => p.finish({ query, doc: e }))
{ modelId: 'gpt-5.2' },
{
query,
candidates: embeddings.map((text, index) => ({
id: String(index),
text,
})),
}
);
t.is(scores.length, 10, 'should return scores for all chunks');
@@ -931,8 +990,8 @@ test(
t.log('Rerank scores:', scores);
t.is(
scores.filter(s => s > 0.5).length,
4,
'should have 4 related chunks'
5,
'should have 5 related chunks'
);
});
}

View File

@@ -6,25 +6,26 @@ import type { TestFn } from 'ava';
import ava from 'ava';
import Sinon from 'sinon';
import { AppModule } from '../app.module';
import { JobQueue } from '../base';
import { ConfigModule } from '../base/config';
import { AuthService } from '../core/auth';
import { DocReader } from '../core/doc';
import { CopilotContextService } from '../plugins/copilot/context';
import { AppModule } from '../../app.module';
import { JobQueue } from '../../base';
import { ConfigModule } from '../../base/config';
import { AuthService } from '../../core/auth';
import { DocReader } from '../../core/doc';
import { CopilotContextService } from '../../plugins/copilot/context';
import {
CopilotEmbeddingJob,
MockEmbeddingClient,
} from '../plugins/copilot/embedding';
import { prompts, PromptService } from '../plugins/copilot/prompt';
} from '../../plugins/copilot/embedding';
import { ChatMessageCache } from '../../plugins/copilot/message';
import { prompts, PromptService } from '../../plugins/copilot/prompt';
import {
CopilotProviderFactory,
CopilotProviderType,
GeminiGenerativeProvider,
OpenAIProvider,
} from '../plugins/copilot/providers';
import { CopilotStorage } from '../plugins/copilot/storage';
import { MockCopilotProvider } from './mocks';
} from '../../plugins/copilot/providers';
import { CopilotStorage } from '../../plugins/copilot/storage';
import { MockCopilotProvider } from '../mocks';
import {
acceptInviteById,
createTestingApp,
@@ -33,7 +34,7 @@ import {
smallestPng,
TestingApp,
TestUser,
} from './utils';
} from '../utils';
import {
addContextDoc,
addContextFile,
@@ -67,7 +68,7 @@ import {
textToEventStream,
unsplashSearch,
updateCopilotSession,
} from './utils/copilot';
} from '../utils/copilot';
const test = ava as TestFn<{
auth: AuthService;
@@ -416,6 +417,7 @@ test('should be able to use test provider', async t => {
test('should create message correctly', async t => {
const { app } = t.context;
const messageCache = app.get(ChatMessageCache);
{
const { id } = await createWorkspace(app);
@@ -463,6 +465,19 @@ test('should create message correctly', async t => {
new File([new Uint8Array(pngData)], '1.png', { type: 'image/png' })
);
t.truthy(messageId, 'should be able to create message with blob');
const message = await messageCache.get(messageId);
const attachment = message?.attachments?.[0] as
| { attachment: string; mimeType: string }
| undefined;
const payload = Buffer.from(
attachment?.attachment.split(',').at(1) || '',
'base64'
);
t.is(attachment?.mimeType, 'image/webp');
t.is(payload.subarray(0, 4).toString('ascii'), 'RIFF');
t.is(payload.subarray(8, 12).toString('ascii'), 'WEBP');
}
// with attachments
@@ -513,7 +528,11 @@ test('should be able to chat with api', async t => {
);
const messageId = await createCopilotMessage(app, sessionId);
const ret = await chatWithText(app, sessionId, messageId);
t.is(ret, 'generate text to text', 'should be able to chat with text');
t.is(
ret,
'generate text to text stream',
'should be able to chat with text'
);
const ret2 = await chatWithTextStream(app, sessionId, messageId);
t.is(
@@ -657,7 +676,7 @@ test('should be able to retry with api', async t => {
const histories = await getHistories(app, { workspaceId: id, docId });
t.deepEqual(
histories.map(h => h.messages.map(m => m.content)),
[['generate text to text', 'generate text to text']],
[['generate text to text stream', 'generate text to text stream']],
'should be able to list history'
);
}
@@ -794,7 +813,7 @@ test('should be able to list history', async t => {
const histories = await getHistories(app, { workspaceId, docId });
t.deepEqual(
histories.map(h => h.messages.map(m => m.content)),
[['hello', 'generate text to text']],
[['hello', 'generate text to text stream']],
'should be able to list history'
);
}
@@ -807,7 +826,7 @@ test('should be able to list history', async t => {
});
t.deepEqual(
histories.map(h => h.messages.map(m => m.content)),
[['generate text to text', 'hello']],
[['generate text to text stream', 'hello']],
'should be able to list history'
);
}
@@ -858,7 +877,7 @@ test('should reject request that user have not permission', async t => {
const histories = await getHistories(app, { workspaceId, docId });
t.deepEqual(
histories.map(h => h.messages.map(m => m.content)),
[['generate text to text']],
[['generate text to text stream']],
'should able to list history'
);

View File

@@ -8,38 +8,35 @@ import ava from 'ava';
import { nanoid } from 'nanoid';
import Sinon from 'sinon';
import { EventBus, JobQueue } from '../base';
import { ConfigModule } from '../base/config';
import { AuthService } from '../core/auth';
import { QuotaModule } from '../core/quota';
import { StorageModule, WorkspaceBlobStorage } from '../core/storage';
import { EventBus, JobQueue } from '../../base';
import { ConfigModule } from '../../base/config';
import { AuthService } from '../../core/auth';
import { QuotaModule } from '../../core/quota';
import { StorageModule, WorkspaceBlobStorage } from '../../core/storage';
import {
ContextCategories,
CopilotSessionModel,
WorkspaceModel,
} from '../models';
import { CopilotModule } from '../plugins/copilot';
import { CopilotContextService } from '../plugins/copilot/context';
import { CopilotCronJobs } from '../plugins/copilot/cron';
} from '../../models';
import { CopilotModule } from '../../plugins/copilot';
import { CopilotContextService } from '../../plugins/copilot/context';
import { CopilotCronJobs } from '../../plugins/copilot/cron';
import {
CopilotEmbeddingJob,
MockEmbeddingClient,
} from '../plugins/copilot/embedding';
import { prompts, PromptService } from '../plugins/copilot/prompt';
} from '../../plugins/copilot/embedding';
import { prompts, PromptService } from '../../plugins/copilot/prompt';
import {
CopilotProviderFactory,
CopilotProviderType,
ModelInputType,
ModelOutputType,
OpenAIProvider,
} from '../plugins/copilot/providers';
import {
CitationParser,
TextStreamParser,
} from '../plugins/copilot/providers/utils';
import { ChatSessionService } from '../plugins/copilot/session';
import { CopilotStorage } from '../plugins/copilot/storage';
import { CopilotTranscriptionService } from '../plugins/copilot/transcript';
} from '../../plugins/copilot/providers';
import { TextStreamParser } from '../../plugins/copilot/providers/utils';
import { ChatSessionService } from '../../plugins/copilot/session';
import { CopilotStorage } from '../../plugins/copilot/storage';
import { CopilotTranscriptionService } from '../../plugins/copilot/transcript';
import {
CopilotChatTextExecutor,
CopilotWorkflowService,
@@ -48,7 +45,7 @@ import {
WorkflowGraphExecutor,
type WorkflowNodeData,
WorkflowNodeType,
} from '../plugins/copilot/workflow';
} from '../../plugins/copilot/workflow';
import {
CopilotChatImageExecutor,
CopilotCheckHtmlExecutor,
@@ -56,16 +53,16 @@ import {
getWorkflowExecutor,
NodeExecuteState,
NodeExecutorType,
} from '../plugins/copilot/workflow/executor';
import { AutoRegisteredWorkflowExecutor } from '../plugins/copilot/workflow/executor/utils';
import { WorkflowGraphList } from '../plugins/copilot/workflow/graph';
import { CopilotWorkspaceService } from '../plugins/copilot/workspace';
import { PaymentModule } from '../plugins/payment';
import { SubscriptionService } from '../plugins/payment/service';
import { SubscriptionStatus } from '../plugins/payment/types';
import { MockCopilotProvider } from './mocks';
import { createTestingModule, TestingModule } from './utils';
import { WorkflowTestCases } from './utils/copilot';
} from '../../plugins/copilot/workflow/executor';
import { AutoRegisteredWorkflowExecutor } from '../../plugins/copilot/workflow/executor/utils';
import { WorkflowGraphList } from '../../plugins/copilot/workflow/graph';
import { CopilotWorkspaceService } from '../../plugins/copilot/workspace';
import { PaymentModule } from '../../plugins/payment';
import { SubscriptionService } from '../../plugins/payment/service';
import { SubscriptionStatus } from '../../plugins/payment/types';
import { MockCopilotProvider } from '../mocks';
import { createTestingModule, TestingModule } from '../utils';
import { WorkflowTestCases } from '../utils/copilot';
type Context = {
auth: AuthService;
@@ -364,6 +361,21 @@ test('should be able to manage chat session', async t => {
});
t.is(newSessionId, sessionId, 'should get same session id');
}
// should create a fresh session when reuseLatestChat is explicitly disabled
{
const newSessionId = await session.create({
userId,
promptName,
...commonParams,
reuseLatestChat: false,
});
t.not(
newSessionId,
sessionId,
'should create new session id when reuseLatestChat is false'
);
}
});
test('should be able to update chat session prompt', async t => {
@@ -645,6 +657,55 @@ test('should be able to generate with message id', async t => {
}
});
test('should preserve file handle attachments when merging user content into prompt', async t => {
const { prompt, session } = t.context;
await prompt.set(promptName, 'model', [
{ role: 'user', content: '{{content}}' },
]);
const sessionId = await session.create({
docId: 'test',
workspaceId: 'test',
userId,
promptName,
pinned: false,
});
const s = (await session.get(sessionId))!;
const message = await session.createMessage({
sessionId,
content: 'Summarize this file',
attachments: [
{
kind: 'file_handle',
fileHandle: 'file_123',
mimeType: 'application/pdf',
},
],
});
await s.pushByMessageId(message);
const finalMessages = s.finish({});
t.deepEqual(finalMessages, [
{
role: 'user',
content: 'Summarize this file',
attachments: [
{
kind: 'file_handle',
fileHandle: 'file_123',
mimeType: 'application/pdf',
},
],
params: {
content: 'Summarize this file',
},
},
]);
});
test('should save message correctly', async t => {
const { prompt, session } = t.context;
@@ -881,6 +942,26 @@ test('should be able to get provider', async t => {
}
});
test('should resolve provider by prefixed model id', async t => {
const { factory } = t.context;
const provider = await factory.getProviderByModel('openai-default/test');
t.truthy(provider, 'should resolve prefixed model id');
t.is(provider?.type, CopilotProviderType.OpenAI);
const result = await provider?.text({ modelId: 'openai-default/test' }, [
{ role: 'user', content: 'hello' },
]);
t.is(result, 'generate text to text');
});
test('should fallback to null when prefixed provider id does not exist', async t => {
const { factory } = t.context;
const provider = await factory.getProviderByModel('unknown/test');
t.is(provider, null);
});
// ==================== workflow ====================
// this test used to preview the final result of the workflow
@@ -1190,149 +1271,6 @@ test('should be able to run image executor', async t => {
Sinon.restore();
});
test('CitationParser should replace citation placeholders with URLs', t => {
const content =
'This is [a] test sentence with [citations [1]] and [[2]] and [3].';
const citations = ['https://example1.com', 'https://example2.com'];
const parser = new CitationParser();
for (const citation of citations) {
parser.push(citation);
}
const result = parser.parse(content) + parser.end();
const expected = [
'This is [a] test sentence with [citations [^1]] and [^2] and [3].',
`[^1]: {"type":"url","url":"${encodeURIComponent(citations[0])}"}`,
`[^2]: {"type":"url","url":"${encodeURIComponent(citations[1])}"}`,
].join('\n');
t.is(result, expected);
});
test('CitationParser should replace chunks of citation placeholders with URLs', t => {
  // Markers are deliberately split across chunk boundaries to exercise the
  // parser's internal buffering; the trailing '[7' is left incomplete.
  const chunks = [
    '[[]]',
    'This is [',
    'a] test sentence ',
    'with citations [1',
    '] and [',
    '[2]] and [[',
    '3]] and [[4',
    ']] and [[5]',
    '] and [[6]]',
    ' and [7',
  ];
  const urls = [
    'https://example1.com',
    'https://example2.com',
    'https://example3.com',
    'https://example4.com',
    'https://example5.com',
    'https://example6.com',
    'https://example7.com',
  ];

  const parser = new CitationParser();
  for (const url of urls) {
    parser.push(url);
  }

  let actual = '';
  for (const chunk of chunks) {
    actual += parser.parse(chunk);
  }
  actual += parser.end();

  const footnotes = urls.map(
    (url, i) =>
      `[^${i + 1}]: {"type":"url","url":"${encodeURIComponent(url)}"}`
  );
  const expected = [
    '[[]]This is [a] test sentence with citations [^1] and [^2] and [^3] and [^4] and [^5] and [^6] and [7',
    ...footnotes,
  ].join('\n');
  t.is(actual, expected);
});
test('CitationParser should not replace citation already with URLs', t => {
  // Markers that are already markdown links must pass through unchanged;
  // the pushed citations are still emitted as footnote definitions.
  const input =
    'This is [a] test sentence with citations [1](https://example1.com) and [[2]](https://example2.com) and [[3](https://example3.com)].';
  const urls = [
    'https://example4.com',
    'https://example5.com',
    'https://example6.com',
  ];

  const parser = new CitationParser();
  urls.forEach(url => parser.push(url));
  const actual = parser.parse(input) + parser.end();

  const footnotes = urls.map(
    (url, i) =>
      `[^${i + 1}]: {"type":"url","url":"${encodeURIComponent(url)}"}`
  );
  const expected = [input, ...footnotes].join('\n');
  t.is(actual, expected);
});
test('CitationParser should not replace chunks of citation already with URLs', t => {
  // Same pass-through rule as above, but the markdown links are split
  // across chunk boundaries to exercise buffered parsing.
  const chunks = [
    'This is [a] test sentence with citations [1',
    '](https://example1.com) and [[2]',
    '](https://example2.com) and [[3](https://example3.com)].',
  ];
  const urls = [
    'https://example4.com',
    'https://example5.com',
    'https://example6.com',
  ];

  const parser = new CitationParser();
  for (const url of urls) {
    parser.push(url);
  }

  let actual = '';
  for (const chunk of chunks) {
    actual += parser.parse(chunk);
  }
  actual += parser.end();

  const footnotes = urls.map(
    (url, i) =>
      `[^${i + 1}]: {"type":"url","url":"${encodeURIComponent(url)}"}`
  );
  const expected = [chunks.join(''), ...footnotes].join('\n');
  t.is(actual, expected);
});
test('CitationParser should replace openai style reference chunks', t => {
  // An OpenAI-style inline reference "([host](url))" arriving as its own
  // chunk is collapsed into a numbered footnote ref, with no push() calls.
  const url = 'https://example1.com';
  const chunks = [
    'This is [a] test sentence with citations ',
    '([example1.com](https://example1.com))',
  ];

  const parser = new CitationParser();
  let actual = '';
  for (const chunk of chunks) {
    actual += parser.parse(chunk);
  }
  actual += parser.end();

  const expected = [
    chunks[0] + '[^1]',
    `[^1]: {"type":"url","url":"${encodeURIComponent(url)}"}`,
  ].join('\n');
  t.is(actual, expected);
});
test('TextStreamParser should format different types of chunks correctly', t => {
// Define interfaces for fixtures
interface BaseFixture {
@@ -2063,25 +2001,23 @@ test('should handle copilot cron jobs correctly', async t => {
});
test('should resolve model correctly based on subscription status and prompt config', async t => {
const { db, session, subscription } = t.context;
const { prompt, session, subscription } = t.context;
// 1) Seed a prompt that has optionalModels and proModels in config
const promptName = 'resolve-model-test';
await db.aiPrompt.create({
data: {
name: promptName,
model: 'gemini-2.5-flash',
messages: {
create: [{ idx: 0, role: 'system', content: 'test' }],
},
config: { proModels: ['gemini-2.5-pro', 'claude-sonnet-4-5@20250929'] },
await prompt.set(
promptName,
'gemini-2.5-flash',
[{ role: 'system', content: 'test' }],
{ proModels: ['gemini-2.5-pro', 'claude-sonnet-4-5@20250929'] },
{
optionalModels: [
'gemini-2.5-flash',
'gemini-2.5-pro',
'claude-sonnet-4-5@20250929',
],
},
});
}
);
// 2) Create a chat session with this prompt
const sessionId = await session.create({
@@ -2106,6 +2042,16 @@ test('should resolve model correctly based on subscription status and prompt con
const model1 = await s.resolveModel(false, 'gemini-2.5-pro');
t.snapshot(model1, 'should honor requested pro model');
const model1WithPrefix = await s.resolveModel(
false,
'openai-default/gemini-2.5-pro'
);
t.is(
model1WithPrefix,
'openai-default/gemini-2.5-pro',
'should honor requested prefixed pro model'
);
const model2 = await s.resolveModel(false, 'not-in-optional');
t.snapshot(model2, 'should fallback to default model');
}
@@ -2119,6 +2065,16 @@ test('should resolve model correctly based on subscription status and prompt con
'should fallback to default model when requesting pro model during trialing'
);
const model3WithPrefix = await s.resolveModel(
true,
'openai-default/gemini-2.5-pro'
);
t.is(
model3WithPrefix,
'gemini-2.5-flash',
'should fallback to default model when requesting prefixed pro model during trialing'
);
const model4 = await s.resolveModel(true, 'gemini-2.5-flash');
t.snapshot(model4, 'should honor requested non-pro model during trialing');
@@ -2141,6 +2097,16 @@ test('should resolve model correctly based on subscription status and prompt con
const model7 = await s.resolveModel(true, 'claude-sonnet-4-5@20250929');
t.snapshot(model7, 'should honor requested pro model during active');
const model7WithPrefix = await s.resolveModel(
true,
'openai-default/claude-sonnet-4-5@20250929'
);
t.is(
model7WithPrefix,
'openai-default/claude-sonnet-4-5@20250929',
'should honor requested prefixed pro model during active'
);
const model8 = await s.resolveModel(true, 'not-in-optional');
t.snapshot(
model8,

Some files were not shown because too many files have changed in this diff Show More