Compare commits

..

3 Commits

Author    SHA1        Message                                        Date
李华桥    bd91aa8a38  chore: improve desktop building                2023-12-21 12:08:58 +08:00
李华桥    55792e2f41  chore: replace v char in tag name              2023-12-21 10:35:30 +08:00
李华桥    04e7a9fc14  ci: use setup version action to init version  2023-12-21 10:19:19 +08:00
524 changed files with 20478 additions and 29524 deletions

View File

@@ -11,4 +11,6 @@ e2e-dist-*
static
web-static
public
packages/common/sdk/src/*.d.ts
packages/common/sdk/src/*.js
packages/frontend/i18n/src/i18n-generated.ts

View File

@@ -61,6 +61,7 @@ const allPackages = [
'packages/frontend/core',
'packages/frontend/electron',
'packages/frontend/graphql',
'packages/frontend/hooks',
'packages/frontend/i18n',
'packages/frontend/native',
'packages/frontend/templates',
@@ -68,8 +69,10 @@ const allPackages = [
'packages/common/debug',
'packages/common/env',
'packages/common/infra',
'packages/common/sdk',
'packages/common/theme',
'packages/common/y-indexeddb',
'packages/plugins/copilot',
'tools/cli',
'tests/storybook',
];

View File

@@ -9,7 +9,7 @@ runs:
using: 'composite'
steps:
- name: Download tar.gz
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: core
path: .

View File

@@ -21,6 +21,14 @@ inputs:
description: 'set nmMode to hardlinks-local in .yarnrc.yml'
required: false
default: 'true'
build-infra:
description: 'Build infra'
required: false
default: 'true'
build-plugins:
description: 'Build plugins'
required: false
default: 'true'
nmHoistingLimits:
description: 'Set nmHoistingLimits in .yarnrc.yml'
required: false
@@ -182,3 +190,13 @@ runs:
run: node ./node_modules/electron/install.js
env:
electron_config_cache: ./node_modules/.cache/electron
- name: Build Infra
shell: bash
if: inputs.build-infra == 'true'
run: yarn run build:infra
- name: Build Plugins
if: inputs.build-plugins == 'true'
shell: bash
run: yarn run build:plugins

View File

@@ -1,9 +1,5 @@
name: Setup Version
description: 'Setup Version'
outputs:
APP_VERSION:
description: 'App Version'
value: ${{ steps.version.outputs.APP_VERSION }}
runs:
using: 'composite'
steps:

View File

@@ -1,4 +1,4 @@
FROM openresty/openresty:1.21.4.3-0-buster
FROM openresty/openresty:1.21.4.1-0-buster
WORKDIR /app
COPY ./packages/frontend/core/dist ./dist
COPY ./.github/deployment/front/nginx.conf /usr/local/openresty/nginx/conf/nginx.conf

1
.github/helm/affine-cloud/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
charts/

23
.github/helm/affine-cloud/.helmignore vendored Normal file
View File

@@ -0,0 +1,23 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*.orig
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/

6
.github/helm/affine-cloud/Chart.lock vendored Normal file
View File

@@ -0,0 +1,6 @@
dependencies:
- name: postgresql
repository: https://charts.bitnami.com/bitnami
version: 13.2.23
digest: sha256:5b64538509bd067bb0f67bf082847a2c5d66dc37d0b9d7948a40405d9c446400
generated: "2023-12-05T03:04:57.997927753Z"

12
.github/helm/affine-cloud/Chart.yaml vendored Normal file
View File

@@ -0,0 +1,12 @@
apiVersion: v2
name: affine-cloud
description: A Helm chart for AFFiNE Cloud
type: application
version: 0.6.1
appVersion: '0.6.1'
dependencies:
- name: postgresql
version: 13.2.23
repository: https://charts.bitnami.com/bitnami
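This chart pulls in the Bitnami PostgreSQL chart as a dependency, and the `charts/` directory is ignored by git (see the new `.gitignore` above), so the dependency has to be fetched before the chart can be installed or rendered. A minimal sketch, assuming Helm 3 and the chart path used elsewhere in this changeset:

```sh
# Fetch the postgresql dependency declared in Chart.yaml into charts/
# (Chart.lock pins version 13.2.23 from https://charts.bitnami.com/bitnami).
helm dependency update .github/helm/affine-cloud

# Optionally render the templates locally to sanity-check the output.
helm template affine-cloud .github/helm/affine-cloud | less
```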

30
.github/helm/affine-cloud/readme.md vendored Normal file
View File

@@ -0,0 +1,30 @@
# Helm Chart Configuration
The following table lists the configurable parameters of this Helm chart and their default values.
## AFFiNE Cloud Server parameters
| Parameter | Description | Default |
| ------------------------------ | -------------------------------------------------- | ------------------ |
| `affineCloud.tag` | The Docker tag of the AffineCloud image to be used | `'nightly-latest'` |
| `affineCloud.resources.cpu` | The CPU resources allocated for AffineCloud | `'250m'` |
| `affineCloud.resources.memory` | The memory resources allocated for AffineCloud | `'0.5Gi'` |
| `affineCloud.signKey` | The key used to sign the JWT tokens | `'c2VjcmV0'` |
| `affineCloud.service.type` | The type of the Kubernetes service | `'ClusterIP'` |
| `affineCloud.service.port` | The port of the Kubernetes service | `'http'` |
| `affineCloud.mail.account` | The email account used to send emails | `''` |
| `affineCloud.mail.password` | The password of the email account | `''` |
## PostgreSQL parameters
| Parameter | Description | Default |
| -------------------------------------------- | ------------------------------------------------------------------------------------- | ------------ |
| `postgresql.auth.username` | Username for the PostgreSQL database | `'affine'` |
| `postgresql.auth.password` | Password for the PostgreSQL database. Please change this for production environments. | `'password'` |
| `postgresql.auth.database` | The name of the default database that will be created on image startup | `'affine'` |
| `postgresql.primary.resources.limits.cpu` | The CPU resources allocated for the PostgreSQL primary node | `'500m'` |
| `postgresql.primary.resources.limits.memory` | The memory resources allocated for the PostgreSQL primary node | `'0.5Gi'` |
For more postgres parameters, please refer to: https://artifacthub.io/packages/helm/bitnami/postgresql
Please note that for the `postgresql.auth.password`, you should provide your own password for production environments. The default value is provided only for demonstration purposes.
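As an illustration of how these parameters are consumed, here is a hedged sketch of overriding a few of them at install time; the release name, namespace, and values are examples only, not part of the chart:

```sh
# Override selected parameters from the table above at install/upgrade time.
helm upgrade --install affine-cloud .github/helm/affine-cloud \
  --namespace prod --create-namespace \
  --set affineCloud.tag=nightly-latest \
  --set postgresql.auth.password='a-strong-password'  # never keep the demo default
```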

View File

@@ -0,0 +1,51 @@
{{/*
Expand the name of the chart.
*/}}
{{- define "affine-cloud.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
If release name contains chart name it will be used as a full name.
*/}}
{{- define "affine-cloud.fullname" -}}
{{- if .Values.fullnameOverride }}
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- $name := default .Chart.Name .Values.nameOverride }}
{{- if contains $name .Release.Name }}
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
{{- end }}
{{- end }}
{{- end }}
{{/*
Create chart name and version as used by the chart label.
*/}}
{{- define "affine-cloud.chart" -}}
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Common labels
*/}}
{{- define "affine-cloud.labels" -}}
helm.sh/chart: {{ include "affine-cloud.chart" . }}
{{ include "affine-cloud.selectorLabels" . }}
{{- if .Chart.AppVersion }}
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
{{- end }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end }}
{{/*
Selector labels
*/}}
{{- define "affine-cloud.selectorLabels" -}}
app.kubernetes.io/name: {{ include "affine-cloud.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}
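The comments in the helper above describe the naming rules; as a worked example (assuming the dependencies have been fetched with `helm dependency update`, and using made-up release names), the rendered resource names resolve as follows:

```sh
# "prod" does not contain the chart name, so fullname becomes "<release>-<chart>".
helm template prod .github/helm/affine-cloud | grep 'name: "prod-affine-cloud"'

# "affine-cloud-prod" already contains the chart name, so it is used unchanged.
helm template affine-cloud-prod .github/helm/affine-cloud | grep 'name: "affine-cloud-prod"'
```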

View File

@@ -0,0 +1,51 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: "{{ include "affine-cloud.fullname" . }}"
labels:
{{- include "affine-cloud.labels" . | nindent 4 }}
spec:
replicas: 1
selector:
matchLabels:
{{- include "affine-cloud.selectorLabels" . | nindent 6 }}
strategy:
type: RollingUpdate
rollingUpdate:
maxUnavailable: 2
template:
metadata:
labels:
{{- include "affine-cloud.selectorLabels" . | nindent 8 }}
spec:
restartPolicy: Always
containers:
- name: affine-cloud
image: "ghcr.io/toeverything/cloud-self-hosted:{{ .Values.affineCloud.tag | default .Chart.AppVersion }}"
env:
- name: PG_USER
value: "{{ .Values.postgresql.auth.username }}"
- name: PG_PASS
value: "{{ .Values.postgresql.auth.password }}"
- name: PG_DATABASE
value: "{{ .Values.postgresql.auth.database }}"
- name: PG_HOST
value: "{{ .Values.postgresql.fullnameOverride | default (printf "%s-postgresql" .Release.Name) }}"
- name: DATABASE_URL
value: "{{ .Values.affineCloud.databaseUrl | default "postgresql://$(PG_USER):$(PG_PASS)@$(PG_HOST)/$(PG_DATABASE)" }}"
envFrom:
- secretRef:
name: affine-cloud-secret
ports:
- containerPort: 3000
livenessProbe:
httpGet:
path: /api/healthz
port: 3000
failureThreshold: 1
initialDelaySeconds: 10
periodSeconds: 10
resources:
limits:
cpu: "{{ .Values.affineCloud.resources.cpu }}"
memory: "{{ .Values.affineCloud.resources.memory }}"

View File

@@ -0,0 +1,9 @@
apiVersion: v1
kind: Secret
metadata:
name: affine-cloud-secret
type: Opaque
data:
SIGN_KEY: "{{ .Values.affineCloud.signKey }}"
MAIL_ACCOUNT: "{{ .Values.affineCloud.mail.account }}"
MAIL_PASSWORD: "{{ .Values.affineCloud.mail.password }}"
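Values under a Secret's `data:` field must be base64-encoded, which is why the chart's defaults (for example `c2VjcmV0`, the base64 form of `secret`) are already encoded. A sketch of producing your own `affineCloud.signKey`; the key material here is a placeholder:

```sh
# Generate 32 random bytes and base64-encode them for the Secret's data field.
openssl rand -base64 32
# Or encode an existing string (-n avoids encoding a trailing newline).
echo -n 'my-sign-key' | base64
```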

View File

@@ -0,0 +1,15 @@
apiVersion: v1
kind: Service
metadata:
name: "{{ include "affine-cloud.fullname" . }}"
labels:
{{- include "affine-cloud.labels" . | nindent 4 }}
spec:
type: "{{ .Values.affineCloud.service.type }}"
ports:
- name: http
protocol: TCP
port: {{ .Values.affineCloud.service.port }}
targetPort: 3000
selector:
{{- include "affine-cloud.selectorLabels" . | nindent 4 }}

30
.github/helm/affine-cloud/values.yaml vendored Normal file
View File

@@ -0,0 +1,30 @@
affineCloud:
tag: 'canary-5e0d5e0cc65ea46f326fdde12658bfac59b38c9f-0949'
# databaseUrl: 'postgresql://affine:password@affine-cloud-postgresql:5432/affine'
signKey: TUFtdFdzQTJhdGJuem01TA==
mail:
account: ''
password: ''
service:
type: ClusterIP
port: 80
resources:
cpu: '250m'
memory: 0.5Gi
postgresql:
fullnameOverride: tcp-postgresql
auth:
# only for demo, please modify it at prod env
username: affine
password: password
database: affine
primary:
initdb:
scripts:
01-init.sql: |
CREATE DATABASE affine_binary;
GRANT ALL PRIVILEGES ON DATABASE affine_binary TO affine;
resources:
limits:
cpu: '500m'
memory: 0.5Gi

60
.github/helm/deployment_guide.md vendored Normal file
View File

@@ -0,0 +1,60 @@
# Cluster Deployment Guide
This document provides a step-by-step guide for developers on how to deploy services in a Kubernetes cluster. The following content assumes that the reader already has a basic understanding of Kubernetes concepts and operations.
### 1. Configure Service Mesh (Optional)
In the Kubernetes cluster, we optionally use a Service Mesh (such as Istio or Anthos Service Mesh) to manage the network interactions of microservices. If a Service Mesh is already deployed on your cluster, or you do not need one, you can skip this step. In this step, we assume that you are using Google Kubernetes Engine (GKE) and have already installed Anthos Service Mesh on your cluster; if you wish to use another Ingress Controller, please refer to the relevant documentation.
To configure your kubectl context to interact with your Kubernetes cluster using the gcloud tool, you need to execute the following commands:
```sh
export CLUSTER_NAME=your_cluster_name
export REGION=your_cluster_region
export PROJECT=your_project_id
gcloud container clusters get-credentials $CLUSTER_NAME --region $REGION --project $PROJECT
```
In this command, you should replace `CLUSTER_NAME`, `REGION` and `PROJECT` with the actual name, region and project id of your Kubernetes cluster. This command retrieves the access credentials for your Kubernetes cluster and automatically configures kubectl to use these credentials.
Now, to inject Service Mesh for a specific Namespace, first, set the environment variable `NAMESPACE` that should correspond to your target Kubernetes Namespace. In this example, we use `prod` as the target Namespace:
```sh
export NAMESPACE=prod
```
Then, we label the Namespace which will enable Istio to automatically inject the sidecar container for all new Pods under this Namespace:
```sh
kubectl label namespace $NAMESPACE istio-injection- istio.io/rev=asm-managed --overwrite
```
Finally, we trigger the Kubernetes Deployment restart mechanism to allow existing Pods to also obtain sidecar container injection:
```sh
kubectl rollout restart deployment -n $NAMESPACE
```
### 2. Deploying the Application
Next, we will deploy our application in the Kubernetes cluster through Helm. First, set relevant environment variables:
```sh
export NAMESPACE=prod
export RELEASE=affine-cloud-prod
export CHART_PATH=.github/helm/affine-cloud
```
- `NAMESPACE` should be consistent with the first step, indicating your target Kubernetes Namespace.
- `RELEASE` is the name of your Helm release.
- `CHART_PATH` is the location of your Helm chart in your file system. (Avoid naming this variable `PATH`, since that would overwrite the shell's executable search path.)
Finally, use the `helm upgrade --install` command to deploy or upgrade your application:
```sh
helm upgrade --namespace $NAMESPACE --create-namespace --install $RELEASE $CHART_PATH
```
This command creates (if it doesn't already exist) and deploys your Helm chart in the specified Namespace. If the release already exists, it will be upgraded.
The above are the complete steps for deploying an application in a Kubernetes cluster. Make sure all prerequisites are met before deploying, and also ensure that you have the correct permissions for operations in Kubernetes.
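The guide stops at the install step. As a hedged follow-up (standard `helm`/`kubectl` usage, not part of this guide), the rollout can be confirmed like this; the Deployment is named after the release because `affine-cloud-prod` contains the chart name, so the fullname helper keeps it unchanged:

```sh
# Confirm the Helm release exists and the pods came up.
helm status $RELEASE --namespace $NAMESPACE
kubectl get pods --namespace $NAMESPACE

# Tail the server logs from the Deployment created by the chart.
kubectl logs --namespace $NAMESPACE deployment/$RELEASE --tail=50
```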

27
.github/labeler.yml vendored
View File

@@ -19,11 +19,26 @@ mod:dev:
- 'tools/cli/**/*'
- 'packages/common/debug/**/*'
mod:plugin:
- changed-files:
- any-glob-to-any-file:
- 'packages/plugins/**/*'
plugin:copilot:
- changed-files:
- any-glob-to-any-file:
- 'packages/plugins/copilot/**/*'
mod:infra:
- changed-files:
- any-glob-to-any-file:
- 'packages/common/infra/**/*'
mod:sdk:
- changed-files:
- any-glob-to-any-file:
- 'packages/common/sdk/**/*'
mod:plugin-cli:
- changed-files:
- any-glob-to-any-file:
@@ -32,12 +47,7 @@ mod:plugin-cli:
mod:workspace:
- changed-files:
- any-glob-to-any-file:
- 'packages/common/workspace/**/*'
mod:workspace-impl:
- changed-files:
- any-glob-to-any-file:
- 'packages/frontend/workspace-impl/**/*'
- 'packages/frontend/workspace/**/*'
mod:i18n:
- changed-files:
@@ -49,6 +59,11 @@ mod:env:
- any-glob-to-any-file:
- 'packages/common/env/**/*'
mod:hooks:
- changed-files:
- any-glob-to-any-file:
- 'packages/frontend/hooks/**/*'
mod:component:
- changed-files:
- any-glob-to-any-file:

27
.github/renovate.json vendored
View File

@@ -1,26 +1,27 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": ["config:base", ":disablePeerDependencies"],
"extends": [
"config:base",
"group:allNonMajor",
":preserveSemverRanges",
":disablePeerDependencies"
],
"labels": ["dependencies"],
"packageRules": [
{
"matchPackageNames": ["napi", "napi-build", "napi-derive"],
"rangeStrategy": "replace",
"groupName": "napi-rs"
},
{
"matchPackagePatterns": ["^eslint", "^@typescript-eslint"],
"rangeStrategy": "replace",
"groupName": "linter"
},
{
"matchPackagePatterns": ["^@nestjs"],
"rangeStrategy": "replace",
"groupName": "nestjs"
},
{
"matchPackagePatterns": ["^@opentelemetry"],
"rangeStrategy": "replace",
"groupName": "opentelemetry"
},
{
@@ -29,32 +30,16 @@
"@prisma/instrumentation",
"prisma"
],
"rangeStrategy": "replace",
"groupName": "prisma"
},
{
"matchPackagePatterns": ["^@electron-forge"],
"rangeStrategy": "replace",
"groupName": "electron-forge"
},
{
"groupName": "blocksuite-nightly",
"matchPackagePatterns": ["^@blocksuite"],
"excludePackageNames": ["@blocksuite/icons"],
"rangeStrategy": "replace",
"followTag": "nightly"
},
{
"groupName": "all non-major dependencies",
"groupSlug": "all-minor-patch",
"matchPackagePatterns": ["*"],
"excludePackagePatterns": ["^@blocksuite/"],
"matchUpdateTypes": ["minor", "patch"]
},
{
"matchPackagePatterns": ["*"],
"rangeStrategy": "replace",
"excludePackagePatterns": ["^@blocksuite/"]
}
],
"commitMessagePrefix": "chore: ",

View File

@@ -108,6 +108,44 @@ jobs:
yarn set version $(node -e "console.log(require('./package.json').packageManager.split('@')[1])")
git diff --exit-code
e2e-plugin-test:
name: E2E Plugin Test
runs-on: ubuntu-latest
env:
DISTRIBUTION: browser
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
electron-install: false
full-cache: true
- name: Run playwright tests
run: yarn e2e --forbid-only
working-directory: tests/affine-plugin
env:
COVERAGE: true
- name: Collect code coverage report
run: yarn exec nyc report -t .nyc_output --report-dir .coverage --reporter=lcov
- name: Upload e2e test coverage results
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./.coverage/lcov.info
flags: e2e-plugin-test
name: affine
fail_ci_if_error: false
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-plugin
path: ./test-results
if-no-files-found: ignore
e2e-test:
name: E2E Test
runs-on: ubuntu-latest
@@ -131,7 +169,7 @@ jobs:
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-${{ matrix.shard }}
path: ./test-results
@@ -156,7 +194,7 @@ jobs:
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-migration
path: ./tests/affine-migration/test-results
@@ -178,7 +216,7 @@ jobs:
full-cache: true
- name: Download affine.linux-x64-gnu.node
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: affine.linux-x64-gnu.node
path: ./packages/frontend/native
@@ -216,6 +254,8 @@ jobs:
with:
extra-flags: workspaces focus @affine/native
electron-install: false
build-infra: false
build-plugins: false
- name: Setup filename
id: filename
shell: bash
@@ -229,7 +269,7 @@ jobs:
package: '@affine/native'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Upload ${{ steps.filename.outputs.filename }}
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ steps.filename.outputs.filename }}
path: ./packages/frontend/native/${{ steps.filename.outputs.filename }}
@@ -247,6 +287,8 @@ jobs:
with:
extra-flags: workspaces focus @affine/storage
electron-install: false
build-infra: false
build-plugins: false
- name: Build Rust
uses: ./.github/actions/build-rust
with:
@@ -254,7 +296,7 @@ jobs:
package: '@affine/storage'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Upload storage.node
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: storage.node
path: ./packages/backend/storage/storage.node
@@ -270,6 +312,7 @@ jobs:
uses: ./.github/actions/setup-node
with:
electron-install: false
build-plugins: false
full-cache: true
- name: Build Core
# always skip cache because its fast, and cache configuration is always changing
@@ -277,7 +320,7 @@ jobs:
- name: zip core
run: tar -czf dist.tar.gz --directory=packages/frontend/core/dist .
- name: Upload core artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: core
path: dist.tar.gz
@@ -338,7 +381,7 @@ jobs:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Download storage.node
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: storage.node
path: ./packages/backend/server
@@ -427,13 +470,13 @@ jobs:
yarn workspace @affine/server data-migration run
yarn workspace @affine/server exec node --loader ts-node/esm/transpile-only ./scripts/init-db.ts
- name: Download storage.node
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: storage.node
path: ./packages/backend/server
- name: Download affine.linux-x64-gnu.node
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: affine.linux-x64-gnu.node
path: ./packages/frontend/native
@@ -447,7 +490,7 @@ jobs:
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-server
path: ./tests/affine-cloud/test-results
@@ -511,7 +554,7 @@ jobs:
echo "filename=affine.$PLATFORM_ARCH_ABI.node" >> "$GITHUB_OUTPUT"
- name: Download ${{ steps.filename.outputs.filename }}
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ steps.filename.outputs.filename }}
path: ./packages/frontend/native
@@ -553,7 +596,7 @@ jobs:
- name: Upload test results
if: ${{ failure() }}
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: test-results-e2e-${{ matrix.spec.os }}-${{ matrix.spec.arch }}
path: ./test-results

View File

@@ -1,27 +0,0 @@
name: Deploy Automatically
on:
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+-canary.[0-9]+'
schedule:
- cron: '0 9 * * *'
jobs:
dispatch-deploy:
runs-on: ubuntu-latest
name: Setup Deploy
steps:
- name: dispatch deploy by tag
if: ${{ github.event_name == 'push' }}
uses: benc-uk/workflow-dispatch@v1
with:
workflow: deploy.yml
inputs: '{ "flavor": "canary" }'
- name: dispatch deploy by schedule
if: ${{ github.event_name == 'schedule' }}
uses: benc-uk/workflow-dispatch@v1
with:
workflow: deploy.yml
inputs: '{ "flavor": "canary" }'
ref: canary

View File

@@ -23,7 +23,6 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
@@ -32,7 +31,7 @@ jobs:
- name: Build Server
run: yarn workspace @affine/server build
- name: Upload server dist
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: server-dist
path: ./packages/backend/server/dist
@@ -44,10 +43,11 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Plugins
run: yarn run build:plugins
- name: Build Core
run: yarn nx build @affine/core --skip-nx-cache
env:
@@ -63,7 +63,7 @@ jobs:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
- name: Upload core artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: core
path: ./packages/frontend/core/dist
@@ -76,7 +76,6 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
@@ -87,7 +86,7 @@ jobs:
package: '@affine/storage'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Upload storage.node
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: storage.node
path: ./packages/backend/storage/storage.node
@@ -100,7 +99,6 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
@@ -111,7 +109,7 @@ jobs:
package: '@affine/storage'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Upload storage.node
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: storage.arm64.node
path: ./packages/backend/storage/storage.node
@@ -128,22 +126,22 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Download core artifact
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: core
path: ./packages/frontend/core/dist
- name: Download server dist
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: server-dist
path: ./packages/backend/server/dist
- name: Download storage.node
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: storage.node
path: ./packages/backend/server
- name: Download storage.node arm64
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: storage.arm64.node
path: ./packages/backend/storage
@@ -222,7 +220,6 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Deploy to ${{ github.event.inputs.flavor }}
uses: ./.github/actions/deploy

30
.github/workflows/dispatch-deploy.yml vendored Normal file
View File

@@ -0,0 +1,30 @@
name: Dispatch Deploy by tag
on:
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+-canary.[0-9]+'
jobs:
dispatch-deploy-by-tag:
runs-on: ubuntu-latest
name: Setup deploy environment
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
extra-flags: 'workspaces focus @affine/monorepo'
hard-link-nm: false
electron-install: false
build-infra: false
build-plugins: false
- name: Setup output value
id: flavor
run: |
node -e "const env = require('semver').parse('${{ github.ref_name }}').prerelease[0] ?? 'stable'; console.log(`flavor=${env}`)" >> "$GITHUB_OUTPUT"
- name: dispatch deploy
uses: benc-uk/workflow-dispatch@v1
with:
workflow: deploy.yml
inputs: '{ "flavor": "${{ steps.flavor.outputs.flavor }}" }'

258
.github/workflows/nightly-build.yml vendored Normal file
View File

@@ -0,0 +1,258 @@
name: Build Canary Desktop App on Staging Branch
on:
workflow_dispatch:
inputs:
channel_override:
description: 'channel type (canary, beta, or stable)'
type: choice
default: beta
options:
- canary
- beta
- stable
push:
branches:
# 0.6.x-staging
- v[0-9]+.[0-9]+.x-staging
# 0.6.1-staging
- v[0-9]+.[0-9]+.[0-9]+-staging
paths-ignore:
- README.md
- .github/**
- '!.github/workflows/nightly-build.yml'
- '!.github/actions/build-rust/action.yml'
- '!.github/actions/setup-node/action.yml'
permissions:
actions: write
contents: write
security-events: write
concurrency:
# The concurrency group contains the workflow name and the branch name for
# pull requests or the commit hash for any other events.
group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.head_ref || github.sha }}
cancel-in-progress: true
env:
# BUILD_TYPE => app icon, app name, etc
BUILD_TYPE: internal
# BUILD_TYPE_OVERRIDE => channel type (canary, beta, or stable) - get the channel type (the api configs)
BUILD_TYPE_OVERRIDE: ${{ github.event.inputs.channel_override || 'beta' }}
jobs:
set-build-version:
runs-on: ubuntu-latest
outputs:
version: 0.0.0-internal.${{ steps.version.outputs.version }}
steps:
- uses: actions/checkout@v4
- uses: toeverything/set-build-version@latest
- id: version
run: echo ::set-output name=version::${{ env.BUILD_VERSION }}
before-make:
runs-on: ubuntu-latest
needs:
- set-build-version
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Setup @sentry/cli
uses: ./.github/actions/setup-sentry
- name: Replace Version
run: ./scripts/set-version.sh ${{ needs.set-build-version.outputs.version }}
- name: generate-assets
run: yarn workspace @affine/electron generate-assets
env:
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
RELEASE_VERSION: ${{ needs.set-build-version.outputs.version }}
SKIP_PLUGIN_BUILD: 'true'
SKIP_NX_CACHE: 'true'
- name: Upload core artifact
uses: actions/upload-artifact@v3
with:
name: core
path: packages/frontend/electron/resources/web-static
make-distribution:
strategy:
# all combinations: macos-latest x64, macos-latest arm64, ubuntu-latest x64
# For windows, we need a separate approach
matrix:
spec:
- runner: macos-latest
platform: darwin
arch: x64
target: x86_64-apple-darwin
- runner: macos-latest
platform: darwin
arch: arm64
target: aarch64-apple-darwin
- runner: ubuntu-latest
platform: linux
arch: x64
target: x86_64-unknown-linux-gnu
- runner: windows-latest
platform: win32
arch: x64
target: x86_64-pc-windows-msvc
runs-on: ${{ matrix.spec.runner }}
needs:
- before-make
- set-build-version
env:
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
SKIP_GENERATE_ASSETS: 1
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
timeout-minutes: 10
if: ${{ matrix.spec.platform == 'darwin' }}
uses: ./.github/actions/setup-node
with:
extra-flags: workspaces focus @affine/electron @affine/monorepo
hard-link-nm: false
build-plugins: false
nmHoistingLimits: workspaces
enableScripts: false
- name: Setup Node.js
timeout-minutes: 10
if: ${{ matrix.spec.platform != 'darwin' }}
uses: ./.github/actions/setup-node
with:
extra-flags: workspaces focus @affine/electron @affine/monorepo
hard-link-nm: false
build-plugins: false
nmHoistingLimits: workspaces
- name: Build AFFiNE native
uses: ./.github/actions/build-rust
with:
target: ${{ matrix.spec.target }}
package: '@affine/native'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Replace Version
run: ./scripts/set-version.sh ${{ needs.set-build-version.outputs.version }}
- uses: actions/download-artifact@v3
with:
name: core
path: packages/frontend/electron/resources/web-static
- name: Build Plugins
run: yarn run build:plugins
- name: Build Desktop Layers
run: yarn workspace @affine/electron build
- name: Signing By Apple Developer ID
if: ${{ matrix.spec.platform == 'darwin' }}
uses: apple-actions/import-codesign-certs@v2
with:
p12-file-base64: ${{ secrets.CERTIFICATES_P12 }}
p12-password: ${{ secrets.CERTIFICATES_P12_PASSWORD }}
- name: make
run: yarn workspace @affine/electron make --platform=${{ matrix.spec.platform }} --arch=${{ matrix.spec.arch }}
env:
SKIP_PLUGIN_BUILD: 1
SKIP_WEB_BUILD: 1
HOIST_NODE_MODULES: 1
- name: Save artifacts (mac)
if: ${{ matrix.spec.platform == 'darwin' }}
run: |
mkdir -p builds
mv packages/frontend/electron/out/*/make/*.dmg ./builds/affine-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.dmg
mv packages/frontend/electron/out/*/make/zip/darwin/${{ matrix.spec.arch }}/*.zip ./builds/affine-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.zip
- name: Save artifacts (windows)
if: ${{ matrix.spec.platform == 'win32' }}
run: |
mkdir -p builds
mv packages/frontend/electron/out/*/make/zip/win32/x64/AFFiNE*-win32-x64-*.zip ./builds/affine-${{ env.BUILD_TYPE }}-windows-x64.zip
mv packages/frontend/electron/out/*/make/squirrel.windows/x64/*.exe ./builds/affine-${{ env.BUILD_TYPE }}-windows-x64.exe
mv packages/frontend/electron/out/*/make/squirrel.windows/x64/*.msi ./builds/affine-${{ env.BUILD_TYPE }}-windows-x64.msi
mv packages/frontend/electron/out/*/make/squirrel.windows/x64/*.nupkg ./builds/affine-${{ env.BUILD_TYPE }}-windows-x64.nupkg
- name: Save artifacts (linux)
if: ${{ matrix.spec.platform == 'linux' }}
run: |
mkdir -p builds
mv packages/frontend/electron/out/*/make/zip/linux/x64/*.zip ./builds/affine-${{ env.BUILD_TYPE }}-linux-x64.zip
mv packages/frontend/electron/out/*/make/AppImage/x64/*.AppImage ./builds/affine-${{ env.BUILD_TYPE }}-linux-x64.AppImage
- name: Upload Artifact
uses: actions/upload-artifact@v3
with:
name: affine-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}-builds
path: builds
release:
needs:
- make-distribution
- set-build-version
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Download Artifacts (macos-x64)
uses: actions/download-artifact@v3
with:
name: affine-darwin-x64-builds
path: ./
- name: Download Artifacts (macos-arm64)
uses: actions/download-artifact@v3
with:
name: affine-darwin-arm64-builds
path: ./
- name: Download Artifacts (windows-x64)
uses: actions/download-artifact@v3
with:
name: affine-win32-x64-builds
path: ./
- name: Download Artifacts (linux-x64)
uses: actions/download-artifact@v3
with:
name: affine-linux-x64-builds
path: ./
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 18
- name: Generate Release yml
run: |
node ./packages/frontend/electron/scripts/generate-yml.js
env:
RELEASE_VERSION: ${{ needs.set-build-version.outputs.version }}
- name: Generate SHA512 checksums
run: |
sha512sum *-linux-* > SHA512SUMS.txt
sha512sum *-macos-* >> SHA512SUMS.txt
sha512sum *-windows-* >> SHA512SUMS.txt
- name: Create Release Draft
uses: softprops/action-gh-release@v1
env:
GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}
with:
repository: 'toeverything/AFFiNE-Releases'
name: ${{ needs.set-build-version.outputs.version }}
tag_name: ${{ needs.set-build-version.outputs.version }}
prerelease: true
files: |
./SHA512SUMS.txt
./VERSION
./*.zip
./*.dmg
./*.exe
./*.nupkg
./RELEASES
./*.AppImage
./*.apk
./*.yml
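Two hedged usage notes on the workflow above, neither of which is part of the changeset itself: the manual trigger takes the channel through the `channel_override` input, and the release job publishes `SHA512SUMS.txt` next to the binaries, which standard coreutils can verify:

```sh
# Trigger the workflow manually with a specific channel (requires the GitHub CLI).
gh workflow run nightly-build.yml -f channel_override=canary

# In a directory containing SHA512SUMS.txt and the downloaded files:
sha512sum -c SHA512SUMS.txt --ignore-missing
```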

View File

@@ -32,6 +32,8 @@ jobs:
uses: ./.github/actions/setup-node
with:
electron-install: false
- name: Build Plugins
run: yarn run build:plugins
- uses: chromaui/action-next@v1
with:
workingDir: tests/storybook
@@ -42,7 +44,7 @@ jobs:
env:
CHROMATIC_PROJECT_TOKEN: ${{ secrets.CHROMATIC_PROJECT_TOKEN }}
NODE_OPTIONS: ${{ env.NODE_OPTIONS }}
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
if: always()
with:
name: chromatic-build-artifacts-${{ github.run_id }}

View File

@@ -1,6 +1,9 @@
name: Release Desktop App
on:
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+-canary.[0-9]+'
workflow_dispatch:
inputs:
build-type:
@@ -43,7 +46,6 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
@@ -61,7 +63,7 @@ jobs:
SKIP_NX_CACHE: 'true'
- name: Upload core artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: core
path: packages/frontend/electron/resources/web-static
@@ -93,15 +95,13 @@ jobs:
SKIP_GENERATE_ASSETS: 1
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
timeout-minutes: 10
uses: ./.github/actions/setup-node
with:
extra-flags: workspaces focus @affine/electron @affine/monorepo
hard-link-nm: false
build-plugins: false
nmHoistingLimits: workspaces
enableScripts: false
- name: Build AFFiNE native
@@ -110,7 +110,7 @@ jobs:
target: ${{ matrix.spec.target }}
package: '@affine/native'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- uses: actions/download-artifact@v4
- uses: actions/download-artifact@v3
with:
name: core
path: packages/frontend/electron/resources/web-static
@@ -146,7 +146,7 @@ jobs:
mv packages/frontend/electron/out/*/make/AppImage/x64/*.AppImage ./builds/affine-${{ env.BUILD_TYPE }}-linux-x64.AppImage
- name: Upload Artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: affine-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}-builds
path: builds
@@ -169,15 +169,13 @@ jobs:
SKIP_GENERATE_ASSETS: 1
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
timeout-minutes: 10
uses: ./.github/actions/setup-node
with:
extra-flags: workspaces focus @affine/electron @affine/monorepo
hard-link-nm: false
build-plugins: false
nmHoistingLimits: workspaces
- name: Build AFFiNE native
uses: ./.github/actions/build-rust
@@ -185,11 +183,14 @@ jobs:
target: ${{ matrix.spec.target }}
package: '@affine/native'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- uses: actions/download-artifact@v4
- uses: actions/download-artifact@v3
with:
name: core
path: packages/frontend/electron/resources/web-static
- name: Build Plugins
run: yarn run build:plugins
- name: Build Desktop Layers
run: yarn workspace @affine/electron build
@@ -211,7 +212,7 @@ jobs:
run: Compress-Archive -CompressionLevel Fastest -Path packages/frontend/electron/out/* -DestinationPath archive.zip
- name: Save packaged artifacts for signing
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: packaged-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}
path: |
@@ -245,7 +246,7 @@ jobs:
timeout-minutes: 10
uses: ./.github/actions/setup-node
- name: Download and overwrite packaged artifacts
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: signed-packaged-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}
path: .
@@ -266,7 +267,7 @@ jobs:
echo $FILES_TO_BE_SIGNED
- name: Save installer for signing
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: installer-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}
path: archive.zip
@@ -292,7 +293,7 @@ jobs:
runs-on: ${{ matrix.spec.runner }}
steps:
- name: Download and overwrite installer artifacts
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: signed-installer-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}
path: .
@@ -307,7 +308,7 @@ jobs:
mv packages/frontend/electron/out/*/make/squirrel.windows/x64/*.msi ./builds/affine-${{ env.BUILD_TYPE }}-windows-x64.msi
- name: Upload Artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: affine-${{ matrix.spec.platform }}-${{ matrix.spec.arch }}-builds
path: builds
@@ -318,29 +319,29 @@ jobs:
steps:
- uses: actions/checkout@v4
- uses: actions/download-artifact@v4
- uses: actions/download-artifact@v3
with:
name: core
path: web-static
- name: Zip web-static
run: zip -r web-static.zip web-static
- name: Download Artifacts (macos-x64)
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: affine-darwin-x64-builds
path: ./
- name: Download Artifacts (macos-arm64)
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: affine-darwin-arm64-builds
path: ./
- name: Download Artifacts (windows-x64)
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: affine-win32-x64-builds
path: ./
- name: Download Artifacts (linux-x64)
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: affine-linux-x64-builds
path: ./
@@ -353,35 +354,12 @@ jobs:
env:
RELEASE_VERSION: ${{ needs.before-make.outputs.RELEASE_VERSION }}
- name: Create Release Draft
if: ${{ github.ref_type == 'tag' }}
uses: softprops/action-gh-release@v1
with:
name: ${{ needs.before-make.outputs.RELEASE_VERSION }}
body: ''
draft: ${{ github.event.inputs.is-draft }}
prerelease: ${{ github.event.inputs.is-pre-release }}
files: |
./VERSION
./*.zip
./*.dmg
./*.exe
./*.AppImage
./*.apk
./*.yml
- name: Create Nightly Release Draft
if: ${{ github.ref_type == 'branch' }}
uses: softprops/action-gh-release@v1
env:
GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}
with:
# Temporarily, treat release from branch as nightly release, artifact saved to AFFiNE-Releases.
# Need to improve internal build and nightly release logic.
repository: 'toeverything/AFFiNE-Releases'
name: ${{ needs.before-make.outputs.RELEASE_VERSION }}
tag_name: ${{ needs.before-make.outputs.RELEASE_VERSION }}
body: ''
draft: false
prerelease: true
draft: ${{ github.event.inputs.is-draft || true }}
prerelease: ${{ github.event.inputs.is-pre-release || true }}
files: |
./VERSION
./*.zip

View File

@@ -1,27 +0,0 @@
name: Release Desktop Automatically
on:
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+-canary.[0-9]+'
schedule:
- cron: '0 9 * * *'
jobs:
dispatch-release-desktop:
runs-on: ubuntu-latest
name: Setup Release Desktop
steps:
- name: dispatch desktop release by tag
if: ${{ github.event_name == 'push' }}
uses: benc-uk/workflow-dispatch@v1
with:
workflow: release-desktop.yml
inputs: '{ "build-type": "canary", "is-draft": false, "is-pre-release": true }'
- name: dispatch desktop release by schedule
if: ${{ github.event_name == 'schedule' }}
uses: benc-uk/workflow-dispatch@v1
with:
workflow: release-desktop.yml
inputs: '{ "build-type": "canary", "is-draft": false, "is-pre-release": true }'
ref: canary

View File

@@ -14,7 +14,7 @@ jobs:
env:
ARCHIVE_DIR: ${{ github.run_id }}-${{ github.run_attempt }}-${{ inputs.artifact-name }}
steps:
- uses: actions/download-artifact@v4
- uses: actions/download-artifact@v3
with:
name: ${{ inputs.artifact-name }}
path: ${{ env.ARCHIVE_DIR }}
@@ -36,7 +36,7 @@ jobs:
cd ${{ env.ARCHIVE_DIR }}
7za a signed.zip .\out\*
- name: upload
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: signed-${{ inputs.artifact-name }}
path: ${{ env.ARCHIVE_DIR }}/signed.zip

View File

@@ -15,7 +15,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Publish
uses: cloudflare/wrangler-action@v3.4.0
uses: cloudflare/wrangler-action@v3.3.2
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}

View File

@@ -16,8 +16,6 @@ packages/frontend/i18n/src/i18n-generated.ts
packages/frontend/graphql/src/graphql/index.ts
tests/affine-legacy/**/static
.yarnrc.yml
packages/frontend/templates/templates.gen.ts
packages/frontend/templates/onboarding
# auto-generated by NAPI-RS
# fixme(@joooye34): need script to check and generate ignore list here

10
Cargo.lock generated
View File

@@ -2024,14 +2024,16 @@ dependencies = [
[[package]]
name = "rsa"
version = "0.9.6"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d0e5124fcb30e76a7e79bfee683a2746db83784b86289f6251b54b7950a0dfc"
checksum = "6ab43bb47d23c1a631b4b680199a45255dce26fa9ab2fa902581f624ff13e6a8"
dependencies = [
"byteorder",
"const-oid",
"digest",
"num-bigint-dig",
"num-integer",
"num-iter",
"num-traits",
"pkcs1",
"pkcs8",
@@ -2460,9 +2462,9 @@ dependencies = [
[[package]]
name = "spki"
version = "0.7.3"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d"
checksum = "9d1e996ef02c474957d681f1b05213dfb0abab947b446a62d37770b23500184a"
dependencies = [
"base64ct",
"der",

View File

@@ -113,6 +113,21 @@ If you have questions, you are welcome to contact us. One of the best places to
| [@toeverything/y-indexeddb](packages/common/y-indexeddb) | IndexedDB database adapter for Yjs | [![](https://img.shields.io/npm/dm/@toeverything/y-indexeddb?style=flat-square&color=eee)](https://www.npmjs.com/package/@toeverything/y-indexeddb) |
| [@toeverything/theme](packages/common/theme) | AFFiNE theme | [![](https://img.shields.io/npm/dm/@toeverything/theme?style=flat-square&color=eee)](https://www.npmjs.com/package/@toeverything/theme) |
## Plugins
> Plugins are a way to extend the functionality of AFFiNE. You can use plugins to add new blocks, new features, and even new ways to edit content.
>
> (Currently, the plugin system is under heavy development. You will see the plugin system in the canary release.)
- [@affine/sdk](./packages/common/sdk) - SDK for developing plugins
- [@affine/plugin-cli](./tools/plugin-cli) - CLI for developing plugins
| Official Plugin | Description | Status |
| ---------------------------------------------------------------- | ----------------------------------------- | ------ |
| [@affine/copilot-plugin](./packages/plugins/copilot) | AI Copilot that helps you with document writing | 🚧 |
| [@affine/image-preview-plugin](./packages/plugins/image-preview) | Component for previewing an image | ✅ |
| [@affine/outline](./packages/plugins/outline) | Outline for your document | ✅ |
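Alongside this table, the changeset adds a `build:plugins` script to the root `package.json` (shown later in this diff) that builds every Nx project tagged `plugin`; a minimal local sketch, mirroring the order CI uses:

```sh
# Build the infra packages first, then all official plugins.
yarn run build:infra
yarn run build:plugins
```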
## Upstreams
We would also like to give thanks to open-source projects that make AFFiNE possible:

View File

@@ -57,29 +57,6 @@ corepack prepare yarn@stable --activate
yarn install
```
### Clone repository
#### Linux & MacOS
```sh
git clone https://github.com/toeverything/AFFiNE
```
#### Windows
In our codebase, we use symbolic links. Due to the security design of Windows, the creation of symbolic links requires administrator privileges. This is part of the security policy settings of Windows, and more information can be found at [Security Policy Settings for Creating Symbolic Links](https://learn.microsoft.com/en-us/windows/security/threat-protection/security-policy-settings/create-symbolic-links).
For detailed guidance on enabling this feature, please refer to the official documentation: [Enable Developer Mode on Windows](https://learn.microsoft.com/en-us/windows/apps/get-started/enable-your-device-for-development).
Once Developer Mode is enabled, execute the following command with administrator privileges:
```sh
# Enable symbolic links
git config --global core.symlinks true
# Clone the repository, also need to be run with administrator privileges
git clone https://github.com/toeverything/AFFiNE
```
### Build Native Dependencies
Run the following script. It will build the native module at [`/packages/frontend/native`](/packages/frontend/native) and build Node.js binding using [NAPI.rs](https://napi.rs/).
@@ -90,6 +67,18 @@ Note: use `strip` from system instead of `binutils` if you are running MacOS. [s
yarn workspace @affine/native build
```
### Build Infra
```sh
yarn run build:infra
```
### Build Plugins
```sh
yarn run build:plugins
```
### Build Server Dependencies
```sh
@@ -113,7 +102,7 @@ yarn test
### E2E Test
```shell
# there are `affine-local`, `affine-migration`, `affine-local`, `affine-prototype` e2e tests,
# there are `affine-local`, `affine-migration`, `affine-local`, `affine-plugin`, `affine-prototype` e2e tests,
# which are run under different situations.
cd tests/affine-local
yarn e2e
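# The affine-plugin suite added by this changeset runs the same way; from tests/affine-local,
# step up one level (CI additionally passes --forbid-only and sets COVERAGE=true):
cd ../affine-plugin
yarn e2e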

View File

@@ -17,6 +17,7 @@ The codebase is organized as follows:
- `packages/` contains all code running in production.
- `backend/` contains backend code, more information from <https://github.com/toeverything/OctoBase>.
- `frontend/` contains frontend code, including the web app, the electron app and business libraries.
- `plugins/` contains all built-in plugins.
- `common` contains the isomorphic code or basic libraries without business.
- `tools/` contains tools to help developing or CI, not used in production.
- `tests/` contains testings across different libraries, including e2e testings and integration testings.

View File

@@ -1,23 +0,0 @@
{
"name": "@affine/docs",
"type": "module",
"private": true,
"scripts": {
"build": "typedoc --options ../../typedoc.json",
"dev": "nodemon --exec 'typedoc --options ../../typedoc.json' & serve dist/"
},
"devDependencies": {
"nodemon": "^3.0.1",
"serve": "^14.2.1",
"typedoc": "^0.25.4"
},
"nodemonConfig": {
"watch": [
"./readme.md",
"../../packages/*/*/src/*.ts",
"../../**/typedoc{.base,}.json"
],
"ext": "ts,md,json"
},
"version": "0.10.3-canary.2"
}

View File

@@ -1,7 +0,0 @@
Welcome to AFFiNE development reference.
This document is intended for developers who want to contribute to AFFiNE. It contains information about the architecture of AFFiNE, how to build it, and how to contribute to it.
### The Infrastructure of AFFiNE
see {@link @toeverything/infra!}

View File

@@ -8,7 +8,6 @@
".",
"packages/*/*",
"tools/*",
"docs/reference",
"!tools/@types",
"tools/@types/*",
"tests/*",
@@ -24,6 +23,8 @@
"build": "yarn nx build @affine/core",
"build:electron": "yarn nx build @affine/electron",
"build:storage": "yarn nx run-many -t build -p @affine/storage",
"build:infra": "yarn nx run-many -t build --projects=tag:infra",
"build:plugins": "yarn nx run-many -t build --projects=tag:plugin",
"build:storybook": "yarn nx build @affine/storybook",
"start:web-static": "yarn workspace @affine/core static-server",
"start:storybook": "yarn exec serve tests/storybook/storybook-static -l 6006",
@@ -32,7 +33,7 @@
"lint:eslint:fix": "yarn lint:eslint --fix",
"lint:prettier": "prettier --ignore-unknown --cache --check .",
"lint:prettier:fix": "prettier --ignore-unknown --cache --write .",
"lint:ox": "oxlint --import-plugin --deny-warnings -D correctness -D nursery -D prefer-array-some -D no-useless-promise-resolve-reject -D perf -A no-undef -A consistent-type-exports -A default -A named -A ban-ts-comment -A export",
"lint:ox": "oxlint --deny-warnings -D correctness -D nursery -D prefer-array-some -D no-useless-promise-resolve-reject -A no-undef -A consistent-type-exports -A default -A named -A ban-ts-comment",
"lint": "yarn lint:eslint && yarn lint:prettier",
"lint:fix": "yarn lint:eslint:fix && yarn lint:prettier:fix",
"test": "vitest --run",
@@ -57,12 +58,13 @@
"devDependencies": {
"@affine-test/kit": "workspace:*",
"@affine/cli": "workspace:*",
"@affine/plugin-cli": "workspace:*",
"@commitlint/cli": "^18.4.3",
"@commitlint/config-conventional": "^18.4.3",
"@faker-js/faker": "^8.3.1",
"@istanbuljs/schema": "^0.1.3",
"@magic-works/i18n-codegen": "^0.5.0",
"@nx/vite": "17.2.8",
"@nx/vite": "17.1.3",
"@perfsee/sdk": "^1.9.0",
"@playwright/test": "^1.40.0",
"@taplo/cli": "^0.5.2",
@@ -76,8 +78,8 @@
"@vanilla-extract/vite-plugin": "^3.9.2",
"@vanilla-extract/webpack-plugin": "^2.3.1",
"@vitejs/plugin-react-swc": "^3.5.0",
"@vitest/coverage-istanbul": "1.1.1",
"@vitest/ui": "1.1.1",
"@vitest/coverage-istanbul": "1.0.4",
"@vitest/ui": "1.0.4",
"electron": "^27.1.0",
"eslint": "^8.54.0",
"eslint-config-prettier": "^9.0.0",
@@ -86,10 +88,10 @@
"eslint-plugin-react-hooks": "^4.6.0",
"eslint-plugin-simple-import-sort": "^10.0.0",
"eslint-plugin-sonarjs": "^0.23.0",
"eslint-plugin-unicorn": "^50.0.0",
"eslint-plugin-unicorn": "^49.0.0",
"eslint-plugin-unused-imports": "^3.0.0",
"eslint-plugin-vue": "^9.18.1",
"fake-indexeddb": "5.0.2",
"fake-indexeddb": "5.0.1",
"happy-dom": "^12.10.3",
"husky": "^8.0.3",
"lint-staged": "^15.1.0",
@@ -98,7 +100,7 @@
"nx": "^17.1.3",
"nx-cloud": "^16.5.2",
"nyc": "^15.1.0",
"oxlint": "0.0.22",
"oxlint": "0.0.21",
"prettier": "^3.1.0",
"semver": "^7.5.4",
"serve": "^14.2.1",
@@ -109,7 +111,7 @@
"vite-plugin-istanbul": "^5.0.0",
"vite-plugin-static-copy": "^1.0.0",
"vite-tsconfig-paths": "^4.2.1",
"vitest": "1.1.1",
"vitest": "1.0.4",
"vitest-fetch-mock": "^0.2.2",
"vitest-mock-extended": "^1.3.1"
},

View File

@@ -38,22 +38,22 @@
"@node-rs/crc32": "^1.7.2",
"@node-rs/jsonwebtoken": "^0.2.3",
"@opentelemetry/api": "^1.7.0",
"@opentelemetry/core": "^1.19.0",
"@opentelemetry/exporter-prometheus": "^0.46.0",
"@opentelemetry/exporter-zipkin": "^1.19.0",
"@opentelemetry/core": "^1.18.1",
"@opentelemetry/exporter-prometheus": "^0.45.1",
"@opentelemetry/exporter-zipkin": "^1.18.1",
"@opentelemetry/host-metrics": "^0.34.0",
"@opentelemetry/instrumentation": "^0.46.0",
"@opentelemetry/instrumentation": "^0.45.1",
"@opentelemetry/instrumentation-graphql": "^0.36.0",
"@opentelemetry/instrumentation-http": "^0.46.0",
"@opentelemetry/instrumentation-http": "^0.45.1",
"@opentelemetry/instrumentation-ioredis": "^0.36.0",
"@opentelemetry/instrumentation-nestjs-core": "^0.33.3",
"@opentelemetry/instrumentation-socket.io": "^0.34.4",
"@opentelemetry/resources": "^1.19.0",
"@opentelemetry/sdk-metrics": "^1.19.0",
"@opentelemetry/sdk-node": "^0.46.0",
"@opentelemetry/sdk-trace-node": "^1.19.0",
"@prisma/client": "^5.7.1",
"@prisma/instrumentation": "^5.7.1",
"@opentelemetry/instrumentation-socket.io": "^0.34.3",
"@opentelemetry/resources": "^1.18.1",
"@opentelemetry/sdk-metrics": "^1.18.1",
"@opentelemetry/sdk-node": "^0.45.1",
"@opentelemetry/sdk-trace-node": "^1.18.1",
"@prisma/client": "^5.6.0",
"@prisma/instrumentation": "^5.6.0",
"@socket.io/redis-adapter": "^8.2.1",
"cookie-parser": "^1.4.6",
"dotenv": "^16.3.1",
@@ -74,9 +74,9 @@
"on-headers": "^1.0.2",
"parse-duration": "^1.1.0",
"pretty-time": "^1.1.0",
"prisma": "^5.7.1",
"prisma": "^5.6.0",
"prom-client": "^15.0.0",
"reflect-metadata": "^0.2.0",
"reflect-metadata": "^0.1.13",
"rxjs": "^7.8.1",
"semver": "^7.5.4",
"socket.io": "^4.7.2",
@@ -101,7 +101,7 @@
"@types/on-headers": "^1.0.3",
"@types/pretty-time": "^1.1.5",
"@types/sinon": "^17.0.2",
"@types/supertest": "^6.0.0",
"@types/supertest": "^2.0.16",
"@types/ws": "^8.5.10",
"ava": "^6.0.0",
"c8": "^8.0.1",

View File

@@ -1,7 +1,7 @@
generator client {
provider = "prisma-client-js"
binaryTargets = ["native", "debian-openssl-3.0.x", "linux-arm64-openssl-3.0.x"]
previewFeatures = ["metrics", "tracing", "relationJoins", "nativeDistinct"]
previewFeatures = ["metrics", "tracing"]
}
datasource db {

View File

@@ -1,21 +0,0 @@
/* eslint-disable @typescript-eslint/no-non-null-assertion */
// Custom configurations
const env = process.env;
const node = AFFiNE.node;
// TODO: may be separate config overring in `affine.[env].config`?
if (node.prod && env.R2_OBJECT_STORAGE_ACCOUNT_ID) {
AFFiNE.storage.providers.r2 = {
accountId: env.R2_OBJECT_STORAGE_ACCOUNT_ID,
credentials: {
accessKeyId: env.R2_OBJECT_STORAGE_ACCESS_KEY_ID!,
secretAccessKey: env.R2_OBJECT_STORAGE_SECRET_ACCESS_KEY!,
},
};
AFFiNE.storage.storages.avatar.provider = 'r2';
AFFiNE.storage.storages.avatar.bucket = 'account-avatar';
AFFiNE.storage.storages.blob.provider = 'r2';
AFFiNE.storage.storages.blob.bucket = 'workspace-blobs';
}

View File

@@ -1,3 +0,0 @@
import { getDefaultAFFiNEConfig } from './config/default';
globalThis.AFFiNE = getDefaultAFFiNEConfig();

View File

@@ -1,7 +1,6 @@
import type { ApolloDriverConfig } from '@nestjs/apollo';
import type { LeafPaths } from '../utils/types';
import type { AFFiNEStorageConfig } from './storage';
declare global {
// eslint-disable-next-line @typescript-eslint/no-namespace
@@ -166,18 +165,11 @@ export interface AFFiNEConfig {
featureFlags: {
earlyAccessPreview: boolean;
};
/**
* Configuration for Object Storage, which defines how blobs and avatar assets are stored.
*/
storage: AFFiNEStorageConfig;
/**
* object storage Config
*
* all artifacts and logs will be stored on instance disk,
* and can not shared between instances if not configured
* @deprecated use `storage` instead
*/
objectStorage: {
/**

View File

@@ -9,7 +9,6 @@ import parse from 'parse-duration';
import pkg from '../../package.json' assert { type: 'json' };
import type { AFFiNEConfig, ServerFlavor } from './def';
import { applyEnvToConfig } from './env';
import { getDefaultAFFiNEStorageConfig } from './storage';
export const SERVER_FLAVOR = (process.env.SERVER_FLAVOR ??
'allinone') as ServerFlavor;
@@ -60,6 +59,11 @@ export const getDefaultAFFiNEConfig: () => AFFiNEConfig = () => {
AFFINE_SERVER_SUB_PATH: 'path',
AFFINE_ENV: 'affineEnv',
DATABASE_URL: 'db.url',
ENABLE_R2_OBJECT_STORAGE: ['objectStorage.r2.enabled', 'boolean'],
R2_OBJECT_STORAGE_ACCOUNT_ID: 'objectStorage.r2.accountId',
R2_OBJECT_STORAGE_ACCESS_KEY_ID: 'objectStorage.r2.accessKeyId',
R2_OBJECT_STORAGE_SECRET_ACCESS_KEY: 'objectStorage.r2.secretAccessKey',
R2_OBJECT_STORAGE_BUCKET: 'objectStorage.r2.bucket',
ENABLE_CAPTCHA: ['auth.captcha.enable', 'boolean'],
CAPTCHA_TURNSTILE_SECRET: ['auth.captcha.turnstile.secret', 'string'],
OAUTH_GOOGLE_ENABLED: ['auth.oauthProviders.google.enabled', 'boolean'],
@@ -176,7 +180,6 @@ export const getDefaultAFFiNEConfig: () => AFFiNEConfig = () => {
password: '',
},
},
storage: getDefaultAFFiNEStorageConfig(),
objectStorage: {
r2: {
enabled: false,

View File

@@ -74,4 +74,3 @@ export class ConfigModule {
export type { AFFiNEConfig } from './def';
export { SERVER_FLAVOR } from './default';
export * from './storage';

View File

@@ -1,58 +0,0 @@
import { homedir } from 'node:os';
import { join } from 'node:path';
import { S3ClientConfigType } from '@aws-sdk/client-s3';
export type StorageProviderType = 'fs' | 'r2' | 's3';
export interface FsStorageConfig {
path: string;
}
export type R2StorageConfig = S3ClientConfigType & {
accountId: string;
};
export type S3StorageConfig = S3ClientConfigType;
export type StorageTargetConfig = {
provider: StorageProviderType;
bucket: string;
};
export interface AFFiNEStorageConfig {
/**
* All providers for object storage
*
* Support different providers for different usage at the same time.
*/
providers: {
fs?: FsStorageConfig;
s3?: S3StorageConfig;
r2?: R2StorageConfig;
};
storages: {
avatar: StorageTargetConfig;
blob: StorageTargetConfig;
};
}
export type StorageProviders = AFFiNEStorageConfig['providers'];
export type Storages = keyof AFFiNEStorageConfig['storages'];
export function getDefaultAFFiNEStorageConfig(): AFFiNEStorageConfig {
return {
providers: {
fs: {
path: join(homedir(), '.affine/storage'),
},
},
storages: {
avatar: {
provider: 'fs',
bucket: 'avatars',
},
blob: {
provider: 'fs',
bucket: 'blobs',
},
},
};
}

View File

@@ -1,31 +0,0 @@
import type { UserType } from '../../modules/users';
import { PrismaService } from '../../prisma';
export class UnamedAccount1703756315970 {
// do the migration
static async up(db: PrismaService) {
await db.$transaction(async tx => {
// only find users with empty names
const users = await db.$queryRaw<
UserType[]
>`SELECT * FROM users WHERE name ~ E'^[\\s\\u2000-\\u200F]*$';`;
console.log(
`renaming ${users.map(({ email }) => email).join('|')} users`
);
await Promise.all(
users.map(({ id, email }) =>
tx.user.update({
where: { id },
data: {
name: email.split('@')[0],
},
})
)
);
});
}
// revert the migration
static async down(_db: PrismaService) {}
}

View File

@@ -29,9 +29,7 @@ import {
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-node';
import prismaInstrument from '@prisma/instrumentation';
const { PrismaInstrumentation } = prismaInstrument;
import { PrismaInstrumentation } from '@prisma/instrumentation';
import { PrismaMetricProducer } from './prisma';

View File

@@ -10,9 +10,6 @@ export class ServerConfigType {
@Field({ description: 'server flavor' })
flavor!: string;
@Field({ description: 'server base url' })
baseUrl!: string;
}
export class ServerConfigResolver {
@@ -23,7 +20,6 @@ export class ServerConfigResolver {
return {
version: AFFiNE.version,
flavor: SERVER_FLAVOR,
baseUrl: AFFiNE.baseUrl,
};
}
}

View File

@@ -1,113 +0,0 @@
import { promises as fs } from 'node:fs';
import { join } from 'node:path';
import test from 'ava';
import { getStreamAsBuffer } from 'get-stream';
import { ListObjectsMetadata } from '../providers';
import { FsStorageProvider } from '../providers/fs';
const config = {
path: join(process.cwd(), 'node_modules', '.cache/affine-test-storage'),
};
function createProvider() {
return new FsStorageProvider(
config,
'test' + Math.random().toString(16).substring(2, 8)
);
}
function keys(list: ListObjectsMetadata[]) {
return list.map(i => i.key);
}
async function randomPut(
provider: FsStorageProvider,
prefix = ''
): Promise<string> {
const key = prefix + 'test-key-' + Math.random().toString(16).substring(2, 8);
const body = Buffer.from(key);
provider.put(key, body);
return key;
}
test.after.always(() => {
fs.rm(config.path, { recursive: true });
});
test('put & get', async t => {
const provider = createProvider();
const key = 'testKey';
const body = Buffer.from('testBody');
await provider.put(key, body);
const result = await provider.get(key);
t.deepEqual(await getStreamAsBuffer(result.body!), body);
t.is(result.metadata?.contentLength, body.length);
});
test('list - one level', async t => {
const provider = createProvider();
const list = await Promise.all(
Array.from({ length: 100 }).map(() => randomPut(provider))
);
list.sort();
// random order, use set
const result = await provider.list();
t.deepEqual(keys(result), list);
const result2 = await provider.list('test-key');
t.deepEqual(keys(result2), list);
const result3 = await provider.list('testKey');
t.is(result3.length, 0);
});
test('list recursively', async t => {
const provider = createProvider();
await Promise.all([
Promise.all(Array.from({ length: 10 }).map(() => randomPut(provider))),
Promise.all(
Array.from({ length: 10 }).map(() => randomPut(provider, 'a/'))
),
Promise.all(
Array.from({ length: 10 }).map(() => randomPut(provider, 'a/b/'))
),
Promise.all(
Array.from({ length: 10 }).map(() => randomPut(provider, 'a/b/t/'))
),
]);
const r1 = await provider.list();
t.is(r1.length, 40);
// contains all `a/xxx`, `a/b/xxx` and `a/b/t/xxx`
const r2 = await provider.list('a');
t.is(r2.length, 30);
// contains only `a/b/xxx`
const r3 = await provider.list('a/b');
const r4 = await provider.list('a/b/');
t.is(r3.length, 20);
t.deepEqual(r3, r4);
// the prefix does not end with '/', so it matches both files and sub dirs
// contains all `a/b/t/xxx` plus the `a/b/t...` files whose names start with `t`
const r5 = await provider.list('a/b/t');
t.is(r5.length, 20);
});
test.only('delete', async t => {
const provider = createProvider();
const key = 'testKey';
const body = Buffer.from('testBody');
await provider.put(key, body);
await provider.delete(key);
await t.throwsAsync(() => fs.access(join(config.path, provider.bucket, key)));
});

View File

@@ -1,256 +0,0 @@
import {
accessSync,
constants,
createReadStream,
Dirent,
mkdirSync,
readdirSync,
readFileSync,
rmSync,
statSync,
writeFileSync,
} from 'node:fs';
import { join, parse, resolve } from 'node:path';
import { Logger } from '@nestjs/common';
import { Readable } from 'stream';
import { FsStorageConfig } from '../../../config/storage';
import {
BlobInputType,
GetObjectMetadata,
ListObjectsMetadata,
PutObjectMetadata,
StorageProvider,
} from './provider';
import { autoMetadata, toBuffer } from './utils';
function escapeKey(key: string): string {
// avoid '../' and './' in key
return key.replace(/\.?\.[/\\]/g, '%');
}
export class FsStorageProvider implements StorageProvider {
private readonly path: string;
private readonly logger: Logger;
constructor(
config: FsStorageConfig,
public readonly bucket: string
) {
this.path = resolve(config.path, bucket);
this.ensureAvailability();
this.logger = new Logger(`${FsStorageProvider.name}:${bucket}`);
}
async put(
key: string,
body: BlobInputType,
metadata: PutObjectMetadata = {}
): Promise<void> {
key = escapeKey(key);
const blob = await toBuffer(body);
// write object
this.writeObject(key, blob);
// write metadata
await this.writeMetadata(key, blob, metadata);
this.logger.verbose(`Object \`${key}\` put`);
}
async get(key: string): Promise<{
body?: Readable;
metadata?: GetObjectMetadata;
}> {
key = escapeKey(key);
try {
const metadata = this.readMetadata(key);
const stream = this.readObject(this.join(key));
this.logger.verbose(`Read object \`${key}\``);
return {
body: stream,
metadata,
};
} catch (e) {
this.logger.error(`Failed to read object \`${key}\``, e);
return {};
}
}
async list(prefix?: string): Promise<ListObjectsMetadata[]> {
// prefix cases:
// - `undefined`: list all objects
// - `a/b`: list objects under dir `a` with prefix `b`, `b` might be a dir under `a` as well.
// - `a/b/`: list objects under dir `a/b`
// read dir recursively and filter out '.metadata.json' files
let dir = this.path;
if (prefix) {
prefix = escapeKey(prefix);
const parts = prefix.split(/[/\\]/);
// for prefix `a/b/c`, move `a/b` to dir and `c` to key prefix
if (parts.length > 1) {
dir = join(dir, ...parts.slice(0, -1));
prefix = parts[parts.length - 1];
}
}
const results: ListObjectsMetadata[] = [];
async function getFiles(dir: string, prefix?: string): Promise<void> {
try {
const entries: Dirent[] = readdirSync(dir, { withFileTypes: true });
for (const entry of entries) {
const res = join(dir, entry.name);
if (entry.isDirectory()) {
if (!prefix || entry.name.startsWith(prefix)) {
await getFiles(res);
}
} else if (
(!prefix || entry.name.startsWith(prefix)) &&
!entry.name.endsWith('.metadata.json')
) {
const stat = statSync(res);
results.push({
key: res,
lastModified: stat.mtime,
size: stat.size,
});
}
}
} catch (e) {
// failed to read dir, stop recursion
}
}
await getFiles(dir, prefix);
// trim path with `this.path` prefix
results.forEach(r => (r.key = r.key.slice(this.path.length + 1)));
return results;
}
delete(key: string): Promise<void> {
key = escapeKey(key);
try {
rmSync(this.join(key), { force: true });
rmSync(this.join(`${key}.metadata.json`), { force: true });
} catch (e) {
throw new Error(`Failed to delete object \`${key}\``, {
cause: e,
});
}
this.logger.verbose(`Object \`${key}\` deleted`);
return Promise.resolve();
}
ensureAvailability() {
// check stats
const stats = statSync(this.path, {
throwIfNoEntry: false,
});
// not existing, create it
if (!stats) {
try {
mkdirSync(this.path, { recursive: true });
} catch (e) {
throw new Error(
`Failed to create target directory for fs storage provider: ${this.path}`,
{
cause: e,
}
);
}
} else if (stats.isDirectory()) {
// the target directory has already existed, check if it is readable & writable
try {
accessSync(this.path, constants.W_OK | constants.R_OK);
} catch (e) {
throw new Error(
`The target directory for fs storage provider has already existed, but it is not readable & writable: ${this.path}`,
{
cause: e,
}
);
}
} else if (stats.isFile()) {
throw new Error(
`The target directory for fs storage provider is a file: ${this.path}`
);
}
}
private join(...paths: string[]) {
return join(this.path, ...paths);
}
private readObject(file: string): Readable | undefined {
const state = statSync(file, { throwIfNoEntry: false });
if (state?.isFile()) {
return createReadStream(file);
}
return undefined;
}
private writeObject(key: string, blob: Buffer) {
const path = this.join(key);
mkdirSync(parse(path).dir, { recursive: true });
writeFileSync(path, blob);
}
private async writeMetadata(
key: string,
blob: Buffer,
raw: PutObjectMetadata
) {
try {
const metadata = await autoMetadata(blob, raw);
if (raw.checksumCRC32 && metadata.checksumCRC32 !== raw.checksumCRC32) {
throw new Error(
'The checksum of the uploaded file is not matched with the one you provide, the file may be corrupted and the uploading will not be processed.'
);
}
writeFileSync(
this.join(`${key}.metadata.json`),
JSON.stringify({
...metadata,
lastModified: Date.now(),
})
);
} catch (e) {
this.logger.warn(`Failed to write metadata of object \`${key}\``, e);
}
}
private readMetadata(key: string): GetObjectMetadata | undefined {
try {
const raw = JSON.parse(
readFileSync(this.join(`${key}.metadata.json`), {
encoding: 'utf-8',
})
);
return {
...raw,
lastModified: new Date(raw.lastModified),
expires: raw.expires ? new Date(raw.expires) : undefined,
};
} catch (e) {
this.logger.warn(`Failed to read metadata of object \`${key}\``, e);
return;
}
}
}

View File

@@ -1,34 +0,0 @@
import { AFFiNEStorageConfig, Storages } from '../../../config/storage';
import { FsStorageProvider } from './fs';
import type { StorageProvider } from './provider';
import { R2StorageProvider } from './r2';
import { S3StorageProvider } from './s3';
export function createStorageProvider(
config: AFFiNEStorageConfig,
storage: Storages
): StorageProvider {
const storageConfig = config.storages[storage];
const providerConfig = config.providers[storageConfig.provider] as any;
if (!providerConfig) {
throw new Error(
`Failed to create ${storageConfig.provider} storage, configuration not correctly set`
);
}
if (storageConfig.provider === 's3') {
return new S3StorageProvider(providerConfig, storageConfig.bucket);
}
if (storageConfig.provider === 'r2') {
return new R2StorageProvider(providerConfig, storageConfig.bucket);
}
if (storageConfig.provider === 'fs') {
return new FsStorageProvider(providerConfig, storageConfig.bucket);
}
throw new Error(`Unknown storage provider type: ${storageConfig.provider}`);
}
export type * from './provider';
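A minimal sketch of calling the factory above with the default fs-based config; the import paths here are illustrative, not exact:

import { getDefaultAFFiNEStorageConfig } from '../../../config/storage';
import { createStorageProvider } from './providers';

async function demo() {
  const config = getDefaultAFFiNEStorageConfig();
  // the default 'blob' storage maps to the fs provider with bucket 'blobs'
  const blobStorage = createStorageProvider(config, 'blob');
  await blobStorage.put('workspace-1/hello.txt', Buffer.from('hello'));
}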

View File

@@ -1,39 +0,0 @@
import type { Readable } from 'node:stream';
export interface GetObjectMetadata {
/**
* @default 'application/octet-stream'
*/
contentType: string;
contentLength: number;
lastModified: Date;
checksumCRC32: string;
}
export interface PutObjectMetadata {
contentType?: string;
contentLength?: number;
checksumCRC32?: string;
}
export interface ListObjectsMetadata {
key: string;
lastModified: Date;
size: number;
}
export type BlobInputType = Buffer | Readable | string;
export type BlobOutputType = Readable;
export interface StorageProvider {
put(
key: string,
body: BlobInputType,
metadata?: PutObjectMetadata
): Promise<void>;
get(
key: string
): Promise<{ body?: BlobOutputType; metadata?: GetObjectMetadata }>;
list(prefix?: string): Promise<ListObjectsMetadata[]>;
delete(key: string): Promise<void>;
}

View File

@@ -1,14 +0,0 @@
import { R2StorageConfig } from '../../../config/storage';
import { S3StorageProvider } from './s3';
export class R2StorageProvider extends S3StorageProvider {
constructor(config: R2StorageConfig, bucket: string) {
super(
{
...config,
endpoint: `https://${config.accountId}.r2.cloudflarestorage.com`,
},
bucket
);
}
}

View File

@@ -1,159 +0,0 @@
/* eslint-disable @typescript-eslint/no-non-null-assertion */
import { Readable } from 'node:stream';
import {
DeleteObjectCommand,
GetObjectCommand,
ListObjectsV2Command,
PutObjectCommand,
S3Client,
} from '@aws-sdk/client-s3';
import { Logger } from '@nestjs/common';
import { S3StorageConfig } from '../../../config/storage';
import {
BlobInputType,
GetObjectMetadata,
ListObjectsMetadata,
PutObjectMetadata,
StorageProvider,
} from './provider';
import { autoMetadata, toBuffer } from './utils';
export class S3StorageProvider implements StorageProvider {
logger: Logger;
client: S3Client;
constructor(
config: S3StorageConfig,
public readonly bucket: string
) {
this.client = new S3Client(config);
this.logger = new Logger(`${S3StorageProvider.name}:${bucket}`);
}
async put(
key: string,
body: BlobInputType,
metadata: PutObjectMetadata = {}
): Promise<void> {
const blob = await toBuffer(body);
metadata = await autoMetadata(blob, metadata);
try {
await this.client.send(
new PutObjectCommand({
Bucket: this.bucket,
Key: key,
Body: body,
// metadata
ContentType: metadata.contentType,
ContentLength: metadata.contentLength,
ChecksumCRC32: metadata.checksumCRC32,
})
);
this.logger.verbose(`Object \`${key}\` put`);
} catch (e) {
throw new Error(`Failed to put object \`${key}\``, {
cause: e,
});
}
}
async get(key: string): Promise<{
body?: Readable;
metadata?: GetObjectMetadata;
}> {
try {
const obj = await this.client.send(
new GetObjectCommand({
Bucket: this.bucket,
Key: key,
})
);
if (!obj.Body) {
this.logger.verbose(`Object \`${key}\` not found`);
return {};
}
this.logger.verbose(`Read object \`${key}\``);
return {
// @ts-expect-errors ignore browser response type `Blob`
body: obj.Body,
metadata: {
// always set when putting object
contentType: obj.ContentType!,
contentLength: obj.ContentLength!,
checksumCRC32: obj.ChecksumCRC32!,
lastModified: obj.LastModified!,
},
};
} catch (e) {
throw new Error(`Failed to read object \`${key}\``, {
cause: e,
});
}
}
async list(prefix?: string): Promise<ListObjectsMetadata[]> {
// continuationToken should be `string | undefined`,
// but TypeScript fails to infer the type correctly in the code below.
// Seems to be a bug in TypeScript
let continuationToken: any = undefined;
let hasMore = true;
let result: ListObjectsMetadata[] = [];
try {
while (hasMore) {
const listResult = await this.client.send(
new ListObjectsV2Command({
Bucket: this.bucket,
Prefix: prefix,
ContinuationToken: continuationToken,
})
);
if (listResult.Contents?.length) {
result = result.concat(
listResult.Contents.map(r => ({
key: r.Key!,
lastModified: r.LastModified!,
size: r.Size!,
}))
);
}
// has more items not listed
hasMore = !!listResult.IsTruncated;
continuationToken = listResult.NextContinuationToken;
}
this.logger.verbose(
`List ${result.length} objects with prefix \`${prefix}\``
);
return result;
} catch (e) {
throw new Error(`Failed to list objects with prefix \`${prefix}\``, {
cause: e,
});
}
}
async delete(key: string): Promise<void> {
try {
await this.client.send(
new DeleteObjectCommand({
Bucket: this.bucket,
Key: key,
})
);
} catch (e) {
throw new Error(`Failed to delete object \`${key}\``, {
cause: e,
});
}
}
}

View File

@@ -1,49 +0,0 @@
import { Readable } from 'node:stream';
import { crc32 } from '@node-rs/crc32';
import { fileTypeFromBuffer } from 'file-type';
import { getStreamAsBuffer } from 'get-stream';
import { BlobInputType, PutObjectMetadata } from './provider';
export async function toBuffer(input: BlobInputType): Promise<Buffer> {
return input instanceof Readable
? await getStreamAsBuffer(input)
: input instanceof Buffer
? input
: Buffer.from(input);
}
export async function autoMetadata(
blob: Buffer,
raw: PutObjectMetadata
): Promise<PutObjectMetadata> {
const metadata = {
...raw,
};
try {
// length
if (!metadata.contentLength) {
metadata.contentLength = blob.length;
}
// checksum
if (!metadata.checksumCRC32) {
metadata.checksumCRC32 = crc32(blob).toString(16);
}
// mime type
if (!metadata.contentType) {
try {
const typeResult = await fileTypeFromBuffer(blob);
metadata.contentType = typeResult?.mime ?? 'application/octet-stream';
} catch {
// ignore
}
}
return metadata;
} catch (e) {
return metadata;
}
}
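A small usage sketch for the helpers above (the payload is placeholder data):

import { autoMetadata, toBuffer } from './utils';

async function demo() {
  const blob = await toBuffer('hello world');
  const metadata = await autoMetadata(blob, { contentType: 'text/plain' });
  // contentLength and checksumCRC32 are filled in automatically
  console.log(metadata);
}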

View File

@@ -1,30 +0,0 @@
import { Injectable } from '@nestjs/common';
import { Config } from '../../../config';
import {
BlobInputType,
createStorageProvider,
PutObjectMetadata,
StorageProvider,
} from '../providers';
@Injectable()
export class AvatarStorage {
public readonly provider: StorageProvider;
constructor({ storage }: Config) {
this.provider = createStorageProvider(storage, 'avatar');
}
put(key: string, blob: BlobInputType, metadata?: PutObjectMetadata) {
return this.provider.put(key, blob, metadata);
}
get(key: string) {
return this.provider.get(key);
}
async delete(key: string) {
return this.provider.delete(key);
}
}

View File

@@ -1,45 +0,0 @@
import { Injectable } from '@nestjs/common';
import { Config } from '../../../config';
import {
BlobInputType,
createStorageProvider,
StorageProvider,
} from '../providers';
@Injectable()
export class WorkspaceBlobStorage {
public readonly provider: StorageProvider;
constructor({ storage }: Config) {
this.provider = createStorageProvider(storage, 'blob');
}
put(workspaceId: string, key: string, blob: BlobInputType) {
return this.provider.put(`${workspaceId}/${key}`, blob);
}
get(workspaceId: string, key: string) {
return this.provider.get(`${workspaceId}/${key}`);
}
async list(workspaceId: string) {
const blobs = await this.provider.list(workspaceId + '/');
blobs.forEach(item => {
// trim workspace prefix
item.key = item.key.slice(workspaceId.length + 1);
});
return blobs;
}
async delete(workspaceId: string, key: string) {
return this.provider.delete(`${workspaceId}/${key}`);
}
async totalSize(workspaceId: string) {
const blobs = await this.list(workspaceId);
// how could we ignore the ones that get soft-deleted?
return blobs.reduce((acc, item) => acc + item.size, 0);
}
}

View File

@@ -1,2 +0,0 @@
export { AvatarStorage } from './avatar';
export { WorkspaceBlobStorage } from './blob';

View File

@@ -1,7 +1,9 @@
import 'reflect-metadata';
import 'dotenv/config';
import './affine';
import './affine.config';
import { getDefaultAFFiNEConfig } from './config/default';
globalThis.AFFiNE = getDefaultAFFiNEConfig();
if (process.env.NODE_ENV === 'development') {
console.log('AFFiNE Config:', globalThis.AFFiNE);

View File

@@ -8,9 +8,6 @@ type ServerConfigType {
"""server flavor"""
flavor: String!
"""server base url"""
baseUrl: String!
}
type UserQuotaHumanReadable {

View File

@@ -35,7 +35,7 @@
"version": "napi version"
},
"devDependencies": {
"@napi-rs/cli": "3.0.0-alpha.29",
"@napi-rs/cli": "3.0.0-alpha.15",
"lib0": "^0.2.87",
"nx": "^17.1.3",
"nx-cloud": "^16.5.2",

View File

@@ -7,7 +7,7 @@
},
"devDependencies": {
"@types/debug": "^4.1.9",
"vitest": "1.1.1"
"vitest": "1.0.4"
},
"version": "0.11.0"
}

View File

@@ -3,11 +3,11 @@
"private": true,
"type": "module",
"devDependencies": {
"@blocksuite/global": "0.11.0-nightly-202401020419-752a5b8",
"@blocksuite/store": "0.11.0-nightly-202401020419-752a5b8",
"@blocksuite/global": "0.11.0-nightly-202312200102-8254dc9",
"@blocksuite/store": "0.11.0-nightly-202312200102-8254dc9",
"react": "18.2.0",
"react-dom": "18.2.0",
"vitest": "1.1.1",
"vitest": "1.0.4",
"zod": "^3.22.4"
},
"exports": {

View File

@@ -12,6 +12,8 @@ export const blockSuiteFeatureFlags = z.object({
});
export const runtimeFlagsSchema = z.object({
enablePlugin: z.boolean(),
builtinPlugins: z.array(z.string()),
enableTestProperties: z.boolean(),
enableBroadcastChannelProvider: z.boolean(),
enableDebugPage: z.boolean(),

View File

@@ -1,3 +1,5 @@
import type { PropsWithChildren, ReactNode } from 'react';
export enum WorkspaceSubPath {
ALL = 'all',
Collection = 'collection',
@@ -6,6 +8,18 @@ export enum WorkspaceSubPath {
SHARED = 'shared',
}
export enum ReleaseType {
// if workspace is not released yet, we will not show it in the workspace list
UNRELEASED = 'unreleased',
STABLE = 'stable',
}
export enum LoadPriority {
HIGH = 1,
MEDIUM = 2,
LOW = 3,
}
export enum WorkspaceFlavour {
/**
* New AFFiNE Cloud Workspace using Nest.js Server.
@@ -13,3 +27,41 @@ export enum WorkspaceFlavour {
AFFINE_CLOUD = 'affine-cloud',
LOCAL = 'local',
}
export const settingPanel = {
General: 'general',
Collaboration: 'collaboration',
Publish: 'publish',
Export: 'export',
Sync: 'sync',
} as const;
export const settingPanelValues = Object.values(settingPanel);
export type SettingPanel = (typeof settingPanel)[keyof typeof settingPanel];
export type WorkspaceHeaderProps = {
rightSlot?: ReactNode;
currentEntry:
| {
subPath: WorkspaceSubPath;
}
| {
pageId: string;
};
};
interface FC<P> {
(props: P): ReactNode;
}
export interface WorkspaceUISchema {
Provider: FC<PropsWithChildren>;
LoginCard?: FC<object>;
}
export interface WorkspaceAdapter<Flavour extends WorkspaceFlavour> {
releaseType: ReleaseType;
flavour: Flavour;
// The Adapter will be loaded according to the priority
loadPriority: LoadPriority;
UI: WorkspaceUISchema;
}
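For illustration, an adapter conforming to the interface above might be declared as follows. This is a hedged sketch with imports of the types above omitted; the pass-through Provider is a stand-in, not the real local adapter:

import type { PropsWithChildren } from 'react';

const localAdapter: WorkspaceAdapter<WorkspaceFlavour.LOCAL> = {
  releaseType: ReleaseType.STABLE,
  flavour: WorkspaceFlavour.LOCAL,
  loadPriority: LoadPriority.LOW,
  UI: {
    // render children as-is; a real adapter would wire up its providers here
    Provider: ({ children }: PropsWithChildren) => children,
  },
};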

View File

@@ -1,21 +1,70 @@
{
"name": "@toeverything/infra",
"type": "module",
"private": true,
"module": "./dist/index.js",
"main": "./dist/index.cjs",
"types": "./dist/src/index.d.ts",
"exports": {
"./blocksuite": "./src/blocksuite/index.ts",
"./command": "./src/command/index.ts",
"./atom": "./src/atom/index.ts",
"./app-config-storage": "./src/app-config-storage.ts",
".": "./src/index.ts"
".": {
"types": "./dist/src/index.d.ts",
"import": "./dist/index.js",
"require": "./dist/index.cjs"
},
"./blocksuite": {
"types": "./dist/src/blocksuite/index.d.ts",
"import": "./dist/blocksuite.js",
"require": "./dist/blocksuite.cjs"
},
"./command": {
"types": "./dist/src/command/index.d.ts",
"import": "./dist/command.js",
"require": "./dist/command.cjs"
},
"./core/*": {
"types": "./dist/src/core/*.d.ts",
"import": "./dist/core/*.js",
"require": "./dist/core/*.cjs"
},
"./preload/*": {
"types": "./dist/src/preload/*.d.ts",
"import": "./dist/preload/*.js",
"require": "./dist/preload/*.cjs"
},
"./atom": {
"type": "./dist/src/atom.d.ts",
"import": "./dist/atom.js",
"require": "./dist/atom.cjs"
},
"./type": {
"type": "./dist/src/type.d.ts",
"import": "./dist/type.js",
"require": "./dist/type.cjs"
},
"./app-config-storage": {
"type": "./dist/src/app-config-storage.d.ts",
"import": "./dist/app-config-storage.js",
"require": "./dist/app-config-storage.cjs"
},
"./__internal__/*": {
"type": "./dist/src/__internal__/*.d.ts",
"import": "./dist/__internal__/*.js",
"require": "./dist/__internal__/*.cjs"
}
},
"files": [
"dist"
],
"scripts": {
"build": "vite build",
"dev": "vite build --watch"
},
"dependencies": {
"@affine/debug": "workspace:*",
"@affine/env": "workspace:*",
"@affine/templates": "workspace:*",
"@blocksuite/blocks": "0.11.0-nightly-202401020419-752a5b8",
"@blocksuite/global": "0.11.0-nightly-202401020419-752a5b8",
"@blocksuite/store": "0.11.0-nightly-202401020419-752a5b8",
"@affine/sdk": "workspace:*",
"@blocksuite/blocks": "0.11.0-nightly-202312200102-8254dc9",
"@blocksuite/global": "0.11.0-nightly-202312200102-8254dc9",
"@blocksuite/store": "0.11.0-nightly-202312200102-8254dc9",
"jotai": "^2.5.1",
"jotai-effect": "^0.2.3",
"tinykeys": "^2.1.0",
@@ -24,15 +73,17 @@
"devDependencies": {
"@affine-test/fixtures": "workspace:*",
"@affine/templates": "workspace:*",
"@blocksuite/lit": "0.11.0-nightly-202401020419-752a5b8",
"@blocksuite/presets": "0.11.0-nightly-202401020419-752a5b8",
"@blocksuite/lit": "0.11.0-nightly-202312200102-8254dc9",
"@blocksuite/presets": "0.11.0-nightly-202312200102-8254dc9",
"@testing-library/react": "^14.0.0",
"async-call-rpc": "^6.3.1",
"electron": "link:../../frontend/electron/node_modules/electron",
"nanoid": "^5.0.3",
"react": "^18.2.0",
"rxjs": "^7.8.1",
"vite": "^5.0.6",
"vite-plugin-dts": "3.7.0",
"vitest": "1.1.1",
"vite-plugin-dts": "3.6.0",
"vitest": "1.0.4",
"yjs": "^13.6.10"
},
"peerDependencies": {

View File

@@ -0,0 +1,3 @@
/* eslint-disable */
// @ts-ignore
export * from '../dist/src/preload/electron';

View File

@@ -0,0 +1,3 @@
/* eslint-disable */
/// <reference types="../dist/preload/electron.d.ts" />
export * from '../dist/preload/electron.js';

View File

@@ -0,0 +1,18 @@
{
"name": "infra",
"$schema": "../../../node_modules/nx/schemas/project-schema.json",
"projectType": "library",
"sourceRoot": "packages/common/src",
"targets": {
"build": {
"executor": "nx:run-script",
"dependsOn": ["^build"],
"inputs": ["{projectRoot}/**/*"],
"options": {
"script": "build"
},
"outputs": ["{projectRoot}/dist"]
}
},
"tags": ["infra"]
}

View File

@@ -0,0 +1,54 @@
import type { CallbackMap } from '@affine/sdk/entry';
import { assertExists } from '@blocksuite/global/utils';
import { atomWithStorage } from 'jotai/utils';
import { atom } from 'jotai/vanilla';
import type { z } from 'zod';
import type { packageJsonOutputSchema } from '../type.js';
export const builtinPluginPaths = new Set(runtimeConfig.builtinPlugins);
const pluginCleanupMap = new Map<string, Set<() => void>>();
export function addCleanup(
pluginName: string,
cleanup: () => void
): () => void {
if (!pluginCleanupMap.has(pluginName)) {
pluginCleanupMap.set(pluginName, new Set());
}
const cleanupSet = pluginCleanupMap.get(pluginName);
assertExists(cleanupSet);
cleanupSet.add(cleanup);
return () => {
cleanupSet.delete(cleanup);
};
}
export function invokeCleanup(pluginName: string) {
pluginCleanupMap.get(pluginName)?.forEach(cleanup => cleanup());
pluginCleanupMap.delete(pluginName);
}
export const pluginPackageJson = atom<
z.infer<typeof packageJsonOutputSchema>[]
>([]);
export const enabledPluginAtom = atomWithStorage('affine-enabled-plugin', [
'@affine/image-preview-plugin',
'@affine/outline-plugin',
]);
export const pluginHeaderItemAtom = atom<
Record<string, CallbackMap['headerItem']>
>({});
export const pluginSettingAtom = atom<Record<string, CallbackMap['setting']>>(
{}
);
export const pluginEditorAtom = atom<Record<string, CallbackMap['editor']>>({});
export const pluginWindowAtom = atom<
Record<string, (root: HTMLElement) => () => void>
>({});

View File

@@ -1,2 +1,8 @@
import { atom } from 'jotai';
export const loadedPluginNameAtom = atom<string[]>([]);
export * from './layout';
export * from './root-store';
export * from './settings';
export * from './workspace';

View File

@@ -0,0 +1,34 @@
import type { ExpectedLayout } from '@affine/sdk/entry';
import { atom } from 'jotai';
const contentLayoutBaseAtom = atom<ExpectedLayout>('editor');
type SetStateAction<Value> = Value | ((prev: Value) => Value);
export const contentLayoutAtom = atom<
ExpectedLayout,
[SetStateAction<ExpectedLayout>],
void
>(
get => get(contentLayoutBaseAtom),
(_, set, layout) => {
set(contentLayoutBaseAtom, prev => {
let setV: (prev: ExpectedLayout) => ExpectedLayout;
if (typeof layout !== 'function') {
setV = () => layout;
} else {
setV = layout;
}
const nextValue = setV(prev);
if (nextValue === 'editor') {
return nextValue;
}
if (nextValue.first !== 'editor') {
throw new Error('The first element of the layout should be editor.');
}
if (nextValue.splitPercentage && nextValue.splitPercentage < 70) {
throw new Error('The split percentage should be greater than 70.');
}
return nextValue;
});
}
);
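A hedged sketch of driving this atom from the root store; the 'outline' pane name is invented for illustration:

import { getDefaultStore } from 'jotai';

const store = getDefaultStore();
// open a side pane next to the editor; a splitPercentage below 70 would throw
store.set(contentLayoutAtom, {
  direction: 'horizontal',
  first: 'editor',
  second: 'outline',
  splitPercentage: 75,
});
// collapse back to the editor-only layout
store.set(contentLayoutAtom, 'editor');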

View File

@@ -72,13 +72,12 @@ const appSettingEffect = atomEffect(get => {
// some values in settings should be synced into electron side
if (environment.isDesktop) {
console.log('set config', settings);
// this API's type lives in @affine/electron-api, but importing it would create a circular dependency with this package, so use `any` here
(window as any).apis?.updater
window.apis?.updater
.setConfig({
autoCheckUpdate: settings.autoCheckUpdate,
autoDownloadUpdate: settings.autoDownloadUpdate,
})
.catch((err: any) => {
.catch(err => {
console.error(err);
});
}

View File

@@ -0,0 +1,3 @@
import { atom } from 'jotai';
export const currentPageIdAtom = atom<string | null>(null);

View File

@@ -1,17 +1,10 @@
import type {
JobMiddleware,
Page,
PageMeta,
PageSnapshot,
Workspace,
WorkspaceInfoSnapshot,
} from '@blocksuite/store';
import { Job } from '@blocksuite/store';
import { assertExists } from '@blocksuite/global/utils';
import type { Page, PageMeta, Workspace } from '@blocksuite/store';
import type { createStore, WritableAtom } from 'jotai/vanilla';
import { nanoid } from 'nanoid';
import { Map as YMap } from 'yjs';
import { getLatestVersions } from '../migration/blocksuite';
import { replaceIdMiddleware } from './middleware';
export async function initEmptyPage(page: Page, title?: string) {
await page.load(() => {
@@ -29,10 +22,7 @@ export async function initEmptyPage(page: Page, title?: string) {
*/
export async function buildShowcaseWorkspace(
workspace: Workspace,
{
store,
atoms,
}: {
options: {
atoms: {
pageMode: WritableAtom<
undefined,
@@ -43,72 +33,239 @@ export async function buildShowcaseWorkspace(
store: ReturnType<typeof createStore>;
}
) {
const { onboarding } = await import('@affine/templates');
const info = onboarding['info.json'] as WorkspaceInfoSnapshot;
const migrationMiddleware: JobMiddleware = ({ slots, workspace }) => {
slots.afterImport.on(payload => {
if (payload.type === 'page') {
workspace.schema.upgradePage(
info?.pageVersion ?? 0,
info?.blockVersions ?? {},
payload.page.spaceDoc
);
}
});
const prototypes = {
tags: {
options: [
{
id: 'icg1n5UdkP',
value: 'Travel',
color: 'var(--affine-tag-gray)',
},
{
id: 'Oe5dSe1DDJ',
value: 'Quick summary',
color: 'var(--affine-tag-green)',
},
{
id: 'g1L5dXKctL',
value: 'OKR',
color: 'var(--affine-tag-purple)',
},
{
id: 'q3mceOl_zi',
value: 'Streamline your workflow',
color: 'var(--affine-tag-teal)',
},
{
id: 'ze07JVwBu4',
value: 'Plan',
color: 'var(--affine-tag-teal)',
},
{
id: '8qcYPCTK0h',
value: 'Review',
color: 'var(--affine-tag-orange)',
},
{
id: 'wg-fBtd2eI',
value: 'Engage',
color: 'var(--affine-tag-pink)',
},
{
id: 'QYFD_HeQc-',
value: 'Create',
color: 'var(--affine-tag-blue)',
},
{
id: 'ZHBa2NtdSo',
value: 'Learn',
color: 'var(--affine-tag-yellow)',
},
],
},
};
workspace.meta.setProperties(prototypes);
const edgelessPage1 = nanoid();
const { store, atoms } = options;
store.set(atoms.pageMode, edgelessPage1, 'edgeless');
const job = new Job({
workspace,
middlewares: [replaceIdMiddleware, migrationMiddleware],
const pageMetas = {
'9f6f3c04-cf32-470c-9648-479dc838f10e': {
createDate: 1691548231530,
tags: ['ZHBa2NtdSo', 'QYFD_HeQc-', 'wg-fBtd2eI'],
updatedDate: 1691676331623,
favorite: true,
jumpOnce: true,
},
'0773e198-5de0-45d4-a35e-de22ea72b96b': {
createDate: 1691548220794,
tags: [],
updatedDate: 1691676775642,
favorite: false,
},
'59b140eb-4449-488f-9eeb-42412dcc044e': {
createDate: 1691551731225,
tags: [],
updatedDate: 1691654611175,
favorite: false,
},
'7217fbe2-61db-4a91-93c6-ad5c800e5a43': {
createDate: 1691552082822,
tags: [],
updatedDate: 1691654606912,
favorite: false,
},
'6eb43ea8-8c11-456d-bb1d-5193937961ab': {
createDate: 1691552090989,
tags: [],
updatedDate: 1691646748171,
favorite: false,
},
'3ddc8a4f-62c7-4fd4-8064-9ed9f61e437a': {
createDate: 1691564303138,
tags: [],
updatedDate: 1691646845195,
},
'22163830-8252-43fe-b62d-fd9bbeaa4caa': {
createDate: 1691574859042,
tags: [],
updatedDate: 1691648159371,
},
'b7a9e1bc-e205-44aa-8dad-7e328269d00b': {
createDate: 1691575011078,
tags: ['8qcYPCTK0h'],
updatedDate: 1691645074511,
favorite: false,
},
'646305d9-93e0-48df-bb92-d82944ceb5a3': {
createDate: 1691634722239,
tags: ['ze07JVwBu4'],
updatedDate: 1691647069662,
favorite: false,
},
'0350509d-8702-4797-b4d7-168f5e9359c7': {
createDate: 1691635388447,
tags: ['Oe5dSe1DDJ'],
updatedDate: 1691645873930,
},
'aa02af3c-5c5c-4856-b7ce-947ad17331f3': {
createDate: 1691636192263,
tags: ['q3mceOl_zi', 'g1L5dXKctL'],
updatedDate: 1691645102104,
},
} satisfies Record<string, Partial<PageMeta>>;
const data = [
[
'9f6f3c04-cf32-470c-9648-479dc838f10e',
import('@affine/templates/v1/getting-started.json'),
nanoid(),
],
[
'0773e198-5de0-45d4-a35e-de22ea72b96b',
import('@affine/templates/v1/preloading.json'),
edgelessPage1,
],
[
'59b140eb-4449-488f-9eeb-42412dcc044e',
import('@affine/templates/v1/template-galleries.json'),
nanoid(),
],
[
'7217fbe2-61db-4a91-93c6-ad5c800e5a43',
import('@affine/templates/v1/personal-home.json'),
nanoid(),
],
[
'6eb43ea8-8c11-456d-bb1d-5193937961ab',
import('@affine/templates/v1/working-home.json'),
nanoid(),
],
[
'3ddc8a4f-62c7-4fd4-8064-9ed9f61e437a',
import('@affine/templates/v1/personal-project-management.json'),
nanoid(),
],
[
'22163830-8252-43fe-b62d-fd9bbeaa4caa',
import('@affine/templates/v1/personal-knowledge-management.json'),
nanoid(),
],
[
'b7a9e1bc-e205-44aa-8dad-7e328269d00b',
import('@affine/templates/v1/annual-performance-review.json'),
nanoid(),
],
[
'646305d9-93e0-48df-bb92-d82944ceb5a3',
import('@affine/templates/v1/brief-event-planning.json'),
nanoid(),
],
[
'0350509d-8702-4797-b4d7-168f5e9359c7',
import('@affine/templates/v1/meeting-summary.json'),
nanoid(),
],
[
'aa02af3c-5c5c-4856-b7ce-947ad17331f3',
import('@affine/templates/v1/okr-template.json'),
nanoid(),
],
] as const;
const idMap = await Promise.all(data).then(async data => {
return data.reduce<Record<string, string>>(
(record, currentValue) => {
const [oldId, _, newId] = currentValue;
record[oldId] = newId;
return record;
},
{} as Record<string, string>
);
});
job.snapshotToWorkspaceInfo(info);
// for now all onboarding assets are considered served via CDN
// hack assets so that every blob exists
// @ts-expect-error - rethinking API
job._assetsManager.writeToBlob = async () => {};
const pageSnapshots: PageSnapshot[] = Object.entries(onboarding)
.filter(([key]) => {
return key.endsWith('snapshot.json');
})
.map(([_, value]) => value as unknown as PageSnapshot);
await Promise.all(
pageSnapshots.map(snapshot => {
return job.snapshotToPage(snapshot);
})
);
// Import pages one by one to prevent a workspace meta race condition.
for (const [id, promise, newId] of data) {
const { default: template } = await promise;
let json = JSON.stringify(template);
Object.entries(idMap).forEach(([oldId, newId]) => {
json = json.replaceAll(oldId, newId);
});
json = JSON.parse(json);
await workspace
.importPageSnapshot(structuredClone(json), newId)
.catch(error => {
console.error('error importing page', id, error);
});
const page = workspace.getPage(newId);
assertExists(page);
await page.load();
workspace.schema.upgradePage(
0,
{
'affine:note': 1,
'affine:bookmark': 1,
'affine:database': 2,
'affine:divider': 1,
'affine:image': 1,
'affine:list': 1,
'affine:code': 1,
'affine:page': 2,
'affine:paragraph': 1,
'affine:surface': 3,
},
page.spaceDoc
);
}
// The showcase building creates multiple pages at once and may skip the version writing.
// https://github.com/toeverything/blocksuite/blob/master/packages/store/src/workspace/page.ts#L662
workspace.doc.getMap('meta').set('pageVersion', 2);
const newVersions = getLatestVersions(workspace.schema);
workspace.doc
.getMap('meta')
.set('blockVersions', new YMap(Object.entries(newVersions)));
// todo: find better way to do the following
// perhaps put them into middleware?
{
// the "AFFiNE - not just a note-taking app" page should be set to edgeless mode
const edgelessPage1 = (workspace.meta.pages as PageMeta[])?.find(
p => p.title === 'AFFiNE - not just a note-taking app'
)?.id;
if (edgelessPage1) {
store.set(atoms.pageMode, edgelessPage1, 'edgeless');
}
// should jump to "Getting Started" by default
const gettingStartedPage = (workspace.meta.pages as PageMeta[])?.find(p =>
p.title.startsWith('Getting Started')
)?.id;
if (gettingStartedPage) {
workspace.setPageMeta(gettingStartedPage, {
jumpOnce: true,
});
}
}
Object.entries(pageMetas).forEach(([oldId, meta]) => {
const newId = idMap[oldId];
workspace.setPageMeta(newId, meta);
});
}
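The id-remapping step above boils down to a plain string substitution over each serialized template before it is imported; roughly, with made-up ids:

const idMap: Record<string, string> = { 'old-page-id': 'new-page-id' };
const template = { meta: { id: 'old-page-id' } };

let json = JSON.stringify(template);
for (const [oldId, newId] of Object.entries(idMap)) {
  json = json.replaceAll(oldId, newId);
}
const remapped = JSON.parse(json); // { meta: { id: 'new-page-id' } }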

View File

@@ -1,142 +0,0 @@
/* eslint-disable @typescript-eslint/ban-ts-comment */
/* eslint-disable @typescript-eslint/no-restricted-imports */
/* eslint-disable @typescript-eslint/no-non-null-assertion */
// @ts-nocheck
// TODO: remove this file after blocksuite exposed it
import type {
DatabaseBlockModel,
ListBlockModel,
ParagraphBlockModel,
} from '@blocksuite/blocks/dist/models.js';
import { assertExists } from '@blocksuite/global/utils';
import type { DeltaOperation, JobMiddleware } from '@blocksuite/store';
export const replaceIdMiddleware: JobMiddleware = ({ slots, workspace }) => {
const idMap = new Map<string, string>();
slots.afterImport.on(payload => {
if (
payload.type === 'block' &&
payload.snapshot.flavour === 'affine:database'
) {
const model = payload.model as DatabaseBlockModel;
Object.keys(model.cells).forEach(cellId => {
if (idMap.has(cellId)) {
model.cells[idMap.get(cellId)!] = model.cells[cellId];
delete model.cells[cellId];
}
});
}
// replace LinkedPage pageId with new id in paragraph blocks
if (
payload.type === 'block' &&
['affine:paragraph', 'affine:list'].includes(payload.snapshot.flavour)
) {
const model = payload.model as ParagraphBlockModel | ListBlockModel;
let prev = 0;
const delta: DeltaOperation[] = [];
for (const d of model.text.toDelta()) {
if (d.attributes?.reference?.pageId) {
if (prev > 0) {
delta.push({ retain: prev });
}
delta.push({
retain: d.insert.length,
attributes: {
reference: {
...d.attributes.reference,
pageId: idMap.get(d.attributes.reference.pageId)!,
},
},
});
prev = 0;
} else {
prev += d.insert.length;
}
}
if (delta.length > 0) {
model.text.applyDelta(delta);
}
}
});
slots.beforeImport.on(payload => {
if (payload.type === 'page') {
const newId = workspace.idGenerator('page');
idMap.set(payload.snapshot.meta.id, newId);
payload.snapshot.meta.id = newId;
return;
}
if (payload.type === 'block') {
const { snapshot } = payload;
if (snapshot.flavour === 'affine:page') {
const index = snapshot.children.findIndex(
c => c.flavour === 'affine:surface'
);
if (index !== -1) {
const [surface] = snapshot.children.splice(index, 1);
snapshot.children.push(surface);
}
}
const original = snapshot.id;
let newId: string;
if (idMap.has(original)) {
newId = idMap.get(original)!;
} else {
newId = workspace.idGenerator('block');
idMap.set(original, newId);
}
snapshot.id = newId;
if (snapshot.flavour === 'affine:surface') {
// Generate new IDs for images and frames in advance.
snapshot.children.forEach(child => {
const original = child.id;
if (idMap.has(original)) {
newId = idMap.get(original)!;
} else {
newId = workspace.idGenerator('block');
idMap.set(original, newId);
}
});
Object.entries(
snapshot.props.elements as Record<string, Record<string, unknown>>
).forEach(([_, value]) => {
switch (value.type) {
case 'connector': {
let connection = value.source as Record<string, string>;
if (idMap.has(connection.id)) {
const newId = idMap.get(connection.id);
assertExists(newId, 'reference id must exist');
connection.id = newId;
}
connection = value.target as Record<string, string>;
if (idMap.has(connection.id)) {
const newId = idMap.get(connection.id);
assertExists(newId, 'reference id must exist');
connection.id = newId;
}
break;
}
case 'group': {
const json = value.children.json as Record<string, unknown>;
Object.entries(json).forEach(([key, value]) => {
if (idMap.has(key)) {
delete json[key];
const newKey = idMap.get(key);
assertExists(newKey, 'reference id must exist');
json[newKey] = value;
}
});
break;
}
default:
break;
}
});
}
}
});
};

View File

@@ -28,12 +28,7 @@ export async function migratePages(
const oldVersions = versions?.toJSON() ?? {};
spaces.forEach((space: YDoc) => {
try {
// Catch page upgrade error to avoid blocking the whole workspace migration.
schema.upgradePage(0, oldVersions, space);
} catch (e) {
console.error(e);
}
schema.upgradePage(0, oldVersions, space);
});
schema.upgradeWorkspace(rootDoc);

View File

@@ -11,8 +11,7 @@ export enum MigrationPoint {
}
export function checkWorkspaceCompatibility(
workspace: Workspace,
isCloud: boolean
workspace: Workspace
): MigrationPoint | null {
// check if there is any key starts with 'space:' on root doc
const spaceMetaObj = workspace.doc.share.get('space:meta') as
@@ -21,9 +20,7 @@ export function checkWorkspaceCompatibility(
const docKeys = Array.from(workspace.doc.share.keys());
const haveSpaceMeta = !!spaceMetaObj && spaceMetaObj.size > 0;
const haveLegacySpace = docKeys.some(key => key.startsWith('space:'));
// DON'T UPGRADE SUBDOC ON CLOUD
if (!isCloud && (haveSpaceMeta || haveLegacySpace)) {
if (haveSpaceMeta || haveLegacySpace) {
return MigrationPoint.SubDoc;
}

View File

@@ -0,0 +1,74 @@
/**
* The MIT License (MIT)
*
* Copyright (c) 2018 Andy Wermke
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
export type EventMap = {
[key: string]: (...args: any[]) => void;
};
/**
* Type-safe event emitter.
*
* Use it like this:
*
* ```typescript
* type MyEvents = {
* error: (error: Error) => void;
* message: (from: string, content: string) => void;
* }
*
* const myEmitter = new EventEmitter() as TypedEmitter<MyEvents>;
*
* myEmitter.emit("error", "x") // <- Will catch this type error;
* ```
*
* Lifecycle:
* invoke -> handle -> emit -> on/once
*/
export interface TypedEventEmitter<Events extends EventMap> {
addListener<E extends keyof Events>(event: E, listener: Events[E]): this;
on<E extends keyof Events>(event: E, listener: Events[E]): this;
once<E extends keyof Events>(event: E, listener: Events[E]): this;
off<E extends keyof Events>(event: E, listener: Events[E]): this;
removeAllListeners<E extends keyof Events>(event?: E): this;
removeListener<E extends keyof Events>(event: E, listener: Events[E]): this;
emit<E extends keyof Events>(
event: E,
...args: Parameters<Events[E]>
): boolean;
// The sloppy `eventNames()` return type is to mitigate type incompatibilities - see #5
eventNames(): (keyof Events | string | symbol)[];
rawListeners<E extends keyof Events>(event: E): Events[E][];
listeners<E extends keyof Events>(event: E): Events[E][];
listenerCount<E extends keyof Events>(event: E): number;
handle<E extends keyof Events>(event: E, handler: Events[E]): this;
invoke<E extends keyof Events>(
event: E,
...args: Parameters<Events[E]>
): Promise<ReturnType<Events[E]>>;
getMaxListeners(): number;
setMaxListeners(maxListeners: number): this;
}

View File

@@ -0,0 +1,57 @@
import type {
ClipboardHandlers,
ConfigStorageHandlers,
DBHandlers,
DebugHandlers,
DialogHandlers,
ExportHandlers,
UIHandlers,
UpdaterHandlers,
WorkspaceHandlers,
} from './type.js';
import { HandlerManager } from './type.js';
export abstract class DBHandlerManager extends HandlerManager<
'db',
DBHandlers
> {}
export abstract class DebugHandlerManager extends HandlerManager<
'debug',
DebugHandlers
> {}
export abstract class DialogHandlerManager extends HandlerManager<
'dialog',
DialogHandlers
> {}
export abstract class UIHandlerManager extends HandlerManager<
'ui',
UIHandlers
> {}
export abstract class ClipboardHandlerManager extends HandlerManager<
'clipboard',
ClipboardHandlers
> {}
export abstract class ExportHandlerManager extends HandlerManager<
'export',
ExportHandlers
> {}
export abstract class UpdaterHandlerManager extends HandlerManager<
'updater',
UpdaterHandlers
> {}
export abstract class WorkspaceHandlerManager extends HandlerManager<
'workspace',
WorkspaceHandlers
> {}
export abstract class ConfigStorageHandlerManager extends HandlerManager<
'configStorage',
ConfigStorageHandlers
> {}

View File

@@ -1,4 +1,2 @@
export * from './app-config-storage';
export * from './atom';
export * from './blocksuite';
export * from './command';
export * from './handler.js';
export * from './type.js';

View File

@@ -1,50 +1,37 @@
// Please add modules to `external` in `rollupOptions` to avoid wrong bundling.
import { AsyncCall, type EventBasedChannel } from 'async-call-rpc';
import type { app, dialog, shell } from 'electron';
import { ipcRenderer } from 'electron';
import { Subject } from 'rxjs';
import { z } from 'zod';
import type {
ExposedMeta,
HelperToRenderer,
RendererToHelper,
} from '../shared/type';
export interface ExposedMeta {
handlers: [string, string[]][];
events: [string, string[]][];
}
export const affine = {
ipcRenderer: {
send(channel: string, ...args: any[]) {
ipcRenderer.send(channel, ...args);
},
// render <-> helper
export interface RendererToHelper {
postEvent: (channel: string, ...args: any[]) => void;
}
invoke(channel: string, ...args: any[]) {
return ipcRenderer.invoke(channel, ...args);
},
export interface HelperToRenderer {
[key: string]: (...args: any[]) => Promise<any>;
}
on(
channel: string,
listener: (event: Electron.IpcRendererEvent, ...args: any[]) => void
) {
ipcRenderer.on(channel, listener);
return this;
},
// helper <-> main
export interface HelperToMain {
getMeta: () => ExposedMeta;
}
once(
channel: string,
listener: (event: Electron.IpcRendererEvent, ...args: any[]) => void
) {
ipcRenderer.once(channel, listener);
return this;
},
removeListener(
channel: string,
listener: (event: Electron.IpcRendererEvent, ...args: any[]) => void
) {
ipcRenderer.removeListener(channel, listener);
return this;
},
},
};
export type MainToHelper = Pick<
typeof dialog & typeof shell & typeof app,
| 'showOpenDialog'
| 'showSaveDialog'
| 'openExternal'
| 'showItemInFolder'
| 'getPath'
>;
export function getElectronAPIs() {
const mainAPIs = getMainAPIs();

View File

@@ -0,0 +1,295 @@
import type { ExpectedLayout } from '@affine/sdk/entry';
import type Buffer from 'buffer';
import type { WritableAtom } from 'jotai';
import { z } from 'zod';
import type { AppConfigSchema } from './app-config-storage.js';
import type { TypedEventEmitter } from './core/event-emitter.js';
type Buffer = Buffer.Buffer;
export const packageJsonInputSchema = z.object({
name: z.string(),
version: z.string(),
description: z.string(),
affinePlugin: z.object({
release: z.union([z.boolean(), z.enum(['development'])]),
entry: z.object({
core: z.string(),
}),
}),
});
export const packageJsonOutputSchema = z.object({
name: z.string(),
version: z.string(),
description: z.string(),
affinePlugin: z.object({
release: z.union([z.boolean(), z.enum(['development'])]),
entry: z.object({
core: z.string(),
}),
assets: z.array(z.string()),
}),
});
type SetStateAction<Value> = Value | ((prev: Value) => Value);
export type ContentLayoutAtom = WritableAtom<
ExpectedLayout,
[SetStateAction<ExpectedLayout>],
void
>;
export abstract class HandlerManager<
Namespace extends string,
Handlers extends Record<string, PrimitiveHandlers>,
> {
static instance: HandlerManager<string, Record<string, PrimitiveHandlers>>;
private readonly _app: App<Namespace, Handlers>;
private readonly _namespace: Namespace;
private _handlers: Handlers;
constructor() {
throw new Error('Method not implemented.');
}
private _initialized = false;
registerHandlers(handlers: Handlers) {
if (this._initialized) {
throw new Error('Already initialized');
}
this._handlers = handlers;
for (const [name, handler] of Object.entries(this._handlers)) {
this._app.handle(`${this._namespace}:${name}`, (async (...args: any[]) =>
handler(...args)) as any);
}
this._initialized = true;
}
invokeHandler<K extends keyof Handlers>(
name: K,
...args: Parameters<Handlers[K]>
): Promise<ReturnType<Handlers[K]>> {
return this._handlers[name](...args);
}
static getInstance(): HandlerManager<
string,
Record<string, PrimitiveHandlers>
> {
throw new Error('Method not implemented.');
}
}
export interface WorkspaceMeta {
id: string;
mainDBPath: string;
secondaryDBPath?: string; // assume there will be only one
}
export type PrimitiveHandlers = (...args: any[]) => Promise<any>;
export type DBHandlers = {
getDocAsUpdates: (
workspaceId: string,
subdocId?: string
) => Promise<Uint8Array | false>;
applyDocUpdate: (
id: string,
update: Uint8Array,
subdocId?: string
) => Promise<void>;
addBlob: (
workspaceId: string,
key: string,
data: Uint8Array
) => Promise<void>;
getBlob: (workspaceId: string, key: string) => Promise<Buffer | null>;
deleteBlob: (workspaceId: string, key: string) => Promise<void>;
getBlobKeys: (workspaceId: string) => Promise<string[]>;
getDefaultStorageLocation: () => Promise<string>;
};
export type DebugHandlers = {
revealLogFile: () => Promise<string>;
logFilePath: () => Promise<string>;
};
export type ErrorMessage =
| 'DB_FILE_ALREADY_LOADED'
| 'DB_FILE_PATH_INVALID'
| 'DB_FILE_INVALID'
| 'DB_FILE_MIGRATION_FAILED'
| 'FILE_ALREADY_EXISTS'
| 'UNKNOWN_ERROR';
export interface LoadDBFileResult {
workspaceId?: string;
error?: ErrorMessage;
canceled?: boolean;
}
export interface SaveDBFileResult {
filePath?: string;
canceled?: boolean;
error?: ErrorMessage;
}
export interface SelectDBFileLocationResult {
filePath?: string;
error?: ErrorMessage;
canceled?: boolean;
}
export interface MoveDBFileResult {
filePath?: string;
error?: ErrorMessage;
canceled?: boolean;
}
// provide a backdoor to set dialog path for testing in playwright
export interface FakeDialogResult {
canceled?: boolean;
filePath?: string;
filePaths?: string[];
}
export type DialogHandlers = {
revealDBFile: (workspaceId: string) => Promise<void>;
loadDBFile: () => Promise<LoadDBFileResult>;
saveDBFileAs: (workspaceId: string) => Promise<SaveDBFileResult>;
moveDBFile: (
workspaceId: string,
dbFileLocation?: string
) => Promise<MoveDBFileResult>;
selectDBFileLocation: () => Promise<SelectDBFileLocationResult>;
setFakeDialogResult: (result: any) => Promise<void>;
};
export type UIHandlers = {
handleThemeChange: (theme: 'system' | 'light' | 'dark') => Promise<any>;
handleSidebarVisibilityChange: (visible: boolean) => Promise<any>;
handleMinimizeApp: () => Promise<any>;
handleMaximizeApp: () => Promise<any>;
handleCloseApp: () => Promise<any>;
getGoogleOauthCode: () => Promise<any>;
getChallengeResponse: (resource: string) => Promise<string>;
handleOpenMainApp: () => Promise<any>;
};
export type ClipboardHandlers = {
copyAsImageFromString: (dataURL: string) => Promise<void>;
};
export type ExportHandlers = {
savePDFFileAs: (title: string) => Promise<any>;
};
export interface UpdateMeta {
version: string;
allowAutoUpdate: boolean;
}
export type UpdaterConfig = {
autoCheckUpdate: boolean;
autoDownloadUpdate: boolean;
};
export type UpdaterHandlers = {
currentVersion: () => Promise<string>;
quitAndInstall: () => Promise<void>;
downloadUpdate: () => Promise<void>;
getConfig: () => Promise<UpdaterConfig>;
setConfig: (newConfig: Partial<UpdaterConfig>) => Promise<void>;
checkForUpdates: () => Promise<{ version: string } | null>;
};
export type WorkspaceHandlers = {
list: () => Promise<[workspaceId: string, meta: WorkspaceMeta][]>;
delete: (id: string) => Promise<void>;
getMeta: (id: string) => Promise<WorkspaceMeta>;
clone: (id: string, newId: string) => Promise<void>;
};
export type ConfigStorageHandlers = {
set: (config: AppConfigSchema | Partial<AppConfigSchema>) => Promise<void>;
get: () => Promise<AppConfigSchema>;
};
export type UnwrapManagerHandlerToServerSide<
ElectronEvent extends {
frameId: number;
processId: number;
},
Manager extends HandlerManager<string, Record<string, PrimitiveHandlers>>,
> = Manager extends HandlerManager<infer _, infer Handlers>
? {
[K in keyof Handlers]: Handlers[K] extends (
...args: infer Args
) => Promise<infer R>
? (event: ElectronEvent, ...args: Args) => Promise<R>
: never;
}
: never;
export type UnwrapManagerHandlerToClientSide<
Manager extends HandlerManager<string, Record<string, PrimitiveHandlers>>,
> = Manager extends HandlerManager<infer _, infer Handlers>
? {
[K in keyof Handlers]: Handlers[K] extends (
...args: infer Args
) => Promise<infer R>
? (...args: Args) => Promise<R>
: never;
}
: never;
/**
* @internal
*/
export type App<
Namespace extends string,
Handlers extends Record<string, PrimitiveHandlers>,
> = TypedEventEmitter<{
[K in keyof Handlers as `${Namespace}:${K & string}`]: Handlers[K];
}>;
export interface UpdaterEvents {
onUpdateAvailable: (fn: (versionMeta: UpdateMeta) => void) => () => void;
onUpdateReady: (fn: (versionMeta: UpdateMeta) => void) => () => void;
onDownloadProgress: (fn: (progress: number) => void) => () => void;
}
export interface ApplicationMenuEvents {
onNewPageAction: (fn: () => void) => () => void;
}
export interface DBEvents {
onExternalUpdate: (
fn: (update: {
workspaceId: string;
update: Uint8Array;
docId?: string;
}) => void
) => () => void;
}
export interface WorkspaceEvents {
onMetaChange: (
fn: (workspaceId: string, meta: WorkspaceMeta) => void
) => () => void;
}
export interface UIEvents {
onMaximized: (fn: (maximized: boolean) => void) => () => void;
}
export interface EventMap {
updater: UpdaterEvents;
applicationMenu: ApplicationMenuEvents;
db: DBEvents;
ui: UIEvents;
workspace: WorkspaceEvents;
}
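A type-level sketch of how these helpers are meant to compose, assuming the DBHandlerManager subclass declared in handler.ts above (file paths are illustrative):

import type { DBHandlerManager } from './handler.js';
import type { UnwrapManagerHandlerToClientSide } from './type.js';

// the renderer-facing API: the same handlers, without the electron event argument
type DBClientApi = UnwrapManagerHandlerToClientSide<DBHandlerManager>;
// e.g. DBClientApi['getBlob'] is (workspaceId: string, key: string) => Promise<Buffer | null>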

View File

@@ -4,9 +4,13 @@
"compilerOptions": {
"composite": true,
"noEmit": false,
"moduleResolution": "bundler",
"outDir": "lib"
},
"references": [
{
"path": "../sdk"
},
{
"path": "../env"
},

View File

@@ -1,4 +0,0 @@
{
"extends": ["../../../typedoc.base.json"],
"entryPoints": ["src/index.ts"]
}

View File

@@ -0,0 +1,43 @@
import { resolve } from 'node:path';
import { fileURLToPath } from 'url';
import { defineConfig } from 'vite';
import dts from 'vite-plugin-dts';
const root = fileURLToPath(new URL('.', import.meta.url));
export default defineConfig({
build: {
minify: false,
lib: {
entry: {
blocksuite: resolve(root, 'src/blocksuite/index.ts'),
index: resolve(root, 'src/index.ts'),
atom: resolve(root, 'src/atom/index.ts'),
command: resolve(root, 'src/command/index.ts'),
type: resolve(root, 'src/type.ts'),
'core/event-emitter': resolve(root, 'src/core/event-emitter.ts'),
'preload/electron': resolve(root, 'src/preload/electron.ts'),
'app-config-storage': resolve(root, 'src/app-config-storage.ts'),
'__internal__/plugin': resolve(root, 'src/__internal__/plugin.ts'),
},
formats: ['es', 'cjs'],
name: 'AffineInfra',
},
rollupOptions: {
external: [
'electron',
'async-call-rpc',
'rxjs',
'zod',
'react',
'yjs',
'nanoid',
/^jotai/,
/^@blocksuite/,
/^@affine\/templates/,
],
},
},
plugins: [dts()],
});

4
packages/common/sdk/.gitignore vendored Normal file
View File

@@ -0,0 +1,4 @@
src/entry.d.ts
src/entry.d.ts.map
src/entry.js
src/entry.js.map

View File

@@ -0,0 +1,37 @@
{
"name": "@affine/sdk",
"version": "0.11.0",
"type": "module",
"scripts": {
"build": "vite build",
"dev": "vite build --watch"
},
"exports": {
"./entry": {
"types": "./dist/src/entry.d.ts",
"import": "./dist/entry.js",
"require": "./dist/entry.cjs"
},
"./server": {
"types": "./dist/src/server.d.ts",
"import": "./dist/server.js",
"require": "./dist/server.cjs"
}
},
"files": [
"dist"
],
"dependencies": {
"@blocksuite/block-std": "0.11.0-nightly-202312200102-8254dc9",
"@blocksuite/blocks": "0.11.0-nightly-202312200102-8254dc9",
"@blocksuite/global": "0.11.0-nightly-202312200102-8254dc9",
"@blocksuite/presets": "0.11.0-nightly-202312200102-8254dc9",
"@blocksuite/store": "0.11.0-nightly-202312200102-8254dc9",
"jotai": "^2.5.1",
"zod": "^3.22.4"
},
"devDependencies": {
"vite": "^5.0.6",
"vite-plugin-dts": "3.6.0"
}
}

View File

@@ -0,0 +1,22 @@
{
"name": "sdk",
"$schema": "../../../node_modules/nx/schemas/project-schema.json",
"projectType": "library",
"sourceRoot": "packages/common/sdk/src",
"targets": {
"build": {
"executor": "@nx/vite:build",
"options": {
"outputPath": "packages/common/sdk/dist"
}
},
"serve": {
"executor": "@nx/vite:build",
"options": {
"outputPath": "packages/common/sdk/dist",
"watch": true
}
}
},
"tags": ["infra"]
}

View File

@@ -0,0 +1,65 @@
import type { BaseSelection } from '@blocksuite/block-std';
import type { AffineEditorContainer } from '@blocksuite/presets';
import type { Page } from '@blocksuite/store';
import type { Workspace } from '@blocksuite/store';
import type { Atom, getDefaultStore } from 'jotai/vanilla';
import type { WritableAtom } from 'jotai/vanilla/atom';
import type { FunctionComponent } from 'react';
export type Part = 'headerItem' | 'editor' | 'setting' | 'formatBar';
export type CallbackMap = {
headerItem: (root: HTMLElement) => () => void;
editor: (root: HTMLElement, editor: AffineEditorContainer) => () => void;
setting: (root: HTMLElement) => () => void;
formatBar: (
root: HTMLElement,
page: Page,
getBlockRange: () => BaseSelection[]
) => () => void;
};
export interface PluginContext {
register: <T extends Part>(part: T, callback: CallbackMap[T]) => void;
utils: {
PluginProvider: FunctionComponent; // make more clear
};
}
export type LayoutDirection = 'horizontal' | 'vertical';
export type LayoutNode = LayoutParentNode | string;
export type LayoutParentNode = {
direction: LayoutDirection;
splitPercentage: number; // 0 - 100
first: string;
second: LayoutNode;
maxWidth?: (number | undefined)[];
};
export type ExpectedLayout =
| {
direction: 'horizontal';
// the first element is always the editor
first: 'editor';
second: LayoutNode;
// the percentage should be greater than 70
splitPercentage: number;
}
| 'editor';
export declare const pushLayoutAtom: WritableAtom<
null,
| [
string,
(div: HTMLDivElement) => () => void,
{
maxWidth: (number | undefined)[];
},
]
| [string, (div: HTMLDivElement) => () => void],
void
>;
export declare const deleteLayoutAtom: WritableAtom<null, [string], void>;
export declare const currentPageIdAtom: Atom<string | null>;
export declare const currentWorkspaceAtom: Atom<Promise<Workspace>>;
export declare const rootStore: ReturnType<typeof getDefaultStore>;
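A minimal sketch of a plugin entry written against the PluginContext contract above; the default-export signature and the DOM contents are assumptions for illustration:

import type { PluginContext } from '@affine/sdk/entry';

export default function entry(context: PluginContext) {
  context.register('headerItem', root => {
    const button = document.createElement('button');
    button.textContent = 'My Plugin';
    root.append(button);
    // cleanup callback invoked when the plugin is unloaded
    return () => button.remove();
  });
}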

View File

@@ -0,0 +1,4 @@
export interface ServerContext {
registerCommand: (command: string, fn: (...args: any[]) => any) => void;
unregisterCommand: (command: string) => void;
}

View File

@@ -4,14 +4,12 @@
"compilerOptions": {
"composite": true,
"noEmit": false,
"moduleResolution": "bundler",
"outDir": "lib"
},
"references": [
{
"path": "../../common/infra"
},
{
"path": "../../frontend/electron"
"path": "./tsconfig.node.json"
}
]
}

View File

@@ -0,0 +1,12 @@
{
"extends": "../../../tsconfig.json",
"compilerOptions": {
"composite": true,
"module": "ESNext",
"moduleResolution": "Node",
"allowSyntheticDefaultImports": true,
"outDir": "lib",
"noEmit": false
},
"include": ["vite.config.ts"]
}

View File

@@ -0,0 +1,23 @@
import { resolve } from 'node:path';
import { fileURLToPath } from 'url';
import { defineConfig } from 'vite';
import dts from 'vite-plugin-dts';
const root = fileURLToPath(new URL('.', import.meta.url));
export default defineConfig({
build: {
minify: false,
lib: {
entry: {
entry: resolve(root, 'src/entry.ts'),
server: resolve(root, 'src/server.ts'),
},
},
rollupOptions: {
external: [/^jotai/, /^@blocksuite/, 'zod'],
},
},
plugins: [dts()],
});
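
A note on this config, not part of the changeset: when lib.entry is an object and no formats are given, Vite's documented default for multi-entry library builds is ['es', 'cjs']. Assuming the package is declared as "type": "module", the expected output would be:

// Assumed build output for the config above (sketch, not verified against the repo):
//   dist/entry.js    dist/entry.cjs
//   dist/server.js   dist/server.cjs
// vite-plugin-dts additionally emits dist/src/entry.d.ts and dist/src/server.d.ts,
// which is what the "exports" map in the package.json above points at.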

View File

@@ -1,24 +0,0 @@
{
"name": "@affine/workspace",
"private": true,
"main": "./src/index.ts",
"exports": {
".": "./src/index.ts"
},
"peerDependencies": {
"@blocksuite/blocks": "*",
"@blocksuite/global": "*",
"@blocksuite/store": "*"
},
"dependencies": {
"@affine/debug": "workspace:*",
"@affine/env": "workspace:*",
"@toeverything/infra": "workspace:*",
"lodash-es": "^4.17.21",
"yjs": "^13.6.10"
},
"devDependencies": {
"vitest": "1.1.1"
},
"version": "0.11.0"
}

View File

@@ -1,7 +0,0 @@
export * from './engine';
export * from './factory';
export * from './global-schema';
export * from './list';
export * from './manager';
export * from './metadata';
export * from './workspace';

View File

@@ -1,4 +0,0 @@
{
"extends": ["../../../typedoc.base.json"],
"entryPoints": ["src/index.ts"]
}

View File

@@ -37,12 +37,12 @@
"y-provider": "workspace:*"
},
"devDependencies": {
"@blocksuite/blocks": "0.11.0-nightly-202401020419-752a5b8",
"@blocksuite/store": "0.11.0-nightly-202401020419-752a5b8",
"@blocksuite/blocks": "0.11.0-nightly-202312200102-8254dc9",
"@blocksuite/store": "0.11.0-nightly-202312200102-8254dc9",
"fake-indexeddb": "^5.0.0",
"vite": "^5.0.6",
"vite-plugin-dts": "3.7.0",
"vitest": "1.1.1",
"vite-plugin-dts": "3.6.0",
"vitest": "1.0.4",
"y-indexeddb": "^9.0.11",
"yjs": "^13.6.10"
},

View File

@@ -24,10 +24,10 @@
"build": "vite build"
},
"devDependencies": {
"@blocksuite/store": "0.11.0-nightly-202401020419-752a5b8",
"@blocksuite/store": "0.11.0-nightly-202312200102-8254dc9",
"vite": "^5.0.6",
"vite-plugin-dts": "3.7.0",
"vitest": "1.1.1",
"vite-plugin-dts": "3.6.0",
"vitest": "1.0.4",
"yjs": "^13.6.10"
},
"peerDependencies": {

View File

@@ -20,7 +20,6 @@
},
"dependencies": {
"@affine/debug": "workspace:*",
"@affine/electron-api": "workspace:*",
"@affine/graphql": "workspace:*",
"@affine/i18n": "workspace:*",
"@affine/workspace": "workspace:*",
@@ -42,6 +41,8 @@
"@radix-ui/react-toast": "^1.1.5",
"@radix-ui/react-toolbar": "^1.0.4",
"@radix-ui/react-tooltip": "^1.0.7",
"@toeverything/hooks": "workspace:*",
"@toeverything/infra": "workspace:*",
"@toeverything/theme": "^0.7.24",
"@vanilla-extract/dynamic": "^2.0.3",
"bytes": "^3.1.2",
@@ -71,12 +72,12 @@
"uuid": "^9.0.1"
},
"devDependencies": {
"@blocksuite/blocks": "0.11.0-nightly-202401020419-752a5b8",
"@blocksuite/global": "0.11.0-nightly-202401020419-752a5b8",
"@blocksuite/icons": "2.1.39",
"@blocksuite/lit": "0.11.0-nightly-202401020419-752a5b8",
"@blocksuite/presets": "0.11.0-nightly-202401020419-752a5b8",
"@blocksuite/store": "0.11.0-nightly-202401020419-752a5b8",
"@blocksuite/blocks": "0.11.0-nightly-202312200102-8254dc9",
"@blocksuite/global": "0.11.0-nightly-202312200102-8254dc9",
"@blocksuite/icons": "2.1.36",
"@blocksuite/lit": "0.11.0-nightly-202312200102-8254dc9",
"@blocksuite/presets": "0.11.0-nightly-202312200102-8254dc9",
"@blocksuite/store": "0.11.0-nightly-202312200102-8254dc9",
"@storybook/addon-actions": "^7.5.3",
"@storybook/addon-essentials": "^7.5.3",
"@storybook/addon-interactions": "^7.5.3",
@@ -88,7 +89,7 @@
"@storybook/jest": "^0.2.3",
"@storybook/react": "^7.5.3",
"@storybook/react-vite": "^7.5.3",
"@storybook/test-runner": "^0.16.0",
"@storybook/test-runner": "^0.15.2",
"@storybook/testing-library": "^0.2.2",
"@testing-library/react": "^14.0.0",
"@types/bytes": "^3.1.3",
@@ -102,7 +103,7 @@
"storybook-dark-mode": "^3.0.1",
"typescript": "^5.3.2",
"vite": "^5.0.6",
"vitest": "1.1.1",
"vitest": "1.0.4",
"yjs": "^13.6.10"
},
"version": "0.11.0"

Some files were not shown because too many files have changed in this diff.