Compare commits


39 Commits

Author SHA1 Message Date
Alex Yang
2c772bd81b v0.7.0-canary.24 2023-06-29 18:50:48 +08:00
JimmFly
7f00011542 chore: update changelog link and remove obsolete changelog components (#2918) 2023-06-29 10:19:26 +00:00
Alex Yang
f76d8b8818 chore: bump blocksuite to 0.0.0-20230629084521-542de4e8-nightly (#2921) 2023-06-29 09:42:47 +00:00
Alex Yang
1d6b39dec9 ci: allow codecov upload failure (#2922) 2023-06-29 09:39:16 +00:00
Qi
5cfdf6c7e2 fix: a series of ui issues of new setting (#2920)
Co-authored-by: Alex Yang <himself65@outlook.com>
2023-06-29 09:25:42 +00:00
Alex Yang
8410d83744 refactor: rootWorkspacesMetadataAtom loading logic (#2882) 2023-06-29 08:48:12 +00:00
DarkSky
8a2dac9718 fix: incorrect formatting (#2917) 2023-06-29 08:25:43 +00:00
JimmFly
5ad2908760 chore: update translation (#2916)
Co-authored-by: zuozijian3720 <zuozijian1994@gmail.com>
2023-06-29 08:20:25 +00:00
Alex Yang
5b8771485e docs: add apps/README.md 2023-06-29 16:07:30 +08:00
Alex Yang
ed8480caf0 ci: split migration test 2023-06-29 15:11:16 +08:00
Alex Yang
42ef3c0fc2 test: migration test in real world (#2885) 2023-06-29 06:50:26 +00:00
Alex Yang
e08ee9b7ff ci: add prettier format check (#2908) 2023-06-29 04:13:35 +00:00
liuyi
2c95bfcc3d feat(storage): binding jwst storage to node (#2808) 2023-06-29 01:45:45 +00:00
Alex Yang
86616e152d build: disable sqlite provider in canary 2023-06-29 10:00:41 +08:00
Peng Xiao
b1f478ee5e fix: updater color updates (#2913) 2023-06-28 17:21:07 +00:00
DarkSky
6b0f9fbdad feat: add deployment guide & fix pod label (#2912) 2023-06-28 17:12:23 +00:00
Alex Yang
da3f2b784a ci: fix output variable 2023-06-29 01:20:35 +08:00
Alex Yang
acb140ab78 v0.7.0-canary.23 2023-06-29 00:40:50 +08:00
Alex Yang
0b74bd9bfe ci: use production environment 2023-06-29 00:40:50 +08:00
Alex Yang
acfc030d16 ci: fix package version output 2023-06-29 00:40:50 +08:00
Alex Yang
d0d04ce376 v0.7.0-canary.22 2023-06-29 00:27:17 +08:00
Alex Yang
2250f42d2a ci: fix tag version 2023-06-29 00:26:48 +08:00
Alex Yang
887434fea4 v0.7.0-canary.21 2023-06-29 00:23:06 +08:00
Alex Yang
9b817c4b79 ci: automatically build canary release (#2911) 2023-06-28 15:53:32 +00:00
Alex Yang
ea03bbfb2d ci: add codeql check to merge group (#2909) 2023-06-28 15:07:27 +00:00
Qi
db40cd35c6 feat: migrate workspace setting with new design to setting modal (#2900)
Co-authored-by: Alex Yang <himself65@outlook.com>
2023-06-28 14:45:33 +00:00
Alex Yang
aabac9e921 chore: bump typescript version (#2906) 2023-06-28 12:57:33 +00:00
Alex Yang
0a91c41e0a chore: codesandbox setup (#2907) 2023-06-28 12:32:56 +00:00
DarkSky
d6addc0d0b docs: improve helm ci & document (#2902) 2023-06-28 12:30:02 +00:00
Alex Yang
91d3b76be5 refactor(storybook): move to apps folder (#2901) 2023-06-28 12:29:52 +00:00
Alex Yang
3eed009270 feat: add rule 'sonarjs/no-identical-functions' (#2905) 2023-06-28 12:29:12 +00:00
Alex Yang
bc14d54cfa chore: update pre-commit hook (#2904) 2023-06-28 11:24:37 +00:00
Alex Yang
5496969e58 refactor: environment setup (#2898)
Co-authored-by: Simon He <57086651+Simon-He95@users.noreply.github.com>
2023-06-28 11:19:19 +00:00
Alex Yang
80c2a78273 fix(web): bypass adapter list error (#2903) 2023-06-28 11:06:13 +00:00
Alex Yang
92f378aefc test(server): watch mode (#2893) 2023-06-28 10:00:06 +00:00
Alex Yang
877ceee698 ci: enable merge group (#2899) 2023-06-28 09:56:02 +00:00
Alex Yang
7960b6a22e feat: update migration test page (#2871) 2023-06-28 16:46:08 +08:00
Alex Yang
fa45d8a718 build: unify build flags (#2891) 2023-06-28 16:45:05 +08:00
Alex Yang
87574c9993 build: fix i18n output (#2896) 2023-06-28 16:40:41 +08:00
574 changed files with 15636 additions and 14197 deletions

View File

@@ -22,9 +22,7 @@
"templates",
"y-indexeddb",
"debug",
"storage",
"infra",
"plugin-infra"
"storage"
]
]
}

View File

@@ -8,4 +8,3 @@ _next
lib
.eslintrc.js
packages/i18n/src/i18n-generated.ts
e2e-dist-*

View File

@@ -21,11 +21,6 @@ const createPattern = packageName => [
message: 'Do not import package itself',
allowTypeImports: false,
},
{
group: ['@blocksuite/store'],
message: "Import from '@blocksuite/global/utils'",
importNames: ['assertExists', 'assertEquals'],
},
];
const allPackages = [
@@ -88,7 +83,7 @@ const config = {
'@typescript-eslint',
'simple-import-sort',
'sonarjs',
'i',
'import',
'unused-imports',
'unicorn',
],
@@ -139,11 +134,6 @@ const config = {
message: "Don't import from src",
allowTypeImports: false,
},
{
group: ['@blocksuite/store'],
message: "Import from '@blocksuite/global/utils'",
importNames: ['assertExists', 'assertEquals'],
},
],
},
],
@@ -214,7 +204,6 @@ const config = {
'scripts/**/*',
'**/benchmark/**/*',
'**/__debug__/**/*',
'**/e2e/**/*',
],
rules: {
'@typescript-eslint/no-non-null-assertion': 0,

View File

@@ -46,7 +46,6 @@ runs:
export CC_x86_64_unknown_linux_gnu=x86_64-unknown-linux-gnu-gcc
yarn nx build @affine/native --target ${{ inputs.target }}
chmod -R 777 node_modules/.cache
chmod -R 777 target
- name: Build
if: ${{ inputs.target == 'aarch64-unknown-linux-gnu' }}
@@ -57,4 +56,3 @@ runs:
run: |
yarn nx build @affine/native --target ${{ inputs.target }}
chmod -R 777 node_modules/.cache
chmod -R 777 target

View File

@@ -13,18 +13,10 @@ inputs:
description: 'Run the install step for Playwright.'
required: false
default: 'false'
electron-install:
description: 'Download the Electron binary'
required: false
default: 'true'
npm-token:
description: 'The NPM token to use for private packages.'
required: false
default: ''
hard-link-nm:
description: 'set nmMode to hardlinks-local in .yarnrc.yml'
required: false
default: 'true'
runs:
using: 'composite'
@@ -37,10 +29,33 @@ runs:
scope: '@toeverything'
cache: 'yarn'
- name: Set nmMode
if: ${{ inputs.hard-link-nm == 'true' }}
- name: Expose yarn config as "$GITHUB_OUTPUT"
id: yarn-config
shell: bash
run: yarn config set nmMode hardlinks-local
run: |
echo "CACHE_FOLDER=$(yarn config get cacheFolder)" >> $GITHUB_OUTPUT
- name: Restore yarn cache
uses: actions/cache@v3
id: yarn-download-cache
with:
path: ${{ steps.yarn-config.outputs.CACHE_FOLDER }}
key: yarn-download-cache-${{ hashFiles('yarn.lock') }}
restore-keys: |
yarn-download-cache-
- name: Restore node_modules cache
uses: actions/cache@v3
with:
path: '**/node_modules'
key: ${{ runner.os }}-modules-${{ hashFiles('**/yarn.lock') }}
- name: Restore yarn install state
id: yarn-install-state-cache
uses: actions/cache@v3
with:
path: .yarn/ci-cache/
key: ${{ runner.os }}-yarn-install-state-cache-${{ hashFiles('yarn.lock', '.yarnrc.yml') }}
- name: yarn install
if: ${{ inputs.package-install == 'true' }}
@@ -49,9 +64,9 @@ runs:
run: yarn install ${{ inputs.extra-flags }}
env:
NODE_AUTH_TOKEN: ${{ inputs.npm-token }}
YARN_ENABLE_GLOBAL_CACHE: 'false'
YARN_INSTALL_STATE_PATH: .yarn/ci-cache/install-state.gz
HUSKY: '0'
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: '1'
ELECTRON_SKIP_BINARY_DOWNLOAD: '1'
- name: yarn install (try again)
if: ${{ steps.install.outcome == 'failure' }}
@@ -59,9 +74,9 @@ runs:
run: yarn install ${{ inputs.extra-flags }}
env:
NODE_AUTH_TOKEN: ${{ inputs.npm-token }}
YARN_ENABLE_GLOBAL_CACHE: 'false'
YARN_INSTALL_STATE_PATH: .yarn/ci-cache/install-state.gz
HUSKY: '0'
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: '1'
ELECTRON_SKIP_BINARY_DOWNLOAD: '1'
- name: Get installed Playwright version
id: playwright-version
@@ -98,30 +113,3 @@ runs:
shell: bash
if: inputs.playwright-install == 'true' && steps.playwright-cache.outputs.cache-hit != 'true'
run: yarn playwright install --with-deps
- name: Get installed Electron version
id: electron-version
if: ${{ inputs.electron-install == 'true' }}
shell: bash
run: |
echo "version=$(yarn why --json electron | grep -h 'workspace:.' | jq --raw-output '.children[].locator' | sed -e 's/@playwright\/test@.*://' | head -n 1)" >> $GITHUB_OUTPUT
- uses: actions/cache@v3
id: electron-cache
if: ${{ inputs.electron-install == 'true' }}
with:
path: 'node_modules/.cache/electron'
key: '${{ runner.os }}-electron-${{ steps.electron-version.outputs.version }}'
restore-keys: |
${{ runner.os }}-electron-
- name: Install Electron binary
shell: bash
if: inputs.electron-install == 'true'
run: node apps/electron/node_modules/electron/install.js
env:
ELECTRON_OVERRIDE_DIST_PATH: ./node_modules/.cache/electron
- name: Build Infra
shell: bash
run: yarn run build:infra

View File

@@ -3,7 +3,7 @@ server {
root /app/dist;
location / {
try_files $uri $uri/index.html $uri.html =404;
try_files $uri $uri/index.html $uri.html =404;;
}
error_page 404 /404.html;

View File

@@ -1,23 +0,0 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*.orig
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/

View File

@@ -1,6 +0,0 @@
apiVersion: v2
name: affine
description: AFFiNE cloud chart
type: application
version: 0.0.0
appVersion: '0.7.0-canary.18'

View File

@@ -1,6 +0,0 @@
apiVersion: v2
name: graphql
description: AFFiNE GraphQL server
type: application
version: 0.0.0
appVersion: '0.7.0-canary.18'

View File

@@ -1,16 +0,0 @@
1. Get the application URL by running these commands:
{{- if contains "NodePort" .Values.service.type }}
export NODE_PORT=$(kubectl get --namespace {{ .Release.Namespace }} -o jsonpath="{.spec.ports[0].nodePort}" services {{ include "graphql.fullname" . }})
export NODE_IP=$(kubectl get nodes --namespace {{ .Release.Namespace }} -o jsonpath="{.items[0].status.addresses[0].address}")
echo http://$NODE_IP:$NODE_PORT
{{- else if contains "LoadBalancer" .Values.service.type }}
NOTE: It may take a few minutes for the LoadBalancer IP to be available.
You can watch the status of by running 'kubectl get --namespace {{ .Release.Namespace }} svc -w {{ include "graphql.fullname" . }}'
export SERVICE_IP=$(kubectl get svc --namespace {{ .Release.Namespace }} {{ include "graphql.fullname" . }} --template "{{"{{ range (index .status.loadBalancer.ingress 0) }}{{.}}{{ end }}"}}")
echo http://$SERVICE_IP:{{ .Values.service.port }}
{{- else if contains "ClusterIP" .Values.service.type }}
export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "graphql.name" . }},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}")
export CONTAINER_PORT=$(kubectl get pod --namespace {{ .Release.Namespace }} $POD_NAME -o jsonpath="{.spec.containers[0].ports[0].containerPort}")
echo "Visit http://127.0.0.1:8080 to use your application"
kubectl --namespace {{ .Release.Namespace }} port-forward $POD_NAME 8080:$CONTAINER_PORT
{{- end }}

View File

@@ -1,132 +0,0 @@
{{/*
Expand the name of the chart.
*/}}
{{- define "graphql.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
If release name contains chart name it will be used as a full name.
*/}}
{{- define "graphql.fullname" -}}
{{- if .Values.fullnameOverride }}
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- $name := default .Chart.Name .Values.nameOverride }}
{{- if contains $name .Release.Name }}
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
{{- end }}
{{- end }}
{{- end }}
{{/*
Create chart name and version as used by the chart label.
*/}}
{{- define "graphql.chart" -}}
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Common labels
*/}}
{{- define "graphql.labels" -}}
helm.sh/chart: {{ include "graphql.chart" . }}
{{ include "graphql.selectorLabels" . }}
{{- if .Chart.AppVersion }}
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
{{- end }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end }}
{{/*
Selector labels
*/}}
{{- define "graphql.selectorLabels" -}}
app.kubernetes.io/name: {{ include "graphql.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}
{{/*
Create the name of the service account to use
*/}}
{{- define "graphql.serviceAccountName" -}}
{{- if .Values.serviceAccount.create }}
{{- default (include "graphql.fullname" .) .Values.serviceAccount.name }}
{{- else }}
{{- default "default" .Values.serviceAccount.name }}
{{- end }}
{{- end }}
{{- define "jwt.key" -}}
{{- $secret := lookup "v1" "Secret" .Release.Namespace .Values.app.jwt.secretName -}}
{{- if and $secret $secret.data.private -}}
{{/*
Reusing existing secret data
*/}}
key: {{ $secret.data.private }}
{{- else -}}
{{/*
Generate new data
*/}}
key: {{ genPrivateKey "ecdsa" | b64enc }}
{{- end -}}
{{- end -}}
{{- define "objectStorage.r2" -}}
{{- $secret := lookup "v1" "Secret" .Release.Namespace .Values.app.objectStorage.r2.secretName -}}
{{- if $secret -}}
{{/*
Reusing existing secret data
*/}}
accountId: {{ $secret.data.accountId }}
accessKeyId: {{ $secret.data.accessKeyId }}
secretAccessKey: {{ $secret.data.secretAccessKey }}
bucket: {{ $secret.data.bucket }}
{{- else -}}
{{/*
Generate new data
*/}}
accountId: {{ .Values.app.objectStorage.r2.accountId | b64enc }}
accessKeyId: {{ .Values.app.objectStorage.r2.accessKeyId | b64enc }}
secretAccessKey: {{ .Values.app.objectStorage.r2.secretAccessKey | b64enc }}
bucket: {{ .Values.app.objectStorage.r2.bucket | b64enc }}
{{- end -}}
{{- end -}}
{{- define "objectStorage.oauth.google" -}}
{{- $secret := lookup "v1" "Secret" .Release.Namespace .Values.app.oauth.google.secretName -}}
{{- if $secret -}}
{{/*
Reusing existing secret data
*/}}
clientId: {{ $secret.data.clientId }}
clientSecret: {{ $secret.data.clientSecret }}
{{- else -}}
{{/*
Generate new data
*/}}
clientId: "{{ .Values.app.oauth.google.clientId | b64enc }}"
clientSecret: "{{ .Values.app.oauth.google.clientSecret | b64enc }}"
{{- end -}}
{{- end -}}
{{- define "objectStorage.oauth.github" -}}
{{- $secret := lookup "v1" "Secret" .Release.Namespace .Values.app.oauth.github.secretName -}}
{{- if $secret -}}
{{/*
Reusing existing secret data
*/}}
clientId: {{ $secret.data.clientId }}
clientSecret: {{ $secret.data.clientSecret }}
{{- else -}}
{{/*
Generate new data
*/}}
clientId: "{{ .Values.app.oauth.github.clientId | b64enc }}"
clientSecret: "{{ .Values.app.oauth.github.clientSecret | b64enc }}"
{{- end -}}
{{- end -}}

View File

@@ -1,126 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: {{ include "graphql.fullname" . }}
labels:
{{- include "graphql.labels" . | nindent 4 }}
spec:
replicas: {{ .Values.replicaCount }}
selector:
matchLabels:
{{- include "graphql.selectorLabels" . | nindent 6 }}
template:
metadata:
{{- with .Values.podAnnotations }}
annotations:
{{- toYaml . | nindent 8 }}
{{- end }}
labels:
{{- include "graphql.selectorLabels" . | nindent 8 }}
spec:
{{- with .Values.imagePullSecrets }}
imagePullSecrets:
{{- toYaml . | nindent 8 }}
{{- end }}
serviceAccountName: {{ include "graphql.serviceAccountName" . }}
containers:
- name: {{ .Chart.Name }}
image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
imagePullPolicy: {{ .Values.image.pullPolicy }}
env:
- name: AUTH_PRIVATE_KEY
valueFrom:
secretKeyRef:
name: "{{ .Values.app.jwt.secretName }}"
key: key
- name: NODE_ENV
value: "{{ .Values.env }}"
- name: DATABSE_PASSWORD
valueFrom:
secretKeyRef:
name: pg-postgresql
key: postgres-password
- name: DATABASE_URL
value: postgres://{{ .Values.database.user }}:$(DATABSE_PASSWORD)@{{ .Values.database.url }}:{{ .Values.database.port }}/{{ .Values.database.name }}
- name: AFFINE_SERVER_PORT
value: "{{ .Values.service.port }}"
- name: AFFINE_SERVER_SUB_PATH
value: "{{ .Values.app.path }}"
- name: AFFINE_SERVER_HOST
value: "{{ .Values.app.host }}"
- name: ENABLE_R2_OBJECT_STORAGE
value: "{{ .Values.app.objectStorage.r2.enabled }}"
{{ if .Values.app.objectStorage.r2.enabled }}
- name: R2_OBJECT_STORAGE_ACCOUNT_ID
valueFrom:
secretKeyRef:
name: "{{ .Values.app.objectStorage.r2.secretName }}"
key: accountId
- name: R2_OBJECT_STORAGE_ACCESS_KEY_ID
valueFrom:
secretKeyRef:
name: "{{ .Values.app.objectStorage.r2.secretName }}"
key: accessKeyId
- name: R2_OBJECT_STORAGE_SECRET_ACCESS_KEY
valueFrom:
secretKeyRef:
name: "{{ .Values.app.objectStorage.r2.secretName }}"
key: secretAccessKey
- name: R2_OBJECT_STORAGE_BUCKET
valueFrom:
secretKeyRef:
name: "{{ .Values.app.objectStorage.r2.secretName }}"
key: bucket
{{ end }}
{{ if .Values.app.oauth.google.enabled }}
- name: OAUTH_GOOGLE_CLIENT_ID
valueFrom:
secretKeyRef:
name: "{{ .Values.app.oauth.google.secretName }}"
key: clientId
- name: OAUTH_GOOGLE_CLIENT_SECRET
valueFrom:
secretKeyRef:
name: "{{ .Values.app.oauth.google.secretName }}"
key: clientSecret
{{ end }}
{{ if .Values.app.oauth.github.enabled }}
- name: OAUTH_GITHUB_CLIENT_ID
valueFrom:
secretKeyRef:
name: "{{ .Values.app.oauth.github.secretName }}"
key: clientId
- name: OAUTH_GITHUB_CLIENT_SECRET
valueFrom:
secretKeyRef:
name: "{{ .Values.app.oauth.github.secretName }}"
key: clientSecret
{{ end }}
ports:
- name: http
containerPort: {{ .Values.service.port }}
protocol: TCP
livenessProbe:
httpGet:
path: /
port: http
initialDelaySeconds: {{ .Values.probe.initialDelaySeconds }}
readinessProbe:
httpGet:
path: /
port: http
initialDelaySeconds: {{ .Values.probe.initialDelaySeconds }}
resources:
{{- toYaml .Values.resources | nindent 12 }}
{{- with .Values.nodeSelector }}
nodeSelector:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.tolerations }}
tolerations:
{{- toYaml . | nindent 8 }}
{{- end }}

View File

@@ -1,7 +0,0 @@
apiVersion: v1
kind: Secret
metadata:
name: "{{ .Values.app.jwt.secretName }}"
type: Opaque
data:
{{- ( include "jwt.key" . ) | indent 2 -}}

View File

@@ -1,34 +0,0 @@
apiVersion: batch/v1
kind: Job
metadata:
name: {{ include "graphql.fullname" . }}-database-migration
labels:
{{- include "graphql.labels" . | nindent 4 }}
annotations:
"helm.sh/hook": pre-install,pre-upgrade
"helm.sh/hook-weight": "-1"
"helm.sh/hook-delete-policy": before-hook-creation
spec:
template:
spec:
containers:
- name: {{ .Chart.Name }}
image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
command: ["yarn", "prisma", "migrate", "deploy"]
env:
- name: NODE_ENV
value: "{{ .Values.env }}"
- name: DATABSE_PASSWORD
valueFrom:
secretKeyRef:
name: pg-postgresql
key: postgres-password
- name: DATABASE_URL
value: postgres://{{ .Values.database.user }}:$(DATABSE_PASSWORD)@{{ .Values.database.url }}:{{ .Values.database.port }}/{{ .Values.database.name }}
resources:
requests:
cpu: '100m'
memory: '200Mi'
restartPolicy: Never
backoffLimit: 1

View File

@@ -1,10 +0,0 @@
{{- if .Values.app.oauth.github.enabled -}}
apiVersion: v1
kind: Secret
metadata:
name: "{{ .Values.app.oauth.github.secretName }}"
type: Opaque
data:
{{- ( include "objectStorage.oauth.github" . ) | indent 2 -}}
{{- end }}

View File

@@ -1,10 +0,0 @@
{{- if .Values.app.oauth.google.enabled -}}
apiVersion: v1
kind: Secret
metadata:
name: "{{ .Values.app.oauth.google.secretName }}"
type: Opaque
data:
{{- ( include "objectStorage.oauth.google" . ) | indent 2 -}}
{{- end }}

View File

@@ -1,9 +0,0 @@
{{- if .Values.app.objectStorage.r2.enabled -}}
apiVersion: v1
kind: Secret
metadata:
name: "{{ .Values.app.objectStorage.r2.secretName }}"
type: Opaque
data:
{{- ( include "objectStorage.r2" . ) | indent 2 -}}
{{- end }}

View File

@@ -1,15 +0,0 @@
apiVersion: v1
kind: Service
metadata:
name: {{ include "graphql.fullname" . }}
labels:
{{- include "graphql.labels" . | nindent 4 }}
spec:
type: {{ .Values.service.type }}
ports:
- port: {{ .Values.service.port }}
targetPort: http
protocol: TCP
name: http
selector:
{{- include "graphql.selectorLabels" . | nindent 4 }}

View File

@@ -1,12 +0,0 @@
{{- if .Values.serviceAccount.create -}}
apiVersion: v1
kind: ServiceAccount
metadata:
name: {{ include "graphql.serviceAccountName" . }}
labels:
{{- include "graphql.labels" . | nindent 4 }}
{{- with .Values.serviceAccount.annotations }}
annotations:
{{- toYaml . | nindent 4 }}
{{- end }}
{{- end }}

View File

@@ -1,15 +0,0 @@
apiVersion: v1
kind: Pod
metadata:
name: "{{ include "graphql.fullname" . }}-test-connection"
labels:
{{- include "graphql.labels" . | nindent 4 }}
annotations:
"helm.sh/hook": test
spec:
containers:
- name: wget
image: busybox
command: ['wget']
args: ['{{ include "graphql.fullname" . }}:{{ .Values.service.port }}']
restartPolicy: Never

View File

@@ -1,69 +0,0 @@
replicaCount: 1
image:
repository: ghcr.io/toeverything/affine-graphql
pullPolicy: IfNotPresent
tag: ''
imagePullSecrets: []
nameOverride: ''
fullnameOverride: ''
# map to NODE_ENV environment variable
env: 'production'
database:
user: 'postgres'
url: 'pg-postgresql'
port: '5432'
name: 'affine'
app:
# AFFINE_SERVER_SUB_PATH
path: ''
# AFFINE_SERVER_HOST
host: '0.0.0.0'
jwt:
secretName: jwt-private-key
# base64 encoded ecdsa private key
privateKey: ''
objectStorage:
r2:
enabled: false
secretName: r2
accountId: ''
accessKeyId: ''
secretAccessKey: ''
bucket: ''
oauth:
google:
enabled: false
secretName: oauth-google
clientId: ''
clientSecret: ''
github:
enabled: false
secretName: oauth-github
clientId: ''
clientSecret: ''
serviceAccount:
create: true
annotations: {}
name: 'affine-graphql'
podAnnotations: {}
podSecurityContext:
fsGroup: 2000
resources:
limits:
cpu: '2000m'
memory: 4Gi
requests:
cpu: '1000m'
memory: 2Gi
probe:
initialDelaySeconds: 20
nodeSelector: {}
tolerations: []
affinity: {}

View File

@@ -1,23 +0,0 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*.orig
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/

View File

@@ -1,6 +0,0 @@
apiVersion: v2
name: web
description: A Helm chart for Kubernetes
type: application
version: 0.0.0
appVersion: "0.7.0-canary.18"

View File

@@ -1,16 +0,0 @@
1. Get the application URL by running these commands:
{{- if contains "NodePort" .Values.service.type }}
export NODE_PORT=$(kubectl get --namespace {{ .Release.Namespace }} -o jsonpath="{.spec.ports[0].nodePort}" services {{ include "web.fullname" . }})
export NODE_IP=$(kubectl get nodes --namespace {{ .Release.Namespace }} -o jsonpath="{.items[0].status.addresses[0].address}")
echo http://$NODE_IP:$NODE_PORT
{{- else if contains "LoadBalancer" .Values.service.type }}
NOTE: It may take a few minutes for the LoadBalancer IP to be available.
You can watch the status of by running 'kubectl get --namespace {{ .Release.Namespace }} svc -w {{ include "web.fullname" . }}'
export SERVICE_IP=$(kubectl get svc --namespace {{ .Release.Namespace }} {{ include "web.fullname" . }} --template "{{"{{ range (index .status.loadBalancer.ingress 0) }}{{.}}{{ end }}"}}")
echo http://$SERVICE_IP:{{ .Values.service.port }}
{{- else if contains "ClusterIP" .Values.service.type }}
export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "web.name" . }},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}")
export CONTAINER_PORT=$(kubectl get pod --namespace {{ .Release.Namespace }} $POD_NAME -o jsonpath="{.spec.containers[0].ports[0].containerPort}")
echo "Visit http://127.0.0.1:8080 to use your application"
kubectl --namespace {{ .Release.Namespace }} port-forward $POD_NAME 8080:$CONTAINER_PORT
{{- end }}

View File

@@ -1,62 +0,0 @@
{{/*
Expand the name of the chart.
*/}}
{{- define "web.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
If release name contains chart name it will be used as a full name.
*/}}
{{- define "web.fullname" -}}
{{- if .Values.fullnameOverride }}
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- $name := default .Chart.Name .Values.nameOverride }}
{{- if contains $name .Release.Name }}
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
{{- end }}
{{- end }}
{{- end }}
{{/*
Create chart name and version as used by the chart label.
*/}}
{{- define "web.chart" -}}
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Common labels
*/}}
{{- define "web.labels" -}}
helm.sh/chart: {{ include "web.chart" . }}
{{ include "web.selectorLabels" . }}
{{- if .Chart.AppVersion }}
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
{{- end }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end }}
{{/*
Selector labels
*/}}
{{- define "web.selectorLabels" -}}
app.kubernetes.io/name: {{ include "web.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}
{{/*
Create the name of the service account to use
*/}}
{{- define "web.serviceAccountName" -}}
{{- if .Values.serviceAccount.create }}
{{- default (include "web.fullname" .) .Values.serviceAccount.name }}
{{- else }}
{{- default "default" .Values.serviceAccount.name }}
{{- end }}
{{- end }}

View File

@@ -1,57 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: {{ include "web.fullname" . }}
labels:
{{- include "web.labels" . | nindent 4 }}
spec:
replicas: {{ .Values.replicaCount }}
selector:
matchLabels:
{{- include "web.selectorLabels" . | nindent 6 }}
template:
metadata:
{{- with .Values.podAnnotations }}
annotations:
{{- toYaml . | nindent 8 }}
{{- end }}
labels:
{{- include "web.selectorLabels" . | nindent 8 }}
spec:
{{- with .Values.imagePullSecrets }}
imagePullSecrets:
{{- toYaml . | nindent 8 }}
{{- end }}
serviceAccountName: {{ include "web.serviceAccountName" . }}
containers:
- name: {{ .Chart.Name }}
image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
imagePullPolicy: {{ .Values.image.pullPolicy }}
ports:
- name: http
containerPort: {{ .Values.service.port }}
protocol: TCP
livenessProbe:
httpGet:
path: /
port: http
initialDelaySeconds: {{ .Values.probe.initialDelaySeconds }}
readinessProbe:
httpGet:
path: /
port: http
initialDelaySeconds: {{ .Values.probe.initialDelaySeconds }}
resources:
{{- toYaml .Values.resources | nindent 12 }}
{{- with .Values.nodeSelector }}
nodeSelector:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.tolerations }}
tolerations:
{{- toYaml . | nindent 8 }}
{{- end }}

View File

@@ -1,15 +0,0 @@
apiVersion: v1
kind: Service
metadata:
name: {{ include "web.fullname" . }}
labels:
{{- include "web.labels" . | nindent 4 }}
spec:
type: {{ .Values.service.type }}
ports:
- port: {{ .Values.service.port }}
targetPort: http
protocol: TCP
name: http
selector:
{{- include "web.selectorLabels" . | nindent 4 }}

View File

@@ -1,12 +0,0 @@
{{- if .Values.serviceAccount.create -}}
apiVersion: v1
kind: ServiceAccount
metadata:
name: {{ include "web.serviceAccountName" . }}
labels:
{{- include "web.labels" . | nindent 4 }}
{{- with .Values.serviceAccount.annotations }}
annotations:
{{- toYaml . | nindent 4 }}
{{- end }}
{{- end }}

View File

@@ -1,15 +0,0 @@
apiVersion: v1
kind: Pod
metadata:
name: "{{ include "web.fullname" . }}-test-connection"
labels:
{{- include "web.labels" . | nindent 4 }}
annotations:
"helm.sh/hook": test
spec:
containers:
- name: wget
image: busybox
command: ['wget']
args: ['{{ include "web.fullname" . }}:{{ .Values.service.port }}']
restartPolicy: Never

View File

@@ -1,37 +0,0 @@
replicaCount: 1
image:
repository: ghcr.io/toeverything/affine-front
pullPolicy: IfNotPresent
tag: ""
imagePullSecrets: []
nameOverride: ""
fullnameOverride: ""
serviceAccount:
create: true
annotations: {}
name: "affine-web"
podAnnotations: {}
podSecurityContext:
fsGroup: 2000
resources:
limits:
cpu: '500m'
memory: 2Gi
requests:
cpu: '500m'
memory: 2Gi
nodeSelector: {}
tolerations: []
affinity: {}
probe:
initialDelaySeconds: 1

View File

@@ -1,62 +0,0 @@
{{/*
Expand the name of the chart.
*/}}
{{- define "affine.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
If release name contains chart name it will be used as a full name.
*/}}
{{- define "affine.fullname" -}}
{{- if .Values.fullnameOverride }}
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- $name := default .Chart.Name .Values.nameOverride }}
{{- if contains $name .Release.Name }}
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
{{- end }}
{{- end }}
{{- end }}
{{/*
Create chart name and version as used by the chart label.
*/}}
{{- define "affine.chart" -}}
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Common labels
*/}}
{{- define "affine.labels" -}}
helm.sh/chart: {{ include "affine.chart" . }}
{{ include "affine.selectorLabels" . }}
{{- if .Chart.AppVersion }}
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
{{- end }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end }}
{{/*
Selector labels
*/}}
{{- define "affine.selectorLabels" -}}
app.kubernetes.io/name: {{ include "affine.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}
{{/*
Create the name of the service account to use
*/}}
{{- define "affine.serviceAccountName" -}}
{{- if .Values.serviceAccount.create }}
{{- default (include "affine.fullname" .) .Values.serviceAccount.name }}
{{- else }}
{{- default "default" .Values.serviceAccount.name }}
{{- end }}
{{- end }}

View File

@@ -1,64 +0,0 @@
{{- if .Values.ingress.enabled -}}
{{- $fullName := include "affine.fullname" . -}}
{{- if and .Values.ingress.className (not (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion)) }}
{{- if not (hasKey .Values.ingress.annotations "kubernetes.io/ingress.class") }}
{{- $_ := set .Values.ingress.annotations "kubernetes.io/ingress.class" .Values.ingress.className}}
{{- end }}
{{- end }}
{{- if semverCompare ">=1.19-0" .Capabilities.KubeVersion.GitVersion -}}
apiVersion: networking.k8s.io/v1
{{- else if semverCompare ">=1.14-0" .Capabilities.KubeVersion.GitVersion -}}
apiVersion: networking.k8s.io/v1beta1
{{- else -}}
apiVersion: extensions/v1beta1
{{- end }}
kind: Ingress
metadata:
name: {{ $fullName }}
labels:
{{- include "affine.labels" . | nindent 4 }}
{{- with .Values.ingress.annotations }}
annotations:
{{- toYaml . | nindent 4 }}
{{- end }}
spec:
{{- if and .Values.ingress.className (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion) }}
ingressClassName: {{ .Values.ingress.className }}
{{- end }}
{{- if .Values.ingress.tls }}
tls:
{{- range .Values.ingress.tls }}
- hosts:
{{- range .hosts }}
- {{ . | quote }}
{{- end }}
secretName: {{ .secretName }}
{{- end }}
{{- end }}
rules:
- host: "{{ .Values.ingress.host }}"
http:
paths:
- path: /graphql
pathType: Prefix
backend:
service:
name: affine-graphql
port:
number: {{ .Values.graphql.service.port }}
- path: /api
pathType: Prefix
backend:
service:
name: affine-graphql
port:
number: {{ .Values.graphql.service.port }}
- path: /
pathType: Prefix
backend:
service:
name: affine-web
port:
number: {{ .Values.web.service.port }}
{{- end }}

View File

@@ -1,17 +0,0 @@
ingress:
enabled: false
className: ''
annotations:
kubernetes.io/ingress.class: nginx
host: affine.pro
tls: []
graphql:
service:
type: ClusterIP
port: 3000
web:
service:
type: ClusterIP
port: 8080

16
.github/labeler.yml
View File

@@ -8,20 +8,14 @@ test:
- '**/tests/**/*'
- '**/__tests__/**/*'
plugin:copilot:
- 'plugins/copilot/**/*'
mod:dev:
- 'scripts/**/*'
- 'packages/cli/**/*'
- 'packages/debug/**/*'
mod:plugin:
- 'plugins/**/*'
plugin:bookmark-block:
- 'plugins/bookmark-block/**/*'
plugin:copilot:
- 'plugins/copilot/**/*'
mod:plugin-infra:
- 'packages/plugin-infra/**/*'
@@ -35,10 +29,6 @@ mod:hooks: 'packages/hooks/**/*'
mod:component: 'packages/component/**/*'
mod:storage: 'packages/storage/**/*'
mod:native: 'packages/native/**/*'
mod:store:
- 'packages/jotai/**/*'
- '**/atoms/**/*'

View File

@@ -43,27 +43,16 @@ jobs:
- uses: actions/checkout@v3
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
- name: Run i18n codegen
run: yarn i18n-codegen gen
- name: Run Type Check
run: yarn typecheck
- name: Run ESLint
run: yarn lint:eslint --max-warnings=0
run: yarn lint --max-warnings=0 --cache
- name: Run Prettier
# Set nmMode in `actions/setup-node` will modify the .yarnrc.yml
run: |
git checkout .yarnrc.yml
yarn lint:prettier
run: yarn prettier . --ignore-unknown --cache --check
- name: Run circular
run: yarn circular
- name: Upload server dist
uses: actions/upload-artifact@v3
with:
name: server-dist
path: ./apps/server/dist
if-no-files-found: error
build-docs:
name: Build Docs
@@ -74,8 +63,6 @@ jobs:
- uses: actions/checkout@v3
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
- run: yarn nx build @affine/docs
env:
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
@@ -89,8 +76,6 @@ jobs:
- uses: actions/checkout@v3
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
- run: yarn nx build @affine/storybook
env:
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
@@ -113,6 +98,26 @@ jobs:
- name: Build Web
run: yarn nx build @affine/web
- name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: next-js
path: ./apps/web/.next
if-no-files-found: error
build-web-desktop:
name: Build @affine/web (Desktop)
runs-on: ubuntu-latest
environment: development
steps:
- uses: actions/checkout@v3
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Export static resources
run: yarn nx export @affine/web
- name: Upload static resources artifact
uses: actions/upload-artifact@v3
with:
name: next-js-static
@@ -162,20 +167,11 @@ jobs:
uses: ./.github/actions/setup-rust
with:
target: 'x86_64-unknown-linux-gnu'
- name: Build Storage
run: yarn build:storage
- name: Run server tests
run: yarn test:coverage
working-directory: apps/server
run: yarn nx test:coverage @affine/server
env:
CARGO_TARGET_DIR: '${{ github.workspace }}/target'
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Upload storage.node
uses: actions/upload-artifact@v3
with:
name: storage.node
path: ./packages/storage/storage.node
if-no-files-found: error
- name: Upload server test coverage results
uses: codecov/codecov-action@v3
with:
@@ -196,7 +192,6 @@ jobs:
uses: ./.github/actions/setup-node
with:
playwright-install: true
electron-install: false
- name: Download storybook artifact
uses: actions/download-artifact@v3
with:
@@ -213,9 +208,18 @@ jobs:
strategy:
fail-fast: false
matrix:
shard: [1, 2, 3, 4, 5]
shard: [1, 2, 3, 4]
environment: development
needs: build-web
needs: [build-web, build-storybook]
services:
octobase:
image: ghcr.io/toeverything/cloud-self-hosted:nightly-latest
ports:
- 3000:3000
env:
SIGN_KEY: 'test123'
RUST_LOG: 'debug'
JWST_DEV: '1'
steps:
- uses: actions/checkout@v3
@@ -223,16 +227,24 @@ jobs:
uses: ./.github/actions/setup-node
with:
playwright-install: true
electron-install: false
- name: Download artifact
uses: actions/download-artifact@v3
with:
name: next-js-static
path: ./apps/web/out
name: next-js
path: ./apps/web/.next
- name: Download storybook artifact
uses: actions/download-artifact@v3
with:
name: storybook
path: ./apps/storybook/storybook-static
- name: Wait for Octobase Ready
run: |
node ./scripts/wait-3000-healthz.mjs
- name: Run playwright tests
run: yarn e2e --forbid-only --shard=${{ matrix.shard }}/${{ strategy.job-total }}
working-directory: tests/affine-local
env:
COVERAGE: true
@@ -268,7 +280,6 @@ jobs:
uses: ./.github/actions/setup-node
with:
playwright-install: true
electron-install: false
- name: Download next static
uses: actions/download-artifact@v3
@@ -333,14 +344,13 @@ jobs:
target: x86_64-pc-windows-msvc,
test: true,
}
needs: [build-web]
needs: [build-web-desktop]
steps:
- uses: actions/checkout@v3
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
playwright-install: true
hard-link-nm: false
- name: Build AFFiNE native
uses: ./.github/actions/build-rust
with:
@@ -352,18 +362,13 @@ jobs:
run: yarn nx test @affine/monorepo
env:
NATIVE_TEST: 'true'
- name: Build layers
run: yarn workspace @affine/electron build
- name: Download static resource artifact
uses: actions/download-artifact@v3
with:
name: next-js-static
path: apps/electron/resources/web-static
- name: Build Plugins
run: yarn run build:plugins
- name: Build Desktop Layers
run: yarn workspace @affine/electron build
path: ./apps/electron/resources/web-static
- name: Run desktop tests
if: ${{ matrix.spec.test && matrix.spec.os == 'ubuntu-latest' }}
@@ -377,17 +382,6 @@ jobs:
env:
COVERAGE: true
- name: Make bundle
if: ${{ matrix.spec.os == 'macos-latest' && matrix.spec.arch == 'arm64' }}
run: yarn workspace @affine/electron make --platform=darwin --arch=arm64
- name: Bundle output check
if: ${{ matrix.spec.os == 'macos-latest' && matrix.spec.arch == 'arm64' }}
run: |
./scripts/unzip-macos-arm64.sh
yarn ts-node-esm ./scripts/macos-arm64-output-check.mts
working-directory: apps/electron
- name: Collect code coverage report
if: ${{ matrix.spec.test }}
run: yarn exec nyc report -t .nyc_output --report-dir .coverage --reporter=lcov
@@ -414,12 +408,20 @@ jobs:
name: Unit Test
runs-on: ubuntu-latest
environment: development
services:
octobase:
image: ghcr.io/toeverything/cloud-self-hosted:nightly-latest
ports:
- 3000:3000
env:
SIGN_KEY: 'test123'
RUST_LOG: 'debug'
JWST_DEV: '1'
steps:
- uses: actions/checkout@v3
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
- name: Unit Test
run: yarn nx test:coverage @affine/monorepo
@@ -437,9 +439,7 @@ jobs:
if: github.ref == 'refs/heads/master'
name: Build Docker
needs:
- lint
- desktop-test
- server-test
- build-web-desktop
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
@@ -448,16 +448,6 @@ jobs:
with:
name: next-js-static
path: ./apps/web/out
- name: Download server dist
uses: actions/download-artifact@v3
with:
name: server-dist
path: ./apps/server/dist
- name: Download storage.node
uses: actions/download-artifact@v3
with:
name: storage.node
path: ./apps/server
- name: Setup Git short hash
run: |
echo "GIT_SHORT_HASH=$(git rev-parse --short HEAD)" >> "$GITHUB_ENV"
@@ -483,21 +473,14 @@ jobs:
file: .github/deployment/front/Dockerfile
tags: ghcr.io/toeverything/affine-front:${{ env.GIT_SHORT_HASH }},ghcr.io/toeverything/affine-front:latest
# setup node without cache configuration
# Prisma cache is not compatible with docker build cache
- name: Setup Node.js
uses: actions/setup-node@v3
uses: ./.github/actions/setup-node
with:
node-version-file: '.nvmrc'
registry-url: https://npm.pkg.github.com
scope: '@toeverything'
package-install: false
- name: Install Node.js dependencies
run: yarn workspaces focus @affine/server --production
- name: Generate Prisma client
run: yarn workspace @affine/server prisma generate
- name: Build graphql Dockerfile
uses: docker/build-push-action@v4
with:

View File

@@ -1,18 +0,0 @@
name: Cancel
on:
pull_request_target:
types:
- edited
- synchronize
jobs:
cancel:
name: 'Cancel Previous Runs'
runs-on: ubuntu-latest
timeout-minutes: 2
steps:
- uses: styfle/cancel-workflow-action@0.11.0
with:
# See https://api.github.com/repos/toeverything/AFFiNE/actions/workflows
workflow_id: 44038251, 61883931
access_token: ${{ github.token }}

View File

@@ -3,8 +3,8 @@ name: Release Charts
on:
push:
branches: [master]
paths:
- '.github/helm/**/Chart.yml'
pull_request:
branches: [master]
jobs:
release:
@@ -43,12 +43,11 @@ jobs:
helm repo add bitnami https://charts.bitnami.com/bitnami
helm repo update
helm dependencies build ../.github/helm/affine
helm dependencies build ../.github/helm/affine-cloud
cr package ../.github/helm/affine
cr package ../.github/helm/affine-cloud
- name: Publish charts
- name: Package charts
if: github.ref == 'refs/heads/master'
working-directory: .helm-chart-repo
run: |
set -ex

View File

@@ -11,8 +11,6 @@ on:
- README.md
- .github/**
- '!.github/workflows/nightly-build.yml'
- '!.github/actions/build-rust/action.yml'
- '!.github/actions/setup-rust/action.yml'
- '!.github/actions/setup-node/action.yml'
permissions:
@@ -34,7 +32,7 @@ jobs:
runs-on: ubuntu-latest
environment: production
outputs:
version: 0.0.0-internal.${{ steps.version.outputs.version }}
version: 0.0.0-${{ steps.version.outputs.version }}
steps:
- uses: actions/checkout@v3
- uses: toeverything/set-build-version@latest
@@ -60,6 +58,10 @@ jobs:
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
NEXT_PUBLIC_SENTRY_DSN: ${{ secrets.NEXT_PUBLIC_SENTRY_DSN }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
API_SERVER_PROFILE: prod
ENABLE_TEST_PROPERTIES: false
ENABLE_BOOKMARK_OPERATION: true
ENABLE_SQLITE_PROVIDER: false
RELEASE_VERSION: ${{ needs.set-build-version.outputs.version }}
- name: Upload Artifact (web-static)
@@ -123,10 +125,7 @@ jobs:
name: before-make-web-static
path: apps/electron/resources/web-static
- name: Build Plugins
run: yarn run build:plugins
- name: Build Desktop Layers
- name: Build layers
run: yarn workspace @affine/electron build
- name: Signing By Apple Developer ID

View File

@@ -1,54 +0,0 @@
name: NX
on:
push:
branches:
- master
- v[0-9]+.[0-9]+.x-staging
- v[0-9]+.[0-9]+.x
paths-ignore:
- README.md
- .github/**
- '!.github/workflows/nx.yml'
- '!.github/actions/build-rust/action.yml'
- '!.github/actions/setup-node/action.yml'
pull_request:
merge_group:
branches:
- master
- v[0-9]+.[0-9]+.x-staging
- v[0-9]+.[0-9]+.x
paths-ignore:
- README.md
- .github/**
- '!.github/workflows/nx.yml'
- '!.github/actions/build-rust/action.yml'
- '!.github/actions/setup-node/action.yml'
jobs:
main:
name: Nx Cloud - Main Job
uses: nrwl/ci/.github/workflows/nx-cloud-main.yml@v0.13.0
with:
runs-on: macos-latest
main-branch-name: master
number-of-agents: 5
init-commands: |
yarn exec nx-cloud start-ci-run --stop-agents-after="build" --agent-count=5
environment-variables: |
BUILD_TYPE=canary
# parallel-commands: |
# yarn exec nx-cloud record -- yarn exec nx format:check
parallel-commands-on-agents: |
yarn exec nx affected --target=build --parallel=5
timeout: 60
agents:
name: Nx Cloud - Agents
uses: nrwl/ci/.github/workflows/nx-cloud-agents.yml@v0.13.0
with:
runs-on: macos-latest
number-of-agents: 5
environment-variables: |
BUILD_TYPE=canary
timeout: 60

View File

@@ -123,10 +123,7 @@ jobs:
name: before-make-web-static
path: apps/electron/resources/web-static
- name: Build Plugins
run: yarn run build:plugins
- name: Build Desktop Layers
- name: Build layers
run: yarn workspace @affine/electron build
- name: Signing By Apple Developer ID

View File

@@ -1,22 +0,0 @@
name: Deploy Cloudflare Worker
on:
push:
branches:
- master
paths:
- packages/workers/**
jobs:
deploy:
runs-on: ubuntu-latest
name: Deploy
environment: production
steps:
- uses: actions/checkout@v2
- name: Publish
uses: cloudflare/wrangler-action@2.0.0
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}
workingDirectory: 'packages/workers'

7
.gitignore
View File

@@ -60,10 +60,9 @@ out/
storybook-static
i18n-generated.ts
test-results
playwright-report
playwright/.cache
download
/test-results/
/playwright-report/
/playwright/.cache/
# Cache
.eslintcache

View File

@@ -2,22 +2,10 @@
. "$(dirname -- "$0")/_/husky.sh"
# check lockfile is up to date
yarn install --mode=skip-build --inline-builds --immutable
# build infra code
yarn -T run build:infra
# generate prisma client type
yarn workspace @affine/server prisma generate
# generate i18n
yarn i18n-codegen gen
yarn install --mode=update-lockfile
# lint staged files
yarn exec lint-staged
# type check
yarn typecheck
# circular dependency check
yarn circular

1
.npmrc
View File

@@ -1,3 +1,2 @@
shell-emulator=true
electron_mirror="https://cdn.npmmirror.com/binaries/electron/"
engine-strict=true

View File

@@ -10,5 +10,3 @@ dist
.yarn
tests/affine-legacy/0.7.0-canary.18/static
.github/helm
_next
storybook-static

View File

@@ -2,13 +2,13 @@
<h1 style="border-bottom: none">
<b><a href="https://affine.pro">AFFiNE.PRO</a></b><br />
Write, Draw and Plan All at Once
The Next-Gen Collaborative Knowledge Base
<br>
</h1>
<p>
One hyper-fused platform for wildly creative minds. <br />
A privacy-focussed, local-first, open-source, and ready-to-use alternative for Notion & Miro.
AFFiNE is a next-gen knowledge base that brings planning, sorting and creating all together.<br />
Privacy first, open-source, customizable and ready to use - a free replacement for Notion & Miro. <br />
</p>
</div>
@@ -60,7 +60,7 @@ See https://github.com/all-?/all-contributors/issues/361#issuecomment-637166066
<br />
<div align="center">
<em>Docs, canvas and tables are hyper-merged with AFFiNE - just like the word affine (əˈfaɪn | a-fine).</em>
<em>See docs, canvas and tables are hyper merged with AFFiNE - just like the word affine (əˈfaɪn | a-fine).</em>
</div>
<br />
@@ -123,8 +123,6 @@ If you have questions, you are welcome to contact us. One of the best places to
## Plugins
> Plugins are a way to extend the functionality of AFFiNE.
>
> (Currently, plugins are under heavy development, and the SDK is not yet available.)
| Name | |
| ------------------------------------------------ | ----------------------------------------- |
@@ -137,7 +135,7 @@ We would also like to give thanks to open-source projects that make AFFiNE possi
- [BlockSuite](https://github.com/toeverything/BlockSuite) - 💠 BlockSuite is the open-source collaborative editor project behind AFFiNE.
- [OctoBase](https://github.com/toeverything/OctoBase) - 🐙 OctoBase is the open-source database behind AFFiNE, local-first, yet collaborative. A light-weight, scalable, data engine written in Rust.
- [Yjs](https://github.com/yjs/yjs) - Fundamental support of CRDTs for our implementation on state management and data sync.
- [Yjs](https://github.com/yjs/yjs) & [Yrs](https://github.com/y-crdt/y-crdt) - Fundamental support of CRDTs for our implementation on state management and data sync.
- [Electron](https://github.com/electron/electron) - Build cross-platform desktop apps with JavaScript, HTML, and CSS.
- [React](https://github.com/facebook/react) - View layer support and web GUI framework.
- [Rust](https://github.com/rust-lang/rust) - High performance language that extends the ability and availability of our real-time backend, OctoBase.
@@ -158,8 +156,7 @@ We would like to express our gratitude to all the individuals who have already c
## Self-Host
Get started with Docker and deploy your own feature-rich, restriction-free deployment of AFFiNE.
We are working hard to get this updated to the latest version, you can keep an eye on the [latest packages].
Get started with Docker and deploy your own feature-rich, restriction-free deployment of AFFiNE - check the [latest packages].
## Hiring
@@ -200,7 +197,7 @@ See [LICENSE] for details.
[rust-version-icon]: https://img.shields.io/badge/Rust-1.70.0-dea584
[stars-icon]: https://img.shields.io/github/stars/toeverything/AFFiNE.svg?style=flat&logo=github&colorB=red&label=stars
[codecov]: https://codecov.io/gh/toeverything/affine/branch/master/graphs/badge.svg?branch=master
[node-version-icon]: https://img.shields.io/badge/node-%3E=18.16.1-success
[node-version-icon]: https://img.shields.io/badge/node-%3E=18.16.0-success
[typescript-version-icon]: https://img.shields.io/github/package-json/dependency-version/toeverything/affine/dev/typescript
[react-version-icon]: https://img.shields.io/github/package-json/dependency-version/toeverything/AFFiNE/react?filename=apps%2Fweb%2Fpackage.json&color=rgb(97%2C228%2C251)
[react-version-icon]: https://img.shields.io/github/package-json/dependency-version/toeverything/affine/dev/react?color=rgb%2897%2C%20218%2C%20251%29
[blocksuite-icon]: https://img.shields.io/github/package-json/dependency-version/toeverything/AFFiNE/@blocksuite/store?color=6880ff&filename=apps%2Fweb%2Fpackage.json&label=blocksuite

View File

@@ -1,4 +1,4 @@
<!doctype html>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />

View File

@@ -1,6 +1,6 @@
{
"name": "@affine/docs",
"version": "0.7.0-beta.0",
"version": "0.7.0-canary.24",
"type": "module",
"private": true,
"scripts": {
@@ -10,26 +10,26 @@
},
"dependencies": {
"@affine/component": "workspace:*",
"@blocksuite/block-std": "0.0.0-20230717055529-79180930-nightly",
"@blocksuite/blocks": "0.0.0-20230717055529-79180930-nightly",
"@blocksuite/editor": "0.0.0-20230717055529-79180930-nightly",
"@blocksuite/global": "0.0.0-20230717055529-79180930-nightly",
"@blocksuite/lit": "0.0.0-20230717055529-79180930-nightly",
"@blocksuite/store": "0.0.0-20230717055529-79180930-nightly",
"@blocksuite/block-std": "0.0.0-20230629084521-542de4e8-nightly",
"@blocksuite/blocks": "0.0.0-20230629084521-542de4e8-nightly",
"@blocksuite/editor": "0.0.0-20230629084521-542de4e8-nightly",
"@blocksuite/global": "0.0.0-20230629084521-542de4e8-nightly",
"@blocksuite/lit": "0.0.0-20230629084521-542de4e8-nightly",
"@blocksuite/store": "0.0.0-20230629084521-542de4e8-nightly",
"express": "^4.18.2",
"jotai": "^2.2.2",
"react": "18.3.0-canary-1fdacbefd-20230630",
"react-dom": "18.3.0-canary-1fdacbefd-20230630",
"react-server-dom-webpack": "18.3.0-canary-1fdacbefd-20230630",
"jotai": "^2.2.1",
"react": "18.3.0-canary-8ec962d82-20230623",
"react-dom": "18.3.0-canary-8ec962d82-20230623",
"react-server-dom-webpack": "18.3.0-canary-8ec962d82-20230623",
"waku": "0.12.1"
},
"devDependencies": {
"@types/react": "^18.2.14",
"@types/react-dom": "^18.2.6",
"@vanilla-extract/css": "^1.12.0",
"@vanilla-extract/css": "^1.11.1",
"@vanilla-extract/vite-plugin": "^3.8.2",
"autoprefixer": "^10.4.14",
"tailwindcss": "^3.3.2",
"typescript": "^5.1.6"
"typescript": "^5.1.5"
}
}

View File

@@ -13,5 +13,3 @@ resources/web-static
!.yarn/sdks
!.yarn/versions
dev.json
zip-out

View File

@@ -26,8 +26,6 @@ const arch =
? process.argv[process.argv.indexOf('--arch') + 1]
: process.arch;
const windowsIconUrl = `https://cdn.affine.pro/app-icons/icon_${buildType}.ico`;
/**
* @type {import('@electron-forge/shared-types').ForgeConfig}
*/
@@ -97,7 +95,6 @@ module.exports = {
config: {
name: 'AFFiNE',
setupIcon: icoPath,
iconUrl: windowsIconUrl,
loadingGif: './resources/icons/affine_installing.gif',
},
},

View File

@@ -1,7 +1,7 @@
{
"name": "@affine/electron",
"private": true,
"version": "0.7.0-beta.0",
"version": "0.7.0-canary.24",
"author": "affine",
"repository": {
"url": "https://github.com/toeverything/AFFiNE",
@@ -11,25 +11,28 @@
"homepage": "https://github.com/toeverything/AFFiNE",
"scripts": {
"dev": "yarn cross-env DEV_SERVER_URL=http://localhost:8080 node scripts/dev.mjs",
"dev:prod": "yarn node scripts/dev.mjs",
"build": "NODE_ENV=production zx scripts/build-layers.mjs",
"watch": "yarn cross-env DEV_SERVER_URL=http://localhost:8080 node scripts/dev.mjs --watch",
"prod": "yarn node scripts/dev.mjs",
"build": "zx scripts/build-layers.mjs",
"generate-assets": "zx scripts/generate-assets.mjs",
"package": "electron-forge package",
"make": "electron-forge make",
"test": "DEBUG=pw:browser yarn -T run playwright test -c ./playwright.config.ts"
"test": "DEBUG=pw:browser playwright test"
},
"config": {
"forge": "./forge.config.js"
},
"main": "./dist/main.js",
"exports": {
"./scripts/plugins/build-plugins.mjs": "./scripts/plugins/build-plugins.mjs"
},
"devDependencies": {
"@affine-test/kit": "workspace:*",
"@affine/env": "workspace:*",
"@affine/native": "workspace:*",
"@blocksuite/blocks": "0.0.0-20230717055529-79180930-nightly",
"@blocksuite/editor": "0.0.0-20230717055529-79180930-nightly",
"@blocksuite/lit": "0.0.0-20230717055529-79180930-nightly",
"@blocksuite/store": "0.0.0-20230717055529-79180930-nightly",
"@blocksuite/blocks": "0.0.0-20230629084521-542de4e8-nightly",
"@blocksuite/editor": "0.0.0-20230629084521-542de4e8-nightly",
"@blocksuite/lit": "0.0.0-20230629084521-542de4e8-nightly",
"@blocksuite/store": "0.0.0-20230629084521-542de4e8-nightly",
"@electron-forge/cli": "^6.2.1",
"@electron-forge/core": "^6.2.1",
"@electron-forge/core-utils": "^6.2.1",
@@ -47,9 +50,10 @@
"electron-log": "^5.0.0-beta.24",
"electron-squirrel-startup": "1.0.0",
"electron-window-state": "^5.0.3",
"esbuild": "^0.18.11",
"esbuild": "^0.18.9",
"fs-extra": "^11.1.1",
"jotai": "^2.2.2",
"jotai": "^2.2.1",
"playwright": "=1.33.0",
"ts-node": "^10.9.1",
"undici": "^5.22.1",
"uuid": "^9.0.0",
@@ -59,7 +63,7 @@
"dependencies": {
"@toeverything/plugin-infra": "workspace:*",
"async-call-rpc": "^6.3.1",
"electron-updater": "^6.0.0",
"electron-updater": "^5.3.0",
"link-preview-js": "^3.0.4",
"lodash-es": "^4.17.21",
"nanoid": "^4.0.2",
@@ -81,6 +85,7 @@
"hoistingLimits": "workspaces"
},
"peerDependencies": {
"playwright": "*",
"ts-node": "*"
}
}

View File

@@ -11,7 +11,7 @@ import type { PlaywrightTestConfig } from '@playwright/test';
* See https://playwright.dev/docs/test-configuration.
*/
const config: PlaywrightTestConfig = {
testDir: './e2e',
testDir: './tests',
testIgnore: '**/lib/**',
fullyParallel: true,
timeout: process.env.CI ? 50_000 : 30_000,

View File

@@ -1,24 +0,0 @@
{
"name": "@affine/electron",
"$schema": "../../node_modules/nx/schemas/project-schema.json",
"projectType": "application",
"root": "apps/electron",
"sourceRoot": "apps/electron/src",
"targets": {
"build": {
"executor": "nx:run-script",
"dependsOn": [
{
"projects": ["@affine/bookmark-block"],
"target": "build",
"params": "ignore"
},
"^build"
],
"options": {
"script": "build"
},
"outputs": ["{projectRoot}/dist"]
}
}
}

View File

@@ -1,9 +1,12 @@
#!/usr/bin/env zx
import 'zx/globals';
import { resolve } from 'node:path';
import { spawnSync } from 'child_process';
import * as esbuild from 'esbuild';
import { config } from './common.mjs';
import { config, rootDir } from './common.mjs';
const NODE_ENV =
process.env.NODE_ENV === 'development' ? 'development' : 'production';
@@ -15,10 +18,20 @@ if (process.platform === 'win32') {
async function buildLayers() {
const common = config();
console.log('Build plugin infra');
spawnSync('yarn', ['build'], {
stdio: 'inherit',
cwd: resolve(rootDir, './packages/plugin-infra'),
});
console.log('Build plugins');
await import('./plugins/build-plugins.mjs');
await esbuild.build(common.workers);
await esbuild.build({
...common.layers,
define: {
...common.define,
'process.env.NODE_ENV': `"${NODE_ENV}"`,
'process.env.BUILD_TYPE': `"${process.env.BUILD_TYPE || 'stable'}"`,
},

View File

@@ -15,9 +15,16 @@ const DEV_SERVER_URL = process.env.DEV_SERVER_URL;
/** @type 'production' | 'development'' */
const mode = (process.env.NODE_ENV = process.env.NODE_ENV || 'development');
// List of env that will be replaced by esbuild
const ENV_MACROS = ['AFFINE_GOOGLE_CLIENT_ID', 'AFFINE_GOOGLE_CLIENT_SECRET'];
/** @return {{layers: import('esbuild').BuildOptions, workers: import('esbuild').BuildOptions}} */
export const config = () => {
const define = Object.fromEntries([
...ENV_MACROS.map(key => [
'process.env.' + key,
JSON.stringify(process.env[key] ?? ''),
]),
['process.env.NODE_ENV', `"${mode}"`],
['process.env.USE_WORKER', '"true"'],
]);
@@ -38,12 +45,7 @@ export const config = () => {
bundle: true,
target: `node${NODE_MAJOR_VERSION}`,
platform: 'node',
external: [
'electron',
'electron-updater',
'@toeverything/plugin-infra',
'yjs',
],
external: ['electron', 'electron-updater', '@toeverything/plugin-infra'],
define: define,
format: 'cjs',
loader: {

View File

@@ -1,12 +1,13 @@
/* eslint-disable no-async-promise-executor */
import { spawn } from 'node:child_process';
import { readFileSync } from 'node:fs';
import path from 'node:path';
import path, { resolve } from 'node:path';
import electronPath from 'electron';
import * as esbuild from 'esbuild';
import which from 'which';
import { config, electronDir } from './common.mjs';
import { config, electronDir, rootDir } from './common.mjs';
// this means we don't spawn electron windows, mainly for testing
const watchMode = process.argv.includes('--watch');
@@ -67,6 +68,14 @@ function spawnOrReloadElectron() {
}
const common = config();
const yarnPath = which.sync('yarn');
async function watchPlugins() {
spawn(yarnPath, ['dev'], {
stdio: 'inherit',
cwd: resolve(rootDir, './packages/plugin-infra'),
});
await import('./plugins/dev-plugins.mjs');
}
async function watchLayers() {
return new Promise(async resolve => {
@@ -125,6 +134,7 @@ async function watchWorkers() {
}
async function main() {
await watchPlugins();
await watchLayers();
await watchWorkers();
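
The watcher step added here boils down to spawning yarn in the plugin-infra workspace; a reduced standalone sketch, with the electron reload and esbuild watch plumbing omitted (the '../../..' depth is an assumption).

import { spawn } from 'node:child_process';
import { resolve } from 'node:path';
import { fileURLToPath } from 'node:url';
import which from 'which';

const rootDir = fileURLToPath(new URL('../../..', import.meta.url)); // repo root, depth assumed
const yarnPath = which.sync('yarn'); // throws if yarn is not on PATH

// run `yarn dev` inside packages/plugin-infra and forward its output to this terminal
const child = spawn(yarnPath, ['dev'], {
  stdio: 'inherit',
  cwd: resolve(rootDir, 'packages/plugin-infra'),
});

child.on('exit', code => {
  console.log(`plugin-infra watcher exited with code ${code}`);
});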

View File

@@ -43,6 +43,7 @@ cd(repoRootDir);
if (!process.env.SKIP_WEB_BUILD) {
process.env.ENABLE_LEGACY_PROVIDER = 'false';
await $`yarn nx build @affine/web`;
await $`yarn nx export @affine/web`;
// step 1.5: amend sourceMappingURL to allow debugging in devtools
await glob('**/*.{js,css}', { cwd: affineWebOutDir }).then(files => {

View File

@@ -1,44 +0,0 @@
import { fileURLToPath } from 'node:url';
import { readdir } from 'node:fs/promises';
const outputRoot = fileURLToPath(
new URL(
'../zip-out/AFFiNE-canary.app/Contents/Resources/app',
import.meta.url
)
);
const outputList = [
[
'dist',
[
'main.js',
'helper.js',
'preload.js',
'affine.darwin-arm64.node',
'plugins',
'workers',
],
],
['dist/plugins', ['bookmark-block']],
['dist/plugins/bookmark-block', ['index.mjs']],
['dist/workers', ['plugin.worker.js']],
[
'node_modules/@toeverything/plugin-infra/dist',
['manager.js', 'manager.cjs'],
],
['node_modules/@blocksuite/global/dist', ['utils.js']],
['node_modules/jotai', ['vanilla.js']],
] as [entry: string, expected: string[]][];
await Promise.all(
outputList.map(async ([entry, output]) => {
const files = await readdir(`${outputRoot}/${entry}`);
output.forEach(file => {
if (!files.includes(file)) {
throw new Error(`File ${entry}/${file} not found`);
}
});
})
);
console.log('Output check passed');

View File

@@ -1,7 +1,7 @@
import { resolve } from 'node:path';
import { fileURLToPath } from 'node:url';
export const rootDir = fileURLToPath(new URL('../../..', import.meta.url));
export const rootDir = fileURLToPath(new URL('../../../..', import.meta.url));
export const electronOutputDir = resolve(
rootDir,
'apps',

View File

@@ -1,16 +0,0 @@
#!/bin/bash
# Set the directory
dir="./out/canary/make/zip/darwin/arm64"
# Get the first file
file=$(ls -1 $dir | head -n 1)
# Check if file exists and is a zip file
if [ -f "$dir/$file" ] && [ ${file: -4} == ".zip" ]
then
# Unzip the file
unzip "$dir/$file" -d "zip-out"
else
echo "No zip file found"
fi

View File

@@ -1,11 +1,10 @@
import path from 'node:path';
import { setTimeout } from 'node:timers/promises';
import fs from 'fs-extra';
import { v4 } from 'uuid';
import { afterEach, beforeEach, expect, test, vi } from 'vitest';
import { removeWithRetry } from '../../../../tests/utils';
const tmpDir = path.join(__dirname, 'tmp');
const appDataPath = path.join(tmpDir, 'app-data');
@@ -45,7 +44,11 @@ beforeEach(() => {
afterEach(async () => {
existProcess();
await removeWithRetry(tmpDir);
// wait for the db to be closed on Windows
if (process.platform === 'win32') {
await setTimeout(200);
}
await fs.remove(tmpDir);
vi.useRealTimers();
});

View File

@@ -1,69 +0,0 @@
import path from 'node:path';
import { SqliteConnection } from '@affine/native';
import { afterEach, describe, expect, it, vi } from 'vitest';
import * as Y from 'yjs';
import { removeWithRetry } from '../../../../tests/utils';
import { copyToTemp, migrateToSubdocAndReplaceDatabase } from '../migration';
const tmpDir = path.join(__dirname, 'tmp');
const testDBFilePath = path.resolve(__dirname, 'old-db.affine');
const appDataPath = path.join(tmpDir, 'app-data');
vi.mock('../../main-rpc', () => ({
mainRPC: {
getPath: async () => appDataPath,
},
}));
afterEach(async () => {
await removeWithRetry(tmpDir);
});
describe('migrateToSubdocAndReplaceDatabase', () => {
it('should migrate and replace the database', async () => {
const copiedDbFilePath = await copyToTemp(testDBFilePath);
await migrateToSubdocAndReplaceDatabase(copiedDbFilePath);
const db = new SqliteConnection(copiedDbFilePath);
await db.connect();
// check if db has two rows, one for root doc and one for subdoc
const rows = await db.getAllUpdates();
expect(rows.length).toBe(2);
const rootUpdate = rows.find(row => row.docId === undefined)!.data;
const subdocUpdate = rows.find(row => row.docId !== undefined)!.data;
expect(rootUpdate).toBeDefined();
expect(subdocUpdate).toBeDefined();
// apply updates
const rootDoc = new Y.Doc();
Y.applyUpdate(rootDoc, rootUpdate);
// check if root doc has one subdoc
expect(rootDoc.subdocs.size).toBe(1);
// populates subdoc
Y.applyUpdate(rootDoc.subdocs.values().next().value, subdocUpdate);
// check if root doc's meta is correct
const meta = rootDoc.getMap('meta').toJSON();
expect(meta.workspaceVersion).toBe(1);
expect(meta.name).toBe('hiw');
expect(meta.pages.length).toBe(1);
const pageMeta = meta.pages[0];
expect(pageMeta.title).toBe('Welcome to AFFiNEd');
// get the subdoc through id
const subDoc = rootDoc
.getMap('spaces')
.get(`space:${pageMeta.id}`) as Y.Doc;
expect(subDoc).toEqual(rootDoc.subdocs.values().next().value);
await db.close();
});
});

View File

@@ -5,7 +5,6 @@ import { v4 } from 'uuid';
import { afterEach, expect, test, vi } from 'vitest';
import * as Y from 'yjs';
import { removeWithRetry } from '../../../../tests/utils';
import { dbSubjects } from '../subjects';
const tmpDir = path.join(__dirname, 'tmp');
@@ -18,7 +17,7 @@ vi.doMock('../../main-rpc', () => ({
}));
afterEach(async () => {
await removeWithRetry(tmpDir);
await fs.remove(tmpDir);
});
let testYDoc: Y.Doc;

View File

@@ -119,8 +119,6 @@ export abstract class BaseSQLiteAdapter {
`[SQLiteAdapter][${this.role}] addUpdateToSQLite`,
'length:',
updates.length,
'docids',
updates.map(u => u.docId),
performance.now() - start,
'ms'
);

View File

@@ -1,55 +0,0 @@
import { resolve } from 'node:path';
import { migrateToSubdoc } from '@affine/env/blocksuite';
import { SqliteConnection } from '@affine/native';
import fs from 'fs-extra';
import { nanoid } from 'nanoid';
import * as Y from 'yjs';
import { mainRPC } from '../main-rpc';
export const migrateToSubdocAndReplaceDatabase = async (path: string) => {
const db = new SqliteConnection(path);
await db.connect();
const rows = await db.getAllUpdates();
const originalDoc = new Y.Doc();
// 1. apply all updates to the root doc
rows.forEach(row => {
Y.applyUpdate(originalDoc, row.data);
});
// 2. migrate using migrateToSubdoc
const migratedDoc = migrateToSubdoc(originalDoc);
// 3. replace db rows with the migrated doc
await replaceRows(db, migratedDoc, true);
// 4. close db
await db.close();
};
export const copyToTemp = async (path: string) => {
const tmpDirPath = resolve(await mainRPC.getPath('sessionData'), 'tmp');
const tmpFilePath = resolve(tmpDirPath, nanoid());
await fs.ensureDir(tmpDirPath);
await fs.copyFile(path, tmpFilePath);
return tmpFilePath;
};
async function replaceRows(
db: SqliteConnection,
doc: Y.Doc,
isRoot: boolean
): Promise<void> {
const migratedUpdates = Y.encodeStateAsUpdate(doc);
const docId = isRoot ? undefined : doc.guid;
const rows = [{ data: migratedUpdates, docId: docId }];
await db.replaceUpdates(docId, rows);
await Promise.all(
[...doc.subdocs].map(async subdoc => {
await replaceRows(db, subdoc, false);
})
);
}

View File

@@ -115,43 +115,19 @@ export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
}
setupListener(docId?: string) {
logger.debug(
'SecondaryWorkspaceSQLiteDB:setupListener',
this.workspaceId,
docId
);
const doc = this.getDoc(docId);
const upstreamDoc = this.upstream.getDoc(docId);
if (!doc || !upstreamDoc) {
logger.warn(
'[SecondaryWorkspaceSQLiteDB] setupListener: doc not found',
docId
);
if (!doc) {
return;
}
const onUpstreamUpdate = (update: Uint8Array, origin: YOrigin) => {
logger.debug(
'SecondaryWorkspaceSQLiteDB:onUpstreamUpdate',
origin,
this.workspaceId,
docId,
update.length
);
if (origin === 'renderer' || origin === 'self') {
if (origin === 'renderer') {
// update to upstream yDoc should be replicated to self yDoc
this.applyUpdate(update, 'upstream', docId);
}
};
const onSelfUpdate = async (update: Uint8Array, origin: YOrigin) => {
logger.debug(
'SecondaryWorkspaceSQLiteDB:onSelfUpdate',
origin,
this.workspaceId,
docId,
update.length
);
// for self update from upstream, we need to push it to external DB
if (origin === 'upstream') {
await this.addUpdateToUpdateQueue({
@@ -171,19 +147,15 @@ export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
});
};
doc.subdocs.forEach(subdoc => {
this.setupListener(subdoc.guid);
});
// listen to upstream update
this.upstream.yDoc.on('update', onUpstreamUpdate);
doc.on('update', onSelfUpdate);
doc.on('subdocs', onSubdocs);
this.yDoc.on('update', onSelfUpdate);
this.yDoc.on('subdocs', onSubdocs);
this.unsubscribers.add(() => {
this.upstream.yDoc.off('update', onUpstreamUpdate);
doc.off('update', onSelfUpdate);
doc.off('subdocs', onSubdocs);
this.yDoc.off('update', onSelfUpdate);
this.yDoc.off('subdocs', onSubdocs);
});
}
@@ -216,10 +188,7 @@ export class SecondaryWorkspaceSQLiteDB extends BaseSQLiteAdapter {
if (doc) {
Y.applyUpdate(this.yDoc, data, origin);
} else {
logger.warn(
'[SecondaryWorkspaceSQLiteDB] applyUpdate: doc not found',
docId
);
logger.warn('applyUpdate: doc not found', docId);
}
};
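
The listener wiring reshuffled in this hunk is an origin-tagged Yjs relay: renderer edits on the upstream doc are applied to the local doc, and local updates that originated upstream are queued for the external database. A minimal sketch of that relay, detached from the adapter classes (the origin tags are assumptions based on the hunk).

import * as Y from 'yjs';

type YOrigin = 'renderer' | 'upstream' | 'self' | 'external'; // assumed origin tags

const upstreamDoc = new Y.Doc();
const localDoc = new Y.Doc();
const pendingDbWrites: Uint8Array[] = []; // stand-in for the real update queue

// renderer edits against the upstream doc are replicated to the local doc
upstreamDoc.on('update', (update: Uint8Array, origin: YOrigin) => {
  if (origin === 'renderer') {
    Y.applyUpdate(localDoc, update, 'upstream');
  }
});

// local updates that came from upstream are pushed toward the external DB
localDoc.on('update', (update: Uint8Array, origin: YOrigin) => {
  if (origin === 'upstream') {
    pendingDbWrites.push(update);
  }
});

// simulate an edit coming from the renderer
upstreamDoc.transact(() => {
  upstreamDoc.getMap('meta').set('name', 'demo');
}, 'renderer');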

View File

@@ -18,10 +18,7 @@ export class WorkspaceSQLiteDB extends BaseSQLiteAdapter {
update$ = new Subject<void>();
constructor(
public override path: string,
public workspaceId: string
) {
constructor(public override path: string, public workspaceId: string) {
super(path);
}
@@ -52,21 +49,9 @@ export class WorkspaceSQLiteDB extends BaseSQLiteAdapter {
};
setupListener(docId?: string) {
logger.debug(
'WorkspaceSQLiteDB:setupListener',
this.workspaceId,
docId,
this.getWorkspaceName()
);
const doc = this.getDoc(docId);
if (doc) {
const onUpdate = async (update: Uint8Array, origin: YOrigin) => {
logger.debug(
'WorkspaceSQLiteDB:onUpdate',
this.workspaceId,
docId,
update.length
);
const insertRows = [{ data: update, docId }];
if (origin === 'renderer') {
await this.addUpdateToSQLite(insertRows);
@@ -80,11 +65,7 @@ export class WorkspaceSQLiteDB extends BaseSQLiteAdapter {
logger.debug('external update', this.workspaceId);
}
};
doc.subdocs.forEach(subdoc => {
this.setupListener(subdoc.guid);
});
const onSubdocs = ({ added }: { added: Set<Y.Doc> }) => {
logger.info('onSubdocs', this.workspaceId, docId, added);
added.forEach(subdoc => {
this.setupListener(subdoc.guid);
});
@@ -151,7 +132,7 @@ export class WorkspaceSQLiteDB extends BaseSQLiteAdapter {
if (doc) {
Y.applyUpdate(doc, data, origin);
} else {
logger.warn('[WorkspaceSQLiteDB] applyUpdate: doc not found', docId);
logger.warn('applyUpdate: doc not found', docId);
}
};

View File

@@ -1,11 +1,9 @@
import path from 'node:path';
import { ValidationResult } from '@affine/native';
import fs from 'fs-extra';
import { nanoid } from 'nanoid';
import { ensureSQLiteDB } from '../db/ensure-db';
import { copyToTemp, migrateToSubdocAndReplaceDatabase } from '../db/migration';
import type { WorkspaceSQLiteDB } from '../db/workspace-db-adapter';
import { logger } from '../logger';
import { mainRPC } from '../main-rpc';
@@ -57,7 +55,6 @@ const ErrorMessages = [
'DB_FILE_ALREADY_LOADED',
'DB_FILE_PATH_INVALID',
'DB_FILE_INVALID',
'DB_FILE_MIGRATION_FAILED',
'FILE_ALREADY_EXISTS',
'UNKNOWN_ERROR',
] as const;
@@ -194,42 +191,27 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
],
message: 'Load Workspace from an AFFiNE file',
}));
let originalPath = ret.filePaths?.[0];
if (ret.canceled || !originalPath) {
const filePath = ret.filePaths?.[0];
if (ret.canceled || !filePath) {
logger.info('loadDBFile canceled');
return { canceled: true };
}
// the imported file should not be in app data dir
if (originalPath.startsWith(await getWorkspacesBasePath())) {
if (filePath.startsWith(await getWorkspacesBasePath())) {
logger.warn('loadDBFile: db file in app data dir');
return { error: 'DB_FILE_PATH_INVALID' };
}
if (await dbFileAlreadyLoaded(originalPath)) {
if (await dbFileAlreadyLoaded(filePath)) {
logger.warn('loadDBFile: db file already loaded');
return { error: 'DB_FILE_ALREADY_LOADED' };
}
const { SqliteConnection } = await import('@affine/native');
const validationResult = await SqliteConnection.validate(originalPath);
if (validationResult === ValidationResult.MissingDocIdColumn) {
try {
const tmpDBPath = await copyToTemp(originalPath);
await migrateToSubdocAndReplaceDatabase(tmpDBPath);
originalPath = tmpDBPath;
} catch (error) {
logger.warn(`loadDBFile, migration failed: ${originalPath}`, error);
return { error: 'DB_FILE_MIGRATION_FAILED' };
}
}
if (
validationResult !== ValidationResult.MissingDocIdColumn &&
validationResult !== ValidationResult.Valid
) {
if (!(await SqliteConnection.validate(filePath))) {
// TODO: report invalid db file error?
return { error: 'DB_FILE_INVALID' }; // invalid db file
}
@@ -238,12 +220,14 @@ export async function loadDBFile(): Promise<LoadDBFileResult> {
const internalFilePath = await getWorkspaceDBPath(workspaceId);
await fs.ensureDir(await getWorkspacesBasePath());
await fs.copy(originalPath, internalFilePath);
logger.info(`loadDBFile, copy: ${originalPath} -> ${internalFilePath}`);
await fs.copy(filePath, internalFilePath);
logger.info(`loadDBFile, copy: ${filePath} -> ${internalFilePath}`);
await storeWorkspaceMeta(workspaceId, {
id: workspaceId,
mainDBPath: internalFilePath,
secondaryDBPath: filePath,
});
return { workspaceId };

View File

@@ -1,4 +1,3 @@
import type { RendererToHelper } from '@toeverything/infra/preload/electron';
import { AsyncCall } from 'async-call-rpc';
import { events, handlers } from './exposed';
@@ -31,7 +30,7 @@ function setupRendererConnection(rendererPort: Electron.MessagePortMain) {
});
}
);
const rpc = AsyncCall<RendererToHelper>(
const rpc = AsyncCall<PeersAPIs.RendererToHelper>(
Object.fromEntries(flattenedHandlers),
{
channel: {

View File

@@ -1,16 +1,12 @@
import type {
HelperToMain,
MainToHelper,
} from '@toeverything/infra/preload/electron';
import { AsyncCall } from 'async-call-rpc';
import { getExposedMeta } from './exposed';
const helperToMainServer: HelperToMain = {
const helperToMainServer: PeersAPIs.HelperToMain = {
getMeta: () => getExposedMeta(),
};
export const mainRPC = AsyncCall<MainToHelper>(helperToMainServer, {
export const mainRPC = AsyncCall<PeersAPIs.MainToHelper>(helperToMainServer, {
strict: {
unknownMessage: false,
},

View File

@@ -4,8 +4,6 @@ import fs from 'fs-extra';
import { v4 } from 'uuid';
import { afterEach, describe, expect, test, vi } from 'vitest';
import { removeWithRetry } from '../../../../tests/utils';
const tmpDir = path.join(__dirname, 'tmp');
const appDataPath = path.join(tmpDir, 'app-data');
@@ -22,7 +20,7 @@ vi.doMock('../../main-rpc', () => ({
}));
afterEach(async () => {
await removeWithRetry(tmpDir);
await fs.remove(tmpDir);
});
describe('list workspaces', () => {

View File

@@ -1,10 +1,10 @@
import assert from 'node:assert';
import path from 'node:path';
import { setTimeout } from 'node:timers/promises';
import fs from 'fs-extra';
import { afterEach, beforeEach, describe, expect, test, vi } from 'vitest';
import { removeWithRetry } from '../../../tests/utils';
import type { MainIPCHandlerMap } from '../exposed';
const registeredHandlers = new Map<
@@ -21,7 +21,7 @@ type WithoutFirstParameter<T> = T extends (_: any, ...args: infer P) => infer R
// however this is too hard to be typed correctly
async function dispatch<
T extends keyof MainIPCHandlerMap,
F extends keyof MainIPCHandlerMap[T],
F extends keyof MainIPCHandlerMap[T]
>(
namespace: T,
functionName: F,
@@ -121,7 +121,11 @@ beforeEach(async () => {
afterEach(async () => {
// reset registered handlers
registeredHandlers.get('before-quit')?.forEach(fn => fn());
await removeWithRetry(SESSION_DATA_PATH);
// wait for the db to be closed on Windows
if (process.platform === 'win32') {
await setTimeout(200);
}
await fs.remove(SESSION_DATA_PATH);
});
describe('UI handlers', () => {

View File

@@ -1,7 +1,7 @@
import { app, Menu } from 'electron';
import { revealLogFile } from '../logger';
import { checkForUpdates } from '../updater';
import { checkForUpdatesAndNotify } from '../updater';
import { isMacOS } from '../utils';
import { applicationMenuSubjects } from './subject';
@@ -125,7 +125,7 @@ export function createApplicationMenu() {
{
label: 'Check for Updates',
click: async () => {
await checkForUpdates(true);
await checkForUpdatesAndNotify(true);
},
},
],

View File

@@ -1,9 +0,0 @@
import { clipboard, type IpcMainInvokeEvent, nativeImage } from 'electron';
import type { NamespaceHandlers } from '../type';
export const clipboardHandlers = {
copyAsImageFromString: async (_: IpcMainInvokeEvent, dataURL: string) => {
clipboard.writeImage(nativeImage.createFromDataURL(dataURL));
},
} satisfies NamespaceHandlers;

View File

@@ -1,4 +1,4 @@
import { BrowserWindow, dialog } from 'electron';
import { BrowserWindow, dialog, shell } from 'electron';
import fs from 'fs-extra';
import { logger } from '../logger';
@@ -39,13 +39,6 @@ export async function savePDFFileAs(
await BrowserWindow.getFocusedWindow()
?.webContents.printToPDF({
margins: {
marginType: 'custom',
top: 0,
bottom: 0,
left: 0,
right: 0,
},
pageSize: 'A4',
printBackground: true,
landscape: false,
@@ -56,6 +49,8 @@ export async function savePDFFileAs(
logger.log(`Wrote PDF successfully to ${filePath}`);
});
});
await shell.openPath(filePath);
return { filePath };
} catch (err) {
logger.error('savePDFFileAs', err);
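
Condensed, the flow this hunk edits prints the focused window to PDF and then reveals the result; a standalone sketch with the save dialog and error handling left out.

import { BrowserWindow, shell } from 'electron';
import fs from 'fs-extra';

export async function exportFocusedWindowToPDF(filePath: string) {
  const win = BrowserWindow.getFocusedWindow();
  if (!win) return;
  // printToPDF resolves with the PDF contents as a Buffer
  const data = await win.webContents.printToPDF({
    pageSize: 'A4',
    printBackground: true,
    landscape: false,
  });
  await fs.writeFile(filePath, data);
  // open the exported file with the OS default PDF viewer
  await shell.openPath(filePath);
}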

View File

@@ -1,5 +1,4 @@
import type {
ClipboardHandlerManager,
DebugHandlerManager,
ExportHandlerManager,
UIHandlerManager,
@@ -8,7 +7,6 @@ import type {
} from '@toeverything/infra';
import { ipcMain } from 'electron';
import { clipboardHandlers } from './clipboard';
import { exportHandlers } from './export';
import { getLogFilePath, logger, revealLogFile } from './logger';
import { uiHandlers } from './ui';
@@ -28,10 +26,6 @@ type AllHandlers = {
Electron.IpcMainInvokeEvent,
DebugHandlerManager
>;
clipboard: UnwrapManagerHandlerToServerSide<
Electron.IpcMainInvokeEvent,
ClipboardHandlerManager
>;
export: UnwrapManagerHandlerToServerSide<
Electron.IpcMainInvokeEvent,
ExportHandlerManager
@@ -50,7 +44,6 @@ type AllHandlers = {
export const allHandlers = {
debug: debugHandlers,
ui: uiHandlers,
clipboard: clipboardHandlers,
export: exportHandlers,
updater: updaterHandlers,
} satisfies AllHandlers;

View File

@@ -1,9 +1,5 @@
import path from 'node:path';
import type {
HelperToMain,
MainToHelper,
} from '@toeverything/infra/preload/electron';
import { type _AsyncVersionOf, AsyncCall } from 'async-call-rpc';
import {
app,
@@ -40,7 +36,7 @@ class HelperProcessManager {
#process: UtilityProcess;
// a rpc server for the main process -> helper process
rpc?: _AsyncVersionOf<HelperToMain>;
rpc?: _AsyncVersionOf<PeersAPIs.HelperToMain>;
static instance = new HelperProcessManager();
@@ -90,13 +86,13 @@ class HelperProcessManager {
]);
const appMethods = pickAndBind(app, ['getPath']);
const mainToHelperServer: MainToHelper = {
const mainToHelperServer: PeersAPIs.MainToHelper = {
...dialogMethods,
...shellMethods,
...appMethods,
};
this.rpc = AsyncCall<HelperToMain>(mainToHelperServer, {
this.rpc = AsyncCall<PeersAPIs.HelperToMain>(mainToHelperServer, {
strict: {
// the channel is shared for other purposes as well so that we do not want to
// restrict to only JSONRPC messages

View File

@@ -1,5 +1,5 @@
import { app } from 'electron';
import { autoUpdater } from 'electron-updater';
import type { AppUpdater } from 'electron-updater';
import { z } from 'zod';
import { logger } from '../logger';
@@ -20,55 +20,56 @@ export const buildType = ReleaseTypeSchema.parse(envBuildType);
const mode = process.env.NODE_ENV;
const isDev = mode === 'development';
let _autoUpdater: AppUpdater | null = null;
export const quitAndInstall = async () => {
autoUpdater.quitAndInstall();
_autoUpdater?.quitAndInstall();
};
let lastCheckTime = 0;
export const checkForUpdates = async (force = true) => {
export const checkForUpdatesAndNotify = async (force = true) => {
if (!_autoUpdater) {
return void 0;
}
// check every 30 minutes (1800 seconds) at most
if (force || lastCheckTime + 1000 * 1800 < Date.now()) {
lastCheckTime = Date.now();
return await autoUpdater.checkForUpdates();
return await _autoUpdater.checkForUpdatesAndNotify();
}
return void 0;
};
export const registerUpdater = async () => {
// so we wrap it in a function
// eslint-disable-next-line @typescript-eslint/no-var-requires
const { autoUpdater } = require('electron-updater');
_autoUpdater = autoUpdater;
// skip auto update in dev mode
if (isDev) {
if (!_autoUpdater || isDev) {
return;
}
// TODO: support auto update on windows and linux
const allowAutoUpdate = isMacOS();
autoUpdater.logger = logger;
autoUpdater.autoDownload = false;
autoUpdater.allowPrerelease = buildType !== 'stable';
autoUpdater.autoInstallOnAppQuit = false;
autoUpdater.autoRunAppAfterInstall = true;
const feedUrl: Parameters<typeof autoUpdater.setFeedURL>[0] = {
_autoUpdater.autoDownload = false;
_autoUpdater.allowPrerelease = buildType !== 'stable';
_autoUpdater.autoInstallOnAppQuit = false;
_autoUpdater.autoRunAppAfterInstall = true;
_autoUpdater.setFeedURL({
channel: buildType,
provider: 'github',
repo: buildType !== 'internal' ? 'AFFiNE' : 'AFFiNE-Releases',
owner: 'toeverything',
releaseType: buildType === 'stable' ? 'release' : 'prerelease',
};
logger.debug('auto-updater feed config', feedUrl);
autoUpdater.setFeedURL(feedUrl);
});
// register events for checkForUpdatesAndNotify
autoUpdater.on('checking-for-update', () => {
logger.info('Checking for update');
});
autoUpdater.on('update-available', info => {
logger.info('Update available', info);
_autoUpdater.on('update-available', info => {
if (allowAutoUpdate) {
autoUpdater?.downloadUpdate().catch(e => {
_autoUpdater?.downloadUpdate().catch(e => {
logger.error('Failed to download update', e);
});
logger.info('Update available, downloading...', info);
@@ -78,14 +79,11 @@ export const registerUpdater = async () => {
allowAutoUpdate,
});
});
autoUpdater.on('update-not-available', info => {
logger.info('Update not available', info);
});
autoUpdater.on('download-progress', e => {
_autoUpdater.on('download-progress', e => {
logger.info(`Download progress: ${e.percent}`);
updaterSubjects.downloadProgress.next(e.percent);
});
autoUpdater.on('update-downloaded', e => {
_autoUpdater.on('update-downloaded', e => {
updaterSubjects.updateReady.next({
version: e.version,
allowAutoUpdate,
@@ -94,12 +92,12 @@ export const registerUpdater = async () => {
// updaterSubjects.clientDownloadProgress.next(100);
logger.info('Update downloaded, ready to install');
});
autoUpdater.on('error', e => {
_autoUpdater.on('error', e => {
logger.error('Error while updating client', e);
});
autoUpdater.forceDevUpdateConfig = isDev;
_autoUpdater.forceDevUpdateConfig = isDev;
app.on('activate', async () => {
await checkForUpdates(false);
await checkForUpdatesAndNotify(false);
});
};
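
For reference, the electron-updater surface touched here follows this overall shape; a sketch assuming a GitHub releases feed, with the channel/buildType switching simplified away.

import { app } from 'electron';
import type { AppUpdater } from 'electron-updater';

let updater: AppUpdater | null = null;

export function registerUpdater() {
  // electron-updater is CommonJS, so it is required lazily inside the function
  // eslint-disable-next-line @typescript-eslint/no-var-requires
  const { autoUpdater } = require('electron-updater');
  updater = autoUpdater;
  if (!updater) return;

  updater.autoDownload = false;
  updater.allowPrerelease = true; // e.g. for canary/beta builds
  updater.setFeedURL({
    provider: 'github',
    owner: 'toeverything',
    repo: 'AFFiNE',
  });

  updater.on('update-available', info => {
    console.log('update available', info.version);
  });
  updater.on('error', err => {
    console.error('updater error', err);
  });
}

app.whenReady().then(() => {
  registerUpdater();
  // downloads (when allowed) and shows a system notification once the update is ready
  return updater?.checkForUpdatesAndNotify();
});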

View File

@@ -1,7 +1,7 @@
import { app } from 'electron';
import type { NamespaceHandlers } from '../type';
import { checkForUpdates, quitAndInstall } from './electron-updater';
import { checkForUpdatesAndNotify, quitAndInstall } from './electron-updater';
export const updaterHandlers = {
currentVersion: async () => {
@@ -11,14 +11,7 @@ export const updaterHandlers = {
return quitAndInstall();
},
checkForUpdatesAndNotify: async () => {
const res = await checkForUpdates(true);
if (res) {
const { updateInfo } = res;
return {
updateInfo,
};
}
return null;
return checkForUpdatesAndNotify(true);
},
} satisfies NamespaceHandlers;

View File

@@ -1,38 +1,14 @@
// Please add modules to `external` in `rollupOptions` to avoid wrong bundling.
// NOTE: we will generate preload types from this file
import { AsyncCall, type EventBasedChannel } from 'async-call-rpc';
import type { app, dialog, shell } from 'electron';
import { ipcRenderer } from 'electron';
import { Subject } from 'rxjs';
export interface ExposedMeta {
handlers: [string, string[]][];
events: [string, string[]][];
}
type ExposedMeta = {
handlers: [namespace: string, handlerNames: string[]][];
events: [namespace: string, eventNames: string[]][];
};
// render <-> helper
export interface RendererToHelper {
postEvent: (channel: string, ...args: any[]) => void;
}
export interface HelperToRenderer {
[key: string]: (...args: any[]) => Promise<any>;
}
// helper <-> main
export interface HelperToMain {
getMeta: () => ExposedMeta;
}
export type MainToHelper = Pick<
typeof dialog & typeof shell & typeof app,
| 'showOpenDialog'
| 'showSaveDialog'
| 'openExternal'
| 'showItemInFolder'
| 'getPath'
>;
export function getElectronAPIs() {
export function getAffineAPIs() {
const mainAPIs = getMainAPIs();
const helperAPIs = getHelperAPIs();
@@ -150,13 +126,13 @@ function getHelperAPIs() {
return val ? JSON.parse(val) : null;
})();
const rendererToHelperServer: RendererToHelper = {
const rendererToHelperServer: PeersAPIs.RendererToHelper = {
postEvent: (channel, ...args) => {
events$.next({ channel, args });
},
};
const rpc = AsyncCall<HelperToRenderer>(rendererToHelperServer, {
const rpc = AsyncCall<PeersAPIs.HelperToRenderer>(rendererToHelperServer, {
channel: helperPort$.then(helperPort =>
createMessagePortChannel(helperPort)
),
@@ -181,10 +157,10 @@ function getHelperAPIs() {
};
const setup = (meta: ExposedMeta) => {
const { handlers, events } = meta;
const { handlers: handlersMeta, events: eventsMeta } = meta;
const helperHandlers = Object.fromEntries(
handlers.map(([namespace, functionNames]) => {
handlersMeta.map(([namespace, functionNames]) => {
return [
namespace,
Object.fromEntries(
@@ -197,7 +173,7 @@ function getHelperAPIs() {
);
const helperEvents = Object.fromEntries(
events.map(([namespace, eventNames]) => {
eventsMeta.map(([namespace, eventNames]) => {
return [
namespace,
Object.fromEntries(
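
The layer being retyped here is async-call-rpc: each side hands AsyncCall its own handler object plus a channel (a MessagePort wrapped as an EventBasedChannel) and receives a typed async proxy for the other side. A minimal sketch of that pattern; the interface and port setup below are illustrative.

import { AsyncCall, type EventBasedChannel } from 'async-call-rpc';

// adapt a MessagePort to async-call-rpc's channel interface
function createMessagePortChannel(port: MessagePort): EventBasedChannel {
  return {
    on(listener) {
      const handler = (e: MessageEvent) => listener(e.data);
      port.addEventListener('message', handler);
      port.start();
      return () => port.removeEventListener('message', handler);
    },
    send(data) {
      port.postMessage(data);
    },
  };
}

// hypothetical API implemented on the other side of the port
interface HelperToRenderer {
  getMeta: () => Promise<{ handlers: [string, string[]][]; events: [string, string[]][] }>;
}

// handlers the other side may call back into
const rendererToHelper = {
  postEvent: (channel: string, ...args: unknown[]) => {
    console.log('event from helper', channel, args.length);
  },
};

const { port1 } = new MessageChannel(); // stand-in for the port handed over by the main process
const rpc = AsyncCall<HelperToRenderer>(rendererToHelper, {
  channel: createMessagePortChannel(port1),
});

// every property access on `rpc` becomes an async call to the remote implementation
void rpc.getMeta().then(meta => console.log(meta.handlers));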

View File

@@ -1,15 +1,12 @@
import { contextBridge, ipcRenderer } from 'electron';
(async () => {
const { appInfo, getElectronAPIs } = await import(
'@toeverything/infra/preload/electron'
);
const { apis, events } = getElectronAPIs();
const { appInfo, getAffineAPIs } = await import('./affine-apis');
const { apis, events } = getAffineAPIs();
contextBridge.exposeInMainWorld('appInfo', appInfo);
contextBridge.exposeInMainWorld('apis', apis);
contextBridge.exposeInMainWorld('events', events);
contextBridge.exposeInMainWorld('platform', process.platform);
// Credit to microsoft/vscode
const globals = {

apps/electron/src/types.d.ts (vendored, new file, 35 lines)
View File

@@ -0,0 +1,35 @@
declare namespace PeersAPIs {
import type { app, dialog, shell } from 'electron';
interface ExposedMeta {
handlers: [string, string[]][];
events: [string, string[]][];
}
// render <-> helper
interface RendererToHelper {
postEvent: (channel: string, ...args: any[]) => void;
}
interface HelperToRenderer {
[key: string]: (...args: any[]) => Promise<any>;
}
// helper <-> main
interface HelperToMain {
getMeta: () => ExposedMeta;
}
type MainToHelper = Pick<
typeof dialog & typeof shell & typeof app,
| 'showOpenDialog'
| 'showSaveDialog'
| 'openExternal'
| 'showItemInFolder'
| 'getPath'
>;
// render <-> main
// these are handled via IPC
// TODO: fix type
}
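
Since PeersAPIs is an ambient namespace in a vendored .d.ts, the electron sources can reference these types without importing anything; a small usage sketch.

// anywhere under apps/electron/src, no import required for the ambient namespace
const helperServer: PeersAPIs.HelperToMain = {
  getMeta: () => ({ handlers: [], events: [] }),
};

const relay: PeersAPIs.RendererToHelper = {
  postEvent: (channel, ...args) => {
    console.log('relay', channel, args.length);
  },
};

console.log(helperServer.getMeta(), relay);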

View File

@@ -140,44 +140,3 @@ test('affine onboarding button', async ({ page }) => {
expect(await onboardingModal.isVisible()).toEqual(false);
});
test('windows only check', async ({ page }) => {
const windowOnlyUI = page.locator('[data-platform-target=win32]');
if (process.platform === 'win32') {
await expect(windowOnlyUI).toBeVisible();
} else {
await expect(windowOnlyUI).not.toBeVisible();
}
});
test('delete workspace', async ({ page }) => {
await page.getByTestId('current-workspace').click();
await page.getByTestId('add-or-new-workspace').click();
await page.getByTestId('new-workspace').click();
await page.getByTestId('create-workspace-default-location-button').click();
await page.getByTestId('create-workspace-input').type('Delete Me');
await page.getByTestId('create-workspace-create-button').click();
await page.getByTestId('create-workspace-continue-button').click();
await page.getByTestId('slider-bar-workspace-setting-button').click();
await page.getByTestId('current-workspace-label').click();
expect(await page.getByTestId('workspace-name-input').inputValue()).toBe(
'Delete Me'
);
const contentElement = await page.getByTestId('setting-modal-content');
const boundingBox = await contentElement.boundingBox();
if (!boundingBox) {
throw new Error('boundingBox is null');
}
await page.mouse.move(
boundingBox.x + boundingBox.width / 2,
boundingBox.y + boundingBox.height / 2
);
await page.mouse.wheel(0, 500);
await page.getByTestId('delete-workspace-button').click();
await page.getByTestId('delete-workspace-input').type('Delete Me');
await page.getByTestId('delete-workspace-confirm-button').click();
await page.waitForTimeout(1000);
expect(await page.getByTestId('workspace-name').textContent()).toBe(
'Demo Workspace'
);
});

View File

@@ -16,8 +16,6 @@ function generateUUID() {
return crypto.randomUUID();
}
type RoutePath = 'setting';
export const test = base.extend<{
page: Page;
electronApp: ElectronApplication;
@@ -26,8 +24,9 @@ export const test = base.extend<{
appData: string;
sessionData: string;
};
router: {
goto: (path: RoutePath) => Promise<void>;
workspace: {
// get current workspace
current: () => Promise<any>; // todo: type
};
}>({
page: async ({ electronApp }, use) => {
@@ -42,6 +41,10 @@ export const test = base.extend<{
});
});
}
const logFilePath = await page.evaluate(async () => {
// @ts-expect-error
return window.apis?.debug.logFilePath();
});
// wait for blocksuite to be loaded
await page.waitForSelector('v-line');
if (enableCoverage) {
@@ -68,6 +71,10 @@ export const test = base.extend<{
);
}
await page.close();
if (logFilePath) {
const logs = await fs.readFile(logFilePath, 'utf-8');
console.log(logs);
}
},
electronApp: async ({}, use) => {
// a random id to avoid conflicts between tests
@@ -117,4 +124,14 @@ export const test = base.extend<{
});
await use(appInfo);
},
workspace: async ({ page }, use) => {
await use({
current: async () => {
return await page.evaluate(async () => {
// @ts-expect-error
return globalThis.currentWorkspace;
});
},
});
},
});
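
The mechanism above is Playwright's test.extend; a stripped-down sketch of adding the workspace fixture on its own (the currentWorkspace global is taken from the hunk, the rest is illustrative).

import { test as base } from '@playwright/test';

export const test = base.extend<{
  workspace: { current: () => Promise<unknown> };
}>({
  workspace: async ({ page }, use) => {
    await use({
      // read the app's current workspace object exposed on globalThis
      current: () => page.evaluate(() => (globalThis as any).currentWorkspace),
    });
  },
});

test('workspace fixture is available', async ({ workspace }) => {
  console.log(await workspace.current());
});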

View File

@@ -1,26 +0,0 @@
import { setTimeout } from 'node:timers/promises';
import fs from 'fs-extra';
export async function removeWithRetry(
filePath: string,
maxRetries = 5,
delay = 500
) {
for (let i = 0; i < maxRetries; i++) {
try {
await fs.remove(filePath);
console.log(`File ${filePath} successfully deleted.`);
return true;
} catch (err: any) {
if (err.code === 'EBUSY' || err.code === 'EPERM') {
console.log(`File ${filePath} is busy or locked, retrying...`);
await setTimeout(delay);
} else {
console.error(`Failed to delete file ${filePath}:`, err);
}
}
}
// Add a return statement here to ensure that a value is always returned
return false;
}
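
Typical call site, matching the vitest teardowns elsewhere in this diff (the relative path is illustrative): on Windows the SQLite file can still be locked when a test ends, so the helper retries on EBUSY/EPERM instead of failing the run.

import path from 'node:path';
import { afterEach } from 'vitest';
import { removeWithRetry } from '../../tests/utils'; // hypothetical relative path

const tmpDir = path.join(__dirname, 'tmp');

afterEach(async () => {
  // retries a few times if the DB file handle has not been released yet
  await removeWithRetry(tmpDir);
});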

View File

@@ -5,7 +5,7 @@ import fs from 'fs-extra';
import { test } from './fixture';
test('check workspace has a DB file', async ({ appInfo, workspace }) => {
test.skip('check workspace has a DB file', async ({ appInfo, workspace }) => {
const w = await workspace.current();
const dbPath = path.join(
appInfo.sessionData,
@@ -19,11 +19,9 @@ test('check workspace has a DB file', async ({ appInfo, workspace }) => {
test.skip('move workspace db file', async ({ page, appInfo, workspace }) => {
const w = await workspace.current();
await page.getByTestId('slider-bar-workspace-setting-button').click();
await expect(page.getByTestId('setting-modal')).toBeVisible();
// goto workspace setting
await page.getByTestId('workspace-list-item').click();
const settingButton = page.getByTestId('slider-bar-workspace-setting-button');
// goto settings
await settingButton.click();
const tmpPath = path.join(appInfo.sessionData, w.id + '-tmp-dir');
@@ -44,26 +42,21 @@ test.skip('move workspace db file', async ({ page, appInfo, workspace }) => {
expect(files.some(f => f.endsWith('.affine'))).toBe(true);
});
test('export then add', async ({ page, appInfo, workspace }) => {
test.skip('export then add', async ({ page, appInfo, workspace }) => {
const w = await workspace.current();
await page.getByTestId('slider-bar-workspace-setting-button').click();
await expect(page.getByTestId('setting-modal')).toBeVisible();
const settingButton = page.getByTestId('slider-bar-workspace-setting-button');
// goto settings
await settingButton.click();
const originalId = w.id;
const newWorkspaceName = 'new-test-name';
// goto workspace setting
await page.getByTestId('workspace-list-item').click();
await page.waitForTimeout(500);
// change workspace name
await page.getByTestId('workspace-name-input').fill(newWorkspaceName);
await page.getByTestId('save-workspace-name').click();
await page.waitForSelector('text="Update workspace name success"');
await page.waitForTimeout(500);
await page.click('[data-tab-key="export"]');
const tmpPath = path.join(appInfo.sessionData, w.id + '-tmp.db');
@@ -80,11 +73,10 @@ test('export then add', async ({ page, appInfo, workspace }) => {
expect(await fs.exists(tmpPath)).toBe(true);
await page.getByTestId('modal-close-button').click();
// add workspace
// we are reusing the same db file so that we don't need to maintain one
// in the codebase
await page.getByTestId('current-workspace').click();
await page.getByTestId('add-or-new-workspace').click();

View File

@@ -14,7 +14,7 @@
"noImplicitOverride": true
},
"include": ["./src"],
"exclude": ["node_modules", "out", "dist", "**/__tests__/**/*"],
"exclude": ["node_modules", "out", "dist"],
"references": [
{
"path": "../../packages/plugin-infra"
@@ -25,16 +25,13 @@
{
"path": "../../packages/infra"
},
{
"path": "../../packages/env"
},
// Tests
{
"path": "./tsconfig.node.json"
},
{
"path": "./e2e/tsconfig.json"
"path": "./tests/tsconfig.json"
},
{ "path": "../../tests/kit" }
],

View File

@@ -7,8 +7,7 @@
"resolveJsonModule": true,
"moduleResolution": "Node",
"allowSyntheticDefaultImports": true,
"noEmit": false,
"outDir": "./lib/scripts"
"noEmit": false
},
"include": ["./scripts", "esbuild.main.config.ts", "esbuild.plugin.config.ts"]
"include": ["./scripts"]
}

View File

@@ -1,12 +0,0 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"composite": true
},
"include": ["**/__tests__/**/*", "./tests"],
"references": [
{
"path": "./tsconfig.json"
}
]
}

View File

@@ -1,2 +1,2 @@
SECRET_KEY="secret"
DATABASE_URL="postgresql://affine@localhost:5432/affine"
NEXTAUTH_URL="http://localhost:8080"

View File

@@ -1,7 +1,7 @@
{
"name": "@affine/server",
"private": true,
"version": "0.7.0-beta.0",
"version": "0.7.0-canary.24",
"description": "Affine Node.js server",
"type": "module",
"bin": {
@@ -15,19 +15,19 @@
"postinstall": "prisma generate"
},
"dependencies": {
"@apollo/server": "^4.7.5",
"@affine/storage": "workspace:*",
"@apollo/server": "^4.7.4",
"@auth/prisma-adapter": "^1.0.0",
"@aws-sdk/client-s3": "^3.363.0",
"@nestjs/apollo": "^12.0.7",
"@nestjs/common": "^10.0.4",
"@nestjs/core": "^10.0.4",
"@nestjs/graphql": "^12.0.7",
"@nestjs/platform-express": "^10.0.4",
"@aws-sdk/client-s3": "^3.359.0",
"@nestjs/apollo": "^12.0.3",
"@nestjs/common": "^10.0.3",
"@nestjs/core": "^10.0.3",
"@nestjs/graphql": "^12.0.3",
"@nestjs/platform-express": "^10.0.3",
"@node-rs/argon2": "^1.5.0",
"@node-rs/crc32": "^1.7.0",
"@node-rs/jsonwebtoken": "^0.2.0",
"@prisma/client": "^4.16.2",
"cookie-parser": "^1.4.6",
"@prisma/client": "^4.16.1",
"dotenv": "^16.3.1",
"express": "^4.18.2",
"graphql": "^16.7.1",
@@ -35,28 +35,23 @@
"graphql-upload": "^16.0.2",
"lodash-es": "^4.17.21",
"next-auth": "^4.22.1",
"nodemailer": "^6.9.3",
"parse-duration": "^1.1.0",
"prisma": "^4.16.2",
"prisma": "^4.16.1",
"reflect-metadata": "^0.1.13",
"rxjs": "^7.8.1",
"semver": "^7.5.4"
"rxjs": "^7.8.1"
},
"devDependencies": {
"@affine/storage": "workspace:*",
"@napi-rs/image": "^1.6.1",
"@nestjs/testing": "^10.0.4",
"@types/cookie-parser": "^1.4.3",
"@nestjs/testing": "^10.0.3",
"@types/express": "^4.17.17",
"@types/lodash-es": "^4.17.7",
"@types/node": "^18.16.19",
"@types/nodemailer": "^6.4.8",
"@types/node": "^18.16.18",
"@types/supertest": "^2.0.12",
"c8": "^8.0.0",
"nodemon": "^2.0.22",
"supertest": "^6.3.3",
"ts-node": "^10.9.1",
"typescript": "^5.1.6"
"typescript": "^5.1.5"
},
"nodemonConfig": {
"exec": "node",

View File

@@ -1,6 +1,5 @@
generator client {
provider = "prisma-client-js"
binaryTargets = ["native", "debian-openssl-3.0.x"]
provider = "prisma-client-js"
}
datasource db {

View File

@@ -1,13 +0,0 @@
import { Controller, Get } from '@nestjs/common';
import pkg from '../package.json' assert { type: 'json' };
@Controller('/')
export class AppController {
@Get()
hello() {
return {
message: `AFFiNE GraphQL server: ${pkg.version}`,
};
}
}

View File

@@ -1,6 +1,5 @@
import { Module } from '@nestjs/common';
import { AppController } from './app.controller';
import { ConfigModule } from './config';
import { GqlModule } from './graphql.module';
import { BusinessModules } from './modules';
@@ -15,6 +14,5 @@ import { StorageModule } from './storage';
StorageModule.forRoot(),
...BusinessModules,
],
controllers: [AppController],
})
export class AppModule {}

View File

@@ -106,7 +106,7 @@ export interface AFFiNEConfig {
/**
* which port the server will listen on
*
* @default 3010
* @default 3000
* @env AFFINE_SERVER_PORT
*/
port: number;
@@ -153,13 +153,23 @@ export interface AFFiNEConfig {
/**
* whether use remote object storage
*/
r2: {
enabled: boolean;
accountId: string;
bucket: string;
accessKeyId: string;
secretAccessKey: string;
};
enable: boolean;
/**
* used to store all uploaded builds and analysis reports
*
* the concrete type definition is not given here because different storage providers introduce
* significant differences in configuration
*
* @example
* {
* provider: 'aws',
* region: 'eu-west-1',
* aws_access_key_id: '',
* aws_secret_access_key: '',
* // other aws storage config...
* }
*/
config: Record<string, string>;
/**
* Only used when `enable` is `false`
*/
@@ -214,7 +224,6 @@ export interface AFFiNEConfig {
Record<
ExternalAccount,
{
enabled: boolean;
clientId: string;
clientSecret: string;
/**
@@ -233,11 +242,5 @@ export interface AFFiNEConfig {
}
>
>;
email: {
server: string;
port: number;
sender: string;
password: string;
};
};
}

Some files were not shown because too many files have changed in this diff.