Mirror of https://github.com/toeverything/AFFiNE.git (synced 2026-02-05 17:13:43 +00:00)
Compare commits
145 Commits
cd56d8a6e6 6ed5ec36bb 0fa917aabb eb79e6bdc9 cf52a43773
9203980a8c d34eb2cbe5 7f3f993ce4 df17001284 e400abf1f4
640aa00148 5ae8f029f7 a26e0b3ec9 f492b6711b 81aae61394
e08f58beea 4560819f76 193c197a54 449c0a38a7 8d141e5a81
e04911315f 75d58679b6 2e6386e4cf f345a61df0 a6420fcd76
fec406f7e8 769398591b e01569fff7 6bde2de783 3513ced6cb
8dc9addc40 9d9f89ef2e 6cfe5d4566 6032b432f8 5823787ded
b3f272ba70 a5df5a7c8a 90de90403a 4d4e4fc4e2 aa73e532d3
31faa93c71 def60f4c61 d15ec0ff77 d2acd0385a 1effb2f25f
9189d26332 79a8be7799 1a643cc70c 9321be3ff5 24dc3f95ff
4257b5f3a4 ea17e86032 48cd8999bd cdf1d9002e 79b39f14d2
619420cfd1 739e914b5f 5e9739eb3a 0a89b7f528 0a0ee37ac2
a143379161 8e7dedfe82 d25a8547d0 4d16229fea 99371be7e8
34ed8dd7a5 39b7b671b1 207b56d5af 9e94e7195b de951c8779
fd37026ca5 4fd5812a89 d01e987ecc d87c218c0b a5bf5cc244
16bcd6e76b 2e2ace8472 37cff8fe8d 70ab3b4916 f42ba54578
a67c8181fc 613efbded9 549419d102 21c42f8771 9012adda7a
fb442e9055 a231474dd2 833b42000b 7690c48710 579828a700
746db2ccfc eff344a9c1 c89ebab596 62f4421b7c 42383dbd29
120e7397ba 24123ad01c ad50320391 eb21a60dda c0e3be2d40
09d3b72358 246e16c6c0 dc279d062b 47d5f9e1c2 a226eb8d5f
908c4e1a6f 1d0bcc80a0 50010bd824 c0ede1326d 89197bacef
f97d323ab5 2acb219dcc 992ed89a89 d272d7922d c1cd1713b9
b20e91bee0 9a4e5ec8c3 2019838ae7 30ff25f400 e766208c18
8742f28148 cd291bb60e 62c0efcfd1 87248b3337 00c940f7df
931b459fbd 51e71f4a0a 9b631f2328 01f481a9b6 0177ab5c87
4db35d341c 3c4a803c97 05154dc7ca c90b477f60 6f18ddbe85
dde779a71d bd9f66fbc7 92f1f40bfa 48dc1049b3 9add530370
b77460d871 42db41776b 075439c74f fc6c553ece 59cb3d5df1
@@ -8,11 +8,5 @@ corepack prepare yarn@stable --activate
# install dependencies
yarn install

# Build Server Dependencies
yarn workspace @affine/server-native build

# Create database
yarn workspace @affine/server prisma db push

# Create user username: affine, password: affine
echo "INSERT INTO \"users\"(\"id\",\"name\",\"email\",\"email_verified\",\"created_at\",\"password\") VALUES('99f3ad04-7c9b-441e-a6db-79f73aa64db9','affine','affine@affine.pro','2024-02-26 15:54:16.974','2024-02-26 15:54:16.974+00','\$argon2id\$v=19\$m=19456,t=2,p=1\$esDS3QCHRH0Kmeh87YPm5Q\$9S+jf+xzw2Hicj6nkWltvaaaXX3dQIxAFwCfFa9o38A');" | yarn workspace @affine/server prisma db execute --stdin
yarn workspace @affine/server prisma db push
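Read top to bottom, these commands amount to a dev-container bootstrap. A minimal sketch, assuming the steps run in this order from the repository root (the hunk above interleaves old and new lines, so the exact resulting script may differ):

```bash
#!/usr/bin/env bash
# Sketch of the devcontainer server bootstrap implied by the hunk above.
# Assumes the commands run from the repository root, in the given order.
set -e

corepack prepare yarn@stable --activate

# install dependencies
yarn install

# build the native server dependency
yarn workspace @affine/server-native build

# create the database schema
yarn workspace @affine/server prisma db push

# seed the default dev user (username: affine, password: affine);
# the full INSERT statement is the one shown verbatim in the hunk above
echo "INSERT INTO \"users\"(...) VALUES(...);" \
  | yarn workspace @affine/server prisma db execute --stdin
```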
@@ -21,6 +21,5 @@
}
},
"updateContentCommand": "bash ./.devcontainer/build.sh",
"postCreateCommand": "bash ./.devcontainer/setup-user.sh",
"postStartCommand": ["yarn dev", "yarn workspace @affine/server dev"]
"postCreateCommand": "bash ./.devcontainer/setup-user.sh"
}
@@ -1,9 +1,7 @@
set -e

if [ -v GRAPHITE_TOKEN ];then
gt auth --token $GRAPHITE_TOKEN
fi

git fetch origin canary:canary --depth=1
git fetch
git branch canary -t origin/canary
gt init --trunk canary
@@ -12,4 +12,4 @@ static
web-static
public
packages/frontend/i18n/src/i18n-generated.ts
packages/frontend/templates/*.gen.ts
packages/frontend/templates/edgeless-templates.gen.ts
65
.eslintrc.js
@@ -1,4 +1,4 @@
const { join } = require('node:path');
const { resolve } = require('node:path');

const createPattern = packageName => [
{
@@ -31,6 +31,22 @@ const createPattern = packageName => [
message: 'Use `useNavigateHelper` instead',
importNames: ['useNavigate'],
},
{
group: ['next-auth/react'],
message: "Import hooks from 'use-current-user.tsx'",
// useSession is type unsafe
importNames: ['useSession'],
},
{
group: ['next-auth/react'],
message: "Import hooks from 'cloud-utils.ts'",
importNames: ['signIn', 'signOut'],
},
{
group: ['yjs'],
message: 'Do not use this API because it has a bug',
importNames: ['mergeUpdates'],
},
{
group: ['@affine/env/constant'],
message:
@@ -48,11 +64,14 @@ const allPackages = [
'packages/frontend/i18n',
'packages/frontend/native',
'packages/frontend/templates',
'packages/frontend/workspace',
'packages/common/debug',
'packages/common/env',
'packages/common/infra',
'packages/common/theme',
'packages/common/y-indexeddb',
'tools/cli',
'tests/storybook',
];

/**
@@ -85,17 +104,16 @@ const config = {
},
ecmaVersion: 'latest',
sourceType: 'module',
project: join(__dirname, 'tsconfig.eslint.json'),
project: resolve(__dirname, './tsconfig.eslint.json'),
},
plugins: [
'react',
'@typescript-eslint',
'simple-import-sort',
'sonarjs',
'import-x',
'i',
'unused-imports',
'unicorn',
'rxjs',
],
rules: {
'array-callback-return': 'error',
@@ -128,7 +146,6 @@ const config = {
'unused-imports/no-unused-imports': 'error',
'simple-import-sort/imports': 'error',
'simple-import-sort/exports': 'error',
'import-x/no-duplicates': 'error',
'@typescript-eslint/ban-ts-comment': [
'error',
{
@@ -162,6 +179,22 @@ const config = {
message: 'Use `useNavigateHelper` instead',
importNames: ['useNavigate'],
},
{
group: ['next-auth/react'],
message: "Import hooks from 'use-current-user.tsx'",
// useSession is type unsafe
importNames: ['useSession'],
},
{
group: ['next-auth/react'],
message: "Import hooks from 'cloud-utils.ts'",
importNames: ['signIn', 'signOut'],
},
{
group: ['yjs'],
message: 'Do not use this API because it has a bug',
importNames: ['mergeUpdates'],
},
],
},
],
@@ -201,21 +234,6 @@ const config = {
'sonarjs/no-collection-size-mischeck': 'error',
'sonarjs/no-useless-catch': 'error',
'sonarjs/no-identical-functions': 'error',
'rxjs/finnish': [
'error',
{
functions: false,
methods: false,
strict: true,
types: {
'^LiveData$': true,
// some yjs classes are Observables, but they don't need to be in Finnish notation
'^Doc$': false, // yjs Doc
'^Awareness$': false, // yjs Awareness
'^UndoManager$': false, // yjs UndoManager
},
},
],
},
overrides: [
{
@@ -231,7 +249,10 @@ const config = {
},
},
...allPackages.map(pkg => ({
files: [`${pkg}/src/**/*.ts`, `${pkg}/src/**/*.tsx`, `${pkg}/**/*.mjs`],
files: [`${pkg}/src/**/*.ts`, `${pkg}/src/**/*.tsx`],
parserOptions: {
project: resolve(__dirname, './tsconfig.eslint.json'),
},
rules: {
'@typescript-eslint/no-restricted-imports': [
'error',
@@ -248,7 +269,7 @@ const config = {
],
'@typescript-eslint/no-misused-promises': ['error'],
'@typescript-eslint/prefer-readonly': 'error',
'import-x/no-extraneous-dependencies': ['error'],
'i/no-extraneous-dependencies': ['error'],
'react-hooks/exhaustive-deps': [
'warn',
{
18
.github/ISSUE_TEMPLATE/BUG-REPORT.yml
vendored
@@ -7,8 +7,6 @@ body:
attributes:
value: |
Thanks for taking the time to fill out this bug report!
Check out this [link](https://github.com/toeverything/AFFiNE/blob/canary/docs/issue-triaging.md)
to learn how we manage issues and when your issue will be processed.
- type: textarea
id: what-happened
attributes:
@@ -43,14 +41,6 @@ body:
- Firefox
- Safari
- Other
- type: checkboxes
id: selfhost
attributes:
label: Are you self-hosting?
description: >
If you are self-hosting, please check the box and provide information about your setup.
options:
- label: 'Yes'
- type: textarea
id: logs
attributes:
@@ -63,3 +53,11 @@ body:
description: |
Links? References? Anything that will give us more context about the issue you are encountering!
Tip: You can attach images here
- type: checkboxes
attributes:
label: Are you willing to submit a PR?
description: >
(Optional) We encourage you to submit a [Pull Request](https://github.com/toeverything/affine/pulls) (PR) to help improve AFFiNE for everyone, especially if you have a good understanding of how to implement a fix or feature.
See the AFFiNE [Contributing Guide](https://github.com/toeverything/affine/blob/canary/CONTRIBUTING.md) to get started.
options:
- label: Yes I'd like to help by submitting a PR!
2
.github/actions/build-rust/action.yml
vendored
2
.github/actions/build-rust/action.yml
vendored
@@ -49,7 +49,7 @@ runs:
|
||||
- name: Build
|
||||
shell: bash
|
||||
run: |
|
||||
yarn workspace ${{ inputs.package }} nx build ${{ inputs.package }} -- --target ${{ inputs.target }} --use-napi-cross
|
||||
yarn workspace ${{ inputs.package }} nx build ${{ inputs.package }} --target ${{ inputs.target }} --use-napi-cross
|
||||
env:
|
||||
NX_CLOUD_ACCESS_TOKEN: ${{ inputs.nx_token }}
|
||||
DEBUG: 'napi:*'
|
||||
|
||||
2
.github/actions/deploy/action.yml
vendored
2
.github/actions/deploy/action.yml
vendored
@@ -24,7 +24,7 @@ runs:
|
||||
shell: bash
|
||||
run: |
|
||||
echo "GIT_SHORT_HASH=$(git rev-parse --short HEAD)" >> "$GITHUB_ENV"
|
||||
- uses: azure/setup-helm@v4
|
||||
- uses: azure/setup-helm@v3
|
||||
- id: auth
|
||||
uses: google-github-actions/auth@v2
|
||||
with:
|
||||
|
||||
13
.github/actions/deploy/deploy.mjs
vendored
13
.github/actions/deploy/deploy.mjs
vendored
@@ -13,10 +13,8 @@ const {
|
||||
R2_ACCOUNT_ID,
|
||||
R2_ACCESS_KEY_ID,
|
||||
R2_SECRET_ACCESS_KEY,
|
||||
ENABLE_CAPTCHA,
|
||||
CAPTCHA_TURNSTILE_SECRET,
|
||||
COPILOT_OPENAI_API_KEY,
|
||||
COPILOT_FAL_API_KEY,
|
||||
COPILOT_UNSPLASH_API_KEY,
|
||||
MAILER_SENDER,
|
||||
MAILER_USER,
|
||||
MAILER_PASSWORD,
|
||||
@@ -99,12 +97,8 @@ const createHelmCommand = ({ isDryRun }) => {
|
||||
`--set graphql.replicaCount=${graphqlReplicaCount}`,
|
||||
`--set-string graphql.image.tag="${imageTag}"`,
|
||||
`--set graphql.app.host=${host}`,
|
||||
`--set graphql.app.captcha.enabled=true`,
|
||||
`--set graphql.app.captcha.enabled=${ENABLE_CAPTCHA}`,
|
||||
`--set-string graphql.app.captcha.turnstile.secret="${CAPTCHA_TURNSTILE_SECRET}"`,
|
||||
`--set graphql.app.copilot.enabled=true`,
|
||||
`--set-string graphql.app.copilot.openai.key="${COPILOT_OPENAI_API_KEY}"`,
|
||||
`--set-string graphql.app.copilot.fal.key="${COPILOT_FAL_API_KEY}"`,
|
||||
`--set-string graphql.app.copilot.unsplash.key="${COPILOT_UNSPLASH_API_KEY}"`,
|
||||
`--set graphql.app.objectStorage.r2.enabled=true`,
|
||||
`--set-string graphql.app.objectStorage.r2.accountId="${R2_ACCOUNT_ID}"`,
|
||||
`--set-string graphql.app.objectStorage.r2.accessKeyId="${R2_ACCESS_KEY_ID}"`,
|
||||
@@ -117,9 +111,8 @@ const createHelmCommand = ({ isDryRun }) => {
|
||||
`--set-string graphql.app.oauth.google.clientSecret="${AFFINE_GOOGLE_CLIENT_SECRET}"`,
|
||||
`--set-string graphql.app.payment.stripe.apiKey="${STRIPE_API_KEY}"`,
|
||||
`--set-string graphql.app.payment.stripe.webhookKey="${STRIPE_WEBHOOK_KEY}"`,
|
||||
`--set graphql.app.experimental.enableJwstCodec=${namespace === 'dev'}`,
|
||||
`--set graphql.app.experimental.enableJwstCodec=true`,
|
||||
`--set graphql.app.features.earlyAccessPreview=false`,
|
||||
`--set graphql.app.features.syncClientVersionCheck=true`,
|
||||
`--set sync.replicaCount=${syncReplicaCount}`,
|
||||
`--set-string sync.image.tag="${imageTag}"`,
|
||||
...serviceAnnotations,
|
||||
|
||||
@@ -11,7 +11,7 @@ runs:
|
||||
- name: Download tar.gz
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: web
|
||||
name: core
|
||||
path: .
|
||||
|
||||
- name: Extract core artifacts
|
||||
4
.github/deployment/front/Dockerfile
vendored
4
.github/deployment/front/Dockerfile
vendored
@@ -1,6 +1,6 @@
|
||||
FROM openresty/openresty:1.25.3.1-0-buster
|
||||
FROM openresty/openresty:1.21.4.3-0-buster
|
||||
WORKDIR /app
|
||||
COPY ./packages/frontend/web/dist ./dist
|
||||
COPY ./packages/frontend/core/dist ./dist
|
||||
COPY ./.github/deployment/front/nginx.conf /usr/local/openresty/nginx/conf/nginx.conf
|
||||
COPY ./.github/deployment/front/affine.nginx.conf /etc/nginx/conf.d/affine.nginx.conf
|
||||
|
||||
|
||||
4
.github/deployment/node/Dockerfile
vendored
4
.github/deployment/node/Dockerfile
vendored
@@ -1,7 +1,7 @@
|
||||
FROM node:20-bookworm-slim
|
||||
FROM node:18-bookworm-slim
|
||||
|
||||
COPY ./packages/backend/server /app
|
||||
COPY ./packages/frontend/web/dist /app/static
|
||||
COPY ./packages/frontend/core/dist /app/static
|
||||
WORKDIR /app
|
||||
|
||||
RUN apt-get update && \
|
||||
|
||||
3
.github/deployment/self-host/compose.yaml
vendored
3
.github/deployment/self-host/compose.yaml
vendored
@@ -30,9 +30,6 @@ services:
|
||||
- NODE_ENV=production
|
||||
- AFFINE_ADMIN_EMAIL=${AFFINE_ADMIN_EMAIL}
|
||||
- AFFINE_ADMIN_PASSWORD=${AFFINE_ADMIN_PASSWORD}
|
||||
# Telemetry allows us to collect data on how you use the affine. This data will helps us improve the app and provide better features.
|
||||
# Uncomment next line if you wish to quit telemetry.
|
||||
# - TELEMETRY_ENABLE=false
|
||||
redis:
|
||||
image: redis
|
||||
container_name: affine_redis
|
||||
|
||||
2
.github/helm/affine/Chart.yaml
vendored
2
.github/helm/affine/Chart.yaml
vendored
@@ -3,4 +3,4 @@ name: affine
|
||||
description: AFFiNE cloud chart
|
||||
type: application
|
||||
version: 0.0.0
|
||||
appVersion: "0.14.0"
|
||||
appVersion: "0.12.0"
|
||||
|
||||
@@ -3,7 +3,7 @@ name: graphql
|
||||
description: AFFiNE GraphQL server
|
||||
type: application
|
||||
version: 0.0.0
|
||||
appVersion: "0.14.0"
|
||||
appVersion: "0.12.0"
|
||||
dependencies:
|
||||
- name: gcloud-sql-proxy
|
||||
version: 0.0.0
|
||||
|
||||
@@ -61,3 +61,18 @@ Create the name of the service account to use
|
||||
{{- default "default" .Values.serviceAccount.name }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
||||
{{- define "jwt.key" -}}
|
||||
{{- $secret := lookup "v1" "Secret" .Release.Namespace .Values.app.jwt.secretName -}}
|
||||
{{- if and $secret $secret.data.private -}}
|
||||
{{/*
|
||||
Reusing existing secret data
|
||||
*/}}
|
||||
key: {{ $secret.data.private }}
|
||||
{{- else -}}
|
||||
{{/*
|
||||
Generate new data
|
||||
*/}}
|
||||
key: {{ genPrivateKey "ecdsa" | b64enc }}
|
||||
{{- end -}}
|
||||
{{- end -}}
|
||||
|
||||
@@ -1,11 +0,0 @@
|
||||
{{- if .Values.app.copilot.enabled -}}
|
||||
apiVersion: v1
|
||||
kind: Secret
|
||||
metadata:
|
||||
name: "{{ .Values.app.copilot.secretName }}"
|
||||
type: Opaque
|
||||
data:
|
||||
openaiSecret: {{ .Values.app.copilot.openai.key | b64enc }}
|
||||
falSecret: {{ .Values.app.copilot.fal.key | b64enc }}
|
||||
unsplashSecret: {{ .Values.app.copilot.unsplash.key | b64enc }}
|
||||
{{- end }}
|
||||
@@ -28,10 +28,10 @@ spec:
|
||||
image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
|
||||
imagePullPolicy: {{ .Values.image.pullPolicy }}
|
||||
env:
|
||||
- name: AFFINE_PRIVATE_KEY
|
||||
- name: AUTH_PRIVATE_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: "{{ .Values.global.secret.secretName }}"
|
||||
name: "{{ .Values.app.jwt.secretName }}"
|
||||
key: key
|
||||
- name: NODE_ENV
|
||||
value: "{{ .Values.env }}"
|
||||
@@ -45,6 +45,8 @@ spec:
|
||||
value: "graphql"
|
||||
- name: AFFINE_ENV
|
||||
value: "{{ .Release.Namespace }}"
|
||||
- name: NEXTAUTH_URL
|
||||
value: "{{ .Values.global.ingress.host }}"
|
||||
- name: DATABASE_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
@@ -148,23 +150,6 @@ spec:
|
||||
name: "{{ .Values.app.captcha.secretName }}"
|
||||
key: turnstileSecret
|
||||
{{ end }}
|
||||
{{ if .Values.app.copilot.enabled }}
|
||||
- name: COPILOT_OPENAI_API_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: "{{ .Values.app.copilot.secretName }}"
|
||||
key: openaiSecret
|
||||
- name: COPILOT_FAL_API_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: "{{ .Values.app.copilot.secretName }}"
|
||||
key: falSecret
|
||||
- name: COPILOT_UNSPLASH_API_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: "{{ .Values.app.copilot.secretName }}"
|
||||
key: unsplashSecret
|
||||
{{ end }}
|
||||
{{ if .Values.app.oauth.google.enabled }}
|
||||
- name: OAUTH_GOOGLE_ENABLED
|
||||
value: "true"
|
||||
|
||||
7
.github/helm/affine/charts/graphql/templates/jwt-secret.yaml
vendored
Normal file
7
.github/helm/affine/charts/graphql/templates/jwt-secret.yaml
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
apiVersion: v1
|
||||
kind: Secret
|
||||
metadata:
|
||||
name: "{{ .Values.app.jwt.secretName }}"
|
||||
type: Opaque
|
||||
data:
|
||||
{{- ( include "jwt.key" . ) | indent 2 -}}
|
||||
@@ -1,18 +0,0 @@
|
||||
{{- $privateKey := default (genPrivateKey "ecdsa") .Values.global.secret.privateKey | b64enc | quote }}
|
||||
|
||||
{{- if not .Values.global.secret.privateKey }}
|
||||
{{- $existingKey := (lookup "v1" "Secret" .Release.Namespace .Values.global.secret.secretName) }}
|
||||
{{- if $existingKey }}
|
||||
{{- $privateKey = index $existingKey.data "key" }}
|
||||
{{- end -}}
|
||||
{{- end -}}
|
||||
|
||||
apiVersion: v1
|
||||
kind: Secret
|
||||
metadata:
|
||||
name: {{ .Values.global.secret.secretName }}
|
||||
annotations:
|
||||
"helm.sh/resource-policy": "keep"
|
||||
type: Opaque
|
||||
data:
|
||||
key: {{ $privateKey }}
|
||||
10
.github/helm/affine/charts/graphql/values.yaml
vendored
10
.github/helm/affine/charts/graphql/values.yaml
vendored
@@ -19,16 +19,15 @@ app:
|
||||
https: true
|
||||
doc:
|
||||
mergeInterval: "3000"
|
||||
jwt:
|
||||
secretName: jwt-private-key
|
||||
# base64 encoded ecdsa private key
|
||||
privateKey: ''
|
||||
captcha:
|
||||
enable: false
|
||||
secretName: captcha
|
||||
turnstile:
|
||||
secret: ''
|
||||
copilot:
|
||||
enable: false
|
||||
secretName: copilot
|
||||
openai:
|
||||
key: ''
|
||||
objectStorage:
|
||||
r2:
|
||||
enabled: false
|
||||
@@ -61,7 +60,6 @@ app:
|
||||
webhookKey: ''
|
||||
features:
|
||||
earlyAccessPreview: false
|
||||
syncClientVersionCheck: false
|
||||
|
||||
serviceAccount:
|
||||
create: true
|
||||
|
||||
2
.github/helm/affine/charts/sync/Chart.yaml
vendored
2
.github/helm/affine/charts/sync/Chart.yaml
vendored
@@ -3,7 +3,7 @@ name: sync
|
||||
description: AFFiNE Sync Server
|
||||
type: application
|
||||
version: 0.0.0
|
||||
appVersion: "0.14.0"
|
||||
appVersion: "0.12.0"
|
||||
dependencies:
|
||||
- name: gcloud-sql-proxy
|
||||
version: 0.0.0
|
||||
|
||||
@@ -32,11 +32,6 @@ spec:
|
||||
image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
|
||||
imagePullPolicy: {{ .Values.image.pullPolicy }}
|
||||
env:
|
||||
- name: AFFINE_PRIVATE_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: "{{ .Values.global.secret.secretName }}"
|
||||
key: key
|
||||
- name: NODE_ENV
|
||||
value: "{{ .Values.env }}"
|
||||
- name: NO_COLOR
|
||||
@@ -45,6 +40,8 @@ spec:
|
||||
value: "affine"
|
||||
- name: SERVER_FLAVOR
|
||||
value: "sync"
|
||||
- name: NEXTAUTH_URL
|
||||
value: "{{ .Values.global.ingress.host }}"
|
||||
- name: AFFINE_ENV
|
||||
value: "{{ .Release.Namespace }}"
|
||||
- name: DATABASE_PASSWORD
|
||||
|
||||
1
.github/helm/affine/charts/sync/values.yaml
vendored
1
.github/helm/affine/charts/sync/values.yaml
vendored
@@ -12,6 +12,7 @@ env: 'production'
|
||||
app:
|
||||
# AFFINE_SERVER_HOST
|
||||
host: '0.0.0.0'
|
||||
|
||||
serviceAccount:
|
||||
create: true
|
||||
annotations: {}
|
||||
|
||||
7
.github/helm/affine/templates/ingress.yaml
vendored
7
.github/helm/affine/templates/ingress.yaml
vendored
@@ -60,13 +60,6 @@ spec:
|
||||
name: affine-graphql
|
||||
port:
|
||||
number: {{ .Values.graphql.service.port }}
|
||||
- path: /oauth
|
||||
pathType: Prefix
|
||||
backend:
|
||||
service:
|
||||
name: affine-graphql
|
||||
port:
|
||||
number: {{ .Values.graphql.service.port }}
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
backend:
|
||||
|
||||
5
.github/helm/affine/values.yaml
vendored
5
.github/helm/affine/values.yaml
vendored
@@ -4,9 +4,6 @@ global:
|
||||
className: ''
|
||||
host: affine.pro
|
||||
tls: []
|
||||
secret:
|
||||
secretName: 'server-private-key'
|
||||
privateKey: ''
|
||||
database:
|
||||
user: 'postgres'
|
||||
url: 'pg-postgresql'
|
||||
@@ -35,8 +32,6 @@ graphql:
|
||||
service:
|
||||
type: ClusterIP
|
||||
port: 3000
|
||||
annotations:
|
||||
cloud.google.com/backend-config: '{"default": "affine-backendconfig"}'
|
||||
|
||||
sync:
|
||||
service:
|
||||
|
||||
19
.github/labeler.yml
vendored
19
.github/labeler.yml
vendored
@@ -29,6 +29,16 @@ mod:plugin-cli:
|
||||
- any-glob-to-any-file:
|
||||
- 'tools/plugin-cli/**/*'
|
||||
|
||||
mod:workspace:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- 'packages/common/workspace/**/*'
|
||||
|
||||
mod:workspace-impl:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- 'packages/frontend/workspace-impl/**/*'
|
||||
|
||||
mod:i18n:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
@@ -44,10 +54,10 @@ mod:component:
|
||||
- any-glob-to-any-file:
|
||||
- 'packages/frontend/component/**/*'
|
||||
|
||||
mod:server-native:
|
||||
mod:storage:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- 'packages/backend/native/**/*'
|
||||
- 'packages/backend/storage/**/*'
|
||||
|
||||
mod:native:
|
||||
- changed-files:
|
||||
@@ -69,6 +79,11 @@ rust:
|
||||
- '**/rust-toolchain.toml'
|
||||
- '**/rustfmt.toml'
|
||||
|
||||
package:y-indexeddb:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- 'packages/common/y-indexeddb/**/*'
|
||||
|
||||
app:core:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
|
||||
26
.github/renovate.json
vendored
26
.github/renovate.json
vendored
@@ -46,11 +46,6 @@
|
||||
"rangeStrategy": "replace",
|
||||
"groupName": "electron-forge"
|
||||
},
|
||||
{
|
||||
"matchPackageNames": ["oxlint"],
|
||||
"rangeStrategy": "replace",
|
||||
"groupName": "oxlint"
|
||||
},
|
||||
{
|
||||
"groupName": "blocksuite-canary",
|
||||
"matchPackagePatterns": ["^@blocksuite"],
|
||||
@@ -62,18 +57,13 @@
|
||||
"groupName": "all non-major dependencies",
|
||||
"groupSlug": "all-minor-patch",
|
||||
"matchPackagePatterns": ["*"],
|
||||
"excludePackagePatterns": ["^@blocksuite/", "oxlint"],
|
||||
"excludePackagePatterns": ["^@blocksuite/"],
|
||||
"matchUpdateTypes": ["minor", "patch"]
|
||||
},
|
||||
{
|
||||
"matchPackagePatterns": ["*"],
|
||||
"rangeStrategy": "replace",
|
||||
"excludePackagePatterns": ["^@blocksuite/"]
|
||||
},
|
||||
{
|
||||
"groupName": "rust toolchain",
|
||||
"matchManagers": ["custom.regex"],
|
||||
"matchPackageNames": ["rustc"]
|
||||
}
|
||||
],
|
||||
"commitMessagePrefix": "chore: ",
|
||||
@@ -84,17 +74,5 @@
|
||||
"lockFileMaintenance": {
|
||||
"enabled": true,
|
||||
"extends": ["schedule:weekly"]
|
||||
},
|
||||
"customManagers": [
|
||||
{
|
||||
"customType": "regex",
|
||||
"fileMatch": ["^rust-toolchain\\.toml?$"],
|
||||
"matchStrings": [
|
||||
"channel\\s*=\\s*\"(?<currentValue>\\d+\\.\\d+(\\.\\d+)?)\""
|
||||
],
|
||||
"depNameTemplate": "rustc",
|
||||
"packageNameTemplate": "rust-lang/rust",
|
||||
"datasourceTemplate": "github-releases"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
25
.github/workflows/build-selfhost-image.yml
vendored
25
.github/workflows/build-selfhost-image.yml
vendored
@@ -1,25 +0,0 @@
|
||||
name: Build Selfhost Image
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
flavor:
|
||||
description: 'Select distribution to build'
|
||||
type: choice
|
||||
default: canary
|
||||
options:
|
||||
- canary
|
||||
- beta
|
||||
- stable
|
||||
|
||||
permissions:
|
||||
contents: 'write'
|
||||
id-token: 'write'
|
||||
packages: 'write'
|
||||
|
||||
jobs:
|
||||
build-image:
|
||||
name: Build Image
|
||||
uses: ./.github/workflows/build-server-image.yml
|
||||
with:
|
||||
flavor: ${{ github.event.inputs.flavor }}
|
||||
192
.github/workflows/build-server-image.yml
vendored
192
.github/workflows/build-server-image.yml
vendored
@@ -1,192 +0,0 @@
|
||||
name: Build Images
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
flavor:
|
||||
type: string
|
||||
required: true
|
||||
|
||||
env:
|
||||
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
|
||||
|
||||
permissions:
|
||||
contents: 'write'
|
||||
id-token: 'write'
|
||||
packages: 'write'
|
||||
|
||||
jobs:
|
||||
build-server:
|
||||
name: Build Server
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Version
|
||||
id: version
|
||||
uses: ./.github/actions/setup-version
|
||||
- name: Setup Node.js
|
||||
uses: ./.github/actions/setup-node
|
||||
with:
|
||||
electron-install: false
|
||||
extra-flags: workspaces focus @affine/server
|
||||
- name: Build Server
|
||||
run: yarn workspace @affine/server build
|
||||
- name: Upload server dist
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: server-dist
|
||||
path: ./packages/backend/server/dist
|
||||
if-no-files-found: error
|
||||
|
||||
build-web-selfhost:
|
||||
name: Build @affine/web selfhost
|
||||
runs-on: ubuntu-latest
|
||||
environment: ${{ github.event.inputs.flavor }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Version
|
||||
id: version
|
||||
uses: ./.github/actions/setup-version
|
||||
- name: Setup Node.js
|
||||
uses: ./.github/actions/setup-node
|
||||
- name: Build Core
|
||||
run: yarn nx build @affine/web --skip-nx-cache
|
||||
env:
|
||||
BUILD_TYPE: ${{ github.event.inputs.flavor }}
|
||||
SHOULD_REPORT_TRACE: false
|
||||
PUBLIC_PATH: '/'
|
||||
SELF_HOSTED: true
|
||||
MIXPANEL_TOKEN: ${{ secrets.MIXPANEL_TOKEN }}
|
||||
- name: Download selfhost fonts
|
||||
run: node ./scripts/download-blocksuite-fonts.mjs
|
||||
- name: Upload web artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: selfhost-web
|
||||
path: ./packages/frontend/web/dist
|
||||
if-no-files-found: error
|
||||
|
||||
build-server-native:
|
||||
name: Build Server native - ${{ matrix.targets.name }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
targets:
|
||||
- name: x86_64-unknown-linux-gnu
|
||||
file: server-native.node
|
||||
- name: aarch64-unknown-linux-gnu
|
||||
file: server-native.arm64.node
|
||||
- name: armv7-unknown-linux-gnueabihf
|
||||
file: server-native.armv7.node
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Version
|
||||
id: version
|
||||
uses: ./.github/actions/setup-version
|
||||
- name: Setup Node.js
|
||||
uses: ./.github/actions/setup-node
|
||||
with:
|
||||
electron-install: false
|
||||
extra-flags: workspaces focus @affine/server-native
|
||||
- name: Build Rust
|
||||
uses: ./.github/actions/build-rust
|
||||
with:
|
||||
target: ${{ matrix.targets.name }}
|
||||
package: '@affine/server-native'
|
||||
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
|
||||
- name: Upload ${{ matrix.targets.file }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.targets.file }}
|
||||
path: ./packages/backend/native/server-native.node
|
||||
if-no-files-found: error
|
||||
|
||||
build-docker:
|
||||
name: Build Docker
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- build-server
|
||||
- build-web-selfhost
|
||||
- build-server-native
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Download server dist
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: server-dist
|
||||
path: ./packages/backend/server/dist
|
||||
- name: Download server-native.node
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: server-native.node
|
||||
path: ./packages/backend/server
|
||||
- name: Download server-native.node arm64
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: server-native.arm64.node
|
||||
path: ./packages/backend/native
|
||||
- name: Download server-native.node arm64
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: server-native.armv7.node
|
||||
path: .
|
||||
- name: move server-native files
|
||||
run: |
|
||||
mv ./packages/backend/native/server-native.node ./packages/backend/server/server-native.arm64.node
|
||||
mv server-native.node ./packages/backend/server/server-native.armv7.node
|
||||
- name: Setup env
|
||||
run: |
|
||||
echo "GIT_SHORT_HASH=$(git rev-parse --short HEAD)" >> "$GITHUB_ENV"
|
||||
if [ -z "${{ inputs.flavor }}" ]
|
||||
then
|
||||
echo "RELEASE_FLAVOR=canary" >> "$GITHUB_ENV"
|
||||
else
|
||||
echo "RELEASE_FLAVOR=${{ inputs.flavor }}" >> "$GITHUB_ENV"
|
||||
fi
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
logout: false
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
# setup node without cache configuration
|
||||
# Prisma cache is not compatible with docker build cache
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: '.nvmrc'
|
||||
registry-url: https://npm.pkg.github.com
|
||||
scope: '@toeverything'
|
||||
|
||||
- name: Download selfhost web artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: selfhost-web
|
||||
path: ./packages/frontend/web/dist
|
||||
|
||||
- name: Install Node.js dependencies
|
||||
run: |
|
||||
yarn config set --json supportedArchitectures.cpu '["x64", "arm64", "arm"]'
|
||||
yarn config set --json supportedArchitectures.libc '["glibc"]'
|
||||
yarn workspaces focus @affine/server --production
|
||||
|
||||
- name: Generate Prisma client
|
||||
run: yarn workspace @affine/server prisma generate
|
||||
|
||||
- name: Build graphql Dockerfile
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
pull: true
|
||||
platforms: linux/amd64,linux/arm64,linux/arm/v7
|
||||
provenance: true
|
||||
file: .github/deployment/node/Dockerfile
|
||||
tags: ghcr.io/toeverything/affine-graphql:${{env.RELEASE_FLAVOR}}-${{ env.GIT_SHORT_HASH }},ghcr.io/toeverything/affine-graphql:${{env.RELEASE_FLAVOR}}
|
||||
119
.github/workflows/build-test.yml
vendored
119
.github/workflows/build-test.yml
vendored
@@ -4,8 +4,6 @@ on:
|
||||
push:
|
||||
branches:
|
||||
- canary
|
||||
- beta
|
||||
- stable
|
||||
- v[0-9]+.[0-9]+.x-staging
|
||||
- v[0-9]+.[0-9]+.x
|
||||
paths-ignore:
|
||||
@@ -118,7 +116,6 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
DISTRIBUTION: browser
|
||||
IN_CI_TEST: true
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
@@ -193,7 +190,7 @@ jobs:
|
||||
run: yarn nx test:coverage @affine/monorepo
|
||||
|
||||
- name: Upload unit test coverage results
|
||||
uses: codecov/codecov-action@v4
|
||||
uses: codecov/codecov-action@v3
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
files: ./.coverage/store/lcov.info
|
||||
@@ -212,8 +209,8 @@ jobs:
|
||||
spec:
|
||||
- { os: ubuntu-latest, target: x86_64-unknown-linux-gnu }
|
||||
- { os: windows-latest, target: x86_64-pc-windows-msvc }
|
||||
- { os: macos-14, target: x86_64-apple-darwin }
|
||||
- { os: macos-14, target: aarch64-apple-darwin }
|
||||
- { os: macos-latest, target: x86_64-apple-darwin }
|
||||
- { os: macos-latest, target: aarch64-apple-darwin }
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@@ -241,8 +238,8 @@ jobs:
|
||||
path: ./packages/frontend/native/${{ steps.filename.outputs.filename }}
|
||||
if-no-files-found: error
|
||||
|
||||
build-server-native:
|
||||
name: Build Server native
|
||||
build-storage:
|
||||
name: Build Storage
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CARGO_PROFILE_RELEASE_DEBUG: '1'
|
||||
@@ -251,23 +248,23 @@ jobs:
|
||||
- name: Setup Node.js
|
||||
uses: ./.github/actions/setup-node
|
||||
with:
|
||||
extra-flags: workspaces focus @affine/server-native
|
||||
extra-flags: workspaces focus @affine/storage
|
||||
electron-install: false
|
||||
- name: Build Rust
|
||||
uses: ./.github/actions/build-rust
|
||||
with:
|
||||
target: 'x86_64-unknown-linux-gnu'
|
||||
package: '@affine/server-native'
|
||||
package: '@affine/storage'
|
||||
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
|
||||
- name: Upload server-native.node
|
||||
- name: Upload storage.node
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: server-native.node
|
||||
path: ./packages/backend/native/server-native.node
|
||||
name: storage.node
|
||||
path: ./packages/backend/storage/storage.node
|
||||
if-no-files-found: error
|
||||
|
||||
build-web:
|
||||
name: Build @affine/web
|
||||
build-core:
|
||||
name: Build @affine/core
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
@@ -277,24 +274,22 @@ jobs:
|
||||
with:
|
||||
electron-install: false
|
||||
full-cache: true
|
||||
- name: Build Web
|
||||
- name: Build Core
|
||||
# always skip cache because its fast, and cache configuration is always changing
|
||||
run: yarn nx build @affine/web --skip-nx-cache
|
||||
env:
|
||||
DISTRIBUTION: 'desktop'
|
||||
- name: zip web
|
||||
run: tar -czf dist.tar.gz --directory=packages/frontend/electron/renderer/dist .
|
||||
- name: Upload web artifact
|
||||
run: yarn nx build @affine/core --skip-nx-cache
|
||||
- name: zip core
|
||||
run: tar -czf dist.tar.gz --directory=packages/frontend/core/dist .
|
||||
- name: Upload core artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: web
|
||||
name: core
|
||||
path: dist.tar.gz
|
||||
if-no-files-found: error
|
||||
|
||||
server-test:
|
||||
name: Server Test
|
||||
runs-on: ubuntu-latest
|
||||
needs: build-server-native
|
||||
needs: build-storage
|
||||
env:
|
||||
NODE_ENV: test
|
||||
DISTRIBUTION: browser
|
||||
@@ -324,10 +319,10 @@ jobs:
|
||||
electron-install: false
|
||||
full-cache: true
|
||||
|
||||
- name: Download server-native.node
|
||||
- name: Download storage.node
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: server-native.node
|
||||
name: storage.node
|
||||
path: ./packages/backend/server
|
||||
|
||||
- name: Initialize database
|
||||
@@ -338,11 +333,17 @@
env:
PGPASSWORD: affine

- name: Run init-db script
- name: Generate prisma client
run: |
yarn workspace @affine/server exec prisma generate
yarn workspace @affine/server exec prisma db push
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine

- name: Run init-db script
run: |
yarn workspace @affine/server data-migration run
yarn workspace @affine/server exec node --loader ts-node/esm/transpile-only ./scripts/init-db.ts
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
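For reference, a minimal local sketch of what these two CI steps perform, assuming Postgres is already running and reachable at the DATABASE_URL shown above:

```bash
#!/usr/bin/env bash
# Sketch of the database bootstrap the two CI steps above run.
# Assumes a local Postgres instance with the affine user/database already created.
set -e
export DATABASE_URL=postgresql://affine:affine@localhost:5432/affine

# generate the Prisma client and push the schema
yarn workspace @affine/server exec prisma generate
yarn workspace @affine/server exec prisma db push

# run data migrations, then seed via the init-db script
yarn workspace @affine/server data-migration run
yarn workspace @affine/server exec node --loader ts-node/esm/transpile-only ./scripts/init-db.ts
```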
@@ -353,7 +354,7 @@ jobs:
|
||||
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
|
||||
|
||||
- name: Upload server test coverage results
|
||||
uses: codecov/codecov-action@v4
|
||||
uses: codecov/codecov-action@v3
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
files: ./packages/backend/server/.coverage/lcov.info
|
||||
@@ -367,7 +368,6 @@ jobs:
|
||||
env:
|
||||
DISTRIBUTION: browser
|
||||
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
|
||||
IN_CI_TEST: true
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
@@ -383,7 +383,7 @@ jobs:
|
||||
yarn workspace @affine/electron build:dev
|
||||
xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- yarn workspace @affine-test/affine-desktop-cloud e2e
|
||||
needs:
|
||||
- build-server-native
|
||||
- build-storage
|
||||
- build-native
|
||||
services:
|
||||
postgres:
|
||||
@@ -411,10 +411,10 @@ jobs:
|
||||
playwright-install: true
|
||||
hard-link-nm: false
|
||||
|
||||
- name: Download server-native.node
|
||||
- name: Download storage.node
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: server-native.node
|
||||
name: storage.node
|
||||
path: ./packages/backend/server
|
||||
|
||||
- name: Download affine.linux-x64-gnu.node
|
||||
@@ -431,11 +431,17 @@ jobs:
|
||||
env:
|
||||
PGPASSWORD: affine
|
||||
|
||||
- name: Run init-db script
|
||||
- name: Generate prisma client
|
||||
run: |
|
||||
yarn workspace @affine/server exec prisma generate
|
||||
yarn workspace @affine/server exec prisma db push
|
||||
env:
|
||||
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
|
||||
|
||||
- name: Run init-db script
|
||||
run: |
|
||||
yarn workspace @affine/server data-migration run
|
||||
yarn workspace @affine/server exec node --loader ts-node/esm/transpile-only ./scripts/init-db.ts
|
||||
|
||||
- name: ${{ matrix.tests.name }}
|
||||
run: |
|
||||
@@ -456,21 +462,22 @@ jobs:
|
||||
runs-on: ${{ matrix.spec.os }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
# all combinations: macos-latest x64, macos-latest arm64, windows-latest x64, ubuntu-latest x64
|
||||
matrix:
|
||||
spec:
|
||||
- {
|
||||
os: macos-14,
|
||||
os: macos-latest,
|
||||
platform: macos,
|
||||
arch: x64,
|
||||
target: x86_64-apple-darwin,
|
||||
test: false,
|
||||
test: true,
|
||||
}
|
||||
- {
|
||||
os: macos-14,
|
||||
os: macos-latest,
|
||||
platform: macos,
|
||||
arch: arm64,
|
||||
target: aarch64-apple-darwin,
|
||||
test: true,
|
||||
test: false,
|
||||
}
|
||||
- {
|
||||
os: ubuntu-latest,
|
||||
@@ -487,7 +494,7 @@ jobs:
|
||||
test: true,
|
||||
}
|
||||
needs:
|
||||
- build-web
|
||||
- build-core
|
||||
- build-native
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@@ -518,8 +525,8 @@ jobs:
|
||||
shell: bash
|
||||
run: yarn workspace @affine/electron vitest
|
||||
|
||||
- name: Download web artifact
|
||||
uses: ./.github/actions/download-web
|
||||
- name: Download core artifact
|
||||
uses: ./.github/actions/download-core
|
||||
with:
|
||||
path: packages/frontend/electron/resources/web-static
|
||||
|
||||
@@ -527,7 +534,7 @@ jobs:
|
||||
run: yarn workspace @affine/electron build
|
||||
|
||||
- name: Run desktop tests
|
||||
if: ${{ matrix.spec.os == 'ubuntu-latest' }}
|
||||
if: ${{ matrix.spec.test && matrix.spec.os == 'ubuntu-latest' }}
|
||||
run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- yarn workspace @affine-test/affine-desktop e2e
|
||||
|
||||
- name: Run desktop tests
|
||||
@@ -535,22 +542,15 @@ jobs:
|
||||
run: yarn workspace @affine-test/affine-desktop e2e
|
||||
|
||||
- name: Make bundle
|
||||
if: ${{ matrix.spec.target == 'aarch64-apple-darwin' }}
|
||||
if: ${{ matrix.spec.os == 'macos-latest' && matrix.spec.arch == 'arm64' }}
|
||||
env:
|
||||
SKIP_BUNDLE: true
|
||||
SKIP_WEB_BUILD: true
|
||||
HOIST_NODE_MODULES: 1
|
||||
run: yarn workspace @affine/electron package --platform=darwin --arch=arm64
|
||||
|
||||
- name: Make AppImage
|
||||
run: yarn workspace @affine/electron make --platform=linux --arch=x64
|
||||
if: ${{ matrix.spec.target == 'x86_64-unknown-linux-gnu' }}
|
||||
env:
|
||||
SKIP_WEB_BUILD: 1
|
||||
HOIST_NODE_MODULES: 1
|
||||
|
||||
- name: Output check
|
||||
if: ${{ matrix.spec.os == 'macos-14' && matrix.spec.arch == 'arm64' }}
|
||||
if: ${{ matrix.spec.os == 'macos-latest' && matrix.spec.arch == 'arm64' }}
|
||||
run: |
|
||||
yarn workspace @affine/electron exec node --loader ts-node/esm/transpile-only ./scripts/macos-arm64-output-check.ts
|
||||
|
||||
@@ -561,22 +561,3 @@ jobs:
|
||||
name: test-results-e2e-${{ matrix.spec.os }}-${{ matrix.spec.arch }}
|
||||
path: ./test-results
|
||||
if-no-files-found: ignore
|
||||
|
||||
test-done:
|
||||
needs:
|
||||
- analyze
|
||||
- lint
|
||||
- check-yarn-binary
|
||||
- e2e-test
|
||||
- e2e-migration-test
|
||||
- unit-test
|
||||
- server-test
|
||||
- server-e2e-test
|
||||
- desktop-test
|
||||
if: always()
|
||||
runs-on: ubuntu-latest
|
||||
name: 3, 2, 1 Launch
|
||||
steps:
|
||||
- run: exit 1
|
||||
# Thank you, next https://github.com/vercel/next.js/blob/canary/.github/workflows/build_and_test.yml#L379
|
||||
if: ${{ always() && (contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled')) }}
|
||||
|
||||
5
.github/workflows/deploy-automatically.yml
vendored
5
.github/workflows/deploy-automatically.yml
vendored
@@ -7,11 +7,6 @@ on:
|
||||
schedule:
|
||||
- cron: '0 9 * * *'
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
actions: write
|
||||
|
||||
jobs:
|
||||
dispatch-deploy:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
200
.github/workflows/deploy.yml
vendored
200
.github/workflows/deploy.yml
vendored
@@ -13,22 +13,33 @@ on:
|
||||
- stable
|
||||
- internal
|
||||
env:
|
||||
APP_NAME: affine
|
||||
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
|
||||
|
||||
permissions:
|
||||
contents: 'write'
|
||||
id-token: 'write'
|
||||
packages: 'write'
|
||||
|
||||
jobs:
|
||||
build-server-image:
|
||||
name: Build Server Image
|
||||
uses: ./.github/workflows/build-server-image.yml
|
||||
with:
|
||||
flavor: ${{ github.event.inputs.flavor }}
|
||||
|
||||
build-web:
|
||||
name: Build @affine/web
|
||||
build-server:
|
||||
name: Build Server
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Version
|
||||
id: version
|
||||
uses: ./.github/actions/setup-version
|
||||
- name: Setup Node.js
|
||||
uses: ./.github/actions/setup-node
|
||||
with:
|
||||
electron-install: false
|
||||
extra-flags: workspaces focus @affine/server
|
||||
- name: Build Server
|
||||
run: yarn workspace @affine/server build
|
||||
- name: Upload server dist
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: server-dist
|
||||
path: ./packages/backend/server/dist
|
||||
if-no-files-found: error
|
||||
build-core:
|
||||
name: Build @affine/core
|
||||
runs-on: ubuntu-latest
|
||||
environment: ${{ github.event.inputs.flavor }}
|
||||
steps:
|
||||
@@ -39,7 +50,7 @@ jobs:
|
||||
- name: Setup Node.js
|
||||
uses: ./.github/actions/setup-node
|
||||
- name: Build Core
|
||||
run: yarn nx build @affine/web --skip-nx-cache
|
||||
run: yarn nx build @affine/core --skip-nx-cache
|
||||
env:
|
||||
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
|
||||
R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
|
||||
@@ -49,30 +60,118 @@ jobs:
|
||||
TRACE_REPORT_ENDPOINT: ${{ secrets.TRACE_REPORT_ENDPOINT }}
|
||||
CAPTCHA_SITE_KEY: ${{ secrets.CAPTCHA_SITE_KEY }}
|
||||
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
|
||||
SENTRY_PROJECT: 'affine-web'
|
||||
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
|
||||
PERFSEE_TOKEN: ${{ secrets.PERFSEE_TOKEN }}
|
||||
MIXPANEL_TOKEN: ${{ secrets.MIXPANEL_TOKEN }}
|
||||
- name: Upload web artifact
|
||||
- name: Upload core artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: web
|
||||
path: ./packages/frontend/web/dist
|
||||
name: core
|
||||
path: ./packages/frontend/core/dist
|
||||
if-no-files-found: error
|
||||
|
||||
build-frontend-image:
|
||||
name: Build Frontend Image
|
||||
build-core-selfhost:
|
||||
name: Build @affine/core
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- build-web
|
||||
environment: ${{ github.event.inputs.flavor }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Download web artifact
|
||||
- name: Setup Version
|
||||
id: version
|
||||
uses: ./.github/actions/setup-version
|
||||
- name: Setup Node.js
|
||||
uses: ./.github/actions/setup-node
|
||||
- name: Build Core
|
||||
run: yarn nx build @affine/core --skip-nx-cache
|
||||
env:
|
||||
BUILD_TYPE: ${{ github.event.inputs.flavor }}
|
||||
SHOULD_REPORT_TRACE: false
|
||||
PUBLIC_PATH: '/'
|
||||
- name: Download selfhost fonts
|
||||
run: node ./scripts/download-blocksuite-fonts.mjs
|
||||
- name: Upload core artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: selfhost-core
|
||||
path: ./packages/frontend/core/dist
|
||||
if-no-files-found: error
|
||||
|
||||
build-storage:
|
||||
name: Build Storage - ${{ matrix.targets.name }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
targets:
|
||||
- name: x86_64-unknown-linux-gnu
|
||||
file: storage.node
|
||||
- name: aarch64-unknown-linux-gnu
|
||||
file: storage.arm64.node
|
||||
- name: armv7-unknown-linux-gnueabihf
|
||||
file: storage.armv7.node
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Version
|
||||
id: version
|
||||
uses: ./.github/actions/setup-version
|
||||
- name: Setup Node.js
|
||||
uses: ./.github/actions/setup-node
|
||||
with:
|
||||
electron-install: false
|
||||
extra-flags: workspaces focus @affine/storage
|
||||
- name: Build Rust
|
||||
uses: ./.github/actions/build-rust
|
||||
with:
|
||||
target: ${{ matrix.targets.name }}
|
||||
package: '@affine/storage'
|
||||
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
|
||||
- name: Upload ${{ matrix.targets.file }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.targets.file }}
|
||||
path: ./packages/backend/storage/storage.node
|
||||
if-no-files-found: error
|
||||
|
||||
build-docker:
|
||||
name: Build Docker
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- build-server
|
||||
- build-core
|
||||
- build-core-selfhost
|
||||
- build-storage
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Download core artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: web
|
||||
path: ./packages/frontend/web/dist
|
||||
name: core
|
||||
path: ./packages/frontend/core/dist
|
||||
- name: Download server dist
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: server-dist
|
||||
path: ./packages/backend/server/dist
|
||||
- name: Download storage.node
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: storage.node
|
||||
path: ./packages/backend/server
|
||||
- name: Download storage.node arm64
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: storage.arm64.node
|
||||
path: ./packages/backend/storage
|
||||
- name: Download storage.node arm64
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: storage.armv7.node
|
||||
path: .
|
||||
- name: move storage files
|
||||
run: |
|
||||
mv ./packages/backend/storage/storage.node ./packages/backend/server/storage.arm64.node
|
||||
mv storage.node ./packages/backend/server/storage.armv7.node
|
||||
- name: Setup env
|
||||
run: |
|
||||
echo "GIT_SHORT_HASH=$(git rev-parse --short HEAD)" >> "$GITHUB_ENV"
|
||||
@@ -82,6 +181,7 @@ jobs:
|
||||
else
|
||||
echo "RELEASE_FLAVOR=${{ inputs.flavor }}" >> "$GITHUB_ENV"
|
||||
fi
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
@@ -104,13 +204,53 @@ jobs:
|
||||
file: .github/deployment/front/Dockerfile
|
||||
tags: ghcr.io/toeverything/affine-front:${{env.RELEASE_FLAVOR}}-${{ env.GIT_SHORT_HASH }},ghcr.io/toeverything/affine-front:${{env.RELEASE_FLAVOR}}
|
||||
|
||||
# setup node without cache configuration
|
||||
# Prisma cache is not compatible with docker build cache
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: '.nvmrc'
|
||||
registry-url: https://npm.pkg.github.com
|
||||
scope: '@toeverything'
|
||||
|
||||
- name: Remove core dist
|
||||
run: rm -rf ./packages/frontend/core/dist
|
||||
|
||||
- name: Download selfhost core artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: selfhost-core
|
||||
path: ./packages/frontend/core/dist
|
||||
|
||||
- name: Install Node.js dependencies
|
||||
run: |
|
||||
yarn config set --json supportedArchitectures.cpu '["x64", "arm64", "arm"]'
|
||||
yarn config set --json supportedArchitectures.libc '["glibc"]'
|
||||
yarn workspaces focus @affine/server --production
|
||||
|
||||
- name: Generate Prisma client
|
||||
run: yarn workspace @affine/server prisma generate
|
||||
|
||||
- name: Build graphql Dockerfile
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
pull: true
|
||||
platforms: linux/amd64,linux/arm64,linux/arm/v7
|
||||
provenance: true
|
||||
file: .github/deployment/node/Dockerfile
|
||||
tags: ghcr.io/toeverything/affine-graphql:${{env.RELEASE_FLAVOR}}-${{ env.GIT_SHORT_HASH }},ghcr.io/toeverything/affine-graphql:${{env.RELEASE_FLAVOR}}
|
||||
|
||||
deploy:
|
||||
name: Deploy to cluster
|
||||
if: ${{ github.event_name == 'workflow_dispatch' }}
|
||||
environment: ${{ github.event.inputs.flavor }}
|
||||
permissions:
|
||||
contents: 'write'
|
||||
id-token: 'write'
|
||||
needs:
|
||||
- build-frontend-image
|
||||
- build-server-image
|
||||
- build-docker
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@@ -133,10 +273,8 @@ jobs:
|
||||
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
|
||||
R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
|
||||
R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
|
||||
ENABLE_CAPTCHA: true
|
||||
CAPTCHA_TURNSTILE_SECRET: ${{ secrets.CAPTCHA_TURNSTILE_SECRET }}
|
||||
COPILOT_OPENAI_API_KEY: ${{ secrets.COPILOT_OPENAI_API_KEY }}
|
||||
COPILOT_FAL_API_KEY: ${{ secrets.COPILOT_FAL_API_KEY }}
|
||||
COPILOT_UNSPLASH_API_KEY: ${{ secrets.COPILOT_UNSPLASH_API_KEY }}
|
||||
MAILER_SENDER: ${{ secrets.OAUTH_EMAIL_SENDER }}
|
||||
MAILER_USER: ${{ secrets.OAUTH_EMAIL_LOGIN }}
|
||||
MAILER_PASSWORD: ${{ secrets.OAUTH_EMAIL_PASSWORD }}
|
||||
|
||||
2
.github/workflows/helm-releaser.yml
vendored
2
.github/workflows/helm-releaser.yml
vendored
@@ -24,7 +24,7 @@ jobs:
|
||||
token: ${{ secrets.HELM_RELEASER_TOKEN }}
|
||||
|
||||
- name: Install Helm
|
||||
uses: azure/setup-helm@v4
|
||||
uses: azure/setup-helm@v3
|
||||
|
||||
- name: Install chart releaser
|
||||
run: |
|
||||
|
||||
2
.github/workflows/pr-auto-assign.yml
vendored
2
.github/workflows/pr-auto-assign.yml
vendored
@@ -9,4 +9,4 @@ jobs:
|
||||
add-reviews:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: kentaro-m/auto-assign-action@v2.0.0
|
||||
- uses: kentaro-m/auto-assign-action@v1.2.5
|
||||
|
||||
5
.github/workflows/pr-title-lint.yml
vendored
5
.github/workflows/pr-title-lint.yml
vendored
@@ -25,7 +25,4 @@ jobs:
|
||||
node-version-file: '.nvmrc'
|
||||
- name: Install dependencies
|
||||
run: yarn workspaces focus @affine/commitlint-config
|
||||
- name: Check PR title
|
||||
env:
|
||||
TITLE: ${{ github.event.pull_request.title }}
|
||||
run: echo "$TITLE" | yarn workspace @affine/commitlint-config commitlint -g ./.commitlintrc.json
|
||||
- run: echo "${{ github.event.pull_request.title }}" | yarn workspace @affine/commitlint-config commitlint -g ./.commitlintrc.json
|
||||
|
||||
51
.github/workflows/publish-storybook.yml
vendored
Normal file
51
.github/workflows/publish-storybook.yml
vendored
Normal file
@@ -0,0 +1,51 @@
|
||||
name: Publish Storybook
|
||||
|
||||
env:
|
||||
NODE_OPTIONS: --max-old-space-size=4096
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
branches:
|
||||
- canary
|
||||
pull_request:
|
||||
branches:
|
||||
- canary
|
||||
paths-ignore:
|
||||
- README.md
|
||||
- .github/**
|
||||
- packages/backend/server
|
||||
- packages/frontend/electron
|
||||
- '!.github/workflows/publish-storybook.yml'
|
||||
|
||||
jobs:
|
||||
publish-storybook:
|
||||
name: Publish Storybook
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.merge_commit_sha }}
|
||||
# This is required to fetch all commits for chromatic
|
||||
fetch-depth: 0
|
||||
- name: Setup Node.js
|
||||
uses: ./.github/actions/setup-node
|
||||
with:
|
||||
electron-install: false
|
||||
- uses: chromaui/action-next@v1
|
||||
with:
|
||||
workingDir: tests/storybook
|
||||
buildScriptName: build
|
||||
exitOnceUploaded: true
|
||||
onlyChanged: false
|
||||
diagnostics: true
|
||||
env:
|
||||
CHROMATIC_PROJECT_TOKEN: ${{ secrets.CHROMATIC_PROJECT_TOKEN }}
|
||||
NODE_OPTIONS: ${{ env.NODE_OPTIONS }}
|
||||
- uses: actions/upload-artifact@v4
|
||||
if: always()
|
||||
with:
|
||||
name: chromatic-build-artifacts-${{ github.run_id }}
|
||||
path: |
|
||||
chromatic-diagnostics.json
|
||||
**/build-storybook.log
|
||||
51
.github/workflows/publish-ui-storybook.yml
vendored
Normal file
51
.github/workflows/publish-ui-storybook.yml
vendored
Normal file
@@ -0,0 +1,51 @@
|
||||
name: Publish UI Storybook
|
||||
|
||||
env:
|
||||
NODE_OPTIONS: --max-old-space-size=4096
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
branches:
|
||||
- canary
|
||||
pull_request:
|
||||
branches:
|
||||
- canary
|
||||
paths-ignore:
|
||||
- README.md
|
||||
- .github/**
|
||||
- packages/backend/server
|
||||
- packages/frontend/electron
|
||||
- '!.github/workflows/publish-storybook.yml'
|
||||
|
||||
jobs:
|
||||
publish-ui-storybook:
|
||||
name: Publish UI Storybook
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.merge_commit_sha }}
|
||||
# This is required to fetch all commits for chromatic
|
||||
fetch-depth: 0
|
||||
- name: Setup Node.js
|
||||
uses: ./.github/actions/setup-node
|
||||
with:
|
||||
electron-install: false
|
||||
- uses: chromaui/action-next@v1
|
||||
with:
|
||||
workingDir: packages/frontend/component
|
||||
buildScriptName: build:storybook
|
||||
exitOnceUploaded: true
|
||||
onlyChanged: false
|
||||
diagnostics: true
|
||||
env:
|
||||
CHROMATIC_PROJECT_TOKEN: ${{ secrets.CHROMATIC_UI_PROJECT_TOKEN }}
|
||||
NODE_OPTIONS: ${{ env.NODE_OPTIONS }}
|
||||
- uses: actions/upload-artifact@v4
|
||||
if: always()
|
||||
with:
|
||||
name: chromatic-build-artifacts-${{ github.run_id }}
|
||||
path: |
|
||||
chromatic-diagnostics.json
|
||||
**/build-storybook.log
|
||||
@@ -7,11 +7,6 @@ on:
|
||||
schedule:
|
||||
- cron: '0 9 * * *'
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
actions: write
|
||||
|
||||
jobs:
|
||||
dispatch-release-desktop:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
80
.github/workflows/release-desktop.yml
vendored
80
.github/workflows/release-desktop.yml
vendored
@@ -53,28 +53,30 @@ jobs:
|
||||
run: yarn workspace @affine/electron generate-assets
|
||||
env:
|
||||
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
|
||||
SENTRY_PROJECT: 'affine'
|
||||
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
|
||||
RELEASE_VERSION: ${{ steps.version.outputs.APP_VERSION }}
|
||||
SKIP_PLUGIN_BUILD: 'true'
|
||||
SKIP_NX_CACHE: 'true'
|
||||
MIXPANEL_TOKEN: ${{ secrets.MIXPANEL_TOKEN }}
|
||||
|
||||
- name: Upload web artifact
|
||||
- name: Upload core artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: web
|
||||
name: core
|
||||
path: packages/frontend/electron/resources/web-static
|
||||
|
||||
make-distribution:
|
||||
strategy:
|
||||
# all combinations: macos-latest x64, macos-latest arm64, ubuntu-latest x64
|
||||
# For windows, we need a separate approach
|
||||
matrix:
|
||||
spec:
|
||||
- runner: macos-14
|
||||
- runner: macos-latest
|
||||
platform: darwin
|
||||
arch: x64
|
||||
target: x86_64-apple-darwin
|
||||
- runner: macos-14
|
||||
- runner: macos-latest
|
||||
platform: darwin
|
||||
arch: arm64
|
||||
target: aarch64-apple-darwin
|
||||
@@ -89,11 +91,6 @@ jobs:
|
||||
APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
|
||||
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
|
||||
SKIP_GENERATE_ASSETS: 1
|
||||
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
|
||||
SENTRY_PROJECT: 'affine'
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
|
||||
MIXPANEL_TOKEN: ${{ secrets.MIXPANEL_TOKEN }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Version
|
||||
@@ -115,7 +112,7 @@ jobs:
|
||||
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: web
|
||||
name: core
|
||||
path: packages/frontend/electron/resources/web-static
|
||||
|
||||
- name: Build Desktop Layers
|
||||
@@ -128,35 +125,25 @@ jobs:
|
||||
p12-file-base64: ${{ secrets.CERTIFICATES_P12 }}
|
||||
p12-password: ${{ secrets.CERTIFICATES_P12_PASSWORD }}
|
||||
|
||||
- name: Install fuse on Linux (for patching AppImage)
|
||||
if: ${{ matrix.spec.platform == 'linux' }}
|
||||
run: |
|
||||
sudo add-apt-repository universe
|
||||
sudo apt install libfuse2 -y
|
||||
|
||||
- name: make
|
||||
run: yarn workspace @affine/electron make --platform=${{ matrix.spec.platform }} --arch=${{ matrix.spec.arch }}
|
||||
env:
|
||||
SKIP_PLUGIN_BUILD: 1
|
||||
SKIP_WEB_BUILD: 1
|
||||
HOIST_NODE_MODULES: 1
|
||||
|
||||
- name: signing DMG
|
||||
if: ${{ matrix.spec.platform == 'darwin' }}
|
||||
run: |
|
||||
codesign --force --sign "Developer ID Application: TOEVERYTHING PTE. LTD." packages/frontend/electron/out/${{ env.BUILD_TYPE }}/make/AFFiNE.dmg
|
||||
|
||||
- name: Save artifacts (mac)
|
||||
if: ${{ matrix.spec.platform == 'darwin' }}
|
||||
run: |
|
||||
mkdir -p builds
|
||||
mv packages/frontend/electron/out/*/make/*.dmg ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.dmg
|
||||
mv packages/frontend/electron/out/*/make/zip/darwin/${{ matrix.spec.arch }}/*.zip ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.zip
|
||||
mv packages/frontend/electron/out/*/make/*.dmg ./builds/affine-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.dmg
|
||||
mv packages/frontend/electron/out/*/make/zip/darwin/${{ matrix.spec.arch }}/*.zip ./builds/affine-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.zip
|
||||
- name: Save artifacts (linux)
|
||||
if: ${{ matrix.spec.platform == 'linux' }}
|
||||
run: |
|
||||
mkdir -p builds
|
||||
mv packages/frontend/electron/out/*/make/zip/linux/x64/*.zip ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-linux-x64.zip
|
||||
mv packages/frontend/electron/out/*/make/*.AppImage ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-linux-x64.appimage
|
||||
mv packages/frontend/electron/out/*/make/zip/linux/x64/*.zip ./builds/affine-${{ env.BUILD_TYPE }}-linux-x64.zip
|
||||
mv packages/frontend/electron/out/*/make/*.AppImage ./builds/affine-${{ env.BUILD_TYPE }}-linux-x64.AppImage
|
||||
|
||||
- name: Upload Artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
@@ -166,6 +153,8 @@ jobs:
|
||||
|
||||
package-distribution-windows:
|
||||
strategy:
|
||||
# all combinations: macos-latest x64, macos-latest arm64, ubuntu-latest x64
|
||||
# For windows, we need a separate approach
|
||||
matrix:
|
||||
spec:
|
||||
- runner: windows-latest
|
||||
@@ -178,11 +167,6 @@ jobs:
|
||||
FILES_TO_BE_SIGNED: ${{ steps.get_files_to_be_signed.outputs.FILES_TO_BE_SIGNED }}
|
||||
env:
|
||||
SKIP_GENERATE_ASSETS: 1
|
||||
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
|
||||
SENTRY_PROJECT: 'affine'
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
|
||||
MIXPANEL_TOKEN: ${{ secrets.MIXPANEL_TOKEN }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Version
|
||||
@@ -203,7 +187,7 @@ jobs:
|
||||
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: web
|
||||
name: core
|
||||
path: packages/frontend/electron/resources/web-static
|
||||
|
||||
- name: Build Desktop Layers
|
||||
@@ -212,6 +196,7 @@ jobs:
|
||||
- name: package
|
||||
run: yarn workspace @affine/electron package --platform=${{ matrix.spec.platform }} --arch=${{ matrix.spec.arch }}
|
||||
env:
|
||||
SKIP_PLUGIN_BUILD: 1
|
||||
SKIP_WEB_BUILD: 1
|
||||
HOIST_NODE_MODULES: 1
|
||||
|
||||
@@ -243,6 +228,8 @@ jobs:
|
||||
make-windows-installer:
|
||||
needs: sign-packaged-artifacts-windows
|
||||
strategy:
|
||||
# all combinations: macos-latest x64, macos-latest arm64, ubuntu-latest x64
|
||||
# For windows, we need a separate approach
|
||||
matrix:
|
||||
spec:
|
||||
- runner: windows-latest
|
||||
@@ -257,10 +244,6 @@ jobs:
|
||||
- name: Setup Node.js
|
||||
timeout-minutes: 10
|
||||
uses: ./.github/actions/setup-node
|
||||
with:
|
||||
extra-flags: workspaces focus @affine/electron @affine/monorepo
|
||||
hard-link-nm: false
|
||||
nmHoistingLimits: workspaces
|
||||
- name: Download and overwrite packaged artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
@@ -272,9 +255,6 @@ jobs:
|
||||
- name: Make squirrel.windows installer
|
||||
run: yarn workspace @affine/electron make-squirrel --platform=${{ matrix.spec.platform }} --arch=${{ matrix.spec.arch }}
|
||||
|
||||
- name: Make nsis.windows installer
|
||||
run: yarn workspace @affine/electron make-nsis --platform=${{ matrix.spec.platform }} --arch=${{ matrix.spec.arch }}
|
||||
|
||||
- name: Zip artifacts for faster upload
|
||||
run: Compress-Archive -CompressionLevel Fastest -Path packages/frontend/electron/out/${{ env.BUILD_TYPE }}/make/* -DestinationPath archive.zip
|
||||
|
||||
@@ -299,8 +279,10 @@ jobs:
|
||||
artifact-name: installer-win32-x64
|
||||
|
||||
finalize-installer-windows:
|
||||
needs: [sign-installer-artifacts-windows, before-make]
|
||||
needs: sign-installer-artifacts-windows
|
||||
strategy:
|
||||
# all combinations: macos-latest x64, macos-latest arm64, ubuntu-latest x64
|
||||
# For windows, we need a separate approach
|
||||
matrix:
|
||||
spec:
|
||||
- runner: windows-latest
|
||||
@@ -320,9 +302,9 @@ jobs:
|
||||
- name: Save artifacts
|
||||
run: |
|
||||
mkdir -p builds
|
||||
mv packages/frontend/electron/out/*/make/zip/win32/x64/AFFiNE*-win32-x64-*.zip ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-x64.zip
|
||||
mv packages/frontend/electron/out/*/make/squirrel.windows/x64/*.exe ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-x64.exe
|
||||
mv packages/frontend/electron/out/*/make/nsis.windows/x64/*.exe ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-x64.nsis.exe
|
||||
mv packages/frontend/electron/out/*/make/zip/win32/x64/AFFiNE*-win32-x64-*.zip ./builds/affine-${{ env.BUILD_TYPE }}-windows-x64.zip
|
||||
mv packages/frontend/electron/out/*/make/squirrel.windows/x64/*.exe ./builds/affine-${{ env.BUILD_TYPE }}-windows-x64.exe
|
||||
mv packages/frontend/electron/out/*/make/squirrel.windows/x64/*.msi ./builds/affine-${{ env.BUILD_TYPE }}-windows-x64.msi
|
||||
|
||||
- name: Upload Artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
@@ -338,7 +320,7 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: web
|
||||
name: core
|
||||
path: web-static
|
||||
- name: Zip web-static
|
||||
run: zip -r web-static.zip web-static
|
||||
@@ -372,7 +354,7 @@ jobs:
|
||||
RELEASE_VERSION: ${{ needs.before-make.outputs.RELEASE_VERSION }}
|
||||
- name: Create Release Draft
|
||||
if: ${{ github.ref_type == 'tag' }}
|
||||
uses: softprops/action-gh-release@v2
|
||||
uses: softprops/action-gh-release@v1
|
||||
with:
|
||||
name: ${{ needs.before-make.outputs.RELEASE_VERSION }}
|
||||
body: ''
|
||||
@@ -383,12 +365,12 @@ jobs:
|
||||
./*.zip
|
||||
./*.dmg
|
||||
./*.exe
|
||||
./*.appimage
|
||||
./*.AppImage
|
||||
./*.apk
|
||||
./*.yml
|
||||
- name: Create Nightly Release Draft
|
||||
if: ${{ github.ref_type == 'branch' }}
|
||||
uses: softprops/action-gh-release@v2
|
||||
uses: softprops/action-gh-release@v1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}
|
||||
with:
|
||||
@@ -405,6 +387,6 @@ jobs:
|
||||
./*.zip
|
||||
./*.dmg
|
||||
./*.exe
|
||||
./*.appimage
|
||||
./*.AppImage
|
||||
./*.apk
|
||||
./*.yml
|
||||
|
||||
2 .github/workflows/workers.yml vendored
@@ -15,7 +15,7 @@ jobs:
    steps:
      - uses: actions/checkout@v4
      - name: Publish
        uses: cloudflare/wrangler-action@v3.4.1
        uses: cloudflare/wrangler-action@v3.4.0
        with:
          apiToken: ${{ secrets.CF_API_TOKEN }}
          accountId: ${{ secrets.CF_ACCOUNT_ID }}
@@ -16,11 +16,12 @@ packages/frontend/i18n/src/i18n-generated.ts
packages/frontend/graphql/src/graphql/index.ts
tests/affine-legacy/**/static
.yarnrc.yml
packages/frontend/templates/*.gen.ts
packages/frontend/templates/edgeless-templates.gen.ts
packages/frontend/templates/templates.gen.ts
packages/frontend/templates/onboarding

# auto-generated by NAPI-RS
# fixme(@joooye34): need script to check and generate ignore list here
packages/backend/native/index.d.ts
packages/backend/storage/index.d.ts
packages/frontend/native/index.d.ts
packages/frontend/native/index.js

202 .yarn/patches/cmdk-npm-0.2.0-302237a911.patch Normal file
File diff suppressed because one or more lines are too long
15 .yarn/patches/next-auth-npm-4.24.5-8428e11927.patch Normal file
@@ -0,0 +1,15 @@
diff --git a/package.json b/package.json
index ca30bca63196b923fa5a27eb85ce2ee890222d36..39e9d08dea40f25568a39bfbc0154458d32c8a66 100644
--- a/package.json
+++ b/package.json
@@ -31,6 +31,10 @@
     "types": "./index.d.ts",
     "default": "./index.js"
   },
+  "./core": {
+    "types": "./core/index.d.ts",
+    "default": "./core/index.js"
+  },
   "./adapters": {
     "types": "./adapters.d.ts"
   },
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -12,4 +12,4 @@ npmPublishAccess: public

npmPublishRegistry: "https://registry.npmjs.org"

yarnPath: .yarn/releases/yarn-4.1.1.cjs
yarnPath: .yarn/releases/yarn-4.0.2.cjs

762 Cargo.lock generated
File diff suppressed because it is too large
@@ -3,7 +3,7 @@ resolver = "2"
members = [
  "./packages/frontend/native",
  "./packages/frontend/native/schema",
  "./packages/backend/native",
  "./packages/backend/storage",
]

[profile.dev.package.sqlx-macros]

139 README.md
@@ -5,89 +5,86 @@
|
||||
Write, Draw and Plan All at Once
|
||||
<br>
|
||||
</h1>
|
||||
<a href="https://affine.pro/download">
|
||||
<img alt="affine logo" src="https://cdn.affine.pro/Github_hero_image1.png" style="width: 100%">
|
||||
</a>
|
||||
<br/>
|
||||
<p align="center">
|
||||
A privacy-focused, local-first, open-source, and ready-to-use alternative for Notion & Miro. <br />
|
||||
One hyper-fused platform for wildly creative minds.
|
||||
|
||||
<p>
|
||||
One hyper-fused platform for wildly creative minds. <br />
|
||||
A privacy-focussed, local-first, open-source, and ready-to-use alternative for Notion & Miro.
|
||||
</p>
|
||||
|
||||
<br/>
|
||||
|
||||
<br/>
|
||||
<a href="https://www.producthunt.com/posts/affine-3?utm_source=badge-featured&utm_medium=badge&utm_souce=badge-affine-3" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/featured.svg?post_id=440671&theme=light" alt="AFFiNE - One app for all - Where Notion meets Miro | Product Hunt" style="width: 250px; height: 54px;" width="250" height="54" /></a>
|
||||
<br/>
|
||||
<br/>
|
||||
</div>
|
||||
|
||||
<div align="center">
|
||||
<a href="https://affine.pro">Home Page</a> |
|
||||
<a href="https://discord.com/invite/yz6tGVsf5p">Discord</a> |
|
||||
<a href="https://app.affine.pro">Live Demo</a> |
|
||||
<a href="https://affine.pro/blog/">Blog</a> |
|
||||
<a href="https://docs.affine.pro/docs/">Documentation</a>
|
||||
</div>
|
||||
<br/>
|
||||
|
||||
[?style=flat-square&logoColor=white&logo=affine>)](https://app.affine.pro)
|
||||
[](https://affine.pro/download)
|
||||
[](https://affine.pro/download)
|
||||
[](https://affine.pro/download)
|
||||
[](https://affine.pro/download)
|
||||
|
||||
[](https://github.com/toeverything/AFFiNE/releases/latest)
|
||||
[![stars-icon]](https://github.com/toeverything/AFFiNE)
|
||||
[![All Contributors][all-contributors-badge]](#contributors)
|
||||
[![codecov]](https://codecov.io/gh/toeverything/AFFiNE)
|
||||
[![Node-version-icon]](https://nodejs.org/)
|
||||
[![TypeScript-version-icon]](https://www.typescriptlang.org/)
|
||||
[![React-version-icon]](https://reactjs.org/)
|
||||
[![blocksuite-icon]](https://github.com/toeverything/blocksuite)
|
||||
[![Rust-version-icon]](https://www.rust-lang.org/)
|
||||
[](https://app.fossa.com/projects/git%2Bgithub.com%2Ftoeverything%2FAFFiNE?ref=badge_shield)
|
||||
[](https://github.com/toeverything/AFFiNE/actions/workflows/deploy.yml)
|
||||
|
||||
</div>
|
||||
|
||||
---
|
||||
|
||||
<div align="center">
|
||||
<a href="http://affine.pro"><img src="https://img.shields.io/badge/-AFFiNE-06449d?style=social&logo=affine" height=25></a>
|
||||
|
||||
<a href="https://community.affine.pro"><img src="https://img.shields.io/badge/-Community-424549?style=social&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAAXNJREFUWEftlitLRUEURtdVEVExWUx2qxgNVouoXYtNDP4Tw20WtftAsItZrHaTYBJREZ98MAc248wcZxi4CGfSeezHmm/23kyPAa/egPPTAXQK/FsFBP7ldVDRZoqcgO9I+2bHy3ZIJBfTCPCZM1tqAxwBmzUBrNQNbEx+5b0B5oEN4NCBrAMnMaiUAuPAs3HU82TLEZwBqwGbaJ4UgKQ8CFR6SoEl4LIWwCJwZQCegKkWBWLHVKSActvdzgG3DqitDf3/VQBskBDALrDnAKXUo3ueAF5KinAf2DKOmnzD7l214bdbA6hC1XHZNQa8hSBC0hwDa57xDHDvvvWB7ciOZoE79+8CWPbsBGc769eFxJdWIKcuyIdRoG3W7AAC1dJkHDIOo8B78+4rEBo8r4AkLFk6Jk3HaeDBBTgHVmIAfpJUz+cAFXVBreQCvQYW/lqEjV1NAMUMqpAaxQMHyDnjYtuS+0BxstwaqJooFqxToFPgB5FuPCEB6XK2AAAAAElFTkSuQmCC" height=25></a>
|
||||
|
||||
<a href="https://discord.com/invite/yz6tGVsf5p"><img src="https://img.shields.io/badge/-Discord-424549?style=social&logo=discord" height=25></a>
|
||||
|
||||
<a href="https://t.me/affineworkos"><img src="https://img.shields.io/badge/-Telegram-red?style=social&logo=telegram" height=25></a>
|
||||
|
||||
<a href="https://twitter.com/AffineOfficial"><img src="https://img.shields.io/badge/-Twitter-red?style=social&logo=twitter" height=25></a>
|
||||
|
||||
<a href="https://medium.com/@affineworkos"><img src="https://img.shields.io/badge/-Medium-red?style=social&logo=medium" height=25></a>
|
||||
</div>
|
||||
|
||||
<br />
|
||||
<div align="center">
|
||||
<em>Docs, canvas and tables are hyper-merged with AFFiNE - just like the word affine (əˈfʌɪn | a-fine).</em>
|
||||
</div>
|
||||
<br />
|
||||
|
||||
<div align="center">
|
||||
<img src="https://github.com/toeverything/AFFiNE/assets/79301703/49a426bb-8d2b-4216-891a-fa5993642253" style="width: 100%"/>
|
||||
</div>
|
||||

|
||||
|
||||
## Join our community
|
||||
|
||||
Before we tell you how to get started with AFFiNE, we'd like to shamelessly plug our awesome user and developer communities across [official social platforms](https://community.affine.pro/c/start-here/)! Once you’re familiar with using the software, maybe you will share your wisdom with others and even consider joining the [AFFiNE Ambassador program](https://community.affine.pro/c/start-here/affine-ambassador) to help spread AFFiNE to the world.
|
||||
|
||||
## Getting started & staying tuned with us.
|
||||
|
||||
Star us, and you will receive all release notifications from GitHub without any delay!
|
||||
⚠️ Please note that AFFiNE is still under active development and is not yet ready for production use. ⚠️
|
||||
|
||||
<img src="https://user-images.githubusercontent.com/79301703/230891830-0110681e-8c7e-483b-b6d9-9e42b291b9ef.gif" style="width: 100%"/>
|
||||
[](https://app.affine.pro) No installation or registration required! Head over to our website and try it out now.
|
||||
|
||||
## What is AFFiNE
|
||||
[](https://community.affine.pro) Our wonderful community, where you can meet and engage with the team, developers and other like-minded enthusiastic user of AFFiNE.
|
||||
|
||||
AFFiNE is an open-source, all-in-one workspace and an operating system for all the building blocks that assemble your knowledge base and much more -- wiki, knowledge management, presentation and digital assets. It's a better alternative to Notion and Miro.
|
||||
Star us, and you will receive all releases notifications from GitHub without any delay!
|
||||

|
||||
|
||||
## Features
|
||||
|
||||
**A true canvas for blocks in any form. Docs and whiteboard are now fully merged.**
|
||||
- **Hyper merged** — Write, draw and plan all at once. Assemble any blocks you love on any canvas you like to enjoy seamless transitions between workflows with AFFiNE.
|
||||
- **Privacy focussed** — AFFiNE is built with your privacy in mind and is one of our key concerns. We want you to keep control of your data, allowing you to store it as you like, where you like while still being able to freely edit and view your data on-demand.
|
||||
- **Offline-first** — With your privacy in mind we also decided to go offline-first. This means that AFFiNE can be used offline, whether you want to view or edit, with support for conflict-free merging when you are back online.
|
||||
- **Clean, intuitive design** — With AFFiNE you can concentrate on editing with a clean and modern interface. Which is responsive, so it looks great on tablets too, and mobile support is coming in the future.
|
||||
- **Modern Block Editor with Markdown support** — A modern block editor can help you not only for docs, but slides and tables as well. When you write in AFFiNE you can use Markdown syntax which helps create an easier editing experience, that can be experienced with just a keyboard. And this allows you to export your data cleanly into Markdown.
|
||||
- **Collaboration** — Whether you want to collaborate with yourself across multiple devices, or work together with others, support for collaboration and multiplayer is out-of-the-box, which makes it easy for teams to get started with AFFiNE.
|
||||
- **Choice of multiple languages** — Thanks to community contributions AFFiNE offers support for multiple languages. If you don't find your language or would like to suggest some changes we welcome your contributions.
|
||||
|
||||
- Many editor apps claim to be a canvas for productivity, but AFFiNE is one of the very few which allows you to put any building block on an edgeless canvas -- rich text, sticky notes, any embedded web pages, multi-view databases, linked pages, shapes and even slides. We have it all.
|
||||
|
||||
**Multimodal AI partner ready to kick in any work**
|
||||
|
||||
- Write up professional work report? Turn an outline into expressive and presentable slides? Summary an article into a well-structured mindmap? Sorting your job plan and backlog for tasks? Or... draw and code prototype apps and web pages directly all with one prompt? With you, AFFiNE AI pushes your creativity to the edge of your imagination.
|
||||
|
||||
**Local-first & Real-time collaborative**
|
||||
|
||||
- We love the idea of local-first that you always own your data on your disk, in spite of the cloud. Furthermore, AFFiNE supports real-time sync and collaborations on web and cross-platform clients.
|
||||
|
||||
**Self-host & Shape your own AFFiNE**
|
||||
|
||||
- You have the freedom to manage, self-host, fork and build your own AFFiNE. Plugin community and third-party blocks are coming soon. More tractions on [Blocksuite](https://blocksuite.io). Check there to learn how to [self-host AFFiNE](https://docs.affine.pro/docs/self-host-affine).
|
||||
|
||||
## Acknowledgement
|
||||
|
||||
“We shape our tools and thereafter our tools shape us”. A lot of pioneers have inspired us along the way, e.g.:
|
||||
|
||||
- Quip & Notion with their great concept of “everything is a block”
|
||||
- Trello with their Kanban
|
||||
- Airtable & Miro with their no-code programable datasheets
|
||||
- Miro & Whimiscal with their edgeless visual whiteboard
|
||||
- Remote & Capacities with their object-based tag system
|
||||
|
||||
There is a large overlap of their atomic “building blocks” between these apps. They are not open source, nor do they have a plugin system like Vscode for contributors to customize. We want to have something that contains all the features we love and also goes one step even further.
|
||||
|
||||
Thanks for checking us out, we appreciate your interest and sincerely hope that AFFiNE resonates with you! 🎵 Checking https://affine.pro/ for more details ions.
|
||||

|
||||
|
||||
## Contributing
|
||||
|
||||
@@ -104,22 +101,23 @@ For **bug reports**, **feature requests** and other **suggestions** you can also
|
||||
|
||||
For **translation** and **language support** you can visit our [i18n General Space](https://community.affine.pro/c/i18n-general).
|
||||
|
||||
Looking for **other ways to contribute** and wondering where to start? Check out the [AFFiNE Ambassador program](https://community.affine.pro/c/start-here/affine-ambassador), we work closely with passionate community members and provide them with a wide range of support and resources.
|
||||
Looking for **others ways to contribute** and wondering where to start? Check out the [AFFiNE Ambassador program](https://community.affine.pro/c/start-here/affine-ambassador), we work closely with passionate community members and provide them with a wide-range of support and resources.
|
||||
|
||||
If you have questions, you are welcome to contact us. One of the best places to get more info and learn more is in the [AFFiNE Community](https://community.affine.pro) where you can engage with other like-minded individuals.
|
||||
|
||||
## Ecosystem
|
||||
|
||||
| Name | | |
|
||||
| ------------------------------------------------ | -------------------------- | --------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| [@affine/component](packages/frontend/component) | AFFiNE Component Resources |  |
|
||||
| [@toeverything/theme](packages/common/theme) | AFFiNE theme | [](https://www.npmjs.com/package/@toeverything/theme) |
|
||||
| Name | | |
|
||||
| -------------------------------------------------------- | ---------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| [@affine/component](packages/frontend/component) | AFFiNE Component Resources | [](https://affine-storybook.vercel.app/) |
|
||||
| [@toeverything/y-indexeddb](packages/common/y-indexeddb) | IndexedDB database adapter for Yjs | [](https://www.npmjs.com/package/@toeverything/y-indexeddb) |
|
||||
| [@toeverything/theme](packages/common/theme) | AFFiNE theme | [](https://www.npmjs.com/package/@toeverything/theme) |
|
||||
|
||||
## Upstreams
|
||||
|
||||
We would also like to give thanks to open-source projects that make AFFiNE possible:
|
||||
|
||||
- [Blocksuite](https://github.com/toeverything/BlockSuite) - 💠 BlockSuite is the open-source collaborative editor project behind AFFiNE.
|
||||
- [blocksuite](https://github.com/toeverything/BlockSuite) - 💠 BlockSuite is the open-source collaborative editor project behind AFFiNE.
|
||||
- [OctoBase](https://github.com/toeverything/OctoBase) - 🐙 OctoBase is the open-source database behind AFFiNE, local-first, yet collaborative. A light-weight, scalable, data engine written in Rust.
|
||||
- [yjs](https://github.com/yjs/yjs) - Fundamental support of CRDTs for our implementation on state management and data sync.
|
||||
- [electron](https://github.com/electron/electron) - Build cross-platform desktop apps with JavaScript, HTML, and CSS.
|
||||
@@ -142,15 +140,24 @@ We would like to express our gratitude to all the individuals who have already c
|
||||
|
||||
## Self-Host
|
||||
|
||||
Begin with Docker to deploy your own feature-rich, unrestricted version of AFFiNE. Our team is diligently updating to the latest version. For more information on how to self-host AFFiNE, please refer to our [documentation](https://docs.affine.pro/docs/self-host-affine).
|
||||
> We know that the self-host version has been out of date for a long time.
|
||||
>
|
||||
> We are working hard to get this updated to the latest version, you can try our desktop version first.
|
||||
|
||||
Get started with Docker and deploy your own feature-rich, restriction-free deployment of AFFiNE.
|
||||
We are working hard to get this updated to the latest version, you can keep an eye on the [latest packages].
|
||||
|
||||
## Hiring
|
||||
|
||||
Some amazing companies, including AFFiNE, are looking for developers! Are you interested in joining AFFiNE or its partners? Check out our Discord channel for some of the latest jobs available.
|
||||
Some amazing companies including AFFiNE are looking for developers! Are you interested in helping build with AFFiNE and/or its partners? Check out some of the latest [jobs available].
|
||||
|
||||
## Upgrading
|
||||
|
||||
For upgrading information, please see our [update page].
|
||||
|
||||
## Feature Request
|
||||
|
||||
For feature requests, please see [community.affine.pro](https://community.affine.pro/c/feature-requests/).
|
||||
For feature request, please see [community.affine.pro](https://community.affine.pro/c/feature-requests/).
|
||||
|
||||
## Building
|
||||
|
||||
@@ -178,6 +185,8 @@ Thanks to [Chromatic](https://www.chromatic.com/) for providing the visual testi
|
||||
|
||||
See [LICENSE] for details.
|
||||
|
||||
[](https://app.fossa.com/projects/git%2Bgithub.com%2Ftoeverything%2FAFFiNE?ref=badge_large)
|
||||
|
||||
[all-contributors-badge]: https://img.shields.io/github/contributors/toeverything/AFFiNE
|
||||
[license]: ./LICENSE
|
||||
[building.md]: ./docs/BUILDING.md
|
||||
@@ -185,7 +194,7 @@ See [LICENSE] for details.
|
||||
[jobs available]: ./docs/jobs.md
|
||||
[latest packages]: https://github.com/toeverything/AFFiNE/pkgs/container/affine-self-hosted
|
||||
[contributor license agreement]: https://github.com/toeverything/affine/edit/canary/.github/CLA.md
|
||||
[rust-version-icon]: https://img.shields.io/badge/Rust-1.77.2-dea584
|
||||
[rust-version-icon]: https://img.shields.io/badge/Rust-1.75.0-dea584
|
||||
[stars-icon]: https://img.shields.io/github/stars/toeverything/AFFiNE.svg?style=flat&logo=github&colorB=red&label=stars
|
||||
[codecov]: https://codecov.io/gh/toeverything/affine/branch/canary/graphs/badge.svg?branch=canary
|
||||
[node-version-icon]: https://img.shields.io/badge/node-%3E=18.16.1-success
|
||||
|
||||
29 SECURITY.md
@@ -1,29 +0,0 @@
# Security Policy

## Supported Versions

We recommend users to always use the latest major version. Security updates will be provided for the current major version until the next major version is released.

| Version | Supported |
| --------------- | ------------------ |
| 0.13.x (stable) | :white_check_mark: |
| < 0.13.x | :x: |

## Reporting a Vulnerability

We welcome you to provide us with bug reports via and email at [security@toeverything.info](mailto:security@toeverything.info). We expect your report to contain at least the following for us to evaluate and reproduce:

1. Using platform and version, for example:

   - macos arm64 0.12.0-canary-202402220729-0868ac6
   - app.affine.pro 0.12.0-canary-202402220729-0868ac6

2. A sets of video or screenshot containing the reproduce steps that proves you successfully exploited the vulnerability, preferably including the time and software version of the successful exploit.

3. Your classification or analysis of the vulnerability (optional)

Since we are an open source project, we also welcome you to provide corresponding fix PRs.

We will provide bounties for vulnerabilities involving user information leakage, permission leakage, and unauthorized code execution. For other types of vulnerabilities, we will determine specific rewards based on the evaluation results.

If the vulnerability is caused by a library we depend on, we encourage you to submit a security report to the corresponding dependent library at the same time to benefit more users.
@@ -2,7 +2,7 @@

> **Warning**:
>
> This document is not guaranteed to be up-to-date.
> This document has not been updated for a while.
> If you find any outdated information, please feel free to open an issue or submit a PR.

> **Note**
@@ -27,7 +27,7 @@ We suggest develop our product under node.js LTS(Long-term support) version

install [Node LTS version](https://nodejs.org/en/download)

> Up to now, the major node.js version is 20.x
> Up to now, the major node.js version is 18.x

#### Option 2: Use node version manager

@@ -76,7 +76,7 @@ Once Developer Mode is enabled, execute the following command with administrator

```sh
# Enable symbolic links
git config --global core.symlinks true
# Clone the repository
# Clone the repository, also need to be run with administrator privileges
git clone https://github.com/toeverything/AFFiNE
```

@@ -93,7 +93,7 @@ yarn workspace @affine/native build

### Build Server Dependencies

```sh
yarn workspace @affine/server-native build
yarn workspace @affine/storage build
```

## Testing

@@ -1 +1,93 @@
|
||||
# Please visit https://docs.affine.pro/docs/contributing
|
||||
# Welcome to our contributing guide <!-- omit in toc -->
|
||||
|
||||
Thank you for investing your time in contributing to our project! Any contribution you make will be reflected on our GitHub :sparkles:.
|
||||
|
||||
Read our [Code of Conduct](./CODE_OF_CONDUCT.md) to keep our community approachable and respectable. Join our [Discord](https://discord.com/invite/yz6tGVsf5p) server for more.
|
||||
|
||||
In this guide you will get an overview of the contribution workflow from opening an issue, creating a PR, reviewing, and merging the PR.
|
||||
|
||||
Use the table of contents icon on the top left corner of this document to get to a specific section of this guide quickly.
|
||||
|
||||
## New contributor guide
|
||||
|
||||
Currently we have two versions of AFFiNE:
|
||||
|
||||
- [AFFiNE Pre-Alpha](https://livedemo.affine.pro/). This version uses the branch `Pre-Alpha`, it is no longer actively developed but contains some different functions and features.
|
||||
- [AFFiNE Alpha](https://pathfinder.affine.pro/). This version uses the `canary` branch, this is the latest version under active development.
|
||||
|
||||
To get an overview of the project, read the [README](../README.md). Here are some resources to help you get started with open source contributions:
|
||||
|
||||
- [Finding ways to contribute to open source on GitHub](https://docs.github.com/en/get-started/exploring-projects-on-github/finding-ways-to-contribute-to-open-source-on-github)
|
||||
- [Set up Git](https://docs.github.com/en/get-started/quickstart/set-up-git)
|
||||
- [GitHub flow](https://docs.github.com/en/get-started/quickstart/github-flow)
|
||||
- [Collaborating with pull requests](https://docs.github.com/en/github/collaborating-with-pull-requests)
|
||||
|
||||
## Getting started
|
||||
|
||||
Check to see what [types of contributions](types-of-contributions.md) we accept before making changes. Some of them don't even require writing a single line of code :sparkles:.
|
||||
|
||||
### Issues
|
||||
|
||||
#### Create a new issue or feature request
|
||||
|
||||
If you spot a problem, [search if an issue already exists](https://docs.github.com/en/github/searching-for-information-on-github/searching-on-github/searching-issues-and-pull-requests#search-by-the-title-body-or-comments). If a related issue doesn't exist, you can open a new issue using a relevant [issue form](https://github.com/toeverything/AFFiNE/issues/new/choose).
|
||||
|
||||
#### Solve an issue
|
||||
|
||||
Scan through our [existing issues](https://github.com/toeverything/AFFiNE/issues) to find one that interests you. You can narrow down the search using `labels` as filters. See our [Labels](https://github.com/toeverything/AFFiNE/labels) for more information. As a general rule, we don’t assign issues to anyone. If you find an issue to work on, you are welcome to open a PR with a fix.
|
||||
|
||||
### Make Changes
|
||||
|
||||
#### Make changes in the UI
|
||||
|
||||
Click **Make a contribution** at the bottom of any docs page to make small changes such as a typo, sentence fix, or a broken link. This takes you to the `.md` file where you can make your changes and [create a pull request](#pull-request) for a review.
|
||||
|
||||
#### Make changes in a codespace
|
||||
|
||||
For more information about using a codespace for working on GitHub documentation, see "[Working in a codespace](https://github.com/github/docs/blob/main/contributing/codespace.md)."
|
||||
|
||||
#### Make changes locally
|
||||
|
||||
1. [Install Git LFS](https://docs.github.com/en/github/managing-large-files/versioning-large-files/installing-git-large-file-storage).
|
||||
|
||||
2. Fork the repository.
|
||||
|
||||
- Using GitHub Desktop:
|
||||
|
||||
- [Getting started with GitHub Desktop](https://docs.github.com/en/desktop/installing-and-configuring-github-desktop/getting-started-with-github-desktop) will guide you through setting up Desktop.
|
||||
- Once Desktop is set up, you can use it to [fork the repo](https://docs.github.com/en/desktop/contributing-and-collaborating-using-github-desktop/cloning-and-forking-repositories-from-github-desktop)!
|
||||
|
||||
- Using the command line:
|
||||
- [Fork the repo](https://docs.github.com/en/github/getting-started-with-github/fork-a-repo#fork-an-example-repository) so that you can make your changes without affecting the original project until you're ready to merge them.
|
||||
|
||||
3. Install or update to **Node.js v16**.
|
||||
|
||||
4. Create a working branch and start with your changes!
|
||||
|
||||
### Commit your update
|
||||
|
||||
Commit the changes once you are happy with them.
|
||||
|
||||
Reach out the community members for necessary help.
|
||||
|
||||
Once your changes are ready, don't forget to self-review to speed up the review process:zap:.
|
||||
|
||||
### Pull Request
|
||||
|
||||
When you're finished with the changes, create a pull request, also known as a PR.
|
||||
|
||||
- Fill the "Ready for review" template so that we can review your PR. This template helps reviewers understand your changes as well as the purpose of your pull request.
|
||||
- Don't forget to [link PR to issue](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue) if you are solving one.
|
||||
- Enable the checkbox to [allow maintainer edits](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/allowing-changes-to-a-pull-request-branch-created-from-a-fork) so the branch can be updated for a merge.
|
||||
Once you submit your PR, a Docs team member will review your proposal. We may ask questions or request for additional information.
|
||||
- We may ask for changes to be made before a PR can be merged, either using [suggested changes](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/incorporating-feedback-in-your-pull-request) or pull request comments. You can apply suggested changes directly through the UI. You can make any other changes in your fork, then commit them to your branch.
|
||||
- As you update your PR and apply changes, mark each conversation as [resolved](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/commenting-on-a-pull-request#resolving-conversations).
|
||||
- If you run into any merge issues, checkout this [git tutorial](https://github.com/skills/resolve-merge-conflicts) to help you resolve merge conflicts and other issues.
|
||||
|
||||
### Your PR is merged!
|
||||
|
||||
Congratulations :tada::tada: The AFFiNE team thanks you :sparkles:.
|
||||
|
||||
Once your PR is merged, your contributions will be publicly visible on our GitHub.
|
||||
|
||||
Now that you are part of the AFFiNE community, see how else you can join and help over at [GitBook](https://docs.affine.pro/affine/)
|
||||
|
||||
@@ -1,10 +1,5 @@
# Building AFFiNE Desktop Client App

> **Warning**:
>
> This document is not guaranteed to be up-to-date.
> If you find any outdated information, please feel free to open an issue or submit a PR.

## Table of Contents

- [Prerequisites](#prerequisites)
@@ -12,100 +7,35 @@
- [Build](#build)
- [CI](#ci)

## Things you may need to know before getting started

Building the desktop client app for the moment is a bit more complicated than building the web app. The client right now is an Electron app that wraps the prebuilt web app, with parts of the native modules written in Rust, which means we have the following source modules to build a desktop client app:

1. `packages/frontend/core`: the web app
2. `packages/frontend/native`: the native modules written in Rust (mostly the sqlite bindings)
3. `packages/frontend/electron`: the Electron app (containing main & helper process, and the electron entry point in `packages/frontend/electron/renderer`)

#3 is dependent on #1 and #2, and relies on electron-forge to make the final app & installer. To get a deep understanding of how the desktop client app is built, you may want to read the workflow file in [release-desktop.yml](/.github/workflows/release-desktop.yml).

Due to [some limitations of Electron builder](https://github.com/yarnpkg/berry/issues/4804), you may need to have two separate yarn config for building the core and the desktop client app:

1. build frontend (with default yarn settings)
2. build electron (reinstall with hoisting off)

We will explain the steps in the following sections.

## Prerequisites

Before you start building AFFiNE Desktop Client Application, please following the same steps in [BUILDING#Prerequisites](./BUILDING.md#prerequisites) to install Node.js and Rust.
Before you start building AFFiNE Desktop Client Application, please [install Rust toolchain first](https://www.rust-lang.org/learn/get-started).

On Windows, you must enable symbolic links this code repo. See [#### Windows](./BUILDING.md#Windows).
Note that if you encounter any issues with installing Rust and crates, try following [this guide (zh-CN)](https://course.rs/first-try/slowly-downloading.html) to set up alternative registries.

## Build, package & make the desktop client app
## Development

### 0. Build the native modules
To run AFFiNE Desktop Client Application locally, run the following commands:

Please refer to `Build Native Dependencies` section in [BUILDING.md](./BUILDING.md#Build-Native-Dependencies) to build the native modules.

### 1. Build the core

On Mac & Linux

```shell
BUILD_TYPE=canary SKIP_NX_CACHE=1 yarn workspace @affine/electron generate-assets
```

On Windows (powershell)

```powershell
$env:BUILD_TYPE="canary"
$env:SKIP_NX_CACHE=1
$env:DISTRIBUTION=desktop
$env:SKIP_WEB_BUILD=1
yarn build --skip-nx-cache
```

### 2. Re-config yarn, clean up the node_modules and reinstall the dependencies

As we said before, you need to reinstall the dependencies with hoisting off. You can do this by running the following command:

```shell
yarn config set nmMode classic
yarn config set nmHoistingLimits workspaces
```

Then, clean up all node_modules and reinstall the dependencies:

On Mac & Linux

```shell
find . -name 'node_modules' -type d -prune -exec rm -rf '{}' +
```sh
# in repo root
yarn install
yarn dev

# in packages/frontend/native
yarn build

# in packages/frontend/electron
yarn dev
```

On Windows (powershell)
Now you should see the Electron app window popping up shortly.

```powershell
dir -Path . -Filter node_modules -recurse | foreach {echo $_.fullname; rm -r -Force $_.fullname}
yarn install
```
## Build

### 3. Build the desktop client app installer
To build the desktop client application, run `yarn make` in `packages/frontend/electron`.

#### Mac & Linux

Note: you need to comment out `osxSign` and `osxNotarize` in `forge.config.js` to skip signing and notarizing the app.

```shell
BUILD_TYPE=canary SKIP_WEB_BUILD=1 HOIST_NODE_MODULES=1 yarn workspace @affine/electron make
```

#### Windows

Making the windows installer is a bit different. Right now we provide two installer options: squirrel and nsis.

```powershell
$env:BUILD_TYPE="canary"
$env:SKIP_WEB_BUILD=1
$env:HOIST_NODE_MODULES=1
yarn workspace @affine/electron package
yarn workspace @affine/electron make-squirrel
yarn workspace @affine/electron make-nsis
```
Note: you may want to comment out `osxSign` and `osxNotarize` in `forge.config.js` to avoid signing and notarizing the app.

Once the build is complete, you can find the paths to the binaries in the terminal output.

256 docs/contributing/behind-the-code.md Normal file
@@ -0,0 +1,256 @@
# Behind the code - Code Design and Architecture of the AFFiNE platform

## Introduction

This document delves into the design and architecture of the AFFiNE platform, providing insights for developers interested in contributing to AFFiNE or gaining a better understanding of our design principles.

## Addressing the Challenge

AFFiNE is a platform designed to be the next-generation collaborative knowledge base for professionals. It is local-first, yet collaborative; it is robust as a foundational platform, yet friendly to extend. We believe that a knowledge base that truly meets the needs of professionals in different scenarios should be open-source and open to the community. By using AFFiNE, people can take full control of their data and workflow, thus achieving data sovereignty.
To do so, we should have a stable plugin system that is easy to use by the community and a well-modularized editor for customizability. Let's list the challenges from the perspective of data modeling, UI and feature plugins, and cross-platform support.

### Data might come from anywhere and go anywhere, in spite of the cloud

AFFiNE provides users with flexibility and control over their data storage. Our platform is designed to prioritize user ownership of data, which means data in AFFiNE is always accessible from local devices, like a laptop's local files or the browser's IndexedDB. Meanwhile, data can also be stored in a centralised, cloud-native way.

Thanks to our use of CRDTs (Conflict-free Replicated Data Types), data in AFFiNE is always conflict-free, similar to a Git that resolves conflicts automatically. This means that data synchronization, sharing, and real-time collaboration are seamless and can occur across any network layer, so long as the data is passed along. As a result, developers do not need to worry about whether the data was generated locally or remotely, as CRDTs treat both equally.

While a server-centric backend is supported with AFFiNE, it is not the suggested approach. With a local-first architecture, AFFiNE users get a responsive real-time UI, optimal performance, and effortless synchronization of data across multiple devices and locations. This includes peer-to-peer file replication, storing files in local or cloud storage, saving them to a server-side database, or using AFFiNE Cloud for real-time collaboration and synchronization.
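As a concrete illustration of this conflict-free merging, here is a minimal sketch using Yjs, the CRDT library listed among our upstreams. The transport and the document key (`content`) are arbitrary choices for the example, not part of AFFiNE's actual sync protocol:

```ts
import * as Y from 'yjs';

// Two replicas of the same document, e.g. a laptop and a phone.
const docA = new Y.Doc();
const docB = new Y.Doc();

// Each replica edits while (conceptually) offline.
docA.getText('content').insert(0, 'written on device A. ');
docB.getText('content').insert(0, 'written on device B. ');

// Exchange updates over any transport: WebSocket, file copy, p2p, ...
Y.applyUpdate(docB, Y.encodeStateAsUpdate(docA));
Y.applyUpdate(docA, Y.encodeStateAsUpdate(docB));

// Both replicas converge to the same merged text, with no manual conflict handling.
console.log(
  docA.getText('content').toString() === docB.getText('content').toString()
); // true
```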
### Customizable UI and features

AFFiNE is a platform that allows users to customize the UI and features of each part.

We need to consider the following cases:

- Pluggable features: some features can be disabled or enabled. For example, individuals who use AFFiNE for personal purposes may not need authentication or collaboration features. On the other hand, enterprise users may require authentication and strong security.
- SDK for developers: developers can modify or build their own feature or UI plugins, such as AI writing support, self-hosted databases, or domain-specific editable blocks.

### Diverse platforms

AFFiNE supports various platforms, including desktop, mobile, and web, while being local-first. However, it's important to note that certain features may differ on different platforms, and it's also possible for data and editor versions to become mismatched.

## The solution

### Loading Mechanism

AFFiNE is built on the web platform, meaning that most code runs on a JavaScript runtime (V8, QuickJS).
Some interfaces, such as those in the desktop app, are implemented in native code like Rust.

Eventually, though, the main logic of AFFiNE runs on the JavaScript runtime. Since it is a single-threaded runtime, we need to ensure that the code runs in a non-blocking way.

Some logic, however, has to run in a blocking way.

We have to set up the environment before starting the core.
And for the Workspace, like a local workspace or a cloud workspace, we have to load the data from storage before rendering the UI.

During this period, there will be a transition animation and a skeleton UI.

```mermaid
graph LR
  subgraph Interactive unavailable
    A[Loading] --> B[Setup Environment]
    B --> C[Loading Initial Data]
    C --> D[Skeleton UI]
  end
  D --> E[Render UI]
  E --> F[Async fetching Data] --> E
```

In this way, we need to boost the performance of the loading process.

The initial data is the most costly part of the process.
We must ensure that the initial data is loaded as quickly as possible.

An obvious observation is that only one Workspace is active at a time in one browser.
So we need to load the data of the active Workspace as the initial data.
Other workspaces can be loaded in the background asynchronously.

For example, the local Workspace is saved in the browser's IndexedDB.

One way to boost performance is to use a Web Worker to load the data in the background.

Here is some pseudocode:

```tsx
// worker.ts
import { openDB } from 'idb';

const db = await openDB('local-db' /* ... */);
const data = await db.getAll('data');
self.postMessage(data);

// main.ts
const worker = new Worker('./worker.ts', { type: 'module' });

// wait for the worker to post the initial data back
const data = await new Promise<Data>(resolve => {
  worker.addEventListener('message', e => resolve(e.data));
});

// ready to render the UI
renderUI(data);
```

We use React Suspense to deal with the initial data loading in the real code.

```tsx
import { atom, useAtom, useAtomValue, useSetAtom } from 'jotai';

const currentWorkspaceIdAtom = atom(null);
const currentWorkspaceAtom = atom<Workspace>(async get => {
  const workspaceId = await get(currentWorkspaceIdAtom);
  // async load the workspace data
  return Workspace;
});

const Workspace = () => {
  const currentWorkspace = useAtomValue(currentWorkspaceAtom);
  return <WorkspaceUI workspace={currentWorkspace} />;
};

const App = () => {
  const router = useRouter();
  const workspaceId = router.query.workspaceId;
  const [currentWorkspaceId, set] = useAtom(currentWorkspaceIdAtom);
  if (!currentWorkspaceId) {
    set(workspaceId);
    return <Loading />;
  }
  return (
    <Suspense fallback={<Skeleton />}>
      <Workspace />
    </Suspense>
  );
};
```
### Data Storage and UI Rendering

We assume that the data is stored in different places and loaded differently.

In the current version, we have two places to store the data: local storage and cloud storage.

The local storage is the browser's IndexedDB, the default storage for the local Workspace.

The cloud storage is AFFiNE Cloud, which is the default storage for the cloud workspace.

But since Time to Interactive (TTI) is the most important metric for performance and user experience, all initial data is loaded from IndexedDB.

Other data will be loaded and updated in the background.

With this design concept, we have the following data structure:
```ts
import { Workspace as Store } from '@blocksuite/store';

interface Provider {
  type: 'local-indexeddb' | 'affine-cloud' | 'desktop-sqlite';
  background: boolean; // if the provider is background, we will load the data in the background
  necessary: boolean; // if the provider is necessary, we will block the UI rendering until this provider is ready
}

interface Workspace {
  id: string;
  store: Store;
  providers: Provider[];
}
```
The `provider` is a connector that bridges the current data in memory and the data in another place.

You can combine different providers to build different data storage and loading strategies.

For example, if there is only `affine-cloud`, the data will only be loaded from the Cloud and not saved in local storage, which might be useful for the enterprise user.

Also, we want to distinguish the different types of Workspace.
Even though the providers are enough for the Workspace, when we display the Workspace in the UI, we need to know the type of Workspace.
An AFFiNE Cloud Workspace needs user authentication; the local Workspace does not.

And there should be a way to create, read, update, and delete the Workspace.

Hence, we combine all the details of the Workspace mentioned above into the `WorkspacePlugin` type.
```ts
import React from 'react';

interface UI<WorkspaceType> {
  DetailPage: React.FC<UIProps<WorkspaceType>>;
  SettingPage: React.FC<UIProps<WorkspaceType>>;
}

interface CRUD<WorkspaceType> {
  create: () => Promise<WorkspaceType>;
  read: (id: string) => Promise<WorkspaceType>;
  list: () => Promise<WorkspaceType[]>;
  delete: (Workspace: WorkspaceType) => Promise<WorkspaceType>;
}

interface WorkspacePlugin<WorkspaceType> {
  type: WorkspaceType;
  ui: UI<WorkspaceType>;
  crud: CRUD<WorkspaceType>;
}
```
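To make the `necessary` and `background` flags on providers more concrete, here is a minimal sketch of how they could gate initial rendering. This is not the actual AFFiNE implementation: the `connect()` method and the `prepareWorkspace` helper are hypothetical names introduced only for this example.

```ts
// Hypothetical helper, assuming each provider exposes a `connect()` that
// resolves once its data has been applied to the in-memory store.
interface ConnectableProvider extends Provider {
  connect: () => Promise<void>;
}

async function prepareWorkspace(providers: ConnectableProvider[]) {
  // Block the first render only on providers marked as `necessary`.
  await Promise.all(providers.filter(p => p.necessary).map(p => p.connect()));

  // Background providers keep loading/syncing without delaying first paint.
  for (const p of providers.filter(p => p.background && !p.necessary)) {
    void p.connect();
  }
}
```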
```mermaid
graph TB
  WorkspaceCRUD --> Cloud
  WorkspaceCRUD --> SelfHostCloud
  subgraph Remote
    Cloud[AFFiNE Cloud]
    SelfHostCloud[Self Host AFFiNE Server]
  end
  subgraph Computer
    WorkspaceCRUD --> DesktopSqlite[Desktop Sqlite]
    subgraph JavaScript Runtime
      IndexedDB[IndexedDB]
      WorkspaceCRUD --> IndexedDB
      subgraph Next.js
        Entry((entry point))
        Entry --> NextApp[Next.js App]
        NextApp --> App[App]
      end
      subgraph Workspace Runtime
        App[App] --> WorkspaceUI
        WorkspacePlugin[Workspace Plugin]
        WorkspacePlugin[Workspace Plugin] --> WorkspaceUI
        WorkspacePlugin[Workspace Plugin] --> WorkspaceCRUD[Workspace CRUD]
        WorkspaceUI[Workspace UI] --> WorkspaceCRUD
        WorkspaceUI -->|async init| Provider
        Provider -->|update ui| WorkspaceUI
        Provider -->|update data| WorkspaceCRUD
      end
    end
  end
```
Notice that we do not assume the Workspace UI has to be written in React.js (for now, it has to be).
In the future, we could support other UI frameworks as well, such as Vue and Svelte.

### Workspace Loading Details

```mermaid
flowchart TD
  subgraph JavaScript Runtime
    subgraph Next.js
      Start((entry point)) -->|setup environment| OnMount{On mount}
      OnMount -->|empty data| Init[Init Workspaces]
      Init --> LoadData
      OnMount -->|already have data| LoadData>Load data]
      LoadData --> CurrentWorkspace[Current workspace]
      LoadData --> Workspaces[Workspaces]
      Workspaces --> Providers[Providers]

      subgraph React
        Router([Router]) -->|sync `query.workspaceId`| CurrentWorkspace
        CurrentWorkspace -->|sync `currentWorkspaceId`| Router
        CurrentWorkspace -->|render| WorkspaceUI[Workspace UI]
      end
    end
    Providers -->|push new update| Persistence[(Persistence)]
    Persistence -->|patch workspace| Providers
  end
```
@@ -29,6 +29,13 @@ It includes the global constants, browser and system check.

This package should be imported at the very beginning of the entry point.

### `@affine/workspace`

Currently we have two workspace plugins:

- `local` for the local workspace, which is the default workspace type.
- `affine` for the cloud workspace, which is the workspace type for AFFiNE Cloud with the OctoBase backend.

#### Design principles

- Each workspace plugin has its own state and is isolated from other workspace plugins.
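The isolation principle is easier to see with a small sketch. This is not the real `@affine/workspace` API; the registry, `registerWorkspacePlugin`, and `getWorkspacePlugin` names are hypothetical and only illustrate one way the two flavours could be kept apart.

```ts
type WorkspaceFlavour = 'local' | 'affine';

interface WorkspaceMeta {
  id: string;
  flavour: WorkspaceFlavour;
}

interface WorkspacePlugin {
  flavour: WorkspaceFlavour;
  // each plugin owns its own storage; nothing is shared across plugins
  list: () => Promise<WorkspaceMeta[]>;
  create: () => Promise<WorkspaceMeta>;
  delete: (id: string) => Promise<void>;
}

const registry = new Map<WorkspaceFlavour, WorkspacePlugin>();

function registerWorkspacePlugin(plugin: WorkspacePlugin) {
  registry.set(plugin.flavour, plugin);
}

function getWorkspacePlugin(flavour: WorkspaceFlavour): WorkspacePlugin {
  const plugin = registry.get(flavour);
  if (!plugin) {
    throw new Error(`no workspace plugin registered for flavour "${flavour}"`);
  }
  return plugin;
}
```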
@@ -53,3 +60,13 @@ yarn dev
### `@affine/electron`

See [building desktop client app](../building-desktop-client-app.md).

### `@affine/storybook`

```shell
yarn workspace @affine/storybook storybook
```

## What's next?

- [Behind the code](./behind-the-code.md)
@@ -1,10 +1,5 @@

This document explains how to start the server (`@affine/server`) locally with Docker.

> **Warning**:
>
> This document is not guaranteed to be up-to-date.
> If you find any outdated information, please feel free to open an issue or submit a PR.

## Run PostgreSQL in Docker

```

@@ -54,24 +49,28 @@ postgres=# \du

### Set the following config to `packages/backend/server/.env`

In the following setup, we assume you have a PostgreSQL server running at localhost:5432 and MailHog running at localhost:1025.

When logging in via email, you can see the mail arrive at localhost:8025 in a browser.

```
DATABASE_URL="postgresql://affine:affine@localhost:5432/affine"
MAILER_SENDER="noreply@toeverything.info"
MAILER_USER="auth"
MAILER_PASSWORD="auth"
MAILER_HOST="localhost"
MAILER_PORT="1025"
NEXTAUTH_URL="http://localhost:8080/"
```

You may need additional environment variables for OAuth login. If you are not part of the AFFiNE team, put your own credentials here.

For email login & password, please refer to https://nodemailer.com/usage/using-gmail/

```
MAILER_SENDER=
MAILER_USER=
MAILER_PASSWORD=
OAUTH_GOOGLE_ENABLED="true"
OAUTH_GOOGLE_CLIENT_ID=
OAUTH_GOOGLE_CLIENT_SECRET=
```
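
For reference, the `MAILER_*` values above map onto a standard Nodemailer transport. The sketch below is only a local sanity check for the mail settings, not the server's actual mail wiring; the recipient address is a placeholder.

```ts
import nodemailer from 'nodemailer';

// Build a transport from the same MAILER_* variables as the .env above.
const transport = nodemailer.createTransport({
  host: process.env.MAILER_HOST,
  port: Number(process.env.MAILER_PORT ?? 1025),
  secure: process.env.MAILER_SECURE === 'true',
  auth: process.env.MAILER_USER
    ? { user: process.env.MAILER_USER, pass: process.env.MAILER_PASSWORD }
    : undefined,
});

async function main() {
  // With MailHog, the message shows up at http://localhost:8025
  await transport.sendMail({
    from: process.env.MAILER_SENDER,
    to: 'dev@example.com', // placeholder recipient
    subject: 'local mailer check',
    text: 'hello from the local dev setup',
  });
}

main().catch(console.error);
```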

## Prepare Prisma

```
yarn workspace @affine/server prisma db push
yarn workspace @affine/server data-migration run
```

Note: you may need to run these again whenever the database schema changes.
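
To sanity-check that the schema was pushed and the server can reach the database, a throwaway script like the one below can help. It is just a local check using the `@prisma/client` dependency, not part of the repository.

```ts
import { PrismaClient } from '@prisma/client';

// Picks up DATABASE_URL from the .env configured above.
const prisma = new PrismaClient();

async function main() {
  // Any simple query works; the `user` model exists after `prisma db push`.
  const count = await prisma.user.count();
  console.log(`connected, ${count} user(s) in the database`);
}

main()
  .catch(err => {
    console.error(err);
    process.exitCode = 1;
  })
  .finally(() => prisma.$disconnect());
```
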
@@ -86,7 +85,7 @@ yarn workspace @affine/server prisma studio

```
# build native
yarn workspace @affine/server-native build
yarn workspace @affine/storage build
yarn workspace @affine/native build
```

@@ -7,9 +7,9 @@
    "dev": "nodemon --exec 'typedoc --options ../../typedoc.json' & serve dist/"
  },
  "devDependencies": {
    "nodemon": "^3.1.0",
    "nodemon": "^3.0.1",
    "serve": "^14.2.1",
    "typedoc": "^0.25.13"
    "typedoc": "^0.25.4"
  },
  "nodemonConfig": {
    "watch": [
@@ -19,5 +19,5 @@
    ],
    "ext": "ts,md,json"
  },
  "version": "0.14.0"
  "version": "0.12.0"
}

oxlint.json
@@ -1,10 +0,0 @@
{
  "rules": {
    "import/no-cycle": [
      "error",
      {
        "ignoreTypes": true
      }
    ]
  }
}

package.json
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@affine/monorepo",
|
||||
"version": "0.14.0",
|
||||
"version": "0.12.0",
|
||||
"private": true,
|
||||
"author": "toeverything",
|
||||
"license": "MIT",
|
||||
@@ -14,27 +14,29 @@
|
||||
"tests/affine-legacy/*"
|
||||
],
|
||||
"engines": {
|
||||
"node": "<21.0.0"
|
||||
"node": ">=18.16.1 <19.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
"dev": "yarn workspace @affine/cli dev",
|
||||
"dev": "dev-core",
|
||||
"dev:electron": "yarn workspace @affine/electron dev",
|
||||
"build": "yarn nx build @affine/web",
|
||||
"build": "yarn nx build @affine/core",
|
||||
"build:electron": "yarn nx build @affine/electron",
|
||||
"build:server-native": "yarn nx run-many -t build -p @affine/server-native",
|
||||
"start:web-static": "yarn workspace @affine/web static-server",
|
||||
"build:storage": "yarn nx run-many -t build -p @affine/storage",
|
||||
"build:storybook": "yarn nx build @affine/storybook",
|
||||
"start:web-static": "yarn workspace @affine/core static-server",
|
||||
"start:storybook": "yarn exec serve tests/storybook/storybook-static -l 6006",
|
||||
"serve:test-static": "yarn exec serve tests/fixtures --cors -p 8081",
|
||||
"lint:eslint": "cross-env NODE_OPTIONS=\"--max-old-space-size=8192\" eslint . --ext .js,mjs,.ts,.tsx --cache",
|
||||
"lint:eslint": "eslint . --ext .js,mjs,.ts,.tsx --cache",
|
||||
"lint:eslint:fix": "yarn lint:eslint --fix",
|
||||
"lint:prettier": "prettier --ignore-unknown --cache --check .",
|
||||
"lint:prettier:fix": "prettier --ignore-unknown --cache --write .",
|
||||
"lint:ox": "oxlint -c oxlint.json --import-plugin --deny-warnings -D correctness -D nursery -D prefer-array-some -D no-useless-promise-resolve-reject -D perf -A no-undef -A consistent-type-exports -A default -A named -A ban-ts-comment -A export -A no-unresolved -A no-default-export -A no-duplicates -A no-side-effects-in-initialization -A no-named-as-default -A getter-return",
|
||||
"lint:ox": "oxlint --import-plugin --deny-warnings -D correctness -D nursery -D prefer-array-some -D no-useless-promise-resolve-reject -D perf -A no-undef -A consistent-type-exports -A default -A named -A ban-ts-comment -A export",
|
||||
"lint": "yarn lint:eslint && yarn lint:prettier",
|
||||
"lint:fix": "yarn lint:eslint:fix && yarn lint:prettier:fix",
|
||||
"test": "vitest --run",
|
||||
"test:ui": "vitest --ui",
|
||||
"test:coverage": "vitest run --coverage",
|
||||
"typecheck": "tsc -b tsconfig.json",
|
||||
"typecheck": "tsc -b tsconfig.json --diagnostics",
|
||||
"postinstall": "node ./scripts/check-version.mjs && yarn i18n-codegen gen && yarn husky install",
|
||||
"prepare": "husky"
|
||||
},
|
||||
@@ -42,7 +44,7 @@
|
||||
"*": "prettier --write --ignore-unknown --cache",
|
||||
"*.{ts,tsx,mjs,js,jsx}": [
|
||||
"prettier --ignore-unknown --write",
|
||||
"cross-env NODE_OPTIONS=\"--max-old-space-size=8192\" eslint --cache --fix"
|
||||
"eslint --cache --fix"
|
||||
],
|
||||
"*.toml": [
|
||||
"taplo format"
|
||||
@@ -54,63 +56,63 @@
|
||||
"devDependencies": {
|
||||
"@affine-test/kit": "workspace:*",
|
||||
"@affine/cli": "workspace:*",
|
||||
"@commitlint/cli": "^19.2.1",
|
||||
"@commitlint/config-conventional": "^19.1.0",
|
||||
"@faker-js/faker": "^8.4.1",
|
||||
"@commitlint/cli": "^18.4.3",
|
||||
"@commitlint/config-conventional": "^18.4.3",
|
||||
"@faker-js/faker": "^8.3.1",
|
||||
"@istanbuljs/schema": "^0.1.3",
|
||||
"@magic-works/i18n-codegen": "^0.5.0",
|
||||
"@nx/vite": "19.0.0",
|
||||
"@playwright/test": "^1.43.0",
|
||||
"@taplo/cli": "^0.7.0",
|
||||
"@testing-library/react": "^15.0.0",
|
||||
"@nx/vite": "17.2.8",
|
||||
"@playwright/test": "^1.41.0",
|
||||
"@taplo/cli": "^0.5.2",
|
||||
"@testing-library/react": "^14.1.2",
|
||||
"@toeverything/infra": "workspace:*",
|
||||
"@types/affine__env": "workspace:*",
|
||||
"@types/eslint": "^8.56.7",
|
||||
"@types/node": "^20.12.7",
|
||||
"@typescript-eslint/eslint-plugin": "^7.6.0",
|
||||
"@typescript-eslint/parser": "^7.6.0",
|
||||
"@vanilla-extract/vite-plugin": "^4.0.7",
|
||||
"@vanilla-extract/webpack-plugin": "^2.3.7",
|
||||
"@vitejs/plugin-react-swc": "^3.6.0",
|
||||
"@vitest/coverage-istanbul": "1.4.0",
|
||||
"@vitest/ui": "1.4.0",
|
||||
"cross-env": "^7.0.3",
|
||||
"electron": "^30.0.0",
|
||||
"eslint": "^8.57.0",
|
||||
"eslint-config-prettier": "^9.1.0",
|
||||
"eslint-plugin-import-x": "^0.5.0",
|
||||
"eslint-plugin-react": "^7.34.1",
|
||||
"@types/eslint": "^8.44.7",
|
||||
"@types/node": "^20.9.3",
|
||||
"@typescript-eslint/eslint-plugin": "^6.13.1",
|
||||
"@typescript-eslint/parser": "^6.13.1",
|
||||
"@vanilla-extract/vite-plugin": "^3.9.2",
|
||||
"@vanilla-extract/webpack-plugin": "^2.3.1",
|
||||
"@vitejs/plugin-react-swc": "^3.5.0",
|
||||
"@vitest/coverage-istanbul": "1.1.3",
|
||||
"@vitest/ui": "1.1.3",
|
||||
"electron": "^28.2.1",
|
||||
"eslint": "^8.54.0",
|
||||
"eslint-config-prettier": "^9.0.0",
|
||||
"eslint-plugin-i": "^2.29.0",
|
||||
"eslint-plugin-react": "^7.33.2",
|
||||
"eslint-plugin-react-hooks": "^4.6.0",
|
||||
"eslint-plugin-rxjs": "^5.0.3",
|
||||
"eslint-plugin-simple-import-sort": "^12.0.0",
|
||||
"eslint-plugin-sonarjs": "^0.25.1",
|
||||
"eslint-plugin-unicorn": "^52.0.0",
|
||||
"eslint-plugin-unused-imports": "^3.1.0",
|
||||
"eslint-plugin-vue": "^9.24.1",
|
||||
"eslint-plugin-simple-import-sort": "^10.0.0",
|
||||
"eslint-plugin-sonarjs": "^0.23.0",
|
||||
"eslint-plugin-unicorn": "^50.0.0",
|
||||
"eslint-plugin-unused-imports": "^3.0.0",
|
||||
"eslint-plugin-vue": "^9.18.1",
|
||||
"fake-indexeddb": "5.0.2",
|
||||
"happy-dom": "^14.7.1",
|
||||
"husky": "^9.0.11",
|
||||
"lint-staged": "^15.2.2",
|
||||
"msw": "^2.2.13",
|
||||
"nanoid": "^5.0.7",
|
||||
"nx": "^19.0.0",
|
||||
"happy-dom": "^13.0.0",
|
||||
"husky": "^9.0.6",
|
||||
"lint-staged": "^15.1.0",
|
||||
"msw": "^2.0.8",
|
||||
"nanoid": "^5.0.3",
|
||||
"nx": "^17.2.8",
|
||||
"nyc": "^15.1.0",
|
||||
"oxlint": "0.3.1",
|
||||
"prettier": "^3.2.5",
|
||||
"semver": "^7.6.0",
|
||||
"oxlint": "0.0.22",
|
||||
"prettier": "^3.1.0",
|
||||
"semver": "^7.5.4",
|
||||
"serve": "^14.2.1",
|
||||
"string-width": "^7.1.0",
|
||||
"ts-node": "^10.9.2",
|
||||
"typescript": "^5.4.5",
|
||||
"vite": "^5.2.8",
|
||||
"vite-plugin-istanbul": "^6.0.0",
|
||||
"vite-plugin-static-copy": "^1.0.2",
|
||||
"vitest": "1.4.0",
|
||||
"string-width": "^7.0.0",
|
||||
"ts-node": "^10.9.1",
|
||||
"typescript": "^5.3.2",
|
||||
"vite": "^5.0.6",
|
||||
"vite-plugin-istanbul": "^5.0.0",
|
||||
"vite-plugin-static-copy": "^1.0.0",
|
||||
"vite-tsconfig-paths": "^4.2.1",
|
||||
"vitest": "1.1.3",
|
||||
"vitest-fetch-mock": "^0.2.2",
|
||||
"vitest-mock-extended": "^1.3.1"
|
||||
},
|
||||
"packageManager": "yarn@4.1.1",
|
||||
"packageManager": "yarn@4.0.2",
|
||||
"resolutions": {
|
||||
"vite": "^5.0.6",
|
||||
"array-buffer-byte-length": "npm:@nolyfill/array-buffer-byte-length@latest",
|
||||
"array-includes": "npm:@nolyfill/array-includes@latest",
|
||||
"array.prototype.flat": "npm:@nolyfill/array.prototype.flat@latest",
|
||||
@@ -166,8 +168,9 @@
|
||||
"unbox-primitive": "npm:@nolyfill/unbox-primitive@latest",
|
||||
"which-boxed-primitive": "npm:@nolyfill/which-boxed-primitive@latest",
|
||||
"which-typed-array": "npm:@nolyfill/which-typed-array@latest",
|
||||
"@reforged/maker-appimage/@electron-forge/maker-base": "7.3.1",
|
||||
"macos-alias": "npm:@napi-rs/macos-alias@0.0.4",
|
||||
"next-auth@^4.24.5": "patch:next-auth@npm%3A4.24.5#~/.yarn/patches/next-auth-npm-4.24.5-8428e11927.patch",
|
||||
"@reforged/maker-appimage/@electron-forge/maker-base": "7.2.0",
|
||||
"macos-alias": "npm:macos-alias-building@latest",
|
||||
"fs-xattr": "npm:@napi-rs/xattr@latest",
|
||||
"@radix-ui/react-dialog": "npm:@radix-ui/react-dialog@latest"
|
||||
}
|
||||
|
||||
@@ -1,8 +0,0 @@
use napi_derive::napi;

#[napi]
pub fn get_mime(input: &[u8]) -> String {
    file_format::FileFormat::from_bytes(input)
        .media_type()
        .to_string()
}

@@ -11,7 +11,7 @@ yarn

### Build Native binding

```bash
yarn workspace @affine/server-native build
yarn workspace @affine/storage build
```

### Run server

@@ -1,70 +0,0 @@
|
||||
-- DropForeignKey
|
||||
ALTER TABLE "accounts" DROP CONSTRAINT "accounts_user_id_fkey";
|
||||
|
||||
-- DropForeignKey
|
||||
ALTER TABLE "sessions" DROP CONSTRAINT "sessions_user_id_fkey";
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "user_connected_accounts" (
|
||||
"id" VARCHAR(36) NOT NULL,
|
||||
"user_id" VARCHAR(36) NOT NULL,
|
||||
"provider" VARCHAR NOT NULL,
|
||||
"provider_account_id" VARCHAR NOT NULL,
|
||||
"scope" TEXT,
|
||||
"access_token" TEXT,
|
||||
"refresh_token" TEXT,
|
||||
"expires_at" TIMESTAMPTZ(6),
|
||||
"created_at" TIMESTAMPTZ(6) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updated_at" TIMESTAMPTZ(6) NOT NULL,
|
||||
|
||||
CONSTRAINT "user_connected_accounts_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "multiple_users_sessions" (
|
||||
"id" VARCHAR(36) NOT NULL,
|
||||
"expires_at" TIMESTAMPTZ(6),
|
||||
"created_at" TIMESTAMPTZ(6) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
|
||||
CONSTRAINT "multiple_users_sessions_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "user_sessions" (
|
||||
"id" VARCHAR(36) NOT NULL,
|
||||
"session_id" VARCHAR(36) NOT NULL,
|
||||
"user_id" VARCHAR(36) NOT NULL,
|
||||
"expires_at" TIMESTAMPTZ(6),
|
||||
"created_at" TIMESTAMPTZ(6) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
|
||||
CONSTRAINT "user_sessions_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "verification_tokens" (
|
||||
"token" VARCHAR(36) NOT NULL,
|
||||
"type" SMALLINT NOT NULL,
|
||||
"credential" TEXT,
|
||||
"expiresAt" TIMESTAMPTZ(6) NOT NULL
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "user_connected_accounts_user_id_idx" ON "user_connected_accounts"("user_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "user_connected_accounts_provider_account_id_idx" ON "user_connected_accounts"("provider_account_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "user_sessions_session_id_user_id_key" ON "user_sessions"("session_id", "user_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "verification_tokens_type_token_key" ON "verification_tokens"("type", "token");
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "user_connected_accounts" ADD CONSTRAINT "user_connected_accounts_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "user_sessions" ADD CONSTRAINT "user_sessions_session_id_fkey" FOREIGN KEY ("session_id") REFERENCES "multiple_users_sessions"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "user_sessions" ADD CONSTRAINT "user_sessions_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
@@ -1,2 +0,0 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "users" ADD COLUMN "registered" BOOLEAN NOT NULL DEFAULT true;
|
||||
@@ -1,11 +0,0 @@
|
||||
/*
|
||||
Warnings:
|
||||
|
||||
- A unique constraint covering the columns `[user_id,plan]` on the table `user_subscriptions` will be added. If there are existing duplicate values, this will fail.
|
||||
|
||||
*/
|
||||
-- DropIndex
|
||||
DROP INDEX "user_subscriptions_user_id_key";
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "user_subscriptions_user_id_plan_key" ON "user_subscriptions"("user_id", "plan");
|
||||
@@ -1,16 +0,0 @@
|
||||
-- CreateEnum
|
||||
CREATE TYPE "AiPromptRole" AS ENUM ('system', 'assistant', 'user');
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "ai_prompts" (
|
||||
"id" VARCHAR NOT NULL,
|
||||
"name" VARCHAR(20) NOT NULL,
|
||||
"idx" INTEGER NOT NULL,
|
||||
"role" "AiPromptRole" NOT NULL,
|
||||
"content" TEXT NOT NULL,
|
||||
"created_at" TIMESTAMPTZ(6) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
CONSTRAINT "ai_prompts_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "ai_prompts_name_idx_key" ON "ai_prompts"("name", "idx");
|
||||
@@ -1,25 +0,0 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "ai_sessions" (
|
||||
"id" VARCHAR(36) NOT NULL,
|
||||
"user_id" VARCHAR NOT NULL,
|
||||
"workspace_id" VARCHAR NOT NULL,
|
||||
"doc_id" VARCHAR NOT NULL,
|
||||
"prompt_name" VARCHAR NOT NULL,
|
||||
"action" BOOLEAN NOT NULL,
|
||||
"flavor" VARCHAR NOT NULL,
|
||||
"model" VARCHAR NOT NULL,
|
||||
"messages" JSON NOT NULL,
|
||||
"created_at" TIMESTAMPTZ(6) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updated_at" TIMESTAMPTZ(6) NOT NULL,
|
||||
|
||||
CONSTRAINT "ai_sessions_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "ai_sessions" ADD CONSTRAINT "ai_sessions_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "ai_sessions" ADD CONSTRAINT "ai_sessions_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "ai_sessions" ADD CONSTRAINT "ai_sessions_doc_id_workspace_id_fkey" FOREIGN KEY ("doc_id", "workspace_id") REFERENCES "snapshots"("guid", "workspace_id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
@@ -1,87 +0,0 @@
|
||||
/*
|
||||
Warnings:
|
||||
|
||||
- You are about to drop the `ai_prompts` table. If the table is not empty, all the data it contains will be lost.
|
||||
- You are about to drop the `ai_sessions` table. If the table is not empty, all the data it contains will be lost.
|
||||
|
||||
*/
|
||||
-- DropForeignKey
|
||||
ALTER TABLE "ai_sessions" DROP CONSTRAINT "ai_sessions_doc_id_workspace_id_fkey";
|
||||
|
||||
-- DropForeignKey
|
||||
ALTER TABLE "ai_sessions" DROP CONSTRAINT "ai_sessions_user_id_fkey";
|
||||
|
||||
-- DropForeignKey
|
||||
ALTER TABLE "ai_sessions" DROP CONSTRAINT "ai_sessions_workspace_id_fkey";
|
||||
|
||||
-- DropTable
|
||||
DROP TABLE "ai_prompts";
|
||||
|
||||
-- DropTable
|
||||
DROP TABLE "ai_sessions";
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "ai_prompts_messages" (
|
||||
"prompt_id" INTEGER NOT NULL,
|
||||
"idx" INTEGER NOT NULL,
|
||||
"role" "AiPromptRole" NOT NULL,
|
||||
"content" TEXT NOT NULL,
|
||||
"attachments" JSON,
|
||||
"params" JSON,
|
||||
"created_at" TIMESTAMPTZ(6) NOT NULL DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "ai_prompts_metadata" (
|
||||
"id" SERIAL NOT NULL,
|
||||
"name" VARCHAR(32) NOT NULL,
|
||||
"action" VARCHAR,
|
||||
"model" VARCHAR,
|
||||
"created_at" TIMESTAMPTZ(6) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
|
||||
CONSTRAINT "ai_prompts_metadata_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "ai_sessions_messages" (
|
||||
"id" VARCHAR(36) NOT NULL,
|
||||
"session_id" VARCHAR(36) NOT NULL,
|
||||
"role" "AiPromptRole" NOT NULL,
|
||||
"content" TEXT NOT NULL,
|
||||
"attachments" JSON,
|
||||
"params" JSON,
|
||||
"created_at" TIMESTAMPTZ(6) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updated_at" TIMESTAMPTZ(6) NOT NULL,
|
||||
|
||||
CONSTRAINT "ai_sessions_messages_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "ai_sessions_metadata" (
|
||||
"id" VARCHAR(36) NOT NULL,
|
||||
"user_id" VARCHAR(36) NOT NULL,
|
||||
"workspace_id" VARCHAR(36) NOT NULL,
|
||||
"doc_id" VARCHAR(36) NOT NULL,
|
||||
"prompt_name" VARCHAR(32) NOT NULL,
|
||||
"created_at" TIMESTAMPTZ(6) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
|
||||
CONSTRAINT "ai_sessions_metadata_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "ai_prompts_messages_prompt_id_idx_key" ON "ai_prompts_messages"("prompt_id", "idx");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "ai_prompts_metadata_name_key" ON "ai_prompts_metadata"("name");
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "ai_prompts_messages" ADD CONSTRAINT "ai_prompts_messages_prompt_id_fkey" FOREIGN KEY ("prompt_id") REFERENCES "ai_prompts_metadata"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "ai_sessions_messages" ADD CONSTRAINT "ai_sessions_messages_session_id_fkey" FOREIGN KEY ("session_id") REFERENCES "ai_sessions_metadata"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "ai_sessions_metadata" ADD CONSTRAINT "ai_sessions_metadata_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "ai_sessions_metadata" ADD CONSTRAINT "ai_sessions_metadata_prompt_name_fkey" FOREIGN KEY ("prompt_name") REFERENCES "ai_prompts_metadata"("name") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
@@ -1,5 +0,0 @@
|
||||
-- CreateIndex
|
||||
CREATE INDEX "user_features_user_id_idx" ON "user_features"("user_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "users_email_idx" ON "users"("email");
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "@affine/server",
|
||||
"private": true,
|
||||
"version": "0.14.0",
|
||||
"version": "0.12.0",
|
||||
"description": "Affine Node.js server",
|
||||
"type": "module",
|
||||
"bin": {
|
||||
@@ -18,106 +18,103 @@
|
||||
"predeploy": "yarn prisma migrate deploy && node --import ./scripts/register.js ./dist/data/index.js run"
|
||||
},
|
||||
"dependencies": {
|
||||
"@apollo/server": "^4.10.2",
|
||||
"@aws-sdk/client-s3": "^3.552.0",
|
||||
"@apollo/server": "^4.9.5",
|
||||
"@auth/prisma-adapter": "^1.0.7",
|
||||
"@aws-sdk/client-s3": "^3.499.0",
|
||||
"@google-cloud/opentelemetry-cloud-monitoring-exporter": "^0.17.0",
|
||||
"@google-cloud/opentelemetry-cloud-trace-exporter": "^2.1.0",
|
||||
"@google-cloud/opentelemetry-resource-util": "^2.1.0",
|
||||
"@keyv/redis": "^2.8.4",
|
||||
"@nestjs/apollo": "^12.1.0",
|
||||
"@nestjs/common": "^10.3.7",
|
||||
"@nestjs/core": "^10.3.7",
|
||||
"@nestjs/event-emitter": "^2.0.4",
|
||||
"@nestjs/graphql": "^12.1.1",
|
||||
"@nestjs/platform-express": "^10.3.7",
|
||||
"@nestjs/platform-socket.io": "^10.3.7",
|
||||
"@nestjs/schedule": "^4.0.1",
|
||||
"@nestjs/serve-static": "^4.0.2",
|
||||
"@nestjs/throttler": "5.0.1",
|
||||
"@nestjs/websockets": "^10.3.7",
|
||||
"@node-rs/argon2": "^1.8.0",
|
||||
"@node-rs/crc32": "^1.10.0",
|
||||
"@node-rs/jsonwebtoken": "^0.5.2",
|
||||
"@opentelemetry/api": "^1.8.0",
|
||||
"@opentelemetry/core": "^1.23.0",
|
||||
"@opentelemetry/exporter-prometheus": "^0.50.0",
|
||||
"@opentelemetry/exporter-zipkin": "^1.23.0",
|
||||
"@keyv/redis": "^2.8.0",
|
||||
"@nestjs/apollo": "^12.0.11",
|
||||
"@nestjs/common": "^10.2.10",
|
||||
"@nestjs/core": "^10.2.10",
|
||||
"@nestjs/event-emitter": "^2.0.3",
|
||||
"@nestjs/graphql": "^12.0.11",
|
||||
"@nestjs/platform-express": "^10.2.10",
|
||||
"@nestjs/platform-socket.io": "^10.2.10",
|
||||
"@nestjs/schedule": "^4.0.0",
|
||||
"@nestjs/serve-static": "^4.0.0",
|
||||
"@nestjs/throttler": "^5.0.1",
|
||||
"@nestjs/websockets": "^10.2.10",
|
||||
"@node-rs/argon2": "^1.5.2",
|
||||
"@node-rs/crc32": "^1.7.2",
|
||||
"@node-rs/jsonwebtoken": "^0.3.0",
|
||||
"@opentelemetry/api": "^1.7.0",
|
||||
"@opentelemetry/core": "^1.21.0",
|
||||
"@opentelemetry/exporter-prometheus": "^0.48.0",
|
||||
"@opentelemetry/exporter-zipkin": "^1.21.0",
|
||||
"@opentelemetry/host-metrics": "^0.35.0",
|
||||
"@opentelemetry/instrumentation": "^0.50.0",
|
||||
"@opentelemetry/instrumentation-graphql": "^0.39.0",
|
||||
"@opentelemetry/instrumentation-http": "^0.50.0",
|
||||
"@opentelemetry/instrumentation-ioredis": "^0.39.0",
|
||||
"@opentelemetry/instrumentation-nestjs-core": "^0.36.0",
|
||||
"@opentelemetry/instrumentation-socket.io": "^0.38.0",
|
||||
"@opentelemetry/resources": "^1.23.0",
|
||||
"@opentelemetry/sdk-metrics": "^1.23.0",
|
||||
"@opentelemetry/sdk-node": "^0.50.0",
|
||||
"@opentelemetry/sdk-trace-node": "^1.23.0",
|
||||
"@opentelemetry/semantic-conventions": "^1.23.0",
|
||||
"@prisma/client": "^5.12.1",
|
||||
"@prisma/instrumentation": "^5.12.1",
|
||||
"@socket.io/redis-adapter": "^8.3.0",
|
||||
"@opentelemetry/instrumentation": "^0.48.0",
|
||||
"@opentelemetry/instrumentation-graphql": "^0.37.0",
|
||||
"@opentelemetry/instrumentation-http": "^0.48.0",
|
||||
"@opentelemetry/instrumentation-ioredis": "^0.37.0",
|
||||
"@opentelemetry/instrumentation-nestjs-core": "^0.34.0",
|
||||
"@opentelemetry/instrumentation-socket.io": "^0.36.0",
|
||||
"@opentelemetry/resources": "^1.21.0",
|
||||
"@opentelemetry/sdk-metrics": "^1.21.0",
|
||||
"@opentelemetry/sdk-node": "^0.48.0",
|
||||
"@opentelemetry/sdk-trace-node": "^1.21.0",
|
||||
"@opentelemetry/semantic-conventions": "^1.21.0",
|
||||
"@prisma/client": "^5.7.1",
|
||||
"@prisma/instrumentation": "^5.7.1",
|
||||
"@socket.io/redis-adapter": "^8.2.1",
|
||||
"cookie-parser": "^1.4.6",
|
||||
"dotenv": "^16.4.5",
|
||||
"dotenv-cli": "^7.4.1",
|
||||
"express": "^4.19.2",
|
||||
"get-stream": "^9.0.1",
|
||||
"dotenv": "^16.3.1",
|
||||
"dotenv-cli": "^7.3.0",
|
||||
"express": "^4.18.2",
|
||||
"file-type": "^19.0.0",
|
||||
"get-stream": "^8.0.1",
|
||||
"graphql": "^16.8.1",
|
||||
"graphql-scalars": "^1.23.0",
|
||||
"graphql-scalars": "^1.22.4",
|
||||
"graphql-type-json": "^0.3.2",
|
||||
"graphql-upload": "^16.0.2",
|
||||
"ioredis": "^5.3.2",
|
||||
"keyv": "^4.5.4",
|
||||
"lodash-es": "^4.17.21",
|
||||
"mixpanel": "^0.18.0",
|
||||
"mustache": "^4.2.0",
|
||||
"nanoid": "^5.0.7",
|
||||
"nest-commander": "^3.12.5",
|
||||
"nanoid": "^5.0.3",
|
||||
"nest-commander": "^3.12.2",
|
||||
"nestjs-throttler-storage-redis": "^0.4.1",
|
||||
"nodemailer": "^6.9.13",
|
||||
"next-auth": "^4.24.5",
|
||||
"nodemailer": "^6.9.7",
|
||||
"on-headers": "^1.0.2",
|
||||
"openai": "^4.33.0",
|
||||
"parse-duration": "^1.1.0",
|
||||
"pretty-time": "^1.1.0",
|
||||
"prisma": "^5.12.1",
|
||||
"prom-client": "^15.1.1",
|
||||
"reflect-metadata": "^0.2.2",
|
||||
"prisma": "^5.7.1",
|
||||
"prom-client": "^15.0.0",
|
||||
"reflect-metadata": "^0.2.0",
|
||||
"rxjs": "^7.8.1",
|
||||
"semver": "^7.6.0",
|
||||
"socket.io": "^4.7.5",
|
||||
"stripe": "^15.0.0",
|
||||
"tiktoken": "^1.0.13",
|
||||
"semver": "^7.5.4",
|
||||
"socket.io": "^4.7.2",
|
||||
"stripe": "^14.5.0",
|
||||
"ts-node": "^10.9.2",
|
||||
"typescript": "^5.4.5",
|
||||
"ws": "^8.16.0",
|
||||
"yjs": "^13.6.14",
|
||||
"typescript": "^5.3.3",
|
||||
"ws": "^8.14.2",
|
||||
"yjs": "^13.6.10",
|
||||
"zod": "^3.22.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@affine-test/kit": "workspace:*",
|
||||
"@affine/server-native": "workspace:*",
|
||||
"@napi-rs/image": "^1.9.1",
|
||||
"@nestjs/testing": "^10.3.7",
|
||||
"@types/cookie-parser": "^1.4.7",
|
||||
"@affine/storage": "workspace:*",
|
||||
"@napi-rs/image": "^1.7.0",
|
||||
"@nestjs/testing": "^10.2.10",
|
||||
"@types/cookie-parser": "^1.4.6",
|
||||
"@types/engine.io": "^3.1.10",
|
||||
"@types/express": "^4.17.21",
|
||||
"@types/graphql-upload": "^16.0.7",
|
||||
"@types/graphql-upload": "^16.0.5",
|
||||
"@types/keyv": "^4.2.0",
|
||||
"@types/lodash-es": "^4.17.12",
|
||||
"@types/mixpanel": "^2.14.8",
|
||||
"@types/mustache": "^4.2.5",
|
||||
"@types/node": "^20.12.7",
|
||||
"@types/lodash-es": "^4.17.11",
|
||||
"@types/node": "^20.9.3",
|
||||
"@types/nodemailer": "^6.4.14",
|
||||
"@types/on-headers": "^1.0.3",
|
||||
"@types/pretty-time": "^1.1.5",
|
||||
"@types/sinon": "^17.0.3",
|
||||
"@types/supertest": "^6.0.2",
|
||||
"@types/sinon": "^17.0.2",
|
||||
"@types/supertest": "^6.0.0",
|
||||
"@types/ws": "^8.5.10",
|
||||
"ava": "^6.1.2",
|
||||
"c8": "^9.1.0",
|
||||
"nodemon": "^3.1.0",
|
||||
"ava": "^6.0.0",
|
||||
"c8": "^9.0.0",
|
||||
"nodemon": "^3.0.1",
|
||||
"sinon": "^17.0.1",
|
||||
"supertest": "^7.0.0"
|
||||
"supertest": "^6.3.3"
|
||||
},
|
||||
"ava": {
|
||||
"timeout": "1m",
|
||||
@@ -129,7 +126,8 @@
|
||||
"--trace-sigint",
|
||||
"--loader",
|
||||
"ts-node/esm/transpile-only.mjs",
|
||||
"--es-module-specifier-resolution=node"
|
||||
"--es-module-specifier-resolution",
|
||||
"node"
|
||||
],
|
||||
"files": [
|
||||
"tests/**/*.spec.ts",
|
||||
@@ -146,8 +144,7 @@
|
||||
"MAILER_USER": "noreply@toeverything.info",
|
||||
"MAILER_PASSWORD": "affine",
|
||||
"MAILER_SENDER": "noreply@toeverything.info",
|
||||
"FEATURES_EARLY_ACCESS_PREVIEW": "false",
|
||||
"DEPLOYMENT_TYPE": "affine"
|
||||
"FEATURES_EARLY_ACCESS_PREVIEW": "false"
|
||||
}
|
||||
},
|
||||
"nodemonConfig": {
|
||||
@@ -156,7 +153,8 @@
|
||||
"nodeArgs": [
|
||||
"--loader",
|
||||
"ts-node/esm.mjs",
|
||||
"--es-module-specifier-resolution=node"
|
||||
"--es-module-specifier-resolution",
|
||||
"node"
|
||||
],
|
||||
"ignore": [
|
||||
"**/__tests__/**",
|
||||
|
||||
@@ -10,85 +10,28 @@ datasource db {
|
||||
}
|
||||
|
||||
model User {
|
||||
id String @id @default(uuid()) @db.VarChar
|
||||
name String
|
||||
email String @unique
|
||||
emailVerifiedAt DateTime? @map("email_verified")
|
||||
avatarUrl String? @map("avatar_url") @db.VarChar
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
id String @id @default(uuid()) @db.VarChar
|
||||
name String
|
||||
email String @unique
|
||||
emailVerified DateTime? @map("email_verified")
|
||||
// image field is for the next-auth
|
||||
avatarUrl String? @map("avatar_url") @db.VarChar
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
/// Not available if user signed up through OAuth providers
|
||||
password String? @db.VarChar
|
||||
/// Indicate whether the user finished the signup progress.
|
||||
/// for example, the value will be false if user never registered and invited into a workspace by others.
|
||||
registered Boolean @default(true)
|
||||
password String? @db.VarChar
|
||||
|
||||
accounts Account[]
|
||||
sessions Session[]
|
||||
features UserFeatures[]
|
||||
customer UserStripeCustomer?
|
||||
subscriptions UserSubscription[]
|
||||
subscription UserSubscription?
|
||||
invoices UserInvoice[]
|
||||
workspacePermissions WorkspaceUserPermission[]
|
||||
pagePermissions WorkspacePageUserPermission[]
|
||||
connectedAccounts ConnectedAccount[]
|
||||
sessions UserSession[]
|
||||
aiSessions AiSession[]
|
||||
|
||||
@@index([email])
|
||||
@@map("users")
|
||||
}
|
||||
|
||||
model ConnectedAccount {
|
||||
id String @id @default(uuid()) @db.VarChar(36)
|
||||
userId String @map("user_id") @db.VarChar(36)
|
||||
provider String @db.VarChar
|
||||
providerAccountId String @map("provider_account_id") @db.VarChar
|
||||
scope String? @db.Text
|
||||
accessToken String? @map("access_token") @db.Text
|
||||
refreshToken String? @map("refresh_token") @db.Text
|
||||
expiresAt DateTime? @map("expires_at") @db.Timestamptz(6)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(6)
|
||||
|
||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@index([userId])
|
||||
@@index([providerAccountId])
|
||||
@@map("user_connected_accounts")
|
||||
}
|
||||
|
||||
model Session {
|
||||
id String @id @default(uuid()) @db.VarChar(36)
|
||||
expiresAt DateTime? @map("expires_at") @db.Timestamptz(6)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
|
||||
userSessions UserSession[]
|
||||
|
||||
@@map("multiple_users_sessions")
|
||||
}
|
||||
|
||||
model UserSession {
|
||||
id String @id @default(uuid()) @db.VarChar(36)
|
||||
sessionId String @map("session_id") @db.VarChar(36)
|
||||
userId String @map("user_id") @db.VarChar(36)
|
||||
expiresAt DateTime? @map("expires_at") @db.Timestamptz(6)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
|
||||
session Session @relation(fields: [sessionId], references: [id], onDelete: Cascade)
|
||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@unique([sessionId, userId])
|
||||
@@map("user_sessions")
|
||||
}
|
||||
|
||||
model VerificationToken {
|
||||
token String @db.VarChar(36)
|
||||
type Int @db.SmallInt
|
||||
credential String? @db.Text
|
||||
expiresAt DateTime @db.Timestamptz(6)
|
||||
|
||||
@@unique([type, token])
|
||||
@@map("verification_tokens")
|
||||
}
|
||||
|
||||
model Workspace {
|
||||
id String @id @default(uuid()) @db.VarChar
|
||||
public Boolean
|
||||
@@ -196,7 +139,6 @@ model UserFeatures {
|
||||
feature Features @relation(fields: [featureId], references: [id], onDelete: Cascade)
|
||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@index([userId])
|
||||
@@map("user_features")
|
||||
}
|
||||
|
||||
@@ -244,7 +186,7 @@ model Features {
|
||||
@@map("features")
|
||||
}
|
||||
|
||||
model DeprecatedNextAuthAccount {
|
||||
model Account {
|
||||
id String @id @default(cuid())
|
||||
userId String @map("user_id")
|
||||
type String
|
||||
@@ -258,20 +200,23 @@ model DeprecatedNextAuthAccount {
|
||||
id_token String? @db.Text
|
||||
session_state String?
|
||||
|
||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@unique([provider, providerAccountId])
|
||||
@@map("accounts")
|
||||
}
|
||||
|
||||
model DeprecatedNextAuthSession {
|
||||
model Session {
|
||||
id String @id @default(cuid())
|
||||
sessionToken String @unique @map("session_token")
|
||||
userId String @map("user_id")
|
||||
expires DateTime
|
||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@map("sessions")
|
||||
}
|
||||
|
||||
model DeprecatedNextAuthVerificationToken {
|
||||
model VerificationToken {
|
||||
identifier String
|
||||
token String @unique
|
||||
expires DateTime
|
||||
@@ -372,7 +317,7 @@ model UserStripeCustomer {
|
||||
|
||||
model UserSubscription {
|
||||
id Int @id @default(autoincrement()) @db.Integer
|
||||
userId String @map("user_id") @db.VarChar(36)
|
||||
userId String @unique @map("user_id") @db.VarChar(36)
|
||||
plan String @db.VarChar(20)
|
||||
// yearly/monthly
|
||||
recurring String @db.VarChar(20)
|
||||
@@ -398,7 +343,6 @@ model UserSubscription {
|
||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(6)
|
||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@unique([userId, plan])
|
||||
@@map("user_subscriptions")
|
||||
}
|
||||
|
||||
@@ -425,75 +369,6 @@ model UserInvoice {
|
||||
@@map("user_invoices")
|
||||
}
|
||||
|
||||
enum AiPromptRole {
|
||||
system
|
||||
assistant
|
||||
user
|
||||
}
|
||||
|
||||
model AiPromptMessage {
|
||||
promptId Int @map("prompt_id") @db.Integer
|
||||
// if a group of prompts contains multiple sentences, idx specifies the order of each sentence
|
||||
idx Int @db.Integer
|
||||
// system/assistant/user
|
||||
role AiPromptRole
|
||||
// prompt content
|
||||
content String @db.Text
|
||||
attachments Json? @db.Json
|
||||
params Json? @db.Json
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
|
||||
prompt AiPrompt @relation(fields: [promptId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@unique([promptId, idx])
|
||||
@@map("ai_prompts_messages")
|
||||
}
|
||||
|
||||
model AiPrompt {
|
||||
id Int @id @default(autoincrement()) @db.Integer
|
||||
name String @unique @db.VarChar(32)
|
||||
// an mark identifying which view to use to display the session
|
||||
// it is only used in the frontend and does not affect the backend
|
||||
action String? @db.VarChar
|
||||
model String? @db.VarChar
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
|
||||
messages AiPromptMessage[]
|
||||
sessions AiSession[]
|
||||
|
||||
@@map("ai_prompts_metadata")
|
||||
}
|
||||
|
||||
model AiSessionMessage {
|
||||
id String @id @default(uuid()) @db.VarChar(36)
|
||||
sessionId String @map("session_id") @db.VarChar(36)
|
||||
role AiPromptRole
|
||||
content String @db.Text
|
||||
attachments Json? @db.Json
|
||||
params Json? @db.Json
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(6)
|
||||
|
||||
session AiSession @relation(fields: [sessionId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@map("ai_sessions_messages")
|
||||
}
|
||||
|
||||
model AiSession {
|
||||
id String @id @default(uuid()) @db.VarChar(36)
|
||||
userId String @map("user_id") @db.VarChar(36)
|
||||
workspaceId String @map("workspace_id") @db.VarChar(36)
|
||||
docId String @map("doc_id") @db.VarChar(36)
|
||||
promptName String @map("prompt_name") @db.VarChar(32)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
|
||||
|
||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
prompt AiPrompt @relation(fields: [promptName], references: [name], onDelete: Cascade)
|
||||
messages AiSessionMessage[]
|
||||
|
||||
@@map("ai_sessions_metadata")
|
||||
}
|
||||
|
||||
model DataMigration {
|
||||
id String @id @default(uuid()) @db.VarChar(36)
|
||||
name String @db.VarChar
|
||||
|
||||
packages/backend/server/scripts/init-db.ts (new file)
@@ -0,0 +1,37 @@
import userA from '@affine-test/fixtures/userA.json' assert { type: 'json' };
import { hash } from '@node-rs/argon2';
import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

async function main() {
  // Seed the dev database with the fixture user, hashing its plain-text
  // password and granting the free plan feature so sign-in works right away.
  await prisma.user.create({
    data: {
      ...userA,
      password: await hash(userA.password),
      features: {
        create: {
          reason: 'created by api sign up',
          activated: true,
          feature: {
            connect: {
              feature_version: {
                feature: 'free_plan_v1',
                version: 1,
              },
            },
          },
        },
      },
    },
  });
}

main()
  .then(async () => {
    await prisma.$disconnect();
  })
  .catch(async e => {
    console.error(e);
    await prisma.$disconnect();
    process.exit(1);
  });

@@ -1,10 +1,7 @@
|
||||
import { execSync } from 'node:child_process';
|
||||
import { generateKeyPairSync } from 'node:crypto';
|
||||
import fs from 'node:fs';
|
||||
import path from 'node:path';
|
||||
|
||||
import { parse } from 'dotenv';
|
||||
|
||||
const SELF_HOST_CONFIG_DIR = '/root/.affine/config';
|
||||
/**
|
||||
* @type {Array<{ from: string; to?: string, modifier?: (content: string): string }>}
|
||||
@@ -39,26 +36,6 @@ function prepare() {
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// make the default .env
|
||||
if (to === '.env') {
|
||||
const dotenvFile = fs.readFileSync(targetFilePath, 'utf-8');
|
||||
const envs = parse(dotenvFile);
|
||||
// generate a new private key
|
||||
if (!envs.AFFINE_PRIVATE_KEY) {
|
||||
const privateKey = generateKeyPairSync('ec', {
|
||||
namedCurve: 'prime256v1',
|
||||
}).privateKey.export({
|
||||
type: 'sec1',
|
||||
format: 'pem',
|
||||
});
|
||||
|
||||
fs.writeFileSync(
|
||||
targetFilePath,
|
||||
`AFFINE_PRIVATE_KEY=${privateKey}\n` + dotenvFile
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,14 +1,11 @@
import { Controller, Get } from '@nestjs/common';

import { Public } from './core/auth';
import { Config, SkipThrottle } from './fundamentals';
import { Config } from './fundamentals/config';

@Controller('/')
export class AppController {
  constructor(private readonly config: Config) {}

  @SkipThrottle()
  @Public()
  @Get()
  info() {
    return {

@@ -1,6 +1,7 @@
|
||||
import { join } from 'node:path';
|
||||
|
||||
import { Logger, Module } from '@nestjs/common';
|
||||
import { APP_INTERCEPTOR } from '@nestjs/core';
|
||||
import { ScheduleModule } from '@nestjs/schedule';
|
||||
import { ServeStaticModule } from '@nestjs/serve-static';
|
||||
import { get } from 'lodash-es';
|
||||
@@ -13,36 +14,35 @@ import { FeatureModule } from './core/features';
|
||||
import { QuotaModule } from './core/quota';
|
||||
import { StorageModule } from './core/storage';
|
||||
import { SyncModule } from './core/sync';
|
||||
import { UserModule } from './core/user';
|
||||
import { UsersModule } from './core/users';
|
||||
import { WorkspaceModule } from './core/workspaces';
|
||||
import { getOptionalModuleMetadata } from './fundamentals';
|
||||
import { CacheModule } from './fundamentals/cache';
|
||||
import type { AvailablePlugins } from './fundamentals/config';
|
||||
import { Config, ConfigModule } from './fundamentals/config';
|
||||
import { CacheInterceptor, CacheModule } from './fundamentals/cache';
|
||||
import {
|
||||
type AvailablePlugins,
|
||||
Config,
|
||||
ConfigModule,
|
||||
} from './fundamentals/config';
|
||||
import { EventModule } from './fundamentals/event';
|
||||
import { GqlModule } from './fundamentals/graphql';
|
||||
import { HelpersModule } from './fundamentals/helpers';
|
||||
import { MailModule } from './fundamentals/mailer';
|
||||
import { MetricsModule } from './fundamentals/metrics';
|
||||
import { MutexModule } from './fundamentals/mutex';
|
||||
import { PrismaModule } from './fundamentals/prisma';
|
||||
import { StorageProviderModule } from './fundamentals/storage';
|
||||
import { SessionModule } from './fundamentals/session';
|
||||
import { RateLimiterModule } from './fundamentals/throttler';
|
||||
import { WebSocketModule } from './fundamentals/websocket';
|
||||
import { REGISTERED_PLUGINS } from './plugins';
|
||||
import { pluginsMap } from './plugins';
|
||||
|
||||
export const FunctionalityModules = [
|
||||
ConfigModule.forRoot(),
|
||||
ScheduleModule.forRoot(),
|
||||
EventModule,
|
||||
CacheModule,
|
||||
MutexModule,
|
||||
PrismaModule,
|
||||
MetricsModule,
|
||||
RateLimiterModule,
|
||||
SessionModule,
|
||||
MailModule,
|
||||
StorageProviderModule,
|
||||
HelpersModule,
|
||||
];
|
||||
|
||||
export class AppModuleBuilder {
|
||||
@@ -102,6 +102,12 @@ export class AppModuleBuilder {
|
||||
|
||||
compile() {
|
||||
@Module({
|
||||
providers: [
|
||||
{
|
||||
provide: APP_INTERCEPTOR,
|
||||
useClass: CacheInterceptor,
|
||||
},
|
||||
],
|
||||
imports: this.modules,
|
||||
controllers: this.config.isSelfhosted ? [] : [AppController],
|
||||
})
|
||||
@@ -124,15 +130,16 @@ function buildAppModule() {
|
||||
.use(DocModule)
|
||||
|
||||
// sync server only
|
||||
.useIf(config => config.flavor.sync, WebSocketModule, SyncModule)
|
||||
.useIf(config => config.flavor.sync, SyncModule)
|
||||
|
||||
// graphql server only
|
||||
.useIf(
|
||||
config => config.flavor.graphql,
|
||||
ServerConfigModule,
|
||||
WebSocketModule,
|
||||
GqlModule,
|
||||
StorageModule,
|
||||
UserModule,
|
||||
UsersModule,
|
||||
WorkspaceModule,
|
||||
FeatureModule,
|
||||
QuotaModule
|
||||
@@ -148,7 +155,7 @@ function buildAppModule() {
|
||||
|
||||
// plugin modules
|
||||
AFFiNE.plugins.enabled.forEach(name => {
|
||||
const plugin = REGISTERED_PLUGINS.get(name as AvailablePlugins);
|
||||
const plugin = pluginsMap.get(name as AvailablePlugins);
|
||||
if (!plugin) {
|
||||
throw new Error(`Unknown plugin ${name}`);
|
||||
}
|
||||
|
||||
@@ -4,13 +4,9 @@ import type { NestExpressApplication } from '@nestjs/platform-express';
|
||||
import cookieParser from 'cookie-parser';
|
||||
import graphqlUploadExpress from 'graphql-upload/graphqlUploadExpress.mjs';
|
||||
|
||||
import { AuthGuard } from './core/auth';
|
||||
import {
|
||||
CacheInterceptor,
|
||||
CloudThrottlerGuard,
|
||||
GlobalExceptionFilter,
|
||||
} from './fundamentals';
|
||||
import { SocketIoAdapter, SocketIoAdapterImpl } from './fundamentals/websocket';
|
||||
import { SocketIoAdapter } from './fundamentals';
|
||||
import { SocketIoAdapterImpl } from './fundamentals/websocket';
|
||||
import { ExceptionLogger } from './middleware/exception-logger';
|
||||
import { serverTimingAndCache } from './middleware/timing';
|
||||
|
||||
export async function createApp() {
|
||||
@@ -33,9 +29,7 @@ export async function createApp() {
|
||||
})
|
||||
);
|
||||
|
||||
app.useGlobalGuards(app.get(AuthGuard), app.get(CloudThrottlerGuard));
|
||||
app.useGlobalInterceptors(app.get(CacheInterceptor));
|
||||
app.useGlobalFilters(new GlobalExceptionFilter(app.getHttpAdapter()));
|
||||
app.useGlobalFilters(new ExceptionLogger());
|
||||
app.use(cookieParser());
|
||||
|
||||
if (AFFiNE.flavor.sync) {
|
||||
@@ -50,12 +44,5 @@ export async function createApp() {
|
||||
app.useWebSocketAdapter(adapter);
|
||||
}
|
||||
|
||||
if (AFFiNE.isSelfhosted && AFFiNE.telemetry.enabled) {
|
||||
const mixpanel = await import('mixpanel');
|
||||
mixpanel.init(AFFiNE.telemetry.token).track('selfhost-server-started', {
|
||||
version: AFFiNE.version,
|
||||
});
|
||||
}
|
||||
|
||||
return app;
|
||||
}
|
||||
|
||||
@@ -7,10 +7,12 @@ AFFiNE.ENV_MAP = {
|
||||
DATABASE_URL: 'db.url',
|
||||
ENABLE_CAPTCHA: ['auth.captcha.enable', 'boolean'],
|
||||
CAPTCHA_TURNSTILE_SECRET: ['auth.captcha.turnstile.secret', 'string'],
|
||||
OAUTH_GOOGLE_CLIENT_ID: 'plugins.oauth.providers.google.clientId',
|
||||
OAUTH_GOOGLE_CLIENT_SECRET: 'plugins.oauth.providers.google.clientSecret',
|
||||
OAUTH_GITHUB_CLIENT_ID: 'plugins.oauth.providers.github.clientId',
|
||||
OAUTH_GITHUB_CLIENT_SECRET: 'plugins.oauth.providers.github.clientSecret',
|
||||
OAUTH_GOOGLE_ENABLED: ['auth.oauthProviders.google.enabled', 'boolean'],
|
||||
OAUTH_GOOGLE_CLIENT_ID: 'auth.oauthProviders.google.clientId',
|
||||
OAUTH_GOOGLE_CLIENT_SECRET: 'auth.oauthProviders.google.clientSecret',
|
||||
OAUTH_GITHUB_ENABLED: ['auth.oauthProviders.github.enabled', 'boolean'],
|
||||
OAUTH_GITHUB_CLIENT_ID: 'auth.oauthProviders.github.clientId',
|
||||
OAUTH_GITHUB_CLIENT_SECRET: 'auth.oauthProviders.github.clientSecret',
|
||||
MAILER_HOST: 'mailer.host',
|
||||
MAILER_PORT: ['mailer.port', 'int'],
|
||||
MAILER_USER: 'mailer.auth.user',
|
||||
@@ -19,9 +21,6 @@ AFFiNE.ENV_MAP = {
|
||||
MAILER_SECURE: ['mailer.secure', 'boolean'],
|
||||
THROTTLE_TTL: ['rateLimiter.ttl', 'int'],
|
||||
THROTTLE_LIMIT: ['rateLimiter.limit', 'int'],
|
||||
COPILOT_OPENAI_API_KEY: 'plugins.copilot.openai.apiKey',
|
||||
COPILOT_FAL_API_KEY: 'plugins.copilot.fal.apiKey',
|
||||
COPILOT_UNSPLASH_API_KEY: 'plugins.copilot.unsplashKey',
|
||||
REDIS_SERVER_HOST: 'plugins.redis.host',
|
||||
REDIS_SERVER_PORT: ['plugins.redis.port', 'int'],
|
||||
REDIS_SERVER_USER: 'plugins.redis.username',
|
||||
@@ -35,9 +34,4 @@ AFFiNE.ENV_MAP = {
|
||||
STRIPE_API_KEY: 'plugins.payment.stripe.keys.APIKey',
|
||||
STRIPE_WEBHOOK_KEY: 'plugins.payment.stripe.keys.webhookKey',
|
||||
FEATURES_EARLY_ACCESS_PREVIEW: ['featureFlags.earlyAccessPreview', 'boolean'],
|
||||
FEATURES_SYNC_CLIENT_VERSION_CHECK: [
|
||||
'featureFlags.syncClientVersionCheck',
|
||||
'boolean',
|
||||
],
|
||||
TELEMETRY_ENABLE: ['telemetry.enabled', 'boolean'],
|
||||
};
|
||||
|
||||
@@ -20,44 +20,26 @@ const env = process.env;
|
||||
AFFiNE.metrics.enabled = !AFFiNE.node.test;
|
||||
|
||||
if (env.R2_OBJECT_STORAGE_ACCOUNT_ID) {
|
||||
AFFiNE.plugins.use('cloudflare-r2', {
|
||||
AFFiNE.storage.providers.r2 = {
|
||||
accountId: env.R2_OBJECT_STORAGE_ACCOUNT_ID,
|
||||
credentials: {
|
||||
accessKeyId: env.R2_OBJECT_STORAGE_ACCESS_KEY_ID!,
|
||||
secretAccessKey: env.R2_OBJECT_STORAGE_SECRET_ACCESS_KEY!,
|
||||
},
|
||||
});
|
||||
AFFiNE.storage.storages.avatar.provider = 'cloudflare-r2';
|
||||
};
|
||||
AFFiNE.storage.storages.avatar.provider = 'r2';
|
||||
AFFiNE.storage.storages.avatar.bucket = 'account-avatar';
|
||||
AFFiNE.storage.storages.avatar.publicLinkFactory = key =>
|
||||
`https://avatar.affineassets.com/${key}`;
|
||||
|
||||
AFFiNE.storage.storages.blob.provider = 'cloudflare-r2';
|
||||
AFFiNE.storage.storages.blob.provider = 'r2';
|
||||
AFFiNE.storage.storages.blob.bucket = `workspace-blobs-${
|
||||
AFFiNE.affine.canary ? 'canary' : 'prod'
|
||||
}`;
|
||||
|
||||
AFFiNE.storage.storages.copilot.provider = 'cloudflare-r2';
|
||||
AFFiNE.storage.storages.copilot.bucket = `workspace-copilot-${
|
||||
AFFiNE.affine.canary ? 'canary' : 'prod'
|
||||
}`;
|
||||
}
|
||||
|
||||
AFFiNE.plugins.use('copilot', {
|
||||
openai: {},
|
||||
fal: {},
|
||||
});
|
||||
AFFiNE.plugins.use('redis');
|
||||
AFFiNE.plugins.use('payment', {
|
||||
stripe: {
|
||||
keys: {
|
||||
// fake the key to ensure the server generate full GraphQL Schema even env vars are not set
|
||||
APIKey: '1',
|
||||
webhookKey: '1',
|
||||
},
|
||||
},
|
||||
});
|
||||
AFFiNE.plugins.use('oauth');
|
||||
AFFiNE.plugins.use('payment');
|
||||
|
||||
if (AFFiNE.deploy) {
|
||||
AFFiNE.mailer = {
|
||||
|
||||
@@ -52,21 +52,6 @@ AFFiNE.port = 3010;
|
||||
// /* The metrics will be available at `http://localhost:9464/metrics` with [Prometheus] format exported */
|
||||
// AFFiNE.metrics.enabled = true;
|
||||
//
|
||||
// /* Authentication Settings */
|
||||
// /* Whether allow anyone signup */
|
||||
// AFFiNE.auth.allowSignup = true;
|
||||
//
|
||||
// /* User Signup password limitation */
|
||||
// AFFiNE.auth.password = {
|
||||
// minLength: 8,
|
||||
// maxLength: 32,
|
||||
// };
|
||||
//
|
||||
// /* How long the login session would last by default */
|
||||
// AFFiNE.auth.session = {
|
||||
// ttl: 15 * 24 * 60 * 60, // 15 days
|
||||
// };
|
||||
//
|
||||
// /* GraphQL configurations that control the behavior of the Apollo Server behind */
|
||||
// /* @see https://www.apollographql.com/docs/apollo-server/api/apollo-server */
|
||||
// AFFiNE.graphql = {
|
||||
@@ -99,58 +84,11 @@ AFFiNE.port = 3010;
|
||||
// /* Redis Plugin */
|
||||
// /* Provide caching and session storing backed by Redis. */
|
||||
// /* Useful when you deploy AFFiNE server in a cluster. */
|
||||
// AFFiNE.plugins.use('redis', {
|
||||
// /* override options */
|
||||
// });
|
||||
//
|
||||
//
|
||||
AFFiNE.plugins.use('redis', {
|
||||
/* override options */
|
||||
});
|
||||
// /* Payment Plugin */
|
||||
// AFFiNE.plugins.use('payment', {
|
||||
// stripe: { keys: {}, apiVersion: '2023-10-16' },
|
||||
// });
|
||||
AFFiNE.plugins.use('payment', {
|
||||
stripe: { keys: {}, apiVersion: '2023-10-16' },
|
||||
});
|
||||
//
|
||||
//
|
||||
// /* Cloudflare R2 Plugin */
|
||||
// /* Enable if you choose to store workspace blobs or user avatars in Cloudflare R2 Storage Service */
|
||||
// AFFiNE.plugins.use('cloudflare-r2', {
|
||||
// accountId: '',
|
||||
// credentials: {
|
||||
// accessKeyId: '',
|
||||
// secretAccessKey: '',
|
||||
// },
|
||||
// });
|
||||
//
|
||||
// /* AWS S3 Plugin */
|
||||
// /* Enable if you choose to store workspace blobs or user avatars in AWS S3 Storage Service */
|
||||
// AFFiNE.plugins.use('aws-s3', {
|
||||
// credentials: {
|
||||
// accessKeyId: '',
|
||||
// secretAccessKey: '',
|
||||
// })
|
||||
// /* Update the provider of storages */
|
||||
// AFFiNE.storage.storages.blob.provider = 'r2';
|
||||
// AFFiNE.storage.storages.avatar.provider = 'r2';
|
||||
//
|
||||
// /* OAuth Plugin */
|
||||
// AFFiNE.plugins.use('oauth', {
|
||||
// providers: {
|
||||
// github: {
|
||||
// clientId: '',
|
||||
// clientSecret: '',
|
||||
// // See https://docs.github.com/en/apps/oauth-apps/building-oauth-apps/authorizing-oauth-apps
|
||||
// args: {
|
||||
// scope: 'user',
|
||||
// },
|
||||
// },
|
||||
// google: {
|
||||
// clientId: '',
|
||||
// clientSecret: '',
|
||||
// args: {
|
||||
// // See https://developers.google.com/identity/protocols/oauth2
|
||||
// scope: 'openid email profile',
|
||||
// promot: 'select_account',
|
||||
// access_type: 'offline',
|
||||
// },
|
||||
// },
|
||||
// },
|
||||
// });
|
||||
|
||||
@@ -1,207 +0,0 @@
|
||||
import { randomUUID } from 'node:crypto';
|
||||
|
||||
import {
|
||||
BadRequestException,
|
||||
Body,
|
||||
Controller,
|
||||
Get,
|
||||
Header,
|
||||
HttpStatus,
|
||||
Post,
|
||||
Query,
|
||||
Req,
|
||||
Res,
|
||||
} from '@nestjs/common';
|
||||
import type { Request, Response } from 'express';
|
||||
|
||||
import {
|
||||
Config,
|
||||
PaymentRequiredException,
|
||||
Throttle,
|
||||
URLHelper,
|
||||
} from '../../fundamentals';
|
||||
import { UserService } from '../user';
|
||||
import { validators } from '../utils/validators';
|
||||
import { CurrentUser } from './current-user';
|
||||
import { Public } from './guard';
|
||||
import { AuthService, parseAuthUserSeqNum } from './service';
|
||||
import { TokenService, TokenType } from './token';
|
||||
|
||||
class SignInCredential {
|
||||
email!: string;
|
||||
password?: string;
|
||||
}
|
||||
|
||||
class MagicLinkCredential {
|
||||
email!: string;
|
||||
token!: string;
|
||||
}
|
||||
|
||||
@Throttle('strict')
|
||||
@Controller('/api/auth')
|
||||
export class AuthController {
|
||||
constructor(
|
||||
private readonly url: URLHelper,
|
||||
private readonly auth: AuthService,
|
||||
private readonly user: UserService,
|
||||
private readonly token: TokenService,
|
||||
private readonly config: Config
|
||||
) {}
|
||||
|
||||
@Public()
|
||||
@Post('/sign-in')
|
||||
@Header('content-type', 'application/json')
|
||||
async signIn(
|
||||
@Req() req: Request,
|
||||
@Res() res: Response,
|
||||
@Body() credential: SignInCredential,
|
||||
@Query('redirect_uri') redirectUri = this.url.home
|
||||
) {
|
||||
validators.assertValidEmail(credential.email);
|
||||
const canSignIn = await this.auth.canSignIn(credential.email);
|
||||
if (!canSignIn) {
|
||||
throw new PaymentRequiredException(
|
||||
`You don't have early access permission\nVisit https://community.affine.pro/c/insider-general/ for more information`
|
||||
);
|
||||
}
|
||||
|
||||
if (credential.password) {
|
||||
const user = await this.auth.signIn(
|
||||
credential.email,
|
||||
credential.password
|
||||
);
|
||||
|
||||
await this.auth.setCookie(req, res, user);
|
||||
res.status(HttpStatus.OK).send(user);
|
||||
} else {
|
||||
// send email magic link
|
||||
const user = await this.user.findUserByEmail(credential.email);
|
||||
if (!user && !this.config.auth.allowSignup) {
|
||||
throw new BadRequestException('You are not allows to sign up.');
|
||||
}
|
||||
|
||||
const result = await this.sendSignInEmail(
|
||||
{ email: credential.email, signUp: !user },
|
||||
redirectUri
|
||||
);
|
||||
|
||||
if (result.rejected.length) {
|
||||
throw new Error('Failed to send sign-in email.');
|
||||
}
|
||||
|
||||
res.status(HttpStatus.OK).send({
|
||||
email: credential.email,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async sendSignInEmail(
|
||||
{ email, signUp }: { email: string; signUp: boolean },
|
||||
redirectUri: string
|
||||
) {
|
||||
const token = await this.token.createToken(TokenType.SignIn, email);
|
||||
|
||||
const magicLink = this.url.link('/magic-link', {
|
||||
token,
|
||||
email,
|
||||
redirect_uri: redirectUri,
|
||||
});
|
||||
|
||||
const result = await this.auth.sendSignInEmail(email, magicLink, signUp);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
@Get('/sign-out')
|
||||
async signOut(
|
||||
@Req() req: Request,
|
||||
@Res() res: Response,
|
||||
@Query('redirect_uri') redirectUri?: string
|
||||
) {
|
||||
const session = await this.auth.signOut(
|
||||
req.cookies[AuthService.sessionCookieName],
|
||||
parseAuthUserSeqNum(req.headers[AuthService.authUserSeqHeaderName])
|
||||
);
|
||||
|
||||
if (session) {
|
||||
res.cookie(AuthService.sessionCookieName, session.id, {
|
||||
expires: session.expiresAt ?? void 0, // expiredAt is `string | null`
|
||||
...this.auth.cookieOptions,
|
||||
});
|
||||
} else {
|
||||
res.clearCookie(AuthService.sessionCookieName);
|
||||
}
|
||||
|
||||
if (redirectUri) {
|
||||
return this.url.safeRedirect(res, redirectUri);
|
||||
} else {
|
||||
return res.send(null);
|
||||
}
|
||||
}
|
||||
|
||||
@Public()
|
||||
@Post('/magic-link')
|
||||
async magicLinkSignIn(
|
||||
@Req() req: Request,
|
||||
@Res() res: Response,
|
||||
@Body() { email, token }: MagicLinkCredential
|
||||
) {
|
||||
if (!token || !email) {
|
||||
throw new BadRequestException('Missing sign-in mail token');
|
||||
}
|
||||
|
||||
validators.assertValidEmail(email);
|
||||
|
||||
const valid = await this.token.verifyToken(TokenType.SignIn, token, {
|
||||
credential: email,
|
||||
});
|
||||
|
||||
if (!valid) {
|
||||
throw new BadRequestException('Invalid sign-in mail token');
|
||||
}
|
||||
|
||||
const user = await this.user.fulfillUser(email, {
|
||||
emailVerifiedAt: new Date(),
|
||||
registered: true,
|
||||
});
|
||||
|
||||
await this.auth.setCookie(req, res, user);
|
||||
|
||||
res.send({ id: user.id, email: user.email, name: user.name });
|
||||
}
|
||||
|
||||
@Throttle('default', { limit: 1200 })
|
||||
@Public()
|
||||
@Get('/session')
|
||||
async currentSessionUser(@CurrentUser() user?: CurrentUser) {
|
||||
return {
|
||||
user,
|
||||
};
|
||||
}
|
||||
|
||||
@Throttle('default', { limit: 1200 })
|
||||
@Public()
|
||||
@Get('/sessions')
|
||||
async currentSessionUsers(@Req() req: Request) {
|
||||
const token = req.cookies[AuthService.sessionCookieName];
|
||||
if (!token) {
|
||||
return {
|
||||
users: [],
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
users: await this.auth.getUserList(token),
|
||||
};
|
||||
}
|
||||
|
||||
@Public()
|
||||
@Get('/challenge')
|
||||
async challenge() {
|
||||
// TODO: impl in following PR
|
||||
return {
|
||||
challenge: randomUUID(),
|
||||
resource: randomUUID(),
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -1,55 +0,0 @@
import type { ExecutionContext } from '@nestjs/common';
import { createParamDecorator } from '@nestjs/common';
import { User } from '@prisma/client';

import { getRequestResponseFromContext } from '../../fundamentals';

function getUserFromContext(context: ExecutionContext) {
  return getRequestResponseFromContext(context).req.user;
}

/**
 * Used to fetch current user from the request context.
 *
 * > The user may be undefined if authorization token or session cookie is not provided.
 *
 * @example
 *
 * ```typescript
 * // Graphql Query
 * \@Query(() => UserType)
 * user(@CurrentUser() user: CurrentUser) {
 *   return user;
 * }
 * ```
 *
 * ```typescript
 * // HTTP Controller
 * \@Get('/user')
 * user(@CurrentUser() user: CurrentUser) {
 *   return user;
 * }
 * ```
 *
 * ```typescript
 * // for public apis
 * \@Public()
 * \@Get('/session')
 * session(@currentUser() user?: CurrentUser) {
 *   return user
 * }
 * ```
 */
// interface and variable don't conflict
// eslint-disable-next-line no-redeclare
export const CurrentUser = createParamDecorator(
  (_: unknown, context: ExecutionContext) => {
    return getUserFromContext(context);
  }
);

export interface CurrentUser
  extends Pick<User, 'id' | 'email' | 'avatarUrl' | 'name'> {
  hasPassword: boolean | null;
  emailVerified: boolean;
}
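The file above leans on TypeScript declaration merging: the CurrentUser const (a parameter decorator) and the CurrentUser interface share one exported name without conflict, because values and types live in separate namespaces. A tiny standalone illustration, with names that are purely hypothetical and not from the repository:

// A value and an interface may share one name: the value is used at runtime,
// the interface at type-check time (hypothetical example).
export const Session = (id: string) => ({ id });
export interface Session {
  id: string;
}
const s: Session = Session('abc'); // `Session('abc')` uses the function, `: Session` uses the interface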
@@ -1,91 +1,128 @@
import type {
  CanActivate,
  ExecutionContext,
  OnModuleInit,
} from '@nestjs/common';
import type { CanActivate, ExecutionContext } from '@nestjs/common';
import {
  createParamDecorator,
  Inject,
  Injectable,
  SetMetadata,
  UnauthorizedException,
  UseGuards,
} from '@nestjs/common';
import { ModuleRef, Reflector } from '@nestjs/core';
import { Reflector } from '@nestjs/core';
import type { NextAuthOptions } from 'next-auth';
import { AuthHandler } from 'next-auth/core';

import { getRequestResponseFromContext } from '../../fundamentals';
import { AuthService, parseAuthUserSeqNum } from './service';
import {
  getRequestResponseFromContext,
  PrismaService,
} from '../../fundamentals';
import { NextAuthOptionsProvide } from './next-auth-options';
import { AuthService } from './service';

function extractTokenFromHeader(authorization: string) {
  if (!/^Bearer\s/i.test(authorization)) {
    return;
  }

  return authorization.substring(7);
export function getUserFromContext(context: ExecutionContext) {
  return getRequestResponseFromContext(context).req.user;
}

@Injectable()
export class AuthGuard implements CanActivate, OnModuleInit {
  private auth!: AuthService;
/**
 * Used to fetch current user from the request context.
 *
 * > The user may be undefined if authorization token is not provided.
 *
 * @example
 *
 * ```typescript
 * // Graphql Query
 * \@Query(() => UserType)
 * user(@CurrentUser() user?: User) {
 *   return user;
 * }
 * ```
 *
 * ```typescript
 * // HTTP Controller
 * \@Get('/user)
 * user(@CurrentUser() user?: User) {
 *   return user;
 * }
 * ```
 */
export const CurrentUser = createParamDecorator(
  (_: unknown, context: ExecutionContext) => {
    return getUserFromContext(context);
  }
);

@Injectable()
class AuthGuard implements CanActivate {
  constructor(
    private readonly ref: ModuleRef,
    @Inject(NextAuthOptionsProvide)
    private readonly nextAuthOptions: NextAuthOptions,
    private readonly auth: AuthService,
    private readonly prisma: PrismaService,
    private readonly reflector: Reflector
  ) {}

  onModuleInit() {
    this.auth = this.ref.get(AuthService, { strict: false });
  }

  async canActivate(context: ExecutionContext) {
    const { req, res } = getRequestResponseFromContext(context);

    // check cookie
    let sessionToken: string | undefined =
      req.cookies[AuthService.sessionCookieName];

    if (!sessionToken && req.headers.authorization) {
      sessionToken = extractTokenFromHeader(req.headers.authorization);
    }

    if (sessionToken) {
      const userSeq = parseAuthUserSeqNum(
        req.headers[AuthService.authUserSeqHeaderName]
      );

      const { user, expiresAt } = await this.auth.getUser(
        sessionToken,
        userSeq
      );
      if (res && user && expiresAt) {
        await this.auth.refreshUserSessionIfNeeded(
          req,
          res,
          sessionToken,
          user.id,
          expiresAt
        );
      }

      if (user) {
        req.sid = sessionToken;
        req.user = user;
      }
    }
    const token = req.headers.authorization;

    // api is public
    const isPublic = this.reflector.get<boolean>(
      'isPublic',
      context.getHandler()
    );
    // api can be public, but if user is logged in, we can get user info
    const isPublicable = this.reflector.get<boolean>(
      'isPublicable',
      context.getHandler()
    );

    if (isPublic) {
      return true;
    }
    } else if (!token) {
      if (!req.cookies) {
        return isPublicable;
      }

    if (!req.user) {
      throw new UnauthorizedException('You are not signed in.');
    }
      const session = await AuthHandler({
        req: {
          cookies: req.cookies,
          action: 'session',
          method: 'GET',
          headers: req.headers,
        },
        options: this.nextAuthOptions,
      });

    return true;
      const { body = {}, cookies, status = 200 } = session;
      if (!body && !isPublicable) {
        return false;
      }

      // @ts-expect-error body is user here
      req.user = body.user;
      if (cookies && res) {
        for (const cookie of cookies) {
          res.cookie(cookie.name, cookie.value, cookie.options);
        }
      }

      return Boolean(
        status === 200 &&
          typeof body !== 'string' &&
          // ignore body if api is publicable
          (Object.keys(body).length || isPublicable)
      );
    } else {
      const [type, jwt] = token.split(' ') ?? [];

      if (type === 'Bearer') {
        const claims = await this.auth.verify(jwt);
        req.user = await this.prisma.user.findUnique({
          where: { id: claims.id },
        });
        return !!req.user;
      }
    }
    return false;
  }
}

@@ -100,7 +137,7 @@ export class AuthGuard implements CanActivate, OnModuleInit {
 * ```typescript
 * \@Auth()
 * \@Query(() => UserType)
 * user(@CurrentUser() user: CurrentUser) {
 * user(@CurrentUser() user: User) {
 *   return user;
 * }
 * ```
@@ -111,3 +148,5 @@ export const Auth = () => {

// api is public accessible
export const Public = () => SetMetadata('isPublic', true);
// api is public accessible, but if user is logged in, we can get user info
export const Publicable = () => SetMetadata('isPublicable', true);
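Putting the guard and the controller together: a request is authenticated either by the affine_session cookie or by an Authorization: Bearer token, and when one browser session holds several accounts, the x-auth-user header selects which of them the request acts as (both names come from AuthService in this diff, parsed by parseAuthUserSeqNum). A hedged sketch of how a client could use that; the helper itself is an assumption, not repository code.

// Sketch: list the accounts bound to the current session, then read the
// profile of the account at position `seq` by sending its sequence number.
async function pickAccount(seq: number) {
  // GET /api/auth/sessions -> { users: CurrentUser[] } (see the controller above)
  const { users } = await fetch('/api/auth/sessions', { credentials: 'include' }).then(r => r.json());
  // GET /api/auth/session with x-auth-user selects userSessions[seq] in the guard
  const { user } = await fetch('/api/auth/session', {
    credentials: 'include',
    headers: { 'x-auth-user': String(seq) },
  }).then(r => r.json());
  return { users, user };
}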
@@ -1,23 +1,18 @@
import { Module } from '@nestjs/common';
import { Global, Module } from '@nestjs/common';

import { FeatureModule } from '../features';
import { QuotaModule } from '../quota';
import { UserModule } from '../user';
import { AuthController } from './controller';
import { AuthGuard } from './guard';
import { NextAuthController } from './next-auth.controller';
import { NextAuthOptionsProvider } from './next-auth-options';
import { AuthResolver } from './resolver';
import { AuthService } from './service';
import { TokenService, TokenType } from './token';

@Global()
@Module({
  imports: [FeatureModule, UserModule, QuotaModule],
  providers: [AuthService, AuthResolver, TokenService, AuthGuard],
  exports: [AuthService, AuthGuard],
  controllers: [AuthController],
  providers: [AuthService, AuthResolver, NextAuthOptionsProvider],
  exports: [AuthService, NextAuthOptionsProvider],
  controllers: [NextAuthController],
})
export class AuthModule {}

export * from './guard';
export { ClientTokenType } from './resolver';
export { AuthService, TokenService, TokenType };
export * from './current-user';
export { TokenType } from './resolver';
export { AuthService };

packages/backend/server/src/core/auth/next-auth-options.ts (new file, 285 lines)
@@ -0,0 +1,285 @@
|
||||
import { PrismaAdapter } from '@auth/prisma-adapter';
|
||||
import { FactoryProvider, Logger } from '@nestjs/common';
|
||||
import { verify } from '@node-rs/argon2';
|
||||
import { assign, omit } from 'lodash-es';
|
||||
import { NextAuthOptions } from 'next-auth';
|
||||
import Credentials from 'next-auth/providers/credentials';
|
||||
import Email from 'next-auth/providers/email';
|
||||
import Github from 'next-auth/providers/github';
|
||||
import Google from 'next-auth/providers/google';
|
||||
|
||||
import {
|
||||
Config,
|
||||
MailService,
|
||||
PrismaService,
|
||||
SessionService,
|
||||
} from '../../fundamentals';
|
||||
import { FeatureType } from '../features';
|
||||
import { Quota_FreePlanV1_1 } from '../quota';
|
||||
import {
|
||||
decode,
|
||||
encode,
|
||||
sendVerificationRequest,
|
||||
SendVerificationRequestParams,
|
||||
} from './utils';
|
||||
|
||||
export const NextAuthOptionsProvide = Symbol('NextAuthOptions');
|
||||
|
||||
const TrustedProviders = ['google'];
|
||||
|
||||
export const NextAuthOptionsProvider: FactoryProvider<NextAuthOptions> = {
|
||||
provide: NextAuthOptionsProvide,
|
||||
useFactory(
|
||||
config: Config,
|
||||
prisma: PrismaService,
|
||||
mailer: MailService,
|
||||
session: SessionService
|
||||
) {
|
||||
const logger = new Logger('NextAuth');
|
||||
const prismaAdapter = PrismaAdapter(prisma);
|
||||
// createUser exists in the adapter
|
||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||
const createUser = prismaAdapter.createUser!.bind(prismaAdapter);
|
||||
prismaAdapter.createUser = async data => {
|
||||
const userData = {
|
||||
name: data.name,
|
||||
email: data.email,
|
||||
avatarUrl: '',
|
||||
emailVerified: data.emailVerified,
|
||||
features: {
|
||||
create: {
|
||||
reason: 'created by email sign up',
|
||||
activated: true,
|
||||
feature: {
|
||||
connect: {
|
||||
feature_version: Quota_FreePlanV1_1,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
if (data.email && !data.name) {
|
||||
userData.name = data.email.split('@')[0];
|
||||
}
|
||||
if (data.image) {
|
||||
userData.avatarUrl = data.image;
|
||||
}
|
||||
return createUser(userData);
|
||||
};
|
||||
// linkAccount exists in the adapter
|
||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||
const linkAccount = prismaAdapter.linkAccount!.bind(prismaAdapter);
|
||||
prismaAdapter.linkAccount = async account => {
|
||||
// google account must be a verified email
|
||||
if (TrustedProviders.includes(account.provider)) {
|
||||
await prisma.user.update({
|
||||
where: {
|
||||
id: account.userId,
|
||||
},
|
||||
data: {
|
||||
emailVerified: new Date(),
|
||||
},
|
||||
});
|
||||
}
|
||||
return linkAccount(account) as Promise<void>;
|
||||
};
|
||||
// getUser exists in the adapter
|
||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||
const getUser = prismaAdapter.getUser!.bind(prismaAdapter)!;
|
||||
prismaAdapter.getUser = async id => {
|
||||
const result = await getUser(id);
|
||||
if (result) {
|
||||
// @ts-expect-error Third part library type mismatch
|
||||
result.image = result.avatarUrl;
|
||||
// @ts-expect-error Third part library type mismatch
|
||||
result.hasPassword = Boolean(result.password);
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
prismaAdapter.createVerificationToken = async data => {
|
||||
await session.set(
|
||||
`${data.identifier}:${data.token}`,
|
||||
Date.now() + session.sessionTtl
|
||||
);
|
||||
return data;
|
||||
};
|
||||
|
||||
prismaAdapter.useVerificationToken = async ({ identifier, token }) => {
|
||||
const expires = await session.get(`${identifier}:${token}`);
|
||||
if (expires) {
|
||||
return { identifier, token, expires: new Date(expires) };
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
const nextAuthOptions: NextAuthOptions = {
|
||||
providers: [],
|
||||
adapter: prismaAdapter,
|
||||
debug: !config.node.prod,
|
||||
logger: {
|
||||
debug(code, metadata) {
|
||||
logger.debug(`${code}: ${JSON.stringify(metadata)}`);
|
||||
},
|
||||
error(code, metadata) {
|
||||
if (metadata instanceof Error) {
|
||||
// @ts-expect-error assign code to error
|
||||
metadata.code = code;
|
||||
logger.error(metadata);
|
||||
} else if (metadata.error instanceof Error) {
|
||||
assign(metadata.error, omit(metadata, 'error'), { code });
|
||||
logger.error(metadata.error);
|
||||
}
|
||||
},
|
||||
warn(code) {
|
||||
logger.warn(code);
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
nextAuthOptions.providers.push(
|
||||
// @ts-expect-error esm interop issue
|
||||
Credentials.default({
|
||||
name: 'Password',
|
||||
credentials: {
|
||||
email: {
|
||||
label: 'Email',
|
||||
type: 'text',
|
||||
placeholder: 'torvalds@osdl.org',
|
||||
},
|
||||
password: { label: 'Password', type: 'password' },
|
||||
},
|
||||
async authorize(
|
||||
credentials:
|
||||
| Record<'email' | 'password' | 'hashedPassword', string>
|
||||
| undefined
|
||||
) {
|
||||
if (!credentials) {
|
||||
return null;
|
||||
}
|
||||
const { password, hashedPassword } = credentials;
|
||||
if (!password || !hashedPassword) {
|
||||
return null;
|
||||
}
|
||||
if (!(await verify(hashedPassword, password))) {
|
||||
return null;
|
||||
}
|
||||
return credentials;
|
||||
},
|
||||
})
|
||||
);
|
||||
|
||||
if (config.mailer && mailer) {
|
||||
nextAuthOptions.providers.push(
|
||||
// @ts-expect-error esm interop issue
|
||||
Email.default({
|
||||
sendVerificationRequest: (params: SendVerificationRequestParams) =>
|
||||
sendVerificationRequest(config, logger, mailer, session, params),
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
if (config.auth.oauthProviders.github) {
|
||||
nextAuthOptions.providers.push(
|
||||
// @ts-expect-error esm interop issue
|
||||
Github.default({
|
||||
clientId: config.auth.oauthProviders.github.clientId,
|
||||
clientSecret: config.auth.oauthProviders.github.clientSecret,
|
||||
allowDangerousEmailAccountLinking: true,
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
if (config.auth.oauthProviders.google?.enabled) {
|
||||
nextAuthOptions.providers.push(
|
||||
// @ts-expect-error esm interop issue
|
||||
Google.default({
|
||||
clientId: config.auth.oauthProviders.google.clientId,
|
||||
clientSecret: config.auth.oauthProviders.google.clientSecret,
|
||||
checks: 'nonce',
|
||||
allowDangerousEmailAccountLinking: true,
|
||||
authorization: {
|
||||
params: { scope: 'openid email profile', prompt: 'select_account' },
|
||||
},
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
if (nextAuthOptions.providers.length > 1) {
|
||||
// not only credentials provider
|
||||
nextAuthOptions.session = { strategy: 'database' };
|
||||
}
|
||||
|
||||
nextAuthOptions.jwt = {
|
||||
encode: async ({ token, maxAge }) =>
|
||||
encode(config, prisma, token, maxAge),
|
||||
decode: async ({ token }) => decode(config, token),
|
||||
};
|
||||
nextAuthOptions.secret ??= config.auth.nextAuthSecret;
|
||||
|
||||
nextAuthOptions.callbacks = {
|
||||
session: async ({ session, user, token }) => {
|
||||
if (session.user) {
|
||||
if (user) {
|
||||
// @ts-expect-error Third part library type mismatch
|
||||
session.user.id = user.id;
|
||||
// @ts-expect-error Third part library type mismatch
|
||||
session.user.image = user.image ?? user.avatarUrl;
|
||||
// @ts-expect-error Third part library type mismatch
|
||||
session.user.emailVerified = user.emailVerified;
|
||||
// @ts-expect-error Third part library type mismatch
|
||||
session.user.hasPassword = Boolean(user.password);
|
||||
} else {
|
||||
// technically the sub should be the same as id
|
||||
// @ts-expect-error Third part library type mismatch
|
||||
session.user.id = token.sub;
|
||||
// @ts-expect-error Third part library type mismatch
|
||||
session.user.emailVerified = token.emailVerified;
|
||||
// @ts-expect-error Third part library type mismatch
|
||||
session.user.hasPassword = token.hasPassword;
|
||||
}
|
||||
if (token && token.picture) {
|
||||
session.user.image = token.picture;
|
||||
}
|
||||
}
|
||||
return session;
|
||||
},
|
||||
signIn: async ({ profile, user }) => {
|
||||
if (!config.featureFlags.earlyAccessPreview) {
|
||||
return true;
|
||||
}
|
||||
const email = profile?.email ?? user.email;
|
||||
if (email) {
|
||||
// FIXME: cannot inject FeatureManagementService here
|
||||
// it will cause prisma.account to be undefined
|
||||
// then prismaAdapter.getUserByAccount will throw error
|
||||
if (email.endsWith('@toeverything.info')) return true;
|
||||
return prisma.userFeatures
|
||||
.count({
|
||||
where: {
|
||||
user: {
|
||||
email,
|
||||
},
|
||||
feature: {
|
||||
feature: FeatureType.EarlyAccess,
|
||||
},
|
||||
activated: true,
|
||||
},
|
||||
})
|
||||
.then(count => count > 0);
|
||||
}
|
||||
return false;
|
||||
},
|
||||
redirect({ url }) {
|
||||
return url;
|
||||
},
|
||||
};
|
||||
|
||||
nextAuthOptions.pages = {
|
||||
newUser: '/auth/onboarding',
|
||||
};
|
||||
return nextAuthOptions;
|
||||
},
|
||||
inject: [Config, PrismaService, MailService, SessionService],
|
||||
};
|
||||
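One detail worth calling out in the options above: email verification tokens are not persisted through the Prisma adapter at all; createVerificationToken and useVerificationToken round-trip through the key-value SessionService, storing only an expiry under an `identifier:token` key. A stripped-down sketch of that round trip against a plain in-memory map; the `kv` store here is an assumption standing in for SessionService.

// Minimal model of the adapter overrides above, using a Map instead of SessionService.
const kv = new Map<string, number>();

async function createVerificationToken(identifier: string, token: string, ttlMs: number) {
  kv.set(`${identifier}:${token}`, Date.now() + ttlMs); // only the expiry is stored
  return { identifier, token };
}

async function useVerificationToken(identifier: string, token: string) {
  const expires = kv.get(`${identifier}:${token}`);
  // a hit means the token is still known; the caller then checks `expires`
  return expires ? { identifier, token, expires: new Date(expires) } : null;
}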
packages/backend/server/src/core/auth/next-auth.controller.ts (new file, 409 lines)
@@ -0,0 +1,409 @@
|
||||
import { URLSearchParams } from 'node:url';
|
||||
|
||||
import {
|
||||
All,
|
||||
BadRequestException,
|
||||
Controller,
|
||||
Get,
|
||||
Inject,
|
||||
Logger,
|
||||
Next,
|
||||
NotFoundException,
|
||||
Query,
|
||||
Req,
|
||||
Res,
|
||||
UseGuards,
|
||||
} from '@nestjs/common';
|
||||
import { hash, verify } from '@node-rs/argon2';
|
||||
import type { User } from '@prisma/client';
|
||||
import type { NextFunction, Request, Response } from 'express';
|
||||
import { pick } from 'lodash-es';
|
||||
import { nanoid } from 'nanoid';
|
||||
import type { AuthAction, CookieOption, NextAuthOptions } from 'next-auth';
|
||||
import { AuthHandler } from 'next-auth/core';
|
||||
|
||||
import {
|
||||
AuthThrottlerGuard,
|
||||
Config,
|
||||
metrics,
|
||||
PrismaService,
|
||||
SessionService,
|
||||
Throttle,
|
||||
} from '../../fundamentals';
|
||||
import { NextAuthOptionsProvide } from './next-auth-options';
|
||||
import { AuthService } from './service';
|
||||
|
||||
const BASE_URL = '/api/auth/';
|
||||
|
||||
const DEFAULT_SESSION_EXPIRE_DATE = 2592000 * 1000; // 30 days
|
||||
|
||||
@Controller(BASE_URL)
|
||||
export class NextAuthController {
|
||||
private readonly callbackSession;
|
||||
|
||||
private readonly logger = new Logger('NextAuthController');
|
||||
|
||||
constructor(
|
||||
readonly config: Config,
|
||||
readonly prisma: PrismaService,
|
||||
private readonly authService: AuthService,
|
||||
@Inject(NextAuthOptionsProvide)
|
||||
private readonly nextAuthOptions: NextAuthOptions,
|
||||
private readonly session: SessionService
|
||||
) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||
this.callbackSession = nextAuthOptions.callbacks!.session;
|
||||
}
|
||||
|
||||
@UseGuards(AuthThrottlerGuard)
|
||||
@Throttle({
|
||||
default: {
|
||||
limit: 60,
|
||||
ttl: 60,
|
||||
},
|
||||
})
|
||||
@Get('/challenge')
|
||||
async getChallenge(@Res() res: Response) {
|
||||
const challenge = nanoid();
|
||||
const resource = nanoid();
|
||||
await this.session.set(challenge, resource, 5 * 60 * 1000);
|
||||
res.json({ challenge, resource });
|
||||
}
|
||||
|
||||
@UseGuards(AuthThrottlerGuard)
|
||||
@Throttle({
|
||||
default: {
|
||||
limit: 60,
|
||||
ttl: 60,
|
||||
},
|
||||
})
|
||||
@All('*')
|
||||
async auth(
|
||||
@Req() req: Request,
|
||||
@Res() res: Response,
|
||||
@Query() query: Record<string, any>,
|
||||
@Next() next: NextFunction
|
||||
) {
|
||||
if (req.path === '/api/auth/signin' && req.method === 'GET') {
|
||||
const query = req.query
|
||||
? // @ts-expect-error req.query is satisfy with the Record<string, any>
|
||||
`?${new URLSearchParams(req.query).toString()}`
|
||||
: '';
|
||||
res.redirect(`/signin${query}`);
|
||||
return;
|
||||
}
|
||||
const [action, providerId] = req.url // start with request url
|
||||
.slice(BASE_URL.length) // make relative to baseUrl
|
||||
.replace(/\?.*/, '') // remove query part, use only path part
|
||||
.split('/') as [AuthAction, string]; // as array of strings;
|
||||
|
||||
metrics.auth.counter('call_counter').add(1, { action, providerId });
|
||||
|
||||
const credentialsSignIn =
|
||||
req.method === 'POST' && providerId === 'credentials';
|
||||
let userId: string | undefined;
|
||||
if (credentialsSignIn) {
|
||||
const { email } = req.body;
|
||||
if (email) {
|
||||
const user = await this.prisma.user.findFirst({
|
||||
where: {
|
||||
email,
|
||||
},
|
||||
});
|
||||
if (!user) {
|
||||
req.statusCode = 401;
|
||||
req.statusMessage = 'User not found';
|
||||
req.body = null;
|
||||
throw new NotFoundException(`User not found`);
|
||||
} else {
|
||||
userId = user.id;
|
||||
req.body = {
|
||||
...req.body,
|
||||
name: user.name,
|
||||
email: user.email,
|
||||
image: user.avatarUrl,
|
||||
hashedPassword: user.password,
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
const options = this.nextAuthOptions;
|
||||
if (req.method === 'POST' && action === 'session') {
|
||||
if (typeof req.body !== 'object' || typeof req.body.data !== 'object') {
|
||||
metrics.auth
|
||||
.counter('call_fails_counter')
|
||||
.add(1, { reason: 'invalid_session_data' });
|
||||
throw new BadRequestException(`Invalid new session data`);
|
||||
}
|
||||
const user = await this.updateSession(req, req.body.data);
|
||||
// callbacks.session existed
|
||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||
options.callbacks!.session = ({ session }) => {
|
||||
return {
|
||||
user: {
|
||||
...pick(user, 'id', 'name', 'email'),
|
||||
image: user.avatarUrl,
|
||||
hasPassword: !!user.password,
|
||||
},
|
||||
expires: session.expires,
|
||||
};
|
||||
};
|
||||
} else {
|
||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||
options.callbacks!.session = this.callbackSession;
|
||||
}
|
||||
|
||||
if (
|
||||
this.config.auth.captcha.enable &&
|
||||
req.method === 'POST' &&
|
||||
action === 'signin' &&
|
||||
// TODO: add credentials support in frontend
|
||||
['email'].includes(providerId)
|
||||
) {
|
||||
const isVerified = await this.verifyChallenge(req, res);
|
||||
if (!isVerified) return;
|
||||
}
|
||||
|
||||
const { status, headers, body, redirect, cookies } = await AuthHandler({
|
||||
req: {
|
||||
body: req.body,
|
||||
query: query,
|
||||
method: req.method,
|
||||
action,
|
||||
providerId,
|
||||
error: query.error ?? providerId,
|
||||
cookies: req.cookies,
|
||||
},
|
||||
options,
|
||||
});
|
||||
|
||||
if (headers) {
|
||||
for (const { key, value } of headers) {
|
||||
res.setHeader(key, value);
|
||||
}
|
||||
}
|
||||
if (cookies) {
|
||||
for (const cookie of cookies) {
|
||||
res.cookie(cookie.name, cookie.value, cookie.options);
|
||||
}
|
||||
}
|
||||
|
||||
let nextAuthTokenCookie: (CookieOption & { value: string }) | undefined;
|
||||
const secureCookiePrefix = '__Secure-';
|
||||
const sessionCookieName = `next-auth.session-token`;
|
||||
// next-auth credentials login only support JWT strategy
|
||||
// https://next-auth.js.org/configuration/providers/credentials
|
||||
// let's store the session token in the database
|
||||
if (
|
||||
credentialsSignIn &&
|
||||
(nextAuthTokenCookie = cookies?.find(
|
||||
({ name }) =>
|
||||
name === sessionCookieName ||
|
||||
name === `${secureCookiePrefix}${sessionCookieName}`
|
||||
))
|
||||
) {
|
||||
const cookieExpires = new Date();
|
||||
cookieExpires.setTime(
|
||||
cookieExpires.getTime() + DEFAULT_SESSION_EXPIRE_DATE
|
||||
);
|
||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||
await this.nextAuthOptions.adapter!.createSession!({
|
||||
sessionToken: nextAuthTokenCookie.value,
|
||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||
userId: userId!,
|
||||
expires: cookieExpires,
|
||||
});
|
||||
}
|
||||
|
||||
if (redirect?.endsWith('api/auth/error?error=AccessDenied')) {
|
||||
this.logger.log(`Early access redirect headers: ${req.headers}`);
|
||||
metrics.auth
|
||||
.counter('call_fails_counter')
|
||||
.add(1, { reason: 'no_early_access_permission' });
|
||||
|
||||
if (
|
||||
!req.headers?.referer ||
|
||||
checkUrlOrigin(req.headers.referer, 'https://accounts.google.com')
|
||||
) {
|
||||
res.redirect('https://community.affine.pro/c/insider-general/');
|
||||
} else {
|
||||
res.status(403);
|
||||
res.json({
|
||||
url: 'https://community.affine.pro/c/insider-general/',
|
||||
error: `You don't have early access permission`,
|
||||
});
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (status) {
|
||||
res.status(status);
|
||||
}
|
||||
|
||||
if (redirect) {
|
||||
if (providerId === 'credentials') {
|
||||
res.send(JSON.stringify({ ok: true, url: redirect }));
|
||||
} else if (
|
||||
action === 'callback' ||
|
||||
action === 'error' ||
|
||||
(providerId !== 'credentials' &&
|
||||
// login in the next-auth page, /api/auth/signin, auto redirect.
|
||||
// otherwise, return the json value to allow frontend to handle the redirect.
|
||||
req.headers?.referer?.includes?.('/api/auth/signin'))
|
||||
) {
|
||||
res.redirect(redirect);
|
||||
} else {
|
||||
res.json({ url: redirect });
|
||||
}
|
||||
} else if (typeof body === 'string') {
|
||||
res.send(body);
|
||||
} else if (body && typeof body === 'object') {
|
||||
res.json(body);
|
||||
} else {
|
||||
next();
|
||||
}
|
||||
}
|
||||
|
||||
private async updateSession(
|
||||
req: Request,
|
||||
newSession: Partial<Omit<User, 'id'>> & { oldPassword?: string }
|
||||
): Promise<User> {
|
||||
const { name, email, password, oldPassword } = newSession;
|
||||
if (!name && !email && !password) {
|
||||
throw new BadRequestException(`Invalid new session data`);
|
||||
}
|
||||
if (password) {
|
||||
const user = await this.verifyUserFromRequest(req);
|
||||
const { password: userPassword } = user;
|
||||
if (!oldPassword) {
|
||||
if (userPassword) {
|
||||
throw new BadRequestException(
|
||||
`Old password is required to update password`
|
||||
);
|
||||
}
|
||||
} else {
|
||||
if (!userPassword) {
|
||||
throw new BadRequestException(`No existed password`);
|
||||
}
|
||||
if (await verify(userPassword, oldPassword)) {
|
||||
await this.prisma.user.update({
|
||||
where: {
|
||||
id: user.id,
|
||||
},
|
||||
data: {
|
||||
...pick(newSession, 'email', 'name'),
|
||||
password: await hash(password),
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
return user;
|
||||
} else {
|
||||
const user = await this.verifyUserFromRequest(req);
|
||||
return this.prisma.user.update({
|
||||
where: {
|
||||
id: user.id,
|
||||
},
|
||||
data: pick(newSession, 'name', 'email'),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
private async verifyChallenge(req: Request, res: Response): Promise<boolean> {
|
||||
const challenge = req.query?.challenge;
|
||||
if (typeof challenge === 'string' && challenge) {
|
||||
const resource = await this.session.get(challenge);
|
||||
|
||||
if (!resource) {
|
||||
this.rejectResponse(res, 'Invalid Challenge');
|
||||
return false;
|
||||
}
|
||||
|
||||
const isChallengeVerified =
|
||||
await this.authService.verifyChallengeResponse(
|
||||
req.query?.token,
|
||||
resource
|
||||
);
|
||||
|
||||
this.logger.debug(
|
||||
`Challenge: ${challenge}, Resource: ${resource}, Response: ${req.query?.token}, isChallengeVerified: ${isChallengeVerified}`
|
||||
);
|
||||
|
||||
if (!isChallengeVerified) {
|
||||
this.rejectResponse(res, 'Invalid Challenge Response');
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
const isTokenVerified = await this.authService.verifyCaptchaToken(
|
||||
req.query?.token,
|
||||
req.headers['CF-Connecting-IP'] as string
|
||||
);
|
||||
|
||||
if (!isTokenVerified) {
|
||||
this.rejectResponse(res, 'Invalid Captcha Response');
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
private async verifyUserFromRequest(req: Request): Promise<User> {
|
||||
const token = req.headers.authorization;
|
||||
if (!token) {
|
||||
const session = await AuthHandler({
|
||||
req: {
|
||||
cookies: req.cookies,
|
||||
action: 'session',
|
||||
method: 'GET',
|
||||
headers: req.headers,
|
||||
},
|
||||
options: this.nextAuthOptions,
|
||||
});
|
||||
|
||||
const { body } = session;
|
||||
// @ts-expect-error check if body.user exists
|
||||
if (body && body.user && body.user.id) {
|
||||
const user = await this.prisma.user.findUnique({
|
||||
where: {
|
||||
// @ts-expect-error body.user.id exists
|
||||
id: body.user.id,
|
||||
},
|
||||
});
|
||||
if (user) {
|
||||
return user;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
const [type, jwt] = token.split(' ') ?? [];
|
||||
|
||||
if (type === 'Bearer') {
|
||||
const claims = await this.authService.verify(jwt);
|
||||
const user = await this.prisma.user.findUnique({
|
||||
where: { id: claims.id },
|
||||
});
|
||||
if (user) {
|
||||
return user;
|
||||
}
|
||||
}
|
||||
}
|
||||
throw new BadRequestException(`User not found`);
|
||||
}
|
||||
|
||||
rejectResponse(res: Response, error: string, status = 400) {
|
||||
res.status(status);
|
||||
res.json({
|
||||
url: `${this.config.baseUrl}/api/auth/error?${new URLSearchParams({
|
||||
error,
|
||||
}).toString()}`,
|
||||
error,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const checkUrlOrigin = (url: string, origin: string) => {
|
||||
try {
|
||||
return new URL(url).origin === origin;
|
||||
} catch (e) {
|
||||
return false;
|
||||
}
|
||||
};
|
||||
@@ -1,26 +1,33 @@
|
||||
import { BadRequestException, ForbiddenException } from '@nestjs/common';
|
||||
import {
|
||||
BadRequestException,
|
||||
ForbiddenException,
|
||||
UseGuards,
|
||||
} from '@nestjs/common';
|
||||
import {
|
||||
Args,
|
||||
Context,
|
||||
Field,
|
||||
Mutation,
|
||||
ObjectType,
|
||||
Parent,
|
||||
Query,
|
||||
ResolveField,
|
||||
Resolver,
|
||||
} from '@nestjs/graphql';
|
||||
import type { Request } from 'express';
|
||||
import { nanoid } from 'nanoid';
|
||||
|
||||
import { Config, SkipThrottle, Throttle } from '../../fundamentals';
|
||||
import { UserService } from '../user';
|
||||
import { UserType } from '../user/types';
|
||||
import { validators } from '../utils/validators';
|
||||
import { CurrentUser } from './current-user';
|
||||
import { Public } from './guard';
|
||||
import {
|
||||
CloudThrottlerGuard,
|
||||
Config,
|
||||
SessionService,
|
||||
Throttle,
|
||||
} from '../../fundamentals';
|
||||
import { UserType } from '../users';
|
||||
import { Auth, CurrentUser } from './guard';
|
||||
import { AuthService } from './service';
|
||||
import { TokenService, TokenType } from './token';
|
||||
|
||||
@ObjectType('tokenType')
|
||||
export class ClientTokenType {
|
||||
@ObjectType()
|
||||
export class TokenType {
|
||||
@Field()
|
||||
token!: string;
|
||||
|
||||
@@ -31,150 +38,200 @@ export class ClientTokenType {
|
||||
sessionToken?: string;
|
||||
}
|
||||
|
||||
@Throttle('strict')
|
||||
/**
|
||||
* Auth resolver
|
||||
* Token rate limit: 20 req/m
|
||||
* Sign up/in rate limit: 10 req/m
|
||||
* Other rate limit: 5 req/m
|
||||
*/
|
||||
@UseGuards(CloudThrottlerGuard)
|
||||
@Resolver(() => UserType)
|
||||
export class AuthResolver {
|
||||
constructor(
|
||||
private readonly config: Config,
|
||||
private readonly auth: AuthService,
|
||||
private readonly user: UserService,
|
||||
private readonly token: TokenService
|
||||
private readonly session: SessionService
|
||||
) {}
|
||||
|
||||
@SkipThrottle()
|
||||
@Public()
|
||||
@Query(() => UserType, {
|
||||
name: 'currentUser',
|
||||
description: 'Get current user',
|
||||
nullable: true,
|
||||
@Throttle({
|
||||
default: {
|
||||
limit: 20,
|
||||
ttl: 60,
|
||||
},
|
||||
})
|
||||
currentUser(@CurrentUser() user?: CurrentUser): UserType | undefined {
|
||||
return user;
|
||||
}
|
||||
|
||||
@ResolveField(() => ClientTokenType, {
|
||||
name: 'token',
|
||||
deprecationReason: 'use [/api/auth/authorize]',
|
||||
})
|
||||
async clientToken(
|
||||
@CurrentUser() currentUser: CurrentUser,
|
||||
@ResolveField(() => TokenType)
|
||||
async token(
|
||||
@Context() ctx: { req: Request },
|
||||
@CurrentUser() currentUser: UserType,
|
||||
@Parent() user: UserType
|
||||
): Promise<ClientTokenType> {
|
||||
) {
|
||||
if (user.id !== currentUser.id) {
|
||||
throw new ForbiddenException('Invalid user');
|
||||
throw new BadRequestException('Invalid user');
|
||||
}
|
||||
|
||||
const session = await this.auth.createUserSession(
|
||||
user,
|
||||
undefined,
|
||||
this.config.auth.accessToken.ttl
|
||||
);
|
||||
let sessionToken: string | undefined;
|
||||
|
||||
// only return session if the request is from the same origin & path == /open-app
|
||||
if (
|
||||
ctx.req.headers.referer &&
|
||||
ctx.req.headers.host &&
|
||||
new URL(ctx.req.headers.referer).pathname.startsWith('/open-app') &&
|
||||
ctx.req.headers.host === new URL(this.config.origin).host
|
||||
) {
|
||||
const cookiePrefix = this.config.node.prod ? '__Secure-' : '';
|
||||
const sessionCookieName = `${cookiePrefix}next-auth.session-token`;
|
||||
sessionToken = ctx.req.cookies?.[sessionCookieName];
|
||||
}
|
||||
|
||||
return {
|
||||
sessionToken: session.sessionId,
|
||||
token: session.sessionId,
|
||||
refresh: '',
|
||||
sessionToken,
|
||||
token: this.auth.sign(user),
|
||||
refresh: this.auth.refresh(user),
|
||||
};
|
||||
}
|
||||
|
||||
@Throttle({
|
||||
default: {
|
||||
limit: 10,
|
||||
ttl: 60,
|
||||
},
|
||||
})
|
||||
@Mutation(() => UserType)
|
||||
async signUp(
|
||||
@Context() ctx: { req: Request },
|
||||
@Args('name') name: string,
|
||||
@Args('email') email: string,
|
||||
@Args('password') password: string
|
||||
) {
|
||||
const user = await this.auth.signUp(name, email, password);
|
||||
ctx.req.user = user;
|
||||
return user;
|
||||
}
|
||||
|
||||
@Throttle({
|
||||
default: {
|
||||
limit: 10,
|
||||
ttl: 60,
|
||||
},
|
||||
})
|
||||
@Mutation(() => UserType)
|
||||
async signIn(
|
||||
@Context() ctx: { req: Request },
|
||||
@Args('email') email: string,
|
||||
@Args('password') password: string
|
||||
) {
|
||||
const user = await this.auth.signIn(email, password);
|
||||
ctx.req.user = user;
|
||||
return user;
|
||||
}
|
||||
|
||||
@Throttle({
|
||||
default: {
|
||||
limit: 5,
|
||||
ttl: 60,
|
||||
},
|
||||
})
|
||||
@Mutation(() => UserType)
|
||||
@Auth()
|
||||
async changePassword(
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@CurrentUser() user: UserType,
|
||||
@Args('token') token: string,
|
||||
@Args('newPassword') newPassword: string
|
||||
) {
|
||||
validators.assertValidPassword(newPassword);
|
||||
// NOTE: Set & Change password are using the same token type.
|
||||
const valid = await this.token.verifyToken(
|
||||
TokenType.ChangePassword,
|
||||
token,
|
||||
{
|
||||
credential: user.id,
|
||||
}
|
||||
);
|
||||
|
||||
if (!valid) {
|
||||
const id = await this.session.get(token);
|
||||
if (!user.emailVerified) {
|
||||
throw new ForbiddenException('Please verify the email first');
|
||||
}
|
||||
if (
|
||||
!id ||
|
||||
(id !== user.id &&
|
||||
// change password after sign in with email link
|
||||
// we only create user account after user sign in with email link
|
||||
id !== user.email)
|
||||
) {
|
||||
throw new ForbiddenException('Invalid token');
|
||||
}
|
||||
|
||||
await this.auth.changePassword(user.id, newPassword);
|
||||
await this.auth.changePassword(user.email, newPassword);
|
||||
await this.session.delete(token);
|
||||
|
||||
return user;
|
||||
}
|
||||
|
||||
@Throttle({
|
||||
default: {
|
||||
limit: 5,
|
||||
ttl: 60,
|
||||
},
|
||||
})
|
||||
@Mutation(() => UserType)
|
||||
@Auth()
|
||||
async changeEmail(
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@Args('token') token: string,
|
||||
@Args('email') email: string
|
||||
@CurrentUser() user: UserType,
|
||||
@Args('token') token: string
|
||||
) {
|
||||
validators.assertValidEmail(email);
|
||||
// @see [sendChangeEmail]
|
||||
const valid = await this.token.verifyToken(TokenType.VerifyEmail, token, {
|
||||
credential: user.id,
|
||||
});
|
||||
|
||||
if (!valid) {
|
||||
const key = await this.session.get(token);
|
||||
if (!key) {
|
||||
throw new ForbiddenException('Invalid token');
|
||||
}
|
||||
|
||||
email = decodeURIComponent(email);
|
||||
// email has set token in `sendVerifyChangeEmail`
|
||||
const [id, email] = key.split(',');
|
||||
if (!id || id !== user.id || !email) {
|
||||
throw new ForbiddenException('Invalid token');
|
||||
}
|
||||
|
||||
await this.auth.changeEmail(id, email);
|
||||
await this.session.delete(token);
|
||||
|
||||
await this.auth.changeEmail(user.id, email);
|
||||
await this.auth.sendNotificationChangeEmail(email);
|
||||
|
||||
return user;
|
||||
}
|
||||
|
||||
@Throttle({
|
||||
default: {
|
||||
limit: 5,
|
||||
ttl: 60,
|
||||
},
|
||||
})
|
||||
@Mutation(() => Boolean)
|
||||
@Auth()
|
||||
async sendChangePasswordEmail(
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@Args('callbackUrl') callbackUrl: string,
|
||||
// @deprecated
|
||||
@Args('email', { nullable: true }) _email?: string
|
||||
@CurrentUser() user: UserType,
|
||||
@Args('email') email: string,
|
||||
@Args('callbackUrl') callbackUrl: string
|
||||
) {
|
||||
if (!user.emailVerified) {
|
||||
throw new ForbiddenException('Please verify your email first.');
|
||||
}
|
||||
|
||||
const token = await this.token.createToken(
|
||||
TokenType.ChangePassword,
|
||||
user.id
|
||||
);
|
||||
const token = nanoid();
|
||||
await this.session.set(token, user.id);
|
||||
|
||||
const url = new URL(callbackUrl, this.config.baseUrl);
|
||||
url.searchParams.set('token', token);
|
||||
|
||||
const res = await this.auth.sendChangePasswordEmail(
|
||||
user.email,
|
||||
url.toString()
|
||||
);
|
||||
|
||||
const res = await this.auth.sendChangePasswordEmail(email, url.toString());
|
||||
return !res.rejected.length;
|
||||
}
|
||||
|
||||
@Throttle({
|
||||
default: {
|
||||
limit: 5,
|
||||
ttl: 60,
|
||||
},
|
||||
})
|
||||
@Mutation(() => Boolean)
|
||||
@Auth()
|
||||
async sendSetPasswordEmail(
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@Args('callbackUrl') callbackUrl: string,
|
||||
@Args('email', { nullable: true }) _email?: string
|
||||
@CurrentUser() user: UserType,
|
||||
@Args('email') email: string,
|
||||
@Args('callbackUrl') callbackUrl: string
|
||||
) {
|
||||
if (!user.emailVerified) {
|
||||
throw new ForbiddenException('Please verify your email first.');
|
||||
}
|
||||
|
||||
const token = await this.token.createToken(
|
||||
TokenType.ChangePassword,
|
||||
user.id
|
||||
);
|
||||
const token = nanoid();
|
||||
await this.session.set(token, user.id);
|
||||
|
||||
const url = new URL(callbackUrl, this.config.baseUrl);
|
||||
url.searchParams.set('token', token);
|
||||
|
||||
const res = await this.auth.sendSetPasswordEmail(
|
||||
user.email,
|
||||
url.toString()
|
||||
);
|
||||
const res = await this.auth.sendSetPasswordEmail(email, url.toString());
|
||||
return !res.rejected.length;
|
||||
}
|
||||
|
||||
@@ -185,101 +242,64 @@ export class AuthResolver {
|
||||
// 4. user open confirm email page from new email
|
||||
// 5. user click confirm button
|
||||
// 6. send notification email
|
||||
@Throttle({
|
||||
default: {
|
||||
limit: 5,
|
||||
ttl: 60,
|
||||
},
|
||||
})
|
||||
@Mutation(() => Boolean)
|
||||
@Auth()
|
||||
async sendChangeEmail(
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@Args('callbackUrl') callbackUrl: string,
|
||||
// @deprecated
|
||||
@Args('email', { nullable: true }) _email?: string
|
||||
@CurrentUser() user: UserType,
|
||||
@Args('email') email: string,
|
||||
@Args('callbackUrl') callbackUrl: string
|
||||
) {
|
||||
if (!user.emailVerified) {
|
||||
throw new ForbiddenException('Please verify your email first.');
|
||||
}
|
||||
|
||||
const token = await this.token.createToken(TokenType.ChangeEmail, user.id);
|
||||
const token = nanoid();
|
||||
await this.session.set(token, user.id);
|
||||
|
||||
const url = new URL(callbackUrl, this.config.baseUrl);
|
||||
url.searchParams.set('token', token);
|
||||
|
||||
const res = await this.auth.sendChangeEmail(user.email, url.toString());
|
||||
const res = await this.auth.sendChangeEmail(email, url.toString());
|
||||
return !res.rejected.length;
|
||||
}
|
||||
|
||||
@Throttle({
|
||||
default: {
|
||||
limit: 5,
|
||||
ttl: 60,
|
||||
},
|
||||
})
|
||||
@Mutation(() => Boolean)
|
||||
@Auth()
|
||||
async sendVerifyChangeEmail(
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@CurrentUser() user: UserType,
|
||||
@Args('token') token: string,
|
||||
@Args('email') email: string,
|
||||
@Args('callbackUrl') callbackUrl: string
|
||||
) {
|
||||
validators.assertValidEmail(email);
|
||||
const valid = await this.token.verifyToken(TokenType.ChangeEmail, token, {
|
||||
credential: user.id,
|
||||
});
|
||||
|
||||
if (!valid) {
|
||||
const id = await this.session.get(token);
|
||||
if (!id || id !== user.id) {
|
||||
throw new ForbiddenException('Invalid token');
|
||||
}
|
||||
|
||||
const hasRegistered = await this.user.findUserByEmail(email);
|
||||
const hasRegistered = await this.auth.getUserByEmail(email);
|
||||
|
||||
if (hasRegistered) {
|
||||
if (hasRegistered.id !== user.id) {
|
||||
throw new BadRequestException(`The email provided has been taken.`);
|
||||
} else {
|
||||
throw new BadRequestException(
|
||||
`The email provided is the same as the current email.`
|
||||
);
|
||||
}
|
||||
throw new BadRequestException(`Invalid user email`);
|
||||
}
|
||||
|
||||
const verifyEmailToken = await this.token.createToken(
|
||||
TokenType.VerifyEmail,
|
||||
user.id
|
||||
);
|
||||
const withEmailToken = nanoid();
|
||||
await this.session.set(withEmailToken, `${user.id},${email}`);
|
||||
|
||||
const url = new URL(callbackUrl, this.config.baseUrl);
|
||||
url.searchParams.set('token', verifyEmailToken);
|
||||
url.searchParams.set('email', email);
|
||||
url.searchParams.set('token', withEmailToken);
|
||||
|
||||
const res = await this.auth.sendVerifyChangeEmail(email, url.toString());
|
||||
|
||||
await this.session.delete(token);
|
||||
|
||||
return !res.rejected.length;
|
||||
}
|
||||
|
||||
@Mutation(() => Boolean)
|
||||
async sendVerifyEmail(
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@Args('callbackUrl') callbackUrl: string
|
||||
) {
|
||||
const token = await this.token.createToken(TokenType.VerifyEmail, user.id);
|
||||
|
||||
const url = new URL(callbackUrl, this.config.baseUrl);
|
||||
url.searchParams.set('token', token);
|
||||
|
||||
const res = await this.auth.sendVerifyEmail(user.email, url.toString());
|
||||
return !res.rejected.length;
|
||||
}
|
||||
|
||||
@Mutation(() => Boolean)
|
||||
async verifyEmail(
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@Args('token') token: string
|
||||
) {
|
||||
if (!token) {
|
||||
throw new BadRequestException('Invalid token');
|
||||
}
|
||||
|
||||
const valid = await this.token.verifyToken(TokenType.VerifyEmail, token, {
|
||||
credential: user.id,
|
||||
});
|
||||
|
||||
if (!valid) {
|
||||
throw new ForbiddenException('Invalid token');
|
||||
}
|
||||
|
||||
const { emailVerifiedAt } = await this.auth.setEmailVerified(user.id);
|
||||
|
||||
return emailVerifiedAt !== null;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,382 +1,282 @@
|
||||
import { randomUUID } from 'node:crypto';
|
||||
|
||||
import {
|
||||
BadRequestException,
|
||||
Injectable,
|
||||
NotAcceptableException,
|
||||
OnApplicationBootstrap,
|
||||
InternalServerErrorException,
|
||||
UnauthorizedException,
|
||||
} from '@nestjs/common';
|
||||
import { hash, verify } from '@node-rs/argon2';
|
||||
import { Algorithm, sign, verify as jwtVerify } from '@node-rs/jsonwebtoken';
|
||||
import type { User } from '@prisma/client';
|
||||
import { PrismaClient } from '@prisma/client';
|
||||
import type { CookieOptions, Request, Response } from 'express';
|
||||
import { assign, omit } from 'lodash-es';
|
||||
import { nanoid } from 'nanoid';
|
||||
|
||||
import { Config, CryptoHelper, MailService } from '../../fundamentals';
|
||||
import { FeatureManagementService } from '../features/management';
|
||||
import { QuotaService } from '../quota/service';
|
||||
import { QuotaType } from '../quota/types';
|
||||
import { UserService } from '../user/service';
|
||||
import type { CurrentUser } from './current-user';
|
||||
import {
|
||||
Config,
|
||||
MailService,
|
||||
PrismaService,
|
||||
verifyChallengeResponse,
|
||||
} from '../../fundamentals';
|
||||
import { Quota_FreePlanV1_1 } from '../quota';
|
||||
|
||||
export function parseAuthUserSeqNum(value: any) {
|
||||
let seq: number = 0;
|
||||
switch (typeof value) {
|
||||
case 'number': {
|
||||
seq = value;
|
||||
break;
|
||||
}
|
||||
case 'string': {
|
||||
const result = value.match(/^(\d{1,10})$/); // 1-10 digit sequence (quantifier, not a character class)
|
||||
if (result?.[1]) {
|
||||
seq = Number(result[1]);
|
||||
}
|
||||
break;
|
||||
}
|
||||
export type UserClaim = Pick<
|
||||
User,
|
||||
'id' | 'name' | 'email' | 'emailVerified' | 'createdAt' | 'avatarUrl'
|
||||
> & {
|
||||
hasPassword?: boolean;
|
||||
};
|
||||
|
||||
default: {
|
||||
seq = 0;
|
||||
}
|
||||
}
|
||||
|
||||
return Math.max(0, seq);
|
||||
}
|
||||
|
||||
export function sessionUser(
|
||||
user: Pick<
|
||||
User,
|
||||
'id' | 'email' | 'avatarUrl' | 'name' | 'emailVerifiedAt'
|
||||
> & { password?: string | null }
|
||||
): CurrentUser {
|
||||
return assign(
|
||||
omit(user, 'password', 'registered', 'emailVerifiedAt', 'createdAt'),
|
||||
{
|
||||
hasPassword: user.password !== null,
|
||||
emailVerified: user.emailVerifiedAt !== null,
|
||||
}
|
||||
);
|
||||
}
|
||||
export const getUtcTimestamp = () => Math.floor(Date.now() / 1000);
|
||||
|
||||
@Injectable()
|
||||
export class AuthService implements OnApplicationBootstrap {
|
||||
readonly cookieOptions: CookieOptions = {
|
||||
sameSite: 'lax',
|
||||
httpOnly: true,
|
||||
path: '/',
|
||||
secure: this.config.https,
|
||||
};
|
||||
static readonly sessionCookieName = 'affine_session';
|
||||
static readonly authUserSeqHeaderName = 'x-auth-user';
|
||||
|
||||
export class AuthService {
|
||||
constructor(
|
||||
private readonly config: Config,
|
||||
private readonly db: PrismaClient,
|
||||
private readonly mailer: MailService,
|
||||
private readonly feature: FeatureManagementService,
|
||||
private readonly quota: QuotaService,
|
||||
private readonly user: UserService,
|
||||
private readonly crypto: CryptoHelper
|
||||
private readonly prisma: PrismaService,
|
||||
private readonly mailer: MailService
|
||||
) {}
|
||||
|
||||
async onApplicationBootstrap() {
|
||||
if (this.config.node.dev) {
|
||||
try {
|
||||
const [email, name, pwd] = ['dev@affine.pro', 'Dev User', 'dev'];
|
||||
let devUser = await this.user.findUserByEmail(email);
|
||||
if (!devUser) {
|
||||
devUser = await this.user.createUser({
|
||||
email,
|
||||
name,
|
||||
password: await this.crypto.encryptPassword(pwd),
|
||||
});
|
||||
}
|
||||
await this.quota.switchUserQuota(devUser.id, QuotaType.ProPlanV1);
|
||||
await this.feature.addCopilot(devUser.id);
|
||||
} catch (e) {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
canSignIn(email: string) {
|
||||
return this.feature.canEarlyAccess(email);
|
||||
}
|
||||
|
||||
async signUp(
|
||||
name: string,
|
||||
email: string,
|
||||
password: string
|
||||
): Promise<CurrentUser> {
|
||||
const user = await this.user.findUserByEmail(email);
|
||||
|
||||
if (user) {
|
||||
throw new BadRequestException('Email was taken');
|
||||
}
|
||||
|
||||
const hashedPassword = await this.crypto.encryptPassword(password);
|
||||
|
||||
return this.user
|
||||
.createUser({
|
||||
name,
|
||||
email,
|
||||
password: hashedPassword,
|
||||
})
|
||||
.then(sessionUser);
|
||||
}
|
||||
|
||||
async signIn(email: string, password: string) {
|
||||
const user = await this.user.findUserWithHashedPasswordByEmail(email);
|
||||
|
||||
if (!user) {
|
||||
throw new NotAcceptableException('Invalid sign in credentials');
|
||||
}
|
||||
|
||||
if (!user.password) {
|
||||
throw new NotAcceptableException(
|
||||
'User Password is not set. Should login through email link.'
|
||||
);
|
||||
}
|
||||
|
||||
const passwordMatches = await this.crypto.verifyPassword(
|
||||
password,
|
||||
user.password
|
||||
);
|
||||
|
||||
if (!passwordMatches) {
|
||||
throw new NotAcceptableException('Invalid sign in credentials');
|
||||
}
|
||||
|
||||
return sessionUser(user);
|
||||
}
|
||||
|
||||
async getUser(
|
||||
token: string,
|
||||
seq = 0
|
||||
): Promise<{ user: CurrentUser | null; expiresAt: Date | null }> {
|
||||
const session = await this.getSession(token);
|
||||
|
||||
// no such session
|
||||
if (!session) {
|
||||
return { user: null, expiresAt: null };
|
||||
}
|
||||
|
||||
const userSession = session.userSessions.at(seq);
|
||||
|
||||
// no such user session
|
||||
if (!userSession) {
|
||||
return { user: null, expiresAt: null };
|
||||
}
|
||||
|
||||
// user session expired
|
||||
if (userSession.expiresAt && userSession.expiresAt <= new Date()) {
|
||||
return { user: null, expiresAt: null };
|
||||
}
|
||||
|
||||
const user = await this.db.user.findUnique({
|
||||
where: { id: userSession.userId },
|
||||
});
|
||||
|
||||
if (!user) {
|
||||
return { user: null, expiresAt: null };
|
||||
}
|
||||
|
||||
return { user: sessionUser(user), expiresAt: userSession.expiresAt };
|
||||
}
|
||||
|
||||
async getUserList(token: string) {
|
||||
const session = await this.getSession(token);
|
||||
|
||||
if (!session || !session.userSessions.length) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const users = await this.db.user.findMany({
|
||||
where: {
|
||||
id: {
|
||||
in: session.userSessions.map(({ userId }) => userId),
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// TODO(@forehalo): need to separate expired session, same for [getUser]
|
||||
// Session
|
||||
// | { user: LimitedUser { email, avatarUrl }, expired: true }
|
||||
// | { user: User, expired: false }
|
||||
return session.userSessions
|
||||
.map(userSession => {
|
||||
// keep users in the same order as userSessions
|
||||
const user = users.find(({ id }) => id === userSession.userId);
|
||||
if (!user) {
|
||||
return null;
|
||||
}
|
||||
return sessionUser(user);
|
||||
})
|
||||
.filter(Boolean) as CurrentUser[];
|
||||
}
|
||||
|
||||
async signOut(token: string, seq = 0) {
|
||||
const session = await this.getSession(token);
|
||||
|
||||
if (session) {
|
||||
// overflow the logged in user
|
||||
if (session.userSessions.length <= seq) {
|
||||
return session;
|
||||
}
|
||||
|
||||
await this.db.userSession.deleteMany({
|
||||
where: { id: session.userSessions[seq].id },
|
||||
});
|
||||
|
||||
// no more user session active, delete the whole session
|
||||
if (session.userSessions.length === 1) {
|
||||
await this.db.session.delete({ where: { id: session.id } });
|
||||
return null;
|
||||
}
|
||||
|
||||
return session;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
async getSession(token: string) {
|
||||
if (!token) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return this.db.$transaction(async tx => {
|
||||
const session = await tx.session.findUnique({
|
||||
where: {
|
||||
id: token,
|
||||
},
|
||||
include: {
|
||||
userSessions: {
|
||||
orderBy: {
|
||||
createdAt: 'asc',
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
if (!session) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (session.expiresAt && session.expiresAt <= new Date()) {
|
||||
await tx.session.delete({
|
||||
where: {
|
||||
id: session.id,
|
||||
},
|
||||
});
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
return session;
|
||||
});
|
||||
}
|
||||
|
||||
async refreshUserSessionIfNeeded(
|
||||
_req: Request,
|
||||
res: Response,
|
||||
sessionId: string,
|
||||
userId: string,
|
||||
expiresAt: Date,
|
||||
ttr = this.config.auth.session.ttr
|
||||
): Promise<boolean> {
|
||||
if (expiresAt && expiresAt.getTime() - Date.now() > ttr * 1000) {
|
||||
// no need to refresh
|
||||
return false;
|
||||
}
|
||||
|
||||
const newExpiresAt = new Date(
|
||||
Date.now() + this.config.auth.session.ttl * 1000
|
||||
);
|
||||
|
||||
await this.db.userSession.update({
|
||||
where: {
|
||||
sessionId_userId: {
|
||||
sessionId,
|
||||
userId,
|
||||
},
|
||||
},
|
||||
data: {
|
||||
expiresAt: newExpiresAt,
|
||||
},
|
||||
});
|
||||
|
||||
res.cookie(AuthService.sessionCookieName, sessionId, {
|
||||
expires: newExpiresAt,
|
||||
...this.cookieOptions,
|
||||
});
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
  async createUserSession(
    user: { id: string },
    existingSession?: string,
    ttl = this.config.auth.session.ttl
  ) {
    const session = existingSession
      ? await this.getSession(existingSession)
      : null;

    const expiresAt = new Date(Date.now() + ttl * 1000);
    if (session) {
      return this.db.userSession.upsert({
        where: {
          sessionId_userId: { sessionId: session.id, userId: user.id },
        },
        update: { expiresAt },
        create: { sessionId: session.id, userId: user.id, expiresAt },
      });
    } else {
      return this.db.userSession.create({
        data: {
          expiresAt,
          session: { create: {} },
          user: { connect: { id: user.id } },
        },
      });
    }
  }

  sign(user: UserClaim) {
    const now = getUtcTimestamp();
    return sign(
      {
        data: {
          id: user.id,
          name: user.name,
          email: user.email,
          emailVerified: user.emailVerified?.toISOString(),
          image: user.avatarUrl,
          hasPassword: Boolean(user.hasPassword),
          createdAt: user.createdAt.toISOString(),
        },
        iat: now,
        exp: now + this.config.auth.accessTokenExpiresIn,
        iss: this.config.serverId,
        sub: user.id,
        aud: 'https://affine.pro',
        jti: randomUUID({ disableEntropyCache: true }),
      },
      this.config.auth.privateKey,
      { algorithm: Algorithm.ES256 }
    );
  }

  refresh(user: UserClaim) {
    const now = getUtcTimestamp();
    return sign(
      {
        data: {
          id: user.id,
          name: user.name,
          email: user.email,
          emailVerified: user.emailVerified?.toISOString(),
          image: user.avatarUrl,
          hasPassword: Boolean(user.hasPassword),
          createdAt: user.createdAt.toISOString(),
        },
        exp: now + this.config.auth.refreshTokenExpiresIn,
        iat: now,
        iss: this.config.serverId,
        sub: user.id,
        aud: 'https://affine.pro',
        jti: randomUUID({ disableEntropyCache: true }),
      },
      this.config.auth.privateKey,
      { algorithm: Algorithm.ES256 }
    );
  }

  async verify(token: string) {
    try {
      const data = (
        await jwtVerify(token, this.config.auth.publicKey, {
          algorithms: [Algorithm.ES256],
          iss: [this.config.serverId],
          leeway: this.config.auth.leeway,
          requiredSpecClaims: ['exp', 'iat', 'iss', 'sub'],
          aud: ['https://affine.pro'],
        })
      ).data as UserClaim;

      return {
        ...data,
        emailVerified: data.emailVerified ? new Date(data.emailVerified) : null,
        createdAt: new Date(data.createdAt),
      };
    } catch (e) {
      throw new UnauthorizedException('Invalid token');
    }
  }

  async setCookie(_req: Request, res: Response, user: { id: string }) {
    const session = await this.createUserSession(
      user
      // TODO(@forehalo): enable multi user session
      // req.cookies[AuthService.sessionCookieName]
    );

    res.cookie(AuthService.sessionCookieName, session.sessionId, {
      expires: session.expiresAt ?? void 0,
      ...this.cookieOptions,
    });
  }

  async verifyCaptchaToken(token: any, ip: string) {
    if (typeof token !== 'string' || !token) return false;

    const formData = new FormData();
    formData.append('secret', this.config.auth.captcha.turnstile.secret);
    formData.append('response', token);
    formData.append('remoteip', ip);
    // prevent replay attack
    formData.append('idempotency_key', nanoid());

    const url = 'https://challenges.cloudflare.com/turnstile/v0/siteverify';
    const result = await fetch(url, {
      body: formData,
      method: 'POST',
    });
    const outcome = await result.json();

    return (
      !!outcome.success &&
      // skip hostname check in dev mode
      (this.config.node.dev || outcome.hostname === this.config.host)
    );
  }

  async verifyChallengeResponse(response: any, resource: string) {
    return verifyChallengeResponse(
      response,
      this.config.auth.captcha.challenge.bits,
      resource
    );
  }

  async changePassword(id: string, newPassword: string): Promise<User> {
    const user = await this.user.findUserById(id);
    const hashedPassword = await this.crypto.encryptPassword(newPassword);
    return this.db.user.update({

  async signIn(email: string, password: string): Promise<User> {
    const user = await this.prisma.user.findFirst({
      where: { email },
    });

    if (!user) {
      throw new BadRequestException('Invalid email');
    }

    if (!user.password) {
      throw new BadRequestException('User has no password');
    }
    let equal = false;
    try {
      equal = await verify(user.password, password);
    } catch (e) {
      console.error(e);
      throw new InternalServerErrorException(e, 'Verify password failed');
    }
    if (!equal) {
      throw new UnauthorizedException('Invalid password');
    }

    return user;
  }

  async signUp(name: string, email: string, password: string): Promise<User> {
    const user = await this.prisma.user.findFirst({
      where: { email },
    });

    if (user) {
      throw new BadRequestException('Email already exists');
    }

    const hashedPassword = await hash(password);

    return this.prisma.user.create({
      data: {
        name,
        email,
        password: hashedPassword,
        // TODO(@forehalo): handle in event system
        features: {
          create: {
            reason: 'created by api sign up',
            activated: true,
            feature: {
              connect: { feature_version: Quota_FreePlanV1_1 },
            },
          },
        },
      },
    });
  }

  async createAnonymousUser(email: string): Promise<User> {
    const user = await this.prisma.user.findFirst({
      where: { email },
    });

    if (user) {
      throw new BadRequestException('Email already exists');
    }

    return this.prisma.user.create({
      data: {
        name: 'Unnamed',
        email,
        features: {
          create: {
            reason: 'created by invite sign up',
            activated: true,
            feature: {
              connect: { feature_version: Quota_FreePlanV1_1 },
            },
          },
        },
      },
    });
  }

  async getUserByEmail(email: string): Promise<User | null> {
    return this.prisma.user.findUnique({
      where: { email },
    });
  }

  async isUserHasPassword(email: string): Promise<boolean> {
    const user = await this.prisma.user.findFirst({
      where: { email },
    });
    if (!user) {
      throw new BadRequestException('Invalid email');
    }
    return Boolean(user.password);
  }

  async changePassword(email: string, newPassword: string): Promise<User> {
    const user = await this.prisma.user.findUnique({
      where: {
        email,
        emailVerified: { not: null },
      },
    });

    if (!user) {
      throw new BadRequestException('Invalid email');
    }

    const hashedPassword = await hash(newPassword);

    return this.prisma.user.update({
      where: { id: user.id },
@@ -387,33 +287,22 @@ export class AuthService implements OnApplicationBootstrap {
  }

  async changeEmail(id: string, newEmail: string): Promise<User> {
    const user = await this.user.findUserById(id);
    const user = await this.prisma.user.findUnique({
      where: { id },
    });

    if (!user) {
      throw new BadRequestException('Invalid email');
    }

    return this.db.user.update({
    return this.prisma.user.update({
      where: { id },
      data: {
        email: newEmail,
        emailVerifiedAt: new Date(),
      },
    });
  }

  async setEmailVerified(id: string) {
    return await this.db.user.update({
      where: { id },
      data: { emailVerifiedAt: new Date() },
      select: { emailVerifiedAt: true },
    });
  }

@@ -430,20 +319,7 @@ export class AuthService implements OnApplicationBootstrap {
  async sendVerifyChangeEmail(email: string, callbackUrl: string) {
    return this.mailer.sendVerifyChangeEmail(email, callbackUrl);
  }
  async sendVerifyEmail(email: string, callbackUrl: string) {
    return this.mailer.sendVerifyEmail(email, callbackUrl);
  }
  async sendNotificationChangeEmail(email: string) {
    return this.mailer.sendNotificationChangeEmail(email);
  }

  async sendSignInEmail(email: string, link: string, signUp: boolean) {
    return signUp
      ? await this.mailer.sendSignUpMail(link.toString(), {
          to: email,
        })
      : await this.mailer.sendSignInMail(link.toString(), {
          to: email,
        });
  }
}
@@ -1,99 +0,0 @@
import { randomUUID } from 'node:crypto';

import { Injectable } from '@nestjs/common';
import { Cron, CronExpression } from '@nestjs/schedule';
import { PrismaClient } from '@prisma/client';

import { CryptoHelper } from '../../fundamentals/helpers';

export enum TokenType {
  SignIn,
  VerifyEmail,
  ChangeEmail,
  ChangePassword,
  Challenge,
}

@Injectable()
export class TokenService {
  constructor(
    private readonly db: PrismaClient,
    private readonly crypto: CryptoHelper
  ) {}

  async createToken(
    type: TokenType,
    credential?: string,
    ttlInSec: number = 30 * 60
  ) {
    const plaintextToken = randomUUID();

    const { token } = await this.db.verificationToken.create({
      data: {
        type,
        token: plaintextToken,
        credential,
        expiresAt: new Date(Date.now() + ttlInSec * 1000),
      },
    });

    return this.crypto.encrypt(token);
  }

  async verifyToken(
    type: TokenType,
    token: string,
    {
      credential,
      keep,
    }: {
      credential?: string;
      keep?: boolean;
    } = {}
  ) {
    token = this.crypto.decrypt(token);
    const record = await this.db.verificationToken.findUnique({
      where: {
        type_token: { token, type },
      },
    });

    if (!record) {
      return null;
    }

    const expired = record.expiresAt <= new Date();
    const valid =
      !expired && (!record.credential || record.credential === credential);

    if ((expired || valid) && !keep) {
      const deleted = await this.db.verificationToken.deleteMany({
        where: { token, type },
      });

      // already deleted, means token has been used
      if (!deleted.count) {
        return null;
      }
    }

    return valid ? record : null;
  }

  @Cron(CronExpression.EVERY_DAY_AT_MIDNIGHT)
  cleanExpiredTokens() {
    return this.db.verificationToken.deleteMany({
      where: {
        expiresAt: { lte: new Date() },
      },
    });
  }
}
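Note (illustrative, not part of the diff): the TokenService above issues one-time verification tokens that can be bound to a credential and are deleted on first use or expiry. A minimal usage sketch, assuming the TokenType/TokenService shapes shown in this file; the helper names and the email value are hypothetical.

// Sketch only: relies on TokenService.createToken/verifyToken as defined above.
async function startEmailChange(tokens: TokenService, userEmail: string) {
  // bind the token to the user's email so it can only be redeemed with the same credential
  return await tokens.createToken(TokenType.ChangeEmail, userEmail, 15 * 60);
}

async function finishEmailChange(tokens: TokenService, token: string, userEmail: string) {
  // a valid, unexpired token is deleted on first use, so replaying the same link fails
  const record = await tokens.verifyToken(TokenType.ChangeEmail, token, {
    credential: userEmail,
  });
  return record !== null;
}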
packages/backend/server/src/core/auth/utils/index.ts (new file, 3 lines)
@@ -0,0 +1,3 @@
export { jwtDecode as decode, jwtEncode as encode } from './jwt';
export { sendVerificationRequest } from './send-mail';
export type { SendVerificationRequestParams } from 'next-auth/providers/email';
packages/backend/server/src/core/auth/utils/jwt.ts (new file, 75 lines)
@@ -0,0 +1,75 @@
import { randomUUID } from 'node:crypto';

import { BadRequestException } from '@nestjs/common';
import { Algorithm, sign, verify as jwtVerify } from '@node-rs/jsonwebtoken';
import { JWT } from 'next-auth/jwt';

import { Config, PrismaService } from '../../../fundamentals';
import { getUtcTimestamp, UserClaim } from '../service';

export const jwtEncode = async (
  config: Config,
  prisma: PrismaService,
  token: JWT | undefined,
  maxAge: number | undefined
) => {
  if (!token?.email) {
    throw new BadRequestException('Missing email in jwt token');
  }
  const user = await prisma.user.findFirstOrThrow({
    where: { email: token.email },
  });
  const now = getUtcTimestamp();
  return sign(
    {
      data: {
        id: user.id,
        name: user.name,
        email: user.email,
        emailVerified: user.emailVerified?.toISOString(),
        picture: user.avatarUrl,
        createdAt: user.createdAt.toISOString(),
        hasPassword: Boolean(user.password),
      },
      iat: now,
      exp: now + (maxAge ?? config.auth.accessTokenExpiresIn),
      iss: config.serverId,
      sub: user.id,
      aud: 'https://affine.pro',
      jti: randomUUID({ disableEntropyCache: true }),
    },
    config.auth.privateKey,
    { algorithm: Algorithm.ES256 }
  );
};

export const jwtDecode = async (config: Config, token: string | undefined) => {
  if (!token) {
    return null;
  }
  const { name, email, emailVerified, id, picture, hasPassword } = (
    await jwtVerify(token, config.auth.publicKey, {
      algorithms: [Algorithm.ES256],
      iss: [config.serverId],
      leeway: config.auth.leeway,
      requiredSpecClaims: ['exp', 'iat', 'iss', 'sub'],
    })
  ).data as Omit<UserClaim, 'avatarUrl'> & {
    picture: string | undefined;
  };
  return {
    name,
    email,
    emailVerified,
    picture,
    sub: id,
    id,
    hasPassword,
  };
};
packages/backend/server/src/core/auth/utils/send-mail.ts (new file, 38 lines)
@@ -0,0 +1,38 @@
import { Logger } from '@nestjs/common';
import { nanoid } from 'nanoid';
import type { SendVerificationRequestParams } from 'next-auth/providers/email';

import { Config, MailService, SessionService } from '../../../fundamentals';

export async function sendVerificationRequest(
  config: Config,
  logger: Logger,
  mailer: MailService,
  session: SessionService,
  params: SendVerificationRequestParams
) {
  const { identifier, url } = params;
  const urlWithToken = new URL(url);
  const callbackUrl = urlWithToken.searchParams.get('callbackUrl') || '';
  if (!callbackUrl) {
    throw new Error('callbackUrl is not set');
  } else {
    const newCallbackUrl = new URL(callbackUrl, config.origin);

    const token = nanoid();
    await session.set(token, identifier);
    newCallbackUrl.searchParams.set('token', token);

    urlWithToken.searchParams.set('callbackUrl', newCallbackUrl.toString());
  }

  const result = await mailer.sendSignInEmail(urlWithToken.toString(), {
    to: identifier,
  });
  logger.log(`send verification email success: ${result.accepted.join(', ')}`);

  const failed = result.rejected.concat(result.pending).filter(Boolean);
  if (failed.length) {
    throw new Error(`Email (${failed.join(', ')}) could not be sent`);
  }
}
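Note (illustrative, not part of the diff): sendVerificationRequest above rewrites the inner callbackUrl of the sign-in link so it carries a one-time token. A standalone sketch of just that URL manipulation, using only the standard URL API; the origin and token values are made up.

// Sketch only: mirrors the callbackUrl rewriting done in sendVerificationRequest.
function appendTokenToCallback(signInUrl: string, token: string, origin = 'https://app.affine.pro') {
  const urlWithToken = new URL(signInUrl);
  const callbackUrl = urlWithToken.searchParams.get('callbackUrl');
  if (!callbackUrl) throw new Error('callbackUrl is not set');

  const newCallbackUrl = new URL(callbackUrl, origin);
  newCallbackUrl.searchParams.set('token', token);
  urlWithToken.searchParams.set('callbackUrl', newCallbackUrl.toString());
  return urlWithToken.toString();
}

// appendTokenToCallback('https://app.affine.pro/api/auth/callback/email?callbackUrl=%2Fworkspace', 'abc123')
// -> the nested callbackUrl gains ?token=abc123 while the outer sign-in link stays intact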
@@ -2,12 +2,9 @@ import { Module } from '@nestjs/common';
import { Field, ObjectType, Query, registerEnumType } from '@nestjs/graphql';

import { DeploymentType } from '../fundamentals';
import { Public } from './auth';

export enum ServerFeature {
  Copilot = 'copilot',
  Payment = 'payment',
  OAuth = 'oauth',
}

registerEnumType(ServerFeature, {
@@ -18,23 +15,9 @@ registerEnumType(DeploymentType, {
  name: 'ServerDeploymentType',
});

const ENABLED_FEATURES: Set<ServerFeature> = new Set();
const ENABLED_FEATURES: ServerFeature[] = [];
export function ADD_ENABLED_FEATURES(feature: ServerFeature) {
  ENABLED_FEATURES.add(feature);
}

@ObjectType()
export class PasswordLimitsType {
  @Field()
  minLength!: number;
  @Field()
  maxLength!: number;
}

@ObjectType()
export class CredentialsRequirementType {
  @Field()
  password!: PasswordLimitsType;
  ENABLED_FEATURES.push(feature);
}

@ObjectType()
@@ -62,18 +45,8 @@ export class ServerConfigType {

  @Field(() => [ServerFeature], { description: 'enabled server features' })
  features!: ServerFeature[];

  @Field(() => CredentialsRequirementType, {
    description: 'credentials requirement',
  })
  credentialsRequirement!: CredentialsRequirementType;

  @Field({ description: 'enable telemetry' })
  enableTelemetry!: boolean;
}

export class ServerConfigResolver {
  @Public()
  @Query(() => ServerConfigType, {
    description: 'server config',
  })
@@ -87,11 +60,7 @@ export class ServerConfigResolver {
      // the old flavors contains `selfhosted` but it actually not flavor but deployment type
      // this field should be removed after frontend feature flags implemented
      flavor: AFFiNE.type,
      features: Array.from(ENABLED_FEATURES),
      credentialsRequirement: {
        password: AFFiNE.auth.password,
      },
      enableTelemetry: AFFiNE.telemetry.enabled,
      features: ENABLED_FEATURES,
    };
  }
}

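Note (illustrative, not part of the diff): one behavioural difference between the two ENABLED_FEATURES declarations above is that a Set silently deduplicates repeated registrations, while the array variant would report the same feature twice. A tiny sketch:

// Sketch only: the feature name here is arbitrary.
const asSet = new Set<string>();
asSet.add('oauth');
asSet.add('oauth');
Array.from(asSet); // ['oauth']

const asArray: string[] = [];
asArray.push('oauth');
asArray.push('oauth');
asArray; // ['oauth', 'oauth']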
@@ -2,10 +2,14 @@ import { isDeepStrictEqual } from 'node:util';

import { Injectable, Logger } from '@nestjs/common';
import { Cron, CronExpression } from '@nestjs/schedule';
import { PrismaClient } from '@prisma/client';

import type { EventPayload } from '../../fundamentals';
import { Config, metrics, OnEvent } from '../../fundamentals';
import {
  Config,
  type EventPayload,
  metrics,
  OnEvent,
  PrismaService,
} from '../../fundamentals';
import { QuotaService } from '../quota';
import { Permission } from '../workspaces/types';
import { isEmptyBuffer } from './manager';
@@ -15,7 +19,7 @@ export class DocHistoryManager {
  private readonly logger = new Logger(DocHistoryManager.name);
  constructor(
    private readonly config: Config,
    private readonly db: PrismaClient,
    private readonly db: PrismaService,
    private readonly quota: QuotaService
  ) {}

@@ -5,7 +5,7 @@ import {
  OnModuleInit,
} from '@nestjs/common';
import { Cron, CronExpression } from '@nestjs/schedule';
import { PrismaClient, Snapshot, Update } from '@prisma/client';
import { Snapshot, Update } from '@prisma/client';
import { chunk } from 'lodash-es';
import { defer, retry } from 'rxjs';
import {
@@ -16,15 +16,16 @@ import {
  transact,
} from 'yjs';

import type { EventPayload } from '../../fundamentals';
import {
  Cache,
  CallTimer,
  Config,
  EventEmitter,
  type EventPayload,
  mergeUpdatesInApplyWay as jwstMergeUpdates,
  metrics,
  OnEvent,
  PrismaService,
} from '../../fundamentals';

function compare(yBinary: Buffer, jwstBinary: Buffer, strict = false): boolean {
@@ -55,16 +56,6 @@ export function isEmptyBuffer(buf: Buffer): boolean {
const MAX_SEQ_NUM = 0x3fffffff; // u31
const UPDATES_QUEUE_CACHE_KEY = 'doc:manager:updates';

interface DocResponse {
  doc: Doc;
  timestamp: number;
}

interface BinaryResponse {
  binary: Buffer;
  timestamp: number;
}

/**
 * Since we can't directly save all client updates into database, in which way the database will overload,
 * we need to buffer the updates and merge them to reduce db write.
@@ -81,7 +72,7 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
  private busy = false;

  constructor(
    private readonly db: PrismaClient,
    private readonly db: PrismaService,
    private readonly config: Config,
    private readonly cache: Cache,
    private readonly event: EventEmitter
@@ -239,12 +230,12 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
    update: Buffer,
    retryTimes = 10
  ) {
    const timestamp = await new Promise<number>((resolve, reject) => {
    await new Promise<void>((resolve, reject) => {
      defer(async () => {
        const seq = await this.getUpdateSeq(workspaceId, guid);
        const { createdAt } = await this.db.update.create({
        await this.db.update.create({
          select: {
            createdAt: true,
            seq: true,
          },
          data: {
            workspaceId,
@@ -253,27 +244,23 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
            blob: update,
          },
        });

        return createdAt.getTime();
      })
        .pipe(retry(retryTimes)) // retry until seq num not conflict
        .subscribe({
          next: timestamp => {
          next: () => {
            this.logger.debug(
              `pushed 1 update for ${guid} in workspace ${workspaceId}`
            );
            resolve(timestamp);
            resolve();
          },
          error: e => {
            this.logger.error('Failed to push updates', e);
            reject(new Error('Failed to push update'));
          },
        });
    }).then(() => {
      return this.updateCachedUpdatesCount(workspaceId, guid, 1);
    });

    await this.updateCachedUpdatesCount(workspaceId, guid, 1);

    return timestamp;
  }

  async batchPush(
@@ -282,124 +269,56 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
    updates: Buffer[],
    retryTimes = 10
  ) {
    const timestamp = await new Promise<number>((resolve, reject) => {
    await new Promise<void>((resolve, reject) => {
      defer(async () => {
        const lastSeq = await this.getUpdateSeq(
          workspaceId,
          guid,
          updates.length
        );
        const now = Date.now();
        let timestamp = now;
        const seq = await this.getUpdateSeq(workspaceId, guid, updates.length);
        let turn = 0;
        const batchCount = 10;
        for (const batch of chunk(updates, batchCount)) {
          await this.db.update.createMany({
            data: batch.map((update, i) => {
              const subSeq = turn * batchCount + i + 1;
            data: batch.map((update, i) => ({
              workspaceId,
              id: guid,
              // `seq` is the last seq num of the batch
              // example for 11 batched updates, start from seq num 20
              //   seq for first update in the batch should be:
              //   31 - 11 + subSeq(0 * 10 + 0 + 1) = 21
              //   ^ last seq num   ^ updates.length   ^ turn   ^ batchCount   ^ i
              const seq = lastSeq - updates.length + subSeq;
              const createdAt = now + subSeq;
              timestamp = Math.max(timestamp, createdAt);

              return {
                workspaceId,
                id: guid,
                blob: update,
                seq,
                createdAt: new Date(createdAt), // make sure the updates can be ordered by create time
              };
            }),
              //   31 - 11 + 0 * 10 + 0 + 1 = 21
              //   ^ last seq num   ^ updates.length   ^ turn   ^ batchCount   ^ i
              seq: seq - updates.length + turn * batchCount + i + 1,
              blob: update,
            })),
          });
          turn++;
        }

        return timestamp;
      })
        .pipe(retry(retryTimes)) // retry until seq num not conflict
        .subscribe({
          next: timestamp => {
          next: () => {
            this.logger.debug(
              `pushed ${updates.length} updates for ${guid} in workspace ${workspaceId}`
            );
            resolve(timestamp);
            resolve();
          },
          error: e => {
            this.logger.error('Failed to push updates', e);
            reject(new Error('Failed to push update'));
          },
        });
    }).then(() => {
      return this.updateCachedUpdatesCount(workspaceId, guid, updates.length);
    });
    await this.updateCachedUpdatesCount(workspaceId, guid, updates.length);

    return timestamp;
  }

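Note (illustrative, not part of the diff): the seq arithmetic in batchPush above is easier to check with concrete numbers, reusing the example in its own comment (11 updates whose reservation ends at seq 31, written in chunks of 10).

// Sketch only: isolates the per-update seq formula from batchPush.
function seqForUpdate(lastSeq: number, total: number, turn: number, batchCount: number, i: number) {
  return lastSeq - total + turn * batchCount + i + 1;
}

// 11 updates, reservation ends at 31, so they occupy 21..31:
seqForUpdate(31, 11, 0, 10, 0); // 21 — first update of the first chunk
seqForUpdate(31, 11, 0, 10, 9); // 30 — last update of the first chunk of 10
seqForUpdate(31, 11, 1, 10, 0); // 31 — the single update in the second chunk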
  /**
   * Get latest timestamp of all docs in the workspace.
   */
  @CallTimer('doc', 'get_doc_timestamps')
  async getDocTimestamps(workspaceId: string, after: number | undefined = 0) {
    const snapshots = await this.db.snapshot.findMany({
      where: {
        workspaceId,
        updatedAt: { gt: new Date(after) },
      },
      select: { id: true, updatedAt: true },
    });

    const updates = await this.db.update.groupBy({
      where: {
        workspaceId,
        createdAt: { gt: new Date(after) },
      },
      by: ['id'],
      _max: { createdAt: true },
    });

    const result: Record<string, number> = {};

    snapshots.forEach(s => {
      result[s.id] = s.updatedAt.getTime();
    });

    updates.forEach(u => {
      if (u._max.createdAt) {
        result[u.id] = u._max.createdAt.getTime();
      }
    });

    return result;
  }

  /**
   * get the latest doc with all update applied.
   */
  async get(workspaceId: string, guid: string): Promise<DocResponse | null> {
  async get(workspaceId: string, guid: string): Promise<Doc | null> {
    const result = await this._get(workspaceId, guid);
    if (result) {
      if ('doc' in result) {
        return result;
      } else {
        const doc = await this.recoverDoc(result.binary);

        return {
          doc,
          timestamp: result.timestamp,
        };
        return result.doc;
      } else if ('snapshot' in result) {
        return this.recoverDoc(result.snapshot);
      }
    }

@@ -409,19 +328,13 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
  /**
   * get the latest doc binary with all update applied.
   */
  async getBinary(
    workspaceId: string,
    guid: string
  ): Promise<BinaryResponse | null> {
  async getBinary(workspaceId: string, guid: string): Promise<Buffer | null> {
    const result = await this._get(workspaceId, guid);
    if (result) {
      if ('doc' in result) {
        return {
          binary: Buffer.from(encodeStateAsUpdate(result.doc)),
          timestamp: result.timestamp,
        };
      } else {
        return result;
        return Buffer.from(encodeStateAsUpdate(result.doc));
      } else if ('snapshot' in result) {
        return result.snapshot;
      }
    }

@@ -431,27 +344,16 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
  /**
   * get the latest doc state vector with all update applied.
   */
  async getDocState(
    workspaceId: string,
    guid: string
  ): Promise<BinaryResponse | null> {
  async getState(workspaceId: string, guid: string): Promise<Buffer | null> {
    const snapshot = await this.getSnapshot(workspaceId, guid);
    const updates = await this.getUpdates(workspaceId, guid);

    if (updates.length) {
      const { doc, timestamp } = await this.squash(snapshot, updates);
      return {
        binary: Buffer.from(encodeStateVector(doc)),
        timestamp,
      };
      const doc = await this.squash(snapshot, updates);
      return Buffer.from(encodeStateVector(doc));
    }

    return snapshot?.state
      ? {
          binary: snapshot.state,
          timestamp: snapshot.updatedAt.getTime(),
        }
      : null;
    return snapshot ? snapshot.state : null;
  }

  /**
@@ -619,17 +521,17 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
  private async _get(
    workspaceId: string,
    guid: string
  ): Promise<DocResponse | BinaryResponse | null> {
  ): Promise<{ doc: Doc } | { snapshot: Buffer } | null> {
    const snapshot = await this.getSnapshot(workspaceId, guid);
    const updates = await this.getUpdates(workspaceId, guid);

    if (updates.length) {
      return this.squash(snapshot, updates);
      return {
        doc: await this.squash(snapshot, updates),
      };
    }

    return snapshot
      ? { binary: snapshot.blob, timestamp: snapshot.updatedAt.getTime() }
      : null;
    return snapshot ? { snapshot: snapshot.blob } : null;
  }

  /**
@@ -637,10 +539,7 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
   * and delete the updates records at the same time.
   */
  @CallTimer('doc', 'squash')
  private async squash(
    snapshot: Snapshot | null,
    updates: Update[]
  ): Promise<DocResponse> {
  private async squash(snapshot: Snapshot | null, updates: Update[]) {
    if (!updates.length) {
      throw new Error('No updates to squash');
    }
@@ -699,7 +598,7 @@ export class DocManager implements OnModuleInit, OnModuleDestroy {
      await this.updateCachedUpdatesCount(workspaceId, id, -count);
    }

    return { doc, timestamp: last.createdAt.getTime() };
    return doc;
  }

  private async getUpdateSeq(workspaceId: string, guid: string, batch = 1) {

@@ -1,4 +1,5 @@
import { PrismaTransaction } from '../../fundamentals';
import { PrismaClient } from '@prisma/client';

import { Feature, FeatureSchema, FeatureType } from './types';

class FeatureConfig {
@@ -54,35 +55,10 @@ export class UnlimitedWorkspaceFeatureConfig extends FeatureConfig {
  }
}

export class UnlimitedCopilotFeatureConfig extends FeatureConfig {
  override config!: Feature & { feature: FeatureType.UnlimitedCopilot };

  constructor(data: any) {
    super(data);

    if (this.config.feature !== FeatureType.UnlimitedCopilot) {
      throw new Error('Invalid feature config: type is not AIEarlyAccess');
    }
  }
}
export class AIEarlyAccessFeatureConfig extends FeatureConfig {
  override config!: Feature & { feature: FeatureType.AIEarlyAccess };

  constructor(data: any) {
    super(data);

    if (this.config.feature !== FeatureType.AIEarlyAccess) {
      throw new Error('Invalid feature config: type is not AIEarlyAccess');
    }
  }
}

const FeatureConfigMap = {
  [FeatureType.Copilot]: CopilotFeatureConfig,
  [FeatureType.EarlyAccess]: EarlyAccessFeatureConfig,
  [FeatureType.AIEarlyAccess]: AIEarlyAccessFeatureConfig,
  [FeatureType.UnlimitedWorkspace]: UnlimitedWorkspaceFeatureConfig,
  [FeatureType.UnlimitedCopilot]: UnlimitedCopilotFeatureConfig,
};

export type FeatureConfigType<F extends FeatureType> = InstanceType<
@@ -91,7 +67,7 @@ export type FeatureConfigType<F extends FeatureType> = InstanceType<

const FeatureCache = new Map<number, FeatureConfigType<FeatureType>>();

export async function getFeature(prisma: PrismaTransaction, featureId: number) {
export async function getFeature(prisma: PrismaClient, featureId: number) {
  const cachedQuota = FeatureCache.get(featureId);

  if (cachedQuota) {

@@ -1,6 +1,6 @@
import { Module } from '@nestjs/common';

import { EarlyAccessType, FeatureManagementService } from './management';
import { FeatureManagementService } from './management';
import { FeatureService } from './service';

/**
@@ -15,11 +15,6 @@ import { FeatureService } from './service';
})
export class FeatureModule {}

export {
  type CommonFeature,
  commonFeatureSchema,
  FeatureKind,
  Features,
  FeatureType,
} from './types';
export { EarlyAccessType, FeatureManagementService, FeatureService };
export { type CommonFeature, commonFeatureSchema } from './types';
export { FeatureKind, Features, FeatureType } from './types';
export { FeatureManagementService, FeatureService };
@@ -1,24 +1,18 @@
import { Injectable, Logger } from '@nestjs/common';
import { PrismaClient } from '@prisma/client';

import { Config } from '../../fundamentals';
import { Config, PrismaService } from '../../fundamentals';
import { FeatureService } from './service';
import { FeatureType } from './types';

const STAFF = ['@toeverything.info'];

export enum EarlyAccessType {
  App = 'app',
  AI = 'ai',
}

@Injectable()
export class FeatureManagementService {
  protected logger = new Logger(FeatureManagementService.name);

  constructor(
    private readonly feature: FeatureService,
    private readonly prisma: PrismaClient,
    private readonly prisma: PrismaService,
    private readonly config: Config
  ) {}

@@ -35,60 +29,33 @@ export class FeatureManagementService {
  }

  // ======== Early Access ========
  async addEarlyAccess(
    userId: string,
    type: EarlyAccessType = EarlyAccessType.App
  ) {

  async addEarlyAccess(userId: string) {
    return this.feature.addUserFeature(
      userId,
      type === EarlyAccessType.App
        ? FeatureType.EarlyAccess
        : FeatureType.AIEarlyAccess,
      FeatureType.EarlyAccess,
      2,
      'Early access user'
    );
  }

  async removeEarlyAccess(
    userId: string,
    type: EarlyAccessType = EarlyAccessType.App
  ) {
    return this.feature.removeUserFeature(
      userId,
      type === EarlyAccessType.App
        ? FeatureType.EarlyAccess
        : FeatureType.AIEarlyAccess
    );
  async removeEarlyAccess(userId: string) {
    return this.feature.removeUserFeature(userId, FeatureType.EarlyAccess);
  }

  async listEarlyAccess(type: EarlyAccessType = EarlyAccessType.App) {
    return this.feature.listFeatureUsers(
      type === EarlyAccessType.App
        ? FeatureType.EarlyAccess
        : FeatureType.AIEarlyAccess
    );
  async listEarlyAccess() {
    return this.feature.listFeatureUsers(FeatureType.EarlyAccess);
  }

  async isEarlyAccessUser(
    email: string,
    type: EarlyAccessType = EarlyAccessType.App
  ) {
  async isEarlyAccessUser(email: string) {
    const user = await this.prisma.user.findFirst({
      where: {
        email: {
          equals: email,
          mode: 'insensitive',
        },
        email,
      },
    });

    if (user) {
      const canEarlyAccess = await this.feature
        .hasUserFeature(
          user.id,
          type === EarlyAccessType.App
            ? FeatureType.EarlyAccess
            : FeatureType.AIEarlyAccess
        )
        .hasUserFeature(user.id, FeatureType.EarlyAccess)
        .catch(() => false);

      return canEarlyAccess;
@@ -97,52 +64,31 @@ export class FeatureManagementService {
  }

  /// check early access by email
  async canEarlyAccess(
    email: string,
    type: EarlyAccessType = EarlyAccessType.App
  ) {
  async canEarlyAccess(email: string) {
    if (this.config.featureFlags.earlyAccessPreview && !this.isStaff(email)) {
      return this.isEarlyAccessUser(email, type);
      return this.isEarlyAccessUser(email);
    } else {
      return true;
    }
  }

  // ======== CopilotFeature ========
  async addCopilot(userId: string, reason = 'Copilot plan user') {
    return this.feature.addUserFeature(
      userId,
      FeatureType.UnlimitedCopilot,
      reason
    );
  }

  async removeCopilot(userId: string) {
    return this.feature.removeUserFeature(userId, FeatureType.UnlimitedCopilot);
  }

  async isCopilotUser(userId: string) {
    return await this.feature.hasUserFeature(
      userId,
      FeatureType.UnlimitedCopilot
    );
  }

  // ======== User Feature ========
  async getActivatedUserFeatures(userId: string): Promise<FeatureType[]> {
    const features = await this.feature.getActivatedUserFeatures(userId);
    return features.map(f => f.feature.name);
  }

  // ======== Workspace Feature ========
  async addWorkspaceFeatures(
    workspaceId: string,
    feature: FeatureType,
    version?: number,
    reason?: string
  ) {
    const latestVersions = await this.feature.getFeaturesVersion();
    // use latest version if not specified
    const latestVersion = version || latestVersions[feature];
    if (!Number.isInteger(latestVersion)) {
      throw new Error(`Version of feature ${feature} not found`);
    }
    return this.feature.addWorkspaceFeature(
      workspaceId,
      feature,
      latestVersion,
      reason || 'add feature by api'
    );
  }

@@ -1,13 +1,41 @@
import { Injectable } from '@nestjs/common';
import { PrismaClient } from '@prisma/client';

import { PrismaService } from '../../fundamentals';
import { UserType } from '../users/types';
import { WorkspaceType } from '../workspaces/types';
import { FeatureConfigType, getFeature } from './feature';
import { FeatureKind, FeatureType } from './types';

@Injectable()
export class FeatureService {
  constructor(private readonly prisma: PrismaClient) {}
  constructor(private readonly prisma: PrismaService) {}

  async getFeaturesVersion() {
    const features = await this.prisma.features.findMany({
      where: {
        type: FeatureKind.Feature,
      },
      select: {
        feature: true,
        version: true,
      },
    });
    return features.reduce(
      (acc, feature) => {
        // only keep the latest version
        if (acc[feature.feature]) {
          if (acc[feature.feature] < feature.version) {
            acc[feature.feature] = feature.version;
          }
        } else {
          acc[feature.feature] = feature.version;
        }
        return acc;
      },
      {} as Record<string, number>
    );
  }

  async getFeature<F extends FeatureType>(
    feature: F
  ): Promise<FeatureConfigType<F> | undefined> {
@@ -32,6 +60,7 @@ export class FeatureService {
  async addUserFeature(
    userId: string,
    feature: FeatureType,
    version: number,
    reason: string,
    expiredAt?: Date | string
  ) {
@@ -49,30 +78,29 @@ export class FeatureService {
        createdAt: 'desc',
      },
    });

    if (latestFlag) {
      return latestFlag.id;
    } else {
      const featureId = await tx.features
        .findFirst({
          where: { feature, type: FeatureKind.Feature },
          orderBy: { version: 'desc' },
          select: { id: true },
        })
        .then(r => r?.id);

      if (!featureId) {
        throw new Error(`Feature ${feature} not found`);
      }

      return tx.userFeatures
        .create({
          data: {
            reason,
            expiredAt,
            activated: true,
            userId,
            featureId,
            user: {
              connect: { id: userId },
            },
            feature: {
              connect: {
                feature_version: {
                  feature,
                  version,
                },
                type: FeatureKind.Feature,
              },
            },
          },
        })
        .then(r => r.id);
@@ -106,8 +134,10 @@ export class FeatureService {
  async getUserFeatures(userId: string) {
    const features = await this.prisma.userFeatures.findMany({
      where: {
        userId,
        feature: { type: FeatureKind.Feature },
        user: { id: userId },
        feature: {
          type: FeatureKind.Feature,
        },
      },
      select: {
        activated: true,
@@ -128,34 +158,7 @@ export class FeatureService {
    return configs.filter(feature => !!feature.feature);
  }

  async getActivatedUserFeatures(userId: string) {
    const features = await this.prisma.userFeatures.findMany({
      where: {
        userId,
        feature: { type: FeatureKind.Feature },
        activated: true,
        OR: [{ expiredAt: null }, { expiredAt: { gt: new Date() } }],
      },
      select: {
        activated: true,
        reason: true,
        createdAt: true,
        expiredAt: true,
        featureId: true,
      },
    });

    const configs = await Promise.all(
      features.map(async feature => ({
        ...feature,
        feature: await getFeature(this.prisma, feature.featureId),
      }))
    );

    return configs.filter(feature => !!feature.feature);
  }

  async listFeatureUsers(feature: FeatureType) {
  async listFeatureUsers(feature: FeatureType): Promise<UserType[]> {
    return this.prisma.userFeatures
      .findMany({
        where: {
@@ -172,7 +175,7 @@ export class FeatureService {
          name: true,
          avatarUrl: true,
          email: true,
          emailVerifiedAt: true,
          emailVerified: true,
          createdAt: true,
        },
      },
@@ -191,7 +194,6 @@ export class FeatureService {
        feature,
        type: FeatureKind.Feature,
      },
      OR: [{ expiredAt: null }, { expiredAt: { gt: new Date() } }],
    },
  })
  .then(count => count > 0);
@@ -202,6 +204,7 @@ export class FeatureService {
  async addWorkspaceFeature(
    workspaceId: string,
    feature: FeatureType,
    version: number,
    reason: string,
    expiredAt?: Date | string
  ) {
@@ -222,27 +225,26 @@ export class FeatureService {
    if (latestFlag) {
      return latestFlag.id;
    } else {
      // use latest version of feature
      const featureId = await tx.features
        .findFirst({
          where: { feature, type: FeatureKind.Feature },
          select: { id: true },
          orderBy: { version: 'desc' },
        })
        .then(r => r?.id);

      if (!featureId) {
        throw new Error(`Feature ${feature} not found`);
      }

      return tx.workspaceFeatures
        .create({
          data: {
            reason,
            expiredAt,
            activated: true,
            workspaceId,
            featureId,
            workspace: {
              connect: { id: workspaceId },
            },
            feature: {
              connect: {
                feature_version: {
                  feature,
                  version,
                },
                type: FeatureKind.Feature,
              },
            },
          },
        })
        .then(r => r.id);

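Note (illustrative, not part of the diff): the reduce in getFeaturesVersion above keeps only the highest version seen for each feature name. A minimal sketch with made-up rows:

// Sketch only: rows mimic prisma.features.findMany({ select: { feature: true, version: true } }).
const rows = [
  { feature: 'early_access', version: 1 },
  { feature: 'early_access', version: 2 },
  { feature: 'unlimited_workspace', version: 1 },
];

const latest = rows.reduce(
  (acc, row) => {
    if (!acc[row.feature] || acc[row.feature] < row.version) {
      acc[row.feature] = row.version;
    }
    return acc;
  },
  {} as Record<string, number>
);
// latest == { early_access: 2, unlimited_workspace: 1 }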
@@ -1,12 +1,8 @@
import { registerEnumType } from '@nestjs/graphql';

export enum FeatureType {
  // user feature
  EarlyAccess = 'early_access',
  AIEarlyAccess = 'ai_early_access',
  UnlimitedCopilot = 'unlimited_copilot',
  // workspace feature
  Copilot = 'copilot',
  EarlyAccess = 'early_access',
  UnlimitedWorkspace = 'unlimited_workspace',
}
Some files were not shown because too many files have changed in this diff.