Compare commits

..

85 Commits

Author SHA1 Message Date
Peng Xiao
a143379161 fix(electron): remove cors headers hack (#5581) 2024-01-12 16:49:16 +08:00
regischen
8e7dedfe82 feat: bump blocksuite (#5575) 2024-01-12 12:43:56 +08:00
EYHN
d25a8547d0 refactor(core): move page list to core (#5556) 2024-01-12 12:43:45 +08:00
Peng Xiao
4d16229fea chore(core): remove affine/cmdk package (#5552)
patch cmdk based on https://github.com/pengx17/cmdk/tree/patch-1
fix https://github.com/toeverything/AFFiNE/issues/5548
2024-01-12 12:43:35 +08:00
EYHN
99371be7e8 fix(core): workspace not found after import (#5571)
close TOV-393
2024-01-12 11:05:59 +08:00
李华桥
34ed8dd7a5 Merge branch 'canary' into stable 2024-01-10 10:59:28 +08:00
李华桥
39b7b671b1 Merge branch 'canary' into stable 2024-01-09 19:44:52 +08:00
李华桥
207b56d5af Merge branch 'canary' into stable 2024-01-09 17:16:17 +08:00
DarkSky
9e94e7195b fix: use absolute path in gql client (#5454) (#5462) 2023-12-29 16:02:29 +08:00
Peng Xiao
de951c8779 fix(core): enable page history for beta/stable (#5415) 2023-12-27 14:39:59 +08:00
EYHN
fd37026ca5 fix(component): fix font display on safari (#5393)
before

![CleanShot 2023-12-25 at 13.09.26.png](https://graphite-user-uploaded-assets-prod.s3.amazonaws.com/g3jz87HxbjOJpXV3FPT7/4fe08951-67bb-4050-ba14-94391db1cac1.png)

after

![CleanShot 2023-12-25 at 13.09.13.png](https://graphite-user-uploaded-assets-prod.s3.amazonaws.com/g3jz87HxbjOJpXV3FPT7/fbfb17ec-b871-4746-9d3c-d24f850ecca1.png)
2023-12-27 14:39:50 +08:00
JimmFly
4fd5812a89 fix(core): avatars are not aligned (#5404) 2023-12-26 20:43:08 +08:00
Peng Xiao
d01e987ecc fix(core): trash page footer display issue (#5402)
Before

![image.png](https://graphite-user-uploaded-assets-prod.s3.amazonaws.com/T2klNLEk0wxLh4NRDzhk/eb5e5b18-c4a2-469b-8763-be34c39ba736.png)

After

![image.png](https://graphite-user-uploaded-assets-prod.s3.amazonaws.com/T2klNLEk0wxLh4NRDzhk/7b3ef339-0cb5-44fe-9e75-cec0e97d28b7.png)
2023-12-26 20:42:54 +08:00
Joooye_34
d87c218c0b fix(electron): set stable base url to app.affine.pro (#5401)
close TOV-282
2023-12-26 20:42:41 +08:00
Peng Xiao
a5bf5cc244 fix(core): about setting blink issue (#5399) 2023-12-26 20:42:33 +08:00
Peng Xiao
16bcd6e76b fix(core): workspace list blink issue on open (#5400) 2023-12-26 20:42:19 +08:00
JimmFly
2e2ace8472 chore(core): add background color to questionnaire (#5396) 2023-12-26 20:42:06 +08:00
Cats Juice
37cff8fe8d fix(core): correct title of onboarding article-2 (#5387) 2023-12-26 20:41:58 +08:00
DarkSky
70ab3b4916 fix: use prefix in electron to prevent formdata bug (#5395) 2023-12-26 20:41:47 +08:00
EYHN
f42ba54578 fix(core): fix flickering workspace list (#5391) 2023-12-26 20:41:36 +08:00
EYHN
a67c8181fc fix(workspace): fix svg file with xml header (#5388) 2023-12-26 20:41:28 +08:00
regischen
613efbded9 feat: bump blocksuite (#5386) 2023-12-26 20:41:18 +08:00
李华桥
549419d102 Merge branch 'canary' into stable 2023-12-22 16:29:51 +08:00
李华桥
21c42f8771 Merge branch 'canary' into stable 2023-12-22 01:29:30 +08:00
李华桥
9012adda7a Merge branch 'canary' into stable 2023-12-21 18:42:56 +08:00
李华桥
fb442e9055 Merge branch 'canary' into stable 2023-12-21 16:22:57 +08:00
李华桥
a231474dd2 Merge branch 'canary' into stable 2023-12-21 14:26:01 +08:00
李华桥
833b42000b Merge branch 'canary' into stable 2023-12-20 16:36:44 +08:00
李华桥
7690c48710 Merge branch 'canary' into stable 2023-12-20 16:32:36 +08:00
DarkSky
579828a700 fix: use secure websocket (#5297) 2023-12-13 22:28:04 +08:00
DarkSky
746db2ccfc feat: only follow serverUrlPrefix at redirect to client (#5295) 2023-12-13 20:37:20 +08:00
李华桥
eff344a9c1 Merge branch 'canary' into stable 2023-12-12 16:45:47 +08:00
李华桥
c89ebab596 Merge branch 'canary' into stable 2023-12-12 11:04:33 +08:00
liuyi
62f4421b7c fix(server): avoid updates persist forever (#5258) 2023-12-11 17:42:25 +08:00
李华桥
42383dbd29 Merge branch 'canary' into stable 2023-12-10 21:04:15 +08:00
李华桥
120e7397ba Merge branch 'canary' into stable 2023-12-01 16:12:17 +08:00
李华桥
24123ad01c Revert "Revert "Merge remote-tracking branch 'origin/canary' into stable""
This reverts commit 89197bacef.
2023-12-01 13:29:43 +08:00
李华桥
ad50320391 v0.10.3 2023-12-01 12:52:15 +08:00
李华桥
eb21a60dda v0.10.3-beta.7 2023-12-01 12:12:20 +08:00
Joooye_34
c0e3be2d40 fix(core): rerender error boundary when route change and improve sentry report (#5147) 2023-12-01 04:04:44 +00:00
李华桥
09d3b72358 v0.10.3-beta.6 2023-11-30 23:02:26 +08:00
Joooye_34
246e16c6c0 fix(infra): compatibility logic follow blocksuite (#5143) 2023-11-30 23:01:38 +08:00
李华桥
dc279d062b v0.10.3-beta.5 2023-11-30 16:49:55 +08:00
Joooye_34
47d5f9e1c2 fix(infra): use blocksuite api to check compatibility (#5137) 2023-11-30 08:48:13 +00:00
Joooye_34
a226eb8d5f fix(core): expose catched editor load error (#5133) 2023-11-29 20:31:35 +08:00
Joooye_34
908c4e1a6f ci: add sentry env when frontend assets build (#5131) 2023-11-29 10:03:49 +00:00
李华桥
1d0bcc80a0 v0.10.3-beta.4 2023-11-29 16:14:06 +08:00
Joooye_34
50010bd824 fix(core): implement editor timeout and report error from boundary (#5105) 2023-11-29 08:10:38 +00:00
liuyi
c0ede1326d fix(server): wrong OTEL config (#5084) 2023-11-29 11:19:13 +08:00
李华桥
89197bacef Revert "Merge remote-tracking branch 'origin/canary' into stable"
This reverts commit 992ed89a89, reversing
changes made to d272d7922d.
2023-11-29 11:18:45 +08:00
李华桥
f97d323ab5 Revert "Revert "refactor(server): standarderlize metrics and trace with OTEL (#5054)""
This reverts commit c1cd1713b9.
2023-11-29 11:07:28 +08:00
EYHN
2acb219dcc fix(workspace): filter awareness from other workspace (#5093) 2023-11-28 16:47:45 +08:00
LongYinan
992ed89a89 Merge remote-tracking branch 'origin/canary' into stable 2023-11-28 15:12:52 +08:00
李华桥
d272d7922d v0.10.3-beta.2 2023-11-25 23:50:40 +08:00
李华桥
c1cd1713b9 Revert "refactor(server): standarderlize metrics and trace with OTEL (#5054)"
This reverts commit 91efca107a.
2023-11-25 23:50:39 +08:00
李华桥
b20e91bee0 v0.10.3-beta.1 2023-11-25 14:14:40 +08:00
李华桥
9a4e5ec8c3 Merge branch 'canary' into stable 2023-11-25 14:14:14 +08:00
李华桥
2019838ae7 v0.10.3-beta.0 2023-11-24 11:39:23 +08:00
李华桥
30ff25f400 Merge branch 'canary' into stable 2023-11-23 23:40:32 +08:00
李华桥
e766208c18 chore: reset merge wrong codes 2023-11-23 22:53:06 +08:00
李华桥
8742f28148 Merge branch 'canary' into stable 2023-11-23 21:31:42 +08:00
LongYinan
cd291bb60e build: remove useless source-map-loader to speedup webpack (#4910) 2023-11-20 10:52:28 +08:00
LongYinan
62c0efcfd1 fix(core): handle the getSession network error properly (#4909)
If the network is offline or an API error occurs, the `session` returned by the `useSession` hook will be null, so we can't assume it is non-null (see the sketch after this entry).

Follow-up changes needed:
1. Add a page in the ErrorBoundary that lets the user refetch the session.
2. `SessionProvider` stops polling for a new session once the session is null; we need a way to pull a new session again when the network comes back or the user clicks the refetch button.
2023-11-17 16:50:48 +08:00
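A minimal TypeScript sketch of the null-safe handling described in the entry above. The `Session` shape and the `refetchSession` callback are illustrative assumptions, not the repository's actual types or API:

```ts
// Hypothetical session shape for illustration; the real one comes from the auth client.
interface Session {
  user?: { name?: string | null };
}

// Never assume the session is non-null: it is null while offline or after an API error.
export function describeSession(
  session: Session | null,
  refetchSession: () => void
): string {
  if (!session?.user) {
    // Offer the user a way to refetch instead of crashing the UI.
    refetchSession();
    return 'Session unavailable - check your network and retry';
  }
  return `Signed in as ${session.user.name ?? 'unknown user'}`;
}
```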
liuyi
87248b3337 fix(server): all viewers can share public link (#4968) 2023-11-17 12:34:15 +08:00
Joooye_34
00c940f7df chore: bump affine version to 0.10.2 (#4959) 2023-11-16 15:48:37 +08:00
Flrande
931b459fbd chore: bump blocksuite (#4958) 2023-11-16 14:27:39 +08:00
LongYinan
51e71f4a0a ci: prevent error if rust build is cached by nx (#4951)
If the Rust build was cached by nx, only the output file will be present. The chmod command fails in that case, e.g.: https://github.com/toeverything/AFFiNE/actions/runs/6874496337/job/18697360212
2023-11-16 10:31:51 +08:00
Peng Xiao
9b631f2328 fix(infra): page id compat fix for page ids in workspace.meta (#4950)
Since we strip `page:` from the keys of the workspace doc's `spaces`, we should strip the prefix in `meta.pages` as well (see the sketch after this entry).
2023-11-15 17:36:08 +08:00
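A hedged TypeScript sketch of the prefix normalization described in the entry above; the `PageMeta` shape and function name are assumptions for illustration, not the actual AFFiNE types:

```ts
// Hypothetical metadata shape: the workspace meta keeps a list of page entries.
interface PageMeta {
  id: string;
}

// Keep `meta.pages` consistent with the un-prefixed keys used in `doc.spaces`.
export function stripPagePrefix(pages: PageMeta[]): PageMeta[] {
  return pages.map(page =>
    page.id.startsWith('page:')
      ? { ...page, id: page.id.slice('page:'.length) }
      : page
  );
}
```

For example, `stripPagePrefix([{ id: 'page:abc' }, { id: 'def' }])` yields entries with ids `abc` and `def`.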
LongYinan
01f481a9b6 ci: only disable postinstall on macOS in nightly desktop build (#4938) 2023-11-14 23:00:30 +08:00
Joooye_34
0177ab5c87 fix(infra): workspace migration without blockVersions (#4936) 2023-11-14 14:38:11 +01:00
Peng Xiao
4db35d341c perf(component): use png instead of svg for rendering noise svg (#4935) 2023-11-14 11:52:51 +00:00
DarkSky
3c4a803c97 fix: change password token check (#4934) (#4932) 2023-11-14 11:15:54 +00:00
LongYinan
05154dc7ca ci: disable postinstall in nightly desktop build (#4930)
Should be part of https://github.com/toeverything/AFFiNE/pull/4885
2023-11-14 14:13:55 +08:00
Peng Xiao
c90b477f60 fix(core): change server url of stable to insider (#4902) (#4926) 2023-11-14 12:05:52 +08:00
李华桥
6f18ddbe85 v0.10.1 2023-11-13 19:49:26 +08:00
LongYinan
dde779a71d test(e2e): add subdoc migration test (#4921)
test(e2e): add subdoc migration test

fix: remove .only
2023-11-13 18:00:40 +08:00
Peng Xiao
bd9f66fbc7 fix(infra): compatibility fix for space prefix (#4912)
It seems there are some cases where [this upstream PR](https://github.com/toeverything/blocksuite/pull/4747) causes data loss.

For historical reasons, a page id can be different from its doc id.
This might be caused by the subdoc migration in 0714c12703/packages/common/infra/src/blocksuite/index.ts (L538-L540) (not 100% sure that every white-screen issue comes from it).

In version 0.10, page ids in `spaces` no longer carry the `space:` prefix.
The data flow for fetching a doc's updates is (see the sketch after this entry):
- page id in `meta.pages` -> find `${page-id}` in `doc.spaces` -> `doc` -> `doc.guid`;
  if the doc is not found in `doc.spaces`, a new doc is created whose `doc.guid` equals the page id
- because of the guid logic change, a doc that was previously prefixed with `space:` will not be found in `doc.spaces`
- when fetching the rows of this doc using the doc id === page id,
  the result is EMPTY since no updates are associated with that page id

The fix in this PR patches the `spaces` field of the root doc so that, after 0.10, the page doc can still be found in the `spaces` map. It applies to both the idb & sqlite datasources.

Special thanks to @lawvs's db file for the investigation!
2023-11-13 17:57:56 +08:00
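A hedged TypeScript sketch of the compatibility patch described in the entry above, written against the Yjs API; the helper name and how AFFiNE wires it into the idb/sqlite datasources are assumptions for illustration:

```ts
import * as Y from 'yjs';

// Re-link legacy `space:`-prefixed subdocs so that an un-prefixed page id
// still resolves to the original doc instead of a freshly created empty one.
export function patchLegacySpaceKeys(rootDoc: Y.Doc, pageIds: string[]): void {
  const spaces = rootDoc.getMap<Y.Doc>('spaces');
  for (const pageId of pageIds) {
    const legacyKey = `space:${pageId}`;
    if (!spaces.has(pageId) && spaces.has(legacyKey)) {
      const legacyDoc = spaces.get(legacyKey);
      if (legacyDoc) {
        // A subdoc instance can only live at one key, so register a second
        // instance sharing the legacy doc's guid under the un-prefixed key;
        // updates are fetched by guid, so both keys resolve to the same data.
        spaces.set(pageId, new Y.Doc({ guid: legacyDoc.guid }));
      }
    }
  }
}
```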
liuyi
92f1f40bfa fix(server): wrap updates applying in a transaction (#4922) 2023-11-13 08:49:30 +00:00
LongYinan
48dc1049b3 Merge pull request #4913 from toeverything/darksky/cleanup-depolyment
chore: cleanup deployment
2023-11-12 11:20:02 +08:00
DarkSky
9add530370 chore: cleanup deployment 2023-11-12 11:03:25 +08:00
LongYinan
b77460d871 Merge pull request #4908 from toeverything/61/hotfix-websocket-payload
fix(server): increase server acceptable websocket payload size
2023-11-10 22:01:48 +08:00
forehalo
42db41776b fix(server): increase server acceptable websocket payload size 2023-11-10 21:31:45 +08:00
李华桥
075439c74f fix(core): change server url of stable to insider 2023-11-10 18:32:53 +08:00
Yifeng Wang
fc6c553ece chore: bump theme (#4904)
Co-authored-by: 李华桥 <joooye1991@gmail.com>
2023-11-10 15:40:38 +08:00
Joooye_34
59cb3d5df1 fix(core): change server url of stable to insider (#4902) 2023-11-10 14:50:57 +08:00
1466 changed files with 56968 additions and 63870 deletions

View File

@@ -8,11 +8,5 @@ corepack prepare yarn@stable --activate
# install dependencies
yarn install
# Build Server Dependencies
yarn workspace @affine/storage build
# Create database
yarn workspace @affine/server prisma db push
# Create user username: affine, password: affine
echo "INSERT INTO \"users\"(\"id\",\"name\",\"email\",\"email_verified\",\"created_at\",\"password\") VALUES('99f3ad04-7c9b-441e-a6db-79f73aa64db9','affine','affine@affine.pro','2024-02-26 15:54:16.974','2024-02-26 15:54:16.974+00','\$argon2id\$v=19\$m=19456,t=2,p=1\$esDS3QCHRH0Kmeh87YPm5Q\$9S+jf+xzw2Hicj6nkWltvaaaXX3dQIxAFwCfFa9o38A');" | yarn workspace @affine/server prisma db execute --stdin
yarn workspace @affine/server prisma db push

View File

@@ -21,6 +21,5 @@
}
},
"updateContentCommand": "bash ./.devcontainer/build.sh",
"postCreateCommand": "bash ./.devcontainer/setup-user.sh",
"postStartCommand": ["yarn dev", "yarn workspace @affine/server dev"]
"postCreateCommand": "bash ./.devcontainer/setup-user.sh"
}

View File

@@ -1,9 +1,7 @@
set -e
if [ -v GRAPHITE_TOKEN ];then
gt auth --token $GRAPHITE_TOKEN
fi
git fetch origin canary:canary --depth=1
git fetch
git branch canary -t origin/canary
gt init --trunk canary

View File

@@ -12,4 +12,3 @@ static
web-static
public
packages/frontend/i18n/src/i18n-generated.ts
packages/frontend/templates/edgeless-templates.gen.ts

View File

@@ -1,4 +1,4 @@
const { join } = require('node:path');
const { resolve } = require('node:path');
const createPattern = packageName => [
{
@@ -31,6 +31,22 @@ const createPattern = packageName => [
message: 'Use `useNavigateHelper` instead',
importNames: ['useNavigate'],
},
{
group: ['next-auth/react'],
message: "Import hooks from 'use-current-user.tsx'",
// useSession is type unsafe
importNames: ['useSession'],
},
{
group: ['next-auth/react'],
message: "Import hooks from 'cloud-utils.ts'",
importNames: ['signIn', 'signOut'],
},
{
group: ['yjs'],
message: 'Do not use this API because it has a bug',
importNames: ['mergeUpdates'],
},
{
group: ['@affine/env/constant'],
message:
@@ -48,7 +64,7 @@ const allPackages = [
'packages/frontend/i18n',
'packages/frontend/native',
'packages/frontend/templates',
'packages/frontend/workspace-impl',
'packages/frontend/workspace',
'packages/common/debug',
'packages/common/env',
'packages/common/infra',
@@ -88,17 +104,16 @@ const config = {
},
ecmaVersion: 'latest',
sourceType: 'module',
project: join(__dirname, 'tsconfig.eslint.json'),
project: resolve(__dirname, './tsconfig.eslint.json'),
},
plugins: [
'react',
'@typescript-eslint',
'simple-import-sort',
'sonarjs',
'import-x',
'i',
'unused-imports',
'unicorn',
'rxjs',
],
rules: {
'array-callback-return': 'error',
@@ -131,7 +146,6 @@ const config = {
'unused-imports/no-unused-imports': 'error',
'simple-import-sort/imports': 'error',
'simple-import-sort/exports': 'error',
'import-x/no-duplicates': 'error',
'@typescript-eslint/ban-ts-comment': [
'error',
{
@@ -165,6 +179,22 @@ const config = {
message: 'Use `useNavigateHelper` instead',
importNames: ['useNavigate'],
},
{
group: ['next-auth/react'],
message: "Import hooks from 'use-current-user.tsx'",
// useSession is type unsafe
importNames: ['useSession'],
},
{
group: ['next-auth/react'],
message: "Import hooks from 'cloud-utils.ts'",
importNames: ['signIn', 'signOut'],
},
{
group: ['yjs'],
message: 'Do not use this API because it has a bug',
importNames: ['mergeUpdates'],
},
],
},
],
@@ -187,7 +217,6 @@ const config = {
'unicorn/no-useless-promise-resolve-reject': 'error',
'unicorn/no-new-array': 'error',
'unicorn/new-for-builtins': 'error',
'unicorn/prefer-node-protocol': 'error',
'sonarjs/no-all-duplicated-branches': 'error',
'sonarjs/no-element-overwrite': 'error',
'sonarjs/no-empty-collection': 'error',
@@ -204,21 +233,6 @@ const config = {
'sonarjs/no-collection-size-mischeck': 'error',
'sonarjs/no-useless-catch': 'error',
'sonarjs/no-identical-functions': 'error',
'rxjs/finnish': [
'error',
{
functions: false,
methods: false,
strict: true,
types: {
'^LiveData$': true,
// some yjs classes are Observables, but they don't need to be in Finnish notation
'^Doc$': false, // yjs Doc
'^Awareness$': false, // yjs Awareness
'^UndoManager$': false, // yjs UndoManager
},
},
],
},
overrides: [
{
@@ -235,6 +249,9 @@ const config = {
},
...allPackages.map(pkg => ({
files: [`${pkg}/src/**/*.ts`, `${pkg}/src/**/*.tsx`],
parserOptions: {
project: resolve(__dirname, './tsconfig.eslint.json'),
},
rules: {
'@typescript-eslint/no-restricted-imports': [
'error',
@@ -251,7 +268,7 @@ const config = {
],
'@typescript-eslint/no-misused-promises': ['error'],
'@typescript-eslint/prefer-readonly': 'error',
'import-x/no-extraneous-dependencies': ['error'],
'i/no-extraneous-dependencies': ['error'],
'react-hooks/exhaustive-deps': [
'warn',
{

View File

@@ -7,8 +7,6 @@ body:
attributes:
value: |
Thanks for taking the time to fill out this bug report!
Check out this [link](https://github.com/toeverything/AFFiNE/blob/canary/docs/issue-triaging.md)
to learn how we manage issues and when your issue will be processed.
- type: textarea
id: what-happened
attributes:
@@ -43,14 +41,6 @@ body:
- Firefox
- Safari
- Other
- type: checkboxes
id: selfhost
attributes:
label: Are you self-hosting?
description: >
If you are self-hosting, please check the box and provide information about your setup.
options:
- label: 'Yes'
- type: textarea
id: logs
attributes:
@@ -63,3 +53,11 @@ body:
description: |
Links? References? Anything that will give us more context about the issue you are encountering!
Tip: You can attach images here
- type: checkboxes
attributes:
label: Are you willing to submit a PR?
description: >
(Optional) We encourage you to submit a [Pull Request](https://github.com/toeverything/affine/pulls) (PR) to help improve AFFiNE for everyone, especially if you have a good understanding of how to implement a fix or feature.
See the AFFiNE [Contributing Guide](https://github.com/toeverything/affine/blob/canary/CONTRIBUTING.md) to get started.
options:
- label: Yes I'd like to help by submitting a PR!

View File

@@ -37,7 +37,7 @@ runs:
echo "TARGET_CC=clang" >> "$GITHUB_ENV"
- name: Cache cargo
uses: actions/cache@v4
uses: actions/cache@v3
with:
path: |
~/.cargo/registry/index/
@@ -49,7 +49,7 @@ runs:
- name: Build
shell: bash
run: |
yarn workspace ${{ inputs.package }} nx build ${{ inputs.package }} -- --target ${{ inputs.target }} --use-napi-cross
yarn workspace ${{ inputs.package }} nx build ${{ inputs.package }} --target ${{ inputs.target }} --use-napi-cross
env:
NX_CLOUD_ACCESS_TOKEN: ${{ inputs.nx_token }}
DEBUG: 'napi:*'

View File

@@ -24,7 +24,7 @@ runs:
shell: bash
run: |
echo "GIT_SHORT_HASH=$(git rev-parse --short HEAD)" >> "$GITHUB_ENV"
- uses: azure/setup-helm@v4
- uses: azure/setup-helm@v3
- id: auth
uses: google-github-actions/auth@v2
with:

View File

@@ -15,9 +15,9 @@ const {
R2_SECRET_ACCESS_KEY,
ENABLE_CAPTCHA,
CAPTCHA_TURNSTILE_SECRET,
MAILER_SENDER,
MAILER_USER,
MAILER_PASSWORD,
OAUTH_EMAIL_SENDER,
OAUTH_EMAIL_LOGIN,
OAUTH_EMAIL_PASSWORD,
AFFINE_GOOGLE_CLIENT_ID,
AFFINE_GOOGLE_CLIENT_SECRET,
CLOUD_SQL_IAM_ACCOUNT,
@@ -65,16 +65,8 @@ const createHelmCommand = ({ isDryRun }) => {
]
: [];
const webReplicaCount = isProduction ? 3 : isBeta ? 2 : 2;
const graphqlReplicaCount = isProduction
? Number(process.env.PRODUCTION_GRAPHQL_REPLICA) || 3
: isBeta
? Number(process.env.isBeta_GRAPHQL_REPLICA) || 2
: 2;
const syncReplicaCount = isProduction
? Number(process.env.PRODUCTION_SYNC_REPLICA) || 3
: isBeta
? Number(process.env.BETA_SYNC_REPLICA) || 2
: 2;
const graphqlReplicaCount = isProduction ? 10 : isBeta ? 5 : 2;
const syncReplicaCount = isProduction ? 10 : isBeta ? 5 : 2;
const namespace = isProduction
? 'production'
: isBeta
@@ -103,17 +95,16 @@ const createHelmCommand = ({ isDryRun }) => {
`--set-string graphql.app.objectStorage.r2.accountId="${R2_ACCOUNT_ID}"`,
`--set-string graphql.app.objectStorage.r2.accessKeyId="${R2_ACCESS_KEY_ID}"`,
`--set-string graphql.app.objectStorage.r2.secretAccessKey="${R2_SECRET_ACCESS_KEY}"`,
`--set-string graphql.app.mailer.sender="${MAILER_SENDER}"`,
`--set-string graphql.app.mailer.user="${MAILER_USER}"`,
`--set-string graphql.app.mailer.password="${MAILER_PASSWORD}"`,
`--set-string graphql.app.oauth.email.sender="${OAUTH_EMAIL_SENDER}"`,
`--set-string graphql.app.oauth.email.login="${OAUTH_EMAIL_LOGIN}"`,
`--set-string graphql.app.oauth.email.password="${OAUTH_EMAIL_PASSWORD}"`,
`--set-string graphql.app.oauth.google.enabled=true`,
`--set-string graphql.app.oauth.google.clientId="${AFFINE_GOOGLE_CLIENT_ID}"`,
`--set-string graphql.app.oauth.google.clientSecret="${AFFINE_GOOGLE_CLIENT_SECRET}"`,
`--set-string graphql.app.payment.stripe.apiKey="${STRIPE_API_KEY}"`,
`--set-string graphql.app.payment.stripe.webhookKey="${STRIPE_WEBHOOK_KEY}"`,
`--set graphql.app.experimental.enableJwstCodec=${namespace === 'dev'}`,
`--set graphql.app.experimental.enableJwstCodec=true`,
`--set graphql.app.features.earlyAccessPreview=false`,
`--set graphql.app.features.syncClientVersionCheck=true`,
`--set sync.replicaCount=${syncReplicaCount}`,
`--set-string sync.image.tag="${imageTag}"`,
...serviceAnnotations,

View File

@@ -11,7 +11,7 @@ runs:
- name: Download tar.gz
uses: actions/download-artifact@v4
with:
name: web
name: core
path: .
- name: Extract core artifacts

View File

@@ -63,7 +63,7 @@ runs:
run: node -e "const p = $(yarn config cacheFolder --json).effective; console.log('yarn_global_cache=' + p)" >> $GITHUB_OUTPUT
- name: Cache non-full yarn cache on Linux
uses: actions/cache@v4
uses: actions/cache@v3
if: ${{ inputs.full-cache != 'true' && runner.os == 'Linux' }}
with:
path: |
@@ -75,7 +75,7 @@ runs:
# and the decompression performance on Windows is very terrible
# so we reduce the number of cached files on non-Linux systems by remove node_modules from cache path.
- name: Cache non-full yarn cache on non-Linux
uses: actions/cache@v4
uses: actions/cache@v3
if: ${{ inputs.full-cache != 'true' && runner.os != 'Linux' }}
with:
path: |
@@ -83,7 +83,7 @@ runs:
key: node_modules-cache-${{ github.job }}-${{ runner.os }}
- name: Cache full yarn cache on Linux
uses: actions/cache@v4
uses: actions/cache@v3
if: ${{ inputs.full-cache == 'true' && runner.os == 'Linux' }}
with:
path: |
@@ -92,7 +92,7 @@ runs:
key: node_modules-cache-full-${{ runner.os }}
- name: Cache full yarn cache on non-Linux
uses: actions/cache@v4
uses: actions/cache@v3
if: ${{ inputs.full-cache == 'true' && runner.os != 'Linux' }}
with:
path: |
@@ -134,7 +134,7 @@ runs:
# Note: Playwright's cache directory is hard coded because that's what it
# says to do in the docs. There doesn't appear to be a command that prints
# it out for us.
- uses: actions/cache@v4
- uses: actions/cache@v3
id: playwright-cache
if: ${{ inputs.playwright-install == 'true' }}
with:
@@ -167,7 +167,7 @@ runs:
run: |
echo "version=$(yarn why --json electron | grep -h 'workspace:.' | jq --raw-output '.children[].locator' | sed -e 's/@playwright\/test@.*://' | head -n 1)" >> $GITHUB_OUTPUT
- uses: actions/cache@v4
- uses: actions/cache@v3
id: electron-cache
if: ${{ inputs.electron-install == 'true' }}
with:

View File

@@ -1,6 +1,6 @@
FROM openresty/openresty:1.25.3.1-0-buster
FROM openresty/openresty:1.21.4.3-0-buster
WORKDIR /app
COPY ./packages/frontend/web/dist ./dist
COPY ./packages/frontend/core/dist ./dist
COPY ./.github/deployment/front/nginx.conf /usr/local/openresty/nginx/conf/nginx.conf
COPY ./.github/deployment/front/affine.nginx.conf /etc/nginx/conf.d/affine.nginx.conf

View File

@@ -1,11 +1,10 @@
FROM node:20-bookworm-slim
FROM node:18-bookworm-slim
COPY ./packages/backend/server /app
COPY ./packages/frontend/web/dist /app/static
WORKDIR /app
RUN apt-get update && \
apt-get install -y --no-install-recommends openssl && \
rm -rf /var/lib/apt/lists/*
CMD ["node", "--import", "./scripts/register.js", "./dist/index.js"]
CMD ["node", "--es-module-specifier-resolution=node", "./dist/index.js"]

View File

@@ -1,59 +0,0 @@
services:
affine:
image: ghcr.io/toeverything/affine-graphql:stable
container_name: affine_selfhosted
command:
['sh', '-c', 'node ./scripts/self-host-predeploy && node ./dist/index.js']
ports:
- '3010:3010'
- '5555:5555'
depends_on:
redis:
condition: service_healthy
postgres:
condition: service_healthy
volumes:
# custom configurations
- ~/.affine/self-host/config:/root/.affine/config
# blob storage
- ~/.affine/self-host/storage:/root/.affine/storage
logging:
driver: 'json-file'
options:
max-size: '1000m'
restart: unless-stopped
environment:
- NODE_OPTIONS="--import=./scripts/register.js"
- AFFINE_CONFIG_PATH=/root/.affine/config
- REDIS_SERVER_HOST=redis
- DATABASE_URL=postgres://affine:affine@postgres:5432/affine
- NODE_ENV=production
- AFFINE_ADMIN_EMAIL=${AFFINE_ADMIN_EMAIL}
- AFFINE_ADMIN_PASSWORD=${AFFINE_ADMIN_PASSWORD}
redis:
image: redis
container_name: affine_redis
restart: unless-stopped
volumes:
- ~/.affine/self-host/redis:/data
healthcheck:
test: ['CMD', 'redis-cli', '--raw', 'incr', 'ping']
interval: 10s
timeout: 5s
retries: 5
postgres:
image: postgres
container_name: affine_postgres
restart: unless-stopped
volumes:
- ~/.affine/self-host/postgres:/var/lib/postgresql/data
healthcheck:
test: ['CMD-SHELL', 'pg_isready -U affine']
interval: 10s
timeout: 5s
retries: 5
environment:
POSTGRES_USER: affine
POSTGRES_PASSWORD: affine
POSTGRES_DB: affine
PGDATA: /var/lib/postgresql/data/pgdata

View File

@@ -3,4 +3,4 @@ name: affine
description: AFFiNE cloud chart
type: application
version: 0.0.0
appVersion: "0.14.0"
appVersion: "0.11.0"

View File

@@ -3,7 +3,7 @@ name: graphql
description: AFFiNE GraphQL server
type: application
version: 0.0.0
appVersion: "0.14.0"
appVersion: "0.11.0"
dependencies:
- name: gcloud-sql-proxy
version: 0.0.0

View File

@@ -61,3 +61,18 @@ Create the name of the service account to use
{{- default "default" .Values.serviceAccount.name }}
{{- end }}
{{- end }}
{{- define "jwt.key" -}}
{{- $secret := lookup "v1" "Secret" .Release.Namespace .Values.app.jwt.secretName -}}
{{- if and $secret $secret.data.private -}}
{{/*
Reusing existing secret data
*/}}
key: {{ $secret.data.private }}
{{- else -}}
{{/*
Generate new data
*/}}
key: {{ genPrivateKey "ecdsa" | b64enc }}
{{- end -}}
{{- end -}}

View File

@@ -28,10 +28,10 @@ spec:
image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
imagePullPolicy: {{ .Values.image.pullPolicy }}
env:
- name: AFFINE_PRIVATE_KEY
- name: AUTH_PRIVATE_KEY
valueFrom:
secretKeyRef:
name: "{{ .Values.global.secret.secretName }}"
name: "{{ .Values.app.jwt.secretName }}"
key: key
- name: NODE_ENV
value: "{{ .Values.env }}"
@@ -39,12 +39,12 @@ spec:
value: "--max-old-space-size=4096"
- name: NO_COLOR
value: "1"
- name: DEPLOYMENT_TYPE
value: "affine"
- name: SERVER_FLAVOR
value: "graphql"
- name: AFFINE_ENV
value: "{{ .Release.Namespace }}"
- name: NEXTAUTH_URL
value: "{{ .Values.global.ingress.host }}"
- name: DATABASE_PASSWORD
valueFrom:
secretKeyRef:
@@ -73,41 +73,37 @@ spec:
value: "{{ .Values.app.path }}"
- name: AFFINE_SERVER_HOST
value: "{{ .Values.app.host }}"
- name: AFFINE_SERVER_HTTPS
value: "{{ .Values.app.https }}"
- name: ENABLE_R2_OBJECT_STORAGE
value: "{{ .Values.app.objectStorage.r2.enabled }}"
- name: ENABLE_CAPTCHA
value: "{{ .Values.app.captcha.enabled }}"
- name: FEATURES_EARLY_ACCESS_PREVIEW
value: "{{ .Values.app.features.earlyAccessPreview }}"
- name: FEATURES_SYNC_CLIENT_VERSION_CHECK
value: "{{ .Values.app.features.syncClientVersionCheck }}"
- name: MAILER_HOST
- name: OAUTH_EMAIL_SENDER
valueFrom:
secretKeyRef:
name: "{{ .Values.app.mailer.secretName }}"
key: host
- name: MAILER_PORT
valueFrom:
secretKeyRef:
name: "{{ .Values.app.mailer.secretName }}"
key: port
- name: MAILER_USER
valueFrom:
secretKeyRef:
name: "{{ .Values.app.mailer.secretName }}"
key: user
- name: MAILER_PASSWORD
valueFrom:
secretKeyRef:
name: "{{ .Values.app.mailer.secretName }}"
key: password
- name: MAILER_SENDER
valueFrom:
secretKeyRef:
name: "{{ .Values.app.mailer.secretName }}"
name: "{{ .Values.app.oauth.email.secretName }}"
key: sender
- name: OAUTH_EMAIL_LOGIN
valueFrom:
secretKeyRef:
name: "{{ .Values.app.oauth.email.secretName }}"
key: login
- name: OAUTH_EMAIL_SERVER
valueFrom:
secretKeyRef:
name: "{{ .Values.app.oauth.email.secretName }}"
key: server
- name: OAUTH_EMAIL_PORT
valueFrom:
secretKeyRef:
name: "{{ .Values.app.oauth.email.secretName }}"
key: port
- name: OAUTH_EMAIL_PASSWORD
valueFrom:
secretKeyRef:
name: "{{ .Values.app.oauth.email.secretName }}"
key: password
- name: STRIPE_API_KEY
valueFrom:
secretKeyRef:
@@ -149,8 +145,6 @@ spec:
key: turnstileSecret
{{ end }}
{{ if .Values.app.oauth.google.enabled }}
- name: OAUTH_GOOGLE_ENABLED
value: "true"
- name: OAUTH_GOOGLE_CLIENT_ID
valueFrom:
secretKeyRef:

View File

@@ -0,0 +1,7 @@
apiVersion: v1
kind: Secret
metadata:
name: "{{ .Values.app.jwt.secretName }}"
type: Opaque
data:
{{- ( include "jwt.key" . ) | indent 2 -}}

View File

@@ -1,13 +0,0 @@
{{- if .Values.app.mailer.secretName -}}
apiVersion: v1
kind: Secret
metadata:
name: "{{ .Values.app.mailer.secretName }}"
type: Opaque
data:
host: "{{ .Values.app.mailer.host | b64enc }}"
port: "{{ .Values.app.mailer.port | b64enc }}"
user: "{{ .Values.app.mailer.user | b64enc }}"
password: "{{ .Values.app.mailer.password | b64enc }}"
sender: "{{ .Values.app.mailer.sender | b64enc }}"
{{- end }}

View File

@@ -1,3 +1,15 @@
apiVersion: v1
kind: Secret
metadata:
name: "{{ .Values.app.oauth.email.secretName }}"
type: Opaque
data:
sender: "{{ .Values.app.oauth.email.sender | b64enc }}"
login: "{{ .Values.app.oauth.email.login | b64enc }}"
password: "{{ .Values.app.oauth.email.password | b64enc }}"
server: "{{ .Values.app.oauth.email.server | b64enc }}"
port: "{{ .Values.app.oauth.email.port | b64enc }}"
---
{{- if .Values.app.oauth.google.enabled -}}
apiVersion: v1
kind: Secret

View File

@@ -1,18 +0,0 @@
{{- $privateKey := default (genPrivateKey "ecdsa") .Values.global.secret.privateKey | b64enc | quote }}
{{- if not .Values.global.secret.privateKey }}
{{- $existingKey := (lookup "v1" "Secret" .Release.Namespace .Values.global.secret.secretName) }}
{{- if $existingKey }}
{{- $privateKey = index $existingKey.data "key" }}
{{- end -}}
{{- end -}}
apiVersion: v1
kind: Secret
metadata:
name: {{ .Values.global.secret.secretName }}
annotations:
"helm.sh/resource-policy": "keep"
type: Opaque
data:
key: {{ $privateKey }}

View File

@@ -16,9 +16,12 @@ app:
path: ''
# AFFINE_SERVER_HOST
host: '0.0.0.0'
https: true
doc:
mergeInterval: "3000"
jwt:
secretName: jwt-private-key
# base64 encoded ecdsa private key
privateKey: ''
captcha:
enable: false
secretName: captcha
@@ -31,7 +34,14 @@ app:
accountId: ''
accessKeyId: ''
secretAccessKey: ''
oauth:
oauth:
email:
secretName: 'oauth-email'
sender: 'noreply@toeverything.info'
login: ''
password: ''
server: 'smtp.gmail.com'
port: '465'
google:
enabled: false
secretName: oauth-google
@@ -42,13 +52,6 @@ app:
secretName: oauth-github
clientId: ''
clientSecret: ''
mailer:
secretName: 'mailer'
host: 'smtp.gmail.com'
port: '465'
user: ''
password: ''
sender: 'noreply@toeverything.info'
payment:
stripe:
secretName: 'stripe'
@@ -56,7 +59,6 @@ app:
webhookKey: ''
features:
earlyAccessPreview: false
syncClientVersionCheck: false
serviceAccount:
create: true

View File

@@ -3,7 +3,7 @@ name: sync
description: AFFiNE Sync Server
type: application
version: 0.0.0
appVersion: "0.14.0"
appVersion: "0.11.0"
dependencies:
- name: gcloud-sql-proxy
version: 0.0.0

View File

@@ -32,19 +32,14 @@ spec:
image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
imagePullPolicy: {{ .Values.image.pullPolicy }}
env:
- name: AFFINE_PRIVATE_KEY
valueFrom:
secretKeyRef:
name: "{{ .Values.global.secret.secretName }}"
key: key
- name: NODE_ENV
value: "{{ .Values.env }}"
- name: NO_COLOR
value: "1"
- name: DEPLOYMENT_TYPE
value: "affine"
- name: SERVER_FLAVOR
value: "sync"
- name: NEXTAUTH_URL
value: "{{ .Values.global.ingress.host }}"
- name: AFFINE_ENV
value: "{{ .Release.Namespace }}"
- name: DATABASE_PASSWORD

View File

@@ -12,6 +12,7 @@ env: 'production'
app:
# AFFINE_SERVER_HOST
host: '0.0.0.0'
serviceAccount:
create: true
annotations: {}

View File

@@ -60,13 +60,6 @@ spec:
name: affine-graphql
port:
number: {{ .Values.graphql.service.port }}
- path: /oauth
pathType: Prefix
backend:
service:
name: affine-graphql
port:
number: {{ .Values.graphql.service.port }}
- path: /
pathType: Prefix
backend:

View File

@@ -4,9 +4,6 @@ global:
className: ''
host: affine.pro
tls: []
secret:
secretName: 'server-private-key'
privateKey: ''
database:
user: 'postgres'
url: 'pg-postgresql'

5
.github/labeler.yml vendored
View File

@@ -29,6 +29,11 @@ mod:plugin-cli:
- any-glob-to-any-file:
- 'tools/plugin-cli/**/*'
mod:workspace:
- changed-files:
- any-glob-to-any-file:
- 'packages/common/workspace/**/*'
mod:workspace-impl:
- changed-files:
- any-glob-to-any-file:

12
.github/renovate.json vendored
View File

@@ -47,22 +47,17 @@
"groupName": "electron-forge"
},
{
"matchPackageNames": ["oxlint"],
"rangeStrategy": "replace",
"groupName": "oxlint"
},
{
"groupName": "blocksuite-canary",
"groupName": "blocksuite-nightly",
"matchPackagePatterns": ["^@blocksuite"],
"excludePackageNames": ["@blocksuite/icons"],
"rangeStrategy": "replace",
"followTag": "canary"
"followTag": "nightly"
},
{
"groupName": "all non-major dependencies",
"groupSlug": "all-minor-patch",
"matchPackagePatterns": ["*"],
"excludePackagePatterns": ["^@blocksuite/", "oxlint"],
"excludePackagePatterns": ["^@blocksuite/"],
"matchUpdateTypes": ["minor", "patch"]
},
{
@@ -75,7 +70,6 @@
"commitMessageAction": "bump up",
"commitMessageTopic": "{{depName}} version",
"ignoreDeps": [],
"postUpdateOptions": ["yarnDedupeHighest"],
"lockFileMaintenance": {
"enabled": true,
"extends": ["schedule:weekly"]

View File

@@ -1,25 +0,0 @@
name: Build Selfhost Image
on:
workflow_dispatch:
inputs:
flavor:
description: 'Select distribution to build'
type: choice
default: canary
options:
- canary
- beta
- stable
permissions:
contents: 'write'
id-token: 'write'
packages: 'write'
jobs:
build-image:
name: Build Image
uses: ./.github/workflows/build-server-image.yml
with:
flavor: ${{ github.event.inputs.flavor }}

View File

@@ -1,191 +0,0 @@
name: Build Images
on:
workflow_call:
inputs:
flavor:
type: string
required: true
env:
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
permissions:
contents: 'write'
id-token: 'write'
packages: 'write'
jobs:
build-server:
name: Build Server
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
extra-flags: workspaces focus @affine/server
- name: Build Server
run: yarn workspace @affine/server build
- name: Upload server dist
uses: actions/upload-artifact@v4
with:
name: server-dist
path: ./packages/backend/server/dist
if-no-files-found: error
build-web-selfhost:
name: Build @affine/web selfhost
runs-on: ubuntu-latest
environment: ${{ github.event.inputs.flavor }}
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Core
run: yarn nx build @affine/web --skip-nx-cache
env:
BUILD_TYPE: ${{ github.event.inputs.flavor }}
SHOULD_REPORT_TRACE: false
PUBLIC_PATH: '/'
SELF_HOSTED: true
- name: Download selfhost fonts
run: node ./scripts/download-blocksuite-fonts.mjs
- name: Upload web artifact
uses: actions/upload-artifact@v4
with:
name: selfhost-web
path: ./packages/frontend/web/dist
if-no-files-found: error
build-storage:
name: Build Storage - ${{ matrix.targets.name }}
runs-on: ubuntu-latest
strategy:
matrix:
targets:
- name: x86_64-unknown-linux-gnu
file: storage.node
- name: aarch64-unknown-linux-gnu
file: storage.arm64.node
- name: armv7-unknown-linux-gnueabihf
file: storage.armv7.node
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
extra-flags: workspaces focus @affine/storage
- name: Build Rust
uses: ./.github/actions/build-rust
with:
target: ${{ matrix.targets.name }}
package: '@affine/storage'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Upload ${{ matrix.targets.file }}
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.targets.file }}
path: ./packages/backend/storage/storage.node
if-no-files-found: error
build-docker:
name: Build Docker
runs-on: ubuntu-latest
needs:
- build-server
- build-web-selfhost
- build-storage
steps:
- uses: actions/checkout@v4
- name: Download server dist
uses: actions/download-artifact@v4
with:
name: server-dist
path: ./packages/backend/server/dist
- name: Download storage.node
uses: actions/download-artifact@v4
with:
name: storage.node
path: ./packages/backend/server
- name: Download storage.node arm64
uses: actions/download-artifact@v4
with:
name: storage.arm64.node
path: ./packages/backend/storage
- name: Download storage.node arm64
uses: actions/download-artifact@v4
with:
name: storage.armv7.node
path: .
- name: move storage files
run: |
mv ./packages/backend/storage/storage.node ./packages/backend/server/storage.arm64.node
mv storage.node ./packages/backend/server/storage.armv7.node
- name: Setup env
run: |
echo "GIT_SHORT_HASH=$(git rev-parse --short HEAD)" >> "$GITHUB_ENV"
if [ -z "${{ inputs.flavor }}" ]
then
echo "RELEASE_FLAVOR=canary" >> "$GITHUB_ENV"
else
echo "RELEASE_FLAVOR=${{ inputs.flavor }}" >> "$GITHUB_ENV"
fi
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
logout: false
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
# setup node without cache configuration
# Prisma cache is not compatible with docker build cache
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version-file: '.nvmrc'
registry-url: https://npm.pkg.github.com
scope: '@toeverything'
- name: Download selfhost web artifact
uses: actions/download-artifact@v4
with:
name: selfhost-web
path: ./packages/frontend/web/dist
- name: Install Node.js dependencies
run: |
yarn config set --json supportedArchitectures.cpu '["x64", "arm64", "arm"]'
yarn config set --json supportedArchitectures.libc '["glibc"]'
yarn workspaces focus @affine/server --production
- name: Generate Prisma client
run: yarn workspace @affine/server prisma generate
- name: Build graphql Dockerfile
uses: docker/build-push-action@v5
with:
context: .
push: true
pull: true
platforms: linux/amd64,linux/arm64,linux/arm/v7
provenance: true
file: .github/deployment/node/Dockerfile
tags: ghcr.io/toeverything/affine-graphql:${{env.RELEASE_FLAVOR}}-${{ env.GIT_SHORT_HASH }},ghcr.io/toeverything/affine-graphql:${{env.RELEASE_FLAVOR}}

View File

@@ -4,8 +4,6 @@ on:
push:
branches:
- canary
- beta
- stable
- v[0-9]+.[0-9]+.x-staging
- v[0-9]+.[0-9]+.x
paths-ignore:
@@ -21,7 +19,6 @@ env:
MACOSX_DEPLOYMENT_TARGET: '10.13'
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
PLAYWRIGHT_BROWSERS_PATH: ${{ github.workspace }}/node_modules/.cache/ms-playwright
DEPLOYMENT_TYPE: affine
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
@@ -98,8 +95,6 @@ jobs:
run: |
git checkout .yarnrc.yml
yarn lint:prettier
- name: Yarn Dedupe
run: yarn dedupe --check
- name: Run Type Check
run: yarn typecheck
@@ -118,7 +113,6 @@ jobs:
runs-on: ubuntu-latest
env:
DISTRIBUTION: browser
IN_CI_TEST: true
strategy:
fail-fast: false
matrix:
@@ -193,7 +187,7 @@ jobs:
run: yarn nx test:coverage @affine/monorepo
- name: Upload unit test coverage results
uses: codecov/codecov-action@v4
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./.coverage/store/lcov.info
@@ -212,8 +206,8 @@ jobs:
spec:
- { os: ubuntu-latest, target: x86_64-unknown-linux-gnu }
- { os: windows-latest, target: x86_64-pc-windows-msvc }
- { os: macos-14, target: x86_64-apple-darwin }
- { os: macos-14, target: aarch64-apple-darwin }
- { os: macos-latest, target: x86_64-apple-darwin }
- { os: macos-latest, target: aarch64-apple-darwin }
steps:
- uses: actions/checkout@v4
@@ -266,8 +260,8 @@ jobs:
path: ./packages/backend/storage/storage.node
if-no-files-found: error
build-web:
name: Build @affine/web
build-core:
name: Build @affine/core
runs-on: ubuntu-latest
steps:
@@ -277,17 +271,15 @@ jobs:
with:
electron-install: false
full-cache: true
- name: Build Web
- name: Build Core
# always skip cache because its fast, and cache configuration is always changing
run: yarn nx build @affine/web --skip-nx-cache
env:
DISTRIBUTION: 'desktop'
- name: zip web
run: tar -czf dist.tar.gz --directory=packages/frontend/electron/dist .
- name: Upload web artifact
run: yarn nx build @affine/core --skip-nx-cache
- name: zip core
run: tar -czf dist.tar.gz --directory=packages/frontend/core/dist .
- name: Upload core artifact
uses: actions/upload-artifact@v4
with:
name: web
name: core
path: dist.tar.gz
if-no-files-found: error
@@ -296,7 +288,6 @@ jobs:
runs-on: ubuntu-latest
needs: build-storage
env:
NODE_ENV: test
DISTRIBUTION: browser
services:
postgres:
@@ -338,11 +329,17 @@ jobs:
env:
PGPASSWORD: affine
- name: Run init-db script
- name: Generate prisma client
run: |
yarn workspace @affine/server exec prisma generate
yarn workspace @affine/server exec prisma db push
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Run init-db script
run: |
yarn workspace @affine/server data-migration run
yarn workspace @affine/server exec node --loader ts-node/esm/transpile-only ./scripts/init-db.ts
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
@@ -353,7 +350,7 @@ jobs:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Upload server test coverage results
uses: codecov/codecov-action@v4
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./packages/backend/server/.coverage/lcov.info
@@ -367,7 +364,6 @@ jobs:
env:
DISTRIBUTION: browser
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
IN_CI_TEST: true
strategy:
fail-fast: false
matrix:
@@ -431,17 +427,24 @@ jobs:
env:
PGPASSWORD: affine
- name: Run init-db script
- name: Generate prisma client
run: |
yarn workspace @affine/server exec prisma generate
yarn workspace @affine/server exec prisma db push
env:
DATABASE_URL: postgresql://affine:affine@localhost:5432/affine
- name: Run init-db script
run: |
yarn workspace @affine/server data-migration run
yarn workspace @affine/server exec node --loader ts-node/esm/transpile-only ./scripts/init-db.ts
- name: ${{ matrix.tests.name }}
run: |
${{ matrix.tests.script }}
env:
DEV_SERVER_URL: http://localhost:8080
ENABLE_LOCAL_EMAIL: true
- name: Upload test results
if: ${{ failure() }}
@@ -456,21 +459,22 @@ jobs:
runs-on: ${{ matrix.spec.os }}
strategy:
fail-fast: false
# all combinations: macos-latest x64, macos-latest arm64, windows-latest x64, ubuntu-latest x64
matrix:
spec:
- {
os: macos-14,
os: macos-latest,
platform: macos,
arch: x64,
target: x86_64-apple-darwin,
test: false,
test: true,
}
- {
os: macos-14,
os: macos-latest,
platform: macos,
arch: arm64,
target: aarch64-apple-darwin,
test: true,
test: false,
}
- {
os: ubuntu-latest,
@@ -487,7 +491,7 @@ jobs:
test: true,
}
needs:
- build-web
- build-core
- build-native
steps:
- uses: actions/checkout@v4
@@ -518,8 +522,8 @@ jobs:
shell: bash
run: yarn workspace @affine/electron vitest
- name: Download web artifact
uses: ./.github/actions/download-web
- name: Download core artifact
uses: ./.github/actions/download-core
with:
path: packages/frontend/electron/resources/web-static
@@ -527,7 +531,7 @@ jobs:
run: yarn workspace @affine/electron build
- name: Run desktop tests
if: ${{ matrix.spec.os == 'ubuntu-latest' }}
if: ${{ matrix.spec.test && matrix.spec.os == 'ubuntu-latest' }}
run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- yarn workspace @affine-test/affine-desktop e2e
- name: Run desktop tests
@@ -535,23 +539,15 @@ jobs:
run: yarn workspace @affine-test/affine-desktop e2e
- name: Make bundle
if: ${{ matrix.spec.target == 'aarch64-apple-darwin' }}
if: ${{ matrix.spec.os == 'macos-latest' && matrix.spec.arch == 'arm64' }}
env:
SKIP_BUNDLE: true
SKIP_WEB_BUILD: true
HOIST_NODE_MODULES: 1
run: yarn workspace @affine/electron package --platform=darwin --arch=arm64
- name: Make AppImage
run: yarn workspace @affine/electron make --platform=linux --arch=x64
if: ${{ matrix.spec.target == 'x86_64-unknown-linux-gnu' }}
env:
SKIP_PLUGIN_BUILD: 1
SKIP_WEB_BUILD: 1
HOIST_NODE_MODULES: 1
- name: Output check
if: ${{ matrix.spec.os == 'macos-14' && matrix.spec.arch == 'arm64' }}
if: ${{ matrix.spec.os == 'macos-latest' && matrix.spec.arch == 'arm64' }}
run: |
yarn workspace @affine/electron exec node --loader ts-node/esm/transpile-only ./scripts/macos-arm64-output-check.ts
@@ -562,22 +558,3 @@ jobs:
name: test-results-e2e-${{ matrix.spec.os }}-${{ matrix.spec.arch }}
path: ./test-results
if-no-files-found: ignore
test-done:
needs:
- analyze
- lint
- check-yarn-binary
- e2e-test
- e2e-migration-test
- unit-test
- server-test
- server-e2e-test
- desktop-test
if: always()
runs-on: ubuntu-latest
name: 3, 2, 1 Launch
steps:
- run: exit 1
# Thank you, next https://github.com/vercel/next.js/blob/canary/.github/workflows/build_and_test.yml#L379
if: ${{ always() && (contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled')) }}

View File

@@ -7,11 +7,6 @@ on:
schedule:
- cron: '0 9 * * *'
permissions:
contents: write
pull-requests: write
actions: write
jobs:
dispatch-deploy:
runs-on: ubuntu-latest

View File

@@ -13,23 +13,32 @@ on:
- stable
- internal
env:
APP_NAME: affine
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
MIXPANEL_TOKEN: '389c0615a69b57cca7d3fa0a4824c930'
permissions:
contents: 'write'
id-token: 'write'
packages: 'write'
jobs:
build-server-image:
name: Build Server Image
uses: ./.github/workflows/build-server-image.yml
with:
flavor: ${{ github.event.inputs.flavor }}
build-web:
name: Build @affine/web
build-server:
name: Build Server
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
- name: Build Server
run: yarn workspace @affine/server build
- name: Upload server dist
uses: actions/upload-artifact@v4
with:
name: server-dist
path: ./packages/backend/server/dist
if-no-files-found: error
build-core:
name: Build @affine/core
runs-on: ubuntu-latest
environment: ${{ github.event.inputs.flavor }}
steps:
@@ -40,7 +49,7 @@ jobs:
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Core
run: yarn nx build @affine/web --skip-nx-cache
run: yarn nx build @affine/core --skip-nx-cache
env:
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
@@ -53,26 +62,93 @@ jobs:
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
PERFSEE_TOKEN: ${{ secrets.PERFSEE_TOKEN }}
- name: Upload web artifact
- name: Upload core artifact
uses: actions/upload-artifact@v4
with:
name: web
path: ./packages/frontend/web/dist
name: core
path: ./packages/frontend/core/dist
if-no-files-found: error
build-frontend-image:
name: Build Frontend Image
build-storage:
name: Build Storage
runs-on: ubuntu-latest
needs:
- build-web
steps:
- uses: actions/checkout@v4
- name: Download web artifact
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Rust
uses: ./.github/actions/build-rust
with:
target: 'x86_64-unknown-linux-gnu'
package: '@affine/storage'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Upload storage.node
uses: actions/upload-artifact@v4
with:
name: storage.node
path: ./packages/backend/storage/storage.node
if-no-files-found: error
build-storage-arm64:
name: Build Storage arm64
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Version
id: version
uses: ./.github/actions/setup-version
- name: Setup Node.js
uses: ./.github/actions/setup-node
- name: Build Rust
uses: ./.github/actions/build-rust
with:
target: 'aarch64-unknown-linux-gnu'
package: '@affine/storage'
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- name: Upload storage.node
uses: actions/upload-artifact@v4
with:
name: storage.arm64.node
path: ./packages/backend/storage/storage.node
if-no-files-found: error
build-docker:
name: Build Docker
runs-on: ubuntu-latest
needs:
- build-server
- build-core
- build-storage
- build-storage-arm64
steps:
- uses: actions/checkout@v4
- name: Download core artifact
uses: actions/download-artifact@v4
with:
name: web
path: ./packages/frontend/web/dist
name: core
path: ./packages/frontend/core/dist
- name: Download server dist
uses: actions/download-artifact@v4
with:
name: server-dist
path: ./packages/backend/server/dist
- name: Download storage.node
uses: actions/download-artifact@v4
with:
name: storage.node
path: ./packages/backend/server
- name: Download storage.node arm64
uses: actions/download-artifact@v4
with:
name: storage.arm64.node
path: ./packages/backend/storage
- name: move storage.arm64.node
run: mv ./packages/backend/storage/storage.node ./packages/backend/server/storage.arm64.node
- name: Setup env
run: |
echo "GIT_SHORT_HASH=$(git rev-parse --short HEAD)" >> "$GITHUB_ENV"
@@ -82,6 +158,7 @@ jobs:
else
echo "RELEASE_FLAVOR=${{ inputs.flavor }}" >> "$GITHUB_ENV"
fi
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
@@ -104,13 +181,43 @@ jobs:
file: .github/deployment/front/Dockerfile
tags: ghcr.io/toeverything/affine-front:${{env.RELEASE_FLAVOR}}-${{ env.GIT_SHORT_HASH }},ghcr.io/toeverything/affine-front:${{env.RELEASE_FLAVOR}}
# setup node without cache configuration
# Prisma cache is not compatible with docker build cache
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version-file: '.nvmrc'
registry-url: https://npm.pkg.github.com
scope: '@toeverything'
- name: Install Node.js dependencies
run: |
yarn config set --json supportedArchitectures.cpu '["x64", "arm64"]'
yarn workspaces focus @affine/server --production
- name: Generate Prisma client
run: yarn workspace @affine/server prisma generate
- name: Build graphql Dockerfile
uses: docker/build-push-action@v5
with:
context: .
push: true
pull: true
platforms: linux/amd64,linux/arm64
provenance: true
file: .github/deployment/node/Dockerfile
tags: ghcr.io/toeverything/affine-graphql:${{env.RELEASE_FLAVOR}}-${{ env.GIT_SHORT_HASH }},ghcr.io/toeverything/affine-graphql:${{env.RELEASE_FLAVOR}}
deploy:
name: Deploy to cluster
if: ${{ github.event_name == 'workflow_dispatch' }}
environment: ${{ github.event.inputs.flavor }}
permissions:
contents: 'write'
id-token: 'write'
needs:
- build-frontend-image
- build-server-image
- build-docker
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
@@ -135,9 +242,9 @@ jobs:
R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
ENABLE_CAPTCHA: true
CAPTCHA_TURNSTILE_SECRET: ${{ secrets.CAPTCHA_TURNSTILE_SECRET }}
MAILER_SENDER: ${{ secrets.OAUTH_EMAIL_SENDER }}
MAILER_USER: ${{ secrets.OAUTH_EMAIL_LOGIN }}
MAILER_PASSWORD: ${{ secrets.OAUTH_EMAIL_PASSWORD }}
OAUTH_EMAIL_SENDER: ${{ secrets.OAUTH_EMAIL_SENDER }}
OAUTH_EMAIL_LOGIN: ${{ secrets.OAUTH_EMAIL_LOGIN }}
OAUTH_EMAIL_PASSWORD: ${{ secrets.OAUTH_EMAIL_PASSWORD }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
AFFINE_GOOGLE_CLIENT_ID: ${{ secrets.AFFINE_GOOGLE_CLIENT_ID }}
AFFINE_GOOGLE_CLIENT_SECRET: ${{ secrets.AFFINE_GOOGLE_CLIENT_SECRET }}

View File

@@ -24,7 +24,7 @@ jobs:
token: ${{ secrets.HELM_RELEASER_TOKEN }}
- name: Install Helm
uses: azure/setup-helm@v4
uses: azure/setup-helm@v3
- name: Install chart releaser
run: |

View File

@@ -9,4 +9,4 @@ jobs:
add-reviews:
runs-on: ubuntu-latest
steps:
- uses: kentaro-m/auto-assign-action@v2.0.0
- uses: kentaro-m/auto-assign-action@v1.2.5

View File

@@ -1,51 +0,0 @@
name: Publish UI Storybook
env:
NODE_OPTIONS: --max-old-space-size=4096
on:
workflow_dispatch:
push:
branches:
- canary
pull_request:
branches:
- canary
paths-ignore:
- README.md
- .github/**
- packages/backend/server
- packages/frontend/electron
- '!.github/workflows/publish-storybook.yml'
jobs:
publish-ui-storybook:
name: Publish UI Storybook
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.merge_commit_sha }}
# This is required to fetch all commits for chromatic
fetch-depth: 0
- name: Setup Node.js
uses: ./.github/actions/setup-node
with:
electron-install: false
- uses: chromaui/action-next@v1
with:
workingDir: packages/frontend/component
buildScriptName: build:storybook
exitOnceUploaded: true
onlyChanged: false
diagnostics: true
env:
CHROMATIC_PROJECT_TOKEN: ${{ secrets.CHROMATIC_UI_PROJECT_TOKEN }}
NODE_OPTIONS: ${{ env.NODE_OPTIONS }}
- uses: actions/upload-artifact@v4
if: always()
with:
name: chromatic-build-artifacts-${{ github.run_id }}
path: |
chromatic-diagnostics.json
**/build-storybook.log

View File

@@ -7,11 +7,6 @@ on:
schedule:
- cron: '0 9 * * *'
permissions:
contents: write
pull-requests: write
actions: write
jobs:
dispatch-release-desktop:
runs-on: ubuntu-latest

View File

@@ -33,7 +33,6 @@ env:
DEBUG: napi:*
APP_NAME: affine
MACOSX_DEPLOYMENT_TARGET: '10.13'
MIXPANEL_TOKEN: '389c0615a69b57cca7d3fa0a4824c930'
jobs:
before-make:
@@ -61,21 +60,23 @@ jobs:
SKIP_PLUGIN_BUILD: 'true'
SKIP_NX_CACHE: 'true'
- name: Upload web artifact
- name: Upload core artifact
uses: actions/upload-artifact@v4
with:
name: web
name: core
path: packages/frontend/electron/resources/web-static
make-distribution:
strategy:
# all combinations: macos-latest x64, macos-latest arm64, ubuntu-latest x64
# For windows, we need a separate approach
matrix:
spec:
- runner: macos-14
- runner: macos-latest
platform: darwin
arch: x64
target: x86_64-apple-darwin
- runner: macos-14
- runner: macos-latest
platform: darwin
arch: arm64
target: aarch64-apple-darwin
@@ -90,10 +91,6 @@ jobs:
APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
SKIP_GENERATE_ASSETS: 1
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
steps:
- uses: actions/checkout@v4
- name: Setup Version
@@ -115,7 +112,7 @@ jobs:
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- uses: actions/download-artifact@v4
with:
name: web
name: core
path: packages/frontend/electron/resources/web-static
- name: Build Desktop Layers
@@ -135,23 +132,18 @@ jobs:
SKIP_WEB_BUILD: 1
HOIST_NODE_MODULES: 1
- name: signing DMG
if: ${{ matrix.spec.platform == 'darwin' }}
run: |
codesign --force --sign "Developer ID Application: TOEVERYTHING PTE. LTD." packages/frontend/electron/out/${{ env.BUILD_TYPE }}/make/AFFiNE.dmg
- name: Save artifacts (mac)
if: ${{ matrix.spec.platform == 'darwin' }}
run: |
mkdir -p builds
mv packages/frontend/electron/out/*/make/*.dmg ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.dmg
mv packages/frontend/electron/out/*/make/zip/darwin/${{ matrix.spec.arch }}/*.zip ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.zip
mv packages/frontend/electron/out/*/make/*.dmg ./builds/affine-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.dmg
mv packages/frontend/electron/out/*/make/zip/darwin/${{ matrix.spec.arch }}/*.zip ./builds/affine-${{ env.BUILD_TYPE }}-macos-${{ matrix.spec.arch }}.zip
- name: Save artifacts (linux)
if: ${{ matrix.spec.platform == 'linux' }}
run: |
mkdir -p builds
mv packages/frontend/electron/out/*/make/zip/linux/x64/*.zip ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-linux-x64.zip
mv packages/frontend/electron/out/*/make/*.AppImage ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-linux-x64.appimage
mv packages/frontend/electron/out/*/make/zip/linux/x64/*.zip ./builds/affine-${{ env.BUILD_TYPE }}-linux-x64.zip
mv packages/frontend/electron/out/*/make/AppImage/x64/*.AppImage ./builds/affine-${{ env.BUILD_TYPE }}-linux-x64.AppImage
- name: Upload Artifact
uses: actions/upload-artifact@v4
@@ -161,6 +153,8 @@ jobs:
package-distribution-windows:
strategy:
# all combinations: macos-latest x64, macos-latest arm64, ubuntu-latest x64
# For windows, we need a separate approach
matrix:
spec:
- runner: windows-latest
@@ -173,10 +167,6 @@ jobs:
FILES_TO_BE_SIGNED: ${{ steps.get_files_to_be_signed.outputs.FILES_TO_BE_SIGNED }}
env:
SKIP_GENERATE_ASSETS: 1
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
steps:
- uses: actions/checkout@v4
- name: Setup Version
@@ -197,7 +187,7 @@ jobs:
nx_token: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
- uses: actions/download-artifact@v4
with:
name: web
name: core
path: packages/frontend/electron/resources/web-static
- name: Build Desktop Layers
@@ -238,6 +228,8 @@ jobs:
make-windows-installer:
needs: sign-packaged-artifacts-windows
strategy:
# all combinations: macos-latest x64, macos-latest arm64, ubuntu-latest x64
# For windows, we need a separate approach
matrix:
spec:
- runner: windows-latest
@@ -287,8 +279,10 @@ jobs:
artifact-name: installer-win32-x64
finalize-installer-windows:
needs: [sign-installer-artifacts-windows, before-make]
needs: sign-installer-artifacts-windows
strategy:
# all combinations: macos-latest x64, macos-latest arm64, ubuntu-latest x64
# For windows, we need a separate approach
matrix:
spec:
- runner: windows-latest
@@ -308,9 +302,9 @@ jobs:
- name: Save artifacts
run: |
mkdir -p builds
mv packages/frontend/electron/out/*/make/zip/win32/x64/AFFiNE*-win32-x64-*.zip ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-x64.zip
mv packages/frontend/electron/out/*/make/squirrel.windows/x64/*.exe ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-x64.exe
mv packages/frontend/electron/out/*/make/squirrel.windows/x64/*.msi ./builds/affine-${{ needs.before-make.outputs.RELEASE_VERSION }}-${{ env.BUILD_TYPE }}-windows-x64.msi
mv packages/frontend/electron/out/*/make/zip/win32/x64/AFFiNE*-win32-x64-*.zip ./builds/affine-${{ env.BUILD_TYPE }}-windows-x64.zip
mv packages/frontend/electron/out/*/make/squirrel.windows/x64/*.exe ./builds/affine-${{ env.BUILD_TYPE }}-windows-x64.exe
mv packages/frontend/electron/out/*/make/squirrel.windows/x64/*.msi ./builds/affine-${{ env.BUILD_TYPE }}-windows-x64.msi
- name: Upload Artifact
uses: actions/upload-artifact@v4
@@ -326,7 +320,7 @@ jobs:
- uses: actions/checkout@v4
- uses: actions/download-artifact@v4
with:
name: web
name: core
path: web-static
- name: Zip web-static
run: zip -r web-static.zip web-static
@@ -360,7 +354,7 @@ jobs:
RELEASE_VERSION: ${{ needs.before-make.outputs.RELEASE_VERSION }}
- name: Create Release Draft
if: ${{ github.ref_type == 'tag' }}
uses: softprops/action-gh-release@v2
uses: softprops/action-gh-release@v1
with:
name: ${{ needs.before-make.outputs.RELEASE_VERSION }}
body: ''
@@ -371,12 +365,12 @@ jobs:
./*.zip
./*.dmg
./*.exe
./*.appimage
./*.AppImage
./*.apk
./*.yml
- name: Create Nightly Release Draft
if: ${{ github.ref_type == 'branch' }}
uses: softprops/action-gh-release@v2
uses: softprops/action-gh-release@v1
env:
GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}
with:
@@ -393,6 +387,6 @@ jobs:
./*.zip
./*.dmg
./*.exe
./*.appimage
./*.AppImage
./*.apk
./*.yml


@@ -15,7 +15,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Publish
uses: cloudflare/wrangler-action@v3.4.1
uses: cloudflare/wrangler-action@v3.4.0
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
accountId: ${{ secrets.CF_ACCOUNT_ID }}

.gitignore

@@ -79,6 +79,3 @@ lib
affine.db
apps/web/next-routes.conf
.nx
packages/frontend/templates/edgeless
packages/frontend/core/public/static/templates


@@ -1 +1,4 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
yarn lint-staged && yarn lint:ox

.nvmrc

@@ -1 +1 @@
20
18


@@ -16,7 +16,6 @@ packages/frontend/i18n/src/i18n-generated.ts
packages/frontend/graphql/src/graphql/index.ts
tests/affine-legacy/**/static
.yarnrc.yml
packages/frontend/templates/edgeless-templates.gen.ts
packages/frontend/templates/templates.gen.ts
packages/frontend/templates/onboarding

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@@ -12,4 +12,4 @@ npmPublishAccess: public
npmPublishRegistry: "https://registry.npmjs.org"
yarnPath: .yarn/releases/yarn-4.1.1.cjs
yarnPath: .yarn/releases/yarn-4.0.2.cjs

Cargo.lock

File diff suppressed because it is too large

README.md

@@ -5,89 +5,86 @@
Write, Draw and Plan All at Once
<br>
</h1>
<a href="https://affine.pro/download">
<img alt="affine logo" src="https://cdn.affine.pro/Github_hero_image1.png" style="width: 100%">
</a>
<br/>
<p align="center">
A privacy-focused, local-first, open-source, and ready-to-use alternative for Notion & Miro. <br />
One hyper-fused platform for wildly creative minds.
<p>
One hyper-fused platform for wildly creative minds. <br />
A privacy-focussed, local-first, open-source, and ready-to-use alternative for Notion & Miro.
</p>
<br/>
<br/>
<a href="https://www.producthunt.com/posts/affine-3?utm_source=badge-featured&utm_medium=badge&utm_souce=badge-affine&#0045;3" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/featured.svg?post_id=440671&theme=light" alt="AFFiNE - One&#0032;app&#0032;for&#0032;all&#0032;&#0045;&#0032;Where&#0032;Notion&#0032;meets&#0032;Miro | Product Hunt" style="width: 250px; height: 54px;" width="250" height="54" /></a>
<br/>
<br/>
</div>
<div align="center">
<a href="https://affine.pro">Home Page</a> |
<a href="https://discord.com/invite/yz6tGVsf5p">Discord</a> |
<a href="https://app.affine.pro">Live Demo</a> |
<a href="https://affine.pro/blog/">Blog</a> |
<a href="https://docs.affine.pro/docs/">Documentation</a>
</div>
<br/>
[![AFFiNE Web](<https://img.shields.io/badge/-Try%20It%20Online%20%E2%86%92-rgb(84,56,255)?style=flat-square&logoColor=white&logo=affine>)](https://app.affine.pro)
[![AFFiNE macOS M1/M2 Chip](https://img.shields.io/badge/-macOS_M_Chip%20%E2%86%92-black?style=flat-square&logo=apple&logoColor=white)](https://affine.pro/download)
[![AFFiNE macOS x64](https://img.shields.io/badge/-macOS_x86%20%E2%86%92-black?style=flat-square&logo=apple&logoColor=white)](https://affine.pro/download)
[![AFFiNE Window x64](https://img.shields.io/badge/-Windows%20%E2%86%92-blue?style=flat-square&logo=windows&logoColor=white)](https://affine.pro/download)
[![AFFiNE Linux](https://img.shields.io/badge/-Linux%20%E2%86%92-yellow?style=flat-square&logo=linux&logoColor=white)](https://affine.pro/download)
[![Releases](https://img.shields.io/github/downloads/toeverything/AFFiNE/total)](https://github.com/toeverything/AFFiNE/releases/latest)
[![stars-icon]](https://github.com/toeverything/AFFiNE)
[![All Contributors][all-contributors-badge]](#contributors)
[![codecov]](https://codecov.io/gh/toeverything/AFFiNE)
[![Node-version-icon]](https://nodejs.org/)
[![TypeScript-version-icon]](https://www.typescriptlang.org/)
[![React-version-icon]](https://reactjs.org/)
[![blocksuite-icon]](https://github.com/toeverything/blocksuite)
[![Rust-version-icon]](https://www.rust-lang.org/)
[![FOSSA Status](https://app.fossa.com/api/projects/git%2Bgithub.com%2Ftoeverything%2FAFFiNE.svg?type=shield)](https://app.fossa.com/projects/git%2Bgithub.com%2Ftoeverything%2FAFFiNE?ref=badge_shield)
[![Deploy](https://github.com/toeverything/AFFiNE/actions/workflows/deploy.yml/badge.svg)](https://github.com/toeverything/AFFiNE/actions/workflows/deploy.yml)
</div>
---
<div align="center">
<a href="http://affine.pro"><img src="https://img.shields.io/badge/-AFFiNE-06449d?style=social&logo=affine" height=25></a>
&nbsp;
<a href="https://community.affine.pro"><img src="https://img.shields.io/badge/-Community-424549?style=social&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAAXNJREFUWEftlitLRUEURtdVEVExWUx2qxgNVouoXYtNDP4Tw20WtftAsItZrHaTYBJREZ98MAc248wcZxi4CGfSeezHmm/23kyPAa/egPPTAXQK/FsFBP7ldVDRZoqcgO9I+2bHy3ZIJBfTCPCZM1tqAxwBmzUBrNQNbEx+5b0B5oEN4NCBrAMnMaiUAuPAs3HU82TLEZwBqwGbaJ4UgKQ8CFR6SoEl4LIWwCJwZQCegKkWBWLHVKSActvdzgG3DqitDf3/VQBskBDALrDnAKXUo3ueAF5KinAf2DKOmnzD7l214bdbA6hC1XHZNQa8hSBC0hwDa57xDHDvvvWB7ciOZoE79+8CWPbsBGc769eFxJdWIKcuyIdRoG3W7AAC1dJkHDIOo8B78+4rEBo8r4AkLFk6Jk3HaeDBBTgHVmIAfpJUz+cAFXVBreQCvQYW/lqEjV1NAMUMqpAaxQMHyDnjYtuS+0BxstwaqJooFqxToFPgB5FuPCEB6XK2AAAAAElFTkSuQmCC" height=25></a>
&nbsp;
<a href="https://discord.com/invite/yz6tGVsf5p"><img src="https://img.shields.io/badge/-Discord-424549?style=social&logo=discord" height=25></a>
&nbsp;
<a href="https://t.me/affineworkos"><img src="https://img.shields.io/badge/-Telegram-red?style=social&logo=telegram" height=25></a>
&nbsp;
<a href="https://twitter.com/AffineOfficial"><img src="https://img.shields.io/badge/-Twitter-red?style=social&logo=twitter" height=25></a>
&nbsp;
<a href="https://medium.com/@affineworkos"><img src="https://img.shields.io/badge/-Medium-red?style=social&logo=medium" height=25></a>
</div>
<br />
<div align="center">
<em>Docs, canvas and tables are hyper-merged with AFFiNE - just like the word affine (əˈfaɪn | a-fine).</em>
</div>
<br />
<div align="center">
<img src="https://github.com/toeverything/AFFiNE/assets/79301703/49a426bb-8d2b-4216-891a-fa5993642253" style="width: 100%"/>
</div>
![img_v2_37a7cc04-ab3f-4405-ae9a-f84ceb4c948g](https://user-images.githubusercontent.com/79301703/230892907-5fd5c0c5-1665-4d75-8a35-744e0afc36a5.gif)
## Join our community
Before we tell you how to get started with AFFiNE, we'd like to shamelessly plug our awesome user and developer communities across [official social platforms](https://community.affine.pro/c/start-here/)! Once you're familiar with using the software, maybe you will share your wisdom with others and even consider joining the [AFFiNE Ambassador program](https://community.affine.pro/c/start-here/affine-ambassador) to help spread AFFiNE to the world.
## Getting started & staying tuned with us.
Star us, and you will receive all release notifications from GitHub without any delay!
⚠️ Please note that AFFiNE is still under active development and is not yet ready for production use. ⚠️
<img src="https://user-images.githubusercontent.com/79301703/230891830-0110681e-8c7e-483b-b6d9-9e42b291b9ef.gif" style="width: 100%"/>
[![affine.pro](https://img.shields.io/static/v1?label=Try%20it%20Online&logo=affine&message=%E2%86%92&style=for-the-badge)](https://app.affine.pro) No installation or registration required! Head over to our website and try it out now.
## What is AFFiNE
[![community.affine.pro](https://img.shields.io/static/v1?label=Join%20the%20community&message=%E2%86%92&style=for-the-badge)](https://community.affine.pro) Our wonderful community, where you can meet and engage with the team, developers and other like-minded, enthusiastic users of AFFiNE.
AFFiNE is an open-source, all-in-one workspace and an operating system for all the building blocks that assemble your knowledge base and much more -- wiki, knowledge management, presentation and digital assets. It's a better alternative to Notion and Miro.
Star us, and you will receive all releases notifications from GitHub without any delay!
![rbU3YmmsQT](https://user-images.githubusercontent.com/79301703/230891830-0110681e-8c7e-483b-b6d9-9e42b291b9ef.gif)
## Features
**A true canvas for blocks in any form. Docs and whiteboard are now fully merged.**
- **Hyper merged** — Write, draw and plan all at once. Assemble any blocks you love on any canvas you like to enjoy seamless transitions between workflows with AFFiNE.
- **Privacy focussed** — AFFiNE is built with your privacy in mind and is one of our key concerns. We want you to keep control of your data, allowing you to store it as you like, where you like while still being able to freely edit and view your data on-demand.
- **Offline-first** — With your privacy in mind we also decided to go offline-first. This means that AFFiNE can be used offline, whether you want to view or edit, with support for conflict-free merging when you are back online.
- **Clean, intuitive design** — With AFFiNE you can concentrate on editing with a clean and modern interface. Which is responsive, so it looks great on tablets too, and mobile support is coming in the future.
- **Modern Block Editor with Markdown support** — A modern block editor can help you not only for docs, but slides and tables as well. When you write in AFFiNE you can use Markdown syntax which helps create an easier editing experience, that can be experienced with just a keyboard. And this allows you to export your data cleanly into Markdown.
- **Collaboration** — Whether you want to collaborate with yourself across multiple devices, or work together with others, support for collaboration and multiplayer is out-of-the-box, which makes it easy for teams to get started with AFFiNE.
- **Choice of multiple languages** — Thanks to community contributions AFFiNE offers support for multiple languages. If you don't find your language or would like to suggest some changes we welcome your contributions.
- Many editor apps claim to be a canvas for productivity, but AFFiNE is one of the very few which allows you to put any building block on an edgeless canvas -- rich text, sticky notes, any embedded web pages, multi-view databases, linked pages, shapes and even slides. We have it all.
**Multimodal AI partner ready to kick in any work**
- Write up a professional work report? Turn an outline into expressive and presentable slides? Summarize an article into a well-structured mindmap? Sort out your job plan and backlog of tasks? Or... draw and code prototype apps and web pages directly, all with one prompt? With you, AFFiNE AI pushes your creativity to the edge of your imagination.
**Local-first & Real-time collaborative**
- We love the local-first idea: you always own your data on your own disk, regardless of the cloud. Furthermore, AFFiNE supports real-time sync and collaboration on web and cross-platform clients.
**Self-host & Shape your own AFFiNE**
- You have the freedom to manage, self-host, fork and build your own AFFiNE. Plugin community and third-party blocks are coming soon, with more traction over at [Blocksuite](block-suite.com). Check out how to [self-host AFFiNE](https://docs.affine.pro/docs/self-host-affine-).
## Acknowledgement
“We shape our tools and thereafter our tools shape us”. A lot of pioneers have inspired us along the way, e.g.:
- Quip & Notion with their great concept of “everything is a block”
- Trello with their Kanban
- Airtable & Miro with their no-code programmable datasheets
- Miro & Whimsical with their edgeless visual whiteboard
- Remote & Capacities with their object-based tag system
There is a large overlap of atomic “building blocks” between these apps. They are not open source, nor do they have a plugin system like VS Code for contributors to customize. We want to build something that contains all the features we love and also goes one step further.
Thanks for checking us out, we appreciate your interest and sincerely hope that AFFiNE resonates with you! 🎵 Check https://affine.pro/ for more details.
![img_v2_3a4ee0da-6dd7-48cb-8f19-5411f86768ag](https://user-images.githubusercontent.com/79301703/230893796-dc707955-e4e5-4a42-a3c9-18d1ea754f6f.gif)
## Contributing
@@ -104,7 +101,7 @@ For **bug reports**, **feature requests** and other **suggestions** you can also
For **translation** and **language support** you can visit our [i18n General Space](https://community.affine.pro/c/i18n-general).
Looking for **other ways to contribute** and wondering where to start? Check out the [AFFiNE Ambassador program](https://community.affine.pro/c/start-here/affine-ambassador), we work closely with passionate community members and provide them with a wide range of support and resources.
Looking for **others ways to contribute** and wondering where to start? Check out the [AFFiNE Ambassador program](https://community.affine.pro/c/start-here/affine-ambassador), we work closely with passionate community members and provide them with a wide-range of support and resources.
If you have questions, you are welcome to contact us. One of the best places to get more info and learn more is in the [AFFiNE Community](https://community.affine.pro) where you can engage with other like-minded individuals.
@@ -120,7 +117,7 @@ If you have questions, you are welcome to contact us. One of the best places to
We would also like to give thanks to open-source projects that make AFFiNE possible:
- [Blocksuite](https://github.com/toeverything/BlockSuite) - 💠 BlockSuite is the open-source collaborative editor project behind AFFiNE.
- [blocksuite](https://github.com/toeverything/BlockSuite) - 💠 BlockSuite is the open-source collaborative editor project behind AFFiNE.
- [OctoBase](https://github.com/toeverything/OctoBase) - 🐙 OctoBase is the open-source database behind AFFiNE, local-first, yet collaborative. A light-weight, scalable, data engine written in Rust.
- [yjs](https://github.com/yjs/yjs) - Fundamental support of CRDTs for our implementation on state management and data sync.
- [electron](https://github.com/electron/electron) - Build cross-platform desktop apps with JavaScript, HTML, and CSS.
@@ -141,17 +138,44 @@ We would like to express our gratitude to all the individuals who have already c
<img alt="contributors" src="https://opencollective.com/affine/contributors.svg?width=890&button=false" />
</a>
## Data Compatibility
Data compatibility is very important to us. We do our best to ensure that data stays compatible with previous versions.
If you encounter any problems when upgrading, please feel free to [contact us](mailto:developer@toeverything.info).
| AFFiNE Version | Export/Import workspace | Data auto migration |
| --------------- | ----------------------- | ------------------- |
| <= 0.5.4 | ❌️ | ❌ |
| 0.6.x | ✅️ | ✅ |
| 0.7.x | ✅️ | ✅ |
| 0.8.x (current) | ✅ | ✅ |
| 0.9.x (next) | 🚧 | 🚧 |
- ❌️: Not compatible
- ✅: Compatible
- 🚧: Work in progress
## Self-Host
Begin with Docker to deploy your own feature-rich, unrestricted version of AFFiNE. Our team is diligently updating to the latest version. For more information on how to self-host AFFiNE, please refer to our [documentation](https://docs.affine.pro/docs/self-host-affine-).
> We know that the self-host version has been out of date for a long time.
>
> We are working hard to get this updated to the latest version, you can try our desktop version first.
Get started with Docker and deploy your own feature-rich, restriction-free deployment of AFFiNE.
We are working hard to get this updated to the latest version, you can keep an eye on the [latest packages].
## Hiring
Some amazing companies, including AFFiNE, are looking for developers! Are you interested in joining AFFiNE or its partners? Check out our Discord channel for some of the latest jobs available.
Some amazing companies including AFFiNE are looking for developers! Are you interested in helping build with AFFiNE and/or its partners? Check out some of the latest [jobs available].
## Upgrading
For upgrading information, please see our [update page].
## Feature Request
For feature requests, please see [community.affine.pro](https://community.affine.pro/c/feature-requests/).
For feature request, please see [community.affine.pro](https://community.affine.pro/c/feature-requests/).
## Building
@@ -179,6 +203,8 @@ Thanks to [Chromatic](https://www.chromatic.com/) for providing the visual testi
See [LICENSE] for details.
[![FOSSA Status](https://app.fossa.com/api/projects/git%2Bgithub.com%2Ftoeverything%2FAFFiNE.svg?type=large)](https://app.fossa.com/projects/git%2Bgithub.com%2Ftoeverything%2FAFFiNE?ref=badge_large)
[all-contributors-badge]: https://img.shields.io/github/contributors/toeverything/AFFiNE
[license]: ./LICENSE
[building.md]: ./docs/BUILDING.md
@@ -186,7 +212,7 @@ See [LICENSE] for details.
[jobs available]: ./docs/jobs.md
[latest packages]: https://github.com/toeverything/AFFiNE/pkgs/container/affine-self-hosted
[contributor license agreement]: https://github.com/toeverything/affine/edit/canary/.github/CLA.md
[rust-version-icon]: https://img.shields.io/badge/Rust-1.77.0-dea584
[rust-version-icon]: https://img.shields.io/badge/Rust-1.75.0-dea584
[stars-icon]: https://img.shields.io/github/stars/toeverything/AFFiNE.svg?style=flat&logo=github&colorB=red&label=stars
[codecov]: https://codecov.io/gh/toeverything/affine/branch/canary/graphs/badge.svg?branch=canary
[node-version-icon]: https://img.shields.io/badge/node-%3E=18.16.1-success


@@ -1,29 +0,0 @@
# Security Policy
## Supported Versions
We recommend that users always use the latest major version. Security updates are provided for the current major version until the next major version is released.
| Version | Supported |
| --------------- | ------------------ |
| 0.13.x (stable) | :white_check_mark: |
| < 0.13.x | :x: |
## Reporting a Vulnerability
We welcome bug reports via email at [security@toeverything.info](mailto:security@toeverything.info). We expect your report to contain at least the following so that we can evaluate and reproduce the issue:
1. The platform and version used, for example:
- macos arm64 0.12.0-canary-202402220729-0868ac6
- app.affine.pro 0.12.0-canary-202402220729-0868ac6
2. A set of videos or screenshots showing the reproduction steps that prove you successfully exploited the vulnerability, preferably including the time and software version of the successful exploit.
3. Your classification or analysis of the vulnerability (optional)
Since we are an open source project, we also welcome you to provide corresponding fix PRs.
We will provide bounties for vulnerabilities involving user information leakage, permission leakage, and unauthorized code execution. For other types of vulnerabilities, we will determine specific rewards based on the evaluation results.
If the vulnerability is caused by a library we depend on, we encourage you to submit a security report to the corresponding dependent library at the same time to benefit more users.


@@ -29,7 +29,7 @@ It includes the global constants, browser and system check.
This package should be imported at the very beginning of the entry point.
### `@affine/workspace-impl`
### `@affine/workspace`
Currently we have two workspace plugins:


@@ -49,24 +49,28 @@ postgres=# \du
### Set the following config to `packages/backend/server/.env`
In the following setup, we assume you have a Postgres server running at localhost:5432 and MailHog running at localhost:1025.
When logging in via email, you will see the mail arrive at localhost:8025 in a browser.
```
DATABASE_URL="postgresql://affine:affine@localhost:5432/affine"
MAILER_SENDER="noreply@toeverything.info"
MAILER_USER="auth"
MAILER_PASSWORD="auth"
MAILER_HOST="localhost"
MAILER_PORT="1025"
NEXTAUTH_URL="http://localhost:8080/"
```
You may need additional env variables for auth login. If you are not part of the AFFiNE team, use your own credentials.
For email login & password, please refer to https://nodemailer.com/usage/using-gmail/
```
OAUTH_EMAIL_SENDER=
OAUTH_EMAIL_LOGIN=
OAUTH_EMAIL_PASSWORD=
OAUTH_GOOGLE_ENABLED="true"
OAUTH_GOOGLE_CLIENT_ID=
OAUTH_GOOGLE_CLIENT_SECRET=
```
## Prepare prisma
```
yarn workspace @affine/server prisma db push
yarn workspace @affine/server data-migration run
```
Note: you may need to run these again if the db schema changes.


@@ -7,9 +7,9 @@
"dev": "nodemon --exec 'typedoc --options ../../typedoc.json' & serve dist/"
},
"devDependencies": {
"nodemon": "^3.1.0",
"nodemon": "^3.0.1",
"serve": "^14.2.1",
"typedoc": "^0.25.8"
"typedoc": "^0.25.4"
},
"nodemonConfig": {
"watch": [
@@ -19,5 +19,5 @@
],
"ext": "ts,md,json"
},
"version": "0.14.0"
"version": "0.10.3-canary.2"
}


@@ -1,13 +1,12 @@
{
"$schema": "./node_modules/nx/schemas/nx-schema.json",
"npmScope": "toeverything",
"nxCloudAccessToken": "MzUwNTU4YWItZGFhYi00YjE2LWIxODAtODk4NmIwYjMwYzZkfHJlYWQ=",
"tasksRunnerOptions": {
"default": {
"runner": "nx-cloud",
"options": {
"cacheableOperations": ["build", "test", "e2e", "lint"],
"runtimeCacheInputs": ["node -v"]
"accessToken": "YmQ2NTg1ODktZTk5Mi00YzhiLTk2ZmUtNWQzMDg0NDBkOWM3fHJlYWQtb25seQ=="
}
}
},


@@ -1,6 +1,6 @@
{
"name": "@affine/monorepo",
"version": "0.14.0",
"version": "0.11.0",
"private": true,
"author": "toeverything",
"license": "MIT",
@@ -14,37 +14,36 @@
"tests/affine-legacy/*"
],
"engines": {
"node": "<21.0.0"
"node": ">=18.16.1 <19.0.0"
},
"scripts": {
"dev": "yarn workspace @affine/cli dev",
"dev": "dev-core",
"dev:electron": "yarn workspace @affine/electron dev",
"build": "yarn nx build @affine/web",
"build": "yarn nx build @affine/core",
"build:electron": "yarn nx build @affine/electron",
"build:storage": "yarn nx run-many -t build -p @affine/storage",
"build:storybook": "yarn nx build @affine/storybook",
"start:web-static": "yarn workspace @affine/web static-server",
"start:web-static": "yarn workspace @affine/core static-server",
"start:storybook": "yarn exec serve tests/storybook/storybook-static -l 6006",
"serve:test-static": "yarn exec serve tests/fixtures --cors -p 8081",
"lint:eslint": "cross-env NODE_OPTIONS=\"--max-old-space-size=8192\" eslint . --ext .js,mjs,.ts,.tsx --cache",
"lint:eslint": "eslint . --ext .js,mjs,.ts,.tsx --cache",
"lint:eslint:fix": "yarn lint:eslint --fix",
"lint:prettier": "prettier --ignore-unknown --cache --check .",
"lint:prettier:fix": "prettier --ignore-unknown --cache --write .",
"lint:ox": "oxlint --import-plugin --deny-warnings -D correctness -D nursery -D prefer-array-some -D no-useless-promise-resolve-reject -D perf -A no-undef -A consistent-type-exports -A default -A named -A ban-ts-comment -A export -A no-unresolved -A no-default-export -A no-duplicates -A no-side-effects-in-initialization -A no-named-as-default -A getter-return",
"lint:ox": "oxlint --import-plugin --deny-warnings -D correctness -D nursery -D prefer-array-some -D no-useless-promise-resolve-reject -D perf -A no-undef -A consistent-type-exports -A default -A named -A ban-ts-comment -A export",
"lint": "yarn lint:eslint && yarn lint:prettier",
"lint:fix": "yarn lint:eslint:fix && yarn lint:prettier:fix",
"test": "vitest --run",
"test:ui": "vitest --ui",
"test:coverage": "vitest run --coverage",
"typecheck": "tsc -b tsconfig.json",
"postinstall": "node ./scripts/check-version.mjs && yarn i18n-codegen gen && yarn husky install",
"prepare": "husky"
"typecheck": "tsc -b tsconfig.json --diagnostics",
"postinstall": "node ./scripts/check-version.mjs && yarn i18n-codegen gen && yarn husky install"
},
"lint-staged": {
"*": "prettier --write --ignore-unknown --cache",
"*.{ts,tsx,mjs,js,jsx}": [
"prettier --ignore-unknown --write",
"cross-env NODE_OPTIONS=\"--max-old-space-size=8192\" eslint --cache --fix"
"eslint --cache --fix"
],
"*.toml": [
"taplo format"
@@ -56,62 +55,63 @@
"devDependencies": {
"@affine-test/kit": "workspace:*",
"@affine/cli": "workspace:*",
"@commitlint/cli": "^19.0.0",
"@commitlint/config-conventional": "^19.0.0",
"@faker-js/faker": "^8.4.1",
"@commitlint/cli": "^18.4.3",
"@commitlint/config-conventional": "^18.4.3",
"@faker-js/faker": "^8.3.1",
"@istanbuljs/schema": "^0.1.3",
"@magic-works/i18n-codegen": "^0.5.0",
"@nx/vite": "18.1.2",
"@playwright/test": "^1.41.2",
"@taplo/cli": "^0.7.0",
"@testing-library/react": "^14.2.1",
"@nx/vite": "17.2.8",
"@perfsee/sdk": "^1.9.0",
"@playwright/test": "^1.40.0",
"@taplo/cli": "^0.5.2",
"@testing-library/react": "^14.1.2",
"@toeverything/infra": "workspace:*",
"@types/affine__env": "workspace:*",
"@types/eslint": "^8.56.3",
"@types/node": "^20.11.20",
"@typescript-eslint/eslint-plugin": "^7.0.2",
"@typescript-eslint/parser": "^7.0.2",
"@vanilla-extract/vite-plugin": "^4.0.4",
"@vanilla-extract/webpack-plugin": "^2.3.6",
"@vitejs/plugin-react-swc": "^3.6.0",
"@vitest/coverage-istanbul": "1.4.0",
"@vitest/ui": "1.4.0",
"cross-env": "^7.0.3",
"electron": "^29.0.1",
"eslint": "^8.56.0",
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-import-x": "^0.4.1",
"@types/eslint": "^8.44.7",
"@types/node": "^20.9.3",
"@typescript-eslint/eslint-plugin": "^6.13.1",
"@typescript-eslint/parser": "^6.13.1",
"@vanilla-extract/vite-plugin": "^3.9.2",
"@vanilla-extract/webpack-plugin": "^2.3.1",
"@vitejs/plugin-react-swc": "^3.5.0",
"@vitest/coverage-istanbul": "1.1.3",
"@vitest/ui": "1.1.3",
"electron": "^27.1.0",
"eslint": "^8.54.0",
"eslint-config-prettier": "^9.0.0",
"eslint-plugin-i": "^2.29.0",
"eslint-plugin-react": "^7.33.2",
"eslint-plugin-react-hooks": "^4.6.0",
"eslint-plugin-rxjs": "^5.0.3",
"eslint-plugin-simple-import-sort": "^12.0.0",
"eslint-plugin-sonarjs": "^0.24.0",
"eslint-plugin-unicorn": "^51.0.1",
"eslint-plugin-unused-imports": "^3.1.0",
"eslint-plugin-vue": "^9.22.0",
"eslint-plugin-simple-import-sort": "^10.0.0",
"eslint-plugin-sonarjs": "^0.23.0",
"eslint-plugin-unicorn": "^50.0.0",
"eslint-plugin-unused-imports": "^3.0.0",
"eslint-plugin-vue": "^9.18.1",
"fake-indexeddb": "5.0.2",
"happy-dom": "^14.0.0",
"husky": "^9.0.11",
"lint-staged": "^15.2.2",
"msw": "^2.2.1",
"nanoid": "^5.0.6",
"nx": "^18.0.4",
"happy-dom": "^12.10.3",
"husky": "^8.0.3",
"lint-staged": "^15.1.0",
"msw": "^2.0.8",
"nanoid": "^5.0.3",
"nx": "^17.1.3",
"nx-cloud": "^16.5.2",
"nyc": "^15.1.0",
"oxlint": "0.2.14",
"prettier": "^3.2.5",
"semver": "^7.6.0",
"oxlint": "0.0.22",
"prettier": "^3.1.0",
"semver": "^7.5.4",
"serve": "^14.2.1",
"string-width": "^7.1.0",
"ts-node": "^10.9.2",
"typescript": "^5.3.3",
"vite": "^5.1.4",
"vite-plugin-istanbul": "^6.0.0",
"vite-plugin-static-copy": "^1.0.1",
"vitest": "1.4.0",
"string-width": "^7.0.0",
"ts-node": "^10.9.1",
"typescript": "^5.3.2",
"vite": "^5.0.6",
"vite-plugin-istanbul": "^5.0.0",
"vite-plugin-static-copy": "^1.0.0",
"vite-tsconfig-paths": "^4.2.1",
"vitest": "1.1.3",
"vitest-fetch-mock": "^0.2.2",
"vitest-mock-extended": "^1.3.1"
},
"packageManager": "yarn@4.1.1",
"packageManager": "yarn@4.0.2",
"resolutions": {
"vite": "^5.0.6",
"array-buffer-byte-length": "npm:@nolyfill/array-buffer-byte-length@latest",
@@ -169,8 +169,9 @@
"unbox-primitive": "npm:@nolyfill/unbox-primitive@latest",
"which-boxed-primitive": "npm:@nolyfill/which-boxed-primitive@latest",
"which-typed-array": "npm:@nolyfill/which-typed-array@latest",
"@reforged/maker-appimage/@electron-forge/maker-base": "7.3.0",
"macos-alias": "npm:@napi-rs/macos-alias@0.0.4",
"next-auth@^4.24.5": "patch:next-auth@npm%3A4.24.5#~/.yarn/patches/next-auth-npm-4.24.5-8428e11927.patch",
"@reforged/maker-appimage/@electron-forge/maker-base": "7.2.0",
"macos-alias": "npm:macos-alias-building@latest",
"fs-xattr": "npm:@napi-rs/xattr@latest",
"@radix-ui/react-dialog": "npm:@radix-ui/react-dialog@latest"
}


@@ -1,4 +1,8 @@
# AFFINE_SERVER_PORT=3010
# AFFINE_SERVER_HOST=app.affine.pro
# AFFINE_SERVER_HTTPS=true
# DATABASE_URL="postgres://affine@localhost:5432/affine"
DATABASE_URL="postgresql://affine@localhost:5432/affine"
NEXTAUTH_URL="http://localhost:8080"
OAUTH_EMAIL_SENDER="noreply@toeverything.info"
OAUTH_EMAIL_LOGIN=""
OAUTH_EMAIL_PASSWORD=""
ENABLE_LOCAL_EMAIL="true"
STRIPE_API_KEY=
STRIPE_WEBHOOK_KEY=


@@ -1,70 +0,0 @@
-- DropForeignKey
ALTER TABLE "accounts" DROP CONSTRAINT "accounts_user_id_fkey";
-- DropForeignKey
ALTER TABLE "sessions" DROP CONSTRAINT "sessions_user_id_fkey";
-- CreateTable
CREATE TABLE "user_connected_accounts" (
"id" VARCHAR(36) NOT NULL,
"user_id" VARCHAR(36) NOT NULL,
"provider" VARCHAR NOT NULL,
"provider_account_id" VARCHAR NOT NULL,
"scope" TEXT,
"access_token" TEXT,
"refresh_token" TEXT,
"expires_at" TIMESTAMPTZ(6),
"created_at" TIMESTAMPTZ(6) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updated_at" TIMESTAMPTZ(6) NOT NULL,
CONSTRAINT "user_connected_accounts_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "multiple_users_sessions" (
"id" VARCHAR(36) NOT NULL,
"expires_at" TIMESTAMPTZ(6),
"created_at" TIMESTAMPTZ(6) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "multiple_users_sessions_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "user_sessions" (
"id" VARCHAR(36) NOT NULL,
"session_id" VARCHAR(36) NOT NULL,
"user_id" VARCHAR(36) NOT NULL,
"expires_at" TIMESTAMPTZ(6),
"created_at" TIMESTAMPTZ(6) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "user_sessions_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "verification_tokens" (
"token" VARCHAR(36) NOT NULL,
"type" SMALLINT NOT NULL,
"credential" TEXT,
"expiresAt" TIMESTAMPTZ(6) NOT NULL
);
-- CreateIndex
CREATE INDEX "user_connected_accounts_user_id_idx" ON "user_connected_accounts"("user_id");
-- CreateIndex
CREATE INDEX "user_connected_accounts_provider_account_id_idx" ON "user_connected_accounts"("provider_account_id");
-- CreateIndex
CREATE UNIQUE INDEX "user_sessions_session_id_user_id_key" ON "user_sessions"("session_id", "user_id");
-- CreateIndex
CREATE UNIQUE INDEX "verification_tokens_type_token_key" ON "verification_tokens"("type", "token");
-- AddForeignKey
ALTER TABLE "user_connected_accounts" ADD CONSTRAINT "user_connected_accounts_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "user_sessions" ADD CONSTRAINT "user_sessions_session_id_fkey" FOREIGN KEY ("session_id") REFERENCES "multiple_users_sessions"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "user_sessions" ADD CONSTRAINT "user_sessions_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;


@@ -1,2 +0,0 @@
-- AlterTable
ALTER TABLE "users" ADD COLUMN "registered" BOOLEAN NOT NULL DEFAULT true;


@@ -1,11 +0,0 @@
/*
Warnings:
- A unique constraint covering the columns `[user_id,plan]` on the table `user_subscriptions` will be added. If there are existing duplicate values, this will fail.
*/
-- DropIndex
DROP INDEX "user_subscriptions_user_id_key";
-- CreateIndex
CREATE UNIQUE INDEX "user_subscriptions_user_id_plan_key" ON "user_subscriptions"("user_id", "plan");


@@ -1,7 +1,7 @@
{
"name": "@affine/server",
"private": true,
"version": "0.14.0",
"version": "0.11.0",
"description": "Affine Node.js server",
"type": "module",
"bin": {
@@ -9,116 +9,109 @@
},
"scripts": {
"build": "tsc",
"start": "node --loader ts-node/esm/transpile-only.mjs ./src/index.ts",
"start": "node --loader ts-node/esm/transpile-only.mjs --es-module-specifier-resolution node ./src/index.ts",
"dev": "nodemon ./src/index.ts",
"test": "ava --concurrency 1 --serial",
"test:coverage": "c8 ava --concurrency 1 --serial",
"postinstall": "prisma generate",
"data-migration": "node --loader ts-node/esm/transpile-only.mjs ./src/data/index.ts",
"predeploy": "yarn prisma migrate deploy && node --import ./scripts/register.js ./dist/data/index.js run"
"data-migration": "node --loader ts-node/esm/transpile-only.mjs --es-module-specifier-resolution node ./src/data/app.ts",
"predeploy": "yarn prisma migrate deploy && node --es-module-specifier-resolution node ./dist/data/app.js run"
},
"dependencies": {
"@apollo/server": "^4.10.0",
"@auth/prisma-adapter": "^1.4.0",
"@aws-sdk/client-s3": "^3.536.0",
"@apollo/server": "^4.9.5",
"@auth/prisma-adapter": "^1.0.7",
"@aws-sdk/client-s3": "^3.454.0",
"@google-cloud/opentelemetry-cloud-monitoring-exporter": "^0.17.0",
"@google-cloud/opentelemetry-cloud-trace-exporter": "^2.1.0",
"@google-cloud/opentelemetry-resource-util": "^2.1.0",
"@keyv/redis": "^2.8.4",
"@nestjs/apollo": "^12.1.0",
"@nestjs/common": "^10.3.3",
"@nestjs/core": "^10.3.3",
"@nestjs/event-emitter": "^2.0.4",
"@nestjs/graphql": "^12.1.1",
"@nestjs/platform-express": "^10.3.3",
"@nestjs/platform-socket.io": "^10.3.3",
"@nestjs/schedule": "^4.0.1",
"@nestjs/serve-static": "^4.0.1",
"@keyv/redis": "^2.8.0",
"@nestjs/apollo": "^12.0.11",
"@nestjs/common": "^10.2.10",
"@nestjs/core": "^10.2.10",
"@nestjs/event-emitter": "^2.0.3",
"@nestjs/graphql": "^12.0.11",
"@nestjs/platform-express": "^10.2.10",
"@nestjs/platform-socket.io": "^10.2.10",
"@nestjs/schedule": "^4.0.0",
"@nestjs/throttler": "^5.0.1",
"@nestjs/websockets": "^10.3.3",
"@node-rs/argon2": "^1.7.2",
"@node-rs/crc32": "^1.9.2",
"@node-rs/jsonwebtoken": "^0.5.0",
"@nestjs/websockets": "^10.2.10",
"@node-rs/argon2": "^1.5.2",
"@node-rs/crc32": "^1.7.2",
"@node-rs/jsonwebtoken": "^0.3.0",
"@opentelemetry/api": "^1.7.0",
"@opentelemetry/core": "^1.21.0",
"@opentelemetry/exporter-prometheus": "^0.49.0",
"@opentelemetry/exporter-zipkin": "^1.21.0",
"@opentelemetry/host-metrics": "^0.35.0",
"@opentelemetry/instrumentation": "^0.49.0",
"@opentelemetry/instrumentation-graphql": "^0.38.0",
"@opentelemetry/instrumentation-http": "^0.49.0",
"@opentelemetry/instrumentation-ioredis": "^0.38.0",
"@opentelemetry/instrumentation-nestjs-core": "^0.35.0",
"@opentelemetry/instrumentation-socket.io": "^0.37.0",
"@opentelemetry/resources": "^1.21.0",
"@opentelemetry/sdk-metrics": "^1.21.0",
"@opentelemetry/sdk-node": "^0.49.0",
"@opentelemetry/sdk-trace-node": "^1.21.0",
"@opentelemetry/semantic-conventions": "^1.21.0",
"@prisma/client": "^5.10.2",
"@prisma/instrumentation": "^5.10.2",
"@opentelemetry/core": "^1.19.0",
"@opentelemetry/exporter-prometheus": "^0.46.0",
"@opentelemetry/exporter-zipkin": "^1.19.0",
"@opentelemetry/host-metrics": "^0.34.0",
"@opentelemetry/instrumentation": "^0.46.0",
"@opentelemetry/instrumentation-graphql": "^0.36.0",
"@opentelemetry/instrumentation-http": "^0.46.0",
"@opentelemetry/instrumentation-ioredis": "^0.36.0",
"@opentelemetry/instrumentation-nestjs-core": "^0.33.3",
"@opentelemetry/instrumentation-socket.io": "^0.35.0",
"@opentelemetry/resources": "^1.19.0",
"@opentelemetry/sdk-metrics": "^1.19.0",
"@opentelemetry/sdk-node": "^0.46.0",
"@opentelemetry/sdk-trace-node": "^1.19.0",
"@prisma/client": "^5.7.1",
"@prisma/instrumentation": "^5.7.1",
"@socket.io/redis-adapter": "^8.2.1",
"cookie-parser": "^1.4.6",
"dotenv": "^16.4.5",
"dotenv-cli": "^7.3.0",
"dotenv": "^16.3.1",
"express": "^4.18.2",
"file-type": "^19.0.0",
"get-stream": "^9.0.0",
"get-stream": "^8.0.1",
"graphql": "^16.8.1",
"graphql-scalars": "^1.22.4",
"graphql-type-json": "^0.3.2",
"graphql-upload": "^16.0.2",
"ioredis": "^5.3.2",
"keyv": "^4.5.4",
"lodash-es": "^4.17.21",
"mixpanel": "^0.18.0",
"nanoid": "^5.0.6",
"nest-commander": "^3.12.5",
"nanoid": "^5.0.3",
"nest-commander": "^3.12.2",
"nestjs-throttler-storage-redis": "^0.4.1",
"nodemailer": "^6.9.10",
"next-auth": "^4.24.5",
"nodemailer": "^6.9.7",
"on-headers": "^1.0.2",
"parse-duration": "^1.1.0",
"pretty-time": "^1.1.0",
"prisma": "^5.10.2",
"prom-client": "^15.1.0",
"reflect-metadata": "^0.2.1",
"prisma": "^5.7.1",
"prom-client": "^15.0.0",
"reflect-metadata": "^0.2.0",
"rxjs": "^7.8.1",
"semver": "^7.6.0",
"socket.io": "^4.7.4",
"stripe": "^14.18.0",
"ts-node": "^10.9.2",
"typescript": "^5.3.3",
"ws": "^8.16.0",
"yjs": "^13.6.12",
"semver": "^7.5.4",
"socket.io": "^4.7.2",
"stripe": "^14.5.0",
"ws": "^8.14.2",
"yjs": "^13.6.10",
"zod": "^3.22.4"
},
"devDependencies": {
"@affine-test/kit": "workspace:*",
"@affine/storage": "workspace:*",
"@napi-rs/image": "^1.9.1",
"@nestjs/testing": "^10.3.3",
"@napi-rs/image": "^1.7.0",
"@nestjs/testing": "^10.2.10",
"@types/cookie-parser": "^1.4.6",
"@types/engine.io": "^3.1.10",
"@types/express": "^4.17.21",
"@types/graphql-upload": "^16.0.7",
"@types/graphql-upload": "^16.0.5",
"@types/keyv": "^4.2.0",
"@types/lodash-es": "^4.17.12",
"@types/mixpanel": "^2.14.8",
"@types/node": "^20.11.20",
"@types/lodash-es": "^4.17.11",
"@types/node": "^20.9.3",
"@types/nodemailer": "^6.4.14",
"@types/on-headers": "^1.0.3",
"@types/pretty-time": "^1.1.5",
"@types/sinon": "^17.0.3",
"@types/supertest": "^6.0.2",
"@types/sinon": "^17.0.2",
"@types/supertest": "^6.0.0",
"@types/ws": "^8.5.10",
"ava": "^6.1.1",
"c8": "^9.1.0",
"nodemon": "^3.1.0",
"ava": "^6.0.0",
"c8": "^9.0.0",
"nodemon": "^3.0.1",
"sinon": "^17.0.1",
"supertest": "^6.3.4"
"supertest": "^6.3.3",
"ts-node": "^10.9.1",
"typescript": "^5.3.2"
},
"ava": {
"timeout": "1m",
"extensions": {
"ts": "module"
},
@@ -127,7 +120,8 @@
"--trace-sigint",
"--loader",
"ts-node/esm/transpile-only.mjs",
"--es-module-specifier-resolution=node"
"--es-module-specifier-resolution",
"node"
],
"files": [
"tests/**/*.spec.ts",
@@ -139,13 +133,11 @@
"environmentVariables": {
"TS_NODE_PROJECT": "./tests/tsconfig.json",
"NODE_ENV": "test",
"MAILER_HOST": "0.0.0.0",
"MAILER_PORT": "1025",
"MAILER_USER": "noreply@toeverything.info",
"MAILER_PASSWORD": "affine",
"MAILER_SENDER": "noreply@toeverything.info",
"FEATURES_EARLY_ACCESS_PREVIEW": "false",
"DEPLOYMENT_TYPE": "affine"
"ENABLE_LOCAL_EMAIL": "true",
"OAUTH_EMAIL_LOGIN": "noreply@toeverything.info",
"OAUTH_EMAIL_PASSWORD": "affine",
"OAUTH_EMAIL_SENDER": "noreply@toeverything.info",
"FEATURES_EARLY_ACCESS_PREVIEW": "false"
}
},
"nodemonConfig": {
@@ -154,7 +146,8 @@
"nodeArgs": [
"--loader",
"ts-node/esm.mjs",
"--es-module-specifier-resolution=node"
"--es-module-specifier-resolution",
"node"
],
"ignore": [
"**/__tests__/**",
@@ -163,6 +156,7 @@
"env": {
"TS_NODE_TRANSPILE_ONLY": true,
"TS_NODE_PROJECT": "./tsconfig.json",
"NODE_ENV": "development",
"DEBUG": "affine:*",
"FORCE_COLOR": true,
"DEBUG_COLORS": true

View File

@@ -10,83 +10,28 @@ datasource db {
}
model User {
id String @id @default(uuid()) @db.VarChar
name String
email String @unique
emailVerifiedAt DateTime? @map("email_verified")
avatarUrl String? @map("avatar_url") @db.VarChar
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
id String @id @default(uuid()) @db.VarChar
name String
email String @unique
emailVerified DateTime? @map("email_verified")
// image field is for the next-auth
avatarUrl String? @map("avatar_url") @db.VarChar
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
/// Not available if user signed up through OAuth providers
password String? @db.VarChar
/// Indicate whether the user finished the signup progress.
/// for example, the value will be false if user never registered and invited into a workspace by others.
registered Boolean @default(true)
password String? @db.VarChar
accounts Account[]
sessions Session[]
features UserFeatures[]
customer UserStripeCustomer?
subscriptions UserSubscription[]
subscription UserSubscription?
invoices UserInvoice[]
workspacePermissions WorkspaceUserPermission[]
pagePermissions WorkspacePageUserPermission[]
connectedAccounts ConnectedAccount[]
sessions UserSession[]
@@map("users")
}
model ConnectedAccount {
id String @id @default(uuid()) @db.VarChar(36)
userId String @map("user_id") @db.VarChar(36)
provider String @db.VarChar
providerAccountId String @map("provider_account_id") @db.VarChar
scope String? @db.Text
accessToken String? @map("access_token") @db.Text
refreshToken String? @map("refresh_token") @db.Text
expiresAt DateTime? @map("expires_at") @db.Timestamptz(6)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(6)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
@@index([userId])
@@index([providerAccountId])
@@map("user_connected_accounts")
}
model Session {
id String @id @default(uuid()) @db.VarChar(36)
expiresAt DateTime? @map("expires_at") @db.Timestamptz(6)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
userSessions UserSession[]
@@map("multiple_users_sessions")
}
model UserSession {
id String @id @default(uuid()) @db.VarChar(36)
sessionId String @map("session_id") @db.VarChar(36)
userId String @map("user_id") @db.VarChar(36)
expiresAt DateTime? @map("expires_at") @db.Timestamptz(6)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
session Session @relation(fields: [sessionId], references: [id], onDelete: Cascade)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
@@unique([sessionId, userId])
@@map("user_sessions")
}
model VerificationToken {
token String @db.VarChar(36)
type Int @db.SmallInt
credential String? @db.Text
expiresAt DateTime @db.Timestamptz(6)
@@unique([type, token])
@@map("verification_tokens")
}
model Workspace {
id String @id @default(uuid()) @db.VarChar
public Boolean
@@ -241,7 +186,7 @@ model Features {
@@map("features")
}
model DeprecatedNextAuthAccount {
model Account {
id String @id @default(cuid())
userId String @map("user_id")
type String
@@ -255,20 +200,23 @@ model DeprecatedNextAuthAccount {
id_token String? @db.Text
session_state String?
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
@@unique([provider, providerAccountId])
@@map("accounts")
}
model DeprecatedNextAuthSession {
model Session {
id String @id @default(cuid())
sessionToken String @unique @map("session_token")
userId String @map("user_id")
expires DateTime
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
@@map("sessions")
}
model DeprecatedNextAuthVerificationToken {
model VerificationToken {
identifier String
token String @unique
expires DateTime
@@ -317,9 +265,7 @@ model Snapshot {
seq Int @default(0) @db.Integer
state Bytes? @db.ByteA
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(6)
// the `updated_at` field will not record the time of record changed,
// but the created time of last seen update that has been merged into snapshot.
updatedAt DateTime @map("updated_at") @db.Timestamptz(6)
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(6)
@@id([id, workspaceId])
@@map("snapshots")
@@ -369,7 +315,7 @@ model UserStripeCustomer {
model UserSubscription {
id Int @id @default(autoincrement()) @db.Integer
userId String @map("user_id") @db.VarChar(36)
userId String @unique @map("user_id") @db.VarChar(36)
plan String @db.VarChar(20)
// yearly/monthly
recurring String @db.VarChar(20)
@@ -395,7 +341,6 @@ model UserSubscription {
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(6)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
@@unique([userId, plan])
@@map("user_subscriptions")
}


@@ -0,0 +1,37 @@
import userA from '@affine-test/fixtures/userA.json' assert { type: 'json' };
import { hash } from '@node-rs/argon2';
import { PrismaClient } from '@prisma/client';
const prisma = new PrismaClient();
async function main() {
await prisma.user.create({
data: {
...userA,
password: await hash(userA.password),
features: {
create: {
reason: 'created by api sign up',
activated: true,
feature: {
connect: {
feature_version: {
feature: 'free_plan_v1',
version: 1,
},
},
},
},
},
},
});
}
main()
.then(async () => {
await prisma.$disconnect();
})
.catch(async e => {
console.error(e);
await prisma.$disconnect();
process.exit(1);
});


@@ -1,11 +0,0 @@
import { create, createEsmHooks } from 'ts-node';
const service = create({
experimentalSpecifierResolution: 'node',
transpileOnly: true,
logError: true,
skipProject: true,
});
const hooks = createEsmHooks(service);
export const resolve = hooks.resolve;


@@ -1,4 +0,0 @@
import { register } from 'node:module';
import { pathToFileURL } from 'node:url';
register('./scripts/loader.js', pathToFileURL('./'));


@@ -1,75 +0,0 @@
import { execSync } from 'node:child_process';
import { generateKeyPairSync } from 'node:crypto';
import fs from 'node:fs';
import path from 'node:path';
import { parse } from 'dotenv';
const SELF_HOST_CONFIG_DIR = '/root/.affine/config';
/**
* @type {Array<{ from: string; to?: string, modifier?: (content: string): string }>}
*/
const configFiles = [
{ from: './.env.example', to: '.env' },
{ from: './dist/config/affine.js', modifier: configCleaner },
{ from: './dist/config/affine.env.js', modifier: configCleaner },
];
function configCleaner(content) {
return content.replace(
/(^\/\/#.*$)|(^\/\/\s+TODO.*$)|("use\sstrict";?)|(^.*eslint-disable.*$)/gm,
''
);
}
function prepare() {
fs.mkdirSync(SELF_HOST_CONFIG_DIR, { recursive: true });
for (const { from, to, modifier } of configFiles) {
const targetFileName = to ?? path.parse(from).base;
const targetFilePath = path.join(SELF_HOST_CONFIG_DIR, targetFileName);
if (!fs.existsSync(targetFilePath)) {
console.log(`creating config file [${targetFilePath}].`);
if (modifier) {
const content = fs.readFileSync(from, 'utf-8');
fs.writeFileSync(targetFilePath, modifier(content), 'utf-8');
} else {
fs.cpSync(from, targetFilePath, {
force: false,
});
}
}
// make the default .env
if (to === '.env') {
const dotenvFile = fs.readFileSync(targetFilePath, 'utf-8');
const envs = parse(dotenvFile);
// generate a new private key
if (!envs.AFFINE_PRIVATE_KEY) {
const privateKey = generateKeyPairSync('ec', {
namedCurve: 'prime256v1',
}).privateKey.export({
type: 'sec1',
format: 'pem',
});
fs.writeFileSync(
targetFilePath,
`AFFINE_PRIVATE_KEY=${privateKey}\n` + dotenvFile
);
}
}
}
}
function runPredeployScript() {
console.log('running predeploy script.');
execSync('yarn predeploy', {
encoding: 'utf-8',
env: process.env,
stdio: 'inherit',
});
}
prepare();
runPredeployScript();


@@ -0,0 +1,25 @@
/* eslint-disable @typescript-eslint/no-non-null-assertion */
// Custom configurations
const env = process.env;
const node = AFFiNE.node;
// TODO: may be separate config overring in `affine.[env].config`?
if (node.prod && env.R2_OBJECT_STORAGE_ACCOUNT_ID) {
AFFiNE.storage.providers.r2 = {
accountId: env.R2_OBJECT_STORAGE_ACCOUNT_ID,
credentials: {
accessKeyId: env.R2_OBJECT_STORAGE_ACCESS_KEY_ID!,
secretAccessKey: env.R2_OBJECT_STORAGE_SECRET_ACCESS_KEY!,
},
};
AFFiNE.storage.storages.avatar.provider = 'r2';
AFFiNE.storage.storages.avatar.bucket = 'account-avatar';
AFFiNE.storage.storages.avatar.publicLinkFactory = key =>
`https://avatar.affineassets.com/${key}`;
AFFiNE.storage.storages.blob.provider = 'r2';
AFFiNE.storage.storages.blob.bucket = `workspace-blobs-${
AFFiNE.affine.canary ? 'canary' : 'prod'
}`;
}


@@ -0,0 +1,3 @@
import { getDefaultAFFiNEConfig } from './config/default';
globalThis.AFFiNE = getDefaultAFFiNEConfig();


@@ -1,20 +1,13 @@
import { Controller, Get } from '@nestjs/common';
import { Public } from './core/auth';
import { Config } from './fundamentals/config';
@Controller('/')
export class AppController {
constructor(private readonly config: Config) {}
@Public()
@Get()
info() {
const version = AFFiNE.version;
return {
compatibility: this.config.version,
message: `AFFiNE ${this.config.version} Server`,
type: this.config.type,
flavor: this.config.flavor,
compatibility: version,
message: `AFFiNE ${version} Server`,
};
}
}

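For a quick sanity check of the `/` endpoint above, here is a small fetch sketch. It is illustrative only: the port mirrors the commented `AFFINE_SERVER_PORT=3010` default from `.env.example` earlier and may differ locally.

```
// Hypothetical smoke test for the root info endpoint shown above.
// Port 3010 follows the commented AFFINE_SERVER_PORT default; adjust to your setup.
const res = await fetch('http://localhost:3010/');
console.log(await res.json());
// e.g. { compatibility: '...', message: 'AFFiNE ... Server', type: '...', flavor: '...' }
```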

@@ -1,174 +0,0 @@
import { join } from 'node:path';
import { Logger, Module } from '@nestjs/common';
import { APP_GUARD, APP_INTERCEPTOR } from '@nestjs/core';
import { ScheduleModule } from '@nestjs/schedule';
import { ServeStaticModule } from '@nestjs/serve-static';
import { get } from 'lodash-es';
import { AppController } from './app.controller';
import { AuthGuard, AuthModule } from './core/auth';
import { ADD_ENABLED_FEATURES, ServerConfigModule } from './core/config';
import { DocModule } from './core/doc';
import { FeatureModule } from './core/features';
import { QuotaModule } from './core/quota';
import { StorageModule } from './core/storage';
import { SyncModule } from './core/sync';
import { UserModule } from './core/user';
import { WorkspaceModule } from './core/workspaces';
import { getOptionalModuleMetadata } from './fundamentals';
import { CacheInterceptor, CacheModule } from './fundamentals/cache';
import type { AvailablePlugins } from './fundamentals/config';
import { Config, ConfigModule } from './fundamentals/config';
import { EventModule } from './fundamentals/event';
import { GqlModule } from './fundamentals/graphql';
import { HelpersModule } from './fundamentals/helpers';
import { MailModule } from './fundamentals/mailer';
import { MetricsModule } from './fundamentals/metrics';
import { MutexModule } from './fundamentals/mutex';
import { PrismaModule } from './fundamentals/prisma';
import { StorageProviderModule } from './fundamentals/storage';
import { RateLimiterModule } from './fundamentals/throttler';
import { WebSocketModule } from './fundamentals/websocket';
import { REGISTERED_PLUGINS } from './plugins';
export const FunctionalityModules = [
ConfigModule.forRoot(),
ScheduleModule.forRoot(),
EventModule,
CacheModule,
MutexModule,
PrismaModule,
MetricsModule,
RateLimiterModule,
MailModule,
StorageProviderModule,
HelpersModule,
];
export class AppModuleBuilder {
private readonly modules: AFFiNEModule[] = [];
constructor(private readonly config: Config) {}
use(...modules: AFFiNEModule[]): this {
modules.forEach(m => {
const requirements = getOptionalModuleMetadata(m, 'requires');
// if condition not set or condition met, include the module
if (requirements?.length) {
const nonMetRequirements = requirements.filter(c => {
const value = get(this.config, c);
return (
value === undefined ||
value === null ||
(typeof value === 'string' && value.trim().length === 0)
);
});
if (nonMetRequirements.length) {
const name = 'module' in m ? m.module.name : m.name;
new Logger(name).warn(
`${name} is not enabled because of the required configuration is not satisfied.`,
'Unsatisfied configuration:',
...nonMetRequirements.map(config => ` AFFiNE.${config}`)
);
return;
}
}
const predicator = getOptionalModuleMetadata(m, 'if');
if (predicator && !predicator(this.config)) {
return;
}
const contribution = getOptionalModuleMetadata(m, 'contributesTo');
if (contribution) {
ADD_ENABLED_FEATURES(contribution);
}
this.modules.push(m);
});
return this;
}
useIf(
predicator: (config: Config) => boolean,
...modules: AFFiNEModule[]
): this {
if (predicator(this.config)) {
this.use(...modules);
}
return this;
}
compile() {
@Module({
providers: [
{
provide: APP_INTERCEPTOR,
useClass: CacheInterceptor,
},
{
provide: APP_GUARD,
useClass: AuthGuard,
},
],
imports: this.modules,
controllers: this.config.isSelfhosted ? [] : [AppController],
})
class AppModule {}
return AppModule;
}
}
function buildAppModule() {
const factor = new AppModuleBuilder(AFFiNE);
factor
// common fundamental modules
.use(...FunctionalityModules)
// auth
.use(AuthModule)
// business modules
.use(DocModule)
// sync server only
.useIf(config => config.flavor.sync, SyncModule)
// graphql server only
.useIf(
config => config.flavor.graphql,
ServerConfigModule,
WebSocketModule,
GqlModule,
StorageModule,
UserModule,
WorkspaceModule,
FeatureModule,
QuotaModule
)
// self hosted server only
.useIf(
config => config.isSelfhosted,
ServeStaticModule.forRoot({
rootPath: join('/app', 'static'),
})
);
// plugin modules
AFFiNE.plugins.enabled.forEach(name => {
const plugin = REGISTERED_PLUGINS.get(name as AvailablePlugins);
if (!plugin) {
throw new Error(`Unknown plugin ${name}`);
}
factor.use(plugin);
});
return factor.compile();
}
export const AppModule = buildAppModule();


@@ -1,54 +1,34 @@
import { Type } from '@nestjs/common';
import { NestFactory } from '@nestjs/core';
import type { NestExpressApplication } from '@nestjs/platform-express';
import cookieParser from 'cookie-parser';
import graphqlUploadExpress from 'graphql-upload/graphqlUploadExpress.mjs';
import { Module } from '@nestjs/common';
import { APP_INTERCEPTOR } from '@nestjs/core';
import { GlobalExceptionFilter } from './fundamentals';
import { SocketIoAdapter, SocketIoAdapterImpl } from './fundamentals/websocket';
import { serverTimingAndCache } from './middleware/timing';
import { AppController } from './app.controller';
import { CacheInterceptor, CacheModule } from './cache';
import { ConfigModule } from './config';
import { EventModule } from './event';
import { BusinessModules } from './modules';
import { AuthModule } from './modules/auth';
import { PrismaModule } from './prisma';
import { SessionModule } from './session';
import { RateLimiterModule } from './throttler';
export async function createApp() {
const { AppModule } = await import('./app.module');
const BasicModules = [
PrismaModule,
ConfigModule.forRoot(),
CacheModule,
EventModule,
SessionModule,
RateLimiterModule,
AuthModule,
];
const app = await NestFactory.create<NestExpressApplication>(AppModule, {
cors: true,
rawBody: true,
bodyParser: true,
logger: AFFiNE.affine.stable ? ['log'] : ['verbose'],
});
app.use(serverTimingAndCache);
app.use(
graphqlUploadExpress({
// TODO: dynamic limit by quota
maxFileSize: 100 * 1024 * 1024,
maxFiles: 5,
})
);
app.useGlobalFilters(new GlobalExceptionFilter(app.getHttpAdapter()));
app.use(cookieParser());
if (AFFiNE.flavor.sync) {
const SocketIoAdapter = app.get<Type<SocketIoAdapter>>(
SocketIoAdapterImpl,
{
strict: false,
}
);
const adapter = new SocketIoAdapter(app);
app.useWebSocketAdapter(adapter);
}
if (AFFiNE.isSelfhosted && AFFiNE.telemetry.enabled) {
const mixpanel = await import('mixpanel');
mixpanel.init(AFFiNE.telemetry.token).track('selfhost-server-started', {
version: AFFiNE.version,
});
}
return app;
}
@Module({
providers: [
{
provide: APP_INTERCEPTOR,
useClass: CacheInterceptor,
},
],
imports: [...BasicModules, ...BusinessModules],
controllers: [AppController],
})
export class AppModule {}

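A minimal sketch of how the `createApp()` factory above might be consumed by a server entry point. The `./app` import path and the 3010 fallback are assumptions for illustration, not taken from the diff.

```
// Hypothetical entry point consuming the createApp() factory shown above.
// The './app' path and the 3010 fallback (the commented default in .env.example) are assumptions.
import { createApp } from './app';

const app = await createApp();
await app.listen(Number(process.env.AFFINE_SERVER_PORT ?? 3010));
```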

@@ -1,12 +1,60 @@
import Keyv from 'keyv';
import type { Cache, CacheSetOptions } from './def';
export interface CacheSetOptions {
// in milliseconds
ttl?: number;
}
// extends if needed
export interface Cache {
// standard operation
get<T = unknown>(key: string): Promise<T | undefined>;
set<T = unknown>(
key: string,
value: T,
opts?: CacheSetOptions
): Promise<boolean>;
setnx<T = unknown>(
key: string,
value: T,
opts?: CacheSetOptions
): Promise<boolean>;
increase(key: string, count?: number): Promise<number>;
decrease(key: string, count?: number): Promise<number>;
delete(key: string): Promise<boolean>;
has(key: string): Promise<boolean>;
ttl(key: string): Promise<number>;
expire(key: string, ttl: number): Promise<boolean>;
// list operations
pushBack<T = unknown>(key: string, ...values: T[]): Promise<number>;
pushFront<T = unknown>(key: string, ...values: T[]): Promise<number>;
len(key: string): Promise<number>;
list<T = unknown>(key: string, start: number, end: number): Promise<T[]>;
popFront<T = unknown>(key: string, count?: number): Promise<T[]>;
popBack<T = unknown>(key: string, count?: number): Promise<T[]>;
// map operations
mapSet<T = unknown>(
map: string,
key: string,
value: T,
opts: CacheSetOptions
): Promise<boolean>;
mapIncrease(map: string, key: string, count?: number): Promise<number>;
mapDecrease(map: string, key: string, count?: number): Promise<number>;
mapGet<T = unknown>(map: string, key: string): Promise<T | undefined>;
mapDelete(map: string, key: string): Promise<boolean>;
mapKeys(map: string): Promise<string[]>;
mapRandomKey(map: string): Promise<string | undefined>;
mapLen(map: string): Promise<number>;
}
export class LocalCache implements Cache {
private readonly kv: Keyv;
constructor(opts: Keyv.Options<any> = {}) {
this.kv = new Keyv(opts);
constructor() {
this.kv = new Keyv();
}
// standard operation
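A small usage sketch of the Cache interface above, assuming expire() takes milliseconds like CacheSetOptions.ttl; the key name and window length are illustrative.

async function countRequest(cache: Cache, userId: string): Promise<number> {
  const key = `req-count:${userId}`;
  // atomically bump the counter for this user
  const count = await cache.increase(key);
  if (count === 1) {
    // first hit in this window: start a 60s TTL (assuming milliseconds)
    await cache.expire(key, 60 * 1000);
  }
  return count;
}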

View File

@@ -0,0 +1,26 @@
import { FactoryProvider, Global, Module } from '@nestjs/common';
import { Redis } from 'ioredis';
import { Config } from '../config';
import { LocalCache } from './cache';
import { RedisCache } from './redis';
const CacheProvider: FactoryProvider = {
provide: LocalCache,
useFactory: (config: Config) => {
return config.redis.enabled
? new RedisCache(new Redis(config.redis))
: new LocalCache();
},
inject: [Config],
};
@Global()
@Module({
providers: [CacheProvider],
exports: [CacheProvider],
})
export class CacheModule {}
export { LocalCache as Cache };
export { CacheInterceptor, MakeCache, PreventCache } from './interceptor';
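A usage sketch of the module above with a hypothetical ExampleService: consumers inject the cache by the LocalCache token, which the factory may back with RedisCache when config.redis.enabled is true; the import path is an assumption.

import { Injectable } from '@nestjs/common';
import { LocalCache } from './cache';

@Injectable()
export class ExampleService {
  constructor(private readonly cache: LocalCache) {}

  async remember(key: string, value: string) {
    // cache for one minute (ttl is in milliseconds per CacheSetOptions)
    await this.cache.set(key, value, { ttl: 60_000 });
  }
}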

View File

@@ -10,7 +10,7 @@ import { Reflector } from '@nestjs/core';
import { GqlContextType, GqlExecutionContext } from '@nestjs/graphql';
import { mergeMap, Observable, of } from 'rxjs';
import { Cache } from './instances';
import { LocalCache } from './cache';
export const MakeCache = (key: string[], args?: string[]) =>
SetMetadata('cacheKey', [key, args]);
@@ -24,7 +24,7 @@ export class CacheInterceptor implements NestInterceptor {
private readonly logger = new Logger(CacheInterceptor.name);
constructor(
private readonly reflector: Reflector,
private readonly cache: Cache
private readonly cache: LocalCache
) {}
async intercept(
ctx: ExecutionContext,
@@ -40,9 +40,9 @@ export class CacheInterceptor implements NestInterceptor {
);
if (preventKey) {
this.logger.debug(`prevent cache: ${JSON.stringify(preventKey)}`);
const key = await this.getCacheKey(ctx, preventKey);
if (key) {
this.logger.debug(`cache ${key} staled`);
await this.cache.delete(key);
}
@@ -60,12 +60,12 @@ export class CacheInterceptor implements NestInterceptor {
const cachedData = await this.cache.get(cacheKey);
if (cachedData) {
this.logger.debug(`cache ${cacheKey} hit`);
this.logger.debug('cache hit', cacheKey, cachedData);
return of(cachedData);
} else {
this.logger.debug(`cache ${cacheKey} miss`);
return next.handle().pipe(
mergeMap(async result => {
this.logger.debug('cache miss', cacheKey, result);
await this.cache.set(cacheKey, result);
return result;
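A usage sketch of the MakeCache/PreventCache metadata consumed by the interceptor above; the resolver and its methods are hypothetical, and PreventCache is assumed to mirror the MakeCache signature.

class WorkspaceResolver {
  // responses are cached under a key derived from ['workspace'] plus the `id` argument
  @MakeCache(['workspace'], ['id'])
  async workspace(id: string) {
    // ...load the workspace
  }

  // mutating the workspace stales the same cache key
  @PreventCache(['workspace'], ['id'])
  async updateWorkspace(id: string) {
    // ...update the workspace
  }
}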

View File

@@ -1,6 +1,6 @@
import { Redis } from 'ioredis';
import type { Cache, CacheSetOptions } from '../../fundamentals/cache/def';
import { Cache, CacheSetOptions } from './cache';
export class RedisCache implements Cache {
constructor(private readonly redis: Redis) {}

View File

@@ -1,39 +0,0 @@
// Convenient way to map environment variables to config values.
AFFiNE.ENV_MAP = {
AFFINE_SERVER_PORT: ['port', 'int'],
AFFINE_SERVER_HOST: 'host',
AFFINE_SERVER_SUB_PATH: 'path',
AFFINE_SERVER_HTTPS: ['https', 'boolean'],
DATABASE_URL: 'db.url',
ENABLE_CAPTCHA: ['auth.captcha.enable', 'boolean'],
CAPTCHA_TURNSTILE_SECRET: ['auth.captcha.turnstile.secret', 'string'],
OAUTH_GOOGLE_CLIENT_ID: 'plugins.oauth.providers.google.clientId',
OAUTH_GOOGLE_CLIENT_SECRET: 'plugins.oauth.providers.google.clientSecret',
OAUTH_GITHUB_CLIENT_ID: 'plugins.oauth.providers.github.clientId',
OAUTH_GITHUB_CLIENT_SECRET: 'plugins.oauth.providers.github.clientSecret',
MAILER_HOST: 'mailer.host',
MAILER_PORT: ['mailer.port', 'int'],
MAILER_USER: 'mailer.auth.user',
MAILER_PASSWORD: 'mailer.auth.pass',
MAILER_SENDER: 'mailer.from.address',
MAILER_SECURE: ['mailer.secure', 'boolean'],
THROTTLE_TTL: ['rateLimiter.ttl', 'int'],
THROTTLE_LIMIT: ['rateLimiter.limit', 'int'],
REDIS_SERVER_HOST: 'plugins.redis.host',
REDIS_SERVER_PORT: ['plugins.redis.port', 'int'],
REDIS_SERVER_USER: 'plugins.redis.username',
REDIS_SERVER_PASSWORD: 'plugins.redis.password',
REDIS_SERVER_DATABASE: ['plugins.redis.db', 'int'],
DOC_MERGE_INTERVAL: ['doc.manager.updatePollInterval', 'int'],
DOC_MERGE_USE_JWST_CODEC: [
'doc.manager.experimentalMergeWithYOcto',
'boolean',
],
STRIPE_API_KEY: 'plugins.payment.stripe.keys.APIKey',
STRIPE_WEBHOOK_KEY: 'plugins.payment.stripe.keys.webhookKey',
FEATURES_EARLY_ACCESS_PREVIEW: ['featureFlags.earlyAccessPreview', 'boolean'],
FEATURES_SYNC_CLIENT_VERSION_CHECK: [
'featureFlags.syncClientVersionCheck',
'boolean',
],
};

View File

@@ -1,63 +0,0 @@
/* eslint-disable @typescript-eslint/no-non-null-assertion */
// Custom configurations for AFFiNE Cloud
// ====================================================================================
// Q: WHY DOES THIS FILE EXIST?
// A: An AFFiNE deployment environment may have a lot of custom environment variables
// that are not suitable to be put in the `affine.ts` file.
// For example, AFFiNE Cloud clusters are deployed on Google Cloud Platform.
// We need to enable the `gcloud` plugin to make sure the nodes work well,
// but the default selfhost version may not require it.
// So it's not a good idea to put such logic in the common `affine.ts` file.
//
// ```
// if (AFFiNE.deploy) {
// AFFiNE.plugins.use('gcloud');
// }
// ```
// ====================================================================================
const env = process.env;
AFFiNE.metrics.enabled = !AFFiNE.node.test;
if (env.R2_OBJECT_STORAGE_ACCOUNT_ID) {
AFFiNE.plugins.use('cloudflare-r2', {
accountId: env.R2_OBJECT_STORAGE_ACCOUNT_ID,
credentials: {
accessKeyId: env.R2_OBJECT_STORAGE_ACCESS_KEY_ID!,
secretAccessKey: env.R2_OBJECT_STORAGE_SECRET_ACCESS_KEY!,
},
});
AFFiNE.storage.storages.avatar.provider = 'cloudflare-r2';
AFFiNE.storage.storages.avatar.bucket = 'account-avatar';
AFFiNE.storage.storages.avatar.publicLinkFactory = key =>
`https://avatar.affineassets.com/${key}`;
AFFiNE.storage.storages.blob.provider = 'cloudflare-r2';
AFFiNE.storage.storages.blob.bucket = `workspace-blobs-${
AFFiNE.affine.canary ? 'canary' : 'prod'
}`;
}
AFFiNE.plugins.use('redis');
AFFiNE.plugins.use('payment', {
stripe: {
keys: {
// fake the keys to ensure the server generates the full GraphQL schema even when env vars are not set
APIKey: '1',
webhookKey: '1',
},
},
});
AFFiNE.plugins.use('oauth');
if (AFFiNE.deploy) {
AFFiNE.mailer = {
service: 'gmail',
auth: {
user: env.MAILER_USER,
pass: env.MAILER_PASSWORD,
},
};
AFFiNE.plugins.use('gcloud');
}

View File

@@ -1,153 +0,0 @@
/* eslint-disable @typescript-eslint/no-non-null-assertion */
//
// ###############################################################
// ## AFFiNE Configuration System ##
// ###############################################################
// This file holds all AFFiNE configurations that affect runtime behavior.
// Override any configuration here and it will be merged when starting the server.
// Any changes in this file won't take effect until the server is restarted.
//
//
// > Configurations merge order
// 1. load environment variables (`.env` if provided, and from system)
// 2. load `src/fundamentals/config/default.ts` for all default settings
// 3. apply `./affine.ts` patches (this file)
// 4. apply `./affine.env.ts` patches
//
//
// ###############################################################
// ## General settings ##
// ###############################################################
//
// /* The unique identity of the server */
// AFFiNE.serverId = 'some-random-uuid';
//
// /* The name of AFFiNE Server, may show on the UI */
// AFFiNE.serverName = 'Your Cool AFFiNE Selfhosted Cloud';
//
// /* Whether the server is deployed behind an HTTPS proxy */
AFFiNE.https = false;
// /* The domain your server will be available at */
AFFiNE.host = 'localhost';
// /* The local port your server will listen on */
AFFiNE.port = 3010;
// /* The sub path of your server */
// /* For example, if you set `AFFiNE.path = '/affine'`, then the server will be available at `${domain}/affine` */
// AFFiNE.path = '/affine';
//
//
// ###############################################################
// ## Database settings ##
// ###############################################################
//
// /* The URL of the database where most AFFiNE server data will be stored */
// AFFiNE.db.url = 'postgres://user:password@localhost:5432/affine';
//
//
// ###############################################################
// ## Server Function settings ##
// ###############################################################
//
// /* Whether to enable metrics and tracing while running the server */
// /* The metrics will be available at `http://localhost:9464/metrics`, exported in [Prometheus] format */
// AFFiNE.metrics.enabled = true;
//
// /* Authentication Settings */
// /* User Signup password limitation */
// AFFiNE.auth.password = {
// minLength: 8,
// maxLength: 32,
// };
//
// /* How long the login session would last by default */
// AFFiNE.auth.session = {
// ttl: 15 * 24 * 60 * 60, // 15 days
// };
//
// /* GraphQL configurations that control the behavior of the Apollo Server behind */
// /* @see https://www.apollographql.com/docs/apollo-server/api/apollo-server */
// AFFiNE.graphql = {
// /* Path to mount GraphQL API */
// path: '/graphql',
// buildSchemaOptions: {
// numberScalarMode: 'integer',
// },
// /* Whether to allow clients to query the schema introspection */
// introspection: true,
// /* Whether to enable the GraphQL Playground UI */
// playground: true,
// }
//
// /* Doc Store & Collaboration */
// /* The buffer time before creating a new history snapshot when a doc gets updated */
// AFFiNE.doc.history.interval = 1000 * 60 * 10; // 10 minutes
//
// /* Use `y-octo` to merge updates at the same time when merging using Yjs */
// AFFiNE.doc.manager.experimentalMergeWithYOcto = true;
//
// /* How often the manager starts a new round of merging pending updates into the doc snapshot */
// AFFiNE.doc.manager.updatePollInterval = 1000 * 3;
//
//
// ###############################################################
// ## Plugins settings ##
// ###############################################################
//
// /* Redis Plugin */
// /* Provides caching and session storage backed by Redis. */
// /* Useful when you deploy AFFiNE server in a cluster. */
// AFFiNE.plugins.use('redis', {
// /* override options */
// });
//
//
// /* Payment Plugin */
// AFFiNE.plugins.use('payment', {
// stripe: { keys: {}, apiVersion: '2023-10-16' },
// });
//
//
// /* Cloudflare R2 Plugin */
// /* Enable if you choose to store workspace blobs or user avatars in Cloudflare R2 Storage Service */
// AFFiNE.plugins.use('cloudflare-r2', {
// accountId: '',
// credentials: {
// accessKeyId: '',
// secretAccessKey: '',
// },
// });
//
// /* AWS S3 Plugin */
// /* Enable if you choose to store workspace blobs or user avatars in AWS S3 Storage Service */
// AFFiNE.plugins.use('aws-s3', {
// credentials: {
// accessKeyId: '',
// secretAccessKey: '',
// },
// });
// /* Update the provider of storages */
// AFFiNE.storage.storages.blob.provider = 'r2';
// AFFiNE.storage.storages.avatar.provider = 'r2';
//
// /* OAuth Plugin */
// AFFiNE.plugins.use('oauth', {
// providers: {
// github: {
// clientId: '',
// clientSecret: '',
// // See https://docs.github.com/en/apps/oauth-apps/building-oauth-apps/authorizing-oauth-apps
// args: {
// scope: 'user',
// },
// },
// google: {
// clientId: '',
// clientSecret: '',
// args: {
// // See https://developers.google.com/identity/protocols/oauth2
// scope: 'openid email profile',
// prompt: 'select_account',
// access_type: 'offline',
// },
// },
// },
// });

View File

@@ -1,8 +1,6 @@
import type { ApolloDriverConfig } from '@nestjs/apollo';
import SMTPTransport from 'nodemailer/lib/smtp-transport';
import type { LeafPaths } from '../utils/types';
import { EnvConfigType } from './env';
import type { AFFiNEStorageConfig } from './storage';
declare global {
@@ -13,33 +11,60 @@ declare global {
}
}
export type ServerFlavor = 'allinone' | 'graphql' | 'sync';
export type AFFINE_ENV = 'dev' | 'beta' | 'production';
export type NODE_ENV = 'development' | 'test' | 'production';
export enum DeploymentType {
Affine = 'affine',
Selfhosted = 'selfhosted',
export enum ExternalAccount {
github = 'github',
google = 'google',
firebase = 'firebase',
}
export type ConfigPaths = LeafPaths<
export type ServerFlavor = 'allinone' | 'graphql' | 'sync' | 'selfhosted';
type EnvConfigType = 'string' | 'int' | 'float' | 'boolean';
type ConfigPaths = LeafPaths<
Omit<
AFFiNEConfig,
| 'ENV_MAP'
| 'version'
| 'type'
| 'isSelfhosted'
| 'flavor'
| 'env'
| 'affine'
| 'deploy'
| 'node'
| 'baseUrl'
| 'origin'
| 'prod'
| 'dev'
| 'test'
| 'deploy'
>,
'',
'.....'
'....'
>;
/**
* parse number value from environment variables
*/
function int(value: string) {
const n = parseInt(value);
return Number.isNaN(n) ? undefined : n;
}
function float(value: string) {
const n = parseFloat(value);
return Number.isNaN(n) ? undefined : n;
}
function boolean(value: string) {
return value === '1' || value.toLowerCase() === 'true';
}
export function parseEnvValue(value: string | undefined, type?: EnvConfigType) {
if (value === undefined) {
return;
}
return type === 'int'
? int(value)
: type === 'float'
? float(value)
: type === 'boolean'
? boolean(value)
: value;
}
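// Usage sketch: parseEnvValue('3010', 'int') === 3010,
// parseEnvValue('true', 'boolean') === true, parseEnvValue(undefined) === undefined.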
/**
* All Configurations that would control AFFiNE server behaviors
@@ -50,65 +75,22 @@ export interface AFFiNEConfig {
/**
* Server Identity
*/
serverId: string;
/**
* Name may show on the UI
*/
serverName: string;
readonly serverId: string;
/**
* System version
*/
readonly version: string;
/**
* Deployment type, AFFiNE Cloud, or Selfhosted
*/
get type(): DeploymentType;
/**
* Fast detect whether currently deployed in a selfhosted environment
*/
get isSelfhosted(): boolean;
/**
* Server flavor
*/
get flavor(): {
type: string;
graphql: boolean;
sync: boolean;
};
/**
* Application secrets for authentication and data encryption
*/
secrets: {
/**
* Application public key
*
*/
publicKey: string;
/**
* Application private key
*
*/
privateKey: string;
};
/**
* Deployment environment
*/
readonly AFFINE_ENV: AFFINE_ENV;
readonly affineEnv: 'dev' | 'beta' | 'production';
/**
* alias to `process.env.NODE_ENV`
*
* @default 'development'
* @default 'production'
* @env NODE_ENV
*/
readonly NODE_ENV: NODE_ENV;
readonly env: string;
/**
* fast AFFiNE environment judge
*/
@@ -125,7 +107,6 @@ export interface AFFiNEConfig {
dev: boolean;
test: boolean;
};
get deploy(): boolean;
/**
@@ -184,7 +165,6 @@ export interface AFFiNEConfig {
*/
featureFlags: {
earlyAccessPreview: boolean;
syncClientVersionCheck: boolean;
};
/**
@@ -210,54 +190,114 @@ export interface AFFiNEConfig {
limit: number;
};
/**
* Redis Config
*
* whether to use redis as Socket.IO adapter
*/
redis: {
/**
* if not enabled, use in-memory adapter by default
*/
enabled: boolean;
/**
* url of redis host
*/
host: string;
/**
* port of redis
*/
port: number;
username: string;
password: string;
/**
* redis database index
*
* Rate Limiter scope: database + 1
*
* Session scope: database + 2
*
* @default 0
*/
database: number;
};
/**
* authentication config
*/
auth: {
/**
* The minimum and maximum length of the password when registering new users
*
* @default [8,32]
* Application access token expiration time
*/
password: {
/**
* The minimum length of the password
*
* @default 8
*/
minLength: number;
/**
* The maximum length of the password
*
* @default 32
*/
maxLength: number;
};
session: {
/**
* Application auth expiration time in seconds
*
* @default 15 days
*/
ttl: number;
};
readonly accessTokenExpiresIn: number;
/**
* Application access token config
* Application refresh token expiration time
*/
accessToken: {
/**
* Application access token expiration time in seconds
*
* @default 7 days
*/
ttl: number;
/**
* Application refresh token expiration time in seconds
*
* @default 30 days
*/
refreshTokenTtl: number;
readonly refreshTokenExpiresIn: number;
/**
* Add some leeway (in seconds) to the exp and nbf validation to account for clock skew.
* Defaults to 60 if omitted.
*/
readonly leeway: number;
/**
* Application public key
*
*/
readonly publicKey: string;
/**
* Application private key
*
*/
readonly privateKey: string;
/**
* whether to allow users to sign up with email directly
*/
enableSignup: boolean;
/**
* whether to allow users to sign up via oauth providers
*/
enableOauth: boolean;
/**
* NEXTAUTH_SECRET
*/
nextAuthSecret: string;
/**
* all available oauth providers
*/
oauthProviders: Partial<
Record<
ExternalAccount,
{
enabled: boolean;
clientId: string;
clientSecret: string;
/**
* uri to start oauth flow
*/
authorizationUri?: string;
/**
* uri to authenticate `access_token` when user is redirected back from oauth provider with `code`
*/
accessTokenUri?: string;
/**
* uri to get user info with authenticated `access_token`
*/
userInfoUri?: string;
args?: Record<string, any>;
}
>
>;
/**
* whether to use local email service to send email
* local debug only
*/
localEmail: boolean;
email: {
server: string;
port: number;
login: string;
sender: string;
password: string;
};
captcha: {
/**
@@ -282,13 +322,6 @@ export interface AFFiNEConfig {
};
};
/**
* Configurations for the mail service used to send auth or business mails.
*
* @see https://nodemailer.com/smtp/
*/
mailer?: SMTPTransport.Options;
doc: {
manager: {
/**
@@ -307,17 +340,11 @@ export interface AFFiNEConfig {
updatePollInterval: number;
/**
* The maximum number of updates that will be pulled from the server at once.
* Exists to avoid overloading the server when there are too many updates for one doc.
*/
maxUpdatesPullCount: number;
/**
* Use `y-octo` to merge updates at the same time when merging using Yjs.
* Use JwstCodec to merge updates at the same time when merging using Yjs.
*
* This is an experimental feature aimed at checking the correctness of JwstCodec.
*/
experimentalMergeWithYOcto: boolean;
experimentalMergeWithJwstCodec: boolean;
};
history: {
/**
@@ -329,14 +356,12 @@ export interface AFFiNEConfig {
};
};
metrics: {
enabled: boolean;
};
telemetry: {
enabled: boolean;
token: string;
payment: {
stripe: {
keys: {
APIKey: string;
webhookKey: string;
};
} & import('stripe').Stripe.StripeConfig;
};
}
export * from './storage';

View File

@@ -0,0 +1,214 @@
/// <reference types="../global.d.ts" />
import { createPrivateKey, createPublicKey } from 'node:crypto';
import parse from 'parse-duration';
import pkg from '../../package.json' assert { type: 'json' };
import type { AFFiNEConfig, ServerFlavor } from './def';
import { applyEnvToConfig } from './env';
import { getDefaultAFFiNEStorageConfig } from './storage';
export const SERVER_FLAVOR = (process.env.SERVER_FLAVOR ??
'allinone') as ServerFlavor;
// Don't use this in production
export const examplePrivateKey = `-----BEGIN EC PRIVATE KEY-----
MHcCAQEEIEtyAJLIULkphVhqXqxk4Nr8Ggty3XLwUJWBxzAWCWTMoAoGCCqGSM49
AwEHoUQDQgAEF3U/0wIeJ3jRKXeFKqQyBKlr9F7xaAUScRrAuSP33rajm3cdfihI
3JvMxVNsS2lE8PSGQrvDrJZaDo0L+Lq9Gg==
-----END EC PRIVATE KEY-----`;
const jwtKeyPair = (function () {
const AUTH_PRIVATE_KEY = process.env.AUTH_PRIVATE_KEY ?? examplePrivateKey;
const privateKey = createPrivateKey({
key: Buffer.from(AUTH_PRIVATE_KEY),
format: 'pem',
type: 'sec1',
})
.export({
format: 'pem',
type: 'pkcs8',
})
.toString('utf8');
const publicKey = createPublicKey({
key: Buffer.from(AUTH_PRIVATE_KEY),
format: 'pem',
type: 'spki',
})
.export({
format: 'pem',
type: 'spki',
})
.toString('utf8');
return {
publicKey,
privateKey,
};
})();
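// Sketch (assumption, for self-hosters): one way to generate a SEC1 EC private key for
// AUTH_PRIVATE_KEY, assuming the same prime256v1 (P-256) curve as the example key above:
//   import { generateKeyPairSync } from 'node:crypto';
//   const { privateKey } = generateKeyPairSync('ec', { namedCurve: 'prime256v1' });
//   console.log(privateKey.export({ format: 'pem', type: 'sec1' }).toString());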
export const getDefaultAFFiNEConfig: () => AFFiNEConfig = () => {
const defaultConfig = {
serverId: 'affine-nestjs-server',
version: pkg.version,
ENV_MAP: {
AFFINE_SERVER_PORT: ['port', 'int'],
AFFINE_SERVER_HOST: 'host',
AFFINE_SERVER_SUB_PATH: 'path',
AFFINE_ENV: 'affineEnv',
DATABASE_URL: 'db.url',
ENABLE_CAPTCHA: ['auth.captcha.enable', 'boolean'],
CAPTCHA_TURNSTILE_SECRET: ['auth.captcha.turnstile.secret', 'string'],
OAUTH_GOOGLE_ENABLED: ['auth.oauthProviders.google.enabled', 'boolean'],
OAUTH_GOOGLE_CLIENT_ID: 'auth.oauthProviders.google.clientId',
OAUTH_GOOGLE_CLIENT_SECRET: 'auth.oauthProviders.google.clientSecret',
OAUTH_GITHUB_ENABLED: ['auth.oauthProviders.github.enabled', 'boolean'],
OAUTH_GITHUB_CLIENT_ID: 'auth.oauthProviders.github.clientId',
OAUTH_GITHUB_CLIENT_SECRET: 'auth.oauthProviders.github.clientSecret',
OAUTH_EMAIL_LOGIN: 'auth.email.login',
OAUTH_EMAIL_SENDER: 'auth.email.sender',
OAUTH_EMAIL_SERVER: 'auth.email.server',
OAUTH_EMAIL_PORT: ['auth.email.port', 'int'],
OAUTH_EMAIL_PASSWORD: 'auth.email.password',
THROTTLE_TTL: ['rateLimiter.ttl', 'int'],
THROTTLE_LIMIT: ['rateLimiter.limit', 'int'],
REDIS_SERVER_ENABLED: ['redis.enabled', 'boolean'],
REDIS_SERVER_HOST: 'redis.host',
REDIS_SERVER_PORT: ['redis.port', 'int'],
REDIS_SERVER_USER: 'redis.username',
REDIS_SERVER_PASSWORD: 'redis.password',
REDIS_SERVER_DATABASE: ['redis.database', 'int'],
DOC_MERGE_INTERVAL: ['doc.manager.updatePollInterval', 'int'],
DOC_MERGE_USE_JWST_CODEC: [
'doc.manager.experimentalMergeWithJwstCodec',
'boolean',
],
ENABLE_LOCAL_EMAIL: ['auth.localEmail', 'boolean'],
STRIPE_API_KEY: 'payment.stripe.keys.APIKey',
STRIPE_WEBHOOK_KEY: 'payment.stripe.keys.webhookKey',
FEATURES_EARLY_ACCESS_PREVIEW: [
'featureFlags.earlyAccessPreview',
'boolean',
],
} satisfies AFFiNEConfig['ENV_MAP'],
affineEnv: 'dev',
get affine() {
const env = this.affineEnv;
return {
canary: env === 'dev',
beta: env === 'beta',
stable: env === 'production',
};
},
env: process.env.NODE_ENV ?? 'development',
get node() {
const env = this.env;
return {
prod: env === 'production',
dev: env === 'development',
test: env === 'test',
};
},
get deploy() {
return !this.node.dev && !this.node.test;
},
featureFlags: {
earlyAccessPreview: false,
},
get https() {
return !this.node.dev;
},
host: 'localhost',
port: 3010,
path: '',
db: {
url: '',
},
get origin() {
return this.node.dev
? 'http://localhost:8080'
: `${this.https ? 'https' : 'http'}://${this.host}${
this.host === 'localhost' ? `:${this.port}` : ''
}`;
},
get baseUrl() {
return `${this.origin}${this.path}`;
},
graphql: {
buildSchemaOptions: {
numberScalarMode: 'integer',
},
introspection: true,
playground: true,
},
auth: {
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
accessTokenExpiresIn: parse('1h')! / 1000,
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
refreshTokenExpiresIn: parse('7d')! / 1000,
leeway: 60,
captcha: {
enable: false,
turnstile: {
secret: '1x0000000000000000000000000000000AA',
},
challenge: {
bits: 20,
},
},
privateKey: jwtKeyPair.privateKey,
publicKey: jwtKeyPair.publicKey,
enableSignup: true,
enableOauth: false,
get nextAuthSecret() {
return this.privateKey;
},
oauthProviders: {},
localEmail: false,
email: {
server: 'smtp.gmail.com',
port: 465,
login: '',
sender: '',
password: '',
},
},
storage: getDefaultAFFiNEStorageConfig(),
rateLimiter: {
ttl: 60,
limit: 60,
},
redis: {
enabled: false,
host: '127.0.0.1',
port: 6379,
username: '',
password: '',
database: 0,
},
doc: {
manager: {
enableUpdateAutoMerging: SERVER_FLAVOR !== 'sync',
updatePollInterval: 3000,
experimentalMergeWithJwstCodec: false,
},
history: {
interval: 1000 * 60 * 10 /* 10 mins */,
},
},
payment: {
stripe: {
keys: {
APIKey: '',
webhookKey: '',
},
apiVersion: '2023-10-16',
},
},
} satisfies AFFiNEConfig;
applyEnvToConfig(defaultConfig);
return defaultConfig;
};
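A usage sketch, assuming the merge order described in the affine.ts template above: the defaults are computed first and later patched by ./affine.ts and ./affine.env.ts.

import { getDefaultAFFiNEConfig } from './default';

// seed the global config object with the defaults before any patches are applied
globalThis.AFFiNE = getDefaultAFFiNEConfig();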

View File

@@ -0,0 +1,17 @@
import { set } from 'lodash-es';
import { type AFFiNEConfig, parseEnvValue } from './def';
export function applyEnvToConfig(rawConfig: AFFiNEConfig) {
for (const env in rawConfig.ENV_MAP) {
const config = rawConfig.ENV_MAP[env];
const [path, value] =
typeof config === 'string'
? [config, process.env[env]]
: [config[0], parseEnvValue(process.env[env], config[1])];
if (value !== undefined) {
set(rawConfig, path, value);
}
}
}
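// Usage sketch: with the ENV_MAP entry `AFFINE_SERVER_PORT: ['port', 'int']` and
// AFFINE_SERVER_PORT=4010 in the environment, applyEnvToConfig(config) calls
// set(config, 'port', parseEnvValue('4010', 'int')), so config.port becomes 4010.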

View File

@@ -1,12 +1,26 @@
import { DynamicModule, FactoryProvider } from '@nestjs/common';
// eslint-disable-next-line simple-import-sort/imports
import type { DynamicModule, FactoryProvider } from '@nestjs/common';
import { merge } from 'lodash-es';
import { ApplyType } from '../utils/types';
import { AFFiNEConfig } from './def';
import type { DeepPartial } from '../utils/types';
import type { AFFiNEConfig } from './def';
import '../prelude';
type ConstructorOf<T> = {
new (): T;
};
function ApplyType<T>(): ConstructorOf<T> {
// @ts-expect-error used to fake the type of config
return class Inner implements T {
constructor() {}
};
}
/**
* @example
*
* usage:
* ```
* import { Config } from '@affine/server'
*
* class TestConfig {
@@ -15,6 +29,7 @@ import { AFFiNEConfig } from './def';
* return this.config.env
* }
* }
* ```
*/
export class Config extends ApplyType<AFFiNEConfig>() {}
@@ -56,3 +71,7 @@ export class ConfigModule {
};
};
}
export type { AFFiNEConfig } from './def';
export { SERVER_FLAVOR } from './default';
export * from './storage';

View File

@@ -1,34 +1,37 @@
import { homedir } from 'node:os';
import { join } from 'node:path';
import { S3ClientConfigType } from '@aws-sdk/client-s3';
export type StorageProviderType = 'fs' | 'r2' | 's3';
export interface FsStorageConfig {
path: string;
}
export type R2StorageConfig = S3ClientConfigType & {
accountId: string;
};
export type S3StorageConfig = S3ClientConfigType;
export interface StorageProvidersConfig {
fs: FsStorageConfig;
}
export type StorageProviderType = keyof StorageProvidersConfig;
export type StorageConfig<Ext = unknown> = {
export type StorageTargetConfig<Ext = unknown> = {
provider: StorageProviderType;
bucket: string;
} & Ext;
export interface StoragesConfig {
avatar: StorageConfig<{ publicLinkFactory: (key: string) => string }>;
blob: StorageConfig;
}
export interface AFFiNEStorageConfig {
/**
* All providers for object storage
*
* Supports different providers for different usages at the same time.
*/
providers: StorageProvidersConfig;
storages: StoragesConfig;
providers: {
fs?: FsStorageConfig;
s3?: S3StorageConfig;
r2?: R2StorageConfig;
};
storages: {
avatar: StorageTargetConfig<{ publicLinkFactory: (key: string) => string }>;
blob: StorageTargetConfig;
};
}
export type StorageProviders = AFFiNEStorageConfig['providers'];
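A sketch of a self-host style AFFiNEStorageConfig using the fs provider; the path, bucket names, and link factory are illustrative assumptions.

import { homedir } from 'node:os';
import { join } from 'node:path';

const storageConfig: AFFiNEStorageConfig = {
  providers: {
    fs: { path: join(homedir(), '.affine', 'storage') },
  },
  storages: {
    avatar: {
      provider: 'fs',
      bucket: 'avatars',
      publicLinkFactory: key => `/api/avatars/${key}`,
    },
    blob: {
      provider: 'fs',
      bucket: 'blobs',
    },
  },
};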

View File

@@ -0,0 +1,3 @@
export const OPERATION_NAME = 'x-operation-name';
export const REQUEST_ID = 'x-request-id';

View File

@@ -1,193 +0,0 @@
import { randomUUID } from 'node:crypto';
import {
BadRequestException,
Body,
Controller,
Get,
Header,
HttpStatus,
Post,
Query,
Req,
Res,
} from '@nestjs/common';
import type { Request, Response } from 'express';
import { PaymentRequiredException, URLHelper } from '../../fundamentals';
import { UserService } from '../user';
import { validators } from '../utils/validators';
import { CurrentUser } from './current-user';
import { Public } from './guard';
import { AuthService, parseAuthUserSeqNum } from './service';
import { TokenService, TokenType } from './token';
class SignInCredential {
email!: string;
password?: string;
}
@Controller('/api/auth')
export class AuthController {
constructor(
private readonly url: URLHelper,
private readonly auth: AuthService,
private readonly user: UserService,
private readonly token: TokenService
) {}
@Public()
@Post('/sign-in')
@Header('content-type', 'application/json')
async signIn(
@Req() req: Request,
@Res() res: Response,
@Body() credential: SignInCredential,
@Query('redirect_uri') redirectUri = this.url.home
) {
validators.assertValidEmail(credential.email);
const canSignIn = await this.auth.canSignIn(credential.email);
if (!canSignIn) {
throw new PaymentRequiredException(
`You don't have early access permission\nVisit https://community.affine.pro/c/insider-general/ for more information`
);
}
if (credential.password) {
const user = await this.auth.signIn(
credential.email,
credential.password
);
await this.auth.setCookie(req, res, user);
res.status(HttpStatus.OK).send(user);
} else {
// send email magic link
const user = await this.user.findUserByEmail(credential.email);
const result = await this.sendSignInEmail(
{ email: credential.email, signUp: !user },
redirectUri
);
if (result.rejected.length) {
throw new Error('Failed to send sign-in email.');
}
res.status(HttpStatus.OK).send({
email: credential.email,
});
}
}
async sendSignInEmail(
{ email, signUp }: { email: string; signUp: boolean },
redirectUri: string
) {
const token = await this.token.createToken(TokenType.SignIn, email);
const magicLink = this.url.link('/api/auth/magic-link', {
token,
email,
redirect_uri: redirectUri,
});
const result = await this.auth.sendSignInEmail(email, magicLink, signUp);
return result;
}
@Get('/sign-out')
async signOut(
@Req() req: Request,
@Res() res: Response,
@Query('redirect_uri') redirectUri?: string
) {
const session = await this.auth.signOut(
req.cookies[AuthService.sessionCookieName],
parseAuthUserSeqNum(req.headers[AuthService.authUserSeqHeaderName])
);
if (session) {
res.cookie(AuthService.sessionCookieName, session.id, {
expires: session.expiresAt ?? void 0, // expiresAt may be null
...this.auth.cookieOptions,
});
} else {
res.clearCookie(AuthService.sessionCookieName);
}
if (redirectUri) {
return this.url.safeRedirect(res, redirectUri);
} else {
return res.send(null);
}
}
@Public()
@Get('/magic-link')
async magicLinkSignIn(
@Req() req: Request,
@Res() res: Response,
@Query('token') token?: string,
@Query('email') email?: string,
@Query('redirect_uri') redirectUri = this.url.home
) {
if (!token || !email) {
throw new BadRequestException('Invalid Sign-in mail Token');
}
email = decodeURIComponent(email);
token = decodeURIComponent(token);
validators.assertValidEmail(email);
const valid = await this.token.verifyToken(TokenType.SignIn, token, {
credential: email,
});
if (!valid) {
throw new BadRequestException('Invalid Sign-in mail Token');
}
const user = await this.user.fulfillUser(email, {
emailVerifiedAt: new Date(),
registered: true,
});
await this.auth.setCookie(req, res, user);
return this.url.safeRedirect(res, redirectUri);
}
@Public()
@Get('/session')
async currentSessionUser(@CurrentUser() user?: CurrentUser) {
return {
user,
};
}
@Public()
@Get('/sessions')
async currentSessionUsers(@Req() req: Request) {
const token = req.cookies[AuthService.sessionCookieName];
if (!token) {
return {
users: [],
};
}
return {
users: await this.auth.getUserList(token),
};
}
@Public()
@Get('/challenge')
async challenge() {
// TODO: impl in following PR
return {
challenge: randomUUID(),
resource: randomUUID(),
};
}
}
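A client-side sketch of a password sign-in against the controller above; the email and password are placeholders. On success the `affine_session` cookie (named in AuthService) is set and the signed-in user is returned.

const res = await fetch('/api/auth/sign-in', {
  method: 'POST',
  headers: { 'content-type': 'application/json' },
  // omitting `password` would instead trigger the magic-link email flow
  body: JSON.stringify({ email: 'user@example.com', password: 'secret' }),
});
const user = await res.json();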

View File

@@ -1,55 +0,0 @@
import type { ExecutionContext } from '@nestjs/common';
import { createParamDecorator } from '@nestjs/common';
import { User } from '@prisma/client';
import { getRequestResponseFromContext } from '../../fundamentals';
function getUserFromContext(context: ExecutionContext) {
return getRequestResponseFromContext(context).req.user;
}
/**
* Used to fetch current user from the request context.
*
* > The user may be undefined if the authorization token or session cookie is not provided.
*
* @example
*
* ```typescript
* // Graphql Query
* \@Query(() => UserType)
* user(@CurrentUser() user: CurrentUser) {
* return user;
* }
* ```
*
* ```typescript
* // HTTP Controller
* \@Get('/user')
* user(@CurrentUser() user: CurrentUser) {
* return user;
* }
* ```
*
* ```typescript
* // for public apis
* \@Public()
* \@Get('/session')
* session(@CurrentUser() user?: CurrentUser) {
* return user
* }
* ```
*/
// interface and variable don't conflict
// eslint-disable-next-line no-redeclare
export const CurrentUser = createParamDecorator(
(_: unknown, context: ExecutionContext) => {
return getUserFromContext(context);
}
);
export interface CurrentUser
extends Pick<User, 'id' | 'email' | 'avatarUrl' | 'name'> {
hasPassword: boolean | null;
emailVerified: boolean;
}

View File

@@ -1,100 +0,0 @@
import type {
CanActivate,
ExecutionContext,
OnModuleInit,
} from '@nestjs/common';
import {
Injectable,
SetMetadata,
UnauthorizedException,
UseGuards,
} from '@nestjs/common';
import { ModuleRef, Reflector } from '@nestjs/core';
import { getRequestResponseFromContext } from '../../fundamentals';
import { AuthService, parseAuthUserSeqNum } from './service';
function extractTokenFromHeader(authorization: string) {
if (!/^Bearer\s/i.test(authorization)) {
return;
}
return authorization.substring(7);
}
@Injectable()
export class AuthGuard implements CanActivate, OnModuleInit {
private auth!: AuthService;
constructor(
private readonly ref: ModuleRef,
private readonly reflector: Reflector
) {}
onModuleInit() {
this.auth = this.ref.get(AuthService, { strict: false });
}
async canActivate(context: ExecutionContext) {
const { req } = getRequestResponseFromContext(context);
// check cookie
let sessionToken: string | undefined =
req.cookies[AuthService.sessionCookieName];
if (!sessionToken && req.headers.authorization) {
sessionToken = extractTokenFromHeader(req.headers.authorization);
}
if (sessionToken) {
const userSeq = parseAuthUserSeqNum(
req.headers[AuthService.authUserSeqHeaderName]
);
const user = await this.auth.getUser(sessionToken, userSeq);
if (user) {
req.user = user;
}
}
// api is public
const isPublic = this.reflector.get<boolean>(
'isPublic',
context.getHandler()
);
if (isPublic) {
return true;
}
if (!req.user) {
throw new UnauthorizedException('You are not signed in.');
}
return true;
}
}
/**
* This guard is used to protect routes/queries/mutations that require a user to be logged in.
*
* The `@CurrentUser()` parameter decorator used in `Auth`-guarded queries will always give us the user, because the `Auth` guard
* throws early if the user is not logged in.
*
* @example
*
* ```typescript
* \@Auth()
* \@Query(() => UserType)
* user(@CurrentUser() user: CurrentUser) {
* return user;
* }
* ```
*/
export const Auth = () => {
return UseGuards(AuthGuard);
};
// api is publicly accessible
export const Public = () => SetMetadata('isPublic', true);

View File

@@ -1,21 +0,0 @@
import { Module } from '@nestjs/common';
import { FeatureModule } from '../features';
import { UserModule } from '../user';
import { AuthController } from './controller';
import { AuthResolver } from './resolver';
import { AuthService } from './service';
import { TokenService, TokenType } from './token';
@Module({
imports: [FeatureModule, UserModule],
providers: [AuthService, AuthResolver, TokenService],
exports: [AuthService],
controllers: [AuthController],
})
export class AuthModule {}
export * from './guard';
export { ClientTokenType } from './resolver';
export { AuthService, TokenService, TokenType };
export * from './current-user';

View File

@@ -1,397 +0,0 @@
import {
BadRequestException,
ForbiddenException,
UseGuards,
} from '@nestjs/common';
import {
Args,
Context,
Field,
Mutation,
ObjectType,
Parent,
Query,
ResolveField,
Resolver,
} from '@nestjs/graphql';
import type { Request, Response } from 'express';
import { CloudThrottlerGuard, Config, Throttle } from '../../fundamentals';
import { UserService } from '../user';
import { UserType } from '../user/types';
import { validators } from '../utils/validators';
import { CurrentUser } from './current-user';
import { Public } from './guard';
import { AuthService } from './service';
import { TokenService, TokenType } from './token';
@ObjectType('tokenType')
export class ClientTokenType {
@Field()
token!: string;
@Field()
refresh!: string;
@Field({ nullable: true })
sessionToken?: string;
}
/**
* Auth resolver
* Token rate limit: 20 req/m
* Sign up/in rate limit: 10 req/m
* Other rate limit: 5 req/m
*/
@UseGuards(CloudThrottlerGuard)
@Resolver(() => UserType)
export class AuthResolver {
constructor(
private readonly config: Config,
private readonly auth: AuthService,
private readonly user: UserService,
private readonly token: TokenService
) {}
@Throttle({
default: {
limit: 10,
ttl: 60,
},
})
@Public()
@Query(() => UserType, {
name: 'currentUser',
description: 'Get current user',
nullable: true,
})
currentUser(@CurrentUser() user?: CurrentUser): UserType | undefined {
return user;
}
@Throttle({
default: {
limit: 20,
ttl: 60,
},
})
@ResolveField(() => ClientTokenType, {
name: 'token',
deprecationReason: 'use [/api/auth/authorize]',
})
async clientToken(
@CurrentUser() currentUser: CurrentUser,
@Parent() user: UserType
): Promise<ClientTokenType> {
if (user.id !== currentUser.id) {
throw new ForbiddenException('Invalid user');
}
const session = await this.auth.createUserSession(
user,
undefined,
this.config.auth.accessToken.ttl
);
return {
sessionToken: session.sessionId,
token: session.sessionId,
refresh: '',
};
}
@Public()
@Throttle({
default: {
limit: 10,
ttl: 60,
},
})
@Mutation(() => UserType)
async signUp(
@Context() ctx: { req: Request; res: Response },
@Args('name') name: string,
@Args('email') email: string,
@Args('password') password: string
) {
validators.assertValidCredential({ email, password });
const user = await this.auth.signUp(name, email, password);
await this.auth.setCookie(ctx.req, ctx.res, user);
ctx.req.user = user;
return user;
}
@Public()
@Throttle({
default: {
limit: 10,
ttl: 60,
},
})
@Mutation(() => UserType)
async signIn(
@Context() ctx: { req: Request; res: Response },
@Args('email') email: string,
@Args('password') password: string
) {
validators.assertValidEmail(email);
const user = await this.auth.signIn(email, password);
await this.auth.setCookie(ctx.req, ctx.res, user);
ctx.req.user = user;
return user;
}
@Throttle({
default: {
limit: 5,
ttl: 60,
},
})
@Mutation(() => UserType)
async changePassword(
@CurrentUser() user: CurrentUser,
@Args('token') token: string,
@Args('newPassword') newPassword: string
) {
validators.assertValidPassword(newPassword);
// NOTE: Set & Change password use the same token type.
const valid = await this.token.verifyToken(
TokenType.ChangePassword,
token,
{
credential: user.id,
}
);
if (!valid) {
throw new ForbiddenException('Invalid token');
}
await this.auth.changePassword(user.id, newPassword);
return user;
}
@Throttle({
default: {
limit: 5,
ttl: 60,
},
})
@Mutation(() => UserType)
async changeEmail(
@CurrentUser() user: CurrentUser,
@Args('token') token: string,
@Args('email') email: string
) {
validators.assertValidEmail(email);
// @see [sendChangeEmail]
const valid = await this.token.verifyToken(TokenType.VerifyEmail, token, {
credential: user.id,
});
if (!valid) {
throw new ForbiddenException('Invalid token');
}
email = decodeURIComponent(email);
await this.auth.changeEmail(user.id, email);
await this.auth.sendNotificationChangeEmail(email);
return user;
}
@Throttle({
default: {
limit: 5,
ttl: 60,
},
})
@Mutation(() => Boolean)
async sendChangePasswordEmail(
@CurrentUser() user: CurrentUser,
@Args('callbackUrl') callbackUrl: string,
// @deprecated
@Args('email', { nullable: true }) _email?: string
) {
if (!user.emailVerified) {
throw new ForbiddenException('Please verify your email first.');
}
const token = await this.token.createToken(
TokenType.ChangePassword,
user.id
);
const url = new URL(callbackUrl, this.config.baseUrl);
url.searchParams.set('token', token);
const res = await this.auth.sendChangePasswordEmail(
user.email,
url.toString()
);
return !res.rejected.length;
}
@Throttle({
default: {
limit: 5,
ttl: 60,
},
})
@Mutation(() => Boolean)
async sendSetPasswordEmail(
@CurrentUser() user: CurrentUser,
@Args('callbackUrl') callbackUrl: string,
@Args('email', { nullable: true }) _email?: string
) {
if (!user.emailVerified) {
throw new ForbiddenException('Please verify your email first.');
}
const token = await this.token.createToken(
TokenType.ChangePassword,
user.id
);
const url = new URL(callbackUrl, this.config.baseUrl);
url.searchParams.set('token', token);
const res = await this.auth.sendSetPasswordEmail(
user.email,
url.toString()
);
return !res.rejected.length;
}
// The change-email flow is:
// 1. send an email to the current address (`sendChangeEmail`)
// 2. the user opens the change-email page from that email
// 3. send a verification email to the new address (`sendVerifyChangeEmail`)
// 4. the user opens the confirmation page from the new email
// 5. the user clicks the confirm button
// 6. send a notification email
@Throttle({
default: {
limit: 5,
ttl: 60,
},
})
@Mutation(() => Boolean)
async sendChangeEmail(
@CurrentUser() user: CurrentUser,
@Args('callbackUrl') callbackUrl: string,
// @deprecated
@Args('email', { nullable: true }) _email?: string
) {
if (!user.emailVerified) {
throw new ForbiddenException('Please verify your email first.');
}
const token = await this.token.createToken(TokenType.ChangeEmail, user.id);
const url = new URL(callbackUrl, this.config.baseUrl);
url.searchParams.set('token', token);
const res = await this.auth.sendChangeEmail(user.email, url.toString());
return !res.rejected.length;
}
@Throttle({
default: {
limit: 5,
ttl: 60,
},
})
@Mutation(() => Boolean)
async sendVerifyChangeEmail(
@CurrentUser() user: CurrentUser,
@Args('token') token: string,
@Args('email') email: string,
@Args('callbackUrl') callbackUrl: string
) {
validators.assertValidEmail(email);
const valid = await this.token.verifyToken(TokenType.ChangeEmail, token, {
credential: user.id,
});
if (!valid) {
throw new ForbiddenException('Invalid token');
}
const hasRegistered = await this.user.findUserByEmail(email);
if (hasRegistered) {
if (hasRegistered.id !== user.id) {
throw new BadRequestException(`The email provided has been taken.`);
} else {
throw new BadRequestException(
`The email provided is the same as the current email.`
);
}
}
const verifyEmailToken = await this.token.createToken(
TokenType.VerifyEmail,
user.id
);
const url = new URL(callbackUrl, this.config.baseUrl);
url.searchParams.set('token', verifyEmailToken);
url.searchParams.set('email', email);
const res = await this.auth.sendVerifyChangeEmail(email, url.toString());
return !res.rejected.length;
}
@Throttle({
default: {
limit: 5,
ttl: 60,
},
})
@Mutation(() => Boolean)
async sendVerifyEmail(
@CurrentUser() user: CurrentUser,
@Args('callbackUrl') callbackUrl: string
) {
const token = await this.token.createToken(TokenType.VerifyEmail, user.id);
const url = new URL(callbackUrl, this.config.baseUrl);
url.searchParams.set('token', token);
const res = await this.auth.sendVerifyEmail(user.email, url.toString());
return !res.rejected.length;
}
@Throttle({
default: {
limit: 5,
ttl: 60,
},
})
@Mutation(() => Boolean)
async verifyEmail(
@CurrentUser() user: CurrentUser,
@Args('token') token: string
) {
if (!token) {
throw new BadRequestException('Invalid token');
}
const valid = await this.token.verifyToken(TokenType.VerifyEmail, token, {
credential: user.id,
});
if (!valid) {
throw new ForbiddenException('Invalid token');
}
const { emailVerifiedAt } = await this.auth.setEmailVerified(user.id);
return emailVerifiedAt !== null;
}
}
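A sketch of calling the signIn mutation above over HTTP, assuming the GraphQL endpoint is mounted at /graphql (as in the commented template earlier) and that UserType exposes id, email, and name.

const res = await fetch('/graphql', {
  method: 'POST',
  headers: { 'content-type': 'application/json' },
  body: JSON.stringify({
    query: `
      mutation {
        signIn(email: "user@example.com", password: "secret") {
          id
          email
          name
        }
      }
    `,
  }),
});
const { data } = await res.json();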

View File

@@ -1,394 +0,0 @@
import {
BadRequestException,
Injectable,
NotAcceptableException,
OnApplicationBootstrap,
} from '@nestjs/common';
import type { User } from '@prisma/client';
import { PrismaClient } from '@prisma/client';
import type { CookieOptions, Request, Response } from 'express';
import { assign, omit } from 'lodash-es';
import { Config, CryptoHelper, MailService } from '../../fundamentals';
import { FeatureManagementService } from '../features/management';
import { UserService } from '../user/service';
import type { CurrentUser } from './current-user';
export function parseAuthUserSeqNum(value: any) {
let seq: number = 0;
switch (typeof value) {
case 'number': {
seq = value;
break;
}
case 'string': {
const result = value.match(/^(\d{0,10})$/);
if (result?.[1]) {
seq = Number(result[1]);
}
break;
}
default: {
seq = 0;
}
}
return Math.max(0, seq);
}
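// Examples (sketch): parseAuthUserSeqNum('2') === 2, parseAuthUserSeqNum(undefined) === 0,
// and negative numbers are clamped to 0 by Math.max.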
export function sessionUser(
user: Pick<
User,
'id' | 'email' | 'avatarUrl' | 'name' | 'emailVerifiedAt'
> & { password?: string | null }
): CurrentUser {
return assign(
omit(user, 'password', 'registered', 'emailVerifiedAt', 'createdAt'),
{
hasPassword: user.password !== null,
emailVerified: user.emailVerifiedAt !== null,
}
);
}
@Injectable()
export class AuthService implements OnApplicationBootstrap {
readonly cookieOptions: CookieOptions = {
sameSite: 'lax',
httpOnly: true,
path: '/',
secure: this.config.https,
};
static readonly sessionCookieName = 'affine_session';
static readonly authUserSeqHeaderName = 'x-auth-user';
constructor(
private readonly config: Config,
private readonly db: PrismaClient,
private readonly mailer: MailService,
private readonly feature: FeatureManagementService,
private readonly user: UserService,
private readonly crypto: CryptoHelper
) {}
async onApplicationBootstrap() {
if (this.config.node.dev) {
await this.signUp('Dev User', 'dev@affine.pro', 'dev').catch(() => {
// ignore
});
}
}
canSignIn(email: string) {
return this.feature.canEarlyAccess(email);
}
async signUp(
name: string,
email: string,
password: string
): Promise<CurrentUser> {
const user = await this.user.findUserByEmail(email);
if (user) {
throw new BadRequestException('Email was taken');
}
const hashedPassword = await this.crypto.encryptPassword(password);
return this.user
.createUser({
name,
email,
password: hashedPassword,
})
.then(sessionUser);
}
async signIn(email: string, password: string) {
const user = await this.user.findUserWithHashedPasswordByEmail(email);
if (!user) {
throw new NotAcceptableException('Invalid sign in credentials');
}
if (!user.password) {
throw new NotAcceptableException(
'User Password is not set. Should login through email link.'
);
}
const passwordMatches = await this.crypto.verifyPassword(
password,
user.password
);
if (!passwordMatches) {
throw new NotAcceptableException('Invalid sign in credentials');
}
return sessionUser(user);
}
async getUser(token: string, seq = 0): Promise<CurrentUser | null> {
const session = await this.getSession(token);
// no such session
if (!session) {
return null;
}
const userSession = session.userSessions.at(seq);
// no such user session
if (!userSession) {
return null;
}
// user session expired
if (userSession.expiresAt && userSession.expiresAt <= new Date()) {
return null;
}
const user = await this.db.user.findUnique({
where: { id: userSession.userId },
});
if (!user) {
return null;
}
return sessionUser(user);
}
async getUserList(token: string) {
const session = await this.getSession(token);
if (!session || !session.userSessions.length) {
return [];
}
const users = await this.db.user.findMany({
where: {
id: {
in: session.userSessions.map(({ userId }) => userId),
},
},
});
// TODO(@forehalo): need to separate expired session, same for [getUser]
// Session
// | { user: LimitedUser { email, avatarUrl }, expired: true }
// | { user: User, expired: false }
return session.userSessions
.map(userSession => {
// keep users in the same order as userSessions
const user = users.find(({ id }) => id === userSession.userId);
if (!user) {
return null;
}
return sessionUser(user);
})
.filter(Boolean) as CurrentUser[];
}
async signOut(token: string, seq = 0) {
const session = await this.getSession(token);
if (session) {
// seq exceeds the number of logged-in users
if (session.userSessions.length <= seq) {
return session;
}
await this.db.userSession.deleteMany({
where: { id: session.userSessions[seq].id },
});
// no more user sessions active, delete the whole session
if (session.userSessions.length === 1) {
await this.db.session.delete({ where: { id: session.id } });
return null;
}
return session;
}
return null;
}
async getSession(token: string) {
if (!token) {
return null;
}
return this.db.$transaction(async tx => {
const session = await tx.session.findUnique({
where: {
id: token,
},
include: {
userSessions: {
orderBy: {
createdAt: 'asc',
},
},
},
});
if (!session) {
return null;
}
if (session.expiresAt && session.expiresAt <= new Date()) {
await tx.session.delete({
where: {
id: session.id,
},
});
return null;
}
return session;
});
}
async createUserSession(
user: { id: string },
existingSession?: string,
ttl = this.config.auth.session.ttl
) {
const session = existingSession
? await this.getSession(existingSession)
: null;
const expiresAt = new Date(Date.now() + ttl * 1000);
if (session) {
return this.db.userSession.upsert({
where: {
sessionId_userId: {
sessionId: session.id,
userId: user.id,
},
},
update: {
expiresAt,
},
create: {
sessionId: session.id,
userId: user.id,
expiresAt,
},
});
} else {
return this.db.userSession.create({
data: {
expiresAt,
session: {
create: {},
},
user: {
connect: {
id: user.id,
},
},
},
});
}
}
async setCookie(_req: Request, res: Response, user: { id: string }) {
const session = await this.createUserSession(
user
// TODO(@forehalo): enable multi user session
// req.cookies[AuthService.sessionCookieName]
);
res.cookie(AuthService.sessionCookieName, session.sessionId, {
expires: session.expiresAt ?? void 0,
...this.cookieOptions,
});
}
async changePassword(id: string, newPassword: string): Promise<User> {
const user = await this.user.findUserById(id);
if (!user) {
throw new BadRequestException('Invalid email');
}
const hashedPassword = await this.crypto.encryptPassword(newPassword);
return this.db.user.update({
where: {
id: user.id,
},
data: {
password: hashedPassword,
},
});
}
async changeEmail(id: string, newEmail: string): Promise<User> {
const user = await this.user.findUserById(id);
if (!user) {
throw new BadRequestException('Invalid email');
}
return this.db.user.update({
where: {
id,
},
data: {
email: newEmail,
emailVerifiedAt: new Date(),
},
});
}
async setEmailVerified(id: string) {
return await this.db.user.update({
where: {
id,
},
data: {
emailVerifiedAt: new Date(),
},
select: {
emailVerifiedAt: true,
},
});
}
async sendChangePasswordEmail(email: string, callbackUrl: string) {
return this.mailer.sendChangePasswordEmail(email, callbackUrl);
}
async sendSetPasswordEmail(email: string, callbackUrl: string) {
return this.mailer.sendSetPasswordEmail(email, callbackUrl);
}
async sendChangeEmail(email: string, callbackUrl: string) {
return this.mailer.sendChangeEmail(email, callbackUrl);
}
async sendVerifyChangeEmail(email: string, callbackUrl: string) {
return this.mailer.sendVerifyChangeEmail(email, callbackUrl);
}
async sendVerifyEmail(email: string, callbackUrl: string) {
return this.mailer.sendVerifyEmail(email, callbackUrl);
}
async sendNotificationChangeEmail(email: string) {
return this.mailer.sendNotificationChangeEmail(email);
}
async sendSignInEmail(email: string, link: string, signUp: boolean) {
return signUp
? await this.mailer.sendSignUpMail(link.toString(), {
to: email,
})
: await this.mailer.sendSignInMail(link.toString(), {
to: email,
});
}
}

View File

@@ -1,96 +0,0 @@
import { randomUUID } from 'node:crypto';
import { Injectable } from '@nestjs/common';
import { Cron, CronExpression } from '@nestjs/schedule';
import { PrismaClient } from '@prisma/client';
import { CryptoHelper } from '../../fundamentals/helpers';
export enum TokenType {
SignIn,
VerifyEmail,
ChangeEmail,
ChangePassword,
Challenge,
}
@Injectable()
export class TokenService {
constructor(
private readonly db: PrismaClient,
private readonly crypto: CryptoHelper
) {}
async createToken(
type: TokenType,
credential?: string,
ttlInSec: number = 30 * 60
) {
const plaintextToken = randomUUID();
const { token } = await this.db.verificationToken.create({
data: {
type,
token: plaintextToken,
credential,
expiresAt: new Date(Date.now() + ttlInSec * 1000),
},
});
return this.crypto.encrypt(token);
}
async verifyToken(
type: TokenType,
token: string,
{
credential,
keep,
}: {
credential?: string;
keep?: boolean;
} = {}
) {
token = this.crypto.decrypt(token);
const record = await this.db.verificationToken.findUnique({
where: {
type_token: {
token,
type,
},
},
});
if (!record) {
return null;
}
const expired = record.expiresAt <= new Date();
const valid =
!expired && (!record.credential || record.credential === credential);
if ((expired || valid) && !keep) {
await this.db.verificationToken.delete({
where: {
type_token: {
token,
type,
},
},
});
}
return valid ? record : null;
}
@Cron(CronExpression.EVERY_DAY_AT_MIDNIGHT)
cleanExpiredTokens() {
return this.db.verificationToken.deleteMany({
where: {
expiresAt: {
lte: new Date(),
},
},
});
}
}
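A usage sketch of the token service above, as the change-email flow might drive it; tokenService and user are assumed to be in scope.

// issue a token bound to the user (default TTL is 30 minutes)
const token = await tokenService.createToken(TokenType.ChangeEmail, user.id);

// ...later, when the user comes back with the token from the email link:
const record = await tokenService.verifyToken(TokenType.ChangeEmail, token, {
  credential: user.id,
});
if (!record) {
  throw new Error('invalid or expired token');
}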

View File

@@ -1,97 +0,0 @@
import { Module } from '@nestjs/common';
import { Field, ObjectType, Query, registerEnumType } from '@nestjs/graphql';
import { DeploymentType } from '../fundamentals';
import { Public } from './auth';
export enum ServerFeature {
Payment = 'payment',
OAuth = 'oauth',
}
registerEnumType(ServerFeature, {
name: 'ServerFeature',
});
registerEnumType(DeploymentType, {
name: 'ServerDeploymentType',
});
const ENABLED_FEATURES: Set<ServerFeature> = new Set();
export function ADD_ENABLED_FEATURES(feature: ServerFeature) {
ENABLED_FEATURES.add(feature);
}
@ObjectType()
export class PasswordLimitsType {
@Field()
minLength!: number;
@Field()
maxLength!: number;
}
@ObjectType()
export class CredentialsRequirementType {
@Field()
password!: PasswordLimitsType;
}
@ObjectType()
export class ServerConfigType {
@Field({
description:
'server name that could be shown as a badge on the user interface',
})
name!: string;
@Field({ description: 'server version' })
version!: string;
@Field({ description: 'server base url' })
baseUrl!: string;
@Field(() => DeploymentType, { description: 'server type' })
type!: DeploymentType;
/**
* @deprecated
*/
@Field({ description: 'server flavor', deprecationReason: 'use `features`' })
flavor!: string;
@Field(() => [ServerFeature], { description: 'enabled server features' })
features!: ServerFeature[];
@Field(() => CredentialsRequirementType, {
description: 'credentials requirement',
})
credentialsRequirement!: CredentialsRequirementType;
}
export class ServerConfigResolver {
@Public()
@Query(() => ServerConfigType, {
description: 'server config',
})
serverConfig(): ServerConfigType {
return {
name: AFFiNE.serverName,
version: AFFiNE.version,
baseUrl: AFFiNE.baseUrl,
type: AFFiNE.type,
// BACKWARD COMPATIBILITY
// the old flavors contain `selfhosted`, but it is actually not a flavor but a deployment type
// this field should be removed after frontend feature flags are implemented
flavor: AFFiNE.type,
features: Array.from(ENABLED_FEATURES),
credentialsRequirement: {
password: AFFiNE.auth.password,
},
};
}
}
@Module({
providers: [ServerConfigResolver],
})
export class ServerConfigModule {}

View File

@@ -1,8 +0,0 @@
import { z } from 'zod';
import { FeatureType } from './common';
export const featureUnlimitedWorkspace = z.object({
feature: z.literal(FeatureType.UnlimitedWorkspace),
configs: z.object({}),
});

View File

@@ -1,106 +0,0 @@
import { FeatureKind } from '../features/types';
import { OneDay, OneGB, OneMB } from './constant';
import { Quota, QuotaType } from './types';
export const Quotas: Quota[] = [
{
feature: QuotaType.FreePlanV1,
type: FeatureKind.Quota,
version: 1,
configs: {
// quota name
name: 'Free',
// single blob limit 10MB
blobLimit: 10 * OneMB,
// total blob limit 10GB
storageQuota: 10 * OneGB,
// history period of validity 7 days
historyPeriod: 7 * OneDay,
// member limit 3
memberLimit: 3,
},
},
{
feature: QuotaType.ProPlanV1,
type: FeatureKind.Quota,
version: 1,
configs: {
// quota name
name: 'Pro',
// single blob limit 100MB
blobLimit: 100 * OneMB,
// total blob limit 100GB
storageQuota: 100 * OneGB,
// history period of validity 30 days
historyPeriod: 30 * OneDay,
// member limit 10
memberLimit: 10,
},
},
{
feature: QuotaType.RestrictedPlanV1,
type: FeatureKind.Quota,
version: 1,
configs: {
// quota name
name: 'Restricted',
// single blob limit 1MB
blobLimit: OneMB,
// total blob limit 10MB
storageQuota: 10 * OneMB,
// history period of validity 30 days
historyPeriod: 30 * OneDay,
// member limit 10
memberLimit: 10,
},
},
{
feature: QuotaType.FreePlanV1,
type: FeatureKind.Quota,
version: 2,
configs: {
// quota name
name: 'Free',
// single blob limit 100MB
blobLimit: 100 * OneMB,
// total blob limit 10GB
storageQuota: 10 * OneGB,
// history period of validity 7 days
historyPeriod: 7 * OneDay,
// member limit 3
memberLimit: 3,
},
},
{
feature: QuotaType.FreePlanV1,
type: FeatureKind.Quota,
version: 3,
configs: {
// quota name
name: 'Free',
// single blob limit 10MB
blobLimit: 10 * OneMB,
// the server limit is larger than the client's to handle an edge case:
// when a user downgrades from pro to free, they can still continue
// to upload previously added files that exceed the free limit
// NOTE: this is a product decision and may change in the future
businessBlobLimit: 100 * OneMB,
// total blob limit 10GB
storageQuota: 10 * OneGB,
// history period of validity 7 days
historyPeriod: 7 * OneDay,
// member limit 3
memberLimit: 3,
},
},
];
export const Quota_FreePlanV1_1 = {
feature: Quotas[4].feature,
version: Quotas[4].version,
};
export const Quota_ProPlanV1 = {
feature: Quotas[1].feature,
version: Quotas[1].version,
};
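A small illustrative helper (not part of this diff) showing how these feature/version pairs map back to a concrete entry in `Quotas`:
// Illustrative lookup: resolve a feature/version reference to its quota definition.
function resolveQuota(ref: { feature: QuotaType; version: number }) {
  return Quotas.find(q => q.feature === ref.feature && q.version === ref.version);
}
// e.g. resolveQuota(Quota_FreePlanV1_1)?.configs.blobLimit === 10 * OneMB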

@@ -1,125 +0,0 @@
import { Injectable, Logger, NotFoundException } from '@nestjs/common';
import { FeatureService, FeatureType } from '../features';
import { WorkspaceBlobStorage } from '../storage';
import { PermissionService } from '../workspaces/permission';
import { OneGB } from './constant';
import { QuotaService } from './service';
import { formatSize, QuotaQueryType } from './types';
type QuotaBusinessType = QuotaQueryType & { businessBlobLimit: number };
@Injectable()
export class QuotaManagementService {
protected logger = new Logger(QuotaManagementService.name);
constructor(
private readonly feature: FeatureService,
private readonly quota: QuotaService,
private readonly permissions: PermissionService,
private readonly storage: WorkspaceBlobStorage
) {}
async getUserQuota(userId: string) {
const quota = await this.quota.getUserQuota(userId);
return {
name: quota.feature.name,
reason: quota.reason,
createAt: quota.createdAt,
expiredAt: quota.expiredAt,
blobLimit: quota.feature.blobLimit,
businessBlobLimit: quota.feature.businessBlobLimit,
storageQuota: quota.feature.storageQuota,
historyPeriod: quota.feature.historyPeriod,
memberLimit: quota.feature.memberLimit,
};
}
// TODO: lazy calculation; should be optimized with a cache
async getUserUsage(userId: string) {
const workspaces = await this.permissions.getOwnedWorkspaces(userId);
const sizes = await Promise.allSettled(
workspaces.map(workspace => this.storage.totalSize(workspace))
);
return sizes.reduce((total, size) => {
if (size.status === 'fulfilled') {
if (Number.isSafeInteger(size.value)) {
return total + size.value;
} else {
this.logger.error(`Workspace size is invalid: ${size.value}`);
}
} else {
this.logger.error(`Failed to get workspace size: ${size.reason}`);
}
return total;
}, 0);
}
// get the workspace owner's quota and the total size used
// the quota is applied to the owner's account
async getWorkspaceUsage(workspaceId: string): Promise<QuotaBusinessType> {
const { user: owner } =
await this.permissions.getWorkspaceOwner(workspaceId);
if (!owner) throw new NotFoundException('Workspace owner not found');
const {
feature: {
name,
blobLimit,
businessBlobLimit,
historyPeriod,
memberLimit,
storageQuota,
humanReadable,
},
} = await this.quota.getUserQuota(owner.id);
// get the total size used across all of the owner's workspaces
const usedSize = await this.getUserUsage(owner.id);
const quota = {
name,
blobLimit,
businessBlobLimit,
historyPeriod,
memberLimit,
storageQuota,
humanReadable,
usedSize,
};
// relax restrictions if the workspace has the unlimited feature
// todo(@darkskygit): need a mechanism to let a feature act as middleware that edits the quota
const unlimited = await this.feature.hasWorkspaceFeature(
workspaceId,
FeatureType.UnlimitedWorkspace
);
if (unlimited) {
return this.mergeUnlimitedQuota(quota);
}
return quota;
}
private mergeUnlimitedQuota(orig: QuotaBusinessType) {
return {
...orig,
storageQuota: 1000 * OneGB,
memberLimit: 1000,
humanReadable: {
...orig.humanReadable,
name: 'Unlimited',
storageQuota: formatSize(1000 * OneGB),
memberLimit: '1000',
},
};
}
async checkBlobQuota(workspaceId: string, size: number) {
const { storageQuota, usedSize } =
await this.getWorkspaceUsage(workspaceId);
return storageQuota - (size + usedSize);
}
}
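A caller is expected to treat a negative result from `checkBlobQuota` as an exceeded quota; the guard below is a hypothetical sketch, not code from this diff.
// Hypothetical upload guard built on checkBlobQuota (illustrative only).
async function assertBlobFits(
  quota: QuotaManagementService,
  workspaceId: string,
  blobSize: number
) {
  const remaining = await quota.checkBlobQuota(workspaceId, blobSize);
  if (remaining < 0) {
    throw new Error(`workspace ${workspaceId} has exceeded its storage quota`);
  }
}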

@@ -1,110 +0,0 @@
import { Field, ObjectType } from '@nestjs/graphql';
import { SafeIntResolver } from 'graphql-scalars';
import { z } from 'zod';
import { commonFeatureSchema, FeatureKind } from '../features/types';
import { ByteUnit, OneDay, OneKB } from './constant';
/// ======== quota define ========
/**
* naming rule:
* we append Vx to the end of the feature name to indicate the version of the feature
* x is a number starting from 1; it changes only when we change the schema of the config
* for example, if we change the value of `blobLimit` from 10MB to 100MB, we only bump the `version` field from 1 to 2
* but if we remove or rename the `blobLimit` field, we bump Vx to Vx+1
*/
export enum QuotaType {
FreePlanV1 = 'free_plan_v1',
ProPlanV1 = 'pro_plan_v1',
// only for test, smaller quota
RestrictedPlanV1 = 'restricted_plan_v1',
}
const quotaPlan = z.object({
feature: z.enum([
QuotaType.FreePlanV1,
QuotaType.ProPlanV1,
QuotaType.RestrictedPlanV1,
]),
configs: z.object({
name: z.string(),
blobLimit: z.number().positive().int(),
storageQuota: z.number().positive().int(),
historyPeriod: z.number().positive().int(),
memberLimit: z.number().positive().int(),
businessBlobLimit: z.number().positive().int().nullish(),
}),
});
/// ======== schema infer ========
export const QuotaSchema = commonFeatureSchema
.extend({
type: z.literal(FeatureKind.Quota),
})
.and(z.discriminatedUnion('feature', [quotaPlan]));
export type Quota = z.infer<typeof QuotaSchema>;
/// ======== query types ========
@ObjectType()
export class HumanReadableQuotaType {
@Field(() => String)
name!: string;
@Field(() => String)
blobLimit!: string;
@Field(() => String)
storageQuota!: string;
@Field(() => String)
historyPeriod!: string;
@Field(() => String)
memberLimit!: string;
}
@ObjectType()
export class QuotaQueryType {
@Field(() => String)
name!: string;
@Field(() => SafeIntResolver)
blobLimit!: number;
@Field(() => SafeIntResolver)
historyPeriod!: number;
@Field(() => SafeIntResolver)
memberLimit!: number;
@Field(() => SafeIntResolver)
storageQuota!: number;
@Field(() => HumanReadableQuotaType)
humanReadable!: HumanReadableQuotaType;
@Field(() => SafeIntResolver)
usedSize!: number;
}
/// ======== utils ========
export function formatSize(bytes: number, decimals: number = 2): string {
if (bytes === 0) return '0 B';
const dm = decimals < 0 ? 0 : decimals;
const i = Math.floor(Math.log(bytes) / Math.log(OneKB));
return (
parseFloat((bytes / Math.pow(OneKB, i)).toFixed(dm)) + ' ' + ByteUnit[i]
);
}
export function formatDate(ms: number): string {
return `${(ms / OneDay).toFixed(0)} days`;
}
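Assuming `OneKB` is 1024, `OneDay` is one day in milliseconds, and `ByteUnit` is the usual `['B', 'KB', 'MB', ...]` list (those constants live in `constant.ts` and are not shown in this diff), the helpers behave roughly like this:
// Expected behaviour under the stated assumptions (illustrative only).
formatSize(10 * 1024 * 1024);        // '10 MB'
formatSize(1536, 1);                 // '1.5 KB'
formatDate(7 * 24 * 60 * 60 * 1000); // '7 days'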

@@ -1,91 +0,0 @@
import { Injectable } from '@nestjs/common';
import {
type BlobInputType,
Cache,
EventEmitter,
type EventPayload,
type ListObjectsMetadata,
OnEvent,
type StorageProvider,
StorageProviderFactory,
} from '../../../fundamentals';
@Injectable()
export class WorkspaceBlobStorage {
public readonly provider: StorageProvider;
constructor(
private readonly event: EventEmitter,
private readonly storageFactory: StorageProviderFactory,
private readonly cache: Cache
) {
this.provider = this.storageFactory.create('blob');
}
async put(workspaceId: string, key: string, blob: BlobInputType) {
await this.provider.put(`${workspaceId}/${key}`, blob);
await this.cache.delete(`blob-list:${workspaceId}`);
}
async get(workspaceId: string, key: string) {
return this.provider.get(`${workspaceId}/${key}`);
}
async list(workspaceId: string) {
const cachedList = await this.cache.list<ListObjectsMetadata>(
`blob-list:${workspaceId}`,
0,
-1
);
if (cachedList.length > 0) {
return cachedList;
}
const blobs = await this.provider.list(workspaceId + '/');
blobs.forEach(item => {
// trim workspace prefix
item.key = item.key.slice(workspaceId.length + 1);
});
await this.cache.pushBack(`blob-list:${workspaceId}`, ...blobs);
return blobs;
}
/**
* we won't really delete the blobs until the doc blob manager is implemented soundly
*/
async delete(_workspaceId: string, _key: string) {
// return this.provider.delete(`${workspaceId}/${key}`);
}
async totalSize(workspaceId: string) {
const blobs = await this.list(workspaceId);
// how could we ignore the ones that were soft-deleted?
return blobs.reduce((acc, item) => acc + item.size, 0);
}
@OnEvent('workspace.deleted')
async onWorkspaceDeleted(workspaceId: EventPayload<'workspace.deleted'>) {
const blobs = await this.list(workspaceId);
// emit one event per blob to avoid holding the CPU for too long
blobs.forEach(blob => {
this.event.emit('workspace.blob.deleted', {
workspaceId: workspaceId,
name: blob.key,
});
});
}
@OnEvent('workspace.blob.deleted')
async onDeleteWorkspaceBlob({
workspaceId,
name,
}: EventPayload<'workspace.blob.deleted'>) {
await this.delete(workspaceId, name);
}
}

@@ -1,368 +0,0 @@
import { applyDecorators, Logger } from '@nestjs/common';
import {
ConnectedSocket,
MessageBody,
OnGatewayConnection,
OnGatewayDisconnect,
SubscribeMessage as RawSubscribeMessage,
WebSocketGateway,
WebSocketServer,
} from '@nestjs/websockets';
import { Server, Socket } from 'socket.io';
import { encodeStateAsUpdate, encodeStateVector } from 'yjs';
import { CallTimer, metrics } from '../../../fundamentals';
import { Auth, CurrentUser } from '../../auth';
import { DocManager } from '../../doc';
import { DocID } from '../../utils/doc';
import { PermissionService } from '../../workspaces/permission';
import { Permission } from '../../workspaces/types';
import {
AccessDeniedError,
DocNotFoundError,
EventError,
EventErrorCode,
InternalError,
NotInWorkspaceError,
} from './error';
export const GatewayErrorWrapper = (): MethodDecorator => {
// @ts-expect-error allow
return (
_target,
_key,
desc: TypedPropertyDescriptor<(...args: any[]) => any>
) => {
const originalMethod = desc.value;
if (!originalMethod) {
return desc;
}
desc.value = async function (...args: any[]) {
try {
return await originalMethod.apply(this, args);
} catch (e) {
if (e instanceof EventError) {
return {
error: e,
};
} else {
metrics.socketio.counter('unhandled_errors').add(1);
new Logger('EventsGateway').error(e, (e as Error).stack);
return {
error: new InternalError(e as Error),
};
}
}
};
return desc;
};
};
const SubscribeMessage = (event: string) =>
applyDecorators(
GatewayErrorWrapper(),
CallTimer('socketio', 'event_duration', { event }),
RawSubscribeMessage(event)
);
type EventResponse<Data = any> =
| {
error: EventError;
}
| (Data extends never
? {
data?: never;
}
: {
data: Data;
});
function Sync(workspaceId: string): `${string}:sync` {
return `${workspaceId}:sync`;
}
function Awareness(workspaceId: string): `${string}:awareness` {
return `${workspaceId}:awareness`;
}
@WebSocketGateway({
cors: !AFFiNE.node.prod,
transports: ['websocket'],
// see: https://socket.io/docs/v4/server-options/#maxhttpbuffersize
maxHttpBufferSize: 1e8, // 100 MB
})
export class EventsGateway implements OnGatewayConnection, OnGatewayDisconnect {
protected logger = new Logger(EventsGateway.name);
private connectionCount = 0;
constructor(
private readonly docManager: DocManager,
private readonly permissions: PermissionService
) {}
@WebSocketServer()
server!: Server;
handleConnection() {
this.connectionCount++;
metrics.socketio.gauge('realtime_connections').record(this.connectionCount);
}
handleDisconnect() {
this.connectionCount--;
metrics.socketio.gauge('realtime_connections').record(this.connectionCount);
}
assertVersion(client: Socket, version?: string) {
if (
// @todo(@darkskygit): remove this flag after 0.12 goes stable
AFFiNE.featureFlags.syncClientVersionCheck &&
version !== AFFiNE.version
) {
client.emit('server-version-rejected', {
currentVersion: version,
requiredVersion: AFFiNE.version,
reason: `Client version${
version ? ` ${version}` : ''
} is outdated, please update to ${AFFiNE.version}`,
});
throw new EventError(
EventErrorCode.VERSION_REJECTED,
`Client version ${version} is outdated, please update to ${AFFiNE.version}`
);
}
}
async joinWorkspace(
client: Socket,
room: `${string}:${'sync' | 'awareness'}`
) {
await client.join(room);
}
async leaveWorkspace(
client: Socket,
room: `${string}:${'sync' | 'awareness'}`
) {
await client.leave(room);
}
assertInWorkspace(client: Socket, room: `${string}:${'sync' | 'awareness'}`) {
if (!client.rooms.has(room)) {
throw new NotInWorkspaceError(room);
}
}
async assertWorkspaceAccessible(
workspaceId: string,
userId: string,
permission: Permission = Permission.Read
) {
if (
!(await this.permissions.isWorkspaceMember(
workspaceId,
userId,
permission
))
) {
throw new AccessDeniedError(workspaceId);
}
}
@Auth()
@SubscribeMessage('client-handshake-sync')
async handleClientHandshakeSync(
@CurrentUser() user: CurrentUser,
@MessageBody('workspaceId') workspaceId: string,
@MessageBody('version') version: string | undefined,
@ConnectedSocket() client: Socket
): Promise<EventResponse<{ clientId: string }>> {
this.assertVersion(client, version);
await this.assertWorkspaceAccessible(
workspaceId,
user.id,
Permission.Write
);
await this.joinWorkspace(client, Sync(workspaceId));
return {
data: {
clientId: client.id,
},
};
}
@Auth()
@SubscribeMessage('client-handshake-awareness')
async handleClientHandshakeAwareness(
@CurrentUser() user: CurrentUser,
@MessageBody('workspaceId') workspaceId: string,
@MessageBody('version') version: string | undefined,
@ConnectedSocket() client: Socket
): Promise<EventResponse<{ clientId: string }>> {
this.assertVersion(client, version);
await this.assertWorkspaceAccessible(
workspaceId,
user.id,
Permission.Write
);
await this.joinWorkspace(client, Awareness(workspaceId));
return {
data: {
clientId: client.id,
},
};
}
@SubscribeMessage('client-leave-sync')
async handleLeaveSync(
@MessageBody() workspaceId: string,
@ConnectedSocket() client: Socket
): Promise<EventResponse> {
this.assertInWorkspace(client, Sync(workspaceId));
await this.leaveWorkspace(client, Sync(workspaceId));
return {};
}
@SubscribeMessage('client-leave-awareness')
async handleLeaveAwareness(
@MessageBody() workspaceId: string,
@ConnectedSocket() client: Socket
): Promise<EventResponse> {
this.assertInWorkspace(client, Awareness(workspaceId));
await this.leaveWorkspace(client, Awareness(workspaceId));
return {};
}
@SubscribeMessage('client-pre-sync')
async loadDocStats(
@ConnectedSocket() client: Socket,
@MessageBody()
{ workspaceId, timestamp }: { workspaceId: string; timestamp?: number }
): Promise<EventResponse<Record<string, number>>> {
this.assertInWorkspace(client, Sync(workspaceId));
const stats = await this.docManager.getDocTimestamps(
workspaceId,
timestamp
);
return {
data: stats,
};
}
@SubscribeMessage('client-update-v2')
async handleClientUpdateV2(
@MessageBody()
{
workspaceId,
guid,
updates,
}: {
workspaceId: string;
guid: string;
updates: string[];
},
@ConnectedSocket() client: Socket
): Promise<EventResponse<{ accepted: true; timestamp?: number }>> {
this.assertInWorkspace(client, Sync(workspaceId));
const docId = new DocID(guid, workspaceId);
const buffers = updates.map(update => Buffer.from(update, 'base64'));
const timestamp = await this.docManager.batchPush(
docId.workspace,
docId.guid,
buffers
);
client
.to(Sync(workspaceId))
.emit('server-updates', { workspaceId, guid, updates, timestamp });
return {
data: {
accepted: true,
timestamp,
},
};
}
@SubscribeMessage('doc-load-v2')
async loadDocV2(
@ConnectedSocket() client: Socket,
@MessageBody()
{
workspaceId,
guid,
stateVector,
}: {
workspaceId: string;
guid: string;
stateVector?: string;
}
): Promise<
EventResponse<{ missing: string; state?: string; timestamp: number }>
> {
this.assertInWorkspace(client, Sync(workspaceId));
const docId = new DocID(guid, workspaceId);
const res = await this.docManager.get(docId.workspace, docId.guid);
if (!res) {
return {
error: new DocNotFoundError(workspaceId, docId.guid),
};
}
const missing = Buffer.from(
encodeStateAsUpdate(
res.doc,
stateVector ? Buffer.from(stateVector, 'base64') : undefined
)
).toString('base64');
const state = Buffer.from(encodeStateVector(res.doc)).toString('base64');
return {
data: {
missing,
state,
timestamp: res.timestamp,
},
};
}
@SubscribeMessage('awareness-init')
async handleInitAwareness(
@MessageBody() workspaceId: string,
@ConnectedSocket() client: Socket
): Promise<EventResponse<{ clientId: string }>> {
this.assertInWorkspace(client, Awareness(workspaceId));
client.to(Awareness(workspaceId)).emit('new-client-awareness-init');
return {
data: {
clientId: client.id,
},
};
}
@SubscribeMessage('awareness-update')
async handleHelpGatheringAwareness(
@MessageBody()
{
workspaceId,
awarenessUpdate,
}: { workspaceId: string; awarenessUpdate: string },
@ConnectedSocket() client: Socket
): Promise<EventResponse> {
this.assertInWorkspace(client, Awareness(workspaceId));
client
.to(Awareness(workspaceId))
.emit('server-awareness-broadcast', { workspaceId, awarenessUpdate });
return {};
}
}
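A minimal client-side sketch of the sync handshake, assuming `socket.io-client` v4, an already-authenticated session, and that NestJS returns each handler's result as the socket.io acknowledgement; the URL, workspace id, and version string are placeholders.
import { io } from 'socket.io-client';

// Hypothetical client for the gateway above (illustrative only).
const socket = io('https://app.example.com', { transports: ['websocket'] });

socket.emit(
  'client-handshake-sync',
  { workspaceId: 'workspace-id-placeholder', version: '0.12.0' },
  (res: { data?: { clientId: string }; error?: unknown }) => {
    if (res.error) {
      console.error('handshake rejected', res.error);
    } else {
      console.log('joined sync room as', res.data?.clientId);
    }
  }
);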

@@ -1,133 +0,0 @@
import { BadRequestException, Injectable } from '@nestjs/common';
import { Prisma, PrismaClient } from '@prisma/client';
import { Quota_FreePlanV1_1 } from '../quota/schema';
@Injectable()
export class UserService {
defaultUserSelect = {
id: true,
name: true,
email: true,
emailVerifiedAt: true,
avatarUrl: true,
registered: true,
} satisfies Prisma.UserSelect;
constructor(private readonly prisma: PrismaClient) {}
get userCreatingData() {
return {
name: 'Unnamed',
features: {
create: {
reason: 'created by invite sign up',
activated: true,
feature: {
connect: {
feature_version: Quota_FreePlanV1_1,
},
},
},
},
};
}
async createUser(data: Prisma.UserCreateInput) {
return this.prisma.user.create({
data: {
...this.userCreatingData,
...data,
},
});
}
async createAnonymousUser(
email: string,
data?: Partial<Prisma.UserCreateInput>
) {
const user = await this.findUserByEmail(email);
if (user) {
throw new BadRequestException('Email already exists');
}
return this.createUser({
email,
name: email.split('@')[0],
...data,
});
}
async findUserById(id: string) {
return this.prisma.user
.findUnique({
where: { id },
select: this.defaultUserSelect,
})
.catch(() => {
return null;
});
}
async findUserByEmail(email: string) {
return this.prisma.user.findFirst({
where: {
email: {
equals: email,
mode: 'insensitive',
},
},
select: this.defaultUserSelect,
});
}
/**
* supposed to be used only for `Credential SignIn`
*/
async findUserWithHashedPasswordByEmail(email: string) {
return this.prisma.user.findFirst({
where: {
email: {
equals: email,
mode: 'insensitive',
},
},
});
}
async findOrCreateUser(
email: string,
data?: Partial<Prisma.UserCreateInput>
) {
const user = await this.findUserByEmail(email);
if (user) {
return user;
}
return this.createAnonymousUser(email, data);
}
async fulfillUser(
email: string,
data: Partial<
Pick<Prisma.UserCreateInput, 'emailVerifiedAt' | 'registered'>
>
) {
return this.prisma.user.upsert({
select: this.defaultUserSelect,
where: {
email,
},
update: data,
create: {
email,
...this.userCreatingData,
...data,
},
});
}
async deleteUser(id: string) {
return this.prisma.user.delete({ where: { id } });
}
}
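As a usage sketch (hypothetical, not part of this diff), an email-verification flow could rely on `fulfillUser` to upsert the account and mark it as verified:
// Hypothetical caller of the service above (illustrative only).
async function onEmailVerified(users: UserService, email: string) {
  return users.fulfillUser(email, { emailVerifiedAt: new Date(), registered: true });
}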

@@ -1,56 +0,0 @@
import { BadRequestException } from '@nestjs/common';
import z from 'zod';
function getAuthCredentialValidator() {
const email = z.string().email({ message: 'Invalid email address' });
let password = z.string();
password = password
.min(AFFiNE.auth.password.minLength, {
message: `Password must be ${AFFiNE.auth.password.minLength} or more characters long`,
})
.max(AFFiNE.auth.password.maxLength, {
message: `Password must be ${AFFiNE.auth.password.maxLength} or fewer characters long`,
});
return z
.object({
email,
password,
})
.required();
}
function assertValid<T>(z: z.ZodType<T>, value: unknown) {
const result = z.safeParse(value);
if (!result.success) {
const firstIssue = result.error.issues.at(0);
if (firstIssue) {
throw new BadRequestException(firstIssue.message);
} else {
throw new BadRequestException('Invalid credential');
}
}
}
export function assertValidEmail(email: string) {
assertValid(getAuthCredentialValidator().shape.email, email);
}
export function assertValidPassword(password: string) {
assertValid(getAuthCredentialValidator().shape.password, password);
}
export function assertValidCredential(credential: {
email: string;
password: string;
}) {
assertValid(getAuthCredentialValidator(), credential);
}
export const validators = {
assertValidEmail,
assertValidPassword,
assertValidCredential,
};
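A short hypothetical usage sketch; the surrounding handler is not part of this diff.
// Hypothetical sign-up handler using the validators above (illustrative only).
function validateSignUp(body: { email: string; password: string }) {
  validators.assertValidCredential(body); // throws BadRequestException on invalid input
}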

Some files were not shown because too many files have changed in this diff.