Compare commits

..

13 Commits

Author SHA1 Message Date
DarkSky
2ac9158f87 Merge branch 'canary' into darksky/native-sync-state 2026-01-04 00:27:17 +08:00
DarkSky
e7d0f31546 fix: blob redirect 2026-01-04 00:23:51 +08:00
DarkSky
fe5d6c0c0f feat(editor): support frontmatter & colored text parsing (#14205)
fix #13847
2026-01-03 22:43:11 +08:00
Yiding Jia
510933becf chore(server): bump ioredis to 5.8.2 for ipv6 support (#14204)
Bump ioredis to 5.8.2 for IPv6 support.

Prior to 5.8.2, ioredis required passing `family: 0` or `family: 6` when
constructing a client in order to connect to Redis over IPv6. This was
fixed in 5.8.2.

fix #14197
2026-01-03 01:06:30 +00:00
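A minimal sketch of the workaround described above (the host and port are placeholders, not values from this repo):

```ts
import Redis from 'ioredis';

// Before ioredis 5.8.2: reaching a Redis endpoint that only resolves to an
// IPv6 address required forcing the IP family on the client.
const legacyClient = new Redis({
  host: 'redis.internal.example', // placeholder host
  port: 6379,
  family: 6, // or family: 0 to let the DNS lookup try both stacks
});

// With ioredis >= 5.8.2 the default lookup handles IPv6 hosts, so no
// explicit family option is needed.
const client = new Redis({ host: 'redis.internal.example', port: 6379 });
```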
DarkSky
3633c75c6f feat: cleanup tables (#14203)
#### PR Dependency Tree


* **PR #14203** 👈

This tree was auto-generated by
[Charcoal](https://github.com/danerwilliams/charcoal)

## Summary by CodeRabbit

* **Chores**
  * Removed deprecated database tables, enums and schema fields (cleanup of legacy subscription, invoice, runtime settings and session expiry data). This includes irreversible data removal for those legacy elements.
* **Tests**
  * Updated tests and test data to align with the cleaned-up schema and removed fields.

2026-01-03 03:50:14 +08:00
DarkSky
41addfe311 fix: blob sync 2026-01-03 01:40:13 +08:00
DarkSky
9a7f8e7d4d feat: workspace level share settings (#14201)
fix #13698
2026-01-03 01:13:27 +08:00
DarkSky
60de882a30 feat: shared link list (#14200)
#### PR Dependency Tree


* **PR #14200** 👈

This tree was auto-generated by
[Charcoal](https://github.com/danerwilliams/charcoal)


## Summary by CodeRabbit

## Release Notes

* **New Features**
  * Added a "Shared Links" panel to workspace management, enabling admins to view all published documents within a workspace
  * Added publication date tracking for published documents, now displayed alongside shared links

* **Chores**
  * Removed deprecated `publicPages` field; use `publicDocs` instead

2026-01-02 21:07:41 +08:00
zetaloop
9f96633b33 fix: normalize shortcut display (#14196)
Normalize shortcut tokens and remove stray whitespace.
Uncomment group/ungroup shortcuts now that the feature is implemented.
Fix Windows redo shortcut display.

<img width="142" height="230" alt="image"
src="https://github.com/user-attachments/assets/989e061e-1ca2-489c-ab8e-6baad853d438"
/><img width="142" height="37" alt="image"
src="https://github.com/user-attachments/assets/671ed9b2-ccad-44ad-8889-7810bb01143c"
/>



## Summary by CodeRabbit

* **Chores**
  * Standardized keyboard shortcut representations across the application for improved consistency and clarity in shortcut displays.
  * Corrected spacing inconsistencies in shortcut entries to ensure uniform formatting.


---------

Co-authored-by: DarkSky <25152247+darkskygit@users.noreply.github.com>
2026-01-01 13:07:35 +00:00
DarkSky
1e8095c224 fix: ci 2026-01-01 18:20:18 +08:00
DarkSky
0b0ae5ea0a feat: add queue management for admin panel 2026-01-01 06:13:50 +08:00
DarkSky
f745f7b669 feat: pre-aggregation workspace stats 2026-01-01 05:01:52 +08:00
DarkSky
7ef550a736 feat: native record encoding 2025-12-31 12:47:34 +08:00
103 changed files with 6120 additions and 1501 deletions

File diff suppressed because one or more lines are too long

View File

@@ -12,4 +12,4 @@ npmPublishAccess: public
npmRegistryServer: "https://registry.npmjs.org"
yarnPath: .yarn/releases/yarn-4.9.1.cjs
yarnPath: .yarn/releases/yarn-4.12.0.cjs

Cargo.lock generated
View File

@@ -98,6 +98,9 @@ dependencies = [
"napi-derive",
"objc2",
"objc2-foundation",
"ogg",
"opus-codec",
"rand 0.9.1",
"rubato",
"screencapturekit",
"symphonia",
@@ -566,6 +569,26 @@ dependencies = [
"syn 2.0.111",
]
[[package]]
name = "bindgen"
version = "0.72.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "993776b509cfb49c750f11b8f07a46fa23e0a1386ffc01fb1e7d343efc387895"
dependencies = [
"bitflags 2.9.1",
"cexpr",
"clang-sys",
"itertools 0.13.0",
"log",
"prettyplease",
"proc-macro2",
"quote",
"regex",
"rustc-hash 2.1.1",
"shlex",
"syn 2.0.111",
]
[[package]]
name = "bit-set"
version = "0.5.3"
@@ -669,7 +692,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "234113d19d0d7d613b40e86fb654acf958910802bcceab913a4f9e7cda03b1a4"
dependencies = [
"memchr",
"regex-automata 0.4.9",
"regex-automata",
"serde",
]
@@ -925,6 +948,15 @@ version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6"
[[package]]
name = "cmake"
version = "0.1.54"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7caa3f9de89ddbe2c607f4101924c5abec803763ae9534e4f4d7d8f84aa81f0"
dependencies = [
"cc",
]
[[package]]
name = "colorchoice"
version = "1.0.3"
@@ -1104,7 +1136,7 @@ version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2ce857aa0b77d77287acc1ac3e37a05a8c95a2af3647d23b15f263bdaeb7562b"
dependencies = [
"bindgen",
"bindgen 0.70.1",
]
[[package]]
@@ -1490,7 +1522,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cea14ef9355e3beab063703aa9dab15afd25f0667c341310c1e5274bb1d0da18"
dependencies = [
"libc",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -1557,8 +1589,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "531e46835a22af56d1e3b66f04844bed63158bc094a628bec1d321d9b4c44bf2"
dependencies = [
"bit-set 0.5.3",
"regex-automata 0.4.9",
"regex-syntax 0.8.5",
"regex-automata",
"regex-syntax",
]
[[package]]
@@ -2012,7 +2044,7 @@ dependencies = [
"js-sys",
"log",
"wasm-bindgen",
"windows-core 0.61.2",
"windows-core 0.57.0",
]
[[package]]
@@ -2263,7 +2295,7 @@ checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9"
dependencies = [
"hermit-abi",
"libc",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -2457,7 +2489,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07033963ba89ebaf1584d767badaa2e8fcec21aedea6b8c0346d487d49c28667"
dependencies = [
"cfg-if",
"windows-targets 0.52.6",
"windows-targets 0.48.5",
]
[[package]]
@@ -2618,11 +2650,11 @@ dependencies = [
[[package]]
name = "matchers"
version = "0.1.0"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9"
dependencies = [
"regex-automata 0.1.10",
"regex-automata",
]
[[package]]
@@ -2861,12 +2893,11 @@ dependencies = [
[[package]]
name = "nu-ansi-term"
version = "0.46.0"
version = "0.50.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5"
dependencies = [
"overload",
"winapi",
"windows-sys 0.59.0",
]
[[package]]
@@ -3047,6 +3078,15 @@ dependencies = [
"cc",
]
[[package]]
name = "ogg"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fdab8dcd8d4052eaacaf8fb07a3ccd9a6e26efadb42878a413c68fc4af1dee2b"
dependencies = [
"byteorder",
]
[[package]]
name = "once_cell"
version = "1.21.3"
@@ -3065,6 +3105,17 @@ version = "11.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e"
[[package]]
name = "opus-codec"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37755dfadaa9c70fd26a4c1ea13d9bd035993cd0a19eb5b76449301609228280"
dependencies = [
"bindgen 0.72.1",
"cmake",
"pkg-config",
]
[[package]]
name = "ordered-float"
version = "5.0.0"
@@ -3082,12 +3133,6 @@ version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a80800c0488c3a21695ea981a54918fbb37abf04f4d0720c453632255e2ff0e"
[[package]]
name = "overload"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
[[package]]
name = "parking"
version = "2.2.1"
@@ -3406,6 +3451,16 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
[[package]]
name = "prettyplease"
version = "0.2.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b"
dependencies = [
"proc-macro2",
"syn 2.0.111",
]
[[package]]
name = "primal-check"
version = "0.3.4"
@@ -3471,7 +3526,7 @@ dependencies = [
"rand 0.8.5",
"rand_chacha 0.3.1",
"rand_xorshift",
"regex-syntax 0.8.5",
"regex-syntax",
"rusty-fork",
"tempfile",
"unarray",
@@ -3663,17 +3718,8 @@ checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
dependencies = [
"aho-corasick",
"memchr",
"regex-automata 0.4.9",
"regex-syntax 0.8.5",
]
[[package]]
name = "regex-automata"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
dependencies = [
"regex-syntax 0.6.29",
"regex-automata",
"regex-syntax",
]
[[package]]
@@ -3684,15 +3730,9 @@ checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax 0.8.5",
"regex-syntax",
]
[[package]]
name = "regex-syntax"
version = "0.6.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
[[package]]
name = "regex-syntax"
version = "0.8.5"
@@ -3837,7 +3877,7 @@ dependencies = [
"errno",
"libc",
"linux-raw-sys",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -4686,7 +4726,7 @@ dependencies = [
"getrandom 0.3.3",
"once_cell",
"rustix",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -4940,14 +4980,14 @@ dependencies = [
[[package]]
name = "tracing-subscriber"
version = "0.3.19"
version = "0.3.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008"
checksum = "2054a14f5307d601f88daf0553e1cbf472acc4f2c51afab632431cdcd72124d5"
dependencies = [
"matchers",
"nu-ansi-term",
"once_cell",
"regex",
"regex-automata",
"sharded-slab",
"smallvec",
"thread_local",
@@ -4974,7 +5014,7 @@ checksum = "6d7b8994f367f16e6fa14b5aebbcb350de5d7cbea82dc5b00ae997dd71680dd2"
dependencies = [
"cc",
"regex",
"regex-syntax 0.8.5",
"regex-syntax",
"serde_json",
"streaming-iterator",
"tree-sitter-language",
@@ -5560,37 +5600,15 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd7cf3379ca1aac9eea11fba24fd7e315d621f8dfe35c8d7d2be8b793726e07d"
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-util"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
dependencies = [
"windows-sys 0.59.0",
"windows-sys 0.48.0",
]
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows"
version = "0.54.0"

View File

@@ -64,6 +64,7 @@ resolver = "3"
notify = { version = "8", features = ["serde"] }
objc2 = "0.6"
objc2-foundation = "0.3"
ogg = "0.9"
once_cell = "1"
ordered-float = "5"
parking_lot = "0.12"

View File

@@ -1,3 +1,4 @@
import { MarkdownTransformer } from '@blocksuite/affine/widgets/linked-doc';
import {
DefaultTheme,
NoteDisplayMode,
@@ -16,12 +17,15 @@ import type {
SliceSnapshot,
TransformerMiddleware,
} from '@blocksuite/store';
import { AssetsManager, MemoryBlobCRUD } from '@blocksuite/store';
import { AssetsManager, MemoryBlobCRUD, Schema } from '@blocksuite/store';
import { TestWorkspace } from '@blocksuite/store/test';
import { describe, expect, test } from 'vitest';
import { AffineSchemas } from '../../schemas.js';
import { createJob } from '../utils/create-job.js';
import { getProvider } from '../utils/get-provider.js';
import { nanoidReplacement } from '../utils/nanoid-replacement.js';
import { testStoreExtensions } from '../utils/store.js';
const provider = getProvider();
@@ -90,6 +94,39 @@ describe('snapshot to markdown', () => {
expect(target.file).toBe(markdown);
});
test('imports frontmatter metadata into doc meta', async () => {
const schema = new Schema().register(AffineSchemas);
const collection = new TestWorkspace();
collection.storeExtensions = testStoreExtensions;
collection.meta.initialize();
const markdown = `---
title: Web developer
created: 2018-04-12T09:51:00
updated: 2018-04-12T10:00:00
tags: [a, b]
favorite: true
---
Hello world
`;
const docId = await MarkdownTransformer.importMarkdownToDoc({
collection,
schema,
markdown,
fileName: 'fallback-title',
extensions: testStoreExtensions,
});
expect(docId).toBeTruthy();
const meta = collection.meta.getDocMeta(docId!);
expect(meta?.title).toBe('Web developer');
expect(meta?.createDate).toBe(Date.parse('2018-04-12T09:51:00'));
expect(meta?.updatedDate).toBe(Date.parse('2018-04-12T10:00:00'));
expect(meta?.favorite).toBe(true);
expect(meta?.tags).toEqual(['a', 'b']);
});
test('paragraph', async () => {
const blockSnapshot: BlockSnapshot = {
type: 'block',
@@ -2996,6 +3033,50 @@ describe('markdown to snapshot', () => {
});
});
test('html inline color span imports to nearest supported text color', async () => {
const markdown = `<span style="color: #00afde;">Hello</span>`;
const blockSnapshot: BlockSnapshot = {
type: 'block',
id: 'matchesReplaceMap[0]',
flavour: 'affine:note',
props: {
xywh: '[0,0,800,95]',
background: DefaultTheme.noteBackgrounColor,
index: 'a0',
hidden: false,
displayMode: NoteDisplayMode.DocAndEdgeless,
},
children: [
{
type: 'block',
id: 'matchesReplaceMap[1]',
flavour: 'affine:paragraph',
props: {
type: 'text',
text: {
'$blocksuite:internal:text$': true,
delta: [
{
insert: 'Hello',
attributes: {
color: 'var(--affine-v2-text-highlight-fg-blue)',
},
},
],
},
},
children: [],
},
],
};
const mdAdapter = new MarkdownAdapter(createJob(), provider);
const rawBlockSnapshot = await mdAdapter.toBlockSnapshot({
file: markdown,
});
expect(nanoidReplacement(rawBlockSnapshot)).toEqual(blockSnapshot);
});
test('paragraph', async () => {
const markdown = `aaa

View File

@@ -0,0 +1,120 @@
import { parseStringToRgba } from '@blocksuite/affine-components/color-picker';
import { cssVarV2, darkThemeV2, lightThemeV2 } from '@toeverything/theme/v2';
type Rgb = { r: number; g: number; b: number };
const COLOR_DISTANCE_THRESHOLD = 90;
const supportedTextColorNames = [
'red',
'orange',
'yellow',
'green',
'teal',
'blue',
'purple',
'grey',
] as const;
const supportedTextColors = supportedTextColorNames.map(name => ({
name,
cssVar: cssVarV2(`text/highlight/fg/${name}`),
light: lightThemeV2[`text/highlight/fg/${name}`],
dark: darkThemeV2[`text/highlight/fg/${name}`],
}));
const hexToRgb = (value: string): Rgb | null => {
const hex = value.replace('#', '');
if (![3, 4, 6, 8].includes(hex.length)) {
return null;
}
const normalized =
hex.length === 3 || hex.length === 4
? hex
.slice(0, 3)
.split('')
.map(c => c + c)
.join('')
: hex.slice(0, 6);
const intVal = Number.parseInt(normalized, 16);
if (Number.isNaN(intVal)) {
return null;
}
return {
r: (intVal >> 16) & 255,
g: (intVal >> 8) & 255,
b: intVal & 255,
};
};
export const parseCssColor = (value: string): Rgb | null => {
const trimmed = value.trim();
if (!trimmed) {
return null;
}
if (trimmed.startsWith('#')) {
return hexToRgb(trimmed);
}
if (/^rgba?\(/i.test(trimmed)) {
const rgba = parseStringToRgba(trimmed);
return {
r: Math.round(rgba.r * 255),
g: Math.round(rgba.g * 255),
b: Math.round(rgba.b * 255),
};
}
return null;
};
const colorDistance = (a: Rgb, b: Rgb) => {
const dr = a.r - b.r;
const dg = a.g - b.g;
const db = a.b - b.b;
return Math.sqrt(dr * dr + dg * dg + db * db);
};
export const resolveNearestSupportedColor = (color: string): string | null => {
const target = parseCssColor(color);
if (!target) {
return null;
}
let nearest:
| {
cssVar: string;
distance: number;
}
| undefined;
for (const supported of supportedTextColors) {
const light = parseCssColor(supported.light);
const dark = parseCssColor(supported.dark);
for (const ref of [light, dark]) {
if (!ref) continue;
const distance = colorDistance(target, ref);
if (!nearest || distance < nearest.distance) {
nearest = { cssVar: supported.cssVar, distance };
}
}
}
if (nearest && nearest.distance <= COLOR_DISTANCE_THRESHOLD) {
return nearest.cssVar;
}
return null;
};
export const extractColorFromStyle = (
style: string | undefined
): string | null => {
if (typeof style !== 'string') {
return null;
}
const declarations = style.split(';');
for (const declaration of declarations) {
const [rawKey, rawValue] = declaration.split(':');
if (!rawKey || !rawValue) continue;
if (rawKey.trim().toLowerCase() === 'color') {
return rawValue.trim();
}
}
return null;
};
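A short usage sketch of the helpers above; the expected blue mapping comes from the adapter test earlier in this diff:

```ts
// '#00afde' falls within the distance threshold of the blue highlight color,
// so it resolves to that CSS variable (the same value the test asserts).
resolveNearestSupportedColor('#00afde');
// -> 'var(--affine-v2-text-highlight-fg-blue)'

// extractColorFromStyle pulls the raw color declaration out of an inline style string.
extractColorFromStyle('font-weight: bold; color: #00afde');
// -> '#00afde'
```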

View File

@@ -5,6 +5,11 @@ import {
import { collapseWhiteSpace } from 'collapse-white-space';
import type { Element } from 'hast';
import {
extractColorFromStyle,
resolveNearestSupportedColor,
} from './color-utils.js';
/**
* Handle empty text nodes created by HTML parser for styling purposes.
* These nodes typically contain only whitespace/newlines, for example:
@@ -173,6 +178,40 @@ export const htmlTextToDeltaMatcher = HtmlASTToDeltaExtension({
},
});
export const htmlColorStyleElementToDeltaMatcher = HtmlASTToDeltaExtension({
name: 'color-style-element',
match: ast =>
isElement(ast) &&
ast.tagName === 'span' &&
typeof ast.properties?.style === 'string' &&
/color\s*:/i.test(ast.properties.style),
toDelta: (ast, context) => {
if (!isElement(ast)) {
return [];
}
const baseOptions = { ...context.options, trim: false };
// In preformatted contexts (e.g. code blocks) we don't keep inline colors.
if (baseOptions.pre) {
return ast.children.flatMap(child => context.toDelta(child, baseOptions));
}
const colorValue = extractColorFromStyle(
typeof ast.properties?.style === 'string' ? ast.properties.style : ''
);
const mappedColor = colorValue
? resolveNearestSupportedColor(colorValue)
: null;
const deltas = ast.children.flatMap(child =>
context.toDelta(child, baseOptions).map(delta => {
if (mappedColor) {
delta.attributes = { ...delta.attributes, color: mappedColor };
}
return delta;
})
);
return deltas;
},
});
export const htmlTextLikeElementToDeltaMatcher = HtmlASTToDeltaExtension({
name: 'text-like-element',
match: ast => isTextLikeElement(ast),
@@ -300,6 +339,7 @@ export const htmlBrElementToDeltaMatcher = HtmlASTToDeltaExtension({
export const HtmlInlineToDeltaAdapterExtensions = [
htmlTextToDeltaMatcher,
htmlColorStyleElementToDeltaMatcher,
htmlTextLikeElementToDeltaMatcher,
htmlStrongElementToDeltaMatcher,
htmlItalicElementToDeltaMatcher,

View File

@@ -79,11 +79,11 @@ export const markdownListToDeltaMatcher = MarkdownASTToDeltaExtension({
export const markdownHtmlToDeltaMatcher = MarkdownASTToDeltaExtension({
name: 'html',
match: ast => ast.type === 'html',
toDelta: ast => {
toDelta: (ast, context) => {
if (!('value' in ast)) {
return [];
}
return [{ insert: ast.value }];
return context?.htmlToDelta?.(ast.value) ?? [{ insert: ast.value }];
},
});

View File

@@ -3,9 +3,17 @@ import {
type ServiceIdentifier,
} from '@blocksuite/global/di';
import type { DeltaInsert, ExtensionType } from '@blocksuite/store';
import type { Root } from 'hast';
import type { PhrasingContent } from 'mdast';
import rehypeParse from 'rehype-parse';
import { unified } from 'unified';
import type { AffineTextAttributes } from '../../types/index.js';
import { HtmlDeltaConverter } from '../html/delta-converter.js';
import {
rehypeInlineToBlock,
rehypeWrapInlineElements,
} from '../html/rehype-plugins/index.js';
import {
type ASTToDeltaMatcher,
DeltaASTConverter,
@@ -13,6 +21,88 @@ import {
} from '../types/delta-converter.js';
import type { MarkdownAST } from './type.js';
const INLINE_HTML_TAGS = new Set([
'span',
'strong',
'b',
'em',
'i',
'del',
'u',
'mark',
'code',
'ins',
'bdi',
'bdo',
]);
const VOID_HTML_TAGS = new Set([
'area',
'base',
'br',
'col',
'embed',
'hr',
'img',
'input',
'link',
'meta',
'param',
'source',
'track',
'wbr',
]);
const ALLOWED_INLINE_HTML_TAGS = new Set([
...INLINE_HTML_TAGS,
...VOID_HTML_TAGS,
]);
const isHtmlNode = (
node: MarkdownAST
): node is MarkdownAST & { type: 'html'; value: string } =>
node.type === 'html' && 'value' in node && typeof node.value === 'string';
const isTextNode = (
node: MarkdownAST
): node is MarkdownAST & { type: 'text'; value: string } =>
node.type === 'text' && 'value' in node && typeof node.value === 'string';
type HtmlTagInfo =
| { name: string; kind: 'open' | 'self' }
| { name: string; kind: 'close' };
const getHtmlTagInfo = (value: string): HtmlTagInfo | null => {
const closingMatch = value.match(/^<\/([A-Za-z][A-Za-z0-9-]*)\s*>$/);
if (closingMatch) {
return {
name: closingMatch[1].toLowerCase(),
kind: 'close',
};
}
const selfClosingMatch = value.match(
/^<([A-Za-z][A-Za-z0-9-]*)(\s[^>]*)?\/>$/i
);
if (selfClosingMatch) {
return {
name: selfClosingMatch[1].toLowerCase(),
kind: 'self',
};
}
const openingMatch = value.match(/^<([A-Za-z][A-Za-z0-9-]*)(\s[^>]*)?>$/);
if (openingMatch) {
const name = openingMatch[1].toLowerCase();
return {
name,
kind: VOID_HTML_TAGS.has(name) ? 'self' : 'open',
};
}
return null;
};
export type InlineDeltaToMarkdownAdapterMatcher =
InlineDeltaMatcher<PhrasingContent>;
@@ -63,11 +153,30 @@ export class MarkdownDeltaConverter extends DeltaASTConverter<
constructor(
readonly configs: Map<string, string>,
readonly inlineDeltaMatchers: InlineDeltaToMarkdownAdapterMatcher[],
readonly markdownASTToDeltaMatchers: MarkdownASTToDeltaMatcher[]
readonly markdownASTToDeltaMatchers: MarkdownASTToDeltaMatcher[],
readonly htmlDeltaConverter?: HtmlDeltaConverter
) {
super();
}
private _convertHtmlToDelta(
html: string
): DeltaInsert<AffineTextAttributes>[] {
if (!this.htmlDeltaConverter) {
return [{ insert: html }];
}
try {
const processor = unified()
.use(rehypeParse, { fragment: true })
.use(rehypeInlineToBlock)
.use(rehypeWrapInlineElements);
const ast = processor.runSync(processor.parse(html)) as Root;
return this.htmlDeltaConverter.astToDelta(ast, { trim: false });
} catch {
return [{ insert: html }];
}
}
applyTextFormatting(
delta: DeltaInsert<AffineTextAttributes>
): PhrasingContent {
@@ -95,11 +204,110 @@ export class MarkdownDeltaConverter extends DeltaASTConverter<
return mdast;
}
private _mergeInlineHtml(
children: MarkdownAST[],
startIndex: number
): {
endIndex: number;
deltas: DeltaInsert<AffineTextAttributes>[];
} | null {
const startNode = children[startIndex];
if (!isHtmlNode(startNode)) {
return null;
}
const startTag = getHtmlTagInfo(startNode.value);
if (
!startTag ||
startTag.kind !== 'open' ||
!INLINE_HTML_TAGS.has(startTag.name)
) {
return null;
}
const stack = [startTag.name];
let html = startNode.value;
let endIndex = startIndex;
for (let i = startIndex + 1; i < children.length; i++) {
const node = children[i];
if (isHtmlNode(node)) {
const info = getHtmlTagInfo(node.value);
if (!info) {
html += node.value;
continue;
}
if (info.kind === 'open') {
if (!ALLOWED_INLINE_HTML_TAGS.has(info.name)) {
return null;
}
stack.push(info.name);
html += node.value;
continue;
}
if (info.kind === 'self') {
if (!ALLOWED_INLINE_HTML_TAGS.has(info.name)) {
return null;
}
html += node.value;
continue;
}
if (!ALLOWED_INLINE_HTML_TAGS.has(info.name)) {
return null;
}
const last = stack[stack.length - 1];
if (last !== info.name) {
return null;
}
stack.pop();
html += node.value;
endIndex = i;
if (stack.length === 0) {
return {
endIndex,
deltas: this._convertHtmlToDelta(html),
};
}
continue;
}
if (isTextNode(node)) {
html += node.value;
continue;
}
return null;
}
return null;
}
private _astChildrenToDelta(
children: MarkdownAST[]
): DeltaInsert<AffineTextAttributes>[] {
const deltas: DeltaInsert<AffineTextAttributes>[] = [];
for (let i = 0; i < children.length; i++) {
const merged = this._mergeInlineHtml(children, i);
if (merged) {
deltas.push(...merged.deltas);
i = merged.endIndex;
continue;
}
deltas.push(...this.astToDelta(children[i]));
}
return deltas;
}
astToDelta(ast: MarkdownAST): DeltaInsert<AffineTextAttributes>[] {
const context = {
configs: this.configs,
options: Object.create(null),
toDelta: (ast: MarkdownAST) => this.astToDelta(ast),
htmlToDelta: (html: string) => this._convertHtmlToDelta(html),
};
for (const matcher of this.markdownASTToDeltaMatchers) {
if (matcher.match(ast)) {
@@ -107,7 +315,7 @@ export class MarkdownDeltaConverter extends DeltaASTConverter<
}
}
return 'children' in ast
? ast.children.flatMap(child => this.astToDelta(child))
? this._astChildrenToDelta(ast.children as MarkdownAST[])
: [];
}

View File

@@ -26,6 +26,11 @@ import remarkParse from 'remark-parse';
import remarkStringify from 'remark-stringify';
import { unified } from 'unified';
import {
HtmlASTToDeltaMatcherIdentifier,
HtmlDeltaConverter,
InlineDeltaToHtmlAdapterMatcherIdentifier,
} from '../html/delta-converter.js';
import { type AdapterContext, AdapterFactoryIdentifier } from '../types';
import {
type BlockMarkdownAdapterMatcher,
@@ -184,11 +189,24 @@ export class MarkdownAdapter extends BaseAdapter<Markdown> {
const markdownInlineToDeltaMatchers = Array.from(
provider.getAll(MarkdownASTToDeltaMatcherIdentifier).values()
);
const inlineDeltaToHtmlAdapterMatchers = Array.from(
provider.getAll(InlineDeltaToHtmlAdapterMatcherIdentifier).values()
);
const htmlInlineToDeltaMatchers = Array.from(
provider.getAll(HtmlASTToDeltaMatcherIdentifier).values()
);
const htmlDeltaConverter = new HtmlDeltaConverter(
job.adapterConfigs,
inlineDeltaToHtmlAdapterMatchers,
htmlInlineToDeltaMatchers,
provider
);
this.blockMatchers = blockMatchers;
this.deltaConverter = new MarkdownDeltaConverter(
job.adapterConfigs,
inlineDeltaToMarkdownAdapterMatchers,
markdownInlineToDeltaMatchers
markdownInlineToDeltaMatchers,
htmlDeltaConverter
);
this.preprocessorManager = new MarkdownPreprocessorManager(provider);
}

View File

@@ -56,6 +56,7 @@ export type ASTToDeltaMatcher<AST> = {
ast: AST,
options?: DeltaASTConverterOptions
) => DeltaInsert<AffineTextAttributes>[];
htmlToDelta?: (html: string) => DeltaInsert<AffineTextAttributes>[];
}
) => DeltaInsert<AffineTextAttributes>[];
};

View File

@@ -26,6 +26,7 @@
"@toeverything/theme": "^1.1.16",
"@types/lodash-es": "^4.17.12",
"fflate": "^0.8.2",
"js-yaml": "^4.1.1",
"lit": "^3.2.0",
"lodash-es": "^4.17.21",
"mammoth": "^1.11.0",

View File

@@ -15,10 +15,183 @@ import type {
Store,
Workspace,
} from '@blocksuite/store';
import type { DocMeta } from '@blocksuite/store';
import { extMimeMap, Transformer } from '@blocksuite/store';
import type { AssetMap, ImportedFileEntry, PathBlobIdMap } from './type.js';
import { createAssetsArchive, download, Unzip } from './utils.js';
import { createAssetsArchive, download, parseMatter, Unzip } from './utils.js';
type ParsedFrontmatterMeta = Partial<
Pick<DocMeta, 'title' | 'createDate' | 'updatedDate' | 'tags' | 'favorite'>
>;
const FRONTMATTER_KEYS = {
title: ['title', 'name'],
created: [
'created',
'createdat',
'created_at',
'createddate',
'created_date',
'creationdate',
'date',
'time',
],
updated: [
'updated',
'updatedat',
'updated_at',
'updateddate',
'updated_date',
'modified',
'modifiedat',
'modified_at',
'lastmodified',
'last_modified',
'lastedited',
'last_edited',
'lasteditedtime',
'last_edited_time',
],
tags: ['tags', 'tag', 'categories', 'category', 'labels', 'keywords'],
favorite: ['favorite', 'favourite', 'star', 'starred', 'pinned'],
trash: ['trash', 'trashed', 'deleted', 'archived'],
};
const truthyStrings = new Set(['true', 'yes', 'y', '1', 'on']);
const falsyStrings = new Set(['false', 'no', 'n', '0', 'off']);
function parseBoolean(value: unknown): boolean | undefined {
if (typeof value === 'boolean') return value;
if (typeof value === 'number') {
if (value === 1) return true;
if (value === 0) return false;
}
if (typeof value === 'string') {
const normalized = value.trim().toLowerCase();
if (truthyStrings.has(normalized)) return true;
if (falsyStrings.has(normalized)) return false;
}
return undefined;
}
function parseTimestamp(value: unknown): number | undefined {
if (value && value instanceof Date) {
return value.getTime();
}
if (typeof value === 'number' && Number.isFinite(value)) {
return value > 1e10 ? value : Math.round(value * 1000);
}
if (typeof value === 'string') {
const num = Number(value);
if (!Number.isNaN(num)) {
return num > 1e10 ? num : Math.round(num * 1000);
}
const parsed = Date.parse(value);
if (!Number.isNaN(parsed)) {
return parsed;
}
}
return undefined;
}
function parseTags(value: unknown): string[] | undefined {
if (Array.isArray(value)) {
const tags = value
.map(v => (typeof v === 'string' ? v : String(v)))
.map(v => v.trim())
.filter(Boolean);
return tags.length ? [...new Set(tags)] : undefined;
}
if (typeof value === 'string') {
const tags = value
.split(/[,;]+/)
.map(v => v.trim())
.filter(Boolean);
return tags.length ? [...new Set(tags)] : undefined;
}
return undefined;
}
function buildMetaFromFrontmatter(
data: Record<string, unknown>
): ParsedFrontmatterMeta {
const meta: ParsedFrontmatterMeta = {};
for (const [rawKey, value] of Object.entries(data)) {
const key = rawKey.trim().toLowerCase();
if (FRONTMATTER_KEYS.title.includes(key) && typeof value === 'string') {
const title = value.trim();
if (title) meta.title = title;
continue;
}
if (FRONTMATTER_KEYS.created.includes(key)) {
const timestamp = parseTimestamp(value);
if (timestamp !== undefined) {
meta.createDate = timestamp;
}
continue;
}
if (FRONTMATTER_KEYS.updated.includes(key)) {
const timestamp = parseTimestamp(value);
if (timestamp !== undefined) {
meta.updatedDate = timestamp;
}
continue;
}
if (FRONTMATTER_KEYS.tags.includes(key)) {
const tags = parseTags(value);
if (tags) meta.tags = tags;
continue;
}
if (FRONTMATTER_KEYS.favorite.includes(key)) {
const favorite = parseBoolean(value);
if (favorite !== undefined) {
meta.favorite = favorite;
}
continue;
}
}
return meta;
}
function parseFrontmatter(markdown: string): {
content: string;
meta: ParsedFrontmatterMeta;
} {
try {
const parsed = parseMatter(markdown);
if (!parsed) {
return { content: markdown, meta: {} };
}
const content = parsed.body ?? markdown;
if (Array.isArray(parsed.metadata)) {
return { content: String(content), meta: {} };
}
const meta = buildMetaFromFrontmatter({ ...parsed.metadata });
return { content: String(content), meta };
} catch {
return { content: markdown, meta: {} };
}
}
function applyMetaPatch(
collection: Workspace,
docId: string,
meta: ParsedFrontmatterMeta
) {
const metaPatch: Partial<DocMeta> = {};
if (meta.title) metaPatch.title = meta.title;
if (meta.createDate !== undefined) metaPatch.createDate = meta.createDate;
if (meta.updatedDate !== undefined) metaPatch.updatedDate = meta.updatedDate;
if (meta.tags) metaPatch.tags = meta.tags;
if (meta.favorite !== undefined) metaPatch.favorite = meta.favorite;
if (Object.keys(metaPatch).length) {
collection.meta.setDocMeta(docId, metaPatch);
}
}
function getProvider(extensions: ExtensionType[]) {
const container = new Container();
@@ -153,6 +326,8 @@ async function importMarkdownToDoc({
fileName,
extensions,
}: ImportMarkdownToDocOptions) {
const { content, meta } = parseFrontmatter(markdown);
const preferredTitle = meta.title ?? fileName;
const provider = getProvider(extensions);
const job = new Transformer({
schema,
@@ -164,18 +339,19 @@ async function importMarkdownToDoc({
},
middlewares: [
defaultImageProxyMiddleware,
fileNameMiddleware(fileName),
fileNameMiddleware(preferredTitle),
docLinkBaseURLMiddleware(collection.id),
],
});
const mdAdapter = new MarkdownAdapter(job, provider);
const page = await mdAdapter.toDoc({
file: markdown,
file: content,
assets: job.assetsManager,
});
if (!page) {
return;
}
applyMetaPatch(collection, page.id, meta);
return page.id;
}
@@ -232,6 +408,9 @@ async function importMarkdownZip({
markdownBlobs.map(async markdownFile => {
const { filename, contentBlob, fullPath } = markdownFile;
const fileNameWithoutExt = filename.replace(/\.[^/.]+$/, '');
const markdown = await contentBlob.text();
const { content, meta } = parseFrontmatter(markdown);
const preferredTitle = meta.title ?? fileNameWithoutExt;
const job = new Transformer({
schema,
blobCRUD: collection.blobSync,
@@ -242,7 +421,7 @@ async function importMarkdownZip({
},
middlewares: [
defaultImageProxyMiddleware,
fileNameMiddleware(fileNameWithoutExt),
fileNameMiddleware(preferredTitle),
docLinkBaseURLMiddleware(collection.id),
filePathMiddleware(fullPath),
],
@@ -262,12 +441,12 @@ async function importMarkdownZip({
}
const mdAdapter = new MarkdownAdapter(job, provider);
const markdown = await contentBlob.text();
const doc = await mdAdapter.toDoc({
file: markdown,
file: content,
assets: job.assetsManager,
});
if (doc) {
applyMetaPatch(collection, doc.id, meta);
docIds.push(doc.id);
}
})

View File

@@ -1,5 +1,6 @@
import { extMimeMap, getAssetName } from '@blocksuite/store';
import * as fflate from 'fflate';
import { FAILSAFE_SCHEMA, load as loadYaml } from 'js-yaml';
export class Zip {
private compressed = new Uint8Array();
@@ -208,3 +209,14 @@ export function download(blob: Blob, name: string) {
element.remove();
URL.revokeObjectURL(fileURL);
}
const metaMatcher = /(?<=---)(.*?)(?=---)/ms;
const bodyMatcher = /---.*?---/s;
export const parseMatter = (contents: string) => {
const matterMatch = contents.match(metaMatcher);
if (!matterMatch || !matterMatch[0]) return null;
const metadata = loadYaml(matterMatch[0], { schema: FAILSAFE_SCHEMA });
if (!metadata || typeof metadata !== 'object') return null;
const body = contents.replace(bodyMatcher, '');
return { matter: matterMatch[0], body, metadata };
};
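A brief sketch of what `parseMatter` returns for frontmatter like the one in the adapter test above; note that `FAILSAFE_SCHEMA` keeps scalars as strings, which is why the import path re-parses booleans and timestamps itself:

```ts
const parsed = parseMatter('---\ntitle: Web developer\nfavorite: true\n---\nHello world\n');
// parsed?.metadata -> { title: 'Web developer', favorite: 'true' } (strings only under FAILSAFE_SCHEMA)
// parsed?.body     -> '\nHello world\n'
// parsed?.matter   -> '\ntitle: Web developer\nfavorite: true\n'
```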

View File

@@ -92,7 +92,7 @@
"vite": "^7.2.7",
"vitest": "^3.2.4"
},
"packageManager": "yarn@4.9.1",
"packageManager": "yarn@4.12.0",
"resolutions": {
"array-buffer-byte-length": "npm:@nolyfill/array-buffer-byte-length@^1",
"array-includes": "npm:@nolyfill/array-includes@^1",

View File

@@ -0,0 +1,90 @@
-- Pre-aggregated admin workspace stats tables
CREATE TABLE IF NOT EXISTS "workspace_admin_stats" (
"workspace_id" VARCHAR NOT NULL,
"snapshot_count" BIGINT NOT NULL DEFAULT 0,
"snapshot_size" BIGINT NOT NULL DEFAULT 0,
"blob_count" BIGINT NOT NULL DEFAULT 0,
"blob_size" BIGINT NOT NULL DEFAULT 0,
"member_count" BIGINT NOT NULL DEFAULT 0,
"public_page_count" BIGINT NOT NULL DEFAULT 0,
"features" TEXT[] NOT NULL DEFAULT '{}'::text[],
"updated_at" TIMESTAMPTZ(3) NOT NULL DEFAULT NOW(),
CONSTRAINT "workspace_admin_stats_pkey" PRIMARY KEY ("workspace_id"),
CONSTRAINT "workspace_admin_stats_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE
);
CREATE TABLE IF NOT EXISTS "workspace_admin_stats_dirty" (
"workspace_id" VARCHAR NOT NULL,
"updated_at" TIMESTAMPTZ(3) NOT NULL DEFAULT NOW(),
CONSTRAINT "workspace_admin_stats_dirty_pkey" PRIMARY KEY ("workspace_id"),
CONSTRAINT "workspace_admin_stats_dirty_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE
);
-- Sorting indexes for admin queries
CREATE INDEX IF NOT EXISTS "workspace_admin_stats_snapshot_size_idx" ON "workspace_admin_stats" ("snapshot_size" DESC);
CREATE INDEX IF NOT EXISTS "workspace_admin_stats_blob_count_idx" ON "workspace_admin_stats" ("blob_count" DESC);
CREATE INDEX IF NOT EXISTS "workspace_admin_stats_blob_size_idx" ON "workspace_admin_stats" ("blob_size" DESC);
CREATE INDEX IF NOT EXISTS "workspace_admin_stats_snapshot_count_idx" ON "workspace_admin_stats" ("snapshot_count" DESC);
CREATE INDEX IF NOT EXISTS "workspace_admin_stats_member_count_idx" ON "workspace_admin_stats" ("member_count" DESC);
CREATE INDEX IF NOT EXISTS "workspace_admin_stats_public_page_count_idx" ON "workspace_admin_stats" ("public_page_count" DESC);
CREATE INDEX IF NOT EXISTS "workspace_admin_stats_dirty_updated_at_idx" ON "workspace_admin_stats_dirty" ("updated_at");
-- Feature filtering index
CREATE INDEX IF NOT EXISTS "workspace_features_workspace_id_name_activated_idx" ON "workspace_features" ("workspace_id", "name", "activated");
-- Dirty marker trigger
CREATE OR REPLACE FUNCTION workspace_admin_stats_mark_dirty() RETURNS TRIGGER AS $$
DECLARE
wid VARCHAR;
BEGIN
wid := COALESCE(NEW."workspace_id", OLD."workspace_id");
IF wid IS NULL THEN
RETURN NULL;
END IF;
-- Skip if workspace does not exist to avoid FK errors on orphaned records
IF NOT EXISTS (SELECT 1 FROM "workspaces" WHERE "id" = wid) THEN
RETURN NULL;
END IF;
INSERT INTO "workspace_admin_stats_dirty" ("workspace_id", "updated_at")
VALUES (wid, NOW())
ON CONFLICT ("workspace_id")
DO UPDATE SET "updated_at" = EXCLUDED."updated_at";
RETURN NULL;
END;
$$ LANGUAGE plpgsql;
DROP TRIGGER IF EXISTS "workspace_admin_stats_dirty_snapshots" ON "snapshots";
CREATE TRIGGER "workspace_admin_stats_dirty_snapshots"
AFTER INSERT OR UPDATE OR DELETE ON "snapshots"
FOR EACH ROW EXECUTE FUNCTION workspace_admin_stats_mark_dirty();
DROP TRIGGER IF EXISTS "workspace_admin_stats_dirty_blobs" ON "blobs";
CREATE TRIGGER "workspace_admin_stats_dirty_blobs"
AFTER INSERT OR UPDATE OR DELETE ON "blobs"
FOR EACH ROW EXECUTE FUNCTION workspace_admin_stats_mark_dirty();
DROP TRIGGER IF EXISTS "workspace_admin_stats_dirty_wup" ON "workspace_user_permissions";
CREATE TRIGGER "workspace_admin_stats_dirty_wup"
AFTER INSERT OR UPDATE OR DELETE ON "workspace_user_permissions"
FOR EACH ROW EXECUTE FUNCTION workspace_admin_stats_mark_dirty();
DROP TRIGGER IF EXISTS "workspace_admin_stats_dirty_pages" ON "workspace_pages";
CREATE TRIGGER "workspace_admin_stats_dirty_pages"
AFTER INSERT OR UPDATE OR DELETE ON "workspace_pages"
FOR EACH ROW EXECUTE FUNCTION workspace_admin_stats_mark_dirty();
DROP TRIGGER IF EXISTS "workspace_admin_stats_dirty_features" ON "workspace_features";
CREATE TRIGGER "workspace_admin_stats_dirty_features"
AFTER INSERT OR UPDATE OR DELETE ON "workspace_features"
FOR EACH ROW EXECUTE FUNCTION workspace_admin_stats_mark_dirty();
-- Mark existing workspaces dirty for initial backfill
INSERT INTO "workspace_admin_stats_dirty" ("workspace_id", "updated_at")
SELECT id, NOW() FROM "workspaces"
ON CONFLICT ("workspace_id") DO NOTHING;
-- DropIndex
DROP INDEX "idx_wur_workspace";

View File

@@ -0,0 +1,12 @@
-- Add published_at to workspace_pages to track when a doc was shared
ALTER TABLE "workspace_pages"
ADD COLUMN IF NOT EXISTS "published_at" TIMESTAMPTZ(3);
-- Backfill existing public docs using the snapshot updated_at
UPDATE "workspace_pages" wp
SET "published_at" = s."updated_at"
FROM "snapshots" s
WHERE wp."workspace_id" = s."workspace_id"
AND wp."page_id" = s."guid"
AND wp."public" = TRUE
AND wp."published_at" IS NULL;

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "workspaces" ADD COLUMN "enable_sharing" BOOLEAN NOT NULL DEFAULT true;

View File

@@ -0,0 +1,38 @@
/*
Warnings:
- You are about to drop the column `expires_at` on the `multiple_users_sessions` table. All the data in the column will be lost.
- You are about to drop the column `seq` on the `snapshots` table. All the data in the column will be lost.
- You are about to drop the column `seq` on the `updates` table. All the data in the column will be lost.
- You are about to drop the column `accepted` on the `workspace_user_permissions` table. All the data in the column will be lost.
- You are about to drop the `app_runtime_settings` table. If the table is not empty, all the data it contains will be lost.
- You are about to drop the `user_invoices` table. If the table is not empty, all the data it contains will be lost.
- You are about to drop the `user_subscriptions` table. If the table is not empty, all the data it contains will be lost.
*/
-- DropForeignKey
ALTER TABLE "app_runtime_settings" DROP CONSTRAINT "app_runtime_settings_last_updated_by_fkey";
-- AlterTable
ALTER TABLE "multiple_users_sessions" DROP COLUMN "expires_at";
-- AlterTable
ALTER TABLE "snapshots" DROP COLUMN "seq";
-- AlterTable
ALTER TABLE "updates" DROP COLUMN "seq";
-- AlterTable
ALTER TABLE "workspace_user_permissions" DROP COLUMN "accepted";
-- DropTable
DROP TABLE "app_runtime_settings";
-- DropTable
DROP TABLE "user_invoices";
-- DropTable
DROP TABLE "user_subscriptions";
-- DropEnum
DROP TYPE "RuntimeConfigType";

View File

@@ -72,6 +72,7 @@
"@opentelemetry/semantic-conventions": "^1.38.0",
"@prisma/client": "^6.6.0",
"@prisma/instrumentation": "^6.7.0",
"@queuedash/api": "^3.14.0",
"@react-email/components": "0.0.38",
"@socket.io/redis-adapter": "^8.3.0",
"ai": "^5.0.108",
@@ -92,7 +93,7 @@
"html-validate": "^9.0.0",
"htmlrewriter": "^0.0.12",
"http-errors": "^2.0.0",
"ioredis": "^5.4.1",
"ioredis": "^5.8.2",
"is-mobile": "^5.0.0",
"jose": "^6.1.3",
"jsonwebtoken": "^9.0.3",

View File

@@ -25,31 +25,29 @@ model User {
registered Boolean @default(true)
disabled Boolean @default(false)
features UserFeature[]
userStripeCustomer UserStripeCustomer?
workspaces WorkspaceUserRole[]
features UserFeature[]
userStripeCustomer UserStripeCustomer?
workspaces WorkspaceUserRole[]
// Invite others to join the workspace
WorkspaceInvitations WorkspaceUserRole[] @relation("inviter")
docPermissions WorkspaceDocUserRole[]
connectedAccounts ConnectedAccount[]
sessions UserSession[]
aiSessions AiSession[]
/// @deprecated
deprecatedAppRuntimeSettings DeprecatedAppRuntimeSettings[]
appConfigs AppConfig[]
userSnapshots UserSnapshot[]
createdSnapshot Snapshot[] @relation("createdSnapshot")
updatedSnapshot Snapshot[] @relation("updatedSnapshot")
createdUpdate Update[] @relation("createdUpdate")
createdHistory SnapshotHistory[] @relation("createdHistory")
createdAiJobs AiJobs[] @relation("createdAiJobs")
WorkspaceInvitations WorkspaceUserRole[] @relation("inviter")
docPermissions WorkspaceDocUserRole[]
connectedAccounts ConnectedAccount[]
sessions UserSession[]
aiSessions AiSession[]
appConfigs AppConfig[]
userSnapshots UserSnapshot[]
createdSnapshot Snapshot[] @relation("createdSnapshot")
updatedSnapshot Snapshot[] @relation("updatedSnapshot")
createdUpdate Update[] @relation("createdUpdate")
createdHistory SnapshotHistory[] @relation("createdHistory")
createdAiJobs AiJobs[] @relation("createdAiJobs")
// receive notifications
notifications Notification[] @relation("user_notifications")
settings UserSettings?
comments Comment[]
replies Reply[]
commentAttachments CommentAttachment[] @relation("createdCommentAttachments")
AccessToken AccessToken[]
notifications Notification[] @relation("user_notifications")
settings UserSettings?
comments Comment[]
replies Reply[]
commentAttachments CommentAttachment[] @relation("createdCommentAttachments")
AccessToken AccessToken[]
@@index([email])
@@map("users")
@@ -79,9 +77,6 @@ model Session {
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
userSessions UserSession[]
// @deprecated use [UserSession.expiresAt]
deprecated_expiresAt DateTime? @map("expires_at") @db.Timestamptz(3)
@@map("multiple_users_sessions")
}
@@ -117,6 +112,7 @@ model Workspace {
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
// workspace level feature flags
enableAi Boolean @default(true) @map("enable_ai")
enableSharing Boolean @default(true) @map("enable_sharing")
enableUrlPreview Boolean @default(false) @map("enable_url_preview")
enableDocEmbedding Boolean @default(true) @map("enable_doc_embedding")
name String? @db.VarChar
@@ -124,15 +120,17 @@ model Workspace {
indexed Boolean @default(false)
lastCheckEmbeddings DateTime @default("1970-01-01T00:00:00-00:00") @map("last_check_embeddings") @db.Timestamptz(3)
features WorkspaceFeature[]
docs WorkspaceDoc[]
permissions WorkspaceUserRole[]
docPermissions WorkspaceDocUserRole[]
blobs Blob[]
ignoredDocs AiWorkspaceIgnoredDocs[]
embedFiles AiWorkspaceFiles[]
comments Comment[]
commentAttachments CommentAttachment[]
features WorkspaceFeature[]
docs WorkspaceDoc[]
permissions WorkspaceUserRole[]
docPermissions WorkspaceDocUserRole[]
blobs Blob[]
ignoredDocs AiWorkspaceIgnoredDocs[]
embedFiles AiWorkspaceFiles[]
comments Comment[]
commentAttachments CommentAttachment[]
workspaceAdminStats WorkspaceAdminStats[]
workspaceAdminStatsDirties WorkspaceAdminStatsDirty[]
@@index([lastCheckEmbeddings])
@@index([createdAt])
@@ -145,17 +143,18 @@ model Workspace {
// Only the ones that have ever changed will have records here,
// and for the others we will make sure a default value is returned in our business logic.
model WorkspaceDoc {
workspaceId String @map("workspace_id") @db.VarChar
docId String @map("page_id") @db.VarChar
public Boolean @default(false)
workspaceId String @map("workspace_id") @db.VarChar
docId String @map("page_id") @db.VarChar
public Boolean @default(false)
// Workspace user's default role in this page, default is `Manager`
defaultRole Int @default(30) @db.SmallInt
defaultRole Int @default(30) @db.SmallInt
// Page/Edgeless
mode Int @default(0) @db.SmallInt
mode Int @default(0) @db.SmallInt
// Whether the doc is blocked
blocked Boolean @default(false)
title String? @db.VarChar
summary String? @db.VarChar
blocked Boolean @default(false)
title String? @db.VarChar
summary String? @db.VarChar
publishedAt DateTime? @map("published_at") @db.Timestamptz(3)
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
@@ -203,9 +202,6 @@ model WorkspaceUserRole {
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
inviter User? @relation(name: "inviter", fields: [inviterId], references: [id], onDelete: SetNull)
/// @deprecated Whether the permission invitation is accepted by the user, use status instead
accepted Boolean @default(false)
@@unique([workspaceId, userId])
// optimize for querying user's workspace permissions
@@index([workspaceId, type, status])
@@ -241,7 +237,7 @@ model UserFeature {
expiredAt DateTime? @map("expired_at") @db.Timestamptz(3)
activated Boolean @default(false)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
@@index([userId])
@@index([name])
@@ -267,9 +263,43 @@ model WorkspaceFeature {
@@index([workspaceId])
@@index([name])
// Partial index on (workspace_id, name) where activated = true is created via SQL migration
@@index([workspaceId, name, activated])
@@map("workspace_features")
}
model WorkspaceAdminStats {
workspaceId String @id @map("workspace_id") @db.VarChar
snapshotCount BigInt @default(0) @map("snapshot_count") @db.BigInt
snapshotSize BigInt @default(0) @map("snapshot_size") @db.BigInt
blobCount BigInt @default(0) @map("blob_count") @db.BigInt
blobSize BigInt @default(0) @map("blob_size") @db.BigInt
memberCount BigInt @default(0) @map("member_count") @db.BigInt
publicPageCount BigInt @default(0) @map("public_page_count") @db.BigInt
features String[] @default([]) @db.Text
updatedAt DateTime @default(now()) @map("updated_at") @db.Timestamptz(3)
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
@@index([snapshotSize(sort: Desc)])
@@index([blobCount(sort: Desc)])
@@index([blobSize(sort: Desc)])
@@index([snapshotCount(sort: Desc)])
@@index([memberCount(sort: Desc)])
@@index([publicPageCount(sort: Desc)])
@@map("workspace_admin_stats")
}
model WorkspaceAdminStatsDirty {
workspaceId String @id @map("workspace_id") @db.VarChar
updatedAt DateTime @default(now()) @map("updated_at") @db.Timestamptz(3)
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
@@index([updatedAt])
@@map("workspace_admin_stats_dirty")
}
// the latest snapshot of each doc that we've seen
// Snapshot + Updates are the latest state of the doc
model Snapshot {
@@ -291,9 +321,6 @@ model Snapshot {
createdByUser User? @relation(name: "createdSnapshot", fields: [createdBy], references: [id], onDelete: SetNull)
updatedByUser User? @relation(name: "updatedSnapshot", fields: [updatedBy], references: [id], onDelete: SetNull)
// @deprecated use updatedAt only
seq Int? @default(0) @db.Integer
// we need to clear all hanging updates and snapshots before enable the foreign key on workspaceId
// workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
@@ -331,9 +358,6 @@ model Update {
// will delete creator record if creator's account is deleted
createdByUser User? @relation(name: "createdUpdate", fields: [createdBy], references: [id], onDelete: SetNull)
// @deprecated use createdAt only
seq Int? @db.Integer
@@id([workspaceId, id, createdAt])
@@map("updates")
}
@@ -607,32 +631,6 @@ model DataMigration {
@@map("_data_migrations")
}
enum RuntimeConfigType {
String
Number
Boolean
Object
Array
}
/// @deprecated use AppConfig instead
model DeprecatedAppRuntimeSettings {
id String @id @db.VarChar
type RuntimeConfigType
module String @db.VarChar
key String @db.VarChar
value Json @db.Json
description String @db.Text
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(3)
deletedAt DateTime? @map("deleted_at") @db.Timestamptz(3)
lastUpdatedBy String? @map("last_updated_by") @db.VarChar
lastUpdatedByUser User? @relation(fields: [lastUpdatedBy], references: [id])
@@unique([module, key])
@@map("app_runtime_settings")
}
model AppConfig {
id String @id @db.VarChar
value Json @db.JsonB
@@ -645,64 +643,6 @@ model AppConfig {
@@map("app_configs")
}
model DeprecatedUserSubscription {
id Int @id @default(autoincrement()) @db.Integer
userId String @map("user_id") @db.VarChar
plan String @db.VarChar(20)
// yearly/monthly/lifetime
recurring String @db.VarChar(20)
// onetime subscription or anything else
variant String? @db.VarChar(20)
// subscription.id, null for lifetime payment or one time payment subscription
stripeSubscriptionId String? @unique @map("stripe_subscription_id")
// subscription.status, active/past_due/canceled/unpaid...
status String @db.VarChar(20)
// subscription.current_period_start
start DateTime @map("start") @db.Timestamptz(3)
// subscription.current_period_end, null for lifetime payment
end DateTime? @map("end") @db.Timestamptz(3)
// subscription.billing_cycle_anchor
nextBillAt DateTime? @map("next_bill_at") @db.Timestamptz(3)
// subscription.canceled_at
canceledAt DateTime? @map("canceled_at") @db.Timestamptz(3)
// subscription.trial_start
trialStart DateTime? @map("trial_start") @db.Timestamptz(3)
// subscription.trial_end
trialEnd DateTime? @map("trial_end") @db.Timestamptz(3)
stripeScheduleId String? @map("stripe_schedule_id") @db.VarChar
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(3)
@@unique([userId, plan])
@@map("user_subscriptions")
}
model DeprecatedUserInvoice {
id Int @id @default(autoincrement()) @db.Integer
userId String @map("user_id") @db.VarChar
stripeInvoiceId String @unique @map("stripe_invoice_id")
currency String @db.VarChar(3)
// CNY 12.50 stored as 1250
amount Int @db.Integer
status String @db.VarChar(20)
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz(3)
// billing reason
reason String? @db.VarChar
lastPaymentError String? @map("last_payment_error") @db.Text
// stripe hosted invoice link
link String? @db.Text
// @deprecated
plan String? @db.VarChar(20)
// @deprecated
recurring String? @db.VarChar(20)
@@index([userId])
@@map("user_invoices")
}
model UserStripeCustomer {
userId String @id @map("user_id") @db.VarChar
stripeCustomerId String @unique @map("stripe_customer_id") @db.VarChar

View File

@@ -41,7 +41,6 @@ const snapshot: Snapshot = {
id: 'doc1',
blob: Uint8Array.from([1, 0]),
state: Uint8Array.from([0]),
seq: 0,
size: BigInt(2),
updatedAt: new Date(),
createdAt: new Date(),

View File

@@ -38,7 +38,6 @@ test('should create a new session', async t => {
const session = await t.context.session.createSession();
t.truthy(session.id);
t.truthy(session.createdAt);
t.is(session.deprecated_expiresAt, null);
});
test('should get a exists session', async t => {

View File

@@ -59,6 +59,7 @@ test('should update workspace', async t => {
const data = {
public: true,
enableAi: true,
enableSharing: false,
enableUrlPreview: true,
enableDocEmbedding: false,
};

View File

@@ -16,7 +16,6 @@ export class WorkspaceResolverMock {
create: {
type: WorkspaceRole.Owner,
userId: user.id,
accepted: true,
status: WorkspaceMemberStatus.Accepted,
},
},

View File

@@ -85,6 +85,26 @@ export async function updateWorkspace(
return res.updateWorkspace.public;
}
export async function setWorkspaceSharing(
app: TestingApp,
workspaceId: string,
enableSharing: boolean
) {
const res = await app.gql(
`
mutation {
updateWorkspace(
input: { id: "${workspaceId}", enableSharing: ${enableSharing} }
) {
enableSharing
}
}
`
);
return res.updateWorkspace.enableSharing as boolean;
}
export async function deleteWorkspace(
app: TestingApp,
workspaceId: string

View File

@@ -10,6 +10,7 @@ import {
inviteUser,
publishDoc,
revokePublicDoc,
setWorkspaceSharing,
TestingApp,
updateWorkspace,
} from './utils';
@@ -180,4 +181,17 @@ test('should be able to get public workspace doc', async t => {
.type('application/octet-stream');
t.deepEqual(res.body, Buffer.from([0, 0]), 'failed to get public doc');
const disabled = await setWorkspaceSharing(app, workspace.id, false);
t.false(disabled, 'failed to disable workspace sharing');
// owner should still be able to access
await app
.GET(`/api/workspaces/${workspace.id}/docs/${workspace.id}`)
.expect(200);
await app.logout();
await app
.GET(`/api/workspaces/${workspace.id}/docs/${workspace.id}`)
.expect(403);
});

View File

@@ -40,6 +40,7 @@ import { MailModule } from './core/mail';
import { MonitorModule } from './core/monitor';
import { NotificationModule } from './core/notification';
import { PermissionModule } from './core/permission';
import { QueueDashboardModule } from './core/queue-dashboard';
import { QuotaModule } from './core/quota';
import { SelfhostModule } from './core/selfhost';
import { StorageModule } from './core/storage';
@@ -189,7 +190,8 @@ export function buildAppModule(env: Env) {
OAuthModule,
CustomerIoModule,
CommentModule,
AccessTokenModule
AccessTokenModule,
QueueDashboardModule
)
// doc service only
.useIf(() => env.flavors.doc, DocServiceModule)

View File

@@ -61,7 +61,8 @@ export type KnownMetricScopes =
| 'event'
| 'queue'
| 'storage'
| 'process';
| 'process'
| 'workspace';
const metricCreators: MetricCreators = {
counter(meter: Meter, name: string, opts?: MetricOptions) {

View File

@@ -100,6 +100,11 @@ export class DocRendererController {
workspaceId: string,
docId: string
): Promise<RenderOptions | null> {
const allowSharing = await this.models.workspace.allowSharing(workspaceId);
if (!allowSharing) {
return null;
}
let allowUrlPreview = await this.models.doc.isPublic(workspaceId, docId);
if (!allowUrlPreview) {
@@ -118,6 +123,11 @@ export class DocRendererController {
private async getWorkspaceContent(
workspaceId: string
): Promise<RenderOptions | null> {
const allowSharing = await this.models.workspace.allowSharing(workspaceId);
if (!allowSharing) {
return null;
}
const allowUrlPreview =
await this.models.workspace.allowUrlPreview(workspaceId);

View File

@@ -21,7 +21,7 @@ let ws: Workspace;
test.before(async () => {
module = await createTestingModule({ imports: [PermissionModule] });
models = module.get<Models>(Models);
ac = new DocAccessController();
ac = module.get(DocAccessController);
});
test.beforeEach(async () => {

View File

@@ -80,6 +80,20 @@ test('should fallback to [External] if workspace is public', async t => {
t.is(role, WorkspaceRole.External);
});
test('should return null if workspace is public but sharing disabled', async t => {
await models.workspace.update(ws.id, {
public: true,
enableSharing: false,
});
const role = await ac.getRole({
workspaceId: ws.id,
userId: 'random-user-id',
});
t.is(role, null);
});
test('should return null even workspace has public doc', async t => {
await models.doc.publish(ws.id, 'doc1');
@@ -91,6 +105,18 @@ test('should return null even workspace has public doc', async t => {
t.is(role, null);
});
test('should return null even workspace has public doc when sharing disabled', async t => {
await models.doc.publish(ws.id, 'doc1');
await models.workspace.update(ws.id, { enableSharing: false });
const role = await ac.getRole({
workspaceId: ws.id,
userId: 'random-user-id',
});
t.is(role, null);
});
test('should return mapped external permission for workspace has public docs', async t => {
await models.doc.publish(ws.id, 'doc1');
@@ -105,6 +131,24 @@ test('should return mapped external permission for workspace has public docs', a
);
});
test('should reject external doc roles when sharing disabled', async t => {
await models.workspace.update(ws.id, {
public: true,
enableSharing: false,
});
const [docRole] = await ac.docRoles(
{
workspaceId: ws.id,
userId: 'random-user-id',
},
['doc1']
);
t.is(docRole.role, null);
t.false(docRole.permissions['Doc.Read']);
});
test('should return mapped permissions', async t => {
const { permissions } = await ac.role({
workspaceId: ws.id,

View File

@@ -1,6 +1,7 @@
import { Injectable } from '@nestjs/common';
import { DocActionDenied } from '../../base';
import { Models } from '../../models';
import { AccessController, getAccessController } from './controller';
import type { Resource } from './resource';
import {
@@ -14,14 +15,21 @@ import { WorkspaceAccessController } from './workspace';
@Injectable()
export class DocAccessController extends AccessController<'doc'> {
protected readonly type = 'doc';
constructor(private readonly models: Models) {
super();
}
async role(resource: Resource<'doc'>) {
const role = await this.getRole(resource);
const permissions = mapDocRoleToPermissions(role);
const sharingAllowed = await this.models.workspace.allowSharing(
resource.workspaceId
);
if (!sharingAllowed) {
permissions['Doc.Publish'] = false;
}
return { role, permissions };
}
async can(resource: Resource<'doc'>, action: DocAction) {

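A minimal sketch of the intended effect of the role() change above (names taken from the diff; the exact resource shape is assumed, not shown in full here):
// Sketch only — assumes the DocAccessController and Models wiring above.
const { role, permissions } = await ac.role({
  workspaceId: ws.id,
  userId: member.id,
  docId: 'doc1',
});
// With enableSharing === false on the workspace, `role` is unchanged but
// permissions['Doc.Publish'] is forced to false for everyone.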
View File

@@ -27,7 +27,11 @@ export class WorkspaceAccessController extends AccessController<'ws'> {
// NOTE(@forehalo): special case for public page
// Currently, we cannot load only the binary of a public Doc to render in a shared page,
// so we need to ensure anyone has basic 'read' permission to a workspace that has public pages.
if (
!role &&
(await this.models.workspace.allowSharing(resource.workspaceId)) &&
(await this.models.doc.hasPublic(resource.workspaceId))
) {
role = WorkspaceRole.External;
}
@@ -92,6 +96,15 @@ export class WorkspaceAccessController extends AccessController<'ws'> {
}
const workspaceRole = await this.getRole(payload);
const sharingAllowed = await this.models.workspace.allowSharing(
payload.workspaceId
);
if (
!sharingAllowed &&
(workspaceRole === null || workspaceRole === WorkspaceRole.External)
) {
return docIds.map(() => null);
}
const userRoles = await this.models.docUser.findMany(
payload.workspaceId,
@@ -190,7 +203,8 @@ export class WorkspaceAccessController extends AccessController<'ws'> {
}
if (ws.public) {
const sharingAllowed = await this.models.workspace.allowSharing(ws.id);
return sharingAllowed ? WorkspaceRole.External : null;
}
return null;

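The rule the workspace controller now applies to anonymous visitors, condensed into a hedged sketch (the helper name and shape are illustrative, not part of the diff):
// Illustrative predicate, not committed code: external access is granted only
// when sharing is enabled AND the workspace is public or has at least one
// public doc; otherwise the visitor gets no role at all.
async function resolveExternalRole(
  models: Models,
  workspaceId: string,
  isPublicWorkspace: boolean
): Promise<WorkspaceRole | null> {
  if (!(await models.workspace.allowSharing(workspaceId))) {
    return null;
  }
  if (isPublicWorkspace || (await models.doc.hasPublic(workspaceId))) {
    return WorkspaceRole.External;
  }
  return null;
}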
View File

@@ -0,0 +1,100 @@
import { getQueueToken } from '@nestjs/bullmq';
import { Injectable, Logger, Module, OnModuleInit } from '@nestjs/common';
import { HttpAdapterHost, ModuleRef } from '@nestjs/core';
import { createQueueDashExpressMiddleware } from '@queuedash/api';
import type { Queue as BullMQQueue } from 'bullmq';
import type { Application, NextFunction, Request, Response } from 'express';
import { Config } from '../../base/config';
import { QUEUES } from '../../base/job/queue/def';
import { AuthGuard, AuthModule } from '../auth';
import { FeatureModule, FeatureService } from '../features';
type QueueDashQueue = {
queue: BullMQQueue;
displayName: string;
type: 'bullmq';
};
@Injectable()
class QueueDashboardService implements OnModuleInit {
private readonly logger = new Logger(QueueDashboardService.name);
constructor(
private readonly adapterHost: HttpAdapterHost,
private readonly config: Config,
private readonly feature: FeatureService,
private readonly authGuard: AuthGuard,
private readonly moduleRef: ModuleRef
) {}
async onModuleInit() {
const httpAdapter = this.adapterHost.httpAdapter;
if (!httpAdapter) {
return;
}
const app = httpAdapter.getInstance<Application>();
const mountPath = `${this.config.server.path}/api/queue`;
const queues = this.collectQueues();
if (!queues.length) {
this.logger.warn('QueueDash not mounted: no queues available');
app.use(mountPath, (_req, res) => {
res.status(404).end();
});
return;
}
const guardMiddleware = async (
req: Request,
res: Response,
next: NextFunction
): Promise<void> => {
try {
const session = await this.authGuard.signIn(req, res);
const userId = session?.user?.id;
const isAdmin = userId ? await this.feature.isAdmin(userId) : false;
if (!isAdmin) {
res.status(404).end();
return;
}
} catch (error) {
this.logger.warn('QueueDash auth failed', error as Error);
res.status(404).end();
return;
}
next();
};
app.use(
mountPath,
guardMiddleware,
createQueueDashExpressMiddleware({ ctx: { queues } })
);
this.logger.log(`QueueDash mounted on ${mountPath}`);
}
private collectQueues(): QueueDashQueue[] {
const queues: QueueDashQueue[] = [];
for (const name of QUEUES) {
const queue = this.moduleRef.get<BullMQQueue>(getQueueToken(name), {
strict: false,
});
if (queue) {
queues.push({ queue, displayName: name, type: 'bullmq' });
}
}
return queues;
}
}
@Module({
imports: [AuthModule, FeatureModule],
providers: [QueueDashboardService],
})
export class QueueDashboardModule {}

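A rough e2e assertion for the guard above, assuming the same `TestingApp` fixture used in the other suites in this change set (not a committed test):
// The guard deliberately answers 404 (not 401/403) so the dashboard's
// existence is not revealed to anonymous or non-admin callers.
test('queue dashboard is hidden from non-admins', async t => {
  await app.logout();
  const res = await app.GET('/api/queue');
  t.is(res.status, 404);
});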
View File

@@ -21,6 +21,7 @@ import {
} from './resolvers';
import { AdminWorkspaceResolver } from './resolvers/admin';
import { WorkspaceService } from './service';
import { WorkspaceStatsJob } from './stats.job';
@Module({
imports: [
@@ -45,6 +46,7 @@ import { WorkspaceService } from './service';
WorkspaceService,
WorkspaceEvents,
AdminWorkspaceResolver,
WorkspaceStatsJob,
],
exports: [WorkspaceService],
})

View File

@@ -31,6 +31,9 @@ enum AdminWorkspaceSort {
SnapshotSize = 'SnapshotSize',
BlobCount = 'BlobCount',
BlobSize = 'BlobSize',
SnapshotCount = 'SnapshotCount',
MemberCount = 'MemberCount',
PublicPageCount = 'PublicPageCount',
}
registerEnumType(AdminWorkspaceSort, {
@@ -53,6 +56,21 @@ class ListWorkspaceInput {
@Field(() => AdminWorkspaceSort, { nullable: true })
orderBy?: AdminWorkspaceSort;
@Field({ nullable: true })
public?: boolean;
@Field({ nullable: true })
enableAi?: boolean;
@Field({ nullable: true })
enableSharing?: boolean;
@Field({ nullable: true })
enableUrlPreview?: boolean;
@Field({ nullable: true })
enableDocEmbedding?: boolean;
}
@ObjectType()
@@ -76,6 +94,18 @@ class AdminWorkspaceMember {
status!: WorkspaceMemberStatus;
}
@ObjectType()
class AdminWorkspaceSharedLink {
@Field()
docId!: string;
@Field(() => String, { nullable: true })
title?: string | null;
@Field(() => Date, { nullable: true })
publishedAt?: Date | null;
}
@ObjectType()
export class AdminWorkspace {
@Field()
@@ -96,6 +126,9 @@ export class AdminWorkspace {
@Field()
enableAi!: boolean;
@Field()
enableSharing!: boolean;
@Field()
enableUrlPreview!: boolean;
@@ -125,6 +158,9 @@ export class AdminWorkspace {
@Field(() => SafeIntResolver)
blobSize!: number;
@Field(() => [AdminWorkspaceSharedLink])
sharedLinks!: AdminWorkspaceSharedLink[];
}
@InputType()
@@ -132,6 +168,7 @@ class AdminUpdateWorkspaceInput extends PartialType(
PickType(AdminWorkspace, [
'public',
'enableAi',
'enableSharing',
'enableUrlPreview',
'enableDocEmbedding',
'name',
@@ -165,6 +202,13 @@ export class AdminWorkspaceResolver {
keyword: filter.keyword,
features: filter.features,
order: this.mapSort(filter.orderBy),
flags: {
public: filter.public ?? undefined,
enableAi: filter.enableAi ?? undefined,
enableSharing: filter.enableSharing ?? undefined,
enableUrlPreview: filter.enableUrlPreview ?? undefined,
enableDocEmbedding: filter.enableDocEmbedding ?? undefined,
},
includeTotal: false,
});
return rows;
@@ -178,6 +222,13 @@ export class AdminWorkspaceResolver {
const total = await this.models.workspace.adminCountWorkspaces({
keyword: filter.keyword,
features: filter.features,
flags: {
public: filter.public ?? undefined,
enableAi: filter.enableAi ?? undefined,
enableSharing: filter.enableSharing ?? undefined,
enableUrlPreview: filter.enableUrlPreview ?? undefined,
enableDocEmbedding: filter.enableDocEmbedding ?? undefined,
},
});
return total;
}
@@ -247,6 +298,18 @@ export class AdminWorkspaceResolver {
}));
}
@ResolveField(() => [AdminWorkspaceSharedLink], {
description: 'Shared links of workspace',
})
async sharedLinks(@Parent() workspace: AdminWorkspace) {
const publicDocs = await this.models.doc.findPublics(workspace.id, 'desc');
return publicDocs.map(doc => ({
docId: doc.docId,
title: doc.title,
publishedAt: doc.publishedAt ?? null,
}));
}
@Mutation(() => AdminWorkspace, {
description: 'Update workspace flags and features for admin',
nullable: true,
@@ -298,6 +361,12 @@ export class AdminWorkspaceResolver {
return 'blobCount';
case AdminWorkspaceSort.BlobSize:
return 'blobSize';
case AdminWorkspaceSort.SnapshotCount:
return 'snapshotCount';
case AdminWorkspaceSort.MemberCount:
return 'memberCount';
case AdminWorkspaceSort.PublicPageCount:
return 'publicPageCount';
case AdminWorkspaceSort.CreatedAt:
default:
return 'createdAt';

View File

@@ -230,14 +230,6 @@ export class WorkspaceDocResolver {
};
}
@ResolveField(() => [DocType], {
complexity: 2,
deprecationReason: 'use [WorkspaceType.publicDocs] instead',
})
async publicPages(@Parent() workspace: WorkspaceType) {
return this.publicDocs(workspace);
}
@ResolveField(() => [DocType], {
description: 'Get public docs of a workspace',
complexity: 2,

View File

@@ -0,0 +1,307 @@
import { Injectable, Logger } from '@nestjs/common';
import { Cron, CronExpression } from '@nestjs/schedule';
import { Prisma, PrismaClient } from '@prisma/client';
import { metrics } from '../../base';
const LOCK_NAMESPACE = 97_301;
const LOCK_KEY = 1;
const DIRTY_BATCH_SIZE = 500;
const FULL_REFRESH_BATCH_SIZE = 2000;
const TRANSACTION_TIMEOUT_MS = 120_000;
@Injectable()
export class WorkspaceStatsJob {
private readonly logger = new Logger(WorkspaceStatsJob.name);
constructor(private readonly prisma: PrismaClient) {}
@Cron(CronExpression.EVERY_MINUTE)
async refreshDirty() {
const started = Date.now();
try {
const result = await this.withAdvisoryLock(async tx => {
const backlog = await this.countDirty(tx);
metrics.workspace
.gauge('admin_stats_dirty_backlog')
.record(Number(backlog));
const dirty = await this.loadDirty(tx, DIRTY_BATCH_SIZE);
if (!dirty.length) {
return { processed: 0, backlog };
}
await this.upsertStats(tx, dirty);
await this.clearDirty(tx, dirty);
return { processed: dirty.length, backlog };
});
if (!result) {
this.logger.debug('skip admin stats refresh, lock not acquired');
return;
}
metrics.workspace
.histogram('admin_stats_refresh_duration_ms')
.record(Date.now() - started, { mode: 'incremental' });
if (result.processed > 0) {
this.logger.log(
`Refreshed admin stats for ${result.processed} workspace(s); backlog ${result.backlog}`
);
}
} catch (error) {
metrics.workspace.counter('admin_stats_refresh_failed').add(1, {
mode: 'incremental',
});
this.logger.error('Failed to refresh admin stats', error as Error);
}
}
@Cron(CronExpression.EVERY_DAY_AT_1AM)
async recalibrate() {
let lastSid = 0;
let processed = 0;
while (true) {
const started = Date.now();
try {
const result = await this.withAdvisoryLock(async tx => {
const workspaces = await this.fetchWorkspaceBatch(
tx,
lastSid,
FULL_REFRESH_BATCH_SIZE
);
if (!workspaces.length) {
return { processed: 0, lastSid };
}
const ids = workspaces.map(({ id }) => id);
await this.upsertStats(tx, ids);
return {
processed: ids.length,
lastSid: workspaces[workspaces.length - 1].sid,
};
});
if (!result) {
this.logger.debug(
'skip admin stats recalibration, lock not acquired'
);
break;
}
if (result.processed === 0) {
break;
}
processed += result.processed;
lastSid = result.lastSid;
metrics.workspace
.histogram('admin_stats_refresh_duration_ms')
.record(Date.now() - started, { mode: 'full' });
if (result.processed < FULL_REFRESH_BATCH_SIZE) {
break;
}
} catch (error) {
metrics.workspace.counter('admin_stats_refresh_failed').add(1, {
mode: 'full',
});
this.logger.error(
`Failed to recalibrate admin stats after sid ${lastSid}`,
error as Error
);
break;
}
}
if (processed > 0) {
this.logger.verbose(
`Recalibrate admin stats for ${processed} workspace(s) (last sid ${lastSid})`
);
}
}
private async withAdvisoryLock<T>(
callback: (tx: Prisma.TransactionClient) => Promise<T>
): Promise<T | null> {
const lockIdSql = Prisma.sql`(${LOCK_NAMESPACE}::bigint << 32) + ${LOCK_KEY}::bigint`;
return await this.prisma.$transaction(
async tx => {
const [lock] = await tx.$queryRaw<{ locked: boolean }[]>`
SELECT pg_try_advisory_lock(${lockIdSql}) AS locked
`;
if (!lock?.locked) {
return null;
}
try {
return await callback(tx);
} finally {
await tx.$executeRaw`SELECT pg_advisory_unlock(${lockIdSql})`;
}
},
{
maxWait: 5_000,
timeout: TRANSACTION_TIMEOUT_MS,
}
);
}
private async loadDirty(
tx: Prisma.TransactionClient,
limit: number
): Promise<string[]> {
const rows = await tx.$queryRaw<{ workspace_id: string }[]>`
SELECT workspace_id
FROM workspace_admin_stats_dirty
ORDER BY updated_at ASC
LIMIT ${limit}
FOR UPDATE SKIP LOCKED
`;
return rows.map(row => row.workspace_id);
}
private async countDirty(tx: Prisma.TransactionClient) {
const [row] = await tx.$queryRaw<{ total: bigint | number }[]>`
SELECT COUNT(*) AS total FROM workspace_admin_stats_dirty
`;
return row?.total ? Number(row.total) : 0;
}
private async clearDirty(
tx: Prisma.TransactionClient,
workspaceIds: string[]
) {
if (!workspaceIds.length) {
return;
}
await tx.$executeRaw`
DELETE FROM workspace_admin_stats_dirty
WHERE workspace_id IN (${Prisma.join(
workspaceIds.map(id => Prisma.sql`${id}`)
)})
`;
}
private async upsertStats(
tx: Prisma.TransactionClient,
workspaceIds: string[]
) {
if (!workspaceIds.length) {
return;
}
const targetIds = Prisma.join(workspaceIds.map(id => Prisma.sql`${id}`));
await tx.$executeRaw`
WITH targets AS (
SELECT UNNEST(ARRAY[${targetIds}]::varchar[]) AS workspace_id
),
snapshot_stats AS (
SELECT workspace_id,
COUNT(*) AS snapshot_count,
COALESCE(SUM(COALESCE(size, octet_length(blob))), 0) AS snapshot_size
FROM snapshots
WHERE workspace_id IN (SELECT workspace_id FROM targets)
GROUP BY workspace_id
),
blob_stats AS (
SELECT workspace_id,
COUNT(*) FILTER (WHERE deleted_at IS NULL AND status = 'completed') AS blob_count,
COALESCE(SUM(size) FILTER (WHERE deleted_at IS NULL AND status = 'completed'), 0) AS blob_size
FROM blobs
WHERE workspace_id IN (SELECT workspace_id FROM targets)
GROUP BY workspace_id
),
member_stats AS (
SELECT workspace_id, COUNT(*) AS member_count
FROM workspace_user_permissions
WHERE workspace_id IN (SELECT workspace_id FROM targets)
GROUP BY workspace_id
),
public_page_stats AS (
SELECT workspace_id, COUNT(*) AS public_page_count
FROM workspace_pages
WHERE public = TRUE AND workspace_id IN (SELECT workspace_id FROM targets)
GROUP BY workspace_id
),
feature_stats AS (
SELECT workspace_id,
ARRAY_AGG(DISTINCT name ORDER BY name) FILTER (WHERE activated) AS features
FROM workspace_features
WHERE workspace_id IN (SELECT workspace_id FROM targets)
GROUP BY workspace_id
),
aggregated AS (
SELECT t.workspace_id,
COALESCE(ss.snapshot_count, 0) AS snapshot_count,
COALESCE(ss.snapshot_size, 0) AS snapshot_size,
COALESCE(bs.blob_count, 0) AS blob_count,
COALESCE(bs.blob_size, 0) AS blob_size,
COALESCE(ms.member_count, 0) AS member_count,
COALESCE(pp.public_page_count, 0) AS public_page_count,
COALESCE(fs.features, ARRAY[]::text[]) AS features
FROM targets t
LEFT JOIN snapshot_stats ss ON ss.workspace_id = t.workspace_id
LEFT JOIN blob_stats bs ON bs.workspace_id = t.workspace_id
LEFT JOIN member_stats ms ON ms.workspace_id = t.workspace_id
LEFT JOIN public_page_stats pp ON pp.workspace_id = t.workspace_id
LEFT JOIN feature_stats fs ON fs.workspace_id = t.workspace_id
)
INSERT INTO workspace_admin_stats (
workspace_id,
snapshot_count,
snapshot_size,
blob_count,
blob_size,
member_count,
public_page_count,
features,
updated_at
)
SELECT
workspace_id,
snapshot_count,
snapshot_size,
blob_count,
blob_size,
member_count,
public_page_count,
features,
NOW()
FROM aggregated
ON CONFLICT (workspace_id) DO UPDATE SET
snapshot_count = EXCLUDED.snapshot_count,
snapshot_size = EXCLUDED.snapshot_size,
blob_count = EXCLUDED.blob_count,
blob_size = EXCLUDED.blob_size,
member_count = EXCLUDED.member_count,
public_page_count = EXCLUDED.public_page_count,
features = EXCLUDED.features,
updated_at = EXCLUDED.updated_at
`;
}
private async fetchWorkspaceBatch(
tx: Prisma.TransactionClient,
lastSid: number,
limit: number
) {
return tx.$queryRaw<{ id: string; sid: number }[]>`
SELECT id, sid
FROM workspaces
WHERE sid > ${lastSid}
ORDER BY sid
LIMIT ${limit}
`;
}
}

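How the 64-bit advisory-lock key used above is composed, as a small self-contained sketch (constants copied from the diff):
// Postgres advisory locks take one 64-bit key; the job packs a namespace into
// the high 32 bits and a sub-key into the low 32 bits so unrelated jobs never
// collide on the same lock id.
const LOCK_NAMESPACE = 97_301n;
const LOCK_KEY = 1n;
const lockId = (LOCK_NAMESPACE << 32n) + LOCK_KEY; // same value as the SQL expression
// pg_try_advisory_lock(lockId) is non-blocking: a second instance that runs the
// cron concurrently sees locked = false, withAdvisoryLock() resolves to null,
// and that instance just logs "lock not acquired" and skips the cycle.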
View File

@@ -79,6 +79,9 @@ export class WorkspaceType extends WorkspaceFeatureType {
@Field({ description: 'Enable AI' })
enableAi!: boolean;
@Field({ description: 'Enable workspace sharing' })
enableSharing!: boolean;
@Field({ description: 'Enable url preview when sharing' })
enableUrlPreview!: boolean;
@@ -130,7 +133,13 @@ export class InvitationType {
@InputType()
export class UpdateWorkspaceInput extends PickType(
PartialType(WorkspaceType),
[
'public',
'enableAi',
'enableSharing',
'enableUrlPreview',
'enableDocEmbedding',
],
InputType
) {
@Field(() => ID)

View File

@@ -1,18 +0,0 @@
import { PrismaClient, WorkspaceMemberStatus } from '@prisma/client';
export class MigrateInviteStatus1732861452428 {
// do the migration
static async up(db: PrismaClient) {
await db.workspaceUserRole.updateMany({
where: {
accepted: true,
},
data: {
status: WorkspaceMemberStatus.Accepted,
},
});
}
// revert the migration
static async down(_db: PrismaClient) {}
}

View File

@@ -1,29 +0,0 @@
import { PrismaClient } from '@prisma/client';
import { loop } from './utils/loop';
export class UniversalSubscription1733125339942 {
// do the migration
static async up(db: PrismaClient) {
await loop(async (offset, take) => {
const oldSubscriptions = await db.deprecatedUserSubscription.findMany({
skip: offset,
take,
});
await db.subscription.createMany({
data: oldSubscriptions.map(({ userId, ...subscription }) => ({
targetId: userId,
...subscription,
})),
});
return oldSubscriptions.length;
}, 50);
}
// revert the migration
static async down(_db: PrismaClient) {
// noop
}
}

View File

@@ -1,7 +1,5 @@
export * from './1698398506533-guid';
export * from './1703756315970-unamed-account';
export * from './1721299086340-refresh-unnamed-user';
export * from './1732861452428-migrate-invite-status';
export * from './1733125339942-universal-subscription';
export * from './1745211351719-create-indexer-tables';
export * from './1751966744168-correct-session-update-time';

View File

@@ -61,7 +61,6 @@ export class DocModel extends BaseModel {
blob: record.blob,
createdAt: new Date(record.timestamp),
createdBy: record.editorId || null,
seq: null,
};
}
@@ -484,12 +483,10 @@ export class DocModel extends BaseModel {
/**
* Find the workspace public doc metas.
*/
async findPublics(workspaceId: string, order: 'asc' | 'desc' = 'asc') {
return await this.db.workspaceDoc.findMany({
where: { workspaceId, public: true },
orderBy: { publishedAt: order },
});
}
@@ -524,6 +521,7 @@ export class DocModel extends BaseModel {
return await this.upsertMeta(workspaceId, docId, {
public: true,
mode,
publishedAt: new Date(),
});
}
@@ -536,6 +534,7 @@ export class DocModel extends BaseModel {
return await this.upsertMeta(workspaceId, docId, {
public: false,
publishedAt: null,
});
}

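Expected lifecycle after this change, sketched with the model methods visible in the diff (the revoke path is the hunk that resets publishedAt to null):
// publish() now stamps publishedAt, so the admin "Shared Links" panel can sort
// newest-first via the new optional `order` argument of findPublics().
await models.doc.publish(workspace.id, docId);
const links = await models.doc.findPublics(workspace.id, 'desc');
// links[0].publishedAt is the most recent publication time; revoking a doc
// clears publishedAt back to null (second hunk above).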
View File

@@ -14,6 +14,7 @@ type RawWorkspaceSummary = {
name: string | null;
avatarKey: string | null;
enableAi: boolean;
enableSharing: boolean;
enableUrlPreview: boolean;
enableDocEmbedding: boolean;
memberCount: bigint | number | null;
@@ -36,6 +37,7 @@ export type AdminWorkspaceSummary = {
name: string | null;
avatarKey: string | null;
enableAi: boolean;
enableSharing: boolean;
enableUrlPreview: boolean;
enableDocEmbedding: boolean;
memberCount: number;
@@ -67,6 +69,7 @@ export type UpdateWorkspaceInput = Pick<
Partial<Workspace>,
| 'public'
| 'enableAi'
| 'enableSharing'
| 'enableUrlPreview'
| 'enableDocEmbedding'
| 'name'
@@ -169,6 +172,11 @@ export class WorkspaceModel extends BaseModel {
return workspace?.enableUrlPreview ?? false;
}
async allowSharing(workspaceId: string) {
const workspace = await this.get(workspaceId);
return workspace?.enableSharing ?? true;
}
async allowEmbedding(workspaceId: string) {
const workspace = await this.get(workspaceId);
return workspace?.enableDocEmbedding ?? false;
@@ -185,202 +193,137 @@ export class WorkspaceModel extends BaseModel {
first: number;
keyword?: string | null;
features?: WorkspaceFeatureName[] | null;
flags?: {
public?: boolean;
enableAi?: boolean;
enableSharing?: boolean;
enableUrlPreview?: boolean;
enableDocEmbedding?: boolean;
};
order?:
| 'createdAt'
| 'snapshotSize'
| 'blobCount'
| 'blobSize'
| 'snapshotCount'
| 'memberCount'
| 'publicPageCount';
includeTotal?: boolean;
}): Promise<{ rows: AdminWorkspaceSummary[]; total: number }> {
const keyword = options.keyword?.trim();
const features = options.features ?? [];
const flags = options.flags ?? {};
const includeTotal = options.includeTotal ?? true;
const total = includeTotal
? await this.adminCountWorkspaces({ keyword, features, flags })
: 0;
if (includeTotal && total === 0) {
return { rows: [], total: 0 };
}
const rows =
options.order === 'createdAt' || !options.order
? await this.db.$queryRaw<RawWorkspaceSummary[]>`
WITH feature_set AS (
SELECT workspace_id, array_agg(DISTINCT name) FILTER (WHERE activated) AS features
FROM workspace_features
GROUP BY workspace_id
),
owner AS (
SELECT wur.workspace_id,
u.id AS owner_id,
u.name AS owner_name,
u.email AS owner_email,
u.avatar_url AS owner_avatar_url
FROM workspace_user_permissions AS wur
JOIN users u ON wur.user_id = u.id
WHERE wur.type = ${WorkspaceRole.Owner}
AND wur.status = ${Prisma.sql`${WorkspaceMemberStatus.Accepted}::"WorkspaceMemberStatus"`}
),
filtered AS (
SELECT w.id,
w.public,
w.created_at AS "createdAt",
w.name,
w.avatar_key AS "avatarKey",
w.enable_ai AS "enableAi",
w.enable_url_preview AS "enableUrlPreview",
w.enable_doc_embedding AS "enableDocEmbedding",
COALESCE(fs.features, ARRAY[]::text[]) AS features,
o.owner_id AS "ownerId",
o.owner_name AS "ownerName",
o.owner_email AS "ownerEmail",
o.owner_avatar_url AS "ownerAvatarUrl"
FROM workspaces w
LEFT JOIN feature_set fs ON fs.workspace_id = w.id
LEFT JOIN owner o ON o.workspace_id = w.id
WHERE ${
keyword
? Prisma.sql`
(
w.id ILIKE ${'%' + keyword + '%'}
OR o.owner_id ILIKE ${'%' + keyword + '%'}
OR o.owner_email ILIKE ${'%' + keyword + '%'}
)
`
: Prisma.sql`TRUE`
}
AND ${
features.length
? Prisma.sql`CAST(COALESCE(fs.features, ARRAY[]::text[]) AS text[]) @> ${Prisma.sql`${features}::text[]`}`
: Prisma.sql`TRUE`
}
ORDER BY w.created_at DESC
LIMIT ${options.first}
OFFSET ${options.skip}
)
SELECT f.*,
COALESCE(ms.member_count, 0) AS "memberCount",
COALESCE(pp.public_page_count, 0) AS "publicPageCount",
COALESCE(ss.snapshot_count, 0) AS "snapshotCount",
COALESCE(ss.snapshot_size, 0) AS "snapshotSize",
COALESCE(bs.blob_count, 0) AS "blobCount",
COALESCE(bs.blob_size, 0) AS "blobSize"
FROM filtered f
LEFT JOIN LATERAL (
SELECT COUNT(*) AS member_count
FROM workspace_user_permissions
WHERE workspace_id = f.id
) ms ON TRUE
LEFT JOIN LATERAL (
SELECT COUNT(*) AS public_page_count
FROM workspace_pages
WHERE workspace_id = f.id AND public = true
) pp ON TRUE
LEFT JOIN LATERAL (
SELECT COUNT(*) AS snapshot_count,
SUM(size) AS snapshot_size
FROM snapshots
WHERE workspace_id = f.id
) ss ON TRUE
LEFT JOIN LATERAL (
SELECT COUNT(*) FILTER (WHERE deleted_at IS NULL AND status = 'completed') AS blob_count,
SUM(size) FILTER (WHERE deleted_at IS NULL AND status = 'completed') AS blob_size
FROM blobs
WHERE workspace_id = f.id
) bs ON TRUE
ORDER BY f."createdAt" DESC
const featuresHaving =
features.length > 0
? Prisma.sql`
HAVING COUNT(
DISTINCT CASE
WHEN wf.name = ANY(${Prisma.sql`${features}::text[]`}) THEN wf.name
END
) = ${features.length}
`
: await this.db.$queryRaw<RawWorkspaceSummary[]>`
WITH feature_set AS (
SELECT workspace_id, array_agg(DISTINCT name) FILTER (WHERE activated) AS features
FROM workspace_features
GROUP BY workspace_id
),
owner AS (
SELECT wur.workspace_id,
u.id AS owner_id,
u.name AS owner_name,
u.email AS owner_email,
u.avatar_url AS owner_avatar_url
FROM workspace_user_permissions AS wur
JOIN users u ON wur.user_id = u.id
WHERE wur.type = ${WorkspaceRole.Owner}
AND wur.status = ${Prisma.sql`${WorkspaceMemberStatus.Accepted}::"WorkspaceMemberStatus"`}
),
filtered AS (
SELECT w.id,
: Prisma.empty;
const featureJoin =
features.length > 0
? Prisma.sql`
LEFT JOIN workspace_features wf
ON wf.workspace_id = w.id AND wf.activated = TRUE
`
: Prisma.empty;
const groupAndHaving =
features.length > 0
? Prisma.sql`
GROUP BY w.id,
w.public,
w.created_at AS "createdAt",
w.created_at,
w.name,
w.avatar_key AS "avatarKey",
w.enable_ai AS "enableAi",
w.enable_url_preview AS "enableUrlPreview",
w.enable_doc_embedding AS "enableDocEmbedding",
COALESCE(fs.features, ARRAY[]::text[]) AS features,
o.owner_id AS "ownerId",
o.owner_name AS "ownerName",
o.owner_email AS "ownerEmail",
o.owner_avatar_url AS "ownerAvatarUrl"
FROM workspaces w
LEFT JOIN feature_set fs ON fs.workspace_id = w.id
LEFT JOIN owner o ON o.workspace_id = w.id
WHERE ${
keyword
? Prisma.sql`
(
w.id ILIKE ${'%' + keyword + '%'}
OR o.owner_id ILIKE ${'%' + keyword + '%'}
OR o.owner_email ILIKE ${'%' + keyword + '%'}
)
`
: Prisma.sql`TRUE`
}
AND ${
features.length
? Prisma.sql`CAST(COALESCE(fs.features, ARRAY[]::text[]) AS text[]) @> ${Prisma.sql`${features}::text[]`}`
: Prisma.sql`TRUE`
}
),
snapshot_stats AS (
SELECT workspace_id,
SUM(size) AS snapshot_size,
COUNT(*) AS snapshot_count
FROM snapshots
WHERE workspace_id IN (SELECT id FROM filtered)
GROUP BY workspace_id
),
blob_stats AS (
SELECT workspace_id,
SUM(size) FILTER (WHERE deleted_at IS NULL AND status = 'completed') AS blob_size,
COUNT(*) FILTER (WHERE deleted_at IS NULL AND status = 'completed') AS blob_count
FROM blobs
WHERE workspace_id IN (SELECT id FROM filtered)
GROUP BY workspace_id
),
member_stats AS (
SELECT workspace_id, COUNT(*) AS member_count
FROM workspace_user_permissions
WHERE workspace_id IN (SELECT id FROM filtered)
GROUP BY workspace_id
),
public_pages AS (
SELECT workspace_id, COUNT(*) AS public_page_count
FROM workspace_pages
WHERE public = true AND workspace_id IN (SELECT id FROM filtered)
GROUP BY workspace_id
)
SELECT f.*,
COALESCE(ms.member_count, 0) AS "memberCount",
COALESCE(pp.public_page_count, 0) AS "publicPageCount",
COALESCE(ss.snapshot_count, 0) AS "snapshotCount",
COALESCE(ss.snapshot_size, 0) AS "snapshotSize",
COALESCE(bs.blob_count, 0) AS "blobCount",
COALESCE(bs.blob_size, 0) AS "blobSize"
FROM filtered f
LEFT JOIN snapshot_stats ss ON ss.workspace_id = f.id
LEFT JOIN blob_stats bs ON bs.workspace_id = f.id
LEFT JOIN member_stats ms ON ms.workspace_id = f.id
LEFT JOIN public_pages pp ON pp.workspace_id = f.id
ORDER BY ${Prisma.raw(this.buildAdminOrder(options.order))}
LIMIT ${options.first}
OFFSET ${options.skip}
`;
w.avatar_key,
w.enable_ai,
w.enable_url_preview,
w.enable_doc_embedding,
o.owner_id,
o.owner_name,
o.owner_email,
o.owner_avatar_url
${featuresHaving}
`
: Prisma.empty;
const rows = await this.db.$queryRaw<RawWorkspaceSummary[]>`
WITH filtered AS (
SELECT w.id,
w.public,
w.created_at AS "createdAt",
w.name,
w.avatar_key AS "avatarKey",
w.enable_ai AS "enableAi",
w.enable_sharing AS "enableSharing",
w.enable_url_preview AS "enableUrlPreview",
w.enable_doc_embedding AS "enableDocEmbedding",
o.owner_id AS "ownerId",
o.owner_name AS "ownerName",
o.owner_email AS "ownerEmail",
o.owner_avatar_url AS "ownerAvatarUrl"
FROM workspaces w
LEFT JOIN LATERAL (
SELECT u.id AS owner_id,
u.name AS owner_name,
u.email AS owner_email,
u.avatar_url AS owner_avatar_url
FROM workspace_user_permissions AS wur
JOIN users u ON wur.user_id = u.id
WHERE wur.workspace_id = w.id
AND wur.type = ${WorkspaceRole.Owner}
AND wur.status = ${Prisma.sql`${WorkspaceMemberStatus.Accepted}::"WorkspaceMemberStatus"`}
ORDER BY u.created_at ASC
LIMIT 1
) o ON TRUE
${featureJoin}
WHERE ${
keyword
? Prisma.sql`
(
w.id ILIKE ${'%' + keyword + '%'}
OR o.owner_id ILIKE ${'%' + keyword + '%'}
OR o.owner_email ILIKE ${'%' + keyword + '%'}
)
`
: Prisma.sql`TRUE`
}
${
this.buildAdminFlagWhere(flags).length
? Prisma.sql`AND ${Prisma.join(
this.buildAdminFlagWhere(flags),
' AND '
)}`
: Prisma.empty
}
${groupAndHaving}
)
SELECT f.*,
COALESCE(s.snapshot_count, 0) AS "snapshotCount",
COALESCE(s.snapshot_size, 0) AS "snapshotSize",
COALESCE(s.blob_count, 0) AS "blobCount",
COALESCE(s.blob_size, 0) AS "blobSize",
COALESCE(s.member_count, 0) AS "memberCount",
COALESCE(s.public_page_count, 0) AS "publicPageCount",
COALESCE(s.features, ARRAY[]::text[]) AS features
FROM filtered f
LEFT JOIN workspace_admin_stats s ON s.workspace_id = f.id
ORDER BY ${Prisma.raw(this.buildAdminOrder(options.order))}
LIMIT ${options.first}
OFFSET ${options.skip}
`;
const mapped = rows.map(row => ({
id: row.id,
@@ -389,6 +332,7 @@ export class WorkspaceModel extends BaseModel {
name: row.name,
avatarKey: row.avatarKey,
enableAi: row.enableAi,
enableSharing: row.enableSharing,
enableUrlPreview: row.enableUrlPreview,
enableDocEmbedding: row.enableDocEmbedding,
memberCount: Number(row.memberCount ?? 0),
@@ -414,60 +358,144 @@ export class WorkspaceModel extends BaseModel {
async adminCountWorkspaces(options: {
keyword?: string | null;
features?: WorkspaceFeatureName[] | null;
flags?: {
public?: boolean;
enableAi?: boolean;
enableSharing?: boolean;
enableUrlPreview?: boolean;
enableDocEmbedding?: boolean;
};
}) {
const keyword = options.keyword?.trim();
const features = options.features ?? [];
const flags = options.flags ?? {};
const featuresHaving =
features.length > 0
? Prisma.sql`
HAVING COUNT(
DISTINCT CASE
WHEN wf.name = ANY(${Prisma.sql`${features}::text[]`}) THEN wf.name
END
) = ${features.length}
`
: Prisma.empty;
const featureJoin =
features.length > 0
? Prisma.sql`
LEFT JOIN workspace_features wf
ON wf.workspace_id = w.id AND wf.activated = TRUE
`
: Prisma.empty;
const groupAndHaving =
features.length > 0
? Prisma.sql`
GROUP BY w.id, o.owner_id, o.owner_email
${featuresHaving}
`
: Prisma.empty;
const [row] = await this.db.$queryRaw<{ total: bigint | number }[]>`
WITH feature_set AS (
SELECT workspace_id, array_agg(DISTINCT name) FILTER (WHERE activated) AS features
FROM workspace_features
GROUP BY workspace_id
),
owner AS (
SELECT wur.workspace_id,
u.id AS owner_id,
u.email AS owner_email
FROM workspace_user_permissions AS wur
JOIN users u ON wur.user_id = u.id
WHERE wur.type = ${WorkspaceRole.Owner}
AND wur.status = ${Prisma.sql`${WorkspaceMemberStatus.Accepted}::"WorkspaceMemberStatus"`}
WITH filtered AS (
SELECT w.id,
o.owner_id AS "ownerId",
o.owner_email AS "ownerEmail"
FROM workspaces w
LEFT JOIN LATERAL (
SELECT wur.workspace_id,
u.id AS owner_id,
u.email AS owner_email
FROM workspace_user_permissions AS wur
JOIN users u ON wur.user_id = u.id
WHERE wur.workspace_id = w.id
AND wur.type = ${WorkspaceRole.Owner}
AND wur.status = ${Prisma.sql`${WorkspaceMemberStatus.Accepted}::"WorkspaceMemberStatus"`}
ORDER BY u.created_at ASC
LIMIT 1
) o ON TRUE
${featureJoin}
WHERE ${
keyword
? Prisma.sql`
(
w.id ILIKE ${'%' + keyword + '%'}
OR o.owner_id ILIKE ${'%' + keyword + '%'}
OR o.owner_email ILIKE ${'%' + keyword + '%'}
)
`
: Prisma.sql`TRUE`
}
${
this.buildAdminFlagWhere(flags).length
? Prisma.sql`AND ${Prisma.join(
this.buildAdminFlagWhere(flags),
' AND '
)}`
: Prisma.empty
}
${groupAndHaving}
)
SELECT COUNT(*) AS total
FROM workspaces w
LEFT JOIN feature_set fs ON fs.workspace_id = w.id
LEFT JOIN owner o ON o.workspace_id = w.id
WHERE ${
keyword
? Prisma.sql`
(
w.id ILIKE ${'%' + keyword + '%'}
OR o.owner_id ILIKE ${'%' + keyword + '%'}
OR o.owner_email ILIKE ${'%' + keyword + '%'}
)
`
: Prisma.sql`TRUE`
}
AND ${
features.length
? Prisma.sql`CAST(COALESCE(fs.features, ARRAY[]::text[]) AS text[]) @> ${Prisma.sql`${features}::text[]`}`
: Prisma.sql`TRUE`
}
SELECT COUNT(*) AS total FROM filtered
`;
return row?.total ? Number(row.total) : 0;
}
private buildAdminFlagWhere(flags: {
public?: boolean;
enableAi?: boolean;
enableSharing?: boolean;
enableUrlPreview?: boolean;
enableDocEmbedding?: boolean;
}) {
const conditions: Prisma.Sql[] = [];
if (flags.public !== undefined) {
conditions.push(Prisma.sql`w.public = ${flags.public}`);
}
if (flags.enableAi !== undefined) {
conditions.push(Prisma.sql`w.enable_ai = ${flags.enableAi}`);
}
if (flags.enableSharing !== undefined) {
conditions.push(Prisma.sql`w.enable_sharing = ${flags.enableSharing}`);
}
if (flags.enableUrlPreview !== undefined) {
conditions.push(
Prisma.sql`w.enable_url_preview = ${flags.enableUrlPreview}`
);
}
if (flags.enableDocEmbedding !== undefined) {
conditions.push(
Prisma.sql`w.enable_doc_embedding = ${flags.enableDocEmbedding}`
);
}
return conditions;
}
private buildAdminOrder(
order?:
| 'createdAt'
| 'snapshotSize'
| 'blobCount'
| 'blobSize'
| 'snapshotCount'
| 'memberCount'
| 'publicPageCount'
) {
switch (order) {
case 'snapshotSize':
return `"snapshotSize" DESC NULLS LAST`;
return `"snapshotSize" DESC NULLS LAST, "createdAt" DESC`;
case 'blobCount':
return `"blobCount" DESC NULLS LAST`;
return `"blobCount" DESC NULLS LAST, "createdAt" DESC`;
case 'blobSize':
return `"blobSize" DESC NULLS LAST`;
return `"blobSize" DESC NULLS LAST, "createdAt" DESC`;
case 'snapshotCount':
return `"snapshotCount" DESC NULLS LAST, "createdAt" DESC`;
case 'memberCount':
return `"memberCount" DESC NULLS LAST, "createdAt" DESC`;
case 'publicPageCount':
return `"publicPageCount" DESC NULLS LAST, "createdAt" DESC`;
case 'createdAt':
default:
return `"createdAt" DESC`;

View File

@@ -35,6 +35,7 @@ input AdminUpdateWorkspaceInput {
avatarKey: String
enableAi: Boolean
enableDocEmbedding: Boolean
enableSharing: Boolean
enableUrlPreview: Boolean
features: [FeatureType!]
id: String!
@@ -49,6 +50,7 @@ type AdminWorkspace {
createdAt: DateTime!
enableAi: Boolean!
enableDocEmbedding: Boolean!
enableSharing: Boolean!
enableUrlPreview: Boolean!
features: [FeatureType!]!
id: String!
@@ -60,6 +62,7 @@ type AdminWorkspace {
owner: WorkspaceUserType
public: Boolean!
publicPageCount: Int!
sharedLinks: [AdminWorkspaceSharedLink!]!
snapshotCount: Int!
snapshotSize: SafeInt!
}
@@ -73,10 +76,19 @@ type AdminWorkspaceMember {
status: WorkspaceMemberStatus!
}
type AdminWorkspaceSharedLink {
docId: String!
publishedAt: DateTime
title: String
}
enum AdminWorkspaceSort {
BlobCount
BlobSize
CreatedAt
MemberCount
PublicPageCount
SnapshotCount
SnapshotSize
}
@@ -1216,10 +1228,15 @@ input ListUserInput {
}
input ListWorkspaceInput {
enableAi: Boolean
enableDocEmbedding: Boolean
enableSharing: Boolean
enableUrlPreview: Boolean
features: [FeatureType!]
first: Int! = 20
keyword: String
orderBy: AdminWorkspaceSort
public: Boolean
skip: Int! = 0
}
@@ -2197,6 +2214,9 @@ input UpdateWorkspaceInput {
"""Enable doc embedding"""
enableDocEmbedding: Boolean
"""Enable workspace sharing"""
enableSharing: Boolean
"""Enable url previous when sharing"""
enableUrlPreview: Boolean
id: ID!
@@ -2422,6 +2442,9 @@ type WorkspaceType {
"""Enable doc embedding"""
enableDocEmbedding: Boolean!
"""Enable workspace sharing"""
enableSharing: Boolean!
"""Enable url previous when sharing"""
enableUrlPreview: Boolean!
histories(before: DateTime, guid: String!, take: Int): [DocHistoryType!]!
@@ -2463,7 +2486,6 @@ type WorkspaceType {
"""Get public page of a workspace by page id."""
publicPage(pageId: String!): DocType @deprecated(reason: "use [WorkspaceType.doc] instead")
publicPages: [DocType!]! @deprecated(reason: "use [WorkspaceType.publicDocs] instead")
"""quota of workspace"""
quota: WorkspaceQuotaType!

View File

@@ -6,6 +6,7 @@ mutation adminUpdateWorkspace($input: AdminUpdateWorkspaceInput!) {
name
avatarKey
enableAi
enableSharing
enableUrlPreview
enableDocEmbedding
features

View File

@@ -11,6 +11,7 @@ query adminWorkspace(
name
avatarKey
enableAi
enableSharing
enableUrlPreview
enableDocEmbedding
features
@@ -26,6 +27,11 @@ query adminWorkspace(
snapshotSize
blobCount
blobSize
sharedLinks {
docId
title
publishedAt
}
members(skip: $memberSkip, take: $memberTake, query: $memberQuery) {
id
name

View File

@@ -6,6 +6,7 @@ query adminWorkspaces($filter: ListWorkspaceInput!) {
name
avatarKey
enableAi
enableSharing
enableUrlPreview
enableDocEmbedding
features

View File

@@ -145,6 +145,7 @@ export const adminUpdateWorkspaceMutation = {
name
avatarKey
enableAi
enableSharing
enableUrlPreview
enableDocEmbedding
features
@@ -175,6 +176,7 @@ export const adminWorkspaceQuery = {
name
avatarKey
enableAi
enableSharing
enableUrlPreview
enableDocEmbedding
features
@@ -190,6 +192,11 @@ export const adminWorkspaceQuery = {
snapshotSize
blobCount
blobSize
sharedLinks {
docId
title
publishedAt
}
members(skip: $memberSkip, take: $memberTake, query: $memberQuery) {
id
name
@@ -213,6 +220,7 @@ export const adminWorkspacesQuery = {
name
avatarKey
enableAi
enableSharing
enableUrlPreview
enableDocEmbedding
features
@@ -2540,6 +2548,7 @@ export const getWorkspaceConfigQuery = {
query: `query getWorkspaceConfig($id: String!) {
workspace(id: $id) {
enableAi
enableSharing
enableUrlPreview
enableDocEmbedding
inviteLink {
@@ -2570,6 +2579,16 @@ export const setEnableDocEmbeddingMutation = {
}`,
};
export const setEnableSharingMutation = {
id: 'setEnableSharingMutation' as const,
op: 'setEnableSharing',
query: `mutation setEnableSharing($id: ID!, $enableSharing: Boolean!) {
updateWorkspace(input: {id: $id, enableSharing: $enableSharing}) {
id
}
}`,
};
export const setEnableUrlPreviewMutation = {
id: 'setEnableUrlPreviewMutation' as const,
op: 'setEnableUrlPreview',

View File

@@ -1,6 +1,7 @@
query getWorkspaceConfig($id: String!) {
workspace(id: $id) {
enableAi
enableSharing
enableUrlPreview
enableDocEmbedding
inviteLink {

View File

@@ -0,0 +1,5 @@
mutation setEnableSharing($id: ID!, $enableSharing: Boolean!) {
updateWorkspace(input: { id: $id, enableSharing: $enableSharing }) {
id
}
}

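For reference, the generated setEnableSharingMutation issues the same operation the server-side test helper uses; a hedged caller sketch against that helper (`app.gql` is the TestingApp GraphQL utility seen earlier, frontend code goes through its own GraphQL service):
const res = await app.gql(`
  mutation {
    updateWorkspace(input: { id: "${workspace.id}", enableSharing: false }) {
      id
      enableSharing
    }
  }
`);
// res.updateWorkspace.enableSharing === false once the flag is persisted.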
View File

@@ -71,6 +71,7 @@ export interface AdminUpdateWorkspaceInput {
avatarKey?: InputMaybe<Scalars['String']['input']>;
enableAi?: InputMaybe<Scalars['Boolean']['input']>;
enableDocEmbedding?: InputMaybe<Scalars['Boolean']['input']>;
enableSharing?: InputMaybe<Scalars['Boolean']['input']>;
enableUrlPreview?: InputMaybe<Scalars['Boolean']['input']>;
features?: InputMaybe<Array<FeatureType>>;
id: Scalars['String']['input'];
@@ -86,6 +87,7 @@ export interface AdminWorkspace {
createdAt: Scalars['DateTime']['output'];
enableAi: Scalars['Boolean']['output'];
enableDocEmbedding: Scalars['Boolean']['output'];
enableSharing: Scalars['Boolean']['output'];
enableUrlPreview: Scalars['Boolean']['output'];
features: Array<FeatureType>;
id: Scalars['String']['output'];
@@ -96,6 +98,7 @@ export interface AdminWorkspace {
owner: Maybe<WorkspaceUserType>;
public: Scalars['Boolean']['output'];
publicPageCount: Scalars['Int']['output'];
sharedLinks: Array<AdminWorkspaceSharedLink>;
snapshotCount: Scalars['Int']['output'];
snapshotSize: Scalars['SafeInt']['output'];
}
@@ -116,10 +119,20 @@ export interface AdminWorkspaceMember {
status: WorkspaceMemberStatus;
}
export interface AdminWorkspaceSharedLink {
__typename?: 'AdminWorkspaceSharedLink';
docId: Scalars['String']['output'];
publishedAt: Maybe<Scalars['DateTime']['output']>;
title: Maybe<Scalars['String']['output']>;
}
export enum AdminWorkspaceSort {
BlobCount = 'BlobCount',
BlobSize = 'BlobSize',
CreatedAt = 'CreatedAt',
MemberCount = 'MemberCount',
PublicPageCount = 'PublicPageCount',
SnapshotCount = 'SnapshotCount',
SnapshotSize = 'SnapshotSize',
}
@@ -1400,10 +1413,15 @@ export interface ListUserInput {
}
export interface ListWorkspaceInput {
enableAi?: InputMaybe<Scalars['Boolean']['input']>;
enableDocEmbedding?: InputMaybe<Scalars['Boolean']['input']>;
enableSharing?: InputMaybe<Scalars['Boolean']['input']>;
enableUrlPreview?: InputMaybe<Scalars['Boolean']['input']>;
features?: InputMaybe<Array<FeatureType>>;
first?: Scalars['Int']['input'];
keyword?: InputMaybe<Scalars['String']['input']>;
orderBy?: InputMaybe<AdminWorkspaceSort>;
public?: InputMaybe<Scalars['Boolean']['input']>;
skip?: Scalars['Int']['input'];
}
@@ -2870,6 +2888,8 @@ export interface UpdateWorkspaceInput {
enableAi?: InputMaybe<Scalars['Boolean']['input']>;
/** Enable doc embedding */
enableDocEmbedding?: InputMaybe<Scalars['Boolean']['input']>;
/** Enable workspace sharing */
enableSharing?: InputMaybe<Scalars['Boolean']['input']>;
/** Enable url preview when sharing */
enableUrlPreview?: InputMaybe<Scalars['Boolean']['input']>;
id: Scalars['ID']['input'];
@@ -3104,6 +3124,8 @@ export interface WorkspaceType {
enableAi: Scalars['Boolean']['output'];
/** Enable doc embedding */
enableDocEmbedding: Scalars['Boolean']['output'];
/** Enable workspace sharing */
enableSharing: Scalars['Boolean']['output'];
/** Enable url preview when sharing */
enableUrlPreview: Scalars['Boolean']['output'];
histories: Array<DocHistoryType>;
@@ -3139,8 +3161,6 @@ export interface WorkspaceType {
* @deprecated use [WorkspaceType.doc] instead
*/
publicPage: Maybe<DocType>;
/** @deprecated use [WorkspaceType.publicDocs] instead */
publicPages: Array<DocType>;
/** quota of workspace */
quota: WorkspaceQuotaType;
/** Get recently updated docs of a workspace */
@@ -3323,6 +3343,7 @@ export type AdminUpdateWorkspaceMutation = {
name: string | null;
avatarKey: string | null;
enableAi: boolean;
enableSharing: boolean;
enableUrlPreview: boolean;
enableDocEmbedding: boolean;
features: Array<FeatureType>;
@@ -3359,6 +3380,7 @@ export type AdminWorkspaceQuery = {
name: string | null;
avatarKey: string | null;
enableAi: boolean;
enableSharing: boolean;
enableUrlPreview: boolean;
enableDocEmbedding: boolean;
features: Array<FeatureType>;
@@ -3375,6 +3397,12 @@ export type AdminWorkspaceQuery = {
email: string;
avatarUrl: string | null;
} | null;
sharedLinks: Array<{
__typename?: 'AdminWorkspaceSharedLink';
docId: string;
title: string | null;
publishedAt: string | null;
}>;
members: Array<{
__typename?: 'AdminWorkspaceMember';
id: string;
@@ -3401,6 +3429,7 @@ export type AdminWorkspacesQuery = {
name: string | null;
avatarKey: string | null;
enableAi: boolean;
enableSharing: boolean;
enableUrlPreview: boolean;
enableDocEmbedding: boolean;
features: Array<FeatureType>;
@@ -6519,6 +6548,7 @@ export type GetWorkspaceConfigQuery = {
workspace: {
__typename?: 'WorkspaceType';
enableAi: boolean;
enableSharing: boolean;
enableUrlPreview: boolean;
enableDocEmbedding: boolean;
inviteLink: {
@@ -6549,6 +6579,16 @@ export type SetEnableDocEmbeddingMutation = {
updateWorkspace: { __typename?: 'WorkspaceType'; id: string };
};
export type SetEnableSharingMutationVariables = Exact<{
id: Scalars['ID']['input'];
enableSharing: Scalars['Boolean']['input'];
}>;
export type SetEnableSharingMutation = {
__typename?: 'Mutation';
updateWorkspace: { __typename?: 'WorkspaceType'; id: string };
};
export type SetEnableUrlPreviewMutationVariables = Exact<{
id: Scalars['ID']['input'];
enableUrlPreview: Scalars['Boolean']['input'];
@@ -7572,6 +7612,11 @@ export type Mutations =
variables: SetEnableDocEmbeddingMutationVariables;
response: SetEnableDocEmbeddingMutation;
}
| {
name: 'setEnableSharingMutation';
variables: SetEnableSharingMutationVariables;
response: SetEnableSharingMutation;
}
| {
name: 'setEnableUrlPreviewMutation';
variables: SetEnableUrlPreviewMutationVariables;

View File

@@ -25,7 +25,8 @@ interface CloudBlobStorageOptions {
id: string;
}
const SHOULD_MANUAL_REDIRECT =
BUILD_CONFIG.isAndroid || BUILD_CONFIG.isIOS || BUILD_CONFIG.isElectron;
const UPLOAD_REQUEST_TIMEOUT = 0;
export class CloudBlobStorage extends BlobStorageBase {

Submodule packages/common/y-octo/yjs deleted from 7126035d1b

View File

@@ -9,6 +9,7 @@
"@affine/graphql": "workspace:*",
"@affine/routes": "workspace:*",
"@blocksuite/icons": "^2.2.17",
"@queuedash/ui": "^3.14.0",
"@radix-ui/react-accordion": "^1.2.2",
"@radix-ui/react-alert-dialog": "^1.1.3",
"@radix-ui/react-aspect-ratio": "^1.1.1",

View File

@@ -26,6 +26,9 @@ export const Accounts = lazy(
export const Workspaces = lazy(
() => import(/* webpackChunkName: "workspaces" */ './modules/workspaces')
);
export const Queue = lazy(
() => import(/* webpackChunkName: "queue" */ './modules/queue')
);
export const AI = lazy(
() => import(/* webpackChunkName: "ai" */ './modules/ai')
);
@@ -98,6 +101,7 @@ export const App = () => {
path={ROUTES.admin.workspaces}
element={<Workspaces />}
/>
<Route path={`${ROUTES.admin.queue}/*`} element={<Queue />} />
<Route path={ROUTES.admin.ai} element={<AI />} />
<Route path={ROUTES.admin.about} element={<About />} />
<Route

View File

@@ -17,10 +17,12 @@ import { useCallback, useTransition } from 'react';
interface DataTablePaginationProps<TData> {
table: Table<TData>;
disabled?: boolean;
}
export function DataTablePagination<TData>({
table,
disabled = false,
}: DataTablePaginationProps<TData>) {
const [, startTransition] = useTransition();
@@ -63,6 +65,7 @@ export function DataTablePagination<TData>({
<Select
value={`${table.getState().pagination.pageSize}`}
onValueChange={onPageSizeChange}
disabled={disabled}
>
<SelectTrigger className="h-8 w-[70px]">
<SelectValue placeholder={table.getState().pagination.pageSize} />
@@ -86,7 +89,7 @@ export function DataTablePagination<TData>({
variant="outline"
className="hidden h-8 w-8 p-0 lg:flex"
onClick={handleFirstPage}
disabled={disabled || !table.getCanPreviousPage()}
>
<span className="sr-only">Go to first page</span>
<ChevronsLeftIcon className="h-4 w-4" />
@@ -95,7 +98,7 @@ export function DataTablePagination<TData>({
variant="outline"
className="h-8 w-8 p-0"
onClick={handlePreviousPage}
disabled={disabled || !table.getCanPreviousPage()}
>
<span className="sr-only">Go to previous page</span>
<ChevronLeftIcon className="h-4 w-4" />
@@ -104,7 +107,7 @@ export function DataTablePagination<TData>({
variant="outline"
className="h-8 w-8 p-0"
onClick={handleNextPage}
disabled={disabled || !table.getCanNextPage()}
>
<span className="sr-only">Go to next page</span>
<ChevronRightIcon className="h-4 w-4" />
@@ -113,7 +116,7 @@ export function DataTablePagination<TData>({
variant="outline"
className="h-8 w-8 p-0"
onClick={handleLastPage}
disabled={disabled || !table.getCanNextPage()}
>
<span className="sr-only">Go to last page</span>
<ChevronsRightIcon className="h-4 w-4" />

View File

@@ -27,6 +27,8 @@ interface DataTableProps<TData, TValue> {
totalCount: number;
pagination: PaginationState;
onPaginationChange: OnChangeFn<PaginationState>;
loading?: boolean;
disablePagination?: boolean;
// Row Selection
rowSelection?: RowSelectionState;
@@ -51,6 +53,8 @@ export function SharedDataTable<TData extends { id: string }, TValue>({
totalCount,
pagination,
onPaginationChange,
loading = false,
disablePagination = false,
rowSelection,
onRowSelectionChange,
renderToolbar,
@@ -83,9 +87,34 @@ export function SharedDataTable<TData extends { id: string }, TValue>({
});
return (
<div className="flex flex-col gap-4 py-5 px-6 h-full overflow-auto">
<div className="flex flex-col gap-4 py-5 px-6 h-full overflow-auto relative">
{renderToolbar?.(table)}
<div className="rounded-md border h-full flex flex-col overflow-auto">
<div className="rounded-md border h-full flex flex-col overflow-auto relative">
{loading ? (
<div className="absolute inset-0 z-10 bg-gray-50/70 backdrop-blur-[1px] flex flex-col items-center justify-center gap-2 text-sm text-gray-600">
<svg
className="h-5 w-5 animate-spin text-gray-500"
viewBox="0 0 24 24"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<circle
className="opacity-25"
cx="12"
cy="12"
r="10"
stroke="currentColor"
strokeWidth="4"
/>
<path
className="opacity-75"
fill="currentColor"
d="M4 12a8 8 0 018-8v4a4 4 0 00-4 4H4z"
/>
</svg>
<span>Loading...</span>
</div>
) : null}
<Table>
<TableHeader>
{table.getHeaderGroups().map(headerGroup => (
@@ -160,7 +189,10 @@ export function SharedDataTable<TData extends { id: string }, TValue>({
</Table>
</div>
</div>
<DataTablePagination
table={table}
disabled={disablePagination || loading}
/>
</div>
);
}

View File

@@ -14,6 +14,7 @@ type FeatureFilterPopoverProps = {
onChange: (features: FeatureType[]) => void;
align?: 'start' | 'center' | 'end';
buttonLabel?: string;
disabled?: boolean;
};
export const FeatureFilterPopover = ({
@@ -22,29 +23,37 @@ export const FeatureFilterPopover = ({
onChange,
align = 'start',
buttonLabel = 'Features',
disabled = false,
}: FeatureFilterPopoverProps) => {
const handleFeatureToggle = useCallback(
(feature: FeatureType, checked: boolean) => {
if (disabled) {
return;
}
if (checked) {
onChange([...new Set([...selectedFeatures, feature])]);
} else {
onChange(selectedFeatures.filter(enabled => enabled !== feature));
}
},
[disabled, onChange, selectedFeatures]
);
const handleClearFeatures = useCallback(() => {
if (disabled) {
return;
}
onChange([]);
}, [disabled, onChange]);
return (
<Popover open={disabled ? false : undefined}>
<PopoverTrigger asChild>
<Button
variant="outline"
size="sm"
className="h-8 px-2 lg:px-3 space-x-1"
disabled={disabled}
>
<span>{buttonLabel}</span>
{selectedFeatures.length > 0 ? (
@@ -70,6 +79,7 @@ export const FeatureFilterPopover = ({
onCheckedChange={checked =>
handleFeatureToggle(feature, !!checked)
}
disabled={disabled}
/>
<span className="text-sm truncate">{feature}</span>
</label>
@@ -80,7 +90,7 @@ export const FeatureFilterPopover = ({
variant="ghost"
size="sm"
onClick={handleClearFeatures}
disabled={disabled || selectedFeatures.length === 0}
>
Clear
</Button>

View File

@@ -2,7 +2,7 @@ import { buttonVariants } from '@affine/admin/components/ui/button';
import { cn } from '@affine/admin/utils';
import { AccountIcon, SelfhostIcon } from '@blocksuite/icons/rc';
import { cssVarV2 } from '@toeverything/theme/v2';
import { LayoutDashboardIcon, ListChecksIcon } from 'lucide-react';
import { NavLink } from 'react-router-dom';
import { ServerVersion } from './server-version';
@@ -97,6 +97,12 @@ export function Nav({ isCollapsed = false }: NavProps) {
label="Workspaces"
isCollapsed={isCollapsed}
/>
<NavItem
to="/admin/queue"
icon={<ListChecksIcon size={18} />}
label="Queue"
isCollapsed={isCollapsed}
/>
{/* <NavItem
to="/admin/ai"
icon={<AiOutlineIcon fontSize={20} />}

View File

@@ -0,0 +1,23 @@
// eslint-disable-next-line @typescript-eslint/no-restricted-imports
import '@queuedash/ui/dist/styles.css';
import './queue.css';
import { QueueDashApp } from '@queuedash/ui';
import { Header } from '../header';
export function QueuePage() {
return (
<div className="h-screen flex-1 flex-col flex overflow-hidden">
<Header title="Queue" />
<div className="flex-1 overflow-hidden">
<QueueDashApp
apiUrl={`${environment.subPath}/api/queue/trpc`}
basename="/admin/queue"
/>
</div>
</div>
);
}
export { QueuePage as Component };

View File

@@ -0,0 +1,5 @@
/* Scoped queuedash modal alignment */
.react-aria-ModalOverlay section[role='dialog'] {
transform: unset;
}

View File

@@ -154,7 +154,7 @@ export const useColumns = () => {
{
id: 'actions',
meta: {
className: 'w-[190px] justify-end',
},
header: () => (
<div className="text-xs font-medium text-right">Actions</div>

View File

@@ -1,11 +1,12 @@
import { Button } from '@affine/admin/components/ui/button';
import { EditIcon, LinkIcon } from '@blocksuite/icons/rc';
import { useCallback, useState } from 'react';
import { DiscardChanges } from '../../../components/shared/discard-changes';
import { useRightPanel } from '../../panel/context';
import type { WorkspaceListItem } from '../schema';
import { WorkspacePanel } from './workspace-panel';
import { WorkspaceSharedLinksPanel } from './workspace-shared-links-panel';
export function DataTableRowActions({
workspace,
@@ -13,6 +14,9 @@ export function DataTableRowActions({
workspace: WorkspaceListItem;
}) {
const [discardDialogOpen, setDiscardDialogOpen] = useState(false);
const [pendingAction, setPendingAction] = useState<
'edit' | 'sharedLinks' | null
>(null);
const {
setPanelContent,
openPanel,
@@ -22,7 +26,7 @@ export function DataTableRowActions({
setHasDirtyChanges,
} = useRightPanel();
const openWorkspacePanel = useCallback(() => {
setHasDirtyChanges(false);
setPanelContent(
<WorkspacePanel workspaceId={workspace.id} onClose={closePanel} />
@@ -39,35 +43,89 @@ export function DataTableRowActions({
workspace.id,
]);
const openSharedLinksPanel = useCallback(() => {
setHasDirtyChanges(false);
setPanelContent(
<WorkspaceSharedLinksPanel
workspaceId={workspace.id}
onClose={closePanel}
/>
);
if (!isOpen) {
openPanel();
}
}, [
closePanel,
isOpen,
openPanel,
setHasDirtyChanges,
setPanelContent,
workspace.id,
]);
const handleEdit = useCallback(() => {
if (hasDirtyChanges) {
setPendingAction('edit');
setDiscardDialogOpen(true);
return;
}
openWorkspacePanel();
}, [hasDirtyChanges, openWorkspacePanel]);
const handleSharedLinks = useCallback(() => {
if (hasDirtyChanges) {
setPendingAction('sharedLinks');
setDiscardDialogOpen(true);
return;
}
openSharedLinksPanel();
}, [hasDirtyChanges, openSharedLinksPanel]);
const handleDiscardConfirm = useCallback(() => {
setDiscardDialogOpen(false);
setHasDirtyChanges(false);
if (pendingAction === 'sharedLinks') {
openSharedLinksPanel();
} else {
openWorkspacePanel();
}
setPendingAction(null);
}, [
openSharedLinksPanel,
openWorkspacePanel,
pendingAction,
setHasDirtyChanges,
]);
return (
<>
<div className="flex gap-2">
<Button
variant="ghost"
size="sm"
className="px-2 h-8 flex items-center gap-2"
onClick={handleEdit}
>
<EditIcon fontSize={18} />
<span>Edit</span>
</Button>
<Button
variant="ghost"
size="sm"
className="px-2 h-8 flex items-center gap-2"
onClick={handleSharedLinks}
>
<LinkIcon fontSize={18} />
<span>Shared links</span>
</Button>
</div>
<DiscardChanges
open={discardDialogOpen}
onOpenChange={setDiscardDialogOpen}
onClose={() => {
setDiscardDialogOpen(false);
setPendingAction(null);
}}
onConfirm={handleDiscardConfirm}
description="Changes to this workspace will not be saved."
/>

View File

@@ -18,6 +18,7 @@ import {
} from '../../../components/ui/popover';
import { useDebouncedValue } from '../../../hooks/use-debounced-value';
import { useServerConfig } from '../../common';
import type { WorkspaceFlagFilter } from '../schema';
interface DataTableToolbarProps<TData> {
table?: Table<TData>;
@@ -25,15 +26,21 @@ interface DataTableToolbarProps<TData> {
onKeywordChange: (keyword: string) => void;
selectedFeatures: FeatureType[];
onFeaturesChange: (features: FeatureType[]) => void;
flags: WorkspaceFlagFilter;
onFlagsChange: (flags: WorkspaceFlagFilter) => void;
sort: AdminWorkspaceSort | undefined;
onSortChange: (sort: AdminWorkspaceSort | undefined) => void;
disabled?: boolean;
}
const sortOptions: { value: AdminWorkspaceSort; label: string }[] = [
{ value: AdminWorkspaceSort.SnapshotSize, label: 'Snapshot size' },
{ value: AdminWorkspaceSort.CreatedAt, label: 'Created time' },
{ value: AdminWorkspaceSort.BlobCount, label: 'Blob count' },
{ value: AdminWorkspaceSort.BlobSize, label: 'Blob size' },
{ value: AdminWorkspaceSort.CreatedAt, label: 'Created time' },
{ value: AdminWorkspaceSort.SnapshotCount, label: 'Snapshot count' },
{ value: AdminWorkspaceSort.SnapshotSize, label: 'Snapshot size' },
{ value: AdminWorkspaceSort.MemberCount, label: 'Member count' },
{ value: AdminWorkspaceSort.PublicPageCount, label: 'Public pages' },
];
export function DataTableToolbar<TData>({
@@ -41,8 +48,11 @@ export function DataTableToolbar<TData>({
onKeywordChange,
selectedFeatures,
onFeaturesChange,
flags,
onFlagsChange,
sort,
onSortChange,
disabled = false,
}: DataTableToolbarProps<TData>) {
const [value, setValue] = useState(keyword);
const debouncedValue = useDebouncedValue(value, 400);
@@ -75,6 +85,35 @@ export function DataTableToolbar<TData>({
[sort]
);
const flagOptions: { key: keyof WorkspaceFlagFilter; label: string }[] = [
{ key: 'public', label: 'Public' },
{ key: 'enableSharing', label: 'Enable sharing' },
{ key: 'enableAi', label: 'Enable AI' },
{ key: 'enableUrlPreview', label: 'Enable URL preview' },
{ key: 'enableDocEmbedding', label: 'Enable doc embedding' },
];
const flagLabel = (value: boolean | undefined) => {
if (value === true) return 'On';
if (value === false) return 'Off';
return 'Any';
};
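// Each click cycles a flag filter through Any (undefined) -> On (true) -> Off (false) -> Any.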
const handleFlagToggle = useCallback(
(key: keyof WorkspaceFlagFilter) => {
const current = flags[key];
const next =
current === undefined ? true : current === true ? false : undefined;
onFlagsChange({ ...flags, [key]: next });
},
[flags, onFlagsChange]
);
const hasFlagFilter = useMemo(
() => Object.values(flags).some(v => v !== undefined),
[flags]
);
return (
<div className="flex items-center justify-between gap-y-2 gap-x-4 flex-wrap">
<FeatureFilterPopover
@@ -82,12 +121,18 @@ export function DataTableToolbar<TData>({
availableFeatures={availableFeatures}
onChange={onFeaturesChange}
align="start"
disabled={disabled}
/>
<div className="flex items-center gap-y-2 flex-wrap justify-end gap-2">
<Popover>
<Popover open={disabled ? false : undefined}>
<PopoverTrigger asChild>
<Button variant="outline" size="sm" className="h-8 px-2 lg:px-3">
<Button
variant="outline"
size="sm"
className="h-8 px-2 lg:px-3"
disabled={disabled}
>
Sort: {selectedSortLabel}
</Button>
</PopoverTrigger>
@@ -99,6 +144,7 @@ export function DataTableToolbar<TData>({
variant="ghost"
className="justify-start"
size="sm"
disabled={disabled}
onClick={() => handleSortChange(option.value)}
>
{option.label}
@@ -107,12 +153,44 @@ export function DataTableToolbar<TData>({
</div>
</PopoverContent>
</Popover>
<Popover open={disabled ? false : undefined}>
<PopoverTrigger asChild>
<Button
variant={hasFlagFilter ? 'secondary' : 'outline'}
size="sm"
className="h-8 px-2 lg:px-3"
disabled={disabled}
>
Flags
</Button>
</PopoverTrigger>
<PopoverContent className="w-[260px] p-2">
<div className="flex flex-col gap-1">
{flagOptions.map(option => (
<Button
key={option.key}
variant="ghost"
className="justify-between"
size="sm"
disabled={disabled}
onClick={() => handleFlagToggle(option.key)}
>
<span>{option.label}</span>
<span className="text-xs text-muted-foreground">
{flagLabel(flags[option.key])}
</span>
</Button>
))}
</div>
</PopoverContent>
</Popover>
<div className="flex">
<Input
placeholder="Search Workspace / Owner"
value={value}
onChange={onValueChange}
className="h-8 w-[150px] lg:w-[250px]"
disabled={disabled}
/>
</div>
</div>

View File

@@ -3,6 +3,7 @@ import type { ColumnDef, PaginationState } from '@tanstack/react-table';
import type { Dispatch, SetStateAction } from 'react';
import { SharedDataTable } from '../../../components/shared/data-table';
import type { WorkspaceFlagFilter } from '../schema';
import { DataTableToolbar } from './data-table-toolbar';
interface DataTableProps<TData, TValue> {
@@ -14,8 +15,11 @@ interface DataTableProps<TData, TValue> {
onKeywordChange: (value: string) => void;
selectedFeatures: FeatureType[];
onFeaturesChange: (features: FeatureType[]) => void;
flags: WorkspaceFlagFilter;
onFlagsChange: Dispatch<SetStateAction<WorkspaceFlagFilter>>;
sort: AdminWorkspaceSort | undefined;
onSortChange: (sort: AdminWorkspaceSort | undefined) => void;
loading?: boolean;
onPaginationChange: Dispatch<
SetStateAction<{
pageIndex: number;
@@ -33,9 +37,12 @@ export function DataTable<TData extends { id: string }, TValue>({
onKeywordChange,
selectedFeatures,
onFeaturesChange,
flags,
onFlagsChange,
sort,
onSortChange,
onPaginationChange,
loading = false,
}: DataTableProps<TData, TValue>) {
return (
<SharedDataTable
@@ -44,7 +51,7 @@ export function DataTable<TData extends { id: string }, TValue>({
totalCount={workspacesCount}
pagination={pagination}
onPaginationChange={onPaginationChange}
resetFiltersDeps={[keyword, selectedFeatures, sort]}
resetFiltersDeps={[keyword, selectedFeatures, sort, flags]}
renderToolbar={table => (
<DataTableToolbar
table={table}
@@ -52,10 +59,15 @@ export function DataTable<TData extends { id: string }, TValue>({
onKeywordChange={onKeywordChange}
selectedFeatures={selectedFeatures}
onFeaturesChange={onFeaturesChange}
flags={flags}
onFlagsChange={onFlagsChange}
sort={sort}
onSortChange={onSortChange}
disabled={loading}
/>
)}
loading={loading}
disablePagination={loading}
/>
);
}

View File

@@ -86,6 +86,7 @@ function WorkspacePanelContent({
flags: {
public: workspace.public,
enableAi: workspace.enableAi,
enableSharing: workspace.enableSharing,
enableUrlPreview: workspace.enableUrlPreview,
enableDocEmbedding: workspace.enableDocEmbedding,
name: workspace.name ?? '',
@@ -110,6 +111,7 @@ function WorkspacePanelContent({
return (
flags.public !== baseline.flags.public ||
flags.enableAi !== baseline.flags.enableAi ||
flags.enableSharing !== baseline.flags.enableSharing ||
flags.enableUrlPreview !== baseline.flags.enableUrlPreview ||
flags.enableDocEmbedding !== baseline.flags.enableDocEmbedding ||
flags.name !== baseline.flags.name ||
@@ -134,6 +136,7 @@ function WorkspacePanelContent({
id: workspace.id,
public: flags.public,
enableAi: flags.enableAi,
enableSharing: flags.enableSharing,
enableUrlPreview: flags.enableUrlPreview,
enableDocEmbedding: flags.enableDocEmbedding,
name: flags.name || null,
@@ -231,6 +234,15 @@ function WorkspacePanelContent({
}
/>
<Separator />
<FlagItem
label="Allow Workspace Sharing"
description="Allow pages in this workspace to be shared publicly"
checked={flags.enableSharing}
onCheckedChange={value =>
setFlags(prev => ({ ...prev, enableSharing: value }))
}
/>
<Separator />
<FlagItem
label="Enable Doc Embedding"
description="Allow document embedding for search"

View File

@@ -0,0 +1,112 @@
import { Separator } from '@affine/admin/components/ui/separator';
import { adminWorkspaceQuery } from '@affine/graphql';
import { cssVarV2 } from '@toeverything/theme/v2';
import { useMemo } from 'react';
import { useQuery } from '../../../use-query';
import { RightPanelHeader } from '../../header';
import type { WorkspaceSharedLink } from '../schema';
export function WorkspaceSharedLinksPanel({
workspaceId,
onClose,
}: {
workspaceId: string;
onClose: () => void;
}) {
const { data } = useQuery({
query: adminWorkspaceQuery,
variables: {
id: workspaceId,
memberSkip: 0,
memberTake: 0,
memberQuery: undefined,
},
});
const workspace = data?.adminWorkspace;
const sharedLinks = useMemo<WorkspaceSharedLink[]>(() => {
const links = workspace?.sharedLinks ?? [];
return [...links].sort((a, b) => {
const aTime = a.publishedAt ? new Date(a.publishedAt).getTime() : 0;
const bTime = b.publishedAt ? new Date(b.publishedAt).getTime() : 0;
return bTime - aTime;
});
}, [workspace?.sharedLinks]);
if (!workspace) {
return (
<div className="flex flex-col h-full">
<RightPanelHeader
title="Shared Links"
handleClose={onClose}
handleConfirm={onClose}
canSave={false}
/>
<div
className="p-6 text-sm"
style={{ color: cssVarV2('text/secondary') }}
>
Workspace not found.
</div>
</div>
);
}
return (
<div className="flex flex-col h-full">
<RightPanelHeader
title="Shared Links"
handleClose={onClose}
handleConfirm={onClose}
canSave={false}
/>
<div className="p-4 flex flex-col gap-3 overflow-y-auto">
{sharedLinks.length === 0 ? (
<div
className="text-sm"
style={{ color: cssVarV2('text/secondary') }}
>
No shared links.
</div>
) : (
<div className="flex flex-col divide-y rounded-md border">
{sharedLinks.map(link => (
<SharedLinkItem key={link.docId} link={link} />
))}
</div>
)}
</div>
</div>
);
}
function SharedLinkItem({ link }: { link: WorkspaceSharedLink }) {
const title = link.title || link.docId;
const sharedDate = formatSharedDate(link.publishedAt);
return (
<div className="flex flex-col gap-1 px-3 py-3">
<div className="text-sm font-medium truncate">{title}</div>
<div className="flex items-center gap-2 text-xs">
<Separator className="h-3" orientation="vertical" />
<span style={{ color: cssVarV2('text/secondary') }}>
Shared on {sharedDate}
</span>
</div>
</div>
);
}
function formatSharedDate(publishedAt?: string | null) {
if (!publishedAt) {
return 'Unknown';
}
const date = new Date(publishedAt);
if (Number.isNaN(date.getTime())) {
return 'Unknown';
}
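// Render the publish date as YYYY-MM-DD (UTC).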
return date.toISOString().slice(0, 10);
}

View File

@@ -4,20 +4,23 @@ import { useState } from 'react';
import { Header } from '../header';
import { useColumns } from './components/columns';
import { DataTable } from './components/data-table';
import type { WorkspaceFlagFilter } from './schema';
import { useWorkspaceList } from './use-workspace-list';
export function WorkspacePage() {
const [keyword, setKeyword] = useState('');
const [featureFilters, setFeatureFilters] = useState<FeatureType[]>([]);
const [flagFilters, setFlagFilters] = useState<WorkspaceFlagFilter>({});
const [sort, setSort] = useState<AdminWorkspaceSort | undefined>(
AdminWorkspaceSort.CreatedAt
);
const { workspaces, pagination, setPagination, workspacesCount } =
const { workspaces, pagination, setPagination, workspacesCount, loading } =
useWorkspaceList({
keyword,
features: featureFilters,
orderBy: sort,
flags: flagFilters,
});
const columns = useColumns();
@@ -36,8 +39,11 @@ export function WorkspacePage() {
onKeywordChange={setKeyword}
selectedFeatures={featureFilters}
onFeaturesChange={setFeatureFilters}
flags={flagFilters}
onFlagsChange={setFlagFilters}
sort={sort}
onSortChange={setSort}
loading={loading}
/>
</div>
);

View File

@@ -10,8 +10,17 @@ export type WorkspaceDetail = NonNullable<
AdminWorkspaceQuery['adminWorkspace']
>;
export type WorkspaceMember = WorkspaceDetail['members'][0];
export type WorkspaceSharedLink = WorkspaceDetail['sharedLinks'][0];
export type WorkspaceUpdateInput =
AdminUpdateWorkspaceMutation['adminUpdateWorkspace'];
export type WorkspaceFeatureFilter = FeatureType[];
export type WorkspaceFlagFilter = {
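// Each flag is tri-state: true, false, or undefined (no filter applied).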
public?: boolean;
enableAi?: boolean;
enableSharing?: boolean;
enableUrlPreview?: boolean;
enableDocEmbedding?: boolean;
};

View File

@@ -7,10 +7,13 @@ import {
} from '@affine/graphql';
import { useEffect, useMemo, useState } from 'react';
import type { WorkspaceFlagFilter } from './schema';
export const useWorkspaceList = (filter?: {
keyword?: string;
features?: FeatureType[];
orderBy?: AdminWorkspaceSort;
flags?: WorkspaceFlagFilter;
}) => {
const [pagination, setPagination] = useState({
pageIndex: 0,
@@ -21,8 +24,10 @@ export const useWorkspaceList = (filter?: {
() =>
`${filter?.keyword ?? ''}-${[...(filter?.features ?? [])]
.sort()
.join(',')}-${filter?.orderBy ?? ''}`,
[filter?.features, filter?.keyword, filter?.orderBy]
.join(',')}-${filter?.orderBy ?? ''}-${JSON.stringify(
filter?.flags ?? {}
)}`,
[filter?.features, filter?.flags, filter?.keyword, filter?.orderBy]
);
useEffect(() => {
@@ -40,10 +45,20 @@ export const useWorkspaceList = (filter?: {
? filter.features
: undefined,
orderBy: filter?.orderBy,
public: filter?.flags?.public,
enableAi: filter?.flags?.enableAi,
enableSharing: filter?.flags?.enableSharing,
enableUrlPreview: filter?.flags?.enableUrlPreview,
enableDocEmbedding: filter?.flags?.enableDocEmbedding,
},
}),
[
filter?.features,
filter?.flags?.enableAi,
filter?.flags?.enableDocEmbedding,
filter?.flags?.enableSharing,
filter?.flags?.enableUrlPreview,
filter?.flags?.public,
filter?.keyword,
filter?.orderBy,
pagination.pageIndex,
@@ -51,30 +66,43 @@ export const useWorkspaceList = (filter?: {
]
);
const { data: listData } = useQuery(
const { data: listData, isValidating: isListValidating } = useQuery(
{
query: adminWorkspacesQuery,
variables,
},
{
keepPreviousData: true,
revalidateOnFocus: false,
revalidateIfStale: true,
revalidateOnReconnect: true,
}
);
const { data: countData } = useQuery(
const { data: countData, isValidating: isCountValidating } = useQuery(
{
query: adminWorkspacesCountQuery,
variables,
},
{
keepPreviousData: true,
revalidateOnFocus: false,
revalidateIfStale: true,
revalidateOnReconnect: true,
}
);
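// Treat the table as loading while either query is revalidating or has not returned data yet.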
const loading =
isListValidating ||
isCountValidating ||
listData === undefined ||
countData === undefined;
return {
workspaces: listData?.adminWorkspaces ?? [],
workspacesCount: countData?.adminWorkspacesCount ?? 0,
pagination,
setPagination,
loading,
};
};

View File

@@ -1,11 +1,6 @@
import { Button } from '@affine/component';
import { useAsyncCallback } from '@affine/core/components/hooks/affine-async-hooks';
import { appIconMap } from '@affine/core/utils';
import {
createStreamEncoder,
encodeRawBufferToOpus,
type OpusStreamEncoder,
} from '@affine/core/utils/opus-encoding';
import { apis, events } from '@affine/electron-api';
import { useI18n } from '@affine/i18n';
import track from '@affine/track';
@@ -105,61 +100,8 @@ export function Recording() {
await apis?.recording?.stopRecording(status.id);
}, [status]);
const handleProcessStoppedRecording = useAsyncCallback(
async (currentStreamEncoder?: OpusStreamEncoder) => {
let id: number | undefined;
try {
const result = await apis?.recording?.getCurrentRecording();
if (!result) {
return;
}
id = result.id;
const { filepath, sampleRate, numberOfChannels } = result;
if (!filepath || !sampleRate || !numberOfChannels) {
return;
}
const [buffer] = await Promise.all([
currentStreamEncoder
? currentStreamEncoder.finish()
: encodeRawBufferToOpus({
filepath,
sampleRate,
numberOfChannels,
}),
new Promise<void>(resolve => {
setTimeout(() => {
resolve();
}, 500); // wait at least 500ms for better user experience
}),
]);
await apis?.recording.readyRecording(result.id, buffer);
} catch (error) {
console.error('Failed to stop recording', error);
await apis?.popup?.dismissCurrentRecording();
if (id) {
await apis?.recording.removeRecording(id);
}
}
},
[]
);
useEffect(() => {
let removed = false;
let currentStreamEncoder: OpusStreamEncoder | undefined;
apis?.recording
.getCurrentRecording()
.then(status => {
if (status) {
return handleRecordingStatusChanged(status);
}
return;
})
.catch(console.error);
const handleRecordingStatusChanged = async (status: Status) => {
if (removed) {
@@ -171,27 +113,18 @@ export function Recording() {
appName: status.appName || 'System Audio',
});
}
if (
status?.status === 'recording' &&
status.sampleRate &&
status.numberOfChannels &&
(!currentStreamEncoder || currentStreamEncoder.id !== status.id)
) {
currentStreamEncoder?.close();
currentStreamEncoder = createStreamEncoder(status.id, {
sampleRate: status.sampleRate,
numberOfChannels: status.numberOfChannels,
});
currentStreamEncoder.poll().catch(console.error);
}
if (status?.status === 'stopped') {
handleProcessStoppedRecording(currentStreamEncoder);
currentStreamEncoder = undefined;
}
};
apis?.recording
.getCurrentRecording()
.then(status => {
if (status) {
return handleRecordingStatusChanged(status);
}
return;
})
.catch(console.error);
// allow processing stopped event in tray menu as well:
const unsubscribe = events?.recording.onRecordingStatusChanged(status => {
if (status) {
@@ -202,9 +135,8 @@ export function Recording() {
return () => {
removed = true;
unsubscribe?.();
currentStreamEncoder?.close();
};
}, [handleProcessStoppedRecording]);
}, []);
const handleStartRecording = useAsyncCallback(async () => {
if (!status) {

View File

@@ -172,9 +172,13 @@ function ensureFrameAncestors(
});
}
function allowCors(headers: Record<string, string[]>) {
function allowCors(
headers: Record<string, string[]>,
origin: string = 'assets://.'
) {
// Signed blob URLs redirect to *.usercontent.affine.pro without CORS headers.
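// A wildcard origin cannot be combined with Access-Control-Allow-Credentials, so echo a concrete origin instead.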
setHeader(headers, 'Access-Control-Allow-Origin', '*');
setHeader(headers, 'Access-Control-Allow-Origin', origin);
setHeader(headers, 'Access-Control-Allow-Credentials', 'true');
setHeader(headers, 'Access-Control-Allow-Methods', 'GET, HEAD, OPTIONS');
setHeader(
headers,

View File

@@ -1,6 +1,5 @@
/* oxlint-disable no-var-requires */
import { execSync } from 'node:child_process';
import { createHash } from 'node:crypto';
import fsp from 'node:fs/promises';
import path from 'node:path';
@@ -32,12 +31,7 @@ import { getMainWindow } from '../windows-manager';
import { popupManager } from '../windows-manager/popup';
import { isAppNameAllowed } from './allow-list';
import { recordingStateMachine } from './state-machine';
import type {
AppGroupInfo,
Recording,
RecordingStatus,
TappableAppInfo,
} from './types';
import type { AppGroupInfo, RecordingStatus, TappableAppInfo } from './types';
export const MeetingsSettingsState = {
$: globalStateStorage.watch<MeetingSettingsSchema>(MeetingSettingsKey).pipe(
@@ -56,7 +50,12 @@ export const MeetingsSettingsState = {
},
};
type Subscriber = {
unsubscribe: () => void;
};
const subscribers: Subscriber[] = [];
let appStateSubscribers: Subscriber[] = [];
// recordings are saved in the app data directory
// may need a way to clean up old recordings
@@ -67,8 +66,22 @@ export const SAVED_RECORDINGS_DIR = path.join(
let shareableContent: ShareableContentType | null = null;
type NativeModule = typeof import('@affine/native');
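// Require lazily so @affine/native is only loaded on platforms that support it.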
function getNativeModule(): NativeModule {
return require('@affine/native') as NativeModule;
}
function cleanup() {
shareableContent = null;
appStateSubscribers.forEach(subscriber => {
try {
subscriber.unsubscribe();
} catch {
// ignore unsubscribe error
}
});
appStateSubscribers = [];
subscribers.forEach(subscriber => {
try {
subscriber.unsubscribe();
@@ -76,6 +89,9 @@ function cleanup() {
// ignore unsubscribe error
}
});
subscribers.length = 0;
applications$.next([]);
appGroups$.next([]);
}
beforeAppQuit(() => {
@@ -87,18 +103,12 @@ export const appGroups$ = new BehaviorSubject<AppGroupInfo[]>([]);
export const updateApplicationsPing$ = new Subject<number>();
// recording id -> recording
// recordings will be saved in memory before consumed and created as an audio block to user's doc
const recordings = new Map<number, Recording>();
// there should be only one active recording at a time
// We'll now use recordingStateMachine.status$ instead of our own BehaviorSubject
// There should be only one active recording at a time; state is managed by the state machine
export const recordingStatus$ = recordingStateMachine.status$;
function createAppGroup(processGroupId: number): AppGroupInfo | undefined {
// MUST require dynamically to avoid loading @affine/native for unsupported platforms
const SC: typeof ShareableContentType =
require('@affine/native').ShareableContent;
const SC: typeof ShareableContentType = getNativeModule().ShareableContent;
const groupProcess = SC?.applicationWithProcessId(processGroupId);
if (!groupProcess) {
return;
@@ -176,7 +186,9 @@ function setupNewRunningAppGroup() {
const debounceStartRecording = debounce((appGroup: AppGroupInfo) => {
// check if the app is running again
if (appGroup.isRunning) {
startRecording(appGroup);
startRecording(appGroup).catch(err => {
logger.error('failed to start recording', err);
});
}
}, 1000);
@@ -242,91 +254,20 @@ function setupNewRunningAppGroup() {
);
}
function getSanitizedAppId(bundleIdentifier?: string) {
if (!bundleIdentifier) {
return 'unknown';
}
return isWindows()
? createHash('sha256')
.update(bundleIdentifier)
.digest('hex')
.substring(0, 8)
: bundleIdentifier;
}
export function createRecording(status: RecordingStatus) {
let recording = recordings.get(status.id);
if (recording) {
return recording;
}
const appId = getSanitizedAppId(status.appGroup?.bundleIdentifier);
const bufferedFilePath = path.join(
SAVED_RECORDINGS_DIR,
`${appId}-${status.id}-${status.startTime}.raw`
);
fs.ensureDirSync(SAVED_RECORDINGS_DIR);
const file = fs.createWriteStream(bufferedFilePath);
function tapAudioSamples(err: Error | null, samples: Float32Array) {
const recordingStatus = recordingStatus$.getValue();
if (
!recordingStatus ||
recordingStatus.id !== status.id ||
recordingStatus.status === 'paused'
) {
return;
}
if (err) {
logger.error('failed to get audio samples', err);
} else {
// Writing raw Float32Array samples directly to file
// For stereo audio, samples are interleaved [L,R,L,R,...]
file.write(Buffer.from(samples.buffer));
}
}
// MUST require dynamically to avoid loading @affine/native for unsupported platforms
const SC: typeof ShareableContentType =
require('@affine/native').ShareableContent;
const stream = status.app
? SC.tapAudio(status.app.processId, tapAudioSamples)
: SC.tapGlobalAudio(null, tapAudioSamples);
recording = {
id: status.id,
startTime: status.startTime,
app: status.app,
appGroup: status.appGroup,
file,
session: stream,
};
recordings.set(status.id, recording);
return recording;
}
export async function getRecording(id: number) {
const recording = recordings.get(id);
if (!recording) {
const recording = recordingStateMachine.status;
if (!recording || recording.id !== id) {
logger.error(`Recording ${id} not found`);
return;
}
const rawFilePath = String(recording.file.path);
return {
id,
appGroup: recording.appGroup,
app: recording.app,
startTime: recording.startTime,
filepath: rawFilePath,
sampleRate: recording.session.sampleRate,
numberOfChannels: recording.session.channels,
filepath: recording.filepath,
sampleRate: recording.sampleRate,
numberOfChannels: recording.numberOfChannels,
};
}
@@ -350,18 +291,7 @@ function setupRecordingListeners() {
});
}
if (status?.status === 'recording') {
let recording = recordings.get(status.id);
// create a recording if not exists
if (!recording) {
recording = createRecording(status);
}
} else if (status?.status === 'stopped') {
const recording = recordings.get(status.id);
if (recording) {
recording.session.stop();
}
} else if (
if (
status?.status === 'create-block-success' ||
status?.status === 'create-block-failed'
) {
@@ -400,9 +330,7 @@ function getAllApps(): TappableAppInfo[] {
}
// MUST require dynamically to avoid loading @affine/native for unsupported platforms
const { ShareableContent } = require('@affine/native') as {
ShareableContent: typeof ShareableContentType;
};
const { ShareableContent } = getNativeModule();
const apps = ShareableContent.applications().map(app => {
try {
@@ -433,12 +361,8 @@ function getAllApps(): TappableAppInfo[] {
return filteredApps;
}
type Subscriber = {
unsubscribe: () => void;
};
function setupMediaListeners() {
const ShareableContent = require('@affine/native').ShareableContent;
const ShareableContent = getNativeModule().ShareableContent;
applications$.next(getAllApps());
subscribers.push(
interval(3000).subscribe(() => {
@@ -454,8 +378,6 @@ function setupMediaListeners() {
})
);
let appStateSubscribers: Subscriber[] = [];
subscribers.push(
applications$.subscribe(apps => {
appStateSubscribers.forEach(subscriber => {
@@ -484,15 +406,6 @@ function setupMediaListeners() {
});
appStateSubscribers = _appStateSubscribers;
return () => {
_appStateSubscribers.forEach(subscriber => {
try {
subscriber.unsubscribe();
} catch {
// ignore unsubscribe error
}
});
};
})
);
}
@@ -502,7 +415,7 @@ function askForScreenRecordingPermission() {
return false;
}
try {
const ShareableContent = require('@affine/native').ShareableContent;
const ShareableContent = getNativeModule().ShareableContent;
// this will trigger the permission prompt
new ShareableContent();
return true;
@@ -519,7 +432,7 @@ export function setupRecordingFeature() {
}
try {
const ShareableContent = require('@affine/native').ShareableContent;
const ShareableContent = getNativeModule().ShareableContent;
if (!shareableContent) {
shareableContent = new ShareableContent();
setupMediaListeners();
@@ -558,24 +471,48 @@ export function newRecording(
});
}
export function startRecording(
export async function startRecording(
appGroup?: AppGroupInfo | number
): RecordingStatus | null {
const state = recordingStateMachine.dispatch(
{
type: 'START_RECORDING',
appGroup: normalizeAppGroupInfo(appGroup),
},
false
);
): Promise<RecordingStatus | null> {
const state = recordingStateMachine.dispatch({
type: 'START_RECORDING',
appGroup: normalizeAppGroupInfo(appGroup),
});
if (state?.status === 'recording') {
createRecording(state);
if (!state || state.status !== 'recording') {
return state;
}
recordingStateMachine.status$.next(state);
try {
fs.ensureDirSync(SAVED_RECORDINGS_DIR);
return state;
const meta = getNativeModule().startRecording({
appProcessId: state.app?.processId,
outputDir: SAVED_RECORDINGS_DIR,
format: 'opus',
id: String(state.id),
});
const filepath = assertRecordingFilepath(meta.filepath);
const nextState = recordingStateMachine.dispatch({
type: 'ATTACH_NATIVE_RECORDING',
id: state.id,
nativeId: meta.id,
startTime: meta.startedAt ?? state.startTime,
filepath,
sampleRate: meta.sampleRate,
numberOfChannels: meta.channels,
});
return nextState;
} catch (error) {
logger.error('failed to start recording', error);
return recordingStateMachine.dispatch({
type: 'CREATE_BLOCK_FAILED',
id: state.id,
error: error instanceof Error ? error : undefined,
});
}
}
export function pauseRecording(id: number) {
@@ -587,61 +524,49 @@ export function resumeRecording(id: number) {
}
export async function stopRecording(id: number) {
const recording = recordings.get(id);
if (!recording) {
const recording = recordingStateMachine.status;
if (!recording || recording.id !== id) {
logger.error(`stopRecording: Recording ${id} not found`);
return;
}
if (!recording.file.path) {
logger.error(`Recording ${id} has no file path`);
if (!recording.nativeId) {
logger.error(`stopRecording: Recording ${id} missing native id`);
return;
}
const { file, session: stream } = recording;
// First stop the audio stream to prevent more data coming in
try {
stream.stop();
} catch (err) {
logger.error('Failed to stop audio stream', err);
}
// End the file with a timeout
file.end();
recordingStateMachine.dispatch({
type: 'STOP_RECORDING',
id,
});
try {
await Promise.race([
new Promise<void>((resolve, reject) => {
file.on('finish', () => {
// check if the file is empty
const stats = fs.statSync(file.path);
if (stats.size === 0) {
reject(new Error('Recording is empty'));
return;
}
resolve();
});
file.on('error', err => {
reject(err);
});
}),
new Promise<never>((_, reject) =>
setTimeout(() => reject(new Error('File writing timeout')), 10000)
),
]);
const recordingStatus = recordingStateMachine.dispatch({
type: 'STOP_RECORDING',
const artifact = getNativeModule().stopRecording(recording.nativeId);
const filepath = assertRecordingFilepath(artifact.filepath);
const readyStatus = recordingStateMachine.dispatch({
type: 'SAVE_RECORDING',
id,
filepath,
sampleRate: artifact.sampleRate,
numberOfChannels: artifact.channels,
});
if (!recordingStatus) {
logger.error('No recording status to stop');
if (!readyStatus) {
logger.error('No recording status to save');
return;
}
return serializeRecordingStatus(recordingStatus);
getMainWindow()
.then(mainWindow => {
if (mainWindow) {
mainWindow.show();
}
})
.catch(err => {
logger.error('failed to bring up the window', err);
});
return serializeRecordingStatus(readyStatus);
} catch (error: unknown) {
logger.error('Failed to stop recording', error);
const recordingStatus = recordingStateMachine.dispatch({
@@ -654,38 +579,9 @@ export async function stopRecording(id: number) {
return;
}
return serializeRecordingStatus(recordingStatus);
} finally {
// Clean up the file stream if it's still open
if (!file.closed) {
file.destroy();
}
}
}
export async function getRawAudioBuffers(
id: number,
cursor?: number
): Promise<{
buffer: Buffer;
nextCursor: number;
}> {
const recording = recordings.get(id);
if (!recording) {
throw new Error(`getRawAudioBuffers: Recording ${id} not found`);
}
const start = cursor ?? 0;
const file = await fsp.open(recording.file.path, 'r');
const stats = await file.stat();
const buffer = Buffer.alloc(stats.size - start);
const result = await file.read(buffer, 0, buffer.length, start);
await file.close();
return {
buffer,
nextCursor: start + result.bytesRead,
};
}
function assertRecordingFilepath(filepath: string) {
const normalizedPath = path.normalize(filepath);
const normalizedBase = path.normalize(SAVED_RECORDINGS_DIR + path.sep);
@@ -702,55 +598,6 @@ export async function readRecordingFile(filepath: string) {
return fsp.readFile(normalizedPath);
}
export async function readyRecording(id: number, buffer: Buffer) {
logger.info('readyRecording', id);
const recordingStatus = recordingStatus$.value;
const recording = recordings.get(id);
if (!recordingStatus || recordingStatus.id !== id || !recording) {
logger.error(`readyRecording: Recording ${id} not found`);
return;
}
const rawFilePath = String(recording.file.path);
const filepath = rawFilePath.replace('.raw', '.opus');
if (!filepath) {
logger.error(`readyRecording: Recording ${id} has no filepath`);
return;
}
await fs.writeFile(filepath, buffer);
// can safely remove the raw file now
logger.info('remove raw file', rawFilePath);
if (rawFilePath) {
try {
await fs.unlink(rawFilePath);
} catch (err) {
logger.error('failed to remove raw file', err);
}
}
// Update the status through the state machine
recordingStateMachine.dispatch({
type: 'SAVE_RECORDING',
id,
filepath,
});
// bring up the window
getMainWindow()
.then(mainWindow => {
if (mainWindow) {
mainWindow.show();
}
})
.catch(err => {
logger.error('failed to bring up the window', err);
});
}
export async function handleBlockCreationSuccess(id: number) {
recordingStateMachine.dispatch({
type: 'CREATE_BLOCK_SUCCESS',
@@ -767,7 +614,6 @@ export async function handleBlockCreationFailed(id: number, error?: Error) {
}
export function removeRecording(id: number) {
recordings.delete(id);
recordingStateMachine.dispatch({ type: 'REMOVE_RECORDING', id });
}
@@ -787,7 +633,6 @@ export interface SerializedRecordingStatus {
export function serializeRecordingStatus(
status: RecordingStatus
): SerializedRecordingStatus | null {
const recording = recordings.get(status.id);
return {
id: status.id,
status: status.status,
@@ -795,10 +640,9 @@ export function serializeRecordingStatus(
appGroupId: status.appGroup?.processGroupId,
icon: status.appGroup?.icon,
startTime: status.startTime,
filepath:
status.filepath ?? (recording ? String(recording.file.path) : undefined),
sampleRate: recording?.session.sampleRate,
numberOfChannels: recording?.session.channels,
filepath: status.filepath,
sampleRate: status.sampleRate,
numberOfChannels: status.numberOfChannels,
};
}

View File

@@ -14,13 +14,11 @@ import {
checkMeetingPermissions,
checkRecordingAvailable,
disableRecordingFeature,
getRawAudioBuffers,
getRecording,
handleBlockCreationFailed,
handleBlockCreationSuccess,
pauseRecording,
readRecordingFile,
readyRecording,
recordingStatus$,
removeRecording,
SAVED_RECORDINGS_DIR,
@@ -51,16 +49,9 @@ export const recordingHandlers = {
stopRecording: async (_, id: number) => {
return stopRecording(id);
},
getRawAudioBuffers: async (_, id: number, cursor?: number) => {
return getRawAudioBuffers(id, cursor);
},
readRecordingFile: async (_, filepath: string) => {
return readRecordingFile(filepath);
},
// save the encoded recording buffer to the file system
readyRecording: async (_, id: number, buffer: Uint8Array) => {
return readyRecording(id, Buffer.from(buffer));
},
handleBlockCreationSuccess: async (_, id: number) => {
return handleBlockCreationSuccess(id);
},

View File

@@ -13,6 +13,15 @@ export type RecordingEvent =
type: 'START_RECORDING';
appGroup?: AppGroupInfo;
}
| {
type: 'ATTACH_NATIVE_RECORDING';
id: number;
nativeId: string;
startTime: number;
filepath: string;
sampleRate: number;
numberOfChannels: number;
}
| { type: 'PAUSE_RECORDING'; id: number }
| { type: 'RESUME_RECORDING'; id: number }
| {
@@ -23,6 +32,8 @@ export type RecordingEvent =
type: 'SAVE_RECORDING';
id: number;
filepath: string;
sampleRate?: number;
numberOfChannels?: number;
}
| {
type: 'CREATE_BLOCK_FAILED';
@@ -74,6 +85,9 @@ export class RecordingStateMachine {
case 'START_RECORDING':
newStatus = this.handleStartRecording(event.appGroup);
break;
case 'ATTACH_NATIVE_RECORDING':
newStatus = this.handleAttachNativeRecording(event);
break;
case 'PAUSE_RECORDING':
newStatus = this.handlePauseRecording();
break;
@@ -84,7 +98,12 @@ export class RecordingStateMachine {
newStatus = this.handleStopRecording(event.id);
break;
case 'SAVE_RECORDING':
newStatus = this.handleSaveRecording(event.id, event.filepath);
newStatus = this.handleSaveRecording(
event.id,
event.filepath,
event.sampleRate,
event.numberOfChannels
);
break;
case 'CREATE_BLOCK_SUCCESS':
newStatus = this.handleCreateBlockSuccess(event.id);
@@ -159,6 +178,35 @@ export class RecordingStateMachine {
}
}
/**
* Attach native recording metadata to the current recording
*/
private handleAttachNativeRecording(
event: Extract<RecordingEvent, { type: 'ATTACH_NATIVE_RECORDING' }>
): RecordingStatus | null {
const currentStatus = this.recordingStatus$.value;
if (!currentStatus || currentStatus.id !== event.id) {
logger.error(`Recording ${event.id} not found for native attachment`);
return currentStatus;
}
if (currentStatus.status !== 'recording') {
logger.error(
`Cannot attach native metadata when recording is in ${currentStatus.status} state`
);
return currentStatus;
}
return {
...currentStatus,
nativeId: event.nativeId,
startTime: event.startTime,
filepath: event.filepath,
sampleRate: event.sampleRate,
numberOfChannels: event.numberOfChannels,
};
}
/**
* Handle the PAUSE_RECORDING event
*/
@@ -233,7 +281,9 @@ export class RecordingStateMachine {
*/
private handleSaveRecording(
id: number,
filepath: string
filepath: string,
sampleRate?: number,
numberOfChannels?: number
): RecordingStatus | null {
const currentStatus = this.recordingStatus$.value;
@@ -246,6 +296,8 @@ export class RecordingStateMachine {
...currentStatus,
status: 'ready',
filepath,
sampleRate,
numberOfChannels,
};
}

View File

@@ -1,6 +1,4 @@
import type { WriteStream } from 'node:fs';
import type { ApplicationInfo, AudioCaptureSession } from '@affine/native';
import type { ApplicationInfo } from '@affine/native';
export interface TappableAppInfo {
info: ApplicationInfo;
@@ -20,18 +18,6 @@ export interface AppGroupInfo {
isRunning: boolean;
}
export interface Recording {
id: number;
// the app may not be available if the user choose to record system audio
app?: TappableAppInfo;
appGroup?: AppGroupInfo;
// the buffered file that is being recorded streamed to
file: WriteStream;
session: AudioCaptureSession;
startTime: number;
filepath?: string; // the filepath of the recording (only available when status is ready)
}
export interface RecordingStatus {
id: number; // corresponds to the recording id
// the status of the recording in a linear state machine
@@ -54,4 +40,7 @@ export interface RecordingStatus {
appGroup?: AppGroupInfo;
startTime: number; // 0 means not started yet
filepath?: string; // encoded file path
nativeId?: string;
sampleRate?: number;
numberOfChannels?: number;
}

View File

@@ -28,7 +28,7 @@ export const openDocActions: Array<OpenDocAction> = [
type: 'open-in-new-view',
label: I18n['com.affine.peek-view-controls.open-doc-in-split-view'](),
icon: SplitViewIcon(),
shortcut: '⌘ ⌥ + click',
shortcut: '⌘ + ⌥ + click',
enabled: BUILD_CONFIG.isElectron,
},
{

View File

@@ -82,11 +82,11 @@ export const useWinGeneralKeyboardShortcuts = (): ShortcutMap => {
[t('quickSearch')]: ['Ctrl', 'K'],
[t('newPage')]: ['Ctrl', 'N'],
// not implement yet
// [t('appendDailyNote')]: 'Ctrl + Alt + A',
// [t('appendDailyNote')]: ['Ctrl', 'Alt', 'A'],
[t('expandOrCollapseSidebar')]: ['Ctrl', '/'],
[t('goBack')]: ['Ctrl', '['],
[t('goForward')]: ['Ctrl', ']'],
[t('copy-private-link')]: ['Ctrl', '', 'C'],
[t('copy-private-link')]: ['Ctrl', 'Shift', 'C'],
}),
[t]
);
@@ -99,10 +99,10 @@ export const useMacGeneralKeyboardShortcuts = (): ShortcutMap => {
[t('quickSearch')]: ['⌘', 'K'],
[t('newPage')]: ['⌘', 'N'],
// not implement yet
// [t('appendDailyNote')]: '⌘ + ⌥ + A',
// [t('appendDailyNote')]: ['⌘', '⌥', 'A'],
[t('expandOrCollapseSidebar')]: ['⌘', '/'],
[t('goBack')]: ['⌘ ', '['],
[t('goForward')]: ['⌘ ', ']'],
[t('goBack')]: ['⌘', '['],
[t('goForward')]: ['⌘', ']'],
[t('copy-private-link')]: ['⌘', '⇧', 'C'],
}),
[t]
@@ -118,9 +118,9 @@ export const useMacEdgelessKeyboardShortcuts = (): ShortcutMap => {
[t('redo')]: ['⌘', '⇧', 'Z'],
[t('zoomIn')]: ['⌘', '+'],
[t('zoomOut')]: ['⌘', '-'],
[t('zoomTo100')]: ['Alt', '0'],
[t('zoomToFit')]: ['Alt', '1'],
[t('zoomToSelection')]: ['Alt', '2'],
[t('zoomTo100')]: ['⌥', '0'],
[t('zoomToFit')]: ['⌥', '1'],
[t('zoomToSelection')]: ['⌥', '2'],
[t('select')]: ['V'],
[t('text')]: ['T'],
[t('shape')]: ['S'],
@@ -129,9 +129,9 @@ export const useMacEdgelessKeyboardShortcuts = (): ShortcutMap => {
[t('pen')]: ['P'],
[t('hand')]: ['H'],
[t('note')]: ['N'],
// not implement yet
// [t('group')]: '⌘ + G',
// [t('unGroup')]: '⌘ + ⇧ + G',
[t('switch')]: ['⌥', 'S'],
[t('group')]: ['⌘', 'G'],
[t('unGroup')]: ['⌘', '⇧', 'G'],
}),
[t]
);
@@ -142,7 +142,7 @@ export const useWinEdgelessKeyboardShortcuts = (): ShortcutMap => {
() => ({
[t('selectAll')]: ['Ctrl', 'A'],
[t('undo')]: ['Ctrl', 'Z'],
[t('redo')]: ['Ctrl', 'Y/Ctrl', 'Shift', 'Z'],
[t('redo')]: ['Ctrl', 'Y'],
[t('zoomIn')]: ['Ctrl', '+'],
[t('zoomOut')]: ['Ctrl', '-'],
[t('zoomTo100')]: ['Alt', '0'],
@@ -156,10 +156,9 @@ export const useWinEdgelessKeyboardShortcuts = (): ShortcutMap => {
[t('pen')]: ['P'],
[t('hand')]: ['H'],
[t('note')]: ['N'],
[t('switch')]: ['Alt ', ''],
// not implement yet
// [t('group')]: 'Ctrl + G',
// [t('unGroup')]: 'Ctrl + Shift + G',
[t('switch')]: ['Alt', 'S'],
[t('group')]: ['Ctrl', 'G'],
[t('unGroup')]: ['Ctrl', 'Shift', 'G'],
}),
[t]
);
@@ -194,8 +193,8 @@ export const useMacPageKeyboardShortcuts = (): ShortcutMap => {
[t('groupDatabase')]: ['⌘', 'G'],
[t('switch')]: ['⌥', 'S'],
// not implement yet
// [t('moveUp')]: '⌘ + ⌥ + ↑',
// [t('moveDown')]: '⌘ + ⌥ + ↓',
// [t('moveUp')]: ['⌘', '⌥', '↑'],
// [t('moveDown')]: ['⌘', '⌥', '↓'],
}),
[t, tH]
);
@@ -211,7 +210,7 @@ export const useMacMarkdownShortcuts = (): ShortcutMap => {
[t('underline')]: ['~Text~'],
[t('strikethrough')]: ['~~Text~~'],
[t('divider')]: ['***'],
[t('inlineCode')]: ['`Text` '],
[t('inlineCode')]: ['`Text`'],
[t('codeBlock')]: ['``` Space'],
[tH('1')]: ['# Text'],
[tH('2')]: ['## Text'],
@@ -235,7 +234,7 @@ export const useWinPageKeyboardShortcuts = (): ShortcutMap => {
[t('italic')]: ['Ctrl', 'I'],
[t('underline')]: ['Ctrl', 'U'],
[t('strikethrough')]: ['Ctrl', 'Shift', 'S'],
[t('inlineCode')]: [' Ctrl', 'E'],
[t('inlineCode')]: ['Ctrl', 'E'],
[t('codeBlock')]: ['Ctrl', 'Alt', 'C'],
[t('link')]: ['Ctrl', 'K'],
[t('quickSearch')]: ['Ctrl', 'K'],
@@ -247,15 +246,15 @@ export const useWinPageKeyboardShortcuts = (): ShortcutMap => {
[tH('5')]: ['Ctrl', 'Shift', '5'],
[tH('6')]: ['Ctrl', 'Shift', '6'],
[t('increaseIndent')]: ['Tab'],
[t('reduceIndent')]: ['Shift+Tab'],
[t('reduceIndent')]: ['Shift', 'Tab'],
[t('alignLeft')]: ['Ctrl', 'Shift', 'L'],
[t('alignCenter')]: ['Ctrl', 'Shift', 'E'],
[t('alignRight')]: ['Ctrl', 'Shift', 'R'],
[t('groupDatabase')]: ['Ctrl + G'],
['Switch']: ['Alt + S'],
[t('groupDatabase')]: ['Ctrl', 'G'],
[t('switch')]: ['Alt', 'S'],
// not implement yet
// [t('moveUp')]: 'Ctrl + Alt + ↑',
// [t('moveDown')]: 'Ctrl + Alt + ↓',
// [t('moveUp')]: ['Ctrl', 'Alt', '↑'],
// [t('moveDown')]: ['Ctrl', 'Alt', '↓'],
}),
[t, tH]
);
@@ -265,12 +264,12 @@ export const useWinMarkdownShortcuts = (): ShortcutMap => {
const tH = useHeadingKeyboardShortcutsI18N();
return useMemo(
() => ({
[t('bold')]: ['**Text** '],
[t('italic')]: ['*Text* '],
[t('underline')]: ['~Text~ '],
[t('strikethrough')]: ['~~Text~~ '],
[t('bold')]: ['**Text**'],
[t('italic')]: ['*Text*'],
[t('underline')]: ['~Text~'],
[t('strikethrough')]: ['~~Text~~'],
[t('divider')]: ['***'],
[t('inlineCode')]: ['`Text` '],
[t('inlineCode')]: ['`Text`'],
[t('codeBlock')]: ['``` Text'],
[tH('1')]: ['# Text'],
[tH('2')]: ['## Text'],

View File

@@ -21,11 +21,19 @@ export const SharingPanel = () => {
export const Sharing = () => {
const t = useI18n();
const shareSetting = useService(WorkspaceShareSettingService).sharePreview;
const enableSharing = useLiveData(shareSetting.enableSharing$);
const enableUrlPreview = useLiveData(shareSetting.enableUrlPreview$);
const loading = useLiveData(shareSetting.isLoading$);
const permissionService = useService(WorkspacePermissionService);
const isOwner = useLiveData(permissionService.permission.isOwner$);
const handleToggleSharing = useAsyncCallback(
async (checked: boolean) => {
await shareSetting.setEnableSharing(checked);
},
[shareSetting]
);
const handleCheck = useAsyncCallback(
async (checked: boolean) => {
await shareSetting.setEnableUrlPreview(checked);
@@ -51,6 +59,20 @@ export const Sharing = () => {
disabled={loading}
/>
</SettingRow>
<SettingRow
name={t[
'com.affine.settings.workspace.sharing.workspace-sharing.title'
]()}
desc={t[
'com.affine.settings.workspace.sharing.workspace-sharing.description'
]()}
>
<Switch
checked={enableSharing ?? true}
onChange={handleToggleSharing}
disabled={loading}
/>
</SettingRow>
</SettingWrapper>
);
};

View File

@@ -1,8 +1,11 @@
import { useEnableCloud } from '@affine/core/components/hooks/affine/use-enable-cloud';
import { WorkspaceShareSettingService } from '@affine/core/modules/share-setting';
import type { Workspace } from '@affine/core/modules/workspace';
import { useI18n } from '@affine/i18n';
import { track } from '@affine/track';
import type { Store } from '@blocksuite/affine/store';
import { useCallback } from 'react';
import { useLiveData, useService } from '@toeverything/infra';
import { useCallback, useEffect } from 'react';
import { ShareMenu } from './share-menu';
export { CloudSvg } from './cloud-svg';
@@ -14,6 +17,10 @@ type SharePageModalProps = {
};
export const SharePageButton = ({ workspace, page }: SharePageModalProps) => {
const t = useI18n();
const shareSetting = useService(WorkspaceShareSettingService).sharePreview;
const enableSharing = useLiveData(shareSetting.enableSharing$);
const confirmEnableCloud = useEnableCloud();
const handleOpenShareModal = useCallback((open: boolean) => {
if (open) {
@@ -21,6 +28,18 @@ export const SharePageButton = ({ workspace, page }: SharePageModalProps) => {
}
}, []);
useEffect(() => {
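// Local workspaces have no server-side share settings to revalidate.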
if (workspace.meta.flavour === 'local') {
return;
}
shareSetting.revalidate();
}, [shareSetting, workspace.meta.flavour]);
const sharingDisabled = enableSharing === false;
const disabledReason = sharingDisabled
? t['com.affine.share-menu.workspace-sharing.disabled.tooltip']()
: undefined;
return (
<ShareMenu
workspaceMetadata={workspace.meta}
@@ -31,6 +50,8 @@ export const SharePageButton = ({ workspace, page }: SharePageModalProps) => {
})
}
onOpenShareModal={handleOpenShareModal}
disabled={sharingDisabled}
disabledReason={disabledReason}
/>
);
};

View File

@@ -35,6 +35,8 @@ export interface ShareMenuProps extends PropsWithChildren {
onOpenShareModal?: (open: boolean) => void;
openPaywallModal?: () => void;
hittingPaywall?: boolean;
disabled?: boolean;
disabledReason?: string;
}
export enum ShareMenuTab {
@@ -203,7 +205,7 @@ export const ShareMenuContent = (props: ShareMenuProps) => {
};
const DefaultShareButton = forwardRef(function DefaultShareButton(
_,
props: { disabled?: boolean; tooltip?: string },
ref: Ref<HTMLButtonElement>
) {
const t = useI18n();
@@ -211,18 +213,26 @@ const DefaultShareButton = forwardRef(function DefaultShareButton(
const shared = useLiveData(shareInfoService.shareInfo.isShared$);
useEffect(() => {
if (props.disabled) {
return;
}
shareInfoService.shareInfo.revalidate();
}, [shareInfoService]);
}, [props.disabled, shareInfoService]);
const tooltip =
props.tooltip ??
(shared
? t['com.affine.share-menu.option.link.readonly.description']()
: t['com.affine.share-menu.option.link.no-access.description']());
return (
<Tooltip
content={
shared
? t['com.affine.share-menu.option.link.readonly.description']()
: t['com.affine.share-menu.option.link.no-access.description']()
}
>
<Button ref={ref} className={styles.button} variant="primary">
<Tooltip content={tooltip}>
<Button
ref={ref}
className={styles.button}
variant="primary"
disabled={props.disabled}
>
<div className={styles.buttonContainer}>
{shared ? <PublishIcon fontSize={16} /> : <LockIcon fontSize={16} />}
{t['com.affine.share-menu.shareButton']()}
@@ -233,6 +243,13 @@ const DefaultShareButton = forwardRef(function DefaultShareButton(
});
const LocalShareMenu = (props: ShareMenuProps) => {
if (props.disabled) {
return (
<div data-testid="local-share-menu-button">
<DefaultShareButton disabled tooltip={props.disabledReason} />
</div>
);
}
return (
<Menu
items={<ShareMenuContent {...props} />}
@@ -254,6 +271,13 @@ const LocalShareMenu = (props: ShareMenuProps) => {
};
const CloudShareMenu = (props: ShareMenuProps) => {
if (props.disabled) {
return (
<div data-testid="cloud-share-menu-button">
<DefaultShareButton disabled tooltip={props.disabledReason} />
</div>
);
}
return (
<Menu
items={<ShareMenuContent {...props} />}

View File

@@ -16,6 +16,7 @@ import type { WorkspaceService } from '../../workspace';
import type { WorkspaceShareSettingStore } from '../stores/share-setting';
type EnableAi = GetWorkspaceConfigQuery['workspace']['enableAi'];
type EnableSharing = GetWorkspaceConfigQuery['workspace']['enableSharing'];
type EnableUrlPreview =
GetWorkspaceConfigQuery['workspace']['enableUrlPreview'];
@@ -23,6 +24,7 @@ const logger = new DebugLogger('affine:workspace-permission');
export class WorkspaceShareSetting extends Entity {
enableAi$ = new LiveData<EnableAi | null>(null);
enableSharing$ = new LiveData<EnableSharing | null>(null);
enableUrlPreview$ = new LiveData<EnableUrlPreview | null>(null);
inviteLink$ = new LiveData<InviteLink | null>(null);
isLoading$ = new LiveData(false);
@@ -48,12 +50,13 @@ export class WorkspaceShareSetting extends Entity {
tap(value => {
if (value) {
this.enableAi$.next(value.enableAi);
this.enableSharing$.next(value.enableSharing);
this.enableUrlPreview$.next(value.enableUrlPreview);
this.inviteLink$.next(value.inviteLink);
}
}),
catchErrorInto(this.error$, error => {
logger.error('Failed to fetch enableUrlPreview', error);
logger.error('Failed to fetch workspace share settings', error);
}),
onStart(() => this.isLoading$.setValue(true)),
onComplete(() => this.isLoading$.setValue(false))
@@ -74,6 +77,14 @@ export class WorkspaceShareSetting extends Entity {
await this.waitForRevalidation();
}
async setEnableSharing(enableSharing: EnableSharing) {
await this.store.updateWorkspaceEnableSharing(
this.workspaceService.workspace.id,
enableSharing
);
await this.waitForRevalidation();
}
async setEnableAi(enableAi: EnableAi) {
await this.store.updateWorkspaceEnableAi(
this.workspaceService.workspace.id,

View File

@@ -2,6 +2,7 @@ import type { WorkspaceServerService } from '@affine/core/modules/cloud';
import {
getWorkspaceConfigQuery,
setEnableAiMutation,
setEnableSharingMutation,
setEnableUrlPreviewMutation,
} from '@affine/graphql';
import { Store } from '@toeverything/infra';
@@ -47,6 +48,26 @@ export class WorkspaceShareSettingStore extends Store {
});
}
async updateWorkspaceEnableSharing(
workspaceId: string,
enableSharing: boolean,
signal?: AbortSignal
) {
if (!this.workspaceServerService.server) {
throw new Error('No Server');
}
await this.workspaceServerService.server.gql({
query: setEnableSharingMutation,
variables: {
id: workspaceId,
enableSharing,
},
context: {
signal,
},
});
}
async updateWorkspaceEnableUrlPreview(
workspaceId: string,
enableUrlPreview: boolean,

View File

@@ -423,98 +423,3 @@ export async function encodeAudioBlobToOpusSlices(
await audioContext.close();
}
}
export const createStreamEncoder = (
recordingId: number,
codecs: {
sampleRate: number;
numberOfChannels: number;
targetBitrate?: number;
}
) => {
const { encoder, encodedChunks } = createOpusEncoder({
sampleRate: codecs.sampleRate,
numberOfChannels: codecs.numberOfChannels,
bitrate: codecs.targetBitrate,
});
const toAudioData = (buffer: Uint8Array) => {
// Each sample in f32 format is 4 bytes
const BYTES_PER_SAMPLE = 4;
return new AudioData({
format: 'f32',
sampleRate: codecs.sampleRate,
numberOfChannels: codecs.numberOfChannels,
numberOfFrames:
buffer.length / BYTES_PER_SAMPLE / codecs.numberOfChannels,
timestamp: 0,
data: buffer,
});
};
let cursor = 0;
let isClosed = false;
const next = async () => {
if (!apis) {
throw new Error('Electron API is not available');
}
if (isClosed) {
return;
}
const { buffer, nextCursor } = await apis.recording.getRawAudioBuffers(
recordingId,
cursor
);
if (isClosed || cursor === nextCursor) {
return;
}
cursor = nextCursor;
logger.debug('Encoding next chunk', cursor, nextCursor);
encoder.encode(toAudioData(buffer));
};
const poll = async () => {
if (isClosed) {
return;
}
logger.debug('Polling next chunk');
await next();
await new Promise(resolve => setTimeout(resolve, 1000));
await poll();
};
const close = () => {
if (isClosed) {
return;
}
isClosed = true;
return encoder.close();
};
return {
id: recordingId,
next,
poll,
flush: () => {
return encoder.flush();
},
close,
finish: async () => {
logger.debug('Finishing encoding');
await next();
close();
const buffer = muxToMp4(encodedChunks, {
sampleRate: codecs.sampleRate,
numberOfChannels: codecs.numberOfChannels,
bitrate: codecs.targetBitrate,
});
return buffer;
},
[Symbol.dispose]: () => {
close();
},
};
};
export type OpusStreamEncoder = ReturnType<typeof createStreamEncoder>;

View File

@@ -6346,6 +6346,14 @@ export function useAFFiNEI18N(): {
* `Always enable url preview`
*/
["com.affine.settings.workspace.sharing.url-preview.title"](): string;
/**
* `Control whether pages in this workspace can be shared publicly. Turn off to block new shares and external access for existing shares.`
*/
["com.affine.settings.workspace.sharing.workspace-sharing.description"](): string;
/**
* `Allow workspace page sharing`
*/
["com.affine.settings.workspace.sharing.workspace-sharing.title"](): string;
/**
* `AFFiNE AI`
*/
@@ -6605,6 +6613,10 @@ export function useAFFiNEI18N(): {
* `Anyone can access this link`
*/
["com.affine.share-menu.option.link.readonly.description"](): string;
/**
* `Sharing for this workspace is turned off. Please contact an admin to enable it.`
*/
["com.affine.share-menu.workspace-sharing.disabled.tooltip"](): string;
/**
* `Can manage`
*/

View File

@@ -1591,6 +1591,8 @@
"com.affine.settings.workspace.sharing.title": "Sharing",
"com.affine.settings.workspace.sharing.url-preview.description": "Allow URL unfurling by Slack & other social apps, even if a doc is only accessible by workspace members.",
"com.affine.settings.workspace.sharing.url-preview.title": "Always enable url preview",
"com.affine.settings.workspace.sharing.workspace-sharing.description": "Control whether pages in this workspace can be shared publicly. Turn off to block new shares and external access for existing shares.",
"com.affine.settings.workspace.sharing.workspace-sharing.title": "Allow workspace page sharing",
"com.affine.settings.workspace.affine-ai.title": "AFFiNE AI",
"com.affine.settings.workspace.affine-ai.label": "Allow AFFiNE AI Assistant",
"com.affine.settings.workspace.affine-ai.description": "Allow workspace members to use AFFiNE AI features. This setting doesn't affect billing. Workspace members use AFFiNE AI through their personal accounts.",
@@ -1655,6 +1657,7 @@
"com.affine.share-menu.option.link.no-access.description": "Only workspace members can access this link",
"com.affine.share-menu.option.link.readonly": "Read only",
"com.affine.share-menu.option.link.readonly.description": "Anyone can access this link",
"com.affine.share-menu.workspace-sharing.disabled.tooltip": "Sharing for this workspace is turned off. Please contact an admin to enable it.",
"com.affine.share-menu.option.permission.can-manage": "Can manage",
"com.affine.share-menu.option.permission.can-edit": "Can edit",
"com.affine.share-menu.option.permission.can-read": "Can read",

View File

@@ -1589,6 +1589,8 @@
"com.affine.settings.workspace.sharing.title": "分享",
"com.affine.settings.workspace.sharing.url-preview.description": "允许 Slack 和其他社交应用程序展开 URL即使文档仅由工作区成员访问。",
"com.affine.settings.workspace.sharing.url-preview.title": "始终启用 URL 预览",
"com.affine.settings.workspace.sharing.workspace-sharing.description": "控制此工作区的页面是否允许公开分享。关闭后,禁止新的分享且现有分享外部无法访问。",
"com.affine.settings.workspace.sharing.workspace-sharing.title": "允许工作区页面分享",
"com.affine.settings.workspace.affine-ai.title": "AFFiNE AI",
"com.affine.settings.workspace.affine-ai.label": "启用 AFFiNE AI 助手",
"com.affine.settings.workspace.affine-ai.description": "允许工作区成员使用 AFFiNE AI 功能。此设置不会影响计费。工作区成员通过个人帐户使用 AFFiNE AI。",
@@ -1653,6 +1655,7 @@
"com.affine.share-menu.option.link.no-access.description": "只有此工作区的成员可以打开此链接。",
"com.affine.share-menu.option.link.readonly": "只读",
"com.affine.share-menu.option.link.readonly.description": "任何人可以访问该链接",
"com.affine.share-menu.workspace-sharing.disabled.tooltip": "该工作区已禁用分享,请联系管理员开启。",
"com.affine.share-menu.option.permission.can-manage": "可管理",
"com.affine.share-menu.option.permission.can-edit": "可编辑",
"com.affine.share-menu.option.permission.can-read": "可阅读",

View File

@@ -1566,6 +1566,8 @@
"com.affine.settings.workspace.sharing.title": "分享",
"com.affine.settings.workspace.sharing.url-preview.description": "允許 Slack 和其他社交應用程序展開 URL即使文件僅由工作區成員訪問。",
"com.affine.settings.workspace.sharing.url-preview.title": "始終啟用 URL 預覽",
"com.affine.settings.workspace.sharing.workspace-sharing.description": "控制此工作區的頁面是否允許公開分享。關閉後,禁止新的分享且现有分享外部無法訪問。",
"com.affine.settings.workspace.sharing.workspace-sharing.title": "允許工作區頁面分享",
"com.affine.settings.workspace.affine-ai.title": "AFFiNE AI",
"com.affine.settings.workspace.affine-ai.label": "啟用 AFFiNE AI 助理",
"com.affine.settings.workspace.affine-ai.description": "允許工作區成員使用 AFFiNE AI 功能。此設置不影響計費。工作區成員透過他們的個人帳號使用 AFFiNE AI。",
@@ -1630,6 +1632,7 @@
"com.affine.share-menu.option.link.no-access.description": "只有此工作區的成員可以打開此連結。",
"com.affine.share-menu.option.link.readonly": "只讀",
"com.affine.share-menu.option.link.readonly.description": "任何人可以訪問該連結",
"com.affine.share-menu.workspace-sharing.disabled.tooltip": "此工作區已停用分享,請聯絡管理員開啟。",
"com.affine.share-menu.option.permission.can-manage": "可管理",
"com.affine.share-menu.option.permission.can-edit": "可編輯",
"com.affine.share-menu.option.permission.can-read": "可閱讀",

View File

@@ -40,6 +40,37 @@ export declare function decodeAudio(buf: Uint8Array, destSampleRate?: number | u
/** Decode audio file into a Float32Array */
export declare function decodeAudioSync(buf: Uint8Array, destSampleRate?: number | undefined | null, filename?: string | undefined | null): Float32Array
export interface RecordingArtifact {
id: string
filepath: string
sampleRate: number
channels: number
durationMs: number
size: number
}
export interface RecordingSessionMeta {
id: string
filepath: string
sampleRate: number
channels: number
startedAt: number
}
export interface RecordingStartOptions {
appProcessId?: number
excludeProcessIds?: Array<number>
outputDir: string
format?: string
sampleRate?: number
channels?: number
id?: string
}
export declare function startRecording(opts: RecordingStartOptions): RecordingSessionMeta
export declare function stopRecording(id: string): RecordingArtifact
export declare function mintChallengeResponse(resource: string, bits?: number | undefined | null): Promise<string>
export declare function verifyChallengeResponse(response: string, bits: number, resource: string): Promise<boolean>
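A minimal usage sketch of the recording bindings declared above; the process id, output directory, and recording id are placeholder values, not taken from the repository:
import { startRecording, stopRecording } from '@affine/native';
// Start an Opus capture scoped to a single app; session metadata is returned synchronously.
const meta = startRecording({
appProcessId: 1234, // placeholder process id
outputDir: '/tmp/recordings', // placeholder output directory
format: 'opus',
id: 'demo-recording',
});
// Later, stop the capture and inspect the finished artifact.
const artifact = stopRecording(meta.id);
console.log(artifact.filepath, artifact.sampleRate, artifact.channels, artifact.durationMs);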

View File

@@ -579,6 +579,8 @@ module.exports.AudioCaptureSession = nativeBinding.AudioCaptureSession
module.exports.ShareableContent = nativeBinding.ShareableContent
module.exports.decodeAudio = nativeBinding.decodeAudio
module.exports.decodeAudioSync = nativeBinding.decodeAudioSync
module.exports.startRecording = nativeBinding.startRecording
module.exports.stopRecording = nativeBinding.stopRecording
module.exports.mintChallengeResponse = nativeBinding.mintChallengeResponse
module.exports.verifyChallengeResponse = nativeBinding.verifyChallengeResponse
module.exports.DocStorage = nativeBinding.DocStorage

View File

@@ -11,11 +11,15 @@ harness = false
name = "mix_audio_samples"
[dependencies]
napi = { workspace = true, features = ["napi4"] }
napi-derive = { workspace = true, features = ["type-def"] }
rubato = { workspace = true }
symphonia = { workspace = true, features = ["all", "opt-simd"] }
thiserror = { workspace = true }
crossbeam-channel = { workspace = true }
napi = { workspace = true, features = ["napi4"] }
napi-derive = { workspace = true, features = ["type-def"] }
ogg = { workspace = true }
opus-codec = "0.1.2"
rand = { workspace = true }
rubato = { workspace = true }
symphonia = { workspace = true, features = ["all", "opt-simd"] }
thiserror = { workspace = true }
[target.'cfg(target_os = "macos")'.dependencies]
block2 = { workspace = true }
@@ -29,10 +33,9 @@ screencapturekit = { workspace = true }
uuid = { workspace = true, features = ["v4"] }
[target.'cfg(target_os = "windows")'.dependencies]
cpal = { workspace = true }
crossbeam-channel = { workspace = true }
windows = { workspace = true }
windows-core = { workspace = true }
cpal = { workspace = true }
windows = { workspace = true }
windows-core = { workspace = true }
[dev-dependencies]
criterion2 = { workspace = true }

View File

@@ -0,0 +1,29 @@
use crossbeam_channel::Sender;
use napi::{
bindgen_prelude::Float32Array,
threadsafe_function::{ThreadsafeFunction, ThreadsafeFunctionCallMode},
};
use std::sync::Arc;
/// Internal callback abstraction so audio taps can target JS or native pipelines.
#[derive(Clone)]
pub enum AudioCallback {
Js(Arc<ThreadsafeFunction<Float32Array, ()>>),
Channel(Sender<Vec<f32>>),
}
impl AudioCallback {
pub fn call(&self, samples: Vec<f32>) {
match self {
Self::Js(func) => {
// Non-blocking call into JS; errors are ignored to avoid blocking the
// audio thread.
let _ = func.call(Ok(samples.into()), ThreadsafeFunctionCallMode::NonBlocking);
}
Self::Channel(sender) => {
// Drop the chunk if the channel is full to avoid blocking capture.
let _ = sender.try_send(samples);
}
}
}
}
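
A minimal sketch of the channel variant, assuming it runs inside this crate so `crate::audio_callback::AudioCallback` resolves (the Opus recorder added later in this diff wires it up the same way, with a bounded channel and a worker thread):

use crossbeam_channel::bounded;
use std::thread;

use crate::audio_callback::AudioCallback;

fn channel_sink_sketch() {
    // A bounded queue keeps the capture side from accumulating memory; `try_send`
    // silently drops chunks if the consumer falls behind.
    let (tx, rx) = bounded::<Vec<f32>>(32);
    let callback = AudioCallback::Channel(tx);

    let consumer = thread::spawn(move || {
        let mut samples = 0usize;
        for chunk in rx {
            samples += chunk.len(); // encode or persist the chunk here
        }
        samples
    });

    // Normally this is invoked from the audio render thread.
    callback.call(vec![0.0_f32; 1024]);

    drop(callback); // dropping the last sender ends the consumer loop
    assert_eq!(consumer.join().unwrap(), 1024);
}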

View File

@@ -8,4 +8,6 @@ pub mod windows;
#[cfg(target_os = "windows")]
pub use windows::*;
pub mod audio_callback;
pub mod audio_decoder;
pub mod recording;

View File

@@ -36,6 +36,7 @@ use screencapturekit::shareable_content::SCShareableContent;
use uuid::Uuid;
use crate::{
audio_callback::AudioCallback,
error::CoreAudioError,
pid::{audio_process_list, get_process_property},
tap_audio::{AggregateDeviceManager, AudioCaptureSession},
@@ -677,10 +678,9 @@ impl ShareableContent {
Ok(false)
}
#[napi]
pub fn tap_audio(
pub(crate) fn tap_audio_with_callback(
process_id: u32,
audio_stream_callback: ThreadsafeFunction<napi::bindgen_prelude::Float32Array, ()>,
audio_stream_callback: AudioCallback,
) -> Result<AudioCaptureSession> {
let app = ShareableContent::applications()?
.into_iter()
@@ -694,13 +694,10 @@ impl ShareableContent {
));
}
// Convert ThreadsafeFunction to Arc<ThreadsafeFunction>
let callback_arc = Arc::new(audio_stream_callback);
// Use AggregateDeviceManager instead of AggregateDevice directly
// This provides automatic default device change detection
let mut device_manager = AggregateDeviceManager::new(&app)?;
device_manager.start_capture(callback_arc)?;
device_manager.start_capture(audio_stream_callback)?;
let boxed_manager = Box::new(device_manager);
Ok(AudioCaptureSession::new(boxed_manager))
} else {
@@ -712,9 +709,19 @@ impl ShareableContent {
}
#[napi]
pub fn tap_global_audio(
excluded_processes: Option<Vec<&ApplicationInfo>>,
pub fn tap_audio(
process_id: u32,
audio_stream_callback: ThreadsafeFunction<napi::bindgen_prelude::Float32Array, ()>,
) -> Result<AudioCaptureSession> {
ShareableContent::tap_audio_with_callback(
process_id,
AudioCallback::Js(Arc::new(audio_stream_callback)),
)
}
pub(crate) fn tap_global_audio_with_callback(
excluded_processes: Option<Vec<&ApplicationInfo>>,
audio_stream_callback: AudioCallback,
) -> Result<AudioCaptureSession> {
let excluded_object_ids = excluded_processes
.unwrap_or_default()
@@ -722,13 +729,21 @@ impl ShareableContent {
.map(|app| app.object_id)
.collect::<Vec<_>>();
// Convert ThreadsafeFunction to Arc<ThreadsafeFunction>
let callback_arc = Arc::new(audio_stream_callback);
// Use the new AggregateDeviceManager for automatic device adaptation
let mut device_manager = AggregateDeviceManager::new_global(&excluded_object_ids)?;
device_manager.start_capture(callback_arc)?;
device_manager.start_capture(audio_stream_callback)?;
let boxed_manager = Box::new(device_manager);
Ok(AudioCaptureSession::new(boxed_manager))
}
#[napi]
pub fn tap_global_audio(
excluded_processes: Option<Vec<&ApplicationInfo>>,
audio_stream_callback: ThreadsafeFunction<napi::bindgen_prelude::Float32Array, ()>,
) -> Result<AudioCaptureSession> {
ShareableContent::tap_global_audio_with_callback(
excluded_processes,
AudioCallback::Js(Arc::new(audio_stream_callback)),
)
}
}

View File

@@ -23,15 +23,13 @@ use coreaudio::sys::{
AudioObjectGetPropertyDataSize, AudioObjectID, AudioObjectPropertyAddress,
AudioObjectRemovePropertyListenerBlock, AudioTimeStamp, OSStatus,
};
use napi::{
bindgen_prelude::{Float32Array, Result, Status},
threadsafe_function::{ThreadsafeFunction, ThreadsafeFunctionCallMode},
};
use napi::bindgen_prelude::Result;
use napi_derive::napi;
use objc2::runtime::AnyObject;
use crate::{
audio_buffer::InputAndOutputAudioBufferList,
audio_callback::AudioCallback,
ca_tap_description::CATapDescription,
cf_types::CFDictionaryBuilder,
device::get_device_uid,
@@ -241,7 +239,7 @@ impl AggregateDevice {
/// Implementation for the AggregateDevice to start processing audio
pub fn start(
&mut self,
audio_stream_callback: Arc<ThreadsafeFunction<Float32Array, (), Float32Array, Status, true>>,
audio_stream_callback: AudioCallback,
// Add original_audio_stats to ensure consistent target rate
original_audio_stats: AudioStats,
) -> Result<AudioTapStream> {
@@ -300,11 +298,8 @@ impl AggregateDevice {
return kAudioHardwareBadStreamError as i32;
};
// Send the processed audio data to JavaScript
audio_stream_callback.call(
Ok(mixed_samples.into()),
ThreadsafeFunctionCallMode::NonBlocking,
);
// Send the processed audio data to the configured sink
audio_stream_callback.call(mixed_samples);
kAudioHardwareNoError as i32
},
@@ -576,7 +571,7 @@ pub struct AggregateDeviceManager {
app_id: Option<AudioObjectID>,
excluded_processes: Vec<AudioObjectID>,
active_stream: Option<Arc<std::sync::Mutex<Option<AudioTapStream>>>>,
audio_callback: Option<Arc<ThreadsafeFunction<Float32Array, (), Float32Array, Status, true>>>,
audio_callback: Option<AudioCallback>,
original_audio_stats: Option<AudioStats>,
}
@@ -614,10 +609,7 @@ impl AggregateDeviceManager {
}
/// This sets up the initial stream and listeners.
pub fn start_capture(
&mut self,
audio_stream_callback: Arc<ThreadsafeFunction<Float32Array, (), Float32Array, Status, true>>,
) -> Result<()> {
pub fn start_capture(&mut self, audio_stream_callback: AudioCallback) -> Result<()> {
// Store the callback for potential device switch later
self.audio_callback = Some(audio_stream_callback.clone());

View File

@@ -0,0 +1,581 @@
use std::{
collections::HashMap,
fs,
io::{BufWriter, Write},
path::PathBuf,
sync::{LazyLock, Mutex},
thread::{self, JoinHandle},
time::{SystemTime, UNIX_EPOCH},
};
use crossbeam_channel::{bounded, Receiver, Sender};
use napi::{bindgen_prelude::Result, Error, Status};
use napi_derive::napi;
use ogg::writing::{PacketWriteEndInfo, PacketWriter};
use opus_codec::{Application, Channels, Encoder, FrameSize, SampleRate as OpusSampleRate};
use rubato::Resampler;
use crate::audio_callback::AudioCallback;
#[cfg(target_os = "macos")]
use crate::macos::screen_capture_kit::{ApplicationInfo, ShareableContent};
#[cfg(target_os = "windows")]
use crate::windows::screen_capture_kit::ShareableContent;
const ENCODE_SAMPLE_RATE: OpusSampleRate = OpusSampleRate::Hz48000;
const MAX_PACKET_SIZE: usize = 4096;
const RESAMPLER_INPUT_CHUNK: usize = 1024;
type RecordingResult<T> = std::result::Result<T, RecordingError>;
#[napi(object)]
pub struct RecordingStartOptions {
pub app_process_id: Option<u32>,
pub exclude_process_ids: Option<Vec<u32>>,
pub output_dir: String,
pub format: Option<String>,
pub sample_rate: Option<u32>,
pub channels: Option<u32>,
pub id: Option<String>,
}
#[napi(object)]
pub struct RecordingSessionMeta {
pub id: String,
pub filepath: String,
pub sample_rate: u32,
pub channels: u32,
pub started_at: i64,
}
#[napi(object)]
pub struct RecordingArtifact {
pub id: String,
pub filepath: String,
pub sample_rate: u32,
pub channels: u32,
pub duration_ms: i64,
pub size: i64,
}
#[derive(Debug, thiserror::Error)]
enum RecordingError {
#[error("unsupported platform")]
UnsupportedPlatform,
#[error("invalid output directory")]
InvalidOutputDir,
#[error("invalid format {0}")]
InvalidFormat(String),
#[error("io error: {0}")]
Io(#[from] std::io::Error),
#[error("encoding error: {0}")]
Encoding(String),
#[error("recording not found")]
NotFound,
#[error("empty recording")]
Empty,
#[error("start failure: {0}")]
Start(String),
#[error("join failure")]
Join,
}
impl RecordingError {
fn code(&self) -> &'static str {
match self {
RecordingError::UnsupportedPlatform => "unsupported-platform",
RecordingError::InvalidOutputDir => "invalid-output-dir",
RecordingError::InvalidFormat(_) => "invalid-format",
RecordingError::Io(_) => "io-error",
RecordingError::Encoding(_) => "encoding-error",
RecordingError::NotFound => "not-found",
RecordingError::Empty => "empty-recording",
RecordingError::Start(_) => "start-failure",
RecordingError::Join => "join-failure",
}
}
}
impl From<RecordingError> for Error {
fn from(err: RecordingError) -> Self {
Error::new(Status::GenericFailure, format!("{}: {}", err.code(), err))
}
}
struct InterleavedResampler {
resampler: rubato::FastFixedIn<f32>,
channels: usize,
fifo: Vec<Vec<f32>>,
warmed: bool,
}
impl InterleavedResampler {
fn new(from_sr: u32, to_sr: u32, channels: usize) -> RecordingResult<Self> {
let ratio = to_sr as f64 / from_sr as f64;
let resampler = rubato::FastFixedIn::<f32>::new(
ratio,
1.0,
rubato::PolynomialDegree::Linear,
RESAMPLER_INPUT_CHUNK,
channels,
)
.map_err(|e| RecordingError::Encoding(format!("resampler init failed: {e}")))?;
Ok(Self {
resampler,
channels,
fifo: vec![Vec::<f32>::new(); channels],
warmed: false,
})
}
fn feed(&mut self, interleaved: &[f32]) -> Vec<f32> {
for frame in interleaved.chunks(self.channels) {
for (idx, sample) in frame.iter().enumerate() {
if let Some(channel_fifo) = self.fifo.get_mut(idx) {
channel_fifo.push(*sample);
}
}
}
let mut out = Vec::new();
while self.fifo.first().map(|q| q.len()).unwrap_or(0) >= RESAMPLER_INPUT_CHUNK {
let mut chunk: Vec<Vec<f32>> = Vec::with_capacity(self.channels);
for channel in &mut self.fifo {
let take: Vec<f32> = channel.drain(..RESAMPLER_INPUT_CHUNK).collect();
chunk.push(take);
}
if let Ok(blocks) = self.resampler.process(&chunk, None) {
if blocks.is_empty() || blocks.len() != self.channels {
continue;
}
if !self.warmed {
self.warmed = true;
continue;
}
let out_len = blocks[0].len();
for i in 0..out_len {
for ch in 0..self.channels {
out.push(blocks[ch][i]);
}
}
}
}
out
}
}
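
The resampler accumulates interleaved input per channel, converts it in fixed 1024-sample chunks, and discards the first output block as warm-up, so some input is always left queued for the next call. A rough, illustrative sketch of what that means for one second of 44.1 kHz mono audio (written as in-module code, since `InterleavedResampler` and `RecordingResult` are private):

fn resampler_sketch() -> RecordingResult<()> {
    let mut rs = InterleavedResampler::new(44_100, 48_000, 1)?;
    // One second of a 440 Hz tone at the source rate.
    let tone: Vec<f32> = (0..44_100)
        .map(|i| (i as f32 * 2.0 * std::f32::consts::PI * 440.0 / 44_100.0).sin())
        .collect();
    let out = rs.feed(&tone);
    // 43 complete 1024-sample chunks are consumed; the first output block is
    // dropped for warm-up and the tail stays buffered, so the result comes out
    // somewhat short of a full second at 48 kHz.
    assert!(out.len() > 40_000 && out.len() < 48_000);
    Ok(())
}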
struct OggOpusWriter {
writer: PacketWriter<'static, BufWriter<fs::File>>,
encoder: Encoder,
frame_samples: usize,
pending: Vec<f32>,
granule_position: u64,
samples_written: u64,
channels: Channels,
sample_rate: OpusSampleRate,
resampler: Option<InterleavedResampler>,
filepath: PathBuf,
stream_serial: u32,
}
impl OggOpusWriter {
fn new(filepath: PathBuf, source_sample_rate: u32, channels: u32) -> RecordingResult<Self> {
let channels = if channels > 1 {
Channels::Stereo
} else {
Channels::Mono
};
let sample_rate = ENCODE_SAMPLE_RATE;
let resampler = if source_sample_rate != sample_rate.as_i32() as u32 {
Some(InterleavedResampler::new(
source_sample_rate,
sample_rate.as_i32() as u32,
channels.as_usize(),
)?)
} else {
None
};
if let Some(parent) = filepath.parent() {
fs::create_dir_all(parent)?;
}
let file = fs::File::create(&filepath)?;
let mut writer = PacketWriter::new(BufWriter::new(file));
let stream_serial: u32 = rand::random();
write_opus_headers(&mut writer, stream_serial, channels, sample_rate)?;
let frame_samples = FrameSize::Ms20.samples(sample_rate);
let encoder = Encoder::new(sample_rate, channels, Application::Audio)
.map_err(|e| RecordingError::Encoding(e.to_string()))?;
Ok(Self {
writer,
encoder,
frame_samples,
pending: Vec::new(),
granule_position: 0,
samples_written: 0,
channels,
sample_rate,
resampler,
filepath,
stream_serial,
})
}
fn push_samples(&mut self, samples: &[f32]) -> RecordingResult<()> {
let mut processed = if let Some(resampler) = &mut self.resampler {
resampler.feed(samples)
} else {
samples.to_vec()
};
if processed.is_empty() {
return Ok(());
}
self.pending.append(&mut processed);
let frame_len = self.frame_samples * self.channels.as_usize();
while self.pending.len() >= frame_len {
let frame: Vec<f32> = self.pending.drain(..frame_len).collect();
self.encode_frame(frame, self.frame_samples, PacketWriteEndInfo::NormalPacket)?;
}
Ok(())
}
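
For orientation, the frame arithmetic behind `frame_samples` and `frame_len` above, at the fixed 48 kHz encode rate:

// 20 ms at 48 kHz is 960 samples per channel, so a stereo frame is 1920
// interleaved f32 values; shorter remainders wait in `pending` until finish()
// zero-pads them.
let samples_per_frame = 48_000 * 20 / 1_000; // 960
let frame_len_stereo = samples_per_frame * 2; // 1920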
fn encode_frame(
&mut self,
frame: Vec<f32>,
samples_in_frame: usize,
end: PacketWriteEndInfo,
) -> RecordingResult<()> {
let mut out = vec![0u8; MAX_PACKET_SIZE];
let encoded = self
.encoder
.encode_float(&frame, &mut out)
.map_err(|e| RecordingError::Encoding(e.to_string()))?;
self.granule_position += samples_in_frame as u64;
self.samples_written += samples_in_frame as u64;
let packet = out[..encoded].to_vec();
self
.writer
.write_packet(packet, self.stream_serial, end, self.granule_position)
.map_err(|e| RecordingError::Encoding(format!("failed to write packet: {e}")))?;
Ok(())
}
fn finish(mut self) -> RecordingResult<RecordingArtifact> {
let frame_len = self.frame_samples * self.channels.as_usize();
if !self.pending.is_empty() {
let mut frame = self.pending.clone();
let samples_in_frame = frame.len() / self.channels.as_usize();
frame.resize(frame_len, 0.0);
self.encode_frame(frame, samples_in_frame, PacketWriteEndInfo::NormalPacket)?;
self.pending.clear();
}
// If nothing was captured, drop the empty file and surface an error; otherwise
// a final end-of-stream packet is flushed below.
if self.samples_written == 0 {
fs::remove_file(&self.filepath).ok();
return Err(RecordingError::Empty);
}
// Flush a final end-of-stream marker.
self
.writer
.write_packet(
Vec::<u8>::new(),
self.stream_serial,
PacketWriteEndInfo::EndStream,
self.granule_position,
)
.map_err(|e| RecordingError::Encoding(format!("failed to finish stream: {e}")))?;
let _ = self.writer.inner_mut().flush();
let size = fs::metadata(&self.filepath)?.len() as i64;
let duration_ms = (self.samples_written * 1000) as i64 / self.sample_rate.as_i32() as i64;
Ok(RecordingArtifact {
id: String::new(),
filepath: self.filepath.to_string_lossy().to_string(),
sample_rate: self.sample_rate.as_i32() as u32,
channels: self.channels.as_usize() as u32,
duration_ms,
size,
})
}
}
fn write_opus_headers(
writer: &mut PacketWriter<'static, BufWriter<fs::File>>,
stream_serial: u32,
channels: Channels,
sample_rate: OpusSampleRate,
) -> RecordingResult<()> {
let mut opus_head = Vec::with_capacity(19);
opus_head.extend_from_slice(b"OpusHead");
opus_head.push(1); // version
opus_head.push(channels.as_usize() as u8);
opus_head.extend_from_slice(&0u16.to_le_bytes()); // pre-skip
opus_head.extend_from_slice(&(sample_rate.as_i32() as u32).to_le_bytes());
opus_head.extend_from_slice(&0i16.to_le_bytes()); // output gain
opus_head.push(0); // channel mapping
writer
.write_packet(opus_head, stream_serial, PacketWriteEndInfo::EndPage, 0)
.map_err(|e| RecordingError::Encoding(format!("failed to write OpusHead: {e}")))?;
let vendor = b"AFFiNE Native";
let mut opus_tags = Vec::new();
opus_tags.extend_from_slice(b"OpusTags");
opus_tags.extend_from_slice(&(vendor.len() as u32).to_le_bytes());
opus_tags.extend_from_slice(vendor);
opus_tags.extend_from_slice(&0u32.to_le_bytes()); // user comment list length
writer
.write_packet(opus_tags, stream_serial, PacketWriteEndInfo::EndPage, 0)
.map_err(|e| RecordingError::Encoding(format!("failed to write OpusTags: {e}")))?;
Ok(())
}
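
The packet assembled above is the 19-byte OpusHead identification header from RFC 7845. As a reference when hex-dumping a produced file, the stereo case with zero pre-skip, zero gain and mapping family 0 comes out as:

// "OpusHead" magic, version, channel count, pre-skip, input rate (LE),
// output gain, channel mapping family.
let opus_head_stereo: [u8; 19] = [
    0x4F, 0x70, 0x75, 0x73, 0x48, 0x65, 0x61, 0x64, // "OpusHead"
    0x01,                                           // version 1
    0x02,                                           // 2 channels
    0x00, 0x00,                                     // pre-skip = 0
    0x80, 0xBB, 0x00, 0x00,                         // 48_000 Hz, little-endian
    0x00, 0x00,                                     // output gain = 0 (Q7.8 dB)
    0x00,                                           // channel mapping family 0
];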
enum PlatformCapture {
#[cfg(target_os = "macos")]
Mac(crate::macos::tap_audio::AudioCaptureSession),
#[cfg(target_os = "windows")]
Windows(crate::windows::audio_capture::AudioCaptureSession),
}
unsafe impl Send for PlatformCapture {}
impl PlatformCapture {
fn stop(&mut self) -> Result<()> {
match self {
#[cfg(target_os = "macos")]
PlatformCapture::Mac(session) => session.stop(),
#[cfg(target_os = "windows")]
PlatformCapture::Windows(session) => session.stop(),
#[allow(unreachable_patterns)]
_ => Err(RecordingError::UnsupportedPlatform.into()),
}
}
}
struct ActiveRecording {
sender: Option<Sender<Vec<f32>>>,
capture: PlatformCapture,
worker: Option<JoinHandle<std::result::Result<RecordingArtifact, RecordingError>>>,
}
static ACTIVE_RECORDINGS: LazyLock<Mutex<HashMap<String, ActiveRecording>>> =
LazyLock::new(|| Mutex::new(HashMap::new()));
fn now_millis() -> i64 {
SystemTime::now()
.duration_since(UNIX_EPOCH)
.map(|d| d.as_millis() as i64)
.unwrap_or(0)
}
fn sanitize_id(id: Option<String>) -> String {
let raw = id.unwrap_or_else(|| format!("{}", now_millis()));
let filtered: String = raw
.chars()
.filter(|c| c.is_ascii_alphanumeric() || *c == '-' || *c == '_')
.collect();
if filtered.is_empty() {
format!("{}", now_millis())
} else {
filtered
}
}
fn validate_output_dir(path: &str) -> Result<PathBuf> {
let dir = PathBuf::from(path);
if !dir.is_absolute() {
return Err(RecordingError::InvalidOutputDir.into());
}
fs::create_dir_all(&dir)?;
let normalized = dir
.canonicalize()
.map_err(|_| RecordingError::InvalidOutputDir)?;
Ok(normalized)
}
#[cfg(target_os = "macos")]
fn build_excluded_refs(ids: &[u32]) -> Result<Vec<ApplicationInfo>> {
if ids.is_empty() {
return Ok(Vec::new());
}
let apps = ShareableContent::applications()?;
let mut excluded = Vec::new();
for app in apps {
if ids.contains(&(app.process_id as u32)) {
excluded.push(app);
}
}
Ok(excluded)
}
fn start_capture(
opts: &RecordingStartOptions,
tx: Sender<Vec<f32>>,
) -> Result<(PlatformCapture, u32, u32)> {
#[cfg(target_os = "macos")]
{
let callback = AudioCallback::Channel(tx);
let session = if let Some(app_id) = opts.app_process_id {
ShareableContent::tap_audio_with_callback(app_id, callback)?
} else {
let excluded_apps = build_excluded_refs(
opts
.exclude_process_ids
.as_ref()
.map(|v| v.as_slice())
.unwrap_or(&[]),
)?;
let excluded_refs: Vec<&ApplicationInfo> = excluded_apps.iter().collect();
ShareableContent::tap_global_audio_with_callback(Some(excluded_refs), callback)?
};
let sample_rate = session.get_sample_rate()?.round().clamp(1.0, f64::MAX) as u32;
let channels = session.get_channels()?;
return Ok((PlatformCapture::Mac(session), sample_rate, channels));
}
#[cfg(target_os = "windows")]
{
let callback = AudioCallback::Channel(tx);
let session = ShareableContent::tap_audio_with_callback(
opts.app_process_id.unwrap_or(0),
callback,
opts.sample_rate,
)?;
let sample_rate = session.get_sample_rate().round() as u32;
let channels = session.get_channels();
return Ok((PlatformCapture::Windows(session), sample_rate, channels));
}
#[cfg(not(any(target_os = "macos", target_os = "windows")))]
{
let _ = opts;
let _ = tx;
Err(RecordingError::UnsupportedPlatform.into())
}
}
fn spawn_worker(
id: String,
filepath: PathBuf,
rx: Receiver<Vec<f32>>,
source_sample_rate: u32,
channels: u32,
) -> JoinHandle<std::result::Result<RecordingArtifact, RecordingError>> {
thread::spawn(move || {
let mut writer = OggOpusWriter::new(filepath.clone(), source_sample_rate, channels)?;
for chunk in rx {
writer.push_samples(&chunk)?;
}
let mut artifact = writer.finish()?;
artifact.id = id;
Ok(artifact)
})
}
#[napi]
pub fn start_recording(opts: RecordingStartOptions) -> Result<RecordingSessionMeta> {
if let Some(fmt) = opts.format.as_deref() {
if fmt.to_ascii_lowercase() != "opus" {
return Err(RecordingError::InvalidFormat(fmt.to_string()).into());
}
}
let output_dir = validate_output_dir(&opts.output_dir)?;
let id = sanitize_id(opts.id.clone());
let filepath = output_dir.join(format!("{id}.opus"));
if filepath.exists() {
fs::remove_file(&filepath)?;
}
let (tx, rx) = bounded::<Vec<f32>>(32);
let (capture, capture_rate, capture_channels) =
start_capture(&opts, tx.clone()).map_err(|e| RecordingError::Start(e.to_string()))?;
let encoding_channels = match opts.channels {
Some(1) => 1,
Some(2) => 2,
_ => capture_channels,
};
let worker = spawn_worker(
id.clone(),
filepath.clone(),
rx,
capture_rate,
encoding_channels,
);
let meta = RecordingSessionMeta {
id: id.clone(),
filepath: filepath.to_string_lossy().to_string(),
sample_rate: ENCODE_SAMPLE_RATE.as_i32() as u32,
channels: encoding_channels,
started_at: now_millis(),
};
let mut recordings = ACTIVE_RECORDINGS
.lock()
.map_err(|_| RecordingError::Start("lock poisoned".into()))?;
if recordings.contains_key(&id) {
return Err(RecordingError::Start("duplicate recording id".into()).into());
}
recordings.insert(
id,
ActiveRecording {
sender: Some(tx),
capture,
worker: Some(worker),
},
);
Ok(meta)
}
#[napi]
pub fn stop_recording(id: String) -> Result<RecordingArtifact> {
let mut recordings = ACTIVE_RECORDINGS
.lock()
.map_err(|_| RecordingError::Start("lock poisoned".into()))?;
let mut entry = recordings.remove(&id).ok_or(RecordingError::NotFound)?;
entry
.capture
.stop()
.map_err(|e| RecordingError::Start(e.to_string()))?;
drop(entry.sender.take());
let handle = entry.worker.take().ok_or(RecordingError::Join)?;
let artifact = handle
.join()
.map_err(|_| RecordingError::Join)??;
Ok(artifact)
}
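
Putting the two exported functions together, an illustrative end-to-end sketch from the Rust side (the `startRecording`/`stopRecording` exports earlier in this diff map onto these functions; the output directory is a placeholder and must be absolute, and capture only starts on macOS or Windows):

use std::{thread, time::Duration};

fn record_for_five_seconds() -> napi::bindgen_prelude::Result<()> {
    let meta = start_recording(RecordingStartOptions {
        app_process_id: None, // None => global capture
        exclude_process_ids: None,
        output_dir: "/tmp/recordings".to_string(), // adjust per platform
        format: Some("opus".to_string()),
        sample_rate: None,
        channels: Some(2),
        id: Some("demo-session".to_string()),
    })?;
    thread::sleep(Duration::from_secs(5));
    let artifact = stop_recording(meta.id)?;
    println!(
        "{} -> {} bytes, {} ms",
        artifact.filepath, artifact.size, artifact.duration_ms
    );
    Ok(())
}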

View File

@@ -8,16 +8,13 @@ use std::{
thread::JoinHandle,
};
use crate::audio_callback::AudioCallback;
use cpal::{
traits::{DeviceTrait, HostTrait, StreamTrait},
SampleRate,
};
use crossbeam_channel::unbounded;
use napi::{
bindgen_prelude::{Float32Array, Result},
threadsafe_function::{ThreadsafeFunction, ThreadsafeFunctionCallMode},
Error, Status,
};
use napi::{bindgen_prelude::Result, Error, Status};
use napi_derive::napi;
use rubato::{FastFixedIn, PolynomialDegree, Resampler};
@@ -221,7 +218,8 @@ impl Drop for AudioCaptureSession {
}
pub fn start_recording(
audio_buffer_callback: ThreadsafeFunction<Float32Array, ()>,
audio_buffer_callback: AudioCallback,
target_sample_rate: Option<SampleRate>,
) -> Result<AudioCaptureSession> {
let available_hosts = cpal::available_hosts();
let host_id = available_hosts
@@ -247,7 +245,7 @@ pub fn start_recording(
let mic_sample_rate = mic_config.sample_rate();
let lb_sample_rate = lb_config.sample_rate();
let target_rate = SampleRate(mic_sample_rate.min(lb_sample_rate).0);
let target_rate = target_sample_rate.unwrap_or(SampleRate(mic_sample_rate.min(lb_sample_rate).0));
let mic_channels = mic_config.channels();
let lb_channels = lb_config.channels();
@@ -347,10 +345,7 @@ pub fn start_recording(
let lb_chunk: Vec<f32> = post_lb.drain(..TARGET_FRAME_SIZE).collect();
let mixed = mix(&mic_chunk, &lb_chunk);
if !mixed.is_empty() {
let _ = audio_buffer_callback.call(
Ok(mixed.clone().into()),
ThreadsafeFunctionCallMode::NonBlocking,
);
audio_buffer_callback.call(mixed);
}
}

View File

@@ -10,6 +10,7 @@ use std::{
time::Duration,
};
use cpal::SampleRate;
use napi::{
bindgen_prelude::{Buffer, Error, Result, Status},
threadsafe_function::{ThreadsafeFunction, ThreadsafeFunctionCallMode},
@@ -27,6 +28,7 @@ use windows::Win32::System::{
};
// Import the function from microphone_listener
use crate::audio_callback::AudioCallback;
use crate::windows::microphone_listener::is_process_actively_using_microphone;
// Type alias to match macOS API
@@ -230,6 +232,15 @@ impl ShareableContent {
}
}
pub(crate) fn tap_audio_with_callback(
_process_id: u32,
audio_stream_callback: AudioCallback,
target_sample_rate: Option<u32>,
) -> Result<AudioCaptureSession> {
let target = target_sample_rate.map(SampleRate);
crate::windows::audio_capture::start_recording(audio_stream_callback, target)
}
#[napi]
pub fn tap_audio(
_process_id: u32, // Currently unused - Windows captures global audio
@@ -237,7 +248,22 @@ impl ShareableContent {
) -> Result<AudioCaptureSession> {
// On Windows with CPAL, we capture global audio (mic + loopback)
// since per-application audio tapping isn't supported the same way as macOS
crate::windows::audio_capture::start_recording(audio_stream_callback)
ShareableContent::tap_audio_with_callback(
_process_id,
AudioCallback::Js(Arc::new(audio_stream_callback)),
None,
)
}
pub(crate) fn tap_global_audio_with_callback(
_excluded_processes: Option<Vec<&ApplicationInfo>>,
audio_stream_callback: AudioCallback,
target_sample_rate: Option<u32>,
) -> Result<AudioCaptureSession> {
let target = target_sample_rate.map(SampleRate);
// Delegate to audio_capture::start_recording which handles mixing mic +
// loopback
crate::windows::audio_capture::start_recording(audio_stream_callback, target)
}
#[napi]
@@ -245,9 +271,11 @@ impl ShareableContent {
_excluded_processes: Option<Vec<&ApplicationInfo>>,
audio_stream_callback: ThreadsafeFunction<napi::bindgen_prelude::Float32Array, ()>,
) -> Result<AudioCaptureSession> {
// Delegate to audio_capture::start_recording which handles mixing mic +
// loopback
crate::windows::audio_capture::start_recording(audio_stream_callback)
ShareableContent::tap_global_audio_with_callback(
_excluded_processes,
AudioCallback::Js(Arc::new(audio_stream_callback)),
None,
)
}
#[napi]

Some files were not shown because too many files have changed in this diff.