Mirror of https://github.com/toeverything/AFFiNE.git (synced 2026-02-04 16:44:56 +00:00)

Compare commits: use-jemall ... v0.23.2-be (114 commits)
Commits (SHA1):

c4cf5799d4, b53b4884cf, 0525c499a1, 43f8d852d8, 06eb17387a, 436d5e5079, 52e69e0dde, 612c73cab1,
b7c026bbe8, 013a6ceb7e, fa42e3619f, edd97ae73b, 0770b109cb, 4018b3aeca, c90d511251, bdf1389258,
dc68c2385d, 07f2f7b5a8, 38107910f9, ea21de8311, 21360591a9, 5300eff8f1, 46a2ad750f, 3949714618,
7b9e0a215d, b93d5d5e86, c8dc51ccae, cdff5c3117, d44771dfe9, 45b05f06b3, 04e002eb77, a444941b79,
39e0ec37fd, cc1d5b497a, a4b535a42a, c797cac87d, 339ecab00f, 8e374f5517, cd91bea5c1, 613597e642,
a597bdcdf6, 316c671c92, 95a97b793c, eb24074871, 2a8f18504b, b85afa7394, 8ec4bbb298, 812c199b45,
36bd8f645a, 7cff8091e4, de8feb98a3, fbd6e8fa97, bcf6bd1dfc, 8627560fd5, 9a3e44c6d6, 7b53641a94,
3948b8eada, d05bb9992c, b2c09825ac, 65453c31c6, d9e8ce802f, d5f63b9e43, ebefbeefc8, 4d7d8f215f,
b6187718ea, 3ee82bd9ce, 3dbdb99435, 0d414d914a, 41f338bce0, 6f87c1ca50, 33f6496d79, 847ef00a75,
93f13e9e01, a2b86bc6d2, aee7a8839e, 0e8ffce126, 9cda655c9e, 15726bd522, d65a7494a4, 0f74e1fa0f,
fef4a9eeb6, 58dc53581f, b23f380539, d29a97f86c, 0f287f9661, 18f13626cc, 0eeea5e173, 2052a34d19,
b79439b01d, 2dacba9011, af9c455ee0, 3d45c7623f, e0f88451e1, aba0a3d485, 8b579e3a92, d98b45ca3d,
fc1104cd68, 46901c472c, 9d5c7dd1e9, f655e6e8bf, 46a9d0f7fe, 340aae6476, 6b7d1e91e0, 3538c78a8b,
7d527c7f3a, ad5a122391, 0f9b9789da, 5b027f7986, fe00293e3e, 385226083f, 38d8dde6b8, ed6fde550f,
11a9e67bc1, 899585ba7f
.github/actions/setup-version/action.yml (vendored, 6 changed lines)
@@ -4,9 +4,15 @@ inputs:
app-version:
description: 'App Version'
required: true
ios-app-version:
description: 'iOS App Store Version (Optional, use App version if empty)'
required: false
type: string
runs:
using: 'composite'
steps:
- name: 'Write Version'
shell: bash
env:
IOS_APP_VERSION: ${{ inputs.ios-app-version }}
run: ./scripts/set-version.sh ${{ inputs.app-version }}
.github/deployment/node/Dockerfile (vendored, 5 changed lines)
@@ -7,7 +7,10 @@ COPY ./packages/frontend/apps/mobile/dist /app/static/mobile
WORKDIR /app

RUN apt-get update && \
apt-get install -y --no-install-recommends openssl && \
apt-get install -y --no-install-recommends openssl libjemalloc2 && \
rm -rf /var/lib/apt/lists/*

# Enable jemalloc by preloading the library
ENV LD_PRELOAD=libjemalloc.so.2

CMD ["node", "./dist/main.js"]
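The Dockerfile change installs libjemalloc2 and preloads it through LD_PRELOAD, so the Node server allocates through jemalloc instead of glibc malloc. A minimal sketch of how one might confirm at runtime that the preload actually took effect; this check is not part of the diff, it assumes a Linux container where /proc/self/maps is readable:

```ts
// check-jemalloc.ts — hypothetical helper, not part of the AFFiNE diff.
// On Linux, /proc/self/maps lists every shared library mapped into the process,
// so a preloaded libjemalloc.so.2 should show up there when LD_PRELOAD worked.
import { readFileSync } from 'node:fs';

export function jemallocLoaded(): boolean {
  try {
    return readFileSync('/proc/self/maps', 'utf8').includes('libjemalloc');
  } catch {
    // Not on Linux (or /proc unavailable) — cannot tell from here.
    return false;
  }
}

console.log('jemalloc preloaded:', jemallocLoaded());
```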
.github/workflows/release-desktop.yml (vendored, 2 changed lines)
@@ -465,7 +465,7 @@ jobs:
name: ${{ env.RELEASE_VERSION }}
draft: ${{ inputs.build-type == 'stable' }}
prerelease: ${{ inputs.build-type != 'stable' }}
tag_name: ${{ env.RELEASE_VERSION}}
tag_name: v${{ env.RELEASE_VERSION}}
files: |
./release/*
./release/.env.example
.github/workflows/release-mobile.yml (vendored, 8 changed lines)
@@ -12,6 +12,9 @@ on:
build-type:
type: string
required: true
ios-app-version:
type: string
required: false

env:
BUILD_TYPE: ${{ inputs.build-type }}
@@ -78,7 +81,7 @@ jobs:
path: packages/frontend/apps/android/dist

ios:
runs-on: ${{ github.ref_name == 'canary' && 'macos-latest' || 'blaze/macos-14' }}
runs-on: 'macos-15'
needs:
- build-ios-web
steps:
@@ -87,6 +90,7 @@
uses: ./.github/actions/setup-version
with:
app-version: ${{ inputs.app-version }}
ios-app-version: ${{ inputs.ios-app-version }}
- name: 'Update Code Sign Identity'
shell: bash
run: ./packages/frontend/apps/ios/update_code_sign_identity.sh
@@ -106,7 +110,7 @@
enableScripts: false
- uses: maxim-lobanov/setup-xcode@v1
with:
xcode-version: 16.2
xcode-version: 16.4
- name: Install Swiftformat
run: brew install swiftformat
- name: Cap sync
.github/workflows/release.yml (vendored, 7 changed lines)
@@ -21,6 +21,10 @@ on:
required: true
type: boolean
default: false
ios-app-version:
description: 'iOS App Store Version (Optional, use tag version if empty)'
required: false
type: string

permissions:
contents: write
@@ -30,6 +34,7 @@ permissions:
packages: write
security-events: write
attestations: write
issues: write

jobs:
prepare:
@@ -70,6 +75,7 @@ jobs:
with:
secret: ${{ secrets.GITHUB_TOKEN }}
approvers: forehalo,fengmk2
minimum-approvals: 1
fail-on-denial: true
issue-title: Please confirm to release docker image
issue-body: |
@@ -117,3 +123,4 @@ jobs:
build-type: ${{ needs.prepare.outputs.BUILD_TYPE }}
app-version: ${{ needs.prepare.outputs.APP_VERSION }}
git-short-hash: ${{ needs.prepare.outputs.GIT_SHORT_HASH }}
ios-app-version: ${{ inputs.ios-app-version }}
@@ -266,6 +266,7 @@
"./components/toggle-button": "./src/components/toggle-button.ts",
"./components/toggle-switch": "./src/components/toggle-switch.ts",
"./components/toolbar": "./src/components/toolbar.ts",
"./components/tooltip": "./src/components/tooltip.ts",
"./components/view-dropdown-menu": "./src/components/view-dropdown-menu.ts",
"./components/tooltip-content-with-shortcut": "./src/components/tooltip-content-with-shortcut.ts",
"./components/resource": "./src/components/resource.ts",
blocksuite/affine/all/src/components/tooltip.ts (new file, 1 line)
@@ -0,0 +1 @@
export * from '@blocksuite/affine-components/tooltip';
@@ -39,6 +39,13 @@ export class CodeBlockHighlighter extends LifeCycleWatcher {
private readonly _loadTheme = async (
highlighter: HighlighterCore
): Promise<void> => {
// It is possible that by the time the highlighter is ready all instances
// have already been unmounted. In that case there is no need to load
// themes or update state.
if (CodeBlockHighlighter._refCount === 0) {
return;
}

const config = this.std.getOptional(CodeBlockConfigExtension.identifier);
const darkTheme = config?.theme?.dark ?? CODE_BLOCK_DEFAULT_DARK_THEME;
const lightTheme = config?.theme?.light ?? CODE_BLOCK_DEFAULT_LIGHT_THEME;
@@ -78,14 +85,27 @@ export class CodeBlockHighlighter extends LifeCycleWatcher {
override unmounted(): void {
CodeBlockHighlighter._refCount--;

// Only dispose the shared highlighter when no instances are using it
if (
CodeBlockHighlighter._refCount === 0 &&
CodeBlockHighlighter._sharedHighlighter
) {
CodeBlockHighlighter._sharedHighlighter.dispose();
// Dispose the shared highlighter **after** any in-flight creation finishes.
if (CodeBlockHighlighter._refCount !== 0) {
return;
}

const doDispose = (highlighter: HighlighterCore | null) => {
if (highlighter) {
highlighter.dispose();
}
CodeBlockHighlighter._sharedHighlighter = null;
CodeBlockHighlighter._highlighterPromise = null;
};

if (CodeBlockHighlighter._sharedHighlighter) {
// Highlighter already created – dispose immediately.
doDispose(CodeBlockHighlighter._sharedHighlighter);
} else if (CodeBlockHighlighter._highlighterPromise) {
// Highlighter still being created – wait for it, then dispose.
CodeBlockHighlighter._highlighterPromise
.then(doDispose)
.catch(console.error);
}
}
}
@@ -73,7 +73,8 @@
"./edgeless-line-styles-panel": "./src/edgeless-line-styles-panel/index.ts",
"./edgeless-shape-color-picker": "./src/edgeless-shape-color-picker/index.ts",
"./open-doc-dropdown-menu": "./src/open-doc-dropdown-menu/index.ts",
"./slider": "./src/slider/index.ts"
"./slider": "./src/slider/index.ts",
"./tooltip": "./src/tooltip/index.ts"
},
"files": [
"src",
@@ -85,6 +85,8 @@ export class MenuSubMenu extends MenuFocusable {
.catch(err => console.error(err));
});
this.menu.openSubMenu(menu);
// in case that the menu is not closed, but the component is removed,
this.disposables.add(unsub);
}

protected override render(): unknown {
@@ -18,6 +18,7 @@ export const LoadingIcon = ({
viewBox="0 0 24 24"
xmlns="http://www.w3.org/2000/svg"
fill="none"
style="fill: none;"
>
<style>
.spinner {
@@ -1,3 +1,4 @@
import { effects as tooltipEffects } from '../tooltip/effect.js';
import { EditorIconButton } from './icon-button.js';
import {
EditorMenuAction,
@@ -6,7 +7,6 @@ import {
} from './menu-button.js';
import { EditorToolbarSeparator } from './separator.js';
import { EditorToolbar } from './toolbar.js';
import { Tooltip } from './tooltip.js';

export { EditorChevronDown } from './chevron-down.js';
export { ToolbarMoreMenuConfigExtension } from './config.js';
@@ -20,7 +20,6 @@ export { MenuContext } from './menu-context.js';
export { EditorToolbarSeparator } from './separator.js';
export { darkToolbarStyles, lightToolbarStyles } from './styles.js';
export { EditorToolbar } from './toolbar.js';
export { Tooltip } from './tooltip.js';
export type {
AdvancedMenuItem,
FatMenuItems,
@@ -38,11 +37,12 @@ export {
} from './utils.js';

export function effects() {
tooltipEffects();

customElements.define('editor-toolbar-separator', EditorToolbarSeparator);
customElements.define('editor-toolbar', EditorToolbar);
customElements.define('editor-icon-button', EditorIconButton);
customElements.define('editor-menu-button', EditorMenuButton);
customElements.define('editor-menu-content', EditorMenuContent);
customElements.define('editor-menu-action', EditorMenuAction);
customElements.define('affine-tooltip', Tooltip);
}
blocksuite/affine/components/src/tooltip/effect.ts (new file, 7 lines)
@@ -0,0 +1,7 @@
import { Tooltip } from './tooltip.js';

export function effects() {
if (!customElements.get('affine-tooltip')) {
customElements.define('affine-tooltip', Tooltip);
}
}
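The guard in effect.ts makes effects() idempotent: customElements.define throws if a tag name is already registered, so checking customElements.get first lets multiple entry points register the tooltip without crashing. A minimal usage sketch (calling it twice here is only for illustration; the import path is the package export added in this diff):

```ts
import { effects as tooltipEffects } from '@blocksuite/affine-components/tooltip';

// The first call registers <affine-tooltip>; the second is a no-op instead of
// throwing because the name is already in the custom element registry.
tooltipEffects();
tooltipEffects();
```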
blocksuite/affine/components/src/tooltip/index.ts (new file, 2 lines)
@@ -0,0 +1,2 @@
export { effects } from './effect.js';
export { Tooltip } from './tooltip.js';
@@ -116,6 +116,7 @@ export class EdgelessTemplateButton extends EdgelessToolbarToolMixin(
`;

private _cleanup: (() => void) | null = null;
private _autoUpdateCleanup: (() => void) | null = null;

private _prevTool: ToolOptionWithType | null = null;

@@ -128,6 +129,11 @@ export class EdgelessTemplateButton extends EdgelessToolbarToolMixin(
return [TemplateCard1[theme], TemplateCard2[theme], TemplateCard3[theme]];
}

override connectedCallback() {
super.connectedCallback();
this.disposables.add(() => this._autoUpdateCleanup?.());
}

private _closePanel() {
if (this._openedPanel) {
this._openedPanel.remove();
@@ -175,8 +181,8 @@ export class EdgelessTemplateButton extends EdgelessToolbarToolMixin(

requestAnimationFrame(() => {
const arrowEl = panel.renderRoot.querySelector('.arrow') as HTMLElement;

autoUpdate(this, panel, () => {
this._autoUpdateCleanup?.();
this._autoUpdateCleanup = autoUpdate(this, panel, () => {
computePosition(this, panel, {
placement: 'top',
middleware: [offset(20), arrow({ element: arrowEl }), shift()],
@@ -22,8 +22,11 @@ import { isEqual } from 'lodash-es';
})
export class InlineComment extends WithDisposable(ShadowlessElement) {
static override styles = css`
inline-comment {
display: inline;
}

inline-comment.unresolved {
display: inline-block;
background-color: ${unsafeCSSVarV2('block/comment/highlightDefault')};
border-bottom: 2px solid
${unsafeCSSVarV2('block/comment/highlightUnderline')};
@@ -20,6 +20,7 @@
"@blocksuite/affine-block-paragraph": "workspace:*",
"@blocksuite/affine-block-surface": "workspace:*",
"@blocksuite/affine-block-surface-ref": "workspace:*",
"@blocksuite/affine-block-table": "workspace:*",
"@blocksuite/affine-components": "workspace:*",
"@blocksuite/affine-ext-loader": "workspace:*",
"@blocksuite/affine-fragment-doc-title": "workspace:*",
@@ -18,6 +18,7 @@ import {
} from '@blocksuite/affine-block-paragraph';
import { DefaultTool, getSurfaceBlock } from '@blocksuite/affine-block-surface';
import { insertSurfaceRefBlockCommand } from '@blocksuite/affine-block-surface-ref';
import { insertTableBlockCommand } from '@blocksuite/affine-block-table';
import { toggleEmbedCardCreateModal } from '@blocksuite/affine-components/embed-card-modal';
import { toast } from '@blocksuite/affine-components/toast';
import { insertInlineLatex } from '@blocksuite/affine-inline-latex';
@@ -40,14 +41,20 @@ import {
deleteSelectedModelsCommand,
draftSelectedModelsCommand,
duplicateSelectedModelsCommand,
focusBlockEnd,
getBlockSelectionsCommand,
getSelectedModelsCommand,
getTextSelectionCommand,
} from '@blocksuite/affine-shared/commands';
import { REFERENCE_NODE } from '@blocksuite/affine-shared/consts';
import {
FeatureFlagService,
TelemetryProvider,
} from '@blocksuite/affine-shared/services';
import type { AffineTextStyleAttributes } from '@blocksuite/affine-shared/types';
import {
createDefaultDoc,
isInsideBlockByFlavour,
openSingleFileWith,
type Signal,
} from '@blocksuite/affine-shared/utils';
@@ -87,6 +94,7 @@ import {
RedoIcon,
RightTabIcon,
StrikeThroughIcon,
TableIcon,
TeXIcon,
TextIcon,
TodayIcon,
@@ -258,6 +266,62 @@ const textToolActionItems: KeyboardToolbarActionItem[] = [
.run();
},
},
{
name: 'Table',
icon: TableIcon(),
showWhen: ({ std, rootComponent: { model } }) =>
std.store.schema.flavourSchemaMap.has('affine:table') &&
!isInsideBlockByFlavour(std.store, model, 'affine:edgeless-text'),
action: ({ std }) => {
std.command
.chain()
.pipe(getSelectedModelsCommand)
.pipe(insertTableBlockCommand, {
place: 'after',
removeEmptyLine: true,
})
.pipe(({ insertedTableBlockId }) => {
if (insertedTableBlockId) {
const telemetry = std.getOptional(TelemetryProvider);
telemetry?.track('BlockCreated', {
blockType: 'affine:table',
});
}
})
.run();
},
},
{
name: 'Callout',
icon: FontIcon(),
showWhen: ({ std, rootComponent: { model } }) => {
return (
std.get(FeatureFlagService).getFlag('enable_callout') &&
!isInsideBlockByFlavour(model.store, model, 'affine:edgeless-text')
);
},
action: ({ rootComponent: { model }, std }) => {
const { store } = model;
const parent = store.getParent(model);
if (!parent) return;

const index = parent.children.indexOf(model);
if (index === -1) return;
const calloutId = store.addBlock('affine:callout', {}, parent, index + 1);
if (!calloutId) return;
const paragraphId = store.addBlock('affine:paragraph', {}, calloutId);
if (!paragraphId) return;
std.host.updateComplete
.then(() => {
const paragraph = std.view.getBlock(paragraphId);
if (!paragraph) return;
std.command.exec(focusBlockEnd, {
focusBlock: paragraph,
});
})
.catch(console.error);
},
},
];

const listToolActionItems: KeyboardToolbarActionItem[] = [
@@ -17,6 +17,7 @@
{ "path": "../../blocks/paragraph" },
{ "path": "../../blocks/surface" },
{ "path": "../../blocks/surface-ref" },
{ "path": "../../blocks/table" },
{ "path": "../../components" },
{ "path": "../../ext-loader" },
{ "path": "../../fragments/doc-title" },
@@ -343,7 +343,18 @@ export class LinkedDocPopover extends SignalWatcher(
override willUpdate() {
if (!this.hasUpdated) {
const updatePosition = throttle(() => {
this._position = getPopperPosition(this, this.context.startNativeRange);
this._position = getPopperPosition(
{
getBoundingClientRect: () => {
return {
...this.getBoundingClientRect(),
// Workaround: the width of the popover is zero when it is not rendered
width: 280,
};
},
},
this.context.startNativeRange
);
}, 10);

this.disposables.addFromEvent(window, 'resize', updatePosition);
@@ -30,9 +30,9 @@ function inlineTextStyles(
}

return styleMap({
'font-weight': props.bold ? 'bold' : 'normal',
'font-style': props.italic ? 'italic' : 'normal',
'text-decoration': textDecorations.length > 0 ? textDecorations : 'none',
'font-weight': props.bold ? 'bold' : 'inherit',
'font-style': props.italic ? 'italic' : 'inherit',
'text-decoration': textDecorations.length > 0 ? textDecorations : 'inherit',
...inlineCodeStyle,
});
}
@@ -372,3 +372,75 @@ Generated by [AVA](https://avajs.dev).
[assistant]: Quantum computing uses quantum mechanics principles.`,
promptName: 'Summary as title',
}

## should handle copilot cron jobs correctly

> daily job scheduling calls

[
{
args: [
'copilot.session.cleanupEmptySessions',
{},
{
jobId: 'daily-copilot-cleanup-empty-sessions',
},
],
},
{
args: [
'copilot.session.generateMissingTitles',
{},
{
jobId: 'daily-copilot-generate-missing-titles',
},
],
},
{
args: [
'copilot.workspace.cleanupTrashedDocEmbeddings',
{},
{
jobId: 'daily-copilot-cleanup-trashed-doc-embeddings',
},
],
},
]

> cleanup empty sessions calls

[
{
args: [
'Date',
],
},
]

> title generation calls

{
jobCalls: [
{
args: [
'copilot.session.generateTitle',
{
sessionId: 'session1',
},
],
},
{
args: [
'copilot.session.generateTitle',
{
sessionId: 'session2',
},
],
},
],
modelCalls: [
{
args: [],
},
],
}
Binary file not shown.
@@ -207,6 +207,7 @@ const retry = async (
try {
await callback(t);
} catch (e) {
console.error(`Error during ${action}:`, e);
t.log(`Error during ${action}:`, e);
throw e;
}
@@ -350,10 +351,10 @@ The term **“CRDT”** was first introduced by Marc Shapiro, Nuno Preguiça, Ca
params: {
files: [
{
blobId: 'euclidean_distance',
fileName: 'euclidean_distance.rs',
fileType: 'text/rust',
fileContent: TestAssets.Code,
blobId: 'todo_md',
fileName: 'todo.md',
fileType: 'text/markdown',
fileContent: TestAssets.TODO,
},
],
},
@@ -475,6 +476,7 @@ The term **“CRDT”** was first introduced by Marc Shapiro, Nuno Preguiça, Ca
},
},
],
config: { model: 'gemini-2.5-pro' },
verifier: (t: ExecutionContext<Tester>, result: string) => {
t.notThrows(() => {
TranscriptionResponseSchema.parse(JSON.parse(result));
@@ -483,6 +485,34 @@ The term **“CRDT”** was first introduced by Marc Shapiro, Nuno Preguiça, Ca
type: 'structured' as const,
prefer: CopilotProviderType.Gemini,
},
{
promptName: ['Conversation Summary'],
messages: [
{
role: 'user' as const,
content: '',
params: {
messages: [
{ role: 'user', content: 'what is single source of truth?' },
{ role: 'assistant', content: TestAssets.SSOT },
],
focus: 'technical decisions',
length: 'comprehensive',
},
},
],
verifier: (t: ExecutionContext<Tester>, result: string) => {
assertNotWrappedInCodeBlock(t, result);
const cleared = result.toLowerCase();
t.assert(
cleared.includes('single source of truth') ||
/single.*source/.test(cleared) ||
cleared.includes('ssot'),
'should include original keyword'
);
},
type: 'text' as const,
},
{
promptName: [
'Summary',
@@ -668,11 +698,12 @@ for (const {
t.truthy(provider, 'should have provider');
await retry(`action: ${promptName}`, t, async t => {
const finalConfig = Object.assign({}, prompt.config, config);
const modelId = finalConfig.model || prompt.model;

switch (type) {
case 'text': {
const result = await provider.text(
{ modelId: prompt.model },
{ modelId },
[
...prompt.finish(
messages.reduce(
@@ -691,7 +722,7 @@ for (const {
}
case 'structured': {
const result = await provider.structure(
{ modelId: prompt.model },
{ modelId },
[
...prompt.finish(
messages.reduce(
@@ -710,7 +741,7 @@ for (const {
case 'object': {
const streamObjects: StreamObject[] = [];
for await (const chunk of provider.streamObject(
{ modelId: prompt.model },
{ modelId },
[
...prompt.finish(
messages.reduce(
@@ -742,7 +773,7 @@ for (const {
});
}
const stream = provider.streamImages(
{ modelId: prompt.model },
{ modelId },
[
...prompt.finish(
finalMessage.reduce(
@@ -290,6 +290,7 @@ test('should fork session correctly', async t => {

const assertForkSession = async (
workspaceId: string,
docId: string,
sessionId: string,
lastMessageId: string | undefined,
error: string,
@@ -300,13 +301,7 @@ test('should fork session correctly', async t => {
}
) =>
await asserter(
forkCopilotSession(
app,
workspaceId,
randomUUID(),
sessionId,
lastMessageId
)
forkCopilotSession(app, workspaceId, docId, sessionId, lastMessageId)
);

// prepare session
@@ -330,6 +325,7 @@ test('should fork session correctly', async t => {
// should be able to fork session
forkedSessionId = await assertForkSession(
id,
docId,
sessionId,
latestMessageId!,
'should be able to fork session with cloud workspace that user can access'
@@ -340,6 +336,7 @@ test('should fork session correctly', async t => {
{
forkedSessionId = await assertForkSession(
id,
docId,
sessionId,
undefined,
'should be able to fork session without latestMessageId'
@@ -348,18 +345,25 @@ test('should fork session correctly', async t => {

// should not be able to fork session with wrong latestMessageId
{
await assertForkSession(id, sessionId, 'wrong-message-id', '', async x => {
await t.throwsAsync(
x,
{ instanceOf: Error },
'should not able to fork session with wrong latestMessageId'
);
});
await assertForkSession(
id,
docId,
sessionId,
'wrong-message-id',
'',
async x => {
await t.throwsAsync(
x,
{ instanceOf: Error },
'should not able to fork session with wrong latestMessageId'
);
}
);
}

{
const u2 = await app.signupV1();
await assertForkSession(id, sessionId, randomUUID(), '', async x => {
await assertForkSession(id, docId, sessionId, randomUUID(), '', async x => {
await t.throwsAsync(
x,
{ instanceOf: Error },
@@ -371,7 +375,7 @@ test('should fork session correctly', async t => {
const inviteId = await inviteUser(app, id, u2.email);
await app.switchUser(u2);
await acceptInviteById(app, id, inviteId, false);
await assertForkSession(id, sessionId, randomUUID(), '', async x => {
await assertForkSession(id, docId, sessionId, randomUUID(), '', async x => {
await t.throwsAsync(
x,
{ instanceOf: Error },
@@ -389,6 +393,7 @@ test('should fork session correctly', async t => {
await app.switchUser(u2);
await assertForkSession(
id,
docId,
forkedSessionId,
latestMessageId!,
'should able to fork a forked session created by other user'
@@ -456,6 +461,29 @@ test('should create message correctly', async t => {
sessionId,
undefined,
undefined,
new File([new Uint8Array(pngData)], '1.png', { type: 'image/png' })
);
t.truthy(messageId, 'should be able to create message with blob');
}

// with attachments
{
const { id } = await createWorkspace(app);
const sessionId = await createCopilotSession(
app,
id,
randomUUID(),
textPromptName
);
const smallestPng =
'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAgAAAAIAQMAAAD+wSzIAAAABlBMVEX///+/v7+jQ3Y5AAAADklEQVQI12P4AIX8EAgALgAD/aNpbtEAAAAASUVORK5CYII';
const pngData = await fetch(smallestPng).then(res => res.arrayBuffer());
const messageId = await createCopilotMessage(
app,
sessionId,
undefined,
undefined,
undefined,
[new File([new Uint8Array(pngData)], '1.png', { type: 'image/png' })]
);
t.truthy(messageId, 'should be able to create message with blobs');
@@ -18,6 +18,7 @@ import {
} from '../models';
import { CopilotModule } from '../plugins/copilot';
import { CopilotContextService } from '../plugins/copilot/context';
import { CopilotCronJobs } from '../plugins/copilot/cron';
import {
CopilotEmbeddingJob,
MockEmbeddingClient,
@@ -77,6 +78,7 @@ type Context = {
jobs: CopilotEmbeddingJob;
storage: CopilotStorage;
workflow: CopilotWorkflowService;
cronJobs: CopilotCronJobs;
executors: {
image: CopilotChatImageExecutor;
text: CopilotChatTextExecutor;
@@ -137,6 +139,7 @@ test.before(async t => {
const jobs = module.get(CopilotEmbeddingJob);
const transcript = module.get(CopilotTranscriptionService);
const workspaceEmbedding = module.get(CopilotWorkspaceService);
const cronJobs = module.get(CopilotCronJobs);

t.context.module = module;
t.context.auth = auth;
@@ -153,6 +156,7 @@ test.before(async t => {
t.context.jobs = jobs;
t.context.transcript = transcript;
t.context.workspaceEmbedding = workspaceEmbedding;
t.context.cronJobs = cronJobs;

t.context.executors = {
image: module.get(CopilotChatImageExecutor),
@@ -1931,3 +1935,71 @@ test('should handle generateSessionTitle correctly under various conditions', as
);
}
});

test('should handle copilot cron jobs correctly', async t => {
const { cronJobs, copilotSession } = t.context;

// mock calls
const mockCleanupResult = { removed: 2, cleaned: 3 };
const mockSessions = [
{ id: 'session1', _count: { messages: 1 } },
{ id: 'session2', _count: { messages: 2 } },
];
const cleanupStub = Sinon.stub(
copilotSession,
'cleanupEmptySessions'
).resolves(mockCleanupResult);
const toBeGenerateStub = Sinon.stub(
copilotSession,
'toBeGenerateTitle'
).resolves(mockSessions);
const jobAddStub = Sinon.stub(cronJobs['jobs'], 'add').resolves();

// daily cleanup job scheduling
{
await cronJobs.dailyCleanupJob();
t.snapshot(
jobAddStub.getCalls().map(call => ({
args: call.args,
})),
'daily job scheduling calls'
);

jobAddStub.reset();
cleanupStub.reset();
toBeGenerateStub.reset();
}

// cleanup empty sessions
{
// mock
cleanupStub.resolves(mockCleanupResult);
toBeGenerateStub.resolves(mockSessions);

await cronJobs.cleanupEmptySessions();
t.snapshot(
cleanupStub.getCalls().map(call => ({
args: call.args.map(arg => (arg instanceof Date ? 'Date' : arg)), // Replace Date with string for stable snapshot
})),
'cleanup empty sessions calls'
);
}

// generate missing titles
await cronJobs.generateMissingTitles();
t.snapshot(
{
modelCalls: toBeGenerateStub.getCalls().map(call => ({
args: call.args,
})),
jobCalls: jobAddStub.getCalls().map(call => ({
args: call.args,
})),
},
'title generation calls'
);

cleanupStub.restore();
toBeGenerateStub.restore();
jobAddStub.restore();
});
@@ -13,74 +13,45 @@ Generated by [AVA](https://avajs.dev).
␊
␊
␊
␊
␊
# You own your data, with no compromises␊
␊
␊
## Local-first & Real-time collaborative␊
␊
␊
We love the idea proposed by Ink & Switch in the famous article about you owning your data, despite the cloud. Furthermore, AFFiNE is the first all-in-one workspace that keeps your data ownership with no compromises on real-time collaboration and editing experience.␊
␊
␊
AFFiNE is a local-first application upon CRDTs with real-time collaboration support. Your data is always stored locally while multiple nodes remain synced in real-time.␊
␊
␊
␊
␊
␊
### Blocks that assemble your next docs, tasks kanban or whiteboard␊
␊
␊
There is a large overlap of their atomic "building blocks" between these apps. They are neither open source nor have a plugin system like VS Code for contributors to customize. We want to have something that contains all the features we love and goes one step further.␊
␊
␊
We are building AFFiNE to be a fundamental open source platform that contains all the building blocks for docs, task management and visual collaboration, hoping you can shape your next workflow with us that can make your life better and also connect others, too.␊
␊
␊
If you want to learn more about the product design of AFFiNE, here goes the concepts:␊
␊
␊
To Shape, not to adapt. AFFiNE is built for individuals & teams who care about their data, who refuse vendor lock-in, and who want to have control over their essential tools.␊
␊
␊
## A true canvas for blocks in any form␊
␊
␊
[Many editor apps](http://notion.so) claimed to be a canvas for productivity. Since _the Mother of All Demos,_ Douglas Engelbart, a creative and programable digital workspace has been a pursuit and an ultimate mission for generations of tool makers.␊
␊
␊
␊
␊
␊
"We shape our tools and thereafter our tools shape us”. A lot of pioneers have inspired us a long the way, e.g.:␊
␊
␊
* Quip & Notion with their great concept of "everything is a block"␊
␊
␊
* Trello with their Kanban␊
␊
␊
* Airtable & Miro with their no-code programable datasheets␊
␊
␊
* Miro & Whimiscal with their edgeless visual whiteboard␊
␊
␊
* Remnote & Capacities with their object-based tag system␊
␊
␊
For more details, please refer to our [RoadMap](https://docs.affine.pro/docs/core-concepts/roadmap)␊
␊
␊
## Self Host␊
␊
␊
Self host AFFiNE␊
␊
␊
||Title|Tag|␊
|---|---|---|␊
|Affine Development|Affine Development|<span data-affine-option data-value="AxSe-53xjX" data-option-color="var(--affine-tag-pink)">AFFiNE</span>|␊
@@ -91,16 +62,12 @@ Generated by [AVA](https://avajs.dev).
|Miro & Whimiscal with their edgeless visual whiteboard|Miro & Whimiscal with their edgeless visual whiteboard|<span data-affine-option data-value="HgHsKOUINZ" data-option-color="var(--affine-tag-blue)">Reference</span>|␊
|Remnote & Capacities with their object-based tag system|Remnote & Capacities with their object-based tag system||␊
␊
␊
## Affine Development␊
␊
␊
For developer or installation guides, please go to [AFFiNE Development](https://docs.affine.pro/docs/development/quick-start)␊
␊
␊
␊
␊
␊
`,
title: 'Write, Draw, Plan all at Once.',
}
Binary file not shown.
@@ -99,3 +99,56 @@ e2e(
t.is(result2.workspace.doc.public, true);
}
);

e2e('should get doc with title and summary', async t => {
const owner = await app.signup();

const workspace = await app.create(Mockers.Workspace, {
owner: { id: owner.id },
});

const docSnapshot = await app.create(Mockers.DocSnapshot, {
workspaceId: workspace.id,
user: owner,
});
const doc = await app.create(Mockers.DocMeta, {
workspaceId: workspace.id,
docId: docSnapshot.id,
title: 'doc1',
summary: 'summary1',
});

const result = await app.gql({
query: getWorkspacePageByIdQuery,
variables: { workspaceId: workspace.id, pageId: doc.docId },
});

t.is(result.workspace.doc.title, doc.title);
t.is(result.workspace.doc.summary, doc.summary);
});

e2e('should get doc with title and null summary', async t => {
const owner = await app.signup();

const workspace = await app.create(Mockers.Workspace, {
owner: { id: owner.id },
});

const docSnapshot = await app.create(Mockers.DocSnapshot, {
workspaceId: workspace.id,
user: owner,
});
const doc = await app.create(Mockers.DocMeta, {
workspaceId: workspace.id,
docId: docSnapshot.id,
title: 'doc1',
});

const result = await app.gql({
query: getWorkspacePageByIdQuery,
variables: { workspaceId: workspace.id, pageId: doc.docId },
});

t.is(result.workspace.doc.title, doc.title);
t.is(result.workspace.doc.summary, null);
});
@@ -111,6 +111,19 @@ export class MockCopilotProvider extends OpenAIProvider {
},
],
},
{
id: 'gemini-2.5-pro',
capabilities: [
{
input: [ModelInputType.Text, ModelInputType.Image],
output: [
ModelOutputType.Text,
ModelOutputType.Object,
ModelOutputType.Structured,
],
},
],
},
];

override async text(
@@ -565,3 +565,65 @@ Generated by [AVA](https://avajs.dev).
workspaceSessionExists: true,
},
}

## should cleanup empty sessions correctly

> cleanup empty sessions results

{
cleanupResult: {
cleaned: 0,
removed: 0,
},
remainingSessions: [
{
deleted: false,
pinned: false,
type: 'zeroCost',
},
{
deleted: false,
pinned: false,
type: 'zeroCost',
},
{
deleted: false,
pinned: false,
type: 'noMessages',
},
{
deleted: false,
pinned: false,
type: 'noMessages',
},
{
deleted: false,
pinned: false,
type: 'recent',
},
{
deleted: false,
pinned: false,
type: 'withMessages',
},
],
}

## should get sessions for title generation correctly

> sessions for title generation results

{
onlyValidSessionsReturned: true,
sessions: [
{
assistantMessageCount: 1,
isValid: true,
},
{
assistantMessageCount: 2,
isValid: true,
},
],
total: 2,
}
Binary file not shown.
@@ -89,3 +89,19 @@ Generated by [AVA](https://avajs.dev).
> should not find docs to embed

0

## should filter outdated doc id style in embedding status

> should include modern doc format

{
embedded: 0,
total: 1,
}

> should count docs after filtering outdated

{
embedded: 1,
total: 1,
}
Binary file not shown.
@@ -164,11 +164,14 @@ test('should insert embedding by doc id', async t => {
);

{
const ret = await t.context.copilotContext.hasWorkspaceEmbedding(
const ret = await t.context.copilotContext.listWorkspaceEmbedding(
workspace.id,
[docId]
);
t.true(ret.has(docId), 'should return doc id when embedding is inserted');
t.true(
ret.includes(docId),
'should return doc id when embedding is inserted'
);
}

{
@@ -317,8 +320,8 @@ test('should merge doc status correctly', async t => {

const hasEmbeddingStub = Sinon.stub(
t.context.copilotContext,
'hasWorkspaceEmbedding'
).resolves(new Set<string>());
'listWorkspaceEmbedding'
).resolves([]);

const stubResult = await t.context.copilotContext.mergeDocStatus(
workspace.id,
@@ -917,3 +917,178 @@ test('should handle fork and session attachment operations', async t => {
'attach and detach operation results'
);
});

test('should cleanup empty sessions correctly', async t => {
const { copilotSession, db } = t.context;
await createTestPrompts(copilotSession, db);

const oneDayAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);
const twoHoursAgo = new Date(Date.now() - 2 * 60 * 60 * 1000);

// should be deleted
const neverUsedSessionIds: string[] = [randomUUID(), randomUUID()];
await Promise.all(
neverUsedSessionIds.map(async id => {
await createTestSession(t, { sessionId: id });
await db.aiSession.update({
where: { id },
data: { messageCost: 0, updatedAt: oneDayAgo },
});
})
);

// should be marked as deleted
const emptySessionIds: string[] = [randomUUID(), randomUUID()];
await Promise.all(
emptySessionIds.map(async id => {
await createTestSession(t, { sessionId: id });
await db.aiSession.update({
where: { id },
data: { messageCost: 100, updatedAt: oneDayAgo },
});
})
);

// should not be affected
const recentSessionId = randomUUID();
await createTestSession(t, { sessionId: recentSessionId });
await db.aiSession.update({
where: { id: recentSessionId },
data: { messageCost: 0, updatedAt: twoHoursAgo },
});

// Create session with messages (should not be affected)
const sessionWithMsgId = randomUUID();
await createSessionWithMessages(
t,
{ sessionId: sessionWithMsgId },
'test message'
);

const result = await copilotSession.cleanupEmptySessions(oneDayAgo);

const remainingSessions = await db.aiSession.findMany({
where: {
id: {
in: [
...neverUsedSessionIds,
...emptySessionIds,
recentSessionId,
sessionWithMsgId,
],
},
},
select: { id: true, deletedAt: true, pinned: true },
});

t.snapshot(
{
cleanupResult: result,
remainingSessions: remainingSessions.map(s => ({
deleted: !!s.deletedAt,
pinned: s.pinned,
type: neverUsedSessionIds.includes(s.id)
? 'zeroCost'
: emptySessionIds.includes(s.id)
? 'noMessages'
: s.id === recentSessionId
? 'recent'
: 'withMessages',
})),
},
'cleanup empty sessions results'
);
});

test('should get sessions for title generation correctly', async t => {
const { copilotSession, db } = t.context;
await createTestPrompts(copilotSession, db);

// create valid sessions with messages
const sessionIds: string[] = [randomUUID(), randomUUID()];
await Promise.all(
sessionIds.map(async (id, index) => {
await createTestSession(t, { sessionId: id });
await db.aiSession.update({
where: { id },
data: {
updatedAt: new Date(Date.now() - index * 1000),
messages: {
create: Array.from({ length: index + 1 }, (_, i) => ({
role: 'assistant',
content: `assistant message ${i}`,
})),
},
},
});
})
);

// create excluded sessions
const excludedSessions = [
{
reason: 'hasTitle',
setupFn: async (id: string) => {
await createTestSession(t, { sessionId: id });
await db.aiSession.update({
where: { id },
data: { title: 'Existing Title' },
});
},
},
{
reason: 'isDeleted',
setupFn: async (id: string) => {
await createTestSession(t, { sessionId: id });
await db.aiSession.update({
where: { id },
data: { deletedAt: new Date() },
});
},
},
{
reason: 'noMessages',
setupFn: async (id: string) => {
await createTestSession(t, { sessionId: id });
},
},
{
reason: 'isAction',
setupFn: async (id: string) => {
await createTestSession(t, {
sessionId: id,
promptName: TEST_PROMPTS.ACTION,
});
},
},
{
reason: 'noAssistantMessages',
setupFn: async (id: string) => {
await createTestSession(t, { sessionId: id });
await db.aiSessionMessage.create({
data: { sessionId: id, role: 'user', content: 'User message only' },
});
},
},
];

await Promise.all(
excludedSessions.map(async session => {
await session.setupFn(randomUUID());
})
);

const result = await copilotSession.toBeGenerateTitle();

t.snapshot(
{
total: result.length,
sessions: result.map(s => ({
assistantMessageCount: s._count.messages,
isValid: sessionIds.includes(s.id),
})),
onlyValidSessionsReturned: result.every(s => sessionIds.includes(s.id)),
},
'sessions for title generation results'
);
});
@@ -214,6 +214,21 @@ test('should insert and search embedding', async t => {
);
t.false(results.includes(docId), 'docs containing `$` should be excluded');
}

{
const docId = 'empty_doc';
await t.context.doc.upsert({
spaceId: workspace.id,
docId: docId,
blob: Uint8Array.from([0, 0]),
timestamp: Date.now(),
editorId: user.id,
});
const results = await t.context.copilotWorkspace.findDocsToEmbed(
workspace.id
);
t.false(results.includes(docId), 'empty documents should be excluded');
}
});

test('should check need to be embedded', async t => {
@@ -291,3 +306,50 @@ test('should check embedding table', async t => {
// t.false(ret, 'should return false when embedding table is not available');
// }
});

test('should filter outdated doc id style in embedding status', async t => {
const docId = randomUUID();
const outdatedDocId = `${workspace.id}:space:${docId}`;

await t.context.doc.upsert({
spaceId: workspace.id,
docId,
blob: Uint8Array.from([1, 2, 3]),
timestamp: Date.now(),
editorId: user.id,
});

await t.context.doc.upsert({
spaceId: workspace.id,
docId: outdatedDocId,
blob: Uint8Array.from([1, 2, 3]),
timestamp: Date.now(),
editorId: user.id,
});

{
const status = await t.context.copilotWorkspace.getEmbeddingStatus(
workspace.id
);
t.snapshot(status, 'should include modern doc format');
}

{
await t.context.copilotContext.insertWorkspaceEmbedding(
workspace.id,
docId,
[
{
index: 0,
content: 'content',
embedding: Array.from({ length: 1024 }, () => 1),
},
]
);

const status = await t.context.copilotWorkspace.getEmbeddingStatus(
workspace.id
);
t.snapshot(status, 'should count docs after filtering outdated');
}
});
@@ -669,7 +669,10 @@ test('should get doc info', async t => {
};

await t.context.doc.upsert(snapshot);
await t.context.doc.upsertMeta(workspace.id, docId);
await t.context.doc.upsertMeta(workspace.id, docId, {
title: 'test title',
summary: 'test summary',
});

const docInfo = await t.context.doc.getDocInfo(workspace.id, docId);

@@ -679,6 +682,8 @@ test('should get doc info', async t => {
updatedAt: new Date(snapshot.timestamp),
creatorId: user.id,
lastUpdaterId: user.id,
title: 'test title',
summary: 'test summary',
});
});
|
||||
@@ -433,7 +433,7 @@ export async function submitAudioTranscription(
|
||||
for (const [idx, buffer] of content.entries()) {
|
||||
resp = resp.attach(idx.toString(), buffer, {
|
||||
filename: fileName,
|
||||
contentType: 'application/octet-stream',
|
||||
contentType: 'audio/opus',
|
||||
});
|
||||
}
|
||||
|
||||
@@ -554,52 +554,73 @@ export async function createCopilotMessage(
sessionId: string,
content?: string,
attachments?: string[],
blob?: File,
blobs?: File[],
params?: Record<string, string>
): Promise<string> {
let resp = app
.POST('/graphql')
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
.field(
'operations',
JSON.stringify({
query: `
const gql = {
query: `
mutation createCopilotMessage($options: CreateChatMessageInput!) {
createCopilotMessage(options: $options)
}
`,
variables: {
options: { sessionId, content, attachments, blobs: [], params },
},
})
)
.field(
'map',
JSON.stringify(
Array.from<any>({ length: blobs?.length ?? 0 }).reduce(
(acc, _, idx) => {
acc[idx.toString()] = [`variables.options.blobs.${idx}`];
return acc;
},
{}
)
)
);
if (blobs && blobs.length) {
for (const [idx, file] of blobs.entries()) {
resp = resp.attach(
idx.toString(),
Buffer.from(await file.arrayBuffer()),
{
filename: file.name || `file${idx}`,
contentType: file.type || 'application/octet-stream',
}
variables: {
options: {
sessionId,
content,
attachments,
blob: null,
blobs: [],
params,
},
},
};

let resp = app
.POST('/graphql')
.set({ 'x-request-id': 'test', 'x-operation-name': 'test' });
if (blob || blobs) {
resp = resp.field('operations', JSON.stringify(gql));

if (blob) {
resp = resp.field(
'map',
JSON.stringify({ '0': ['variables.options.blob'] })
);
resp = resp.attach('0', Buffer.from(await blob.arrayBuffer()), {
filename: blob.name || 'file',
contentType: blob.type || 'application/octet-stream',
});
} else if (blobs && blobs.length) {
resp = resp.field(
'map',
JSON.stringify(
Array.from<any>({ length: blobs?.length ?? 0 }).reduce(
(acc, _, idx) => {
acc[idx.toString()] = [`variables.options.blobs.${idx}`];
return acc;
},
{}
)
)
);
for (const [idx, file] of blobs.entries()) {
resp = resp.attach(
idx.toString(),
Buffer.from(await file.arrayBuffer()),
{
filename: file.name || `file${idx}`,
contentType: file.type || 'application/octet-stream',
}
);
}
}
} else {
resp = resp.send(gql);
}

const res = await resp.expect(200);

console.log('createCopilotMessage', res.body);
return res.body.data.createCopilotMessage;
}
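The rewritten helper follows the common GraphQL multipart upload convention: an `operations` field carrying the query and variables with file slots left as null, a `map` field routing each numbered form part into a variable path, and then the binary parts themselves. A minimal sketch of the payload shape for the single-blob case; the sessionId value and the standalone object literals are illustrative only, not part of the diff:

```ts
// Sketch of the multipart fields built above for one attached file.
const operations = {
  query: `
    mutation createCopilotMessage($options: CreateChatMessageInput!) {
      createCopilotMessage(options: $options)
    }
  `,
  variables: {
    // File slots start as null; the server fills them from the mapped parts.
    options: { sessionId: 'some-session-id', blob: null, blobs: [], params: {} },
  },
};

// Form part "0" is routed into variables.options.blob.
const map = { '0': ['variables.options.blob'] };

console.log(JSON.stringify(operations), JSON.stringify(map));
```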
@@ -36,6 +36,7 @@ import { DocRendererModule } from './core/doc-renderer';
import { DocServiceModule } from './core/doc-service';
import { FeatureModule } from './core/features';
import { MailModule } from './core/mail';
import { MonitorModule } from './core/monitor';
import { NotificationModule } from './core/notification';
import { PermissionModule } from './core/permission';
import { QuotaModule } from './core/quota';
@@ -112,6 +113,8 @@ export const FunctionalityModules = [
WebSocketModule,
JobModule.forRoot(),
ModelsModule,
ScheduleModule.forRoot(),
MonitorModule,
];

export class AppModuleBuilder {
@@ -151,12 +154,8 @@ export function buildAppModule(env: Env) {
// basic
.use(...FunctionalityModules)

// enable schedule module on graphql server and doc service
.useIf(
() => env.flavors.graphql || env.flavors.doc,
ScheduleModule.forRoot(),
IndexerModule
)
// enable indexer module on graphql server and doc service
.useIf(() => env.flavors.graphql || env.flavors.doc, IndexerModule)

// auth
.use(UserModule, AuthModule, PermissionModule)
@@ -653,12 +653,19 @@ export const USER_FRIENDLY_ERRORS = {
},
no_copilot_provider_available: {
type: 'internal_server_error',
message: `No copilot provider available.`,
args: { modelId: 'string' },
message: ({ modelId }) => `No copilot provider available: ${modelId}`,
},
copilot_failed_to_generate_text: {
type: 'internal_server_error',
message: `Failed to generate text.`,
},
copilot_failed_to_generate_embedding: {
type: 'internal_server_error',
args: { provider: 'string', message: 'string' },
message: ({ provider, message }) =>
`Failed to generate embedding with ${provider}: ${message}`,
},
copilot_failed_to_create_message: {
type: 'internal_server_error',
message: `Failed to create chat message.`,
@@ -668,10 +668,14 @@ export class CopilotSessionDeleted extends UserFriendlyError {
super('action_forbidden', 'copilot_session_deleted', message);
}
}
@ObjectType()
class NoCopilotProviderAvailableDataType {
@Field() modelId!: string
}

export class NoCopilotProviderAvailable extends UserFriendlyError {
constructor(message?: string) {
super('internal_server_error', 'no_copilot_provider_available', message);
constructor(args: NoCopilotProviderAvailableDataType, message?: string | ((args: NoCopilotProviderAvailableDataType) => string)) {
super('internal_server_error', 'no_copilot_provider_available', message, args);
}
}

@@ -680,6 +684,17 @@ export class CopilotFailedToGenerateText extends UserFriendlyError {
super('internal_server_error', 'copilot_failed_to_generate_text', message);
}
}
@ObjectType()
class CopilotFailedToGenerateEmbeddingDataType {
@Field() provider!: string
@Field() message!: string
}

export class CopilotFailedToGenerateEmbedding extends UserFriendlyError {
constructor(args: CopilotFailedToGenerateEmbeddingDataType, message?: string | ((args: CopilotFailedToGenerateEmbeddingDataType) => string)) {
super('internal_server_error', 'copilot_failed_to_generate_embedding', message, args);
}
}

export class CopilotFailedToCreateMessage extends UserFriendlyError {
constructor(message?: string) {
@@ -1179,6 +1194,7 @@ export enum ErrorNames {
COPILOT_SESSION_DELETED,
NO_COPILOT_PROVIDER_AVAILABLE,
COPILOT_FAILED_TO_GENERATE_TEXT,
COPILOT_FAILED_TO_GENERATE_EMBEDDING,
COPILOT_FAILED_TO_CREATE_MESSAGE,
UNSPLASH_IS_NOT_CONFIGURED,
COPILOT_ACTION_TAKEN,
@@ -1239,5 +1255,5 @@ registerEnumType(ErrorNames, {
export const ErrorDataUnionType = createUnionType({
name: 'ErrorDataUnion',
types: () =>
[GraphqlBadRequestDataType, HttpRequestErrorDataType, QueryTooLongDataType, ValidationErrorDataType, WrongSignInCredentialsDataType, UnknownOauthProviderDataType, InvalidOauthCallbackCodeDataType, MissingOauthQueryParameterDataType, InvalidOauthResponseDataType, InvalidEmailDataType, InvalidPasswordLengthDataType, WorkspacePermissionNotFoundDataType, SpaceNotFoundDataType, MemberNotFoundInSpaceDataType, NotInSpaceDataType, AlreadyInSpaceDataType, SpaceAccessDeniedDataType, SpaceOwnerNotFoundDataType, SpaceShouldHaveOnlyOneOwnerDataType, DocNotFoundDataType, DocActionDeniedDataType, DocUpdateBlockedDataType, VersionRejectedDataType, InvalidHistoryTimestampDataType, DocHistoryNotFoundDataType, BlobNotFoundDataType, ExpectToGrantDocUserRolesDataType, ExpectToRevokeDocUserRolesDataType, ExpectToUpdateDocUserRoleDataType, NoMoreSeatDataType, UnsupportedSubscriptionPlanDataType, SubscriptionAlreadyExistsDataType, SubscriptionNotExistsDataType, SameSubscriptionRecurringDataType, SubscriptionPlanNotFoundDataType, CopilotDocNotFoundDataType, CopilotMessageNotFoundDataType, CopilotPromptNotFoundDataType, CopilotProviderNotSupportedDataType, CopilotProviderSideErrorDataType, CopilotInvalidContextDataType, CopilotContextFileNotSupportedDataType, CopilotFailedToModifyContextDataType, CopilotFailedToMatchContextDataType, CopilotFailedToMatchGlobalContextDataType, CopilotFailedToAddWorkspaceFileEmbeddingDataType, RuntimeConfigNotFoundDataType, InvalidRuntimeConfigTypeDataType, InvalidLicenseToActivateDataType, InvalidLicenseUpdateParamsDataType, UnsupportedClientVersionDataType, MentionUserDocAccessDeniedDataType, InvalidAppConfigDataType, InvalidAppConfigInputDataType, InvalidSearchProviderRequestDataType, InvalidIndexerInputDataType] as const,
[GraphqlBadRequestDataType, HttpRequestErrorDataType, QueryTooLongDataType, ValidationErrorDataType, WrongSignInCredentialsDataType, UnknownOauthProviderDataType, InvalidOauthCallbackCodeDataType, MissingOauthQueryParameterDataType, InvalidOauthResponseDataType, InvalidEmailDataType, InvalidPasswordLengthDataType, WorkspacePermissionNotFoundDataType, SpaceNotFoundDataType, MemberNotFoundInSpaceDataType, NotInSpaceDataType, AlreadyInSpaceDataType, SpaceAccessDeniedDataType, SpaceOwnerNotFoundDataType, SpaceShouldHaveOnlyOneOwnerDataType, DocNotFoundDataType, DocActionDeniedDataType, DocUpdateBlockedDataType, VersionRejectedDataType, InvalidHistoryTimestampDataType, DocHistoryNotFoundDataType, BlobNotFoundDataType, ExpectToGrantDocUserRolesDataType, ExpectToRevokeDocUserRolesDataType, ExpectToUpdateDocUserRoleDataType, NoMoreSeatDataType, UnsupportedSubscriptionPlanDataType, SubscriptionAlreadyExistsDataType, SubscriptionNotExistsDataType, SameSubscriptionRecurringDataType, SubscriptionPlanNotFoundDataType, NoCopilotProviderAvailableDataType, CopilotFailedToGenerateEmbeddingDataType, CopilotDocNotFoundDataType, CopilotMessageNotFoundDataType, CopilotPromptNotFoundDataType, CopilotProviderNotSupportedDataType, CopilotProviderSideErrorDataType, CopilotInvalidContextDataType, CopilotContextFileNotSupportedDataType, CopilotFailedToModifyContextDataType, CopilotFailedToMatchContextDataType, CopilotFailedToMatchGlobalContextDataType, CopilotFailedToAddWorkspaceFileEmbeddingDataType, RuntimeConfigNotFoundDataType, InvalidRuntimeConfigTypeDataType, InvalidLicenseToActivateDataType, InvalidLicenseUpdateParamsDataType, UnsupportedClientVersionDataType, MentionUserDocAccessDeniedDataType, InvalidAppConfigDataType, InvalidAppConfigInputDataType, InvalidSearchProviderRequestDataType, InvalidIndexerInputDataType] as const,
});

@@ -59,7 +59,9 @@ export type KnownMetricScopes =
  | 'mail'
  | 'ai'
  | 'event'
  | 'queue';
  | 'queue'
  | 'storage'
  | 'process';

const metricCreators: MetricCreators = {
  counter(meter: Meter, name: string, opts?: MetricOptions) {
@@ -1,3 +1,5 @@
import { setTimeout } from 'node:timers/promises';

import { defer as rxjsDefer, retry } from 'rxjs';

export class RetryablePromise<T> extends Promise<T> {

@@ -48,3 +50,7 @@ export function defer(dispose: () => Promise<void>) {
    [Symbol.asyncDispose]: dispose,
  };
}

export function sleep(ms: number): Promise<void> {
  return setTimeout(ms);
}
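The new `sleep` helper is what the mail job further down imports from `../../base`. As a rough illustration of how such a helper tends to be used, here is a hypothetical polling loop; `pollUntil` is mine, only the `sleep` import path comes from this diff:

```ts
import { sleep } from '../../base'; // same export the mail job uses

// Illustrative helper: poll a probe function with a fixed pause between attempts.
async function pollUntil<T>(
  probe: () => Promise<T | null>,
  intervalMs = 500
): Promise<T> {
  for (;;) {
    const result = await probe();
    if (result !== null) return result;
    await sleep(intervalMs); // resolves after `intervalMs` milliseconds
  }
}
```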
@@ -13,74 +13,45 @@ Generated by [AVA](https://avajs.dev).
|
||||
␊
|
||||
␊
|
||||
␊
|
||||
␊
|
||||
␊
|
||||
# You own your data, with no compromises␊
|
||||
␊
|
||||
␊
|
||||
## Local-first & Real-time collaborative␊
|
||||
␊
|
||||
␊
|
||||
We love the idea proposed by Ink & Switch in the famous article about you owning your data, despite the cloud. Furthermore, AFFiNE is the first all-in-one workspace that keeps your data ownership with no compromises on real-time collaboration and editing experience.␊
|
||||
␊
|
||||
␊
|
||||
AFFiNE is a local-first application upon CRDTs with real-time collaboration support. Your data is always stored locally while multiple nodes remain synced in real-time.␊
|
||||
␊
|
||||
␊
|
||||
␊
|
||||
␊
|
||||
␊
|
||||
### Blocks that assemble your next docs, tasks kanban or whiteboard␊
|
||||
␊
|
||||
␊
|
||||
There is a large overlap of their atomic "building blocks" between these apps. They are neither open source nor have a plugin system like VS Code for contributors to customize. We want to have something that contains all the features we love and goes one step further.␊
|
||||
␊
|
||||
␊
|
||||
We are building AFFiNE to be a fundamental open source platform that contains all the building blocks for docs, task management and visual collaboration, hoping you can shape your next workflow with us that can make your life better and also connect others, too.␊
|
||||
␊
|
||||
␊
|
||||
If you want to learn more about the product design of AFFiNE, here goes the concepts:␊
|
||||
␊
|
||||
␊
|
||||
To Shape, not to adapt. AFFiNE is built for individuals & teams who care about their data, who refuse vendor lock-in, and who want to have control over their essential tools.␊
|
||||
␊
|
||||
␊
|
||||
## A true canvas for blocks in any form␊
|
||||
␊
|
||||
␊
|
||||
[Many editor apps](http://notion.so) claimed to be a canvas for productivity. Since _the Mother of All Demos,_ Douglas Engelbart, a creative and programable digital workspace has been a pursuit and an ultimate mission for generations of tool makers.␊
|
||||
␊
|
||||
␊
|
||||
␊
|
||||
␊
|
||||
␊
|
||||
"We shape our tools and thereafter our tools shape us”. A lot of pioneers have inspired us a long the way, e.g.:␊
|
||||
␊
|
||||
␊
|
||||
* Quip & Notion with their great concept of "everything is a block"␊
|
||||
␊
|
||||
␊
|
||||
* Trello with their Kanban␊
|
||||
␊
|
||||
␊
|
||||
* Airtable & Miro with their no-code programable datasheets␊
|
||||
␊
|
||||
␊
|
||||
* Miro & Whimiscal with their edgeless visual whiteboard␊
|
||||
␊
|
||||
␊
|
||||
* Remnote & Capacities with their object-based tag system␊
|
||||
␊
|
||||
␊
|
||||
For more details, please refer to our [RoadMap](https://docs.affine.pro/docs/core-concepts/roadmap)␊
|
||||
␊
|
||||
␊
|
||||
## Self Host␊
|
||||
␊
|
||||
␊
|
||||
Self host AFFiNE␊
|
||||
␊
|
||||
␊
|
||||
||Title|Tag|␊
|
||||
|---|---|---|␊
|
||||
|Affine Development|Affine Development|<span data-affine-option data-value="AxSe-53xjX" data-option-color="var(--affine-tag-pink)">AFFiNE</span>|␊
|
||||
@@ -91,16 +62,12 @@ Generated by [AVA](https://avajs.dev).
|
||||
|Miro & Whimiscal with their edgeless visual whiteboard|Miro & Whimiscal with their edgeless visual whiteboard|<span data-affine-option data-value="HgHsKOUINZ" data-option-color="var(--affine-tag-blue)">Reference</span>|␊
|
||||
|Remnote & Capacities with their object-based tag system|Remnote & Capacities with their object-based tag system||␊
|
||||
␊
|
||||
␊
|
||||
## Affine Development␊
|
||||
␊
|
||||
␊
|
||||
For developer or installation guides, please go to [AFFiNE Development](https://docs.affine.pro/docs/development/quick-start)␊
|
||||
␊
|
||||
␊
|
||||
␊
|
||||
␊
|
||||
␊
|
||||
`,
|
||||
title: 'Write, Draw, Plan all at Once.',
|
||||
}
|
||||
|
||||
@@ -100,7 +100,7 @@ export class PgWorkspaceDocStorageAdapter extends DocStorageAdapter {
  {
    // keep it simple: let all updates merge in one job
    jobId: `doc:merge-pending-updates:${workspaceId}:${docId}`,
    delay: 30 * 1000 /* 30s */,
    delay: 5 * 1000 /* 5s */,
    priority: 100,
  }
);
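The shorter delay still coalesces bursts of updates because the queue deduplicates by `jobId`. A minimal sketch of that pattern, with an assumed generic queue interface and a placeholder job name rather than AFFiNE's actual API:

```ts
// Assumed minimal queue shape, for illustration only.
interface Queue {
  add(
    name: string,
    payload: unknown,
    opts: { jobId: string; delay: number; priority?: number }
  ): Promise<void>;
}

async function scheduleMerge(queue: Queue, workspaceId: string, docId: string) {
  // Re-adding with the same jobId while a delayed job is pending is a no-op,
  // so many updates within the window still produce a single merge run ~5s later.
  await queue.add(
    'doc.mergePendingUpdates', // placeholder job name
    { workspaceId, docId },
    {
      jobId: `doc:merge-pending-updates:${workspaceId}:${docId}`,
      delay: 5 * 1000,
      priority: 100,
    }
  );
}
```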
@@ -1,7 +1,7 @@
import { Injectable } from '@nestjs/common';
import { getStreamAsBuffer } from 'get-stream';

import { JOB_SIGNAL, OnJob } from '../../base';
import { JOB_SIGNAL, OnJob, sleep } from '../../base';
import { type MailName, MailProps, Renderers } from '../../mails';
import { UserProps, WorkspaceProps } from '../../mails/components';
import { Models } from '../../models';

@@ -34,7 +34,7 @@ type SendMailJob<Mail extends MailName = MailName, Props = MailProps<Mail>> = {

declare global {
  interface Jobs {
    'notification.sendMail': {
    'notification.sendMail': { startTime: number } & {
      [K in MailName]: SendMailJob<K>;
    }[MailName];
  }

@@ -50,7 +50,12 @@ export class MailJob {
  ) {}

  @OnJob('notification.sendMail')
  async sendMail({ name, to, props }: Jobs['notification.sendMail']) {
  async sendMail({
    startTime,
    name,
    to,
    props,
  }: Jobs['notification.sendMail']) {
    let options: Partial<SendOptions> = {};

    for (const key in props) {

@@ -100,8 +105,15 @@ export class MailJob {
      )),
      ...options,
    });
    if (result === false) {
      // wait for a while before retrying
      const elapsed = Date.now() - startTime;
      const retryDelay = Math.min(30 * 1000, Math.round(elapsed / 2000) * 1000);
      await sleep(retryDelay);
      return JOB_SIGNAL.Retry;
    }

    return result === false ? JOB_SIGNAL.Retry : undefined;
    return undefined;
  }

  private async fetchWorkspaceProps(workspaceId: string) {
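To make the new backoff concrete, here is the delay formula above pulled out into a standalone sketch with a few sample elapsed times; the helper name is mine, the arithmetic is taken directly from the diff:

```ts
// Delay grows with how long the job has been retrying, capped at 30 seconds.
function mailRetryDelay(startTime: number, now = Date.now()): number {
  const elapsed = now - startTime;
  return Math.min(30 * 1000, Math.round(elapsed / 2000) * 1000);
}

// Roughly: 2s elapsed -> 1s delay, 20s -> 10s, 60s -> 30s, anything longer stays capped at 30s.
```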
@@ -15,11 +15,14 @@ export class Mailer {
   *
   * @note never throw
   */
  async trySend(command: Jobs['notification.sendMail']) {
  async trySend(command: Omit<Jobs['notification.sendMail'], 'startTime'>) {
    return this.send(command, true);
  }

  async send(command: Jobs['notification.sendMail'], suppressError = false) {
  async send(
    command: Omit<Jobs['notification.sendMail'], 'startTime'>,
    suppressError = false
  ) {
    if (!this.sender.configured) {
      if (suppressError) {
        return false;

@@ -28,7 +31,12 @@ export class Mailer {
    }

    try {
      await this.queue.add('notification.sendMail', command);
      await this.queue.add(
        'notification.sendMail',
        Object.assign({}, command, {
          startTime: Date.now(),
        }) as Jobs['notification.sendMail']
      );
      return true;
    } catch {
      return false;
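Callers keep passing the mail payload as before; `startTime` is stamped inside `send` when the job is enqueued. A hedged usage sketch, assuming the `Mailer` and global `Jobs` types are in scope:

```ts
type SendCommand = Omit<Jobs['notification.sendMail'], 'startTime'>;

// Illustrative caller: trySend never throws, it reports failure via `false`.
async function notifyUser(mailer: Mailer, command: SendCommand) {
  const queued = await mailer.trySend(command);
  if (!queued) {
    console.warn('mail sender not configured or queueing failed');
  }
}
```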
packages/backend/server/src/core/monitor/index.ts (new file, 9 lines)
@@ -0,0 +1,9 @@
import { Global, Module } from '@nestjs/common';

import { MonitorService } from './service';

@Global()
@Module({
  providers: [MonitorService],
})
export class MonitorModule {}
packages/backend/server/src/core/monitor/service.ts (new file, 28 lines)
@@ -0,0 +1,28 @@
import { Injectable, Logger } from '@nestjs/common';
import { Cron, CronExpression } from '@nestjs/schedule';

import { metrics } from '../../base';

@Injectable()
export class MonitorService {
  protected logger = new Logger(MonitorService.name);

  @Cron(CronExpression.EVERY_MINUTE)
  async monitor() {
    const memoryUsage = process.memoryUsage();
    this.logger.log(
      `memory usage: rss: ${memoryUsage.rss}, heapTotal: ${memoryUsage.heapTotal}, heapUsed: ${memoryUsage.heapUsed}, external: ${memoryUsage.external}, arrayBuffers: ${memoryUsage.arrayBuffers}`
    );
    metrics.process.gauge('node_process_rss').record(memoryUsage.rss);
    metrics.process
      .gauge('node_process_heap_total')
      .record(memoryUsage.heapTotal);
    metrics.process
      .gauge('node_process_heap_used')
      .record(memoryUsage.heapUsed);
    metrics.process.gauge('node_process_external').record(memoryUsage.external);
    metrics.process
      .gauge('node_process_array_buffers')
      .record(memoryUsage.arrayBuffers);
  }
}
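The values from `process.memoryUsage()` are raw bytes, both in the log line and in the recorded gauges. A small conversion sketch for reading them back, not part of the diff:

```ts
// Convert the recorded byte values to mebibytes for quick inspection.
function toMiB(bytes: number): string {
  return `${(bytes / 1024 / 1024).toFixed(1)} MiB`;
}

const usage = process.memoryUsage();
console.log(`rss=${toMiB(usage.rss)} heapUsed=${toMiB(usage.heapUsed)}`);
```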
@@ -4,6 +4,7 @@ import {
  autoMetadata,
  Config,
  EventBus,
  metrics,
  OnEvent,
  type StorageProvider,
  StorageProviderFactory,

@@ -69,15 +70,23 @@ export class CommentAttachmentStorage {
      blob,
      meta
    );
    const mime = meta.contentType ?? 'application/octet-stream';
    const size = blob.length;
    await this.models.commentAttachment.upsert({
      workspaceId,
      docId,
      key,
      name,
      mime: meta.contentType ?? 'application/octet-stream',
      size: blob.length,
      mime,
      size,
      createdBy: userId,
    });

    metrics.storage.histogram('comment_attachment_size').record(size, { mime });
    metrics.storage.counter('comment_attachment_total').add(1, { mime });
    this.logger.log(
      `uploaded comment attachment ${workspaceId}/${docId}/${key} with size ${size}, mime: ${mime}, name: ${name}, user: ${userId}`
    );
  }

  async get(
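Recording with a `{ mime }` attribute splits both instruments into one time series per MIME type. The same pattern against the plain OpenTelemetry metrics API, assumed here to be what AFFiNE's `metrics` wrapper sits on top of:

```ts
import { metrics as otel } from '@opentelemetry/api';

const meter = otel.getMeter('storage');
const sizeHistogram = meter.createHistogram('comment_attachment_size');
const totalCounter = meter.createCounter('comment_attachment_total');

function recordUpload(size: number, mime: string) {
  // The { mime } attribute yields per-MIME series for both instruments.
  sizeHistogram.record(size, { mime });
  totalCounter.add(1, { mime });
}
```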
@@ -1376,74 +1376,45 @@ Generated by [AVA](https://avajs.dev).
|
||||
␊
|
||||
␊
|
||||
␊
|
||||
␊
|
||||
␊
|
||||
# You own your data, with no compromises␊
|
||||
␊
|
||||
␊
|
||||
## Local-first & Real-time collaborative␊
|
||||
␊
|
||||
␊
|
||||
We love the idea proposed by Ink & Switch in the famous article about you owning your data, despite the cloud. Furthermore, AFFiNE is the first all-in-one workspace that keeps your data ownership with no compromises on real-time collaboration and editing experience.␊
|
||||
␊
|
||||
␊
|
||||
AFFiNE is a local-first application upon CRDTs with real-time collaboration support. Your data is always stored locally while multiple nodes remain synced in real-time.␊
|
||||
␊
|
||||
␊
|
||||
␊
|
||||
␊
|
||||
␊
|
||||
### Blocks that assemble your next docs, tasks kanban or whiteboard␊
|
||||
␊
|
||||
␊
|
||||
There is a large overlap of their atomic "building blocks" between these apps. They are neither open source nor have a plugin system like VS Code for contributors to customize. We want to have something that contains all the features we love and goes one step further.␊
|
||||
␊
|
||||
␊
|
||||
We are building AFFiNE to be a fundamental open source platform that contains all the building blocks for docs, task management and visual collaboration, hoping you can shape your next workflow with us that can make your life better and also connect others, too.␊
|
||||
␊
|
||||
␊
|
||||
If you want to learn more about the product design of AFFiNE, here goes the concepts:␊
|
||||
␊
|
||||
␊
|
||||
To Shape, not to adapt. AFFiNE is built for individuals & teams who care about their data, who refuse vendor lock-in, and who want to have control over their essential tools.␊
|
||||
␊
|
||||
␊
|
||||
## A true canvas for blocks in any form␊
|
||||
␊
|
||||
␊
|
||||
[Many editor apps](http://notion.so) claimed to be a canvas for productivity. Since _the Mother of All Demos,_ Douglas Engelbart, a creative and programable digital workspace has been a pursuit and an ultimate mission for generations of tool makers.␊
|
||||
␊
|
||||
␊
|
||||
␊
|
||||
␊
|
||||
␊
|
||||
"We shape our tools and thereafter our tools shape us”. A lot of pioneers have inspired us a long the way, e.g.:␊
|
||||
␊
|
||||
␊
|
||||
* Quip & Notion with their great concept of "everything is a block"␊
|
||||
␊
|
||||
␊
|
||||
* Trello with their Kanban␊
|
||||
␊
|
||||
␊
|
||||
* Airtable & Miro with their no-code programable datasheets␊
|
||||
␊
|
||||
␊
|
||||
* Miro & Whimiscal with their edgeless visual whiteboard␊
|
||||
␊
|
||||
␊
|
||||
* Remnote & Capacities with their object-based tag system␊
|
||||
␊
|
||||
␊
|
||||
For more details, please refer to our [RoadMap](https://docs.affine.pro/docs/core-concepts/roadmap)␊
|
||||
␊
|
||||
␊
|
||||
## Self Host␊
|
||||
␊
|
||||
␊
|
||||
Self host AFFiNE␊
|
||||
␊
|
||||
␊
|
||||
||Title|Tag|␊
|
||||
|---|---|---|␊
|
||||
|Affine Development|Affine Development|<span data-affine-option data-value="AxSe-53xjX" data-option-color="var(--affine-tag-pink)">AFFiNE</span>|␊
|
||||
@@ -1454,16 +1425,12 @@ Generated by [AVA](https://avajs.dev).
|
||||
|Miro & Whimiscal with their edgeless visual whiteboard|Miro & Whimiscal with their edgeless visual whiteboard|<span data-affine-option data-value="HgHsKOUINZ" data-option-color="var(--affine-tag-blue)">Reference</span>|␊
|
||||
|Remnote & Capacities with their object-based tag system|Remnote & Capacities with their object-based tag system||␊
|
||||
␊
|
||||
␊
|
||||
## Affine Development␊
|
||||
␊
|
||||
␊
|
||||
For developer or installation guides, please go to [AFFiNE Development](https://docs.affine.pro/docs/development/quick-start)␊
|
||||
␊
|
||||
␊
|
||||
␊
|
||||
␊
|
||||
␊
|
||||
`,
|
||||
title: 'Write, Draw, Plan all at Once.',
|
||||
}
|
||||
@@ -1476,113 +1443,80 @@ Generated by [AVA](https://avajs.dev).
|
||||
markdown: `<!-- block_id=FoPQcAyV_m flavour=affine:paragraph -->␊
|
||||
AFFiNE is an open source all in one workspace, an operating system for all the building blocks of your team wiki, knowledge management and digital assets and a better alternative to Notion and Miro.␊
|
||||
␊
|
||||
␊
|
||||
<!-- block_id=oz48nn_zp8 flavour=affine:paragraph -->␊
|
||||
␊
|
||||
␊
|
||||
␊
|
||||
<!-- block_id=g8a-D9-jXS flavour=affine:paragraph -->␊
|
||||
# You own your data, with no compromises␊
|
||||
␊
|
||||
␊
|
||||
<!-- block_id=J8lHN1GR_5 flavour=affine:paragraph -->␊
|
||||
## Local-first & Real-time collaborative␊
|
||||
␊
|
||||
␊
|
||||
<!-- block_id=xCuWdM0VLz flavour=affine:paragraph -->␊
|
||||
We love the idea proposed by Ink & Switch in the famous article about you owning your data, despite the cloud. Furthermore, AFFiNE is the first all-in-one workspace that keeps your data ownership with no compromises on real-time collaboration and editing experience.␊
|
||||
␊
|
||||
␊
|
||||
<!-- block_id=zElMi0tViK flavour=affine:paragraph -->␊
|
||||
AFFiNE is a local-first application upon CRDTs with real-time collaboration support. Your data is always stored locally while multiple nodes remain synced in real-time.␊
|
||||
␊
|
||||
␊
|
||||
<!-- block_id=Z4rK0OF9Wk flavour=affine:paragraph -->␊
|
||||
␊
|
||||
␊
|
||||
␊
|
||||
<!-- block_id=DQ0Ryb-SpW flavour=affine:paragraph -->␊
|
||||
### Blocks that assemble your next docs, tasks kanban or whiteboard␊
|
||||
␊
|
||||
␊
|
||||
<!-- block_id=HAZC3URZp_ flavour=affine:paragraph -->␊
|
||||
There is a large overlap of their atomic "building blocks" between these apps. They are neither open source nor have a plugin system like VS Code for contributors to customize. We want to have something that contains all the features we love and goes one step further.␊
|
||||
␊
|
||||
␊
|
||||
<!-- block_id=0H87ypiuv8 flavour=affine:paragraph -->␊
|
||||
We are building AFFiNE to be a fundamental open source platform that contains all the building blocks for docs, task management and visual collaboration, hoping you can shape your next workflow with us that can make your life better and also connect others, too.␊
|
||||
␊
|
||||
␊
|
||||
<!-- block_id=Sp4G1KD0Wn flavour=affine:paragraph -->␊
|
||||
If you want to learn more about the product design of AFFiNE, here goes the concepts:␊
|
||||
␊
|
||||
␊
|
||||
<!-- block_id=RsUhDuEqXa flavour=affine:paragraph -->␊
|
||||
To Shape, not to adapt. AFFiNE is built for individuals & teams who care about their data, who refuse vendor lock-in, and who want to have control over their essential tools.␊
|
||||
␊
|
||||
␊
|
||||
<!-- block_id=Z2HibKzAr- flavour=affine:paragraph -->␊
|
||||
## A true canvas for blocks in any form␊
|
||||
␊
|
||||
␊
|
||||
<!-- block_id=UwvWddamzM flavour=affine:paragraph -->␊
|
||||
[Many editor apps](http://notion.so) claimed to be a canvas for productivity. Since _the Mother of All Demos,_ Douglas Engelbart, a creative and programable digital workspace has been a pursuit and an ultimate mission for generations of tool makers.␊
|
||||
␊
|
||||
␊
|
||||
<!-- block_id=g9xKUjhJj1 flavour=affine:paragraph -->␊
|
||||
␊
|
||||
␊
|
||||
␊
|
||||
<!-- block_id=wDTn4YJ4pm flavour=affine:paragraph -->␊
|
||||
"We shape our tools and thereafter our tools shape us”. A lot of pioneers have inspired us a long the way, e.g.:␊
|
||||
␊
|
||||
␊
|
||||
<!-- block_id=xFrrdiP3-V flavour=affine:list -->␊
|
||||
* Quip & Notion with their great concept of "everything is a block"␊
|
||||
␊
|
||||
␊
|
||||
<!-- block_id=Tp9xyN4Okl flavour=affine:list -->␊
|
||||
* Trello with their Kanban␊
|
||||
␊
|
||||
␊
|
||||
<!-- block_id=K_4hUzKZFQ flavour=affine:list -->␊
|
||||
* Airtable & Miro with their no-code programable datasheets␊
|
||||
␊
|
||||
␊
|
||||
<!-- block_id=QwMzON2s7x flavour=affine:list -->␊
|
||||
* Miro & Whimiscal with their edgeless visual whiteboard␊
|
||||
␊
|
||||
␊
|
||||
<!-- block_id=FFVmit6u1T flavour=affine:list -->␊
|
||||
* Remnote & Capacities with their object-based tag system␊
|
||||
␊
|
||||
␊
|
||||
<!-- block_id=YqnG5O6AE6 flavour=affine:paragraph -->␊
|
||||
For more details, please refer to our [RoadMap](https://docs.affine.pro/docs/core-concepts/roadmap)␊
|
||||
␊
|
||||
␊
|
||||
<!-- block_id=sbDTmZMZcq flavour=affine:paragraph -->␊
|
||||
## Self Host␊
|
||||
␊
|
||||
␊
|
||||
<!-- block_id=QVvitesfbj flavour=affine:paragraph -->␊
|
||||
Self host AFFiNE␊
|
||||
␊
|
||||
␊
|
||||
<!-- block_id=U_GoHFD9At flavour=affine:database placeholder -->␊
|
||||
␊
|
||||
<!-- block_id=NyHXrMX3R1 flavour=affine:paragraph -->␊
|
||||
## Affine Development␊
|
||||
␊
|
||||
␊
|
||||
<!-- block_id=9-K49otbCv flavour=affine:paragraph -->␊
|
||||
For developer or installation guides, please go to [AFFiNE Development](https://docs.affine.pro/docs/development/quick-start)␊
|
||||
␊
|
||||
␊
|
||||
<!-- block_id=faFteK9eG- flavour=affine:paragraph -->␊
|
||||
␊
|
||||
␊
|
||||
␊
|
||||
`,
|
||||
title: 'Write, Draw, Plan all at Once.',
|
||||
}
|
||||
|
||||
@@ -79,6 +79,9 @@ class DocType {
  @Field(() => String, { nullable: true })
  title?: string | null;

  @Field(() => String, { nullable: true })
  summary?: string | null;
}

@InputType()
@@ -250,10 +253,11 @@ export class WorkspaceDocResolver {
    deprecationReason: 'use [WorkspaceType.doc] instead',
  })
  async publicPage(
    @CurrentUser() me: CurrentUser,
    @Parent() workspace: WorkspaceType,
    @Args('pageId') pageId: string
  ) {
    return this.doc(workspace, pageId);
    return this.doc(me, workspace, pageId);
  }

  @ResolveField(() => PaginatedDocType)

@@ -294,11 +298,14 @@ export class WorkspaceDocResolver {
    complexity: 2,
  })
  async doc(
    @CurrentUser() me: CurrentUser,
    @Parent() workspace: WorkspaceType,
    @Args('docId') docId: string
  ): Promise<DocType> {
    const doc = await this.models.doc.getDocInfo(workspace.id, docId);
    if (doc) {
      // check if doc is readable
      await this.ac.user(me.id).doc(workspace.id, docId).assert('Doc.Read');
      return doc;
    }
@@ -84,11 +84,17 @@ export class CopilotContextModel extends BaseModel {
  }

  async mergeDocStatus(workspaceId: string, docs: ContextDoc[]) {
    const docIds = Array.from(new Set(docs.map(doc => doc.id)));
    const finishedDoc = await this.hasWorkspaceEmbedding(workspaceId, docIds);
    const canEmbedding = await this.checkEmbeddingAvailable();
    const finishedDoc = canEmbedding
      ? await this.listWorkspaceEmbedding(
          workspaceId,
          Array.from(new Set(docs.map(doc => doc.id)))
        )
      : [];
    const finishedDocSet = new Set(finishedDoc);

    for (const doc of docs) {
      const status = finishedDoc.has(doc.id)
      const status = finishedDocSet.has(doc.id)
        ? ContextEmbedStatus.finished
        : undefined;
      // NOTE: when the document has not been synchronized to the server or is in the embedding queue

@@ -120,24 +126,17 @@ export class CopilotContextModel extends BaseModel {
    return Number(count) === 2;
  }

  async hasWorkspaceEmbedding(workspaceId: string, docIds: string[]) {
    const canEmbedding = await this.checkEmbeddingAvailable();
    if (!canEmbedding) {
      return new Set();
    }

  async listWorkspaceEmbedding(workspaceId: string, docIds?: string[]) {
    const existsIds = await this.db.aiWorkspaceEmbedding
      .findMany({
      .groupBy({
        where: {
          workspaceId,
          docId: { in: docIds },
        },
        select: {
          docId: true,
          docId: docIds ? { in: docIds } : undefined,
        },
        by: ['docId'],
      })
      .then(r => r.map(r => r.docId));
    return new Set(existsIds);
    return existsIds;
  }

  private processEmbeddings(

@@ -165,6 +164,13 @@ export class CopilotContextModel extends BaseModel {
    fileId: string,
    embeddings: Embedding[]
  ) {
    if (embeddings.length === 0) {
      this.logger.warn(
        `No embeddings provided for contextId: ${contextId}, fileId: ${fileId}. Skipping insertion.`
      );
      return;
    }

    const values = this.processEmbeddings(contextId, fileId, embeddings);

    await this.db.$executeRaw`

@@ -204,6 +210,13 @@ export class CopilotContextModel extends BaseModel {
    docId: string,
    embeddings: Embedding[]
  ) {
    if (embeddings.length === 0) {
      this.logger.warn(
        `No embeddings provided for workspaceId: ${workspaceId}, docId: ${docId}. Skipping insertion.`
      );
      return;
    }

    const values = this.processEmbeddings(
      workspaceId,
      docId,
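Switching from `findMany` to `groupBy(['docId'])` means the returned ids are already unique even when a doc has many embedding rows. A small self-contained sketch of the same dedup idea, plain arrays rather than Prisma:

```ts
// Rows as they might come back from the embeddings table: one doc, many chunks.
const rows = [
  { docId: 'doc-a', chunk: 0 },
  { docId: 'doc-a', chunk: 1 },
  { docId: 'doc-b', chunk: 0 },
];

// groupBy(['docId']) collapses this to one entry per doc, equivalent to:
const embeddedDocIds = [...new Set(rows.map(r => r.docId))]; // ['doc-a', 'doc-b']

// mergeDocStatus then only needs a Set membership test per context doc.
const finished = new Set(embeddedDocIds);
console.log(finished.has('doc-a'), finished.has('doc-c')); // true false
```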
@@ -582,4 +582,56 @@ export class CopilotSessionModel extends BaseModel {
      .map(({ messageCost, prompt: { action } }) => (action ? 1 : messageCost))
      .reduce((prev, cost) => prev + cost, 0);
  }

  @Transactional()
  async cleanupEmptySessions(earlyThen: Date) {
    // delete never used sessions
    const { count: removed } = await this.db.aiSession.deleteMany({
      where: {
        messageCost: 0,
        deletedAt: null,
        // filter session updated more than 24 hours ago
        updatedAt: { lt: earlyThen },
      },
    });

    // mark empty sessions as deleted
    const { count: cleaned } = await this.db.aiSession.updateMany({
      where: {
        deletedAt: null,
        messages: { none: {} },
        // filter session updated more than 24 hours ago
        updatedAt: { lt: earlyThen },
      },
      data: {
        deletedAt: new Date(),
        pinned: false,
      },
    });

    return { removed, cleaned };
  }

  @Transactional()
  async toBeGenerateTitle() {
    const sessions = await this.db.aiSession
      .findMany({
        where: {
          title: null,
          deletedAt: null,
          messages: { some: {} },
          // only generate titles for non-actions sessions
          prompt: { action: null },
        },
        select: {
          id: true,
          // count assistant messages
          _count: { select: { messages: { where: { role: 'assistant' } } } },
        },
        orderBy: { updatedAt: 'desc' },
      })
      .then(s => s.filter(s => s._count.messages > 0));

    return sessions;
  }
}
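These two model methods are driven by the daily `CopilotCronJobs` added later in this compare; the cutoff passed in is simply "now minus one day". A condensed sketch of that call, assuming an injected `models` facade:

```ts
// Cron-side wiring, condensed. 24h in milliseconds stands in for AFFiNE's OneDay constant.
declare const models: { copilotSession: { cleanupEmptySessions(d: Date): Promise<{ removed: number; cleaned: number }> } };

const earlierThan = new Date(Date.now() - 24 * 60 * 60 * 1000);
const { removed, cleaned } =
  await models.copilotSession.cleanupEmptySessions(earlierThan);
console.log(`${removed} sessions deleted, ${cleaned} marked as deleted`);
```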
@@ -58,10 +58,12 @@ export class CopilotWorkspaceConfigModel extends BaseModel {
        ON id.workspace_id = s.workspace_id
        AND id.doc_id = s.guid
      WHERE s.workspace_id = ${workspaceId}
        AND s.guid != s.workspace_id
        AND s.guid <> s.workspace_id
        AND s.guid NOT LIKE '%$%'
        AND s.guid NOT LIKE '%:settings:%'
        AND e.doc_id IS NULL
        AND id.doc_id IS NULL;`;
        AND id.doc_id IS NULL
        AND s.blob <> E'\\\\x0000';`;

    return docIds.map(r => r.id);
  }
@@ -150,7 +152,7 @@ export class CopilotWorkspaceConfigModel extends BaseModel {
  }

  @Transactional()
  async getWorkspaceEmbeddingStatus(workspaceId: string) {
  async getEmbeddingStatus(workspaceId: string) {
    const ignoredDocIds = (await this.listIgnoredDocIds(workspaceId)).map(
      d => d.docId
    );

@@ -160,13 +162,19 @@ export class CopilotWorkspaceConfigModel extends BaseModel {
        { id: { notIn: ignoredDocIds } },
        { id: { not: workspaceId } },
        { id: { not: { contains: '$' } } },
        { id: { not: { contains: ':settings:' } } },
        { blob: { not: new Uint8Array([0, 0]) } },
      ],
    };

    const [docTotal, docEmbedded, fileTotal, fileEmbedded] = await Promise.all([
      this.db.snapshot.count({ where: snapshotCondition }),
      this.db.snapshot.count({
      this.db.snapshot.findMany({
        where: snapshotCondition,
        select: { id: true },
      }),
      this.db.snapshot.findMany({
        where: { ...snapshotCondition, embedding: { some: {} } },
        select: { id: true },
      }),
      this.db.aiWorkspaceFiles.count({ where: { workspaceId } }),
      this.db.aiWorkspaceFiles.count({

@@ -174,9 +182,23 @@ export class CopilotWorkspaceConfigModel extends BaseModel {
      }),
    ]);

    const docTotalIds = docTotal.map(d => d.id);
    const docTotalSet = new Set(docTotalIds);
    const outdatedDocPrefix = `${workspaceId}:space:`;
    const duplicateOutdatedDocSet = new Set(
      docTotalIds
        .filter(id => id.startsWith(outdatedDocPrefix))
        .filter(id => docTotalSet.has(id.slice(outdatedDocPrefix.length)))
    );

    return {
      total: docTotal + fileTotal,
      embedded: docEmbedded + fileEmbedded,
      total:
        docTotalIds.filter(id => !duplicateOutdatedDocSet.has(id)).length +
        fileTotal,
      embedded:
        docEmbedded
          .map(d => d.id)
          .filter(id => !duplicateOutdatedDocSet.has(id)).length + fileEmbedded,
    };
  }
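The new dedup guards against legacy doc ids of the form `<workspaceId>:space:<docId>` coexisting with the plain `<docId>` snapshot, which would otherwise be counted twice. A worked example with shortened ids:

```ts
const workspaceId = 'ws1';
const outdatedDocPrefix = `${workspaceId}:space:`;

// Suppose the snapshot table holds both the legacy and the new id for doc 'abc'.
const docTotalIds = ['abc', 'ws1:space:abc', 'def'];
const docTotalSet = new Set(docTotalIds);

const duplicateOutdatedDocSet = new Set(
  docTotalIds
    .filter(id => id.startsWith(outdatedDocPrefix))
    .filter(id => docTotalSet.has(id.slice(outdatedDocPrefix.length)))
);

// 'ws1:space:abc' is excluded, so 'abc' and 'def' are counted once each.
const total = docTotalIds.filter(id => !duplicateOutdatedDocSet.has(id)).length; // 2
```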
@@ -283,6 +305,13 @@ export class CopilotWorkspaceConfigModel extends BaseModel {
|
||||
fileId: string,
|
||||
embeddings: Embedding[]
|
||||
) {
|
||||
if (embeddings.length === 0) {
|
||||
this.logger.warn(
|
||||
`No embeddings provided for workspaceId: ${workspaceId}, fileId: ${fileId}. Skipping insertion.`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const values = this.processEmbeddings(workspaceId, fileId, embeddings);
|
||||
await this.db.$executeRaw`
|
||||
INSERT INTO "ai_workspace_file_embeddings"
|
||||
|
||||
@@ -558,6 +558,8 @@ export class DocModel extends BaseModel {
|
||||
mode: PublicDocMode;
|
||||
public: boolean;
|
||||
defaultRole: DocRole;
|
||||
title: string | null;
|
||||
summary: string | null;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
creatorId?: string;
|
||||
@@ -570,6 +572,8 @@ export class DocModel extends BaseModel {
|
||||
"workspace_pages"."mode" as "mode",
|
||||
"workspace_pages"."public" as "public",
|
||||
"workspace_pages"."defaultRole" as "defaultRole",
|
||||
"workspace_pages"."title" as "title",
|
||||
"workspace_pages"."summary" as "summary",
|
||||
"snapshots"."created_at" as "createdAt",
|
||||
"snapshots"."updated_at" as "updatedAt",
|
||||
"snapshots"."created_by" as "creatorId",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { Transactional } from '@nestjs-cls/transactional';
|
||||
import { type Workspace } from '@prisma/client';
|
||||
import { Prisma, type Workspace } from '@prisma/client';
|
||||
|
||||
import { EventBus } from '../base';
|
||||
import { BaseModel } from './base';
|
||||
@@ -93,6 +93,19 @@ export class WorkspaceModel extends BaseModel {
|
||||
});
|
||||
}
|
||||
|
||||
async list<S extends Prisma.WorkspaceSelect>(
|
||||
where: Prisma.WorkspaceWhereInput = {},
|
||||
select?: S
|
||||
) {
|
||||
return (await this.db.workspace.findMany({
|
||||
where,
|
||||
select,
|
||||
orderBy: {
|
||||
sid: 'asc',
|
||||
},
|
||||
})) as Prisma.WorkspaceGetPayload<{ select: S }>[];
|
||||
}
|
||||
|
||||
async delete(workspaceId: string) {
|
||||
const rawResult = await this.db.workspace.deleteMany({
|
||||
where: {
|
||||
|
||||
@@ -356,6 +356,7 @@ export class CopilotContextRootResolver {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Throttle('strict')
|
||||
@Query(() => ContextWorkspaceEmbeddingStatus, {
|
||||
description: 'query workspace embedding status',
|
||||
})
|
||||
@@ -372,9 +373,7 @@ export class CopilotContextRootResolver {
|
||||
|
||||
if (this.context.canEmbedding) {
|
||||
const { total, embedded } =
|
||||
await this.models.copilotWorkspace.getWorkspaceEmbeddingStatus(
|
||||
workspaceId
|
||||
);
|
||||
await this.models.copilotWorkspace.getEmbeddingStatus(workspaceId);
|
||||
return { total, embedded };
|
||||
}
|
||||
|
||||
|
||||
@@ -125,7 +125,10 @@ export class CopilotContextService implements OnApplicationBootstrap {
|
||||
|
||||
async get(id: string): Promise<ContextSession> {
|
||||
if (!this.embeddingClient) {
|
||||
throw new NoCopilotProviderAvailable('embedding client not configured');
|
||||
throw new NoCopilotProviderAvailable(
|
||||
{ modelId: 'embedding' },
|
||||
'embedding client not configured'
|
||||
);
|
||||
}
|
||||
|
||||
const context = await this.getCachedSession(id);
|
||||
|
||||
@@ -124,7 +124,7 @@ export class CopilotController implements BeforeApplicationShutdown {
|
||||
modelId: model,
|
||||
});
|
||||
if (!provider) {
|
||||
throw new NoCopilotProviderAvailable();
|
||||
throw new NoCopilotProviderAvailable({ modelId: model });
|
||||
}
|
||||
|
||||
return { provider, model, hasAttachment };
|
||||
@@ -299,6 +299,13 @@ export class CopilotController implements BeforeApplicationShutdown {
|
||||
this.ongoingStreamCount$.next(this.ongoingStreamCount$.value + 1);
|
||||
|
||||
const { signal, onConnectionClosed } = getSignal(req);
|
||||
let endBeforePromiseResolve = false;
|
||||
onConnectionClosed(isAborted => {
|
||||
if (isAborted) {
|
||||
endBeforePromiseResolve = true;
|
||||
}
|
||||
});
|
||||
|
||||
const { messageId, reasoning, webSearch } = ChatQuerySchema.parse(query);
|
||||
|
||||
const source$ = from(
|
||||
@@ -322,21 +329,21 @@ export class CopilotController implements BeforeApplicationShutdown {
|
||||
shared$.pipe(
|
||||
reduce((acc, chunk) => acc + chunk, ''),
|
||||
tap(buffer => {
|
||||
onConnectionClosed(isAborted => {
|
||||
session.push({
|
||||
role: 'assistant',
|
||||
content: isAborted ? '> Request aborted' : buffer,
|
||||
createdAt: new Date(),
|
||||
});
|
||||
void session
|
||||
.save()
|
||||
.catch(err =>
|
||||
this.logger.error(
|
||||
'Failed to save session in sse stream',
|
||||
err
|
||||
)
|
||||
);
|
||||
session.push({
|
||||
role: 'assistant',
|
||||
content: endBeforePromiseResolve
|
||||
? '> Request aborted'
|
||||
: buffer,
|
||||
createdAt: new Date(),
|
||||
});
|
||||
void session
|
||||
.save()
|
||||
.catch(err =>
|
||||
this.logger.error(
|
||||
'Failed to save session in sse stream',
|
||||
err
|
||||
)
|
||||
);
|
||||
}),
|
||||
ignoreElements()
|
||||
)
|
||||
@@ -384,6 +391,13 @@ export class CopilotController implements BeforeApplicationShutdown {
|
||||
this.ongoingStreamCount$.next(this.ongoingStreamCount$.value + 1);
|
||||
|
||||
const { signal, onConnectionClosed } = getSignal(req);
|
||||
let endBeforePromiseResolve = false;
|
||||
onConnectionClosed(isAborted => {
|
||||
if (isAborted) {
|
||||
endBeforePromiseResolve = true;
|
||||
}
|
||||
});
|
||||
|
||||
const { messageId, reasoning, webSearch } = ChatQuerySchema.parse(query);
|
||||
|
||||
const source$ = from(
|
||||
@@ -407,25 +421,25 @@ export class CopilotController implements BeforeApplicationShutdown {
|
||||
shared$.pipe(
|
||||
reduce((acc, chunk) => acc.concat([chunk]), [] as StreamObject[]),
|
||||
tap(result => {
|
||||
onConnectionClosed(isAborted => {
|
||||
const parser = new StreamObjectParser();
|
||||
const streamObjects = parser.mergeTextDelta(result);
|
||||
const content = parser.mergeContent(streamObjects);
|
||||
session.push({
|
||||
role: 'assistant',
|
||||
content: isAborted ? '> Request aborted' : content,
|
||||
streamObjects: isAborted ? null : streamObjects,
|
||||
createdAt: new Date(),
|
||||
});
|
||||
void session
|
||||
.save()
|
||||
.catch(err =>
|
||||
this.logger.error(
|
||||
'Failed to save session in sse stream',
|
||||
err
|
||||
)
|
||||
);
|
||||
const parser = new StreamObjectParser();
|
||||
const streamObjects = parser.mergeTextDelta(result);
|
||||
const content = parser.mergeContent(streamObjects);
|
||||
session.push({
|
||||
role: 'assistant',
|
||||
content: endBeforePromiseResolve
|
||||
? '> Request aborted'
|
||||
: content,
|
||||
streamObjects: endBeforePromiseResolve ? null : streamObjects,
|
||||
createdAt: new Date(),
|
||||
});
|
||||
void session
|
||||
.save()
|
||||
.catch(err =>
|
||||
this.logger.error(
|
||||
'Failed to save session in sse stream',
|
||||
err
|
||||
)
|
||||
);
|
||||
}),
|
||||
ignoreElements()
|
||||
)
|
||||
@@ -477,6 +491,13 @@ export class CopilotController implements BeforeApplicationShutdown {
|
||||
this.ongoingStreamCount$.next(this.ongoingStreamCount$.value + 1);
|
||||
|
||||
const { signal, onConnectionClosed } = getSignal(req);
|
||||
let endBeforePromiseResolve = false;
|
||||
onConnectionClosed(isAborted => {
|
||||
if (isAborted) {
|
||||
endBeforePromiseResolve = true;
|
||||
}
|
||||
});
|
||||
|
||||
const source$ = from(
|
||||
this.workflow.runGraph(params, session.model, {
|
||||
...session.config.promptConfig,
|
||||
@@ -526,21 +547,21 @@ export class CopilotController implements BeforeApplicationShutdown {
|
||||
return acc;
|
||||
}, ''),
|
||||
tap(content => {
|
||||
onConnectionClosed(isAborted => {
|
||||
session.push({
|
||||
role: 'assistant',
|
||||
content: isAborted ? '> Request aborted' : content,
|
||||
createdAt: new Date(),
|
||||
});
|
||||
void session
|
||||
.save()
|
||||
.catch(err =>
|
||||
this.logger.error(
|
||||
'Failed to save session in sse stream',
|
||||
err
|
||||
)
|
||||
);
|
||||
session.push({
|
||||
role: 'assistant',
|
||||
content: endBeforePromiseResolve
|
||||
? '> Request aborted'
|
||||
: content,
|
||||
createdAt: new Date(),
|
||||
});
|
||||
void session
|
||||
.save()
|
||||
.catch(err =>
|
||||
this.logger.error(
|
||||
'Failed to save session in sse stream',
|
||||
err
|
||||
)
|
||||
);
|
||||
}),
|
||||
ignoreElements()
|
||||
)
|
||||
@@ -604,6 +625,13 @@ export class CopilotController implements BeforeApplicationShutdown {
|
||||
this.ongoingStreamCount$.next(this.ongoingStreamCount$.value + 1);
|
||||
|
||||
const { signal, onConnectionClosed } = getSignal(req);
|
||||
let endBeforePromiseResolve = false;
|
||||
onConnectionClosed(isAborted => {
|
||||
if (isAborted) {
|
||||
endBeforePromiseResolve = true;
|
||||
}
|
||||
});
|
||||
|
||||
const source$ = from(
|
||||
provider.streamImages(
|
||||
{
|
||||
@@ -639,22 +667,20 @@ export class CopilotController implements BeforeApplicationShutdown {
|
||||
shared$.pipe(
|
||||
reduce((acc, chunk) => acc.concat([chunk]), [] as string[]),
|
||||
tap(attachments => {
|
||||
onConnectionClosed(isAborted => {
|
||||
session.push({
|
||||
role: 'assistant',
|
||||
content: isAborted ? '> Request aborted' : '',
|
||||
attachments: isAborted ? [] : attachments,
|
||||
createdAt: new Date(),
|
||||
});
|
||||
void session
|
||||
.save()
|
||||
.catch(err =>
|
||||
this.logger.error(
|
||||
'Failed to save session in sse stream',
|
||||
err
|
||||
)
|
||||
);
|
||||
session.push({
|
||||
role: 'assistant',
|
||||
content: endBeforePromiseResolve ? '> Request aborted' : '',
|
||||
attachments: endBeforePromiseResolve ? [] : attachments,
|
||||
createdAt: new Date(),
|
||||
});
|
||||
void session
|
||||
.save()
|
||||
.catch(err =>
|
||||
this.logger.error(
|
||||
'Failed to save session in sse stream',
|
||||
err
|
||||
)
|
||||
);
|
||||
}),
|
||||
ignoreElements()
|
||||
)
|
||||
|
||||
packages/backend/server/src/plugins/copilot/cron.ts (new file, 100 lines)
@@ -0,0 +1,100 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { Cron, CronExpression } from '@nestjs/schedule';
|
||||
|
||||
import { JobQueue, OneDay, OnJob } from '../../base';
|
||||
import { Models } from '../../models';
|
||||
|
||||
declare global {
|
||||
interface Jobs {
|
||||
'copilot.session.cleanupEmptySessions': {};
|
||||
'copilot.session.generateMissingTitles': {};
|
||||
'copilot.workspace.cleanupTrashedDocEmbeddings': {};
|
||||
}
|
||||
}
|
||||
|
||||
@Injectable()
|
||||
export class CopilotCronJobs {
|
||||
private readonly logger = new Logger(CopilotCronJobs.name);
|
||||
|
||||
constructor(
|
||||
private readonly models: Models,
|
||||
private readonly jobs: JobQueue
|
||||
) {}
|
||||
|
||||
async triggerCleanupTrashedDocEmbeddings() {
|
||||
await this.jobs.add(
|
||||
'copilot.workspace.cleanupTrashedDocEmbeddings',
|
||||
{},
|
||||
{ jobId: 'daily-copilot-cleanup-trashed-doc-embeddings' }
|
||||
);
|
||||
}
|
||||
|
||||
@Cron(CronExpression.EVERY_DAY_AT_MIDNIGHT)
|
||||
async dailyCleanupJob() {
|
||||
await this.jobs.add(
|
||||
'copilot.session.cleanupEmptySessions',
|
||||
{},
|
||||
{ jobId: 'daily-copilot-cleanup-empty-sessions' }
|
||||
);
|
||||
|
||||
await this.jobs.add(
|
||||
'copilot.session.generateMissingTitles',
|
||||
{},
|
||||
{ jobId: 'daily-copilot-generate-missing-titles' }
|
||||
);
|
||||
|
||||
await this.jobs.add(
|
||||
'copilot.workspace.cleanupTrashedDocEmbeddings',
|
||||
{},
|
||||
{ jobId: 'daily-copilot-cleanup-trashed-doc-embeddings' }
|
||||
);
|
||||
}
|
||||
|
||||
async triggerGenerateMissingTitles() {
|
||||
await this.jobs.add(
|
||||
'copilot.session.generateMissingTitles',
|
||||
{},
|
||||
{ jobId: 'trigger-copilot-generate-missing-titles' }
|
||||
);
|
||||
}
|
||||
|
||||
@OnJob('copilot.session.cleanupEmptySessions')
|
||||
async cleanupEmptySessions() {
|
||||
const { removed, cleaned } =
|
||||
await this.models.copilotSession.cleanupEmptySessions(
|
||||
new Date(Date.now() - OneDay)
|
||||
);
|
||||
|
||||
this.logger.log(
|
||||
`Cleanup completed: ${removed} sessions deleted, ${cleaned} sessions marked as deleted`
|
||||
);
|
||||
}
|
||||
|
||||
@OnJob('copilot.session.generateMissingTitles')
|
||||
async generateMissingTitles() {
|
||||
const sessions = await this.models.copilotSession.toBeGenerateTitle();
|
||||
|
||||
for (const session of sessions) {
|
||||
await this.jobs.add('copilot.session.generateTitle', {
|
||||
sessionId: session.id,
|
||||
});
|
||||
}
|
||||
this.logger.log(
|
||||
`Scheduled title generation for ${sessions.length} sessions`
|
||||
);
|
||||
}
|
||||
|
||||
@OnJob('copilot.workspace.cleanupTrashedDocEmbeddings')
|
||||
async cleanupTrashedDocEmbeddings() {
|
||||
const workspaces = await this.models.workspace.list(undefined, {
|
||||
id: true,
|
||||
});
|
||||
for (const { id: workspaceId } of workspaces) {
|
||||
await this.jobs.add(
|
||||
'copilot.embedding.cleanupTrashedDocEmbeddings',
|
||||
{ workspaceId },
|
||||
{ jobId: `cleanup-trashed-doc-embeddings-${workspaceId}` }
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
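Each scheduled job above is enqueued with a fixed `jobId`, so re-triggering within the same window cannot double-schedule it. A minimal sketch of guarding a cron-enqueued job this way, against an assumed generic queue rather than AFFiNE's `JobQueue`:

```ts
// Assumed minimal queue shape, for illustration only.
interface SimpleQueue {
  add(name: string, payload: object, opts?: { jobId?: string }): Promise<void>;
}

async function scheduleDailyCleanup(queue: SimpleQueue) {
  // A stable jobId makes repeated calls idempotent while the job is still pending.
  await queue.add(
    'copilot.session.cleanupEmptySessions',
    {},
    { jobId: 'daily-copilot-cleanup-empty-sessions' }
  );
}
```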
@@ -5,6 +5,7 @@ import {
|
||||
CopilotPromptNotFound,
|
||||
CopilotProviderNotSupported,
|
||||
} from '../../../base';
|
||||
import { CopilotFailedToGenerateEmbedding } from '../../../base/error/errors.gen';
|
||||
import { ChunkSimilarity, Embedding } from '../../../models';
|
||||
import { PromptService } from '../prompt';
|
||||
import {
|
||||
@@ -74,6 +75,12 @@ class ProductionEmbeddingClient extends EmbeddingClient {
|
||||
input,
|
||||
{ dimensions: EMBEDDING_DIMENSIONS }
|
||||
);
|
||||
if (embeddings.length !== input.length) {
|
||||
throw new CopilotFailedToGenerateEmbedding({
|
||||
provider: provider.type,
|
||||
message: `Expected ${input.length} embeddings, got ${embeddings.length}`,
|
||||
});
|
||||
}
|
||||
|
||||
return Array.from(embeddings.entries()).map(([index, embedding]) => ({
|
||||
index,
|
||||
|
||||
@@ -12,6 +12,7 @@ import {
|
||||
OnJob,
|
||||
} from '../../../base';
|
||||
import { DocReader } from '../../../core/doc';
|
||||
import { readAllDocIdsFromWorkspaceSnapshot } from '../../../core/utils/blocksuite';
|
||||
import { Models } from '../../../models';
|
||||
import { CopilotStorage } from '../storage';
|
||||
import { readStream } from '../utils';
|
||||
@@ -134,10 +135,30 @@ export class CopilotEmbeddingJob {
|
||||
if (enableDocEmbedding) {
|
||||
const toBeEmbedDocIds =
|
||||
await this.models.copilotWorkspace.findDocsToEmbed(workspaceId);
|
||||
if (!toBeEmbedDocIds.length) {
|
||||
return;
|
||||
}
|
||||
// filter out trashed docs
|
||||
const rootSnapshot = await this.models.doc.getSnapshot(
|
||||
workspaceId,
|
||||
workspaceId
|
||||
);
|
||||
if (!rootSnapshot) {
|
||||
this.logger.warn(
|
||||
`Root snapshot for workspace ${workspaceId} not found, skipping embedding.`
|
||||
);
|
||||
return;
|
||||
}
|
||||
const allDocIds = new Set(
|
||||
readAllDocIdsFromWorkspaceSnapshot(rootSnapshot.blob)
|
||||
);
|
||||
this.logger.log(
|
||||
`Trigger embedding for ${toBeEmbedDocIds.length} docs in workspace ${workspaceId}`
|
||||
);
|
||||
for (const docId of toBeEmbedDocIds) {
|
||||
const finalToBeEmbedDocIds = toBeEmbedDocIds.filter(docId =>
|
||||
allDocIds.has(docId)
|
||||
);
|
||||
for (const docId of finalToBeEmbedDocIds) {
|
||||
await this.queue.add(
|
||||
'copilot.embedding.docs',
|
||||
{
|
||||
@@ -337,6 +358,10 @@ export class CopilotEmbeddingJob {
|
||||
const signal = this.getWorkspaceSignal(workspaceId);
|
||||
|
||||
try {
|
||||
const hasNewDoc = await this.models.doc.exists(
|
||||
workspaceId,
|
||||
docId.split(':space:')[1] || ''
|
||||
);
|
||||
const needEmbedding =
|
||||
await this.models.copilotWorkspace.checkDocNeedEmbedded(
|
||||
workspaceId,
|
||||
@@ -352,8 +377,11 @@ export class CopilotEmbeddingJob {
|
||||
);
|
||||
return;
|
||||
}
|
||||
const fragment = await this.getDocFragment(workspaceId, docId);
|
||||
if (fragment) {
|
||||
// if doc id deprecated, skip embedding and fulfill empty embedding
|
||||
const fragment = !hasNewDoc
|
||||
? await this.getDocFragment(workspaceId, docId)
|
||||
: undefined;
|
||||
if (!hasNewDoc && fragment) {
|
||||
// fast fail for empty docs: journals easily create an empty doc
|
||||
if (fragment.summary.trim()) {
|
||||
const embeddings = await this.embeddingClient.getFileEmbeddings(
|
||||
@@ -382,7 +410,7 @@ export class CopilotEmbeddingJob {
|
||||
);
|
||||
await this.fulfillEmptyEmbedding(workspaceId, docId);
|
||||
}
|
||||
} else if (contextId) {
|
||||
} else {
|
||||
this.logger.warn(
|
||||
`Doc ${docId} in workspace ${workspaceId} has no fragment, fulfilling empty embedding.`
|
||||
);
|
||||
@@ -415,4 +443,39 @@ export class CopilotEmbeddingJob {
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@OnJob('copilot.embedding.cleanupTrashedDocEmbeddings')
|
||||
async cleanupTrashedDocEmbeddings({
|
||||
workspaceId,
|
||||
}: Jobs['copilot.embedding.cleanupTrashedDocEmbeddings']) {
|
||||
const workspace = await this.models.workspace.get(workspaceId);
|
||||
if (!workspace) {
|
||||
this.logger.warn(`workspace ${workspaceId} not found`);
|
||||
return;
|
||||
}
|
||||
|
||||
const snapshot = await this.models.doc.getSnapshot(
|
||||
workspaceId,
|
||||
workspaceId
|
||||
);
|
||||
if (!snapshot) {
|
||||
this.logger.warn(`workspace snapshot ${workspaceId} not found`);
|
||||
return;
|
||||
}
|
||||
|
||||
const docIdsInWorkspace = readAllDocIdsFromWorkspaceSnapshot(snapshot.blob);
|
||||
const docIdsInEmbedding =
|
||||
await this.models.copilotContext.listWorkspaceEmbedding(workspaceId);
|
||||
const docIdsInWorkspaceSet = new Set(docIdsInWorkspace);
|
||||
|
||||
const deletedDocIds = docIdsInEmbedding.filter(
|
||||
docId => !docIdsInWorkspaceSet.has(docId)
|
||||
);
|
||||
for (const docId of deletedDocIds) {
|
||||
await this.models.copilotContext.deleteWorkspaceEmbedding(
|
||||
workspaceId,
|
||||
docId
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -61,6 +61,10 @@ declare global {
|
||||
fileId: string;
|
||||
fileName: string;
|
||||
};
|
||||
|
||||
'copilot.embedding.cleanupTrashedDocEmbeddings': {
|
||||
workspaceId: string;
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -15,6 +15,7 @@ import {
|
||||
CopilotContextService,
|
||||
} from './context';
|
||||
import { CopilotController } from './controller';
|
||||
import { CopilotCronJobs } from './cron';
|
||||
import { CopilotEmbeddingJob } from './embedding';
|
||||
import { ChatMessageCache } from './message';
|
||||
import { PromptService } from './prompt';
|
||||
@@ -63,7 +64,9 @@ import {
|
||||
// context
|
||||
CopilotContextResolver,
|
||||
CopilotContextService,
|
||||
// jobs
|
||||
CopilotEmbeddingJob,
|
||||
CopilotCronJobs,
|
||||
// transcription
|
||||
CopilotTranscriptionService,
|
||||
CopilotTranscriptionResolver,
|
||||
|
||||
@@ -304,6 +304,7 @@ const textActions: Prompt[] = [
|
||||
name: 'Transcript audio',
|
||||
action: 'Transcript audio',
|
||||
model: 'gemini-2.5-flash',
|
||||
optionalModels: ['gemini-2.5-flash', 'gemini-2.5-pro'],
|
||||
messages: [
|
||||
{
|
||||
role: 'system',
|
||||
@@ -333,6 +334,7 @@ Convert a multi-speaker audio recording into a structured JSON format by transcr
|
||||
config: {
|
||||
requireContent: false,
|
||||
requireAttachment: true,
|
||||
maxRetries: 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -366,6 +368,31 @@ Convert a multi-speaker audio recording into a structured JSON format by transcr
|
||||
requireAttachment: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'Conversation Summary',
|
||||
action: 'Conversation Summary',
|
||||
model: 'gpt-4.1-2025-04-14',
|
||||
messages: [
|
||||
{
|
||||
role: 'system',
|
||||
content: `You are an expert conversation summarizer. Your job is to distill long dialogues into clear, compact summaries that preserve every key decision, fact, and open question. When asked, always:
|
||||
• Honor any explicit “focus” the user gives you.
|
||||
• Match the desired length style:
|
||||
- “brief” → 1-2 sentences
|
||||
- “detailed” → ≈ 5 sentences or short bullet list
- “comprehensive” → full paragraph(s) covering all salient points.
• Write in neutral, third-person prose and never add new information.
Return only the summary text—no headings, labels, or commentary.`,
},
{
role: 'user',
content: `Summarize the conversation below so it can be carried forward without loss.\n\nFocus: {{focus}}\nDesired length: {{length}}\n\nConversation:\n{{#messages}}\n{{role}}: {{content}}\n{{/messages}}`,
},
],
config: {
requireContent: false,
},
},
{
name: 'Summary',
action: 'Summary',
@@ -1598,6 +1625,166 @@ const imageActions: Prompt[] = [
},
];

const modelActions: Prompt[] = [
{
name: 'Apply Updates',
action: 'Apply Updates',
model: 'claude-sonnet-4@20250514',
messages: [
{
role: 'user',
content: `
You are a Markdown document update engine.

You will be given:

1. content: The original Markdown document
- The content is structured into blocks.
- Each block starts with a comment like <!-- block_id=... flavour=... --> and contains the block's content.
- The content is {{content}}

2. op: A description of the edit intention
- This describes the semantic meaning of the edit, such as "Bold the first paragraph".
- The op is {{op}}

3. updates: A Markdown snippet
- The updates is {{updates}}
- This represents the block-level changes to apply to the original Markdown.
- The update may:
- **Replace** an existing block (same block_id, new content)
- **Delete** block(s) using <!-- delete block BLOCK_ID -->
- **Insert** new block(s) with a new unique block_id
- When performing deletions, the update will include **surrounding context blocks** (or use <!-- existing blocks -->) to help you determine where and what to delete.

Your task:
- Apply the update in <updates> to the document in <code>, following the intent described in <op>.
- Preserve all block_id and flavour comments.
- Maintain the original block order unless the update clearly appends new blocks.
- Do not remove or alter unrelated blocks.
- Output only the fully updated Markdown content. Do not wrap the content in \`\`\`markdown.

---

✍️ Examples

✅ Replacement (modifying an existing block)

<code>
<!-- block_id=101 flavour=paragraph -->
## Introduction

<!-- block_id=102 flavour=paragraph -->
This document provides an overview of the system architecture and its components.
</code>

<op>
Make the introduction more formal.
</op>

<updates>
<!-- block_id=102 flavour=paragraph -->
This document outlines the architectural design and individual components of the system in detail.
</updates>

Expected Output:
<!-- block_id=101 flavour=paragraph -->
## Introduction

<!-- block_id=102 flavour=paragraph -->
This document outlines the architectural design and individual components of the system in detail.

---

➕ Insertion (adding new content)

<code>
<!-- block_id=201 flavour=paragraph -->
# Project Summary

<!-- block_id=202 flavour=paragraph -->
This project aims to build a collaborative text editing tool.
</code>

<op>
Add a disclaimer section at the end.
</op>

<updates>
<!-- block_id=new-301 flavour=paragraph -->
## Disclaimer

<!-- block_id=new-302 flavour=paragraph -->
This document is subject to change. Do not distribute externally.
</updates>

Expected Output:
<!-- block_id=201 flavour=paragraph -->
# Project Summary

<!-- block_id=202 flavour=paragraph -->
This project aims to build a collaborative text editing tool.

<!-- block_id=new-301 flavour=paragraph -->
## Disclaimer

<!-- block_id=new-302 flavour=paragraph -->
This document is subject to change. Do not distribute externally.

---

❌ Deletion (removing blocks)

<code>
<!-- block_id=401 flavour=paragraph -->
## Author

<!-- block_id=402 flavour=paragraph -->
Written by the AI team at OpenResearch.

<!-- block_id=403 flavour=paragraph -->
## Experimental Section

<!-- block_id=404 flavour=paragraph -->
The following section is still under development and may change without notice.

<!-- block_id=405 flavour=paragraph -->
## License

<!-- block_id=406 flavour=paragraph -->
This document is licensed under CC BY-NC 4.0.
</code>

<op>
Remove the experimental section.
</op>

<updates>
<!-- delete block_id=403 -->
<!-- delete block_id=404 -->
</updates>

Expected Output:
<!-- block_id=401 flavour=paragraph -->
## Author

<!-- block_id=402 flavour=paragraph -->
Written by the AI team at OpenResearch.

<!-- block_id=405 flavour=paragraph -->
## License

<!-- block_id=406 flavour=paragraph -->
This document is licensed under CC BY-NC 4.0.

---

Now apply the \`updates\` to the \`content\`, following the intent in \`op\`, and return the updated Markdown.
`,
},
],
},
];
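The 'Apply Updates' prompt above operates on block-annotated Markdown: every block opens with a `<!-- block_id=... flavour=... -->` comment and deletions arrive as `<!-- delete block_id=... -->` markers. The following TypeScript sketch shows how a delete-style update could be applied without an LLM; the function name and regexes are illustrative assumptions and do not appear in the repository.

```ts
// Sketch only: applies `<!-- delete block_id=... -->` updates to block-annotated Markdown.
function applyDeleteUpdates(content: string, updates: string): string {
  // Collect the block ids the update snippet asks to delete.
  const deleteIds = new Set(
    [...updates.matchAll(/<!--\s*delete block_id=([\w-]+)\s*-->/g)].map(m => m[1])
  );
  // Split the document into blocks, keeping each leading block comment.
  const blocks = content.split(/(?=<!-- block_id=)/g);
  // Drop blocks whose id was marked for deletion; keep everything else verbatim.
  return blocks
    .filter(block => {
      const id = block.match(/<!-- block_id=([\w-]+)/)?.[1];
      return !id || !deleteIds.has(id);
    })
    .join('')
    .trim();
}
```

Run against the deletion example in the prompt, this keeps blocks 401, 402, 405 and 406 and removes 403 and 404, matching the expected output.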

const CHAT_PROMPT: Omit<Prompt, 'name'> = {
model: 'claude-sonnet-4@20250514',
optionalModels: [
@@ -1770,11 +1957,75 @@ const chat: Prompt[] = [
},
];

const artifactActions: Prompt[] = [
{
name: 'Code Artifact',
model: 'claude-sonnet-4@20250514',
messages: [
{
role: 'system',
content: `
When sent new notes, respond ONLY with the contents of the html file.
DO NOT INCLUDE ANY OTHER TEXT, EXPLANATIONS, APOLOGIES, OR INTRODUCTORY/CLOSING PHRASES.
IF USER DOES NOT SPECIFY A STYLE, FOLLOW THE DEFAULT STYLE.
<generate_guide>
- The results should be a single HTML file.
- Use tailwindcss to style the website
- Put any additional CSS styles in a style tag and any JavaScript in a script tag.
- Use unpkg or skypack to import any required dependencies.
- Use Google fonts to pull in any open source fonts you require.
- Use lucide icons for any icons.
- If you have any images, load them from Unsplash or use solid colored rectangles.
</generate_guide>

<DO_NOT_USE_COLORS>
- DO NOT USE ANY COLORS
</DO_NOT_USE_COLORS>
<DO_NOT_USE_GRADIENTS>
- DO NOT USE ANY GRADIENTS
</DO_NOT_USE_GRADIENTS>

<COLOR_THEME>
- --affine-blue-300: #93e2fd
- --affine-blue-400: #60cffa
- --affine-blue-500: #3ab5f7
- --affine-blue-600: #1e96eb
- --affine-blue-700: #1e67af
- --affine-text-primary-color: #121212
- --affine-text-secondary-color: #8e8d91
- --affine-text-disable-color: #a9a9ad
- --affine-background-overlay-panel-color: #fbfbfc
- --affine-background-secondary-color: #f4f4f5
- --affine-background-primary-color: #fff
</COLOR_THEME>
<default_style_guide>
- MUST USE White and Blue(#1e96eb) as the primary color
- KEEP THE DEFAULT STYLE SIMPLE AND CLEAN
- DO NOT USE ANY COMPLEX STYLES
- DO NOT USE ANY GRADIENTS
- USE LESS SHADOWS
- USE RADIUS 4px or 8px for rounded corners
- USE 12px or 16px for padding
- Use the tailwind color gray, zinc, slate, neutral much more.
- Use 0.5px border should be better
</default_style_guide>
`,
},
{
role: 'user',
content: '{{content}}',
},
],
},
];
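The 'Code Artifact' system prompt expects a bare HTML document back from the model. A hedged sketch of driving it through the prompt service and provider factory is shown below; it mirrors the createCodeArtifactTool changes later in this diff, but the wrapper function name is an assumption.

```ts
import type { PromptService } from '../prompt';
import type { CopilotProviderFactory } from '../providers';

// Sketch: fetch the 'Code Artifact' prompt, render it with the user's request,
// and return the generated HTML.
async function renderCodeArtifact(
  promptService: PromptService,
  factory: CopilotProviderFactory,
  userPrompt: string
): Promise<string> {
  const prompt = await promptService.get('Code Artifact');
  if (!prompt) throw new Error('Prompt not found');

  const provider = await factory.getProviderByModel(prompt.model);
  if (!provider) throw new Error('Provider not found');

  // prompt.finish({ content: ... }) fills the {{content}} placeholder in the user message.
  const html = await provider.text(
    { modelId: prompt.model },
    prompt.finish({ content: userPrompt })
  );

  // The system prompt forbids markdown fences, but strip them defensively.
  return html.trim().replace(/^`{3}(?:html)?\s*/, '').replace(/`{3}\s*$/, '');
}
```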

export const prompts: Prompt[] = [
...textActions,
...imageActions,
...modelActions,
...chat,
...workflows,
...artifactActions,
];

export async function refreshPrompts(db: PrismaClient) {

@@ -129,7 +129,16 @@ export abstract class GeminiProvider<T> extends CopilotProvider<T> {
|
||||
system,
|
||||
messages: msgs,
|
||||
schema,
|
||||
providerOptions: {
|
||||
google: {
|
||||
thinkingConfig: {
|
||||
thinkingBudget: -1,
|
||||
includeThoughts: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
abortSignal: options.signal,
|
||||
maxRetries: options.maxRetries || 3,
|
||||
experimental_repairText: async ({ text, error }) => {
|
||||
if (error instanceof JSONParseError) {
|
||||
// strange fixed response, temporarily replace it
|
||||
|
||||
@@ -37,6 +37,24 @@ export class MorphProvider extends CopilotProvider<MorphConfig> {
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
id: 'morph-v3-fast',
|
||||
capabilities: [
|
||||
{
|
||||
input: [ModelInputType.Text],
|
||||
output: [ModelOutputType.Text],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
id: 'morph-v3-large',
|
||||
capabilities: [
|
||||
{
|
||||
input: [ModelInputType.Text],
|
||||
output: [ModelOutputType.Text],
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
#instance!: VercelOpenAICompatibleProvider;
|
||||
|
||||
@@ -21,6 +21,7 @@ import {
|
||||
buildDocKeywordSearchGetter,
|
||||
buildDocSearchGetter,
|
||||
createCodeArtifactTool,
|
||||
createConversationSummaryTool,
|
||||
createDocComposeTool,
|
||||
createDocEditTool,
|
||||
createDocKeywordSearchTool,
|
||||
@@ -139,6 +140,11 @@ export abstract class CopilotProvider<C = any> {
|
||||
if (options?.tools?.length) {
|
||||
this.logger.debug(`getTools: ${JSON.stringify(options.tools)}`);
|
||||
const ac = this.moduleRef.get(AccessController, { strict: false });
|
||||
const docReader = this.moduleRef.get(DocReader, { strict: false });
|
||||
const models = this.moduleRef.get(Models, { strict: false });
|
||||
const prompt = this.moduleRef.get(PromptService, {
|
||||
strict: false,
|
||||
});
|
||||
|
||||
for (const tool of options.tools) {
|
||||
const toolDef = this.getProviderSpecificTools(tool, model);
|
||||
@@ -150,11 +156,23 @@ export abstract class CopilotProvider<C = any> {
|
||||
continue;
|
||||
}
|
||||
switch (tool) {
|
||||
case 'codeArtifact': {
|
||||
tools.code_artifact = createCodeArtifactTool(prompt, this.factory);
|
||||
break;
|
||||
}
|
||||
case 'conversationSummary': {
|
||||
tools.conversation_summary = createConversationSummaryTool(
|
||||
options.session,
|
||||
prompt,
|
||||
this.factory
|
||||
);
|
||||
break;
|
||||
}
|
||||
case 'docEdit': {
|
||||
const doc = this.moduleRef.get(DocReader, { strict: false });
|
||||
const getDocContent = buildContentGetter(ac, doc);
|
||||
const getDocContent = buildContentGetter(ac, docReader);
|
||||
tools.doc_edit = createDocEditTool(
|
||||
this.factory,
|
||||
prompt,
|
||||
getDocContent.bind(null, options)
|
||||
);
|
||||
break;
|
||||
@@ -163,11 +181,15 @@ export abstract class CopilotProvider<C = any> {
|
||||
const context = this.moduleRef.get(CopilotContextService, {
|
||||
strict: false,
|
||||
});
|
||||
|
||||
const docContext = options.session
|
||||
? await context.getBySessionId(options.session)
|
||||
: null;
|
||||
const searchDocs = buildDocSearchGetter(ac, context, docContext);
|
||||
const searchDocs = buildDocSearchGetter(
|
||||
ac,
|
||||
context,
|
||||
docContext,
|
||||
models
|
||||
);
|
||||
tools.doc_semantic_search = createDocSemanticSearchTool(
|
||||
searchDocs.bind(null, options)
|
||||
);
|
||||
@@ -175,9 +197,6 @@ export abstract class CopilotProvider<C = any> {
|
||||
}
|
||||
case 'docKeywordSearch': {
|
||||
if (this.AFFiNEConfig.indexer.enabled) {
|
||||
const ac = this.moduleRef.get(AccessController, {
|
||||
strict: false,
|
||||
});
|
||||
const indexerService = this.moduleRef.get(IndexerService, {
|
||||
strict: false,
|
||||
});
|
||||
@@ -192,9 +211,6 @@ export abstract class CopilotProvider<C = any> {
|
||||
break;
|
||||
}
|
||||
case 'docRead': {
|
||||
const ac = this.moduleRef.get(AccessController, { strict: false });
|
||||
const models = this.moduleRef.get(Models, { strict: false });
|
||||
const docReader = this.moduleRef.get(DocReader, { strict: false });
|
||||
const getDoc = buildDocContentGetter(ac, docReader, models);
|
||||
tools.doc_read = createDocReadTool(getDoc.bind(null, options));
|
||||
break;
|
||||
@@ -205,23 +221,7 @@ export abstract class CopilotProvider<C = any> {
|
||||
break;
|
||||
}
|
||||
case 'docCompose': {
|
||||
const promptService = this.moduleRef.get(PromptService, {
|
||||
strict: false,
|
||||
});
|
||||
tools.doc_compose = createDocComposeTool(
|
||||
promptService,
|
||||
this.factory
|
||||
);
|
||||
break;
|
||||
}
|
||||
case 'codeArtifact': {
|
||||
const promptService = this.moduleRef.get(PromptService, {
|
||||
strict: false,
|
||||
});
|
||||
tools.code_artifact = createCodeArtifactTool(
|
||||
promptService,
|
||||
this.factory
|
||||
);
|
||||
tools.doc_compose = createDocComposeTool(prompt, this.factory);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -60,6 +60,8 @@ export const VertexSchema: JSONSchema = {
|
||||
export const PromptConfigStrictSchema = z.object({
|
||||
tools: z
|
||||
.enum([
|
||||
'codeArtifact',
|
||||
'conversationSummary',
|
||||
// work with morph
|
||||
'docEdit',
|
||||
// work with indexer
|
||||
@@ -71,7 +73,6 @@ export const PromptConfigStrictSchema = z.object({
|
||||
'webSearch',
|
||||
// artifact tools
|
||||
'docCompose',
|
||||
'codeArtifact',
|
||||
])
|
||||
.array()
|
||||
.nullable()
|
||||
|
||||
@@ -6,20 +6,10 @@ import {
|
||||
ImagePart,
|
||||
TextPart,
|
||||
TextStreamPart,
|
||||
ToolSet,
|
||||
} from 'ai';
|
||||
import { ZodType } from 'zod';
|
||||
|
||||
import {
|
||||
createCodeArtifactTool,
|
||||
createDocComposeTool,
|
||||
createDocEditTool,
|
||||
createDocKeywordSearchTool,
|
||||
createDocReadTool,
|
||||
createDocSemanticSearchTool,
|
||||
createExaCrawlTool,
|
||||
createExaSearchTool,
|
||||
} from '../tools';
|
||||
import { CustomAITools } from '../tools';
|
||||
import { PromptMessage, StreamObject } from './types';
|
||||
|
||||
type ChatMessage = CoreUserMessage | CoreAssistantMessage;
|
||||
@@ -385,17 +375,6 @@ export class CitationParser {
|
||||
}
|
||||
}
|
||||
|
||||
export interface CustomAITools extends ToolSet {
|
||||
doc_edit: ReturnType<typeof createDocEditTool>;
|
||||
doc_semantic_search: ReturnType<typeof createDocSemanticSearchTool>;
|
||||
doc_keyword_search: ReturnType<typeof createDocKeywordSearchTool>;
|
||||
doc_read: ReturnType<typeof createDocReadTool>;
|
||||
doc_compose: ReturnType<typeof createDocComposeTool>;
|
||||
web_search_exa: ReturnType<typeof createExaSearchTool>;
|
||||
web_crawl_exa: ReturnType<typeof createExaCrawlTool>;
|
||||
code_artifact: ReturnType<typeof createCodeArtifactTool>;
|
||||
}
|
||||
|
||||
type ChunkType = TextStreamPart<CustomAITools>['type'];
|
||||
|
||||
export function toError(error: unknown): Error {
|
||||
@@ -451,6 +430,10 @@ export class TextStreamParser {
|
||||
);
|
||||
result = this.addPrefix(result);
|
||||
switch (chunk.toolName) {
|
||||
case 'conversation_summary': {
|
||||
result += `\nSummarizing context\n`;
|
||||
break;
|
||||
}
|
||||
case 'web_search_exa': {
|
||||
result += `\nSearching the web "${chunk.args.query}"\n`;
|
||||
break;
|
||||
@@ -489,10 +472,18 @@ export class TextStreamParser {
|
||||
result = this.addPrefix(result);
|
||||
switch (chunk.toolName) {
|
||||
case 'doc_edit': {
|
||||
if (chunk.result && typeof chunk.result === 'object') {
|
||||
result += `\n${chunk.result.result}\n`;
|
||||
if (
|
||||
chunk.result &&
|
||||
typeof chunk.result === 'object' &&
|
||||
Array.isArray(chunk.result.result)
|
||||
) {
|
||||
result += chunk.result.result
|
||||
.map(item => {
|
||||
return `\n${item.changedContent}\n`;
|
||||
})
|
||||
.join('');
|
||||
this.docEditFootnotes[this.docEditFootnotes.length - 1].result =
|
||||
chunk.result.result;
|
||||
result;
|
||||
} else {
|
||||
this.docEditFootnotes.pop();
|
||||
}
|
||||
|
||||
@@ -23,6 +23,7 @@ import {
|
||||
CallMetric,
|
||||
CopilotDocNotFound,
|
||||
CopilotFailedToCreateMessage,
|
||||
CopilotProviderSideError,
|
||||
CopilotSessionNotFound,
|
||||
type FileUpload,
|
||||
paginate,
|
||||
@@ -31,14 +32,18 @@ import {
|
||||
RequestMutex,
|
||||
Throttle,
|
||||
TooManyRequest,
|
||||
UserFriendlyError,
|
||||
} from '../../base';
|
||||
import { CurrentUser } from '../../core/auth';
|
||||
import { Admin } from '../../core/common';
|
||||
import { AccessController } from '../../core/permission';
|
||||
import { DocReader } from '../../core/doc';
|
||||
import { AccessController, DocAction } from '../../core/permission';
|
||||
import { UserType } from '../../core/user';
|
||||
import type { ListSessionOptions, UpdateChatSession } from '../../models';
|
||||
import { CopilotCronJobs } from './cron';
|
||||
import { PromptService } from './prompt';
|
||||
import { PromptMessage, StreamObject } from './providers';
|
||||
import { CopilotProviderFactory } from './providers/factory';
|
||||
import { ChatSessionService } from './session';
|
||||
import { CopilotStorage } from './storage';
|
||||
import { type ChatHistory, type ChatMessage, SubmittedMessage } from './types';
|
||||
@@ -138,6 +143,9 @@ class CreateChatMessageInput implements Omit<SubmittedMessage, 'content'> {
|
||||
@Field(() => [String], { nullable: true, deprecationReason: 'use blobs' })
|
||||
attachments!: string[] | undefined;
|
||||
|
||||
@Field(() => GraphQLUpload, { nullable: true })
|
||||
blob!: Promise<FileUpload> | undefined;
|
||||
|
||||
@Field(() => [GraphQLUpload], { nullable: true })
|
||||
blobs!: Promise<FileUpload>[] | undefined;
|
||||
|
||||
@@ -396,7 +404,9 @@ export class CopilotResolver {
|
||||
private readonly ac: AccessController,
|
||||
private readonly mutex: RequestMutex,
|
||||
private readonly chatSession: ChatSessionService,
|
||||
private readonly storage: CopilotStorage
|
||||
private readonly storage: CopilotStorage,
|
||||
private readonly docReader: DocReader,
|
||||
private readonly providerFactory: CopilotProviderFactory
|
||||
) {}
|
||||
|
||||
@ResolveField(() => CopilotQuotaType, {
|
||||
@@ -410,7 +420,8 @@ export class CopilotResolver {
|
||||
|
||||
private async assertPermission(
|
||||
user: CurrentUser,
|
||||
options: { workspaceId?: string | null; docId?: string | null }
|
||||
options: { workspaceId?: string | null; docId?: string | null },
|
||||
fallbackAction?: DocAction
|
||||
) {
|
||||
const { workspaceId, docId } = options;
|
||||
if (!workspaceId) {
|
||||
@@ -421,7 +432,7 @@ export class CopilotResolver {
|
||||
.user(user.id)
|
||||
.doc({ workspaceId, docId })
|
||||
.allowLocal()
|
||||
.assert('Doc.Update');
|
||||
.assert(fallbackAction ?? 'Doc.Update');
|
||||
} else {
|
||||
await this.ac
|
||||
.user(user.id)
|
||||
@@ -500,7 +511,7 @@ export class CopilotResolver {
|
||||
if (!workspaceId) {
|
||||
return [];
|
||||
} else {
|
||||
await this.assertPermission(user, { workspaceId, docId });
|
||||
await this.assertPermission(user, { workspaceId, docId }, 'Doc.Read');
|
||||
}
|
||||
|
||||
const histories = await this.chatSession.list(
|
||||
@@ -530,7 +541,7 @@ export class CopilotResolver {
|
||||
if (!workspaceId) {
|
||||
return paginate([], 'updatedAt', pagination, 0);
|
||||
} else {
|
||||
await this.assertPermission(user, { workspaceId, docId });
|
||||
await this.assertPermission(user, { workspaceId, docId }, 'Doc.Read');
|
||||
}
|
||||
|
||||
const finalOptions = Object.assign(
|
||||
@@ -696,10 +707,13 @@ export class CopilotResolver {
|
||||
}
|
||||
|
||||
const attachments: PromptMessage['attachments'] = options.attachments || [];
|
||||
if (options.blobs) {
|
||||
if (options.blob || options.blobs) {
|
||||
const { workspaceId } = session.config;
|
||||
|
||||
const blobs = await Promise.all(options.blobs);
|
||||
const blobs = await Promise.all(
|
||||
options.blob ? [options.blob] : options.blobs || []
|
||||
);
|
||||
delete options.blob;
|
||||
delete options.blobs;
|
||||
|
||||
for (const blob of blobs) {
|
||||
@@ -724,6 +738,65 @@ export class CopilotResolver {
|
||||
}
|
||||
}
|
||||

@Query(() => String, {
description:
'Apply updates to a doc using LLM and return the merged markdown.',
})
async applyDocUpdates(
@CurrentUser() user: CurrentUser,
@Args({ name: 'workspaceId', type: () => String })
workspaceId: string,
@Args({ name: 'docId', type: () => String })
docId: string,
@Args({ name: 'op', type: () => String })
op: string,
@Args({ name: 'updates', type: () => String })
updates: string
): Promise<string> {
await this.assertPermission(user, { workspaceId, docId });

const docContent = await this.docReader.getDocMarkdown(
workspaceId,
docId,
true
);
if (!docContent || !docContent.markdown) {
throw new NotFoundException('Doc not found or empty');
}

const markdown = docContent.markdown.trim();

// Get LLM provider
const provider =
await this.providerFactory.getProviderByModel('morph-v3-large');
if (!provider) {
throw new BadRequestException('No LLM provider available');
}

try {
return await provider.text(
{ modelId: 'morph-v3-large' },
[
{
role: 'user',
content: `<instruction>${op}</instruction>\n<code>${markdown}</code>\n<update>${updates}</update>`,
},
],
{ reasoning: false }
);
} catch (e: any) {
if (e instanceof UserFriendlyError) {
throw e;
} else {
throw new CopilotProviderSideError({
provider: provider.type,
kind: 'unexpected_response',
message: e?.message || 'Unexpected apply response',
});
}
}
}

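The new applyDocUpdates query can be exercised from the client through the generated applyDocUpdatesQuery constant added further down in this diff. A hedged sketch follows; the fetcher type and the '@affine/graphql' import path are assumptions, not repository code.

```ts
import { applyDocUpdatesQuery } from '@affine/graphql';

// `gql` stands for any fetcher produced by gqlFetcherFactory; its construction is omitted here.
type GqlFetcher = (options: {
  query: typeof applyDocUpdatesQuery;
  variables: { workspaceId: string; docId: string; op: string; updates: string };
  signal?: AbortSignal;
}) => Promise<{ applyDocUpdates: string }>;

async function mergeDocUpdates(
  gql: GqlFetcher,
  workspaceId: string,
  docId: string,
  op: string,
  updates: string,
  signal?: AbortSignal
): Promise<string> {
  const res = await gql({
    query: applyDocUpdatesQuery,
    variables: { workspaceId, docId, op, updates },
    signal, // AbortSignal support added to RequestOptions in this diff
  });
  return res.applyDocUpdates;
}
```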
private transformToSessionType(
|
||||
session: Omit<ChatHistory, 'messages'>
|
||||
): CopilotSessionType {
|
||||
@@ -773,7 +846,26 @@ class CreateCopilotPromptInput {
|
||||
@Admin()
|
||||
@Resolver(() => String)
|
||||
export class PromptsManagementResolver {
|
||||
constructor(private readonly promptService: PromptService) {}
|
||||
constructor(
|
||||
private readonly cron: CopilotCronJobs,
|
||||
private readonly promptService: PromptService
|
||||
) {}
|
||||
|
||||
@Mutation(() => Boolean, {
|
||||
description: 'Trigger generate missing titles cron job',
|
||||
})
|
||||
async triggerGenerateTitleCron() {
|
||||
await this.cron.triggerGenerateMissingTitles();
|
||||
return true;
|
||||
}
|
||||
|
||||
@Mutation(() => Boolean, {
|
||||
description: 'Trigger cleanup of trashed doc embeddings',
|
||||
})
|
||||
async triggerCleanupTrashedDocEmbeddings() {
|
||||
await this.cron.triggerCleanupTrashedDocEmbeddings();
|
||||
return true;
|
||||
}
|
||||
|
||||
@Query(() => [CopilotPromptType], {
|
||||
description: 'List all copilot prompts',
|
||||
|
||||
@@ -507,6 +507,8 @@ export class ChatSessionService {
|
||||
return await this.models.copilotSession.fork({
|
||||
...session,
|
||||
userId: options.userId,
|
||||
// docId can be changed in fork
|
||||
docId: options.docId,
|
||||
sessionId: randomUUID(),
|
||||
parentSessionId: options.sessionId,
|
||||
messages,
|
||||
@@ -569,7 +571,7 @@ export class ChatSessionService {
|
||||
});
|
||||
|
||||
if (!provider) {
|
||||
throw new NoCopilotProviderAvailable();
|
||||
throw new NoCopilotProviderAvailable({ modelId: prompt.model });
|
||||
}
|
||||
|
||||
return provider.text(cond, [...prompt.finish({}), msg], config);
|
||||
|
||||
@@ -5,9 +5,7 @@ import { z } from 'zod';
|
||||
import type { PromptService } from '../prompt';
|
||||
import type { CopilotProviderFactory } from '../providers';
|
||||
import { toolError } from './error';
|
||||
|
||||
const logger = new Logger('CodeArtifactTool');
|
||||
|
||||
/**
|
||||
* A copilot tool that produces a completely self-contained HTML artifact.
|
||||
* The returned HTML must include <style> and <script> tags directly so that
|
||||
@@ -37,23 +35,20 @@ export const createCodeArtifactTool = (
|
||||
}),
|
||||
execute: async ({ title, userPrompt }) => {
|
||||
try {
|
||||
const prompt = await promptService.get('Make it real with text');
|
||||
const prompt = await promptService.get('Code Artifact');
|
||||
if (!prompt) {
|
||||
throw new Error('Prompt not found');
|
||||
}
|
||||
|
||||
const provider = await factory.getProviderByModel(prompt.model);
|
||||
if (!provider) {
|
||||
throw new Error('Provider not found');
|
||||
}
|
||||
|
||||
const content = await provider.text(
|
||||
{
|
||||
modelId: prompt.model,
|
||||
},
|
||||
[...prompt.finish({}), { role: 'user', content: userPrompt }]
|
||||
prompt.finish({ content: userPrompt })
|
||||
);
|
||||
|
||||
// Remove surrounding ``` or ```html fences if present
|
||||
let stripped = content.trim();
|
||||
if (stripped.startsWith('```')) {
|
||||
@@ -65,7 +60,6 @@ export const createCodeArtifactTool = (
|
||||
stripped = stripped.slice(0, -3);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
title,
|
||||
html: stripped,
|
||||
|
||||
@@ -0,0 +1,76 @@
|
||||
import { Logger } from '@nestjs/common';
|
||||
import { tool } from 'ai';
|
||||
import { z } from 'zod';
|
||||
|
||||
import type { PromptService } from '../prompt';
|
||||
import type { CopilotProviderFactory } from '../providers';
|
||||
import { toolError } from './error';
|
||||
|
||||
const logger = new Logger('ConversationSummaryTool');
|
||||
|
||||
export const createConversationSummaryTool = (
|
||||
sessionId: string | undefined,
|
||||
promptService: PromptService,
|
||||
factory: CopilotProviderFactory
|
||||
) => {
|
||||
return tool({
|
||||
description:
|
||||
'Create a concise, AI-generated summary of the conversation so far—capturing key topics, decisions, and critical details. Use this tool whenever the context becomes lengthy to preserve essential information that might otherwise be lost to truncation in future turns.',
|
||||
parameters: z.object({
|
||||
focus: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe(
|
||||
'Optional focus area for the summary (e.g., "technical decisions", "user requirements", "project status")'
|
||||
),
|
||||
length: z
|
||||
.enum(['brief', 'detailed', 'comprehensive'])
|
||||
.default('detailed')
|
||||
.describe(
|
||||
'The desired length of the summary: brief (1-2 sentences), detailed (paragraph), comprehensive (multiple paragraphs)'
|
||||
),
|
||||
}),
|
||||
execute: async ({ focus, length }, { messages }) => {
|
||||
try {
|
||||
if (!messages || messages.length === 0) {
|
||||
return toolError(
|
||||
'No Conversation Context',
|
||||
'No messages available to summarize'
|
||||
);
|
||||
}
|
||||
|
||||
const prompt = await promptService.get('Conversation Summary');
|
||||
const provider = await factory.getProviderByModel(prompt?.model || '');
|
||||
|
||||
if (!prompt || !provider) {
|
||||
return toolError(
|
||||
'Prompt Not Found',
|
||||
'Failed to summarize conversation.'
|
||||
);
|
||||
}
|
||||
|
||||
const summary = await provider.text(
|
||||
{ modelId: prompt.model },
|
||||
prompt.finish({
|
||||
messages: messages.map(m => ({
|
||||
...m,
|
||||
content: m.content.toString(),
|
||||
})),
|
||||
focus: focus || 'general',
|
||||
length,
|
||||
})
|
||||
);
|
||||
|
||||
return {
|
||||
focusArea: focus || 'general',
|
||||
messageCount: messages.length,
|
||||
summary,
|
||||
timestamp: new Date().toISOString(),
|
||||
};
|
||||
} catch (err: any) {
|
||||
logger.error(`Failed to summarize conversation (${sessionId})`, err);
|
||||
return toolError('Conversation Summary Failed', err.message);
|
||||
}
|
||||
},
|
||||
});
|
||||
};
|
||||
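The conversation-summary tool above is registered in a provider's tool set under the conversation_summary key; on success its execute() resolves to { focusArea, messageCount, summary, timestamp }. A small registration sketch, mirroring the CopilotProvider switch earlier in this diff (the helper name is an assumption and the import paths follow those used elsewhere in the diff):

```ts
import type { PromptService } from '../prompt';
import type { CopilotProviderFactory } from '../providers';
import { createConversationSummaryTool, type CustomAITools } from '../tools';

// Sketch: attach the summary tool to a partially built tool set.
function withConversationSummary(
  tools: Partial<CustomAITools>,
  sessionId: string | undefined,
  promptService: PromptService,
  factory: CopilotProviderFactory
): Partial<CustomAITools> {
  tools.conversation_summary = createConversationSummaryTool(
    sessionId,
    promptService,
    factory
  );
  return tools;
}
```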
@@ -3,6 +3,7 @@ import { z } from 'zod';
|
||||
|
||||
import { DocReader } from '../../../core/doc';
|
||||
import { AccessController } from '../../../core/permission';
|
||||
import { type PromptService } from '../prompt';
|
||||
import type { CopilotChatOptions, CopilotProviderFactory } from '../providers';
|
||||
|
||||
export const buildContentGetter = (ac: AccessController, doc: DocReader) => {
|
||||
@@ -24,14 +25,20 @@ export const buildContentGetter = (ac: AccessController, doc: DocReader) => {
|
||||
|
||||
export const createDocEditTool = (
|
||||
factory: CopilotProviderFactory,
|
||||
prompt: PromptService,
|
||||
getContent: (targetId?: string) => Promise<string | undefined>
|
||||
) => {
|
||||
return tool({
|
||||
description: `
|
||||
Use this tool to propose an edit to a structured Markdown document with identifiable blocks. Each block begins with a comment like <!-- block_id=... -->, and represents a unit of editable content such as a heading, paragraph, list, or code snippet.
|
||||
Use this tool to propose an edit to a structured Markdown document with identifiable blocks.
|
||||
Each block begins with a comment like <!-- block_id=... -->, and represents a unit of editable content such as a heading, paragraph, list, or code snippet.
|
||||
This will be read by a less intelligent model, which will quickly apply the edit. You should make it clear what the edit is, while also minimizing the unchanged code you write.
|
||||
|
||||
Your task is to return a list of block-level changes needed to fulfill the user's intent. Each change should correspond to a specific user instruction and be represented by one of the following operations:
|
||||
If you receive a markdown without block_id comments, you should call \`doc_read\` tool to get the content.
|
||||
|
||||
Your task is to return a list of block-level changes needed to fulfill the user's intent. **Each change in code_edit must be completely independent: each code_edit entry should only perform a single, isolated change, and must not include the effects of other changes. For example, the updates for a delete operation should only show the context related to the deletion, and must not include any content modified by other operations (such as bolding or insertion). This ensures that each change can be applied independently and in any order.**
|
||||
|
||||
Each change should correspond to a specific user instruction and be represented by one of the following operations:
|
||||
|
||||
replace: Replace the content of a block with updated Markdown.
|
||||
|
||||
@@ -41,83 +48,83 @@ insert: Add a new block, and specify its block_id and content.
|
||||
|
||||
Important Instructions:
|
||||
- Use the existing block structure as-is. Do not reformat or reorder blocks unless explicitly asked.
|
||||
- Always preserve block_id and type in your replacements.
|
||||
- When replacing a block, use the full new block including <!-- block_id=... type=... --> and the updated content.
|
||||
- When inserting, follow the same format as a replacement, but ensure the new block_id does not conflict with existing IDs.
|
||||
- Each list item should be a block.
|
||||
- Use <!-- existing blocks ... --> for unchanged sections.
|
||||
- If you plan on deleting a section, you must provide surrounding context to indicate the deletion.
|
||||
- When replacing content, always keep the original block_id unchanged.
|
||||
- When deleting content, only use the format <!-- delete block_id=xxx -->, and only for valid block_id present in the original <code> content.
|
||||
- Each top-level list item should be a block. Like this:
|
||||
\`\`\`markdown
|
||||
<!-- block_id=001 flavour=affine:list -->
|
||||
* Item 1
|
||||
* SubItem 1
|
||||
<!-- block_id=002 flavour=affine:list -->
|
||||
1. Item 1
|
||||
1. SubItem 1
|
||||
\`\`\`
|
||||
- Your task is to return a list of block-level changes needed to fulfill the user's intent.
|
||||
- **Each change in code_edit must be completely independent: each code_edit entry should only perform a single, isolated change, and must not include the effects of other changes. For example, the updates for a delete operation should only show the context related to the deletion, and must not include any content modified by other operations (such as bolding or insertion). This ensures that each change can be applied independently and in any order.**
|
||||
|
||||
Example Input Document:
|
||||
\`\`\`md
|
||||
<!-- block_id=block-001 type=paragraph -->
|
||||
# My Holiday Plan
|
||||
Original Content:
|
||||
\`\`\`markdown
|
||||
<!-- block_id=001 flavour=paragraph -->
|
||||
# Andriy Shevchenko
|
||||
|
||||
<!-- block_id=block-002 type=paragraph -->
|
||||
I plan to travel to Paris, France, where I will visit the Eiffel Tower, the Louvre, and the Champs-Élysées.
|
||||
<!-- block_id=002 flavour=paragraph -->
|
||||
## Player Profile
|
||||
|
||||
<!-- block_id=block-003 type=paragraph -->
|
||||
I love Paris.
|
||||
<!-- block_id=003 flavour=paragraph -->
|
||||
Andriy Shevchenko is a legendary Ukrainian striker, best known for his time at AC Milan and Dynamo Kyiv. He won the Ballon d'Or in 2004.
|
||||
|
||||
<!-- block_id=block-004 type=paragraph -->
|
||||
## Reason for the delay
|
||||
<!-- block_id=004 flavour=paragraph -->
|
||||
## Career Overview
|
||||
|
||||
<!-- block_id=block-005 type=paragraph -->
|
||||
This plan has been brewing for a long time, but I always postponed it because I was too busy with work.
|
||||
|
||||
<!-- block_id=block-006 type=paragraph -->
|
||||
## Trip Steps
|
||||
|
||||
<!-- block_id=block-007 type=list -->
|
||||
- Book flight tickets
|
||||
<!-- block_id=block-008 type=list -->
|
||||
- Reserve a hotel
|
||||
<!-- block_id=block-009 type=list -->
|
||||
- Prepare visa documents
|
||||
<!-- block_id=block-010 type=list -->
|
||||
- Plan the itinerary
|
||||
|
||||
<!-- block_id=block-011 type=paragraph -->
|
||||
Additionally, I plan to learn some basic French to make communication easier during the trip.
|
||||
<!-- block_id=005 flavour=list -->
|
||||
- Born in 1976 in Ukraine.
|
||||
<!-- block_id=006 flavour=list -->
|
||||
- Rose to fame at Dynamo Kyiv in the 1990s.
|
||||
<!-- block_id=007 flavour=list -->
|
||||
- Starred at AC Milan (1999–2006), scoring over 170 goals.
|
||||
<!-- block_id=008 flavour=list -->
|
||||
- Played for Chelsea (2006–2009) before returning to Kyiv.
|
||||
<!-- block_id=009 flavour=list -->
|
||||
- Coached Ukraine national team, reaching Euro 2020 quarter-finals.
|
||||
\`\`\`
|
||||
|
||||
Example User Request:
|
||||
|
||||
User Request:
|
||||
\`\`\`
|
||||
Translate the trip steps to Chinese, remove the reason for the delay, and bold the final paragraph.
|
||||
Bold the player’s name in the intro, add a summary section at the end, and remove the career overview.
|
||||
\`\`\`
|
||||
|
||||
Expected Output:
|
||||
|
||||
\`\`\`md
|
||||
<!-- existing blocks ... -->
|
||||
|
||||
<!-- block_id=block-002 type=paragraph -->
|
||||
I plan to travel to Paris, France, where I will visit the Eiffel Tower, the Louvre, and the Champs-Élysées.
|
||||
|
||||
<!-- block_id=block-003 type=paragraph -->
|
||||
I love Paris.
|
||||
|
||||
<!-- delete block-004 -->
|
||||
|
||||
<!-- delete block-005 -->
|
||||
|
||||
<!-- block_id=block-006 type=paragraph -->
|
||||
## Trip Steps
|
||||
|
||||
<!-- block_id=block-007 type=list -->
|
||||
- 订机票
|
||||
<!-- block_id=block-008 type=list -->
|
||||
- 预定酒店
|
||||
<!-- block_id=block-009 type=list -->
|
||||
- 准备签证材料
|
||||
<!-- block_id=block-010 type=list -->
|
||||
- 规划行程
|
||||
|
||||
<!-- existing blocks ... -->
|
||||
|
||||
<!-- block_id=block-011 type=paragraph -->
|
||||
**Additionally, I plan to learn some basic French to make communication easier during the trip.**
|
||||
Example response:
|
||||
\`\`\`json
|
||||
[
|
||||
{
|
||||
"op": "Bold the player's name in the introduction",
|
||||
"updates": "
|
||||
<!-- block_id=003 flavour=paragraph -->
|
||||
**Andriy Shevchenko** is a legendary Ukrainian striker, best known for his time at AC Milan and Dynamo Kyiv. He won the Ballon d'Or in 2004.
|
||||
"
|
||||
},
|
||||
{
|
||||
"op": "Add a summary section at the end",
|
||||
"updates": "
|
||||
<!-- block_id=new-abc123 flavour=paragraph -->
|
||||
## Summary
|
||||
<!-- block_id=new-def456 flavour=paragraph -->
|
||||
Shevchenko is celebrated as one of the greatest Ukrainian footballers of all time. Known for his composure, strength, and goal-scoring instinct, he left a lasting legacy both on and off the pitch.
|
||||
"
|
||||
},
|
||||
{
|
||||
"op": "Delete the career overview section",
|
||||
"updates": "
|
||||
<!-- delete block_id=004 -->
|
||||
<!-- delete block_id=005 -->
|
||||
<!-- delete block_id=006 -->
|
||||
<!-- delete block_id=007 -->
|
||||
<!-- delete block_id=008 -->
|
||||
<!-- delete block_id=009 -->
|
||||
"
|
||||
}
|
||||
]
|
||||
\`\`\`
|
||||
You should specify the following arguments before the others: [doc_id], [origin_content]
|
||||
|
||||
@@ -143,15 +150,42 @@ You should specify the following arguments before the others: [doc_id], [origin_
|
||||
'A short, first-person description of the intended edit, clearly summarizing what I will change. For example: "I will translate the steps into English and delete the paragraph explaining the delay." This helps the downstream system understand the purpose of the changes.'
|
||||
),
|
||||
|
||||
code_edit: z
|
||||
.string()
|
||||
.describe(
|
||||
'Specify only the necessary Markdown block-level changes. Return a list of inserted, replaced, or deleted blocks. Each block must start with its <!-- block_id=... type=... --> comment. Use <!-- existing blocks ... --> for unchanged sections.If you plan on deleting a section, you must provide surrounding context to indicate the deletion.'
|
||||
),
|
||||
code_edit: z.preprocess(
|
||||
val => {
|
||||
// BACKGROUND: LLM sometimes returns a JSON string instead of an array.
|
||||
if (typeof val === 'string') {
|
||||
return JSON.parse(val);
|
||||
}
|
||||
return val;
|
||||
},
|
||||
z
|
||||
.array(
|
||||
z.object({
|
||||
op: z
|
||||
.string()
|
||||
.describe(
|
||||
'A short description of the change, such as "Bold intro name"'
|
||||
),
|
||||
updates: z
|
||||
.string()
|
||||
.describe(
|
||||
'Markdown block fragments that represent the change, including the block_id and type'
|
||||
),
|
||||
})
|
||||
)
|
||||
.describe(
|
||||
'An array of independent semantic changes to apply to the document.'
|
||||
)
|
||||
),
|
||||
}),
|
||||
execute: async ({ doc_id, origin_content, code_edit }) => {
|
||||
try {
|
||||
const provider = await factory.getProviderByModel('morph-v2');
|
||||
const applyPrompt = await prompt.get('Apply Updates');
|
||||
if (!applyPrompt) {
|
||||
return 'Prompt not found';
|
||||
}
|
||||
const model = applyPrompt.model;
|
||||
const provider = await factory.getProviderByModel(model);
|
||||
if (!provider) {
|
||||
return 'Editing docs is not supported';
|
||||
}
|
||||
@@ -160,14 +194,27 @@ You should specify the following arguments before the others: [doc_id], [origin_
|
||||
if (!content) {
|
||||
return 'Doc not found or doc is empty';
|
||||
}
|
||||
const result = await provider.text({ modelId: 'morph-v2' }, [
|
||||
{
|
||||
role: 'user',
|
||||
content: `<code>${content}</code>\n<update>${code_edit}</update>`,
|
||||
},
|
||||
]);
|
||||
|
||||
return { result, content };
|
||||
const changedContents = await Promise.all(
|
||||
code_edit.map(async edit => {
|
||||
return await provider.text({ modelId: model }, [
|
||||
...applyPrompt.finish({
|
||||
content,
|
||||
op: edit.op,
|
||||
updates: edit.updates,
|
||||
}),
|
||||
]);
|
||||
})
|
||||
);
|
||||
|
||||
return {
|
||||
result: changedContents.map((changedContent, index) => ({
|
||||
op: code_edit[index].op,
|
||||
updates: code_edit[index].updates,
|
||||
originalContent: content,
|
||||
changedContent,
|
||||
})),
|
||||
};
|
||||
} catch {
|
||||
return 'Failed to apply edit to the doc';
|
||||
}
|
||||
|
||||
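The doc_edit tool description above now asks the model for an array of independent block-level changes. A concrete (abridged) example of the code_edit argument it validates, with the block ids and ops taken from the prompt's own example:

```ts
// Example payload for the doc_edit tool's `code_edit` parameter: each entry is a
// single, isolated change expressed as { op, updates }.
const codeEdit: { op: string; updates: string }[] = [
  {
    op: "Bold the player's name in the introduction",
    updates: [
      '<!-- block_id=003 flavour=paragraph -->',
      "**Andriy Shevchenko** is a legendary Ukrainian striker, best known for his time at AC Milan and Dynamo Kyiv. He won the Ballon d'Or in 2004.",
    ].join('\n'),
  },
  {
    op: 'Delete the career overview section',
    updates: ['<!-- delete block_id=004 -->', '<!-- delete block_id=005 -->'].join('\n'),
  },
];
// Each entry is later merged by the 'Apply Updates' prompt into a full document,
// one provider.text() call per entry (see the execute() body above).
```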
@@ -1,17 +1,45 @@
|
||||
import { tool } from 'ai';
|
||||
import { omit } from 'lodash-es';
|
||||
import { z } from 'zod';
|
||||
|
||||
import type { AccessController } from '../../../core/permission';
|
||||
import type { ChunkSimilarity } from '../../../models';
|
||||
import type { ChunkSimilarity, Models } from '../../../models';
|
||||
import type { CopilotContextService } from '../context';
|
||||
import type { ContextSession } from '../context/session';
|
||||
import type { CopilotChatOptions } from '../providers';
|
||||
import { toolError } from './error';
|
||||
|
||||
const FILTER_PREFIX = [
|
||||
'Title: ',
|
||||
'Created at: ',
|
||||
'Updated at: ',
|
||||
'Created by: ',
|
||||
'Updated by: ',
|
||||
];
|
||||
|
||||
function clearEmbeddingChunk(chunk: ChunkSimilarity): ChunkSimilarity {
|
||||
if (chunk.content) {
|
||||
const lines = chunk.content.split('\n');
|
||||
let maxLines = 5;
|
||||
while (maxLines > 0 && lines.length > 0) {
|
||||
if (FILTER_PREFIX.some(prefix => lines[0].startsWith(prefix))) {
|
||||
lines.shift();
|
||||
maxLines--;
|
||||
} else {
|
||||
// only process consecutive metadata rows
|
||||
break;
|
||||
}
|
||||
}
|
||||
return { ...chunk, content: lines.join('\n') };
|
||||
}
|
||||
return chunk;
|
||||
}
|
||||
|
||||
export const buildDocSearchGetter = (
|
||||
ac: AccessController,
|
||||
context: CopilotContextService,
|
||||
docContext: ContextSession | null
|
||||
docContext: ContextSession | null,
|
||||
models: Models
|
||||
) => {
|
||||
const searchDocs = async (
|
||||
options: CopilotChatOptions,
|
||||
@@ -45,7 +73,43 @@ export const buildDocSearchGetter = (
|
||||
}
|
||||
if (!docChunks.length && !fileChunks.length)
|
||||
return `No results found for "${query}".`;
|
||||
return [...fileChunks, ...docChunks];
|
||||
|
||||
const docIds = docChunks.map(c => ({
|
||||
// oxlint-disable-next-line no-non-null-assertion
|
||||
workspaceId: options.workspace!,
|
||||
docId: c.docId,
|
||||
}));
|
||||
const docAuthors = await models.doc
|
||||
.findAuthors(docIds)
|
||||
.then(
|
||||
docs =>
|
||||
new Map(
|
||||
docs
|
||||
.filter(d => !!d)
|
||||
.map(doc => [doc.id, omit(doc, ['id', 'workspaceId'])])
|
||||
)
|
||||
);
|
||||
const docMetas = await models.doc
|
||||
.findMetas(docIds, { select: { title: true } })
|
||||
.then(
|
||||
docs =>
|
||||
new Map(
|
||||
docs
|
||||
.filter(d => !!d)
|
||||
.map(doc => [
|
||||
doc.docId,
|
||||
Object.assign({}, doc, docAuthors.get(doc.docId)),
|
||||
])
|
||||
)
|
||||
);
|
||||
|
||||
return [
|
||||
...fileChunks.map(clearEmbeddingChunk),
|
||||
...docChunks.map(c => ({
|
||||
...c,
|
||||
...docMetas.get(c.docId),
|
||||
})),
|
||||
] as ChunkSimilarity[];
|
||||
};
|
||||
return searchDocs;
|
||||
};
|
||||
|
||||
@@ -1,4 +1,29 @@
|
||||
import { ToolSet } from 'ai';
|
||||
|
||||
import { createCodeArtifactTool } from './code-artifact';
|
||||
import { createConversationSummaryTool } from './conversation-summary';
|
||||
import { createDocComposeTool } from './doc-compose';
|
||||
import { createDocEditTool } from './doc-edit';
|
||||
import { createDocKeywordSearchTool } from './doc-keyword-search';
|
||||
import { createDocReadTool } from './doc-read';
|
||||
import { createDocSemanticSearchTool } from './doc-semantic-search';
|
||||
import { createExaCrawlTool } from './exa-crawl';
|
||||
import { createExaSearchTool } from './exa-search';
|
||||
|
||||
export interface CustomAITools extends ToolSet {
|
||||
code_artifact: ReturnType<typeof createCodeArtifactTool>;
|
||||
conversation_summary: ReturnType<typeof createConversationSummaryTool>;
|
||||
doc_edit: ReturnType<typeof createDocEditTool>;
|
||||
doc_semantic_search: ReturnType<typeof createDocSemanticSearchTool>;
|
||||
doc_keyword_search: ReturnType<typeof createDocKeywordSearchTool>;
|
||||
doc_read: ReturnType<typeof createDocReadTool>;
|
||||
doc_compose: ReturnType<typeof createDocComposeTool>;
|
||||
web_search_exa: ReturnType<typeof createExaSearchTool>;
|
||||
web_crawl_exa: ReturnType<typeof createExaCrawlTool>;
|
||||
}
|
||||
|
||||
export * from './code-artifact';
|
||||
export * from './conversation-summary';
|
||||
export * from './doc-compose';
|
||||
export * from './doc-edit';
|
||||
export * from './doc-keyword-search';
|
||||
|
||||
@@ -15,7 +15,6 @@ import GraphQLUpload from 'graphql-upload/GraphQLUpload.mjs';
|
||||
|
||||
import {
|
||||
CopilotTranscriptionAudioNotProvided,
|
||||
CopilotTranscriptionJobNotFound,
|
||||
type FileUpload,
|
||||
} from '../../../base';
|
||||
import { CurrentUser } from '../../../core/auth';
|
||||
@@ -74,7 +73,7 @@ const FinishedStatus: Set<AiJobStatus> = new Set([
|
||||
export class CopilotTranscriptionResolver {
|
||||
constructor(
|
||||
private readonly ac: AccessController,
|
||||
private readonly service: CopilotTranscriptionService
|
||||
private readonly transcript: CopilotTranscriptionService
|
||||
) {}
|
||||
|
||||
private handleJobResult(
|
||||
@@ -122,7 +121,7 @@ export class CopilotTranscriptionResolver {
|
||||
throw new CopilotTranscriptionAudioNotProvided();
|
||||
}
|
||||
|
||||
const jobResult = await this.service.submitTranscriptionJob(
|
||||
const jobResult = await this.transcript.submitJob(
|
||||
user.id,
|
||||
workspaceId,
|
||||
blobId,
|
||||
@@ -144,19 +143,11 @@ export class CopilotTranscriptionResolver {
|
||||
.allowLocal()
|
||||
.assert('Workspace.Copilot');
|
||||
|
||||
const job = await this.service.queryTranscriptionJob(
|
||||
const jobResult = await this.transcript.retryJob(
|
||||
user.id,
|
||||
workspaceId,
|
||||
jobId
|
||||
);
|
||||
if (!job || !job.infos) {
|
||||
throw new CopilotTranscriptionJobNotFound();
|
||||
}
|
||||
|
||||
const jobResult = await this.service.executeTranscriptionJob(
|
||||
job.id,
|
||||
job.infos
|
||||
);
|
||||
|
||||
return this.handleJobResult(jobResult);
|
||||
}
|
||||
@@ -166,7 +157,7 @@ export class CopilotTranscriptionResolver {
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@Args('jobId') jobId: string
|
||||
): Promise<TranscriptionResultType | null> {
|
||||
const job = await this.service.claimTranscriptionJob(user.id, jobId);
|
||||
const job = await this.transcript.claimJob(user.id, jobId);
|
||||
return this.handleJobResult(job);
|
||||
}
|
||||
|
||||
@@ -190,7 +181,7 @@ export class CopilotTranscriptionResolver {
|
||||
.allowLocal()
|
||||
.assert('Workspace.Copilot');
|
||||
|
||||
const job = await this.service.queryTranscriptionJob(
|
||||
const job = await this.transcript.queryJob(
|
||||
user.id,
|
||||
copilot.workspaceId,
|
||||
jobId,
|
||||
|
||||
@@ -49,7 +49,17 @@ export class CopilotTranscriptionService {
|
||||
private readonly providerFactory: CopilotProviderFactory
|
||||
) {}
|
||||
|
||||
async submitTranscriptionJob(
|
||||
private async getModel(userId: string) {
|
||||
const prompt = await this.prompt.get('Transcript audio');
|
||||
const hasAccess = await this.models.userFeature.has(
|
||||
userId,
|
||||
'unlimited_copilot'
|
||||
);
|
||||
// choose the pro model if user has copilot plan
|
||||
return prompt?.optionalModels[hasAccess ? 1 : 0];
|
||||
}
|
||||
|
||||
async submitJob(
|
||||
userId: string,
|
||||
workspaceId: string,
|
||||
blobId: string,
|
||||
@@ -78,12 +88,26 @@ export class CopilotTranscriptionService {
|
||||
infos.push({ url, mimeType: blob.mimetype });
|
||||
}
|
||||
|
||||
return await this.executeTranscriptionJob(jobId, infos);
|
||||
const model = await this.getModel(userId);
|
||||
return await this.executeJob(jobId, infos, model);
|
||||
}
|
||||
|
||||
async executeTranscriptionJob(
|
||||
async retryJob(userId: string, workspaceId: string, jobId: string) {
|
||||
const job = await this.queryJob(userId, workspaceId, jobId);
|
||||
if (!job || !job.infos) {
|
||||
throw new CopilotTranscriptionJobNotFound();
|
||||
}
|
||||
|
||||
const model = await this.getModel(userId);
|
||||
const jobResult = await this.executeJob(job.id, job.infos, model);
|
||||
|
||||
return jobResult;
|
||||
}
|
||||
|
||||
async executeJob(
|
||||
jobId: string,
|
||||
infos: AudioBlobInfos
|
||||
infos: AudioBlobInfos,
|
||||
modelId?: string
|
||||
): Promise<TranscriptionJob> {
|
||||
const status = AiJobStatus.running;
|
||||
const success = await this.models.copilotJob.update(jobId, {
|
||||
@@ -98,12 +122,13 @@ export class CopilotTranscriptionService {
|
||||
await this.job.add('copilot.transcript.submit', {
|
||||
jobId,
|
||||
infos,
|
||||
modelId,
|
||||
});
|
||||
|
||||
return { id: jobId, status };
|
||||
}
|
||||
|
||||
async claimTranscriptionJob(
|
||||
async claimJob(
|
||||
userId: string,
|
||||
jobId: string
|
||||
): Promise<TranscriptionJob | null> {
|
||||
@@ -118,7 +143,7 @@ export class CopilotTranscriptionService {
|
||||
return null;
|
||||
}
|
||||
|
||||
async queryTranscriptionJob(
|
||||
async queryJob(
|
||||
userId: string,
|
||||
workspaceId: string,
|
||||
jobId?: string,
|
||||
@@ -171,7 +196,7 @@ export class CopilotTranscriptionService {
|
||||
);
|
||||
|
||||
if (!provider) {
|
||||
throw new NoCopilotProviderAvailable();
|
||||
throw new NoCopilotProviderAvailable({ modelId });
|
||||
}
|
||||
|
||||
return provider;
|
||||
@@ -181,14 +206,20 @@ export class CopilotTranscriptionService {
|
||||
promptName: string,
|
||||
message: Partial<PromptMessage>,
|
||||
schema?: ZodType<any>,
|
||||
prefer?: CopilotProviderType
|
||||
prefer?: CopilotProviderType,
|
||||
modelId?: string
|
||||
): Promise<string> {
|
||||
const prompt = await this.prompt.get(promptName);
|
||||
if (!prompt) {
|
||||
throw new CopilotPromptNotFound({ name: promptName });
|
||||
}
|
||||
|
||||
const cond = { modelId: prompt.model };
|
||||
const cond = {
|
||||
modelId:
|
||||
modelId && prompt.optionalModels.includes(modelId)
|
||||
? modelId
|
||||
: prompt.model,
|
||||
};
|
||||
const msg = { role: 'user' as const, content: '', ...message };
|
||||
const config = Object.assign({}, prompt.config);
|
||||
if (schema) {
|
||||
@@ -231,13 +262,19 @@ export class CopilotTranscriptionService {
|
||||
return `${hoursStr}:${minutesStr}:${secondsStr}`;
|
||||
}
|
||||
|
||||
private async callTranscript(url: string, mimeType: string, offset: number) {
|
||||
private async callTranscript(
|
||||
url: string,
|
||||
mimeType: string,
|
||||
offset: number,
|
||||
modelId?: string
|
||||
) {
|
||||
// NOTE: Vertex provider not support transcription yet, we always use Gemini here
|
||||
const result = await this.chatWithPrompt(
|
||||
'Transcript audio',
|
||||
{ attachments: [url], params: { mimetype: mimeType } },
|
||||
TranscriptionResponseSchema,
|
||||
CopilotProviderType.Gemini
|
||||
CopilotProviderType.Gemini,
|
||||
modelId
|
||||
);
|
||||
|
||||
const transcription = TranscriptionResponseSchema.parse(
|
||||
@@ -256,6 +293,7 @@ export class CopilotTranscriptionService {
|
||||
async transcriptAudio({
|
||||
jobId,
|
||||
infos,
|
||||
modelId,
|
||||
// @deprecated
|
||||
url,
|
||||
mimeType,
|
||||
@@ -264,7 +302,7 @@ export class CopilotTranscriptionService {
|
||||
const blobInfos = this.mergeInfos(infos, url, mimeType);
|
||||
const transcriptions = await Promise.all(
|
||||
Array.from(blobInfos.entries()).map(([idx, { url, mimeType }]) =>
|
||||
this.callTranscript(url, mimeType, idx * 10 * 60)
|
||||
this.callTranscript(url, mimeType, idx * 10 * 60, modelId)
|
||||
)
|
||||
);
|
||||
|
||||
|
||||
@@ -56,6 +56,7 @@ declare global {
|
||||
'copilot.transcript.submit': {
|
||||
jobId: string;
|
||||
infos?: AudioBlobInfos;
|
||||
modelId?: string;
|
||||
/// @deprecated use `infos` instead
|
||||
url?: string;
|
||||
/// @deprecated use `infos` instead
|
||||
|
||||
@@ -103,6 +103,7 @@ export class CopilotWorkspaceEmbeddingConfigResolver {
|
||||
|
||||
return ignoredDocs;
|
||||
}
|
||||
|
||||
@Mutation(() => Number, {
|
||||
name: 'updateWorkspaceEmbeddingIgnoredDocs',
|
||||
complexity: 2,
|
||||
|
||||
@@ -140,10 +140,13 @@ export class ElasticsearchProvider extends SearchProvider {
|
||||
const result = await this.request(
|
||||
'POST',
|
||||
url.toString(),
|
||||
JSON.stringify({ query })
|
||||
JSON.stringify({ query }),
|
||||
'application/json',
|
||||
// ignore 409 error: version_conflict_engine_exception, version conflict, required seqNo [255898790], primary term [3]. current document has seqNo [256133002] and primary term [3]
|
||||
[409]
|
||||
);
|
||||
this.logger.debug(
|
||||
`deleted by query ${table} ${JSON.stringify(query)} in ${Date.now() - start}ms, result: ${JSON.stringify(result)}`
|
||||
`deleted by query ${table} ${JSON.stringify(query)} in ${Date.now() - start}ms, result: ${JSON.stringify(result).substring(0, 500)}`
|
||||
);
|
||||
}
|
||||
|
||||
@@ -264,7 +267,8 @@ export class ElasticsearchProvider extends SearchProvider {
|
||||
method: 'POST' | 'PUT',
|
||||
url: string,
|
||||
body: string,
|
||||
contentType = 'application/json'
|
||||
contentType = 'application/json',
|
||||
ignoreErrorStatus?: number[]
|
||||
) {
|
||||
const headers = {
|
||||
'Content-Type': contentType,
|
||||
@@ -280,6 +284,10 @@ export class ElasticsearchProvider extends SearchProvider {
|
||||
headers,
|
||||
});
|
||||
const data = await response.json();
|
||||
if (ignoreErrorStatus?.includes(response.status)) {
|
||||
return data;
|
||||
}
|
||||
|
||||
// handle error, status >= 400
|
||||
// {
|
||||
// "error": {
|
||||
|
||||
@@ -291,6 +291,11 @@ type CopilotFailedToAddWorkspaceFileEmbeddingDataType {
|
||||
message: String!
|
||||
}
|
||||
|
||||
type CopilotFailedToGenerateEmbeddingDataType {
|
||||
message: String!
|
||||
provider: String!
|
||||
}
|
||||
|
||||
type CopilotFailedToMatchContextDataType {
|
||||
content: String!
|
||||
contextId: String!
|
||||
@@ -452,6 +457,7 @@ type CopilotWorkspaceIgnoredDocTypeEdge {
|
||||
|
||||
input CreateChatMessageInput {
|
||||
attachments: [String!]
|
||||
blob: Upload
|
||||
blobs: [Upload!]
|
||||
content: String
|
||||
params: JSON
|
||||
@@ -595,6 +601,7 @@ type DocType {
|
||||
mode: PublicDocMode!
|
||||
permissions: DocPermissions!
|
||||
public: Boolean!
|
||||
summary: String
|
||||
title: String
|
||||
updatedAt: DateTime
|
||||
workspaceId: String!
|
||||
@@ -615,7 +622,7 @@ type EditorType {
|
||||
name: String!
|
||||
}
|
||||
|
||||
union ErrorDataUnion = AlreadyInSpaceDataType | BlobNotFoundDataType | CopilotContextFileNotSupportedDataType | CopilotDocNotFoundDataType | CopilotFailedToAddWorkspaceFileEmbeddingDataType | CopilotFailedToMatchContextDataType | CopilotFailedToMatchGlobalContextDataType | CopilotFailedToModifyContextDataType | CopilotInvalidContextDataType | CopilotMessageNotFoundDataType | CopilotPromptNotFoundDataType | CopilotProviderNotSupportedDataType | CopilotProviderSideErrorDataType | DocActionDeniedDataType | DocHistoryNotFoundDataType | DocNotFoundDataType | DocUpdateBlockedDataType | ExpectToGrantDocUserRolesDataType | ExpectToRevokeDocUserRolesDataType | ExpectToUpdateDocUserRoleDataType | GraphqlBadRequestDataType | HttpRequestErrorDataType | InvalidAppConfigDataType | InvalidAppConfigInputDataType | InvalidEmailDataType | InvalidHistoryTimestampDataType | InvalidIndexerInputDataType | InvalidLicenseToActivateDataType | InvalidLicenseUpdateParamsDataType | InvalidOauthCallbackCodeDataType | InvalidOauthResponseDataType | InvalidPasswordLengthDataType | InvalidRuntimeConfigTypeDataType | InvalidSearchProviderRequestDataType | MemberNotFoundInSpaceDataType | MentionUserDocAccessDeniedDataType | MissingOauthQueryParameterDataType | NoMoreSeatDataType | NotInSpaceDataType | QueryTooLongDataType | RuntimeConfigNotFoundDataType | SameSubscriptionRecurringDataType | SpaceAccessDeniedDataType | SpaceNotFoundDataType | SpaceOwnerNotFoundDataType | SpaceShouldHaveOnlyOneOwnerDataType | SubscriptionAlreadyExistsDataType | SubscriptionNotExistsDataType | SubscriptionPlanNotFoundDataType | UnknownOauthProviderDataType | UnsupportedClientVersionDataType | UnsupportedSubscriptionPlanDataType | ValidationErrorDataType | VersionRejectedDataType | WorkspacePermissionNotFoundDataType | WrongSignInCredentialsDataType
|
||||
union ErrorDataUnion = AlreadyInSpaceDataType | BlobNotFoundDataType | CopilotContextFileNotSupportedDataType | CopilotDocNotFoundDataType | CopilotFailedToAddWorkspaceFileEmbeddingDataType | CopilotFailedToGenerateEmbeddingDataType | CopilotFailedToMatchContextDataType | CopilotFailedToMatchGlobalContextDataType | CopilotFailedToModifyContextDataType | CopilotInvalidContextDataType | CopilotMessageNotFoundDataType | CopilotPromptNotFoundDataType | CopilotProviderNotSupportedDataType | CopilotProviderSideErrorDataType | DocActionDeniedDataType | DocHistoryNotFoundDataType | DocNotFoundDataType | DocUpdateBlockedDataType | ExpectToGrantDocUserRolesDataType | ExpectToRevokeDocUserRolesDataType | ExpectToUpdateDocUserRoleDataType | GraphqlBadRequestDataType | HttpRequestErrorDataType | InvalidAppConfigDataType | InvalidAppConfigInputDataType | InvalidEmailDataType | InvalidHistoryTimestampDataType | InvalidIndexerInputDataType | InvalidLicenseToActivateDataType | InvalidLicenseUpdateParamsDataType | InvalidOauthCallbackCodeDataType | InvalidOauthResponseDataType | InvalidPasswordLengthDataType | InvalidRuntimeConfigTypeDataType | InvalidSearchProviderRequestDataType | MemberNotFoundInSpaceDataType | MentionUserDocAccessDeniedDataType | MissingOauthQueryParameterDataType | NoCopilotProviderAvailableDataType | NoMoreSeatDataType | NotInSpaceDataType | QueryTooLongDataType | RuntimeConfigNotFoundDataType | SameSubscriptionRecurringDataType | SpaceAccessDeniedDataType | SpaceNotFoundDataType | SpaceOwnerNotFoundDataType | SpaceShouldHaveOnlyOneOwnerDataType | SubscriptionAlreadyExistsDataType | SubscriptionNotExistsDataType | SubscriptionPlanNotFoundDataType | UnknownOauthProviderDataType | UnsupportedClientVersionDataType | UnsupportedSubscriptionPlanDataType | ValidationErrorDataType | VersionRejectedDataType | WorkspacePermissionNotFoundDataType | WrongSignInCredentialsDataType
|
||||
|
||||
enum ErrorNames {
|
||||
ACCESS_DENIED
|
||||
@@ -644,6 +651,7 @@ enum ErrorNames {
|
||||
COPILOT_EMBEDDING_UNAVAILABLE
|
||||
COPILOT_FAILED_TO_ADD_WORKSPACE_FILE_EMBEDDING
|
||||
COPILOT_FAILED_TO_CREATE_MESSAGE
|
||||
COPILOT_FAILED_TO_GENERATE_EMBEDDING
|
||||
COPILOT_FAILED_TO_GENERATE_TEXT
|
||||
COPILOT_FAILED_TO_MATCH_CONTEXT
|
||||
COPILOT_FAILED_TO_MATCH_GLOBAL_CONTEXT
|
||||
@@ -1290,6 +1298,12 @@ type Mutation {
|
||||
setBlob(blob: Upload!, workspaceId: String!): String!
|
||||
submitAudioTranscription(blob: Upload, blobId: String!, blobs: [Upload!], workspaceId: String!): TranscriptionResultType
|
||||
|
||||
"""Trigger cleanup of trashed doc embeddings"""
|
||||
triggerCleanupTrashedDocEmbeddings: Boolean!
|
||||
|
||||
"""Trigger generate missing titles cron job"""
|
||||
triggerGenerateTitleCron: Boolean!
|
||||
|
||||
"""update app configuration"""
|
||||
updateAppConfig(updates: [UpdateAppConfigInput!]!): JSONObject!
|
||||
|
||||
@@ -1335,6 +1349,10 @@ type Mutation {
|
||||
verifyEmail(token: String!): Boolean!
|
||||
}
|
||||
|
||||
type NoCopilotProviderAvailableDataType {
|
||||
modelId: String!
|
||||
}
|
||||
|
||||
type NoMoreSeatDataType {
|
||||
spaceId: String!
|
||||
}
|
||||
@@ -1507,6 +1525,9 @@ type PublicUserType {
|
||||
type Query {
|
||||
"""get the whole app configuration"""
|
||||
appConfig: JSONObject!
|
||||
|
||||
"""Apply updates to a doc using LLM and return the merged markdown."""
|
||||
applyDocUpdates(docId: String!, op: String!, updates: String!, workspaceId: String!): String!
|
||||
collectAllBlobSizes: WorkspaceBlobSizes! @deprecated(reason: "use `user.quotaUsage` instead")
|
||||
|
||||
"""Get current user"""
|
||||
|
||||
@@ -82,6 +82,10 @@ export type RequestOptions<Q extends GraphQLQuery> = QueryVariablesOption<Q> & {
* @default 15000
*/
timeout?: number;
/**
* Abort signal
*/
signal?: AbortSignal;
};

export type QueryOptions<Q extends GraphQLQuery> = RequestOptions<Q> & {
@@ -207,6 +211,7 @@ export const gqlFetcherFactory = (
headers,
body: isFormData ? body : JSON.stringify(body),
timeout: options.timeout,
signal: options.signal,
})
).then(async res => {
if (res.headers.get('content-type')?.startsWith('application/json')) {
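With `timeout` and `signal` added to `RequestOptions` and threaded into the underlying request, a caller can cancel a request itself or let the fetcher time it out. A minimal sketch, assuming only the option names shown in this hunk (the `gql` parameter stands in for whatever `gqlFetcherFactory` returns):

```ts
// Illustrative only: drive the new `timeout` and `signal` options from the
// caller side with an AbortController of its own.
async function queryWithTimeout<T>(
  gql: (opts: { query: unknown; timeout?: number; signal?: AbortSignal }) => Promise<T>,
  query: unknown,
  timeoutMs = 15_000
): Promise<T> {
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), timeoutMs);
  try {
    // Pass both: the fetcher can race its own timeout against the caller's signal.
    return await gql({ query, timeout: timeoutMs, signal: controller.signal });
  } finally {
    clearTimeout(timer);
  }
}
```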
@@ -0,0 +1,3 @@
query applyDocUpdates($workspaceId: String!, $docId: String!, $op: String!, $updates: String!) {
applyDocUpdates(workspaceId: $workspaceId, docId: $docId, op: $op, updates: $updates)
}
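The new `applyDocUpdates` operation returns the merged markdown as a plain string, so it can be exercised with any GraphQL POST. A hedged sketch, assuming a generic `/graphql`-style endpoint and the standard `data`/`errors` response envelope rather than AFFiNE's own fetcher:

```ts
// Sketch: issuing the new query over a plain GraphQL POST.
// The endpoint and error handling here are assumptions, not AFFiNE internals.
async function applyDocUpdates(
  endpoint: string,
  vars: { workspaceId: string; docId: string; op: string; updates: string }
): Promise<string> {
  const res = await fetch(endpoint, {
    method: 'POST',
    headers: { 'content-type': 'application/json' },
    body: JSON.stringify({
      query: `query applyDocUpdates($workspaceId: String!, $docId: String!, $op: String!, $updates: String!) {
        applyDocUpdates(workspaceId: $workspaceId, docId: $docId, op: $op, updates: $updates)
      }`,
      variables: vars,
    }),
  });
  const { data, errors } = await res.json();
  if (errors?.length) throw new Error(errors[0].message);
  // The resolver returns the merged markdown as a single string.
  return data.applyDocUpdates;
}
```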
@@ -3,15 +3,17 @@
query getCopilotRecentSessions(
$workspaceId: String!
$limit: Int = 10
$offset: Int = 0
) {
currentUser {
copilot(workspaceId: $workspaceId) {
chats(
pagination: { first: $limit }
pagination: { first: $limit, offset: $offset }
options: {
action: false
fork: false
sessionOrder: desc
withMessages: true
withMessages: false
}
) {
...PaginatedCopilotChats
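The query now threads `$offset` into `pagination`, which makes simple offset paging possible. Below is a sketch of how a caller might walk the pages; `fetchRecentSessions` and the `RecentSessionPage` shape are hypothetical stand-ins, since the actual `PaginatedCopilotChats` fragment is not shown in this diff:

```ts
// Sketch of paging through recent sessions with the new $offset variable.
type RecentSessionPage = { items: unknown[]; hasMore: boolean };

async function listAllRecentSessions(
  fetchRecentSessions: (vars: {
    workspaceId: string;
    limit: number;
    offset: number;
  }) => Promise<RecentSessionPage>,
  workspaceId: string,
  limit = 10
) {
  const sessions: unknown[] = [];
  for (let offset = 0; ; offset += limit) {
    const page = await fetchRecentSessions({ workspaceId, limit, offset });
    sessions.push(...page.items);
    // Stop when the server reports no further pages or returns an empty page.
    if (!page.hasMore || page.items.length === 0) break;
  }
  return sessions;
}
```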
@@ -5,6 +5,8 @@ query getWorkspacePageById($workspaceId: String!, $pageId: String!) {
mode
defaultRole
public
title
summary
}
}
}
@@ -555,6 +555,19 @@ export const uploadCommentAttachmentMutation = {
file: true,
};

export const applyDocUpdatesQuery = {
id: 'applyDocUpdatesQuery' as const,
op: 'applyDocUpdates',
query: `query applyDocUpdates($workspaceId: String!, $docId: String!, $op: String!, $updates: String!) {
applyDocUpdates(
workspaceId: $workspaceId
docId: $docId
op: $op
updates: $updates
)
}`,
};

export const addContextCategoryMutation = {
id: 'addContextCategoryMutation' as const,
op: 'addContextCategory',
@@ -1068,12 +1081,12 @@ ${paginatedCopilotChatsFragment}`,
export const getCopilotRecentSessionsQuery = {
id: 'getCopilotRecentSessionsQuery' as const,
op: 'getCopilotRecentSessions',
query: `query getCopilotRecentSessions($workspaceId: String!, $limit: Int = 10) {
query: `query getCopilotRecentSessions($workspaceId: String!, $limit: Int = 10, $offset: Int = 0) {
currentUser {
copilot(workspaceId: $workspaceId) {
chats(
pagination: {first: $limit}
options: {fork: false, sessionOrder: desc, withMessages: true}
pagination: {first: $limit, offset: $offset}
options: {action: false, fork: false, sessionOrder: desc, withMessages: false}
) {
...PaginatedCopilotChats
}
@@ -1584,6 +1597,8 @@ export const getWorkspacePageByIdQuery = {
mode
defaultRole
public
title
summary
}
}
}`,
@@ -375,6 +375,12 @@ export interface CopilotFailedToAddWorkspaceFileEmbeddingDataType {
message: Scalars['String']['output'];
}

export interface CopilotFailedToGenerateEmbeddingDataType {
__typename?: 'CopilotFailedToGenerateEmbeddingDataType';
message: Scalars['String']['output'];
provider: Scalars['String']['output'];
}

export interface CopilotFailedToMatchContextDataType {
__typename?: 'CopilotFailedToMatchContextDataType';
content: Scalars['String']['output'];
@@ -563,6 +569,7 @@ export interface CopilotWorkspaceIgnoredDocTypeEdge {

export interface CreateChatMessageInput {
attachments?: InputMaybe<Array<Scalars['String']['input']>>;
blob?: InputMaybe<Scalars['Upload']['input']>;
blobs?: InputMaybe<Array<Scalars['Upload']['input']>>;
content?: InputMaybe<Scalars['String']['input']>;
params?: InputMaybe<Scalars['JSON']['input']>;
@@ -703,6 +710,7 @@ export interface DocType {
mode: PublicDocMode;
permissions: DocPermissions;
public: Scalars['Boolean']['output'];
summary: Maybe<Scalars['String']['output']>;
title: Maybe<Scalars['String']['output']>;
updatedAt: Maybe<Scalars['DateTime']['output']>;
workspaceId: Scalars['String']['output'];
@@ -736,6 +744,7 @@ export type ErrorDataUnion =
| CopilotContextFileNotSupportedDataType
| CopilotDocNotFoundDataType
| CopilotFailedToAddWorkspaceFileEmbeddingDataType
| CopilotFailedToGenerateEmbeddingDataType
| CopilotFailedToMatchContextDataType
| CopilotFailedToMatchGlobalContextDataType
| CopilotFailedToModifyContextDataType
@@ -768,6 +777,7 @@ export type ErrorDataUnion =
| MemberNotFoundInSpaceDataType
| MentionUserDocAccessDeniedDataType
| MissingOauthQueryParameterDataType
| NoCopilotProviderAvailableDataType
| NoMoreSeatDataType
| NotInSpaceDataType
| QueryTooLongDataType
@@ -815,6 +825,7 @@ export enum ErrorNames {
COPILOT_EMBEDDING_UNAVAILABLE = 'COPILOT_EMBEDDING_UNAVAILABLE',
COPILOT_FAILED_TO_ADD_WORKSPACE_FILE_EMBEDDING = 'COPILOT_FAILED_TO_ADD_WORKSPACE_FILE_EMBEDDING',
COPILOT_FAILED_TO_CREATE_MESSAGE = 'COPILOT_FAILED_TO_CREATE_MESSAGE',
COPILOT_FAILED_TO_GENERATE_EMBEDDING = 'COPILOT_FAILED_TO_GENERATE_EMBEDDING',
COPILOT_FAILED_TO_GENERATE_TEXT = 'COPILOT_FAILED_TO_GENERATE_TEXT',
COPILOT_FAILED_TO_MATCH_CONTEXT = 'COPILOT_FAILED_TO_MATCH_CONTEXT',
COPILOT_FAILED_TO_MATCH_GLOBAL_CONTEXT = 'COPILOT_FAILED_TO_MATCH_GLOBAL_CONTEXT',
@@ -1430,6 +1441,10 @@ export interface Mutation {
sendVerifyEmail: Scalars['Boolean']['output'];
setBlob: Scalars['String']['output'];
submitAudioTranscription: Maybe<TranscriptionResultType>;
/** Trigger cleanup of trashed doc embeddings */
triggerCleanupTrashedDocEmbeddings: Scalars['Boolean']['output'];
/** Trigger generate missing titles cron job */
triggerGenerateTitleCron: Scalars['Boolean']['output'];
/** update app configuration */
updateAppConfig: Scalars['JSONObject']['output'];
/** Update a comment content */
@@ -1880,6 +1895,11 @@ export interface MutationVerifyEmailArgs {
token: Scalars['String']['input'];
}

export interface NoCopilotProviderAvailableDataType {
__typename?: 'NoCopilotProviderAvailableDataType';
modelId: Scalars['String']['output'];
}

export interface NoMoreSeatDataType {
__typename?: 'NoMoreSeatDataType';
spaceId: Scalars['String']['output'];
@@ -2058,6 +2078,8 @@ export interface Query {
__typename?: 'Query';
/** get the whole app configuration */
appConfig: Scalars['JSONObject']['output'];
/** Apply updates to a doc using LLM and return the merged markdown. */
applyDocUpdates: Scalars['String']['output'];
/** @deprecated use `user.quotaUsage` instead */
collectAllBlobSizes: WorkspaceBlobSizes;
/** Get current user */
@@ -2105,6 +2127,13 @@ export interface Query {
workspaces: Array<WorkspaceType>;
}

export interface QueryApplyDocUpdatesArgs {
docId: Scalars['String']['input'];
op: Scalars['String']['input'];
updates: Scalars['String']['input'];
workspaceId: Scalars['String']['input'];
}

export interface QueryErrorArgs {
name: ErrorNames;
}
@@ -3494,6 +3523,18 @@ export type UploadCommentAttachmentMutation = {
uploadCommentAttachment: string;
};

export type ApplyDocUpdatesQueryVariables = Exact<{
workspaceId: Scalars['String']['input'];
docId: Scalars['String']['input'];
op: Scalars['String']['input'];
updates: Scalars['String']['input'];
}>;

export type ApplyDocUpdatesQuery = {
__typename?: 'Query';
applyDocUpdates: string;
};

export type AddContextCategoryMutationVariables = Exact<{
options: AddContextCategoryInput;
}>;
@@ -4350,6 +4391,7 @@ export type GetCopilotSessionQuery = {
export type GetCopilotRecentSessionsQueryVariables = Exact<{
workspaceId: Scalars['String']['input'];
limit?: InputMaybe<Scalars['Int']['input']>;
offset?: InputMaybe<Scalars['Int']['input']>;
}>;

export type GetCopilotRecentSessionsQuery = {
@@ -5147,6 +5189,8 @@ export type GetWorkspacePageByIdQuery = {
mode: PublicDocMode;
defaultRole: DocRole;
public: boolean;
title: string | null;
summary: string | null;
};
};
};
@@ -6130,6 +6174,11 @@ export type Queries =
variables: ListCommentsQueryVariables;
response: ListCommentsQuery;
}
| {
name: 'applyDocUpdatesQuery';
variables: ApplyDocUpdatesQueryVariables;
response: ApplyDocUpdatesQuery;
}
| {
name: 'listContextObjectQuery';
variables: ListContextObjectQueryVariables;
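Each generated query object is also registered in the `Queries` union, pairing its `name` with its variable and response types. The following is a type-level sketch of how such a union is commonly consumed; the two-entry union and the `runQuery` helper are stand-ins for illustration, not AFFiNE's fetcher:

```ts
// Stand-in union: the generated file pairs each query name with its types.
type Queries =
  | {
      name: 'applyDocUpdatesQuery';
      variables: { workspaceId: string; docId: string; op: string; updates: string };
      response: { applyDocUpdates: string };
    }
  | {
      name: 'getCopilotRecentSessionsQuery';
      variables: { workspaceId: string; limit?: number; offset?: number };
      response: { currentUser: unknown };
    };

// Resolve the union member for a given query name.
type QueryByName<N extends Queries['name']> = Extract<Queries, { name: N }>;

function runQuery<N extends Queries['name']>(
  name: N,
  variables: QueryByName<N>['variables']
): Promise<QueryByName<N>['response']> {
  // A real implementation would look the query text up by `name` and POST it;
  // this sketch only demonstrates the typing.
  void name;
  void variables;
  return Promise.reject(new Error('not implemented in this sketch'));
}

// Example: the response is inferred as { applyDocUpdates: string }.
async function example(workspaceId: string, docId: string) {
  const res = await runQuery('applyDocUpdatesQuery', {
    workspaceId,
    docId,
    op: 'merge',
    updates: '',
  });
  return res.applyDocUpdates;
}
```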
@@ -58,74 +58,45 @@ exports[`should parse page doc work 1`] = `
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
# You own your data, with no compromises
|
||||
|
||||
|
||||
## Local-first & Real-time collaborative
|
||||
|
||||
|
||||
We love the idea proposed by Ink & Switch in the famous article about you owning your data, despite the cloud. Furthermore, AFFiNE is the first all-in-one workspace that keeps your data ownership with no compromises on real-time collaboration and editing experience.
|
||||
|
||||
|
||||
AFFiNE is a local-first application upon CRDTs with real-time collaboration support. Your data is always stored locally while multiple nodes remain synced in real-time.
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
### Blocks that assemble your next docs, tasks kanban or whiteboard
|
||||
|
||||
|
||||
There is a large overlap of their atomic "building blocks" between these apps. They are neither open source nor have a plugin system like VS Code for contributors to customize. We want to have something that contains all the features we love and goes one step further.
|
||||
|
||||
|
||||
We are building AFFiNE to be a fundamental open source platform that contains all the building blocks for docs, task management and visual collaboration, hoping you can shape your next workflow with us that can make your life better and also connect others, too.
|
||||
|
||||
|
||||
If you want to learn more about the product design of AFFiNE, here goes the concepts:
|
||||
|
||||
|
||||
To Shape, not to adapt. AFFiNE is built for individuals & teams who care about their data, who refuse vendor lock-in, and who want to have control over their essential tools.
|
||||
|
||||
|
||||
## A true canvas for blocks in any form
|
||||
|
||||
|
||||
[Many editor apps](http://notion.so) claimed to be a canvas for productivity. Since _the Mother of All Demos,_ Douglas Engelbart, a creative and programable digital workspace has been a pursuit and an ultimate mission for generations of tool makers.
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
"We shape our tools and thereafter our tools shape us”. A lot of pioneers have inspired us a long the way, e.g.:
|
||||
|
||||
|
||||
* Quip & Notion with their great concept of "everything is a block"
|
||||
|
||||
|
||||
* Trello with their Kanban
|
||||
|
||||
|
||||
* Airtable & Miro with their no-code programable datasheets
|
||||
|
||||
|
||||
* Miro & Whimiscal with their edgeless visual whiteboard
|
||||
|
||||
|
||||
* Remnote & Capacities with their object-based tag system
|
||||
|
||||
|
||||
For more details, please refer to our [RoadMap](https://docs.affine.pro/docs/core-concepts/roadmap)
|
||||
|
||||
|
||||
## Self Host
|
||||
|
||||
|
||||
Self host AFFiNE
|
||||
|
||||
|
||||
||Title|Tag|
|
||||
|---|---|---|
|
||||
|Affine Development|Affine Development|<span data-affine-option data-value="AxSe-53xjX" data-option-color="var(--affine-tag-pink)">AFFiNE</span>|
|
||||
@@ -136,16 +107,12 @@ Self host AFFiNE
|
||||
|Miro & Whimiscal with their edgeless visual whiteboard|Miro & Whimiscal with their edgeless visual whiteboard|<span data-affine-option data-value="HgHsKOUINZ" data-option-color="var(--affine-tag-blue)">Reference</span>|
|
||||
|Remnote & Capacities with their object-based tag system|Remnote & Capacities with their object-based tag system||
|
||||
|
||||
|
||||
## Affine Development
|
||||
|
||||
|
||||
For developer or installation guides, please go to [AFFiNE Development](https://docs.affine.pro/docs/development/quick-start)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
",
|
||||
"parsedBlock": {
|
||||
"children": [
|
||||
@@ -322,7 +289,6 @@ For developer or installation guides, please go to [AFFiNE Development](https://
|
||||
{
|
||||
"children": [],
|
||||
"content": "* Quip & Notion with their great concept of "everything is a block"
|
||||
|
||||
",
|
||||
"flavour": "affine:list",
|
||||
"id": "xFrrdiP3-V",
|
||||
@@ -331,7 +297,6 @@ For developer or installation guides, please go to [AFFiNE Development](https://
|
||||
{
|
||||
"children": [],
|
||||
"content": "* Trello with their Kanban
|
||||
|
||||
",
|
||||
"flavour": "affine:list",
|
||||
"id": "Tp9xyN4Okl",
|
||||
@@ -340,7 +305,6 @@ For developer or installation guides, please go to [AFFiNE Development](https://
|
||||
{
|
||||
"children": [],
|
||||
"content": "* Airtable & Miro with their no-code programable datasheets
|
||||
|
||||
",
|
||||
"flavour": "affine:list",
|
||||
"id": "K_4hUzKZFQ",
|
||||
@@ -349,7 +313,6 @@ For developer or installation guides, please go to [AFFiNE Development](https://
|
||||
{
|
||||
"children": [],
|
||||
"content": "* Miro & Whimiscal with their edgeless visual whiteboard
|
||||
|
||||
",
|
||||
"flavour": "affine:list",
|
||||
"id": "QwMzON2s7x",
|
||||
@@ -358,7 +321,6 @@ For developer or installation guides, please go to [AFFiNE Development](https://
|
||||
{
|
||||
"children": [],
|
||||
"content": "* Remnote & Capacities with their object-based tag system
|
||||
|
||||
",
|
||||
"flavour": "affine:list",
|
||||
"id": "FFVmit6u1T",
|
||||
@@ -427,77 +389,63 @@ For developer or installation guides, please go to [AFFiNE Development](https://
|
||||
"Tag": "<span data-affine-option data-value="AxSe-53xjX" data-option-color="var(--affine-tag-pink)">AFFiNE</span>",
|
||||
"Title": "Affine Development
|
||||
|
||||
|
||||
",
|
||||
"undefined": "Affine Development
|
||||
|
||||
|
||||
",
|
||||
},
|
||||
{
|
||||
"Tag": "<span data-affine-option data-value="0jh9gNw4Yl" data-option-color="var(--affine-tag-orange)">Developers</span>",
|
||||
"Title": "For developers or installations guides, please go to AFFiNE Doc
|
||||
|
||||
|
||||
",
|
||||
"undefined": "For developers or installations guides, please go to AFFiNE Doc
|
||||
|
||||
|
||||
",
|
||||
},
|
||||
{
|
||||
"Tag": "<span data-affine-option data-value="HgHsKOUINZ" data-option-color="var(--affine-tag-blue)">Reference</span>",
|
||||
"Title": "Quip & Notion with their great concept of "everything is a block"
|
||||
|
||||
|
||||
",
|
||||
"undefined": "Quip & Notion with their great concept of "everything is a block"
|
||||
|
||||
|
||||
",
|
||||
},
|
||||
{
|
||||
"Tag": "<span data-affine-option data-value="HgHsKOUINZ" data-option-color="var(--affine-tag-blue)">Reference</span>",
|
||||
"Title": "Trello with their Kanban
|
||||
|
||||
|
||||
",
|
||||
"undefined": "Trello with their Kanban
|
||||
|
||||
|
||||
",
|
||||
},
|
||||
{
|
||||
"Tag": "<span data-affine-option data-value="HgHsKOUINZ" data-option-color="var(--affine-tag-blue)">Reference</span>",
|
||||
"Title": "Airtable & Miro with their no-code programable datasheets
|
||||
|
||||
|
||||
",
|
||||
"undefined": "Airtable & Miro with their no-code programable datasheets
|
||||
|
||||
|
||||
",
|
||||
},
|
||||
{
|
||||
"Tag": "<span data-affine-option data-value="HgHsKOUINZ" data-option-color="var(--affine-tag-blue)">Reference</span>",
|
||||
"Title": "Miro & Whimiscal with their edgeless visual whiteboard
|
||||
|
||||
|
||||
",
|
||||
"undefined": "Miro & Whimiscal with their edgeless visual whiteboard
|
||||
|
||||
|
||||
",
|
||||
},
|
||||
{
|
||||
"Tag": "",
|
||||
"Title": "Remnote & Capacities with their object-based tag system
|
||||
|
||||
|
||||
",
|
||||
"undefined": "Remnote & Capacities with their object-based tag system
|
||||
|
||||
|
||||
",
|
||||
},
|
||||
],
|
||||
@@ -559,113 +507,80 @@ exports[`should parse page doc work with ai editable 1`] = `
|
||||
"<!-- block_id=FoPQcAyV_m flavour=affine:paragraph -->
|
||||
AFFiNE is an open source all in one workspace, an operating system for all the building blocks of your team wiki, knowledge management and digital assets and a better alternative to Notion and Miro.
|
||||
|
||||
|
||||
<!-- block_id=oz48nn_zp8 flavour=affine:paragraph -->
|
||||
|
||||
|
||||
|
||||
<!-- block_id=g8a-D9-jXS flavour=affine:paragraph -->
|
||||
# You own your data, with no compromises
|
||||
|
||||
|
||||
<!-- block_id=J8lHN1GR_5 flavour=affine:paragraph -->
|
||||
## Local-first & Real-time collaborative
|
||||
|
||||
|
||||
<!-- block_id=xCuWdM0VLz flavour=affine:paragraph -->
|
||||
We love the idea proposed by Ink & Switch in the famous article about you owning your data, despite the cloud. Furthermore, AFFiNE is the first all-in-one workspace that keeps your data ownership with no compromises on real-time collaboration and editing experience.
|
||||
|
||||
|
||||
<!-- block_id=zElMi0tViK flavour=affine:paragraph -->
|
||||
AFFiNE is a local-first application upon CRDTs with real-time collaboration support. Your data is always stored locally while multiple nodes remain synced in real-time.
|
||||
|
||||
|
||||
<!-- block_id=Z4rK0OF9Wk flavour=affine:paragraph -->
|
||||
|
||||
|
||||
|
||||
<!-- block_id=DQ0Ryb-SpW flavour=affine:paragraph -->
|
||||
### Blocks that assemble your next docs, tasks kanban or whiteboard
|
||||
|
||||
|
||||
<!-- block_id=HAZC3URZp_ flavour=affine:paragraph -->
|
||||
There is a large overlap of their atomic "building blocks" between these apps. They are neither open source nor have a plugin system like VS Code for contributors to customize. We want to have something that contains all the features we love and goes one step further.
|
||||
|
||||
|
||||
<!-- block_id=0H87ypiuv8 flavour=affine:paragraph -->
|
||||
We are building AFFiNE to be a fundamental open source platform that contains all the building blocks for docs, task management and visual collaboration, hoping you can shape your next workflow with us that can make your life better and also connect others, too.
|
||||
|
||||
|
||||
<!-- block_id=Sp4G1KD0Wn flavour=affine:paragraph -->
|
||||
If you want to learn more about the product design of AFFiNE, here goes the concepts:
|
||||
|
||||
|
||||
<!-- block_id=RsUhDuEqXa flavour=affine:paragraph -->
|
||||
To Shape, not to adapt. AFFiNE is built for individuals & teams who care about their data, who refuse vendor lock-in, and who want to have control over their essential tools.
|
||||
|
||||
|
||||
<!-- block_id=Z2HibKzAr- flavour=affine:paragraph -->
|
||||
## A true canvas for blocks in any form
|
||||
|
||||
|
||||
<!-- block_id=UwvWddamzM flavour=affine:paragraph -->
|
||||
[Many editor apps](http://notion.so) claimed to be a canvas for productivity. Since _the Mother of All Demos,_ Douglas Engelbart, a creative and programable digital workspace has been a pursuit and an ultimate mission for generations of tool makers.
|
||||
|
||||
|
||||
<!-- block_id=g9xKUjhJj1 flavour=affine:paragraph -->
|
||||
|
||||
|
||||
|
||||
<!-- block_id=wDTn4YJ4pm flavour=affine:paragraph -->
|
||||
"We shape our tools and thereafter our tools shape us”. A lot of pioneers have inspired us a long the way, e.g.:
|
||||
|
||||
|
||||
<!-- block_id=xFrrdiP3-V flavour=affine:list -->
|
||||
* Quip & Notion with their great concept of "everything is a block"
|
||||
|
||||
|
||||
<!-- block_id=Tp9xyN4Okl flavour=affine:list -->
|
||||
* Trello with their Kanban
|
||||
|
||||
|
||||
<!-- block_id=K_4hUzKZFQ flavour=affine:list -->
|
||||
* Airtable & Miro with their no-code programable datasheets
|
||||
|
||||
|
||||
<!-- block_id=QwMzON2s7x flavour=affine:list -->
|
||||
* Miro & Whimiscal with their edgeless visual whiteboard
|
||||
|
||||
|
||||
<!-- block_id=FFVmit6u1T flavour=affine:list -->
|
||||
* Remnote & Capacities with their object-based tag system
|
||||
|
||||
|
||||
<!-- block_id=YqnG5O6AE6 flavour=affine:paragraph -->
|
||||
For more details, please refer to our [RoadMap](https://docs.affine.pro/docs/core-concepts/roadmap)
|
||||
|
||||
|
||||
<!-- block_id=sbDTmZMZcq flavour=affine:paragraph -->
|
||||
## Self Host
|
||||
|
||||
|
||||
<!-- block_id=QVvitesfbj flavour=affine:paragraph -->
|
||||
Self host AFFiNE
|
||||
|
||||
|
||||
<!-- block_id=U_GoHFD9At flavour=affine:database placeholder -->
|
||||
|
||||
<!-- block_id=NyHXrMX3R1 flavour=affine:paragraph -->
|
||||
## Affine Development
|
||||
|
||||
|
||||
<!-- block_id=9-K49otbCv flavour=affine:paragraph -->
|
||||
For developer or installation guides, please go to [AFFiNE Development](https://docs.affine.pro/docs/development/quick-start)
|
||||
|
||||
|
||||
<!-- block_id=faFteK9eG- flavour=affine:paragraph -->
|
||||
|
||||
|
||||
|
||||
"
|
||||
`;
|
||||
|
||||
@@ -673,122 +588,74 @@ exports[`should parse page full doc work with ai editable 1`] = `
|
||||
"<!-- block_id=T4qSXc13wz flavour=affine:paragraph -->
|
||||
# H1 text
|
||||
|
||||
|
||||
<!-- block_id=F5eByK8Fx_ flavour=affine:paragraph -->
|
||||
List all flavours in one document.
|
||||
|
||||
|
||||
<!-- block_id=6_-Ta2Hpsg flavour=affine:paragraph -->
|
||||
## H2 ~ H6
|
||||
|
||||
|
||||
<!-- block_id=QLH8pCeJwr flavour=affine:paragraph -->
|
||||
### H3
|
||||
|
||||
|
||||
<!-- block_id=eRseB5ilzP flavour=affine:paragraph -->
|
||||
#### H4 with emoji 😄
|
||||
|
||||
|
||||
<!-- block_id=xSEIo9I5jQ flavour=affine:paragraph -->
|
||||
##### H5
|
||||
|
||||
|
||||
<!-- block_id=h4Fozi-Mvv flavour=affine:paragraph -->
|
||||
###### H6
|
||||
|
||||
|
||||
<!-- block_id=U-Hd9O6FEZ flavour=affine:paragraph -->
|
||||
max is H6
|
||||
|
||||
|
||||
<!-- block_id=z2aCxUDpOc flavour=affine:paragraph -->
|
||||
## List
|
||||
|
||||
|
||||
<!-- block_id=z5Zw7lMlD7 flavour=affine:list -->
|
||||
* item 1
|
||||
|
||||
|
||||
<!-- block_id=Opmt3x2Ao0 flavour=affine:list -->
|
||||
* item 2
|
||||
|
||||
|
||||
* sub item 1
|
||||
|
||||
|
||||
* sub item 2
|
||||
|
||||
|
||||
* super sub item 1
|
||||
|
||||
|
||||
* sub item 3
|
||||
|
||||
|
||||
* sub item 1
|
||||
* sub item 2
|
||||
* super sub item 1
|
||||
* sub item 3
|
||||
<!-- block_id=_EF3g4194w flavour=affine:list -->
|
||||
* item 3
|
||||
|
||||
|
||||
<!-- block_id=5u-T48lLVF flavour=affine:paragraph -->
|
||||
|
||||
|
||||
|
||||
<!-- block_id=7urxrvhr-p flavour=affine:paragraph -->
|
||||
|
||||
|
||||
|
||||
<!-- block_id=U-96XKGGz7 flavour=affine:paragraph -->
|
||||
|
||||
|
||||
|
||||
<!-- block_id=hOvvRmDGqN flavour=affine:paragraph -->
|
||||
sort list
|
||||
|
||||
|
||||
<!-- block_id=hcqkMyvKnx flavour=affine:list -->
|
||||
1. item 1
|
||||
|
||||
|
||||
<!-- block_id=xUsDktnmuD flavour=affine:list -->
|
||||
1. item 2
|
||||
|
||||
|
||||
<!-- block_id=xa5tsLHHJN flavour=affine:list -->
|
||||
1. item 3
|
||||
|
||||
|
||||
1. sub item 1
|
||||
|
||||
|
||||
1. sub item 2
|
||||
|
||||
|
||||
1. super item 1
|
||||
|
||||
|
||||
1. super item 2
|
||||
|
||||
|
||||
1. sub item 3
|
||||
|
||||
|
||||
1. sub item 1
|
||||
1. sub item 2
|
||||
1. super item 1
|
||||
1. super item 2
|
||||
1. sub item 3
|
||||
<!-- block_id=BX05mQdxJ0 flavour=affine:list -->
|
||||
1. item 4
|
||||
|
||||
|
||||
<!-- block_id=VYzM3O17th flavour=affine:paragraph -->
|
||||
|
||||
|
||||
|
||||
<!-- block_id=epKYpKt5vo flavour=affine:paragraph -->
|
||||
|
||||
|
||||
|
||||
<!-- block_id=5Ghem19uGh flavour=affine:paragraph -->
|
||||
Table
|
||||
|
||||
|
||||
<!-- block_id=OXvH-s1Jx4 flavour=affine:table -->
|
||||
|c1|c2|c3|c4|
|
||||
|---|---|---|---|
|
||||
@@ -796,176 +663,129 @@ Table
|
||||
||||v4|
|
||||
||v6||v5|
|
||||
|
||||
|
||||
<!-- block_id=j2F2hQ3zy9 flavour=affine:paragraph -->
|
||||
|
||||
|
||||
|
||||
<!-- block_id=jLCRD2G_BC flavour=affine:paragraph -->
|
||||
|
||||
|
||||
|
||||
<!-- block_id=794ZoPeBJM flavour=affine:paragraph -->
|
||||
Database
|
||||
|
||||
|
||||
<!-- block_id=xQ7rA57Qxz flavour=affine:database placeholder -->
|
||||
|
||||
<!-- block_id=RbMSmluZYK flavour=affine:paragraph -->
|
||||
Code
|
||||
|
||||
|
||||
<!-- block_id=cJ6CMeUWMg flavour=affine:code -->
|
||||
\`\`\`javascript
|
||||
console.log('hello world');
|
||||
\`\`\`
|
||||
|
||||
|
||||
<!-- block_id=y1xVwkxlDm flavour=affine:paragraph -->
|
||||
|
||||
|
||||
|
||||
<!-- block_id=BKy3zmm8SE flavour=affine:paragraph -->
|
||||
Image
|
||||
|
||||
|
||||
<!-- block_id=WFftQ-qXzr flavour=affine:image -->
|
||||
|
||||

|
||||
|
||||
|
||||
<!-- block_id=F-RKpfxL1z flavour=affine:paragraph -->
|
||||
|
||||
|
||||
|
||||
<!-- block_id=G3LSqjKv8M flavour=affine:paragraph -->
|
||||
File
|
||||
|
||||
|
||||
<!-- block_id=pO8JCsiK4z flavour=affine:attachment -->
|
||||
|
||||

|
||||
|
||||
|
||||
<!-- block_id=dTKFqQhJuA flavour=affine:paragraph -->
|
||||
|
||||
|
||||
|
||||
<!-- block_id=nwld7RMYvp flavour=affine:paragraph -->
|
||||
> foo bar quote text
|
||||
|
||||
|
||||
<!-- block_id=MwBD3BhRnf flavour=affine:paragraph -->
|
||||
|
||||
|
||||
|
||||
<!-- block_id=pakOSAm6EU flavour=affine:paragraph -->
|
||||
|
||||
|
||||
|
||||
<!-- block_id=95-NxAyFuo flavour=affine:divider -->
|
||||
|
||||
---
|
||||
|
||||
|
||||
<!-- block_id=r9EllTNiN1 flavour=affine:paragraph -->
|
||||
|
||||
|
||||
|
||||
<!-- block_id=OpxZ1kYM40 flavour=affine:paragraph -->
|
||||
TeX
|
||||
|
||||
|
||||
<!-- block_id=gjFqI97IRc flavour=affine:paragraph -->
|
||||
|
||||
|
||||
|
||||
<!-- block_id=KXBZ1_Pfdw flavour=affine:paragraph -->
|
||||
|
||||
|
||||
|
||||
<!-- block_id=VHj5gMaGa7 flavour=affine:paragraph -->
|
||||
2025-06-18 13:15
|
||||
|
||||
|
||||
<!-- block_id=JwaUwzuQEH flavour=affine:paragraph -->
|
||||
|
||||
|
||||
|
||||
<!-- block_id=_zu2kl56FY flavour=affine:database placeholder -->
|
||||
|
||||
<!-- block_id=Kcbp6BLA-y flavour=affine:paragraph -->
|
||||
Mind Map
|
||||
|
||||
|
||||
<!-- block_id=R_g1tzqzAU flavour=affine:paragraph -->
|
||||
|
||||
|
||||
|
||||
<!-- block_id=C8G82uLCz1 flavour=affine:paragraph -->
|
||||
|
||||
|
||||
|
||||
<!-- block_id=J6gfR8YMGy flavour=affine:paragraph -->
|
||||
A Link
|
||||
|
||||
|
||||
<!-- block_id=yHky0s_H1v flavour=affine:embed-linked-doc -->
|
||||
|
||||
[null](doc://FmHFPAPzp51JjFP89aZ-b)
|
||||
|
||||
|
||||
<!-- block_id=P7w3ka4Amo flavour=affine:paragraph -->
|
||||
Todo List
|
||||
|
||||
|
||||
<!-- block_id=WbeCXu6fcA flavour=affine:list -->
|
||||
- [ ] abc
|
||||
|
||||
|
||||
<!-- block_id=X_F5fw-MEn flavour=affine:list -->
|
||||
- [ ] edf
|
||||
|
||||
|
||||
- [x] done1
|
||||
|
||||
|
||||
- [x] done1
|
||||
<!-- block_id=sdw-couBVA flavour=affine:list -->
|
||||
- [ ] end
|
||||
|
||||
|
||||
<!-- block_id=COJiWGOVJu flavour=affine:paragraph -->
|
||||
|
||||
|
||||
|
||||
<!-- block_id=shK7TY-Q3F flavour=affine:paragraph -->
|
||||
~~delete text~~
|
||||
|
||||
|
||||
<!-- block_id=_NIj4pT_Iy flavour=affine:paragraph -->
|
||||
|
||||
|
||||
|
||||
<!-- block_id=CaXXPfEt62 flavour=affine:paragraph -->
|
||||
**Bold text**
|
||||
|
||||
|
||||
<!-- block_id=1WFCwn1708 flavour=affine:paragraph -->
|
||||
|
||||
|
||||
|
||||
<!-- block_id=25f19QUjQI flavour=affine:paragraph -->
|
||||
Underline
|
||||
|
||||
|
||||
<!-- block_id=GrS-y17iiw flavour=affine:paragraph -->
|
||||
|
||||
|
||||
|
||||
<!-- block_id=dJm5C8KsEg flavour=affine:paragraph -->
|
||||
Youtube
|
||||
|
||||
|
||||
<!-- block_id=epfNja2Txk flavour=affine:embed-youtube -->
|
||||
|
||||
<iframe
|
||||
@@ -979,23 +799,18 @@ Youtube
|
||||
credentialless>
|
||||
</iframe>
|
||||
|
||||
|
||||
<!-- block_id=wNb6ZRJKMt flavour=affine:paragraph -->
|
||||
|
||||
|
||||
|
||||
<!-- block_id=HqKjEGWF_s flavour=affine:paragraph -->
|
||||
## end
|
||||
|
||||
|
||||
<!-- block_id=FOh_TJmcF1 flavour=affine:paragraph -->
|
||||
this is end
|
||||
|
||||
|
||||
<!-- block_id=ImCJN2Xint flavour=affine:paragraph -->
|
||||
|
||||
|
||||
|
||||
"
|
||||
`;
|
||||
|
||||
|
||||
@@ -22,9 +22,10 @@ export const parseBlockToMd = (
block.content
.split('\n')
.map(line => padding + line)
.slice(0, -1)
.join('\n') +
'\n' +
block.children.map(b => parseBlockToMd(b, padding + ' ')).join('')
block.children.map(b => parseBlockToMd(b, padding + ' ')).join('')
);
} else {
return block.children.map(b => parseBlockToMd(b, padding)).join('');
@@ -109,7 +110,7 @@ export function parseBlock(
const checked = yBlock.get('prop:checked') as boolean;
prefix = checked ? '- [x] ' : '- [ ] ';
}
result.content = prefix + toMd() + '\n';
result.content = prefix + toMd();
break;
}
case 'affine:code': {
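This hunk drops the trailing newline from list content and lets the recursive call handle child indentation instead. A simplified stand-in for that idea (not AFFiNE's `parseBlockToMd`), showing the checkbox prefix and per-level indentation on a plain block tree:

```ts
// Minimal sketch of block-tree-to-markdown flattening: each block renders its
// own content with the current padding, todo-list items get a checkbox prefix,
// and children are rendered one indentation level deeper.
interface Block {
  flavour: string;
  content: string;   // markdown for this block only, without a trailing newline
  checked?: boolean; // set for todo-list items
  children: Block[];
}

function blockToMd(block: Block, padding = ''): string {
  const prefix =
    block.flavour === 'affine:list' && block.checked !== undefined
      ? (block.checked ? '- [x] ' : '- [ ] ')
      : '';
  const own = block.content
    ? (prefix + block.content)
        .split('\n')
        .map(line => padding + line)
        .join('\n') + '\n'
    : '';
  const children = block.children
    .map(child => blockToMd(child, padding + '  '))
    .join('');
  return own + children;
}

// blockToMd({ flavour: 'affine:list', content: 'buy milk', checked: false, children: [] })
// -> "- [ ] buy milk\n"
```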
@@ -14,6 +14,7 @@
android:label="@string/app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:usesCleartextTraffic="true"
android:theme="@style/AppTheme">

<activity

@@ -10,6 +10,7 @@ import { Suspense } from 'react';
import { RouterProvider } from 'react-router-dom';

import { setupEffects } from './effects';
import { DesktopLanguageSync } from './language-sync';
import { DesktopThemeSync } from './theme-sync';

const { frameworkProvider } = setupEffects();
@@ -46,6 +47,7 @@ export function App() {
<I18nProvider>
<AffineContext store={getCurrentStore()}>
<DesktopThemeSync />
<DesktopLanguageSync />
<RouterProvider
fallbackElement={<AppContainer fallback />}
router={router}
Some files were not shown because too many files have changed in this diff.