Mirror of https://github.com/toeverything/AFFiNE.git (synced 2026-02-08 18:43:46 +00:00)

Compare commits: v0.23.0-be...v0.23.0 (84 commits)
| SHA1 |
|---|
| 8ec4bbb298 |
| 812c199b45 |
| 36bd8f645a |
| 7cff8091e4 |
| de8feb98a3 |
| fbd6e8fa97 |
| bcf6bd1dfc |
| 8627560fd5 |
| 9a3e44c6d6 |
| 7b53641a94 |
| 3948b8eada |
| d05bb9992c |
| b2c09825ac |
| 65453c31c6 |
| d9e8ce802f |
| d5f63b9e43 |
| ebefbeefc8 |
| 4d7d8f215f |
| b6187718ea |
| 3ee82bd9ce |
| 3dbdb99435 |
| 0d414d914a |
| 41f338bce0 |
| 6f87c1ca50 |
| 33f6496d79 |
| 847ef00a75 |
| 93f13e9e01 |
| a2b86bc6d2 |
| aee7a8839e |
| 0e8ffce126 |
| 9cda655c9e |
| 15726bd522 |
| d65a7494a4 |
| 0f74e1fa0f |
| fef4a9eeb6 |
| 58dc53581f |
| b23f380539 |
| d29a97f86c |
| 0f287f9661 |
| 18f13626cc |
| 0eeea5e173 |
| 2052a34d19 |
| b79439b01d |
| 2dacba9011 |
| af9c455ee0 |
| 3d45c7623f |
| e0f88451e1 |
| aba0a3d485 |
| 8b579e3a92 |
| d98b45ca3d |
| fc1104cd68 |
| 46901c472c |
| 9d5c7dd1e9 |
| f655e6e8bf |
| 46a9d0f7fe |
| 340aae6476 |
| 6b7d1e91e0 |
| 3538c78a8b |
| 7d527c7f3a |
| ad5a122391 |
| 0f9b9789da |
| 5b027f7986 |
| fe00293e3e |
| 385226083f |
| 38d8dde6b8 |
| ed6fde550f |
| 11a9e67bc1 |
| 899585ba7f |
| 1fe07410c0 |
| 0f3066f7d0 |
| c4c11da976 |
| 38537bf310 |
| 1f87cd8752 |
| f54cb5c296 |
| 45c016af8b |
| d4c905600b |
| f839e5c136 |
| 39abd1bbb8 |
| ecea7bd825 |
| d10e5ee92f |
| dace1d1738 |
| ae74f4ae51 |
| 9071c5032d |
| 8236ecf486 |
.github/actions/setup-version/action.yml (6 lines changed, vendored)
@@ -4,9 +4,15 @@ inputs:
app-version:
description: 'App Version'
required: true
ios-app-version:
description: 'iOS App Store Version (Optional, use App version if empty)'
required: false
type: string
runs:
using: 'composite'
steps:
- name: 'Write Version'
shell: bash
env:
IOS_APP_VERSION: ${{ inputs.ios-app-version }}
run: ./scripts/set-version.sh ${{ inputs.app-version }}
.github/deployment/node/Dockerfile (5 lines changed, vendored)
@@ -7,7 +7,10 @@ COPY ./packages/frontend/apps/mobile/dist /app/static/mobile
WORKDIR /app

RUN apt-get update && \
apt-get install -y --no-install-recommends openssl && \
apt-get install -y --no-install-recommends openssl libjemalloc2 && \
rm -rf /var/lib/apt/lists/*

# Enable jemalloc by preloading the library
ENV LD_PRELOAD=libjemalloc.so.2

CMD ["node", "./dist/main.js"]
.github/workflows/release-mobile.yml (8 lines changed, vendored)
@@ -12,6 +12,9 @@ on:
build-type:
type: string
required: true
ios-app-version:
type: string
required: false

env:
BUILD_TYPE: ${{ inputs.build-type }}
@@ -78,7 +81,7 @@ jobs:
path: packages/frontend/apps/android/dist

ios:
runs-on: ${{ github.ref_name == 'canary' && 'macos-latest' || 'blaze/macos-14' }}
runs-on: 'macos-15'
needs:
- build-ios-web
steps:
@@ -87,6 +90,7 @@ jobs:
uses: ./.github/actions/setup-version
with:
app-version: ${{ inputs.app-version }}
ios-app-version: ${{ inputs.ios-app-version }}
- name: 'Update Code Sign Identity'
shell: bash
run: ./packages/frontend/apps/ios/update_code_sign_identity.sh
@@ -106,7 +110,7 @@ jobs:
enableScripts: false
- uses: maxim-lobanov/setup-xcode@v1
with:
xcode-version: 16.2
xcode-version: 16.4
- name: Install Swiftformat
run: brew install swiftformat
- name: Cap sync
.github/workflows/release.yml (5 lines changed, vendored)
@@ -21,6 +21,10 @@ on:
required: true
type: boolean
default: false
ios-app-version:
description: 'iOS App Store Version (Optional, use tag version if empty)'
required: false
type: string

permissions:
contents: write
@@ -117,3 +121,4 @@ jobs:
build-type: ${{ needs.prepare.outputs.BUILD_TYPE }}
app-version: ${{ needs.prepare.outputs.APP_VERSION }}
git-short-hash: ${{ needs.prepare.outputs.GIT_SHORT_HASH }}
ios-app-version: ${{ inputs.ios-app-version }}
@@ -266,6 +266,7 @@
"./components/toggle-button": "./src/components/toggle-button.ts",
"./components/toggle-switch": "./src/components/toggle-switch.ts",
"./components/toolbar": "./src/components/toolbar.ts",
"./components/tooltip": "./src/components/tooltip.ts",
"./components/view-dropdown-menu": "./src/components/view-dropdown-menu.ts",
"./components/tooltip-content-with-shortcut": "./src/components/tooltip-content-with-shortcut.ts",
"./components/resource": "./src/components/resource.ts",
blocksuite/affine/all/src/components/tooltip.ts (1 line, new file)
@@ -0,0 +1 @@
export * from '@blocksuite/affine-components/tooltip';
@@ -73,7 +73,8 @@
"./edgeless-line-styles-panel": "./src/edgeless-line-styles-panel/index.ts",
"./edgeless-shape-color-picker": "./src/edgeless-shape-color-picker/index.ts",
"./open-doc-dropdown-menu": "./src/open-doc-dropdown-menu/index.ts",
"./slider": "./src/slider/index.ts"
"./slider": "./src/slider/index.ts",
"./tooltip": "./src/tooltip/index.ts"
},
"files": [
"src",
@@ -85,6 +85,8 @@ export class MenuSubMenu extends MenuFocusable {
.catch(err => console.error(err));
});
this.menu.openSubMenu(menu);
// in case that the menu is not closed, but the component is removed,
this.disposables.add(unsub);
}

protected override render(): unknown {
@@ -18,6 +18,7 @@ export const LoadingIcon = ({
viewBox="0 0 24 24"
xmlns="http://www.w3.org/2000/svg"
fill="none"
style="fill: none;"
>
<style>
.spinner {
@@ -1,3 +1,4 @@
import { effects as tooltipEffects } from '../tooltip/effect.js';
import { EditorIconButton } from './icon-button.js';
import {
EditorMenuAction,
@@ -6,7 +7,6 @@ import {
} from './menu-button.js';
import { EditorToolbarSeparator } from './separator.js';
import { EditorToolbar } from './toolbar.js';
import { Tooltip } from './tooltip.js';

export { EditorChevronDown } from './chevron-down.js';
export { ToolbarMoreMenuConfigExtension } from './config.js';
@@ -20,7 +20,6 @@ export { MenuContext } from './menu-context.js';
export { EditorToolbarSeparator } from './separator.js';
export { darkToolbarStyles, lightToolbarStyles } from './styles.js';
export { EditorToolbar } from './toolbar.js';
export { Tooltip } from './tooltip.js';
export type {
AdvancedMenuItem,
FatMenuItems,
@@ -38,11 +37,12 @@ export {
} from './utils.js';

export function effects() {
tooltipEffects();

customElements.define('editor-toolbar-separator', EditorToolbarSeparator);
customElements.define('editor-toolbar', EditorToolbar);
customElements.define('editor-icon-button', EditorIconButton);
customElements.define('editor-menu-button', EditorMenuButton);
customElements.define('editor-menu-content', EditorMenuContent);
customElements.define('editor-menu-action', EditorMenuAction);
customElements.define('affine-tooltip', Tooltip);
}
blocksuite/affine/components/src/tooltip/effect.ts (7 lines, new file)
@@ -0,0 +1,7 @@
import { Tooltip } from './tooltip.js';

export function effects() {
if (!customElements.get('affine-tooltip')) {
customElements.define('affine-tooltip', Tooltip);
}
}
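The new effect.ts only registers the element when `customElements.get` comes back empty, so calling `effects()` from more than one entry point does not throw a duplicate-definition error. A minimal standalone sketch of that guard, using a placeholder element class instead of the real Tooltip:

```ts
// Illustrative sketch of the guarded-registration pattern used in effect.ts.
// `DemoTooltip` is a placeholder; the real module registers the Tooltip class
// from '@blocksuite/affine-components/tooltip'.
class DemoTooltip extends HTMLElement {}

export function effects() {
  // `customElements.define` throws if the tag name is already registered,
  // so checking first makes repeated calls from different entry points safe.
  if (!customElements.get('affine-tooltip')) {
    customElements.define('affine-tooltip', DemoTooltip);
  }
}

// Calling it twice is a no-op the second time.
effects();
effects();
```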
blocksuite/affine/components/src/tooltip/index.ts (2 lines, new file)
@@ -0,0 +1,2 @@
export { effects } from './effect.js';
export { Tooltip } from './tooltip.js';
@@ -116,6 +116,7 @@ export class EdgelessTemplateButton extends EdgelessToolbarToolMixin(
`;

private _cleanup: (() => void) | null = null;
private _autoUpdateCleanup: (() => void) | null = null;

private _prevTool: ToolOptionWithType | null = null;
@@ -128,6 +129,11 @@ export class EdgelessTemplateButton extends EdgelessToolbarToolMixin(
return [TemplateCard1[theme], TemplateCard2[theme], TemplateCard3[theme]];
}

override connectedCallback() {
super.connectedCallback();
this.disposables.add(() => this._autoUpdateCleanup?.());
}

private _closePanel() {
if (this._openedPanel) {
this._openedPanel.remove();
@@ -175,8 +181,8 @@ export class EdgelessTemplateButton extends EdgelessToolbarToolMixin(
requestAnimationFrame(() => {
const arrowEl = panel.renderRoot.querySelector('.arrow') as HTMLElement;

autoUpdate(this, panel, () => {
this._autoUpdateCleanup?.();
this._autoUpdateCleanup = autoUpdate(this, panel, () => {
computePosition(this, panel, {
placement: 'top',
middleware: [offset(20), arrow({ element: arrowEl }), shift()],
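The template-button hunk above stores what `autoUpdate` returns and disposes it through the component's disposables. With `@floating-ui/dom`, `autoUpdate` returns a cleanup function, and dropping it leaks the scroll and resize listeners it registers. A hedged sketch of the pattern outside the component; the element lookups and function names here are placeholders:

```ts
import { autoUpdate, computePosition, offset, shift } from '@floating-ui/dom';

// Placeholder elements; in the diff these are the toolbar button and its panel.
const reference = document.querySelector('#button') as HTMLElement;
const floating = document.querySelector('#panel') as HTMLElement;

let cleanup: (() => void) | null = null;

function openPanel() {
  // Cancel any previous observer before starting a new one.
  cleanup?.();
  cleanup = autoUpdate(reference, floating, () => {
    void computePosition(reference, floating, {
      placement: 'top',
      middleware: [offset(20), shift()],
    }).then(({ x, y }) => {
      Object.assign(floating.style, { left: `${x}px`, top: `${y}px` });
    });
  });
}

function closePanel() {
  // Without this call, the listeners registered by autoUpdate keep running.
  cleanup?.();
  cleanup = null;
}
```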
@@ -43,10 +43,14 @@ export class InlineCommentManager extends LifeCycleWatcher {
this._disposables.add(provider.onCommentAdded(this._handleAddComment));
this._disposables.add(
provider.onCommentDeleted(this._handleDeleteAndResolve)
provider.onCommentDeleted(id =>
this._handleDeleteAndResolve(id, 'delete')
)
);
this._disposables.add(
provider.onCommentResolved(this._handleDeleteAndResolve)
provider.onCommentResolved(id =>
this._handleDeleteAndResolve(id, 'resolve')
)
);
this._disposables.add(
provider.onCommentHighlighted(this._handleHighlightComment)
@@ -64,15 +68,16 @@ export class InlineCommentManager extends LifeCycleWatcher {
const provider = this._provider;
if (!provider) return;

const commentsInProvider = await provider.getComments('unresolved');
const commentsInProvider = await provider.getComments('all');

const commentsInEditor = this.getCommentsInEditor();

// remove comments that are in editor but not in provider
// which means the comment may be removed or resolved in provider side
difference(commentsInEditor, commentsInProvider).forEach(comment => {
this._handleDeleteAndResolve(comment);
this.std.get(BlockElementCommentManager).handleDeleteAndResolve(comment);
this.std
.get(BlockElementCommentManager)
.handleDeleteAndResolve(comment, 'delete');
});
}
@@ -162,7 +167,10 @@ export class InlineCommentManager extends LifeCycleWatcher {
});
};

private readonly _handleDeleteAndResolve = (id: CommentId) => {
private readonly _handleDeleteAndResolve = (
id: CommentId,
type: 'delete' | 'resolve'
) => {
const commentedTexts = findCommentedTexts(this.std.store, id);
if (commentedTexts.length === 0) return;
@@ -176,7 +184,7 @@ export class InlineCommentManager extends LifeCycleWatcher {
inlineEditor?.formatText(
selection.from,
{
[`comment-${id}`]: null,
[`comment-${id}`]: type === 'delete' ? null : false,
},
{
withoutTransact: true,
@@ -23,7 +23,10 @@ import { isEqual } from 'lodash-es';
export class InlineComment extends WithDisposable(ShadowlessElement) {
static override styles = css`
inline-comment {
display: inline-block;
display: inline;
}

inline-comment.unresolved {
background-color: ${unsafeCSSVarV2('block/comment/highlightDefault')};
border-bottom: 2px solid
${unsafeCSSVarV2('block/comment/highlightUnderline')};
@@ -41,6 +44,9 @@ export class InlineComment extends WithDisposable(ShadowlessElement) {
})
accessor commentIds!: string[];

@property({ attribute: false })
accessor unresolved = false;

private _index: number = 0;

@consume({ context: stdContext })
@@ -54,8 +60,10 @@ export class InlineComment extends WithDisposable(ShadowlessElement) {
}

private readonly _handleClick = () => {
this._provider?.highlightComment(this.commentIds[this._index]);
this._index = (this._index + 1) % this.commentIds.length;
if (this.unresolved) {
this._provider?.highlightComment(this.commentIds[this._index]);
this._index = (this._index + 1) % this.commentIds.length;
}
};

private readonly _handleHighlight = (id: CommentId | null) => {
@@ -89,6 +97,13 @@ export class InlineComment extends WithDisposable(ShadowlessElement) {
this.classList.remove('highlighted');
}
}
if (_changedProperties.has('unresolved')) {
if (this.unresolved) {
this.classList.add('unresolved');
} else {
this.classList.remove('unresolved');
}
}
}

override render() {
@@ -21,19 +21,25 @@ export const CommentInlineSpecExtension =
),
match: delta => {
if (!delta.attributes) return false;
const comments = Object.entries(delta.attributes).filter(
([key, value]) => isInlineCommendId(key) && value === true
);
const comments = Object.keys(delta.attributes).filter(isInlineCommendId);
return comments.length > 0;
},
renderer: ({ delta, children }) =>
html`<inline-comment .commentIds=${extractCommentIdFromDelta(delta)}
renderer: ({ delta, children }) => {
if (!delta.attributes) return html`${nothing}`;

const unresolved = Object.entries(delta.attributes).some(
([key, value]) => isInlineCommendId(key) && value === true
);
return html`<inline-comment
.unresolved=${unresolved}
.commentIds=${extractCommentIdFromDelta(delta)}
>${when(
children,
() => html`${children}`,
() => nothing
)}</inline-comment
>`,
>`;
},
wrapper: true,
});
@@ -47,3 +53,7 @@ export const NullCommentInlineSpecExtension =
match: () => false,
renderer: () => html``,
});

// reuse the same identifier
NullCommentInlineSpecExtension.identifier =
CommentInlineSpecExtension.identifier;
@@ -57,10 +57,12 @@ export class BlockElementCommentManager extends LifeCycleWatcher {
this._disposables.add(provider.onCommentAdded(this._handleAddComment));
this._disposables.add(
provider.onCommentDeleted(this.handleDeleteAndResolve)
provider.onCommentDeleted(id => this.handleDeleteAndResolve(id, 'delete'))
);
this._disposables.add(
provider.onCommentResolved(this.handleDeleteAndResolve)
provider.onCommentResolved(id =>
this.handleDeleteAndResolve(id, 'resolve')
)
);
this._disposables.add(
provider.onCommentHighlighted(this._handleHighlightComment)
@@ -123,8 +125,7 @@ export class BlockElementCommentManager extends LifeCycleWatcher {
const gfx = this.std.get(GfxControllerIdentifier);
const elementsFromSurfaceSelection = selections
.filter(s => s instanceof SurfaceSelection)
.flatMap(({ blockId, elements }) => {
if (blockId !== gfx.surface?.id) return [];
.flatMap(({ elements }) => {
return elements
.map(id => gfx.getElementById<GfxModel>(id))
.filter(m => m !== null);
@@ -147,18 +148,29 @@ export class BlockElementCommentManager extends LifeCycleWatcher {
}
};

readonly handleDeleteAndResolve = (id: CommentId) => {
readonly handleDeleteAndResolve = (
id: CommentId,
type: 'delete' | 'resolve'
) => {
const commentedBlocks = findCommentedBlocks(this.std.store, id);
this.std.store.withoutTransact(() => {
commentedBlocks.forEach(block => {
delete block.props.comments[id];
if (type === 'delete') {
delete block.props.comments[id];
} else {
block.props.comments[id] = false;
}
});
});

const commentedElements = findCommentedElements(this.std.store, id);
this.std.store.withoutTransact(() => {
commentedElements.forEach(element => {
delete element.comments[id];
if (type === 'delete') {
delete element.comments[id];
} else {
element.comments[id] = false;
}
});
});
};
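Both comment managers now distinguish deleting a comment from resolving it: delete removes the comment key from the model, while resolve keeps the key and sets its value to false, which the inline spec then treats as a resolved, non-highlighted range. A small sketch of that mapping on a plain record, with a hypothetical helper name:

```ts
type CommentMap = Record<string, boolean>;

// Hypothetical helper mirroring how handleDeleteAndResolve treats
// block.props.comments / element.comments in the diff above.
function applyCommentChange(
  comments: CommentMap,
  id: string,
  type: 'delete' | 'resolve'
): CommentMap {
  const next = { ...comments };
  if (type === 'delete') {
    delete next[id]; // the mark disappears from the model entirely
  } else {
    next[id] = false; // the mark stays, but is no longer "unresolved"
  }
  return next;
}

console.log(applyCommentChange({ a: true, b: true }, 'a', 'resolve')); // { a: false, b: true }
console.log(applyCommentChange({ a: true, b: true }, 'a', 'delete')); // { b: true }
```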
@@ -40,7 +40,6 @@ export interface NotificationService {
}[];
onClose?: () => void;
}): void;

/**
* Notify with undo action, it is a helper function to notify with undo action.
* And the notification card will be closed when undo action is triggered by shortcut key or other ways.
@@ -55,13 +54,16 @@ export const NotificationProvider = createIdentifier<NotificationService>(
);

export function NotificationExtension(
notificationService: Omit<NotificationService, 'notifyWithUndoAction'>
notificationService: NotificationService
): ExtensionType {
return {
setup: di => {
di.addImpl(NotificationProvider, provider => {
return {
...notificationService,
notify: notificationService.notify,
toast: notificationService.toast,
confirm: notificationService.confirm,
prompt: notificationService.prompt,
notifyWithUndoAction: options => {
notifyWithUndoActionImpl(
provider,
@@ -30,9 +30,9 @@ function inlineTextStyles(
}

return styleMap({
'font-weight': props.bold ? 'bold' : 'normal',
'font-style': props.italic ? 'italic' : 'normal',
'text-decoration': textDecorations.length > 0 ? textDecorations : 'none',
'font-weight': props.bold ? 'bold' : 'inherit',
'font-style': props.italic ? 'italic' : 'inherit',
'text-decoration': textDecorations.length > 0 ? textDecorations : 'inherit',
...inlineCodeStyle,
});
}
@@ -31,7 +31,7 @@ export interface BlockStdOptions {
extensions: ExtensionType[];
}

const internalExtensions = [
export const internalExtensions = [
ServiceManager,
CommandManager,
UIEventDispatcher,
@@ -372,3 +372,66 @@ Generated by [AVA](https://avajs.dev).
[assistant]: Quantum computing uses quantum mechanics principles.`,
promptName: 'Summary as title',
}

## should handle copilot cron jobs correctly

> daily job scheduling calls

[
{
args: [
'copilot.session.cleanupEmptySessions',
{},
{
jobId: 'daily-copilot-cleanup-empty-sessions',
},
],
},
{
args: [
'copilot.session.generateMissingTitles',
{},
{
jobId: 'daily-copilot-generate-missing-titles',
},
],
},
]

> cleanup empty sessions calls

[
{
args: [
'Date',
],
},
]

> title generation calls

{
jobCalls: [
{
args: [
'copilot.session.generateTitle',
{
sessionId: 'session1',
},
],
},
{
args: [
'copilot.session.generateTitle',
{
sessionId: 'session2',
},
],
},
],
modelCalls: [
{
args: [],
},
],
}
Binary file not shown.
@@ -207,6 +207,7 @@ const retry = async (
try {
await callback(t);
} catch (e) {
console.error(`Error during ${action}:`, e);
t.log(`Error during ${action}:`, e);
throw e;
}
@@ -350,10 +351,10 @@ The term **“CRDT”** was first introduced by Marc Shapiro, Nuno Preguiça, Ca
params: {
files: [
{
blobId: 'euclidean_distance',
fileName: 'euclidean_distance.rs',
fileType: 'text/rust',
fileContent: TestAssets.Code,
blobId: 'todo_md',
fileName: 'todo.md',
fileType: 'text/markdown',
fileContent: TestAssets.TODO,
},
],
},
@@ -369,7 +370,7 @@ The term **“CRDT”** was first introduced by Marc Shapiro, Nuno Preguiça, Ca
.map(c => JSON.parse(c.citationJson).type)
.filter(type => ['attachment', 'doc'].includes(type)).length ===
0,
'should not have citation'
`should not have citation: ${JSON.stringify(c, null, 2)}`
);
});
},
@@ -475,6 +476,7 @@ The term **“CRDT”** was first introduced by Marc Shapiro, Nuno Preguiça, Ca
},
},
],
config: { model: 'gemini-2.5-pro' },
verifier: (t: ExecutionContext<Tester>, result: string) => {
t.notThrows(() => {
TranscriptionResponseSchema.parse(JSON.parse(result));
@@ -483,6 +485,34 @@ The term **“CRDT”** was first introduced by Marc Shapiro, Nuno Preguiça, Ca
type: 'structured' as const,
prefer: CopilotProviderType.Gemini,
},
{
promptName: ['Conversation Summary'],
messages: [
{
role: 'user' as const,
content: '',
params: {
messages: [
{ role: 'user', content: 'what is single source of truth?' },
{ role: 'assistant', content: TestAssets.SSOT },
],
focus: 'technical decisions',
length: 'comprehensive',
},
},
],
verifier: (t: ExecutionContext<Tester>, result: string) => {
assertNotWrappedInCodeBlock(t, result);
const cleared = result.toLowerCase();
t.assert(
cleared.includes('single source of truth') ||
/single.*source/.test(cleared) ||
cleared.includes('ssot'),
'should include original keyword'
);
},
type: 'text' as const,
},
{
promptName: [
'Summary',
@@ -668,11 +698,12 @@ for (const {
t.truthy(provider, 'should have provider');
await retry(`action: ${promptName}`, t, async t => {
const finalConfig = Object.assign({}, prompt.config, config);
const modelId = finalConfig.model || prompt.model;

switch (type) {
case 'text': {
const result = await provider.text(
{ modelId: prompt.model },
{ modelId },
[
...prompt.finish(
messages.reduce(
@@ -691,7 +722,7 @@ for (const {
}
case 'structured': {
const result = await provider.structure(
{ modelId: prompt.model },
{ modelId },
[
...prompt.finish(
messages.reduce(
@@ -710,7 +741,7 @@ for (const {
case 'object': {
const streamObjects: StreamObject[] = [];
for await (const chunk of provider.streamObject(
{ modelId: prompt.model },
{ modelId },
[
...prompt.finish(
messages.reduce(
@@ -742,7 +773,7 @@ for (const {
});
}
const stream = provider.streamImages(
{ modelId: prompt.model },
{ modelId },
[
...prompt.finish(
finalMessage.reduce(
@@ -290,6 +290,7 @@ test('should fork session correctly', async t => {
const assertForkSession = async (
workspaceId: string,
docId: string,
sessionId: string,
lastMessageId: string | undefined,
error: string,
@@ -300,13 +301,7 @@ test('should fork session correctly', async t => {
}
) =>
await asserter(
forkCopilotSession(
app,
workspaceId,
randomUUID(),
sessionId,
lastMessageId
)
forkCopilotSession(app, workspaceId, docId, sessionId, lastMessageId)
);

// prepare session
@@ -330,6 +325,7 @@ test('should fork session correctly', async t => {
// should be able to fork session
forkedSessionId = await assertForkSession(
id,
docId,
sessionId,
latestMessageId!,
'should be able to fork session with cloud workspace that user can access'
@@ -340,6 +336,7 @@ test('should fork session correctly', async t => {
{
forkedSessionId = await assertForkSession(
id,
docId,
sessionId,
undefined,
'should be able to fork session without latestMessageId'
@@ -348,18 +345,25 @@ test('should fork session correctly', async t => {
// should not be able to fork session with wrong latestMessageId
{
await assertForkSession(id, sessionId, 'wrong-message-id', '', async x => {
await t.throwsAsync(
x,
{ instanceOf: Error },
'should not able to fork session with wrong latestMessageId'
);
});
await assertForkSession(
id,
docId,
sessionId,
'wrong-message-id',
'',
async x => {
await t.throwsAsync(
x,
{ instanceOf: Error },
'should not able to fork session with wrong latestMessageId'
);
}
);
}

{
const u2 = await app.signupV1();
await assertForkSession(id, sessionId, randomUUID(), '', async x => {
await assertForkSession(id, docId, sessionId, randomUUID(), '', async x => {
await t.throwsAsync(
x,
{ instanceOf: Error },
@@ -371,7 +375,7 @@ test('should fork session correctly', async t => {
const inviteId = await inviteUser(app, id, u2.email);
await app.switchUser(u2);
await acceptInviteById(app, id, inviteId, false);
await assertForkSession(id, sessionId, randomUUID(), '', async x => {
await assertForkSession(id, docId, sessionId, randomUUID(), '', async x => {
await t.throwsAsync(
x,
{ instanceOf: Error },
@@ -389,6 +393,7 @@ test('should fork session correctly', async t => {
await app.switchUser(u2);
await assertForkSession(
id,
docId,
forkedSessionId,
latestMessageId!,
'should able to fork a forked session created by other user'
@@ -18,6 +18,7 @@ import {
} from '../models';
import { CopilotModule } from '../plugins/copilot';
import { CopilotContextService } from '../plugins/copilot/context';
import { CopilotCronJobs } from '../plugins/copilot/cron';
import {
CopilotEmbeddingJob,
MockEmbeddingClient,
@@ -77,6 +78,7 @@ type Context = {
jobs: CopilotEmbeddingJob;
storage: CopilotStorage;
workflow: CopilotWorkflowService;
cronJobs: CopilotCronJobs;
executors: {
image: CopilotChatImageExecutor;
text: CopilotChatTextExecutor;
@@ -137,6 +139,7 @@ test.before(async t => {
const jobs = module.get(CopilotEmbeddingJob);
const transcript = module.get(CopilotTranscriptionService);
const workspaceEmbedding = module.get(CopilotWorkspaceService);
const cronJobs = module.get(CopilotCronJobs);

t.context.module = module;
t.context.auth = auth;
@@ -153,6 +156,7 @@ test.before(async t => {
t.context.jobs = jobs;
t.context.transcript = transcript;
t.context.workspaceEmbedding = workspaceEmbedding;
t.context.cronJobs = cronJobs;

t.context.executors = {
image: module.get(CopilotChatImageExecutor),
@@ -1931,3 +1935,71 @@ test('should handle generateSessionTitle correctly under various conditions', as
);
}
});

test('should handle copilot cron jobs correctly', async t => {
const { cronJobs, copilotSession } = t.context;

// mock calls
const mockCleanupResult = { removed: 2, cleaned: 3 };
const mockSessions = [
{ id: 'session1', _count: { messages: 1 } },
{ id: 'session2', _count: { messages: 2 } },
];
const cleanupStub = Sinon.stub(
copilotSession,
'cleanupEmptySessions'
).resolves(mockCleanupResult);
const toBeGenerateStub = Sinon.stub(
copilotSession,
'toBeGenerateTitle'
).resolves(mockSessions);
const jobAddStub = Sinon.stub(cronJobs['jobs'], 'add').resolves();

// daily cleanup job scheduling
{
await cronJobs.dailyCleanupJob();
t.snapshot(
jobAddStub.getCalls().map(call => ({
args: call.args,
})),
'daily job scheduling calls'
);

jobAddStub.reset();
cleanupStub.reset();
toBeGenerateStub.reset();
}

// cleanup empty sessions
{
// mock
cleanupStub.resolves(mockCleanupResult);
toBeGenerateStub.resolves(mockSessions);

await cronJobs.cleanupEmptySessions();
t.snapshot(
cleanupStub.getCalls().map(call => ({
args: call.args.map(arg => (arg instanceof Date ? 'Date' : arg)), // Replace Date with string for stable snapshot
})),
'cleanup empty sessions calls'
);
}

// generate missing titles
await cronJobs.generateMissingTitles();
t.snapshot(
{
modelCalls: toBeGenerateStub.getCalls().map(call => ({
args: call.args,
})),
jobCalls: jobAddStub.getCalls().map(call => ({
args: call.args,
})),
},
'title generation calls'
);

cleanupStub.restore();
toBeGenerateStub.restore();
jobAddStub.restore();
});
@@ -99,3 +99,56 @@ e2e(
t.is(result2.workspace.doc.public, true);
}
);

e2e('should get doc with title and summary', async t => {
const owner = await app.signup();

const workspace = await app.create(Mockers.Workspace, {
owner: { id: owner.id },
});

const docSnapshot = await app.create(Mockers.DocSnapshot, {
workspaceId: workspace.id,
user: owner,
});
const doc = await app.create(Mockers.DocMeta, {
workspaceId: workspace.id,
docId: docSnapshot.id,
title: 'doc1',
summary: 'summary1',
});

const result = await app.gql({
query: getWorkspacePageByIdQuery,
variables: { workspaceId: workspace.id, pageId: doc.docId },
});

t.is(result.workspace.doc.title, doc.title);
t.is(result.workspace.doc.summary, doc.summary);
});

e2e('should get doc with title and null summary', async t => {
const owner = await app.signup();

const workspace = await app.create(Mockers.Workspace, {
owner: { id: owner.id },
});

const docSnapshot = await app.create(Mockers.DocSnapshot, {
workspaceId: workspace.id,
user: owner,
});
const doc = await app.create(Mockers.DocMeta, {
workspaceId: workspace.id,
docId: docSnapshot.id,
title: 'doc1',
});

const result = await app.gql({
query: getWorkspacePageByIdQuery,
variables: { workspaceId: workspace.id, pageId: doc.docId },
});

t.is(result.workspace.doc.title, doc.title);
t.is(result.workspace.doc.summary, null);
});
@@ -73,7 +73,8 @@ e2e('should get comment attachment body', async t => {
docId,
key,
'test.txt',
Buffer.from('test')
Buffer.from('test'),
owner.id
);

const res = await app.GET(
@@ -111,6 +111,19 @@ export class MockCopilotProvider extends OpenAIProvider {
},
],
},
{
id: 'gemini-2.5-pro',
capabilities: [
{
input: [ModelInputType.Text, ModelInputType.Image],
output: [
ModelOutputType.Text,
ModelOutputType.Object,
ModelOutputType.Structured,
],
},
],
},
];

override async text(
@@ -565,3 +565,65 @@ Generated by [AVA](https://avajs.dev).
workspaceSessionExists: true,
},
}

## should cleanup empty sessions correctly

> cleanup empty sessions results

{
cleanupResult: {
cleaned: 0,
removed: 0,
},
remainingSessions: [
{
deleted: false,
pinned: false,
type: 'zeroCost',
},
{
deleted: false,
pinned: false,
type: 'zeroCost',
},
{
deleted: false,
pinned: false,
type: 'noMessages',
},
{
deleted: false,
pinned: false,
type: 'noMessages',
},
{
deleted: false,
pinned: false,
type: 'recent',
},
{
deleted: false,
pinned: false,
type: 'withMessages',
},
],
}

## should get sessions for title generation correctly

> sessions for title generation results

{
onlyValidSessionsReturned: true,
sessions: [
{
assistantMessageCount: 1,
isValid: true,
},
{
assistantMessageCount: 2,
isValid: true,
},
],
total: 2,
}
Binary file not shown.
@@ -917,3 +917,178 @@ test('should handle fork and session attachment operations', async t => {
'attach and detach operation results'
);
});

test('should cleanup empty sessions correctly', async t => {
const { copilotSession, db } = t.context;
await createTestPrompts(copilotSession, db);

const oneDayAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);
const twoHoursAgo = new Date(Date.now() - 2 * 60 * 60 * 1000);

// should be deleted
const neverUsedSessionIds: string[] = [randomUUID(), randomUUID()];
await Promise.all(
neverUsedSessionIds.map(async id => {
await createTestSession(t, { sessionId: id });
await db.aiSession.update({
where: { id },
data: { messageCost: 0, updatedAt: oneDayAgo },
});
})
);

// should be marked as deleted
const emptySessionIds: string[] = [randomUUID(), randomUUID()];
await Promise.all(
emptySessionIds.map(async id => {
await createTestSession(t, { sessionId: id });
await db.aiSession.update({
where: { id },
data: { messageCost: 100, updatedAt: oneDayAgo },
});
})
);

// should not be affected
const recentSessionId = randomUUID();
await createTestSession(t, { sessionId: recentSessionId });
await db.aiSession.update({
where: { id: recentSessionId },
data: { messageCost: 0, updatedAt: twoHoursAgo },
});

// Create session with messages (should not be affected)
const sessionWithMsgId = randomUUID();
await createSessionWithMessages(
t,
{ sessionId: sessionWithMsgId },
'test message'
);

const result = await copilotSession.cleanupEmptySessions(oneDayAgo);

const remainingSessions = await db.aiSession.findMany({
where: {
id: {
in: [
...neverUsedSessionIds,
...emptySessionIds,
recentSessionId,
sessionWithMsgId,
],
},
},
select: { id: true, deletedAt: true, pinned: true },
});

t.snapshot(
{
cleanupResult: result,
remainingSessions: remainingSessions.map(s => ({
deleted: !!s.deletedAt,
pinned: s.pinned,
type: neverUsedSessionIds.includes(s.id)
? 'zeroCost'
: emptySessionIds.includes(s.id)
? 'noMessages'
: s.id === recentSessionId
? 'recent'
: 'withMessages',
})),
},
'cleanup empty sessions results'
);
});

test('should get sessions for title generation correctly', async t => {
const { copilotSession, db } = t.context;
await createTestPrompts(copilotSession, db);

// create valid sessions with messages
const sessionIds: string[] = [randomUUID(), randomUUID()];
await Promise.all(
sessionIds.map(async (id, index) => {
await createTestSession(t, { sessionId: id });
await db.aiSession.update({
where: { id },
data: {
updatedAt: new Date(Date.now() - index * 1000),
messages: {
create: Array.from({ length: index + 1 }, (_, i) => ({
role: 'assistant',
content: `assistant message ${i}`,
})),
},
},
});
})
);

// create excluded sessions
const excludedSessions = [
{
reason: 'hasTitle',
setupFn: async (id: string) => {
await createTestSession(t, { sessionId: id });
await db.aiSession.update({
where: { id },
data: { title: 'Existing Title' },
});
},
},
{
reason: 'isDeleted',
setupFn: async (id: string) => {
await createTestSession(t, { sessionId: id });
await db.aiSession.update({
where: { id },
data: { deletedAt: new Date() },
});
},
},
{
reason: 'noMessages',
setupFn: async (id: string) => {
await createTestSession(t, { sessionId: id });
},
},
{
reason: 'isAction',
setupFn: async (id: string) => {
await createTestSession(t, {
sessionId: id,
promptName: TEST_PROMPTS.ACTION,
});
},
},
{
reason: 'noAssistantMessages',
setupFn: async (id: string) => {
await createTestSession(t, { sessionId: id });
await db.aiSessionMessage.create({
data: { sessionId: id, role: 'user', content: 'User message only' },
});
},
},
];

await Promise.all(
excludedSessions.map(async session => {
await session.setupFn(randomUUID());
})
);

const result = await copilotSession.toBeGenerateTitle();

t.snapshot(
{
total: result.length,
sessions: result.map(s => ({
assistantMessageCount: s._count.messages,
isValid: sessionIds.includes(s.id),
})),
onlyValidSessionsReturned: result.every(s => sessionIds.includes(s.id)),
},
'sessions for title generation results'
);
});
@@ -669,7 +669,10 @@ test('should get doc info', async t => {
};

await t.context.doc.upsert(snapshot);
await t.context.doc.upsertMeta(workspace.id, docId);
await t.context.doc.upsertMeta(workspace.id, docId, {
title: 'test title',
summary: 'test summary',
});

const docInfo = await t.context.doc.getDocInfo(workspace.id, docId);
@@ -679,6 +682,8 @@ test('should get doc info', async t => {
updatedAt: new Date(snapshot.timestamp),
creatorId: user.id,
lastUpdaterId: user.id,
title: 'test title',
summary: 'test summary',
});
});
@@ -433,7 +433,7 @@ export async function submitAudioTranscription(
for (const [idx, buffer] of content.entries()) {
resp = resp.attach(idx.toString(), buffer, {
filename: fileName,
contentType: 'application/octet-stream',
contentType: 'audio/opus',
});
}
@@ -36,6 +36,7 @@ import { DocRendererModule } from './core/doc-renderer';
import { DocServiceModule } from './core/doc-service';
import { FeatureModule } from './core/features';
import { MailModule } from './core/mail';
import { MonitorModule } from './core/monitor';
import { NotificationModule } from './core/notification';
import { PermissionModule } from './core/permission';
import { QuotaModule } from './core/quota';
@@ -112,6 +113,8 @@ export const FunctionalityModules = [
WebSocketModule,
JobModule.forRoot(),
ModelsModule,
ScheduleModule.forRoot(),
MonitorModule,
];

export class AppModuleBuilder {
@@ -151,12 +154,8 @@ export function buildAppModule(env: Env) {
// basic
.use(...FunctionalityModules)

// enable schedule module on graphql server and doc service
.useIf(
() => env.flavors.graphql || env.flavors.doc,
ScheduleModule.forRoot(),
IndexerModule
)
// enable indexer module on graphql server and doc service
.useIf(() => env.flavors.graphql || env.flavors.doc, IndexerModule)

// auth
.use(UserModule, AuthModule, PermissionModule)
@@ -653,12 +653,19 @@ export const USER_FRIENDLY_ERRORS = {
},
no_copilot_provider_available: {
type: 'internal_server_error',
message: `No copilot provider available.`,
args: { modelId: 'string' },
message: ({ modelId }) => `No copilot provider available: ${modelId}`,
},
copilot_failed_to_generate_text: {
type: 'internal_server_error',
message: `Failed to generate text.`,
},
copilot_failed_to_generate_embedding: {
type: 'internal_server_error',
args: { provider: 'string', message: 'string' },
message: ({ provider, message }) =>
`Failed to generate embedding with ${provider}: ${message}`,
},
copilot_failed_to_create_message: {
type: 'internal_server_error',
message: `Failed to create chat message.`,
@@ -668,10 +668,14 @@ export class CopilotSessionDeleted extends UserFriendlyError {
super('action_forbidden', 'copilot_session_deleted', message);
}
}
@ObjectType()
class NoCopilotProviderAvailableDataType {
@Field() modelId!: string
}

export class NoCopilotProviderAvailable extends UserFriendlyError {
constructor(message?: string) {
super('internal_server_error', 'no_copilot_provider_available', message);
constructor(args: NoCopilotProviderAvailableDataType, message?: string | ((args: NoCopilotProviderAvailableDataType) => string)) {
super('internal_server_error', 'no_copilot_provider_available', message, args);
}
}
@@ -680,6 +684,17 @@ export class CopilotFailedToGenerateText extends UserFriendlyError {
super('internal_server_error', 'copilot_failed_to_generate_text', message);
}
}
@ObjectType()
class CopilotFailedToGenerateEmbeddingDataType {
@Field() provider!: string
@Field() message!: string
}

export class CopilotFailedToGenerateEmbedding extends UserFriendlyError {
constructor(args: CopilotFailedToGenerateEmbeddingDataType, message?: string | ((args: CopilotFailedToGenerateEmbeddingDataType) => string)) {
super('internal_server_error', 'copilot_failed_to_generate_embedding', message, args);
}
}

export class CopilotFailedToCreateMessage extends UserFriendlyError {
constructor(message?: string) {
@@ -1179,6 +1194,7 @@ export enum ErrorNames {
COPILOT_SESSION_DELETED,
NO_COPILOT_PROVIDER_AVAILABLE,
COPILOT_FAILED_TO_GENERATE_TEXT,
COPILOT_FAILED_TO_GENERATE_EMBEDDING,
COPILOT_FAILED_TO_CREATE_MESSAGE,
UNSPLASH_IS_NOT_CONFIGURED,
COPILOT_ACTION_TAKEN,
@@ -1239,5 +1255,5 @@ registerEnumType(ErrorNames, {
export const ErrorDataUnionType = createUnionType({
name: 'ErrorDataUnion',
types: () =>
[GraphqlBadRequestDataType, HttpRequestErrorDataType, QueryTooLongDataType, ValidationErrorDataType, WrongSignInCredentialsDataType, UnknownOauthProviderDataType, InvalidOauthCallbackCodeDataType, MissingOauthQueryParameterDataType, InvalidOauthResponseDataType, InvalidEmailDataType, InvalidPasswordLengthDataType, WorkspacePermissionNotFoundDataType, SpaceNotFoundDataType, MemberNotFoundInSpaceDataType, NotInSpaceDataType, AlreadyInSpaceDataType, SpaceAccessDeniedDataType, SpaceOwnerNotFoundDataType, SpaceShouldHaveOnlyOneOwnerDataType, DocNotFoundDataType, DocActionDeniedDataType, DocUpdateBlockedDataType, VersionRejectedDataType, InvalidHistoryTimestampDataType, DocHistoryNotFoundDataType, BlobNotFoundDataType, ExpectToGrantDocUserRolesDataType, ExpectToRevokeDocUserRolesDataType, ExpectToUpdateDocUserRoleDataType, NoMoreSeatDataType, UnsupportedSubscriptionPlanDataType, SubscriptionAlreadyExistsDataType, SubscriptionNotExistsDataType, SameSubscriptionRecurringDataType, SubscriptionPlanNotFoundDataType, CopilotDocNotFoundDataType, CopilotMessageNotFoundDataType, CopilotPromptNotFoundDataType, CopilotProviderNotSupportedDataType, CopilotProviderSideErrorDataType, CopilotInvalidContextDataType, CopilotContextFileNotSupportedDataType, CopilotFailedToModifyContextDataType, CopilotFailedToMatchContextDataType, CopilotFailedToMatchGlobalContextDataType, CopilotFailedToAddWorkspaceFileEmbeddingDataType, RuntimeConfigNotFoundDataType, InvalidRuntimeConfigTypeDataType, InvalidLicenseToActivateDataType, InvalidLicenseUpdateParamsDataType, UnsupportedClientVersionDataType, MentionUserDocAccessDeniedDataType, InvalidAppConfigDataType, InvalidAppConfigInputDataType, InvalidSearchProviderRequestDataType, InvalidIndexerInputDataType] as const,
[GraphqlBadRequestDataType, HttpRequestErrorDataType, QueryTooLongDataType, ValidationErrorDataType, WrongSignInCredentialsDataType, UnknownOauthProviderDataType, InvalidOauthCallbackCodeDataType, MissingOauthQueryParameterDataType, InvalidOauthResponseDataType, InvalidEmailDataType, InvalidPasswordLengthDataType, WorkspacePermissionNotFoundDataType, SpaceNotFoundDataType, MemberNotFoundInSpaceDataType, NotInSpaceDataType, AlreadyInSpaceDataType, SpaceAccessDeniedDataType, SpaceOwnerNotFoundDataType, SpaceShouldHaveOnlyOneOwnerDataType, DocNotFoundDataType, DocActionDeniedDataType, DocUpdateBlockedDataType, VersionRejectedDataType, InvalidHistoryTimestampDataType, DocHistoryNotFoundDataType, BlobNotFoundDataType, ExpectToGrantDocUserRolesDataType, ExpectToRevokeDocUserRolesDataType, ExpectToUpdateDocUserRoleDataType, NoMoreSeatDataType, UnsupportedSubscriptionPlanDataType, SubscriptionAlreadyExistsDataType, SubscriptionNotExistsDataType, SameSubscriptionRecurringDataType, SubscriptionPlanNotFoundDataType, NoCopilotProviderAvailableDataType, CopilotFailedToGenerateEmbeddingDataType, CopilotDocNotFoundDataType, CopilotMessageNotFoundDataType, CopilotPromptNotFoundDataType, CopilotProviderNotSupportedDataType, CopilotProviderSideErrorDataType, CopilotInvalidContextDataType, CopilotContextFileNotSupportedDataType, CopilotFailedToModifyContextDataType, CopilotFailedToMatchContextDataType, CopilotFailedToMatchGlobalContextDataType, CopilotFailedToAddWorkspaceFileEmbeddingDataType, RuntimeConfigNotFoundDataType, InvalidRuntimeConfigTypeDataType, InvalidLicenseToActivateDataType, InvalidLicenseUpdateParamsDataType, UnsupportedClientVersionDataType, MentionUserDocAccessDeniedDataType, InvalidAppConfigDataType, InvalidAppConfigInputDataType, InvalidSearchProviderRequestDataType, InvalidIndexerInputDataType] as const,
});
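These error definitions now carry an args shape plus a message factory, so the rendered text can name the model or provider involved. A rough sketch of how such a map entry might be resolved into a string; the types and helper below are illustrative, not the server's actual error machinery:

```ts
type ErrorSpec<Args> = {
  type: string;
  args?: Record<keyof Args & string, 'string'>;
  message: string | ((args: Args) => string);
};

// Mirrors the no_copilot_provider_available entry from the diff above.
const noCopilotProviderAvailable: ErrorSpec<{ modelId: string }> = {
  type: 'internal_server_error',
  args: { modelId: 'string' },
  message: ({ modelId }) => `No copilot provider available: ${modelId}`,
};

// Hypothetical renderer: static strings pass through, factories get their args.
function renderMessage<Args>(spec: ErrorSpec<Args>, args: Args): string {
  return typeof spec.message === 'function' ? spec.message(args) : spec.message;
}

console.log(renderMessage(noCopilotProviderAvailable, { modelId: 'gemini-2.5-pro' }));
// -> "No copilot provider available: gemini-2.5-pro"
```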
@@ -59,7 +59,9 @@ export type KnownMetricScopes =
| 'mail'
| 'ai'
| 'event'
| 'queue';
| 'queue'
| 'storage'
| 'process';

const metricCreators: MetricCreators = {
counter(meter: Meter, name: string, opts?: MetricOptions) {
@@ -361,7 +361,8 @@ export class CommentResolver {
docId,
key,
attachment.filename ?? key,
buffer
buffer,
me.id
);
return this.commentAttachmentStorage.getUrl(workspaceId, docId, key);
}
@@ -100,7 +100,7 @@ export class PgWorkspaceDocStorageAdapter extends DocStorageAdapter {
{
// keep it simple to let all update merged in one job
jobId: `doc:merge-pending-updates:${workspaceId}:${docId}`,
delay: 30 * 1000 /* 30s */,
delay: 5 * 1000 /* 5s */,
priority: 100,
}
);
packages/backend/server/src/core/monitor/index.ts (9 lines, new file)
@@ -0,0 +1,9 @@
import { Global, Module } from '@nestjs/common';

import { MonitorService } from './service';

@Global()
@Module({
providers: [MonitorService],
})
export class MonitorModule {}
packages/backend/server/src/core/monitor/service.ts (28 lines, new file)
@@ -0,0 +1,28 @@
import { Injectable, Logger } from '@nestjs/common';
import { Cron, CronExpression } from '@nestjs/schedule';

import { metrics } from '../../base';

@Injectable()
export class MonitorService {
protected logger = new Logger(MonitorService.name);

@Cron(CronExpression.EVERY_MINUTE)
async monitor() {
const memoryUsage = process.memoryUsage();
this.logger.log(
`memory usage: rss: ${memoryUsage.rss}, heapTotal: ${memoryUsage.heapTotal}, heapUsed: ${memoryUsage.heapUsed}, external: ${memoryUsage.external}, arrayBuffers: ${memoryUsage.arrayBuffers}`
);
metrics.process.gauge('node_process_rss').record(memoryUsage.rss);
metrics.process
.gauge('node_process_heap_total')
.record(memoryUsage.heapTotal);
metrics.process
.gauge('node_process_heap_used')
.record(memoryUsage.heapUsed);
metrics.process.gauge('node_process_external').record(memoryUsage.external);
metrics.process
.gauge('node_process_array_buffers')
.record(memoryUsage.arrayBuffers);
}
}
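MonitorService samples process.memoryUsage() once a minute and records each field as a gauge under the new 'process' metric scope. A standalone sketch of the same sampling, assuming only Node built-ins and plain logging instead of the server's metrics wrapper:

```ts
// Minimal sketch: what the cron job measures, without NestJS or the metrics wrapper.
function sampleMemory() {
  const { rss, heapTotal, heapUsed, external, arrayBuffers } = process.memoryUsage();
  // In the diff these values feed gauges such as node_process_rss / node_process_heap_used.
  console.log(
    `memory usage: rss: ${rss}, heapTotal: ${heapTotal}, heapUsed: ${heapUsed}, ` +
      `external: ${external}, arrayBuffers: ${arrayBuffers}`
  );
}

// EVERY_MINUTE in the cron expression roughly corresponds to this interval.
const timer = setInterval(sampleMemory, 60_000);
sampleMemory();

// Stop sampling on shutdown so the interval does not keep the process alive.
process.on('SIGTERM', () => clearInterval(timer));
```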
@@ -24,11 +24,12 @@ test.after.always(async () => {
test('should put comment attachment', async t => {
const workspace = await module.create(Mockers.Workspace);
const user = await module.create(Mockers.User);
const docId = randomUUID();
const key = randomUUID();
const blob = Buffer.from('test');

await storage.put(workspace.id, docId, key, 'test.txt', blob);
await storage.put(workspace.id, docId, key, 'test.txt', blob, user.id);

const item = await models.commentAttachment.get(workspace.id, docId, key);
@@ -39,15 +40,17 @@ test('should put comment attachment', async t => {
t.is(item?.mime, 'text/plain');
t.is(item?.size, blob.length);
t.is(item?.name, 'test.txt');
t.is(item?.createdBy, user.id);
});

test('should get comment attachment', async t => {
const workspace = await module.create(Mockers.Workspace);
const user = await module.create(Mockers.User);
const docId = randomUUID();
const key = randomUUID();
const blob = Buffer.from('test');

await storage.put(workspace.id, docId, key, 'test.txt', blob);
await storage.put(workspace.id, docId, key, 'test.txt', blob, user.id);

const item = await storage.get(workspace.id, docId, key);
@@ -62,11 +65,12 @@ test('should get comment attachment', async t => {
test('should get comment attachment with access url', async t => {
const workspace = await module.create(Mockers.Workspace);
const user = await module.create(Mockers.User);
const docId = randomUUID();
const key = randomUUID();
const blob = Buffer.from('test');

await storage.put(workspace.id, docId, key, 'test.txt', blob);
await storage.put(workspace.id, docId, key, 'test.txt', blob, user.id);

const url = storage.getUrl(workspace.id, docId, key);
@@ -79,11 +83,12 @@ test('should get comment attachment with access url', async t => {
test('should delete comment attachment', async t => {
const workspace = await module.create(Mockers.Workspace);
const user = await module.create(Mockers.User);
const docId = randomUUID();
const key = randomUUID();
const blob = Buffer.from('test');

await storage.put(workspace.id, docId, key, 'test.txt', blob);
await storage.put(workspace.id, docId, key, 'test.txt', blob, user.id);

await storage.delete(workspace.id, docId, key);
@@ -94,11 +99,12 @@ test('should delete comment attachment', async t => {
test('should handle comment.attachment.delete event', async t => {
const workspace = await module.create(Mockers.Workspace);
const user = await module.create(Mockers.User);
const docId = randomUUID();
const key = randomUUID();
const blob = Buffer.from('test');

await storage.put(workspace.id, docId, key, 'test.txt', blob);
await storage.put(workspace.id, docId, key, 'test.txt', blob, user.id);

await storage.onCommentAttachmentDelete({
workspaceId: workspace.id,
@@ -113,14 +119,15 @@ test('should handle comment.attachment.delete event', async t => {
test('should handle workspace.deleted event', async t => {
const workspace = await module.create(Mockers.Workspace);
const user = await module.create(Mockers.User);
const docId = randomUUID();
const key1 = randomUUID();
const key2 = randomUUID();
const blob1 = Buffer.from('test');
const blob2 = Buffer.from('test2');

await storage.put(workspace.id, docId, key1, 'test.txt', blob1);
await storage.put(workspace.id, docId, key2, 'test.txt', blob2);
await storage.put(workspace.id, docId, key1, 'test.txt', blob1, user.id);
await storage.put(workspace.id, docId, key2, 'test.txt', blob2, user.id);

const count = module.event.count('comment.attachment.delete');
@@ -4,6 +4,7 @@ import {
autoMetadata,
Config,
EventBus,
metrics,
OnEvent,
type StorageProvider,
StorageProviderFactory,
@@ -59,7 +60,8 @@ export class CommentAttachmentStorage {
docId: string,
key: string,
name: string,
blob: Buffer
blob: Buffer,
userId: string
) {
const meta = autoMetadata(blob);
@@ -68,14 +70,23 @@ export class CommentAttachmentStorage {
blob,
meta
);
const mime = meta.contentType ?? 'application/octet-stream';
const size = blob.length;
await this.models.commentAttachment.upsert({
workspaceId,
docId,
key,
name,
mime: meta.contentType ?? 'application/octet-stream',
size: blob.length,
mime,
size,
createdBy: userId,
});

metrics.storage.histogram('comment_attachment_size').record(size, { mime });
metrics.storage.counter('comment_attachment_total').add(1, { mime });
this.logger.log(
`uploaded comment attachment ${workspaceId}/${docId}/${key} with size ${size}, mime: ${mime}, name: ${name}, user: ${userId}`
);
}

async get(
@@ -195,7 +195,7 @@ export class WorkspacesController {
await this.ac.user(user.id).doc(workspaceId, docId).assert('Doc.Read');

const { body, metadata, redirectUrl } =
await this.commentAttachmentStorage.get(workspaceId, docId, key);
await this.commentAttachmentStorage.get(workspaceId, docId, key, true);

if (redirectUrl) {
return res.redirect(redirectUrl);
@@ -79,6 +79,9 @@ class DocType {
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
title?: string | null;
|
||||
|
||||
@Field(() => String, { nullable: true })
|
||||
summary?: string | null;
|
||||
}
|
||||
|
||||
@InputType()
|
||||
@@ -250,10 +253,11 @@ export class WorkspaceDocResolver {
|
||||
deprecationReason: 'use [WorkspaceType.doc] instead',
|
||||
})
|
||||
async publicPage(
|
||||
@CurrentUser() me: CurrentUser,
|
||||
@Parent() workspace: WorkspaceType,
|
||||
@Args('pageId') pageId: string
|
||||
) {
|
||||
return this.doc(workspace, pageId);
|
||||
return this.doc(me, workspace, pageId);
|
||||
}
|
||||
|
||||
@ResolveField(() => PaginatedDocType)
|
||||
@@ -294,11 +298,14 @@ export class WorkspaceDocResolver {
|
||||
complexity: 2,
|
||||
})
|
||||
async doc(
|
||||
@CurrentUser() me: CurrentUser,
|
||||
@Parent() workspace: WorkspaceType,
|
||||
@Args('docId') docId: string
|
||||
): Promise<DocType> {
|
||||
const doc = await this.models.doc.getDocInfo(workspace.id, docId);
|
||||
if (doc) {
|
||||
// check if doc is readable
|
||||
await this.ac.user(me.id).doc(workspace.id, docId).assert('Doc.Read');
|
||||
return doc;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { PrismaClient } from '@prisma/client';
|
||||
import { chunk } from 'lodash-es';
|
||||
|
||||
type SessionTime = {
|
||||
sessionId: string;
|
||||
@@ -17,16 +18,19 @@ export class CorrectSessionUpdateTime1751966744168 {
|
||||
},
|
||||
});
|
||||
|
||||
await Promise.all(
|
||||
sessionTime
|
||||
.filter((s): s is SessionTime => !!s._max.createdAt)
|
||||
.map(s =>
|
||||
db.aiSession.update({
|
||||
where: { id: s.sessionId },
|
||||
data: { updatedAt: s._max.createdAt },
|
||||
})
|
||||
)
|
||||
);
|
||||
for (const s of chunk(sessionTime, 100)) {
|
||||
const sessions = s.filter((s): s is SessionTime => !!s._max.createdAt);
|
||||
await db.$transaction(async tx => {
|
||||
await Promise.all(
|
||||
sessions.map(s =>
|
||||
tx.aiSession.update({
|
||||
where: { id: s.sessionId },
|
||||
data: { updatedAt: s._max.createdAt },
|
||||
})
|
||||
)
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// revert the migration
|
||||
|
||||
@@ -13,6 +13,7 @@ test.after.always(async () => {
|
||||
|
||||
test('should upsert comment attachment', async t => {
|
||||
const workspace = await module.create(Mockers.Workspace);
|
||||
const user = await module.create(Mockers.User);
|
||||
|
||||
// add
|
||||
const item = await models.commentAttachment.upsert({
|
||||
@@ -22,6 +23,7 @@ test('should upsert comment attachment', async t => {
|
||||
name: 'test-name',
|
||||
mime: 'text/plain',
|
||||
size: 100,
|
||||
createdBy: user.id,
|
||||
});
|
||||
|
||||
t.is(item.workspaceId, workspace.id);
|
||||
@@ -30,6 +32,7 @@ test('should upsert comment attachment', async t => {
|
||||
t.is(item.mime, 'text/plain');
|
||||
t.is(item.size, 100);
|
||||
t.truthy(item.createdAt);
|
||||
t.is(item.createdBy, user.id);
|
||||
|
||||
// update
|
||||
const item2 = await models.commentAttachment.upsert({
|
||||
@@ -46,6 +49,7 @@ test('should upsert comment attachment', async t => {
|
||||
t.is(item2.key, 'test-key');
|
||||
t.is(item2.mime, 'text/html');
|
||||
t.is(item2.size, 200);
|
||||
t.is(item2.createdBy, user.id);
|
||||
|
||||
// make sure only one blob is created
|
||||
const items = await models.commentAttachment.list(workspace.id);
|
||||
|
||||
@@ -32,6 +32,7 @@ export class CommentAttachmentModel extends BaseModel {
|
||||
name: input.name,
|
||||
mime: input.mime,
|
||||
size: input.size,
|
||||
createdBy: input.createdBy,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
@@ -165,6 +165,13 @@ export class CopilotContextModel extends BaseModel {
|
||||
fileId: string,
|
||||
embeddings: Embedding[]
|
||||
) {
|
||||
if (embeddings.length === 0) {
|
||||
this.logger.warn(
|
||||
`No embeddings provided for contextId: ${contextId}, fileId: ${fileId}. Skipping insertion.`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const values = this.processEmbeddings(contextId, fileId, embeddings);
|
||||
|
||||
await this.db.$executeRaw`
|
||||
@@ -204,6 +211,13 @@ export class CopilotContextModel extends BaseModel {
|
||||
docId: string,
|
||||
embeddings: Embedding[]
|
||||
) {
|
||||
if (embeddings.length === 0) {
|
||||
this.logger.warn(
|
||||
`No embeddings provided for workspaceId: ${workspaceId}, docId: ${docId}. Skipping insertion.`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const values = this.processEmbeddings(
|
||||
workspaceId,
|
||||
docId,
|
||||
|
||||
@@ -582,4 +582,56 @@ export class CopilotSessionModel extends BaseModel {
|
||||
.map(({ messageCost, prompt: { action } }) => (action ? 1 : messageCost))
|
||||
.reduce((prev, cost) => prev + cost, 0);
|
||||
}
|
||||
|
||||
@Transactional()
|
||||
async cleanupEmptySessions(earlyThen: Date) {
|
||||
// delete never used sessions
|
||||
const { count: removed } = await this.db.aiSession.deleteMany({
|
||||
where: {
|
||||
messageCost: 0,
|
||||
deletedAt: null,
|
||||
// filter session updated more than 24 hours ago
|
||||
updatedAt: { lt: earlyThen },
|
||||
},
|
||||
});
|
||||
|
||||
// mark empty sessions as deleted
|
||||
const { count: cleaned } = await this.db.aiSession.updateMany({
|
||||
where: {
|
||||
deletedAt: null,
|
||||
messages: { none: {} },
|
||||
// filter session updated more than 24 hours ago
|
||||
updatedAt: { lt: earlyThen },
|
||||
},
|
||||
data: {
|
||||
deletedAt: new Date(),
|
||||
pinned: false,
|
||||
},
|
||||
});
|
||||
|
||||
return { removed, cleaned };
|
||||
}
|
||||
|
||||
@Transactional()
|
||||
async toBeGenerateTitle() {
|
||||
const sessions = await this.db.aiSession
|
||||
.findMany({
|
||||
where: {
|
||||
title: null,
|
||||
deletedAt: null,
|
||||
messages: { some: {} },
|
||||
// only generate titles for non-actions sessions
|
||||
prompt: { action: null },
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
// count assistant messages
|
||||
_count: { select: { messages: { where: { role: 'assistant' } } } },
|
||||
},
|
||||
orderBy: { updatedAt: 'desc' },
|
||||
})
|
||||
.then(s => s.filter(s => s._count.messages > 0));
|
||||
|
||||
return sessions;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -283,6 +283,13 @@ export class CopilotWorkspaceConfigModel extends BaseModel {
|
||||
fileId: string,
|
||||
embeddings: Embedding[]
|
||||
) {
|
||||
if (embeddings.length === 0) {
|
||||
this.logger.warn(
|
||||
`No embeddings provided for workspaceId: ${workspaceId}, fileId: ${fileId}. Skipping insertion.`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const values = this.processEmbeddings(workspaceId, fileId, embeddings);
|
||||
await this.db.$executeRaw`
|
||||
INSERT INTO "ai_workspace_file_embeddings"
|
||||
|
||||
@@ -558,6 +558,8 @@ export class DocModel extends BaseModel {
|
||||
mode: PublicDocMode;
|
||||
public: boolean;
|
||||
defaultRole: DocRole;
|
||||
title: string | null;
|
||||
summary: string | null;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
creatorId?: string;
|
||||
@@ -570,6 +572,8 @@ export class DocModel extends BaseModel {
|
||||
"workspace_pages"."mode" as "mode",
|
||||
"workspace_pages"."public" as "public",
|
||||
"workspace_pages"."defaultRole" as "defaultRole",
|
||||
"workspace_pages"."title" as "title",
|
||||
"workspace_pages"."summary" as "summary",
|
||||
"snapshots"."created_at" as "createdAt",
|
||||
"snapshots"."updated_at" as "updatedAt",
|
||||
"snapshots"."created_by" as "creatorId",
|
||||
|
||||
@@ -125,7 +125,10 @@ export class CopilotContextService implements OnApplicationBootstrap {
|
||||
|
||||
async get(id: string): Promise<ContextSession> {
|
||||
if (!this.embeddingClient) {
|
||||
throw new NoCopilotProviderAvailable('embedding client not configured');
|
||||
throw new NoCopilotProviderAvailable(
|
||||
{ modelId: 'embedding' },
|
||||
'embedding client not configured'
|
||||
);
|
||||
}
|
||||
|
||||
const context = await this.getCachedSession(id);
|
||||
|
||||
@@ -124,7 +124,7 @@ export class CopilotController implements BeforeApplicationShutdown {
|
||||
modelId: model,
|
||||
});
|
||||
if (!provider) {
|
||||
throw new NoCopilotProviderAvailable();
|
||||
throw new NoCopilotProviderAvailable({ modelId: model });
|
||||
}
|
||||
|
||||
return { provider, model, hasAttachment };
|
||||
@@ -299,6 +299,13 @@ export class CopilotController implements BeforeApplicationShutdown {
|
||||
this.ongoingStreamCount$.next(this.ongoingStreamCount$.value + 1);
|
||||
|
||||
const { signal, onConnectionClosed } = getSignal(req);
|
||||
let endBeforePromiseResolve = false;
|
||||
onConnectionClosed(isAborted => {
|
||||
if (isAborted) {
|
||||
endBeforePromiseResolve = true;
|
||||
}
|
||||
});
|
||||
|
||||
const { messageId, reasoning, webSearch } = ChatQuerySchema.parse(query);
|
||||
|
||||
const source$ = from(
|
||||
@@ -322,21 +329,21 @@ export class CopilotController implements BeforeApplicationShutdown {
|
||||
shared$.pipe(
|
||||
reduce((acc, chunk) => acc + chunk, ''),
|
||||
tap(buffer => {
|
||||
onConnectionClosed(isAborted => {
|
||||
session.push({
|
||||
role: 'assistant',
|
||||
content: isAborted ? '> Request aborted' : buffer,
|
||||
createdAt: new Date(),
|
||||
});
|
||||
void session
|
||||
.save()
|
||||
.catch(err =>
|
||||
this.logger.error(
|
||||
'Failed to save session in sse stream',
|
||||
err
|
||||
)
|
||||
);
|
||||
session.push({
|
||||
role: 'assistant',
|
||||
content: endBeforePromiseResolve
|
||||
? '> Request aborted'
|
||||
: buffer,
|
||||
createdAt: new Date(),
|
||||
});
|
||||
void session
|
||||
.save()
|
||||
.catch(err =>
|
||||
this.logger.error(
|
||||
'Failed to save session in sse stream',
|
||||
err
|
||||
)
|
||||
);
|
||||
}),
|
||||
ignoreElements()
|
||||
)
|
||||
@@ -384,6 +391,13 @@ export class CopilotController implements BeforeApplicationShutdown {
|
||||
this.ongoingStreamCount$.next(this.ongoingStreamCount$.value + 1);
|
||||
|
||||
const { signal, onConnectionClosed } = getSignal(req);
|
||||
let endBeforePromiseResolve = false;
|
||||
onConnectionClosed(isAborted => {
|
||||
if (isAborted) {
|
||||
endBeforePromiseResolve = true;
|
||||
}
|
||||
});
|
||||
|
||||
const { messageId, reasoning, webSearch } = ChatQuerySchema.parse(query);
|
||||
|
||||
const source$ = from(
|
||||
@@ -407,25 +421,25 @@ export class CopilotController implements BeforeApplicationShutdown {
|
||||
shared$.pipe(
|
||||
reduce((acc, chunk) => acc.concat([chunk]), [] as StreamObject[]),
|
||||
tap(result => {
|
||||
onConnectionClosed(isAborted => {
|
||||
const parser = new StreamObjectParser();
|
||||
const streamObjects = parser.mergeTextDelta(result);
|
||||
const content = parser.mergeContent(streamObjects);
|
||||
session.push({
|
||||
role: 'assistant',
|
||||
content: isAborted ? '> Request aborted' : content,
|
||||
streamObjects: isAborted ? null : streamObjects,
|
||||
createdAt: new Date(),
|
||||
});
|
||||
void session
|
||||
.save()
|
||||
.catch(err =>
|
||||
this.logger.error(
|
||||
'Failed to save session in sse stream',
|
||||
err
|
||||
)
|
||||
);
|
||||
const parser = new StreamObjectParser();
|
||||
const streamObjects = parser.mergeTextDelta(result);
|
||||
const content = parser.mergeContent(streamObjects);
|
||||
session.push({
|
||||
role: 'assistant',
|
||||
content: endBeforePromiseResolve
|
||||
? '> Request aborted'
|
||||
: content,
|
||||
streamObjects: endBeforePromiseResolve ? null : streamObjects,
|
||||
createdAt: new Date(),
|
||||
});
|
||||
void session
|
||||
.save()
|
||||
.catch(err =>
|
||||
this.logger.error(
|
||||
'Failed to save session in sse stream',
|
||||
err
|
||||
)
|
||||
);
|
||||
}),
|
||||
ignoreElements()
|
||||
)
|
||||
@@ -477,6 +491,13 @@ export class CopilotController implements BeforeApplicationShutdown {
|
||||
this.ongoingStreamCount$.next(this.ongoingStreamCount$.value + 1);
|
||||
|
||||
const { signal, onConnectionClosed } = getSignal(req);
|
||||
let endBeforePromiseResolve = false;
|
||||
onConnectionClosed(isAborted => {
|
||||
if (isAborted) {
|
||||
endBeforePromiseResolve = true;
|
||||
}
|
||||
});
|
||||
|
||||
const source$ = from(
|
||||
this.workflow.runGraph(params, session.model, {
|
||||
...session.config.promptConfig,
|
||||
@@ -526,21 +547,21 @@ export class CopilotController implements BeforeApplicationShutdown {
|
||||
return acc;
|
||||
}, ''),
|
||||
tap(content => {
|
||||
onConnectionClosed(isAborted => {
|
||||
session.push({
|
||||
role: 'assistant',
|
||||
content: isAborted ? '> Request aborted' : content,
|
||||
createdAt: new Date(),
|
||||
});
|
||||
void session
|
||||
.save()
|
||||
.catch(err =>
|
||||
this.logger.error(
|
||||
'Failed to save session in sse stream',
|
||||
err
|
||||
)
|
||||
);
|
||||
session.push({
|
||||
role: 'assistant',
|
||||
content: endBeforePromiseResolve
|
||||
? '> Request aborted'
|
||||
: content,
|
||||
createdAt: new Date(),
|
||||
});
|
||||
void session
|
||||
.save()
|
||||
.catch(err =>
|
||||
this.logger.error(
|
||||
'Failed to save session in sse stream',
|
||||
err
|
||||
)
|
||||
);
|
||||
}),
|
||||
ignoreElements()
|
||||
)
|
||||
@@ -604,6 +625,13 @@ export class CopilotController implements BeforeApplicationShutdown {
|
||||
this.ongoingStreamCount$.next(this.ongoingStreamCount$.value + 1);
|
||||
|
||||
const { signal, onConnectionClosed } = getSignal(req);
|
||||
let endBeforePromiseResolve = false;
|
||||
onConnectionClosed(isAborted => {
|
||||
if (isAborted) {
|
||||
endBeforePromiseResolve = true;
|
||||
}
|
||||
});
|
||||
|
||||
const source$ = from(
|
||||
provider.streamImages(
|
||||
{
|
||||
@@ -639,22 +667,20 @@ export class CopilotController implements BeforeApplicationShutdown {
|
||||
shared$.pipe(
|
||||
reduce((acc, chunk) => acc.concat([chunk]), [] as string[]),
|
||||
tap(attachments => {
|
||||
onConnectionClosed(isAborted => {
|
||||
session.push({
|
||||
role: 'assistant',
|
||||
content: isAborted ? '> Request aborted' : '',
|
||||
attachments: isAborted ? [] : attachments,
|
||||
createdAt: new Date(),
|
||||
});
|
||||
void session
|
||||
.save()
|
||||
.catch(err =>
|
||||
this.logger.error(
|
||||
'Failed to save session in sse stream',
|
||||
err
|
||||
)
|
||||
);
|
||||
session.push({
|
||||
role: 'assistant',
|
||||
content: endBeforePromiseResolve ? '> Request aborted' : '',
|
||||
attachments: endBeforePromiseResolve ? [] : attachments,
|
||||
createdAt: new Date(),
|
||||
});
|
||||
void session
|
||||
.save()
|
||||
.catch(err =>
|
||||
this.logger.error(
|
||||
'Failed to save session in sse stream',
|
||||
err
|
||||
)
|
||||
);
|
||||
}),
|
||||
ignoreElements()
|
||||
)
|
||||
|
||||
71
packages/backend/server/src/plugins/copilot/cron.ts
Normal file
71
packages/backend/server/src/plugins/copilot/cron.ts
Normal file
@@ -0,0 +1,71 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { Cron, CronExpression } from '@nestjs/schedule';
|
||||
|
||||
import { JobQueue, OneDay, OnJob } from '../../base';
|
||||
import { Models } from '../../models';
|
||||
|
||||
declare global {
|
||||
interface Jobs {
|
||||
'copilot.session.cleanupEmptySessions': {};
|
||||
'copilot.session.generateMissingTitles': {};
|
||||
}
|
||||
}
|
||||
|
||||
@Injectable()
|
||||
export class CopilotCronJobs {
|
||||
private readonly logger = new Logger(CopilotCronJobs.name);
|
||||
|
||||
constructor(
|
||||
private readonly models: Models,
|
||||
private readonly jobs: JobQueue
|
||||
) {}
|
||||
|
||||
@Cron(CronExpression.EVERY_DAY_AT_MIDNIGHT)
|
||||
async dailyCleanupJob() {
|
||||
await this.jobs.add(
|
||||
'copilot.session.cleanupEmptySessions',
|
||||
{},
|
||||
{ jobId: 'daily-copilot-cleanup-empty-sessions' }
|
||||
);
|
||||
|
||||
await this.jobs.add(
|
||||
'copilot.session.generateMissingTitles',
|
||||
{},
|
||||
{ jobId: 'daily-copilot-generate-missing-titles' }
|
||||
);
|
||||
}
|
||||
|
||||
async triggerGenerateMissingTitles() {
|
||||
await this.jobs.add(
|
||||
'copilot.session.generateMissingTitles',
|
||||
{},
|
||||
{ jobId: 'trigger-copilot-generate-missing-titles' }
|
||||
);
|
||||
}
|
||||
|
||||
@OnJob('copilot.session.cleanupEmptySessions')
|
||||
async cleanupEmptySessions() {
|
||||
const { removed, cleaned } =
|
||||
await this.models.copilotSession.cleanupEmptySessions(
|
||||
new Date(Date.now() - OneDay)
|
||||
);
|
||||
|
||||
this.logger.log(
|
||||
`Cleanup completed: ${removed} sessions deleted, ${cleaned} sessions marked as deleted`
|
||||
);
|
||||
}
|
||||
|
||||
@OnJob('copilot.session.generateMissingTitles')
|
||||
async generateMissingTitles() {
|
||||
const sessions = await this.models.copilotSession.toBeGenerateTitle();
|
||||
|
||||
for (const session of sessions) {
|
||||
await this.jobs.add('copilot.session.generateTitle', {
|
||||
sessionId: session.id,
|
||||
});
|
||||
}
|
||||
this.logger.log(
|
||||
`Scheduled title generation for ${sessions.length} sessions`
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -5,6 +5,7 @@ import {
|
||||
CopilotPromptNotFound,
|
||||
CopilotProviderNotSupported,
|
||||
} from '../../../base';
|
||||
import { CopilotFailedToGenerateEmbedding } from '../../../base/error/errors.gen';
|
||||
import { ChunkSimilarity, Embedding } from '../../../models';
|
||||
import { PromptService } from '../prompt';
|
||||
import {
|
||||
@@ -74,6 +75,12 @@ class ProductionEmbeddingClient extends EmbeddingClient {
|
||||
input,
|
||||
{ dimensions: EMBEDDING_DIMENSIONS }
|
||||
);
|
||||
if (embeddings.length !== input.length) {
|
||||
throw new CopilotFailedToGenerateEmbedding({
|
||||
provider: provider.type,
|
||||
message: `Expected ${input.length} embeddings, got ${embeddings.length}`,
|
||||
});
|
||||
}
|
||||
|
||||
return Array.from(embeddings.entries()).map(([index, embedding]) => ({
|
||||
index,
|
||||
@@ -112,11 +119,14 @@ class ProductionEmbeddingClient extends EmbeddingClient {
|
||||
);
|
||||
|
||||
try {
|
||||
return ranks.map((score, chunk) => ({
|
||||
chunk,
|
||||
targetId: this.getTargetId(embeddings[chunk]),
|
||||
score,
|
||||
}));
|
||||
return ranks.map((score, i) => {
|
||||
const chunk = embeddings[i];
|
||||
return {
|
||||
chunk: chunk.chunk,
|
||||
targetId: this.getTargetId(chunk),
|
||||
score: Math.max(score, 1 - (chunk.distance || -Infinity)),
|
||||
};
|
||||
});
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to parse rerank results', error);
|
||||
// silent error, will fallback to default sorting in parent method
|
||||
@@ -148,7 +158,7 @@ class ProductionEmbeddingClient extends EmbeddingClient {
|
||||
|
||||
const chunks = sortedEmbeddings.reduce(
|
||||
(acc, e) => {
|
||||
const targetId = 'docId' in e ? e.docId : 'fileId' in e ? e.fileId : '';
|
||||
const targetId = this.getTargetId(e);
|
||||
const key = `${targetId}:${e.chunk}`;
|
||||
acc[key] = e;
|
||||
return acc;
|
||||
@@ -179,7 +189,10 @@ class ProductionEmbeddingClient extends EmbeddingClient {
|
||||
.filter(Boolean);
|
||||
|
||||
this.logger.verbose(
|
||||
`ReRank completed: ${highConfidenceChunks.length} high-confidence results found`
|
||||
`ReRank completed: ${highConfidenceChunks.length} high-confidence results found, total ${sortedEmbeddings.length} embeddings`,
|
||||
highConfidenceChunks.length !== sortedEmbeddings.length
|
||||
? JSON.stringify(ranks)
|
||||
: undefined
|
||||
);
|
||||
return highConfidenceChunks.slice(0, topK);
|
||||
} catch (error) {
|
||||
|
||||
@@ -15,6 +15,7 @@ import {
|
||||
CopilotContextService,
|
||||
} from './context';
|
||||
import { CopilotController } from './controller';
|
||||
import { CopilotCronJobs } from './cron';
|
||||
import { CopilotEmbeddingJob } from './embedding';
|
||||
import { ChatMessageCache } from './message';
|
||||
import { PromptService } from './prompt';
|
||||
@@ -64,6 +65,8 @@ import {
|
||||
CopilotContextResolver,
|
||||
CopilotContextService,
|
||||
CopilotEmbeddingJob,
|
||||
// cron jobs
|
||||
CopilotCronJobs,
|
||||
// transcription
|
||||
CopilotTranscriptionService,
|
||||
CopilotTranscriptionResolver,
|
||||
|
||||
@@ -303,7 +303,8 @@ const textActions: Prompt[] = [
|
||||
{
|
||||
name: 'Transcript audio',
|
||||
action: 'Transcript audio',
|
||||
model: 'gemini-2.5-flash',
|
||||
model: 'gemini-2.5-pro',
|
||||
optionalModels: ['gemini-2.5-flash', 'gemini-2.5-pro'],
|
||||
messages: [
|
||||
{
|
||||
role: 'system',
|
||||
@@ -338,7 +339,7 @@ Convert a multi-speaker audio recording into a structured JSON format by transcr
|
||||
{
|
||||
name: 'Rerank results',
|
||||
action: 'Rerank results',
|
||||
model: 'gpt-4.1-mini',
|
||||
model: 'gpt-4.1',
|
||||
messages: [
|
||||
{
|
||||
role: 'system',
|
||||
@@ -366,6 +367,31 @@ Convert a multi-speaker audio recording into a structured JSON format by transcr
|
||||
requireAttachment: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'Conversation Summary',
|
||||
action: 'Conversation Summary',
|
||||
model: 'gpt-4.1-2025-04-14',
|
||||
messages: [
|
||||
{
|
||||
role: 'system',
|
||||
content: `You are an expert conversation summarizer. Your job is to distill long dialogues into clear, compact summaries that preserve every key decision, fact, and open question. When asked, always:
|
||||
• Honor any explicit “focus” the user gives you.
|
||||
• Match the desired length style:
|
||||
- “brief” → 1-2 sentences
|
||||
- “detailed” → ≈ 5 sentences or short bullet list
|
||||
- “comprehensive” → full paragraph(s) covering all salient points.
|
||||
• Write in neutral, third-person prose and never add new information.
|
||||
Return only the summary text—no headings, labels, or commentary.`,
|
||||
},
|
||||
{
|
||||
role: 'user',
|
||||
content: `Summarize the conversation below so it can be carried forward without loss.\n\nFocus: {{focus}}\nDesired length: {{length}}\n\nConversation:\n{{#messages}}\n{{role}}: {{content}}\n{{/messages}}`,
|
||||
},
|
||||
],
|
||||
config: {
|
||||
requireContent: false,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'Summary',
|
||||
action: 'Summary',
|
||||
@@ -1286,7 +1312,7 @@ If there are items in the content that can be used as to-do tasks, please refer
|
||||
{
|
||||
name: 'Make it real',
|
||||
action: 'Make it real',
|
||||
model: 'gpt-4.1-2025-04-14',
|
||||
model: 'claude-sonnet-4@20250514',
|
||||
messages: [
|
||||
{
|
||||
role: 'system',
|
||||
@@ -1327,7 +1353,7 @@ When sent new wireframes, respond ONLY with the contents of the html file.`,
|
||||
{
|
||||
name: 'Make it real with text',
|
||||
action: 'Make it real with text',
|
||||
model: 'gpt-4.1-2025-04-14',
|
||||
model: 'claude-sonnet-4@20250514',
|
||||
messages: [
|
||||
{
|
||||
role: 'system',
|
||||
@@ -1598,6 +1624,166 @@ const imageActions: Prompt[] = [
|
||||
},
|
||||
];
|
||||
|
||||
const modelActions: Prompt[] = [
|
||||
{
|
||||
name: 'Apply Updates',
|
||||
action: 'Apply Updates',
|
||||
model: 'claude-sonnet-4@20250514',
|
||||
messages: [
|
||||
{
|
||||
role: 'user',
|
||||
content: `
|
||||
You are a Markdown document update engine.
|
||||
|
||||
You will be given:
|
||||
|
||||
1. content: The original Markdown document
|
||||
- The content is structured into blocks.
|
||||
- Each block starts with a comment like <!-- block_id=... flavour=... --> and contains the block's content.
|
||||
- The content is {{content}}
|
||||
|
||||
2. op: A description of the edit intention
|
||||
- This describes the semantic meaning of the edit, such as "Bold the first paragraph".
|
||||
- The op is {{op}}
|
||||
|
||||
3. updates: A Markdown snippet
|
||||
- The updates is {{updates}}
|
||||
- This represents the block-level changes to apply to the original Markdown.
|
||||
- The update may:
|
||||
- **Replace** an existing block (same block_id, new content)
|
||||
- **Delete** block(s) using <!-- delete block BLOCK_ID -->
|
||||
- **Insert** new block(s) with a new unique block_id
|
||||
- When performing deletions, the update will include **surrounding context blocks** (or use <!-- existing blocks -->) to help you determine where and what to delete.
|
||||
|
||||
Your task:
|
||||
- Apply the update in <updates> to the document in <code>, following the intent described in <op>.
|
||||
- Preserve all block_id and flavour comments.
|
||||
- Maintain the original block order unless the update clearly appends new blocks.
|
||||
- Do not remove or alter unrelated blocks.
|
||||
- Output only the fully updated Markdown content. Do not wrap the content in \`\`\`markdown.
|
||||
|
||||
---
|
||||
|
||||
✍️ Examples
|
||||
|
||||
✅ Replacement (modifying an existing block)
|
||||
|
||||
<code>
|
||||
<!-- block_id=101 flavour=paragraph -->
|
||||
## Introduction
|
||||
|
||||
<!-- block_id=102 flavour=paragraph -->
|
||||
This document provides an overview of the system architecture and its components.
|
||||
</code>
|
||||
|
||||
<op>
|
||||
Make the introduction more formal.
|
||||
</op>
|
||||
|
||||
<updates>
|
||||
<!-- block_id=102 flavour=paragraph -->
|
||||
This document outlines the architectural design and individual components of the system in detail.
|
||||
</updates>
|
||||
|
||||
Expected Output:
|
||||
<!-- block_id=101 flavour=paragraph -->
|
||||
## Introduction
|
||||
|
||||
<!-- block_id=102 flavour=paragraph -->
|
||||
This document outlines the architectural design and individual components of the system in detail.
|
||||
|
||||
---
|
||||
|
||||
➕ Insertion (adding new content)
|
||||
|
||||
<code>
|
||||
<!-- block_id=201 flavour=paragraph -->
|
||||
# Project Summary
|
||||
|
||||
<!-- block_id=202 flavour=paragraph -->
|
||||
This project aims to build a collaborative text editing tool.
|
||||
</code>
|
||||
|
||||
<op>
|
||||
Add a disclaimer section at the end.
|
||||
</op>
|
||||
|
||||
<updates>
|
||||
<!-- block_id=new-301 flavour=paragraph -->
|
||||
## Disclaimer
|
||||
|
||||
<!-- block_id=new-302 flavour=paragraph -->
|
||||
This document is subject to change. Do not distribute externally.
|
||||
</updates>
|
||||
|
||||
Expected Output:
|
||||
<!-- block_id=201 flavour=paragraph -->
|
||||
# Project Summary
|
||||
|
||||
<!-- block_id=202 flavour=paragraph -->
|
||||
This project aims to build a collaborative text editing tool.
|
||||
|
||||
<!-- block_id=new-301 flavour=paragraph -->
|
||||
## Disclaimer
|
||||
|
||||
<!-- block_id=new-302 flavour=paragraph -->
|
||||
This document is subject to change. Do not distribute externally.
|
||||
|
||||
---
|
||||
|
||||
❌ Deletion (removing blocks)
|
||||
|
||||
<code>
|
||||
<!-- block_id=401 flavour=paragraph -->
|
||||
## Author
|
||||
|
||||
<!-- block_id=402 flavour=paragraph -->
|
||||
Written by the AI team at OpenResearch.
|
||||
|
||||
<!-- block_id=403 flavour=paragraph -->
|
||||
## Experimental Section
|
||||
|
||||
<!-- block_id=404 flavour=paragraph -->
|
||||
The following section is still under development and may change without notice.
|
||||
|
||||
<!-- block_id=405 flavour=paragraph -->
|
||||
## License
|
||||
|
||||
<!-- block_id=406 flavour=paragraph -->
|
||||
This document is licensed under CC BY-NC 4.0.
|
||||
</code>
|
||||
|
||||
<op>
|
||||
Remove the experimental section.
|
||||
</op>
|
||||
|
||||
<updates>
|
||||
<!-- delete block_id=403 -->
|
||||
<!-- delete block_id=404 -->
|
||||
</updates>
|
||||
|
||||
Expected Output:
|
||||
<!-- block_id=401 flavour=paragraph -->
|
||||
## Author
|
||||
|
||||
<!-- block_id=402 flavour=paragraph -->
|
||||
Written by the AI team at OpenResearch.
|
||||
|
||||
<!-- block_id=405 flavour=paragraph -->
|
||||
## License
|
||||
|
||||
<!-- block_id=406 flavour=paragraph -->
|
||||
This document is licensed under CC BY-NC 4.0.
|
||||
|
||||
---
|
||||
|
||||
Now apply the \`updates\` to the \`content\`, following the intent in \`op\`, and return the updated Markdown.
|
||||
`,
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
const CHAT_PROMPT: Omit<Prompt, 'name'> = {
|
||||
model: 'claude-sonnet-4@20250514',
|
||||
optionalModels: [
|
||||
@@ -1677,7 +1863,7 @@ This sentence contains information from the first source[^1]. This sentence refe
|
||||
Before starting Tool calling, you need to follow:
|
||||
- DO NOT explain what operation you will perform.
|
||||
- DO NOT embed a tool call mid-sentence.
|
||||
- When searching for unknown information or keyword, prioritize searching the user's workspace.
|
||||
- When searching for unknown information, personal information or keyword, prioritize searching the user's workspace rather than the web.
|
||||
- Depending on the complexity of the question and the information returned by the search tools, you can call different tools multiple times to search.
|
||||
</tool-calling-guidelines>
|
||||
|
||||
@@ -1770,11 +1956,75 @@ const chat: Prompt[] = [
|
||||
},
|
||||
];
|
||||
|
||||
const artifactActions: Prompt[] = [
|
||||
{
|
||||
name: 'Code Artifact',
|
||||
model: 'claude-sonnet-4@20250514',
|
||||
messages: [
|
||||
{
|
||||
role: 'system',
|
||||
content: `
|
||||
When sent new notes, respond ONLY with the contents of the html file.
|
||||
DO NOT INCLUDE ANY OTHER TEXT, EXPLANATIONS, APOLOGIES, OR INTRODUCTORY/CLOSING PHRASES.
|
||||
IF USER DOES NOT SPECIFY A STYLE, FOLLOW THE DEFAULT STYLE.
|
||||
<generate_guide>
|
||||
- The results should be a single HTML file.
|
||||
- Use tailwindcss to style the website
|
||||
- Put any additional CSS styles in a style tag and any JavaScript in a script tag.
|
||||
- Use unpkg or skypack to import any required dependencies.
|
||||
- Use Google fonts to pull in any open source fonts you require.
|
||||
- Use lucide icons for any icons.
|
||||
- If you have any images, load them from Unsplash or use solid colored rectangles.
|
||||
</generate_guide>
|
||||
|
||||
<DO_NOT_USE_COLORS>
|
||||
- DO NOT USE ANY COLORS
|
||||
</DO_NOT_USE_COLORS>
|
||||
<DO_NOT_USE_GRADIENTS>
|
||||
- DO NOT USE ANY GRADIENTS
|
||||
</DO_NOT_USE_GRADIENTS>
|
||||
|
||||
<COLOR_THEME>
|
||||
- --affine-blue-300: #93e2fd
|
||||
- --affine-blue-400: #60cffa
|
||||
- --affine-blue-500: #3ab5f7
|
||||
- --affine-blue-600: #1e96eb
|
||||
- --affine-blue-700: #1e67af
|
||||
- --affine-text-primary-color: #121212
|
||||
- --affine-text-secondary-color: #8e8d91
|
||||
- --affine-text-disable-color: #a9a9ad
|
||||
- --affine-background-overlay-panel-color: #fbfbfc
|
||||
- --affine-background-secondary-color: #f4f4f5
|
||||
- --affine-background-primary-color: #fff
|
||||
</COLOR_THEME>
|
||||
<default_style_guide>
|
||||
- MUST USE White and Blue(#1e96eb) as the primary color
|
||||
- KEEP THE DEFAULT STYLE SIMPLE AND CLEAN
|
||||
- DO NOT USE ANY COMPLEX STYLES
|
||||
- DO NOT USE ANY GRADIENTS
|
||||
- USE LESS SHADOWS
|
||||
- USE RADIUS 4px or 8px for rounded corners
|
||||
- USE 12px or 16px for padding
|
||||
- Use the tailwind color gray, zinc, slate, neutral much more.
|
||||
- Use 0.5px border should be better
|
||||
</default_style_guide>
|
||||
`,
|
||||
},
|
||||
{
|
||||
role: 'user',
|
||||
content: '{{content}}',
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
export const prompts: Prompt[] = [
|
||||
...textActions,
|
||||
...imageActions,
|
||||
...modelActions,
|
||||
...chat,
|
||||
...workflows,
|
||||
...artifactActions,
|
||||
];
|
||||
|
||||
export async function refreshPrompts(db: PrismaClient) {
|
||||
|
||||
@@ -53,8 +53,11 @@ export class PromptService implements OnApplicationBootstrap {
|
||||
* @returns prompt messages
|
||||
*/
|
||||
async get(name: string): Promise<ChatPrompt | null> {
|
||||
const cached = this.cache.get(name);
|
||||
if (cached) return cached;
|
||||
// skip cache in dev mode to ensure the latest prompt is always fetched
|
||||
if (!env.dev) {
|
||||
const cached = this.cache.get(name);
|
||||
if (cached) return cached;
|
||||
}
|
||||
|
||||
const prompt = await this.db.aiPrompt.findUnique({
|
||||
where: {
|
||||
|
||||
@@ -62,6 +62,7 @@ export abstract class AnthropicProvider<T> extends CopilotProvider<T> {
|
||||
|
||||
try {
|
||||
metrics.ai.counter('chat_text_calls').add(1, { model: model.id });
|
||||
|
||||
const [system, msgs] = await chatToGPTMessage(messages, true, true);
|
||||
|
||||
const modelInstance = this.instance(model.id);
|
||||
|
||||
@@ -88,6 +88,12 @@ export abstract class GeminiProvider<T> extends CopilotProvider<T> {
|
||||
system,
|
||||
messages: msgs,
|
||||
abortSignal: options.signal,
|
||||
providerOptions: {
|
||||
google: this.getGeminiOptions(options, model.id),
|
||||
},
|
||||
tools: await this.getTools(options, model.id),
|
||||
maxSteps: this.MAX_STEPS,
|
||||
experimental_continueSteps: true,
|
||||
});
|
||||
|
||||
if (!text) throw new Error('Failed to generate text');
|
||||
@@ -233,12 +239,16 @@ export abstract class GeminiProvider<T> extends CopilotProvider<T> {
|
||||
taskType: 'RETRIEVAL_DOCUMENT',
|
||||
});
|
||||
|
||||
const { embeddings } = await embedMany({
|
||||
model: modelInstance,
|
||||
values: messages,
|
||||
});
|
||||
const embeddings = await Promise.allSettled(
|
||||
messages.map(m =>
|
||||
embedMany({ model: modelInstance, values: [m], maxRetries: 3 })
|
||||
)
|
||||
);
|
||||
|
||||
return embeddings.filter(v => v && Array.isArray(v));
|
||||
return embeddings
|
||||
.map(e => (e.status === 'fulfilled' ? e.value.embeddings : null))
|
||||
.flat()
|
||||
.filter((v): v is number[] => !!v && Array.isArray(v));
|
||||
} catch (e: any) {
|
||||
metrics.ai
|
||||
.counter('generate_embedding_errors')
|
||||
@@ -254,16 +264,16 @@ export abstract class GeminiProvider<T> extends CopilotProvider<T> {
|
||||
) {
|
||||
const [system, msgs] = await chatToGPTMessage(messages);
|
||||
const { fullStream } = streamText({
|
||||
model: this.instance(model.id, {
|
||||
useSearchGrounding: this.useSearchGrounding(options),
|
||||
}),
|
||||
model: this.instance(model.id),
|
||||
system,
|
||||
messages: msgs,
|
||||
abortSignal: options.signal,
|
||||
maxSteps: this.MAX_STEPS,
|
||||
providerOptions: {
|
||||
google: this.getGeminiOptions(options, model.id),
|
||||
},
|
||||
tools: await this.getTools(options, model.id),
|
||||
maxSteps: this.MAX_STEPS,
|
||||
experimental_continueSteps: true,
|
||||
});
|
||||
return fullStream;
|
||||
}
|
||||
@@ -282,8 +292,4 @@ export abstract class GeminiProvider<T> extends CopilotProvider<T> {
|
||||
private isReasoningModel(model: string) {
|
||||
return model.startsWith('gemini-2.5');
|
||||
}
|
||||
|
||||
private useSearchGrounding(options: CopilotChatOptions) {
|
||||
return options?.tools?.includes('webSearch');
|
||||
}
|
||||
}
|
||||
|
||||
@@ -37,6 +37,24 @@ export class MorphProvider extends CopilotProvider<MorphConfig> {
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
id: 'morph-v3-fast',
|
||||
capabilities: [
|
||||
{
|
||||
input: [ModelInputType.Text],
|
||||
output: [ModelOutputType.Text],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
id: 'morph-v3-large',
|
||||
capabilities: [
|
||||
{
|
||||
input: [ModelInputType.Text],
|
||||
output: [ModelOutputType.Text],
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
#instance!: VercelOpenAICompatibleProvider;
|
||||
|
||||
@@ -274,9 +274,11 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
|
||||
override getProviderSpecificTools(
|
||||
toolName: CopilotChatTools,
|
||||
model: string
|
||||
): [string, Tool] | undefined {
|
||||
): [string, Tool?] | undefined {
|
||||
if (toolName === 'webSearch' && !this.isReasoningModel(model)) {
|
||||
return ['web_search_preview', openai.tools.webSearchPreview()];
|
||||
} else if (toolName === 'docEdit') {
|
||||
return ['doc_edit', undefined];
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -21,6 +21,7 @@ import {
|
||||
buildDocKeywordSearchGetter,
|
||||
buildDocSearchGetter,
|
||||
createCodeArtifactTool,
|
||||
createConversationSummaryTool,
|
||||
createDocComposeTool,
|
||||
createDocEditTool,
|
||||
createDocKeywordSearchTool,
|
||||
@@ -126,7 +127,7 @@ export abstract class CopilotProvider<C = any> {
|
||||
protected getProviderSpecificTools(
|
||||
_toolName: CopilotChatTools,
|
||||
_model: string
|
||||
): [string, Tool] | undefined {
|
||||
): [string, Tool?] | undefined {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -139,19 +140,39 @@ export abstract class CopilotProvider<C = any> {
|
||||
if (options?.tools?.length) {
|
||||
this.logger.debug(`getTools: ${JSON.stringify(options.tools)}`);
|
||||
const ac = this.moduleRef.get(AccessController, { strict: false });
|
||||
const docReader = this.moduleRef.get(DocReader, { strict: false });
|
||||
const models = this.moduleRef.get(Models, { strict: false });
|
||||
const prompt = this.moduleRef.get(PromptService, {
|
||||
strict: false,
|
||||
});
|
||||
|
||||
for (const tool of options.tools) {
|
||||
const toolDef = this.getProviderSpecificTools(tool, model);
|
||||
if (toolDef) {
|
||||
tools[toolDef[0]] = toolDef[1];
|
||||
// allow provider prevent tool creation
|
||||
if (toolDef[1]) {
|
||||
tools[toolDef[0]] = toolDef[1];
|
||||
}
|
||||
continue;
|
||||
}
|
||||
switch (tool) {
|
||||
case 'codeArtifact': {
|
||||
tools.code_artifact = createCodeArtifactTool(prompt, this.factory);
|
||||
break;
|
||||
}
|
||||
case 'conversationSummary': {
|
||||
tools.conversation_summary = createConversationSummaryTool(
|
||||
options.session,
|
||||
prompt,
|
||||
this.factory
|
||||
);
|
||||
break;
|
||||
}
|
||||
case 'docEdit': {
|
||||
const doc = this.moduleRef.get(DocReader, { strict: false });
|
||||
const getDocContent = buildContentGetter(ac, doc);
|
||||
const getDocContent = buildContentGetter(ac, docReader);
|
||||
tools.doc_edit = createDocEditTool(
|
||||
this.factory,
|
||||
prompt,
|
||||
getDocContent.bind(null, options)
|
||||
);
|
||||
break;
|
||||
@@ -160,11 +181,15 @@ export abstract class CopilotProvider<C = any> {
|
||||
const context = this.moduleRef.get(CopilotContextService, {
|
||||
strict: false,
|
||||
});
|
||||
|
||||
const docContext = options.session
|
||||
? await context.getBySessionId(options.session)
|
||||
: null;
|
||||
const searchDocs = buildDocSearchGetter(ac, context, docContext);
|
||||
const searchDocs = buildDocSearchGetter(
|
||||
ac,
|
||||
context,
|
||||
docContext,
|
||||
models
|
||||
);
|
||||
tools.doc_semantic_search = createDocSemanticSearchTool(
|
||||
searchDocs.bind(null, options)
|
||||
);
|
||||
@@ -172,9 +197,6 @@ export abstract class CopilotProvider<C = any> {
|
||||
}
|
||||
case 'docKeywordSearch': {
|
||||
if (this.AFFiNEConfig.indexer.enabled) {
|
||||
const ac = this.moduleRef.get(AccessController, {
|
||||
strict: false,
|
||||
});
|
||||
const indexerService = this.moduleRef.get(IndexerService, {
|
||||
strict: false,
|
||||
});
|
||||
@@ -189,9 +211,6 @@ export abstract class CopilotProvider<C = any> {
|
||||
break;
|
||||
}
|
||||
case 'docRead': {
|
||||
const ac = this.moduleRef.get(AccessController, { strict: false });
|
||||
const models = this.moduleRef.get(Models, { strict: false });
|
||||
const docReader = this.moduleRef.get(DocReader, { strict: false });
|
||||
const getDoc = buildDocContentGetter(ac, docReader, models);
|
||||
tools.doc_read = createDocReadTool(getDoc.bind(null, options));
|
||||
break;
|
||||
@@ -202,23 +221,7 @@ export abstract class CopilotProvider<C = any> {
|
||||
break;
|
||||
}
|
||||
case 'docCompose': {
|
||||
const promptService = this.moduleRef.get(PromptService, {
|
||||
strict: false,
|
||||
});
|
||||
tools.doc_compose = createDocComposeTool(
|
||||
promptService,
|
||||
this.factory
|
||||
);
|
||||
break;
|
||||
}
|
||||
case 'codeArtifact': {
|
||||
const promptService = this.moduleRef.get(PromptService, {
|
||||
strict: false,
|
||||
});
|
||||
tools.code_artifact = createCodeArtifactTool(
|
||||
promptService,
|
||||
this.factory
|
||||
);
|
||||
tools.doc_compose = createDocComposeTool(prompt, this.factory);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -60,6 +60,8 @@ export const VertexSchema: JSONSchema = {
|
||||
export const PromptConfigStrictSchema = z.object({
|
||||
tools: z
|
||||
.enum([
|
||||
'codeArtifact',
|
||||
'conversationSummary',
|
||||
// work with morph
|
||||
'docEdit',
|
||||
// work with indexer
|
||||
@@ -71,7 +73,6 @@ export const PromptConfigStrictSchema = z.object({
|
||||
'webSearch',
|
||||
// artifact tools
|
||||
'docCompose',
|
||||
'codeArtifact',
|
||||
])
|
||||
.array()
|
||||
.nullable()
|
||||
|
||||
@@ -6,20 +6,10 @@ import {
|
||||
ImagePart,
|
||||
TextPart,
|
||||
TextStreamPart,
|
||||
ToolSet,
|
||||
} from 'ai';
|
||||
import { ZodType } from 'zod';
|
||||
|
||||
import {
|
||||
createCodeArtifactTool,
|
||||
createDocComposeTool,
|
||||
createDocEditTool,
|
||||
createDocKeywordSearchTool,
|
||||
createDocReadTool,
|
||||
createDocSemanticSearchTool,
|
||||
createExaCrawlTool,
|
||||
createExaSearchTool,
|
||||
} from '../tools';
|
||||
import { CustomAITools } from '../tools';
|
||||
import { PromptMessage, StreamObject } from './types';
|
||||
|
||||
type ChatMessage = CoreUserMessage | CoreAssistantMessage;
|
||||
@@ -385,17 +375,6 @@ export class CitationParser {
|
||||
}
|
||||
}
|
||||
|
||||
export interface CustomAITools extends ToolSet {
|
||||
doc_edit: ReturnType<typeof createDocEditTool>;
|
||||
doc_semantic_search: ReturnType<typeof createDocSemanticSearchTool>;
|
||||
doc_keyword_search: ReturnType<typeof createDocKeywordSearchTool>;
|
||||
doc_read: ReturnType<typeof createDocReadTool>;
|
||||
doc_compose: ReturnType<typeof createDocComposeTool>;
|
||||
web_search_exa: ReturnType<typeof createExaSearchTool>;
|
||||
web_crawl_exa: ReturnType<typeof createExaCrawlTool>;
|
||||
code_artifact: ReturnType<typeof createCodeArtifactTool>;
|
||||
}
|
||||
|
||||
type ChunkType = TextStreamPart<CustomAITools>['type'];
|
||||
|
||||
export function toError(error: unknown): Error {
|
||||
@@ -451,6 +430,10 @@ export class TextStreamParser {
|
||||
);
|
||||
result = this.addPrefix(result);
|
||||
switch (chunk.toolName) {
|
||||
case 'conversation_summary': {
|
||||
result += `\nSummarizing context\n`;
|
||||
break;
|
||||
}
|
||||
case 'web_search_exa': {
|
||||
result += `\nSearching the web "${chunk.args.query}"\n`;
|
||||
break;
|
||||
@@ -489,10 +472,18 @@ export class TextStreamParser {
|
||||
result = this.addPrefix(result);
|
||||
switch (chunk.toolName) {
|
||||
case 'doc_edit': {
|
||||
if (chunk.result && typeof chunk.result === 'object') {
|
||||
result += `\n${chunk.result.result}\n`;
|
||||
if (
|
||||
chunk.result &&
|
||||
typeof chunk.result === 'object' &&
|
||||
Array.isArray(chunk.result.result)
|
||||
) {
|
||||
result += chunk.result.result
|
||||
.map(item => {
|
||||
return `\n${item.changedContent}\n`;
|
||||
})
|
||||
.join('');
|
||||
this.docEditFootnotes[this.docEditFootnotes.length - 1].result =
|
||||
chunk.result.result;
|
||||
result;
|
||||
} else {
|
||||
this.docEditFootnotes.pop();
|
||||
}
|
||||
|
||||
@@ -23,6 +23,7 @@ import {
|
||||
CallMetric,
|
||||
CopilotDocNotFound,
|
||||
CopilotFailedToCreateMessage,
|
||||
CopilotProviderSideError,
|
||||
CopilotSessionNotFound,
|
||||
type FileUpload,
|
||||
paginate,
|
||||
@@ -31,14 +32,18 @@ import {
|
||||
RequestMutex,
|
||||
Throttle,
|
||||
TooManyRequest,
|
||||
UserFriendlyError,
|
||||
} from '../../base';
|
||||
import { CurrentUser } from '../../core/auth';
|
||||
import { Admin } from '../../core/common';
|
||||
import { DocReader } from '../../core/doc';
|
||||
import { AccessController } from '../../core/permission';
|
||||
import { UserType } from '../../core/user';
|
||||
import type { ListSessionOptions, UpdateChatSession } from '../../models';
|
||||
import { CopilotCronJobs } from './cron';
|
||||
import { PromptService } from './prompt';
|
||||
import { PromptMessage, StreamObject } from './providers';
|
||||
import { CopilotProviderFactory } from './providers/factory';
|
||||
import { ChatSessionService } from './session';
|
||||
import { CopilotStorage } from './storage';
|
||||
import { type ChatHistory, type ChatMessage, SubmittedMessage } from './types';
|
||||
@@ -396,7 +401,9 @@ export class CopilotResolver {
|
||||
private readonly ac: AccessController,
|
||||
private readonly mutex: RequestMutex,
|
||||
private readonly chatSession: ChatSessionService,
|
||||
private readonly storage: CopilotStorage
|
||||
private readonly storage: CopilotStorage,
|
||||
private readonly docReader: DocReader,
|
||||
private readonly providerFactory: CopilotProviderFactory
|
||||
) {}
|
||||
|
||||
@ResolveField(() => CopilotQuotaType, {
|
||||
@@ -724,6 +731,65 @@ export class CopilotResolver {
|
||||
}
|
||||
}
|
||||
|
||||
@Query(() => String, {
|
||||
description:
|
||||
'Apply updates to a doc using LLM and return the merged markdown.',
|
||||
})
|
||||
async applyDocUpdates(
|
||||
@CurrentUser() user: CurrentUser,
|
||||
@Args({ name: 'workspaceId', type: () => String })
|
||||
workspaceId: string,
|
||||
@Args({ name: 'docId', type: () => String })
|
||||
docId: string,
|
||||
@Args({ name: 'op', type: () => String })
|
||||
op: string,
|
||||
@Args({ name: 'updates', type: () => String })
|
||||
updates: string
|
||||
): Promise<string> {
|
||||
await this.assertPermission(user, { workspaceId, docId });
|
||||
|
||||
const docContent = await this.docReader.getDocMarkdown(
|
||||
workspaceId,
|
||||
docId,
|
||||
true
|
||||
);
|
||||
if (!docContent || !docContent.markdown) {
|
||||
throw new NotFoundException('Doc not found or empty');
|
||||
}
|
||||
|
||||
const markdown = docContent.markdown.trim();
|
||||
|
||||
// Get LLM provider
|
||||
const provider =
|
||||
await this.providerFactory.getProviderByModel('morph-v3-large');
|
||||
if (!provider) {
|
||||
throw new BadRequestException('No LLM provider available');
|
||||
}
|
||||
|
||||
try {
|
||||
return await provider.text(
|
||||
{ modelId: 'morph-v3-large' },
|
||||
[
|
||||
{
|
||||
role: 'user',
|
||||
content: `<instruction>${op}</instruction>\n<code>${markdown}</code>\n<update>${updates}</update>`,
|
||||
},
|
||||
],
|
||||
{ reasoning: false }
|
||||
);
|
||||
} catch (e: any) {
|
||||
if (e instanceof UserFriendlyError) {
|
||||
throw e;
|
||||
} else {
|
||||
throw new CopilotProviderSideError({
|
||||
provider: provider.type,
|
||||
kind: 'unexpected_response',
|
||||
message: e?.message || 'Unexpected apply response',
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private transformToSessionType(
|
||||
session: Omit<ChatHistory, 'messages'>
|
||||
): CopilotSessionType {
|
||||
@@ -773,7 +839,18 @@ class CreateCopilotPromptInput {
|
||||
@Admin()
|
||||
@Resolver(() => String)
|
||||
export class PromptsManagementResolver {
|
||||
constructor(private readonly promptService: PromptService) {}
|
||||
constructor(
|
||||
private readonly cron: CopilotCronJobs,
|
||||
private readonly promptService: PromptService
|
||||
) {}
|
||||
|
||||
@Query(() => Boolean, {
|
||||
description: 'Trigger generate missing titles cron job',
|
||||
})
|
||||
async triggerGenerateTitleCron() {
|
||||
await this.cron.triggerGenerateMissingTitles();
|
||||
return true;
|
||||
}
|
||||
|
||||
@Query(() => [CopilotPromptType], {
|
||||
description: 'List all copilot prompts',
|
||||
|
||||
@@ -507,6 +507,8 @@ export class ChatSessionService {
|
||||
return await this.models.copilotSession.fork({
|
||||
...session,
|
||||
userId: options.userId,
|
||||
// docId can be changed in fork
|
||||
docId: options.docId,
|
||||
sessionId: randomUUID(),
|
||||
parentSessionId: options.sessionId,
|
||||
messages,
|
||||
@@ -569,7 +571,7 @@ export class ChatSessionService {
|
||||
});
|
||||
|
||||
if (!provider) {
|
||||
throw new NoCopilotProviderAvailable();
|
||||
throw new NoCopilotProviderAvailable({ modelId: prompt.model });
|
||||
}
|
||||
|
||||
return provider.text(cond, [...prompt.finish({}), msg], config);
|
||||
|
||||
@@ -5,9 +5,7 @@ import { z } from 'zod';
|
||||
import type { PromptService } from '../prompt';
|
||||
import type { CopilotProviderFactory } from '../providers';
|
||||
import { toolError } from './error';
|
||||
|
||||
const logger = new Logger('CodeArtifactTool');
|
||||
|
||||
/**
|
||||
* A copilot tool that produces a completely self-contained HTML artifact.
|
||||
* The returned HTML must include <style> and <script> tags directly so that
|
||||
@@ -37,23 +35,20 @@ export const createCodeArtifactTool = (
|
||||
}),
|
||||
execute: async ({ title, userPrompt }) => {
|
||||
try {
|
||||
const prompt = await promptService.get('Make it real with text');
|
||||
const prompt = await promptService.get('Code Artifact');
|
||||
if (!prompt) {
|
||||
throw new Error('Prompt not found');
|
||||
}
|
||||
|
||||
const provider = await factory.getProviderByModel(prompt.model);
|
||||
if (!provider) {
|
||||
throw new Error('Provider not found');
|
||||
}
|
||||
|
||||
const content = await provider.text(
|
||||
{
|
||||
modelId: prompt.model,
|
||||
},
|
||||
[...prompt.finish({}), { role: 'user', content: userPrompt }]
|
||||
prompt.finish({ content: userPrompt })
|
||||
);
|
||||
|
||||
// Remove surrounding ``` or ```html fences if present
|
||||
let stripped = content.trim();
|
||||
if (stripped.startsWith('```')) {
|
||||
@@ -65,7 +60,6 @@ export const createCodeArtifactTool = (
|
||||
stripped = stripped.slice(0, -3);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
title,
|
||||
html: stripped,
|
||||
|
||||
@@ -0,0 +1,76 @@
|
||||
import { Logger } from '@nestjs/common';
|
||||
import { tool } from 'ai';
|
||||
import { z } from 'zod';
|
||||
|
||||
import type { PromptService } from '../prompt';
|
||||
import type { CopilotProviderFactory } from '../providers';
|
||||
import { toolError } from './error';
|
||||
|
||||
const logger = new Logger('ConversationSummaryTool');
|
||||
|
||||
export const createConversationSummaryTool = (
|
||||
sessionId: string | undefined,
|
||||
promptService: PromptService,
|
||||
factory: CopilotProviderFactory
|
||||
) => {
|
||||
return tool({
|
||||
description:
|
||||
'Create a concise, AI-generated summary of the conversation so far—capturing key topics, decisions, and critical details. Use this tool whenever the context becomes lengthy to preserve essential information that might otherwise be lost to truncation in future turns.',
|
||||
parameters: z.object({
|
||||
focus: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe(
|
||||
'Optional focus area for the summary (e.g., "technical decisions", "user requirements", "project status")'
|
||||
),
|
||||
length: z
|
||||
.enum(['brief', 'detailed', 'comprehensive'])
|
||||
.default('detailed')
|
||||
.describe(
|
||||
'The desired length of the summary: brief (1-2 sentences), detailed (paragraph), comprehensive (multiple paragraphs)'
|
||||
),
|
||||
}),
|
||||
execute: async ({ focus, length }, { messages }) => {
|
||||
try {
|
||||
if (!messages || messages.length === 0) {
|
||||
return toolError(
|
||||
'No Conversation Context',
|
||||
'No messages available to summarize'
|
||||
);
|
||||
}
|
||||
|
||||
const prompt = await promptService.get('Conversation Summary');
|
||||
const provider = await factory.getProviderByModel(prompt?.model || '');
|
||||
|
||||
if (!prompt || !provider) {
|
||||
return toolError(
|
||||
'Prompt Not Found',
|
||||
'Failed to summarize conversation.'
|
||||
);
|
||||
}
|
||||
|
||||
const summary = await provider.text(
|
||||
{ modelId: prompt.model },
|
||||
prompt.finish({
|
||||
messages: messages.map(m => ({
|
||||
...m,
|
||||
content: m.content.toString(),
|
||||
})),
|
||||
focus: focus || 'general',
|
||||
length,
|
||||
})
|
||||
);
|
||||
|
||||
return {
|
||||
focusArea: focus || 'general',
|
||||
messageCount: messages.length,
|
||||
summary,
|
||||
timestamp: new Date().toISOString(),
|
||||
};
|
||||
} catch (err: any) {
|
||||
logger.error(`Failed to summarize conversation (${sessionId})`, err);
|
||||
return toolError('Conversation Summary Failed', err.message);
|
||||
}
|
||||
},
|
||||
});
|
||||
};
|
||||
@@ -3,6 +3,7 @@ import { z } from 'zod';
|
||||
|
||||
import { DocReader } from '../../../core/doc';
|
||||
import { AccessController } from '../../../core/permission';
|
||||
import { type PromptService } from '../prompt';
|
||||
import type { CopilotChatOptions, CopilotProviderFactory } from '../providers';
|
||||
|
||||
export const buildContentGetter = (ac: AccessController, doc: DocReader) => {
|
||||
@@ -24,14 +25,20 @@ export const buildContentGetter = (ac: AccessController, doc: DocReader) => {
|
||||
|
||||
export const createDocEditTool = (
|
||||
factory: CopilotProviderFactory,
|
||||
prompt: PromptService,
|
||||
getContent: (targetId?: string) => Promise<string | undefined>
|
||||
) => {
|
||||
return tool({
|
||||
description: `
|
||||
Use this tool to propose an edit to a structured Markdown document with identifiable blocks. Each block begins with a comment like <!-- block_id=... -->, and represents a unit of editable content such as a heading, paragraph, list, or code snippet.
|
||||
Use this tool to propose an edit to a structured Markdown document with identifiable blocks.
|
||||
Each block begins with a comment like <!-- block_id=... -->, and represents a unit of editable content such as a heading, paragraph, list, or code snippet.
|
||||
This will be read by a less intelligent model, which will quickly apply the edit. You should make it clear what the edit is, while also minimizing the unchanged code you write.

Your task is to return a list of block-level changes needed to fulfill the user's intent. Each change should correspond to a specific user instruction and be represented by one of the following operations:
If you receive a markdown without block_id comments, you should call \`doc_read\` tool to get the content.

Your task is to return a list of block-level changes needed to fulfill the user's intent. **Each change in code_edit must be completely independent: each code_edit entry should only perform a single, isolated change, and must not include the effects of other changes. For example, the updates for a delete operation should only show the context related to the deletion, and must not include any content modified by other operations (such as bolding or insertion). This ensures that each change can be applied independently and in any order.**

Each change should correspond to a specific user instruction and be represented by one of the following operations:

replace: Replace the content of a block with updated Markdown.

@@ -41,83 +48,75 @@ insert: Add a new block, and specify its block_id and content.

Important Instructions:
- Use the existing block structure as-is. Do not reformat or reorder blocks unless explicitly asked.
- Always preserve block_id and type in your replacements.
- When replacing a block, use the full new block including <!-- block_id=... type=... --> and the updated content.
- When inserting, follow the same format as a replacement, but ensure the new block_id does not conflict with existing IDs.
- When replacing content, always keep the original block_id unchanged.
- When deleting content, only use the format <!-- delete block_id=xxx -->, and only for valid block_id present in the original <code> content.
- Each list item should be a block.
- Use <!-- existing blocks ... --> for unchanged sections.
- If you plan on deleting a section, you must provide surrounding context to indicate the deletion.
- Your task is to return a list of block-level changes needed to fulfill the user's intent.
- **Each change in code_edit must be completely independent: each code_edit entry should only perform a single, isolated change, and must not include the effects of other changes. For example, the updates for a delete operation should only show the context related to the deletion, and must not include any content modified by other operations (such as bolding or insertion). This ensures that each change can be applied independently and in any order.**

Example Input Document:
\`\`\`md
<!-- block_id=block-001 type=paragraph -->
# My Holiday Plan
Original Content:
\`\`\`markdown
<!-- block_id=001 flavour=paragraph -->
# Andriy Shevchenko

<!-- block_id=block-002 type=paragraph -->
I plan to travel to Paris, France, where I will visit the Eiffel Tower, the Louvre, and the Champs-Élysées.
<!-- block_id=002 flavour=paragraph -->
## Player Profile

<!-- block_id=block-003 type=paragraph -->
I love Paris.
<!-- block_id=003 flavour=paragraph -->
Andriy Shevchenko is a legendary Ukrainian striker, best known for his time at AC Milan and Dynamo Kyiv. He won the Ballon d'Or in 2004.

<!-- block_id=block-004 type=paragraph -->
## Reason for the delay
<!-- block_id=004 flavour=paragraph -->
## Career Overview

<!-- block_id=block-005 type=paragraph -->
This plan has been brewing for a long time, but I always postponed it because I was too busy with work.

<!-- block_id=block-006 type=paragraph -->
## Trip Steps

<!-- block_id=block-007 type=list -->
- Book flight tickets
<!-- block_id=block-008 type=list -->
- Reserve a hotel
<!-- block_id=block-009 type=list -->
- Prepare visa documents
<!-- block_id=block-010 type=list -->
- Plan the itinerary

<!-- block_id=block-011 type=paragraph -->
Additionally, I plan to learn some basic French to make communication easier during the trip.
<!-- block_id=005 flavour=list -->
- Born in 1976 in Ukraine.
<!-- block_id=006 flavour=list -->
- Rose to fame at Dynamo Kyiv in the 1990s.
<!-- block_id=007 flavour=list -->
- Starred at AC Milan (1999–2006), scoring over 170 goals.
<!-- block_id=008 flavour=list -->
- Played for Chelsea (2006–2009) before returning to Kyiv.
<!-- block_id=009 flavour=list -->
- Coached Ukraine national team, reaching Euro 2020 quarter-finals.
\`\`\`

Example User Request:

User Request:
\`\`\`
Translate the trip steps to Chinese, remove the reason for the delay, and bold the final paragraph.
Bold the player’s name in the intro, add a summary section at the end, and remove the career overview.
\`\`\`

Expected Output:

\`\`\`md
<!-- existing blocks ... -->

<!-- block_id=block-002 type=paragraph -->
I plan to travel to Paris, France, where I will visit the Eiffel Tower, the Louvre, and the Champs-Élysées.

<!-- block_id=block-003 type=paragraph -->
I love Paris.

<!-- delete block-004 -->

<!-- delete block-005 -->

<!-- block_id=block-006 type=paragraph -->
## Trip Steps

<!-- block_id=block-007 type=list -->
- 订机票
<!-- block_id=block-008 type=list -->
- 预定酒店
<!-- block_id=block-009 type=list -->
- 准备签证材料
<!-- block_id=block-010 type=list -->
- 规划行程

<!-- existing blocks ... -->

<!-- block_id=block-011 type=paragraph -->
**Additionally, I plan to learn some basic French to make communication easier during the trip.**
Example response:
\`\`\`json
[
{
"op": "Bold the player's name in the introduction",
"updates": "
<!-- block_id=003 flavour=paragraph -->
**Andriy Shevchenko** is a legendary Ukrainian striker, best known for his time at AC Milan and Dynamo Kyiv. He won the Ballon d'Or in 2004.
"
},
{
"op": "Add a summary section at the end",
"updates": "
<!-- block_id=new-abc123 flavour=paragraph -->
## Summary
<!-- block_id=new-def456 flavour=paragraph -->
Shevchenko is celebrated as one of the greatest Ukrainian footballers of all time. Known for his composure, strength, and goal-scoring instinct, he left a lasting legacy both on and off the pitch.
"
},
{
"op": "Delete the career overview section",
"updates": "
<!-- delete block_id=004 -->
<!-- delete block_id=005 -->
<!-- delete block_id=006 -->
<!-- delete block_id=007 -->
<!-- delete block_id=008 -->
<!-- delete block_id=009 -->
"
}
]
\`\`\`
You should specify the following arguments before the others: [doc_id], [origin_content]

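To make the block-edit protocol above concrete, here is a minimal TypeScript sketch of how a single independent `updates` fragment could be merged into a block-annotated markdown string. This is illustrative only: the production path delegates the merge to the `Apply Updates` prompt and an LLM provider (see the `execute` handler below), and all helper names here are invented. Insertion of brand-new blocks is omitted for brevity.

```ts
// Illustrative helper, not part of this diff: merge one independent edit
// fragment into a block-annotated markdown string. Block comments are assumed
// to follow the `<!-- block_id=... flavour=... -->` shape used in the prompt.
type Block = { id: string; header: string; body: string };

const BLOCK_RE = /<!--\s*block_id=(\S+)[^>]*-->/;
const DELETE_RE = /<!--\s*delete\s+block_id=(\S+?)\s*-->/g;

function parseBlocks(markdown: string): Block[] {
  const blocks: Block[] = [];
  let current: Block | null = null;
  for (const line of markdown.split('\n')) {
    const match = line.match(BLOCK_RE);
    if (match) {
      if (current) blocks.push(current);
      current = { id: match[1], header: line, body: '' };
    } else if (current) {
      current.body += (current.body ? '\n' : '') + line;
    }
  }
  if (current) blocks.push(current);
  return blocks;
}

// Apply a single `updates` fragment: replace blocks with matching ids and
// honor `<!-- delete block_id=... -->` markers; everything else is kept,
// which is why each fragment can be applied independently and in any order.
function applyBlockEdit(original: string, updates: string): string {
  const replacements = new Map(parseBlocks(updates).map(b => [b.id, b]));
  const deleted = new Set([...updates.matchAll(DELETE_RE)].map(m => m[1]));
  return parseBlocks(original)
    .filter(block => !deleted.has(block.id))
    .map(block => {
      const next = replacements.get(block.id);
      return next ? `${next.header}\n${next.body}` : `${block.header}\n${block.body}`;
    })
    .join('\n');
}
```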
@@ -144,14 +143,32 @@ You should specify the following arguments before the others: [doc_id], [origin_
),

code_edit: z
.string()
.array(
z.object({
op: z
.string()
.describe(
'A short description of the change, such as "Bold intro name"'
),
updates: z
.string()
.describe(
'Markdown block fragments that represent the change, including the block_id and type'
),
})
)
.describe(
'Specify only the necessary Markdown block-level changes. Return a list of inserted, replaced, or deleted blocks. Each block must start with its <!-- block_id=... type=... --> comment. Use <!-- existing blocks ... --> for unchanged sections.If you plan on deleting a section, you must provide surrounding context to indicate the deletion.'
'An array of independent semantic changes to apply to the document.'
),
}),
execute: async ({ doc_id, origin_content, code_edit }) => {
try {
const provider = await factory.getProviderByModel('morph-v2');
const applyPrompt = await prompt.get('Apply Updates');
if (!applyPrompt) {
return 'Prompt not found';
}
const model = applyPrompt.model;
const provider = await factory.getProviderByModel(model);
if (!provider) {
return 'Editing docs is not supported';
}
@@ -160,14 +177,27 @@ You should specify the following arguments before the others: [doc_id], [origin_
if (!content) {
return 'Doc not found or doc is empty';
}
const result = await provider.text({ modelId: 'morph-v2' }, [
{
role: 'user',
content: `<code>${content}</code>\n<update>${code_edit}</update>`,
},
]);

return { result, content };
const changedContents = await Promise.all(
code_edit.map(async edit => {
return await provider.text({ modelId: model }, [
...applyPrompt.finish({
content,
op: edit.op,
updates: edit.updates,
}),
]);
})
);

return {
result: changedContents.map((changedContent, index) => ({
op: code_edit[index].op,
updates: code_edit[index].updates,
originalContent: content,
changedContent,
})),
};
} catch {
return 'Failed to apply edit to the doc';
}

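With the reshaped return value, every independent `code_edit` entry yields its own merged document. For the example request above, the tool result might look roughly like the following sketch (values are invented for illustration, not actual model output):

```ts
// Illustrative shape of the doc_edit tool result after this change;
// the strings are invented for the Shevchenko example above.
const exampleDocEditResult = {
  result: [
    {
      op: "Bold the player's name in the introduction",
      updates:
        '<!-- block_id=003 flavour=paragraph -->\n**Andriy Shevchenko** is a legendary Ukrainian striker, ...',
      originalContent:
        '<!-- block_id=001 flavour=paragraph -->\n# Andriy Shevchenko\n...',
      // Merged markdown returned by the provider for this single edit only.
      changedContent:
        '<!-- block_id=001 flavour=paragraph -->\n# Andriy Shevchenko\n...\n<!-- block_id=003 flavour=paragraph -->\n**Andriy Shevchenko** is a legendary Ukrainian striker, ...',
    },
    // one entry per code_edit item, returned in the same order
  ],
};
```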
@@ -2,7 +2,7 @@ import { tool } from 'ai';
import { z } from 'zod';

import type { AccessController } from '../../../core/permission';
import type { ChunkSimilarity } from '../../../models';
import type { ChunkSimilarity, Models } from '../../../models';
import type { CopilotContextService } from '../context';
import type { ContextSession } from '../context/session';
import type { CopilotChatOptions } from '../providers';
@@ -11,7 +11,8 @@ import { toolError } from './error';
export const buildDocSearchGetter = (
ac: AccessController,
context: CopilotContextService,
docContext: ContextSession | null
docContext: ContextSession | null,
models: Models
) => {
const searchDocs = async (
options: CopilotChatOptions,
@@ -45,7 +46,24 @@ export const buildDocSearchGetter = (
}
if (!docChunks.length && !fileChunks.length)
return `No results found for "${query}".`;
return [...fileChunks, ...docChunks];

const docMetas = await models.doc
.findAuthors(
docChunks.map(c => ({
// oxlint-disable-next-line no-non-null-assertion
workspaceId: options.workspace!,
docId: c.docId,
}))
)
.then(docs => new Map(docs.filter(d => !!d).map(doc => [doc.id, doc])));

return [
...fileChunks,
...docChunks.map(c => ({
...c,
...docMetas.get(c.docId),
})),
] as ChunkSimilarity[];
};
return searchDocs;
};
@@ -58,7 +76,7 @@ export const createDocSemanticSearchTool = (
) => {
return tool({
description:
'Retrieve conceptually related passages by performing vector-based semantic similarity search across embedded documents; use this tool only when exact keyword search fails or the user explicitly needs meaning-level matches (e.g., paraphrases, synonyms, broader concepts).',
'Retrieve conceptually related passages by performing vector-based semantic similarity search across embedded documents; use this tool only when exact keyword search fails or the user explicitly needs meaning-level matches (e.g., paraphrases, synonyms, broader concepts, recent documents).',
parameters: z.object({
query: z
.string()

@@ -1,4 +1,29 @@
import { ToolSet } from 'ai';

import { createCodeArtifactTool } from './code-artifact';
import { createConversationSummaryTool } from './conversation-summary';
import { createDocComposeTool } from './doc-compose';
import { createDocEditTool } from './doc-edit';
import { createDocKeywordSearchTool } from './doc-keyword-search';
import { createDocReadTool } from './doc-read';
import { createDocSemanticSearchTool } from './doc-semantic-search';
import { createExaCrawlTool } from './exa-crawl';
import { createExaSearchTool } from './exa-search';

export interface CustomAITools extends ToolSet {
code_artifact: ReturnType<typeof createCodeArtifactTool>;
conversation_summary: ReturnType<typeof createConversationSummaryTool>;
doc_edit: ReturnType<typeof createDocEditTool>;
doc_semantic_search: ReturnType<typeof createDocSemanticSearchTool>;
doc_keyword_search: ReturnType<typeof createDocKeywordSearchTool>;
doc_read: ReturnType<typeof createDocReadTool>;
doc_compose: ReturnType<typeof createDocComposeTool>;
web_search_exa: ReturnType<typeof createExaSearchTool>;
web_crawl_exa: ReturnType<typeof createExaCrawlTool>;
}

export * from './code-artifact';
export * from './conversation-summary';
export * from './doc-compose';
export * from './doc-edit';
export * from './doc-keyword-search';

@@ -171,7 +171,7 @@ export class CopilotTranscriptionService {
);

if (!provider) {
throw new NoCopilotProviderAvailable();
throw new NoCopilotProviderAvailable({ modelId });
}

return provider;

@@ -140,10 +140,13 @@ export class ElasticsearchProvider extends SearchProvider {
|
||||
const result = await this.request(
|
||||
'POST',
|
||||
url.toString(),
|
||||
JSON.stringify({ query })
|
||||
JSON.stringify({ query }),
|
||||
'application/json',
|
||||
// ignore 409 error: version_conflict_engine_exception, version conflict, required seqNo [255898790], primary term [3]. current document has seqNo [256133002] and primary term [3]
|
||||
[409]
|
||||
);
|
||||
this.logger.debug(
|
||||
`deleted by query ${table} ${JSON.stringify(query)} in ${Date.now() - start}ms, result: ${JSON.stringify(result)}`
|
||||
`deleted by query ${table} ${JSON.stringify(query)} in ${Date.now() - start}ms, result: ${JSON.stringify(result).substring(0, 500)}`
|
||||
);
|
||||
}
|
||||
|
||||
@@ -264,7 +267,8 @@ export class ElasticsearchProvider extends SearchProvider {
|
||||
method: 'POST' | 'PUT',
|
||||
url: string,
|
||||
body: string,
|
||||
contentType = 'application/json'
|
||||
contentType = 'application/json',
|
||||
ignoreErrorStatus?: number[]
|
||||
) {
|
||||
const headers = {
|
||||
'Content-Type': contentType,
|
||||
@@ -280,6 +284,10 @@ export class ElasticsearchProvider extends SearchProvider {
|
||||
headers,
|
||||
});
|
||||
const data = await response.json();
|
||||
if (ignoreErrorStatus?.includes(response.status)) {
|
||||
return data;
|
||||
}
|
||||
|
||||
// handle error, status >= 400
|
||||
// {
|
||||
// "error": {
|
||||
|
||||
@@ -291,6 +291,11 @@ type CopilotFailedToAddWorkspaceFileEmbeddingDataType {
|
||||
message: String!
|
||||
}
|
||||
|
||||
type CopilotFailedToGenerateEmbeddingDataType {
|
||||
message: String!
|
||||
provider: String!
|
||||
}
|
||||
|
||||
type CopilotFailedToMatchContextDataType {
|
||||
content: String!
|
||||
contextId: String!
|
||||
@@ -595,6 +600,7 @@ type DocType {
|
||||
mode: PublicDocMode!
|
||||
permissions: DocPermissions!
|
||||
public: Boolean!
|
||||
summary: String
|
||||
title: String
|
||||
updatedAt: DateTime
|
||||
workspaceId: String!
|
||||
@@ -615,7 +621,7 @@ type EditorType {
|
||||
name: String!
|
||||
}
|
||||
|
||||
union ErrorDataUnion = AlreadyInSpaceDataType | BlobNotFoundDataType | CopilotContextFileNotSupportedDataType | CopilotDocNotFoundDataType | CopilotFailedToAddWorkspaceFileEmbeddingDataType | CopilotFailedToMatchContextDataType | CopilotFailedToMatchGlobalContextDataType | CopilotFailedToModifyContextDataType | CopilotInvalidContextDataType | CopilotMessageNotFoundDataType | CopilotPromptNotFoundDataType | CopilotProviderNotSupportedDataType | CopilotProviderSideErrorDataType | DocActionDeniedDataType | DocHistoryNotFoundDataType | DocNotFoundDataType | DocUpdateBlockedDataType | ExpectToGrantDocUserRolesDataType | ExpectToRevokeDocUserRolesDataType | ExpectToUpdateDocUserRoleDataType | GraphqlBadRequestDataType | HttpRequestErrorDataType | InvalidAppConfigDataType | InvalidAppConfigInputDataType | InvalidEmailDataType | InvalidHistoryTimestampDataType | InvalidIndexerInputDataType | InvalidLicenseToActivateDataType | InvalidLicenseUpdateParamsDataType | InvalidOauthCallbackCodeDataType | InvalidOauthResponseDataType | InvalidPasswordLengthDataType | InvalidRuntimeConfigTypeDataType | InvalidSearchProviderRequestDataType | MemberNotFoundInSpaceDataType | MentionUserDocAccessDeniedDataType | MissingOauthQueryParameterDataType | NoMoreSeatDataType | NotInSpaceDataType | QueryTooLongDataType | RuntimeConfigNotFoundDataType | SameSubscriptionRecurringDataType | SpaceAccessDeniedDataType | SpaceNotFoundDataType | SpaceOwnerNotFoundDataType | SpaceShouldHaveOnlyOneOwnerDataType | SubscriptionAlreadyExistsDataType | SubscriptionNotExistsDataType | SubscriptionPlanNotFoundDataType | UnknownOauthProviderDataType | UnsupportedClientVersionDataType | UnsupportedSubscriptionPlanDataType | ValidationErrorDataType | VersionRejectedDataType | WorkspacePermissionNotFoundDataType | WrongSignInCredentialsDataType
|
||||
union ErrorDataUnion = AlreadyInSpaceDataType | BlobNotFoundDataType | CopilotContextFileNotSupportedDataType | CopilotDocNotFoundDataType | CopilotFailedToAddWorkspaceFileEmbeddingDataType | CopilotFailedToGenerateEmbeddingDataType | CopilotFailedToMatchContextDataType | CopilotFailedToMatchGlobalContextDataType | CopilotFailedToModifyContextDataType | CopilotInvalidContextDataType | CopilotMessageNotFoundDataType | CopilotPromptNotFoundDataType | CopilotProviderNotSupportedDataType | CopilotProviderSideErrorDataType | DocActionDeniedDataType | DocHistoryNotFoundDataType | DocNotFoundDataType | DocUpdateBlockedDataType | ExpectToGrantDocUserRolesDataType | ExpectToRevokeDocUserRolesDataType | ExpectToUpdateDocUserRoleDataType | GraphqlBadRequestDataType | HttpRequestErrorDataType | InvalidAppConfigDataType | InvalidAppConfigInputDataType | InvalidEmailDataType | InvalidHistoryTimestampDataType | InvalidIndexerInputDataType | InvalidLicenseToActivateDataType | InvalidLicenseUpdateParamsDataType | InvalidOauthCallbackCodeDataType | InvalidOauthResponseDataType | InvalidPasswordLengthDataType | InvalidRuntimeConfigTypeDataType | InvalidSearchProviderRequestDataType | MemberNotFoundInSpaceDataType | MentionUserDocAccessDeniedDataType | MissingOauthQueryParameterDataType | NoCopilotProviderAvailableDataType | NoMoreSeatDataType | NotInSpaceDataType | QueryTooLongDataType | RuntimeConfigNotFoundDataType | SameSubscriptionRecurringDataType | SpaceAccessDeniedDataType | SpaceNotFoundDataType | SpaceOwnerNotFoundDataType | SpaceShouldHaveOnlyOneOwnerDataType | SubscriptionAlreadyExistsDataType | SubscriptionNotExistsDataType | SubscriptionPlanNotFoundDataType | UnknownOauthProviderDataType | UnsupportedClientVersionDataType | UnsupportedSubscriptionPlanDataType | ValidationErrorDataType | VersionRejectedDataType | WorkspacePermissionNotFoundDataType | WrongSignInCredentialsDataType
|
||||
|
||||
enum ErrorNames {
|
||||
ACCESS_DENIED
|
||||
@@ -644,6 +650,7 @@ enum ErrorNames {
|
||||
COPILOT_EMBEDDING_UNAVAILABLE
|
||||
COPILOT_FAILED_TO_ADD_WORKSPACE_FILE_EMBEDDING
|
||||
COPILOT_FAILED_TO_CREATE_MESSAGE
|
||||
COPILOT_FAILED_TO_GENERATE_EMBEDDING
|
||||
COPILOT_FAILED_TO_GENERATE_TEXT
|
||||
COPILOT_FAILED_TO_MATCH_CONTEXT
|
||||
COPILOT_FAILED_TO_MATCH_GLOBAL_CONTEXT
|
||||
@@ -1335,6 +1342,10 @@ type Mutation {
|
||||
verifyEmail(token: String!): Boolean!
|
||||
}
|
||||
|
||||
type NoCopilotProviderAvailableDataType {
|
||||
modelId: String!
|
||||
}
|
||||
|
||||
type NoMoreSeatDataType {
|
||||
spaceId: String!
|
||||
}
|
||||
@@ -1507,6 +1518,9 @@ type PublicUserType {
|
||||
type Query {
|
||||
"""get the whole app configuration"""
|
||||
appConfig: JSONObject!
|
||||
|
||||
"""Apply updates to a doc using LLM and return the merged markdown."""
|
||||
applyDocUpdates(docId: String!, op: String!, updates: String!, workspaceId: String!): String!
|
||||
collectAllBlobSizes: WorkspaceBlobSizes! @deprecated(reason: "use `user.quotaUsage` instead")
|
||||
|
||||
"""Get current user"""
|
||||
|
||||
@@ -82,6 +82,10 @@ export type RequestOptions<Q extends GraphQLQuery> = QueryVariablesOption<Q> & {
* @default 15000
*/
timeout?: number;
/**
* Abort signal
*/
signal?: AbortSignal;
};

export type QueryOptions<Q extends GraphQLQuery> = RequestOptions<Q> & {
@@ -207,6 +211,7 @@ export const gqlFetcherFactory = (
headers,
body: isFormData ? body : JSON.stringify(body),
timeout: options.timeout,
signal: options.signal,
})
).then(async res => {
if (res.headers.get('content-type')?.startsWith('application/json')) {

@@ -0,0 +1,3 @@
query applyDocUpdates($workspaceId: String!, $docId: String!, $op: String!, $updates: String!) {
applyDocUpdates(workspaceId: $workspaceId, docId: $docId, op: $op, updates: $updates)
}
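A hypothetical client-side call to this new query might look like the sketch below, using the generated `applyDocUpdatesQuery` operation added later in this diff. The import path and the `gqlFetcherFactory` construction are assumptions for illustration; no such call site is part of this change set.

```ts
// Sketch only: invoking the new applyDocUpdates query from a frontend client.
// The package name and fetcher signature below are assumed, not confirmed.
import { applyDocUpdatesQuery, gqlFetcherFactory } from '@affine/graphql';

const gql = gqlFetcherFactory('/graphql', globalThis.fetch.bind(globalThis));

export async function applyEditToDoc(
  workspaceId: string,
  docId: string,
  op: string,
  updates: string
): Promise<string> {
  const res = await gql({
    query: applyDocUpdatesQuery,
    variables: { workspaceId, docId, op, updates },
  });
  // The resolver returns the merged markdown for the edited doc.
  return res.applyDocUpdates;
}
```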
@@ -3,15 +3,17 @@
|
||||
query getCopilotRecentSessions(
|
||||
$workspaceId: String!
|
||||
$limit: Int = 10
|
||||
$offset: Int = 0
|
||||
) {
|
||||
currentUser {
|
||||
copilot(workspaceId: $workspaceId) {
|
||||
chats(
|
||||
pagination: { first: $limit }
|
||||
pagination: { first: $limit, offset: $offset }
|
||||
options: {
|
||||
action: false
|
||||
fork: false
|
||||
sessionOrder: desc
|
||||
withMessages: true
|
||||
withMessages: false
|
||||
}
|
||||
) {
|
||||
...PaginatedCopilotChats
|
||||
|
||||
@@ -5,6 +5,8 @@ query getWorkspacePageById($workspaceId: String!, $pageId: String!) {
|
||||
mode
|
||||
defaultRole
|
||||
public
|
||||
title
|
||||
summary
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -555,6 +555,19 @@ export const uploadCommentAttachmentMutation = {
|
||||
file: true,
|
||||
};
|
||||
|
||||
export const applyDocUpdatesQuery = {
|
||||
id: 'applyDocUpdatesQuery' as const,
|
||||
op: 'applyDocUpdates',
|
||||
query: `query applyDocUpdates($workspaceId: String!, $docId: String!, $op: String!, $updates: String!) {
|
||||
applyDocUpdates(
|
||||
workspaceId: $workspaceId
|
||||
docId: $docId
|
||||
op: $op
|
||||
updates: $updates
|
||||
)
|
||||
}`,
|
||||
};
|
||||
|
||||
export const addContextCategoryMutation = {
|
||||
id: 'addContextCategoryMutation' as const,
|
||||
op: 'addContextCategory',
|
||||
@@ -1068,12 +1081,12 @@ ${paginatedCopilotChatsFragment}`,
|
||||
export const getCopilotRecentSessionsQuery = {
|
||||
id: 'getCopilotRecentSessionsQuery' as const,
|
||||
op: 'getCopilotRecentSessions',
|
||||
query: `query getCopilotRecentSessions($workspaceId: String!, $limit: Int = 10) {
|
||||
query: `query getCopilotRecentSessions($workspaceId: String!, $limit: Int = 10, $offset: Int = 0) {
|
||||
currentUser {
|
||||
copilot(workspaceId: $workspaceId) {
|
||||
chats(
|
||||
pagination: {first: $limit}
|
||||
options: {fork: false, sessionOrder: desc, withMessages: true}
|
||||
pagination: {first: $limit, offset: $offset}
|
||||
options: {action: false, fork: false, sessionOrder: desc, withMessages: false}
|
||||
) {
|
||||
...PaginatedCopilotChats
|
||||
}
|
||||
@@ -1584,6 +1597,8 @@ export const getWorkspacePageByIdQuery = {
|
||||
mode
|
||||
defaultRole
|
||||
public
|
||||
title
|
||||
summary
|
||||
}
|
||||
}
|
||||
}`,
|
||||
|
||||
@@ -375,6 +375,12 @@ export interface CopilotFailedToAddWorkspaceFileEmbeddingDataType {
|
||||
message: Scalars['String']['output'];
|
||||
}
|
||||
|
||||
export interface CopilotFailedToGenerateEmbeddingDataType {
|
||||
__typename?: 'CopilotFailedToGenerateEmbeddingDataType';
|
||||
message: Scalars['String']['output'];
|
||||
provider: Scalars['String']['output'];
|
||||
}
|
||||
|
||||
export interface CopilotFailedToMatchContextDataType {
|
||||
__typename?: 'CopilotFailedToMatchContextDataType';
|
||||
content: Scalars['String']['output'];
|
||||
@@ -703,6 +709,7 @@ export interface DocType {
|
||||
mode: PublicDocMode;
|
||||
permissions: DocPermissions;
|
||||
public: Scalars['Boolean']['output'];
|
||||
summary: Maybe<Scalars['String']['output']>;
|
||||
title: Maybe<Scalars['String']['output']>;
|
||||
updatedAt: Maybe<Scalars['DateTime']['output']>;
|
||||
workspaceId: Scalars['String']['output'];
|
||||
@@ -736,6 +743,7 @@ export type ErrorDataUnion =
|
||||
| CopilotContextFileNotSupportedDataType
|
||||
| CopilotDocNotFoundDataType
|
||||
| CopilotFailedToAddWorkspaceFileEmbeddingDataType
|
||||
| CopilotFailedToGenerateEmbeddingDataType
|
||||
| CopilotFailedToMatchContextDataType
|
||||
| CopilotFailedToMatchGlobalContextDataType
|
||||
| CopilotFailedToModifyContextDataType
|
||||
@@ -768,6 +776,7 @@ export type ErrorDataUnion =
|
||||
| MemberNotFoundInSpaceDataType
|
||||
| MentionUserDocAccessDeniedDataType
|
||||
| MissingOauthQueryParameterDataType
|
||||
| NoCopilotProviderAvailableDataType
|
||||
| NoMoreSeatDataType
|
||||
| NotInSpaceDataType
|
||||
| QueryTooLongDataType
|
||||
@@ -815,6 +824,7 @@ export enum ErrorNames {
|
||||
COPILOT_EMBEDDING_UNAVAILABLE = 'COPILOT_EMBEDDING_UNAVAILABLE',
|
||||
COPILOT_FAILED_TO_ADD_WORKSPACE_FILE_EMBEDDING = 'COPILOT_FAILED_TO_ADD_WORKSPACE_FILE_EMBEDDING',
|
||||
COPILOT_FAILED_TO_CREATE_MESSAGE = 'COPILOT_FAILED_TO_CREATE_MESSAGE',
|
||||
COPILOT_FAILED_TO_GENERATE_EMBEDDING = 'COPILOT_FAILED_TO_GENERATE_EMBEDDING',
|
||||
COPILOT_FAILED_TO_GENERATE_TEXT = 'COPILOT_FAILED_TO_GENERATE_TEXT',
|
||||
COPILOT_FAILED_TO_MATCH_CONTEXT = 'COPILOT_FAILED_TO_MATCH_CONTEXT',
|
||||
COPILOT_FAILED_TO_MATCH_GLOBAL_CONTEXT = 'COPILOT_FAILED_TO_MATCH_GLOBAL_CONTEXT',
|
||||
@@ -1880,6 +1890,11 @@ export interface MutationVerifyEmailArgs {
|
||||
token: Scalars['String']['input'];
|
||||
}
|
||||
|
||||
export interface NoCopilotProviderAvailableDataType {
|
||||
__typename?: 'NoCopilotProviderAvailableDataType';
|
||||
modelId: Scalars['String']['output'];
|
||||
}
|
||||
|
||||
export interface NoMoreSeatDataType {
|
||||
__typename?: 'NoMoreSeatDataType';
|
||||
spaceId: Scalars['String']['output'];
|
||||
@@ -2058,6 +2073,8 @@ export interface Query {
|
||||
__typename?: 'Query';
|
||||
/** get the whole app configuration */
|
||||
appConfig: Scalars['JSONObject']['output'];
|
||||
/** Apply updates to a doc using LLM and return the merged markdown. */
|
||||
applyDocUpdates: Scalars['String']['output'];
|
||||
/** @deprecated use `user.quotaUsage` instead */
|
||||
collectAllBlobSizes: WorkspaceBlobSizes;
|
||||
/** Get current user */
|
||||
@@ -2105,6 +2122,13 @@ export interface Query {
|
||||
workspaces: Array<WorkspaceType>;
|
||||
}
|
||||
|
||||
export interface QueryApplyDocUpdatesArgs {
|
||||
docId: Scalars['String']['input'];
|
||||
op: Scalars['String']['input'];
|
||||
updates: Scalars['String']['input'];
|
||||
workspaceId: Scalars['String']['input'];
|
||||
}
|
||||
|
||||
export interface QueryErrorArgs {
|
||||
name: ErrorNames;
|
||||
}
|
||||
@@ -3494,6 +3518,18 @@ export type UploadCommentAttachmentMutation = {
|
||||
uploadCommentAttachment: string;
|
||||
};
|
||||
|
||||
export type ApplyDocUpdatesQueryVariables = Exact<{
|
||||
workspaceId: Scalars['String']['input'];
|
||||
docId: Scalars['String']['input'];
|
||||
op: Scalars['String']['input'];
|
||||
updates: Scalars['String']['input'];
|
||||
}>;
|
||||
|
||||
export type ApplyDocUpdatesQuery = {
|
||||
__typename?: 'Query';
|
||||
applyDocUpdates: string;
|
||||
};
|
||||
|
||||
export type AddContextCategoryMutationVariables = Exact<{
|
||||
options: AddContextCategoryInput;
|
||||
}>;
|
||||
@@ -4350,6 +4386,7 @@ export type GetCopilotSessionQuery = {
|
||||
export type GetCopilotRecentSessionsQueryVariables = Exact<{
|
||||
workspaceId: Scalars['String']['input'];
|
||||
limit?: InputMaybe<Scalars['Int']['input']>;
|
||||
offset?: InputMaybe<Scalars['Int']['input']>;
|
||||
}>;
|
||||
|
||||
export type GetCopilotRecentSessionsQuery = {
|
||||
@@ -5147,6 +5184,8 @@ export type GetWorkspacePageByIdQuery = {
|
||||
mode: PublicDocMode;
|
||||
defaultRole: DocRole;
|
||||
public: boolean;
|
||||
title: string | null;
|
||||
summary: string | null;
|
||||
};
|
||||
};
|
||||
};
|
||||
@@ -6130,6 +6169,11 @@ export type Queries =
|
||||
variables: ListCommentsQueryVariables;
|
||||
response: ListCommentsQuery;
|
||||
}
|
||||
| {
|
||||
name: 'applyDocUpdatesQuery';
|
||||
variables: ApplyDocUpdatesQueryVariables;
|
||||
response: ApplyDocUpdatesQuery;
|
||||
}
|
||||
| {
|
||||
name: 'listContextObjectQuery';
|
||||
variables: ListContextObjectQueryVariables;
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
package app.affine.pro.ai.chat
|
||||
|
||||
import com.affine.pro.graphql.GetCopilotHistoriesQuery
|
||||
import com.affine.pro.graphql.fragment.CopilotChatHistory
|
||||
import com.affine.pro.graphql.fragment.CopilotChatMessage
|
||||
import kotlinx.datetime.Clock
|
||||
import kotlinx.datetime.Instant
|
||||
|
||||
@@ -51,11 +53,11 @@ data class ChatMessage(
|
||||
createAt = Clock.System.now(),
|
||||
)
|
||||
|
||||
fun from(message: GetCopilotHistoriesQuery.Message) = ChatMessage(
|
||||
fun from(message: CopilotChatMessage) = ChatMessage(
|
||||
id = message.id,
|
||||
role = Role.fromValue(message.role),
|
||||
content = message.content,
|
||||
createAt = message.createdAt,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,7 +9,8 @@ import com.affine.pro.graphql.GetCopilotHistoryIdsQuery
|
||||
import com.affine.pro.graphql.GetCopilotSessionsQuery
|
||||
import com.affine.pro.graphql.type.CreateChatMessageInput
|
||||
import com.affine.pro.graphql.type.CreateChatSessionInput
|
||||
import com.affine.pro.graphql.type.QueryChatSessionsInput
|
||||
import com.affine.pro.graphql.type.PaginationInput
|
||||
import com.affine.pro.graphql.type.QueryChatHistoriesInput
|
||||
import com.apollographql.apollo.ApolloClient
|
||||
import com.apollographql.apollo.api.Mutation
|
||||
import com.apollographql.apollo.api.Optional
|
||||
@@ -29,12 +30,15 @@ class GraphQLService @Inject constructor() {
|
||||
GetCopilotSessionsQuery(
|
||||
workspaceId = workspaceId,
|
||||
docId = Optional.present(docId),
|
||||
options = Optional.present(QueryChatSessionsInput(action = Optional.present(false)))
|
||||
pagination = PaginationInput(
|
||||
first = Optional.present(100)
|
||||
),
|
||||
options = Optional.present(QueryChatHistoriesInput(action = Optional.present(false)))
|
||||
)
|
||||
).mapCatching { data ->
|
||||
data.currentUser?.copilot?.sessions?.find {
|
||||
data.currentUser?.copilot?.chats?.paginatedCopilotChats?.edges?.map { item -> item.node.copilotChatHistory }?.find {
|
||||
it.parentSessionId == null
|
||||
}?.id ?: error(ERROR_NULL_SESSION_ID)
|
||||
}?.sessionId ?: error(ERROR_NULL_SESSION_ID)
|
||||
}
|
||||
|
||||
suspend fun createCopilotSession(
|
||||
@@ -60,12 +64,15 @@ class GraphQLService @Inject constructor() {
|
||||
) = query(
|
||||
GetCopilotHistoriesQuery(
|
||||
workspaceId = workspaceId,
|
||||
pagination = PaginationInput(
|
||||
first = Optional.present(100)
|
||||
),
|
||||
docId = Optional.present(docId),
|
||||
)
|
||||
).mapCatching { data ->
|
||||
data.currentUser?.copilot?.histories?.firstOrNull { history ->
|
||||
history.sessionId == sessionId
|
||||
}?.messages ?: emptyList()
|
||||
data.currentUser?.copilot?.chats?.paginatedCopilotChats?.edges?.map { item -> item.node.copilotChatHistory }?.firstOrNull { history ->
|
||||
history.sessionId == sessionId
|
||||
}?.messages?.map { msg -> msg.copilotChatMessage } ?: emptyList()
|
||||
}
|
||||
|
||||
suspend fun getCopilotHistoryIds(
|
||||
@@ -76,9 +83,12 @@ class GraphQLService @Inject constructor() {
|
||||
GetCopilotHistoryIdsQuery(
|
||||
workspaceId = workspaceId,
|
||||
docId = Optional.present(docId),
|
||||
pagination = PaginationInput(
|
||||
first = Optional.present(100)
|
||||
),
|
||||
)
|
||||
).mapCatching { data ->
|
||||
data.currentUser?.copilot?.histories?.firstOrNull { history ->
|
||||
data.currentUser?.copilot?.chats?.edges?.map { item -> item.node }?.firstOrNull { history ->
|
||||
history.sessionId == sessionId
|
||||
}?.messages ?: emptyList()
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
archiveVersion = 1;
|
||||
classes = {
|
||||
};
|
||||
objectVersion = 77;
|
||||
objectVersion = 56;
|
||||
objects = {
|
||||
|
||||
/* Begin PBXBuildFile section */
|
||||
@@ -90,6 +90,8 @@
|
||||
/* Begin PBXFileSystemSynchronizedRootGroup section */
|
||||
C45499AB2D140B5000E21978 /* NBStore */ = {
|
||||
isa = PBXFileSystemSynchronizedRootGroup;
|
||||
exceptions = (
|
||||
);
|
||||
path = NBStore;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
@@ -337,13 +339,9 @@
|
||||
);
|
||||
inputFileListPaths = (
|
||||
);
|
||||
inputPaths = (
|
||||
);
|
||||
name = "[CP] Embed Pods Frameworks";
|
||||
outputFileListPaths = (
|
||||
);
|
||||
outputPaths = (
|
||||
);
|
||||
runOnlyForDeploymentPostprocessing = 0;
|
||||
shellPath = /bin/sh;
|
||||
shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-AFFiNE/Pods-AFFiNE-frameworks.sh\"\n";
|
||||
@@ -543,7 +541,7 @@
|
||||
"$(inherited)",
|
||||
"$(PROJECT_DIR)",
|
||||
);
|
||||
MARKETING_VERSION = 0.22.2;
|
||||
MARKETING_VERSION = 0.23.1;
|
||||
OTHER_SWIFT_FLAGS = "$(inherited) \"-D\" \"COCOAPODS\" \"-DDEBUG\"";
|
||||
PRODUCT_BUNDLE_IDENTIFIER = app.affine.pro;
|
||||
PRODUCT_NAME = "$(TARGET_NAME)";
|
||||
@@ -579,7 +577,7 @@
|
||||
"$(inherited)",
|
||||
"$(PROJECT_DIR)",
|
||||
);
|
||||
MARKETING_VERSION = 0.22.2;
|
||||
MARKETING_VERSION = 0.23.1;
|
||||
ONLY_ACTIVE_ARCH = NO;
|
||||
PRODUCT_BUNDLE_IDENTIFIER = app.affine.pro;
|
||||
PRODUCT_NAME = "$(TARGET_NAME)";
|
||||
|
||||
@@ -0,0 +1,79 @@
|
||||
// @generated
|
||||
// This file was automatically generated and should not be edited.
|
||||
|
||||
@_exported import ApolloAPI
|
||||
|
||||
public struct CopilotChatHistory: AffineGraphQL.SelectionSet, Fragment {
|
||||
public static var fragmentDefinition: StaticString {
|
||||
#"fragment CopilotChatHistory on CopilotHistories { __typename sessionId workspaceId docId parentSessionId promptName model optionalModels action pinned title tokens messages { __typename ...CopilotChatMessage } createdAt updatedAt }"#
|
||||
}
|
||||
|
||||
public let __data: DataDict
|
||||
public init(_dataDict: DataDict) { __data = _dataDict }
|
||||
|
||||
public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.CopilotHistories }
|
||||
public static var __selections: [ApolloAPI.Selection] { [
|
||||
.field("__typename", String.self),
|
||||
.field("sessionId", String.self),
|
||||
.field("workspaceId", String.self),
|
||||
.field("docId", String?.self),
|
||||
.field("parentSessionId", String?.self),
|
||||
.field("promptName", String.self),
|
||||
.field("model", String.self),
|
||||
.field("optionalModels", [String].self),
|
||||
.field("action", String?.self),
|
||||
.field("pinned", Bool.self),
|
||||
.field("title", String?.self),
|
||||
.field("tokens", Int.self),
|
||||
.field("messages", [Message].self),
|
||||
.field("createdAt", AffineGraphQL.DateTime.self),
|
||||
.field("updatedAt", AffineGraphQL.DateTime.self),
|
||||
] }
|
||||
|
||||
public var sessionId: String { __data["sessionId"] }
|
||||
public var workspaceId: String { __data["workspaceId"] }
|
||||
public var docId: String? { __data["docId"] }
|
||||
public var parentSessionId: String? { __data["parentSessionId"] }
|
||||
public var promptName: String { __data["promptName"] }
|
||||
public var model: String { __data["model"] }
|
||||
public var optionalModels: [String] { __data["optionalModels"] }
|
||||
/// An mark identifying which view to use to display the session
|
||||
public var action: String? { __data["action"] }
|
||||
public var pinned: Bool { __data["pinned"] }
|
||||
public var title: String? { __data["title"] }
|
||||
/// The number of tokens used in the session
|
||||
public var tokens: Int { __data["tokens"] }
|
||||
public var messages: [Message] { __data["messages"] }
|
||||
public var createdAt: AffineGraphQL.DateTime { __data["createdAt"] }
|
||||
public var updatedAt: AffineGraphQL.DateTime { __data["updatedAt"] }
|
||||
|
||||
/// Message
|
||||
///
|
||||
/// Parent Type: `ChatMessage`
|
||||
public struct Message: AffineGraphQL.SelectionSet {
|
||||
public let __data: DataDict
|
||||
public init(_dataDict: DataDict) { __data = _dataDict }
|
||||
|
||||
public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.ChatMessage }
|
||||
public static var __selections: [ApolloAPI.Selection] { [
|
||||
.field("__typename", String.self),
|
||||
.fragment(CopilotChatMessage.self),
|
||||
] }
|
||||
|
||||
public var id: AffineGraphQL.ID? { __data["id"] }
|
||||
public var role: String { __data["role"] }
|
||||
public var content: String { __data["content"] }
|
||||
public var attachments: [String]? { __data["attachments"] }
|
||||
public var streamObjects: [StreamObject]? { __data["streamObjects"] }
|
||||
public var createdAt: AffineGraphQL.DateTime { __data["createdAt"] }
|
||||
|
||||
public struct Fragments: FragmentContainer {
|
||||
public let __data: DataDict
|
||||
public init(_dataDict: DataDict) { __data = _dataDict }
|
||||
|
||||
public var copilotChatMessage: CopilotChatMessage { _toFragment() }
|
||||
}
|
||||
|
||||
public typealias StreamObject = CopilotChatMessage.StreamObject
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,57 @@
|
||||
// @generated
|
||||
// This file was automatically generated and should not be edited.
|
||||
|
||||
@_exported import ApolloAPI
|
||||
|
||||
public struct CopilotChatMessage: AffineGraphQL.SelectionSet, Fragment {
|
||||
public static var fragmentDefinition: StaticString {
|
||||
#"fragment CopilotChatMessage on ChatMessage { __typename id role content attachments streamObjects { __typename type textDelta toolCallId toolName args result } createdAt }"#
|
||||
}
|
||||
|
||||
public let __data: DataDict
|
||||
public init(_dataDict: DataDict) { __data = _dataDict }
|
||||
|
||||
public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.ChatMessage }
|
||||
public static var __selections: [ApolloAPI.Selection] { [
|
||||
.field("__typename", String.self),
|
||||
.field("id", AffineGraphQL.ID?.self),
|
||||
.field("role", String.self),
|
||||
.field("content", String.self),
|
||||
.field("attachments", [String]?.self),
|
||||
.field("streamObjects", [StreamObject]?.self),
|
||||
.field("createdAt", AffineGraphQL.DateTime.self),
|
||||
] }
|
||||
|
||||
public var id: AffineGraphQL.ID? { __data["id"] }
|
||||
public var role: String { __data["role"] }
|
||||
public var content: String { __data["content"] }
|
||||
public var attachments: [String]? { __data["attachments"] }
|
||||
public var streamObjects: [StreamObject]? { __data["streamObjects"] }
|
||||
public var createdAt: AffineGraphQL.DateTime { __data["createdAt"] }
|
||||
|
||||
/// StreamObject
|
||||
///
|
||||
/// Parent Type: `StreamObject`
|
||||
public struct StreamObject: AffineGraphQL.SelectionSet {
|
||||
public let __data: DataDict
|
||||
public init(_dataDict: DataDict) { __data = _dataDict }
|
||||
|
||||
public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.StreamObject }
|
||||
public static var __selections: [ApolloAPI.Selection] { [
|
||||
.field("__typename", String.self),
|
||||
.field("type", String.self),
|
||||
.field("textDelta", String?.self),
|
||||
.field("toolCallId", String?.self),
|
||||
.field("toolName", String?.self),
|
||||
.field("args", AffineGraphQL.JSON?.self),
|
||||
.field("result", AffineGraphQL.JSON?.self),
|
||||
] }
|
||||
|
||||
public var type: String { __data["type"] }
|
||||
public var textDelta: String? { __data["textDelta"] }
|
||||
public var toolCallId: String? { __data["toolCallId"] }
|
||||
public var toolName: String? { __data["toolName"] }
|
||||
public var args: AffineGraphQL.JSON? { __data["args"] }
|
||||
public var result: AffineGraphQL.JSON? { __data["result"] }
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,103 @@
|
||||
// @generated
|
||||
// This file was automatically generated and should not be edited.
|
||||
|
||||
@_exported import ApolloAPI
|
||||
|
||||
public struct PaginatedCopilotChats: AffineGraphQL.SelectionSet, Fragment {
|
||||
public static var fragmentDefinition: StaticString {
|
||||
#"fragment PaginatedCopilotChats on PaginatedCopilotHistoriesType { __typename pageInfo { __typename hasNextPage hasPreviousPage startCursor endCursor } edges { __typename cursor node { __typename ...CopilotChatHistory } } }"#
|
||||
}
|
||||
|
||||
public let __data: DataDict
|
||||
public init(_dataDict: DataDict) { __data = _dataDict }
|
||||
|
||||
public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.PaginatedCopilotHistoriesType }
|
||||
public static var __selections: [ApolloAPI.Selection] { [
|
||||
.field("__typename", String.self),
|
||||
.field("pageInfo", PageInfo.self),
|
||||
.field("edges", [Edge].self),
|
||||
] }
|
||||
|
||||
public var pageInfo: PageInfo { __data["pageInfo"] }
|
||||
public var edges: [Edge] { __data["edges"] }
|
||||
|
||||
/// PageInfo
|
||||
///
|
||||
/// Parent Type: `PageInfo`
|
||||
public struct PageInfo: AffineGraphQL.SelectionSet {
|
||||
public let __data: DataDict
|
||||
public init(_dataDict: DataDict) { __data = _dataDict }
|
||||
|
||||
public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.PageInfo }
|
||||
public static var __selections: [ApolloAPI.Selection] { [
|
||||
.field("__typename", String.self),
|
||||
.field("hasNextPage", Bool.self),
|
||||
.field("hasPreviousPage", Bool.self),
|
||||
.field("startCursor", String?.self),
|
||||
.field("endCursor", String?.self),
|
||||
] }
|
||||
|
||||
public var hasNextPage: Bool { __data["hasNextPage"] }
|
||||
public var hasPreviousPage: Bool { __data["hasPreviousPage"] }
|
||||
public var startCursor: String? { __data["startCursor"] }
|
||||
public var endCursor: String? { __data["endCursor"] }
|
||||
}
|
||||
|
||||
/// Edge
|
||||
///
|
||||
/// Parent Type: `CopilotHistoriesTypeEdge`
|
||||
public struct Edge: AffineGraphQL.SelectionSet {
|
||||
public let __data: DataDict
|
||||
public init(_dataDict: DataDict) { __data = _dataDict }
|
||||
|
||||
public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.CopilotHistoriesTypeEdge }
|
||||
public static var __selections: [ApolloAPI.Selection] { [
|
||||
.field("__typename", String.self),
|
||||
.field("cursor", String.self),
|
||||
.field("node", Node.self),
|
||||
] }
|
||||
|
||||
public var cursor: String { __data["cursor"] }
|
||||
public var node: Node { __data["node"] }
|
||||
|
||||
/// Edge.Node
|
||||
///
|
||||
/// Parent Type: `CopilotHistories`
|
||||
public struct Node: AffineGraphQL.SelectionSet {
|
||||
public let __data: DataDict
|
||||
public init(_dataDict: DataDict) { __data = _dataDict }
|
||||
|
||||
public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.CopilotHistories }
|
||||
public static var __selections: [ApolloAPI.Selection] { [
|
||||
.field("__typename", String.self),
|
||||
.fragment(CopilotChatHistory.self),
|
||||
] }
|
||||
|
||||
public var sessionId: String { __data["sessionId"] }
|
||||
public var workspaceId: String { __data["workspaceId"] }
|
||||
public var docId: String? { __data["docId"] }
|
||||
public var parentSessionId: String? { __data["parentSessionId"] }
|
||||
public var promptName: String { __data["promptName"] }
|
||||
public var model: String { __data["model"] }
|
||||
public var optionalModels: [String] { __data["optionalModels"] }
|
||||
/// An mark identifying which view to use to display the session
|
||||
public var action: String? { __data["action"] }
|
||||
public var pinned: Bool { __data["pinned"] }
|
||||
public var title: String? { __data["title"] }
|
||||
/// The number of tokens used in the session
|
||||
public var tokens: Int { __data["tokens"] }
|
||||
public var messages: [Message] { __data["messages"] }
|
||||
public var createdAt: AffineGraphQL.DateTime { __data["createdAt"] }
|
||||
public var updatedAt: AffineGraphQL.DateTime { __data["updatedAt"] }
|
||||
|
||||
public struct Fragments: FragmentContainer {
|
||||
public let __data: DataDict
|
||||
public init(_dataDict: DataDict) { __data = _dataDict }
|
||||
|
||||
public var copilotChatHistory: CopilotChatHistory { _toFragment() }
|
||||
}
|
||||
|
||||
public typealias Message = CopilotChatHistory.Message
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,136 @@
|
||||
// @generated
|
||||
// This file was automatically generated and should not be edited.
|
||||
|
||||
@_exported import ApolloAPI
|
||||
|
||||
public class CreateCommentMutation: GraphQLMutation {
|
||||
public static let operationName: String = "createComment"
|
||||
public static let operationDocument: ApolloAPI.OperationDocument = .init(
|
||||
definition: .init(
|
||||
#"mutation createComment($input: CommentCreateInput!) { createComment(input: $input) { __typename id content resolved createdAt updatedAt user { __typename id name avatarUrl } replies { __typename commentId id content createdAt updatedAt user { __typename id name avatarUrl } } } }"#
|
||||
))
|
||||
|
||||
public var input: CommentCreateInput
|
||||
|
||||
public init(input: CommentCreateInput) {
|
||||
self.input = input
|
||||
}
|
||||
|
||||
public var __variables: Variables? { ["input": input] }
|
||||
|
||||
public struct Data: AffineGraphQL.SelectionSet {
|
||||
public let __data: DataDict
|
||||
public init(_dataDict: DataDict) { __data = _dataDict }
|
||||
|
||||
public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.Mutation }
|
||||
public static var __selections: [ApolloAPI.Selection] { [
|
||||
.field("createComment", CreateComment.self, arguments: ["input": .variable("input")]),
|
||||
] }
|
||||
|
||||
public var createComment: CreateComment { __data["createComment"] }
|
||||
|
||||
/// CreateComment
|
||||
///
|
||||
/// Parent Type: `CommentObjectType`
|
||||
public struct CreateComment: AffineGraphQL.SelectionSet {
|
||||
public let __data: DataDict
|
||||
public init(_dataDict: DataDict) { __data = _dataDict }
|
||||
|
||||
public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.CommentObjectType }
|
||||
public static var __selections: [ApolloAPI.Selection] { [
|
||||
.field("__typename", String.self),
|
||||
.field("id", AffineGraphQL.ID.self),
|
||||
.field("content", AffineGraphQL.JSONObject.self),
|
||||
.field("resolved", Bool.self),
|
||||
.field("createdAt", AffineGraphQL.DateTime.self),
|
||||
.field("updatedAt", AffineGraphQL.DateTime.self),
|
||||
.field("user", User.self),
|
||||
.field("replies", [Reply].self),
|
||||
] }
|
||||
|
||||
public var id: AffineGraphQL.ID { __data["id"] }
|
||||
/// The content of the comment
|
||||
public var content: AffineGraphQL.JSONObject { __data["content"] }
|
||||
/// Whether the comment is resolved
|
||||
public var resolved: Bool { __data["resolved"] }
|
||||
/// The created at time of the comment
|
||||
public var createdAt: AffineGraphQL.DateTime { __data["createdAt"] }
|
||||
/// The updated at time of the comment
|
||||
public var updatedAt: AffineGraphQL.DateTime { __data["updatedAt"] }
|
||||
/// The user who created the comment
|
||||
public var user: User { __data["user"] }
|
||||
/// The replies of the comment
|
||||
public var replies: [Reply] { __data["replies"] }
|
||||
|
||||
/// CreateComment.User
|
||||
///
|
||||
/// Parent Type: `PublicUserType`
|
||||
public struct User: AffineGraphQL.SelectionSet {
|
||||
public let __data: DataDict
|
||||
public init(_dataDict: DataDict) { __data = _dataDict }
|
||||
|
||||
public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.PublicUserType }
|
||||
public static var __selections: [ApolloAPI.Selection] { [
|
||||
.field("__typename", String.self),
|
||||
.field("id", String.self),
|
||||
.field("name", String.self),
|
||||
.field("avatarUrl", String?.self),
|
||||
] }
|
||||
|
||||
public var id: String { __data["id"] }
|
||||
public var name: String { __data["name"] }
|
||||
public var avatarUrl: String? { __data["avatarUrl"] }
|
||||
}
|
||||
|
||||
/// CreateComment.Reply
|
||||
///
|
||||
/// Parent Type: `ReplyObjectType`
|
||||
public struct Reply: AffineGraphQL.SelectionSet {
|
||||
public let __data: DataDict
|
||||
public init(_dataDict: DataDict) { __data = _dataDict }
|
||||
|
||||
public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.ReplyObjectType }
|
||||
public static var __selections: [ApolloAPI.Selection] { [
|
||||
.field("__typename", String.self),
|
||||
.field("commentId", AffineGraphQL.ID.self),
|
||||
.field("id", AffineGraphQL.ID.self),
|
||||
.field("content", AffineGraphQL.JSONObject.self),
|
||||
.field("createdAt", AffineGraphQL.DateTime.self),
|
||||
.field("updatedAt", AffineGraphQL.DateTime.self),
|
||||
.field("user", User.self),
|
||||
] }
|
||||
|
||||
public var commentId: AffineGraphQL.ID { __data["commentId"] }
|
||||
public var id: AffineGraphQL.ID { __data["id"] }
|
||||
/// The content of the reply
|
||||
public var content: AffineGraphQL.JSONObject { __data["content"] }
|
||||
/// The created at time of the reply
|
||||
public var createdAt: AffineGraphQL.DateTime { __data["createdAt"] }
|
||||
/// The updated at time of the reply
|
||||
public var updatedAt: AffineGraphQL.DateTime { __data["updatedAt"] }
|
||||
/// The user who created the reply
|
||||
public var user: User { __data["user"] }
|
||||
|
||||
/// CreateComment.Reply.User
|
||||
///
|
||||
/// Parent Type: `PublicUserType`
|
||||
public struct User: AffineGraphQL.SelectionSet {
|
||||
public let __data: DataDict
|
||||
public init(_dataDict: DataDict) { __data = _dataDict }
|
||||
|
||||
public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.PublicUserType }
|
||||
public static var __selections: [ApolloAPI.Selection] { [
|
||||
.field("__typename", String.self),
|
||||
.field("id", String.self),
|
||||
.field("name", String.self),
|
||||
.field("avatarUrl", String?.self),
|
||||
] }
|
||||
|
||||
public var id: String { __data["id"] }
|
||||
public var name: String { __data["name"] }
|
||||
public var avatarUrl: String? { __data["avatarUrl"] }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,82 @@
|
||||
// @generated
|
||||
// This file was automatically generated and should not be edited.
|
||||
|
||||
@_exported import ApolloAPI
|
||||
|
||||
public class CreateReplyMutation: GraphQLMutation {
|
||||
public static let operationName: String = "createReply"
|
||||
public static let operationDocument: ApolloAPI.OperationDocument = .init(
|
||||
definition: .init(
|
||||
#"mutation createReply($input: ReplyCreateInput!) { createReply(input: $input) { __typename commentId id content createdAt updatedAt user { __typename id name avatarUrl } } }"#
|
||||
))
|
||||
|
||||
public var input: ReplyCreateInput
|
||||
|
||||
public init(input: ReplyCreateInput) {
|
||||
self.input = input
|
||||
}
|
||||
|
||||
public var __variables: Variables? { ["input": input] }
|
||||
|
||||
public struct Data: AffineGraphQL.SelectionSet {
|
||||
public let __data: DataDict
|
||||
public init(_dataDict: DataDict) { __data = _dataDict }
|
||||
|
||||
public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.Mutation }
|
||||
public static var __selections: [ApolloAPI.Selection] { [
|
||||
.field("createReply", CreateReply.self, arguments: ["input": .variable("input")]),
|
||||
] }
|
||||
|
||||
public var createReply: CreateReply { __data["createReply"] }
|
||||
|
||||
/// CreateReply
|
||||
///
|
||||
/// Parent Type: `ReplyObjectType`
|
||||
public struct CreateReply: AffineGraphQL.SelectionSet {
|
||||
public let __data: DataDict
|
||||
public init(_dataDict: DataDict) { __data = _dataDict }
|
||||
|
||||
public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.ReplyObjectType }
|
||||
public static var __selections: [ApolloAPI.Selection] { [
|
||||
.field("__typename", String.self),
|
||||
.field("commentId", AffineGraphQL.ID.self),
|
||||
.field("id", AffineGraphQL.ID.self),
|
||||
.field("content", AffineGraphQL.JSONObject.self),
|
||||
.field("createdAt", AffineGraphQL.DateTime.self),
|
||||
.field("updatedAt", AffineGraphQL.DateTime.self),
|
||||
.field("user", User.self),
|
||||
] }
|
||||
|
||||
public var commentId: AffineGraphQL.ID { __data["commentId"] }
|
||||
public var id: AffineGraphQL.ID { __data["id"] }
|
||||
/// The content of the reply
|
||||
public var content: AffineGraphQL.JSONObject { __data["content"] }
|
||||
/// The created at time of the reply
|
||||
public var createdAt: AffineGraphQL.DateTime { __data["createdAt"] }
|
||||
/// The updated at time of the reply
|
||||
public var updatedAt: AffineGraphQL.DateTime { __data["updatedAt"] }
|
||||
/// The user who created the reply
|
||||
public var user: User { __data["user"] }
|
||||
|
||||
/// CreateReply.User
|
||||
///
|
||||
/// Parent Type: `PublicUserType`
|
||||
public struct User: AffineGraphQL.SelectionSet {
|
||||
public let __data: DataDict
|
||||
public init(_dataDict: DataDict) { __data = _dataDict }
|
||||
|
||||
public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.PublicUserType }
|
||||
public static var __selections: [ApolloAPI.Selection] { [
|
||||
.field("__typename", String.self),
|
||||
.field("id", String.self),
|
||||
.field("name", String.self),
|
||||
.field("avatarUrl", String?.self),
|
||||
] }
|
||||
|
||||
public var id: String { __data["id"] }
|
||||
public var name: String { __data["name"] }
|
||||
public var avatarUrl: String? { __data["avatarUrl"] }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,33 @@
|
||||
// @generated
|
||||
// This file was automatically generated and should not be edited.
|
||||
|
||||
@_exported import ApolloAPI
|
||||
|
||||
public class DeleteCommentMutation: GraphQLMutation {
|
||||
public static let operationName: String = "deleteComment"
|
||||
public static let operationDocument: ApolloAPI.OperationDocument = .init(
|
||||
definition: .init(
|
||||
#"mutation deleteComment($id: String!) { deleteComment(id: $id) }"#
|
||||
))
|
||||
|
||||
public var id: String
|
||||
|
||||
public init(id: String) {
|
||||
self.id = id
|
||||
}
|
||||
|
||||
public var __variables: Variables? { ["id": id] }
|
||||
|
||||
public struct Data: AffineGraphQL.SelectionSet {
|
||||
public let __data: DataDict
|
||||
public init(_dataDict: DataDict) { __data = _dataDict }
|
||||
|
||||
public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.Mutation }
|
||||
public static var __selections: [ApolloAPI.Selection] { [
|
||||
.field("deleteComment", Bool.self, arguments: ["id": .variable("id")]),
|
||||
] }
|
||||
|
||||
/// Delete a comment
|
||||
public var deleteComment: Bool { __data["deleteComment"] }
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,33 @@
|
||||
// @generated
|
||||
// This file was automatically generated and should not be edited.
|
||||
|
||||
@_exported import ApolloAPI
|
||||
|
||||
public class DeleteReplyMutation: GraphQLMutation {
|
||||
public static let operationName: String = "deleteReply"
|
||||
public static let operationDocument: ApolloAPI.OperationDocument = .init(
|
||||
definition: .init(
|
||||
#"mutation deleteReply($id: String!) { deleteReply(id: $id) }"#
|
||||
))
|
||||
|
||||
public var id: String
|
||||
|
||||
public init(id: String) {
|
||||
self.id = id
|
||||
}
|
||||
|
||||
public var __variables: Variables? { ["id": id] }
|
||||
|
||||
public struct Data: AffineGraphQL.SelectionSet {
|
||||
public let __data: DataDict
|
||||
public init(_dataDict: DataDict) { __data = _dataDict }
|
||||
|
||||
public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.Mutation }
|
||||
public static var __selections: [ApolloAPI.Selection] { [
|
||||
.field("deleteReply", Bool.self, arguments: ["id": .variable("id")]),
|
||||
] }
|
||||
|
||||
/// Delete a reply
|
||||
public var deleteReply: Bool { __data["deleteReply"] }
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,27 @@
|
||||
// @generated
|
||||
// This file was automatically generated and should not be edited.
|
||||
|
||||
@_exported import ApolloAPI
|
||||
|
||||
public class ReadAllNotificationsMutation: GraphQLMutation {
|
||||
public static let operationName: String = "readAllNotifications"
|
||||
public static let operationDocument: ApolloAPI.OperationDocument = .init(
|
||||
definition: .init(
|
||||
#"mutation readAllNotifications { readAllNotifications }"#
|
||||
))
|
||||
|
||||
public init() {}
|
||||
|
||||
public struct Data: AffineGraphQL.SelectionSet {
|
||||
public let __data: DataDict
|
||||
public init(_dataDict: DataDict) { __data = _dataDict }
|
||||
|
||||
public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.Mutation }
|
||||
public static var __selections: [ApolloAPI.Selection] { [
|
||||
.field("readAllNotifications", Bool.self),
|
||||
] }
|
||||
|
||||
/// mark all notifications as read
|
||||
public var readAllNotifications: Bool { __data["readAllNotifications"] }
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,33 @@
|
||||
// @generated
|
||||
// This file was automatically generated and should not be edited.
|
||||
|
||||
@_exported import ApolloAPI
|
||||
|
||||
public class ResolveCommentMutation: GraphQLMutation {
|
||||
public static let operationName: String = "resolveComment"
|
||||
public static let operationDocument: ApolloAPI.OperationDocument = .init(
|
||||
definition: .init(
|
||||
#"mutation resolveComment($input: CommentResolveInput!) { resolveComment(input: $input) }"#
|
||||
))
|
||||
|
||||
public var input: CommentResolveInput
|
||||
|
||||
public init(input: CommentResolveInput) {
|
||||
self.input = input
|
||||
}
|
||||
|
||||
public var __variables: Variables? { ["input": input] }
|
||||
|
||||
public struct Data: AffineGraphQL.SelectionSet {
|
||||
public let __data: DataDict
|
||||
public init(_dataDict: DataDict) { __data = _dataDict }
|
||||
|
||||
public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.Mutation }
|
||||
public static var __selections: [ApolloAPI.Selection] { [
|
||||
.field("resolveComment", Bool.self, arguments: ["input": .variable("input")]),
|
||||
] }
|
||||
|
||||
/// Resolve a comment or not
|
||||
public var resolveComment: Bool { __data["resolveComment"] }
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,33 @@
// @generated
// This file was automatically generated and should not be edited.

@_exported import ApolloAPI

public class UpdateCommentMutation: GraphQLMutation {
  public static let operationName: String = "updateComment"
  public static let operationDocument: ApolloAPI.OperationDocument = .init(
    definition: .init(
      #"mutation updateComment($input: CommentUpdateInput!) { updateComment(input: $input) }"#
    ))

  public var input: CommentUpdateInput

  public init(input: CommentUpdateInput) {
    self.input = input
  }

  public var __variables: Variables? { ["input": input] }

  public struct Data: AffineGraphQL.SelectionSet {
    public let __data: DataDict
    public init(_dataDict: DataDict) { __data = _dataDict }

    public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.Mutation }
    public static var __selections: [ApolloAPI.Selection] { [
      .field("updateComment", Bool.self, arguments: ["input": .variable("input")]),
    ] }

    /// Update a comment content
    public var updateComment: Bool { __data["updateComment"] }
  }
}
@@ -0,0 +1,33 @@
// @generated
// This file was automatically generated and should not be edited.

@_exported import ApolloAPI

public class UpdateReplyMutation: GraphQLMutation {
  public static let operationName: String = "updateReply"
  public static let operationDocument: ApolloAPI.OperationDocument = .init(
    definition: .init(
      #"mutation updateReply($input: ReplyUpdateInput!) { updateReply(input: $input) }"#
    ))

  public var input: ReplyUpdateInput

  public init(input: ReplyUpdateInput) {
    self.input = input
  }

  public var __variables: Variables? { ["input": input] }

  public struct Data: AffineGraphQL.SelectionSet {
    public let __data: DataDict
    public init(_dataDict: DataDict) { __data = _dataDict }

    public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.Mutation }
    public static var __selections: [ApolloAPI.Selection] { [
      .field("updateReply", Bool.self, arguments: ["input": .variable("input")]),
    ] }

    /// Update a reply content
    public var updateReply: Bool { __data["updateReply"] }
  }
}
@@ -0,0 +1,49 @@
// @generated
// This file was automatically generated and should not be edited.

@_exported import ApolloAPI

public class UploadCommentAttachmentMutation: GraphQLMutation {
  public static let operationName: String = "uploadCommentAttachment"
  public static let operationDocument: ApolloAPI.OperationDocument = .init(
    definition: .init(
      #"mutation uploadCommentAttachment($workspaceId: String!, $docId: String!, $attachment: Upload!) { uploadCommentAttachment( workspaceId: $workspaceId docId: $docId attachment: $attachment ) }"#
    ))

  public var workspaceId: String
  public var docId: String
  public var attachment: Upload

  public init(
    workspaceId: String,
    docId: String,
    attachment: Upload
  ) {
    self.workspaceId = workspaceId
    self.docId = docId
    self.attachment = attachment
  }

  public var __variables: Variables? { [
    "workspaceId": workspaceId,
    "docId": docId,
    "attachment": attachment
  ] }

  public struct Data: AffineGraphQL.SelectionSet {
    public let __data: DataDict
    public init(_dataDict: DataDict) { __data = _dataDict }

    public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.Mutation }
    public static var __selections: [ApolloAPI.Selection] { [
      .field("uploadCommentAttachment", String.self, arguments: [
        "workspaceId": .variable("workspaceId"),
        "docId": .variable("docId"),
        "attachment": .variable("attachment")
      ]),
    ] }

    /// Upload a comment attachment and return the access url
    public var uploadCommentAttachment: String { __data["uploadCommentAttachment"] }
  }
}
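Usage sketch (editor's note, not part of the diff): uploadCommentAttachment declares an Upload! variable, which apollo-ios sends as a multipart request. This sketch assumes the Upload custom scalar is generated as a String typealias (Apollo's default for unconfigured scalars), so the variable value is only a placeholder and the actual bytes travel in a GraphQLFile whose fieldName matches the $attachment variable. The endpoint, IDs, and file content below are hypothetical.

import Apollo
import AffineGraphQL  // assumed generated module name
import Foundation

let client = ApolloClient(url: URL(string: "https://example.affine.server/graphql")!) // hypothetical endpoint

let pngBytes = Data([0x89, 0x50, 0x4E, 0x47]) // stand-in attachment payload
let file = GraphQLFile(
  fieldName: "attachment",        // must match the $attachment variable name
  originalName: "screenshot.png",
  mimeType: "image/png",
  data: pngBytes
)

let mutation = UploadCommentAttachmentMutation(
  workspaceId: "workspace-id",    // hypothetical
  docId: "doc-id",                // hypothetical
  attachment: "screenshot.png"    // placeholder value; the real content goes through `file`
)

client.upload(operation: mutation, files: [file]) { result in
  if case .success(let graphQLResult) = result {
    // The server returns the access URL of the stored attachment.
    print("attachment url:", graphQLResult.data?.uploadCommentAttachment ?? "")
  }
}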
@@ -0,0 +1,114 @@
// @generated
// This file was automatically generated and should not be edited.

@_exported import ApolloAPI

public class GetCopilotDocSessionsQuery: GraphQLQuery {
  public static let operationName: String = "getCopilotDocSessions"
  public static let operationDocument: ApolloAPI.OperationDocument = .init(
    definition: .init(
      #"query getCopilotDocSessions($workspaceId: String!, $docId: String!, $pagination: PaginationInput!, $options: QueryChatHistoriesInput) { currentUser { __typename copilot(workspaceId: $workspaceId) { __typename chats(pagination: $pagination, docId: $docId, options: $options) { __typename ...PaginatedCopilotChats } } } }"#,
      fragments: [CopilotChatHistory.self, CopilotChatMessage.self, PaginatedCopilotChats.self]
    ))

  public var workspaceId: String
  public var docId: String
  public var pagination: PaginationInput
  public var options: GraphQLNullable<QueryChatHistoriesInput>

  public init(
    workspaceId: String,
    docId: String,
    pagination: PaginationInput,
    options: GraphQLNullable<QueryChatHistoriesInput>
  ) {
    self.workspaceId = workspaceId
    self.docId = docId
    self.pagination = pagination
    self.options = options
  }

  public var __variables: Variables? { [
    "workspaceId": workspaceId,
    "docId": docId,
    "pagination": pagination,
    "options": options
  ] }

  public struct Data: AffineGraphQL.SelectionSet {
    public let __data: DataDict
    public init(_dataDict: DataDict) { __data = _dataDict }

    public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.Query }
    public static var __selections: [ApolloAPI.Selection] { [
      .field("currentUser", CurrentUser?.self),
    ] }

    /// Get current user
    public var currentUser: CurrentUser? { __data["currentUser"] }

    /// CurrentUser
    ///
    /// Parent Type: `UserType`
    public struct CurrentUser: AffineGraphQL.SelectionSet {
      public let __data: DataDict
      public init(_dataDict: DataDict) { __data = _dataDict }

      public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.UserType }
      public static var __selections: [ApolloAPI.Selection] { [
        .field("__typename", String.self),
        .field("copilot", Copilot.self, arguments: ["workspaceId": .variable("workspaceId")]),
      ] }

      public var copilot: Copilot { __data["copilot"] }

      /// CurrentUser.Copilot
      ///
      /// Parent Type: `Copilot`
      public struct Copilot: AffineGraphQL.SelectionSet {
        public let __data: DataDict
        public init(_dataDict: DataDict) { __data = _dataDict }

        public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.Copilot }
        public static var __selections: [ApolloAPI.Selection] { [
          .field("__typename", String.self),
          .field("chats", Chats.self, arguments: [
            "pagination": .variable("pagination"),
            "docId": .variable("docId"),
            "options": .variable("options")
          ]),
        ] }

        public var chats: Chats { __data["chats"] }

        /// CurrentUser.Copilot.Chats
        ///
        /// Parent Type: `PaginatedCopilotHistoriesType`
        public struct Chats: AffineGraphQL.SelectionSet {
          public let __data: DataDict
          public init(_dataDict: DataDict) { __data = _dataDict }

          public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.PaginatedCopilotHistoriesType }
          public static var __selections: [ApolloAPI.Selection] { [
            .field("__typename", String.self),
            .fragment(PaginatedCopilotChats.self),
          ] }

          public var pageInfo: PageInfo { __data["pageInfo"] }
          public var edges: [Edge] { __data["edges"] }

          public struct Fragments: FragmentContainer {
            public let __data: DataDict
            public init(_dataDict: DataDict) { __data = _dataDict }

            public var paginatedCopilotChats: PaginatedCopilotChats { _toFragment() }
          }

          public typealias PageInfo = PaginatedCopilotChats.PageInfo

          public typealias Edge = PaginatedCopilotChats.Edge
        }
      }
    }
  }
}
@@ -7,25 +7,30 @@ public class GetCopilotHistoriesQuery: GraphQLQuery {
  public static let operationName: String = "getCopilotHistories"
  public static let operationDocument: ApolloAPI.OperationDocument = .init(
    definition: .init(
      #"query getCopilotHistories($workspaceId: String!, $docId: String, $options: QueryChatHistoriesInput) { currentUser { __typename copilot(workspaceId: $workspaceId) { __typename histories(docId: $docId, options: $options) { __typename sessionId pinned tokens action createdAt messages { __typename id role content streamObjects { __typename type textDelta toolCallId toolName args result } attachments createdAt } } } } }"#
      #"query getCopilotHistories($workspaceId: String!, $pagination: PaginationInput!, $docId: String, $options: QueryChatHistoriesInput) { currentUser { __typename copilot(workspaceId: $workspaceId) { __typename chats(pagination: $pagination, docId: $docId, options: $options) { __typename ...PaginatedCopilotChats } } } }"#,
      fragments: [CopilotChatHistory.self, CopilotChatMessage.self, PaginatedCopilotChats.self]
    ))

  public var workspaceId: String
  public var pagination: PaginationInput
  public var docId: GraphQLNullable<String>
  public var options: GraphQLNullable<QueryChatHistoriesInput>

  public init(
    workspaceId: String,
    pagination: PaginationInput,
    docId: GraphQLNullable<String>,
    options: GraphQLNullable<QueryChatHistoriesInput>
  ) {
    self.workspaceId = workspaceId
    self.pagination = pagination
    self.docId = docId
    self.options = options
  }

  public var __variables: Variables? { [
    "workspaceId": workspaceId,
    "pagination": pagination,
    "docId": docId,
    "options": options
  ] }
@@ -67,92 +72,41 @@ public class GetCopilotHistoriesQuery: GraphQLQuery {
        public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.Copilot }
        public static var __selections: [ApolloAPI.Selection] { [
          .field("__typename", String.self),
          .field("histories", [History].self, arguments: [
          .field("chats", Chats.self, arguments: [
            "pagination": .variable("pagination"),
            "docId": .variable("docId"),
            "options": .variable("options")
          ]),
        ] }

        public var histories: [History] { __data["histories"] }
        public var chats: Chats { __data["chats"] }

        /// CurrentUser.Copilot.History
        /// CurrentUser.Copilot.Chats
        ///
        /// Parent Type: `CopilotHistories`
        public struct History: AffineGraphQL.SelectionSet {
        /// Parent Type: `PaginatedCopilotHistoriesType`
        public struct Chats: AffineGraphQL.SelectionSet {
          public let __data: DataDict
          public init(_dataDict: DataDict) { __data = _dataDict }

          public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.CopilotHistories }
          public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.PaginatedCopilotHistoriesType }
          public static var __selections: [ApolloAPI.Selection] { [
            .field("__typename", String.self),
            .field("sessionId", String.self),
            .field("pinned", Bool.self),
            .field("tokens", Int.self),
            .field("action", String?.self),
            .field("createdAt", AffineGraphQL.DateTime.self),
            .field("messages", [Message].self),
            .fragment(PaginatedCopilotChats.self),
          ] }

          public var sessionId: String { __data["sessionId"] }
          public var pinned: Bool { __data["pinned"] }
          /// The number of tokens used in the session
          public var tokens: Int { __data["tokens"] }
          /// An mark identifying which view to use to display the session
          public var action: String? { __data["action"] }
          public var createdAt: AffineGraphQL.DateTime { __data["createdAt"] }
          public var messages: [Message] { __data["messages"] }
          public var pageInfo: PageInfo { __data["pageInfo"] }
          public var edges: [Edge] { __data["edges"] }

          /// CurrentUser.Copilot.History.Message
          ///
          /// Parent Type: `ChatMessage`
          public struct Message: AffineGraphQL.SelectionSet {
          public struct Fragments: FragmentContainer {
            public let __data: DataDict
            public init(_dataDict: DataDict) { __data = _dataDict }

            public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.ChatMessage }
            public static var __selections: [ApolloAPI.Selection] { [
              .field("__typename", String.self),
              .field("id", AffineGraphQL.ID?.self),
              .field("role", String.self),
              .field("content", String.self),
              .field("streamObjects", [StreamObject]?.self),
              .field("attachments", [String]?.self),
              .field("createdAt", AffineGraphQL.DateTime.self),
            ] }

            public var id: AffineGraphQL.ID? { __data["id"] }
            public var role: String { __data["role"] }
            public var content: String { __data["content"] }
            public var streamObjects: [StreamObject]? { __data["streamObjects"] }
            public var attachments: [String]? { __data["attachments"] }
            public var createdAt: AffineGraphQL.DateTime { __data["createdAt"] }

            /// CurrentUser.Copilot.History.Message.StreamObject
            ///
            /// Parent Type: `StreamObject`
            public struct StreamObject: AffineGraphQL.SelectionSet {
              public let __data: DataDict
              public init(_dataDict: DataDict) { __data = _dataDict }

              public static var __parentType: any ApolloAPI.ParentType { AffineGraphQL.Objects.StreamObject }
              public static var __selections: [ApolloAPI.Selection] { [
                .field("__typename", String.self),
                .field("type", String.self),
                .field("textDelta", String?.self),
                .field("toolCallId", String?.self),
                .field("toolName", String?.self),
                .field("args", AffineGraphQL.JSON?.self),
                .field("result", AffineGraphQL.JSON?.self),
              ] }

              public var type: String { __data["type"] }
              public var textDelta: String? { __data["textDelta"] }
              public var toolCallId: String? { __data["toolCallId"] }
              public var toolName: String? { __data["toolName"] }
              public var args: AffineGraphQL.JSON? { __data["args"] }
              public var result: AffineGraphQL.JSON? { __data["result"] }
            }
            public var paginatedCopilotChats: PaginatedCopilotChats { _toFragment() }
          }

          public typealias PageInfo = PaginatedCopilotChats.PageInfo

          public typealias Edge = PaginatedCopilotChats.Edge
        }
      }
    }
Some files were not shown because too many files have changed in this diff.