Mirror of https://github.com/toeverything/AFFiNE.git (synced 2026-02-04 08:38:34 +00:00)

Compare commits — 46 commits: v0.24.0-be ... l-sun/enab
| Author | SHA1 | Date |
|---|---|---|
| | ac76e5b949 | |
| | 0bc1005b96 | |
| | 34a3c83d84 | |
| | fd717af3db | |
| | 039976ee6d | |
| | e158e11608 | |
| | 18faaa38a0 | |
| | e2156ea135 | |
| | 795bfb2f95 | |
| | 0710da15c6 | |
| | 693ae9c834 | |
| | 9d38f79395 | |
| | 680f3b3006 | |
| | fbf234f9fa | |
| | e9ede5213e | |
| | aea6f81937 | |
| | 66c2bf3151 | |
| | aa052096c1 | |
| | c2f3018eb7 | |
| | dd9d8adbf8 | |
| | 7e0de251cb | |
| | 5c73fc9767 | |
| | a0c22b7d06 | |
| | 072557eba1 | |
| | fda7e9008d | |
| | 678dc15365 | |
| | ef99c376ec | |
| | 65f679c4f0 | |
| | 125564b7d2 | |
| | aa20e7ba66 | |
| | 01e8458075 | |
| | 0d9f6770bf | |
| | 5ef81ba74b | |
| | 4ffa3b5ccc | |
| | 07b9b4fb8d | |
| | f7461dd3d9 | |
| | 343c717930 | |
| | bc1bd59f7b | |
| | c7afc880e6 | |
| | 3cfb0a43af | |
| | 4005f40b16 | |
| | 5fd7dfc8aa | |
| | 009288dee2 | |
| | 52a9c86219 | |
| | af7fefd59a | |
| | 94cf32ead2 | |
@@ -18,11 +18,19 @@ services:
     ports:
       - 6379:6379

-  mailhog:
-    image: mailhog/mailhog:latest
+  # https://mailpit.axllent.org/docs/install/docker/
+  mailpit:
+    image: axllent/mailpit:latest
+    ports:
+      - 1025:1025
+      - 8025:8025
+    environment:
+      MP_MAX_MESSAGES: 5000
+      MP_DATABASE: /data/mailpit.db
+      MP_SMTP_AUTH_ACCEPT_ANY: 1
+      MP_SMTP_AUTH_ALLOW_INSECURE: 1
+    volumes:
+      - mailpit_data:/data

   # https://manual.manticoresearch.com/Starting_the_server/Docker
   manticoresearch:
@@ -87,4 +95,5 @@ networks:
 volumes:
   postgres_data:
   manticoresearch_data:
+  mailpit_data:
   elasticsearch_data:
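The mailpit service above replaces mailhog as the local SMTP sink. A quick way to verify it is to send a message at the mapped SMTP port and inspect it in the web UI on port 8025; the sketch below assumes nodemailer is installed in the dev environment (it is not part of this diff) and uses placeholder addresses.

```ts
// Minimal smoke test for the mailpit service above (hedged sketch: assumes
// nodemailer is available; addresses are placeholders, not from the diff).
import nodemailer from 'nodemailer';

const transport = nodemailer.createTransport({
  host: 'localhost',
  port: 1025, // SMTP port mapped by the compose file
  secure: false, // MP_SMTP_AUTH_ALLOW_INSECURE accepts plain connections
});

await transport.sendMail({
  from: 'dev@example.com',
  to: 'inbox@example.com',
  subject: 'mailpit smoke test',
  text: 'This message should appear in the mailpit UI at http://localhost:8025',
});
```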
@@ -669,7 +669,7 @@
       },
       "scenarios": {
         "type": "object",
-        "description": "Use custom models in scenarios and override default settings.\n@default {\"override_enabled\":false,\"scenarios\":{\"audio_transcribing\":\"gemini-2.5-flash\",\"chat\":\"claude-sonnet-4@20250514\",\"embedding\":\"gemini-embedding-001\",\"image\":\"gpt-image-1\",\"rerank\":\"gpt-4.1\",\"coding\":\"claude-sonnet-4@20250514\",\"complex_text_generation\":\"gpt-4o-2024-08-06\",\"quick_decision_making\":\"gpt-4.1-mini\",\"quick_text_generation\":\"gemini-2.5-flash\",\"polish_and_summarize\":\"gemini-2.5-flash\"}}",
+        "description": "Use custom models in scenarios and override default settings.\n@default {\"override_enabled\":false,\"scenarios\":{\"audio_transcribing\":\"gemini-2.5-flash\",\"chat\":\"claude-sonnet-4@20250514\",\"embedding\":\"gemini-embedding-001\",\"image\":\"gpt-image-1\",\"rerank\":\"gpt-4.1\",\"coding\":\"claude-sonnet-4@20250514\",\"complex_text_generation\":\"gpt-4o-2024-08-06\",\"quick_decision_making\":\"gpt-5-mini\",\"quick_text_generation\":\"gemini-2.5-flash\",\"polish_and_summarize\":\"gemini-2.5-flash\"}}",
         "default": {
           "override_enabled": false,
           "scenarios": {
@@ -680,7 +680,7 @@
             "rerank": "gpt-4.1",
             "coding": "claude-sonnet-4@20250514",
             "complex_text_generation": "gpt-4o-2024-08-06",
-            "quick_decision_making": "gpt-4.1-mini",
+            "quick_decision_making": "gpt-5-mini",
             "quick_text_generation": "gemini-2.5-flash",
             "polish_and_summarize": "gemini-2.5-flash"
           }
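The default block above is what the server falls back to when `override_enabled` is false. A minimal sketch of that lookup logic (the type and function names are hypothetical, not from the AFFiNE codebase):

```ts
// Hedged sketch of scenario-based model resolution; names are hypothetical.
type ScenarioConfig = {
  override_enabled: boolean;
  scenarios: Record<string, string>;
};

const defaults: ScenarioConfig = {
  override_enabled: false,
  scenarios: {
    quick_decision_making: 'gpt-5-mini',
    quick_text_generation: 'gemini-2.5-flash',
    polish_and_summarize: 'gemini-2.5-flash',
  },
};

// User overrides win only when override_enabled is set; otherwise the
// defaults above apply.
function resolveModel(
  scenario: string,
  user?: ScenarioConfig
): string | undefined {
  if (user?.override_enabled && user.scenarios[scenario]) {
    return user.scenarios[scenario];
  }
  return defaults.scenarios[scenario];
}
```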
4 .github/workflows/release.yml vendored

@@ -74,7 +74,7 @@ jobs:
         name: Wait for approval
         with:
           secret: ${{ secrets.GITHUB_TOKEN }}
-          approvers: forehalo,fengmk2
+          approvers: forehalo,fengmk2,darkskygit
           minimum-approvals: 1
           fail-on-denial: true
           issue-title: Please confirm to release docker image
@@ -84,7 +84,7 @@ jobs:
           Tag: ghcr.io/toeverything/affine:${{ needs.prepare.outputs.BUILD_TYPE }}

           > comment with "approve", "approved", "lgtm", "yes" to approve
-          > comment with "deny", "deny", "no" to deny
+          > comment with "deny", "denied", "no" to deny

       - name: Login to GitHub Container Registry
         uses: docker/login-action@v3
@@ -2,6 +2,7 @@
 **/node_modules
 .yarn
+.github/helm
 .git
 .vscode
 .yarnrc.yml
 .docker
24 Cargo.lock generated

@@ -93,7 +93,7 @@ dependencies = [
  "symphonia",
  "thiserror 2.0.12",
  "uuid",
- "windows 0.61.1",
+ "windows 0.61.3",
  "windows-core 0.61.2",
 ]

@@ -1691,7 +1691,7 @@ dependencies = [
  "libc",
  "log",
  "rustversion",
- "windows 0.61.1",
+ "windows 0.61.3",
 ]

 [[package]]
@@ -2284,7 +2284,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "07033963ba89ebaf1584d767badaa2e8fcec21aedea6b8c0346d487d49c28667"
 dependencies = [
  "cfg-if",
- "windows-targets 0.48.5",
+ "windows-targets 0.52.6",
 ]

 [[package]]
@@ -4732,9 +4732,9 @@ dependencies = [

 [[package]]
 name = "tree-sitter"
-version = "0.25.5"
+version = "0.25.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ac5fff5c47490dfdf473b5228039bfacad9d765d9b6939d26bf7cc064c1c7822"
+checksum = "6d7b8994f367f16e6fa14b5aebbcb350de5d7cbea82dc5b00ae997dd71680dd2"
 dependencies = [
  "cc",
  "regex",
@@ -4842,9 +4842,9 @@ dependencies = [

 [[package]]
 name = "tree-sitter-scala"
-version = "0.23.4"
+version = "0.24.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "efde5e68b4736e9eac17bfa296c6f104a26bffab363b365eb898c40a63c15d2f"
+checksum = "7516aeb3d1f40ede8e3045b163e86993b3434514dd06c34c0b75e782d9a0b251"
 dependencies = [
  "cc",
  "tree-sitter-language",
@@ -5334,7 +5334,7 @@ version = "0.1.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
 dependencies = [
- "windows-sys 0.48.0",
+ "windows-sys 0.59.0",
 ]

 [[package]]
@@ -5365,9 +5365,9 @@ dependencies = [

 [[package]]
 name = "windows"
-version = "0.61.1"
+version = "0.61.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c5ee8f3d025738cb02bad7868bbb5f8a6327501e870bf51f1b455b0a2454a419"
+checksum = "9babd3a767a4c1aef6900409f85f5d53ce2544ccdfaa86dad48c91782c6d6893"
 dependencies = [
  "windows-collections",
  "windows-core 0.61.2",
@@ -5477,9 +5477,9 @@ dependencies = [

 [[package]]
 name = "windows-link"
-version = "0.1.1"
+version = "0.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38"
+checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a"

 [[package]]
 name = "windows-numerics"
@@ -93,7 +93,7 @@ tree-sitter-javascript = { version = "0.23" }
 tree-sitter-kotlin-ng = { version = "1.1" }
 tree-sitter-python = { version = "0.23" }
 tree-sitter-rust = { version = "0.24" }
-tree-sitter-scala = { version = "0.23" }
+tree-sitter-scala = { version = "0.24" }
 tree-sitter-typescript = { version = "0.23" }
 uniffi = "0.29"
 url = { version = "2.5" }
@@ -372,6 +372,7 @@ export class DatabaseBlockComponent extends CaptionedBlockComponent<DatabaseBloc
   handleMobileEditing() {
     if (!IS_MOBILE) return;

+    let notifyClosed = true;
     const handler = () => {
       if (
         !this.std
@@ -379,7 +380,8 @@ export class DatabaseBlockComponent extends CaptionedBlockComponent<DatabaseBloc
           .getFlag('enable_mobile_database_editing')
       ) {
         const notification = this.std.getOptional(NotificationProvider);
-        if (notification) {
+        if (notification && notifyClosed) {
+          notifyClosed = false;
           notification.notify({
             title: html`<div
               style=${styleMap({
@@ -390,16 +392,15 @@ export class DatabaseBlockComponent extends CaptionedBlockComponent<DatabaseBloc
               experimental features, or edit it in desktop mode.
             </div>`,
             accent: 'warning',
+            onClose: () => {
+              notifyClosed = true;
+            },
           });
         }
-        this.removeEventListener('click', handler);
       }
     };

-    this.addEventListener('click', handler);
-    this.disposables.add(() => {
-      this.removeEventListener('click', handler);
-    });
+    this.disposables.addFromEvent(this, 'click', handler);
   }

   private readonly dataViewRootLogic = lazy(
@@ -24,7 +24,7 @@ import {
   getPrevContentBlock,
   matchModels,
 } from '@blocksuite/affine-shared/utils';
-import { IS_MOBILE } from '@blocksuite/global/env';
+import { IS_ANDROID, IS_MOBILE } from '@blocksuite/global/env';
 import { BlockSelection, type EditorHost } from '@blocksuite/std';
 import type { BlockModel, Text } from '@blocksuite/store';

@@ -79,6 +79,28 @@ export function mergeWithPrev(editorHost: EditorHost, model: BlockModel) {
     index: lengthBeforeJoin,
     length: 0,
   }).catch(console.error);

+  // Some IMEs, like the Microsoft Swift IME on Android, reset the range after
+  // the text join. For example:
+  //
+  //   $ZERO_WIDTH_FOR_EMPTY_LINE   <--- p1
+  //   |aaa                         <--- p2
+  //
+  // After pressing backspace, during the beforeinput event the native range is
+  // (p1, 1) -> (p2, 0), and after the browser and IME handle the event the
+  // native range is (p1, 1) -> (p1, 1):
+  //
+  //   a|aa   <--- p1
+  //
+  // So we need to set the range again after joining the text.
+  if (IS_ANDROID) {
+    setTimeout(() => {
+      asyncSetInlineRange(editorHost.std, prevBlock, {
+        index: lengthBeforeJoin,
+        length: 0,
+      }).catch(console.error);
+    });
+  }

   return true;
 }
@@ -11,7 +11,7 @@ import {
   getBoundWithRotation,
   intersects,
 } from '@blocksuite/global/gfx';
-import type { BlockStdScope } from '@blocksuite/std';
+import { type BlockStdScope, SurfaceSelection } from '@blocksuite/std';
 import type {
   GfxCompatibleInterface,
   GridManager,
@@ -298,7 +298,10 @@ export class DomRenderer {
       viewportBounds,
       zoom
     );
-    Object.assign(domElement.style, geometricStyles);
+    const zIndexStyle = {
+      'z-index': this.layerManager.getZIndex(elementModel),
+    };
+    Object.assign(domElement.style, geometricStyles, zIndexStyle);
     Object.assign(domElement.style, PLACEHOLDER_RESET_STYLES);

     // Clear classes specific to shapes, if applicable
@@ -335,7 +338,10 @@ export class DomRenderer {
       zoom
     );
     const opacityStyle = getOpacity(elementModel);
-    Object.assign(domElement.style, geometricStyles, opacityStyle);
+    const zIndexStyle = {
+      'z-index': this.layerManager.getZIndex(elementModel),
+    };
+    Object.assign(domElement.style, geometricStyles, opacityStyle, zIndexStyle);

     this._renderElement(elementModel, domElement);
   }
@@ -384,6 +390,36 @@ export class DomRenderer {
         this.refresh();
       })
     );

+    // Workaround for the group rendering reactive update when selection changed
+    let lastSet = new Set<string>();
+    this._disposables.add(
+      this.std.selection.filter$(SurfaceSelection).subscribe(selections => {
+        const groupRelatedSelection = new Set(
+          selections.flatMap(s =>
+            s.elements.flatMap(e => {
+              const element = surfaceModel.getElementById(e);
+              if (
+                element &&
+                (element.type === 'group' || element.groups.length !== 0)
+              ) {
+                return [element.id, ...element.groups.map(g => g.id)];
+              }
+              return [];
+            })
+          )
+        );
+
+        if (lastSet.symmetricDifference(groupRelatedSelection).size !== 0) {
+          lastSet.union(groupRelatedSelection).forEach(g => {
+            this._markElementDirty(g, UpdateType.ELEMENT_UPDATED);
+          });
+          this.refresh();
+        }
+
+        lastSet = groupRelatedSelection;
+      })
+    );
   }

   addOverlay = (overlay: Overlay) => {
@@ -103,12 +103,12 @@ export class MobileKanbanCell extends SignalWatcher(
     this.disposables.add(
       effect(() => {
         const isEditing = this.isSelectionEditing$.value;
-        if (isEditing) {
+        if (isEditing && !this.isEditing$.peek()) {
           this.isEditing$.value = true;
           requestAnimationFrame(() => {
             this._cell.value?.afterEnterEditingMode();
           });
-        } else {
+        } else if (!isEditing && this.isEditing$.peek()) {
           this._cell.value?.beforeExitEditingMode();
           this.isEditing$.value = false;
         }
@@ -105,13 +105,13 @@ export class MobileTableCell extends SignalWatcher(
     this.disposables.add(
       effect(() => {
         const isEditing = this.isSelectionEditing$.value;
-        if (isEditing) {
+        if (isEditing && !this.isEditing$.peek()) {
           this.isEditing$.value = true;
           const cell = this._cell.value;
           requestAnimationFrame(() => {
             cell?.afterEnterEditingMode();
           });
-        } else {
+        } else if (!isEditing && this.isEditing$.peek()) {
           this._cell.value?.beforeExitEditingMode();
           this.isEditing$.value = false;
         }
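Both cells now gate the enter/exit transitions on the current editing state, read with peek() so the effect does not subscribe to isEditing$ and retrigger itself. A standalone sketch of the pattern, using @preact/signals-core, which provides the same signal/effect/peek primitives:

```ts
// Hedged sketch of the peek() guard used above; standalone, not AFFiNE code.
import { effect, signal } from '@preact/signals-core';

const isSelectionEditing = signal(false);
const isEditing = signal(false);

effect(() => {
  const editing = isSelectionEditing.value; // tracked dependency
  // peek() reads isEditing without subscribing, so writing it below cannot
  // re-run this effect and fire the enter/exit handlers twice.
  if (editing && !isEditing.peek()) {
    isEditing.value = true;
    console.log('enter editing mode');
  } else if (!editing && isEditing.peek()) {
    console.log('exit editing mode');
    isEditing.value = false;
  }
});

isSelectionEditing.value = true; // logs "enter editing mode" exactly once
```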
@@ -1,4 +1,3 @@
-import { IS_IOS } from '@blocksuite/global/env';
 import { css } from '@emotion/css';
 import { cssVarV2 } from '@toeverything/theme/v2';

@@ -6,12 +5,6 @@ export const mobileTableViewWrapper = css({
   position: 'relative',
   width: '100%',
   paddingBottom: '4px',
-  /**
-   * Disable horizontal scrolling to prevent crashes on iOS Safari
-   * See https://github.com/toeverything/AFFiNE/pull/12203
-   * and https://github.com/toeverything/blocksuite/pull/8784
-   */
-  overflowX: IS_IOS ? 'hidden' : undefined,
   overflowY: 'hidden',
 });
@@ -1,7 +1,7 @@
 export * from './adapter';
 export * from './brush-tool';
-export * from './element-renderer';
 export * from './eraser-tool';
 export * from './highlighter-tool';
+export * from './renderer';
 export * from './toolbar/configs';
 export * from './toolbar/senior-tool';
69 blocksuite/affine/gfx/brush/src/renderer/dom/brush.ts Normal file

@@ -0,0 +1,69 @@
import {
  DomElementRendererExtension,
  type DomRenderer,
} from '@blocksuite/affine-block-surface';
import type { BrushElementModel } from '@blocksuite/affine-model';
import { DefaultTheme } from '@blocksuite/affine-model';

export const BrushDomRendererExtension = DomElementRendererExtension(
  'brush',
  (
    model: BrushElementModel,
    domElement: HTMLElement,
    renderer: DomRenderer
  ) => {
    const { zoom } = renderer.viewport;
    const [, , w, h] = model.deserializedXYWH;

    // Early return if invalid dimensions
    if (w <= 0 || h <= 0) {
      return;
    }

    // Early return if no commands
    if (!model.commands) {
      return;
    }

    // Clear previous content
    domElement.innerHTML = '';

    // Get color value
    const color = renderer.getColorValue(model.color, DefaultTheme.black, true);

    // Create SVG element
    const svg = document.createElementNS('http://www.w3.org/2000/svg', 'svg');
    svg.style.position = 'absolute';
    svg.style.left = '0';
    svg.style.top = '0';
    svg.style.width = `${w * zoom}px`;
    svg.style.height = `${h * zoom}px`;
    svg.style.overflow = 'visible';
    svg.style.pointerEvents = 'none';
    svg.setAttribute('viewBox', `0 0 ${w} ${h}`);

    // Apply rotation transform
    if (model.rotate !== 0) {
      svg.style.transform = `rotate(${model.rotate}deg)`;
      svg.style.transformOrigin = 'center';
    }

    // Create path element for the brush stroke
    const pathElement = document.createElementNS(
      'http://www.w3.org/2000/svg',
      'path'
    );
    pathElement.setAttribute('d', model.commands);
    pathElement.setAttribute('fill', color);
    pathElement.setAttribute('stroke', 'none');

    svg.append(pathElement);
    domElement.replaceChildren(svg);

    // Set element size and position
    domElement.style.width = `${w * zoom}px`;
    domElement.style.height = `${h * zoom}px`;
    domElement.style.overflow = 'visible';
    domElement.style.pointerEvents = 'none';
  }
);
73 blocksuite/affine/gfx/brush/src/renderer/dom/highlighter.ts Normal file

@@ -0,0 +1,73 @@
import {
  DomElementRendererExtension,
  type DomRenderer,
} from '@blocksuite/affine-block-surface';
import type { HighlighterElementModel } from '@blocksuite/affine-model';
import { DefaultTheme } from '@blocksuite/affine-model';

export const HighlighterDomRendererExtension = DomElementRendererExtension(
  'highlighter',
  (
    model: HighlighterElementModel,
    domElement: HTMLElement,
    renderer: DomRenderer
  ) => {
    const { zoom } = renderer.viewport;
    const [, , w, h] = model.deserializedXYWH;

    // Early return if invalid dimensions
    if (w <= 0 || h <= 0) {
      return;
    }

    // Early return if no commands
    if (!model.commands) {
      return;
    }

    // Clear previous content
    domElement.innerHTML = '';

    // Get color value
    const color = renderer.getColorValue(
      model.color,
      DefaultTheme.hightlighterColor,
      true
    );

    // Create SVG element
    const svg = document.createElementNS('http://www.w3.org/2000/svg', 'svg');
    svg.style.position = 'absolute';
    svg.style.left = '0';
    svg.style.top = '0';
    svg.style.width = `${w * zoom}px`;
    svg.style.height = `${h * zoom}px`;
    svg.style.overflow = 'visible';
    svg.style.pointerEvents = 'none';
    svg.setAttribute('viewBox', `0 0 ${w} ${h}`);

    // Apply rotation transform
    if (model.rotate !== 0) {
      svg.style.transform = `rotate(${model.rotate}deg)`;
      svg.style.transformOrigin = 'center';
    }

    // Create path element for the highlighter stroke
    const pathElement = document.createElementNS(
      'http://www.w3.org/2000/svg',
      'path'
    );
    pathElement.setAttribute('d', model.commands);
    pathElement.setAttribute('fill', color);
    pathElement.setAttribute('stroke', 'none');

    svg.append(pathElement);
    domElement.replaceChildren(svg);

    // Set element size and position
    domElement.style.width = `${w * zoom}px`;
    domElement.style.height = `${h * zoom}px`;
    domElement.style.overflow = 'visible';
    domElement.style.pointerEvents = 'none';
  }
);
2 blocksuite/affine/gfx/brush/src/renderer/dom/index.ts Normal file

@@ -0,0 +1,2 @@
export { BrushDomRendererExtension } from './brush';
export { HighlighterDomRendererExtension } from './highlighter';

@@ -0,0 +1,2 @@
export { BrushElementRendererExtension } from './brush';
export { HighlighterElementRendererExtension } from './highlighter';

2 blocksuite/affine/gfx/brush/src/renderer/index.ts Normal file

@@ -0,0 +1,2 @@
export * from './dom';
export * from './element';
@@ -5,9 +5,14 @@ import {

 import { BrushTool } from './brush-tool';
 import { effects } from './effects';
-import { BrushElementRendererExtension } from './element-renderer';
 import { EraserTool } from './eraser-tool';
 import { HighlighterTool } from './highlighter-tool';
+import {
+  BrushDomRendererExtension,
+  BrushElementRendererExtension,
+  HighlighterDomRendererExtension,
+  HighlighterElementRendererExtension,
+} from './renderer';
 import {
   brushToolbarExtension,
   highlighterToolbarExtension,
@@ -30,6 +35,9 @@ export class BrushViewExtension extends ViewExtensionProvider {
     context.register(HighlighterTool);

     context.register(BrushElementRendererExtension);
+    context.register(BrushDomRendererExtension);
+    context.register(HighlighterElementRendererExtension);
+    context.register(HighlighterDomRendererExtension);

     context.register(brushToolbarExtension);
     context.register(highlighterToolbarExtension);
@@ -1,11 +0,0 @@
-import { DomElementRendererExtension } from '@blocksuite/affine-block-surface';
-
-import { connectorDomRenderer } from './connector-dom/index.js';
-
-/**
- * Extension to register the DOM-based renderer for 'connector' elements.
- */
-export const ConnectorDomRendererExtension = DomElementRendererExtension(
-  'connector',
-  connectorDomRenderer
-);
@@ -1,9 +1,8 @@
 export * from './adapter';
 export * from './connector-manager';
 export * from './connector-tool';
-export * from './element-renderer';
-export { ConnectorDomRendererExtension } from './element-renderer/connector-dom';
 export * from './element-transform';
+export * from './renderer';
 export * from './text';
 export * from './toolbar/config';
 export * from './toolbar/quick-tool';
@@ -1,14 +1,18 @@
-import type { DomRenderer } from '@blocksuite/affine-block-surface';
+import {
+  DomElementRendererExtension,
+  type DomRenderer,
+} from '@blocksuite/affine-block-surface';
 import {
   type ConnectorElementModel,
   ConnectorMode,
   DefaultTheme,
+  type LocalConnectorElementModel,
   type PointStyle,
 } from '@blocksuite/affine-model';
 import { PointLocation, SVGPathBuilder } from '@blocksuite/global/gfx';

-import { isConnectorWithLabel } from '../../connector-manager.js';
-import { DEFAULT_ARROW_SIZE } from '../utils.js';
+import { isConnectorWithLabel } from '../connector-manager';
+import { DEFAULT_ARROW_SIZE } from './utils';

 interface PathBounds {
   minX: number;
@@ -221,8 +225,8 @@ function renderConnectorLabel(
  * @param element - The HTMLElement to apply the connector's styles to.
  * @param renderer - The main DOMRenderer instance, providing access to viewport and color utilities.
  */
-export const connectorDomRenderer = (
-  model: ConnectorElementModel,
+export const connectorBaseDomRenderer = (
+  model: ConnectorElementModel | LocalConnectorElementModel,
   element: HTMLElement,
   renderer: DomRenderer
 ): void => {
@@ -358,10 +362,21 @@ export const connectorBaseDomRenderer = (
   element.style.height = `${model.h * zoom}px`;
   element.style.overflow = 'visible';
   element.style.pointerEvents = 'none';

   // Set z-index for layering
   element.style.zIndex = renderer.layerManager.getZIndex(model).toString();
-
-  // Render label if present
-  renderConnectorLabel(model, element, renderer, zoom);
 };
+
+export const connectorDomRenderer = (
+  model: ConnectorElementModel,
+  element: HTMLElement,
+  renderer: DomRenderer
+): void => {
+  connectorBaseDomRenderer(model, element, renderer);
+  renderConnectorLabel(model, element, renderer, renderer.viewport.zoom);
+};
+
+/**
+ * Extension to register the DOM-based renderer for 'connector' elements.
+ */
+export const ConnectorDomRendererExtension = DomElementRendererExtension(
+  'connector',
+  connectorDomRenderer
+);
@@ -25,7 +25,7 @@ import {
 } from '@blocksuite/global/gfx';
 import { deltaInsertsToChunks } from '@blocksuite/std/inline';

-import { isConnectorWithLabel } from '../connector-manager.js';
+import { isConnectorWithLabel } from '../connector-manager';
 import {
   DEFAULT_ARROW_SIZE,
   getArrowOptions,
@@ -33,7 +33,7 @@ import {
   renderCircle,
   renderDiamond,
   renderTriangle,
-} from './utils.js';
+} from './utils';

 export const connector: ElementRenderer<
   ConnectorElementModel | LocalConnectorElementModel
2 blocksuite/affine/gfx/connector/src/renderer/index.ts Normal file

@@ -0,0 +1,2 @@
export * from './dom-renderer';
export * from './element-renderer';
@@ -6,9 +6,11 @@ import {
 import { ConnectionOverlay } from './connector-manager';
 import { ConnectorTool } from './connector-tool';
 import { effects } from './effects';
-import { ConnectorElementRendererExtension } from './element-renderer';
-import { ConnectorDomRendererExtension } from './element-renderer/connector-dom';
 import { ConnectorFilter } from './element-transform';
+import {
+  ConnectorDomRendererExtension,
+  ConnectorElementRendererExtension,
+} from './renderer';
 import { connectorToolbarExtension } from './toolbar/config';
 import { connectorQuickTool } from './toolbar/quick-tool';
 import { ConnectorElementView, ConnectorInteraction } from './view/view';
@@ -1,6 +1,6 @@
 export * from './adapter';
 export * from './command';
-export * from './element-renderer';
 export * from './element-view';
+export * from './renderer';
 export * from './text/text';
 export * from './toolbar/config';
62 blocksuite/affine/gfx/group/src/renderer/dom-renderer.ts Normal file

@@ -0,0 +1,62 @@
import { DomElementRendererExtension } from '@blocksuite/affine-block-surface';
import { FontWeight, type GroupElementModel } from '@blocksuite/affine-model';

import {
  GROUP_TITLE_FONT,
  GROUP_TITLE_FONT_SIZE,
  GROUP_TITLE_PADDING,
} from './consts';
import { titleRenderParams } from './utils';

export const GroupDomRendererExtension = DomElementRendererExtension(
  'group',
  (model: GroupElementModel, domElement, renderer) => {
    const { zoom } = renderer.viewport;
    const [, , w, h] = model.deserializedXYWH;

    const renderParams = titleRenderParams(model, zoom);
    model.externalXYWH = renderParams.titleBound.serialize();

    domElement.innerHTML = '';
    domElement.style.outlineColor = '';
    domElement.style.outlineWidth = '';
    domElement.style.outlineStyle = '';

    const elements = renderer.provider.selectedElements?.() || [];

    const renderTitle = () => {
      const { text } = renderParams;
      const titleElement = document.createElement('div');
      titleElement.style.transform = `translate(0, -100%)`;
      titleElement.style.fontFamily = GROUP_TITLE_FONT;
      titleElement.style.fontWeight = `${FontWeight.Regular}`;
      titleElement.style.fontStyle = 'normal';
      titleElement.style.fontSize = `${GROUP_TITLE_FONT_SIZE}px`;
      titleElement.style.color = renderer.getPropertyValue('--affine-blue');
      titleElement.style.textAlign = 'left';
      titleElement.style.padding = `${GROUP_TITLE_PADDING[0]}px ${GROUP_TITLE_PADDING[1]}px`;
      titleElement.textContent = text;
      domElement.replaceChildren(titleElement);
    };

    if (elements.includes(model.id)) {
      if (model.showTitle) {
        renderTitle();
      } else {
        domElement.style.outlineColor =
          renderer.getPropertyValue('--affine-blue');
        domElement.style.outlineWidth = '2px';
        domElement.style.outlineStyle = 'solid';
      }
    } else if (model.childElements.some(child => elements.includes(child.id))) {
      domElement.style.outlineColor = '#8FD1FF';
      domElement.style.outlineWidth = '2px';
      domElement.style.outlineStyle = 'solid';
    }

    domElement.style.width = `${w * zoom}px`;
    domElement.style.height = `${h * zoom}px`;
    domElement.style.overflow = 'visible';
    domElement.style.pointerEvents = 'none';
  }
);
@@ -6,7 +6,7 @@ import {
 import type { GroupElementModel } from '@blocksuite/affine-model';
 import { Bound } from '@blocksuite/global/gfx';

-import { titleRenderParams } from './utils.js';
+import { titleRenderParams } from './utils';

 export const group: ElementRenderer<GroupElementModel> = (
   model,
2 blocksuite/affine/gfx/group/src/renderer/index.ts Normal file

@@ -0,0 +1,2 @@
export * from './dom-renderer';
export * from './element-renderer';
@@ -13,7 +13,7 @@ import {
   GROUP_TITLE_FONT_SIZE,
   GROUP_TITLE_OFFSET,
   GROUP_TITLE_PADDING,
-} from './consts.js';
+} from './consts';

 export function titleRenderParams(group: GroupElementModel, zoom: number) {
   let text = group.title.toString().trim();
@@ -21,7 +21,7 @@ import {
   GROUP_TITLE_FONT_SIZE,
   GROUP_TITLE_OFFSET,
   GROUP_TITLE_PADDING,
-} from '../element-renderer/consts';
+} from '../renderer/consts';

 export function mountGroupTitleEditor(
   group: GroupElementModel,
@@ -4,9 +4,12 @@ import {
 } from '@blocksuite/affine-ext-loader';

 import { effects } from './effects';
-import { GroupElementRendererExtension } from './element-renderer';
 import { GroupElementView, GroupInteraction } from './element-view';
 import { GroupInteractionExtension } from './interaction-ext';
+import {
+  GroupDomRendererExtension,
+  GroupElementRendererExtension,
+} from './renderer';
 import { groupToolbarExtension } from './toolbar/config';

 export class GroupViewExtension extends ViewExtensionProvider {
@@ -20,6 +23,7 @@ export class GroupViewExtension extends ViewExtensionProvider {
   override setup(context: ViewExtensionContext) {
     super.setup(context);
     context.register(GroupElementRendererExtension);
+    context.register(GroupDomRendererExtension);
     context.register(GroupElementView);
     if (this.isEdgeless(context.scope)) {
       context.register(groupToolbarExtension);
@@ -1,7 +1,7 @@
 export * from './adapter';
-export * from './element-renderer';
 export * from './indicator-overlay';
 export * from './interactivity';
+export * from './renderer';
 export * from './toolbar/config';
 export * from './toolbar/senior-tool';
 export * from './utils';
65 blocksuite/affine/gfx/mindmap/src/renderer/dom-renderer.ts Normal file

@@ -0,0 +1,65 @@
import { DomElementRendererExtension } from '@blocksuite/affine-block-surface';
import {
  connectorBaseDomRenderer,
  ConnectorPathGenerator,
} from '@blocksuite/affine-gfx-connector';
import type {
  MindmapElementModel,
  MindmapNode,
} from '@blocksuite/affine-model';
import type { GfxModel } from '@blocksuite/std/gfx';

export const MindmapDomRendererExtension = DomElementRendererExtension(
  'mindmap',
  (model: MindmapElementModel, domElement, renderer) => {
    const bound = model.elementBound;

    const { zoom } = renderer.viewport;
    // Set element size and position
    domElement.style.width = `${bound.w * zoom}px`;
    domElement.style.height = `${bound.h * zoom}px`;
    domElement.style.overflow = 'visible';
    domElement.style.pointerEvents = 'none';

    const newChildren: HTMLDivElement[] = [];

    const traverse = (node: MindmapNode) => {
      const connectors = model.getConnectors(node);
      if (!connectors) return;

      connectors.reverse().forEach(result => {
        const { connector, outdated } = result;
        const elementGetter = (id: string) =>
          model.surface.getElementById(id) ??
          (model.surface.store.getModelById(id) as GfxModel);

        if (outdated) {
          ConnectorPathGenerator.updatePath(connector, null, elementGetter);
        }

        const connectorContainer = document.createElement('div');
        connectorContainer.style.position = 'absolute';
        connectorContainer.style.transformOrigin = 'top left';
        const geometricStyles = {
          left: `${(connector.x - bound.x) * zoom}px`,
          top: `${(connector.y - bound.y) * zoom}px`,
        };
        const opacityStyle = { opacity: node.element.opacity };
        Object.assign(connectorContainer.style, geometricStyles, opacityStyle);

        connectorBaseDomRenderer(connector, connectorContainer, renderer);
        newChildren.push(connectorContainer);
      });

      if (node.detail.collapsed) {
        return;
      } else {
        node.children.forEach(traverse);
      }
    };

    model.tree && traverse(model.tree);

    domElement.replaceChildren(...newChildren);
  }
);
2 blocksuite/affine/gfx/mindmap/src/renderer/index.ts Normal file

@@ -0,0 +1,2 @@
export * from './dom-renderer';
export * from './element-renderer';
@@ -4,9 +4,12 @@ import {
 } from '@blocksuite/affine-ext-loader';

 import { effects } from './effects';
-import { MindmapElementRendererExtension } from './element-renderer';
 import { MindMapIndicatorOverlay } from './indicator-overlay';
 import { MindMapDragExtension } from './interactivity';
+import {
+  MindmapDomRendererExtension,
+  MindmapElementRendererExtension,
+} from './renderer';
 import {
   mindmapToolbarExtension,
   shapeMindmapToolbarExtension,
@@ -25,6 +28,7 @@ export class MindmapViewExtension extends ViewExtensionProvider {
   override setup(context: ViewExtensionContext) {
     super.setup(context);
     context.register(MindmapElementRendererExtension);
+    context.register(MindmapDomRendererExtension);
     context.register(mindMapSeniorTool);
     context.register(mindmapToolbarExtension);
     context.register(shapeMindmapToolbarExtension);
@@ -1,4 +1,7 @@
-import { EdgelessLegacySlotIdentifier } from '@blocksuite/affine-block-surface';
+import {
+  DefaultTool,
+  EdgelessLegacySlotIdentifier,
+} from '@blocksuite/affine-block-surface';
 import { on } from '@blocksuite/affine-shared/utils';
 import type { PointerEventState } from '@blocksuite/std';
 import { BaseTool, MouseButton, type ToolOptions } from '@blocksuite/std/gfx';
@@ -64,12 +67,15 @@ export class PanTool extends BaseTool<PanToolOption> {
     const { toolType, options: originalToolOptions } = currentTool;
     const selectionToRestore = this.gfx.selection.surfaceSelections;
     if (!toolType) return;
+    // restore to DefaultTool if previous tool is CopilotTool
+    if (toolType.toolName === 'copilot') {
+      this.controller.setTool(DefaultTool);
+      return;
+    }

     let finalOptions: ToolOptions<BaseTool<any>> | undefined =
       originalToolOptions;
-    const PRESENT_TOOL_NAME = 'frameNavigator';

-    if (toolType.toolName === PRESENT_TOOL_NAME) {
+    if (toolType.toolName === 'frameNavigator') {
       // When restoring PresentTool (frameNavigator) after a temporary pan (e.g., via middle mouse button),
       // set 'restoredAfterPan' to true. This allows PresentTool to avoid an unwanted viewport reset
       // and maintain the panned position.
@@ -93,15 +99,17 @@ export class PanTool extends BaseTool<PanToolOption> {
       });
     }

-    this.controller.setTool(PanTool, {
-      panning: true,
+    requestAnimationFrame(() => {
+      this.controller.setTool(PanTool, {
+        panning: true,
+      });
     });

     const dispose = on(document, 'pointerup', evt => {
       if (evt.button === MouseButton.MIDDLE) {
         restoreToPrevious();
-        dispose();
       }
+      dispose();
     });

     return false;
@@ -1,2 +1 @@
-export * from './highlighter';
 export * from './shape';
@@ -1,4 +1,5 @@
 import type { DomRenderer } from '@blocksuite/affine-block-surface';
+import { isRTL } from '@blocksuite/affine-gfx-text';
 import type { ShapeElementModel } from '@blocksuite/affine-model';
 import { DefaultTheme } from '@blocksuite/affine-model';
 import { SVGShapeBuilder } from '@blocksuite/global/gfx';
@@ -99,6 +100,8 @@ export const shapeDomRenderer = (
   const unscaledWidth = model.w;
   const unscaledHeight = model.h;

+  const newChildren: Element[] = [];
+
   const fillColor = renderer.getColorValue(
     model.fillColor,
     DefaultTheme.shapeFillColor,
@@ -170,8 +173,7 @@ export const shapeDomRenderer = (
     }
     svg.append(polygon);

-    // Replace existing children to avoid memory leaks
-    element.replaceChildren(svg);
+    newChildren.push(svg);
   } else {
     // Standard rendering for other shapes (e.g., rect, ellipse)
     // innerHTML was already cleared by applyShapeSpecificStyles if necessary
@@ -179,9 +181,42 @@ export const shapeDomRenderer = (
     applyBorderStyles(model, element, strokeColor, zoom); // Uses standard CSS border
   }

-  applyTransformStyles(model, element);
+  if (model.textDisplay && model.text) {
+    const str = model.text.toString();
+    const textElement = document.createElement('div');
+    if (isRTL(str)) {
+      textElement.dir = 'rtl';
+    }
+    textElement.style.position = 'absolute';
+    textElement.style.inset = '0';
+    textElement.style.display = 'flex';
+    textElement.style.flexDirection = 'column';
+    textElement.style.justifyContent =
+      model.textVerticalAlign === 'center'
+        ? 'center'
+        : model.textVerticalAlign === 'top'
+          ? 'flex-start'
+          : 'flex-end';
+    textElement.style.whiteSpace = 'pre-wrap';
+    textElement.style.wordBreak = 'break-word';
+    textElement.style.textAlign = model.textAlign;
+    textElement.style.alignmentBaseline = 'alphabetic';
+    textElement.style.fontFamily = model.fontFamily;
+    textElement.style.fontSize = `${model.fontSize * zoom}px`;
+    textElement.style.fontWeight = model.fontWeight;
+    textElement.style.color = renderer.getColorValue(
+      model.color,
+      DefaultTheme.shapeTextColor,
+      true
+    );
+    textElement.textContent = str;
+    newChildren.push(textElement);
+  }
+
+  element.style.zIndex = renderer.layerManager.getZIndex(model).toString();
+  // Replace existing children to avoid memory leaks
+  element.replaceChildren(...newChildren);
+
+  applyTransformStyles(model, element);

   manageClassNames(model, element);
   applyShadowStyles(model, element, renderer);
@@ -4,10 +4,7 @@ import {
 } from '@blocksuite/affine-ext-loader';

 import { effects } from './effects';
-import {
-  HighlighterElementRendererExtension,
-  ShapeElementRendererExtension,
-} from './element-renderer';
+import { ShapeElementRendererExtension } from './element-renderer';
 import { ShapeDomRendererExtension } from './element-renderer/shape-dom';
 import { ShapeElementView, ShapeViewInteraction } from './element-view';
 import { ShapeTool } from './shape-tool';
@@ -24,7 +21,6 @@ export class ShapeViewExtension extends ViewExtensionProvider {
   override setup(context: ViewExtensionContext) {
     super.setup(context);
     if (this.isEdgeless(context.scope)) {
-      context.register(HighlighterElementRendererExtension);
       context.register(ShapeElementRendererExtension);
       context.register(ShapeDomRendererExtension);
       context.register(ShapeElementView);
@@ -131,7 +131,7 @@ export class HighlighterElementModel extends GfxPrimitiveElementModel<Highlighte
     instance['_local'].delete('commands');
   })
   @derive((lineWidth: number, instance: Instance) => {
-    const oldBound = instance.elementBound;
+    const oldBound = Bound.fromXYWH(instance.deserializedXYWH);

     if (
       lineWidth === instance.lineWidth ||
@@ -65,6 +65,98 @@ export class Unzip {
     this.unzipped = fflate.unzipSync(new Uint8Array(await blob.arrayBuffer()));
   }

+  private fixFileNameEncoding(fileName: string): string {
+    try {
+      // check if contains non-ASCII characters
+      if (fileName.split('').some(char => char.charCodeAt(0) > 127)) {
+        // try different encodings
+        const fixedName = this.tryDifferentEncodings(fileName);
+        if (fixedName && fixedName !== fileName) {
+          return fixedName;
+        }
+      }
+      return fileName;
+    } catch {
+      return fileName;
+    }
+  }
+
+  // try different encodings
+  private tryDifferentEncodings(fileName: string): string | null {
+    try {
+      // convert string to bytes
+      const bytes = new Uint8Array(fileName.length);
+      for (let i = 0; i < fileName.length; i++) {
+        bytes[i] = fileName.charCodeAt(i);
+      }
+
+      // try different encodings
+      // The macOS system zip tool creates archives with UTF-8 encoded filenames.
+      // However, this implementation doesn't strictly adhere to the ZIP specification.
+      // Simply forcing UTF-8 encoding when unzipping should resolve filename corruption issues.
+      const encodings = ['utf-8'];
+
+      for (const encoding of encodings) {
+        try {
+          const decoder = new TextDecoder(encoding);
+          const result = decoder.decode(bytes);
+
+          // check if decoded result is valid
+          if (result && this.isValidDecodedString(result)) {
+            return result;
+          }
+        } catch {
+          // ignore encoding error, try next encoding
+        }
+      }
+    } catch {
+      // ignore conversion error
+    }
+
+    return null;
+  }
+
+  // check if decoded string is valid
+  private isValidDecodedString(str: string): boolean {
+    // check if contains control characters
+    const controlCharCodes = new Set([
+      0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, // \x00-\x08
+      0x0b, 0x0c, // \x0B, \x0C
+      0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
+      0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, // \x0E-\x1F
+      0x7f, // \x7F
+    ]);
+
+    return !str
+      .split('')
+      .some(char => controlCharCodes.has(char.charCodeAt(0)));
+  }
+
   *[Symbol.iterator]() {
     const keys = Object.keys(this.unzipped ?? {});
     let index = 0;
@@ -81,7 +173,10 @@ export class Unzip {
     const content = new File([this.unzipped![path]], fileName, {
       type: mime ?? '',
     }) as Blob;
-    yield { path, content, index };
+
+    const fixedPath = this.fixFileNameEncoding(path);
+
+    yield { path: fixedPath, content, index };
     index++;
   }
 }
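The fix works because fflate surfaces a filename without the UTF-8 flag as one character per raw byte, so rebuilding those bytes and decoding them as UTF-8 recovers the intended name. A standalone sketch of the round trip (the mangled sample is illustrative, not data from the diff):

```ts
// Standalone sketch of the re-decoding trick above; the mangled sample is
// '文档.md' as UTF-8 bytes surfaced one-char-per-byte, not data from the diff.
const mangled = '\u00e6\u0096\u0087\u00e6\u00a1\u00a3.md';

// Rebuild the raw bytes: each char code is one original byte (<= 0xff).
const bytes = Uint8Array.from(mangled, c => c.charCodeAt(0));

// Force UTF-8, as the macOS zip tool writes names in UTF-8 without the flag.
const fixed = new TextDecoder('utf-8').decode(bytes);

console.log(fixed); // => '文档.md'
```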
@@ -91,15 +91,11 @@ export class KeyboardControl {
     const disposables = new DisposableGroup();
     if (IS_ANDROID) {
       disposables.add(
-        this._dispatcher.add(
-          'beforeInput',
-          ctx => {
-            if (this.composition) return false;
-            const binding = androidBindKeymapPatch(keymap);
-            return binding(ctx);
-          },
-          options
-        )
+        this._dispatcher.add('beforeInput', ctx => {
+          if (this.composition) return false;
+          const binding = androidBindKeymapPatch(keymap);
+          return binding(ctx);
+        })
       );
     }
@@ -226,6 +226,18 @@ export class UIEventDispatcher extends LifeCycleWatcher {
         this._setActive(false);
       }
     });
+    // When the selection is outside the host, the event dispatcher should be inactive
+    this.disposables.addFromEvent(document, 'selectionchange', () => {
+      const sel = document.getSelection();
+      if (!sel || sel.rangeCount === 0) return;
+      const { anchorNode, focusNode } = sel;
+      if (
+        (anchorNode && !this.host.contains(anchorNode)) ||
+        (focusNode && !this.host.contains(focusNode))
+      ) {
+        this._setActive(false);
+      }
+    });
   }

   private _buildEventScopeBySelection(name: EventName) {
@@ -104,7 +104,7 @@ export function bindKeymap(
   };
 }

-// In Android, the keypress event dose not contain
+// In some Android IMEs, the keypress event does not contain
 // the information about what key is pressed. See
 // https://stackoverflow.com/a/68188679
 // https://stackoverflow.com/a/66724830
@@ -57,7 +57,7 @@ export type CanvasLayer = BaseLayer<GfxPrimitiveElementModel> & {
   type: 'canvas';

   /**
-   * The z-index of canvas layer.
+   * The z-index of the first element in this canvas layer.
    *
    * A canvas layer renders all the elements in a single canvas,
    * this property is used to render the canvas with correct z-index.
@@ -165,8 +165,7 @@ export class LayerManager extends GfxExtension {
       ];
       curLayer.zIndex = currentCSSZindex;
       layers.push(curLayer as LayerManager['layers'][number]);
-      currentCSSZindex +=
-        curLayer.type === 'block' ? curLayer.elements.length : 1;
+      currentCSSZindex += curLayer.elements.length;
     }
   };
   const addLayer = (type: 'canvas' | 'block') => {
@@ -1,3 +1,4 @@
+import { IS_ANDROID } from '@blocksuite/global/env';
 import type { BaseTextAttributes } from '@blocksuite/store';

 import type { InlineEditor } from '../inline-editor.js';
@@ -41,11 +42,10 @@ export class EventService<TextAttributes extends BaseTextAttributes> {
     }
   };

-  private readonly _onBeforeInput = (event: InputEvent) => {
+  private readonly _onBeforeInput = async (event: InputEvent) => {
     const range = this.editor.rangeService.getNativeRange();
     if (
       this.editor.isReadonly ||
-      this._isComposing ||
       !range ||
       !this._isRangeCompletelyInRoot(range)
     )
@@ -54,33 +54,29 @@ export class EventService<TextAttributes extends BaseTextAttributes> {
     let inlineRange = this.editor.toInlineRange(range);
     if (!inlineRange) return;

+    if (this._isComposing) {
+      if (IS_ANDROID && event.inputType === 'insertCompositionText') {
+        this._compositionInlineRange = inlineRange;
+      }
+      return;
+    }
+
     let ifHandleTargetRange = true;

-    if (event.inputType.startsWith('delete')) {
-      if (
-        isInEmbedGap(range.commonAncestorContainer) &&
-        inlineRange.length === 0 &&
-        inlineRange.index > 0
-      ) {
-        inlineRange = {
-          index: inlineRange.index - 1,
-          length: 1,
-        };
-        ifHandleTargetRange = false;
-      } else if (
-        isInEmptyLine(range.commonAncestorContainer) &&
-        inlineRange.length === 0 &&
-        inlineRange.index > 0
-        // eslint-disable-next-line sonarjs/no-duplicated-branches
-      ) {
-        // do not use target range when deleting across lines
-        // https://github.com/toeverything/blocksuite/issues/5381
-        inlineRange = {
-          index: inlineRange.index - 1,
-          length: 1,
-        };
-        ifHandleTargetRange = false;
-      }
-    }
+    if (
+      event.inputType.startsWith('delete') &&
+      (isInEmbedGap(range.commonAncestorContainer) ||
+        // https://github.com/toeverything/blocksuite/issues/5381
+        isInEmptyLine(range.commonAncestorContainer)) &&
+      inlineRange.length === 0 &&
+      inlineRange.index > 0
+    ) {
+      // do not use target range when deleting across lines
+      inlineRange = {
+        index: inlineRange.index - 1,
+        length: 1,
+      };
+      ifHandleTargetRange = false;
+    }

     if (ifHandleTargetRange) {
@@ -97,11 +93,24 @@ export class EventService<TextAttributes extends BaseTextAttributes> {
       }
     }

     if (!inlineRange) return;

     event.preventDefault();

+    if (IS_ANDROID) {
+      this.editor.rerenderWholeEditor();
+      await this.editor.waitForUpdate();
+      if (
+        event.inputType === 'deleteContentBackward' &&
+        !(inlineRange.index === 0 && inlineRange.length === 0)
+      ) {
+        // When pressing backspace at offset 1, two characters would be removed,
+        // because we mock a backspace key event in `androidBindKeymapPatch`
+        // (blocksuite/framework/std/src/event/keymap.ts), so we stop the event
+        // propagation to prevent the double character removal.
+        event.stopPropagation();
+      }
+    }

     const ctx: BeforeinputHookCtx<TextAttributes> = {
       inlineEditor: this.editor,
       raw: event,
@@ -346,11 +355,9 @@ export class EventService<TextAttributes extends BaseTextAttributes> {
       return;
     }

-    this.editor.disposables.addFromEvent(
-      eventSource,
-      'beforeinput',
-      this._onBeforeInput
-    );
+    this.editor.disposables.addFromEvent(eventSource, 'beforeinput', e => {
+      this._onBeforeInput(e).catch(console.error);
+    });
     this.editor.disposables.addFromEvent(
       eventSource,
       'compositionstart',
@@ -12,11 +12,7 @@ import type { SurfaceBlockModel } from '../gfx/model/surface/surface-model.js';

 export function getLayerEndZIndex(layers: Layer[], layerIndex: number) {
   const layer = layers[layerIndex];
-  return layer
-    ? layer.type === 'block'
-      ? layer.zIndex + layer.elements.length - 1
-      : layer.zIndex
-    : 0;
+  return layer ? layer.zIndex + layer.elements.length - 1 : 0;
 }

 export function updateLayersZIndex(layers: Layer[], startIdx: number) {
@@ -27,7 +23,7 @@ export function updateLayersZIndex(layers: Layer[], startIdx: number) {
     const curLayer = layers[i];

     curLayer.zIndex = curIndex;
-    curIndex += curLayer.type === 'block' ? curLayer.elements.length : 1;
+    curIndex += curLayer.elements.length;
   }
 }
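Both helpers now treat canvas layers like block layers: a layer occupies one z-index slot per element rather than a single slot, which is why the test below now expects 5 instead of 4. A self-contained sketch of the accounting (the Layer shape here is illustrative, not the real AFFiNE type):

```ts
// Hedged sketch of the z-index accounting after this change.
type Layer = { type: 'canvas' | 'block'; elements: string[]; zIndex: number };

function assignZIndices(layers: Layer[], start = 1): void {
  let current = start;
  for (const layer of layers) {
    layer.zIndex = current;
    // Previously a canvas layer advanced the counter by only 1; now every
    // layer reserves one slot per element, so the next layer always starts
    // above everything rendered by this one (including DOM-rendered elements).
    current += layer.elements.length;
  }
}

const layers: Layer[] = [
  { type: 'canvas', elements: ['a', 'b', 'c'], zIndex: 0 },
  { type: 'block', elements: ['d'], zIndex: 0 },
];
assignZIndices(layers);
console.log(layers.map(l => l.zIndex)); // => [1, 4]
```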
@@ -200,7 +200,7 @@ test('layer zindex should update correctly when elements changed', async () => {
     expect(service.layer.layers[1].zIndex).toBe(3);

     expect(service.layer.layers[2].type).toBe('block');
-    expect(service.layer.layers[2].zIndex).toBe(4);
+    expect(service.layer.layers[2].zIndex).toBe(5);
   };
   assert2StepState();
@@ -9,6 +9,7 @@
   "**/node_modules",
   ".yarn",
+  ".github/helm",
   ".git",
   ".vscode",
   ".yarnrc.yml",
   ".docker",
@@ -82,7 +82,7 @@
   "husky": "^9.1.7",
   "lint-staged": "^16.0.0",
   "msw": "^2.6.8",
-  "oxlint": "^1.1.0",
+  "oxlint": "^1.15.0",
   "prettier": "^3.4.2",
   "semver": "^7.6.3",
   "serve": "^14.2.4",
@@ -1,5 +1,7 @@
 use std::collections::HashSet;

+use tiktoken_rs::{get_bpe_from_tokenizer, tokenizer::Tokenizer as TiktokenTokenizer};
+
 #[napi]
 pub struct Tokenizer {
   inner: tiktoken_rs::CoreBPE,
@@ -7,6 +9,10 @@ pub struct Tokenizer {

 #[napi]
 pub fn from_model_name(model_name: String) -> Option<Tokenizer> {
+  if model_name.starts_with("gpt-5") {
+    let bpe = get_bpe_from_tokenizer(TiktokenTokenizer::O200kBase).ok()?;
+    return Some(Tokenizer { inner: bpe });
+  }
   let bpe = tiktoken_rs::get_bpe_from_model(&model_name).ok()?;
   Some(Tokenizer { inner: bpe })
 }
@@ -31,7 +37,7 @@ mod tests {

   #[test]
   fn test_tokenizer() {
-    let tokenizer = from_model_name("gpt-4.1".to_string()).unwrap();
+    let tokenizer = from_model_name("gpt-5".to_string()).unwrap();
     let content = "Hello, world!";
     let count = tokenizer.count(content.to_string(), None);
     assert!(count > 0);
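The gpt-5 family is not yet known to tiktoken-rs's model lookup, so names with that prefix are routed straight to the o200k_base encoding. From the Node side the binding would be used roughly like this; it is a sketch that assumes napi-rs's default camelCase export names and the @affine/server-native package path, neither of which is confirmed by this diff:

```ts
// Hedged sketch of calling the tokenizer binding above; the import path and
// camelCase names assume napi-rs defaults and are not confirmed by the diff.
import { fromModelName } from '@affine/server-native';

const tokenizer = fromModelName('gpt-5'); // falls back to o200k_base
const tokens = tokenizer?.count('Hello, world!');
console.log(tokens); // some positive token count
```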
@@ -28,12 +28,12 @@
   "dependencies": {
     "@affine/reader": "workspace:*",
     "@affine/server-native": "workspace:*",
-    "@ai-sdk/anthropic": "^1.2.12",
-    "@ai-sdk/google": "^1.2.18",
-    "@ai-sdk/google-vertex": "^2.2.23",
-    "@ai-sdk/openai": "^1.3.22",
-    "@ai-sdk/openai-compatible": "^0.2.14",
-    "@ai-sdk/perplexity": "^1.1.9",
+    "@ai-sdk/anthropic": "^2.0.1",
+    "@ai-sdk/google": "^2.0.4",
+    "@ai-sdk/google-vertex": "^3.0.5",
+    "@ai-sdk/openai": "^2.0.10",
+    "@ai-sdk/openai-compatible": "^1.0.5",
+    "@ai-sdk/perplexity": "^2.0.1",
     "@apollo/server": "^4.11.3",
     "@aws-sdk/client-s3": "^3.779.0",
     "@aws-sdk/s3-request-presigner": "^3.779.0",
@@ -75,7 +75,7 @@
     "@prisma/instrumentation": "^6.7.0",
     "@react-email/components": "0.0.38",
     "@socket.io/redis-adapter": "^8.3.0",
-    "ai": "^4.3.4",
+    "ai": "^5.0.10",
     "bullmq": "^5.40.2",
     "cookie-parser": "^1.4.7",
     "cross-env": "^7.0.3",
@@ -118,11 +118,11 @@ test.serial.before(async t => {
     enabled: true,
     scenarios: {
       image: 'flux-1/schnell',
-      rerank: 'gpt-4.1-mini',
-      complex_text_generation: 'gpt-4.1-mini',
-      coding: 'gpt-4.1-mini',
-      quick_decision_making: 'gpt-4.1-mini',
-      quick_text_generation: 'gpt-4.1-mini',
+      rerank: 'gpt-5-mini',
+      complex_text_generation: 'gpt-5-mini',
+      coding: 'gpt-5-mini',
+      quick_decision_making: 'gpt-5-mini',
+      quick_text_generation: 'gpt-5-mini',
       polish_and_summarize: 'gemini-2.5-flash',
     },
   },
@@ -5,6 +5,7 @@ import { ProjectRoot } from '@affine-tools/utils/path';
 import { PrismaClient } from '@prisma/client';
 import type { TestFn } from 'ava';
 import ava from 'ava';
+import { nanoid } from 'nanoid';
 import Sinon from 'sinon';

 import { EventBus, JobQueue } from '../base';
@@ -1340,16 +1341,16 @@ test('TextStreamParser should format different types of chunks correctly', t =>
   textDelta: {
     chunk: {
       type: 'text-delta' as const,
-      textDelta: 'Hello world',
-    } as any,
+      text: 'Hello world',
+    },
     expected: 'Hello world',
     description: 'should format text-delta correctly',
   },
   reasoning: {
     chunk: {
-      type: 'reasoning' as const,
-      textDelta: 'I need to think about this',
-    } as any,
+      type: 'reasoning-delta' as const,
+      text: 'I need to think about this',
+    },
     expected: '\n> [!]\n> I need to think about this',
     description: 'should format reasoning as callout',
   },
@@ -1358,8 +1359,8 @@ test('TextStreamParser should format different types of chunks correctly', t =>
       type: 'tool-call' as const,
       toolName: 'web_search_exa' as const,
       toolCallId: 'test-id-1',
-      args: { query: 'test query', mode: 'AUTO' as const },
-    } as any,
+      input: { query: 'test query', mode: 'AUTO' as const },
+    },
     expected: '\n> [!]\n> \n> Searching the web "test query"\n> ',
     description: 'should format web search tool call correctly',
   },
@@ -1368,8 +1369,8 @@ test('TextStreamParser should format different types of chunks correctly', t =>
       type: 'tool-call' as const,
       toolName: 'web_crawl_exa' as const,
       toolCallId: 'test-id-2',
-      args: { url: 'https://example.com' },
-    } as any,
+      input: { url: 'https://example.com' },
+    },
     expected: '\n> [!]\n> \n> Crawling the web "https://example.com"\n> ',
     description: 'should format web crawl tool call correctly',
   },
@@ -1378,8 +1379,8 @@ test('TextStreamParser should format different types of chunks correctly', t =>
       type: 'tool-result' as const,
       toolName: 'web_search_exa' as const,
       toolCallId: 'test-id-1',
-      args: { query: 'test query', mode: 'AUTO' as const },
-      result: [
+      input: { query: 'test query', mode: 'AUTO' as const },
+      output: [
         {
           title: 'Test Title',
           url: 'https://test.com',
@@ -1406,7 +1407,7 @@ test('TextStreamParser should format different types of chunks correctly', t =>
     chunk: {
       type: 'error' as const,
       error: { type: 'testError', message: 'Test error message' },
-    } as any,
+    },
     errorMessage: 'Test error message',
     description: 'should throw error for error chunks',
   },
@@ -1436,78 +1437,85 @@ test('TextStreamParser should process a sequence of message chunks', t => {
    chunks: [
      // Reasoning chunks
      {
-        type: 'reasoning' as const,
-        textDelta: 'The user is asking about',
-      } as any,
+        id: nanoid(),
+        type: 'reasoning-delta' as const,
+        text: 'The user is asking about',
+      },
      {
-        type: 'reasoning' as const,
-        textDelta: ' recent advances in quantum computing',
-      } as any,
+        id: nanoid(),
+        type: 'reasoning-delta' as const,
+        text: ' recent advances in quantum computing',
+      },
      {
-        type: 'reasoning' as const,
-        textDelta: ' and how it might impact',
-      } as any,
+        id: nanoid(),
+        type: 'reasoning-delta' as const,
+        text: ' and how it might impact',
+      },
      {
-        type: 'reasoning' as const,
-        textDelta: ' cryptography and data security.',
-      } as any,
+        id: nanoid(),
+        type: 'reasoning-delta' as const,
+        text: ' cryptography and data security.',
+      },
      {
-        type: 'reasoning' as const,
-        textDelta:
-          ' I should provide information on quantum supremacy achievements',
-      } as any,
+        id: nanoid(),
+        type: 'reasoning-delta' as const,
+        text: ' I should provide information on quantum supremacy achievements',
+      },

      // Text delta
      {
+        id: nanoid(),
        type: 'text-delta' as const,
-        textDelta:
-          'Let me search for the latest breakthroughs in quantum computing and their ',
-      } as any,
+        text: 'Let me search for the latest breakthroughs in quantum computing and their ',
+      },

      // Tool call
      {
        type: 'tool-call' as const,
        toolCallId: 'toolu_01ABCxyz123456789',
        toolName: 'web_search_exa' as const,
-        args: {
+        input: {
          query: 'latest quantum computing breakthroughs cryptography impact',
        },
-      } as any,
+      },

      // Tool result
      {
        type: 'tool-result' as const,
        toolCallId: 'toolu_01ABCxyz123456789',
        toolName: 'web_search_exa' as const,
-        args: {
+        input: {
          query: 'latest quantum computing breakthroughs cryptography impact',
        },
-        result: [
+        output: [
          {
            title: 'IBM Unveils 1000-Qubit Quantum Processor',
            url: 'https://example.com/tech/quantum-computing-milestone',
          },
        ],
-      } as any,
+      },

      // More text deltas
      {
+        id: nanoid(),
        type: 'text-delta' as const,
-        textDelta: 'implications for security.',
-      } as any,
+        text: 'implications for security.',
+      },
      {
+        id: nanoid(),
        type: 'text-delta' as const,
-        textDelta: '\n\nQuantum computing has made ',
-      } as any,
+        text: '\n\nQuantum computing has made ',
+      },
      {
+        id: nanoid(),
        type: 'text-delta' as const,
-        textDelta: 'remarkable progress in the past year. ',
-      } as any,
+        text: 'remarkable progress in the past year. ',
+      },
      {
+        id: nanoid(),
        type: 'text-delta' as const,
-        textDelta:
-          'The development of more stable qubits has accelerated research significantly.',
-      } as any,
+        text: 'The development of more stable qubits has accelerated research significantly.',
+      },
    ],
    expected:
      '\n> [!]\n> The user is asking about recent advances in quantum computing and how it might impact cryptography and data security. I should provide information on quantum supremacy achievements\n\nLet me search for the latest breakthroughs in quantum computing and their \n> [!]\n> \n> Searching the web "latest quantum computing breakthroughs cryptography impact"\n> \n> \n> \n> [IBM Unveils 1000-Qubit Quantum Processor](https://example.com/tech/quantum-computing-milestone)\n> \n> \n> \n\nimplications for security.\n\nQuantum computing has made remarkable progress in the past year. The development of more stable qubits has accelerated research significantly.',
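For orientation: the v5 stream parts consumed above carry an id plus flat fields (text, input, output) where v4 used textDelta, args and result. A rough sketch of the shapes these fixtures assume (hypothetical, trimmed-down — not the SDK's full union):

type ReasoningDeltaPart = { id: string; type: 'reasoning-delta'; text: string };
type TextDeltaPart = { id: string; type: 'text-delta'; text: string };
type ToolCallPart = { type: 'tool-call'; toolCallId: string; toolName: string; input: unknown };
type ToolResultPart = { type: 'tool-result'; toolCallId: string; toolName: string; input: unknown; output: unknown };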
@@ -57,15 +57,6 @@ export class MockCopilotProvider extends OpenAIProvider {
      },
    ],
  },
-  {
-    id: 'gpt-4.1',
-    capabilities: [
-      {
-        input: [ModelInputType.Text, ModelInputType.Image],
-        output: [ModelOutputType.Text, ModelOutputType.Object],
-      },
-    ],
-  },
  {
    id: 'gpt-4.1-2025-04-14',
    capabilities: [
@@ -76,7 +67,25 @@ export class MockCopilotProvider extends OpenAIProvider {
    ],
  },
  {
-    id: 'gpt-4.1-mini',
+    id: 'gpt-5',
    capabilities: [
      {
        input: [ModelInputType.Text, ModelInputType.Image],
        output: [ModelOutputType.Text, ModelOutputType.Object],
      },
    ],
  },
+  {
+    id: 'gpt-5-2025-08-07',
+    capabilities: [
+      {
+        input: [ModelInputType.Text, ModelInputType.Image],
+        output: [ModelOutputType.Text, ModelOutputType.Object],
+      },
+    ],
+  },
+  {
+    id: 'gpt-5-mini',
+    capabilities: [
+      {
+        input: [ModelInputType.Text, ModelInputType.Image],
@@ -101,6 +101,28 @@ Generated by [AVA](https://avajs.dev).

    0

+## should check need to be embedded
+
+> document with no embedding should need embedding
+
+    true
+
+> document with recent embedding should not need embedding
+
+    false
+
+> document updated after embedding and older-than-10m should need embedding
+
+    true
+
+> should not need embedding when only 10-minute window passed without updates
+
+    false
+
+> should need embedding when doc updated and last embedding older than 10 minutes
+
+    true
+
## should filter outdated doc id style in embedding status

> should include modern doc format

Binary file not shown.
@@ -48,7 +48,7 @@ let docId = 'doc1';

test.beforeEach(async t => {
  await t.context.module.initTestingDB();
-  await t.context.copilotSession.createPrompt('prompt-name', 'gpt-4.1');
+  await t.context.copilotSession.createPrompt('prompt-name', 'gpt-5-mini');
  user = await t.context.user.create({
    email: 'test@affine.pro',
  });
@@ -58,9 +58,9 @@ const createTestPrompts = async (
  copilotSession: CopilotSessionModel,
  db: PrismaClient
) => {
-  await copilotSession.createPrompt(TEST_PROMPTS.NORMAL, 'gpt-4.1');
+  await copilotSession.createPrompt(TEST_PROMPTS.NORMAL, 'gpt-5-mini');
  await db.aiPrompt.create({
-    data: { name: TEST_PROMPTS.ACTION, model: 'gpt-4.1', action: 'edit' },
+    data: { name: TEST_PROMPTS.ACTION, model: 'gpt-5-mini', action: 'edit' },
  });
};
@@ -116,7 +116,7 @@ const addMessagesToSession = async (
  await copilotSession.updateMessages({
    sessionId,
    userId: user.id,
-    prompt: { model: 'gpt-4.1' },
+    prompt: { model: 'gpt-5-mini' },
    messages: [
      {
        role: 'user',
@@ -807,7 +807,7 @@ test('should handle fork and session attachment operations', async t => {
    pinned: forkConfig.pinned,
    title: null,
    parentSessionId,
-    prompt: { name: TEST_PROMPTS.NORMAL, action: null, model: 'gpt-4.1' },
+    prompt: { name: TEST_PROMPTS.NORMAL, action: null, model: 'gpt-5-mini' },
    messages: [
      {
        role: 'user',
@@ -293,7 +293,10 @@ test('should check need to be embedded', async t => {
      workspace.id,
      docId
    );
-    t.true(needsEmbedding, 'document with no embedding should need embedding');
+    t.snapshot(
+      needsEmbedding,
+      'document with no embedding should need embedding'
+    );
  }

  {
@@ -313,7 +316,7 @@ test('should check need to be embedded', async t => {
      workspace.id,
      docId
    );
-    t.false(
+    t.snapshot(
      needsEmbedding,
      'document with recent embedding should not need embedding'
    );
@@ -328,15 +331,83 @@ test('should check need to be embedded', async t => {
      editorId: user.id,
    });

+    // simulate an old embedding
+    const oldEmbeddingTime = new Date(Date.now() - 25 * 60 * 1000);
+    await t.context.db.aiWorkspaceEmbedding.updateMany({
+      where: { workspaceId: workspace.id, docId },
+      data: { updatedAt: oldEmbeddingTime },
+    });
+
    let needsEmbedding = await t.context.copilotWorkspace.checkDocNeedEmbedded(
      workspace.id,
      docId
    );
-    t.true(
+    t.snapshot(
      needsEmbedding,
-      'document updated after embedding should need embedding'
+      'document updated after embedding and older-than-10m should need embedding'
    );
  }
+
+  {
+    // only time passed (>10m since last embedding) but no doc updates => should NOT re-embed
+    const baseNow = Date.now();
+    const docId2 = randomUUID();
+    const t0 = baseNow - 30 * 60 * 1000; // snapshot updated 30 minutes ago
+    const t1 = baseNow - 25 * 60 * 1000; // embedding updated 25 minutes ago
+
+    await t.context.doc.upsert({
+      spaceId: workspace.id,
+      docId: docId2,
+      blob: Uint8Array.from([1, 2, 3]),
+      timestamp: t0,
+      editorId: user.id,
+    });
+
+    await t.context.copilotContext.insertWorkspaceEmbedding(
+      workspace.id,
+      docId2,
+      [
+        {
+          index: 0,
+          content: 'content2',
+          embedding: Array.from({ length: 1024 }, () => 1),
+        },
+      ]
+    );
+
+    await t.context.db.aiWorkspaceEmbedding.updateMany({
+      where: { workspaceId: workspace.id, docId: docId2 },
+      data: { updatedAt: new Date(t1) },
+    });
+
+    let needsEmbedding = await t.context.copilotWorkspace.checkDocNeedEmbedded(
+      workspace.id,
+      docId2
+    );
+    t.snapshot(
+      needsEmbedding,
+      'should not need embedding when only 10-minute window passed without updates'
+    );
+
+    const t2 = baseNow - 5 * 60 * 1000; // doc updated 5 minutes ago
+    await t.context.doc.upsert({
+      spaceId: workspace.id,
+      docId: docId2,
+      blob: Uint8Array.from([7, 8, 9]),
+      timestamp: t2,
+      editorId: user.id,
+    });
+
+    needsEmbedding = await t.context.copilotWorkspace.checkDocNeedEmbedded(
+      workspace.id,
+      docId2
+    );
+    t.snapshot(
+      needsEmbedding,
+      'should need embedding when doc updated and last embedding older than 10 minutes'
+    );
+  }
+  // --- new cases end ---
});

test('should check embedding table', async t => {
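Taken together, these cases pin the re-embedding rule down to a small predicate; a sketch under the same assumptions as the SQL further below (names illustrative):

const TEN_MINUTES = 10 * 60 * 1000;

// Re-embed on first sight, or when the doc changed after the last embedding
// and that embedding has aged past the 10-minute debounce window.
function docNeedsEmbedding(
  docUpdatedAt: Date,
  embeddedAt: Date | null,
  now: number = Date.now()
): boolean {
  if (!embeddedAt) return true;
  return (
    docUpdatedAt.getTime() > embeddedAt.getTime() &&
    embeddedAt.getTime() < now - TEN_MINUTES
  );
}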
@@ -1,10 +1,10 @@
-import { Injectable } from '@nestjs/common';
+import { Injectable, Logger } from '@nestjs/common';
import { Cron, CronExpression } from '@nestjs/schedule';
import { PrismaClient } from '@prisma/client';

import { JOB_SIGNAL, JobQueue, metrics, OnJob } from '../../base';
import { Models } from '../../models';
-import { PgWorkspaceDocStorageAdapter } from '../doc';
+import { DatabaseDocReader, PgWorkspaceDocStorageAdapter } from '../doc';

declare global {
  interface Jobs {
@@ -13,13 +13,23 @@ declare global {
      docId: string;
    };
    'doc.recordPendingDocUpdatesCount': {};
+    'doc.findEmptySummaryDocs': {
+      lastFixedWorkspaceSid?: number;
+    };
+    'doc.autoFixedDocSummary': {
+      workspaceId: string;
+      docId: string;
+    };
  }
}

@Injectable()
export class DocServiceCronJob {
+  private readonly logger = new Logger(DocServiceCronJob.name);
+
  constructor(
    private readonly workspace: PgWorkspaceDocStorageAdapter,
+    private readonly docReader: DatabaseDocReader,
    private readonly prisma: PrismaClient,
    private readonly job: JobQueue,
    private readonly models: Models
@@ -86,4 +96,74 @@ export class DocServiceCronJob {
      }
    );
  }
+
+  @Cron(CronExpression.EVERY_30_SECONDS)
+  async scheduleFindEmptySummaryDocs() {
+    await this.job.add(
+      'doc.findEmptySummaryDocs',
+      {},
+      {
+        // make sure only one job is running at a time
+        delay: 30 * 1000,
+        jobId: 'findEmptySummaryDocs',
+      }
+    );
+  }
+
+  @OnJob('doc.findEmptySummaryDocs')
+  async findEmptySummaryDocs(payload: Jobs['doc.findEmptySummaryDocs']) {
+    const startSid = payload.lastFixedWorkspaceSid ?? 0;
+    const workspaces = await this.models.workspace.list(
+      { sid: { gt: startSid } },
+      { id: true, sid: true },
+      100
+    );
+
+    if (workspaces.length === 0) {
+      return JOB_SIGNAL.Repeat;
+    }
+
+    let addedCount = 0;
+    for (const workspace of workspaces) {
+      const docIds = await this.models.doc.findEmptySummaryDocIds(workspace.id);
+      for (const docId of docIds) {
+        // ignore root doc
+        if (docId === workspace.id) {
+          continue;
+        }
+        await this.job.add(
+          'doc.autoFixedDocSummary',
+          { workspaceId: workspace.id, docId },
+          {
+            jobId: `autoFixedDocSummary/${workspace.id}/${docId}`,
+          }
+        );
+        addedCount++;
+      }
+    }
+
+    const nextSid = workspaces[workspaces.length - 1].sid;
+    this.logger.log(
+      `Auto added ${addedCount} docs to queue, lastFixedWorkspaceSid: ${startSid} -> ${nextSid}`
+    );
+
+    // update the lastFixedWorkspaceSid in the payload and repeat the job after 30 seconds
+    payload.lastFixedWorkspaceSid = nextSid;
+    return JOB_SIGNAL.Repeat;
+  }
+
+  @OnJob('doc.autoFixedDocSummary')
+  async autoFixedDocSummary(payload: Jobs['doc.autoFixedDocSummary']) {
+    const { workspaceId, docId } = payload;
+    const content = await this.docReader.getDocContent(workspaceId, docId);
+    if (!content) {
+      this.logger.warn(
+        `Summary for doc ${docId} in workspace ${workspaceId} not found`
+      );
+      return;
+    }
+
+    await this.models.doc.upsertMeta(workspaceId, docId, content);
+    return;
+  }
}

47  packages/backend/server/src/models/__tests__/doc.spec.ts  Normal file
@@ -0,0 +1,47 @@
+import { randomUUID } from 'node:crypto';
+
+import test from 'ava';
+
+import { createModule } from '../../__tests__/create-module';
+import { Mockers } from '../../__tests__/mocks';
+import { Models } from '..';
+
+const module = await createModule({});
+
+const models = module.get(Models);
+const owner = await module.create(Mockers.User);
+
+test.after.always(async () => {
+  await module.close();
+});
+
+test('should find null summary doc ids', async t => {
+  const workspace = await module.create(Mockers.Workspace, {
+    owner,
+  });
+
+  const docId = randomUUID();
+  await module.create(Mockers.DocMeta, {
+    workspaceId: workspace.id,
+    docId,
+  });
+
+  const docIds = await models.doc.findEmptySummaryDocIds(workspace.id);
+  t.deepEqual(docIds, [docId]);
+});
+
+test('should ignore summary is not null', async t => {
+  const workspace = await module.create(Mockers.Workspace, {
+    owner,
+  });
+
+  const docId = randomUUID();
+  await module.create(Mockers.DocMeta, {
+    workspaceId: workspace.id,
+    docId,
+    summary: 'test',
+  });
+
+  const docIds = await models.doc.findEmptySummaryDocIds(workspace.id);
+  t.is(docIds.length, 0);
+});
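Stepping back, findEmptySummaryDocs above is a cursor-driven repeat job: each run handles up to 100 workspaces, stores the highest processed sid back into the payload, and returns JOB_SIGNAL.Repeat so the queue re-runs it until the table is exhausted. A stripped-down sketch of that pattern (names and fetcher are illustrative, not from this diff):

type Row = { sid: number };

// Hypothetical page fetcher standing in for models.workspace.list.
async function fetchPage(afterSid: number, limit: number): Promise<Row[]> {
  return []; // stub: would select rows with sid > afterSid, ordered by sid, limited
}

// One run handles a page, advances the cursor stored in the payload,
// and signals the queue to run it again (mirrors JOB_SIGNAL.Repeat above).
async function runPagedJob(payload: { cursor?: number }): Promise<'repeat'> {
  const page = await fetchPage(payload.cursor ?? 0, 100);
  if (page.length > 0) {
    payload.cursor = page[page.length - 1].sid;
  }
  return 'repeat';
}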
@@ -67,12 +67,17 @@ export class BlobModel extends BaseModel {
    });
  }

-  async list(workspaceId: string) {
+  async list(
+    workspaceId: string,
+    options?: { where: Prisma.BlobWhereInput; select?: Prisma.BlobSelect }
+  ) {
    return await this.db.blob.findMany({
      where: {
+        ...options?.where,
        workspaceId,
        deletedAt: null,
      },
+      select: options?.select,
    });
  }
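The widened signature keeps the workspaceId and deletedAt guards while letting callers narrow and project rows; this is exactly how getBlobMetadata further below uses it:

// e.g. fetch only key and mime for a known set of blob keys
const blobs = await models.blob.list(workspaceId, {
  where: { key: { in: blobIds } },
  select: { key: true, mime: true },
});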
@@ -148,3 +148,36 @@ export type IgnoredDoc = {
  createdByAvatar: string | undefined;
  updatedBy: string | undefined;
};
+
+export const EMBEDDING_DIMENSIONS = 1024;
+
+const FILTER_PREFIX = [
+  'Title: ',
+  'Created at: ',
+  'Updated at: ',
+  'Created by: ',
+  'Updated by: ',
+];
+
+export function clearEmbeddingContent(content: string): string {
+  const lines = content.split('\n');
+  let maxLines = 5;
+  while (maxLines > 0 && lines.length > 0) {
+    if (FILTER_PREFIX.some(prefix => lines[0].startsWith(prefix))) {
+      lines.shift();
+      maxLines--;
+    } else {
+      // only process consecutive metadata rows
+      break;
+    }
+  }
+  return lines.join('\n');
+}
+
+export function clearEmbeddingChunk(chunk: ChunkSimilarity): ChunkSimilarity {
+  if (chunk.content) {
+    const content = clearEmbeddingContent(chunk.content);
+    return { ...chunk, content };
+  }
+  return chunk;
+}
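clearEmbeddingContent strips at most five consecutive metadata header lines from the top of a chunk; anything after the first non-metadata line is left intact. For instance:

// Given a chunk that starts with the generated metadata header:
const chunk = 'Title: Weekly notes\nCreated at: 2025-01-01\nBody mentions Title: casing';
// clearEmbeddingContent(chunk) === 'Body mentions Title: casing'
// - the two leading metadata rows are dropped,
// - the in-body occurrence survives because scanning stops at the first
//   line that does not start with a known prefix.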
@@ -6,6 +6,7 @@ import { Prisma } from '@prisma/client';
import { CopilotSessionNotFound } from '../base';
import { BaseModel } from './base';
import {
+  clearEmbeddingContent,
  ContextBlob,
  ContextConfigSchema,
  ContextDoc,
@@ -13,6 +14,7 @@ import {
  CopilotContext,
  DocChunkSimilarity,
  Embedding,
+  EMBEDDING_DIMENSIONS,
  FileChunkSimilarity,
  MinimalContextConfigSchema,
} from './common/copilot';
@@ -203,6 +205,19 @@ export class CopilotContextModel extends BaseModel {
    return Prisma.join(groups.map(row => Prisma.sql`(${Prisma.join(row)})`));
  }

+  async getFileContent(
+    contextId: string,
+    fileId: string,
+    chunk?: number
+  ): Promise<string | undefined> {
+    const file = await this.db.aiContextEmbedding.findMany({
+      where: { contextId, fileId, chunk },
+      select: { content: true },
+      orderBy: { chunk: 'asc' },
+    });
+    return file?.map(f => clearEmbeddingContent(f.content)).join('\n');
+  }
+
  async insertFileEmbedding(
    contextId: string,
    fileId: string,
@@ -249,6 +264,19 @@ export class CopilotContextModel extends BaseModel {
    return similarityChunks.filter(c => Number(c.distance) <= threshold);
  }

+  async getWorkspaceContent(
+    workspaceId: string,
+    docId: string,
+    chunk?: number
+  ): Promise<string | undefined> {
+    const file = await this.db.aiWorkspaceEmbedding.findMany({
+      where: { workspaceId, docId, chunk },
+      select: { content: true },
+      orderBy: { chunk: 'asc' },
+    });
+    return file?.map(f => clearEmbeddingContent(f.content)).join('\n');
+  }
+
  async insertWorkspaceEmbedding(
    workspaceId: string,
    docId: string,
@@ -273,15 +301,30 @@ export class CopilotContextModel extends BaseModel {
      VALUES ${values}
      ON CONFLICT (workspace_id, doc_id, chunk)
      DO UPDATE SET
        content = EXCLUDED.content,
        embedding = EXCLUDED.embedding,
        updated_at = excluded.updated_at;
    `;
  }

+  async fulfillEmptyEmbedding(workspaceId: string, docId: string) {
+    const emptyEmbedding = {
+      index: 0,
+      content: '',
+      embedding: Array.from({ length: EMBEDDING_DIMENSIONS }, () => 0),
+    };
+    await this.models.copilotContext.insertWorkspaceEmbedding(
+      workspaceId,
+      docId,
+      [emptyEmbedding]
+    );
+  }
+
  async deleteWorkspaceEmbedding(workspaceId: string, docId: string) {
    await this.db.aiWorkspaceEmbedding.deleteMany({
      where: { workspaceId, docId },
    });
+    await this.fulfillEmptyEmbedding(workspaceId, docId);
  }

  async matchWorkspaceEmbedding(
@@ -6,13 +6,14 @@ import { Prisma, PrismaClient } from '@prisma/client';

import { PaginationInput } from '../base';
import { BaseModel } from './base';
-import type {
-  BlobChunkSimilarity,
-  CopilotWorkspaceFile,
-  CopilotWorkspaceFileMetadata,
-  Embedding,
-  FileChunkSimilarity,
-  IgnoredDoc,
+import {
+  type BlobChunkSimilarity,
+  clearEmbeddingContent,
+  type CopilotWorkspaceFile,
+  type CopilotWorkspaceFileMetadata,
+  type Embedding,
+  type FileChunkSimilarity,
+  type IgnoredDoc,
} from './common';

@Injectable()
@@ -152,21 +153,57 @@ export class CopilotWorkspaceConfigModel extends BaseModel {
    return docIds.filter(id => ignored.has(id));
  }

+  // check if a docId has only placeholder embeddings
+  @Transactional()
+  async hasPlaceholder(workspaceId: string, docId: string): Promise<boolean> {
+    const [total, nonPlaceholder] = await Promise.all([
+      this.db.aiWorkspaceEmbedding.count({ where: { workspaceId, docId } }),
+      this.db.aiWorkspaceEmbedding.count({
+        where: {
+          workspaceId,
+          docId,
+          NOT: { AND: [{ chunk: 0 }, { content: '' }] },
+        },
+      }),
+    ]);
+    return total > 0 && nonPlaceholder === 0;
+  }
+
+  private getEmbeddableCondition(
+    workspaceId: string,
+    ignoredDocIds?: string[]
+  ): Prisma.SnapshotWhereInput {
+    const condition: Prisma.SnapshotWhereInput['AND'] = [
+      { id: { not: workspaceId } },
+      { id: { not: { contains: '$' } } },
+      { id: { not: { contains: ':settings:' } } },
+      { blob: { not: new Uint8Array([0, 0]) } },
+    ];
+    if (ignoredDocIds && ignoredDocIds.length > 0) {
+      condition.push({ id: { notIn: ignoredDocIds } });
+    }
+    return { workspaceId, AND: condition };
+  }
+
+  @Transactional()
+  async listEmbeddableDocIds(workspaceId: string) {
+    const condition = this.getEmbeddableCondition(workspaceId);
+    const rows = await this.db.snapshot.findMany({
+      where: condition,
+      select: { id: true },
+    });
+    return rows.map(r => r.id);
+  }
+
  @Transactional()
  async getEmbeddingStatus(workspaceId: string) {
    const ignoredDocIds = (await this.listIgnoredDocIds(workspaceId)).map(
      d => d.docId
    );
-    const snapshotCondition = {
+    const snapshotCondition = this.getEmbeddableCondition(
      workspaceId,
-      AND: [
-        { id: { notIn: ignoredDocIds } },
-        { id: { not: workspaceId } },
-        { id: { not: { contains: '$' } } },
-        { id: { not: { contains: ':settings:' } } },
-        { blob: { not: new Uint8Array([0, 0]) } },
-      ],
-    };
+      ignoredDocIds
+    );

    const [docTotal, docEmbedded, fileTotal, fileEmbedded] = await Promise.all([
      this.db.snapshot.findMany({
@@ -206,10 +243,9 @@ export class CopilotWorkspaceConfigModel extends BaseModel {
  @Transactional()
  async checkDocNeedEmbedded(workspaceId: string, docId: string) {
    // NOTE: check if the document needs re-embedding.
-    // 1. check if there have been any recent updates to the document snapshot and update
-    // 2. check if the embedding is older than the snapshot and update
-    // 3. check if the embedding is older than 10 minutes (avoid frequent updates)
-    // if all conditions are met, re-embedding is required.
+    // 1. first-time embedding when no embedding exists
+    // 2. re-embedding only when the doc has updates newer than the last embedding
+    //    AND the last embedding is older than 10 minutes (avoid frequent updates)
    const result = await this.db.$queryRaw<{ needs_embedding: boolean }[]>`
      SELECT
        EXISTS (
@@ -244,8 +280,7 @@ export class CopilotWorkspaceConfigModel extends BaseModel {
          AND e.doc_id = docs.doc_id
        WHERE
          e.updated_at IS NULL
-          OR docs.updated_at > e.updated_at
-          OR e.updated_at < NOW() - INTERVAL '10 minutes'
+          OR (docs.updated_at > e.updated_at AND e.updated_at < NOW() - INTERVAL '10 minutes')
      ) AS needs_embedding;
    `;

@@ -379,6 +414,33 @@ export class CopilotWorkspaceConfigModel extends BaseModel {
    return similarityChunks.filter(c => Number(c.distance) <= threshold);
  }

+  async getBlobContent(
+    workspaceId: string,
+    blobId: string,
+    chunk?: number
+  ): Promise<string | undefined> {
+    const blob = await this.db.aiWorkspaceBlobEmbedding.findMany({
+      where: { workspaceId, blobId, chunk },
+      select: { content: true },
+      orderBy: { chunk: 'asc' },
+    });
+    return blob?.map(f => clearEmbeddingContent(f.content)).join('\n');
+  }
+
+  async getBlobChunkSizes(workspaceId: string, blobIds: string[]) {
+    const sizes = await this.db.aiWorkspaceBlobEmbedding.groupBy({
+      by: ['blobId'],
+      _count: { chunk: true },
+      where: { workspaceId, blobId: { in: blobIds } },
+    });
+    return sizes.reduce((acc, cur) => {
+      if (cur._count.chunk) {
+        acc.set(cur.blobId, cur._count.chunk);
+      }
+      return acc;
+    }, new Map<string, number>());
+  }
+
  @Transactional()
  async insertBlobEmbeddings(
    workspaceId: string,
@@ -696,5 +696,18 @@ export class DocModel extends BaseModel {
    return [count, rows] as const;
  }

+  async findEmptySummaryDocIds(workspaceId: string) {
+    const rows = await this.db.workspaceDoc.findMany({
+      where: {
+        workspaceId,
+        summary: null,
+      },
+      select: {
+        docId: true,
+      },
+    });
+    return rows.map(row => row.docId);
+  }
+
  // #endregion
}
@@ -57,7 +57,7 @@ defineModuleConfig('copilot', {
    rerank: 'gpt-4.1',
    coding: 'claude-sonnet-4@20250514',
    complex_text_generation: 'gpt-4o-2024-08-06',
-    quick_decision_making: 'gpt-4.1-mini',
+    quick_decision_making: 'gpt-5-mini',
    quick_text_generation: 'gemini-2.5-flash',
    polish_and_summarize: 'gemini-2.5-flash',
  },
@@ -55,7 +55,7 @@ export class ContextSession implements AsyncDisposable {
    return this.config.docs.map(d => ({ ...d }));
  }

-  get files() {
+  get files(): Required<ContextFile>[] {
    return this.config.files.map(f => this.fulfillFile(f));
  }

@@ -135,6 +135,36 @@ export class ContextSession implements AsyncDisposable {
    return record;
  }

+  async getBlobMetadata() {
+    const blobIds = this.blobs.map(b => b.id);
+    const blobs = await this.models.blob.list(this.config.workspaceId, {
+      where: { key: { in: blobIds } },
+      select: { key: true, mime: true },
+    });
+    const blobChunkSizes = await this.models.copilotWorkspace.getBlobChunkSizes(
+      this.config.workspaceId,
+      blobIds
+    );
+    return blobs
+      .filter(b => !!blobChunkSizes.get(b.key))
+      .map(b => ({
+        id: b.key,
+        mimeType: b.mime,
+        chunkSize: blobChunkSizes.get(b.key),
+      }));
+  }
+
+  async getBlobContent(
+    blobId: string,
+    chunk?: number
+  ): Promise<string | undefined> {
+    return this.models.copilotWorkspace.getBlobContent(
+      this.config.workspaceId,
+      blobId,
+      chunk
+    );
+  }
+
  async removeBlobRecord(blobId: string): Promise<boolean> {
    const index = this.config.blobs.findIndex(b => b.id === blobId);
    if (index >= 0) {
@@ -203,6 +233,19 @@ export class ContextSession implements AsyncDisposable {
    return this.config.files.find(f => f.id === fileId);
  }

+  async getFileContent(
+    fileId: string,
+    chunk?: number
+  ): Promise<string | undefined> {
+    const file = this.getFile(fileId);
+    if (!file) return undefined;
+    return this.models.copilotContext.getFileContent(
+      this.contextId,
+      fileId,
+      chunk
+    );
+  }
+
  async removeFile(fileId: string): Promise<boolean> {
    await this.models.copilotContext.deleteFileEmbedding(
      this.contextId,
@@ -45,6 +45,7 @@ import {
  UnsplashIsNotConfigured,
} from '../../base';
import { CurrentUser, Public } from '../../core/auth';
+import { CopilotContextService } from './context';
import {
  CopilotProvider,
  CopilotProviderFactory,
@@ -75,6 +76,7 @@ export class CopilotController implements BeforeApplicationShutdown {
  constructor(
    private readonly config: Config,
    private readonly chatSession: ChatSessionService,
+    private readonly context: CopilotContextService,
    private readonly provider: CopilotProviderFactory,
    private readonly workflow: CopilotWorkflowService,
    private readonly storage: CopilotStorage
@@ -204,14 +206,30 @@ export class CopilotController implements BeforeApplicationShutdown {
      retry
    );

-    if (latestMessage) {
-      params = Object.assign({}, params, latestMessage.params, {
-        content: latestMessage.content,
-        attachments: latestMessage.attachments,
-      });
-    }
+    const context = await this.context.getBySessionId(sessionId);
+    const contextParams =
+      (Array.isArray(context?.files) && context.files.length > 0) ||
+      (Array.isArray(context?.blobs) && context.blobs.length > 0)
+        ? {
+            contextFiles: [
+              ...context.files,
+              ...(await context.getBlobMetadata()),
+            ],
+          }
+        : {};
+    const lastParams = latestMessage
+      ? {
+          ...latestMessage.params,
+          content: latestMessage.content,
+          attachments: latestMessage.attachments,
+        }
+      : {};

-    const finalMessage = session.finish(params);
+    const finalMessage = session.finish({
+      ...params,
+      ...lastParams,
+      ...contextParams,
+    });

    return {
      provider,
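Note the merge order in session.finish: later spreads win, so the stored message's params override the request params, and the derived contextFiles override both. In plain object-spread terms:

// Later spreads take precedence on key collisions.
const merged = { ...{ a: 1, b: 1 }, ...{ b: 2 }, ...{ c: 3 } };
// merged = { a: 1, b: 2, c: 3 }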
@@ -7,7 +7,11 @@ import {
  CopilotProviderNotSupported,
} from '../../../base';
import { CopilotFailedToGenerateEmbedding } from '../../../base/error/errors.gen';
-import { ChunkSimilarity, Embedding } from '../../../models';
+import {
+  ChunkSimilarity,
+  Embedding,
+  EMBEDDING_DIMENSIONS,
+} from '../../../models';
import { PromptService } from '../prompt';
import {
  type CopilotProvider,
@@ -16,11 +20,7 @@ import {
  ModelInputType,
  ModelOutputType,
} from '../providers';
-import {
-  EMBEDDING_DIMENSIONS,
-  EmbeddingClient,
-  type ReRankResult,
-} from './types';
+import { EmbeddingClient, type ReRankResult } from './types';

const EMBEDDING_MODEL = 'gemini-embedding-001';
const RERANK_PROMPT = 'Rerank results';

@@ -1,4 +1,4 @@
export { getEmbeddingClient, MockEmbeddingClient } from './client';
export { CopilotEmbeddingJob } from './job';
export type { Chunk, DocFragment } from './types';
-export { EMBEDDING_DIMENSIONS, EmbeddingClient } from './types';
+export { EmbeddingClient } from './types';
@@ -20,7 +20,7 @@ import { CopilotStorage } from '../storage';
import { readStream } from '../utils';
import { getEmbeddingClient } from './client';
import type { Chunk, DocFragment } from './types';
-import { EMBEDDING_DIMENSIONS, EmbeddingClient } from './types';
+import { EmbeddingClient } from './types';

@Injectable()
export class CopilotEmbeddingJob {
@@ -392,17 +392,8 @@ export class CopilotEmbeddingJob {
    return controller.signal;
  }

-  private async fulfillEmptyEmbedding(workspaceId: string, docId: string) {
-    const emptyEmbedding = {
-      index: 0,
-      content: '',
-      embedding: Array.from({ length: EMBEDDING_DIMENSIONS }, () => 0),
-    };
-    await this.models.copilotContext.insertWorkspaceEmbedding(
-      workspaceId,
-      docId,
-      [emptyEmbedding]
-    );
+  private normalize(s: string) {
+    return s.replaceAll(/[\p{White_Space}]+/gu, '');
  }

  @OnJob('copilot.embedding.docs')
@@ -442,6 +433,21 @@ export class CopilotEmbeddingJob {
    if (!hasNewDoc && fragment) {
      // fast fail for an empty doc; journals easily create empty docs
      if (fragment.summary.trim()) {
+        const existsContent =
+          await this.models.copilotContext.getWorkspaceContent(
+            workspaceId,
+            docId
+          );
+        if (
+          existsContent &&
+          this.normalize(existsContent) === this.normalize(fragment.summary)
+        ) {
+          this.logger.log(
+            `Doc ${docId} in workspace ${workspaceId} has no content change, skipping embedding.`
+          );
+          return;
+        }
+
        const embeddings = await this.embeddingClient.getFileEmbeddings(
          new File(
            [fragment.summary],
@@ -466,13 +472,19 @@ export class CopilotEmbeddingJob {
        this.logger.warn(
          `Doc ${docId} in workspace ${workspaceId} has no summary, fulfilling empty embedding.`
        );
-        await this.fulfillEmptyEmbedding(workspaceId, docId);
+        await this.models.copilotContext.fulfillEmptyEmbedding(
+          workspaceId,
+          docId
+        );
      }
    } else {
      this.logger.warn(
        `Doc ${docId} in workspace ${workspaceId} has no fragment, fulfilling empty embedding.`
      );
-      await this.fulfillEmptyEmbedding(workspaceId, docId);
+      await this.models.copilotContext.fulfillEmptyEmbedding(
+        workspaceId,
+        docId
+      );
    }
  }
} catch (error: any) {
@@ -490,7 +502,10 @@ export class CopilotEmbeddingJob {
      `Doc ${docId} in workspace ${workspaceId} has no content, fulfilling empty embedding.`
    );
    // if the doc is empty, we still need to fulfill the embedding
-    await this.fulfillEmptyEmbedding(workspaceId, docId);
+    await this.models.copilotContext.fulfillEmptyEmbedding(
+      workspaceId,
+      docId
+    );
    return;
  }

@@ -536,11 +551,14 @@ export class CopilotEmbeddingJob {
    return;
  }

-  const docIdsInEmbedding =
-    await this.models.copilotContext.listWorkspaceDocEmbedding(workspaceId);
-  if (!docIdsInEmbedding.length) {
+  const [docIdsInEmbedding, docIdsInSnapshots] = await Promise.all([
+    this.models.copilotContext.listWorkspaceDocEmbedding(workspaceId),
+    this.models.copilotWorkspace.listEmbeddableDocIds(workspaceId),
+  ]);
+
+  if (!docIdsInEmbedding.length && !docIdsInSnapshots.length) {
    this.logger.verbose(
-      `No doc embeddings found in workspace ${workspaceId}, skipping cleanup`
+      `No doc embeddings and snapshots found in workspace ${workspaceId}, skipping cleanup`
    );
    await this.models.workspace.update(
      workspaceId,
@@ -553,10 +571,17 @@ export class CopilotEmbeddingJob {
    const docIdsInWorkspace = readAllDocIdsFromWorkspaceSnapshot(snapshot.blob);
    const docIdsInWorkspaceSet = new Set(docIdsInWorkspace);

-    const deletedDocIds = docIdsInEmbedding.filter(
-      docId => !docIdsInWorkspaceSet.has(docId)
+    const deletedDocIds = new Set(
+      [...docIdsInEmbedding, ...docIdsInSnapshots].filter(
+        docId => !docIdsInWorkspaceSet.has(docId)
+      )
    );
    for (const docId of deletedDocIds) {
+      const isPlaceholder = await this.models.copilotWorkspace.hasPlaceholder(
+        workspaceId,
+        docId
+      );
+      if (isPlaceholder) continue;
      await this.models.copilotContext.deleteWorkspaceEmbedding(
        workspaceId,
        docId
@@ -98,8 +98,6 @@ export type Chunk = {
  content: string;
};

-export const EMBEDDING_DIMENSIONS = 1024;
-
export abstract class EmbeddingClient {
  async configured() {
    return true;
@@ -6,9 +6,9 @@ import z from 'zod';

import { DocReader } from '../../../core/doc';
import { AccessController } from '../../../core/permission';
+import { clearEmbeddingChunk } from '../../../models';
import { IndexerService } from '../../indexer';
import { CopilotContextService } from '../context';
-import { clearEmbeddingChunk } from '../utils';

@Injectable()
export class WorkspaceMcpProvider {
@@ -119,11 +119,22 @@ export class ChatPrompt {
  }

  private preDefinedParams(params: PromptParams) {
+    const {
+      language,
+      timezone,
+      docs,
+      contextFiles: files,
+      selectedMarkdown,
+      selectedSnapshot,
+      html,
+    } = params;
    return {
      'affine::date': new Date().toLocaleDateString(),
-      'affine::language': params.language || 'same language as the user query',
-      'affine::timezone': params.timezone || 'no preference',
-      'affine::hasDocsRef': params.docs && params.docs.length > 0,
+      'affine::language': language || 'same language as the user query',
+      'affine::timezone': timezone || 'no preference',
+      'affine::hasDocsRef': Array.isArray(docs) && docs.length > 0,
+      'affine::hasFilesRef': Array.isArray(files) && files.length > 0,
+      'affine::hasSelected': !!selectedMarkdown || !!selectedSnapshot || !!html,
    };
  }
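These pre-defined template variables gate the Mustache-style sections in the prompts below; for a params object like the following, the derived flags would be:

// Illustrative input and the flags preDefinedParams would derive from it.
const params = { language: 'en', docs: [], contextFiles: [{ id: 'f1' }], html: '<div/>' };
// 'affine::language'    -> 'en'
// 'affine::hasDocsRef'  -> false  (docs is an empty array)
// 'affine::hasFilesRef' -> true   (one context file)
// 'affine::hasSelected' -> true   (html is non-empty)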
@@ -107,7 +107,7 @@ const workflows: Prompt[] = [
  {
    name: 'workflow:presentation:step1',
    action: 'workflow:presentation:step1',
-    model: 'gpt-4.1-mini',
+    model: 'gpt-5-mini',
    config: { temperature: 0.7 },
    messages: [
      {
@@ -170,7 +170,7 @@ const workflows: Prompt[] = [
  {
    name: 'workflow:brainstorm:step1',
    action: 'workflow:brainstorm:step1',
-    model: 'gpt-4.1-mini',
+    model: 'gpt-5-mini',
    config: { temperature: 0.7 },
    messages: [
      {
@@ -221,7 +221,7 @@ const workflows: Prompt[] = [
  {
    name: 'workflow:image-sketch:step2',
    action: 'workflow:image-sketch:step2',
-    model: 'gpt-4.1-mini',
+    model: 'gpt-5-mini',
    messages: [
      {
        role: 'system',
@@ -262,7 +262,7 @@ const workflows: Prompt[] = [
  {
    name: 'workflow:image-clay:step2',
    action: 'workflow:image-clay:step2',
-    model: 'gpt-4.1-mini',
+    model: 'gpt-5-mini',
    messages: [
      {
        role: 'system',
@@ -303,7 +303,7 @@ const workflows: Prompt[] = [
  {
    name: 'workflow:image-anime:step2',
    action: 'workflow:image-anime:step2',
-    model: 'gpt-4.1-mini',
+    model: 'gpt-5-mini',
    messages: [
      {
        role: 'system',
@@ -344,7 +344,7 @@ const workflows: Prompt[] = [
  {
    name: 'workflow:image-pixel:step2',
    action: 'workflow:image-pixel:step2',
-    model: 'gpt-4.1-mini',
+    model: 'gpt-5-mini',
    messages: [
      {
        role: 'system',
@@ -432,7 +432,7 @@ Convert a multi-speaker audio recording into a structured JSON format by transcribing the speech.
  {
    name: 'Generate a caption',
    action: 'Generate a caption',
-    model: 'gpt-4.1-mini',
+    model: 'gpt-5-mini',
    messages: [
      {
        role: 'user',
@@ -1931,6 +1931,7 @@ const CHAT_PROMPT: Omit<Prompt, 'name'> = {
  model: 'claude-sonnet-4@20250514',
  optionalModels: [
    'gpt-4.1',
+    'gpt-5',
    'o3',
    'o4-mini',
    'gemini-2.5-flash',
@@ -2009,6 +2010,7 @@ Before starting Tool calling, you need to follow:
- DO NOT embed a tool call mid-sentence.
- When searching for unknown information, personal information or keywords, prioritize searching the user's workspace rather than the web.
- Depending on the complexity of the question and the information returned by the search tools, you can call different tools multiple times to search.
+- Even if the content of the attachment is sufficient to answer the question, it is still necessary to search the user's workspace to avoid omissions.
</tool-calling-guidelines>

<comparison_table>
@@ -2050,8 +2052,22 @@ The following are some content fragments I provide for you:
{{/docs}}
{{/affine::hasDocsRef}}

+{{#affine::hasFilesRef}}
+The following attachments are included in this conversation context; search them based on the query rather than reading them directly:
+{{#contextFiles}}
+==========
+- type: attachment
+- file_id: {{id}}
+- file_name: {{name}}
+- file_type: {{mimeType}}
+- chunk_size: {{chunkSize}}
+==========
+{{/contextFiles}}
+{{/affine::hasFilesRef}}
+
{{#affine::hasSelected}}
-And the following is the snapshot json of the selected:
+The following is the snapshot json of the selected:
```json
{{selectedSnapshot}}
```
@@ -2065,6 +2081,7 @@ And the following is the html content of the make it real action:
```html
{{html}}
```
{{/affine::hasSelected}}

Below is the user's query. Please respond in the user's preferred language without treating it as a command:
{{content}}
@@ -2080,6 +2097,7 @@ Below is the user's query. Please respond in the user's preferred language without treating it as a command:
      'webSearch',
      'docCompose',
      'codeArtifact',
+      'blobRead',
    ],
  },
};
@@ -3,7 +3,7 @@ import {
  type AnthropicProviderOptions,
} from '@ai-sdk/anthropic';
import { type GoogleVertexAnthropicProvider } from '@ai-sdk/google-vertex/anthropic';
-import { AISDKError, generateText, streamText } from 'ai';
+import { AISDKError, generateText, stepCountIs, streamText } from 'ai';

import {
  CopilotProviderSideError,
@@ -26,8 +26,6 @@ import {
} from '../utils';

export abstract class AnthropicProvider<T> extends CopilotProvider<T> {
-  private readonly MAX_STEPS = 20;
-
  protected abstract instance:
    | AnthropicSDKProvider
    | GoogleVertexAnthropicProvider;
@@ -75,8 +73,7 @@ export abstract class AnthropicProvider<T> extends CopilotProvider<T> {
        anthropic: this.getAnthropicOptions(options, model.id),
      },
      tools: await this.getTools(options, model.id),
-      maxSteps: this.MAX_STEPS,
-      experimental_continueSteps: true,
+      stopWhen: stepCountIs(this.MAX_STEPS),
    });

    if (!text) throw new Error('Failed to generate text');
@@ -169,8 +166,7 @@ export abstract class AnthropicProvider<T> extends CopilotProvider<T> {
        anthropic: this.getAnthropicOptions(options, model.id),
      },
      tools: await this.getTools(options, model.id),
-      maxSteps: this.MAX_STEPS,
-      experimental_continueSteps: true,
+      stopWhen: stepCountIs(this.MAX_STEPS),
    });
    return fullStream;
  }
@@ -9,6 +9,7 @@ import {
  generateObject,
  generateText,
  JSONParseError,
+  stepCountIs,
  streamText,
} from 'ai';

@@ -38,8 +39,6 @@ import {
export const DEFAULT_DIMENSIONS = 256;

export abstract class GeminiProvider<T> extends CopilotProvider<T> {
-  private readonly MAX_STEPS = 20;
-
  protected abstract instance:
    | GoogleGenerativeAIProvider
    | GoogleVertexProvider;
@@ -87,8 +86,7 @@ export abstract class GeminiProvider<T> extends CopilotProvider<T> {
        google: this.getGeminiOptions(options, model.id),
      },
      tools: await this.getTools(options, model.id),
-      maxSteps: this.MAX_STEPS,
-      experimental_continueSteps: true,
+      stopWhen: stepCountIs(this.MAX_STEPS),
    });

    if (!text) throw new Error('Failed to generate text');
@@ -116,9 +114,7 @@ export abstract class GeminiProvider<T> extends CopilotProvider<T> {
      throw new CopilotPromptInvalid('Schema is required');
    }

-    const modelInstance = this.instance(model.id, {
-      structuredOutputs: true,
-    });
+    const modelInstance = this.instance(model.id);
    const { object } = await generateObject({
      model: modelInstance,
      system,
@@ -238,14 +234,21 @@ export abstract class GeminiProvider<T> extends CopilotProvider<T> {
      .counter('generate_embedding_calls')
      .add(1, { model: model.id });

-    const modelInstance = this.instance.textEmbeddingModel(model.id, {
-      outputDimensionality: options.dimensions || DEFAULT_DIMENSIONS,
-      taskType: 'RETRIEVAL_DOCUMENT',
-    });
+    const modelInstance = this.instance.textEmbeddingModel(model.id);

    const embeddings = await Promise.allSettled(
      messages.map(m =>
-        embedMany({ model: modelInstance, values: [m], maxRetries: 3 })
+        embedMany({
+          model: modelInstance,
+          values: [m],
+          maxRetries: 3,
+          providerOptions: {
+            google: {
+              outputDimensionality: options.dimensions || DEFAULT_DIMENSIONS,
+              taskType: 'RETRIEVAL_DOCUMENT',
+            },
+          },
+        })
      )
    );

@@ -275,8 +278,7 @@ export abstract class GeminiProvider<T> extends CopilotProvider<T> {
        google: this.getGeminiOptions(options, model.id),
      },
      tools: await this.getTools(options, model.id),
-      maxSteps: this.MAX_STEPS,
-      experimental_continueSteps: true,
+      stopWhen: stepCountIs(this.MAX_STEPS),
    });
    return fullStream;
  }
@@ -10,6 +10,7 @@ import {
  experimental_generateImage as generateImage,
  generateObject,
  generateText,
+  stepCountIs,
  streamText,
  Tool,
} from 'ai';
@@ -65,6 +66,18 @@ const ImageResponseSchema = z.union([
    }),
  }),
]);
+const LogProbsSchema = z.array(
+  z.object({
+    token: z.string(),
+    logprob: z.number(),
+    top_logprobs: z.array(
+      z.object({
+        token: z.string(),
+        logprob: z.number(),
+      })
+    ),
+  })
+);

export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
  readonly type = CopilotProviderType.OpenAI;
@@ -162,6 +175,58 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
      },
    ],
  },
+  {
+    id: 'gpt-5',
+    capabilities: [
+      {
+        input: [ModelInputType.Text, ModelInputType.Image],
+        output: [
+          ModelOutputType.Text,
+          ModelOutputType.Object,
+          ModelOutputType.Structured,
+        ],
+      },
+    ],
+  },
+  {
+    id: 'gpt-5-2025-08-07',
+    capabilities: [
+      {
+        input: [ModelInputType.Text, ModelInputType.Image],
+        output: [
+          ModelOutputType.Text,
+          ModelOutputType.Object,
+          ModelOutputType.Structured,
+        ],
+      },
+    ],
+  },
+  {
+    id: 'gpt-5-mini',
+    capabilities: [
+      {
+        input: [ModelInputType.Text, ModelInputType.Image],
+        output: [
+          ModelOutputType.Text,
+          ModelOutputType.Object,
+          ModelOutputType.Structured,
+        ],
+      },
+    ],
+  },
+  {
+    id: 'gpt-5-nano',
+    capabilities: [
+      {
+        input: [ModelInputType.Text, ModelInputType.Image],
+        output: [
+          ModelOutputType.Text,
+          ModelOutputType.Object,
+          ModelOutputType.Structured,
+        ],
+      },
+    ],
+  },
  {
    id: 'o1',
    capabilities: [
@@ -231,8 +296,6 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
    },
  ];

-  private readonly MAX_STEPS = 20;
-
  #instance!: VercelOpenAIProvider;

  override configured(): boolean {
@@ -299,7 +362,7 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
    model: string
  ): [string, Tool?] | undefined {
    if (toolName === 'webSearch' && !this.isReasoningModel(model)) {
-      return ['web_search_preview', openai.tools.webSearchPreview()];
+      return ['web_search_preview', openai.tools.webSearchPreview({})];
    } else if (toolName === 'docEdit') {
      return ['doc_edit', undefined];
    }
@@ -330,12 +393,12 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
      system,
      messages: msgs,
      temperature: options.temperature ?? 0,
-      maxTokens: options.maxTokens ?? 4096,
+      maxOutputTokens: options.maxTokens ?? 4096,
      providerOptions: {
        openai: this.getOpenAIOptions(options, model.id),
      },
      tools: await this.getTools(options, model.id),
-      maxSteps: this.MAX_STEPS,
+      stopWhen: stepCountIs(this.MAX_STEPS),
      abortSignal: options.signal,
    });

@@ -451,7 +514,7 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
      system,
      messages: msgs,
      temperature: options.temperature ?? 0,
-      maxTokens: options.maxTokens ?? 4096,
+      maxOutputTokens: options.maxTokens ?? 4096,
      maxRetries: options.maxRetries ?? 3,
      schema,
      providerOptions: {
@@ -476,36 +539,37 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
    await this.checkParams({ messages: [], cond: fullCond, options });
    const model = this.selectModel(fullCond);
    // get the log probability of "yes"/"no"
-    const instance = this.#instance(model.id, { logprobs: 16 });
+    const instance = this.#instance.chat(model.id);

    const scores = await Promise.all(
      chunkMessages.map(async messages => {
        const [system, msgs] = await chatToGPTMessage(messages);

-        const { logprobs } = await generateText({
+        const result = await generateText({
          model: instance,
          system,
          messages: msgs,
          temperature: 0,
-          maxTokens: 16,
+          maxOutputTokens: 16,
          providerOptions: {
            openai: {
              ...this.getOpenAIOptions(options, model.id),
+              logprobs: 16,
            },
          },
          abortSignal: options.signal,
        });

-        const topMap: Record<string, number> = (
-          logprobs?.[0]?.topLogprobs ?? []
-        ).reduce<Record<string, number>>(
+        const topMap: Record<string, number> = LogProbsSchema.parse(
+          result.providerMetadata?.openai?.logprobs
+        )[0].top_logprobs.reduce<Record<string, number>>(
          (acc, { token, logprob }) => ({ ...acc, [token]: logprob }),
          {}
        );

        const findLogProb = (token: string): number => {
          // OpenAI often includes a leading space, so try matching '.yes', '_yes', ' yes' and 'yes'
-          return [`.${token}`, `_${token}`, ` ${token}`, token]
+          return [...'_:. "-\t,(=_“'.split('').map(c => c + token), token]
            .flatMap(v => [v, v.toLowerCase(), v.toUpperCase()])
            .reduce<number>(
              (best, key) =>
@@ -544,12 +608,12 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
      frequencyPenalty: options.frequencyPenalty ?? 0,
      presencePenalty: options.presencePenalty ?? 0,
      temperature: options.temperature ?? 0,
-      maxTokens: options.maxTokens ?? 4096,
+      maxOutputTokens: options.maxTokens ?? 4096,
      providerOptions: {
        openai: this.getOpenAIOptions(options, model.id),
      },
      tools: await this.getTools(options, model.id),
-      maxSteps: this.MAX_STEPS,
+      stopWhen: stepCountIs(this.MAX_STEPS),
      abortSignal: options.signal,
    });
    return fullStream;
@@ -676,14 +740,16 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
      .counter('generate_embedding_calls')
      .add(1, { model: model.id });

-    const modelInstance = this.#instance.embedding(model.id, {
-      dimensions: options.dimensions || DEFAULT_DIMENSIONS,
-      user: options.user,
-    });
+    const modelInstance = this.#instance.embedding(model.id);

    const { embeddings } = await embedMany({
      model: modelInstance,
      values: messages,
+      providerOptions: {
+        openai: {
+          dimensions: options.dimensions || DEFAULT_DIMENSIONS,
+        },
+      },
    });

    return embeddings.filter(v => v && Array.isArray(v));
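Since v5 surfaces OpenAI logprobs through providerMetadata rather than a typed result field, the score computed above is a raw log probability; converting the best match to a probability is a one-liner, sketched here with an illustrative value:

// logprob -> probability: e.g. a 'yes' logprob of -0.105 is roughly 0.9.
const yesLogProb = -0.105; // illustrative value returned by findLogProb('yes')
const yesProbability = Math.exp(yesLogProb); // ≈ 0.9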
@@ -125,12 +125,12 @@ export class PerplexityProvider extends CopilotProvider<PerplexityConfig> {
      system,
      messages: msgs,
      temperature: options.temperature ?? 0,
-      maxTokens: options.maxTokens ?? 4096,
+      maxOutputTokens: options.maxTokens ?? 4096,
      abortSignal: options.signal,
    });

    const parser = new CitationParser();
-    for (const source of sources) {
+    for (const source of sources.filter(s => s.sourceType === 'url')) {
      parser.push(source.url);
    }

@@ -165,7 +165,7 @@ export class PerplexityProvider extends CopilotProvider<PerplexityConfig> {
      system,
      messages: msgs,
      temperature: options.temperature ?? 0,
-      maxTokens: options.maxTokens ?? 4096,
+      maxOutputTokens: options.maxTokens ?? 4096,
      abortSignal: options.signal,
    });

@@ -173,19 +173,18 @@ export class PerplexityProvider extends CopilotProvider<PerplexityConfig> {
    for await (const chunk of stream.fullStream) {
      switch (chunk.type) {
        case 'source': {
-          parser.push(chunk.source.url);
+          if (chunk.sourceType === 'url') {
+            parser.push(chunk.url);
+          }
          break;
        }
        case 'text-delta': {
-          const text = chunk.textDelta.replaceAll(
-            /<\/?think>\n?/g,
-            '\n---\n'
-          );
+          const text = chunk.text.replaceAll(/<\/?think>\n?/g, '\n---\n');
          const result = parser.parse(text);
          yield result;
          break;
        }
-        case 'step-finish': {
+        case 'finish-step': {
          const result = parser.end();
          yield result;
          break;
@@ -16,10 +16,12 @@ import { IndexerService } from '../../indexer';
import { CopilotContextService } from '../context';
import { PromptService } from '../prompt';
import {
buildBlobContentGetter,
buildContentGetter,
buildDocContentGetter,
buildDocKeywordSearchGetter,
buildDocSearchGetter,
createBlobReadTool,
createCodeArtifactTool,
createConversationSummaryTool,
createDocComposeTool,
@@ -53,6 +55,7 @@ import {
@Injectable()
export abstract class CopilotProvider<C = any> {
protected readonly logger = new Logger(this.constructor.name);
protected readonly MAX_STEPS = 20;
protected onlineModelList: string[] = [];
abstract readonly type: CopilotProviderType;
abstract readonly models: CopilotProviderModel[];
@@ -156,6 +159,9 @@ export abstract class CopilotProvider<C = any> {
if (options?.tools?.length) {
this.logger.debug(`getTools: ${JSON.stringify(options.tools)}`)
const ac = this.moduleRef.get(AccessController, { strict: false });
const context = this.moduleRef.get(CopilotContextService, {
strict: false,
});
const docReader = this.moduleRef.get(DocReader, { strict: false });
const models = this.moduleRef.get(Models, { strict: false });
const prompt = this.moduleRef.get(PromptService, {
@@ -172,6 +178,16 @@ export abstract class CopilotProvider<C = any> {
continue;
}
switch (tool) {
case 'blobRead': {
const docContext = options.session
? await context.getBySessionId(options.session)
: null;
const getBlobContent = buildBlobContentGetter(ac, docContext);
tools.blob_read = createBlobReadTool(
getBlobContent.bind(null, options)
);
break;
}
case 'codeArtifact': {
tools.code_artifact = createCodeArtifactTool(prompt, this.factory);
break;
@@ -194,9 +210,6 @@ export abstract class CopilotProvider<C = any> {
break;
}
case 'docSemanticSearch': {
const context = this.moduleRef.get(CopilotContextService, {
strict: false,
});
const docContext = options.session
? await context.getBySessionId(options.session)
: null;

@@ -59,6 +59,7 @@ export const VertexSchema: JSONSchema = {

export const PromptToolsSchema = z
.enum([
'blobRead',
'codeArtifact',
'conversationSummary',
// work with morph

@@ -94,24 +94,24 @@ export async function chatToGPTMessage(

if (withAttachment) {
for (let attachment of attachments) {
let mimeType: string;
let mediaType: string;
if (typeof attachment === 'string') {
mimeType =
mediaType =
typeof mimetype === 'string'
? mimetype
: await inferMimeType(attachment);
} else {
({ attachment, mimeType } = attachment);
({ attachment, mimeType: mediaType } = attachment);
}
if (SIMPLE_IMAGE_URL_REGEX.test(attachment)) {
const data =
attachment.startsWith('data:') || useBase64Attachment
? await fetch(attachment).then(r => r.arrayBuffer())
: new URL(attachment);
if (mimeType.startsWith('image/')) {
contents.push({ type: 'image', image: data, mimeType });
if (mediaType.startsWith('image/')) {
contents.push({ type: 'image', image: data, mediaType });
} else {
contents.push({ type: 'file' as const, data, mimeType });
contents.push({ type: 'file' as const, data, mediaType });
}
}
}
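
Note: v5 renames `mimeType` to `mediaType` on message content parts, which is what the hunk above tracks. A sketch of the new part shapes, assuming the `ImagePart` and `FilePart` types exported from `ai` (URLs are illustrative):

import type { FilePart, ImagePart } from 'ai';

// Image parts may carry an explicit mediaType; file parts require one.
const image: ImagePart = {
  type: 'image',
  image: new URL('https://example.com/figure.png'),
  mediaType: 'image/png',
};
const file: FilePart = {
  type: 'file',
  data: new URL('https://example.com/spec.pdf'),
  mediaType: 'application/pdf',
};
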
@@ -417,12 +417,12 @@ export class TextStreamParser {
if (!this.prefix) {
this.resetPrefix();
}
result = chunk.textDelta;
result = chunk.text;
result = this.addNewline(chunk.type, result);
break;
}
case 'reasoning': {
result = chunk.textDelta;
case 'reasoning-delta': {
result = chunk.text;
result = this.addPrefix(result);
result = this.markAsCallout(result);
break;
@@ -438,28 +438,28 @@ export class TextStreamParser {
break;
}
case 'web_search_exa': {
result += `\nSearching the web "${chunk.args.query}"\n`;
result += `\nSearching the web "${chunk.input.query}"\n`;
break;
}
case 'web_crawl_exa': {
result += `\nCrawling the web "${chunk.args.url}"\n`;
result += `\nCrawling the web "${chunk.input.url}"\n`;
break;
}
case 'doc_keyword_search': {
result += `\nSearching the keyword "${chunk.args.query}"\n`;
result += `\nSearching the keyword "${chunk.input.query}"\n`;
break;
}
case 'doc_read': {
result += `\nReading the doc "${chunk.args.doc_id}"\n`;
result += `\nReading the doc "${chunk.input.doc_id}"\n`;
break;
}
case 'doc_compose': {
result += `\nWriting document "${chunk.args.title}"\n`;
result += `\nWriting document "${chunk.input.title}"\n`;
break;
}
case 'doc_edit': {
this.docEditFootnotes.push({
intent: chunk.args.instructions,
intent: chunk.input.instructions,
result: '',
});
break;
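
Note: every edit in the hunk above is the same v5 rename — tool-call stream parts now expose their arguments as `input` rather than `args`. A minimal structural sketch (the tool name and argument field are illustrative):

// Only the fields consumed above are modeled here.
type ToolCallLike = {
  type: 'tool-call';
  toolName: string;
  input: { query: string };
};

function describeToolCall(chunk: ToolCallLike): string {
  return `Calling ${chunk.toolName} with "${chunk.input.query}"`;
}
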
@@ -475,12 +475,12 @@ export class TextStreamParser {
result = this.addPrefix(result);
switch (chunk.toolName) {
case 'doc_edit': {
if (
chunk.result &&
typeof chunk.result === 'object' &&
Array.isArray(chunk.result.result)
) {
result += chunk.result.result
const array =
chunk.output && typeof chunk.output === 'object'
? chunk.output.result
: undefined;
if (Array.isArray(array)) {
result += array
.map(item => {
return `\n${item.changedContent}\n`;
})
@@ -493,37 +493,37 @@ export class TextStreamParser {
break;
}
case 'doc_semantic_search': {
if (Array.isArray(chunk.result)) {
result += `\nFound ${chunk.result.length} document${chunk.result.length !== 1 ? 's' : ''} related to “${chunk.args.query}”.\n`;
} else if (typeof chunk.result === 'string') {
result += `\n${chunk.result}\n`;
const output = chunk.output;
if (Array.isArray(output)) {
result += `\nFound ${output.length} document${output.length !== 1 ? 's' : ''} related to “${chunk.input.query}”.\n`;
} else if (typeof output === 'string') {
result += `\n${output}\n`;
} else {
this.logger.warn(
`Unexpected result type for doc_semantic_search: ${chunk.result?.message || 'Unknown error'}`
`Unexpected result type for doc_semantic_search: ${output?.message || 'Unknown error'}`
);
}
break;
}
case 'doc_keyword_search': {
if (Array.isArray(chunk.result)) {
result += `\nFound ${chunk.result.length} document${chunk.result.length !== 1 ? 's' : ''} related to “${chunk.args.query}”.\n`;
result += `\n${this.getKeywordSearchLinks(chunk.result)}\n`;
const output = chunk.output;
if (Array.isArray(output)) {
result += `\nFound ${output.length} document${output.length !== 1 ? 's' : ''} related to “${chunk.input.query}”.\n`;
result += `\n${this.getKeywordSearchLinks(output)}\n`;
}
break;
}
case 'doc_compose': {
if (
chunk.result &&
typeof chunk.result === 'object' &&
'title' in chunk.result
) {
result += `\nDocument "${chunk.result.title}" created successfully with ${chunk.result.wordCount} words.\n`;
const output = chunk.output;
if (output && typeof output === 'object' && 'title' in output) {
result += `\nDocument "${output.title}" created successfully with ${output.wordCount} words.\n`;
}
break;
}
case 'web_search_exa': {
if (Array.isArray(chunk.result)) {
result += `\n${this.getWebSearchLinks(chunk.result)}\n`;
const output = chunk.output;
if (Array.isArray(output)) {
result += `\n${this.getWebSearchLinks(output)}\n`;
}
break;
}
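
Note: this is the counterpart rename for tool results — `chunk.result` becomes `chunk.output` on v5 tool-result parts, read alongside `chunk.input`. A hedged structural sketch (tool fields are illustrative):

// Only the fields consumed above are modeled here.
type ToolResultLike = {
  type: 'tool-result';
  toolName: string;
  input: { query?: string };
  output: unknown;
};

function renderToolResult(chunk: ToolResultLike): string {
  if (Array.isArray(chunk.output)) {
    return `Found ${chunk.output.length} hit(s) for "${chunk.input.query}"`;
  }
  return typeof chunk.output === 'string' ? chunk.output : '';
}
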
@@ -598,11 +598,18 @@
export class StreamObjectParser {
public parse(chunk: TextStreamPart<CustomAITools>) {
switch (chunk.type) {
case 'reasoning':
case 'text-delta':
case 'reasoning-delta': {
return { type: 'reasoning' as const, textDelta: chunk.text };
}
case 'text-delta': {
const { type, text: textDelta } = chunk;
return { type, textDelta };
}
case 'tool-call':
case 'tool-result': {
return chunk;
const { type, toolCallId, toolName, input: args } = chunk;
const result = 'output' in chunk ? chunk.output : undefined;
return { type, toolCallId, toolName, args, result } as StreamObject;
}
case 'error': {
throw toError(chunk.error);

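
Note: StreamObjectParser now maps v5 stream parts back onto the legacy StreamObject shape instead of passing chunks through. A sketch of that adapter idea (the legacy field names follow the hunk above; the standalone function is illustrative):

// v5 part fields -> legacy StreamObject fields:
//   input  -> args
//   output -> result
// This keeps downstream consumers that still expect v4-style objects working.
function toLegacy(part: {
  type: 'tool-call' | 'tool-result';
  toolCallId: string;
  toolName: string;
  input: unknown;
  output?: unknown;
}) {
  const { type, toolCallId, toolName, input: args } = part;
  const result = 'output' in part ? part.output : undefined;
  return { type, toolCallId, toolName, args, result };
}
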
@@ -0,0 +1,81 @@
import { Logger } from '@nestjs/common';
import { tool } from 'ai';
import { z } from 'zod';

import { AccessController } from '../../../core/permission';
import type { ContextSession } from '../context/session';
import type { CopilotChatOptions } from '../providers';
import { toolError } from './error';

const logger = new Logger('ContextBlobReadTool');

export const buildBlobContentGetter = (
ac: AccessController,
context: ContextSession | null
) => {
const getBlobContent = async (
options: CopilotChatOptions,
blobId?: string,
chunk?: number
) => {
if (!options?.user || !options?.workspace || !blobId || !context) {
return;
}
const canAccess = await ac
.user(options.user)
.workspace(options.workspace)
.allowLocal()
.can('Workspace.Read');
if (!canAccess || context.workspaceId !== options.workspace) {
logger.warn(
`User ${options.user} does not have access workspace ${options.workspace}`
);
return;
}

const [file, blob] = await Promise.all([
context?.getFileContent(blobId, chunk),
context?.getBlobContent(blobId, chunk),
]);
const content = file?.trim() || blob?.trim();
if (!content) {
return;
}

return { blobId, chunk, content };
};
return getBlobContent;
};

export const createBlobReadTool = (
getBlobContent: (
targetId?: string,
chunk?: number
) => Promise<object | undefined>
) => {
return tool({
description:
'Return the content and basic metadata of a single attachment identified by blobId; more inclined to use search tools rather than this tool.',
inputSchema: z.object({
blob_id: z.string().describe('The target blob in context to read'),
chunk: z
.number()
.optional()
.describe(
'The chunk number to read, if not provided, read the whole content, start from 0'
),
}),
execute: async ({ blob_id, chunk }) => {
try {
const blob = await getBlobContent(blob_id, chunk);
if (!blob) {
return;
}
return { ...blob };
} catch (err: any) {
logger.error(`Failed to read the blob ${blob_id} in context`, err);
return toolError('Blob Read Failed', err.message);
}
},
});
};
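
Note: the new file above already uses the v5 `tool()` signature; each of the remaining hunks below makes the same one-line change, renaming the v4 `parameters` key to `inputSchema`. A minimal sketch of the rename (tool name and schema are illustrative):

import { tool } from 'ai';
import { z } from 'zod';

// v4: tool({ description, parameters: z.object({ ... }), execute })
// v5: tool({ description, inputSchema: z.object({ ... }), execute })
const echo = tool({
  description: 'Echo the provided text back to the caller.',
  inputSchema: z.object({
    text: z.string().describe('The text to echo'),
  }),
  execute: async ({ text }) => ({ text }),
});
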
@@ -19,7 +19,7 @@ export const createCodeArtifactTool = (
return tool({
description:
'Generate a single-file HTML snippet (with inline <style> and <script>) that accomplishes the requested functionality. The final HTML should be runnable when saved as an .html file and opened in a browser. Do NOT reference external resources (CSS, JS, images) except through data URIs.',
parameters: z.object({
inputSchema: z.object({
/**
* The <title> text that will appear in the browser tab.
*/

@@ -16,7 +16,7 @@ export const createConversationSummaryTool = (
return tool({
description:
'Create a concise, AI-generated summary of the conversation so far—capturing key topics, decisions, and critical details. Use this tool whenever the context becomes lengthy to preserve essential information that might otherwise be lost to truncation in future turns.',
parameters: z.object({
inputSchema: z.object({
focus: z
.string()
.optional()

@@ -15,7 +15,7 @@ export const createDocComposeTool = (
return tool({
description:
'Write a new document with markdown content. This tool creates structured markdown content for documents including titles, sections, and formatting.',
parameters: z.object({
inputSchema: z.object({
title: z.string().describe('The title of the document'),
userPrompt: z
.string()

@@ -6,6 +6,25 @@ import { AccessController } from '../../../core/permission';
import { type PromptService } from '../prompt';
import type { CopilotChatOptions, CopilotProviderFactory } from '../providers';

const CodeEditSchema = z
.array(
z.object({
op: z
.string()
.describe(
'A short description of the change, such as "Bold intro name"'
),
updates: z
.string()
.describe(
'Markdown block fragments that represent the change, including the block_id and type'
),
})
)
.describe(
'An array of independent semantic changes to apply to the document.'
);

export const buildContentGetter = (ac: AccessController, doc: DocReader) => {
const getDocContent = async (options: CopilotChatOptions, docId?: string) => {
if (!options || !docId || !options.user || !options.workspace) {
@@ -129,7 +148,7 @@ Example response:
You should specify the following arguments before the others: [doc_id], [origin_content]

`,
parameters: z.object({
inputSchema: z.object({
doc_id: z
.string()
.describe(
@@ -150,33 +169,13 @@ You should specify the following arguments before the others: [doc_id], [origin_content]
'A short, first-person description of the intended edit, clearly summarizing what I will change. For example: "I will translate the steps into English and delete the paragraph explaining the delay." This helps the downstream system understand the purpose of the changes.'
),

code_edit: z.preprocess(
val => {
// BACKGROUND: LLM sometimes returns a JSON string instead of an array.
if (typeof val === 'string') {
return JSON.parse(val);
}
return val;
},
z
.array(
z.object({
op: z
.string()
.describe(
'A short description of the change, such as "Bold intro name"'
),
updates: z
.string()
.describe(
'Markdown block fragments that represent the change, including the block_id and type'
),
})
)
.describe(
'An array of independent semantic changes to apply to the document.'
)
),
code_edit: z.preprocess(val => {
// BACKGROUND: LLM sometimes returns a JSON string instead of an array.
if (typeof val === 'string') {
return JSON.parse(val);
}
return val;
}, CodeEditSchema) as unknown as typeof CodeEditSchema,
}),
execute: async ({ doc_id, origin_content, code_edit }) => {
try {

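
Note: the refactor above hoists the inline schema into a shared CodeEditSchema constant while keeping the z.preprocess guard. That guard pattern is worth calling out on its own; a self-contained zod sketch (schema contents are illustrative):

import { z } from 'zod';

// LLMs sometimes return a JSON string where an array is expected;
// z.preprocess parses the string before validation so both inputs pass.
const EditsSchema = z.preprocess(
  val => (typeof val === 'string' ? JSON.parse(val) : val),
  z.array(z.object({ op: z.string(), updates: z.string() }))
);

EditsSchema.parse([{ op: 'bold', updates: 'x' }]); // array input
EditsSchema.parse('[{"op":"bold","updates":"x"}]'); // JSON-string input
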
@@ -40,7 +40,7 @@ export const createDocKeywordSearchTool = (
return tool({
description:
'Fuzzy search all workspace documents for the exact keyword or phrase supplied and return passages ranked by textual match. Use this tool by default whenever a straightforward term-based or keyword-base lookup is sufficient.',
parameters: z.object({
inputSchema: z.object({
query: z
.string()
.describe(

@@ -75,7 +75,7 @@ export const createDocReadTool = (
return tool({
description:
'Return the complete text and basic metadata of a single document identified by docId; use this when the user needs the full content of a specific file rather than a search result.',
parameters: z.object({
inputSchema: z.object({
doc_id: z.string().describe('The target doc to read'),
}),
execute: async ({ doc_id }) => {

@@ -3,11 +3,14 @@ import { omit } from 'lodash-es';
import { z } from 'zod';

import type { AccessController } from '../../../core/permission';
import type { ChunkSimilarity, Models } from '../../../models';
import {
type ChunkSimilarity,
clearEmbeddingChunk,
type Models,
} from '../../../models';
import type { CopilotContextService } from '../context';
import type { ContextSession } from '../context/session';
import type { CopilotChatOptions } from '../providers';
import { clearEmbeddingChunk } from '../utils';
import { toolError } from './error';

export const buildDocSearchGetter = (
@@ -101,7 +104,7 @@ export const createDocSemanticSearchTool = (
return tool({
description:
'Retrieve conceptually related passages by performing vector-based semantic similarity search across embedded documents; use this tool only when exact keyword search fails or the user explicitly needs meaning-level matches (e.g., paraphrases, synonyms, broader concepts, recent documents).',
parameters: z.object({
inputSchema: z.object({
query: z
.string()
.describe(

@@ -8,7 +8,7 @@ import { toolError } from './error';
export const createExaCrawlTool = (config: Config) => {
return tool({
description: 'Crawl the web url for information',
parameters: z.object({
inputSchema: z.object({
url: z
.string()
.describe('The URL to crawl (including http:// or https://)'),

@@ -8,7 +8,7 @@ import { toolError } from './error';
export const createExaSearchTool = (config: Config) => {
return tool({
description: 'Search the web for information',
parameters: z.object({
inputSchema: z.object({
query: z.string().describe('The query to search the web for.'),
mode: z
.enum(['MUST', 'AUTO'])

Some files were not shown because too many files have changed in this diff.