diff --git a/.docker/selfhost/schema.json b/.docker/selfhost/schema.json index 6c027fbec1..c625f9e778 100644 --- a/.docker/selfhost/schema.json +++ b/.docker/selfhost/schema.json @@ -971,7 +971,7 @@ }, "scenarios": { "type": "object", - "description": "Use custom models in scenarios and override default settings.\n@default {\"override_enabled\":false,\"scenarios\":{\"audio_transcribing\":\"gemini-2.5-flash\",\"chat\":\"gemini-2.5-flash\",\"embedding\":\"gemini-embedding-001\",\"image\":\"gpt-image-1\",\"rerank\":\"gpt-4.1\",\"coding\":\"claude-sonnet-4-5@20250929\",\"complex_text_generation\":\"gpt-4o-2024-08-06\",\"quick_decision_making\":\"gpt-5-mini\",\"quick_text_generation\":\"gemini-2.5-flash\",\"polish_and_summarize\":\"gemini-2.5-flash\"}}", + "description": "Use custom models in scenarios and override default settings.\n@default {\"override_enabled\":false,\"scenarios\":{\"audio_transcribing\":\"gemini-2.5-flash\",\"chat\":\"gemini-2.5-flash\",\"embedding\":\"gemini-embedding-001\",\"image\":\"gpt-image-1\",\"coding\":\"claude-sonnet-4-5@20250929\",\"complex_text_generation\":\"gpt-5-mini\",\"quick_decision_making\":\"gpt-5-mini\",\"quick_text_generation\":\"gemini-2.5-flash\",\"polish_and_summarize\":\"gemini-2.5-flash\"}}", "default": { "override_enabled": false, "scenarios": { @@ -979,9 +979,8 @@ "chat": "gemini-2.5-flash", "embedding": "gemini-embedding-001", "image": "gpt-image-1", - "rerank": "gpt-4.1", "coding": "claude-sonnet-4-5@20250929", - "complex_text_generation": "gpt-4o-2024-08-06", + "complex_text_generation": "gpt-5-mini", "quick_decision_making": "gpt-5-mini", "quick_text_generation": "gemini-2.5-flash", "polish_and_summarize": "gemini-2.5-flash" diff --git a/packages/backend/server/package.json b/packages/backend/server/package.json index 8ff2b8fa2f..5bef6c0c10 100644 --- a/packages/backend/server/package.json +++ b/packages/backend/server/package.json @@ -25,14 +25,14 @@ "dependencies": { "@affine/s3-compat": "workspace:*", 
"@affine/server-native": "workspace:*", - "@ai-sdk/google": "^2.0.45", - "@ai-sdk/google-vertex": "^3.0.88", + "@ai-sdk/google": "^3.0.46", + "@ai-sdk/google-vertex": "^4.0.83", "@apollo/server": "^4.13.0", "@fal-ai/serverless-client": "^0.15.0", "@google-cloud/opentelemetry-cloud-trace-exporter": "^3.0.0", "@google-cloud/opentelemetry-resource-util": "^3.0.0", - "@nestjs-cls/transactional": "^2.7.0", - "@nestjs-cls/transactional-adapter-prisma": "^1.2.24", + "@nestjs-cls/transactional": "^3.2.0", + "@nestjs-cls/transactional-adapter-prisma": "^1.3.4", "@nestjs/apollo": "^13.0.4", "@nestjs/bullmq": "^11.0.4", "@nestjs/common": "^11.0.21", @@ -66,7 +66,7 @@ "@queuedash/api": "^3.16.0", "@react-email/components": "^0.5.7", "@socket.io/redis-adapter": "^8.3.0", - "ai": "^5.0.118", + "ai": "^6.0.118", "bullmq": "^5.40.2", "cookie-parser": "^1.4.7", "cross-env": "^10.1.0", diff --git a/packages/backend/server/src/__tests__/copilot/copilot-provider.spec.ts b/packages/backend/server/src/__tests__/copilot/copilot-provider.spec.ts index 4d6bd8cd5d..224051aaff 100644 --- a/packages/backend/server/src/__tests__/copilot/copilot-provider.spec.ts +++ b/packages/backend/server/src/__tests__/copilot/copilot-provider.spec.ts @@ -118,7 +118,6 @@ test.serial.before(async t => { enabled: true, scenarios: { image: 'flux-1/schnell', - rerank: 'gpt-5-mini', complex_text_generation: 'gpt-5-mini', coding: 'gpt-5-mini', quick_decision_making: 'gpt-5-mini', @@ -931,8 +930,8 @@ test( t.log('Rerank scores:', scores); t.is( scores.filter(s => s > 0.5).length, - 4, - 'should have 4 related chunks' + 5, + 'should have 5 related chunks' ); }); } diff --git a/packages/backend/server/src/__tests__/copilot/native-adapter.spec.ts b/packages/backend/server/src/__tests__/copilot/native-adapter.spec.ts index 820d16093c..da44e589a1 100644 --- a/packages/backend/server/src/__tests__/copilot/native-adapter.spec.ts +++ b/packages/backend/server/src/__tests__/copilot/native-adapter.spec.ts @@ -18,7 +18,7 @@ 
test('NativeProviderAdapter streamText should append citation footnotes', async const adapter = new NativeProviderAdapter(mockDispatch, {}, 3); const chunks: string[] = []; for await (const chunk of adapter.streamText({ - model: 'gpt-4.1', + model: 'gpt-5-mini', stream: true, messages: [{ role: 'user', content: [{ type: 'text', text: 'hi' }] }], })) { @@ -36,7 +36,7 @@ test('NativeProviderAdapter streamObject should append citation footnotes', asyn const adapter = new NativeProviderAdapter(mockDispatch, {}, 3); const chunks = []; for await (const chunk of adapter.streamObject({ - model: 'gpt-4.1', + model: 'gpt-5-mini', stream: true, messages: [{ role: 'user', content: [{ type: 'text', text: 'hi' }] }], })) { @@ -91,7 +91,7 @@ test('NativeProviderAdapter streamObject should append fallback attachment footn const adapter = new NativeProviderAdapter(dispatch, {}, 3); const chunks = []; for await (const chunk of adapter.streamObject({ - model: 'gpt-4.1', + model: 'gpt-5-mini', stream: true, messages: [{ role: 'user', content: [{ type: 'text', text: 'hi' }] }], })) { @@ -148,7 +148,7 @@ test('NativeProviderAdapter streamObject should map tool and text events', async const events = []; for await (const event of adapter.streamObject({ - model: 'gpt-4.1', + model: 'gpt-5-mini', stream: true, messages: [{ role: 'user', content: [{ type: 'text', text: 'read' }] }], })) { @@ -169,7 +169,7 @@ test('NativeProviderAdapter streamObject should map tool and text events', async test('buildNativeRequest should include rust middleware from profile', async t => { const { request } = await buildNativeRequest({ - model: 'gpt-4.1', + model: 'gpt-5-mini', messages: [{ role: 'user', content: 'hello' }], tools: {}, middleware: { @@ -195,7 +195,7 @@ test('NativeProviderAdapter streamText should skip citation footnotes when disab }); const chunks: string[] = []; for await (const chunk of adapter.streamText({ - model: 'gpt-4.1', + model: 'gpt-5-mini', stream: true, messages: [{ role: 'user', 
content: [{ type: 'text', text: 'hi' }] }], })) { diff --git a/packages/backend/server/src/__tests__/copilot/provider-native.spec.ts b/packages/backend/server/src/__tests__/copilot/provider-native.spec.ts index 34ac62cfc0..890a158a45 100644 --- a/packages/backend/server/src/__tests__/copilot/provider-native.spec.ts +++ b/packages/backend/server/src/__tests__/copilot/provider-native.spec.ts @@ -1,7 +1,9 @@ import test from 'ava'; import { ProviderMiddlewareConfig } from '../../plugins/copilot/config'; +import { normalizeOpenAIOptionsForModel } from '../../plugins/copilot/providers/openai'; import { CopilotProvider } from '../../plugins/copilot/providers/provider'; +import { normalizeRerankModel } from '../../plugins/copilot/providers/rerank'; import { CopilotProviderType, ModelInputType, @@ -12,7 +14,7 @@ class TestOpenAIProvider extends CopilotProvider<{ apiKey: string }> { readonly type = CopilotProviderType.OpenAI; readonly models = [ { - id: 'gpt-4.1', + id: 'gpt-5-mini', capabilities: [ { input: [ModelInputType.Text], @@ -36,7 +38,7 @@ } exposeMetricLabels() { - return this.metricLabels('gpt-4.1'); + return this.metricLabels('gpt-5-mini'); } exposeMiddleware() { @@ -97,3 +99,41 @@ test('getActiveProviderMiddleware should merge defaults with profile override', 'thinking_format', ]); }); + +test('normalizeOpenAIOptionsForModel should drop sampling knobs for gpt-5.2', t => { + t.deepEqual( + normalizeOpenAIOptionsForModel( + { + temperature: 0.7, + topP: 0.8, + presencePenalty: 0.2, + frequencyPenalty: 0.1, + maxTokens: 128, + }, + 'gpt-5.2' + ), + { maxTokens: 128 } + ); +}); + +test('normalizeOpenAIOptionsForModel should keep options for gpt-4.1', t => { + t.deepEqual( + normalizeOpenAIOptionsForModel( + { temperature: 0.7, topP: 0.8, maxTokens: 128 }, + 'gpt-4.1' + ), + { temperature: 0.7, topP: 0.8, maxTokens: 128 } + ); +}); + +test('normalizeRerankModel should keep supported rerank 
models', t => { + t.is(normalizeRerankModel('gpt-4.1'), 'gpt-4.1'); + t.is(normalizeRerankModel('gpt-4.1-mini'), 'gpt-4.1-mini'); + t.is(normalizeRerankModel('gpt-5.2'), 'gpt-5.2'); +}); + +test('normalizeRerankModel should fall back for unsupported models', t => { + t.is(normalizeRerankModel('gpt-5-mini'), 'gpt-5.2'); + t.is(normalizeRerankModel('gemini-2.5-flash'), 'gpt-5.2'); + t.is(normalizeRerankModel(undefined), 'gpt-5.2'); +}); diff --git a/packages/backend/server/src/__tests__/copilot/provider-registry.spec.ts b/packages/backend/server/src/__tests__/copilot/provider-registry.spec.ts index 858c3d3cde..6412c42abe 100644 --- a/packages/backend/server/src/__tests__/copilot/provider-registry.spec.ts +++ b/packages/backend/server/src/__tests__/copilot/provider-registry.spec.ts @@ -88,11 +88,11 @@ test('resolveModel should support explicit provider prefix and keep slash models const prefixed = resolveModel({ registry, - modelId: 'openai-main/gpt-4.1', + modelId: 'openai-main/gpt-5-mini', }); t.deepEqual(prefixed, { - rawModelId: 'openai-main/gpt-4.1', - modelId: 'gpt-4.1', + rawModelId: 'openai-main/gpt-5-mini', + modelId: 'gpt-5-mini', explicitProviderId: 'openai-main', candidateProviderIds: ['openai-main'], }); @@ -154,12 +154,15 @@ test('stripProviderPrefix should only strip matched provider prefix', t => { }); t.is( - stripProviderPrefix(registry, 'openai-main', 'openai-main/gpt-4.1'), - 'gpt-4.1' + stripProviderPrefix(registry, 'openai-main', 'openai-main/gpt-5-mini'), + 'gpt-5-mini' ); t.is( - stripProviderPrefix(registry, 'openai-main', 'another-main/gpt-4.1'), - 'another-main/gpt-4.1' + stripProviderPrefix(registry, 'openai-main', 'another-main/gpt-5-mini'), + 'another-main/gpt-5-mini' + ); + t.is( + stripProviderPrefix(registry, 'openai-main', 'gpt-5-mini'), + 'gpt-5-mini' + ); - t.is(stripProviderPrefix(registry, 'openai-main', 'gpt-4.1'), 'gpt-4.1'); }); diff --git a/packages/backend/server/src/__tests__/copilot/tool-call-loop.spec.ts 
b/packages/backend/server/src/__tests__/copilot/tool-call-loop.spec.ts index 95ec395a81..a8f76dea69 100644 --- a/packages/backend/server/src/__tests__/copilot/tool-call-loop.spec.ts +++ b/packages/backend/server/src/__tests__/copilot/tool-call-loop.spec.ts @@ -116,7 +116,7 @@ test('ToolCallLoop should execute tool call and continue to next round', async t const events: NativeLlmStreamEvent[] = []; for await (const event of loop.run({ - model: 'gpt-4.1', + model: 'gpt-5-mini', stream: true, messages: [{ role: 'user', content: [{ type: 'text', text: 'read doc' }] }], })) { diff --git a/packages/backend/server/src/__tests__/mocks/copilot.mock.ts b/packages/backend/server/src/__tests__/mocks/copilot.mock.ts index a092329df9..f10dd51049 100644 --- a/packages/backend/server/src/__tests__/mocks/copilot.mock.ts +++ b/packages/backend/server/src/__tests__/mocks/copilot.mock.ts @@ -39,33 +39,6 @@ export class MockCopilotProvider extends OpenAIProvider { }, ], }, - { - id: 'gpt-4o', - capabilities: [ - { - input: [ModelInputType.Text, ModelInputType.Image], - output: [ModelOutputType.Text, ModelOutputType.Object], - }, - ], - }, - { - id: 'gpt-4o-2024-08-06', - capabilities: [ - { - input: [ModelInputType.Text, ModelInputType.Image], - output: [ModelOutputType.Text, ModelOutputType.Object], - }, - ], - }, - { - id: 'gpt-4.1-2025-04-14', - capabilities: [ - { - input: [ModelInputType.Text, ModelInputType.Image], - output: [ModelOutputType.Text, ModelOutputType.Object], - }, - ], - }, { id: 'gpt-5', capabilities: [ @@ -97,6 +70,19 @@ export class MockCopilotProvider extends OpenAIProvider { }, ], }, + { + id: 'gpt-5-nano', + capabilities: [ + { + input: [ModelInputType.Text, ModelInputType.Image], + output: [ + ModelOutputType.Text, + ModelOutputType.Object, + ModelOutputType.Structured, + ], + }, + ], + }, { id: 'gpt-image-1', capabilities: [ @@ -133,6 +119,23 @@ export class MockCopilotProvider extends OpenAIProvider { }, ], }, + { + id: 'gemini-3.1-pro-preview', + 
capabilities: [ + { + input: [ + ModelInputType.Text, + ModelInputType.Image, + ModelInputType.Audio, + ], + output: [ + ModelOutputType.Text, + ModelOutputType.Object, + ModelOutputType.Structured, + ], + }, + ], + }, ]; override async text( diff --git a/packages/backend/server/src/plugins/copilot/config.ts b/packages/backend/server/src/plugins/copilot/config.ts index bc695edae5..44ecf0244a 100644 --- a/packages/backend/server/src/plugins/copilot/config.ts +++ b/packages/backend/server/src/plugins/copilot/config.ts @@ -230,9 +230,8 @@ defineModuleConfig('copilot', { chat: 'gemini-2.5-flash', embedding: 'gemini-embedding-001', image: 'gpt-image-1', - rerank: 'gpt-4.1', coding: 'claude-sonnet-4-5@20250929', - complex_text_generation: 'gpt-4o-2024-08-06', + complex_text_generation: 'gpt-5-mini', quick_decision_making: 'gpt-5-mini', quick_text_generation: 'gemini-2.5-flash', polish_and_summarize: 'gemini-2.5-flash', diff --git a/packages/backend/server/src/plugins/copilot/embedding/client.ts b/packages/backend/server/src/plugins/copilot/embedding/client.ts index 8268cae01e..af69c2499c 100644 --- a/packages/backend/server/src/plugins/copilot/embedding/client.ts +++ b/packages/backend/server/src/plugins/copilot/embedding/client.ts @@ -15,6 +15,10 @@ import { import { PromptService } from '../prompt/service'; import { CopilotProviderFactory } from '../providers/factory'; import type { CopilotProvider } from '../providers/provider'; +import { + DEFAULT_RERANK_MODEL, + normalizeRerankModel, +} from '../providers/rerank'; import { type ModelFullConditions, ModelInputType, @@ -114,10 +118,16 @@ class ProductionEmbeddingClient extends EmbeddingClient { if (!prompt) { throw new CopilotPromptNotFound({ name: RERANK_PROMPT }); } - const provider = await this.getProvider({ modelId: prompt.model }); + const rerankModel = normalizeRerankModel(prompt.model); + if (prompt.model !== rerankModel) { + this.logger.warn( + `Unsupported rerank model "${prompt.model}" configured, falling 
back to "${DEFAULT_RERANK_MODEL}".` + ); + } + const provider = await this.getProvider({ modelId: rerankModel }); const ranks = await provider.rerank( - { modelId: prompt.model }, + { modelId: rerankModel }, embeddings.map(e => prompt.finish({ query, doc: e.content })), { signal } ); @@ -171,7 +181,7 @@ class ProductionEmbeddingClient extends EmbeddingClient { ); try { - // 4.1 mini's context windows large enough to handle all embeddings + // The rerank prompt is expected to handle the full deduped candidate list. const ranks = await this.getEmbeddingRelevance( query, sortedEmbeddings, diff --git a/packages/backend/server/src/plugins/copilot/prompt/prompts.ts b/packages/backend/server/src/plugins/copilot/prompt/prompts.ts index cedbfe9368..88532bc2ac 100644 --- a/packages/backend/server/src/plugins/copilot/prompt/prompts.ts +++ b/packages/backend/server/src/plugins/copilot/prompt/prompts.ts @@ -34,7 +34,6 @@ export const Scenario = { 'Remove background', 'Upscale image', ], - rerank: ['Rerank results'], coding: [ 'Apply Updates', 'Code Artifact', @@ -124,7 +123,7 @@ const workflows: Prompt[] = [ { name: 'workflow:presentation:step2', action: 'workflow:presentation:step2', - model: 'gpt-4o-2024-08-06', + model: 'gpt-5-mini', messages: [ { role: 'system', @@ -143,7 +142,7 @@ const workflows: Prompt[] = [ { name: 'workflow:presentation:step4', action: 'workflow:presentation:step4', - model: 'gpt-4o-2024-08-06', + model: 'gpt-5-mini', messages: [ { role: 'system', @@ -187,7 +186,7 @@ const workflows: Prompt[] = [ { name: 'workflow:brainstorm:step2', action: 'workflow:brainstorm:step2', - model: 'gpt-4o-2024-08-06', + model: 'gpt-5-mini', config: { frequencyPenalty: 0.5, presencePenalty: 0.5, @@ -197,7 +196,8 @@ const workflows: Prompt[] = [ messages: [ { role: 'system', - content: `You are the creator of the mind map. 
You need to analyze and expand on the input and output it according to the indentation formatting template given below without redundancy.\nBelow is an example of indentation for a mind map, the title and content needs to be removed by text replacement and not retained. Please strictly adhere to the hierarchical indentation of the template and my requirements, bold, headings and other formatting (e.g. #, **) are not allowed, a maximum of five levels of indentation is allowed, and the last node of each node should make a judgment on whether to make a detailed statement or not based on the topic:\nexmaple:\n- {topic}\n - {Level 1}\n - {Level 2}\n - {Level 3}\n - {Level 4}\n - {Level 1}\n - {Level 2}\n - {Level 3}\n - {Level 1}\n - {Level 2}\n - {Level 3}`, + content: + 'Use the Markdown nested unordered list syntax without any extra styles or plain text descriptions to analyze and expand the input into a mind map. Regardless of the content, the first-level list should contain only one item, which acts as the root. Each node label must be plain text only. Do not output markdown links, footnotes, citations, URLs, headings, bold text, code fences, or any explanatory text outside the nested list. 
A maximum of five levels of indentation is allowed.', }, { role: 'assistant', @@ -381,7 +381,11 @@ const textActions: Prompt[] = [ name: 'Transcript audio', action: 'Transcript audio', model: 'gemini-2.5-flash', - optionalModels: ['gemini-2.5-flash', 'gemini-2.5-pro'], + optionalModels: [ + 'gemini-2.5-flash', + 'gemini-2.5-pro', + 'gemini-3.1-pro-preview', + ], messages: [ { role: 'system', @@ -417,7 +421,7 @@ Convert a multi-speaker audio recording into a structured JSON format by transcr { name: 'Rerank results', action: 'Rerank results', - model: 'gpt-4.1', + model: 'gpt-5.2', messages: [ { role: 'system', @@ -432,7 +436,7 @@ Convert a multi-speaker audio recording into a structured JSON format by transcr { name: 'Generate a caption', action: 'Generate a caption', - model: 'gpt-5-mini', + model: 'gemini-2.5-flash', messages: [ { role: 'user', @@ -448,7 +452,7 @@ Convert a multi-speaker audio recording into a structured JSON format by transcr { name: 'Conversation Summary', action: 'Conversation Summary', - model: 'gpt-4.1-2025-04-14', + model: 'gpt-5-mini', messages: [ { role: 'system', @@ -473,7 +477,7 @@ Return only the summary text—no headings, labels, or commentary.`, { name: 'Summary', action: 'Summary', - model: 'gpt-4.1-2025-04-14', + model: 'gpt-5-mini', messages: [ { role: 'system', @@ -504,7 +508,7 @@ You are an assistant helping summarize a document. Use this format, replacing te { name: 'Summary as title', action: 'Summary as title', - model: 'gpt-4.1-2025-04-14', + model: 'gpt-5-mini', messages: [ { role: 'system', @@ -521,7 +525,7 @@ You are an assistant helping summarize a document. Use this format, replacing te { name: 'Summary the webpage', action: 'Summary the webpage', - model: 'gpt-4.1-2025-04-14', + model: 'gpt-5-mini', messages: [ { role: 'user', @@ -533,7 +537,7 @@ You are an assistant helping summarize a document. 
Use this format, replacing te { name: 'Explain this', action: 'Explain this', - model: 'gpt-4.1-2025-04-14', + model: 'gpt-5-mini', messages: [ { role: 'system', @@ -576,7 +580,7 @@ A concise paragraph that captures the article's main argument and key conclusion { name: 'Explain this image', action: 'Explain this image', - model: 'gpt-4.1-2025-04-14', + model: 'gemini-2.5-flash', messages: [ { role: 'system', @@ -727,7 +731,7 @@ You are a highly accomplished professional translator, demonstrating profound pr { name: 'Summarize the meeting', action: 'Summarize the meeting', - model: 'gpt-4.1-2025-04-14', + model: 'gpt-5-mini', messages: [ { role: 'system', @@ -752,7 +756,7 @@ You are an assistant helping summarize a document. Use this format, replacing te { name: 'Find action for summary', action: 'Find action for summary', - model: 'gpt-4.1-2025-04-14', + model: 'gpt-5-mini', messages: [ { role: 'system', @@ -774,7 +778,7 @@ You are an assistant helping find actions of meeting summary. Use this format, r { name: 'Write an article about this', action: 'Write an article about this', - model: 'gemini-2.5-flash', + model: 'gemini-2.5-pro', messages: [ { role: 'system', @@ -829,7 +833,7 @@ You are an assistant helping find actions of meeting summary. Use this format, r { name: 'Write a twitter about this', action: 'Write a twitter about this', - model: 'gpt-4.1-2025-04-14', + model: 'gemini-2.5-flash', messages: [ { role: 'system', @@ -915,7 +919,7 @@ You are an assistant helping find actions of meeting summary. Use this format, r { name: 'Write a blog post about this', action: 'Write a blog post about this', - model: 'gemini-2.5-flash', + model: 'gemini-2.5-pro', messages: [ { role: 'system', @@ -1005,7 +1009,7 @@ You are an assistant helping find actions of meeting summary. 
Use this format, r { name: 'Change tone to', action: 'Change tone', - model: 'gpt-4.1-2025-04-14', + model: 'gemini-2.5-flash', messages: [ { role: 'system', @@ -1096,12 +1100,12 @@ You are an assistant helping find actions of meeting summary. Use this format, r { name: 'Brainstorm mindmap', action: 'Brainstorm mindmap', - model: 'gpt-4o-2024-08-06', + model: 'gpt-5-mini', messages: [ { role: 'system', content: - 'Use the Markdown nested unordered list syntax without any extra styles or plain text descriptions to brainstorm the questions or topics provided by user for a mind map. Regardless of the content, the first-level list should contain only one item, which acts as the root. Do not wrap everything into a single code block.', + 'Use the Markdown nested unordered list syntax without any extra styles or plain text descriptions to brainstorm the questions or topics provided by user for a mind map. Regardless of the content, the first-level list should contain only one item, which acts as the root. Each node label must be plain text only. Do not output markdown links, footnotes, citations, URLs, headings, bold text, code fences, or any explanatory text outside the nested list.', }, { role: 'user', @@ -1113,12 +1117,12 @@ You are an assistant helping find actions of meeting summary. Use this format, r { name: 'Expand mind map', action: 'Expand mind map', - model: 'gpt-4o-2024-08-06', + model: 'gpt-5-mini', messages: [ { role: 'system', content: - 'You are a professional writer. Use the Markdown nested unordered list syntax without any extra styles or plain text descriptions to brainstorm the questions or topics provided by user for a mind map.', + 'You are a professional writer. Use the Markdown nested unordered list syntax without any extra styles or plain text descriptions to expand the selected node in a mind map. 
The output must be exactly one subtree: the first bullet must repeat the selected node text as the subtree root, and it must include at least one new nested child bullet beneath it. Each node label must be plain text only. Do not output markdown links, footnotes, citations, URLs, headings, bold text, code fences, or any explanatory text outside the nested list.', }, { role: 'user', @@ -1190,7 +1194,7 @@ The output must be perfect. Adherence to every detail of these instructions is n { name: 'Improve grammar for it', action: 'Improve grammar for it', - model: 'gpt-4.1-2025-04-14', + model: 'gpt-5-mini', messages: [ { role: 'system', @@ -1259,7 +1263,7 @@ The output must be perfect. Adherence to every detail of these instructions is n { name: 'Find action items from it', action: 'Find action items from it', - model: 'gpt-4.1-2025-04-14', + model: 'gpt-5-mini', messages: [ { role: 'system', @@ -1283,7 +1287,7 @@ If there are items in the content that can be used as to-do tasks, please refer { name: 'Check code error', action: 'Check code error', - model: 'gpt-4.1-2025-04-14', + model: 'gpt-5-mini', messages: [ { role: 'system', @@ -1343,7 +1347,7 @@ If there are items in the content that can be used as to-do tasks, please refer { name: 'Create a presentation', action: 'Create a presentation', - model: 'gpt-4o-2024-08-06', + model: 'gpt-5-mini', messages: [ { role: 'system', @@ -1518,7 +1522,7 @@ When sent new notes, respond ONLY with the contents of the html file.`, { name: 'Continue writing', action: 'Continue writing', - model: 'gemini-2.5-flash', + model: 'gemini-2.5-pro', messages: [ { role: 'system', @@ -1904,6 +1908,7 @@ const CHAT_PROMPT: Omit = { optionalModels: [ 'gemini-2.5-flash', 'gemini-2.5-pro', + 'gemini-3.1-pro-preview', 'claude-sonnet-4-5@20250929', ], messages: [ @@ -2074,7 +2079,11 @@ Below is the user's query. 
Please respond in the user's preferred language witho 'codeArtifact', 'blobRead', ], - proModels: ['gemini-2.5-pro', 'claude-sonnet-4-5@20250929'], + proModels: [ + 'gemini-2.5-pro', + 'gemini-3.1-pro-preview', + 'claude-sonnet-4-5@20250929', + ], }, }; diff --git a/packages/backend/server/src/plugins/copilot/providers/gemini/gemini.ts b/packages/backend/server/src/plugins/copilot/providers/gemini/gemini.ts index 8525f30355..26ca7caa88 100644 --- a/packages/backend/server/src/plugins/copilot/providers/gemini/gemini.ts +++ b/packages/backend/server/src/plugins/copilot/providers/gemini/gemini.ts @@ -5,6 +5,7 @@ import type { import type { GoogleVertexProvider } from '@ai-sdk/google-vertex'; import { AISDKError, + type EmbeddingModel, embedMany, generateObject, generateText, @@ -43,6 +44,34 @@ export abstract class GeminiProvider extends CopilotProvider { | GoogleGenerativeAIProvider | GoogleVertexProvider; + private getThinkingConfig( + model: string, + options: { includeThoughts: boolean; useDynamicBudget?: boolean } + ): NonNullable { + if (this.isGemini3Model(model)) { + return { + includeThoughts: options.includeThoughts, + thinkingLevel: 'high', + }; + } + + return { + includeThoughts: options.includeThoughts, + thinkingBudget: options.useDynamicBudget ? -1 : 12000, + }; + } + + private getEmbeddingModel(model: string) { + const provider = this.instance as typeof this.instance & { + embeddingModel?: (modelId: string) => EmbeddingModel; + textEmbeddingModel?: (modelId: string) => EmbeddingModel; + }; + + return ( + provider.embeddingModel?.(model) ?? 
provider.textEmbeddingModel?.(model) + ); + } + private handleError(e: any) { if (e instanceof UserFriendlyError) { return e; @@ -122,10 +151,10 @@ export abstract class GeminiProvider extends CopilotProvider { schema, providerOptions: { google: { - thinkingConfig: { - thinkingBudget: -1, + thinkingConfig: this.getThinkingConfig(model.id, { includeThoughts: false, - }, + useDynamicBudget: true, + }), }, }, abortSignal: options.signal, @@ -234,7 +263,10 @@ export abstract class GeminiProvider extends CopilotProvider { .counter('generate_embedding_calls') .add(1, { model: model.id }); - const modelInstance = this.instance.textEmbeddingModel(model.id); + const modelInstance = this.getEmbeddingModel(model.id); + if (!modelInstance) { + throw new Error(`Embedding model is not available for ${model.id}`); + } const embeddings = await Promise.allSettled( messages.map(m => @@ -286,15 +318,18 @@ export abstract class GeminiProvider extends CopilotProvider { private getGeminiOptions(options: CopilotChatOptions, model: string) { const result: GoogleGenerativeAIProviderOptions = {}; if (options?.reasoning && this.isReasoningModel(model)) { - result.thinkingConfig = { - thinkingBudget: 12000, + result.thinkingConfig = this.getThinkingConfig(model, { includeThoughts: true, - }; + }); } return result; } + private isGemini3Model(model: string) { + return model.startsWith('gemini-3'); + } + private isReasoningModel(model: string) { - return model.startsWith('gemini-2.5'); + return model.startsWith('gemini-2.5') || this.isGemini3Model(model); } } diff --git a/packages/backend/server/src/plugins/copilot/providers/gemini/generative.ts b/packages/backend/server/src/plugins/copilot/providers/gemini/generative.ts index 66eb289528..e7bd955e18 100644 --- a/packages/backend/server/src/plugins/copilot/providers/gemini/generative.ts +++ b/packages/backend/server/src/plugins/copilot/providers/gemini/generative.ts @@ -20,25 +20,6 @@ export class GeminiGenerativeProvider extends GeminiProvider { 
}, ], }, + { + name: 'Gemini 3.1 Pro Preview', + id: 'gemini-3.1-pro-preview', + capabilities: [ + { + input: [ + ModelInputType.Text, + ModelInputType.Image, + ModelInputType.Audio, + ], + output: [ + ModelOutputType.Text, + ModelOutputType.Object, + ModelOutputType.Structured, + ], + }, + ], + }, { name: 'Gemini Embedding', id: 'gemini-embedding-001', diff --git a/packages/backend/server/src/plugins/copilot/providers/openai.ts b/packages/backend/server/src/plugins/copilot/providers/openai.ts index 4297552c6c..34f1c7a29e 100644 --- a/packages/backend/server/src/plugins/copilot/providers/openai.ts +++ b/packages/backend/server/src/plugins/copilot/providers/openai.ts @@ -18,6 +18,12 @@ import { import type { NodeTextMiddleware } from '../config'; import { buildNativeRequest, NativeProviderAdapter } from './native'; import { CopilotProvider } from './provider'; +import { + normalizeRerankModel, + OPENAI_RERANK_MAX_COMPLETION_TOKENS, + OPENAI_RERANK_TOP_LOGPROBS_LIMIT, + usesRerankReasoning, +} from './rerank'; import type { CopilotChatOptions, CopilotChatTools, @@ -33,6 +39,30 @@ import { chatToGPTMessage } from './utils'; export const DEFAULT_DIMENSIONS = 256; +const GPT_5_SAMPLING_UNSUPPORTED_MODELS = /^(gpt-5(?:$|[.-]))/; + +export function normalizeOpenAIOptionsForModel< + T extends { + frequencyPenalty?: number | null; + presencePenalty?: number | null; + temperature?: number | null; + topP?: number | null; + }, +>(options: T, model: string): T { + if (!GPT_5_SAMPLING_UNSUPPORTED_MODELS.test(model)) { + return options; + } + + const normalizedOptions = { ...options }; + + delete normalizedOptions.frequencyPenalty; + delete normalizedOptions.presencePenalty; + delete normalizedOptions.temperature; + delete normalizedOptions.topP; + + return normalizedOptions; +} + export type OpenAIConfig = { apiKey: string; baseURL?: string; @@ -252,6 +282,34 @@ export class OpenAIProvider extends CopilotProvider { }, ], }, + { + name: 'GPT 5.2', + id: 'gpt-5.2', + capabilities: 
[ + { + input: [ModelInputType.Text, ModelInputType.Image], + output: [ + ModelOutputType.Text, + ModelOutputType.Object, + ModelOutputType.Structured, + ], + }, + ], + }, + { + name: 'GPT 5.2 2025-12-11', + id: 'gpt-5.2-2025-12-11', + capabilities: [ + { + input: [ModelInputType.Text, ModelInputType.Image], + output: [ + ModelOutputType.Text, + ModelOutputType.Object, + ModelOutputType.Structured, + ], + }, + ], + }, { name: 'GPT 5 Nano', id: 'gpt-5-nano', @@ -435,10 +493,14 @@ export class OpenAIProvider extends CopilotProvider { metrics.ai.counter('chat_text_calls').add(1, this.metricLabels(model.id)); const tools = await this.getTools(options, model.id); const middleware = this.getActiveProviderMiddleware(); + const normalizedOptions = normalizeOpenAIOptionsForModel( + options, + model.id + ); const { request } = await buildNativeRequest({ model: model.id, messages, - options, + options: normalizedOptions, tools, include: options.webSearch ? ['citations'] : undefined, reasoning: this.getReasoning(options, model.id), @@ -472,10 +534,14 @@ export class OpenAIProvider extends CopilotProvider { .add(1, this.metricLabels(model.id)); const tools = await this.getTools(options, model.id); const middleware = this.getActiveProviderMiddleware(); + const normalizedOptions = normalizeOpenAIOptionsForModel( + options, + model.id + ); const { request } = await buildNativeRequest({ model: model.id, messages, - options, + options: normalizedOptions, tools, include: options.webSearch ? 
['citations'] : undefined, reasoning: this.getReasoning(options, model.id), @@ -508,10 +574,14 @@ export class OpenAIProvider extends CopilotProvider { .add(1, this.metricLabels(model.id)); const tools = await this.getTools(options, model.id); const middleware = this.getActiveProviderMiddleware(); + const normalizedOptions = normalizeOpenAIOptionsForModel( + options, + model.id + ); const { request } = await buildNativeRequest({ model: model.id, messages, - options, + options: normalizedOptions, tools, include: options.webSearch ? ['citations'] : undefined, reasoning: this.getReasoning(options, model.id), @@ -542,10 +612,14 @@ export class OpenAIProvider extends CopilotProvider { metrics.ai.counter('chat_text_calls').add(1, { model: model.id }); const tools = await this.getTools(options, model.id); const middleware = this.getActiveProviderMiddleware(); + const normalizedOptions = normalizeOpenAIOptionsForModel( + options, + model.id + ); const { request, schema } = await buildNativeRequest({ model: model.id, messages, - options, + options: normalizedOptions, tools, reasoning: this.getReasoning(options, model.id), middleware, @@ -576,15 +650,21 @@ export class OpenAIProvider extends CopilotProvider { const scores = await Promise.all( chunkMessages.map(async messages => { const [system, msgs] = await chatToGPTMessage(messages); + const rerankModel = normalizeRerankModel(model.id); const response = await this.requestOpenAIJson( '/chat/completions', { - model: model.id, + model: rerankModel, messages: this.toOpenAIChatMessages(system, msgs), temperature: 0, - max_tokens: 16, logprobs: true, - top_logprobs: 16, + top_logprobs: OPENAI_RERANK_TOP_LOGPROBS_LIMIT, + ...(usesRerankReasoning(rerankModel) + ? 
{ + reasoning_effort: 'none' as const, + max_completion_tokens: OPENAI_RERANK_MAX_COMPLETION_TOKENS, + } + : { max_tokens: OPENAI_RERANK_MAX_COMPLETION_TOKENS }), }, options.signal ); diff --git a/packages/backend/server/src/plugins/copilot/providers/rerank.ts b/packages/backend/server/src/plugins/copilot/providers/rerank.ts new file mode 100644 index 0000000000..528e5066e0 --- /dev/null +++ b/packages/backend/server/src/plugins/copilot/providers/rerank.ts @@ -0,0 +1,23 @@ +const GPT_4_RERANK_MODELS = /^(gpt-4(?:$|[.-]))/; +const GPT_5_RERANK_LOGPROBS_MODELS = /^(gpt-5\.2(?:$|-))/; + +export const DEFAULT_RERANK_MODEL = 'gpt-5.2'; +export const OPENAI_RERANK_TOP_LOGPROBS_LIMIT = 5; +export const OPENAI_RERANK_MAX_COMPLETION_TOKENS = 16; + +export function supportsRerankModel(model: string): boolean { + return ( + GPT_4_RERANK_MODELS.test(model) || GPT_5_RERANK_LOGPROBS_MODELS.test(model) + ); +} + +export function usesRerankReasoning(model: string): boolean { + return GPT_5_RERANK_LOGPROBS_MODELS.test(model); +} + +export function normalizeRerankModel(model?: string | null): string { + if (model && supportsRerankModel(model)) { + return model; + } + return DEFAULT_RERANK_MODEL; +} diff --git a/packages/backend/server/src/plugins/copilot/providers/utils.ts b/packages/backend/server/src/plugins/copilot/providers/utils.ts index bb95a98b64..26224ff72e 100644 --- a/packages/backend/server/src/plugins/copilot/providers/utils.ts +++ b/packages/backend/server/src/plugins/copilot/providers/utils.ts @@ -2,12 +2,12 @@ import { GoogleVertexProviderSettings } from '@ai-sdk/google-vertex'; import { GoogleVertexAnthropicProviderSettings } from '@ai-sdk/google-vertex/anthropic'; import { Logger } from '@nestjs/common'; import { - CoreAssistantMessage, - CoreUserMessage, + AssistantModelMessage, FilePart, ImagePart, TextPart, TextStreamPart, + UserModelMessage, } from 'ai'; import { GoogleAuth, GoogleAuthOptions } from 'google-auth-library'; import z, { ZodType } from 'zod'; @@ 
-23,7 +23,7 @@ import { import { CustomAITools } from '../tools'; import { PromptMessage, StreamObject } from './types'; -type ChatMessage = CoreUserMessage | CoreAssistantMessage; +type ChatMessage = UserModelMessage | AssistantModelMessage; const ATTACHMENT_MAX_BYTES = 20 * 1024 * 1024; const ATTACH_HEAD_PARAMS = { timeoutMs: OneMinute / 12, maxRedirects: 3 }; diff --git a/packages/frontend/core/src/blocksuite/ai/components/ai-chat-input/preference-popup.ts b/packages/frontend/core/src/blocksuite/ai/components/ai-chat-input/preference-popup.ts index f4f7296f11..0bc546ec42 100644 --- a/packages/frontend/core/src/blocksuite/ai/components/ai-chat-input/preference-popup.ts +++ b/packages/frontend/core/src/blocksuite/ai/components/ai-chat-input/preference-popup.ts @@ -26,10 +26,17 @@ import { ThinkingIcon, } from '@blocksuite/icons/lit'; import { ShadowlessElement } from '@blocksuite/std'; +import { autoPlacement, offset, shift } from '@floating-ui/dom'; import { computed } from '@preact/signals-core'; import { css, html } from 'lit'; import { property } from 'lit/decorators.js'; +const modelSubMenuMiddleware = [ + autoPlacement({ allowedPlacements: ['right-start', 'left-start'] }), + offset({ mainAxis: 4, crossAxis: 0 }), + shift({ crossAxis: true, padding: 8 }), +]; + export class ChatInputPreference extends SignalWatcher( WithDisposable(ShadowlessElement) ) { @@ -140,6 +147,7 @@ export class ChatInputPreference extends SignalWatcher( menu.subMenu({ name: 'Model', prefix: AiOutlineIcon(), + middleware: modelSubMenuMiddleware, postfix: html` ${this.model.value?.name} `, diff --git a/packages/frontend/core/src/blocksuite/ai/mini-mindmap/__tests__/mindmap-preview.unit.spec.ts b/packages/frontend/core/src/blocksuite/ai/mini-mindmap/__tests__/mindmap-preview.unit.spec.ts index 6ffb7376d7..11fb8fdc13 100644 --- a/packages/frontend/core/src/blocksuite/ai/mini-mindmap/__tests__/mindmap-preview.unit.spec.ts +++ 
b/packages/frontend/core/src/blocksuite/ai/mini-mindmap/__tests__/mindmap-preview.unit.spec.ts @@ -99,4 +99,69 @@ describe('markdownToMindmap: convert markdown list to a mind map tree', () => { expect(nodes).toEqual(null); }); + + test('accepts leading plain text before the markdown list', () => { + const markdown = `Here is the regenerated mind map: + +- Text A + - Text B`; + const collection = new TestWorkspace(); + collection.meta.initialize(); + const doc = collection.createDoc().getStore(); + const nodes = markdownToMindmap(markdown, doc, provider); + + expect(nodes).toEqual({ + text: 'Text A', + children: [ + { + text: 'Text B', + children: [], + }, + ], + }); + }); + + test('accepts markdown lists wrapped in a code block', () => { + const markdown = `\`\`\`markdown +- Text A + - Text B +\`\`\``; + const collection = new TestWorkspace(); + collection.meta.initialize(); + const doc = collection.createDoc().getStore(); + const nodes = markdownToMindmap(markdown, doc, provider); + + expect(nodes).toEqual({ + text: 'Text A', + children: [ + { + text: 'Text B', + children: [], + }, + ], + }); + }); + + test('keeps inline markdown content inside node labels', () => { + const markdown = ` +- Root with [link](https://example.com) and [^1] + - Child with \`code\` + +[^1]: footnote +`; + const collection = new TestWorkspace(); + collection.meta.initialize(); + const doc = collection.createDoc().getStore(); + const nodes = markdownToMindmap(markdown, doc, provider); + + expect(nodes).toEqual({ + text: 'Root with link and', + children: [ + { + text: 'Child with code', + children: [], + }, + ], + }); + }); }); diff --git a/packages/frontend/core/src/blocksuite/ai/mini-mindmap/mindmap-preview.ts b/packages/frontend/core/src/blocksuite/ai/mini-mindmap/mindmap-preview.ts index 6cdf6ac8d4..079c730e42 100644 --- a/packages/frontend/core/src/blocksuite/ai/mini-mindmap/mindmap-preview.ts +++ b/packages/frontend/core/src/blocksuite/ai/mini-mindmap/mindmap-preview.ts @@ -19,7 
+19,7 @@ import { css, html, LitElement, nothing } from 'lit'; import { property, query } from 'lit/decorators.js'; import { repeat } from 'lit/directives/repeat.js'; import { styleMap } from 'lit/directives/style-map.js'; -import type { Root } from 'mdast'; +import type { Root, RootContent } from 'mdast'; import { Doc as YDoc } from 'yjs'; import { MiniMindmapSchema, MiniMindmapSpecs } from './spec.js'; @@ -234,19 +234,68 @@ type Node = { children: Node[]; }; +type MarkdownNode = + | RootContent + | { alt?: string | null; children?: MarkdownNode[]; value?: string }; + export const markdownToMindmap = ( answer: string, doc: Store, provider: ServiceProvider ) => { - let result: Node | null = null; const transformer = doc.getTransformer(); const markdown = new MarkdownAdapter(transformer, provider); - const ast: Root = markdown['_markdownToAst'](answer); + const astToMindmap = (ast: Root): Node | null => { + const findList = ( + nodes: Root['children'] + ): Unpacked | null => { + for (const node of nodes) { + if (node.type === 'list') { + return node; + } + + if (node.type === 'code' && node.value) { + const nestedAst: Root = markdown['_markdownToAst'](node.value); + const nestedList = findList(nestedAst.children); + if (nestedList) { + return nestedList; + } + } + } + + return null; + }; + + const list = findList(ast.children); + if (!list) { + return null; + } + + return traverse(list, true); + }; + const traverse = ( - markdownNode: Unpacked<(typeof ast)['children']>, + markdownNode: Unpacked, firstLevel = false ): Node | null => { + const toPlainText = (node: MarkdownNode): string => { + if ('value' in node && typeof node.value === 'string') { + return node.value; + } + + if ('alt' in node && typeof node.alt === 'string') { + return node.alt; + } + + if ('children' in node && Array.isArray(node.children)) { + return node.children + .map((child: MarkdownNode) => toPlainText(child)) + .join(''); + } + + return ''; + }; + switch (markdownNode.type) { case 'list': { 
@@ -267,11 +316,11 @@ export const markdownToMindmap = ( children: [], }; - if ( - paragraph?.type === 'paragraph' && - paragraph.children[0]?.type === 'text' - ) { - node.text = paragraph.children[0].value; + if (paragraph?.type === 'paragraph') { + node.text = paragraph.children + .map((child: MarkdownNode) => toPlainText(child)) + .join('') + .trim(); } if (list?.type === 'list') { @@ -287,9 +336,5 @@ export const markdownToMindmap = ( return null; }; - if (ast?.children?.[0]?.type === 'list') { - result = traverse(ast.children[0], true); - } - - return result; + return astToMindmap(markdown['_markdownToAst'](answer)); }; diff --git a/packages/frontend/core/src/blocksuite/ai/mini-mindmap/spec.ts b/packages/frontend/core/src/blocksuite/ai/mini-mindmap/spec.ts index 1d864feb3c..f37131a0ab 100644 --- a/packages/frontend/core/src/blocksuite/ai/mini-mindmap/spec.ts +++ b/packages/frontend/core/src/blocksuite/ai/mini-mindmap/spec.ts @@ -12,6 +12,7 @@ import { ThemeService, } from '@blocksuite/affine/shared/services'; import { BlockViewExtension, FlavourExtension } from '@blocksuite/affine/std'; +import { ToolController } from '@blocksuite/affine/std/gfx'; import type { BlockSchema, ExtensionType } from '@blocksuite/affine/store'; import { literal } from 'lit/static-html.js'; import type { z } from 'zod'; @@ -24,6 +25,7 @@ export const MiniMindmapSpecs: ExtensionType[] = [ ThemeService, FlavourExtension('affine:page'), MindmapService, + ToolController, BlockViewExtension('affine:page', literal`mini-mindmap-root-block`), FlavourExtension('affine:surface'), MindMapView, diff --git a/packages/frontend/media-capture-playground/server/gemini.ts b/packages/frontend/media-capture-playground/server/gemini.ts index 723a5e742c..c4fb7da661 100644 --- a/packages/frontend/media-capture-playground/server/gemini.ts +++ b/packages/frontend/media-capture-playground/server/gemini.ts @@ -4,7 +4,7 @@ import { type UploadFileResponse, } from '@google/generative-ai/server'; -const 
DEFAULT_MODEL = 'gemini-2.0-flash'; +const DEFAULT_MODEL = 'gemini-2.5-pro'; export interface TranscriptionResult { title: string; @@ -75,7 +75,7 @@ Output in JSON format with the following structure: export async function gemini( audioFilePath: string, options?: { - model?: 'gemini-2.0-flash' | 'gemini-1.5-flash'; + model?: 'gemini-2.5-flash' | 'gemini-2.5-pro'; mode?: 'transcript' | 'summary'; } ) { diff --git a/packages/frontend/track/src/__tests__/tracker.spec.ts b/packages/frontend/track/src/__tests__/tracker.spec.ts index 2c4faee39d..5d07753ecc 100644 --- a/packages/frontend/track/src/__tests__/tracker.spec.ts +++ b/packages/frontend/track/src/__tests__/tracker.spec.ts @@ -3,8 +3,13 @@ */ import { beforeEach, describe, expect, test, vi } from 'vitest'; -const sendTelemetryEvent = vi.fn().mockResolvedValue({ queued: true }); -const setTelemetryContext = vi.fn(); +import { resetTrackerState } from '../state'; +import { tracker } from '../tracker'; + +const { sendTelemetryEvent, setTelemetryContext } = vi.hoisted(() => ({ + sendTelemetryEvent: vi.fn().mockResolvedValue({ queued: true }), + setTelemetryContext: vi.fn(), +})); vi.mock('../telemetry', () => ({ sendTelemetryEvent, @@ -27,17 +32,11 @@ beforeEach(() => { sendTelemetryEvent.mockClear(); setTelemetryContext.mockClear(); vi.useRealTimers(); - vi.resetModules(); + resetTrackerState(); }); -async function loadTracker() { - return await import('../tracker'); -} - describe('tracker session signals', () => { - test('sends first_visit and session_start on first event', async () => { - const { tracker } = await loadTracker(); - + test('sends first_visit and session_start on first event', () => { tracker.track('test_event'); const events = sendTelemetryEvent.mock.calls.map(call => call[0]); @@ -48,14 +47,12 @@ describe('tracker session signals', () => { ]); const firstVisit = events[0]; - expect(typeof (firstVisit.params as any).session_id).toBe('number'); - expect((firstVisit.params as 
any).session_number).toBe(1); - expect((firstVisit.params as any).engagement_time_msec).toBe(1); + expect(typeof firstVisit.params?.session_id).toBe('number'); + expect(firstVisit.params?.session_number).toBe(1); + expect(firstVisit.params?.engagement_time_msec).toBe(1); }); - test('does not repeat first_visit for later events', async () => { - const { tracker } = await loadTracker(); - + test('does not repeat first_visit for later events', () => { tracker.track('event_a'); tracker.track('event_b'); @@ -64,10 +61,9 @@ describe('tracker session signals', () => { expect(names.filter(name => name === 'session_start')).toHaveLength(1); }); - test('increments session_number after idle timeout', async () => { + test('increments session_number after idle timeout', () => { vi.useFakeTimers(); vi.setSystemTime(new Date('2024-01-01T00:00:00Z')); - const { tracker } = await loadTracker(); tracker.track('event_a'); sendTelemetryEvent.mockClear(); diff --git a/packages/frontend/track/src/state.ts b/packages/frontend/track/src/state.ts new file mode 100644 index 0000000000..71ca960cfd --- /dev/null +++ b/packages/frontend/track/src/state.ts @@ -0,0 +1,99 @@ +import { nanoid } from 'nanoid'; + +export type TrackProperties = Record | undefined; + +export type Middleware = ( + name: string, + properties?: TrackProperties +) => Record; + +type TrackerState = { + enabled: boolean; + clientStorage: Storage | null; + clientId: string; + pendingFirstVisit: boolean; + sessionId: number; + sessionNumber: number; + lastActivityMs: number; + sessionStartSent: boolean; + engagementTrackingEnabled: boolean; + visibleSinceMs: number | null; + pendingEngagementMs: number; + visibilityChangeHandler: (() => void) | null; + pageHideHandler: (() => void) | null; + userId: string | undefined; + userProperties: Record; + middlewares: Set; +}; + +const CLIENT_ID_KEY = 'affine_telemetry_client_id'; + +export let trackerState = createTrackerState(); + +export function resetTrackerState() { + 
cleanupTrackerState(trackerState); + trackerState = createTrackerState(); +} + +function createTrackerState(): TrackerState { + const clientStorage = localStorageSafe(); + const hasClientId = !!clientStorage?.getItem(CLIENT_ID_KEY); + + return { + enabled: true, + clientStorage, + clientId: readPersistentId(CLIENT_ID_KEY, clientStorage), + pendingFirstVisit: !hasClientId, + sessionId: 0, + sessionNumber: 0, + lastActivityMs: 0, + sessionStartSent: false, + engagementTrackingEnabled: false, + visibleSinceMs: null, + pendingEngagementMs: 0, + visibilityChangeHandler: null, + pageHideHandler: null, + userId: undefined, + userProperties: {}, + middlewares: new Set(), + }; +} + +function cleanupTrackerState(state: TrackerState) { + if (state.visibilityChangeHandler && typeof document !== 'undefined') { + document.removeEventListener( + 'visibilitychange', + state.visibilityChangeHandler + ); + } + if (state.pageHideHandler && typeof window !== 'undefined') { + window.removeEventListener('pagehide', state.pageHideHandler); + } +} + +function readPersistentId(key: string, storage: Storage | null, renew = false) { + if (!storage) { + return nanoid(); + } + if (!renew) { + const existing = storage.getItem(key); + if (existing) { + return existing; + } + } + const id = nanoid(); + try { + storage.setItem(key, id); + } catch { + return id; + } + return id; +} + +function localStorageSafe(): Storage | null { + try { + return typeof localStorage === 'undefined' ? 
null : localStorage; + } catch { + return null; + } +} diff --git a/packages/frontend/track/src/tracker.ts b/packages/frontend/track/src/tracker.ts index 1fba9cb4d1..c22eb601c0 100644 --- a/packages/frontend/track/src/tracker.ts +++ b/packages/frontend/track/src/tracker.ts @@ -1,43 +1,20 @@ import { DebugLogger } from '@affine/debug'; import { nanoid } from 'nanoid'; +import { type Middleware, trackerState, type TrackProperties } from './state'; import type { TelemetryEvent } from './telemetry'; import { sendTelemetryEvent, setTelemetryContext } from './telemetry'; const logger = new DebugLogger('telemetry'); -type TrackProperties = Record | undefined; type RawTrackProperties = Record | object | undefined; -type Middleware = ( - name: string, - properties?: TrackProperties -) => Record; - -const CLIENT_ID_KEY = 'affine_telemetry_client_id'; const SESSION_ID_KEY = 'affine_telemetry_session_id'; const SESSION_NUMBER_KEY = 'affine_telemetry_session_number'; const SESSION_NUMBER_CURRENT_KEY = 'affine_telemetry_session_number_current'; const LAST_ACTIVITY_KEY = 'affine_telemetry_last_activity_ms'; const SESSION_TIMEOUT_MS = 30 * 60 * 1000; -let enabled = true; -const clientStorage = localStorageSafe(); -const hasClientId = clientStorage?.getItem(CLIENT_ID_KEY); -let clientId = readPersistentId(CLIENT_ID_KEY, clientStorage); -let pendingFirstVisit = !hasClientId; -let sessionId = 0; -let sessionNumber = 0; -let lastActivityMs = 0; -let sessionStartSent = false; -let engagementTrackingEnabled = false; -let visibleSinceMs: number | null = null; -let pendingEngagementMs = 0; - -let userId: string | undefined; -let userProperties: Record = {}; -const middlewares = new Set(); - export const tracker = { init() { this.register({ @@ -51,29 +28,32 @@ export const tracker = { }, register(props: Record) { - userProperties = { - ...userProperties, + trackerState.userProperties = { + ...trackerState.userProperties, ...props, }; - setTelemetryContext({ userProperties }); + 
setTelemetryContext({ userProperties: trackerState.userProperties }); }, reset() { - userId = undefined; - userProperties = {}; + trackerState.userId = undefined; + trackerState.userProperties = {}; startNewSession(Date.now(), sessionStorageSafe()); setTelemetryContext( - { userId, userProperties }, + { + userId: trackerState.userId, + userProperties: trackerState.userProperties, + }, { replaceUserProperties: true } ); this.init(); }, track(eventName: string, properties?: RawTrackProperties) { - if (!enabled) { + if (!trackerState.enabled) { return; } - const middlewareProperties = Array.from(middlewares).reduce( + const middlewareProperties = Array.from(trackerState.middlewares).reduce( (acc, middleware) => { return middleware(eventName, acc); }, @@ -84,10 +64,10 @@ export const tracker = { }, track_pageview(properties?: { location?: string; [key: string]: unknown }) { - if (!enabled) { + if (!trackerState.enabled) { return; } - const middlewareProperties = Array.from(middlewares).reduce( + const middlewareProperties = Array.from(trackerState.middlewares).reduce( (acc, middleware) => { return middleware('track_pageview', acc); }, @@ -108,41 +88,41 @@ export const tracker = { }, middleware(cb: Middleware): () => void { - middlewares.add(cb); + trackerState.middlewares.add(cb); return () => { - middlewares.delete(cb); + trackerState.middlewares.delete(cb); }; }, opt_out_tracking() { - enabled = false; + trackerState.enabled = false; }, opt_in_tracking() { - enabled = true; + trackerState.enabled = true; }, has_opted_in_tracking() { - return enabled; + return trackerState.enabled; }, has_opted_out_tracking() { - return !enabled; + return !trackerState.enabled; }, identify(nextUserId?: string) { - userId = nextUserId ? String(nextUserId) : undefined; - setTelemetryContext({ userId }); + trackerState.userId = nextUserId ? 
String(nextUserId) : undefined; + setTelemetryContext({ userId: trackerState.userId }); }, get people() { return { set: (props: Record) => { - userProperties = { - ...userProperties, + trackerState.userProperties = { + ...trackerState.userProperties, ...props, }; - setTelemetryContext({ userProperties }); + setTelemetryContext({ userProperties: trackerState.userProperties }); }, }; }, @@ -193,45 +173,62 @@ function prepareSession(now: number) { if (expired) { startNewSession(now, sessionStorage); } else { - sessionId = storedSessionId; - sessionNumber = readCurrentSessionNumber(sessionStorage, clientStorage); + trackerState.sessionId = storedSessionId; + trackerState.sessionNumber = readCurrentSessionNumber( + sessionStorage, + trackerState.clientStorage + ); updateLastActivity(now, sessionStorage); } } else { const expired = - !sessionId || - !lastActivityMs || - now - lastActivityMs > SESSION_TIMEOUT_MS; + !trackerState.sessionId || + !trackerState.lastActivityMs || + now - trackerState.lastActivityMs > SESSION_TIMEOUT_MS; if (expired) { startNewSession(now, null); } else { - lastActivityMs = now; - if (!sessionNumber) { - sessionNumber = 1; + trackerState.lastActivityMs = now; + if (!trackerState.sessionNumber) { + trackerState.sessionNumber = 1; } } } const preEvents: TelemetryEvent[] = []; - if (pendingFirstVisit) { - pendingFirstVisit = false; + if (trackerState.pendingFirstVisit) { + trackerState.pendingFirstVisit = false; preEvents.push( buildEvent( 'first_visit', - mergeSessionParams({}, sessionId, sessionNumber, 1) + mergeSessionParams( + {}, + trackerState.sessionId, + trackerState.sessionNumber, + 1 + ) ) ); } - if (!sessionStartSent) { - sessionStartSent = true; + if (!trackerState.sessionStartSent) { + trackerState.sessionStartSent = true; preEvents.push( buildEvent( 'session_start', - mergeSessionParams({}, sessionId, sessionNumber, 1) + mergeSessionParams( + {}, + trackerState.sessionId, + trackerState.sessionNumber, + 1 + ) ) ); } - return { 
sessionId, sessionNumber, preEvents }; + return { + sessionId: trackerState.sessionId, + sessionNumber: trackerState.sessionNumber, + preEvents, + }; } function mergeSessionParams( @@ -256,62 +253,76 @@ function mergeSessionParams( } function startNewSession(now: number, sessionStorage: Storage | null) { - sessionId = Math.floor(now / 1000); - sessionNumber = incrementSessionNumber(clientStorage, sessionStorage); + trackerState.sessionId = Math.floor(now / 1000); + trackerState.sessionNumber = incrementSessionNumber( + trackerState.clientStorage, + sessionStorage + ); updateLastActivity(now, sessionStorage); - writeNumber(sessionStorage, SESSION_ID_KEY, sessionId); - sessionStartSent = false; + writeNumber(sessionStorage, SESSION_ID_KEY, trackerState.sessionId); + trackerState.sessionStartSent = false; resetEngagementState(now); } function updateLastActivity(now: number, sessionStorage: Storage | null) { - lastActivityMs = now; + trackerState.lastActivityMs = now; writeNumber(sessionStorage, LAST_ACTIVITY_KEY, now); } function consumeEngagementTime(now: number) { initEngagementTracking(now); - if (visibleSinceMs !== null) { - pendingEngagementMs += now - visibleSinceMs; - visibleSinceMs = now; + if (trackerState.visibleSinceMs !== null) { + trackerState.pendingEngagementMs += now - trackerState.visibleSinceMs; + trackerState.visibleSinceMs = now; } - const engagementMs = Math.max(0, Math.round(pendingEngagementMs)); - pendingEngagementMs = 0; + const engagementMs = Math.max( + 0, + Math.round(trackerState.pendingEngagementMs) + ); + trackerState.pendingEngagementMs = 0; return engagementMs; } function resetEngagementState(now: number) { - pendingEngagementMs = 0; - visibleSinceMs = isDocumentVisible() ? now : null; + trackerState.pendingEngagementMs = 0; + trackerState.visibleSinceMs = isDocumentVisible() ? 
now : null; } function initEngagementTracking(now: number) { - if (engagementTrackingEnabled || typeof document === 'undefined') { + if ( + trackerState.engagementTrackingEnabled || + typeof document === 'undefined' + ) { return; } - engagementTrackingEnabled = true; + trackerState.engagementTrackingEnabled = true; resetEngagementState(now); - document.addEventListener('visibilitychange', () => { + trackerState.visibilityChangeHandler = () => { const now = Date.now(); - if (visibleSinceMs !== null) { - pendingEngagementMs += now - visibleSinceMs; + if (trackerState.visibleSinceMs !== null) { + trackerState.pendingEngagementMs += now - trackerState.visibleSinceMs; } - visibleSinceMs = isDocumentVisible() ? now : null; + trackerState.visibleSinceMs = isDocumentVisible() ? now : null; if (!isDocumentVisible()) { dispatchUserEngagement(now); } - }); + }; + document.addEventListener( + 'visibilitychange', + trackerState.visibilityChangeHandler + ); if (typeof window !== 'undefined') { - window.addEventListener('pagehide', () => { + trackerState.pageHideHandler = () => { dispatchUserEngagement(Date.now()); - }); + }; + window.addEventListener('pagehide', trackerState.pageHideHandler); } } function dispatchUserEngagement(now: number) { - if (!enabled) { + if (!trackerState.enabled) { return; } const engagementMs = consumeEngagementTime(now); @@ -377,7 +388,7 @@ function readCurrentSessionNumber( const fallback = localStorage ? (readPositiveNumber(localStorage, SESSION_NUMBER_KEY) ?? 
1) - : sessionNumber || 1; + : trackerState.sessionNumber || 1; writeNumber(sessionStorage, SESSION_NUMBER_CURRENT_KEY, fallback); if (localStorage && !readPositiveNumber(localStorage, SESSION_NUMBER_KEY)) { @@ -391,7 +402,7 @@ function incrementSessionNumber( sessionStorage: Storage | null ) { if (!localStorage) { - const next = (sessionNumber || 0) + 1; + const next = (trackerState.sessionNumber || 0) + 1; writeNumber(sessionStorage, SESSION_NUMBER_CURRENT_KEY, next); return next; } @@ -410,10 +421,10 @@ function buildEvent( schemaVersion: 1, eventName, params, - userId, - userProperties, - clientId, - sessionId, + userId: trackerState.userId, + userProperties: trackerState.userProperties, + clientId: trackerState.clientId, + sessionId: trackerState.sessionId, eventId: nanoid(), timestampMicros: Date.now() * 1000, context: buildContext(), @@ -445,33 +456,6 @@ function normalizeProperties(properties?: RawTrackProperties): TrackProperties { return properties as Record; } -function readPersistentId(key: string, storage: Storage | null, renew = false) { - if (!storage) { - return nanoid(); - } - if (!renew) { - const existing = storage.getItem(key); - if (existing) { - return existing; - } - } - const id = nanoid(); - try { - storage.setItem(key, id); - } catch { - return id; - } - return id; -} - -function localStorageSafe(): Storage | null { - try { - return typeof localStorage === 'undefined' ? null : localStorage; - } catch { - return null; - } -} - function sessionStorageSafe(): Storage | null { try { return typeof sessionStorage === 'undefined' ? 
null : sessionStorage; diff --git a/tests/affine-cloud-copilot/e2e/ai-action/check-code-error.spec.ts b/tests/affine-cloud-copilot/e2e/ai-action/check-code-error.spec.ts index c85a821ac9..e699120987 100644 --- a/tests/affine-cloud-copilot/e2e/ai-action/check-code-error.spec.ts +++ b/tests/affine-cloud-copilot/e2e/ai-action/check-code-error.spec.ts @@ -14,11 +14,13 @@ test.describe('AIAction/CheckCodeError', () => { }) => { const { checkCodeError } = await utils.editor.askAIWithCode( page, - 'consloe.log("Hello,World!");', + 'console.log("Hello,World!"', 'javascript' ); const { answer, responses } = await checkCodeError(); - await expect(answer).toHaveText(/console/); + const answerText = await answer.innerText(); + expect(answerText).toMatch(/syntax|parenthesis|unexpected|missing/i); + expect(answerText).not.toMatch(/No syntax errors were found/i); await expect(responses).toEqual( new Set(['insert-below', 'replace-selection']) ); diff --git a/tests/affine-cloud-copilot/e2e/ai-action/expand-mindmap-node.spec.ts b/tests/affine-cloud-copilot/e2e/ai-action/expand-mindmap-node.spec.ts index c00908585e..c04632fc60 100644 --- a/tests/affine-cloud-copilot/e2e/ai-action/expand-mindmap-node.spec.ts +++ b/tests/affine-cloud-copilot/e2e/ai-action/expand-mindmap-node.spec.ts @@ -33,6 +33,6 @@ test.describe('expand mindmap node', () => { await expect(async () => { const newChild = await utils.editor.getMindMapNode(page, id!, [0, 0, 0]); expect(newChild).toBeDefined(); - }).toPass({ timeout: 20000 }); + }).toPass({ timeout: 60000 }); }); }); diff --git a/tests/affine-cloud-copilot/e2e/ai-action/explain-selection.spec.ts b/tests/affine-cloud-copilot/e2e/ai-action/explain-selection.spec.ts index 3d85ae207d..9e4bd97e51 100644 --- a/tests/affine-cloud-copilot/e2e/ai-action/explain-selection.spec.ts +++ b/tests/affine-cloud-copilot/e2e/ai-action/explain-selection.spec.ts @@ -17,7 +17,10 @@ test.describe('AIAction/ExplainSelection', () => { 'LLM(AI)' ); const { answer, responses } = 
await explainSelection(); - await expect(answer).toHaveText(/Large Language Model/, { timeout: 20000 }); + await expect(answer).toHaveText( + /Large Language Model|LLM|artificial intelligence/i, + { timeout: 20000 } + ); expect(responses).toEqual(new Set(['insert-below', 'replace-selection'])); }); @@ -33,7 +36,10 @@ test.describe('AIAction/ExplainSelection', () => { ); const { answer, responses } = await explainSelection(); - await expect(answer).toHaveText(/Large Language Model/, { timeout: 20000 }); + await expect(answer).toHaveText( + /Large Language Model|LLM|artificial intelligence/i, + { timeout: 20000 } + ); expect(responses).toEqual(new Set(['insert-below'])); }); @@ -49,7 +55,10 @@ test.describe('AIAction/ExplainSelection', () => { ); const { answer, responses } = await explainSelection(); - await expect(answer).toHaveText(/Large Language Model/, { timeout: 20000 }); + await expect(answer).toHaveText( + /Large Language Model|LLM|artificial intelligence/i, + { timeout: 20000 } + ); expect(responses).toEqual(new Set(['insert-below'])); }); diff --git a/tests/affine-cloud-copilot/e2e/ai-action/generate-presentation.spec.ts b/tests/affine-cloud-copilot/e2e/ai-action/generate-presentation.spec.ts index daafe26448..7c14a857d4 100644 --- a/tests/affine-cloud-copilot/e2e/ai-action/generate-presentation.spec.ts +++ b/tests/affine-cloud-copilot/e2e/ai-action/generate-presentation.spec.ts @@ -3,6 +3,8 @@ import { expect } from '@playwright/test'; import { test } from '../base/base-test'; test.describe('AIAction/GeneratePresentation', () => { + test.describe.configure({ timeout: 240000 }); + test.beforeEach(async ({ loggedInPage: page, utils }) => { await utils.testUtils.setupTestEnvironment(page); await utils.chatPanel.openChatPanel(page); diff --git a/tests/affine-cloud-copilot/e2e/ai-action/make-it-real.spec.ts b/tests/affine-cloud-copilot/e2e/ai-action/make-it-real.spec.ts index 64fc063390..ed39a40fec 100644 --- 
a/tests/affine-cloud-copilot/e2e/ai-action/make-it-real.spec.ts +++ b/tests/affine-cloud-copilot/e2e/ai-action/make-it-real.spec.ts @@ -3,6 +3,8 @@ import { expect } from '@playwright/test'; import { test } from '../base/base-test'; test.describe('AIAction/MakeItReal', () => { + test.describe.configure({ timeout: 180000 }); + test.beforeEach(async ({ loggedInPage: page, utils }) => { await utils.testUtils.setupTestEnvironment(page); await utils.chatPanel.openChatPanel(page); diff --git a/tests/affine-cloud-copilot/e2e/chat-with/attachments.spec.ts b/tests/affine-cloud-copilot/e2e/chat-with/attachments.spec.ts index 4a94a66d50..6f4355b935 100644 --- a/tests/affine-cloud-copilot/e2e/chat-with/attachments.spec.ts +++ b/tests/affine-cloud-copilot/e2e/chat-with/attachments.spec.ts @@ -74,13 +74,13 @@ test.describe('AIChatWith/Attachments', () => { buffer: buffer2, }, ], - `What is Attachment${randomStr1}? What is Attachment${randomStr2}?` + `Which animal is Attachment${randomStr1} and which animal is Attachment${randomStr2}? Answer with both attachment names.` ); await utils.chatPanel.waitForHistory(page, [ { role: 'user', - content: `What is Attachment${randomStr1}? What is Attachment${randomStr2}?`, + content: `Which animal is Attachment${randomStr1} and which animal is Attachment${randomStr2}? 
Answer with both attachment names.`, }, { role: 'assistant', @@ -89,14 +89,11 @@ test.describe('AIChatWith/Attachments', () => { ]); await expect(async () => { - const { content, message } = - await utils.chatPanel.getLatestAssistantMessage(page); + const { content } = await utils.chatPanel.getLatestAssistantMessage(page); expect(content).toMatch(new RegExp(`Attachment${randomStr1}`)); expect(content).toMatch(new RegExp(`Attachment${randomStr2}`)); - const footnoteCount = await message - .locator('affine-footnote-node') - .count(); - expect(footnoteCount > 0 || /sources?/i.test(content)).toBe(true); + expect(content).toMatch(/cat/i); + expect(content).toMatch(/dog/i); }).toPass({ timeout: 20000 }); }); }); diff --git a/tests/affine-cloud-copilot/e2e/chat-with/edgeless-mindmap.spec.ts b/tests/affine-cloud-copilot/e2e/chat-with/edgeless-mindmap.spec.ts index 8c64734198..248435e00f 100644 --- a/tests/affine-cloud-copilot/e2e/chat-with/edgeless-mindmap.spec.ts +++ b/tests/affine-cloud-copilot/e2e/chat-with/edgeless-mindmap.spec.ts @@ -4,21 +4,39 @@ import { expect } from '@playwright/test'; import { test } from '../base/base-test'; +type MindmapSnapshot = { + childCount: number; + count: number; + id: string | null; +}; + test.describe('AIChatWith/EdgelessMindMap', () => { + test.describe.configure({ timeout: 180000 }); + test.beforeEach(async ({ loggedInPage: page, utils }) => { await utils.testUtils.setupTestEnvironment(page); await utils.chatPanel.openChatPanel(page); }); - test('should support replace mindmap with the regenerated one', async ({ + test('should preview the regenerated mindmap before replacing it', async ({ loggedInPage: page, utils, }) => { let id: string; + let originalChildCount: number; const { regenerateMindMap } = await utils.editor.askAIWithEdgeless( page, async () => { id = await utils.editor.createMindmap(page); + originalChildCount = await page.evaluate(mindmapId => { + const edgelessBlock = document.querySelector( + 'affine-edgeless-root' + 
) as EdgelessRootBlockComponent; + const mindmap = edgelessBlock.gfx.getElementById(mindmapId) as { + tree: { children?: unknown[] }; + } | null; + return mindmap?.tree.children?.length ?? 0; + }, id); }, async () => { const { id: rootId } = await utils.editor.getMindMapNode( @@ -30,22 +48,134 @@ test.describe('AIChatWith/EdgelessMindMap', () => { } ); - const { answer } = await regenerateMindMap(); - await expect(answer.locator('mini-mindmap-preview')).toBeVisible(); - const replace = answer.getByTestId('answer-replace'); - await replace.click(); + const { answer, responses } = await regenerateMindMap(); + expect(responses).toEqual(new Set(['replace-selection'])); + await expect + .poll( + async () => { + return answer + .locator('mini-mindmap-preview') + .evaluate(async preview => { + const walk = (root: ParentNode): Element[] => { + const results: Element[] = []; - // Expect original mindmap to be replaced - const mindmaps = await page.evaluate(() => { + for (const element of root.querySelectorAll('*')) { + results.push(element); + if (element.shadowRoot) { + results.push(...walk(element.shadowRoot)); + } + } + + return results; + }; + + await customElements.whenDefined('mini-mindmap-preview'); + + const previewElement = + preview instanceof HTMLElement + ? (preview as HTMLElement & { + updateComplete?: Promise; + }) + : null; + + await previewElement?.updateComplete; + await new Promise(resolve => + requestAnimationFrame(() => resolve(null)) + ); + + const shadowRoot = previewElement?.shadowRoot ?? null; + const descendants = walk(shadowRoot ?? 
preview); + const surface = descendants.find( + element => + element instanceof HTMLElement && + element.classList.contains('affine-mini-mindmap-surface') + ) as HTMLElement | undefined; + const surfaceRect = surface?.getBoundingClientRect(); + + return { + hasShadowRoot: !!shadowRoot, + hasRootBlock: descendants.some( + element => + element.tagName.toLowerCase() === 'mini-mindmap-root-block' + ), + hasSurfaceBlock: descendants.some( + element => + element.tagName.toLowerCase() === + 'mini-mindmap-surface-block' + ), + surfaceReady: + !!surface && + (surfaceRect?.width ?? 0) > 0 && + (surfaceRect?.height ?? 0) > 0, + }; + }); + }, + { timeout: 15_000 } + ) + .toEqual({ + hasShadowRoot: true, + hasRootBlock: true, + hasSurfaceBlock: true, + surfaceReady: true, + }); + const replace = answer.getByTestId('answer-replace'); + await expect(replace).toBeVisible(); + await replace.click({ force: true }); + + await expect + .poll( + async () => { + return page.evaluate(() => { + const edgelessBlock = document.querySelector( + 'affine-edgeless-root' + ) as EdgelessRootBlockComponent; + const mindmaps = edgelessBlock?.gfx.gfxElements.filter( + (el: GfxModel) => 'type' in el && el.type === 'mindmap' + ) as unknown as Array<{ + id: string; + tree: { + children?: unknown[]; + element: { text?: { toString(): string } }; + }; + }>; + + const mindmap = mindmaps?.[0]; + return { + count: mindmaps?.length ?? 0, + id: mindmap?.id ?? null, + childCount: mindmap?.tree.children?.length ?? 
0, + }; + }); + }, + { timeout: 15_000 } + ) + .toMatchObject({ + count: 1, + }); + + const replacedMindmap = await page.evaluate(() => { const edgelessBlock = document.querySelector( 'affine-edgeless-root' ) as EdgelessRootBlockComponent; - const mindmaps = edgelessBlock?.gfx.gfxElements - .filter((el: GfxModel) => 'type' in el && el.type === 'mindmap') - .map((el: GfxModel) => el.id); - return mindmaps; + const mindmaps = edgelessBlock?.gfx.gfxElements.filter( + (el: GfxModel) => 'type' in el && el.type === 'mindmap' + ) as unknown as Array<{ + id: string; + tree: { + children?: unknown[]; + element: { text?: { toString(): string } }; + }; + }>; + const mindmap = mindmaps?.[0]; + + return { + count: mindmaps?.length ?? 0, + id: mindmap?.id ?? null, + childCount: mindmap?.tree.children?.length ?? 0, + }; }); - expect(mindmaps).toHaveLength(1); - expect(mindmaps?.[0]).not.toBe(id!); + + expect(replacedMindmap.childCount).toBeGreaterThan(originalChildCount!); + expect(replacedMindmap.childCount).toBeGreaterThan(0); }); }); diff --git a/tests/affine-cloud-copilot/e2e/utils/editor-utils.ts b/tests/affine-cloud-copilot/e2e/utils/editor-utils.ts index 1332701fcc..525f4c308d 100644 --- a/tests/affine-cloud-copilot/e2e/utils/editor-utils.ts +++ b/tests/affine-cloud-copilot/e2e/utils/editor-utils.ts @@ -90,17 +90,34 @@ export class EditorUtils { return answer; } - private static createAction(page: Page, action: () => Promise) { + private static createAction( + page: Page, + action: () => Promise, + options?: { responseTimeoutMs?: number } + ) { return async () => { + const responseTimeoutMs = options?.responseTimeoutMs ?? 
60000; + await action(); + await this.waitForAiAnswer(page); + await page.getByTestId('ai-generating').waitFor({ + state: 'hidden', + timeout: 2 * 60000, + }); + const responses = new Set(); const answer = await this.waitForAiAnswer(page); const responsesMenu = answer.getByTestId('answer-responses'); - await responsesMenu.isVisible(); - await responsesMenu.scrollIntoViewIfNeeded({ timeout: 60000 }); + await responsesMenu.waitFor({ + state: 'visible', + timeout: responseTimeoutMs, + }); + await responsesMenu.scrollIntoViewIfNeeded({ + timeout: responseTimeoutMs, + }); await responsesMenu .getByTestId('answer-insert-below-loading') - .waitFor({ state: 'hidden' }); + .waitFor({ state: 'hidden', timeout: responseTimeoutMs }); if (await responsesMenu.getByTestId('answer-insert-below').isVisible()) { responses.add('insert-below'); @@ -458,8 +475,10 @@ export class EditorUtils { generateOutline: this.createAction(page, () => page.getByTestId('action-generate-outline').click() ), - generatePresentation: this.createAction(page, () => - page.getByTestId('action-generate-presentation').click() + generatePresentation: this.createAction( + page, + () => page.getByTestId('action-generate-presentation').click(), + { responseTimeoutMs: 120000 } ), imageProcessing: this.createAction(page, () => page.getByTestId('action-image-processing').click() @@ -634,8 +653,10 @@ export class EditorUtils { generateOutline: this.createAction(page, () => page.getByTestId('action-generate-outline').click() ), - generatePresentation: this.createAction(page, () => - page.getByTestId('action-generate-presentation').click() + generatePresentation: this.createAction( + page, + () => page.getByTestId('action-generate-presentation').click(), + { responseTimeoutMs: 120000 } ), imageProcessing: this.createAction(page, () => page.getByTestId('action-image-processing').click() diff --git a/tests/kit/src/utils/cloud.ts b/tests/kit/src/utils/cloud.ts index 0f3f7da33c..1a51b7d2d8 100644 --- 
a/tests/kit/src/utils/cloud.ts +++ b/tests/kit/src/utils/cloud.ts @@ -280,6 +280,27 @@ export async function loginUserDirectly( } } +async function dismissBlockingModal(page: Page) { + const modal = page.locator('modal-transition-container [data-modal="true"]'); + if ( + !(await modal + .first() + .isVisible() + .catch(() => false)) + ) { + return; + } + + const closeButton = page.getByTestId('modal-close-button').last(); + if (await closeButton.isVisible().catch(() => false)) { + await closeButton.click({ timeout: 5000 }); + } else { + await page.keyboard.press('Escape'); + } + + await expect(modal.first()).toBeHidden({ timeout: 10000 }); +} + export async function enableCloudWorkspace(page: Page) { await clickSideBarSettingButton(page); await page.getByTestId('workspace-setting:preference').click(); @@ -288,6 +309,7 @@ export async function enableCloudWorkspace(page: Page) { // wait for upload and delete local workspace await page.waitForTimeout(2000); await waitForAllPagesLoad(page); + await dismissBlockingModal(page); await clickNewPageButton(page); } @@ -303,6 +325,7 @@ export async function enableCloudWorkspaceFromShareButton(page: Page) { // wait for upload and delete local workspace await page.waitForTimeout(2000); await waitForEditorLoad(page); + await dismissBlockingModal(page); await clickNewPageButton(page); } diff --git a/tests/kit/src/utils/page-logic.ts b/tests/kit/src/utils/page-logic.ts index 1e690f4e54..a3c3057804 100644 --- a/tests/kit/src/utils/page-logic.ts +++ b/tests/kit/src/utils/page-logic.ts @@ -48,7 +48,10 @@ export async function clickNewPageButton(page: Page, title?: string) { } export async function waitForEmptyEditor(page: Page) { - await expect(page.locator('.doc-title-container-empty')).toBeVisible(); + await page.waitForSelector( + '.doc-title-container-empty, doc-title .inline-editor', + { timeout: 20000 } + ); } export function getBlockSuiteEditorTitle(page: Page) { diff --git a/yarn.lock b/yarn.lock index e6545b83b7..024a928797 
100644 --- a/yarn.lock +++ b/yarn.lock @@ -962,15 +962,15 @@ __metadata: "@affine/graphql": "workspace:*" "@affine/s3-compat": "workspace:*" "@affine/server-native": "workspace:*" - "@ai-sdk/google": "npm:^2.0.45" - "@ai-sdk/google-vertex": "npm:^3.0.88" + "@ai-sdk/google": "npm:^3.0.46" + "@ai-sdk/google-vertex": "npm:^4.0.83" "@apollo/server": "npm:^4.13.0" "@faker-js/faker": "npm:^10.1.0" "@fal-ai/serverless-client": "npm:^0.15.0" "@google-cloud/opentelemetry-cloud-trace-exporter": "npm:^3.0.0" "@google-cloud/opentelemetry-resource-util": "npm:^3.0.0" - "@nestjs-cls/transactional": "npm:^2.7.0" - "@nestjs-cls/transactional-adapter-prisma": "npm:^1.2.24" + "@nestjs-cls/transactional": "npm:^3.2.0" + "@nestjs-cls/transactional-adapter-prisma": "npm:^1.3.4" "@nestjs/apollo": "npm:^13.0.4" "@nestjs/bullmq": "npm:^11.0.4" "@nestjs/common": "npm:^11.0.21" @@ -1022,7 +1022,7 @@ __metadata: "@types/semver": "npm:^7.5.8" "@types/sinon": "npm:^21.0.0" "@types/supertest": "npm:^7.0.0" - ai: "npm:^5.0.118" + ai: "npm:^6.0.118" ava: "npm:^7.0.0" bullmq: "npm:^5.40.2" c8: "npm:^10.1.3" @@ -1122,77 +1122,77 @@ __metadata: languageName: unknown linkType: soft -"@ai-sdk/anthropic@npm:2.0.57": - version: 2.0.57 - resolution: "@ai-sdk/anthropic@npm:2.0.57" +"@ai-sdk/anthropic@npm:3.0.59": + version: 3.0.59 + resolution: "@ai-sdk/anthropic@npm:3.0.59" dependencies: - "@ai-sdk/provider": "npm:2.0.1" - "@ai-sdk/provider-utils": "npm:3.0.20" + "@ai-sdk/provider": "npm:3.0.8" + "@ai-sdk/provider-utils": "npm:4.0.20" peerDependencies: zod: ^3.25.76 || ^4.1.8 - checksum: 10/99aec7a3ae544f37e7ecd622ce1853bef100874ae94b792c7b9bcc496c946768ff2cb121e86738e03b0d01b0ea1f98ffcc248ea2b5f2d7e494bcf2edca24452d + checksum: 10/b7504dc845f2cd487a4a18db9dbf9e2231fbe3b0a5a22ea12bedb7d4f276463cdd4fd39493efd40f39c78b9023af5f00a4e603981265b66ea43701ad699da5c9 languageName: node linkType: hard -"@ai-sdk/gateway@npm:2.0.24": - version: 2.0.24 - resolution: "@ai-sdk/gateway@npm:2.0.24" 
+"@ai-sdk/gateway@npm:3.0.68": + version: 3.0.68 + resolution: "@ai-sdk/gateway@npm:3.0.68" dependencies: - "@ai-sdk/provider": "npm:2.0.1" - "@ai-sdk/provider-utils": "npm:3.0.20" - "@vercel/oidc": "npm:3.0.5" + "@ai-sdk/provider": "npm:3.0.8" + "@ai-sdk/provider-utils": "npm:4.0.20" + "@vercel/oidc": "npm:3.1.0" peerDependencies: zod: ^3.25.76 || ^4.1.8 - checksum: 10/8ab1518123bbee08d294002d686dc730e7ed5016de1bcd4fa27dbe5fe6ac3db6aaeb0bb6e2ba77a8464284054e30c96f3e7eb6c48cd443d24cb0f7079ecb68e4 + checksum: 10/4a6923a6e610472de0ea18f25077df0b394d31b287285f512d6e16ee8b0b90421faf284399f37e11045ea370794a6060686a3b69b68fda04b6dc13562cd8fd8e languageName: node linkType: hard -"@ai-sdk/google-vertex@npm:^3.0.88": - version: 3.0.97 - resolution: "@ai-sdk/google-vertex@npm:3.0.97" +"@ai-sdk/google-vertex@npm:^4.0.83": + version: 4.0.83 + resolution: "@ai-sdk/google-vertex@npm:4.0.83" dependencies: - "@ai-sdk/anthropic": "npm:2.0.57" - "@ai-sdk/google": "npm:2.0.52" - "@ai-sdk/provider": "npm:2.0.1" - "@ai-sdk/provider-utils": "npm:3.0.20" + "@ai-sdk/anthropic": "npm:3.0.59" + "@ai-sdk/google": "npm:3.0.46" + "@ai-sdk/provider": "npm:3.0.8" + "@ai-sdk/provider-utils": "npm:4.0.20" google-auth-library: "npm:^10.5.0" peerDependencies: zod: ^3.25.76 || ^4.1.8 - checksum: 10/74a96a17617bd4c8ba52a72baf9bb089d774120ae10219f8ff62627695e030718ac08badb873ac3bb05b4c203f56e2d13b3eea86687e6d2c32af03806f16d0a4 + checksum: 10/330ed81cac6779d81e904fe6668cd366bda9d91f911bf318ea3f4c5f9c246ff9f89523fedd268e27fe77aece89ced8e7f49a18e179977f25a9c3251d07df358c languageName: node linkType: hard -"@ai-sdk/google@npm:2.0.52, @ai-sdk/google@npm:^2.0.45": - version: 2.0.52 - resolution: "@ai-sdk/google@npm:2.0.52" +"@ai-sdk/google@npm:3.0.46, @ai-sdk/google@npm:^3.0.46": + version: 3.0.46 + resolution: "@ai-sdk/google@npm:3.0.46" dependencies: - "@ai-sdk/provider": "npm:2.0.1" - "@ai-sdk/provider-utils": "npm:3.0.20" + "@ai-sdk/provider": "npm:3.0.8" + "@ai-sdk/provider-utils": "npm:4.0.20" 
peerDependencies: zod: ^3.25.76 || ^4.1.8 - checksum: 10/0057bb8307392a693a586b028bb69a0d06673f91685e39bc0f02c2493efd2240321967358cff55ddbdbb05b4d86faa06fcaba7a8d00e70a1825ec63cfc834baf + checksum: 10/295e9f53c6e14e836164a6755d2c50b2840c7a9542919c2684b916c3b8155cf4fabcd30c431a61e12d658e3dceb6af23a284daa63d5311850a03f9d3346038f9 languageName: node linkType: hard -"@ai-sdk/provider-utils@npm:3.0.20": - version: 3.0.20 - resolution: "@ai-sdk/provider-utils@npm:3.0.20" +"@ai-sdk/provider-utils@npm:4.0.20": + version: 4.0.20 + resolution: "@ai-sdk/provider-utils@npm:4.0.20" dependencies: - "@ai-sdk/provider": "npm:2.0.1" - "@standard-schema/spec": "npm:^1.0.0" + "@ai-sdk/provider": "npm:3.0.8" + "@standard-schema/spec": "npm:^1.1.0" eventsource-parser: "npm:^3.0.6" peerDependencies: zod: ^3.25.76 || ^4.1.8 - checksum: 10/741faf25164ee61bd23982c051d9c81a70eb8c5b897ddb51c224aead6fcdee485b600dced40bc523d5af23fe19367f0a3a7e2920d110dfa3f60c211752cd2443 + checksum: 10/1a2d5adc262582cfff9b86afae37ba6291fae5b9155250f02ee8fdd119a7cc1351960ed20181e6f671c28153daf8d69e864e883dee06b96d36c486e2a1a32be9 languageName: node linkType: hard -"@ai-sdk/provider@npm:2.0.1": - version: 2.0.1 - resolution: "@ai-sdk/provider@npm:2.0.1" +"@ai-sdk/provider@npm:3.0.8": + version: 3.0.8 + resolution: "@ai-sdk/provider@npm:3.0.8" dependencies: json-schema: "npm:^0.4.0" - checksum: 10/b828707f5731b705154174950f3b407b63b5d7e79d641c794fa87e45a2a07534d8a6739f7ec4b4ead8c5edbe19c6e34ceecf67fabbe300413208734486c63fdb + checksum: 10/85fb7b9c7cd9ea1aa9840aa57a9517a7ecec8c25a33a31e4615f4eceede9fe61f072b2a2915e4713f2b78c8b94a8c25a79ddbcf998f0d537c02ba47442402542 languageName: node linkType: hard @@ -8848,7 +8848,7 @@ __metadata: languageName: node linkType: hard -"@nestjs-cls/transactional-adapter-prisma@npm:^1.2.24": +"@nestjs-cls/transactional-adapter-prisma@npm:^1.3.4": version: 1.3.4 resolution: "@nestjs-cls/transactional-adapter-prisma@npm:1.3.4" peerDependencies: @@ -8860,16 +8860,16 @@ __metadata: 
languageName: node linkType: hard -"@nestjs-cls/transactional@npm:^2.7.0": - version: 2.7.0 - resolution: "@nestjs-cls/transactional@npm:2.7.0" +"@nestjs-cls/transactional@npm:^3.2.0": + version: 3.2.0 + resolution: "@nestjs-cls/transactional@npm:3.2.0" peerDependencies: "@nestjs/common": ">= 10 < 12" "@nestjs/core": ">= 10 < 12" - nestjs-cls: ^5.4.3 + nestjs-cls: ^6.2.0 reflect-metadata: "*" rxjs: ">= 7" - checksum: 10/172f121c279507ec46250197b70fd194d68efd4e0c4baf0c4d38d105cbdda46ee9c42b9123df7e35f55d1a276b4b117bcab66f40772a44f083eb997e83e67ac4 + checksum: 10/cb9768a86e868ef6f71ce5c2746ca44aea457accc6c2754e146bd6d6de92546fced25fc1f13be8ce3f8fbf7230e8cd0cbba9616bfb8e3788c7ef36f2545b2549 languageName: node linkType: hard @@ -15268,10 +15268,10 @@ __metadata: languageName: node linkType: hard -"@standard-schema/spec@npm:^1.0.0": - version: 1.0.0 - resolution: "@standard-schema/spec@npm:1.0.0" - checksum: 10/aee780cc1431888ca4b9aba9b24ffc8f3073fc083acc105e3951481478a2f4dc957796931b2da9e2d8329584cf211e4542275f188296c1cdff3ed44fd93a8bc8 +"@standard-schema/spec@npm:^1.0.0, @standard-schema/spec@npm:^1.1.0": + version: 1.1.0 + resolution: "@standard-schema/spec@npm:1.1.0" + checksum: 10/a209615c9e8b2ea535d7db0a5f6aa0f962fd4ab73ee86a46c100fb78116964af1f55a27c1794d4801e534a196794223daa25ff5135021e03c7828aa3d95e1763 languageName: node linkType: hard @@ -17695,10 +17695,10 @@ __metadata: languageName: node linkType: hard -"@vercel/oidc@npm:3.0.5": - version: 3.0.5 - resolution: "@vercel/oidc@npm:3.0.5" - checksum: 10/a602190fff2e55ff480bdd17ac2c0ae8000bef12d58b179291b6da639a674835c4fd53536c449bef782ae6d24da7bed549551ffc056172215060658f83b74b98 +"@vercel/oidc@npm:3.1.0": + version: 3.1.0 + resolution: "@vercel/oidc@npm:3.1.0" + checksum: 10/2e7fe962a441bbc8b305639f8ab1830fb3c2bb51affa90ae84431af65a29c98343aa089d84dff3730013f0b3fb8dc67ad10fad97c4ce7fdf584510d79fa3919c languageName: node linkType: hard @@ -18284,17 +18284,17 @@ __metadata: languageName: node linkType: hard 
-"ai@npm:^5.0.118": - version: 5.0.118 - resolution: "ai@npm:5.0.118" +"ai@npm:^6.0.118": + version: 6.0.118 + resolution: "ai@npm:6.0.118" dependencies: - "@ai-sdk/gateway": "npm:2.0.24" - "@ai-sdk/provider": "npm:2.0.1" - "@ai-sdk/provider-utils": "npm:3.0.20" + "@ai-sdk/gateway": "npm:3.0.68" + "@ai-sdk/provider": "npm:3.0.8" + "@ai-sdk/provider-utils": "npm:4.0.20" "@opentelemetry/api": "npm:1.9.0" peerDependencies: zod: ^3.25.76 || ^4.1.8 - checksum: 10/daddc647ee56e717077438272ed22bd27fb393561f19c47cdbab9897a9107232e01dab38fc6b3bf72b332ba8a7661b2689b3046164639a986c772053efa56773 + checksum: 10/ec77fe34a4cfe0e4ac283133fd9e838eea741ed1569598b02a95a54c60113153d88638c95e3f67eed8bb1f307c2cdc8310b5338f50fad76c58e8fb0d2dc457eb languageName: node linkType: hard