feat(server): update claude models (#13677)

<!-- This is an auto-generated comment: release notes by coderabbit.ai
-->
## Summary by CodeRabbit

* **New Features**
  * Copilot now defaults to the updated Claude Sonnet 4.5 model across experiences for improved responses.

* **Chores**
  * Consolidated available Anthropic models, removing older Sonnet 3.x variants and standardizing Sonnet 4/4.5 options.
  * Updated configuration defaults and schema mappings to reference the new Sonnet 4.5 model.

* **Tests**
  * Updated unit and end-to-end tests to reference the new model to ensure consistent behavior.
<!-- end of auto-generated comment: release notes by coderabbit.ai -->
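
For quick reference, the model identifier changes in this commit boil down to the mapping below. This is an editor's summary compiled from the diffs that follow, not code from the change itself; note that the official Anthropic API ids put a `-` before the date stamp while the Vertex ids use `@`.

```ts
// Summary sketch of the Claude model id changes in this commit.
// Compiled from the diffs below; the object itself is illustrative only.
const claudeModelUpdates = {
  // Config defaults, schema docs, prompts and tests now reference:
  codingDefault: {
    before: 'claude-sonnet-4@20250514',
    after: 'claude-sonnet-4-5@20250929',
  },
  // Official Anthropic API provider ("-" date separator):
  anthropicOfficial: {
    added: ['claude-sonnet-4-5-20250929'],
    removed: ['claude-3-7-sonnet-20250219', 'claude-3-5-sonnet-20241022'],
  },
  // Anthropic on Vertex AI ("@" date separator):
  anthropicVertex: {
    added: ['claude-sonnet-4-5@20250929'],
    removed: ['claude-3-7-sonnet@20250219', 'claude-3-5-sonnet-v2@20241022'],
  },
};
```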
Author: DarkSky
Date: 2025-09-30 10:49:55 +08:00 (committed by GitHub)
Parent: b44fdbce0c
Commit: b59c1f9e57
12 changed files with 37 additions and 59 deletions

View File

@@ -684,7 +684,7 @@
},
"scenarios": {
"type": "object",
"description": "Use custom models in scenarios and override default settings.\n@default {\"override_enabled\":false,\"scenarios\":{\"audio_transcribing\":\"gemini-2.5-flash\",\"chat\":\"gemini-2.5-flash\",\"embedding\":\"gemini-embedding-001\",\"image\":\"gpt-image-1\",\"rerank\":\"gpt-4.1\",\"coding\":\"claude-sonnet-4@20250514\",\"complex_text_generation\":\"gpt-4o-2024-08-06\",\"quick_decision_making\":\"gpt-5-mini\",\"quick_text_generation\":\"gemini-2.5-flash\",\"polish_and_summarize\":\"gemini-2.5-flash\"}}",
"description": "Use custom models in scenarios and override default settings.\n@default {\"override_enabled\":false,\"scenarios\":{\"audio_transcribing\":\"gemini-2.5-flash\",\"chat\":\"gemini-2.5-flash\",\"embedding\":\"gemini-embedding-001\",\"image\":\"gpt-image-1\",\"rerank\":\"gpt-4.1\",\"coding\":\"claude-sonnet-4-5@20250929\",\"complex_text_generation\":\"gpt-4o-2024-08-06\",\"quick_decision_making\":\"gpt-5-mini\",\"quick_text_generation\":\"gemini-2.5-flash\",\"polish_and_summarize\":\"gemini-2.5-flash\"}}",
"default": {
"override_enabled": false,
"scenarios": {
@@ -693,7 +693,7 @@
"embedding": "gemini-embedding-001",
"image": "gpt-image-1",
"rerank": "gpt-4.1",
"coding": "claude-sonnet-4@20250514",
"coding": "claude-sonnet-4-5@20250929",
"complex_text_generation": "gpt-4o-2024-08-06",
"quick_decision_making": "gpt-5-mini",
"quick_text_generation": "gemini-2.5-flash",

View File

@@ -473,7 +473,7 @@ Generated by [AVA](https://avajs.dev).
> should honor requested pro model during active
-'claude-sonnet-4@20250514'
+'claude-sonnet-4-5@20250929'
> should fallback to default model when requesting non-optional model during active

View File

@@ -2074,11 +2074,11 @@ test('should resolve model correctly based on subscription status and prompt con
messages: {
create: [{ idx: 0, role: 'system', content: 'test' }],
},
-config: { proModels: ['gemini-2.5-pro', 'claude-sonnet-4@20250514'] },
+config: { proModels: ['gemini-2.5-pro', 'claude-sonnet-4-5@20250929'] },
optionalModels: [
'gemini-2.5-flash',
'gemini-2.5-pro',
-'claude-sonnet-4@20250514',
+'claude-sonnet-4-5@20250929',
],
},
});
@@ -2138,7 +2138,7 @@ test('should resolve model correctly based on subscription status and prompt con
'should pick default model when no requested model during active'
);
-const model7 = await s.resolveModel(true, 'claude-sonnet-4@20250514');
+const model7 = await s.resolveModel(true, 'claude-sonnet-4-5@20250929');
t.snapshot(model7, 'should honor requested pro model during active');
const model8 = await s.resolveModel(true, 'not-in-optional');
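
Reading the snapshot titles together, the resolution rules these tests pin down are roughly the following. This is a behavioral sketch inferred from the test names above; the parameter names and the exact checks are assumptions, not the copilot service's actual implementation.

```ts
// Behavioral sketch inferred from the snapshot titles; not the service code.
function resolveModelSketch(
  active: boolean, // subscription active, as in `s.resolveModel(true, ...)`
  requested: string | undefined,
  optionalModels: string[],
  proModels: string[],
  defaultModel: string
): string {
  if (!requested) return defaultModel; // "pick default model when no requested model"
  if (!optionalModels.includes(requested)) return defaultModel; // "fallback ... non-optional model"
  if (proModels.includes(requested) && !active) return defaultModel; // pro models assumed to require an active plan
  return requested; // "honor requested pro model during active"
}
```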

View File

@@ -55,7 +55,7 @@ defineModuleConfig('copilot', {
embedding: 'gemini-embedding-001',
image: 'gpt-image-1',
rerank: 'gpt-4.1',
-coding: 'claude-sonnet-4@20250514',
+coding: 'claude-sonnet-4-5@20250929',
complex_text_generation: 'gpt-4o-2024-08-06',
quick_decision_making: 'gpt-5-mini',
quick_text_generation: 'gemini-2.5-flash',

View File

@@ -1390,7 +1390,7 @@ If there are items in the content that can be used as to-do tasks, please refer
{
name: 'Make it real',
action: 'Make it real',
-model: 'claude-sonnet-4@20250514',
+model: 'claude-sonnet-4-5@20250929',
messages: [
{
role: 'system',
@@ -1431,7 +1431,7 @@ When sent new wireframes, respond ONLY with the contents of the html file.`,
{
name: 'Make it real with text',
action: 'Make it real with text',
-model: 'claude-sonnet-4@20250514',
+model: 'claude-sonnet-4-5@20250929',
messages: [
{
role: 'system',
@@ -1712,7 +1712,7 @@ const modelActions: Prompt[] = [
{
name: 'Apply Updates',
action: 'Apply Updates',
-model: 'claude-sonnet-4@20250514',
+model: 'claude-sonnet-4-5@20250929',
messages: [
{
role: 'user',
@@ -1868,7 +1868,7 @@ Now apply the \`updates\` to the \`content\`, following the intent in \`op\`, an
},
{
name: 'Code Artifact',
-model: 'claude-sonnet-4@20250514',
+model: 'claude-sonnet-4-5@20250929',
messages: [
{
role: 'system',
@@ -1932,7 +1932,7 @@ const CHAT_PROMPT: Omit<Prompt, 'name'> = {
optionalModels: [
'gemini-2.5-flash',
'gemini-2.5-pro',
-'claude-sonnet-4@20250514',
+'claude-sonnet-4-5@20250929',
],
messages: [
{
@@ -2092,7 +2092,7 @@ Below is the user's query. Please respond in the user's preferred language witho
'codeArtifact',
'blobRead',
],
-proModels: ['gemini-2.5-pro', 'claude-sonnet-4@20250514'],
+proModels: ['gemini-2.5-pro', 'claude-sonnet-4-5@20250929'],
},
};

View File

@@ -30,6 +30,16 @@ export class AnthropicOfficialProvider extends AnthropicProvider<AnthropicOffici
},
],
},
+{
+name: 'Claude Sonnet 4.5',
+id: 'claude-sonnet-4-5-20250929',
+capabilities: [
+{
+input: [ModelInputType.Text, ModelInputType.Image],
+output: [ModelOutputType.Text, ModelOutputType.Object],
+},
+],
+},
{
name: 'Claude Sonnet 4',
id: 'claude-sonnet-4-20250514',
@@ -40,27 +50,6 @@ export class AnthropicOfficialProvider extends AnthropicProvider<AnthropicOffici
},
],
},
-{
-name: 'Claude 3.7 Sonnet',
-id: 'claude-3-7-sonnet-20250219',
-capabilities: [
-{
-input: [ModelInputType.Text, ModelInputType.Image],
-output: [ModelOutputType.Text, ModelOutputType.Object],
-},
-],
-},
-{
-name: 'Claude 3.5 Sonnet',
-id: 'claude-3-5-sonnet-20241022',
-capabilities: [
-{
-input: [ModelInputType.Text, ModelInputType.Image],
-output: [ModelOutputType.Text, ModelOutputType.Object],
-defaultForOutputType: true,
-},
-],
-},
];
protected instance!: AnthropicSDKProvider;

View File

@@ -24,6 +24,16 @@ export class AnthropicVertexProvider extends AnthropicProvider<AnthropicVertexCo
},
],
},
+{
+name: 'Claude Sonnet 4.5',
+id: 'claude-sonnet-4-5@20250929',
+capabilities: [
+{
+input: [ModelInputType.Text, ModelInputType.Image],
+output: [ModelOutputType.Text, ModelOutputType.Object],
+},
+],
+},
{
name: 'Claude Sonnet 4',
id: 'claude-sonnet-4@20250514',
@@ -34,27 +44,6 @@ export class AnthropicVertexProvider extends AnthropicProvider<AnthropicVertexCo
},
],
},
-{
-name: 'Claude 3.7 Sonnet',
-id: 'claude-3-7-sonnet@20250219',
-capabilities: [
-{
-input: [ModelInputType.Text, ModelInputType.Image],
-output: [ModelOutputType.Text, ModelOutputType.Object],
-},
-],
-},
-{
-name: 'Claude 3.5 Sonnet',
-id: 'claude-3-5-sonnet-v2@20241022',
-capabilities: [
-{
-input: [ModelInputType.Text, ModelInputType.Image],
-output: [ModelOutputType.Text, ModelOutputType.Object],
-defaultForOutputType: true,
-},
-],
-},
];
protected instance!: GoogleVertexAnthropicProvider;
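
Comparing this hunk with the official-provider change above: the two backends identify the same model with different id conventions, which is why the two lists never share identifiers; the configuration and prompt defaults in this commit use the `@`-style id. Roughly:

```ts
// Same model, two id conventions (both taken from the diffs above).
const sonnet45Ids = {
  anthropicOfficial: 'claude-sonnet-4-5-20250929', // "-" before the date
  anthropicVertex: 'claude-sonnet-4-5@20250929', // "@" before the date
};
```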

View File

@@ -8,7 +8,7 @@ test.describe('AIChatWith/Attachments', () => {
test.beforeEach(async ({ loggedInPage: page, utils }) => {
await utils.testUtils.setupTestEnvironment(
page,
-'claude-sonnet-4@20250514'
+'claude-sonnet-4-5@20250929'
);
await utils.chatPanel.openChatPanel(page);
});

View File

@@ -8,7 +8,7 @@ test.describe('AIChatWith/Collections', () => {
test.beforeEach(async ({ loggedInPage: page, utils }) => {
await utils.testUtils.setupTestEnvironment(
page,
-'claude-sonnet-4@20250514'
+'claude-sonnet-4-5@20250929'
);
await utils.chatPanel.openChatPanel(page);
await utils.editor.clearAllCollections(page);

View File

@@ -9,7 +9,7 @@ test.describe('AISettings/Embedding', () => {
test.beforeEach(async ({ loggedInPage: page, utils }) => {
await utils.testUtils.setupTestEnvironment(
page,
-'claude-sonnet-4@20250514'
+'claude-sonnet-4-5@20250929'
);
await utils.chatPanel.openChatPanel(page);
});

View File

@@ -386,7 +386,7 @@ test.describe('paste to code block', () => {
await pressEnter(page);
await addCodeBlock(page);
const plainTextCode = [
-' model: anthropic("claude-3-7-sonnet-20250219"),',
+' model: anthropic("claude-sonnet-4-5-20250929"),',
' prompt: How many people will live in the world in 2040?',
' providerOptions: {',
' anthropic: {',
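
The pasted snippet in this last test reads like a Vercel AI SDK call; purely for context, a self-contained version of that fragment might look like the sketch below. This is illustrative only; the test asserts on the pasted plain text, and this code is not part of the commit.

```ts
import { anthropic } from '@ai-sdk/anthropic';
import { generateText } from 'ai';

// Illustrative expansion of the fragment pasted in the test above;
// not code that ships with this commit.
const { text } = await generateText({
  model: anthropic('claude-sonnet-4-5-20250929'),
  prompt: 'How many people will live in the world in 2040?',
  providerOptions: {
    anthropic: {
      // the pasted fragment is truncated here; provider options would follow
    },
  },
});
```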