feat: get prompt model names (#13607)

fix AI-419

<!-- This is an auto-generated comment: release notes by coderabbit.ai -->
## Summary by CodeRabbit

- New Features
  - New API to fetch the available models for a prompt, returning the default, optional, and pro models with human-readable names (a usage sketch follows these notes).
  - Added temperature and topP settings to prompt configuration for finer control.
- Refactor
  - When no model is chosen, the default model is used instead of auto-picking a pro model.
  - Model metadata across providers now includes readable names, improving listings and selection UX.
- Tests
  - Updated test snapshots and descriptions to reflect the new default-model behavior.
<!-- end of auto-generated comment: release notes by coderabbit.ai -->
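
For reviewers who want to try the new API, here is a minimal client-side sketch. The `models(promptName:)` selection matches the schema added in this PR; the endpoint URL, the `currentUser.copilot` parent path, and the prompt name are illustrative assumptions, not part of this diff.

```ts
// Sketch only: the endpoint, the currentUser.copilot parent path, and the
// prompt name are assumed; the models selection mirrors this PR's schema.
async function fetchPromptModels(endpoint: string, promptName: string) {
  const query = /* GraphQL */ `
    query PromptModels($promptName: String!) {
      currentUser {
        copilot {
          models(promptName: $promptName) {
            defaultModel
            optionalModels { id name }
            proModels { id name }
          }
        }
      }
    }
  `;
  const res = await fetch(endpoint, {
    method: 'POST',
    headers: { 'content-type': 'application/json' },
    body: JSON.stringify({ query, variables: { promptName } }),
  });
  const { data, errors } = await res.json();
  if (errors?.length) throw new Error(errors[0].message);
  // e.g. { defaultModel: '...', optionalModels: [{ id, name }], proModels: [{ id, name }] }
  return data.currentUser.copilot.models;
}
```

Model ids the server cannot map to a readable name are simply omitted from the lists (see the resolver hunk below).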
Authored by DarkSky on 2025-09-18 20:56:54 +08:00, committed by GitHub
parent a0b73cdcec
commit ee77c548ca
12 changed files with 200 additions and 13 deletions


@@ -467,9 +467,9 @@ Generated by [AVA](https://avajs.dev).
     'gemini-2.5-flash'
-> should pick first pro model when no requested model during active
+> should pick default model when no requested model during active
-    'gemini-2.5-pro'
+    'gemini-2.5-flash'
 > should honor requested pro model during active


@@ -2129,13 +2129,13 @@ test('should resolve model correctly based on subscription status and prompt con
     );
   }
-  // payment enabled + active: without requested -> first pro; requested pro should be honored
+  // payment enabled + active: without requested -> default model; requested pro should be honored
   {
     mockStatus(SubscriptionStatus.Active);
     const model6 = await s.resolveModel(true);
     t.snapshot(
       model6,
-      'should pick first pro model when no requested model during active'
+      'should pick default model when no requested model during active'
     );
     const model7 = await s.resolveModel(true, 'claude-sonnet-4@20250514');


@@ -21,6 +21,7 @@ export class AnthropicOfficialProvider extends AnthropicProvider<AnthropicOffici
   override readonly models = [
     {
+      name: 'Claude Opus 4',
       id: 'claude-opus-4-20250514',
       capabilities: [
         {
@@ -30,6 +31,7 @@ export class AnthropicOfficialProvider extends AnthropicProvider<AnthropicOffici
       ],
     },
     {
+      name: 'Claude Sonnet 4',
       id: 'claude-sonnet-4-20250514',
       capabilities: [
         {
@@ -39,6 +41,7 @@ export class AnthropicOfficialProvider extends AnthropicProvider<AnthropicOffici
       ],
     },
     {
+      name: 'Claude 3.7 Sonnet',
       id: 'claude-3-7-sonnet-20250219',
       capabilities: [
         {
@@ -48,6 +51,7 @@ export class AnthropicOfficialProvider extends AnthropicProvider<AnthropicOffici
       ],
     },
     {
+      name: 'Claude 3.5 Sonnet',
       id: 'claude-3-5-sonnet-20241022',
       capabilities: [
         {


@@ -15,6 +15,7 @@ export class AnthropicVertexProvider extends AnthropicProvider<AnthropicVertexCo
   override readonly models = [
     {
+      name: 'Claude Opus 4',
       id: 'claude-opus-4@20250514',
       capabilities: [
         {
@@ -24,6 +25,7 @@ export class AnthropicVertexProvider extends AnthropicProvider<AnthropicVertexCo
       ],
     },
     {
+      name: 'Claude Sonnet 4',
       id: 'claude-sonnet-4@20250514',
       capabilities: [
         {
@@ -33,6 +35,7 @@ export class AnthropicVertexProvider extends AnthropicProvider<AnthropicVertexCo
       ],
     },
     {
+      name: 'Claude 3.7 Sonnet',
       id: 'claude-3-7-sonnet@20250219',
       capabilities: [
         {
@@ -42,6 +45,7 @@ export class AnthropicVertexProvider extends AnthropicProvider<AnthropicVertexCo
       ],
     },
     {
+      name: 'Claude 3.5 Sonnet',
       id: 'claude-3-5-sonnet-v2@20241022',
       capabilities: [
         {


@@ -91,6 +91,7 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
   readonly models = [
     // Text to Text models
     {
+      name: 'GPT 4o',
       id: 'gpt-4o',
       capabilities: [
         {
@@ -101,6 +102,7 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
     },
     // FIXME(@darkskygit): deprecated
     {
+      name: 'GPT 4o 2024-08-06',
       id: 'gpt-4o-2024-08-06',
       capabilities: [
         {
@@ -110,6 +112,7 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
       ],
     },
     {
+      name: 'GPT 4o Mini',
       id: 'gpt-4o-mini',
       capabilities: [
         {
@@ -120,6 +123,7 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
     },
     // FIXME(@darkskygit): deprecated
     {
+      name: 'GPT 4o Mini 2024-07-18',
       id: 'gpt-4o-mini-2024-07-18',
       capabilities: [
         {
@@ -129,6 +133,7 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
       ],
     },
     {
+      name: 'GPT 4.1',
       id: 'gpt-4.1',
       capabilities: [
         {
@@ -143,6 +148,7 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
       ],
     },
     {
+      name: 'GPT 4.1 2025-04-14',
       id: 'gpt-4.1-2025-04-14',
       capabilities: [
         {
@@ -156,6 +162,7 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
      ],
     },
     {
+      name: 'GPT 4.1 Mini',
       id: 'gpt-4.1-mini',
       capabilities: [
         {
@@ -169,6 +176,7 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
       ],
     },
     {
+      name: 'GPT 4.1 Nano',
       id: 'gpt-4.1-nano',
       capabilities: [
         {
@@ -182,6 +190,7 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
       ],
     },
     {
+      name: 'GPT 5',
       id: 'gpt-5',
       capabilities: [
         {
@@ -195,6 +204,7 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
       ],
     },
     {
+      name: 'GPT 5 2025-08-07',
       id: 'gpt-5-2025-08-07',
       capabilities: [
         {
@@ -208,6 +218,7 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
       ],
     },
     {
+      name: 'GPT 5 Mini',
       id: 'gpt-5-mini',
       capabilities: [
         {
@@ -221,6 +232,7 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
       ],
     },
     {
+      name: 'GPT 5 Nano',
       id: 'gpt-5-nano',
       capabilities: [
         {
@@ -234,6 +246,7 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
       ],
     },
     {
+      name: 'GPT O1',
       id: 'o1',
       capabilities: [
         {
@@ -243,6 +256,7 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
       ],
     },
     {
+      name: 'GPT O3',
       id: 'o3',
       capabilities: [
         {
@@ -252,6 +266,7 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
       ],
     },
     {
+      name: 'GPT O4 Mini',
       id: 'o4-mini',
       capabilities: [
         {

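All three provider hunks above make the same change: each model entry gains a human-readable `name` alongside its `id`. A rough sketch of the shared entry shape, inferred from the hunks; the `capabilities` field is left loosely typed here because its structure is outside this PR.

```ts
// Inferred shape of a provider model entry after this change. Only `name` is
// new; `capabilities` is deliberately left as unknown[] since its details are
// not shown in the hunks above.
interface ProviderModelEntry {
  name: string; // human-readable label, e.g. 'GPT 4o' or 'Claude Sonnet 4'
  id: string; // provider model id, e.g. 'gpt-4o' or 'claude-sonnet-4-20250514'
  capabilities: unknown[];
}

// Example entry matching the first OpenAI hunk above.
const gpt4o: ProviderModelEntry = {
  name: 'GPT 4o',
  id: 'gpt-4o',
  capabilities: [],
};
```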

@@ -362,6 +362,27 @@ class CopilotPromptType {
   messages!: CopilotPromptMessageType[];
 }
+@ObjectType()
+class CopilotModelType {
+  @Field(() => String)
+  id!: string;
+  @Field(() => String)
+  name!: string;
+}
+@ObjectType()
+export class CopilotModelsType {
+  @Field(() => String)
+  defaultModel!: string;
+  @Field(() => [CopilotModelType])
+  optionalModels!: CopilotModelType[];
+  @Field(() => [CopilotModelType])
+  proModels!: CopilotModelType[];
+}
 @ObjectType()
 export class CopilotSessionType {
   @Field(() => ID)
@@ -400,9 +421,12 @@ export class CopilotType {
 @Throttle()
 @Resolver(() => CopilotType)
 export class CopilotResolver {
+  private readonly modelNames = new Map<string, string>();
   constructor(
     private readonly ac: AccessController,
     private readonly mutex: RequestMutex,
+    private readonly prompt: PromptService,
     private readonly chatSession: ChatSessionService,
     private readonly storage: CopilotStorage,
     private readonly docReader: DocReader,
@@ -443,6 +467,48 @@ export class CopilotResolver {
     return { userId: user.id, workspaceId, docId: docId || undefined };
   }
+  @ResolveField(() => CopilotModelsType, {
+    description:
+      'List available models for a prompt, with human-readable names',
+    complexity: 2,
+  })
+  async models(
+    @Args('promptName') promptName: string
+  ): Promise<CopilotModelsType> {
+    const prompt = await this.prompt.get(promptName);
+    if (!prompt) {
+      throw new NotFoundException('Prompt not found');
+    }
+    const convertModels = (ids: string[]) => {
+      return ids
+        .map(id => ({ id, name: this.modelNames.get(id) }))
+        .filter(m => !!m.name) as CopilotModelType[];
+    };
+    const proModels = prompt.config?.proModels || [];
+    const missing = new Set(
+      [...prompt.optionalModels, ...proModels].filter(
+        id => !this.modelNames.has(id)
+      )
+    );
+    if (missing.size) {
+      for (const model of missing) {
+        if (this.modelNames.has(model)) continue;
+        const provider = await this.providerFactory.getProviderByModel(model);
+        if (provider?.configured()) {
+          for (const m of provider.models) {
+            if (m.name) this.modelNames.set(m.id, m.name);
+          }
+        }
+      }
+    }
+    return {
+      defaultModel: prompt.model,
+      optionalModels: convertModels(prompt.optionalModels),
+      proModels: convertModels(proModels),
+    };
+  }
   @ResolveField(() => CopilotSessionType, {
     description: 'Get the session by id',
     complexity: 2,

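One reviewer-facing detail in the resolver above: ids whose provider is not configured, or whose model entry has no `name`, are silently dropped by `convertModels` rather than reported as errors. A condensed, standalone restatement of that filtering (type and helper names follow the hunk; the sample ids are illustrative only):

```ts
type CopilotModel = { id: string; name: string };

// Same logic as convertModels in the resolver: ids without a resolved
// human-readable name are filtered out instead of raising an error.
function convertModels(
  ids: string[],
  names: Map<string, string>
): CopilotModel[] {
  return ids
    .map(id => ({ id, name: names.get(id) }))
    .filter((m): m is CopilotModel => !!m.name);
}

// convertModels(['gpt-4o', 'not-a-model'], new Map([['gpt-4o', 'GPT 4o']]))
// -> [{ id: 'gpt-4o', name: 'GPT 4o' }]
```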

@@ -130,15 +130,8 @@ export class ChatSession implements AsyncDisposable {
       paymentEnabled = false;
     }
-    if (paymentEnabled) {
-      if (isUserAIPro) {
-        if (!requestedModelId) {
-          const firstPro = this.proModels[0];
-          return normalize(firstPro);
-        }
-      } else if (isPro(requestedModelId)) {
-        return defaultModel;
-      }
+    if (paymentEnabled && !isUserAIPro && isPro(requestedModelId)) {
+      return defaultModel;
     }
     return normalize(requestedModelId);

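The refactor above drops the special case that auto-picked the first pro model. A condensed, standalone restatement of the new rule; `normalize` and `isPro` are names taken from the hunk, and the assumption that `normalize` falls back to the default model when no id is requested comes from the updated snapshots and release notes rather than from this hunk.

```ts
// Sketch of the simplified resolution rule, not the full ChatSession method.
function resolveModelSketch(opts: {
  requestedModelId?: string;
  paymentEnabled: boolean;
  isUserAIPro: boolean;
  defaultModel: string;
  isPro: (id?: string) => boolean;
  normalize: (id?: string) => string; // assumed: returns defaultModel when id is undefined
}): string {
  const { requestedModelId, paymentEnabled, isUserAIPro, defaultModel, isPro, normalize } = opts;
  // The only remaining downgrade: a non-pro user requesting a pro model gets the default.
  if (paymentEnabled && !isUserAIPro && isPro(requestedModelId)) {
    return defaultModel;
  }
  // Otherwise honor the request; with no request, this resolves to the default model.
  return normalize(requestedModelId);
}
```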

@@ -226,6 +226,9 @@ type Copilot {
   contexts(contextId: String, sessionId: String): [CopilotContext!]!
   histories(docId: String, options: QueryChatHistoriesInput): [CopilotHistories!]! @deprecated(reason: "use `chats` instead")
+  """List available models for a prompt, with human-readable names"""
+  models(promptName: String!): CopilotModelsType!
   """Get the quota of the user in the workspace"""
   quota: CopilotQuota!
@@ -360,6 +363,17 @@ type CopilotMessageNotFoundDataType {
   messageId: String!
 }
+type CopilotModelType {
+  id: String!
+  name: String!
+}
+type CopilotModelsType {
+  defaultModel: String!
+  optionalModels: [CopilotModelType!]!
+  proModels: [CopilotModelType!]!
+}
 input CopilotPromptConfigInput {
   frequencyPenalty: Float
   presencePenalty: Float
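
The last hunk is cut off before the prompt-config change mentioned in the release notes (temperature and topP). A hedged sketch of the resulting client-side input shape: only `frequencyPenalty` and `presencePenalty` are visible above, so the two new fields and their optionality are assumptions drawn from the notes rather than from the visible diff.

```ts
// Assumed client-side mirror of CopilotPromptConfigInput after this PR.
// temperature and topP are taken from the release notes, not the visible hunk.
interface CopilotPromptConfigInput {
  frequencyPenalty?: number;
  presencePenalty?: number;
  temperature?: number; // assumed: optional sampling temperature
  topP?: number; // assumed: optional nucleus-sampling cutoff
}
```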