chore(server): update config naming (#13419)

<!-- This is an auto-generated comment: release notes by coderabbit.ai -->
## Summary by CodeRabbit

* **New Features**
  * Updated scenario names and options for Copilot, including new and renamed scenarios such as "audio_transcribing," "complex_text_generation," "quick_decision_making," "quick_text_generation," and "polish_and_summarize."
  * Enhanced support for customizing and overriding default model assignments in Copilot scenarios (see the configuration sketch after these notes).

* **Bug Fixes**
  * Improved consistency and clarity in scenario configuration and prompt selection.

* **Documentation**
  * Updated descriptions in configuration interfaces to better explain the ability to use custom models and override defaults.
<!-- end of auto-generated comment: release notes by coderabbit.ai -->
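
For reference, a minimal sketch of the renamed scenario configuration after this change, with the old key names noted in comments. The keys, flag, and model IDs are taken from the diff below; wrapping them in a standalone constant is illustrative only, not the server's actual wiring.

```ts
// Illustrative only: the scenario keys and `override_enabled` flag come from
// this commit's diff; the standalone constant is an assumption for the sketch.
const copilotScenarioConfig = {
  override_enabled: true, // formerly `enabled`
  scenarios: {
    audio_transcribing: 'gemini-2.5-flash', // formerly `audio`
    chat: 'claude-sonnet-4@20250514',
    embedding: 'gemini-embedding-001',
    image: 'gpt-image-1',
    rerank: 'gpt-4.1',
    coding: 'claude-sonnet-4@20250514',
    complex_text_generation: 'gpt-4o-2024-08-06', // formerly `brainstorm`
    quick_decision_making: 'gpt-4.1-mini', // formerly `quick_decision`
    quick_text_generation: 'gemini-2.5-flash', // formerly `quick_written`
    polish_and_summarize: 'gemini-2.5-flash', // formerly `summary_inspection`
  },
};

console.log(Object.keys(copilotScenarioConfig.scenarios));
```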
Author: DarkSky
Date: 2025-08-05 18:26:18 +08:00
Committed by: GitHub
Parent: d398aa9a71
Commit: 46acf9aa4f
8 changed files with 47 additions and 46 deletions

View File

@@ -669,20 +669,20 @@
   },
   "scenarios": {
     "type": "object",
-    "description": "The models used in the scene for the copilot, will use this config if enabled.\n@default {\"enabled\":false,\"scenarios\":{\"audio\":\"gemini-2.5-flash\",\"chat\":\"claude-sonnet-4@20250514\",\"embedding\":\"gemini-embedding-001\",\"image\":\"gpt-image-1\",\"rerank\":\"gpt-4.1\",\"brainstorm\":\"gpt-4o-2024-08-06\",\"coding\":\"claude-sonnet-4@20250514\",\"quick_decision\":\"gpt-4.1-mini\",\"quick_written\":\"gemini-2.5-flash\",\"summary_inspection\":\"gemini-2.5-flash\"}}",
+    "description": "Use custom models in scenarios and override default settings.\n@default {\"override_enabled\":false,\"scenarios\":{\"audio_transcribing\":\"gemini-2.5-flash\",\"chat\":\"claude-sonnet-4@20250514\",\"embedding\":\"gemini-embedding-001\",\"image\":\"gpt-image-1\",\"rerank\":\"gpt-4.1\",\"coding\":\"claude-sonnet-4@20250514\",\"complex_text_generation\":\"gpt-4o-2024-08-06\",\"quick_decision_making\":\"gpt-4.1-mini\",\"quick_text_generation\":\"gemini-2.5-flash\",\"polish_and_summarize\":\"gemini-2.5-flash\"}}",
     "default": {
-      "enabled": false,
+      "override_enabled": false,
       "scenarios": {
-        "audio": "gemini-2.5-flash",
+        "audio_transcribing": "gemini-2.5-flash",
         "chat": "claude-sonnet-4@20250514",
         "embedding": "gemini-embedding-001",
         "image": "gpt-image-1",
         "rerank": "gpt-4.1",
-        "brainstorm": "gpt-4o-2024-08-06",
         "coding": "claude-sonnet-4@20250514",
-        "quick_decision": "gpt-4.1-mini",
-        "quick_written": "gemini-2.5-flash",
-        "summary_inspection": "gemini-2.5-flash"
+        "complex_text_generation": "gpt-4o-2024-08-06",
+        "quick_decision_making": "gpt-4.1-mini",
+        "quick_text_generation": "gemini-2.5-flash",
+        "polish_and_summarize": "gemini-2.5-flash"
       }
     }
   },

View File

@@ -117,13 +117,13 @@ test.serial.before(async t => {
     value: {
       enabled: true,
       scenarios: {
-        image: 'lcm',
+        image: 'flux-1/schnell',
         rerank: 'gpt-4.1-mini',
-        brainstorm: 'gpt-4.1-mini',
+        complex_text_generation: 'gpt-4.1-mini',
         coding: 'gpt-4.1-mini',
-        quick_decision: 'gpt-4.1-mini',
-        quick_written: 'gpt-4.1-mini',
-        summary_inspection: 'gemini-2.5-flash',
+        quick_decision_making: 'gpt-4.1-mini',
+        quick_text_generation: 'gpt-4.1-mini',
+        polish_and_summarize: 'gemini-2.5-flash',
       },
     },
   },

View File

@@ -46,20 +46,20 @@ defineModuleConfig('copilot', {
     default: false,
   },
   scenarios: {
-    desc: 'The models used in the scene for the copilot, will use this config if enabled.',
+    desc: 'Use custom models in scenarios and override default settings.',
     default: {
-      enabled: false,
+      override_enabled: false,
       scenarios: {
-        audio: 'gemini-2.5-flash',
+        audio_transcribing: 'gemini-2.5-flash',
         chat: 'claude-sonnet-4@20250514',
         embedding: 'gemini-embedding-001',
         image: 'gpt-image-1',
         rerank: 'gpt-4.1',
-        brainstorm: 'gpt-4o-2024-08-06',
         coding: 'claude-sonnet-4@20250514',
-        quick_decision: 'gpt-4.1-mini',
-        quick_written: 'gemini-2.5-flash',
-        summary_inspection: 'gemini-2.5-flash',
+        complex_text_generation: 'gpt-4o-2024-08-06',
+        quick_decision_making: 'gpt-4.1-mini',
+        quick_text_generation: 'gemini-2.5-flash',
+        polish_and_summarize: 'gemini-2.5-flash',
       },
     },
   },

View File

@@ -38,7 +38,7 @@ class ProductionEmbeddingClient extends EmbeddingClient {
   override async configured(): Promise<boolean> {
     const embedding = await this.providerFactory.getProvider({
-      modelId: this.config.copilot?.scenarios?.enabled
+      modelId: this.config.copilot?.scenarios?.override_enabled
         ? this.config.copilot.scenarios.scenarios?.embedding || EMBEDDING_MODEL
         : EMBEDDING_MODEL,
       outputType: ModelOutputType.Embedding,

View File

@@ -19,24 +19,9 @@ type Prompt = Omit<
   config?: PromptConfig;
 };
-export const Scenario: Record<string, string[]> = {
-  audio: ['Transcript audio'],
-  brainstorm: [
-    'Brainstorm mindmap',
-    'Create a presentation',
-    'Expand mind map',
-    'workflow:brainstorm:step2',
-    'workflow:presentation:step2',
-    'workflow:presentation:step4',
-  ],
+export const Scenario = {
+  audio_transcribing: ['Transcript audio'],
   chat: ['Chat With AFFiNE AI'],
-  coding: [
-    'Apply Updates',
-    'Code Artifact',
-    'Make it real',
-    'Make it real with text',
-    'Section Edit',
-  ],
   // no prompt needed, just a placeholder
   embedding: [],
   image: [
@@ -49,7 +34,23 @@ export const Scenario: Record<string, string[]> = {
     'Remove background',
     'Upscale image',
   ],
-  quick_decision: [
+  rerank: ['Rerank results'],
+  coding: [
+    'Apply Updates',
+    'Code Artifact',
+    'Make it real',
+    'Make it real with text',
+    'Section Edit',
+  ],
+  complex_text_generation: [
+    'Brainstorm mindmap',
+    'Create a presentation',
+    'Expand mind map',
+    'workflow:brainstorm:step2',
+    'workflow:presentation:step2',
+    'workflow:presentation:step4',
+  ],
+  quick_decision_making: [
     'Create headings',
     'Generate a caption',
     'Translate to',
@@ -60,7 +61,7 @@ export const Scenario: Record<string, string[]> = {
     'workflow:image-pixel:step2',
     'workflow:image-sketch:step2',
   ],
-  quick_written: [
+  quick_text_generation: [
     'Brainstorm ideas about this',
     'Continue writing',
     'Explain this code',
@@ -73,8 +74,7 @@ export const Scenario: Record<string, string[]> = {
     'Write an article about this',
     'Write outline',
   ],
-  rerank: ['Rerank results'],
-  summary_inspection: [
+  polish_and_summarize: [
     'Change tone to',
     'Check code error',
     'Conversation Summary',
@@ -92,7 +92,7 @@ export const Scenario: Record<string, string[]> = {
 };
 export type CopilotPromptScenario = {
-  enabled?: boolean;
+  override_enabled?: boolean;
   scenarios?: Partial<Record<keyof typeof Scenario, string>>;
 };

View File

@@ -45,10 +45,11 @@ export class PromptService implements OnApplicationBootstrap {
   }
   protected async setup(scenarios?: CopilotPromptScenario) {
-    if (!!scenarios && scenarios.enabled && scenarios.scenarios) {
+    if (!!scenarios && scenarios.override_enabled && scenarios.scenarios) {
       this.logger.log('Updating prompts based on scenarios...');
       for (const [scenario, model] of Object.entries(scenarios.scenarios)) {
-        const promptNames = Scenario[scenario];
+        const promptNames = Scenario[scenario as keyof typeof Scenario] || [];
+        if (!promptNames.length) continue;
         for (const name of promptNames) {
           const prompt = prompts.find(p => p.name === name);
           if (prompt && model) {
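
The `|| []` fallback above lets an unrecognized scenario key be skipped instead of crashing prompt setup. A minimal sketch of that lookup, using a trimmed-down Scenario table copied from this commit; the `promptNamesFor` helper is hypothetical, not part of the codebase:

```ts
// Trimmed-down copy of the Scenario table from this commit; only two entries
// are shown to keep the sketch short.
const Scenario = {
  audio_transcribing: ['Transcript audio'],
  polish_and_summarize: ['Change tone to', 'Check code error', 'Conversation Summary'],
} as const;

// Hypothetical helper mirroring the lookup in PromptService.setup():
// unknown keys resolve to an empty list, so the caller can safely skip them.
function promptNamesFor(scenario: string): readonly string[] {
  return Scenario[scenario as keyof typeof Scenario] || [];
}

console.log(promptNamesFor('audio_transcribing')); // ['Transcript audio']
console.log(promptNamesFor('no_such_scenario')); // []
```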

View File

@@ -75,7 +75,7 @@ export class FalProvider extends CopilotProvider<FalConfig> {
   override readonly models = [
     {
-      id: 'lcm',
+      id: 'flux-1/schnell',
       capabilities: [
         {
           input: [ModelInputType.Text],

View File

@@ -258,7 +258,7 @@
   },
   "scenarios": {
     "type": "Object",
-    "desc": "The models used in the scene for the copilot, will use this config if enabled."
+    "desc": "Use custom models in scenarios and override default settings."
   },
   "providers.openai": {
     "type": "Object",