Files
AFFiNE-Mirror/packages/backend/server/src/plugins/copilot/providers/perplexity.ts
DarkSky 29a27b561b feat(server): migrate copilot to native (#14620)
#### PR Dependency Tree


* **PR #14620** 👈

This tree was auto-generated by
[Charcoal](https://github.com/danerwilliams/charcoal)

<!-- This is an auto-generated comment: release notes by coderabbit.ai
-->
## Summary by CodeRabbit

* **New Features**
* Native LLM workflows: structured outputs, embeddings, and reranking
plus richer multimodal attachments (images, audio, files) and improved
remote-attachment inlining.

* **Refactor**
* Tooling API unified behind a local tool-definition helper;
provider/adapters reorganized to route through native dispatch paths.

* **Chores**
* Dependency updates, removed legacy Google SDK integrations, and
increased front-end memory allocation.

* **Tests**
* Expanded end-to-end and streaming tests exercising native provider
flows, attachments, and rerank/structured scenarios.
<!-- end of auto-generated comment: release notes by coderabbit.ai -->
2026-03-11 13:55:35 +08:00

201 lines
5.0 KiB
TypeScript

import { CopilotProviderSideError, metrics } from '../../../base';
import {
llmDispatchStream,
type NativeLlmBackendConfig,
type NativeLlmRequest,
} from '../../../native';
import type { NodeTextMiddleware } from '../config';
import type { CopilotToolSet } from '../tools';
import { buildNativeRequest, NativeProviderAdapter } from './native';
import { CopilotProvider } from './provider';
import {
CopilotChatOptions,
CopilotProviderType,
ModelConditions,
ModelInputType,
ModelOutputType,
PromptMessage,
} from './types';
/**
 * Runtime configuration for the Perplexity copilot provider.
 */
export type PerplexityConfig = {
  // API key used as the bearer token; the provider reports itself as
  // unconfigured when this is empty (see PerplexityProvider.configured()).
  apiKey: string;
  // Optional base-URL override; defaults to https://api.perplexity.ai.
  // A trailing `/v1` is stripped before use.
  endpoint?: string;
};
/**
 * Copilot provider backed by the Perplexity Sonar model family.
 *
 * Requests are routed through the native LLM dispatcher using the
 * OpenAI-compatible chat backend (`'openai_chat'`); Perplexity citations are
 * always requested via `include: ['citations']`.
 */
export class PerplexityProvider extends CopilotProvider<PerplexityConfig> {
  readonly type = CopilotProviderType.Perplexity;

  // All Sonar variants are text-in/text-out; plain `sonar` is the default
  // model for text output.
  readonly models = [
    {
      name: 'Sonar',
      id: 'sonar',
      capabilities: [
        {
          input: [ModelInputType.Text],
          output: [ModelOutputType.Text],
          defaultForOutputType: true,
        },
      ],
    },
    {
      name: 'Sonar Pro',
      id: 'sonar-pro',
      capabilities: [
        {
          input: [ModelInputType.Text],
          output: [ModelOutputType.Text],
        },
      ],
    },
    {
      name: 'Sonar Reasoning',
      id: 'sonar-reasoning',
      capabilities: [
        {
          input: [ModelInputType.Text],
          output: [ModelOutputType.Text],
        },
      ],
    },
    {
      name: 'Sonar Reasoning Pro',
      id: 'sonar-reasoning-pro',
      capabilities: [
        {
          input: [ModelInputType.Text],
          output: [ModelOutputType.Text],
        },
      ],
    },
  ];

  /** The provider is usable as soon as an API key is configured. */
  override configured(): boolean {
    return !!this.config.apiKey;
  }

  protected override setup() {
    super.setup();
  }

  /**
   * Builds the native backend config. A trailing `/v1` is stripped from the
   * endpoint because the native OpenAI-compatible backend appends the API
   * version path itself — presumably; TODO confirm against the dispatcher.
   */
  private createNativeConfig(): NativeLlmBackendConfig {
    const baseUrl = this.config.endpoint || 'https://api.perplexity.ai';
    return {
      base_url: baseUrl.replace(/\/v1\/?$/, ''),
      auth_token: this.config.apiKey,
    };
  }

  /**
   * Creates an adapter that streams via the native dispatcher using the
   * OpenAI-compatible chat protocol.
   */
  private createNativeAdapter(
    tools: CopilotToolSet,
    nodeTextMiddleware?: NodeTextMiddleware[]
  ) {
    return new NativeProviderAdapter(
      (request: NativeLlmRequest, signal?: AbortSignal) =>
        llmDispatchStream(
          'openai_chat',
          this.createNativeConfig(),
          request,
          signal
        ),
      tools,
      this.MAX_STEPS,
      { nodeTextMiddleware }
    );
  }

  /**
   * Shared request/adapter preparation for {@link text} and
   * {@link streamText}: resolves tools and middleware, builds the native
   * request (citations included), and wires up the adapter.
   */
  private async prepareTextCall(
    modelId: string,
    messages: PromptMessage[],
    options: CopilotChatOptions
  ) {
    const tools = await this.getTools(options, modelId);
    const middleware = this.getActiveProviderMiddleware();
    const { request } = await buildNativeRequest({
      model: modelId,
      messages,
      options,
      tools,
      withAttachment: false,
      include: ['citations'],
      middleware,
    });
    const adapter = this.createNativeAdapter(tools, middleware.node?.text);
    return { adapter, request };
  }

  /**
   * Runs a non-streaming chat completion and returns the full response text.
   *
   * @throws CopilotProviderSideError on any provider-side failure.
   */
  async text(
    cond: ModelConditions,
    messages: PromptMessage[],
    options: CopilotChatOptions = {}
  ): Promise<string> {
    const fullCond = { ...cond, outputType: ModelOutputType.Text };
    const normalizedCond = await this.checkParams({
      cond: fullCond,
      messages,
      options,
      withAttachment: false,
    });
    const model = this.selectModel(normalizedCond);
    try {
      metrics.ai.counter('chat_text_calls').add(1, this.metricLabels(model.id));
      const { adapter, request } = await this.prepareTextCall(
        model.id,
        messages,
        options
      );
      return await adapter.text(request, options.signal, messages);
    } catch (e: unknown) {
      metrics.ai
        .counter('chat_text_errors')
        .add(1, this.metricLabels(model.id));
      throw this.handleError(e);
    }
  }

  /**
   * Runs a streaming chat completion, yielding response chunks as they
   * arrive.
   *
   * @throws CopilotProviderSideError on any provider-side failure.
   */
  async *streamText(
    cond: ModelConditions,
    messages: PromptMessage[],
    options: CopilotChatOptions = {}
  ): AsyncIterable<string> {
    const fullCond = { ...cond, outputType: ModelOutputType.Text };
    const normalizedCond = await this.checkParams({
      cond: fullCond,
      messages,
      options,
      withAttachment: false,
    });
    const model = this.selectModel(normalizedCond);
    try {
      metrics.ai
        .counter('chat_text_stream_calls')
        .add(1, this.metricLabels(model.id));
      const { adapter, request } = await this.prepareTextCall(
        model.id,
        messages,
        options
      );
      yield* adapter.streamText(request, options.signal, messages);
    } catch (e: unknown) {
      metrics.ai
        .counter('chat_text_stream_errors')
        .add(1, this.metricLabels(model.id));
      throw this.handleError(e);
    }
  }

  /**
   * Normalizes an arbitrary thrown value into a CopilotProviderSideError.
   * Provider-side errors pass through unchanged; anything else is wrapped as
   * an `unexpected_response` with whatever message can be extracted.
   */
  private handleError(e: unknown) {
    if (e instanceof CopilotProviderSideError) {
      return e;
    }
    // Extract a message from Errors or error-shaped objects without
    // assuming `any` — mirrors the original `e?.message` behavior.
    let message: string | undefined;
    if (e instanceof Error) {
      message = e.message;
    } else if (typeof e === 'object' && e !== null && 'message' in e) {
      const m = (e as { message?: unknown }).message;
      if (typeof m === 'string') {
        message = m;
      }
    }
    return new CopilotProviderSideError({
      provider: this.type,
      kind: 'unexpected_response',
      message: message || 'Unexpected perplexity response',
    });
  }
}