feat(server): migrate copilot provider sdk (#11584)

fix AI-15
fix AI-16
This commit is contained in:
darkskygit
2025-04-10 04:14:09 +00:00
parent 0cf8e078e2
commit 5199a74426
12 changed files with 477 additions and 704 deletions

View File

@@ -126,6 +126,8 @@
"is-symbol": "npm:@nolyfill/is-symbol@^1",
"is-weakref": "npm:@nolyfill/is-weakref@^1",
"iterator.prototype": "npm:@nolyfill/iterator.prototype@^1",
"json-stable-stringify": "npm:@nolyfill/json-stable-stringify@^1",
"jsonify": "npm:@nolyfill/jsonify@^1",
"object-is": "npm:@nolyfill/object-is@^1",
"object-keys": "npm:@nolyfill/object-keys@^1",
"object.assign": "npm:@nolyfill/object.assign@^1",

View File

@@ -25,7 +25,9 @@
"postinstall": "prisma generate"
},
"dependencies": {
"@ai-sdk/google": "^1.1.19",
"@ai-sdk/google": "^1.2.10",
"@ai-sdk/openai": "^1.3.9",
"@ai-sdk/perplexity": "^1.1.6",
"@apollo/server": "^4.11.3",
"@aws-sdk/client-s3": "^3.779.0",
"@aws-sdk/s3-request-presigner": "^3.779.0",
@@ -73,7 +75,6 @@
"date-fns": "^4.0.0",
"dotenv": "^16.4.7",
"eventemitter2": "^6.4.9",
"eventsource-parser": "^3.0.0",
"express": "^5.0.1",
"fast-xml-parser": "^5.0.0",
"get-stream": "^9.0.1",
@@ -95,7 +96,6 @@
"nestjs-cls": "^5.0.0",
"nodemailer": "^6.9.16",
"on-headers": "^1.0.2",
"openai": "^4.83.0",
"piscina": "^5.0.0-alpha.0",
"prisma": "^5.22.0",
"react": "19.1.0",

View File

@@ -24,7 +24,7 @@ import {
CopilotProviderType,
OpenAIProvider,
} from '../plugins/copilot/providers';
import { CitationParser } from '../plugins/copilot/providers/perplexity';
import { CitationParser } from '../plugins/copilot/providers/utils';
import { ChatSessionService } from '../plugins/copilot/session';
import { CopilotStorage } from '../plugins/copilot/storage';
import { CopilotTranscriptionService } from '../plugins/copilot/transcript';

View File

@@ -1,27 +1,39 @@
import OpenAI from 'openai';
import {
createOpenAI,
type OpenAIProvider as VercelOpenAIProvider,
} from '@ai-sdk/openai';
import { embedMany } from 'ai';
import { Embedding } from '../../../models';
import { OpenAIConfig } from '../providers/openai';
import { EmbeddingClient } from './types';
export class OpenAIEmbeddingClient extends EmbeddingClient {
constructor(private readonly client: OpenAI) {
readonly #instance: VercelOpenAIProvider;
constructor(config: OpenAIConfig) {
super();
this.#instance = createOpenAI({
apiKey: config.apiKey,
baseURL: config.baseUrl,
});
}
async getEmbeddings(
input: string[],
signal?: AbortSignal
): Promise<Embedding[]> {
const resp = await this.client.embeddings.create(
{
input,
model: 'text-embedding-3-large',
dimensions: 1024,
encoding_format: 'float',
},
{ signal }
);
return resp.data.map(e => ({ ...e, content: input[e.index] }));
async getEmbeddings(input: string[]): Promise<Embedding[]> {
const modelInstance = this.#instance.embedding('text-embedding-3-large', {
dimensions: 1024,
});
const { embeddings } = await embedMany({
model: modelInstance,
values: input,
});
return Array.from(embeddings.entries()).map(([index, embedding]) => ({
index,
embedding,
content: input[index],
}));
}
}

View File

@@ -1,5 +1,4 @@
import { Injectable } from '@nestjs/common';
import OpenAI from 'openai';
import {
AFFiNELogger,
@@ -49,7 +48,7 @@ export class CopilotContextDocJob {
this.supportEmbedding =
await this.models.copilotContext.checkEmbeddingAvailable();
this.client = new OpenAIEmbeddingClient(
new OpenAI(this.config.copilot.providers.openai)
this.config.copilot.providers.openai
);
}

View File

@@ -1,5 +1,4 @@
import { Injectable, OnApplicationBootstrap } from '@nestjs/common';
import OpenAI from 'openai';
import {
Cache,
@@ -46,7 +45,7 @@ export class CopilotContextService implements OnApplicationBootstrap {
private setup() {
const configure = this.config.copilot.providers.openai;
if (configure.apiKey) {
this.client = new OpenAIEmbeddingClient(new OpenAI(configure));
this.client = new OpenAIEmbeddingClient(configure);
}
}

View File

@@ -4,14 +4,10 @@ import {
} from '@ai-sdk/google';
import {
AISDKError,
type CoreAssistantMessage,
type CoreUserMessage,
FilePart,
generateObject,
generateText,
JSONParseError,
streamText,
TextPart,
} from 'ai';
import {
@@ -29,35 +25,15 @@ import {
CopilotTextToTextProvider,
PromptMessage,
} from './types';
import { chatToGPTMessage } from './utils';
export const DEFAULT_DIMENSIONS = 256;
const SIMPLE_IMAGE_URL_REGEX = /^(https?:\/\/|data:image\/)/;
const FORMAT_INFER_MAP: Record<string, string> = {
pdf: 'application/pdf',
mp3: 'audio/mpeg',
wav: 'audio/wav',
png: 'image/png',
jpeg: 'image/jpeg',
jpg: 'image/jpeg',
webp: 'image/webp',
txt: 'text/plain',
md: 'text/plain',
mov: 'video/mov',
mpeg: 'video/mpeg',
mp4: 'video/mp4',
avi: 'video/avi',
wmv: 'video/wmv',
flv: 'video/flv',
};
export type GeminiConfig = {
apiKey: string;
baseUrl?: string;
};
type ChatMessage = CoreUserMessage | CoreAssistantMessage;
export class GeminiProvider
extends CopilotProvider<GeminiConfig>
implements CopilotTextToTextProvider
@@ -86,67 +62,6 @@ export class GeminiProvider
});
}
private inferMimeType(url: string) {
if (url.startsWith('data:')) {
return url.split(';')[0].split(':')[1];
}
const extension = url.split('.').pop();
if (extension) {
return FORMAT_INFER_MAP[extension];
}
return undefined;
}
protected async chatToGPTMessage(
messages: PromptMessage[]
): Promise<[string | undefined, ChatMessage[], any]> {
const system =
messages[0]?.role === 'system' ? messages.shift() : undefined;
const schema = system?.params?.schema;
// filter redundant fields
const msgs: ChatMessage[] = [];
for (let { role, content, attachments, params } of messages.filter(
m => m.role !== 'system'
)) {
content = content.trim();
role = role as 'user' | 'assistant';
const mimetype = params?.mimetype;
if (Array.isArray(attachments)) {
const contents: (TextPart | FilePart)[] = [];
if (content.length) {
contents.push({
type: 'text',
text: content,
});
}
for (const url of attachments) {
if (SIMPLE_IMAGE_URL_REGEX.test(url)) {
const mimeType =
typeof mimetype === 'string' ? mimetype : this.inferMimeType(url);
if (mimeType) {
const data = url.startsWith('data:')
? await fetch(url).then(r => r.arrayBuffer())
: new URL(url);
contents.push({
type: 'file' as const,
data,
mimeType,
});
}
}
}
msgs.push({ role, content: contents } as ChatMessage);
} else {
msgs.push({ role, content });
}
}
return [system?.content, msgs, schema];
}
protected async checkParams({
messages,
embeddings,
@@ -223,7 +138,7 @@ export class GeminiProvider
try {
metrics.ai.counter('chat_text_calls').add(1, { model });
const [system, msgs, schema] = await this.chatToGPTMessage(messages);
const [system, msgs, schema] = await chatToGPTMessage(messages);
const modelInstance = this.#instance(model, {
structuredOutputs: Boolean(options.jsonMode),
@@ -274,7 +189,7 @@ export class GeminiProvider
try {
metrics.ai.counter('chat_text_stream_calls').add(1, { model });
const [system, msgs] = await this.chatToGPTMessage(messages);
const [system, msgs] = await chatToGPTMessage(messages);
const { textStream } = streamText({
model: this.#instance(model),

View File

@@ -1,4 +1,15 @@
import { APIError, BadRequestError, ClientOptions, OpenAI } from 'openai';
import {
createOpenAI,
type OpenAIProvider as VercelOpenAIProvider,
} from '@ai-sdk/openai';
import {
AISDKError,
embedMany,
experimental_generateImage as generateImage,
generateObject,
generateText,
streamText,
} from 'ai';
import {
CopilotPromptInvalid,
@@ -20,12 +31,14 @@ import {
CopilotTextToTextProvider,
PromptMessage,
} from './types';
import { chatToGPTMessage } from './utils';
export const DEFAULT_DIMENSIONS = 256;
const SIMPLE_IMAGE_URL_REGEX = /^(https?:\/\/|data:image\/)/;
export type OpenAIConfig = ClientOptions;
export type OpenAIConfig = {
apiKey: string;
baseUrl?: string;
};
export class OpenAIProvider
extends CopilotProvider<OpenAIConfig>
@@ -62,8 +75,7 @@ export class OpenAIProvider
'dall-e-3',
];
#existsModels: string[] = [];
#instance!: OpenAI;
#instance!: VercelOpenAIProvider;
override configured(): boolean {
return !!this.config.apiKey;
@@ -71,55 +83,9 @@ export class OpenAIProvider
protected override setup() {
super.setup();
this.#instance = new OpenAI(this.config);
}
override async isModelAvailable(model: string): Promise<boolean> {
const knownModels = this.models.includes(model);
if (knownModels) return true;
if (!this.#existsModels) {
try {
this.#existsModels = await this.#instance.models
.list()
.then(({ data }) => data.map(m => m.id));
} catch (e: any) {
this.logger.error('Failed to fetch online model list', e.stack);
}
}
return !!this.#existsModels?.includes(model);
}
protected chatToGPTMessage(
messages: PromptMessage[]
): OpenAI.Chat.Completions.ChatCompletionMessageParam[] {
// filter redundant fields
return messages.map(({ role, content, attachments }) => {
content = content.trim();
if (Array.isArray(attachments) && attachments.length) {
const contents: OpenAI.Chat.Completions.ChatCompletionContentPart[] =
[];
if (content.length) {
contents.push({
type: 'text',
text: content,
});
}
contents.push(
...(attachments
.filter(url => SIMPLE_IMAGE_URL_REGEX.test(url))
.map(url => ({
type: 'image_url',
image_url: { url, detail: 'high' },
})) as OpenAI.Chat.Completions.ChatCompletionContentPartImage[])
);
return {
role,
content: contents,
} as OpenAI.Chat.Completions.ChatCompletionMessageParam;
} else {
return { role, content };
}
this.#instance = createOpenAI({
apiKey: this.config.apiKey,
baseURL: this.config.baseUrl,
});
}
@@ -186,11 +152,8 @@ export class OpenAIProvider
) {
if (e instanceof UserFriendlyError) {
return e;
} else if (e instanceof APIError) {
if (
e instanceof BadRequestError &&
(e.message.includes('safety') || e.message.includes('risk'))
) {
} else if (e instanceof AISDKError) {
if (e.message.includes('safety') || e.message.includes('risk')) {
metrics.ai
.counter('chat_text_risk_errors')
.add(1, { model, user: options.user || undefined });
@@ -198,7 +161,7 @@ export class OpenAIProvider
return new CopilotProviderSideError({
provider: this.type,
kind: e.type || 'unknown',
kind: e.name || 'unknown',
message: e.message,
});
} else {
@@ -217,26 +180,42 @@ export class OpenAIProvider
options: CopilotChatOptions = {}
): Promise<string> {
await this.checkParams({ messages, model, options });
console.log('messages', messages);
try {
metrics.ai.counter('chat_text_calls').add(1, { model });
const result = await this.#instance.chat.completions.create(
{
messages: this.chatToGPTMessage(messages),
model: model,
temperature: options.temperature || 0,
max_completion_tokens: options.maxTokens || 4096,
response_format: {
type: options.jsonMode ? 'json_object' : 'text',
},
user: options.user,
},
{ signal: options.signal }
);
const { content } = result.choices[0].message;
if (!content) throw new Error('Failed to generate text');
return content.trim();
const [system, msgs, schema] = await chatToGPTMessage(messages);
const modelInstance = this.#instance(model, {
structuredOutputs: Boolean(options.jsonMode),
user: options.user,
});
const commonParams = {
model: modelInstance,
system,
messages: msgs,
temperature: options.temperature || 0,
maxTokens: options.maxTokens || 4096,
abortSignal: options.signal,
};
const { text } = schema
? await generateObject({
...commonParams,
schema,
}).then(r => ({ text: JSON.stringify(r.object) }))
: await generateText({
...commonParams,
providerOptions: {
openai: options.user ? { user: options.user } : {},
},
});
return text.trim();
} catch (e: any) {
console.log('error', e);
metrics.ai.counter('chat_text_errors').add(1, { model });
throw this.handleError(e, model, options);
}
@@ -251,34 +230,30 @@ export class OpenAIProvider
try {
metrics.ai.counter('chat_text_stream_calls').add(1, { model });
const result = await this.#instance.chat.completions.create(
{
stream: true,
messages: this.chatToGPTMessage(messages),
model: model,
frequency_penalty: options.frequencyPenalty || 0,
presence_penalty: options.presencePenalty || 0,
temperature: options.temperature || 0.5,
max_completion_tokens: options.maxTokens || 4096,
response_format: {
type: options.jsonMode ? 'json_object' : 'text',
},
user: options.user,
},
{
signal: options.signal,
}
);
for await (const message of result) {
if (!Array.isArray(message.choices) || !message.choices.length) {
continue;
}
const content = message.choices[0].delta.content;
if (content) {
yield content;
const [system, msgs] = await chatToGPTMessage(messages);
const modelInstance = this.#instance(model, {
structuredOutputs: Boolean(options.jsonMode),
user: options.user,
});
const { textStream } = streamText({
model: modelInstance,
system,
messages: msgs,
frequencyPenalty: options.frequencyPenalty || 0,
presencePenalty: options.presencePenalty || 0,
temperature: options.temperature || 0,
maxTokens: options.maxTokens || 4096,
abortSignal: options.signal,
});
for await (const message of textStream) {
if (message) {
yield message;
if (options.signal?.aborted) {
result.controller.abort();
await textStream.cancel();
break;
}
}
@@ -301,15 +276,18 @@ export class OpenAIProvider
try {
metrics.ai.counter('generate_embedding_calls').add(1, { model });
const result = await this.#instance.embeddings.create({
model: model,
input: messages,
const modelInstance = this.#instance.embedding(model, {
dimensions: options.dimensions || DEFAULT_DIMENSIONS,
user: options.user,
});
return result.data
.map(e => e?.embedding)
.filter(v => v && Array.isArray(v));
const { embeddings } = await embedMany({
model: modelInstance,
values: messages,
});
return embeddings.filter(v => v && Array.isArray(v));
} catch (e: any) {
metrics.ai.counter('generate_embedding_errors').add(1, { model });
throw this.handleError(e, model, options);
@@ -327,18 +305,16 @@ export class OpenAIProvider
try {
metrics.ai.counter('generate_images_calls').add(1, { model });
const result = await this.#instance.images.generate(
{
prompt,
model,
response_format: 'url',
user: options.user,
},
{ signal: options.signal }
);
return result.data
.map(image => image.url)
const modelInstance = this.#instance.image(model);
const result = await generateImage({
model: modelInstance,
prompt,
});
return result.images
.map(image => image.base64)
.filter((v): v is string => !!v);
} catch (e: any) {
metrics.ai.counter('generate_images_errors').add(1, { model });

View File

@@ -1,4 +1,8 @@
import { EventSourceParserStream } from 'eventsource-parser/stream';
import {
createPerplexity,
type PerplexityProvider as VercelPerplexityProvider,
} from '@ai-sdk/perplexity';
import { generateText, streamText } from 'ai';
import { z } from 'zod';
import {
@@ -14,6 +18,7 @@ import {
CopilotTextToTextProvider,
PromptMessage,
} from './types';
import { chatToGPTMessage, CitationParser } from './utils';
export type PerplexityConfig = {
apiKey: string;
@@ -39,130 +44,8 @@ const PerplexityErrorSchema = z.union([
}),
]);
const PerplexityDataSchema = z.object({
citations: z.array(z.string()),
choices: z.array(
z.object({
message: z.object({
content: z.string(),
role: z.literal('assistant'),
}),
delta: z.object({
content: z.string(),
role: z.literal('assistant'),
}),
finish_reason: z.union([z.literal('stop'), z.literal(null)]),
})
),
});
const PerplexitySchema = z.union([PerplexityDataSchema, PerplexityErrorSchema]);
type PerplexityError = z.infer<typeof PerplexityErrorSchema>;
export class CitationParser {
private readonly SQUARE_BRACKET_OPEN = '[';
private readonly SQUARE_BRACKET_CLOSE = ']';
private readonly PARENTHESES_OPEN = '(';
private startToken: string[] = [];
private endToken: string[] = [];
private numberToken: string[] = [];
private citations: string[] = [];
public parse(content: string, citations: string[]) {
this.citations = citations;
let result = '';
const contentArray = content.split('');
for (const [index, char] of contentArray.entries()) {
if (char === this.SQUARE_BRACKET_OPEN) {
if (this.numberToken.length === 0) {
this.startToken.push(char);
} else {
result += this.flush() + char;
}
continue;
}
if (char === this.SQUARE_BRACKET_CLOSE) {
this.endToken.push(char);
if (this.startToken.length === this.endToken.length) {
const cIndex = Number(this.numberToken.join('').trim());
if (
cIndex > 0 &&
cIndex <= citations.length &&
contentArray[index + 1] !== this.PARENTHESES_OPEN
) {
const content = `[^${cIndex}]`;
result += content;
this.resetToken();
} else {
result += this.flush();
}
} else if (this.startToken.length < this.endToken.length) {
result += this.flush();
}
continue;
}
if (this.isNumeric(char)) {
if (this.startToken.length > 0) {
this.numberToken.push(char);
} else {
result += this.flush() + char;
}
continue;
}
if (this.startToken.length > 0) {
result += this.flush() + char;
} else {
result += char;
}
}
return result;
}
public end() {
return this.flush() + '\n' + this.getFootnotes();
}
private flush() {
const content = this.getTokenContent();
this.resetToken();
return content;
}
private getFootnotes() {
const footnotes = this.citations.map((citation, index) => {
return `[^${index + 1}]: {"type":"url","url":"${encodeURIComponent(
citation
)}"}`;
});
return footnotes.join('\n');
}
private getTokenContent() {
return this.startToken.concat(this.numberToken, this.endToken).join('');
}
private resetToken() {
this.startToken = [];
this.endToken = [];
this.numberToken = [];
}
private isNumeric(str: string) {
return !isNaN(Number(str)) && str.trim() !== '';
}
}
export class PerplexityProvider
extends CopilotProvider<PerplexityConfig>
implements CopilotTextToTextProvider
@@ -176,10 +59,20 @@ export class PerplexityProvider
'sonar-reasoning-pro',
];
#instance!: VercelPerplexityProvider;
override configured(): boolean {
return !!this.config.apiKey;
}
protected override setup() {
super.setup();
this.#instance = createPerplexity({
apiKey: this.config.apiKey,
baseURL: this.config.endpoint,
});
}
async generateText(
messages: PromptMessage[],
model: string = 'sonar',
@@ -188,38 +81,26 @@ export class PerplexityProvider
await this.checkParams({ messages, model, options });
try {
metrics.ai.counter('chat_text_calls').add(1, { model });
const sMessages = messages
.map(({ content, role }) => ({ content, role }))
.filter(({ content }) => typeof content === 'string');
const params = {
method: 'POST',
headers: {
Authorization: `Bearer ${this.config.apiKey}`,
'Content-Type': 'application/json',
},
body: JSON.stringify({
model,
messages: sMessages,
max_tokens: options.maxTokens || 4096,
}),
};
const response = await fetch(
this.config.endpoint || 'https://api.perplexity.ai/chat/completions',
params
);
const data = PerplexitySchema.parse(await response.json());
if ('detail' in data || 'error' in data) {
throw this.convertError(data);
} else {
const citationParser = new CitationParser();
const { content } = data.choices[0].message;
const { citations } = data;
let result = content.replaceAll(/<\/?think>\n/g, '\n---\n');
result = citationParser.parse(result, citations);
result += citationParser.end();
return result;
}
const [system, msgs] = await chatToGPTMessage(messages);
const modelInstance = this.#instance(model);
const { text, sources } = await generateText({
model: modelInstance,
system,
messages: msgs,
temperature: options.temperature || 0,
maxTokens: options.maxTokens || 4096,
abortSignal: options.signal,
});
const citationParser = new CitationParser();
const citations = sources.map(s => s.url);
let result = text.replaceAll(/<\/?think>\n/g, '\n---\n');
result = citationParser.parse(result, citations);
result += citationParser.end();
return result;
} catch (e: any) {
metrics.ai.counter('chat_text_errors').add(1, { model });
throw this.handleError(e);
@@ -234,69 +115,54 @@ export class PerplexityProvider
await this.checkParams({ messages, model, options });
try {
metrics.ai.counter('chat_text_stream_calls').add(1, { model });
const sMessages = messages
.map(({ content, role }) => ({ content, role }))
.filter(({ content }) => typeof content === 'string');
const params = {
method: 'POST',
headers: {
Authorization: `Bearer ${this.config.apiKey}`,
'Content-Type': 'application/json',
},
body: JSON.stringify({
model,
messages: sMessages,
max_tokens: options.maxTokens || 4096,
stream: true,
}),
};
const response = await fetch(
this.config.endpoint || 'https://api.perplexity.ai/chat/completions',
params
);
const errorHandler = this.convertError;
if (response.ok && response.body) {
const citationParser = new CitationParser();
const eventStream = response.body
.pipeThrough(new TextDecoderStream())
.pipeThrough(new EventSourceParserStream())
.pipeThrough(
new TransformStream({
transform(chunk, controller) {
if (options.signal?.aborted) {
controller.enqueue(null);
return;
}
const json = JSON.parse(chunk.data);
if (json) {
const data = PerplexitySchema.parse(json);
if ('detail' in data || 'error' in data) {
throw errorHandler(data);
}
const { content } = data.choices[0].delta;
const { citations } = data;
let result = content.replaceAll(/<\/?think>\n?/g, '\n---\n');
result = citationParser.parse(result, citations);
controller.enqueue(result);
}
},
flush(controller) {
controller.enqueue(citationParser.end());
controller.enqueue(null);
},
})
);
const [system, msgs] = await chatToGPTMessage(messages);
const reader = eventStream.getReader();
while (true) {
const { done, value } = await reader.read();
if (done) break;
yield value;
const modelInstance = this.#instance(model);
const stream = streamText({
model: modelInstance,
system,
messages: msgs,
temperature: options.temperature || 0,
maxTokens: options.maxTokens || 4096,
abortSignal: options.signal,
});
const citationParser = new CitationParser();
const citations = [];
for await (const chunk of stream.fullStream) {
switch (chunk.type) {
case 'source': {
citations.push(chunk.source.url);
break;
}
case 'text-delta': {
const result = citationParser.parse(
chunk.textDelta.replaceAll(/<\/?think>\n?/g, '\n---\n'),
citations
);
yield result;
break;
}
case 'step-finish': {
const result = citationParser.end();
yield result;
break;
}
case 'error': {
const json =
typeof chunk.error === 'string'
? JSON.parse(chunk.error)
: chunk.error;
if (json && typeof json === 'object') {
const data = PerplexityErrorSchema.parse(json);
if ('detail' in data || 'error' in data) {
throw this.convertError(data);
}
}
}
}
} else {
const result = await this.generateText(messages, model, options);
yield result;
}
} catch (e) {
metrics.ai.counter('chat_text_stream_errors').add(1, { model });

View File

@@ -0,0 +1,201 @@
import {
CoreAssistantMessage,
CoreUserMessage,
FilePart,
ImagePart,
TextPart,
} from 'ai';
import { PromptMessage } from './types';
type ChatMessage = CoreUserMessage | CoreAssistantMessage;
// Attachments must be http(s) URLs or inline data-URI images.
const SIMPLE_IMAGE_URL_REGEX = /^(https?:\/\/|data:image\/)/;

// File-extension → MIME type table used when the prompt supplies no
// explicit mimetype; unknown extensions resolve to undefined.
const FORMAT_INFER_MAP: Record<string, string> = {
  pdf: 'application/pdf',
  mp3: 'audio/mpeg',
  wav: 'audio/wav',
  png: 'image/png',
  jpeg: 'image/jpeg',
  jpg: 'image/jpeg',
  webp: 'image/webp',
  txt: 'text/plain',
  md: 'text/plain',
  mov: 'video/mov',
  mpeg: 'video/mpeg',
  mp4: 'video/mp4',
  avi: 'video/avi',
  wmv: 'video/wmv',
  flv: 'video/flv',
};

/**
 * Best-effort MIME type detection for an attachment URL.
 *
 * For a data URI the type is read straight from the "data:<mime>" header;
 * otherwise the text after the last "." is looked up in FORMAT_INFER_MAP
 * (case-sensitive, lowercase keys). Returns undefined when nothing matches.
 */
function inferMimeType(url: string) {
  if (url.startsWith('data:')) {
    // "data:image/png;base64,..." → "data:image/png" → "image/png"
    const [header] = url.split(';');
    return header.split(':')[1];
  }
  const extension = url.split('.').pop();
  return extension ? FORMAT_INFER_MAP[extension] : undefined;
}
/**
 * Converts AFFiNE prompt messages into the Vercel AI SDK chat format.
 *
 * Returns a tuple of:
 * - the leading system prompt's content (if the first message is a system
 *   message), otherwise undefined;
 * - the user/assistant messages, with attachments expanded into
 *   text/image/file content parts;
 * - the optional structured-output schema carried on the system message's
 *   params (typed `any` to match the SDK's schema parameter).
 *
 * Unlike the previous implementation, this does NOT mutate the caller's
 * `messages` array (the old code removed the system message via `shift()`).
 * All system messages beyond the first are dropped, as before.
 */
export async function chatToGPTMessage(
  messages: PromptMessage[]
): Promise<[string | undefined, ChatMessage[], any]> {
  // Peek at — do not remove — a leading system prompt.
  const system = messages[0]?.role === 'system' ? messages[0] : undefined;
  const schema = system?.params?.schema;

  const msgs: ChatMessage[] = [];
  for (const message of messages) {
    // Every system message is excluded from the chat body.
    if (message.role === 'system') continue;
    const role = message.role as 'user' | 'assistant';
    const content = message.content.trim();
    const mimetype = message.params?.mimetype;
    const { attachments } = message;

    if (Array.isArray(attachments)) {
      const contents: (TextPart | ImagePart | FilePart)[] = [];
      if (content.length) {
        contents.push({ type: 'text', text: content });
      }
      for (const url of attachments) {
        // Only http(s) URLs and data:image URIs are accepted as attachments.
        if (!SIMPLE_IMAGE_URL_REGEX.test(url)) continue;
        // An explicit mimetype in params wins over extension-based inference.
        const mimeType =
          typeof mimetype === 'string' ? mimetype : inferMimeType(url);
        // Attachments with no resolvable MIME type are silently skipped.
        if (!mimeType) continue;
        if (mimeType.startsWith('image/')) {
          contents.push({ type: 'image', image: url, mimeType });
        } else {
          // Inline data URIs are decoded to raw bytes; remote files are
          // passed by URL for the provider to fetch.
          const data = url.startsWith('data:')
            ? await fetch(url).then(r => r.arrayBuffer())
            : new URL(url);
          contents.push({ type: 'file' as const, data, mimeType });
        }
      }
      msgs.push({ role, content: contents } as ChatMessage);
    } else {
      msgs.push({ role, content });
    }
  }

  return [system?.content, msgs, schema];
}
/**
 * Incrementally rewrites Perplexity-style numeric citations such as "[3]"
 * into markdown footnote references ("[^3]") and emits matching footnote
 * definitions when the stream ends.
 *
 * `parse()` may be called once per streamed chunk: the bracket/digit buffers
 * below persist between calls, so a citation split across chunk boundaries
 * is still assembled. Call `end()` exactly once after the final chunk to
 * flush any pending candidate and append the footnote list built from the
 * most recent `citations` array passed to `parse()`.
 */
export class CitationParser {
  private readonly SQUARE_BRACKET_OPEN = '[';
  private readonly SQUARE_BRACKET_CLOSE = ']';
  private readonly PARENTHESES_OPEN = '(';

  // Buffered characters of the citation candidate currently being scanned:
  // opening brackets, closing brackets, and the digits between them.
  private startToken: string[] = [];
  private endToken: string[] = [];
  private numberToken: string[] = [];
  // Most recent citation URL list; consumed by end() to build footnotes.
  private citations: string[] = [];

  /**
   * Scans `content`, replacing each complete "[n]" (1 <= n <=
   * citations.length) with "[^n]". Text that turns out not to be a citation
   * is emitted verbatim. A "[n](…" sequence is treated as a markdown link
   * and left unchanged (see the "(" lookahead below — note the lookahead
   * only works within a single chunk).
   */
  public parse(content: string, citations: string[]) {
    this.citations = citations;
    let result = '';
    const contentArray = content.split('');
    for (const [index, char] of contentArray.entries()) {
      if (char === this.SQUARE_BRACKET_OPEN) {
        if (this.numberToken.length === 0) {
          // No digits seen yet: may start a citation, buffer the bracket.
          this.startToken.push(char);
        } else {
          // e.g. "[12[" — buffered run cannot be a citation; emit and restart.
          result += this.flush() + char;
        }
        continue;
      }
      if (char === this.SQUARE_BRACKET_CLOSE) {
        this.endToken.push(char);
        if (this.startToken.length === this.endToken.length) {
          const cIndex = Number(this.numberToken.join('').trim());
          if (
            cIndex > 0 &&
            cIndex <= citations.length &&
            contentArray[index + 1] !== this.PARENTHESES_OPEN
          ) {
            // Balanced brackets around a valid citation number, and not a
            // markdown link "[n](…)": rewrite to a footnote reference.
            const content = `[^${cIndex}]`;
            result += content;
            this.resetToken();
          } else {
            result += this.flush();
          }
        } else if (this.startToken.length < this.endToken.length) {
          // More "]" than "[": cannot be a citation, emit buffered text.
          result += this.flush();
        }
        continue;
      }
      if (this.isNumeric(char)) {
        if (this.startToken.length > 0) {
          // Digit inside "[…": keep accumulating the citation number.
          this.numberToken.push(char);
        } else {
          result += this.flush() + char;
        }
        continue;
      }
      // Any other character invalidates a pending citation candidate.
      if (this.startToken.length > 0) {
        result += this.flush() + char;
      } else {
        result += char;
      }
    }
    return result;
  }

  /** Flushes any partially-buffered candidate and appends the footnotes. */
  public end() {
    return this.flush() + '\n' + this.getFootnotes();
  }

  // Emits the buffered candidate verbatim and clears the buffers.
  private flush() {
    const content = this.getTokenContent();
    this.resetToken();
    return content;
  }

  // One footnote definition per citation URL, e.g.
  // [^1]: {"type":"url","url":"<uri-encoded url>"}
  private getFootnotes() {
    const footnotes = this.citations.map((citation, index) => {
      return `[^${index + 1}]: {"type":"url","url":"${encodeURIComponent(
        citation
      )}"}`;
    });
    return footnotes.join('\n');
  }

  private getTokenContent() {
    return this.startToken.concat(this.numberToken, this.endToken).join('');
  }

  private resetToken() {
    this.startToken = [];
    this.endToken = [];
    this.numberToken = [];
  }

  // True only for digit characters; whitespace is rejected explicitly
  // (Number('') and Number(' ') are 0, hence the trim check).
  private isNumeric(str: string) {
    return !isNaN(Number(str)) && str.trim() !== '';
  }
}

View File

@@ -23,11 +23,9 @@
"fs-extra": "^11.3.0",
"lodash-es": "^4.17.21",
"multer": "^1.4.5-lts.1",
"openai": "^4.85.1",
"react": "^19.0.0",
"react-dom": "^19.0.0",
"react-markdown": "^10.0.0",
"rxjs": "^7.8.1",
"socket.io": "^4.7.4",
"socket.io-client": "^4.7.4",
"swr": "^2.3.2",

317
yarn.lock
View File

@@ -706,11 +706,9 @@ __metadata:
fs-extra: "npm:^11.3.0"
lodash-es: "npm:^4.17.21"
multer: "npm:^1.4.5-lts.1"
openai: "npm:^4.85.1"
react: "npm:^19.0.0"
react-dom: "npm:^19.0.0"
react-markdown: "npm:^10.0.0"
rxjs: "npm:^7.8.1"
socket.io: "npm:^4.7.4"
socket.io-client: "npm:^4.7.4"
swr: "npm:^2.3.2"
@@ -868,7 +866,9 @@ __metadata:
"@affine-tools/utils": "workspace:*"
"@affine/graphql": "workspace:*"
"@affine/server-native": "workspace:*"
"@ai-sdk/google": "npm:^1.1.19"
"@ai-sdk/google": "npm:^1.2.10"
"@ai-sdk/openai": "npm:^1.3.9"
"@ai-sdk/perplexity": "npm:^1.1.6"
"@apollo/server": "npm:^4.11.3"
"@aws-sdk/client-s3": "npm:^3.779.0"
"@aws-sdk/s3-request-presigner": "npm:^3.779.0"
@@ -937,7 +937,6 @@ __metadata:
date-fns: "npm:^4.0.0"
dotenv: "npm:^16.4.7"
eventemitter2: "npm:^6.4.9"
eventsource-parser: "npm:^3.0.0"
express: "npm:^5.0.1"
fast-xml-parser: "npm:^5.0.0"
get-stream: "npm:^9.0.1"
@@ -960,7 +959,6 @@ __metadata:
nodemailer: "npm:^6.9.16"
nodemon: "npm:^3.1.7"
on-headers: "npm:^1.0.2"
openai: "npm:^4.83.0"
piscina: "npm:^5.0.0-alpha.0"
prisma: "npm:^5.22.0"
react: "npm:19.1.0"
@@ -1031,15 +1029,39 @@ __metadata:
languageName: unknown
linkType: soft
"@ai-sdk/google@npm:^1.1.19":
version: 1.2.5
resolution: "@ai-sdk/google@npm:1.2.5"
"@ai-sdk/google@npm:^1.2.10":
version: 1.2.10
resolution: "@ai-sdk/google@npm:1.2.10"
dependencies:
"@ai-sdk/provider": "npm:1.1.0"
"@ai-sdk/provider-utils": "npm:2.2.3"
"@ai-sdk/provider": "npm:1.1.2"
"@ai-sdk/provider-utils": "npm:2.2.6"
peerDependencies:
zod: ^3.0.0
checksum: 10/2de826ea7bea96a90f87a441b341e969541a5490d1c8d4d4355088bbad5a2169e63d3f3f5fce4ef462611b1ec496430e73d1e249cf989d016a75c96d0cca0a07
checksum: 10/537473bf694bedf7de1a239d392fc97b1cc1799fab39d375fdd0989b1d7cef7bbe7cc4b93a320e4de9062735ffa33446e95f028fd29a911625eda7ca185a6241
languageName: node
linkType: hard
"@ai-sdk/openai@npm:^1.3.9":
version: 1.3.9
resolution: "@ai-sdk/openai@npm:1.3.9"
dependencies:
"@ai-sdk/provider": "npm:1.1.2"
"@ai-sdk/provider-utils": "npm:2.2.6"
peerDependencies:
zod: ^3.0.0
checksum: 10/85f1f1581c9124323e3ae01e75dca3aa02566a91ffd0ff5cd6c579f73909811e55bfd44411a1c4bdb44c23ba625a43eaba0a1ef13c971d9f9224d3bfc4e112f1
languageName: node
linkType: hard
"@ai-sdk/perplexity@npm:^1.1.6":
version: 1.1.6
resolution: "@ai-sdk/perplexity@npm:1.1.6"
dependencies:
"@ai-sdk/provider": "npm:1.1.2"
"@ai-sdk/provider-utils": "npm:2.2.6"
peerDependencies:
zod: ^3.0.0
checksum: 10/9afc59654949794b5dab5886e0374176284b6e12bc57625e918d7a054d48b10bec9115f7f35cc87cccee2dcc4a99fd67c84e96832cfa05c9c0f9149b5ecc1bfe
languageName: node
linkType: hard
@@ -1056,6 +1078,19 @@ __metadata:
languageName: node
linkType: hard
"@ai-sdk/provider-utils@npm:2.2.6":
version: 2.2.6
resolution: "@ai-sdk/provider-utils@npm:2.2.6"
dependencies:
"@ai-sdk/provider": "npm:1.1.2"
nanoid: "npm:^3.3.8"
secure-json-parse: "npm:^2.7.0"
peerDependencies:
zod: ^3.23.8
checksum: 10/2bf53d6c49e5e721a15ab36746edfb0839dba104d32efd5a8f336276097c2a56cbbefa1dc164f307f495a659fa33256bdd0465462b8d172510c80d1123552c47
languageName: node
linkType: hard
"@ai-sdk/provider@npm:1.1.0":
version: 1.1.0
resolution: "@ai-sdk/provider@npm:1.1.0"
@@ -1065,6 +1100,15 @@ __metadata:
languageName: node
linkType: hard
"@ai-sdk/provider@npm:1.1.2":
version: 1.1.2
resolution: "@ai-sdk/provider@npm:1.1.2"
dependencies:
json-schema: "npm:^0.4.0"
checksum: 10/1e5b6e695f834ab375c25b750292f60e378c17cf047a2cd62e8829c6e4bbe1582331afa8606333b5b6a5fe77ff434e5d9389a381ace4a58b12da0297a36bb626
languageName: node
linkType: hard
"@ai-sdk/react@npm:1.2.5":
version: 1.2.5
resolution: "@ai-sdk/react@npm:1.2.5"
@@ -14983,7 +15027,7 @@ __metadata:
languageName: node
linkType: hard
"@types/node-fetch@npm:^2.6.1, @types/node-fetch@npm:^2.6.4":
"@types/node-fetch@npm:^2.6.1":
version: 2.6.12
resolution: "@types/node-fetch@npm:2.6.12"
dependencies:
@@ -15011,15 +15055,6 @@ __metadata:
languageName: node
linkType: hard
"@types/node@npm:^18.11.18":
version: 18.19.86
resolution: "@types/node@npm:18.19.86"
dependencies:
undici-types: "npm:~5.26.4"
checksum: 10/83e8f07305b102776c1970c2fbe2347eba15f6cca3d50cf8991d5f4e463ee7e204fbb3dec599892d25e613ed3b93f7fb5794b0de874ea6ba884affbe99670cc2
languageName: node
linkType: hard
"@types/nodemailer@npm:^6.4.17":
version: 6.4.17
resolution: "@types/nodemailer@npm:6.4.17"
@@ -16198,15 +16233,6 @@ __metadata:
languageName: node
linkType: hard
"abort-controller@npm:^3.0.0":
version: 3.0.0
resolution: "abort-controller@npm:3.0.0"
dependencies:
event-target-shim: "npm:^5.0.0"
checksum: 10/ed84af329f1828327798229578b4fe03a4dd2596ba304083ebd2252666bdc1d7647d66d0b18704477e1f8aa315f055944aa6e859afebd341f12d0a53c37b4b40
languageName: node
linkType: hard
"accepts@npm:^2.0.0":
version: 2.0.0
resolution: "accepts@npm:2.0.0"
@@ -17623,38 +17649,6 @@ __metadata:
languageName: node
linkType: hard
"call-bind-apply-helpers@npm:^1.0.0, call-bind-apply-helpers@npm:^1.0.1, call-bind-apply-helpers@npm:^1.0.2":
version: 1.0.2
resolution: "call-bind-apply-helpers@npm:1.0.2"
dependencies:
es-errors: "npm:^1.3.0"
function-bind: "npm:^1.1.2"
checksum: 10/00482c1f6aa7cfb30fb1dbeb13873edf81cfac7c29ed67a5957d60635a56b2a4a480f1016ddbdb3395cc37900d46037fb965043a51c5c789ffeab4fc535d18b5
languageName: node
linkType: hard
"call-bind@npm:^1.0.8":
version: 1.0.8
resolution: "call-bind@npm:1.0.8"
dependencies:
call-bind-apply-helpers: "npm:^1.0.0"
es-define-property: "npm:^1.0.0"
get-intrinsic: "npm:^1.2.4"
set-function-length: "npm:^1.2.2"
checksum: 10/659b03c79bbfccf0cde3a79e7d52570724d7290209823e1ca5088f94b52192dc1836b82a324d0144612f816abb2f1734447438e38d9dafe0b3f82c2a1b9e3bce
languageName: node
linkType: hard
"call-bound@npm:^1.0.3":
version: 1.0.4
resolution: "call-bound@npm:1.0.4"
dependencies:
call-bind-apply-helpers: "npm:^1.0.2"
get-intrinsic: "npm:^1.3.0"
checksum: 10/ef2b96e126ec0e58a7ff694db43f4d0d44f80e641370c21549ed911fecbdbc2df3ebc9bddad918d6bbdefeafb60bb3337902006d5176d72bcd2da74820991af7
languageName: node
linkType: hard
"callsites@npm:^3.0.0":
version: 3.1.0
resolution: "callsites@npm:3.1.0"
@@ -19854,17 +19848,6 @@ __metadata:
languageName: node
linkType: hard
"dunder-proto@npm:^1.0.1":
version: 1.0.1
resolution: "dunder-proto@npm:1.0.1"
dependencies:
call-bind-apply-helpers: "npm:^1.0.1"
es-errors: "npm:^1.3.0"
gopd: "npm:^1.2.0"
checksum: 10/5add88a3d68d42d6e6130a0cac450b7c2edbe73364bbd2fc334564418569bea97c6943a8fcd70e27130bf32afc236f30982fc4905039b703f23e9e0433c29934
languageName: node
linkType: hard
"eastasianwidth@npm:^0.2.0":
version: 0.2.0
resolution: "eastasianwidth@npm:0.2.0"
@@ -20320,20 +20303,6 @@ __metadata:
languageName: node
linkType: hard
"es-define-property@npm:^1.0.0, es-define-property@npm:^1.0.1":
version: 1.0.1
resolution: "es-define-property@npm:1.0.1"
checksum: 10/f8dc9e660d90919f11084db0a893128f3592b781ce967e4fccfb8f3106cb83e400a4032c559184ec52ee1dbd4b01e7776c7cd0b3327b1961b1a4a7008920fe78
languageName: node
linkType: hard
"es-errors@npm:^1.3.0":
version: 1.3.0
resolution: "es-errors@npm:1.3.0"
checksum: 10/96e65d640156f91b707517e8cdc454dd7d47c32833aa3e85d79f24f9eb7ea85f39b63e36216ef0114996581969b59fe609a94e30316b08f5f4df1d44134cf8d5
languageName: node
linkType: hard
"es-iterator-helpers@npm:@nolyfill/es-iterator-helpers@^1":
version: 1.0.21
resolution: "@nolyfill/es-iterator-helpers@npm:1.0.21"
@@ -20350,15 +20319,6 @@ __metadata:
languageName: node
linkType: hard
"es-object-atoms@npm:^1.0.0, es-object-atoms@npm:^1.1.1":
version: 1.1.1
resolution: "es-object-atoms@npm:1.1.1"
dependencies:
es-errors: "npm:^1.3.0"
checksum: 10/54fe77de288451dae51c37bfbfe3ec86732dc3778f98f3eb3bdb4bf48063b2c0b8f9c93542656986149d08aa5be3204286e2276053d19582b76753f1a2728867
languageName: node
linkType: hard
"es-set-tostringtag@npm:@nolyfill/es-set-tostringtag@^1":
version: 1.0.44
resolution: "@nolyfill/es-set-tostringtag@npm:1.0.44"
@@ -20942,13 +20902,6 @@ __metadata:
languageName: node
linkType: hard
"event-target-shim@npm:^5.0.0":
version: 5.0.1
resolution: "event-target-shim@npm:5.0.1"
checksum: 10/49ff46c3a7facbad3decb31f597063e761785d7fdb3920d4989d7b08c97a61c2f51183e2f3a03130c9088df88d4b489b1b79ab632219901f184f85158508f4c8
languageName: node
linkType: hard
"eventemitter2@npm:^6.4.9":
version: 6.4.9
resolution: "eventemitter2@npm:6.4.9"
@@ -20991,13 +20944,6 @@ __metadata:
languageName: node
linkType: hard
"eventsource-parser@npm:^3.0.0":
version: 3.0.1
resolution: "eventsource-parser@npm:3.0.1"
checksum: 10/2730c54c3cb47d55d2967f2ece843f9fc95d8a11c2fef6fece8d17d9080193cbe3cd9ac7b04a325977f63cbf8c1664fdd0512dec1aec601666a5c5bd8564b61f
languageName: node
linkType: hard
"execa@npm:^1.0.0":
version: 1.0.0
resolution: "execa@npm:1.0.0"
@@ -21678,13 +21624,6 @@ __metadata:
languageName: node
linkType: hard
"form-data-encoder@npm:1.7.2":
version: 1.7.2
resolution: "form-data-encoder@npm:1.7.2"
checksum: 10/227bf2cea083284411fd67472ccc22f5cb354ca92c00690e11ff5ed942d993c13ac99dea365046306200f8bd71e1a7858d2d99e236de694b806b1f374a4ee341
languageName: node
linkType: hard
"form-data@npm:^4.0.0":
version: 4.0.2
resolution: "form-data@npm:4.0.2"
@@ -21697,16 +21636,6 @@ __metadata:
languageName: node
linkType: hard
"formdata-node@npm:^4.3.2":
version: 4.4.1
resolution: "formdata-node@npm:4.4.1"
dependencies:
node-domexception: "npm:1.0.0"
web-streams-polyfill: "npm:4.0.0-beta.3"
checksum: 10/29622f75533107c1bbcbe31fda683e6a55859af7f48ec354a9800591ce7947ed84cd3ef2b2fcb812047a884f17a1bac75ce098ffc17e23402cd373e49c1cd335
languageName: node
linkType: hard
"formdata-polyfill@npm:^4.0.10":
version: 4.0.10
resolution: "formdata-polyfill@npm:4.0.10"
@@ -21965,13 +21894,6 @@ __metadata:
languageName: node
linkType: hard
"function-bind@npm:@nolyfill/function-bind@^1":
version: 1.0.21
resolution: "@nolyfill/function-bind@npm:1.0.21"
checksum: 10/b132954ab8b0f17360fc85aff853d7fda8ef2d72124a31dc7f8793e2aeb7da2175d4c1491149f818f8126e93ba8cbfed70b77f160991696caa15e8476223da43
languageName: node
linkType: hard
"functional-red-black-tree@npm:1.0.1":
version: 1.0.1
resolution: "functional-red-black-tree@npm:1.0.1"
@@ -22079,24 +22001,6 @@ __metadata:
languageName: node
linkType: hard
"get-intrinsic@npm:^1.2.4, get-intrinsic@npm:^1.3.0":
version: 1.3.0
resolution: "get-intrinsic@npm:1.3.0"
dependencies:
call-bind-apply-helpers: "npm:^1.0.2"
es-define-property: "npm:^1.0.1"
es-errors: "npm:^1.3.0"
es-object-atoms: "npm:^1.1.1"
function-bind: "npm:^1.1.2"
get-proto: "npm:^1.0.1"
gopd: "npm:^1.2.0"
has-symbols: "npm:^1.1.0"
hasown: "npm:^2.0.2"
math-intrinsics: "npm:^1.1.0"
checksum: 10/6e9dd920ff054147b6f44cb98104330e87caafae051b6d37b13384a45ba15e71af33c3baeac7cb630a0aaa23142718dcf25b45cfdd86c184c5dcb4e56d953a10
languageName: node
linkType: hard
"get-nonce@npm:^1.0.0":
version: 1.0.1
resolution: "get-nonce@npm:1.0.1"
@@ -22123,16 +22027,6 @@ __metadata:
languageName: node
linkType: hard
"get-proto@npm:^1.0.1":
version: 1.0.1
resolution: "get-proto@npm:1.0.1"
dependencies:
dunder-proto: "npm:^1.0.1"
es-object-atoms: "npm:^1.0.0"
checksum: 10/4fc96afdb58ced9a67558698b91433e6b037aaa6f1493af77498d7c85b141382cf223c0e5946f334fb328ee85dfe6edd06d218eaf09556f4bc4ec6005d7f5f7b
languageName: node
linkType: hard
"get-stream@npm:^4.0.0":
version: 4.1.0
resolution: "get-stream@npm:4.1.0"
@@ -22483,13 +22377,6 @@ __metadata:
languageName: node
linkType: hard
"gopd@npm:@nolyfill/gopd@^1":
version: 1.0.44
resolution: "@nolyfill/gopd@npm:1.0.44"
checksum: 10/709da9df577e8b23972df0a3da6f0b520e4ee55e004fa5299865e554113971f33979232a739218b205cb1e5d742fac717e6d3769fb64cb0058e2bcef4cc0eb08
languageName: node
linkType: hard
"got@npm:^11.7.0, got@npm:^11.8.5":
version: 11.8.6
resolution: "got@npm:11.8.6"
@@ -22673,13 +22560,6 @@ __metadata:
languageName: node
linkType: hard
"has-symbols@npm:@nolyfill/has-symbols@^1":
version: 1.0.21
resolution: "@nolyfill/has-symbols@npm:1.0.21"
checksum: 10/cdeaf1a216842769951331496f1adb4bc89170c9bcf33e3587f32be01cafe0a168dde384f22b78d9e5650acea3f6b09c60323d51c74f3a3b101efbaa216fdb26
languageName: node
linkType: hard
"hasown@npm:@nolyfill/hasown@^1":
version: 1.0.44
resolution: "@nolyfill/hasown@npm:1.0.44"
@@ -24451,16 +24331,10 @@ __metadata:
languageName: node
linkType: hard
"json-stable-stringify@npm:^1.2.1":
version: 1.2.1
resolution: "json-stable-stringify@npm:1.2.1"
dependencies:
call-bind: "npm:^1.0.8"
call-bound: "npm:^1.0.3"
isarray: "npm:^2.0.5"
jsonify: "npm:^0.0.1"
object-keys: "npm:^1.1.1"
checksum: 10/f4600d34605e1da81a615ddf7dc62f021a5a5c822aee38b3c878e9a703bbd72623402944dbd7848140602c9ec54bfa2df65dfe75cc40afcfd79f3f072ca5307b
"json-stable-stringify@npm:@nolyfill/json-stable-stringify@^1":
version: 1.0.44
resolution: "@nolyfill/json-stable-stringify@npm:1.0.44"
checksum: 10/8e22c9d4e8a0cf9f69d89f2bfda342e35ca1e361ba4a4020c69bbce7b7753d0a7316b4e61aa5329afdee699349551b280eb5a3e78ad199764dc3ec8555081fc0
languageName: node
linkType: hard
@@ -24535,13 +24409,6 @@ __metadata:
languageName: node
linkType: hard
"jsonify@npm:^0.0.1":
version: 0.0.1
resolution: "jsonify@npm:0.0.1"
checksum: 10/7b86b6f4518582ff1d8b7624ed6c6277affd5246445e864615dbdef843a4057ac58587684faf129ea111eeb80e01c15f0a4d9d03820eb3f3985fa67e81b12398
languageName: node
linkType: hard
"jsonparse@npm:^1.2.0":
version: 1.3.1
resolution: "jsonparse@npm:1.3.1"
@@ -25669,13 +25536,6 @@ __metadata:
languageName: node
linkType: hard
"math-intrinsics@npm:^1.1.0":
version: 1.1.0
resolution: "math-intrinsics@npm:1.1.0"
checksum: 10/11df2eda46d092a6035479632e1ec865b8134bdfc4bd9e571a656f4191525404f13a283a515938c3a8de934dbfd9c09674d9da9fa831e6eb7e22b50b197d2edd
languageName: node
linkType: hard
"md-to-react-email@npm:5.0.5":
version: 5.0.5
resolution: "md-to-react-email@npm:5.0.5"
@@ -27216,7 +27076,7 @@ __metadata:
languageName: node
linkType: hard
"node-domexception@npm:1.0.0, node-domexception@npm:^1.0.0":
"node-domexception@npm:^1.0.0":
version: 1.0.0
resolution: "node-domexception@npm:1.0.0"
checksum: 10/e332522f242348c511640c25a6fc7da4f30e09e580c70c6b13cb0be83c78c3e71c8d4665af2527e869fc96848924a4316ae7ec9014c091e2156f41739d4fa233
@@ -27534,15 +27394,6 @@ __metadata:
languageName: node
linkType: hard
"object-keys@npm:@nolyfill/object-keys@^1":
version: 1.0.44
resolution: "@nolyfill/object-keys@npm:1.0.44"
dependencies:
"@nolyfill/shared": "npm:1.0.44"
checksum: 10/83a6c6f80d939ab95a6af7da6b19ba840ef4d6b51347471b5fd428d4b80824c52c57e3bd476282e178bf0bea00e7a28b7ea33299ccaadccd58fc924bb2af5f5e
languageName: node
linkType: hard
"object-path@npm:^0.11.8":
version: 0.11.8
resolution: "object-path@npm:0.11.8"
@@ -27721,31 +27572,6 @@ __metadata:
languageName: node
linkType: hard
"openai@npm:^4.83.0, openai@npm:^4.85.1":
version: 4.91.1
resolution: "openai@npm:4.91.1"
dependencies:
"@types/node": "npm:^18.11.18"
"@types/node-fetch": "npm:^2.6.4"
abort-controller: "npm:^3.0.0"
agentkeepalive: "npm:^4.2.1"
form-data-encoder: "npm:1.7.2"
formdata-node: "npm:^4.3.2"
node-fetch: "npm:^2.6.7"
peerDependencies:
ws: ^8.18.0
zod: ^3.23.8
peerDependenciesMeta:
ws:
optional: true
zod:
optional: true
bin:
openai: bin/cli
checksum: 10/e5233625c7684584a5b2484cb5c72c037381a79c5da036ab66bb6b52578a0d421224d88ec645a48fb4fbcbc23f88249906232f670e8797c8a3e903402ce6c7dd
languageName: node
linkType: hard
"optionator@npm:^0.9.3":
version: 0.9.4
resolution: "optionator@npm:0.9.4"
@@ -30981,13 +30807,6 @@ __metadata:
languageName: node
linkType: hard
"set-function-length@npm:@nolyfill/set-function-length@^1":
version: 1.0.25
resolution: "@nolyfill/set-function-length@npm:1.0.25"
checksum: 10/530ced403105dd5a923314b5be91fd4ded8de91a2a70fa50dcef2c73b7ff6f9eebd15d1463f23e5d8ff19aeed1c2157613e8a7d7636eb887d6f3c31e22f81f33
languageName: node
linkType: hard
"setimmediate@npm:^1.0.5":
version: 1.0.5
resolution: "setimmediate@npm:1.0.5"
@@ -33116,13 +32935,6 @@ __metadata:
languageName: node
linkType: hard
"undici-types@npm:~5.26.4":
version: 5.26.5
resolution: "undici-types@npm:5.26.5"
checksum: 10/0097779d94bc0fd26f0418b3a05472410408877279141ded2bd449167be1aed7ea5b76f756562cb3586a07f251b90799bab22d9019ceba49c037c76445f7cddd
languageName: node
linkType: hard
"undici-types@npm:~6.21.0":
version: 6.21.0
resolution: "undici-types@npm:6.21.0"
@@ -33943,13 +33755,6 @@ __metadata:
languageName: node
linkType: hard
"web-streams-polyfill@npm:4.0.0-beta.3":
version: 4.0.0-beta.3
resolution: "web-streams-polyfill@npm:4.0.0-beta.3"
checksum: 10/dcdef67de57d83008f9dc330662b65ba4497315555dd0e4e7bcacb132ffdf8a830eaab8f74ad40a4a44f542461f51223f406e2a446ece1cc29927859b1405853
languageName: node
linkType: hard
"web-streams-polyfill@npm:^3.0.3":
version: 3.3.3
resolution: "web-streams-polyfill@npm:3.3.3"