Mirror of https://github.com/toeverything/AFFiNE.git
@@ -30,10 +30,12 @@ import {
 import { CurrentUser, Public } from '../../core/auth';
 import {
   BlobNotFound,
+  CallMetric,
   Config,
   CopilotFailedToGenerateText,
   CopilotSessionNotFound,
   mapSseError,
+  metrics,
   NoCopilotProviderAvailable,
   UnsplashIsNotConfigured,
 } from '../../fundamentals';
@@ -178,6 +180,7 @@ export class CopilotController {
   }
 
   @Get('/chat/:sessionId')
+  @CallMetric('ai', 'chat', { timer: true })
   async chat(
     @CurrentUser() user: CurrentUser,
     @Req() req: Request,
@@ -185,6 +188,7 @@ export class CopilotController {
     @Query() params: Record<string, string | string[]>
   ): Promise<string> {
     const { messageId } = this.prepareParams(params);
+
     const provider = await this.chooseTextProvider(
       user.id,
       sessionId,
@@ -192,8 +196,8 @@ export class CopilotController {
     );
 
     const session = await this.appendSessionMessage(sessionId, messageId);
-
     try {
+      metrics.ai.counter('chat_calls').add(1, { model: session.model });
       const content = await provider.generateText(
         session.finish(params),
         session.model,
@@ -213,27 +217,30 @@ export class CopilotController {
 
       return content;
     } catch (e: any) {
+      metrics.ai.counter('chat_errors').add(1, { model: session.model });
       throw new CopilotFailedToGenerateText(e.message);
     }
   }
 
   @Sse('/chat/:sessionId/stream')
+  @CallMetric('ai', 'chat_stream', { timer: true })
   async chatStream(
     @CurrentUser() user: CurrentUser,
     @Req() req: Request,
     @Param('sessionId') sessionId: string,
     @Query() params: Record<string, string>
   ): Promise<Observable<ChatEvent>> {
+    const { messageId } = this.prepareParams(params);
+
+    const provider = await this.chooseTextProvider(
+      user.id,
+      sessionId,
+      messageId
+    );
+
+    const session = await this.appendSessionMessage(sessionId, messageId);
     try {
-      const { messageId } = this.prepareParams(params);
-      const provider = await this.chooseTextProvider(
-        user.id,
-        sessionId,
-        messageId
-      );
-
-      const session = await this.appendSessionMessage(sessionId, messageId);
-
+      metrics.ai.counter('chat_stream_calls').add(1, { model: session.model });
       const source$ = from(
         provider.generateTextStream(session.finish(params), session.model, {
           ...session.config.promptConfig,
@@ -262,25 +269,34 @@ export class CopilotController {
             )
           )
         ),
-        catchError(mapSseError)
+        catchError(e => {
+          metrics.ai
+            .counter('chat_stream_errors')
+            .add(1, { model: session.model });
+          return mapSseError(e);
+        })
       );
 
      return this.mergePingStream(messageId, source$);
    } catch (err) {
+      metrics.ai.counter('chat_stream_errors').add(1, { model: session.model });
      return mapSseError(err);
    }
  }
 
   @Sse('/chat/:sessionId/workflow')
+  @CallMetric('ai', 'chat_workflow', { timer: true })
   async chatWorkflow(
     @CurrentUser() user: CurrentUser,
     @Req() req: Request,
     @Param('sessionId') sessionId: string,
     @Query() params: Record<string, string>
   ): Promise<Observable<ChatEvent>> {
+    const { messageId } = this.prepareParams(params);
+
+    const session = await this.appendSessionMessage(sessionId, messageId);
     try {
-      const { messageId } = this.prepareParams(params);
-      const session = await this.appendSessionMessage(sessionId, messageId);
+      metrics.ai.counter('workflow_calls').add(1, { model: session.model });
       const latestMessage = session.stashMessages.findLast(
         m => m.role === 'user'
       );
@@ -347,41 +363,51 @@ export class CopilotController {
            )
          )
        ),
-        catchError(mapSseError)
+        catchError(e => {
+          metrics.ai
+            .counter('workflow_errors')
+            .add(1, { model: session.model });
+          return mapSseError(e);
+        })
      );
 
      return this.mergePingStream(messageId, source$);
    } catch (err) {
+      metrics.ai.counter('workflow_errors').add(1, { model: session.model });
      return mapSseError(err);
    }
  }
 
   @Sse('/chat/:sessionId/images')
+  @CallMetric('ai', 'chat_images', { timer: true })
   async chatImagesStream(
     @CurrentUser() user: CurrentUser,
     @Req() req: Request,
     @Param('sessionId') sessionId: string,
     @Query() params: Record<string, string>
   ): Promise<Observable<ChatEvent>> {
+    const { messageId } = this.prepareParams(params);
+
+    const { model, hasAttachment } = await this.checkRequest(
+      user.id,
+      sessionId,
+      messageId
+    );
+    const provider = await this.provider.getProviderByCapability(
+      hasAttachment
+        ? CopilotCapability.ImageToImage
+        : CopilotCapability.TextToImage,
+      model
+    );
+    if (!provider) {
+      throw new NoCopilotProviderAvailable();
+    }
+
+    const session = await this.appendSessionMessage(sessionId, messageId);
     try {
-      const { messageId } = this.prepareParams(params);
-      const { model, hasAttachment } = await this.checkRequest(
-        user.id,
-        sessionId,
-        messageId
-      );
-      const provider = await this.provider.getProviderByCapability(
-        hasAttachment
-          ? CopilotCapability.ImageToImage
-          : CopilotCapability.TextToImage,
-        model
-      );
-      if (!provider) {
-        throw new NoCopilotProviderAvailable();
-      }
-
-      const session = await this.appendSessionMessage(sessionId, messageId);
-
+      metrics.ai
+        .counter('images_stream_calls')
+        .add(1, { model: session.model });
       const handleRemoteLink = this.storage.handleRemoteLink.bind(
         this.storage,
         user.id,
@@ -423,15 +449,24 @@ export class CopilotController {
            )
          )
        ),
-        catchError(mapSseError)
+        catchError(e => {
+          metrics.ai
+            .counter('images_stream_errors')
+            .add(1, { model: session.model });
+          return mapSseError(e);
+        })
      );
 
      return this.mergePingStream(messageId, source$);
    } catch (err) {
+      metrics.ai
+        .counter('images_stream_errors')
+        .add(1, { model: session.model });
      return mapSseError(err);
    }
  }
 
+  @CallMetric('ai', 'unsplash')
   @Get('/unsplash/photos')
   async unsplashPhotos(
     @Req() req: Request,
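Every controller hunk above follows the same shape: the message, provider, and session lookups are hoisted out of the `try` block so that both the `catchError` inside the SSE pipeline and the outer `catch` can label their error counters with `session.model`. Below is a minimal standalone sketch of that shape in plain rxjs; `counter()`, `loadSession()`, and `generate()` are hypothetical stand-ins for AFFiNE's `metrics` module, session service, and text provider, not the real APIs.

```ts
import { catchError, from, map, Observable, of } from 'rxjs';

type ChatEvent = { type: 'message' | 'error'; data: string };

// Hypothetical stand-ins for metrics.ai.counter(...), the session service,
// and the provider's streaming generator.
const counter = (name: string) => ({
  add: (n: number, attrs: Record<string, string>) =>
    console.log(`${name} +${n}`, attrs),
});
async function loadSession(sessionId: string): Promise<{ model: string }> {
  return { model: `model-for-${sessionId}` }; // placeholder lookup
}
async function* generate(model: string): AsyncIterable<string> {
  yield `hello from ${model}`;
}
const mapSseError = (e: unknown): Observable<ChatEvent> =>
  of({ type: 'error', data: String(e) });

async function chatStream(sessionId: string): Promise<Observable<ChatEvent>> {
  // Resolved before `try` so session.model is in scope on every error path.
  const session = await loadSession(sessionId);
  try {
    counter('chat_stream_calls').add(1, { model: session.model });
    return from(generate(session.model)).pipe(
      map((data): ChatEvent => ({ type: 'message', data })),
      catchError(e => {
        // Errors surfaced inside the SSE pipeline.
        counter('chat_stream_errors').add(1, { model: session.model });
        return mapSseError(e);
      })
    );
  } catch (err) {
    // Errors thrown before the stream is even built.
    counter('chat_stream_errors').add(1, { model: session.model });
    return mapSseError(err);
  }
}
```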
@@ -9,6 +9,7 @@ import { z, ZodType } from 'zod';
 import {
   CopilotPromptInvalid,
   CopilotProviderSideError,
+  metrics,
   UserFriendlyError,
 } from '../../../fundamentals';
 import {
@@ -217,6 +218,7 @@ export class FalProvider
     // by default, image prompt assumes there is only one message
     const prompt = this.extractPrompt(messages.pop());
     try {
+      metrics.ai.counter('chat_text_calls').add(1, { model });
       const response = await fetch(`https://fal.run/fal-ai/${model}`, {
         method: 'POST',
         headers: {
@@ -237,6 +239,7 @@ export class FalProvider
       }
       return data.output;
     } catch (e: any) {
+      metrics.ai.counter('chat_text_errors').add(1, { model });
       throw this.handleError(e);
     }
   }
@@ -246,15 +249,21 @@ export class FalProvider
     model: string = 'llava-next',
     options: CopilotChatOptions = {}
   ): AsyncIterable<string> {
-    const result = await this.generateText(messages, model, options);
+    try {
+      metrics.ai.counter('chat_text_stream_calls').add(1, { model });
+      const result = await this.generateText(messages, model, options);
 
-    for await (const content of result) {
-      if (content) {
-        yield content;
-        if (options.signal?.aborted) {
-          break;
+      for await (const content of result) {
+        if (content) {
+          yield content;
+          if (options.signal?.aborted) {
+            break;
+          }
         }
       }
+    } catch (e) {
+      metrics.ai.counter('chat_text_stream_errors').add(1, { model });
+      throw e;
     }
   }
@@ -299,6 +308,8 @@ export class FalProvider
     }
 
     try {
+      metrics.ai.counter('generate_images_calls').add(1, { model });
+
       const data = await this.buildResponse(messages, model, options);
 
       if (!data.images?.length && !data.image?.url) {
@@ -315,6 +326,7 @@ export class FalProvider
           .map(image => image.url) || []
       );
     } catch (e: any) {
+      metrics.ai.counter('generate_images_errors').add(1, { model });
       throw this.handleError(e);
     }
   }
@@ -324,9 +336,15 @@ export class FalProvider
     model: string = this.availableModels[0],
     options: CopilotImageOptions = {}
   ): AsyncIterable<string> {
-    const ret = await this.generateImages(messages, model, options);
-    for (const url of ret) {
-      yield url;
+    try {
+      metrics.ai.counter('generate_images_stream_calls').add(1, { model });
+      const ret = await this.generateImages(messages, model, options);
+      for (const url of ret) {
+        yield url;
+      }
+    } catch (e) {
+      metrics.ai.counter('generate_images_stream_errors').add(1, { model });
+      throw e;
     }
   }
 }
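The Fal hunks wrap the async generators in try/catch so that both the call counter and the error counter carry the `model` label. One subtlety of this pattern: generators are lazy, so the "calls" counter only increments once iteration actually starts, not when the method is invoked. A small self-contained sketch of the pattern, with a hypothetical in-memory `count()` helper standing in for the metrics module:

```ts
// In-memory stand-in for metrics.ai.counter(name).add(1, { model }).
const counters = new Map<string, number>();
function count(name: string, model: string) {
  const key = `${name}:${model}`;
  counters.set(key, (counters.get(key) ?? 0) + 1);
}

async function* generateTextStream(
  prompt: string,
  model = 'llava-next',
  signal?: AbortSignal
): AsyncIterable<string> {
  try {
    // Generators are lazy: this runs when iteration starts, not on call.
    count('chat_text_stream_calls', model);
    for (const chunk of prompt.split(' ')) {
      yield chunk; // stand-in for streaming the upstream response
      if (signal?.aborted) break;
    }
  } catch (e) {
    count('chat_text_stream_errors', model);
    throw e; // re-throw so callers still observe the original failure
  }
}

// Usage:
// for await (const chunk of generateTextStream('hello from fal')) {
//   console.log(chunk);
// }
```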
@@ -1,9 +1,10 @@
 import { Logger } from '@nestjs/common';
-import { APIError, ClientOptions, OpenAI } from 'openai';
+import { APIError, BadRequestError, ClientOptions, OpenAI } from 'openai';
 
 import {
   CopilotPromptInvalid,
   CopilotProviderSideError,
+  metrics,
   UserFriendlyError,
 } from '../../../fundamentals';
 import {
@@ -179,10 +180,23 @@ export class OpenAIProvider
     }
   }
 
-  private handleError(e: any) {
+  private handleError(
+    e: any,
+    model: string,
+    options: CopilotImageOptions = {}
+  ) {
     if (e instanceof UserFriendlyError) {
       return e;
     } else if (e instanceof APIError) {
+      if (
+        e instanceof BadRequestError &&
+        (e.message.includes('safety') || e.message.includes('risk'))
+      ) {
+        metrics.ai
+          .counter('chat_text_risk_errors')
+          .add(1, { model, user: options.user || undefined });
+      }
+
       return new CopilotProviderSideError({
         provider: this.type,
         kind: e.type || 'unknown',
@@ -206,6 +220,7 @@ export class OpenAIProvider
     this.checkParams({ messages, model, options });
 
     try {
+      metrics.ai.counter('chat_text_calls').add(1, { model });
       const result = await this.instance.chat.completions.create(
         {
           messages: this.chatToGPTMessage(messages),
@@ -223,7 +238,8 @@ export class OpenAIProvider
       if (!content) throw new Error('Failed to generate text');
       return content.trim();
     } catch (e: any) {
-      throw this.handleError(e);
+      metrics.ai.counter('chat_text_errors').add(1, { model });
+      throw this.handleError(e, model, options);
     }
   }
@@ -235,6 +251,7 @@ export class OpenAIProvider
     this.checkParams({ messages, model, options });
 
     try {
+      metrics.ai.counter('chat_text_stream_calls').add(1, { model });
       const result = await this.instance.chat.completions.create(
         {
           stream: true,
@@ -268,7 +285,8 @@ export class OpenAIProvider
        }
      }
    } catch (e: any) {
-      throw this.handleError(e);
+      metrics.ai.counter('chat_text_stream_errors').add(1, { model });
+      throw this.handleError(e, model, options);
    }
  }
@@ -283,15 +301,19 @@ export class OpenAIProvider
     this.checkParams({ embeddings: messages, model, options });
 
     try {
+      metrics.ai.counter('generate_embedding_calls').add(1, { model });
       const result = await this.instance.embeddings.create({
         model: model,
         input: messages,
         dimensions: options.dimensions || DEFAULT_DIMENSIONS,
         user: options.user,
       });
-      return result.data.map(e => e.embedding);
+      return result.data
+        .map(e => e?.embedding)
+        .filter(v => v && Array.isArray(v));
     } catch (e: any) {
-      throw this.handleError(e);
+      metrics.ai.counter('generate_embedding_errors').add(1, { model });
+      throw this.handleError(e, model, options);
     }
   }
@@ -305,6 +327,7 @@ export class OpenAIProvider
     if (!prompt) throw new CopilotPromptInvalid('Prompt is required');
 
     try {
+      metrics.ai.counter('generate_images_calls').add(1, { model });
       const result = await this.instance.images.generate(
         {
           prompt,
@@ -319,7 +342,8 @@ export class OpenAIProvider
         .map(image => image.url)
         .filter((v): v is string => !!v);
     } catch (e: any) {
-      throw this.handleError(e);
+      metrics.ai.counter('generate_images_errors').add(1, { model });
+      throw this.handleError(e, model, options);
     }
   }
@@ -328,9 +352,15 @@ export class OpenAIProvider
     model: string = 'dall-e-3',
     options: CopilotImageOptions = {}
   ): AsyncIterable<string> {
-    const ret = await this.generateImages(messages, model, options);
-    for (const url of ret) {
-      yield url;
+    try {
+      metrics.ai.counter('generate_images_stream_calls').add(1, { model });
+      const ret = await this.generateImages(messages, model, options);
+      for (const url of ret) {
+        yield url;
+      }
+    } catch (e) {
+      metrics.ai.counter('generate_images_stream_errors').add(1, { model });
+      throw e;
    }
  }
 }
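The OpenAI hunks thread `model` and `options` into `handleError` so that a `BadRequestError` whose message mentions "safety" or "risk" bumps a dedicated `chat_text_risk_errors` counter before the error is wrapped. A simplified standalone sketch of that classification step is below; the real code wraps the error into `CopilotProviderSideError`, while here `bump()` is a hypothetical stand-in for `metrics.ai.counter(...).add(...)` and a plain `Error` is returned instead.

```ts
import { APIError, BadRequestError } from 'openai';

// Hypothetical stand-in for metrics.ai.counter('chat_text_risk_errors').add(...).
const bump = (name: string, attrs: Record<string, string | undefined>) =>
  console.log(`${name} +1`, attrs);

function classifyProviderError(
  e: unknown,
  model: string,
  user?: string
): Error {
  if (e instanceof APIError) {
    // Count moderation/safety refusals separately from other provider errors.
    if (
      e instanceof BadRequestError &&
      (e.message.includes('safety') || e.message.includes('risk'))
    ) {
      bump('chat_text_risk_errors', { model, user });
    }
    return new Error(`provider error (${e.status ?? 'unknown'}): ${e.message}`);
  }
  return e instanceof Error ? e : new Error(String(e));
}
```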
@@ -24,6 +24,7 @@ import { Admin } from '../../core/common';
 import { PermissionService } from '../../core/permission';
 import { UserType } from '../../core/user';
 import {
+  CallMetric,
   CopilotFailedToCreateMessage,
   FileUpload,
   RequestMutex,
@@ -308,6 +309,7 @@ export class CopilotResolver {
   }
 
   @ResolveField(() => [CopilotHistoriesType], {})
+  @CallMetric('ai', 'histories')
   async histories(
     @Parent() copilot: CopilotType,
     @CurrentUser() user: CurrentUser,
@@ -334,6 +336,7 @@ export class CopilotResolver {
       options,
       true
     );
+
     return histories.map(h => ({
       ...h,
       // filter out empty messages
@@ -344,6 +347,7 @@ export class CopilotResolver {
   @Mutation(() => String, {
     description: 'Create a chat session',
   })
+  @CallMetric('ai', 'chat_session_create')
   async createCopilotSession(
     @CurrentUser() user: CurrentUser,
     @Args({ name: 'options', type: () => CreateChatSessionInput })
@@ -362,16 +366,16 @@ export class CopilotResolver {
 
     await this.chatSession.checkQuota(user.id);
 
-    const session = await this.chatSession.create({
+    return await this.chatSession.create({
       ...options,
       userId: user.id,
     });
-    return session;
   }
 
   @Mutation(() => String, {
     description: 'Create a chat session',
   })
+  @CallMetric('ai', 'chat_session_fork')
   async forkCopilotSession(
     @CurrentUser() user: CurrentUser,
     @Args({ name: 'options', type: () => ForkChatSessionInput })
@@ -390,16 +394,16 @@ export class CopilotResolver {
 
     await this.chatSession.checkQuota(user.id);
 
-    const session = await this.chatSession.fork({
+    return await this.chatSession.fork({
       ...options,
       userId: user.id,
     });
-    return session;
   }
 
   @Mutation(() => [String], {
     description: 'Cleanup sessions',
   })
+  @CallMetric('ai', 'chat_session_cleanup')
   async cleanupCopilotSession(
     @CurrentUser() user: CurrentUser,
     @Args({ name: 'options', type: () => DeleteSessionInput })
@@ -428,6 +432,7 @@ export class CopilotResolver {
   @Mutation(() => String, {
     description: 'Create a chat message',
   })
+  @CallMetric('ai', 'chat_message_create')
   async createCopilotMessage(
     @CurrentUser() user: CurrentUser,
     @Args({ name: 'options', type: () => CreateChatMessageInput })
@@ -559,6 +559,7 @@ export class ChatSessionService {
       this.logger.error(`Prompt not found: ${options.promptName}`);
       throw new CopilotPromptNotFound({ name: options.promptName });
     }
+
     return await this.setSession({
       ...options,
       sessionId,
@@ -6,6 +6,7 @@ import { QuotaManagementService } from '../../core/quota';
 import {
   type BlobInputType,
   BlobQuotaExceeded,
+  CallMetric,
   Config,
   type FileUpload,
   type StorageProvider,
@@ -28,6 +29,7 @@ export class CopilotStorage {
     );
   }
 
+  @CallMetric('ai', 'blob_put')
   async put(
     userId: string,
     workspaceId: string,
@@ -43,20 +45,24 @@ export class CopilotStorage {
     return this.url.link(`/api/copilot/blob/${name}`);
   }
 
+  @CallMetric('ai', 'blob_get')
   async get(userId: string, workspaceId: string, key: string) {
     return this.provider.get(`${userId}/${workspaceId}/${key}`);
   }
 
+  @CallMetric('ai', 'blob_delete')
   async delete(userId: string, workspaceId: string, key: string) {
-    return this.provider.delete(`${userId}/${workspaceId}/${key}`);
+    await this.provider.delete(`${userId}/${workspaceId}/${key}`);
   }
 
+  @CallMetric('ai', 'blob_upload')
   async handleUpload(userId: string, blob: FileUpload) {
     const checkExceeded = await this.quota.getQuotaCalculator(userId);
 
     if (checkExceeded(0)) {
       throw new BlobQuotaExceeded();
     }
 
     const buffer = await new Promise<Buffer>((resolve, reject) => {
       const stream = blob.createReadStream();
       const chunks: Uint8Array[] = [];
@@ -87,6 +93,7 @@ export class CopilotStorage {
     };
   }
 
+  @CallMetric('ai', 'blob_proxy_remote_url')
   async handleRemoteLink(userId: string, workspaceId: string, link: string) {
     const response = await fetch(link);
     const buffer = new Uint8Array(await response.arrayBuffer());
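The resolver and storage hunks rely on the `@CallMetric(scope, name, { timer })` decorator rather than inline counters. Below is a rough sketch of what such a decorator could look like; it is illustrative only, not AFFiNE's implementation, uses `console.log` as a stand-in for real counters and timers, and assumes TypeScript's legacy `experimentalDecorators` setting.

```ts
// Illustrative decorator in the spirit of @CallMetric('ai', 'blob_get').
function CallMetric(
  scope: string,
  name: string,
  opts: { timer?: boolean } = {}
) {
  return (
    _target: object,
    _key: string,
    descriptor: PropertyDescriptor
  ): PropertyDescriptor => {
    const original = descriptor.value;
    descriptor.value = async function (this: unknown, ...args: unknown[]) {
      const start = Date.now();
      try {
        return await original.apply(this, args);
      } catch (e) {
        console.log(`${scope}.${name}_errors +1`); // stand-in for an error counter
        throw e;
      } finally {
        if (opts.timer) {
          console.log(`${scope}.${name} took ${Date.now() - start}ms`);
        }
      }
    };
    return descriptor;
  };
}

class ExampleStorage {
  @CallMetric('ai', 'blob_get', { timer: true })
  async get(key: string) {
    return `blob:${key}`;
  }
}

// Usage: new ExampleStorage().get('workspace/blob-id').then(console.log);
```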