Mirror of https://github.com/toeverything/AFFiNE.git
feat(server): context awareness for copilot (#9611)
fix PD-2167 fix PD-2169 fix PD-2190
@@ -43,3 +43,14 @@ Generated by [AVA](https://avajs.dev).
      id: 'docId1',
    },
  ]

> should list context docs

  [
    {
      blobId: 'fileId1',
      chunkSize: 0,
      name: 'sample.pdf',
      status: 'processing',
    },
  ]

Binary file not shown.
@@ -1,5 +1,6 @@
import { randomUUID } from 'node:crypto';

import { ProjectRoot } from '@affine-tools/utils/path';
import type { TestFn } from 'ava';
import ava from 'ava';
import Sinon from 'sinon';
@@ -8,7 +9,11 @@ import { ConfigModule } from '../base/config';
import { AuthService } from '../core/auth';
import { WorkspaceModule } from '../core/workspaces';
import { CopilotModule } from '../plugins/copilot';
-import { CopilotContextService } from '../plugins/copilot/context';
+import {
+  CopilotContextDocJob,
+  CopilotContextService,
+} from '../plugins/copilot/context';
import { MockEmbeddingClient } from '../plugins/copilot/context/embedding';
import { prompts, PromptService } from '../plugins/copilot/prompt';
import {
  CopilotProviderService,
@@ -29,6 +34,7 @@ import {
} from './utils';
import {
  addContextDoc,
  addContextFile,
  array2sse,
  chatWithImages,
  chatWithText,
@@ -41,6 +47,7 @@ import {
  getHistories,
  listContext,
  listContextFiles,
  matchContext,
  MockCopilotTestProvider,
  sse2array,
  textToEventStream,
@@ -52,6 +59,7 @@ const test = ava as TestFn<{
  auth: AuthService;
  app: TestingApp;
  context: CopilotContextService;
  jobs: CopilotContextDocJob;
  prompt: PromptService;
  provider: CopilotProviderService;
  storage: CopilotStorage;
@@ -86,12 +94,14 @@ test.before(async t => {
  const context = app.get(CopilotContextService);
  const prompt = app.get(PromptService);
  const storage = app.get(CopilotStorage);
  const jobs = app.get(CopilotContextDocJob);

  t.context.app = app;
  t.context.auth = auth;
  t.context.context = context;
  t.context.prompt = prompt;
  t.context.storage = storage;
  t.context.jobs = jobs;
});

const promptName = 'prompt';
@@ -719,7 +729,7 @@ test('should be able to search image from unsplash', async t => {
});

test('should be able to manage context', async t => {
-  const { app } = t.context;
+  const { app, context, jobs } = t.context;

  const { id: workspaceId } = await createWorkspace(app);
  const sessionId = await createCopilotSession(
@@ -729,6 +739,10 @@ test('should be able to manage context', async t => {
    promptName
  );

  // use mocked embedding client
  Sinon.stub(context, 'embeddingClient').get(() => new MockEmbeddingClient());
  Sinon.stub(jobs, 'embeddingClient').get(() => new MockEmbeddingClient());

  {
    await t.throwsAsync(
      createCopilotContext(app, workspaceId, randomUUID()),
@@ -747,16 +761,49 @@ test('should be able to manage context', async t => {
    );
  }

  const fs = await import('node:fs');
  const buffer = fs.readFileSync(
    ProjectRoot.join('packages/common/native/fixtures/sample.pdf').toFileUrl()
  );

  {
    const contextId = await createCopilotContext(app, workspaceId, sessionId);

    const { id: fileId } = await addContextFile(
      app,
      contextId,
      'fileId1',
      'sample.pdf',
      buffer
    );
    await addContextDoc(app, contextId, 'docId1');

-    const { docs } =
+    const { docs, files } =
      (await listContextFiles(app, workspaceId, sessionId, contextId)) || {};
    t.snapshot(
      docs?.map(({ createdAt: _, ...d }) => d),
      'should list context files'
    );
    t.snapshot(
      files?.map(({ createdAt: _, id: __, ...f }) => f),
      'should list context docs'
    );

    // wait for processing
    {
      let { files } =
        (await listContextFiles(app, workspaceId, sessionId, contextId)) || {};

      while (files?.[0].status !== 'finished') {
        await new Promise(resolve => setTimeout(resolve, 1000));
        ({ files } =
          (await listContextFiles(app, workspaceId, sessionId, contextId)) ||
          {});
      }
    }

    const result = (await matchContext(app, contextId, 'test', 1))!;
    t.is(result.length, 1, 'should match context');
    t.is(result[0].fileId, fileId, 'should match file id');
  }
});

@@ -1,14 +1,20 @@
import { randomUUID } from 'node:crypto';

import { ProjectRoot } from '@affine-tools/utils/path';
import type { TestFn } from 'ava';
import ava from 'ava';
import Sinon from 'sinon';

import { EventBus } from '../base';
import { ConfigModule } from '../base/config';
import { AuthService } from '../core/auth';
import { QuotaModule } from '../core/quota';
import { CopilotModule } from '../plugins/copilot';
-import { CopilotContextService } from '../plugins/copilot/context';
+import {
+  CopilotContextDocJob,
+  CopilotContextService,
+} from '../plugins/copilot/context';
import { MockEmbeddingClient } from '../plugins/copilot/context/embedding';
import { prompts, PromptService } from '../plugins/copilot/prompt';
import {
  CopilotProviderService,
@@ -18,6 +24,7 @@ import {
} from '../plugins/copilot/providers';
import { CitationParser } from '../plugins/copilot/providers/perplexity';
import { ChatSessionService } from '../plugins/copilot/session';
import { CopilotStorage } from '../plugins/copilot/storage';
import {
  CopilotCapability,
  CopilotProviderType,
@@ -47,10 +54,13 @@ import { MockCopilotTestProvider, WorkflowTestCases } from './utils/copilot';
const test = ava as TestFn<{
  auth: AuthService;
  module: TestingModule;
  event: EventBus;
  context: CopilotContextService;
  prompt: PromptService;
  provider: CopilotProviderService;
  session: ChatSessionService;
  jobs: CopilotContextDocJob;
  storage: CopilotStorage;
  workflow: CopilotWorkflowService;
  executors: {
    image: CopilotChatImageExecutor;
@@ -85,19 +95,25 @@ test.before(async t => {
  });

  const auth = module.get(AuthService);
  const event = module.get(EventBus);
  const context = module.get(CopilotContextService);
  const prompt = module.get(PromptService);
  const provider = module.get(CopilotProviderService);
  const session = module.get(ChatSessionService);
  const workflow = module.get(CopilotWorkflowService);
  const jobs = module.get(CopilotContextDocJob);
  const storage = module.get(CopilotStorage);

  t.context.module = module;
  t.context.auth = auth;
  t.context.event = event;
  t.context.context = context;
  t.context.prompt = prompt;
  t.context.provider = provider;
  t.context.session = session;
  t.context.workflow = workflow;
  t.context.jobs = jobs;
  t.context.storage = storage;
  t.context.executors = {
    image: module.get(CopilotChatImageExecutor),
    text: module.get(CopilotChatTextExecutor),
@@ -1276,7 +1292,7 @@ test('CitationParser should not replace chunks of citation already with URLs', t

// ==================== context ====================
test('should be able to manage context', async t => {
-  const { context, prompt, session } = t.context;
+  const { context, prompt, session, event, jobs, storage } = t.context;

  await prompt.set('prompt', 'model', [
    { role: 'system', content: 'hello {{word}}' },
@@ -1288,6 +1304,10 @@ test('should be able to manage context', async t => {
    promptName: 'prompt',
  });

  // use mocked embedding client
  Sinon.stub(context, 'embeddingClient').get(() => new MockEmbeddingClient());
  Sinon.stub(jobs, 'embeddingClient').get(() => new MockEmbeddingClient());

  {
    await t.throwsAsync(
      context.create(randomUUID()),
@@ -1310,9 +1330,45 @@ test('should be able to manage context', async t => {
    );
  }

  const fs = await import('node:fs');
  const buffer = fs.readFileSync(
    ProjectRoot.join('packages/common/native/fixtures/sample.pdf').toFileUrl()
  );

  {
    const session = await context.create(chatSession);

    await storage.put(userId, session.workspaceId, 'blob', buffer);

    const file = await session.addFile('blob', 'sample.pdf');

    const handler = Sinon.spy(event, 'emit');

    await jobs.embedPendingFile({
      userId,
      workspaceId: session.workspaceId,
      contextId: session.id,
      blobId: file.blobId,
      fileId: file.id,
      fileName: file.name,
    });

    t.deepEqual(handler.lastCall.args, [
      'workspace.file.embed.finished',
      {
        contextId: session.id,
        fileId: file.id,
        chunkSize: 1,
      },
    ]);

    const list = session.listFiles();
    t.deepEqual(
      list.map(f => f.id),
      [file.id],
      'should list file id'
    );

    const docId = randomUUID();
    await session.addDocRecord(docId);
    const docs = session.listDocs().map(d => d.id);
@@ -1320,5 +1376,9 @@ test('should be able to manage context', async t => {

    await session.removeDocRecord(docId);
    t.deepEqual(session.listDocs(), [], 'should remove doc id');

    const result = await session.matchFileChunks('test', 1, undefined, 1);
    t.is(result.length, 1, 'should match context');
    t.is(result[0].fileId, file.id, 'should match file id');
  }
});

@@ -240,19 +240,25 @@ export async function matchContext(
> {
  const res = await app.gql(
    `
-    mutation matchContext($content: String!, $contextId: String!, $limit: SafeInt) {
-      matchContext(content: $content, contextId: $contextId, limit: $limit) {
-        fileId
-        chunk
-        content
-        distance
+    query matchContext($contextId: String!, $content: String!, $limit: SafeInt, $threshold: Float) {
+      currentUser {
+        copilot {
+          contexts(contextId: $contextId) {
+            matchContext(content: $content, limit: $limit, threshold: $threshold) {
+              fileId
+              chunk
+              content
+              distance
+            }
+          }
        }
      }
    }
    `,
-    { contextId, content, limit }
+    { contextId, content, limit, threshold: 1 }
  );

-  return res.matchContext;
+  return res.currentUser?.copilot?.contexts?.[0]?.matchContext;
}

export async function listContext(
@@ -287,7 +293,7 @@ export async function addContextFile(
  blobId: string,
  fileName: string,
  content: Buffer
-): Promise<{ id: string }[]> {
+): Promise<{ id: string }> {
  const res = await app
    .POST(gql)
    .set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
@@ -303,7 +309,7 @@ export async function addContextFile(
      `,
      variables: {
        content: null,
-        options: { contextId, blobId, fileName },
+        options: { contextId, blobId },
      },
    })
  )

@@ -682,6 +682,10 @@ export const USER_FRIENDLY_ERRORS = {
    message: ({ contextId, content, message }) =>
      `Failed to match context ${contextId} with "${escape(content)}": ${message}`,
  },
  copilot_embedding_unavailable: {
    type: 'action_forbidden',
    message: `Embedding feature not available, you may need to install pgvector extension to your database`,
  },

  // Quota & Limit errors
  blob_quota_exceeded: {

@@ -737,6 +737,12 @@ export class CopilotFailedToMatchContext extends UserFriendlyError {
  }
}

export class CopilotEmbeddingUnavailable extends UserFriendlyError {
  constructor(message?: string) {
    super('action_forbidden', 'copilot_embedding_unavailable', message);
  }
}

export class BlobQuotaExceeded extends UserFriendlyError {
  constructor(message?: string) {
    super('quota_exceeded', 'blob_quota_exceeded', message);
@@ -976,6 +982,7 @@ export enum ErrorNames {
  COPILOT_CONTEXT_FILE_NOT_SUPPORTED,
  COPILOT_FAILED_TO_MODIFY_CONTEXT,
  COPILOT_FAILED_TO_MATCH_CONTEXT,
  COPILOT_EMBEDDING_UNAVAILABLE,
  BLOB_QUOTA_EXCEEDED,
  MEMBER_QUOTA_EXCEEDED,
  COPILOT_QUOTA_EXCEEDED,

@@ -30,6 +30,7 @@ export const mintChallengeResponse = async (resource: string, bits: number) => {
};

export const getMime = serverNativeModule.getMime;
export const parseDoc = serverNativeModule.parseDoc;
export const Tokenizer = serverNativeModule.Tokenizer;
export const fromModelName = serverNativeModule.fromModelName;
export const htmlSanitize = serverNativeModule.htmlSanitize;

@@ -0,0 +1,35 @@
import OpenAI from 'openai';

import { Embedding, EmbeddingClient } from './types';

export class OpenAIEmbeddingClient extends EmbeddingClient {
  constructor(private readonly client: OpenAI) {
    super();
  }

  async getEmbeddings(
    input: string[],
    signal?: AbortSignal
  ): Promise<Embedding[]> {
    const resp = await this.client.embeddings.create(
      {
        input,
        model: 'text-embedding-3-small',
        dimensions: 512,
        encoding_format: 'float',
      },
      { signal }
    );
    return resp.data.map(e => ({ ...e, content: input[e.index] }));
  }
}

export class MockEmbeddingClient extends EmbeddingClient {
  async getEmbeddings(input: string[]): Promise<Embedding[]> {
    return input.map((_, i) => ({
      index: i,
      content: input[i],
      embedding: Array.from({ length: 512 }, () => Math.random()),
    }));
  }
}

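For orientation, both clients above satisfy the same abstract EmbeddingClient contract, so tests can swap the mock in without touching callers. A minimal consumption sketch (the standalone setup is illustrative, not part of this commit):

    import { MockEmbeddingClient } from './embedding';

    async function demo() {
      const client = new MockEmbeddingClient();
      // each result keeps its input index and content alongside a 512-dim vector
      const [first] = await client.getEmbeddings(['hello world']);
      console.assert(first.embedding.length === 512 && first.content === 'hello world');
    }
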
@@ -1,3 +1,4 @@
export { CopilotContextDocJob } from './job';
export { CopilotContextResolver, CopilotContextRootResolver } from './resolver';
export { CopilotContextService } from './service';
export { type ContextFile, ContextFileStatus } from './types';

packages/backend/server/src/plugins/copilot/context/job.ts (new file, 205 lines)
@@ -0,0 +1,205 @@
import { randomUUID } from 'node:crypto';

import { Injectable, OnModuleInit } from '@nestjs/common';
import { Prisma, PrismaClient } from '@prisma/client';
import OpenAI from 'openai';

import {
  AFFiNELogger,
  BlobNotFound,
  Config,
  EventBus,
  JobQueue,
  OnEvent,
  OnJob,
} from '../../../base';
import { DocReader } from '../../../core/doc';
import { CopilotStorage } from '../storage';
import { OpenAIEmbeddingClient } from './embedding';
import { Embedding, EmbeddingClient } from './types';
import { checkEmbeddingAvailable, readStream } from './utils';

declare global {
  interface Jobs {
    'doc.embedPendingDocs': {
      workspaceId: string;
      docId: string;
    };

    'doc.embedPendingFiles': {
      contextId: string;
      userId: string;
      workspaceId: string;
      blobId: string;
      fileId: string;
      fileName: string;
    };
  }
}

@Injectable()
export class CopilotContextDocJob implements OnModuleInit {
  private supportEmbedding = false;
  private readonly client: EmbeddingClient | undefined;

  constructor(
    config: Config,
    private readonly db: PrismaClient,
    private readonly doc: DocReader,
    private readonly event: EventBus,
    private readonly logger: AFFiNELogger,
    private readonly queue: JobQueue,
    private readonly storage: CopilotStorage
  ) {
    this.logger.setContext(CopilotContextDocJob.name);
    const configure = config.plugins.copilot.openai;
    if (configure) {
      this.client = new OpenAIEmbeddingClient(new OpenAI(configure));
    }
  }

  async onModuleInit() {
    this.supportEmbedding = await checkEmbeddingAvailable(this.db);
  }

  // expose the client publicly so tests can override it
  get embeddingClient() {
    return this.client as EmbeddingClient;
  }

  async addFileEmbeddingQueue(file: Jobs['doc.embedPendingFiles']) {
    if (!this.supportEmbedding) return;

    const { userId, workspaceId, contextId, blobId, fileId, fileName } = file;
    await this.queue.add('doc.embedPendingFiles', {
      userId,
      workspaceId,
      contextId,
      blobId,
      fileId,
      fileName,
    });
  }

  @OnEvent('workspace.doc.embedding')
  async addDocEmbeddingQueue(docs: Events['workspace.doc.embedding']) {
    if (!this.supportEmbedding) return;

    for (const { workspaceId, docId } of docs) {
      await this.queue.add('doc.embedPendingDocs', { workspaceId, docId });
    }
  }

  private processEmbeddings(
    contextOrWorkspaceId: string,
    fileOrDocId: string,
    embeddings: Embedding[]
  ) {
    const groups = embeddings.map(e => [
      randomUUID(),
      contextOrWorkspaceId,
      fileOrDocId,
      e.index,
      e.content,
      Prisma.raw(`'[${e.embedding.join(',')}]'`),
      new Date(),
    ]);
    return Prisma.join(groups.map(row => Prisma.sql`(${Prisma.join(row)})`));
  }

  async readCopilotBlob(
    userId: string,
    workspaceId: string,
    blobId: string,
    fileName: string
  ) {
    const { body } = await this.storage.get(userId, workspaceId, blobId);
    if (!body) throw new BlobNotFound({ spaceId: workspaceId, blobId });
    const buffer = await readStream(body);
    return new File([buffer], fileName);
  }

  @OnJob('doc.embedPendingFiles')
  async embedPendingFile({
    userId,
    workspaceId,
    contextId,
    blobId,
    fileId,
    fileName,
  }: Jobs['doc.embedPendingFiles']) {
    if (!this.supportEmbedding || !this.embeddingClient) return;

    try {
      const file = await this.readCopilotBlob(
        userId,
        workspaceId,
        blobId,
        fileName
      );

      // no need to check whether embeddings are empty; the client throws internally
      const chunks = await this.embeddingClient.getFileChunks(file);
      const total = chunks.reduce((acc, c) => acc + c.length, 0);

      for (const chunk of chunks) {
        const embeddings = await this.embeddingClient.generateEmbeddings(chunk);
        const values = this.processEmbeddings(contextId, fileId, embeddings);

        await this.db.$executeRaw`
          INSERT INTO "ai_context_embeddings"
          ("id", "context_id", "file_id", "chunk", "content", "embedding", "updated_at") VALUES ${values}
          ON CONFLICT (context_id, file_id, chunk) DO UPDATE SET
          content = EXCLUDED.content, embedding = EXCLUDED.embedding, updated_at = EXCLUDED.updated_at;
        `;
      }

      this.event.emit('workspace.file.embed.finished', {
        contextId,
        fileId,
        chunkSize: total,
      });
    } catch (e: any) {
      this.logger.error(
        `Failed to embed pending file: ${contextId}::${fileId}`,
        e
      );

      this.event.emit('workspace.file.embed.failed', {
        contextId,
        fileId,
        error: e.toString(),
      });
    }
  }

  @OnJob('doc.embedPendingDocs')
  async embedPendingDocs({ workspaceId, docId }: Jobs['doc.embedPendingDocs']) {
    if (!this.supportEmbedding) return;

    try {
      const content = await this.doc.getDocContent(workspaceId, docId);
      if (content) {
        // no need to check whether embeddings are empty; the client throws internally
        const embeddings = await this.embeddingClient.getFileEmbeddings(
          new File([content.summary], `${content.title}.md`)
        );

        for (const chunks of embeddings) {
          const values = this.processEmbeddings(workspaceId, docId, chunks);
          await this.db.$executeRaw`
            INSERT INTO "ai_workspace_embeddings"
            ("workspace_id", "doc_id", "chunk", "content", "embedding", "updated_at") VALUES ${values}
            ON CONFLICT (workspace_id, doc_id, chunk) DO UPDATE SET
            embedding = EXCLUDED.embedding, updated_at = EXCLUDED.updated_at;
          `;
        }
      }
    } catch (e: any) {
      this.logger.error(
        `Failed to embed pending doc: ${workspaceId}::${docId}`,
        e
      );
    }
  }
}

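A hedged sketch of how the resolver layer hands work to this job (identifiers mirror the diff; the surrounding request context is assumed):

    // enqueued onto the 'doc.embedPendingFiles' queue; embedPendingFile picks it
    // up asynchronously and emits 'workspace.file.embed.finished' or '.failed'
    await jobs.addFileEmbeddingQueue({
      userId: user.id,
      workspaceId: session.workspaceId,
      contextId: session.id,
      blobId: file.blobId,
      fileId: file.id,
      fileName: file.name,
    });
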
@@ -1,30 +1,53 @@
import {
  Args,
  Context,
  Field,
  Float,
  ID,
  InputType,
  Mutation,
  ObjectType,
  Parent,
  Query,
  registerEnumType,
  ResolveField,
  Resolver,
} from '@nestjs/graphql';
import { PrismaClient } from '@prisma/client';
import type { Request } from 'express';
import { SafeIntResolver } from 'graphql-scalars';
import GraphQLUpload from 'graphql-upload/GraphQLUpload.mjs';

import {
  BlobQuotaExceeded,
  CallMetric,
  CopilotEmbeddingUnavailable,
  CopilotFailedToMatchContext,
  CopilotFailedToModifyContext,
  CopilotSessionNotFound,
  EventBus,
  type FileUpload,
  RequestMutex,
  Throttle,
  TooManyRequest,
  UserFriendlyError,
} from '../../../base';
import { CurrentUser } from '../../../core/auth';
import { AccessController } from '../../../core/permission';
import { COPILOT_LOCKER, CopilotType } from '../resolver';
import { ChatSessionService } from '../session';
import { CopilotStorage } from '../storage';
import { CopilotContextDocJob } from './job';
import { CopilotContextService } from './service';
-import { ContextDoc, type ContextFile, ContextFileStatus } from './types';
+import {
+  ContextDoc,
+  type ContextFile,
+  ContextFileStatus,
+  DocChunkSimilarity,
+  FileChunkSimilarity,
+  MAX_EMBEDDABLE_SIZE,
+} from './types';
import { readStream } from './utils';

@InputType()
class AddContextDocInput {
@@ -44,6 +67,24 @@ class RemoveContextDocInput {
  docId!: string;
}

@InputType()
class AddContextFileInput {
  @Field(() => String)
  contextId!: string;

  @Field(() => String)
  blobId!: string;
}

@InputType()
class RemoveContextFileInput {
  @Field(() => String)
  contextId!: string;

  @Field(() => String)
  fileId!: string;
}

@ObjectType('CopilotContext')
export class CopilotContextType {
  @Field(() => ID)
@@ -78,6 +119,9 @@ class CopilotContextFile implements ContextFile {
  @Field(() => ContextFileStatus)
  status!: ContextFileStatus;

  @Field(() => String, { nullable: true })
  error!: string | null;

  @Field(() => String)
  blobId!: string;

@@ -86,30 +130,51 @@ class CopilotContextFile implements ContextFile {
}

@ObjectType()
-class CopilotContextListItem {
-  @Field(() => ID)
-  id!: string;
+class ContextMatchedFileChunk implements FileChunkSimilarity {
+  @Field(() => String)
+  fileId!: string;

  @Field(() => SafeIntResolver)
-  createdAt!: number;
+  chunk!: number;

-  @Field(() => String, { nullable: true })
-  name!: string;
+  @Field(() => String)
+  content!: string;

-  @Field(() => SafeIntResolver, { nullable: true })
-  chunkSize!: number;
+  @Field(() => Float, { nullable: true })
+  distance!: number | null;
+}

-  @Field(() => ContextFileStatus, { nullable: true })
-  status!: ContextFileStatus;
+@ObjectType()
+class ContextWorkspaceEmbeddingStatus {
+  @Field(() => SafeIntResolver)
+  total!: number;

-  @Field(() => String, { nullable: true })
-  blobId!: string;
+  @Field(() => SafeIntResolver)
+  embedded!: number;
}

@ObjectType()
class ContextMatchedDocChunk implements DocChunkSimilarity {
  @Field(() => String)
  docId!: string;

  @Field(() => SafeIntResolver)
  chunk!: number;

  @Field(() => String)
  content!: string;

  @Field(() => Float, { nullable: true })
  distance!: number | null;
}

@Throttle()
@Resolver(() => CopilotType)
export class CopilotContextRootResolver {
  constructor(
    private readonly db: PrismaClient,
    private readonly ac: AccessController,
    private readonly event: EventBus,
    private readonly mutex: RequestMutex,
    private readonly chatSession: ChatSessionService,
    private readonly context: CopilotContextService
@@ -138,27 +203,30 @@ export class CopilotContextRootResolver {
  async contexts(
    @Parent() copilot: CopilotType,
    @CurrentUser() user: CurrentUser,
-    @Args('sessionId') sessionId: string,
+    @Args('sessionId', { nullable: true }) sessionId?: string,
    @Args('contextId', { nullable: true }) contextId?: string
  ) {
-    const lockFlag = `${COPILOT_LOCKER}:context:${sessionId}`;
-    await using lock = await this.mutex.acquire(lockFlag);
-    if (!lock) {
-      return new TooManyRequest('Server is busy');
-    }
-    await this.checkChatSession(
-      user,
-      sessionId,
-      copilot.workspaceId || undefined
-    );
+    if (sessionId || contextId) {
+      const lockFlag = `${COPILOT_LOCKER}:context:${sessionId || contextId}`;
+      await using lock = await this.mutex.acquire(lockFlag);
+      if (!lock) {
+        return new TooManyRequest('Server is busy');
+      }

-    if (contextId) {
-      const context = await this.context.get(contextId);
-      if (context) return [context];
-    } else {
-      const context = await this.context.getBySessionId(sessionId);
-      if (context) return [context];
+      if (contextId) {
+        const context = await this.context.get(contextId);
+        if (context) return [context];
+      } else if (sessionId) {
+        await this.checkChatSession(
+          user,
+          sessionId,
+          copilot.workspaceId || undefined
+        );
+        const context = await this.context.getBySessionId(sessionId);
+        if (context) return [context];
+      }
    }

    return [];
  }

@@ -181,17 +249,80 @@ export class CopilotContextRootResolver {
    const context = await this.context.create(sessionId);
    return context.id;
  }

  @Mutation(() => Boolean, {
    description: 'queue workspace doc embedding',
  })
  @CallMetric('ai', 'context_queue_workspace_doc')
  async queueWorkspaceEmbedding(
    @CurrentUser() user: CurrentUser,
    @Args('workspaceId') workspaceId: string,
    @Args('docId', { type: () => [String] }) docIds: string[]
  ) {
    await this.ac
      .user(user.id)
      .workspace(workspaceId)
      .allowLocal()
      .assert('Workspace.Copilot');

    if (this.context.canEmbedding) {
      this.event.emit(
        'workspace.doc.embedding',
        docIds.map(docId => ({ workspaceId, docId }))
      );
      return true;
    }

    return false;
  }

  @Query(() => ContextWorkspaceEmbeddingStatus, {
    description: 'query workspace embedding status',
  })
  @CallMetric('ai', 'context_query_workspace_embedding_status')
  async queryWorkspaceEmbeddingStatus(
    @CurrentUser() user: CurrentUser,
    @Args('workspaceId') workspaceId: string
  ) {
    await this.ac
      .user(user.id)
      .workspace(workspaceId)
      .allowLocal()
      .assert('Workspace.Copilot');

    if (this.context.canEmbedding) {
      const total = await this.db.snapshot.count({ where: { workspaceId } });
      const embedded = await this.db.snapshot.count({
        where: { workspaceId, embedding: { isNot: null } },
      });
      return { total, embedded };
    }

    return { total: 0, embedded: 0 };
  }
}

@Throttle()
@Resolver(() => CopilotContextType)
export class CopilotContextResolver {
  constructor(
    private readonly ac: AccessController,
    private readonly mutex: RequestMutex,

-    private readonly context: CopilotContextService
+    private readonly context: CopilotContextService,
+    private readonly jobs: CopilotContextDocJob,
+    private readonly storage: CopilotStorage
  ) {}

  private getSignal(req: Request) {
    const controller = new AbortController();
    req.socket.on('close', hasError => {
      if (hasError) {
        controller.abort();
      }
    });
    return controller.signal;
  }

  @ResolveField(() => [CopilotContextDoc], {
    description: 'list files in context',
  })
@@ -201,7 +332,7 @@ export class CopilotContextResolver {
    return session.listDocs();
  }

-  @Mutation(() => [CopilotContextListItem], {
+  @Mutation(() => CopilotContextDoc, {
    description: 'add a doc to context',
  })
  @CallMetric('ai', 'context_doc_add')
@@ -261,4 +392,175 @@ export class CopilotContextResolver {
    const session = await this.context.get(context.id);
    return session.listFiles();
  }

  @Mutation(() => CopilotContextFile, {
    description: 'add a file to context',
  })
  @CallMetric('ai', 'context_file_add')
  async addContextFile(
    @CurrentUser() user: CurrentUser,
    @Context() ctx: { req: Request },
    @Args({ name: 'options', type: () => AddContextFileInput })
    options: AddContextFileInput,
    @Args({ name: 'content', type: () => GraphQLUpload })
    content: FileUpload
  ) {
    if (!this.context.canEmbedding) {
      throw new CopilotEmbeddingUnavailable();
    }

    const lockFlag = `${COPILOT_LOCKER}:context:${options.contextId}`;
    await using lock = await this.mutex.acquire(lockFlag);
    if (!lock) {
      return new TooManyRequest('Server is busy');
    }

    const length = Number(ctx.req.headers['content-length']);
    if (length && length >= MAX_EMBEDDABLE_SIZE) {
      throw new BlobQuotaExceeded();
    }

    const session = await this.context.get(options.contextId);

    try {
      const file = await session.addFile(options.blobId, content.filename);

      const buffer = await readStream(content.createReadStream());
      await this.storage.put(
        user.id,
        session.workspaceId,
        options.blobId,
        buffer
      );

      await this.jobs.addFileEmbeddingQueue({
        userId: user.id,
        workspaceId: session.workspaceId,
        contextId: session.id,
        blobId: file.blobId,
        fileId: file.id,
        fileName: file.name,
      });

      return file;
    } catch (e: any) {
      // passthrough user friendly error
      if (e instanceof UserFriendlyError) {
        throw e;
      }
      throw new CopilotFailedToModifyContext({
        contextId: options.contextId,
        message: e.message,
      });
    }
  }

  @Mutation(() => Boolean, {
    description: 'remove a file from context',
  })
  @CallMetric('ai', 'context_file_remove')
  async removeContextFile(
    @Args({ name: 'options', type: () => RemoveContextFileInput })
    options: RemoveContextFileInput
  ) {
    if (!this.context.canEmbedding) {
      throw new CopilotEmbeddingUnavailable();
    }

    const lockFlag = `${COPILOT_LOCKER}:context:${options.contextId}`;
    await using lock = await this.mutex.acquire(lockFlag);
    if (!lock) {
      return new TooManyRequest('Server is busy');
    }
    const session = await this.context.get(options.contextId);

    try {
      return await session.removeFile(options.fileId);
    } catch (e: any) {
      throw new CopilotFailedToModifyContext({
        contextId: options.contextId,
        message: e.message,
      });
    }
  }

  @ResolveField(() => [ContextMatchedFileChunk], {
    description: 'match file context',
  })
  @CallMetric('ai', 'context_file_remove')
  async matchContext(
    @Context() ctx: { req: Request },
    @Parent() context: CopilotContextType,
    @Args('content') content: string,
    @Args('limit', { type: () => SafeIntResolver, nullable: true })
    limit?: number,
    @Args('threshold', { type: () => Float, nullable: true })
    threshold?: number
  ) {
    if (!this.context.canEmbedding) {
      return [];
    }

    const lockFlag = `${COPILOT_LOCKER}:context:${context.id}`;
    await using lock = await this.mutex.acquire(lockFlag);
    if (!lock) {
      return new TooManyRequest('Server is busy');
    }
    const session = await this.context.get(context.id);

    try {
      return await session.matchFileChunks(
        content,
        limit,
        this.getSignal(ctx.req),
        threshold
      );
    } catch (e: any) {
      throw new CopilotFailedToMatchContext({
        contextId: context.id,
        // don't record the large content
        content: content.slice(0, 512),
        message: e.message,
      });
    }
  }

  @ResolveField(() => ContextMatchedDocChunk, {
    description: 'match workspace doc content',
  })
  @CallMetric('ai', 'context_match_workspace_doc')
  async matchWorkspaceContext(
    @CurrentUser() user: CurrentUser,
    @Context() ctx: { req: Request },
    @Parent() context: CopilotContextType,
    @Args('content') content: string,
    @Args('limit', { type: () => SafeIntResolver, nullable: true })
    limit?: number
  ) {
    if (!this.context.canEmbedding) {
      return [];
    }

    const session = await this.context.get(context.id);
    await this.ac
      .user(user.id)
      .workspace(session.workspaceId)
      .allowLocal()
      .assert('Workspace.Copilot');

    try {
      return await session.matchWorkspaceChunks(
        content,
        limit,
        this.getSignal(ctx.req)
      );
    } catch (e: any) {
      throw new CopilotFailedToMatchContext({
        contextId: context.id,
        // don't record the large content
        content: content.slice(0, 512),
        message: e.message,
      });
    }
  }
}

@@ -1,30 +1,70 @@
-import { Injectable } from '@nestjs/common';
+import { Injectable, OnModuleInit } from '@nestjs/common';
import { PrismaClient } from '@prisma/client';
import OpenAI from 'openai';

import {
  Cache,
  Config,
  CopilotInvalidContext,
  CopilotSessionNotFound,
  NoCopilotProviderAvailable,
  OnEvent,
  PrismaTransaction,
} from '../../../base';
import { OpenAIEmbeddingClient } from './embedding';
import { ContextSession } from './session';
-import { ContextConfig, ContextConfigSchema } from './types';
+import {
+  ContextConfig,
+  ContextConfigSchema,
+  ContextFile,
+  ContextFileStatus,
+  EmbeddingClient,
+} from './types';
import { checkEmbeddingAvailable } from './utils';

const CONTEXT_SESSION_KEY = 'context-session';

@Injectable()
-export class CopilotContextService {
+export class CopilotContextService implements OnModuleInit {
+  private supportEmbedding = false;
+  private readonly client: EmbeddingClient | undefined;

  constructor(
    config: Config,
    private readonly cache: Cache,
    private readonly db: PrismaClient
-  ) {}
+  ) {
+    const configure = config.plugins.copilot.openai;
+    if (configure) {
+      this.client = new OpenAIEmbeddingClient(new OpenAI(configure));
+    }
+  }

  async onModuleInit() {
    const supportEmbedding = await checkEmbeddingAvailable(this.db);
    if (supportEmbedding) {
      this.supportEmbedding = true;
    }
  }

  get canEmbedding() {
    return this.supportEmbedding;
  }

  // expose the client publicly so tests can override it
  get embeddingClient() {
    return this.client as EmbeddingClient;
  }

  private async saveConfig(
    contextId: string,
    config: ContextConfig,
    tx?: PrismaTransaction,
    refreshCache = false
  ): Promise<void> {
    if (!refreshCache) {
-      await this.db.aiContext.update({
+      const executor = tx || this.db;
+      await executor.aiContext.update({
        where: { id: contextId },
        data: { config },
      });
@@ -42,8 +82,10 @@ export class CopilotContextService {
    const config = ContextConfigSchema.safeParse(cachedSession);
    if (config.success) {
      return new ContextSession(
        this.embeddingClient,
        contextId,
        config.data,
        this.db,
        this.saveConfig.bind(this, contextId)
      );
    }
@@ -60,8 +102,14 @@ export class CopilotContextService {
    config: ContextConfig
  ): Promise<ContextSession> {
    const dispatcher = this.saveConfig.bind(this, contextId);
-    await dispatcher(config, true);
-    return new ContextSession(contextId, config, dispatcher);
+    await dispatcher(config, undefined, true);
+    return new ContextSession(
+      this.embeddingClient,
+      contextId,
+      config,
+      this.db,
+      dispatcher
+    );
  }

  async create(sessionId: string): Promise<ContextSession> {
@@ -89,6 +137,10 @@ export class CopilotContextService {
  }

  async get(id: string): Promise<ContextSession> {
    if (!this.embeddingClient) {
      throw new NoCopilotProviderAvailable('embedding client not configured');
    }

    const context = await this.getCachedSession(id);
    if (context) return context;
    const ret = await this.db.aiContext.findUnique({
@@ -110,4 +162,32 @@ export class CopilotContextService {
    if (existsContext) return this.get(existsContext.id);
    return null;
  }

  @OnEvent('workspace.file.embed.finished')
  async onFileEmbedFinish({
    contextId,
    fileId,
    chunkSize,
  }: Events['workspace.file.embed.finished']) {
    const context = await this.get(contextId);
    await context.saveFileRecord(fileId, file => ({
      ...(file as ContextFile),
      chunkSize,
      status: ContextFileStatus.finished,
    }));
  }

  @OnEvent('workspace.file.embed.failed')
  async onFileEmbedFailed({
    contextId,
    fileId,
    error,
  }: Events['workspace.file.embed.failed']) {
    const context = await this.get(contextId);
    await context.saveFileRecord(fileId, file => ({
      ...(file as ContextFile),
      error,
      status: ContextFileStatus.failed,
    }));
  }
}

@@ -1,10 +1,29 @@
-import { ContextConfig, ContextDoc, ContextList } from './types';
+import { PrismaClient } from '@prisma/client';
+import { nanoid } from 'nanoid';
+
+import { PrismaTransaction } from '../../../base';
+import {
+  ChunkSimilarity,
+  ContextConfig,
+  ContextDoc,
+  ContextFile,
+  ContextFileStatus,
+  ContextList,
+  DocChunkSimilarity,
+  EmbeddingClient,
+  FileChunkSimilarity,
+} from './types';

export class ContextSession implements AsyncDisposable {
  constructor(
    private readonly client: EmbeddingClient,
    private readonly contextId: string,
    private readonly config: ContextConfig,
-    private readonly dispatcher?: (config: ContextConfig) => Promise<void>
+    private readonly db: PrismaClient,
+    private readonly dispatcher?: (
+      config: ContextConfig,
+      tx?: PrismaTransaction
+    ) => Promise<void>
  ) {}

  get id() {
@@ -30,12 +49,15 @@ export class ContextSession implements AsyncDisposable {
    ) as ContextList;
  }

-  async addDocRecord(docId: string): Promise<ContextList> {
-    if (!this.config.docs.some(f => f.id === docId)) {
-      this.config.docs.push({ id: docId, createdAt: Date.now() });
-      await this.save();
+  async addDocRecord(docId: string): Promise<ContextDoc> {
+    const doc = this.config.docs.find(f => f.id === docId);
+    if (doc) {
+      return doc;
    }
-    return this.sortedList;
+    const record = { id: docId, createdAt: Date.now() };
+    this.config.docs.push(record);
+    await this.save();
+    return record;
  }

  async removeDocRecord(docId: string): Promise<boolean> {
@@ -48,8 +70,123 @@ export class ContextSession implements AsyncDisposable {
    return false;
  }

-  async save() {
-    await this.dispatcher?.(this.config);
  async addFile(blobId: string, name: string): Promise<ContextFile> {
    let fileId = nanoid();
    const existsBlob = this.config.files.find(f => f.blobId === blobId);
    if (existsBlob) {
      // reuse the existing file id if the blob already exists;
      // we assume that the file content pointed to by the same blobId is consistent.
      if (existsBlob.status === ContextFileStatus.finished) {
        return existsBlob;
      }
      fileId = existsBlob.id;
    } else {
      await this.saveFileRecord(fileId, file => ({
        ...file,
        blobId,
        chunkSize: 0,
        name,
        error: null,
        createdAt: Date.now(),
      }));
    }
    return this.getFile(fileId) as ContextFile;
  }

  getFile(fileId: string): ContextFile | undefined {
    return this.config.files.find(f => f.id === fileId);
  }

  async removeFile(fileId: string): Promise<boolean> {
    return await this.db.$transaction(async tx => {
      await tx.aiContextEmbedding.deleteMany({
        where: { contextId: this.contextId, fileId },
      });
      this.config.files = this.config.files.filter(f => f.id !== fileId);
      await this.save(tx);
      return true;
    });
  }

  /**
   * Match the input text with the file chunks
   * @param content input text to match
   * @param topK number of similar chunks to return, default 5
   * @param signal abort signal
   * @param threshold maximum cosine distance for returned chunks; lower values keep only closer matches. Default 0.7, which proved good enough in prior experiments
   * @returns list of similar chunks
   */
  async matchFileChunks(
    content: string,
    topK: number = 5,
    signal?: AbortSignal,
    threshold: number = 0.7
  ): Promise<FileChunkSimilarity[]> {
    const embedding = await this.client
      .getEmbeddings([content], signal)
      .then(r => r?.[0]?.embedding);
    if (!embedding) return [];
    const similarityChunks = await this.db.$queryRaw<
      Array<FileChunkSimilarity>
    >`
      SELECT "file_id" as "fileId", "chunk", "content", "embedding" <=> ${embedding}::vector as "distance"
      FROM "ai_context_embeddings"
      WHERE context_id = ${this.id}
      ORDER BY "distance" ASC
      LIMIT ${topK};
    `;
    return similarityChunks.filter(c => Number(c.distance) <= threshold);
  }

  /**
   * Match the input text with the workspace chunks
   * @param content input text to match
   * @param topK number of similar chunks to return, default 5
   * @param signal abort signal
   * @param threshold maximum cosine distance for returned chunks; lower values keep only closer matches. Default 0.7, which proved good enough in prior experiments
   * @returns list of similar chunks
   */
  async matchWorkspaceChunks(
    content: string,
    topK: number = 5,
    signal?: AbortSignal,
    threshold: number = 0.7
  ): Promise<ChunkSimilarity[]> {
    const embedding = await this.client
      .getEmbeddings([content], signal)
      .then(r => r?.[0]?.embedding);
    if (!embedding) return [];
    const similarityChunks = await this.db.$queryRaw<Array<DocChunkSimilarity>>`
      SELECT "doc_id" as "docId", "chunk", "content", "embedding" <=> ${embedding}::vector as "distance"
      FROM "ai_workspace_embeddings"
      WHERE "workspace_id" = ${this.workspaceId}
      ORDER BY "distance" ASC
      LIMIT ${topK};
    `;
    return similarityChunks.filter(c => Number(c.distance) <= threshold);
  }

  async saveFileRecord(
    fileId: string,
    cb: (
      record: Pick<ContextFile, 'id' | 'status'> &
        Partial<Omit<ContextFile, 'id' | 'status'>>
    ) => ContextFile,
    tx?: PrismaTransaction
  ) {
    const files = this.config.files;
    const file = files.find(f => f.id === fileId);
    if (file) {
      Object.assign(file, cb({ ...file }));
    } else {
      const file = { id: fileId, status: ContextFileStatus.processing };
      files.push(cb(file));
    }
    await this.save(tx);
  }

  async save(tx?: PrismaTransaction) {
    await this.dispatcher?.(this.config, tx);
  }

  async [Symbol.asyncDispose]() {
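For reference, the <=> operator in the queries above is pgvector's cosine-distance operator: 0 means identical direction, and only rows with distance at or below the threshold survive the filter. A plain TypeScript equivalent of the metric (an illustrative helper, not part of this change):

    function cosineDistance(a: number[], b: number[]): number {
      // distance = 1 - cosine similarity; lower means more similar
      const dot = a.reduce((sum, v, i) => sum + v * b[i], 0);
      const norm = (v: number[]) => Math.hypot(...v);
      return 1 - dot / (norm(a) * norm(b));
    }
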
@@ -1,14 +1,31 @@
import { File } from 'node:buffer';

import { z } from 'zod';

import { CopilotContextFileNotSupported, OneMB } from '../../../base';
import { parseDoc } from '../../../native';

declare global {
  interface Events {
-    'workspace.doc.embedding': {
+    'workspace.doc.embedding': Array<{
      workspaceId: string;
      docId: string;
    }>;
    'workspace.file.embed.finished': {
      contextId: string;
      fileId: string;
      chunkSize: number;
    };
    'workspace.file.embed.failed': {
      contextId: string;
      fileId: string;
      error: string;
    };
  }
}

export const MAX_EMBEDDABLE_SIZE = 50 * OneMB;

export enum ContextFileStatus {
  processing = 'processing',
  finished = 'finished',
@@ -27,6 +44,7 @@ export const ContextConfigSchema = z.object({
      ContextFileStatus.finished,
      ContextFileStatus.failed,
    ]),
    error: z.string().nullable(),
    blobId: z.string(),
    createdAt: z.number(),
  })
@@ -45,6 +63,11 @@ export type ContextFile = z.infer<typeof ContextConfigSchema>['files'][number];
export type ContextListItem = ContextDoc | ContextFile;
export type ContextList = ContextListItem[];

export type Chunk = {
  index: number;
  content: string;
};

export type ChunkSimilarity = {
  chunk: number;
  content: string;
@@ -67,3 +90,72 @@ export type Embedding = {
  content: string;
  embedding: Array<number>;
};

export abstract class EmbeddingClient {
  async getFileEmbeddings(
    file: File,
    signal?: AbortSignal
  ): Promise<Embedding[][]> {
    const chunks = await this.getFileChunks(file, signal);
    const chunkedEmbeddings = await Promise.all(
      chunks.map(chunk => this.generateEmbeddings(chunk))
    );
    return chunkedEmbeddings;
  }

  async getFileChunks(file: File, signal?: AbortSignal): Promise<Chunk[][]> {
    const buffer = Buffer.from(await file.arrayBuffer());
    let doc;
    try {
      doc = await parseDoc(file.name, buffer);
    } catch (e: any) {
      throw new CopilotContextFileNotSupported({
        fileName: file.name,
        message: e?.message || e?.toString?.() || 'format not supported',
      });
    }
    if (doc && !signal?.aborted) {
      if (!doc.chunks.length) {
        throw new CopilotContextFileNotSupported({
          fileName: file.name,
          message: 'no content found',
        });
      }
      const input = doc.chunks.toSorted((a, b) => a.index - b.index);
      // batch the sorted chunks into groups of 32 for embedding requests
      const chunks: Chunk[][] = [];
      for (let i = 0; i < input.length; i += 32) {
        chunks.push(input.slice(i, i + 32));
      }
      return chunks;
    }
    throw new CopilotContextFileNotSupported({
      fileName: file.name,
      message: 'failed to parse file',
    });
  }

  async generateEmbeddings(chunks: Chunk[]): Promise<Embedding[]> {
    const retry = 3;

    let embeddings: Embedding[] = [];
    let error = null;
    for (let i = 0; i < retry; i++) {
      try {
        embeddings = await this.getEmbeddings(chunks.map(c => c.content));
        // clear any error recorded by a previous failed attempt
        error = null;
        break;
      } catch (e) {
        error = e;
      }
    }
    if (error) throw error;

    // map each embedding back to its original chunk index
    return embeddings.map(e => ({ ...e, index: chunks[e.index].index }));
  }

  abstract getEmbeddings(
    input: string[],
    signal?: AbortSignal
  ): Promise<Embedding[]>;
}

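A quick sanity check of the batching in getFileChunks above (a standalone sketch; the batch size mirrors the hard-coded 32):

    // 70 parsed chunks are grouped into batches of 32, 32 and 6
    function batch<T>(xs: T[], size = 32): T[][] {
      const out: T[][] = [];
      for (let i = 0; i < xs.length; i += size) out.push(xs.slice(i, i + size));
      return out;
    }
    console.assert(
      batch(Array.from({ length: 70 }, (_, i) => i))
        .map(b => b.length)
        .join() === '32,32,6'
    );
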
@@ -1,3 +1,10 @@
import { Readable } from 'node:stream';

import { PrismaClient } from '@prisma/client';

import { BlobQuotaExceeded } from '../../../base';
import { MAX_EMBEDDABLE_SIZE } from './types';

export class GqlSignal implements AsyncDisposable {
  readonly abortController = new AbortController();

@@ -9,3 +16,42 @@ export class GqlSignal implements AsyncDisposable {
    this.abortController.abort();
  }
}

export async function checkEmbeddingAvailable(
  db: PrismaClient
): Promise<boolean> {
  const [{ count }] = await db.$queryRaw<
    {
      count: number;
    }[]
  >`SELECT count(1) FROM pg_tables WHERE tablename in ('ai_context_embeddings', 'ai_workspace_embeddings')`;
  return Number(count) === 2;
}

export function readStream(
  readable: Readable,
  maxSize = MAX_EMBEDDABLE_SIZE
): Promise<Buffer<ArrayBuffer>> {
  return new Promise<Buffer<ArrayBuffer>>((resolve, reject) => {
    const chunks: Uint8Array[] = [];
    let totalSize = 0;

    readable.on('data', chunk => {
      totalSize += chunk.length;
      if (totalSize > maxSize) {
        reject(new BlobQuotaExceeded());
        readable.destroy(new BlobQuotaExceeded());
        return;
      }
      chunks.push(chunk);
    });

    readable.on('end', () => {
      resolve(Buffer.concat(chunks, totalSize));
    });

    readable.on('error', err => {
      reject(err);
    });
  });
}

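A minimal usage sketch for readStream (the file path is illustrative):

    import { createReadStream } from 'node:fs';

    // resolves with the whole payload, or rejects with BlobQuotaExceeded as soon
    // as more than MAX_EMBEDDABLE_SIZE bytes have been received
    const buffer = await readStream(createReadStream('fixtures/sample.pdf'));
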
@@ -1,11 +1,13 @@
import './config';

import { ServerFeature } from '../../core/config';
import { DocStorageModule } from '../../core/doc';
import { FeatureModule } from '../../core/features';
import { PermissionModule } from '../../core/permission';
import { QuotaModule } from '../../core/quota';
import { Plugin } from '../registry';
import {
  CopilotContextDocJob,
  CopilotContextResolver,
  CopilotContextRootResolver,
  CopilotContextService,
@@ -36,7 +38,7 @@ registerCopilotProvider(PerplexityProvider);

@Plugin({
  name: 'copilot',
-  imports: [FeatureModule, QuotaModule, PermissionModule],
+  imports: [DocStorageModule, FeatureModule, QuotaModule, PermissionModule],
  providers: [
    ChatSessionService,
    CopilotResolver,
@@ -53,6 +55,7 @@ registerCopilotProvider(PerplexityProvider);
    CopilotContextRootResolver,
    CopilotContextResolver,
    CopilotContextService,
    CopilotContextDocJob,
  ],
  controllers: [CopilotController],
  contributesTo: ServerFeature.Copilot,

@@ -957,35 +957,65 @@ When writing mathematical expressions and equations in your responses, please us
Please avoid using LaTeX native delimiters like \\(...\\) for inline math or \\[...\\] for block math. Always use the Markdown dollar sign notation as it's more compatible with the platform I'm using.
This formatting will help ensure that mathematical content is properly rendered and easily readable in my environment.

-# Context Documents
-The following user messages provide relevant context and background information for your reference.
-If the provided documents are relevant to the user's query:
+# Reference Guide
+The following user messages provide relevant documents and files for your reference.
+
+If the provided documents or files are relevant to the user's query:
- Use them to enrich and support your response
- Cite sources using the citation rules below

-If the documents are not relevant:
+If the documents or files are not relevant:
- Answer the question directly based on your knowledge
-- Do not reference or mention the provided documents
+- Do not reference or mention the provided documents or files

-# Citations Rules:
-When referencing information from the provided documents in your response:
+## Citations Rules
+When referencing information from the provided documents or files in your response:
1. Use markdown footnote format for citations
2. Add citations immediately after the relevant sentence or paragraph
-3. Required format: [^document_index] where document_index is the numerical index of the source document
-4. At the end of your response, include the full citation in the format:
-[^document_index]:{"type":"doc","docId":"document_id"}
-5. Ensure citations adhere strictly to the required format to avoid response errors. Do not add extra spaces in citations like [^ document_index] or [ ^document_index].`,
+3. Required format: [^reference_index] where reference_index is the numerical index of the source document or file
+4. You MUST include citations at the end of your response in this exact format:
+   - For documents: [^reference_index]:{"type":"doc","docId":"document_id"}
+   - For files: [^reference_index]:{"type":"attachment","blobId":"blob_id","fileName":"file_name","fileType":"file_type"}
+5. Ensure citations adhere strictly to the required format. Do not add extra spaces in citations like [^ reference_index] or [ ^reference_index].
+
+### Citations Structure
+Your response MUST follow this structure:
+1. Main response content with inline citations [^reference_index]
+2. Empty line
+3. Citations section with all referenced sources in the required format
+
+Example Output with Citations:
+This is my response with a citation[^1]. Here is more content with another citation[^2].
+
+[^1]:{"type":"doc","docId":"abc123"}
+[^2]:{"type":"attachment","blobId":"xyz789","fileName":"example.txt","fileType":"text"}
+`,
    },
    {
      role: 'user',
-      content: `# Context Documents
+      content: `The following content is not user's query, just reference documents and files for you to answer the user's question.
+## Reference Documents
{{#docs}}
-## Document {{index}}
-- document_index: {{index}}
+### Document {{refIndex}}
+- reference_index: {{refIndex}}
- document_id: {{docId}}
- document_content:
{{markdown}}
-{{/docs}}`,
+{{/docs}}
+If no documents are provided, please answer the question directly based on your knowledge.
+
+## Reference Files
+{{#files}}
+### File {{refIndex}}
+- reference_index: {{refIndex}}
+- blob_id: {{blobId}}
+- file_name: {{fileName}}
+- file_type: {{fileType}}
+- file_content:
+{{chunks}}
+{{/files}}
+If no files are provided, please answer the question directly based on your knowledge.
+`,
    },
  ],
},

@@ -100,7 +100,7 @@ export class OpenAIProvider
    // filter redundant fields
    return messages.map(({ role, content, attachments }) => {
      content = content.trim();
-      if (Array.isArray(attachments)) {
+      if (Array.isArray(attachments) && attachments.length) {
        const contents: OpenAI.Chat.Completions.ChatCompletionContentPart[] =
          [];
        if (content.length) {

@@ -7,6 +7,11 @@ input AddContextDocInput {
  docId: String!
}

input AddContextFileInput {
  blobId: String!
  contextId: String!
}

type AlreadyInSpaceDataType {
  spaceId: String!
}
@@ -36,9 +41,28 @@ enum ContextFileStatus {
  processing
}

type ContextMatchedDocChunk {
  chunk: SafeInt!
  content: String!
  distance: Float
  docId: String!
}

type ContextMatchedFileChunk {
  chunk: SafeInt!
  content: String!
  distance: Float
  fileId: String!
}

type ContextWorkspaceEmbeddingStatus {
  embedded: SafeInt!
  total: SafeInt!
}

type Copilot {
  """Get the context list of a session"""
-  contexts(contextId: String, sessionId: String!): [CopilotContext!]!
+  contexts(contextId: String, sessionId: String): [CopilotContext!]!
  histories(docId: String, options: QueryChatHistoriesInput): [CopilotHistories!]!

  """Get the quota of the user in the workspace"""
@@ -59,6 +83,12 @@ type CopilotContext {
  """list files in context"""
  files: [CopilotContextFile!]!
  id: ID!

  """match file context"""
  matchContext(content: String!, limit: SafeInt, threshold: Float): [ContextMatchedFileChunk!]!

  """match workspace doc content"""
  matchWorkspaceContext(content: String!, limit: SafeInt): ContextMatchedDocChunk!
  workspaceId: String!
}

@@ -71,6 +101,7 @@ type CopilotContextFile {
  blobId: String!
  chunkSize: SafeInt!
  createdAt: SafeInt!
  error: String
  id: ID!
  name: String!
  status: ContextFileStatus!
@@ -81,15 +112,6 @@ type CopilotContextFileNotSupportedDataType {
  message: String!
}

-type CopilotContextListItem {
-  blobId: String
-  chunkSize: SafeInt
-  createdAt: SafeInt!
-  id: ID!
-  name: String
-  status: ContextFileStatus
-}

type CopilotDocNotFoundDataType {
  docId: String!
}
@@ -350,6 +372,7 @@ enum ErrorNames {
  COPILOT_ACTION_TAKEN
  COPILOT_CONTEXT_FILE_NOT_SUPPORTED
  COPILOT_DOC_NOT_FOUND
  COPILOT_EMBEDDING_UNAVAILABLE
  COPILOT_FAILED_TO_CREATE_MESSAGE
  COPILOT_FAILED_TO_GENERATE_TEXT
  COPILOT_FAILED_TO_MATCH_CONTEXT
@@ -772,7 +795,10 @@ type Mutation {
  activateLicense(license: String!, workspaceId: String!): License!

  """add a doc to context"""
-  addContextDoc(options: AddContextDocInput!): [CopilotContextListItem!]!
+  addContextDoc(options: AddContextDocInput!): CopilotContextDoc!

  """add a file to context"""
  addContextFile(content: Upload!, options: AddContextFileInput!): CopilotContextFile!
  addWorkspaceFeature(feature: FeatureType!, workspaceId: String!): Boolean!
  approveMember(userId: String!, workspaceId: String!): Boolean!

@@ -841,6 +867,9 @@ type Mutation {
  publishDoc(docId: String!, mode: PublicDocMode = Page, workspaceId: String!): DocType!
  publishPage(mode: PublicDocMode = Page, pageId: String!, workspaceId: String!): DocType! @deprecated(reason: "use publishDoc instead")

  """queue workspace doc embedding"""
  queueWorkspaceEmbedding(docId: [String!]!, workspaceId: String!): Boolean!

  """mark notification as read"""
  readNotification(id: String!): Boolean!
  recoverDoc(guid: String!, timestamp: DateTime!, workspaceId: String!): DateTime!
@@ -851,6 +880,9 @@ type Mutation {

  """remove a doc from context"""
  removeContextDoc(options: RemoveContextDocInput!): Boolean!

  """remove a file from context"""
  removeContextFile(options: RemoveContextFileInput!): Boolean!
  removeWorkspaceFeature(feature: FeatureType!, workspaceId: String!): Boolean!
  resumeSubscription(idempotencyKey: String @deprecated(reason: "use header `Idempotency-Key`"), plan: SubscriptionPlan = Pro, workspaceId: String): SubscriptionType!
  revoke(userId: String!, workspaceId: String!): Boolean!
@@ -1041,6 +1073,9 @@ type Query {
  """Get public user by id"""
  publicUserById(id: String!): PublicUserType

  """query workspace embedding status"""
  queryWorkspaceEmbeddingStatus(workspaceId: String!): ContextWorkspaceEmbeddingStatus!

  """server config"""
  serverConfig: ServerConfigType!

@@ -1108,6 +1143,11 @@ input RemoveContextDocInput {
  docId: String!
}

input RemoveContextFileInput {
  contextId: String!
  fileId: String!
}

input RevokeDocUserRoleInput {
  docId: String!
  userId: String!