Mirror of https://github.com/toeverything/AFFiNE.git
test(server): add transcript e2e (#11557)
File diff suppressed because it is too large
@@ -54,3 +54,37 @@ Generated by [AVA](https://avajs.dev).
        id: 'docId1',
      },
    ]

## should be able to transcript

> should submit audio transcription job

    [
      {
        status: 'running',
      },
    ]

> should claim audio transcription job

    [
      {
        status: 'claimed',
        summary: '[{"a":"A","s":30,"e":45,"t":"Hello, everyone."},{"a":"B","s":46,"e":70,"t":"Hi, thank you for joining the meeting today."}]',
        title: '[{"a":"A","s":30,"e":45,"t":"Hello, everyone."},{"a":"B","s":46,"e":70,"t":"Hi, thank you for joining the meeting today."}]',
        transcription: [
          {
            end: '00:00:45',
            speaker: 'A',
            start: '00:00:30',
            transcription: 'Hello, everyone.',
          },
          {
            end: '00:01:10',
            speaker: 'B',
            start: '00:00:46',
            transcription: 'Hi, thank you for joining the meeting today.',
          },
        ],
      },
    ]
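The snapshot above records the two shapes involved: the mocked provider returns a compact JSON string (keys `a`/`s`/`e`/`t`, offsets in seconds), while the claimed job exposes a structured transcription with `speaker`/`start`/`end`/`transcription` fields and `HH:MM:SS` timestamps. A minimal sketch of that mapping, for illustration only — the type and helper names below are assumptions, not part of this commit:

```ts
// Illustrative sketch: how the compact provider output in the snapshot above
// could map onto the structured transcription shape. Names are hypothetical.
type RawSegment = { a: string; s: number; e: number; t: string };

type TranscriptionSegment = {
  speaker: string;
  start: string;
  end: string;
  transcription: string;
};

// Format a seconds offset as HH:MM:SS, e.g. 70 -> '00:01:10'.
function formatTimestamp(seconds: number): string {
  const h = Math.floor(seconds / 3600);
  const m = Math.floor((seconds % 3600) / 60);
  const s = Math.floor(seconds % 60);
  return [h, m, s].map(v => String(v).padStart(2, '0')).join(':');
}

// Parse the compact JSON string and map it onto the structured segments.
function toTranscription(raw: string): TranscriptionSegment[] {
  return (JSON.parse(raw) as RawSegment[]).map(seg => ({
    speaker: seg.a,
    start: formatTimestamp(seg.s),
    end: formatTimestamp(seg.e),
    transcription: seg.t,
  }));
}
```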
Binary file not shown.
@@ -19,6 +19,7 @@ import { MockEmbeddingClient } from '../plugins/copilot/context/embedding';
import { prompts, PromptService } from '../plugins/copilot/prompt';
import {
  CopilotProviderFactory,
  GeminiProvider,
  OpenAIProvider,
} from '../plugins/copilot/providers';
import { CopilotStorage } from '../plugins/copilot/storage';
@@ -35,10 +36,12 @@ import {
  addContextDoc,
  addContextFile,
  array2sse,
  audioTranscription,
  chatWithImages,
  chatWithText,
  chatWithTextStream,
  chatWithWorkflow,
  claimAudioTranscription,
  cleanObject,
  createCopilotContext,
  createCopilotMessage,
@@ -50,6 +53,7 @@ import {
  matchFiles,
  matchWorkspaceDocs,
  sse2array,
  submitAudioTranscription,
  textToEventStream,
  unsplashSearch,
  updateCopilotSession,
@@ -96,6 +100,7 @@ test.before(async t => {
      },
    });
    m.overrideProvider(OpenAIProvider).useClass(MockCopilotProvider);
    m.overrideProvider(GeminiProvider).useClass(MockCopilotProvider);
  },
});

@@ -868,3 +873,44 @@ test('should be able to manage context', async t => {
    t.is(result[0].docId, docId, 'should match doc id');
  }
});

test('should be able to transcript', async t => {
  const { app } = t.context;

  const { id: workspaceId } = await createWorkspace(app);

  Sinon.stub(app.get(GeminiProvider), 'generateText').resolves(
    '[{"a":"A","s":30,"e":45,"t":"Hello, everyone."},{"a":"B","s":46,"e":70,"t":"Hi, thank you for joining the meeting today."}]'
  );

  const job = await submitAudioTranscription(
    app,
    workspaceId,
    'blobId',
    'test.mp3',
    Buffer.from([1, 1])
  );
  t.snapshot(
    cleanObject([job], ['id']),
    'should submit audio transcription job'
  );
  t.truthy(job.id, 'should have job id');

  // wait for processing
  {
    let { status } = (await audioTranscription(app, workspaceId, job.id)) || {};

    while (status !== 'finished') {
      await new Promise(resolve => setTimeout(resolve, 1000));
      ({ status } = (await audioTranscription(app, workspaceId, job.id)) || {});
    }
  }

  {
    const result = await claimAudioTranscription(app, job.id);
    t.snapshot(
      cleanObject([result], ['id']),
      'should claim audio transcription job'
    );
  }
});
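The new test polls `audioTranscription` once per second until the job reports `finished`, then claims the result. For reference, a bounded variant of that polling loop — `waitForStatus`, its 30-second deadline, and the imports it relies on (`TestingApp` and `audioTranscription` from the test utils in this diff) are assumptions for illustration, not part of the commit:

```ts
// Illustrative only: a bounded version of the polling loop used in the test above.
async function waitForStatus(
  app: TestingApp,
  workspaceId: string,
  jobId: string,
  target = 'finished',
  timeoutMs = 30_000
): Promise<string> {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    const { status } = (await audioTranscription(app, workspaceId, jobId)) || {};
    if (status === target) return status;
    // poll once per second, matching the interval used in the test
    await new Promise(resolve => setTimeout(resolve, 1000));
  }
  throw new Error(`transcription job ${jobId} did not reach '${target}' in time`);
}
```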
@@ -27,6 +27,7 @@ import {
import { CitationParser } from '../plugins/copilot/providers/perplexity';
import { ChatSessionService } from '../plugins/copilot/session';
import { CopilotStorage } from '../plugins/copilot/storage';
import { CopilotTranscriptionService } from '../plugins/copilot/transcript';
import {
  CopilotChatTextExecutor,
  CopilotWorkflowService,
@@ -57,6 +58,7 @@ const test = ava as TestFn<{
  event: EventBus;
  context: CopilotContextService;
  prompt: PromptService;
  transcript: CopilotTranscriptionService;
  factory: CopilotProviderFactory;
  session: ChatSessionService;
  jobs: CopilotContextDocJob;
@@ -100,25 +102,30 @@ test.before(async t => {
  const auth = module.get(AuthService);
  const db = module.get(PrismaClient);
  const event = module.get(EventBus);
  const context = module.get(CopilotContextService);
  const prompt = module.get(PromptService);
  const factory = module.get(CopilotProviderFactory);

  const session = module.get(ChatSessionService);
  const workflow = module.get(CopilotWorkflowService);
  const jobs = module.get(CopilotContextDocJob);
  const storage = module.get(CopilotStorage);

  const context = module.get(CopilotContextService);
  const jobs = module.get(CopilotContextDocJob);
  const transcript = module.get(CopilotTranscriptionService);

  t.context.module = module;
  t.context.auth = auth;
  t.context.db = db;
  t.context.event = event;
  t.context.context = context;
  t.context.prompt = prompt;
  t.context.factory = factory;
  t.context.session = session;
  t.context.workflow = workflow;
  t.context.jobs = jobs;
  t.context.storage = storage;
  t.context.context = context;
  t.context.jobs = jobs;
  t.context.transcript = transcript;

  t.context.executors = {
    image: module.get(CopilotChatImageExecutor),
    text: module.get(CopilotChatTextExecutor),
@@ -21,6 +21,7 @@ export class MockCopilotProvider extends OpenAIProvider {
    'lcm-sd15-i2i',
    'clarity-upscaler',
    'imageutils/rembg',
    'gemini-2.5-pro-exp-03-25',
  ];

  override readonly capabilities = [
@@ -330,6 +330,149 @@ export async function listContextDocAndFiles(
  return { docs, files };
}

export async function submitAudioTranscription(
  app: TestingApp,
  workspaceId: string,
  blobId: string,
  fileName: string,
  content: Buffer
): Promise<{ id: string; status: string }> {
  const res = await app
    .POST('/graphql')
    .set({ 'x-request-id': 'test', 'x-operation-name': 'test' })
    .field(
      'operations',
      JSON.stringify({
        query: `
            mutation submitAudioTranscription($blob: Upload!, $blobId: String!, $workspaceId: String!) {
              submitAudioTranscription(blob: $blob, blobId: $blobId, workspaceId: $workspaceId) {
                id
                status
              }
            }
          `,
        variables: {
          blob: null,
          blobId,
          workspaceId,
        },
      })
    )
    .field('map', JSON.stringify({ '0': ['variables.blob'] }))
    .attach('0', content, {
      filename: fileName,
      contentType: 'application/octet-stream',
    })
    .expect(200);

  return res.body.data.submitAudioTranscription;
}
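For context, the helper above uploads the audio blob using the GraphQL multipart request convention: the mutation and its variables go into the `operations` field with `blob: null` as a placeholder, the `map` field binds the attached multipart part `'0'` to `variables.blob`, and the file itself is attached as part `'0'`. A minimal sketch of that relationship, with placeholder values only:

```ts
// Illustrative only: how the multipart fields built by submitAudioTranscription relate.
// The query is abbreviated and the variable values are placeholders.
const operations = JSON.stringify({
  query: 'mutation submitAudioTranscription($blob: Upload!, ...) { ... }',
  variables: { blob: null, blobId: 'blobId', workspaceId: '<workspace id>' },
});
// `map` tells the server to substitute attached multipart part '0' for variables.blob.
const map = JSON.stringify({ '0': ['variables.blob'] });
```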
export async function retryAudioTranscription(
  app: TestingApp,
  workspaceId: string,
  jobId: string
): Promise<{ id: string; status: string }> {
  const res = await app.gql(
    `
      mutation retryAudioTranscription($workspaceId: String!, $jobId: String!) {
        retryAudioTranscription(workspaceId: $workspaceId, jobId: $jobId) {
          id
          status
        }
      }
    `,
    { workspaceId, jobId }
  );

  return res.retryAudioTranscription;
}

export async function claimAudioTranscription(
  app: TestingApp,
  jobId: string
): Promise<{
  id: string;
  status: string;
  title: string | null;
  summary: string | null;
  transcription:
    | {
        speaker: string;
        start: string;
        end: string;
        transcription: string;
      }[]
    | null;
}> {
  const res = await app.gql(
    `
      mutation claimAudioTranscription($jobId: String!) {
        claimAudioTranscription(jobId: $jobId) {
          id
          status
          title
          summary
          transcription {
            speaker
            start
            end
            transcription
          }
        }
      }
    `,
    { jobId }
  );

  return res.claimAudioTranscription;
}
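In the mocked e2e run above, the claimed job carries both the raw provider output (title and summary are still the compact JSON string) and the parsed transcription array, as the snapshot shows. A short usage sketch, reusing the `app` and `job` variables from the e2e test and values taken from the snapshot:

```ts
// Illustrative usage of claimAudioTranscription; values shown match the snapshot above.
const claimed = await claimAudioTranscription(app, job.id);
console.log(claimed.status); // 'claimed'
console.log(claimed.transcription?.[0]);
// { speaker: 'A', start: '00:00:30', end: '00:00:45', transcription: 'Hello, everyone.' }
```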
export async function audioTranscription(
  app: TestingApp,
  workspaceId: string,
  jobId: string
): Promise<{
  id: string;
  status: string;
  title: string | null;
  summary: string | null;
  transcription:
    | {
        speaker: string;
        start: string;
        end: string;
        transcription: string;
      }[]
    | null;
}> {
  const res = await app.gql(
    `
      query audioTranscription($workspaceId: String!, $jobId: String!) {
        currentUser {
          copilot(workspaceId: $workspaceId) {
            audioTranscription(jobId: $jobId) {
              id
              status
              title
              summary
              transcription {
                speaker
                start
                end
                transcription
              }
            }
          }
        }
      }
    `,
    { workspaceId, jobId }
  );

  return res.currentUser?.copilot?.audioTranscription;
}

export async function createCopilotMessage(
  app: TestingApp,
  sessionId: string,