feat(electron): audio capture permissions and settings (#11185)

Fixes AF-2420, AF-2391 and AF-2265.
This commit is contained in:
pengx17
2025-03-28 09:12:25 +00:00
parent 8c582122a8
commit 6c125d9a38
59 changed files with 2661 additions and 1699 deletions

View File

@@ -1,5 +1,6 @@
import { DesktopApiService } from '@affine/core/modules/desktop-api';
import { WorkspaceDialogService } from '@affine/core/modules/dialogs';
import type { SettingTab } from '@affine/core/modules/dialogs/constant';
import { DocsService } from '@affine/core/modules/doc';
import { EditorSettingService } from '@affine/core/modules/editor-setting';
import { JournalService } from '@affine/core/modules/journal';
@@ -24,14 +25,14 @@ export function setupEvents(frameworkProvider: FrameworkProvider) {
.catch(console.error);
});
events?.applicationMenu.openAboutPageInSettingModal(() => {
events?.applicationMenu.openInSettingModal(activeTab => {
using currentWorkspace = getCurrentWorkspace(frameworkProvider);
if (!currentWorkspace) {
return;
}
const { workspace } = currentWorkspace;
workspace.scope.get(WorkspaceDialogService).open('setting', {
activeTab: 'about',
activeTab: activeTab as unknown as SettingTab,
});
});

View File

@@ -2,6 +2,7 @@ import type { DocProps } from '@affine/core/blocksuite/initialization';
import { DocsService } from '@affine/core/modules/doc';
import { EditorSettingService } from '@affine/core/modules/editor-setting';
import { AudioAttachmentService } from '@affine/core/modules/media/services/audio-attachment';
import { MeetingSettingsService } from '@affine/core/modules/media/services/meeting-settings';
import { WorkbenchService } from '@affine/core/modules/workbench';
import { DebugLogger } from '@affine/debug';
import { apis, events } from '@affine/electron-api';
@@ -34,6 +35,8 @@ export function setupRecordingEvents(frameworkProvider: FrameworkProvider) {
if ((await apis?.ui.isActiveTab()) && status?.status === 'ready') {
using currentWorkspace = getCurrentWorkspace(frameworkProvider);
if (!currentWorkspace) {
// maybe the workspace is not ready yet, eg. for shared workspace view
await apis?.recording.handleBlockCreationFailed(status.id);
return;
}
const { workspace } = currentWorkspace;
@@ -89,6 +92,16 @@ export function setupRecordingEvents(frameworkProvider: FrameworkProvider) {
model.props.sourceId = blobId;
model.props.embed = true;
const meetingSettingsService = frameworkProvider.get(
MeetingSettingsService
);
if (!meetingSettingsService.settings.autoTranscription) {
// auto transcription is disabled,
// so we don't need to transcribe the recording by default
return;
}
using currentWorkspace = getCurrentWorkspace(frameworkProvider);
if (!currentWorkspace) {
return;
@@ -100,8 +113,23 @@ export function setupRecordingEvents(frameworkProvider: FrameworkProvider) {
audioAttachment?.obj.transcribe().catch(err => {
logger.error('Failed to transcribe recording', err);
});
} else {
throw new Error('No attachment model found');
}
})().catch(console.error);
})()
.then(async () => {
await apis?.recording.handleBlockCreationSuccess(status.id);
})
.catch(error => {
logger.error('Failed to transcribe recording', error);
return apis?.recording.handleBlockCreationFailed(
status.id,
error
);
})
.catch(error => {
console.error('unknown error', error);
});
},
};
const page = docsService.createDoc({ docProps, primaryMode: 'page' });

View File

@@ -1,100 +0,0 @@
import { ArrayBufferTarget, Muxer } from 'webm-muxer';
/**
 * Encodes raw interleaved f32 PCM audio to Opus in a WebM container.
 *
 * The file at `filepath` is fetched and streamed through a WebCodecs
 * AudioEncoder; the encoded chunks are then muxed into a WebM container.
 *
 * @param filepath path of the raw f32 PCM data, resolved against location.origin
 * @param sampleRate sample rate of the source audio in Hz
 * @param numberOfChannels number of interleaved channels in the source data
 * @returns the complete WebM (Opus) file contents
 * @throws if the response has no body or the encoder reports an error
 */
export async function encodeRawBufferToOpus({
  filepath,
  sampleRate,
  numberOfChannels,
}: {
  filepath: string;
  sampleRate: number;
  numberOfChannels: number;
}): Promise<Uint8Array> {
  // Use streams to process audio data incrementally
  const response = await fetch(new URL(filepath, location.origin));
  if (!response.body) {
    throw new Error('Response body is null');
  }

  // Setup Opus encoder.
  // NOTE: throwing inside the `error` callback would be swallowed by the
  // encoder's internal task queue and never reach this function's caller,
  // so record the error and re-throw it on the main code path instead.
  const encodedChunks: EncodedAudioChunk[] = [];
  let encoderError: DOMException | null = null;
  const encoder = new AudioEncoder({
    output: chunk => {
      encodedChunks.push(chunk);
    },
    error: err => {
      encoderError = err;
    },
  });

  // Configure Opus encoder
  encoder.configure({
    codec: 'opus',
    sampleRate,
    numberOfChannels,
    bitrate: 128000,
  });

  // Process the stream
  const reader = response.body.getReader();
  let offset = 0; // frames (samples per channel) encoded so far
  const CHUNK_SIZE = numberOfChannels * 1024; // 1024 samples per channel at a time
  const BYTES_PER_FRAME = Float32Array.BYTES_PER_ELEMENT * numberOfChannels;

  // Stream chunks are arbitrary byte slices: they are not guaranteed to be
  // 4-byte aligned, to contain a whole number of floats, or to end on a
  // frame boundary. Viewing `value.buffer` directly would ignore the chunk's
  // byteOffset/byteLength and misread the data, so carry any trailing
  // partial frame over to the next read instead.
  let pending = new Uint8Array(0);
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      if (encoderError) throw encoderError;

      // Prepend bytes left over from the previous read.
      let bytes: Uint8Array;
      if (pending.length > 0) {
        bytes = new Uint8Array(pending.length + value.length);
        bytes.set(pending, 0);
        bytes.set(value, pending.length);
      } else {
        bytes = value;
      }

      const usableBytes = bytes.length - (bytes.length % BYTES_PER_FRAME);
      pending = bytes.slice(usableBytes); // copy so `bytes` can be collected
      if (usableBytes === 0) continue;

      // Copy into a fresh zero-offset buffer so the Float32Array view is
      // valid regardless of the source chunk's alignment.
      const aligned = bytes.slice(0, usableBytes);
      const float32Data = new Float32Array(
        aligned.buffer,
        0,
        usableBytes / Float32Array.BYTES_PER_ELEMENT
      );

      // Process in smaller chunks to avoid large frames
      for (let i = 0; i < float32Data.length; i += CHUNK_SIZE) {
        const chunkSize = Math.min(CHUNK_SIZE, float32Data.length - i);
        const chunk = float32Data.subarray(i, i + chunkSize);

        // Create and encode frame
        const frame = new AudioData({
          format: 'f32',
          sampleRate,
          numberOfFrames: chunk.length / numberOfChannels,
          numberOfChannels,
          timestamp: (offset * 1_000_000) / sampleRate, // microseconds
          data: chunk,
        });
        encoder.encode(frame);
        frame.close();
        offset += chunk.length / numberOfChannels;
      }
    }
  } finally {
    await encoder.flush();
    encoder.close();
  }
  // Surface any error the encoder reported during flush.
  if (encoderError) throw encoderError;
  // Any remaining `pending` bytes are a truncated trailing frame and cannot
  // be encoded; they are dropped.

  // Initialize WebM muxer
  const target = new ArrayBufferTarget();
  const muxer = new Muxer({
    target,
    audio: {
      codec: 'A_OPUS',
      sampleRate,
      numberOfChannels,
    },
  });

  // Add all chunks to the muxer
  for (const chunk of encodedChunks) {
    muxer.addAudioChunk(chunk, {});
  }

  // Finalize and get WebM container
  muxer.finalize();
  return new Uint8Array(target.buffer);
}

View File

@@ -1,19 +1,27 @@
import { Button } from '@affine/component';
import { useAsyncCallback } from '@affine/core/components/hooks/affine-async-hooks';
import { appIconMap } from '@affine/core/utils';
import { encodeRawBufferToOpus } from '@affine/core/utils/webm-encoding';
import { apis, events } from '@affine/electron-api';
import { useI18n } from '@affine/i18n';
import { useEffect, useMemo, useState } from 'react';
import { encodeRawBufferToOpus } from './encode';
import * as styles from './styles.css';
type Status = {
id: number;
status: 'new' | 'recording' | 'paused' | 'stopped' | 'ready';
status:
| 'new'
| 'recording'
| 'paused'
| 'stopped'
| 'ready'
| 'create-block-success'
| 'create-block-failed';
appName?: string;
appGroupId?: number;
icon?: Buffer;
filepath?: string;
};
export const useRecordingStatus = () => {
@@ -23,12 +31,12 @@ export const useRecordingStatus = () => {
// Get initial status
apis?.recording
.getCurrentRecording()
.then(status => setStatus(status as Status))
.then(status => setStatus(status satisfies Status | null))
.catch(console.error);
// Subscribe to status changes
const unsubscribe = events?.recording.onRecordingStatusChanged(status =>
setStatus(status as Status)
setStatus(status satisfies Status | null)
);
return () => {
@@ -51,15 +59,24 @@ export function Recording() {
}
if (status.status === 'new') {
return t['com.affine.recording.new']();
} else if (status.status === 'ready') {
return t['com.affine.recording.ready']();
} else if (status.appName) {
return t['com.affine.recording.recording']({
appName: status.appName,
});
} else {
return t['com.affine.recording.recording.unnamed']();
} else if (status.status === 'create-block-success') {
return t['com.affine.recording.success.prompt']();
} else if (status.status === 'create-block-failed') {
return t['com.affine.recording.failed.prompt']();
} else if (
status.status === 'recording' ||
status.status === 'ready' ||
status.status === 'stopped'
) {
if (status.appName) {
return t['com.affine.recording.recording']({
appName: status.appName,
});
} else {
return t['com.affine.recording.recording.unnamed']();
}
}
return null;
}, [status, t]);
const handleDismiss = useAsyncCallback(async () => {
@@ -96,7 +113,7 @@ export function Recording() {
new Promise<void>(resolve => {
setTimeout(() => {
resolve();
}, 1000); // wait at least 1 second for better user experience
}, 500); // wait at least 500ms for better user experience
}),
]);
await apis?.recording.readyRecording(result.id, buffer);
@@ -125,6 +142,13 @@ export function Recording() {
await apis?.recording?.startRecording(status.appGroupId);
}, [status]);
const handleOpenFile = useAsyncCallback(async () => {
if (!status) {
return;
}
await apis?.recording?.showSavedRecordings(status.filepath);
}, [status]);
const controlsElement = useMemo(() => {
if (!status) {
return null;
@@ -150,7 +174,7 @@ export function Recording() {
{t['com.affine.recording.stop']()}
</Button>
);
} else if (status.status === 'stopped') {
} else if (status.status === 'stopped' || status.status === 'ready') {
return (
<Button
variant="error"
@@ -159,15 +183,33 @@ export function Recording() {
disabled
/>
);
} else if (status.status === 'ready') {
} else if (status.status === 'create-block-success') {
return (
<Button variant="primary" onClick={handleDismiss}>
{t['com.affine.recording.ready']()}
{t['com.affine.recording.success.button']()}
</Button>
);
} else if (status.status === 'create-block-failed') {
return (
<>
<Button variant="plain" onClick={handleDismiss}>
{t['com.affine.recording.dismiss']()}
</Button>
<Button variant="error" onClick={handleOpenFile}>
{t['com.affine.recording.failed.button']()}
</Button>
</>
);
}
return null;
}, [handleDismiss, handleStartRecording, handleStopRecording, status, t]);
}, [
handleDismiss,
handleOpenFile,
handleStartRecording,
handleStopRecording,
status,
t,
]);
if (!status) {
return null;