refactor(electron): encoding recording on the fly (#11457)

fix AF-2460, AF-2463

When recording is started, we start polling the raw buffers that are pending encoding. The set of pending buffers is tracked by a read cursor into the original raw buffer file. When recording is stopped, we flush the remaining pending buffers and wrap the encoded chunks into a WebM container.

```mermaid
sequenceDiagram
    participant App as App/UI
    participant RecordingFeature as Recording Feature
    participant StateMachine as State Machine
    participant FileSystem as File System
    participant StreamEncoder as Stream Encoder
    participant OpusEncoder as Opus Encoder
    participant WebM as WebM Muxer

    Note over App,WebM: Recording Start Flow
    App->>RecordingFeature: startRecording()
    RecordingFeature->>StateMachine: dispatch(START_RECORDING)
    StateMachine-->>RecordingFeature: status: 'recording'
    RecordingFeature->>StreamEncoder: createStreamEncoder(id, {sampleRate, channels})

    Note over App,WebM: Streaming Flow
    loop Audio Data Streaming
        RecordingFeature->>FileSystem: Write raw audio chunks to .raw file
        StreamEncoder->>FileSystem: Poll raw audio data
        FileSystem-->>StreamEncoder: Raw audio chunks
        StreamEncoder->>OpusEncoder: Encode chunks
        OpusEncoder-->>StreamEncoder: Encoded Opus frames
    end

    Note over App,WebM: Recording Stop Flow
    App->>RecordingFeature: stopRecording()
    RecordingFeature->>StateMachine: dispatch(STOP_RECORDING)
    StateMachine-->>RecordingFeature: status: 'stopped'
    StreamEncoder->>OpusEncoder: flush()
    StreamEncoder->>WebM: muxToWebM(encodedChunks)
    WebM-->>RecordingFeature: WebM buffer
    RecordingFeature->>FileSystem: Save as .opus file
    RecordingFeature->>StateMachine: dispatch(SAVE_RECORDING)
```
This commit is contained in:
pengx17
2025-04-03 15:56:53 +00:00
parent 8ce10e6d0a
commit 133be72ac2
7 changed files with 260 additions and 76 deletions

View File

@@ -1,7 +1,11 @@
import { Button } from '@affine/component';
import { useAsyncCallback } from '@affine/core/components/hooks/affine-async-hooks';
import { appIconMap } from '@affine/core/utils';
import { encodeRawBufferToOpus } from '@affine/core/utils/webm-encoding';
import {
createStreamEncoder,
encodeRawBufferToOpus,
type OpusStreamEncoder,
} from '@affine/core/utils/webm-encoding';
import { apis, events } from '@affine/electron-api';
import { useI18n } from '@affine/i18n';
import track from '@affine/track';
@@ -23,6 +27,8 @@ type Status = {
appGroupId?: number;
icon?: Buffer;
filepath?: string;
sampleRate?: number;
numberOfChannels?: number;
};
export const useRecordingStatus = () => {
@@ -99,56 +105,100 @@ export function Recording() {
await apis?.recording?.stopRecording(status.id);
}, [status]);
const handleProcessStoppedRecording = useAsyncCallback(async () => {
let id: number | undefined;
try {
const result = await apis?.recording?.getCurrentRecording();
const handleProcessStoppedRecording = useAsyncCallback(
async (currentStreamEncoder?: OpusStreamEncoder) => {
let id: number | undefined;
try {
const result = await apis?.recording?.getCurrentRecording();
if (!result) {
return;
}
if (!result) {
return;
}
id = result.id;
id = result.id;
const { filepath, sampleRate, numberOfChannels } = result;
if (!filepath || !sampleRate || !numberOfChannels) {
return;
const { filepath, sampleRate, numberOfChannels } = result;
if (!filepath || !sampleRate || !numberOfChannels) {
return;
}
const [buffer] = await Promise.all([
currentStreamEncoder
? currentStreamEncoder.finish()
: encodeRawBufferToOpus({
filepath,
sampleRate,
numberOfChannels,
}),
new Promise<void>(resolve => {
setTimeout(() => {
resolve();
}, 500); // wait at least 500ms for better user experience
}),
]);
await apis?.recording.readyRecording(result.id, buffer);
} catch (error) {
console.error('Failed to stop recording', error);
await apis?.popup?.dismissCurrentRecording();
if (id) {
await apis?.recording.removeRecording(id);
}
}
const [buffer] = await Promise.all([
encodeRawBufferToOpus({
filepath,
sampleRate,
numberOfChannels,
}),
new Promise<void>(resolve => {
setTimeout(() => {
resolve();
}, 500); // wait at least 500ms for better user experience
}),
]);
await apis?.recording.readyRecording(result.id, buffer);
} catch (error) {
console.error('Failed to stop recording', error);
await apis?.popup?.dismissCurrentRecording();
if (id) {
await apis?.recording.removeRecording(id);
}
}
}, []);
},
[]
);
useEffect(() => {
// allow processing stopped event in tray menu as well:
return events?.recording.onRecordingStatusChanged(status => {
let currentStreamEncoder: OpusStreamEncoder | undefined;
apis?.recording
.getCurrentRecording()
.then(status => {
if (status) {
return handleRecordingStatusChanged(status);
}
return;
})
.catch(console.error);
const handleRecordingStatusChanged = async (status: Status) => {
if (status?.status === 'new') {
track.popup.$.recordingBar.toggleRecordingBar({
type: 'Meeting record',
appName: status.appName || 'System Audio',
});
}
if (
status?.status === 'recording' &&
status.sampleRate &&
status.numberOfChannels &&
(!currentStreamEncoder || currentStreamEncoder.id !== status.id)
) {
currentStreamEncoder?.close();
currentStreamEncoder = createStreamEncoder(status.id, {
sampleRate: status.sampleRate,
numberOfChannels: status.numberOfChannels,
});
currentStreamEncoder.poll().catch(console.error);
}
if (status?.status === 'stopped') {
handleProcessStoppedRecording();
handleProcessStoppedRecording(currentStreamEncoder);
currentStreamEncoder = undefined;
}
};
// allow processing stopped event in tray menu as well:
const unsubscribe = events?.recording.onRecordingStatusChanged(status => {
if (status) {
handleRecordingStatusChanged(status).catch(console.error);
}
});
return () => {
unsubscribe?.();
currentStreamEncoder?.close();
};
}, [handleProcessStoppedRecording]);
const handleStartRecording = useAsyncCallback(async () => {