feat(electron): audio capture permissions and settings (#11185)

Fixes AF-2420, AF-2391, AF-2265
This commit is contained in:
pengx17
2025-03-28 09:12:25 +00:00
parent 8c582122a8
commit 6c125d9a38
59 changed files with 2661 additions and 1699 deletions

View File

@@ -1,5 +1,6 @@
import { DesktopApiService } from '@affine/core/modules/desktop-api'; import { DesktopApiService } from '@affine/core/modules/desktop-api';
import { WorkspaceDialogService } from '@affine/core/modules/dialogs'; import { WorkspaceDialogService } from '@affine/core/modules/dialogs';
import type { SettingTab } from '@affine/core/modules/dialogs/constant';
import { DocsService } from '@affine/core/modules/doc'; import { DocsService } from '@affine/core/modules/doc';
import { EditorSettingService } from '@affine/core/modules/editor-setting'; import { EditorSettingService } from '@affine/core/modules/editor-setting';
import { JournalService } from '@affine/core/modules/journal'; import { JournalService } from '@affine/core/modules/journal';
@@ -24,14 +25,14 @@ export function setupEvents(frameworkProvider: FrameworkProvider) {
.catch(console.error); .catch(console.error);
}); });
events?.applicationMenu.openAboutPageInSettingModal(() => { events?.applicationMenu.openInSettingModal(activeTab => {
using currentWorkspace = getCurrentWorkspace(frameworkProvider); using currentWorkspace = getCurrentWorkspace(frameworkProvider);
if (!currentWorkspace) { if (!currentWorkspace) {
return; return;
} }
const { workspace } = currentWorkspace; const { workspace } = currentWorkspace;
workspace.scope.get(WorkspaceDialogService).open('setting', { workspace.scope.get(WorkspaceDialogService).open('setting', {
activeTab: 'about', activeTab: activeTab as unknown as SettingTab,
}); });
}); });

View File

@@ -2,6 +2,7 @@ import type { DocProps } from '@affine/core/blocksuite/initialization';
import { DocsService } from '@affine/core/modules/doc'; import { DocsService } from '@affine/core/modules/doc';
import { EditorSettingService } from '@affine/core/modules/editor-setting'; import { EditorSettingService } from '@affine/core/modules/editor-setting';
import { AudioAttachmentService } from '@affine/core/modules/media/services/audio-attachment'; import { AudioAttachmentService } from '@affine/core/modules/media/services/audio-attachment';
import { MeetingSettingsService } from '@affine/core/modules/media/services/meeting-settings';
import { WorkbenchService } from '@affine/core/modules/workbench'; import { WorkbenchService } from '@affine/core/modules/workbench';
import { DebugLogger } from '@affine/debug'; import { DebugLogger } from '@affine/debug';
import { apis, events } from '@affine/electron-api'; import { apis, events } from '@affine/electron-api';
@@ -34,6 +35,8 @@ export function setupRecordingEvents(frameworkProvider: FrameworkProvider) {
if ((await apis?.ui.isActiveTab()) && status?.status === 'ready') { if ((await apis?.ui.isActiveTab()) && status?.status === 'ready') {
using currentWorkspace = getCurrentWorkspace(frameworkProvider); using currentWorkspace = getCurrentWorkspace(frameworkProvider);
if (!currentWorkspace) { if (!currentWorkspace) {
// maybe the workspace is not ready yet, eg. for shared workspace view
await apis?.recording.handleBlockCreationFailed(status.id);
return; return;
} }
const { workspace } = currentWorkspace; const { workspace } = currentWorkspace;
@@ -89,6 +92,16 @@ export function setupRecordingEvents(frameworkProvider: FrameworkProvider) {
model.props.sourceId = blobId; model.props.sourceId = blobId;
model.props.embed = true; model.props.embed = true;
const meetingSettingsService = frameworkProvider.get(
MeetingSettingsService
);
if (!meetingSettingsService.settings.autoTranscription) {
// auto transcription is disabled,
// so we don't need to transcribe the recording by default
return;
}
using currentWorkspace = getCurrentWorkspace(frameworkProvider); using currentWorkspace = getCurrentWorkspace(frameworkProvider);
if (!currentWorkspace) { if (!currentWorkspace) {
return; return;
@@ -100,8 +113,23 @@ export function setupRecordingEvents(frameworkProvider: FrameworkProvider) {
audioAttachment?.obj.transcribe().catch(err => { audioAttachment?.obj.transcribe().catch(err => {
logger.error('Failed to transcribe recording', err); logger.error('Failed to transcribe recording', err);
}); });
} else {
throw new Error('No attachment model found');
} }
})().catch(console.error); })()
.then(async () => {
await apis?.recording.handleBlockCreationSuccess(status.id);
})
.catch(error => {
logger.error('Failed to transcribe recording', error);
return apis?.recording.handleBlockCreationFailed(
status.id,
error
);
})
.catch(error => {
console.error('unknown error', error);
});
}, },
}; };
const page = docsService.createDoc({ docProps, primaryMode: 'page' }); const page = docsService.createDoc({ docProps, primaryMode: 'page' });

View File

@@ -1,100 +0,0 @@
import { ArrayBufferTarget, Muxer } from 'webm-muxer';
/**
 * Encodes raw interleaved f32 PCM audio (streamed from `filepath`) to Opus
 * and wraps it in a WebM container.
 *
 * @param filepath - URL path (relative to `location.origin`) of the raw f32 PCM file.
 * @param sampleRate - Sample rate of the source audio in Hz.
 * @param numberOfChannels - Channel count; samples are interleaved [L,R,L,R,...].
 * @returns The finished WebM container bytes.
 * @throws If the response has no body or the Opus encoder reports an error.
 */
export async function encodeRawBufferToOpus({
  filepath,
  sampleRate,
  numberOfChannels,
}: {
  filepath: string;
  sampleRate: number;
  numberOfChannels: number;
}): Promise<Uint8Array> {
  // Use streams to process audio data incrementally
  const response = await fetch(new URL(filepath, location.origin));
  if (!response.body) {
    throw new Error('Response body is null');
  }

  // Setup Opus encoder. NOTE: throwing inside the error callback would be
  // swallowed by the encoder; record the error and surface it from this scope.
  const encodedChunks: EncodedAudioChunk[] = [];
  let encoderError: Error | null = null;
  const encoder = new AudioEncoder({
    output: chunk => {
      encodedChunks.push(chunk);
    },
    error: err => {
      encoderError = new Error(`Encoding error: ${err}`);
    },
  });

  // Configure Opus encoder
  encoder.configure({
    codec: 'opus',
    sampleRate: sampleRate,
    numberOfChannels: numberOfChannels,
    bitrate: 128000,
  });

  const reader = response.body.getReader();
  const BYTES_PER_SAMPLE = Float32Array.BYTES_PER_ELEMENT;
  const frameBytes = BYTES_PER_SAMPLE * numberOfChannels;
  // Process 1024 samples per channel at a time to avoid large frames
  const CHUNK_SIZE = numberOfChannels * 1024;
  let offset = 0; // running frame count, used for microsecond timestamps
  // Network chunks are arbitrary byte slices: they need not align to 4-byte
  // floats nor to whole frames. Carry the unaligned tail over to the next read.
  let pending = new Uint8Array(0);

  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      if (encoderError) throw encoderError;

      // Stitch carry-over bytes together with the new chunk.
      const bytes = new Uint8Array(pending.length + value.length);
      bytes.set(pending, 0);
      bytes.set(value, pending.length);
      const usableBytes = bytes.length - (bytes.length % frameBytes);
      pending = bytes.subarray(usableBytes);

      // BUGFIX: the original used `new Float32Array(value.buffer)`, which
      // ignores the view's byteOffset/byteLength and can read garbage or throw
      // on unaligned chunks. Build the view over exactly the usable bytes.
      const float32Data = new Float32Array(
        bytes.buffer,
        0,
        usableBytes / BYTES_PER_SAMPLE
      );

      for (let i = 0; i < float32Data.length; i += CHUNK_SIZE) {
        const chunkSize = Math.min(CHUNK_SIZE, float32Data.length - i);
        const chunk = float32Data.subarray(i, i + chunkSize);
        const frame = new AudioData({
          format: 'f32',
          sampleRate: sampleRate,
          numberOfFrames: chunk.length / numberOfChannels,
          numberOfChannels: numberOfChannels,
          timestamp: (offset * 1000000) / sampleRate, // timestamp in microseconds
          data: chunk,
        });
        encoder.encode(frame);
        frame.close();
        offset += chunk.length / numberOfChannels;
      }
    }
  } finally {
    await encoder.flush();
    encoder.close();
  }
  if (encoderError) throw encoderError;

  // Initialize WebM muxer and emit all encoded chunks.
  const target = new ArrayBufferTarget();
  const muxer = new Muxer({
    target,
    audio: {
      codec: 'A_OPUS',
      sampleRate: sampleRate,
      numberOfChannels: numberOfChannels,
    },
  });
  for (const chunk of encodedChunks) {
    muxer.addAudioChunk(chunk, {});
  }
  // Finalize and get WebM container
  muxer.finalize();
  const { buffer: webmBuffer } = target;
  return new Uint8Array(webmBuffer);
}

View File

@@ -1,19 +1,27 @@
import { Button } from '@affine/component'; import { Button } from '@affine/component';
import { useAsyncCallback } from '@affine/core/components/hooks/affine-async-hooks'; import { useAsyncCallback } from '@affine/core/components/hooks/affine-async-hooks';
import { appIconMap } from '@affine/core/utils'; import { appIconMap } from '@affine/core/utils';
import { encodeRawBufferToOpus } from '@affine/core/utils/webm-encoding';
import { apis, events } from '@affine/electron-api'; import { apis, events } from '@affine/electron-api';
import { useI18n } from '@affine/i18n'; import { useI18n } from '@affine/i18n';
import { useEffect, useMemo, useState } from 'react'; import { useEffect, useMemo, useState } from 'react';
import { encodeRawBufferToOpus } from './encode';
import * as styles from './styles.css'; import * as styles from './styles.css';
type Status = { type Status = {
id: number; id: number;
status: 'new' | 'recording' | 'paused' | 'stopped' | 'ready'; status:
| 'new'
| 'recording'
| 'paused'
| 'stopped'
| 'ready'
| 'create-block-success'
| 'create-block-failed';
appName?: string; appName?: string;
appGroupId?: number; appGroupId?: number;
icon?: Buffer; icon?: Buffer;
filepath?: string;
}; };
export const useRecordingStatus = () => { export const useRecordingStatus = () => {
@@ -23,12 +31,12 @@ export const useRecordingStatus = () => {
// Get initial status // Get initial status
apis?.recording apis?.recording
.getCurrentRecording() .getCurrentRecording()
.then(status => setStatus(status as Status)) .then(status => setStatus(status satisfies Status | null))
.catch(console.error); .catch(console.error);
// Subscribe to status changes // Subscribe to status changes
const unsubscribe = events?.recording.onRecordingStatusChanged(status => const unsubscribe = events?.recording.onRecordingStatusChanged(status =>
setStatus(status as Status) setStatus(status satisfies Status | null)
); );
return () => { return () => {
@@ -51,15 +59,24 @@ export function Recording() {
} }
if (status.status === 'new') { if (status.status === 'new') {
return t['com.affine.recording.new'](); return t['com.affine.recording.new']();
} else if (status.status === 'ready') { } else if (status.status === 'create-block-success') {
return t['com.affine.recording.ready'](); return t['com.affine.recording.success.prompt']();
} else if (status.appName) { } else if (status.status === 'create-block-failed') {
return t['com.affine.recording.recording']({ return t['com.affine.recording.failed.prompt']();
appName: status.appName, } else if (
}); status.status === 'recording' ||
} else { status.status === 'ready' ||
return t['com.affine.recording.recording.unnamed'](); status.status === 'stopped'
) {
if (status.appName) {
return t['com.affine.recording.recording']({
appName: status.appName,
});
} else {
return t['com.affine.recording.recording.unnamed']();
}
} }
return null;
}, [status, t]); }, [status, t]);
const handleDismiss = useAsyncCallback(async () => { const handleDismiss = useAsyncCallback(async () => {
@@ -96,7 +113,7 @@ export function Recording() {
new Promise<void>(resolve => { new Promise<void>(resolve => {
setTimeout(() => { setTimeout(() => {
resolve(); resolve();
}, 1000); // wait at least 1 second for better user experience }, 500); // wait at least 500ms for better user experience
}), }),
]); ]);
await apis?.recording.readyRecording(result.id, buffer); await apis?.recording.readyRecording(result.id, buffer);
@@ -125,6 +142,13 @@ export function Recording() {
await apis?.recording?.startRecording(status.appGroupId); await apis?.recording?.startRecording(status.appGroupId);
}, [status]); }, [status]);
const handleOpenFile = useAsyncCallback(async () => {
if (!status) {
return;
}
await apis?.recording?.showSavedRecordings(status.filepath);
}, [status]);
const controlsElement = useMemo(() => { const controlsElement = useMemo(() => {
if (!status) { if (!status) {
return null; return null;
@@ -150,7 +174,7 @@ export function Recording() {
{t['com.affine.recording.stop']()} {t['com.affine.recording.stop']()}
</Button> </Button>
); );
} else if (status.status === 'stopped') { } else if (status.status === 'stopped' || status.status === 'ready') {
return ( return (
<Button <Button
variant="error" variant="error"
@@ -159,15 +183,33 @@ export function Recording() {
disabled disabled
/> />
); );
} else if (status.status === 'ready') { } else if (status.status === 'create-block-success') {
return ( return (
<Button variant="primary" onClick={handleDismiss}> <Button variant="primary" onClick={handleDismiss}>
{t['com.affine.recording.ready']()} {t['com.affine.recording.success.button']()}
</Button> </Button>
); );
} else if (status.status === 'create-block-failed') {
return (
<>
<Button variant="plain" onClick={handleDismiss}>
{t['com.affine.recording.dismiss']()}
</Button>
<Button variant="error" onClick={handleOpenFile}>
{t['com.affine.recording.failed.button']()}
</Button>
</>
);
} }
return null; return null;
}, [handleDismiss, handleStartRecording, handleStopRecording, status, t]); }, [
handleDismiss,
handleOpenFile,
handleStartRecording,
handleStopRecording,
status,
t,
]);
if (!status) { if (!status) {
return null; return null;

View File

@@ -169,6 +169,10 @@ export default {
], ],
executableName: productName, executableName: productName,
asar: true, asar: true,
extendInfo: {
NSAudioCaptureUsageDescription:
'Please allow access in order to capture audio from other apps by AFFiNE.',
},
}, },
makers, makers,
plugins: [{ name: '@electron-forge/plugin-auto-unpack-natives', config: {} }], plugins: [{ name: '@electron-forge/plugin-auto-unpack-natives', config: {} }],

View File

@@ -39,7 +39,7 @@ export function createApplicationMenu() {
label: `About ${app.getName()}`, label: `About ${app.getName()}`,
click: async () => { click: async () => {
await showMainWindow(); await showMainWindow();
applicationMenuSubjects.openAboutPageInSettingModal$.next(); applicationMenuSubjects.openInSettingModal$.next('about');
}, },
}, },
{ type: 'separator' }, { type: 'separator' },

View File

@@ -17,9 +17,9 @@ export const applicationMenuEvents = {
sub.unsubscribe(); sub.unsubscribe();
}; };
}, },
openAboutPageInSettingModal: (fn: () => void) => { // todo: properly define the active tab type
const sub = openInSettingModal: (fn: (activeTab: string) => void) => {
applicationMenuSubjects.openAboutPageInSettingModal$.subscribe(fn); const sub = applicationMenuSubjects.openInSettingModal$.subscribe(fn);
return () => { return () => {
sub.unsubscribe(); sub.unsubscribe();
}; };

View File

@@ -3,5 +3,5 @@ import { Subject } from 'rxjs';
export const applicationMenuSubjects = { export const applicationMenuSubjects = {
newPageAction$: new Subject<'page' | 'edgeless'>(), newPageAction$: new Subject<'page' | 'edgeless'>(),
openJournal$: new Subject<void>(), openJournal$: new Subject<void>(),
openAboutPageInSettingModal$: new Subject<void>(), openInSettingModal$: new Subject<string>(),
}; };

View File

@@ -14,7 +14,7 @@ import { registerEvents } from './events';
import { registerHandlers } from './handlers'; import { registerHandlers } from './handlers';
import { logger } from './logger'; import { logger } from './logger';
import { registerProtocol } from './protocol'; import { registerProtocol } from './protocol';
import { setupRecording } from './recording'; import { setupRecordingFeature } from './recording/feature';
import { setupTrayState } from './tray'; import { setupTrayState } from './tray';
import { registerUpdater } from './updater'; import { registerUpdater } from './updater';
import { launch } from './windows-manager/launcher'; import { launch } from './windows-manager/launcher';
@@ -89,18 +89,10 @@ app
.then(launch) .then(launch)
.then(createApplicationMenu) .then(createApplicationMenu)
.then(registerUpdater) .then(registerUpdater)
.then(setupRecordingFeature)
.then(setupTrayState)
.catch(e => console.error('Failed create window:', e)); .catch(e => console.error('Failed create window:', e));
if (isDev) {
app
.whenReady()
.then(setupRecording)
.then(setupTrayState)
.catch(e => {
logger.error('Failed setup recording or tray state:', e);
});
}
if (process.env.SENTRY_RELEASE) { if (process.env.SENTRY_RELEASE) {
// https://docs.sentry.io/platforms/javascript/guides/electron/ // https://docs.sentry.io/platforms/javascript/guides/electron/
Sentry.init({ Sentry.init({

View File

@@ -0,0 +1,685 @@
/* oxlint-disable no-var-requires */
import { execSync } from 'node:child_process';
import path from 'node:path';
// Should not load @affine/native for unsupported platforms
import type { ShareableContent } from '@affine/native';
import { app, systemPreferences } from 'electron';
import fs from 'fs-extra';
import { debounce } from 'lodash-es';
import {
BehaviorSubject,
distinctUntilChanged,
groupBy,
interval,
mergeMap,
Subject,
throttleTime,
} from 'rxjs';
import { map, shareReplay } from 'rxjs/operators';
import { isMacOS, shallowEqual } from '../../shared/utils';
import { beforeAppQuit } from '../cleanup';
import { logger } from '../logger';
import {
MeetingSettingsKey,
MeetingSettingsSchema,
} from '../shared-state-schema';
import { globalStateStorage } from '../shared-storage/storage';
import { getMainWindow } from '../windows-manager';
import { popupManager } from '../windows-manager/popup';
import { recordingStateMachine } from './state-machine';
import type {
AppGroupInfo,
Recording,
RecordingStatus,
TappableAppInfo,
} from './types';
// Safety cap: recordings longer than this are force-stopped (1.5 hours).
const MAX_DURATION_FOR_TRANSCRIPTION = 1.5 * 60 * 60 * 1000; // 1.5 hours

// Reactive + imperative accessors over the persisted meeting settings.
export const MeetingsSettingsState = {
  // Observable of parsed settings; replays the latest value to late subscribers.
  $: globalStateStorage.watch<MeetingSettingsSchema>(MeetingSettingsKey).pipe(
    map(raw => MeetingSettingsSchema.parse(raw ?? {})),
    shareReplay(1)
  ),
  get value(): MeetingSettingsSchema {
    const stored = globalStateStorage.get(MeetingSettingsKey);
    return MeetingSettingsSchema.parse(stored ?? {});
  },
  set value(next: MeetingSettingsSchema) {
    globalStateStorage.set(MeetingSettingsKey, next);
  },
};
// Live subscriptions owned by this module; torn down in cleanup().
const subscribers: Subscriber[] = [];

// recordings are saved in the app data directory
// may need a way to clean up old recordings
export const SAVED_RECORDINGS_DIR = path.join(
  app.getPath('sessionData'),
  'recordings'
);

// Lazily-created native capture entry point; null until the feature is set up
// (and on platforms where @affine/native is unavailable).
let shareableContent: ShareableContent | null = null;

// Drops the native handle and unsubscribes everything this module registered.
function cleanup() {
  shareableContent = null;
  subscribers.forEach(subscriber => {
    try {
      subscriber.unsubscribe();
    } catch {
      // ignore unsubscribe error
    }
  });
}

beforeAppQuit(() => {
  cleanup();
});

// Currently tappable applications, refreshed by setupMediaListeners().
export const applications$ = new BehaviorSubject<TappableAppInfo[]>([]);
// Applications grouped by process group; derived from applications$.
export const appGroups$ = new BehaviorSubject<AppGroupInfo[]>([]);
// Ping stream used to coalesce "app list may have changed" notifications.
export const updateApplicationsPing$ = new Subject<number>();

// recording id -> recording
// recordings will be saved in memory before consumed and created as an audio block to user's doc
const recordings = new Map<number, Recording>();

// there should be only one active recording at a time
// We'll now use recordingStateMachine.status$ instead of our own BehaviorSubject
export const recordingStatus$ = recordingStateMachine.status$;
// Builds an AppGroupInfo for the given process group id from the native
// shareable-content handle; returns undefined when the process is unknown
// or the native layer has not been initialized.
function createAppGroup(processGroupId: number): AppGroupInfo | undefined {
  const groupProcess =
    shareableContent?.applicationWithProcessId(processGroupId);
  if (!groupProcess) {
    return;
  }
  return {
    processGroupId: processGroupId,
    apps: [], // leave it empty for now.
    name: groupProcess.name,
    bundleIdentifier: groupProcess.bundleIdentifier,
    // icon should be lazy loaded
    get icon() {
      try {
        return groupProcess.icon;
      } catch (error) {
        logger.error(`Failed to get icon for ${groupProcess.name}`, error);
        return undefined;
      }
    },
    isRunning: false,
  };
}
// pipe applications$ to appGroups$
function setupAppGroups() {
subscribers.push(
applications$.pipe(distinctUntilChanged()).subscribe(apps => {
const appGroups: AppGroupInfo[] = [];
apps.forEach(app => {
let appGroup = appGroups.find(
group => group.processGroupId === app.processGroupId
);
if (!appGroup) {
appGroup = createAppGroup(app.processGroupId);
if (appGroup) {
appGroups.push(appGroup);
}
}
if (appGroup) {
appGroup.apps.push(app);
}
});
appGroups.forEach(appGroup => {
appGroup.isRunning = appGroup.apps.some(app => app.isRunning);
});
appGroups$.next(appGroups);
})
);
}
// Watches appGroups$ for per-group isRunning transitions and drives the
// recording popup / auto-start behavior according to the user's recordingMode.
function setupNewRunningAppGroup() {
  // one stream per process group, emitting only when that group's isRunning flips
  const appGroupRunningChanged$ = appGroups$.pipe(
    mergeMap(groups => groups),
    groupBy(group => group.processGroupId),
    mergeMap(groupStream$ =>
      groupStream$.pipe(
        distinctUntilChanged((prev, curr) => prev.isRunning === curr.isRunning)
      )
    )
  );

  // handle groups that are already running at setup time
  appGroups$.value.forEach(group => {
    const recordingStatus = recordingStatus$.value;
    if (
      group.isRunning &&
      (!recordingStatus || recordingStatus.status === 'new')
    ) {
      newRecording(group);
    }
  });

  const debounceStartRecording = debounce((appGroup: AppGroupInfo) => {
    // check if the app is running again
    if (appGroup.isRunning) {
      startRecording(appGroup);
    }
  }, 1000);

  subscribers.push(
    appGroupRunningChanged$.subscribe(currentGroup => {
      logger.info(
        'appGroupRunningChanged',
        currentGroup.bundleIdentifier,
        currentGroup.isRunning
      );
      if (MeetingsSettingsState.value.recordingMode === 'none') {
        return;
      }
      const recordingStatus = recordingStatus$.value;
      if (currentGroup.isRunning) {
        // when the app is running and there is no active recording popup
        // we should show a new recording popup
        if (
          !recordingStatus ||
          recordingStatus.status === 'new' ||
          recordingStatus.status === 'create-block-success' ||
          recordingStatus.status === 'create-block-failed'
        ) {
          if (MeetingsSettingsState.value.recordingMode === 'prompt') {
            newRecording(currentGroup);
          } else if (
            MeetingsSettingsState.value.recordingMode === 'auto-start'
          ) {
            // there is a case that the watched app's running state changed rapidly
            // we will schedule the start recording to avoid that
            debounceStartRecording(currentGroup);
          } else {
            // do nothing, skip
          }
        }
      } else {
        // when displaying in "new" state but the app is not running any more
        // we should remove the recording
        if (
          recordingStatus?.status === 'new' &&
          currentGroup.bundleIdentifier ===
            recordingStatus.appGroup?.bundleIdentifier
        ) {
          removeRecording(recordingStatus.id);
        }
        // if the recording is stopped and we are recording it,
        // we should stop the recording
        if (
          recordingStatus?.status === 'recording' &&
          recordingStatus.appGroup?.bundleIdentifier ===
            currentGroup.bundleIdentifier
        ) {
          stopRecording(recordingStatus.id).catch(err => {
            logger.error('failed to stop recording', err);
          });
        }
      }
    })
  );
}
// Opens a raw PCM file sink and starts tapping audio samples for the given
// recording status; returns the in-memory Recording handle.
function createRecording(status: RecordingStatus) {
  const bufferedFilePath = path.join(
    SAVED_RECORDINGS_DIR,
    `${status.appGroup?.bundleIdentifier ?? 'unknown'}-${status.id}-${status.startTime}.raw`
  );
  fs.ensureDirSync(SAVED_RECORDINGS_DIR);
  const file = fs.createWriteStream(bufferedFilePath);

  // Invoked by the native tap per sample batch; drops samples while paused
  // or when this recording is no longer the active one.
  function tapAudioSamples(err: Error | null, samples: Float32Array) {
    const recordingStatus = recordingStatus$.getValue();
    if (
      !recordingStatus ||
      recordingStatus.id !== status.id ||
      recordingStatus.status === 'paused'
    ) {
      return;
    }
    if (err) {
      logger.error('failed to get audio samples', err);
    } else {
      // Writing raw Float32Array samples directly to file
      // For stereo audio, samples are interleaved [L,R,L,R,...]
      file.write(Buffer.from(samples.buffer));
    }
  }

  // MUST require dynamically to avoid loading @affine/native for unsupported platforms
  const ShareableContent = require('@affine/native').ShareableContent;
  // tap a single app when one is targeted, otherwise capture global audio
  const stream = status.app
    ? status.app.rawInstance.tapAudio(tapAudioSamples)
    : ShareableContent.tapGlobalAudio(null, tapAudioSamples);
  const recording: Recording = {
    id: status.id,
    startTime: status.startTime,
    app: status.app,
    appGroup: status.appGroup,
    file,
    stream,
  };
  return recording;
}
// Returns a plain snapshot of an in-memory recording (id, source app/group,
// raw file path, and stream parameters), or undefined when unknown.
export async function getRecording(id: number) {
  const recording = recordings.get(id);
  if (!recording) {
    logger.error(`Recording ${id} not found`);
    return;
  }
  const { appGroup, app, startTime, stream, file } = recording;
  return {
    id,
    appGroup,
    app,
    startTime,
    filepath: String(file.path),
    sampleRate: stream.sampleRate,
    numberOfChannels: stream.channels,
  };
}
// recording popup status
// new: recording is started, popup is shown
// recording: recording is started, popup is shown
// stopped: recording is stopped, popup showing processing status
// create-block-success: recording is ready, show "open app" button
// create-block-failed: recording is failed, show "failed to save" button
// null: hide popup
// Syncs the recording popup window and the native tap lifecycle with
// recordingStatus$ transitions.
function setupRecordingListeners() {
  subscribers.push(
    recordingStatus$
      .pipe(distinctUntilChanged(shallowEqual))
      .subscribe(status => {
        const popup = popupManager.get('recording');
        if (status && !popup.showing) {
          popup.show().catch(err => {
            logger.error('failed to show recording popup', err);
          });
        }
        if (status?.status === 'recording') {
          let recording = recordings.get(status.id);
          // create a recording if not exists
          if (!recording) {
            recording = createRecording(status);
            recordings.set(status.id, recording);
          }
        } else if (status?.status === 'stopped') {
          const recording = recordings.get(status.id);
          if (recording) {
            recording.stream.stop();
          }
        } else if (
          status?.status === 'create-block-success' ||
          status?.status === 'create-block-failed'
        ) {
          // show the popup for 10s
          setTimeout(() => {
            // check again if current status is still ready
            if (
              (recordingStatus$.value?.status === 'create-block-success' ||
                recordingStatus$.value?.status === 'create-block-failed') &&
              recordingStatus$.value.id === status.id
            ) {
              popup.hide().catch(err => {
                logger.error('failed to hide recording popup', err);
              });
            }
          }, 10_000);
        } else if (!status) {
          // status is removed, we should hide the popup
          popupManager
            .get('recording')
            .hide()
            .catch(err => {
              logger.error('failed to hide recording popup', err);
            });
        }
      })
  );
}
// Snapshots the tappable applications from the native layer, skipping Apple
// system processes and this process itself.
function getAllApps(): TappableAppInfo[] {
  if (!shareableContent) {
    return [];
  }
  const infos = shareableContent.applications().map(app => {
    try {
      return {
        rawInstance: app,
        processId: app.processId,
        processGroupId: app.processGroupId,
        bundleIdentifier: app.bundleIdentifier,
        name: app.name,
        isRunning: app.isRunning,
      };
    } catch (error) {
      logger.error('failed to get app info', error);
      return null;
    }
  });
  return infos.filter(
    (info): info is TappableAppInfo =>
      info !== null &&
      !info.bundleIdentifier.startsWith('com.apple') &&
      info.processId !== process.pid
  );
}

// Minimal teardown handle shared by rxjs subscriptions and native listeners.
type Subscriber = {
  unsubscribe: () => void;
};
// Keeps applications$ fresh: polls every 3s, reacts to native app-list and
// per-app state changes, and throttles refreshes through updateApplicationsPing$.
function setupMediaListeners() {
  // MUST require dynamically to avoid loading @affine/native for unsupported platforms
  const ShareableContent = require('@affine/native').ShareableContent;
  applications$.next(getAllApps());
  subscribers.push(
    interval(3000).subscribe(() => {
      updateApplicationsPing$.next(Date.now());
    }),
    ShareableContent.onApplicationListChanged(() => {
      updateApplicationsPing$.next(Date.now());
    }),
    updateApplicationsPing$
      .pipe(distinctUntilChanged(), throttleTime(3000))
      .subscribe(() => {
        applications$.next(getAllApps());
      })
  );

  // Per-app state listeners, replaced wholesale on every app-list emission.
  let appStateSubscribers: Subscriber[] = [];
  const unsubscribeAll = (subs: Subscriber[]) => {
    subs.forEach(subscriber => {
      try {
        subscriber.unsubscribe();
      } catch {
        // ignore unsubscribe error
      }
    });
  };

  subscribers.push(
    applications$.subscribe(apps => {
      // drop listeners for the previous app list before installing new ones
      unsubscribeAll(appStateSubscribers);
      const nextSubscribers: Subscriber[] = [];
      apps.forEach(app => {
        try {
          const tappableApp = app.rawInstance;
          nextSubscribers.push(
            ShareableContent.onAppStateChanged(tappableApp, () => {
              updateApplicationsPing$.next(Date.now());
            })
          );
        } catch (error) {
          logger.error(
            `Failed to convert app ${app.name} to TappableApplication`,
            error
          );
        }
      });
      appStateSubscribers = nextSubscribers;
      // BUGFIX: the original returned a cleanup closure from this next-handler,
      // but RxJS ignores the return value of subscribe callbacks, so it was
      // dead code and the listeners leaked on module cleanup.
    }),
    {
      // register the per-app listeners for teardown when cleanup() runs
      unsubscribe: () => {
        unsubscribeAll(appStateSubscribers);
        appStateSubscribers = [];
      },
    }
  );
}
// will be called when the app is ready or when the user has enabled the recording feature in settings
// Returns true when the feature was (or already is) active, false otherwise.
export function setupRecordingFeature(): boolean {
  // BUGFIX: this early exit previously returned undefined while the other
  // paths returned booleans; make the contract consistently boolean.
  if (!MeetingsSettingsState.value.enabled || !checkRecordingAvailable()) {
    return false;
  }
  try {
    // MUST require dynamically to avoid loading @affine/native for unsupported platforms
    const ShareableContent = require('@affine/native').ShareableContent;
    if (!shareableContent) {
      shareableContent = new ShareableContent();
      setupMediaListeners();
    }
    setupAppGroups();
    setupNewRunningAppGroup();
    setupRecordingListeners();
    // reset all states
    recordingStatus$.next(null);
    return true;
  } catch (error) {
    logger.error('failed to setup recording feature', error);
    return false;
  }
}
// Clears the active recording status and tears down all module subscriptions.
export function disableRecordingFeature() {
  recordingStatus$.next(null);
  cleanup();
}

// Accepts either a group object or a processGroupId and resolves it to the
// AppGroupInfo (undefined when the id is unknown or no argument was given).
function normalizeAppGroupInfo(
  appGroup?: AppGroupInfo | number
): AppGroupInfo | undefined {
  if (typeof appGroup !== 'number') {
    return appGroup;
  }
  return appGroups$.value.find(group => group.processGroupId === appGroup);
}
// Dispatches a NEW_RECORDING (popup prompt) for the given app group.
// Returns null on platforms where the native layer never loaded.
export function newRecording(
  appGroup?: AppGroupInfo | number
): RecordingStatus | null {
  if (!shareableContent) {
    return null; // likely called on unsupported platform
  }
  return recordingStateMachine.dispatch({
    type: 'NEW_RECORDING',
    appGroup: normalizeAppGroupInfo(appGroup),
  });
}

// Starts recording the given app group (or global audio when omitted) and
// arms a hard stop after MAX_DURATION_FOR_TRANSCRIPTION.
export function startRecording(
  appGroup?: AppGroupInfo | number
): RecordingStatus | null {
  const state = recordingStateMachine.dispatch({
    type: 'START_RECORDING',
    appGroup: normalizeAppGroupInfo(appGroup),
  });
  if (state?.status === 'recording') {
    // set a timeout to stop the recording after MAX_DURATION_FOR_TRANSCRIPTION
    setTimeout(() => {
      stopRecording(state.id).catch(err => {
        logger.error('failed to stop recording', err);
      });
    }, MAX_DURATION_FOR_TRANSCRIPTION);
  }
  return state;
}

// Pauses the recording; while paused the sample tap drops incoming audio.
export function pauseRecording(id: number) {
  return recordingStateMachine.dispatch({ type: 'PAUSE_RECORDING', id });
}

export function resumeRecording(id: number) {
  return recordingStateMachine.dispatch({ type: 'RESUME_RECORDING', id });
}
// Stops a recording: transitions the state machine, flushes the raw PCM file
// to disk, and returns the serialized status (undefined when id is unknown).
export async function stopRecording(id: number) {
  const recording = recordings.get(id);
  if (!recording) {
    logger.error(`Recording ${id} not found`);
    return;
  }
  if (!recording.file.path) {
    logger.error(`Recording ${id} has no file path`);
    return;
  }
  const recordingStatus = recordingStateMachine.dispatch({
    type: 'STOP_RECORDING',
    id,
    filepath: String(recording.file.path),
    sampleRate: recording.stream.sampleRate,
    numberOfChannels: recording.stream.channels,
  });
  if (!recordingStatus) {
    logger.error('No recording status to stop');
    return;
  }
  const { file } = recording;
  file.end();
  // Wait for buffered samples to hit disk. BUGFIX: also listen for 'error' —
  // without it a failed flush would never emit 'finish' and this promise
  // would hang forever; now the error propagates to the caller's .catch.
  await new Promise<void>((resolve, reject) => {
    file.once('finish', () => {
      resolve();
    });
    file.once('error', err => {
      reject(err);
    });
  });
  return serializeRecordingStatus(recordingStatus);
}
// Persists the encoded (webm) recording buffer next to the raw capture,
// advances the state machine via SAVE_RECORDING, and raises the main window.
export async function readyRecording(id: number, buffer: Buffer) {
  const recordingStatus = recordingStatus$.value;
  const recording = recordings.get(id);
  if (!recordingStatus || recordingStatus.id !== id || !recording) {
    logger.error(`Recording ${id} not found`);
    return;
  }
  const filepath = path.join(
    SAVED_RECORDINGS_DIR,
    `${recordingStatus.appGroup?.bundleIdentifier ?? 'unknown'}-${recordingStatus.id}-${recordingStatus.startTime}.webm`
  );
  await fs.writeFile(filepath, buffer);
  // Update the status through the state machine
  recordingStateMachine.dispatch({
    type: 'SAVE_RECORDING',
    id,
    filepath,
  });
  // bring up the window
  getMainWindow()
    .then(mainWindow => {
      if (mainWindow) {
        mainWindow.show();
      }
    })
    .catch(err => {
      logger.error('failed to bring up the window', err);
    });
}
// Marks the recording whose audio block was created successfully in the doc.
export async function handleBlockCreationSuccess(id: number) {
  recordingStateMachine.dispatch({ type: 'CREATE_BLOCK_SUCCESS', id });
}

// Marks the recording whose audio block creation failed, carrying the error.
export async function handleBlockCreationFailed(id: number, error?: Error) {
  recordingStateMachine.dispatch({ type: 'CREATE_BLOCK_FAILED', id, error });
}

// Forgets an in-memory recording and removes it from the state machine.
export function removeRecording(id: number) {
  recordings.delete(id);
  recordingStateMachine.dispatch({ type: 'REMOVE_RECORDING', id });
}
// Plain, structured-clone-safe shape of a recording status for IPC.
export interface SerializedRecordingStatus {
  id: number;
  status: RecordingStatus['status'];
  appName?: string;
  // if there is no app group, it means the recording is for system audio
  appGroupId?: number;
  icon?: Buffer;
  startTime: number;
  filepath?: string;
  sampleRate?: number;
  numberOfChannels?: number;
}

// Flattens a RecordingStatus (which holds live native objects) into the
// serializable form above.
export function serializeRecordingStatus(
  status: RecordingStatus
): SerializedRecordingStatus {
  const group = status.appGroup;
  return {
    id: status.id,
    status: status.status,
    appName: group?.name,
    appGroupId: group?.processGroupId,
    icon: group?.icon,
    startTime: status.startTime,
    filepath: status.filepath,
    sampleRate: status.sampleRate,
    numberOfChannels: status.numberOfChannels,
  };
}
// Reads the macOS version via `sw_vers`; returns {0,0,0} on any failure.
// BUGFIX: versions like "14.2" have no patch component, and unexpected output
// produced NaN via Number() — missing/unparsable parts now default to 0 so
// callers always see plain numbers.
export const getMacOSVersion = () => {
  try {
    const stdout = execSync('sw_vers -productVersion').toString();
    const [major = 0, minor = 0, patch = 0] = stdout
      .trim()
      .split('.')
      .map(part => Number.parseInt(part, 10) || 0);
    return { major, minor, patch };
  } catch (error) {
    logger.error('Failed to get MacOS version', error);
    return { major: 0, minor: 0, patch: 0 };
  }
};

// check if the system is MacOS and the version is >= 14.2
// (14.2 is the first release with the audio-tap API we rely on)
export const checkRecordingAvailable = () => {
  if (!isMacOS()) {
    return false;
  }
  const version = getMacOSVersion();
  return (version.major === 14 && version.minor >= 2) || version.major > 14;
};

// True when the user has granted screen-recording permission (macOS only);
// audio capture of other apps rides on this permission.
export const checkScreenRecordingPermission = () => {
  if (!isMacOS()) {
    return false;
  }
  return systemPreferences.getMediaAccessStatus('screen') === 'granted';
};

View File

@@ -1,546 +1,32 @@
// eslint-disable no-var-requires
// Should not load @affine/native for unsupported platforms
import path from 'node:path'; import path from 'node:path';
import { ShareableContent } from '@affine/native'; import { shell } from 'electron';
import { app } from 'electron';
import fs from 'fs-extra';
import {
BehaviorSubject,
distinctUntilChanged,
groupBy,
interval,
mergeMap,
Subject,
throttleTime,
} from 'rxjs';
import { isMacOS, shallowEqual } from '../../shared/utils'; import { isMacOS } from '../../shared/utils';
import { beforeAppQuit } from '../cleanup';
import { logger } from '../logger';
import type { NamespaceHandlers } from '../type'; import type { NamespaceHandlers } from '../type';
import { getMainWindow } from '../windows-manager'; import {
import { popupManager } from '../windows-manager/popup'; checkRecordingAvailable,
import { recordingStateMachine } from './state-machine'; checkScreenRecordingPermission,
import type { disableRecordingFeature,
AppGroupInfo, getRecording,
Recording, handleBlockCreationFailed,
RecordingStatus, handleBlockCreationSuccess,
TappableAppInfo, pauseRecording,
} from './types'; readyRecording,
recordingStatus$,
const subscribers: Subscriber[] = []; removeRecording,
SAVED_RECORDINGS_DIR,
// adhoc recordings are saved in the temp directory type SerializedRecordingStatus,
const SAVED_RECORDINGS_DIR = path.join( serializeRecordingStatus,
app.getPath('temp'), setupRecordingFeature,
'affine-recordings' startRecording,
); stopRecording,
} from './feature';
beforeAppQuit(() => { import type { AppGroupInfo } from './types';
subscribers.forEach(subscriber => {
try {
subscriber.unsubscribe();
} catch {
// ignore unsubscribe error
}
});
});
// Native handle for enumerating/tapping running applications; null until
// setupRecording() succeeds (macOS only).
let shareableContent: ShareableContent | null = null;
// All currently tappable applications, refreshed by setupMediaListeners().
export const applications$ = new BehaviorSubject<TappableAppInfo[]>([]);
// Applications grouped by processGroupId, derived from applications$.
export const appGroups$ = new BehaviorSubject<AppGroupInfo[]>([]);
// Ping channel that triggers a re-enumeration of applications.
export const updateApplicationsPing$ = new Subject<number>();
// recording id -> recording
// recordings will be saved in memory before consumed and created as an audio block to user's doc
const recordings = new Map<number, Recording>();
// there should be only one active recording at a time
// We'll now use recordingStateMachine.status$ instead of our own BehaviorSubject
export const recordingStatus$ = recordingStateMachine.status$;
/**
 * Builds an AppGroupInfo for the given process group id by looking the group
 * process up through the native shareable content. Returns undefined when the
 * native handle is unavailable or the process cannot be found.
 *
 * The returned group starts with an empty `apps` list and `isRunning: false`;
 * both are filled in by setupAppGroups().
 */
function createAppGroup(processGroupId: number): AppGroupInfo | undefined {
  const groupProcess =
    shareableContent?.applicationWithProcessId(processGroupId);
  if (!groupProcess) {
    return;
  }
  return {
    processGroupId: processGroupId,
    apps: [], // leave it empty for now.
    name: groupProcess.name,
    bundleIdentifier: groupProcess.bundleIdentifier,
    // icon should be lazy loaded
    get icon() {
      try {
        return groupProcess.icon;
      } catch (error) {
        logger.error(`Failed to get icon for ${groupProcess.name}`, error);
        return undefined;
      }
    },
    isRunning: false,
  };
}
// pipe applications$ to appGroups$
/**
 * Subscribes to applications$ and rebuilds appGroups$ on every change:
 * apps sharing a processGroupId are collected into one AppGroupInfo, and a
 * group is marked running when any of its apps is running. The subscription
 * is registered in `subscribers` for cleanup on app quit.
 */
function setupAppGroups() {
  subscribers.push(
    applications$.pipe(distinctUntilChanged()).subscribe(apps => {
      const appGroups: AppGroupInfo[] = [];
      apps.forEach(app => {
        // Reuse the group if one was already created for this process group.
        let appGroup = appGroups.find(
          group => group.processGroupId === app.processGroupId
        );
        if (!appGroup) {
          appGroup = createAppGroup(app.processGroupId);
          if (appGroup) {
            appGroups.push(appGroup);
          }
        }
        if (appGroup) {
          appGroup.apps.push(app);
        }
      });
      // A group counts as running if any member app is running.
      appGroups.forEach(appGroup => {
        appGroup.isRunning = appGroup.apps.some(app => app.isRunning);
      });
      appGroups$.next(appGroups);
    })
  );
}
/**
 * Watches app groups for running-state transitions and drives the recording
 * prompt: when a group starts running and no recording is active, a "new"
 * recording is offered; when a group stops running while still in the "new"
 * state, that pending recording is removed.
 */
function setupNewRunningAppGroup() {
  // Emit a group only when its isRunning flag actually flips, tracked
  // per process group via groupBy.
  const appGroupRunningChanged$ = appGroups$.pipe(
    mergeMap(groups => groups),
    groupBy(group => group.processGroupId),
    mergeMap(groupStream$ =>
      groupStream$.pipe(
        distinctUntilChanged((prev, curr) => prev.isRunning === curr.isRunning)
      )
    )
  );
  // Handle groups that were already running at setup time.
  appGroups$.value.forEach(group => {
    const recordingStatus = recordingStatus$.value;
    if (
      group.isRunning &&
      (!recordingStatus || recordingStatus.status === 'new')
    ) {
      newRecording(group);
    }
  });
  subscribers.push(
    appGroupRunningChanged$.subscribe(currentGroup => {
      logger.info(
        'appGroupRunningChanged',
        currentGroup.bundleIdentifier,
        currentGroup.isRunning
      );
      const recordingStatus = recordingStatus$.value;
      if (currentGroup.isRunning) {
        // when the app is running and there is no active recording popup
        // we should show a new recording popup
        if (
          !recordingStatus ||
          recordingStatus.status === 'new' ||
          recordingStatus.status === 'ready'
        ) {
          newRecording(currentGroup);
        }
      } else {
        // when displaying in "new" state but the app is not running any more
        // we should remove the recording
        if (
          recordingStatus?.status === 'new' &&
          currentGroup.bundleIdentifier ===
            recordingStatus.appGroup?.bundleIdentifier
        ) {
          removeRecording(recordingStatus.id);
        }
      }
    })
  );
}
/**
 * Starts capturing audio for the given recording status: opens a raw PCM
 * write stream under SAVED_RECORDINGS_DIR and taps either the specific app's
 * audio or global system audio, appending Float32 samples to the file.
 *
 * Samples are dropped (not written) while the recording is paused or when the
 * active status no longer matches this recording's id.
 *
 * @param status - the status describing which app/group to record
 * @returns the in-memory Recording holding the file and native stream handles
 */
function createRecording(status: RecordingStatus) {
  const bufferedFilePath = path.join(
    SAVED_RECORDINGS_DIR,
    `${status.appGroup?.bundleIdentifier ?? 'unknown'}-${status.id}-${status.startTime}.raw`
  );
  fs.ensureDirSync(SAVED_RECORDINGS_DIR);
  const file = fs.createWriteStream(bufferedFilePath);
  // Callback invoked by the native layer for every chunk of captured samples.
  function tapAudioSamples(err: Error | null, samples: Float32Array) {
    const recordingStatus = recordingStatus$.getValue();
    // Ignore samples once another recording took over or while paused.
    if (
      !recordingStatus ||
      recordingStatus.id !== status.id ||
      recordingStatus.status === 'paused'
    ) {
      return;
    }
    if (err) {
      logger.error('failed to get audio samples', err);
    } else {
      // Writing raw Float32Array samples directly to file
      // For stereo audio, samples are interleaved [L,R,L,R,...]
      file.write(Buffer.from(samples.buffer));
    }
  }
  // No app means "record system-wide audio" via the global tap.
  const stream = status.app
    ? status.app.rawInstance.tapAudio(tapAudioSamples)
    : ShareableContent.tapGlobalAudio(null, tapAudioSamples);
  const recording: Recording = {
    id: status.id,
    startTime: status.startTime,
    app: status.app,
    appGroup: status.appGroup,
    file,
    stream,
  };
  return recording;
}
/**
 * Returns a plain-data snapshot of an in-memory recording (metadata plus the
 * raw file path and stream parameters), or undefined when no recording with
 * the given id exists.
 *
 * @param id - id of the recording to look up
 */
export async function getRecording(id: number) {
  const recording = recordings.get(id);
  if (!recording) {
    logger.error(`Recording ${id} not found`);
    return;
  }
  // WriteStream#path may be a Buffer; normalize to a string path.
  const rawFilePath = String(recording.file.path);
  return {
    id,
    appGroup: recording.appGroup,
    app: recording.app,
    startTime: recording.startTime,
    filepath: rawFilePath,
    sampleRate: recording.stream.sampleRate,
    numberOfChannels: recording.stream.channels,
  };
}
// recording popup status
// new: recording is started, popup is shown
// recording: recording is started, popup is shown
// stopped: recording is stopped, popup showing processing status
// ready: recording is ready, show "open app" button
// null: hide popup
/**
 * Reacts to recording status changes: shows/hides the recording popup, lazily
 * creates the capture stream when a recording starts, stops the native stream
 * when it ends, and auto-hides the "ready" popup after 10 seconds.
 */
function setupRecordingListeners() {
  subscribers.push(
    recordingStatus$
      .pipe(distinctUntilChanged(shallowEqual))
      .subscribe(status => {
        const popup = popupManager.get('recording');
        // Any non-null status should have the popup visible.
        if (status && !popup.showing) {
          popup.show().catch(err => {
            logger.error('failed to show recording popup', err);
          });
        }
        if (status?.status === 'recording') {
          let recording = recordings.get(status.id);
          // create a recording if not exists
          if (!recording) {
            recording = createRecording(status);
            recordings.set(status.id, recording);
          }
        } else if (status?.status === 'stopped') {
          const recording = recordings.get(status.id);
          if (recording) {
            recording.stream.stop();
          }
        } else if (status?.status === 'ready') {
          // show the popup for 10s
          setTimeout(() => {
            // check again if current status is still ready
            if (
              recordingStatus$.value?.status === 'ready' &&
              recordingStatus$.value.id === status.id
            ) {
              popup.hide().catch(err => {
                logger.error('failed to hide recording popup', err);
              });
            }
          }, 10_000);
        } else if (!status) {
          // status is removed, we should hide the popup
          popupManager
            .get('recording')
            .hide()
            .catch(err => {
              logger.error('failed to hide recording popup', err);
            });
        }
      })
  );
}
/**
 * Enumerates tappable applications from the native shareable content,
 * excluding Apple system apps (com.apple.*) and this process itself.
 * Apps whose properties cannot be read are logged and skipped.
 *
 * @returns the filtered list, or [] when shareable content is unavailable
 */
function getAllApps(): TappableAppInfo[] {
  if (!shareableContent) {
    return [];
  }
  const result: TappableAppInfo[] = [];
  for (const app of shareableContent.applications()) {
    try {
      const info = {
        rawInstance: app,
        processId: app.processId,
        processGroupId: app.processGroupId,
        bundleIdentifier: app.bundleIdentifier,
        name: app.name,
        isRunning: app.isRunning,
      };
      // Skip Apple system apps and our own process.
      if (
        !info.bundleIdentifier.startsWith('com.apple') &&
        info.processId !== process.pid
      ) {
        result.push(info);
      }
    } catch (error) {
      logger.error('failed to get app info', error);
    }
  }
  return result;
}
// Minimal "has unsubscribe()" shape shared by rxjs Subscriptions and the
// native listener handles, so both can be cleaned up uniformly.
type Subscriber = {
  unsubscribe: () => void;
};
/**
 * Keeps applications$ fresh: seeds it immediately, then refreshes it when the
 * native app list changes, when any tracked app's state changes, or on a 3s
 * heartbeat. Refresh requests are funneled through updateApplicationsPing$
 * and throttled to at most one enumeration per 3s.
 */
function setupMediaListeners() {
  applications$.next(getAllApps());
  subscribers.push(
    // Heartbeat: periodically request a refresh.
    interval(3000).subscribe(() => {
      updateApplicationsPing$.next(Date.now());
    }),
    // Native notification that the set of applications changed.
    ShareableContent.onApplicationListChanged(() => {
      updateApplicationsPing$.next(Date.now());
    }),
    // Coalesce all refresh requests into throttled re-enumerations.
    updateApplicationsPing$
      .pipe(distinctUntilChanged(), throttleTime(3000))
      .subscribe(() => {
        applications$.next(getAllApps());
      })
  );
  // Per-app state listeners, rebuilt whenever the app list changes.
  let appStateSubscribers: Subscriber[] = [];
  subscribers.push(
    applications$.subscribe(apps => {
      // Tear down listeners attached for the previous app list.
      appStateSubscribers.forEach(subscriber => {
        try {
          subscriber.unsubscribe();
        } catch {
          // ignore unsubscribe error
        }
      });
      const _appStateSubscribers: Subscriber[] = [];
      apps.forEach(app => {
        try {
          const tappableApp = app.rawInstance;
          _appStateSubscribers.push(
            ShareableContent.onAppStateChanged(tappableApp, () => {
              updateApplicationsPing$.next(Date.now());
            })
          );
        } catch (error) {
          logger.error(
            `Failed to convert app ${app.name} to TappableApplication`,
            error
          );
        }
      });
      appStateSubscribers = _appStateSubscribers;
      // NOTE(review): rxjs ignores values returned from subscribe callbacks,
      // so this cleanup function is likely never invoked — confirm. The
      // reassignment of appStateSubscribers above already handles cleanup on
      // each emission.
      return () => {
        _appStateSubscribers.forEach(subscriber => {
          try {
            subscriber.unsubscribe();
          } catch {
            // ignore unsubscribe error
          }
        });
      };
    })
  );
}
/**
 * Entry point for the recording feature (macOS only): acquires the native
 * shareable-content handle once, then wires up app-group derivation, the
 * new-running-app prompt, and recording status listeners. Safe to call on
 * other platforms (no-op) and tolerant of native init failure.
 */
export function setupRecording() {
  if (!isMacOS()) {
    return;
  }
  if (shareableContent === null) {
    try {
      shareableContent = new ShareableContent();
      setupMediaListeners();
    } catch (error) {
      logger.error('failed to get shareable content', error);
    }
  }
  setupAppGroups();
  setupNewRunningAppGroup();
  setupRecordingListeners();
}
/**
 * Accepts either an AppGroupInfo or a bare processGroupId and resolves it to
 * an AppGroupInfo (or undefined if the id is unknown).
 */
function normalizeAppGroupInfo(
  appGroup?: AppGroupInfo | number
): AppGroupInfo | undefined {
  if (typeof appGroup === 'number') {
    // A number is a processGroupId; look it up among the known groups.
    return appGroups$.value.find(g => g.processGroupId === appGroup);
  }
  return appGroup;
}
/**
 * Offers a new recording (popup in "new" state) for the given app group.
 *
 * @param appGroup - group info or a processGroupId to resolve
 * @returns the resulting status, or null on unsupported platforms
 */
export function newRecording(
  appGroup?: AppGroupInfo | number
): RecordingStatus | null {
  // likely called on unsupported platform when native content is absent
  if (!shareableContent) {
    return null;
  }
  const event = {
    type: 'NEW_RECORDING',
    appGroup: normalizeAppGroupInfo(appGroup),
  } as const;
  return recordingStateMachine.dispatch(event);
}
/**
 * Starts recording for the given app group (or system audio when omitted) by
 * dispatching START_RECORDING to the state machine.
 *
 * @param appGroup - group info or a processGroupId to resolve
 */
export function startRecording(
  appGroup?: AppGroupInfo | number
): RecordingStatus | null {
  const event = {
    type: 'START_RECORDING',
    appGroup: normalizeAppGroupInfo(appGroup),
  } as const;
  return recordingStateMachine.dispatch(event);
}
/** Pauses the recording with the given id via the state machine. */
export function pauseRecording(id: number) {
  return recordingStateMachine.dispatch({ id, type: 'PAUSE_RECORDING' });
}
/** Resumes the paused recording with the given id via the state machine. */
export function resumeRecording(id: number) {
  return recordingStateMachine.dispatch({ id, type: 'RESUME_RECORDING' });
}
/**
 * Stops the recording with the given id: transitions the state machine to
 * "stopped" (carrying the raw file path and stream parameters), closes the
 * buffer file, waits for it to flush, and returns the serialized status.
 *
 * Fix: the flush previously awaited only the 'finish' event, so a write-stream
 * error would leave the promise pending forever. We now also resolve on
 * 'error' (after logging) so callers are never stuck.
 *
 * @param id - id of the recording to stop
 * @returns the serialized stopped status, or undefined when the recording or
 *   its file path is missing
 */
export async function stopRecording(id: number) {
  const recording = recordings.get(id);
  if (!recording) {
    logger.error(`Recording ${id} not found`);
    return;
  }
  if (!recording.file.path) {
    logger.error(`Recording ${id} has no file path`);
    return;
  }
  const recordingStatus = recordingStateMachine.dispatch({
    type: 'STOP_RECORDING',
    id,
    filepath: String(recording.file.path),
    sampleRate: recording.stream.sampleRate,
    numberOfChannels: recording.stream.channels,
  });
  if (!recordingStatus) {
    logger.error('No recording status to stop');
    return;
  }
  const { file } = recording;
  file.end();
  // Wait for the write stream to flush (or fail) before reporting back.
  await new Promise<void>(resolve => {
    file.once('finish', () => {
      resolve();
    });
    file.once('error', err => {
      logger.error(`Recording ${id} file stream error`, err);
      resolve();
    });
  });
  return serializeRecordingStatus(recordingStatus);
}
/**
 * Finalizes a recording: writes the encoded audio buffer to a .webm file in
 * SAVED_RECORDINGS_DIR, dispatches SAVE_RECORDING so the status becomes
 * "ready", and brings the main window to the front.
 *
 * @param id - id of the recording being finalized; must match the active status
 * @param buffer - encoded audio data (written verbatim to disk; presumably
 *   webm given the extension — TODO confirm with the renderer encoder)
 */
export async function readyRecording(id: number, buffer: Buffer) {
  const recordingStatus = recordingStatus$.value;
  const recording = recordings.get(id);
  // Only the currently-active recording may be finalized.
  if (!recordingStatus || recordingStatus.id !== id || !recording) {
    logger.error(`Recording ${id} not found`);
    return;
  }
  const filepath = path.join(
    SAVED_RECORDINGS_DIR,
    `${recordingStatus.appGroup?.bundleIdentifier ?? 'unknown'}-${recordingStatus.id}-${recordingStatus.startTime}.webm`
  );
  await fs.writeFile(filepath, buffer);
  // Update the status through the state machine
  recordingStateMachine.dispatch({
    type: 'SAVE_RECORDING',
    id,
    filepath,
  });
  // bring up the window
  getMainWindow()
    .then(mainWindow => {
      if (mainWindow) {
        mainWindow.show();
      }
    })
    .catch(err => {
      logger.error('failed to bring up the window', err);
    });
}
/**
 * Removes a recording: drops the in-memory entry, then notifies the state
 * machine so popup/UI state is cleared.
 */
function removeRecording(id: number) {
  recordings.delete(id);
  const event = { type: 'REMOVE_RECORDING', id } as const;
  recordingStateMachine.dispatch(event);
}
/**
 * IPC-safe snapshot of a RecordingStatus: only plain data (no live streams or
 * native handles), suitable for sending to the renderer process.
 */
export interface SerializedRecordingStatus {
  // id of the recording this snapshot describes
  id: number;
  // current lifecycle state, mirrors RecordingStatus['status']
  status: RecordingStatus['status'];
  // display name of the recorded app's process group, if any
  appName?: string;
  // if there is no app group, it means the recording is for system audio
  appGroupId?: number;
  // app icon bytes from the native layer — format unverified here
  icon?: Buffer;
  // recording start timestamp; 0 means not started yet
  startTime: number;
  // path of the saved audio file, once available
  filepath?: string;
  // audio sample rate of the captured stream
  sampleRate?: number;
  // channel count of the captured stream
  numberOfChannels?: number;
}
/**
 * Flattens a RecordingStatus into the plain-data SerializedRecordingStatus
 * shape so it can cross the IPC boundary.
 */
function serializeRecordingStatus(
  status: RecordingStatus
): SerializedRecordingStatus {
  // Pull the (optional) app group out once instead of repeating the chain.
  const group = status.appGroup;
  return {
    id: status.id,
    status: status.status,
    appName: group?.name,
    appGroupId: group?.processGroupId,
    icon: group?.icon,
    startTime: status.startTime,
    filepath: status.filepath,
    sampleRate: status.sampleRate,
    numberOfChannels: status.numberOfChannels,
  };
}
export const recordingHandlers = { export const recordingHandlers = {
getRecording: async (_, id: number) => { getRecording: async (_, id: number) => {
@@ -565,9 +51,47 @@ export const recordingHandlers = {
readyRecording: async (_, id: number, buffer: Uint8Array) => { readyRecording: async (_, id: number, buffer: Uint8Array) => {
return readyRecording(id, Buffer.from(buffer)); return readyRecording(id, Buffer.from(buffer));
}, },
handleBlockCreationSuccess: async (_, id: number) => {
return handleBlockCreationSuccess(id);
},
handleBlockCreationFailed: async (_, id: number, error?: Error) => {
return handleBlockCreationFailed(id, error);
},
removeRecording: async (_, id: number) => { removeRecording: async (_, id: number) => {
return removeRecording(id); return removeRecording(id);
}, },
checkRecordingAvailable: async () => {
return checkRecordingAvailable();
},
setupRecordingFeature: async () => {
return setupRecordingFeature();
},
disableRecordingFeature: async () => {
return disableRecordingFeature();
},
checkScreenRecordingPermission: async () => {
return checkScreenRecordingPermission();
},
showScreenRecordingPermissionSetting: async () => {
if (isMacOS()) {
return shell.openExternal(
'x-apple.systempreferences:com.apple.preference.security?Privacy_ScreenCapture'
);
}
// this only available on MacOS
return false;
},
showSavedRecordings: async (_, subpath?: string) => {
const normalizedDir = path.normalize(
path.join(SAVED_RECORDINGS_DIR, subpath ?? '')
);
const normalizedBase = path.normalize(SAVED_RECORDINGS_DIR);
if (!normalizedDir.startsWith(normalizedBase)) {
throw new Error('Invalid directory');
}
return shell.showItemInFolder(normalizedDir);
},
} satisfies NamespaceHandlers; } satisfies NamespaceHandlers;
export const recordingEvents = { export const recordingEvents = {

View File

@@ -4,17 +4,6 @@ import { shallowEqual } from '../../shared/utils';
import { logger } from '../logger'; import { logger } from '../logger';
import type { AppGroupInfo, RecordingStatus } from './types'; import type { AppGroupInfo, RecordingStatus } from './types';
/**
* Possible states for a recording
*/
export type RecordingState =
| 'new'
| 'recording'
| 'paused'
| 'stopped'
| 'ready'
| 'inactive';
/** /**
* Recording state machine events * Recording state machine events
*/ */
@@ -35,6 +24,15 @@ export type RecordingEvent =
id: number; id: number;
filepath: string; filepath: string;
} }
| {
type: 'CREATE_BLOCK_FAILED';
id: number;
error?: Error;
}
| {
type: 'CREATE_BLOCK_SUCCESS';
id: number;
}
| { type: 'REMOVE_RECORDING'; id: number }; | { type: 'REMOVE_RECORDING'; id: number };
/** /**
@@ -93,6 +91,12 @@ export class RecordingStateMachine {
case 'SAVE_RECORDING': case 'SAVE_RECORDING':
newStatus = this.handleSaveRecording(event.id, event.filepath); newStatus = this.handleSaveRecording(event.id, event.filepath);
break; break;
case 'CREATE_BLOCK_SUCCESS':
newStatus = this.handleCreateBlockSuccess(event.id);
break;
case 'CREATE_BLOCK_FAILED':
newStatus = this.handleCreateBlockFailed(event.id, event.error);
break;
case 'REMOVE_RECORDING': case 'REMOVE_RECORDING':
this.handleRemoveRecording(event.id); this.handleRemoveRecording(event.id);
newStatus = currentStatus?.id === event.id ? null : currentStatus; newStatus = currentStatus?.id === event.id ? null : currentStatus;
@@ -255,6 +259,47 @@ export class RecordingStateMachine {
}; };
} }
/**
 * Handle the CREATE_BLOCK_SUCCESS event
 *
 * Transitions the matching active recording to 'create-block-success'.
 * If no recording is active or the id does not match, the current status is
 * returned unchanged (the mismatch is logged).
 */
private handleCreateBlockSuccess(id: number): RecordingStatus | null {
  const currentStatus = this.recordingStatus$.value;
  if (!currentStatus || currentStatus.id !== id) {
    logger.error(`Recording ${id} not found for create-block-success`);
    return currentStatus;
  }
  return {
    ...currentStatus,
    status: 'create-block-success',
  };
}
/**
 * Handle the CREATE_BLOCK_FAILED event
 *
 * Transitions the matching active recording to 'create-block-failed',
 * logging the failure cause when one was provided. If no recording is
 * active or the id does not match, the current status is returned
 * unchanged (the mismatch is logged).
 */
private handleCreateBlockFailed(
  id: number,
  error?: Error
): RecordingStatus | null {
  const currentStatus = this.recordingStatus$.value;
  if (!currentStatus || currentStatus.id !== id) {
    logger.error(`Recording ${id} not found for create-block-failed`);
    return currentStatus;
  }
  if (error) {
    logger.error(`Recording ${id} create block failed:`, error);
  }
  return {
    ...currentStatus,
    status: 'create-block-failed',
  };
}
/** /**
* Handle the REMOVE_RECORDING event * Handle the REMOVE_RECORDING event
*/ */

View File

@@ -39,7 +39,16 @@ export interface RecordingStatus {
// paused: the recording is paused // paused: the recording is paused
// stopped: the recording is stopped (processing audio file for use in the editor) // stopped: the recording is stopped (processing audio file for use in the editor)
// ready: the recording is ready to be used // ready: the recording is ready to be used
status: 'new' | 'recording' | 'paused' | 'stopped' | 'ready'; // create-block-success: the recording is successfully created as a block
// create-block-failed: creating block failed
status:
| 'new'
| 'recording'
| 'paused'
| 'stopped'
| 'ready'
| 'create-block-success'
| 'create-block-failed';
app?: TappableAppInfo; app?: TappableAppInfo;
appGroup?: AppGroupInfo; appGroup?: AppGroupInfo;
startTime: number; // 0 means not started yet startTime: number; // 0 means not started yet

View File

@@ -53,3 +53,21 @@ export const SpellCheckStateSchema = z.object({
export const SpellCheckStateKey = 'spellCheckState' as const; export const SpellCheckStateKey = 'spellCheckState' as const;
// eslint-disable-next-line no-redeclare // eslint-disable-next-line no-redeclare
export type SpellCheckStateSchema = z.infer<typeof SpellCheckStateSchema>; export type SpellCheckStateSchema = z.infer<typeof SpellCheckStateSchema>;
// Persistence key under which meeting settings are stored.
export const MeetingSettingsKey = 'meetingSettings' as const;
// Zod schema validating the persisted meeting settings; defaults describe a
// fresh install (feature off, prompt-on-meeting, auto transcription on).
export const MeetingSettingsSchema = z.object({
  // global meeting feature control
  enabled: z.boolean().default(false),
  // when recording is saved, where to create the recording block
  recordingSavingMode: z.enum(['new-doc', 'journal-today']).default('new-doc'),
  // whether to enable auto transcription for new meeting recordings
  autoTranscription: z.boolean().default(true),
  // recording reactions to new meeting events
  recordingMode: z.enum(['none', 'prompt', 'auto-start']).default('prompt'),
});
// eslint-disable-next-line no-redeclare
export type MeetingSettingsSchema = z.infer<typeof MeetingSettingsSchema>;

View File

@@ -14,11 +14,14 @@ import { beforeAppQuit } from '../cleanup';
import { logger } from '../logger'; import { logger } from '../logger';
import { import {
appGroups$, appGroups$,
checkRecordingAvailable,
checkScreenRecordingPermission,
MeetingsSettingsState,
recordingStatus$, recordingStatus$,
startRecording, startRecording,
stopRecording, stopRecording,
updateApplicationsPing$, updateApplicationsPing$,
} from '../recording'; } from '../recording/feature';
import { getMainWindow } from '../windows-manager'; import { getMainWindow } from '../windows-manager';
import { icons } from './icons'; import { icons } from './icons';
@@ -125,30 +128,37 @@ class TrayState {
}; };
} }
getRecordingMenuProvider(): TrayMenuProvider { getRecordingMenuProvider(): TrayMenuProvider | null {
const appGroups = appGroups$.value;
const runningAppGroups = appGroups.filter(appGroup => appGroup.isRunning);
const recordingStatus = recordingStatus$.value;
if ( if (
!recordingStatus || !checkRecordingAvailable() ||
(recordingStatus?.status !== 'paused' && !checkScreenRecordingPermission() ||
recordingStatus?.status !== 'recording') !MeetingsSettingsState.value.enabled
) { ) {
const appMenuItems = runningAppGroups.map(appGroup => ({ return null;
label: appGroup.name, }
icon: appGroup.icon || undefined,
click: () => { const getConfig = () => {
logger.info( const appGroups = appGroups$.value;
`User action: Start Recording Meeting (${appGroup.name})` const runningAppGroups = appGroups.filter(appGroup => appGroup.isRunning);
);
startRecording(appGroup); const recordingStatus = recordingStatus$.value;
},
})); if (
return { !recordingStatus ||
key: 'recording', (recordingStatus?.status !== 'paused' &&
getConfig: () => [ recordingStatus?.status !== 'recording')
) {
const appMenuItems = runningAppGroups.map(appGroup => ({
label: appGroup.name,
icon: appGroup.icon || undefined,
click: () => {
logger.info(
`User action: Start Recording Meeting (${appGroup.name})`
);
startRecording(appGroup);
},
}));
return [
{ {
label: 'Start Recording Meeting', label: 'Start Recording Meeting',
icon: icons.record, icon: icons.record,
@@ -167,18 +177,22 @@ class TrayState {
], ],
}, },
...appMenuItems, ...appMenuItems,
], {
}; label: `Meetings Settings...`,
} click: async () => {
showMainWindow();
applicationMenuSubjects.openInSettingModal$.next('meetings');
},
},
];
}
const recordingLabel = recordingStatus.appGroup?.name const recordingLabel = recordingStatus.appGroup?.name
? `Recording (${recordingStatus.appGroup?.name})` ? `Recording (${recordingStatus.appGroup?.name})`
: 'Recording'; : 'Recording';
// recording is either started or paused // recording is either started or paused
return { return [
key: 'recording',
getConfig: () => [
{ {
label: recordingLabel, label: recordingLabel,
icon: icons.recording, icon: icons.recording,
@@ -193,7 +207,12 @@ class TrayState {
}); });
}, },
}, },
], ];
};
return {
key: 'recording',
getConfig,
}; };
} }
@@ -214,6 +233,13 @@ class TrayState {
}); });
}, },
}, },
{
label: `About ${app.getName()}`,
click: () => {
showMainWindow();
applicationMenuSubjects.openInSettingModal$.next('about');
},
},
'separator', 'separator',
{ {
label: 'Quit AFFiNE Completely...', label: 'Quit AFFiNE Completely...',
@@ -267,7 +293,7 @@ class TrayState {
const providers = [ const providers = [
this.getPrimaryMenuProvider(), this.getPrimaryMenuProvider(),
isMacOS() ? this.getRecordingMenuProvider() : null, this.getRecordingMenuProvider(),
this.getSecondaryMenuProvider(), this.getSecondaryMenuProvider(),
].filter(p => p !== null); ].filter(p => p !== null);

View File

@@ -46,9 +46,11 @@
"@radix-ui/react-visually-hidden": "^1.1.1", "@radix-ui/react-visually-hidden": "^1.1.1",
"@toeverything/theme": "^1.1.12", "@toeverything/theme": "^1.1.12",
"@vanilla-extract/dynamic": "^2.1.2", "@vanilla-extract/dynamic": "^2.1.2",
"bytes": "^3.1.2",
"check-password-strength": "^3.0.0", "check-password-strength": "^3.0.0",
"clsx": "^2.1.1", "clsx": "^2.1.1",
"dayjs": "^1.11.13", "dayjs": "^1.11.13",
"foxact": "^0.2.45",
"jotai": "^2.10.3", "jotai": "^2.10.3",
"lit": "^3.2.1", "lit": "^3.2.1",
"lodash-es": "^4.17.21", "lodash-es": "^4.17.21",
@@ -77,6 +79,7 @@
"@storybook/react-vite": "^8.4.7", "@storybook/react-vite": "^8.4.7",
"@testing-library/dom": "^10.4.0", "@testing-library/dom": "^10.4.0",
"@testing-library/react": "^16.1.0", "@testing-library/react": "^16.1.0",
"@types/bytes": "^3.1.5",
"@types/react": "^19.0.1", "@types/react": "^19.0.1",
"@types/react-dom": "^19.0.2", "@types/react-dom": "^19.0.2",
"@vanilla-extract/css": "^1.17.0", "@vanilla-extract/css": "^1.17.0",

View File

@@ -29,6 +29,10 @@ export const wrapper = style({
}, },
}, },
}); });
export const wrapperDisabled = style({
opacity: 0.5,
pointerEvents: 'none',
});
globalStyle(`${wrapper} .title`, { globalStyle(`${wrapper} .title`, {
fontSize: cssVar('fontSm'), fontSize: cssVar('fontSm'),
fontWeight: 600, fontWeight: 600,

View File

@@ -1,17 +1,20 @@
import clsx from 'clsx';
import type { PropsWithChildren, ReactNode } from 'react'; import type { PropsWithChildren, ReactNode } from 'react';
import { wrapper } from './share.css'; import { wrapper, wrapperDisabled } from './share.css';
interface SettingWrapperProps { interface SettingWrapperProps {
title?: ReactNode; title?: ReactNode;
disabled?: boolean;
} }
export const SettingWrapper = ({ export const SettingWrapper = ({
title, title,
children, children,
disabled,
}: PropsWithChildren<SettingWrapperProps>) => { }: PropsWithChildren<SettingWrapperProps>) => {
return ( return (
<div className={wrapper}> <div className={clsx(wrapper, disabled && wrapperDisabled)}>
{title ? <div className="title">{title}</div> : null} {title ? <div className="title">{title}</div> : null}
{children} {children}
</div> </div>

View File

@@ -1,6 +1,7 @@
export * from './hooks'; export * from './hooks';
export * from './lit-react'; export * from './lit-react';
export * from './styles'; export * from './styles';
export * from './ui/audio-player';
export * from './ui/avatar'; export * from './ui/avatar';
export * from './ui/button'; export * from './ui/button';
export * from './ui/checkbox'; export * from './ui/checkbox';
@@ -14,9 +15,7 @@ export * from './ui/error-message';
export * from './ui/input'; export * from './ui/input';
export * from './ui/layout'; export * from './ui/layout';
export * from './ui/loading'; export * from './ui/loading';
export * from './ui/lottie/collections-icon'; export * from './ui/lottie';
export * from './ui/lottie/delete-icon';
export * from './ui/lottie/folder-icon';
export * from './ui/masonry'; export * from './ui/masonry';
export * from './ui/menu'; export * from './ui/menu';
export * from './ui/modal'; export * from './ui/modal';

View File

@@ -4,8 +4,9 @@ import React, { createElement, type ReactNode } from 'react';
import { createComponent } from './create-component'; import { createComponent } from './create-component';
export
@customElement('affine-lit-template-wrapper') @customElement('affine-lit-template-wrapper')
export class LitTemplateWrapper extends LitElement { class LitTemplateWrapper extends LitElement {
static override get properties() { static override get properties() {
return { return {
template: { type: Object }, template: { type: Object },

View File

@@ -0,0 +1,331 @@
import type { Meta, StoryObj } from '@storybook/react';
import { useCallback, useEffect, useRef, useState } from 'react';
import { AudioPlayer, MiniAudioPlayer } from './audio-player';
const AudioWrapper = () => {
const [audioFile, setAudioFile] = useState<File | null>(null);
const [waveform, setWaveform] = useState<number[] | null>(null);
const [playbackState, setPlaybackState] = useState<
'idle' | 'playing' | 'paused' | 'stopped'
>('idle');
const [seekTime, setSeekTime] = useState(0);
const [duration, setDuration] = useState(0);
const [loading, setLoading] = useState(false);
const audioRef = useRef<HTMLAudioElement>(null);
const audioUrlRef = useRef<string | null>(null);
// Generate waveform data from audio file
const generateWaveform = async (audioBuffer: AudioBuffer) => {
const channelData = audioBuffer.getChannelData(0);
const samples = 1000;
const blockSize = Math.floor(channelData.length / samples);
const waveformData = [];
for (let i = 0; i < samples; i++) {
const start = i * blockSize;
const end = start + blockSize;
let sum = 0;
for (let j = start; j < end; j++) {
sum += Math.abs(channelData[j]);
}
waveformData.push(sum / blockSize);
}
// Normalize waveform data
const max = Math.max(...waveformData);
return waveformData.map(val => val / max);
};
const handleFileChange = useCallback(async (file: File) => {
setLoading(true);
setAudioFile(file);
setPlaybackState('idle');
setSeekTime(0);
setDuration(0);
setWaveform(null);
// Revoke previous URL if exists
if (audioUrlRef.current) {
URL.revokeObjectURL(audioUrlRef.current);
}
// Create new URL for the audio file
const fileUrl = URL.createObjectURL(file);
audioUrlRef.current = fileUrl;
try {
const arrayBuffer = await file.arrayBuffer();
const audioContext = new AudioContext();
const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
const waveformData = await generateWaveform(audioBuffer);
setWaveform(waveformData);
} catch (error) {
console.error('Error processing audio file:', error);
} finally {
setLoading(false);
}
}, []);
// Cleanup object URL when component unmounts
useEffect(() => {
return () => {
if (audioUrlRef.current) {
URL.revokeObjectURL(audioUrlRef.current);
}
};
}, []);
const handleDrop = useCallback(
(e: React.DragEvent) => {
e.preventDefault();
const file = e.dataTransfer.files[0];
if (file && file.type.startsWith('audio/')) {
handleFileChange(file);
}
},
[handleFileChange]
);
const handleFileSelect = useCallback(
(e: React.ChangeEvent<HTMLInputElement>) => {
const file = e.target.files?.[0];
if (file) {
handleFileChange(file);
}
},
[handleFileChange]
);
const handlePlay = useCallback((e: React.MouseEvent) => {
e.stopPropagation();
if (audioRef.current) {
const playPromise = audioRef.current.play();
// Handle play promise to catch any errors
if (playPromise !== undefined) {
playPromise
.then(() => {
setPlaybackState('playing');
})
.catch(error => {
console.error('Error playing audio:', error);
setPlaybackState('paused');
});
}
}
}, []);
const handlePause = useCallback((e: React.MouseEvent) => {
e.stopPropagation();
if (audioRef.current) {
audioRef.current.pause();
setPlaybackState('paused');
}
}, []);
const handleStop = useCallback((e: React.MouseEvent) => {
e.stopPropagation();
if (audioRef.current) {
audioRef.current.pause();
audioRef.current.currentTime = 0;
setPlaybackState('stopped');
setSeekTime(0);
}
}, []);
const handleSeek = useCallback(
(time: number) => {
if (audioRef.current) {
// Ensure time is within valid range
const clampedTime = Math.max(
0,
Math.min(time, audioRef.current.duration)
);
audioRef.current.currentTime = clampedTime;
if (playbackState === 'stopped') {
setPlaybackState('paused');
}
}
},
[playbackState]
);
useEffect(() => {
  const audio = audioRef.current;
  if (!audio || !audioFile) return;

  // Mirror the <audio> element's playback position into React state.
  // 'seeking'/'seeked' cover the user dragging the native controls' scrubber.
  const handleTimeUpdate = () => {
    setSeekTime(audio.currentTime);
  };

  // Duration becomes known asynchronously; it is NaN/Infinity until metadata
  // loads, so gate the state transition on a finite value.
  const handleDurationChange = () => {
    if (!isNaN(audio.duration) && isFinite(audio.duration)) {
      setDuration(audio.duration);
      setPlaybackState('paused');
      setLoading(false);
    }
  };

  // 'play' and 'playing' both mean active playback for our state machine.
  const handlePlay = () => {
    setPlaybackState('playing');
  };

  // BUGFIX: 'pause' was previously registered with TWO different handlers
  // (handleNativePause and handlePaused) whose logic disagreed — the second
  // registration ran after the first and could override its decision. A
  // single handler now decides: at/near the end of the track we treat the
  // pause as a stop, otherwise it is a plain pause.
  const handlePause = () => {
    if (audio.currentTime >= audio.duration - 0.1) {
      setPlaybackState('stopped');
      setSeekTime(0);
    } else {
      setPlaybackState('paused');
    }
  };

  const handleEnded = () => {
    setPlaybackState('stopped');
    setSeekTime(0);
  };

  const handleError = () => {
    console.error('Audio playback error');
    setPlaybackState('stopped');
    setLoading(false);
  };

  // 'waiting' fires while buffering; 'canplay' clears the spinner.
  const handleWaiting = () => {
    setLoading(true);
  };
  const handleCanPlay = () => {
    setLoading(false);
  };

  audio.addEventListener('timeupdate', handleTimeUpdate);
  audio.addEventListener('seeking', handleTimeUpdate);
  audio.addEventListener('seeked', handleTimeUpdate);
  audio.addEventListener('play', handlePlay);
  audio.addEventListener('playing', handlePlay);
  audio.addEventListener('pause', handlePause);
  audio.addEventListener('loadedmetadata', handleDurationChange);
  audio.addEventListener('durationchange', handleDurationChange);
  audio.addEventListener('ended', handleEnded);
  audio.addEventListener('error', handleError);
  audio.addEventListener('waiting', handleWaiting);
  audio.addEventListener('canplay', handleCanPlay);

  return () => {
    audio.removeEventListener('timeupdate', handleTimeUpdate);
    audio.removeEventListener('seeking', handleTimeUpdate);
    audio.removeEventListener('seeked', handleTimeUpdate);
    audio.removeEventListener('play', handlePlay);
    audio.removeEventListener('playing', handlePlay);
    audio.removeEventListener('pause', handlePause);
    audio.removeEventListener('loadedmetadata', handleDurationChange);
    audio.removeEventListener('durationchange', handleDurationChange);
    audio.removeEventListener('ended', handleEnded);
    audio.removeEventListener('error', handleError);
    audio.removeEventListener('waiting', handleWaiting);
    audio.removeEventListener('canplay', handleCanPlay);
  };
}, [audioFile]);
// Drop-zone UI: until a file is chosen, show the drag & drop hint and a
// native file input; afterwards render the native <audio> element with
// controls (the source of truth for the playback events wired up above)
// alongside both the mini and full player components for side-by-side
// visual comparison.
return (
  <div
    style={{
      width: '100%',
      minHeight: '200px',
      border: '2px dashed #ccc',
      borderRadius: '8px',
      display: 'flex',
      flexDirection: 'column',
      alignItems: 'center',
      justifyContent: 'center',
      padding: '20px',
      gap: '20px',
    }}
    onDrop={handleDrop}
    onDragOver={e => e.preventDefault()}
  >
    {!audioFile ? (
      <>
        <div>Drag & drop an audio file here, or</div>
        <input
          type="file"
          accept="audio/*"
          onChange={handleFileSelect}
          style={{ maxWidth: '200px' }}
        />
      </>
    ) : (
      <>
        <audio
          ref={audioRef}
          src={audioUrlRef.current || ''}
          preload="metadata"
          controls
          style={{ width: '100%', maxWidth: '600px' }}
        />
        <MiniAudioPlayer
          name={audioFile.name}
          size={audioFile.size}
          waveform={waveform}
          playbackState={playbackState}
          seekTime={seekTime}
          duration={duration}
          loading={loading}
          onPlay={handlePlay}
          onPause={handlePause}
          onStop={handleStop}
          onSeek={handleSeek}
        />
        <AudioPlayer
          name={audioFile.name}
          size={audioFile.size}
          waveform={waveform}
          playbackState={playbackState}
          seekTime={seekTime}
          duration={duration}
          loading={loading}
          onPlay={handlePlay}
          onPause={handlePause}
          onStop={handleStop}
          onSeek={handleSeek}
        />
      </>
    )}
  </div>
);
};
// Storybook wiring: the interactive AudioWrapper above doubles as the story
// canvas, so no args are needed.
const meta: Meta<typeof AudioWrapper> = {
  title: 'UI/AudioPlayer',
  component: AudioWrapper,
  parameters: {
    // Center the player in the preview iframe.
    layout: 'centered',
  },
};
export default meta;
type Story = StoryObj<typeof AudioWrapper>;
// Single default story; the user drags in an audio file at runtime.
export const Default: Story = {};

View File

@@ -1,4 +1,3 @@
import { IconButton } from '@affine/component';
import { import {
AddThirtySecondIcon, AddThirtySecondIcon,
CloseIcon, CloseIcon,
@@ -9,9 +8,10 @@ import bytes from 'bytes';
import { clamp } from 'lodash-es'; import { clamp } from 'lodash-es';
import { type MouseEventHandler, type ReactNode, useCallback } from 'react'; import { type MouseEventHandler, type ReactNode, useCallback } from 'react';
import { IconButton } from '../button';
import { AnimatedPlayIcon } from '../lottie';
import * as styles from './audio-player.css'; import * as styles from './audio-player.css';
import { AudioWaveform } from './audio-waveform'; import { AudioWaveform } from './audio-waveform';
import { AnimatedPlayIcon } from './lottie/animated-play-icon';
// Format seconds to mm:ss // Format seconds to mm:ss
const formatTime = (seconds: number): string => { const formatTime = (seconds: number): string => {

View File

@@ -0,0 +1,2 @@
export * from './audio-player';
export * from './audio-waveform';

View File

@@ -0,0 +1,62 @@
import type { Meta, StoryFn } from '@storybook/react';
import { useState } from 'react';
import { AnimatedPlayIcon } from './animated-play-icon';
// Storybook meta for the standalone AnimatedPlayIcon component.
export default {
  title: 'UI/Audio Player/Animated Play Icon',
  component: AnimatedPlayIcon,
  parameters: {
    docs: {
      description: {
        component:
          'An animated icon that transitions between play, pause, and loading states.',
      },
    },
  },
} satisfies Meta<typeof AnimatedPlayIcon>;

// Shared template: forwards story args straight to the component.
const Template: StoryFn<typeof AnimatedPlayIcon> = args => (
  <AnimatedPlayIcon {...args} />
);

// One static story per supported state.
export const Play = Template.bind({});
Play.args = {
  state: 'play',
};
export const Pause = Template.bind({});
Pause.args = {
  state: 'pause',
};
export const Loading = Template.bind({});
Loading.args = {
  state: 'loading',
};
// Interactive story: a button that flips the icon between its states so the
// Lottie transition can be inspected manually.
export const WithStateToggle: StoryFn<typeof AnimatedPlayIcon> = () => {
  const [state, setState] = useState<'play' | 'pause' | 'loading'>('play');

  // Equivalent to the original switch: 'play' -> 'pause'; every other state
  // ('pause', 'loading', fallthrough) -> 'play'.
  const cycleState = () =>
    setState(current => (current === 'play' ? 'pause' : 'play'));

  return (
    <div style={{ display: 'flex', alignItems: 'center', gap: '16px' }}>
      <AnimatedPlayIcon state={state} />
      <button onClick={cycleState}>Toggle State (Current: {state})</button>
    </div>
  );
};

View File

@@ -0,0 +1,71 @@
import clsx from 'clsx';
import { useDebouncedValue } from 'foxact/use-debounced-value';
import type { LottieRef } from 'lottie-react';
import Lottie from 'lottie-react';
import { useEffect, useRef } from 'react';
import { Loading } from '../loading';
import playandpause from './playandpause.json';
import * as styles from './styles.css';
/**
 * Props for the AnimatedPlayIcon component.
 */
export interface AnimatedPlayIconProps {
  /** Visual state: 'play'/'pause' animate via Lottie; 'loading' renders a spinner. */
  state: 'play' | 'pause' | 'loading';
  /** Optional extra class merged onto the Lottie root element. */
  className?: string;
  /** Click handler forwarded to the rendered icon. */
  onClick?: (e: React.MouseEvent) => void;
}
// Internal component that plays the appropriate segment of the combined
// play/pause Lottie animation whenever `state` changes.
const PlayAndPauseIcon = ({
  onClick,
  className,
  state,
}: {
  onClick?: (e: React.MouseEvent) => void;
  className?: string;
  state: 'play' | 'pause';
}) => {
  const lottieRef: LottieRef = useRef(null);
  // Last state we animated to; lets us skip replaying segments on unrelated
  // re-renders (also suppresses the initial mount animation).
  const prevStateRef = useRef(state);
  useEffect(() => {
    if (!lottieRef.current) return;
    const lottie = lottieRef.current;
    lottie.setSpeed(2);
    // Only animate if state actually changed
    if (prevStateRef.current !== state) {
      if (state === 'play') {
        // Animate from pause to play
        // (frames 120-160 of playandpause.json — assumed from the asset; TODO confirm)
        lottie.playSegments([120, 160], true);
      } else {
        // Animate from play to pause
        // (frames 60-100 — assumed from the asset; TODO confirm)
        lottie.playSegments([60, 100], true);
      }
      prevStateRef.current = state;
    }
  }, [state]);
  return (
    <Lottie
      onClick={onClick}
      lottieRef={lottieRef}
      className={clsx(styles.root, className)}
      animationData={playandpause}
      loop={false}
      autoplay={false}
    />
  );
};
export const AnimatedPlayIcon = ({
state: _state,
className,
onClick,
}: AnimatedPlayIconProps) => {
const state = useDebouncedValue(_state, 25);
if (state === 'loading') {
return <Loading size={40} />;
}
return (
<PlayAndPauseIcon state={state} onClick={onClick} className={className} />
);
};

View File

@@ -0,0 +1,46 @@
import type { Meta, StoryFn } from '@storybook/react';
import { useState } from 'react';
import { AnimatedTranscribeIcon } from './animated-transcribe-icon';
// Storybook meta for the AnimatedTranscribeIcon component.
export default {
  title: 'UI/Audio Player/Animated Transcribe Icon',
  component: AnimatedTranscribeIcon,
  parameters: {
    docs: {
      description: {
        component:
          'An animated icon that shows transcription state with smooth transitions.',
      },
    },
  },
} satisfies Meta<typeof AnimatedTranscribeIcon>;

// Shared template: forwards story args straight to the component.
const Template: StoryFn<typeof AnimatedTranscribeIcon> = args => (
  <AnimatedTranscribeIcon {...args} />
);

// One static story per supported state.
export const Idle = Template.bind({});
Idle.args = {
  state: 'idle',
};
export const Transcribing = Template.bind({});
Transcribing.args = {
  state: 'transcribing',
};
// Interactive story: a button that flips the icon between idle and
// transcribing so the transition animation can be inspected manually.
export const WithStateToggle: StoryFn<typeof AnimatedTranscribeIcon> = () => {
  const [state, setState] = useState<'idle' | 'transcribing'>('idle');

  // Each click flips between the two transcription states.
  const handleToggle = () =>
    setState(prev => (prev === 'transcribing' ? 'idle' : 'transcribing'));

  return (
    <div style={{ display: 'flex', alignItems: 'center', gap: '16px' }}>
      <AnimatedTranscribeIcon state={state} />
      <button onClick={handleToggle}>Toggle State (Current: {state})</button>
    </div>
  );
};

View File

@@ -0,0 +1,5 @@
export * from './animated-play-icon';
export * from './animated-transcribe-icon';
export * from './collections-icon';
export * from './delete-icon';
export * from './folder-icon';

View File

@@ -1,8 +1,8 @@
{ {
"v": "5.12.1", "v": "5.12.1",
"fr": 60, "fr": 60,
"ip": 60, "ip": 0,
"op": 103, "op": 161,
"w": 40, "w": 40,
"h": 40, "h": 40,
"nm": "pause to play", "nm": "pause to play",
@@ -12,8 +12,160 @@
{ {
"ddd": 0, "ddd": 0,
"ind": 1, "ind": 1,
"ty": 3,
"nm": "Void::Icon (Stroke)",
"sr": 1,
"ks": {
"o": { "a": 0, "k": 100, "ix": 11 },
"r": { "a": 0, "k": 0, "ix": 10 },
"p": { "a": 0, "k": [21.125, 20, 0], "ix": 2, "l": 2 },
"a": { "a": 0, "k": [0, 0, 0], "ix": 1, "l": 2 },
"s": { "a": 0, "k": [100, 100, 100], "ix": 6, "l": 2 }
},
"ao": 0,
"ef": [
{
"ty": 5,
"nm": "Void",
"np": 19,
"mn": "Pseudo/250958",
"ix": 1,
"en": 1,
"ef": [
{
"ty": 0,
"nm": "Width",
"mn": "Pseudo/250958-0001",
"ix": 1,
"v": { "a": 0, "k": 100, "ix": 1 }
},
{
"ty": 0,
"nm": "Height",
"mn": "Pseudo/250958-0002",
"ix": 2,
"v": { "a": 0, "k": 100, "ix": 2 }
},
{
"ty": 0,
"nm": "Offset X",
"mn": "Pseudo/250958-0003",
"ix": 3,
"v": { "a": 0, "k": 0, "ix": 3 }
},
{
"ty": 0,
"nm": "Offset Y",
"mn": "Pseudo/250958-0004",
"ix": 4,
"v": { "a": 0, "k": 0, "ix": 4 }
},
{
"ty": 0,
"nm": "Roundness",
"mn": "Pseudo/250958-0005",
"ix": 5,
"v": { "a": 0, "k": 0, "ix": 5 }
},
{
"ty": 6,
"nm": "About",
"mn": "Pseudo/250958-0006",
"ix": 6,
"v": 0
},
{
"ty": 6,
"nm": "Plague of null layers.",
"mn": "Pseudo/250958-0007",
"ix": 7,
"v": 0
},
{
"ty": 6,
"nm": "Void",
"mn": "Pseudo/250958-0008",
"ix": 8,
"v": 0
},
{
"ty": 6,
"nm": "Following projects",
"mn": "Pseudo/250958-0009",
"ix": 9,
"v": 0
},
{
"ty": 6,
"nm": "Void",
"mn": "Pseudo/250958-0010",
"ix": 10,
"v": 0
},
{
"ty": 6,
"nm": "through time.",
"mn": "Pseudo/250958-0011",
"ix": 11,
"v": 0
},
{
"ty": 6,
"nm": "Void",
"mn": "Pseudo/250958-0012",
"ix": 12,
"v": 0
},
{
"ty": 6,
"nm": "Be free of the past.",
"mn": "Pseudo/250958-0013",
"ix": 13,
"v": 0
},
{
"ty": 6,
"nm": "Void",
"mn": "Pseudo/250958-0014",
"ix": 14,
"v": 0
},
{
"ty": 6,
"nm": "Copyright 2023 Battle Axe Inc",
"mn": "Pseudo/250958-0015",
"ix": 15,
"v": 0
},
{
"ty": 6,
"nm": "Void",
"mn": "Pseudo/250958-0016",
"ix": 16,
"v": 0
},
{
"ty": 6,
"nm": "Void",
"mn": "Pseudo/250958-0017",
"ix": 17,
"v": 0
}
]
}
],
"ip": 0,
"op": 5400,
"st": 0,
"ct": 1,
"bm": 0
},
{
"ddd": 0,
"ind": 2,
"ty": 4, "ty": 4,
"nm": "Icon (Stroke)", "nm": "Icon (Stroke)",
"parent": 1,
"sr": 1, "sr": 1,
"ks": { "ks": {
"o": { "o": {
@@ -42,7 +194,7 @@
"ix": 11 "ix": 11
}, },
"r": { "a": 0, "k": 0, "ix": 10 }, "r": { "a": 0, "k": 0, "ix": 10 },
"p": { "a": 0, "k": [20, 20, 0], "ix": 2, "l": 2 }, "p": { "a": 0, "k": [0, 0, 0], "ix": 2, "l": 2 },
"a": { "a": 0, "k": [0, 0, 0], "ix": 1, "l": 2 }, "a": { "a": 0, "k": [0, 0, 0], "ix": 1, "l": 2 },
"s": { "s": {
"a": 1, "a": 1,
@@ -54,7 +206,7 @@
"s": [100, 100, 100] "s": [100, 100, 100]
}, },
{ {
"i": { "x": [0.833, 0.833, 0.833], "y": [0.833, 0.833, 0.02] }, "i": { "x": [0.833, 0.833, 0.833], "y": [0.833, 0.833, 1] },
"o": { "x": [0.26, 0.26, 0.26], "y": [0, 0, 0] }, "o": { "x": [0.26, 0.26, 0.26], "y": [0, 0, 0] },
"t": 90, "t": 90,
"s": [32, 32, 100] "s": [32, 32, 100]
@@ -67,11 +219,11 @@
}, },
{ {
"i": { "x": [0.6, 0.6, 0.6], "y": [1, 1, 1] }, "i": { "x": [0.6, 0.6, 0.6], "y": [1, 1, 1] },
"o": { "x": [0.32, 0.32, 0.32], "y": [0.94, 0.94, 0] }, "o": { "x": [0.32, 0.32, 0.32], "y": [0.999, 0.999, 0] },
"t": 143, "t": 143,
"s": [115, 115, 100] "s": [115, 115, 100]
}, },
{ "t": 159, "s": [100, 100, 100] } { "t": 160, "s": [100, 100, 100] }
], ],
"ix": 6, "ix": 6,
"l": 2 "l": 2
@@ -200,7 +352,7 @@
}, },
{ {
"ddd": 0, "ddd": 0,
"ind": 2, "ind": 3,
"ty": 4, "ty": 4,
"nm": "Union", "nm": "Union",
"sr": 1, "sr": 1,
@@ -244,14 +396,14 @@
}, },
{ {
"i": { "x": [0.6, 0.6, 0.6], "y": [1, 1, 1] }, "i": { "x": [0.6, 0.6, 0.6], "y": [1, 1, 1] },
"o": { "x": [0.32, 0.32, 0.32], "y": [0.94, 0.94, 0] }, "o": { "x": [0.32, 0.32, 0.32], "y": [0.999, 0.999, 0] },
"t": 83, "t": 83,
"s": [115, 115, 100] "s": [115, 115, 100]
}, },
{ {
"i": { "x": [0.833, 0.833, 0.833], "y": [0.833, 0.833, 0.833] }, "i": { "x": [0.833, 0.833, 0.833], "y": [0.833, 0.833, 0.833] },
"o": { "x": [0.167, 0.167, 0.167], "y": [0.167, 0.167, 0.167] }, "o": { "x": [0.167, 0.167, 0.167], "y": [0.167, 0.167, 0.167] },
"t": 99, "t": 100,
"s": [100, 100, 100] "s": [100, 100, 100]
}, },
{ {
@@ -406,9 +558,9 @@
}, },
{ {
"ddd": 0, "ddd": 0,
"ind": 3, "ind": 4,
"ty": 4, "ty": 4,
"nm": "形状图层 3", "nm": "wave",
"sr": 1, "sr": 1,
"ks": { "ks": {
"o": { "o": {
@@ -516,9 +668,9 @@
}, },
{ {
"ddd": 0, "ddd": 0,
"ind": 4, "ind": 5,
"ty": 4, "ty": 4,
"nm": "形状图层 2", "nm": "wave",
"sr": 1, "sr": 1,
"ks": { "ks": {
"o": { "o": {
@@ -626,9 +778,9 @@
}, },
{ {
"ddd": 0, "ddd": 0,
"ind": 5, "ind": 6,
"ty": 4, "ty": 4,
"nm": "形状图层 1", "nm": "circle",
"sr": 1, "sr": 1,
"ks": { "ks": {
"o": { "a": 0, "k": 100, "ix": 11 }, "o": { "a": 0, "k": 100, "ix": 11 },

View File

@@ -1,15 +1,61 @@
import { cssVarV2 } from '@toeverything/theme/v2';
import { globalStyle, style } from '@vanilla-extract/css'; import { globalStyle, style } from '@vanilla-extract/css';
export const root = style({ export const root = style({
width: '1em', display: 'inline-flex',
height: '1em',
display: 'flex',
alignItems: 'center',
justifyContent: 'center',
});
const magicColor = `rgb(119,117,125)`;
globalStyle(`${root} path[stroke="${magicColor}"]`, {
stroke: 'currentColor',
});
globalStyle(`${root} path[fill="${magicColor}"]`, {
fill: 'currentColor',
}); });
// replace primary colors to cssVarV2('icon/primary')
const iconPrimaryColors = [
// legacy "--affine-icon-color"
'rgb(119,117,125)',
// --affine-v2-icon-primary
'rgb(122,122,122)',
];
// todo: may need to replace secondary colors & background colors as well?
const backgroundPrimaryColors = [
// --affine-v2-background-primary
'rgb(255,255,255)',
'#ffffff',
];
const backgroundSecondaryColors = [
// --affine-v2-background-secondary
'rgb(245,245,245)',
];
globalStyle(
`${root} :is(${iconPrimaryColors.map(color => `path[fill="${color}"]`).join(',')})`,
{
fill: cssVarV2('icon/primary'),
}
);
globalStyle(
`${root} :is(${iconPrimaryColors.map(color => `path[stroke="${color}"]`).join(',')})`,
{
stroke: cssVarV2('icon/primary'),
}
);
globalStyle(
`${root} :is(${backgroundPrimaryColors.map(color => `rect[fill="${color}"]`).join(',')})`,
{
fill: 'transparent',
}
);
globalStyle(
`${root} :is(${backgroundPrimaryColors.map(color => `path[fill="${color}"]`).join(',')})`,
{
fill: 'transparent',
}
);
globalStyle(
`${root} :is(${backgroundSecondaryColors.map(color => `path[fill="${color}"]`).join(',')})`,
{
fill: cssVarV2('layer/background/secondary'),
}
);

View File

@@ -76,6 +76,7 @@
"socket.io-client": "^4.8.1", "socket.io-client": "^4.8.1",
"swr": "2.3.3", "swr": "2.3.3",
"tinykeys": "patch:tinykeys@npm%3A2.1.0#~/.yarn/patches/tinykeys-npm-2.1.0-819feeaed0.patch", "tinykeys": "patch:tinykeys@npm%3A2.1.0#~/.yarn/patches/tinykeys-npm-2.1.0-819feeaed0.patch",
"webm-muxer": "^5.1.0",
"y-protocols": "^1.0.6", "y-protocols": "^1.0.6",
"yjs": "^13.6.21", "yjs": "^13.6.21",
"zod": "^3.24.1" "zod": "^3.24.1"

View File

@@ -13,8 +13,21 @@ export class LitTranscriptionBlock extends BlockComponent<TranscriptionBlockMode
} }
`, `,
]; ];
get lastCalloutBlock() {
for (const child of this.model.children.toReversed()) {
if (child.flavour === 'affine:callout') {
return child;
}
}
return null;
}
override render() { override render() {
return this.std.host.renderChildren(this.model); return this.std.host.renderChildren(this.model, model => {
// if model is the last transcription block, we should render it
return model === this.lastCalloutBlock;
});
} }
@property({ type: String, attribute: 'data-block-id' }) @property({ type: String, attribute: 'data-block-id' })

View File

@@ -1,17 +1,22 @@
import { Button, Tooltip, useConfirmModal } from '@affine/component'; import {
import { AudioPlayer } from '@affine/core/components/audio-player'; AnimatedTranscribeIcon,
import { AnimatedTranscribeIcon } from '@affine/core/components/audio-player/lottie/animated-transcribe-icon'; Button,
import { useSeekTime } from '@affine/core/components/audio-player/use-seek-time'; Tooltip,
useConfirmModal,
} from '@affine/component';
import { AudioPlayer } from '@affine/component/ui/audio-player';
import { useEnableAI } from '@affine/core/components/hooks/affine/use-enable-ai'; import { useEnableAI } from '@affine/core/components/hooks/affine/use-enable-ai';
import { useAsyncCallback } from '@affine/core/components/hooks/affine-async-hooks'; import { useAsyncCallback } from '@affine/core/components/hooks/affine-async-hooks';
import { useSeekTime } from '@affine/core/components/hooks/use-seek-time';
import { CurrentServerScopeProvider } from '@affine/core/components/providers/current-server-scope'; import { CurrentServerScopeProvider } from '@affine/core/components/providers/current-server-scope';
import { PublicUserLabel } from '@affine/core/modules/cloud/views/public-user'; import { PublicUserLabel } from '@affine/core/modules/cloud/views/public-user';
import { GlobalDialogService } from '@affine/core/modules/dialogs'; import { GlobalDialogService } from '@affine/core/modules/dialogs';
import type { AudioAttachmentBlock } from '@affine/core/modules/media/entities/audio-attachment-block'; import type { AudioAttachmentBlock } from '@affine/core/modules/media/entities/audio-attachment-block';
import { useAttachmentMediaBlock } from '@affine/core/modules/media/views/use-attachment-media'; import { AudioAttachmentService } from '@affine/core/modules/media/services/audio-attachment';
import { Trans, useI18n } from '@affine/i18n'; import { Trans, useI18n } from '@affine/i18n';
import type { AttachmentBlockModel } from '@blocksuite/affine/model';
import { useLiveData, useService } from '@toeverything/infra'; import { useLiveData, useService } from '@toeverything/infra';
import { useCallback, useMemo, useState } from 'react'; import { useCallback, useEffect, useMemo, useState } from 'react';
import type { AttachmentViewerProps } from '../types'; import type { AttachmentViewerProps } from '../types';
import * as styles from './audio-block.css'; import * as styles from './audio-block.css';
@@ -177,6 +182,31 @@ const AttachmentAudioPlayer = ({ block }: { block: AudioAttachmentBlock }) => {
); );
}; };
const useAttachmentMediaBlock = (model: AttachmentBlockModel) => {
const audioAttachmentService = useService(AudioAttachmentService);
const [audioAttachmentBlock, setAttachmentMedia] = useState<
AudioAttachmentBlock | undefined
>(undefined);
useEffect(() => {
if (!model.props.sourceId) {
return;
}
const entity = audioAttachmentService.get(model);
if (!entity) {
return;
}
const audioAttachmentBlock = entity.obj;
setAttachmentMedia(audioAttachmentBlock);
audioAttachmentBlock.mount();
return () => {
audioAttachmentBlock.unmount();
entity.release();
};
}, [audioAttachmentService, model]);
return audioAttachmentBlock;
};
export const AudioBlockEmbedded = (props: AttachmentViewerProps) => { export const AudioBlockEmbedded = (props: AttachmentViewerProps) => {
const audioAttachmentBlock = useAttachmentMediaBlock(props.model); const audioAttachmentBlock = useAttachmentMediaBlock(props.model);
const transcriptionBlock = useLiveData( const transcriptionBlock = useLiveData(

View File

@@ -1 +0,0 @@
export * from './audio-player';

View File

@@ -1,60 +0,0 @@
import { Loading } from '@affine/component';
import clsx from 'clsx';
import type { LottieRef } from 'lottie-react';
import Lottie from 'lottie-react';
import { useEffect, useRef } from 'react';
import pausetoplay from './pausetoplay.json';
import playtopause from './playtopause.json';
import * as styles from './styles.css';
export interface AnimatedPlayIconProps {
state: 'play' | 'pause' | 'loading';
className?: string;
onClick?: (e: React.MouseEvent) => void;
}
const buildAnimatedLottieIcon = (data: Record<string, unknown>) => {
const Component = ({
onClick,
className,
}: {
onClick?: (e: React.MouseEvent) => void;
className?: string;
}) => {
const lottieRef: LottieRef = useRef(null);
useEffect(() => {
if (lottieRef.current) {
const lottie = lottieRef.current;
lottie.setSpeed(2);
lottie.play();
}
}, []);
return (
<Lottie
onClick={onClick}
lottieRef={lottieRef}
className={clsx(styles.root, className)}
animationData={data}
loop={false}
autoplay={false}
/>
);
};
return Component;
};
const PlayIcon = buildAnimatedLottieIcon(playtopause);
const PauseIcon = buildAnimatedLottieIcon(pausetoplay);
export const AnimatedPlayIcon = ({
state,
className,
onClick,
}: AnimatedPlayIconProps) => {
if (state === 'loading') {
return <Loading size={40} />;
}
const Icon = state === 'play' ? PlayIcon : PauseIcon;
return <Icon onClick={onClick} className={className} />;
};

View File

@@ -1,715 +0,0 @@
{
"v": "5.12.1",
"fr": 60,
"ip": 120,
"op": 159,
"w": 40,
"h": 40,
"nm": "playtopause",
"ddd": 0,
"assets": [],
"layers": [
{
"ddd": 0,
"ind": 1,
"ty": 4,
"nm": "Icon (Stroke)",
"sr": 1,
"ks": {
"o": {
"a": 1,
"k": [
{
"i": { "x": [0.48], "y": [1] },
"o": { "x": [0.26], "y": [1] },
"t": 60,
"s": [100]
},
{
"i": { "x": [0.833], "y": [1] },
"o": { "x": [0.26], "y": [0] },
"t": 90,
"s": [0]
},
{
"i": { "x": [0.833], "y": [1] },
"o": { "x": [0.167], "y": [0] },
"t": 120,
"s": [0]
},
{ "t": 142, "s": [100] }
],
"ix": 11
},
"r": { "a": 0, "k": 0, "ix": 10 },
"p": { "a": 0, "k": [20, 20, 0], "ix": 2, "l": 2 },
"a": { "a": 0, "k": [0, 0, 0], "ix": 1, "l": 2 },
"s": {
"a": 1,
"k": [
{
"i": { "x": [0.48, 0.48, 0.48], "y": [1, 1, 1] },
"o": { "x": [0.26, 0.26, 0.26], "y": [1, 1, 0] },
"t": 60,
"s": [100, 100, 100]
},
{
"i": { "x": [0.833, 0.833, 0.833], "y": [0.833, 0.833, 0.02] },
"o": { "x": [0.26, 0.26, 0.26], "y": [0, 0, 0] },
"t": 90,
"s": [32, 32, 100]
},
{
"i": { "x": [0.64, 0.64, 0.64], "y": [1, 1, 1] },
"o": { "x": [0.33, 0.33, 0.33], "y": [0.52, 0.52, 0] },
"t": 120,
"s": [43, 43, 100]
},
{
"i": { "x": [0.6, 0.6, 0.6], "y": [1, 1, 1] },
"o": { "x": [0.32, 0.32, 0.32], "y": [0.94, 0.94, 0] },
"t": 143,
"s": [115, 115, 100]
},
{ "t": 159, "s": [100, 100, 100] }
],
"ix": 6,
"l": 2
}
},
"ao": 0,
"shapes": [
{
"ty": "gr",
"it": [
{
"ind": 0,
"ty": "sh",
"ix": 1,
"ks": {
"a": 0,
"k": {
"i": [
[0.782, 0.44],
[0, 0],
[0.504, 0.225],
[0.525, -0.059],
[0.453, -0.629],
[0.051, -0.554],
[0, -0.867],
[0, 0],
[-0.05, -0.55],
[-0.309, -0.428],
[-0.77, -0.087],
[-0.508, 0.227],
[-0.756, 0.425],
[0, 0],
[-0.468, 0.323],
[-0.221, 0.491],
[0.324, 0.718],
[0.47, 0.324]
],
"o": [
[0, 0],
[-0.756, -0.425],
[-0.508, -0.227],
[-0.77, 0.087],
[-0.309, 0.428],
[-0.05, 0.55],
[0, 0],
[0, 0.867],
[0.051, 0.554],
[0.453, 0.629],
[0.525, 0.059],
[0.504, -0.225],
[0, 0],
[0.782, -0.44],
[0.47, -0.324],
[0.324, -0.718],
[-0.221, -0.491],
[-0.468, -0.323]
],
"v": [
[4.482, -3.424],
[-1.857, -6.99],
[-3.729, -7.985],
[-5.269, -8.313],
[-7.19, -7.189],
[-7.66, -5.686],
[-7.71, -3.566],
[-7.71, 3.566],
[-7.66, 5.686],
[-7.19, 7.189],
[-5.269, 8.313],
[-3.729, 7.985],
[-1.857, 6.99],
[4.482, 3.424],
[6.365, 2.305],
[7.467, 1.13],
[7.467, -1.13],
[6.365, -2.305]
],
"c": true
},
"ix": 2
},
"nm": "路径 1",
"mn": "ADBE Vector Shape - Group",
"hd": false
},
{
"ty": "fl",
"c": {
"a": 0,
"k": [0.478431373835, 0.478431373835, 0.478431373835, 1],
"ix": 4
},
"o": { "a": 0, "k": 100, "ix": 5 },
"r": 1,
"bm": 0,
"nm": "填充 1",
"mn": "ADBE Vector Graphic - Fill",
"hd": false
},
{
"ty": "tr",
"p": { "a": 0, "k": [0, 0], "ix": 2 },
"a": { "a": 0, "k": [0, 0], "ix": 1 },
"s": { "a": 0, "k": [100, 100], "ix": 3 },
"r": { "a": 0, "k": 0, "ix": 6 },
"o": { "a": 0, "k": 100, "ix": 7 },
"sk": { "a": 0, "k": 0, "ix": 4 },
"sa": { "a": 0, "k": 0, "ix": 5 },
"nm": "变换"
}
],
"nm": "Icon (Stroke)",
"np": 2,
"cix": 2,
"bm": 0,
"ix": 1,
"mn": "ADBE Vector Group",
"hd": false
}
],
"ip": 0,
"op": 5400,
"st": 0,
"ct": 1,
"bm": 0
},
{
"ddd": 0,
"ind": 2,
"ty": 4,
"nm": "Union",
"sr": 1,
"ks": {
"o": {
"a": 1,
"k": [
{
"i": { "x": [0.833], "y": [0.833] },
"o": { "x": [0.167], "y": [0.167] },
"t": 60,
"s": [0]
},
{
"i": { "x": [0.833], "y": [0.833] },
"o": { "x": [0.167], "y": [0.167] },
"t": 83,
"s": [100]
},
{
"i": { "x": [0.6], "y": [1] },
"o": { "x": [0.32], "y": [0.94] },
"t": 120,
"s": [100]
},
{ "t": 150, "s": [0] }
],
"ix": 11
},
"r": { "a": 0, "k": 0, "ix": 10 },
"p": { "a": 0, "k": [20, 20, 0], "ix": 2, "l": 2 },
"a": { "a": 0, "k": [0, 0, 0], "ix": 1, "l": 2 },
"s": {
"a": 1,
"k": [
{
"i": { "x": [0.64, 0.64, 0.64], "y": [1, 1, 1] },
"o": { "x": [0.33, 0.33, 0.33], "y": [0.52, 0.52, 0] },
"t": 60,
"s": [43, 43, 100]
},
{
"i": { "x": [0.6, 0.6, 0.6], "y": [1, 1, 1] },
"o": { "x": [0.32, 0.32, 0.32], "y": [0.94, 0.94, 0] },
"t": 83,
"s": [115, 115, 100]
},
{
"i": { "x": [0.833, 0.833, 0.833], "y": [0.833, 0.833, 0.833] },
"o": { "x": [0.167, 0.167, 0.167], "y": [0.167, 0.167, 0.167] },
"t": 99,
"s": [100, 100, 100]
},
{
"i": { "x": [0.6, 0.6, 0.6], "y": [1, 1, 1] },
"o": { "x": [0.32, 0.32, 0.32], "y": [0.94, 0.94, 0] },
"t": 120,
"s": [100, 100, 100]
},
{ "t": 150, "s": [39, 39, 100] }
],
"ix": 6,
"l": 2
}
},
"ao": 0,
"shapes": [
{
"ty": "gr",
"it": [
{
"ind": 0,
"ty": "sh",
"ix": 1,
"ks": {
"a": 0,
"k": {
"i": [
[0, 0],
[0.849, 0],
[0, -0.849],
[0, 0],
[-0.849, 0],
[0, 0.849]
],
"o": [
[0, -0.849],
[-0.849, 0],
[0, 0],
[0, 0.849],
[0.849, 0],
[0, 0]
],
"v": [
[-2.563, -6.152],
[-4.101, -7.69],
[-5.639, -6.152],
[-5.639, 6.152],
[-4.101, 7.69],
[-2.563, 6.152]
],
"c": true
},
"ix": 2
},
"nm": "路径 1",
"mn": "ADBE Vector Shape - Group",
"hd": false
},
{
"ty": "mm",
"mm": 5,
"nm": "合并路径 1",
"mn": "ADBE Vector Filter - Merge",
"hd": false
},
{
"ind": 2,
"ty": "sh",
"ix": 3,
"ks": {
"a": 0,
"k": {
"i": [
[-0.849, 0],
[0, -0.849],
[0, 0],
[0.849, 0],
[0, 0.849],
[0, 0]
],
"o": [
[0.849, 0],
[0, 0],
[0, 0.849],
[-0.849, 0],
[0, 0],
[0, -0.849]
],
"v": [
[4.101, -7.69],
[5.639, -6.152],
[5.639, 6.152],
[4.101, 7.69],
[2.563, 6.152],
[2.563, -6.152]
],
"c": true
},
"ix": 2
},
"nm": "路径 2",
"mn": "ADBE Vector Shape - Group",
"hd": false
},
{
"ty": "mm",
"mm": 5,
"nm": "合并路径 2",
"mn": "ADBE Vector Filter - Merge",
"hd": false
},
{
"ty": "fl",
"c": {
"a": 0,
"k": [0.478431373835, 0.478431373835, 0.478431373835, 1],
"ix": 4
},
"o": { "a": 0, "k": 100, "ix": 5 },
"r": 1,
"bm": 0,
"nm": "填充 1",
"mn": "ADBE Vector Graphic - Fill",
"hd": false
},
{
"ty": "tr",
"p": { "a": 0, "k": [0, 0], "ix": 2 },
"a": { "a": 0, "k": [0, 0], "ix": 1 },
"s": { "a": 0, "k": [100, 100], "ix": 3 },
"r": { "a": 0, "k": 0, "ix": 6 },
"o": { "a": 0, "k": 100, "ix": 7 },
"sk": { "a": 0, "k": 0, "ix": 4 },
"sa": { "a": 0, "k": 0, "ix": 5 },
"nm": "变换"
}
],
"nm": "Union",
"np": 5,
"cix": 2,
"bm": 0,
"ix": 1,
"mn": "ADBE Vector Group",
"hd": false
}
],
"ip": 0,
"op": 5400,
"st": 0,
"ct": 1,
"bm": 0
},
{
"ddd": 0,
"ind": 3,
"ty": 4,
"nm": "形状图层 3",
"sr": 1,
"ks": {
"o": {
"a": 1,
"k": [
{
"i": { "x": [0.833], "y": [0.833] },
"o": { "x": [0.167], "y": [0.167] },
"t": 133,
"s": [100]
},
{ "t": 145, "s": [0] }
],
"ix": 11
},
"r": { "a": 0, "k": 0, "ix": 10 },
"p": { "a": 0, "k": [20.52, 20.457, 0], "ix": 2, "l": 2 },
"a": { "a": 0, "k": [0, 0, 0], "ix": 1, "l": 2 },
"s": {
"a": 1,
"k": [
{
"i": { "x": [0.6, 0.6, 0.6], "y": [1, 1, 1] },
"o": { "x": [0.32, 0.32, 0.32], "y": [0.94, 0.94, 0] },
"t": 120,
"s": [12, 12, 100]
},
{ "t": 145, "s": [100, 100, 100] }
],
"ix": 6,
"l": 2
}
},
"ao": 0,
"shapes": [
{
"ty": "gr",
"it": [
{
"d": 1,
"ty": "el",
"s": { "a": 0, "k": [40, 40], "ix": 2 },
"p": { "a": 0, "k": [0, 0], "ix": 3 },
"nm": "椭圆路径 1",
"mn": "ADBE Vector Shape - Ellipse",
"hd": false
},
{
"ty": "st",
"c": {
"a": 0,
"k": [0.960784316063, 0.960784316063, 0.960784316063, 1],
"ix": 3
},
"o": { "a": 0, "k": 100, "ix": 4 },
"w": { "a": 0, "k": 0, "ix": 5 },
"lc": 1,
"lj": 1,
"ml": 4,
"bm": 0,
"nm": "描边 1",
"mn": "ADBE Vector Graphic - Stroke",
"hd": false
},
{
"ty": "fl",
"c": {
"a": 0,
"k": [0.936106004902, 0.936106004902, 0.936106004902, 1],
"ix": 4
},
"o": { "a": 0, "k": 100, "ix": 5 },
"r": 1,
"bm": 0,
"nm": "填充 1",
"mn": "ADBE Vector Graphic - Fill",
"hd": false
},
{
"ty": "tr",
"p": { "a": 0, "k": [-0.52, -0.457], "ix": 2 },
"a": { "a": 0, "k": [0, 0], "ix": 1 },
"s": { "a": 0, "k": [100, 100], "ix": 3 },
"r": { "a": 0, "k": 0, "ix": 6 },
"o": { "a": 0, "k": 100, "ix": 7 },
"sk": { "a": 0, "k": 0, "ix": 4 },
"sa": { "a": 0, "k": 0, "ix": 5 },
"nm": "变换"
}
],
"nm": "椭圆 1",
"np": 3,
"cix": 2,
"bm": 0,
"ix": 1,
"mn": "ADBE Vector Group",
"hd": false
}
],
"ip": 120,
"op": 161,
"st": 60,
"ct": 1,
"bm": 0
},
{
"ddd": 0,
"ind": 4,
"ty": 4,
"nm": "形状图层 2",
"sr": 1,
"ks": {
"o": {
"a": 1,
"k": [
{
"i": { "x": [0.833], "y": [0.833] },
"o": { "x": [0.167], "y": [0.167] },
"t": 73,
"s": [100]
},
{ "t": 85, "s": [0] }
],
"ix": 11
},
"r": { "a": 0, "k": 0, "ix": 10 },
"p": { "a": 0, "k": [20.52, 20.457, 0], "ix": 2, "l": 2 },
"a": { "a": 0, "k": [0, 0, 0], "ix": 1, "l": 2 },
"s": {
"a": 1,
"k": [
{
"i": { "x": [0.6, 0.6, 0.6], "y": [1, 1, 1] },
"o": { "x": [0.32, 0.32, 0.32], "y": [0.94, 0.94, 0] },
"t": 60,
"s": [12, 12, 100]
},
{ "t": 85, "s": [100, 100, 100] }
],
"ix": 6,
"l": 2
}
},
"ao": 0,
"shapes": [
{
"ty": "gr",
"it": [
{
"d": 1,
"ty": "el",
"s": { "a": 0, "k": [40, 40], "ix": 2 },
"p": { "a": 0, "k": [0, 0], "ix": 3 },
"nm": "椭圆路径 1",
"mn": "ADBE Vector Shape - Ellipse",
"hd": false
},
{
"ty": "st",
"c": {
"a": 0,
"k": [0.960784316063, 0.960784316063, 0.960784316063, 1],
"ix": 3
},
"o": { "a": 0, "k": 100, "ix": 4 },
"w": { "a": 0, "k": 0, "ix": 5 },
"lc": 1,
"lj": 1,
"ml": 4,
"bm": 0,
"nm": "描边 1",
"mn": "ADBE Vector Graphic - Stroke",
"hd": false
},
{
"ty": "fl",
"c": {
"a": 0,
"k": [0.936106004902, 0.936106004902, 0.936106004902, 1],
"ix": 4
},
"o": { "a": 0, "k": 100, "ix": 5 },
"r": 1,
"bm": 0,
"nm": "填充 1",
"mn": "ADBE Vector Graphic - Fill",
"hd": false
},
{
"ty": "tr",
"p": { "a": 0, "k": [-0.52, -0.457], "ix": 2 },
"a": { "a": 0, "k": [0, 0], "ix": 1 },
"s": { "a": 0, "k": [100, 100], "ix": 3 },
"r": { "a": 0, "k": 0, "ix": 6 },
"o": { "a": 0, "k": 100, "ix": 7 },
"sk": { "a": 0, "k": 0, "ix": 4 },
"sa": { "a": 0, "k": 0, "ix": 5 },
"nm": "变换"
}
],
"nm": "椭圆 1",
"np": 3,
"cix": 2,
"bm": 0,
"ix": 1,
"mn": "ADBE Vector Group",
"hd": false
}
],
"ip": 0,
"op": 101,
"st": 0,
"ct": 1,
"bm": 0
},
{
"ddd": 0,
"ind": 5,
"ty": 4,
"nm": "形状图层 1",
"sr": 1,
"ks": {
"o": { "a": 0, "k": 100, "ix": 11 },
"r": { "a": 0, "k": 0, "ix": 10 },
"p": { "a": 0, "k": [20.52, 20.457, 0], "ix": 2, "l": 2 },
"a": { "a": 0, "k": [0, 0, 0], "ix": 1, "l": 2 },
"s": { "a": 0, "k": [100, 100, 100], "ix": 6, "l": 2 }
},
"ao": 0,
"shapes": [
{
"ty": "gr",
"it": [
{
"d": 1,
"ty": "el",
"s": { "a": 0, "k": [40, 40], "ix": 2 },
"p": { "a": 0, "k": [0, 0], "ix": 3 },
"nm": "椭圆路径 1",
"mn": "ADBE Vector Shape - Ellipse",
"hd": false
},
{
"ty": "st",
"c": {
"a": 0,
"k": [0.960784316063, 0.960784316063, 0.960784316063, 1],
"ix": 3
},
"o": { "a": 0, "k": 100, "ix": 4 },
"w": { "a": 0, "k": 0, "ix": 5 },
"lc": 1,
"lj": 1,
"ml": 4,
"bm": 0,
"nm": "描边 1",
"mn": "ADBE Vector Graphic - Stroke",
"hd": false
},
{
"ty": "fl",
"c": {
"a": 0,
"k": [0.960784316063, 0.960784316063, 0.960784316063, 1],
"ix": 4
},
"o": { "a": 0, "k": 100, "ix": 5 },
"r": 1,
"bm": 0,
"nm": "填充 1",
"mn": "ADBE Vector Graphic - Fill",
"hd": false
},
{
"ty": "tr",
"p": { "a": 0, "k": [-0.52, -0.457], "ix": 2 },
"a": { "a": 0, "k": [0, 0], "ix": 1 },
"s": { "a": 0, "k": [100, 100], "ix": 3 },
"r": { "a": 0, "k": 0, "ix": 6 },
"o": { "a": 0, "k": 100, "ix": 7 },
"sk": { "a": 0, "k": 0, "ix": 4 },
"sa": { "a": 0, "k": 0, "ix": 5 },
"nm": "变换"
}
],
"nm": "椭圆 1",
"np": 3,
"cix": 2,
"bm": 0,
"ix": 1,
"mn": "ADBE Vector Group",
"hd": false
}
],
"ip": 0,
"op": 5400,
"st": 0,
"ct": 1,
"bm": 0
}
],
"markers": [],
"props": {}
}

View File

@@ -1,61 +0,0 @@
import { cssVarV2 } from '@toeverything/theme/v2';
import { globalStyle, style } from '@vanilla-extract/css';
export const root = style({
display: 'inline-flex',
});
// replace primary colors to cssVarV2('icon/primary')
const iconPrimaryColors = [
// legacy "--affine-icon-color"
'rgb(119,117,125)',
// --affine-v2-icon-primary
'rgb(122,122,122)',
];
// todo: may need to replace secondary colors & background colors as well?
const backgroundPrimaryColors = [
// --affine-v2-background-primary
'rgb(255,255,255)',
'#ffffff',
];
const backgroundSecondaryColors = [
// --affine-v2-background-secondary
'rgb(245,245,245)',
];
globalStyle(
`${root} :is(${iconPrimaryColors.map(color => `path[fill="${color}"]`).join(',')})`,
{
fill: cssVarV2('icon/primary'),
}
);
globalStyle(
`${root} :is(${iconPrimaryColors.map(color => `path[stroke="${color}"]`).join(',')})`,
{
stroke: cssVarV2('icon/primary'),
}
);
globalStyle(
`${root} :is(${backgroundPrimaryColors.map(color => `rect[fill="${color}"]`).join(',')})`,
{
fill: 'transparent',
}
);
globalStyle(
`${root} :is(${backgroundPrimaryColors.map(color => `path[fill="${color}"]`).join(',')})`,
{
fill: 'transparent',
}
);
globalStyle(
`${root} :is(${backgroundSecondaryColors.map(color => `path[fill="${color}"]`).join(',')})`,
{
fill: cssVarV2('layer/background/secondary'),
}
);

View File

@@ -1,3 +1,4 @@
import { MiniAudioPlayer } from '@affine/component/ui/audio-player';
import { AudioMediaManagerService } from '@affine/core/modules/media'; import { AudioMediaManagerService } from '@affine/core/modules/media';
import type { AudioAttachmentBlock } from '@affine/core/modules/media/entities/audio-attachment-block'; import type { AudioAttachmentBlock } from '@affine/core/modules/media/entities/audio-attachment-block';
import { AudioAttachmentService } from '@affine/core/modules/media/services/audio-attachment'; import { AudioAttachmentService } from '@affine/core/modules/media/services/audio-attachment';
@@ -5,8 +6,7 @@ import { LiveData, useLiveData, useService } from '@toeverything/infra';
import { useCallback, useEffect, useMemo, useState } from 'react'; import { useCallback, useEffect, useMemo, useState } from 'react';
import { combineLatest, debounceTime, map, of } from 'rxjs'; import { combineLatest, debounceTime, map, of } from 'rxjs';
import { MiniAudioPlayer } from '../audio-player'; import { useSeekTime } from '../hooks/use-seek-time';
import { useSeekTime } from '../audio-player/use-seek-time';
import * as styles from './sidebar-audio-player.css'; import * as styles from './sidebar-audio-player.css';
export const SidebarAudioPlayer = () => { export const SidebarAudioPlayer = () => {

View File

@@ -8,6 +8,7 @@ import {
FolderIcon, FolderIcon,
InformationIcon, InformationIcon,
KeyboardIcon, KeyboardIcon,
MeetingIcon,
NotificationIcon, NotificationIcon,
PenIcon, PenIcon,
} from '@blocksuite/icons/rc'; } from '@blocksuite/icons/rc';
@@ -23,6 +24,7 @@ import { BillingSettings } from './billing';
import { EditorSettings } from './editor'; import { EditorSettings } from './editor';
import { ExperimentalFeatures } from './experimental-features'; import { ExperimentalFeatures } from './experimental-features';
import { PaymentIcon, UpgradeIcon } from './icons'; import { PaymentIcon, UpgradeIcon } from './icons';
import { MeetingsSettings } from './meetings';
import { NotificationSettings } from './notifications'; import { NotificationSettings } from './notifications';
import { AFFiNEPricingPlans } from './plans'; import { AFFiNEPricingPlans } from './plans';
import { Shortcuts } from './shortcuts'; import { Shortcuts } from './shortcuts';
@@ -46,6 +48,9 @@ export const useGeneralSettingList = (): GeneralSettingList => {
const enableEditorSettings = useLiveData( const enableEditorSettings = useLiveData(
featureFlagService.flags.enable_editor_settings.$ featureFlagService.flags.enable_editor_settings.$
); );
const enableMeetings = useLiveData(
featureFlagService.flags.enable_meetings.$
);
useEffect(() => { useEffect(() => {
userFeatureService.userFeature.revalidate(); userFeatureService.userFeature.revalidate();
@@ -83,6 +88,15 @@ export const useGeneralSettingList = (): GeneralSettingList => {
}); });
} }
if (enableMeetings) {
settings.push({
key: 'meetings',
title: t['com.affine.settings.meetings'](),
icon: <MeetingIcon />,
testId: 'meetings-panel-trigger',
});
}
if (hasPaymentFeature) { if (hasPaymentFeature) {
settings.splice(4, 0, { settings.splice(4, 0, {
key: 'plans', key: 'plans',
@@ -147,6 +161,8 @@ export const GeneralSetting = ({
return <EditorSettings />; return <EditorSettings />;
case 'appearance': case 'appearance':
return <AppearanceSettings />; return <AppearanceSettings />;
case 'meetings':
return <MeetingsSettings />;
case 'about': case 'about':
return <AboutAffine />; return <AboutAffine />;
case 'plans': case 'plans':

View File

@@ -0,0 +1,254 @@
import {
IconButton,
Menu,
MenuItem,
MenuTrigger,
Switch,
useConfirmModal,
} from '@affine/component';
import {
SettingHeader,
SettingRow,
SettingWrapper,
} from '@affine/component/setting-components';
import { useAsyncCallback } from '@affine/core/components/hooks/affine-async-hooks';
import { MeetingSettingsService } from '@affine/core/modules/media/services/meeting-settings';
import type { MeetingSettingsSchema } from '@affine/electron/main/shared-state-schema';
import { useI18n } from '@affine/i18n';
import {
ArrowRightSmallIcon,
DoneIcon,
InformationFillDuotoneIcon,
} from '@blocksuite/icons/rc';
import { useLiveData, useService } from '@toeverything/infra';
import { useCallback, useEffect, useMemo, useState } from 'react';
import * as styles from './styles.css';
// Ordered list of recording modes offered in the settings menu; values must
// mirror MeetingSettingsSchema['recordingMode'] from the electron
// shared-state schema.
const RecordingModes: MeetingSettingsSchema['recordingMode'][] = [
  'prompt',
  'auto-start',
  'none',
];
/**
 * Dropdown for choosing what happens when a meeting starts
 * (prompt / auto-start / do nothing). Reads the current mode from the
 * shared meeting settings and writes changes back through
 * MeetingSettingsService.
 */
const RecordingModeMenu = () => {
  const t = useI18n();
  const meetingSettingsService = useService(MeetingSettingsService);
  const settings = useLiveData(meetingSettingsService.settings$);

  // One localized entry per selectable recording mode.
  const modeOptions = useMemo(
    () =>
      RecordingModes.map(value => ({
        value,
        label: t[`com.affine.settings.meetings.record.recording-mode.${value}`](),
      })),
    [t]
  );

  const selectedMode = settings.recordingMode;

  const onSelectMode = useCallback(
    (value: MeetingSettingsSchema['recordingMode']) => {
      meetingSettingsService.setRecordingMode(value);
    },
    [meetingSettingsService]
  );

  const triggerLabel = modeOptions.find(
    option => option.value === selectedMode
  )?.label;

  return (
    <Menu
      items={modeOptions.map(option => (
        <MenuItem
          key={option.value}
          title={option.label}
          onSelect={() => onSelectMode(option.value)}
          data-selected={selectedMode === option.value}
        >
          {option.label}
        </MenuItem>
      ))}
    >
      <MenuTrigger style={{ fontWeight: 600, width: '250px' }} block={true}>
        {triggerLabel}
      </MenuTrigger>
    </Menu>
  );
};
/**
 * Settings panel for the meeting-recording feature: a master enable switch,
 * recording/transcription preferences, and a privacy section surfacing the
 * screen-recording permission state.
 *
 * The recording sections render only when the desktop reports the recording
 * feature as available (desktop-only capability).
 */
export const MeetingsSettings = () => {
  const t = useI18n();
  const meetingSettingsService = useService(MeetingSettingsService);
  const settings = useLiveData(meetingSettingsService.settings$);
  // Whether the desktop recording feature is available at all.
  const [recordingFeatureAvailable, setRecordingFeatureAvailable] =
    useState(false);
  // Whether the OS screen-recording permission has been granted.
  const [screenRecordingPermission, setScreenRecordingPermission] =
    useState(false);
  const confirmModal = useConfirmModal();
  // Probe availability and permission once per service instance; both
  // checks default to "false/unavailable" on failure.
  useEffect(() => {
    meetingSettingsService
      .isRecordingFeatureAvailable()
      .then(available => {
        setRecordingFeatureAvailable(available ?? false);
      })
      .catch(() => {
        setRecordingFeatureAvailable(false);
      });
    meetingSettingsService
      .checkScreenRecordingPermission()
      .then(permission => {
        setScreenRecordingPermission(permission ?? false);
      })
      .catch(err => console.log(err));
  }, [meetingSettingsService]);
  // Toggling "enabled" can fail (e.g. missing OS permission); on failure,
  // prompt the user to open the system permission settings.
  const handleEnabledChange = useAsyncCallback(
    async (checked: boolean) => {
      try {
        await meetingSettingsService.setEnabled(checked);
      } catch {
        confirmModal.openConfirmModal({
          title:
            t['com.affine.settings.meetings.record.permission-modal.title'](),
          description:
            t[
              'com.affine.settings.meetings.record.permission-modal.description'
            ](),
          onConfirm: async () => {
            await meetingSettingsService.showScreenRecordingPermissionSetting();
          },
          cancelText: t['com.affine.recording.dismiss'](),
          confirmButtonOptions: {
            variant: 'primary',
          },
          confirmText:
            t[
              'com.affine.settings.meetings.record.permission-modal.open-setting'
            ](),
        });
      }
    },
    [confirmModal, meetingSettingsService, t]
  );
  const handleAutoTranscriptionChange = useCallback(
    (checked: boolean) => {
      meetingSettingsService.setAutoTranscription(checked);
    },
    [meetingSettingsService]
  );
  // Opens the OS screen-recording permission settings page.
  const handleOpenScreenRecordingPermissionSetting =
    useAsyncCallback(async () => {
      await meetingSettingsService.showScreenRecordingPermissionSetting();
    }, [meetingSettingsService]);
  // Opens the folder containing locally saved recordings.
  const handleOpenSavedRecordings = useAsyncCallback(async () => {
    await meetingSettingsService.openSavedRecordings();
  }, [meetingSettingsService]);
  return (
    <div className={styles.meetingWrapper}>
      <SettingHeader title={t['com.affine.settings.meetings']()} />
      {/* Master switch for the meeting-notes feature. */}
      <SettingRow
        name={t['com.affine.settings.meetings.enable.title']()}
        desc={t['com.affine.settings.meetings.enable.description']()}
      >
        <Switch
          checked={settings.enabled}
          onChange={handleEnabledChange}
          data-testid="meetings-enable-switch"
        />
      </SettingRow>
      {recordingFeatureAvailable && (
        <>
          {/* Recording preferences — disabled until the feature is enabled. */}
          <SettingWrapper
            disabled={!settings.enabled}
            title={t['com.affine.settings.meetings.record.header']()}
          >
            <SettingRow
              name={t['com.affine.settings.meetings.record.recording-mode']()}
              desc={t[
                'com.affine.settings.meetings.record.recording-mode.description'
              ]()}
            >
              <RecordingModeMenu />
            </SettingRow>
            <SettingRow
              name={t['com.affine.settings.meetings.record.open-saved-file']()}
              desc={t[
                'com.affine.settings.meetings.record.open-saved-file.description'
              ]()}
            >
              <IconButton
                icon={<ArrowRightSmallIcon />}
                onClick={handleOpenSavedRecordings}
              />
            </SettingRow>
          </SettingWrapper>
          {/* AI transcription preferences. */}
          <SettingWrapper
            disabled={!settings.enabled}
            title={t['com.affine.settings.meetings.transcription.header']()}
          >
            <SettingRow
              name={t[
                'com.affine.settings.meetings.transcription.auto-transcription'
              ]()}
              desc={t[
                'com.affine.settings.meetings.transcription.auto-transcription.description'
              ]()}
            >
              <Switch
                checked={settings.autoTranscription}
                onChange={handleAutoTranscriptionChange}
                data-testid="meetings-auto-transcription-switch"
              />
            </SettingRow>
          </SettingWrapper>
          {/* Permission status; shows a "click to allow" link when the
              screen-recording permission is missing. */}
          <SettingWrapper
            title={t['com.affine.settings.meetings.privacy.header']()}
          >
            <SettingRow
              name={t[
                'com.affine.settings.meetings.privacy.screen-system-audio-recording'
              ]()}
              desc={
                <>
                  {t[
                    'com.affine.settings.meetings.privacy.screen-system-audio-recording.description'
                  ]()}
                  {!screenRecordingPermission && (
                    <span
                      onClick={handleOpenScreenRecordingPermissionSetting}
                      className={styles.permissionSetting}
                    >
                      {t[
                        'com.affine.settings.meetings.privacy.screen-system-audio-recording.permission-setting'
                      ]()}
                    </span>
                  )}
                </>
              }
            >
              <IconButton
                icon={
                  screenRecordingPermission ? (
                    <DoneIcon />
                  ) : (
                    <InformationFillDuotoneIcon
                      className={styles.noPermissionIcon}
                    />
                  )
                }
                onClick={handleOpenScreenRecordingPermissionSetting}
              />
            </SettingRow>
          </SettingWrapper>
        </>
      )}
    </div>
  );
};

View File

@@ -0,0 +1,18 @@
import { cssVarV2 } from '@toeverything/theme/v2';
import { style } from '@vanilla-extract/css';
// Root layout of the Meetings settings panel.
export const meetingWrapper = style({
  display: 'flex',
  flexDirection: 'column',
  gap: 16,
});

// Inline "click to allow" link shown when the screen-recording permission
// has not been granted yet.
export const permissionSetting = style({
  color: cssVarV2('text/link'),
  cursor: 'pointer',
  marginLeft: 4,
});

// Color of the warning icon displayed when the permission is missing.
export const noPermissionIcon = style({
  color: cssVarV2('button/error'),
});

View File

@@ -13,6 +13,7 @@ export type SettingTab =
| 'experimental-features' | 'experimental-features'
| 'editor' | 'editor'
| 'account' | 'account'
| 'meetings'
| `workspace:${'preference' | 'properties' | 'members' | 'storage' | 'billing' | 'license' | 'integrations'}`; | `workspace:${'preference' | 'properties' | 'members' | 'storage' | 'billing' | 'license' | 'integrations'}`;
export type GLOBAL_DIALOG_SCHEMA = { export type GLOBAL_DIALOG_SCHEMA = {

View File

@@ -225,15 +225,6 @@ export const AFFINE_FLAGS = {
configurable: !isMobile, configurable: !isMobile,
defaultState: false, defaultState: false,
}, },
enable_audio_block: {
category: 'affine',
displayName:
'com.affine.settings.workspace.experimental-features.enable-audio-block.name',
description:
'com.affine.settings.workspace.experimental-features.enable-audio-block.description',
configurable: !isMobile,
defaultState: false,
},
enable_editor_rtl: { enable_editor_rtl: {
category: 'affine', category: 'affine',
displayName: displayName:
@@ -274,6 +265,24 @@ export const AFFINE_FLAGS = {
configurable: isCanaryBuild, configurable: isCanaryBuild,
defaultState: false, defaultState: false,
}, },
enable_audio_block: {
category: 'affine',
displayName:
'com.affine.settings.workspace.experimental-features.enable-audio-block.name',
description:
'com.affine.settings.workspace.experimental-features.enable-audio-block.description',
configurable: !isMobile,
defaultState: false,
},
enable_meetings: {
category: 'affine',
displayName:
'com.affine.settings.workspace.experimental-features.enable-meetings.name',
description:
'com.affine.settings.workspace.experimental-features.enable-meetings.description',
configurable: !isMobile && environment.isMacOs,
defaultState: false,
},
} satisfies { [key in string]: FlagInfo }; } satisfies { [key in string]: FlagInfo };
// oxlint-disable-next-line no-redeclare // oxlint-disable-next-line no-redeclare

View File

@@ -1,3 +1,4 @@
import { encodeAudioBlobToOpus } from '@affine/core/utils/webm-encoding';
import { DebugLogger } from '@affine/debug'; import { DebugLogger } from '@affine/debug';
import { AiJobStatus } from '@affine/graphql'; import { AiJobStatus } from '@affine/graphql';
import { import {
@@ -118,7 +119,8 @@ export class AudioAttachmentBlock extends Entity<AttachmentBlockModel> {
if (!buffer) { if (!buffer) {
throw new Error('No audio buffer available'); throw new Error('No audio buffer available');
} }
const blob = new Blob([buffer], { type: this.props.props.type }); const encodedBuffer = await encodeAudioBlobToOpus(buffer, 64000);
const blob = new Blob([encodedBuffer], { type: this.props.props.type });
const file = new File([blob], this.props.props.name, { const file = new File([blob], this.props.props.name, {
type: this.props.props.type, type: this.props.props.type,
}); });

View File

@@ -1,8 +1,7 @@
import type { Framework } from '@toeverything/infra'; import type { Framework } from '@toeverything/infra';
import { DefaultServerService, WorkspaceServerService } from '../cloud'; import { DefaultServerService, WorkspaceServerService } from '../cloud';
import { DesktopApiService } from '../desktop-api'; import { GlobalState, GlobalStateService } from '../storage';
import { GlobalState } from '../storage';
import { WorkbenchService } from '../workbench'; import { WorkbenchService } from '../workbench';
import { WorkspaceScope, WorkspaceService } from '../workspace'; import { WorkspaceScope, WorkspaceService } from '../workspace';
import { AudioAttachmentBlock } from './entities/audio-attachment-block'; import { AudioAttachmentBlock } from './entities/audio-attachment-block';
@@ -16,9 +15,11 @@ import {
} from './providers/global-audio-state'; } from './providers/global-audio-state';
import { AudioAttachmentService } from './services/audio-attachment'; import { AudioAttachmentService } from './services/audio-attachment';
import { AudioMediaManagerService } from './services/audio-media-manager'; import { AudioMediaManagerService } from './services/audio-media-manager';
import { MeetingSettingsService } from './services/meeting-settings';
export function configureMediaModule(framework: Framework) { export function configureMediaModule(framework: Framework) {
framework framework
.service(MeetingSettingsService, [GlobalStateService])
.scope(WorkspaceScope) .scope(WorkspaceScope)
.entity(AudioMedia, [WorkspaceService]) .entity(AudioMedia, [WorkspaceService])
.entity(AudioAttachmentBlock, [AudioMediaManagerService, WorkspaceService]) .entity(AudioAttachmentBlock, [AudioMediaManagerService, WorkspaceService])
@@ -31,27 +32,18 @@ export function configureMediaModule(framework: Framework) {
WorkspaceServerService, WorkspaceServerService,
DefaultServerService, DefaultServerService,
]) ])
.service(AudioAttachmentService); .service(AudioAttachmentService)
.service(AudioMediaManagerService, [
GlobalMediaStateProvider,
WorkbenchService,
]);
if (BUILD_CONFIG.isElectron) { if (BUILD_CONFIG.isElectron) {
framework framework.impl(GlobalMediaStateProvider, ElectronGlobalMediaStateProvider, [
.impl(GlobalMediaStateProvider, ElectronGlobalMediaStateProvider, [ GlobalState,
GlobalState, ]);
])
.scope(WorkspaceScope)
.service(AudioMediaManagerService, [
GlobalMediaStateProvider,
WorkbenchService,
DesktopApiService,
]);
} else { } else {
framework framework.impl(GlobalMediaStateProvider, WebGlobalMediaStateProvider);
.impl(GlobalMediaStateProvider, WebGlobalMediaStateProvider)
.scope(WorkspaceScope)
.service(AudioMediaManagerService, [
GlobalMediaStateProvider,
WorkbenchService,
]);
} }
} }

View File

@@ -13,7 +13,7 @@ import {
import { clamp } from 'lodash-es'; import { clamp } from 'lodash-es';
import { distinctUntilChanged } from 'rxjs'; import { distinctUntilChanged } from 'rxjs';
import type { DesktopApiService } from '../../desktop-api'; import { DesktopApiService } from '../../desktop-api';
import type { WorkbenchService } from '../../workbench'; import type { WorkbenchService } from '../../workbench';
import { AudioMedia } from '../entities/audio-media'; import { AudioMedia } from '../entities/audio-media';
import type { BaseGlobalMediaStateProvider } from '../providers/global-audio-state'; import type { BaseGlobalMediaStateProvider } from '../providers/global-audio-state';
@@ -36,18 +36,13 @@ export class AudioMediaManagerService extends Service {
}); });
private readonly mediaDisposables = new WeakMap<AudioMedia, (() => void)[]>(); private readonly mediaDisposables = new WeakMap<AudioMedia, (() => void)[]>();
private readonly desktopApi = this.framework.getOptional(DesktopApiService);
constructor( constructor(
private readonly globalMediaState: BaseGlobalMediaStateProvider, private readonly globalMediaState: BaseGlobalMediaStateProvider,
private readonly workbench: WorkbenchService, private readonly workbench: WorkbenchService
private readonly desktopApi?: DesktopApiService
) { ) {
super(); super();
if (!BUILD_CONFIG.isElectron) {
this.desktopApi = undefined;
}
this.disposables.push(() => { this.disposables.push(() => {
this.mediaPool.clear(); this.mediaPool.clear();
}); });

View File

@@ -0,0 +1,133 @@
import type {
MeetingSettingsKey,
MeetingSettingsSchema,
} from '@affine/electron/main/shared-state-schema';
import { LiveData, Service } from '@toeverything/infra';
import { defaults } from 'lodash-es';
import { DesktopApiService } from '../../desktop-api';
import type { GlobalStateService } from '../../storage';
// Global-state key under which meeting settings are persisted; typed against
// MeetingSettingsKey so it stays in sync with the electron shared-state schema.
const MEETING_SETTINGS_KEY: typeof MeetingSettingsKey = 'meetingSettings';

// Defaults merged over the persisted value so every field is always defined
// even after schema additions.
const defaultMeetingSettings: MeetingSettingsSchema = {
  enabled: false,
  recordingSavingMode: 'new-doc',
  autoTranscription: true,
  recordingMode: 'prompt',
};
/**
 * Stores and mutates the user's meeting (recording/transcription) settings in
 * global state, and proxies recording/permission operations to the desktop
 * (Electron) API when one is available.
 */
export class MeetingSettingsService extends Service {
  constructor(private readonly globalStateService: GlobalStateService) {
    super();
  }

  // Optional: undefined on web builds where there is no desktop API; every
  // handler call below is therefore optional-chained.
  private readonly desktopApiService =
    this.framework.getOptional(DesktopApiService);

  // Live view of the persisted settings with defaults filled in for any
  // missing fields.
  readonly settings$ = LiveData.computed(get => {
    const value = get(
      LiveData.from(
        this.globalStateService.globalState.watch<MeetingSettingsSchema>(
          MEETING_SETTINGS_KEY
        ),
        undefined
      )
    );
    return defaults(value, defaultMeetingSettings);
  });

  get settings() {
    return this.settings$.value;
  }

  // We do not want the caller to directly set the settings object:
  // changing some fields has side effects handled by the methods below.

  /**
   * Enables or disables the recording feature.
   *
   * Enabling runs the desktop-side setup first (which may trigger the macOS
   * screen-recording permission prompt and can require an app restart);
   * disabling is refused while a recording is in progress. The persisted
   * state is only updated after the desktop-side step succeeds.
   *
   * @throws Error when setup fails (e.g. permission denied) or when a
   *         recording is still ongoing while disabling.
   */
  async setEnabled(enabled: boolean) {
    const currentEnabled = this.settings.enabled;
    if (currentEnabled === enabled) {
      return;
    }
    if (!(await this.isRecordingFeatureAvailable())) {
      return;
    }
    // When the user enables the recording feature for the first time,
    // macOS may prompt the user to allow it; granting the permission may
    // require the app to restart.
    if (enabled) {
      // Set up the desktop-side recording feature before persisting.
      const successful =
        await this.desktopApiService?.handler.recording.setupRecordingFeature();
      if (!successful) {
        throw new Error('Failed to setup recording feature');
      }
    } else {
      // Check if there is any ongoing recording before tearing down.
      const ongoingRecording =
        await this.desktopApiService?.handler.recording.getCurrentRecording();
      if (
        ongoingRecording &&
        ongoingRecording.status !== 'new' &&
        ongoingRecording.status !== 'ready'
      ) {
        throw new Error('There is an ongoing recording, please stop it first');
      }
      // Tear down the desktop-side recording feature.
      await this.desktopApiService?.handler.recording.disableRecordingFeature();
    }
    // Only update the state after successful feature setup/disable.
    this.globalStateService.globalState.set(MEETING_SETTINGS_KEY, {
      ...this.settings$.value,
      enabled,
    });
  }

  // Persists where a finished recording's block should be saved.
  setRecordingSavingMode(mode: MeetingSettingsSchema['recordingSavingMode']) {
    this.globalStateService.globalState.set(MEETING_SETTINGS_KEY, {
      ...this.settings$.value,
      recordingSavingMode: mode,
    });
  }

  // Persists whether recordings are transcribed automatically.
  setAutoTranscription(autoTranscription: boolean) {
    this.globalStateService.globalState.set(MEETING_SETTINGS_KEY, {
      ...this.settings$.value,
      autoTranscription,
    });
  }

  // Recording is a desktop-only feature for macOS version 14.2 and above;
  // resolves to undefined on web builds (no desktop API).
  async isRecordingFeatureAvailable() {
    return this.desktopApiService?.handler.recording.checkRecordingAvailable();
  }

  async checkScreenRecordingPermission() {
    return this.desktopApiService?.handler.recording.checkScreenRecordingPermission();
  }

  // NOTE(review): presumably macOS-only like the rest of the recording
  // feature — confirm against the desktop handler implementation.
  async showScreenRecordingPermissionSetting() {
    return this.desktopApiService?.handler.recording.showScreenRecordingPermissionSetting();
  }

  // Persists the behavior chosen for when a meeting starts; no-op when the
  // mode is unchanged.
  setRecordingMode = (mode: MeetingSettingsSchema['recordingMode']) => {
    const currentMode = this.settings.recordingMode;
    if (currentMode === mode) {
      return;
    }
    this.globalStateService.globalState.set(MEETING_SETTINGS_KEY, {
      ...this.settings,
      recordingMode: mode,
    });
  };

  // Opens the folder of locally saved recordings via the desktop API.
  async openSavedRecordings() {
    await this.desktopApiService?.handler.recording.showSavedRecordings();
  }
}

View File

@@ -1,31 +0,0 @@
import type { AttachmentBlockModel } from '@blocksuite/affine/model';
import { useService } from '@toeverything/infra';
import { useEffect, useState } from 'react';
import type { AudioAttachmentBlock } from '../entities/audio-attachment-block';
import { AudioAttachmentService } from '../services/audio-attachment';
export const useAttachmentMediaBlock = (model: AttachmentBlockModel) => {
const audioAttachmentService = useService(AudioAttachmentService);
const [audioAttachmentBlock, setAttachmentMedia] = useState<
AudioAttachmentBlock | undefined
>(undefined);
useEffect(() => {
if (!model.props.sourceId) {
return;
}
const entity = audioAttachmentService.get(model);
if (!entity) {
return;
}
const audioAttachmentBlock = entity.obj;
setAttachmentMedia(audioAttachmentBlock);
audioAttachmentBlock.mount();
return () => {
audioAttachmentBlock.unmount();
entity.release();
};
}, [audioAttachmentService, model]);
return audioAttachmentBlock;
};

View File

@@ -0,0 +1,222 @@
import { DebugLogger } from '@affine/debug';
import { ArrayBufferTarget, Muxer } from 'webm-muxer';
// Parameters describing the PCM input and the desired Opus output.
interface AudioEncodingConfig {
  sampleRate: number;
  numberOfChannels: number;
  // Target Opus bitrate in bits per second; defaults to 64000 when omitted.
  bitrate?: number;
}

const logger = new DebugLogger('webm-encoding');
/**
 * Creates and configures a WebCodecs AudioEncoder producing Opus.
 *
 * Encoded chunks are appended to the returned `encodedChunks` array as the
 * encoder emits them; callers feed frames via `encoder.encode(...)` and read
 * the array after `encoder.flush()` resolves.
 *
 * NOTE(review): an exception thrown inside the `error` callback does not
 * propagate to this function's caller — it surfaces in the encoder's own
 * task. Consider recording the error and surfacing it at flush time —
 * TODO confirm desired behavior.
 */
async function createOpusEncoder(config: AudioEncodingConfig): Promise<{
  encoder: AudioEncoder;
  encodedChunks: EncodedAudioChunk[];
}> {
  const encodedChunks: EncodedAudioChunk[] = [];
  const encoder = new AudioEncoder({
    output: chunk => {
      encodedChunks.push(chunk);
    },
    error: err => {
      throw new Error(`Encoding error: ${err}`);
    },
  });
  encoder.configure({
    codec: 'opus',
    sampleRate: config.sampleRate,
    numberOfChannels: config.numberOfChannels,
    bitrate: config.bitrate ?? 64000,
  });
  return { encoder, encodedChunks };
}
/**
 * Feeds interleaved f32 samples to the encoder in 1024-frame batches, then
 * flushes and closes the encoder — even when encoding throws mid-way.
 */
async function encodeAudioFrames({
  audioData,
  numberOfChannels,
  sampleRate,
  encoder,
}: {
  audioData: Float32Array;
  numberOfChannels: number;
  sampleRate: number;
  encoder: AudioEncoder;
}): Promise<void> {
  // One batch carries 1024 frames; a frame is one sample per channel.
  const samplesPerBatch = numberOfChannels * 1024;
  let framesEmitted = 0;
  let cursor = 0;
  try {
    while (cursor < audioData.length) {
      const end = Math.min(cursor + samplesPerBatch, audioData.length);
      const batch = audioData.subarray(cursor, end);
      const frameCount = batch.length / numberOfChannels;
      const frame = new AudioData({
        format: 'f32',
        sampleRate,
        numberOfFrames: frameCount,
        numberOfChannels,
        // Timestamp in microseconds, derived from frames already emitted.
        timestamp: (framesEmitted * 1000000) / sampleRate,
        data: batch,
      });
      encoder.encode(frame);
      frame.close();
      framesEmitted += frameCount;
      cursor = end;
    }
  } finally {
    // Always drain pending output and release the encoder.
    await encoder.flush();
    encoder.close();
  }
}
/**
* Creates a WebM container with the encoded audio chunks
*/
function muxToWebM(
encodedChunks: EncodedAudioChunk[],
config: AudioEncodingConfig
): Uint8Array {
const target = new ArrayBufferTarget();
const muxer = new Muxer({
target,
audio: {
codec: 'A_OPUS',
sampleRate: config.sampleRate,
numberOfChannels: config.numberOfChannels,
},
});
for (const chunk of encodedChunks) {
muxer.addAudioChunk(chunk, {});
}
muxer.finalize();
return new Uint8Array(target.buffer);
}
/**
 * Encodes raw interleaved f32 PCM audio (fetched from `filepath`) to Opus in
 * a WebM container.
 *
 * @param filepath Path/URL of the raw PCM stream, resolved against
 *                 `location.origin`.
 * @param sampleRate Sample rate of the raw PCM data.
 * @param numberOfChannels Channel count of the interleaved PCM data.
 * @returns The WebM container bytes holding the Opus-encoded audio.
 * @throws Error when the response has no body or encoding fails.
 */
export async function encodeRawBufferToOpus({
  filepath,
  sampleRate,
  numberOfChannels,
}: {
  filepath: string;
  sampleRate: number;
  numberOfChannels: number;
}): Promise<Uint8Array> {
  logger.debug('Encoding raw buffer to Opus');
  const response = await fetch(new URL(filepath, location.origin));
  if (!response.body) {
    throw new Error('Response body is null');
  }
  const { encoder, encodedChunks } = await createOpusEncoder({
    sampleRate,
    numberOfChannels,
  });
  // Accumulate the raw BYTES first. Stream chunks split at arbitrary byte
  // boundaries (not necessarily multiples of 4) and may be views with a
  // non-zero byteOffset, so constructing a Float32Array per chunk
  // (`new Float32Array(value.buffer)`) could throw a RangeError or read the
  // wrong bytes.
  const reader = response.body.getReader();
  const byteChunks: Uint8Array[] = [];
  let totalBytes = 0;
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      byteChunks.push(value);
      totalBytes += value.byteLength;
    }
  } finally {
    reader.releaseLock();
  }
  // Concatenate into one contiguous buffer, then reinterpret as f32 samples.
  const bytes = new Uint8Array(totalBytes);
  let writeOffset = 0;
  for (const chunk of byteChunks) {
    bytes.set(chunk, writeOffset);
    writeOffset += chunk.byteLength;
  }
  const audioData = new Float32Array(
    bytes.buffer,
    0,
    Math.floor(totalBytes / Float32Array.BYTES_PER_ELEMENT)
  );
  await encodeAudioFrames({
    audioData,
    numberOfChannels,
    sampleRate,
    encoder,
  });
  const webm = muxToWebM(encodedChunks, { sampleRate, numberOfChannels });
  logger.debug('Encoded raw buffer to Opus');
  return webm;
}
/**
 * Encodes an audio file Blob to Opus in a WebM container with the specified
 * bitrate.
 *
 * @param blob Input audio (Blob, ArrayBuffer, or Uint8Array) in any
 *             browser-decodable format.
 * @param targetBitrate Target bitrate in bits per second (default 64 kbps).
 * @returns Promise resolving to the encoded WebM bytes.
 * @throws Error when decoding or encoding fails.
 */
export async function encodeAudioBlobToOpus(
  blob: Blob | ArrayBuffer | Uint8Array,
  targetBitrate: number = 64000
): Promise<Uint8Array> {
  const audioContext = new AudioContext();
  logger.debug('Encoding audio blob to Opus');
  try {
    let buffer: ArrayBuffer;
    if (blob instanceof Blob) {
      buffer = await blob.arrayBuffer();
    } else if (blob instanceof Uint8Array) {
      // Copy exactly the bytes the view covers. Passing `blob.buffer`
      // directly would be wrong for subarray views (it ignores
      // byteOffset/byteLength), and decodeAudioData would detach the
      // caller's underlying buffer.
      buffer = blob.slice().buffer as ArrayBuffer;
    } else {
      // NOTE: decodeAudioData detaches the ArrayBuffer passed to it, so the
      // caller's buffer becomes unusable afterwards.
      buffer = blob;
    }
    const audioBuffer = await audioContext.decodeAudioData(buffer);
    const config: AudioEncodingConfig = {
      sampleRate: audioBuffer.sampleRate,
      numberOfChannels: audioBuffer.numberOfChannels,
      bitrate: targetBitrate,
    };
    const { encoder, encodedChunks } = await createOpusEncoder(config);
    // Interleave the decoded per-channel data into a single f32 array
    // (frame-major: [ch0, ch1, ch0, ch1, ...]).
    const audioData = new Float32Array(
      audioBuffer.length * config.numberOfChannels
    );
    for (let channel = 0; channel < config.numberOfChannels; channel++) {
      const channelData = audioBuffer.getChannelData(channel);
      for (let i = 0; i < channelData.length; i++) {
        audioData[i * config.numberOfChannels + channel] = channelData[i];
      }
    }
    await encodeAudioFrames({
      audioData,
      numberOfChannels: config.numberOfChannels,
      sampleRate: config.sampleRate,
      encoder,
    });
    const webm = muxToWebM(encodedChunks, config);
    logger.debug('Encoded audio blob to Opus');
    return webm;
  } finally {
    await audioContext.close();
  }
}

View File

@@ -1,26 +1,26 @@
{ {
"ar": 93, "ar": 92,
"ca": 4, "ca": 4,
"da": 4, "da": 4,
"de": 93, "de": 92,
"el-GR": 93, "el-GR": 92,
"en": 100, "en": 100,
"es-AR": 93, "es-AR": 92,
"es-CL": 95, "es-CL": 93,
"es": 93, "es": 92,
"fa": 93, "fa": 92,
"fr": 93, "fr": 92,
"hi": 2, "hi": 2,
"it-IT": 93, "it-IT": 92,
"it": 1, "it": 1,
"ja": 93, "ja": 92,
"ko": 59, "ko": 58,
"pl": 93, "pl": 92,
"pt-BR": 93, "pt-BR": 92,
"ru": 93, "ru": 92,
"sv-SE": 93, "sv-SE": 92,
"uk": 93, "uk": 92,
"ur": 2, "ur": 2,
"zh-Hans": 93, "zh-Hans": 92,
"zh-Hant": 93 "zh-Hant": 92
} }

View File

@@ -5327,6 +5327,94 @@ export function useAFFiNEI18N(): {
* `Use transparency effect on the sidebar.` * `Use transparency effect on the sidebar.`
*/ */
["com.affine.settings.translucent-style-description"](): string; ["com.affine.settings.translucent-style-description"](): string;
/**
* `Meetings`
*/
["com.affine.settings.meetings"](): string;
/**
* `Enable meeting notes`
*/
["com.affine.settings.meetings.enable.title"](): string;
/**
* `Utilize the meeting notes and AI summarization features provided by AFFiNE.`
*/
["com.affine.settings.meetings.enable.description"](): string;
/**
* `Meeting recording`
*/
["com.affine.settings.meetings.record.header"](): string;
/**
* `When meeting starts`
*/
["com.affine.settings.meetings.record.recording-mode"](): string;
/**
* `Choose the behavior when the meeting starts.`
*/
["com.affine.settings.meetings.record.recording-mode.description"](): string;
/**
* `Open saved recordings`
*/
["com.affine.settings.meetings.record.open-saved-file"](): string;
/**
* `Open the locally stored recording files.`
*/
["com.affine.settings.meetings.record.open-saved-file.description"](): string;
/**
* `Transcription with AI`
*/
["com.affine.settings.meetings.transcription.header"](): string;
/**
* `Auto transcription`
*/
["com.affine.settings.meetings.transcription.auto-transcription"](): string;
/**
* `Automatically transcribe the meeting notes.`
*/
["com.affine.settings.meetings.transcription.auto-transcription.description"](): string;
/**
* `Privacy & Security`
*/
["com.affine.settings.meetings.privacy.header"](): string;
/**
* `Screen & System audio recording`
*/
["com.affine.settings.meetings.privacy.screen-system-audio-recording"](): string;
/**
* `The Meeting feature requires permission to be used.`
*/
["com.affine.settings.meetings.privacy.screen-system-audio-recording.description"](): string;
/**
* `Click to allow`
*/
["com.affine.settings.meetings.privacy.screen-system-audio-recording.permission-setting"](): string;
/**
* `Do nothing`
*/
["com.affine.settings.meetings.record.recording-mode.none"](): string;
/**
* `Auto start recording`
*/
["com.affine.settings.meetings.record.recording-mode.auto-start"](): string;
/**
* `Show a recording prompt`
*/
["com.affine.settings.meetings.record.recording-mode.prompt"](): string;
/**
* `Screen & System Audio Recording`
*/
["com.affine.settings.meetings.record.permission-modal.title"](): string;
/**
* `AFFiNE will generate meeting notes by recording your meetings. Authorization to "Screen & System Audio Recording" is necessary.`
*/
["com.affine.settings.meetings.record.permission-modal.description"](): string;
/**
* `Save meeting's recording block to`
*/
["com.affine.settings.meetings.record.save-mode"](): string;
/**
* `Open System Settings`
*/
["com.affine.settings.meetings.record.permission-modal.open-setting"](): string;
/** /**
* `Workspace` * `Workspace`
*/ */
@@ -5583,6 +5671,14 @@ export function useAFFiNEI18N(): {
* `Audio block allows you to play audio files globally and add notes to them.` * `Audio block allows you to play audio files globally and add notes to them.`
*/ */
["com.affine.settings.workspace.experimental-features.enable-audio-block.description"](): string; ["com.affine.settings.workspace.experimental-features.enable-audio-block.description"](): string;
/**
* `Meetings`
*/
["com.affine.settings.workspace.experimental-features.enable-meetings.name"](): string;
/**
* `Meetings allows you to record and transcribe meetings. Don't forget to enable it in AFFiNE settings.`
*/
["com.affine.settings.workspace.experimental-features.enable-meetings.description"](): string;
/** /**
* `Editor RTL` * `Editor RTL`
*/ */
@@ -7449,10 +7545,22 @@ export function useAFFiNEI18N(): {
* `Audio activity` * `Audio activity`
*/ */
["com.affine.recording.new"](): string; ["com.affine.recording.new"](): string;
/**
* `Finished`
*/
["com.affine.recording.success.prompt"](): string;
/** /**
* `Open app` * `Open app`
*/ */
["com.affine.recording.ready"](): string; ["com.affine.recording.success.button"](): string;
/**
* `Failed to save`
*/
["com.affine.recording.failed.prompt"](): string;
/**
* `Open file`
*/
["com.affine.recording.failed.button"](): string;
/** /**
* `{{appName}}'s audio` * `{{appName}}'s audio`
*/ */

View File

@@ -1328,6 +1328,28 @@
"com.affine.settings.suggestion-2": "Love our app? <1>Star us on GitHub</1> and <2>create issues</2> for your valuable feedback!", "com.affine.settings.suggestion-2": "Love our app? <1>Star us on GitHub</1> and <2>create issues</2> for your valuable feedback!",
"com.affine.settings.translucent-style": "Translucent UI on the sidebar", "com.affine.settings.translucent-style": "Translucent UI on the sidebar",
"com.affine.settings.translucent-style-description": "Use transparency effect on the sidebar.", "com.affine.settings.translucent-style-description": "Use transparency effect on the sidebar.",
"com.affine.settings.meetings": "Meetings",
"com.affine.settings.meetings.enable.title": "Enable meeting notes",
"com.affine.settings.meetings.enable.description": "Utilize the meeting notes and AI summarization features provided by AFFiNE.",
"com.affine.settings.meetings.record.header": "Meeting recording",
"com.affine.settings.meetings.record.recording-mode": "When meeting starts",
"com.affine.settings.meetings.record.recording-mode.description": "Choose the behavior when the meeting starts.",
"com.affine.settings.meetings.record.open-saved-file": "Open saved recordings",
"com.affine.settings.meetings.record.open-saved-file.description": "Open the locally stored recording files.",
"com.affine.settings.meetings.transcription.header": "Transcription with AI",
"com.affine.settings.meetings.transcription.auto-transcription": "Auto transcription",
"com.affine.settings.meetings.transcription.auto-transcription.description": "Automatically transcribe the meeting notes.",
"com.affine.settings.meetings.privacy.header": "Privacy & Security",
"com.affine.settings.meetings.privacy.screen-system-audio-recording": "Screen & System audio recording",
"com.affine.settings.meetings.privacy.screen-system-audio-recording.description": "The Meeting feature requires permission to be used.",
"com.affine.settings.meetings.privacy.screen-system-audio-recording.permission-setting": "Click to allow",
"com.affine.settings.meetings.record.recording-mode.none": "Do nothing",
"com.affine.settings.meetings.record.recording-mode.auto-start": "Auto start recording",
"com.affine.settings.meetings.record.recording-mode.prompt": "Show a recording prompt",
"com.affine.settings.meetings.record.permission-modal.title": "Screen & System Audio Recording",
"com.affine.settings.meetings.record.permission-modal.description": "AFFiNE will generate meeting notes by recording your meetings. Authorization to \"Screen & System Audio Recording\" is necessary.",
"com.affine.settings.meetings.record.save-mode": "Save meeting's recording block to",
"com.affine.settings.meetings.record.permission-modal.open-setting": "Open System Settings",
"com.affine.settings.workspace": "Workspace", "com.affine.settings.workspace": "Workspace",
"com.affine.settings.workspace.description": "You can view current workspace's information here.", "com.affine.settings.workspace.description": "You can view current workspace's information here.",
"com.affine.settings.workspace.experimental-features": "Experimental features", "com.affine.settings.workspace.experimental-features": "Experimental features",
@@ -1392,6 +1414,8 @@
"com.affine.settings.workspace.experimental-features.enable-pdf-embed-preview.description": "Once enabled, you can preview PDF in embed view.", "com.affine.settings.workspace.experimental-features.enable-pdf-embed-preview.description": "Once enabled, you can preview PDF in embed view.",
"com.affine.settings.workspace.experimental-features.enable-audio-block.name": "Audio block", "com.affine.settings.workspace.experimental-features.enable-audio-block.name": "Audio block",
"com.affine.settings.workspace.experimental-features.enable-audio-block.description": "Audio block allows you to play audio files globally and add notes to them.", "com.affine.settings.workspace.experimental-features.enable-audio-block.description": "Audio block allows you to play audio files globally and add notes to them.",
"com.affine.settings.workspace.experimental-features.enable-meetings.name": "Meetings",
"com.affine.settings.workspace.experimental-features.enable-meetings.description": "Meetings allows you to record and transcribe meetings. Don't forget to enable it in AFFiNE settings.",
"com.affine.settings.workspace.experimental-features.enable-editor-rtl.name": "Editor RTL", "com.affine.settings.workspace.experimental-features.enable-editor-rtl.name": "Editor RTL",
"com.affine.settings.workspace.experimental-features.enable-editor-rtl.description": "Once enabled, the editor will be displayed in RTL mode.", "com.affine.settings.workspace.experimental-features.enable-editor-rtl.description": "Once enabled, the editor will be displayed in RTL mode.",
"com.affine.settings.workspace.experimental-features.enable-edgeless-scribbled-style.name": "Edgeless scribbled style", "com.affine.settings.workspace.experimental-features.enable-edgeless-scribbled-style.name": "Edgeless scribbled style",
@@ -1864,7 +1888,10 @@
"com.affine.audio.transcribe.non-owner.confirm.title": "Unable to retrieve AI results for others", "com.affine.audio.transcribe.non-owner.confirm.title": "Unable to retrieve AI results for others",
"com.affine.audio.transcribe.non-owner.confirm.message": "Please contact <1>{{user}}</1> to upgrade AI rights or resend the attachment.", "com.affine.audio.transcribe.non-owner.confirm.message": "Please contact <1>{{user}}</1> to upgrade AI rights or resend the attachment.",
"com.affine.recording.new": "Audio activity", "com.affine.recording.new": "Audio activity",
"com.affine.recording.ready": "Open app", "com.affine.recording.success.prompt": "Finished",
"com.affine.recording.success.button": "Open app",
"com.affine.recording.failed.prompt": "Failed to save",
"com.affine.recording.failed.button": "Open file",
"com.affine.recording.recording": "{{appName}}'s audio", "com.affine.recording.recording": "{{appName}}'s audio",
"com.affine.recording.recording.unnamed": "Audio recording", "com.affine.recording.recording.unnamed": "Audio recording",
"com.affine.recording.start": "Start", "com.affine.recording.start": "Start",

View File

@@ -448,7 +448,7 @@ impl TappableApplication {
#[napi] #[napi]
pub struct ApplicationListChangedSubscriber { pub struct ApplicationListChangedSubscriber {
listener_block: *const Block<dyn Fn(u32, *mut c_void)>, listener_block: RcBlock<dyn Fn(u32, *mut c_void)>,
} }
#[napi] #[napi]
@@ -464,7 +464,9 @@ impl ApplicationListChangedSubscriber {
mElement: kAudioObjectPropertyElementMain, mElement: kAudioObjectPropertyElementMain,
}, },
ptr::null_mut(), ptr::null_mut(),
self.listener_block.cast_mut().cast(), (&*self.listener_block as *const Block<dyn Fn(u32, *mut c_void)>)
.cast_mut()
.cast(),
) )
}; };
if status != 0 { if status != 0 {
@@ -554,7 +556,7 @@ impl ShareableContent {
callback.call(Ok(()), ThreadsafeFunctionCallMode::NonBlocking); callback.call(Ok(()), ThreadsafeFunctionCallMode::NonBlocking);
} }
}); });
let listener_block = &*callback_block as *const Block<dyn Fn(u32, *mut c_void)>;
let status = unsafe { let status = unsafe {
AudioObjectAddPropertyListenerBlock( AudioObjectAddPropertyListenerBlock(
kAudioObjectSystemObject, kAudioObjectSystemObject,
@@ -564,7 +566,9 @@ impl ShareableContent {
mElement: kAudioObjectPropertyElementMain, mElement: kAudioObjectPropertyElementMain,
}, },
ptr::null_mut(), ptr::null_mut(),
listener_block.cast_mut().cast(), (&*callback_block as *const Block<dyn Fn(u32, *mut c_void)>)
.cast_mut()
.cast(),
) )
}; };
if status != 0 { if status != 0 {
@@ -573,7 +577,9 @@ impl ShareableContent {
"Failed to add property listener", "Failed to add property listener",
)); ));
} }
Ok(ApplicationListChangedSubscriber { listener_block }) Ok(ApplicationListChangedSubscriber {
listener_block: callback_block,
})
} }
#[napi] #[napi]

View File

@@ -328,13 +328,16 @@ __metadata:
"@testing-library/dom": "npm:^10.4.0" "@testing-library/dom": "npm:^10.4.0"
"@testing-library/react": "npm:^16.1.0" "@testing-library/react": "npm:^16.1.0"
"@toeverything/theme": "npm:^1.1.12" "@toeverything/theme": "npm:^1.1.12"
"@types/bytes": "npm:^3.1.5"
"@types/react": "npm:^19.0.1" "@types/react": "npm:^19.0.1"
"@types/react-dom": "npm:^19.0.2" "@types/react-dom": "npm:^19.0.2"
"@vanilla-extract/css": "npm:^1.17.0" "@vanilla-extract/css": "npm:^1.17.0"
"@vanilla-extract/dynamic": "npm:^2.1.2" "@vanilla-extract/dynamic": "npm:^2.1.2"
bytes: "npm:^3.1.2"
check-password-strength: "npm:^3.0.0" check-password-strength: "npm:^3.0.0"
clsx: "npm:^2.1.1" clsx: "npm:^2.1.1"
dayjs: "npm:^1.11.13" dayjs: "npm:^1.11.13"
foxact: "npm:^0.2.45"
jotai: "npm:^2.10.3" jotai: "npm:^2.10.3"
lit: "npm:^3.2.1" lit: "npm:^3.2.1"
lodash-es: "npm:^4.17.21" lodash-es: "npm:^4.17.21"
@@ -456,6 +459,7 @@ __metadata:
swr: "npm:2.3.3" swr: "npm:2.3.3"
tinykeys: "patch:tinykeys@npm%3A2.1.0#~/.yarn/patches/tinykeys-npm-2.1.0-819feeaed0.patch" tinykeys: "patch:tinykeys@npm%3A2.1.0#~/.yarn/patches/tinykeys-npm-2.1.0-819feeaed0.patch"
vitest: "npm:3.0.9" vitest: "npm:3.0.9"
webm-muxer: "npm:^5.1.0"
y-protocols: "npm:^1.0.6" y-protocols: "npm:^1.0.6"
yjs: "npm:^13.6.21" yjs: "npm:^13.6.21"
zod: "npm:^3.24.1" zod: "npm:^3.24.1"
@@ -21594,7 +21598,7 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"foxact@npm:^0.2.43": "foxact@npm:^0.2.43, foxact@npm:^0.2.45":
version: 0.2.45 version: 0.2.45
resolution: "foxact@npm:0.2.45" resolution: "foxact@npm:0.2.45"
dependencies: dependencies:
@@ -33879,7 +33883,7 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"webm-muxer@npm:^5.0.3": "webm-muxer@npm:^5.0.3, webm-muxer@npm:^5.1.0":
version: 5.1.0 version: 5.1.0
resolution: "webm-muxer@npm:5.1.0" resolution: "webm-muxer@npm:5.1.0"
dependencies: dependencies: