perf(core): only full sync before exporting (#10408)

This commit is contained in:
liuyi
2025-02-25 12:41:56 +08:00
committed by GitHub
parent c644a46b8d
commit e5e5c0a8ba
8 changed files with 222 additions and 120 deletions

View File

@@ -15,8 +15,12 @@ export class BlobFrontend {
return this.sync.uploadBlob(blob);
}
fullSync() {
return this.sync.fullSync();
fullDownload() {
return this.sync.fullDownload();
}
fullUpload() {
return this.sync.fullUpload();
}
addPriority(_id: string, _priority: number) {

View File

@@ -9,6 +9,8 @@ import type { PeerStorageOptions } from '../types';
export interface BlobSyncState {
isStorageOverCapacity: boolean;
total: number;
synced: number;
}
export interface BlobSync {
@@ -18,7 +20,8 @@ export interface BlobSync {
signal?: AbortSignal
): Promise<BlobRecord | null>;
uploadBlob(blob: BlobRecord, signal?: AbortSignal): Promise<void>;
fullSync(signal?: AbortSignal): Promise<void>;
fullDownload(signal?: AbortSignal): Promise<void>;
fullUpload(signal?: AbortSignal): Promise<void>;
setMaxBlobSize(size: number): void;
onReachedMaxBlobSize(cb: (byteSize: number) => void): () => void;
}
@@ -26,6 +29,8 @@ export interface BlobSync {
export class BlobSyncImpl implements BlobSync {
readonly state$ = new BehaviorSubject<BlobSyncState>({
isStorageOverCapacity: false,
total: Object.values(this.storages.remotes).length ? 1 : 0,
synced: 0,
});
private abort: AbortController | null = null;
private maxBlobSize: number = 1024 * 1024 * 100; // 100MB
@@ -34,19 +39,24 @@ export class BlobSyncImpl implements BlobSync {
constructor(readonly storages: PeerStorageOptions<BlobStorage>) {}
async downloadBlob(blobId: string, signal?: AbortSignal) {
const localBlob = await this.storages.local.get(blobId, signal);
if (localBlob) {
return localBlob;
}
for (const storage of Object.values(this.storages.remotes)) {
const data = await storage.get(blobId, signal);
if (data) {
await this.storages.local.set(data, signal);
return data;
try {
const localBlob = await this.storages.local.get(blobId, signal);
if (localBlob) {
return localBlob;
}
for (const storage of Object.values(this.storages.remotes)) {
const data = await storage.get(blobId, signal);
if (data) {
await this.storages.local.set(data, signal);
return data;
}
}
return null;
} catch (e) {
console.error('error when download blob', e);
return null;
}
return null;
}
async uploadBlob(blob: BlobRecord, signal?: AbortSignal) {
@@ -62,7 +72,11 @@ export class BlobSyncImpl implements BlobSync {
return await remote.set(blob, signal);
} catch (err) {
if (err instanceof OverCapacityError) {
this.state$.next({ isStorageOverCapacity: true });
this.state$.next({
isStorageOverCapacity: true,
total: this.state$.value.total,
synced: this.state$.value.synced,
});
}
throw err;
}
@@ -70,71 +84,95 @@ export class BlobSyncImpl implements BlobSync {
);
}
async fullSync(signal?: AbortSignal) {
async fullDownload(signal?: AbortSignal) {
throwIfAborted(signal);
await this.storages.local.connection.waitForConnected(signal);
const localList = (await this.storages.local.list(signal)).map(b => b.key);
this.state$.next({
...this.state$.value,
synced: localList.length,
});
for (const [remotePeer, remote] of Object.entries(this.storages.remotes)) {
let localList: string[] = [];
let remoteList: string[] = [];
await Promise.allSettled(
Object.entries(this.storages.remotes).map(
async ([remotePeer, remote]) => {
await remote.connection.waitForConnected(signal);
await remote.connection.waitForConnected(signal);
const remoteList = (await remote.list(signal)).map(b => b.key);
try {
localList = (await this.storages.local.list(signal)).map(b => b.key);
throwIfAborted(signal);
remoteList = (await remote.list(signal)).map(b => b.key);
throwIfAborted(signal);
} catch (err) {
if (err === MANUALLY_STOP) {
throw err;
}
console.error(`error when sync`, err);
continue;
}
this.state$.next({
...this.state$.value,
total: Math.max(this.state$.value.total, remoteList.length),
});
const needUpload = difference(localList, remoteList);
for (const key of needUpload) {
try {
const data = await this.storages.local.get(key, signal);
throwIfAborted(signal);
if (data) {
await remote.set(data, signal);
throwIfAborted(signal);
const needDownload = difference(remoteList, localList);
for (const key of needDownload) {
try {
const data = await remote.get(key, signal);
throwIfAborted(signal);
if (data) {
await this.storages.local.set(data, signal);
this.state$.next({
...this.state$.value,
synced: this.state$.value.synced + 1,
});
throwIfAborted(signal);
}
} catch (err) {
if (err === MANUALLY_STOP) {
throw err;
}
console.error(
`error when sync ${key} from [${remotePeer}] to [local]`,
err
);
}
}
} catch (err) {
if (err === MANUALLY_STOP) {
throw err;
}
console.error(
`error when sync ${key} from [local] to [${remotePeer}]`,
err
);
}
}
)
);
}
const needDownload = difference(remoteList, localList);
async fullUpload(signal?: AbortSignal) {
throwIfAborted(signal);
await this.storages.local.connection.waitForConnected(signal);
const localList = (await this.storages.local.list(signal)).map(b => b.key);
await Promise.allSettled(
Object.entries(this.storages.remotes).map(
async ([remotePeer, remote]) => {
await remote.connection.waitForConnected(signal);
const remoteList = (await remote.list(signal)).map(b => b.key);
for (const key of needDownload) {
try {
const data = await remote.get(key, signal);
throwIfAborted(signal);
if (data) {
await this.storages.local.set(data, signal);
throwIfAborted(signal);
const needUpload = difference(localList, remoteList);
for (const key of needUpload) {
try {
const data = await this.storages.local.get(key, signal);
throwIfAborted(signal);
if (data) {
await remote.set(data, signal);
throwIfAborted(signal);
}
} catch (err) {
if (err === MANUALLY_STOP) {
throw err;
}
console.error(
`error when sync ${key} from [local] to [${remotePeer}]`,
err
);
}
}
} catch (err) {
if (err === MANUALLY_STOP) {
throw err;
}
console.error(
`error when sync ${key} from [${remotePeer}] to [local]`,
err
);
}
}
}
)
);
}
start() {
@@ -144,16 +182,12 @@ export class BlobSyncImpl implements BlobSync {
const abort = new AbortController();
this.abort = abort;
// TODO(@eyhn): fix this, large blob may cause iOS to crash?
if (!BUILD_CONFIG.isIOS) {
this.fullSync(abort.signal).catch(error => {
if (error === MANUALLY_STOP) {
return;
}
console.error('sync blob error', error);
});
}
this.fullUpload(abort.signal).catch(error => {
if (error === MANUALLY_STOP) {
return;
}
console.error('sync blob error', error);
});
}
stop() {

View File

@@ -257,26 +257,23 @@ class WorkerBlobSync implements BlobSync {
uploadBlob(blob: BlobRecord, _signal?: AbortSignal): Promise<void> {
return this.client.call('blobSync.uploadBlob', blob);
}
fullSync(signal?: AbortSignal): Promise<void> {
return new Promise((resolve, reject) => {
const abortListener = () => {
reject(signal?.reason);
subscription.unsubscribe();
};
fullDownload(signal?: AbortSignal): Promise<void> {
const download = this.client.call('blobSync.fullDownload');
signal?.addEventListener('abort', abortListener);
const subscription = this.client.ob$('blobSync.fullSync').subscribe({
next() {
signal?.removeEventListener('abort', abortListener);
resolve();
},
error(err) {
signal?.removeEventListener('abort', abortListener);
reject(err);
},
});
signal?.addEventListener('abort', () => {
download.cancel();
});
return download;
}
fullUpload(signal?: AbortSignal): Promise<void> {
const upload = this.client.call('blobSync.fullUpload');
signal?.addEventListener('abort', () => {
upload.cancel();
});
return upload;
}
}

View File

@@ -234,20 +234,10 @@ class StoreConsumer {
'docSync.resetSync': () => this.docSync.resetSync(),
'blobSync.downloadBlob': key => this.blobSync.downloadBlob(key),
'blobSync.uploadBlob': blob => this.blobSync.uploadBlob(blob),
'blobSync.fullSync': () =>
new Observable(subscriber => {
const abortController = new AbortController();
this.blobSync
.fullSync(abortController.signal)
.then(() => {
subscriber.next(true);
subscriber.complete();
})
.catch(error => {
subscriber.error(error);
});
return () => abortController.abort(MANUALLY_STOP);
}),
'blobSync.fullDownload': (_, { signal }) =>
this.blobSync.fullDownload(signal),
'blobSync.fullUpload': (_, { signal }) =>
this.blobSync.fullUpload(signal),
'blobSync.state': () => this.blobSync.state$,
'blobSync.setMaxBlobSize': size => this.blobSync.setMaxBlobSize(size),
'blobSync.onReachedMaxBlobSize': () =>

View File

@@ -87,7 +87,8 @@ interface GroupedWorkerOps {
blobSync: {
downloadBlob: [string, BlobRecord | null];
uploadBlob: [BlobRecord, void];
fullSync: [void, boolean];
fullDownload: [void, void];
fullUpload: [void, void];
setMaxBlobSize: [number, void];
onReachedMaxBlobSize: [void, number];
state: [void, BlobSyncState];

View File

@@ -8,8 +8,8 @@ import type { Workspace } from '@affine/core/modules/workspace';
import { useI18n } from '@affine/i18n';
import { universalId } from '@affine/nbstore';
import track from '@affine/track';
import { useService } from '@toeverything/infra';
import { useState } from 'react';
import { LiveData, useLiveData, useService } from '@toeverything/infra';
import { useMemo, useState } from 'react';
interface ExportPanelProps {
workspace: Workspace;
@@ -20,9 +20,44 @@ export const DesktopExportPanel = ({ workspace }: ExportPanelProps) => {
const [saving, setSaving] = useState(false);
const isOnline = useSystemOnline();
const desktopApi = useService(DesktopApiService);
const isLocalWorkspace = workspace.flavour === 'local';
const docSyncState = useLiveData(
useMemo(() => {
return workspace
? LiveData.from(workspace.engine.doc.state$, null).throttleTime(500)
: null;
}, [workspace])
);
const blobSyncState = useLiveData(
useMemo(() => {
return workspace
? LiveData.from(workspace.engine.blob.state$, null).throttleTime(500)
: null;
}, [workspace])
);
const docSynced = !docSyncState?.syncing;
const blobSynced =
!blobSyncState || blobSyncState.synced === blobSyncState.total;
const [fullSynced, setFullSynced] = useState(false);
const shouldWaitForFullSync =
isLocalWorkspace || !isOnline || (fullSynced && docSynced && blobSynced);
const fullSyncing = fullSynced && (!docSynced || !blobSynced);
const fullSync = useAsyncCallback(async () => {
// NOTE: doc full sync is always started by default
// await workspace.engine.doc.waitForSynced();
workspace.engine.blob.fullDownload().catch(() => {
/* noop */
});
setFullSynced(true);
}, [workspace.engine.blob]);
const onExport = useAsyncCallback(async () => {
if (saving || !workspace) {
if (saving) {
return;
}
setSaving(true);
@@ -30,10 +65,6 @@ export const DesktopExportPanel = ({ workspace }: ExportPanelProps) => {
track.$.settingsPanel.workspace.export({
type: 'workspace',
});
if (isOnline) {
await workspace.engine.doc.waitForSynced();
await workspace.engine.blob.fullSync();
}
const result = await desktopApi.handler?.dialog.saveDBFileAs(
universalId({
@@ -53,16 +84,37 @@ export const DesktopExportPanel = ({ workspace }: ExportPanelProps) => {
} finally {
setSaving(false);
}
}, [desktopApi, isOnline, saving, t, workspace]);
}, [desktopApi, saving, t, workspace]);
if (!shouldWaitForFullSync) {
return (
<SettingRow name={t['Export']()} desc={t['Full Sync Description']()}>
<Button
data-testid="export-affine-full-sync"
onClick={fullSync}
loading={fullSyncing}
>
{t['Full Sync']()}
</Button>
</SettingRow>
);
}
const button =
isLocalWorkspace || isOnline ? t['Export']() : t['Export(Offline)']();
const desc =
isLocalWorkspace || isOnline
? t['Export Description']()
: t['Export Description(Offline)']();
return (
<SettingRow name={t['Export']()} desc={t['Export Description']()}>
<SettingRow name={t['Export']()} desc={desc}>
<Button
data-testid="export-affine-backup"
onClick={onExport}
disabled={saving}
>
{t['Export']()}
{button}
</Button>
</SettingRow>
);

View File

@@ -195,10 +195,26 @@ export function useAFFiNEI18N(): {
* `Export`
*/
Export(): string;
/**
* `Export (Offline)`
*/
["Export(Offline)"](): string;
/**
* `Full Sync`
*/
["Full Sync"](): string;
/**
* `You can export the entire Workspace data for backup, and the exported data can be re-imported.`
*/
["Export Description"](): string;
/**
* `You can export the entire Workspace data for backup, and the exported data can be re-imported, but you are currently offline, so the exported data may not be up to date.`
*/
["Export Description(Offline)"](): string;
/**
* `You can export the entire Workspace data for backup, and the exported data can be re-imported, but you must sync all cloud data first to keep your exported data up to date.`
*/
["Full Sync Description"](): string;
/**
* `Export failed`
*/
@@ -2675,6 +2691,10 @@ export function useAFFiNEI18N(): {
* `Workspace name`
*/
["com.affine.nameWorkspace.subtitle.workspace-name"](): string;
/**
* `Workspace type`
*/
["com.affine.nameWorkspace.subtitle.workspace-type"](): string;
/**
* `Name your workspace`
*/
@@ -3513,11 +3533,11 @@ export function useAFFiNEI18N(): {
*/
["com.affine.payment.cloud.free.benefit.g2-5"](): string;
/**
* `Open-source under MIT license.`
* `Local Editor under MIT license.`
*/
["com.affine.payment.cloud.free.description"](): string;
/**
* `FOSS + Basic`
* `Local FOSS + Cloud Basic`
*/
["com.affine.payment.cloud.free.name"](): string;
/**

View File

@@ -39,7 +39,11 @@
"Enable AFFiNE Cloud Description": "If enabled, the data in this workspace will be backed up and synchronised via AFFiNE Cloud.",
"Enable cloud hint": "The following functions rely on AFFiNE Cloud. All data is stored on the current device. You can enable AFFiNE Cloud for this workspace to keep data in sync with the cloud.",
"Export": "Export",
"Export(Offline)": "Export (Offline)",
"Full Sync": "Full Sync",
"Export Description": "You can export the entire Workspace data for backup, and the exported data can be re-imported.",
"Export Description(Offline)": "You can export the entire Workspace data for backup, and the exported data can be re-imported. But you are currently offline, so the exported data may not be up to date.",
"Full Sync Description": "You can export the entire Workspace data for backup, and the exported data can be re-imported. But you must sync all cloud data first to keep your exported data up to date.",
"Export failed": "Export failed",
"Export success": "Export success",
"Export to HTML": "Export to HTML",