chore: merge blocksuite source code (#9213)

This commit is contained in:
Mirone
2024-12-20 15:38:06 +08:00
committed by GitHub
parent 2c9ef916f4
commit 30200ff86d
2031 changed files with 238888 additions and 229 deletions

View File

@@ -0,0 +1,197 @@
import { type Logger, sha } from '@blocksuite/global/utils';
import type { BlobSource } from './source.js';
/**
 * Status flags reported for a blob storage backend.
 */
export interface BlobStatus {
  // Presumably set when the backend can no longer accept new blobs
  // (quota exceeded) — confirm against the producer of this status.
  isStorageOverCapacity: boolean;
}
/**
 * # BlobEngine
 *
 * Syncs blobs between storages in the background.
 *
 * All operations prefer `main`; `shadows` are used as fallbacks for reads
 * and as fan-out targets for writes.
 */
export class BlobEngine {
  // Controls the background sync loop started by `start()`.
  private _abort: AbortController | null = null;

  /** All sources, main first — this is the lookup order for reads. */
  get sources() {
    return [this.main, ...this.shadows];
  }

  constructor(
    readonly main: BlobSource,
    readonly shadows: BlobSource[],
    readonly logger: Logger
  ) {}

  /** Deletion is intentionally unsupported for now; logs an error and returns. */
  async delete(_key: string) {
    this.logger.error(
      'You are trying to delete a blob. We do not support this feature yet. We need to wait until we implement the indexer, which will inform us which doc is using a particular blob so that we can safely delete it.'
    );
  }

  /**
   * Returns the blob for `key` from the first source that has it
   * (main first, then shadows), or `null` when no source has it.
   */
  async get(key: string) {
    this.logger.debug('get blob', key);
    for (const source of this.sources) {
      const data = await source.get(key);
      if (data) {
        return data;
      }
    }
    return null;
  }

  /** Union of blob keys across all sources, deduplicated. */
  async list() {
    const blobIdSet = new Set<string>();
    for (const source of this.sources) {
      const blobs = await source.list();
      for (const blob of blobs) {
        blobIdSet.add(blob);
      }
    }
    return Array.from(blobIdSet);
  }

  async set(value: Blob): Promise<string>;
  async set(key: string, value: Blob): Promise<string>;
  /**
   * Stores a blob under `key` (or under its content hash when only a Blob is
   * given). The write to `main` is awaited; writes to writable shadows happen
   * in the background.
   *
   * @returns the key the blob was stored under.
   * @throws Error when `main` is readonly or the value is missing.
   */
  async set(valueOrKey: string | Blob, _value?: Blob) {
    if (this.main.readonly) {
      throw new Error('main peer is readonly');
    }
    // Key is either given explicitly or derived from the blob's content hash.
    const key =
      typeof valueOrKey === 'string'
        ? valueOrKey
        : await sha(await valueOrKey.arrayBuffer());
    const value = typeof valueOrKey === 'string' ? _value : valueOrKey;
    if (!value) {
      throw new Error('value is empty');
    }
    // await upload to the main peer
    await this.main.set(key, value);
    // Fan out to writable shadow peers in the background.
    // NOTE: do not attach a per-peer `.catch` inside `map()` — that would
    // resolve each promise and make the `rejected` branch below unreachable
    // (the original code had this bug: the aggregate failure log never fired).
    Promise.allSettled(
      this.shadows.filter(r => !r.readonly).map(peer => peer.set(key, value))
    )
      .then(results => {
        let anyFailed = false;
        for (const result of results) {
          if (result.status === 'rejected') {
            anyFailed = true;
            this.logger.error('Error when uploading to peer', result.reason);
          }
        }
        if (anyFailed) {
          this.logger.error(
            `blob ${key} update finish, but some peers failed to update`
          );
        } else {
          this.logger.debug(`blob ${key} update finish`);
        }
      })
      .catch(() => {
        // Promise.allSettled never reject
      });
    return key;
  }

  /**
   * Starts the background sync loop (no-op when already running).
   * Runs `sync()` immediately, then reschedules itself every minute
   * until `stop()` is called.
   */
  start() {
    if (this._abort) {
      return;
    }
    this._abort = new AbortController();
    const abortSignal = this._abort.signal;
    const sync = () => {
      if (abortSignal.aborted) {
        return;
      }
      this.sync()
        .catch(error => {
          this.logger.error('sync blob error', error);
        })
        .finally(() => {
          // sync every 1 minute
          setTimeout(sync, 60000);
        });
    };
    sync();
  }

  /**
   * Stops the background loop. A timer scheduled before `stop()` may still
   * fire once, but the aborted-signal check makes it a no-op.
   */
  stop() {
    this._abort?.abort();
    this._abort = null;
  }

  /**
   * One reconciliation pass between `main` and every writable shadow:
   * keys on main but missing from the shadow are uploaded, keys on the
   * shadow but missing from main are downloaded. Per-key errors are
   * logged and do not abort the pass.
   *
   * NOTE(review): readonly shadows are skipped entirely — their lists are
   * never fetched, so nothing is downloaded *from* them either. Confirm
   * this is intended (reading from a readonly source would be safe).
   */
  async sync() {
    if (this.main.readonly) {
      return;
    }
    this.logger.debug('start syncing blob...');
    for (const shadow of this.shadows) {
      let mainList: string[] = [];
      let shadowList: string[] = [];
      if (!shadow.readonly) {
        try {
          mainList = await this.main.list();
          shadowList = await shadow.list();
        } catch (err) {
          this.logger.error(`error when sync`, err);
          continue;
        }
        const needUpload = mainList.filter(key => !shadowList.includes(key));
        for (const key of needUpload) {
          try {
            const data = await this.main.get(key);
            if (data) {
              await shadow.set(key, data);
            } else {
              this.logger.error(
                'data not found when trying upload from main to shadow'
              );
            }
          } catch (err) {
            this.logger.error(
              `error when sync ${key} from [${this.main.name}] to [${shadow.name}]`,
              err
            );
          }
        }
      }
      // Empty for readonly shadows (lists were never fetched above).
      const needDownload = shadowList.filter(key => !mainList.includes(key));
      for (const key of needDownload) {
        try {
          const data = await shadow.get(key);
          if (data) {
            await this.main.set(key, data);
          } else {
            this.logger.error(
              'data not found when trying download from shadow to main'
            );
          }
        } catch (err) {
          this.logger.error(
            `error when sync ${key} from [${shadow.name}] to [${this.main.name}]`,
            err
          );
        }
      }
    }
    this.logger.debug('finish syncing blob');
  }
}

View File

@@ -0,0 +1,2 @@
export * from './indexeddb.js';
export * from './memory.js';

View File

@@ -0,0 +1,39 @@
import { createStore, del, get, keys, set } from 'idb-keyval';
import type { BlobSource } from '../source.js';
/**
 * Blob source persisted in IndexedDB via `idb-keyval`.
 *
 * Blob bytes live in the `<name>_blob` store; each blob's MIME type is kept
 * in a parallel `<name>_blob_mime` store so the Blob can be reconstructed
 * with its original `type` on read.
 */
export class IndexedDBBlobSource implements BlobSource {
  // Stores are created in the constructor rather than as field initializers:
  // the original initializers read `this.name`, which under native class-field
  // semantics (`useDefineForClassFields`, target ES2022+) runs BEFORE the
  // constructor body assigns the `name` parameter property, yielding
  // "undefined_blob" store names. Constructor assignment is order-safe in
  // every emit mode.
  readonly mimeTypeStore: ReturnType<typeof createStore>;

  readonly store: ReturnType<typeof createStore>;

  readonly = false;

  constructor(readonly name: string) {
    this.store = createStore(`${name}_blob`, 'blob');
    this.mimeTypeStore = createStore(`${name}_blob_mime`, 'blob_mime');
  }

  /** Removes both the blob bytes and its recorded MIME type. */
  async delete(key: string) {
    await del(key, this.store);
    await del(key, this.mimeTypeStore);
  }

  /** Reads the raw bytes and re-wraps them in a Blob with the stored type. */
  async get(key: string) {
    const res = await get<ArrayBuffer>(key, this.store);
    if (res) {
      return new Blob([res], {
        type: await get(key, this.mimeTypeStore),
      });
    }
    return null;
  }

  /** All keys currently present in the blob store. */
  async list() {
    const list = await keys<string>(this.store);
    return list;
  }

  /** Persists the blob bytes and MIME type; resolves with the key. */
  async set(key: string, value: Blob) {
    await set(key, await value.arrayBuffer(), this.store);
    await set(key, value.type, this.mimeTypeStore);
    return key;
  }
}

View File

@@ -0,0 +1,27 @@
import type { BlobSource } from '../source.js';
export class MemoryBlobSource implements BlobSource {
readonly map = new Map<string, Blob>();
name = 'memory';
readonly = false;
delete(key: string) {
this.map.delete(key);
return Promise.resolve();
}
get(key: string) {
return Promise.resolve(this.map.get(key) ?? null);
}
list() {
return Promise.resolve(Array.from(this.map.keys()));
}
set(key: string, value: Blob) {
this.map.set(key, value);
return Promise.resolve(key);
}
}

View File

@@ -0,0 +1,3 @@
export * from './engine.js';
export * from './impl/index.js';
export * from './source.js';

View File

@@ -0,0 +1,8 @@
/**
 * Contract for a blob storage backend (e.g. in-memory, IndexedDB) used by
 * the blob engine as its main store or as a shadow peer.
 */
export interface BlobSource {
  // Identifier for this source; appears in the engine's sync log messages.
  name: string;
  // When true, the engine never writes to this source.
  readonly: boolean;
  // Resolves the blob stored under `key`, or null when absent.
  get: (key: string) => Promise<Blob | null>;
  // Stores `value` under `key`; resolves the key it was stored under.
  set: (key: string, value: Blob) => Promise<string>;
  // Removes `key` (and any per-key metadata the implementation keeps).
  delete: (key: string) => Promise<void>;
  // Resolves all keys currently stored.
  list: () => Promise<string[]>;
}