Mirror of https://github.com/toeverything/AFFiNE.git
refactor(server): separate s3 & r2 storage to plugin (#5805)
@@ -1,37 +1,34 @@
-import { S3ClientConfigType } from '@aws-sdk/client-s3';
-
-export type StorageProviderType = 'fs' | 'r2' | 's3';
+import { homedir } from 'node:os';
+import { join } from 'node:path';

 export interface FsStorageConfig {
   path: string;
 }

-export type R2StorageConfig = S3ClientConfigType & {
-  accountId: string;
-};
-
-export type S3StorageConfig = S3ClientConfigType;
-
-export type StorageTargetConfig<Ext = unknown> = {
+export interface StorageProvidersConfig {
+  fs: FsStorageConfig;
+}
+
+export type StorageProviderType = keyof StorageProvidersConfig;
+
+export type StorageConfig<Ext = unknown> = {
   provider: StorageProviderType;
   bucket: string;
 } & Ext;

+export interface StoragesConfig {
+  avatar: StorageConfig<{ publicLinkFactory: (key: string) => string }>;
+  blob: StorageConfig;
+}
+
 export interface AFFiNEStorageConfig {
   /**
    * All providers for object storage
    *
    * Support different providers for different usage at the same time.
    */
-  providers: {
-    fs?: FsStorageConfig;
-    s3?: S3StorageConfig;
-    r2?: R2StorageConfig;
-  };
-  storages: {
-    avatar: StorageTargetConfig<{ publicLinkFactory: (key: string) => string }>;
-    blob: StorageTargetConfig;
-  };
+  providers: StorageProvidersConfig;
+  storages: StoragesConfig;
 }

 export type StorageProviders = AFFiNEStorageConfig['providers'];
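For orientation, a value satisfying the reshaped types above might look like the following sketch. The import path, the `~/.affine/storage` default location, and the public-link shape are illustrative assumptions, not values taken from this commit:

import { homedir } from 'node:os';
import { join } from 'node:path';

// Path assumed: the config module this hunk appears to modify.
import type { AFFiNEStorageConfig } from './storage';

// Only the 'fs' provider remains in core after this refactor, so both
// storage targets point at it here.
export const exampleStorageConfig: AFFiNEStorageConfig = {
  providers: {
    fs: {
      path: join(homedir(), '.affine/storage'), // assumed default location
    },
  },
  storages: {
    avatar: {
      provider: 'fs',
      bucket: 'avatars',
      publicLinkFactory: key => `/api/avatars/${key}`, // assumed link shape
    },
    blob: {
      provider: 'fs',
      bucket: 'blobs',
    },
  },
};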
@@ -24,6 +24,7 @@ export {
 export { PrismaService } from './prisma';
 export { SessionService } from './session';
 export * from './storage';
+export { type StorageProvider, StorageProviderFactory } from './storage';
 export { AuthThrottlerGuard, CloudThrottlerGuard, Throttle } from './throttler';
 export {
   getRequestFromHost,
@@ -1,36 +1,24 @@
-import { createRequire } from 'node:module';
-
-export const StorageProvide = Symbol('Storage');
-
-let storageModule: typeof import('@affine/storage');
-try {
-  storageModule = await import('@affine/storage');
-} catch {
-  const require = createRequire(import.meta.url);
-  storageModule =
-    process.arch === 'arm64'
-      ? require('../../../storage.arm64.node')
-      : process.arch === 'arm'
-        ? require('../../../storage.armv7.node')
-        : require('../../../storage.node');
-}
-
-export const mergeUpdatesInApplyWay = storageModule.mergeUpdatesInApplyWay;
-
-export const verifyChallengeResponse = async (
-  response: any,
-  bits: number,
-  resource: string
-) => {
-  if (typeof response !== 'string' || !response || !resource) return false;
-  return storageModule.verifyChallengeResponse(response, bits, resource);
-};
-
-export const mintChallengeResponse = async (resource: string, bits: number) => {
-  if (!resource) return null;
-  return storageModule.mintChallengeResponse(resource, bits);
-};
+import { Global, Module } from '@nestjs/common';
+
+import { registerStorageProvider, StorageProviderFactory } from './providers';
+import { FsStorageProvider } from './providers/fs';
+
+registerStorageProvider('fs', (config, bucket) => {
+  if (!config.storage.providers.fs) {
+    throw new Error('Missing fs storage provider configuration');
+  }
+
+  return new FsStorageProvider(config.storage.providers.fs, bucket);
+});
+
+@Global()
+@Module({
+  providers: [StorageProviderFactory],
+  exports: [StorageProviderFactory],
+})
+export class StorageProviderModule {}

+export * from './native';
 export type {
   BlobInputType,
   BlobOutputType,
@@ -39,5 +27,5 @@ export type {
   PutObjectMetadata,
   StorageProvider,
 } from './providers';
-export { createStorageProvider } from './providers';
-export { toBuffer } from './providers/utils';
+export { registerStorageProvider, StorageProviderFactory } from './providers';
+export { autoMetadata, toBuffer } from './providers/utils';
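Because `StorageProviderModule` is global and exports `StorageProviderFactory`, a consumer presumably injects the factory and resolves a provider per storage target. A minimal sketch with a hypothetical service name; the import path assumes the sketch sits next to this storage module:

import { Injectable, OnModuleInit } from '@nestjs/common';

import { type StorageProvider, StorageProviderFactory } from './providers';

// Hypothetical consumer; not part of this commit.
@Injectable()
export class AvatarStorageService implements OnModuleInit {
  private provider!: StorageProvider;

  constructor(private readonly factory: StorageProviderFactory) {}

  onModuleInit() {
    // Resolves whichever provider the config assigns to the 'avatar' target.
    this.provider = this.factory.create('avatar');
  }

  async upload(key: string, body: Buffer) {
    await this.provider.put(key, body);
  }
}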
packages/backend/server/src/fundamentals/storage/native.ts (new file, 30 lines)
@@ -0,0 +1,30 @@
+import { createRequire } from 'node:module';
+
+let storageModule: typeof import('@affine/storage');
+try {
+  storageModule = await import('@affine/storage');
+} catch {
+  const require = createRequire(import.meta.url);
+  storageModule =
+    process.arch === 'arm64'
+      ? require('../../../storage.arm64.node')
+      : process.arch === 'arm'
+        ? require('../../../storage.armv7.node')
+        : require('../../../storage.node');
+}
+
+export const mergeUpdatesInApplyWay = storageModule.mergeUpdatesInApplyWay;
+
+export const verifyChallengeResponse = async (
+  response: any,
+  bits: number,
+  resource: string
+) => {
+  if (typeof response !== 'string' || !response || !resource) return false;
+  return storageModule.verifyChallengeResponse(response, bits, resource);
+};
+
+export const mintChallengeResponse = async (resource: string, bits: number) => {
+  if (!resource) return null;
+  return storageModule.mintChallengeResponse(resource, bits);
+};
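The relocated native bindings keep the same surface, so a caller can still mint and verify a challenge in one round trip. A small sketch; the difficulty of 20 bits is an arbitrary example value:

import { mintChallengeResponse, verifyChallengeResponse } from './native';

// Mint a challenge response for a resource, then verify it with the same
// bit difficulty. Returns false when minting is refused.
export async function challengeRoundTrip(resource: string, bits = 20) {
  const response = await mintChallengeResponse(resource, bits);
  if (!response) return false;
  return verifyChallengeResponse(response, bits, resource);
}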
@@ -1,34 +1,37 @@
-import { AFFiNEStorageConfig, Storages } from '../../config/storage';
-import { FsStorageProvider } from './fs';
+import { Injectable } from '@nestjs/common';
+
+import { Config } from '../../config';
+import type { StorageProviderType, Storages } from '../../config/storage';
 import type { StorageProvider } from './provider';
-import { R2StorageProvider } from './r2';
-import { S3StorageProvider } from './s3';

-export function createStorageProvider(
-  config: AFFiNEStorageConfig,
-  storage: Storages
-): StorageProvider {
-  const storageConfig = config.storages[storage];
-  const providerConfig = config.providers[storageConfig.provider] as any;
-  if (!providerConfig) {
-    throw new Error(
-      `Failed to create ${storageConfig.provider} storage, configuration not correctly set`
-    );
-  }
-
-  if (storageConfig.provider === 's3') {
-    return new S3StorageProvider(providerConfig, storageConfig.bucket);
-  }
-
-  if (storageConfig.provider === 'r2') {
-    return new R2StorageProvider(providerConfig, storageConfig.bucket);
-  }
-
-  if (storageConfig.provider === 'fs') {
-    return new FsStorageProvider(providerConfig, storageConfig.bucket);
-  }
-
-  throw new Error(`Unknown storage provider type: ${storageConfig.provider}`);
-}
+const availableProviders = new Map<
+  StorageProviderType,
+  (config: Config, bucket: string) => StorageProvider
+>();
+
+export function registerStorageProvider(
+  type: StorageProviderType,
+  providerFactory: (config: Config, bucket: string) => StorageProvider
+) {
+  availableProviders.set(type, providerFactory);
+}
+
+@Injectable()
+export class StorageProviderFactory {
+  constructor(private readonly config: Config) {}
+
+  create(storage: Storages): StorageProvider {
+    const storageConfig = this.config.storage.storages[storage];
+    const providerFactory = availableProviders.get(storageConfig.provider);
+
+    if (!providerFactory) {
+      throw new Error(
+        `Unknown storage provider type: ${storageConfig.provider}`
+      );
+    }
+
+    return providerFactory(this.config, storageConfig.bucket);
+  }
+}

 export type * from './provider';
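With the registry above, the S3 and R2 providers deleted below can presumably be re-registered from a plugin instead of being hard-coded in core. A minimal sketch of such a registration; the plugin location, the config lookup, and the `as StorageProviderType` cast are assumptions (a real plugin would more likely widen `StorageProvidersConfig` via declaration merging so that 's3' type-checks directly):

import type { S3ClientConfigType } from '@aws-sdk/client-s3';

// Paths assumed: the registry re-exported by fundamentals, and an
// S3StorageProvider relocated into the plugin.
import type { StorageProviderType } from '../../fundamentals/config/storage';
import { registerStorageProvider } from '../../fundamentals/storage';
import { S3StorageProvider } from './providers/s3';

registerStorageProvider('s3' as StorageProviderType, (config, bucket) => {
  // Assumed config shape: the plugin reads its own providers section.
  const s3: S3ClientConfigType | undefined = (config.storage.providers as any).s3;
  if (!s3) {
    throw new Error('Missing s3 storage provider configuration');
  }
  return new S3StorageProvider(s3, bucket);
});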
@@ -1,20 +0,0 @@
-import { Logger } from '@nestjs/common';
-
-import { R2StorageConfig } from '../../config/storage';
-import { S3StorageProvider } from './s3';
-
-export class R2StorageProvider extends S3StorageProvider {
-  override readonly type = 'r2' as any /* cast 'r2' to 's3' */;
-
-  constructor(config: R2StorageConfig, bucket: string) {
-    super(
-      {
-        ...config,
-        forcePathStyle: true,
-        endpoint: `https://${config.accountId}.r2.cloudflarestorage.com`,
-      },
-      bucket
-    );
-    this.logger = new Logger(`${R2StorageProvider.name}:${bucket}`);
-  }
-}
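The deleted R2 subclass above mainly overrode the endpoint and path style; under the new registry the same behavior could presumably be reproduced at registration time. A sketch under the same assumptions as the S3 example, with the account-id endpoint carried over from the deleted class:

import type { StorageProviderType } from '../../fundamentals/config/storage';
import { registerStorageProvider } from '../../fundamentals/storage';
import { S3StorageProvider } from './providers/s3'; // hypothetical plugin-side provider

registerStorageProvider('r2' as StorageProviderType, (config, bucket) => {
  // Assumed config shape: an r2 section carrying the Cloudflare account id.
  const r2 = (config.storage.providers as any).r2;
  if (!r2) {
    throw new Error('Missing r2 storage provider configuration');
  }
  return new S3StorageProvider(
    {
      ...r2,
      forcePathStyle: true,
      endpoint: `https://${r2.accountId}.r2.cloudflarestorage.com`,
    },
    bucket
  );
});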
@@ -1,170 +0,0 @@
-/* eslint-disable @typescript-eslint/no-non-null-assertion */
-import { Readable } from 'node:stream';
-
-import {
-  DeleteObjectCommand,
-  GetObjectCommand,
-  ListObjectsV2Command,
-  NoSuchKey,
-  PutObjectCommand,
-  S3Client,
-} from '@aws-sdk/client-s3';
-import { Logger } from '@nestjs/common';
-
-import { S3StorageConfig } from '../../config/storage';
-import {
-  BlobInputType,
-  GetObjectMetadata,
-  ListObjectsMetadata,
-  PutObjectMetadata,
-  StorageProvider,
-} from './provider';
-import { autoMetadata, toBuffer } from './utils';
-
-export class S3StorageProvider implements StorageProvider {
-  protected logger: Logger;
-  protected client: S3Client;
-
-  readonly type = 's3';
-
-  constructor(
-    config: S3StorageConfig,
-    public readonly bucket: string
-  ) {
-    this.client = new S3Client({ region: 'auto', ...config });
-    this.logger = new Logger(`${S3StorageProvider.name}:${bucket}`);
-  }
-
-  async put(
-    key: string,
-    body: BlobInputType,
-    metadata: PutObjectMetadata = {}
-  ): Promise<void> {
-    const blob = await toBuffer(body);
-
-    metadata = await autoMetadata(blob, metadata);
-
-    try {
-      await this.client.send(
-        new PutObjectCommand({
-          Bucket: this.bucket,
-          Key: key,
-          Body: blob,
-
-          // metadata
-          ContentType: metadata.contentType,
-          ContentLength: metadata.contentLength,
-          // TODO: Cloudflare doesn't support CRC32, use md5 instead later.
-          // ChecksumCRC32: metadata.checksumCRC32,
-        })
-      );
-
-      this.logger.verbose(`Object \`${key}\` put`);
-    } catch (e) {
-      throw new Error(`Failed to put object \`${key}\``, {
-        cause: e,
-      });
-    }
-  }
-
-  async get(key: string): Promise<{
-    body?: Readable;
-    metadata?: GetObjectMetadata;
-  }> {
-    try {
-      const obj = await this.client.send(
-        new GetObjectCommand({
-          Bucket: this.bucket,
-          Key: key,
-        })
-      );
-
-      if (!obj.Body) {
-        this.logger.verbose(`Object \`${key}\` not found`);
-        return {};
-      }
-
-      this.logger.verbose(`Read object \`${key}\``);
-      return {
-        // @ts-expect-errors ignore browser response type `Blob`
-        body: obj.Body,
-        metadata: {
-          // always set when putting object
-          contentType: obj.ContentType!,
-          contentLength: obj.ContentLength!,
-          lastModified: obj.LastModified!,
-          checksumCRC32: obj.ChecksumCRC32,
-        },
-      };
-    } catch (e) {
-      // 404
-      if (e instanceof NoSuchKey) {
-        this.logger.verbose(`Object \`${key}\` not found`);
-        return {};
-      } else {
-        throw new Error(`Failed to read object \`${key}\``, {
-          cause: e,
-        });
-      }
-    }
-  }
-
-  async list(prefix?: string): Promise<ListObjectsMetadata[]> {
-    // continuationToken should be `string | undefined`,
-    // but TypeScript will fail on type infer in the code below.
-    // Seems to be a bug in TypeScript
-    let continuationToken: any = undefined;
-    let hasMore = true;
-    let result: ListObjectsMetadata[] = [];
-
-    try {
-      while (hasMore) {
-        const listResult = await this.client.send(
-          new ListObjectsV2Command({
-            Bucket: this.bucket,
-            Prefix: prefix,
-            ContinuationToken: continuationToken,
-          })
-        );
-
-        if (listResult.Contents?.length) {
-          result = result.concat(
-            listResult.Contents.map(r => ({
-              key: r.Key!,
-              lastModified: r.LastModified!,
-              size: r.Size!,
-            }))
-          );
-        }
-
-        // has more items not listed
-        hasMore = !!listResult.IsTruncated;
-        continuationToken = listResult.NextContinuationToken;
-      }
-
-      this.logger.verbose(
-        `List ${result.length} objects with prefix \`${prefix}\``
-      );
-      return result;
-    } catch (e) {
-      throw new Error(`Failed to list objects with prefix \`${prefix}\``, {
-        cause: e,
-      });
-    }
-  }
-
-  async delete(key: string): Promise<void> {
-    try {
-      await this.client.send(
-        new DeleteObjectCommand({
-          Bucket: this.bucket,
-          Key: key,
-        })
-      );
-    } catch (e) {
-      throw new Error(`Failed to delete object \`${key}\``, {
-        cause: e,
-      });
-    }
-  }
-}