feat(server): lightweight s3 client (#14348)
#### PR Dependency Tree

* **PR #14348** 👈

This tree was auto-generated by [Charcoal](https://github.com/danerwilliams/charcoal).

## Summary by CodeRabbit

* **New Features**
  * Added a dedicated S3-compatible client package and expanded the S3-compatible storage config (endpoint, region, forcePathStyle, requestTimeoutMs, minPartSize, presign options, sessionToken).
  * Document sync now broadcasts batched/compressed doc updates for more efficient real-time syncing.
* **Tests**
  * New unit and benchmark tests for base64 utilities and S3 multipart listing; updated storage-related tests to match the new formats.
@@ -337,8 +337,42 @@
       },
       "config": {
         "type": "object",
-        "description": "The config for the s3 compatible storage provider. directly passed to aws-sdk client.\n@link https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html",
+        "description": "The config for the S3 compatible storage provider.",
         "properties": {
+          "endpoint": {
+            "type": "string",
+            "description": "The S3 compatible endpoint. Example: \"https://s3.us-east-1.amazonaws.com\" or \"https://<account>.r2.cloudflarestorage.com\"."
+          },
+          "region": {
+            "type": "string",
+            "description": "The region for the storage provider. Example: \"us-east-1\" or \"auto\" for R2."
+          },
+          "forcePathStyle": {
+            "type": "boolean",
+            "description": "Whether to use path-style bucket addressing."
+          },
+          "requestTimeoutMs": {
+            "type": "number",
+            "description": "Request timeout in milliseconds."
+          },
+          "minPartSize": {
+            "type": "number",
+            "description": "Minimum multipart part size in bytes."
+          },
+          "presign": {
+            "type": "object",
+            "description": "Presigned URL behavior configuration.",
+            "properties": {
+              "expiresInSeconds": {
+                "type": "number",
+                "description": "Expiration time in seconds for presigned URLs."
+              },
+              "signContentTypeForPut": {
+                "type": "boolean",
+                "description": "Whether to sign Content-Type for presigned PUT."
+              }
+            }
+          },
           "credentials": {
             "type": "object",
             "description": "The credentials for the s3 compatible storage provider.",
@@ -348,6 +382,9 @@
           },
           "secretAccessKey": {
             "type": "string"
-          }
+          },
+          "sessionToken": {
+            "type": "string"
+          }
         }
       }
@@ -369,8 +406,42 @@
       },
       "config": {
         "type": "object",
-        "description": "The config for the s3 compatible storage provider. directly passed to aws-sdk client.\n@link https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html",
+        "description": "The config for the S3 compatible storage provider.",
         "properties": {
+          "endpoint": {
+            "type": "string",
+            "description": "The S3 compatible endpoint. Example: \"https://s3.us-east-1.amazonaws.com\" or \"https://<account>.r2.cloudflarestorage.com\"."
+          },
+          "region": {
+            "type": "string",
+            "description": "The region for the storage provider. Example: \"us-east-1\" or \"auto\" for R2."
+          },
+          "forcePathStyle": {
+            "type": "boolean",
+            "description": "Whether to use path-style bucket addressing."
+          },
+          "requestTimeoutMs": {
+            "type": "number",
+            "description": "Request timeout in milliseconds."
+          },
+          "minPartSize": {
+            "type": "number",
+            "description": "Minimum multipart part size in bytes."
+          },
+          "presign": {
+            "type": "object",
+            "description": "Presigned URL behavior configuration.",
+            "properties": {
+              "expiresInSeconds": {
+                "type": "number",
+                "description": "Expiration time in seconds for presigned URLs."
+              },
+              "signContentTypeForPut": {
+                "type": "boolean",
+                "description": "Whether to sign Content-Type for presigned PUT."
+              }
+            }
+          },
           "credentials": {
             "type": "object",
             "description": "The credentials for the s3 compatible storage provider.",
@@ -380,6 +451,9 @@
           },
           "secretAccessKey": {
             "type": "string"
-          }
+          },
+          "sessionToken": {
+            "type": "string"
+          }
         }
       },
@@ -458,8 +532,42 @@
       },
       "config": {
         "type": "object",
-        "description": "The config for the s3 compatible storage provider. directly passed to aws-sdk client.\n@link https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html",
+        "description": "The config for the S3 compatible storage provider.",
         "properties": {
+          "endpoint": {
+            "type": "string",
+            "description": "The S3 compatible endpoint. Example: \"https://s3.us-east-1.amazonaws.com\" or \"https://<account>.r2.cloudflarestorage.com\"."
+          },
+          "region": {
+            "type": "string",
+            "description": "The region for the storage provider. Example: \"us-east-1\" or \"auto\" for R2."
+          },
+          "forcePathStyle": {
+            "type": "boolean",
+            "description": "Whether to use path-style bucket addressing."
+          },
+          "requestTimeoutMs": {
+            "type": "number",
+            "description": "Request timeout in milliseconds."
+          },
+          "minPartSize": {
+            "type": "number",
+            "description": "Minimum multipart part size in bytes."
+          },
+          "presign": {
+            "type": "object",
+            "description": "Presigned URL behavior configuration.",
+            "properties": {
+              "expiresInSeconds": {
+                "type": "number",
+                "description": "Expiration time in seconds for presigned URLs."
+              },
+              "signContentTypeForPut": {
+                "type": "boolean",
+                "description": "Whether to sign Content-Type for presigned PUT."
+              }
+            }
+          },
           "credentials": {
             "type": "object",
             "description": "The credentials for the s3 compatible storage provider.",
@@ -469,6 +577,9 @@
           },
           "secretAccessKey": {
             "type": "string"
-          }
+          },
+          "sessionToken": {
+            "type": "string"
+          }
         }
       }
@@ -490,8 +601,42 @@
       },
       "config": {
         "type": "object",
-        "description": "The config for the s3 compatible storage provider. directly passed to aws-sdk client.\n@link https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html",
+        "description": "The config for the S3 compatible storage provider.",
         "properties": {
+          "endpoint": {
+            "type": "string",
+            "description": "The S3 compatible endpoint. Example: \"https://s3.us-east-1.amazonaws.com\" or \"https://<account>.r2.cloudflarestorage.com\"."
+          },
+          "region": {
+            "type": "string",
+            "description": "The region for the storage provider. Example: \"us-east-1\" or \"auto\" for R2."
+          },
+          "forcePathStyle": {
+            "type": "boolean",
+            "description": "Whether to use path-style bucket addressing."
+          },
+          "requestTimeoutMs": {
+            "type": "number",
+            "description": "Request timeout in milliseconds."
+          },
+          "minPartSize": {
+            "type": "number",
+            "description": "Minimum multipart part size in bytes."
+          },
+          "presign": {
+            "type": "object",
+            "description": "Presigned URL behavior configuration.",
+            "properties": {
+              "expiresInSeconds": {
+                "type": "number",
+                "description": "Expiration time in seconds for presigned URLs."
+              },
+              "signContentTypeForPut": {
+                "type": "boolean",
+                "description": "Whether to sign Content-Type for presigned PUT."
+              }
+            }
+          },
           "credentials": {
             "type": "object",
             "description": "The credentials for the s3 compatible storage provider.",
@@ -501,6 +646,9 @@
           },
           "secretAccessKey": {
             "type": "string"
-          }
+          },
+          "sessionToken": {
+            "type": "string"
+          }
         }
       },
@@ -941,8 +1089,42 @@
       },
       "config": {
         "type": "object",
-        "description": "The config for the s3 compatible storage provider. directly passed to aws-sdk client.\n@link https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html",
+        "description": "The config for the S3 compatible storage provider.",
         "properties": {
+          "endpoint": {
+            "type": "string",
+            "description": "The S3 compatible endpoint. Example: \"https://s3.us-east-1.amazonaws.com\" or \"https://<account>.r2.cloudflarestorage.com\"."
+          },
+          "region": {
+            "type": "string",
+            "description": "The region for the storage provider. Example: \"us-east-1\" or \"auto\" for R2."
+          },
+          "forcePathStyle": {
+            "type": "boolean",
+            "description": "Whether to use path-style bucket addressing."
+          },
+          "requestTimeoutMs": {
+            "type": "number",
+            "description": "Request timeout in milliseconds."
+          },
+          "minPartSize": {
+            "type": "number",
+            "description": "Minimum multipart part size in bytes."
+          },
+          "presign": {
+            "type": "object",
+            "description": "Presigned URL behavior configuration.",
+            "properties": {
+              "expiresInSeconds": {
+                "type": "number",
+                "description": "Expiration time in seconds for presigned URLs."
+              },
+              "signContentTypeForPut": {
+                "type": "boolean",
+                "description": "Whether to sign Content-Type for presigned PUT."
+              }
+            }
+          },
           "credentials": {
             "type": "object",
             "description": "The credentials for the s3 compatible storage provider.",
@@ -952,6 +1134,9 @@
           },
           "secretAccessKey": {
             "type": "string"
-          }
+          },
+          "sessionToken": {
+            "type": "string"
+          }
         }
       }
@@ -973,8 +1158,42 @@
       },
       "config": {
         "type": "object",
-        "description": "The config for the s3 compatible storage provider. directly passed to aws-sdk client.\n@link https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html",
+        "description": "The config for the S3 compatible storage provider.",
         "properties": {
+          "endpoint": {
+            "type": "string",
+            "description": "The S3 compatible endpoint. Example: \"https://s3.us-east-1.amazonaws.com\" or \"https://<account>.r2.cloudflarestorage.com\"."
+          },
+          "region": {
+            "type": "string",
+            "description": "The region for the storage provider. Example: \"us-east-1\" or \"auto\" for R2."
+          },
+          "forcePathStyle": {
+            "type": "boolean",
+            "description": "Whether to use path-style bucket addressing."
+          },
+          "requestTimeoutMs": {
+            "type": "number",
+            "description": "Request timeout in milliseconds."
+          },
+          "minPartSize": {
+            "type": "number",
+            "description": "Minimum multipart part size in bytes."
+          },
+          "presign": {
+            "type": "object",
+            "description": "Presigned URL behavior configuration.",
+            "properties": {
+              "expiresInSeconds": {
+                "type": "number",
+                "description": "Expiration time in seconds for presigned URLs."
+              },
+              "signContentTypeForPut": {
+                "type": "boolean",
+                "description": "Whether to sign Content-Type for presigned PUT."
+              }
+            }
+          },
           "credentials": {
             "type": "object",
             "description": "The credentials for the s3 compatible storage provider.",
@@ -984,6 +1203,9 @@
           },
           "secretAccessKey": {
             "type": "string"
-          }
+          },
+          "sessionToken": {
+            "type": "string"
+          }
         }
       },
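The same schema block repeats once per storage-provider entry in the generated config schema. For orientation, here is a hedged sketch of a config object that would satisfy the expanded schema — field names come from the schema above, while all concrete values are illustrative only:

```ts
// Illustrative only: values are made up; field names follow the schema in this diff.
const s3CompatibleStorage = {
  config: {
    endpoint: 'https://s3.us-east-1.amazonaws.com',
    region: 'us-east-1',
    forcePathStyle: false,
    requestTimeoutMs: 60_000,
    minPartSize: 8 * 1024 * 1024, // bytes
    presign: {
      expiresInSeconds: 3600,
      signContentTypeForPut: true,
    },
    credentials: {
      accessKeyId: 'AKIA...',
      secretAccessKey: '...',
      sessionToken: '...', // optional; newly accepted by this PR
    },
  },
};
```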
@@ -26,6 +26,7 @@
     "postinstall": "prisma generate"
   },
   "dependencies": {
+    "@affine/s3-compat": "workspace:*",
     "@affine/server-native": "workspace:*",
     "@ai-sdk/anthropic": "^2.0.54",
     "@ai-sdk/google": "^2.0.45",
@@ -34,8 +35,6 @@
     "@ai-sdk/openai-compatible": "^1.0.28",
     "@ai-sdk/perplexity": "^2.0.21",
     "@apollo/server": "^4.12.2",
-    "@aws-sdk/client-s3": "^3.948.0",
-    "@aws-sdk/s3-request-presigner": "^3.948.0",
     "@fal-ai/serverless-client": "^0.15.0",
     "@google-cloud/opentelemetry-cloud-trace-exporter": "^3.0.0",
     "@google-cloud/opentelemetry-resource-util": "^3.0.0",
@@ -41,9 +41,7 @@ class MockR2Provider extends R2StorageProvider {
    super(config, bucket);
  }

-  destroy() {
-    this.client.destroy();
-  }
+  destroy() {}

  // @ts-ignore expect override
  override async proxyPutObject(
@@ -66,7 +64,7 @@ class MockR2Provider extends R2StorageProvider {
    body: any,
    options: { contentLength?: number } = {}
  ) {
-    const etag = `"etag-${partNumber}"`;
+    const etag = `etag-${partNumber}`;
    this.partCalls.push({
      key,
      uploadId,
@@ -322,7 +320,7 @@ e2e('should proxy multipart upload and return etag', async t => {
    .send(payload);

  t.is(res.status, 200);
-  t.is(res.get('etag'), '"etag-1"');
+  t.is(res.get('etag'), 'etag-1');

  const calls = getProvider().partCalls;
  t.is(calls.length, 1);
@@ -356,7 +354,7 @@ e2e('should resume multipart upload and return uploaded parts', async t => {
  const init2 = await createBlobUpload(workspace.id, key, totalSize, 'bin');
  t.is(init2.method, 'MULTIPART');
  t.is(init2.uploadId, 'upload-id');
-  t.deepEqual(init2.uploadedParts, [{ partNumber: 1, etag: '"etag-1"' }]);
+  t.deepEqual(init2.uploadedParts, [{ partNumber: 1, etag: 'etag-1' }]);
  t.is(getProvider().createMultipartCalls, 1);
});
@@ -141,7 +141,7 @@ test('should override correctly', t => {
      config: {
        credentials: {
          accessKeyId: '1',
-          accessKeySecret: '1',
+          secretAccessKey: '1',
        },
      },
    },
@@ -169,7 +169,7 @@ test('should override correctly', t => {
      config: {
        credentials: {
          accessKeyId: '1',
-          accessKeySecret: '1',
+          secretAccessKey: '1',
        },
      },
    });
@@ -0,0 +1,49 @@
import { parseListPartsXml } from '@affine/s3-compat';
import test from 'ava';

test('parseListPartsXml handles array parts and pagination', t => {
  const xml = `<?xml version="1.0" encoding="UTF-8"?>
<ListPartsResult>
  <Bucket>test</Bucket>
  <Key>key</Key>
  <UploadId>upload-id</UploadId>
  <PartNumberMarker>0</PartNumberMarker>
  <NextPartNumberMarker>3</NextPartNumberMarker>
  <IsTruncated>true</IsTruncated>
  <Part>
    <PartNumber>1</PartNumber>
    <ETag>"etag-1"</ETag>
  </Part>
  <Part>
    <PartNumber>2</PartNumber>
    <ETag>etag-2</ETag>
  </Part>
</ListPartsResult>`;

  const result = parseListPartsXml(xml);
  t.deepEqual(result.parts, [
    { partNumber: 1, etag: 'etag-1' },
    { partNumber: 2, etag: 'etag-2' },
  ]);
  t.true(result.isTruncated);
  t.is(result.nextPartNumberMarker, '3');
});

test('parseListPartsXml handles single part', t => {
  const xml = `<?xml version="1.0" encoding="UTF-8"?>
<ListPartsResult>
  <Bucket>test</Bucket>
  <Key>key</Key>
  <UploadId>upload-id</UploadId>
  <IsTruncated>false</IsTruncated>
  <Part>
    <PartNumber>5</PartNumber>
    <ETag>"etag-5"</ETag>
  </Part>
</ListPartsResult>`;

  const result = parseListPartsXml(xml);
  t.deepEqual(result.parts, [{ partNumber: 5, etag: 'etag-5' }]);
  t.false(result.isTruncated);
  t.is(result.nextPartNumberMarker, undefined);
});
@@ -4,7 +4,8 @@ import { S3StorageProvider } from '../providers/s3';
import { SIGNED_URL_EXPIRED } from '../providers/utils';

const config = {
-  region: 'auto',
+  region: 'us-east-1',
+  endpoint: 'https://s3.us-east-1.amazonaws.com',
  credentials: {
    accessKeyId: 'test',
    secretAccessKey: 'test',
@@ -24,6 +25,8 @@ test('presignPut should return url and headers', async t => {
  t.truthy(result);
  t.true(result!.url.length > 0);
  t.true(result!.url.includes('X-Amz-Algorithm=AWS4-HMAC-SHA256'));
+  t.true(result!.url.includes('X-Amz-SignedHeaders='));
+  t.true(result!.url.includes('content-type'));
  t.deepEqual(result!.headers, { 'Content-Type': 'text/plain' });
  const now = Date.now();
  t.true(result!.expiresAt.getTime() >= now + SIGNED_URL_EXPIRED * 1000 - 2000);
@@ -41,12 +44,15 @@ test('presignUploadPart should return url', async t => {

test('createMultipartUpload should return uploadId', async t => {
  const provider = createProvider();
-  let receivedCommand: any;
-  const sendStub = async (command: any) => {
-    receivedCommand = command;
-    return { UploadId: 'upload-1' };
-  };
-  (provider as any).client = { send: sendStub };
+  let receivedKey: string | undefined;
+  let receivedMeta: any;
+  (provider as any).client = {
+    createMultipartUpload: async (key: string, meta: any) => {
+      receivedKey = key;
+      receivedMeta = meta;
+      return { uploadId: 'upload-1' };
+    },
+  };

  const now = Date.now();
  const result = await provider.createMultipartUpload('key', {
@@ -56,25 +62,29 @@ test('createMultipartUpload should return uploadId', async t => {
  t.is(result?.uploadId, 'upload-1');
  t.true(result!.expiresAt.getTime() >= now + SIGNED_URL_EXPIRED * 1000 - 2000);
  t.true(result!.expiresAt.getTime() <= now + SIGNED_URL_EXPIRED * 1000 + 2000);
-  t.is(receivedCommand.input.Key, 'key');
-  t.is(receivedCommand.input.ContentType, 'text/plain');
+  t.is(receivedKey, 'key');
+  t.is(receivedMeta.contentType, 'text/plain');
});

test('completeMultipartUpload should order parts', async t => {
  const provider = createProvider();
-  let called = false;
-  const sendStub = async (command: any) => {
-    called = true;
-    t.deepEqual(command.input.MultipartUpload.Parts, [
-      { ETag: 'a', PartNumber: 1 },
-      { ETag: 'b', PartNumber: 2 },
-    ]);
-  };
-  (provider as any).client = { send: sendStub };
+  let receivedParts: any;
+  (provider as any).client = {
+    completeMultipartUpload: async (
+      _key: string,
+      _uploadId: string,
+      parts: any
+    ) => {
+      receivedParts = parts;
+    },
+  };

  await provider.completeMultipartUpload('key', 'upload-1', [
    { partNumber: 2, etag: 'b' },
    { partNumber: 1, etag: 'a' },
  ]);
-  t.true(called);
+  t.deepEqual(receivedParts, [
+    { partNumber: 1, etag: 'a' },
+    { partNumber: 2, etag: 'b' },
+  ]);
});
@@ -33,9 +33,44 @@ export type StorageProviderConfig = { bucket: string } & (

const S3ConfigSchema: JSONSchema = {
  type: 'object',
-  description:
-    'The config for the s3 compatible storage provider. directly passed to aws-sdk client.\n@link https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html',
+  description: 'The config for the S3 compatible storage provider.',
  properties: {
+    endpoint: {
+      type: 'string',
+      description:
+        'The S3 compatible endpoint. Example: "https://s3.us-east-1.amazonaws.com" or "https://<account>.r2.cloudflarestorage.com".',
+    },
+    region: {
+      type: 'string',
+      description:
+        'The region for the storage provider. Example: "us-east-1" or "auto" for R2.',
+    },
+    forcePathStyle: {
+      type: 'boolean',
+      description: 'Whether to use path-style bucket addressing.',
+    },
+    requestTimeoutMs: {
+      type: 'number',
+      description: 'Request timeout in milliseconds.',
+    },
+    minPartSize: {
+      type: 'number',
+      description: 'Minimum multipart part size in bytes.',
+    },
+    presign: {
+      type: 'object',
+      description: 'Presigned URL behavior configuration.',
+      properties: {
+        expiresInSeconds: {
+          type: 'number',
+          description: 'Expiration time in seconds for presigned URLs.',
+        },
+        signContentTypeForPut: {
+          type: 'boolean',
+          description: 'Whether to sign Content-Type for presigned PUT.',
+        },
+      },
+    },
    credentials: {
      type: 'object',
      description: 'The credentials for the s3 compatible storage provider.',
@@ -46,6 +81,9 @@ const S3ConfigSchema: JSONSchema = {
      secretAccessKey: {
        type: 'string',
      },
+      sessionToken: {
+        type: 'string',
+      },
      },
    },
  },
@@ -1,7 +1,6 @@
import assert from 'node:assert';
import { Readable } from 'node:stream';

-import { PutObjectCommand, UploadPartCommand } from '@aws-sdk/client-s3';
import { Logger } from '@nestjs/common';

import {
@@ -39,9 +38,6 @@ export class R2StorageProvider extends S3StorageProvider {
        ...config,
        forcePathStyle: true,
        endpoint: `https://${config.accountId}.r2.cloudflarestorage.com`,
-        // see https://github.com/aws/aws-sdk-js-v3/issues/6810
-        requestChecksumCalculation: 'WHEN_REQUIRED',
-        responseChecksumValidation: 'WHEN_REQUIRED',
      },
      bucket
    );
@@ -179,15 +175,10 @@ export class R2StorageProvider extends S3StorageProvider {
    body: Readable | Buffer | Uint8Array | string,
    options: { contentType?: string; contentLength?: number } = {}
  ) {
-    return this.client.send(
-      new PutObjectCommand({
-        Bucket: this.bucket,
-        Key: key,
-        Body: body,
-        ContentType: options.contentType,
-        ContentLength: options.contentLength,
-      })
-    );
+    return this.client.putObject(key, body as any, {
+      contentType: options.contentType,
+      contentLength: options.contentLength,
+    });
  }

  async proxyUploadPart(
@@ -197,18 +188,15 @@ export class R2StorageProvider extends S3StorageProvider {
    body: Readable | Buffer | Uint8Array | string,
    options: { contentLength?: number } = {}
  ) {
-    const result = await this.client.send(
-      new UploadPartCommand({
-        Bucket: this.bucket,
-        Key: key,
-        UploadId: uploadId,
-        PartNumber: partNumber,
-        Body: body,
-        ContentLength: options.contentLength,
-      })
+    const result = await this.client.uploadPart(
+      key,
+      uploadId,
+      partNumber,
+      body as any,
+      { contentLength: options.contentLength }
    );

-    return result.ETag;
+    return result.etag;
  }

  override async get(
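For context, the R2 provider now only pins path-style addressing and the account-scoped endpoint; a hedged sketch of what its constructor effectively passes down (the accountId placeholder is illustrative, and the removed checksum flags were an aws-sdk-specific workaround that presumably no longer applies to the lightweight client):

```ts
// Illustrative sketch of the R2 client config, per the constructor diff above.
const r2ClientConfig = {
  forcePathStyle: true,
  endpoint: 'https://<accountId>.r2.cloudflarestorage.com', // placeholder accountId
  // requestChecksumCalculation / responseChecksumValidation are gone:
  // they worked around aws-sdk-js-v3 behavior (issue #6810), which the
  // new client does not share (assumption based on their removal here).
};
```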
@@ -1,24 +1,12 @@
/* oxlint-disable @typescript-eslint/no-non-null-assertion */
import { Readable } from 'node:stream';

-import {
-  AbortMultipartUploadCommand,
-  CompleteMultipartUploadCommand,
-  CreateMultipartUploadCommand,
-  DeleteObjectCommand,
-  GetObjectCommand,
-  HeadObjectCommand,
-  ListObjectsV2Command,
-  ListPartsCommand,
-  NoSuchKey,
-  NoSuchUpload,
-  NotFound,
-  PutObjectCommand,
-  S3Client,
-  S3ClientConfig,
-  UploadPartCommand,
-} from '@aws-sdk/client-s3';
-import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
+import type {
+  S3CompatClient,
+  S3CompatConfig,
+  S3CompatCredentials,
+} from '@affine/s3-compat';
+import { createS3CompatClient } from '@affine/s3-compat';
import { Logger } from '@nestjs/common';

import {
@@ -33,30 +21,55 @@ import {
} from './provider';
import { autoMetadata, SIGNED_URL_EXPIRED, toBuffer } from './utils';

-export interface S3StorageConfig extends S3ClientConfig {
+export interface S3StorageConfig {
+  endpoint?: string;
+  region: string;
+  credentials: S3CompatCredentials;
+  forcePathStyle?: boolean;
+  requestTimeoutMs?: number;
+  minPartSize?: number;
+  presign?: {
+    expiresInSeconds?: number;
+    signContentTypeForPut?: boolean;
+  };
  usePresignedURL?: {
    enabled: boolean;
  };
}

+function resolveEndpoint(config: S3StorageConfig) {
+  if (config.endpoint) {
+    return config.endpoint;
+  }
+  if (config.region === 'us-east-1') {
+    return 'https://s3.amazonaws.com';
+  }
+  return `https://s3.${config.region}.amazonaws.com`;
+}
+
export class S3StorageProvider implements StorageProvider {
  protected logger: Logger;
-  protected client: S3Client;
+  protected client: S3CompatClient;
  private readonly usePresignedURL: boolean;

  constructor(
    config: S3StorageConfig,
    public readonly bucket: string
  ) {
-    const { usePresignedURL, ...clientConfig } = config;
-    this.client = new S3Client({
-      region: 'auto',
-      // s3 client uses keep-alive by default to accelerate requests, and max requests queue is 50.
-      // If some of them are long holding or dead without response, the whole queue will block.
-      // By default no timeout is set for requests or connections, so we set them here.
-      requestHandler: { requestTimeout: 60_000, connectionTimeout: 10_000 },
-      ...clientConfig,
-    });
+    const { usePresignedURL, presign, credentials, ...clientConfig } = config;
+
+    const compatConfig: S3CompatConfig = {
+      ...clientConfig,
+      endpoint: resolveEndpoint(config),
+      bucket,
+      requestTimeoutMs: clientConfig.requestTimeoutMs ?? 60_000,
+      presign: {
+        expiresInSeconds: presign?.expiresInSeconds ?? SIGNED_URL_EXPIRED,
+        signContentTypeForPut: presign?.signContentTypeForPut ?? true,
+      },
+    };
+
+    this.client = createS3CompatClient(compatConfig, credentials);
    this.usePresignedURL = usePresignedURL?.enabled ?? false;
    this.logger = new Logger(`${S3StorageProvider.name}:${bucket}`);
  }
@@ -71,19 +84,10 @@ export class S3StorageProvider implements StorageProvider {
    metadata = autoMetadata(blob, metadata);

    try {
-      await this.client.send(
-        new PutObjectCommand({
-          Bucket: this.bucket,
-          Key: key,
-          Body: blob,
-
-          // metadata
-          ContentType: metadata.contentType,
-          ContentLength: metadata.contentLength,
-          // TODO(@forehalo): Cloudflare doesn't support CRC32, use md5 instead later.
-          // ChecksumCRC32: metadata.checksumCRC32,
-        })
-      );
+      await this.client.putObject(key, blob, {
+        contentType: metadata.contentType,
+        contentLength: metadata.contentLength,
+      });

      this.logger.verbose(`Object \`${key}\` put`);
    } catch (e) {
@@ -104,20 +108,12 @@ export class S3StorageProvider implements StorageProvider {
  ): Promise<PresignedUpload | undefined> {
    try {
      const contentType = metadata.contentType ?? 'application/octet-stream';
-      const url = await getSignedUrl(
-        this.client,
-        new PutObjectCommand({
-          Bucket: this.bucket,
-          Key: key,
-          ContentType: contentType,
-        }),
-        { expiresIn: SIGNED_URL_EXPIRED }
-      );
+      const result = await this.client.presignPutObject(key, { contentType });

      return {
-        url,
-        headers: { 'Content-Type': contentType },
-        expiresAt: new Date(Date.now() + SIGNED_URL_EXPIRED * 1000),
+        url: result.url,
+        headers: result.headers,
+        expiresAt: result.expiresAt,
      };
    } catch (e) {
      this.logger.error(
@@ -137,20 +133,16 @@ export class S3StorageProvider implements StorageProvider {
  ): Promise<MultipartUploadInit | undefined> {
    try {
      const contentType = metadata.contentType ?? 'application/octet-stream';
-      const response = await this.client.send(
-        new CreateMultipartUploadCommand({
-          Bucket: this.bucket,
-          Key: key,
-          ContentType: contentType,
-        })
-      );
+      const response = await this.client.createMultipartUpload(key, {
+        contentType,
+      });

-      if (!response.UploadId) {
+      if (!response.uploadId) {
        return;
      }

      return {
-        uploadId: response.UploadId,
+        uploadId: response.uploadId,
        expiresAt: new Date(Date.now() + SIGNED_URL_EXPIRED * 1000),
      };
    } catch (e) {
@@ -171,20 +163,15 @@ export class S3StorageProvider implements StorageProvider {
    partNumber: number
  ): Promise<PresignedUpload | undefined> {
    try {
-      const url = await getSignedUrl(
-        this.client,
-        new UploadPartCommand({
-          Bucket: this.bucket,
-          Key: key,
-          UploadId: uploadId,
-          PartNumber: partNumber,
-        }),
-        { expiresIn: SIGNED_URL_EXPIRED }
+      const result = await this.client.presignUploadPart(
+        key,
+        uploadId,
+        partNumber
      );

      return {
-        url,
-        expiresAt: new Date(Date.now() + SIGNED_URL_EXPIRED * 1000),
+        url: result.url,
+        expiresAt: result.expiresAt,
      };
    } catch (e) {
      this.logger.error(
@@ -198,47 +185,9 @@ export class S3StorageProvider implements StorageProvider {
    key: string,
    uploadId: string
  ): Promise<MultipartUploadPart[] | undefined> {
-    const parts: MultipartUploadPart[] = [];
-    let partNumberMarker: string | undefined;
-
    try {
-      // ListParts is paginated by part number marker
-      // https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListParts.html
-      // R2 follows S3 semantics here.
-      while (true) {
-        const response = await this.client.send(
-          new ListPartsCommand({
-            Bucket: this.bucket,
-            Key: key,
-            UploadId: uploadId,
-            PartNumberMarker: partNumberMarker,
-          })
-        );
-
-        for (const part of response.Parts ?? []) {
-          if (!part.PartNumber || !part.ETag) {
-            continue;
-          }
-          parts.push({ partNumber: part.PartNumber, etag: part.ETag });
-        }
-
-        if (!response.IsTruncated) {
-          break;
-        }
-
-        if (response.NextPartNumberMarker === undefined) {
-          break;
-        }
-
-        partNumberMarker = response.NextPartNumberMarker;
-      }
-
-      return parts;
+      return await this.client.listParts(key, uploadId);
    } catch (e) {
-      // the upload may have been aborted/expired by provider lifecycle rules
-      if (e instanceof NoSuchUpload || e instanceof NotFound) {
-        return undefined;
-      }
      this.logger.error(`Failed to list multipart upload parts for \`${key}\``);
      throw e;
    }
@@ -254,19 +203,7 @@ export class S3StorageProvider implements StorageProvider {
        (left, right) => left.partNumber - right.partNumber
      );

-      await this.client.send(
-        new CompleteMultipartUploadCommand({
-          Bucket: this.bucket,
-          Key: key,
-          UploadId: uploadId,
-          MultipartUpload: {
-            Parts: orderedParts.map(part => ({
-              ETag: part.etag,
-              PartNumber: part.partNumber,
-            })),
-          },
-        })
-      );
+      await this.client.completeMultipartUpload(key, uploadId, orderedParts);
    } catch (e) {
      this.logger.error(`Failed to complete multipart upload for \`${key}\``);
      throw e;
@@ -275,13 +212,7 @@ export class S3StorageProvider implements StorageProvider {

  async abortMultipartUpload(key: string, uploadId: string): Promise<void> {
    try {
-      await this.client.send(
-        new AbortMultipartUploadCommand({
-          Bucket: this.bucket,
-          Key: key,
-          UploadId: uploadId,
-        })
-      );
+      await this.client.abortMultipartUpload(key, uploadId);
    } catch (e) {
      this.logger.error(`Failed to abort multipart upload for \`${key}\``);
      throw e;
@@ -290,25 +221,19 @@ export class S3StorageProvider implements StorageProvider {

  async head(key: string) {
    try {
-      const obj = await this.client.send(
-        new HeadObjectCommand({
-          Bucket: this.bucket,
-          Key: key,
-        })
-      );
-
-      return {
-        contentType: obj.ContentType!,
-        contentLength: obj.ContentLength!,
-        lastModified: obj.LastModified!,
-        checksumCRC32: obj.ChecksumCRC32,
-      };
-    } catch (e) {
-      // 404
-      if (e instanceof NoSuchKey || e instanceof NotFound) {
+      const obj = await this.client.headObject(key);
+      if (!obj) {
        this.logger.verbose(`Object \`${key}\` not found`);
        return undefined;
      }
+
+      return {
+        contentType: obj.contentType ?? 'application/octet-stream',
+        contentLength: obj.contentLength ?? 0,
+        lastModified: obj.lastModified ?? new Date(0),
+        checksumCRC32: obj.checksumCRC32,
+      };
+    } catch (e) {
      this.logger.error(`Failed to head object \`${key}\``);
      throw e;
    }
@@ -323,25 +248,13 @@ export class S3StorageProvider implements StorageProvider {
    redirectUrl?: string;
  }> {
    try {
-      const command = new GetObjectCommand({
-        Bucket: this.bucket,
-        Key: key,
-      });
-
      if (this.usePresignedURL && signedUrl) {
        const metadata = await this.head(key);
        if (metadata) {
-          const url = await getSignedUrl(
-            this.client,
-            new GetObjectCommand({
-              Bucket: this.bucket,
-              Key: key,
-            }),
-            { expiresIn: SIGNED_URL_EXPIRED }
-          );
+          const result = await this.client.presignGetObject(key);

          return {
-            redirectUrl: url,
+            redirectUrl: result.url,
            metadata,
          };
        }
@@ -350,68 +263,41 @@ export class S3StorageProvider implements StorageProvider {
        return {};
      }

-      const obj = await this.client.send(command);
-
-      if (!obj.Body) {
+      const obj = await this.client.getObjectResponse(key);
+      if (!obj || !obj.body) {
        this.logger.verbose(`Object \`${key}\` not found`);
        return {};
      }

+      const contentType = obj.headers.get('content-type') ?? undefined;
+      const contentLengthHeader = obj.headers.get('content-length');
+      const contentLength = contentLengthHeader
+        ? Number(contentLengthHeader)
+        : undefined;
+      const lastModifiedHeader = obj.headers.get('last-modified');
+      const lastModified = lastModifiedHeader
+        ? new Date(lastModifiedHeader)
+        : undefined;
+
      this.logger.verbose(`Read object \`${key}\``);
      return {
-        // @ts-expect-errors ignore browser response type `Blob`
-        body: obj.Body,
+        body: Readable.fromWeb(obj.body as any),
        metadata: {
          // always set when putting object
-          contentType: obj.ContentType ?? 'application/octet-stream',
-          contentLength: obj.ContentLength!,
-          lastModified: obj.LastModified!,
-          checksumCRC32: obj.ChecksumCRC32,
+          contentType: contentType ?? 'application/octet-stream',
+          contentLength: contentLength ?? 0,
+          lastModified: lastModified ?? new Date(0),
+          checksumCRC32: obj.headers.get('x-amz-checksum-crc32') ?? undefined,
        },
      };
    } catch (e) {
-      // 404
-      if (e instanceof NoSuchKey) {
-        this.logger.verbose(`Object \`${key}\` not found`);
-        return {};
-      }
      this.logger.error(`Failed to read object \`${key}\``);
      throw e;
    }
  }

  async list(prefix?: string): Promise<ListObjectsMetadata[]> {
-    // continuationToken should be `string | undefined`,
-    // but TypeScript will fail on type infer in the code below.
-    // Seems to be a bug in TypeScript
-    let continuationToken: any = undefined;
-    let hasMore = true;
-    let result: ListObjectsMetadata[] = [];
-
    try {
-      while (hasMore) {
-        const listResult = await this.client.send(
-          new ListObjectsV2Command({
-            Bucket: this.bucket,
-            Prefix: prefix,
-            ContinuationToken: continuationToken,
-          })
-        );
-
-        if (listResult.Contents?.length) {
-          result = result.concat(
-            listResult.Contents.map(r => ({
-              key: r.Key!,
-              lastModified: r.LastModified!,
-              contentLength: r.Size!,
-            }))
-          );
-        }
-
-        // has more items not listed
-        hasMore = !!listResult.IsTruncated;
-        continuationToken = listResult.NextContinuationToken;
-      }
+      const result = await this.client.listObjectsV2(prefix);

      this.logger.verbose(
        `List ${result.length} objects with prefix \`${prefix}\``
@@ -425,12 +311,7 @@ export class S3StorageProvider implements StorageProvider {

  async delete(key: string): Promise<void> {
    try {
-      await this.client.send(
-        new DeleteObjectCommand({
-          Bucket: this.bucket,
-          Key: key,
-        })
-      );
+      await this.client.deleteObject(key);

      this.logger.verbose(`Deleted object \`${key}\``);
    } catch (e) {
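The pattern of this migration: every `client.send(new XxxCommand({ Bucket, Key, ... }))` becomes a direct method call on the new client, with the bucket fixed at construction time. A minimal sketch of the call mapping, assuming the `S3CompatClient` interface shown later in this diff (the keys and payloads are illustrative):

```ts
import { Buffer } from 'node:buffer';
import type { S3CompatClient } from '@affine/s3-compat';

// Sketch only, not exhaustive: how provider code drives the new client.
async function demo(client: S3CompatClient) {
  // putObject replaces PutObjectCommand; no Bucket/Key command object needed.
  await client.putObject('key', Buffer.from('hello'), {
    contentType: 'text/plain',
    contentLength: 5,
  });

  // Multipart flow: create -> upload parts -> complete.
  const { uploadId } = await client.createMultipartUpload('key', {
    contentType: 'application/octet-stream',
  });
  const { etag } = await client.uploadPart('key', uploadId, 1, Buffer.from('x'));
  await client.completeMultipartUpload('key', uploadId, [
    { partNumber: 1, etag },
  ]);

  // Presigning returns { url, headers?, expiresAt } directly,
  // replacing getSignedUrl + expiresAt bookkeeping in the provider.
  const presigned = await client.presignGetObject('key');
  console.log(presigned.url, presigned.expiresAt);
}
```

Note that pagination (ListParts markers, ListObjectsV2 continuation tokens) and 404/NoSuchUpload handling move out of the provider and into the client package, which is why the provider methods shrink so much.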
@@ -23,6 +23,7 @@ import {
  SpaceAccessDenied,
} from '../../base';
import { Models } from '../../models';
+import { mergeUpdatesInApplyWay } from '../../native';
import { CurrentUser } from '../auth';
import {
  DocReader,
@@ -48,8 +49,9 @@ type EventResponse<Data = any> = Data extends never
    data: Data;
  };

-// 019 only receives space:broadcast-doc-updates and send space:push-doc-updates
-// 020 only receives space:broadcast-doc-update and send space:push-doc-update
+// sync-019: legacy 0.19.x clients (broadcast-doc-updates/push-doc-updates).
+// Remove after 2026-06-30 once metrics show 0 usage for 30 days.
+// 020+: receives space:broadcast-doc-updates (batch) and sends space:push-doc-update.
type RoomType = 'sync' | `${string}:awareness` | 'sync-019';

function Room(
@@ -105,6 +107,16 @@ interface PushDocUpdateMessage {
  update: string;
}

+interface BroadcastDocUpdatesMessage {
+  spaceType: SpaceType;
+  spaceId: string;
+  docId: string;
+  updates: string[];
+  timestamp: number;
+  editor?: string;
+  compressed?: boolean;
+}
+
interface LoadDocMessage {
  spaceType: SpaceType;
  spaceId: string;
@@ -157,6 +169,62 @@ export class SpaceSyncGateway
    private readonly models: Models
  ) {}

+  private encodeUpdates(updates: Uint8Array[]) {
+    return updates.map(update => Buffer.from(update).toString('base64'));
+  }
+
+  private buildBroadcastPayload(
+    spaceType: SpaceType,
+    spaceId: string,
+    docId: string,
+    updates: Uint8Array[],
+    timestamp: number,
+    editor?: string
+  ): BroadcastDocUpdatesMessage {
+    const encodedUpdates = this.encodeUpdates(updates);
+    if (updates.length <= 1) {
+      return {
+        spaceType,
+        spaceId,
+        docId,
+        updates: encodedUpdates,
+        timestamp,
+        editor,
+        compressed: false,
+      };
+    }
+
+    try {
+      const merged = mergeUpdatesInApplyWay(
+        updates.map(update => Buffer.from(update))
+      );
+      metrics.socketio.counter('doc_updates_compressed').add(1);
+      return {
+        spaceType,
+        spaceId,
+        docId,
+        updates: [Buffer.from(merged).toString('base64')],
+        timestamp,
+        editor,
+        compressed: true,
+      };
+    } catch (error) {
+      this.logger.warn(
+        'Failed to merge updates for broadcast, falling back to batch',
+        error as Error
+      );
+      return {
+        spaceType,
+        spaceId,
+        docId,
+        updates: encodedUpdates,
+        timestamp,
+        editor,
+        compressed: false,
+      };
+    }
+  }
+
  handleConnection() {
    this.connectionCount++;
    this.logger.debug(`New connection, total: ${this.connectionCount}`);
@@ -184,9 +252,7 @@ export class SpaceSyncGateway
      return;
    }

-    const encodedUpdates = updates.map(update =>
-      Buffer.from(update).toString('base64')
-    );
+    const encodedUpdates = this.encodeUpdates(updates);

    this.server
      .to(Room(spaceId, 'sync-019'))
@@ -196,18 +262,26 @@ export class SpaceSyncGateway
        docId,
        updates: encodedUpdates,
        timestamp,
      });

-    const room = `${spaceType}:${Room(spaceId)}`;
-    encodedUpdates.forEach(update => {
-      this.server.to(room).emit('space:broadcast-doc-update', {
-        spaceType,
-        spaceId,
-        docId,
-        update,
-        timestamp,
-        editor,
-      });
-    });
+    metrics.socketio
+      .counter('sync_019_broadcast')
+      .add(encodedUpdates.length, { event: 'doc_updates_pushed' });
+
+    const room = `${spaceType}:${Room(spaceId)}`;
+    const payload = this.buildBroadcastPayload(
+      spaceType as SpaceType,
+      spaceId,
+      docId,
+      updates,
+      timestamp,
+      editor
+    );
+    this.server.to(room).emit('space:broadcast-doc-updates', payload);
+    metrics.socketio
+      .counter('doc_updates_broadcast')
+      .add(payload.updates.length, {
+        mode: payload.compressed ? 'compressed' : 'batch',
+      });
  }
@@ -330,18 +404,34 @@ export class SpaceSyncGateway
        user.id
      );

+    metrics.socketio
+      .counter('sync_019_event')
+      .add(1, { event: 'push-doc-updates' });
+
    // broadcast to 0.19.x clients
-    client
-      .to(Room(spaceId, 'sync-019'))
-      .emit('space:broadcast-doc-updates', { ...message, timestamp });
+    client.to(Room(spaceId, 'sync-019')).emit('space:broadcast-doc-updates', {
+      ...message,
+      timestamp,
+      editor: user.id,
+    });

    // broadcast to new clients
-    updates.forEach(update => {
-      client.to(adapter.room(spaceId)).emit('space:broadcast-doc-update', {
-        ...message,
-        update,
-        timestamp,
-      });
-    });
+    const decodedUpdates = updates.map(update => Buffer.from(update, 'base64'));
+    const payload = this.buildBroadcastPayload(
+      spaceType,
+      spaceId,
+      docId,
+      decodedUpdates,
+      timestamp,
+      user.id
+    );
+    client
+      .to(adapter.room(spaceId))
+      .emit('space:broadcast-doc-updates', payload);
+    metrics.socketio
+      .counter('doc_updates_broadcast')
+      .add(payload.updates.length, {
+        mode: payload.compressed ? 'compressed' : 'batch',
+      });

    return {
@@ -378,15 +468,24 @@ export class SpaceSyncGateway
        docId,
        updates: [update],
        timestamp,
        editor: user.id,
      });

-    client.to(adapter.room(spaceId)).emit('space:broadcast-doc-update', {
-      spaceType,
-      spaceId,
-      docId,
-      update,
-      timestamp,
-      editor: user.id,
-    });
+    const payload = this.buildBroadcastPayload(
+      spaceType,
+      spaceId,
+      docId,
+      [Buffer.from(update, 'base64')],
+      timestamp,
+      user.id
+    );
+    client
+      .to(adapter.room(spaceId))
+      .emit('space:broadcast-doc-updates', payload);
+    metrics.socketio
+      .counter('doc_updates_broadcast')
+      .add(payload.updates.length, {
+        mode: payload.compressed ? 'compressed' : 'batch',
+      });

    return {
@@ -12,6 +12,7 @@
  },
  "include": ["./src"],
  "references": [
+    { "path": "../../common/s3-compat" },
    { "path": "../native" },
    { "path": "../../../tools/cli" },
    { "path": "../../../tools/utils" },
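The effect of `buildBroadcastPayload` is easiest to see on the wire. A hedged sketch of the two payload shapes a 0.20+ client can receive from `space:broadcast-doc-updates` (the base64 strings and ids are placeholders):

```ts
// Batch mode: 0 or 1 updates pass through unmerged.
const batchPayload = {
  spaceType: 'workspace',
  spaceId: 'space-1',
  docId: 'doc-1',
  updates: ['AQID'], // base64-encoded doc updates, illustrative
  timestamp: 1700000000000,
  editor: 'user-1',
  compressed: false,
};

// Compressed mode: more than one update is merged server-side with
// mergeUpdatesInApplyWay and sent as a single base64 string. If merging
// throws, the gateway logs a warning and falls back to batch mode.
const compressedPayload = {
  ...batchPayload,
  updates: ['BAUG'], // single merged update, illustrative
  compressed: true,
};
```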
packages/common/nbstore/src/__tests__/base64.bench.ts (new file, 23 lines)
@@ -0,0 +1,23 @@
import { bench, describe } from 'vitest';

import { base64ToUint8Array, uint8ArrayToBase64 } from '../impls/cloud/socket';

const data = new Uint8Array(1024 * 256);
for (let i = 0; i < data.length; i++) {
  data[i] = i % 251;
}
let encoded = '';

await uint8ArrayToBase64(data).then(result => {
  encoded = result;
});

describe('base64 helpers', () => {
  bench('encode Uint8Array to base64', async () => {
    await uint8ArrayToBase64(data);
  });

  bench('decode base64 to Uint8Array', () => {
    base64ToUint8Array(encoded);
  });
});
packages/common/nbstore/src/__tests__/base64.spec.ts (new file, 27 lines)
@@ -0,0 +1,27 @@
import { describe, expect, test } from 'vitest';

import { base64ToUint8Array, uint8ArrayToBase64 } from '../impls/cloud/socket';

function makeSample(size: number) {
  const data = new Uint8Array(size);
  for (let i = 0; i < size; i++) {
    data[i] = i % 251;
  }
  return data;
}

describe('base64 helpers', () => {
  test('roundtrip preserves data', async () => {
    const input = makeSample(1024);
    const encoded = await uint8ArrayToBase64(input);
    const decoded = base64ToUint8Array(encoded);
    expect(decoded).toEqual(input);
  });

  test('handles large payloads', async () => {
    const input = makeSample(256 * 1024);
    const encoded = await uint8ArrayToBase64(input);
    const decoded = base64ToUint8Array(encoded);
    expect(decoded).toEqual(input);
  });
});
@@ -0,0 +1,41 @@
import { describe, expect, test } from 'vitest';

import { CloudDocStorage } from '../impls/cloud/doc';

const base64UpdateA = 'AQID';
const base64UpdateB = 'BAUG';

describe('CloudDocStorage broadcast updates', () => {
  test('emits updates from batch payload', () => {
    const storage = new CloudDocStorage({
      id: 'space-1',
      serverBaseUrl: 'http://localhost',
      isSelfHosted: true,
      type: 'workspace',
      readonlyMode: true,
    });

    (storage as any).connection.idConverter = {
      oldIdToNewId: (id: string) => id,
      newIdToOldId: (id: string) => id,
    };

    const received: Uint8Array[] = [];
    storage.subscribeDocUpdate(update => {
      received.push(update.bin);
    });

    storage.onServerUpdates({
      spaceType: 'workspace',
      spaceId: 'space-1',
      docId: 'doc-1',
      updates: [base64UpdateA, base64UpdateB],
      timestamp: Date.now(),
    });

    expect(received).toEqual([
      new Uint8Array([1, 2, 3]),
      new Uint8Array([4, 5, 6]),
    ]);
  });
});
@@ -38,21 +38,42 @@ export class CloudDocStorage extends DocStorageBase<CloudDocStorageOptions> {

  onServerUpdate: ServerEventsMap['space:broadcast-doc-update'] = message => {
    if (
-      this.spaceType === message.spaceType &&
-      this.spaceId === message.spaceId
+      this.spaceType !== message.spaceType ||
+      this.spaceId !== message.spaceId
    ) {
+      return;
+    }
+
    this.emit('update', {
      docId: this.idConverter.oldIdToNewId(message.docId),
      bin: base64ToUint8Array(message.update),
      timestamp: new Date(message.timestamp),
      editor: message.editor,
    });
-    }
  };

+  onServerUpdates: ServerEventsMap['space:broadcast-doc-updates'] = message => {
+    if (
+      this.spaceType !== message.spaceType ||
+      this.spaceId !== message.spaceId
+    ) {
+      return;
+    }
+
+    for (const update of message.updates) {
+      this.emit('update', {
+        docId: this.idConverter.oldIdToNewId(message.docId),
+        bin: base64ToUint8Array(update),
+        timestamp: new Date(message.timestamp),
+        editor: message.editor,
+      });
+    }
+  };
+
  readonly connection = new CloudDocStorageConnection(
    this.options,
-    this.onServerUpdate
+    this.onServerUpdate,
+    this.onServerUpdates
  );

  override async getDocSnapshot(docId: string) {
@@ -184,7 +205,8 @@ export class CloudDocStorage extends DocStorageBase<CloudDocStorageOptions> {
class CloudDocStorageConnection extends SocketConnection {
  constructor(
    private readonly options: CloudDocStorageOptions,
-    private readonly onServerUpdate: ServerEventsMap['space:broadcast-doc-update']
+    private readonly onServerUpdate: ServerEventsMap['space:broadcast-doc-update'],
+    private readonly onServerUpdates: ServerEventsMap['space:broadcast-doc-updates']
  ) {
    super(options.serverBaseUrl, options.isSelfHosted);
  }
@@ -210,6 +232,7 @@ class CloudDocStorageConnection extends SocketConnection {
    }

    socket.on('space:broadcast-doc-update', this.onServerUpdate);
+    socket.on('space:broadcast-doc-updates', this.onServerUpdates);

    return { socket, disconnect };
  } catch (e) {
@@ -230,6 +253,7 @@ class CloudDocStorageConnection extends SocketConnection {
      spaceId: this.options.id,
    });
    socket.off('space:broadcast-doc-update', this.onServerUpdate);
+    socket.off('space:broadcast-doc-updates', this.onServerUpdates);
    super.doDisconnect({ socket, disconnect });
  }
@@ -30,6 +30,15 @@ interface ServerEvents {
    timestamp: number;
    editor: string;
  };
+  'space:broadcast-doc-updates': {
+    spaceType: string;
+    spaceId: string;
+    docId: string;
+    updates: string[];
+    timestamp: number;
+    editor?: string;
+    compressed?: boolean;
+  };

  'space:collect-awareness': {
    spaceType: string;
@@ -124,33 +133,42 @@ export type ClientEventsMap = {

export type Socket = SocketIO<ServerEventsMap, ClientEventsMap>;

-export function uint8ArrayToBase64(array: Uint8Array): Promise<string> {
-  return new Promise<string>(resolve => {
-    // Create a blob from the Uint8Array
-    const blob = new Blob([array]);
-
-    const reader = new FileReader();
-    reader.onload = function () {
-      const dataUrl = reader.result as string | null;
-      if (!dataUrl) {
-        resolve('');
-        return;
-      }
-      // The result includes the `data:` URL prefix and the MIME type. We only want the Base64 data
-      const base64 = dataUrl.split(',')[1];
-      resolve(base64);
-    };
-
-    reader.readAsDataURL(blob);
-  });
+type BufferConstructorLike = {
+  from(
+    data: Uint8Array | string,
+    encoding?: string
+  ): Uint8Array & {
+    toString(encoding: string): string;
+  };
+};
+
+const BufferCtor = (globalThis as { Buffer?: BufferConstructorLike }).Buffer;
+const CHUNK_SIZE = 0x8000;
+
+export async function uint8ArrayToBase64(array: Uint8Array): Promise<string> {
+  if (BufferCtor) {
+    return BufferCtor.from(array).toString('base64');
+  }
+
+  let binary = '';
+  for (let i = 0; i < array.length; i += CHUNK_SIZE) {
+    const chunk = array.subarray(i, i + CHUNK_SIZE);
+    binary += String.fromCharCode(...chunk);
+  }
+  return btoa(binary);
}

export function base64ToUint8Array(base64: string) {
+  if (BufferCtor) {
+    return new Uint8Array(BufferCtor.from(base64, 'base64'));
+  }
+
  const binaryString = atob(base64);
-  const binaryArray = [...binaryString].map(function (char) {
-    return char.charCodeAt(0);
-  });
-  return new Uint8Array(binaryArray);
+  const bytes = new Uint8Array(binaryString.length);
+  for (let i = 0; i < binaryString.length; i++) {
+    bytes[i] = binaryString.charCodeAt(i);
+  }
+  return bytes;
}

let authMethod:
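A quick usage sketch of the rewritten helpers; `AQID` is the base64 encoding of bytes `[1, 2, 3]`, the same fixture the new specs use (the import path is assumed for illustration):

```ts
import { base64ToUint8Array, uint8ArrayToBase64 } from './socket'; // path assumed

const bytes = new Uint8Array([1, 2, 3]);
// Uses the Buffer fast path in Node, or the chunked btoa fallback in browsers.
const encoded = await uint8ArrayToBase64(bytes); // 'AQID'
const decoded = base64ToUint8Array(encoded); // Uint8Array [1, 2, 3]
```

Chunking the browser fallback by `CHUNK_SIZE` (32 KiB) avoids blowing the argument-count limit of `String.fromCharCode(...chunk)` on large payloads, which is also why the benchmark exercises a 256 KiB buffer.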
packages/common/s3-compat/package.json (new file, 18 lines)
@@ -0,0 +1,18 @@
{
  "name": "@affine/s3-compat",
  "private": true,
  "type": "module",
  "exports": {
    ".": "./src/index.ts"
  },
  "dependencies": {
    "aws4": "^1.13.2",
    "fast-xml-parser": "^5.3.4",
    "s3mini": "^0.9.1"
  },
  "devDependencies": {
    "@types/aws4": "^1.11.6",
    "vitest": "^3.2.4"
  },
  "version": "0.26.0"
}
529
packages/common/s3-compat/src/index.ts
Normal file
529
packages/common/s3-compat/src/index.ts
Normal file
@@ -0,0 +1,529 @@
import { Buffer } from 'node:buffer';
import { stringify as stringifyQuery } from 'node:querystring';
import { Readable } from 'node:stream';

import aws4 from 'aws4';
import { XMLParser } from 'fast-xml-parser';
import { S3mini, sanitizeETag } from 's3mini';

export type S3CompatCredentials = {
  accessKeyId: string;
  secretAccessKey: string;
  sessionToken?: string;
};

export type S3CompatConfig = {
  endpoint: string;
  region: string;
  bucket: string;
  forcePathStyle?: boolean;
  requestTimeoutMs?: number;
  minPartSize?: number;
  presign?: {
    expiresInSeconds: number;
    signContentTypeForPut?: boolean;
  };
};

export type PresignedResult = {
  url: string;
  headers?: Record<string, string>;
  expiresAt: Date;
};

export type ListPartItem = { partNumber: number; etag: string };

export type ListObjectsItem = {
  key: string;
  lastModified: Date;
  contentLength: number;
};

export interface S3CompatClient {
  putObject(
    key: string,
    body: Blob | Buffer | Uint8Array | ReadableStream | Readable,
    meta?: { contentType?: string; contentLength?: number }
  ): Promise<void>;
  getObjectResponse(key: string): Promise<Response | null>;
  headObject(key: string): Promise<
    | {
        contentType?: string;
        contentLength?: number;
        lastModified?: Date;
        checksumCRC32?: string;
      }
    | undefined
  >;
  deleteObject(key: string): Promise<void>;
  listObjectsV2(prefix?: string): Promise<ListObjectsItem[]>;

  createMultipartUpload(
    key: string,
    meta?: { contentType?: string }
  ): Promise<{ uploadId: string }>;
  uploadPart(
    key: string,
    uploadId: string,
    partNumber: number,
    body: Blob | Buffer | Uint8Array | ReadableStream | Readable,
    meta?: { contentLength?: number }
  ): Promise<{ etag: string }>;
  listParts(key: string, uploadId: string): Promise<ListPartItem[] | undefined>;
  completeMultipartUpload(
    key: string,
    uploadId: string,
    parts: ListPartItem[]
  ): Promise<void>;
  abortMultipartUpload(key: string, uploadId: string): Promise<void>;

  presignGetObject(key: string): Promise<PresignedResult>;
  presignPutObject(
    key: string,
    meta?: { contentType?: string }
  ): Promise<PresignedResult>;
  presignUploadPart(
    key: string,
    uploadId: string,
    partNumber: number
  ): Promise<PresignedResult>;
}

export type ParsedListParts = {
  parts: ListPartItem[];
  isTruncated: boolean;
  nextPartNumberMarker?: string;
};

const listPartsParser = new XMLParser({
  ignoreAttributes: false,
  parseTagValue: false,
  trimValues: true,
});

function asArray<T>(value: T | T[] | undefined): T[] {
  if (!value) return [];
  return Array.isArray(value) ? value : [value];
}

function toBoolean(value: unknown): boolean {
  if (typeof value === 'boolean') return value;
  if (typeof value === 'string') return value.toLowerCase() === 'true';
  return false;
}

function joinPath(basePath: string, suffix: string) {
  const trimmedBase = basePath.endsWith('/') ? basePath.slice(0, -1) : basePath;
  const trimmedSuffix = suffix.startsWith('/') ? suffix.slice(1) : suffix;
  if (!trimmedBase) {
    return `/${trimmedSuffix}`;
  }
  if (!trimmedSuffix) {
    return trimmedBase;
  }
  return `${trimmedBase}/${trimmedSuffix}`;
}

function encodeKey(key: string) {
  return key.split('/').map(encodeURIComponent).join('/');
}

function buildQuery(params: Record<string, string | number | undefined>) {
  const entries = Object.entries(params).filter(
    ([, value]) => value !== undefined
  );
  if (entries.length === 0) return '';
  return stringifyQuery(
    Object.fromEntries(entries.map(([key, value]) => [key, String(value)]))
  );
}

function detectErrorCode(xml: string): string | undefined {
  const parsed = listPartsParser.parse(xml);
  if (!parsed || typeof parsed !== 'object') return undefined;
  const error = (parsed as any).Error;
  if (!error || typeof error !== 'object') return undefined;
  const code = error.Code;
  return typeof code === 'string' ? code : undefined;
}

export function parseListPartsXml(xml: string): ParsedListParts {
  const parsed = listPartsParser.parse(xml);
  const root =
    parsed?.ListPartsResult ??
    parsed?.ListPartsResult?.ListPartsResult ??
    parsed?.ListPartsResult;
  const result = root && typeof root === 'object' ? root : parsed;
  const partsNode = result?.Part;

  const parts = asArray(partsNode)
    .map((part: any) => {
      const partNumber = Number(part?.PartNumber);
      const etag =
        typeof part?.ETag === 'string' ? sanitizeETag(part.ETag) : '';
      if (!partNumber || !etag) return undefined;
      return { partNumber, etag } satisfies ListPartItem;
    })
    .filter((part): part is ListPartItem => !!part);

  const isTruncated = toBoolean(result?.IsTruncated);
  const nextPartNumberMarker =
    typeof result?.NextPartNumberMarker === 'string'
      ? result?.NextPartNumberMarker
      : result?.NextPartNumberMarker !== undefined
        ? String(result?.NextPartNumberMarker)
        : undefined;

  return { parts, isTruncated, nextPartNumberMarker };
}
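
// Illustrative sketch (not part of the committed file): feeding a minimal
// ListParts payload through parseListPartsXml. The XML shape follows the S3
// ListParts response; sanitizeETag strips the surrounding quotes.
//
//   const parsed = parseListPartsXml(
//     '<ListPartsResult><IsTruncated>true</IsTruncated>' +
//       '<NextPartNumberMarker>2</NextPartNumberMarker>' +
//       '<Part><PartNumber>1</PartNumber><ETag>"abc"</ETag></Part>' +
//       '<Part><PartNumber>2</PartNumber><ETag>"def"</ETag></Part>' +
//       '</ListPartsResult>'
//   );
//   // parsed => {
//   //   parts: [{ partNumber: 1, etag: 'abc' }, { partNumber: 2, etag: 'def' }],
//   //   isTruncated: true,
//   //   nextPartNumberMarker: '2',
//   // }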

function buildEndpoint(config: S3CompatConfig) {
  const url = new URL(config.endpoint);
  if (config.forcePathStyle) {
    const segments = url.pathname.split('/').filter(Boolean);
    if (segments[0] !== config.bucket) {
      url.pathname = joinPath(url.pathname, config.bucket);
    }
    return url;
  }

  const pathSegments = url.pathname.split('/').filter(Boolean);
  const hostHasBucket = url.hostname.startsWith(`${config.bucket}.`);
  const pathHasBucket = pathSegments[0] === config.bucket;
  if (!hostHasBucket && !pathHasBucket) {
    url.hostname = `${config.bucket}.${url.hostname}`;
  }
  return url;
}
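
// Illustrative sketch (not part of the committed file) of the two addressing
// modes buildEndpoint produces for a hypothetical bucket "assets":
//
//   forcePathStyle: true  (path-style)
//     https://s3.us-east-1.amazonaws.com -> https://s3.us-east-1.amazonaws.com/assets
//   forcePathStyle: false (virtual-hosted style)
//     https://s3.us-east-1.amazonaws.com -> https://assets.s3.us-east-1.amazonaws.com
//
// Endpoints that already carry the bucket in the host or path are left as-is.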

function shouldUseDuplex(init: RequestInit | undefined) {
  if (!init?.body) return false;
  if (typeof init.body === 'string') return false;
  if (init.body instanceof ArrayBuffer) return false;
  if (init.body instanceof Uint8Array) return false;
  if (typeof Blob !== 'undefined' && init.body instanceof Blob) return false;
  return true;
}

export class S3Compat implements S3CompatClient {
  private readonly client: S3mini;
  private readonly endpoint: URL;
  private readonly basePath: string;
  private readonly region: string;
  private readonly credentials: S3CompatCredentials;
  private readonly presignConfig: {
    expiresInSeconds: number;
    signContentTypeForPut: boolean;
  };
  private readonly fetchImpl: typeof fetch;

  constructor(config: S3CompatConfig, credentials: S3CompatCredentials) {
    this.endpoint = buildEndpoint(config);
    this.basePath =
      this.endpoint.pathname === '/' ? '' : this.endpoint.pathname;
    this.region = config.region;
    this.credentials = credentials;
    this.presignConfig = {
      expiresInSeconds: config.presign?.expiresInSeconds ?? 60 * 60,
      signContentTypeForPut: config.presign?.signContentTypeForPut ?? true,
    };

    const fetchImpl = globalThis.fetch.bind(globalThis);
    this.fetchImpl = (input, init) => {
      if (shouldUseDuplex(init)) {
        return fetchImpl(input, { ...init, duplex: 'half' } as RequestInit);
      }
      return fetchImpl(input, init);
    };

    this.client = new S3mini({
      accessKeyId: credentials.accessKeyId,
      secretAccessKey: credentials.secretAccessKey,
      endpoint: this.endpoint.toString(),
      region: config.region,
      requestAbortTimeout: config.requestTimeoutMs,
      minPartSize: config.minPartSize,
      fetch: this.fetchImpl,
    });
  }

  static fromConfig(config: S3CompatConfig, credentials: S3CompatCredentials) {
    return new S3Compat(config, credentials);
  }

  private buildObjectPath(key: string) {
    const encodedKey = encodeKey(key);
    return joinPath(this.basePath, encodedKey);
  }

  private async signedFetch(
    method: string,
    key: string,
    query?: Record<string, string | number | undefined>,
    headers?: Record<string, string>
  ) {
    const path = this.buildObjectPath(key);
    const queryString = query ? buildQuery(query) : '';
    const requestPath = queryString ? `${path}?${queryString}` : path;
    const signed = aws4.sign(
      {
        method,
        service: 's3',
        region: this.region,
        host: this.endpoint.host,
        path: requestPath,
        headers: headers ?? {},
      },
      this.credentials
    );

    const signedHeaders = Object.fromEntries(
      Object.entries(signed.headers ?? {}).map(([key, value]) => [
        key,
        String(value),
      ])
    );

    const url = `${this.endpoint.origin}${signed.path}`;
    return this.fetchImpl(url, { method, headers: signedHeaders });
  }

  private presign(
    method: string,
    key: string,
    query?: Record<string, string | number | undefined>,
    headers?: Record<string, string>
  ): PresignedResult {
    const expiresInSeconds = this.presignConfig.expiresInSeconds;
    const path = this.buildObjectPath(key);
    const queryString = buildQuery({
      ...(query ?? {}),
      'X-Amz-Expires': expiresInSeconds,
    });
    const requestPath = queryString ? `${path}?${queryString}` : path;
    const signed = aws4.sign(
      {
        method,
        service: 's3',
        region: this.region,
        host: this.endpoint.host,
        path: requestPath,
        headers: headers ?? {},
        signQuery: true,
      },
      this.credentials
    );

    return {
      url: `${this.endpoint.origin}${signed.path}`,
      headers,
      expiresAt: new Date(Date.now() + expiresInSeconds * 1000),
    };
  }
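
  // Illustrative sketch (not part of the committed file): with signQuery set,
  // aws4 moves the signature into the query string, so a presigned GET looks
  // roughly like
  //
  //   https://assets.s3.us-east-1.amazonaws.com/some/key
  //     ?X-Amz-Expires=3600&X-Amz-Algorithm=AWS4-HMAC-SHA256
  //     &X-Amz-Credential=...&X-Amz-Date=...&X-Amz-SignedHeaders=host
  //     &X-Amz-Signature=...
  //
  // and can be fetched by any client until expiresAt, with no extra headers.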

  async putObject(
    key: string,
    body: Blob | Buffer | Uint8Array | ReadableStream | Readable,
    meta?: { contentType?: string; contentLength?: number }
  ): Promise<void> {
    const res = await this.client.putObject(
      key,
      body as any,
      meta?.contentType,
      undefined,
      undefined,
      meta?.contentLength
    );
    if (!res.ok) {
      throw new Error(`Failed to put object: ${res.status}`);
    }
  }

  async getObjectResponse(key: string) {
    return this.client.getObjectResponse(key);
  }

  async headObject(key: string) {
    const res = await this.signedFetch('HEAD', key);
    if (res.status === 404) {
      return undefined;
    }

    if (!res.ok) {
      const errorBody = await res.text();
      const errorCode = detectErrorCode(errorBody);
      if (errorCode === 'NoSuchKey' || errorCode === 'NotFound') {
        return undefined;
      }
      throw new Error(`Failed to head object: ${res.status}`);
    }

    const contentLengthHeader = res.headers.get('content-length');
    const contentLength = contentLengthHeader
      ? Number(contentLengthHeader)
      : undefined;
    const contentType = res.headers.get('content-type') ?? undefined;
    const lastModifiedHeader = res.headers.get('last-modified');
    const lastModified = lastModifiedHeader
      ? new Date(lastModifiedHeader)
      : undefined;
    const checksumCRC32 = res.headers.get('x-amz-checksum-crc32') ?? undefined;

    return {
      contentType,
      contentLength,
      lastModified,
      checksumCRC32,
    };
  }

  async deleteObject(key: string): Promise<void> {
    await this.client.deleteObject(key);
  }

  async listObjectsV2(prefix?: string): Promise<ListObjectsItem[]> {
    const results: ListObjectsItem[] = [];
    let continuationToken: string | undefined;
    do {
      const page = await this.client.listObjectsPaged(
        '/',
        prefix ?? '',
        1000,
        continuationToken
      );
      if (!page || !page.objects) {
        break;
      }
      for (const item of page.objects) {
        results.push({
          key: item.Key,
          lastModified: item.LastModified,
          contentLength: item.Size,
        });
      }
      continuationToken = page.nextContinuationToken;
    } while (continuationToken);

    return results;
  }

  async createMultipartUpload(
    key: string,
    meta?: { contentType?: string }
  ): Promise<{ uploadId: string }> {
    const uploadId = await this.client.getMultipartUploadId(
      key,
      meta?.contentType
    );
    return { uploadId };
  }

  async uploadPart(
    key: string,
    uploadId: string,
    partNumber: number,
    body: Blob | Buffer | Uint8Array | ReadableStream | Readable,
    meta?: { contentLength?: number }
  ): Promise<{ etag: string }> {
    const additionalHeaders = meta?.contentLength
      ? { 'Content-Length': String(meta.contentLength) }
      : undefined;
    const part = await this.client.uploadPart(
      key,
      uploadId,
      body as any,
      partNumber,
      {},
      undefined,
      additionalHeaders
    );
    return { etag: part.etag };
  }

  async listParts(
    key: string,
    uploadId: string
  ): Promise<ListPartItem[] | undefined> {
    const parts: ListPartItem[] = [];
    let partNumberMarker: string | undefined;

    while (true) {
      const res = await this.signedFetch('GET', key, {
        uploadId,
        'part-number-marker': partNumberMarker,
      });

      if (res.status === 404) {
        return undefined;
      }

      const body = await res.text();
      if (!res.ok) {
        const errorCode = detectErrorCode(body);
        if (errorCode === 'NoSuchUpload' || errorCode === 'NotFound') {
          return undefined;
        }
        throw new Error(`Failed to list multipart upload parts: ${res.status}`);
      }

      const parsed = parseListPartsXml(body);
      parts.push(...parsed.parts);

      if (!parsed.isTruncated || !parsed.nextPartNumberMarker) {
        break;
      }

      partNumberMarker = parsed.nextPartNumberMarker;
    }

    return parts;
  }

  async completeMultipartUpload(
    key: string,
    uploadId: string,
    parts: ListPartItem[]
  ): Promise<void> {
    await this.client.completeMultipartUpload(key, uploadId, parts);
  }

  async abortMultipartUpload(key: string, uploadId: string): Promise<void> {
    await this.client.abortMultipartUpload(key, uploadId);
  }

  async presignGetObject(key: string): Promise<PresignedResult> {
    return this.presign('GET', key);
  }

  async presignPutObject(
    key: string,
    meta?: { contentType?: string }
  ): Promise<PresignedResult> {
    const contentType = meta?.contentType ?? 'application/octet-stream';
    const signContentType = this.presignConfig.signContentTypeForPut ?? true;
    const headers = signContentType
      ? { 'Content-Type': contentType }
      : undefined;
    const result = this.presign('PUT', key, undefined, headers);

    return {
      ...result,
      headers: headers ? { 'Content-Type': contentType } : undefined,
    };
  }

  async presignUploadPart(
    key: string,
    uploadId: string,
    partNumber: number
  ): Promise<PresignedResult> {
    return this.presign('PUT', key, { uploadId, partNumber });
  }
}

export function createS3CompatClient(
  config: S3CompatConfig,
  credentials: S3CompatCredentials
) {
  return new S3Compat(config, credentials);
}
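
A minimal usage sketch of the new client (illustrative only; the endpoint,
bucket, and credential values below are placeholders, not part of the
commit):

import { createS3CompatClient } from '@affine/s3-compat';

const client = createS3CompatClient(
  {
    endpoint: 'https://s3.us-east-1.amazonaws.com', // placeholder endpoint
    region: 'us-east-1',
    bucket: 'example-bucket', // hypothetical bucket name
    presign: { expiresInSeconds: 900 },
  },
  { accessKeyId: '<access-key>', secretAccessKey: '<secret-key>' } // placeholders
);

// Simple upload, then hand out a time-limited download link.
await client.putObject('docs/hello.txt', Buffer.from('hello'), {
  contentType: 'text/plain',
  contentLength: 5,
});
const { url, expiresAt } = await client.presignGetObject('docs/hello.txt');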
packages/common/s3-compat/tsconfig.json
@@ -0,0 +1,11 @@
{
  "extends": "../../../tsconfig.node.json",
  "compilerOptions": {
    "lib": ["ESNext", "DOM", "DOM.Iterable"],
    "rootDir": "./src",
    "outDir": "./dist",
    "tsBuildInfoFile": "./dist/tsconfig.tsbuildinfo"
  },
  "include": ["./src"],
  "references": []
}
@@ -112,7 +112,7 @@ export const KNOWN_CONFIG_GROUPS = [
      key: 'blob.storage',
      sub: 'config',
      type: 'JSON',
      desc: 'The config passed directly to the storage provider(e.g. aws-sdk)',
      desc: 'The S3 compatible config for the storage provider (endpoint/region/credentials).',
    },
    {
      key: 'avatar.storage',
@@ -131,7 +131,7 @@ export const KNOWN_CONFIG_GROUPS = [
      key: 'avatar.storage',
      sub: 'config',
      type: 'JSON',
      desc: 'The config passed directly to the storage provider(e.g. aws-sdk)',
      desc: 'The S3 compatible config for the storage provider (endpoint/region/credentials).',
    },
    {
      key: 'avatar.publicPath',
@@ -175,7 +175,7 @@ export const KNOWN_CONFIG_GROUPS = [
      key: 'storage',
      sub: 'config',
      type: 'JSON',
      desc: 'The config passed directly to the storage provider(e.g. aws-sdk)',
      desc: 'The S3 compatible config for the storage provider (endpoint/region/credentials).',
    },
  ],
} as ConfigGroup<'copilot'>,
@@ -0,0 +1,20 @@
import { describe, expect, test } from 'vitest';

import { base64ToUint8Array, uint8ArrayToBase64 } from '../base64';

function makeSample(size: number) {
  const data = new Uint8Array(size);
  for (let i = 0; i < size; i++) {
    data[i] = (i * 13) % 251;
  }
  return data;
}

describe('base64 helpers', () => {
  test('roundtrip preserves data', async () => {
    const input = makeSample(2048);
    const encoded = await uint8ArrayToBase64(input);
    const decoded = base64ToUint8Array(encoded);
    expect(decoded).toEqual(input);
  });
});
@@ -1,28 +1,37 @@
export function uint8ArrayToBase64(array: Uint8Array): Promise<string> {
  return new Promise<string>(resolve => {
    // Create a blob from the Uint8Array
    const blob = new Blob([array]);

    const reader = new FileReader();
    reader.onload = function () {
      const dataUrl = reader.result as string | null;
      if (!dataUrl) {
        resolve('');
        return;
      }
      // The result includes the `data:` URL prefix and the MIME type. We only want the Base64 data
      const base64 = dataUrl.split(',')[1];
      resolve(base64);
type BufferConstructorLike = {
  from(
    data: Uint8Array | string,
    encoding?: string
  ): Uint8Array & {
    toString(encoding: string): string;
  };
};

    reader.readAsDataURL(blob);
  });
const BufferCtor = (globalThis as { Buffer?: BufferConstructorLike }).Buffer;
const CHUNK_SIZE = 0x8000;

export async function uint8ArrayToBase64(array: Uint8Array): Promise<string> {
  if (BufferCtor) {
    return BufferCtor.from(array).toString('base64');
  }

  let binary = '';
  for (let i = 0; i < array.length; i += CHUNK_SIZE) {
    const chunk = array.subarray(i, i + CHUNK_SIZE);
    binary += String.fromCharCode(...chunk);
  }
  return btoa(binary);
}

export function base64ToUint8Array(base64: string) {
  const binaryString = atob(base64);
  const binaryArray = [...binaryString].map(function (char) {
    return char.charCodeAt(0);
  });
  return new Uint8Array(binaryArray);
  if (BufferCtor) {
    return new Uint8Array(BufferCtor.from(base64, 'base64'));
  }

  const binaryString = atob(base64);
  const bytes = new Uint8Array(binaryString.length);
  for (let i = 0; i < binaryString.length; i++) {
    bytes[i] = binaryString.charCodeAt(i);
  }
  return bytes;
}
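
A minimal behavior sketch for the rewritten helpers (illustrative; the sample
bytes are arbitrary). When globalThis.Buffer exists (Node.js) the native
Buffer path runs; otherwise the chunked btoa fallback does, with CHUNK_SIZE
keeping the String.fromCharCode argument count below engine limits:

const bytes = new Uint8Array([104, 101, 108, 108, 111]); // "hello"
const encoded = await uint8ArrayToBase64(bytes); // => 'aGVsbG8='
const decoded = base64ToUint8Array(encoded); // => Uint8Array [104, 101, 108, 108, 111]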
@@ -12,6 +12,7 @@ name = "uniffi-bindgen"
path = "uniffi-bindgen.rs"

[features]
default = ["use-as-lib"]
use-as-lib = ["affine_nbstore/use-as-lib"]

[dependencies]
@@ -89,11 +89,57 @@ impl TryFrom<DocUpdate> for affine_nbstore::DocUpdate {
            timestamp: chrono::DateTime::<chrono::Utc>::from_timestamp_millis(update.timestamp)
                .ok_or(UniffiError::TimestampDecodingError)?
                .naive_utc(),
            bin: update.bin.into(),
            bin: Into::<Data>::into(
                base64_simd::STANDARD
                    .decode_to_vec(update.bin)
                    .map_err(|e| UniffiError::Base64DecodingError(e.to_string()))?,
            ),
        })
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn doc_update_roundtrip_base64() {
        let timestamp = chrono::DateTime::<chrono::Utc>::from_timestamp_millis(1_700_000_000_000)
            .unwrap()
            .naive_utc();
        let original = affine_nbstore::DocUpdate {
            doc_id: "doc-1".to_string(),
            timestamp,
            bin: vec![1, 2, 3, 4, 5],
        };

        let encoded: DocUpdate = original.into();
        let decoded = affine_nbstore::DocUpdate::try_from(encoded).unwrap();

        assert_eq!(decoded.doc_id, "doc-1");
        assert_eq!(decoded.timestamp, timestamp);
        assert_eq!(decoded.bin, vec![1, 2, 3, 4, 5]);
    }

    #[test]
    fn doc_update_rejects_invalid_base64() {
        let update = DocUpdate {
            doc_id: "doc-2".to_string(),
            timestamp: 0,
            bin: "not-base64!!".to_string(),
        };

        let err = match affine_nbstore::DocUpdate::try_from(update) {
            Ok(_) => panic!("expected base64 decode error"),
            Err(err) => err,
        };
        match err {
            UniffiError::Base64DecodingError(_) => {}
            other => panic!("unexpected error: {other:?}"),
        }
    }
}

#[derive(uniffi::Record)]
pub struct DocClock {
    pub doc_id: String,
@@ -9,7 +9,7 @@ crate-type = ["cdylib", "rlib"]
[dependencies]
affine_common = { workspace = true, features = ["hashcash"] }
affine_media_capture = { path = "./media_capture" }
affine_nbstore = { path = "./nbstore" }
affine_nbstore = { workspace = true, features = ["napi"] }
affine_sqlite_v1 = { path = "./sqlite_v1" }
napi = { workspace = true }
napi-derive = { workspace = true }
@@ -25,6 +25,12 @@ sqlx = { workspace = true, default-features = false, features = [
thiserror = { workspace = true }
tokio = { workspace = true, features = ["full"] }

[target.'cfg(not(target_os = "linux"))'.dependencies]
mimalloc = { workspace = true }

[target.'cfg(all(target_os = "linux", not(target_arch = "arm")))'.dependencies]
mimalloc = { workspace = true, features = ["local_dynamic_tls"] }

[dev-dependencies]
chrono = { workspace = true }
serde_json = { workspace = true }
packages/frontend/native/index.d.ts (vendored)
@@ -19,10 +19,10 @@ export declare class ApplicationStateChangedSubscriber {
}

export declare class AudioCaptureSession {
  stop(): void
  get sampleRate(): number
  get channels(): number
  get actualSampleRate(): number
  stop(): void
}

export declare class ShareableContent {
@@ -31,9 +31,9 @@ export declare class ShareableContent {
  constructor()
  static applications(): Array<ApplicationInfo>
  static applicationWithProcessId(processId: number): ApplicationInfo | null
  static isUsingMicrophone(processId: number): boolean
  static tapAudio(processId: number, audioStreamCallback: ((err: Error | null, arg: Float32Array) => void)): AudioCaptureSession
  static tapGlobalAudio(excludedProcesses: Array<ApplicationInfo> | undefined | null, audioStreamCallback: ((err: Error | null, arg: Float32Array) => void)): AudioCaptureSession
  static isUsingMicrophone(processId: number): boolean
}

export declare function decodeAudio(buf: Uint8Array, destSampleRate?: number | undefined | null, filename?: string | undefined | null, signal?: AbortSignal | undefined | null): Promise<Float32Array>
@@ -7,6 +7,8 @@ version = "0.0.0"
crate-type = ["cdylib", "rlib"]

[features]
default = []
napi = ["affine_common/napi"]
use-as-lib = ["napi-derive/noop", "napi/noop"]

[dependencies]
@@ -8,6 +8,8 @@ pub mod indexer_sync;
pub mod pool;
pub mod storage;

#[cfg(not(feature = "use-as-lib"))]
use affine_common::napi_utils::to_napi_error;
use chrono::NaiveDateTime;
use napi::bindgen_prelude::*;
use napi_derive::napi;
@@ -23,7 +25,7 @@ type Result<T> = napi::Result<T>;
#[cfg(not(feature = "use-as-lib"))]
impl From<error::Error> for napi::Error {
    fn from(err: error::Error) -> Self {
        napi::Error::new(napi::Status::GenericFailure, err.to_string())
        to_napi_error(err, napi::Status::GenericFailure)
    }
}

@@ -491,3 +493,15 @@ impl DocStorage {
        Ok(())
    }
}

#[cfg(all(test, not(feature = "use-as-lib")))]
mod tests {
    use super::error;

    #[test]
    fn napi_error_mapping_preserves_reason() {
        let err: napi::Error = error::Error::InvalidOperation.into();
        assert_eq!(err.status, napi::Status::GenericFailure);
        assert!(err.reason.contains("Invalid operation"));
    }
}
@@ -64,3 +64,27 @@ impl Task for AsyncMintChallengeResponse {
pub fn mint_challenge_response(resource: String, bits: Option<u32>) -> AsyncTask<AsyncMintChallengeResponse> {
    AsyncTask::new(AsyncMintChallengeResponse { bits, resource })
}

#[cfg(test)]
mod tests {
    use napi::Task;

    use super::*;

    #[test]
    fn hashcash_roundtrip() {
        let resource = "test-resource".to_string();
        let mut mint = AsyncMintChallengeResponse {
            bits: Some(8),
            resource: resource.clone(),
        };
        let stamp = mint.compute().unwrap();

        let mut verify = AsyncVerifyChallengeResponse {
            response: stamp,
            bits: 8,
            resource,
        };
        assert!(verify.compute().unwrap());
    }
}
@@ -1,5 +1,9 @@
pub mod hashcash;

#[cfg(not(target_arch = "arm"))]
#[global_allocator]
static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;

#[allow(unused_imports)]
pub use affine_media_capture::*;
pub use affine_nbstore::*;
@@ -173,6 +173,21 @@ test.describe('frame clipboard', () => {
});

test.describe('pasting URLs', () => {
  test.beforeEach(async ({ page }) => {
    await page.route(
      'https://affine-worker.toeverything.workers.dev/api/worker/link-preview',
      async route => {
        await route.fulfill({
          json: {},
          headers: {
            'Access-Control-Allow-Origin': '*',
            'Content-Type': 'application/json',
          },
        });
      }
    );
  });

  test('pasting github pr url', async ({ page }) => {
    await commonSetup(page);
    await waitNextFrame(page);
@@ -16,7 +16,7 @@
  },
  "dependencies": {
    "@affine-tools/utils": "workspace:*",
    "@aws-sdk/client-s3": "^3.948.0",
    "@affine/s3-compat": "workspace:*",
    "@napi-rs/simple-git": "^0.1.22",
    "@perfsee/webpack": "^1.13.0",
    "@sentry/webpack-plugin": "^3.0.0",
@@ -1,8 +1,7 @@
import { readFile } from 'node:fs/promises';
import { join } from 'node:path';

import type { PutObjectCommandInput } from '@aws-sdk/client-s3';
import { PutObjectCommand, S3Client } from '@aws-sdk/client-s3';
import { createS3CompatClient } from '@affine/s3-compat';
import { lookup } from 'mime-types';
import type { Compiler, WebpackPluginInstance } from 'webpack';

@@ -11,16 +10,18 @@ export const R2_BUCKET =
  (process.env.BUILD_TYPE === 'canary' ? 'assets-dev' : 'assets-prod');

export class WebpackS3Plugin implements WebpackPluginInstance {
  private readonly s3 = new S3Client({
  private readonly s3 = createS3CompatClient(
    {
      region: 'auto',
      bucket: R2_BUCKET,
      forcePathStyle: true,
      endpoint: `https://${process.env.R2_ACCOUNT_ID}.r2.cloudflarestorage.com`,
      credentials: {
    },
    {
      accessKeyId: process.env.R2_ACCESS_KEY_ID!,
      secretAccessKey: process.env.R2_SECRET_ACCESS_KEY!,
    },
    requestChecksumCalculation: 'WHEN_REQUIRED',
    responseChecksumValidation: 'WHEN_REQUIRED',
  });
  }
);

  apply(compiler: Compiler) {
    compiler.hooks.assetEmitted.tapPromise(
@@ -31,16 +32,11 @@ export class WebpackS3Plugin implements WebpackPluginInstance {
        }
        const assetPath = join(outputPath, asset);
        const assetSource = await readFile(assetPath);
        const putObjectCommandOptions: PutObjectCommandInput = {
          Body: assetSource,
          Bucket: R2_BUCKET,
          Key: asset,
        };
        const contentType = lookup(asset);
        if (contentType) {
          putObjectCommandOptions.ContentType = contentType;
        }
        await this.s3.send(new PutObjectCommand(putObjectCommandOptions));
        const contentType = lookup(asset) || undefined;
        await this.s3.putObject(asset, assetSource, {
          contentType,
          contentLength: assetSource.byteLength,
        });
      }
    );
  }
@@ -6,5 +6,8 @@
    "tsBuildInfoFile": "./dist/tsconfig.tsbuildinfo"
  },
  "include": ["./src"],
  "references": [{ "path": "../utils" }]
  "references": [
    { "path": "../utils" },
    { "path": "../../packages/common/s3-compat" }
  ]
}
@@ -1166,6 +1166,7 @@ export const PackageList = [
    location: 'packages/backend/server',
    name: '@affine/server',
    workspaceDependencies: [
      'packages/common/s3-compat',
      'packages/backend/native',
      'tools/cli',
      'tools/utils',
@@ -1222,6 +1223,11 @@ export const PackageList = [
    name: '@affine/reader',
    workspaceDependencies: ['blocksuite/affine/all'],
  },
  {
    location: 'packages/common/s3-compat',
    name: '@affine/s3-compat',
    workspaceDependencies: [],
  },
  {
    location: 'packages/frontend/admin',
    name: '@affine/admin',
@@ -1462,7 +1468,7 @@ export const PackageList = [
  {
    location: 'tools/cli',
    name: '@affine-tools/cli',
    workspaceDependencies: ['tools/utils'],
    workspaceDependencies: ['tools/utils', 'packages/common/s3-compat'],
  },
  {
    location: 'tools/commitlint',
@@ -1580,6 +1586,7 @@ export type PackageName =
  | '@toeverything/infra'
  | '@affine/nbstore'
  | '@affine/reader'
  | '@affine/s3-compat'
  | '@affine/admin'
  | '@affine/android'
  | '@affine/electron'

@@ -132,6 +132,7 @@
  { "path": "./packages/common/infra" },
  { "path": "./packages/common/nbstore" },
  { "path": "./packages/common/reader" },
  { "path": "./packages/common/s3-compat" },
  { "path": "./packages/frontend/admin" },
  { "path": "./packages/frontend/apps/android" },
  { "path": "./packages/frontend/apps/electron" },