feat: multipart blob sync support (#14138)

<!-- This is an auto-generated comment: release notes by coderabbit.ai
-->
## Summary by CodeRabbit

* **New Features**
  * Flexible blob uploads: direct GraphQL, presigned, and multipart flows with per-part URLs, abort/complete operations, presigned proxy endpoints, and nightly cleanup of expired pending uploads (a client-flow sketch follows the commit metadata below).

* **API / Schema**
  * GraphQL additions: new types, mutations, an enum, and an error code to manage the upload lifecycle (create, complete, abort, get part URL).

* **Database**
  * New blob status enum and columns (`status`, `upload_id`); listing now defaults to completed blobs.

* **Localization**
  * Added user-facing message: "Blob is invalid."

* **Tests**
  * Expanded unit and end-to-end coverage for upload flows, proxy behavior, multipart uploads, and provider integrations.

<!-- end of auto-generated comment: release notes by coderabbit.ai -->
Authored by DarkSky on 2025-12-23 22:09:21 +08:00, committed by GitHub.
parent a9937e18b6 · commit 76524084d1 · 36 changed files with 2880 additions and 33 deletions
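
Before the file diffs, a minimal sketch of the client-side flow the summary describes. This is editorial, not part of the commit: it assumes a generic `gql` helper that can execute the generated documents from `@affine/graphql`, and `uploadBlob` with its parameters is an illustrative name, not code from this change.

import {
  BlobUploadMethod,
  completeBlobUploadMutation,
  createBlobUploadMutation,
  getBlobUploadPartUrlMutation,
} from '@affine/graphql';

// `gql` stands in for any client able to execute the generated documents.
type GqlFn = (opts: { query: unknown; variables?: unknown }) => Promise<any>;

async function uploadBlob(
  gql: GqlFn,
  workspaceId: string,
  key: string,
  data: Uint8Array,
  mime: string
) {
  // 1. Ask the server which upload method to use for this blob.
  const { createBlobUpload: init } = await gql({
    query: createBlobUploadMutation,
    variables: { workspaceId, key, size: data.byteLength, mime },
  });
  if (init.alreadyUploaded) return; // server already has this blob

  if (init.method === BlobUploadMethod.PRESIGNED) {
    // 2a. Single PUT to the presigned URL, then mark the blob completed.
    await fetch(init.uploadUrl, {
      method: 'PUT',
      headers: init.headers ?? undefined,
      body: data,
    });
    await gql({
      query: completeBlobUploadMutation,
      variables: { workspaceId, key },
    });
  } else if (init.method === BlobUploadMethod.MULTIPART) {
    // 2b. PUT each part to its own URL, collecting ETags for completion.
    const parts: { partNumber: number; etag: string }[] = [];
    const totalParts = Math.ceil(data.byteLength / init.partSize);
    for (let partNumber = 1; partNumber <= totalParts; partNumber++) {
      const { getBlobUploadPartUrl: part } = await gql({
        query: getBlobUploadPartUrlMutation,
        variables: { workspaceId, key, uploadId: init.uploadId, partNumber },
      });
      const res = await fetch(part.uploadUrl, {
        method: 'PUT',
        headers: part.headers ?? undefined,
        body: data.subarray(
          (partNumber - 1) * init.partSize,
          partNumber * init.partSize
        ),
      });
      parts.push({ partNumber, etag: res.headers.get('etag')! });
    }
    await gql({
      query: completeBlobUploadMutation,
      variables: { workspaceId, key, uploadId: init.uploadId, parts },
    });
  }
  // BlobUploadMethod.GRAPHQL: fall back to the existing setBlob mutation.
}

Error handling and the abort/fallback paths are left out here; the real client logic is in the CloudBlobStorage diff below.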

View File

@@ -0,0 +1,3 @@
mutation abortBlobUpload($workspaceId: String!, $key: String!, $uploadId: String!) {
  abortBlobUpload(workspaceId: $workspaceId, key: $key, uploadId: $uploadId)
}

View File

@@ -0,0 +1,3 @@
mutation completeBlobUpload($workspaceId: String!, $key: String!, $uploadId: String, $parts: [BlobUploadPartInput!]) {
  completeBlobUpload(workspaceId: $workspaceId, key: $key, uploadId: $uploadId, parts: $parts)
}
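
`uploadId` and `parts` are nullable here by design: the presigned flow completes a blob with only `workspaceId` and `key`, while the multipart flow must pass both. A fragment sketch of the two call shapes, reusing the `gql` helper assumed in the overview sketch above:

// Presigned flow: no uploadId or parts.
await gql({ query: completeBlobUploadMutation, variables: { workspaceId, key } });

// Multipart flow: pass the uploadId and the collected part ETags.
await gql({
  query: completeBlobUploadMutation,
  variables: { workspaceId, key, uploadId, parts: [{ partNumber: 1, etag }] },
});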

View File

@@ -0,0 +1,16 @@
mutation createBlobUpload($workspaceId: String!, $key: String!, $size: Int!, $mime: String!) {
  createBlobUpload(workspaceId: $workspaceId, key: $key, size: $size, mime: $mime) {
    method
    blobKey
    alreadyUploaded
    uploadUrl
    headers
    expiresAt
    uploadId
    partSize
    uploadedParts {
      partNumber
      etag
    }
  }
}
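
For orientation, a multipart initialization returned by this mutation might look like the following; every value is invented for this example, and only the field names come from the schema:

// Illustrative response for a ~12 MiB file (made-up values).
const response = {
  createBlobUpload: {
    method: 'MULTIPART',
    blobKey: 'blob-key',
    alreadyUploaded: false,
    uploadUrl: null,
    headers: null,
    expiresAt: '2025-12-24T00:00:00.000Z',
    uploadId: 'upload-1',
    partSize: 5 * 1024 * 1024,
    uploadedParts: [], // non-empty when resuming an interrupted upload
  },
};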

View File

@@ -0,0 +1,7 @@
mutation getBlobUploadPartUrl($workspaceId: String!, $key: String!, $uploadId: String!, $partNumber: Int!) {
  getBlobUploadPartUrl(workspaceId: $workspaceId, key: $key, uploadId: $uploadId, partNumber: $partNumber) {
    uploadUrl
    headers
    expiresAt
  }
}
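
Each part is then uploaded with a plain HTTP PUT to the returned `uploadUrl`, and the `ETag` response header becomes that part's `etag` in `completeBlobUpload`. A fragment sketch, assuming `part` holds this mutation's result and `chunk` the part's bytes:

const res = await fetch(part.uploadUrl, {
  method: 'PUT',
  headers: part.headers ?? undefined,
  body: chunk,
});
if (!res.ok) throw new Error(`Part upload failed with status ${res.status}`);
const etag = res.headers.get('etag'); // collected for completeBlobUpload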

View File

@@ -383,6 +383,65 @@ export const setBlobMutation = {
  file: true,
};

export const abortBlobUploadMutation = {
  id: 'abortBlobUploadMutation' as const,
  op: 'abortBlobUpload',
  query: `mutation abortBlobUpload($workspaceId: String!, $key: String!, $uploadId: String!) {
  abortBlobUpload(workspaceId: $workspaceId, key: $key, uploadId: $uploadId)
}`,
};

export const completeBlobUploadMutation = {
  id: 'completeBlobUploadMutation' as const,
  op: 'completeBlobUpload',
  query: `mutation completeBlobUpload($workspaceId: String!, $key: String!, $uploadId: String, $parts: [BlobUploadPartInput!]) {
  completeBlobUpload(
    workspaceId: $workspaceId
    key: $key
    uploadId: $uploadId
    parts: $parts
  )
}`,
};

export const createBlobUploadMutation = {
  id: 'createBlobUploadMutation' as const,
  op: 'createBlobUpload',
  query: `mutation createBlobUpload($workspaceId: String!, $key: String!, $size: Int!, $mime: String!) {
  createBlobUpload(workspaceId: $workspaceId, key: $key, size: $size, mime: $mime) {
    method
    blobKey
    alreadyUploaded
    uploadUrl
    headers
    expiresAt
    uploadId
    partSize
    uploadedParts {
      partNumber
      etag
    }
  }
}`,
};

export const getBlobUploadPartUrlMutation = {
  id: 'getBlobUploadPartUrlMutation' as const,
  op: 'getBlobUploadPartUrl',
  query: `mutation getBlobUploadPartUrl($workspaceId: String!, $key: String!, $uploadId: String!, $partNumber: Int!) {
  getBlobUploadPartUrl(
    workspaceId: $workspaceId
    key: $key
    uploadId: $uploadId
    partNumber: $partNumber
  ) {
    uploadUrl
    headers
    expiresAt
  }
}`,
};

export const cancelSubscriptionMutation = {
  id: 'cancelSubscriptionMutation' as const,
  op: 'cancelSubscription',

View File

@@ -137,6 +137,44 @@ export interface BlobNotFoundDataType {
  spaceId: Scalars['String']['output'];
}

export interface BlobUploadInit {
  __typename?: 'BlobUploadInit';
  alreadyUploaded: Maybe<Scalars['Boolean']['output']>;
  blobKey: Scalars['String']['output'];
  expiresAt: Maybe<Scalars['DateTime']['output']>;
  headers: Maybe<Scalars['JSONObject']['output']>;
  method: BlobUploadMethod;
  partSize: Maybe<Scalars['Int']['output']>;
  uploadId: Maybe<Scalars['String']['output']>;
  uploadUrl: Maybe<Scalars['String']['output']>;
  uploadedParts: Maybe<Array<BlobUploadedPart>>;
}

/** Blob upload method */
export enum BlobUploadMethod {
  GRAPHQL = 'GRAPHQL',
  MULTIPART = 'MULTIPART',
  PRESIGNED = 'PRESIGNED',
}

export interface BlobUploadPart {
  __typename?: 'BlobUploadPart';
  expiresAt: Maybe<Scalars['DateTime']['output']>;
  headers: Maybe<Scalars['JSONObject']['output']>;
  uploadUrl: Scalars['String']['output'];
}

export interface BlobUploadPartInput {
  etag: Scalars['String']['input'];
  partNumber: Scalars['Int']['input'];
}

export interface BlobUploadedPart {
  __typename?: 'BlobUploadedPart';
  etag: Scalars['String']['output'];
  partNumber: Scalars['Int']['output'];
}

export enum ChatHistoryOrder {
  asc = 'asc',
  desc = 'desc',
@@ -838,6 +876,7 @@ export enum ErrorNames {
  ALREADY_IN_SPACE = 'ALREADY_IN_SPACE',
  AUTHENTICATION_REQUIRED = 'AUTHENTICATION_REQUIRED',
  BAD_REQUEST = 'BAD_REQUEST',
  BLOB_INVALID = 'BLOB_INVALID',
  BLOB_NOT_FOUND = 'BLOB_NOT_FOUND',
  BLOB_QUOTA_EXCEEDED = 'BLOB_QUOTA_EXCEEDED',
  CANNOT_DELETE_ACCOUNT_WITH_OWNED_TEAM_WORKSPACE = 'CANNOT_DELETE_ACCOUNT_WITH_OWNED_TEAM_WORKSPACE',
@@ -1370,6 +1409,7 @@ export interface MissingOauthQueryParameterDataType {
export interface Mutation {
  __typename?: 'Mutation';
  abortBlobUpload: Scalars['Boolean']['output'];
  acceptInviteById: Scalars['Boolean']['output'];
  activateLicense: License;
  /** add a blob to context */
@@ -1392,6 +1432,8 @@ export interface Mutation {
  claimAudioTranscription: Maybe<TranscriptionResultType>;
  /** Cleanup sessions */
  cleanupCopilotSession: Array<Scalars['String']['output']>;
  completeBlobUpload: Scalars['String']['output'];
  createBlobUpload: BlobUploadInit;
  /** Create change password url */
  createChangePasswordUrl: Scalars['String']['output'];
  /** Create a subscription checkout link of stripe */
@@ -1430,6 +1472,7 @@ export interface Mutation {
  forkCopilotSession: Scalars['String']['output'];
  generateLicenseKey: Scalars['String']['output'];
  generateUserAccessToken: RevealedAccessToken;
  getBlobUploadPartUrl: BlobUploadPart;
  grantDocUserRoles: Scalars['Boolean']['output'];
  grantMember: Scalars['Boolean']['output'];
  /** import users */
@@ -1527,6 +1570,12 @@ export interface Mutation {
  verifyEmail: Scalars['Boolean']['output'];
}

export interface MutationAbortBlobUploadArgs {
  key: Scalars['String']['input'];
  uploadId: Scalars['String']['input'];
  workspaceId: Scalars['String']['input'];
}

export interface MutationAcceptInviteByIdArgs {
  inviteId: Scalars['String']['input'];
  sendAcceptMail?: InputMaybe<Scalars['Boolean']['input']>;
@@ -1599,6 +1648,20 @@ export interface MutationCleanupCopilotSessionArgs {
  options: DeleteSessionInput;
}

export interface MutationCompleteBlobUploadArgs {
  key: Scalars['String']['input'];
  parts?: InputMaybe<Array<BlobUploadPartInput>>;
  uploadId?: InputMaybe<Scalars['String']['input']>;
  workspaceId: Scalars['String']['input'];
}

export interface MutationCreateBlobUploadArgs {
  key: Scalars['String']['input'];
  mime: Scalars['String']['input'];
  size: Scalars['Int']['input'];
  workspaceId: Scalars['String']['input'];
}

export interface MutationCreateChangePasswordUrlArgs {
  callbackUrl: Scalars['String']['input'];
  userId: Scalars['String']['input'];
@@ -1693,6 +1756,13 @@ export interface MutationGenerateUserAccessTokenArgs {
  input: GenerateAccessTokenInput;
}

export interface MutationGetBlobUploadPartUrlArgs {
  key: Scalars['String']['input'];
  partNumber: Scalars['Int']['input'];
  uploadId: Scalars['String']['input'];
  workspaceId: Scalars['String']['input'];
}

export interface MutationGrantDocUserRolesArgs {
  input: GrantDocUserRolesInput;
}
@@ -3411,6 +3481,73 @@ export type SetBlobMutationVariables = Exact<{
export type SetBlobMutation = { __typename?: 'Mutation'; setBlob: string };

export type AbortBlobUploadMutationVariables = Exact<{
  workspaceId: Scalars['String']['input'];
  key: Scalars['String']['input'];
  uploadId: Scalars['String']['input'];
}>;

export type AbortBlobUploadMutation = {
  __typename?: 'Mutation';
  abortBlobUpload: boolean;
};

export type CompleteBlobUploadMutationVariables = Exact<{
  workspaceId: Scalars['String']['input'];
  key: Scalars['String']['input'];
  uploadId?: InputMaybe<Scalars['String']['input']>;
  parts?: InputMaybe<Array<BlobUploadPartInput> | BlobUploadPartInput>;
}>;

export type CompleteBlobUploadMutation = {
  __typename?: 'Mutation';
  completeBlobUpload: string;
};

export type CreateBlobUploadMutationVariables = Exact<{
  workspaceId: Scalars['String']['input'];
  key: Scalars['String']['input'];
  size: Scalars['Int']['input'];
  mime: Scalars['String']['input'];
}>;

export type CreateBlobUploadMutation = {
  __typename?: 'Mutation';
  createBlobUpload: {
    __typename?: 'BlobUploadInit';
    method: BlobUploadMethod;
    blobKey: string;
    alreadyUploaded: boolean | null;
    uploadUrl: string | null;
    headers: any | null;
    expiresAt: string | null;
    uploadId: string | null;
    partSize: number | null;
    uploadedParts: Array<{
      __typename?: 'BlobUploadedPart';
      partNumber: number;
      etag: string;
    }> | null;
  };
};

export type GetBlobUploadPartUrlMutationVariables = Exact<{
  workspaceId: Scalars['String']['input'];
  key: Scalars['String']['input'];
  uploadId: Scalars['String']['input'];
  partNumber: Scalars['Int']['input'];
}>;

export type GetBlobUploadPartUrlMutation = {
  __typename?: 'Mutation';
  getBlobUploadPartUrl: {
    __typename?: 'BlobUploadPart';
    uploadUrl: string;
    headers: any | null;
    expiresAt: string | null;
  };
};

export type CancelSubscriptionMutationVariables = Exact<{
  plan?: InputMaybe<SubscriptionPlan>;
  workspaceId?: InputMaybe<Scalars['String']['input']>;
@@ -6824,6 +6961,26 @@ export type Mutations =
      variables: SetBlobMutationVariables;
      response: SetBlobMutation;
    }
  | {
      name: 'abortBlobUploadMutation';
      variables: AbortBlobUploadMutationVariables;
      response: AbortBlobUploadMutation;
    }
  | {
      name: 'completeBlobUploadMutation';
      variables: CompleteBlobUploadMutationVariables;
      response: CompleteBlobUploadMutation;
    }
  | {
      name: 'createBlobUploadMutation';
      variables: CreateBlobUploadMutationVariables;
      response: CreateBlobUploadMutation;
    }
  | {
      name: 'getBlobUploadPartUrlMutation';
      variables: GetBlobUploadPartUrlMutationVariables;
      response: GetBlobUploadPartUrlMutation;
    }
  | {
      name: 'cancelSubscriptionMutation';
      variables: CancelSubscriptionMutationVariables;

View File

@@ -0,0 +1,162 @@
import {
  abortBlobUploadMutation,
  BlobUploadMethod,
  completeBlobUploadMutation,
  createBlobUploadMutation,
  getBlobUploadPartUrlMutation,
  setBlobMutation,
  workspaceBlobQuotaQuery,
} from '@affine/graphql';
import { afterEach, expect, test, vi } from 'vitest';

import { CloudBlobStorage } from '../impls/cloud/blob';

const quotaResponse = {
  workspace: {
    quota: {
      humanReadable: {
        blobLimit: '1 MB',
      },
      blobLimit: 1024 * 1024,
    },
  },
};

afterEach(() => {
  vi.restoreAllMocks();
  vi.unstubAllGlobals();
});

function createStorage() {
  return new CloudBlobStorage({
    serverBaseUrl: 'https://example.com',
    id: 'workspace-1',
  });
}

test('uses graphql upload when server returns GRAPHQL method', async () => {
  const storage = createStorage();
  const gqlMock = vi.fn(async ({ query }) => {
    if (query === workspaceBlobQuotaQuery) {
      return quotaResponse;
    }
    if (query === createBlobUploadMutation) {
      return {
        createBlobUpload: {
          method: BlobUploadMethod.GRAPHQL,
          blobKey: 'blob-key',
          alreadyUploaded: false,
        },
      };
    }
    if (query === setBlobMutation) {
      return { setBlob: 'blob-key' };
    }
    throw new Error('Unexpected query');
  });
  (storage.connection as any).gql = gqlMock;

  await storage.set({
    key: 'blob-key',
    data: new Uint8Array([1, 2, 3]),
    mime: 'text/plain',
  });

  const queries = gqlMock.mock.calls.map(call => call[0].query);
  expect(queries).toContain(createBlobUploadMutation);
  expect(queries).toContain(setBlobMutation);
});

test('falls back to graphql when presigned upload fails', async () => {
  const storage = createStorage();
  const gqlMock = vi.fn(async ({ query }) => {
    if (query === workspaceBlobQuotaQuery) {
      return quotaResponse;
    }
    if (query === createBlobUploadMutation) {
      return {
        createBlobUpload: {
          method: BlobUploadMethod.PRESIGNED,
          blobKey: 'blob-key',
          alreadyUploaded: false,
          uploadUrl: 'https://upload.example.com/blob',
        },
      };
    }
    if (query === setBlobMutation) {
      return { setBlob: 'blob-key' };
    }
    if (query === completeBlobUploadMutation) {
      return { completeBlobUpload: 'blob-key' };
    }
    throw new Error('Unexpected query');
  });
  (storage.connection as any).gql = gqlMock;
  vi.stubGlobal(
    'fetch',
    vi.fn(async () => new Response('', { status: 500 }))
  );

  await storage.set({
    key: 'blob-key',
    data: new Uint8Array([1, 2, 3]),
    mime: 'text/plain',
  });

  const queries = gqlMock.mock.calls.map(call => call[0].query);
  expect(queries).toContain(setBlobMutation);
  expect(queries).not.toContain(completeBlobUploadMutation);
});

test('falls back to graphql and aborts when multipart upload fails', async () => {
  const storage = createStorage();
  const gqlMock = vi.fn(async ({ query }) => {
    if (query === workspaceBlobQuotaQuery) {
      return quotaResponse;
    }
    if (query === createBlobUploadMutation) {
      return {
        createBlobUpload: {
          method: BlobUploadMethod.MULTIPART,
          blobKey: 'blob-key',
          alreadyUploaded: false,
          uploadId: 'upload-1',
          partSize: 2,
          uploadedParts: [],
        },
      };
    }
    if (query === getBlobUploadPartUrlMutation) {
      return {
        getBlobUploadPartUrl: {
          uploadUrl: 'https://upload.example.com/part',
        },
      };
    }
    if (query === abortBlobUploadMutation) {
      return { abortBlobUpload: true };
    }
    if (query === setBlobMutation) {
      return { setBlob: 'blob-key' };
    }
    throw new Error('Unexpected query');
  });
  (storage.connection as any).gql = gqlMock;
  vi.stubGlobal(
    'fetch',
    vi.fn(async () => new Response('', { status: 500 }))
  );

  await storage.set({
    key: 'blob-key',
    data: new Uint8Array([1, 2, 3]),
    mime: 'text/plain',
  });

  const queries = gqlMock.mock.calls.map(call => call[0].query);
  expect(queries).toContain(abortBlobUploadMutation);
  expect(queries).toContain(setBlobMutation);
});

View File

@@ -1,6 +1,11 @@
import { UserFriendlyError } from '@affine/error';
import {
  abortBlobUploadMutation,
  BlobUploadMethod,
  completeBlobUploadMutation,
  createBlobUploadMutation,
  deleteBlobMutation,
  getBlobUploadPartUrlMutation,
  listBlobsQuery,
  releaseDeletedBlobsMutation,
  setBlobMutation,
@@ -21,6 +26,7 @@ interface CloudBlobStorageOptions {
}

const SHOULD_MANUAL_REDIRECT = BUILD_CONFIG.isAndroid || BUILD_CONFIG.isIOS;
const UPLOAD_REQUEST_TIMEOUT = 0; // 0 disables the per-request timeout (see fetchWithTimeout below)

export class CloudBlobStorage extends BlobStorageBase {
  static readonly identifier = 'CloudBlobStorage';
@@ -97,16 +103,69 @@ export class CloudBlobStorage extends BlobStorageBase {
      if (blob.data.byteLength > blobSizeLimit) {
        throw new OverSizeError(this.humanReadableBlobSizeLimitCache);
      }

-      await this.connection.gql({
-        query: setBlobMutation,
+      const init = await this.connection.gql({
+        query: createBlobUploadMutation,
        variables: {
          workspaceId: this.options.id,
-          blob: new File([blob.data], blob.key, { type: blob.mime }),
-        },
-        context: {
-          signal,
+          key: blob.key,
+          size: blob.data.byteLength,
+          mime: blob.mime,
        },
+        context: { signal },
      });

      const upload = init.createBlobUpload;
      if (upload.alreadyUploaded) {
        return;
      }

      if (upload.method === BlobUploadMethod.GRAPHQL) {
        await this.uploadViaGraphql(blob, signal);
        return;
      }

      if (upload.method === BlobUploadMethod.PRESIGNED) {
        try {
          await this.uploadViaPresigned(
            upload.uploadUrl!,
            upload.headers,
            blob.data,
            signal
          );
          await this.completeUpload(blob.key, undefined, undefined, signal);
          return;
        } catch {
          await this.uploadViaGraphql(blob, signal);
          return;
        }
      }

      if (upload.method === BlobUploadMethod.MULTIPART) {
        try {
          const parts = await this.uploadViaMultipart(
            blob.key,
            upload.uploadId!,
            upload.partSize!,
            blob.data,
            upload.uploadedParts,
            signal
          );
          await this.completeUpload(blob.key, upload.uploadId!, parts, signal);
          return;
        } catch {
          if (upload.uploadId) {
            await this.tryAbortMultipartUpload(
              blob.key,
              upload.uploadId,
              signal
            );
          }
          await this.uploadViaGraphql(blob, signal);
          return;
        }
      }

      await this.uploadViaGraphql(blob, signal);
    } catch (err) {
      const userFriendlyError = UserFriendlyError.fromAny(err);
      if (userFriendlyError.is('STORAGE_QUOTA_EXCEEDED')) {
@@ -151,6 +210,159 @@ export class CloudBlobStorage extends BlobStorageBase {
    }));
  }
  private async uploadViaGraphql(blob: BlobRecord, signal?: AbortSignal) {
    await this.connection.gql({
      query: setBlobMutation,
      variables: {
        workspaceId: this.options.id,
        blob: new File([blob.data], blob.key, { type: blob.mime }),
      },
      context: { signal },
      timeout: UPLOAD_REQUEST_TIMEOUT,
    });
  }

  private async uploadViaPresigned(
    uploadUrl: string,
    headers: Record<string, string> | null | undefined,
    data: Uint8Array,
    signal?: AbortSignal
  ) {
    const res = await this.fetchWithTimeout(uploadUrl, {
      method: 'PUT',
      headers: headers ?? undefined,
      body: data,
      signal,
      timeout: UPLOAD_REQUEST_TIMEOUT,
    });
    if (!res.ok) {
      throw new Error(`Presigned upload failed with status ${res.status}`);
    }
  }

  private async uploadViaMultipart(
    key: string,
    uploadId: string,
    partSize: number,
    data: Uint8Array,
    uploadedParts: { partNumber: number; etag: string }[] | null | undefined,
    signal?: AbortSignal
  ) {
    // Seed with parts the server already has so a resumed upload skips them.
    const partsMap = new Map<number, string>();
    for (const part of uploadedParts ?? []) {
      partsMap.set(part.partNumber, part.etag);
    }
    const total = data.byteLength;
    const totalParts = Math.ceil(total / partSize);
    for (let partNumber = 1; partNumber <= totalParts; partNumber += 1) {
      if (partsMap.has(partNumber)) {
        continue;
      }
      const start = (partNumber - 1) * partSize;
      const end = Math.min(start + partSize, total);
      const chunk = data.subarray(start, end);
      const part = await this.connection.gql({
        query: getBlobUploadPartUrlMutation,
        variables: { workspaceId: this.options.id, key, uploadId, partNumber },
        context: { signal },
      });
      const res = await this.fetchWithTimeout(
        part.getBlobUploadPartUrl.uploadUrl,
        {
          method: 'PUT',
          headers: part.getBlobUploadPartUrl.headers ?? undefined,
          body: chunk,
          signal,
          timeout: UPLOAD_REQUEST_TIMEOUT,
        }
      );
      if (!res.ok) {
        throw new Error(
          `Multipart upload failed at part ${partNumber} with status ${res.status}`
        );
      }
      const etag = res.headers.get('etag');
      if (!etag) {
        throw new Error(`Missing ETag for part ${partNumber}.`);
      }
      partsMap.set(partNumber, etag);
    }
    if (partsMap.size !== totalParts) {
      throw new Error('Multipart upload has missing parts.');
    }
    return [...partsMap.entries()]
      .sort((left, right) => left[0] - right[0])
      .map(([partNumber, etag]) => ({ partNumber, etag }));
  }

  private async completeUpload(
    key: string,
    uploadId: string | undefined,
    parts: { partNumber: number; etag: string }[] | undefined,
    signal?: AbortSignal
  ) {
    await this.connection.gql({
      query: completeBlobUploadMutation,
      variables: { workspaceId: this.options.id, key, uploadId, parts },
      context: { signal },
      timeout: UPLOAD_REQUEST_TIMEOUT,
    });
  }

  private async tryAbortMultipartUpload(
    key: string,
    uploadId: string,
    signal?: AbortSignal
  ) {
    try {
      await this.connection.gql({
        query: abortBlobUploadMutation,
        variables: { workspaceId: this.options.id, key, uploadId },
        context: { signal },
      });
    } catch {
      // Best-effort cleanup; failures here are ignored so the GraphQL
      // fallback upload can still proceed.
    }
  }

  private async fetchWithTimeout(
    input: string,
    init: RequestInit & { timeout?: number }
  ) {
    const externalSignal = init.signal;
    if (externalSignal?.aborted) {
      throw externalSignal.reason;
    }
    const abortController = new AbortController();
    externalSignal?.addEventListener('abort', () => {
      // Forward the caller's abort reason (the listener receives an Event,
      // not the reason itself).
      abortController.abort(externalSignal?.reason);
    });
    const timeout = init.timeout ?? 15000;
    const timeoutId =
      timeout > 0
        ? setTimeout(() => {
            abortController.abort(new Error('request timeout'));
          }, timeout)
        : undefined;
    try {
      return await globalThis.fetch(input, {
        ...init,
        signal: abortController.signal,
      });
    } finally {
      if (timeoutId) {
        clearTimeout(timeoutId);
      }
    }
  }

  private humanReadableBlobSizeLimitCache: string | null = null;
  private blobSizeLimitCache: number | null = null;
  private blobSizeLimitCacheTime = 0;
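
A worked example of the part arithmetic in uploadViaMultipart above, with illustrative numbers:

// 12 MiB blob, 5 MiB parts: parts 1 and 2 carry 5 MiB each, and subarray
// clamps part 3 to the final 2 MiB (start = 10 MiB, end = min(15, 12) MiB).
const total = 12 * 1024 * 1024; // 12582912
const partSize = 5 * 1024 * 1024; // 5242880
const totalParts = Math.ceil(total / partSize); // 3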

View File

@@ -19,9 +19,12 @@ export class HttpConnection extends DummyConnection {
    });
    const timeout = init?.timeout ?? 15000;
-    const timeoutId = setTimeout(() => {
-      abortController.abort(new Error('request timeout'));
-    }, timeout);
+    const timeoutId =
+      timeout > 0
+        ? setTimeout(() => {
+            abortController.abort(new Error('request timeout'));
+          }, timeout)
+        : undefined;
    const res = await globalThis
      .fetch(new URL(input, this.serverBaseUrl), {
@@ -43,7 +46,9 @@ export class HttpConnection extends DummyConnection {
          stacktrace: err.stack,
        });
      });
-    clearTimeout(timeoutId);
+    if (timeoutId) {
+      clearTimeout(timeoutId);
+    }
    if (!res.ok && res.status !== 404) {
      if (res.status === 413) {
        throw new UserFriendlyError({