feat(nbstore): add cloud implementation (#8810)

This commit is contained in:
forehalo
2024-12-10 10:48:27 +00:00
parent 1721875ab6
commit 2f80b4f822
32 changed files with 1030 additions and 315 deletions

View File

@@ -0,0 +1,14 @@
-- CreateTable
-- "blobs" is a metadata-only mirror of objects kept in external blob storage,
-- enabling fast queries (per-workspace total size, sync listings) without
-- hitting the storage provider. Soft deletion is tracked via "deleted_at".
CREATE TABLE "blobs" (
"workspace_id" VARCHAR NOT NULL,
"key" VARCHAR NOT NULL,
-- content length in bytes
"size" INTEGER NOT NULL,
-- content type reported by the provider (e.g. image/png)
"mime" VARCHAR NOT NULL,
"created_at" TIMESTAMPTZ(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
-- soft-delete marker; NULL means the blob is live
"deleted_at" TIMESTAMPTZ(3),
CONSTRAINT "blobs_pkey" PRIMARY KEY ("workspace_id","key")
);
-- AddForeignKey
-- Cascade so blob metadata disappears together with its workspace.
ALTER TABLE "blobs" ADD CONSTRAINT "blobs_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;

View File

@@ -106,6 +106,7 @@ model Workspace {
permissions WorkspaceUserPermission[]
pagePermissions WorkspacePageUserPermission[]
features WorkspaceFeature[]
blobs Blob[]
@@map("workspaces")
}
@@ -568,3 +569,19 @@ model Invoice {
@@index([targetId])
@@map("invoices")
}
// Blob table only exists for fast non-data queries.
// like, total size of blobs in a workspace, or blob list for sync service.
// it should only be a map of metadata of blobs stored anywhere else
model Blob {
// owning workspace; rows are removed with the workspace (onDelete: Cascade)
workspaceId String @map("workspace_id") @db.VarChar
// object key within the workspace's blob storage namespace
key String @db.VarChar
// content length in bytes
size Int @db.Integer
// content type of the blob (e.g. image/png)
mime String @db.VarChar
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
// soft-delete marker; null means the blob is live
deletedAt DateTime? @map("deleted_at") @db.Timestamptz(3)
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
// one metadata row per (workspace, key) pair
@@id([workspaceId, key])
@@map("blobs")
}

View File

@@ -1,8 +1,10 @@
import {
applyUpdate,
diffUpdate,
Doc,
encodeStateAsUpdate,
encodeStateVector,
encodeStateVectorFromUpdate,
mergeUpdates,
UndoManager,
} from 'yjs';
@@ -19,6 +21,12 @@ export interface DocRecord {
editor?: string;
}
/**
 * Result of diffing a stored doc against a client-provided state vector.
 */
export interface DocDiff {
// updates the requester is missing (the full snapshot when no state vector was given)
missing: Uint8Array;
// state vector of the server-side snapshot, for the requester's reverse diff
state: Uint8Array;
// timestamp of the snapshot the diff was computed from
timestamp: number;
}
export interface DocUpdate {
bin: Uint8Array;
timestamp: number;
@@ -96,6 +104,27 @@ export abstract class DocStorageAdapter extends Connection {
return snapshot;
}
/**
 * Compute the diff between the stored snapshot of `docId` and the peer's
 * `stateVector`. Returns `null` when the doc does not exist; otherwise the
 * missing updates plus our own state vector so the peer can diff back.
 */
async getDocDiff(
  spaceId: string,
  docId: string,
  stateVector?: Uint8Array
): Promise<DocDiff | null> {
  // The full snapshot is needed both for the diff and for the state vector.
  const snapshot = await this.getDoc(spaceId, docId);
  if (!snapshot) {
    return null;
  }

  let missing: Uint8Array = snapshot.bin;
  if (stateVector) {
    missing = diffUpdate(snapshot.bin, stateVector);
  }

  return {
    missing,
    state: encodeStateVectorFromUpdate(snapshot.bin),
    timestamp: snapshot.timestamp,
  };
}
abstract pushDocUpdates(
spaceId: string,
docId: string,

View File

@@ -1,4 +1,6 @@
// TODO(@forehalo): share with frontend
// This is an exact copy of the definitions in [@affine/space-store]
// because currently importing cross workspace package from [@affine/server] is not yet supported
// should be kept updated with the original definitions in [@affine/space-store]
import type { BlobStorageAdapter } from './blob';
import { Connection } from './connection';
import type { DocStorageAdapter } from './doc';

View File

@@ -11,6 +11,7 @@ import { CurrentUser } from '../auth/session';
import { EarlyAccessType } from '../features';
import { UserType } from '../user';
import { QuotaService } from './service';
import { QuotaManagementService } from './storage';
registerEnumType(EarlyAccessType, {
name: 'EarlyAccessType',
@@ -55,9 +56,18 @@ class UserQuotaType {
humanReadable!: UserQuotaHumanReadableType;
}
// GraphQL payload exposing the user's storage usage, resolved at [UserType.quotaUsage].
// NOTE(review): the field is named `storageQuota` but is filled with *used* bytes
// (see getQuotaUsage below) — confirm the naming is intentional.
@ObjectType('UserQuotaUsage')
class UserQuotaUsageType {
@Field(() => SafeIntResolver, { name: 'storageQuota' })
storageQuota!: number;
}
@Resolver(() => UserType)
export class QuotaManagementResolver {
constructor(private readonly quota: QuotaService) {}
constructor(
private readonly quota: QuotaService,
private readonly management: QuotaManagementService
) {}
@ResolveField(() => UserQuotaType, { name: 'quota', nullable: true })
async getQuota(@CurrentUser() me: UserType) {
@@ -65,4 +75,15 @@ export class QuotaManagementResolver {
return quota.feature;
}
/**
 * Resolve `UserType.quotaUsage`: total storage used across all workspaces
 * owned by the current user, reported under the `storageQuota` field.
 */
@ResolveField(() => UserQuotaUsageType, { name: 'quotaUsage' })
async getQuotaUsage(
  @CurrentUser() me: UserType
): Promise<UserQuotaUsageType> {
  const storageQuota = await this.management.getUserStorageUsage(me.id);
  return { storageQuota };
}
}

View File

@@ -77,7 +77,7 @@ export class QuotaManagementService {
return this.quota.hasWorkspaceQuota(workspaceId, QuotaType.TeamPlanV1);
}
async getUserUsage(userId: string) {
async getUserStorageUsage(userId: string) {
const workspaces = await this.permissions.getOwnedWorkspaces(userId);
const sizes = await Promise.allSettled(
@@ -125,7 +125,7 @@ export class QuotaManagementService {
async getQuotaCalculator(userId: string) {
const quota = await this.getUserQuota(userId);
const { storageQuota, businessBlobLimit } = quota;
const usedSize = await this.getUserUsage(userId);
const usedSize = await this.getUserStorageUsage(userId);
return this.generateQuotaCalculator(
storageQuota,
@@ -183,7 +183,7 @@ export class QuotaManagementService {
humanReadable,
} = await this.getWorkspaceQuota(owner.id, workspaceId);
// get all workspaces size of owner used
const usedSize = await this.getUserUsage(owner.id);
const usedSize = await this.getUserStorageUsage(owner.id);
// relax restrictions if workspace has unlimited feature
// todo(@darkskygit): need a mechanism to allow feature as a middleware to edit quota
const unlimited = await this.feature.hasWorkspaceFeature(

View File

@@ -1,33 +1,42 @@
import { Injectable } from '@nestjs/common';
import { Injectable, Logger } from '@nestjs/common';
import { PrismaClient } from '@prisma/client';
import {
type BlobInputType,
Cache,
autoMetadata,
Config,
EventEmitter,
type EventPayload,
type ListObjectsMetadata,
type GetObjectMetadata,
ListObjectsMetadata,
OnEvent,
PutObjectMetadata,
type StorageProvider,
StorageProviderFactory,
} from '../../../fundamentals';
@Injectable()
export class WorkspaceBlobStorage {
private readonly logger = new Logger(WorkspaceBlobStorage.name);
public readonly provider: StorageProvider;
constructor(
private readonly config: Config,
private readonly event: EventEmitter,
private readonly storageFactory: StorageProviderFactory,
private readonly cache: Cache
private readonly db: PrismaClient
) {
this.provider = this.storageFactory.create(this.config.storages.blob);
}
async put(workspaceId: string, key: string, blob: BlobInputType) {
await this.provider.put(`${workspaceId}/${key}`, blob);
await this.cache.delete(`blob-list:${workspaceId}`);
async put(workspaceId: string, key: string, blob: Buffer) {
const meta: PutObjectMetadata = autoMetadata(blob);
await this.provider.put(`${workspaceId}/${key}`, blob, meta);
this.trySyncBlobMeta(workspaceId, key, {
contentType: meta.contentType ?? 'application/octet-stream',
contentLength: blob.length,
lastModified: new Date(),
});
}
async get(workspaceId: string, key: string) {
@@ -35,41 +44,141 @@ export class WorkspaceBlobStorage {
}
async list(workspaceId: string) {
const cachedList = await this.cache.list<ListObjectsMetadata>(
`blob-list:${workspaceId}`,
0,
-1
);
const blobsInDb = await this.db.blob.findMany({
where: {
workspaceId,
deletedAt: null,
},
});
if (cachedList.length > 0) {
return cachedList;
if (blobsInDb.length > 0) {
return blobsInDb;
}
const blobs = await this.provider.list(workspaceId + '/');
blobs.forEach(item => {
// trim workspace prefix
item.key = item.key.slice(workspaceId.length + 1);
blobs.forEach(blob => {
blob.key = blob.key.slice(workspaceId.length + 1);
});
await this.cache.pushBack(`blob-list:${workspaceId}`, ...blobs);
this.trySyncBlobsMeta(workspaceId, blobs);
return blobs;
return blobs.map(blob => ({
key: blob.key,
size: blob.contentLength,
createdAt: blob.lastModified,
mime: 'application/octet-stream',
}));
}
/**
* we won't really delete the blobs until the doc blobs manager is implemented soundly
*/
async delete(_workspaceId: string, _key: string) {
// return this.provider.delete(`${workspaceId}/${key}`);
async delete(workspaceId: string, key: string, permanently = false) {
if (permanently) {
await this.provider.delete(`${workspaceId}/${key}`);
await this.db.blob.deleteMany({
where: {
workspaceId,
key,
},
});
} else {
await this.db.blob.update({
where: {
workspaceId_key: {
workspaceId,
key,
},
},
data: {
deletedAt: new Date(),
},
});
}
}
async release(workspaceId: string) {
const deletedBlobs = await this.db.blob.findMany({
where: {
workspaceId,
deletedAt: {
not: null,
},
},
});
deletedBlobs.forEach(blob => {
this.event.emit('workspace.blob.deleted', {
workspaceId: workspaceId,
key: blob.key,
});
});
}
async totalSize(workspaceId: string) {
const blobs = await this.list(workspaceId);
// TODO: figure out how to exclude blobs that have been soft-deleted
return blobs.reduce((acc, item) => acc + item.size, 0);
}
private trySyncBlobsMeta(workspaceId: string, blobs: ListObjectsMetadata[]) {
for (const blob of blobs) {
this.trySyncBlobMeta(workspaceId, blob.key);
}
}
private trySyncBlobMeta(
workspaceId: string,
key: string,
meta?: GetObjectMetadata
) {
setImmediate(() => {
this.syncBlobMeta(workspaceId, key, meta).catch(() => {
/* never throw */
});
});
}
private async syncBlobMeta(
workspaceId: string,
key: string,
meta?: GetObjectMetadata
) {
try {
if (!meta) {
const blob = await this.get(workspaceId, key);
meta = blob.metadata;
}
if (meta) {
await this.db.blob.upsert({
where: {
workspaceId_key: {
workspaceId,
key,
},
},
update: {
mime: meta.contentType,
size: meta.contentLength,
},
create: {
workspaceId,
key,
mime: meta.contentType,
size: meta.contentLength,
},
});
} else {
await this.db.blob.deleteMany({
where: {
workspaceId,
key,
},
});
}
} catch (e) {
// never throw
this.logger.error('failed to sync blob meta to DB', e);
}
}
@OnEvent('workspace.deleted')
async onWorkspaceDeleted(workspaceId: EventPayload<'workspace.deleted'>) {
const blobs = await this.list(workspaceId);
@@ -78,7 +187,7 @@ export class WorkspaceBlobStorage {
blobs.forEach(blob => {
this.event.emit('workspace.blob.deleted', {
workspaceId: workspaceId,
name: blob.key,
key: blob.key,
});
});
}
@@ -86,8 +195,8 @@ export class WorkspaceBlobStorage {
@OnEvent('workspace.blob.deleted')
async onDeleteWorkspaceBlob({
workspaceId,
name,
key,
}: EventPayload<'workspace.blob.deleted'>) {
await this.delete(workspaceId, name);
await this.delete(workspaceId, key, true);
}
}

View File

@@ -8,7 +8,6 @@ import {
WebSocketGateway,
} from '@nestjs/websockets';
import { Socket } from 'socket.io';
import { diffUpdate, encodeStateVectorFromUpdate } from 'yjs';
import {
AlreadyInSpace,
@@ -83,6 +82,9 @@ interface LeaveSpaceAwarenessMessage {
docId: string;
}
/**
* @deprecated
*/
interface PushDocUpdatesMessage {
spaceType: SpaceType;
spaceId: string;
@@ -90,6 +92,13 @@ interface PushDocUpdatesMessage {
updates: string[];
}
interface PushDocUpdateMessage {
spaceType: SpaceType;
spaceId: string;
docId: string;
update: string;
}
interface LoadDocMessage {
spaceType: SpaceType;
spaceId: string;
@@ -97,6 +106,12 @@ interface LoadDocMessage {
stateVector?: string;
}
interface DeleteDocMessage {
spaceType: SpaceType;
spaceId: string;
docId: string;
}
interface LoadDocTimestampsMessage {
spaceType: SpaceType;
spaceId: string;
@@ -114,6 +129,7 @@ interface UpdateAwarenessMessage {
docId: string;
awarenessUpdate: string;
}
@WebSocketGateway()
export class SpaceSyncGateway
implements OnGatewayConnection, OnGatewayDisconnect
@@ -182,26 +198,6 @@ export class SpaceSyncGateway
}
}
async joinWorkspace(
client: Socket,
room: `${string}:${'sync' | 'awareness'}`
) {
await client.join(room);
}
async leaveWorkspace(
client: Socket,
room: `${string}:${'sync' | 'awareness'}`
) {
await client.leave(room);
}
assertInWorkspace(client: Socket, room: `${string}:${'sync' | 'awareness'}`) {
if (!client.rooms.has(room)) {
throw new NotInSpace({ spaceId: room.split(':')[0] });
}
}
// v3
@SubscribeMessage('space:join')
async onJoinSpace(
@@ -233,36 +229,42 @@ export class SpaceSyncGateway
@MessageBody()
{ spaceType, spaceId, docId, stateVector }: LoadDocMessage
): Promise<
EventResponse<{ missing: string; state?: string; timestamp: number }>
EventResponse<{ missing: string; state: string; timestamp: number }>
> {
const adapter = this.selectAdapter(client, spaceType);
adapter.assertIn(spaceId);
const doc = await adapter.get(spaceId, docId);
const doc = await adapter.diff(
spaceId,
docId,
stateVector ? Buffer.from(stateVector, 'base64') : undefined
);
if (!doc) {
throw new DocNotFound({ spaceId, docId });
}
const missing = Buffer.from(
stateVector
? diffUpdate(doc.bin, Buffer.from(stateVector, 'base64'))
: doc.bin
).toString('base64');
const state = Buffer.from(encodeStateVectorFromUpdate(doc.bin)).toString(
'base64'
);
return {
data: {
missing,
state,
missing: Buffer.from(doc.missing).toString('base64'),
state: Buffer.from(doc.state).toString('base64'),
timestamp: doc.timestamp,
},
};
}
@SubscribeMessage('space:delete-doc')
async onDeleteSpaceDoc(
@ConnectedSocket() client: Socket,
@MessageBody() { spaceType, spaceId, docId }: DeleteDocMessage
) {
const adapter = this.selectAdapter(client, spaceType);
await adapter.delete(spaceId, docId);
}
/**
* @deprecated use [space:push-doc-update] instead, client should always merge updates on their own
*/
@SubscribeMessage('space:push-doc-updates')
async onReceiveDocUpdates(
@ConnectedSocket() client: Socket,
@@ -307,6 +309,51 @@ export class SpaceSyncGateway
};
}
@SubscribeMessage('space:push-doc-update')
async onReceiveDocUpdate(
@ConnectedSocket() client: Socket,
@CurrentUser() user: CurrentUser,
@MessageBody()
message: PushDocUpdateMessage
): Promise<EventResponse<{ accepted: true; timestamp?: number }>> {
const { spaceType, spaceId, docId, update } = message;
const adapter = this.selectAdapter(client, spaceType);
// TODO(@forehalo): we might need to check write permission before push updates
const timestamp = await adapter.push(
spaceId,
docId,
[Buffer.from(update, 'base64')],
user.id
);
// TODO(@forehalo): separate different version of clients into different rooms,
// so the clients won't receive useless updates events
client.to(adapter.room(spaceId)).emit('space:broadcast-doc-updates', {
spaceType,
spaceId,
docId,
updates: [update],
timestamp,
});
client.to(adapter.room(spaceId)).emit('space:broadcast-doc-update', {
spaceType,
spaceId,
docId,
update,
timestamp,
editor: user.id,
});
return {
data: {
accepted: true,
timestamp,
},
};
}
@SubscribeMessage('space:load-doc-timestamps')
async onLoadDocTimestamps(
@ConnectedSocket() client: Socket,
@@ -600,9 +647,14 @@ abstract class SyncSocketAdapter {
return this.storage.pushDocUpdates(spaceId, docId, updates, editorId);
}
get(spaceId: string, docId: string) {
diff(spaceId: string, docId: string, stateVector?: Uint8Array) {
this.assertIn(spaceId);
return this.storage.getDoc(spaceId, docId);
return this.storage.getDocDiff(spaceId, docId, stateVector);
}
delete(spaceId: string, docId: string) {
this.assertIn(spaceId);
return this.storage.deleteDoc(spaceId, docId);
}
getTimestamps(spaceId: string, timestamp?: number) {
@@ -630,9 +682,9 @@ class WorkspaceSyncAdapter extends SyncSocketAdapter {
return super.push(spaceId, id.guid, updates, editorId);
}
override get(spaceId: string, docId: string) {
override diff(spaceId: string, docId: string, stateVector?: Uint8Array) {
const id = new DocID(docId, spaceId);
return this.storage.getDoc(spaceId, id.guid);
return this.storage.getDocDiff(spaceId, id.guid, stateVector);
}
async assertAccessible(

View File

@@ -1,29 +1,40 @@
import { Logger, UseGuards } from '@nestjs/common';
import {
Args,
Field,
Int,
Mutation,
ObjectType,
Parent,
Query,
ResolveField,
Resolver,
} from '@nestjs/graphql';
import { SafeIntResolver } from 'graphql-scalars';
import GraphQLUpload from 'graphql-upload/GraphQLUpload.mjs';
import type { FileUpload } from '../../../fundamentals';
import {
BlobQuotaExceeded,
CloudThrottlerGuard,
MakeCache,
PreventCache,
} from '../../../fundamentals';
import { BlobQuotaExceeded, CloudThrottlerGuard } from '../../../fundamentals';
import { CurrentUser } from '../../auth';
import { Permission, PermissionService } from '../../permission';
import { QuotaManagementService } from '../../quota';
import { WorkspaceBlobStorage } from '../../storage';
import { WorkspaceBlobSizes, WorkspaceType } from '../types';
// GraphQL shape of a single blob entry returned by `WorkspaceType.blobs`.
@ObjectType()
class ListedBlob {
@Field()
key!: string;
@Field()
mime!: string;
// content length in bytes
@Field()
size!: number;
// serialized creation time; NOTE(review): exposed as String rather than a
// DateTime scalar — confirm this matches client expectations
@Field()
createdAt!: string;
}
@UseGuards(CloudThrottlerGuard)
@Resolver(() => WorkspaceType)
export class WorkspaceBlobResolver {
@@ -34,7 +45,7 @@ export class WorkspaceBlobResolver {
private readonly storage: WorkspaceBlobStorage
) {}
@ResolveField(() => [String], {
@ResolveField(() => [ListedBlob], {
description: 'List blobs of workspace',
complexity: 2,
})
@@ -44,9 +55,7 @@ export class WorkspaceBlobResolver {
) {
await this.permissions.checkWorkspace(workspace.id, user.id);
return this.storage
.list(workspace.id)
.then(list => list.map(item => item.key));
return this.storage.list(workspace.id);
}
@ResolveField(() => Int, {
@@ -64,7 +73,6 @@ export class WorkspaceBlobResolver {
description: 'List blobs of workspace',
deprecationReason: 'use `workspace.blobs` instead',
})
@MakeCache(['blobs'], ['workspaceId'])
async listBlobs(
@CurrentUser() user: CurrentUser,
@Args('workspaceId') workspaceId: string
@@ -76,42 +84,15 @@ export class WorkspaceBlobResolver {
.then(list => list.map(item => item.key));
}
/**
* @deprecated use `user.storageUsage` instead
*/
@Query(() => WorkspaceBlobSizes, {
deprecationReason: 'use `user.storageUsage` instead',
deprecationReason: 'use `user.quotaUsage` instead',
})
async collectAllBlobSizes(@CurrentUser() user: CurrentUser) {
const size = await this.quota.getUserUsage(user.id);
const size = await this.quota.getUserStorageUsage(user.id);
return { size };
}
/**
* @deprecated mutation `setBlob` will check blob limit & quota usage
*/
@Query(() => WorkspaceBlobSizes, {
deprecationReason: 'no more needed',
})
async checkBlobSize(
@CurrentUser() user: CurrentUser,
@Args('workspaceId') workspaceId: string,
@Args('size', { type: () => SafeIntResolver }) blobSize: number
) {
const canWrite = await this.permissions.tryCheckWorkspace(
workspaceId,
user.id,
Permission.Write
);
if (canWrite) {
const size = await this.quota.checkBlobQuota(workspaceId, blobSize);
return { size };
}
return false;
}
@Mutation(() => String)
@PreventCache(['blobs'], ['workspaceId'])
async setBlob(
@CurrentUser() user: CurrentUser,
@Args('workspaceId') workspaceId: string,
@@ -160,11 +141,35 @@ export class WorkspaceBlobResolver {
}
@Mutation(() => Boolean)
@PreventCache(['blobs'], ['workspaceId'])
async deleteBlob(
@CurrentUser() user: CurrentUser,
@Args('workspaceId') workspaceId: string,
@Args('hash') name: string
@Args('hash', {
type: () => String,
deprecationReason: 'use parameter [key]',
nullable: true,
})
hash?: string,
@Args('key', { type: () => String, nullable: true }) key?: string,
@Args('permanently', { type: () => Boolean, defaultValue: false })
permanently = false
) {
key = key ?? hash;
if (!key) {
return false;
}
await this.permissions.checkWorkspace(workspaceId, user.id);
await this.storage.delete(workspaceId, key, permanently);
return true;
}
@Mutation(() => Boolean)
async releaseDeletedBlobs(
@CurrentUser() user: CurrentUser,
@Args('workspaceId') workspaceId: string
) {
await this.permissions.checkWorkspace(
workspaceId,
@@ -172,7 +177,7 @@ export class WorkspaceBlobResolver {
Permission.Write
);
await this.storage.delete(workspaceId, name);
await this.storage.release(workspaceId);
return true;
}

View File

@@ -7,7 +7,7 @@ export interface WorkspaceEvents {
blob: {
deleted: Payload<{
workspaceId: Workspace['id'];
name: string;
key: string;
}>;
};
}

View File

@@ -119,7 +119,7 @@ export class FsStorageProvider implements StorageProvider {
results.push({
key: res,
lastModified: stat.mtime,
size: stat.size,
contentLength: stat.size,
});
}
}
@@ -216,7 +216,7 @@ export class FsStorageProvider implements StorageProvider {
raw: PutObjectMetadata
) {
try {
const metadata = await autoMetadata(blob, raw);
const metadata = autoMetadata(blob, raw);
if (raw.checksumCRC32 && metadata.checksumCRC32 !== raw.checksumCRC32) {
throw new Error(
@@ -224,6 +224,12 @@ export class FsStorageProvider implements StorageProvider {
);
}
if (raw.contentLength && metadata.contentLength !== raw.contentLength) {
throw new Error(
'The content length of the uploaded file is not matched with the one you provide, the file may be corrupted and the uploading will not be processed.'
);
}
writeFileSync(
this.join(`${key}.metadata.json`),
JSON.stringify({

View File

@@ -21,7 +21,7 @@ export interface PutObjectMetadata {
export interface ListObjectsMetadata {
key: string;
lastModified: Date;
size: number;
contentLength: number;
}
export type BlobInputType = Buffer | Readable | string;

View File

@@ -14,19 +14,19 @@ export async function toBuffer(input: BlobInputType): Promise<Buffer> {
: Buffer.from(input);
}
export async function autoMetadata(
export function autoMetadata(
blob: Buffer,
raw: PutObjectMetadata
): Promise<PutObjectMetadata> {
raw: PutObjectMetadata = {}
): PutObjectMetadata {
const metadata = {
...raw,
};
try {
// length
if (!metadata.contentLength) {
metadata.contentLength = blob.length;
}
if (!metadata.contentLength) {
metadata.contentLength = blob.byteLength;
}
try {
// checksum
if (!metadata.checksumCRC32) {
metadata.checksumCRC32 = crc32(blob).toString(16);
@@ -34,15 +34,11 @@ export async function autoMetadata(
// mime type
if (!metadata.contentType) {
try {
metadata.contentType = getMime(blob);
} catch {
// ignore
}
metadata.contentType = getMime(blob);
}
return metadata;
} catch {
return metadata;
// noop
}
return metadata;
}

View File

@@ -50,7 +50,7 @@ export class S3StorageProvider implements StorageProvider {
): Promise<void> {
const blob = await toBuffer(body);
metadata = await autoMetadata(blob, metadata);
metadata = autoMetadata(blob, metadata);
try {
await this.client.send(
@@ -140,7 +140,7 @@ export class S3StorageProvider implements StorageProvider {
listResult.Contents.map(r => ({
key: r.Key!,
lastModified: r.LastModified!,
size: r.Size!,
contentLength: r.Size!,
}))
);
}

View File

@@ -449,6 +449,13 @@ input ListUserInput {
skip: Int = 0
}
# Metadata entry for a single workspace blob (see `WorkspaceType.blobs`).
type ListedBlob {
createdAt: String!
key: String!
mime: String!
size: Int!
}
input ManageUserInput {
"""User email"""
email: String
@@ -496,7 +503,7 @@ type Mutation {
"""Create a new workspace"""
createWorkspace(init: Upload): WorkspaceType!
deleteAccount: DeleteAccount!
deleteBlob(hash: String!, workspaceId: String!): Boolean!
deleteBlob(hash: String @deprecated(reason: "use parameter [key]"), key: String, permanently: Boolean! = false, workspaceId: String!): Boolean!
"""Delete a user account"""
deleteUser(id: String!): DeleteAccount!
@@ -511,6 +518,7 @@ type Mutation {
leaveWorkspace(sendLeaveMail: Boolean, workspaceId: String!, workspaceName: String!): Boolean!
publishPage(mode: PublicPageMode = Page, pageId: String!, workspaceId: String!): WorkspacePage!
recoverDoc(guid: String!, timestamp: DateTime!, workspaceId: String!): DateTime!
releaseDeletedBlobs(workspaceId: String!): Boolean!
"""Remove user avatar"""
removeAvatar: RemoveAvatar!
@@ -584,8 +592,7 @@ enum PublicPageMode {
}
type Query {
checkBlobSize(size: SafeInt!, workspaceId: String!): WorkspaceBlobSizes! @deprecated(reason: "no more needed")
collectAllBlobSizes: WorkspaceBlobSizes! @deprecated(reason: "use `user.storageUsage` instead")
collectAllBlobSizes: WorkspaceBlobSizes! @deprecated(reason: "use `user.quotaUsage` instead")
"""Get current user"""
currentUser: UserType
@@ -885,6 +892,10 @@ type UserQuotaHumanReadable {
storageQuota: String!
}
type UserQuotaUsage {
storageQuota: SafeInt!
}
type UserType {
"""User avatar url"""
avatarUrl: String
@@ -913,6 +924,7 @@ type UserType {
"""User name"""
name: String!
quota: UserQuota
quotaUsage: UserQuotaUsage!
subscriptions: [SubscriptionType!]!
token: tokenType! @deprecated(reason: "use [/api/auth/sign-in?native=true] instead")
}
@@ -962,7 +974,7 @@ type WorkspaceType {
availableFeatures: [FeatureType!]!
"""List blobs of workspace"""
blobs: [String!]!
blobs: [ListedBlob!]!
"""Blobs size of workspace"""
blobsSize: Int!

View File

@@ -54,35 +54,16 @@ export async function collectAllBlobSizes(
.send({
query: `
query {
collectAllBlobSizes {
size
currentUser {
quotaUsage {
storageQuota
}
}
}
`,
})
.expect(200);
return res.body.data.collectAllBlobSizes.size;
}
export async function checkBlobSize(
app: INestApplication,
token: string,
workspaceId: string,
size: number
): Promise<number> {
const res = await request(app.getHttpServer())
.post(gql)
.auth(token, { type: 'bearer' })
.send({
query: `query checkBlobSize($workspaceId: String!, $size: SafeInt!) {
checkBlobSize(workspaceId: $workspaceId, size: $size) {
size
}
}`,
variables: { workspaceId, size },
})
.expect(200);
return res.body.data.checkBlobSize.size;
return res.body.data.currentUser.quotaUsage.storageQuota;
}
export async function setBlob(

View File

@@ -2,11 +2,10 @@ import type { INestApplication } from '@nestjs/common';
import test from 'ava';
import request from 'supertest';
import { AppModule } from '../src/app.module';
import { FeatureManagementService, FeatureType } from '../src/core/features';
import { QuotaService, QuotaType } from '../src/core/quota';
import { AppModule } from '../../src/app.module';
import { FeatureManagementService, FeatureType } from '../../src/core/features';
import { QuotaService, QuotaType } from '../../src/core/quota';
import {
checkBlobSize,
collectAllBlobSizes,
createTestingApp,
createWorkspace,
@@ -14,7 +13,7 @@ import {
listBlobs,
setBlob,
signUp,
} from './utils';
} from '../utils';
const OneMB = 1024 * 1024;
@@ -114,58 +113,6 @@ test('should calc all blobs size', async t => {
const size = await collectAllBlobSizes(app, u1.token.token);
t.is(size, 8, 'failed to collect all blob sizes');
const size1 = await checkBlobSize(
app,
u1.token.token,
workspace1.id,
10 * 1024 * 1024 * 1024 - 8
);
t.is(size1, 0, 'failed to check blob size');
const size2 = await checkBlobSize(
app,
u1.token.token,
workspace1.id,
10 * 1024 * 1024 * 1024 - 7
);
t.is(size2, -1, 'failed to check blob size');
});
test('should be able calc quota after switch plan', async t => {
const u1 = await signUp(app, 'darksky', 'darksky@affine.pro', '1');
const workspace1 = await createWorkspace(app, u1.token.token);
const buffer1 = Buffer.from([0, 0]);
await setBlob(app, u1.token.token, workspace1.id, buffer1);
const buffer2 = Buffer.from([0, 1]);
await setBlob(app, u1.token.token, workspace1.id, buffer2);
const workspace2 = await createWorkspace(app, u1.token.token);
const buffer3 = Buffer.from([0, 0]);
await setBlob(app, u1.token.token, workspace2.id, buffer3);
const buffer4 = Buffer.from([0, 1]);
await setBlob(app, u1.token.token, workspace2.id, buffer4);
const size1 = await checkBlobSize(
app,
u1.token.token,
workspace1.id,
10 * 1024 * 1024 * 1024 - 8
);
t.is(size1, 0, 'failed to check free plan blob size');
await quota.switchUserQuota(u1.id, QuotaType.ProPlanV1);
const size2 = await checkBlobSize(
app,
u1.token.token,
workspace1.id,
100 * 1024 * 1024 * 1024 - 8
);
t.is(size2, 0, 'failed to check pro plan blob size');
});
test('should reject blob exceeded limit', async t => {

View File

@@ -8,7 +8,8 @@
".": "./src/index.ts",
"./op": "./src/op/index.ts",
"./idb": "./src/impls/idb/index.ts",
"./idb/v1": "./src/impls/idb/v1/index.ts"
"./idb/v1": "./src/impls/idb/v1/index.ts",
"./cloud": "./src/impls/cloud/index.ts"
},
"dependencies": {
"@datastructures-js/binary-search-tree": "^5.3.2",
@@ -20,11 +21,15 @@
"yjs": "patch:yjs@npm%3A13.6.18#~/.yarn/patches/yjs-npm-13.6.18-ad0d5f7c43.patch"
},
"devDependencies": {
"@affine/graphql": "workspace:*",
"fake-indexeddb": "^6.0.0",
"idb": "^8.0.0",
"socket.io-client": "^4.7.5",
"vitest": "2.1.4"
},
"peerDependencies": {
"idb": "^8.0.0"
"@affine/graphql": "workspace:*",
"idb": "^8.0.0",
"socket.io-client": "^4.7.5"
}
}

View File

@@ -0,0 +1,72 @@
import {
deleteBlobMutation,
gqlFetcherFactory,
listBlobsQuery,
releaseDeletedBlobsMutation,
setBlobMutation,
} from '@affine/graphql';
import { DummyConnection } from '../../connection';
import { type BlobRecord, BlobStorage } from '../../storage';
/**
 * Blob storage backed by the cloud server: reads go through the plain HTTP
 * blob endpoint (so the browser HTTP cache applies), while writes, deletes
 * and listings go through the GraphQL API.
 */
export class CloudBlobStorage extends BlobStorage {
private readonly gql = gqlFetcherFactory(this.options.peer + '/graphql');
// stateless HTTP/GraphQL transport — no persistent connection to manage
override connection = new DummyConnection();

override async get(key: string) {
// fetch directly from the REST endpoint; `cache: 'default'` lets the
// browser reuse cached responses for immutable blobs
const res = await fetch(
this.options.peer + '/api/workspaces/' + this.spaceId + '/blobs/' + key,
{ cache: 'default' }
);

// any non-2xx (missing blob, auth failure, …) is reported as "not found"
if (!res.ok) {
return null;
}

const data = await res.arrayBuffer();

return {
key,
data: new Uint8Array(data),
mime: res.headers.get('content-type') || '',
size: data.byteLength,
// fall back to "now" when the server omits the last-modified header
createdAt: new Date(res.headers.get('last-modified') || Date.now()),
};
}

override async set(blob: BlobRecord) {
// upload as a multipart File so the server receives key + mime alongside the bytes
await this.gql({
query: setBlobMutation,
variables: {
workspaceId: this.spaceId,
blob: new File([blob.data], blob.key, { type: blob.mime }),
},
});
}

override async delete(key: string, permanently: boolean) {
// `permanently = false` soft-deletes on the server; true removes the object itself
await this.gql({
query: deleteBlobMutation,
variables: { workspaceId: this.spaceId, key, permanently },
});
}

override async release() {
// ask the server to reap all soft-deleted blobs of this workspace
await this.gql({
query: releaseDeletedBlobsMutation,
variables: { workspaceId: this.spaceId },
});
}

override async list() {
const res = await this.gql({
query: listBlobsQuery,
variables: { workspaceId: this.spaceId },
});

// server serializes createdAt as a string; revive it into a Date
return res.workspace.blobs.map(blob => ({
...blob,
createdAt: new Date(blob.createdAt),
}));
}
}

View File

@@ -0,0 +1,199 @@
import { noop } from 'lodash-es';
import type { SocketOptions } from 'socket.io-client';
import { share } from '../../connection';
import {
type DocClock,
type DocClocks,
DocStorage,
type DocStorageOptions,
type DocUpdate,
} from '../../storage';
import {
base64ToUint8Array,
type ServerEventsMap,
SocketConnection,
uint8ArrayToBase64,
} from './socket';
interface CloudDocStorageOptions extends DocStorageOptions {
socketOptions: SocketOptions;
}
export class CloudDocStorage extends DocStorage<CloudDocStorageOptions> {
connection = share(
new SocketConnection(this.peer, this.options.socketOptions)
);
private get socket() {
return this.connection.inner;
}
override async connect(): Promise<void> {
await super.connect();
this.connection.onStatusChanged(status => {
if (status === 'connected') {
this.join().catch(noop);
this.socket.on('space:broadcast-doc-update', this.onServerUpdate);
}
});
}
override async disconnect(): Promise<void> {
this.socket.emit('space:leave', {
spaceType: this.spaceType,
spaceId: this.spaceId,
});
this.socket.off('space:broadcast-doc-update', this.onServerUpdate);
await super.connect();
}
async join() {
try {
const res = await this.socket.emitWithAck('space:join', {
spaceType: this.spaceType,
spaceId: this.spaceId,
clientVersion: BUILD_CONFIG.appVersion,
});
if ('error' in res) {
this.connection.setStatus('closed', new Error(res.error.message));
}
} catch (e) {
this.connection.setStatus('error', e as Error);
}
}
onServerUpdate: ServerEventsMap['space:broadcast-doc-update'] = message => {
if (
this.spaceType === message.spaceType &&
this.spaceId === message.spaceId
) {
this.emit('update', {
docId: message.docId,
bin: base64ToUint8Array(message.update),
timestamp: new Date(message.timestamp),
editor: message.editor,
});
}
};
override async getDocSnapshot(docId: string) {
const response = await this.socket.emitWithAck('space:load-doc', {
spaceType: this.spaceType,
spaceId: this.spaceId,
docId,
});
if ('error' in response) {
// TODO: use [UserFriendlyError]
throw new Error(response.error.message);
}
return {
docId,
bin: base64ToUint8Array(response.data.missing),
timestamp: new Date(response.data.timestamp),
};
}
override async getDocDiff(docId: string, state?: Uint8Array) {
const response = await this.socket.emitWithAck('space:load-doc', {
spaceType: this.spaceType,
spaceId: this.spaceId,
docId,
stateVector: state ? await uint8ArrayToBase64(state) : void 0,
});
if ('error' in response) {
// TODO: use [UserFriendlyError]
throw new Error(response.error.message);
}
return {
docId,
missing: base64ToUint8Array(response.data.missing),
state: base64ToUint8Array(response.data.state),
timestamp: new Date(response.data.timestamp),
};
}
// Push one doc update to the server; the ack carries the server-side
// timestamp assigned to the update.
override async pushDocUpdate(update: DocUpdate) {
  const { docId, bin } = update;
  const res = await this.socket.emitWithAck('space:push-doc-update', {
    spaceType: this.spaceType,
    spaceId: this.spaceId,
    docId,
    updates: await uint8ArrayToBase64(bin),
  });
  if ('error' in res) {
    // TODO(@forehalo): use [UserFriendlyError]
    throw new Error(res.error.message);
  }
  return {
    docId,
    timestamp: new Date(res.data.timestamp),
  };
}
/**
 * Just a rough implementation, cloud doc storage should not need this method.
 *
 * Issues a full `space:load-doc` round trip only to read the timestamp.
 */
override async getDocTimestamp(docId: string): Promise<DocClock | null> {
  const res = await this.socket.emitWithAck('space:load-doc', {
    spaceType: this.spaceType,
    spaceId: this.spaceId,
    docId,
  });
  if ('error' in res) {
    // TODO: use [UserFriendlyError]
    throw new Error(res.error.message);
  }
  return { docId, timestamp: new Date(res.data.timestamp) };
}
// Fetch per-doc timestamps, optionally restricted to docs changed after
// the given time, and convert the epoch numbers into Date objects.
override async getDocTimestamps(after?: Date) {
  const res = await this.socket.emitWithAck('space:load-doc-timestamps', {
    spaceType: this.spaceType,
    spaceId: this.spaceId,
    timestamp: after?.getTime(),
  });
  if ('error' in res) {
    // TODO(@forehalo): use [UserFriendlyError]
    throw new Error(res.error.message);
  }
  const clocks = {} as DocClocks;
  for (const [docId, timestamp] of Object.entries(res.data)) {
    clocks[docId] = new Date(timestamp);
  }
  return clocks;
}
// Ask the server to delete a doc. Fire-and-forget: the server does not
// acknowledge this event, so there is nothing to await.
override async deleteDoc(docId: string) {
  const payload = {
    spaceType: this.spaceType,
    spaceId: this.spaceId,
    docId,
  };
  this.socket.emit('space:delete-doc', payload);
}
// Snapshots are persisted server-side, so the local snapshot pipeline is a
// deliberate no-op here; `false` signals nothing was written.
protected async setDocSnapshot() {
  return false;
}
// The server merges updates itself; there are never pending local updates
// to report, so always return an empty list.
protected async getDocUpdates() {
  return [];
}
// No local updates are ever queued (getDocUpdates returns []), so zero
// updates are ever marked as merged.
protected async markUpdatesMerged() {
  return 0;
}
}

View File

@@ -0,0 +1,2 @@
export * from './blob';
export * from './doc';

View File

@@ -0,0 +1,173 @@
import {
Manager as SocketIOManager,
type Socket as SocketIO,
type SocketOptions,
} from 'socket.io-client';
import { Connection, type ConnectionStatus } from '../../connection';
// TODO(@forehalo): use [UserFriendlyError]
// Shape of the error object carried inside an acked websocket response.
interface EventError {
  name: string;
  message: string;
}
// Ack payload for client-initiated events: either an error or the
// successful data, never both. Consumers discriminate with `'error' in res`.
type WebsocketResponse<T> =
  | {
      error: EventError;
    }
  | {
      data: T;
    };
// Events pushed from the server to the client over the space socket.
interface ServerEvents {
  'space:broadcast-doc-update': {
    spaceType: string;
    spaceId: string;
    docId: string;
    // base64-encoded update payload (decoded with base64ToUint8Array)
    update: string;
    // numeric timestamp; consumers pass it straight to `new Date(...)`
    timestamp: number;
    editor: string;
  };
}
// Events emitted from the client to the server.
//
// A tuple-typed entry `[Request, Response]` means the event is acknowledged:
// the server replies with `WebsocketResponse<Response>` (see ClientEventsMap).
// A plain object entry is fire-and-forget with no acknowledgement.
interface ClientEvents {
  // enter a space room; required before doc-update broadcasts are received
  'space:join': [
    { spaceType: string; spaceId: string; clientVersion: string },
    { clientId: string },
  ];
  'space:leave': { spaceType: string; spaceId: string };
  // per-doc awareness channel join/leave
  'space:join-awareness': [
    {
      spaceType: string;
      spaceId: string;
      docId: string;
      clientVersion: string;
    },
    { clientId: string },
  ];
  'space:leave-awareness': {
    spaceType: string;
    spaceId: string;
    docId: string;
  };
  // push a base64-encoded doc update; server acks with its timestamp
  'space:push-doc-update': [
    { spaceType: string; spaceId: string; docId: string; updates: string },
    { timestamp: number },
  ];
  // docId -> numeric timestamp map, optionally filtered by `timestamp`
  'space:load-doc-timestamps': [
    {
      spaceType: string;
      spaceId: string;
      timestamp?: number;
    },
    Record<string, number>,
  ];
  // load a doc (diff) against an optional base64 state vector
  'space:load-doc': [
    {
      spaceType: string;
      spaceId: string;
      docId: string;
      stateVector?: string;
    },
    {
      missing: string;
      state: string;
      timestamp: number;
    },
  ];
  'space:delete-doc': { spaceType: string; spaceId: string; docId: string };
}
// Listener signatures for server-pushed events, as expected by socket.io.
export type ServerEventsMap = {
  [Key in keyof ServerEvents]: (data: ServerEvents[Key]) => void;
};
// Handler signatures for client-emitted events: tuple-typed (acked) events
// take `(request, ack)` where the ack receives a WebsocketResponse of the
// tuple's second element; plain-object events take only the payload.
export type ClientEventsMap = {
  [Key in keyof ClientEvents]: ClientEvents[Key] extends Array<any>
    ? (
        data: ClientEvents[Key][0],
        ack: (res: WebsocketResponse<ClientEvents[Key][1]>) => void
      ) => void
    : (data: ClientEvents[Key]) => void;
};
export type Socket = SocketIO<ServerEventsMap, ClientEventsMap>;
/**
 * Encode a Uint8Array as a base64 string using the browser FileReader API.
 *
 * Fix: the original never attached an error handler, so a failed read left
 * the returned promise pending forever; it now rejects instead.
 *
 * @param array raw bytes to encode
 * @returns promise resolving to the base64 payload (empty string if the
 *          reader produced no result)
 */
export function uint8ArrayToBase64(array: Uint8Array): Promise<string> {
  return new Promise<string>((resolve, reject) => {
    // Create a blob from the Uint8Array
    const blob = new Blob([array]);
    const reader = new FileReader();
    reader.onload = function () {
      const dataUrl = reader.result as string | null;
      if (!dataUrl) {
        resolve('');
        return;
      }
      // The result includes the `data:` URL prefix and the MIME type.
      // We only want the Base64 data after the first comma.
      const base64 = dataUrl.split(',')[1];
      resolve(base64);
    };
    // Surface read failures instead of hanging the promise forever.
    reader.onerror = () => {
      reject(reader.error ?? new Error('failed to encode bytes as base64'));
    };
    reader.readAsDataURL(blob);
  });
}
/**
 * Decode a base64 string into a Uint8Array.
 *
 * @param base64 base64-encoded payload (no data-URL prefix)
 * @returns the decoded bytes
 */
export function base64ToUint8Array(base64: string) {
  // atob yields a binary string; copy each char code into a byte buffer.
  const decoded = atob(base64);
  const bytes = new Uint8Array(decoded.length);
  for (let i = 0; i < decoded.length; i++) {
    bytes[i] = decoded.charCodeAt(i);
  }
  return bytes;
}
/**
 * A socket.io connection to the given endpoint. Connections with the same
 * endpoint share one id (see {@link shareId}); the socket itself is opened
 * lazily in {@link doConnect} because the manager uses `autoConnect: false`.
 */
export class SocketConnection extends Connection<Socket> {
  manager: SocketIOManager;

  constructor(
    private readonly endpoint: string,
    private readonly socketOptions: SocketOptions
  ) {
    super();
    // Constructed here rather than as a field initializer: the initializer
    // read the `this.endpoint` parameter property, and whether parameter
    // properties are assigned before field initializers run depends on
    // `useDefineForClassFields` semantics (under ES2022 class fields the
    // initializer would see `undefined`). Building it after `super()` makes
    // the order explicit and target-independent.
    this.manager = new SocketIOManager(endpoint, {
      autoConnect: false,
      transports: ['websocket'],
      secure: new URL(endpoint).protocol === 'https:',
    });
  }

  override get shareId() {
    return `socket:${this.endpoint}`;
  }

  override async doConnect() {
    const conn = this.manager.socket('/', this.socketOptions);
    // Resolve on the first successful connect, reject on the first
    // connection error; then hand the live socket to the base class.
    await new Promise<void>((resolve, reject) => {
      conn.once('connect', () => {
        resolve();
      });
      conn.once('connect_error', err => {
        reject(err);
      });
      conn.open();
    });
    return conn;
  }

  override async doDisconnect(conn: Socket) {
    conn.close();
  }

  /**
   * Socket connection allow explicitly set status by user
   *
   * used when join space failed
   */
  override setStatus(status: ConnectionStatus, error?: Error) {
    super.setStatus(status, error);
  }
}

View File

@@ -1,4 +1,5 @@
import type { Storage } from '../storage';
import { CloudBlobStorage, CloudDocStorage } from './cloud';
import {
IndexedDBBlobStorage,
IndexedDBDocStorage,
@@ -19,7 +20,9 @@ const idbv1: StorageConstructor[] = [
IndexedDBV1BlobStorage,
];
export const storages: StorageConstructor[] = [...idbv1, ...idb];
const cloud: StorageConstructor[] = [CloudDocStorage, CloudBlobStorage];
export const storages: StorageConstructor[] = cloud.concat(idbv1, idb);
const AvailableStorageImplementations = storages.reduce(
(acc, curr) => {

View File

@@ -23,7 +23,7 @@ export class UserQuotaStore extends Store {
return {
userId: data.currentUser.id,
quota: data.currentUser.quota,
used: data.collectAllBlobSizes.size,
used: data.currentUser.quotaUsage.storageQuota,
};
}
}

View File

@@ -70,7 +70,7 @@ export class CloudBlobStorage implements BlobStorage {
query: deleteBlobMutation,
variables: {
workspaceId: key,
hash: key,
key,
},
});
}
@@ -82,6 +82,6 @@ export class CloudBlobStorage implements BlobStorage {
workspaceId: this.workspaceId,
},
});
return result.listBlobs;
return result.workspace.blobs.map(blob => blob.key);
}
}

View File

@@ -1,3 +1,7 @@
mutation deleteBlob($workspaceId: String!, $hash: String!) {
deleteBlob(workspaceId: $workspaceId, hash: $hash)
mutation deleteBlob(
$workspaceId: String!
$key: String!
$permanently: Boolean
) {
deleteBlob(workspaceId: $workspaceId, key: $key, permanently: $permanently)
}

View File

@@ -1,3 +1,10 @@
query listBlobs($workspaceId: String!) {
listBlobs(workspaceId: $workspaceId)
workspace(id: $workspaceId) {
blobs {
key
size
mime
createdAt
}
}
}

View File

@@ -0,0 +1,3 @@
mutation releaseDeletedBlobs($workspaceId: String!) {
releaseDeletedBlobs(workspaceId: $workspaceId)
}

View File

@@ -47,19 +47,37 @@ export const deleteBlobMutation = {
definitionName: 'deleteBlob',
containsFile: false,
query: `
mutation deleteBlob($workspaceId: String!, $hash: String!) {
deleteBlob(workspaceId: $workspaceId, hash: $hash)
mutation deleteBlob($workspaceId: String!, $key: String!, $permanently: Boolean) {
deleteBlob(workspaceId: $workspaceId, key: $key, permanently: $permanently)
}`,
};
export const listBlobsQuery = {
id: 'listBlobsQuery' as const,
operationName: 'listBlobs',
definitionName: 'listBlobs',
definitionName: 'workspace',
containsFile: false,
query: `
query listBlobs($workspaceId: String!) {
listBlobs(workspaceId: $workspaceId)
workspace(id: $workspaceId) {
blobs {
key
size
mime
createdAt
}
}
}`,
};
// Mutation descriptor: permanently release (purge) the soft-deleted blobs
// of a workspace.
// NOTE(review): this file appears to be generated from the .gql documents —
// confirm, and regenerate rather than hand-editing if so.
export const releaseDeletedBlobsMutation = {
  id: 'releaseDeletedBlobsMutation' as const,
  operationName: 'releaseDeletedBlobs',
  definitionName: 'releaseDeletedBlobs',
  containsFile: false,
  query: `
mutation releaseDeletedBlobs($workspaceId: String!) {
  releaseDeletedBlobs(workspaceId: $workspaceId)
}`,
};
@@ -885,7 +903,7 @@ mutation publishPage($workspaceId: String!, $pageId: String!, $mode: PublicPageM
export const quotaQuery = {
id: 'quotaQuery' as const,
operationName: 'quota',
definitionName: 'currentUser,collectAllBlobSizes',
definitionName: 'currentUser',
containsFile: false,
query: `
query quota {
@@ -905,9 +923,9 @@ query quota {
memberLimit
}
}
}
collectAllBlobSizes {
size
quotaUsage {
storageQuota
}
}
}`,
};

View File

@@ -15,8 +15,8 @@ query quota {
memberLimit
}
}
}
collectAllBlobSizes {
size
quotaUsage {
storageQuota
}
}
}

View File

@@ -517,6 +517,14 @@ export interface ListUserInput {
skip?: InputMaybe<Scalars['Int']['input']>;
}
export interface ListedBlob {
__typename?: 'ListedBlob';
createdAt: Scalars['String']['output'];
key: Scalars['String']['output'];
mime: Scalars['String']['output'];
size: Scalars['Int']['output'];
}
export interface ManageUserInput {
/** User email */
email?: InputMaybe<Scalars['String']['input']>;
@@ -569,6 +577,7 @@ export interface Mutation {
leaveWorkspace: Scalars['Boolean']['output'];
publishPage: WorkspacePage;
recoverDoc: Scalars['DateTime']['output'];
releaseDeletedBlobs: Scalars['Boolean']['output'];
/** Remove user avatar */
removeAvatar: RemoveAvatar;
removeWorkspaceFeature: Scalars['Int']['output'];
@@ -673,7 +682,9 @@ export interface MutationCreateWorkspaceArgs {
}
export interface MutationDeleteBlobArgs {
hash: Scalars['String']['input'];
hash?: InputMaybe<Scalars['String']['input']>;
key?: InputMaybe<Scalars['String']['input']>;
permanently?: Scalars['Boolean']['input'];
workspaceId: Scalars['String']['input'];
}
@@ -731,6 +742,10 @@ export interface MutationRecoverDocArgs {
workspaceId: Scalars['String']['input'];
}
export interface MutationReleaseDeletedBlobsArgs {
workspaceId: Scalars['String']['input'];
}
export interface MutationRemoveWorkspaceFeatureArgs {
feature: FeatureType;
workspaceId: Scalars['String']['input'];
@@ -882,9 +897,7 @@ export enum PublicPageMode {
export interface Query {
__typename?: 'Query';
/** @deprecated no more needed */
checkBlobSize: WorkspaceBlobSizes;
/** @deprecated use `user.storageUsage` instead */
/** @deprecated use `user.quotaUsage` instead */
collectAllBlobSizes: WorkspaceBlobSizes;
/** Get current user */
currentUser: Maybe<UserType>;
@@ -925,11 +938,6 @@ export interface Query {
workspaces: Array<WorkspaceType>;
}
export interface QueryCheckBlobSizeArgs {
size: Scalars['SafeInt']['input'];
workspaceId: Scalars['String']['input'];
}
export interface QueryErrorArgs {
name: ErrorNames;
}
@@ -1225,6 +1233,11 @@ export interface UserQuotaHumanReadable {
storageQuota: Scalars['String']['output'];
}
export interface UserQuotaUsage {
__typename?: 'UserQuotaUsage';
storageQuota: Scalars['SafeInt']['output'];
}
export interface UserType {
__typename?: 'UserType';
/** User avatar url */
@@ -1250,6 +1263,7 @@ export interface UserType {
/** User name */
name: Scalars['String']['output'];
quota: Maybe<UserQuota>;
quotaUsage: UserQuotaUsage;
subscriptions: Array<SubscriptionType>;
/** @deprecated use [/api/auth/sign-in?native=true] instead */
token: TokenType;
@@ -1313,7 +1327,7 @@ export interface WorkspaceType {
/** Available features of workspace */
availableFeatures: Array<FeatureType>;
/** List blobs of workspace */
blobs: Array<Scalars['String']['output']>;
blobs: Array<ListedBlob>;
/** Blobs size of workspace */
blobsSize: Scalars['Int']['output'];
/** Workspace created date */
@@ -1417,7 +1431,8 @@ export type AdminServerConfigQuery = {
export type DeleteBlobMutationVariables = Exact<{
workspaceId: Scalars['String']['input'];
hash: Scalars['String']['input'];
key: Scalars['String']['input'];
permanently?: InputMaybe<Scalars['Boolean']['input']>;
}>;
export type DeleteBlobMutation = {
@@ -1429,7 +1444,28 @@ export type ListBlobsQueryVariables = Exact<{
workspaceId: Scalars['String']['input'];
}>;
export type ListBlobsQuery = { __typename?: 'Query'; listBlobs: Array<string> };
export type ListBlobsQuery = {
__typename?: 'Query';
workspace: {
__typename?: 'WorkspaceType';
blobs: Array<{
__typename?: 'ListedBlob';
key: string;
size: number;
mime: string;
createdAt: string;
}>;
};
};
export type ReleaseDeletedBlobsMutationVariables = Exact<{
workspaceId: Scalars['String']['input'];
}>;
export type ReleaseDeletedBlobsMutation = {
__typename?: 'Mutation';
releaseDeletedBlobs: boolean;
};
export type SetBlobMutationVariables = Exact<{
workspaceId: Scalars['String']['input'];
@@ -2196,8 +2232,8 @@ export type QuotaQuery = {
memberLimit: string;
};
} | null;
quotaUsage: { __typename?: 'UserQuotaUsage'; storageQuota: number };
} | null;
collectAllBlobSizes: { __typename?: 'WorkspaceBlobSizes'; size: number };
};
export type RecoverDocMutationVariables = Exact<{
@@ -2953,6 +2989,11 @@ export type Mutations =
variables: DeleteBlobMutationVariables;
response: DeleteBlobMutation;
}
| {
name: 'releaseDeletedBlobsMutation';
variables: ReleaseDeletedBlobsMutationVariables;
response: ReleaseDeletedBlobsMutation;
}
| {
name: 'setBlobMutation';
variables: SetBlobMutationVariables;