feat(server): add workspace name and avatarKey to database (#10513)

Closes CLOUD-153
This commit is contained in:
fengmk2
2025-03-06 15:25:05 +00:00
parent c76b2504fe
commit 81694a1144
8 changed files with 90 additions and 15 deletions

View File

@@ -118,3 +118,66 @@ test('should ignore update doc content to database when snapshot parse failed',
const content = await models.doc.getMeta(workspace.id, docId);
t.is(content, null);
});
test('should update workspace content to database when workspace is updated', async t => {
const { docReader, models, adapter, listener } = t.context;
const updates: Buffer[] = [];
{
const doc = new YDoc();
doc.on('update', data => {
updates.push(Buffer.from(data));
});
const text = doc.getText('content');
text.insert(0, 'hello');
text.insert(5, 'world');
}
await adapter.pushDocUpdates(workspace.id, workspace.id, updates);
await adapter.getDoc(workspace.id, workspace.id);
mock.method(docReader, 'parseWorkspaceContent', () => {
return {
name: 'test workspace name',
avatarKey: 'test avatar key',
};
});
await listener.markDocContentCacheStale({
workspaceId: workspace.id,
docId: workspace.id,
blob: Buffer.from([]),
});
const content = await models.workspace.get(workspace.id);
t.truthy(content);
t.is(content!.name, 'test workspace name');
t.is(content!.avatarKey, 'test avatar key');
});
test('should ignore update workspace content to database when parse workspace content return null', async t => {
const { models, adapter, listener } = t.context;
const updates: Buffer[] = [];
{
const doc = new YDoc();
doc.on('update', data => {
updates.push(Buffer.from(data));
});
const text = doc.getText('content');
text.insert(0, 'hello');
text.insert(5, 'world');
}
await adapter.pushDocUpdates(workspace.id, workspace.id, updates);
const doc = await adapter.getDoc(workspace.id, workspace.id);
const spy = Sinon.spy(models.workspace, 'update');
await listener.markDocContentCacheStale({
workspaceId: workspace.id,
docId: workspace.id,
blob: Buffer.from(doc!.bin),
});
t.is(spy.callCount, 0);
const content = await models.workspace.get(workspace.id);
t.truthy(content);
t.is(content!.name, null);
t.is(content!.avatarKey, null);
});

View File

@@ -193,7 +193,7 @@ test('should get workspace content with default avatar', async t => {
user.id
);
const track = mock.method(docReader, 'parseWorkspaceContent', () => ({
mock.method(docReader, 'parseWorkspaceContent', () => ({
name: 'Test Workspace',
avatarKey: '',
}));
@@ -206,7 +206,6 @@ test('should get workspace content with default avatar', async t => {
avatarKey: '',
avatarUrl: undefined,
});
t.is(track.mock.callCount(), 1);
});
test('should get workspace content with custom avatar', async t => {
@@ -239,7 +238,7 @@ test('should get workspace content with custom avatar', async t => {
Buffer.from('mock avatar image data here')
);
const track = mock.method(docReader, 'parseWorkspaceContent', () => ({
mock.method(docReader, 'parseWorkspaceContent', () => ({
name: 'Test Workspace',
avatarKey,
}));
@@ -252,5 +251,4 @@ test('should get workspace content with custom avatar', async t => {
avatarKey,
avatarUrl: `http://localhost:3010/api/workspaces/${workspace.id}/blobs/${avatarKey}`,
});
t.is(track.mock.callCount(), 1);
});

View File

@@ -26,6 +26,13 @@ export class DocEventsListener {
return;
}
await this.models.doc.upsertMeta(workspaceId, docId, content);
} else {
// update workspace content to database
const content = this.docReader.parseWorkspaceContent(blob);
if (!content) {
return;
}
await this.models.workspace.update(workspaceId, content);
}
}
}

View File

@@ -12,7 +12,6 @@ import {
Config,
CryptoHelper,
getOrGenRequestId,
URLHelper,
UserFriendlyError,
} from '../../base';
import { WorkspaceBlobStorage } from '../storage';
@@ -79,6 +78,7 @@ export abstract class DocReader {
return content;
}
// TODO(@fengmk2): should remove this method after frontend support workspace content update
async getWorkspaceContent(
workspaceId: string
): Promise<WorkspaceDocInfo | null> {
@@ -129,8 +129,7 @@ export class DatabaseDocReader extends DocReader {
constructor(
protected override readonly cache: Cache,
protected readonly workspace: PgWorkspaceDocStorageAdapter,
protected readonly blobStorage: WorkspaceBlobStorage,
protected readonly url: URLHelper
protected readonly blobStorage: WorkspaceBlobStorage
) {
super(cache);
}
@@ -178,9 +177,7 @@ export class DatabaseDocReader extends DocReader {
}
let avatarUrl: string | undefined;
if (content.avatarKey) {
avatarUrl = this.url.link(
`/api/workspaces/${workspaceId}/blobs/${content.avatarKey}`
);
avatarUrl = this.blobStorage.getAvatarUrl(workspaceId, content.avatarKey);
}
return {
id: workspaceId,
@@ -200,10 +197,9 @@ export class RpcDocReader extends DatabaseDocReader {
private readonly crypto: CryptoHelper,
protected override readonly cache: Cache,
protected override readonly workspace: PgWorkspaceDocStorageAdapter,
protected override readonly blobStorage: WorkspaceBlobStorage,
protected override readonly url: URLHelper
protected override readonly blobStorage: WorkspaceBlobStorage
) {
super(cache, workspace, blobStorage, url);
super(cache, workspace, blobStorage);
}
private async fetch(

View File

@@ -11,6 +11,7 @@ import {
PutObjectMetadata,
type StorageProvider,
StorageProviderFactory,
URLHelper,
} from '../../../base';
declare global {
@@ -35,7 +36,8 @@ export class WorkspaceBlobStorage {
private readonly config: Config,
private readonly event: EventBus,
private readonly storageFactory: StorageProviderFactory,
private readonly db: PrismaClient
private readonly db: PrismaClient,
private readonly url: URLHelper
) {
this.provider = this.storageFactory.create(this.config.storages.blob);
}
@@ -140,6 +142,10 @@ export class WorkspaceBlobStorage {
return sum._sum.size ?? 0;
}
/**
 * Build the public URL for a workspace avatar blob.
 *
 * @param workspaceId - id of the workspace that owns the blob
 * @param avatarKey - blob key of the avatar image
 * @returns a link to the workspace blob endpoint for that key
 */
getAvatarUrl(workspaceId: string, avatarKey: string) {
  const blobPath = `/api/workspaces/${workspaceId}/blobs/${avatarKey}`;
  return this.url.link(blobPath);
}
private trySyncBlobsMeta(workspaceId: string, blobs: ListObjectsMetadata[]) {
for (const blob of blobs) {
this.event.emit('workspace.blob.sync', {