Mirror of https://github.com/toeverything/AFFiNE.git, synced 2026-02-07 10:03:45 +00:00
Compare commits: 0.23.0-bet ... 0.23.0-bet (10 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 5599c39e97 | |
| | 6b2639cbbb | |
| | 82b3c0d264 | |
| | a4680d236d | |
| | f88e1dffb6 | |
| | e773930256 | |
| | 1c1dade2d5 | |
| | e2a799c70a | |
| | 9b881eb59a | |
| | e6f91cced6 | |
@@ -565,6 +565,11 @@
"type": "boolean",
"description": "Only allow users with early access features to access the app\n@default false",
"default": false
},
"allowGuestDemoWorkspace": {
"type": "boolean",
"description": "Whether allow guest users to create demo workspaces.\n@default true",
"default": true
}
}
},
@@ -592,6 +597,11 @@
"type": "string",
"description": "Allowed version range of the app that allowed to access the server. Requires 'client/versionControl.enabled' to be true to take effect.\n@default \">=0.20.0\"",
"default": ">=0.20.0"
},
"allowGuestDemoWorkspace": {
"type": "boolean",
"description": "Allow guests to access demo workspace.\n@default true",
"default": true
}
}
},
@@ -732,6 +742,11 @@
},
"default": {}
},
"providers.morph": {
"type": "object",
"description": "The config for the morph provider.\n@default {}",
"default": {}
},
"unsplash": {
"type": "object",
"description": "The config for the unsplash key.\n@default {\"key\":\"\"}",
@@ -0,0 +1,67 @@
-- CreateTable
CREATE TABLE "comments" (
"sid" INT GENERATED BY DEFAULT AS IDENTITY,
"id" VARCHAR NOT NULL,
"workspace_id" VARCHAR NOT NULL,
"doc_id" VARCHAR NOT NULL,
"user_id" VARCHAR NOT NULL,
"content" JSONB NOT NULL,
"created_at" TIMESTAMPTZ(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updated_at" TIMESTAMPTZ(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"deleted_at" TIMESTAMPTZ(3),
"resolved" BOOLEAN NOT NULL DEFAULT false,

CONSTRAINT "comments_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "replies" (
"sid" INT GENERATED BY DEFAULT AS IDENTITY,
"id" VARCHAR NOT NULL,
"user_id" VARCHAR NOT NULL,
"comment_id" VARCHAR NOT NULL,
"workspace_id" VARCHAR NOT NULL,
"doc_id" VARCHAR NOT NULL,
"content" JSONB NOT NULL,
"created_at" TIMESTAMPTZ(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updated_at" TIMESTAMPTZ(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"deleted_at" TIMESTAMPTZ(3),

CONSTRAINT "replies_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE UNIQUE INDEX "comments_sid_key" ON "comments"("sid");

-- CreateIndex
CREATE INDEX "comments_workspace_id_doc_id_sid_idx" ON "comments"("workspace_id", "doc_id", "sid");

-- CreateIndex
CREATE INDEX "comments_workspace_id_doc_id_updated_at_idx" ON "comments"("workspace_id", "doc_id", "updated_at");

-- CreateIndex
CREATE INDEX "comments_user_id_idx" ON "comments"("user_id");

-- CreateIndex
CREATE UNIQUE INDEX "replies_sid_key" ON "replies"("sid");

-- CreateIndex
CREATE INDEX "replies_comment_id_sid_idx" ON "replies"("comment_id", "sid");

-- CreateIndex
CREATE INDEX "replies_workspace_id_doc_id_updated_at_idx" ON "replies"("workspace_id", "doc_id", "updated_at");

-- CreateIndex
CREATE INDEX "replies_user_id_idx" ON "replies"("user_id");

-- AddForeignKey
ALTER TABLE "comments" ADD CONSTRAINT "comments_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "comments" ADD CONSTRAINT "comments_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "replies" ADD CONSTRAINT "replies_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "replies" ADD CONSTRAINT "replies_comment_id_fkey" FOREIGN KEY ("comment_id") REFERENCES "comments"("id") ON DELETE CASCADE ON UPDATE CASCADE;
@@ -0,0 +1,23 @@
-- CreateTable
CREATE TABLE "comment_attachments" (
"sid" INT GENERATED BY DEFAULT AS IDENTITY,
"workspace_id" VARCHAR NOT NULL,
"doc_id" VARCHAR NOT NULL,
"key" VARCHAR NOT NULL,
"size" INTEGER NOT NULL,
"mime" VARCHAR NOT NULL,
"name" VARCHAR NOT NULL,
"created_at" TIMESTAMPTZ(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"created_by" VARCHAR,

CONSTRAINT "comment_attachments_pkey" PRIMARY KEY ("workspace_id","doc_id","key")
);

-- CreateIndex
CREATE UNIQUE INDEX "comment_attachments_sid_key" ON "comment_attachments"("sid");

-- AddForeignKey
ALTER TABLE "comment_attachments" ADD CONSTRAINT "comment_attachments_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "comment_attachments" ADD CONSTRAINT "comment_attachments_created_by_fkey" FOREIGN KEY ("created_by") REFERENCES "users"("id") ON DELETE SET NULL ON UPDATE CASCADE;
@@ -32,6 +32,7 @@
"@ai-sdk/google": "^1.2.18",
"@ai-sdk/google-vertex": "^2.2.23",
"@ai-sdk/openai": "^1.3.22",
"@ai-sdk/openai-compatible": "^0.2.14",
"@ai-sdk/perplexity": "^1.1.9",
"@apollo/server": "^4.11.3",
"@aws-sdk/client-s3": "^3.779.0",
@@ -46,6 +46,9 @@ model User {
// receive notifications
notifications Notification[] @relation("user_notifications")
settings UserSettings?
comments Comment[]
replies Reply[]
commentAttachments CommentAttachment[] @relation("createdCommentAttachments")

@@index([email])
@@map("users")
@@ -126,6 +129,8 @@ model Workspace {
blobs Blob[]
ignoredDocs AiWorkspaceIgnoredDocs[]
embedFiles AiWorkspaceFiles[]
comments Comment[]
commentAttachments CommentAttachment[]

@@map("workspaces")
}
@@ -856,3 +861,70 @@ model UserSettings {

@@map("user_settings")
}

model Comment {
// NOTE: manually set this column type to identity in migration file
sid Int @unique @default(autoincrement()) @db.Integer
id String @id @default(uuid()) @db.VarChar
workspaceId String @map("workspace_id") @db.VarChar
docId String @map("doc_id") @db.VarChar
userId String @map("user_id") @db.VarChar
content Json @db.JsonB
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
updatedAt DateTime @default(now()) @updatedAt @map("updated_at") @db.Timestamptz(3)
deletedAt DateTime? @map("deleted_at") @db.Timestamptz(3)
// whether the comment is resolved
resolved Boolean @default(false) @map("resolved")

user User @relation(fields: [userId], references: [id], onDelete: Cascade)
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
replies Reply[]

@@index([workspaceId, docId, sid])
@@index([workspaceId, docId, updatedAt])
@@index([userId])
@@map("comments")
}

model Reply {
// NOTE: manually set this column type to identity in migration file
sid Int @unique @default(autoincrement()) @db.Integer
id String @id @default(uuid()) @db.VarChar
userId String @map("user_id") @db.VarChar
commentId String @map("comment_id") @db.VarChar
// query new replies by workspaceId and docId
workspaceId String @map("workspace_id") @db.VarChar
docId String @map("doc_id") @db.VarChar
content Json @db.JsonB
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
updatedAt DateTime @default(now()) @updatedAt @map("updated_at") @db.Timestamptz(3)
deletedAt DateTime? @map("deleted_at") @db.Timestamptz(3)

user User @relation(fields: [userId], references: [id], onDelete: Cascade)
comment Comment @relation(fields: [commentId], references: [id], onDelete: Cascade)

@@index([commentId, sid])
@@index([workspaceId, docId, updatedAt])
@@index([userId])
@@map("replies")
}

model CommentAttachment {
// NOTE: manually set this column type to identity in migration file
sid Int @unique @default(autoincrement())
workspaceId String @map("workspace_id") @db.VarChar
docId String @map("doc_id") @db.VarChar
key String @db.VarChar
size Int @db.Integer
mime String @db.VarChar
name String @db.VarChar
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz(3)
createdBy String? @map("created_by") @db.VarChar

workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
// will delete creator record if creator's account is deleted
createdByUser User? @relation(name: "createdCommentAttachments", fields: [createdBy], references: [id], onDelete: SetNull)

@@id([workspaceId, docId, key])
@@map("comment_attachments")
}
@@ -907,4 +907,14 @@ export const USER_FRIENDLY_ERRORS = {
args: { reason: 'string' },
message: ({ reason }) => `Invalid indexer input: ${reason}`,
},

// comment and reply errors
comment_not_found: {
type: 'resource_not_found',
message: 'Comment not found.',
},
reply_not_found: {
type: 'resource_not_found',
message: 'Reply not found.',
},
} satisfies Record<string, UserFriendlyErrorOptions>;

@@ -1067,6 +1067,18 @@ export class InvalidIndexerInput extends UserFriendlyError {
super('invalid_input', 'invalid_indexer_input', message, args);
}
}

export class CommentNotFound extends UserFriendlyError {
constructor(message?: string) {
super('resource_not_found', 'comment_not_found', message);
}
}

export class ReplyNotFound extends UserFriendlyError {
constructor(message?: string) {
super('resource_not_found', 'reply_not_found', message);
}
}
export enum ErrorNames {
INTERNAL_SERVER_ERROR,
NETWORK_ERROR,
@@ -1202,7 +1214,9 @@ export enum ErrorNames {
INVALID_APP_CONFIG_INPUT,
SEARCH_PROVIDER_NOT_FOUND,
INVALID_SEARCH_PROVIDER_REQUEST,
INVALID_INDEXER_INPUT
INVALID_INDEXER_INPUT,
COMMENT_NOT_FOUND,
REPLY_NOT_FOUND
}
registerEnumType(ErrorNames, {
name: 'ErrorNames'
@@ -4,6 +4,7 @@ import { defineModuleConfig } from '../../base';

export interface ServerFlags {
earlyAccessControl: boolean;
allowGuestDemoWorkspace: boolean;
}

declare global {
@@ -75,4 +76,8 @@ defineModuleConfig('flags', {
desc: 'Only allow users with early access features to access the app',
default: false,
},
allowGuestDemoWorkspace: {
desc: 'Whether allow guest users to create demo workspaces.',
default: true,
},
});

@@ -85,6 +85,7 @@ export class ServerConfigResolver {
baseUrl: this.url.requestBaseUrl,
type: env.DEPLOYMENT_TYPE,
features: this.server.features,
allowGuestDemoWorkspace: this.config.flags.allowGuestDemoWorkspace,
};
}

@@ -38,4 +38,9 @@ export class ServerConfigType {

@Field(() => [ServerFeature], { description: 'enabled server features' })
features!: ServerFeature[];

@Field(() => Boolean, {
description: 'Whether allow guest users to create demo workspaces.',
})
allowGuestDemoWorkspace!: boolean;
}

@@ -5,6 +5,7 @@ export interface VersionConfig {
enabled: boolean;
requiredVersion: string;
};
allowGuestDemoWorkspace?: boolean;
}

declare global {
@@ -28,4 +29,8 @@ defineModuleConfig('client', {
desc: "Allowed version range of the app that allowed to access the server. Requires 'client/versionControl.enabled' to be true to take effect.",
default: '>=0.20.0',
},
allowGuestDemoWorkspace: {
desc: 'Allow guests to access demo workspace.',
default: true,
},
});
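For context on how the new flag reaches clients: ServerConfigResolver above copies config.flags.allowGuestDemoWorkspace into the public server config, and ServerConfigType exposes it as a GraphQL field. Below is a minimal client-side sketch for reading it; it assumes the usual /graphql endpoint and a serverConfig query field, neither of which is part of this diff.

```ts
// Sketch: read the new allowGuestDemoWorkspace flag from the public server config.
// Assumes a GraphQL endpoint at `${baseUrl}/graphql` exposing a `serverConfig` query.
async function fetchAllowGuestDemoWorkspace(baseUrl: string): Promise<boolean> {
  const res = await fetch(`${baseUrl}/graphql`, {
    method: 'POST',
    headers: { 'content-type': 'application/json' },
    body: JSON.stringify({
      query: 'query { serverConfig { allowGuestDemoWorkspace } }',
    }),
  });
  const { data } = (await res.json()) as {
    data?: { serverConfig?: { allowGuestDemoWorkspace?: boolean } };
  };
  // Fall back to true, matching the server-side default, if the field is missing.
  return data?.serverConfig?.allowGuestDemoWorkspace ?? true;
}
```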
@@ -0,0 +1,33 @@
# Snapshot report for `src/models/__tests__/comment.spec.ts`

The actual snapshot is saved in `comment.spec.ts.snap`.

Generated by [AVA](https://avajs.dev).

## should create and get a reply

> Snapshot 1

{
content: [
{
text: 'test reply',
type: 'text',
},
],
type: 'paragraph',
}

## should update a reply

> Snapshot 1

{
content: [
{
text: 'test reply2',
type: 'text',
},
],
type: 'paragraph',
}
Binary file not shown.
@@ -0,0 +1,125 @@
import test from 'ava';

import { createModule } from '../../__tests__/create-module';
import { Mockers } from '../../__tests__/mocks';
import { Models } from '..';

const module = await createModule();
const models = module.get(Models);

test.after.always(async () => {
await module.close();
});

test('should upsert comment attachment', async t => {
const workspace = await module.create(Mockers.Workspace);

// add
const item = await models.commentAttachment.upsert({
workspaceId: workspace.id,
docId: 'test-doc-id',
key: 'test-key',
name: 'test-name',
mime: 'text/plain',
size: 100,
});

t.is(item.workspaceId, workspace.id);
t.is(item.docId, 'test-doc-id');
t.is(item.key, 'test-key');
t.is(item.mime, 'text/plain');
t.is(item.size, 100);
t.truthy(item.createdAt);

// update
const item2 = await models.commentAttachment.upsert({
workspaceId: workspace.id,
docId: 'test-doc-id',
name: 'test-name',
key: 'test-key',
mime: 'text/html',
size: 200,
});

t.is(item2.workspaceId, workspace.id);
t.is(item2.docId, 'test-doc-id');
t.is(item2.key, 'test-key');
t.is(item2.mime, 'text/html');
t.is(item2.size, 200);

// make sure only one blob is created
const items = await models.commentAttachment.list(workspace.id);
t.is(items.length, 1);
t.deepEqual(items[0], item2);
});

test('should delete comment attachment', async t => {
const workspace = await module.create(Mockers.Workspace);
const item = await models.commentAttachment.upsert({
workspaceId: workspace.id,
docId: 'test-doc-id',
key: 'test-key',
name: 'test-name',
mime: 'text/plain',
size: 100,
});

await models.commentAttachment.delete(workspace.id, item.docId, item.key);

const item2 = await models.commentAttachment.get(
workspace.id,
item.docId,
item.key
);

t.is(item2, null);
});

test('should list comment attachments', async t => {
const workspace = await module.create(Mockers.Workspace);
const item1 = await models.commentAttachment.upsert({
workspaceId: workspace.id,
docId: 'test-doc-id',
name: 'test-name',
key: 'test-key',
mime: 'text/plain',
size: 100,
});

const item2 = await models.commentAttachment.upsert({
workspaceId: workspace.id,
docId: 'test-doc-id2',
name: 'test-name2',
key: 'test-key2',
mime: 'text/plain',
size: 200,
});

const items = await models.commentAttachment.list(workspace.id);

t.is(items.length, 2);
items.sort((a, b) => a.key.localeCompare(b.key));
t.is(items[0].key, item1.key);
t.is(items[1].key, item2.key);
});

test('should get comment attachment', async t => {
const workspace = await module.create(Mockers.Workspace);
const item = await models.commentAttachment.upsert({
workspaceId: workspace.id,
docId: 'test-doc-id',
name: 'test-name',
key: 'test-key',
mime: 'text/plain',
size: 100,
});

const item2 = await models.commentAttachment.get(
workspace.id,
item.docId,
item.key
);

t.truthy(item2);
t.is(item2?.key, item.key);
});
packages/backend/server/src/models/__tests__/comment.spec.ts (526 lines, Normal file)
@@ -0,0 +1,526 @@
import { randomUUID } from 'node:crypto';

import test from 'ava';

import { createModule } from '../../__tests__/create-module';
import { Mockers } from '../../__tests__/mocks';
import { Models } from '..';
import { CommentChangeAction, Reply } from '../comment';

const module = await createModule({});

const models = module.get(Models);
const owner = await module.create(Mockers.User);
const workspace = await module.create(Mockers.Workspace, {
owner,
});

test.after.always(async () => {
await module.close();
});

test('should throw error when content is null', async t => {
const docId = randomUUID();
await t.throwsAsync(
models.comment.create({
// @ts-expect-error test null content
content: null,
workspaceId: workspace.id,
docId,
userId: owner.id,
}),
{
message: /Expected object, received null/,
}
);

await t.throwsAsync(
models.comment.createReply({
// @ts-expect-error test null content
content: null,
commentId: randomUUID(),
}),
{
message: /Expected object, received null/,
}
);
});

test('should create a comment', async t => {
const docId = randomUUID();
const comment = await models.comment.create({
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test' }],
},
workspaceId: workspace.id,
docId,
userId: owner.id,
});
t.is(comment.createdAt.getTime(), comment.updatedAt.getTime());
t.is(comment.deletedAt, null);
t.is(comment.resolved, false);
t.deepEqual(comment.content, {
type: 'paragraph',
content: [{ type: 'text', text: 'test' }],
});
});

test('should get a comment', async t => {
const docId = randomUUID();
const comment1 = await models.comment.create({
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test' }],
},
workspaceId: workspace.id,
docId,
userId: owner.id,
});

const comment2 = await models.comment.get(comment1.id);
t.deepEqual(comment2, comment1);
t.deepEqual(comment2?.content, {
type: 'paragraph',
content: [{ type: 'text', text: 'test' }],
});
});

test('should update a comment', async t => {
const docId = randomUUID();
const comment1 = await models.comment.create({
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test' }],
},
workspaceId: workspace.id,
docId,
userId: owner.id,
});

const comment2 = await models.comment.update({
id: comment1.id,
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test2' }],
},
});
t.deepEqual(comment2.content, {
type: 'paragraph',
content: [{ type: 'text', text: 'test2' }],
});
// updatedAt should be changed
t.true(comment2.updatedAt.getTime() > comment2.createdAt.getTime());

const comment3 = await models.comment.get(comment1.id);
t.deepEqual(comment3, comment2);
});

test('should delete a comment', async t => {
const docId = randomUUID();
const comment = await models.comment.create({
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test' }],
},
workspaceId: workspace.id,
docId,
userId: owner.id,
});

await models.comment.delete(comment.id);

const comment2 = await models.comment.get(comment.id);

t.is(comment2, null);
});

test('should resolve a comment', async t => {
const docId = randomUUID();
const comment = await models.comment.create({
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test' }],
},
workspaceId: workspace.id,
docId,
userId: owner.id,
});

const comment2 = await models.comment.resolve({
id: comment.id,
resolved: true,
});
t.is(comment2.resolved, true);

const comment3 = await models.comment.get(comment.id);
t.is(comment3!.resolved, true);
// updatedAt should be changed
t.true(comment3!.updatedAt.getTime() > comment3!.createdAt.getTime());

const comment4 = await models.comment.resolve({
id: comment.id,
resolved: false,
});

t.is(comment4.resolved, false);

const comment5 = await models.comment.get(comment.id);
t.is(comment5!.resolved, false);
// updatedAt should be changed
t.true(comment5!.updatedAt.getTime() > comment3!.updatedAt.getTime());
});

test('should count comments', async t => {
const docId = randomUUID();
const comment1 = await models.comment.create({
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test' }],
},
workspaceId: workspace.id,
docId,
userId: owner.id,
});

const count = await models.comment.count(workspace.id, docId);
t.is(count, 1);

await models.comment.delete(comment1.id);
const count2 = await models.comment.count(workspace.id, docId);
t.is(count2, 0);
});

test('should create and get a reply', async t => {
const docId = randomUUID();
const comment = await models.comment.create({
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test' }],
},
workspaceId: workspace.id,
docId,
userId: owner.id,
});

const reply = await models.comment.createReply({
userId: owner.id,
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test reply' }],
},
commentId: comment.id,
});

t.snapshot(reply.content);
t.is(reply.commentId, comment.id);
t.is(reply.userId, owner.id);
t.is(reply.workspaceId, workspace.id);
t.is(reply.docId, docId);

const reply2 = await models.comment.getReply(reply.id);
t.deepEqual(reply2, reply);
});

test('should throw error reply on a deleted comment', async t => {
const docId = randomUUID();
const comment = await models.comment.create({
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test' }],
},
workspaceId: workspace.id,
docId,
userId: owner.id,
});

await models.comment.delete(comment.id);

await t.throwsAsync(
models.comment.createReply({
userId: owner.id,
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test reply' }],
},
commentId: comment.id,
}),
{
message: /Comment not found/,
}
);
});

test('should update a reply', async t => {
const docId = randomUUID();
const comment = await models.comment.create({
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test' }],
},
workspaceId: workspace.id,
docId,
userId: owner.id,
});

const reply = await models.comment.createReply({
userId: owner.id,
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test reply' }],
},
commentId: comment.id,
});

const reply2 = await models.comment.updateReply({
id: reply.id,
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test reply2' }],
},
});

t.snapshot(reply2.content);
t.true(reply2.updatedAt.getTime() > reply2.createdAt.getTime());
});

test('should delete a reply', async t => {
const docId = randomUUID();
const comment = await models.comment.create({
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test' }],
},
workspaceId: workspace.id,
docId,
userId: owner.id,
});

const reply = await models.comment.createReply({
userId: owner.id,
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test reply' }],
},
commentId: comment.id,
});

await models.comment.deleteReply(reply.id);
const reply2 = await models.comment.getReply(reply.id);
t.is(reply2, null);
});

test('should list comments with replies', async t => {
const docId = randomUUID();
const comment1 = await models.comment.create({
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test' }],
},
workspaceId: workspace.id,
docId,
userId: owner.id,
});

const comment2 = await models.comment.create({
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test2' }],
},
workspaceId: workspace.id,
docId,
userId: owner.id,
});

const comment3 = await models.comment.create({
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test3' }],
},
workspaceId: workspace.id,
docId,
userId: owner.id,
});

const reply1 = await models.comment.createReply({
userId: owner.id,
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test reply1' }],
},
commentId: comment1.id,
});

const reply2 = await models.comment.createReply({
userId: owner.id,
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test reply2' }],
},
commentId: comment1.id,
});

const reply3 = await models.comment.createReply({
userId: owner.id,
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test reply3' }],
},
commentId: comment1.id,
});

const reply4 = await models.comment.createReply({
userId: owner.id,
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test reply4' }],
},
commentId: comment2.id,
});

const comments = await models.comment.list(workspace.id, docId);
t.is(comments.length, 3);
t.is(comments[0].id, comment3.id);
t.is(comments[1].id, comment2.id);
t.is(comments[2].id, comment1.id);
t.is(comments[0].replies.length, 0);
t.is(comments[1].replies.length, 1);
t.is(comments[2].replies.length, 3);

t.is(comments[1].replies[0].id, reply4.id);
t.is(comments[2].replies[0].id, reply1.id);
t.is(comments[2].replies[1].id, reply2.id);
t.is(comments[2].replies[2].id, reply3.id);

// list with sid
const comments2 = await models.comment.list(workspace.id, docId, {
sid: comment2.sid,
});
t.is(comments2.length, 1);
t.is(comments2[0].id, comment1.id);
t.is(comments2[0].replies.length, 3);

// ignore deleted comments
await models.comment.delete(comment1.id);
const comments3 = await models.comment.list(workspace.id, docId);
t.is(comments3.length, 2);
t.is(comments3[0].id, comment3.id);
t.is(comments3[1].id, comment2.id);
t.is(comments3[0].replies.length, 0);
t.is(comments3[1].replies.length, 1);
});

test('should list changes', async t => {
const docId = randomUUID();
const comment1 = await models.comment.create({
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test' }],
},
workspaceId: workspace.id,
docId,
userId: owner.id,
});

const comment2 = await models.comment.create({
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test2' }],
},
workspaceId: workspace.id,
docId,
userId: owner.id,
});

const reply1 = await models.comment.createReply({
userId: owner.id,
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test reply1' }],
},
commentId: comment1.id,
});

const reply2 = await models.comment.createReply({
userId: owner.id,
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test reply2' }],
},
commentId: comment1.id,
});

// all changes
const changes1 = await models.comment.listChanges(workspace.id, docId);
t.is(changes1.length, 4);
t.is(changes1[0].action, CommentChangeAction.update);
t.is(changes1[0].id, comment1.id);
t.is(changes1[1].action, CommentChangeAction.update);
t.is(changes1[1].id, comment2.id);
t.is(changes1[2].action, CommentChangeAction.update);
t.is(changes1[2].id, reply1.id);
t.is(changes1[3].action, CommentChangeAction.update);
t.is(changes1[3].id, reply2.id);
// reply has commentId
t.is((changes1[2].item as Reply).commentId, comment1.id);

const changes2 = await models.comment.listChanges(workspace.id, docId, {
commentUpdatedAt: comment1.updatedAt,
replyUpdatedAt: reply1.updatedAt,
});
t.is(changes2.length, 2);
t.is(changes2[0].action, CommentChangeAction.update);
t.is(changes2[0].id, comment2.id);
t.is(changes2[1].action, CommentChangeAction.update);
t.is(changes2[1].id, reply2.id);
t.is(changes2[1].commentId, comment1.id);

// update comment1
const comment1Updated = await models.comment.update({
id: comment1.id,
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test3' }],
},
});

const changes3 = await models.comment.listChanges(workspace.id, docId, {
commentUpdatedAt: comment2.updatedAt,
replyUpdatedAt: reply2.updatedAt,
});
t.is(changes3.length, 1);
t.is(changes3[0].action, CommentChangeAction.update);
t.is(changes3[0].id, comment1Updated.id);

// delete comment1 and reply1, update reply2
await models.comment.delete(comment1.id);
await models.comment.deleteReply(reply1.id);
await models.comment.updateReply({
id: reply2.id,
content: {
type: 'paragraph',
content: [{ type: 'text', text: 'test reply2 updated' }],
},
});

const changes4 = await models.comment.listChanges(workspace.id, docId, {
commentUpdatedAt: comment1Updated.updatedAt,
replyUpdatedAt: reply2.updatedAt,
});
t.is(changes4.length, 3);
t.is(changes4[0].action, CommentChangeAction.delete);
t.is(changes4[0].id, comment1.id);
t.is(changes4[1].action, CommentChangeAction.delete);
t.is(changes4[1].id, reply1.id);
t.is(changes4[1].commentId, comment1.id);
t.is(changes4[2].action, CommentChangeAction.update);
t.is(changes4[2].id, reply2.id);
t.is(changes4[2].commentId, comment1.id);

// no changes
const changes5 = await models.comment.listChanges(workspace.id, docId, {
commentUpdatedAt: changes4[2].item.updatedAt,
replyUpdatedAt: changes4[2].item.updatedAt,
});
t.is(changes5.length, 0);
});
packages/backend/server/src/models/comment-attachment.ts (70 lines, Normal file)
@@ -0,0 +1,70 @@
import { Injectable } from '@nestjs/common';
import { Prisma } from '@prisma/client';

import { BaseModel } from './base';

export type CreateCommentAttachmentInput =
Prisma.CommentAttachmentUncheckedCreateInput;

/**
* Comment Attachment Model
*/
@Injectable()
export class CommentAttachmentModel extends BaseModel {
async upsert(input: CreateCommentAttachmentInput) {
return await this.db.commentAttachment.upsert({
where: {
workspaceId_docId_key: {
workspaceId: input.workspaceId,
docId: input.docId,
key: input.key,
},
},
update: {
name: input.name,
mime: input.mime,
size: input.size,
},
create: {
workspaceId: input.workspaceId,
docId: input.docId,
key: input.key,
name: input.name,
mime: input.mime,
size: input.size,
},
});
}

async delete(workspaceId: string, docId: string, key: string) {
await this.db.commentAttachment.deleteMany({
where: {
workspaceId,
docId,
key,
},
});
this.logger.log(`deleted comment attachment ${workspaceId}/${key}`);
}

async get(workspaceId: string, docId: string, key: string) {
return await this.db.commentAttachment.findUnique({
where: {
workspaceId_docId_key: {
workspaceId,
docId,
key,
},
},
});
}

async list(workspaceId: string, docId?: string) {
return await this.db.commentAttachment.findMany({
where: {
workspaceId,
docId,
},
});
}
}
packages/backend/server/src/models/comment.ts (330 lines, Normal file)
@@ -0,0 +1,330 @@
import { Injectable } from '@nestjs/common';
import { Comment as CommentType, Reply as ReplyType } from '@prisma/client';
import { z } from 'zod';

import { CommentNotFound } from '../base';
import { BaseModel } from './base';

export interface Comment extends CommentType {
content: Record<string, any>;
}

export interface Reply extends ReplyType {
content: Record<string, any>;
}

// TODO(@fengmk2): move IdSchema to common/base.ts
const IdSchema = z.string().trim().min(1).max(100);
const JSONSchema = z.record(z.any());

export const CommentCreateSchema = z.object({
workspaceId: IdSchema,
docId: IdSchema,
userId: IdSchema,
content: JSONSchema,
});

export const CommentUpdateSchema = z.object({
id: IdSchema,
content: JSONSchema,
});

export const CommentResolveSchema = z.object({
id: IdSchema,
resolved: z.boolean(),
});

export const ReplyCreateSchema = z.object({
commentId: IdSchema,
userId: IdSchema,
content: JSONSchema,
});

export const ReplyUpdateSchema = z.object({
id: IdSchema,
content: JSONSchema,
});

export type CommentCreate = z.input<typeof CommentCreateSchema>;
export type CommentUpdate = z.input<typeof CommentUpdateSchema>;
export type CommentResolve = z.input<typeof CommentResolveSchema>;
export type ReplyCreate = z.input<typeof ReplyCreateSchema>;
export type ReplyUpdate = z.input<typeof ReplyUpdateSchema>;

export interface CommentWithReplies extends Comment {
replies: Reply[];
}

export enum CommentChangeAction {
update = 'update',
delete = 'delete',
}

export interface DeletedChangeItem {
deletedAt: Date;
updatedAt: Date;
}

export interface CommentChange {
action: CommentChangeAction;
id: string;
commentId?: string;
item: Comment | Reply | DeletedChangeItem;
}

@Injectable()
export class CommentModel extends BaseModel {
// #region Comment

/**
* Create a comment
* @param input - The comment create input
* @returns The created comment
*/
async create(input: CommentCreate) {
const data = CommentCreateSchema.parse(input);
return (await this.db.comment.create({
data,
})) as Comment;
}

async get(id: string) {
return (await this.db.comment.findUnique({
where: { id, deletedAt: null },
})) as Comment | null;
}

/**
* Update a comment content
* @param input - The comment update input
* @returns The updated comment
*/
async update(input: CommentUpdate) {
const data = CommentUpdateSchema.parse(input);
return await this.db.comment.update({
where: { id: data.id, deletedAt: null },
data: {
content: data.content,
},
});
}

/**
* Delete a comment or reply
* @param id - The id of the comment or reply
* @returns The deleted comment or reply
*/
async delete(id: string) {
await this.db.comment.update({
where: { id, deletedAt: null },
data: { deletedAt: new Date() },
});
this.logger.log(`Comment ${id} deleted`);
}

/**
* Resolve a comment or not
* @param input - The comment resolve input
* @returns The resolved comment
*/
async resolve(input: CommentResolve) {
const data = CommentResolveSchema.parse(input);
return await this.db.comment.update({
where: { id: data.id, deletedAt: null },
data: { resolved: data.resolved },
});
}

async count(workspaceId: string, docId: string) {
return await this.db.comment.count({
where: { workspaceId, docId, deletedAt: null },
});
}

/**
* List comments ordered by sid descending
* @param workspaceId - The workspace id
* @param docId - The doc id
* @param options - The options
* @returns The list of comments with replies
*/
async list(
workspaceId: string,
docId: string,
options?: {
sid?: number;
take?: number;
}
): Promise<CommentWithReplies[]> {
const comments = (await this.db.comment.findMany({
where: {
workspaceId,
docId,
...(options?.sid ? { sid: { lt: options.sid } } : {}),
deletedAt: null,
},
orderBy: { sid: 'desc' },
take: options?.take ?? 100,
})) as Comment[];

const replies = (await this.db.reply.findMany({
where: {
commentId: { in: comments.map(comment => comment.id) },
deletedAt: null,
},
orderBy: { sid: 'asc' },
})) as Reply[];

const replyMap = new Map<string, Reply[]>();
for (const reply of replies) {
const items = replyMap.get(reply.commentId) ?? [];
items.push(reply);
replyMap.set(reply.commentId, items);
}

const commentWithReplies = comments.map(comment => ({
...comment,
replies: replyMap.get(comment.id) ?? [],
}));

return commentWithReplies;
}

async listChanges(
workspaceId: string,
docId: string,
options?: {
commentUpdatedAt?: Date;
replyUpdatedAt?: Date;
take?: number;
}
): Promise<CommentChange[]> {
const take = options?.take ?? 10000;
const comments = (await this.db.comment.findMany({
where: {
workspaceId,
docId,
...(options?.commentUpdatedAt
? { updatedAt: { gt: options.commentUpdatedAt } }
: {}),
},
take,
orderBy: { updatedAt: 'asc' },
})) as Comment[];

const replies = (await this.db.reply.findMany({
where: {
workspaceId,
docId,
...(options?.replyUpdatedAt
? { updatedAt: { gt: options.replyUpdatedAt } }
: {}),
},
take,
orderBy: { updatedAt: 'asc' },
})) as Reply[];

const changes: CommentChange[] = [];
for (const comment of comments) {
if (comment.deletedAt) {
changes.push({
action: CommentChangeAction.delete,
id: comment.id,
item: {
deletedAt: comment.deletedAt,
updatedAt: comment.updatedAt,
},
});
} else {
changes.push({
action: CommentChangeAction.update,
id: comment.id,
item: comment,
});
}
}

for (const reply of replies) {
if (reply.deletedAt) {
changes.push({
action: CommentChangeAction.delete,
id: reply.id,
commentId: reply.commentId,
item: {
deletedAt: reply.deletedAt,
updatedAt: reply.updatedAt,
},
});
} else {
changes.push({
action: CommentChangeAction.update,
id: reply.id,
commentId: reply.commentId,
item: reply,
});
}
}

return changes;
}

// #endregion

// #region Reply

/**
* Reply to a comment
* @param input - The reply create input
* @returns The created reply
*/
async createReply(input: ReplyCreate) {
const data = ReplyCreateSchema.parse(input);
// find comment
const comment = await this.get(data.commentId);
if (!comment) {
throw new CommentNotFound();
}

return (await this.db.reply.create({
data: {
...data,
workspaceId: comment.workspaceId,
docId: comment.docId,
},
})) as Reply;
}

async getReply(id: string) {
return (await this.db.reply.findUnique({
where: { id, deletedAt: null },
})) as Reply | null;
}

/**
* Update a reply content
* @param input - The reply update input
* @returns The updated reply
*/
async updateReply(input: ReplyUpdate) {
const data = ReplyUpdateSchema.parse(input);
return await this.db.reply.update({
where: { id: data.id, deletedAt: null },
data: { content: data.content },
});
}

/**
* Delete a reply
* @param id - The id of the reply
* @returns The deleted reply
*/
async deleteReply(id: string) {
await this.db.reply.update({
where: { id, deletedAt: null },
data: { deletedAt: new Date() },
});
this.logger.log(`Reply ${id} deleted`);
}

// #endregion
}
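For reference, a minimal usage sketch of the CommentModel added above: list() returns comments newest first with sid acting as a backward cursor, and listChanges() drives incremental sync from the last seen updatedAt timestamps. The import path and the surrounding wiring are assumptions for illustration, not part of this change.

```ts
// Sketch only: how a caller might page and sync with the new model APIs.
import type { Models } from './models'; // path assumed; Models is the registry added in this change

async function loadFirstTwoPages(models: Models, workspaceId: string, docId: string) {
  // Newest comments first; each item carries its non-deleted replies.
  const page1 = await models.comment.list(workspaceId, docId, { take: 20 });
  // list() orders by sid descending, so the last item holds the smallest sid seen.
  const cursor = page1.at(-1)?.sid;
  const page2 = cursor
    ? await models.comment.list(workspaceId, docId, { sid: cursor, take: 20 })
    : [];
  return [...page1, ...page2];
}

async function syncSince(
  models: Models,
  workspaceId: string,
  docId: string,
  lastCommentUpdatedAt: Date,
  lastReplyUpdatedAt: Date
) {
  // Returns update/delete entries for comments and replies touched after the
  // given timestamps; deleted items only expose deletedAt/updatedAt.
  return await models.comment.listChanges(workspaceId, docId, {
    commentUpdatedAt: lastCommentUpdatedAt,
    replyUpdatedAt: lastReplyUpdatedAt,
  });
}
```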
@@ -7,6 +7,8 @@ import {
import { ModuleRef } from '@nestjs/core';

import { ApplyType } from '../base';
import { CommentModel } from './comment';
import { CommentAttachmentModel } from './comment-attachment';
import { AppConfigModel } from './config';
import { CopilotContextModel } from './copilot-context';
import { CopilotJobModel } from './copilot-job';
@@ -48,6 +50,8 @@ const MODELS = {
copilotWorkspace: CopilotWorkspaceConfigModel,
copilotJob: CopilotJobModel,
appConfig: AppConfigModel,
comment: CommentModel,
commentAttachment: CommentAttachmentModel,
};

type ModelsType = {
@@ -99,6 +103,8 @@ const ModelsSymbolProvider: ExistingProvider = {
})
export class ModelsModule {}

export * from './comment';
export * from './comment-attachment';
export * from './common';
export * from './copilot-context';
export * from './copilot-job';
@@ -9,6 +9,7 @@ import {
} from './providers/anthropic';
import type { FalConfig } from './providers/fal';
import { GeminiGenerativeConfig, GeminiVertexConfig } from './providers/gemini';
import { MorphConfig } from './providers/morph';
import { OpenAIConfig } from './providers/openai';
import { PerplexityConfig } from './providers/perplexity';
import { VertexSchema } from './providers/types';
@@ -31,6 +32,7 @@ declare global {
perplexity: ConfigItem<PerplexityConfig>;
anthropic: ConfigItem<AnthropicOfficialConfig>;
anthropicVertex: ConfigItem<AnthropicVertexConfig>;
morph: ConfigItem<MorphConfig>;
};
};
}
@@ -82,6 +84,10 @@ defineModuleConfig('copilot', {
default: {},
schema: VertexSchema,
},
'providers.morph': {
desc: 'The config for the morph provider.',
default: {},
},
unsplash: {
desc: 'The config for the unsplash key.',
default: {
@@ -197,34 +197,52 @@ export class CopilotContextService implements OnApplicationBootstrap {
async matchWorkspaceAll(
workspaceId: string,
content: string,
topK: number = 5,
topK: number,
signal?: AbortSignal,
threshold: number = 0.5
threshold: number = 0.8,
docIds?: string[],
scopedThreshold: number = 0.85
) {
if (!this.embeddingClient) return [];
const embedding = await this.embeddingClient.getEmbedding(content, signal);
if (!embedding) return [];

const [fileChunks, workspaceChunks] = await Promise.all([
this.models.copilotWorkspace.matchFileEmbedding(
workspaceId,
embedding,
topK * 2,
threshold
),
this.models.copilotContext.matchWorkspaceEmbedding(
embedding,
workspaceId,
topK * 2,
threshold
),
]);
const [fileChunks, workspaceChunks, scopedWorkspaceChunks] =
await Promise.all([
this.models.copilotWorkspace.matchFileEmbedding(
workspaceId,
embedding,
topK * 2,
threshold
),

if (!fileChunks.length && !workspaceChunks.length) return [];
this.models.copilotContext.matchWorkspaceEmbedding(
embedding,
workspaceId,
topK * 2,
threshold
),
docIds
? this.models.copilotContext.matchWorkspaceEmbedding(
embedding,
workspaceId,
topK * 2,
scopedThreshold,
docIds
)
: null,
]);

if (
!fileChunks.length &&
!workspaceChunks.length &&
!scopedWorkspaceChunks?.length
)
return [];

return await this.embeddingClient.reRank(
content,
[...fileChunks, ...workspaceChunks],
[...fileChunks, ...workspaceChunks, ...(scopedWorkspaceChunks || [])],
topK,
signal
);
@@ -257,6 +257,7 @@ export class CopilotController implements BeforeApplicationShutdown {
...session.config.promptConfig,
signal: this.getSignal(req),
user: user.id,
session: session.config.sessionId,
workspace: session.config.workspaceId,
reasoning,
webSearch,
@@ -311,6 +312,7 @@ export class CopilotController implements BeforeApplicationShutdown {
...session.config.promptConfig,
signal: this.getSignal(req),
user: user.id,
session: session.config.sessionId,
workspace: session.config.workspaceId,
reasoning,
webSearch,
@@ -384,6 +386,7 @@ export class CopilotController implements BeforeApplicationShutdown {
...session.config.promptConfig,
signal: this.getSignal(req),
user: user.id,
session: session.config.sessionId,
workspace: session.config.workspaceId,
reasoning,
webSearch,
@@ -463,6 +466,7 @@ export class CopilotController implements BeforeApplicationShutdown {
...session.config.promptConfig,
signal: this.getSignal(req),
user: user.id,
session: session.config.sessionId,
workspace: session.config.workspaceId,
})
).pipe(
@@ -586,6 +590,7 @@ export class CopilotController implements BeforeApplicationShutdown {
seed: this.parseNumber(params.seed),
signal: this.getSignal(req),
user: user.id,
session: session.config.sessionId,
workspace: session.config.workspaceId,
}
)
@@ -17,7 +17,6 @@ import {
import {
EMBEDDING_DIMENSIONS,
EmbeddingClient,
getReRankSchema,
type ReRankResult,
} from './types';

@@ -81,9 +80,9 @@ class ProductionEmbeddingClient extends EmbeddingClient {
}

private getTargetId<T extends ChunkSimilarity>(embedding: T) {
return 'docId' in embedding
return 'docId' in embedding && typeof embedding.docId === 'string'
? embedding.docId
: 'fileId' in embedding
: 'fileId' in embedding && typeof embedding.fileId === 'string'
? embedding.fileId
: '';
}
@@ -102,24 +101,19 @@ class ProductionEmbeddingClient extends EmbeddingClient {
throw new CopilotPromptNotFound({ name: RERANK_PROMPT });
}
const provider = await this.getProvider({ modelId: prompt.model });
const schema = getReRankSchema(embeddings.length);

const ranks = await provider.structure(
const ranks = await provider.rerank(
{ modelId: prompt.model },
prompt.finish({
query,
results: embeddings.map(e => ({
targetId: this.getTargetId(e),
chunk: e.chunk,
content: e.content,
})),
schema,
}),
{ maxRetries: 3, signal }
embeddings.map(e => prompt.finish({ query, doc: e.content })),
{ signal }
);

try {
return schema.parse(JSON.parse(ranks)).ranks;
return ranks.map((score, i) => ({
chunk: embeddings[i].content,
targetId: this.getTargetId(embeddings[i]),
score,
}));
} catch (error) {
this.logger.error('Failed to parse rerank results', error);
// silent error, will fallback to default sorting in parent method
@@ -176,9 +170,9 @@ class ProductionEmbeddingClient extends EmbeddingClient {

const highConfidenceChunks = ranks
.flat()
.toSorted((a, b) => b.scores.score - a.scores.score)
.filter(r => r.scores.score > 5)
.map(r => chunks[`${r.scores.targetId}:${r.scores.chunk}`])
.toSorted((a, b) => b.score - a.score)
.filter(r => r.score > 5)
.map(r => chunks[`${r.targetId}:${r.chunk}`])
.filter(Boolean);

this.logger.verbose(
@@ -177,11 +177,6 @@ export abstract class EmbeddingClient {

const ReRankItemSchema = z.object({
scores: z.object({
reason: z
.string()
.describe(
'Think step by step, describe in 20 words the reason for giving this score.'
),
chunk: z.string().describe('The chunk index of the search result.'),
targetId: z.string().describe('The id of the target.'),
score: z
@@ -194,11 +189,4 @@ const ReRankItemSchema = z.object({
}),
});

export const getReRankSchema = (size: number) =>
z.object({
ranks: ReRankItemSchema.array().describe(
`A array of scores. Make sure to score all ${size} results.`
),
});

export type ReRankResult = z.infer<ReturnType<typeof getReRankSchema>>['ranks'];
export type ReRankResult = z.infer<typeof ReRankItemSchema>['scores'][];
@@ -342,57 +342,11 @@ Convert a multi-speaker audio recording into a structured JSON format by transcr
messages: [
{
role: 'system',
content: `Evaluate and rank search results based on their relevance and quality to the given query by assigning a score from 1 to 10, where 10 denotes the highest relevance.

Consider various factors such as content alignment with the query, source credibility, timeliness, and user intent.

# Steps

1. **Read the Query**: Understand the main intent and specific details of the search query.
2. **Review Each Result**:
- Analyze the content's relevance to the query.
- Assess the credibility of the source or website.
- Consider the timeliness of the information, ensuring it's current and relevant.
- Evaluate the alignment with potential user intent based on the query.
3. **Scoring**:
- Assign a score from 1 to 10 based on the overall relevance and quality, with 10 being the most relevant.
- Each chunk returns a score and should not be mixed together.

# Output Format

Return a JSON object for each result in the following format in raw:
{
"scores": [
{
"reason": "[Reasoning behind the score in 20 words]",
"chunk": "[chunk]",
"targetId": "[targetId]",
"score": [1-10]
}
]
}

# Notes

- Be aware of the potential biases or inaccuracies in the sources.
- Consider if the content is comprehensive and directly answers the query.
- Pay attention to the nuances of user intent that might influence relevance.`,
content: `Judge whether the Document meets the requirements based on the Query and the Instruct provided. The answer must be "yes" or "no".`,
},
{
role: 'user',
content: `
<query>{{query}}</query>
<results>
{{#results}}
<result>
<targetId>{{targetId}}</targetId>
<chunk>{{chunk}}</chunk>
<content>
{{content}}
</content>
</result>
{{/results}}
</results>`,
content: `<Instruct>: Given a web search query, retrieve relevant passages that answer the query\n<Query>: {query}\n<Document>: {doc}`,
},
],
},
@@ -1670,6 +1624,11 @@ Your mission is to do your utmost to help users leverage AFFiNE's capabilities f
AFFiNE is developed by Toeverything Pte. Ltd., a Singapore-registered company with a diverse international team. The company has also open-sourced BlockSuite and OctoBase to support the creation of tools similar to AFFiNE. The name "AFFiNE" is inspired by the concept of affine transformation, as blocks within AFFiNE can move freely across page, edgeless, and database modes. Currently, the AFFiNE team consists of 25 members and is an engineer-driven open-source company.

<response_guide>
<tool_usage_guide>
- When searching for information, prioritize searching the user's Workspace information.
- Depending on the complexity of the question and the information returned by the search tools, you can call different tools multiple times to search.
</tool_usage_guide>

<real_world_info>
Today is: {{affine::date}}.
User's preferred language is {{affine::language}}.
@@ -4,6 +4,7 @@ import {
|
||||
} from './anthropic';
|
||||
import { FalProvider } from './fal';
|
||||
import { GeminiGenerativeProvider, GeminiVertexProvider } from './gemini';
|
||||
import { MorphProvider } from './morph';
|
||||
import { OpenAIProvider } from './openai';
|
||||
import { PerplexityProvider } from './perplexity';
|
||||
|
||||
@@ -15,6 +16,7 @@ export const CopilotProviders = [
|
||||
PerplexityProvider,
|
||||
AnthropicOfficialProvider,
|
||||
AnthropicVertexProvider,
|
||||
MorphProvider,
|
||||
];
|
||||
|
||||
export {
|
||||
|
||||
163
packages/backend/server/src/plugins/copilot/providers/morph.ts
Normal file
@@ -0,0 +1,163 @@
|
||||
import {
|
||||
createOpenAICompatible,
|
||||
OpenAICompatibleProvider as VercelOpenAICompatibleProvider,
|
||||
} from '@ai-sdk/openai-compatible';
|
||||
import { AISDKError, generateText, streamText } from 'ai';
|
||||
|
||||
import {
|
||||
CopilotProviderSideError,
|
||||
metrics,
|
||||
UserFriendlyError,
|
||||
} from '../../../base';
|
||||
import { CopilotProvider } from './provider';
|
||||
import type {
|
||||
CopilotChatOptions,
|
||||
ModelConditions,
|
||||
PromptMessage,
|
||||
} from './types';
|
||||
import { CopilotProviderType, ModelInputType, ModelOutputType } from './types';
|
||||
import { chatToGPTMessage, CitationParser, TextStreamParser } from './utils';
|
||||
|
||||
export const DEFAULT_DIMENSIONS = 256;
|
||||
|
||||
export type MorphConfig = {
|
||||
apiKey?: string;
|
||||
};
|
||||
|
||||
export class MorphProvider extends CopilotProvider<MorphConfig> {
|
||||
readonly type = CopilotProviderType.Morph;
|
||||
|
||||
readonly models = [
|
||||
{
|
||||
id: 'morph-v2',
|
||||
capabilities: [
|
||||
{
|
||||
input: [ModelInputType.Text],
|
||||
output: [ModelOutputType.Text],
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
#instance!: VercelOpenAICompatibleProvider;
|
||||
|
||||
override configured(): boolean {
|
||||
return !!this.config.apiKey;
|
||||
}
|
||||
|
||||
protected override setup() {
|
||||
super.setup();
|
||||
this.#instance = createOpenAICompatible({
|
||||
name: this.type,
|
||||
apiKey: this.config.apiKey,
|
||||
baseURL: 'https://api.morphllm.com/v1',
|
||||
});
|
||||
}
|
||||
|
||||
private handleError(e: any) {
|
||||
if (e instanceof UserFriendlyError) {
|
||||
return e;
|
||||
} else if (e instanceof AISDKError) {
|
||||
return new CopilotProviderSideError({
|
||||
provider: this.type,
|
||||
kind: e.name || 'unknown',
|
||||
message: e.message,
|
||||
});
|
||||
} else {
|
||||
return new CopilotProviderSideError({
|
||||
provider: this.type,
|
||||
kind: 'unexpected_response',
|
||||
message: e?.message || 'Unexpected morph response',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async text(
|
||||
cond: ModelConditions,
|
||||
messages: PromptMessage[],
|
||||
options: CopilotChatOptions = {}
|
||||
): Promise<string> {
|
||||
const fullCond = {
|
||||
...cond,
|
||||
outputType: ModelOutputType.Text,
|
||||
};
|
||||
await this.checkParams({ messages, cond: fullCond, options });
|
||||
const model = this.selectModel(fullCond);
|
||||
|
||||
try {
|
||||
metrics.ai.counter('chat_text_calls').add(1, { model: model.id });
|
||||
|
||||
const [system, msgs] = await chatToGPTMessage(messages);
|
||||
|
||||
const modelInstance = this.#instance(model.id);
|
||||
|
||||
const { text } = await generateText({
|
||||
model: modelInstance,
|
||||
system,
|
||||
messages: msgs,
|
||||
abortSignal: options.signal,
|
||||
});
|
||||
|
||||
return text.trim();
|
||||
} catch (e: any) {
|
||||
metrics.ai.counter('chat_text_errors').add(1, { model: model.id });
|
||||
throw this.handleError(e);
|
||||
}
|
||||
}
|
||||
|
||||
async *streamText(
|
||||
cond: ModelConditions,
|
||||
messages: PromptMessage[],
|
||||
options: CopilotChatOptions = {}
|
||||
): AsyncIterable<string> {
|
||||
const fullCond = {
|
||||
...cond,
|
||||
outputType: ModelOutputType.Text,
|
||||
};
|
||||
await this.checkParams({ messages, cond: fullCond, options });
|
||||
const model = this.selectModel(fullCond);
|
||||
|
||||
try {
|
||||
metrics.ai.counter('chat_text_stream_calls').add(1, { model: model.id });
|
||||
const [system, msgs] = await chatToGPTMessage(messages);
|
||||
|
||||
const modelInstance = this.#instance(model.id);
|
||||
|
||||
const { fullStream } = streamText({
|
||||
model: modelInstance,
|
||||
system,
|
||||
messages: msgs,
|
||||
abortSignal: options.signal,
|
||||
});
|
||||
|
||||
const citationParser = new CitationParser();
|
||||
const textParser = new TextStreamParser();
|
||||
for await (const chunk of fullStream) {
|
||||
switch (chunk.type) {
|
||||
case 'text-delta': {
|
||||
let result = textParser.parse(chunk);
|
||||
result = citationParser.parse(result);
|
||||
yield result;
|
||||
break;
|
||||
}
|
||||
case 'finish': {
|
||||
const result = citationParser.end();
|
||||
yield result;
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
yield textParser.parse(chunk);
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (options.signal?.aborted) {
|
||||
await fullStream.cancel();
|
||||
break;
|
||||
}
|
||||
}
|
||||
} catch (e: any) {
|
||||
metrics.ai.counter('chat_text_stream_errors').add(1, { model: model.id });
|
||||
throw this.handleError(e);
|
||||
}
|
||||
}
|
||||
}
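The Morph provider is consumed through the shared provider factory; a minimal caller sketch follows. The factory lookup and the <code>/<update> message shape mirror the doc_edit tool later in this diff, while the document text itself is an illustrative placeholder.

// Sketch of driving morph-v2 as a "fast apply" model (illustrative content only):
const provider = await factory.getProviderByModel('morph-v2');
if (provider) {
  const merged = await provider.text({ modelId: 'morph-v2' }, [
    {
      role: 'user',
      content: '<code># Old Title\nBody text…</code>\n<update># New Title\n// ... existing code ...</update>',
    },
  ]);
}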
|
||||
@@ -440,6 +440,60 @@ export class OpenAIProvider extends CopilotProvider<OpenAIConfig> {
|
||||
}
|
||||
}
|
||||
|
||||
override async rerank(
|
||||
cond: ModelConditions,
|
||||
chunkMessages: PromptMessage[][],
|
||||
options: CopilotChatOptions = {}
|
||||
): Promise<number[]> {
|
||||
const fullCond = { ...cond, outputType: ModelOutputType.Text };
|
||||
await this.checkParams({ messages: [], cond: fullCond, options });
|
||||
const model = this.selectModel(fullCond);
|
||||
const instance = this.#instance.responses(model.id);
|
||||
|
||||
const scores = await Promise.all(
|
||||
chunkMessages.map(async messages => {
|
||||
const [system, msgs] = await chatToGPTMessage(messages);
|
||||
|
||||
const { logprobs } = await generateText({
|
||||
model: instance,
|
||||
system,
|
||||
messages: msgs,
|
||||
temperature: 0,
|
||||
maxTokens: 1,
|
||||
providerOptions: {
|
||||
openai: {
|
||||
...this.getOpenAIOptions(options, model.id),
|
||||
// get the log probability of "yes"/"no"
|
||||
logprobs: 2,
|
||||
},
|
||||
},
|
||||
maxSteps: 1,
|
||||
abortSignal: options.signal,
|
||||
});
|
||||
|
||||
const top = (logprobs?.[0]?.topLogprobs ?? []).reduce(
|
||||
(acc, item) => {
|
||||
acc[item.token] = item.logprob;
|
||||
return acc;
|
||||
},
|
||||
{} as Record<string, number>
|
||||
);
|
||||
|
||||
// OpenAI often includes a leading space, so try matching both ' yes' and 'yes'
|
||||
const logYes = top[' yes'] ?? top['yes'] ?? Number.NEGATIVE_INFINITY;
|
||||
const logNo = top[' no'] ?? top['no'] ?? Number.NEGATIVE_INFINITY;
|
||||
|
||||
const pYes = Math.exp(logYes);
|
||||
const pNo = Math.exp(logNo);
|
||||
const prob = pYes + pNo === 0 ? 0 : pYes / (pYes + pNo);
|
||||
|
||||
return prob;
|
||||
})
|
||||
);
|
||||
|
||||
return scores;
|
||||
}
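The relevance score here is the probability mass of "yes" renormalized over just the "yes"/"no" candidates taken from the single generated token's top logprobs. A small worked sketch (hypothetical logprob values, not output from this code):

// If the top logprobs for the first token were, say:
//   ' yes' -> -0.11  and  ' no' -> -2.30
// then pYes = e^-0.11 ≈ 0.896, pNo = e^-2.30 ≈ 0.100,
// and prob = pYes / (pYes + pNo) ≈ 0.90, i.e. a strongly relevant chunk.
// A chunk whose top tokens contain neither variant falls back to prob = 0.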
|
||||
|
||||
private async getFullStream(
|
||||
model: CopilotProviderModel,
|
||||
messages: PromptMessage[],
|
||||
|
||||
@@ -9,13 +9,19 @@ import {
|
||||
CopilotProviderNotSupported,
|
||||
OnEvent,
|
||||
} from '../../../base';
|
||||
import { DocReader } from '../../../core/doc';
|
||||
import { AccessController } from '../../../core/permission';
|
||||
import { Models } from '../../../models';
|
||||
import { IndexerService } from '../../indexer';
|
||||
import { CopilotContextService } from '../context';
|
||||
import {
|
||||
buildContentGetter,
|
||||
buildDocContentGetter,
|
||||
buildDocKeywordSearchGetter,
|
||||
buildDocSearchGetter,
|
||||
createDocEditTool,
|
||||
createDocKeywordSearchTool,
|
||||
createDocReadTool,
|
||||
createDocSemanticSearchTool,
|
||||
createExaCrawlTool,
|
||||
createExaSearchTool,
|
||||
@@ -129,6 +135,8 @@ export abstract class CopilotProvider<C = any> {
|
||||
const tools: ToolSet = {};
|
||||
if (options?.tools?.length) {
|
||||
this.logger.debug(`getTools: ${JSON.stringify(options.tools)}`);
|
||||
const ac = this.moduleRef.get(AccessController, { strict: false });
|
||||
|
||||
for (const tool of options.tools) {
|
||||
const toolDef = this.getProviderSpecificTools(tool, model);
|
||||
if (toolDef) {
|
||||
@@ -136,12 +144,24 @@ export abstract class CopilotProvider<C = any> {
|
||||
continue;
|
||||
}
|
||||
switch (tool) {
|
||||
case 'docEdit': {
|
||||
const doc = this.moduleRef.get(DocReader, { strict: false });
|
||||
const getDocContent = buildContentGetter(ac, doc);
|
||||
tools.doc_edit = createDocEditTool(
|
||||
this.factory,
|
||||
getDocContent.bind(null, options)
|
||||
);
|
||||
break;
|
||||
}
|
||||
case 'docSemanticSearch': {
|
||||
const ac = this.moduleRef.get(AccessController, { strict: false });
|
||||
const context = this.moduleRef.get(CopilotContextService, {
|
||||
strict: false,
|
||||
});
|
||||
const searchDocs = buildDocSearchGetter(ac, context);
|
||||
|
||||
const docContext = options.session
|
||||
? await context.getBySessionId(options.session)
|
||||
: null;
|
||||
const searchDocs = buildDocSearchGetter(ac, context, docContext);
|
||||
tools.doc_semantic_search = createDocSemanticSearchTool(
|
||||
searchDocs.bind(null, options)
|
||||
);
|
||||
@@ -165,6 +185,14 @@ export abstract class CopilotProvider<C = any> {
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'docRead': {
|
||||
const ac = this.moduleRef.get(AccessController, { strict: false });
|
||||
const models = this.moduleRef.get(Models, { strict: false });
|
||||
const docReader = this.moduleRef.get(DocReader, { strict: false });
|
||||
const getDoc = buildDocContentGetter(ac, docReader, models);
|
||||
tools.doc_read = createDocReadTool(getDoc.bind(null, options));
|
||||
break;
|
||||
}
|
||||
case 'webSearch': {
|
||||
tools.web_search_exa = createExaSearchTool(this.AFFiNEConfig);
|
||||
tools.web_crawl_exa = createExaCrawlTool(this.AFFiNEConfig);
|
||||
@@ -291,4 +319,15 @@ export abstract class CopilotProvider<C = any> {
|
||||
kind: 'embedding',
|
||||
});
|
||||
}
|
||||
|
||||
async rerank(
|
||||
_model: ModelConditions,
|
||||
_messages: PromptMessage[][],
|
||||
_options?: CopilotChatOptions
|
||||
): Promise<number[]> {
|
||||
throw new CopilotProviderNotSupported({
|
||||
provider: this.type,
|
||||
kind: 'rerank',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -13,6 +13,7 @@ export enum CopilotProviderType {
|
||||
GeminiVertex = 'geminiVertex',
|
||||
OpenAI = 'openai',
|
||||
Perplexity = 'perplexity',
|
||||
Morph = 'morph',
|
||||
}
|
||||
|
||||
export const CopilotProviderSchema = z.object({
|
||||
@@ -161,6 +162,7 @@ export type StreamObject = z.infer<typeof StreamObjectSchema>;
|
||||
const CopilotProviderOptionsSchema = z.object({
|
||||
signal: z.instanceof(AbortSignal).optional(),
|
||||
user: z.string().optional(),
|
||||
session: z.string().optional(),
|
||||
workspace: z.string().optional(),
|
||||
});
|
||||
|
||||
|
||||
@@ -11,7 +11,9 @@ import {
|
||||
import { ZodType } from 'zod';
|
||||
|
||||
import {
|
||||
createDocEditTool,
|
||||
createDocKeywordSearchTool,
|
||||
createDocReadTool,
|
||||
createDocSemanticSearchTool,
|
||||
createExaCrawlTool,
|
||||
createExaSearchTool,
|
||||
@@ -382,8 +384,10 @@ export class CitationParser {
|
||||
}
|
||||
|
||||
export interface CustomAITools extends ToolSet {
|
||||
doc_edit: ReturnType<typeof createDocEditTool>;
|
||||
doc_semantic_search: ReturnType<typeof createDocSemanticSearchTool>;
|
||||
doc_keyword_search: ReturnType<typeof createDocKeywordSearchTool>;
|
||||
doc_read: ReturnType<typeof createDocReadTool>;
|
||||
web_search_exa: ReturnType<typeof createExaSearchTool>;
|
||||
web_crawl_exa: ReturnType<typeof createExaCrawlTool>;
|
||||
}
|
||||
@@ -449,6 +453,10 @@ export class TextStreamParser {
|
||||
result += `\nSearching the keyword "${chunk.args.query}"\n`;
|
||||
break;
|
||||
}
|
||||
case 'doc_read': {
|
||||
result += `\nReading the doc "${chunk.args.doc_id}"\n`;
|
||||
break;
|
||||
}
|
||||
}
|
||||
result = this.markAsCallout(result);
|
||||
break;
|
||||
@@ -459,6 +467,12 @@ export class TextStreamParser {
|
||||
);
|
||||
result = this.addPrefix(result);
|
||||
switch (chunk.toolName) {
|
||||
case 'doc_edit': {
|
||||
if (chunk.result && typeof chunk.result === 'object') {
|
||||
result += `\n${chunk.result.result}\n`;
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'doc_semantic_search': {
|
||||
if (Array.isArray(chunk.result)) {
|
||||
result += `\nFound ${chunk.result.length} document${chunk.result.length !== 1 ? 's' : ''} related to “${chunk.args.query}”.\n`;
|
||||
|
||||
@@ -0,0 +1,79 @@
|
||||
import { tool } from 'ai';
|
||||
import { z } from 'zod';
|
||||
|
||||
import { DocReader } from '../../../core/doc';
|
||||
import { AccessController } from '../../../core/permission';
|
||||
import type { CopilotChatOptions, CopilotProviderFactory } from '../providers';
|
||||
|
||||
export const buildContentGetter = (ac: AccessController, doc: DocReader) => {
|
||||
const getDocContent = async (options: CopilotChatOptions, docId?: string) => {
|
||||
if (!options || !docId || !options.user || !options.workspace) {
|
||||
return undefined;
|
||||
}
|
||||
const canAccess = await ac
|
||||
.user(options.user)
|
||||
.workspace(options.workspace)
|
||||
.doc(docId)
|
||||
.can('Doc.Read');
|
||||
if (!canAccess) return undefined;
|
||||
const content = await doc.getFullDocContent(options.workspace, docId);
|
||||
return content?.summary.trim() || undefined;
|
||||
};
|
||||
return getDocContent;
|
||||
};
|
||||
|
||||
export const createDocEditTool = (
|
||||
factory: CopilotProviderFactory,
|
||||
getContent: (targetId?: string) => Promise<string | undefined>
|
||||
) => {
|
||||
return tool({
|
||||
description:
|
||||
"Use this tool to propose an edit to an existing doc.\n\nThis will be read by a less intelligent model, which will quickly apply the edit. You should make it clear what the edit is, while also minimizing the unchanged code you write.\nWhen writing the edit, you should specify each edit in sequence, with the special comment // ... existing code ... to represent unchanged code in between edited lines.\n\nYou should bias towards repeating as few lines of the original doc as possible to convey the change.\nEach edit should contain sufficient context of unchanged lines around the code you're editing to resolve ambiguity.\nIf you plan on deleting a section, you must provide surrounding context to indicate the deletion.\nDO NOT omit spans of pre-existing code without using the // ... existing code ... comment to indicate its absence.\n\nYou should specify the following arguments before the others: [target_id], [origin_content]",
|
||||
parameters: z.object({
|
||||
doc_id: z
|
||||
.string()
|
||||
.describe(
|
||||
'The target doc to modify. Always specify the target doc as the first argument. If the content to be modified does not include a specific document, the full text should be provided through origin_content.'
|
||||
)
|
||||
.optional(),
|
||||
origin_content: z
|
||||
.string()
|
||||
.describe(
|
||||
'The original content of the doc you are editing. If the original text is from a specific document, the doc_id should be provided instead of this parameter.'
|
||||
)
|
||||
.optional(),
|
||||
instructions: z
|
||||
.string()
|
||||
.describe(
|
||||
'A single sentence instruction describing what you are going to do for the sketched edit. This is used to assist the less intelligent model in applying the edit. Please use the first person to describe what you are going to do. Do not repeat what you have said previously in normal messages, and use it to disambiguate uncertainty in the edit.'
|
||||
),
|
||||
code_edit: z
|
||||
.string()
|
||||
.describe(
|
||||
"Specify ONLY the precise lines of code that you wish to edit. NEVER specify or write out unchanged code. Instead, represent all unchanged code using the comment of the language you're editing in - example: // ... existing code ..."
|
||||
),
|
||||
}),
|
||||
execute: async ({ doc_id, origin_content, code_edit }) => {
|
||||
try {
|
||||
const provider = await factory.getProviderByModel('morph-v2');
|
||||
if (!provider) {
|
||||
return 'Editing docs is not supported';
|
||||
}
|
||||
|
||||
const content = origin_content || (await getContent(doc_id));
|
||||
if (!content) {
|
||||
return 'Doc not found or doc is empty';
|
||||
}
|
||||
const result = await provider.text({ modelId: 'morph-v2' }, [
|
||||
{
|
||||
role: 'user',
|
||||
content: `<code>${content}</code>\n<update>${code_edit}</update>`,
|
||||
},
|
||||
]);
|
||||
return { result };
|
||||
} catch {
|
||||
return 'Failed to apply edit to the doc';
|
||||
}
|
||||
},
|
||||
});
|
||||
};
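For reference, a call to this tool is expected to carry a "lazy" edit in which unchanged spans are collapsed into the // ... existing code ... marker described above. A hypothetical argument payload (all values illustrative):

// Example tool arguments the chat model might produce for doc_edit:
const exampleArgs = {
  doc_id: 'doc-123', // assumed id, for illustration only
  instructions: 'I will rename the heading and leave the rest of the doc unchanged.',
  code_edit: '# Renamed heading\n// ... existing code ...',
};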
|
||||
@@ -0,0 +1,88 @@
|
||||
import { Logger } from '@nestjs/common';
|
||||
import { tool } from 'ai';
|
||||
import { z } from 'zod';
|
||||
|
||||
import { DocReader } from '../../../core/doc';
|
||||
import { AccessController } from '../../../core/permission';
|
||||
import { Models, publicUserSelect } from '../../../models';
|
||||
import type { CopilotChatOptions } from '../providers';
|
||||
import { toolError } from './error';
|
||||
|
||||
const logger = new Logger('DocReadTool');
|
||||
|
||||
export const buildDocContentGetter = (
|
||||
ac: AccessController,
|
||||
docReader: DocReader,
|
||||
models: Models
|
||||
) => {
|
||||
const getDoc = async (options: CopilotChatOptions, docId?: string) => {
|
||||
if (!options?.user || !options?.workspace || !docId) {
|
||||
return;
|
||||
}
|
||||
const canAccess = await ac
|
||||
.user(options.user)
|
||||
.workspace(options.workspace)
|
||||
.doc(docId)
|
||||
.can('Doc.Read');
|
||||
if (!canAccess) {
|
||||
logger.warn(
|
||||
`User ${options.user} does not have access to doc ${docId} in workspace ${options.workspace}`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const docMeta = await models.doc.getSnapshot(options.workspace, docId, {
|
||||
select: {
|
||||
createdAt: true,
|
||||
updatedAt: true,
|
||||
createdByUser: {
|
||||
select: publicUserSelect,
|
||||
},
|
||||
updatedByUser: {
|
||||
select: publicUserSelect,
|
||||
},
|
||||
},
|
||||
});
|
||||
if (!docMeta) {
|
||||
return;
|
||||
}
|
||||
|
||||
const content = await docReader.getDocMarkdown(options.workspace, docId);
|
||||
if (!content) {
|
||||
return;
|
||||
}
|
||||
|
||||
return {
|
||||
title: content.title,
|
||||
markdown: content.markdown,
|
||||
createdAt: docMeta.createdAt,
|
||||
updatedAt: docMeta.updatedAt,
|
||||
createdByUser: docMeta.createdByUser,
|
||||
updatedByUser: docMeta.updatedByUser,
|
||||
};
|
||||
};
|
||||
return getDoc;
|
||||
};
|
||||
|
||||
export const createDocReadTool = (
|
||||
getDoc: (targetId?: string) => Promise<object | undefined>
|
||||
) => {
|
||||
return tool({
|
||||
description: 'Read the content of a doc in the current workspace',
|
||||
parameters: z.object({
|
||||
doc_id: z.string().describe('The target doc to read'),
|
||||
}),
|
||||
execute: async ({ doc_id }) => {
|
||||
try {
|
||||
const doc = await getDoc(doc_id);
|
||||
if (!doc) {
|
||||
return;
|
||||
}
|
||||
return { ...doc };
|
||||
} catch (err: any) {
|
||||
logger.error(`Failed to read the doc ${doc_id}`, err);
|
||||
return toolError('Doc Read Failed', err.message);
|
||||
}
|
||||
},
|
||||
});
|
||||
};
|
||||
@@ -4,14 +4,20 @@ import { z } from 'zod';
|
||||
import type { AccessController } from '../../../core/permission';
|
||||
import type { ChunkSimilarity } from '../../../models';
|
||||
import type { CopilotContextService } from '../context';
|
||||
import type { ContextSession } from '../context/session';
|
||||
import type { CopilotChatOptions } from '../providers';
|
||||
import { toolError } from './error';
|
||||
|
||||
export const buildDocSearchGetter = (
|
||||
ac: AccessController,
|
||||
context: CopilotContextService
|
||||
context: CopilotContextService,
|
||||
docContext: ContextSession | null
|
||||
) => {
|
||||
const searchDocs = async (options: CopilotChatOptions, query?: string) => {
|
||||
const searchDocs = async (
|
||||
options: CopilotChatOptions,
|
||||
query?: string,
|
||||
abortSignal?: AbortSignal
|
||||
) => {
|
||||
if (!options || !query?.trim() || !options.user || !options.workspace) {
|
||||
return undefined;
|
||||
}
|
||||
@@ -20,7 +26,11 @@ export const buildDocSearchGetter = (
|
||||
.workspace(options.workspace)
|
||||
.can('Workspace.Read');
|
||||
if (!canAccess) return undefined;
|
||||
const chunks = await context.matchWorkspaceAll(options.workspace, query);
|
||||
const [chunks, contextChunks] = await Promise.all([
|
||||
context.matchWorkspaceAll(options.workspace, query, 10, abortSignal),
|
||||
docContext?.matchFiles(query, 10, abortSignal) ?? [],
|
||||
]);
|
||||
|
||||
const docChunks = await ac
|
||||
.user(options.user)
|
||||
.workspace(options.workspace)
|
||||
@@ -29,6 +39,9 @@ export const buildDocSearchGetter = (
|
||||
'Doc.Read'
|
||||
);
|
||||
const fileChunks = chunks.filter(c => 'fileId' in c);
|
||||
if (contextChunks.length) {
|
||||
fileChunks.push(...contextChunks);
|
||||
}
|
||||
if (!docChunks.length && !fileChunks.length) return undefined;
|
||||
return [...fileChunks, ...docChunks];
|
||||
};
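With this change the semantic search getter merges two sources in one call: embedding matches across the whole workspace and matches from the chat session's own context files, both cancellable through the shared abort signal. A minimal caller sketch (the options object and its values are illustrative; in the provider wiring the options argument is pre-bound):

// Hypothetical direct invocation of the getter returned by buildDocSearchGetter:
const controller = new AbortController();
const chunks = await searchDocs(
  { user: 'user-id', workspace: 'workspace-id' }, // illustrative CopilotChatOptions
  'meeting notes about the Q3 roadmap',
  controller.signal
);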
|
||||
@@ -36,17 +49,24 @@ export const buildDocSearchGetter = (
|
||||
};
|
||||
|
||||
export const createDocSemanticSearchTool = (
|
||||
searchDocs: (query: string) => Promise<ChunkSimilarity[] | undefined>
|
||||
searchDocs: (
|
||||
query: string,
|
||||
abortSignal?: AbortSignal
|
||||
) => Promise<ChunkSimilarity[] | undefined>
|
||||
) => {
|
||||
return tool({
|
||||
description:
|
||||
'Semantic search for relevant documents in the current workspace',
|
||||
parameters: z.object({
|
||||
query: z.string().describe('The query to search for.'),
|
||||
query: z
|
||||
.string()
|
||||
.describe(
|
||||
'The query statement to search for, e.g. "What is the capital of France?"'
|
||||
),
|
||||
}),
|
||||
execute: async ({ query }) => {
|
||||
execute: async ({ query }, options) => {
|
||||
try {
|
||||
return await searchDocs(query);
|
||||
return await searchDocs(query, options.abortSignal);
|
||||
} catch (e: any) {
|
||||
return toolError('Doc Semantic Search Failed', e.message);
|
||||
}
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
export * from './doc-edit';
|
||||
export * from './doc-keyword-search';
|
||||
export * from './doc-read';
|
||||
export * from './doc-semantic-search';
|
||||
export * from './error';
|
||||
export * from './exa-crawl';
|
||||
|
||||
@@ -539,6 +539,7 @@ enum ErrorNames {
|
||||
CAN_NOT_BATCH_GRANT_DOC_OWNER_PERMISSIONS
|
||||
CAN_NOT_REVOKE_YOURSELF
|
||||
CAPTCHA_VERIFICATION_FAILED
|
||||
COMMENT_NOT_FOUND
|
||||
COPILOT_ACTION_TAKEN
|
||||
COPILOT_CONTEXT_FILE_NOT_SUPPORTED
|
||||
COPILOT_DOCS_NOT_FOUND
|
||||
@@ -628,6 +629,7 @@ enum ErrorNames {
|
||||
OWNER_CAN_NOT_LEAVE_WORKSPACE
|
||||
PASSWORD_REQUIRED
|
||||
QUERY_TOO_LONG
|
||||
REPLY_NOT_FOUND
|
||||
RUNTIME_CONFIG_NOT_FOUND
|
||||
SAME_EMAIL_PROVIDED
|
||||
SAME_SUBSCRIPTION_RECURRING
|
||||
@@ -1583,6 +1585,9 @@ enum SearchTable {
|
||||
}
|
||||
|
||||
type ServerConfigType {
|
||||
"""Whether allow guest users to create demo workspaces."""
|
||||
allowGuestDemoWorkspace: Boolean!
|
||||
|
||||
"""fetch latest available upgradable release of server"""
|
||||
availableUpgrade: ReleaseVersionType
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@ query adminServerConfig {
|
||||
baseUrl
|
||||
name
|
||||
features
|
||||
allowGuestDemoWorkspace
|
||||
type
|
||||
initialized
|
||||
credentialsRequirement {
|
||||
|
||||
@@ -32,6 +32,7 @@ export const adminServerConfigQuery = {
|
||||
baseUrl
|
||||
name
|
||||
features
|
||||
allowGuestDemoWorkspace
|
||||
type
|
||||
initialized
|
||||
credentialsRequirement {
|
||||
@@ -1822,6 +1823,7 @@ export const serverConfigQuery = {
|
||||
baseUrl
|
||||
name
|
||||
features
|
||||
allowGuestDemoWorkspace
|
||||
type
|
||||
initialized
|
||||
credentialsRequirement {
|
||||
|
||||
@@ -7,6 +7,7 @@ query serverConfig {
|
||||
baseUrl
|
||||
name
|
||||
features
|
||||
allowGuestDemoWorkspace
|
||||
type
|
||||
initialized
|
||||
credentialsRequirement {
|
||||
|
||||
@@ -708,6 +708,7 @@ export enum ErrorNames {
|
||||
CAN_NOT_BATCH_GRANT_DOC_OWNER_PERMISSIONS = 'CAN_NOT_BATCH_GRANT_DOC_OWNER_PERMISSIONS',
|
||||
CAN_NOT_REVOKE_YOURSELF = 'CAN_NOT_REVOKE_YOURSELF',
|
||||
CAPTCHA_VERIFICATION_FAILED = 'CAPTCHA_VERIFICATION_FAILED',
|
||||
COMMENT_NOT_FOUND = 'COMMENT_NOT_FOUND',
|
||||
COPILOT_ACTION_TAKEN = 'COPILOT_ACTION_TAKEN',
|
||||
COPILOT_CONTEXT_FILE_NOT_SUPPORTED = 'COPILOT_CONTEXT_FILE_NOT_SUPPORTED',
|
||||
COPILOT_DOCS_NOT_FOUND = 'COPILOT_DOCS_NOT_FOUND',
|
||||
@@ -797,6 +798,7 @@ export enum ErrorNames {
|
||||
OWNER_CAN_NOT_LEAVE_WORKSPACE = 'OWNER_CAN_NOT_LEAVE_WORKSPACE',
|
||||
PASSWORD_REQUIRED = 'PASSWORD_REQUIRED',
|
||||
QUERY_TOO_LONG = 'QUERY_TOO_LONG',
|
||||
REPLY_NOT_FOUND = 'REPLY_NOT_FOUND',
|
||||
RUNTIME_CONFIG_NOT_FOUND = 'RUNTIME_CONFIG_NOT_FOUND',
|
||||
SAME_EMAIL_PROVIDED = 'SAME_EMAIL_PROVIDED',
|
||||
SAME_SUBSCRIPTION_RECURRING = 'SAME_SUBSCRIPTION_RECURRING',
|
||||
@@ -2147,6 +2149,8 @@ export enum SearchTable {
|
||||
|
||||
export interface ServerConfigType {
|
||||
__typename?: 'ServerConfigType';
|
||||
/** Whether allow guest users to create demo workspaces. */
|
||||
allowGuestDemoWorkspace: Scalars['Boolean']['output'];
|
||||
/** fetch latest available upgradable release of server */
|
||||
availableUpgrade: Maybe<ReleaseVersionType>;
|
||||
/** Features for user that can be configured */
|
||||
@@ -2741,6 +2745,7 @@ export type AdminServerConfigQuery = {
|
||||
baseUrl: string;
|
||||
name: string;
|
||||
features: Array<ServerFeature>;
|
||||
allowGuestDemoWorkspace: boolean;
|
||||
type: ServerDeploymentType;
|
||||
initialized: boolean;
|
||||
availableUserFeatures: Array<FeatureType>;
|
||||
@@ -4826,6 +4831,7 @@ export type ServerConfigQuery = {
|
||||
baseUrl: string;
|
||||
name: string;
|
||||
features: Array<ServerFeature>;
|
||||
allowGuestDemoWorkspace: boolean;
|
||||
type: ServerDeploymentType;
|
||||
initialized: boolean;
|
||||
credentialsRequirement: {
|
||||
|
||||
@@ -190,6 +190,10 @@
|
||||
"earlyAccessControl": {
|
||||
"type": "Boolean",
|
||||
"desc": "Only allow users with early access features to access the app"
|
||||
},
|
||||
"allowGuestDemoWorkspace": {
|
||||
"type": "Boolean",
|
||||
"desc": "Whether allow guest users to create demo workspaces."
|
||||
}
|
||||
},
|
||||
"docService": {
|
||||
@@ -207,6 +211,10 @@
|
||||
"versionControl.requiredVersion": {
|
||||
"type": "String",
|
||||
"desc": "Allowed version range of the app that allowed to access the server. Requires 'client/versionControl.enabled' to be true to take effect."
|
||||
},
|
||||
"allowGuestDemoWorkspace": {
|
||||
"type": "Boolean",
|
||||
"desc": "Allow guests to access demo workspace."
|
||||
}
|
||||
},
|
||||
"captcha": {
|
||||
@@ -253,6 +261,10 @@
|
||||
"type": "Object",
|
||||
"desc": "The config for the anthropic provider in Google Vertex AI."
|
||||
},
|
||||
"providers.morph": {
|
||||
"type": "Object",
|
||||
"desc": "The config for the morph provider."
|
||||
},
|
||||
"unsplash": {
|
||||
"type": "Object",
|
||||
"desc": "The config for the unsplash key."
|
||||
|
||||
@@ -585,7 +585,7 @@ export class AIChatInput extends SignalWatcher(
|
||||
await this._preUpdateMessages(userInput, attachments);
|
||||
|
||||
const sessionId = await this.createSessionId();
|
||||
let contexts = await this._getMatchedContexts(userInput);
|
||||
let contexts = await this._getMatchedContexts();
|
||||
if (abortController.signal.aborted) {
|
||||
return;
|
||||
}
|
||||
@@ -685,46 +685,11 @@ export class AIChatInput extends SignalWatcher(
|
||||
}
|
||||
};
|
||||
|
||||
private async _getMatchedContexts(userInput: string) {
|
||||
const contextId = await this.getContextId();
|
||||
const workspaceId = this.host.store.workspace.id;
|
||||
|
||||
private async _getMatchedContexts() {
|
||||
const docContexts = new Map<
|
||||
string,
|
||||
{ docId: string; docContent: string }
|
||||
>();
|
||||
const fileContexts = new Map<
|
||||
string,
|
||||
BlockSuitePresets.AIFileContextOption
|
||||
>();
|
||||
|
||||
const { files: matchedFiles = [], docs: matchedDocs = [] } =
|
||||
(await AIProvider.context?.matchContext(
|
||||
userInput,
|
||||
contextId,
|
||||
workspaceId
|
||||
)) ?? {};
|
||||
|
||||
matchedDocs.forEach(doc => {
|
||||
docContexts.set(doc.docId, {
|
||||
docId: doc.docId,
|
||||
docContent: doc.content,
|
||||
});
|
||||
});
|
||||
|
||||
matchedFiles.forEach(file => {
|
||||
const context = fileContexts.get(file.fileId);
|
||||
if (context) {
|
||||
context.fileContent += `\n${file.content}`;
|
||||
} else {
|
||||
fileContexts.set(file.fileId, {
|
||||
blobId: file.blobId,
|
||||
fileName: file.name,
|
||||
fileType: file.mimeType,
|
||||
fileContent: file.content,
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
this.chips.forEach(chip => {
|
||||
if (isDocChip(chip) && !!chip.markdown?.value) {
|
||||
@@ -759,10 +724,7 @@ export class AIChatInput extends SignalWatcher(
|
||||
};
|
||||
});
|
||||
|
||||
return {
|
||||
docs,
|
||||
files: Array.from(fileContexts.values()),
|
||||
};
|
||||
return { docs, files: [] };
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -34,6 +34,13 @@ import {
|
||||
UnauthorizedError,
|
||||
} from './error';
|
||||
|
||||
export enum Endpoint {
|
||||
Stream = 'stream',
|
||||
StreamObject = 'stream-object',
|
||||
Workflow = 'workflow',
|
||||
Images = 'images',
|
||||
}
|
||||
|
||||
type OptionsField<T extends GraphQLQuery> =
|
||||
RequestOptions<T>['variables'] extends { options: infer U } ? U : never;
|
||||
|
||||
@@ -415,7 +422,7 @@ export class CopilotClient {
|
||||
webSearch?: boolean;
|
||||
modelId?: string;
|
||||
},
|
||||
endpoint = 'stream'
|
||||
endpoint = Endpoint.Stream
|
||||
) {
|
||||
let url = `/api/copilot/chat/${sessionId}/${endpoint}`;
|
||||
const queryString = this.paramsToQueryString({
|
||||
@@ -435,7 +442,7 @@ export class CopilotClient {
|
||||
sessionId: string,
|
||||
messageId?: string,
|
||||
seed?: string,
|
||||
endpoint = 'images'
|
||||
endpoint = Endpoint.Images
|
||||
) {
|
||||
let url = `/api/copilot/chat/${sessionId}/${endpoint}`;
|
||||
const queryString = this.paramsToQueryString({
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { partition } from 'lodash-es';
|
||||
|
||||
import { AIProvider } from './ai-provider';
|
||||
import type { CopilotClient } from './copilot-client';
|
||||
import { type CopilotClient, Endpoint } from './copilot-client';
|
||||
import { delay, toTextStream } from './event-source';
|
||||
|
||||
const TIMEOUT = 50000;
|
||||
@@ -16,7 +16,7 @@ export type TextToTextOptions = {
|
||||
stream?: boolean;
|
||||
signal?: AbortSignal;
|
||||
retry?: boolean;
|
||||
workflow?: boolean;
|
||||
endpoint?: Endpoint;
|
||||
isRootSession?: boolean;
|
||||
postfix?: (text: string) => string;
|
||||
reasoning?: boolean;
|
||||
@@ -114,7 +114,7 @@ export function textToText({
|
||||
signal,
|
||||
timeout = TIMEOUT,
|
||||
retry = false,
|
||||
workflow = false,
|
||||
endpoint = Endpoint.Stream,
|
||||
postfix,
|
||||
reasoning,
|
||||
webSearch,
|
||||
@@ -142,7 +142,7 @@ export function textToText({
|
||||
webSearch,
|
||||
modelId,
|
||||
},
|
||||
workflow ? 'workflow' : 'stream-object'
|
||||
endpoint
|
||||
);
|
||||
AIProvider.LAST_ACTION_SESSIONID = sessionId;
|
||||
|
||||
@@ -219,7 +219,7 @@ export function toImage({
|
||||
signal,
|
||||
timeout = TIMEOUT,
|
||||
retry = false,
|
||||
workflow = false,
|
||||
endpoint,
|
||||
client,
|
||||
}: ToImageOptions) {
|
||||
let messageId: string | undefined;
|
||||
@@ -238,7 +238,7 @@ export function toImage({
|
||||
sessionId,
|
||||
messageId,
|
||||
seed,
|
||||
workflow ? 'workflow' : undefined
|
||||
endpoint
|
||||
);
|
||||
AIProvider.LAST_ACTION_SESSIONID = sessionId;
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ import {
|
||||
import { z } from 'zod';
|
||||
|
||||
import { AIProvider } from './ai-provider';
|
||||
import type { CopilotClient } from './copilot-client';
|
||||
import { type CopilotClient, Endpoint } from './copilot-client';
|
||||
import type { PromptKey } from './prompt';
|
||||
import { textToText, toImage } from './request';
|
||||
import { setupTracker } from './tracker';
|
||||
@@ -95,11 +95,13 @@ export function setupAIProvider(
|
||||
client,
|
||||
sessionId,
|
||||
content: input,
|
||||
timeout: 5 * 60 * 1000, // 5 minutes
|
||||
params: {
|
||||
docs: contexts?.docs,
|
||||
files: contexts?.files,
|
||||
searchMode: webSearch ? 'MUST' : 'AUTO',
|
||||
},
|
||||
endpoint: Endpoint.StreamObject,
|
||||
});
|
||||
});
|
||||
|
||||
@@ -355,7 +357,7 @@ export function setupAIProvider(
|
||||
content: options.input,
|
||||
// 3 minutes
|
||||
timeout: 180000,
|
||||
workflow: true,
|
||||
endpoint: Endpoint.Workflow,
|
||||
});
|
||||
});
|
||||
|
||||
@@ -481,7 +483,7 @@ Could you make a new website based on these notes and send back just the html fi
|
||||
content: options.input,
|
||||
// 3 minutes
|
||||
timeout: 180000,
|
||||
workflow: true,
|
||||
endpoint: Endpoint.Workflow,
|
||||
postfix,
|
||||
});
|
||||
});
|
||||
@@ -516,13 +518,14 @@ Could you make a new website based on these notes and send back just the html fi
|
||||
promptName,
|
||||
...options,
|
||||
});
|
||||
const isWorkflow = !!promptName?.startsWith('workflow:');
|
||||
return toImage({
|
||||
...options,
|
||||
client,
|
||||
sessionId,
|
||||
content: options.input,
|
||||
timeout: 180000,
|
||||
workflow: !!promptName?.startsWith('workflow:'),
|
||||
endpoint: isWorkflow ? Endpoint.Workflow : Endpoint.Images,
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@ import { useLiveData, useService } from '@toeverything/infra';
|
||||
import { useEffect, useMemo } from 'react';
|
||||
|
||||
import { AuthService, SubscriptionService } from '../../../modules/cloud';
|
||||
import { useNavigateHelper } from '../../hooks/use-navigate-helper';
|
||||
import * as styles from './styles.css';
|
||||
|
||||
const UserInfo = () => {
|
||||
@@ -51,9 +52,11 @@ export const PublishPageUserAvatar = () => {
|
||||
const user = useLiveData(authService.session.account$);
|
||||
const t = useI18n();
|
||||
|
||||
const navigateHelper = useNavigateHelper();
|
||||
const handleSignOut = useAsyncCallback(async () => {
|
||||
await authService.signOut();
|
||||
}, [authService]);
|
||||
navigateHelper.jumpToSignIn();
|
||||
}, [authService, navigateHelper]);
|
||||
|
||||
const menuItem = useMemo(() => {
|
||||
return (
|
||||
|
||||
@@ -0,0 +1,92 @@
|
||||
/* eslint-disable rxjs/finnish */
|
||||
/**
|
||||
* @vitest-environment happy-dom
|
||||
*/
|
||||
import { renderHook, waitFor } from '@testing-library/react';
|
||||
import { beforeEach, describe, expect, test, vi } from 'vitest';
|
||||
|
||||
// mocks
|
||||
const signOutFn = vi.fn();
|
||||
const jumpToIndex = vi.fn();
|
||||
const jumpToSignIn = vi.fn();
|
||||
let allowGuestDemo: boolean | undefined = true;
|
||||
|
||||
vi.mock('@affine/core/modules/cloud', () => ({
|
||||
AuthService: class {},
|
||||
DefaultServerService: class {},
|
||||
}));
|
||||
|
||||
vi.mock('@toeverything/infra', () => {
|
||||
return {
|
||||
useService: () => ({ signOut: signOutFn }),
|
||||
useServices: () => ({
|
||||
defaultServerService: {
|
||||
server: {
|
||||
config$: {
|
||||
value: {
|
||||
get allowGuestDemoWorkspace() {
|
||||
return allowGuestDemo;
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock('@affine/component', () => {
|
||||
return {
|
||||
useConfirmModal: () => ({
|
||||
openConfirmModal: ({ onConfirm }: { onConfirm?: () => unknown }) => {
|
||||
return Promise.resolve(onConfirm?.());
|
||||
},
|
||||
}),
|
||||
notify: { error: vi.fn() },
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock('@affine/i18n', () => ({
|
||||
useI18n: () => new Proxy({}, { get: () => () => '' }),
|
||||
}));
|
||||
|
||||
vi.mock('../../use-navigate-helper', () => ({
|
||||
useNavigateHelper: () => ({ jumpToIndex, jumpToSignIn }),
|
||||
}));
|
||||
|
||||
import { useSignOut } from '../use-sign-out';
|
||||
|
||||
describe('useSignOut', () => {
|
||||
beforeEach(() => {
|
||||
signOutFn.mockClear();
|
||||
jumpToIndex.mockClear();
|
||||
jumpToSignIn.mockClear();
|
||||
});
|
||||
|
||||
test('redirects to index when guest demo allowed', async () => {
|
||||
allowGuestDemo = true;
|
||||
const { result } = renderHook(() => useSignOut());
|
||||
result.current();
|
||||
await waitFor(() => expect(signOutFn).toHaveBeenCalled());
|
||||
expect(jumpToIndex).toHaveBeenCalled();
|
||||
expect(jumpToSignIn).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('redirects to index when guest demo config not provided', async () => {
|
||||
allowGuestDemo = undefined;
|
||||
const { result } = renderHook(() => useSignOut());
|
||||
result.current();
|
||||
await waitFor(() => expect(signOutFn).toHaveBeenCalled());
|
||||
expect(jumpToIndex).toHaveBeenCalled();
|
||||
expect(jumpToSignIn).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('redirects to sign in when guest demo disabled', async () => {
|
||||
allowGuestDemo = false;
|
||||
const { result } = renderHook(() => useSignOut());
|
||||
result.current();
|
||||
await waitFor(() => expect(signOutFn).toHaveBeenCalled());
|
||||
expect(jumpToSignIn).toHaveBeenCalled();
|
||||
expect(jumpToIndex).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@@ -3,10 +3,10 @@ import {
|
||||
notify,
|
||||
useConfirmModal,
|
||||
} from '@affine/component';
|
||||
import { AuthService } from '@affine/core/modules/cloud';
|
||||
import { AuthService, DefaultServerService } from '@affine/core/modules/cloud';
|
||||
import { UserFriendlyError } from '@affine/error';
|
||||
import { useI18n } from '@affine/i18n';
|
||||
import { useService } from '@toeverything/infra';
|
||||
import { useService, useServices } from '@toeverything/infra';
|
||||
import { useCallback } from 'react';
|
||||
|
||||
import { useNavigateHelper } from '../use-navigate-helper';
|
||||
@@ -25,21 +25,29 @@ export const useSignOut = ({
|
||||
}: ConfirmModalProps = {}) => {
|
||||
const t = useI18n();
|
||||
const { openConfirmModal } = useConfirmModal();
|
||||
const { jumpToIndex } = useNavigateHelper();
|
||||
const { jumpToSignIn, jumpToIndex } = useNavigateHelper();
|
||||
|
||||
const authService = useService(AuthService);
|
||||
const { defaultServerService } = useServices({ DefaultServerService });
|
||||
|
||||
const signOut = useCallback(async () => {
|
||||
onConfirm?.()?.catch(console.error);
|
||||
try {
|
||||
await authService.signOut();
|
||||
jumpToIndex();
|
||||
if (
|
||||
defaultServerService.server.config$.value.allowGuestDemoWorkspace !==
|
||||
false
|
||||
) {
|
||||
jumpToIndex();
|
||||
} else {
|
||||
jumpToSignIn();
|
||||
}
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
const error = UserFriendlyError.fromAny(err);
|
||||
notify.error(error);
|
||||
}
|
||||
}, [authService, jumpToIndex, onConfirm]);
|
||||
}, [authService, jumpToIndex, jumpToSignIn, defaultServerService, onConfirm]);
|
||||
|
||||
const getDefaultText = useCallback(
|
||||
(key: SignOutConfirmModalI18NKeys) => {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { MenuItem } from '@affine/component/ui/menu';
|
||||
import { FeatureFlagService } from '@affine/core/modules/feature-flag';
|
||||
import { DefaultServerService } from '@affine/core/modules/cloud';
|
||||
import { useI18n } from '@affine/i18n';
|
||||
import { ImportIcon, PlusIcon } from '@blocksuite/icons/rc';
|
||||
import { useLiveData, useService } from '@toeverything/infra';
|
||||
@@ -14,10 +14,11 @@ export const AddWorkspace = ({
|
||||
onNewWorkspace?: () => void;
|
||||
}) => {
|
||||
const t = useI18n();
|
||||
const featureFlagService = useService(FeatureFlagService);
|
||||
const enableLocalWorkspace = useLiveData(
|
||||
featureFlagService.flags.enable_local_workspace.$
|
||||
const defaultServerService = useService(DefaultServerService);
|
||||
const allowGuestDemo = useLiveData(
|
||||
defaultServerService.server.config$.selector(c => c.allowGuestDemoWorkspace)
|
||||
);
|
||||
const guestDemoEnabled = allowGuestDemo !== false;
|
||||
|
||||
return (
|
||||
<>
|
||||
@@ -44,7 +45,7 @@ export const AddWorkspace = ({
|
||||
className={styles.ItemContainer}
|
||||
>
|
||||
<div className={styles.ItemText}>
|
||||
{enableLocalWorkspace
|
||||
{guestDemoEnabled
|
||||
? t['com.affine.workspaceList.addWorkspace.create']()
|
||||
: t['com.affine.workspaceList.addWorkspace.create-cloud']()}
|
||||
</div>
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
import { ScrollableContainer } from '@affine/component';
|
||||
import { MenuItem } from '@affine/component/ui/menu';
|
||||
import { AuthService } from '@affine/core/modules/cloud';
|
||||
import { AuthService, DefaultServerService } from '@affine/core/modules/cloud';
|
||||
import { GlobalDialogService } from '@affine/core/modules/dialogs';
|
||||
import { FeatureFlagService } from '@affine/core/modules/feature-flag';
|
||||
import { type WorkspaceMetadata } from '@affine/core/modules/workspace';
|
||||
import { useI18n } from '@affine/i18n';
|
||||
import { track } from '@affine/track';
|
||||
@@ -66,7 +65,7 @@ export const UserWithWorkspaceList = ({
|
||||
}: UserWithWorkspaceListProps) => {
|
||||
const globalDialogService = useService(GlobalDialogService);
|
||||
const session = useLiveData(useService(AuthService).session.session$);
|
||||
const featureFlagService = useService(FeatureFlagService);
|
||||
const defaultServerService = useService(DefaultServerService);
|
||||
|
||||
const isAuthenticated = session.status === 'authenticated';
|
||||
|
||||
@@ -77,7 +76,8 @@ export const UserWithWorkspaceList = ({
|
||||
const onNewWorkspace = useCallback(() => {
|
||||
if (
|
||||
!isAuthenticated &&
|
||||
!featureFlagService.flags.enable_local_workspace.value
|
||||
defaultServerService.server.config$.value.allowGuestDemoWorkspace ===
|
||||
false
|
||||
) {
|
||||
return openSignInModal();
|
||||
}
|
||||
@@ -90,7 +90,7 @@ export const UserWithWorkspaceList = ({
|
||||
onEventEnd?.();
|
||||
}, [
|
||||
globalDialogService,
|
||||
featureFlagService,
|
||||
defaultServerService,
|
||||
isAuthenticated,
|
||||
onCreatedWorkspace,
|
||||
onEventEnd,
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { IconButton, Menu, MenuItem } from '@affine/component';
|
||||
import { Divider } from '@affine/component/ui/divider';
|
||||
import { useEnableCloud } from '@affine/core/components/hooks/affine/use-enable-cloud';
|
||||
import { useSignOut } from '@affine/core/components/hooks/affine/use-sign-out';
|
||||
import { useAsyncCallback } from '@affine/core/components/hooks/affine-async-hooks';
|
||||
import { useNavigateHelper } from '@affine/core/components/hooks/use-navigate-helper';
|
||||
import type { AuthAccountInfo, Server } from '@affine/core/modules/cloud';
|
||||
@@ -161,9 +162,7 @@ const CloudWorkSpaceList = ({
|
||||
workspaces,
|
||||
]);
|
||||
|
||||
const handleSignOut = useAsyncCallback(async () => {
|
||||
await authService.signOut();
|
||||
}, [authService]);
|
||||
const handleSignOut = useSignOut();
|
||||
|
||||
const handleSignIn = useAsyncCallback(async () => {
|
||||
globalDialogService.open('sign-in', {
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { DefaultServerService } from '@affine/core/modules/cloud';
|
||||
import { DesktopApiService } from '@affine/core/modules/desktop-api';
|
||||
import { WorkspacesService } from '@affine/core/modules/workspace';
|
||||
import {
|
||||
@@ -46,16 +47,23 @@ export const Component = ({
|
||||
const [navigating, setNavigating] = useState(true);
|
||||
const [creating, setCreating] = useState(false);
|
||||
const authService = useService(AuthService);
|
||||
const defaultServerService = useService(DefaultServerService);
|
||||
|
||||
const loggedIn = useLiveData(
|
||||
authService.session.status$.map(s => s === 'authenticated')
|
||||
);
|
||||
const allowGuestDemo =
|
||||
useLiveData(
|
||||
defaultServerService.server.config$.selector(
|
||||
c => c.allowGuestDemoWorkspace
|
||||
)
|
||||
) ?? true;
|
||||
|
||||
const workspacesService = useService(WorkspacesService);
|
||||
const list = useLiveData(workspacesService.list.workspaces$);
|
||||
const listIsLoading = useLiveData(workspacesService.list.isRevalidating$);
|
||||
|
||||
const { openPage, jumpToPage } = useNavigateHelper();
|
||||
const { openPage, jumpToPage, jumpToSignIn } = useNavigateHelper();
|
||||
const [searchParams] = useSearchParams();
|
||||
|
||||
const createOnceRef = useRef(false);
|
||||
@@ -84,6 +92,12 @@ export const Component = ({
|
||||
return;
|
||||
}
|
||||
|
||||
if (!allowGuestDemo && !loggedIn) {
|
||||
localStorage.removeItem('last_workspace_id');
|
||||
jumpToSignIn();
|
||||
return;
|
||||
}
|
||||
|
||||
// check is user logged in && has cloud workspace
|
||||
if (searchParams.get('initCloud') === 'true') {
|
||||
if (loggedIn) {
|
||||
@@ -111,10 +125,12 @@ export const Component = ({
|
||||
openPage(openWorkspace.id, defaultIndexRoute, RouteLogic.REPLACE);
|
||||
}
|
||||
}, [
|
||||
allowGuestDemo,
|
||||
createCloudWorkspace,
|
||||
list,
|
||||
openPage,
|
||||
searchParams,
|
||||
jumpToSignIn,
|
||||
listIsLoading,
|
||||
loggedIn,
|
||||
navigating,
|
||||
@@ -128,7 +144,9 @@ export const Component = ({
|
||||
}, [desktopApi]);
|
||||
|
||||
useEffect(() => {
|
||||
setCreating(true);
|
||||
if (listIsLoading || list.length > 0) {
|
||||
return;
|
||||
}
|
||||
createFirstAppData(workspacesService)
|
||||
.then(createdWorkspace => {
|
||||
if (createdWorkspace) {
|
||||
@@ -148,7 +166,15 @@ export const Component = ({
|
||||
.finally(() => {
|
||||
setCreating(false);
|
||||
});
|
||||
}, [jumpToPage, openPage, workspacesService]);
|
||||
}, [
|
||||
jumpToPage,
|
||||
jumpToSignIn,
|
||||
openPage,
|
||||
workspacesService,
|
||||
loggedIn,
|
||||
listIsLoading,
|
||||
list,
|
||||
]);
|
||||
|
||||
if (navigating || creating) {
|
||||
return fallback ?? <AppContainer fallback />;
|
||||
|
||||
@@ -83,7 +83,8 @@ const AcceptInvite = ({ inviteId: targetInviteId }: { inviteId: string }) => {
|
||||
|
||||
const onSignOut = useAsyncCallback(async () => {
|
||||
await authService.signOut();
|
||||
}, [authService]);
|
||||
navigateHelper.jumpToSignIn();
|
||||
}, [authService, navigateHelper]);
|
||||
|
||||
if ((loading && !requestToJoinLoading) || inviteId !== targetInviteId) {
|
||||
return null;
|
||||
|
||||
@@ -228,7 +228,8 @@ const CloudWorkSpaceList = ({
|
||||
|
||||
const handleSignOut = useAsyncCallback(async () => {
|
||||
await authService.signOut();
|
||||
}, [authService]);
|
||||
navigateHelper.jumpToSignIn();
|
||||
}, [authService, navigateHelper]);
|
||||
|
||||
const handleSignIn = useAsyncCallback(async () => {
|
||||
globalDialogService.open('sign-in', {
|
||||
|
||||
@@ -26,6 +26,7 @@ export const BUILD_IN_SERVERS: (ServerMetadata & { config: ServerConfig })[] =
|
||||
maxLength: 32,
|
||||
},
|
||||
},
|
||||
allowGuestDemoWorkspace: true,
|
||||
},
|
||||
},
|
||||
]
|
||||
@@ -56,6 +57,7 @@ export const BUILD_IN_SERVERS: (ServerMetadata & { config: ServerConfig })[] =
|
||||
maxLength: 32,
|
||||
},
|
||||
},
|
||||
allowGuestDemoWorkspace: true,
|
||||
},
|
||||
},
|
||||
]
|
||||
@@ -88,6 +90,7 @@ export const BUILD_IN_SERVERS: (ServerMetadata & { config: ServerConfig })[] =
|
||||
maxLength: 32,
|
||||
},
|
||||
},
|
||||
allowGuestDemoWorkspace: true,
|
||||
},
|
||||
},
|
||||
]
|
||||
@@ -120,6 +123,7 @@ export const BUILD_IN_SERVERS: (ServerMetadata & { config: ServerConfig })[] =
|
||||
maxLength: 32,
|
||||
},
|
||||
},
|
||||
allowGuestDemoWorkspace: true,
|
||||
},
|
||||
},
|
||||
]
|
||||
@@ -148,6 +152,7 @@ export const BUILD_IN_SERVERS: (ServerMetadata & { config: ServerConfig })[] =
|
||||
maxLength: 32,
|
||||
},
|
||||
},
|
||||
allowGuestDemoWorkspace: true,
|
||||
},
|
||||
},
|
||||
]
|
||||
@@ -178,6 +183,7 @@ export const BUILD_IN_SERVERS: (ServerMetadata & { config: ServerConfig })[] =
|
||||
maxLength: 32,
|
||||
},
|
||||
},
|
||||
allowGuestDemoWorkspace: true,
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
@@ -82,6 +82,7 @@ export class Server extends Entity<{
|
||||
credentialsRequirement: config.credentialsRequirement,
|
||||
features: config.features,
|
||||
oauthProviders: config.oauthProviders,
|
||||
allowGuestDemoWorkspace: config.allowGuestDemoWorkspace,
|
||||
serverName: config.name,
|
||||
type: config.type,
|
||||
version: config.version,
|
||||
|
||||
@@ -82,6 +82,7 @@ export class ServersService extends Service {
|
||||
credentialsRequirement: config.credentialsRequirement,
|
||||
features: config.features,
|
||||
oauthProviders: config.oauthProviders,
|
||||
allowGuestDemoWorkspace: config.allowGuestDemoWorkspace,
|
||||
serverName: config.name,
|
||||
type: config.type,
|
||||
initialized: config.initialized,
|
||||
|
||||
@@ -14,6 +14,7 @@ export interface ServerMetadata {
|
||||
export interface ServerConfig {
|
||||
serverName: string;
|
||||
features: ServerFeature[];
|
||||
allowGuestDemoWorkspace: boolean;
|
||||
oauthProviders: OAuthProviderType[];
|
||||
type: ServerDeploymentType;
|
||||
initialized?: boolean;
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import type { FlagInfo } from './types';
|
||||
|
||||
// const isNotStableBuild = BUILD_CONFIG.appBuildType !== 'stable';
|
||||
const isDesktopEnvironment = BUILD_CONFIG.isElectron;
|
||||
const isCanaryBuild = BUILD_CONFIG.appBuildType === 'canary';
|
||||
const isMobile = BUILD_CONFIG.isMobileEdition;
|
||||
|
||||
@@ -149,15 +148,6 @@ export const AFFINE_FLAGS = {
|
||||
configurable: isCanaryBuild && !isMobile,
|
||||
defaultState: isCanaryBuild,
|
||||
},
|
||||
enable_local_workspace: {
|
||||
category: 'affine',
|
||||
displayName:
|
||||
'com.affine.settings.workspace.experimental-features.enable-local-workspace.name',
|
||||
description:
|
||||
'com.affine.settings.workspace.experimental-features.enable-local-workspace.description',
|
||||
configurable: isCanaryBuild,
|
||||
defaultState: isDesktopEnvironment || isCanaryBuild,
|
||||
},
|
||||
enable_advanced_block_visibility: {
|
||||
category: 'blocksuite',
|
||||
bsFlag: 'enable_advanced_block_visibility',
|
||||
|
||||
@@ -8883,6 +8883,14 @@ export function useAFFiNEI18N(): {
|
||||
["error.INVALID_INDEXER_INPUT"](options: {
|
||||
readonly reason: string;
|
||||
}): string;
|
||||
/**
|
||||
* `Comment not found.`
|
||||
*/
|
||||
["error.COMMENT_NOT_FOUND"](): string;
|
||||
/**
|
||||
* `Reply not found.`
|
||||
*/
|
||||
["error.REPLY_NOT_FOUND"](): string;
|
||||
} { const { t } = useTranslation(); return useMemo(() => createProxy((key) => t.bind(null, key)), [t]); }
|
||||
function createComponent(i18nKey: string) {
|
||||
return (props) => createElement(Trans, { i18nKey, shouldUnescape: true, ...props });
|
||||
|
||||
@@ -903,12 +903,6 @@
|
||||
"com.affine.payment.billing-type-form.description": "يرجى إخبارنا المزيد عن استخدامك، لتحسين AFFiNE.",
|
||||
"com.affine.payment.billing-type-form.go": "اذهب",
|
||||
"com.affine.payment.billing-type-form.title": "أخبرنا عن استخدامك",
|
||||
"com.affine.payment.blob-limit.description.local": "أقصى حجم للملفات التي يمكن تحميلها لمساحات العمل المحلية هو {{quota}}.",
|
||||
"com.affine.payment.blob-limit.description.member": "أقصى حجم للملفات القابلة للتحميل في هذه المساحة المشتركة هو {{quota}}. يمكنك الاتصال بمالك هذه المساحة.",
|
||||
"com.affine.payment.blob-limit.description.owner": "أقصى حجم للملفات التي يمكن تحميلها لمساحة العمل هذه هو {{quota}}. للاستمرار، يمكنك:",
|
||||
"com.affine.payment.blob-limit.description.owner.tips-1": "ترقية الحساب للحصول على حدود أكبر لرفع الملفات",
|
||||
"com.affine.payment.blob-limit.description.owner.tips-2": "ترقية خطة مساحة العمل لزيادة السعة التخزينية لجميع الأعضاء",
|
||||
"com.affine.payment.blob-limit.description.owner.tips-3": "ضغط ملفك وإعادة الرفع",
|
||||
"com.affine.payment.blob-limit.title": "لقد وصلت إلى الحد الأقصى",
|
||||
"com.affine.payment.book-a-demo": "احجز عرضًا تجريبيًا",
|
||||
"com.affine.payment.buy-pro": "اشترِ Pro",
|
||||
@@ -2050,8 +2044,6 @@
|
||||
"com.affine.recording.start": "ابدأ",
|
||||
"com.affine.recording.dismiss": "تجاهل",
|
||||
"com.affine.recording.stop": "توقف",
|
||||
"com.affine.migration-all-docs-notification.title": "يجب ترحيل بيانات كل المستندات لاستخدام ميزاتها بشكل كامل.",
|
||||
"com.affine.migration-all-docs-notification.content": "لقد قمنا بترقية تنسيق البيانات، مما يتطلب ترحيل البيانات المحلية للاستفادة من الميزات. معلومات الإنشاء والتحديث محفوظة الآن محليًا. بدون التحديث، سترى الحالة الفعلية. قم بالترقية في ظروف شبكة أفضل.",
|
||||
"com.affine.migration-all-docs-notification.error": "فشل الترحيل: {{errorMessage}}",
|
||||
"com.affine.migration-all-docs-notification.button": "ترحيل البيانات",
|
||||
"error.INTERNAL_SERVER_ERROR": "حدث خطأ داخلي.",
|
||||
|
||||
@@ -903,12 +903,6 @@
|
||||
"com.affine.payment.billing-type-form.description": "Bitte erzähle uns mehr über deinen Anwendungsfall, um AFFiNE zu verbessern.",
|
||||
"com.affine.payment.billing-type-form.go": "Los",
|
||||
"com.affine.payment.billing-type-form.title": "Teile uns deinen Anwendungsfall mit",
|
||||
"com.affine.payment.blob-limit.description.local": "Die maximale Datei-Uploadgröße für lokale Workspaces beträgt {{quota}}.",
|
||||
"com.affine.payment.blob-limit.description.member": "Die maximale Datei-Uploadgröße für diesen verbundenen Workspace beträgt {{quota}}. Du kannst den Besitzer dieses Workspaces kontaktieren.",
|
||||
"com.affine.payment.blob-limit.description.owner": "Die maximale Datei-Uploadgröße für diesen Workspace beträgt {{quota}}. Um fortzufahren, kannst du:",
|
||||
"com.affine.payment.blob-limit.description.owner.tips-1": "Upgrade dein Konto, um höhere Datei-Upload-Limits zu erhalten",
|
||||
"com.affine.payment.blob-limit.description.owner.tips-2": "Upgrade den Workspace-Plan, um den Speicher für alle Mitglieder zu erhöhen",
|
||||
"com.affine.payment.blob-limit.description.owner.tips-3": "Komprimiere deine Datei und lade sie erneut hoch",
|
||||
"com.affine.payment.blob-limit.title": "Du hast das Limit erreicht",
|
||||
"com.affine.payment.book-a-demo": "Demo buchen",
|
||||
"com.affine.payment.buy-pro": "Pro kaufen",
|
||||
@@ -2053,8 +2047,6 @@
|
||||
"com.affine.recording.start": "Start",
|
||||
"com.affine.recording.dismiss": "Verwerfen",
|
||||
"com.affine.recording.stop": "Stopp",
|
||||
"com.affine.migration-all-docs-notification.title": "Alle Seitendaten müssen migriert werden, um die Funktionen vollständig nutzen zu können.",
|
||||
"com.affine.migration-all-docs-notification.content": "Wir haben das Datenformat aktualisiert, wodurch eine lokale Datenmigration erforderlich ist, um die Funktionen zu nutzen. Informationen zu \"Erstellt von\" und \"Aktualisiert von\" werden jetzt lokal gespeichert. Ohne die Aktualisierung siehst du den aktuellen Status. Führe das Upgrade unter besseren Netzwerkbedingungen durch.",
|
||||
"com.affine.migration-all-docs-notification.error": "Migration fehlgeschlagen: {{errorMessage}}",
|
||||
"com.affine.migration-all-docs-notification.button": "Daten migrieren",
|
||||
"error.INTERNAL_SERVER_ERROR": "Es ist ein interner Fehler aufgetreten.",
|
||||
|
||||
@@ -903,12 +903,6 @@
"com.affine.payment.billing-type-form.description": "Παρακαλώ πείτε μας περισσότερα για τη χρήση σας, ώστε να κάνουμε το AFFiNE καλύτερο.",
"com.affine.payment.billing-type-form.go": "Πάμε",
"com.affine.payment.billing-type-form.title": "Πείτε μας τη χρήση σας",
"com.affine.payment.blob-limit.description.local": "Το μέγιστο μέγεθος αρχείου για τοπικούς χώρους εργασίας είναι {{quota}}.",
"com.affine.payment.blob-limit.description.member": "Το μέγιστο μέγεθος αρχείου για αυτόν τον συνδεδεμένο χώρο εργασίας είναι {{quota}}. Μπορείτε να επικοινωνήσετε με τον ιδιοκτήτη αυτού του χώρου εργασίας.",
"com.affine.payment.blob-limit.description.owner": "Το μέγιστο μέγεθος αρχείου για μεταφόρτωση σε αυτόν τον χώρο εργασίας είναι {{quota}}. Για να συνεχίσετε, μπορείτε να:",
"com.affine.payment.blob-limit.description.owner.tips-1": "Αναβαθμίστε το λογαριασμό σας για μεγαλύτερα όρια μεταφόρτωσης αρχείων",
"com.affine.payment.blob-limit.description.owner.tips-2": "Αναβαθμίστε το πλάνο του χώρου εργασίας για μεγαλύτερη αποθήκευση για όλα τα μέλη",
"com.affine.payment.blob-limit.description.owner.tips-3": "Συμπιέστε το αρχείο σας και ανεβάστε ξανά",
"com.affine.payment.blob-limit.title": "Έχετε φτάσει το όριο",
"com.affine.payment.book-a-demo": "Κλείστε μια επίδειξη",
"com.affine.payment.buy-pro": "Αγοράστε το Pro",
@@ -2050,8 +2044,6 @@
"com.affine.recording.start": "Έναρξη",
"com.affine.recording.dismiss": "Απόρριψη",
"com.affine.recording.stop": "Σταματήστε",
"com.affine.migration-all-docs-notification.title": "Όλα τα δεδομένα των εγγράφων χρειάζονται μετανάστευση για να αξιοποιήσουν πλήρως τις δυνατότητές τους.",
"com.affine.migration-all-docs-notification.content": "Αναβαθμίσαμε τη μορφή δεδομένων, απαιτώντας μετανάστευση τοπικών δεδομένων για τη χρήση δυνατοτήτων. Οι πληροφορίες Δημιουργήθηκε από και Ενημερώθηκε από αποθηκεύονται τώρα τοπικά. Χωρίς την ενημέρωση, θα βλέπετε την πραγματική κατάσταση. Εκτελέστε την αναβάθμιση σε καλύτερες συνθήκες δικτύου.",
"com.affine.migration-all-docs-notification.error": "Αποτυχία μετανάστευσης: {{errorMessage}}",
"com.affine.migration-all-docs-notification.button": "Μετανάστευση δεδομένων",
"error.INTERNAL_SERVER_ERROR": "Παρουσιάστηκε εσωτερικό σφάλμα.",
@@ -2193,5 +2193,7 @@
"error.INVALID_APP_CONFIG_INPUT": "Invalid app config input: {{message}}",
"error.SEARCH_PROVIDER_NOT_FOUND": "Search provider not found.",
"error.INVALID_SEARCH_PROVIDER_REQUEST": "Invalid request argument to search provider: {{reason}}",
"error.INVALID_INDEXER_INPUT": "Invalid indexer input: {{reason}}"
"error.INVALID_INDEXER_INPUT": "Invalid indexer input: {{reason}}",
"error.COMMENT_NOT_FOUND": "Comment not found.",
"error.REPLY_NOT_FOUND": "Reply not found."
}
@@ -903,12 +903,6 @@
"com.affine.payment.billing-type-form.description": "Por favor, cuéntenos más sobre su caso de uso para mejorar AFFiNE.",
"com.affine.payment.billing-type-form.go": "Ir",
"com.affine.payment.billing-type-form.title": "Cuéntenos sobre su caso de uso",
"com.affine.payment.blob-limit.description.local": "El tamaño máximo de carga de archivos para espacios de trabajo locales es {{quota}}.",
"com.affine.payment.blob-limit.description.member": "El tamaño máximo de carga de archivos para este espacio de trabajo unido es {{quota}}. Puedes contactar al propietario de este espacio de trabajo.",
"com.affine.payment.blob-limit.description.owner": "El tamaño máximo de carga de archivos para este espacio de trabajo es {{quota}}. Para continuar, puede:",
"com.affine.payment.blob-limit.description.owner.tips-1": "Actualice su cuenta para límites de carga de archivos más grandes",
"com.affine.payment.blob-limit.description.owner.tips-2": "Actualice el plan de espacio de trabajo para aumentar el almacenamiento para todos los miembros",
"com.affine.payment.blob-limit.description.owner.tips-3": "Comprima su archivo y cárguelo de nuevo",
"com.affine.payment.blob-limit.title": "Has alcanzado el límite",
"com.affine.payment.book-a-demo": "Reservar una demostración",
"com.affine.payment.buy-pro": "Comprar Pro",
@@ -2050,8 +2044,6 @@
"com.affine.recording.start": "Iniciar",
"com.affine.recording.dismiss": "Descartar",
"com.affine.recording.stop": "Detener",
"com.affine.migration-all-docs-notification.title": "Todos los datos de los documentos deben ser migrados para utilizar completamente sus funciones.",
"com.affine.migration-all-docs-notification.content": "Hemos actualizado el formato de datos, requiriendo la migración de datos locales para utilizar las funciones. La información de Creado por y Actualizado por ahora se almacena localmente. Sin la actualización, verá el estado actual. Realice la actualización en mejores condiciones de red.",
"com.affine.migration-all-docs-notification.error": "Falló la migración: {{errorMessage}}",
"com.affine.migration-all-docs-notification.button": "Migrar datos",
"error.INTERNAL_SERVER_ERROR": "Ocurrió un error interno.",
@@ -903,12 +903,6 @@
"com.affine.payment.billing-type-form.description": "لطفاً اطلاعات بیشتری درباره مورد استفاده خود به ما بگویید تا AFFiNE بهتر شود.",
"com.affine.payment.billing-type-form.go": "برو",
"com.affine.payment.billing-type-form.title": "مورد استفاده خود را به ما بگویید",
"com.affine.payment.blob-limit.description.local": "حداکثر اندازه بارگذاری فایل برای فضاهای کاری محلی {{quota}} است.",
"com.affine.payment.blob-limit.description.member": "حداکثر اندازه بارگذاری فایل برای این فضای کاری پیوسته {{quota}} است. میتوانید با مالک این فضای کاری تماس بگیرید.",
"com.affine.payment.blob-limit.description.owner": "حداکثر اندازه بارگذاری فایل برای این فضای کاری {{quota}} است. برای ادامه، میتوانید:",
"com.affine.payment.blob-limit.description.owner.tips-1": "حساب خود را برای محدودیتهای بارگذاری فایل بزرگتر ارتقاء دهید",
"com.affine.payment.blob-limit.description.owner.tips-2": "طرح فضای کاری را ارتقاء دهید تا فضای ذخیرهسازی برای همه اعضا افزایش یابد",
"com.affine.payment.blob-limit.description.owner.tips-3": "فایل خود را فشرده کنید و دوباره بارگذاری کنید",
"com.affine.payment.blob-limit.title": "شما به حد مجاز رسیدهاید",
"com.affine.payment.book-a-demo": "رزرو دمو",
"com.affine.payment.buy-pro": "خرید Pro",
@@ -2050,8 +2044,6 @@
"com.affine.recording.start": "شروع",
"com.affine.recording.dismiss": "رد کردن",
"com.affine.recording.stop": "توقف",
"com.affine.migration-all-docs-notification.title": "برای استفاده کامل از ویژگیهای آن، تمام دادههای اسناد باید مهاجرت یابند.",
"com.affine.migration-all-docs-notification.content": "ما فرمت داده را ارتقاء دادیم، نیاز به مهاجرت دادههای محلی برای استفاده از ویژگیها میباشد. اطلاعات ایجاد شده و بهروزرسانی شده اکنون محلی ذخیره میشود. بدون بهروزرسانی، وضعیت واقعی را مشاهده خواهید کرد. ارتقاء را در شرایط شبکه بهتر انجام دهید.",
"com.affine.migration-all-docs-notification.error": "مهاجرت ناموفق بود: {{errorMessage}}",
"com.affine.migration-all-docs-notification.button": "مهاجرت داده",
"error.INTERNAL_SERVER_ERROR": "خطای داخلی رخ داد.",
@@ -903,12 +903,6 @@
"com.affine.payment.billing-type-form.description": "Veuillez nous en dire plus sur votre cas d'utilisation, pour améliorer AFFiNE.",
"com.affine.payment.billing-type-form.go": "Aller",
"com.affine.payment.billing-type-form.title": "Dites-nous votre cas d'utilisation",
"com.affine.payment.blob-limit.description.local": "La taille maximale des fichiers pour les espaces de travail locaux est de {{quota}}.",
"com.affine.payment.blob-limit.description.member": "La taille maximale des fichiers pour cet espace de travail rejoint est de {{quota}}. Vous pouvez contacter le propriétaire de cet espace de travail.",
"com.affine.payment.blob-limit.description.owner": "La taille maximale du fichier pour cet espace de travail est de {{quota}}. Pour continuer, vous pouvez :",
"com.affine.payment.blob-limit.description.owner.tips-1": "Mettez à niveau votre compte pour obtenir des limites de téléchargement de fichiers plus importantes",
"com.affine.payment.blob-limit.description.owner.tips-2": "Améliorez le plan de l'espace de travail pour augmenter le stockage pour tous les membres",
"com.affine.payment.blob-limit.description.owner.tips-3": "Compressez votre fichier et téléchargez-le à nouveau",
"com.affine.payment.blob-limit.title": "Vous avez atteint la limite",
"com.affine.payment.book-a-demo": "Prendre rendez-vous pour une démonstration",
"com.affine.payment.buy-pro": "Acheter la version pro",
@@ -2050,8 +2044,6 @@
"com.affine.recording.start": "Démarrer",
"com.affine.recording.dismiss": "Ignorer",
"com.affine.recording.stop": "Arrêter",
"com.affine.migration-all-docs-notification.title": "Toutes les données documentaires doivent être migrées pour profiter pleinement de ses fonctionnalités.",
"com.affine.migration-all-docs-notification.content": "Nous avons mis à jour le format des données, nécessitant une migration des données locales pour utiliser les fonctionnalités. Les informations \"Créé par\" et \"Mis à jour par\" sont désormais stockées localement. Sans mise à jour, vous verrez le statut actuel. Effectuez la mise à niveau dans de meilleures conditions réseau.",
"com.affine.migration-all-docs-notification.error": "Échec de la migration : {{errorMessage}}",
"com.affine.migration-all-docs-notification.button": "Migrer les données",
"error.INTERNAL_SERVER_ERROR": "Une erreur interne est survenue.",
@@ -903,12 +903,6 @@
"com.affine.payment.billing-type-form.description": "Per favore, raccontaci di più sul tuo caso d'uso, per migliorare AFFiNE.",
"com.affine.payment.billing-type-form.go": "Vai",
"com.affine.payment.billing-type-form.title": "Raccontaci il tuo caso d'uso",
"com.affine.payment.blob-limit.description.local": "La dimensione massima del file upload per i workspace locali è di {{quota}}.",
"com.affine.payment.blob-limit.description.member": "La dimensione massima del file upload per questo workspace unito è di {{quota}}. Puoi contattare il proprietario di questo spazio di lavoro.",
"com.affine.payment.blob-limit.description.owner": "La dimensione massima del file per questo spazio di lavoro è {{quota}}. Per procedere, puoi:",
"com.affine.payment.blob-limit.description.owner.tips-1": "Aggiorna il tuo account per limiti di upload file più grandi",
"com.affine.payment.blob-limit.description.owner.tips-2": "Aggiorna il piano dello spazio di lavoro per aumentare lo spazio di archiviazione per tutti i membri",
"com.affine.payment.blob-limit.description.owner.tips-3": "Comprimi il tuo file e carica di nuovo",
"com.affine.payment.blob-limit.title": "Hai raggiunto il limite",
"com.affine.payment.book-a-demo": "Prenota una demo",
"com.affine.payment.buy-pro": "Acquista Pro",
@@ -2050,8 +2044,6 @@
"com.affine.recording.start": "Avvia",
"com.affine.recording.dismiss": "Ignora",
"com.affine.recording.stop": "Ferma",
"com.affine.migration-all-docs-notification.title": "Tutti i dati dei documenti devono essere migrati per utilizzarne appieno le funzionalità.",
"com.affine.migration-all-docs-notification.content": "Abbiamo aggiornato il formato dei dati, richiedendo la migrazione dei dati locali per utilizzare le funzionalità. Le informazioni \"Creato da\" e \"Aggiornato da\" sono ora memorizzate localmente. Senza l'aggiornamento, vedrai lo stato attuale. Effettua l'aggiornamento in condizioni di rete migliori.",
"com.affine.migration-all-docs-notification.error": "Migrazione fallita: {{errorMessage}}",
"com.affine.migration-all-docs-notification.button": "Migra i dati",
"error.INTERNAL_SERVER_ERROR": "Si è verificato un errore interno.",
@@ -903,12 +903,6 @@
"com.affine.payment.billing-type-form.description": "AFFiNEをより良くするため、ユースケースについて教えてください。",
"com.affine.payment.billing-type-form.go": "進む",
"com.affine.payment.billing-type-form.title": "ユースケースを教えてください",
"com.affine.payment.blob-limit.description.local": "ローカルワークスペースのファイルアップロードの最大サイズは{{quota}}です。",
"com.affine.payment.blob-limit.description.member": "この参加済みワークスペースのファイルアップロードの最大サイズは{{quota}} です。ワークスペースの所有者に連絡することができます。",
"com.affine.payment.blob-limit.description.owner": "このワークスペースのファイルの最大アップロードサイズは{{quota}}です。続行するには、以下の方法があります:",
"com.affine.payment.blob-limit.description.owner.tips-1": "アカウントをアップグレードして、より大きなファイルアップロード制限を取得",
"com.affine.payment.blob-limit.description.owner.tips-2": "すべてのメンバーのためにストレージを増やすためにワークスペースプランをアップグレードする",
"com.affine.payment.blob-limit.description.owner.tips-3": "ファイルを圧縮して再度アップロードする",
"com.affine.payment.blob-limit.title": "制限に達しています",
"com.affine.payment.book-a-demo": "デモを予約",
"com.affine.payment.buy-pro": "プロを購入",
@@ -2050,8 +2044,6 @@
"com.affine.recording.start": "開始",
"com.affine.recording.dismiss": "閉じる",
"com.affine.recording.stop": "ストップ",
"com.affine.migration-all-docs-notification.title": "すべての文書データを移行して、その機能を完全に活用します。",
"com.affine.migration-all-docs-notification.content": "データフォーマットをアップグレードし、機能を活用するためにデータのローカル移行が必要です。「作成者」と「アップデート」情報が今ローカルに保存されます。更新を行わないと、実際の状態が表示されます。より良いネットワーク条件でアップグレードを行ってください。",
"com.affine.migration-all-docs-notification.error": "移行失敗: {{errorMessage}}",
"com.affine.migration-all-docs-notification.button": "データを移行する",
"error.INTERNAL_SERVER_ERROR": "内部エラーが発生しました。",
@@ -903,12 +903,6 @@
"com.affine.payment.billing-type-form.description": "Proszę, powiedz nam więcej o swoim przypadku użycia, aby uczynić AFFiNE lepszym.",
"com.affine.payment.billing-type-form.go": "Idź",
"com.affine.payment.billing-type-form.title": "Powiedz nam o swoim przypadku użycia",
"com.affine.payment.blob-limit.description.local": "Maksymalny rozmiar przesyłanego pliku dla lokalnych przestrzeni roboczych to {{quota}}.",
"com.affine.payment.blob-limit.description.member": "Maksymalny rozmiar przesyłanego pliku dla tej przestrzeni roboczej to {{quota}}. Możesz skontaktować się z właścicielem tej przestrzeni roboczej.",
"com.affine.payment.blob-limit.description.owner": "Maksymalny rozmiar przesyłanych plików dla tej przestrzeni roboczej to {{quota}}. Aby kontynuować, możesz:",
"com.affine.payment.blob-limit.description.owner.tips-1": "Ulepsz konto, aby uzyskać większe limity przesyłania plików",
"com.affine.payment.blob-limit.description.owner.tips-2": "Ulepsz plan przestrzeni roboczej, aby zwiększyć pojemność przechowywania dla wszystkich członków",
"com.affine.payment.blob-limit.description.owner.tips-3": "Skompresuj plik i spróbuj przesłać ponownie",
"com.affine.payment.blob-limit.title": "Osiągnąłeś limit",
"com.affine.payment.book-a-demo": "Zarezerwuj demo",
"com.affine.payment.buy-pro": "Kup Pro",
@@ -2050,8 +2044,6 @@
"com.affine.recording.start": "Rozpocznij",
"com.affine.recording.dismiss": "Pomiń",
"com.affine.recording.stop": "Stop",
"com.affine.migration-all-docs-notification.title": "Wszystkie dane dokumentów muszą zostać zmigrowane, aby w pełni wykorzystać ich funkcje.",
"com.affine.migration-all-docs-notification.content": "Zaktualizowaliśmy format danych, wymagając migracji danych lokalnych, aby wykorzystać funkcje. Informacje \"Utworzone przez\" i \"Zaktualizowane przez\" są teraz przechowywane lokalnie. Bez aktualizacji zobaczysz faktyczny status. Przeprowadź aktualizację w lepszych warunkach sieciowych.",
"com.affine.migration-all-docs-notification.error": "Migracja nie powiodła się: {{errorMessage}}",
"com.affine.migration-all-docs-notification.button": "Migruj dane",
"error.INTERNAL_SERVER_ERROR": "Wystąpił błąd wewnętrzny.",
@@ -903,12 +903,6 @@
"com.affine.payment.billing-type-form.description": "Por favor, conte-nos mais sobre seu caso de uso, para tornar o AFFiNE melhor.",
"com.affine.payment.billing-type-form.go": "Ir",
"com.affine.payment.billing-type-form.title": "Conte-nos seu caso de uso",
"com.affine.payment.blob-limit.description.local": "O tamanho máximo de upload de arquivo para workspaces locais é {{quota}}.",
"com.affine.payment.blob-limit.description.member": "O tamanho máximo de upload de arquivo para este workspace compartilhado é {{quota}}. Você pode entrar em contato com o proprietário deste workspace.",
"com.affine.payment.blob-limit.description.owner": "O tamanho máximo de upload de arquivo para este espaço de trabalho é {{quota}}. Para prosseguir, você pode:",
"com.affine.payment.blob-limit.description.owner.tips-1": "Atualize sua conta para limites de upload de arquivo maiores",
"com.affine.payment.blob-limit.description.owner.tips-2": "Atualize o plano do espaço de trabalho para aumentar o armazenamento para todos os membros",
"com.affine.payment.blob-limit.description.owner.tips-3": "Comprimir seu arquivo e fazer o upload novamente",
"com.affine.payment.blob-limit.title": "Você atingiu o limite",
"com.affine.payment.book-a-demo": "Agendar uma demonstração",
"com.affine.payment.buy-pro": "Comprar Pro",
@@ -2050,8 +2044,6 @@
"com.affine.recording.start": "Iniciar",
"com.affine.recording.dismiss": "Dispensar",
"com.affine.recording.stop": "Parar",
"com.affine.migration-all-docs-notification.title": "Todos os dados dos documentos precisam ser migrados para utilizar totalmente seus recursos.",
"com.affine.migration-all-docs-notification.content": "Atualizamos o formato de dados, requerendo migração de dados locais para aproveitar os recursos. As informações de Criado por e Atualizado por agora são armazenadas localmente. Sem a atualização, você verá o estado atual. Realize a atualização em condições de rede melhores.",
"com.affine.migration-all-docs-notification.error": "Falha na migração: {{errorMessage}}",
"com.affine.migration-all-docs-notification.button": "Migrar dados",
"error.INTERNAL_SERVER_ERROR": "Ocorreu um erro interno.",
@@ -903,12 +903,6 @@
"com.affine.payment.billing-type-form.description": "Расскажите нам подробнее о вашем варианте использования, чтобы сделать AFFiNE лучше.",
"com.affine.payment.billing-type-form.go": "Вперёд",
"com.affine.payment.billing-type-form.title": "Расскажите нам о своём опыте использования",
"com.affine.payment.blob-limit.description.local": "Максимальный размер загружаемого файла для локальных рабочих пространств равен {{quota}}.",
"com.affine.payment.blob-limit.description.member": "Максимальный размер загружаемого файла для этого присоединённого рабочего пространства равен {{quota}}. Вы можете связаться с владельцем этого рабочего пространства.",
"com.affine.payment.blob-limit.description.owner": "Максимальный размер загружаемого файла для этого рабочего пространства равен {{quota}}. Чтобы продолжить, вы можете:",
"com.affine.payment.blob-limit.description.owner.tips-1": "Обновите аккаунт для увеличения лимитов на загрузку файлов",
"com.affine.payment.blob-limit.description.owner.tips-2": "Обновите план рабочего пространства, чтобы увеличить хранилище для всех членов",
"com.affine.payment.blob-limit.description.owner.tips-3": "Сожмите ваш файл и загрузите его снова",
"com.affine.payment.blob-limit.title": "Вы достигли предела",
"com.affine.payment.book-a-demo": "Заказать демонстрацию",
"com.affine.payment.buy-pro": "Купить Pro",
@@ -2050,8 +2044,6 @@
"com.affine.recording.start": "Начать",
"com.affine.recording.dismiss": "Отклонить",
"com.affine.recording.stop": "Остановить",
"com.affine.migration-all-docs-notification.title": "Для полноценного использования всех функций необходимо перенести все данные документов.",
"com.affine.migration-all-docs-notification.content": "Мы обновили формат данных, что требует локальной миграции данных для использования функций. Информация о том, кем создано и когда обновлено, теперь хранится локально. Без обновления вы будете видеть текущий статус. Выполните обновление при лучших условиях сети.",
"com.affine.migration-all-docs-notification.error": "Миграция не удалась: {{errorMessage}}",
"com.affine.migration-all-docs-notification.button": "Перенос данных",
"error.INTERNAL_SERVER_ERROR": "Произошла внутренняя ошибка.",
@@ -903,12 +903,6 @@
"com.affine.payment.billing-type-form.description": "Vänligen berätta mer om ditt användningsfall, för att göra AFFiNE bättre.",
"com.affine.payment.billing-type-form.go": "Gå",
"com.affine.payment.billing-type-form.title": "Berätta om ditt användningsfall",
"com.affine.payment.blob-limit.description.local": "Den maximala filuppladdningsstorleken för lokala arbetsytor är {{quota}}.",
"com.affine.payment.blob-limit.description.member": "Den maximala filuppladdningsstorleken för denna anslutna arbetsyta är {{quota}}. Du kan kontakta ägaren av denna arbetsyta.",
"com.affine.payment.blob-limit.description.owner": "Den maximala filuppladdningsstorleken för denna arbetsyta är {{quota}}. För att fortsätta kan du:",
"com.affine.payment.blob-limit.description.owner.tips-1": "Uppgradera ditt konto för större filuppladdningsgränser",
"com.affine.payment.blob-limit.description.owner.tips-2": "Uppgradera arbetsytans plan för att öka lagringsutrymmet för alla medlemmar",
"com.affine.payment.blob-limit.description.owner.tips-3": "Komprimera din fil och ladda upp igen",
"com.affine.payment.blob-limit.title": "Du har nått gränsen",
"com.affine.payment.book-a-demo": "Boka en demo",
"com.affine.payment.buy-pro": "Köp Pro",
@@ -2050,8 +2044,6 @@
"com.affine.recording.start": "Starta",
"com.affine.recording.dismiss": "Avfärda",
"com.affine.recording.stop": "Stoppa",
"com.affine.migration-all-docs-notification.title": "All dokumentsdata måste migreras för att fullt ut utnyttja dess funktioner.",
"com.affine.migration-all-docs-notification.content": "Vi uppgraderade dataformatet, vilket kräver lokal datamigrering för att utnyttja funktionerna. Skapat av och Uppdaterad av info lagras nu lokalt. Utan uppdateringen kommer du att se den faktiska statusen. Utför uppgraderingen under bättre nätverksförhållanden.",
"com.affine.migration-all-docs-notification.error": "Migration misslyckades: {{errorMessage}}",
"com.affine.migration-all-docs-notification.button": "Migrera data",
"error.INTERNAL_SERVER_ERROR": "Ett internt fel har inträffat.",
@@ -903,12 +903,6 @@
"com.affine.payment.billing-type-form.description": "Будь ласка, розкажіть нам більше про свій випадок використання, щоб покращити AFFiNE.",
"com.affine.payment.billing-type-form.go": "Перейти",
"com.affine.payment.billing-type-form.title": "Розкажіть нам про свої випадки використання",
"com.affine.payment.blob-limit.description.local": "Максимальний розмір файлу для завантаження в локальні робочі простори – {{quota}}.",
"com.affine.payment.blob-limit.description.member": "Максимальний розмір файлу для завантаження в цій спільній робочій області становить {{quota}}. Ви можете зв'язатися з власником цієї робочої області.",
"com.affine.payment.blob-limit.description.owner": "Максимальний розмір завантаження файлів для цього робочого простору становить {{quota}}. Щоб продовжити, ви можете:",
"com.affine.payment.blob-limit.description.owner.tips-1": "Оновіть свій акаунт, щоб збільшити обмеження на завантаження файлів",
"com.affine.payment.blob-limit.description.owner.tips-2": "Оновіть план робочого простору, щоб збільшити сховище для всіх учасників",
"com.affine.payment.blob-limit.description.owner.tips-3": "Стисніть ваш файл і завантажте знову",
"com.affine.payment.blob-limit.title": "Ви досягли ліміту",
"com.affine.payment.book-a-demo": "Забронювати демо",
"com.affine.payment.buy-pro": "Купити Pro",
@@ -2050,8 +2044,6 @@
"com.affine.recording.start": "Почати",
"com.affine.recording.dismiss": "Відхилити",
"com.affine.recording.stop": "Стоп",
"com.affine.migration-all-docs-notification.title": "Для повного використання функцій усі дані документів необхідно мігрувати.",
"com.affine.migration-all-docs-notification.content": "Ми оновили формат даних, що вимагає міграції локальних даних для використання функцій. Інформація про \"Створено\" та \"Оновлено\" тепер зберігається локально. Без оновлення ви побачите поточний стан. Виконуйте оновлення за кращих мережевих умов.",
"com.affine.migration-all-docs-notification.error": "Міграція провалилася: {{errorMessage}}",
"com.affine.migration-all-docs-notification.button": "Міграція даних",
"error.INTERNAL_SERVER_ERROR": "Сталася внутрішня помилка.",
@@ -903,12 +903,6 @@
"com.affine.payment.billing-type-form.description": "请告诉我们更多关于您的使用案例,以使 AFFiNE 变得更好。",
"com.affine.payment.billing-type-form.go": "前往",
"com.affine.payment.billing-type-form.title": "请告诉我们您的使用案例",
"com.affine.payment.blob-limit.description.local": "本地工作区的最大文件上传大小为 {{quota}}。",
"com.affine.payment.blob-limit.description.member": "您加入的此工作区的最大文件上传大小为 {{quota}}。 您可以联系该工作区的所有者。",
"com.affine.payment.blob-limit.description.owner": "此工作区的最大文件上传限制为 {{quota}}。如需继续,您可以:",
"com.affine.payment.blob-limit.description.owner.tips-1": "升级您的账户以获取更大的文件上传限制",
"com.affine.payment.blob-limit.description.owner.tips-2": "升级工作区计划以为所有成员增加存储空间",
"com.affine.payment.blob-limit.description.owner.tips-3": "压缩您的文件并尝试重新上传",
"com.affine.payment.blob-limit.title": "您已达到存储极限",
"com.affine.payment.book-a-demo": "预订 Demo",
"com.affine.payment.buy-pro": "购买专业版",
@@ -2050,8 +2044,6 @@
"com.affine.recording.start": "开始",
"com.affine.recording.dismiss": "忽略",
"com.affine.recording.stop": "停止",
"com.affine.migration-all-docs-notification.title": "为了充分利用其功能,文档数据需要迁移。",
"com.affine.migration-all-docs-notification.content": "我们升级了数据格式,需要迁移本地数据才能使用相关功能。“创建者”和“更新者”信息现在存储在本地。如果不进行更新,您将看到实际状态。请在网络状况较好的情况下进行升级。",
"com.affine.migration-all-docs-notification.error": "迁移失败:{{errorMessage}}",
"com.affine.migration-all-docs-notification.button": "迁移数据",
"error.INTERNAL_SERVER_ERROR": "发生内部错误。",
@@ -903,12 +903,6 @@
"com.affine.payment.billing-type-form.description": "請告訴我們更多關於您的使用案例,以使 AFFiNE 變得更好。",
"com.affine.payment.billing-type-form.go": "前往",
"com.affine.payment.billing-type-form.title": "請告訴我們您的使用案例",
"com.affine.payment.blob-limit.description.local": "本地工作區的最大文件上傳大小為 {{quota}}。",
"com.affine.payment.blob-limit.description.member": "您加入的此工作區的最大文件上傳大小為 {{quota}}。 您可以聯繫該工作區的所有者。",
"com.affine.payment.blob-limit.description.owner": "此工作區的最大文件上傳大小為 {{quota}}。如需繼續,您可以:",
"com.affine.payment.blob-limit.description.owner.tips-1": "升級您的帳戶以獲取更大的文件上傳限制",
"com.affine.payment.blob-limit.description.owner.tips-2": "升級工作區計劃以為所有成員增加存儲空間",
"com.affine.payment.blob-limit.description.owner.tips-3": "壓縮您的文件並重新上傳",
"com.affine.payment.blob-limit.title": "您已達到存儲極限",
"com.affine.payment.book-a-demo": "預訂 Demo",
"com.affine.payment.buy-pro": "購買專業版",
@@ -2050,8 +2044,6 @@
"com.affine.recording.start": "開始",
"com.affine.recording.dismiss": "忽略",
"com.affine.recording.stop": "停止",
"com.affine.migration-all-docs-notification.title": "為了充分利用其功能,文檔數據需要遷移。",
"com.affine.migration-all-docs-notification.content": "我們升級了數據格式,需要遷移本地數據才能使用相關功能。“創建者”和“更新者”信息現在存儲在本地。如果不進行更新,您將看到實際狀態。請在網絡狀況較好的情況下進行升級。",
"com.affine.migration-all-docs-notification.error": "遷移失敗:{{errorMessage}}",
"com.affine.migration-all-docs-notification.button": "遷移數據",
"error.INTERNAL_SERVER_ERROR": "發生內部錯誤。",
@@ -73,7 +73,8 @@ update_app_stream_version() {

update_ios_marketing_version() {
local file_path=$1
local new_version=$2
# Remove everything after the "-"
local new_version=$(echo "$2" | sed -E 's/-.*$//')

# Check if file exists
if [ ! -f "$file_path" ]; then
@@ -12,10 +12,7 @@ test.describe('AIAction/MakeItReal', () => {
loggedInPage: page,
utils,
}) => {
const { makeItReal } = await utils.editor.askAIWithText(
page,
'AFFiNE is a workspace with fully merged docs'
);
const { makeItReal } = await utils.editor.askAIWithText(page, 'Hello');
const { answer, responses } = await makeItReal();
await expect(answer.locator('iframe')).toBeVisible({ timeout: 30000 });
expect(responses).toEqual(new Set(['insert-below']));
@@ -28,10 +25,7 @@ test.describe('AIAction/MakeItReal', () => {
const { makeItReal } = await utils.editor.askAIWithEdgeless(
page,
async () => {
await utils.editor.createEdgelessText(
page,
'AFFiNE is a workspace with fully merged docs'
);
await utils.editor.createEdgelessText(page, 'Hello');
}
);
const { answer, responses } = await makeItReal();
@@ -46,10 +40,7 @@ test.describe('AIAction/MakeItReal', () => {
const { makeItReal } = await utils.editor.askAIWithEdgeless(
page,
async () => {
await utils.editor.createEdgelessNote(
page,
'AFFiNE is a workspace with fully merged docs'
);
await utils.editor.createEdgelessNote(page, 'Hello');
}
);
const { answer, responses } = await makeItReal();
@@ -77,10 +68,7 @@ test.describe('AIAction/MakeItReal', () => {
loggedInPage: page,
utils,
}) => {
const { makeItReal } = await utils.editor.askAIWithText(
page,
'AFFiNE is a workspace with fully merged docs'
);
const { makeItReal } = await utils.editor.askAIWithText(page, 'Hello');
const { answer } = await makeItReal();
const insert = answer.getByTestId('answer-insert-below');
await insert.click();
@@ -165,9 +165,11 @@ export class ChatPanelUtils {
const actionList = await message.getByTestId('chat-action-list');
return {
message,
content: await message
.locator('chat-content-rich-text editor-host')
.innerText(),
content: (
await message
.locator('chat-content-rich-text editor-host')
.allInnerTexts()
).join(' '),
actions: {
copy: async () => actions.getByTestId('action-copy-button').click(),
retry: async () => actions.getByTestId('action-retry-button').click(),
yarn.lock
@@ -911,6 +911,7 @@ __metadata:
"@ai-sdk/google": "npm:^1.2.18"
"@ai-sdk/google-vertex": "npm:^2.2.23"
"@ai-sdk/openai": "npm:^1.3.22"
"@ai-sdk/openai-compatible": "npm:^0.2.14"
"@ai-sdk/perplexity": "npm:^1.1.9"
"@apollo/server": "npm:^4.11.3"
"@aws-sdk/client-s3": "npm:^3.779.0"
@@ -1113,6 +1114,18 @@
languageName: node
linkType: hard

"@ai-sdk/openai-compatible@npm:^0.2.14":
version: 0.2.14
resolution: "@ai-sdk/openai-compatible@npm:0.2.14"
dependencies:
"@ai-sdk/provider": "npm:1.1.3"
"@ai-sdk/provider-utils": "npm:2.2.8"
peerDependencies:
zod: ^3.0.0
checksum: 10/a2b9fbe6c9a0a9edbe6c5d91fbb06708088c881060cff7018ce0bb7ca52d8f63a20dd334389099d9ea256482f2c22f9f1ff6be0de836d3af98a27274578f0be6
languageName: node
linkType: hard

"@ai-sdk/openai@npm:^1.3.22":
version: 1.3.22
resolution: "@ai-sdk/openai@npm:1.3.22"