refactor: remove legacy cloud (#2987)

This commit is contained in:
Alex Yang
2023-07-03 22:29:37 +08:00
committed by GitHub
parent 3d0a907b49
commit a5d2fafad6
87 changed files with 148 additions and 6383 deletions

View File

@@ -3,16 +3,11 @@
"private": true,
"exports": {
"./atom": "./src/atom.ts",
"./blob": "./src/blob/index.ts",
"./utils": "./src/utils.ts",
"./type": "./src/type.ts",
"./migration": "./src/migration/index.ts",
"./local/crud": "./src/local/crud.ts",
"./providers": "./src/providers/index.ts",
"./affine/*": "./src/affine/*.ts",
"./affine/api": "./src/affine/api/index.ts",
"./affine/keck": "./src/affine/keck/index.ts",
"./affine/shared": "./src/affine/shared.ts"
"./providers": "./src/providers/index.ts"
},
"peerDependencies": {
"@blocksuite/blocks": "*",
@@ -26,7 +21,6 @@
"@toeverything/plugin-infra": "workspace:*",
"@toeverything/y-indexeddb": "workspace:*",
"async-call-rpc": "^6.3.1",
"firebase": "^9.23.0",
"jotai": "^2.2.1",
"js-base64": "^3.7.5",
"ky": "^0.33.3",

View File

@@ -1,313 +0,0 @@
/**
* @deprecated Remove this file after we migrate to the new cloud.
*/
import { MessageCode, Messages } from '@affine/env/constant';
import type {
AcceptInvitingParams,
DeleteWorkspaceParams,
GetUserByEmailParams,
GetWorkspaceDetailParams,
InviteMemberParams,
LeaveWorkspaceParams,
Member,
Permission,
RemoveMemberParams,
UpdateWorkspaceParams,
UsageResponse,
User,
Workspace,
WorkspaceDetail,
} from '@affine/env/workspace/legacy-cloud';
import { checkLoginStorage } from '../login';
/**
 * Error thrown by the legacy-cloud request helpers. Carries a `MessageCode`
 * for the UI and broadcasts it as a document-level `affine-error` event.
 */
export class RequestError extends Error {
  public readonly code: (typeof MessageCode)[keyof typeof MessageCode];

  constructor(
    code: (typeof MessageCode)[keyof typeof MessageCode],
    cause: unknown | null = null
  ) {
    // Human-readable text comes from the shared message table.
    super(Messages[code].message);
    // Notify any global listeners about the failure before finishing setup.
    sendMessage(code);
    this.name = 'RequestError';
    this.code = code;
    this.cause = cause;
  }
}
/** Broadcast a legacy-cloud error code to document-level `affine-error` listeners. */
function sendMessage(code: (typeof MessageCode)[keyof typeof MessageCode]) {
  const event = new CustomEvent('affine-error', { detail: { code } });
  document.dispatchEvent(event);
}
/**
 * Build the legacy-cloud user APIs bound to `prefixUrl`.
 * Each call validates (and refreshes when needed) the stored login token
 * via `checkLoginStorage` before contacting the server.
 */
export function createUserApis(prefixUrl = '/') {
  return {
    /** Fetch the current user's resource usage. */
    getUsage: async (): Promise<UsageResponse> => {
      const { token } = await checkLoginStorage(prefixUrl);
      const response = await fetch(prefixUrl + 'api/resource/usage', {
        method: 'GET',
        headers: { Authorization: token },
      });
      return response.json();
    },
    /** Look up users by email within a workspace; null when none match. */
    getUserByEmail: async (
      params: GetUserByEmailParams
    ): Promise<User[] | null> => {
      const { token } = await checkLoginStorage(prefixUrl);
      const target = new URL(prefixUrl + 'api/user', window.location.origin);
      target.searchParams.set('email', params.email);
      target.searchParams.set('workspace_id', params.workspace_id);
      const response = await fetch(target, {
        method: 'GET',
        headers: { Authorization: token },
      });
      return response.json();
    },
  } as const;
}
/**
 * Build the legacy-cloud workspace/blob APIs bound to `prefixUrl`.
 *
 * Authenticated calls first validate (and, when expired, refresh) the stored
 * login token via `checkLoginStorage(prefixUrl)`; transport failures are
 * wrapped in `RequestError` carrying a `MessageCode` the UI can display.
 */
export function createWorkspaceApis(prefixUrl = '/') {
  return {
    /** List all workspaces visible to the current user. */
    getWorkspaces: async (): Promise<Workspace[]> => {
      const auth = await checkLoginStorage(prefixUrl);
      return fetch(prefixUrl + 'api/workspace', {
        method: 'GET',
        headers: {
          Authorization: auth.token,
          // bypass HTTP caches so membership changes show up immediately
          'Cache-Control': 'no-cache',
        },
      })
        .then(r => r.json())
        .catch(e => {
          throw new RequestError(MessageCode.loadListFailed, e);
        });
    },
    /** Fetch details of a single workspace; null when not found. */
    getWorkspaceDetail: async (
      params: GetWorkspaceDetailParams
    ): Promise<WorkspaceDetail | null> => {
      const auth = await checkLoginStorage(prefixUrl);
      return fetch(prefixUrl + `api/workspace/${params.id}`, {
        method: 'GET',
        headers: {
          Authorization: auth.token,
        },
      })
        .then(r => r.json())
        .catch(e => {
          throw new RequestError(MessageCode.loadListFailed, e);
        });
    },
    /** List the members (permission records) of a workspace. */
    getWorkspaceMembers: async (
      params: GetWorkspaceDetailParams
    ): Promise<Member[]> => {
      const auth = await checkLoginStorage(prefixUrl);
      return fetch(prefixUrl + `api/workspace/${params.id}/permission`, {
        method: 'GET',
        headers: {
          Authorization: auth.token,
        },
      })
        .then(r => r.json())
        .catch(e => {
          throw new RequestError(MessageCode.getMembersFailed, e);
        });
    },
    /** Create a workspace from an encoded Y.Doc snapshot. */
    createWorkspace: async (
      encodedYDoc: ArrayBuffer
    ): Promise<{ id: string }> => {
      // FIX: was `checkLoginStorage()` (no argument), so a token refresh
      // triggered here hit the default '/' prefix instead of `prefixUrl`
      // like every other endpoint in this factory.
      const auth = await checkLoginStorage(prefixUrl);
      return fetch(prefixUrl + 'api/workspace', {
        method: 'POST',
        body: encodedYDoc,
        headers: {
          'Content-Type': 'application/octet-stream',
          Authorization: auth.token,
        },
      })
        .then(r => r.json())
        .catch(e => {
          throw new RequestError(MessageCode.createWorkspaceFailed, e);
        });
    },
    /** Update a workspace's public flag; returns the new value. */
    updateWorkspace: async (
      params: UpdateWorkspaceParams
    ): Promise<{ public: boolean | null }> => {
      const auth = await checkLoginStorage(prefixUrl);
      return fetch(prefixUrl + `api/workspace/${params.id}`, {
        method: 'POST',
        body: JSON.stringify({
          public: params.public,
        }),
        headers: {
          'Content-Type': 'application/json',
          Authorization: auth.token,
        },
      })
        .then(r => r.json())
        .catch(e => {
          throw new RequestError(MessageCode.updateWorkspaceFailed, e);
        });
    },
    /** Delete a workspace; resolves to whether the server accepted (2xx). */
    deleteWorkspace: async (
      params: DeleteWorkspaceParams
    ): Promise<boolean> => {
      const auth = await checkLoginStorage(prefixUrl);
      return fetch(prefixUrl + `api/workspace/${params.id}`, {
        method: 'DELETE',
        headers: {
          Authorization: auth.token,
        },
      })
        .then(r => r.ok)
        .catch(e => {
          throw new RequestError(MessageCode.deleteWorkspaceFailed, e);
        });
    },
    /**
     * Invite a member by email.
     * Notice: Only support normal(contrast to private) workspace.
     */
    inviteMember: async (params: InviteMemberParams): Promise<void> => {
      const auth = await checkLoginStorage(prefixUrl);
      return fetch(prefixUrl + `api/workspace/${params.id}/permission`, {
        method: 'POST',
        body: JSON.stringify({
          email: params.email,
        }),
        headers: {
          'Content-Type': 'application/json',
          Authorization: auth.token,
        },
      })
        .then(r => r.json())
        .catch(e => {
          throw new RequestError(MessageCode.inviteMemberFailed, e);
        });
    },
    /** Remove a member by its permission record id. */
    removeMember: async (params: RemoveMemberParams): Promise<void> => {
      const auth = await checkLoginStorage(prefixUrl);
      return fetch(prefixUrl + `api/permission/${params.permissionId}`, {
        method: 'DELETE',
        headers: {
          Authorization: auth.token,
        },
      })
        .then(r => r.json())
        .catch(e => {
          throw new RequestError(MessageCode.removeMemberFailed, e);
        });
    },
    /** Accept an invitation code. Note: this endpoint needs no auth header. */
    acceptInviting: async (
      params: AcceptInvitingParams
    ): Promise<Permission> => {
      return fetch(prefixUrl + `api/invitation/${params.invitingCode}`, {
        method: 'POST',
      })
        .then(r => r.json())
        .catch(e => {
          throw new RequestError(MessageCode.acceptInvitingFailed, e);
        });
    },
    /** Upload a blob (client-side limit: 10 MiB); resolves to its key. */
    uploadBlob: async (
      workspaceId: string,
      arrayBuffer: ArrayBuffer,
      type: string
    ): Promise<string> => {
      const auth = await checkLoginStorage(prefixUrl);
      // 1048576 = 1 MiB; reject oversized payloads before hitting the wire.
      const mb = arrayBuffer.byteLength / 1048576;
      if (mb > 10) {
        throw new RequestError(MessageCode.blobTooLarge);
      }
      return fetch(prefixUrl + `api/workspace/${workspaceId}/blob`, {
        method: 'PUT',
        body: arrayBuffer,
        headers: {
          'Content-Type': type,
          Authorization: auth.token,
        },
      }).then(r => r.text());
    },
    /** Download a blob's raw bytes. */
    getBlob: async (
      workspaceId: string,
      blobId: string
    ): Promise<ArrayBuffer> => {
      const auth = await checkLoginStorage(prefixUrl);
      return fetch(prefixUrl + `api/workspace/${workspaceId}/blob/${blobId}`, {
        method: 'GET',
        headers: {
          Authorization: auth.token,
        },
      })
        .then(r => r.arrayBuffer())
        .catch(e => {
          throw new RequestError(MessageCode.getBlobFailed, e);
        });
    },
    /** Remove the current user's own permission record from a workspace. */
    leaveWorkspace: async ({ id }: LeaveWorkspaceParams) => {
      const auth = await checkLoginStorage(prefixUrl);
      return fetch(prefixUrl + `api/workspace/${id}/permission`, {
        method: 'DELETE',
        headers: {
          Authorization: auth.token,
        },
      })
        .then(r => r.json())
        .catch(e => {
          throw new RequestError(MessageCode.leaveWorkspaceFailed, e);
        });
    },
    /** Download a published page of a public workspace (no auth). */
    downloadPublicWorkspacePage: async (
      workspaceId: string,
      pageId: string
    ): Promise<ArrayBuffer> => {
      return fetch(
        prefixUrl + `api/public/workspace/${workspaceId}/${pageId}`,
        {
          method: 'GET',
        }
      ).then(r =>
        r.ok
          ? r.arrayBuffer()
          : Promise.reject(new RequestError(MessageCode.noPermission))
      );
    },
    /**
     * Download a workspace's full doc. Published workspaces are fetched
     * from the public endpoint without auth; otherwise auth is required
     * and a non-2xx response is mapped to `noPermission`.
     */
    downloadWorkspace: async (
      workspaceId: string,
      published = false
    ): Promise<ArrayBuffer> => {
      if (published) {
        return fetch(prefixUrl + `api/public/workspace/${workspaceId}`, {
          method: 'GET',
        }).then(r => r.arrayBuffer());
      } else {
        const auth = await checkLoginStorage(prefixUrl);
        return fetch(prefixUrl + `api/workspace/${workspaceId}/doc`, {
          method: 'GET',
          headers: {
            Authorization: auth.token,
          },
        })
          .then(r =>
            !r.ok
              ? Promise.reject(new RequestError(MessageCode.noPermission))
              : r
          )
          .then(r => r.arrayBuffer())
          .catch(e => {
            // Preserve the specific noPermission error; wrap anything else.
            if (e instanceof RequestError) {
              throw e;
            }
            throw new RequestError(MessageCode.downloadWorkspaceFailed, e);
          });
      }
    },
  } as const;
}

View File

@@ -1,10 +0,0 @@
/**
* @deprecated Remove this file after we migrate to the new cloud.
*/
/**
 * Build the legacy-cloud status APIs bound to `prefixUrl`.
 */
export function createStatusApis(prefixUrl = '/') {
  return {
    /** Liveness probe: the legacy server answers 204 when healthy. */
    healthz: async (): Promise<boolean> => {
      const response = await fetch(`${prefixUrl}api/healthz`);
      return response.status === 204;
    },
  } as const;
}

View File

@@ -1,11 +0,0 @@
/**
* @deprecated Remove this file after we migrate to the new cloud.
*/
import { atomWithStorage } from 'jotai/utils';
import type { AccessTokenMessage } from '../affine/login';
// Persisted (storage-backed) atom holding the decoded access-token payload of
// the currently signed-in legacy-cloud user, or null when signed out.
export const currentAffineUserAtom = atomWithStorage<AccessTokenMessage | null>(
  'affine-user-atom',
  null
);

View File

@@ -1,99 +0,0 @@
/**
* @deprecated Remove this file after we migrate to the new cloud.
*/
import { DebugLogger } from '@affine/debug';
import { WorkspaceFlavour } from '@affine/env/workspace';
import { assertExists } from '@blocksuite/global/utils';
import * as url from 'lib0/url';
import * as websocket from 'lib0/websocket';
import { getLoginStorage, isExpired, parseIdToken } from '../affine/login';
import { cleanupWorkspace } from '../utils';
const RECONNECT_INTERVAL_TIME = 500;
const MAX_RECONNECT_TIMES = 50;
/**
 * Reconnecting wrapper around lib0's WebsocketClient used for the
 * legacy-cloud global sync channel. The login token is appended as a query
 * parameter; on connection errors it retries up to MAX_RECONNECT_TIMES,
 * spaced RECONNECT_INTERVAL_TIME apart.
 */
export class WebsocketClient {
  public readonly baseServerUrl: string;
  private _client: websocket.WebsocketClient | null = null;
  public shouldReconnect = false;
  private _retryTimes = 0;
  private _logger = new DebugLogger('affine:channel');
  private _callback: ((message: any) => void) | null = null;

  constructor(serverUrl: string) {
    // Strip every trailing '/' so the query string attaches cleanly.
    while (serverUrl.endsWith('/')) {
      serverUrl = serverUrl.slice(0, serverUrl.length - 1);
    }
    this.baseServerUrl = serverUrl;
  }

  /** Open the channel; no-op (plus workspace cleanup) without a valid token. */
  public connect(callback: (message: any) => void) {
    const loginResponse = getLoginStorage();
    if (!loginResponse || isExpired(parseIdToken(loginResponse.token))) {
      cleanupWorkspace(WorkspaceFlavour.AFFINE);
      return;
    }
    assertExists(loginResponse, 'loginResponse is null');
    const encodedParams = url.encodeQueryParams({
      token: loginResponse.token,
    });
    const serverUrl =
      this.baseServerUrl +
      (encodedParams.length === 0 ? '' : '?' + encodedParams);
    this._client = new websocket.WebsocketClient(serverUrl);
    this._callback = callback;
    this._setupChannel();
    this._client.on('message', this._callback);
  }

  /** Detach the message listener and tear the socket down. */
  public disconnect() {
    assertExists(this._client, 'client is null');
    if (this._callback) {
      this._client.off('message', this._callback);
    }
    this._client.disconnect();
    this._client.destroy();
    this._client = null;
  }

  private _setupChannel() {
    assertExists(this._client, 'client is null');
    const client = this._client;
    client.on('connect', () => {
      this._logger.debug('Affine channel connected');
      this.shouldReconnect = true;
      this._retryTimes = 0;
    });
    client.on('disconnect', ({ error }: { error: Error }) => {
      if (error) {
        const loginResponse = getLoginStorage();
        // FIX: previously this was `isExpired(...)` without negation, which
        // only allowed reconnects while the token WAS expired. The user is
        // logged in when a token is present and NOT expired.
        const isLogin = loginResponse
          ? !isExpired(parseIdToken(loginResponse.token))
          : false;
        // Try to re-connect if connect error has occurred
        if (this.shouldReconnect && isLogin && !client.connected) {
          try {
            setTimeout(() => {
              if (this._retryTimes <= MAX_RECONNECT_TIMES) {
                assertExists(this._callback, 'callback is null');
                this.connect(this._callback);
                this._logger.info(
                  `try reconnect channel ${++this._retryTimes} times`
                );
              } else {
                this._logger.error(
                  'reconnect failed, max reconnect times reached'
                );
              }
            }, RECONNECT_INTERVAL_TIME);
          } catch (e) {
            this._logger.error('reconnect failed', e);
          }
        }
      }
    });
  }
}

View File

@@ -1,5 +0,0 @@
# Keck
> This directory will be removed in the future once we publish the jwt library to npm.
The latest Keck code of AFFiNE is at https://github.com/toeverything/OctoBase/tree/master/libs/jwt

View File

@@ -1,59 +0,0 @@
/**
* @deprecated Remove this file after we migrate to the new cloud.
*/
import * as decoding from 'lib0/decoding';
import * as encoding from 'lib0/encoding';
import * as awarenessProtocol from 'y-protocols/awareness';
import * as syncProtocol from 'y-protocols/sync';
import type { KeckProvider } from '.';
// Wire message type ids for the Keck sync protocol.
// NOTE(review): id 2 is not defined here — presumably reserved by the wire
// protocol (y-websocket uses 2 for "auth"); confirm against the server.
export enum Message {
  sync = 0,
  awareness = 1,
  queryAwareness = 3,
}
// Signature of a per-message-type handler: reads the payload from `decoder`
// and may write a reply into `encoder` (the caller flushes non-empty replies).
export type MessageCallback = (
  encoder: encoding.Encoder,
  decoder: decoding.Decoder,
  provider: KeckProvider,
  emitSynced: boolean,
  messageType: number
) => void;
// Dispatch table mapping each message type to its handler.
export const handler: Record<Message, MessageCallback> = {
  // Yjs document sync: delegate to y-protocols and flip `synced` once the
  // server's sync-step-2 has been applied.
  [Message.sync]: (encoder, decoder, provider, emitSynced) => {
    encoding.writeVarUint(encoder, Message.sync);
    const syncMessageType = syncProtocol.readSyncMessage(
      decoder,
      encoder,
      provider.doc,
      provider
    );
    if (
      emitSynced &&
      syncMessageType === syncProtocol.messageYjsSyncStep2 &&
      !provider.synced
    ) {
      provider.synced = true;
    }
  },
  // Remote awareness update: merge into the local awareness state.
  [Message.awareness]: (_encoder, decoder, provider) => {
    awarenessProtocol.applyAwarenessUpdate(
      provider.awareness,
      decoding.readVarUint8Array(decoder),
      provider
    );
  },
  // Peer asked for our awareness: reply with the full local state set.
  [Message.queryAwareness]: (encoder, _decoder, provider) => {
    encoding.writeVarUint(encoder, Message.awareness);
    encoding.writeVarUint8Array(
      encoder,
      awarenessProtocol.encodeAwarenessUpdate(
        provider.awareness,
        Array.from(provider.awareness.getStates().keys())
      )
    );
  },
};

View File

@@ -1,290 +0,0 @@
/**
* @deprecated Remove this file after we migrate to the new cloud.
*/
import { isBrowser } from '@affine/env/constant';
import * as encoding from 'lib0/encoding';
import * as math from 'lib0/math';
import { Observable } from 'lib0/observable';
import * as url from 'lib0/url';
import * as awarenessProtocol from 'y-protocols/awareness';
import * as syncProtocol from 'y-protocols/sync';
import type * as Y from 'yjs';
import { handler, Message } from './handler';
import { readMessage } from './processor';
// @todo - this should depend on awareness.outdatedTime
const messageReconnectTimeout = 30000;
// Open (or re-open) the provider's WebSocket and wire up all socket event
// handlers: message dispatch, error/close propagation, exponential-backoff
// reconnect, and the initial sync-step-1 + awareness broadcast on open.
// No-op when a socket already exists or the provider should stay offline.
const setupWS = (provider: KeckProvider) => {
  if (provider.shouldConnect && provider.ws === null) {
    const websocket = new WebSocket(provider.url);
    websocket.binaryType = 'arraybuffer';
    provider.ws = websocket;
    provider.wsconnecting = true;
    provider.wsconnected = false;
    provider.synced = false;
    // Incoming frame: decode, dispatch, and flush any reply the handler wrote
    // (length > 1 means more than just the message-type varint was written).
    websocket.onmessage = (event: any) => {
      provider.wsLastMessageReceived = Date.now();
      const encoder = readMessage(provider, new Uint8Array(event.data), true);
      if (encoding.length(encoder) > 1) {
        websocket.send(encoding.toUint8Array(encoder));
      }
    };
    websocket.onerror = (event: any) => {
      provider.emit('connection-error', [event, provider]);
    };
    websocket.onclose = (event: any) => {
      provider.emit('connection-close', [event, provider]);
      provider.ws = null;
      provider.wsconnecting = false;
      if (provider.wsconnected) {
        provider.wsconnected = false;
        provider.synced = false;
        // update awareness (all users except local left)
        awarenessProtocol.removeAwarenessStates(
          provider.awareness,
          Array.from(provider.awareness.getStates().keys()).filter(
            client => client !== provider.doc.clientID
          ),
          provider
        );
        provider.emit('status', [
          {
            status: 'disconnected',
          },
        ]);
      } else {
        // Close before a successful open counts as a failed reconnect attempt.
        provider.wsUnsuccessfulReconnects++;
      }
      // Start with no reconnect timeout and increase timeout by
      // using exponential backoff starting with 100ms
      setTimeout(
        setupWS,
        math.min(
          math.pow(2, provider.wsUnsuccessfulReconnects) * 100,
          provider.maxBackOffTime
        ) + provider.extraToleranceTime,
        provider
      );
    };
    websocket.onopen = () => {
      provider.wsLastMessageReceived = Date.now();
      provider.wsconnecting = false;
      provider.wsconnected = true;
      provider.wsUnsuccessfulReconnects = 0;
      provider.emit('status', [
        {
          status: 'connected',
        },
      ]);
      // always send sync step 1 when connected
      const encoder = encoding.createEncoder();
      encoding.writeVarUint(encoder, Message.sync);
      syncProtocol.writeSyncStep1(encoder, provider.doc);
      websocket.send(encoding.toUint8Array(encoder));
      // broadcast local awareness state
      if (provider.awareness.getLocalState() !== null) {
        const encoderAwarenessState = encoding.createEncoder();
        encoding.writeVarUint(encoderAwarenessState, Message.awareness);
        encoding.writeVarUint8Array(
          encoderAwarenessState,
          awarenessProtocol.encodeAwarenessUpdate(provider.awareness, [
            provider.doc.clientID,
          ])
        );
        websocket.send(encoding.toUint8Array(encoderAwarenessState));
      }
    };
    provider.emit('status', [
      {
        status: 'connecting',
      },
    ]);
  }
};
// Forward an encoded frame over the provider's websocket, but only when the
// connection is fully established and the underlying socket is open.
const broadcastMessage = (provider: KeckProvider, buf: ArrayBuffer) => {
  const socket = provider.ws;
  const open =
    provider.wsconnected && socket != null && socket.readyState === socket.OPEN;
  if (open) {
    socket.send(buf);
  }
};
/**
 * y-websocket-style provider for the legacy "Keck" sync server: syncs a
 * Y.Doc plus awareness over one WebSocket, with exponential-backoff
 * reconnects (see `setupWS`) and a watchdog that closes stale connections.
 */
export class KeckProvider extends Observable<string> {
  doc: Y.Doc;
  awareness: awarenessProtocol.Awareness;
  url: any;
  messageHandlers: typeof handler;
  shouldConnect: boolean;
  ws: any;
  wsconnecting: boolean;
  wsconnected: boolean;
  wsLastMessageReceived: number;
  wsUnsuccessfulReconnects: any;
  maxBackOffTime: number;
  roomName: string;
  _synced: boolean;
  _resyncInterval: any;
  extraToleranceTime: number;
  _updateHandler: (update: Uint8Array, origin: any) => void;
  _awarenessUpdateHandler: ({ added, updated, removed }: any) => void;
  _unloadHandler: () => void;
  _checkInterval: NodeJS.Timer;
  constructor(
    serverUrl: string,
    roomName: string,
    doc: Y.Doc,
    {
      connect = true,
      awareness = new awarenessProtocol.Awareness(doc),
      params = {},
      resyncInterval = -1,
      maxBackOffTime = 2500,
      extraToleranceTime = 0,
    } = {}
  ) {
    super();
    // NOTE(review): the original comment claimed this "ensures url ends
    // with /"; it actually STRIPS trailing '/' so the final url is
    // `${serverUrl}/${roomName}?{params}`.
    while (serverUrl[serverUrl.length - 1] === '/') {
      serverUrl = serverUrl.slice(0, serverUrl.length - 1);
    }
    const encodedParams = url.encodeQueryParams(params);
    this.maxBackOffTime = maxBackOffTime;
    this.extraToleranceTime = extraToleranceTime;
    this.url =
      serverUrl +
      '/' +
      roomName +
      (encodedParams.length === 0 ? '' : '?' + encodedParams);
    this.roomName = roomName;
    this.doc = doc;
    this.awareness = awareness;
    this.wsconnected = false;
    this.wsconnecting = false;
    this.wsUnsuccessfulReconnects = 0;
    this.messageHandlers = handler;
    /**
     * @type {boolean}
     */
    this._synced = false;
    /**
     * @type {WebSocket?}
     */
    this.ws = null;
    this.wsLastMessageReceived = 0;
    /**
     * Whether to connect to other peers or not
     * @type {boolean}
     */
    this.shouldConnect = connect;
    this._resyncInterval = 0;
    // Optional periodic full resync: re-send sync step 1 every
    // `resyncInterval` ms while the socket is open.
    if (resyncInterval > 0) {
      this._resyncInterval = /** @type {any} */ setInterval(() => {
        if (this.ws && this.ws.readyState === WebSocket.OPEN) {
          // resend sync step 1
          const encoder = encoding.createEncoder();
          encoding.writeVarUint(encoder, Message.sync);
          syncProtocol.writeSyncStep1(encoder, doc);
          this.ws.send(encoding.toUint8Array(encoder));
        }
      }, resyncInterval);
    }
    // Broadcast local doc updates, skipping updates that originated from
    // this provider (they came from the server already).
    this._updateHandler = (update: Uint8Array, origin: any) => {
      if (origin !== this) {
        const encoder = encoding.createEncoder();
        encoding.writeVarUint(encoder, Message.sync);
        syncProtocol.writeUpdate(encoder, update);
        broadcastMessage(this, encoding.toUint8Array(encoder));
      }
    };
    this.doc.on('update', this._updateHandler);
    // Broadcast any awareness change (added/updated/removed clients).
    this._awarenessUpdateHandler = ({ added, updated, removed }: any) => {
      const changedClients = added.concat(updated).concat(removed);
      const encoder = encoding.createEncoder();
      encoding.writeVarUint(encoder, Message.awareness);
      encoding.writeVarUint8Array(
        encoder,
        awarenessProtocol.encodeAwarenessUpdate(awareness, changedClients)
      );
      broadcastMessage(this, encoding.toUint8Array(encoder));
    };
    // On page unload, announce our departure so peers drop our awareness.
    this._unloadHandler = () => {
      awarenessProtocol.removeAwarenessStates(
        this.awareness,
        [doc.clientID],
        'window unload'
      );
    };
    if (isBrowser) {
      window.addEventListener('unload', this._unloadHandler);
    } else if (typeof process !== 'undefined') {
      process.on('exit', this._unloadHandler);
    }
    awareness.on('update', this._awarenessUpdateHandler);
    // Watchdog: if nothing arrived for `messageReconnectTimeout` ms, assume
    // the connection is dead and close it (onclose triggers reconnect).
    this._checkInterval = /** @type {any} */ setInterval(() => {
      if (
        this.wsconnected &&
        messageReconnectTimeout < Date.now() - this.wsLastMessageReceived
      ) {
        // no message received in a long time - not even your own awareness
        // updates (which are updated every 15 seconds)
        /** @type {WebSocket} */ this.ws.close();
      }
    }, messageReconnectTimeout / 10);
    if (connect) {
      this.connect();
    }
  }
  /**
   * Whether the initial document sync with the server has completed.
   * @type {boolean}
   */
  get synced() {
    return this._synced;
  }
  set synced(state) {
    // Emit both event names for compatibility with different listeners.
    if (this._synced !== state) {
      this._synced = state;
      this.emit('synced', [state]);
      this.emit('sync', [state]);
    }
  }
  // Tear down timers, listeners, and the socket before destroying.
  override destroy() {
    if (this._resyncInterval !== 0) {
      clearInterval(this._resyncInterval);
    }
    clearInterval(this._checkInterval);
    this.disconnect();
    if (isBrowser) {
      window.removeEventListener('unload', this._unloadHandler);
    } else if (typeof process !== 'undefined') {
      process.off('exit', this._unloadHandler);
    }
    this.awareness.off('update', this._awarenessUpdateHandler);
    this.doc.off('update', this._updateHandler);
    super.destroy();
  }
  // Close the socket and prevent automatic reconnection.
  disconnect() {
    this.shouldConnect = false;
    if (this.ws !== null) {
      this.ws.close();
    }
  }
  // (Re-)enable connection; opens a socket if none exists yet.
  connect() {
    this.shouldConnect = true;
    if (!this.wsconnected && this.ws === null) {
      setupWS(this);
    }
  }
}

View File

@@ -1,25 +0,0 @@
/**
* @deprecated Remove this file after we migrate to the new cloud.
*/
import * as decoding from 'lib0/decoding';
import * as encoding from 'lib0/encoding';
import type { KeckProvider } from '.';
import type { Message } from './handler.js';
/**
 * Decode one incoming frame and dispatch it to the handler registered for
 * its message type. Returns the reply encoder; the caller flushes it when
 * the handler wrote more than the type varint.
 */
export const readMessage = (
  provider: KeckProvider,
  buf: Uint8Array,
  emitSynced: boolean
): encoding.Encoder => {
  const decoder = decoding.createDecoder(buf);
  const encoder = encoding.createEncoder();
  const messageType = decoding.readVarUint(decoder) as Message;
  const handle = provider.messageHandlers[messageType];
  if (!handle) {
    // Unknown message type: log and return the (empty) reply encoder.
    console.error('Unable to compute message');
    return encoder;
  }
  handle(encoder, decoder, provider, emitSynced, messageType);
  return encoder;
};

View File

@@ -1,238 +0,0 @@
/**
* @deprecated Remove this file after we migrate to the new cloud.
*/
import { DebugLogger } from '@affine/debug';
import { assertExists } from '@blocksuite/global/utils';
import { Slot } from '@blocksuite/store';
import { initializeApp } from 'firebase/app';
import type { AuthProvider } from 'firebase/auth';
import {
type Auth as FirebaseAuth,
connectAuthEmulator,
getAuth as getFirebaseAuth,
GithubAuthProvider,
GoogleAuthProvider,
signInWithCredential,
signInWithPopup,
} from 'firebase/auth';
import { decode } from 'js-base64';
import { z } from 'zod';
// Connect emulators based on env vars
const envConnectEmulators = process.env.REACT_APP_FIREBASE_EMULATORS === 'true';
export type AccessTokenMessage = {
created_at: number;
exp: number;
email: string;
id: string;
name: string;
avatar_url: string;
};
export type LoginParams = {
type: 'Google' | 'Refresh';
token: string;
};
export const loginResponseSchema = z.object({
token: z.string(),
refresh: z.string(),
});
export type LoginResponse = z.infer<typeof loginResponseSchema>;
const logger = new DebugLogger('token');
export const STORAGE_KEY = 'affine-login-v2';
/** Decode the payload segment of a JWT into an AccessTokenMessage (no signature verification). */
export function parseIdToken(token: string): AccessTokenMessage {
  const [, payload] = token.split('.');
  return JSON.parse(decode(payload));
}
/**
 * Whether `token` is expired, treating tokens that lapse within the next
 * `before` seconds as already expired so callers refresh them in time.
 * Pass 0 for a strict check (used by `checkLoginStorage`).
 */
export const isExpired = (
  token: AccessTokenMessage,
  // earlier than `before` seconds from now, consider it expired
  before = 60 // 1 minute safety margin
): boolean => {
  const now = Math.floor(Date.now() / 1000);
  // FIX: was `token.exp < now - before`, which kept a token "valid" for
  // `before` seconds AFTER its real expiry — the opposite of the documented
  // safety margin. The margin must move the deadline forward.
  return token.exp < now + before;
};
/** Validate and persist a login response; throws (zod) when malformed. */
export const setLoginStorage = (login: LoginResponse) => {
  // Reject malformed responses before they reach storage.
  loginResponseSchema.parse(login);
  const payload = { token: login.token, refresh: login.refresh };
  localStorage.setItem(STORAGE_KEY, JSON.stringify(payload));
};
// Desktop sign-in flow: fetch a Google OAuth code through the electron
// bridge, exchange it for an id_token, sign into firebase with the
// credential, and return the firebase id token. Resolves to undefined when
// the electron bridge (`window.apis`) is unavailable.
const signInWithElectron = async (firebaseAuth: FirebaseAuth) => {
  if (!window.apis) {
    return void 0;
  }
  const { url, requestInit } = await window.apis.ui.getGoogleOauthCode();
  const response = await fetch(url, requestInit);
  const { id_token } = await response.json();
  const credential = GoogleAuthProvider.credential(id_token);
  const result = await signInWithCredential(firebaseAuth, credential);
  return result.user.getIdToken();
};
/** Drop the persisted login response (local sign-out). */
export const clearLoginStorage = () => {
  localStorage.removeItem(STORAGE_KEY);
};
/** Read the persisted login response; null when absent or unparsable. */
export const getLoginStorage = (): LoginResponse | null => {
  const raw = localStorage.getItem(STORAGE_KEY);
  if (!raw) {
    return null;
  }
  try {
    return JSON.parse(raw);
  } catch (error) {
    // Corrupted storage entry: log it and behave as signed-out.
    logger.error('Failed to parse login', error);
    return null;
  }
};
export const storageChangeSlot = new Slot();
/**
 * Return a valid login response, transparently refreshing the access token
 * when it has expired. Throws (via assertExists) when nothing is stored.
 */
export const checkLoginStorage = async (
  prefixUrl = '/'
): Promise<LoginResponse> => {
  const storage = getLoginStorage();
  assertExists(storage, 'Login token is not set');
  // Strict expiry check (no grace period): refresh as soon as it lapses.
  if (isExpired(parseIdToken(storage.token), 0)) {
    logger.debug('refresh token needed');
    const refreshed = await fetch(prefixUrl + 'api/user/token', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({
        type: 'Refresh',
        token: storage.refresh,
      }),
    });
    const response: LoginResponse = await refreshed.json();
    setLoginStorage(response);
    logger.debug('refresh token emit');
    // Let listeners (e.g. the global sync channel) pick up the new token.
    storageChangeSlot.emit();
  }
  return getLoginStorage() as LoginResponse;
};
// Third-party identity providers supported by the legacy-cloud sign-in.
export enum SignMethod {
  Google = 'Google',
  GitHub = 'GitHub',
  // Twitter = 'Twitter',
}
declare global {
// eslint-disable-next-line no-var
var firebaseAuthEmulatorStarted: boolean | undefined;
}
/**
 * Firebase-backed auth helper for the legacy cloud.
 * `generateToken` signs in with Google/GitHub (popup on web, OAuth-code flow
 * on desktop) and exchanges the firebase id token for a legacy-cloud
 * LoginResponse; `refreshToken` renews an existing session.
 */
export function createAffineAuth(prefix = '/') {
  let _firebaseAuth: FirebaseAuth | null = null;
  // Lazily initialize the firebase app + auth instance; null on failure.
  const getAuth = (): FirebaseAuth | null => {
    try {
      if (!_firebaseAuth) {
        const app = initializeApp({
          apiKey: process.env.NEXT_PUBLIC_FIREBASE_API_KEY,
          authDomain: process.env.NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN,
          projectId: process.env.NEXT_PUBLIC_FIREBASE_PROJECT_ID,
          storageBucket: process.env.NEXT_PUBLIC_FIREBASE_STORAGE_BUCKET,
          messagingSenderId:
            process.env.NEXT_PUBLIC_FIREBASE_MESSAGING_SENDER_ID,
          appId: process.env.NEXT_PUBLIC_FIREBASE_APP_ID,
          measurementId: process.env.NEXT_PUBLIC_FIREBASE_MEASUREMENT_ID,
        });
        _firebaseAuth = getFirebaseAuth(app);
      }
      // Connect to the local auth emulator once per process when enabled.
      if (envConnectEmulators && !globalThis.firebaseAuthEmulatorStarted) {
        connectAuthEmulator(_firebaseAuth, 'http://localhost:9099', {
          disableWarnings: true,
        });
        globalThis.firebaseAuthEmulatorStarted = true;
      }
      return _firebaseAuth;
    } catch (error) {
      logger.error('Failed to initialize firebase', error);
      return null;
    }
  };
  return {
    // Sign in and exchange the firebase id token for a legacy-cloud token.
    // Returns null when the user dismissed the popup or sign-in failed.
    generateToken: async (
      method: SignMethod
    ): Promise<LoginResponse | null> => {
      const auth = getAuth();
      if (!auth) {
        throw new Error('Failed to initialize firebase');
      }
      let provider: AuthProvider;
      switch (method) {
        case SignMethod.Google: {
          const googleProvider = new GoogleAuthProvider();
          // make sure the user has a chance to select an account
          // https://developers.google.com/identity/openid-connect/openid-connect#prompt
          googleProvider.setCustomParameters({
            prompt: 'select_account',
          });
          provider = googleProvider;
          break;
        }
        case SignMethod.GitHub:
          provider = new GithubAuthProvider();
          break;
        default:
          throw new Error('Unsupported sign method');
      }
      try {
        let idToken: string | undefined;
        if (environment.isDesktop) {
          idToken = await signInWithElectron(auth);
        } else {
          const response = await signInWithPopup(auth, provider);
          idToken = await response.user.getIdToken();
        }
        logger.debug('idToken', idToken);
        // NOTE(review): the request type is 'Google' even for GitHub
        // sign-in — presumably the server treats it as "firebase id token";
        // confirm against the legacy server.
        return fetch(prefix + 'api/user/token', {
          method: 'POST',
          headers: {
            'Content-Type': 'application/json',
          },
          body: JSON.stringify({
            type: 'Google',
            token: idToken,
          }),
        }).then(r => r.json()) as Promise<LoginResponse>;
      } catch (error) {
        // A dismissed popup is a normal outcome, not an error worth logging.
        if (
          error instanceof Error &&
          'code' in error &&
          error.code === 'auth/popup-closed-by-user'
        ) {
          return null;
        }
        logger.error('Failed to sign in', error);
      }
      return null;
    },
    // Exchange a refresh token for a fresh LoginResponse.
    refreshToken: async (
      loginResponse: LoginResponse
    ): Promise<LoginResponse | null> => {
      return fetch(prefix + 'api/user/token', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify({
          type: 'Refresh',
          token: loginResponse.refresh,
        }),
      }).then(r => r.json()) as Promise<LoginResponse>;
    },
  } as const;
}

View File

@@ -1,76 +0,0 @@
/**
* @deprecated Remove this file after we migrate to the new cloud.
*/
import { setupGlobal } from '@affine/env/global';
import { rootStore } from '@toeverything/plugin-infra/manager';
import { createUserApis, createWorkspaceApis } from './api/index';
import { currentAffineUserAtom } from './atom';
import type { LoginResponse } from './login';
import { createAffineAuth, parseIdToken, setLoginStorage } from './login';
// Side-effectful module: builds the legacy-cloud API surface once and
// exposes it (plus debug login helpers) on globalThis.
// NOTE(review): `prefixUrl` and `setLogin` are used as ambient globals here
// (no import in this file) — presumably injected at build time; confirm.
setupGlobal();
export const affineAuth = createAffineAuth(prefixUrl);
const affineApis = {} as ReturnType<typeof createUserApis> &
  ReturnType<typeof createWorkspaceApis>;
// Merge the user and workspace API factories into one flat object.
Object.assign(affineApis, createUserApis(prefixUrl));
Object.assign(affineApis, createWorkspaceApis(prefixUrl));
if (!globalThis.AFFINE_APIS) {
  globalThis.AFFINE_APIS = affineApis;
  // Persist the login response and mirror the decoded user into the atom.
  globalThis.setLogin = (response: LoginResponse) => {
    rootStore.set(currentAffineUserAtom, parseIdToken(response.token));
    setLoginStorage(response);
  };
  // Debug helper: sign in as built-in fixture user 1 (dev/test only).
  const loginMockUser1 = async () => {
    const user1 = await import('@affine-test/fixtures/built-in-user1.json');
    const data = await fetch(prefixUrl + 'api/user/token', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({
        type: 'DebugLoginUser',
        email: user1.email,
        password: user1.password,
      }),
    }).then(r => r.json());
    setLogin(data);
  };
  // Debug helper: sign in as built-in fixture user 2 (dev/test only).
  const loginMockUser2 = async () => {
    const user2 = await import('@affine-test/fixtures/built-in-user2.json');
    const data = await fetch(prefixUrl + 'api/user/token', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({
        type: 'DebugLoginUser',
        email: user2.email,
        password: user2.password,
      }),
    }).then(r => r.json());
    setLogin(data);
  };
  globalThis.AFFINE_DEBUG = {
    loginMockUser1,
    loginMockUser2,
  };
}
declare global {
  // eslint-disable-next-line no-var
  var setLogin: typeof setLoginStorage;
  // eslint-disable-next-line no-var
  var AFFINE_APIS:
    | undefined
    | (ReturnType<typeof createUserApis> &
        ReturnType<typeof createWorkspaceApis>);
  // eslint-disable-next-line no-var
  var AFFINE_DEBUG: Record<string, unknown>;
}
export { affineApis };

View File

@@ -1,85 +0,0 @@
/**
* @deprecated Remove this file after we migrate to the new cloud.
*/
import { DebugLogger } from '@affine/debug';
import type { WorkspaceCRUD } from '@affine/env/workspace';
import type { WorkspaceFlavour } from '@affine/env/workspace';
import {
workspaceDetailSchema,
workspaceSchema,
} from '@affine/env/workspace/legacy-cloud';
import { assertExists } from '@blocksuite/global/utils';
import type { Disposable } from '@blocksuite/store';
import { rootStore } from '@toeverything/plugin-infra/manager';
import { z } from 'zod';
import { WebsocketClient } from '../affine/channel';
import { storageChangeSlot } from '../affine/login';
import { rootWorkspacesMetadataAtom } from '../atom';
const logger = new DebugLogger('affine-sync');
const channelMessageSchema = z.object({
ws_list: z.array(workspaceSchema),
ws_details: z.record(workspaceDetailSchema),
metadata: z.record(
z.object({
search_index: z.array(z.string()),
name: z.string(),
})
),
});
type ChannelMessage = z.infer<typeof channelMessageSchema>;
/**
 * Create a websocket channel that mirrors server-side workspace changes into
 * the local workspace metadata atom (currently: removing workspaces that no
 * longer appear in the local CRUD list).
 * NOTE(review): `websocketPrefixUrl` is used as an ambient global here — it
 * is not imported in this file; confirm where it is injected.
 */
export function createAffineGlobalChannel(
  crud: WorkspaceCRUD<WorkspaceFlavour.AFFINE>
) {
  let client: WebsocketClient | null;
  async function handleMessage(channelMessage: ChannelMessage) {
    logger.debug('channelMessage', channelMessage);
    // Schema failures are logged but NOT fatal — the message is still
    // processed below (best-effort behavior as written).
    const parseResult = channelMessageSchema.safeParse(channelMessage);
    if (!parseResult.success) {
      console.error(
        'channelMessageSchema.safeParse(channelMessage) failed',
        parseResult
      );
    }
    const { ws_details } = channelMessage;
    const currentWorkspaces = await crud.list();
    for (const [id] of Object.entries(ws_details)) {
      const workspaceIndex = currentWorkspaces.findIndex(
        workspace => workspace.id === id
      );
      // If the workspace is not in the current workspace list, remove it
      if (workspaceIndex === -1) {
        await rootStore.set(rootWorkspacesMetadataAtom, workspaces => {
          const idx = workspaces.findIndex(workspace => workspace.id === id);
          workspaces.splice(idx, 1);
          return [...workspaces];
        });
      }
    }
  }
  let dispose: Disposable | undefined = undefined;
  const apis = {
    connect: () => {
      client = new WebsocketClient(websocketPrefixUrl + '/api/global/sync/');
      client.connect(handleMessage);
      // Reconnect with fresh credentials whenever the stored token changes.
      dispose = storageChangeSlot.on(() => {
        apis.disconnect();
        apis.connect();
      });
    },
    disconnect: () => {
      assertExists(client, 'client is null');
      client.disconnect();
      dispose?.dispose();
      client = null;
    },
  };
  return apis;
}

View File

@@ -1,118 +0,0 @@
import { DebugLogger } from '@affine/debug';
import type { BlobStorage } from '@blocksuite/store';
import { createIndexeddbStorage } from '@blocksuite/store';
import { openDB } from 'idb';
import type { DBSchema } from 'idb/build/entry';
import type { createWorkspaceApis } from '../affine/api';
// Record stored in the `uploading` object store: a blob whose cloud upload
// has been started (or is pending) but not yet confirmed complete.
type UploadingBlob = {
  key: string;
  arrayBuffer: ArrayBuffer;
  type: string;
};
// IndexedDB schema for the `affine-blob` database (see openDB call below).
interface AffineBlob extends DBSchema {
  uploading: {
    key: string;
    value: UploadingBlob;
  };
  // todo: migrate blob storage from `createIndexeddbStorage`
}
// Logger scoped to the legacy-cloud blob storage.
const logger = new DebugLogger('affine:blob');
/**
 * Blob storage for legacy-cloud AFFiNE workspaces.
 *
 * Blobs are persisted locally via IndexedDB (`createIndexeddbStorage`) and
 * mirrored to the cloud through `workspaceApis.uploadBlob`. A separate
 * `affine-blob/uploading` object store records blobs whose upload has not
 * yet been confirmed, so interrupted uploads are retried on next startup.
 *
 * @deprecated legacy cloud — remove after migration to the new cloud.
 * @param workspaceId workspace the blobs belong to.
 * @param workspaceApis legacy cloud API surface (getBlob/uploadBlob used).
 * @returns a BlobStorage backed by IndexedDB with cloud mirroring.
 */
export const createAffineBlobStorage = (
  workspaceId: string,
  workspaceApis: ReturnType<typeof createWorkspaceApis>
): BlobStorage => {
  const storage = createIndexeddbStorage(workspaceId);
  const dbPromise = openDB<AffineBlob>('affine-blob', 1, {
    upgrade(db) {
      db.createObjectStore('uploading', { keyPath: 'key' });
    },
  });
  // On startup, retry every pending upload left over from a prior session.
  dbPromise
    .then(async db => {
      const t = db
        .transaction('uploading', 'readwrite')
        .objectStore('uploading');
      await t.getAll().then(blobs =>
        blobs.map(({ arrayBuffer, type }) =>
          workspaceApis.uploadBlob(workspaceId, arrayBuffer, type).then(key => {
            // NOTE(review): assumes uploadBlob returns the same
            // content-addressed key the record was stored under — confirm.
            const t = db
              .transaction('uploading', 'readwrite')
              .objectStore('uploading');
            return t.delete(key);
          })
        )
      );
    })
    .catch(err => {
      logger.error('[createAffineBlobStorage] dbPromise error', err);
    });
  return {
    crud: {
      // Prefer the local copy; fall back to downloading from the cloud.
      get: async key => {
        const blob = await storage.crud.get(key);
        if (!blob) {
          const buffer = await workspaceApis.getBlob(workspaceId, key);
          return new Blob([buffer]);
        } else {
          return blob;
        }
      },
      set: async (key, value) => {
        const db = await dbPromise;
        const arrayBuffer = await value.arrayBuffer();
        const t = db
          .transaction('uploading', 'readwrite')
          .objectStore('uploading');
        // Record the pending upload first so it can be retried on the next
        // startup if the process dies before uploadBlob completes.
        await t.put({
          key,
          arrayBuffer,
          type: value.type,
        });
        // Fix: the original kept an `uploaded` flag and checked it right
        // after the put — before the upload had even started — so that
        // branch was dead code; the pending record is cleaned up in the
        // upload continuation below. Also reuse the already-awaited
        // `arrayBuffer` instead of calling `value.arrayBuffer()` twice.
        await Promise.all([
          storage.crud.set(key, value),
          workspaceApis
            .uploadBlob(workspaceId, arrayBuffer, value.type)
            .then(async () => {
              const t = db
                .transaction('uploading', 'readwrite')
                .objectStore('uploading');
              // delete the uploading blob after uploaded
              if (await t.get(key)) {
                await t.delete(key);
              }
            }),
        ]);
        return key;
      },
      delete: async (key: string) => {
        await Promise.all([
          storage.crud.delete(key),
          // we don't support deleting a blob in API?
          // workspaceApis.deleteBlob(workspaceId, key)
        ]);
      },
      list: async () => {
        const blobs = await storage.crud.list();
        // we don't support listing blobs in API?
        return [...blobs];
      },
    },
  };
};

View File

@@ -1,57 +0,0 @@
/**
* @deprecated Remove this file after we migrate to the new cloud.
*/
import { DebugLogger } from '@affine/debug';
import type { AffineDownloadProvider } from '@affine/env/workspace';
import type { DocProviderCreator } from '@blocksuite/store';
import { Workspace } from '@blocksuite/store';
import { affineApis } from '../affine/shared';
// Module-level cache of downloaded workspace binaries, keyed by workspace id.
// Shared across provider instances; cleared per-id by each provider's cleanup.
const hashMap = new Map<string, ArrayBuffer>();
const logger = new DebugLogger('affine:workspace:download-provider');
/**
 * Passive doc provider that performs a one-shot download of a workspace
 * snapshot from the legacy cloud and applies it to the local Yjs doc.
 * Downloads are cached in the module-level `hashMap` so repeated connects
 * for the same id do not hit the network again.
 *
 * @deprecated legacy cloud — remove after migration to the new cloud.
 */
export const createAffineDownloadProvider: DocProviderCreator = (
  id,
  doc
): AffineDownloadProvider => {
  let isConnected = false;

  // Apply a workspace snapshot to the local doc and mark us connected.
  const applyBinary = (binary: ArrayBuffer) => {
    logger.debug('applyUpdate');
    Workspace.Y.applyUpdate(doc, new Uint8Array(binary));
    isConnected = true;
  };

  return {
    flavour: 'affine-download',
    passive: true,
    get connected() {
      return isConnected;
    },
    connect: () => {
      logger.info('connect download provider', id);
      const cached = hashMap.get(id);
      if (cached !== undefined) {
        // Cache hit: no network round-trip needed.
        applyBinary(cached);
        return;
      }
      affineApis
        .downloadWorkspace(id, false)
        .then(binary => {
          hashMap.set(id, binary);
          applyBinary(binary);
        })
        .catch(e => {
          // Best-effort: a failed download just leaves us disconnected.
          logger.error('downloadWorkspace', e);
        });
    },
    disconnect: () => {
      logger.info('disconnect download provider', id);
      isConnected = false;
    },
    cleanup: () => {
      hashMap.delete(id);
    },
  };
};

View File

@@ -1,9 +1,8 @@
import type {
AffineWebSocketProvider,
LocalIndexedDBBackgroundProvider,
LocalIndexedDBDownloadProvider,
} from '@affine/env/workspace';
import type { Disposable, DocProviderCreator } from '@blocksuite/store';
import type { DocProviderCreator } from '@blocksuite/store';
import { assertExists, Workspace } from '@blocksuite/store';
import { createBroadcastChannelProvider } from '@blocksuite/store/providers/broadcast-channel';
import {
@@ -13,10 +12,7 @@ import {
} from '@toeverything/y-indexeddb';
import type { Doc } from 'yjs';
import { KeckProvider } from '../affine/keck';
import { getLoginStorage, storageChangeSlot } from '../affine/login';
import { CallbackSet } from '../utils';
import { createAffineDownloadProvider } from './affine-download';
import { localProviderLogger as logger } from './logger';
import {
createSQLiteDBDownloadProvider,
@@ -25,59 +21,6 @@ import {
const Y = Workspace.Y;
// Passive doc provider that syncs the Yjs doc over a Keck websocket against
// the legacy cloud. Reconnects automatically when the stored login token
// changes (via storageChangeSlot).
const createAffineWebSocketProvider: DocProviderCreator = (
  id,
  doc,
  { awareness }
): AffineWebSocketProvider => {
  let webSocketProvider: KeckProvider | null = null;
  // Subscription to login-storage changes; replaced on every connect().
  let dispose: Disposable | undefined = undefined;
  // `connected` is reported via callbacks.ready, flipped by the 'synced'
  // event fan-out below.
  const callbacks = new CallbackSet();
  const cb = () => callbacks.forEach(cb => cb());
  const apis = {
    flavour: 'affine-websocket',
    passive: true,
    get connected() {
      return callbacks.ready;
    },
    cleanup: () => {
      // Throws if never connected; callers are expected to connect first.
      assertExists(webSocketProvider);
      webSocketProvider.destroy();
      webSocketProvider = null;
      dispose?.dispose();
    },
    connect: () => {
      // Token changed (login/logout): tear down and reconnect with the
      // fresh credentials.
      dispose = storageChangeSlot.on(() => {
        apis.disconnect();
        apis.connect();
      });
      webSocketProvider = new KeckProvider(
        websocketPrefixUrl + '/api/sync/',
        id,
        doc,
        {
          params: { token: getLoginStorage()?.token ?? '' },
          awareness,
          // we maintain a broadcast channel by ourselves
          connect: false,
        }
      );
      logger.info('connect', webSocketProvider.url);
      webSocketProvider.on('synced', cb);
      webSocketProvider.connect();
    },
    disconnect: () => {
      assertExists(webSocketProvider);
      logger.info('disconnect', webSocketProvider.url);
      webSocketProvider.disconnect();
      webSocketProvider.off('synced', cb);
      // NOTE(review): the provider instance is intentionally kept (not
      // nulled) so a later connect() replaces it; confirm KeckProvider
      // tolerates being left disconnected.
      dispose?.dispose();
    },
  } satisfies AffineWebSocketProvider;
  return apis;
};
const createIndexedDBBackgroundProvider: DocProviderCreator = (
id,
blockSuiteWorkspace
@@ -153,8 +96,6 @@ const createIndexedDBDownloadProvider: DocProviderCreator = (
};
export {
createAffineDownloadProvider,
createAffineWebSocketProvider,
createBroadcastChannelProvider,
createIndexedDBBackgroundProvider,
createIndexedDBDownloadProvider,
@@ -182,8 +123,6 @@ export const createLocalProviders = (): DocProviderCreator[] => {
export const createAffineProviders = (): DocProviderCreator[] => {
return (
[
createAffineDownloadProvider,
createAffineWebSocketProvider,
runtimeConfig.enableBroadcastChannelProvider &&
createBroadcastChannelProvider,
createIndexedDBDownloadProvider,

View File

@@ -14,9 +14,7 @@ import type {
import { createIndexeddbStorage, Workspace } from '@blocksuite/store';
import { rootStore } from '@toeverything/plugin-infra/manager';
import type { createWorkspaceApis } from './affine/api';
import { rootWorkspacesMetadataAtom } from './atom';
import { createAffineBlobStorage } from './blob';
import { createSQLiteStorage } from './blob/sqlite-blob-storage';
export function cleanupWorkspace(flavour: WorkspaceFlavour) {
@@ -49,9 +47,8 @@ export const _cleanupBlockSuiteWorkspaceCache = () => hashMap.clear();
export function createEmptyBlockSuiteWorkspace(
id: string,
flavour: WorkspaceFlavour.AFFINE,
flavour: WorkspaceFlavour.AFFINE_CLOUD,
config: {
workspaceApis: ReturnType<typeof createWorkspaceApis>;
cachePrefix?: string;
idGenerator?: Generator;
}
@@ -60,7 +57,6 @@ export function createEmptyBlockSuiteWorkspace(
id: string,
flavour: WorkspaceFlavour.LOCAL,
config?: {
workspaceApis?: ReturnType<typeof createWorkspaceApis>;
cachePrefix?: string;
idGenerator?: Generator;
}
@@ -69,18 +65,10 @@ export function createEmptyBlockSuiteWorkspace(
id: string,
flavour: WorkspaceFlavour,
config?: {
workspaceApis?: ReturnType<typeof createWorkspaceApis>;
cachePrefix?: string;
idGenerator?: Generator;
}
): Workspace {
if (
flavour === WorkspaceFlavour.AFFINE &&
!config?.workspaceApis?.getBlob &&
!config?.workspaceApis?.uploadBlob
) {
throw new Error('workspaceApis is required for affine flavour');
}
const providerCreators: DocProviderCreator[] = [];
const prefix: string = config?.cachePrefix ?? '';
const cacheKey = `${prefix}${id}`;
@@ -91,10 +79,14 @@ export function createEmptyBlockSuiteWorkspace(
const blobStorages: StoreOptions['blobStorages'] = [];
if (flavour === WorkspaceFlavour.AFFINE) {
if (config && config.workspaceApis) {
const workspaceApis = config.workspaceApis;
blobStorages.push(id => createAffineBlobStorage(id, workspaceApis));
if (flavour === WorkspaceFlavour.AFFINE_CLOUD) {
if (isBrowser) {
blobStorages.push(createIndexeddbStorage);
if (isDesktop && runtimeConfig.enableSQLiteProvider) {
blobStorages.push(createSQLiteStorage);
}
// todo: add support for cloud storage
}
providerCreators.push(...createAffineProviders());
} else {