diff --git a/blocksuite/affine/inlines/preset/src/adapters/html/html-inline.ts b/blocksuite/affine/inlines/preset/src/adapters/html/html-inline.ts index 5323c6bf31..30d309e547 100644 --- a/blocksuite/affine/inlines/preset/src/adapters/html/html-inline.ts +++ b/blocksuite/affine/inlines/preset/src/adapters/html/html-inline.ts @@ -320,9 +320,21 @@ export const htmlMarkElementToDeltaMatcher = HtmlASTToDeltaExtension({ if (!isElement(ast)) { return []; } + const dataColor = + typeof ast.properties?.dataColor === 'string' + ? ast.properties.dataColor + : ''; + const colorName = + dataColor && + /^(red|orange|yellow|green|teal|blue|purple|grey)$/.test(dataColor) + ? dataColor + : 'yellow'; return ast.children.flatMap(child => context.toDelta(child, { trim: false }).map(delta => { - delta.attributes = { ...delta.attributes }; + delta.attributes = { + ...delta.attributes, + background: `var(--affine-text-highlight-${colorName})`, + }; return delta; }) ); diff --git a/blocksuite/affine/widgets/linked-doc/package.json b/blocksuite/affine/widgets/linked-doc/package.json index 69d444d362..dd01cbe969 100644 --- a/blocksuite/affine/widgets/linked-doc/package.json +++ b/blocksuite/affine/widgets/linked-doc/package.json @@ -25,6 +25,7 @@ "@types/lodash-es": "^4.17.12", "fflate": "^0.8.2", "js-yaml": "^4.1.1", + "jszip": "^3.10.1", "lit": "^3.2.0", "lodash-es": "^4.17.23", "mammoth": "^1.11.0", diff --git a/blocksuite/affine/widgets/linked-doc/src/transformers/bear.ts b/blocksuite/affine/widgets/linked-doc/src/transformers/bear.ts new file mode 100644 index 0000000000..dd32291ac2 --- /dev/null +++ b/blocksuite/affine/widgets/linked-doc/src/transformers/bear.ts @@ -0,0 +1,531 @@ +import { + defaultImageProxyMiddleware, + docLinkBaseURLMiddleware, + fileNameMiddleware, + filePathMiddleware, + MarkdownAdapter, +} from '@blocksuite/affine-shared/adapters'; +import { Container } from '@blocksuite/global/di'; +import { sha } from '@blocksuite/global/utils'; +import type { ExtensionType, 
Schema, Workspace } from '@blocksuite/store'; +import { extMimeMap, Transformer } from '@blocksuite/store'; +import JSZip from 'jszip'; + +import { createCollectionDocCRUD } from './markdown.js'; + +/** Recursive tree node representing a tag-based folder hierarchy. */ +type FolderHierarchy = { + name: string; + path: string; + children: Map<string, FolderHierarchy>; + pageId?: string; + parentPath?: string; +}; + +type BearImportOptions = { + collection: Workspace; + schema: Schema; + imported: Blob; + extensions: ExtensionType[]; +}; + +type BearImportResult = { + docIds: string[]; + tags: Map<string, string[]>; + folderHierarchy: FolderHierarchy; +}; + +type BundleEntry = { + bundlePath: string; + markdownPath: string | null; + infoJsonPath: string | null; + assetPaths: string[]; +}; + +/** Create a DI provider from the given extensions. */ +function getProvider(extensions: ExtensionType[]) { + const container = new Container(); + extensions.forEach(ext => { + ext.setup(container); + }); + return container.provider(); +} + +/** + * Extract Bear tags from the trailing footer of a markdown document. + * Bear places tags (e.g. `#tag`, `#multi word tag#`, `#nested/tag`) at the end + * of notes. This scans from the bottom up, collecting tag-only lines (up to 5) + * and returns the deduplicated tags plus the content with those lines removed.
+ */ +function parseBearTags(markdown: string): { + tags: string[]; + content: string; +} { + const lines = markdown.split('\n'); + + const codeFenceState: boolean[] = []; + let inCodeBlock = false; + for (const line of lines) { + if (line.trimStart().startsWith('```')) { + inCodeBlock = !inCodeBlock; + } + codeFenceState.push(inCodeBlock); + } + + const tags: string[] = []; + const tagLineIndices = new Set(); + + for (let i = lines.length - 1; i >= 0; i--) { + const line = lines[i].trim(); + if (!line) continue; + if (codeFenceState[i]) break; + + const lineTags = extractTagsFromLine(line); + if (lineTags.length > 0) { + for (const tag of lineTags) { + tags.push(tag); + } + tagLineIndices.add(i); + } else { + break; + } + + if (tagLineIndices.size >= 5) break; + } + + const filteredLines = lines.filter((_, i) => !tagLineIndices.has(i)); + while ( + filteredLines.length > 0 && + filteredLines[filteredLines.length - 1].trim() === '' + ) { + filteredLines.pop(); + } + + return { + tags: deduplicateTags(tags), + content: filteredLines.join('\n'), + }; +} + +/** + * Parse Bear tags from a single line. Supports open tags (`#tag`), + * closed tags (`#multi word tag#`), and nested tags (`#parent/child`). + * Returns an empty array if the line contains non-tag content. 
+function extractTagsFromLine(line: string): string[] { + const tags: string[] = []; + let remaining = line; + + while (remaining.length > 0) { + remaining = remaining.trimStart(); + if (!remaining) break; + + if (remaining.startsWith('[')) return []; + + if (remaining.startsWith('#')) { + if (remaining.length > 1 && remaining[1] === ' ') return []; + if (remaining.length > 2 && remaining[1] === '#') return []; + + const closedMatch = remaining.match(/^#([^#\n]+)#/); + if (closedMatch) { + const tagValue = closedMatch[1].trim(); + if (tagValue) { + tags.push(tagValue); + remaining = remaining.slice(closedMatch[0].length); + continue; + } + } + + const openMatch = remaining.match( + /^#([\p{L}\p{N}_][\p{L}\p{N}_/-]*)(.*)$/u + ); + if (openMatch) { + const tagValue = openMatch[1]; + const after = openMatch[2].trim(); + if (tagValue) { + tags.push(tagValue); + remaining = after; + continue; + } + } + + return []; + } else { + return []; + } + } + + return tags; +} + +/** + * Deduplicate tags case-insensitively while preserving the original + * capitalization of the first occurrence of each tag. + */ +function deduplicateTags(tags: string[]): string[] { + const seen = new Set<string>(); + const result: string[] = []; + for (const tag of tags) { + const normalized = tag.toLowerCase(); + if (!seen.has(normalized)) { + seen.add(normalized); + result.push(tag); + } + } + return result; +} + +/** + * Build a nested folder hierarchy from Bear tags. + * Tags like `parent/child` create nested folders. Documents are attached + * as leaf nodes under their tag's folder using `__doc__` prefixed keys. + */ +function buildFolderHierarchyFromTags( + tagDocMap: Map<string, string[]> +): FolderHierarchy { + const root: FolderHierarchy = { + name: '', + path: '', + children: new Map(), + }; + + for (const [tag, docIds] of tagDocMap) { + const parts = tag.split('/'); + let current = root; + let currentPath = ''; + + for (const part of parts) { + const parentPath = currentPath; + currentPath = currentPath ?
`${currentPath}/${part}` : part; + + if (!current.children.has(part)) { + current.children.set(part, { + name: part, + path: currentPath, + parentPath: parentPath || undefined, + children: new Map(), + }); + } + current = current.children.get(part)!; + } + + for (const docId of docIds) { + const docNodeKey = `__doc__${docId}`; + if (!current.children.has(docNodeKey)) { + current.children.set(docNodeKey, { + name: docNodeKey, + path: `${current.path}/${docNodeKey}`, + parentPath: current.path, + children: new Map(), + pageId: docId, + }); + } + } + } + + return root; +} + +const GFM_CALLOUT_MAP: Record<string, string> = { + IMPORTANT: '\u26A0', + NOTE: '\uD83D\uDCDD', + WARNING: '\u26A0', + TIP: '\uD83D\uDCA1', + CAUTION: '\uD83D\uDD34', +}; + +/** + * Convert GFM-style callouts (`> [!NOTE]`, `> [!WARNING]`, etc.) to + * emoji-based callouts that AFFiNE's remark-callout plugin understands. + * Skips content inside fenced code blocks. + */ +function convertGfmCallouts(markdown: string): string { + const lines = markdown.split('\n'); + let inCodeBlock = false; + for (let i = 0; i < lines.length; i++) { + if (lines[i].trimStart().startsWith('```')) { + inCodeBlock = !inCodeBlock; + continue; + } + if (!inCodeBlock) { + lines[i] = lines[i].replace( + /^(>\s*)\[!(\w+)\]/, + (_match, prefix: string, type: string) => { + const emoji = GFM_CALLOUT_MAP[type.toUpperCase()]; + return emoji ? `${prefix}[!${emoji}]` : _match; + } + ); + } + } + return lines.join('\n'); +} + +const HIGHLIGHT_COLOR_MAP: Record<string, string> = { + '\uD83D\uDFE2': 'green', + '\uD83D\uDD35': 'blue', + '\uD83D\uDFE3': 'purple', + '\uD83D\uDD34': 'red', + '\uD83D\uDFE1': 'yellow', + '\uD83D\uDFE0': 'orange', +}; + +/** Escape HTML special characters to prevent markup injection. */ +function escapeHtml(value: string): string { + return value + .replace(/&/g, '&amp;') + .replace(/</g, '&lt;') + .replace(/>/g, '&gt;') + .replace(/"/g, '&quot;') + .replace(/'/g, '&#39;'); +} + +/** + * Convert Bear `==highlight==` syntax to `<mark>` HTML elements.
+ * Supports colored highlights via leading color emoji (e.g. `==🟢green text==`). + * Skips content inside fenced code blocks. + */ +function convertHighlights(markdown: string): string { + const lines = markdown.split('\n'); + let inCodeBlock = false; + for (let i = 0; i < lines.length; i++) { + if (lines[i].trimStart().startsWith('```')) { + inCodeBlock = !inCodeBlock; + continue; + } + if (!inCodeBlock) { + lines[i] = lines[i].replace( + /==(\S(?:[^=]|=[^=])*?)==/g, + (_match, content: string) => { + const firstChar = String.fromCodePoint(content.codePointAt(0)!); + const color = HIGHLIGHT_COLOR_MAP[firstChar]; + if (color) { + const text = content.slice(firstChar.length); + return `<mark data-color="${color}">${escapeHtml(text)}</mark>`; + } + return `<mark>${escapeHtml(content)}</mark>`; + } + ); + } + } + return lines.join('\n'); +} + +/** Extract the document title from the first `# heading` or fall back to the bundle name. */ +function extractTitle(markdown: string, bundleName: string): string { + const lines = markdown.split('\n'); + let inCodeBlock = false; + for (const line of lines) { + if (line.trimStart().startsWith('```')) { + inCodeBlock = !inCodeBlock; + continue; + } + if (inCodeBlock) continue; + const match = line.match(/^#\s+(.+)/); + if (match) { + const title = match[1].trim(); + if (title) return title; + } + } + return bundleName.replace(/\.textbundle$/i, '') || 'Untitled'; +} + +/** + * Import a Bear .bear2bk backup file. + * Uses JSZip for lazy/streaming decompression to handle large backups.
+ */ +async function importBearBackup({ + collection, + schema, + imported, + extensions, +}: BearImportOptions): Promise<BearImportResult> { + const provider = getProvider(extensions); + + // JSZip reads the zip directory without decompressing all entries + const zip = await JSZip.loadAsync(imported); + + // Scan entries and group by textbundle + const bundleMap = new Map<string, BundleEntry>(); + + zip.forEach((path, _entry) => { + if (path.includes('__MACOSX') || path.includes('.DS_Store')) return; + + const tbMatch = path.match(/^(.+?\.textbundle)\/(.*)/i); + if (!tbMatch) return; + + const bundlePath = tbMatch[1]; + const innerPath = tbMatch[2]; + + if (!bundleMap.has(bundlePath)) { + bundleMap.set(bundlePath, { + bundlePath, + markdownPath: null, + infoJsonPath: null, + assetPaths: [], + }); + } + const bundle = bundleMap.get(bundlePath)!; + + if (innerPath === 'text.md' || innerPath === 'text.txt') { + bundle.markdownPath = path; + } else if (innerPath === 'info.json') { + bundle.infoJsonPath = path; + } else if (innerPath.startsWith('assets/') && innerPath !== 'assets/') { + bundle.assetPaths.push(path); + } + }); + + // Read info.json for all bundles to filter out trashed notes + // (info.json is tiny, safe to read all at once) + const validBundles: Array<{ + entry: BundleEntry; + bearMeta: Record<string, unknown> | undefined; + }> = []; + + for (const entry of bundleMap.values()) { + if (!entry.markdownPath) continue; + + let info: Record<string, unknown> = {}; + if (entry.infoJsonPath) { + try { + const text = await zip.file(entry.infoJsonPath)!.async('string'); + info = JSON.parse(text); + } catch { + // Invalid JSON + } + } + + const bearMeta = info['net.shinyfrog.bear'] as + | Record<string, unknown> + | undefined; + if (bearMeta?.trashed === 1) continue; + + validBundles.push({ entry, bearMeta }); + } + + if (validBundles.length === 0) { + throw new Error( + 'No valid Bear textbundles found in the archive. Please select a .bear2bk backup file.'
+ ); + } + + const docIds: string[] = []; + const tagDocMap = new Map<string, string[]>(); + + // Process bundles sequentially to limit memory. + // Each bundle is wrapped in try/catch so one bad note does not abort the + // entire import after earlier notes have already been written. + for (const { entry, bearMeta } of validBundles) { + try { + // Read markdown (decompress on demand) + const rawMarkdown = await zip.file(entry.markdownPath!)!.async('string'); + if (!rawMarkdown.trim()) continue; + + const { tags, content: cleanedMarkdown } = parseBearTags(rawMarkdown); + const bundleDirName = + entry.bundlePath.split('/').findLast(Boolean) ?? 'Untitled'; + const title = extractTitle(cleanedMarkdown, bundleDirName); + const markdown = convertHighlights( + convertGfmCallouts( + cleanedMarkdown.replace(/<!--\s*\{BearID:[^}]*\}\s*-->/g, '') + ) + ); + + // Read assets on demand (decompress only this bundle's assets) + const pendingAssets = new Map<string, File>(); + const pendingPathBlobIdMap = new Map<string, string>(); + + for (const assetFullPath of entry.assetPaths) { + try { + const data = await zip.file(assetFullPath)!.async('arraybuffer'); + const tbMatch = assetFullPath.match(/^.+?\.textbundle\/(.*)/i); + const assetRelPath = tbMatch ? tbMatch[1] : assetFullPath; + const ext = assetRelPath.split('.').at(-1) ?? ''; + const mime = extMimeMap.get(ext.toLowerCase()) ?? ''; + const key = await sha(data); + // Map both the full zip path and the relative path (assets/...) + pendingPathBlobIdMap.set(assetFullPath, key); + pendingPathBlobIdMap.set(assetRelPath, key); + try { + const decodedRel = decodeURIComponent(assetRelPath); + if (decodedRel !== assetRelPath) { + pendingPathBlobIdMap.set(decodedRel, key); + } + const decodedFull = decodeURIComponent(assetFullPath); + if (decodedFull !== assetFullPath) { + pendingPathBlobIdMap.set(decodedFull, key); + } + } catch { + // Invalid URI encoding + } + const fileName = assetRelPath.split('/').pop() ??
''; + pendingAssets.set(key, new File([data], fileName, { type: mime })); + } catch { + // Failed to read asset, skip + } + } + + const fullPath = `${entry.bundlePath}/text.md`; + const job = new Transformer({ + schema, + blobCRUD: collection.blobSync, + docCRUD: createCollectionDocCRUD(collection), + middlewares: [ + defaultImageProxyMiddleware, + fileNameMiddleware(title), + filePathMiddleware(fullPath), + docLinkBaseURLMiddleware(collection.id), + ], + }); + + const assets = job.assets; + const pathBlobIdMap = job.assetsManager.getPathBlobIdMap(); + for (const [p, key] of pendingPathBlobIdMap.entries()) { + pathBlobIdMap.set(p, key); + } + for (const [key, file] of pendingAssets.entries()) { + assets.set(key, file); + } + + const mdAdapter = new MarkdownAdapter(job, provider); + const doc = await mdAdapter.toDoc({ + file: markdown, + assets: job.assetsManager, + }); + + if (doc) { + docIds.push(doc.id); + + const metaPatch: Record<string, number> = {}; + if (bearMeta?.creationDate) { + const ts = Date.parse(String(bearMeta.creationDate)); + if (!isNaN(ts)) metaPatch.createDate = ts; + } + if (bearMeta?.modificationDate) { + const ts = Date.parse(String(bearMeta.modificationDate)); + if (!isNaN(ts)) metaPatch.updatedDate = ts; + } + if (Object.keys(metaPatch).length) { + collection.meta.setDocMeta(doc.id, metaPatch); + } + + for (const tag of tags) { + if (!tagDocMap.has(tag)) { + tagDocMap.set(tag, []); + } + tagDocMap.get(tag)!.push(doc.id); + } + } + } catch (err) { + console.warn(`Failed to import bundle: ${entry.bundlePath}`, err); + } + } + + const folderHierarchy = buildFolderHierarchyFromTags(tagDocMap); + return { docIds, tags: tagDocMap, folderHierarchy }; +} + +/** Public API for importing Bear .bear2bk backup archives.
*/ +export const BearTransformer = { + importBearBackup, +}; diff --git a/blocksuite/affine/widgets/linked-doc/src/transformers/index.ts b/blocksuite/affine/widgets/linked-doc/src/transformers/index.ts index f5581e4c68..46152a77ac 100644 --- a/blocksuite/affine/widgets/linked-doc/src/transformers/index.ts +++ b/blocksuite/affine/widgets/linked-doc/src/transformers/index.ts @@ -1,3 +1,4 @@ +export { BearTransformer } from './bear.js'; export { DocxTransformer } from './docx.js'; export { HtmlTransformer } from './html.js'; export { MarkdownTransformer } from './markdown.js'; diff --git a/blocksuite/affine/widgets/linked-doc/src/transformers/markdown.ts b/blocksuite/affine/widgets/linked-doc/src/transformers/markdown.ts index 7d6e3a4341..4cb5d9bce9 100644 --- a/blocksuite/affine/widgets/linked-doc/src/transformers/markdown.ts +++ b/blocksuite/affine/widgets/linked-doc/src/transformers/markdown.ts @@ -462,12 +462,23 @@ async function importMarkdownToDoc({ * @param options.imported The zip file as a Blob * @returns A Promise that resolves to an array of IDs of the newly created docs */ +type FolderHierarchy = { + name: string; + path: string; + children: Map<string, FolderHierarchy>; + pageId?: string; + parentPath?: string; +}; + async function importMarkdownZip({ collection, schema, imported, extensions, -}: ImportMarkdownZipOptions) { +}: ImportMarkdownZipOptions): Promise<{ + docIds: string[]; + folderHierarchy?: FolderHierarchy; +}> { const provider = getProvider(extensions); const unzip = new Unzip(); await unzip.load(imported); @@ -476,6 +487,7 @@ async function importMarkdownZip({ const pendingAssets: AssetMap = new Map(); const pendingPathBlobIdMap: PathBlobIdMap = new Map(); const markdownBlobs: ImportedFileEntry[] = []; + const docPathMap: Array<{ fullPath: string; docId: string }> = []; // Iterate over all files in the zip for (const { path, content: blob } of unzip) { @@ -527,10 +539,94 @@ async function importMarkdownZip({ if (doc) { applyMetaPatch(collection, doc.id, meta);
docIds.push(doc.id); + docPathMap.push({ fullPath, docId: doc.id }); } }) ); - return docIds; + + // Build folder hierarchy from zip paths + const folderHierarchy = buildMarkdownZipFolderHierarchy(docPathMap); + + return { docIds, folderHierarchy }; +} + +/** + * Builds a tree of {@link FolderHierarchy} nodes from the zip paths of + * imported markdown files. Returns `undefined` when every entry sits at + * the same level (no real subfolder structure). A common root directory + * shared by all entries is stripped automatically so that the resulting + * hierarchy starts one level deeper. + */ +function buildMarkdownZipFolderHierarchy( + entries: Array<{ fullPath: string; docId: string }> +): FolderHierarchy | undefined { + if (entries.length === 0) return undefined; + + // Check if any entries have folder structure + const hasSubfolders = entries.some(e => { + const parts = e.fullPath.split('/').filter(Boolean); + // More than just "root/file.md" -- need at least one real subfolder + return parts.length > 2; + }); + if (!hasSubfolders) { + // All files are at the same level, no folder hierarchy needed + return undefined; + } + + const root: FolderHierarchy = { + name: '', + path: '', + children: new Map(), + }; + + // Check once whether all entries share a common root directory + const candidateRoot = entries[0]?.fullPath.split('/').find(Boolean); + const skipRoot = + !!candidateRoot && + entries.every(e => e.fullPath.startsWith(candidateRoot + '/')); + + for (const { fullPath, docId } of entries) { + const parts = fullPath.split('/').filter(Boolean); + const fileName = parts.pop(); // Remove filename + if (!fileName) continue; + + let folderParts = skipRoot ? parts.slice(1) : parts; + + if (folderParts.length === 0) { + // Root-level file, no folder needed + continue; + } + + let current = root; + let currentPath = ''; + + for (const folderName of folderParts) { + const parentPath = currentPath; + currentPath = currentPath ? 
`${currentPath}/${folderName}` : folderName; + + if (!current.children.has(folderName)) { + current.children.set(folderName, { + name: folderName, + path: currentPath, + parentPath: parentPath || undefined, + children: new Map(), + }); + } + current = current.children.get(folderName)!; + } + + // Add the doc as a leaf + const docNodeKey = `__doc__${docId}`; + current.children.set(docNodeKey, { + name: docNodeKey, + path: `${current.path}/${docNodeKey}`, + parentPath: current.path, + children: new Map(), + pageId: docId, + }); + } + + return root.children.size > 0 ? root : undefined; } export const MarkdownTransformer = { diff --git a/blocksuite/playground/apps/_common/components/starter-debug-menu.ts b/blocksuite/playground/apps/_common/components/starter-debug-menu.ts index f6990a7c2f..186b17a81e 100644 --- a/blocksuite/playground/apps/_common/components/starter-debug-menu.ts +++ b/blocksuite/playground/apps/_common/components/starter-debug-menu.ts @@ -436,7 +436,7 @@ export class StarterDebugMenu extends ShadowlessElement { try { const file = await openSingleFileWith('Zip'); if (!file) return; - const result = await MarkdownTransformer.importMarkdownZip({ + const { docIds } = await MarkdownTransformer.importMarkdownZip({ collection: this.collection, schema: this.editor.doc.schema, imported: file, @@ -445,7 +445,7 @@ export class StarterDebugMenu extends ShadowlessElement { if (!this.editor.host) return; toast( this.editor.host, - `Successfully imported ${result.length} markdown files.` + `Successfully imported ${docIds.length} markdown files.` ); } catch (error) { console.error('Import markdown zip files failed:', error); diff --git a/packages/frontend/core/src/desktop/dialogs/import/index.tsx b/packages/frontend/core/src/desktop/dialogs/import/index.tsx index 921c383634..72551c5667 100644 --- a/packages/frontend/core/src/desktop/dialogs/import/index.tsx +++ b/packages/frontend/core/src/desktop/dialogs/import/index.tsx @@ -15,6 +15,7 @@ import { } from 
'@affine/core/modules/dialogs'; import { ExplorerIconService } from '@affine/core/modules/explorer-icon/services/explorer-icon'; import { OrganizeService } from '@affine/core/modules/organize'; +import { TagService } from '@affine/core/modules/tag'; import { UrlService } from '@affine/core/modules/url'; import { getAFFiNEWorkspaceSchema, @@ -27,6 +28,7 @@ import track from '@affine/track'; import { openDirectory, openFilesWith } from '@blocksuite/affine/shared/utils'; import type { Workspace } from '@blocksuite/affine/store'; import { + BearTransformer, DocxTransformer, HtmlTransformer, MarkdownTransformer, @@ -188,11 +190,49 @@ function createFolderStructure( return { folderId: rootFolderId, docLinks }; } +/** + * Creates the folder tree described by {@link folderHierarchy} via + * {@link OrganizeService} and links every document into its folder. + * Returns the root folder ID on success, or `undefined` if the + * hierarchy is empty or an error occurs. + * + * When {@link explorerIconService} is provided, document icons from the + * hierarchy (e.g. Notion page emojis) are applied. Callers that do not + * need icon support can omit it safely. 
+ */ +function applyFolderHierarchy( + organizeService: OrganizeService, + folderHierarchy: FolderHierarchy, + explorerIconService?: ExplorerIconService +): string | undefined { + if (folderHierarchy.children.size === 0) return undefined; + try { + const { folderId, docLinks } = createFolderStructure( + organizeService, + folderHierarchy, + null, + explorerIconService + ); + for (const { folderId, docId } of docLinks) { + const folder = organizeService.folderTree.folderNode$(folderId).value; + if (folder) { + const index = folder.indexAt('after'); + folder.createLink('doc', docId, index); + } + } + return folderId || undefined; + } catch (error) { + logger.warn('Failed to create folder structure:', error); + return undefined; + } +} + type ImportType = | 'markdown' | 'markdownZip' | 'notion' | 'obsidian' + | 'bear' | 'snapshot' | 'html' | 'docx' @@ -218,7 +258,8 @@ type ImportConfig = { files: File[], handleImportAffineFile: () => Promise, organizeService?: OrganizeService, - explorerIconService?: ExplorerIconService + explorerIconService?: ExplorerIconService, + tagService?: TagService ) => Promise; }; @@ -290,6 +331,19 @@ const importOptions = [ testId: 'editor-option-menu-import-obsidian', type: 'obsidian' as ImportType, }, + { + key: 'bear', + label: 'com.affine.import.bear', + prefixIcon: ( + + ), + suffixIcon: ( + + ), + suffixTooltip: 'com.affine.import.bear.tooltip', + testId: 'editor-option-menu-import-bear', + type: 'bear' as ImportType, + }, { key: 'docx', label: 'com.affine.import.docx', @@ -365,21 +419,29 @@ const importConfigs: Record = { docCollection, files, _handleImportAffineFile, - _organizeService, + organizeService, _explorerIconService ) => { const file = files.length === 1 ? 
files[0] : null; if (!file) { throw new Error('Expected a single zip file for markdownZip import'); } - const docIds = await MarkdownTransformer.importMarkdownZip({ - collection: docCollection, - schema: getAFFiNEWorkspaceSchema(), - imported: file, - extensions: getStoreManager().config.init().value.get('store'), - }); + const { docIds, folderHierarchy } = + await MarkdownTransformer.importMarkdownZip({ + collection: docCollection, + schema: getAFFiNEWorkspaceSchema(), + imported: file, + extensions: getStoreManager().config.init().value.get('store'), + }); + + const rootFolderId = + folderHierarchy && organizeService + ? applyFolderHierarchy(organizeService, folderHierarchy) + : undefined; + return { docIds, + rootFolderId, }; }, }, @@ -431,37 +493,14 @@ const importConfigs: Record = { extensions: getStoreManager().config.init().value.get('store'), }); - let rootFolderId: string | undefined; - - // Create folder structure if hierarchy exists and OrganizeService is available - if ( - folderHierarchy && - organizeService && - folderHierarchy.children.size > 0 - ) { - try { - const { folderId, docLinks } = createFolderStructure( - organizeService, - folderHierarchy, - null, - explorerIconService - ); - rootFolderId = folderId || undefined; - - // Create links for all documents to their respective folders - for (const { folderId, docId } of docLinks) { - const folder = - organizeService.folderTree.folderNode$(folderId).value; - if (folder) { - const index = folder.indexAt('after'); - folder.createLink('doc', docId, index); - } - } - } catch (error) { - logger.warn('Failed to create folder structure:', error); - // Continue with import even if folder creation fails - } - } + const rootFolderId = + folderHierarchy && organizeService + ? 
applyFolderHierarchy( + organizeService, + folderHierarchy, + explorerIconService + ) + : undefined; return { docIds: pageIds, @@ -501,6 +540,114 @@ const importConfigs: Record<ImportType, ImportConfig> = { return { docIds }; }, }, + bear: { + fileOptions: { acceptType: 'Zip', multiple: false }, + importFunction: async ( + docCollection, + files, + _handleImportAffineFile, + organizeService, + _explorerIconService, + tagService + ) => { + const file = files.length === 1 ? files[0] : null; + if (!file) { + throw new Error('Expected a single .bear2bk file for Bear import'); + } + let docIds: string[]; + let tags: Map<string, string[]>; + let folderHierarchy: FolderHierarchy; + try { + const result = await BearTransformer.importBearBackup({ + collection: docCollection, + schema: getAFFiNEWorkspaceSchema(), + imported: file, + extensions: getStoreManager().config.init().value.get('store'), + }); + docIds = result.docIds; + tags = result.tags; + folderHierarchy = result.folderHierarchy; + } catch (err) { + logger.error('Bear import failed:', err); + throw err instanceof Error + ? err + : new Error(String(err) || 'Bear import failed'); + } + + // Create AFFiNE tags from Bear tags + if (tagService && tags.size > 0) { + try { + // Get existing tags for deduplication + const existingTags = tagService.tagList.tags$.value; + const existingTagMap = new Map<string, string>(); // lowercase name → tag id + for (const tag of existingTags) { + const name = tag.value$.value.toLowerCase(); + existingTagMap.set(name, tag.id); + } + + // Consolidate tags by root segment (e.g., "privat/bike" → "privat"). + // Keyed by lowercase root for case-insensitive dedup, but the + // original capitalization of the first occurrence is preserved + // so new AFFiNE tags are created with the user's casing.
+ const rootTagDocMap = new Map< + string, + { displayName: string; docs: Set<string> } + >(); + for (const [tagName, tagDocIds] of tags) { + const originalRoot = tagName.split('/')[0]; + const key = originalRoot.toLowerCase(); + let entry = rootTagDocMap.get(key); + if (!entry) { + entry = { displayName: originalRoot, docs: new Set() }; + rootTagDocMap.set(key, entry); + } + for (const docId of tagDocIds) { + entry.docs.add(docId); + } + } + + for (const [ + rootTagKey, + { displayName, docs: docIdSet }, + ] of rootTagDocMap) { + // Check if tag already exists (case-insensitive) + let tagId = existingTagMap.get(rootTagKey); + if (!tagId) { + const newTag = tagService.tagList.createTag( + displayName, + tagService.randomTagColor() + ); + tagId = newTag.id; + existingTagMap.set(rootTagKey, tagId); + } + + // Assign tag to each doc + for (const docId of docIdSet) { + const doc = docCollection.getDoc(docId); + const currentTags = doc?.meta?.tags ?? []; + if (!currentTags.includes(tagId)) { + docCollection.meta.setDocMeta(docId, { + tags: [...currentTags, tagId], + }); + } + } + } + } catch (error) { + logger.warn('Failed to create Bear tags:', error); + } + } + + const rootFolderId = + folderHierarchy && organizeService + ?
applyFolderHierarchy(organizeService, folderHierarchy) + : undefined; + + return { + docIds, + rootFolderId, + }; + }, + }, docx: { fileOptions: { acceptType: 'Docx', multiple: false }, importFunction: async (docCollection, file) => { @@ -735,6 +882,7 @@ export const ImportDialog = ({ const docCollection = workspace.docCollection; const organizeService = useService(OrganizeService); const explorerIconService = useService(ExplorerIconService); + const tagService = useService(TagService); const globalDialogService = useService(GlobalDialogService); @@ -824,7 +972,8 @@ export const ImportDialog = ({ files, handleImportAffineFile, organizeService, - explorerIconService + explorerIconService, + tagService ); setImportResult({ @@ -863,6 +1012,7 @@ export const ImportDialog = ({ explorerIconService, handleImportAffineFile, organizeService, + tagService, t, ] ); diff --git a/packages/frontend/i18n/src/i18n.gen.ts b/packages/frontend/i18n/src/i18n.gen.ts index ab98e46782..909f17b670 100644 --- a/packages/frontend/i18n/src/i18n.gen.ts +++ b/packages/frontend/i18n/src/i18n.gen.ts @@ -2462,6 +2462,14 @@ export function useAFFiNEI18N(): { * `AFFiNE workspace data` */ ["com.affine.import.affine-workspace-data"](): string; + /** + * `Bear (.bear2bk)` + */ + ["com.affine.import.bear"](): string; + /** + * `Import your Bear note backup. 
Tags will be converted to AFFiNE tags and folders.` + */ + ["com.affine.import.bear.tooltip"](): string; /** * `Docx` */ diff --git a/packages/frontend/i18n/src/resources/en.json b/packages/frontend/i18n/src/resources/en.json index 5b316c7db8..b033dedbd0 100644 --- a/packages/frontend/i18n/src/resources/en.json +++ b/packages/frontend/i18n/src/resources/en.json @@ -614,6 +614,8 @@ "com.affine.import-clipper.dialog.errorLoad": "Failed to load content, please try again.", "com.affine.import_file": "Support Markdown/Notion", "com.affine.import.affine-workspace-data": "AFFiNE workspace data", + "com.affine.import.bear": "Bear (.bear2bk)", + "com.affine.import.bear.tooltip": "Import your Bear note backup. Tags will be converted to AFFiNE tags and folders.", "com.affine.import.docx": "Docx", "com.affine.import.docx.tooltip": "Import your .docx file.", "com.affine.import.html-files": "HTML", diff --git a/yarn.lock b/yarn.lock index 642d407df6..d67ca21ff9 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3101,6 +3101,7 @@ __metadata: "@types/lodash-es": "npm:^4.17.12" fflate: "npm:^0.8.2" js-yaml: "npm:^4.1.1" + jszip: "npm:^3.10.1" lit: "npm:^3.2.0" lodash-es: "npm:^4.17.23" mammoth: "npm:^1.11.0"