diff --git a/package.json b/package.json
index 499aaa8d..116b28f2 100644
--- a/package.json
+++ b/package.json
@@ -91,6 +91,7 @@
     "@types/fs-extra": "^11",
     "@types/lodash": "^4.17.5",
     "@types/markdown-it": "^14",
+    "@types/md5": "^2.3.5",
     "@types/node": "^18.19.9",
     "@types/react": "^18.2.48",
     "@types/react-dom": "^18.2.18",
diff --git a/packages/shared/config/types.ts b/packages/shared/config/types.ts
new file mode 100644
index 00000000..48a76c47
--- /dev/null
+++ b/packages/shared/config/types.ts
@@ -0,0 +1,6 @@
+export type LoaderReturn = {
+  entriesAdded: number
+  uniqueId: string
+  uniqueIds: string[]
+  loaderType: string
+}
diff --git a/src/main/loader/index.ts b/src/main/loader/index.ts
new file mode 100644
index 00000000..b7416efc
--- /dev/null
+++ b/src/main/loader/index.ts
@@ -0,0 +1,82 @@
+import * as fs from 'node:fs'
+
+import { LocalPathLoader, RAGApplication, TextLoader } from '@llm-tools/embedjs'
+import type { AddLoaderReturn } from '@llm-tools/embedjs-interfaces'
+import { LoaderReturn } from '@shared/config/types'
+import { FileType, KnowledgeBaseParams } from '@types'
+
+import { OdLoader, OdType } from './odLoader'
+
+// File extensions covered by embedjs built-in loaders
+const commonExts = ['.pdf', '.csv', '.json', '.docx', '.pptx', '.xlsx', '.md', '.jpeg']
+
+export async function addOdLoader(
+  ragApplication: RAGApplication,
+  file: FileType,
+  base: KnowledgeBaseParams,
+  forceReload: boolean
+): Promise<AddLoaderReturn> {
+  const loaderMap: Record<string, OdType> = {
+    '.odt': OdType.OdtLoader,
+    '.ods': OdType.OdsLoader,
+    '.odp': OdType.OdpLoader
+  }
+  const odType = loaderMap[file.ext]
+  if (!odType) {
+    throw new Error('Unknown odType')
+  }
+  return ragApplication.addLoader(
+    new OdLoader({
+      odType,
+      filePath: file.path,
+      chunkSize: base.chunkSize,
+      chunkOverlap: base.chunkOverlap
+    }) as any,
+    forceReload
+  )
+}
+
+export async function addFileLoader(
+  ragApplication: RAGApplication,
+  file: FileType,
+  base: KnowledgeBaseParams,
+  forceReload: boolean
+): Promise<LoaderReturn> {
+  // Built-in types
+  if (commonExts.includes(file.ext)) {
+    const loaderReturn = await ragApplication.addLoader(
+      new LocalPathLoader({ path: file.path, chunkSize: base.chunkSize, chunkOverlap: base.chunkOverlap }) as any,
+      forceReload
+    )
+    return {
+      entriesAdded: loaderReturn.entriesAdded,
+      uniqueId: loaderReturn.uniqueId,
+      uniqueIds: [loaderReturn.uniqueId],
+      loaderType: loaderReturn.loaderType
+    } as LoaderReturn
+  }
+
+  // Custom types
+  if (['.odt', '.ods', '.odp'].includes(file.ext)) {
+    const loaderReturn = await addOdLoader(ragApplication, file, base, forceReload)
+    return {
+      entriesAdded: loaderReturn.entriesAdded,
+      uniqueId: loaderReturn.uniqueId,
+      uniqueIds: [loaderReturn.uniqueId],
+      loaderType: loaderReturn.loaderType
+    } as LoaderReturn
+  }
+
+  // Text type
+  const fileContent = fs.readFileSync(file.path, 'utf-8')
+  const loaderReturn = await ragApplication.addLoader(
+    new TextLoader({ text: fileContent, chunkSize: base.chunkSize, chunkOverlap: base.chunkOverlap }) as any,
+    forceReload
+  )
+  return {
+    entriesAdded: loaderReturn.entriesAdded,
+    uniqueId: loaderReturn.uniqueId,
+    uniqueIds: [loaderReturn.uniqueId],
+    loaderType: loaderReturn.loaderType
+  } as LoaderReturn
+}
diff --git a/src/main/loader/odLoader.ts b/src/main/loader/odLoader.ts
new file mode 100644
index 00000000..286fe34e
--- /dev/null
+++ b/src/main/loader/odLoader.ts
@@ -0,0 +1,71 @@
+import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters'
+import { BaseLoader } from '@llm-tools/embedjs-interfaces'
+import { cleanString } from '@llm-tools/embedjs-utils'
+import md5 from 'md5'
+import { OfficeParserConfig, parseOfficeAsync } from 'officeparser'
+
+export enum OdType {
+  OdtLoader = 'OdtLoader',
+  OdsLoader = 'OdsLoader',
+  OdpLoader = 'OdpLoader',
+  undefined = 'undefined'
+}
+
+export class OdLoader extends BaseLoader<{ type: string }> {
+  private readonly odType: OdType
+  private readonly filePath: string
+  private extractedText: string
+  private config: OfficeParserConfig
+
+  constructor({
+    odType,
+    filePath,
+    chunkSize,
+    chunkOverlap
+  }: {
+    odType: OdType
+    filePath: string
+    chunkSize?: number
+    chunkOverlap?: number
+  }) {
+    super(`${odType}_${md5(filePath)}`, { filePath }, chunkSize ?? 1000, chunkOverlap ?? 0)
+    this.odType = odType
+    this.filePath = filePath
+    this.extractedText = ''
+    this.config = {
+      newlineDelimiter: ' ',
+      ignoreNotes: false
+    }
+  }
+
+  private async extractTextFromOdt() {
+    try {
+      this.extractedText = await parseOfficeAsync(this.filePath, this.config)
+    } catch (err) {
+      console.error('odLoader error', err)
+      throw err
+    }
+  }
+
+  override async *getUnfilteredChunks() {
+    if (!this.extractedText) {
+      await this.extractTextFromOdt()
+    }
+    const chunker = new RecursiveCharacterTextSplitter({
+      chunkSize: this.chunkSize,
+      chunkOverlap: this.chunkOverlap
+    })
+
+    const chunks = await chunker.splitText(cleanString(this.extractedText))
+
+    for (const chunk of chunks) {
+      yield {
+        pageContent: chunk,
+        metadata: {
+          type: this.odType as string,
+          source: this.filePath
+        }
+      }
+    }
+  }
+}
diff --git a/src/main/services/KnowledgeService.ts b/src/main/services/KnowledgeService.ts
index ccbf2bb3..a1fa8104 100644
--- a/src/main/services/KnowledgeService.ts
+++ b/src/main/services/KnowledgeService.ts
@@ -1,18 +1,19 @@
 import * as fs from 'node:fs'
 import path from 'node:path'
 
-import { LocalPathLoader, RAGApplication, RAGApplicationBuilder, TextLoader } from '@llm-tools/embedjs'
-import type { AddLoaderReturn, ExtractChunkData } from '@llm-tools/embedjs-interfaces'
+import { RAGApplication, RAGApplicationBuilder, TextLoader } from '@llm-tools/embedjs'
+import type { ExtractChunkData } from '@llm-tools/embedjs-interfaces'
 import { LibSqlDb } from '@llm-tools/embedjs-libsql'
-import { MarkdownLoader } from '@llm-tools/embedjs-loader-markdown'
-import { DocxLoader, ExcelLoader, PptLoader } from '@llm-tools/embedjs-loader-msoffice'
-import { PdfLoader } from '@llm-tools/embedjs-loader-pdf'
 import { SitemapLoader } from '@llm-tools/embedjs-loader-sitemap'
 import { WebLoader } from '@llm-tools/embedjs-loader-web'
 import { AzureOpenAiEmbeddings, OpenAiEmbeddings } from '@llm-tools/embedjs-openai'
+import { addFileLoader } from '@main/loader'
 import { getInstanceName } from '@main/utils'
+import { getAllFiles } from '@main/utils/file'
+import type { LoaderReturn } from '@shared/config/types'
 import { FileType, KnowledgeBaseParams, KnowledgeItem } from '@types'
 import { app } from 'electron'
+import { v4 as uuidv4 } from 'uuid'
 
 class KnowledgeService {
   private storageDir = path.join(app.getPath('userData'), 'Data', 'KnowledgeBase')
@@ -41,20 +42,20 @@ class KnowledgeService {
       .setEmbeddingModel(
         apiVersion
           ? new AzureOpenAiEmbeddings({
-              azureOpenAIApiKey: apiKey,
-              azureOpenAIApiVersion: apiVersion,
-              azureOpenAIApiDeploymentName: model,
-              azureOpenAIApiInstanceName: getInstanceName(baseURL),
-              dimensions,
-              batchSize
-            })
+            azureOpenAIApiKey: apiKey,
+            azureOpenAIApiVersion: apiVersion,
+            azureOpenAIApiDeploymentName: model,
+            azureOpenAIApiInstanceName: getInstanceName(baseURL),
+            dimensions,
+            batchSize
+          })
           : new OpenAiEmbeddings({
-              model,
-              apiKey,
-              configuration: { baseURL },
-              dimensions,
-              batchSize
-            })
+            model,
+            apiKey,
+            configuration: { baseURL },
+            dimensions,
+            batchSize
+          })
       )
       .setVectorDatabase(new LibSqlDb({ path: path.join(this.storageDir, id) }))
       .build()
@@ -79,131 +80,87 @@ class KnowledgeService {
   public add = async (
     _: Electron.IpcMainInvokeEvent,
     { base, item, forceReload = false }: { base: KnowledgeBaseParams; item: KnowledgeItem; forceReload: boolean }
-  ): Promise<AddLoaderReturn> => {
+  ): Promise<LoaderReturn> => {
     const ragApplication = await this.getRagApplication(base)
 
     if (item.type === 'directory') {
       const directory = item.content as string
-      return await ragApplication.addLoader(
-        new LocalPathLoader({ path: directory, chunkSize: base.chunkSize, chunkOverlap: base.chunkOverlap }) as any,
-        forceReload
-      )
+      const files = getAllFiles(directory)
+      const loaderPromises = files.map((file) => addFileLoader(ragApplication, file, base, forceReload))
+      const loaderResults = await Promise.all(loaderPromises)
+      const uniqueIds = loaderResults.map((result) => result.uniqueId)
+      return {
+        entriesAdded: loaderResults.length,
+        uniqueId: `DirectoryLoader_${uuidv4()}`,
+        uniqueIds,
+        loaderType: 'DirectoryLoader'
+      } as LoaderReturn
     }
 
     if (item.type === 'url') {
       const content = item.content as string
       if (content.startsWith('http')) {
-        return await ragApplication.addLoader(
+        const loaderReturn = await ragApplication.addLoader(
          new WebLoader({ urlOrContent: content, chunkSize: base.chunkSize, chunkOverlap: base.chunkOverlap }) as any,
           forceReload
         )
+        return {
+          entriesAdded: loaderReturn.entriesAdded,
+          uniqueId: loaderReturn.uniqueId,
+          uniqueIds: [loaderReturn.uniqueId],
+          loaderType: loaderReturn.loaderType
+        } as LoaderReturn
      }
     }
 
     if (item.type === 'sitemap') {
       const content = item.content as string
       // @ts-ignore loader type
-      return await ragApplication.addLoader(
+      const loaderReturn = await ragApplication.addLoader(
         new SitemapLoader({ url: content, chunkSize: base.chunkSize, chunkOverlap: base.chunkOverlap }) as any,
         forceReload
       )
+      return {
+        entriesAdded: loaderReturn.entriesAdded,
+        uniqueId: loaderReturn.uniqueId,
+        uniqueIds: [loaderReturn.uniqueId],
+        loaderType: loaderReturn.loaderType
+      } as LoaderReturn
     }
 
     if (item.type === 'note') {
       const content = item.content as string
       console.debug('chunkSize', base.chunkSize)
-      return await ragApplication.addLoader(
+      const loaderReturn = await ragApplication.addLoader(
         new TextLoader({ text: content, chunkSize: base.chunkSize, chunkOverlap: base.chunkOverlap }),
         forceReload
       )
+      return {
+        entriesAdded: loaderReturn.entriesAdded,
+        uniqueId: loaderReturn.uniqueId,
+        uniqueIds: [loaderReturn.uniqueId],
+        loaderType: loaderReturn.loaderType
+      } as LoaderReturn
     }
 
     if (item.type === 'file') {
       const file = item.content as FileType
-      if (file.ext === '.pdf') {
-        return await ragApplication.addLoader(
-          new PdfLoader({
-            filePathOrUrl: file.path,
-            chunkSize: base.chunkSize,
-            chunkOverlap: base.chunkOverlap
-          }) as any,
-          forceReload
-        )
-      }
-
-      if (file.ext === '.docx') {
-        return await ragApplication.addLoader(
-          new DocxLoader({
-            filePathOrUrl: file.path,
-            chunkSize: base.chunkSize,
-            chunkOverlap: base.chunkOverlap
-          }) as any,
-          forceReload
-        )
-      }
-
-      if (file.ext === '.pptx') {
-        return await ragApplication.addLoader(
-          new PptLoader({
-            filePathOrUrl: file.path,
-            chunkSize: base.chunkSize,
-            chunkOverlap: base.chunkOverlap
-          }) as any,
-          forceReload
-        )
-      }
-
-      if (file.ext === '.xlsx') {
-        return await ragApplication.addLoader(
-          new ExcelLoader({
-            filePathOrUrl: file.path,
-            chunkSize: base.chunkSize,
-            chunkOverlap: base.chunkOverlap
-          }) as any,
-          forceReload
-        )
-      }
-
-      if (['.md'].includes(file.ext)) {
-        return await ragApplication.addLoader(
-          new MarkdownLoader({
-            filePathOrUrl: file.path,
-            chunkSize: base.chunkSize,
-            chunkOverlap: base.chunkOverlap
-          }) as any,
-          forceReload
-        )
-      }
-
-      const fileContent = fs.readFileSync(file.path, 'utf-8')
-
-      if (['.html'].includes(file.ext)) {
-        return await ragApplication.addLoader(
-          new WebLoader({
-            urlOrContent: fileContent,
-            chunkSize: base.chunkSize,
-            chunkOverlap: base.chunkOverlap
-          }) as any,
-          forceReload
-        )
-      }
-
-      return await ragApplication.addLoader(
-        new TextLoader({ text: fileContent, chunkSize: base.chunkSize, chunkOverlap: base.chunkOverlap }),
-        forceReload
-      )
+      return await addFileLoader(ragApplication, file, base, forceReload)
     }
 
-    return { entriesAdded: 0, uniqueId: '', loaderType: '' }
+    return { entriesAdded: 0, uniqueId: '', uniqueIds: [''], loaderType: '' }
   }
 
   public remove = async (
     _: Electron.IpcMainInvokeEvent,
-    { uniqueId, base }: { uniqueId: string; base: KnowledgeBaseParams }
+    { uniqueId, uniqueIds, base }: { uniqueId: string; uniqueIds: string[]; base: KnowledgeBaseParams }
   ): Promise<void> => {
     const ragApplication = await this.getRagApplication(base)
-    await ragApplication.deleteLoader(uniqueId)
+    console.debug(`[ KnowledgeService Remove Item UniqueId: ${uniqueId}]`)
+    for (const id of uniqueIds) {
+      await ragApplication.deleteLoader(id)
+    }
   }
 
   public search = async (
diff --git a/src/main/utils/file.ts b/src/main/utils/file.ts
index 51971122..50e14798 100644
--- a/src/main/utils/file.ts
+++ b/src/main/utils/file.ts
@@ -1,5 +1,9 @@
+import * as fs from 'node:fs'
+import path from 'node:path'
+
 import { audioExts, documentExts, imageExts, textExts, videoExts } from '@shared/config/constant'
-import { FileTypes } from '@types'
+import { FileType, FileTypes } from '@types'
+import { v4 as uuidv4 } from 'uuid'
 
 export function getFileType(ext: string): FileTypes {
   ext = ext.toLowerCase()
@@ -10,3 +14,36 @@
   if (documentExts.includes(ext)) return FileTypes.DOCUMENT
   return FileTypes.OTHER
 }
+export function getAllFiles(dirPath: string, arrayOfFiles: FileType[] = []): FileType[] {
+  const files = fs.readdirSync(dirPath)
+
+  files.forEach((file) => {
+    const fullPath = path.join(dirPath, file)
+    if (fs.statSync(fullPath).isDirectory()) {
+      arrayOfFiles = getAllFiles(fullPath, arrayOfFiles)
+    } else {
+      const ext = path.extname(file)
+      const fileType = getFileType(ext)
+
+      if (fileType === FileTypes.OTHER) return
+
+      const name = path.basename(file)
+      const size = fs.statSync(fullPath).size
+
+      const fileItem: FileType = {
+        id: uuidv4(),
+        name,
+        path: fullPath,
+        size,
+        ext,
+        count: 1,
+        origin_name: name,
+        type: fileType,
+        created_at: new Date()
+      }
+      arrayOfFiles.push(fileItem)
+    }
+  })
+
+  return arrayOfFiles
+}
diff --git a/src/preload/index.d.ts b/src/preload/index.d.ts
index f7ce536e..0dc6d477 100644
--- a/src/preload/index.d.ts
+++ b/src/preload/index.d.ts
@@ -1,9 +1,10 @@
 import { ElectronAPI } from '@electron-toolkit/preload'
 import type { FileMetadataResponse, ListFilesResponse, UploadFileResponse } from '@google/generative-ai/server'
-import { AddLoaderReturn, ExtractChunkData } from '@llm-tools/embedjs-interfaces'
+import { ExtractChunkData } from '@llm-tools/embedjs-interfaces'
 import { FileType } from '@renderer/types'
 import { WebDavConfig } from '@renderer/types'
 import { AppInfo, KnowledgeBaseParams, KnowledgeItem, LanguageVarious } from '@renderer/types'
+import type { LoaderReturn } from '@shared/config/types'
 import type { OpenDialogOptions } from 'electron'
 import type { UpdateInfo } from 'electron-updater'
 import { Readable } from 'stream'
@@ -78,8 +79,16 @@ declare global {
         base: KnowledgeBaseParams
         item: KnowledgeItem
         forceReload?: boolean
-      }) => Promise<AddLoaderReturn>
-      remove: ({ uniqueId, base }: { uniqueId: string; base: KnowledgeBaseParams }) => Promise<void>
+      }) => Promise<LoaderReturn>
+      remove: ({
+        uniqueId,
+        uniqueIds,
+        base
+      }: {
+        uniqueId: string
+        uniqueIds: string[]
+        base: KnowledgeBaseParams
+      }) => Promise<void>
       search: ({ search, base }: { search: string; base: KnowledgeBaseParams }) => Promise<ExtractChunkData[]>
     }
     window: {
diff --git a/src/preload/index.ts b/src/preload/index.ts
index 60548ca6..080ffc22 100644
--- a/src/preload/index.ts
+++ b/src/preload/index.ts
@@ -71,8 +71,8 @@ const api = {
       item: KnowledgeItem
       forceReload?: boolean
     }) => ipcRenderer.invoke('knowledge-base:add', { base, item, forceReload }),
-    remove: ({ uniqueId, base }: { uniqueId: string; base: KnowledgeBaseParams }) =>
-      ipcRenderer.invoke('knowledge-base:remove', { uniqueId, base }),
+    remove: ({ uniqueId, uniqueIds, base }: { uniqueId: string; uniqueIds: string[]; base: KnowledgeBaseParams }) =>
+      ipcRenderer.invoke('knowledge-base:remove', { uniqueId, uniqueIds, base }),
     search: ({ search, base }: { search: string; base: KnowledgeBaseParams }) =>
       ipcRenderer.invoke('knowledge-base:search', { search, base })
   },
diff --git a/src/renderer/src/hooks/useKnowledge.ts b/src/renderer/src/hooks/useKnowledge.ts
index 07cef783..ffd425e1 100644
--- a/src/renderer/src/hooks/useKnowledge.ts
+++ b/src/renderer/src/hooks/useKnowledge.ts
@@ -138,15 +138,18 @@
   const removeItem = async (item: KnowledgeItem) => {
     dispatch(removeItemAction({ baseId, item }))
     if (base) {
-      if (item?.uniqueId) {
-        await window.api.knowledgeBase.remove({ uniqueId: item.uniqueId, base: getKnowledgeBaseParams(base) })
-      }
-      if (item.type === 'file' && typeof item.content === 'object') {
-        await FileManager.deleteFile(item.content.id)
+      if (item?.uniqueId && item?.uniqueIds) {
+        await window.api.knowledgeBase.remove({
+          uniqueId: item.uniqueId,
+          uniqueIds: item.uniqueIds,
+          base: getKnowledgeBaseParams(base)
+        })
       }
     }
+    if (item.type === 'file' && typeof item.content === 'object') {
+      await FileManager.deleteFile(item.content.id)
+    }
   }
 
-  // 刷新项目
   const refreshItem = async (item: KnowledgeItem) => {
     const status = getProcessingStatus(item.id)
@@ -155,8 +158,12 @@
       return
     }
 
-    if (base && item.uniqueId) {
-      await window.api.knowledgeBase.remove({ uniqueId: item.uniqueId, base: getKnowledgeBaseParams(base) })
+    if (base && item.uniqueId && item.uniqueIds) {
+      await window.api.knowledgeBase.remove({
+        uniqueId: item.uniqueId,
+        uniqueIds: item.uniqueIds,
+        base: getKnowledgeBaseParams(base)
+      })
       updateItem({
         ...item,
         processingStatus: 'pending',
diff --git a/src/renderer/src/pages/knowledge/KnowledgeContent.tsx b/src/renderer/src/pages/knowledge/KnowledgeContent.tsx
index 6474e08e..0c1f7146 100644
--- a/src/renderer/src/pages/knowledge/KnowledgeContent.tsx
+++ b/src/renderer/src/pages/knowledge/KnowledgeContent.tsx
@@ -17,6 +17,7 @@ import { useKnowledge } from '@renderer/hooks/useKnowledge'
 import FileManager from '@renderer/services/FileManager'
 import { getProviderName } from '@renderer/services/ProviderService'
 import { FileType, FileTypes, KnowledgeBase } from '@renderer/types'
+import { documentExts, textExts } from '@shared/config/constant'
 import { Alert, Button, Card, Divider, message, Tag, Typography, Upload } from 'antd'
 import { FC } from 'react'
 import { useTranslation } from 'react-i18next'
@@ -33,8 +34,7 @@ interface KnowledgeContentProps {
   selectedBase: KnowledgeBase
 }
 
-const fileTypes = ['.pdf', '.docx', '.pptx', '.xlsx', '.txt', '.md', '.html']
-
+const fileTypes = [...documentExts, ...textExts]
 const KnowledgeContent: FC<KnowledgeContentProps> = ({ selectedBase }) => {
   const { t } = useTranslation()
   const {
@@ -209,7 +209,7 @@ const KnowledgeContent: FC<KnowledgeContentProps> = ({ selectedBase }) => {
             style={{ marginTop: 10, background: 'transparent' }}>

             {t('knowledge.drag_file')}
-            {t('knowledge.file_hint', { file_types: fileTypes.join(', ').replaceAll('.', '') })}
+            {t('knowledge.file_hint', { file_types: fileTypes.slice(0, 5).join(', ').replaceAll('.', '') })}

diff --git a/src/renderer/src/queue/KnowledgeQueue.ts b/src/renderer/src/queue/KnowledgeQueue.ts
index 128f9e6a..a4ccef13 100644
--- a/src/renderer/src/queue/KnowledgeQueue.ts
+++ b/src/renderer/src/queue/KnowledgeQueue.ts
@@ -1,9 +1,9 @@
-import type { AddLoaderReturn } from '@llm-tools/embedjs-interfaces'
 import db from '@renderer/databases'
 import { getKnowledgeBaseParams } from '@renderer/services/KnowledgeService'
 import store from '@renderer/store'
 import { clearCompletedProcessing, updateBaseItemUniqueId, updateItemProcessingStatus } from '@renderer/store/knowledge'
 import { KnowledgeItem } from '@renderer/types'
+import type { LoaderReturn } from '@shared/config/types'
 
 class KnowledgeQueue {
   private processing: Map = new Map()
@@ -113,7 +113,7 @@
       throw new Error(`[KnowledgeQueue] Source item ${item.id} not found in base ${baseId}`)
     }
 
-    let result: AddLoaderReturn | null = null
+    let result: LoaderReturn | null = null
     let note, content
 
     console.log(`[KnowledgeQueue] Processing item: ${sourceItem.content}`)
@@ -146,16 +146,16 @@
           updateBaseItemUniqueId({
             baseId,
             itemId: item.id,
-            uniqueId: result.uniqueId
+            uniqueId: result.uniqueId,
+            uniqueIds: result.uniqueIds
           })
         )
       }
-
-      console.debug(`[KnowledgeQueue] Updated uniqueId for item ${item.id} in base ${baseId}`)
+      console.debug(`[KnowledgeQueue] Updated uniqueId for item ${item.id} in base ${baseId} `)
 
      setTimeout(() => store.dispatch(clearCompletedProcessing({ baseId })), 1000)
     } catch (error) {
-      console.error(`[KnowledgeQueue] Error processing item ${item.id}:`, error)
+      console.error(`[KnowledgeQueue] Error processing item ${item.id}: `, error)
       store.dispatch(
         updateItemProcessingStatus({
           baseId,
diff --git a/src/renderer/src/store/knowledge.ts b/src/renderer/src/store/knowledge.ts
index fd6763ba..5d5cb78b 100644
--- a/src/renderer/src/store/knowledge.ts
+++ b/src/renderer/src/store/knowledge.ts
@@ -171,12 +171,16 @@ const knowledgeSlice = createSlice({
       }
     },
 
-    updateBaseItemUniqueId(state, action: PayloadAction<{ baseId: string; itemId: string; uniqueId: string }>) {
+    updateBaseItemUniqueId(
+      state,
+      action: PayloadAction<{ baseId: string; itemId: string; uniqueId: string; uniqueIds: string[] }>
+    ) {
       const base = state.bases.find((b) => b.id === action.payload.baseId)
       if (base) {
         const item = base.items.find((item) => item.id === action.payload.itemId)
         if (item) {
           item.uniqueId = action.payload.uniqueId
+          item.uniqueIds = action.payload.uniqueIds
         }
       }
     }
diff --git a/src/renderer/src/types/index.ts b/src/renderer/src/types/index.ts
index ce8c6379..ba801eee 100644
--- a/src/renderer/src/types/index.ts
+++ b/src/renderer/src/types/index.ts
@@ -219,6 +219,7 @@ export type KnowledgeItem = {
   id: string
   baseId?: string
   uniqueId?: string
+  uniqueIds?: string[]
   type: KnowledgeItemType
   content: string | FileType
   created_at: number
diff --git a/yarn.lock b/yarn.lock
index e1d2e886..773699ba 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -2601,6 +2601,13 @@ __metadata:
   languageName: node
   linkType: hard
 
+"@types/md5@npm:^2.3.5":
+  version: 2.3.5
+  resolution: "@types/md5@npm:2.3.5"
+  checksum: 10c0/aef9c40ab0ec61f1ae2acb04ee04e55b99eeec64af58176ef36d4feaf6aac335f32caca07eca0e3a1813fc362bf0b8663423250e24b39ebcfdc24c6ca4abe1cf
+  languageName: node
+  linkType: hard
+
 "@types/mdast@npm:^4.0.0":
   version: 4.0.4
   resolution: "@types/mdast@npm:4.0.4"
@@ -3002,6 +3009,7 @@ __metadata:
     "@types/fs-extra": "npm:^11"
     "@types/lodash": "npm:^4.17.5"
     "@types/markdown-it": "npm:^14"
+    "@types/md5": "npm:^2.3.5"
"@types/node": "npm:^18.19.9" "@types/react": "npm:^18.2.48" "@types/react-dom": "npm:^18.2.18"