feat: knowledge base support more file types (#1300)

* feat: knowledge base support more file types

* chore: add common document types

* feat: finish basic odloader

* feat: finish open document loader

* feat: support more types from directory

* fix: 删除目录时仍能检索到信息

* chore
This commit is contained in:
Chen Tao 2025-02-13 11:36:33 +08:00 committed by GitHub
parent 827959e580
commit c6f136caa2
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
14 changed files with 308 additions and 125 deletions

View File

@ -91,6 +91,7 @@
"@types/fs-extra": "^11", "@types/fs-extra": "^11",
"@types/lodash": "^4.17.5", "@types/lodash": "^4.17.5",
"@types/markdown-it": "^14", "@types/markdown-it": "^14",
"@types/md5": "^2.3.5",
"@types/node": "^18.19.9", "@types/node": "^18.19.9",
"@types/react": "^18.2.48", "@types/react": "^18.2.48",
"@types/react-dom": "^18.2.18", "@types/react-dom": "^18.2.18",

View File

@ -0,0 +1,6 @@
// Normalized result shape shared across all knowledge-base loaders
// (single files, URLs, sitemaps, notes, and whole directories).
export type LoaderReturn = {
// Number of entries (chunks/documents) added to the vector store.
entriesAdded: number
// Primary loader id; for directory loads this is a synthetic id.
uniqueId: string
// Every loader id created by the operation — directories produce one per file,
// and these are what `remove` must delete to avoid orphaned entries.
uniqueIds: string[]
// Loader implementation name, e.g. 'DirectoryLoader' or embedjs' own type string.
loaderType: string
}

82
src/main/loader/index.ts Normal file
View File

@ -0,0 +1,82 @@
import * as fs from 'node:fs'
import { LocalPathLoader, RAGApplication, TextLoader } from '@llm-tools/embedjs'
import type { AddLoaderReturn } from '@llm-tools/embedjs-interfaces'
import { LoaderReturn } from '@shared/config/types'
import { FileType, KnowledgeBaseParams } from '@types'
import { OdLoader, OdType } from './odLoader'
// embedjs内置loader类型
const commonExts = ['.pdf', '.csv', '.json', '.docx', '.pptx', '.xlsx', '.md', '.jpeg']
/**
 * Adds an OpenDocument file (.odt / .ods / .odp) to the RAG application
 * through the custom OdLoader.
 *
 * @param ragApplication target embedjs application
 * @param file file descriptor; `file.ext` selects the OpenDocument variant
 * @param base knowledge-base chunking parameters
 * @param forceReload when true, re-ingest even if the loader id already exists
 * @returns the raw embedjs AddLoaderReturn for this single file
 * @throws Error when the extension is not a supported OpenDocument type
 */
export async function addOdLoader(
  ragApplication: RAGApplication,
  file: FileType,
  base: KnowledgeBaseParams,
  forceReload: boolean
): Promise<AddLoaderReturn> {
  // Extension → OdLoader variant mapping.
  const loaderMap: Record<string, OdType> = {
    '.odt': OdType.OdtLoader,
    '.ods': OdType.OdsLoader,
    '.odp': OdType.OdpLoader
  }
  const odType = loaderMap[file.ext]
  if (!odType) {
    // Include the offending extension so failures are diagnosable from logs.
    throw new Error(`Unknown OpenDocument type: ${file.ext}`)
  }
  return ragApplication.addLoader(
    new OdLoader({
      odType,
      filePath: file.path,
      chunkSize: base.chunkSize,
      chunkOverlap: base.chunkOverlap
    }) as any,
    forceReload
  )
}
export async function addFileLoader(
ragApplication: RAGApplication,
file: FileType,
base: KnowledgeBaseParams,
forceReload: boolean
): Promise<LoaderReturn> {
// 内置类型
if (commonExts.includes(file.ext)) {
const loaderReturn = await ragApplication.addLoader(
new LocalPathLoader({ path: file.path, chunkSize: base.chunkSize, chunkOverlap: base.chunkOverlap }) as any,
forceReload
)
return {
entriesAdded: loaderReturn.entriesAdded,
uniqueId: loaderReturn.uniqueId,
uniqueIds: [loaderReturn.uniqueId],
loaderType: loaderReturn.loaderType
} as LoaderReturn
}
// 自定义类型
if (['.odt', '.ods', '.odp'].includes(file.ext)) {
const loaderReturn = await addOdLoader(ragApplication, file, base, forceReload)
return {
entriesAdded: loaderReturn.entriesAdded,
uniqueId: loaderReturn.uniqueId,
uniqueIds: [loaderReturn.uniqueId],
loaderType: loaderReturn.loaderType
} as LoaderReturn
}
// 文本类型
const fileContent = fs.readFileSync(file.path, 'utf-8')
const loaderReturn = await ragApplication.addLoader(
new TextLoader({ text: fileContent, chunkSize: base.chunkSize, chunkOverlap: base.chunkOverlap }) as any,
forceReload
)
return {
entriesAdded: loaderReturn.entriesAdded,
uniqueId: loaderReturn.uniqueId,
uniqueIds: [loaderReturn.uniqueId],
loaderType: loaderReturn.loaderType
} as LoaderReturn
}

View File

@ -0,0 +1,71 @@
import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters'
import { BaseLoader } from '@llm-tools/embedjs-interfaces'
import { cleanString } from '@llm-tools/embedjs-utils'
import md5 from 'md5'
import { OfficeParserConfig, parseOfficeAsync } from 'officeparser'
// Discriminates which OpenDocument loader variant a file maps to.
// The values double as the human-readable loader-type string stored in metadata.
export enum OdType {
OdtLoader = 'OdtLoader',
OdsLoader = 'OdsLoader',
OdpLoader = 'OdpLoader',
// NOTE(review): a literal 'undefined' member is unusual — it does not appear to be
// produced by the extension map in loader/index.ts; confirm whether it can be removed.
undefined = 'undefined'
}
export class OdLoader<OdType> extends BaseLoader<{ type: string }> {
private readonly odType: OdType
private readonly filePath: string
private extractedText: string
private config: OfficeParserConfig
constructor({
odType,
filePath,
chunkSize,
chunkOverlap
}: {
odType: OdType
filePath: string
chunkSize?: number
chunkOverlap?: number
}) {
super(`${odType}_${md5(filePath)}`, { filePath }, chunkSize ?? 1000, chunkOverlap ?? 0)
this.odType = odType
this.filePath = filePath
this.extractedText = ''
this.config = {
newlineDelimiter: ' ',
ignoreNotes: false
}
}
private async extractTextFromOdt() {
try {
this.extractedText = await parseOfficeAsync(this.filePath, this.config)
} catch (err) {
console.error('odLoader error', err)
throw err
}
}
override async *getUnfilteredChunks() {
if (!this.extractedText) {
await this.extractTextFromOdt()
}
const chunker = new RecursiveCharacterTextSplitter({
chunkSize: this.chunkSize,
chunkOverlap: this.chunkOverlap
})
const chunks = await chunker.splitText(cleanString(this.extractedText))
for (const chunk of chunks) {
yield {
pageContent: chunk,
metadata: {
type: this.odType as string,
source: this.filePath
}
}
}
}
}

View File

@ -1,18 +1,19 @@
import * as fs from 'node:fs' import * as fs from 'node:fs'
import path from 'node:path' import path from 'node:path'
import { LocalPathLoader, RAGApplication, RAGApplicationBuilder, TextLoader } from '@llm-tools/embedjs' import { RAGApplication, RAGApplicationBuilder, TextLoader } from '@llm-tools/embedjs'
import type { AddLoaderReturn, ExtractChunkData } from '@llm-tools/embedjs-interfaces' import type { ExtractChunkData } from '@llm-tools/embedjs-interfaces'
import { LibSqlDb } from '@llm-tools/embedjs-libsql' import { LibSqlDb } from '@llm-tools/embedjs-libsql'
import { MarkdownLoader } from '@llm-tools/embedjs-loader-markdown'
import { DocxLoader, ExcelLoader, PptLoader } from '@llm-tools/embedjs-loader-msoffice'
import { PdfLoader } from '@llm-tools/embedjs-loader-pdf'
import { SitemapLoader } from '@llm-tools/embedjs-loader-sitemap' import { SitemapLoader } from '@llm-tools/embedjs-loader-sitemap'
import { WebLoader } from '@llm-tools/embedjs-loader-web' import { WebLoader } from '@llm-tools/embedjs-loader-web'
import { AzureOpenAiEmbeddings, OpenAiEmbeddings } from '@llm-tools/embedjs-openai' import { AzureOpenAiEmbeddings, OpenAiEmbeddings } from '@llm-tools/embedjs-openai'
import { addFileLoader } from '@main/loader'
import { getInstanceName } from '@main/utils' import { getInstanceName } from '@main/utils'
import { getAllFiles } from '@main/utils/file'
import type { LoaderReturn } from '@shared/config/types'
import { FileType, KnowledgeBaseParams, KnowledgeItem } from '@types' import { FileType, KnowledgeBaseParams, KnowledgeItem } from '@types'
import { app } from 'electron' import { app } from 'electron'
import { v4 as uuidv4 } from 'uuid'
class KnowledgeService { class KnowledgeService {
private storageDir = path.join(app.getPath('userData'), 'Data', 'KnowledgeBase') private storageDir = path.join(app.getPath('userData'), 'Data', 'KnowledgeBase')
@ -79,131 +80,87 @@ class KnowledgeService {
public add = async ( public add = async (
_: Electron.IpcMainInvokeEvent, _: Electron.IpcMainInvokeEvent,
{ base, item, forceReload = false }: { base: KnowledgeBaseParams; item: KnowledgeItem; forceReload: boolean } { base, item, forceReload = false }: { base: KnowledgeBaseParams; item: KnowledgeItem; forceReload: boolean }
): Promise<AddLoaderReturn> => { ): Promise<LoaderReturn> => {
const ragApplication = await this.getRagApplication(base) const ragApplication = await this.getRagApplication(base)
if (item.type === 'directory') { if (item.type === 'directory') {
const directory = item.content as string const directory = item.content as string
return await ragApplication.addLoader( const files = getAllFiles(directory)
new LocalPathLoader({ path: directory, chunkSize: base.chunkSize, chunkOverlap: base.chunkOverlap }) as any, const loaderPromises = files.map((file) => addFileLoader(ragApplication, file, base, forceReload))
forceReload const loaderResults = await Promise.all(loaderPromises)
) const uniqueIds = loaderResults.map((result) => result.uniqueId)
return {
entriesAdded: loaderResults.length,
uniqueId: `DirectoryLoader_${uuidv4()}`,
uniqueIds,
loaderType: 'DirectoryLoader'
} as LoaderReturn
} }
if (item.type === 'url') { if (item.type === 'url') {
const content = item.content as string const content = item.content as string
if (content.startsWith('http')) { if (content.startsWith('http')) {
return await ragApplication.addLoader( const loaderReturn = await ragApplication.addLoader(
new WebLoader({ urlOrContent: content, chunkSize: base.chunkSize, chunkOverlap: base.chunkOverlap }) as any, new WebLoader({ urlOrContent: content, chunkSize: base.chunkSize, chunkOverlap: base.chunkOverlap }) as any,
forceReload forceReload
) )
return {
entriesAdded: loaderReturn.entriesAdded,
uniqueId: loaderReturn.uniqueId,
uniqueIds: [loaderReturn.uniqueId],
loaderType: loaderReturn.loaderType
} as LoaderReturn
} }
} }
if (item.type === 'sitemap') { if (item.type === 'sitemap') {
const content = item.content as string const content = item.content as string
// @ts-ignore loader type // @ts-ignore loader type
return await ragApplication.addLoader( const loaderReturn = await ragApplication.addLoader(
new SitemapLoader({ url: content, chunkSize: base.chunkSize, chunkOverlap: base.chunkOverlap }) as any, new SitemapLoader({ url: content, chunkSize: base.chunkSize, chunkOverlap: base.chunkOverlap }) as any,
forceReload forceReload
) )
return {
entriesAdded: loaderReturn.entriesAdded,
uniqueId: loaderReturn.uniqueId,
uniqueIds: [loaderReturn.uniqueId],
loaderType: loaderReturn.loaderType
} as LoaderReturn
} }
if (item.type === 'note') { if (item.type === 'note') {
const content = item.content as string const content = item.content as string
console.debug('chunkSize', base.chunkSize) console.debug('chunkSize', base.chunkSize)
return await ragApplication.addLoader( const loaderReturn = await ragApplication.addLoader(
new TextLoader({ text: content, chunkSize: base.chunkSize, chunkOverlap: base.chunkOverlap }), new TextLoader({ text: content, chunkSize: base.chunkSize, chunkOverlap: base.chunkOverlap }),
forceReload forceReload
) )
return {
entriesAdded: loaderReturn.entriesAdded,
uniqueId: loaderReturn.uniqueId,
uniqueIds: [loaderReturn.uniqueId],
loaderType: loaderReturn.loaderType
} as LoaderReturn
} }
if (item.type === 'file') { if (item.type === 'file') {
const file = item.content as FileType const file = item.content as FileType
if (file.ext === '.pdf') { return await addFileLoader(ragApplication, file, base, forceReload)
return await ragApplication.addLoader(
new PdfLoader({
filePathOrUrl: file.path,
chunkSize: base.chunkSize,
chunkOverlap: base.chunkOverlap
}) as any,
forceReload
)
} }
if (file.ext === '.docx') { return { entriesAdded: 0, uniqueId: '', uniqueIds: [''], loaderType: '' }
return await ragApplication.addLoader(
new DocxLoader({
filePathOrUrl: file.path,
chunkSize: base.chunkSize,
chunkOverlap: base.chunkOverlap
}) as any,
forceReload
)
}
if (file.ext === '.pptx') {
return await ragApplication.addLoader(
new PptLoader({
filePathOrUrl: file.path,
chunkSize: base.chunkSize,
chunkOverlap: base.chunkOverlap
}) as any,
forceReload
)
}
if (file.ext === '.xlsx') {
return await ragApplication.addLoader(
new ExcelLoader({
filePathOrUrl: file.path,
chunkSize: base.chunkSize,
chunkOverlap: base.chunkOverlap
}) as any,
forceReload
)
}
if (['.md'].includes(file.ext)) {
return await ragApplication.addLoader(
new MarkdownLoader({
filePathOrUrl: file.path,
chunkSize: base.chunkSize,
chunkOverlap: base.chunkOverlap
}) as any,
forceReload
)
}
const fileContent = fs.readFileSync(file.path, 'utf-8')
if (['.html'].includes(file.ext)) {
return await ragApplication.addLoader(
new WebLoader({
urlOrContent: fileContent,
chunkSize: base.chunkSize,
chunkOverlap: base.chunkOverlap
}) as any,
forceReload
)
}
return await ragApplication.addLoader(
new TextLoader({ text: fileContent, chunkSize: base.chunkSize, chunkOverlap: base.chunkOverlap }),
forceReload
)
}
return { entriesAdded: 0, uniqueId: '', loaderType: '' }
} }
public remove = async ( public remove = async (
_: Electron.IpcMainInvokeEvent, _: Electron.IpcMainInvokeEvent,
{ uniqueId, base }: { uniqueId: string; base: KnowledgeBaseParams } { uniqueId, uniqueIds, base }: { uniqueId: string; uniqueIds: string[]; base: KnowledgeBaseParams }
): Promise<void> => { ): Promise<void> => {
const ragApplication = await this.getRagApplication(base) const ragApplication = await this.getRagApplication(base)
await ragApplication.deleteLoader(uniqueId) console.debug(`[ KnowledgeService Remove Item UniqueId: ${uniqueId}]`)
for (const id of uniqueIds) {
await ragApplication.deleteLoader(id)
}
} }
public search = async ( public search = async (

View File

@ -1,5 +1,9 @@
import * as fs from 'node:fs'
import path from 'node:path'
import { audioExts, documentExts, imageExts, textExts, videoExts } from '@shared/config/constant' import { audioExts, documentExts, imageExts, textExts, videoExts } from '@shared/config/constant'
import { FileTypes } from '@types' import { FileType, FileTypes } from '@types'
import { v4 as uuidv4 } from 'uuid'
export function getFileType(ext: string): FileTypes { export function getFileType(ext: string): FileTypes {
ext = ext.toLowerCase() ext = ext.toLowerCase()
@ -10,3 +14,36 @@ export function getFileType(ext: string): FileTypes {
if (documentExts.includes(ext)) return FileTypes.DOCUMENT if (documentExts.includes(ext)) return FileTypes.DOCUMENT
return FileTypes.OTHER return FileTypes.OTHER
} }
/**
 * Recursively collects every recognized file under `dirPath` as a FileType record.
 * Entries whose extension maps to FileTypes.OTHER are skipped.
 *
 * Fix: the original called fs.statSync twice per file (once for the directory
 * check, once for the size); a single stat is taken and reused.
 *
 * @param dirPath directory to walk (must exist and be readable)
 * @param arrayOfFiles accumulator threaded through the recursion; callers omit it
 * @returns the accumulated list of file records
 */
export function getAllFiles(dirPath: string, arrayOfFiles: FileType[] = []): FileType[] {
  const files = fs.readdirSync(dirPath)
  for (const file of files) {
    const fullPath = path.join(dirPath, file)
    // One stat per entry, reused for both the directory check and the size.
    const stat = fs.statSync(fullPath)
    if (stat.isDirectory()) {
      getAllFiles(fullPath, arrayOfFiles)
      continue
    }
    const ext = path.extname(file)
    const fileType = getFileType(ext)
    // Unrecognized extensions are not indexable content; skip them.
    if (fileType === FileTypes.OTHER) continue
    const name = path.basename(file)
    const fileItem: FileType = {
      id: uuidv4(),
      name,
      path: fullPath,
      size: stat.size,
      ext,
      count: 1,
      origin_name: name,
      type: fileType,
      created_at: new Date()
    }
    arrayOfFiles.push(fileItem)
  }
  return arrayOfFiles
}

View File

@ -1,9 +1,10 @@
import { ElectronAPI } from '@electron-toolkit/preload' import { ElectronAPI } from '@electron-toolkit/preload'
import type { FileMetadataResponse, ListFilesResponse, UploadFileResponse } from '@google/generative-ai/server' import type { FileMetadataResponse, ListFilesResponse, UploadFileResponse } from '@google/generative-ai/server'
import { AddLoaderReturn, ExtractChunkData } from '@llm-tools/embedjs-interfaces' import { ExtractChunkData } from '@llm-tools/embedjs-interfaces'
import { FileType } from '@renderer/types' import { FileType } from '@renderer/types'
import { WebDavConfig } from '@renderer/types' import { WebDavConfig } from '@renderer/types'
import { AppInfo, KnowledgeBaseParams, KnowledgeItem, LanguageVarious } from '@renderer/types' import { AppInfo, KnowledgeBaseParams, KnowledgeItem, LanguageVarious } from '@renderer/types'
import type { LoaderReturn } from '@shared/config/types'
import type { OpenDialogOptions } from 'electron' import type { OpenDialogOptions } from 'electron'
import type { UpdateInfo } from 'electron-updater' import type { UpdateInfo } from 'electron-updater'
import { Readable } from 'stream' import { Readable } from 'stream'
@ -78,8 +79,16 @@ declare global {
base: KnowledgeBaseParams base: KnowledgeBaseParams
item: KnowledgeItem item: KnowledgeItem
forceReload?: boolean forceReload?: boolean
}) => Promise<AddLoaderReturn> }) => Promise<LoaderReturn>
remove: ({ uniqueId, base }: { uniqueId: string; base: KnowledgeBaseParams }) => Promise<void> remove: ({
uniqueId,
uniqueIds,
base
}: {
uniqueId: string
uniqueIds: string[]
base: KnowledgeBaseParams
}) => Promise<void>
search: ({ search, base }: { search: string; base: KnowledgeBaseParams }) => Promise<ExtractChunkData[]> search: ({ search, base }: { search: string; base: KnowledgeBaseParams }) => Promise<ExtractChunkData[]>
} }
window: { window: {

View File

@ -71,8 +71,8 @@ const api = {
item: KnowledgeItem item: KnowledgeItem
forceReload?: boolean forceReload?: boolean
}) => ipcRenderer.invoke('knowledge-base:add', { base, item, forceReload }), }) => ipcRenderer.invoke('knowledge-base:add', { base, item, forceReload }),
remove: ({ uniqueId, base }: { uniqueId: string; base: KnowledgeBaseParams }) => remove: ({ uniqueId, uniqueIds, base }: { uniqueId: string; uniqueIds: string[]; base: KnowledgeBaseParams }) =>
ipcRenderer.invoke('knowledge-base:remove', { uniqueId, base }), ipcRenderer.invoke('knowledge-base:remove', { uniqueId, uniqueIds, base }),
search: ({ search, base }: { search: string; base: KnowledgeBaseParams }) => search: ({ search, base }: { search: string; base: KnowledgeBaseParams }) =>
ipcRenderer.invoke('knowledge-base:search', { search, base }) ipcRenderer.invoke('knowledge-base:search', { search, base })
}, },

View File

@ -138,15 +138,18 @@ export const useKnowledge = (baseId: string) => {
const removeItem = async (item: KnowledgeItem) => { const removeItem = async (item: KnowledgeItem) => {
dispatch(removeItemAction({ baseId, item })) dispatch(removeItemAction({ baseId, item }))
if (base) { if (base) {
if (item?.uniqueId) { if (item?.uniqueId && item?.uniqueIds) {
await window.api.knowledgeBase.remove({ uniqueId: item.uniqueId, base: getKnowledgeBaseParams(base) }) await window.api.knowledgeBase.remove({
uniqueId: item.uniqueId,
uniqueIds: item.uniqueIds,
base: getKnowledgeBaseParams(base)
})
}
} }
if (item.type === 'file' && typeof item.content === 'object') { if (item.type === 'file' && typeof item.content === 'object') {
await FileManager.deleteFile(item.content.id) await FileManager.deleteFile(item.content.id)
} }
} }
}
// 刷新项目 // 刷新项目
const refreshItem = async (item: KnowledgeItem) => { const refreshItem = async (item: KnowledgeItem) => {
const status = getProcessingStatus(item.id) const status = getProcessingStatus(item.id)
@ -155,8 +158,12 @@ export const useKnowledge = (baseId: string) => {
return return
} }
if (base && item.uniqueId) { if (base && item.uniqueId && item.uniqueIds) {
await window.api.knowledgeBase.remove({ uniqueId: item.uniqueId, base: getKnowledgeBaseParams(base) }) await window.api.knowledgeBase.remove({
uniqueId: item.uniqueId,
uniqueIds: item.uniqueIds,
base: getKnowledgeBaseParams(base)
})
updateItem({ updateItem({
...item, ...item,
processingStatus: 'pending', processingStatus: 'pending',

View File

@ -17,6 +17,7 @@ import { useKnowledge } from '@renderer/hooks/useKnowledge'
import FileManager from '@renderer/services/FileManager' import FileManager from '@renderer/services/FileManager'
import { getProviderName } from '@renderer/services/ProviderService' import { getProviderName } from '@renderer/services/ProviderService'
import { FileType, FileTypes, KnowledgeBase } from '@renderer/types' import { FileType, FileTypes, KnowledgeBase } from '@renderer/types'
import { documentExts, textExts } from '@shared/config/constant'
import { Alert, Button, Card, Divider, message, Tag, Typography, Upload } from 'antd' import { Alert, Button, Card, Divider, message, Tag, Typography, Upload } from 'antd'
import { FC } from 'react' import { FC } from 'react'
import { useTranslation } from 'react-i18next' import { useTranslation } from 'react-i18next'
@ -33,8 +34,7 @@ interface KnowledgeContentProps {
selectedBase: KnowledgeBase selectedBase: KnowledgeBase
} }
const fileTypes = ['.pdf', '.docx', '.pptx', '.xlsx', '.txt', '.md', '.html'] const fileTypes = [...documentExts, ...textExts]
const KnowledgeContent: FC<KnowledgeContentProps> = ({ selectedBase }) => { const KnowledgeContent: FC<KnowledgeContentProps> = ({ selectedBase }) => {
const { t } = useTranslation() const { t } = useTranslation()
const { const {
@ -209,7 +209,7 @@ const KnowledgeContent: FC<KnowledgeContentProps> = ({ selectedBase }) => {
style={{ marginTop: 10, background: 'transparent' }}> style={{ marginTop: 10, background: 'transparent' }}>
<p className="ant-upload-text">{t('knowledge.drag_file')}</p> <p className="ant-upload-text">{t('knowledge.drag_file')}</p>
<p className="ant-upload-hint"> <p className="ant-upload-hint">
{t('knowledge.file_hint', { file_types: fileTypes.join(', ').replaceAll('.', '') })} {t('knowledge.file_hint', { file_types: fileTypes.slice(0, 5).join(', ').replaceAll('.', '') })}
</p> </p>
</Dragger> </Dragger>
</FileSection> </FileSection>

View File

@ -1,9 +1,9 @@
import type { AddLoaderReturn } from '@llm-tools/embedjs-interfaces'
import db from '@renderer/databases' import db from '@renderer/databases'
import { getKnowledgeBaseParams } from '@renderer/services/KnowledgeService' import { getKnowledgeBaseParams } from '@renderer/services/KnowledgeService'
import store from '@renderer/store' import store from '@renderer/store'
import { clearCompletedProcessing, updateBaseItemUniqueId, updateItemProcessingStatus } from '@renderer/store/knowledge' import { clearCompletedProcessing, updateBaseItemUniqueId, updateItemProcessingStatus } from '@renderer/store/knowledge'
import { KnowledgeItem } from '@renderer/types' import { KnowledgeItem } from '@renderer/types'
import type { LoaderReturn } from '@shared/config/types'
class KnowledgeQueue { class KnowledgeQueue {
private processing: Map<string, boolean> = new Map() private processing: Map<string, boolean> = new Map()
@ -113,7 +113,7 @@ class KnowledgeQueue {
throw new Error(`[KnowledgeQueue] Source item ${item.id} not found in base ${baseId}`) throw new Error(`[KnowledgeQueue] Source item ${item.id} not found in base ${baseId}`)
} }
let result: AddLoaderReturn | null = null let result: LoaderReturn | null = null
let note, content let note, content
console.log(`[KnowledgeQueue] Processing item: ${sourceItem.content}`) console.log(`[KnowledgeQueue] Processing item: ${sourceItem.content}`)
@ -146,16 +146,16 @@ class KnowledgeQueue {
updateBaseItemUniqueId({ updateBaseItemUniqueId({
baseId, baseId,
itemId: item.id, itemId: item.id,
uniqueId: result.uniqueId uniqueId: result.uniqueId,
uniqueIds: result.uniqueIds
}) })
) )
} }
console.debug(`[KnowledgeQueue] Updated uniqueId for item ${item.id} in base ${baseId} `)
console.debug(`[KnowledgeQueue] Updated uniqueId for item ${item.id} in base ${baseId}`)
setTimeout(() => store.dispatch(clearCompletedProcessing({ baseId })), 1000) setTimeout(() => store.dispatch(clearCompletedProcessing({ baseId })), 1000)
} catch (error) { } catch (error) {
console.error(`[KnowledgeQueue] Error processing item ${item.id}:`, error) console.error(`[KnowledgeQueue] Error processing item ${item.id}: `, error)
store.dispatch( store.dispatch(
updateItemProcessingStatus({ updateItemProcessingStatus({
baseId, baseId,

View File

@ -171,12 +171,16 @@ const knowledgeSlice = createSlice({
} }
}, },
updateBaseItemUniqueId(state, action: PayloadAction<{ baseId: string; itemId: string; uniqueId: string }>) { updateBaseItemUniqueId(
state,
action: PayloadAction<{ baseId: string; itemId: string; uniqueId: string; uniqueIds: string[] }>
) {
const base = state.bases.find((b) => b.id === action.payload.baseId) const base = state.bases.find((b) => b.id === action.payload.baseId)
if (base) { if (base) {
const item = base.items.find((item) => item.id === action.payload.itemId) const item = base.items.find((item) => item.id === action.payload.itemId)
if (item) { if (item) {
item.uniqueId = action.payload.uniqueId item.uniqueId = action.payload.uniqueId
item.uniqueIds = action.payload.uniqueIds
} }
} }
} }

View File

@ -219,6 +219,7 @@ export type KnowledgeItem = {
id: string id: string
baseId?: string baseId?: string
uniqueId?: string uniqueId?: string
uniqueIds?: string[]
type: KnowledgeItemType type: KnowledgeItemType
content: string | FileType content: string | FileType
created_at: number created_at: number

View File

@ -2601,6 +2601,13 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"@types/md5@npm:^2.3.5":
version: 2.3.5
resolution: "@types/md5@npm:2.3.5"
checksum: 10c0/aef9c40ab0ec61f1ae2acb04ee04e55b99eeec64af58176ef36d4feaf6aac335f32caca07eca0e3a1813fc362bf0b8663423250e24b39ebcfdc24c6ca4abe1cf
languageName: node
linkType: hard
"@types/mdast@npm:^4.0.0": "@types/mdast@npm:^4.0.0":
version: 4.0.4 version: 4.0.4
resolution: "@types/mdast@npm:4.0.4" resolution: "@types/mdast@npm:4.0.4"
@ -3002,6 +3009,7 @@ __metadata:
"@types/fs-extra": "npm:^11" "@types/fs-extra": "npm:^11"
"@types/lodash": "npm:^4.17.5" "@types/lodash": "npm:^4.17.5"
"@types/markdown-it": "npm:^14" "@types/markdown-it": "npm:^14"
"@types/md5": "npm:^2.3.5"
"@types/node": "npm:^18.19.9" "@types/node": "npm:^18.19.9"
"@types/react": "npm:^18.2.48" "@types/react": "npm:^18.2.48"
"@types/react-dom": "npm:^18.2.18" "@types/react-dom": "npm:^18.2.18"