diff --git a/src/renderer/src/config/models.ts b/src/renderer/src/config/models.ts
index 1034467a..14436c1a 100644
--- a/src/renderer/src/config/models.ts
+++ b/src/renderer/src/config/models.ts
@@ -262,6 +262,44 @@ export function getModelLogo(modelId: string) {
 }
 
 export const SYSTEM_MODELS: Record<string, Model[]> = {
+  aihubmix: [
+    {
+      id: 'gpt-4o',
+      provider: 'aihubmix',
+      name: 'GPT-4o',
+      group: 'GPT-4o'
+    },
+    {
+      id: 'claude-3-5-sonnet-latest',
+      provider: 'aihubmix',
+      name: 'Claude 3.5 Sonnet',
+      group: 'Claude 3.5'
+    },
+    {
+      id: 'gemini-2.0-flash-exp-search',
+      provider: 'aihubmix',
+      name: 'Gemini 2.0 Flash Exp Search',
+      group: 'Gemini 2.0'
+    },
+    {
+      id: 'deepseek-chat',
+      provider: 'aihubmix',
+      name: 'DeepSeek Chat',
+      group: 'DeepSeek Chat'
+    },
+    {
+      id: 'aihubmix-Llama-3-3-70B-Instruct',
+      provider: 'aihubmix',
+      name: 'Llama-3.3-70b',
+      group: 'Llama 3.3'
+    },
+    {
+      id: 'Qwen/QVQ-72B-Preview',
+      provider: 'aihubmix',
+      name: 'Qwen/QVQ-72B',
+      group: 'Qwen'
+    }
+  ],
   ollama: [],
   silicon: [
     {
@@ -750,20 +788,6 @@ export const SYSTEM_MODELS: Record<string, Model[]> = {
       group: 'Jina Embeddings V3'
     }
   ],
-  aihubmix: [
-    {
-      id: 'gpt-4o-mini',
-      provider: 'aihubmix',
-      name: 'GPT-4o Mini',
-      group: 'GPT-4o'
-    },
-    {
-      id: 'aihubmix-Llama-3-70B-Instruct',
-      provider: 'aihubmix',
-      name: 'Llama 3 70B Instruct',
-      group: 'Llama3'
-    }
-  ],
   fireworks: [
     {
       id: 'accounts/fireworks/models/mythomax-l2-13b',
@@ -1017,5 +1041,9 @@ export function isWebSearchModel(model: Model): boolean {
     return model?.id !== 'hunyuan-lite'
   }
 
+  if (provider.id === 'aihubmix') {
+    return model?.id === 'gemini-2.0-flash-exp-search'
+  }
+
   return false
 }
diff --git a/src/renderer/src/store/llm.ts b/src/renderer/src/store/llm.ts
index 9ae332b2..384cd36c 100644
--- a/src/renderer/src/store/llm.ts
+++ b/src/renderer/src/store/llm.ts
@@ -33,6 +33,16 @@ const initialState: LlmState = {
       isSystem: true,
       enabled: true
     },
+    {
+      id: 'aihubmix',
+      name: 'AiHubMix',
+      type: 'openai',
+      apiKey: '',
+      apiHost: 'https://aihubmix.com',
+      models: SYSTEM_MODELS.aihubmix,
+      isSystem: true,
+      enabled: false
+    },
     {
       id: 'ollama',
       name: 'Ollama',
@@ -313,16 +323,6 @@ const initialState: LlmState = {
       models: SYSTEM_MODELS.jina,
       isSystem: true,
       enabled: false
-    },
-    {
-      id: 'aihubmix',
-      name: 'AiHubMix',
-      type: 'openai',
-      apiKey: '',
-      apiHost: 'https://aihubmix.com',
-      models: SYSTEM_MODELS.aihubmix,
-      isSystem: true,
-      enabled: false
     }
   ],
   settings: {