feat: check api use selected model

Author: kangfenmao
Date: 2025-01-13 16:11:09 +08:00
Parent: cf7c0fc1fc
Commit: 2ad6a1f24c
14 changed files with 128 additions and 26 deletions

View File

@@ -231,6 +231,7 @@
   "message": {
     "api.connection.failed": "Connection failed",
     "api.connection.success": "Connection successful",
+    "api.check.model.title": "Select the model to use for detection",
     "assistant.added.content": "Assistant added successfully",
     "backup.failed": "Backup failed",
     "backup.success": "Backup successful",

View File

@@ -231,6 +231,7 @@
   "message": {
     "api.connection.failed": "接続に失敗しました",
     "api.connection.success": "接続に成功しました",
+    "api.check.model.title": "検出に使用するモデルを選択してください",
     "assistant.added.content": "アシスタントが追加されました",
     "backup.failed": "バックアップに失敗しました",
     "backup.success": "バックアップに成功しました",

View File

@@ -231,6 +231,7 @@
   "message": {
     "api.connection.failed": "Соединение не удалось",
     "api.connection.success": "Соединение успешно",
+    "api.check.model.title": "Выберите модель для проверки",
     "assistant.added.content": "Ассистент успешно добавлен",
     "backup.failed": "Создание резервной копии не удалось",
     "backup.success": "Резервная копия успешно создана",

View File

@@ -232,6 +232,7 @@
   "message": {
     "api.connection.failed": "连接失败",
     "api.connection.success": "连接成功",
+    "api.check.model.title": "请选择要检测的模型",
     "assistant.added.content": "智能体添加成功",
     "backup.failed": "备份失败",
     "backup.success": "备份成功",

View File

@@ -231,6 +231,7 @@
   "message": {
     "api.connection.failed": "連接失敗",
     "api.connection.success": "連接成功",
+    "api.check.model.title": "請選擇要檢測的模型",
     "assistant.added.content": "智能體添加成功",
     "backup.failed": "備份失敗",
     "backup.success": "備份成功",

View File

@@ -2,13 +2,16 @@ import { CheckCircleFilled, CloseCircleFilled, LoadingOutlined } from '@ant-desi
 import Scrollbar from '@renderer/components/Scrollbar'
 import { TopView } from '@renderer/components/TopView'
 import { checkApi } from '@renderer/services/ApiService'
+import { Model } from '@renderer/types'
+import { Provider } from '@renderer/types'
 import { Button, List, Modal, Space, Spin, Typography } from 'antd'
 import { useState } from 'react'
 import { useTranslation } from 'react-i18next'

 interface ShowParams {
   title: string
-  provider: any
+  provider: Provider
+  model: Model
   apiKeys: string[]
 }
@@ -22,7 +25,7 @@ interface KeyStatus {
   checking?: boolean
 }

-const PopupContainer: React.FC<Props> = ({ title, provider, apiKeys, resolve }) => {
+const PopupContainer: React.FC<Props> = ({ title, provider, model, apiKeys, resolve }) => {
   const [open, setOpen] = useState(true)
   const [keyStatuses, setKeyStatuses] = useState<KeyStatus[]>(() => {
     const uniqueKeys = new Set(apiKeys)
@@ -39,7 +42,7 @@ const PopupContainer: React.FC<Props> = ({ title, provider, apiKeys, resolve })
     for (let i = 0; i < newStatuses.length; i++) {
       setKeyStatuses((prev) => prev.map((status, idx) => (idx === i ? { ...status, checking: true } : status)))

-      const valid = await checkApi({ ...provider, apiKey: newStatuses[i].key })
+      const valid = await checkApi({ ...provider, apiKey: newStatuses[i].key }, model)
       setKeyStatuses((prev) =>
         prev.map((status, idx) => (idx === i ? { ...status, checking: false, isValid: valid } : status))

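ShowParams is now strictly typed, so every caller of ApiCheckPopup has to supply the Provider and the Model the keys will be probed against. A small usage sketch that mirrors the call made from the provider settings screen below; the wrapper function and its plain-text title are illustrative, not part of the commit:

import ApiCheckPopup from './ApiCheckPopup'
import { Model, Provider } from '@renderer/types'

// Hypothetical helper: validate several API keys against one explicitly chosen model.
async function checkKeys(provider: Provider, apiHost: string, model: Model, apiKeys: string[]) {
  return ApiCheckPopup.show({
    title: 'Check multiple keys', // the real screen uses t('settings.provider.check_multiple_keys')
    provider: { ...provider, apiHost },
    model,
    apiKeys
  })
}
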
View File

@@ -39,6 +39,7 @@ import ApiCheckPopup from './ApiCheckPopup'
 import EditModelsPopup from './EditModelsPopup'
 import GraphRAGSettings from './GraphRAGSettings'
 import OllamSettings from './OllamaSettings'
+import SelectProviderModelPopup from './SelectProviderModelPopup'

 interface Props {
   provider: Provider
@@ -83,14 +84,23 @@ const ProviderSetting: FC<Props> = ({ provider: _provider }) => {
       return
     }

+    const model = await SelectProviderModelPopup.show({ provider })
+
+    if (!model) {
+      window.message.error({ content: i18n.t('message.error.enter.model'), key: 'api-check' })
+      return
+    }
+
     if (apiKey.includes(',')) {
       const keys = apiKey
         .split(',')
         .map((k) => k.trim())
         .filter((k) => k)

       const result = await ApiCheckPopup.show({
         title: t('settings.provider.check_multiple_keys'),
         provider: { ...provider, apiHost },
+        model,
         apiKeys: keys
       })
@@ -100,7 +110,8 @@ const ProviderSetting: FC<Props> = ({ provider: _provider }) => {
       }
     } else {
       setApiChecking(true)
-      const valid = await checkApi({ ...provider, apiKey, apiHost })
+      const valid = await checkApi({ ...provider, apiKey, apiHost }, model)
       window.message[valid ? 'success' : 'error']({
         key: 'api-check',
         style: { marginTop: '3vh' },

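Taken together, the single-key path now works in two steps: ask the user which model to probe, then run checkApi with that model instead of letting the provider pick one implicitly. A condensed sketch of that flow; checkAndNotify is a hypothetical wrapper, and the real component additionally handles the comma-separated multi-key branch, the loading state and the success/error toast:

import { checkApi } from '@renderer/services/ApiService'
import { Provider } from '@renderer/types'

import SelectProviderModelPopup from './SelectProviderModelPopup'

// Hypothetical wrapper around the flow shown in the diff above.
async function checkAndNotify(provider: Provider, apiKey: string, apiHost: string): Promise<boolean> {
  // 1. Let the user pick the model used for the probe request.
  const model = await SelectProviderModelPopup.show({ provider })
  if (!model) return false

  // 2. Run the API check against the selected model.
  return checkApi({ ...provider, apiKey, apiHost }, model)
}
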
View File

@@ -0,0 +1,90 @@
+import { TopView } from '@renderer/components/TopView'
+import { isEmbeddingModel } from '@renderer/config/models'
+import i18n from '@renderer/i18n'
+import { Provider } from '@renderer/types'
+import { Modal, Select } from 'antd'
+import { last, orderBy } from 'lodash'
+import { useState } from 'react'
+
+interface ShowParams {
+  provider: Provider
+}
+
+interface Props extends ShowParams {
+  reject: (reason?: any) => void
+  resolve: (data: any) => void
+}
+
+const PopupContainer: React.FC<Props> = ({ provider, resolve, reject }) => {
+  const models = orderBy(provider.models, 'group').filter((i) => !isEmbeddingModel(i))
+  const [open, setOpen] = useState(true)
+  const [model, setModel] = useState(last(models))
+
+  const onOk = () => {
+    if (!model) {
+      window.message.error({ content: i18n.t('message.error.enter.model'), key: 'api-check' })
+      return
+    }
+
+    setOpen(false)
+    resolve(model)
+  }
+
+  const onCancel = () => {
+    setOpen(false)
+    setTimeout(reject, 300)
+  }
+
+  const onClose = () => {
+    TopView.hide(TopViewKey)
+  }
+
+  SelectProviderModelPopup.hide = onCancel
+
+  return (
+    <Modal
+      title={i18n.t('message.api.check.model.title', { model: model })}
+      open={open}
+      onOk={onOk}
+      onCancel={onCancel}
+      afterClose={onClose}
+      transitionName="ant-move-down"
+      width={300}
+      centered>
+      <Select
+        value={model?.id}
+        options={models.map((m) => ({ label: m.name, value: m.id }))}
+        style={{ width: '100%' }}
+        onChange={(value) => {
+          setModel(provider.models.find((m) => m.id === value)!)
+        }}
+      />
+    </Modal>
+  )
+}
+
+const TopViewKey = 'SelectProviderModelPopup'
+
+export default class SelectProviderModelPopup {
+  static topviewId = 0
+  static hide() {
+    TopView.hide(TopViewKey)
+  }
+  static show(props: ShowParams) {
+    return new Promise<any>((resolve, reject) => {
+      TopView.show(
+        <PopupContainer
+          {...props}
+          reject={() => {
+            reject()
+            TopView.hide(TopViewKey)
+          }}
+          resolve={(v) => {
+            resolve(v)
+            TopView.hide(TopViewKey)
+          }}
+        />,
+        TopViewKey
+      )
+    })
+  }
+}

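SelectProviderModelPopup resolves with the chosen model (embedding models are filtered out of the list) and rejects when the dialog is cancelled, so callers that care about cancellation should wrap the await. A small usage sketch; pickCheckModel is a hypothetical helper, not part of the commit:

import { Model, Provider } from '@renderer/types'

import SelectProviderModelPopup from './SelectProviderModelPopup'

// Hypothetical helper: returns the selected model, or undefined if the user cancels.
async function pickCheckModel(provider: Provider): Promise<Model | undefined> {
  try {
    return await SelectProviderModelPopup.show({ provider })
  } catch {
    return undefined // onCancel rejects after the close animation finishes
  }
}
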
View File

@@ -36,8 +36,8 @@ export default class AiProvider {
     return this.sdk.generateText({ prompt, content })
   }

-  public async check(): Promise<{ valid: boolean; error: Error | null }> {
-    return this.sdk.check()
+  public async check(model: Model): Promise<{ valid: boolean; error: Error | null }> {
+    return this.sdk.check(model)
   }

   public async models(): Promise<OpenAI.Models.Model[]> {

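AiProvider stays a thin facade: check(model) simply forwards the selected model to the vendor-specific SDK, so every caller gets the same { valid, error } result shape. A short illustrative example; describeCheck is hypothetical and not part of the commit:

import AiProvider from '../providers/AiProvider'
import { Model, Provider } from '@renderer/types'

// Hypothetical example: turn the check result into a human-readable status line.
async function describeCheck(provider: Provider, model: Model): Promise<string> {
  const { valid, error } = await new AiProvider(provider).check(model)
  return valid ? 'API key works' : `API check failed: ${error?.message ?? 'unknown error'}`
}
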
View File

@@ -1,15 +1,14 @@
 import Anthropic from '@anthropic-ai/sdk'
 import { MessageCreateParamsNonStreaming, MessageParam } from '@anthropic-ai/sdk/resources'
 import { DEFAULT_MAX_TOKENS } from '@renderer/config/constant'
-import { isEmbeddingModel } from '@renderer/config/models'
 import { getStoreSetting } from '@renderer/hooks/useSettings'
 import i18n from '@renderer/i18n'
 import { getAssistantSettings, getDefaultModel, getTopNamingModel } from '@renderer/services/AssistantService'
 import { EVENT_NAMES } from '@renderer/services/EventService'
 import { filterContextMessages } from '@renderer/services/MessagesService'
-import { Assistant, FileTypes, Message, Provider, Suggestion } from '@renderer/types'
+import { Assistant, FileTypes, Message, Model, Provider, Suggestion } from '@renderer/types'
 import { removeSpecialCharacters } from '@renderer/utils'
-import { first, flatten, last, sum, takeRight } from 'lodash'
+import { first, flatten, sum, takeRight } from 'lodash'
 import OpenAI from 'openai'

 import { CompletionsParams } from '.'
@@ -239,9 +238,7 @@ export default class AnthropicProvider extends BaseProvider {
     return []
   }

-  public async check(): Promise<{ valid: boolean; error: Error | null }> {
-    const model = last(this.provider.models.filter((m) => !isEmbeddingModel(m)))
+  public async check(model: Model): Promise<{ valid: boolean; error: Error | null }> {
     if (!model) {
       return { valid: false, error: new Error('No model found') }
     }

View File

@@ -24,7 +24,7 @@ export default abstract class BaseProvider {
   abstract summaries(messages: Message[], assistant: Assistant): Promise<string>
   abstract suggestions(messages: Message[], assistant: Assistant): Promise<Suggestion[]>
   abstract generateText({ prompt, content }: { prompt: string; content: string }): Promise<string>
-  abstract check(): Promise<{ valid: boolean; error: Error | null }>
+  abstract check(model: Model): Promise<{ valid: boolean; error: Error | null }>
   abstract models(): Promise<OpenAI.Models.Model[]>
   abstract generateImage(params: GenerateImageParams): Promise<string[]>
   abstract getEmbeddingDimensions(model: Model): Promise<number>

View File

@@ -9,7 +9,7 @@ import {
   RequestOptions,
   TextPart
 } from '@google/generative-ai'
-import { isEmbeddingModel, isWebSearchModel } from '@renderer/config/models'
+import { isWebSearchModel } from '@renderer/config/models'
 import { getStoreSetting } from '@renderer/hooks/useSettings'
 import i18n from '@renderer/i18n'
 import { getAssistantSettings, getDefaultModel, getTopNamingModel } from '@renderer/services/AssistantService'
@@ -18,7 +18,7 @@ import { filterContextMessages } from '@renderer/services/MessagesService'
 import { Assistant, FileType, FileTypes, Message, Model, Provider, Suggestion } from '@renderer/types'
 import { removeSpecialCharacters } from '@renderer/utils'
 import axios from 'axios'
-import { first, isEmpty, last, takeRight } from 'lodash'
+import { first, isEmpty, takeRight } from 'lodash'
 import OpenAI from 'openai'

 import { CompletionsParams } from '.'
@@ -291,9 +291,7 @@ export default class GeminiProvider extends BaseProvider {
     return []
   }

-  public async check(): Promise<{ valid: boolean; error: Error | null }> {
-    const model = last(this.provider.models.filter((m) => !isEmbeddingModel(m)))
+  public async check(model: Model): Promise<{ valid: boolean; error: Error | null }> {
     if (!model) {
       return { valid: false, error: new Error('No model found') }
     }

View File

@@ -1,4 +1,4 @@
-import { getWebSearchParams, isEmbeddingModel, isSupportedModel, isVisionModel } from '@renderer/config/models'
+import { getWebSearchParams, isSupportedModel, isVisionModel } from '@renderer/config/models'
 import { getStoreSetting } from '@renderer/hooks/useSettings'
 import i18n from '@renderer/i18n'
 import { getAssistantSettings, getDefaultModel, getTopNamingModel } from '@renderer/services/AssistantService'
@@ -6,7 +6,7 @@ import { EVENT_NAMES } from '@renderer/services/EventService'
 import { filterContextMessages } from '@renderer/services/MessagesService'
 import { Assistant, FileTypes, GenerateImageParams, Message, Model, Provider, Suggestion } from '@renderer/types'
 import { removeSpecialCharacters } from '@renderer/utils'
-import { last, takeRight } from 'lodash'
+import { takeRight } from 'lodash'
 import OpenAI, { AzureOpenAI } from 'openai'
 import {
   ChatCompletionContentPart,
@@ -286,9 +286,7 @@ export default class OpenAIProvider extends BaseProvider {
     return response?.questions?.filter(Boolean)?.map((q: any) => ({ content: q })) || []
   }

-  public async check(): Promise<{ valid: boolean; error: Error | null }> {
-    const model = last(this.provider.models.filter((m) => !isEmbeddingModel(m)))
+  public async check(model: Model): Promise<{ valid: boolean; error: Error | null }> {
     if (!model) {
       return { valid: false, error: new Error('No model found') }
     }

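The hunks above stop before the body of check(), so the actual probe request is not shown in this diff. Purely as an illustration, a check(model) that receives the selected model would typically issue a minimal completion against model.id and map any thrown error into the { valid, error } result; the client construction and request parameters below are assumptions, not the project's code. The Anthropic and Gemini providers would follow the same pattern with their own SDKs:

import OpenAI from 'openai'

import { Model, Provider } from '@renderer/types'

// Sketch only: a minimal "does this key respond for this model" probe.
async function probeModel(provider: Provider, model: Model): Promise<{ valid: boolean; error: Error | null }> {
  try {
    const sdk = new OpenAI({ apiKey: provider.apiKey, baseURL: provider.apiHost, dangerouslyAllowBrowser: true })
    await sdk.chat.completions.create({
      model: model.id,
      messages: [{ role: 'user', content: 'hi' }],
      max_tokens: 1
    })
    return { valid: true, error: null }
  } catch (error) {
    return { valid: false, error: error as Error }
  }
}
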
View File

@@ -1,7 +1,7 @@
 import i18n from '@renderer/i18n'
 import store from '@renderer/store'
 import { setGenerating } from '@renderer/store/runtime'
-import { Assistant, Message, Provider, Suggestion, Topic } from '@renderer/types'
+import { Assistant, Message, Model, Provider, Suggestion, Topic } from '@renderer/types'
 import { isEmpty } from 'lodash'

 import AiProvider from '../providers/AiProvider'
@@ -184,7 +184,7 @@ export async function fetchSuggestions({
   }
 }

-export async function checkApi(provider: Provider) {
+export async function checkApi(provider: Provider, model: Model) {
   const key = 'api-check'
   const style = { marginTop: '3vh' }
@@ -207,7 +207,7 @@ export async function checkApi(provider: Provider) {
   const AI = new AiProvider(provider)

-  const { valid } = await AI.check()
+  const { valid } = await AI.check(model)

   return valid
 }