feat: check api use selected model
commit 2ad6a1f24c
parent cf7c0fc1fc
@@ -231,6 +231,7 @@
   "message": {
     "api.connection.failed": "Connection failed",
     "api.connection.success": "Connection successful",
+    "api.check.model.title": "Select the model to use for detection",
     "assistant.added.content": "Assistant added successfully",
     "backup.failed": "Backup failed",
     "backup.success": "Backup successful",

@@ -231,6 +231,7 @@
   "message": {
     "api.connection.failed": "接続に失敗しました",
     "api.connection.success": "接続に成功しました",
+    "api.check.model.title": "検出に使用するモデルを選択してください",
     "assistant.added.content": "アシスタントが追加されました",
     "backup.failed": "バックアップに失敗しました",
     "backup.success": "バックアップに成功しました",

@@ -231,6 +231,7 @@
   "message": {
     "api.connection.failed": "Соединение не удалось",
     "api.connection.success": "Соединение успешно",
+    "api.check.model.title": "Выберите модель для проверки",
     "assistant.added.content": "Ассистент успешно добавлен",
     "backup.failed": "Создание резервной копии не удалось",
     "backup.success": "Резервная копия успешно создана",

@@ -232,6 +232,7 @@
   "message": {
     "api.connection.failed": "连接失败",
     "api.connection.success": "连接成功",
+    "api.check.model.title": "请选择要检测的模型",
     "assistant.added.content": "智能体添加成功",
     "backup.failed": "备份失败",
     "backup.success": "备份成功",

@@ -231,6 +231,7 @@
   "message": {
     "api.connection.failed": "連接失敗",
     "api.connection.success": "連接成功",
+    "api.check.model.title": "請選擇要檢測的模型",
     "assistant.added.content": "智能體添加成功",
     "backup.failed": "備份失敗",
     "backup.success": "備份成功",

@@ -2,13 +2,16 @@ import { CheckCircleFilled, CloseCircleFilled, LoadingOutlined } from '@ant-desi
 import Scrollbar from '@renderer/components/Scrollbar'
 import { TopView } from '@renderer/components/TopView'
 import { checkApi } from '@renderer/services/ApiService'
+import { Model } from '@renderer/types'
+import { Provider } from '@renderer/types'
 import { Button, List, Modal, Space, Spin, Typography } from 'antd'
 import { useState } from 'react'
 import { useTranslation } from 'react-i18next'
 
 interface ShowParams {
   title: string
-  provider: any
+  provider: Provider
+  model: Model
   apiKeys: string[]
 }
 
@@ -22,7 +25,7 @@ interface KeyStatus {
   checking?: boolean
 }
 
-const PopupContainer: React.FC<Props> = ({ title, provider, apiKeys, resolve }) => {
+const PopupContainer: React.FC<Props> = ({ title, provider, model, apiKeys, resolve }) => {
   const [open, setOpen] = useState(true)
   const [keyStatuses, setKeyStatuses] = useState<KeyStatus[]>(() => {
     const uniqueKeys = new Set(apiKeys)
@@ -39,7 +42,7 @@ const PopupContainer: React.FC<Props> = ({ title, provider, apiKeys, resolve })
     for (let i = 0; i < newStatuses.length; i++) {
       setKeyStatuses((prev) => prev.map((status, idx) => (idx === i ? { ...status, checking: true } : status)))
 
-      const valid = await checkApi({ ...provider, apiKey: newStatuses[i].key })
+      const valid = await checkApi({ ...provider, apiKey: newStatuses[i].key }, model)
 
       setKeyStatuses((prev) =>
         prev.map((status, idx) => (idx === i ? { ...status, checking: false, isValid: valid } : status))

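For reference, a minimal sketch (not part of the diff) of how the updated ApiCheckPopup is invoked once a model has been chosen. It mirrors the multi-key branch in the ProviderSetting hunks below; the helper name and the plain title string are assumptions, the real call site uses t('settings.provider.check_multiple_keys').

// Hypothetical helper for illustration only; the real caller is the comma-separated-key
// branch in the ProviderSetting component.
import { Model, Provider } from '@renderer/types'

import ApiCheckPopup from './ApiCheckPopup'

async function checkCommaSeparatedKeys(provider: Provider, model: Model, apiKey: string) {
  const keys = apiKey
    .split(',')
    .map((k) => k.trim())
    .filter((k) => k)

  // Inside the popup, each key is tested via checkApi({ ...provider, apiKey: key }, model).
  return ApiCheckPopup.show({
    title: 'Check multiple keys',
    provider,
    model,
    apiKeys: keys
  })
}
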
@@ -39,6 +39,7 @@ import ApiCheckPopup from './ApiCheckPopup'
 import EditModelsPopup from './EditModelsPopup'
 import GraphRAGSettings from './GraphRAGSettings'
 import OllamSettings from './OllamaSettings'
+import SelectProviderModelPopup from './SelectProviderModelPopup'
 
 interface Props {
   provider: Provider
@@ -83,14 +84,23 @@ const ProviderSetting: FC<Props> = ({ provider: _provider }) => {
       return
     }
 
+    const model = await SelectProviderModelPopup.show({ provider })
+
+    if (!model) {
+      window.message.error({ content: i18n.t('message.error.enter.model'), key: 'api-check' })
+      return
+    }
+
    if (apiKey.includes(',')) {
      const keys = apiKey
        .split(',')
        .map((k) => k.trim())
        .filter((k) => k)

      const result = await ApiCheckPopup.show({
        title: t('settings.provider.check_multiple_keys'),
        provider: { ...provider, apiHost },
+        model,
        apiKeys: keys
      })
@@ -100,7 +110,8 @@ const ProviderSetting: FC<Props> = ({ provider: _provider }) => {
      }
    } else {
      setApiChecking(true)
-      const valid = await checkApi({ ...provider, apiKey, apiHost })
+
+      const valid = await checkApi({ ...provider, apiKey, apiHost }, model)
      window.message[valid ? 'success' : 'error']({
        key: 'api-check',
        style: { marginTop: '3vh' },

@@ -0,0 +1,90 @@
+import { TopView } from '@renderer/components/TopView'
+import { isEmbeddingModel } from '@renderer/config/models'
+import i18n from '@renderer/i18n'
+import { Provider } from '@renderer/types'
+import { Modal, Select } from 'antd'
+import { last, orderBy } from 'lodash'
+import { useState } from 'react'
+
+interface ShowParams {
+  provider: Provider
+}
+
+interface Props extends ShowParams {
+  reject: (reason?: any) => void
+  resolve: (data: any) => void
+}
+
+const PopupContainer: React.FC<Props> = ({ provider, resolve, reject }) => {
+  const models = orderBy(provider.models, 'group').filter((i) => !isEmbeddingModel(i))
+  const [open, setOpen] = useState(true)
+  const [model, setModel] = useState(last(models))
+
+  const onOk = () => {
+    if (!model) {
+      window.message.error({ content: i18n.t('message.error.enter.model'), key: 'api-check' })
+      return
+    }
+    setOpen(false)
+    resolve(model)
+  }
+
+  const onCancel = () => {
+    setOpen(false)
+    setTimeout(reject, 300)
+  }
+
+  const onClose = () => {
+    TopView.hide(TopViewKey)
+  }
+
+  SelectProviderModelPopup.hide = onCancel
+
+  return (
+    <Modal
+      title={i18n.t('message.api.check.model.title', { model: model })}
+      open={open}
+      onOk={onOk}
+      onCancel={onCancel}
+      afterClose={onClose}
+      transitionName="ant-move-down"
+      width={300}
+      centered>
+      <Select
+        value={model?.id}
+        options={models.map((m) => ({ label: m.name, value: m.id }))}
+        style={{ width: '100%' }}
+        onChange={(value) => {
+          setModel(provider.models.find((m) => m.id === value)!)
+        }}
+      />
+    </Modal>
+  )
+}
+
+const TopViewKey = 'SelectProviderModelPopup'
+
+export default class SelectProviderModelPopup {
+  static topviewId = 0
+  static hide() {
+    TopView.hide(TopViewKey)
+  }
+  static show(props: ShowParams) {
+    return new Promise<any>((resolve, reject) => {
+      TopView.show(
+        <PopupContainer
+          {...props}
+          reject={() => {
+            reject()
+            TopView.hide(TopViewKey)
+          }}
+          resolve={(v) => {
+            resolve(v)
+            TopView.hide(TopViewKey)
+          }}
+        />,
+        TopViewKey
+      )
+    })
+  }
+}

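A minimal usage sketch of the new popup, not part of the commit: show() resolves with the selected Model and rejects when the user cancels, so a caller that wants to distinguish cancellation can wrap it in try/catch. The helper name below is an assumption; the real call site is inlined in the ProviderSetting component shown above.

// Hypothetical helper for illustration only.
import { checkApi } from '@renderer/services/ApiService'
import { Provider } from '@renderer/types'

import SelectProviderModelPopup from './SelectProviderModelPopup'

async function checkWithSelectedModel(provider: Provider): Promise<boolean | undefined> {
  try {
    // Resolves with the Model chosen in the modal; rejects if the user closes it.
    const model = await SelectProviderModelPopup.show({ provider })
    // The selected model is threaded through checkApi -> AiProvider.check(model).
    return await checkApi(provider, model)
  } catch {
    // User cancelled without selecting a model.
    return undefined
  }
}
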
@@ -36,8 +36,8 @@ export default class AiProvider {
     return this.sdk.generateText({ prompt, content })
   }
 
-  public async check(): Promise<{ valid: boolean; error: Error | null }> {
-    return this.sdk.check()
+  public async check(model: Model): Promise<{ valid: boolean; error: Error | null }> {
+    return this.sdk.check(model)
   }
 
   public async models(): Promise<OpenAI.Models.Model[]> {

@@ -1,15 +1,14 @@
 import Anthropic from '@anthropic-ai/sdk'
 import { MessageCreateParamsNonStreaming, MessageParam } from '@anthropic-ai/sdk/resources'
 import { DEFAULT_MAX_TOKENS } from '@renderer/config/constant'
-import { isEmbeddingModel } from '@renderer/config/models'
 import { getStoreSetting } from '@renderer/hooks/useSettings'
 import i18n from '@renderer/i18n'
 import { getAssistantSettings, getDefaultModel, getTopNamingModel } from '@renderer/services/AssistantService'
 import { EVENT_NAMES } from '@renderer/services/EventService'
 import { filterContextMessages } from '@renderer/services/MessagesService'
-import { Assistant, FileTypes, Message, Provider, Suggestion } from '@renderer/types'
+import { Assistant, FileTypes, Message, Model, Provider, Suggestion } from '@renderer/types'
 import { removeSpecialCharacters } from '@renderer/utils'
-import { first, flatten, last, sum, takeRight } from 'lodash'
+import { first, flatten, sum, takeRight } from 'lodash'
 import OpenAI from 'openai'
 
 import { CompletionsParams } from '.'
@@ -239,9 +238,7 @@ export default class AnthropicProvider extends BaseProvider {
     return []
   }
 
-  public async check(): Promise<{ valid: boolean; error: Error | null }> {
-    const model = last(this.provider.models.filter((m) => !isEmbeddingModel(m)))
-
+  public async check(model: Model): Promise<{ valid: boolean; error: Error | null }> {
     if (!model) {
       return { valid: false, error: new Error('No model found') }
     }

@@ -24,7 +24,7 @@ export default abstract class BaseProvider {
   abstract summaries(messages: Message[], assistant: Assistant): Promise<string>
   abstract suggestions(messages: Message[], assistant: Assistant): Promise<Suggestion[]>
   abstract generateText({ prompt, content }: { prompt: string; content: string }): Promise<string>
-  abstract check(): Promise<{ valid: boolean; error: Error | null }>
+  abstract check(model: Model): Promise<{ valid: boolean; error: Error | null }>
   abstract models(): Promise<OpenAI.Models.Model[]>
   abstract generateImage(params: GenerateImageParams): Promise<string[]>
   abstract getEmbeddingDimensions(model: Model): Promise<number>

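For context, a self-contained sketch of the contract the new abstract signature imposes: the caller now supplies the model to test, instead of each provider picking last(models.filter((m) => !isEmbeddingModel(m))) itself. The Model and CheckResult interfaces below are local stand-ins, and StubProvider is an assumption; the real providers extend BaseProvider and call their SDKs after the guard.

// Standalone illustration only; not code from the repository.
interface Model {
  id: string
  name: string
}

interface CheckResult {
  valid: boolean
  error: Error | null
}

class StubProvider {
  // Mirrors the new abstract signature: check(model) instead of check().
  async check(model: Model): Promise<CheckResult> {
    if (!model) {
      // Same guard the Anthropic/Gemini/OpenAI hunks keep after the change.
      return { valid: false, error: new Error('No model found') }
    }
    // A real provider would issue a minimal completion request against `model` here.
    return { valid: true, error: null }
  }
}
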
@@ -9,7 +9,7 @@ import {
   RequestOptions,
   TextPart
 } from '@google/generative-ai'
-import { isEmbeddingModel, isWebSearchModel } from '@renderer/config/models'
+import { isWebSearchModel } from '@renderer/config/models'
 import { getStoreSetting } from '@renderer/hooks/useSettings'
 import i18n from '@renderer/i18n'
 import { getAssistantSettings, getDefaultModel, getTopNamingModel } from '@renderer/services/AssistantService'
@@ -18,7 +18,7 @@ import { filterContextMessages } from '@renderer/services/MessagesService'
 import { Assistant, FileType, FileTypes, Message, Model, Provider, Suggestion } from '@renderer/types'
 import { removeSpecialCharacters } from '@renderer/utils'
 import axios from 'axios'
-import { first, isEmpty, last, takeRight } from 'lodash'
+import { first, isEmpty, takeRight } from 'lodash'
 import OpenAI from 'openai'
 
 import { CompletionsParams } from '.'
@@ -291,9 +291,7 @@ export default class GeminiProvider extends BaseProvider {
     return []
   }
 
-  public async check(): Promise<{ valid: boolean; error: Error | null }> {
-    const model = last(this.provider.models.filter((m) => !isEmbeddingModel(m)))
-
+  public async check(model: Model): Promise<{ valid: boolean; error: Error | null }> {
     if (!model) {
       return { valid: false, error: new Error('No model found') }
     }

@@ -1,4 +1,4 @@
-import { getWebSearchParams, isEmbeddingModel, isSupportedModel, isVisionModel } from '@renderer/config/models'
+import { getWebSearchParams, isSupportedModel, isVisionModel } from '@renderer/config/models'
 import { getStoreSetting } from '@renderer/hooks/useSettings'
 import i18n from '@renderer/i18n'
 import { getAssistantSettings, getDefaultModel, getTopNamingModel } from '@renderer/services/AssistantService'
@@ -6,7 +6,7 @@ import { EVENT_NAMES } from '@renderer/services/EventService'
 import { filterContextMessages } from '@renderer/services/MessagesService'
 import { Assistant, FileTypes, GenerateImageParams, Message, Model, Provider, Suggestion } from '@renderer/types'
 import { removeSpecialCharacters } from '@renderer/utils'
-import { last, takeRight } from 'lodash'
+import { takeRight } from 'lodash'
 import OpenAI, { AzureOpenAI } from 'openai'
 import {
   ChatCompletionContentPart,
@@ -286,9 +286,7 @@ export default class OpenAIProvider extends BaseProvider {
     return response?.questions?.filter(Boolean)?.map((q: any) => ({ content: q })) || []
   }
 
-  public async check(): Promise<{ valid: boolean; error: Error | null }> {
-    const model = last(this.provider.models.filter((m) => !isEmbeddingModel(m)))
-
+  public async check(model: Model): Promise<{ valid: boolean; error: Error | null }> {
     if (!model) {
       return { valid: false, error: new Error('No model found') }
     }

@@ -1,7 +1,7 @@
 import i18n from '@renderer/i18n'
 import store from '@renderer/store'
 import { setGenerating } from '@renderer/store/runtime'
-import { Assistant, Message, Provider, Suggestion, Topic } from '@renderer/types'
+import { Assistant, Message, Model, Provider, Suggestion, Topic } from '@renderer/types'
 import { isEmpty } from 'lodash'
 
 import AiProvider from '../providers/AiProvider'
@@ -184,7 +184,7 @@ export async function fetchSuggestions({
   }
 }
 
-export async function checkApi(provider: Provider) {
+export async function checkApi(provider: Provider, model: Model) {
   const key = 'api-check'
   const style = { marginTop: '3vh' }
 
@@ -207,7 +207,7 @@ export async function checkApi(provider: Provider) {
 
   const AI = new AiProvider(provider)
 
-  const { valid } = await AI.check()
+  const { valid } = await AI.check(model)
 
   return valid
 }