feat: add zhipu provider

kangfenmao 2024-07-10 13:13:44 +08:00
parent 62520fad90
commit 8009e05c80
10 changed files with 109 additions and 25 deletions

Binary file not shown (new image, 15 KiB).

View File

@@ -67,7 +67,7 @@ const PopupContainer: React.FC<Props> = ({ provider: _provider, resolve }) => {
<ListItem key={model.id}>
<ListItemHeader>
<Avatar src={getModelLogo(model.id)} size={24} />
<ListItemName>{model.id}</ListItemName>
<ListItemName>{model.name}</ListItemName>
</ListItemHeader>
{hasModel ? (
<Button type="default" onClick={() => onRemoveModel(model)} icon={<MinusOutlined />} />

View File

@@ -121,7 +121,7 @@ export const SYSTEM_MODELS: Record<string, SystemModel[]> = {
{
id: 'deepseek-ai/deepseek-llm-67b-chat',
provider: 'silicon',
name: 'deepseek-llm-67b-chat',
name: 'Deepseek-LLM-67B-Chat',
group: 'DeepSeek',
temperature: 0.7,
defaultEnabled: false
@@ -129,7 +129,7 @@ export const SYSTEM_MODELS: Record<string, SystemModel[]> = {
{
id: 'THUDM/glm-4-9b-chat',
provider: 'silicon',
name: 'glm-4-9b-chat',
name: 'GLM-4-9B-Chat',
group: 'GLM',
temperature: 0.7,
defaultEnabled: true
@@ -137,7 +137,7 @@ export const SYSTEM_MODELS: Record<string, SystemModel[]> = {
{
id: 'THUDM/chatglm3-6b',
provider: 'silicon',
name: 'chatglm3-6b',
name: 'ChatGLM3-6B',
group: 'GLM',
temperature: 0.7,
defaultEnabled: false
@@ -171,16 +171,16 @@ export const SYSTEM_MODELS: Record<string, SystemModel[]> = {
{
id: 'deepseek-chat',
provider: 'deepseek',
name: 'deepseek-chat',
group: 'Deepseek Chat',
name: 'DeepSeek Chat',
group: 'DeepSeek Chat',
temperature: 0.7,
defaultEnabled: true
},
{
id: 'deepseek-coder',
provider: 'deepseek',
name: 'deepseek-coder',
group: 'Deepseek Coder',
name: 'DeepSeek Coder',
group: 'DeepSeek Coder',
temperature: 1.0,
defaultEnabled: true
}
@@ -189,7 +189,7 @@ export const SYSTEM_MODELS: Record<string, SystemModel[]> = {
{
id: 'yi-large',
provider: 'yi',
name: 'yi-large',
name: 'Yi-Large',
group: 'Yi',
temperature: 0.7,
defaultEnabled: false
@@ -197,7 +197,7 @@ export const SYSTEM_MODELS: Record<string, SystemModel[]> = {
{
id: 'yi-large-turbo',
provider: 'yi',
name: 'yi-large-turbo',
name: 'Yi-Large-Turbo',
group: 'Yi',
temperature: 0.7,
defaultEnabled: true
@@ -205,7 +205,7 @@ export const SYSTEM_MODELS: Record<string, SystemModel[]> = {
{
id: 'yi-large-rag',
provider: 'yi',
name: 'yi-large-rag',
name: 'Yi-Large-RAG',
group: 'Yi',
temperature: 0.7,
defaultEnabled: false
@@ -213,7 +213,7 @@ export const SYSTEM_MODELS: Record<string, SystemModel[]> = {
{
id: 'yi-medium',
provider: 'yi',
name: 'yi-medium',
name: 'Yi-Medium',
group: 'Yi',
temperature: 0.7,
defaultEnabled: true
@@ -221,7 +221,7 @@ export const SYSTEM_MODELS: Record<string, SystemModel[]> = {
{
id: 'yi-medium-200k',
provider: 'yi',
name: 'yi-medium-200k',
name: 'Yi-Medium-200k',
group: 'Yi',
temperature: 0.7,
defaultEnabled: false
@@ -229,17 +229,67 @@ export const SYSTEM_MODELS: Record<string, SystemModel[]> = {
{
id: 'yi-spark',
provider: 'yi',
name: 'yi-spark',
name: 'Yi-Spark',
group: 'Yi',
temperature: 0.7,
defaultEnabled: false
}
],
zhipu: [
{
id: 'glm-4-0520',
provider: 'zhipu',
name: 'GLM-4-0520',
group: 'GLM',
temperature: 0.7,
defaultEnabled: true
},
{
id: 'glm-4',
provider: 'zhipu',
name: 'GLM-4',
group: 'GLM',
temperature: 0.7,
defaultEnabled: false
},
{
id: 'glm-4-airx',
provider: 'zhipu',
name: 'GLM-4-AirX',
group: 'GLM',
temperature: 0.7,
defaultEnabled: false
},
{
id: 'glm-4-air',
provider: 'zhipu',
name: 'GLM-4-Air',
group: 'GLM',
temperature: 0.7,
defaultEnabled: true
},
{
id: 'glm-4v',
provider: 'zhipu',
name: 'GLM-4V',
group: 'GLM',
temperature: 0.7,
defaultEnabled: false
},
{
id: 'glm-4-alltools',
provider: 'zhipu',
name: 'GLM-4-AllTools',
group: 'GLM',
temperature: 0.7,
defaultEnabled: false
}
],
groq: [
{
id: 'llama3-8b-8192',
provider: 'groq',
name: 'LLaMA3 8b',
name: 'LLaMA3 8B',
group: 'Llama3',
temperature: 0.7,
defaultEnabled: false
@@ -247,7 +297,7 @@ export const SYSTEM_MODELS: Record<string, SystemModel[]> = {
{
id: 'llama3-70b-8192',
provider: 'groq',
name: 'LLaMA3 70b',
name: 'LLaMA3 70B',
group: 'Llama3',
temperature: 0.7,
defaultEnabled: true
@@ -255,7 +305,7 @@ export const SYSTEM_MODELS: Record<string, SystemModel[]> = {
{
id: 'mixtral-8x7b-32768',
provider: 'groq',
name: 'Mixtral 8x7b',
name: 'Mixtral 8x7B',
group: 'Mixtral',
temperature: 0.7,
defaultEnabled: false
@@ -263,7 +313,7 @@ export const SYSTEM_MODELS: Record<string, SystemModel[]> = {
{
id: 'gemma-7b-it',
provider: 'groq',
name: 'Gemma 7b',
name: 'Gemma 7B',
group: 'Gemma',
temperature: 0.7,
defaultEnabled: false
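
All entries in this hunk, including the new zhipu block, share the same shape. For reference, a minimal sketch of what the SystemModel type used here appears to look like, inferred only from the fields visible in this diff (the project's actual definition may include more properties):

// Sketch inferred from the fields used in SYSTEM_MODELS above; not the
// project's actual type definition.
interface SystemModel {
  id: string              // identifier sent to the provider's API, e.g. 'glm-4-air'
  provider: string        // key of the owning provider, e.g. 'zhipu'
  name: string            // human-readable display name shown in the UI
  group: string           // label used to group models in the model picker
  temperature: number     // default sampling temperature
  defaultEnabled: boolean // whether a fresh install enables the model
}

// Provided by the file being diffed; declared here only so the sketch type-checks.
declare const SYSTEM_MODELS: Record<string, SystemModel[]>

// With the entries above, the default zhipu selection would be GLM-4-0520 and GLM-4-Air.
const defaultZhipuModels = SYSTEM_MODELS.zhipu.filter((m) => m.defaultEnabled)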

View File

@@ -2,7 +2,7 @@ import { useSystemProviders } from '@renderer/hooks/useProvider'
import { Provider } from '@renderer/types'
import { FC, useState } from 'react'
import styled from 'styled-components'
import ModalProviderSetting from './components/ModalProviderSetting'
import ProviderModals from './components/ProviderModals'
import { Avatar } from 'antd'
import { getProviderLogo } from '@renderer/services/provider'
@@ -23,7 +23,7 @@ const ProviderSettings: FC = () => {
</ProviderListItem>
))}
</ProviderListContainer>
<ModalProviderSetting provider={selectedProvider} />
<ProviderModals provider={selectedProvider} />
</Container>
)
}

View File

@@ -12,7 +12,7 @@ interface Props {
provider: Provider
}
const ModalProviderSetting: FC<Props> = ({ provider }) => {
const ProviderModals: FC<Props> = ({ provider }) => {
const [apiKey, setApiKey] = useState(provider.apiKey)
const [apiHost, setApiHost] = useState(provider.apiHost)
const { updateProvider, models } = useProvider(provider.id)
@@ -61,7 +61,7 @@ const ModalProviderSetting: FC<Props> = ({ provider }) => {
{modelGroups[group].map((model) => (
<ModelListItem key={model.id}>
<Avatar src={getModelLogo(model.id)} size={22} style={{ marginRight: '8px' }} />
{model.id}
{model.name}
</ModelListItem>
))}
</Card>
@@ -81,4 +81,4 @@ const ModelListItem = styled.div`
padding: 5px 0;
`
export default ModalProviderSetting
export default ProviderModals

View File

@@ -15,10 +15,11 @@ interface FetchChatCompletionParams {
}
const getOpenAiProvider = (provider: Provider) => {
const host = provider.apiHost
return new OpenAI({
dangerouslyAllowBrowser: true,
apiKey: provider.apiKey,
baseURL: `${provider.apiHost}/v1/`
baseURL: host.endsWith('/') ? host : `${provider.apiHost}/v1/`
})
}
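
The trailing-slash check added above is what lets the new ZhiPu provider reuse the OpenAI client: ZhiPu's apiHost ('https://open.bigmodel.cn/api/paas/v4/', added later in this commit) already ends in '/', so it is passed through unchanged, while hosts without a trailing slash still get the OpenAI-style '/v1/' suffix. A standalone sketch of the same resolution logic (resolveBaseURL is a hypothetical name and the second host is a placeholder):

// Hypothetical helper mirroring the ternary in getOpenAiProvider above.
const resolveBaseURL = (apiHost: string): string =>
  apiHost.endsWith('/') ? apiHost : `${apiHost}/v1/`

resolveBaseURL('https://open.bigmodel.cn/api/paas/v4/') // kept verbatim
resolveBaseURL('https://api.example.com')               // becomes 'https://api.example.com/v1/'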

View File

@@ -3,6 +3,7 @@ import SiliconFlowProviderLogo from '@renderer/assets/images/providers/silicon.p
import DeepSeekProviderLogo from '@renderer/assets/images/providers/deepseek.png'
import YiProviderLogo from '@renderer/assets/images/providers/yi.svg'
import GroqProviderLogo from '@renderer/assets/images/providers/groq.png'
import ZhipuProviderLogo from '@renderer/assets/images/providers/zhipu.png'
import ChatGPTModelLogo from '@renderer/assets/images/models/chatgpt.jpeg'
import ChatGLMModelLogo from '@renderer/assets/images/models/chatglm.jpeg'
import DeepSeekModelLogo from '@renderer/assets/images/models/deepseek.png'
@@ -33,6 +34,10 @@ export function getProviderLogo(providerId: string) {
return GroqProviderLogo
}
if (providerId === 'zhipu') {
return ZhipuProviderLogo
}
return ''
}

View File

@@ -19,7 +19,7 @@ const persistedReducer = persistReducer(
{
key: 'cherry-studio',
storage,
version: 2,
version: 3,
blacklist: ['runtime'],
migrate
},
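
Raising the persisted-state version from 2 to 3 is what makes the migration added at the end of this commit run for existing installs: on rehydration, redux-persist compares the version stored on disk with this value and, through createMigrate, applies every migration keyed above the stored version. A simplified sketch of that pairing, assuming standard redux-persist behaviour (the migration body and root reducer are placeholders, not the project's code):

// Simplified sketch, assuming standard redux-persist behaviour: a store
// persisted at version 2 passes through the '3' migration before rehydration.
import { createMigrate, persistReducer } from 'redux-persist'
import storage from 'redux-persist/lib/storage'

const migrations = {
  // Placeholder; the project's real '3' migration (shown below) appends the zhipu provider.
  '3': (state: any) => state
}

const persistedReducer = persistReducer(
  {
    key: 'cherry-studio',
    storage,
    version: 3, // bumped from 2 so the '3' migration runs for existing users
    migrate: createMigrate(migrations)
  },
  (state: any = {}) => state // placeholder root reducer for this sketch
)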

View File

@@ -45,6 +45,14 @@ const initialState: LlmState = {
isSystem: true,
models: SYSTEM_MODELS.yi.filter((m) => m.defaultEnabled)
},
{
id: 'zhipu',
name: 'ZhiPu',
apiKey: '',
apiHost: 'https://open.bigmodel.cn/api/paas/v4/',
isSystem: true,
models: SYSTEM_MODELS.zhipu.filter((m) => m.defaultEnabled)
},
{
id: 'groq',
name: 'Groq',
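
This hunk seeds the zhipu provider for fresh installs, while the migration at the end of the commit does the same for existing ones. For reference, a minimal sketch of the Provider shape these entries appear to follow, inferred only from the fields visible in this commit (the actual Provider type imported from @renderer/types may define more):

// Sketch inferred from the fields used in this diff; the project's actual
// Provider type may carry additional properties.
interface Provider {
  id: string        // stable provider key, e.g. 'zhipu'
  name: string      // display name shown in the settings list
  apiKey: string    // empty string until the user supplies a key
  apiHost: string   // API base URL, e.g. 'https://open.bigmodel.cn/api/paas/v4/'
  isSystem: boolean // marks built-in providers rather than user-added ones
  models: { id: string; name: string; group: string; temperature: number }[] // enabled models, seeded from the SYSTEM_MODELS defaults
}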

View File

@@ -22,6 +22,26 @@ const migrate = createMigrate({
]
}
}
},
// @ts-ignore store type is unknown
'3': (state: RootState) => {
return {
...state,
llm: {
...state.llm,
providers: [
...state.llm.providers,
{
id: 'zhipu',
name: 'ZhiPu',
apiKey: '',
apiHost: 'https://open.bigmodel.cn/api/paas/v4/',
isSystem: true,
models: SYSTEM_MODELS.zhipu.filter((m) => m.defaultEnabled)
}
]
}
}
}
})