feat: hide entry for local ai

kangfenmao 2024-08-28 18:11:35 +08:00
parent d42ee59335
commit 89bdab58f7
9 changed files with 118 additions and 55 deletions

View File

@@ -1,6 +1,6 @@
 import { TranslationOutlined } from '@ant-design/icons'
 import { isMac } from '@renderer/config/constant'
-import { AppLogo } from '@renderer/config/env'
+import { AppLogo, isLocalAi } from '@renderer/config/env'
 import useAvatar from '@renderer/hooks/useAvatar'
 import { useRuntime } from '@renderer/hooks/useStore'
 import { Avatar } from 'antd'
@@ -51,7 +51,7 @@ const Sidebar: FC = () => {
         </Menus>
       </MainMenus>
       <Menus>
-        <StyledLink to="/settings/provider">
+        <StyledLink to={isLocalAi ? '/settings/assistant' : '/settings/provider'}>
          <Icon className={pathname.startsWith('/settings') ? 'active' : ''}>
            <i className="iconfont icon-setting"></i>
          </Icon>
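
Note: isLocalAi is imported from @renderer/config/env in several files below, but that module itself is not touched by this commit. A minimal sketch of what the flag plausibly looks like, assuming it is derived from the VITE_RENDERER_INTEGRATED_MODEL variable declared in env.d.ts further down (the derivation and file contents are assumptions, not shown in the diff):

// @renderer/config/env (sketch, not part of this commit)
// Assumption: the build counts as "local AI" whenever an integrated model is injected at build time.
export const isLocalAi = Boolean(import.meta.env.VITE_RENDERER_INTEGRATED_MODEL)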

View File

@@ -3,15 +3,7 @@ import { Model } from '@renderer/types'
 type SystemModel = Model & { enabled: boolean }
 
 export const SYSTEM_MODELS: Record<string, SystemModel[]> = {
-  ollama: [
-    {
-      id: 'qwen2:0.5b',
-      provider: 'ollama',
-      name: ' Qwen2 0.5b',
-      group: 'Qwen2',
-      enabled: true
-    }
-  ],
+  ollama: [],
   openai: [
     {
       id: 'gpt-4o',

View File

@@ -4,6 +4,14 @@ import type KeyvStorage from '@kangfenmao/keyv-storage'
 import { MessageInstance } from 'antd/es/message/interface'
 import { HookAPI } from 'antd/es/modal/useModal'
 
+interface ImportMetaEnv {
+  VITE_RENDERER_INTEGRATED_MODEL: string
+}
+
+interface ImportMeta {
+  readonly env: ImportMetaEnv
+}
+
 declare global {
   interface Window {
     message: MessageInstance
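
VITE_RENDERER_INTEGRATED_MODEL is read with JSON.parse in useAppInit and in the llm store below, so it is expected to hold a JSON-encoded Model object (id, provider, name, group). An illustrative sketch; the concrete values here are hypothetical, not taken from this commit:

// Illustrative only: a local-AI build would supply something like
//   VITE_RENDERER_INTEGRATED_MODEL='{"id":"qwen2:0.5b","provider":"ollama","name":"Qwen2 0.5b","group":"Qwen2"}'
// which the renderer turns back into a Model-shaped object:
const integratedModel = JSON.parse(import.meta.env.VITE_RENDERER_INTEGRATED_MODEL)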

View File

@@ -1,3 +1,4 @@
+import { isLocalAi } from '@renderer/config/env'
 import i18n from '@renderer/i18n'
 import LocalStorage from '@renderer/services/storage'
 import { useAppDispatch } from '@renderer/store'
@@ -5,12 +6,14 @@ import { setAvatar } from '@renderer/store/runtime'
 import { runAsyncFunction } from '@renderer/utils'
 import { useEffect } from 'react'
 
+import { useDefaultModel } from './useAssistant'
 import { useSettings } from './useSettings'
 
 export function useAppInit() {
   const dispatch = useAppDispatch()
   const { proxyUrl } = useSettings()
   const { language } = useSettings()
+  const { setDefaultModel, setTopicNamingModel, setTranslateModel } = useDefaultModel()
 
   useEffect(() => {
     runAsyncFunction(async () => {
@@ -33,4 +36,14 @@ export function useAppInit() {
   useEffect(() => {
     i18n.changeLanguage(language || navigator.language || 'en-US')
   }, [language])
+
+  useEffect(() => {
+    if (isLocalAi) {
+      const model = JSON.parse(import.meta.env.VITE_RENDERER_INTEGRATED_MODEL)
+      setDefaultModel(model)
+      setTopicNamingModel(model)
+      setTranslateModel(model)
+    }
+    // eslint-disable-next-line react-hooks/exhaustive-deps
+  }, [])
 }

View File

@@ -1,4 +1,5 @@
 import ModelAvatar from '@renderer/components/Avatar/ModelAvatar'
+import { isLocalAi } from '@renderer/config/env'
 import { useAssistant } from '@renderer/hooks/useAssistant'
 import { Assistant } from '@renderer/types'
 import { Button } from 'antd'
@@ -17,6 +18,10 @@ const SelectModelButton: FC<Props> = ({ assistant }) => {
   const { model, setModel } = useAssistant(assistant.id)
   const { t } = useTranslation()
 
+  if (isLocalAi) {
+    return null
+  }
+
   return (
     <SelectModelDropdown model={model} onSelect={setModel}>
       <DropdownButton size="small" type="default">

View File

@@ -6,6 +6,7 @@ import {
   SettingOutlined
 } from '@ant-design/icons'
 import { Navbar, NavbarCenter } from '@renderer/components/app/Navbar'
+import { isLocalAi } from '@renderer/config/env'
 import { FC } from 'react'
 import { useTranslation } from 'react-i18next'
 import { Link, Route, Routes, useLocation } from 'react-router-dom'
@@ -30,18 +31,22 @@ const SettingsPage: FC = () => {
       </Navbar>
       <ContentContainer>
         <SettingMenus>
-          <MenuItemLink to="/settings/provider">
-            <MenuItem className={isRoute('/settings/provider')}>
-              <CloudOutlined />
-              {t('settings.provider')}
-            </MenuItem>
-          </MenuItemLink>
-          <MenuItemLink to="/settings/model">
-            <MenuItem className={isRoute('/settings/model')}>
-              <CodeSandboxOutlined />
-              {t('settings.model')}
-            </MenuItem>
-          </MenuItemLink>
+          {!isLocalAi && (
+            <>
+              <MenuItemLink to="/settings/provider">
+                <MenuItem className={isRoute('/settings/provider')}>
+                  <CloudOutlined />
+                  {t('settings.provider')}
+                </MenuItem>
+              </MenuItemLink>
+              <MenuItemLink to="/settings/model">
+                <MenuItem className={isRoute('/settings/model')}>
+                  <CodeSandboxOutlined />
+                  {t('settings.model')}
+                </MenuItem>
+              </MenuItemLink>
+            </>
+          )}
           <MenuItemLink to="/settings/assistant">
             <MenuItem className={isRoute('/settings/assistant')}>
               <MessageOutlined />

View File

@@ -1,6 +1,7 @@
 import { CheckOutlined, SendOutlined, SettingOutlined, SwapOutlined, WarningOutlined } from '@ant-design/icons'
 import { Navbar, NavbarCenter } from '@renderer/components/app/Navbar'
 import CopyIcon from '@renderer/components/Icons/CopyIcon'
+import { isLocalAi } from '@renderer/config/env'
 import { useDefaultModel } from '@renderer/hooks/useAssistant'
 import { fetchTranslate } from '@renderer/services/api'
 import { getDefaultAssistant } from '@renderer/services/assistant'
@@ -133,6 +134,31 @@ const TranslatePage: FC = () => {
     isEmpty(text) && setResult('')
   }, [text])
 
+  const SettingButton = () => {
+    if (isLocalAi) {
+      return null
+    }
+
+    if (translateModel) {
+      return (
+        <Link to="/settings/model" style={{ color: 'var(--color-text-2)' }}>
+          <SettingOutlined />
+        </Link>
+      )
+    }
+
+    return (
+      <Link to="/settings/model" style={{ marginLeft: -10 }}>
+        <Button
+          type="link"
+          style={{ color: 'var(--color-error)', textDecoration: 'underline' }}
+          icon={<WarningOutlined />}>
+          {t('translate.error.not_configured')}
+        </Button>
+      </Link>
+    )
+  }
+
   return (
     <Container>
       <Navbar>
@@ -165,21 +191,7 @@ const TranslatePage: FC = () => {
               </Space>
             )}
           />
-          {translateModel && (
-            <Link to="/settings/model" style={{ color: 'var(--color-text-2)' }}>
-              <SettingOutlined />
-            </Link>
-          )}
-          {!translateModel && (
-            <Link to="/settings/model" style={{ marginLeft: -10 }}>
-              <Button
-                type="link"
-                style={{ color: 'var(--color-error)', textDecoration: 'underline' }}
-                icon={<WarningOutlined />}>
-                {t('translate.error.not_configured')}
-              </Button>
-            </Link>
-          )}
+          <SettingButton />
         </MenuContainer>
         <TranslateInputWrapper>
           <InputContainer>

View File

@@ -2,11 +2,12 @@ import Anthropic from '@anthropic-ai/sdk'
 import { MessageCreateParamsNonStreaming, MessageParam } from '@anthropic-ai/sdk/resources'
 import { GoogleGenerativeAI } from '@google/generative-ai'
 import { DEFAULT_MAX_TOKENS } from '@renderer/config/constant'
+import { isLocalAi } from '@renderer/config/env'
 import { getOllamaKeepAliveTime } from '@renderer/hooks/useOllama'
 import { Assistant, Message, Provider, Suggestion } from '@renderer/types'
 import { removeQuotes } from '@renderer/utils'
 import axios from 'axios'
-import { isEmpty, sum, takeRight } from 'lodash'
+import { first, isEmpty, sum, takeRight } from 'lodash'
 import OpenAI from 'openai'
 import { ChatCompletionCreateParamsNonStreaming, ChatCompletionMessageParam } from 'openai/resources'
@@ -239,13 +240,13 @@ export default class ProviderSDK {
     // @ts-ignore key is not typed
     const response = await this.openaiSdk.chat.completions.create({
       model: model.id,
-      messages: [systemMessage, ...userMessages] as ChatCompletionMessageParam[],
+      messages: [systemMessage, ...(isLocalAi ? [first(userMessages)] : userMessages)] as ChatCompletionMessageParam[],
       stream: false,
       max_tokens: 50,
       keep_alive: this.keepAliveTime
     })
 
-    return removeQuotes(response.choices[0].message?.content || '')
+    return removeQuotes(response.choices[0].message?.content?.substring(0, 50) || '')
   }
 
   public async suggestions(messages: Message[], assistant: Assistant): Promise<Suggestion[]> {

View File

@@ -1,4 +1,5 @@
 import { createSlice, PayloadAction } from '@reduxjs/toolkit'
+import { isLocalAi } from '@renderer/config/env'
 import { SYSTEM_MODELS } from '@renderer/config/models'
 import { Model, Provider } from '@renderer/types'
 import { uniqBy } from 'lodash'
@@ -18,19 +19,10 @@ export interface LlmState {
 }
 
 const initialState: LlmState = {
-  defaultModel: SYSTEM_MODELS.ollama[0],
-  topicNamingModel: SYSTEM_MODELS.ollama[0],
-  translateModel: SYSTEM_MODELS.ollama[0],
+  defaultModel: SYSTEM_MODELS.openai[0],
+  topicNamingModel: SYSTEM_MODELS.openai[0],
+  translateModel: SYSTEM_MODELS.openai[0],
   providers: [
-    {
-      id: 'ollama',
-      name: 'Ollama',
-      apiKey: '',
-      apiHost: 'http://localhost:11434/v1/',
-      models: SYSTEM_MODELS.ollama.filter((m) => m.enabled),
-      isSystem: true,
-      enabled: true
-    },
     {
       id: 'openai',
       name: 'OpenAI',
@@ -38,7 +30,7 @@ const initialState: LlmState = {
       apiHost: 'https://api.openai.com',
       models: SYSTEM_MODELS.openai.filter((m) => m.enabled),
       isSystem: true,
-      enabled: false
+      enabled: true
     },
     {
       id: 'gemini',
@@ -58,6 +50,15 @@ const initialState: LlmState = {
       isSystem: true,
       enabled: false
     },
+    {
+      id: 'ollama',
+      name: 'Ollama',
+      apiKey: '',
+      apiHost: 'http://localhost:11434/v1/',
+      models: SYSTEM_MODELS.ollama.filter((m) => m.enabled),
+      isSystem: true,
+      enabled: false
+    },
     {
       id: 'silicon',
       name: 'Silicon',
@@ -192,9 +193,35 @@ const initialState: LlmState = {
   }
 }
 
+const getIntegratedInitialState = () => {
+  const model = JSON.parse(import.meta.env.VITE_RENDERER_INTEGRATED_MODEL)
+
+  return {
+    defaultModel: model,
+    topicNamingModel: model,
+    translateModel: model,
+    providers: [
+      {
+        id: 'ollama',
+        name: 'Ollama',
+        apiKey: 'ollama',
+        apiHost: 'http://localhost:15537/v1/',
+        models: [model],
+        isSystem: true,
+        enabled: true
+      }
+    ],
+    settings: {
+      ollama: {
+        keepAliveTime: 3600
+      }
+    }
+  } as LlmState
+}
+
 const settingsSlice = createSlice({
   name: 'llm',
-  initialState,
+  initialState: isLocalAi ? getIntegratedInitialState() : initialState,
   reducers: {
     updateProvider: (state, action: PayloadAction<Provider>) => {
       state.providers = state.providers.map((p) => (p.id === action.payload.id ? { ...p, ...action.payload } : p))