feat: hide entry for local ai
This commit is contained in:
parent d42ee59335
commit 89bdab58f7
@@ -1,6 +1,6 @@
 import { TranslationOutlined } from '@ant-design/icons'
 import { isMac } from '@renderer/config/constant'
-import { AppLogo } from '@renderer/config/env'
+import { AppLogo, isLocalAi } from '@renderer/config/env'
 import useAvatar from '@renderer/hooks/useAvatar'
 import { useRuntime } from '@renderer/hooks/useStore'
 import { Avatar } from 'antd'
@@ -51,7 +51,7 @@ const Sidebar: FC = () => {
   </Menus>
 </MainMenus>
 <Menus>
-  <StyledLink to="/settings/provider">
+  <StyledLink to={isLocalAi ? '/settings/assistant' : '/settings/provider'}>
     <Icon className={pathname.startsWith('/settings') ? 'active' : ''}>
       <i className="iconfont icon-setting"></i>
     </Icon>
@@ -3,15 +3,7 @@ import { Model } from '@renderer/types'
 type SystemModel = Model & { enabled: boolean }

 export const SYSTEM_MODELS: Record<string, SystemModel[]> = {
-  ollama: [
-    {
-      id: 'qwen2:0.5b',
-      provider: 'ollama',
-      name: ' Qwen2 0.5b',
-      group: 'Qwen2',
-      enabled: true
-    }
-  ],
+  ollama: [],
   openai: [
     {
       id: 'gpt-4o',
src/renderer/src/env.d.ts (vendored) +8
@@ -4,6 +4,14 @@ import type KeyvStorage from '@kangfenmao/keyv-storage'
 import { MessageInstance } from 'antd/es/message/interface'
 import { HookAPI } from 'antd/es/modal/useModal'

+interface ImportMetaEnv {
+  VITE_RENDERER_INTEGRATED_MODEL: string
+}
+
+interface ImportMeta {
+  readonly env: ImportMetaEnv
+}
+
 declare global {
   interface Window {
     message: MessageInstance
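Note: the change to @renderer/config/env itself is not part of this diff; only the new isLocalAi import appears above. A minimal sketch of how the flag could be defined there, assuming it is driven by the same VITE_RENDERER_INTEGRATED_MODEL variable declared in env.d.ts:

// Sketch only, not taken from this commit (assumed location: src/renderer/src/config/env.ts)
// The flag is true whenever the integrated-model env variable is provided at build time.
export const isLocalAi = Boolean(import.meta.env.VITE_RENDERER_INTEGRATED_MODEL)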
@@ -1,3 +1,4 @@
+import { isLocalAi } from '@renderer/config/env'
 import i18n from '@renderer/i18n'
 import LocalStorage from '@renderer/services/storage'
 import { useAppDispatch } from '@renderer/store'
@@ -5,12 +6,14 @@ import { setAvatar } from '@renderer/store/runtime'
 import { runAsyncFunction } from '@renderer/utils'
 import { useEffect } from 'react'

+import { useDefaultModel } from './useAssistant'
 import { useSettings } from './useSettings'

 export function useAppInit() {
   const dispatch = useAppDispatch()
   const { proxyUrl } = useSettings()
   const { language } = useSettings()
+  const { setDefaultModel, setTopicNamingModel, setTranslateModel } = useDefaultModel()

   useEffect(() => {
     runAsyncFunction(async () => {
@@ -33,4 +36,14 @@ export function useAppInit() {
   useEffect(() => {
     i18n.changeLanguage(language || navigator.language || 'en-US')
   }, [language])
+
+  useEffect(() => {
+    if (isLocalAi) {
+      const model = JSON.parse(import.meta.env.VITE_RENDERER_INTEGRATED_MODEL)
+      setDefaultModel(model)
+      setTopicNamingModel(model)
+      setTranslateModel(model)
+    }
+    // eslint-disable-next-line react-hooks/exhaustive-deps
+  }, [])
 }
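For context, VITE_RENDERER_INTEGRATED_MODEL is consumed as a JSON string both here and in the llm store further down. A hypothetical value for illustration only (not taken from this commit; field names mirror the Model entries in @renderer/config/models):

import { Model } from '@renderer/types'

// Hypothetical env value, e.g. in a build-time .env file:
// VITE_RENDERER_INTEGRATED_MODEL='{"id":"qwen2:0.5b","provider":"ollama","name":"Qwen2 0.5b","group":"Qwen2"}'
const model: Model = JSON.parse(import.meta.env.VITE_RENDERER_INTEGRATED_MODEL)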
@@ -1,4 +1,5 @@
 import ModelAvatar from '@renderer/components/Avatar/ModelAvatar'
+import { isLocalAi } from '@renderer/config/env'
 import { useAssistant } from '@renderer/hooks/useAssistant'
 import { Assistant } from '@renderer/types'
 import { Button } from 'antd'
@@ -17,6 +18,10 @@ const SelectModelButton: FC<Props> = ({ assistant }) => {
   const { model, setModel } = useAssistant(assistant.id)
   const { t } = useTranslation()

+  if (isLocalAi) {
+    return null
+  }
+
   return (
     <SelectModelDropdown model={model} onSelect={setModel}>
       <DropdownButton size="small" type="default">
@@ -6,6 +6,7 @@ import {
   SettingOutlined
 } from '@ant-design/icons'
 import { Navbar, NavbarCenter } from '@renderer/components/app/Navbar'
+import { isLocalAi } from '@renderer/config/env'
 import { FC } from 'react'
 import { useTranslation } from 'react-i18next'
 import { Link, Route, Routes, useLocation } from 'react-router-dom'
@@ -30,18 +31,22 @@ const SettingsPage: FC = () => {
       </Navbar>
       <ContentContainer>
         <SettingMenus>
-          <MenuItemLink to="/settings/provider">
-            <MenuItem className={isRoute('/settings/provider')}>
-              <CloudOutlined />
-              {t('settings.provider')}
-            </MenuItem>
-          </MenuItemLink>
-          <MenuItemLink to="/settings/model">
-            <MenuItem className={isRoute('/settings/model')}>
-              <CodeSandboxOutlined />
-              {t('settings.model')}
-            </MenuItem>
-          </MenuItemLink>
+          {!isLocalAi && (
+            <>
+              <MenuItemLink to="/settings/provider">
+                <MenuItem className={isRoute('/settings/provider')}>
+                  <CloudOutlined />
+                  {t('settings.provider')}
+                </MenuItem>
+              </MenuItemLink>
+              <MenuItemLink to="/settings/model">
+                <MenuItem className={isRoute('/settings/model')}>
+                  <CodeSandboxOutlined />
+                  {t('settings.model')}
+                </MenuItem>
+              </MenuItemLink>
+            </>
+          )}
           <MenuItemLink to="/settings/assistant">
             <MenuItem className={isRoute('/settings/assistant')}>
               <MessageOutlined />
@@ -1,6 +1,7 @@
 import { CheckOutlined, SendOutlined, SettingOutlined, SwapOutlined, WarningOutlined } from '@ant-design/icons'
 import { Navbar, NavbarCenter } from '@renderer/components/app/Navbar'
 import CopyIcon from '@renderer/components/Icons/CopyIcon'
+import { isLocalAi } from '@renderer/config/env'
 import { useDefaultModel } from '@renderer/hooks/useAssistant'
 import { fetchTranslate } from '@renderer/services/api'
 import { getDefaultAssistant } from '@renderer/services/assistant'
@@ -133,6 +134,31 @@ const TranslatePage: FC = () => {
     isEmpty(text) && setResult('')
   }, [text])

+  const SettingButton = () => {
+    if (isLocalAi) {
+      return null
+    }
+
+    if (translateModel) {
+      return (
+        <Link to="/settings/model" style={{ color: 'var(--color-text-2)' }}>
+          <SettingOutlined />
+        </Link>
+      )
+    }
+
+    return (
+      <Link to="/settings/model" style={{ marginLeft: -10 }}>
+        <Button
+          type="link"
+          style={{ color: 'var(--color-error)', textDecoration: 'underline' }}
+          icon={<WarningOutlined />}>
+          {t('translate.error.not_configured')}
+        </Button>
+      </Link>
+    )
+  }
+
   return (
     <Container>
       <Navbar>
@@ -165,21 +191,7 @@ const TranslatePage: FC = () => {
             </Space>
           )}
         />
-        {translateModel && (
-          <Link to="/settings/model" style={{ color: 'var(--color-text-2)' }}>
-            <SettingOutlined />
-          </Link>
-        )}
-        {!translateModel && (
-          <Link to="/settings/model" style={{ marginLeft: -10 }}>
-            <Button
-              type="link"
-              style={{ color: 'var(--color-error)', textDecoration: 'underline' }}
-              icon={<WarningOutlined />}>
-              {t('translate.error.not_configured')}
-            </Button>
-          </Link>
-        )}
+        <SettingButton />
       </MenuContainer>
       <TranslateInputWrapper>
         <InputContainer>
@@ -2,11 +2,12 @@ import Anthropic from '@anthropic-ai/sdk'
 import { MessageCreateParamsNonStreaming, MessageParam } from '@anthropic-ai/sdk/resources'
 import { GoogleGenerativeAI } from '@google/generative-ai'
 import { DEFAULT_MAX_TOKENS } from '@renderer/config/constant'
+import { isLocalAi } from '@renderer/config/env'
 import { getOllamaKeepAliveTime } from '@renderer/hooks/useOllama'
 import { Assistant, Message, Provider, Suggestion } from '@renderer/types'
 import { removeQuotes } from '@renderer/utils'
 import axios from 'axios'
-import { isEmpty, sum, takeRight } from 'lodash'
+import { first, isEmpty, sum, takeRight } from 'lodash'
 import OpenAI from 'openai'
 import { ChatCompletionCreateParamsNonStreaming, ChatCompletionMessageParam } from 'openai/resources'
@@ -239,13 +240,13 @@ export default class ProviderSDK {
     // @ts-ignore key is not typed
     const response = await this.openaiSdk.chat.completions.create({
       model: model.id,
-      messages: [systemMessage, ...userMessages] as ChatCompletionMessageParam[],
+      messages: [systemMessage, ...(isLocalAi ? [first(userMessages)] : userMessages)] as ChatCompletionMessageParam[],
       stream: false,
       max_tokens: 50,
       keep_alive: this.keepAliveTime
     })

-    return removeQuotes(response.choices[0].message?.content || '')
+    return removeQuotes(response.choices[0].message?.content?.substring(0, 50) || '')
   }

   public async suggestions(messages: Message[], assistant: Assistant): Promise<Suggestion[]> {
@@ -1,4 +1,5 @@
 import { createSlice, PayloadAction } from '@reduxjs/toolkit'
+import { isLocalAi } from '@renderer/config/env'
 import { SYSTEM_MODELS } from '@renderer/config/models'
 import { Model, Provider } from '@renderer/types'
 import { uniqBy } from 'lodash'
@@ -18,19 +19,10 @@ export interface LlmState {
 }

 const initialState: LlmState = {
-  defaultModel: SYSTEM_MODELS.ollama[0],
-  topicNamingModel: SYSTEM_MODELS.ollama[0],
-  translateModel: SYSTEM_MODELS.ollama[0],
+  defaultModel: SYSTEM_MODELS.openai[0],
+  topicNamingModel: SYSTEM_MODELS.openai[0],
+  translateModel: SYSTEM_MODELS.openai[0],
   providers: [
     {
-      id: 'ollama',
-      name: 'Ollama',
-      apiKey: '',
-      apiHost: 'http://localhost:11434/v1/',
-      models: SYSTEM_MODELS.ollama.filter((m) => m.enabled),
-      isSystem: true,
-      enabled: true
-    },
-    {
       id: 'openai',
       name: 'OpenAI',
@@ -38,7 +30,7 @@ const initialState: LlmState = {
       apiHost: 'https://api.openai.com',
       models: SYSTEM_MODELS.openai.filter((m) => m.enabled),
       isSystem: true,
-      enabled: false
+      enabled: true
     },
     {
       id: 'gemini',
@@ -58,6 +50,15 @@ const initialState: LlmState = {
       isSystem: true,
       enabled: false
     },
+    {
+      id: 'ollama',
+      name: 'Ollama',
+      apiKey: '',
+      apiHost: 'http://localhost:11434/v1/',
+      models: SYSTEM_MODELS.ollama.filter((m) => m.enabled),
+      isSystem: true,
+      enabled: false
+    },
     {
       id: 'silicon',
       name: 'Silicon',
@@ -192,9 +193,35 @@ const initialState: LlmState = {
   }
 }

+const getIntegratedInitialState = () => {
+  const model = JSON.parse(import.meta.env.VITE_RENDERER_INTEGRATED_MODEL)
+
+  return {
+    defaultModel: model,
+    topicNamingModel: model,
+    translateModel: model,
+    providers: [
+      {
+        id: 'ollama',
+        name: 'Ollama',
+        apiKey: 'ollama',
+        apiHost: 'http://localhost:15537/v1/',
+        models: [model],
+        isSystem: true,
+        enabled: true
+      }
+    ],
+    settings: {
+      ollama: {
+        keepAliveTime: 3600
+      }
+    }
+  } as LlmState
+}
+
 const settingsSlice = createSlice({
   name: 'llm',
-  initialState,
+  initialState: isLocalAi ? getIntegratedInitialState() : initialState,
   reducers: {
     updateProvider: (state, action: PayloadAction<Provider>) => {
       state.providers = state.providers.map((p) => (p.id === action.payload.id ? { ...p, ...action.payload } : p))