feat: add top-p settings #224

kangfenmao 2024-12-10 19:00:07 +08:00
parent 44c64a571a
commit 272efaf76e
13 changed files with 160 additions and 32 deletions
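As context for the diff below, here is a minimal TypeScript sketch of how the new topP setting flows from assistant settings into a provider request body. The AssistantSettings shape and the `?? 1` fallback mirror this commit; the resolveSamplingSettings / buildRequestParams helpers and the DEFAULT_TEMPERATURE value are illustrative assumptions, not code from the repository.

// Sketch only: simplified stand-ins for the AssistantSettings type and
// getAssistantSettings logic touched by this commit.
interface AssistantSettings {
  contextCount: number
  temperature: number
  topP: number // new field in this commit, defaults to 1 (no nucleus truncation)
  maxTokens: number | undefined
  enableMaxTokens: boolean
  streamOutput: boolean
}

interface Assistant {
  prompt: string
  settings?: Partial<AssistantSettings>
}

const DEFAULT_TEMPERATURE = 0.7 // assumed default (the temperature slider marks 0.7)

// Missing values fall back to defaults, as getAssistantSettings does for topP.
function resolveSamplingSettings(assistant: Assistant) {
  return {
    temperature: assistant.settings?.temperature ?? DEFAULT_TEMPERATURE,
    topP: assistant.settings?.topP ?? 1
  }
}

// Build an OpenAI-style request body; top_p is only meaningful in [0, 1].
function buildRequestParams(assistant: Assistant, messages: { role: string; content: string }[]) {
  const { temperature, topP } = resolveSamplingSettings(assistant)
  return { messages, temperature, top_p: topP }
}

console.log(buildRequestParams({ prompt: 'You are a helpful assistant.' }, [{ role: 'user', content: 'Hi' }]))
// -> { messages: [...], temperature: 0.7, top_p: 1 }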

View File

@@ -3,7 +3,7 @@ import { HStack } from '@renderer/components/Layout'
import { DEFAULT_CONTEXTCOUNT, DEFAULT_TEMPERATURE } from '@renderer/config/constant'
import { SettingRow } from '@renderer/pages/settings'
import { Assistant, AssistantSettings } from '@renderer/types'
import { Button, Col, Divider, Row, Slider, Switch, Tooltip } from 'antd'
import { Button, Col, Divider, InputNumber, Row, Slider, Switch, Tooltip } from 'antd'
import { FC, useState } from 'react'
import { useTranslation } from 'react-i18next'
import styled from 'styled-components'
@@ -25,6 +25,7 @@ const AssistantModelSettings: FC<Props> = ({ assistant, updateAssistant, updateA
const [autoResetModel, setAutoResetModel] = useState(assistant?.settings?.autoResetModel ?? false)
const [streamOutput, setStreamOutput] = useState(assistant?.settings?.streamOutput ?? true)
const [defaultModel, setDefaultModel] = useState(assistant?.defaultModel)
const [topP, setTopP] = useState(assistant?.settings?.topP ?? 1)
const { t } = useTranslation()
const onTemperatureChange = (value) => {
@@ -45,18 +46,26 @@ const AssistantModelSettings: FC<Props> = ({ assistant, updateAssistant, updateA
}
}
const onTopPChange = (value) => {
if (!isNaN(value as number)) {
updateAssistantSettings({ topP: value })
}
}
const onReset = () => {
setTemperature(DEFAULT_TEMPERATURE)
setContextCount(DEFAULT_CONTEXTCOUNT)
setEnableMaxTokens(false)
setMaxTokens(0)
setStreamOutput(true)
setTopP(1)
updateAssistantSettings({
temperature: DEFAULT_TEMPERATURE,
contextCount: DEFAULT_CONTEXTCOUNT,
enableMaxTokens: false,
maxTokens: 0,
streamOutput: true
streamOutput: true,
topP: 1
})
}
@@ -108,17 +117,50 @@ const AssistantModelSettings: FC<Props> = ({ assistant, updateAssistant, updateA
<QuestionIcon />
</Tooltip>
</Row>
<Row align="middle" gutter={10}>
<Col span={24}>
<Row align="middle" gutter={20}>
<Col span={21}>
<Slider
min={0}
max={2}
onChange={setTemperature}
onChangeComplete={onTemperatureChange}
value={typeof temperature === 'number' ? temperature : 0}
marks={{ 0: '0', 0.7: '0.7', 2: '2' }}
step={0.1}
/>
</Col>
<Col span={3}>
<InputNumber
min={0}
max={2}
step={0.1}
value={temperature}
onChange={onTemperatureChange}
style={{ width: '100%' }}
/>
</Col>
</Row>
<Row align="middle">
<Label>{t('chat.settings.top_p')}</Label>
<Tooltip title={t('chat.settings.top_p.tip')}>
<QuestionIcon />
</Tooltip>
</Row>
<Row align="middle" gutter={20}>
<Col span={21}>
<Slider
min={0}
max={1}
onChange={setTopP}
onChangeComplete={onTopPChange}
value={typeof topP === 'number' ? topP : 1}
marks={{ 0: '0', 1: '1' }}
step={0.1}
/>
</Col>
<Col span={3}>
<InputNumber min={0} max={1} step={0.01} value={topP} onChange={onTopPChange} style={{ width: '100%' }} />
</Col>
</Row>
<Row align="middle">
<Label>
@@ -128,19 +170,31 @@ const AssistantModelSettings: FC<Props> = ({ assistant, updateAssistant, updateA
</Tooltip>
</Label>
</Row>
<Row align="middle" gutter={10}>
<Col span={24}>
<Row align="middle" gutter={20}>
<Col span={21}>
<Slider
min={0}
max={20}
onChange={setContextCount}
onChangeComplete={onContextCountChange}
value={typeof contextCount === 'number' ? contextCount : 0}
marks={{ 0: '0', 5: '5', 10: '10', 15: '15', 20: t('chat.settings.max') }}
step={1}
/>
</Col>
<Col span={3}>
<InputNumber
min={0}
max={20}
step={1}
value={contextCount}
onChange={onContextCountChange}
style={{ width: '100%' }}
/>
</Col>
</Row>
<Row align="middle" justify="space-between">
<Divider style={{ margin: '10px 0' }} />
<SettingRow style={{ minHeight: 30 }}>
<HStack alignItems="center">
<Label>{t('chat.settings.max_tokens')}</Label>
<Tooltip title={t('chat.settings.max_tokens.tip')}>
@@ -154,21 +208,39 @@ const AssistantModelSettings: FC<Props> = ({ assistant, updateAssistant, updateA
updateAssistantSettings({ enableMaxTokens: enabled })
}}
/>
</Row>
<Row align="middle" gutter={10}>
<Col span={24}>
<Slider
disabled={!enableMaxTokens}
min={0}
max={32000}
onChange={setMaxTokens}
onChangeComplete={onMaxTokensChange}
value={typeof maxTokens === 'number' ? maxTokens : 0}
step={100}
/>
</Col>
</Row>
<SettingRow>
</SettingRow>
{enableMaxTokens && (
<Row align="middle" gutter={20}>
<Col span={21}>
<Slider
disabled={!enableMaxTokens}
min={0}
max={32000}
onChange={setMaxTokens}
onChangeComplete={onMaxTokensChange}
value={typeof maxTokens === 'number' ? maxTokens : 0}
step={100}
marks={{
0: '0',
32000: t('chat.settings.max')
}}
/>
</Col>
<Col span={3}>
<InputNumber
disabled={!enableMaxTokens}
min={0}
max={32000}
step={100}
value={maxTokens}
onChange={onMaxTokensChange}
style={{ width: '100%' }}
/>
</Col>
</Row>
)}
<Divider style={{ margin: '10px 0' }} />
<SettingRow style={{ minHeight: 30 }}>
<Label>{t('model.stream_output')}</Label>
<Switch
checked={streamOutput}

View File

@@ -95,6 +95,8 @@
"settings.show_line_numbers": "Show line numbers in code",
"settings.temperature": "Temperature",
"settings.temperature.tip": "Lower values make the model more creative and unpredictable, while higher values make it more deterministic and precise.",
"settings.top_p": "Top-P",
"settings.top_p.tip": "Default value is 1, the smaller the value, the less variety in the answers, the easier to understand, the larger the value, the larger the range of the AI's vocabulary, the more diverse",
"suggestions.title": "Suggested Questions",
"topics.auto_rename": "Auto Rename",
"topics.clear.title": "Clear Messages",

View File

@@ -95,6 +95,8 @@
"settings.show_line_numbers": "Показать номера строк в коде",
"settings.temperature": "Температура",
"settings.temperature.tip": "Меньшие значения делают модель более креативной и непредсказуемой, в то время как большие значения делают её более детерминированной и точной.",
"settings.top_p": "Top-P",
"settings.top_p.tip": "Значение по умолчанию 1, чем меньше значение, тем меньше вариативности в ответах, тем проще понять, чем больше значение, тем больше вариативности в ответах, тем больше разнообразие",
"suggestions.title": "Предложенные вопросы",
"topics.auto_rename": "Автопереименование",
"topics.clear.title": "Очистить сообщения",

View File

@@ -95,6 +95,8 @@
"settings.show_line_numbers": "代码显示行号",
"settings.temperature": "模型温度",
"settings.temperature.tip": "模型生成文本的随机程度。值越大,回复内容越赋有多样性、创造性、随机性;设为 0 根据事实回答。日常聊天建议设置为 0.7",
"settings.top_p": "Top-P",
"settings.top_p.tip": "默认值为 1值越小AI 生成的内容越单调也越容易理解值越大AI 回复的词汇围越大,越多样化",
"suggestions.title": "建议的问题",
"topics.auto_rename": "生成话题名",
"topics.clear.title": "清空消息",

View File

@@ -95,6 +95,8 @@
"settings.show_line_numbers": "代码顯示行號",
"settings.temperature": "溫度",
"settings.temperature.tip": "較低的值使模型更具創造性和不可預測性,較高的值則使其更具確定性和精確性。",
"settings.top_p": "Top-P",
"settings.top_p.tip": "模型生成文本的隨機程度。值越小AI 生成的內容越單調也越容易理解值越大AI 回覆的詞彙範圍越大,越多樣化",
"suggestions.title": "建議的問題",
"topics.auto_rename": "自動重新命名",
"topics.clear.title": "清空消息",

View File

@@ -11,7 +11,7 @@ import { Dispatch, FC, SetStateAction, useState } from 'react'
import { useTranslation } from 'react-i18next'
import styled from 'styled-components'
import { SettingContainer, SettingSubtitle } from '.'
import { SettingContainer, SettingSubtitle } from '..'
const AssistantSettings: FC = () => {
const { defaultAssistant, updateDefaultAssistant } = useDefaultAssistant()
@@ -19,6 +19,7 @@ const AssistantSettings: FC = () => {
const [contextCount, setContextCount] = useState(defaultAssistant.settings?.contextCount ?? DEFAULT_CONTEXTCOUNT)
const [enableMaxTokens, setEnableMaxTokens] = useState(defaultAssistant?.settings?.enableMaxTokens ?? false)
const [maxTokens, setMaxTokens] = useState(defaultAssistant?.settings?.maxTokens ?? 0)
const [topP, setTopP] = useState(defaultAssistant.settings?.topP ?? 1)
const { theme } = useTheme()
const { t } = useTranslation()
@@ -32,7 +33,8 @@ const AssistantSettings: FC = () => {
contextCount: settings.contextCount ?? contextCount,
enableMaxTokens: settings.enableMaxTokens ?? enableMaxTokens,
maxTokens: settings.maxTokens ?? maxTokens,
streamOutput: settings.streamOutput ?? true
streamOutput: settings.streamOutput ?? true,
topP: settings.topP ?? topP
}
})
}
@@ -49,12 +51,14 @@ const AssistantSettings: FC = () => {
onUpdateAssistantSettings({ contextCount: value })
)
const onMaxTokensChange = handleChange(setMaxTokens, (value) => onUpdateAssistantSettings({ maxTokens: value }))
const onTopPChange = handleChange(setTopP, (value) => onUpdateAssistantSettings({ topP: value }))
const onReset = () => {
setTemperature(DEFAULT_TEMPERATURE)
setContextCount(DEFAULT_CONTEXTCOUNT)
setEnableMaxTokens(false)
setMaxTokens(0)
setTopP(1)
updateDefaultAssistant({
...defaultAssistant,
settings: {
@@ -63,7 +67,8 @@ const AssistantSettings: FC = () => {
contextCount: DEFAULT_CONTEXTCOUNT,
enableMaxTokens: false,
maxTokens: DEFAULT_MAX_TOKENS,
streamOutput: true
streamOutput: true,
topP: 1
}
})
}
@@ -125,6 +130,28 @@ const AssistantSettings: FC = () => {
/>
</Col>
</Row>
<Row align="middle">
<Label>{t('chat.settings.top_p')}</Label>
<Tooltip title={t('chat.settings.top_p.tip')}>
<QuestionIcon />
</Tooltip>
</Row>
<Row align="middle" style={{ marginBottom: 10 }} gutter={20}>
<Col span={21}>
<Slider
min={0}
max={1}
onChange={setTopP}
onChangeComplete={onTopPChange}
value={typeof topP === 'number' ? topP : 1}
marks={{ 0: '0', 0.5: '0.5', 1: '1' }}
step={0.1}
/>
</Col>
<Col span={3}>
<InputNumber min={0} max={1} step={0.01} value={topP} onChange={onTopPChange} style={{ width: '100%' }} />
</Col>
</Row>
<Row align="middle">
<Label>{t('chat.settings.context_count')}</Label>
<Tooltip title={t('chat.settings.context_count.tip')}>
@@ -223,6 +250,8 @@ const PopupContainer: React.FC<Props> = ({ resolve }) => {
resolve({})
}
DefaultAssistantSettingsPopup.hide = onCancel
return (
<Modal
title={t('settings.assistant.title')}
@@ -238,10 +267,12 @@ const PopupContainer: React.FC<Props> = ({ resolve }) => {
)
}
export default class AssistantSettingsPopup {
const TopViewKey = 'DefaultAssistantSettingsPopup'
export default class DefaultAssistantSettingsPopup {
static topviewId = 0
static hide() {
TopView.hide('AssistantSettingsPopup')
TopView.hide(TopViewKey)
}
static show() {
return new Promise<any>((resolve) => {
@@ -249,10 +280,10 @@ export default class AssistantSettingsPopup {
<PopupContainer
resolve={(v) => {
resolve(v)
this.hide()
TopView.hide(TopViewKey)
}}
/>,
'AssistantSettingsPopup'
TopViewKey
)
})
}

View File

@@ -16,7 +16,7 @@ import { FC, useMemo } from 'react'
import { useTranslation } from 'react-i18next'
import { SettingContainer, SettingDescription, SettingGroup, SettingTitle } from '..'
import AssistantSettingsPopup from '../AssistantSettings'
import DefaultAssistantSettings from './DefaultAssistantSettings'
import TopicNamingModalPopup from './TopicNamingModalPopup'
const ModelSettings: FC = () => {
@@ -93,7 +93,7 @@ const ModelSettings: FC = () => {
showSearch
placeholder={t('settings.models.empty')}
/>
<Button icon={<SettingOutlined />} style={{ marginLeft: 8 }} onClick={() => AssistantSettingsPopup.show()} />
<Button icon={<SettingOutlined />} style={{ marginLeft: 8 }} onClick={DefaultAssistantSettings.show} />
</HStack>
<SettingDescription>{t('settings.models.default_assistant_model_description')}</SettingDescription>
</SettingGroup>

View File

@@ -78,6 +78,7 @@ export default class AnthropicProvider extends BaseProvider {
messages: userMessages,
max_tokens: maxTokens || DEFAULT_MAX_TOKENS,
temperature: assistant?.settings?.temperature,
top_p: assistant?.settings?.topP,
system: assistant.prompt
}

View File

@@ -86,7 +86,8 @@ export default class GeminiProvider extends BaseProvider {
systemInstruction: assistant.prompt,
generationConfig: {
maxOutputTokens: maxTokens,
temperature: assistant?.settings?.temperature
temperature: assistant?.settings?.temperature,
topP: assistant?.settings?.topP
},
safetySettings: [
{ category: HarmCategory.HARM_CATEGORY_HATE_SPEECH, threshold: HarmBlockThreshold.BLOCK_NONE },

View File

@@ -146,6 +146,7 @@ export default class OpenAIProvider extends BaseProvider {
Boolean
) as ChatCompletionMessageParam[],
temperature: isOpenAIo1 ? 1 : assistant?.settings?.temperature,
top_p: assistant?.settings?.topP,
max_tokens: maxTokens,
keep_alive: this.keepAliveTime,
stream: isSupportStreamOutput

View File

@@ -95,6 +95,7 @@ export const getAssistantSettings = (assistant: Assistant): AssistantSettings =>
return {
contextCount: contextCount === 20 ? 100000 : contextCount,
temperature: assistant?.settings?.temperature ?? DEFAULT_TEMPERATURE,
topP: assistant?.settings?.topP ?? 1,
enableMaxTokens: assistant?.settings?.enableMaxTokens ?? false,
maxTokens: getAssistantMaxTokens(),
streamOutput: assistant?.settings?.streamOutput ?? true,

View File

@@ -23,6 +23,7 @@ export type AssistantMessage = {
export type AssistantSettings = {
contextCount: number
temperature: number
topP: number
maxTokens: number | undefined
enableMaxTokens: boolean
streamOutput: boolean

View File

@@ -0,0 +1,12 @@
export const oauthWithSiliconFlow = async (setKey) => {
const clientId = 'SFrugiu0ezVmREv8BAU6GV'
const ACCOUNT_ENDPOINT = 'https://account.siliconflow.cn'
const authUrl = `${ACCOUNT_ENDPOINT}/oauth?client_id=${clientId}`
const popup = window.open(authUrl, 'oauthPopup', 'width=600,height=600')
window.addEventListener('message', (event) => {
if (event.data.length > 0 && event.data[0]['secretKey'] !== undefined) {
setKey(event.data[0]['secretKey'])
popup?.close()
}
})
}
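
The last file adds a SiliconFlow OAuth helper: it opens the account.siliconflow.cn authorization page in a popup and, when the page posts back a message whose first element carries a secretKey, hands that key to the supplied callback and closes the popup. A minimal usage sketch follows; the import path and the SiliconFlowLoginButton component are assumptions for illustration only, not part of this commit.

import { useState } from 'react'
// Assumed path; import from wherever the new helper actually lives in the renderer.
import { oauthWithSiliconFlow } from '@renderer/utils/oauth'

// Hypothetical button that fills in an API key via the OAuth popup.
const SiliconFlowLoginButton = () => {
  const [apiKey, setApiKey] = useState('')
  return (
    <button onClick={() => oauthWithSiliconFlow(setApiKey)}>
      {apiKey ? 'SiliconFlow connected' : 'Login with SiliconFlow'}
    </button>
  )
}

export default SiliconFlowLoginButton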