feat: add deepseek-reasoner model support
This commit is contained in:
parent
f7db1289e4
commit
21ad28ee62
@ -113,7 +113,9 @@
|
||||
"topics.move_to": "Move to",
|
||||
"topics.title": "Topics",
|
||||
"translate": "Translate",
|
||||
"resend": "Resend"
|
||||
"resend": "Resend",
|
||||
"thinking": "Thinking",
|
||||
"deeply_thought": "Deeply thought ({{secounds}} seconds)"
|
||||
},
|
||||
"common": {
|
||||
"and": "and",
|
||||
|
||||
@ -113,7 +113,9 @@
|
||||
"topics.move_to": "移動先",
|
||||
"topics.title": "トピック",
|
||||
"translate": "翻訳",
|
||||
"resend": "再送信"
|
||||
"resend": "再送信",
|
||||
"thinking": "思考中...",
|
||||
"deeply_thought": "深く考えています({{secounds}} 秒)"
|
||||
},
|
||||
"common": {
|
||||
"and": "と",
|
||||
@ -260,7 +262,8 @@
|
||||
"upgrade.success.content": "アップグレードを完了するためにアプリケーションを再起動してください",
|
||||
"upgrade.success.title": "アップグレードに成功しました",
|
||||
"regenerate.confirm": "再生成すると現在のメッセージが置き換えられます",
|
||||
"copy.success": "コピーしました!"
|
||||
"copy.success": "コピーしました!",
|
||||
"error.get_embedding_dimensions": "埋込み次元を取得できませんでした"
|
||||
},
|
||||
"minapp": {
|
||||
"title": "ミニアプリ",
|
||||
|
||||
@ -113,7 +113,9 @@
|
||||
"topics.move_to": "Переместить в",
|
||||
"topics.title": "Топики",
|
||||
"translate": "Перевести",
|
||||
"resend": "Переотправить"
|
||||
"resend": "Переотправить",
|
||||
"thinking": "Мыслим",
|
||||
"deeply_thought": "Мыслим ({{secounds}} секунд)"
|
||||
},
|
||||
"common": {
|
||||
"and": "и",
|
||||
|
||||
@ -113,7 +113,9 @@
|
||||
"topics.move_to": "移动到",
|
||||
"topics.title": "话题",
|
||||
"translate": "翻译",
|
||||
"resend": "重新发送"
|
||||
"resend": "重新发送",
|
||||
"thinking": "思考中",
|
||||
"deeply_thought": "已深度思考(用时 {{secounds}} 秒)"
|
||||
},
|
||||
"common": {
|
||||
"and": "和",
|
||||
|
||||
@ -113,7 +113,9 @@
|
||||
"topics.move_to": "移動到",
|
||||
"topics.title": "話題",
|
||||
"translate": "翻譯",
|
||||
"resend": "重新發送"
|
||||
"resend": "重新發送",
|
||||
"thinking": "思考中",
|
||||
"deeply_thought": "已深度思考(用時 {{secounds}} 秒)"
|
||||
},
|
||||
"common": {
|
||||
"and": "與",
|
||||
|
||||
@ -292,13 +292,14 @@ const Tabs = styled(TabsAntd)<{ $language: string }>`
|
||||
justify-content: ${({ $language }) => ($language.startsWith('zh') ? 'center' : 'flex-start')};
|
||||
user-select: none;
|
||||
transition: all 0.3s cubic-bezier(0.645, 0.045, 0.355, 1);
|
||||
|
||||
outline: none !important;
|
||||
.ant-tabs-tab-btn {
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
max-width: 100px;
|
||||
transition: all 0.3s cubic-bezier(0.645, 0.045, 0.355, 1);
|
||||
outline: none !important;
|
||||
}
|
||||
&:hover {
|
||||
color: var(--color-text) !important;
|
||||
|
||||
@ -11,6 +11,7 @@ import Markdown from '../Markdown/Markdown'
|
||||
import MessageAttachments from './MessageAttachments'
|
||||
import MessageError from './MessageError'
|
||||
import MessageSearchResults from './MessageSearchResults'
|
||||
import MessageThought from './MessageThought'
|
||||
|
||||
const MessageContent: React.FC<{
|
||||
message: Message
|
||||
@ -40,6 +41,7 @@ const MessageContent: React.FC<{
|
||||
<Flex gap="8px" wrap>
|
||||
{message.mentions?.map((model) => <MentionTag key={model.id}>{'@' + model.name}</MentionTag>)}
|
||||
</Flex>
|
||||
<MessageThought message={message} />
|
||||
<Markdown message={message} />
|
||||
{message.translatedContent && (
|
||||
<>
|
||||
|
||||
60
src/renderer/src/pages/home/Messages/MessageThought.tsx
Normal file
60
src/renderer/src/pages/home/Messages/MessageThought.tsx
Normal file
@ -0,0 +1,60 @@
|
||||
import { Message } from '@renderer/types'
|
||||
import { Collapse } from 'antd'
|
||||
import { FC } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import ReactMarkdown from 'react-markdown'
|
||||
import BarLoader from 'react-spinners/BarLoader'
|
||||
import styled from 'styled-components'
|
||||
|
||||
interface Props {
  message: Message
}

/**
 * Collapsible panel showing a reasoning model's chain-of-thought stream
 * (message.reasoning_content, e.g. from deepseek-reasoner).
 *
 * Renders nothing when the message carries no reasoning content. While
 * the final answer has not started streaming (message.content is still
 * empty) it shows a "thinking" label with an animated bar; once content
 * arrives it shows the total thinking time instead.
 */
const MessageThought: FC<Props> = ({ message }) => {
  const { t } = useTranslation()

  // No reasoning stream for this message — render nothing.
  if (!message.reasoning_content) {
    return null
  }

  // The model is "thinking" until the first answer token arrives.
  const isThinking = !message.content

  // time_thinking_millsec is measured by the provider (first content
  // chunk minus request start); default to 0 when metrics are absent.
  const thinkingTime = message.metrics?.time_thinking_millsec || 0
  const thinkingTimeSeconds = (thinkingTime / 1000).toFixed(1)

  return (
    <CollapseContainer
      items={[
        {
          key: 'thought',
          label: (
            <MessageTitleLabel>
              <ThinkingText>
                {/* NOTE: the i18n placeholder is spelled "secounds" to
                    match the existing locale files — do not "fix" it
                    here without renaming the key in every locale. */}
                {isThinking ? t('chat.thinking') : t('chat.deeply_thought', { secounds: thinkingTimeSeconds })}
              </ThinkingText>
              {isThinking && <BarLoader color="#9254de" />}
            </MessageTitleLabel>
          ),
          children: <ReactMarkdown>{message.reasoning_content}</ReactMarkdown>
        }
      ]}
    />
  )
}

const CollapseContainer = styled(Collapse)`
  margin-bottom: 15px;
`

const MessageTitleLabel = styled.div`
  display: flex;
  flex-direction: row;
  align-items: center;
  height: 22px;
  gap: 15px;
`

// Renamed from the original misspelled "TinkingText".
const ThinkingText = styled.span`
  color: var(--color-text-2);
`

export default MessageThought
|
||||
@ -3,7 +3,7 @@ import { getOllamaKeepAliveTime } from '@renderer/hooks/useOllama'
|
||||
import { getKnowledgeReferences } from '@renderer/services/KnowledgeService'
|
||||
import store from '@renderer/store'
|
||||
import { Assistant, GenerateImageParams, Message, Model, Provider, Suggestion } from '@renderer/types'
|
||||
import { delay, isJSON } from '@renderer/utils'
|
||||
import { delay, isJSON, parseJSON } from '@renderer/utils'
|
||||
import OpenAI from 'openai'
|
||||
|
||||
import { CompletionsParams } from '.'
|
||||
@ -98,9 +98,15 @@ export default abstract class BaseProvider {
|
||||
}
|
||||
if (param.type === 'json') {
|
||||
const value = param.value as string
|
||||
return { ...acc, [param.name]: isJSON(value) ? JSON.parse(value) : value }
|
||||
return {
|
||||
...acc,
|
||||
[param.name]: isJSON(value) ? parseJSON(value) : value
|
||||
}
|
||||
}
|
||||
return {
|
||||
...acc,
|
||||
[param.name]: param.value
|
||||
}
|
||||
return { ...acc, [param.name]: param.value }
|
||||
}, {}) || {}
|
||||
)
|
||||
}
|
||||
|
||||
@ -117,6 +117,20 @@ export default class OpenAIProvider extends BaseProvider {
|
||||
} as ChatCompletionMessageParam
|
||||
}
|
||||
|
||||
/**
 * Resolve the temperature to send for a request, or undefined to omit
 * the parameter entirely.
 *
 * OpenAI o1-family models and DeepSeek's deepseek-reasoner reject a
 * caller-supplied temperature, so for those we return undefined and
 * let the API use its own default; otherwise we pass through the
 * assistant's configured temperature (which may itself be undefined).
 */
private getTemperature(assistant: Assistant, model: Model) {
  const isOpenAIo1 = model.id.startsWith('o1')
  const isDeepseekReasoner = model.provider === 'deepseek' && model.id === 'deepseek-reasoner'

  return isOpenAIo1 || isDeepseekReasoner ? undefined : assistant?.settings?.temperature
}
|
||||
|
||||
async completions({ messages, assistant, onChunk, onFilterMessages }: CompletionsParams): Promise<void> {
|
||||
const defaultModel = getDefaultModel()
|
||||
const model = assistant.model || defaultModel
|
||||
@ -128,6 +142,12 @@ export default class OpenAIProvider extends BaseProvider {
|
||||
const _messages = filterContextMessages(takeRight(messages, contextCount + 1))
|
||||
onFilterMessages(_messages)
|
||||
|
||||
if (model.id === 'deepseek-reasoner') {
|
||||
if (_messages[0]?.role !== 'user') {
|
||||
userMessages.push({ role: 'user', content: '' })
|
||||
}
|
||||
}
|
||||
|
||||
for (const message of _messages) {
|
||||
userMessages.push(await this.getMessageParam(message, model))
|
||||
}
|
||||
@ -142,6 +162,7 @@ export default class OpenAIProvider extends BaseProvider {
|
||||
}
|
||||
|
||||
let time_first_token_millsec = 0
|
||||
let time_first_content_millsec = 0
|
||||
const start_time_millsec = new Date().getTime()
|
||||
|
||||
// @ts-ignore key is not typed
|
||||
@ -150,7 +171,7 @@ export default class OpenAIProvider extends BaseProvider {
|
||||
messages: [isOpenAIo1 ? undefined : systemMessage, ...userMessages].filter(
|
||||
Boolean
|
||||
) as ChatCompletionMessageParam[],
|
||||
temperature: isOpenAIo1 ? 1 : assistant?.settings?.temperature,
|
||||
temperature: this.getTemperature(assistant, model),
|
||||
top_p: assistant?.settings?.topP,
|
||||
max_tokens: maxTokens,
|
||||
keep_alive: this.keepAliveTime,
|
||||
@ -176,17 +197,28 @@ export default class OpenAIProvider extends BaseProvider {
|
||||
if (window.keyv.get(EVENT_NAMES.CHAT_COMPLETION_PAUSED)) {
|
||||
break
|
||||
}
|
||||
|
||||
if (time_first_token_millsec == 0) {
|
||||
time_first_token_millsec = new Date().getTime() - start_time_millsec
|
||||
}
|
||||
|
||||
if (time_first_content_millsec == 0 && chunk.choices[0]?.delta?.content) {
|
||||
time_first_content_millsec = new Date().getTime()
|
||||
}
|
||||
|
||||
const time_completion_millsec = new Date().getTime() - start_time_millsec
|
||||
const time_thinking_millsec = time_first_content_millsec ? time_first_content_millsec - start_time_millsec : 0
|
||||
|
||||
onChunk({
|
||||
text: chunk.choices[0]?.delta?.content || '',
|
||||
// @ts-ignore key is not typed
|
||||
reasoning_content: chunk.choices[0]?.delta?.reasoning_content || '',
|
||||
usage: chunk.usage,
|
||||
metrics: {
|
||||
completion_tokens: chunk.usage?.completion_tokens,
|
||||
time_completion_millsec,
|
||||
time_first_token_millsec
|
||||
time_first_token_millsec,
|
||||
time_thinking_millsec
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
5
src/renderer/src/providers/index.d.ts
vendored
5
src/renderer/src/providers/index.d.ts
vendored
@ -1,8 +1,9 @@
|
||||
import type { GroundingMetadata } from '@google/generative-ai'
|
||||
import type { Assistant, Metrics } from '@renderer/types'
|
||||
import type { Assistant, Message, Metrics } from '@renderer/types'
|
||||
|
||||
interface ChunkCallbackData {
|
||||
text?: string
|
||||
reasoning_content?: string
|
||||
usage?: OpenAI.Completions.CompletionUsage
|
||||
metrics?: Metrics
|
||||
search?: GroundingMetadata
|
||||
@ -11,6 +12,6 @@ interface ChunkCallbackData {
|
||||
interface CompletionsParams {
|
||||
messages: Message[]
|
||||
assistant: Assistant
|
||||
onChunk: ({ text, usage, metrics, search }: ChunkCallbackData) => void
|
||||
onChunk: ({ text, reasoning_content, usage, metrics, search }: ChunkCallbackData) => void
|
||||
onFilterMessages: (messages: Message[]) => void
|
||||
}
|
||||
|
||||
@ -56,11 +56,15 @@ export async function fetchChatCompletion({
|
||||
messages,
|
||||
assistant,
|
||||
onFilterMessages: (messages) => (_messages = messages),
|
||||
onChunk: ({ text, usage, metrics, search }) => {
|
||||
onChunk: ({ text, reasoning_content, usage, metrics, search }) => {
|
||||
message.content = message.content + text || ''
|
||||
message.usage = usage
|
||||
message.metrics = metrics
|
||||
|
||||
if (reasoning_content) {
|
||||
message.reasoning_content = (message.reasoning_content || '') + reasoning_content
|
||||
}
|
||||
|
||||
if (search) {
|
||||
message.metadata = { groundingMetadata: search }
|
||||
}
|
||||
|
||||
@ -47,6 +47,7 @@ export type Message = {
|
||||
assistantId: string
|
||||
role: 'user' | 'assistant'
|
||||
content: string
|
||||
reasoning_content?: string
|
||||
translatedContent?: string
|
||||
topicId: string
|
||||
createdAt: string
|
||||
@ -71,6 +72,7 @@ export type Metrics = {
|
||||
completion_tokens?: number
|
||||
time_completion_millsec?: number
|
||||
time_first_token_millsec?: number
|
||||
time_thinking_millsec?: number
|
||||
}
|
||||
|
||||
export type Topic = {
|
||||
|
||||
@ -26,6 +26,18 @@ export function isJSON(str: any): boolean {
|
||||
}
|
||||
}
|
||||
|
||||
export function parseJSON(str: string) {
|
||||
if (str === 'undefined') {
|
||||
return undefined
|
||||
}
|
||||
|
||||
try {
|
||||
return JSON.parse(str)
|
||||
} catch (e) {
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
export const delay = (seconds: number) => {
|
||||
return new Promise((resolve) => {
|
||||
setTimeout(() => {
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user