feat: add deepseek-reasoner model support

This commit is contained in:
kangfenmao 2025-01-21 13:24:32 +08:00
parent f7db1289e4
commit 21ad28ee62
14 changed files with 146 additions and 15 deletions

View File

@ -113,7 +113,9 @@
"topics.move_to": "Move to", "topics.move_to": "Move to",
"topics.title": "Topics", "topics.title": "Topics",
"translate": "Translate", "translate": "Translate",
"resend": "Resend" "resend": "Resend",
"thinking": "Thinking",
"deeply_thought": "Deeply thought ({{secounds}} seconds)"
}, },
"common": { "common": {
"and": "and", "and": "and",

View File

@ -113,7 +113,9 @@
"topics.move_to": "移動先", "topics.move_to": "移動先",
"topics.title": "トピック", "topics.title": "トピック",
"translate": "翻訳", "translate": "翻訳",
"resend": "再送信" "resend": "再送信",
"thinking": "思考中...",
"deeply_thought": "深く考えています({{secounds}} 秒)"
}, },
"common": { "common": {
"and": "と", "and": "と",
@ -260,7 +262,8 @@
"upgrade.success.content": "アップグレードを完了するためにアプリケーションを再起動してください", "upgrade.success.content": "アップグレードを完了するためにアプリケーションを再起動してください",
"upgrade.success.title": "アップグレードに成功しました", "upgrade.success.title": "アップグレードに成功しました",
"regenerate.confirm": "再生成すると現在のメッセージが置き換えられます", "regenerate.confirm": "再生成すると現在のメッセージが置き換えられます",
"copy.success": "コピーしました!" "copy.success": "コピーしました!",
"error.get_embedding_dimensions": "埋込み次元を取得できませんでした"
}, },
"minapp": { "minapp": {
"title": "ミニアプリ", "title": "ミニアプリ",

View File

@ -113,7 +113,9 @@
"topics.move_to": "Переместить в", "topics.move_to": "Переместить в",
"topics.title": "Топики", "topics.title": "Топики",
"translate": "Перевести", "translate": "Перевести",
"resend": "Переотправить" "resend": "Переотправить",
"thinking": "Мыслим",
"deeply_thought": "Мыслим ({{secounds}} секунд)"
}, },
"common": { "common": {
"and": "и", "and": "и",

View File

@ -113,7 +113,9 @@
"topics.move_to": "移动到", "topics.move_to": "移动到",
"topics.title": "话题", "topics.title": "话题",
"translate": "翻译", "translate": "翻译",
"resend": "重新发送" "resend": "重新发送",
"thinking": "思考中",
"deeply_thought": "已深度思考(用时 {{secounds}} 秒)"
}, },
"common": { "common": {
"and": "和", "and": "和",

View File

@ -113,7 +113,9 @@
"topics.move_to": "移動到", "topics.move_to": "移動到",
"topics.title": "話題", "topics.title": "話題",
"translate": "翻譯", "translate": "翻譯",
"resend": "重新發送" "resend": "重新發送",
"thinking": "思考中",
"deeply_thought": "已深度思考(用時 {{secounds}} 秒)"
}, },
"common": { "common": {
"and": "與", "and": "與",

View File

@ -292,13 +292,14 @@ const Tabs = styled(TabsAntd)<{ $language: string }>`
justify-content: ${({ $language }) => ($language.startsWith('zh') ? 'center' : 'flex-start')}; justify-content: ${({ $language }) => ($language.startsWith('zh') ? 'center' : 'flex-start')};
user-select: none; user-select: none;
transition: all 0.3s cubic-bezier(0.645, 0.045, 0.355, 1); transition: all 0.3s cubic-bezier(0.645, 0.045, 0.355, 1);
outline: none !important;
.ant-tabs-tab-btn { .ant-tabs-tab-btn {
white-space: nowrap; white-space: nowrap;
overflow: hidden; overflow: hidden;
text-overflow: ellipsis; text-overflow: ellipsis;
max-width: 100px; max-width: 100px;
transition: all 0.3s cubic-bezier(0.645, 0.045, 0.355, 1); transition: all 0.3s cubic-bezier(0.645, 0.045, 0.355, 1);
outline: none !important;
} }
&:hover { &:hover {
color: var(--color-text) !important; color: var(--color-text) !important;

View File

@ -11,6 +11,7 @@ import Markdown from '../Markdown/Markdown'
import MessageAttachments from './MessageAttachments' import MessageAttachments from './MessageAttachments'
import MessageError from './MessageError' import MessageError from './MessageError'
import MessageSearchResults from './MessageSearchResults' import MessageSearchResults from './MessageSearchResults'
import MessageThought from './MessageThought'
const MessageContent: React.FC<{ const MessageContent: React.FC<{
message: Message message: Message
@ -40,6 +41,7 @@ const MessageContent: React.FC<{
<Flex gap="8px" wrap> <Flex gap="8px" wrap>
{message.mentions?.map((model) => <MentionTag key={model.id}>{'@' + model.name}</MentionTag>)} {message.mentions?.map((model) => <MentionTag key={model.id}>{'@' + model.name}</MentionTag>)}
</Flex> </Flex>
<MessageThought message={message} />
<Markdown message={message} /> <Markdown message={message} />
{message.translatedContent && ( {message.translatedContent && (
<> <>

View File

import { Message } from '@renderer/types'
import { Collapse } from 'antd'
import { FC } from 'react'
import { useTranslation } from 'react-i18next'
import ReactMarkdown from 'react-markdown'
import BarLoader from 'react-spinners/BarLoader'
import styled from 'styled-components'

interface Props {
  message: Message
}

/**
 * Collapsible panel that renders a reasoning model's chain-of-thought
 * (`message.reasoning_content`, e.g. from deepseek-reasoner).
 *
 * While the final answer has not started streaming (`message.content` is
 * still empty) it shows a "thinking" label with an animated bar; once the
 * answer arrives it shows the elapsed thinking time taken from
 * `message.metrics.time_thinking_millsec`.
 *
 * Renders nothing when the message carries no reasoning content.
 */
const MessageThought: FC<Props> = ({ message }) => {
  // The model is considered "still thinking" until the first answer token
  // has been appended to message.content by the streaming callback.
  const isThinking = !message.content
  const { t } = useTranslation()

  if (!message.reasoning_content) {
    return null
  }

  const thinkingTime = message.metrics?.time_thinking_millsec || 0
  // NOTE(review): the interpolation key is spelled 'secounds' to match the
  // placeholder used in every locale file — keep them in sync if the locale
  // keys are ever corrected.
  const thinkingTimeSecounds = (thinkingTime / 1000).toFixed(1)

  return (
    <CollapseContainer
      items={[
        {
          key: 'thought',
          label: (
            <MessageTitleLabel>
              <ThinkingText>
                {isThinking ? t('chat.thinking') : t('chat.deeply_thought', { secounds: thinkingTimeSecounds })}
              </ThinkingText>
              {isThinking && <BarLoader color="#9254de" />}
            </MessageTitleLabel>
          ),
          children: <ReactMarkdown>{message.reasoning_content}</ReactMarkdown>
        }
      ]}
    />
  )
}

const CollapseContainer = styled(Collapse)`
  margin-bottom: 15px;
`

const MessageTitleLabel = styled.div`
  display: flex;
  flex-direction: row;
  align-items: center;
  height: 22px;
  gap: 15px;
`

const ThinkingText = styled.span`
  color: var(--color-text-2);
`

export default MessageThought

View File

@ -3,7 +3,7 @@ import { getOllamaKeepAliveTime } from '@renderer/hooks/useOllama'
import { getKnowledgeReferences } from '@renderer/services/KnowledgeService' import { getKnowledgeReferences } from '@renderer/services/KnowledgeService'
import store from '@renderer/store' import store from '@renderer/store'
import { Assistant, GenerateImageParams, Message, Model, Provider, Suggestion } from '@renderer/types' import { Assistant, GenerateImageParams, Message, Model, Provider, Suggestion } from '@renderer/types'
import { delay, isJSON } from '@renderer/utils' import { delay, isJSON, parseJSON } from '@renderer/utils'
import OpenAI from 'openai' import OpenAI from 'openai'
import { CompletionsParams } from '.' import { CompletionsParams } from '.'
@ -98,9 +98,15 @@ export default abstract class BaseProvider {
} }
if (param.type === 'json') { if (param.type === 'json') {
const value = param.value as string const value = param.value as string
return { ...acc, [param.name]: isJSON(value) ? JSON.parse(value) : value } return {
...acc,
[param.name]: isJSON(value) ? parseJSON(value) : value
}
}
return {
...acc,
[param.name]: param.value
} }
return { ...acc, [param.name]: param.value }
}, {}) || {} }, {}) || {}
) )
} }

View File

@ -117,6 +117,20 @@ export default class OpenAIProvider extends BaseProvider {
} as ChatCompletionMessageParam } as ChatCompletionMessageParam
} }
// Some reasoning models reject an explicit temperature parameter entirely;
// for those we must omit it from the request instead of sending a value.
private getTemperature(assistant: Assistant, model: Model) {
  const isReasoningModel =
    model.id.startsWith('o1') || (model.provider === 'deepseek' && model.id === 'deepseek-reasoner')

  // Omit temperature for OpenAI o1* and DeepSeek's deepseek-reasoner;
  // everyone else gets the assistant's configured value (if any).
  return isReasoningModel ? undefined : assistant?.settings?.temperature
}
async completions({ messages, assistant, onChunk, onFilterMessages }: CompletionsParams): Promise<void> { async completions({ messages, assistant, onChunk, onFilterMessages }: CompletionsParams): Promise<void> {
const defaultModel = getDefaultModel() const defaultModel = getDefaultModel()
const model = assistant.model || defaultModel const model = assistant.model || defaultModel
@ -128,6 +142,12 @@ export default class OpenAIProvider extends BaseProvider {
const _messages = filterContextMessages(takeRight(messages, contextCount + 1)) const _messages = filterContextMessages(takeRight(messages, contextCount + 1))
onFilterMessages(_messages) onFilterMessages(_messages)
if (model.id === 'deepseek-reasoner') {
if (_messages[0]?.role !== 'user') {
userMessages.push({ role: 'user', content: '' })
}
}
for (const message of _messages) { for (const message of _messages) {
userMessages.push(await this.getMessageParam(message, model)) userMessages.push(await this.getMessageParam(message, model))
} }
@ -142,6 +162,7 @@ export default class OpenAIProvider extends BaseProvider {
} }
let time_first_token_millsec = 0 let time_first_token_millsec = 0
let time_first_content_millsec = 0
const start_time_millsec = new Date().getTime() const start_time_millsec = new Date().getTime()
// @ts-ignore key is not typed // @ts-ignore key is not typed
@ -150,7 +171,7 @@ export default class OpenAIProvider extends BaseProvider {
messages: [isOpenAIo1 ? undefined : systemMessage, ...userMessages].filter( messages: [isOpenAIo1 ? undefined : systemMessage, ...userMessages].filter(
Boolean Boolean
) as ChatCompletionMessageParam[], ) as ChatCompletionMessageParam[],
temperature: isOpenAIo1 ? 1 : assistant?.settings?.temperature, temperature: this.getTemperature(assistant, model),
top_p: assistant?.settings?.topP, top_p: assistant?.settings?.topP,
max_tokens: maxTokens, max_tokens: maxTokens,
keep_alive: this.keepAliveTime, keep_alive: this.keepAliveTime,
@ -176,17 +197,28 @@ export default class OpenAIProvider extends BaseProvider {
if (window.keyv.get(EVENT_NAMES.CHAT_COMPLETION_PAUSED)) { if (window.keyv.get(EVENT_NAMES.CHAT_COMPLETION_PAUSED)) {
break break
} }
if (time_first_token_millsec == 0) { if (time_first_token_millsec == 0) {
time_first_token_millsec = new Date().getTime() - start_time_millsec time_first_token_millsec = new Date().getTime() - start_time_millsec
} }
if (time_first_content_millsec == 0 && chunk.choices[0]?.delta?.content) {
time_first_content_millsec = new Date().getTime()
}
const time_completion_millsec = new Date().getTime() - start_time_millsec const time_completion_millsec = new Date().getTime() - start_time_millsec
const time_thinking_millsec = time_first_content_millsec ? time_first_content_millsec - start_time_millsec : 0
onChunk({ onChunk({
text: chunk.choices[0]?.delta?.content || '', text: chunk.choices[0]?.delta?.content || '',
// @ts-ignore key is not typed
reasoning_content: chunk.choices[0]?.delta?.reasoning_content || '',
usage: chunk.usage, usage: chunk.usage,
metrics: { metrics: {
completion_tokens: chunk.usage?.completion_tokens, completion_tokens: chunk.usage?.completion_tokens,
time_completion_millsec, time_completion_millsec,
time_first_token_millsec time_first_token_millsec,
time_thinking_millsec
} }
}) })
} }

View File

@ -1,8 +1,9 @@
import type { GroundingMetadata } from '@google/generative-ai' import type { GroundingMetadata } from '@google/generative-ai'
import type { Assistant, Metrics } from '@renderer/types' import type { Assistant, Message, Metrics } from '@renderer/types'
interface ChunkCallbackData { interface ChunkCallbackData {
text?: string text?: string
reasoning_content?: string
usage?: OpenAI.Completions.CompletionUsage usage?: OpenAI.Completions.CompletionUsage
metrics?: Metrics metrics?: Metrics
search?: GroundingMetadata search?: GroundingMetadata
@ -11,6 +12,6 @@ interface ChunkCallbackData {
interface CompletionsParams { interface CompletionsParams {
messages: Message[] messages: Message[]
assistant: Assistant assistant: Assistant
onChunk: ({ text, usage, metrics, search }: ChunkCallbackData) => void onChunk: ({ text, reasoning_content, usage, metrics, search }: ChunkCallbackData) => void
onFilterMessages: (messages: Message[]) => void onFilterMessages: (messages: Message[]) => void
} }

View File

@ -56,11 +56,15 @@ export async function fetchChatCompletion({
messages, messages,
assistant, assistant,
onFilterMessages: (messages) => (_messages = messages), onFilterMessages: (messages) => (_messages = messages),
onChunk: ({ text, usage, metrics, search }) => { onChunk: ({ text, reasoning_content, usage, metrics, search }) => {
message.content = message.content + text || '' message.content = message.content + text || ''
message.usage = usage message.usage = usage
message.metrics = metrics message.metrics = metrics
if (reasoning_content) {
message.reasoning_content = (message.reasoning_content || '') + reasoning_content
}
if (search) { if (search) {
message.metadata = { groundingMetadata: search } message.metadata = { groundingMetadata: search }
} }

View File

@ -47,6 +47,7 @@ export type Message = {
assistantId: string assistantId: string
role: 'user' | 'assistant' role: 'user' | 'assistant'
content: string content: string
reasoning_content?: string
translatedContent?: string translatedContent?: string
topicId: string topicId: string
createdAt: string createdAt: string
@ -71,6 +72,7 @@ export type Metrics = {
completion_tokens?: number completion_tokens?: number
time_completion_millsec?: number time_completion_millsec?: number
time_first_token_millsec?: number time_first_token_millsec?: number
time_thinking_millsec?: number
} }
export type Topic = { export type Topic = {

View File

@ -26,6 +26,18 @@ export function isJSON(str: any): boolean {
} }
} }
/**
 * Leniently parse a JSON string.
 *
 * @param str - the raw string to parse
 * @returns the parsed value; `undefined` when the input is the literal
 *   string `'undefined'` (e.g. the result of `String(undefined)`), or
 *   `null` when the input is not valid JSON.
 */
export function parseJSON(str: string) {
  // 'undefined' is not valid JSON but commonly appears when a missing
  // value was stringified; treat it as the absent value it represents.
  if (str === 'undefined') {
    return undefined
  }
  try {
    return JSON.parse(str)
  } catch {
    // Optional catch binding: the error itself is irrelevant — callers
    // treat a null result as "unparsable input".
    return null
  }
}
export const delay = (seconds: number) => { export const delay = (seconds: number) => {
return new Promise((resolve) => { return new Promise((resolve) => {
setTimeout(() => { setTimeout(() => {