feat: fetch chat completion with history messages
parent 10c07413b5
commit f068109e95
@@ -57,7 +57,7 @@ const Messages: FC<Props> = ({ assistant, topic }) => {
       EventEmitter.on(EVENT_NAMES.SEND_MESSAGE, async (msg: Message) => {
         console.debug({ assistant, provider, message: msg, topic })
         onSendMessage(msg)
-        fetchChatCompletion({ assistant, message: msg, topic, onResponse: setLastMessage })
+        fetchChatCompletion({ assistant, messages: [...messages, msg], topic, onResponse: setLastMessage })
       }),
       EventEmitter.on(EVENT_NAMES.AI_CHAT_COMPLETION, async (msg: Message) => {
         setLastMessage(null)
@@ -72,7 +72,7 @@ const Messages: FC<Props> = ({ assistant, topic }) => {
       })
     ]
     return () => unsubscribes.forEach((unsub) => unsub())
-  }, [assistant, autoRenameTopic, onSendMessage, provider, topic, updateTopic])
+  }, [assistant, autoRenameTopic, messages, onSendMessage, provider, topic, updateTopic])

   useEffect(() => {
     runAsyncFunction(async () => {
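Aside (not part of the diff): the SEND_MESSAGE handler now closes over `messages`, which is why `messages` is added to the effect's dependency array above, and the history is spread so the argument stays a flat `Message[]`. A minimal TypeScript sketch, with the message type narrowed to the two fields this diff actually reads:

// Sketch only, assuming a minimal message shape; the real Message type lives in the app.
type SketchMessage = { role: 'user' | 'assistant' | 'system'; content: string }

const history: SketchMessage[] = [
  { role: 'user', content: 'Hi' },
  { role: 'assistant', content: 'Hello!' }
]
const incoming: SketchMessage = { role: 'user', content: 'One more question' }

// Spreading keeps the payload flat, matching the new messages: Message[] parameter.
const payload: SketchMessage[] = [...history, incoming]

// Nesting instead of spreading would not type-check as SketchMessage[]:
// const wrong: SketchMessage[] = [history, incoming]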
@@ -1,6 +1,7 @@
 import { useSettings } from '@renderer/hooks/useSettings'
 import { Dropdown, MenuProps } from 'antd'
 import { FC, PropsWithChildren } from 'react'
+import { ArrowUpOutlined, EnterOutlined } from '@ant-design/icons'

 interface Props extends PropsWithChildren {}

@@ -11,11 +12,13 @@ const SendMessageSetting: FC<Props> = ({ children }) => {
     {
       label: 'Enter Send',
       key: 'Enter',
+      icon: <EnterOutlined />,
       onClick: () => setSendMessageShortcut('Enter')
     },
     {
       label: 'Shift + Enter Send',
       key: 'Shift+Enter',
+      icon: <ArrowUpOutlined />,
       onClick: () => setSendMessageShortcut('Shift+Enter')
     }
   ]
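Aside (not part of the diff): the array above is a plain antd `MenuProps['items']` list, so the new `icon` entries render as soon as the list is handed to the Dropdown. A minimal sketch of that wiring, assuming antd v5's `menu` prop (which the existing `MenuProps` import suggests):

// Sketch only: hypothetical component showing how an items array with icons
// plugs into antd's Dropdown.
import { Dropdown, MenuProps } from 'antd'
import { EnterOutlined } from '@ant-design/icons'
import { FC, PropsWithChildren } from 'react'

const SketchDropdown: FC<PropsWithChildren> = ({ children }) => {
  const items: MenuProps['items'] = [
    { label: 'Enter Send', key: 'Enter', icon: <EnterOutlined /> }
  ]
  return (
    <Dropdown menu={{ items }} trigger={['click']}>
      {children}
    </Dropdown>
  )
}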
@@ -4,9 +4,10 @@ import { EVENT_NAMES, EventEmitter } from './event'
 import { ChatCompletionMessageParam, ChatCompletionSystemMessageParam } from 'openai/resources'
 import OpenAI from 'openai'
 import { getAssistantProvider, getDefaultModel } from './assistant'
+import { takeRight } from 'lodash'

 interface FetchChatCompletionParams {
-  message: Message
+  messages: Message[]
   topic: Topic
   assistant: Assistant
   onResponse: (message: Message) => void
@@ -20,7 +21,7 @@ const getOpenAiProvider = (provider: Provider) => {
   })
 }

-export async function fetchChatCompletion({ message, topic, assistant, onResponse }: FetchChatCompletionParams) {
+export async function fetchChatCompletion({ messages, topic, assistant, onResponse }: FetchChatCompletionParams) {
   const provider = getAssistantProvider(assistant)
   const openaiProvider = getOpenAiProvider(provider)
   const defaultModel = getDefaultModel()
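Aside (not part of the diff): with the new signature the caller passes the whole topic history instead of a single message. A hypothetical call site, hedged since only the parameter names are confirmed by this diff and the other identifiers are placeholders:

// Sketch only: illustrative call of the updated fetchChatCompletion signature.
// currentAssistant, topicMessages, newUserMessage, and currentTopic are placeholders.
async function sendWithHistory() {
  await fetchChatCompletion({
    assistant: currentAssistant,
    messages: [...topicMessages, newUserMessage],
    topic: currentTopic,
    onResponse: (partial) => console.log(partial.content)
  })
}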
@@ -30,7 +31,7 @@ export async function fetchChatCompletion({ message, topic, assistant, onResponse }: FetchChatCompletionParams) {
     model: model.id,
     messages: [
       { role: 'system', content: assistant.prompt },
-      { role: 'user', content: message.content }
+      ...takeRight(messages, 5).map((message) => ({ role: message.role, content: message.content }))
     ],
     stream: true
   })
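Aside (not part of the diff): the spread above is the core of the change, only the last five history messages are forwarded, each reduced to role and content before being appended after the system prompt. A standalone sketch of that truncation step, using a minimal message shape:

// Sketch only: lodash takeRight(array, n) returns the last n elements.
import { takeRight } from 'lodash'

type SketchMessage = { role: 'user' | 'assistant' | 'system'; content: string }

const history: SketchMessage[] = [
  { role: 'user', content: '1' },
  { role: 'assistant', content: '2' },
  { role: 'user', content: '3' },
  { role: 'assistant', content: '4' },
  { role: 'user', content: '5' },
  { role: 'user', content: '6' }
]

const windowed = takeRight(history, 5).map((m) => ({ role: m.role, content: m.content }))
// windowed now holds messages '2' through '6'; the system prompt is prepended separately.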
@@ -69,7 +70,7 @@ export async function fetchMessagesSummary({ messages, assistant }: FetchMessage
   const defaultModel = getDefaultModel()
   const model = assistant.model || defaultModel

-  const userMessages: ChatCompletionMessageParam[] = messages.map((message) => ({
+  const userMessages: ChatCompletionMessageParam[] = takeRight(messages, 5).map((message) => ({
     role: 'user',
     content: message.content
   }))
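Aside (not part of the diff): takeRight degrades gracefully for short conversations, so the summary path needs no extra guard. A one-line sketch:

// Sketch only: when fewer than 5 messages exist, takeRight simply returns them all.
import { takeRight } from 'lodash'

console.log(takeRight(['only', 'two'], 5)) // ['only', 'two']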