feat: fetch chat completion with history messages
commit f068109e95
parent 10c07413b5
@@ -57,7 +57,7 @@ const Messages: FC<Props> = ({ assistant, topic }) => {
       EventEmitter.on(EVENT_NAMES.SEND_MESSAGE, async (msg: Message) => {
         console.debug({ assistant, provider, message: msg, topic })
         onSendMessage(msg)
-        fetchChatCompletion({ assistant, message: msg, topic, onResponse: setLastMessage })
+        fetchChatCompletion({ assistant, messages: [messages, msg], topic, onResponse: setLastMessage })
       }),
       EventEmitter.on(EVENT_NAMES.AI_CHAT_COMPLETION, async (msg: Message) => {
         setLastMessage(null)
@@ -72,7 +72,7 @@ const Messages: FC<Props> = ({ assistant, topic }) => {
       })
     ]
     return () => unsubscribes.forEach((unsub) => unsub())
-  }, [assistant, autoRenameTopic, onSendMessage, provider, topic, updateTopic])
+  }, [assistant, autoRenameTopic, messages, onSendMessage, provider, topic, updateTopic])

   useEffect(() => {
     runAsyncFunction(async () => {
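For orientation, a rough TypeScript sketch of the listener after this change; it assumes `messages` is the topic's message history held in component state and that the surrounding effect and `unsubscribes` array match the context lines above, none of which this commit shows in full. Note the diff itself passes `[messages, msg]`; the sketch spreads the array on the assumption a flat Message[] is intended.

// Sketch only — register the listener inside the effect so it closes over the
// current `messages`; the dependency array mirrors the hunk above.
useEffect(() => {
  const unsubscribes = [
    EventEmitter.on(EVENT_NAMES.SEND_MESSAGE, async (msg: Message) => {
      onSendMessage(msg)
      // Forward the accumulated history plus the new message to the service.
      fetchChatCompletion({ assistant, messages: [...messages, msg], topic, onResponse: setLastMessage })
    })
  ]
  return () => unsubscribes.forEach((unsub) => unsub())
}, [assistant, messages, onSendMessage, topic])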
@@ -1,6 +1,7 @@
 import { useSettings } from '@renderer/hooks/useSettings'
 import { Dropdown, MenuProps } from 'antd'
 import { FC, PropsWithChildren } from 'react'
+import { ArrowUpOutlined, EnterOutlined } from '@ant-design/icons'

 interface Props extends PropsWithChildren {}

@@ -11,11 +12,13 @@ const SendMessageSetting: FC<Props> = ({ children }) => {
     {
       label: 'Enter Send',
       key: 'Enter',
+      icon: <EnterOutlined />,
       onClick: () => setSendMessageShortcut('Enter')
     },
     {
       label: 'Shift + Enter Send',
       key: 'Shift+Enter',
+      icon: <ArrowUpOutlined />,
       onClick: () => setSendMessageShortcut('Shift+Enter')
     }
   ]
@@ -4,9 +4,10 @@ import { EVENT_NAMES, EventEmitter } from './event'
 import { ChatCompletionMessageParam, ChatCompletionSystemMessageParam } from 'openai/resources'
 import OpenAI from 'openai'
 import { getAssistantProvider, getDefaultModel } from './assistant'
+import { takeRight } from 'lodash'

 interface FetchChatCompletionParams {
-  message: Message
+  messages: Message[]
   topic: Topic
   assistant: Assistant
   onResponse: (message: Message) => void
@@ -20,7 +21,7 @@ const getOpenAiProvider = (provider: Provider) => {
   })
 }

-export async function fetchChatCompletion({ message, topic, assistant, onResponse }: FetchChatCompletionParams) {
+export async function fetchChatCompletion({ messages, topic, assistant, onResponse }: FetchChatCompletionParams) {
   const provider = getAssistantProvider(assistant)
   const openaiProvider = getOpenAiProvider(provider)
   const defaultModel = getDefaultModel()
@@ -30,7 +31,7 @@ export async function fetchChatCompletion({ message, topic, assistant, onResponse }: FetchChatCompletionParams) {
     model: model.id,
     messages: [
       { role: 'system', content: assistant.prompt },
-      { role: 'user', content: message.content }
+      ...takeRight(messages, 5).map((message) => ({ role: message.role, content: message.content }))
     ],
     stream: true
   })
@@ -69,7 +70,7 @@ export async function fetchMessagesSummary({ messages, assistant }: FetchMessage
   const defaultModel = getDefaultModel()
   const model = assistant.model || defaultModel

-  const userMessages: ChatCompletionMessageParam[] = messages.map((message) => ({
+  const userMessages: ChatCompletionMessageParam[] = takeRight(messages, 5).map((message) => ({
     role: 'user',
     content: message.content
   }))
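Pieced together from the hunks above, a minimal sketch of the request the updated fetchChatCompletion now builds: the assistant's system prompt followed by the last five history messages (lodash takeRight), streamed from the OpenAI client. The helpers getAssistantProvider, getOpenAiProvider, and getDefaultModel already exist in this file and are only referenced; the model fallback shown is an assumption mirroring fetchMessagesSummary, and the stream-consumption code is not part of this commit.

// Sketch only — shape of the updated call after this commit.
export async function fetchChatCompletion({ messages, topic, assistant, onResponse }: FetchChatCompletionParams) {
  const provider = getAssistantProvider(assistant)
  const openaiProvider = getOpenAiProvider(provider)
  const model = assistant.model || getDefaultModel() // fallback assumed, as in fetchMessagesSummary

  const stream = await openaiProvider.chat.completions.create({
    model: model.id,
    messages: [
      // System prompt first, then up to the five most recent history messages.
      { role: 'system', content: assistant.prompt },
      ...takeRight(messages, 5).map((m) => ({ role: m.role, content: m.content }))
    ],
    stream: true
  })

  // The existing code (unchanged by this commit) iterates `stream` and reports
  // partial messages through onResponse; it is omitted from this sketch.
}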