diff --git a/src/renderer/src/pages/home/components/Messages.tsx b/src/renderer/src/pages/home/components/Messages.tsx
index 38eaed6d..81c7e76f 100644
--- a/src/renderer/src/pages/home/components/Messages.tsx
+++ b/src/renderer/src/pages/home/components/Messages.tsx
@@ -57,7 +57,7 @@ const Messages: FC = ({ assistant, topic }) => {
     const _messages = messages.filter((m) => m.id !== message.id)
     setMessages(_messages)
     localforage.setItem(`topic:${topic.id}`, {
-      ...topic,
+      id: topic.id,
       messages: _messages
     })
   }
diff --git a/src/renderer/src/services/api.ts b/src/renderer/src/services/api.ts
index 3af2c34d..8f5864a7 100644
--- a/src/renderer/src/services/api.ts
+++ b/src/renderer/src/services/api.ts
@@ -42,13 +42,19 @@ export async function fetchChatCompletion({ messages, topic, assistant, onRespon
   onResponse({ ..._message })
 
+  const systemMessage = assistant.prompt ? { role: 'system', content: assistant.prompt } : undefined
+
+  const userMessages = takeRight(messages, 5).map((message) => ({
+    role: message.role,
+    content: message.content
+  }))
+
+  const _messages = [systemMessage, ...userMessages].filter(Boolean) as ChatCompletionMessageParam[]
+
   try {
     const stream = await openaiProvider.chat.completions.create({
       model: model.id,
-      messages: [
-        { role: 'system', content: assistant.prompt },
-        ...takeRight(messages, 5).map((message) => ({ role: message.role, content: message.content }))
-      ],
+      messages: _messages,
       stream: true
     })
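
A minimal standalone sketch of the refactored message assembly in `fetchChatCompletion`. The types `ChatMessageParam`, `AppMessage`, `Assistant`, and the `buildChatMessages` helper are simplified, hypothetical stand-ins (not identifiers from this PR or the OpenAI SDK); only the conditional system message and the `filter(Boolean)` pattern mirror the diff above.

```ts
import { takeRight } from 'lodash'

// Simplified stand-ins for the app's types and the SDK's ChatCompletionMessageParam.
type ChatMessageParam = { role: 'system' | 'user' | 'assistant'; content: string }
type AppMessage = { role: 'user' | 'assistant'; content: string }
type Assistant = { prompt?: string }

function buildChatMessages(assistant: Assistant, messages: AppMessage[]): ChatMessageParam[] {
  // Prepend a system message only when the assistant has a non-empty prompt;
  // the previous code always sent { role: 'system', content: assistant.prompt },
  // even when the prompt was empty.
  const systemMessage: ChatMessageParam | undefined = assistant.prompt
    ? { role: 'system', content: assistant.prompt }
    : undefined

  // Keep the request small: only the last five conversation messages are sent.
  const userMessages: ChatMessageParam[] = takeRight(messages, 5).map((message) => ({
    role: message.role,
    content: message.content
  }))

  // filter(Boolean) drops the undefined placeholder when there is no system prompt.
  return [systemMessage, ...userMessages].filter(Boolean) as ChatMessageParam[]
}

// With an empty prompt, the payload contains only the recent conversation turns.
console.log(buildChatMessages({ prompt: '' }, [{ role: 'user', content: 'Hello' }]))
// -> [ { role: 'user', content: 'Hello' } ]
```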