From a973c5fb898af3af234426385be93295bcdc9ab6 Mon Sep 17 00:00:00 2001
From: kangfenmao
Date: Thu, 26 Sep 2024 14:55:09 +0800
Subject: [PATCH] fix: remove filter messages

---
 src/renderer/src/config/constant.ts          |  2 +-
 .../src/pages/home/Messages/Message.tsx      | 13 +++++++++----
 .../src/pages/home/Messages/Messages.tsx     | 19 +++++++++++++++----
 .../src/providers/AnthropicProvider.ts       |  4 ++--
 src/renderer/src/providers/GeminiProvider.ts | 10 +++++++---
 src/renderer/src/providers/OpenAIProvider.ts |  4 ++--
 6 files changed, 36 insertions(+), 16 deletions(-)

diff --git a/src/renderer/src/config/constant.ts b/src/renderer/src/config/constant.ts
index ada48a81..8174f5c5 100644
--- a/src/renderer/src/config/constant.ts
+++ b/src/renderer/src/config/constant.ts
@@ -1,5 +1,5 @@
 export const DEFAULT_TEMPERATURE = 0.7
-export const DEFAULT_CONEXTCOUNT = 6
+export const DEFAULT_CONEXTCOUNT = 5
 export const DEFAULT_MAX_TOKENS = 4096
 export const FONT_FAMILY =
   "Ubuntu, -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif"
diff --git a/src/renderer/src/pages/home/Messages/Message.tsx b/src/renderer/src/pages/home/Messages/Message.tsx
index 0cdbf3c2..866dbb57 100644
--- a/src/renderer/src/pages/home/Messages/Message.tsx
+++ b/src/renderer/src/pages/home/Messages/Message.tsx
@@ -9,8 +9,8 @@ import { useAssistant } from '@renderer/hooks/useAssistant'
 import useAvatar from '@renderer/hooks/useAvatar'
 import { useModel } from '@renderer/hooks/useModel'
 import { useSettings } from '@renderer/hooks/useSettings'
-import { Message } from '@renderer/types'
-import { firstLetter, removeLeadingEmoji } from '@renderer/utils'
+import { Message, Model } from '@renderer/types'
+import { firstLetter, getBriefInfo, removeLeadingEmoji } from '@renderer/utils'
 import { Alert, Avatar, Divider } from 'antd'
 import dayjs from 'dayjs'
 import { upperFirst } from 'lodash'
@@ -106,7 +106,7 @@ const MessageItem: FC = ({ message, index, lastMessage, onDeleteMessage }
-        <MessageContent message={message} />
+        <MessageContent message={message} model={model} />
         {!lastMessage && (
@@ -126,7 +126,7 @@
   )
 }
 
-const MessageContent: React.FC<{ message: Message }> = ({ message }) => {
+const MessageContent: React.FC<{ message: Message; model?: Model }> = ({ message, model }) => {
   const { t } = useTranslation()
 
   if (message.status === 'sending') {
@@ -148,6 +148,11 @@ const MessageContent: React.FC<{ message: Message }> = ({ message }) => {
     )
   }
 
+  if (message.type === '@' && model) {
+    const content = `[@${model.name}](#) ${getBriefInfo(message.content)}`
+    return <Markdown message={{ ...message, content }} />
+  }
+
   return (
     <>
diff --git a/src/renderer/src/pages/home/Messages/Messages.tsx b/src/renderer/src/pages/home/Messages/Messages.tsx
index 51c90ae5..8510c3b2 100644
--- a/src/renderer/src/pages/home/Messages/Messages.tsx
+++ b/src/renderer/src/pages/home/Messages/Messages.tsx
@@ -7,7 +7,7 @@ import { EVENT_NAMES, EventEmitter } from '@renderer/services/event'
 import { deleteMessageFiles, filterMessages, getContextCount } from '@renderer/services/messages'
 import { estimateHistoryTokens, estimateMessageUsage } from '@renderer/services/tokens'
 import { Assistant, Message, Model, Topic } from '@renderer/types'
-import { captureScrollableDiv, getBriefInfo, runAsyncFunction, uuid } from '@renderer/utils'
+import { captureScrollableDiv, runAsyncFunction, uuid } from '@renderer/utils'
 import { t } from 'i18next'
 import { flatten, last, reverse, take } from 'lodash'
 import { FC, useCallback, useEffect, useRef, useState } from 'react'
@@ -73,7 +73,14 @@ const Messages: FC = ({ assistant, topic, setActiveTopic }) => {
     const unsubscribes = [
       EventEmitter.on(EVENT_NAMES.SEND_MESSAGE, async (msg: Message) => {
         await onSendMessage(msg)
-        containerRef.current?.scrollTo({ top: containerRef.current.scrollHeight, behavior: 'auto' })
+
+        // Scroll to bottom
+        setTimeout(
+          () => containerRef.current?.scrollTo({ top: containerRef.current.scrollHeight, behavior: 'auto' }),
+          10
+        )
+
+        // Fetch completion
         fetchChatCompletion({
           assistant,
           messages: [...messages, msg],
@@ -89,8 +96,12 @@ const Messages: FC = ({ assistant, topic, setActiveTopic }) => {
       EventEmitter.on(EVENT_NAMES.REGENERATE_MESSAGE, async (model: Model) => {
         const lastUserMessage = last(filterMessages(messages).filter((m) => m.role === 'user'))
         if (lastUserMessage) {
-          const content = `[@${model.name}](#) ${getBriefInfo(lastUserMessage.content)}`
-          onSendMessage({ ...lastUserMessage, id: uuid(), type: '@', content })
+          onSendMessage({
+            ...lastUserMessage,
+            id: uuid(),
+            type: '@',
+            modelId: model.id
+          })
           fetchChatCompletion({
             assistant,
             topic,
diff --git a/src/renderer/src/providers/AnthropicProvider.ts b/src/renderer/src/providers/AnthropicProvider.ts
index feef21d1..8b48a686 100644
--- a/src/renderer/src/providers/AnthropicProvider.ts
+++ b/src/renderer/src/providers/AnthropicProvider.ts
@@ -3,7 +3,7 @@ import { MessageCreateParamsNonStreaming, MessageParam } from '@anthropic-ai/sdk
 import { DEFAULT_MAX_TOKENS } from '@renderer/config/constant'
 import { getAssistantSettings, getDefaultModel, getTopNamingModel } from '@renderer/services/assistant'
 import { EVENT_NAMES } from '@renderer/services/event'
-import { filterContextMessages, filterMessages } from '@renderer/services/messages'
+import { filterContextMessages } from '@renderer/services/messages'
 import { Assistant, FileTypes, Message, Provider, Suggestion } from '@renderer/types'
 import { first, flatten, sum, takeRight } from 'lodash'
 import OpenAI from 'openai'
@@ -58,7 +58,7 @@ export default class AnthropicProvider extends BaseProvider {
     const { contextCount, maxTokens, streamOutput } = getAssistantSettings(assistant)
     const userMessagesParams: MessageParam[] = []
 
-    const _messages = filterMessages(filterContextMessages(takeRight(messages, contextCount + 2)))
+    const _messages = filterContextMessages(takeRight(messages, contextCount + 2))
 
     onFilterMessages(_messages)
 
diff --git a/src/renderer/src/providers/GeminiProvider.ts b/src/renderer/src/providers/GeminiProvider.ts
index f0c3d346..a98f67da 100644
--- a/src/renderer/src/providers/GeminiProvider.ts
+++ b/src/renderer/src/providers/GeminiProvider.ts
@@ -9,10 +9,10 @@
 } from '@google/generative-ai'
 import { getAssistantSettings, getDefaultModel, getTopNamingModel } from '@renderer/services/assistant'
 import { EVENT_NAMES } from '@renderer/services/event'
-import { filterContextMessages, filterMessages } from '@renderer/services/messages'
+import { filterContextMessages } from '@renderer/services/messages'
 import { Assistant, FileTypes, Message, Provider, Suggestion } from '@renderer/types'
 import axios from 'axios'
-import { isEmpty, takeRight } from 'lodash'
+import { first, isEmpty, takeRight } from 'lodash'
 import OpenAI from 'openai'
 
 import BaseProvider from './BaseProvider'
@@ -59,9 +59,13 @@ export default class GeminiProvider extends BaseProvider {
     const model = assistant.model || defaultModel
     const { contextCount, maxTokens, streamOutput } = getAssistantSettings(assistant)
 
-    const userMessages = filterMessages(filterContextMessages(takeRight(messages, contextCount + 1)))
+    const userMessages = filterContextMessages(takeRight(messages, contextCount + 2))
     onFilterMessages(userMessages)
 
+    if (first(userMessages)?.role === 'assistant') {
+      userMessages.shift()
+    }
+
     const userLastMessage = userMessages.pop()
 
     const history: Content[] = []
diff --git a/src/renderer/src/providers/OpenAIProvider.ts b/src/renderer/src/providers/OpenAIProvider.ts
index cfa3be8e..6cce6d90 100644
--- a/src/renderer/src/providers/OpenAIProvider.ts
+++ b/src/renderer/src/providers/OpenAIProvider.ts
@@ -2,7 +2,7 @@ import { isLocalAi } from '@renderer/config/env'
 import { isVisionModel } from '@renderer/config/models'
 import { getAssistantSettings, getDefaultModel, getTopNamingModel } from '@renderer/services/assistant'
 import { EVENT_NAMES } from '@renderer/services/event'
-import { filterContextMessages, filterMessages } from '@renderer/services/messages'
+import { filterContextMessages } from '@renderer/services/messages'
 import { Assistant, FileTypes, Message, Model, Provider, Suggestion } from '@renderer/types'
 import { removeQuotes } from '@renderer/utils'
 import { first, takeRight } from 'lodash'
@@ -117,7 +117,7 @@ export default class OpenAIProvider extends BaseProvider {
     const systemMessage = assistant.prompt ? { role: 'system', content: assistant.prompt } : undefined
     const userMessages: ChatCompletionMessageParam[] = []
 
-    const _messages = filterMessages(filterContextMessages(takeRight(messages, contextCount + 1)))
+    const _messages = filterContextMessages(takeRight(messages, contextCount + 1))
 
     onFilterMessages(_messages)
 
     for (const message of _messages) {
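
Note (not part of the patch): the standalone TypeScript sketch below illustrates the trimming the providers rely on once filterMessages() is dropped. It assumes filterContextMessages() simply keeps the messages that follow the most recent context-clear marker; the real helper in @renderer/services/messages may differ, and the Message type here is reduced to the fields used in the sketch.

// Minimal sketch; Message is narrowed to the fields relevant here.
type Message = { role: 'user' | 'assistant'; type?: '@' | 'clear'; content: string }

// Assumption: filterContextMessages() drops everything up to and including the
// last 'clear' marker, leaving only the active context window.
function filterContextMessages(messages: Message[]): Message[] {
  const lastClear = messages.map((m) => m.type).lastIndexOf('clear')
  return lastClear === -1 ? messages : messages.slice(lastClear + 1)
}

// Mirrors the provider flow after this patch: take the last contextCount + 2
// messages, cut at the clear marker, then drop a leading assistant message so
// the conversation starts with a user turn (the first()/shift() check added
// in GeminiProvider).
function prepareContext(messages: Message[], contextCount: number): Message[] {
  const trimmed = filterContextMessages(messages.slice(-(contextCount + 2)))
  if (trimmed[0]?.role === 'assistant') {
    trimmed.shift()
  }
  return trimmed
}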