From 591bb45a4e9892adeb19a529ab0c60860c0041d0 Mon Sep 17 00:00:00 2001
From: kangfenmao
Date: Fri, 6 Sep 2024 13:54:26 +0800
Subject: [PATCH] feat: Improved chat UI with context handling and filtering
 #43

- Updated the default context count from 5 to 6.
- Updated string translations for multiple languages.
- Added a "Clear Context" action to the Inputbar component and showed the current context count next to the token estimate.
- Added a new-context divider for messages of type 'clear'.
- Emitted the estimated token count together with the context count in the ESTIMATED_TOKEN_COUNT event.
- Filtered messages down to the current context in the Anthropic, Gemini and OpenAI providers.
- Passed messages unfiltered to AI.completions (each provider now filters them) and filtered messages before AI.suggestions.
- Added the NEW_CONTEXT event name.
- Added the filterContextMessages and getContextCount helpers and excluded 'clear' messages from filterMessages.
- Added 'clear' as a valid value for the Message type field.
---
 src/renderer/src/config/constant.ts          |  2 +-
 src/renderer/src/i18n/index.ts               | 18 +++++++-----
 .../src/pages/home/Inputbar/Inputbar.tsx     | 22 ++++++++++++--
 .../src/pages/home/Messages/Message.tsx      | 10 ++++++-
 .../src/pages/home/Messages/Messages.tsx     | 29 +++++++++++++++++--
 .../src/providers/AnthropicProvider.ts       |  3 +-
 src/renderer/src/providers/GeminiProvider.ts |  3 +-
 src/renderer/src/providers/OpenAIProvider.ts |  3 +-
 src/renderer/src/services/api.ts             |  4 +--
 src/renderer/src/services/event.ts           |  3 +-
 src/renderer/src/services/messages.ts        | 28 ++++++++++++++++--
 src/renderer/src/types/index.ts              |  2 +-
 12 files changed, 104 insertions(+), 23 deletions(-)

diff --git a/src/renderer/src/config/constant.ts b/src/renderer/src/config/constant.ts
index 6e7c4018..b33c51a9 100644
--- a/src/renderer/src/config/constant.ts
+++ b/src/renderer/src/config/constant.ts
@@ -1,5 +1,5 @@
 export const DEFAULT_TEMPERATURE = 0.7
-export const DEFAULT_CONEXTCOUNT = 5
+export const DEFAULT_CONEXTCOUNT = 6
 export const DEFAULT_MAX_TOKENS = 4096
 export const FONT_FAMILY =
   "Ubuntu, -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif"
diff --git a/src/renderer/src/i18n/index.ts b/src/renderer/src/i18n/index.ts
index 0b084cef..5baf4555 100644
--- a/src/renderer/src/i18n/index.ts
+++ b/src/renderer/src/i18n/index.ts
@@ -71,11 +71,12 @@
       'topics.list': 'Topic List',
       'input.new_topic': 'New Topic',
       'input.topics': ' Topics ',
-      'input.clear': 'Clear',
+      'input.clear': 'Clear Messages',
+      'input.new.context': 'Clear Context',
       'input.expand': 'Expand',
       'input.collapse': 'Collapse',
       'input.clear.title': 'Clear all messages?',
-      'input.clear.content': 'Are you sure to clear all messages?',
+      'input.clear.content': 'Do you want to clear all messages of the current topic?',
       'input.placeholder': 'Type your message here...',
       'input.send': 'Send',
       'input.pause': 'Pause',
@@ -95,7 +96,8 @@
       'settings.set_as_default': 'Apply to default assistant',
       'settings.max': 'Max',
       'suggestions.title': 'Suggested Questions',
-      'add.assistant.title': 'Add Assistant'
+      'add.assistant.title': 'Add Assistant',
+      'message.new.context': 'New Context'
     },
     agents: {
       title: 'Agents',
@@ -320,11 +322,12 @@
       'topics.list': '话题列表',
       'input.new_topic': '新话题',
       'input.topics': ' 话题 ',
-      'input.clear': '清除',
'input.clear': '清除会话消息', + 'input.new.context': '清除上下文', 'input.expand': '展开', 'input.collapse': '收起', - 'input.clear.title': '清除所有消息?', - 'input.clear.content': '确定要清除所有消息吗?', + 'input.clear.title': '清除消息?', + 'input.clear.content': '确定要清除当前会话所有消息吗?', 'input.placeholder': '在这里输入消息...', 'input.send': '发送', 'input.pause': '暂停', @@ -345,7 +348,8 @@ const resources = { 'settings.set_as_default': '应用到默认助手', 'settings.max': '不限', 'suggestions.title': '建议的问题', - 'add.assistant.title': '添加智能体' + 'add.assistant.title': '添加智能体', + 'message.new.context': '清除上下文' }, agents: { title: '智能体', diff --git a/src/renderer/src/pages/home/Inputbar/Inputbar.tsx b/src/renderer/src/pages/home/Inputbar/Inputbar.tsx index 329e4b17..f824e2cc 100644 --- a/src/renderer/src/pages/home/Inputbar/Inputbar.tsx +++ b/src/renderer/src/pages/home/Inputbar/Inputbar.tsx @@ -8,7 +8,6 @@ import { PauseCircleOutlined, QuestionCircleOutlined } from '@ant-design/icons' -import { DEFAULT_CONEXTCOUNT } from '@renderer/config/constant' import { useAssistant } from '@renderer/hooks/useAssistant' import { useSettings } from '@renderer/hooks/useSettings' import { useShowTopics } from '@renderer/hooks/useStore' @@ -44,6 +43,7 @@ const Inputbar: FC = ({ assistant, setActiveTopic }) => { const { sendMessageShortcut, showInputEstimatedTokens, fontSize } = useSettings() const [expended, setExpend] = useState(false) const [estimateTokenCount, setEstimateTokenCount] = useState(0) + const [contextCount, setContextCount] = useState(0) const generating = useAppSelector((state) => state.runtime.generating) const textareaRef = useRef(null) const [files, setFiles] = useState([]) @@ -130,6 +130,14 @@ const Inputbar: FC = ({ assistant, setActiveTopic }) => { store.dispatch(setGenerating(false)) } + const onNewContext = () => { + if (generating) { + onPause() + return + } + EventEmitter.emit(EVENT_NAMES.NEW_CONTEXT) + } + const resizeTextArea = () => { const textArea = textareaRef.current?.resizableTextArea?.textArea if (textArea) { @@ -178,7 +186,10 @@ const Inputbar: FC = ({ assistant, setActiveTopic }) => { setText(message.content) textareaRef.current?.focus() }), - EventEmitter.on(EVENT_NAMES.ESTIMATED_TOKEN_COUNT, _setEstimateTokenCount) + EventEmitter.on(EVENT_NAMES.ESTIMATED_TOKEN_COUNT, ({ tokensCount, contextCount }) => { + _setEstimateTokenCount(tokensCount) + setContextCount(contextCount) + }) ] return () => unsubscribes.forEach((unsub) => unsub()) }, []) @@ -212,6 +223,11 @@ const Inputbar: FC = ({ assistant, setActiveTopic }) => { + + + + + = ({ assistant, setActiveTopic }) => { - {assistant?.settings?.contextCount ?? 
DEFAULT_CONEXTCOUNT} + {contextCount} ↑{inputTokenCount} / {estimateTokenCount} diff --git a/src/renderer/src/pages/home/Messages/Message.tsx b/src/renderer/src/pages/home/Messages/Message.tsx index 7d4eb395..b506faf5 100644 --- a/src/renderer/src/pages/home/Messages/Message.tsx +++ b/src/renderer/src/pages/home/Messages/Message.tsx @@ -19,7 +19,7 @@ import { useRuntime } from '@renderer/hooks/useStore' import { EVENT_NAMES, EventEmitter } from '@renderer/services/event' import { Message, Model } from '@renderer/types' import { firstLetter, removeLeadingEmoji } from '@renderer/utils' -import { Alert, Avatar, Dropdown, Popconfirm, Tooltip } from 'antd' +import { Alert, Avatar, Divider, Dropdown, Popconfirm, Tooltip } from 'antd' import dayjs from 'dayjs' import { upperFirst } from 'lodash' import { FC, memo, useCallback, useMemo, useState } from 'react' @@ -130,6 +130,14 @@ const MessageItem: FC = ({ message, index, showMenu, onDeleteMessage }) = const showMiniApp = () => model?.provider && startMinAppById(model?.provider) + if (message.type === 'clear') { + return ( + + {t('chat.message.new.context')} + + ) + } + return ( diff --git a/src/renderer/src/pages/home/Messages/Messages.tsx b/src/renderer/src/pages/home/Messages/Messages.tsx index 1e437867..3c77d36a 100644 --- a/src/renderer/src/pages/home/Messages/Messages.tsx +++ b/src/renderer/src/pages/home/Messages/Messages.tsx @@ -3,7 +3,7 @@ import { useProviderByAssistant } from '@renderer/hooks/useProvider' import { getTopic } from '@renderer/hooks/useTopic' import { fetchChatCompletion, fetchMessagesSummary } from '@renderer/services/api' import { EVENT_NAMES, EventEmitter } from '@renderer/services/event' -import { estimateHistoryTokenCount, filterMessages } from '@renderer/services/messages' +import { estimateHistoryTokenCount, filterMessages, getContextCount } from '@renderer/services/messages' import LocalStorage from '@renderer/services/storage' import { Assistant, Message, Model, Topic } from '@renderer/types' import { getBriefInfo, runAsyncFunction, uuid } from '@renderer/utils' @@ -89,6 +89,28 @@ const Messages: FC = ({ assistant, topic, setActiveTopic }) => { setMessages([]) updateTopic({ ...topic, messages: [] }) LocalStorage.clearTopicMessages(topic.id) + }), + EventEmitter.on(EVENT_NAMES.NEW_CONTEXT, () => { + const lastMessage = last(messages) + + if (lastMessage && lastMessage.type === 'clear') { + return + } + + if (messages.length === 0) { + return + } + + onSendMessage({ + id: uuid(), + assistantId: assistant.id, + role: 'user', + content: '', + topicId: topic.id, + createdAt: new Date().toISOString(), + status: 'success', + type: 'clear' + } as Message) }) ] return () => unsubscribes.forEach((unsub) => unsub()) @@ -106,7 +128,10 @@ const Messages: FC = ({ assistant, topic, setActiveTopic }) => { }, [messages]) useEffect(() => { - EventEmitter.emit(EVENT_NAMES.ESTIMATED_TOKEN_COUNT, estimateHistoryTokenCount(assistant, messages)) + EventEmitter.emit(EVENT_NAMES.ESTIMATED_TOKEN_COUNT, { + tokensCount: estimateHistoryTokenCount(assistant, messages), + contextCount: getContextCount(assistant, messages) + }) }, [assistant, messages]) return ( diff --git a/src/renderer/src/providers/AnthropicProvider.ts b/src/renderer/src/providers/AnthropicProvider.ts index ba2b6c08..dabcec1e 100644 --- a/src/renderer/src/providers/AnthropicProvider.ts +++ b/src/renderer/src/providers/AnthropicProvider.ts @@ -3,6 +3,7 @@ import { MessageCreateParamsNonStreaming, MessageParam } from '@anthropic-ai/sdk import { DEFAULT_MAX_TOKENS } from 
'@renderer/config/constant' import { getAssistantSettings, getDefaultModel, getTopNamingModel } from '@renderer/services/assistant' import { EVENT_NAMES } from '@renderer/services/event' +import { filterContextMessages, filterMessages } from '@renderer/services/messages' import { Assistant, Message, Provider, Suggestion } from '@renderer/types' import { first, sum, takeRight } from 'lodash' import OpenAI from 'openai' @@ -26,7 +27,7 @@ export default class AnthropicProvider extends BaseProvider { const model = assistant.model || defaultModel const { contextCount, maxTokens } = getAssistantSettings(assistant) - const userMessages = takeRight(messages, contextCount + 2).map((message) => { + const userMessages = filterMessages(filterContextMessages(takeRight(messages, contextCount + 2))).map((message) => { return { role: message.role, content: message.content diff --git a/src/renderer/src/providers/GeminiProvider.ts b/src/renderer/src/providers/GeminiProvider.ts index 1fbebbd3..ffbe9762 100644 --- a/src/renderer/src/providers/GeminiProvider.ts +++ b/src/renderer/src/providers/GeminiProvider.ts @@ -1,6 +1,7 @@ import { GoogleGenerativeAI } from '@google/generative-ai' import { getAssistantSettings, getDefaultModel, getTopNamingModel } from '@renderer/services/assistant' import { EVENT_NAMES } from '@renderer/services/event' +import { filterContextMessages, filterMessages } from '@renderer/services/messages' import { Assistant, Message, Provider, Suggestion } from '@renderer/types' import axios from 'axios' import { isEmpty, takeRight } from 'lodash' @@ -25,7 +26,7 @@ export default class GeminiProvider extends BaseProvider { const model = assistant.model || defaultModel const { contextCount, maxTokens } = getAssistantSettings(assistant) - const userMessages = takeRight(messages, contextCount + 1).map((message) => { + const userMessages = filterMessages(filterContextMessages(takeRight(messages, contextCount + 1))).map((message) => { return { role: message.role, content: message.content diff --git a/src/renderer/src/providers/OpenAIProvider.ts b/src/renderer/src/providers/OpenAIProvider.ts index b9f71809..315686a8 100644 --- a/src/renderer/src/providers/OpenAIProvider.ts +++ b/src/renderer/src/providers/OpenAIProvider.ts @@ -1,6 +1,7 @@ import { isLocalAi } from '@renderer/config/env' import { getAssistantSettings, getDefaultModel, getTopNamingModel } from '@renderer/services/assistant' import { EVENT_NAMES } from '@renderer/services/event' +import { filterContextMessages, filterMessages } from '@renderer/services/messages' import { Assistant, Message, Provider, Suggestion } from '@renderer/types' import { fileToBase64, removeQuotes } from '@renderer/utils' import { first, takeRight } from 'lodash' @@ -60,7 +61,7 @@ export default class OpenAIProvider extends BaseProvider { const userMessages: ChatCompletionMessageParam[] = [] - for (const message of takeRight(messages, contextCount + 1)) { + for (const message of filterMessages(filterContextMessages(takeRight(messages, contextCount + 1)))) { userMessages.push({ role: message.role, content: await this.getMessageContent(message) diff --git a/src/renderer/src/services/api.ts b/src/renderer/src/services/api.ts index b5fc8da1..ceeeeddd 100644 --- a/src/renderer/src/services/api.ts +++ b/src/renderer/src/services/api.ts @@ -61,7 +61,7 @@ export async function fetchChatCompletion({ }, 1000) try { - await AI.completions(filterMessages(messages), assistant, ({ text, usage }) => { + await AI.completions(messages, assistant, ({ text, usage }) => { 
message.content = message.content + text || '' message.usage = usage onResponse({ ...message, status: 'pending' }) @@ -153,7 +153,7 @@ export async function fetchSuggestions({ } try { - return await AI.suggestions(messages, assistant) + return await AI.suggestions(filterMessages(messages), assistant) } catch (error: any) { return [] } diff --git a/src/renderer/src/services/event.ts b/src/renderer/src/services/event.ts index c061e3f7..b7eee8c8 100644 --- a/src/renderer/src/services/event.ts +++ b/src/renderer/src/services/event.ts @@ -14,5 +14,6 @@ export const EVENT_NAMES = { ESTIMATED_TOKEN_COUNT: 'ESTIMATED_TOKEN_COUNT', SHOW_CHAT_SETTINGS: 'SHOW_CHAT_SETTINGS', SHOW_TOPIC_SIDEBAR: 'SHOW_TOPIC_SIDEBAR', - SWITCH_TOPIC_SIDEBAR: 'SWITCH_TOPIC_SIDEBAR' + SWITCH_TOPIC_SIDEBAR: 'SWITCH_TOPIC_SIDEBAR', + NEW_CONTEXT: 'NEW_CONTEXT' } diff --git a/src/renderer/src/services/messages.ts b/src/renderer/src/services/messages.ts index 6c662045..6689e05b 100644 --- a/src/renderer/src/services/messages.ts +++ b/src/renderer/src/services/messages.ts @@ -1,3 +1,4 @@ +import { DEFAULT_CONEXTCOUNT } from '@renderer/config/constant' import { Assistant, Message } from '@renderer/types' import { GPTTokens } from 'gpt-tokens' import { isEmpty, takeRight } from 'lodash' @@ -5,7 +6,30 @@ import { isEmpty, takeRight } from 'lodash' import { getAssistantSettings } from './assistant' export const filterMessages = (messages: Message[]) => { - return messages.filter((message) => message.type !== '@').filter((message) => !isEmpty(message.content.trim())) + return messages + .filter((message) => !['@', 'clear'].includes(message.type!)) + .filter((message) => !isEmpty(message.content.trim())) +} + +export function filterContextMessages(messages: Message[]): Message[] { + const clearIndex = messages.findLastIndex((message) => message.type === 'clear') + + if (clearIndex === -1) { + return messages + } + + return messages.slice(clearIndex + 1) +} + +export function getContextCount(assistant: Assistant, messages: Message[]) { + const contextCount = assistant?.settings?.contextCount ?? DEFAULT_CONEXTCOUNT + const clearIndex = takeRight(messages, contextCount).findLastIndex((message) => message.type === 'clear') + + if (clearIndex === -1) { + return contextCount + } + + return contextCount - (clearIndex + 1) } export function estimateInputTokenCount(text: string) { @@ -24,7 +48,7 @@ export function estimateHistoryTokenCount(assistant: Assistant, msgs: Message[]) model: 'gpt-4o', messages: [ { role: 'system', content: assistant.prompt }, - ...filterMessages(takeRight(msgs, contextCount)).map((message) => ({ + ...filterMessages(filterContextMessages(takeRight(msgs, contextCount))).map((message) => ({ role: message.role, content: message.content })) diff --git a/src/renderer/src/types/index.ts b/src/renderer/src/types/index.ts index 700afed1..4dec9f35 100644 --- a/src/renderer/src/types/index.ts +++ b/src/renderer/src/types/index.ts @@ -30,7 +30,7 @@ export type Message = { files?: File[] images?: string[] usage?: OpenAI.Completions.CompletionUsage - type?: 'text' | '@' + type?: 'text' | '@' | 'clear' } export type Topic = {
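
Note for reviewers: the sketch below shows how the new context helpers interact once a 'clear' marker has been inserted into a topic. It is a minimal, self-contained TypeScript example, not code from this patch: the Msg type, the sample history and the simplified getContextCount signature (taking contextCount directly instead of reading assistant settings) are hypothetical, while the helper bodies mirror filterContextMessages, filterMessages and getContextCount from src/renderer/src/services/messages.ts above.

import { isEmpty, takeRight } from 'lodash'

// Simplified stand-in for the renderer Message type (demo only).
type Msg = { role: 'user' | 'assistant'; content: string; type?: 'text' | '@' | 'clear' }

// Keep only the messages after the most recent 'clear' marker (mirrors filterContextMessages).
function filterContextMessages(messages: Msg[]): Msg[] {
  const clearIndex = messages.findLastIndex((message) => message.type === 'clear')
  return clearIndex === -1 ? messages : messages.slice(clearIndex + 1)
}

// Drop '@' and 'clear' marker messages plus empty messages (mirrors filterMessages).
function filterMessages(messages: Msg[]): Msg[] {
  return messages
    .filter((message) => !['@', 'clear'].includes(message.type!))
    .filter((message) => !isEmpty(message.content.trim()))
}

// Report how many of the last contextCount slots are still usable (mirrors getContextCount,
// with contextCount passed in directly instead of read from assistant settings).
function getContextCount(contextCount: number, messages: Msg[]): number {
  const clearIndex = takeRight(messages, contextCount).findLastIndex((message) => message.type === 'clear')
  return clearIndex === -1 ? contextCount : contextCount - (clearIndex + 1)
}

// Hypothetical topic history: one exchange, a context clear, then a new question.
const history: Msg[] = [
  { role: 'user', content: 'Hello', type: 'text' },
  { role: 'assistant', content: 'Hi, how can I help?', type: 'text' },
  { role: 'user', content: '', type: 'clear' },
  { role: 'user', content: 'Summarize this file', type: 'text' }
]

// Same composition the providers use: recent window -> cut at 'clear' -> strip markers.
const contextCount = 6
const requestMessages = filterMessages(filterContextMessages(takeRight(history, contextCount + 1)))

console.log(requestMessages.map((message) => message.content)) // ['Summarize this file']
console.log(getContextCount(contextCount, history)) // 3

Because filtering now happens inside each provider, the stored topic keeps its 'clear' markers (so Message.tsx can still render the new-context divider) while the request payload sent to the model never contains them.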