From a8451b7c3d5e54937eea7930ca03b1c29030c89c Mon Sep 17 00:00:00 2001
From: LiuVaayne <10231735+vaayne@users.noreply.github.com>
Date: Fri, 7 Mar 2025 19:17:29 +0800
Subject: [PATCH] =?UTF-8?q?=E2=9C=A8=20feat(MCP):=20add=20support=20for=20?=
 =?UTF-8?q?enabling/disabling=20MCPServers=20per=20message=20(#2989)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* ✨ feat: add MCP servers in chat input

- Introduce MCPToolsButton component for managing MCP servers
- Add new icon for MCP server tools in iconfont.css
- Update Inputbar to include MCP tools functionality
- Add toggle functionality for enabling/disabling MCP servers
- Implement styled dropdown menu for server selection
- Add necessary type imports and useState for MCP server management

* ✨ feat: add support for enabling/disabling MCPServers per message (main)

- Added `enabledMCPs` property to the `Message` type to track enabled MCPServers.
- Modified `MCPToolsButton` to enable all active MCPServers by default using a new `enableAll` state.
- Introduced `filterMCPTools` utility to filter tools based on enabled MCPServers.
- Updated `AnthropicProvider`, `GeminiProvider`, and `OpenAIProvider` to filter tools using `filterMCPTools`.
- Enhanced `Inputbar` to include `enabledMCPs` in the message payload when set.
---
 .../src/assets/fonts/icon-fonts/iconfont.css  |   4 +
 .../src/pages/home/Inputbar/Inputbar.tsx      |  21 +-
 .../pages/home/Inputbar/MCPToolsButton.tsx    | 204 ++++++++++++++++++
 .../src/providers/AnthropicProvider.ts        |  12 +-
 src/renderer/src/providers/GeminiProvider.ts  |  10 +-
 src/renderer/src/providers/OpenAIProvider.ts  |   9 +-
 src/renderer/src/providers/mcpToolUtils.ts    |  16 +-
 src/renderer/src/types/index.ts               |   1 +
 8 files changed, 269 insertions(+), 8 deletions(-)
 create mode 100644 src/renderer/src/pages/home/Inputbar/MCPToolsButton.tsx

diff --git a/src/renderer/src/assets/fonts/icon-fonts/iconfont.css b/src/renderer/src/assets/fonts/icon-fonts/iconfont.css
index d1512cd6..5a78fc57 100644
--- a/src/renderer/src/assets/fonts/icon-fonts/iconfont.css
+++ b/src/renderer/src/assets/fonts/icon-fonts/iconfont.css
@@ -19,6 +19,10 @@
   content: '\e623';
 }
 
+.icon-mcp:before {
+  content: '\e78e';
+}
+
 .icon-icon-adaptive-width:before {
   content: '\e87a';
 }
diff --git a/src/renderer/src/pages/home/Inputbar/Inputbar.tsx b/src/renderer/src/pages/home/Inputbar/Inputbar.tsx
index 53b16c2f..01e687e3 100644
--- a/src/renderer/src/pages/home/Inputbar/Inputbar.tsx
+++ b/src/renderer/src/pages/home/Inputbar/Inputbar.tsx
@@ -26,7 +26,7 @@ import { translateText } from '@renderer/services/TranslateService'
 import WebSearchService from '@renderer/services/WebSearchService'
 import store, { useAppDispatch, useAppSelector } from '@renderer/store'
 import { setGenerating, setSearching } from '@renderer/store/runtime'
-import { Assistant, FileType, KnowledgeBase, Message, Model, Topic } from '@renderer/types'
+import { Assistant, FileType, KnowledgeBase, MCPServer, Message, Model, Topic } from '@renderer/types'
 import { classNames, delay, getFileExtension, uuid } from '@renderer/utils'
 import { abortCompletion } from '@renderer/utils/abortController'
 import { getFilesFromDropEvent } from '@renderer/utils/input'
@@ -45,6 +45,7 @@ import NarrowLayout from '../Messages/NarrowLayout'
 import AttachmentButton from './AttachmentButton'
 import AttachmentPreview from './AttachmentPreview'
 import KnowledgeBaseButton from './KnowledgeBaseButton'
+import MCPToolsButton from './MCPToolsButton'
 import MentionModelsButton from './MentionModelsButton'
 import MentionModelsInput from './MentionModelsInput'
 import SendMessageButton from './SendMessageButton'
@@ -88,6 +89,7 @@ const Inputbar: FC<Props> = ({ assistant: _assistant, setActiveTopic }) => {
   const [isTranslating, setIsTranslating] = useState(false)
   const [selectedKnowledgeBases, setSelectedKnowledgeBases] = useState<KnowledgeBase[]>([])
   const [mentionModels, setMentionModels] = useState<Model[]>([])
+  const [enabledMCPs, setEnabledMCPs] = useState<MCPServer[]>([])
   const [isMentionPopupOpen, setIsMentionPopupOpen] = useState(false)
   const [isDragging, setIsDragging] = useState(false)
   const [textareaHeight, setTextareaHeight] = useState<number>()
@@ -157,6 +159,11 @@ const Inputbar: FC<Props> = ({ assistant: _assistant, setActiveTopic }) => {
     if (mentionModels.length > 0) {
       message.mentions = mentionModels
     }
+
+    if (enabledMCPs.length > 0) {
+      message.enabledMCPs = enabledMCPs
+    }
+
     currentMessageId.current = message.id
 
     EventEmitter.emit(EVENT_NAMES.SEND_MESSAGE, message)
@@ -587,6 +594,17 @@ const Inputbar: FC<Props> = ({ assistant: _assistant, setActiveTopic }) => {
     setMentionModels(mentionModels.filter((m) => m.id !== model.id))
   }
 
+  const toggleEnableMCP = (mcp: MCPServer) => {
+    setEnabledMCPs((prev) => {
+      const exists = prev.some((item) => item.name === mcp.name)
+      if (exists) {
+        return prev.filter((item) => item.name !== mcp.name)
+      } else {
+        return [...prev, mcp]
+      }
+    })
+  }
+
   const onEnableWebSearch = () => {
     console.log(assistant)
     if (!isWebSearchModel(model)) {
@@ -682,6 +700,7 @@ const Inputbar: FC<Props> = ({ assistant: _assistant, setActiveTopic }) => {
               onMentionModel={(model) => onMentionModel(model, mentionFromKeyboard)}
               ToolbarButton={ToolbarButton}
             />
+            <MCPToolsButton enabledMCPs={enabledMCPs} onEnableMCP={toggleEnableMCP} ToolbarButton={ToolbarButton} />
diff --git a/src/renderer/src/pages/home/Inputbar/MCPToolsButton.tsx b/src/renderer/src/pages/home/Inputbar/MCPToolsButton.tsx
new file mode 100644
--- /dev/null
+++ b/src/renderer/src/pages/home/Inputbar/MCPToolsButton.tsx
@@ -0,0 +1,204 @@
+import { useMCPServers } from '@renderer/hooks/useMCPServers'
+import { MCPServer } from '@renderer/types'
+import { Dropdown, Switch, Tooltip } from 'antd'
+import { FC, useEffect, useRef, useState } from 'react'
+import { useTranslation } from 'react-i18next'
+import { createGlobalStyle } from 'styled-components'
+
+interface Props {
+  enabledMCPs: MCPServer[]
+  onEnableMCP: (server: MCPServer) => void
+  ToolbarButton: any
+}
+
+const MCPToolsButton: FC<Props> = ({ enabledMCPs, onEnableMCP, ToolbarButton }) => {
+  const { mcpServers } = useMCPServers()
+  const [isOpen, setIsOpen] = useState(false)
+  const [enableAll, setEnableAll] = useState(true)
+  const dropdownRef = useRef<HTMLDivElement>(null)
+  const menuRef = useRef<HTMLDivElement>(null)
+  const { t } = useTranslation()
+
+  const truncateText = (text: string, maxLength: number = 50) => {
+    if (!text || text.length <= maxLength) return text
+    return text.substring(0, maxLength) + '...'
+  }
+
+  // Check if all active servers are enabled
+  const activeServers = mcpServers.filter((s) => s.isActive)
+
+  // Enable all active servers by default
+  useEffect(() => {
+    if (activeServers.length > 0) {
+      activeServers.forEach((server) => {
+        if (enableAll && !enabledMCPs.includes(server)) {
+          onEnableMCP(server)
+        }
+        if (!enableAll && enabledMCPs.includes(server)) {
+          onEnableMCP(server)
+        }
+      })
+    }
+  }, [enableAll])
+
+  const menu = (
+    <div ref={menuRef} className="ant-dropdown-menu">
+      <div className="dropdown-header">
+        <div className="header-content">
+          <h4>{t('settings.mcp.title')}</h4>
+          <div className="enable-all-container">
+            {/* {t('mcp.enable_all')} */}
+            <Switch checked={enableAll} onChange={setEnableAll} />
+          </div>
+        </div>
+      </div>
+      {mcpServers.length > 0 ? (
+        mcpServers
+          .filter((s) => s.isActive)
+          .map((server) => (
+            <div key={server.name} className="mcp-server-item">
+              <div className="server-info">
+                <div className="server-name">{server.name}</div>
+                {server.description && (
+                  <div className="server-description">{truncateText(server.description)}</div>
+                )}
+                {server.baseUrl && <div className="server-url">{server.baseUrl}</div>}
+              </div>
+              <Switch checked={enabledMCPs.includes(server)} onChange={() => onEnableMCP(server)} />
+            </div>
+          ))
+      ) : (
+        <div className="no-results">
+          {t('models.no_matches')}
+        </div>
+      )}
+    </div>
+  )
+
+  return (
+    <>
+      <DropdownMenuStyle />
+      <Dropdown
+        dropdownRender={() => menu}
+        trigger={['click']}
+        open={isOpen}
+        onOpenChange={setIsOpen}
+        overlayClassName="mention-models-dropdown">
+        <Tooltip placement="top" title={t('settings.mcp.title')} arrow>
+          <ToolbarButton type="text" ref={dropdownRef}>
+            <i className="iconfont icon-mcp" />
+          </ToolbarButton>
+        </Tooltip>
+      </Dropdown>
+    </>
+  )
+}
+
+const DropdownMenuStyle = createGlobalStyle`
+  .mention-models-dropdown {
+    .ant-dropdown-menu {
+      max-height: 400px;
+      overflow-y: auto;
+      overflow-x: hidden;
+      padding: 4px 0;
+      margin-bottom: 40px;
+      position: relative;
+
+      &::-webkit-scrollbar {
+        width: 6px;
+        height: 6px;
+      }
+
+      &::-webkit-scrollbar-thumb {
+        border-radius: 10px;
+        background: var(--color-scrollbar-thumb);
+
+        &:hover {
+          background: var(--color-scrollbar-thumb-hover);
+        }
+      }
+
+      &::-webkit-scrollbar-track {
+        background: transparent;
+      }
+
+      .no-results {
+        padding: 8px 12px;
+        color: var(--color-text-3);
+        cursor: default;
+        font-size: 14px;
+
+        &:hover {
+          background: none;
+        }
+      }
+
+      .dropdown-header {
+        padding: 8px 12px;
+        border-bottom: 1px solid var(--color-border);
+        margin-bottom: 4px;
+
+        .header-content {
+          display: flex;
+          justify-content: space-between;
+          align-items: center;
+        }
+
+        h4 {
+          margin: 0;
+          color: var(--color-text-1);
+          font-size: 14px;
+          font-weight: 500;
+        }
+
+        .enable-all-container {
+          display: flex;
+          align-items: center;
+          gap: 8px;
+
+          .enable-all-label {
+            font-size: 12px;
+            color: var(--color-text-3);
+          }
+        }
+      }
+
+      .mcp-server-item {
+        display: flex;
+        justify-content: space-between;
+        align-items: center;
+        padding: 8px 12px;
+
+        .server-info {
+          flex: 1;
+          overflow: hidden;
+
+          .server-name {
+            font-weight: 500;
+            font-size: 14px;
+            color: var(--color-text-1);
+          }
+
+          .server-description {
+            font-size: 12px;
+            color: var(--color-text-3);
+            margin-top: 2px;
+            white-space: nowrap;
+            overflow: hidden;
+            text-overflow: ellipsis;
+          }
+
+          .server-url {
+            font-size: 11px;
+            color: var(--color-text-4);
+            margin-top: 2px;
+          }
+        }
+      }
+    }
+  }
+`
+
+export default MCPToolsButton
diff --git a/src/renderer/src/providers/AnthropicProvider.ts b/src/renderer/src/providers/AnthropicProvider.ts
index 0f9aa1fe..b129a0f6 100644
--- a/src/renderer/src/providers/AnthropicProvider.ts
+++ b/src/renderer/src/providers/AnthropicProvider.ts
@@ -19,7 +19,13 @@ import OpenAI from 'openai'
 
 import { CompletionsParams } from '.'
 import BaseProvider from './BaseProvider'
-import { anthropicToolUseToMcpTool, callMCPTool, mcpToolsToAnthropicTools, upsertMCPToolResponse } from './mcpToolUtils'
+import {
+  anthropicToolUseToMcpTool,
+  callMCPTool,
+  filterMCPTools,
+  mcpToolsToAnthropicTools,
+  upsertMCPToolResponse
+} from './mcpToolUtils'
 
 type ReasoningEffort = 'high' | 'medium' | 'low'
 
@@ -139,6 +145,8 @@ export default class AnthropicProvider extends BaseProvider {
     }
 
     const userMessages = flatten(userMessagesParams)
+    const lastUserMessage = _messages.findLast((m) => m.role === 'user')
+    mcpTools = filterMCPTools(mcpTools, lastUserMessage?.enabledMCPs)
 
     const tools = mcpTools ? mcpToolsToAnthropicTools(mcpTools) : undefined
     const body: MessageCreateParamsNonStreaming = {
@@ -189,8 +197,6 @@ export default class AnthropicProvider extends BaseProvider {
       })
     }
 
-    const lastUserMessage = _messages.findLast((m) => m.role === 'user')
-
     const { abortController, cleanup } = this.createAbortController(lastUserMessage?.id)
     const { signal } = abortController
     const toolResponses: MCPToolResponse[] = []
diff --git a/src/renderer/src/providers/GeminiProvider.ts b/src/renderer/src/providers/GeminiProvider.ts
index c6693b05..5a3ac559 100644
--- a/src/renderer/src/providers/GeminiProvider.ts
+++ b/src/renderer/src/providers/GeminiProvider.ts
@@ -27,7 +27,13 @@ import OpenAI from 'openai'
 
 import { CompletionsParams } from '.'
 import BaseProvider from './BaseProvider'
-import { callMCPTool, geminiFunctionCallToMcpTool, mcpToolsToGeminiTools, upsertMCPToolResponse } from './mcpToolUtils'
+import {
+  callMCPTool,
+  filterMCPTools,
+  geminiFunctionCallToMcpTool,
+  mcpToolsToGeminiTools,
+  upsertMCPToolResponse
+} from './mcpToolUtils'
 
 export default class GeminiProvider extends BaseProvider {
   private sdk: GoogleGenerativeAI
@@ -161,7 +167,7 @@ export default class GeminiProvider extends BaseProvider {
     for (const message of userMessages) {
       history.push(await this.getMessageContents(message))
     }
-
+    mcpTools = filterMCPTools(mcpTools, userLastMessage?.enabledMCPs)
     const tools = mcpToolsToGeminiTools(mcpTools)
     const toolResponses: MCPToolResponse[] = []
     if (assistant.enableWebSearch && isWebSearchModel(model)) {
diff --git a/src/renderer/src/providers/OpenAIProvider.ts b/src/renderer/src/providers/OpenAIProvider.ts
index 7716bfaf..9c828fa1 100644
--- a/src/renderer/src/providers/OpenAIProvider.ts
+++ b/src/renderer/src/providers/OpenAIProvider.ts
@@ -35,7 +35,13 @@ import {
 
 import { CompletionsParams } from '.'
 import BaseProvider from './BaseProvider'
-import { callMCPTool, mcpToolsToOpenAITools, openAIToolsToMcpTool, upsertMCPToolResponse } from './mcpToolUtils'
+import {
+  callMCPTool,
+  filterMCPTools,
+  mcpToolsToOpenAITools,
+  openAIToolsToMcpTool,
+  upsertMCPToolResponse
+} from './mcpToolUtils'
 
 type ReasoningEffort = 'high' | 'medium' | 'low'
 
@@ -298,6 +304,7 @@ export default class OpenAIProvider extends BaseProvider {
     const { abortController, cleanup } = this.createAbortController(lastUserMessage?.id)
     const { signal } = abortController
 
+    mcpTools = filterMCPTools(mcpTools, lastUserMessage?.enabledMCPs)
     const tools = mcpTools && mcpTools.length > 0 ? mcpToolsToOpenAITools(mcpTools) : undefined
 
     const reqMessages: ChatCompletionMessageParam[] = [systemMessage, ...userMessages].filter(
diff --git a/src/renderer/src/providers/mcpToolUtils.ts b/src/renderer/src/providers/mcpToolUtils.ts
index c3731557..f7840335 100644
--- a/src/renderer/src/providers/mcpToolUtils.ts
+++ b/src/renderer/src/providers/mcpToolUtils.ts
@@ -1,6 +1,6 @@
 import { Tool, ToolUnion, ToolUseBlock } from '@anthropic-ai/sdk/resources'
 import { FunctionCall, FunctionDeclaration, SchemaType, Tool as geminiToool } from '@google/generative-ai'
-import { MCPTool, MCPToolResponse } from '@renderer/types'
+import { MCPServer, MCPTool, MCPToolResponse } from '@renderer/types'
 import { ChatCompletionMessageToolCall, ChatCompletionTool } from 'openai/resources'
 
 import { ChunkCallbackData } from '.'
@@ -146,3 +146,17 @@ export function upsertMCPToolResponse(
     })
   }
 }
+
+export function filterMCPTools(
+  mcpTools: MCPTool[] | undefined,
+  enabledServers: MCPServer[] | undefined
+): MCPTool[] | undefined {
+  if (mcpTools) {
+    if (enabledServers) {
+      mcpTools = mcpTools.filter((t) => enabledServers.some((m) => m.name === t.serverName))
+    } else {
+      mcpTools = []
+    }
+  }
+  return mcpTools
+}
diff --git a/src/renderer/src/types/index.ts b/src/renderer/src/types/index.ts
index 451b421a..fa9011bc 100644
--- a/src/renderer/src/types/index.ts
+++ b/src/renderer/src/types/index.ts
@@ -67,6 +67,7 @@ export type Message = {
   askId?: string
   useful?: boolean
   error?: Record<string, any>
+  enabledMCPs?: MCPServer[]
   metadata?: {
     // Gemini
     groundingMetadata?: any
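
A minimal usage sketch of the new `filterMCPTools` helper (illustration only, not part of the patch; the object literals below are hypothetical and carry only the fields the helper reads):

import { MCPServer, MCPTool } from '@renderer/types'

import { filterMCPTools } from './mcpToolUtils'

// Two tools exposed by two different MCP servers.
const tools = [
  { serverName: 'filesystem', name: 'read_file' },
  { serverName: 'fetch', name: 'fetch_url' }
] as unknown as MCPTool[]

// The servers the user toggled on for this message (message.enabledMCPs).
const enabledMCPs = [{ name: 'filesystem' }] as unknown as MCPServer[]

filterMCPTools(tools, enabledMCPs)     // -> only the 'filesystem' tool is kept
filterMCPTools(tools, undefined)       // -> [] (a message without enabledMCPs sends no MCP tools)
filterMCPTools(undefined, enabledMCPs) // -> undefined (no tools configured at all)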