diff --git a/src/renderer/src/providers/OpenAIProvider.ts b/src/renderer/src/providers/OpenAIProvider.ts
index 9c828fa1..0d893305 100644
--- a/src/renderer/src/providers/OpenAIProvider.ts
+++ b/src/renderer/src/providers/OpenAIProvider.ts
@@ -385,30 +385,19 @@ export default class OpenAIProvider extends BaseProvider {
           continue
         }

-        upsertMCPToolResponse(
-          toolResponses,
-          {
-            tool: mcpTool,
-            status: 'invoking'
-          },
-          onChunk
-        )
+        upsertMCPToolResponse(toolResponses, { tool: mcpTool, status: 'invoking' }, onChunk)
+
         const toolCallResponse = await callMCPTool(mcpTool)
-        console.log(toolCallResponse)
+
+        console.log('[OpenAIProvider] toolCallResponse', toolCallResponse)
+
         reqMessages.push({
           role: 'tool',
           content: toolCallResponse.content,
           tool_call_id: toolCall.id
         } as ChatCompletionToolMessageParam)
-        upsertMCPToolResponse(
-          toolResponses,
-          {
-            tool: mcpTool,
-            status: 'done',
-            response: toolCallResponse
-          },
-          onChunk
-        )
+
+        upsertMCPToolResponse(toolResponses, { tool: mcpTool, status: 'done', response: toolCallResponse }, onChunk)
       }

       const newStream = await this.sdk.chat.completions
diff --git a/src/renderer/src/services/ApiService.ts b/src/renderer/src/services/ApiService.ts
index 00421d19..4af7dd4c 100644
--- a/src/renderer/src/services/ApiService.ts
+++ b/src/renderer/src/services/ApiService.ts
@@ -5,7 +5,7 @@ import { setGenerating } from '@renderer/store/runtime'
 import { Assistant, Message, Model, Provider, Suggestion } from '@renderer/types'
 import { addAbortController } from '@renderer/utils/abortController'
 import { formatMessageError } from '@renderer/utils/error'
-import { findLast, isEmpty } from 'lodash'
+import { cloneDeep, findLast, isEmpty } from 'lodash'

 import AiProvider from '../providers/AiProvider'
 import {
@@ -99,7 +99,7 @@ export async function fetchChatCompletion({
   }

   if (mcpToolResponse) {
-    message.metadata = { ...message.metadata, mcpTools: mcpToolResponse }
+    message.metadata = { ...message.metadata, mcpTools: cloneDeep(mcpToolResponse) }
   }

   // Handle citations from Perplexity API