fix: Improve MCP tool response handling and logging

- Add more descriptive console logging for tool call responses
- Use cloneDeep when storing MCP tool responses to prevent reference issues
- Simplify upsertMCPToolResponse function calls by collapsing multi-line arguments onto single lines
This commit is contained in:
kangfenmao 2025-03-09 13:37:37 +08:00
parent 9c9f200874
commit a8941326dc
2 changed files with 9 additions and 20 deletions

View File

@ -385,30 +385,19 @@ export default class OpenAIProvider extends BaseProvider {
continue
}
upsertMCPToolResponse(
toolResponses,
{
tool: mcpTool,
status: 'invoking'
},
onChunk
)
upsertMCPToolResponse(toolResponses, { tool: mcpTool, status: 'invoking' }, onChunk)
const toolCallResponse = await callMCPTool(mcpTool)
console.log(toolCallResponse)
console.log('[OpenAIProvider] toolCallResponse', toolCallResponse)
reqMessages.push({
role: 'tool',
content: toolCallResponse.content,
tool_call_id: toolCall.id
} as ChatCompletionToolMessageParam)
upsertMCPToolResponse(
toolResponses,
{
tool: mcpTool,
status: 'done',
response: toolCallResponse
},
onChunk
)
upsertMCPToolResponse(toolResponses, { tool: mcpTool, status: 'done', response: toolCallResponse }, onChunk)
}
const newStream = await this.sdk.chat.completions

View File

@ -5,7 +5,7 @@ import { setGenerating } from '@renderer/store/runtime'
import { Assistant, Message, Model, Provider, Suggestion } from '@renderer/types'
import { addAbortController } from '@renderer/utils/abortController'
import { formatMessageError } from '@renderer/utils/error'
import { findLast, isEmpty } from 'lodash'
import { cloneDeep, findLast, isEmpty } from 'lodash'
import AiProvider from '../providers/AiProvider'
import {
@ -99,7 +99,7 @@ export async function fetchChatCompletion({
}
if (mcpToolResponse) {
message.metadata = { ...message.metadata, mcpTools: mcpToolResponse }
message.metadata = { ...message.metadata, mcpTools: cloneDeep(mcpToolResponse) }
}
// Handle citations from Perplexity API