diff --git a/src/ai/client.py b/src/ai/client.py
index cc075de..861604d 100644
--- a/src/ai/client.py
+++ b/src/ai/client.py
@@ -187,14 +187,44 @@ class AIClient:
 
         # 准备工具
         tools = None
+        tool_names: List[str] = []
         if use_tools and self.tools.list():
             tools = self.tools.to_openai_format()
+            tool_names = [tool.name for tool in self.tools.list()]
+
+        logger.info(
+            "LLM请求: "
+            f"user_id={user_id}, use_memory={use_memory}, use_tools={use_tools}, "
+            f"registered_tools={len(tool_names)}, sent_tools={len(tools or [])}, "
+            f"tool_names={self._preview_log_payload(tool_names)}"
+        )
+        logger.info(
+            "LLM输入: "
+            f"user_message={self._preview_log_payload(user_message)}"
+        )
 
         # 调用模型
         if stream:
             return self._chat_stream(messages, tools, **kwargs)
         else:
             response = await self.model.chat(messages, tools, **kwargs)
+            response_tool_count = len(response.tool_calls or [])
+            response_tool_names = []
+            for tool_call in response.tool_calls or []:
+                if isinstance(tool_call, dict):
+                    function_info = tool_call.get("function") or {}
+                    response_tool_names.append(function_info.get("name"))
+                else:
+                    function_info = getattr(tool_call, "function", None)
+                    response_tool_names.append(
+                        getattr(function_info, "name", None) if function_info else None
+                    )
+            logger.info(
+                "LLM首轮输出: "
+                f"tool_calls={response_tool_count}, "
+                f"tool_names={self._preview_log_payload(response_tool_names)}, "
+                f"content={self._preview_log_payload(response.content)}"
+            )
 
             # 处理工具调用
             if response.tool_calls:
@@ -312,7 +342,12 @@
             ))
 
         # 再次调用模型获取最终响应
-        return await self.model.chat(messages, tools, **kwargs)
+        final_response = await self.model.chat(messages, tools, **kwargs)
+        logger.info(
+            "LLM最终输出: "
+            f"content={self._preview_log_payload(final_response.content)}"
+        )
+        return final_response
 
     def _parse_tool_call(self, tool_call: Any) -> Tuple[Optional[str], Dict[str, Any], Optional[str]]:
         """兼容不同 SDK 返回的工具调用结构。"""
diff --git a/src/ai/models/openai_model.py b/src/ai/models/openai_model.py
index 7645d78..b005fca 100644
--- a/src/ai/models/openai_model.py
+++ b/src/ai/models/openai_model.py
@@ -145,6 +145,20 @@ class OpenAIModel(BaseAIModel):
         params.update(self._build_tool_params(tools))
         params.update(kwargs)
 
+        tool_mode = "none"
+        tool_count = 0
+        if "tools" in params:
+            tool_mode = "tools"
+            tool_count = len(params.get("tools") or [])
+        elif "functions" in params:
+            tool_mode = "functions"
+            tool_count = len(params.get("functions") or [])
+
+        self.logger.info(
+            "OpenAI chat request: "
+            f"model={self.config.model_name}, tool_mode={tool_mode}, tool_count={tool_count}"
+        )
+
         response = await self._create_completion_with_fallback(params, tools)
         choice = response.choices[0]