diff --git a/mini_agent/llm/openai_client.py b/mini_agent/llm/openai_client.py
index b8f891c..6e8a35b 100644
--- a/mini_agent/llm/openai_client.py
+++ b/mini_agent/llm/openai_client.py
@@ -144,13 +144,19 @@ def _convert_messages(self, messages: list[Message]) -> tuple[str | None, list[d
             if msg.tool_calls:
                 tool_calls_list = []
                 for tool_call in msg.tool_calls:
+                    arguments_json = tool_call.function.arguments_json
+                    if arguments_json is None:
+                        # Fall back to deterministic dump if raw string missing
+                        arguments_json = json.dumps(
+                            tool_call.function.arguments, separators=(",", ":"), sort_keys=True
+                        )
                     tool_calls_list.append(
                         {
                             "id": tool_call.id,
                             "type": "function",
                             "function": {
                                 "name": tool_call.function.name,
-                                "arguments": json.dumps(tool_call.function.arguments),
+                                "arguments": arguments_json,
                             },
                         }
                     )
@@ -223,8 +229,9 @@ def _parse_response(self, response: Any) -> LLMResponse:
         tool_calls = []
         if response.tool_calls:
             for tool_call in response.tool_calls:
-                # Parse arguments from JSON string
-                arguments = json.loads(tool_call.function.arguments)
+                # Parse arguments from JSON string while preserving the raw text
+                raw_arguments = tool_call.function.arguments
+                arguments = json.loads(raw_arguments) if raw_arguments else {}
 
                 tool_calls.append(
                     ToolCall(
@@ -233,6 +240,7 @@
                         function=FunctionCall(
                             name=tool_call.function.name,
                             arguments=arguments,
+                            arguments_json=raw_arguments,
                         ),
                     )
                 )
diff --git a/mini_agent/schema/schema.py b/mini_agent/schema/schema.py
index d3b032f..910c5ca 100644
--- a/mini_agent/schema/schema.py
+++ b/mini_agent/schema/schema.py
@@ -15,7 +15,8 @@ class FunctionCall(BaseModel):
     """Function call details."""
 
     name: str
-    arguments: dict[str, Any]  # Function arguments as dict
+    arguments: dict[str, Any]  # Parsed function arguments
+    arguments_json: str | None = None  # Raw JSON string (for deterministic replay)
 
 
 class ToolCall(BaseModel):