diff --git a/run_agent.py b/run_agent.py index a47455e53..e88096a60 100644 --- a/run_agent.py +++ b/run_agent.py @@ -5868,7 +5868,15 @@ class AIAgent: entry["id"] = tc_delta.id if tc_delta.function: if tc_delta.function.name: - entry["function"]["name"] += tc_delta.function.name + # Use assignment, not +=. Function names are + # atomic identifiers delivered complete in the + # first chunk (OpenAI spec). Some providers + # (MiniMax M2.7 via NVIDIA NIM) resend the full + # name in every chunk; concatenation would + # produce "read_fileread_file". Assignment + # (matching the OpenAI Node SDK / LiteLLM / + # Vercel AI patterns) is immune to this. + entry["function"]["name"] = tc_delta.function.name if tc_delta.function.arguments: entry["function"]["arguments"] += tc_delta.function.arguments extra = getattr(tc_delta, "extra_content", None) diff --git a/scripts/release.py b/scripts/release.py index 4c32dccfd..88ddb2f43 100755 --- a/scripts/release.py +++ b/scripts/release.py @@ -266,6 +266,7 @@ AUTHOR_MAP = { "limkuan24@gmail.com": "WideLee", "aviralarora002@gmail.com": "AviArora02-commits", "junminliu@gmail.com": "JimLiu", + "jarvischer@gmail.com": "maxchernin", } diff --git a/tests/run_agent/test_streaming.py b/tests/run_agent/test_streaming.py index 6afe36ee3..e4825599a 100644 --- a/tests/run_agent/test_streaming.py +++ b/tests/run_agent/test_streaming.py @@ -141,6 +141,50 @@ class TestStreamingAccumulator: assert tc[0].function.name == "terminal" assert tc[0].function.arguments == '{"command": "ls"}' + @patch("run_agent.AIAgent._create_request_openai_client") + @patch("run_agent.AIAgent._close_request_openai_client") + def test_tool_name_not_duplicated_when_resent_per_chunk(self, mock_close, mock_create): + """MiniMax M2.7 via NVIDIA NIM resends the full name in every chunk. + + Bug #8259: the old += accumulation produced "read_fileread_file". + Assignment (matching OpenAI Node SDK / LiteLLM) prevents this.
+ """ + from run_agent import AIAgent + + chunks = [ + _make_stream_chunk(tool_calls=[ + _make_tool_call_delta(index=0, tc_id="call_nim", name="read_file") + ]), + _make_stream_chunk(tool_calls=[ + _make_tool_call_delta(index=0, tc_id="call_nim", name="read_file", arguments='{"path":') + ]), + _make_stream_chunk(tool_calls=[ + _make_tool_call_delta(index=0, tc_id="call_nim", name="read_file", arguments=' "x.py"}') + ]), + _make_stream_chunk(finish_reason="tool_calls"), + ] + + mock_client = MagicMock() + mock_client.chat.completions.create.return_value = iter(chunks) + mock_create.return_value = mock_client + + agent = AIAgent( + model="test/model", + quiet_mode=True, + skip_context_files=True, + skip_memory=True, + ) + agent.api_mode = "chat_completions" + agent._interrupt_requested = False + + response = agent._interruptible_streaming_api_call({}) + + tc = response.choices[0].message.tool_calls + assert tc is not None + assert len(tc) == 1 + assert tc[0].function.name == "read_file" + assert tc[0].function.arguments == '{"path": "x.py"}' + @patch("run_agent.AIAgent._create_request_openai_client") @patch("run_agent.AIAgent._close_request_openai_client") def test_tool_call_extra_content_preserved(self, mock_close, mock_create):