fix: prevent tool name duplication in streaming accumulator (MiniMax/NVIDIA NIM)

Based on #11984 by @maxchernin.  Fixes #8259.

Some providers (MiniMax M2.7 via NVIDIA NIM) resend the full function
name in every streaming chunk instead of only the first.  The old
accumulator used += which concatenated them into 'read_fileread_file'.

Changed to simple assignment (=), matching the OpenAI Node SDK, LiteLLM,
and Vercel AI SDK patterns.  Function names are atomic identifiers
delivered complete — no provider splits them across chunks, so
concatenation was never the correct semantics.
This commit is contained in:
jarvischer 2026-04-18 22:46:36 +05:30 committed by Teknium
parent 0bebf5b948
commit 0f778f7768
3 changed files with 54 additions and 1 deletions

View file

@ -141,6 +141,50 @@ class TestStreamingAccumulator:
assert tc[0].function.name == "terminal"
assert tc[0].function.arguments == '{"command": "ls"}'
@patch("run_agent.AIAgent._create_request_openai_client")
@patch("run_agent.AIAgent._close_request_openai_client")
def test_tool_name_not_duplicated_when_resent_per_chunk(self, mock_close, mock_create):
    """Regression test for bug #8259.

    MiniMax M2.7 served through NVIDIA NIM repeats the full function name
    in every streaming delta rather than sending it once.  The old `+=`
    accumulation therefore produced "read_fileread_file"; plain assignment
    (the OpenAI Node SDK / LiteLLM behavior) must yield a single clean name.
    """
    from run_agent import AIAgent

    # First delta carries the name only; the provider then re-sends the
    # complete name alongside each argument fragment.
    stream = [
        _make_stream_chunk(tool_calls=[
            _make_tool_call_delta(index=0, tc_id="call_nim", name="read_file")
        ]),
    ]
    for fragment in ('{"path":', ' "x.py"}'):
        stream.append(_make_stream_chunk(tool_calls=[
            _make_tool_call_delta(
                index=0, tc_id="call_nim", name="read_file", arguments=fragment
            )
        ]))
    stream.append(_make_stream_chunk(finish_reason="tool_calls"))

    fake_client = MagicMock()
    fake_client.chat.completions.create.return_value = iter(stream)
    mock_create.return_value = fake_client

    agent = AIAgent(
        model="test/model",
        quiet_mode=True,
        skip_context_files=True,
        skip_memory=True,
    )
    agent.api_mode = "chat_completions"
    agent._interrupt_requested = False

    result = agent._interruptible_streaming_api_call({})
    calls = result.choices[0].message.tool_calls

    # Exactly one accumulated tool call, with the name stored once and the
    # argument fragments concatenated in order.
    assert calls is not None
    assert len(calls) == 1
    assert calls[0].function.name == "read_file"
    assert calls[0].function.arguments == '{"path": "x.py"}'
@patch("run_agent.AIAgent._create_request_openai_client")
@patch("run_agent.AIAgent._close_request_openai_client")
def test_tool_call_extra_content_preserved(self, mock_close, mock_create):