fix: align MiniMax provider with official API docs

Aligns MiniMax provider with official API documentation. Fixes 6 bugs:
transport mismatch (openai_chat -> anthropic_messages), credential leak
in switch_model(), prompt caching sent to non-Anthropic endpoints,
dot-to-hyphen model name corruption, trajectory compressor URL routing,
and stale doctor health check.

Also corrects context window (204,800), thinking support (manual mode),
max output (131,072), and model catalog (M2 family only on /anthropic).

Source: https://platform.minimax.io/docs/api-reference/text-anthropic-api

Co-authored-by: kshitijk4poor <kshitijk4poor@users.noreply.github.com>
This commit is contained in:
kshitijk4poor 2026-04-10 03:53:18 -07:00 committed by Teknium
parent d9f53dba4c
commit d442f25a2f
9 changed files with 237 additions and 74 deletions

View file

@@ -375,8 +375,9 @@ class TrajectoryCompressor:
f"Missing API key. Set {self.config.api_key_env} "
f"environment variable.")
from openai import OpenAI
from agent.auxiliary_client import _to_openai_base_url
self.client = OpenAI(
api_key=api_key, base_url=self.config.base_url)
api_key=api_key, base_url=_to_openai_base_url(self.config.base_url))
# AsyncOpenAI is created lazily in _get_async_client() so it
# binds to the current event loop — avoids "Event loop is closed"
# when process_directory() is called multiple times (each call
@@ -395,10 +396,11 @@ class TrajectoryCompressor:
avoiding "Event loop is closed" errors on repeated calls.
"""
from openai import AsyncOpenAI
from agent.auxiliary_client import _to_openai_base_url
# Always create a fresh client so it binds to the running loop.
self.async_client = AsyncOpenAI(
api_key=self._async_client_api_key,
base_url=self.config.base_url,
base_url=_to_openai_base_url(self.config.base_url),
)
return self.async_client