diff --git a/agent/auxiliary_client.py b/agent/auxiliary_client.py
index 5e8a60e76..a04a9568c 100644
--- a/agent/auxiliary_client.py
+++ b/agent/auxiliary_client.py
@@ -144,6 +144,7 @@ _API_KEY_PROVIDER_AUX_MODELS: Dict[str, str] = {
     "stepfun": "step-3.5-flash",
     "kimi-coding-cn": "kimi-k2-turbo-preview",
     "minimax": "MiniMax-M2.7",
+    "minimax-oauth": "MiniMax-M2.7-highspeed",
     "minimax-cn": "MiniMax-M2.7",
     "anthropic": "claude-haiku-4-5-20251001",
     "ai-gateway": "google/gemini-3-flash",
@@ -2672,7 +2673,7 @@ def _get_task_extra_body(task: str) -> Dict[str, Any]:
 
 # Providers that use Anthropic-compatible endpoints (via OpenAI SDK wrapper).
 # Their image content blocks must use Anthropic format, not OpenAI format.
-_ANTHROPIC_COMPAT_PROVIDERS = frozenset({"minimax", "minimax-cn"})
+_ANTHROPIC_COMPAT_PROVIDERS = frozenset({"minimax", "minimax-oauth", "minimax-cn"})
 
 
 def _is_anthropic_compat_endpoint(provider: str, base_url: str) -> bool:
diff --git a/agent/model_metadata.py b/agent/model_metadata.py
index 850e16662..04746fdf8 100644
--- a/agent/model_metadata.py
+++ b/agent/model_metadata.py
@@ -46,7 +46,7 @@ def _resolve_requests_verify() -> bool | str:
 # are preserved so the full model name reaches cache lookups and server queries.
 _PROVIDER_PREFIXES: frozenset[str] = frozenset({
     "openrouter", "nous", "openai-codex", "copilot", "copilot-acp",
-    "gemini", "ollama-cloud", "zai", "kimi-coding", "kimi-coding-cn", "stepfun", "minimax", "minimax-cn", "anthropic", "deepseek",
+    "gemini", "ollama-cloud", "zai", "kimi-coding", "kimi-coding-cn", "stepfun", "minimax", "minimax-oauth", "minimax-cn", "anthropic", "deepseek",
     "opencode-zen", "opencode-go", "ai-gateway", "kilocode",
     "alibaba", "qwen-oauth",
     "xiaomi",
diff --git a/hermes_cli/main.py b/hermes_cli/main.py
index 7de68d2cb..ffd6ec4cf 100644
--- a/hermes_cli/main.py
+++ b/hermes_cli/main.py
@@ -1594,6 +1594,8 @@ def select_provider_and_model(args=None):
         _model_flow_openai_codex(config, current_model)
     elif selected_provider == "qwen-oauth":
         _model_flow_qwen_oauth(config, current_model)
+    elif selected_provider == "minimax-oauth":
+        _model_flow_minimax_oauth(config, current_model, args=args)
     elif selected_provider == "google-gemini-cli":
         _model_flow_google_gemini_cli(config, current_model)
     elif selected_provider == "copilot-acp":
@@ -2474,6 +2476,53 @@ def _model_flow_qwen_oauth(_config, current_model=""):
         print("No change.")
 
 
+def _model_flow_minimax_oauth(config, current_model="", args=None):
+    """MiniMax OAuth provider: ensure logged in, then pick model."""
+    from hermes_cli.auth import (
+        get_provider_auth_state,
+        _prompt_model_selection,
+        _save_model_choice,
+        _update_config_for_provider,
+        resolve_minimax_oauth_runtime_credentials,
+        AuthError,
+        format_auth_error,
+        _login_minimax_oauth,
+        PROVIDER_REGISTRY,
+    )
+    state = get_provider_auth_state("minimax-oauth")
+    if not state or not state.get("access_token"):
+        print("Not logged into MiniMax. Starting OAuth login...")
+        print()
+        try:
+            mock_args = argparse.Namespace(
+                region=getattr(args, "region", None) or "global",
+                no_browser=bool(getattr(args, "no_browser", False)),
+                timeout=getattr(args, "timeout", None) or 15.0,
+            )
+            _login_minimax_oauth(mock_args, PROVIDER_REGISTRY["minimax-oauth"])
+        except SystemExit:
+            print("Login cancelled or failed.")
+            return
+        except Exception as exc:
+            print(f"Login failed: {exc}")
+            return
+
+    try:
+        creds = resolve_minimax_oauth_runtime_credentials()
+    except AuthError as exc:
+        print(format_auth_error(exc))
+        return
+
+    from hermes_cli.models import _PROVIDER_MODELS
+    model_ids = _PROVIDER_MODELS.get("minimax-oauth", [])
+    selected = _prompt_model_selection(model_ids, current_model)
+    if not selected:
+        return
+    _save_model_choice(selected)
+    _update_config_for_provider("minimax-oauth", creds["base_url"])
+    print(f"\u2713 Using MiniMax model: {selected}")
+
+
 def _model_flow_google_gemini_cli(_config, current_model=""):
     """Google Gemini OAuth (PKCE) via Cloud Code Assist — supports free AND paid tiers.
 
@@ -6949,6 +6998,7 @@ For more help on a command:
         "kimi-coding-cn",
         "stepfun",
         "minimax",
+        "minimax-oauth",
         "minimax-cn",
         "kilocode",
         "xiaomi",
diff --git a/hermes_cli/models.py b/hermes_cli/models.py
index 3a902ffdf..6ac5413f2 100644
--- a/hermes_cli/models.py
+++ b/hermes_cli/models.py
@@ -248,6 +248,10 @@ _PROVIDER_MODELS: dict[str, list[str]] = {
         "MiniMax-M2.1",
         "MiniMax-M2",
     ],
+    "minimax-oauth": [
+        "MiniMax-M2.7",
+        "MiniMax-M2.7-highspeed",
+    ],
     "minimax-cn": [
         "MiniMax-M2.7",
         "MiniMax-M2.5",
@@ -732,6 +736,7 @@ CANONICAL_PROVIDERS: list[ProviderEntry] = [
     ProviderEntry("kimi-coding-cn", "Kimi / Moonshot (China)", "Kimi / Moonshot China (Moonshot CN direct API)"),
     ProviderEntry("stepfun", "StepFun Step Plan", "StepFun Step Plan (agent/coding models via Step Plan API)"),
     ProviderEntry("minimax", "MiniMax", "MiniMax (global direct API)"),
+    ProviderEntry("minimax-oauth", "MiniMax (OAuth)", "MiniMax via OAuth browser login (Coding Plan, minimax.io)"),
     ProviderEntry("minimax-cn", "MiniMax (China)", "MiniMax China (domestic direct API)"),
     ProviderEntry("alibaba", "Alibaba Cloud (DashScope)","Alibaba Cloud / DashScope Coding (Qwen + multi-provider)"),
     ProviderEntry("ollama-cloud", "Ollama Cloud", "Ollama Cloud (cloud-hosted open models — ollama.com)"),
@@ -771,6 +776,9 @@ _PROVIDER_ALIASES = {
     "arceeai": "arcee",
     "minimax-china": "minimax-cn",
     "minimax_cn": "minimax-cn",
+    "minimax-portal": "minimax-oauth",
+    "minimax-global": "minimax-oauth",
+    "minimax_oauth": "minimax-oauth",
     "claude": "anthropic",
     "claude-code": "anthropic",
     "deep-seek": "deepseek",