fix: repair OpenCode model routing and selection (#4508)

OpenCode Zen and Go are mixed-API-surface providers — different models
behind them use different API surfaces (GPT on Zen uses codex_responses,
Claude on Zen uses anthropic_messages, MiniMax on Go uses
anthropic_messages, GLM/Kimi on Go use chat_completions).

Changes:
- Add normalize_opencode_model_id() and opencode_model_api_mode() to
  models.py for model ID normalization and API surface routing
- Add _provider_supports_explicit_api_mode() to runtime_provider.py
  to prevent stale api_mode from leaking across provider switches
- Wire opencode routing into all three api_mode resolution paths:
  pool entry, api_key provider, and explicit runtime
- Add api_mode field to ModelSwitchResult for propagation through the
  switch pipeline
- Consolidate _PROVIDER_MODELS from main.py into models.py (single
  source of truth, eliminates duplicate dict)
- Add opencode normalization to setup wizard and model picker flows
- Add opencode block to _normalize_model_for_provider in CLI
- Add opencode-zen/go fallback model lists to setup.py

Tests: 160 targeted tests pass (26 new tests covering normalization,
api_mode routing per provider/model, persistence, and setup wizard
normalization).

Based on PR #3017 by SaM13997.

Co-authored-by: SaM13997 <139419381+SaM13997@users.noreply.github.com>
This commit is contained in:
Teknium 2026-04-02 09:36:24 -07:00 committed by GitHub
parent f4f64c413f
commit 28a073edc6
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
13 changed files with 381 additions and 84 deletions

View file

@@ -114,6 +114,8 @@ _DEFAULT_PROVIDER_MODELS = {
"minimax-cn": ["MiniMax-M2.7", "MiniMax-M2.7-highspeed", "MiniMax-M2.5", "MiniMax-M2.5-highspeed", "MiniMax-M2.1"],
"ai-gateway": ["anthropic/claude-opus-4.6", "anthropic/claude-sonnet-4.6", "openai/gpt-5", "google/gemini-3-flash"],
"kilocode": ["anthropic/claude-opus-4.6", "anthropic/claude-sonnet-4.6", "openai/gpt-5.4", "google/gemini-3-pro-preview", "google/gemini-3-flash-preview"],
"opencode-zen": ["gpt-5.4", "gpt-5.3-codex", "claude-sonnet-4-6", "gemini-3-flash", "glm-5", "kimi-k2.5", "minimax-m2.7"],
"opencode-go": ["glm-5", "kimi-k2.5", "minimax-m2.5", "minimax-m2.7"],
"huggingface": [
"Qwen/Qwen3.5-397B-A17B", "Qwen/Qwen3-235B-A22B-Thinking-2507",
"Qwen/Qwen3-Coder-480B-A35B-Instruct", "deepseek-ai/DeepSeek-R1-0528",
@@ -189,6 +191,8 @@ def _setup_provider_model_selection(config, provider_id, current_model, prompt_c
fetch_api_models,
fetch_github_model_catalog,
normalize_copilot_model_id,
normalize_opencode_model_id,
opencode_model_api_mode,
)
pconfig = PROVIDER_REGISTRY[provider_id]
@@ -242,6 +246,11 @@ def _setup_provider_model_selection(config, provider_id, current_model, prompt_c
f" Use \"Custom model\" if the model you expect isn't listed."
)
if provider_id in {"opencode-zen", "opencode-go"}:
provider_models = [normalize_opencode_model_id(provider_id, mid) for mid in provider_models]
current_model = normalize_opencode_model_id(provider_id, current_model)
provider_models = list(dict.fromkeys(mid for mid in provider_models if mid))
model_choices = list(provider_models)
model_choices.append("Custom model")
model_choices.append(f"Keep current ({current_model})")
@@ -259,6 +268,8 @@ def _setup_provider_model_selection(config, provider_id, current_model, prompt_c
catalog=catalog,
api_key=api_key,
) or selected_model
elif provider_id in {"opencode-zen", "opencode-go"}:
selected_model = normalize_opencode_model_id(provider_id, selected_model)
_set_default_model(config, selected_model)
elif model_idx == len(provider_models):
custom = prompt_fn("Enter model name")
@@ -269,6 +280,8 @@ def _setup_provider_model_selection(config, provider_id, current_model, prompt_c
catalog=catalog,
api_key=api_key,
) or custom
elif provider_id in {"opencode-zen", "opencode-go"}:
selected_model = normalize_opencode_model_id(provider_id, custom)
else:
selected_model = custom
_set_default_model(config, selected_model)
@@ -300,6 +313,10 @@ def _setup_provider_model_selection(config, provider_id, current_model, prompt_c
catalog=catalog,
api_key=api_key,
)
elif provider_id in {"opencode-zen", "opencode-go"} and selected_model:
model_cfg = _model_config_dict(config)
model_cfg["api_mode"] = opencode_model_api_mode(provider_id, selected_model)
config["model"] = model_cfg
def _sync_model_from_disk(config: Dict[str, Any]) -> None: