feat(models): add deepseek-v4-pro and deepseek-v4-flash (#14934)

- OpenRouter: deepseek/deepseek-v4-pro, deepseek/deepseek-v4-flash
- Nous Portal (fallback list): same two slugs
- Native DeepSeek provider: bare deepseek-v4-pro, deepseek-v4-flash
  alongside existing deepseek-chat/deepseek-reasoner

Context length resolves via existing 'deepseek' substring entry (128K)
in DEFAULT_CONTEXT_LENGTHS.
This commit is contained in:
Teknium 2026-04-23 22:35:04 -07:00 committed by GitHub
parent 5a1c599412
commit 2e78a2b6b2
No known key found for this signature in database
GPG key ID: B5690EEEBB952194

View file

@@ -33,6 +33,8 @@ COPILOT_REASONING_EFFORTS_O_SERIES = ["low", "medium", "high"]
 # (model_id, display description shown in menus)
 OPENROUTER_MODELS: list[tuple[str, str]] = [
     ("moonshotai/kimi-k2.6", "recommended"),
+    ("deepseek/deepseek-v4-pro", ""),
+    ("deepseek/deepseek-v4-flash", ""),
     ("anthropic/claude-opus-4.7", ""),
     ("anthropic/claude-opus-4.6", ""),
     ("anthropic/claude-sonnet-4.6", ""),
@@ -109,6 +111,8 @@ def _codex_curated_models() -> list[str]:
 _PROVIDER_MODELS: dict[str, list[str]] = {
     "nous": [
         "moonshotai/kimi-k2.6",
+        "deepseek/deepseek-v4-pro",
+        "deepseek/deepseek-v4-flash",
         "xiaomi/mimo-v2.5-pro",
         "xiaomi/mimo-v2.5",
         "anthropic/claude-opus-4.7",
@@ -246,6 +250,8 @@ _PROVIDER_MODELS: dict[str, list[str]] = {
         "claude-haiku-4-5-20251001",
     ],
     "deepseek": [
+        "deepseek-v4-pro",
+        "deepseek-v4-flash",
         "deepseek-chat",
         "deepseek-reasoner",
     ],