feat(providers): add Google AI Studio (Gemini) as a first-class provider

Cherry-picked from PR #5494 by kshitijk4poor.
Adds native Gemini support via Google's OpenAI-compatible endpoint.
Zero new dependencies.
This commit is contained in:
Teknium 2026-04-06 10:14:01 -07:00 committed by Teknium
parent 85973e0082
commit 6dfab35501
11 changed files with 283 additions and 7 deletions

View file

@ -14,6 +14,16 @@
# LLM_MODEL is no longer read from .env — this line is kept for reference only.
# LLM_MODEL=anthropic/claude-opus-4.6
# =============================================================================
# LLM PROVIDER (Google AI Studio / Gemini)
# =============================================================================
# Native Gemini API via Google's OpenAI-compatible endpoint.
# Get your key at: https://aistudio.google.com/app/apikey
# GOOGLE_API_KEY=your_google_ai_studio_key_here
# GEMINI_API_KEY=your_gemini_key_here # alias for GOOGLE_API_KEY
# Optional base URL override (default: Google's OpenAI-compatible endpoint)
# GEMINI_BASE_URL=https://generativelanguage.googleapis.com/v1beta/openai
# =============================================================================
# LLM PROVIDER (z.ai / GLM)
# =============================================================================

View file

@ -55,6 +55,7 @@ logger = logging.getLogger(__name__)
# Default auxiliary models for direct API-key providers (cheap/fast for side tasks)
_API_KEY_PROVIDER_AUX_MODELS: Dict[str, str] = {
"gemini": "gemini-2.5-flash",
"zai": "glm-4.5-flash",
"kimi-coding": "kimi-k2-turbo-preview",
"minimax": "MiniMax-M2.7-highspeed",

View file

@ -24,10 +24,11 @@ logger = logging.getLogger(__name__)
# are preserved so the full model name reaches cache lookups and server queries.
_PROVIDER_PREFIXES: frozenset[str] = frozenset({
"openrouter", "nous", "openai-codex", "copilot", "copilot-acp",
"zai", "kimi-coding", "minimax", "minimax-cn", "anthropic", "deepseek",
"gemini", "zai", "kimi-coding", "minimax", "minimax-cn", "anthropic", "deepseek",
"opencode-zen", "opencode-go", "ai-gateway", "kilocode", "alibaba",
"custom", "local",
# Common aliases
"google", "google-gemini", "google-ai-studio",
"glm", "z-ai", "z.ai", "zhipu", "github", "github-copilot",
"github-models", "kimi", "moonshot", "claude", "deep-seek",
"opencode", "zen", "go", "vercel", "kilo", "dashscope", "aliyun", "qwen",
@ -101,6 +102,13 @@ DEFAULT_CONTEXT_LENGTHS = {
"gpt-4": 128000,
# Google
"gemini": 1048576,
# Gemma (open models served via AI Studio)
"gemma-4-31b": 262144,
"gemma-4-26b": 262144,
"gemma-4-e4b": 131072,
"gemma-4-e2b": 131072,
"gemma-3": 131072,
"gemma": 8192, # fallback for older gemma models
# DeepSeek
"deepseek": 128000,
# Meta
@ -175,7 +183,7 @@ _URL_TO_PROVIDER: Dict[str, str] = {
"dashscope.aliyuncs.com": "alibaba",
"dashscope-intl.aliyuncs.com": "alibaba",
"openrouter.ai": "openrouter",
"generativelanguage.googleapis.com": "google",
"generativelanguage.googleapis.com": "gemini",
"inference-api.nousresearch.com": "nous",
"api.deepseek.com": "deepseek",
"api.githubcopilot.com": "copilot",

View file

@ -18,7 +18,8 @@ model:
# "anthropic" - Direct Anthropic API (requires: ANTHROPIC_API_KEY)
# "openai-codex" - OpenAI Codex (requires: hermes login --provider openai-codex)
# "copilot" - GitHub Copilot / GitHub Models (requires: GITHUB_TOKEN)
# "zai" - z.ai / ZhipuAI GLM (requires: GLM_API_KEY)
# "gemini" - Use Google AI Studio direct (requires: GOOGLE_API_KEY or GEMINI_API_KEY)
# "zai" - Use z.ai / ZhipuAI GLM models (requires: GLM_API_KEY)
# "kimi-coding" - Kimi / Moonshot AI (requires: KIMI_API_KEY)
# "minimax" - MiniMax global (requires: MINIMAX_API_KEY)
# "minimax-cn" - MiniMax China (requires: MINIMAX_CN_API_KEY)
@ -315,7 +316,8 @@ compression:
# "auto" - Best available: OpenRouter → Nous Portal → main endpoint (default)
# "openrouter" - Force OpenRouter (requires OPENROUTER_API_KEY)
# "nous" - Force Nous Portal (requires: hermes login)
# "codex" - Force Codex OAuth (requires: hermes model → Codex).
# "gemini" - Force Google AI Studio direct (requires: GOOGLE_API_KEY or GEMINI_API_KEY)
# "codex" - Force Codex OAuth (requires: hermes model → Codex).
# Uses gpt-5.3-codex which supports vision.
# "main" - Use your custom endpoint (OPENAI_BASE_URL + OPENAI_API_KEY).
# Works with OpenAI API, local models, or any OpenAI-compatible

View file

@ -69,6 +69,7 @@ DEVICE_AUTH_POLL_INTERVAL_CAP_SECONDS = 1 # poll at most every 1s
DEFAULT_CODEX_BASE_URL = "https://chatgpt.com/backend-api/codex"
DEFAULT_GITHUB_MODELS_BASE_URL = "https://api.githubcopilot.com"
DEFAULT_COPILOT_ACP_BASE_URL = "acp://copilot"
DEFAULT_GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai"
CODEX_OAUTH_CLIENT_ID = "app_EMoamEEZ73f0CkXaXp7hrann"
CODEX_OAUTH_TOKEN_URL = "https://auth.openai.com/oauth/token"
CODEX_ACCESS_TOKEN_REFRESH_SKEW_SECONDS = 120
@ -125,6 +126,14 @@ PROVIDER_REGISTRY: Dict[str, ProviderConfig] = {
inference_base_url=DEFAULT_COPILOT_ACP_BASE_URL,
base_url_env_var="COPILOT_ACP_BASE_URL",
),
"gemini": ProviderConfig(
id="gemini",
name="Google AI Studio",
auth_type="api_key",
inference_base_url="https://generativelanguage.googleapis.com/v1beta/openai",
api_key_env_vars=("GOOGLE_API_KEY", "GEMINI_API_KEY"),
base_url_env_var="GEMINI_BASE_URL",
),
"zai": ProviderConfig(
id="zai",
name="Z.AI / GLM",
@ -758,6 +767,7 @@ def resolve_provider(
# Normalize provider aliases
_PROVIDER_ALIASES = {
"glm": "zai", "z-ai": "zai", "z.ai": "zai", "zhipu": "zai",
"google": "gemini", "google-gemini": "gemini", "google-ai-studio": "gemini",
"kimi": "kimi-coding", "moonshot": "kimi-coding",
"minimax-china": "minimax-cn", "minimax_cn": "minimax-cn",
"claude": "anthropic", "claude-code": "anthropic",

View file

@ -590,6 +590,30 @@ OPTIONAL_ENV_VARS = {
"category": "provider",
"advanced": True,
},
"GOOGLE_API_KEY": {
"description": "Google AI Studio API key (also recognized as GEMINI_API_KEY)",
"prompt": "Google AI Studio API key",
"url": "https://aistudio.google.com/app/apikey",
"password": True,
"category": "provider",
"advanced": True,
},
"GEMINI_API_KEY": {
"description": "Google AI Studio API key (alias for GOOGLE_API_KEY)",
"prompt": "Gemini API key",
"url": "https://aistudio.google.com/app/apikey",
"password": True,
"category": "provider",
"advanced": True,
},
"GEMINI_BASE_URL": {
"description": "Google AI Studio base URL override",
"prompt": "Gemini base URL (leave empty for default)",
"url": None,
"password": False,
"category": "provider",
"advanced": True,
},
"GLM_API_KEY": {
"description": "Z.AI / GLM API key (also recognized as ZAI_API_KEY / Z_AI_API_KEY)",
"prompt": "Z.AI / GLM API key",

View file

@ -921,6 +921,7 @@ def select_provider_and_model(args=None):
"copilot-acp": "GitHub Copilot ACP",
"copilot": "GitHub Copilot",
"anthropic": "Anthropic",
"gemini": "Google AI Studio",
"zai": "Z.AI / GLM",
"kimi-coding": "Kimi / Moonshot",
"minimax": "MiniMax",
@ -952,6 +953,7 @@ def select_provider_and_model(args=None):
extended_providers = [
("copilot-acp", "GitHub Copilot ACP (spawns `copilot --acp --stdio`)"),
("gemini", "Google AI Studio (Gemini models — OpenAI-compatible endpoint)"),
("zai", "Z.AI / GLM (Zhipu AI direct API)"),
("kimi-coding", "Kimi / Moonshot (Moonshot AI direct API)"),
("minimax", "MiniMax (global direct API)"),
@ -1055,7 +1057,7 @@ def select_provider_and_model(args=None):
_model_flow_anthropic(config, current_model)
elif selected_provider == "kimi-coding":
_model_flow_kimi(config, current_model)
elif selected_provider in ("zai", "minimax", "minimax-cn", "kilocode", "opencode-zen", "opencode-go", "ai-gateway", "alibaba", "huggingface"):
elif selected_provider in ("gemini", "zai", "minimax", "minimax-cn", "kilocode", "opencode-zen", "opencode-go", "ai-gateway", "alibaba", "huggingface"):
_model_flow_api_key_provider(config, selected_provider, current_model)
@ -4182,7 +4184,7 @@ For more help on a command:
)
chat_parser.add_argument(
"--provider",
choices=["auto", "openrouter", "nous", "openai-codex", "copilot-acp", "copilot", "anthropic", "huggingface", "zai", "kimi-coding", "minimax", "minimax-cn", "kilocode"],
choices=["auto", "openrouter", "nous", "openai-codex", "copilot-acp", "copilot", "anthropic", "gemini", "huggingface", "zai", "kimi-coding", "minimax", "minimax-cn", "kilocode"],
default=None,
help="Inference provider (default: auto)"
)

View file

@ -41,6 +41,7 @@ _VENDOR_PREFIXES: dict[str, str] = {
"o3": "openai",
"o4": "openai",
"gemini": "google",
"gemma": "google",
"deepseek": "deepseek",
"glm": "z-ai",
"kimi": "moonshotai",
@ -77,6 +78,7 @@ _STRIP_VENDOR_ONLY_PROVIDERS: frozenset[str] = frozenset({
# Providers whose own naming is authoritative -- pass through unchanged.
_PASSTHROUGH_PROVIDERS: frozenset[str] = frozenset({
"gemini",
"zai",
"kimi-coding",
"minimax",

View file

@ -111,6 +111,17 @@ _PROVIDER_MODELS: dict[str, list[str]] = {
"gemini-2.5-pro",
"grok-code-fast-1",
],
"gemini": [
"gemini-2.5-pro",
"gemini-2.5-flash",
"gemini-2.0-flash",
"gemini-2.0-flash-lite",
# Gemma open models (also served via AI Studio)
"gemma-4-31b-it",
"gemma-4-26b-a4b-it",
"gemma-4-e4b-it",
"gemma-4-e2b-it",
],
"zai": [
"glm-5",
"glm-5-turbo",
@ -260,6 +271,7 @@ _PROVIDER_LABELS = {
"copilot-acp": "GitHub Copilot ACP",
"nous": "Nous Portal",
"copilot": "GitHub Copilot",
"gemini": "Google AI Studio",
"zai": "Z.AI / GLM",
"kimi-coding": "Kimi / Moonshot",
"minimax": "MiniMax",
@ -286,6 +298,9 @@ _PROVIDER_ALIASES = {
"github-model": "copilot",
"github-copilot-acp": "copilot-acp",
"copilot-acp-agent": "copilot-acp",
"google": "gemini",
"google-gemini": "gemini",
"google-ai-studio": "gemini",
"kimi": "kimi-coding",
"moonshot": "kimi-coding",
"minimax-china": "minimax-cn",
@ -550,7 +565,8 @@ def list_available_providers() -> list[dict[str, str]]:
# Canonical providers in display order
_PROVIDER_ORDER = [
"openrouter", "nous", "openai-codex", "copilot", "copilot-acp",
"huggingface", "zai", "kimi-coding", "minimax", "minimax-cn", "kilocode", "anthropic", "alibaba",
"gemini", "huggingface",
"zai", "kimi-coding", "minimax", "minimax-cn", "kilocode", "anthropic", "alibaba",
"opencode-zen", "opencode-go",
"ai-gateway", "deepseek", "custom",
]

View file

@ -111,6 +111,10 @@ _DEFAULT_PROVIDER_MODELS = {
"gemini-2.5-pro",
"grok-code-fast-1",
],
"gemini": [
"gemini-2.5-pro", "gemini-2.5-flash", "gemini-2.0-flash", "gemini-2.0-flash-lite",
"gemma-4-31b-it", "gemma-4-26b-a4b-it", "gemma-4-e4b-it", "gemma-4-e2b-it",
],
"zai": ["glm-5", "glm-4.7", "glm-4.5", "glm-4.5-flash"],
"kimi-coding": ["kimi-k2.5", "kimi-k2-thinking", "kimi-k2-turbo-preview"],
"minimax": ["MiniMax-M2.7", "MiniMax-M2.7-highspeed", "MiniMax-M2.5", "MiniMax-M2.5-highspeed", "MiniMax-M2.1"],

View file

@ -0,0 +1,197 @@
"""Tests for Google AI Studio (Gemini) provider integration."""
import os
import pytest
from unittest.mock import patch, MagicMock
from hermes_cli.auth import PROVIDER_REGISTRY, resolve_provider, resolve_api_key_provider_credentials
from hermes_cli.models import _PROVIDER_MODELS, _PROVIDER_LABELS, _PROVIDER_ALIASES, normalize_provider
from hermes_cli.model_normalize import normalize_model_for_provider, detect_vendor
from agent.model_metadata import get_model_context_length
# ── Provider Registry ──
class TestGeminiProviderRegistry:
    """The 'gemini' provider must be present in the registry with its static config."""

    def test_gemini_in_registry(self):
        assert "gemini" in PROVIDER_REGISTRY

    def test_gemini_config(self):
        cfg = PROVIDER_REGISTRY["gemini"]
        assert cfg.id == "gemini"
        assert cfg.name == "Google AI Studio"
        assert cfg.auth_type == "api_key"
        assert cfg.inference_base_url == "https://generativelanguage.googleapis.com/v1beta/openai"

    def test_gemini_env_vars(self):
        cfg = PROVIDER_REGISTRY["gemini"]
        # GOOGLE_API_KEY is checked before the GEMINI_API_KEY alias.
        assert cfg.api_key_env_vars == ("GOOGLE_API_KEY", "GEMINI_API_KEY")
        assert cfg.base_url_env_var == "GEMINI_BASE_URL"

    def test_gemini_base_url(self):
        assert "generativelanguage.googleapis.com" in PROVIDER_REGISTRY["gemini"].inference_base_url
# ── Provider Aliases ──
# Every env var that can influence provider auto-detection or credential
# resolution; cleared before each test so results don't depend on the host env.
PROVIDER_ENV_VARS = (
    "OPENROUTER_API_KEY", "OPENAI_API_KEY", "ANTHROPIC_API_KEY",
    "GOOGLE_API_KEY", "GEMINI_API_KEY", "GEMINI_BASE_URL",
    "GLM_API_KEY", "ZAI_API_KEY", "KIMI_API_KEY",
    "MINIMAX_API_KEY", "DEEPSEEK_API_KEY",
)


@pytest.fixture(autouse=True)
def _clean_provider_env(monkeypatch):
    """Strip all provider credentials from the environment before every test."""
    for name in PROVIDER_ENV_VARS:
        monkeypatch.delenv(name, raising=False)
class TestGeminiAliases:
    """Every Google-flavored alias must resolve to the canonical 'gemini' id,
    and the auth-layer and models-layer alias tables must agree."""

    def test_explicit_gemini(self):
        assert resolve_provider("gemini") == "gemini"

    def test_alias_google(self):
        assert resolve_provider("google") == "gemini"

    def test_alias_google_gemini(self):
        assert resolve_provider("google-gemini") == "gemini"

    def test_alias_google_ai_studio(self):
        assert resolve_provider("google-ai-studio") == "gemini"

    def test_models_py_aliases(self):
        # The models-layer table must mirror the resolver's aliases.
        assert _PROVIDER_ALIASES.get("google") == "gemini"
        assert _PROVIDER_ALIASES.get("google-gemini") == "gemini"
        assert _PROVIDER_ALIASES.get("google-ai-studio") == "gemini"

    def test_normalize_provider(self):
        assert normalize_provider("google") == "gemini"
        assert normalize_provider("gemini") == "gemini"
        assert normalize_provider("google-ai-studio") == "gemini"
# ── Auto-detection ──
class TestGeminiAutoDetection:
    """resolve_provider('auto') must select gemini when a Google key is set
    (the autouse fixture guarantees no other provider keys are present)."""

    def test_auto_detects_google_api_key(self, monkeypatch):
        monkeypatch.setenv("GOOGLE_API_KEY", "test-google-key")
        assert resolve_provider("auto") == "gemini"

    def test_auto_detects_gemini_api_key(self, monkeypatch):
        monkeypatch.setenv("GEMINI_API_KEY", "test-gemini-key")
        assert resolve_provider("auto") == "gemini"

    def test_google_api_key_priority_over_gemini(self, monkeypatch):
        # NOTE(review): this exercises credential resolution, not auto-detection;
        # consider moving it to TestGeminiCredentials.
        monkeypatch.setenv("GOOGLE_API_KEY", "primary-key")
        monkeypatch.setenv("GEMINI_API_KEY", "alias-key")
        creds = resolve_api_key_provider_credentials("gemini")
        assert creds["api_key"] == "primary-key"
        assert creds["source"] == "GOOGLE_API_KEY"
# ── Credential Resolution ──
class TestGeminiCredentials:
    """Credential resolution honors GOOGLE_API_KEY, the GEMINI_API_KEY alias,
    and the GEMINI_BASE_URL override, for both the auth and runtime layers."""

    def test_resolve_with_google_api_key(self, monkeypatch):
        monkeypatch.setenv("GOOGLE_API_KEY", "google-secret")
        resolved = resolve_api_key_provider_credentials("gemini")
        assert resolved["provider"] == "gemini"
        assert resolved["api_key"] == "google-secret"
        assert resolved["base_url"] == "https://generativelanguage.googleapis.com/v1beta/openai"

    def test_resolve_with_gemini_api_key(self, monkeypatch):
        monkeypatch.setenv("GEMINI_API_KEY", "gemini-secret")
        resolved = resolve_api_key_provider_credentials("gemini")
        assert resolved["api_key"] == "gemini-secret"

    def test_resolve_with_custom_base_url(self, monkeypatch):
        monkeypatch.setenv("GOOGLE_API_KEY", "key")
        monkeypatch.setenv("GEMINI_BASE_URL", "https://custom.endpoint/v1")
        resolved = resolve_api_key_provider_credentials("gemini")
        assert resolved["base_url"] == "https://custom.endpoint/v1"

    def test_runtime_gemini(self, monkeypatch):
        monkeypatch.setenv("GOOGLE_API_KEY", "google-key")
        from hermes_cli.runtime_provider import resolve_runtime_provider

        runtime = resolve_runtime_provider(requested="gemini")
        assert runtime["provider"] == "gemini"
        assert runtime["api_mode"] == "chat_completions"
        assert runtime["api_key"] == "google-key"
        assert runtime["base_url"] == "https://generativelanguage.googleapis.com/v1beta/openai"
# ── Model Catalog ──
class TestGeminiModelCatalog:
    """The static catalog must list Gemini and Gemma models and a display label."""

    def test_provider_models_exist(self):
        assert "gemini" in _PROVIDER_MODELS
        catalog = _PROVIDER_MODELS["gemini"]
        for expected in ("gemini-2.5-pro", "gemini-2.5-flash", "gemma-4-31b-it"):
            assert expected in catalog

    def test_provider_label(self):
        assert "gemini" in _PROVIDER_LABELS
        assert _PROVIDER_LABELS["gemini"] == "Google AI Studio"
# ── Model Normalization ──
class TestGeminiModelNormalization:
    """Model-name normalization: the direct gemini provider passes names through
    unchanged, while aggregators (OpenRouter) get a 'google/' vendor prefix."""

    def test_passthrough_bare_name(self):
        assert normalize_model_for_provider("gemini-2.5-flash", "gemini") == "gemini-2.5-flash"

    def test_vendor_prefix_preserved_for_passthrough(self):
        # Renamed from test_strip_vendor_prefix: the assertion verifies the
        # prefix is PRESERVED (gemini is a passthrough provider), not stripped.
        assert normalize_model_for_provider("google/gemini-2.5-flash", "gemini") == "google/gemini-2.5-flash"

    def test_gemma_vendor_detection(self):
        assert detect_vendor("gemma-4-31b-it") == "google"

    def test_gemini_vendor_detection(self):
        assert detect_vendor("gemini-2.5-flash") == "google"

    def test_aggregator_prepends_vendor(self):
        assert normalize_model_for_provider("gemini-2.5-flash", "openrouter") == "google/gemini-2.5-flash"

    def test_gemma_aggregator_prepends_vendor(self):
        assert normalize_model_for_provider("gemma-4-31b-it", "openrouter") == "google/gemma-4-31b-it"
# ── Context Length ──
class TestGeminiContextLength:
    """Context-window lookups for Gemma open models served through AI Studio."""

    def test_gemma_4_31b_context(self):
        assert get_model_context_length("gemma-4-31b-it", provider="gemini") == 262144

    def test_gemma_4_e4b_context(self):
        assert get_model_context_length("gemma-4-e4b-it", provider="gemini") == 131072
# ── Agent Init (no SyntaxError) ──
class TestGeminiAgentInit:
    """Agent construction with the gemini provider must import cleanly and fall
    through to the generic chat_completions path."""

    def test_agent_imports_without_error(self):
        """Verify run_agent.py has no SyntaxError (the critical bug)."""
        import importlib
        import run_agent
        # Reload forces a re-parse even if another test already imported it.
        importlib.reload(run_agent)

    def test_gemini_agent_uses_chat_completions(self, monkeypatch):
        """Gemini falls through to chat_completions — no special elif needed."""
        monkeypatch.setenv("GOOGLE_API_KEY", "test-key")
        # Stub the OpenAI client class so constructing the agent performs no I/O.
        with patch("run_agent.OpenAI") as mock_openai:
            mock_openai.return_value = MagicMock()
            from run_agent import AIAgent
            agent = AIAgent(
                model="gemini-2.5-flash",
                provider="gemini",
                api_key="test-key",
                base_url="https://generativelanguage.googleapis.com/v1beta/openai",
            )
            assert agent.api_mode == "chat_completions"
            assert agent.provider == "gemini"