fix: resolve ollama provider alias mismatch

This commit is contained in:
Hermes Agent 2026-04-16 10:27:52 -04:00
parent 1ccd063786
commit 80c3cffbff
3 changed files with 75 additions and 3 deletions

View file

@@ -145,6 +145,12 @@ HERMES_OVERLAYS: Dict[str, HermesOverlay] = {
transport="openai_chat",
base_url_env_var="OLLAMA_BASE_URL",
),
"ollama": HermesOverlay(
transport="openai_chat",
extra_env_vars=("OLLAMA_API_KEY",),
base_url_override="http://127.0.0.1:11434/v1",
base_url_env_var="OLLAMA_BASE_URL",
),
}
@@ -255,7 +261,7 @@ ALIASES: Dict[str, str] = {
"lmstudio": "lmstudio",
"lm-studio": "lmstudio",
"lm_studio": "lmstudio",
"ollama": "custom", # bare "ollama" = local; use "ollama-cloud" for cloud
"ollama": "ollama",
"vllm": "local",
"llamacpp": "local",
"llama.cpp": "local",
@@ -275,6 +281,7 @@ _LABEL_OVERRIDES: Dict[str, str] = {
"local": "Local endpoint",
"bedrock": "AWS Bedrock",
"ollama-cloud": "Ollama Cloud",
"ollama": "Ollama",
}