mirror of
https://github.com/NousResearch/hermes-agent.git
synced 2026-04-25 00:51:20 +00:00
fix(aux): force anthropic oauth refresh after 401
Co-Authored-By: Paperclip <noreply@paperclip.ing>
This commit is contained in:
parent
e1106772d9
commit
be6b83562d
2 changed files with 260 additions and 0 deletions
|
|
@ -1215,3 +1215,201 @@ class TestAnthropicCompatImageConversion:
|
|||
}]
|
||||
result = _convert_openai_images_to_anthropic(messages)
|
||||
assert result[0]["content"][0]["source"]["media_type"] == "image/jpeg"
|
||||
|
||||
|
||||
class _AuxAuth401(Exception):
|
||||
status_code = 401
|
||||
|
||||
def __init__(self, message="Provided authentication token is expired"):
|
||||
super().__init__(message)
|
||||
|
||||
|
||||
class _DummyResponse:
|
||||
def __init__(self, text="ok"):
|
||||
self.choices = [MagicMock(message=MagicMock(content=text))]
|
||||
|
||||
|
||||
class _FailingThenSuccessCompletions:
    """Sync completions stub: the first create() raises a 401, later calls succeed."""

    def __init__(self):
        # Number of create() invocations seen so far.
        self.calls = 0

    def create(self, **kwargs):
        self.calls += 1
        is_first_call = self.calls == 1
        if is_first_call:
            raise _AuxAuth401()
        return _DummyResponse("sync-ok")
||||
class _AsyncFailingThenSuccessCompletions:
    """Async completions stub: the first create() raises a 401, later calls succeed."""

    def __init__(self):
        # Number of create() invocations seen so far.
        self.calls = 0

    async def create(self, **kwargs):
        self.calls += 1
        is_first_call = self.calls == 1
        if is_first_call:
            raise _AuxAuth401()
        return _DummyResponse("async-ok")
||||
class TestAuxiliaryAuthRefreshRetry:
    """call_llm/async_call_llm retry exactly once after a 401 by refreshing provider credentials."""

    def test_call_llm_refreshes_codex_on_401_for_vision(self):
        """Vision path: first codex client 401s, refresh runs, fresh client answers."""
        failing_client = MagicMock()
        failing_client.base_url = "https://chatgpt.com/backend-api/codex"
        failing_client.chat.completions = _FailingThenSuccessCompletions()

        fresh_client = MagicMock()
        fresh_client.base_url = "https://chatgpt.com/backend-api/codex"
        fresh_client.chat.completions.create.return_value = _DummyResponse("fresh-sync")

        with (
            patch(
                "agent.auxiliary_client.resolve_vision_provider_client",
                side_effect=[("openai-codex", failing_client, "gpt-5.2-codex"), ("openai-codex", fresh_client, "gpt-5.2-codex")],
            ),
            patch("agent.auxiliary_client._refresh_provider_credentials", return_value=True) as mock_refresh,
        ):
            resp = call_llm(
                task="vision",
                provider="openai-codex",
                model="gpt-5.2-codex",
                messages=[{"role": "user", "content": "hi"}],
            )

        assert resp.choices[0].message.content == "fresh-sync"
        mock_refresh.assert_called_once_with("openai-codex")

    def test_call_llm_refreshes_codex_on_401_for_non_vision(self):
        """Non-vision path: stale cached codex client 401s once; refreshed client is used."""
        stale_client = MagicMock()
        stale_client.base_url = "https://chatgpt.com/backend-api/codex"
        stale_client.chat.completions.create.side_effect = _AuxAuth401("stale codex token")

        fresh_client = MagicMock()
        fresh_client.base_url = "https://chatgpt.com/backend-api/codex"
        fresh_client.chat.completions.create.return_value = _DummyResponse("fresh-non-vision")

        with (
            patch("agent.auxiliary_client._resolve_task_provider_model", return_value=("openai-codex", "gpt-5.2-codex", None, None, None)),
            patch("agent.auxiliary_client._get_cached_client", side_effect=[(stale_client, "gpt-5.2-codex"), (fresh_client, "gpt-5.2-codex")]),
            patch("agent.auxiliary_client._refresh_provider_credentials", return_value=True) as mock_refresh,
        ):
            resp = call_llm(
                task="compression",
                provider="openai-codex",
                model="gpt-5.2-codex",
                messages=[{"role": "user", "content": "hi"}],
            )

        assert resp.choices[0].message.content == "fresh-non-vision"
        mock_refresh.assert_called_once_with("openai-codex")
        # Exactly one attempt per client — no extra retries.
        assert stale_client.chat.completions.create.call_count == 1
        assert fresh_client.chat.completions.create.call_count == 1

    def test_call_llm_refreshes_anthropic_on_401_for_non_vision(self):
        """Anthropic is covered by the same refresh-and-retry behavior as codex."""
        stale_client = MagicMock()
        stale_client.base_url = "https://api.anthropic.com"
        stale_client.chat.completions.create.side_effect = _AuxAuth401("anthropic token expired")

        fresh_client = MagicMock()
        fresh_client.base_url = "https://api.anthropic.com"
        fresh_client.chat.completions.create.return_value = _DummyResponse("fresh-anthropic")

        with (
            patch("agent.auxiliary_client._resolve_task_provider_model", return_value=("anthropic", "claude-haiku-4-5-20251001", None, None, None)),
            patch("agent.auxiliary_client._get_cached_client", side_effect=[(stale_client, "claude-haiku-4-5-20251001"), (fresh_client, "claude-haiku-4-5-20251001")]),
            patch("agent.auxiliary_client._refresh_provider_credentials", return_value=True) as mock_refresh,
        ):
            resp = call_llm(
                task="compression",
                provider="anthropic",
                model="claude-haiku-4-5-20251001",
                messages=[{"role": "user", "content": "hi"}],
            )

        assert resp.choices[0].message.content == "fresh-anthropic"
        mock_refresh.assert_called_once_with("anthropic")
        # Exactly one attempt per client — no extra retries.
        assert stale_client.chat.completions.create.call_count == 1
        assert fresh_client.chat.completions.create.call_count == 1

    @pytest.mark.asyncio
    async def test_async_call_llm_refreshes_codex_on_401_for_vision(self):
        """Async vision path mirrors the sync refresh-and-retry behavior."""
        failing_client = MagicMock()
        failing_client.base_url = "https://chatgpt.com/backend-api/codex"
        failing_client.chat.completions = _AsyncFailingThenSuccessCompletions()

        fresh_client = MagicMock()
        fresh_client.base_url = "https://chatgpt.com/backend-api/codex"
        fresh_client.chat.completions.create = AsyncMock(return_value=_DummyResponse("fresh-async"))

        with (
            patch(
                "agent.auxiliary_client.resolve_vision_provider_client",
                side_effect=[("openai-codex", failing_client, "gpt-5.2-codex"), ("openai-codex", fresh_client, "gpt-5.2-codex")],
            ),
            patch("agent.auxiliary_client._refresh_provider_credentials", return_value=True) as mock_refresh,
        ):
            resp = await async_call_llm(
                task="vision",
                provider="openai-codex",
                model="gpt-5.2-codex",
                messages=[{"role": "user", "content": "hi"}],
            )

        assert resp.choices[0].message.content == "fresh-async"
        mock_refresh.assert_called_once_with("openai-codex")

    def test_refresh_provider_credentials_force_refreshes_anthropic_oauth_and_evicts_cache(self, monkeypatch):
        """_refresh_provider_credentials forces an OAuth refresh, persists the new tokens, and evicts the cached client."""
        stale_client = MagicMock()
        cache_key = ("anthropic", False, None, None, None)

        # Blank out env-var fallbacks so the OAuth credential path is exercised.
        monkeypatch.setenv("ANTHROPIC_TOKEN", "")
        monkeypatch.setenv("CLAUDE_CODE_OAUTH_TOKEN", "")
        monkeypatch.setenv("ANTHROPIC_API_KEY", "")

        with (
            patch("agent.auxiliary_client._client_cache", {cache_key: (stale_client, "claude-haiku-4-5-20251001", None)}),
            patch("agent.anthropic_adapter.read_claude_code_credentials", return_value={
                "accessToken": "expired-token",
                "refreshToken": "refresh-token",
                "expiresAt": 0,
            }),
            patch("agent.anthropic_adapter.refresh_anthropic_oauth_pure", return_value={
                "access_token": "fresh-token",
                "refresh_token": "refresh-token-2",
                "expires_at_ms": 9999999999999,
            }) as mock_refresh_oauth,
            patch("agent.anthropic_adapter._write_claude_code_credentials") as mock_write,
        ):
            from agent.auxiliary_client import _refresh_provider_credentials

            assert _refresh_provider_credentials("anthropic") is True

        mock_refresh_oauth.assert_called_once_with("refresh-token", use_json=False)
        mock_write.assert_called_once_with("fresh-token", "refresh-token-2", 9999999999999)
        # The stale cached client must be closed when evicted.
        stale_client.close.assert_called_once()

    @pytest.mark.asyncio
    async def test_async_call_llm_refreshes_anthropic_on_401_for_non_vision(self):
        """Async anthropic non-vision path: one 401, one refresh, one fresh attempt."""
        stale_client = MagicMock()
        stale_client.base_url = "https://api.anthropic.com"
        stale_client.chat.completions.create = AsyncMock(side_effect=_AuxAuth401("anthropic token expired"))

        fresh_client = MagicMock()
        fresh_client.base_url = "https://api.anthropic.com"
        fresh_client.chat.completions.create = AsyncMock(return_value=_DummyResponse("fresh-async-anthropic"))

        with (
            patch("agent.auxiliary_client._resolve_task_provider_model", return_value=("anthropic", "claude-haiku-4-5-20251001", None, None, None)),
            patch("agent.auxiliary_client._get_cached_client", side_effect=[(stale_client, "claude-haiku-4-5-20251001"), (fresh_client, "claude-haiku-4-5-20251001")]),
            patch("agent.auxiliary_client._refresh_provider_credentials", return_value=True) as mock_refresh,
        ):
            resp = await async_call_llm(
                task="compression",
                provider="anthropic",
                model="claude-haiku-4-5-20251001",
                messages=[{"role": "user", "content": "hi"}],
            )

        assert resp.choices[0].message.content == "fresh-async-anthropic"
        mock_refresh.assert_called_once_with("anthropic")
        # Exactly one awaited attempt per client — no extra retries.
        assert stale_client.chat.completions.create.await_count == 1
        assert fresh_client.chat.completions.create.await_count == 1
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue