mirror of
https://github.com/NousResearch/hermes-agent.git
synced 2026-04-25 00:51:20 +00:00
fix(telegram): honor no_proxy for explicit proxy setup
This commit is contained in:
parent
a5129c72ef
commit
e7590f92a2
5 changed files with 159 additions and 9 deletions
|
|
@ -148,7 +148,102 @@ def _detect_macos_system_proxy() -> str | None:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def resolve_proxy_url(platform_env_var: str | None = None) -> str | None:
|
def _split_host_port(value: str) -> tuple[str, int | None]:
|
||||||
|
raw = str(value or "").strip()
|
||||||
|
if not raw:
|
||||||
|
return "", None
|
||||||
|
if "://" in raw:
|
||||||
|
parsed = urlsplit(raw)
|
||||||
|
return (parsed.hostname or "").lower().rstrip("."), parsed.port
|
||||||
|
if raw.startswith("[") and "]" in raw:
|
||||||
|
host, _, rest = raw[1:].partition("]")
|
||||||
|
port = None
|
||||||
|
if rest.startswith(":") and rest[1:].isdigit():
|
||||||
|
port = int(rest[1:])
|
||||||
|
return host.lower().rstrip("."), port
|
||||||
|
if raw.count(":") == 1:
|
||||||
|
host, _, maybe_port = raw.rpartition(":")
|
||||||
|
if maybe_port.isdigit():
|
||||||
|
return host.lower().rstrip("."), int(maybe_port)
|
||||||
|
return raw.lower().strip("[]").rstrip("."), None
|
||||||
|
|
||||||
|
|
||||||
|
def _no_proxy_entries() -> list[str]:
|
||||||
|
entries: list[str] = []
|
||||||
|
for key in ("NO_PROXY", "no_proxy"):
|
||||||
|
raw = os.environ.get(key, "")
|
||||||
|
entries.extend(part.strip() for part in raw.split(",") if part.strip())
|
||||||
|
return entries
|
||||||
|
|
||||||
|
|
||||||
|
def _no_proxy_entry_matches(entry: str, host: str, port: int | None = None) -> bool:
    """Return True when a single NO_PROXY entry matches ``host`` (and ``port``).

    Handles ``*`` (match everything), IP literals, CIDR networks, leading-dot
    and ``*.`` wildcard domain suffixes, plain domain suffixes, and optional
    ``host:port`` qualified entries. ``host`` is expected already lowercased
    (callers pass output of ``_split_host_port``).
    """
    token = str(entry or "").strip().lower()
    if not token:
        return False
    # A bare "*" disables proxying for every host.
    if token == "*":
        return True

    token_host, token_port = _split_host_port(token)
    # A port-qualified entry matches only when the target port is known and equal.
    if token_port is not None and port is not None and token_port != port:
        return False
    if token_port is not None and port is None:
        return False
    if not token_host:
        return False

    # CIDR network entry. NOTE: a plain IP literal also parses here (as a
    # /32 or /128 network), so pure-IP entries are handled by this branch.
    try:
        network = ipaddress.ip_network(token_host, strict=False)
        try:
            return ipaddress.ip_address(host) in network
        except ValueError:
            # Target host is not an IP literal; a network entry cannot match it.
            return False
    except ValueError:
        pass

    # Single IP literal entry (reached only if ip_network() rejected the token).
    try:
        token_ip = ipaddress.ip_address(token_host)
        try:
            return ipaddress.ip_address(host) == token_ip
        except ValueError:
            return False
    except ValueError:
        pass

    # "*.example.com": subdomains only — the kept suffix retains the leading dot.
    if token_host.startswith("*."):
        suffix = token_host[1:]
        return host.endswith(suffix)
    # ".example.com": the bare domain itself or any subdomain.
    if token_host.startswith("."):
        return host == token_host[1:] or host.endswith(token_host)
    # Plain entry: exact host or any subdomain of it.
    return host == token_host or host.endswith(f".{token_host}")
|
||||||
|
|
||||||
|
|
||||||
|
def should_bypass_proxy(target_hosts: str | list[str] | tuple[str, ...] | set[str] | None) -> bool:
    """Return True when NO_PROXY/no_proxy matches at least one target host.

    Supports exact hosts, domain suffixes, wildcard suffixes, IP literals,
    CIDR ranges, optional host:port entries, and ``*``.
    """
    rules = _no_proxy_entries()
    if not rules or not target_hosts:
        return False

    # A bare string is a single target; any other iterable is expanded.
    if isinstance(target_hosts, str):
        targets = [target_hosts]
    else:
        targets = list(target_hosts)

    for target in targets:
        host, port = _split_host_port(str(target))
        if not host:
            continue
        for rule in rules:
            if _no_proxy_entry_matches(rule, host, port):
                return True
    return False
|
||||||
|
|
||||||
|
|
||||||
|
def resolve_proxy_url(
    platform_env_var: str | None = None,
    *,
    target_hosts: str | list[str] | tuple[str, ...] | set[str] | None = None,
) -> str | None:
    """Return a proxy URL from env vars, or macOS system proxy.

    Check order:

    1. HTTPS_PROXY / HTTP_PROXY / ALL_PROXY (and lowercase variants)
    2. macOS system proxy via ``scutil --proxy`` (auto-detect)

    Returns *None* if no proxy is found, or if NO_PROXY/no_proxy matches one
    of ``target_hosts``.
    """
    # Platform-specific override (e.g. TELEGRAM_PROXY) wins over generic vars.
    if platform_env_var:
        value = (os.environ.get(platform_env_var) or "").strip()
        if value:
            # NO_PROXY is honored even for the explicit per-platform proxy.
            if should_bypass_proxy(target_hosts):
                return None
            return normalize_proxy_url(value)
    # Generic proxy variables, uppercase spellings checked first.
    for key in ("HTTPS_PROXY", "HTTP_PROXY", "ALL_PROXY",
                "https_proxy", "http_proxy", "all_proxy"):
        value = (os.environ.get(key) or "").strip()
        if value:
            if should_bypass_proxy(target_hosts):
                return None
            return normalize_proxy_url(value)
    # Fall back to macOS system-proxy detection; NO_PROXY still applies.
    detected = normalize_proxy_url(_detect_macos_system_proxy())
    if detected and should_bypass_proxy(target_hosts):
        return None
    return detected
|
||||||
|
|
||||||
|
|
||||||
def proxy_kwargs_for_bot(proxy_url: str | None) -> dict:
|
def proxy_kwargs_for_bot(proxy_url: str | None) -> dict:
|
||||||
|
|
|
||||||
|
|
@ -703,7 +703,6 @@ class TelegramAdapter(BasePlatformAdapter):
|
||||||
"write_timeout": _env_float("HERMES_TELEGRAM_HTTP_WRITE_TIMEOUT", 20.0),
|
"write_timeout": _env_float("HERMES_TELEGRAM_HTTP_WRITE_TIMEOUT", 20.0),
|
||||||
}
|
}
|
||||||
|
|
||||||
proxy_url = resolve_proxy_url("TELEGRAM_PROXY")
|
|
||||||
disable_fallback = (os.getenv("HERMES_TELEGRAM_DISABLE_FALLBACK_IPS", "").strip().lower() in ("1", "true", "yes", "on"))
|
disable_fallback = (os.getenv("HERMES_TELEGRAM_DISABLE_FALLBACK_IPS", "").strip().lower() in ("1", "true", "yes", "on"))
|
||||||
fallback_ips = self._fallback_ips()
|
fallback_ips = self._fallback_ips()
|
||||||
if not fallback_ips:
|
if not fallback_ips:
|
||||||
|
|
@ -714,6 +713,8 @@ class TelegramAdapter(BasePlatformAdapter):
|
||||||
", ".join(fallback_ips),
|
", ".join(fallback_ips),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
proxy_targets = ["api.telegram.org", *fallback_ips]
|
||||||
|
proxy_url = resolve_proxy_url("TELEGRAM_PROXY", target_hosts=proxy_targets)
|
||||||
if fallback_ips and not proxy_url and not disable_fallback:
|
if fallback_ips and not proxy_url and not disable_fallback:
|
||||||
logger.info(
|
logger.info(
|
||||||
"[%s] Telegram fallback IPs active: %s",
|
"[%s] Telegram fallback IPs active: %s",
|
||||||
|
|
|
||||||
|
|
@ -43,10 +43,10 @@ _DOH_PROVIDERS: list[dict] = [
|
||||||
_SEED_FALLBACK_IPS: list[str] = ["149.154.167.220"]
|
_SEED_FALLBACK_IPS: list[str] = ["149.154.167.220"]
|
||||||
|
|
||||||
|
|
||||||
def _resolve_proxy_url(target_hosts=None) -> str | None:
    """Resolve the Telegram proxy URL, honoring NO_PROXY for ``target_hosts``.

    Returns None when no proxy is configured or when NO_PROXY/no_proxy
    matches one of the supplied target hosts.
    """
    # Delegate to shared implementation (env vars + macOS system proxy detection)
    from gateway.platforms.base import resolve_proxy_url
    return resolve_proxy_url("TELEGRAM_PROXY", target_hosts=target_hosts)
|
||||||
|
|
||||||
|
|
||||||
class TelegramFallbackTransport(httpx.AsyncBaseTransport):
|
class TelegramFallbackTransport(httpx.AsyncBaseTransport):
|
||||||
|
|
@ -60,7 +60,7 @@ class TelegramFallbackTransport(httpx.AsyncBaseTransport):
|
||||||
|
|
||||||
def __init__(self, fallback_ips: Iterable[str], **transport_kwargs):
|
def __init__(self, fallback_ips: Iterable[str], **transport_kwargs):
|
||||||
self._fallback_ips = [ip for ip in dict.fromkeys(_normalize_fallback_ips(fallback_ips))]
|
self._fallback_ips = [ip for ip in dict.fromkeys(_normalize_fallback_ips(fallback_ips))]
|
||||||
proxy_url = _resolve_proxy_url()
|
proxy_url = _resolve_proxy_url(target_hosts=[_TELEGRAM_API_HOST, *self._fallback_ips])
|
||||||
if proxy_url and "proxy" not in transport_kwargs:
|
if proxy_url and "proxy" not in transport_kwargs:
|
||||||
transport_kwargs["proxy"] = proxy_url
|
transport_kwargs["proxy"] = proxy_url
|
||||||
self._primary = httpx.AsyncHTTPTransport(**transport_kwargs)
|
self._primary = httpx.AsyncHTTPTransport(**transport_kwargs)
|
||||||
|
|
|
||||||
|
|
@ -137,11 +137,38 @@ class TestGetProxyUrl:
|
||||||
class TestResolveProxyUrl:
    # Tests for resolve_proxy_url(): env-var normalization and NO_PROXY bypass.

    def test_normalizes_socks_alias_from_all_proxy(self, monkeypatch):
        # Clear every proxy-related variable so only ALL_PROXY is visible.
        for key in ("HTTPS_PROXY", "HTTP_PROXY", "ALL_PROXY",
                    "https_proxy", "http_proxy", "all_proxy", "NO_PROXY", "no_proxy"):
            monkeypatch.delenv(key, raising=False)
        monkeypatch.setenv("ALL_PROXY", "socks://127.0.0.1:1080/")
        # The bare "socks" scheme alias is normalized to "socks5".
        assert resolve_proxy_url() == "socks5://127.0.0.1:1080/"

    def test_no_proxy_bypasses_matching_host(self, monkeypatch):
        # A NO_PROXY entry that exactly matches the target host suppresses the proxy.
        for key in ("HTTPS_PROXY", "HTTP_PROXY", "ALL_PROXY",
                    "https_proxy", "http_proxy", "all_proxy", "NO_PROXY", "no_proxy"):
            monkeypatch.delenv(key, raising=False)
        monkeypatch.setenv("HTTPS_PROXY", "http://proxy.example:8080")
        monkeypatch.setenv("NO_PROXY", "api.telegram.org")

        assert resolve_proxy_url(target_hosts="api.telegram.org") is None

    def test_no_proxy_bypasses_cidr_target(self, monkeypatch):
        # A CIDR entry in NO_PROXY matches an IP-literal target.
        for key in ("HTTPS_PROXY", "HTTP_PROXY", "ALL_PROXY",
                    "https_proxy", "http_proxy", "all_proxy", "NO_PROXY", "no_proxy"):
            monkeypatch.delenv(key, raising=False)
        monkeypatch.setenv("HTTPS_PROXY", "http://proxy.example:8080")
        monkeypatch.setenv("NO_PROXY", "149.154.160.0/20")

        assert resolve_proxy_url(target_hosts=["149.154.167.220"]) is None

    def test_no_proxy_ignored_without_target(self, monkeypatch):
        # With no target_hosts there is nothing to match, so NO_PROXY has no effect.
        for key in ("HTTPS_PROXY", "HTTP_PROXY", "ALL_PROXY",
                    "https_proxy", "http_proxy", "all_proxy", "NO_PROXY", "no_proxy"):
            monkeypatch.delenv(key, raising=False)
        monkeypatch.setenv("HTTPS_PROXY", "http://proxy.example:8080")
        monkeypatch.setenv("NO_PROXY", "*")

        assert resolve_proxy_url() == "http://proxy.example:8080"
|
||||||
|
|
||||||
|
|
||||||
class TestRunAgentProxyDispatch:
|
class TestRunAgentProxyDispatch:
|
||||||
"""Test that _run_agent() delegates to proxy when configured."""
|
"""Test that _run_agent() delegates to proxy when configured."""
|
||||||
|
|
|
||||||
|
|
@ -322,7 +322,7 @@ class TestFallbackTransportInit:
|
||||||
seen_kwargs.append(kwargs.copy())
|
seen_kwargs.append(kwargs.copy())
|
||||||
return FakeTransport([], {})
|
return FakeTransport([], {})
|
||||||
|
|
||||||
for key in ("HTTPS_PROXY", "HTTP_PROXY", "ALL_PROXY", "https_proxy", "http_proxy", "all_proxy", "TELEGRAM_PROXY"):
|
for key in ("HTTPS_PROXY", "HTTP_PROXY", "ALL_PROXY", "https_proxy", "http_proxy", "all_proxy", "TELEGRAM_PROXY", "NO_PROXY", "no_proxy"):
|
||||||
monkeypatch.delenv(key, raising=False)
|
monkeypatch.delenv(key, raising=False)
|
||||||
monkeypatch.setenv("HTTPS_PROXY", "http://proxy.example:8080")
|
monkeypatch.setenv("HTTPS_PROXY", "http://proxy.example:8080")
|
||||||
monkeypatch.setattr(tnet.httpx, "AsyncHTTPTransport", factory)
|
monkeypatch.setattr(tnet.httpx, "AsyncHTTPTransport", factory)
|
||||||
|
|
@ -333,6 +333,25 @@ class TestFallbackTransportInit:
|
||||||
assert len(seen_kwargs) == 2
|
assert len(seen_kwargs) == 2
|
||||||
assert all(kwargs["proxy"] == "http://proxy.example:8080" for kwargs in seen_kwargs)
|
assert all(kwargs["proxy"] == "http://proxy.example:8080" for kwargs in seen_kwargs)
|
||||||
|
|
||||||
|
# NOTE(review): method of TestFallbackTransportInit — the class header is
# outside this view; indentation follows the surrounding (flattened) source.
def test_no_proxy_bypasses_fallback_ip_cidr(self, monkeypatch):
    # When NO_PROXY covers the fallback IP via CIDR, no transport gets a proxy.
    seen_kwargs = []

    def factory(**kwargs):
        # Record the kwargs each constructed httpx transport receives.
        seen_kwargs.append(kwargs.copy())
        return FakeTransport([], {})

    for key in ("HTTPS_PROXY", "HTTP_PROXY", "ALL_PROXY", "https_proxy", "http_proxy", "all_proxy", "TELEGRAM_PROXY", "NO_PROXY", "no_proxy"):
        monkeypatch.delenv(key, raising=False)
    monkeypatch.setenv("HTTPS_PROXY", "http://proxy.example:8080")
    # Fallback IP 149.154.167.220 lies inside this NO_PROXY CIDR range.
    monkeypatch.setenv("NO_PROXY", "149.154.160.0/20")
    monkeypatch.setattr(tnet.httpx, "AsyncHTTPTransport", factory)

    transport = tnet.TelegramFallbackTransport(["149.154.167.220"])

    assert transport._fallback_ips == ["149.154.167.220"]
    # Two transports are constructed (presumably primary + fallback — confirm
    # against TelegramFallbackTransport.__init__), and neither receives "proxy".
    assert len(seen_kwargs) == 2
    assert all("proxy" not in kwargs for kwargs in seen_kwargs)
||||||
|
|
||||||
|
|
||||||
class TestFallbackTransportClose:
|
class TestFallbackTransportClose:
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue