mirror of
https://github.com/NousResearch/hermes-agent.git
synced 2026-05-08 03:01:47 +00:00
feat(web): add SearXNG as a native search-only backend
Adds SearXNG as a free, self-hosted web search provider. SearXNG is a
privacy-respecting metasearch engine that requires no API key — just a
running instance and SEARXNG_URL pointing at it.
## What this adds
- `tools/web_providers/searxng.py` — `SearXNGSearchProvider` implementing
`WebSearchProvider` (search only; no extract capability)
- `_is_backend_available("searxng")` — gates on SEARXNG_URL
- `_get_backend()` — accepts "searxng" as a configured value; adds it to
auto-detect candidates (lower priority than paid services)
- `web_search_tool` — dispatches to SearXNG when it is the active backend
- `check_web_api_key()` — includes SearXNG in availability check
- `OPTIONAL_ENV_VARS["SEARXNG_URL"]` — registered with tools=["web_search"]
- `tools_config.py` — SearXNG appears in the `hermes tools` provider picker
- `nous_subscription.py` — `direct_searxng` detection, web_active / web_available
- `setup.py` — SEARXNG_URL listed in the missing-credential hint
- 23 tests covering: is_configured, happy-path search, score sorting, limit,
HTTP/request errors, _is_backend_available, _get_backend, check_web_api_key
## Config
```yaml
# Use SearXNG for search, any paid provider for extract
web:
search_backend: "searxng"
extract_backend: "firecrawl"
# Or: SearXNG as the sole backend (web_extract falls back to the next available extract provider)
web:
backend: "searxng"
```
SearXNG is search-only — it does not implement WebExtractProvider. Users
who only configure SEARXNG_URL get web_search available; web_extract falls
back to the next available extract provider (or is unavailable if none).
Closes #19198 (Phase 2 Task 4 — SearXNG provider)
Ref: #11562 (original SearXNG PR)
This commit is contained in:
parent
cd2cbc73b7
commit
5c906d7026
7 changed files with 535 additions and 7 deletions
131
tools/web_providers/searxng.py
Normal file
131
tools/web_providers/searxng.py
Normal file
|
|
@ -0,0 +1,131 @@
|
|||
"""SearXNG web search provider.
|
||||
|
||||
SearXNG is a free, self-hosted, privacy-respecting metasearch engine.
|
||||
It implements ``WebSearchProvider`` only — there is no extract capability.
|
||||
|
||||
Configuration::
|
||||
|
||||
# ~/.hermes/config.yaml (SEARXNG_URL is a URL, not a secret — use config.yaml not .env)
|
||||
SEARXNG_URL: http://localhost:8080
|
||||
|
||||
# Use SearXNG for search, pair with any extract provider:
|
||||
web:
|
||||
search_backend: "searxng"
|
||||
extract_backend: "firecrawl"
|
||||
|
||||
Public SearXNG instances are listed at https://searx.space/ but self-hosting
|
||||
is recommended for production use (rate limits and availability vary per
|
||||
public instance).
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
from typing import Any, Dict
|
||||
|
||||
from tools.web_providers.base import WebSearchProvider
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SearXNGSearchProvider(WebSearchProvider):
    """Search via a SearXNG instance.

    Requires ``SEARXNG_URL`` to be set (e.g. ``http://localhost:8080``).
    No API key needed — SearXNG is open-source and self-hosted.

    Uses the SearXNG JSON API (``/search?format=json``). Results are
    sorted by SearXNG's own score and truncated to *limit*.
    """

    def provider_name(self) -> str:
        """Return the canonical backend identifier used in config/dispatch."""
        return "searxng"

    def is_configured(self) -> bool:
        """Return True when ``SEARXNG_URL`` is set to a non-empty value."""
        return bool(os.getenv("SEARXNG_URL", "").strip())

    @staticmethod
    def _result_score(result: Dict[str, Any]) -> float:
        """Best-effort numeric score for one raw SearXNG result.

        Instances may omit ``score``, send JSON ``null``, or (behind a
        misbehaving engine) send a non-numeric value. Treat all of those
        as 0.0 instead of letting one bad entry abort the whole search,
        which is what ``float(r.get("score", 0))`` did before.
        """
        try:
            return float(result.get("score") or 0.0)
        except (TypeError, ValueError):
            return 0.0

    def search(self, query: str, limit: int = 5) -> Dict[str, Any]:
        """Execute a search against the configured SearXNG instance.

        Args:
            query: Free-text search query forwarded as the ``q`` parameter.
            limit: Maximum number of results to return; values below 0 are
                treated as 0 (a raw negative slice would drop from the
                wrong end).

        Returns normalized results::

            {
                "success": True,
                "data": {
                    "web": [
                        {
                            "title": str,
                            "url": str,
                            "description": str,
                            "position": int,
                        },
                        ...
                    ]
                }
            }

        On failure returns ``{"success": False, "error": str}``.
        """
        # Local import keeps the module importable when httpx is absent.
        import httpx

        base_url = os.getenv("SEARXNG_URL", "").strip().rstrip("/")
        if not base_url:
            return {"success": False, "error": "SEARXNG_URL is not set"}

        params: Dict[str, Any] = {
            "q": query,
            "format": "json",
            "pageno": 1,
        }

        try:
            resp = httpx.get(
                f"{base_url}/search",
                params=params,
                timeout=15,
                headers={"Accept": "application/json"},
            )
            resp.raise_for_status()
        except httpx.HTTPStatusError as exc:
            logger.warning("SearXNG HTTP error: %s", exc)
            return {"success": False, "error": f"SearXNG returned HTTP {exc.response.status_code}"}
        except httpx.RequestError as exc:
            logger.warning("SearXNG request error: %s", exc)
            return {"success": False, "error": f"Could not reach SearXNG at {base_url}: {exc}"}

        try:
            data = resp.json()
        except Exception as exc:  # noqa: BLE001
            logger.warning("SearXNG response parse error: %s", exc)
            return {"success": False, "error": "Could not parse SearXNG response as JSON"}

        # Defensive shape checks: some error payloads are not the expected
        # {"results": [ {...}, ... ]} document; drop anything malformed
        # rather than raising mid-normalization.
        raw_results = data.get("results", []) if isinstance(data, dict) else []
        if not isinstance(raw_results, list):
            raw_results = []
        raw_results = [r for r in raw_results if isinstance(r, dict)]

        # SearXNG may return a score field; sort descending and cap to limit.
        capped = max(limit, 0)
        sorted_results = sorted(
            raw_results,
            key=self._result_score,
            reverse=True,
        )[:capped]

        web_results = [
            {
                "title": str(r.get("title", "")),
                "url": str(r.get("url", "")),
                "description": str(r.get("content", "")),
                "position": i + 1,
            }
            for i, r in enumerate(sorted_results)
        ]

        logger.info(
            "SearXNG search '%s': %d results (from %d raw, limit %d)",
            query,
            len(web_results),
            len(raw_results),
            limit,
        )

        return {"success": True, "data": {"web": web_results}}
|
||||
|
|
@ -126,7 +126,7 @@ def _get_backend() -> str:
|
|||
keys manually without running setup.
|
||||
"""
|
||||
configured = (_load_web_config().get("backend") or "").lower().strip()
|
||||
if configured in ("parallel", "firecrawl", "tavily", "exa"):
|
||||
if configured in ("parallel", "firecrawl", "tavily", "exa", "searxng"):
|
||||
return configured
|
||||
|
||||
# Fallback for manual / legacy config — pick the highest-priority
|
||||
|
|
@ -137,6 +137,7 @@ def _get_backend() -> str:
|
|||
("parallel", _has_env("PARALLEL_API_KEY")),
|
||||
("tavily", _has_env("TAVILY_API_KEY")),
|
||||
("exa", _has_env("EXA_API_KEY")),
|
||||
("searxng", _has_env("SEARXNG_URL")),
|
||||
)
|
||||
for backend, available in backend_candidates:
|
||||
if available:
|
||||
|
|
@ -193,6 +194,8 @@ def _is_backend_available(backend: str) -> bool:
|
|||
return check_firecrawl_api_key()
|
||||
if backend == "tavily":
|
||||
return _has_env("TAVILY_API_KEY")
|
||||
if backend == "searxng":
|
||||
return _has_env("SEARXNG_URL")
|
||||
return False
|
||||
|
||||
# ─── Firecrawl Client ────────────────────────────────────────────────────────
|
||||
|
|
@ -1187,6 +1190,16 @@ def web_search_tool(query: str, limit: int = 5) -> str:
|
|||
_debug.save()
|
||||
return result_json
|
||||
|
||||
if backend == "searxng":
|
||||
from tools.web_providers.searxng import SearXNGSearchProvider
|
||||
response_data = SearXNGSearchProvider().search(query, limit)
|
||||
debug_call_data["results_count"] = len(response_data.get("data", {}).get("web", []))
|
||||
result_json = json.dumps(response_data, indent=2, ensure_ascii=False)
|
||||
debug_call_data["final_response_size"] = len(result_json)
|
||||
_debug.log_call("web_search_tool", debug_call_data)
|
||||
_debug.save()
|
||||
return result_json
|
||||
|
||||
if backend == "tavily":
|
||||
logger.info("Tavily search: '%s' (limit: %d)", query, limit)
|
||||
raw = _tavily_request("search", {
|
||||
|
|
@ -1337,6 +1350,13 @@ async def web_extract_tool(
|
|||
"include_images": False,
|
||||
})
|
||||
results = _normalize_tavily_documents(raw, fallback_url=safe_urls[0] if safe_urls else "")
|
||||
elif backend == "searxng":
|
||||
# SearXNG is search-only — it cannot extract URL content
|
||||
return json.dumps({
|
||||
"success": False,
|
||||
"error": "SearXNG is a search-only backend and cannot extract URL content. "
|
||||
"Set web.extract_backend to firecrawl, tavily, exa, or parallel.",
|
||||
}, ensure_ascii=False)
|
||||
else:
|
||||
# ── Firecrawl extraction ──
|
||||
# Determine requested formats for Firecrawl v2
|
||||
|
|
@ -1712,6 +1732,14 @@ async def web_crawl_tool(
|
|||
_debug.save()
|
||||
return cleaned_result
|
||||
|
||||
# SearXNG is search-only — it cannot crawl
|
||||
if backend == "searxng":
|
||||
return json.dumps({
|
||||
"error": "SearXNG is a search-only backend and cannot crawl URLs. "
|
||||
"Set FIRECRAWL_API_KEY for crawling, or use web_search instead.",
|
||||
"success": False,
|
||||
}, ensure_ascii=False)
|
||||
|
||||
# web_crawl requires Firecrawl or the Firecrawl tool-gateway — Parallel has no crawl API
|
||||
if not check_firecrawl_api_key():
|
||||
return json.dumps({
|
||||
|
|
@ -2007,9 +2035,9 @@ def check_firecrawl_api_key() -> bool:
|
|||
def check_web_api_key() -> bool:
|
||||
"""Check whether the configured web backend is available."""
|
||||
configured = _load_web_config().get("backend", "").lower().strip()
|
||||
if configured in ("exa", "parallel", "firecrawl", "tavily"):
|
||||
if configured in ("exa", "parallel", "firecrawl", "tavily", "searxng"):
|
||||
return _is_backend_available(configured)
|
||||
return any(_is_backend_available(backend) for backend in ("exa", "parallel", "firecrawl", "tavily"))
|
||||
return any(_is_backend_available(backend) for backend in ("exa", "parallel", "firecrawl", "tavily", "searxng"))
|
||||
|
||||
|
||||
def check_auxiliary_model() -> bool:
|
||||
|
|
@ -2044,6 +2072,8 @@ if __name__ == "__main__":
|
|||
print(" Using Parallel API (https://parallel.ai)")
|
||||
elif backend == "tavily":
|
||||
print(" Using Tavily API (https://tavily.com)")
|
||||
elif backend == "searxng":
|
||||
print(f" Using SearXNG (search only): {os.getenv('SEARXNG_URL', '').strip()}")
|
||||
else:
|
||||
if firecrawl_url_available:
|
||||
print(f" Using self-hosted Firecrawl: {os.getenv('FIRECRAWL_API_URL').strip().rstrip('/')}")
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue