feat(web): add SearXNG as a native search-only backend

Adds SearXNG as a free, self-hosted web search provider.  SearXNG is a
privacy-respecting metasearch engine that requires no API key — just a
running instance and SEARXNG_URL pointing at it.

## What this adds

- `tools/web_providers/searxng.py` — `SearXNGSearchProvider` implementing
  `WebSearchProvider` (search only; no extract capability)
- `_is_backend_available("searxng")` — gates on SEARXNG_URL
- `_get_backend()` — accepts "searxng" as a configured value; adds it to
  auto-detect candidates (lower priority than paid services)
- `web_search_tool` — dispatches to SearXNG when it is the active backend
- `check_web_api_key()` — includes SearXNG in availability check
- `OPTIONAL_ENV_VARS["SEARXNG_URL"]` — registered with tools=["web_search"]
- `tools_config.py` — SearXNG appears in the `hermes tools` provider picker
- `nous_subscription.py` — `direct_searxng` detection, web_active / web_available
- `setup.py` — SEARXNG_URL listed in the missing-credential hint
- 23 tests covering: is_configured, happy-path search, score sorting, limit,
  HTTP/request errors, _is_backend_available, _get_backend, check_web_api_key

## Config

```yaml
# Use SearXNG for search, any paid provider for extract
web:
  search_backend: "searxng"
  extract_backend: "firecrawl"

# Or: SearXNG as the sole backend (web_extract will use the next available)
web:
  backend: "searxng"
```

SearXNG is search-only — it does not implement WebExtractProvider.  Users
who only configure SEARXNG_URL get web_search available; web_extract falls
back to the next available extract provider (or is unavailable if none).

Closes #19198 (Phase 2 Task 4 — SearXNG provider)
Ref: #11562 (original SearXNG PR)
This commit is contained in:
kshitij 2026-05-06 10:05:29 -07:00 committed by GitHub
parent cd2cbc73b7
commit 5c906d7026
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
7 changed files with 535 additions and 7 deletions

View file

@@ -126,7 +126,7 @@ def _get_backend() -> str:
keys manually without running setup.
"""
configured = (_load_web_config().get("backend") or "").lower().strip()
-    if configured in ("parallel", "firecrawl", "tavily", "exa"):
+    if configured in ("parallel", "firecrawl", "tavily", "exa", "searxng"):
return configured
# Fallback for manual / legacy config — pick the highest-priority
@@ -137,6 +137,7 @@ def _get_backend() -> str:
("parallel", _has_env("PARALLEL_API_KEY")),
("tavily", _has_env("TAVILY_API_KEY")),
("exa", _has_env("EXA_API_KEY")),
("searxng", _has_env("SEARXNG_URL")),
)
for backend, available in backend_candidates:
if available:
@@ -193,6 +194,8 @@ def _is_backend_available(backend: str) -> bool:
return check_firecrawl_api_key()
if backend == "tavily":
return _has_env("TAVILY_API_KEY")
if backend == "searxng":
return _has_env("SEARXNG_URL")
return False
# ─── Firecrawl Client ────────────────────────────────────────────────────────
@@ -1187,6 +1190,16 @@ def web_search_tool(query: str, limit: int = 5) -> str:
_debug.save()
return result_json
if backend == "searxng":
from tools.web_providers.searxng import SearXNGSearchProvider
response_data = SearXNGSearchProvider().search(query, limit)
debug_call_data["results_count"] = len(response_data.get("data", {}).get("web", []))
result_json = json.dumps(response_data, indent=2, ensure_ascii=False)
debug_call_data["final_response_size"] = len(result_json)
_debug.log_call("web_search_tool", debug_call_data)
_debug.save()
return result_json
if backend == "tavily":
logger.info("Tavily search: '%s' (limit: %d)", query, limit)
raw = _tavily_request("search", {
@@ -1337,6 +1350,13 @@ async def web_extract_tool(
"include_images": False,
})
results = _normalize_tavily_documents(raw, fallback_url=safe_urls[0] if safe_urls else "")
elif backend == "searxng":
# SearXNG is search-only — it cannot extract URL content
return json.dumps({
"success": False,
"error": "SearXNG is a search-only backend and cannot extract URL content. "
"Set web.extract_backend to firecrawl, tavily, exa, or parallel.",
}, ensure_ascii=False)
else:
# ── Firecrawl extraction ──
# Determine requested formats for Firecrawl v2
@@ -1712,6 +1732,14 @@ async def web_crawl_tool(
_debug.save()
return cleaned_result
# SearXNG is search-only — it cannot crawl
if backend == "searxng":
return json.dumps({
"error": "SearXNG is a search-only backend and cannot crawl URLs. "
"Set FIRECRAWL_API_KEY for crawling, or use web_search instead.",
"success": False,
}, ensure_ascii=False)
# web_crawl requires Firecrawl or the Firecrawl tool-gateway — Parallel has no crawl API
if not check_firecrawl_api_key():
return json.dumps({
@@ -2007,9 +2035,9 @@ def check_firecrawl_api_key() -> bool:
def check_web_api_key() -> bool:
"""Check whether the configured web backend is available."""
configured = _load_web_config().get("backend", "").lower().strip()
-    if configured in ("exa", "parallel", "firecrawl", "tavily"):
+    if configured in ("exa", "parallel", "firecrawl", "tavily", "searxng"):
         return _is_backend_available(configured)
-    return any(_is_backend_available(backend) for backend in ("exa", "parallel", "firecrawl", "tavily"))
+    return any(_is_backend_available(backend) for backend in ("exa", "parallel", "firecrawl", "tavily", "searxng"))
def check_auxiliary_model() -> bool:
@@ -2044,6 +2072,8 @@ if __name__ == "__main__":
print(" Using Parallel API (https://parallel.ai)")
elif backend == "tavily":
print(" Using Tavily API (https://tavily.com)")
elif backend == "searxng":
print(f" Using SearXNG (search only): {os.getenv('SEARXNG_URL', '').strip()}")
else:
if firecrawl_url_available:
print(f" Using self-hosted Firecrawl: {os.getenv('FIRECRAWL_API_URL').strip().rstrip('/')}")