refactor: remove redundant local imports already available at module level

Sweep ~74 redundant local imports across 21 files where the same module
was already imported at the top level. Also includes type fixes and lint
cleanups on the same branch.
This commit is contained in:
alt-glitch 2026-04-21 12:35:10 +05:30 committed by Teknium
parent ce9c91c8f7
commit 1010e5fa3c
31 changed files with 289 additions and 316 deletions

View file

@ -613,8 +613,8 @@ class HermesACPAgent(acp.Agent):
await self._conn.session_update(
session_id=session_id,
update=AvailableCommandsUpdate(
sessionUpdate="available_commands_update",
availableCommands=self._available_commands(),
session_update="available_commands_update",
available_commands=self._available_commands(),
),
)
except Exception:

View file

@ -807,7 +807,7 @@ The user has requested that this compaction PRIORITISE preserving all information
)
self.summary_model = "" # empty = use main model
self._summary_failure_cooldown_until = 0.0 # no cooldown
return self._generate_summary(messages, summary_budget) # retry immediately
return self._generate_summary(turns_to_summarize) # retry immediately
# Transient errors (timeout, rate limit, network) — shorter cooldown
_transient_cooldown = 60

View file

@ -386,6 +386,8 @@ class CopilotACPClient:
stderr_tail: deque[str] = deque(maxlen=40)
def _stdout_reader() -> None:
if proc.stdout is None:
return
for line in proc.stdout:
try:
inbox.put(json.loads(line))

View file

@ -799,7 +799,8 @@ def _gemini_http_error(response: httpx.Response) -> CodeAssistError:
err_obj = {}
err_status = str(err_obj.get("status") or "").strip()
err_message = str(err_obj.get("message") or "").strip()
err_details_list = err_obj.get("details") if isinstance(err_obj.get("details"), list) else []
_raw_details = err_obj.get("details")
err_details_list = _raw_details if isinstance(_raw_details, list) else []
# Extract google.rpc.ErrorInfo reason + metadata. There may be more
# than one ErrorInfo (rare), so we pick the first one with a reason.

View file

@ -613,7 +613,8 @@ def gemini_http_error(response: httpx.Response) -> GeminiAPIError:
err_obj = {}
err_status = str(err_obj.get("status") or "").strip()
err_message = str(err_obj.get("message") or "").strip()
details_list = err_obj.get("details") if isinstance(err_obj.get("details"), list) else []
_raw_details = err_obj.get("details")
details_list = _raw_details if isinstance(_raw_details, list) else []
reason = ""
retry_after: Optional[float] = None

40
cli.py
View file

@ -529,7 +529,6 @@ def load_cli_config() -> Dict[str, Any]:
if _file_has_terminal_config or env_var not in os.environ:
val = terminal_config[config_key]
if isinstance(val, list):
import json
os.environ[env_var] = json.dumps(val)
else:
os.environ[env_var] = str(val)
@ -1144,8 +1143,6 @@ def _rich_text_from_ansi(text: str) -> _RichText:
def _strip_markdown_syntax(text: str) -> str:
"""Best-effort markdown marker removal for plain-text display."""
import re
plain = _rich_text_from_ansi(text or "").plain
plain = re.sub(r"^\s{0,3}(?:[-*_]\s*){3,}$", "", plain, flags=re.MULTILINE)
plain = re.sub(r"^\s{0,3}#{1,6}\s+", "", plain, flags=re.MULTILINE)
@ -2002,8 +1999,7 @@ class HermesCLI:
def _invalidate(self, min_interval: float = 0.25) -> None:
"""Throttled UI repaint — prevents terminal blinking on slow/SSH connections."""
import time as _time
now = _time.monotonic()
now = time.monotonic()
if hasattr(self, "_app") and self._app and (now - self._last_invalidate) >= min_interval:
self._last_invalidate = now
self._app.invalidate()
@ -2221,8 +2217,7 @@ class HermesCLI:
return ""
t0 = getattr(self, "_tool_start_time", 0) or 0
if t0 > 0:
import time as _time
elapsed = _time.monotonic() - t0
elapsed = time.monotonic() - t0
if elapsed >= 60:
_m, _s = int(elapsed // 60), int(elapsed % 60)
elapsed_str = f"{_m}m {_s}s"
@ -2477,9 +2472,6 @@ class HermesCLI:
def _emit_reasoning_preview(self, reasoning_text: str) -> None:
"""Render a buffered reasoning preview as a single [thinking] block."""
import re
import textwrap
preview_text = reasoning_text.strip()
if not preview_text:
return
@ -2598,9 +2590,7 @@ class HermesCLI:
"""Expand [Pasted text #N -> file] placeholders into file contents."""
if not isinstance(text, str) or "[Pasted text #" not in text:
return text or ""
import re as _re
paste_ref_re = _re.compile(r'\[Pasted text #\d+: \d+ lines \u2192 (.+?)\]')
paste_ref_re = re.compile(r'\[Pasted text #\d+: \d+ lines \u2192 (.+?)\]')
def _expand_ref(match):
path = Path(match.group(1))
@ -2923,9 +2913,7 @@ class HermesCLI:
def _command_spinner_frame(self) -> str:
"""Return the current spinner frame for slow slash commands."""
import time as _time
frame_idx = int(_time.monotonic() * 10) % len(_COMMAND_SPINNER_FRAMES)
frame_idx = int(time.monotonic() * 10) % len(_COMMAND_SPINNER_FRAMES)
return _COMMAND_SPINNER_FRAMES[frame_idx]
@contextmanager
@ -3936,7 +3924,6 @@ class HermesCLI:
image later with ``vision_analyze`` if needed.
"""
import asyncio as _asyncio
import json as _json
from tools.vision_tools import vision_analyze_tool
analysis_prompt = (
@ -3956,7 +3943,7 @@ class HermesCLI:
result_json = _asyncio.run(
vision_analyze_tool(image_url=str(img_path), user_prompt=analysis_prompt)
)
result = _json.loads(result_json)
result = json.loads(result_json)
if result.get("success"):
description = result.get("analysis", "")
enriched_parts.append(
@ -6282,8 +6269,7 @@ class HermesCLI:
# with the output (fixes #2718).
if self._app:
self._app.invalidate()
import time as _tmod
_tmod.sleep(0.05) # brief pause for refresh
time.sleep(0.05) # brief pause for refresh
print()
ChatConsole().print(f"[{_accent_hex()}]{'' * 40}[/]")
_cprint(f" ✅ Background task #{task_num} complete")
@ -6323,8 +6309,7 @@ class HermesCLI:
# Same TUI refresh pattern as success path (#2718)
if self._app:
self._app.invalidate()
import time as _tmod
_tmod.sleep(0.05)
time.sleep(0.05)
print()
_cprint(f" ❌ Background task #{task_num} failed: {e}")
finally:
@ -6544,7 +6529,6 @@ class HermesCLI:
_launched = self._try_launch_chrome_debug(_port, _plat.system())
if _launched:
# Wait for the port to come up
import time as _time
for _wait in range(10):
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
@ -6554,7 +6538,7 @@ class HermesCLI:
_already_open = True
break
except (OSError, socket.timeout):
_time.sleep(0.5)
time.sleep(0.5)
if _already_open:
print(f" ✓ Chrome launched and listening on port {_port}")
else:
@ -7084,7 +7068,6 @@ class HermesCLI:
known state. When a change is detected, triggers _reload_mcp() and
informs the user so they know the tool list has been refreshed.
"""
import time
import yaml as _yaml
CONFIG_WATCH_INTERVAL = 5.0 # seconds between config.yaml stat() calls
@ -7943,7 +7926,9 @@ class HermesCLI:
return
selected = state.get("selected", 0)
choices = state.get("choices") or []
choices = state.get("choices")
if not isinstance(choices, list):
choices = []
if not (0 <= selected < len(choices)):
return
@ -10025,7 +10010,8 @@ class HermesCLI:
if stage == "provider":
title = "⚙ Model Picker — Select Provider"
choices = []
for p in state.get("providers") or []:
_providers = state.get("providers")
for p in _providers if isinstance(_providers, list) else []:
count = p.get("total_models", len(p.get("models", [])))
label = f"{p['name']} ({count} model{'s' if count != 1 else ''})"
if p.get("is_current"):

View file

@ -670,8 +670,7 @@ def load_gateway_config() -> GatewayConfig:
if "require_mention" in telegram_cfg and not os.getenv("TELEGRAM_REQUIRE_MENTION"):
os.environ["TELEGRAM_REQUIRE_MENTION"] = str(telegram_cfg["require_mention"]).lower()
if "mention_patterns" in telegram_cfg and not os.getenv("TELEGRAM_MENTION_PATTERNS"):
import json as _json
os.environ["TELEGRAM_MENTION_PATTERNS"] = _json.dumps(telegram_cfg["mention_patterns"])
os.environ["TELEGRAM_MENTION_PATTERNS"] = json.dumps(telegram_cfg["mention_patterns"])
frc = telegram_cfg.get("free_response_chats")
if frc is not None and not os.getenv("TELEGRAM_FREE_RESPONSE_CHATS"):
if isinstance(frc, list):
@ -1259,7 +1258,6 @@ def _apply_env_overrides(config: GatewayConfig) -> None:
if legacy_home:
qq_home = legacy_home
qq_home_name_env = "QQ_HOME_CHANNEL_NAME"
import logging
logging.getLogger(__name__).warning(
"QQ_HOME_CHANNEL is deprecated; rename to QQBOT_HOME_CHANNEL "
"in your .env for consistency with the platform key."

View file

@ -323,7 +323,6 @@ class ResponseStore:
).fetchone()
if row is None:
return None
import time
self._conn.execute(
"UPDATE responses SET accessed_at = ? WHERE response_id = ?",
(time.time(), response_id),
@ -333,7 +332,6 @@ class ResponseStore:
def put(self, response_id: str, data: Dict[str, Any]) -> None:
"""Store a response, evicting the oldest if at capacity."""
import time
self._conn.execute(
"INSERT OR REPLACE INTO responses (response_id, data, accessed_at) VALUES (?, ?, ?)",
(response_id, json.dumps(data, default=str), time.time()),
@ -474,8 +472,7 @@ class _IdempotencyCache:
self._max = max_items
def _purge(self):
import time as _t
now = _t.time()
now = time.time()
expired = [k for k, v in self._store.items() if now - v["ts"] > self._ttl]
for k in expired:
self._store.pop(k, None)
@ -537,6 +534,30 @@ def _derive_chat_session_id(
return f"api-{digest}"
_CRON_AVAILABLE = False
try:
from cron.jobs import (
list_jobs as _cron_list,
get_job as _cron_get,
create_job as _cron_create,
update_job as _cron_update,
remove_job as _cron_remove,
pause_job as _cron_pause,
resume_job as _cron_resume,
trigger_job as _cron_trigger,
)
_CRON_AVAILABLE = True
except ImportError:
_cron_list = None
_cron_get = None
_cron_create = None
_cron_update = None
_cron_remove = None
_cron_pause = None
_cron_resume = None
_cron_trigger = None
class APIServerAdapter(BasePlatformAdapter):
"""
OpenAI-compatible HTTP API server adapter.
@ -1866,44 +1887,16 @@ class APIServerAdapter(BasePlatformAdapter):
# Cron jobs API
# ------------------------------------------------------------------
# Check cron module availability once (not per-request)
_CRON_AVAILABLE = False
try:
from cron.jobs import (
list_jobs as _cron_list,
get_job as _cron_get,
create_job as _cron_create,
update_job as _cron_update,
remove_job as _cron_remove,
pause_job as _cron_pause,
resume_job as _cron_resume,
trigger_job as _cron_trigger,
)
# Wrap as staticmethod to prevent descriptor binding — these are plain
# module functions, not instance methods. Without this, self._cron_*()
# injects ``self`` as the first positional argument and every call
# raises TypeError.
_cron_list = staticmethod(_cron_list)
_cron_get = staticmethod(_cron_get)
_cron_create = staticmethod(_cron_create)
_cron_update = staticmethod(_cron_update)
_cron_remove = staticmethod(_cron_remove)
_cron_pause = staticmethod(_cron_pause)
_cron_resume = staticmethod(_cron_resume)
_cron_trigger = staticmethod(_cron_trigger)
_CRON_AVAILABLE = True
except ImportError:
pass
_JOB_ID_RE = __import__("re").compile(r"[a-f0-9]{12}")
# Allowed fields for update — prevents clients injecting arbitrary keys
_UPDATE_ALLOWED_FIELDS = {"name", "schedule", "prompt", "deliver", "skills", "skill", "repeat", "enabled"}
_MAX_NAME_LENGTH = 200
_MAX_PROMPT_LENGTH = 5000
def _check_jobs_available(self) -> Optional["web.Response"]:
@staticmethod
def _check_jobs_available() -> Optional["web.Response"]:
"""Return error response if cron module isn't available."""
if not self._CRON_AVAILABLE:
if not _CRON_AVAILABLE:
return web.json_response(
{"error": "Cron module not available"}, status=501,
)
@ -1928,7 +1921,7 @@ class APIServerAdapter(BasePlatformAdapter):
return cron_err
try:
include_disabled = request.query.get("include_disabled", "").lower() in ("true", "1")
jobs = self._cron_list(include_disabled=include_disabled)
jobs = _cron_list(include_disabled=include_disabled)
return web.json_response({"jobs": jobs})
except Exception as e:
return web.json_response({"error": str(e)}, status=500)
@ -1976,7 +1969,7 @@ class APIServerAdapter(BasePlatformAdapter):
if repeat is not None:
kwargs["repeat"] = repeat
job = self._cron_create(**kwargs)
job = _cron_create(**kwargs)
return web.json_response({"job": job})
except Exception as e:
return web.json_response({"error": str(e)}, status=500)
@ -1993,7 +1986,7 @@ class APIServerAdapter(BasePlatformAdapter):
if id_err:
return id_err
try:
job = self._cron_get(job_id)
job = _cron_get(job_id)
if not job:
return web.json_response({"error": "Job not found"}, status=404)
return web.json_response({"job": job})
@ -2026,7 +2019,7 @@ class APIServerAdapter(BasePlatformAdapter):
return web.json_response(
{"error": f"Prompt must be ≤ {self._MAX_PROMPT_LENGTH} characters"}, status=400,
)
job = self._cron_update(job_id, sanitized)
job = _cron_update(job_id, sanitized)
if not job:
return web.json_response({"error": "Job not found"}, status=404)
return web.json_response({"job": job})
@ -2045,7 +2038,7 @@ class APIServerAdapter(BasePlatformAdapter):
if id_err:
return id_err
try:
success = self._cron_remove(job_id)
success = _cron_remove(job_id)
if not success:
return web.json_response({"error": "Job not found"}, status=404)
return web.json_response({"ok": True})
@ -2064,7 +2057,7 @@ class APIServerAdapter(BasePlatformAdapter):
if id_err:
return id_err
try:
job = self._cron_pause(job_id)
job = _cron_pause(job_id)
if not job:
return web.json_response({"error": "Job not found"}, status=404)
return web.json_response({"job": job})
@ -2083,7 +2076,7 @@ class APIServerAdapter(BasePlatformAdapter):
if id_err:
return id_err
try:
job = self._cron_resume(job_id)
job = _cron_resume(job_id)
if not job:
return web.json_response({"error": "Job not found"}, status=404)
return web.json_response({"job": job})
@ -2102,7 +2095,7 @@ class APIServerAdapter(BasePlatformAdapter):
if id_err:
return id_err
try:
job = self._cron_trigger(job_id)
job = _cron_trigger(job_id)
if not job:
return web.json_response({"error": "Job not found"}, status=404)
return web.json_response({"job": job})

View file

@ -391,12 +391,9 @@ async def cache_image_from_url(url: str, ext: str = ".jpg", retries: int = 2) ->
if not is_safe_url(url):
raise ValueError(f"Blocked unsafe URL (SSRF protection): {safe_url_for_log(url)}")
import asyncio
import httpx
import logging as _logging
_log = _logging.getLogger(__name__)
_log = logging.getLogger(__name__)
last_exc = None
async with httpx.AsyncClient(
timeout=30.0,
follow_redirects=True,
@ -414,7 +411,6 @@ async def cache_image_from_url(url: str, ext: str = ".jpg", retries: int = 2) ->
response.raise_for_status()
return cache_image_from_bytes(response.content, ext)
except (httpx.TimeoutException, httpx.HTTPStatusError) as exc:
last_exc = exc
if isinstance(exc, httpx.HTTPStatusError) and exc.response.status_code < 429:
raise
if attempt < retries:
@ -430,7 +426,6 @@ async def cache_image_from_url(url: str, ext: str = ".jpg", retries: int = 2) ->
await asyncio.sleep(wait)
continue
raise
raise last_exc
def cleanup_image_cache(max_age_hours: int = 24) -> int:
@ -510,12 +505,9 @@ async def cache_audio_from_url(url: str, ext: str = ".ogg", retries: int = 2) ->
if not is_safe_url(url):
raise ValueError(f"Blocked unsafe URL (SSRF protection): {safe_url_for_log(url)}")
import asyncio
import httpx
import logging as _logging
_log = _logging.getLogger(__name__)
_log = logging.getLogger(__name__)
last_exc = None
async with httpx.AsyncClient(
timeout=30.0,
follow_redirects=True,
@ -533,7 +525,6 @@ async def cache_audio_from_url(url: str, ext: str = ".ogg", retries: int = 2) ->
response.raise_for_status()
return cache_audio_from_bytes(response.content, ext)
except (httpx.TimeoutException, httpx.HTTPStatusError) as exc:
last_exc = exc
if isinstance(exc, httpx.HTTPStatusError) and exc.response.status_code < 429:
raise
if attempt < retries:
@ -549,7 +540,6 @@ async def cache_audio_from_url(url: str, ext: str = ".ogg", retries: int = 2) ->
await asyncio.sleep(wait)
continue
raise
raise last_exc
# ---------------------------------------------------------------------------
@ -1787,8 +1777,6 @@ class BasePlatformAdapter(ABC):
HERMES_HUMAN_DELAY_MIN_MS: minimum delay in ms (default 800, custom mode)
HERMES_HUMAN_DELAY_MAX_MS: maximum delay in ms (default 2500, custom mode)
"""
import random
mode = os.getenv("HERMES_HUMAN_DELAY_MODE", "off").lower()
if mode == "off":
return 0.0

View file

@ -541,7 +541,6 @@ class DiscordAdapter(BasePlatformAdapter):
# ctypes.util.find_library fails on macOS with Homebrew-installed libs,
# so fall back to known Homebrew paths if needed.
if not opus_path:
import sys
_homebrew_paths = (
"/opt/homebrew/lib/libopus.dylib", # Apple Silicon
"/usr/local/lib/libopus.dylib", # Intel Mac
@ -1422,8 +1421,7 @@ class DiscordAdapter(BasePlatformAdapter):
speaking_user_ids: set = set()
receiver = self._voice_receivers.get(guild_id)
if receiver:
import time as _time
now = _time.monotonic()
now = time.monotonic()
with receiver._lock:
for ssrc, last_t in receiver._last_packet_time.items():
# Consider "speaking" if audio received within last 2 seconds

View file

@ -410,7 +410,6 @@ class MattermostAdapter(BasePlatformAdapter):
logger.warning("Mattermost: blocked unsafe URL (SSRF protection)")
return await self.send(chat_id, f"{caption or ''}\n{url}".strip(), reply_to)
import asyncio
import aiohttp
last_exc = None

View file

@ -1086,11 +1086,8 @@ class QQAdapter(BasePlatformAdapter):
return MessageType.VIDEO
if "image" in first_type or "photo" in first_type:
return MessageType.PHOTO
# Unknown content type with an attachment — don't assume PHOTO
# to prevent non-image files from being sent to vision analysis.
logger.debug(
"[%s] Unknown media content_type '%s', defaulting to TEXT",
self._log_tag,
"Unknown media content_type '%s', defaulting to TEXT",
first_type,
)
return MessageType.TEXT
@ -1826,14 +1823,12 @@ class QQAdapter(BasePlatformAdapter):
body["file_name"] = file_name
# Retry transient upload failures
last_exc = None
for attempt in range(3):
try:
return await self._api_request(
"POST", path, body, timeout=FILE_UPLOAD_TIMEOUT
)
except RuntimeError as exc:
last_exc = exc
err_msg = str(exc)
if any(
kw in err_msg
@ -1842,8 +1837,8 @@ class QQAdapter(BasePlatformAdapter):
raise
if attempt < 2:
await asyncio.sleep(1.5 * (attempt + 1))
raise last_exc # type: ignore[misc]
else:
raise
# Maximum time (seconds) to wait for reconnection before giving up on send.
_RECONNECT_WAIT_SECONDS = 15.0

View file

@ -1600,11 +1600,9 @@ class SlackAdapter(BasePlatformAdapter):
async def _download_slack_file(self, url: str, ext: str, audio: bool = False, team_id: str = "") -> str:
"""Download a Slack file using the bot token for auth, with retry."""
import asyncio
import httpx
bot_token = self._team_clients[team_id].token if team_id and team_id in self._team_clients else self.config.token
last_exc = None
async with httpx.AsyncClient(timeout=30.0, follow_redirects=True) as client:
for attempt in range(3):
@ -1634,7 +1632,6 @@ class SlackAdapter(BasePlatformAdapter):
from gateway.platforms.base import cache_image_from_bytes
return cache_image_from_bytes(response.content, ext)
except (httpx.TimeoutException, httpx.HTTPStatusError) as exc:
last_exc = exc
if isinstance(exc, httpx.HTTPStatusError) and exc.response.status_code < 429:
raise
if attempt < 2:
@ -1643,15 +1640,12 @@ class SlackAdapter(BasePlatformAdapter):
await asyncio.sleep(1.5 * (attempt + 1))
continue
raise
raise last_exc
async def _download_slack_file_bytes(self, url: str, team_id: str = "") -> bytes:
"""Download a Slack file and return raw bytes, with retry."""
import asyncio
import httpx
bot_token = self._team_clients[team_id].token if team_id and team_id in self._team_clients else self.config.token
last_exc = None
async with httpx.AsyncClient(timeout=30.0, follow_redirects=True) as client:
for attempt in range(3):
@ -1663,7 +1657,6 @@ class SlackAdapter(BasePlatformAdapter):
response.raise_for_status()
return response.content
except (httpx.TimeoutException, httpx.HTTPStatusError) as exc:
last_exc = exc
if isinstance(exc, httpx.HTTPStatusError) and exc.response.status_code < 429:
raise
if attempt < 2:
@ -1672,7 +1665,6 @@ class SlackAdapter(BasePlatformAdapter):
await asyncio.sleep(1.5 * (attempt + 1))
continue
raise
raise last_exc
# ── Channel mention gating ─────────────────────────────────────────────

View file

@ -1713,7 +1713,6 @@ class TelegramAdapter(BasePlatformAdapter):
return SendResult(success=False, error="Not connected")
try:
import os
if not os.path.exists(audio_path):
return SendResult(success=False, error=self._missing_media_path_error("Audio", audio_path))
@ -1762,7 +1761,6 @@ class TelegramAdapter(BasePlatformAdapter):
return SendResult(success=False, error="Not connected")
try:
import os
if not os.path.exists(image_path):
return SendResult(success=False, error=self._missing_media_path_error("Image", image_path))
@ -2823,13 +2821,11 @@ class TelegramAdapter(BasePlatformAdapter):
logger.info("[Telegram] Analyzing sticker at %s", cached_path)
from tools.vision_tools import vision_analyze_tool
import json as _json
result_json = await vision_analyze_tool(
image_url=cached_path,
user_prompt=STICKER_VISION_PROMPT,
)
result = _json.loads(result_json)
result = json.loads(result_json)
if result.get("success"):
description = result.get("analysis", "a sticker")

View file

@ -624,13 +624,16 @@ class WeComAdapter(BasePlatformAdapter):
msgtype = str(body.get("msgtype") or "").lower()
if msgtype == "mixed":
mixed = body.get("mixed") if isinstance(body.get("mixed"), dict) else {}
items = mixed.get("msg_item") if isinstance(mixed.get("msg_item"), list) else []
_raw_mixed = body.get("mixed")
mixed = _raw_mixed if isinstance(_raw_mixed, dict) else {}
_raw_items = mixed.get("msg_item")
items = _raw_items if isinstance(_raw_items, list) else []
for item in items:
if not isinstance(item, dict):
continue
if str(item.get("msgtype") or "").lower() == "text":
text_block = item.get("text") if isinstance(item.get("text"), dict) else {}
_raw_text = item.get("text")
text_block = _raw_text if isinstance(_raw_text, dict) else {}
content = str(text_block.get("content") or "").strip()
if content:
text_parts.append(content)
@ -672,8 +675,10 @@ class WeComAdapter(BasePlatformAdapter):
msgtype = str(body.get("msgtype") or "").lower()
if msgtype == "mixed":
mixed = body.get("mixed") if isinstance(body.get("mixed"), dict) else {}
items = mixed.get("msg_item") if isinstance(mixed.get("msg_item"), list) else []
_raw_mixed = body.get("mixed")
mixed = _raw_mixed if isinstance(_raw_mixed, dict) else {}
_raw_items = mixed.get("msg_item")
items = _raw_items if isinstance(_raw_items, list) else []
for item in items:
if not isinstance(item, dict):
continue

View file

@ -1266,7 +1266,6 @@ class GatewayRunner:
the prefill_messages_file key in ~/.hermes/config.yaml.
Relative paths are resolved from ~/.hermes/.
"""
import json as _json
file_path = os.getenv("HERMES_PREFILL_MESSAGES_FILE", "")
if not file_path:
try:
@ -1288,7 +1287,7 @@ class GatewayRunner:
return []
try:
with open(path, "r", encoding="utf-8") as f:
data = _json.load(f)
data = json.load(f)
if not isinstance(data, list):
logger.warning("Prefill messages file must contain a JSON array: %s", path)
return []
@ -3675,9 +3674,8 @@ class GatewayRunner:
plugin_handler = get_plugin_command_handler(command.replace("_", "-"))
if plugin_handler:
user_args = event.get_command_args().strip()
import asyncio as _aio
result = plugin_handler(user_args)
if _aio.iscoroutine(result):
if asyncio.iscoroutine(result):
result = await result
return str(result) if result else None
except Exception as e:
@ -3871,13 +3869,10 @@ class GatewayRunner:
if not mtype.startswith(("application/", "text/")):
continue
import os as _os
import re as _re
basename = _os.path.basename(path)
basename = os.path.basename(path)
parts = basename.split("_", 2)
display_name = parts[2] if len(parts) >= 3 else basename
display_name = _re.sub(r'[^\w.\- ]', '_', display_name)
display_name = re.sub(r'[^\w.\- ]', '_', display_name)
if mtype.startswith("text/"):
context_note = (
@ -5175,7 +5170,6 @@ class GatewayRunner:
# Save the requester's routing info so the new gateway process can
# notify them once it comes back online.
try:
import json as _json
notify_data = {
"platform": event.source.platform.value if event.source.platform else None,
"chat_id": event.source.chat_id,
@ -5183,7 +5177,7 @@ class GatewayRunner:
if event.source.thread_id:
notify_data["thread_id"] = event.source.thread_id
(_hermes_home / ".restart_notify.json").write_text(
_json.dumps(notify_data)
json.dumps(notify_data)
)
except Exception as e:
logger.debug("Failed to write restart notify file: %s", e)
@ -5194,16 +5188,14 @@ class GatewayRunner:
# marker persists so the new gateway can still detect a delayed
# /restart redelivery from Telegram. Overwritten on every /restart.
try:
import json as _json
import time as _time
dedup_data = {
"platform": event.source.platform.value if event.source.platform else None,
"requested_at": _time.time(),
"requested_at": time.time(),
}
if event.platform_update_id is not None:
dedup_data["update_id"] = event.platform_update_id
(_hermes_home / ".restart_last_processed.json").write_text(
_json.dumps(dedup_data)
json.dumps(dedup_data)
)
except Exception as e:
logger.debug("Failed to write restart dedup marker: %s", e)
@ -5251,12 +5243,10 @@ class GatewayRunner:
return False
try:
import json as _json
import time as _time
marker_path = _hermes_home / ".restart_last_processed.json"
if not marker_path.exists():
return False
data = _json.loads(marker_path.read_text())
data = json.loads(marker_path.read_text())
except Exception:
return False
@ -5270,7 +5260,7 @@ class GatewayRunner:
# swallow a fresh /restart from the user.
requested_at = data.get("requested_at")
if isinstance(requested_at, (int, float)):
if _time.time() - requested_at > 300:
if time.time() - requested_at > 300:
return False
return event.platform_update_id <= recorded_uid
@ -7352,13 +7342,10 @@ class GatewayRunner:
async def _handle_insights_command(self, event: MessageEvent) -> str:
"""Handle /insights command -- show usage insights and analytics."""
import asyncio as _asyncio
args = event.get_command_args().strip()
# Normalize Unicode dashes (Telegram/iOS auto-converts -- to em/en dash)
import re as _re
args = _re.sub(r'[\u2012\u2013\u2014\u2015](days|source)', r'--\1', args)
args = re.sub(r'[\u2012\u2013\u2014\u2015](days|source)', r'--\1', args)
days = 30
source = None
@ -7387,7 +7374,7 @@ class GatewayRunner:
from hermes_state import SessionDB
from agent.insights import InsightsEngine
loop = _asyncio.get_running_loop()
loop = asyncio.get_running_loop()
def _run_insights():
db = SessionDB()
@ -7745,9 +7732,6 @@ class GatewayRunner:
the messenger. The user's next message is intercepted by
``_handle_message`` and written to ``.update_response``.
"""
import json
import re as _re
pending_path = _hermes_home / ".update_pending.json"
claimed_path = _hermes_home / ".update_pending.claimed.json"
output_path = _hermes_home / ".update_output.txt"
@ -7792,7 +7776,7 @@ class GatewayRunner:
return
def _strip_ansi(text: str) -> str:
return _re.sub(r'\x1b\[[0-9;]*[A-Za-z]', '', text)
return re.sub(r'\x1b\[[0-9;]*[A-Za-z]', '', text)
bytes_sent = 0
last_stream_time = loop.time()
@ -7940,9 +7924,6 @@ class GatewayRunner:
cannot resolve the adapter (e.g. after a gateway restart where the
platform hasn't reconnected yet).
"""
import json
import re as _re
pending_path = _hermes_home / ".update_pending.json"
claimed_path = _hermes_home / ".update_pending.claimed.json"
output_path = _hermes_home / ".update_output.txt"
@ -7988,7 +7969,7 @@ class GatewayRunner:
if adapter and chat_id:
# Strip ANSI escape codes for clean display
output = _re.sub(r'\x1b\[[0-9;]*m', '', output).strip()
output = re.sub(r'\x1b\[[0-9;]*m', '', output).strip()
if output:
if len(output) > 3500:
output = "" + output[-3500:]
@ -8021,14 +8002,12 @@ class GatewayRunner:
async def _send_restart_notification(self) -> None:
"""Notify the chat that initiated /restart that the gateway is back."""
import json as _json
notify_path = _hermes_home / ".restart_notify.json"
if not notify_path.exists():
return
try:
data = _json.loads(notify_path.read_text())
data = json.loads(notify_path.read_text())
platform_str = data.get("platform")
chat_id = data.get("chat_id")
thread_id = data.get("thread_id")
@ -8114,7 +8093,6 @@ class GatewayRunner:
The enriched message string with vision descriptions prepended.
"""
from tools.vision_tools import vision_analyze_tool
import json as _json
analysis_prompt = (
"Describe everything visible in this image in thorough detail. "
@ -8130,7 +8108,7 @@ class GatewayRunner:
image_url=path,
user_prompt=analysis_prompt,
)
result = _json.loads(result_json)
result = json.loads(result_json)
if result.get("success"):
description = result.get("analysis", "")
enriched_parts.append(
@ -8189,7 +8167,6 @@ class GatewayRunner:
return disabled_note
from tools.transcription_tools import transcribe_audio
import asyncio
enriched_parts = []
for path in audio_paths:
@ -9236,8 +9213,7 @@ class GatewayRunner:
if args:
from agent.display import get_tool_preview_max_len
_pl = get_tool_preview_max_len()
import json as _json
args_str = _json.dumps(args, ensure_ascii=False, default=str)
args_str = json.dumps(args, ensure_ascii=False, default=str)
# When tool_preview_length is 0 (default), don't truncate
# in verbose mode — the user explicitly asked for full
# detail. Platform message-length limits handle the rest.
@ -10752,7 +10728,6 @@ async def start_gateway(config: Optional[GatewayConfig] = None, replace: bool =
# The PID file is scoped to HERMES_HOME, so future multi-profile
# setups (each profile using a distinct HERMES_HOME) will naturally
# allow concurrent instances without tripping this guard.
import time as _time
from gateway.status import get_running_pid, remove_pid_file, terminate_pid
existing_pid = get_running_pid()
if existing_pid is not None and existing_pid != os.getpid():
@ -10792,7 +10767,7 @@ async def start_gateway(config: Optional[GatewayConfig] = None, replace: bool =
for _ in range(20):
try:
os.kill(existing_pid, 0)
_time.sleep(0.5)
time.sleep(0.5)
except (ProcessLookupError, PermissionError):
break # Process is gone
else:
@ -10803,7 +10778,7 @@ async def start_gateway(config: Optional[GatewayConfig] = None, replace: bool =
)
try:
terminate_pid(existing_pid, force=True)
_time.sleep(0.5)
time.sleep(0.5)
except (ProcessLookupError, PermissionError, OSError):
pass
remove_pid_file()

View file

@ -2249,7 +2249,6 @@ def print_config_warnings(config: Optional[Dict[str, Any]] = None) -> None:
if not issues:
return
import sys
lines = ["\033[33m⚠ Config issues detected in config.yaml:\033[0m"]
for ci in issues:
marker = "\033[31m✗\033[0m" if ci.severity == "error" else "\033[33m⚠\033[0m"
@ -2264,7 +2263,6 @@ def warn_deprecated_cwd_env_vars(config: Optional[Dict[str, Any]] = None) -> Non
These env vars are deprecated; the canonical setting is terminal.cwd
in config.yaml. Prints a migration hint to stderr.
"""
import os, sys
messaging_cwd = os.environ.get("MESSAGING_CWD")
terminal_cwd_env = os.environ.get("TERMINAL_CWD")
@ -3273,7 +3271,6 @@ def _check_non_ascii_credential(key: str, value: str) -> str:
bad_chars.append(f" position {i}: {ch!r} (U+{ord(ch):04X})")
sanitized = value.encode("ascii", errors="ignore").decode("ascii")
import sys
print(
f"\n Warning: {key} contains non-ASCII characters that will break API requests.\n"
f" This usually happens when copy-pasting from a PDF, rich-text editor,\n"

View file

@ -994,8 +994,6 @@ def get_systemd_linger_status() -> tuple[bool | None, str]:
if not is_linux():
return None, "not supported on this platform"
import shutil
if not shutil.which("loginctl"):
return None, "loginctl not found"
@ -1347,7 +1345,6 @@ def _ensure_linger_enabled() -> None:
return
import getpass
import shutil
username = getpass.getuser()
linger_file = Path(f"/var/lib/systemd/linger/{username}")
@ -1656,7 +1653,6 @@ def get_launchd_label() -> str:
def _launchd_domain() -> str:
import os
return f"gui/{os.getuid()}"

View file

@ -618,7 +618,6 @@ def _exec_in_container(container_info: dict, cli_args: list):
container_info: dict with backend, container_name, exec_user, hermes_bin
cli_args: the original CLI arguments (everything after 'hermes')
"""
import shutil
backend = container_info["backend"]
container_name = container_info["container_name"]
@ -1181,8 +1180,6 @@ def cmd_gateway(args):
def cmd_whatsapp(args):
"""Set up WhatsApp: choose mode, configure, install bridge, pair via QR."""
_require_tty("whatsapp")
import subprocess
from pathlib import Path
from hermes_cli.config import get_env_value, save_env_value
print()
@ -1425,8 +1422,6 @@ def select_provider_and_model(args=None):
# Read effective provider the same way the CLI does at startup:
# config.yaml model.provider > env var > auto-detect
import os
config_provider = None
model_cfg = config.get("model")
if isinstance(model_cfg, dict):
@ -2132,7 +2127,6 @@ def _model_flow_nous(config, current_model="", args=None):
save_env_value,
)
from hermes_cli.nous_subscription import prompt_enable_tool_gateway
import argparse
state = get_provider_auth_state("nous")
if not state or not state.get("access_token"):
@ -2300,7 +2294,6 @@ def _model_flow_openai_codex(config, current_model=""):
DEFAULT_CODEX_BASE_URL,
)
from hermes_cli.codex_models import get_codex_model_ids
import argparse
status = get_codex_auth_status()
if not status.get("logged_in"):
@ -4287,9 +4280,7 @@ def _clear_bytecode_cache(root: Path) -> int:
]
if os.path.basename(dirpath) == "__pycache__":
try:
import shutil as _shutil
_shutil.rmtree(dirpath)
shutil.rmtree(dirpath)
removed += 1
except OSError:
pass
@ -4361,7 +4352,6 @@ def _build_web_ui(web_dir: Path, *, fatal: bool = False) -> bool:
"""
if not (web_dir / "package.json").exists():
return True
import shutil
npm = shutil.which("npm")
if not npm:
@ -4398,7 +4388,6 @@ def _update_via_zip(args):
Used on Windows when git file I/O is broken (antivirus, NTFS filter
drivers causing 'Invalid argument' errors on file creation).
"""
import shutil
import tempfile
import zipfile
from urllib.request import urlretrieve
@ -4475,7 +4464,6 @@ def _update_via_zip(args):
# breaks on this machine, keep base deps and reinstall the remaining extras
# individually so update does not silently strip working capabilities.
print("→ Updating Python dependencies...")
import subprocess
uv_bin = shutil.which("uv")
if uv_bin:
@ -8078,7 +8066,6 @@ Examples:
return
line = _json.dumps(data, ensure_ascii=False) + "\n"
if args.output == "-":
import sys
sys.stdout.write(line)
else:
@ -8088,7 +8075,6 @@ Examples:
else:
sessions = db.export_all(source=args.source)
if args.output == "-":
import sys
for s in sessions:
sys.stdout.write(_json.dumps(s, ensure_ascii=False) + "\n")

View file

@ -515,8 +515,6 @@ def check_nous_free_tier() -> bool:
Returns False (assume paid) on any error never blocks paying users.
"""
global _free_tier_cache
import time
now = time.monotonic()
if _free_tier_cache is not None:
cached_result, cached_at = _free_tier_cache
@ -1259,7 +1257,6 @@ def detect_provider_for_model(
from hermes_cli.auth import PROVIDER_REGISTRY
pconfig = PROVIDER_REGISTRY.get(direct_match)
if pconfig:
import os
for env_var in pconfig.api_key_env_vars:
if os.getenv(env_var, "").strip():
has_creds = True

View file

@ -849,7 +849,6 @@ def setup_model_provider(config: dict, *, quick: bool = False):
def _check_espeak_ng() -> bool:
"""Check if espeak-ng is installed."""
import shutil
return shutil.which("espeak-ng") is not None or shutil.which("espeak") is not None
@ -1084,8 +1083,6 @@ def setup_tts(config: dict):
def setup_terminal_backend(config: dict):
"""Configure the terminal execution backend."""
import platform as _platform
import shutil
print_header("Terminal Backend")
print_info("Choose where Hermes runs shell commands and code.")
print_info("This affects tool execution, file access, and isolation.")

View file

@ -2324,12 +2324,10 @@ def start_server(
)
if open_browser:
import threading
import webbrowser
def _open():
import time as _t
_t.sleep(1.0)
time.sleep(1.0)
webbrowser.open(f"http://{host}:{port}")
threading.Thread(target=_open, daemon=True).start()

View file

@ -1088,8 +1088,7 @@ class AIAgent:
_is_bedrock_anthropic = self.provider == "bedrock"
if _is_bedrock_anthropic:
from agent.anthropic_adapter import build_anthropic_bedrock_client
import re as _re
_region_match = _re.search(r"bedrock-runtime\.([a-z0-9-]+)\.", base_url or "")
_region_match = re.search(r"bedrock-runtime\.([a-z0-9-]+)\.", base_url or "")
_br_region = _region_match.group(1) if _region_match else "us-east-1"
self._bedrock_region = _br_region
self._anthropic_client = build_anthropic_bedrock_client(_br_region)
@ -1130,8 +1129,7 @@ class AIAgent:
elif self.api_mode == "bedrock_converse":
# AWS Bedrock — uses boto3 directly, no OpenAI client needed.
# Region is extracted from the base_url or defaults to us-east-1.
import re as _re
_region_match = _re.search(r"bedrock-runtime\.([a-z0-9-]+)\.", base_url or "")
_region_match = re.search(r"bedrock-runtime\.([a-z0-9-]+)\.", base_url or "")
self._bedrock_region = _region_match.group(1) if _region_match else "us-east-1"
# Guardrail config — read from config.yaml at init time.
self._bedrock_guardrail_config = None
@ -1576,7 +1574,6 @@ class AIAgent:
"Falling back to auto-detection.",
_config_context_length,
)
import sys
print(
f"\n⚠ Invalid model.context_length in config.yaml: {_config_context_length!r}\n"
f" Must be a plain integer (e.g. 256000, not '256K').\n"
@ -1618,7 +1615,6 @@ class AIAgent:
"Falling back to auto-detection.",
self.model, _cp_ctx,
)
import sys
print(
f"\n⚠ Invalid context_length for model {self.model!r} in custom_providers: {_cp_ctx!r}\n"
f" Must be a plain integer (e.g. 256000, not '256K').\n"
@ -1881,8 +1877,6 @@ class AIAgent:
change persists across turns (unlike fallback which is
turn-scoped).
"""
import logging
import re as _re
from hermes_cli.providers import determine_api_mode
# ── Determine api_mode if not provided ──
@ -1900,7 +1894,7 @@ class AIAgent:
and isinstance(base_url, str)
and base_url
):
base_url = _re.sub(r"/v1/?$", "", base_url)
base_url = re.sub(r"/v1/?$", "", base_url)
old_model = self.model
old_provider = self.provider
@ -2916,7 +2910,7 @@ class AIAgent:
role = msg.get("role", "unknown")
content = msg.get("content")
tool_calls_data = None
if hasattr(msg, "tool_calls") and msg.tool_calls:
if hasattr(msg, "tool_calls") and isinstance(msg.tool_calls, list) and msg.tool_calls:
tool_calls_data = [
{"name": tc.function.name, "arguments": tc.function.arguments}
for tc in msg.tool_calls
@ -3182,15 +3176,14 @@ class AIAgent:
<title> tag instead of dumping raw HTML. Falls back to a truncated
str(error) for everything else.
"""
import re as _re
raw = str(error)
# Cloudflare / proxy HTML pages: grab the <title> for a clean summary
if "<!DOCTYPE" in raw or "<html" in raw:
m = _re.search(r"<title[^>]*>([^<]+)</title>", raw, _re.IGNORECASE)
m = re.search(r"<title[^>]*>([^<]+)</title>", raw, re.IGNORECASE)
title = m.group(1).strip() if m else "HTML error page (title not found)"
# Also grab Cloudflare Ray ID if present
ray = _re.search(r"Cloudflare Ray ID:\s*<strong[^>]*>([^<]+)</strong>", raw)
ray = re.search(r"Cloudflare Ray ID:\s*<strong[^>]*>([^<]+)</strong>", raw)
ray_id = ray.group(1).strip() if ray else None
status_code = getattr(error, "status_code", None)
parts = []

View file

@ -20,6 +20,8 @@ from aiohttp.test_utils import TestClient, TestServer
from gateway.config import PlatformConfig
from gateway.platforms.api_server import APIServerAdapter, cors_middleware
_MOD = "gateway.platforms.api_server"
# ---------------------------------------------------------------------------
# Helpers
@ -83,10 +85,10 @@ class TestListJobs:
"""GET /api/jobs returns job list."""
app = _create_app(adapter)
async with TestClient(TestServer(app)) as cli:
with patch.object(
APIServerAdapter, "_CRON_AVAILABLE", True
), patch.object(
APIServerAdapter, "_cron_list", return_value=[SAMPLE_JOB]
with patch(
f"{_MOD}._CRON_AVAILABLE", True
), patch(
f"{_MOD}._cron_list", return_value=[SAMPLE_JOB]
):
resp = await cli.get("/api/jobs")
assert resp.status == 200
@ -104,10 +106,10 @@ class TestListJobs:
app = _create_app(adapter)
mock_list = MagicMock(return_value=[SAMPLE_JOB])
async with TestClient(TestServer(app)) as cli:
with patch.object(
APIServerAdapter, "_CRON_AVAILABLE", True
), patch.object(
APIServerAdapter, "_cron_list", mock_list
with patch(
f"{_MOD}._CRON_AVAILABLE", True
), patch(
f"{_MOD}._cron_list", mock_list
):
resp = await cli.get("/api/jobs?include_disabled=true")
assert resp.status == 200
@ -119,10 +121,10 @@ class TestListJobs:
app = _create_app(adapter)
mock_list = MagicMock(return_value=[])
async with TestClient(TestServer(app)) as cli:
with patch.object(
APIServerAdapter, "_CRON_AVAILABLE", True
), patch.object(
APIServerAdapter, "_cron_list", mock_list
with patch(
f"{_MOD}._CRON_AVAILABLE", True
), patch(
f"{_MOD}._cron_list", mock_list
):
resp = await cli.get("/api/jobs")
assert resp.status == 200
@ -140,10 +142,10 @@ class TestCreateJob:
app = _create_app(adapter)
mock_create = MagicMock(return_value=SAMPLE_JOB)
async with TestClient(TestServer(app)) as cli:
with patch.object(
APIServerAdapter, "_CRON_AVAILABLE", True
), patch.object(
APIServerAdapter, "_cron_create", mock_create
with patch(
f"{_MOD}._CRON_AVAILABLE", True
), patch(
f"{_MOD}._cron_create", mock_create
):
resp = await cli.post("/api/jobs", json={
"name": "test-job",
@ -164,7 +166,7 @@ class TestCreateJob:
"""POST /api/jobs without name returns 400."""
app = _create_app(adapter)
async with TestClient(TestServer(app)) as cli:
with patch.object(APIServerAdapter, "_CRON_AVAILABLE", True):
with patch(f"{_MOD}._CRON_AVAILABLE", True):
resp = await cli.post("/api/jobs", json={
"schedule": "*/5 * * * *",
"prompt": "do something",
@ -178,7 +180,7 @@ class TestCreateJob:
"""POST /api/jobs with name > 200 chars returns 400."""
app = _create_app(adapter)
async with TestClient(TestServer(app)) as cli:
with patch.object(APIServerAdapter, "_CRON_AVAILABLE", True):
with patch(f"{_MOD}._CRON_AVAILABLE", True):
resp = await cli.post("/api/jobs", json={
"name": "x" * 201,
"schedule": "*/5 * * * *",
@ -192,7 +194,7 @@ class TestCreateJob:
"""POST /api/jobs with prompt > 5000 chars returns 400."""
app = _create_app(adapter)
async with TestClient(TestServer(app)) as cli:
with patch.object(APIServerAdapter, "_CRON_AVAILABLE", True):
with patch(f"{_MOD}._CRON_AVAILABLE", True):
resp = await cli.post("/api/jobs", json={
"name": "test-job",
"schedule": "*/5 * * * *",
@ -207,7 +209,7 @@ class TestCreateJob:
"""POST /api/jobs with repeat=0 returns 400."""
app = _create_app(adapter)
async with TestClient(TestServer(app)) as cli:
with patch.object(APIServerAdapter, "_CRON_AVAILABLE", True):
with patch(f"{_MOD}._CRON_AVAILABLE", True):
resp = await cli.post("/api/jobs", json={
"name": "test-job",
"schedule": "*/5 * * * *",
@ -222,7 +224,7 @@ class TestCreateJob:
"""POST /api/jobs without schedule returns 400."""
app = _create_app(adapter)
async with TestClient(TestServer(app)) as cli:
with patch.object(APIServerAdapter, "_CRON_AVAILABLE", True):
with patch(f"{_MOD}._CRON_AVAILABLE", True):
resp = await cli.post("/api/jobs", json={
"name": "test-job",
})
@ -242,10 +244,10 @@ class TestGetJob:
app = _create_app(adapter)
mock_get = MagicMock(return_value=SAMPLE_JOB)
async with TestClient(TestServer(app)) as cli:
with patch.object(
APIServerAdapter, "_CRON_AVAILABLE", True
), patch.object(
APIServerAdapter, "_cron_get", mock_get
with patch(
f"{_MOD}._CRON_AVAILABLE", True
), patch(
f"{_MOD}._cron_get", mock_get
):
resp = await cli.get(f"/api/jobs/{VALID_JOB_ID}")
assert resp.status == 200
@ -259,10 +261,10 @@ class TestGetJob:
app = _create_app(adapter)
mock_get = MagicMock(return_value=None)
async with TestClient(TestServer(app)) as cli:
with patch.object(
APIServerAdapter, "_CRON_AVAILABLE", True
), patch.object(
APIServerAdapter, "_cron_get", mock_get
with patch(
f"{_MOD}._CRON_AVAILABLE", True
), patch(
f"{_MOD}._cron_get", mock_get
):
resp = await cli.get(f"/api/jobs/{VALID_JOB_ID}")
assert resp.status == 404
@ -272,7 +274,7 @@ class TestGetJob:
"""GET /api/jobs/{id} with non-hex id returns 400."""
app = _create_app(adapter)
async with TestClient(TestServer(app)) as cli:
with patch.object(APIServerAdapter, "_CRON_AVAILABLE", True):
with patch(f"{_MOD}._CRON_AVAILABLE", True):
resp = await cli.get("/api/jobs/not-a-valid-hex!")
assert resp.status == 400
data = await resp.json()
@ -291,10 +293,10 @@ class TestUpdateJob:
updated_job = {**SAMPLE_JOB, "name": "updated-name"}
mock_update = MagicMock(return_value=updated_job)
async with TestClient(TestServer(app)) as cli:
with patch.object(
APIServerAdapter, "_CRON_AVAILABLE", True
), patch.object(
APIServerAdapter, "_cron_update", mock_update
with patch(
f"{_MOD}._CRON_AVAILABLE", True
), patch(
f"{_MOD}._cron_update", mock_update
):
resp = await cli.patch(
f"/api/jobs/{VALID_JOB_ID}",
@ -317,10 +319,10 @@ class TestUpdateJob:
updated_job = {**SAMPLE_JOB, "name": "new-name"}
mock_update = MagicMock(return_value=updated_job)
async with TestClient(TestServer(app)) as cli:
with patch.object(
APIServerAdapter, "_CRON_AVAILABLE", True
), patch.object(
APIServerAdapter, "_cron_update", mock_update
with patch(
f"{_MOD}._CRON_AVAILABLE", True
), patch(
f"{_MOD}._cron_update", mock_update
):
resp = await cli.patch(
f"/api/jobs/{VALID_JOB_ID}",
@ -342,7 +344,7 @@ class TestUpdateJob:
"""PATCH /api/jobs/{id} with only unknown fields returns 400."""
app = _create_app(adapter)
async with TestClient(TestServer(app)) as cli:
with patch.object(APIServerAdapter, "_CRON_AVAILABLE", True):
with patch(f"{_MOD}._CRON_AVAILABLE", True):
resp = await cli.patch(
f"/api/jobs/{VALID_JOB_ID}",
json={"evil_field": "malicious"},
@ -363,10 +365,10 @@ class TestDeleteJob:
app = _create_app(adapter)
mock_remove = MagicMock(return_value=True)
async with TestClient(TestServer(app)) as cli:
with patch.object(
APIServerAdapter, "_CRON_AVAILABLE", True
), patch.object(
APIServerAdapter, "_cron_remove", mock_remove
with patch(
f"{_MOD}._CRON_AVAILABLE", True
), patch(
f"{_MOD}._cron_remove", mock_remove
):
resp = await cli.delete(f"/api/jobs/{VALID_JOB_ID}")
assert resp.status == 200
@ -380,10 +382,10 @@ class TestDeleteJob:
app = _create_app(adapter)
mock_remove = MagicMock(return_value=False)
async with TestClient(TestServer(app)) as cli:
with patch.object(
APIServerAdapter, "_CRON_AVAILABLE", True
), patch.object(
APIServerAdapter, "_cron_remove", mock_remove
with patch(
f"{_MOD}._CRON_AVAILABLE", True
), patch(
f"{_MOD}._cron_remove", mock_remove
):
resp = await cli.delete(f"/api/jobs/{VALID_JOB_ID}")
assert resp.status == 404
@ -401,10 +403,10 @@ class TestPauseJob:
paused_job = {**SAMPLE_JOB, "enabled": False}
mock_pause = MagicMock(return_value=paused_job)
async with TestClient(TestServer(app)) as cli:
with patch.object(
APIServerAdapter, "_CRON_AVAILABLE", True
), patch.object(
APIServerAdapter, "_cron_pause", mock_pause
with patch(
f"{_MOD}._CRON_AVAILABLE", True
), patch(
f"{_MOD}._cron_pause", mock_pause
):
resp = await cli.post(f"/api/jobs/{VALID_JOB_ID}/pause")
assert resp.status == 200
@ -426,10 +428,10 @@ class TestResumeJob:
resumed_job = {**SAMPLE_JOB, "enabled": True}
mock_resume = MagicMock(return_value=resumed_job)
async with TestClient(TestServer(app)) as cli:
with patch.object(
APIServerAdapter, "_CRON_AVAILABLE", True
), patch.object(
APIServerAdapter, "_cron_resume", mock_resume
with patch(
f"{_MOD}._CRON_AVAILABLE", True
), patch(
f"{_MOD}._cron_resume", mock_resume
):
resp = await cli.post(f"/api/jobs/{VALID_JOB_ID}/resume")
assert resp.status == 200
@ -451,10 +453,10 @@ class TestRunJob:
triggered_job = {**SAMPLE_JOB, "last_run": "2025-01-01T00:00:00Z"}
mock_trigger = MagicMock(return_value=triggered_job)
async with TestClient(TestServer(app)) as cli:
with patch.object(
APIServerAdapter, "_CRON_AVAILABLE", True
), patch.object(
APIServerAdapter, "_cron_trigger", mock_trigger
with patch(
f"{_MOD}._CRON_AVAILABLE", True
), patch(
f"{_MOD}._cron_trigger", mock_trigger
):
resp = await cli.post(f"/api/jobs/{VALID_JOB_ID}/run")
assert resp.status == 200
@ -473,7 +475,7 @@ class TestAuthRequired:
"""GET /api/jobs without API key returns 401 when key is set."""
app = _create_app(auth_adapter)
async with TestClient(TestServer(app)) as cli:
with patch.object(APIServerAdapter, "_CRON_AVAILABLE", True):
with patch(f"{_MOD}._CRON_AVAILABLE", True):
resp = await cli.get("/api/jobs")
assert resp.status == 401
@ -482,7 +484,7 @@ class TestAuthRequired:
"""POST /api/jobs without API key returns 401 when key is set."""
app = _create_app(auth_adapter)
async with TestClient(TestServer(app)) as cli:
with patch.object(APIServerAdapter, "_CRON_AVAILABLE", True):
with patch(f"{_MOD}._CRON_AVAILABLE", True):
resp = await cli.post("/api/jobs", json={
"name": "test", "schedule": "* * * * *",
})
@ -493,7 +495,7 @@ class TestAuthRequired:
"""GET /api/jobs/{id} without API key returns 401 when key is set."""
app = _create_app(auth_adapter)
async with TestClient(TestServer(app)) as cli:
with patch.object(APIServerAdapter, "_CRON_AVAILABLE", True):
with patch(f"{_MOD}._CRON_AVAILABLE", True):
resp = await cli.get(f"/api/jobs/{VALID_JOB_ID}")
assert resp.status == 401
@ -502,7 +504,7 @@ class TestAuthRequired:
"""DELETE /api/jobs/{id} without API key returns 401."""
app = _create_app(auth_adapter)
async with TestClient(TestServer(app)) as cli:
with patch.object(APIServerAdapter, "_CRON_AVAILABLE", True):
with patch(f"{_MOD}._CRON_AVAILABLE", True):
resp = await cli.delete(f"/api/jobs/{VALID_JOB_ID}")
assert resp.status == 401
@ -512,10 +514,10 @@ class TestAuthRequired:
app = _create_app(auth_adapter)
mock_list = MagicMock(return_value=[])
async with TestClient(TestServer(app)) as cli:
with patch.object(
APIServerAdapter, "_CRON_AVAILABLE", True
), patch.object(
APIServerAdapter, "_cron_list", mock_list
with patch(
f"{_MOD}._CRON_AVAILABLE", True
), patch(
f"{_MOD}._cron_list", mock_list
):
resp = await cli.get(
"/api/jobs",
@ -534,7 +536,7 @@ class TestCronUnavailable:
"""GET /api/jobs returns 501 when _CRON_AVAILABLE is False."""
app = _create_app(adapter)
async with TestClient(TestServer(app)) as cli:
with patch.object(APIServerAdapter, "_CRON_AVAILABLE", False):
with patch(f"{_MOD}._CRON_AVAILABLE", False):
resp = await cli.get("/api/jobs")
assert resp.status == 501
data = await resp.json()
@ -551,8 +553,8 @@ class TestCronUnavailable:
return SAMPLE_JOB
async with TestClient(TestServer(app)) as cli:
with patch.object(APIServerAdapter, "_CRON_AVAILABLE", True), patch.object(
APIServerAdapter, "_cron_pause", staticmethod(_plain_pause)
with patch(f"{_MOD}._CRON_AVAILABLE", True), patch(
f"{_MOD}._cron_pause", _plain_pause
):
resp = await cli.post(f"/api/jobs/{VALID_JOB_ID}/pause")
assert resp.status == 200
@ -571,8 +573,8 @@ class TestCronUnavailable:
return [SAMPLE_JOB]
async with TestClient(TestServer(app)) as cli:
with patch.object(APIServerAdapter, "_CRON_AVAILABLE", True), patch.object(
APIServerAdapter, "_cron_list", staticmethod(_plain_list)
with patch(f"{_MOD}._CRON_AVAILABLE", True), patch(
f"{_MOD}._cron_list", _plain_list
):
resp = await cli.get("/api/jobs?include_disabled=true")
assert resp.status == 200
@ -593,8 +595,8 @@ class TestCronUnavailable:
return updated_job
async with TestClient(TestServer(app)) as cli:
with patch.object(APIServerAdapter, "_CRON_AVAILABLE", True), patch.object(
APIServerAdapter, "_cron_update", staticmethod(_plain_update)
with patch(f"{_MOD}._CRON_AVAILABLE", True), patch(
f"{_MOD}._cron_update", _plain_update
):
resp = await cli.patch(
f"/api/jobs/{VALID_JOB_ID}",
@ -611,7 +613,7 @@ class TestCronUnavailable:
"""POST /api/jobs returns 501 when _CRON_AVAILABLE is False."""
app = _create_app(adapter)
async with TestClient(TestServer(app)) as cli:
with patch.object(APIServerAdapter, "_CRON_AVAILABLE", False):
with patch(f"{_MOD}._CRON_AVAILABLE", False):
resp = await cli.post("/api/jobs", json={
"name": "test", "schedule": "* * * * *",
})
@ -622,7 +624,7 @@ class TestCronUnavailable:
"""GET /api/jobs/{id} returns 501 when _CRON_AVAILABLE is False."""
app = _create_app(adapter)
async with TestClient(TestServer(app)) as cli:
with patch.object(APIServerAdapter, "_CRON_AVAILABLE", False):
with patch(f"{_MOD}._CRON_AVAILABLE", False):
resp = await cli.get(f"/api/jobs/{VALID_JOB_ID}")
assert resp.status == 501
@ -631,7 +633,7 @@ class TestCronUnavailable:
"""DELETE /api/jobs/{id} returns 501 when _CRON_AVAILABLE is False."""
app = _create_app(adapter)
async with TestClient(TestServer(app)) as cli:
with patch.object(APIServerAdapter, "_CRON_AVAILABLE", False):
with patch(f"{_MOD}._CRON_AVAILABLE", False):
resp = await cli.delete(f"/api/jobs/{VALID_JOB_ID}")
assert resp.status == 501
@ -640,7 +642,7 @@ class TestCronUnavailable:
"""POST /api/jobs/{id}/pause returns 501 when _CRON_AVAILABLE is False."""
app = _create_app(adapter)
async with TestClient(TestServer(app)) as cli:
with patch.object(APIServerAdapter, "_CRON_AVAILABLE", False):
with patch(f"{_MOD}._CRON_AVAILABLE", False):
resp = await cli.post(f"/api/jobs/{VALID_JOB_ID}/pause")
assert resp.status == 501
@ -649,7 +651,7 @@ class TestCronUnavailable:
"""POST /api/jobs/{id}/resume returns 501 when _CRON_AVAILABLE is False."""
app = _create_app(adapter)
async with TestClient(TestServer(app)) as cli:
with patch.object(APIServerAdapter, "_CRON_AVAILABLE", False):
with patch(f"{_MOD}._CRON_AVAILABLE", False):
resp = await cli.post(f"/api/jobs/{VALID_JOB_ID}/resume")
assert resp.status == 501
@ -658,6 +660,6 @@ class TestCronUnavailable:
"""POST /api/jobs/{id}/run returns 501 when _CRON_AVAILABLE is False."""
app = _create_app(adapter)
async with TestClient(TestServer(app)) as cli:
with patch.object(APIServerAdapter, "_CRON_AVAILABLE", False):
with patch(f"{_MOD}._CRON_AVAILABLE", False):
resp = await cli.post(f"/api/jobs/{VALID_JOB_ID}/run")
assert resp.status == 501

View file

@ -1911,7 +1911,6 @@ def _maybe_start_recording(task_id: str):
recordings_dir.mkdir(parents=True, exist_ok=True)
_cleanup_old_recordings(max_age_hours=72)
import time
timestamp = time.strftime("%Y%m%d_%H%M%S")
recording_path = recordings_dir / f"session_{timestamp}_{task_id[:16]}.webm"
@ -2027,8 +2026,6 @@ def browser_vision(question: str, annotate: bool = False, task_id: Optional[str]
import base64
import uuid as uuid_mod
from pathlib import Path
effective_task_id = task_id or "default"
# Save screenshot to persistent location so it can be shared with users
@ -2210,7 +2207,6 @@ def _cleanup_old_screenshots(screenshots_dir, max_age_hours=24):
def _cleanup_old_recordings(max_age_hours=72):
"""Remove browser recordings older than max_age_hours to prevent disk bloat."""
import time
try:
hermes_home = get_hermes_home()
recordings_dir = hermes_home / "browser_recordings"

View file

@ -389,7 +389,6 @@ class CheckpointManager:
@staticmethod
def _parse_shortstat(stat_line: str, entry: Dict) -> None:
"""Parse git --shortstat output into entry dict."""
import re
m = re.search(r'(\d+) file', stat_line)
if m:
entry["files_changed"] = int(m.group(1))

View file

@ -1540,7 +1540,6 @@ def _interrupted_call_result() -> str:
def _interpolate_env_vars(value):
"""Recursively resolve ``${VAR}`` placeholders from ``os.environ``."""
if isinstance(value, str):
import re
def _replace(m):
return os.environ.get(m.group(1), m.group(0))
return re.sub(r"\$\{([^}]+)\}", _replace, value)

View file

@ -1167,32 +1167,31 @@ PROCESS_SCHEMA = {
def _handle_process(args, **kw):
import json as _json
task_id = kw.get("task_id")
action = args.get("action", "")
# Coerce to string — some models send session_id as an integer
session_id = str(args.get("session_id", "")) if args.get("session_id") is not None else ""
if action == "list":
return _json.dumps({"processes": process_registry.list_sessions(task_id=task_id)}, ensure_ascii=False)
return json.dumps({"processes": process_registry.list_sessions(task_id=task_id)}, ensure_ascii=False)
elif action in ("poll", "log", "wait", "kill", "write", "submit", "close"):
if not session_id:
return tool_error(f"session_id is required for {action}")
if action == "poll":
return _json.dumps(process_registry.poll(session_id), ensure_ascii=False)
return json.dumps(process_registry.poll(session_id), ensure_ascii=False)
elif action == "log":
return _json.dumps(process_registry.read_log(
return json.dumps(process_registry.read_log(
session_id, offset=args.get("offset", 0), limit=args.get("limit", 200)), ensure_ascii=False)
elif action == "wait":
return _json.dumps(process_registry.wait(session_id, timeout=args.get("timeout")), ensure_ascii=False)
return json.dumps(process_registry.wait(session_id, timeout=args.get("timeout")), ensure_ascii=False)
elif action == "kill":
return _json.dumps(process_registry.kill_process(session_id), ensure_ascii=False)
return json.dumps(process_registry.kill_process(session_id), ensure_ascii=False)
elif action == "write":
return _json.dumps(process_registry.write_stdin(session_id, str(args.get("data", ""))), ensure_ascii=False)
return json.dumps(process_registry.write_stdin(session_id, str(args.get("data", ""))), ensure_ascii=False)
elif action == "submit":
return _json.dumps(process_registry.submit_stdin(session_id, str(args.get("data", ""))), ensure_ascii=False)
return json.dumps(process_registry.submit_stdin(session_id, str(args.get("data", ""))), ensure_ascii=False)
elif action == "close":
return _json.dumps(process_registry.close_stdin(session_id), ensure_ascii=False)
return json.dumps(process_registry.close_stdin(session_id), ensure_ascii=False)
return tool_error(f"Unknown process action: {action}. Use: list, poll, log, wait, kill, write, submit, close")

View file

@ -509,7 +509,6 @@ def _get_disabled_skill_names() -> Set[str]:
def _is_skill_disabled(name: str, platform: str = None) -> bool:
"""Check if a skill is disabled in config."""
import os
try:
from hermes_cli.config import load_config
config = load_config()

View file

@ -217,7 +217,6 @@ def _prompt_for_sudo_password(timeout_seconds: int = 45) -> str:
directly from /dev/tty with echo disabled.
"""
import sys
import time as time_module
# Use the registered callback when available (prompt_toolkit-compatible)
if _sudo_password_callback is not None:
@ -278,7 +277,7 @@ def _prompt_for_sudo_password(timeout_seconds: int = 45) -> str:
try:
os.environ["HERMES_SPINNER_PAUSE"] = "1"
time_module.sleep(0.2)
time.sleep(0.2)
print()
print("" + "" * 58 + "")

105
uv.lock generated
View file

@ -426,7 +426,7 @@ wheels = [
[[package]]
name = "atroposlib"
version = "0.4.0"
source = { git = "https://github.com/NousResearch/atropos.git#c421582b6f7ce8a32f751aab3117d3824ac8f709" }
source = { git = "https://github.com/NousResearch/atropos.git?rev=c20c85256e5a45ad31edf8b7276e9c5ee1995a30#c20c85256e5a45ad31edf8b7276e9c5ee1995a30" }
dependencies = [
{ name = "aiofiles" },
{ name = "aiohttp" },
@ -558,6 +558,34 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc", size = 8458, upload-time = "2024-11-08T17:25:46.184Z" },
]
[[package]]
name = "boto3"
version = "1.42.92"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "botocore" },
{ name = "jmespath" },
{ name = "s3transfer" },
]
sdist = { url = "https://files.pythonhosted.org/packages/e7/3b/84cafa37e85a57618554bd2bc21bd569417097f45f18c23ef488e6c69683/boto3-1.42.92.tar.gz", hash = "sha256:55ec6ef6fc81f46d567a7d1d398d1e5c375d468905d0ccd9e1f767f0c77dbe9b", size = 113207, upload-time = "2026-04-20T19:38:17.293Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8f/8f/350ffd50aaa515429464deb1dc85893a21a64cb41892feb6b22ce87304ad/boto3-1.42.92-py3-none-any.whl", hash = "sha256:c90d9a170faa0585755fa103a3cd9595e1f53443864e902c180f3d8177589125", size = 140555, upload-time = "2026-04-20T19:38:14.323Z" },
]
[[package]]
name = "botocore"
version = "1.42.92"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "jmespath" },
{ name = "python-dateutil" },
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d5/0a/6785ce224ba4483b3e1282d959e1dd2c2898823336f013464c43cb154036/botocore-1.42.92.tar.gz", hash = "sha256:f1193d3057a2d0267353d7ef4e136be37ea432336d097fcb1951fae566ca3a22", size = 15235239, upload-time = "2026-04-20T19:38:05.085Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/32/b8/41d4d7ba75a4fb4f11362e96371a12695bc6ba0bb7cc680137db0213f97e/botocore-1.42.92-py3-none-any.whl", hash = "sha256:09ddefddbb1565ceef4b44b4b6e61b1ca5f12701d1494ecc85c1133d1b1e81fb", size = 14916275, upload-time = "2026-04-20T19:38:01.684Z" },
]
[[package]]
name = "cachetools"
version = "5.5.2"
@ -1838,7 +1866,7 @@ wheels = [
[[package]]
name = "hermes-agent"
version = "0.9.0"
version = "0.10.0"
source = { editable = "." }
dependencies = [
{ name = "anthropic" },
@ -1871,6 +1899,7 @@ all = [
{ name = "aiosqlite", marker = "sys_platform == 'linux'" },
{ name = "alibabacloud-dingtalk" },
{ name = "asyncpg", marker = "sys_platform == 'linux'" },
{ name = "boto3" },
{ name = "croniter" },
{ name = "daytona" },
{ name = "debugpy" },
@ -1893,12 +1922,16 @@ all = [
{ name = "pytest-xdist" },
{ name = "python-telegram-bot", extra = ["webhooks"] },
{ name = "pywinpty", marker = "sys_platform == 'win32'" },
{ name = "qrcode" },
{ name = "simple-term-menu" },
{ name = "slack-bolt" },
{ name = "slack-sdk" },
{ name = "sounddevice" },
{ name = "uvicorn", extra = ["standard"] },
]
bedrock = [
{ name = "boto3" },
]
cli = [
{ name = "simple-term-menu" },
]
@ -1918,9 +1951,11 @@ dev = [
dingtalk = [
{ name = "alibabacloud-dingtalk" },
{ name = "dingtalk-stream" },
{ name = "qrcode" },
]
feishu = [
{ name = "lark-oapi" },
{ name = "qrcode" },
]
homeassistant = [
{ name = "aiohttp" },
@ -1941,6 +1976,7 @@ messaging = [
{ name = "aiohttp" },
{ name = "discord-py", extra = ["voice"] },
{ name = "python-telegram-bot", extra = ["webhooks"] },
{ name = "qrcode" },
{ name = "slack-bolt" },
{ name = "slack-sdk" },
]
@ -1974,6 +2010,7 @@ termux = [
{ name = "honcho-ai" },
{ name = "mcp" },
{ name = "ptyprocess", marker = "sys_platform != 'win32'" },
{ name = "python-telegram-bot", extra = ["webhooks"] },
{ name = "pywinpty", marker = "sys_platform == 'win32'" },
{ name = "simple-term-menu" },
]
@ -2003,7 +2040,8 @@ requires-dist = [
{ name = "alibabacloud-dingtalk", marker = "extra == 'dingtalk'", specifier = ">=2.0.0" },
{ name = "anthropic", specifier = ">=0.39.0,<1" },
{ name = "asyncpg", marker = "extra == 'matrix'", specifier = ">=0.29" },
{ name = "atroposlib", marker = "extra == 'rl'", git = "https://github.com/NousResearch/atropos.git" },
{ name = "atroposlib", marker = "extra == 'rl'", git = "https://github.com/NousResearch/atropos.git?rev=c20c85256e5a45ad31edf8b7276e9c5ee1995a30" },
{ name = "boto3", marker = "extra == 'bedrock'", specifier = ">=1.35.0,<2" },
{ name = "croniter", marker = "extra == 'cron'", specifier = ">=6.0.0,<7" },
{ name = "daytona", marker = "extra == 'daytona'", specifier = ">=0.148.0,<1" },
{ name = "debugpy", marker = "extra == 'dev'", specifier = ">=1.8.0,<2" },
@ -2020,6 +2058,7 @@ requires-dist = [
{ name = "firecrawl-py", specifier = ">=4.16.0,<5" },
{ name = "hermes-agent", extras = ["acp"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["acp"], marker = "extra == 'termux'" },
{ name = "hermes-agent", extras = ["bedrock"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["cli"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["cli"], marker = "extra == 'termux'" },
{ name = "hermes-agent", extras = ["cron"], marker = "extra == 'all'" },
@ -2066,8 +2105,12 @@ requires-dist = [
{ name = "pytest-xdist", marker = "extra == 'dev'", specifier = ">=3.0,<4" },
{ name = "python-dotenv", specifier = ">=1.2.1,<2" },
{ name = "python-telegram-bot", extras = ["webhooks"], marker = "extra == 'messaging'", specifier = ">=22.6,<23" },
{ name = "python-telegram-bot", extras = ["webhooks"], marker = "extra == 'termux'", specifier = ">=22.6,<23" },
{ name = "pywinpty", marker = "sys_platform == 'win32' and extra == 'pty'", specifier = ">=2.0.0,<3" },
{ name = "pyyaml", specifier = ">=6.0.2,<7" },
{ name = "qrcode", marker = "extra == 'dingtalk'", specifier = ">=7.0,<8" },
{ name = "qrcode", marker = "extra == 'feishu'", specifier = ">=7.0,<8" },
{ name = "qrcode", marker = "extra == 'messaging'", specifier = ">=7.0,<8" },
{ name = "requests", specifier = ">=2.33.0,<3" },
{ name = "rich", specifier = ">=14.3.3,<15" },
{ name = "simple-term-menu", marker = "extra == 'cli'", specifier = ">=1.0,<2" },
@ -2077,13 +2120,13 @@ requires-dist = [
{ name = "slack-sdk", marker = "extra == 'slack'", specifier = ">=3.27.0,<4" },
{ name = "sounddevice", marker = "extra == 'voice'", specifier = ">=0.4.6,<1" },
{ name = "tenacity", specifier = ">=9.1.4,<10" },
{ name = "tinker", marker = "extra == 'rl'", git = "https://github.com/thinking-machines-lab/tinker.git" },
{ name = "tinker", marker = "extra == 'rl'", git = "https://github.com/thinking-machines-lab/tinker.git?rev=30517b667f18a3dfb7ef33fb56cf686d5820ba2b" },
{ name = "uvicorn", extras = ["standard"], marker = "extra == 'rl'", specifier = ">=0.24.0,<1" },
{ name = "uvicorn", extras = ["standard"], marker = "extra == 'web'", specifier = ">=0.24.0,<1" },
{ name = "wandb", marker = "extra == 'rl'", specifier = ">=0.15.0,<1" },
{ name = "yc-bench", marker = "python_full_version >= '3.12' and extra == 'yc-bench'", git = "https://github.com/collinear-ai/yc-bench.git" },
{ name = "yc-bench", marker = "python_full_version >= '3.12' and extra == 'yc-bench'", git = "https://github.com/collinear-ai/yc-bench.git?rev=bfb0c88062450f46341bd9a5298903fc2e952a5c" },
]
provides-extras = ["modal", "daytona", "dev", "messaging", "cron", "slack", "matrix", "cli", "tts-premium", "voice", "pty", "honcho", "mcp", "homeassistant", "sms", "acp", "mistral", "termux", "dingtalk", "feishu", "web", "rl", "yc-bench", "all"]
provides-extras = ["modal", "daytona", "dev", "messaging", "cron", "slack", "matrix", "cli", "tts-premium", "voice", "pty", "honcho", "mcp", "homeassistant", "sms", "acp", "mistral", "bedrock", "termux", "dingtalk", "feishu", "web", "rl", "yc-bench", "all"]
[[package]]
name = "hf-transfer"
@ -2410,6 +2453,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/67/8a/a342b2f0251f3dac4ca17618265d93bf244a2a4d089126e81e4c1056ac50/jiter-0.13.0-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bb00b6d26db67a05fe3e12c76edc75f32077fb51deed13822dc648fa373bc19", size = 343768, upload-time = "2026-02-02T12:37:55.055Z" },
]
[[package]]
name = "jmespath"
version = "1.1.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/d3/59/322338183ecda247fb5d1763a6cbe46eff7222eaeebafd9fa65d4bf5cb11/jmespath-1.1.0.tar.gz", hash = "sha256:472c87d80f36026ae83c6ddd0f1d05d4e510134ed462851fd5f754c8c3cbb88d", size = 27377, upload-time = "2026-01-22T16:35:26.279Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/14/2f/967ba146e6d58cf6a652da73885f52fc68001525b4197effc174321d70b4/jmespath-1.1.0-py3-none-any.whl", hash = "sha256:a5663118de4908c91729bea0acadca56526eb2698e83de10cd116ae0f4e97c64", size = 20419, upload-time = "2026-01-22T16:35:24.919Z" },
]
[[package]]
name = "joblib"
version = "1.5.3"
@ -4109,6 +4161,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/10/bd/c038d7cc38edc1aa5bf91ab8068b63d4308c66c4c8bb3cbba7dfbc049f9c/pyparsing-3.3.2-py3-none-any.whl", hash = "sha256:850ba148bd908d7e2411587e247a1e4f0327839c40e2e5e6d05a007ecc69911d", size = 122781, upload-time = "2026-01-21T03:57:55.912Z" },
]
[[package]]
name = "pypng"
version = "0.20220715.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/93/cd/112f092ec27cca83e0516de0a3368dbd9128c187fb6b52aaaa7cde39c96d/pypng-0.20220715.0.tar.gz", hash = "sha256:739c433ba96f078315de54c0db975aee537cbc3e1d0ae4ed9aab0ca1e427e2c1", size = 128992, upload-time = "2022-07-15T14:11:05.301Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3e/b9/3766cc361d93edb2ce81e2e1f87dd98f314d7d513877a342d31b30741680/pypng-0.20220715.0-py3-none-any.whl", hash = "sha256:4a43e969b8f5aaafb2a415536c1a8ec7e341cd6a3f957fd5b5f32a4cfeed902c", size = 58057, upload-time = "2022-07-15T14:11:03.713Z" },
]
[[package]]
name = "pytest"
version = "9.0.2"
@ -4311,6 +4372,20 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" },
]
[[package]]
name = "qrcode"
version = "7.4.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
{ name = "pypng" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/30/35/ad6d4c5a547fe9a5baf85a9edbafff93fc6394b014fab30595877305fa59/qrcode-7.4.2.tar.gz", hash = "sha256:9dd969454827e127dbd93696b20747239e6d540e082937c90f14ac95b30f5845", size = 535974, upload-time = "2023-02-05T22:11:46.548Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/24/79/aaf0c1c7214f2632badb2771d770b1500d3d7cbdf2590ae62e721ec50584/qrcode-7.4.2-py3-none-any.whl", hash = "sha256:581dca7a029bcb2deef5d01068e39093e80ef00b4a61098a2182eac59d01643a", size = 46197, upload-time = "2023-02-05T22:11:43.4Z" },
]
[[package]]
name = "referencing"
version = "0.37.0"
@ -4577,6 +4652,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/d1/b7/b95708304cd49b7b6f82fdd039f1748b66ec2b21d6a45180910802f1abf1/rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ac37f9f516c51e5753f27dfdef11a88330f04de2d564be3991384b2f3535d02e", size = 562191, upload-time = "2025-11-30T20:24:36.853Z" },
]
[[package]]
name = "s3transfer"
version = "0.16.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "botocore" },
]
sdist = { url = "https://files.pythonhosted.org/packages/05/04/74127fc843314818edfa81b5540e26dd537353b123a4edc563109d8f17dd/s3transfer-0.16.0.tar.gz", hash = "sha256:8e990f13268025792229cd52fa10cb7163744bf56e719e0b9cb925ab79abf920", size = 153827, upload-time = "2025-12-01T02:30:59.114Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/fc/51/727abb13f44c1fcf6d145979e1535a35794db0f6e450a0cb46aa24732fe2/s3transfer-0.16.0-py3-none-any.whl", hash = "sha256:18e25d66fed509e3868dc1572b3f427ff947dd2c56f844a5bf09481ad3f3b2fe", size = 86830, upload-time = "2025-12-01T02:30:57.729Z" },
]
[[package]]
name = "safetensors"
version = "0.7.0"
@ -4927,8 +5014,8 @@ wheels = [
[[package]]
name = "tinker"
version = "0.16.1"
source = { git = "https://github.com/thinking-machines-lab/tinker.git#07bd3c2dd3cd4398ac1c26f0ec0deccbf3c1f913" }
version = "0.18.0"
source = { git = "https://github.com/thinking-machines-lab/tinker.git?rev=30517b667f18a3dfb7ef33fb56cf686d5820ba2b#30517b667f18a3dfb7ef33fb56cf686d5820ba2b" }
dependencies = [
{ name = "anyio" },
{ name = "click" },
@ -5653,7 +5740,7 @@ wheels = [
[[package]]
name = "yc-bench"
version = "0.1.0"
source = { git = "https://github.com/collinear-ai/yc-bench.git#0c53c98f01a431db2e391482bc46013045854ab2" }
source = { git = "https://github.com/collinear-ai/yc-bench.git?rev=bfb0c88062450f46341bd9a5298903fc2e952a5c#bfb0c88062450f46341bd9a5298903fc2e952a5c" }
dependencies = [
{ name = "litellm", marker = "python_full_version >= '3.12'" },
{ name = "matplotlib", marker = "python_full_version >= '3.12'" },