refactor: move standalone scripts to scripts/ directory

Move batch_runner, trajectory_compressor, mini_swe_runner, and rl_cli
from the project root into scripts/, update all imports, logger names,
pyproject.toml, and downstream test references.
This commit is contained in:
alt-glitch 2026-04-21 15:23:23 +05:30
parent 224e6d46d9
commit ca2b6a529e
20 changed files with 51 additions and 41 deletions

2
cli.py
View file

@@ -7000,7 +7000,7 @@ class HermesCLI:
logging.getLogger(noisy).setLevel(logging.WARNING)
else:
logging.getLogger().setLevel(logging.INFO)
for quiet_logger in ('tools', 'run_agent', 'trajectory_compressor', 'cron', 'hermes_cli'):
for quiet_logger in ('tools', 'run_agent', 'scripts.trajectory_compressor', 'cron', 'hermes_cli'):
logging.getLogger(quiet_logger).setLevel(logging.ERROR)
def _show_insights(self, command: str = "/insights"):

View file

@@ -29,7 +29,7 @@ echo "📝 Logging to: $LOG_FILE"
# Point to the example dataset in this directory
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
python batch_runner.py \
python scripts/batch_runner.py \
--dataset_file="$SCRIPT_DIR/example_browser_tasks.jsonl" \
--batch_size=5 \
--run_name="browser_tasks_example" \

View file

@@ -4,7 +4,7 @@
# Generates tool-calling trajectories for multi-step web research tasks.
#
# Usage:
# python batch_runner.py \
# python scripts/batch_runner.py \
# --config datagen-config-examples/web_research.yaml \
# --run_name web_research_v1

View file

@@ -142,7 +142,7 @@ class _ComponentFilter(logging.Filter):
# Used by _ComponentFilter and exposed for ``hermes logs --component``.
COMPONENT_PREFIXES = {
"gateway": ("gateway",),
"agent": ("agent", "run_agent", "model_tools", "batch_runner"),
"agent": ("agent", "run_agent", "model_tools", "scripts.batch_runner"),
"tools": ("tools",),
"cli": ("hermes_cli", "cli"),
"cron": ("cron",),

View file

@@ -120,13 +120,13 @@ hermes-agent = "run_agent:main"
hermes-acp = "acp_adapter.entry:main"
[tool.setuptools]
py-modules = ["run_agent", "model_tools", "toolsets", "batch_runner", "trajectory_compressor", "toolset_distributions", "cli", "hermes_constants", "hermes_state", "hermes_time", "hermes_logging", "rl_cli", "utils"]
py-modules = ["run_agent", "model_tools", "toolsets", "toolset_distributions", "cli", "hermes_constants", "hermes_state", "hermes_time", "hermes_logging", "utils"]
[tool.setuptools.package-data]
hermes_cli = ["web_dist/**/*"]
[tool.setuptools.packages.find]
include = ["agent", "tools", "tools.*", "hermes_cli", "gateway", "gateway.*", "tui_gateway", "tui_gateway.*", "cron", "acp_adapter", "plugins", "plugins.*"]
include = ["agent", "tools", "tools.*", "hermes_cli", "gateway", "gateway.*", "tui_gateway", "tui_gateway.*", "cron", "acp_adapter", "plugins", "plugins.*", "scripts"]
[tool.pytest.ini_options]
testpaths = ["tests"]

View file

@@ -1041,7 +1041,7 @@ class AIAgent:
for quiet_logger in [
'tools', # all tools.* (terminal, browser, web, file, etc.)
'run_agent', # agent runner internals
'trajectory_compressor',
'scripts.trajectory_compressor',
'cron', # scheduler (only relevant in daemon mode)
'hermes_cli', # CLI helpers
]:

0
scripts/__init__.py Normal file
View file

View file

@@ -20,9 +20,13 @@ Usage:
python batch_runner.py --dataset_file=data.jsonl --batch_size=10 --run_name=my_run --distribution=image_gen
"""
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import json
import logging
import os
import time
from pathlib import Path
from typing import List, Dict, Any, Optional, Tuple

View file

@@ -26,10 +26,13 @@ Usage:
python mini_swe_runner.py --prompts_file prompts.jsonl --output_file trajectories.jsonl --env docker
"""
import json
import logging
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import json
import logging
import time
import uuid
from datetime import datetime

View file

@@ -19,9 +19,12 @@ Environment Variables:
OPENROUTER_API_KEY: API key for OpenRouter (required for agent)
"""
import asyncio
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import asyncio
from pathlib import Path
import fire
@@ -32,7 +35,7 @@ from hermes_constants import get_hermes_home, OPENROUTER_BASE_URL
# Load .env from ~/.hermes/.env first, then project root as dev fallback.
# User-managed env files should override stale shell exports on restart.
_hermes_home = get_hermes_home()
_project_env = Path(__file__).parent / '.env'
_project_env = Path(__file__).parent.parent / '.env'
from hermes_cli.env_loader import load_hermes_dotenv

View file

@@ -267,7 +267,7 @@ def run_compression(input_dir: Path, output_dir: Path, config_path: str):
# Import the compressor
import sys
sys.path.insert(0, str(Path(__file__).parent.parent))
from trajectory_compressor import TrajectoryCompressor, CompressionConfig
from scripts.trajectory_compressor import TrajectoryCompressor, CompressionConfig
print(f"\n🗜️ Running trajectory compression...")
print(f" Input: {input_dir}")

View file

@@ -30,8 +30,12 @@ Usage:
python trajectory_compressor.py --input=data/my_run --sample_percent=10
"""
import json
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import json
import time
import yaml
import logging
@@ -52,7 +56,7 @@ from agent.retry_utils import jittered_backoff
from hermes_cli.env_loader import load_hermes_dotenv
_hermes_home = get_hermes_home()
_project_env = Path(__file__).parent / ".env"
_project_env = Path(__file__).parent.parent / ".env"
load_hermes_dotenv(hermes_home=_hermes_home, project_env=_project_env)

View file

@@ -164,7 +164,7 @@ class TestArceeURLMapping:
assert "arceeai" in _PROVIDER_PREFIXES
def test_trajectory_compressor_detects_arcee(self):
import trajectory_compressor as tc
import scripts.trajectory_compressor as tc
comp = tc.TrajectoryCompressor.__new__(tc.TrajectoryCompressor)
comp.config = types.SimpleNamespace(base_url="https://api.arcee.ai/api/v1")
assert comp._detect_provider() == "arcee"

View file

@@ -104,7 +104,7 @@ def main():
test_file = create_test_dataset()
print(f"\n📝 To run the test manually:")
print(f" python batch_runner.py \\")
print(f" python scripts/batch_runner.py \\")
print(f" --dataset_file={test_file} \\")
print(f" --batch_size=2 \\")
print(f" --run_name={run_name} \\")
@@ -112,7 +112,7 @@ def main():
print(f" --num_workers=2")
print(f"\n💡 Or test with different distributions:")
print(f" python batch_runner.py --list_distributions")
print(f" python scripts/batch_runner.py --list_distributions")
print(f"\n🔍 After running, you can verify output with:")
print(f" python tests/test_batch_runner.py --verify")

View file

@@ -30,7 +30,7 @@ from pathlib import Path
from typing import List, Dict, Any
import traceback
# Add project root to path to import batch_runner
# Add project root to path to import scripts.batch_runner
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
@@ -135,7 +135,7 @@ def test_current_implementation():
shutil.rmtree(output_dir)
# Import here to avoid issues if module changes
from batch_runner import BatchRunner
from scripts.batch_runner import BatchRunner
checkpoint_file = output_dir / "checkpoint.json"
@@ -229,7 +229,7 @@ def test_interruption_and_resume():
if output_dir.exists():
shutil.rmtree(output_dir)
from batch_runner import BatchRunner
from scripts.batch_runner import BatchRunner
checkpoint_file = output_dir / "checkpoint.json"

View file

@@ -8,11 +8,7 @@ from unittest.mock import patch, MagicMock
import pytest
# batch_runner uses relative imports, ensure project root is on path
import sys
sys.path.insert(0, str(Path(__file__).parent.parent))
from batch_runner import BatchRunner, _process_batch_worker
from scripts.batch_runner import BatchRunner, _process_batch_worker
@pytest.fixture
@@ -173,7 +169,7 @@ class TestBatchWorkerResumeBehavior:
"toolsets_used": [],
}
monkeypatch.setattr("batch_runner._process_single_prompt", lambda *args, **kwargs: prompt_result)
monkeypatch.setattr("scripts.batch_runner._process_single_prompt", lambda *args, **kwargs: prompt_result)
result = _process_batch_worker((
1,

View file

@@ -14,7 +14,7 @@ def test_run_task_kimi_omits_temperature():
)
mock_openai.return_value = client
from mini_swe_runner import MiniSWERunner
from scripts.mini_swe_runner import MiniSWERunner
runner = MiniSWERunner(
model="kimi-for-coding",
@@ -42,7 +42,7 @@ def test_run_task_public_moonshot_kimi_k2_5_omits_temperature():
)
mock_openai.return_value = client
from mini_swe_runner import MiniSWERunner
from scripts.mini_swe_runner import MiniSWERunner
runner = MiniSWERunner(
model="kimi-k2.5",

View file

@@ -9,7 +9,7 @@ from unittest.mock import AsyncMock, patch, MagicMock
import pytest
from trajectory_compressor import (
from scripts.trajectory_compressor import (
CompressionConfig,
TrajectoryMetrics,
AggregateMetrics,
@@ -25,8 +25,8 @@ def test_import_loads_env_from_hermes_home(tmp_path, monkeypatch):
monkeypatch.setenv("HERMES_HOME", str(home))
monkeypatch.delenv("OPENROUTER_API_KEY", raising=False)
sys.modules.pop("trajectory_compressor", None)
importlib.import_module("trajectory_compressor")
sys.modules.pop("scripts.trajectory_compressor", None)
importlib.import_module("scripts.trajectory_compressor")
assert os.getenv("OPENROUTER_API_KEY") == "from-hermes-home"

View file

@@ -22,7 +22,7 @@ class TestAsyncClientLazyCreation:
def test_async_client_none_after_init(self):
"""async_client should be None after __init__ (not eagerly created)."""
from trajectory_compressor import TrajectoryCompressor
from scripts.trajectory_compressor import TrajectoryCompressor
comp = TrajectoryCompressor.__new__(TrajectoryCompressor)
comp.config = MagicMock()
@@ -36,7 +36,7 @@ class TestAsyncClientLazyCreation:
def test_get_async_client_creates_new_client(self):
"""_get_async_client() should create a fresh AsyncOpenAI instance."""
from trajectory_compressor import TrajectoryCompressor
from scripts.trajectory_compressor import TrajectoryCompressor
comp = TrajectoryCompressor.__new__(TrajectoryCompressor)
comp.config = MagicMock()
@@ -57,7 +57,7 @@ class TestAsyncClientLazyCreation:
def test_get_async_client_creates_fresh_each_call(self):
"""Each call to _get_async_client() creates a NEW client instance,
so it binds to the current event loop."""
from trajectory_compressor import TrajectoryCompressor
from scripts.trajectory_compressor import TrajectoryCompressor
comp = TrajectoryCompressor.__new__(TrajectoryCompressor)
comp.config = MagicMock()
@@ -91,7 +91,7 @@ class TestSourceLineVerification:
def _read_file() -> str:
import os
base = os.path.dirname(os.path.dirname(__file__))
with open(os.path.join(base, "trajectory_compressor.py")) as f:
with open(os.path.join(base, "scripts", "trajectory_compressor.py")) as f:
return f.read()
def test_no_eager_async_openai_in_init(self):
@@ -119,7 +119,7 @@ class TestSourceLineVerification:
@pytest.mark.asyncio
async def test_generate_summary_async_kimi_omits_temperature():
"""Kimi models should have temperature omitted — server manages it."""
from trajectory_compressor import CompressionConfig, TrajectoryCompressor, TrajectoryMetrics
from scripts.trajectory_compressor import CompressionConfig, TrajectoryCompressor, TrajectoryMetrics
config = CompressionConfig(
summarization_model="kimi-for-coding",
@@ -147,7 +147,7 @@ async def test_generate_summary_async_kimi_omits_temperature():
@pytest.mark.asyncio
async def test_generate_summary_async_public_moonshot_kimi_k2_5_omits_temperature():
"""kimi-k2.5 on the public Moonshot API should not get a forced temperature."""
from trajectory_compressor import CompressionConfig, TrajectoryCompressor, TrajectoryMetrics
from scripts.trajectory_compressor import CompressionConfig, TrajectoryCompressor, TrajectoryMetrics
config = CompressionConfig(
summarization_model="kimi-k2.5",
@@ -176,7 +176,7 @@ async def test_generate_summary_async_public_moonshot_kimi_k2_5_omits_temperatur
@pytest.mark.asyncio
async def test_generate_summary_async_public_moonshot_cn_kimi_k2_5_omits_temperature():
"""kimi-k2.5 on api.moonshot.cn should not get a forced temperature."""
from trajectory_compressor import CompressionConfig, TrajectoryCompressor, TrajectoryMetrics
from scripts.trajectory_compressor import CompressionConfig, TrajectoryCompressor, TrajectoryMetrics
config = CompressionConfig(
summarization_model="kimi-k2.5",

View file

@@ -87,7 +87,7 @@ class TestTrajectoryCompressorNullGuard:
def test_null_base_url_does_not_crash(self):
"""base_url=None should not crash _detect_provider()."""
from trajectory_compressor import CompressionConfig, TrajectoryCompressor
from scripts.trajectory_compressor import CompressionConfig, TrajectoryCompressor
config = CompressionConfig()
config.base_url = None
@@ -101,7 +101,7 @@ class TestTrajectoryCompressorNullGuard:
def test_config_loading_null_base_url_keeps_default(self):
"""YAML ``summarization: {base_url: null}`` should keep default."""
from trajectory_compressor import CompressionConfig
from scripts.trajectory_compressor import CompressionConfig
from hermes_constants import OPENROUTER_BASE_URL
config = CompressionConfig()