fix: remove 115 verified dead code symbols across 46 production files

Automated dead-code audit using the intersection of vulture, coverage.py, and
ast-grep results, confirmed by an Opus deep-verification pass. Every symbol was
verified to have zero production callers (test imports are excluded from
reachability analysis).
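
For illustration only, a minimal sketch of how the three signals might be
intersected; the flags, paths, and helper names below are assumptions for
this sketch, not the actual audit script:

import json
import subprocess

def vulture_candidates(paths):
    """Collect 'file:line' locations that vulture flags as unused."""
    out = subprocess.run(
        ["vulture", *paths, "--min-confidence", "80"],
        capture_output=True, text=True,
    ).stdout
    # vulture prints e.g.: agent/foo.py:42: unused function 'bar' (90% confidence)
    return {line.split(": ", 1)[0] for line in out.splitlines() if ": unused" in line}

def uncovered_lines(coverage_json):
    """Collect 'file:line' keys that coverage.py reports as never executed."""
    with open(coverage_json) as f:
        data = json.load(f)  # produced by `coverage json`
    return {
        f"{path}:{line}"
        for path, info in data["files"].items()
        for line in info["missing_lines"]
    }

def has_production_callers(symbol):
    """Check with ast-grep whether any call site exists outside tests/."""
    out = subprocess.run(
        ["ast-grep", "run", "--pattern", f"{symbol}($$$)",
         "--lang", "python", "--json", "."],
        capture_output=True, text=True,
    ).stdout
    return any(not m["file"].startswith("tests/") for m in json.loads(out))

dead = vulture_candidates(["agent/", "hermes_cli/"]) & uncovered_lines("coverage.json")
for loc in sorted(dead):
    print(loc)  # candidates handed to the manual (Opus) verification pass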

Removes ~1,534 lines of dead production code across 46 files and ~1,382 lines
of stale test code. Three files are deleted entirely (agent/builtin_memory_provider.py,
hermes_cli/checklist.py, tests/hermes_cli/test_setup_model_selection.py).

Co-authored-by: alt-glitch <balyan.sid@gmail.com>
alt-glitch authored 2026-04-10 03:03:30 -07:00, committed by Teknium
parent 04baab5422
commit 96c060018a
70 changed files with 876 additions and 2877 deletions

@@ -919,68 +919,6 @@ Write only the summary, starting with "[CONTEXT SUMMARY]:" prefix."""
        return result, metrics

    def process_file(
        self,
        input_path: Path,
        output_path: Path,
        progress_callback: Optional[Callable[[TrajectoryMetrics], None]] = None
    ) -> List[TrajectoryMetrics]:
        """
        Process a single JSONL file.

        Args:
            input_path: Path to input JSONL file
            output_path: Path to output JSONL file
            progress_callback: Optional callback called after each entry with its metrics

        Returns:
            List of metrics for each trajectory
        """
        file_metrics = []

        # Read all entries
        entries = []
        with open(input_path, 'r', encoding='utf-8') as f:
            for line_num, line in enumerate(f, 1):
                line = line.strip()
                if line:
                    try:
                        entries.append(json.loads(line))
                    except json.JSONDecodeError as e:
                        self.logger.warning(f"Skipping invalid JSON at {input_path}:{line_num}: {e}")

        # Process entries
        processed_entries = []
        for entry in entries:
            try:
                processed_entry, metrics = self.process_entry(entry)
                processed_entries.append(processed_entry)
                file_metrics.append(metrics)
                self.aggregate_metrics.add_trajectory_metrics(metrics)

                # Call progress callback if provided
                if progress_callback:
                    progress_callback(metrics)
            except Exception as e:
                self.logger.error(f"Error processing entry: {e}")
                self.aggregate_metrics.trajectories_failed += 1
                # Keep original entry on error
                processed_entries.append(entry)
                empty_metrics = TrajectoryMetrics()
                file_metrics.append(empty_metrics)
                if progress_callback:
                    progress_callback(empty_metrics)

        # Write output
        output_path.parent.mkdir(parents=True, exist_ok=True)
        with open(output_path, 'w', encoding='utf-8') as f:
            for entry in processed_entries:
                f.write(json.dumps(entry, ensure_ascii=False) + '\n')

        return file_metrics

    def process_directory(self, input_dir: Path, output_dir: Path):
        """
        Process all JSONL files in a directory using async parallel processing.