refactor: remove dead code — 1,784 lines across 77 files (#9180)

Deep scan with vulture, pyflakes, and manual cross-referencing identified:
- 41 dead functions/methods (zero callers in production)
- 7 production-dead functions (their only callers were tests, which were also deleted)
- 5 dead constants/variables
- ~35 unused imports across agent/, hermes_cli/, tools/, gateway/

Categories of dead code removed:
- Refactoring leftovers: _set_default_model, _setup_copilot_reasoning_selection,
  rebuild_lookups, clear_session_context, get_logs_dir, clear_session
- Unused API surface: search_models_dev, get_pricing, skills_categories,
  get_read_files_summary, clear_read_tracker, menu_labels, get_spinner_list
- Dead compatibility wrappers: schedule_cronjob, list_cronjobs, remove_cronjob
- Stale debug helpers: get_debug_session_info copies in 4 tool files
  (centralized version in debug_helpers.py already exists)
- Dead gateway methods: send_emote, send_notice (matrix), send_reaction
  (bluebubbles), _normalize_inbound_text (feishu), fetch_room_history
  (matrix), _start_typing_indicator (signal), parse_feishu_post_content
- Dead constants: NOUS_API_BASE_URL, SKILLS_TOOL_DESCRIPTION,
  FILE_TOOLS, VALID_ASPECT_RATIOS, MEMORY_DIR
- Unused UI code: _interactive_provider_selection,
  _interactive_model_selection (superseded by prompt_toolkit picker)

Test suite verified: all 609 tests covering the affected files pass.
Tests for the removed functions were deleted. Tests that used removed
utilities (clear_read_tracker, MEMORY_DIR) were updated to call the
internal APIs directly.
This commit is contained in:
Teknium 2026-04-13 16:32:04 -07:00 committed by GitHub
parent a66fc1365d
commit 8d023e43ed
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
77 changed files with 44 additions and 1784 deletions

View file

@@ -16,11 +16,11 @@ from unittest.mock import patch, MagicMock
from tools.file_tools import (
read_file_tool,
clear_read_tracker,
reset_file_dedup,
_is_blocked_device,
_get_max_read_chars,
_DEFAULT_MAX_READ_CHARS,
_read_tracker,
)
@@ -95,10 +95,10 @@ class TestCharacterCountGuard(unittest.TestCase):
"""Large reads should be rejected with guidance to use offset/limit."""
def setUp(self):
clear_read_tracker()
_read_tracker.clear()
def tearDown(self):
clear_read_tracker()
_read_tracker.clear()
@patch("tools.file_tools._get_file_ops")
@patch("tools.file_tools._get_max_read_chars", return_value=_DEFAULT_MAX_READ_CHARS)
@@ -145,14 +145,14 @@ class TestFileDedup(unittest.TestCase):
"""Re-reading an unchanged file should return a lightweight stub."""
def setUp(self):
clear_read_tracker()
_read_tracker.clear()
self._tmpdir = tempfile.mkdtemp()
self._tmpfile = os.path.join(self._tmpdir, "dedup_test.txt")
with open(self._tmpfile, "w") as f:
f.write("line one\nline two\n")
def tearDown(self):
clear_read_tracker()
_read_tracker.clear()
try:
os.unlink(self._tmpfile)
os.rmdir(self._tmpdir)
@@ -224,14 +224,14 @@ class TestDedupResetOnCompression(unittest.TestCase):
reads return full content."""
def setUp(self):
clear_read_tracker()
_read_tracker.clear()
self._tmpdir = tempfile.mkdtemp()
self._tmpfile = os.path.join(self._tmpdir, "compress_test.txt")
with open(self._tmpfile, "w") as f:
f.write("original content\n")
def tearDown(self):
clear_read_tracker()
_read_tracker.clear()
try:
os.unlink(self._tmpfile)
os.rmdir(self._tmpdir)
@@ -305,10 +305,10 @@ class TestLargeFileHint(unittest.TestCase):
"""Large truncated files should include a hint about targeted reads."""
def setUp(self):
clear_read_tracker()
_read_tracker.clear()
def tearDown(self):
clear_read_tracker()
_read_tracker.clear()
@patch("tools.file_tools._get_file_ops")
def test_large_truncated_file_gets_hint(self, mock_ops):
@@ -341,13 +341,13 @@ class TestConfigOverride(unittest.TestCase):
"""file_read_max_chars in config.yaml should control the char guard."""
def setUp(self):
clear_read_tracker()
_read_tracker.clear()
# Reset the cached value so each test gets a fresh lookup
import tools.file_tools as _ft
_ft._max_read_chars_cached = None
def tearDown(self):
clear_read_tracker()
_read_tracker.clear()
import tools.file_tools as _ft
_ft._max_read_chars_cached = None