mirror of
https://github.com/ChuckBuilds/LEDMatrix.git
synced 2026-04-10 13:02:59 +00:00
Three interacting bugs reported by a user (Discord/ericepe) on a fresh install: 1. The state reconciler retried failed auto-repairs on every HTTP request, pegging CPU and flooding logs with "Plugin not found in registry: github / youtube". Root cause: ``_run_startup_reconciliation`` reset ``_reconciliation_started`` to False on any unresolved inconsistency, so ``@app.before_request`` re-fired the entire pass on the next request. Fix: run reconciliation exactly once per process; cache per-plugin unrecoverable failures inside the reconciler so even an explicit re-trigger stays cheap; add a registry pre-check to skip the expensive GitHub fetch when we already know the plugin is missing; expose ``force=True`` on ``/plugins/state/reconcile`` so users can retry after fixing the underlying issue. 2. Uninstalling a plugin via the UI succeeded but the plugin reappeared. Root cause: a race between ``store_manager.uninstall_plugin`` (removes files) and ``cleanup_plugin_config`` (removes config entry) — if reconciliation fired in the gap it saw "config entry with no files" and reinstalled. Fix: reorder uninstall to clean config FIRST, drop a short-lived "recently uninstalled" tombstone on the store manager that the reconciler honors, and pass ``store_manager`` to the manual ``/plugins/state/reconcile`` endpoint (it was previously omitted, which silently disabled auto-repair entirely). 3. ``GET /plugins/installed`` was very slow on a Pi4 (UI hung on "connecting to display" for minutes, ~98% CPU). Root causes: per-request ``discover_plugins()`` + manifest re-read + four ``git`` subprocesses per plugin (``rev-parse``, ``--abbrev-ref``, ``config``, ``log``). Fix: mtime-gate ``discover_plugins()`` and drop the per-plugin manifest re-read in the endpoint; cache ``_get_local_git_info`` keyed on ``.git/HEAD`` mtime so subprocesses only run when the working copy actually moved; bump registry cache TTL from 5 to 15 minutes and fall back to stale cache on transient network failure. 
Tests: 16 reconciliation cases (including 5 new ones covering the unrecoverable cache, force-reconcile path, transient-failure handling, and recently-uninstalled tombstone) and 8 new store_manager cache tests covering tombstone TTL, git-info mtime cache hit/miss, and the registry stale-cache fallback. All 24 pass; the broader 288-test suite continues to pass with no new failures. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
152 lines
5.7 KiB
Python
152 lines
5.7 KiB
Python
"""
|
|
Tests for the caching and tombstone behaviors added to PluginStoreManager
|
|
to fix the plugin-list slowness and the uninstall-resurrection bugs.
|
|
|
|
Coverage targets:
|
|
- ``mark_recently_uninstalled`` / ``was_recently_uninstalled`` lifecycle and
|
|
TTL expiry.
|
|
- ``_get_local_git_info`` mtime-gated cache: ``git`` subprocesses only run
|
|
when ``.git/HEAD`` mtime changes.
|
|
- ``fetch_registry`` stale-cache fallback on network failure.
|
|
"""
|
|
|
|
import os
|
|
import time
|
|
import unittest
|
|
from pathlib import Path
|
|
from tempfile import TemporaryDirectory
|
|
from unittest.mock import patch, MagicMock
|
|
|
|
from src.plugin_system.store_manager import PluginStoreManager
|
|
|
|
|
|
class TestUninstallTombstone(unittest.TestCase):
    """Lifecycle of the 'recently uninstalled' tombstones on the store manager.

    The tombstone prevents the state reconciler from resurrecting a plugin
    whose files were just removed but whose config cleanup is still in flight.
    """

    def setUp(self):
        workdir = TemporaryDirectory()
        self.addCleanup(workdir.cleanup)
        self._tmp = workdir
        self.sm = PluginStoreManager(plugins_dir=workdir.name)

    def test_unmarked_plugin_is_not_recent(self):
        # A plugin that was never uninstalled must not report a tombstone.
        self.assertFalse(self.sm.was_recently_uninstalled("foo"))

    def test_marking_makes_it_recent(self):
        # Marking takes effect immediately for subsequent queries.
        self.sm.mark_recently_uninstalled("foo")
        self.assertTrue(self.sm.was_recently_uninstalled("foo"))

    def test_tombstone_expires_after_ttl(self):
        # Shrink the TTL so expiry is observable without a long sleep.
        self.sm._uninstall_tombstone_ttl = 0.05
        self.sm.mark_recently_uninstalled("foo")
        self.assertTrue(self.sm.was_recently_uninstalled("foo"))

        time.sleep(0.1)

        self.assertFalse(self.sm.was_recently_uninstalled("foo"))
        # Expired entry should also be pruned from the dict.
        self.assertNotIn("foo", self.sm._uninstall_tombstones)
|
|
|
|
|
|
class TestGitInfoCache(unittest.TestCase):
    """mtime-gated cache around ``_get_local_git_info``.

    The cache key is the mtime of ``.git/HEAD``: git subprocesses should run
    only when that file changes (i.e. the working copy actually moved).
    """

    def setUp(self):
        workdir = TemporaryDirectory()
        self.addCleanup(workdir.cleanup)
        self._tmp = workdir
        self.plugins_dir = Path(workdir.name)
        self.sm = PluginStoreManager(plugins_dir=str(self.plugins_dir))

        # Minimal fake git checkout: .git/HEAD needs to exist so the cache
        # key (its mtime) is stable, but we mock subprocess so no actual git
        # is required.
        self.plugin_path = self.plugins_dir / "plg"
        git_dir = self.plugin_path / ".git"
        git_dir.mkdir(parents=True)
        (git_dir / "HEAD").write_text("ref: refs/heads/main\n")

    def _fake_subprocess_run(self, *args, **kwargs):
        # Hand back a dummy stdout per git subcommand so every code path that
        # parses output succeeds. Branch order is significant: a plain
        # ``rev-parse HEAD`` yields the sha, while ``rev-parse --abbrev-ref``
        # yields the branch name.
        cmd = args[0]
        matchers = (
            (lambda c: "rev-parse" in c and "HEAD" in c and "--abbrev-ref" not in c,
             "abcdef1234567890\n"),
            (lambda c: "--abbrev-ref" in c, "main\n"),
            (lambda c: "config" in c, "https://example.com/repo.git\n"),
            (lambda c: "log" in c, "2026-04-08T12:00:00+00:00\n"),
        )
        fake = MagicMock()
        fake.returncode = 0
        fake.stdout = next(
            (out for matches, out in matchers if matches(cmd)), ""
        )
        return fake

    def test_cache_hits_avoid_subprocess_calls(self):
        patcher = patch(
            "src.plugin_system.store_manager.subprocess.run",
            side_effect=self._fake_subprocess_run,
        )
        with patcher as run_mock:
            first = self.sm._get_local_git_info(self.plugin_path)
            self.assertIsNotNone(first)
            self.assertEqual(first["short_sha"], "abcdef1")
            # One call each for rev-parse, --abbrev-ref, config, and log.
            baseline = run_mock.call_count
            self.assertEqual(baseline, 4)

            # Second call with unchanged HEAD: zero new subprocess calls.
            second = self.sm._get_local_git_info(self.plugin_path)
            self.assertEqual(second, first)
            self.assertEqual(run_mock.call_count, baseline)

    def test_cache_invalidates_on_head_mtime_change(self):
        patcher = patch(
            "src.plugin_system.store_manager.subprocess.run",
            side_effect=self._fake_subprocess_run,
        )
        with patcher as run_mock:
            self.sm._get_local_git_info(self.plugin_path)
            baseline = run_mock.call_count

            # Bump mtime on .git/HEAD to simulate a new commit being checked out.
            head = self.plugin_path / ".git" / "HEAD"
            bumped = head.stat().st_mtime + 10
            os.utime(head, (bumped, bumped))

            self.sm._get_local_git_info(self.plugin_path)
            # All four git subcommands run again after invalidation.
            self.assertEqual(run_mock.call_count, baseline + 4)

    def test_no_git_directory_returns_none(self):
        plain_dir = self.plugins_dir / "no_git"
        plain_dir.mkdir()
        self.assertIsNone(self.sm._get_local_git_info(plain_dir))
|
|
|
|
|
|
class TestRegistryStaleCacheFallback(unittest.TestCase):
    """``fetch_registry`` degradation when the network fetch fails.

    A transient failure should serve the last-known-good cached registry even
    if it is past its TTL; with no cache at all, an empty registry is returned.
    """

    def setUp(self):
        workdir = TemporaryDirectory()
        self.addCleanup(workdir.cleanup)
        self._tmp = workdir
        self.sm = PluginStoreManager(plugins_dir=workdir.name)

    def test_network_failure_returns_stale_cache(self):
        # Prime the cache with a known-good registry, aged well past the
        # timeout so fetch_registry is forced to attempt a network re-fetch.
        self.sm.registry_cache = {"plugins": [{"id": "cached"}]}
        self.sm.registry_cache_time = time.time() - 10_000  # very old
        self.sm.registry_cache_timeout = 1  # force re-fetch attempt

        import requests as real_requests
        failure = real_requests.RequestException("boom")
        with patch.object(self.sm, "_http_get_with_retries", side_effect=failure):
            result = self.sm.fetch_registry()

        self.assertEqual(result, {"plugins": [{"id": "cached"}]})

    def test_network_failure_with_no_cache_returns_empty(self):
        self.sm.registry_cache = None

        import requests as real_requests
        failure = real_requests.RequestException("boom")
        with patch.object(self.sm, "_http_get_with_retries", side_effect=failure):
            result = self.sm.fetch_registry()

        self.assertEqual(result, {"plugins": []})
|
|
|
|
|
|
# Allow running this test module directly (e.g. ``python <this file>``)
# in addition to discovery via pytest/unittest.
if __name__ == "__main__":
    unittest.main()
|