mirror of
https://github.com/ChuckBuilds/LEDMatrix.git
synced 2026-04-11 05:13:01 +00:00
Compare commits
1 Commits
941291561a
...
feat/confi
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a84b65fffb |
597
src/backup_manager.py
Normal file
597
src/backup_manager.py
Normal file
@@ -0,0 +1,597 @@
|
||||
"""
|
||||
User configuration backup and restore.
|
||||
|
||||
Packages the user's LEDMatrix configuration, secrets, WiFi settings,
|
||||
user-uploaded fonts, plugin image uploads, and installed-plugin manifest
|
||||
into a single ``.zip`` that can be exported from one installation and
|
||||
imported on a fresh install.
|
||||
|
||||
This module is intentionally Flask-free so it can be unit-tested and
|
||||
used from scripts.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import io
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import socket
|
||||
import tempfile
|
||||
import zipfile
|
||||
from dataclasses import dataclass, field, asdict
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Schema version written into every backup manifest. Bump when the archive
# layout changes incompatibly.
SCHEMA_VERSION = 1

# Filenames shipped with the LEDMatrix repository under ``assets/fonts/``.
# Anything present on disk but NOT in this set is treated as a user upload
# and included in backups. Keep this snapshot in sync with the repo —
# regenerate with::
#
#     ls assets/fonts/
#
# Tests assert the set matches the checked-in fonts.
BUNDLED_FONTS: frozenset[str] = frozenset((
    "10x20.bdf", "4x6.bdf", "4x6-font.ttf", "5by7.regular.ttf",
    "5x7.bdf", "5x8.bdf",
    "6x9.bdf", "6x10.bdf", "6x12.bdf", "6x13.bdf", "6x13B.bdf", "6x13O.bdf",
    "7x13.bdf", "7x13B.bdf", "7x13O.bdf", "7x14.bdf", "7x14B.bdf",
    "8x13.bdf", "8x13B.bdf", "8x13O.bdf",
    "9x15.bdf", "9x15B.bdf", "9x18.bdf", "9x18B.bdf",
    "AUTHORS", "bdf_font_guide",
    "clR6x12.bdf", "helvR12.bdf", "ic8x8u.bdf",
    "MatrixChunky8.bdf", "MatrixChunky8X.bdf",
    "MatrixLight6.bdf", "MatrixLight6X.bdf", "MatrixLight8X.bdf",
    "PressStart2P-Regular.ttf",
    "README", "README.md",
    "texgyre-27.bdf", "tom-thumb.bdf",
))

# Relative paths inside the project that the backup knows how to round-trip.
_CONFIG_REL = Path("config/config.json")
_SECRETS_REL = Path("config/config_secrets.json")
_WIFI_REL = Path("config/wifi_config.json")
_FONTS_REL = Path("assets/fonts")
_PLUGIN_UPLOADS_REL = Path("assets/plugins")
_STATE_REL = Path("data/plugin_state.json")

# Well-known member names inside the backup archive.
MANIFEST_NAME = "manifest.json"
PLUGINS_MANIFEST_NAME = "plugins.json"

# Hard cap on the size of a single file we'll accept inside an uploaded ZIP
# to limit zip-bomb risk. 50 MB matches the existing plugin-image upload cap.
_MAX_MEMBER_BYTES = 50 * 1024 * 1024
# Hard cap on the total uncompressed size of an uploaded ZIP.
_MAX_TOTAL_BYTES = 200 * 1024 * 1024
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Data classes
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
@dataclass
class RestoreOptions:
    """Flags selecting which backup sections a restore should apply.

    Every section defaults to ``True`` so a default-constructed instance
    restores everything the archive contains.
    """

    restore_config: bool = True          # config/config.json
    restore_secrets: bool = True         # config/config_secrets.json
    restore_wifi: bool = True            # config/wifi_config.json
    restore_fonts: bool = True           # user-uploaded assets/fonts files
    restore_plugin_uploads: bool = True  # assets/plugins/*/uploads trees
    reinstall_plugins: bool = True       # surface plugins.json reinstall list
|
||||
|
||||
|
||||
@dataclass
class RestoreResult:
    """Structured outcome of a restore operation.

    ``success`` is set by the caller to True only when no errors were
    recorded. All collection fields use ``default_factory`` so every
    instance gets its own fresh containers.
    """

    success: bool = False
    restored: List[str] = field(default_factory=list)
    skipped: List[str] = field(default_factory=list)
    plugins_to_install: List[Dict[str, Any]] = field(default_factory=list)
    plugins_installed: List[str] = field(default_factory=list)
    plugins_failed: List[Dict[str, str]] = field(default_factory=list)
    errors: List[str] = field(default_factory=list)
    manifest: Dict[str, Any] = field(default_factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Return a plain-dict view suitable for JSON serialization."""
        return asdict(self)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Manifest helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _ledmatrix_version(project_root: Path) -> str:
    """Best-effort version string for the current install.

    Tries, in order: a top-level ``VERSION`` file, then the git HEAD
    (resolving symbolic refs to a short SHA). Falls back to "unknown".
    """
    # VERSION file first — read eagerly, ignore filesystem errors.
    try:
        return (project_root / "VERSION").read_text(encoding="utf-8").strip() or "unknown"
    except OSError:
        pass

    head_file = project_root / ".git" / "HEAD"
    if head_file.exists():
        try:
            head = head_file.read_text(encoding="utf-8").strip()
            if head.startswith("ref: "):
                # Symbolic ref — chase it to the commit SHA when present.
                ref_path = project_root / ".git" / head[5:]
                if ref_path.exists():
                    return ref_path.read_text(encoding="utf-8").strip()[:12] or "unknown"
            return head[:12] or "unknown"
        except OSError:
            pass
    return "unknown"
|
||||
|
||||
|
||||
def _build_manifest(contents: List[str], project_root: Path) -> Dict[str, Any]:
    """Assemble the top-level manifest describing a backup archive."""
    # ISO-8601 UTC timestamp with a trailing "Z" instead of "+00:00".
    created = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
    return {
        "schema_version": SCHEMA_VERSION,
        "created_at": created,
        "ledmatrix_version": _ledmatrix_version(project_root),
        "hostname": socket.gethostname(),
        "contents": contents,
    }
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Installed-plugin enumeration
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def list_installed_plugins(project_root: Path) -> List[Dict[str, Any]]:
    """
    Enumerate installed plugins for the backup manifest.

    The primary source is ``data/plugin_state.json``; the
    ``plugin-repos/`` directory is then scanned so plugins missing from
    the state file are still reported. Each entry carries ``plugin_id``,
    ``version`` and ``enabled``, sorted by ``plugin_id``.
    """
    found: Dict[str, Dict[str, Any]] = {}

    state_file = project_root / _STATE_REL
    if state_file.exists():
        try:
            state = json.loads(state_file.read_text(encoding="utf-8"))
        except (OSError, json.JSONDecodeError) as e:
            logger.warning("Could not read plugin_state.json: %s", e)
        else:
            entries = state.get("plugins", {}) if isinstance(state, dict) else {}
            if isinstance(entries, dict):
                for pid, info in entries.items():
                    if isinstance(info, dict):
                        found[pid] = {
                            "plugin_id": pid,
                            "version": info.get("version") or "",
                            "enabled": bool(info.get("enabled", True)),
                        }

    # Fall back to scanning plugin-repos/ for manifests; the state file
    # (when readable) takes precedence for duplicate ids.
    repos_dir = project_root / "plugin-repos"
    if repos_dir.exists():
        for repo in sorted(repos_dir.iterdir()):
            manifest_path = repo / "manifest.json"
            if not repo.is_dir() or not manifest_path.exists():
                continue
            try:
                data = json.loads(manifest_path.read_text(encoding="utf-8"))
            except (OSError, json.JSONDecodeError):
                continue
            pid = data.get("id") or repo.name
            found.setdefault(pid, {
                "plugin_id": pid,
                "version": data.get("version", ""),
                "enabled": True,
            })

    return sorted(found.values(), key=lambda entry: entry["plugin_id"])
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Font filtering
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def iter_user_fonts(project_root: Path) -> List[Path]:
    """Return absolute paths to user-uploaded fonts.

    A "user font" is any regular file in ``assets/fonts/`` whose name is
    not listed in :data:`BUNDLED_FONTS`. Results are sorted by path.
    """
    fonts_dir = project_root / _FONTS_REL
    if not fonts_dir.exists():
        return []
    return [
        candidate
        for candidate in sorted(fonts_dir.iterdir())
        if candidate.is_file() and candidate.name not in BUNDLED_FONTS
    ]
|
||||
|
||||
|
||||
def iter_plugin_uploads(project_root: Path) -> List[Path]:
    """Return every file under ``assets/plugins/*/uploads/`` (recursive)."""
    plugins_root = project_root / _PLUGIN_UPLOADS_REL
    if not plugins_root.exists():
        return []
    collected: List[Path] = []
    for plugin_dir in sorted(plugins_root.iterdir()):
        uploads_dir = plugin_dir / "uploads"
        if not plugin_dir.is_dir() or not uploads_dir.exists():
            continue
        # Walk each uploads tree; filenames are sorted per directory.
        for dirpath, _subdirs, filenames in os.walk(uploads_dir):
            collected.extend(Path(dirpath) / fname for fname in sorted(filenames))
    return collected
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Export
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def create_backup(
    project_root: Path,
    output_dir: Optional[Path] = None,
) -> Path:
    """
    Build a backup ZIP and write it into ``output_dir`` (defaults to
    ``<project_root>/config/backups/exports/``).

    The archive is assembled entirely in memory, staged to a ``.zip.tmp``
    sibling, then moved into place with ``os.replace`` so readers never
    observe a partially-written file. The staging file is removed if the
    final write fails so we don't litter the exports directory.

    Args:
        project_root: Root of the LEDMatrix installation.
        output_dir: Destination directory; created if missing.

    Returns:
        Path to the created ``.zip`` file.
    """
    project_root = Path(project_root).resolve()
    if output_dir is None:
        output_dir = project_root / "config" / "backups" / "exports"
    output_dir.mkdir(parents=True, exist_ok=True)

    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    hostname = socket.gethostname() or "ledmatrix"
    # Sanitize the hostname so it is always safe inside a filename.
    safe_host = "".join(c for c in hostname if c.isalnum() or c in "-_") or "ledmatrix"
    zip_name = f"ledmatrix-backup-{safe_host}-{timestamp}.zip"
    zip_path = output_dir / zip_name

    contents: List[str] = []

    # Build bundle in memory first, then atomically write to final path.
    buffer = io.BytesIO()
    with zipfile.ZipFile(buffer, "w", compression=zipfile.ZIP_DEFLATED) as zf:
        # Config files — each optional; absent files are simply omitted.
        for rel, label in (
            (_CONFIG_REL, "config"),
            (_SECRETS_REL, "secrets"),
            (_WIFI_REL, "wifi"),
        ):
            src = project_root / rel
            if src.exists():
                zf.write(src, rel.as_posix())
                contents.append(label)

        # User-uploaded fonts (bundled fonts are excluded by iter_user_fonts).
        user_fonts = iter_user_fonts(project_root)
        if user_fonts:
            for font in user_fonts:
                zf.write(font, font.relative_to(project_root).as_posix())
            contents.append("fonts")

        # Per-plugin upload trees.
        plugin_uploads = iter_plugin_uploads(project_root)
        if plugin_uploads:
            for upload in plugin_uploads:
                zf.write(upload, upload.relative_to(project_root).as_posix())
            contents.append("plugin_uploads")

        # Installed-plugin manifest, used by restore to reinstall plugins.
        plugins = list_installed_plugins(project_root)
        if plugins:
            zf.writestr(PLUGINS_MANIFEST_NAME, json.dumps(plugins, indent=2))
            contents.append("plugins")

        # Manifest goes last so that `contents` reflects what we actually wrote.
        manifest = _build_manifest(contents, project_root)
        zf.writestr(MANIFEST_NAME, json.dumps(manifest, indent=2))

    # Write atomically; clean up the staging file on failure so a crashed
    # export doesn't leave .zip.tmp litter behind.
    tmp_path = zip_path.with_suffix(".zip.tmp")
    try:
        tmp_path.write_bytes(buffer.getvalue())
        os.replace(tmp_path, zip_path)
    except OSError:
        tmp_path.unlink(missing_ok=True)
        raise
    logger.info("Created backup %s (%d bytes)", zip_path, zip_path.stat().st_size)
    return zip_path
|
||||
|
||||
|
||||
def preview_backup_contents(project_root: Path) -> Dict[str, Any]:
    """Summarize what :func:`create_backup` would include, without writing."""
    root = Path(project_root).resolve()
    return {
        "has_config": (root / _CONFIG_REL).exists(),
        "has_secrets": (root / _SECRETS_REL).exists(),
        "has_wifi": (root / _WIFI_REL).exists(),
        "user_fonts": [font.name for font in iter_user_fonts(root)],
        "plugin_uploads": len(iter_plugin_uploads(root)),
        "plugins": list_installed_plugins(root),
    }
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Validate
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _safe_extract_path(base_dir: Path, member_name: str) -> Optional[Path]:
    """Resolve a ZIP member name against ``base_dir``.

    Returns the resolved absolute path, or ``None`` when the name is
    unsafe (absolute path, Windows drive prefix, or a relative path that
    escapes ``base_dir`` after resolution).
    """
    # Reject absolute paths and Windows-style drives outright.
    looks_absolute = member_name.startswith(("/", "\\"))
    looks_drive = len(member_name) >= 2 and member_name[1] == ":"
    if looks_absolute or looks_drive:
        return None
    resolved = (base_dir / member_name).resolve()
    try:
        resolved.relative_to(base_dir.resolve())
    except ValueError:
        # Path traversal (e.g. "..") escaped the base directory.
        return None
    return resolved
|
||||
|
||||
|
||||
def validate_backup(zip_path: Path) -> Tuple[bool, str, Dict[str, Any]]:
    """
    Inspect a backup ZIP without extracting anything to disk.

    Checks archive integrity, per-member and total uncompressed size
    caps, member path safety, and manifest schema compatibility.

    Returns:
        ``(ok, error_message, manifest_dict)``. On success
        ``manifest_dict`` is the parsed manifest augmented with
        diagnostics: ``detected_contents`` (section names present in the
        archive), ``plugins`` (parsed plugins.json if present),
        ``total_uncompressed`` (sum of uncompressed member sizes) and
        ``file_count``.
    """
    zip_path = Path(zip_path)
    if not zip_path.exists():
        return False, f"Backup file not found: {zip_path}", {}

    try:
        with zipfile.ZipFile(zip_path, "r") as archive:
            members = archive.namelist()
            if MANIFEST_NAME not in members:
                return False, "Backup is missing manifest.json", {}

            running_total = 0
            for member in archive.infolist():
                if member.file_size > _MAX_MEMBER_BYTES:
                    return False, f"Member {member.filename} is too large", {}
                running_total += member.file_size
                if running_total > _MAX_TOTAL_BYTES:
                    return False, "Backup exceeds maximum allowed size", {}
                # Safety: reject members with unsafe paths up front.
                if _safe_extract_path(Path("/tmp/_zip_check"), member.filename) is None:
                    return False, f"Unsafe path in backup: {member.filename}", {}

            try:
                manifest = json.loads(archive.read(MANIFEST_NAME).decode("utf-8"))
            except (OSError, UnicodeDecodeError, json.JSONDecodeError) as e:
                return False, f"Invalid manifest.json: {e}", {}

            if not isinstance(manifest, dict) or "schema_version" not in manifest:
                return False, "Invalid manifest structure", {}
            if manifest.get("schema_version") != SCHEMA_VERSION:
                return (
                    False,
                    f"Unsupported backup schema version: {manifest.get('schema_version')}",
                    {},
                )

            fonts_prefix = _FONTS_REL.as_posix() + "/"
            uploads_prefix = _PLUGIN_UPLOADS_REL.as_posix() + "/"
            detected = [
                label
                for label, present in (
                    ("config", _CONFIG_REL.as_posix() in members),
                    ("secrets", _SECRETS_REL.as_posix() in members),
                    ("wifi", _WIFI_REL.as_posix() in members),
                    ("fonts", any(n.startswith(fonts_prefix) for n in members)),
                    ("plugin_uploads", any(n.startswith(uploads_prefix) for n in members)),
                )
                if present
            ]

            # plugins.json is optional and best-effort: a malformed or
            # non-list payload is treated as "no plugins" rather than an error.
            plugins: List[Dict[str, Any]] = []
            if PLUGINS_MANIFEST_NAME in members:
                try:
                    parsed = json.loads(archive.read(PLUGINS_MANIFEST_NAME).decode("utf-8"))
                except (OSError, UnicodeDecodeError, json.JSONDecodeError):
                    parsed = None
                if isinstance(parsed, list):
                    plugins = parsed
                    detected.append("plugins")

            summary = dict(manifest)
            summary["detected_contents"] = detected
            summary["plugins"] = plugins
            summary["total_uncompressed"] = running_total
            summary["file_count"] = len(members)
            return True, "", summary
    except zipfile.BadZipFile:
        return False, "File is not a valid ZIP archive", {}
    except OSError as e:
        return False, f"Could not read backup: {e}", {}
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Restore
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _extract_zip_safe(zip_path: Path, dest_dir: Path) -> None:
    """Extract ``zip_path`` into ``dest_dir``, rejecting unsafe members.

    Raises:
        ValueError: if any member path would escape ``dest_dir``.
    """
    with zipfile.ZipFile(zip_path, "r") as archive:
        for member in archive.infolist():
            destination = _safe_extract_path(dest_dir, member.filename)
            if destination is None:
                raise ValueError(f"Unsafe path in backup: {member.filename}")
            if member.is_dir():
                destination.mkdir(parents=True, exist_ok=True)
            else:
                destination.parent.mkdir(parents=True, exist_ok=True)
                # Stream in 64 KiB chunks to bound memory usage.
                with archive.open(member, "r") as src, open(destination, "wb") as dst:
                    shutil.copyfileobj(src, dst, length=64 * 1024)
|
||||
|
||||
|
||||
def _copy_file(src: Path, dst: Path) -> None:
    """Copy ``src`` to ``dst`` (metadata-preserving), creating parents."""
    parent = dst.parent
    parent.mkdir(parents=True, exist_ok=True)
    shutil.copy2(src, dst)
|
||||
|
||||
|
||||
def _restore_single_file(
    tmp_dir: Path,
    project_root: Path,
    rel: Path,
    label: str,
    enabled: bool,
    result: RestoreResult,
) -> None:
    """Restore one standalone config file from the extracted backup.

    No-op when ``rel`` is absent from the backup; records ``label`` in
    ``result.skipped`` when present but disabled by the caller.
    """
    src = tmp_dir / rel
    if not src.exists():
        return
    if not enabled:
        result.skipped.append(label)
        return
    try:
        _copy_file(src, project_root / rel)
        result.restored.append(label)
    except OSError as e:
        result.errors.append(f"Failed to restore {rel.name}: {e}")


def _restore_fonts(
    tmp_dir: Path, project_root: Path, enabled: bool, result: RestoreResult
) -> None:
    """Restore user fonts, skipping names that collide with bundled fonts."""
    tmp_fonts = tmp_dir / _FONTS_REL
    if not tmp_fonts.exists():
        return
    if not enabled:
        result.skipped.append("fonts")
        return
    restored_count = 0
    for font in sorted(tmp_fonts.iterdir()):
        if not font.is_file():
            continue
        if font.name in BUNDLED_FONTS:
            # Never overwrite a repo-shipped font with a backup copy.
            result.skipped.append(f"font:{font.name} (bundled)")
            continue
        try:
            _copy_file(font, project_root / _FONTS_REL / font.name)
            restored_count += 1
        except OSError as e:
            result.errors.append(f"Failed to restore font {font.name}: {e}")
    if restored_count:
        result.restored.append(f"fonts ({restored_count})")


def _restore_uploads(
    tmp_dir: Path, project_root: Path, enabled: bool, result: RestoreResult
) -> None:
    """Restore per-plugin upload trees (``assets/plugins/*/uploads``)."""
    tmp_uploads = tmp_dir / _PLUGIN_UPLOADS_REL
    if not tmp_uploads.exists():
        return
    if not enabled:
        result.skipped.append("plugin_uploads")
        return
    count = 0
    for root, _dirs, files in os.walk(tmp_uploads):
        for name in files:
            src = Path(root) / name
            rel = src.relative_to(tmp_dir)
            try:
                _copy_file(src, project_root / rel)
                count += 1
            except OSError as e:
                result.errors.append(f"Failed to restore {rel}: {e}")
    if count:
        result.restored.append(f"plugin_uploads ({count})")


def _collect_plugins_to_install(tmp_dir: Path, result: RestoreResult) -> None:
    """Parse plugins.json into ``result.plugins_to_install`` for the caller."""
    plugins_file = tmp_dir / PLUGINS_MANIFEST_NAME
    if not plugins_file.exists():
        return
    try:
        plugins = json.loads(plugins_file.read_text(encoding="utf-8"))
    except (OSError, json.JSONDecodeError) as e:
        result.errors.append(f"Could not read plugins.json: {e}")
        return
    if isinstance(plugins, list):
        result.plugins_to_install = [
            {"plugin_id": p.get("plugin_id"), "version": p.get("version", "")}
            for p in plugins
            if isinstance(p, dict) and p.get("plugin_id")
        ]


def restore_backup(
    zip_path: Path,
    project_root: Path,
    options: Optional[RestoreOptions] = None,
) -> RestoreResult:
    """
    Restore ``zip_path`` into ``project_root`` according to ``options``.

    Plugin reinstalls are NOT performed here — the caller is responsible for
    walking ``result.plugins_to_install`` and calling the store manager. This
    keeps this module Flask-free and side-effect free beyond the filesystem.

    Args:
        zip_path: Backup archive produced by :func:`create_backup`.
        project_root: Root of the target LEDMatrix installation.
        options: Section toggles; defaults to restoring everything.

    Returns:
        A :class:`RestoreResult`; ``success`` is True iff no errors occurred.
    """
    if options is None:
        options = RestoreOptions()
    project_root = Path(project_root).resolve()
    result = RestoreResult()

    ok, err, manifest = validate_backup(zip_path)
    if not ok:
        result.errors.append(err)
        return result
    result.manifest = manifest

    with tempfile.TemporaryDirectory(prefix="ledmatrix_restore_") as tmp:
        tmp_dir = Path(tmp)
        try:
            _extract_zip_safe(Path(zip_path), tmp_dir)
        except (ValueError, zipfile.BadZipFile, OSError) as e:
            result.errors.append(f"Failed to extract backup: {e}")
            return result

        # The three standalone config files share identical restore logic.
        _restore_single_file(
            tmp_dir, project_root, _CONFIG_REL, "config", options.restore_config, result
        )
        _restore_single_file(
            tmp_dir, project_root, _SECRETS_REL, "secrets", options.restore_secrets, result
        )
        _restore_single_file(
            tmp_dir, project_root, _WIFI_REL, "wifi", options.restore_wifi, result
        )
        _restore_fonts(tmp_dir, project_root, options.restore_fonts, result)
        _restore_uploads(tmp_dir, project_root, options.restore_plugin_uploads, result)
        if options.reinstall_plugins:
            _collect_plugins_to_install(tmp_dir, result)

    result.success = not result.errors
    return result
|
||||
@@ -8,7 +8,7 @@ Detects and fixes inconsistencies between:
|
||||
- State manager state
|
||||
"""
|
||||
|
||||
from typing import Dict, Any, List, Optional, Set
|
||||
from typing import Dict, Any, List, Optional
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
@@ -86,38 +86,16 @@ class StateReconciliation:
|
||||
self.plugins_dir = Path(plugins_dir)
|
||||
self.store_manager = store_manager
|
||||
self.logger = get_logger(__name__)
|
||||
|
||||
# Plugin IDs that failed auto-repair and should NOT be retried this
|
||||
# process lifetime. Prevents the infinite "attempt to reinstall missing
|
||||
# plugin" loop when a config entry references a plugin that isn't in
|
||||
# the registry (e.g. legacy 'github', 'youtube' entries). A process
|
||||
# restart — or an explicit user-initiated reconcile with force=True —
|
||||
# clears this so recovery is possible after the underlying issue is
|
||||
# fixed.
|
||||
self._unrecoverable_missing_on_disk: Set[str] = set()
|
||||
|
||||
def reconcile_state(self, force: bool = False) -> ReconciliationResult:
|
||||
def reconcile_state(self) -> ReconciliationResult:
|
||||
"""
|
||||
Perform state reconciliation.
|
||||
|
||||
|
||||
Compares state from all sources and fixes safe inconsistencies.
|
||||
|
||||
Args:
|
||||
force: If True, clear the unrecoverable-plugin cache before
|
||||
reconciling so previously-failed auto-repairs are retried.
|
||||
Intended for user-initiated reconcile requests after the
|
||||
underlying issue (e.g. registry update) has been fixed.
|
||||
|
||||
|
||||
Returns:
|
||||
ReconciliationResult with findings and fixes
|
||||
"""
|
||||
if force and self._unrecoverable_missing_on_disk:
|
||||
self.logger.info(
|
||||
"Force reconcile requested; clearing %d cached unrecoverable plugin(s)",
|
||||
len(self._unrecoverable_missing_on_disk),
|
||||
)
|
||||
self._unrecoverable_missing_on_disk.clear()
|
||||
|
||||
self.logger.info("Starting state reconciliation")
|
||||
|
||||
inconsistencies = []
|
||||
@@ -302,26 +280,7 @@ class StateReconciliation:
|
||||
|
||||
# Check: Plugin in config but not on disk
|
||||
if config.get('exists_in_config') and not disk.get('exists_on_disk'):
|
||||
# Skip plugins that previously failed auto-repair in this process.
|
||||
# Re-attempting wastes CPU (network + git clone each request) and
|
||||
# spams the logs with the same "Plugin not found in registry"
|
||||
# error. The entry is still surfaced as MANUAL_FIX_REQUIRED so the
|
||||
# UI can show it, but no auto-repair will run.
|
||||
previously_unrecoverable = plugin_id in self._unrecoverable_missing_on_disk
|
||||
# Also refuse to re-install a plugin that the user just uninstalled
|
||||
# through the UI — prevents a race where the reconciler fires
|
||||
# between file removal and config cleanup and resurrects the
|
||||
# plugin the user just deleted.
|
||||
recently_uninstalled = (
|
||||
self.store_manager is not None
|
||||
and hasattr(self.store_manager, 'was_recently_uninstalled')
|
||||
and self.store_manager.was_recently_uninstalled(plugin_id)
|
||||
)
|
||||
can_repair = (
|
||||
self.store_manager is not None
|
||||
and not previously_unrecoverable
|
||||
and not recently_uninstalled
|
||||
)
|
||||
can_repair = self.store_manager is not None
|
||||
inconsistencies.append(Inconsistency(
|
||||
plugin_id=plugin_id,
|
||||
inconsistency_type=InconsistencyType.PLUGIN_MISSING_ON_DISK,
|
||||
@@ -383,13 +342,7 @@ class StateReconciliation:
|
||||
return False
|
||||
|
||||
def _auto_repair_missing_plugin(self, plugin_id: str) -> bool:
|
||||
"""Attempt to reinstall a missing plugin from the store.
|
||||
|
||||
On failure, records plugin_id in ``_unrecoverable_missing_on_disk`` so
|
||||
subsequent reconciliation passes within this process do not retry and
|
||||
spam the log / CPU. A process restart (or an explicit ``force=True``
|
||||
reconcile) is required to clear the cache.
|
||||
"""
|
||||
"""Attempt to reinstall a missing plugin from the store."""
|
||||
if not self.store_manager:
|
||||
return False
|
||||
|
||||
@@ -398,43 +351,6 @@ class StateReconciliation:
|
||||
if plugin_id.startswith('ledmatrix-'):
|
||||
candidates.append(plugin_id[len('ledmatrix-'):])
|
||||
|
||||
# Cheap pre-check: is any candidate actually present in the registry
|
||||
# at all? If not, we know up-front this is unrecoverable and can skip
|
||||
# the expensive install_plugin path (which does a forced GitHub fetch
|
||||
# before failing).
|
||||
#
|
||||
# IMPORTANT: we must pass raise_on_failure=True here. The default
|
||||
# fetch_registry() silently falls back to a stale cache or an empty
|
||||
# dict on network failure, which would make it impossible to tell
|
||||
# "plugin genuinely not in registry" from "I can't reach the
|
||||
# registry right now" — in the second case we'd end up poisoning
|
||||
# _unrecoverable_missing_on_disk with every config entry on a fresh
|
||||
# boot with no cache.
|
||||
registry_has_candidate = False
|
||||
try:
|
||||
registry = self.store_manager.fetch_registry(raise_on_failure=True)
|
||||
registry_ids = {
|
||||
p.get('id') for p in (registry.get('plugins', []) or []) if p.get('id')
|
||||
}
|
||||
registry_has_candidate = any(c in registry_ids for c in candidates)
|
||||
except Exception as e:
|
||||
# If we can't reach the registry, treat this as transient — don't
|
||||
# mark unrecoverable, let the next pass try again.
|
||||
self.logger.warning(
|
||||
"[AutoRepair] Could not read registry to check %s: %s", plugin_id, e
|
||||
)
|
||||
return False
|
||||
|
||||
if not registry_has_candidate:
|
||||
self.logger.warning(
|
||||
"[AutoRepair] %s not present in registry; marking unrecoverable "
|
||||
"(will not retry this session). Reinstall from the Plugin Store "
|
||||
"or remove the stale config entry to clear this warning.",
|
||||
plugin_id,
|
||||
)
|
||||
self._unrecoverable_missing_on_disk.add(plugin_id)
|
||||
return False
|
||||
|
||||
for candidate_id in candidates:
|
||||
try:
|
||||
self.logger.info("[AutoRepair] Attempting to reinstall missing plugin: %s", candidate_id)
|
||||
@@ -450,11 +366,6 @@ class StateReconciliation:
|
||||
except Exception as e:
|
||||
self.logger.error("[AutoRepair] Error reinstalling %s: %s", candidate_id, e, exc_info=True)
|
||||
|
||||
self.logger.warning(
|
||||
"[AutoRepair] Could not reinstall %s from store; marking unrecoverable "
|
||||
"(will not retry this session).",
|
||||
plugin_id,
|
||||
)
|
||||
self._unrecoverable_missing_on_disk.add(plugin_id)
|
||||
self.logger.warning("[AutoRepair] Could not reinstall %s from store", plugin_id)
|
||||
return False
|
||||
|
||||
|
||||
@@ -14,10 +14,9 @@ import zipfile
|
||||
import tempfile
|
||||
import requests
|
||||
import time
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import List, Dict, Optional, Any, Tuple
|
||||
from typing import List, Dict, Optional, Any
|
||||
import logging
|
||||
|
||||
from src.common.permission_utils import sudo_remove_directory
|
||||
@@ -53,89 +52,19 @@ class PluginStoreManager:
|
||||
self.registry_cache = None
|
||||
self.registry_cache_time = None # Timestamp of when registry was cached
|
||||
self.github_cache = {} # Cache for GitHub API responses
|
||||
self.cache_timeout = 3600 # 1 hour cache timeout (repo info: stars, default_branch)
|
||||
# 15 minutes for registry cache. Long enough that the plugin list
|
||||
# endpoint on a warm cache never hits the network, short enough that
|
||||
# new plugins show up within a reasonable window. See also the
|
||||
# stale-cache fallback in fetch_registry for transient network
|
||||
# failures.
|
||||
self.registry_cache_timeout = 900
|
||||
self.cache_timeout = 3600 # 1 hour cache timeout
|
||||
self.registry_cache_timeout = 300 # 5 minutes for registry cache
|
||||
self.commit_info_cache = {} # Cache for latest commit info: {key: (timestamp, data)}
|
||||
# 30 minutes for commit/manifest caches. Plugin Store users browse
|
||||
# the catalog via /plugins/store/list which fetches commit info and
|
||||
# manifest data per plugin. 5-min TTLs meant every fresh browse on
|
||||
# a Pi4 paid for ~3 HTTP requests x N plugins (30-60s serial). 30
|
||||
# minutes keeps the cache warm across a realistic session while
|
||||
# still picking up upstream updates within a reasonable window.
|
||||
self.commit_cache_timeout = 1800
|
||||
self.commit_cache_timeout = 300 # 5 minutes (same as registry)
|
||||
self.manifest_cache = {} # Cache for GitHub manifest fetches: {key: (timestamp, data)}
|
||||
self.manifest_cache_timeout = 1800
|
||||
self.manifest_cache_timeout = 300 # 5 minutes
|
||||
self.github_token = self._load_github_token()
|
||||
self._token_validation_cache = {} # Cache for token validation results: {token: (is_valid, timestamp, error_message)}
|
||||
self._token_validation_cache_timeout = 300 # 5 minutes cache for token validation
|
||||
|
||||
# Per-plugin tombstone timestamps for plugins that were uninstalled
|
||||
# recently via the UI. Used by the state reconciler to avoid
|
||||
# resurrecting a plugin the user just deleted when reconciliation
|
||||
# races against the uninstall operation. Cleared after ``_uninstall_tombstone_ttl``.
|
||||
self._uninstall_tombstones: Dict[str, float] = {}
|
||||
self._uninstall_tombstone_ttl = 300 # 5 minutes
|
||||
|
||||
# Cache for _get_local_git_info: {plugin_path_str: (signature, data)}
|
||||
# where ``signature`` is a tuple of (head_mtime, resolved_ref_mtime,
|
||||
# head_contents) so a fast-forward update to the current branch
|
||||
# (which touches .git/refs/heads/<branch> but NOT .git/HEAD) still
|
||||
# invalidates the cache. Before this cache, every
|
||||
# /plugins/installed request fired 4 git subprocesses per plugin,
|
||||
# which pegged the CPU on a Pi4 with a dozen plugins. The cached
|
||||
# ``data`` dict is the same shape returned by ``_get_local_git_info``
|
||||
# itself (sha / short_sha / branch / optional remote_url, date_iso,
|
||||
# date) — all string-keyed strings.
|
||||
self._git_info_cache: Dict[str, Tuple[Tuple, Dict[str, str]]] = {}
|
||||
|
||||
# How long to wait before re-attempting a failed GitHub metadata
|
||||
# fetch after we've already served a stale cache hit. Without this,
|
||||
# a single expired-TTL + network-error would cause every subsequent
|
||||
# request to re-hit the network (and fail again) until the network
|
||||
# actually came back — amplifying the failure and blocking request
|
||||
# handlers. Bumping the cached-entry timestamp on failure serves
|
||||
# the stale payload cheaply until the backoff expires.
|
||||
self._failure_backoff_seconds = 60
|
||||
|
||||
# Ensure plugins directory exists
|
||||
self.plugins_dir.mkdir(exist_ok=True)
|
||||
|
||||
def _record_cache_backoff(self, cache_dict: Dict, cache_key: str,
|
||||
cache_timeout: int, payload: Any) -> None:
|
||||
"""Bump a cache entry's timestamp so subsequent lookups hit the
|
||||
cache rather than re-failing over the network.
|
||||
|
||||
Used by the stale-on-error fallbacks in the GitHub metadata fetch
|
||||
paths. Without this, a cache entry whose TTL just expired would
|
||||
cause every subsequent request to re-hit the network and fail
|
||||
again until the network actually came back. We write a synthetic
|
||||
timestamp ``(now + backoff - cache_timeout)`` so the cache-valid
|
||||
check ``(now - ts) < cache_timeout`` succeeds for another
|
||||
``backoff`` seconds.
|
||||
"""
|
||||
synthetic_ts = time.time() + self._failure_backoff_seconds - cache_timeout
|
||||
cache_dict[cache_key] = (synthetic_ts, payload)
|
||||
|
||||
def mark_recently_uninstalled(self, plugin_id: str) -> None:
|
||||
"""Record that ``plugin_id`` was just uninstalled by the user."""
|
||||
self._uninstall_tombstones[plugin_id] = time.time()
|
||||
|
||||
def was_recently_uninstalled(self, plugin_id: str) -> bool:
|
||||
"""Return True if ``plugin_id`` has an active uninstall tombstone."""
|
||||
ts = self._uninstall_tombstones.get(plugin_id)
|
||||
if ts is None:
|
||||
return False
|
||||
if time.time() - ts > self._uninstall_tombstone_ttl:
|
||||
# Expired — clean up so the dict doesn't grow unbounded.
|
||||
self._uninstall_tombstones.pop(plugin_id, None)
|
||||
return False
|
||||
return True
|
||||
|
||||
def _load_github_token(self) -> Optional[str]:
|
||||
"""
|
||||
Load GitHub API token from config_secrets.json if available.
|
||||
@@ -379,25 +308,7 @@ class PluginStoreManager:
|
||||
if self.github_token:
|
||||
headers['Authorization'] = f'token {self.github_token}'
|
||||
|
||||
try:
|
||||
response = requests.get(api_url, headers=headers, timeout=10)
|
||||
except requests.RequestException as req_err:
|
||||
# Network error: prefer a stale cache hit over an
|
||||
# empty default so the UI keeps working on a flaky
|
||||
# Pi WiFi link. Bump the cached entry's timestamp
|
||||
# into a short backoff window so subsequent
|
||||
# requests serve the stale payload cheaply instead
|
||||
# of re-hitting the network on every request.
|
||||
if cache_key in self.github_cache:
|
||||
_, stale = self.github_cache[cache_key]
|
||||
self._record_cache_backoff(self.github_cache, cache_key, self.cache_timeout, stale)
|
||||
self.logger.warning(
|
||||
"GitHub repo info fetch failed for %s (%s); serving stale cache.",
|
||||
cache_key, req_err,
|
||||
)
|
||||
return stale
|
||||
raise
|
||||
|
||||
response = requests.get(api_url, headers=headers, timeout=10)
|
||||
if response.status_code == 200:
|
||||
data = response.json()
|
||||
pushed_at = data.get('pushed_at', '') or data.get('updated_at', '')
|
||||
@@ -417,20 +328,7 @@ class PluginStoreManager:
|
||||
self.github_cache[cache_key] = (time.time(), repo_info)
|
||||
return repo_info
|
||||
elif response.status_code == 403:
|
||||
# Rate limit or authentication issue. If we have a
|
||||
# previously-cached value, serve it rather than
|
||||
# returning empty defaults — a stale star count is
|
||||
# better than a reset to zero. Apply the same
|
||||
# failure-backoff bump as the network-error path
|
||||
# so we don't hammer the API with repeat requests
|
||||
# while rate-limited.
|
||||
if cache_key in self.github_cache:
|
||||
_, stale = self.github_cache[cache_key]
|
||||
self._record_cache_backoff(self.github_cache, cache_key, self.cache_timeout, stale)
|
||||
self.logger.warning(
|
||||
"GitHub API 403 for %s; serving stale cache.", cache_key,
|
||||
)
|
||||
return stale
|
||||
# Rate limit or authentication issue
|
||||
if not self.github_token:
|
||||
self.logger.warning(
|
||||
f"GitHub API rate limit likely exceeded (403). "
|
||||
@@ -444,10 +342,6 @@ class PluginStoreManager:
|
||||
)
|
||||
else:
|
||||
self.logger.warning(f"GitHub API request failed: {response.status_code} for {api_url}")
|
||||
if cache_key in self.github_cache:
|
||||
_, stale = self.github_cache[cache_key]
|
||||
self._record_cache_backoff(self.github_cache, cache_key, self.cache_timeout, stale)
|
||||
return stale
|
||||
|
||||
return {
|
||||
'stars': 0,
|
||||
@@ -548,34 +442,23 @@ class PluginStoreManager:
|
||||
self.logger.error(f"Error fetching registry from URL: {e}", exc_info=True)
|
||||
return None
|
||||
|
||||
def fetch_registry(self, force_refresh: bool = False, raise_on_failure: bool = False) -> Dict:
|
||||
def fetch_registry(self, force_refresh: bool = False) -> Dict:
|
||||
"""
|
||||
Fetch the plugin registry from GitHub.
|
||||
|
||||
|
||||
Args:
|
||||
force_refresh: Force refresh even if cached
|
||||
raise_on_failure: If True, re-raise network / JSON errors instead
|
||||
of silently falling back to stale cache / empty dict. UI
|
||||
callers prefer the stale-fallback default so the plugin
|
||||
list keeps working on flaky WiFi; the state reconciler
|
||||
needs the explicit failure signal so it can distinguish
|
||||
"plugin genuinely not in registry" from "I couldn't reach
|
||||
the registry at all" and not mark everything unrecoverable.
|
||||
|
||||
|
||||
Returns:
|
||||
Registry data with list of available plugins
|
||||
|
||||
Raises:
|
||||
requests.RequestException / json.JSONDecodeError when
|
||||
``raise_on_failure`` is True and the fetch fails.
|
||||
"""
|
||||
# Check if cache is still valid (within timeout)
|
||||
current_time = time.time()
|
||||
if (self.registry_cache and self.registry_cache_time and
|
||||
not force_refresh and
|
||||
if (self.registry_cache and self.registry_cache_time and
|
||||
not force_refresh and
|
||||
(current_time - self.registry_cache_time) < self.registry_cache_timeout):
|
||||
return self.registry_cache
|
||||
|
||||
|
||||
try:
|
||||
self.logger.info(f"Fetching plugin registry from {self.REGISTRY_URL}")
|
||||
response = self._http_get_with_retries(self.REGISTRY_URL, timeout=10)
|
||||
@@ -586,30 +469,9 @@ class PluginStoreManager:
|
||||
return self.registry_cache
|
||||
except requests.RequestException as e:
|
||||
self.logger.error(f"Error fetching registry: {e}")
|
||||
if raise_on_failure:
|
||||
raise
|
||||
# Prefer stale cache over an empty list so the plugin list UI
|
||||
# keeps working on a flaky connection (e.g. Pi on WiFi). Bump
|
||||
# registry_cache_time into a short backoff window so the next
|
||||
# request serves the stale payload cheaply instead of
|
||||
# re-hitting the network on every request (matches the
|
||||
# pattern used by github_cache / commit_info_cache).
|
||||
if self.registry_cache:
|
||||
self.logger.warning("Falling back to stale registry cache")
|
||||
self.registry_cache_time = (
|
||||
time.time() + self._failure_backoff_seconds - self.registry_cache_timeout
|
||||
)
|
||||
return self.registry_cache
|
||||
return {"plugins": []}
|
||||
except json.JSONDecodeError as e:
|
||||
self.logger.error(f"Error parsing registry JSON: {e}")
|
||||
if raise_on_failure:
|
||||
raise
|
||||
if self.registry_cache:
|
||||
self.registry_cache_time = (
|
||||
time.time() + self._failure_backoff_seconds - self.registry_cache_timeout
|
||||
)
|
||||
return self.registry_cache
|
||||
return {"plugins": []}
|
||||
|
||||
def search_plugins(self, query: str = "", category: str = "", tags: List[str] = None, fetch_commit_info: bool = True, include_saved_repos: bool = True, saved_repositories_manager = None) -> List[Dict]:
|
||||
@@ -655,95 +517,68 @@ class PluginStoreManager:
|
||||
except Exception as e:
|
||||
self.logger.warning(f"Failed to fetch plugins from saved repository {repo_url}: {e}")
|
||||
|
||||
# First pass: apply cheap filters (category/tags/query) so we only
|
||||
# fetch GitHub metadata for plugins that will actually be returned.
|
||||
filtered: List[Dict] = []
|
||||
results = []
|
||||
for plugin in plugins:
|
||||
# Category filter
|
||||
if category and plugin.get('category') != category:
|
||||
continue
|
||||
|
||||
# Tags filter (match any tag)
|
||||
if tags and not any(tag in plugin.get('tags', []) for tag in tags):
|
||||
continue
|
||||
|
||||
# Query search (case-insensitive)
|
||||
if query:
|
||||
query_lower = query.lower()
|
||||
searchable_text = ' '.join([
|
||||
plugin.get('name', ''),
|
||||
plugin.get('description', ''),
|
||||
plugin.get('id', ''),
|
||||
plugin.get('author', ''),
|
||||
plugin.get('author', '')
|
||||
]).lower()
|
||||
|
||||
if query_lower not in searchable_text:
|
||||
continue
|
||||
filtered.append(plugin)
|
||||
|
||||
def _enrich(plugin: Dict) -> Dict:
|
||||
"""Enrich a single plugin with GitHub metadata.
|
||||
|
||||
Called concurrently from a ThreadPoolExecutor. Each underlying
|
||||
HTTP helper (``_get_github_repo_info`` / ``_get_latest_commit_info``
|
||||
/ ``_fetch_manifest_from_github``) is thread-safe — they use
|
||||
``requests`` and write their own cache keys on Python dicts,
|
||||
which is atomic under the GIL for single-key assignments.
|
||||
"""
|
||||
# Enhance plugin data with GitHub metadata
|
||||
enhanced_plugin = plugin.copy()
|
||||
|
||||
# Get real GitHub stars
|
||||
repo_url = plugin.get('repo', '')
|
||||
if not repo_url:
|
||||
return enhanced_plugin
|
||||
if repo_url:
|
||||
github_info = self._get_github_repo_info(repo_url)
|
||||
enhanced_plugin['stars'] = github_info.get('stars', plugin.get('stars', 0))
|
||||
enhanced_plugin['default_branch'] = github_info.get('default_branch', plugin.get('branch', 'main'))
|
||||
enhanced_plugin['last_updated_iso'] = github_info.get('last_commit_iso')
|
||||
enhanced_plugin['last_updated'] = github_info.get('last_commit_date')
|
||||
|
||||
github_info = self._get_github_repo_info(repo_url)
|
||||
enhanced_plugin['stars'] = github_info.get('stars', plugin.get('stars', 0))
|
||||
enhanced_plugin['default_branch'] = github_info.get('default_branch', plugin.get('branch', 'main'))
|
||||
enhanced_plugin['last_updated_iso'] = github_info.get('last_commit_iso')
|
||||
enhanced_plugin['last_updated'] = github_info.get('last_commit_date')
|
||||
if fetch_commit_info:
|
||||
branch = plugin.get('branch') or github_info.get('default_branch', 'main')
|
||||
|
||||
if fetch_commit_info:
|
||||
branch = plugin.get('branch') or github_info.get('default_branch', 'main')
|
||||
commit_info = self._get_latest_commit_info(repo_url, branch)
|
||||
if commit_info:
|
||||
enhanced_plugin['last_commit'] = commit_info.get('short_sha')
|
||||
enhanced_plugin['last_commit_sha'] = commit_info.get('sha')
|
||||
enhanced_plugin['last_updated'] = commit_info.get('date') or enhanced_plugin.get('last_updated')
|
||||
enhanced_plugin['last_updated_iso'] = commit_info.get('date_iso') or enhanced_plugin.get('last_updated_iso')
|
||||
enhanced_plugin['last_commit_message'] = commit_info.get('message')
|
||||
enhanced_plugin['last_commit_author'] = commit_info.get('author')
|
||||
enhanced_plugin['branch'] = commit_info.get('branch', branch)
|
||||
enhanced_plugin['last_commit_branch'] = commit_info.get('branch')
|
||||
|
||||
commit_info = self._get_latest_commit_info(repo_url, branch)
|
||||
if commit_info:
|
||||
enhanced_plugin['last_commit'] = commit_info.get('short_sha')
|
||||
enhanced_plugin['last_commit_sha'] = commit_info.get('sha')
|
||||
enhanced_plugin['last_updated'] = commit_info.get('date') or enhanced_plugin.get('last_updated')
|
||||
enhanced_plugin['last_updated_iso'] = commit_info.get('date_iso') or enhanced_plugin.get('last_updated_iso')
|
||||
enhanced_plugin['last_commit_message'] = commit_info.get('message')
|
||||
enhanced_plugin['last_commit_author'] = commit_info.get('author')
|
||||
enhanced_plugin['branch'] = commit_info.get('branch', branch)
|
||||
enhanced_plugin['last_commit_branch'] = commit_info.get('branch')
|
||||
# Fetch manifest from GitHub for additional metadata (description, etc.)
|
||||
plugin_subpath = plugin.get('plugin_path', '')
|
||||
manifest_rel = f"{plugin_subpath}/manifest.json" if plugin_subpath else "manifest.json"
|
||||
github_manifest = self._fetch_manifest_from_github(repo_url, branch, manifest_rel)
|
||||
if github_manifest:
|
||||
if 'last_updated' in github_manifest and not enhanced_plugin.get('last_updated'):
|
||||
enhanced_plugin['last_updated'] = github_manifest['last_updated']
|
||||
if 'description' in github_manifest:
|
||||
enhanced_plugin['description'] = github_manifest['description']
|
||||
|
||||
# Intentionally NO per-plugin manifest.json fetch here.
|
||||
# The registry's plugins.json already carries ``description``
|
||||
# (it is generated from each plugin's manifest by
|
||||
# ``update_registry.py``), and ``last_updated`` is filled in
|
||||
# from the commit info above. An earlier implementation
|
||||
# fetched manifest.json per plugin anyway, which meant one
|
||||
# extra HTTPS round trip per result; on a Pi4 with a flaky
|
||||
# WiFi link the tail retries of that one extra call
|
||||
# (_http_get_with_retries does 3 attempts with exponential
|
||||
# backoff) dominated wall time even after parallelization.
|
||||
results.append(enhanced_plugin)
|
||||
|
||||
return enhanced_plugin
|
||||
|
||||
# Fan out the per-plugin GitHub enrichment. The previous
|
||||
# implementation did this serially, which on a Pi4 with ~15 plugins
|
||||
# and a fresh cache meant 30+ HTTP requests in strict sequence (the
|
||||
# "connecting to display" hang reported by users). With a thread
|
||||
# pool, latency is dominated by the slowest request rather than
|
||||
# their sum. Workers capped at 10 to stay well under the
|
||||
# unauthenticated GitHub rate limit burst and avoid overwhelming a
|
||||
# Pi's WiFi link. For a small number of plugins the pool is
|
||||
# essentially free.
|
||||
if not filtered:
|
||||
return []
|
||||
|
||||
# Not worth the pool overhead for tiny workloads. Parenthesized to
|
||||
# make Python's default ``and`` > ``or`` precedence explicit: a
|
||||
# single plugin, OR a small batch where we don't need commit info.
|
||||
if (len(filtered) == 1) or ((not fetch_commit_info) and (len(filtered) < 4)):
|
||||
return [_enrich(p) for p in filtered]
|
||||
|
||||
max_workers = min(10, len(filtered))
|
||||
with ThreadPoolExecutor(max_workers=max_workers, thread_name_prefix='plugin-search') as executor:
|
||||
# executor.map preserves input order, which the UI relies on.
|
||||
return list(executor.map(_enrich, filtered))
|
||||
return results
|
||||
|
||||
def _fetch_manifest_from_github(self, repo_url: str, branch: str = "master", manifest_path: str = "manifest.json", force_refresh: bool = False) -> Optional[Dict]:
|
||||
"""
|
||||
@@ -841,28 +676,7 @@ class PluginStoreManager:
|
||||
last_error = None
|
||||
for branch_name in branches_to_try:
|
||||
api_url = f"https://api.github.com/repos/{owner}/{repo}/commits/{branch_name}"
|
||||
try:
|
||||
response = requests.get(api_url, headers=headers, timeout=10)
|
||||
except requests.RequestException as req_err:
|
||||
# Network failure: fall back to a stale cache hit if
|
||||
# available so the plugin store UI keeps populating
|
||||
# commit info on a flaky WiFi link. Bump the cached
|
||||
# timestamp into the backoff window so we don't
|
||||
# re-retry on every request.
|
||||
if cache_key in self.commit_info_cache:
|
||||
_, stale = self.commit_info_cache[cache_key]
|
||||
if stale is not None:
|
||||
self._record_cache_backoff(
|
||||
self.commit_info_cache, cache_key,
|
||||
self.commit_cache_timeout, stale,
|
||||
)
|
||||
self.logger.warning(
|
||||
"GitHub commit fetch failed for %s (%s); serving stale cache.",
|
||||
cache_key, req_err,
|
||||
)
|
||||
return stale
|
||||
last_error = str(req_err)
|
||||
continue
|
||||
response = requests.get(api_url, headers=headers, timeout=10)
|
||||
if response.status_code == 200:
|
||||
commit_data = response.json()
|
||||
commit_sha_full = commit_data.get('sha', '')
|
||||
@@ -892,23 +706,7 @@ class PluginStoreManager:
|
||||
if last_error:
|
||||
self.logger.debug(f"Unable to fetch commit info for {repo_url}: {last_error}")
|
||||
|
||||
# All branches returned a non-200 response (e.g. 404 on every
|
||||
# candidate, or a transient 5xx). If we already had a good
|
||||
# cached value, prefer serving that — overwriting it with
|
||||
# None here would wipe out commit info the UI just showed
|
||||
# on the previous request. Bump the timestamp into the
|
||||
# backoff window so subsequent lookups hit the cache.
|
||||
if cache_key in self.commit_info_cache:
|
||||
_, prior = self.commit_info_cache[cache_key]
|
||||
if prior is not None:
|
||||
self._record_cache_backoff(
|
||||
self.commit_info_cache, cache_key,
|
||||
self.commit_cache_timeout, prior,
|
||||
)
|
||||
return prior
|
||||
|
||||
# No prior good value — cache the negative result so we don't
|
||||
# hammer a plugin that genuinely has no reachable commits.
|
||||
# Cache negative result to avoid repeated failing calls
|
||||
self.commit_info_cache[cache_key] = (time.time(), None)
|
||||
|
||||
except Exception as e:
|
||||
@@ -1762,93 +1560,12 @@ class PluginStoreManager:
|
||||
self.logger.error(f"Unexpected error installing dependencies for {plugin_path.name}: {e}", exc_info=True)
|
||||
return False
|
||||
|
||||
def _git_cache_signature(self, git_dir: Path) -> Optional[Tuple]:
|
||||
"""Build a cache signature that invalidates on the kind of updates
|
||||
a plugin user actually cares about.
|
||||
|
||||
Caching on ``.git/HEAD`` mtime alone is not enough: a ``git pull``
|
||||
that fast-forwards the current branch updates
|
||||
``.git/refs/heads/<branch>`` (or ``.git/packed-refs``) but leaves
|
||||
HEAD's contents and mtime untouched. And the cached ``result``
|
||||
dict includes ``remote_url`` — a value read from ``.git/config`` —
|
||||
so a config-only change (e.g. a monorepo-migration re-pointing
|
||||
``remote.origin.url``) must also invalidate the cache.
|
||||
|
||||
Signature components:
|
||||
- HEAD contents (catches detach / branch switch)
|
||||
- HEAD mtime
|
||||
- if HEAD points at a ref, that ref file's mtime (catches
|
||||
fast-forward / reset on the current branch)
|
||||
- packed-refs mtime as a coarse fallback for repos using packed refs
|
||||
- .git/config contents + mtime (catches remote URL changes and
|
||||
any other config-only edit that affects what the cached
|
||||
``remote_url`` field should contain)
|
||||
|
||||
Returns ``None`` if HEAD cannot be read at all (caller will skip
|
||||
the cache and take the slow path).
|
||||
"""
|
||||
head_file = git_dir / 'HEAD'
|
||||
try:
|
||||
head_mtime = head_file.stat().st_mtime
|
||||
head_contents = head_file.read_text(encoding='utf-8', errors='replace').strip()
|
||||
except OSError:
|
||||
return None
|
||||
|
||||
ref_mtime = None
|
||||
if head_contents.startswith('ref: '):
|
||||
ref_path = head_contents[len('ref: '):].strip()
|
||||
# ``ref_path`` looks like ``refs/heads/main``. It lives either
|
||||
# as a loose file under .git/ or inside .git/packed-refs.
|
||||
loose_ref = git_dir / ref_path
|
||||
try:
|
||||
ref_mtime = loose_ref.stat().st_mtime
|
||||
except OSError:
|
||||
ref_mtime = None
|
||||
|
||||
packed_refs_mtime = None
|
||||
if ref_mtime is None:
|
||||
try:
|
||||
packed_refs_mtime = (git_dir / 'packed-refs').stat().st_mtime
|
||||
except OSError:
|
||||
packed_refs_mtime = None
|
||||
|
||||
config_mtime = None
|
||||
config_contents = None
|
||||
config_file = git_dir / 'config'
|
||||
try:
|
||||
config_mtime = config_file.stat().st_mtime
|
||||
config_contents = config_file.read_text(encoding='utf-8', errors='replace').strip()
|
||||
except OSError:
|
||||
config_mtime = None
|
||||
config_contents = None
|
||||
|
||||
return (
|
||||
head_contents, head_mtime,
|
||||
ref_mtime, packed_refs_mtime,
|
||||
config_contents, config_mtime,
|
||||
)
|
||||
|
||||
def _get_local_git_info(self, plugin_path: Path) -> Optional[Dict[str, str]]:
|
||||
"""Return local git branch, commit hash, and commit date if the plugin is a git checkout.
|
||||
|
||||
Results are cached keyed on a signature that includes HEAD
|
||||
contents plus the mtime of HEAD AND the resolved ref (or
|
||||
packed-refs). Repeated calls skip the four ``git`` subprocesses
|
||||
when nothing has changed, and a ``git pull`` that fast-forwards
|
||||
the branch correctly invalidates the cache.
|
||||
"""
|
||||
"""Return local git branch, commit hash, and commit date if the plugin is a git checkout."""
|
||||
git_dir = plugin_path / '.git'
|
||||
if not git_dir.exists():
|
||||
return None
|
||||
|
||||
cache_key = str(plugin_path)
|
||||
signature = self._git_cache_signature(git_dir)
|
||||
|
||||
if signature is not None:
|
||||
cached = self._git_info_cache.get(cache_key)
|
||||
if cached is not None and cached[0] == signature:
|
||||
return cached[1]
|
||||
|
||||
try:
|
||||
sha_result = subprocess.run(
|
||||
['git', '-C', str(plugin_path), 'rev-parse', 'HEAD'],
|
||||
@@ -1906,8 +1623,6 @@ class PluginStoreManager:
|
||||
result['date_iso'] = commit_date_iso
|
||||
result['date'] = self._iso_to_date(commit_date_iso)
|
||||
|
||||
if signature is not None:
|
||||
self._git_info_cache[cache_key] = (signature, result)
|
||||
return result
|
||||
except subprocess.CalledProcessError as err:
|
||||
self.logger.debug(f"Failed to read git info for {plugin_path.name}: {err}")
|
||||
|
||||
283
test/test_backup_manager.py
Normal file
283
test/test_backup_manager.py
Normal file
@@ -0,0 +1,283 @@
|
||||
"""Tests for src.backup_manager."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import io
|
||||
import json
|
||||
import zipfile
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from src import backup_manager
|
||||
from src.backup_manager import (
|
||||
BUNDLED_FONTS,
|
||||
RestoreOptions,
|
||||
create_backup,
|
||||
list_installed_plugins,
|
||||
preview_backup_contents,
|
||||
restore_backup,
|
||||
validate_backup,
|
||||
)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Fixtures
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _make_project(root: Path) -> Path:
|
||||
"""Build a minimal fake project tree under ``root``."""
|
||||
(root / "config").mkdir(parents=True)
|
||||
(root / "config" / "config.json").write_text(
|
||||
json.dumps({"web_ui": {"port": 8080}, "my-plugin": {"enabled": True, "favorites": ["A", "B"]}}),
|
||||
encoding="utf-8",
|
||||
)
|
||||
(root / "config" / "config_secrets.json").write_text(
|
||||
json.dumps({"ledmatrix-weather": {"api_key": "SECRET"}}),
|
||||
encoding="utf-8",
|
||||
)
|
||||
(root / "config" / "wifi_config.json").write_text(
|
||||
json.dumps({"ap_mode": {"ssid": "LEDMatrix"}}),
|
||||
encoding="utf-8",
|
||||
)
|
||||
|
||||
fonts = root / "assets" / "fonts"
|
||||
fonts.mkdir(parents=True)
|
||||
# One bundled font (should be excluded) and one user-uploaded font.
|
||||
(fonts / "5x7.bdf").write_text("BUNDLED", encoding="utf-8")
|
||||
(fonts / "my-custom-font.ttf").write_bytes(b"\x00\x01USER")
|
||||
|
||||
uploads = root / "assets" / "plugins" / "static-image" / "uploads"
|
||||
uploads.mkdir(parents=True)
|
||||
(uploads / "image_1.png").write_bytes(b"\x89PNG\r\n\x1a\nfake")
|
||||
(uploads / ".metadata.json").write_text(json.dumps({"a": 1}), encoding="utf-8")
|
||||
|
||||
# plugin-repos for installed-plugin enumeration.
|
||||
plugin_dir = root / "plugin-repos" / "my-plugin"
|
||||
plugin_dir.mkdir(parents=True)
|
||||
(plugin_dir / "manifest.json").write_text(
|
||||
json.dumps({"id": "my-plugin", "version": "1.2.3"}),
|
||||
encoding="utf-8",
|
||||
)
|
||||
|
||||
# plugin_state.json
|
||||
(root / "data").mkdir()
|
||||
(root / "data" / "plugin_state.json").write_text(
|
||||
json.dumps(
|
||||
{
|
||||
"plugins": {
|
||||
"my-plugin": {"version": "1.2.3", "enabled": True},
|
||||
"other-plugin": {"version": "0.1.0", "enabled": False},
|
||||
}
|
||||
}
|
||||
),
|
||||
encoding="utf-8",
|
||||
)
|
||||
return root
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def project(tmp_path: Path) -> Path:
|
||||
return _make_project(tmp_path / "src_project")
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def empty_project(tmp_path: Path) -> Path:
|
||||
root = tmp_path / "dst_project"
|
||||
root.mkdir()
|
||||
# Pre-seed only the bundled font to simulate a fresh install.
|
||||
(root / "assets" / "fonts").mkdir(parents=True)
|
||||
(root / "assets" / "fonts" / "5x7.bdf").write_text("BUNDLED", encoding="utf-8")
|
||||
return root
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# BUNDLED_FONTS sanity
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def test_bundled_fonts_matches_repo() -> None:
|
||||
"""Every entry in BUNDLED_FONTS must exist on disk in assets/fonts/.
|
||||
|
||||
The reverse direction is intentionally not checked: real installations
|
||||
have user-uploaded fonts in the same directory, and they should be
|
||||
treated as user data (not bundled).
|
||||
"""
|
||||
repo_fonts = Path(__file__).resolve().parent.parent / "assets" / "fonts"
|
||||
if not repo_fonts.exists():
|
||||
pytest.skip("assets/fonts not present in test env")
|
||||
on_disk = {p.name for p in repo_fonts.iterdir() if p.is_file()}
|
||||
missing = set(BUNDLED_FONTS) - on_disk
|
||||
assert not missing, f"BUNDLED_FONTS references files not in assets/fonts/: {missing}"
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Preview / enumeration
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def test_list_installed_plugins(project: Path) -> None:
|
||||
plugins = list_installed_plugins(project)
|
||||
ids = [p["plugin_id"] for p in plugins]
|
||||
assert "my-plugin" in ids
|
||||
assert "other-plugin" in ids
|
||||
my = next(p for p in plugins if p["plugin_id"] == "my-plugin")
|
||||
assert my["version"] == "1.2.3"
|
||||
|
||||
|
||||
def test_preview_backup_contents(project: Path) -> None:
|
||||
preview = preview_backup_contents(project)
|
||||
assert preview["has_config"] is True
|
||||
assert preview["has_secrets"] is True
|
||||
assert preview["has_wifi"] is True
|
||||
assert preview["user_fonts"] == ["my-custom-font.ttf"]
|
||||
assert preview["plugin_uploads"] >= 2
|
||||
assert any(p["plugin_id"] == "my-plugin" for p in preview["plugins"])
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Export
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def test_create_backup_contents(project: Path, tmp_path: Path) -> None:
|
||||
out_dir = tmp_path / "exports"
|
||||
zip_path = create_backup(project, output_dir=out_dir)
|
||||
assert zip_path.exists()
|
||||
assert zip_path.parent == out_dir
|
||||
with zipfile.ZipFile(zip_path) as zf:
|
||||
names = set(zf.namelist())
|
||||
assert "manifest.json" in names
|
||||
assert "config/config.json" in names
|
||||
assert "config/config_secrets.json" in names
|
||||
assert "config/wifi_config.json" in names
|
||||
assert "assets/fonts/my-custom-font.ttf" in names
|
||||
# Bundled font must NOT be included.
|
||||
assert "assets/fonts/5x7.bdf" not in names
|
||||
assert "assets/plugins/static-image/uploads/image_1.png" in names
|
||||
assert "plugins.json" in names
|
||||
|
||||
|
||||
def test_create_backup_manifest(project: Path, tmp_path: Path) -> None:
|
||||
zip_path = create_backup(project, output_dir=tmp_path / "exports")
|
||||
with zipfile.ZipFile(zip_path) as zf:
|
||||
manifest = json.loads(zf.read("manifest.json"))
|
||||
assert manifest["schema_version"] == backup_manager.SCHEMA_VERSION
|
||||
assert "created_at" in manifest
|
||||
assert set(manifest["contents"]) >= {"config", "secrets", "wifi", "fonts", "plugin_uploads", "plugins"}
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Validate
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def test_validate_backup_ok(project: Path, tmp_path: Path) -> None:
|
||||
zip_path = create_backup(project, output_dir=tmp_path / "exports")
|
||||
ok, err, manifest = validate_backup(zip_path)
|
||||
assert ok, err
|
||||
assert err == ""
|
||||
assert "config" in manifest["detected_contents"]
|
||||
assert "secrets" in manifest["detected_contents"]
|
||||
assert any(p["plugin_id"] == "my-plugin" for p in manifest["plugins"])
|
||||
|
||||
|
||||
def test_validate_backup_missing_manifest(tmp_path: Path) -> None:
|
||||
zip_path = tmp_path / "bad.zip"
|
||||
with zipfile.ZipFile(zip_path, "w") as zf:
|
||||
zf.writestr("config/config.json", "{}")
|
||||
ok, err, _ = validate_backup(zip_path)
|
||||
assert not ok
|
||||
assert "manifest" in err.lower()
|
||||
|
||||
|
||||
def test_validate_backup_bad_schema_version(tmp_path: Path) -> None:
|
||||
zip_path = tmp_path / "bad.zip"
|
||||
with zipfile.ZipFile(zip_path, "w") as zf:
|
||||
zf.writestr("manifest.json", json.dumps({"schema_version": 999}))
|
||||
ok, err, _ = validate_backup(zip_path)
|
||||
assert not ok
|
||||
assert "schema" in err.lower()
|
||||
|
||||
|
||||
def test_validate_backup_rejects_zip_traversal(tmp_path: Path) -> None:
|
||||
zip_path = tmp_path / "malicious.zip"
|
||||
with zipfile.ZipFile(zip_path, "w") as zf:
|
||||
zf.writestr("manifest.json", json.dumps({"schema_version": 1, "contents": []}))
|
||||
zf.writestr("../../etc/passwd", "x")
|
||||
ok, err, _ = validate_backup(zip_path)
|
||||
assert not ok
|
||||
assert "unsafe" in err.lower()
|
||||
|
||||
|
||||
def test_validate_backup_not_a_zip(tmp_path: Path) -> None:
|
||||
p = tmp_path / "nope.zip"
|
||||
p.write_text("hello", encoding="utf-8")
|
||||
ok, err, _ = validate_backup(p)
|
||||
assert not ok
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Restore
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def test_restore_roundtrip(project: Path, empty_project: Path, tmp_path: Path) -> None:
|
||||
zip_path = create_backup(project, output_dir=tmp_path / "exports")
|
||||
result = restore_backup(zip_path, empty_project, RestoreOptions())
|
||||
|
||||
assert result.success, result.errors
|
||||
assert "config" in result.restored
|
||||
assert "secrets" in result.restored
|
||||
assert "wifi" in result.restored
|
||||
|
||||
# Files exist with correct contents.
|
||||
restored_config = json.loads((empty_project / "config" / "config.json").read_text())
|
||||
assert restored_config["my-plugin"]["favorites"] == ["A", "B"]
|
||||
|
||||
restored_secrets = json.loads((empty_project / "config" / "config_secrets.json").read_text())
|
||||
assert restored_secrets["ledmatrix-weather"]["api_key"] == "SECRET"
|
||||
|
||||
# User font restored, bundled font untouched.
|
||||
assert (empty_project / "assets" / "fonts" / "my-custom-font.ttf").read_bytes() == b"\x00\x01USER"
|
||||
assert (empty_project / "assets" / "fonts" / "5x7.bdf").read_text() == "BUNDLED"
|
||||
|
||||
# Plugin uploads restored.
|
||||
assert (empty_project / "assets" / "plugins" / "static-image" / "uploads" / "image_1.png").exists()
|
||||
|
||||
# Plugins to install surfaced for the caller.
|
||||
plugin_ids = {p["plugin_id"] for p in result.plugins_to_install}
|
||||
assert "my-plugin" in plugin_ids
|
||||
|
||||
|
||||
def test_restore_honors_options(project: Path, empty_project: Path, tmp_path: Path) -> None:
    """Only the categories enabled in RestoreOptions are written back."""
    archive = create_backup(project, output_dir=tmp_path / "exports")
    config_only = RestoreOptions(
        restore_config=True,
        restore_secrets=False,
        restore_wifi=False,
        restore_fonts=False,
        restore_plugin_uploads=False,
        reinstall_plugins=False,
    )
    outcome = restore_backup(archive, empty_project, config_only)

    assert outcome.success, outcome.errors
    cfg_dir = empty_project / "config"
    assert (cfg_dir / "config.json").exists()
    # Everything that was opted out must be absent.
    assert not (cfg_dir / "config_secrets.json").exists()
    assert not (cfg_dir / "wifi_config.json").exists()
    assert not (empty_project / "assets" / "fonts" / "my-custom-font.ttf").exists()
    assert outcome.plugins_to_install == []
    assert "secrets" in outcome.skipped
    assert "wifi" in outcome.skipped
|
||||
|
||||
|
||||
def test_restore_rejects_malicious_zip(empty_project: Path, tmp_path: Path) -> None:
    """A zip containing a path-traversal member must be refused outright."""
    archive = tmp_path / "bad.zip"
    with zipfile.ZipFile(archive, "w") as zf:
        zf.writestr("manifest.json", json.dumps({"schema_version": 1, "contents": []}))
        zf.writestr("../escape.txt", "x")

    outcome = restore_backup(archive, empty_project, RestoreOptions())

    # validate_backup catches it before extraction.
    assert not outcome.success
    assert any("unsafe" in message.lower() for message in outcome.errors)
|
||||
@@ -1,747 +0,0 @@
|
||||
"""
|
||||
Tests for the caching and tombstone behaviors added to PluginStoreManager
|
||||
to fix the plugin-list slowness and the uninstall-resurrection bugs.
|
||||
|
||||
Coverage targets:
|
||||
- ``mark_recently_uninstalled`` / ``was_recently_uninstalled`` lifecycle and
|
||||
TTL expiry.
|
||||
- ``_get_local_git_info`` mtime-gated cache: ``git`` subprocesses only run
|
||||
when ``.git/HEAD`` mtime changes.
|
||||
- ``fetch_registry`` stale-cache fallback on network failure.
|
||||
"""
|
||||
|
||||
import os
|
||||
import time
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
from tempfile import TemporaryDirectory
|
||||
from unittest.mock import patch, MagicMock
|
||||
|
||||
from src.plugin_system.store_manager import PluginStoreManager
|
||||
|
||||
|
||||
class TestUninstallTombstone(unittest.TestCase):
    """Lifecycle of the uninstall tombstone: unset → marked → TTL-expired."""

    def setUp(self):
        workdir = TemporaryDirectory()
        self.addCleanup(workdir.cleanup)
        self._tmp = workdir
        self.sm = PluginStoreManager(plugins_dir=workdir.name)

    def test_unmarked_plugin_is_not_recent(self):
        # Nothing was ever marked, so the query must be negative.
        self.assertFalse(self.sm.was_recently_uninstalled("foo"))

    def test_marking_makes_it_recent(self):
        self.sm.mark_recently_uninstalled("foo")
        self.assertTrue(self.sm.was_recently_uninstalled("foo"))

    def test_tombstone_expires_after_ttl(self):
        # Shrink the TTL so expiry is observable without slowing the suite.
        self.sm._uninstall_tombstone_ttl = 0.05
        self.sm.mark_recently_uninstalled("foo")
        self.assertTrue(self.sm.was_recently_uninstalled("foo"))
        time.sleep(0.1)
        self.assertFalse(self.sm.was_recently_uninstalled("foo"))
        # Expired entry should also be pruned from the dict.
        self.assertNotIn("foo", self.sm._uninstall_tombstones)
|
||||
|
||||
|
||||
class TestGitInfoCache(unittest.TestCase):
    """mtime-gated caching of ``_get_local_git_info``: hits, invalidation
    on HEAD / config / branch-ref changes, and the no-.git fast path.
    """

    def setUp(self):
        workdir = TemporaryDirectory()
        self.addCleanup(workdir.cleanup)
        self._tmp = workdir
        self.plugins_dir = Path(workdir.name)
        self.sm = PluginStoreManager(plugins_dir=str(self.plugins_dir))

        # Minimal fake git checkout: .git/HEAD needs to exist so the cache
        # key (its mtime) is stable, but we mock subprocess so no actual git
        # is required.
        self.plugin_path = self.plugins_dir / "plg"
        git_dir = self.plugin_path / ".git"
        git_dir.mkdir(parents=True)
        (git_dir / "HEAD").write_text("ref: refs/heads/main\n")

    def _fake_subprocess_run(self, *args, **kwargs):
        # Return different dummy values depending on which git subcommand
        # was invoked so the code paths that parse output all succeed.
        cmd = args[0]
        fake = MagicMock()
        fake.returncode = 0
        if "rev-parse" in cmd and "HEAD" in cmd and "--abbrev-ref" not in cmd:
            fake.stdout = "abcdef1234567890\n"
        elif "--abbrev-ref" in cmd:
            fake.stdout = "main\n"
        elif "config" in cmd:
            fake.stdout = "https://example.com/repo.git\n"
        elif "log" in cmd:
            fake.stdout = "2026-04-08T12:00:00+00:00\n"
        else:
            fake.stdout = ""
        return fake

    def test_cache_hits_avoid_subprocess_calls(self):
        with patch(
            "src.plugin_system.store_manager.subprocess.run",
            side_effect=self._fake_subprocess_run,
        ) as git_run:
            first = self.sm._get_local_git_info(self.plugin_path)
            self.assertIsNotNone(first)
            self.assertEqual(first["short_sha"], "abcdef1")
            baseline = git_run.call_count
            self.assertEqual(baseline, 4)

            # Second call with unchanged HEAD: zero new subprocess calls.
            second = self.sm._get_local_git_info(self.plugin_path)
            self.assertEqual(second, first)
            self.assertEqual(git_run.call_count, baseline)

    def test_cache_invalidates_on_head_mtime_change(self):
        with patch(
            "src.plugin_system.store_manager.subprocess.run",
            side_effect=self._fake_subprocess_run,
        ) as git_run:
            self.sm._get_local_git_info(self.plugin_path)
            baseline = git_run.call_count

            # Bump mtime on .git/HEAD to simulate a new commit being checked out.
            head = self.plugin_path / ".git" / "HEAD"
            bumped = head.stat().st_mtime + 10
            os.utime(head, (bumped, bumped))

            self.sm._get_local_git_info(self.plugin_path)
            self.assertEqual(git_run.call_count, baseline + 4)

    def test_no_git_directory_returns_none(self):
        plain_dir = self.plugins_dir / "no_git"
        plain_dir.mkdir()
        self.assertIsNone(self.sm._get_local_git_info(plain_dir))

    def test_cache_invalidates_on_git_config_change(self):
        """A config-only change (e.g. ``git remote set-url``) must invalidate
        the cache, because the cached ``result`` dict includes ``remote_url``
        which is read from ``.git/config``. Without config in the signature,
        a stale remote URL would be served indefinitely.
        """
        head_file = self.plugin_path / ".git" / "HEAD"
        head_file.write_text("ref: refs/heads/main\n")
        refs_heads = self.plugin_path / ".git" / "refs" / "heads"
        refs_heads.mkdir(parents=True, exist_ok=True)
        (refs_heads / "main").write_text("a" * 40 + "\n")
        config_file = self.plugin_path / ".git" / "config"
        config_file.write_text(
            '[remote "origin"]\n\turl = https://old.example.com/repo.git\n'
        )

        remote_url = {"current": "https://old.example.com/repo.git"}

        def fake_subprocess_run(*args, **kwargs):
            cmd = args[0]
            fake = MagicMock()
            fake.returncode = 0
            if "rev-parse" in cmd and "--abbrev-ref" not in cmd:
                fake.stdout = "a" * 40 + "\n"
            elif "--abbrev-ref" in cmd:
                fake.stdout = "main\n"
            elif "config" in cmd:
                fake.stdout = remote_url["current"] + "\n"
            elif "log" in cmd:
                fake.stdout = "2026-04-08T12:00:00+00:00\n"
            else:
                fake.stdout = ""
            return fake

        with patch(
            "src.plugin_system.store_manager.subprocess.run",
            side_effect=fake_subprocess_run,
        ):
            first = self.sm._get_local_git_info(self.plugin_path)
            self.assertEqual(first["remote_url"], "https://old.example.com/repo.git")

            # Simulate ``git remote set-url origin https://new.example.com/repo.git``:
            # ``.git/config`` contents AND mtime change. HEAD is untouched.
            time.sleep(0.01)  # ensure a detectable mtime delta
            config_file.write_text(
                '[remote "origin"]\n\turl = https://new.example.com/repo.git\n'
            )
            bumped = config_file.stat().st_mtime + 10
            os.utime(config_file, (bumped, bumped))
            remote_url["current"] = "https://new.example.com/repo.git"

            second = self.sm._get_local_git_info(self.plugin_path)
            self.assertEqual(
                second["remote_url"], "https://new.example.com/repo.git",
                "config-only change did not invalidate the cache — "
                ".git/config mtime/contents must be part of the signature",
            )

    def test_cache_invalidates_on_fast_forward_of_current_branch(self):
        """Regression: .git/HEAD mtime alone is not enough.

        ``git pull`` that fast-forwards the current branch touches
        ``.git/refs/heads/<branch>`` (or packed-refs) but NOT HEAD. If
        we cache on HEAD mtime alone, we serve a stale SHA indefinitely.
        """
        # Build a realistic loose-ref layout.
        refs_heads = self.plugin_path / ".git" / "refs" / "heads"
        refs_heads.mkdir(parents=True)
        branch_file = refs_heads / "main"
        branch_file.write_text("a" * 40 + "\n")
        # Overwrite HEAD to point at refs/heads/main.
        (self.plugin_path / ".git" / "HEAD").write_text("ref: refs/heads/main\n")

        call_log = []

        def fake_subprocess_run(*args, **kwargs):
            call_log.append(args[0])
            fake = MagicMock()
            fake.returncode = 0
            cmd = args[0]
            if "rev-parse" in cmd and "--abbrev-ref" not in cmd:
                fake.stdout = branch_file.read_text().strip() + "\n"
            elif "--abbrev-ref" in cmd:
                fake.stdout = "main\n"
            elif "config" in cmd:
                fake.stdout = "https://example.com/repo.git\n"
            elif "log" in cmd:
                fake.stdout = "2026-04-08T12:00:00+00:00\n"
            else:
                fake.stdout = ""
            return fake

        with patch(
            "src.plugin_system.store_manager.subprocess.run",
            side_effect=fake_subprocess_run,
        ):
            first = self.sm._get_local_git_info(self.plugin_path)
            baseline = len(call_log)
            self.assertIsNotNone(first)
            self.assertTrue(first["sha"].startswith("a"))

            # Second call: unchanged. Cache hit → no new subprocess calls.
            self.sm._get_local_git_info(self.plugin_path)
            self.assertEqual(len(call_log), baseline,
                             "cache should hit on unchanged state")

            # Simulate a fast-forward: the branch ref file gets a new SHA
            # and a new mtime, but .git/HEAD is untouched.
            branch_file.write_text("b" * 40 + "\n")
            bumped = branch_file.stat().st_mtime + 10
            os.utime(branch_file, (bumped, bumped))

            second = self.sm._get_local_git_info(self.plugin_path)
            # Cache MUST have been invalidated — we should have re-run git.
            self.assertGreater(
                len(call_log), baseline,
                "cache should have invalidated on branch ref update",
            )
            self.assertTrue(second["sha"].startswith("b"))
|
||||
|
||||
|
||||
class TestSearchPluginsParallel(unittest.TestCase):
    """Plugin Store browse path — the per-plugin GitHub enrichment used to
    run serially, turning a browse of 15 plugins into 30–45 sequential HTTP
    requests on a cold cache. This batch of tests locks in the parallel
    fan-out and verifies output shape/ordering haven't regressed.
    """

    def setUp(self):
        workdir = TemporaryDirectory()
        self.addCleanup(workdir.cleanup)
        self._tmp = workdir
        self.sm = PluginStoreManager(plugins_dir=workdir.name)

        # Fake registry with 5 plugins.
        self.registry = {
            "plugins": [
                {"id": f"plg{i}", "name": f"Plugin {i}",
                 "repo": f"https://github.com/owner/plg{i}", "category": "util"}
                for i in range(5)
            ]
        }
        self.sm.registry_cache = self.registry
        self.sm.registry_cache_time = time.time()

        self._enrich_calls = []

        def fake_repo(repo_url):
            self._enrich_calls.append(("repo", repo_url))
            return {"stars": 1, "default_branch": "main",
                    "last_commit_iso": "2026-04-08T00:00:00Z",
                    "last_commit_date": "2026-04-08"}

        def fake_commit(repo_url, branch):
            self._enrich_calls.append(("commit", repo_url, branch))
            return {"short_sha": "abc1234", "sha": "abc1234" + "0" * 33,
                    "date_iso": "2026-04-08T00:00:00Z", "date": "2026-04-08",
                    "message": "m", "author": "a", "branch": branch}

        def fake_manifest(repo_url, branch, manifest_path):
            self._enrich_calls.append(("manifest", repo_url, branch))
            return {"description": "desc"}

        self.sm._get_github_repo_info = fake_repo
        self.sm._get_latest_commit_info = fake_commit
        self.sm._fetch_manifest_from_github = fake_manifest

    def test_results_preserve_registry_order(self):
        # Parallel fan-out must not scramble the registry ordering.
        found = self.sm.search_plugins(include_saved_repos=False)
        self.assertEqual([entry["id"] for entry in found],
                         [f"plg{i}" for i in range(5)])

    def test_filters_applied_before_enrichment(self):
        # Filter down to a single plugin via category — ensures we don't
        # waste GitHub calls enriching plugins that won't be returned.
        self.registry["plugins"][2]["category"] = "special"
        self.sm.registry_cache = self.registry
        self._enrich_calls.clear()
        found = self.sm.search_plugins(category="special", include_saved_repos=False)
        self.assertEqual(len(found), 1)
        self.assertEqual(found[0]["id"], "plg2")
        # Only one plugin should have been enriched.
        repo_calls = [c for c in self._enrich_calls if c[0] == "repo"]
        self.assertEqual(len(repo_calls), 1)

    def test_enrichment_runs_concurrently(self):
        """Verify the thread pool actually runs fetches in parallel.

        Deterministic check: each stub repo fetch holds a lock while it
        increments a "currently running" counter, then sleeps briefly,
        then decrements. If execution is serial, the peak counter can
        never exceed 1. If the thread pool is engaged, we see at least
        2 concurrent workers.

        We deliberately do NOT assert on elapsed wall time — that check
        was flaky on low-power / CI boxes where scheduler noise dwarfed
        the 50ms-per-worker budget. ``peak["count"] >= 2`` is the signal
        we actually care about.
        """
        import threading
        counter_lock = threading.Lock()
        peak = {"count": 0, "current": 0}

        def slow_repo(repo_url):
            with counter_lock:
                peak["current"] += 1
                if peak["current"] > peak["count"]:
                    peak["count"] = peak["current"]
            # Small sleep gives other workers a chance to enter the
            # critical section before we leave it. 50ms is large enough
            # to dominate any scheduling jitter without slowing the test
            # suite meaningfully.
            time.sleep(0.05)
            with counter_lock:
                peak["current"] -= 1
            return {"stars": 0, "default_branch": "main",
                    "last_commit_iso": "", "last_commit_date": ""}

        self.sm._get_github_repo_info = slow_repo
        self.sm._get_latest_commit_info = lambda *a, **k: None
        self.sm._fetch_manifest_from_github = lambda *a, **k: None

        found = self.sm.search_plugins(fetch_commit_info=False, include_saved_repos=False)

        self.assertEqual(len(found), 5)
        self.assertGreaterEqual(
            peak["count"], 2,
            "no concurrent fetches observed — thread pool not engaging",
        )
|
||||
|
||||
|
||||
class TestStaleOnErrorFallbacks(unittest.TestCase):
    """When GitHub is unreachable, previously-cached values should still be
    returned rather than zero/None. Important on Pi's WiFi links.
    """

    def setUp(self):
        workdir = TemporaryDirectory()
        self.addCleanup(workdir.cleanup)
        self._tmp = workdir
        self.sm = PluginStoreManager(plugins_dir=workdir.name)

    def test_repo_info_stale_on_network_error(self):
        cache_key = "owner/repo"
        seeded = {"stars": 42, "default_branch": "main",
                  "last_commit_iso": "", "last_commit_date": "",
                  "forks": 0, "open_issues": 0, "updated_at_iso": "",
                  "language": "", "license": ""}
        # Seed the cache with a known-good value, then force expiry.
        self.sm.github_cache[cache_key] = (time.time() - 10_000, seeded)
        self.sm.cache_timeout = 1  # force re-fetch

        import requests as real_requests
        with patch("src.plugin_system.store_manager.requests.get",
                   side_effect=real_requests.ConnectionError("boom")):
            fetched = self.sm._get_github_repo_info("https://github.com/owner/repo")
        self.assertEqual(fetched["stars"], 42)

    def test_repo_info_stale_bumps_timestamp_into_backoff(self):
        """Regression: after serving stale, next lookup must hit cache.

        Without the failure-backoff timestamp bump, a repeat request
        would see the cache as still expired and re-hit the network,
        amplifying the original failure. The fix is to update the
        cached entry's timestamp so ``(now - ts) < cache_timeout`` holds
        for the backoff window.
        """
        cache_key = "owner/repo"
        seeded = {"stars": 99, "default_branch": "main",
                  "last_commit_iso": "", "last_commit_date": "",
                  "forks": 0, "open_issues": 0, "updated_at_iso": "",
                  "language": "", "license": ""}
        self.sm.github_cache[cache_key] = (time.time() - 10_000, seeded)
        self.sm.cache_timeout = 1
        self.sm._failure_backoff_seconds = 60

        import requests as real_requests
        attempts = {"n": 0}

        def failing_get(*args, **kwargs):
            attempts["n"] += 1
            raise real_requests.ConnectionError("boom")

        with patch("src.plugin_system.store_manager.requests.get", side_effect=failing_get):
            first = self.sm._get_github_repo_info("https://github.com/owner/repo")
            self.assertEqual(first["stars"], 99)
            self.assertEqual(attempts["n"], 1)

            # Second call must hit the bumped cache and NOT make another request.
            second = self.sm._get_github_repo_info("https://github.com/owner/repo")
            self.assertEqual(second["stars"], 99)
            self.assertEqual(
                attempts["n"], 1,
                "stale-cache fallback must bump the timestamp to avoid "
                "re-retrying on every request during the backoff window",
            )

    def test_repo_info_stale_on_403_also_backs_off(self):
        """Same backoff requirement for 403 rate-limit responses."""
        cache_key = "owner/repo"
        seeded = {"stars": 7, "default_branch": "main",
                  "last_commit_iso": "", "last_commit_date": "",
                  "forks": 0, "open_issues": 0, "updated_at_iso": "",
                  "language": "", "license": ""}
        self.sm.github_cache[cache_key] = (time.time() - 10_000, seeded)
        self.sm.cache_timeout = 1

        rate_limited = MagicMock()
        rate_limited.status_code = 403
        rate_limited.text = "rate limited"
        attempts = {"n": 0}

        def failing_get(*args, **kwargs):
            attempts["n"] += 1
            return rate_limited

        with patch("src.plugin_system.store_manager.requests.get", side_effect=failing_get):
            self.sm._get_github_repo_info("https://github.com/owner/repo")
            self.assertEqual(attempts["n"], 1)
            self.sm._get_github_repo_info("https://github.com/owner/repo")
            self.assertEqual(
                attempts["n"], 1,
                "403 stale fallback must also bump the timestamp",
            )

    def test_commit_info_stale_on_network_error(self):
        cache_key = "owner/repo:main"
        seeded = {"branch": "main", "sha": "a" * 40, "short_sha": "aaaaaaa",
                  "date_iso": "2026-04-08T00:00:00Z", "date": "2026-04-08",
                  "author": "x", "message": "y"}
        self.sm.commit_info_cache[cache_key] = (time.time() - 10_000, seeded)
        self.sm.commit_cache_timeout = 1  # force re-fetch

        import requests as real_requests
        with patch("src.plugin_system.store_manager.requests.get",
                   side_effect=real_requests.ConnectionError("boom")):
            fetched = self.sm._get_latest_commit_info(
                "https://github.com/owner/repo", branch="main"
            )
        self.assertIsNotNone(fetched)
        self.assertEqual(fetched["short_sha"], "aaaaaaa")

    def test_commit_info_preserves_good_cache_on_all_branches_404(self):
        """Regression: all-branches-404 used to overwrite good cache with None.

        The previous implementation unconditionally wrote
        ``self.commit_info_cache[cache_key] = (time.time(), None)`` after
        the branch loop, which meant a single transient failure (e.g. an
        odd 5xx or an ls-refs hiccup) wiped out the commit info we had
        just served to the UI the previous minute.
        """
        cache_key = "owner/repo:main"
        seeded = {"branch": "main", "sha": "a" * 40, "short_sha": "aaaaaaa",
                  "date_iso": "2026-04-08T00:00:00Z", "date": "2026-04-08",
                  "author": "x", "message": "y"}
        self.sm.commit_info_cache[cache_key] = (time.time() - 10_000, seeded)
        self.sm.commit_cache_timeout = 1

        # Each branches_to_try attempt returns a 404. No network error
        # exception — just a non-200 response. This is the code path
        # that used to overwrite the cache with None.
        not_found = MagicMock()
        not_found.status_code = 404
        not_found.text = "Not Found"
        with patch("src.plugin_system.store_manager.requests.get", return_value=not_found):
            fetched = self.sm._get_latest_commit_info(
                "https://github.com/owner/repo", branch="main"
            )

        self.assertIsNotNone(fetched, "good cache was wiped out by transient 404s")
        self.assertEqual(fetched["short_sha"], "aaaaaaa")
        # The cache entry must still be the good value, not None.
        self.assertIsNotNone(self.sm.commit_info_cache[cache_key][1])
|
||||
|
||||
|
||||
class TestInstallUpdateUninstallInvariants(unittest.TestCase):
    """Regression guard: the caching and tombstone work added in this PR
    must not break the install / update / uninstall code paths.

    Specifically:
    - ``install_plugin`` bypasses commit/manifest caches via force_refresh,
      so the 5→30 min TTL bump cannot cause users to install a stale commit.
    - ``update_plugin`` does the same.
    - The uninstall tombstone is only honored by the state reconciler, not
      by explicit ``install_plugin`` calls — so a user can uninstall and
      immediately reinstall from the store UI without the tombstone getting
      in the way.
    - ``was_recently_uninstalled`` is not touched by ``install_plugin``.
    """

    def setUp(self):
        workdir = TemporaryDirectory()
        self.addCleanup(workdir.cleanup)
        self._tmp = workdir
        self.sm = PluginStoreManager(plugins_dir=workdir.name)

    def test_get_plugin_info_with_force_refresh_forwards_to_commit_fetch(self):
        """install_plugin's code path must reach the network bypass."""
        self.sm.registry_cache = {
            "plugins": [{"id": "foo", "repo": "https://github.com/o/r"}]
        }
        self.sm.registry_cache_time = time.time()

        repo_calls = []
        commit_calls = []
        manifest_calls = []

        def fake_repo(url):
            repo_calls.append(url)
            return {"default_branch": "main", "stars": 0,
                    "last_commit_iso": "", "last_commit_date": ""}

        def fake_commit(url, branch, force_refresh=False):
            commit_calls.append((url, branch, force_refresh))
            return {"short_sha": "deadbee", "sha": "d" * 40,
                    "message": "m", "author": "a", "branch": branch,
                    "date": "2026-04-08", "date_iso": "2026-04-08T00:00:00Z"}

        def fake_manifest(url, branch, manifest_path, force_refresh=False):
            manifest_calls.append((url, branch, manifest_path, force_refresh))
            return None

        self.sm._get_github_repo_info = fake_repo
        self.sm._get_latest_commit_info = fake_commit
        self.sm._fetch_manifest_from_github = fake_manifest

        info = self.sm.get_plugin_info("foo", fetch_latest_from_github=True, force_refresh=True)

        self.assertIsNotNone(info)
        self.assertEqual(info["last_commit_sha"], "d" * 40)
        # force_refresh must have propagated through to the fetch helpers.
        self.assertTrue(commit_calls, "commit fetch was not called")
        self.assertTrue(commit_calls[0][2], "force_refresh=True did not reach _get_latest_commit_info")
        self.assertTrue(manifest_calls, "manifest fetch was not called")
        self.assertTrue(manifest_calls[0][3], "force_refresh=True did not reach _fetch_manifest_from_github")

    def test_install_plugin_is_not_blocked_by_tombstone(self):
        """A tombstone must only gate the reconciler, not explicit installs.

        Uses a complete, valid manifest stub and a no-op dependency
        installer so ``install_plugin`` runs all the way through to a
        True return. Anything less (e.g. swallowing exceptions) would
        hide real regressions in the install path.
        """
        import json as _json
        self.sm.registry_cache = {
            "plugins": [{"id": "bar", "repo": "https://github.com/o/bar",
                         "plugin_path": ""}]
        }
        self.sm.registry_cache_time = time.time()

        # Mark it recently uninstalled (simulates a user who just clicked
        # uninstall and then immediately clicked install again).
        self.sm.mark_recently_uninstalled("bar")
        self.assertTrue(self.sm.was_recently_uninstalled("bar"))

        # Stub the heavy bits so install_plugin can run without network.
        self.sm._get_github_repo_info = lambda url: {
            "default_branch": "main", "stars": 0,
            "last_commit_iso": "", "last_commit_date": ""
        }
        self.sm._get_latest_commit_info = lambda *a, **k: {
            "short_sha": "abc1234", "sha": "a" * 40, "branch": "main",
            "message": "m", "author": "a",
            "date": "2026-04-08", "date_iso": "2026-04-08T00:00:00Z",
        }
        self.sm._fetch_manifest_from_github = lambda *a, **k: None
        # Skip dependency install entirely (real install calls pip).
        self.sm._install_dependencies = lambda *a, **k: True

        def fake_install_via_git(repo_url, plugin_path, branches):
            # Write a COMPLETE valid manifest so install_plugin's
            # post-download validation succeeds. Required fields come
            # from install_plugin itself: id, name, class_name, display_modes.
            plugin_path.mkdir(parents=True, exist_ok=True)
            stub_manifest = {
                "id": "bar",
                "name": "Bar Plugin",
                "version": "1.0.0",
                "class_name": "BarPlugin",
                "entry_point": "manager.py",
                "display_modes": ["bar_mode"],
            }
            (plugin_path / "manifest.json").write_text(_json.dumps(stub_manifest))
            return branches[0]

        self.sm._install_via_git = fake_install_via_git

        # No exception-swallowing: if install_plugin fails for ANY reason
        # unrelated to the tombstone, the test fails loudly.
        installed = self.sm.install_plugin("bar")

        self.assertTrue(
            installed,
            "install_plugin returned False — the tombstone should not gate "
            "explicit installs and all other stubs should allow success.",
        )
        # Tombstone survives install (harmless — nothing reads it for installed plugins).
        self.assertTrue(self.sm.was_recently_uninstalled("bar"))
|
||||
|
||||
|
||||
class TestRegistryStaleCacheFallback(unittest.TestCase):
    """``fetch_registry`` falls back to the stale cache on network failure
    and backs off instead of re-retrying on every call.
    """

    def setUp(self):
        workdir = TemporaryDirectory()
        self.addCleanup(workdir.cleanup)
        self._tmp = workdir
        self.sm = PluginStoreManager(plugins_dir=workdir.name)

    def test_network_failure_returns_stale_cache(self):
        # Prime the cache with a known-good registry.
        self.sm.registry_cache = {"plugins": [{"id": "cached"}]}
        self.sm.registry_cache_time = time.time() - 10_000  # very old
        self.sm.registry_cache_timeout = 1  # force re-fetch attempt

        import requests as real_requests
        with patch.object(
            self.sm,
            "_http_get_with_retries",
            side_effect=real_requests.RequestException("boom"),
        ):
            fetched = self.sm.fetch_registry()

        self.assertEqual(fetched, {"plugins": [{"id": "cached"}]})

    def test_network_failure_with_no_cache_returns_empty(self):
        self.sm.registry_cache = None
        import requests as real_requests
        with patch.object(
            self.sm,
            "_http_get_with_retries",
            side_effect=real_requests.RequestException("boom"),
        ):
            fetched = self.sm.fetch_registry()
        self.assertEqual(fetched, {"plugins": []})

    def test_stale_fallback_bumps_timestamp_into_backoff(self):
        """Regression: after the stale-cache fallback fires, the next
        fetch_registry call must NOT re-hit the network. Without the
        timestamp bump, a flaky connection causes every request to pay
        the network timeout before falling back to stale.
        """
        self.sm.registry_cache = {"plugins": [{"id": "cached"}]}
        self.sm.registry_cache_time = time.time() - 10_000  # expired
        self.sm.registry_cache_timeout = 1
        self.sm._failure_backoff_seconds = 60

        import requests as real_requests
        attempts = {"n": 0}

        def failing_get(*args, **kwargs):
            attempts["n"] += 1
            raise real_requests.ConnectionError("boom")

        with patch.object(self.sm, "_http_get_with_retries", side_effect=failing_get):
            first = self.sm.fetch_registry()
            self.assertEqual(first, {"plugins": [{"id": "cached"}]})
            self.assertEqual(attempts["n"], 1)

            second = self.sm.fetch_registry()
            self.assertEqual(second, {"plugins": [{"id": "cached"}]})
            self.assertEqual(
                attempts["n"], 1,
                "stale registry fallback must bump registry_cache_time so "
                "subsequent requests hit the cache instead of re-retrying",
            )
|
||||
|
||||
|
||||
class TestFetchRegistryRaiseOnFailure(unittest.TestCase):
    """``fetch_registry(raise_on_failure=True)`` must propagate errors
    instead of silently falling back to the stale cache / empty dict.

    Regression guard: the state reconciler relies on this to distinguish
    "plugin genuinely not in registry" from "I can't reach the registry
    right now". Without it, a fresh boot with flaky WiFi would poison
    ``_unrecoverable_missing_on_disk`` with every config entry.
    """

    def setUp(self):
        workdir = TemporaryDirectory()
        self.addCleanup(workdir.cleanup)
        self._tmp = workdir
        self.sm = PluginStoreManager(plugins_dir=workdir.name)

    def test_request_exception_propagates_when_flag_set(self):
        import requests as real_requests
        self.sm.registry_cache = None  # no stale cache
        with patch.object(
            self.sm,
            "_http_get_with_retries",
            side_effect=real_requests.RequestException("boom"),
        ):
            with self.assertRaises(real_requests.RequestException):
                self.sm.fetch_registry(raise_on_failure=True)

    def test_request_exception_propagates_even_with_stale_cache(self):
        """Explicit caller opt-in beats the stale-cache convenience."""
        import requests as real_requests
        self.sm.registry_cache = {"plugins": [{"id": "stale"}]}
        self.sm.registry_cache_time = time.time() - 10_000
        self.sm.registry_cache_timeout = 1
        with patch.object(
            self.sm,
            "_http_get_with_retries",
            side_effect=real_requests.RequestException("boom"),
        ):
            with self.assertRaises(real_requests.RequestException):
                self.sm.fetch_registry(raise_on_failure=True)

    def test_json_decode_error_propagates_when_flag_set(self):
        import json as _json
        self.sm.registry_cache = None
        bad_response = MagicMock()
        bad_response.status_code = 200
        bad_response.raise_for_status = MagicMock()
        bad_response.json = MagicMock(
            side_effect=_json.JSONDecodeError("bad", "", 0)
        )
        with patch.object(self.sm, "_http_get_with_retries", return_value=bad_response):
            with self.assertRaises(_json.JSONDecodeError):
                self.sm.fetch_registry(raise_on_failure=True)

    def test_default_behavior_unchanged_by_new_parameter(self):
        """UI callers that don't pass the flag still get stale-cache fallback."""
        import requests as real_requests
        self.sm.registry_cache = {"plugins": [{"id": "cached"}]}
        self.sm.registry_cache_time = time.time() - 10_000
        self.sm.registry_cache_timeout = 1
        with patch.object(
            self.sm,
            "_http_get_with_retries",
            side_effect=real_requests.RequestException("boom"),
        ):
            fetched = self.sm.fetch_registry()  # default raise_on_failure=False
        self.assertEqual(fetched, {"plugins": [{"id": "cached"}]})
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
@@ -1,395 +0,0 @@
|
||||
"""Regression tests for the transactional uninstall helper and the
|
||||
``/plugins/state/reconcile`` endpoint's payload handling.
|
||||
|
||||
Bug 1: the original uninstall flow caught
|
||||
``cleanup_plugin_config`` exceptions and only logged a warning before
|
||||
proceeding to file deletion. A failure there would leave the plugin
|
||||
files on disk with no config entry (orphan). The fix is a
|
||||
``_do_transactional_uninstall`` helper that (a) aborts before touching
|
||||
the filesystem if cleanup fails, and (b) restores the config+secrets
|
||||
snapshot if file removal fails after cleanup succeeded.
|
||||
|
||||
Bug 2: the reconcile endpoint did ``payload.get('force', False)`` after
|
||||
``request.get_json(silent=True) or {}``, which raises AttributeError if
|
||||
the client sent a non-object JSON body (e.g. a bare string or array).
|
||||
Additionally, ``bool("false")`` is ``True``, so string-encoded booleans
|
||||
were mis-handled. The fix is an ``isinstance(payload, dict)`` guard plus
|
||||
routing the value through ``_coerce_to_bool``.
|
||||
"""
|
||||
|
||||
import json
|
||||
import sys
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
project_root = Path(__file__).parent.parent
|
||||
sys.path.insert(0, str(project_root))
|
||||
|
||||
from flask import Flask
|
||||
|
||||
|
||||
_API_V3_MOCKED_ATTRS = (
|
||||
'config_manager', 'plugin_manager', 'plugin_store_manager',
|
||||
'plugin_state_manager', 'saved_repositories_manager', 'schema_manager',
|
||||
'operation_queue', 'operation_history', 'cache_manager',
|
||||
)
|
||||
|
||||
|
||||
def _make_client():
|
||||
"""Minimal Flask app + mocked deps that exercises the api_v3 blueprint.
|
||||
|
||||
Returns ``(client, module, cleanup_fn)``. Callers (test ``setUp``
|
||||
methods) must register ``cleanup_fn`` with ``self.addCleanup(...)``
|
||||
so the original api_v3 singleton attributes are restored at the end
|
||||
of the test — otherwise the MagicMocks leak into later tests that
|
||||
import api_v3 expecting fresh state.
|
||||
"""
|
||||
from web_interface.blueprints import api_v3 as api_v3_module
|
||||
from web_interface.blueprints.api_v3 import api_v3
|
||||
|
||||
# Snapshot the originals so we can restore them.
|
||||
_SENTINEL = object()
|
||||
originals = {
|
||||
name: getattr(api_v3, name, _SENTINEL) for name in _API_V3_MOCKED_ATTRS
|
||||
}
|
||||
|
||||
# Mocks for all the bits the reconcile / uninstall endpoints touch.
|
||||
api_v3.config_manager = MagicMock()
|
||||
api_v3.config_manager.get_raw_file_content.return_value = {}
|
||||
api_v3.config_manager.secrets_path = "/tmp/nonexistent_secrets.json"
|
||||
api_v3.plugin_manager = MagicMock()
|
||||
api_v3.plugin_manager.plugins = {}
|
||||
api_v3.plugin_manager.plugins_dir = "/tmp"
|
||||
api_v3.plugin_store_manager = MagicMock()
|
||||
api_v3.plugin_state_manager = MagicMock()
|
||||
api_v3.plugin_state_manager.get_all_states.return_value = {}
|
||||
api_v3.saved_repositories_manager = MagicMock()
|
||||
api_v3.schema_manager = MagicMock()
|
||||
api_v3.operation_queue = None # force the direct (non-queue) path
|
||||
api_v3.operation_history = MagicMock()
|
||||
api_v3.cache_manager = MagicMock()
|
||||
|
||||
def _cleanup():
|
||||
for name, original in originals.items():
|
||||
if original is _SENTINEL:
|
||||
# Attribute didn't exist before — remove it to match.
|
||||
if hasattr(api_v3, name):
|
||||
try:
|
||||
delattr(api_v3, name)
|
||||
except AttributeError:
|
||||
pass
|
||||
else:
|
||||
setattr(api_v3, name, original)
|
||||
|
||||
app = Flask(__name__)
|
||||
app.config['TESTING'] = True
|
||||
app.config['SECRET_KEY'] = 'test'
|
||||
app.register_blueprint(api_v3, url_prefix='/api/v3')
|
||||
return app.test_client(), api_v3_module, _cleanup
|
||||
|
||||
|
||||
class TestTransactionalUninstall(unittest.TestCase):
|
||||
"""Exercises ``_do_transactional_uninstall`` directly.
|
||||
|
||||
Using the direct (non-queue) code path via the Flask client gives us
|
||||
the full uninstall endpoint behavior end-to-end, including the
|
||||
rollback on mid-flight failures.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
self.client, self.mod, _cleanup = _make_client()
|
||||
self.addCleanup(_cleanup)
|
||||
self.api_v3 = self.mod.api_v3
|
||||
|
||||
def test_cleanup_failure_aborts_before_file_removal(self):
|
||||
"""If cleanup_plugin_config raises, uninstall_plugin must NOT run."""
|
||||
self.api_v3.config_manager.cleanup_plugin_config.side_effect = RuntimeError("disk full")
|
||||
|
||||
response = self.client.post(
|
||||
'/api/v3/plugins/uninstall',
|
||||
data=json.dumps({'plugin_id': 'thing'}),
|
||||
content_type='application/json',
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, 500)
|
||||
# File removal must NOT have been attempted — otherwise we'd have
|
||||
# deleted the plugin after failing to clean its config, leaving
|
||||
# the reconciler to potentially resurrect it later.
|
||||
self.api_v3.plugin_store_manager.uninstall_plugin.assert_not_called()
|
||||
|
||||
def test_file_removal_failure_restores_snapshot(self):
|
||||
"""If uninstall_plugin returns False after cleanup, snapshot must be restored."""
|
||||
# Start with the plugin in main config and in secrets.
|
||||
stored_main = {'thing': {'enabled': True, 'custom': 'stuff'}}
|
||||
stored_secrets = {'thing': {'api_key': 'secret'}}
|
||||
|
||||
# get_raw_file_content is called twice during snapshot (main +
|
||||
# secrets) and then again during restore. We track writes through
|
||||
# save_raw_file_content so we can assert the restore happened.
|
||||
def raw_get(file_type):
|
||||
if file_type == 'main':
|
||||
return dict(stored_main)
|
||||
if file_type == 'secrets':
|
||||
return dict(stored_secrets)
|
||||
return {}
|
||||
|
||||
self.api_v3.config_manager.get_raw_file_content.side_effect = raw_get
|
||||
self.api_v3.config_manager.secrets_path = __file__ # any existing file
|
||||
self.api_v3.config_manager.cleanup_plugin_config.return_value = None
|
||||
self.api_v3.plugin_store_manager.uninstall_plugin.return_value = False
|
||||
|
||||
response = self.client.post(
|
||||
'/api/v3/plugins/uninstall',
|
||||
data=json.dumps({'plugin_id': 'thing'}),
|
||||
content_type='application/json',
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, 500)
|
||||
# After the file removal returned False, the helper must have
|
||||
# written the snapshot back. Inspect save_raw_file_content calls.
|
||||
calls = self.api_v3.config_manager.save_raw_file_content.call_args_list
|
||||
file_types_written = [c.args[0] for c in calls]
|
||||
self.assertIn('main', file_types_written,
|
||||
f"main config was not restored after uninstall failure; calls={calls}")
|
||||
# Find the main restore call and confirm our snapshot entry is present.
|
||||
for c in calls:
|
||||
if c.args[0] == 'main':
|
||||
written = c.args[1]
|
||||
self.assertIn('thing', written,
|
||||
"main config was written without the restored snapshot entry")
|
||||
self.assertEqual(written['thing'], stored_main['thing'])
|
||||
break
|
||||
|
||||
def test_file_removal_raising_also_restores_snapshot(self):
|
||||
"""Same restore path, but triggered by an exception instead of False."""
|
||||
stored_main = {'thing': {'enabled': False}}
|
||||
|
||||
def raw_get(file_type):
|
||||
if file_type == 'main':
|
||||
return dict(stored_main)
|
||||
return {}
|
||||
|
||||
self.api_v3.config_manager.get_raw_file_content.side_effect = raw_get
|
||||
self.api_v3.config_manager.cleanup_plugin_config.return_value = None
|
||||
self.api_v3.plugin_store_manager.uninstall_plugin.side_effect = OSError("rm failed")
|
||||
|
||||
response = self.client.post(
|
||||
'/api/v3/plugins/uninstall',
|
||||
data=json.dumps({'plugin_id': 'thing'}),
|
||||
content_type='application/json',
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, 500)
|
||||
calls = self.api_v3.config_manager.save_raw_file_content.call_args_list
|
||||
self.assertTrue(
|
||||
any(c.args[0] == 'main' for c in calls),
|
||||
"main config was not restored after uninstall raised",
|
||||
)
|
||||
|
||||
def test_happy_path_succeeds(self):
|
||||
"""Sanity: the transactional rework did not break the happy path."""
|
||||
self.api_v3.config_manager.get_raw_file_content.return_value = {}
|
||||
self.api_v3.config_manager.cleanup_plugin_config.return_value = None
|
||||
self.api_v3.plugin_store_manager.uninstall_plugin.return_value = True
|
||||
|
||||
response = self.client.post(
|
||||
'/api/v3/plugins/uninstall',
|
||||
data=json.dumps({'plugin_id': 'thing'}),
|
||||
content_type='application/json',
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.api_v3.plugin_store_manager.uninstall_plugin.assert_called_once_with('thing')
|
||||
|
||||
def test_file_removal_failure_reloads_previously_loaded_plugin(self):
|
||||
"""Regression: rollback must restore BOTH config AND runtime state.
|
||||
|
||||
If the plugin was loaded at runtime before the uninstall
|
||||
request, and file removal fails after unload has already
|
||||
succeeded, the rollback must call ``load_plugin`` so the user
|
||||
doesn't end up in a state where the files exist and the config
|
||||
exists but the plugin is no longer loaded.
|
||||
"""
|
||||
# Plugin is currently loaded.
|
||||
self.api_v3.plugin_manager.plugins = {'thing': MagicMock()}
|
||||
self.api_v3.config_manager.get_raw_file_content.return_value = {
|
||||
'thing': {'enabled': True}
|
||||
}
|
||||
self.api_v3.config_manager.cleanup_plugin_config.return_value = None
|
||||
self.api_v3.plugin_manager.unload_plugin.return_value = None
|
||||
self.api_v3.plugin_store_manager.uninstall_plugin.return_value = False
|
||||
|
||||
response = self.client.post(
|
||||
'/api/v3/plugins/uninstall',
|
||||
data=json.dumps({'plugin_id': 'thing'}),
|
||||
content_type='application/json',
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, 500)
|
||||
# Unload did happen (it's part of the uninstall sequence)...
|
||||
self.api_v3.plugin_manager.unload_plugin.assert_called_once_with('thing')
|
||||
# ...and because file removal failed, the rollback must have
|
||||
# called load_plugin to restore runtime state.
|
||||
self.api_v3.plugin_manager.load_plugin.assert_called_once_with('thing')
|
||||
|
||||
def test_snapshot_survives_config_read_error(self):
|
||||
"""Regression: if get_raw_file_content raises an expected error
|
||||
(OSError / ConfigError) during snapshot, the uninstall should
|
||||
still proceed — we just won't have a rollback snapshot. Narrow
|
||||
exception list must still cover the realistic failure modes.
|
||||
"""
|
||||
from src.exceptions import ConfigError
|
||||
self.api_v3.config_manager.get_raw_file_content.side_effect = ConfigError(
|
||||
"file missing", config_path="/tmp/missing"
|
||||
)
|
||||
self.api_v3.config_manager.cleanup_plugin_config.return_value = None
|
||||
self.api_v3.plugin_store_manager.uninstall_plugin.return_value = True
|
||||
|
||||
response = self.client.post(
|
||||
'/api/v3/plugins/uninstall',
|
||||
data=json.dumps({'plugin_id': 'thing'}),
|
||||
content_type='application/json',
|
||||
)
|
||||
|
||||
# Uninstall should still succeed — snapshot failure is logged
|
||||
# but doesn't block the uninstall.
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.api_v3.plugin_store_manager.uninstall_plugin.assert_called_once_with('thing')
|
||||
|
||||
def test_snapshot_does_not_swallow_programmer_errors(self):
|
||||
"""Regression: unexpected exceptions (TypeError, AttributeError)
|
||||
must propagate out of the snapshot helper so bugs surface
|
||||
during development instead of being silently logged and
|
||||
ignored. Narrowing from ``except Exception`` to
|
||||
``(OSError, ConfigError)`` is what makes this work.
|
||||
"""
|
||||
# Raise an exception that is NOT in the narrow catch list.
|
||||
self.api_v3.config_manager.get_raw_file_content.side_effect = TypeError(
|
||||
"unexpected kwarg"
|
||||
)
|
||||
|
||||
response = self.client.post(
|
||||
'/api/v3/plugins/uninstall',
|
||||
data=json.dumps({'plugin_id': 'thing'}),
|
||||
content_type='application/json',
|
||||
)
|
||||
|
||||
# The TypeError should propagate up to the endpoint's outer
|
||||
# try/except and produce a 500, NOT be silently swallowed like
|
||||
# the previous ``except Exception`` did.
|
||||
self.assertEqual(response.status_code, 500)
|
||||
# uninstall_plugin must NOT have been called — the snapshot
|
||||
# exception bubbled up before we got that far.
|
||||
self.api_v3.plugin_store_manager.uninstall_plugin.assert_not_called()
|
||||
|
||||
def test_unload_failure_restores_config_and_does_not_call_uninstall(self):
|
||||
"""If unload_plugin itself raises, config must be restored and
|
||||
uninstall_plugin must NOT be called."""
|
||||
self.api_v3.plugin_manager.plugins = {'thing': MagicMock()}
|
||||
self.api_v3.config_manager.get_raw_file_content.return_value = {
|
||||
'thing': {'enabled': True}
|
||||
}
|
||||
self.api_v3.config_manager.cleanup_plugin_config.return_value = None
|
||||
self.api_v3.plugin_manager.unload_plugin.side_effect = RuntimeError("unload boom")
|
||||
|
||||
response = self.client.post(
|
||||
'/api/v3/plugins/uninstall',
|
||||
data=json.dumps({'plugin_id': 'thing'}),
|
||||
content_type='application/json',
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, 500)
|
||||
self.api_v3.plugin_store_manager.uninstall_plugin.assert_not_called()
|
||||
# Config should have been restored.
|
||||
calls = self.api_v3.config_manager.save_raw_file_content.call_args_list
|
||||
self.assertTrue(
|
||||
any(c.args[0] == 'main' for c in calls),
|
||||
"main config was not restored after unload_plugin raised",
|
||||
)
|
||||
# load_plugin must NOT have been called — unload didn't succeed,
|
||||
# so runtime state is still what it was.
|
||||
self.api_v3.plugin_manager.load_plugin.assert_not_called()
|
||||
|
||||
|
||||
class TestReconcileEndpointPayload(unittest.TestCase):
|
||||
"""``/plugins/state/reconcile`` must handle weird JSON payloads without
|
||||
crashing, and must accept string booleans for ``force``.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
self.client, self.mod, _cleanup = _make_client()
|
||||
self.addCleanup(_cleanup)
|
||||
self.api_v3 = self.mod.api_v3
|
||||
# Stub the reconciler so we only test the payload plumbing, not
|
||||
# the full reconciliation. We patch StateReconciliation at the
|
||||
# module level where the endpoint imports it lazily.
|
||||
self._reconciler_instance = MagicMock()
|
||||
self._reconciler_instance.reconcile_state.return_value = MagicMock(
|
||||
inconsistencies_found=[],
|
||||
inconsistencies_fixed=[],
|
||||
inconsistencies_manual=[],
|
||||
message="ok",
|
||||
)
|
||||
# Patch the StateReconciliation class where it's imported inside
|
||||
# the reconcile endpoint.
|
||||
self._patcher = patch(
|
||||
'src.plugin_system.state_reconciliation.StateReconciliation',
|
||||
return_value=self._reconciler_instance,
|
||||
)
|
||||
self._patcher.start()
|
||||
self.addCleanup(self._patcher.stop)
|
||||
|
||||
def _post(self, body, content_type='application/json'):
|
||||
return self.client.post(
|
||||
'/api/v3/plugins/state/reconcile',
|
||||
data=body,
|
||||
content_type=content_type,
|
||||
)
|
||||
|
||||
def test_non_object_json_body_does_not_crash(self):
|
||||
"""A bare string JSON body must not raise AttributeError."""
|
||||
response = self._post('"just a string"')
|
||||
self.assertEqual(response.status_code, 200)
|
||||
# force must default to False.
|
||||
self._reconciler_instance.reconcile_state.assert_called_once_with(force=False)
|
||||
|
||||
def test_array_json_body_does_not_crash(self):
|
||||
response = self._post('[1, 2, 3]')
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self._reconciler_instance.reconcile_state.assert_called_once_with(force=False)
|
||||
|
||||
def test_null_json_body_does_not_crash(self):
|
||||
response = self._post('null')
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self._reconciler_instance.reconcile_state.assert_called_once_with(force=False)
|
||||
|
||||
def test_missing_force_key_defaults_to_false(self):
|
||||
response = self._post('{}')
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self._reconciler_instance.reconcile_state.assert_called_once_with(force=False)
|
||||
|
||||
def test_force_true_boolean(self):
|
||||
response = self._post(json.dumps({'force': True}))
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self._reconciler_instance.reconcile_state.assert_called_once_with(force=True)
|
||||
|
||||
def test_force_false_boolean(self):
|
||||
response = self._post(json.dumps({'force': False}))
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self._reconciler_instance.reconcile_state.assert_called_once_with(force=False)
|
||||
|
||||
def test_force_string_false_coerced_correctly(self):
|
||||
"""``bool("false")`` is ``True`` — _coerce_to_bool must fix that."""
|
||||
response = self._post(json.dumps({'force': 'false'}))
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self._reconciler_instance.reconcile_state.assert_called_once_with(force=False)
|
||||
|
||||
def test_force_string_true_coerced_correctly(self):
|
||||
response = self._post(json.dumps({'force': 'true'}))
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self._reconciler_instance.reconcile_state.assert_called_once_with(force=True)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
@@ -342,167 +342,6 @@ class TestStateReconciliation(unittest.TestCase):
|
||||
self.assertEqual(state, {})
|
||||
|
||||
|
||||
class TestStateReconciliationUnrecoverable(unittest.TestCase):
|
||||
"""Tests for the unrecoverable-plugin cache and force reconcile.
|
||||
|
||||
Regression coverage for the infinite reinstall loop where a config
|
||||
entry referenced a plugin not present in the registry (e.g. legacy
|
||||
'github' / 'youtube' entries). The reconciler used to retry the
|
||||
install on every HTTP request; it now caches the failure for the
|
||||
process lifetime and only retries on an explicit ``force=True``
|
||||
reconcile call.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
self.temp_dir = Path(tempfile.mkdtemp())
|
||||
self.plugins_dir = self.temp_dir / "plugins"
|
||||
self.plugins_dir.mkdir()
|
||||
|
||||
self.state_manager = Mock(spec=PluginStateManager)
|
||||
self.state_manager.get_all_states.return_value = {}
|
||||
self.config_manager = Mock()
|
||||
self.config_manager.load_config.return_value = {
|
||||
"ghost": {"enabled": True}
|
||||
}
|
||||
self.plugin_manager = Mock()
|
||||
self.plugin_manager.plugin_manifests = {}
|
||||
self.plugin_manager.plugins = {}
|
||||
|
||||
# Store manager with an empty registry — install_plugin always fails
|
||||
self.store_manager = Mock()
|
||||
self.store_manager.fetch_registry.return_value = {"plugins": []}
|
||||
self.store_manager.install_plugin.return_value = False
|
||||
self.store_manager.was_recently_uninstalled.return_value = False
|
||||
|
||||
self.reconciler = StateReconciliation(
|
||||
state_manager=self.state_manager,
|
||||
config_manager=self.config_manager,
|
||||
plugin_manager=self.plugin_manager,
|
||||
plugins_dir=self.plugins_dir,
|
||||
store_manager=self.store_manager,
|
||||
)
|
||||
|
||||
def tearDown(self):
|
||||
shutil.rmtree(self.temp_dir)
|
||||
|
||||
def test_not_in_registry_marks_unrecoverable_without_install(self):
|
||||
"""If the plugin isn't in the registry at all, skip install_plugin."""
|
||||
result = self.reconciler.reconcile_state()
|
||||
|
||||
# One inconsistency, unfixable, no install attempt made.
|
||||
self.assertEqual(len(result.inconsistencies_found), 1)
|
||||
self.assertEqual(len(result.inconsistencies_fixed), 0)
|
||||
self.store_manager.install_plugin.assert_not_called()
|
||||
self.assertIn("ghost", self.reconciler._unrecoverable_missing_on_disk)
|
||||
|
||||
def test_subsequent_reconcile_does_not_retry(self):
|
||||
"""Second reconcile pass must not touch install_plugin or fetch_registry again."""
|
||||
self.reconciler.reconcile_state()
|
||||
self.store_manager.fetch_registry.reset_mock()
|
||||
self.store_manager.install_plugin.reset_mock()
|
||||
|
||||
result = self.reconciler.reconcile_state()
|
||||
|
||||
# Still one inconsistency, still no install attempt, no new registry fetch
|
||||
self.assertEqual(len(result.inconsistencies_found), 1)
|
||||
inc = result.inconsistencies_found[0]
|
||||
self.assertFalse(inc.can_auto_fix)
|
||||
self.assertEqual(inc.fix_action, FixAction.MANUAL_FIX_REQUIRED)
|
||||
self.store_manager.install_plugin.assert_not_called()
|
||||
self.store_manager.fetch_registry.assert_not_called()
|
||||
|
||||
def test_force_reconcile_clears_unrecoverable_cache(self):
|
||||
"""force=True must re-attempt previously-failed plugins."""
|
||||
self.reconciler.reconcile_state()
|
||||
self.assertIn("ghost", self.reconciler._unrecoverable_missing_on_disk)
|
||||
|
||||
# Now pretend the registry gained the plugin so the pre-check passes
|
||||
# and install_plugin is actually invoked.
|
||||
self.store_manager.fetch_registry.return_value = {
|
||||
"plugins": [{"id": "ghost"}]
|
||||
}
|
||||
self.store_manager.install_plugin.return_value = True
|
||||
self.store_manager.install_plugin.reset_mock()
|
||||
|
||||
# Config still references ghost; disk still missing it — the
|
||||
# reconciler should re-attempt install now that force=True cleared
|
||||
# the cache. Use assert_called_once_with so a future regression
|
||||
# that accidentally triggers a second install attempt on force=True
|
||||
# is caught.
|
||||
result = self.reconciler.reconcile_state(force=True)
|
||||
|
||||
self.store_manager.install_plugin.assert_called_once_with("ghost")
|
||||
|
||||
def test_registry_unreachable_does_not_mark_unrecoverable(self):
|
||||
"""Transient registry failures should not poison the cache."""
|
||||
self.store_manager.fetch_registry.side_effect = Exception("network down")
|
||||
|
||||
result = self.reconciler.reconcile_state()
|
||||
|
||||
self.assertEqual(len(result.inconsistencies_found), 1)
|
||||
self.assertNotIn("ghost", self.reconciler._unrecoverable_missing_on_disk)
|
||||
self.store_manager.install_plugin.assert_not_called()
|
||||
|
||||
def test_recently_uninstalled_skips_auto_repair(self):
|
||||
"""A freshly-uninstalled plugin must not be resurrected by the reconciler."""
|
||||
self.store_manager.was_recently_uninstalled.return_value = True
|
||||
self.store_manager.fetch_registry.return_value = {
|
||||
"plugins": [{"id": "ghost"}]
|
||||
}
|
||||
|
||||
result = self.reconciler.reconcile_state()
|
||||
|
||||
self.assertEqual(len(result.inconsistencies_found), 1)
|
||||
inc = result.inconsistencies_found[0]
|
||||
self.assertFalse(inc.can_auto_fix)
|
||||
self.assertEqual(inc.fix_action, FixAction.MANUAL_FIX_REQUIRED)
|
||||
self.store_manager.install_plugin.assert_not_called()
|
||||
|
||||
def test_real_store_manager_empty_registry_on_network_failure(self):
|
||||
"""Regression: using the REAL PluginStoreManager (not a Mock), verify
|
||||
the reconciler does NOT poison the unrecoverable cache when
|
||||
``fetch_registry`` fails with no stale cache available.
|
||||
|
||||
Previously, the default stale-cache fallback in ``fetch_registry``
|
||||
silently returned ``{"plugins": []}`` on network failure with no
|
||||
cache. The reconciler's ``_auto_repair_missing_plugin`` saw "no
|
||||
candidates in registry" and marked everything unrecoverable — a
|
||||
regression that would bite every user doing a fresh boot on flaky
|
||||
WiFi. The fix is ``fetch_registry(raise_on_failure=True)`` in
|
||||
``_auto_repair_missing_plugin`` so the reconciler can tell a real
|
||||
registry miss from a network error.
|
||||
"""
|
||||
from src.plugin_system.store_manager import PluginStoreManager
|
||||
import requests as real_requests
|
||||
|
||||
real_store = PluginStoreManager(plugins_dir=str(self.plugins_dir))
|
||||
real_store.registry_cache = None # fresh boot, no cache
|
||||
real_store.registry_cache_time = None
|
||||
|
||||
# Stub the underlying HTTP so no real network call is made but the
|
||||
# real fetch_registry code path runs.
|
||||
real_store._http_get_with_retries = Mock(
|
||||
side_effect=real_requests.ConnectionError("wifi down")
|
||||
)
|
||||
|
||||
reconciler = StateReconciliation(
|
||||
state_manager=self.state_manager,
|
||||
config_manager=self.config_manager,
|
||||
plugin_manager=self.plugin_manager,
|
||||
plugins_dir=self.plugins_dir,
|
||||
store_manager=real_store,
|
||||
)
|
||||
|
||||
result = reconciler.reconcile_state()
|
||||
|
||||
# One inconsistency (ghost is in config, not on disk), but
|
||||
# because the registry lookup failed transiently, we must NOT
|
||||
# have marked it unrecoverable — a later reconcile (after the
|
||||
# network comes back) can still auto-repair.
|
||||
self.assertEqual(len(result.inconsistencies_found), 1)
|
||||
self.assertNotIn("ghost", reconciler._unrecoverable_missing_on_disk)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
||||
|
||||
@@ -667,20 +667,8 @@ import threading as _threading
|
||||
_reconciliation_lock = _threading.Lock()
|
||||
|
||||
def _run_startup_reconciliation() -> None:
|
||||
"""Run state reconciliation in background to auto-repair missing plugins.
|
||||
|
||||
Reconciliation runs exactly once per process lifetime, regardless of
|
||||
whether every inconsistency could be auto-fixed. Previously, a failed
|
||||
auto-repair (e.g. a config entry referencing a plugin that no longer
|
||||
exists in the registry) would reset ``_reconciliation_started`` to False,
|
||||
causing the ``@app.before_request`` hook to re-trigger reconciliation on
|
||||
every single HTTP request — an infinite install-retry loop that pegged
|
||||
the CPU and flooded the log. Unresolved issues are now left in place for
|
||||
the user to address via the UI; the reconciler itself also caches
|
||||
per-plugin unrecoverable failures internally so repeated reconcile calls
|
||||
stay cheap.
|
||||
"""
|
||||
global _reconciliation_done
|
||||
"""Run state reconciliation in background to auto-repair missing plugins."""
|
||||
global _reconciliation_done, _reconciliation_started
|
||||
from src.logging_config import get_logger
|
||||
_logger = get_logger('reconciliation')
|
||||
|
||||
@@ -696,22 +684,18 @@ def _run_startup_reconciliation() -> None:
|
||||
result = reconciler.reconcile_state()
|
||||
if result.inconsistencies_found:
|
||||
_logger.info("[Reconciliation] %s", result.message)
|
||||
if result.inconsistencies_fixed:
|
||||
plugin_manager.discover_plugins()
|
||||
if not result.reconciliation_successful:
|
||||
_logger.warning(
|
||||
"[Reconciliation] Finished with %d unresolved issue(s); "
|
||||
"will not retry automatically. Use the Plugin Store or the "
|
||||
"manual 'Reconcile' action to resolve.",
|
||||
len(result.inconsistencies_manual),
|
||||
)
|
||||
if result.reconciliation_successful:
|
||||
if result.inconsistencies_fixed:
|
||||
plugin_manager.discover_plugins()
|
||||
_reconciliation_done = True
|
||||
else:
|
||||
_logger.warning("[Reconciliation] Finished with unresolved issues, will retry")
|
||||
with _reconciliation_lock:
|
||||
_reconciliation_started = False
|
||||
except Exception as e:
|
||||
_logger.error("[Reconciliation] Error: %s", e, exc_info=True)
|
||||
finally:
|
||||
# Always mark done — we do not want an unhandled exception (or an
|
||||
# unresolved inconsistency) to cause the @before_request hook to
|
||||
# retrigger reconciliation on every subsequent request.
|
||||
_reconciliation_done = True
|
||||
with _reconciliation_lock:
|
||||
_reconciliation_started = False
|
||||
|
||||
# Initialize health monitor and run reconciliation on first request
|
||||
@app.before_request
|
||||
@@ -726,6 +710,4 @@ def check_health_monitor():
|
||||
_threading.Thread(target=_run_startup_reconciliation, daemon=True).start()
|
||||
|
||||
if __name__ == '__main__':
|
||||
# threaded=True is Flask's default since 1.0 but stated explicitly so that
|
||||
# long-lived /api/v3/stream/* SSE connections don't starve other requests.
|
||||
app.run(host='0.0.0.0', port=5000, debug=True, threaded=True)
|
||||
app.run(host='0.0.0.0', port=5000, debug=True)
|
||||
|
||||
@@ -1105,6 +1105,267 @@ def save_raw_secrets_config():
|
||||
status_code=500
|
||||
)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Backup & Restore
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
_BACKUP_FILENAME_RE = re.compile(r'^ledmatrix-backup-[A-Za-z0-9_-]+-\d{8}_\d{6}\.zip$')
|
||||
|
||||
|
||||
def _backup_exports_dir() -> Path:
|
||||
"""Directory where user-downloadable backup ZIPs are stored."""
|
||||
d = PROJECT_ROOT / 'config' / 'backups' / 'exports'
|
||||
d.mkdir(parents=True, exist_ok=True)
|
||||
return d
|
||||
|
||||
|
||||
def _is_safe_backup_filename(name: str) -> bool:
|
||||
"""Allow-list filter for backup filenames used in download/delete."""
|
||||
return bool(_BACKUP_FILENAME_RE.match(name))
|
||||
|
||||
|
||||
@api_v3.route('/backup/preview', methods=['GET'])
|
||||
def backup_preview():
|
||||
"""Return a summary of what a new backup would include."""
|
||||
try:
|
||||
from src import backup_manager
|
||||
preview = backup_manager.preview_backup_contents(PROJECT_ROOT)
|
||||
return jsonify({'status': 'success', 'data': preview})
|
||||
except Exception:
|
||||
logger.exception("[Backup] preview failed")
|
||||
return jsonify({'status': 'error', 'message': 'Failed to compute backup preview'}), 500
|
||||
|
||||
|
||||
@api_v3.route('/backup/export', methods=['POST'])
|
||||
def backup_export():
|
||||
"""Create a new backup ZIP and return its filename."""
|
||||
try:
|
||||
from src import backup_manager
|
||||
zip_path = backup_manager.create_backup(PROJECT_ROOT, output_dir=_backup_exports_dir())
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'filename': zip_path.name,
|
||||
'size': zip_path.stat().st_size,
|
||||
'created_at': datetime.utcnow().isoformat() + 'Z',
|
||||
})
|
||||
except Exception:
|
||||
logger.exception("[Backup] export failed")
|
||||
return jsonify({'status': 'error', 'message': 'Failed to create backup'}), 500
|
||||
|
||||
|
||||
@api_v3.route('/backup/list', methods=['GET'])
def backup_list():
    """List backup ZIPs currently stored on disk, newest first."""
    try:
        exports = _backup_exports_dir()
        candidates = sorted(exports.glob('ledmatrix-backup-*.zip'), reverse=True)
        # Walrus keeps one stat() per file while staying in comprehension form.
        entries = [
            {
                'filename': path.name,
                'size': (info := path.stat()).st_size,
                'created_at': datetime.utcfromtimestamp(info.st_mtime).isoformat() + 'Z',
            }
            for path in candidates
            if _is_safe_backup_filename(path.name)
        ]
        return jsonify({'status': 'success', 'data': entries})
    except Exception:
        logger.exception("[Backup] list failed")
        return jsonify({'status': 'error', 'message': 'Failed to list backups'}), 500
|
||||
|
||||
|
||||
@api_v3.route('/backup/download/<path:filename>', methods=['GET'])
def backup_download(filename):
    """Stream a previously-created backup ZIP to the browser.

    The filename is validated against the strict allow-list before any
    filesystem access, so traversal via the ``<path:>`` converter is blocked.
    """
    try:
        if not _is_safe_backup_filename(filename):
            return jsonify({'status': 'error', 'message': 'Invalid backup filename'}), 400
        exports = _backup_exports_dir()
        if not (exports / filename).exists():
            return jsonify({'status': 'error', 'message': 'Backup not found'}), 404
        return send_from_directory(
            str(exports), filename, as_attachment=True, mimetype='application/zip'
        )
    except Exception:
        logger.exception("[Backup] download failed")
        return jsonify({'status': 'error', 'message': 'Failed to download backup'}), 500
|
||||
|
||||
|
||||
@api_v3.route('/backup/<path:filename>', methods=['DELETE'])
def backup_delete(filename):
    """Delete a stored backup ZIP.

    The filename is validated against the strict backup-name allow-list
    before any filesystem access, so path traversal is impossible even
    though the route uses a ``<path:>`` converter.
    """
    try:
        if not _is_safe_backup_filename(filename):
            return jsonify({'status': 'error', 'message': 'Invalid backup filename'}), 400
        target = _backup_exports_dir() / filename
        if not target.exists():
            return jsonify({'status': 'error', 'message': 'Backup not found'}), 404
        target.unlink()
        # Fix: the success message was an f-string with no placeholder and
        # always rendered as "Deleted (unknown)"; report the actual file.
        return jsonify({'status': 'success', 'message': f'Deleted {filename}'})
    except Exception:
        logger.exception("[Backup] delete failed")
        return jsonify({'status': 'error', 'message': 'Failed to delete backup'}), 500
|
||||
|
||||
|
||||
def _save_uploaded_backup_to_temp() -> Tuple[Optional[Path], Optional[Tuple[Response, int]]]:
    """Persist the uploaded backup ZIP from the current request to a temp file.

    Shared by the validate and restore endpoints. Returns ``(temp_path, None)``
    on success or ``(None, error_response)`` on failure. The caller owns — and
    must delete — the returned temp file.
    """
    import tempfile as _tempfile

    if 'backup_file' not in request.files:
        return None, (jsonify({'status': 'error', 'message': 'No backup file provided'}), 400)

    upload = request.files['backup_file']
    if not upload.filename:
        return None, (jsonify({'status': 'error', 'message': 'No file selected'}), 400)

    is_valid, err = validate_file_upload(
        upload.filename, max_size_mb=200, allowed_extensions=['.zip']
    )
    if not is_valid:
        return None, (jsonify({'status': 'error', 'message': err}), 400)

    # mkstemp gives us a unique, already-created path; close the fd right away
    # because werkzeug's upload.save() opens the path itself.
    handle, tmp_name = _tempfile.mkstemp(prefix='ledmatrix_upload_', suffix='.zip')
    os.close(handle)
    tmp_path = Path(tmp_name)
    try:
        upload.save(str(tmp_path))
    except Exception:
        tmp_path.unlink(missing_ok=True)
        logger.exception("[Backup] Failed to save uploaded backup")
        return None, (jsonify({'status': 'error', 'message': 'Failed to read uploaded file'}), 500)
    return tmp_path, None
|
||||
|
||||
|
||||
@api_v3.route('/backup/validate', methods=['POST'])
def backup_validate():
    """Inspect an uploaded backup without applying it."""
    tmp_path, upload_error = _save_uploaded_backup_to_temp()
    if upload_error is not None:
        return upload_error
    try:
        from src import backup_manager

        valid, error, manifest = backup_manager.validate_backup(tmp_path)
        if not valid:
            return jsonify({'status': 'error', 'message': error}), 400
        return jsonify({'status': 'success', 'data': manifest})
    except Exception:
        logger.exception("[Backup] validate failed")
        return jsonify({'status': 'error', 'message': 'Failed to validate backup'}), 500
    finally:
        # Always discard the temp upload, whatever the validation outcome.
        try:
            tmp_path.unlink(missing_ok=True)
        except OSError:
            pass
|
||||
|
||||
|
||||
@api_v3.route('/backup/restore', methods=['POST'])
def backup_restore():
    """
    Restore an uploaded backup into the running installation.

    The request is multipart/form-data with:
    - ``backup_file``: the ZIP upload
    - ``options``: JSON string with RestoreOptions fields (all boolean)

    Responds with ``status: success`` when the restore fully succeeded,
    ``partial`` when ``result.success`` is False (some items failed), or a
    500 on unexpected errors. The temp upload is always deleted on exit.
    """
    tmp_path, err = _save_uploaded_backup_to_temp()
    if err is not None:
        return err
    try:
        from src import backup_manager

        # Parse options (all optional; default is "restore everything").
        raw_opts = request.form.get('options') or '{}'
        try:
            opts_dict = json.loads(raw_opts)
        except json.JSONDecodeError:
            return jsonify({'status': 'error', 'message': 'Invalid options JSON'}), 400

        # Every flag is coerced with bool() so truthy JSON values ("1", 1)
        # behave predictably; missing keys default to True.
        opts = backup_manager.RestoreOptions(
            restore_config=bool(opts_dict.get('restore_config', True)),
            restore_secrets=bool(opts_dict.get('restore_secrets', True)),
            restore_wifi=bool(opts_dict.get('restore_wifi', True)),
            restore_fonts=bool(opts_dict.get('restore_fonts', True)),
            restore_plugin_uploads=bool(opts_dict.get('restore_plugin_uploads', True)),
            reinstall_plugins=bool(opts_dict.get('reinstall_plugins', True)),
        )

        # Snapshot current config through the atomic manager so the pre-restore
        # state is recoverable via the existing rollback_config() path.
        # Failure here is non-fatal: we warn and proceed with the restore.
        if api_v3.config_manager and opts.restore_config:
            try:
                current = api_v3.config_manager.load_config()
                _save_config_atomic(api_v3.config_manager, current, create_backup=True)
            except Exception:
                logger.warning("[Backup] Pre-restore snapshot failed (continuing)", exc_info=True)

        result = backup_manager.restore_backup(tmp_path, PROJECT_ROOT, opts)

        # Reinstall plugins via the store manager, one at a time. Plugins
        # already installed are counted as successes without reinstalling.
        if opts.reinstall_plugins and api_v3.plugin_store_manager and result.plugins_to_install:
            installed_names = set()
            if api_v3.plugin_manager:
                try:
                    existing = api_v3.plugin_manager.get_all_plugin_info() or []
                    installed_names = {p.get('id') for p in existing if p.get('id')}
                except Exception:
                    installed_names = set()
            for entry in result.plugins_to_install:
                plugin_id = entry.get('plugin_id')
                if not plugin_id:
                    continue
                if plugin_id in installed_names:
                    result.plugins_installed.append(plugin_id)
                    continue
                # One failed install must not abort the rest of the list.
                try:
                    ok = api_v3.plugin_store_manager.install_plugin(plugin_id)
                    if ok:
                        result.plugins_installed.append(plugin_id)
                    else:
                        result.plugins_failed.append({'plugin_id': plugin_id, 'error': 'install returned False'})
                except Exception as install_err:
                    logger.exception("[Backup] plugin reinstall failed for %s", plugin_id)
                    result.plugins_failed.append({'plugin_id': plugin_id, 'error': str(install_err)})

        # Clear font catalog cache so restored fonts show up.
        # NOTE(review): this is a substring match over the joined restored-item
        # names — any entry merely containing "fonts" triggers it; verify the
        # labels backup_manager puts in result.restored.
        if 'fonts' in ' '.join(result.restored):
            try:
                from web_interface.cache import delete_cached
                delete_cached('fonts_catalog')
            except Exception:
                pass

        # Reload config_manager state so the UI picks up the new values
        # without a full service restart. TypeError fallback handles an
        # older load_config() signature without the force_reload kwarg.
        if api_v3.config_manager and opts.restore_config:
            try:
                api_v3.config_manager.load_config(force_reload=True)
            except TypeError:
                try:
                    api_v3.config_manager.load_config()
                except Exception:
                    pass
            except Exception:
                logger.warning("[Backup] Could not reload config after restore", exc_info=True)

        return jsonify({
            'status': 'success' if result.success else 'partial',
            'data': result.to_dict(),
        })
    except Exception:
        logger.exception("[Backup] restore failed")
        return jsonify({'status': 'error', 'message': 'Failed to restore backup'}), 500
    finally:
        # Always delete the temp upload, on success and failure alike.
        try:
            tmp_path.unlink(missing_ok=True)
        except OSError:
            pass
|
||||
|
||||
|
||||
@api_v3.route('/system/status', methods=['GET'])
|
||||
def get_system_status():
|
||||
"""Get system status"""
|
||||
@@ -1714,23 +1975,9 @@ def get_installed_plugins():
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
# Re-discover plugins only if the plugins directory has actually
|
||||
# changed since our last scan, or if the caller explicitly asked
|
||||
# for a refresh. The previous unconditional ``discover_plugins()``
|
||||
# call (plus a per-plugin manifest re-read) made this endpoint
|
||||
# O(plugins) in disk I/O on every page refresh, which on an SD-card
|
||||
# Pi4 with ~15 plugins was pegging the CPU and blocking the UI
|
||||
# "connecting to display" spinner for minutes.
|
||||
force_refresh = request.args.get('refresh', '').lower() in ('1', 'true', 'yes')
|
||||
plugins_dir_path = Path(api_v3.plugin_manager.plugins_dir)
|
||||
try:
|
||||
current_mtime = plugins_dir_path.stat().st_mtime if plugins_dir_path.exists() else 0
|
||||
except OSError:
|
||||
current_mtime = 0
|
||||
last_mtime = getattr(api_v3, '_installed_plugins_dir_mtime', None)
|
||||
if force_refresh or last_mtime != current_mtime:
|
||||
api_v3.plugin_manager.discover_plugins()
|
||||
api_v3._installed_plugins_dir_mtime = current_mtime
|
||||
# Re-discover plugins to ensure we have the latest list
|
||||
# This handles cases where plugins are added/removed after app startup
|
||||
api_v3.plugin_manager.discover_plugins()
|
||||
|
||||
# Get all installed plugin info from the plugin manager
|
||||
all_plugin_info = api_v3.plugin_manager.get_all_plugin_info()
|
||||
@@ -1743,10 +1990,17 @@ def get_installed_plugins():
|
||||
for plugin_info in all_plugin_info:
|
||||
plugin_id = plugin_info.get('id')
|
||||
|
||||
# Note: we intentionally do NOT re-read manifest.json here.
|
||||
# discover_plugins() above already reparses manifests on change;
|
||||
# re-reading on every request added ~1 syscall+json.loads per
|
||||
# plugin per request for no benefit.
|
||||
# Re-read manifest from disk to ensure we have the latest metadata
|
||||
manifest_path = Path(api_v3.plugin_manager.plugins_dir) / plugin_id / "manifest.json"
|
||||
if manifest_path.exists():
|
||||
try:
|
||||
with open(manifest_path, 'r', encoding='utf-8') as f:
|
||||
fresh_manifest = json.load(f)
|
||||
# Update plugin_info with fresh manifest data
|
||||
plugin_info.update(fresh_manifest)
|
||||
except Exception as e:
|
||||
# If we can't read the fresh manifest, use the cached one
|
||||
logger.warning("[PluginStore] Could not read fresh manifest for %s: %s", plugin_id, e)
|
||||
|
||||
# Get enabled status from config (source of truth)
|
||||
# Read from config file first, fall back to plugin instance if config doesn't have the key
|
||||
@@ -2376,30 +2630,14 @@ def reconcile_plugin_state():
|
||||
|
||||
from src.plugin_system.state_reconciliation import StateReconciliation
|
||||
|
||||
# Pass the store manager so auto-repair of missing-on-disk plugins
|
||||
# can actually run. Previously this endpoint silently degraded to
|
||||
# MANUAL_FIX_REQUIRED because store_manager was omitted.
|
||||
reconciler = StateReconciliation(
|
||||
state_manager=api_v3.plugin_state_manager,
|
||||
config_manager=api_v3.config_manager,
|
||||
plugin_manager=api_v3.plugin_manager,
|
||||
plugins_dir=Path(api_v3.plugin_manager.plugins_dir),
|
||||
store_manager=api_v3.plugin_store_manager,
|
||||
plugins_dir=Path(api_v3.plugin_manager.plugins_dir)
|
||||
)
|
||||
|
||||
# Allow the caller to force a retry of previously-unrecoverable
|
||||
# plugins (e.g. after the registry has been updated or a typo fixed).
|
||||
# Non-object JSON bodies (e.g. a bare string or array) must fall
|
||||
# through to the default False instead of raising AttributeError,
|
||||
# and string booleans like "false" must coerce correctly — hence
|
||||
# the isinstance guard plus _coerce_to_bool.
|
||||
force = False
|
||||
if request.is_json:
|
||||
payload = request.get_json(silent=True)
|
||||
if isinstance(payload, dict):
|
||||
force = _coerce_to_bool(payload.get('force', False))
|
||||
|
||||
result = reconciler.reconcile_state(force=force)
|
||||
result = reconciler.reconcile_state()
|
||||
|
||||
return success_response(
|
||||
data={
|
||||
@@ -2822,181 +3060,6 @@ def update_plugin():
|
||||
status_code=500
|
||||
)
|
||||
|
||||
def _snapshot_plugin_config(plugin_id: str):
    """Capture the plugin's current config and secrets entries for rollback.

    Returns ``(main_entry, secrets_entry)`` where each element is a deep copy
    of the plugin's dict from the respective file, or ``None`` if the plugin
    was not present there. The transactional uninstall path uses this so it
    can restore state if file removal fails after config cleanup already
    succeeded.
    """
    import copy as _copy
    import os as _os

    main_entry = None
    secrets_entry = None

    # Only filesystem errors (FileNotFoundError is a subclass of OSError;
    # IOError aliases OSError in Python 3) and ConfigError — which is what
    # ``get_raw_file_content`` wraps load failures in — are swallowed here.
    # Programmer errors (TypeError, AttributeError, ...) surface loudly.
    try:
        main_config = api_v3.config_manager.get_raw_file_content('main')
        if plugin_id in main_config:
            main_entry = _copy.deepcopy(main_config[plugin_id])
    except (OSError, ConfigError) as e:
        logger.warning("[PluginUninstall] Could not snapshot main config for %s: %s", plugin_id, e)

    try:
        if _os.path.exists(api_v3.config_manager.secrets_path):
            secrets_config = api_v3.config_manager.get_raw_file_content('secrets')
            if plugin_id in secrets_config:
                secrets_entry = _copy.deepcopy(secrets_config[plugin_id])
    except (OSError, ConfigError) as e:
        logger.warning("[PluginUninstall] Could not snapshot secrets for %s: %s", plugin_id, e)

    return (main_entry, secrets_entry)
|
||||
|
||||
|
||||
def _restore_plugin_config(plugin_id: str, snapshot) -> None:
    """Best-effort restoration of a snapshot taken by ``_snapshot_plugin_config``.

    Invoked on the unhappy path when ``cleanup_plugin_config`` already
    succeeded but the subsequent file removal failed. If the restore itself
    fails we log loudly — the caller still surfaces the original uninstall
    error and the user can reconcile manually.
    """
    main_entry, secrets_entry = snapshot

    if main_entry is not None:
        try:
            main_config = api_v3.config_manager.get_raw_file_content('main')
            main_config[plugin_id] = main_entry
            api_v3.config_manager.save_raw_file_content('main', main_config)
            logger.warning("[PluginUninstall] Restored main config entry for %s after uninstall failure", plugin_id)
        except Exception as e:
            logger.error(
                "[PluginUninstall] FAILED to restore main config entry for %s after uninstall failure: %s",
                plugin_id, e, exc_info=True,
            )

    if secrets_entry is not None:
        try:
            import os as _os

            # Start from the existing secrets file if present, else a fresh dict.
            secrets_config = (
                api_v3.config_manager.get_raw_file_content('secrets')
                if _os.path.exists(api_v3.config_manager.secrets_path)
                else {}
            )
            secrets_config[plugin_id] = secrets_entry
            api_v3.config_manager.save_raw_file_content('secrets', secrets_config)
            logger.warning("[PluginUninstall] Restored secrets entry for %s after uninstall failure", plugin_id)
        except Exception as e:
            logger.error(
                "[PluginUninstall] FAILED to restore secrets entry for %s after uninstall failure: %s",
                plugin_id, e, exc_info=True,
            )
|
||||
|
||||
|
||||
def _do_transactional_uninstall(plugin_id: str, preserve_config: bool) -> None:
    """Run the full uninstall as a best-effort transaction.

    Order:
    1. Mark tombstone (so any reconciler racing with us cannot resurrect
       the plugin mid-flight).
    2. Snapshot existing config + secrets entries (for rollback).
    3. Run ``cleanup_plugin_config``. If this raises, re-raise — files
       have NOT been touched, so aborting here leaves a fully consistent
       state: plugin is still installed and still in config.
    4. Unload the plugin from the running plugin manager.
    5. Call ``store_manager.uninstall_plugin``. If it returns False or
       raises, RESTORE the snapshot (so config matches disk) and then
       propagate the failure.
    6. Invalidate schema cache and remove from the state manager only
       after the file removal succeeds.

    Raises on any failure so the caller can return an error to the user.
    When ``preserve_config`` is True, config/secrets are left untouched and
    the snapshot is an empty ``(None, None)`` placeholder.
    """
    # hasattr guard: older store managers may not support tombstones.
    if hasattr(api_v3.plugin_store_manager, 'mark_recently_uninstalled'):
        api_v3.plugin_store_manager.mark_recently_uninstalled(plugin_id)

    snapshot = _snapshot_plugin_config(plugin_id) if not preserve_config else (None, None)

    # Step 1: config cleanup. If this fails, bail out early — the plugin
    # files on disk are still intact and the caller will get a clear
    # error.
    if not preserve_config:
        try:
            api_v3.config_manager.cleanup_plugin_config(plugin_id, remove_secrets=True)
        except Exception as cleanup_err:
            logger.error(
                "[PluginUninstall] Config cleanup failed for %s; aborting uninstall (files untouched): %s",
                plugin_id, cleanup_err, exc_info=True,
            )
            raise

    # Remember whether the plugin was loaded *before* we touched runtime
    # state — we need this so we can reload it on rollback if file
    # removal fails after we've already unloaded it.
    was_loaded = bool(
        api_v3.plugin_manager and plugin_id in api_v3.plugin_manager.plugins
    )

    def _rollback(reason_err):
        """Undo both the config cleanup AND the unload (best effort)."""
        if not preserve_config:
            _restore_plugin_config(plugin_id, snapshot)
        if was_loaded and api_v3.plugin_manager:
            try:
                api_v3.plugin_manager.load_plugin(plugin_id)
            except Exception as reload_err:
                logger.error(
                    "[PluginUninstall] FAILED to reload %s after uninstall rollback: %s",
                    plugin_id, reload_err, exc_info=True,
                )

    # Step 2: unload if loaded. Also part of the rollback boundary — if
    # unload itself raises, restore config and surface the error.
    if was_loaded:
        try:
            api_v3.plugin_manager.unload_plugin(plugin_id)
        except Exception as unload_err:
            logger.error(
                "[PluginUninstall] unload_plugin raised for %s; restoring config snapshot: %s",
                plugin_id, unload_err, exc_info=True,
            )
            if not preserve_config:
                _restore_plugin_config(plugin_id, snapshot)
            # Plugin was never successfully unloaded, so no reload is
            # needed here — runtime state is still what it was before.
            raise

    # Step 3: remove files. If this fails, roll back the config cleanup
    # AND reload the plugin so the user doesn't end up with an orphaned
    # install (files on disk + no config entry + plugin no longer
    # loaded at runtime).
    try:
        success = api_v3.plugin_store_manager.uninstall_plugin(plugin_id)
    except Exception as uninstall_err:
        logger.error(
            "[PluginUninstall] uninstall_plugin raised for %s; rolling back: %s",
            plugin_id, uninstall_err, exc_info=True,
        )
        _rollback(uninstall_err)
        raise

    if not success:
        logger.error(
            "[PluginUninstall] uninstall_plugin returned False for %s; rolling back",
            plugin_id,
        )
        _rollback(None)
        raise RuntimeError(f"Failed to uninstall plugin {plugin_id}")

    # Past this point the filesystem and config are both in the
    # "uninstalled" state. Clean up the cheap in-memory bookkeeping.
    if api_v3.schema_manager:
        api_v3.schema_manager.invalidate_cache(plugin_id)
    if api_v3.plugin_state_manager:
        api_v3.plugin_state_manager.remove_plugin_state(plugin_id)
|
||||
|
||||
|
||||
@api_v3.route('/plugins/uninstall', methods=['POST'])
|
||||
def uninstall_plugin():
|
||||
"""Uninstall plugin"""
|
||||
@@ -3019,28 +3082,49 @@ def uninstall_plugin():
|
||||
# Use operation queue if available
|
||||
if api_v3.operation_queue:
|
||||
def uninstall_callback(operation):
|
||||
"""Callback to execute plugin uninstallation transactionally."""
|
||||
try:
|
||||
_do_transactional_uninstall(plugin_id, preserve_config)
|
||||
except Exception as err:
|
||||
error_msg = f'Failed to uninstall plugin {plugin_id}: {err}'
|
||||
"""Callback to execute plugin uninstallation."""
|
||||
# Unload the plugin first if it's loaded
|
||||
if api_v3.plugin_manager and plugin_id in api_v3.plugin_manager.plugins:
|
||||
api_v3.plugin_manager.unload_plugin(plugin_id)
|
||||
|
||||
# Uninstall the plugin
|
||||
success = api_v3.plugin_store_manager.uninstall_plugin(plugin_id)
|
||||
|
||||
if not success:
|
||||
error_msg = f'Failed to uninstall plugin {plugin_id}'
|
||||
if api_v3.operation_history:
|
||||
api_v3.operation_history.record_operation(
|
||||
"uninstall",
|
||||
plugin_id=plugin_id,
|
||||
status="failed",
|
||||
error=error_msg,
|
||||
error=error_msg
|
||||
)
|
||||
# Re-raise so the operation_queue marks this op as failed.
|
||||
raise
|
||||
raise Exception(error_msg)
|
||||
|
||||
# Invalidate schema cache
|
||||
if api_v3.schema_manager:
|
||||
api_v3.schema_manager.invalidate_cache(plugin_id)
|
||||
|
||||
# Clean up plugin configuration if not preserving
|
||||
if not preserve_config:
|
||||
try:
|
||||
api_v3.config_manager.cleanup_plugin_config(plugin_id, remove_secrets=True)
|
||||
except Exception as cleanup_err:
|
||||
logger.warning("[PluginUninstall] Failed to cleanup config for %s: %s", plugin_id, cleanup_err)
|
||||
|
||||
# Remove from state manager
|
||||
if api_v3.plugin_state_manager:
|
||||
api_v3.plugin_state_manager.remove_plugin_state(plugin_id)
|
||||
|
||||
# Record in history
|
||||
if api_v3.operation_history:
|
||||
api_v3.operation_history.record_operation(
|
||||
"uninstall",
|
||||
plugin_id=plugin_id,
|
||||
status="success",
|
||||
details={"preserve_config": preserve_config},
|
||||
details={"preserve_config": preserve_config}
|
||||
)
|
||||
|
||||
return {'success': True, 'message': f'Plugin {plugin_id} uninstalled successfully'}
|
||||
|
||||
# Enqueue operation
|
||||
@@ -3055,32 +3139,55 @@ def uninstall_plugin():
|
||||
message=f'Plugin {plugin_id} uninstallation queued'
|
||||
)
|
||||
else:
|
||||
# Fallback to direct uninstall — same transactional helper.
|
||||
try:
|
||||
_do_transactional_uninstall(plugin_id, preserve_config)
|
||||
except Exception as err:
|
||||
# Fallback to direct uninstall
|
||||
# Unload the plugin first if it's loaded
|
||||
if api_v3.plugin_manager and plugin_id in api_v3.plugin_manager.plugins:
|
||||
api_v3.plugin_manager.unload_plugin(plugin_id)
|
||||
|
||||
# Uninstall the plugin
|
||||
success = api_v3.plugin_store_manager.uninstall_plugin(plugin_id)
|
||||
|
||||
if success:
|
||||
# Invalidate schema cache
|
||||
if api_v3.schema_manager:
|
||||
api_v3.schema_manager.invalidate_cache(plugin_id)
|
||||
|
||||
# Clean up plugin configuration if not preserving
|
||||
if not preserve_config:
|
||||
try:
|
||||
api_v3.config_manager.cleanup_plugin_config(plugin_id, remove_secrets=True)
|
||||
except Exception as cleanup_err:
|
||||
logger.warning("[PluginUninstall] Failed to cleanup config for %s: %s", plugin_id, cleanup_err)
|
||||
|
||||
# Remove from state manager
|
||||
if api_v3.plugin_state_manager:
|
||||
api_v3.plugin_state_manager.remove_plugin_state(plugin_id)
|
||||
|
||||
# Record in history
|
||||
if api_v3.operation_history:
|
||||
api_v3.operation_history.record_operation(
|
||||
"uninstall",
|
||||
plugin_id=plugin_id,
|
||||
status="success",
|
||||
details={"preserve_config": preserve_config}
|
||||
)
|
||||
|
||||
return success_response(message=f'Plugin {plugin_id} uninstalled successfully')
|
||||
else:
|
||||
if api_v3.operation_history:
|
||||
api_v3.operation_history.record_operation(
|
||||
"uninstall",
|
||||
plugin_id=plugin_id,
|
||||
status="failed",
|
||||
error=f'Failed to uninstall plugin {plugin_id}: {err}',
|
||||
error=f'Failed to uninstall plugin {plugin_id}'
|
||||
)
|
||||
|
||||
return error_response(
|
||||
ErrorCode.PLUGIN_UNINSTALL_FAILED,
|
||||
f'Failed to uninstall plugin {plugin_id}: {err}',
|
||||
status_code=500,
|
||||
f'Failed to uninstall plugin {plugin_id}',
|
||||
status_code=500
|
||||
)
|
||||
|
||||
if api_v3.operation_history:
|
||||
api_v3.operation_history.record_operation(
|
||||
"uninstall",
|
||||
plugin_id=plugin_id,
|
||||
status="success",
|
||||
details={"preserve_config": preserve_config},
|
||||
)
|
||||
return success_response(message=f'Plugin {plugin_id} uninstalled successfully')
|
||||
|
||||
except Exception as e:
|
||||
logger.exception("[PluginUninstall] Unhandled exception")
|
||||
from src.web_interface.errors import WebInterfaceError
|
||||
|
||||
@@ -76,6 +76,8 @@ def load_partial(partial_name):
|
||||
return _load_logs_partial()
|
||||
elif partial_name == 'raw-json':
|
||||
return _load_raw_json_partial()
|
||||
elif partial_name == 'backup-restore':
|
||||
return _load_backup_restore_partial()
|
||||
elif partial_name == 'wifi':
|
||||
return _load_wifi_partial()
|
||||
elif partial_name == 'cache':
|
||||
@@ -296,6 +298,13 @@ def _load_raw_json_partial():
|
||||
except Exception as e:
|
||||
return f"Error: {str(e)}", 500
|
||||
|
||||
def _load_backup_restore_partial():
|
||||
"""Load backup & restore partial."""
|
||||
try:
|
||||
return render_template('v3/partials/backup_restore.html')
|
||||
except Exception as e:
|
||||
return f"Error: {str(e)}", 500
|
||||
|
||||
@pages_v3.route('/setup')
|
||||
def captive_setup():
|
||||
"""Lightweight captive portal setup page — self-contained, no frameworks."""
|
||||
|
||||
@@ -120,11 +120,7 @@ def main():
|
||||
|
||||
# Run the web server with error handling for client disconnections
|
||||
try:
|
||||
# threaded=True is Flask's default since 1.0, but set it explicitly
|
||||
# so it's self-documenting: the two /api/v3/stream/* SSE endpoints
|
||||
# hold long-lived connections and would starve other requests under
|
||||
# a single-threaded server.
|
||||
app.run(host='0.0.0.0', port=5000, debug=False, threaded=True)
|
||||
app.run(host='0.0.0.0', port=5000, debug=False)
|
||||
except (OSError, BrokenPipeError) as e:
|
||||
# Suppress non-critical socket errors (client disconnections)
|
||||
if isinstance(e, OSError) and e.errno in (113, 32, 104): # No route to host, Broken pipe, Connection reset
|
||||
|
||||
@@ -7161,13 +7161,6 @@ window.getSchemaProperty = getSchemaProperty;
|
||||
window.escapeHtml = escapeHtml;
|
||||
window.escapeAttribute = escapeAttribute;
|
||||
|
||||
// Expose GitHub install handlers. These must be assigned inside the IIFE —
|
||||
// from outside the IIFE, `typeof attachInstallButtonHandler` evaluates to
|
||||
// 'undefined' and the fallback path at the bottom of this file fires a
|
||||
// [FALLBACK] attachInstallButtonHandler not available on window warning.
|
||||
window.attachInstallButtonHandler = attachInstallButtonHandler;
|
||||
window.setupGitHubInstallHandlers = setupGitHubInstallHandlers;
|
||||
|
||||
})(); // End IIFE
|
||||
|
||||
// Functions to handle array-of-objects
|
||||
@@ -7397,8 +7390,16 @@ if (typeof loadInstalledPlugins !== 'undefined') {
|
||||
if (typeof renderInstalledPlugins !== 'undefined') {
|
||||
window.renderInstalledPlugins = renderInstalledPlugins;
|
||||
}
|
||||
// GitHub install handlers are now exposed inside the IIFE (see above).
|
||||
// searchPluginStore is also exposed inside the IIFE after its definition.
|
||||
// Expose GitHub install handlers for debugging and manual testing
|
||||
if (typeof setupGitHubInstallHandlers !== 'undefined') {
|
||||
window.setupGitHubInstallHandlers = setupGitHubInstallHandlers;
|
||||
console.log('[GLOBAL] setupGitHubInstallHandlers exposed to window');
|
||||
}
|
||||
if (typeof attachInstallButtonHandler !== 'undefined') {
|
||||
window.attachInstallButtonHandler = attachInstallButtonHandler;
|
||||
console.log('[GLOBAL] attachInstallButtonHandler exposed to window');
|
||||
}
|
||||
// searchPluginStore is now exposed inside the IIFE after its definition
|
||||
|
||||
// Verify critical functions are available
|
||||
if (_PLUGIN_DEBUG_EARLY) {
|
||||
|
||||
@@ -786,25 +786,56 @@
|
||||
})();
|
||||
</script>
|
||||
|
||||
<!-- Alpine.js for reactive components.
|
||||
Load the local copy first (always works, no CDN round-trip, no AP-mode
|
||||
branch needed). `defer` on an HTML-parsed <script> is honored and runs
|
||||
after DOM parse but before DOMContentLoaded, which is exactly what
|
||||
Alpine wants — so no deferLoadingAlpine gymnastics are needed.
|
||||
The inline rescue below only fires if the local file is missing. -->
|
||||
<script defer src="{{ url_for('static', filename='v3/js/alpinejs.min.js') }}"></script>
|
||||
<!-- Alpine.js for reactive components -->
|
||||
<!-- Use local file when in AP mode (192.168.4.x) to avoid CDN dependency -->
|
||||
<script>
// Alpine.js bootstrap.
//
// Goal: load Alpine from the local static copy when the device is serving
// its own WiFi access point (no internet), and from the CDN otherwise,
// with each source falling back to the other on load failure.

// Rescue: if the local Alpine didn't load for any reason, pull the CDN
// copy once on window load. This is a last-ditch fallback, not the
// primary path.
window.addEventListener('load', function() {
    if (typeof window.Alpine === 'undefined') {
        console.warn('[Alpine] Local file failed to load, falling back to CDN');
        const s = document.createElement('script');
        s.src = 'https://unpkg.com/alpinejs@3.x.x/dist/cdn.min.js';
        document.head.appendChild(s);
    }
});
(function() {
    // Prevent Alpine from auto-initializing by setting deferLoadingAlpine before it loads.
    // NOTE(review): deferLoadingAlpine was an Alpine v2 lifecycle hook; the
    // script below loads the 3.x CDN build — verify Alpine 3 still honors it.
    window.deferLoadingAlpine = function(callback) {
        // Wait for DOM to be ready before starting Alpine, so x-data
        // components exist when it scans the document.
        function waitForReady() {
            if (document.readyState === 'loading') {
                // Re-enter this function once parsing finishes.
                document.addEventListener('DOMContentLoaded', waitForReady);
                return;
            }

            // app() is already defined in head, so we can initialize Alpine.
            if (callback && typeof callback === 'function') {
                callback();
            } else if (window.Alpine && typeof window.Alpine.start === 'function') {
                // If callback not provided but Alpine is available, start it.
                try {
                    window.Alpine.start();
                } catch (e) {
                    // Alpine may already be initialized, ignore.
                    console.warn('Alpine start error (may already be initialized):', e);
                }
            }
        }

        waitForReady();
    };

    // Detect AP mode by IP address.
    // Assumes the access-point subnet is always 192.168.4.x — TODO confirm
    // against the hotspot configuration.
    const isAPMode = window.location.hostname === '192.168.4.1' ||
        window.location.hostname.startsWith('192.168.4.');

    // Primary and fallback sources swap depending on connectivity mode.
    const alpineSrc = isAPMode ? '/static/v3/js/alpinejs.min.js' : 'https://unpkg.com/alpinejs@3.x.x/dist/cdn.min.js';
    const alpineFallback = isAPMode ? 'https://unpkg.com/alpinejs@3.x.x/dist/cdn.min.js' : '/static/v3/js/alpinejs.min.js';

    // Inject the chosen script; on error, try the other source exactly once.
    const script = document.createElement('script');
    script.defer = true;
    script.src = alpineSrc;
    script.onerror = function() {
        if (alpineSrc !== alpineFallback) {
            const fallback = document.createElement('script');
            fallback.defer = true;
            fallback.src = alpineFallback;
            document.head.appendChild(fallback);
        }
    };
    document.head.appendChild(script);
})();
</script>
|
||||
|
||||
<!-- CodeMirror for JSON editing - lazy loaded when needed -->
|
||||
@@ -937,6 +968,11 @@
|
||||
class="nav-tab">
|
||||
<i class="fas fa-file-code"></i>Config Editor
|
||||
</button>
|
||||
<button @click="activeTab = 'backup-restore'"
|
||||
:class="activeTab === 'backup-restore' ? 'nav-tab-active' : ''"
|
||||
class="nav-tab">
|
||||
<i class="fas fa-save"></i>Backup & Restore
|
||||
</button>
|
||||
<button @click="activeTab = 'fonts'"
|
||||
:class="activeTab === 'fonts' ? 'nav-tab-active' : ''"
|
||||
class="nav-tab">
|
||||
@@ -1139,6 +1175,18 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Backup & Restore tab -->
|
||||
<div x-show="activeTab === 'backup-restore'" x-transition>
|
||||
<div id="backup-restore-content" hx-get="/v3/partials/backup-restore" hx-trigger="revealed" hx-swap="innerHTML">
|
||||
<div class="animate-pulse">
|
||||
<div class="bg-white rounded-lg shadow p-6">
|
||||
<div class="h-4 bg-gray-200 rounded w-1/4 mb-4"></div>
|
||||
<div class="h-32 bg-gray-200 rounded"></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Config Editor tab -->
|
||||
<div x-show="activeTab === 'config-editor'" x-transition>
|
||||
<div id="config-editor-content" hx-get="/v3/partials/raw-json" hx-trigger="revealed" hx-swap="innerHTML">
|
||||
|
||||
341
web_interface/templates/v3/partials/backup_restore.html
Normal file
341
web_interface/templates/v3/partials/backup_restore.html
Normal file
@@ -0,0 +1,341 @@
|
||||
<!-- Backup & Restore partial. Served standalone and injected via HTMX.
     The buttons call global handlers (exportBackup, validateRestoreFile,
     runRestore, clearRestore, loadBackupList) defined by the <script>
     shipped with this partial; the opt-* checkbox ids map 1:1 to the
     restore option keys that runRestore() posts to the server. -->
<div class="space-y-6" id="backup-restore-root">

    <!-- Security warning: backups embed secrets unencrypted. -->
    <div class="bg-red-50 border border-red-200 rounded-lg p-4">
        <div class="flex">
            <div class="flex-shrink-0">
                <i class="fas fa-exclamation-triangle text-red-600"></i>
            </div>
            <div class="ml-3">
                <h3 class="text-sm font-medium text-red-800">Backup files contain secrets in plaintext</h3>
                <div class="mt-1 text-sm text-red-700">
                    Your API keys (weather, Spotify, YouTube, GitHub, etc.) and any saved WiFi passwords
                    are stored inside the backup ZIP as plain text. Treat the file like a password —
                    store it somewhere private and delete it when you no longer need it.
                </div>
            </div>
        </div>
    </div>

    <!-- Export card: summary filled in by loadPreview(). -->
    <div class="bg-white rounded-lg shadow p-6">
        <div class="border-b border-gray-200 pb-4 mb-6">
            <div class="flex items-center justify-between">
                <div>
                    <h2 class="text-lg font-semibold text-gray-900">Export backup</h2>
                    <p class="mt-1 text-sm text-gray-600">
                        Download a single ZIP with all of your settings so you can restore it later.
                    </p>
                </div>
                <button onclick="exportBackup()" id="export-backup-btn"
                        class="inline-flex items-center px-4 py-2 border border-transparent text-sm font-medium rounded-md text-white bg-blue-600 hover:bg-blue-700">
                    <i class="fas fa-download mr-2"></i>
                    Download backup
                </button>
            </div>
        </div>
        <div id="export-preview" class="text-sm text-gray-600">
            <div class="animate-pulse">Loading summary…</div>
        </div>
    </div>

    <!-- Restore card: inspect (validate) first, then restore. -->
    <div class="bg-white rounded-lg shadow p-6">
        <div class="border-b border-gray-200 pb-4 mb-6">
            <h2 class="text-lg font-semibold text-gray-900">Restore from backup</h2>
            <p class="mt-1 text-sm text-gray-600">
                Upload a backup ZIP exported from this or another LEDMatrix install.
                You'll see a summary before anything is written to disk.
            </p>
        </div>

        <div class="space-y-4">
            <div>
                <label class="block text-sm font-medium text-gray-700 mb-2">Backup file</label>
                <input type="file" id="restore-file-input" accept=".zip"
                       class="block w-full text-sm text-gray-700 file:mr-4 file:py-2 file:px-4 file:rounded-md file:border-0 file:text-sm file:font-medium file:bg-blue-50 file:text-blue-700 hover:file:bg-blue-100">
            </div>

            <div>
                <button onclick="validateRestoreFile()" id="validate-restore-btn"
                        class="inline-flex items-center px-3 py-2 border border-gray-300 text-sm font-medium rounded-md text-gray-700 bg-white hover:bg-gray-50">
                    <i class="fas fa-check-circle mr-2"></i>
                    Inspect file
                </button>
            </div>

            <!-- Hidden until validation succeeds; populated by renderRestorePreview(). -->
            <div id="restore-preview" class="hidden bg-gray-50 border border-gray-200 rounded-md p-4">
                <h3 class="text-sm font-medium text-gray-900 mb-2">Backup contents</h3>
                <dl id="restore-preview-body" class="text-sm text-gray-700 space-y-1"></dl>

                <h3 class="text-sm font-medium text-gray-900 mt-4 mb-2">Choose what to restore</h3>
                <div class="grid grid-cols-1 md:grid-cols-2 gap-2 text-sm text-gray-700">
                    <label class="flex items-center gap-2"><input type="checkbox" id="opt-config" checked> <span>Main configuration</span></label>
                    <label class="flex items-center gap-2"><input type="checkbox" id="opt-secrets" checked> <span>API keys (secrets)</span></label>
                    <label class="flex items-center gap-2"><input type="checkbox" id="opt-wifi" checked> <span>WiFi configuration</span></label>
                    <label class="flex items-center gap-2"><input type="checkbox" id="opt-fonts" checked> <span>User-uploaded fonts</span></label>
                    <label class="flex items-center gap-2"><input type="checkbox" id="opt-plugin-uploads" checked> <span>Plugin image uploads</span></label>
                    <label class="flex items-center gap-2"><input type="checkbox" id="opt-reinstall" checked> <span>Reinstall missing plugins</span></label>
                </div>

                <div class="mt-4 flex gap-2">
                    <button onclick="runRestore()" id="run-restore-btn"
                            class="inline-flex items-center px-4 py-2 border border-transparent text-sm font-medium rounded-md text-white bg-green-600 hover:bg-green-700">
                        <i class="fas fa-upload mr-2"></i>
                        Restore now
                    </button>
                    <button onclick="clearRestore()"
                            class="inline-flex items-center px-3 py-2 border border-gray-300 text-sm font-medium rounded-md text-gray-700 bg-white hover:bg-gray-50">
                        Cancel
                    </button>
                </div>
            </div>

            <!-- Outcome panel written by runRestore(). -->
            <div id="restore-result" class="hidden"></div>
        </div>
    </div>

    <!-- History card: table rendered by loadBackupList(). -->
    <div class="bg-white rounded-lg shadow p-6">
        <div class="border-b border-gray-200 pb-4 mb-6">
            <div class="flex items-center justify-between">
                <div>
                    <h2 class="text-lg font-semibold text-gray-900">Backup history</h2>
                    <p class="mt-1 text-sm text-gray-600">Previously exported backups stored on this device.</p>
                </div>
                <button onclick="loadBackupList()"
                        class="inline-flex items-center px-3 py-2 border border-gray-300 text-sm font-medium rounded-md text-gray-700 bg-white hover:bg-gray-50">
                    <i class="fas fa-sync-alt mr-2"></i>
                    Refresh
                </button>
            </div>
        </div>
        <div id="backup-history" class="text-sm text-gray-600">Loading…</div>
    </div>

</div>
|
||||
|
||||
<script>
|
||||
(function () {
|
||||
// Show a user-facing message. Prefers the page-level showNotification
// helper (provided by the parent page — TODO confirm) and falls back to
// the console so messages are never silently dropped.
function notify(message, kind) {
    const level = kind || 'info';
    if (typeof showNotification === 'function') {
        showNotification(message, level);
    } else {
        console.log('[backup]', level, message);
    }
}
|
||||
|
||||
// Render a byte count as a short human-readable size ("512 B", "1.5 KB").
// Falsy input (0, null, undefined) renders as "0 B"; values are capped at GB.
function formatSize(bytes) {
    if (!bytes) return '0 B';
    const UNITS = ['B', 'KB', 'MB', 'GB'];
    let magnitude = 0;
    let value = bytes;
    for (; value >= 1024 && magnitude < UNITS.length - 1; magnitude += 1) {
        value /= 1024;
    }
    // Whole bytes need no decimals; scaled values get one.
    const digits = magnitude === 0 ? 0 : 1;
    return `${value.toFixed(digits)} ${UNITS[magnitude]}`;
}
|
||||
|
||||
// Escape a value for safe interpolation into innerHTML.
//
// BUG FIX: the previous map replaced each HTML-special character with
// itself ('&' -> '&', '<' -> '<', ...), so nothing was actually escaped
// and server-controlled strings (filenames, error messages) were injected
// into the DOM as live markup — an XSS vector. Each character now maps to
// its HTML entity. null/undefined render as the empty string; any other
// value is stringified first.
function escapeHtml(value) {
    const entities = {
        '&': '&amp;',
        '<': '&lt;',
        '>': '&gt;',
        '"': '&quot;',
        "'": '&#39;'
    };
    return String(value == null ? '' : value).replace(/[&<>"']/g, function (c) {
        return entities[c];
    });
}
|
||||
|
||||
// Fetch a dry-run summary of what an export would contain and render it
// as a bullet list inside the export card (#export-preview). On any
// failure the card shows the error as plain text (textContent, so no
// markup injection is possible on that path).
async function loadPreview() {
    const el = document.getElementById('export-preview');
    try {
        const res = await fetch('/api/v3/backup/preview');
        const payload = await res.json();
        if (payload.status !== 'success') throw new Error(payload.message || 'Preview failed');
        // d: {has_config, has_secrets, has_wifi, user_fonts[], plugin_uploads, plugins[]}
        // — shape inferred from the fields read below; confirm against the API.
        const d = payload.data || {};
        el.innerHTML = `
            <ul class="list-disc pl-5 space-y-1">
                <li>Main config: <strong>${d.has_config ? 'yes' : 'no'}</strong></li>
                <li>Secrets: <strong>${d.has_secrets ? 'yes' : 'no'}</strong></li>
                <li>WiFi config: <strong>${d.has_wifi ? 'yes' : 'no'}</strong></li>
                <li>User fonts: <strong>${(d.user_fonts || []).length}</strong> ${d.user_fonts && d.user_fonts.length ? '(' + d.user_fonts.map(escapeHtml).join(', ') + ')' : ''}</li>
                <li>Plugin image uploads: <strong>${d.plugin_uploads || 0}</strong> file(s)</li>
                <li>Installed plugins: <strong>${(d.plugins || []).length}</strong></li>
            </ul>`;
    } catch (err) {
        el.textContent = 'Could not load preview: ' + err.message;
    }
}
|
||||
|
||||
// Fetch the list of backups stored on the device and render it as a table
// in #backup-history. Server-controlled strings (filenames, timestamps)
// are passed through escapeHtml before interpolation; delete buttons are
// wired via addEventListener + data-filename rather than inline onclick,
// which sidesteps quoting issues with arbitrary filenames.
async function loadBackupList() {
    const el = document.getElementById('backup-history');
    el.textContent = 'Loading…';
    try {
        const res = await fetch('/api/v3/backup/list');
        const payload = await res.json();
        if (payload.status !== 'success') throw new Error(payload.message || 'List failed');
        // entries: [{filename, size, created_at}, ...] — fields read below.
        const entries = payload.data || [];
        if (!entries.length) {
            el.innerHTML = '<p>No backups have been created yet.</p>';
            return;
        }
        el.innerHTML = `
            <table class="min-w-full divide-y divide-gray-200">
                <thead>
                    <tr>
                        <th class="text-left py-2">Filename</th>
                        <th class="text-left py-2">Size</th>
                        <th class="text-left py-2">Created</th>
                        <th></th>
                    </tr>
                </thead>
                <tbody class="divide-y divide-gray-100">
                    ${entries.map(e => `
                        <tr>
                            <td class="py-2 font-mono text-xs">${escapeHtml(e.filename)}</td>
                            <td class="py-2">${formatSize(e.size)}</td>
                            <td class="py-2">${escapeHtml(e.created_at)}</td>
                            <td class="py-2 text-right space-x-2">
                                <a href="/api/v3/backup/download/${encodeURIComponent(e.filename)}"
                                   class="text-blue-600 hover:underline">Download</a>
                                <button data-filename="${escapeHtml(e.filename)}"
                                        class="text-red-600 hover:underline backup-delete-btn">Delete</button>
                            </td>
                        </tr>
                    `).join('')}
                </tbody>
            </table>`;
        // Attach handlers after the innerHTML swap replaces the nodes.
        el.querySelectorAll('.backup-delete-btn').forEach(btn => {
            btn.addEventListener('click', () => deleteBackup(btn.dataset.filename));
        });
    } catch (err) {
        // Plain-text error path; no markup interpolation.
        el.textContent = 'Could not load backups: ' + err.message;
    }
}
|
||||
|
||||
// Ask the server to build a backup ZIP, then immediately navigate to the
// download endpoint so the browser saves it, and refresh the history list.
// The export button is disabled and shows a spinner for the duration.
async function exportBackup() {
    const button = document.getElementById('export-backup-btn');
    button.disabled = true;
    button.innerHTML = '<i class="fas fa-spinner fa-spin mr-2"></i>Creating…';
    try {
        const response = await fetch('/api/v3/backup/export', { method: 'POST' });
        const body = await response.json();
        if (body.status !== 'success') {
            throw new Error(body.message || 'Export failed');
        }
        notify('Backup created: ' + body.filename, 'success');
        // Trigger browser download immediately.
        window.location.href = '/api/v3/backup/download/' + encodeURIComponent(body.filename);
        await loadBackupList();
    } catch (err) {
        notify('Export failed: ' + err.message, 'error');
    } finally {
        // Always restore the button, success or not.
        button.disabled = false;
        button.innerHTML = '<i class="fas fa-download mr-2"></i>Download backup';
    }
}
|
||||
|
||||
// Delete a stored backup from the device after a confirmation prompt,
// then refresh the history table.
async function deleteBackup(filename) {
    if (!confirm('Delete ' + filename + '?')) {
        return;
    }
    try {
        const response = await fetch('/api/v3/backup/' + encodeURIComponent(filename), { method: 'DELETE' });
        const body = await response.json();
        if (body.status !== 'success') {
            throw new Error(body.message || 'Delete failed');
        }
        notify('Backup deleted', 'success');
        await loadBackupList();
    } catch (err) {
        notify('Delete failed: ' + err.message, 'error');
    }
}
|
||||
|
||||
// Upload the selected ZIP for server-side inspection; on success, show
// the parsed manifest so the user can choose what to restore. Nothing is
// written to disk by this call.
async function validateRestoreFile() {
    const picker = document.getElementById('restore-file-input');
    const file = picker.files && picker.files[0];
    if (!file) {
        notify('Choose a backup file first', 'error');
        return;
    }
    const form = new FormData();
    form.append('backup_file', file);
    try {
        const response = await fetch('/api/v3/backup/validate', { method: 'POST', body: form });
        const body = await response.json();
        if (body.status !== 'success') {
            throw new Error(body.message || 'Validation failed');
        }
        renderRestorePreview(body.data);
    } catch (err) {
        notify('Invalid backup: ' + err.message, 'error');
    }
}
|
||||
|
||||
// Render a validated backup manifest into the (hidden) restore preview
// panel and reveal it. All manifest strings are server/ZIP-controlled and
// go through escapeHtml before interpolation.
// manifest fields read: created_at, hostname, ledmatrix_version,
// detected_contents[], plugins[{plugin_id}] — confirm against the
// validate endpoint's response shape.
function renderRestorePreview(manifest) {
    const wrap = document.getElementById('restore-preview');
    const body = document.getElementById('restore-preview-body');
    const detected = manifest.detected_contents || [];
    const plugins = manifest.plugins || [];
    body.innerHTML = `
        <div><strong>Created:</strong> ${escapeHtml(manifest.created_at || 'unknown')}</div>
        <div><strong>Source host:</strong> ${escapeHtml(manifest.hostname || 'unknown')}</div>
        <div><strong>LEDMatrix version:</strong> ${escapeHtml(manifest.ledmatrix_version || 'unknown')}</div>
        <div><strong>Includes:</strong> ${detected.length ? detected.map(escapeHtml).join(', ') : '(nothing detected)'}</div>
        <div><strong>Plugins referenced:</strong> ${plugins.length ? plugins.map(p => escapeHtml(p.plugin_id)).join(', ') : 'none'}</div>
    `;
    wrap.classList.remove('hidden');
}
|
||||
|
||||
// Reset the restore UI: hide the preview and result panels and clear the
// chosen file so the user can start over.
function clearRestore() {
    ['restore-preview', 'restore-result'].forEach(function (id) {
        document.getElementById(id).classList.add('hidden');
    });
    document.getElementById('restore-file-input').value = '';
}
|
||||
|
||||
// Perform the actual restore: post the selected ZIP plus the user's
// checkbox choices to the server, then render the per-category outcome
// (restored / skipped / plugin installs / errors) into #restore-result.
// Destructive — a confirm() guards the overwrite. The restore button is
// disabled with a spinner for the duration.
async function runRestore() {
    const input = document.getElementById('restore-file-input');
    if (!input.files || !input.files[0]) {
        notify('Choose a backup file first', 'error');
        return;
    }
    if (!confirm('Restore from this backup? Current configuration will be overwritten.')) return;

    // Option keys mirror the opt-* checkbox ids in the partial's markup.
    const options = {
        restore_config: document.getElementById('opt-config').checked,
        restore_secrets: document.getElementById('opt-secrets').checked,
        restore_wifi: document.getElementById('opt-wifi').checked,
        restore_fonts: document.getElementById('opt-fonts').checked,
        restore_plugin_uploads: document.getElementById('opt-plugin-uploads').checked,
        reinstall_plugins: document.getElementById('opt-reinstall').checked,
    };
    const fd = new FormData();
    fd.append('backup_file', input.files[0]);
    // Options travel as a JSON string field alongside the file upload.
    fd.append('options', JSON.stringify(options));

    const btn = document.getElementById('run-restore-btn');
    btn.disabled = true;
    btn.innerHTML = '<i class="fas fa-spinner fa-spin mr-2"></i>Restoring…';
    try {
        const res = await fetch('/api/v3/backup/restore', { method: 'POST', body: fd });
        const payload = await res.json();
        // data fields read: restored[], skipped[], plugins_installed[],
        // plugins_failed[{plugin_id, error}], errors[] — confirm against
        // the restore endpoint's response shape.
        const data = payload.data || {};
        const result = document.getElementById('restore-result');
        // Non-success is rendered as a warning (yellow) rather than thrown:
        // a partial restore still carries useful per-category detail.
        const ok = payload.status === 'success';
        result.className = (ok ? 'bg-green-50 border-green-200 text-green-800' : 'bg-yellow-50 border-yellow-200 text-yellow-800') + ' border rounded-md p-4';
        result.classList.remove('hidden');
        result.innerHTML = `
            <h3 class="font-medium mb-2">${ok ? 'Restore complete' : 'Restore finished with warnings'}</h3>
            <div><strong>Restored:</strong> ${(data.restored || []).map(escapeHtml).join(', ') || 'none'}</div>
            <div><strong>Skipped:</strong> ${(data.skipped || []).map(escapeHtml).join(', ') || 'none'}</div>
            <div><strong>Plugins installed:</strong> ${(data.plugins_installed || []).map(escapeHtml).join(', ') || 'none'}</div>
            <div><strong>Plugins failed:</strong> ${(data.plugins_failed || []).map(p => escapeHtml(p.plugin_id + ' (' + p.error + ')')).join(', ') || 'none'}</div>
            <div><strong>Errors:</strong> ${(data.errors || []).map(escapeHtml).join('; ') || 'none'}</div>
            <p class="mt-2">Restart the display service to apply all changes.</p>
        `;
        notify(ok ? 'Restore complete' : 'Restore finished with warnings', ok ? 'success' : 'info');
    } catch (err) {
        notify('Restore failed: ' + err.message, 'error');
    } finally {
        // Always restore the button, success or not.
        btn.disabled = false;
        btn.innerHTML = '<i class="fas fa-upload mr-2"></i>Restore now';
    }
}
|
||||
|
||||
// The partial's markup uses inline onclick attributes, so the handlers
// must be published on window — the enclosing IIFE otherwise keeps them
// private.
Object.assign(window, {
    exportBackup: exportBackup,
    loadBackupList: loadBackupList,
    validateRestoreFile: validateRestoreFile,
    clearRestore: clearRestore,
    runRestore: runRestore,
});

// Populate both cards as soon as this partial's script runs.
loadPreview();
loadBackupList();
|
||||
})();
|
||||
</script>
|
||||
Reference in New Issue
Block a user