mirror of
https://github.com/ChuckBuilds/LEDMatrix.git
synced 2026-04-29 20:13:00 +00:00
feat(web): add config backup & restore UI (#310)
* feat(web): add config backup & restore UI Adds a Backup & Restore tab to the v3 web UI that packages user config, secrets, WiFi, user-uploaded fonts, plugin image uploads, and the installed plugin list into a single ZIP for safe reinstall recovery. Restore extracts the bundle, snapshots current state via the existing atomic config manager (so rollback stays available), reapplies the selected sections, and optionally reinstalls missing plugins from the store. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com> * fix(backup): address PR review findings - backup_manager: read plugin state from "states" key (not "plugins") to match the actual plugin_state.json format written by state_manager - backup_manager: stream ZIP directly to a temp file instead of building it in an io.BytesIO buffer to avoid OOM on Raspberry Pi - backup_manager: tighten plugin-uploads path validation in validate_backup and restore_backup to require "/uploads/" in the path, rejecting any non-uploads files smuggled under assets/plugins/ - api_v3: enforce 200 MB upload limit by streaming in chunks rather than relying on validate_file_upload (which only checks the filename) - api_v3: replace bool() with _coerce_to_bool() for RestoreOptions fields so string "false" is not treated as truthy - api_v3: capture and log _save_config_atomic return value instead of discarding it; log rather than silence font-cache and config-reload errors - backup_restore.html: track inspectedFile so runRestore always applies to the file the user inspected, not a subsequently selected file; clear on input change or clearRestore() - backup_restore.html: throw on non-success restore payload so errors are surfaced via the error notification path instead of yellow "warnings" - test: update fixture to use correct "states" key structure; import SCHEMA_VERSION constant instead of hardcoding 1; rename unused err -> _err Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com> * fix(backup): address second round 
of PR review findings - api_v3: guard opts_dict with isinstance check after json.loads so a non-object JSON payload (null, array, etc.) returns a 400 instead of a 500 AttributeError - backup_manager: wrap tmp ZIP creation and os.replace in try/except so the .zip.tmp temp file is always removed on any failure - backup_manager: replace hardcoded Path("/tmp/_zip_check") sentinel in validate_backup with a proper tempfile.TemporaryDirectory() so path traversal checks are portable and leave no artifacts - backup_restore.html: detect partial-success responses (plugins_failed or errors non-empty) even when status is 'success' and render yellow/warning styling and notify instead of green Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com> * fix(backup): add post-install steps for restored plugins; conditional restart hint - api_v3: after a successful plugin reinstall during restore, run the same post-install sequence used by the normal /plugins/install flow: invalidate schema cache, discover_plugins()/load_plugin(), and set_plugin_installed() so restored plugins are immediately available - backup_restore.html: only show the "restart the display service" hint when at least one item was restored or at least one plugin was installed Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com> * fix(backup): address Codacy findings - api_v3: replace 'fonts' in ' '.join(result.restored) substring check with any(r.startswith("fonts") for r in result.restored) to avoid fragile joined-string membership testing - api_v3: replace deprecated datetime.utcnow() and utcfromtimestamp() with datetime.now(timezone.utc) and fromtimestamp(..., timezone.utc); add timezone to import - test: remove unused import io (backup_manager no longer uses BytesIO) - src/backup_manager.py hardcoded /tmp sentinel was already fixed in a prior commit (tempfile.TemporaryDirectory) Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com> --------- Co-authored-by: Chuck <chuck@example.com> Co-authored-by: 
Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -9,7 +9,7 @@ import time
|
||||
import hashlib
|
||||
import uuid
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Optional, Tuple, Dict, Any, Type
|
||||
|
||||
@@ -1106,6 +1106,290 @@ def save_raw_secrets_config():
|
||||
status_code=500
|
||||
)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Backup & Restore
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
_BACKUP_FILENAME_RE = re.compile(r'^ledmatrix-backup-[A-Za-z0-9_-]+-\d{8}_\d{6}\.zip$')
|
||||
|
||||
|
||||
def _backup_exports_dir() -> Path:
|
||||
"""Directory where user-downloadable backup ZIPs are stored."""
|
||||
d = PROJECT_ROOT / 'config' / 'backups' / 'exports'
|
||||
d.mkdir(parents=True, exist_ok=True)
|
||||
return d
|
||||
|
||||
|
||||
def _is_safe_backup_filename(name: str) -> bool:
|
||||
"""Allow-list filter for backup filenames used in download/delete."""
|
||||
return bool(_BACKUP_FILENAME_RE.match(name))
|
||||
|
||||
|
||||
@api_v3.route('/backup/preview', methods=['GET'])
def backup_preview():
    """Summarise what a freshly-created backup would contain (no ZIP is written)."""
    try:
        from src import backup_manager
        summary = backup_manager.preview_backup_contents(PROJECT_ROOT)
        return jsonify({'status': 'success', 'data': summary})
    except Exception:
        # Any failure here is non-fatal to the app; report a generic 500.
        logger.exception("[Backup] preview failed")
        return jsonify({'status': 'error', 'message': 'Failed to compute backup preview'}), 500
|
||||
|
||||
|
||||
@api_v3.route('/backup/export', methods=['POST'])
def backup_export():
    """Create a new backup ZIP in the exports directory and describe it."""
    try:
        from src import backup_manager
        archive = backup_manager.create_backup(PROJECT_ROOT, output_dir=_backup_exports_dir())
        payload = {
            'status': 'success',
            'filename': archive.name,
            'size': archive.stat().st_size,
            # Timezone-aware timestamp so clients can render it unambiguously.
            'created_at': datetime.now(timezone.utc).isoformat(),
        }
        return jsonify(payload)
    except Exception:
        logger.exception("[Backup] export failed")
        return jsonify({'status': 'error', 'message': 'Failed to create backup'}), 500
|
||||
|
||||
|
||||
@api_v3.route('/backup/list', methods=['GET'])
def backup_list():
    """Enumerate the backup ZIPs stored on disk, newest name first."""
    try:
        records = []
        # Reverse-sorted names put the newest timestamped backup first.
        for candidate in sorted(_backup_exports_dir().glob('ledmatrix-backup-*.zip'), reverse=True):
            # Defence in depth: glob already narrows, but re-check the allow-list.
            if _is_safe_backup_filename(candidate.name):
                info = candidate.stat()
                records.append({
                    'filename': candidate.name,
                    'size': info.st_size,
                    'created_at': datetime.fromtimestamp(info.st_mtime, timezone.utc).isoformat(),
                })
        return jsonify({'status': 'success', 'data': records})
    except Exception:
        logger.exception("[Backup] list failed")
        return jsonify({'status': 'error', 'message': 'Failed to list backups'}), 500
|
||||
|
||||
|
||||
@api_v3.route('/backup/download/<path:filename>', methods=['GET'])
def backup_download(filename):
    """Stream a previously-created backup ZIP to the browser as an attachment."""
    try:
        # Allow-list check blocks traversal and arbitrary file reads.
        if not _is_safe_backup_filename(filename):
            return jsonify({'status': 'error', 'message': 'Invalid backup filename'}), 400
        exports = _backup_exports_dir()
        if not (exports / filename).exists():
            return jsonify({'status': 'error', 'message': 'Backup not found'}), 404
        # send_from_directory adds its own containment check on top of ours.
        return send_from_directory(
            str(exports),
            filename,
            as_attachment=True,
            mimetype='application/zip',
        )
    except Exception:
        logger.exception("[Backup] download failed")
        return jsonify({'status': 'error', 'message': 'Failed to download backup'}), 500
|
||||
|
||||
|
||||
@api_v3.route('/backup/<path:filename>', methods=['DELETE'])
def backup_delete(filename):
    """Delete a stored backup ZIP.

    The filename must match the backup allow-list; traversal or arbitrary
    names are rejected with 400 before any filesystem access."""
    try:
        if not _is_safe_backup_filename(filename):
            return jsonify({'status': 'error', 'message': 'Invalid backup filename'}), 400
        target = _backup_exports_dir() / filename
        if not target.exists():
            return jsonify({'status': 'error', 'message': 'Backup not found'}), 404
        target.unlink()
        # Fix: the f-string had no placeholder (message read literally
        # "Deleted (unknown)"); report the actual filename that was removed.
        return jsonify({'status': 'success', 'message': f'Deleted {filename}'})
    except Exception:
        logger.exception("[Backup] delete failed")
        return jsonify({'status': 'error', 'message': 'Failed to delete backup'}), 500
|
||||
|
||||
|
||||
def _save_uploaded_backup_to_temp() -> Tuple[Optional[Path], Optional[Tuple[Response, int]]]:
    """Persist the uploaded backup ZIP to a temp file for validate/restore.

    Returns ``(temp_path, None)`` on success or ``(None, error_response)``
    on failure. On success the caller owns — and must delete — the temp file."""
    import tempfile as _tempfile

    if 'backup_file' not in request.files:
        return None, (jsonify({'status': 'error', 'message': 'No backup file provided'}), 400)
    upload = request.files['backup_file']
    if not upload.filename:
        return None, (jsonify({'status': 'error', 'message': 'No file selected'}), 400)

    # Filename-level validation only; the byte cap is enforced while streaming
    # below, since validate_file_upload cannot see the payload size.
    ok, message = validate_file_upload(
        upload.filename,
        max_size_mb=200,
        allowed_extensions=['.zip'],
    )
    if not ok:
        return None, (jsonify({'status': 'error', 'message': message}), 400)

    handle, temp_name = _tempfile.mkstemp(prefix='ledmatrix_upload_', suffix='.zip')
    os.close(handle)
    destination = Path(temp_name)
    limit = 200 * 1024 * 1024
    try:
        total = 0
        with open(destination, 'wb') as out:
            # Stream in 64 KiB chunks so a huge upload never lives in RAM.
            while chunk := upload.stream.read(65536):
                total += len(chunk)
                if total > limit:
                    out.close()
                    destination.unlink(missing_ok=True)
                    return None, (jsonify({'status': 'error', 'message': 'Backup file exceeds 200 MB limit'}), 413)
                out.write(chunk)
    except Exception:
        destination.unlink(missing_ok=True)
        logger.exception("[Backup] Failed to save uploaded backup")
        return None, (jsonify({'status': 'error', 'message': 'Failed to read uploaded file'}), 500)
    return destination, None
|
||||
|
||||
|
||||
@api_v3.route('/backup/validate', methods=['POST'])
def backup_validate():
    """Inspect an uploaded backup (manifest, integrity) without applying it."""
    saved_path, failure = _save_uploaded_backup_to_temp()
    if failure is not None:
        return failure
    try:
        from src import backup_manager
        valid, problem, manifest = backup_manager.validate_backup(saved_path)
        if not valid:
            return jsonify({'status': 'error', 'message': problem}), 400
        return jsonify({'status': 'success', 'data': manifest})
    except Exception:
        logger.exception("[Backup] validate failed")
        return jsonify({'status': 'error', 'message': 'Failed to validate backup'}), 500
    finally:
        # We own the temp file; remove it regardless of outcome.
        try:
            saved_path.unlink(missing_ok=True)
        except OSError:
            pass
|
||||
|
||||
|
||||
@api_v3.route('/backup/restore', methods=['POST'])
def backup_restore():
    """
    Restore an uploaded backup into the running installation.

    The request is multipart/form-data with:
    - ``backup_file``: the ZIP upload
    - ``options``: JSON string with RestoreOptions fields (all boolean)
    """
    # Shared upload handler streams the ZIP to a temp file (enforcing the
    # 200 MB cap); we own that file and delete it in the finally block below.
    tmp_path, err = _save_uploaded_backup_to_temp()
    if err is not None:
        return err
    try:
        from src import backup_manager

        # Parse options (all optional; default is "restore everything").
        raw_opts = request.form.get('options') or '{}'
        try:
            opts_dict = json.loads(raw_opts)
        except json.JSONDecodeError:
            return jsonify({'status': 'error', 'message': 'Invalid options JSON'}), 400
        # Guard against valid-but-non-object JSON (null, array, scalar),
        # which would otherwise raise AttributeError on .get() below.
        if not isinstance(opts_dict, dict):
            return jsonify({'status': 'error', 'message': 'options must be an object'}), 400

        # _coerce_to_bool (rather than bool()) so string "false" from form
        # data is treated as falsy, not truthy.
        opts = backup_manager.RestoreOptions(
            restore_config=_coerce_to_bool(opts_dict.get('restore_config', True)),
            restore_secrets=_coerce_to_bool(opts_dict.get('restore_secrets', True)),
            restore_wifi=_coerce_to_bool(opts_dict.get('restore_wifi', True)),
            restore_fonts=_coerce_to_bool(opts_dict.get('restore_fonts', True)),
            restore_plugin_uploads=_coerce_to_bool(opts_dict.get('restore_plugin_uploads', True)),
            reinstall_plugins=_coerce_to_bool(opts_dict.get('reinstall_plugins', True)),
        )

        # Snapshot current config through the atomic manager so the pre-restore
        # state is recoverable via the existing rollback_config() path.
        # A snapshot failure is logged but does NOT abort the restore.
        if api_v3.config_manager and opts.restore_config:
            try:
                current = api_v3.config_manager.load_config()
                snapshot_ok, snapshot_err = _save_config_atomic(api_v3.config_manager, current, create_backup=True)
                if not snapshot_ok:
                    logger.warning("[Backup] Pre-restore snapshot failed: %s (continuing)", snapshot_err)
            except Exception:
                logger.warning("[Backup] Pre-restore snapshot failed (continuing)", exc_info=True)

        result = backup_manager.restore_backup(tmp_path, PROJECT_ROOT, opts)

        # Reinstall plugins via the store manager, one at a time.
        # Per-plugin failures are collected in result.plugins_failed rather
        # than aborting the loop, so one bad plugin can't block the rest.
        if opts.reinstall_plugins and api_v3.plugin_store_manager and result.plugins_to_install:
            installed_names = set()
            if api_v3.plugin_manager:
                try:
                    existing = api_v3.plugin_manager.get_all_plugin_info() or []
                    installed_names = {p.get('id') for p in existing if p.get('id')}
                except Exception:
                    installed_names = set()
            for entry in result.plugins_to_install:
                plugin_id = entry.get('plugin_id')
                if not plugin_id:
                    continue
                # Already present: count as installed, skip the store call.
                if plugin_id in installed_names:
                    result.plugins_installed.append(plugin_id)
                    continue
                try:
                    ok = api_v3.plugin_store_manager.install_plugin(plugin_id)
                    if ok:
                        # Same post-install sequence as the /plugins/install
                        # flow so the restored plugin is immediately usable.
                        if api_v3.schema_manager:
                            api_v3.schema_manager.invalidate_cache(plugin_id)
                        if api_v3.plugin_manager:
                            api_v3.plugin_manager.discover_plugins()
                            api_v3.plugin_manager.load_plugin(plugin_id)
                        if api_v3.plugin_state_manager:
                            api_v3.plugin_state_manager.set_plugin_installed(plugin_id)
                        result.plugins_installed.append(plugin_id)
                    else:
                        result.plugins_failed.append({'plugin_id': plugin_id, 'error': 'install returned False'})
                except Exception as install_err:
                    logger.exception("[Backup] plugin reinstall failed for %s", plugin_id)
                    result.plugins_failed.append({'plugin_id': plugin_id, 'error': str(install_err)})

        # Clear font catalog cache so restored fonts show up.
        if any(r.startswith("fonts") for r in result.restored):
            try:
                from web_interface.cache import delete_cached
                delete_cached('fonts_catalog')
            except Exception:
                logger.warning("[Backup] Failed to clear font cache", exc_info=True)

        # Reload config_manager state so the UI picks up the new values
        # without a full service restart.
        if api_v3.config_manager and opts.restore_config:
            try:
                api_v3.config_manager.load_config(force_reload=True)
            except TypeError:
                # Older config managers may not accept force_reload; retry
                # with the plain signature before giving up.
                try:
                    api_v3.config_manager.load_config()
                except Exception:
                    logger.warning("[Backup] Could not reload config after restore", exc_info=True)
            except Exception:
                logger.warning("[Backup] Could not reload config after restore", exc_info=True)

        # 'partial' signals the UI to surface warnings even though the
        # restore itself completed; details live in result.to_dict().
        return jsonify({
            'status': 'success' if result.success else 'partial',
            'data': result.to_dict(),
        })
    except Exception:
        logger.exception("[Backup] restore failed")
        return jsonify({'status': 'error', 'message': 'Failed to restore backup'}), 500
    finally:
        # Always remove the uploaded temp file, success or failure.
        try:
            tmp_path.unlink(missing_ok=True)
        except OSError:
            pass
|
||||
|
||||
|
||||
@api_v3.route('/system/status', methods=['GET'])
|
||||
def get_system_status():
|
||||
"""Get system status"""
|
||||
|
||||
@@ -76,6 +76,8 @@ def load_partial(partial_name):
|
||||
return _load_logs_partial()
|
||||
elif partial_name == 'raw-json':
|
||||
return _load_raw_json_partial()
|
||||
elif partial_name == 'backup-restore':
|
||||
return _load_backup_restore_partial()
|
||||
elif partial_name == 'wifi':
|
||||
return _load_wifi_partial()
|
||||
elif partial_name == 'cache':
|
||||
@@ -296,6 +298,13 @@ def _load_raw_json_partial():
|
||||
except Exception as e:
|
||||
return f"Error: {str(e)}", 500
|
||||
|
||||
def _load_backup_restore_partial():
    """Render the Backup & Restore tab partial for the v3 web UI."""
    try:
        return render_template('v3/partials/backup_restore.html')
    except Exception as exc:
        # Mirror the other partial loaders: plain-text error with a 500.
        return f"Error: {str(exc)}", 500
|
||||
|
||||
@pages_v3.route('/setup')
|
||||
def captive_setup():
|
||||
"""Lightweight captive portal setup page — self-contained, no frameworks."""
|
||||
|
||||
Reference in New Issue
Block a user