mirror of
https://github.com/ChuckBuilds/LEDMatrix.git
synced 2026-04-10 13:02:59 +00:00
Compare commits
6 Commits
c584f227c1
...
a8609aea18
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a8609aea18 | ||
|
|
0dc1a8f6f4 | ||
|
|
d876679b9f | ||
|
|
6a04e882c1 | ||
|
|
45f6e7c20e | ||
|
|
a821060084 |
@@ -13,7 +13,7 @@ import re
|
||||
import shutil
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
from typing import Dict, Any, Optional, Tuple
|
||||
from typing import Dict, Any, Optional, Tuple, List
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -305,7 +305,12 @@ class PixletRenderer:
|
||||
|
||||
def extract_schema(self, star_file: str) -> Tuple[bool, Optional[Dict[str, Any]], Optional[str]]:
|
||||
"""
|
||||
Extract configuration schema from a .star file.
|
||||
Extract configuration schema from a .star file by parsing source code.
|
||||
|
||||
Supports:
|
||||
- Static field definitions (location, text, toggle, dropdown, color, datetime)
|
||||
- Variable-referenced dropdown options
|
||||
- Graceful degradation for unsupported field types
|
||||
|
||||
Args:
|
||||
star_file: Path to .star file
|
||||
@@ -313,47 +318,282 @@ class PixletRenderer:
|
||||
Returns:
|
||||
Tuple of (success: bool, schema: Optional[Dict], error: Optional[str])
|
||||
"""
|
||||
if not self.pixlet_binary:
|
||||
return False, None, "Pixlet binary not found"
|
||||
|
||||
if not os.path.isfile(star_file):
|
||||
return False, None, f"Star file not found: {star_file}"
|
||||
|
||||
try:
|
||||
# Use 'pixlet info' or 'pixlet serve' to extract schema
|
||||
# Note: Schema extraction may vary by Pixlet version
|
||||
cmd = [self.pixlet_binary, "serve", star_file, "--print-schema"]
|
||||
# Read .star file
|
||||
with open(star_file, 'r', encoding='utf-8') as f:
|
||||
content = f.read()
|
||||
|
||||
logger.debug(f"Extracting schema: {' '.join(cmd)}")
|
||||
# Parse schema from source
|
||||
schema = self._parse_schema_from_source(content, star_file)
|
||||
|
||||
safe_cwd = self._get_safe_working_directory(star_file)
|
||||
result = subprocess.run(
|
||||
cmd,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
timeout=10,
|
||||
cwd=safe_cwd # Run in .star file directory (or None if relative path)
|
||||
)
|
||||
|
||||
if result.returncode == 0:
|
||||
# Parse JSON schema from output
|
||||
try:
|
||||
schema = json.loads(result.stdout)
|
||||
logger.debug(f"Extracted schema from: {star_file}")
|
||||
return True, schema, None
|
||||
except json.JSONDecodeError as e:
|
||||
error = f"Invalid schema JSON: {e}"
|
||||
logger.warning(error)
|
||||
return False, None, error
|
||||
if schema:
|
||||
field_count = len(schema.get('schema', []))
|
||||
logger.debug(f"Extracted schema with {field_count} field(s) from: {star_file}")
|
||||
return True, schema, None
|
||||
else:
|
||||
# Schema extraction might not be supported
|
||||
logger.debug(f"Schema extraction not available or failed: {result.stderr}")
|
||||
return True, None, None # Not an error, just no schema
|
||||
# No schema found - not an error, app just doesn't have configuration
|
||||
logger.debug(f"No schema found in: {star_file}")
|
||||
return True, None, None
|
||||
|
||||
except subprocess.TimeoutExpired:
|
||||
error = "Schema extraction timeout"
|
||||
except UnicodeDecodeError as e:
|
||||
error = f"File encoding error: {e}"
|
||||
logger.warning(error)
|
||||
return False, None, error
|
||||
except (subprocess.SubprocessError, OSError):
|
||||
logger.exception("Schema extraction exception")
|
||||
return False, None, "Schema extraction failed - see logs for details"
|
||||
except Exception as e:
|
||||
logger.exception(f"Schema extraction failed for {star_file}")
|
||||
return False, None, f"Schema extraction error: {str(e)}"
|
||||
|
||||
def _parse_schema_from_source(self, content: str, file_path: str) -> Optional[Dict[str, Any]]:
    """
    Parse the get_schema() function from Starlark source code.

    Args:
        content: .star file content
        file_path: Path to file (for logging)

    Returns:
        Schema dict with format {"version": "1", "schema": [...]}, or None
        if the file has no get_schema() function.
    """
    # Extract top-level variable definitions (for dropdown option references)
    var_table = self._extract_variable_definitions(content)

    # Extract the get_schema() function body
    schema_body = self._extract_get_schema_body(content)
    if not schema_body:
        return None

    # Extract schema version (defaults to "1" when not declared)
    version_match = re.search(r'version\s*=\s*"([^"]+)"', schema_body)
    version = version_match.group(1) if version_match else "1"

    # Locate the fields array inside schema.Schema(...)
    fields_start_match = re.search(r'fields\s*=\s*\[', schema_body)
    if not fields_start_match:
        # Empty schema or no fields
        return {"version": version, "schema": []}

    # Find the matching closing bracket of the fields list (brackets may nest)
    bracket_count = 1
    i = fields_start_match.end()
    while i < len(schema_body) and bracket_count > 0:
        if schema_body[i] == '[':
            bracket_count += 1
        elif schema_body[i] == ']':
            bracket_count -= 1
        i += 1

    if bracket_count != 0:
        # Unmatched brackets - treat as an empty schema rather than failing
        return {"version": version, "schema": []}

    fields_text = schema_body[fields_start_match.end():i - 1]

    # Parse individual schema.FieldType(...) definitions. Only the header is
    # matched by regex; the argument list is scanned manually so nested
    # parentheses are handled correctly. Anchoring the scan at match.end()
    # (the character after the opening paren) also fixes the previous
    # off-by-N bug when whitespace appeared between the type name and '('.
    field_header = re.compile(r'schema\.(\w+)\s*\(')
    schema_fields = []

    pos = 0
    while pos < len(fields_text):
        match = field_header.search(fields_text, pos)
        if not match:
            break

        field_type = match.group(1)

        # Scan from the opening parenthesis to its matching close
        paren_count = 1
        i = match.end()
        while i < len(fields_text) and paren_count > 0:
            if fields_text[i] == '(':
                paren_count += 1
            elif fields_text[i] == ')':
                paren_count -= 1
            i += 1

        field_params_text = fields_text[match.end():i - 1]

        # Parse the field; unparseable fields are skipped gracefully
        field_dict = self._parse_schema_field(field_type, field_params_text, var_table)
        if field_dict:
            schema_fields.append(field_dict)

        # Continue after this field (skips any schema.Option(...) it contained)
        pos = i

    return {
        "version": version,
        "schema": schema_fields
    }
|
||||
|
||||
def _extract_variable_definitions(self, content: str) -> Dict[str, List[Dict]]:
    """
    Extract top-level variable assignments that hold dropdown option lists.

    Args:
        content: .star file content

    Returns:
        Dict mapping variable names to their parsed option lists
    """
    # Look for assignments shaped like: someVar = [schema.Option(...), ...]
    assignment_re = re.compile(
        r'^(\w+)\s*=\s*\[(.*?schema\.Option.*?)\]',
        re.MULTILINE | re.DOTALL,
    )

    var_table: Dict[str, List[Dict]] = {}
    for assignment in assignment_re.finditer(content):
        name = assignment.group(1)
        body = assignment.group(2)
        # Parse the schema.Option entries inside the list literal
        parsed = self._parse_schema_options(body, {})
        if parsed:
            var_table[name] = parsed

    return var_table
|
||||
|
||||
def _extract_get_schema_body(self, content: str) -> Optional[str]:
|
||||
"""
|
||||
Extract get_schema() function body.
|
||||
|
||||
Args:
|
||||
content: .star file content
|
||||
|
||||
Returns:
|
||||
Function body text, or None if not found
|
||||
"""
|
||||
# Find def get_schema():
|
||||
pattern = r'def\s+get_schema\s*\(\s*\)\s*:(.*?)(?=\ndef\s|\Z)'
|
||||
match = re.search(pattern, content, re.DOTALL)
|
||||
|
||||
if match:
|
||||
return match.group(1)
|
||||
return None
|
||||
|
||||
def _parse_schema_field(self, field_type: str, params_text: str, var_table: Dict) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Parse individual schema field definition.
|
||||
|
||||
Args:
|
||||
field_type: Field type (Location, Text, Toggle, etc.)
|
||||
params_text: Field parameters text
|
||||
var_table: Variable lookup table
|
||||
|
||||
Returns:
|
||||
Field dict, or None if parse fails
|
||||
"""
|
||||
# Map Pixlet field types to JSON typeOf
|
||||
type_mapping = {
|
||||
'Location': 'location',
|
||||
'Text': 'text',
|
||||
'Toggle': 'toggle',
|
||||
'Dropdown': 'dropdown',
|
||||
'Color': 'color',
|
||||
'DateTime': 'datetime',
|
||||
'OAuth2': 'oauth2',
|
||||
'PhotoSelect': 'photo_select',
|
||||
'LocationBased': 'location_based',
|
||||
'Typeahead': 'typeahead',
|
||||
'Generated': 'generated',
|
||||
}
|
||||
|
||||
type_of = type_mapping.get(field_type, field_type.lower())
|
||||
|
||||
# Skip Generated fields (invisible meta-fields)
|
||||
if type_of == 'generated':
|
||||
return None
|
||||
|
||||
field_dict = {"typeOf": type_of}
|
||||
|
||||
# Extract common parameters
|
||||
# id
|
||||
id_match = re.search(r'id\s*=\s*"([^"]+)"', params_text)
|
||||
if id_match:
|
||||
field_dict['id'] = id_match.group(1)
|
||||
else:
|
||||
# id is required, skip field if missing
|
||||
return None
|
||||
|
||||
# name
|
||||
name_match = re.search(r'name\s*=\s*"([^"]+)"', params_text)
|
||||
if name_match:
|
||||
field_dict['name'] = name_match.group(1)
|
||||
|
||||
# desc
|
||||
desc_match = re.search(r'desc\s*=\s*"([^"]+)"', params_text)
|
||||
if desc_match:
|
||||
field_dict['desc'] = desc_match.group(1)
|
||||
|
||||
# icon
|
||||
icon_match = re.search(r'icon\s*=\s*"([^"]+)"', params_text)
|
||||
if icon_match:
|
||||
field_dict['icon'] = icon_match.group(1)
|
||||
|
||||
# default (can be string, bool, or variable reference)
|
||||
default_match = re.search(r'default\s*=\s*([^,\)]+)', params_text)
|
||||
if default_match:
|
||||
default_value = default_match.group(1).strip()
|
||||
# Handle boolean
|
||||
if default_value in ('True', 'False'):
|
||||
field_dict['default'] = default_value.lower()
|
||||
# Handle string literal
|
||||
elif default_value.startswith('"') and default_value.endswith('"'):
|
||||
field_dict['default'] = default_value.strip('"')
|
||||
# Handle variable reference (can't resolve, use as-is)
|
||||
else:
|
||||
# Try to extract just the value if it's like options[0].value
|
||||
if '.' in default_value or '[' in default_value:
|
||||
# Complex expression, skip default
|
||||
pass
|
||||
else:
|
||||
field_dict['default'] = default_value
|
||||
|
||||
# For dropdown, extract options
|
||||
if type_of == 'dropdown':
|
||||
options_match = re.search(r'options\s*=\s*([^,\)]+)', params_text)
|
||||
if options_match:
|
||||
options_ref = options_match.group(1).strip()
|
||||
# Check if it's a variable reference
|
||||
if options_ref in var_table:
|
||||
field_dict['options'] = var_table[options_ref]
|
||||
# Or inline options
|
||||
elif options_ref.startswith('['):
|
||||
# Find the full options array (handle nested brackets)
|
||||
# This is tricky, for now try to extract inline options
|
||||
inline_match = re.search(r'options\s*=\s*(\[.*?\])', params_text, re.DOTALL)
|
||||
if inline_match:
|
||||
options_text = inline_match.group(1)
|
||||
field_dict['options'] = self._parse_schema_options(options_text, var_table)
|
||||
|
||||
return field_dict
|
||||
|
||||
def _parse_schema_options(self, options_text: str, var_table: Dict) -> List[Dict[str, str]]:
|
||||
"""
|
||||
Parse schema.Option list.
|
||||
|
||||
Args:
|
||||
options_text: Text containing schema.Option(...) entries
|
||||
var_table: Variable lookup table (not currently used)
|
||||
|
||||
Returns:
|
||||
List of {"display": "...", "value": "..."} dicts
|
||||
"""
|
||||
options = []
|
||||
|
||||
# Match schema.Option(display = "...", value = "...")
|
||||
option_pattern = r'schema\.Option\s*\(\s*display\s*=\s*"([^"]+)"\s*,\s*value\s*=\s*"([^"]+)"\s*\)'
|
||||
matches = re.finditer(option_pattern, options_text)
|
||||
|
||||
for match in matches:
|
||||
options.append({
|
||||
"display": match.group(1),
|
||||
"value": match.group(2)
|
||||
})
|
||||
|
||||
return options
|
||||
|
||||
@@ -6982,11 +6982,17 @@ def clear_old_errors():
|
||||
def _get_tronbyte_repository_class():
|
||||
"""Import TronbyteRepository from plugin-repos directory."""
|
||||
import importlib.util
|
||||
import importlib
|
||||
|
||||
module_path = PROJECT_ROOT / 'plugin-repos' / 'starlark-apps' / 'tronbyte_repository.py'
|
||||
if not module_path.exists():
|
||||
raise ImportError(f"TronbyteRepository module not found at {module_path}")
|
||||
|
||||
# If already imported, reload to pick up code changes
|
||||
if "tronbyte_repository" in sys.modules:
|
||||
importlib.reload(sys.modules["tronbyte_repository"])
|
||||
return sys.modules["tronbyte_repository"].TronbyteRepository
|
||||
|
||||
spec = importlib.util.spec_from_file_location("tronbyte_repository", module_path)
|
||||
module = importlib.util.module_from_spec(spec)
|
||||
sys.modules["tronbyte_repository"] = module
|
||||
@@ -6994,6 +7000,27 @@ def _get_tronbyte_repository_class():
|
||||
return module.TronbyteRepository
|
||||
|
||||
|
||||
def _get_pixlet_renderer_class():
    """Import PixletRenderer from the plugin-repos directory.

    Loads plugin-repos/starlark-apps/pixlet_renderer.py by file path
    (the directory is not an importable package), reloading it if it was
    already imported so code changes are picked up.

    Returns:
        The PixletRenderer class.

    Raises:
        ImportError: If the module file does not exist.
    """
    # NOTE: 'import importlib.util' also binds the top-level 'importlib'
    # name, so the previous extra 'import importlib' was redundant.
    import importlib.util

    module_path = PROJECT_ROOT / 'plugin-repos' / 'starlark-apps' / 'pixlet_renderer.py'
    if not module_path.exists():
        raise ImportError(f"PixletRenderer module not found at {module_path}")

    # If already imported, reload to pick up code changes
    if "pixlet_renderer" in sys.modules:
        importlib.reload(sys.modules["pixlet_renderer"])
        return sys.modules["pixlet_renderer"].PixletRenderer

    spec = importlib.util.spec_from_file_location("pixlet_renderer", module_path)
    module = importlib.util.module_from_spec(spec)
    # Register in sys.modules before exec so the module can be found
    # during its own import (standard importlib recipe)
    sys.modules["pixlet_renderer"] = module
    spec.loader.exec_module(module)
    return module.PixletRenderer
|
||||
|
||||
|
||||
def _validate_and_sanitize_app_id(app_id, fallback_source=None):
|
||||
"""Validate and sanitize app_id to a safe slug."""
|
||||
if not app_id and fallback_source:
|
||||
@@ -7064,11 +7091,26 @@ def _write_starlark_manifest(manifest: dict) -> bool:
|
||||
def _install_star_file(app_id: str, star_file_path: str, metadata: dict) -> bool:
|
||||
"""Install a .star file and update the manifest (standalone, no plugin needed)."""
|
||||
import shutil
|
||||
import json
|
||||
app_dir = _STARLARK_APPS_DIR / app_id
|
||||
app_dir.mkdir(parents=True, exist_ok=True)
|
||||
dest = app_dir / f"{app_id}.star"
|
||||
shutil.copy2(star_file_path, str(dest))
|
||||
|
||||
# Try to extract schema using PixletRenderer
|
||||
try:
|
||||
PixletRenderer = _get_pixlet_renderer_class()
|
||||
pixlet = PixletRenderer()
|
||||
if pixlet.is_available():
|
||||
_, schema, _ = pixlet.extract_schema(str(dest))
|
||||
if schema:
|
||||
schema_path = app_dir / "schema.json"
|
||||
with open(schema_path, 'w') as f:
|
||||
json.dump(schema, f, indent=2)
|
||||
logger.info(f"Extracted schema for {app_id}")
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to extract schema for {app_id}: {e}")
|
||||
|
||||
manifest = _read_starlark_manifest()
|
||||
manifest.setdefault('apps', {})[app_id] = {
|
||||
'name': metadata.get('name', app_id),
|
||||
@@ -7217,6 +7259,17 @@ def get_starlark_app(app_id):
|
||||
app_data = manifest.get('apps', {}).get(app_id)
|
||||
if not app_data:
|
||||
return jsonify({'status': 'error', 'message': f'App not found: {app_id}'}), 404
|
||||
|
||||
# Load schema from schema.json if it exists
|
||||
schema = None
|
||||
schema_file = _STARLARK_APPS_DIR / app_id / 'schema.json'
|
||||
if schema_file.exists():
|
||||
try:
|
||||
with open(schema_file, 'r') as f:
|
||||
schema = json.load(f)
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to load schema for {app_id}: {e}")
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'app': {
|
||||
@@ -7224,7 +7277,7 @@ def get_starlark_app(app_id):
|
||||
'name': app_data.get('name', app_id),
|
||||
'enabled': app_data.get('enabled', True),
|
||||
'config': app_data.get('config', {}),
|
||||
'schema': None,
|
||||
'schema': schema,
|
||||
'render_interval': app_data.get('render_interval', 300),
|
||||
'display_duration': app_data.get('display_duration', 15),
|
||||
'has_frames': False,
|
||||
@@ -7559,7 +7612,8 @@ def install_from_tronbyte_repository():
|
||||
|
||||
try:
|
||||
# Pass filename from metadata (e.g., "analog_clock.star" for analogclock app)
|
||||
filename = metadata.get('filename') if metadata else None
|
||||
# Note: manifest uses 'fileName' (camelCase), not 'filename'
|
||||
filename = metadata.get('fileName') if metadata else None
|
||||
success, error = repo.download_star_file(data['app_id'], Path(temp_path), filename=filename)
|
||||
if not success:
|
||||
return jsonify({'status': 'error', 'message': f'Failed to download app: {error}'}), 500
|
||||
|
||||
Reference in New Issue
Block a user