Initial Commit

This commit is contained in:
2026-02-13 12:22:06 -05:00
parent 43fdccf67b
commit 8fec41e3e6
612 changed files with 1313484 additions and 2 deletions

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -0,0 +1,134 @@
"""
Background Cache Mixin for Sports Managers
This mixin provides common caching functionality to eliminate code duplication
across all sports managers. It implements the background service cache pattern
where Recent/Upcoming managers consume data from the background service cache.
"""
import time
import logging
from typing import Dict, Optional, Any, Callable
from datetime import datetime
import pytz
class BackgroundCacheMixin:
    """
    Mixin class that provides background service cache functionality to sports managers.
    This mixin eliminates code duplication by providing a common implementation
    for the background service cache pattern used across all sports managers.
    Host classes must provide ``self.logger`` (a logging.Logger) and
    ``self.cache_manager`` (the shared cache manager instance).
    """
    def _fetch_data_with_background_cache(self,
                                          sport_key: str,
                                          api_fetch_method: Callable,
                                          live_manager_class: Optional[type] = None) -> Optional[Dict]:
        """
        Common logic for fetching data with background service cache support.
        This method implements the background service cache pattern:
        1. Live managers always fetch fresh data
        2. Recent/Upcoming managers try background cache first
        3. Fallback to direct API call if background data unavailable
        Args:
            sport_key: Sport identifier (e.g., 'nba', 'nfl', 'ncaa_fb')
            api_fetch_method: Callable accepting a ``use_cache`` keyword; performs
                the direct API fetch
            live_manager_class: Class to check if this is a live manager
        Returns:
            Cached or fresh data from the API (may be None if the fetch yields None)
        Raises:
            Exception: re-raises any error from the cache manager or the fetch
            method after recording the elapsed time.
        """
        start_time = time.time()
        cache_hit = False
        cache_source = None
        try:
            # Bug fix: generate the cache key unconditionally. The original code
            # only assigned it on the non-live branch, so a live manager whose
            # fetch returned None hit the fallback log line below with
            # `cache_key` unbound (NameError).
            cache_key = self.cache_manager.generate_sport_cache_key(sport_key)
            # For Live managers, always fetch fresh data
            if live_manager_class and isinstance(self, live_manager_class):
                self.logger.info(f"[{sport_key.upper()}] Live manager - fetching fresh data")
                result = api_fetch_method(use_cache=False)
                cache_source = "live_fresh"
            else:
                # For Recent/Upcoming managers, try background service cache first
                if self.cache_manager.is_background_data_available(cache_key, sport_key):
                    cached_data = self.cache_manager.get_background_cached_data(cache_key, sport_key)
                    if cached_data:
                        self.logger.info(f"[{sport_key.upper()}] Using background service cache for {cache_key}")
                        result = cached_data
                        cache_hit = True
                        cache_source = "background_cache"
                    else:
                        # Availability check passed but the read returned nothing;
                        # fall through to the direct API fetch below.
                        self.logger.warning(f"[{sport_key.upper()}] Background cache check passed but no data returned for {cache_key}")
                        result = None
                        cache_source = "background_miss"
                else:
                    self.logger.info(f"[{sport_key.upper()}] Background data not available for {cache_key}")
                    result = None
                    cache_source = "background_unavailable"
            # Fallback to direct API call if background data not available
            if result is None:
                self.logger.info(f"[{sport_key.upper()}] Fetching directly from API for {cache_key}")
                result = api_fetch_method(use_cache=True)
                cache_source = "api_fallback"
            # Record and log performance metrics
            duration = time.time() - start_time
            self.cache_manager.record_fetch_time(duration)
            self._log_fetch_performance(sport_key, duration, cache_hit, cache_source)
            return result
        except Exception as e:
            # Record the failed attempt's duration as well, then propagate.
            duration = time.time() - start_time
            self.logger.error(f"[{sport_key.upper()}] Error in background cache fetch after {duration:.2f}s: {e}")
            self.cache_manager.record_fetch_time(duration)
            raise
    def _log_fetch_performance(self, sport_key: str, duration: float, cache_hit: bool, cache_source: str):
        """
        Log detailed performance metrics for fetch operations.
        Args:
            sport_key: Sport identifier
            duration: Fetch operation duration in seconds
            cache_hit: Whether this was a cache hit
            cache_source: Source of the data (background_cache, api_fallback, etc.)
        """
        # Log basic performance info
        self.logger.info(f"[{sport_key.upper()}] Fetch completed in {duration:.2f}s "
                         f"(cache_hit={cache_hit}, source={cache_source})")
        # Lazily initialise the per-manager operation counter.
        self._fetch_count = getattr(self, '_fetch_count', 0) + 1
        # Log a detailed summary every 10 operations.
        if self._fetch_count % 10 == 0:
            metrics = self.cache_manager.get_cache_metrics()
            self.logger.info(f"[{sport_key.upper()}] Cache Performance Summary - "
                             f"Hit Rate: {metrics['cache_hit_rate']:.2%}, "
                             f"Background Hit Rate: {metrics['background_hit_rate']:.2%}, "
                             f"API Calls Saved: {metrics['api_calls_saved']}")
    def get_cache_performance_summary(self) -> Dict[str, Any]:
        """
        Get cache performance summary for this manager.
        Returns:
            Dictionary containing cache performance metrics
        """
        return self.cache_manager.get_cache_metrics()
    def log_cache_performance(self):
        """Log current cache performance metrics via the cache manager."""
        self.cache_manager.log_cache_metrics()

View File

@@ -0,0 +1,542 @@
"""
Background Data Service for LEDMatrix
This service provides background threading capabilities for season data fetching
to prevent blocking the main display loop. It's designed to be used across
all sport managers for consistent background data management.
Key Features:
- Thread-safe data caching
- Automatic retry logic with exponential backoff
- Configurable timeouts and intervals
- Graceful error handling
- Progress tracking and logging
- Memory-efficient data storage
"""
import os
import time
import logging
import threading
import requests
from typing import Dict, Any, Optional, List, Callable, Union
from datetime import datetime, timedelta
from dataclasses import dataclass, field
from enum import Enum
import json
import queue
from concurrent.futures import ThreadPoolExecutor, Future
import weakref
from src.cache_manager import CacheManager
# Configure logging
logger = logging.getLogger(__name__)
class FetchStatus(Enum):
    """Lifecycle status of a background fetch operation (tracked on FetchRequest)."""
    PENDING = "pending"          # Submitted but not yet picked up by a worker
    IN_PROGRESS = "in_progress"  # A worker thread is currently fetching
    COMPLETED = "completed"      # Fetch finished successfully
    FAILED = "failed"            # Fetch raised or exhausted its retries
    CANCELLED = "cancelled"      # Cancelled via cancel_request() before completing
@dataclass
class FetchRequest:
    """Represents a background fetch request submitted to BackgroundDataService."""
    id: str  # Unique request id ("{sport}_{year}_{ms-timestamp}")
    sport: str  # Sport identifier (e.g., 'nfl', 'ncaafb')
    year: int  # Season year being fetched
    cache_key: str  # Cache key under which the response is stored
    url: str  # URL to fetch data from
    params: Dict[str, Any] = field(default_factory=dict)  # URL query parameters
    headers: Dict[str, str] = field(default_factory=dict)  # HTTP request headers
    timeout: int = 30  # Per-request timeout in seconds
    retry_count: int = 0  # Attempts made so far (updated during retries)
    max_retries: int = 3  # Maximum retry attempts before failing
    priority: int = 1  # Higher number = higher priority
    callback: Optional[Callable] = None  # Invoked with the FetchResult when done
    created_at: float = field(default_factory=time.time)  # Submission time (epoch seconds)
    status: FetchStatus = FetchStatus.PENDING  # Current lifecycle status
    result: Optional[Any] = None  # Parsed response data on success
    error: Optional[str] = None  # Error message on failure
@dataclass
class FetchResult:
    """Result of a background fetch operation (also returned for cache hits)."""
    request_id: str  # Id of the FetchRequest this result belongs to
    success: bool  # True when data was obtained (from cache or network)
    data: Optional[Any] = None  # Parsed response payload
    error: Optional[str] = None  # Error message when success is False
    cached: bool = False  # True when served from the cache manager
    fetch_time: float = 0.0  # Wall-clock fetch duration in seconds (0 for cache hits)
    retry_count: int = 0  # Number of retry attempts that were made
class BackgroundDataService:
    """
    Background data service for fetching season data without blocking the main thread.
    This service manages a pool of background threads to fetch data asynchronously,
    with intelligent caching, retry logic, and progress tracking.
    All shared state (active/completed request maps, stats) is guarded by an RLock.
    """
    def __init__(self, cache_manager: "CacheManager", max_workers: int = 3, request_timeout: int = 30):
        """
        Initialize the background data service.
        Args:
            cache_manager: Cache manager instance for storing fetched data
            max_workers: Maximum number of background threads
            request_timeout: Default timeout for HTTP requests (seconds)
        """
        self.cache_manager = cache_manager
        self.max_workers = max_workers
        self.request_timeout = request_timeout
        # Thread management
        self.executor = ThreadPoolExecutor(max_workers=max_workers, thread_name_prefix="BackgroundData")
        self.active_requests: Dict[str, "FetchRequest"] = {}
        self.completed_requests: Dict[str, "FetchResult"] = {}
        self.request_queue = queue.PriorityQueue()  # reserved for prioritised scheduling
        # Thread safety: RLock because shutdown() calls cancel_request() while
        # already holding the lock.
        self._lock = threading.RLock()
        self._shutdown = False
        # Statistics (all mutated under self._lock)
        self.stats = {
            'total_requests': 0,
            'completed_requests': 0,
            'failed_requests': 0,
            'cached_hits': 0,
            'cache_misses': 0,
            'total_fetch_time': 0.0,
            'average_fetch_time': 0.0
        }
        # Session for HTTP requests with connection-level retries
        self.session = requests.Session()
        self.session.mount('http://', requests.adapters.HTTPAdapter(max_retries=3))
        self.session.mount('https://', requests.adapters.HTTPAdapter(max_retries=3))
        # Default headers merged into every outgoing request
        self.default_headers = {
            'User-Agent': 'LEDMatrix/1.0 (https://github.com/yourusername/LEDMatrix)',
            'Accept': 'application/json',
            'Accept-Language': 'en-US,en;q=0.9',
            'Accept-Encoding': 'gzip, deflate, br',
            'Connection': 'keep-alive'
        }
        logger.info(f"BackgroundDataService initialized with {max_workers} workers")
    def get_sport_cache_key(self, sport: str, date_str: str = None) -> str:
        """
        Generate consistent cache keys for sports data.
        This ensures Recent/Upcoming managers and background service
        use the same cache keys.
        Bug fix: delegates to the injected cache manager instead of constructing
        a brand-new CacheManager on every call as the original did.
        """
        return self.cache_manager.generate_sport_cache_key(sport, date_str)
    def submit_fetch_request(self,
                             sport: str,
                             year: int,
                             url: str,
                             cache_key: str = None,
                             params: Optional[Dict[str, Any]] = None,
                             headers: Optional[Dict[str, str]] = None,
                             timeout: Optional[int] = None,
                             max_retries: int = 3,
                             priority: int = 1,
                             callback: Optional[Callable] = None) -> str:
        """
        Submit a background fetch request.
        Args:
            sport: Sport identifier (e.g., 'nfl', 'ncaafb')
            year: Year to fetch data for
            url: URL to fetch data from
            cache_key: Cache key for storing/retrieving data (generated if omitted)
            params: URL parameters
            headers: HTTP headers (merged over the service defaults)
            timeout: Request timeout (falls back to the service default)
            max_retries: Maximum number of retries
            priority: Request priority (higher = more important)
            callback: Optional callback invoked with the FetchResult on completion
        Returns:
            Request ID for tracking the fetch operation
        Raises:
            RuntimeError: if the service is shutting down
        """
        if self._shutdown:
            raise RuntimeError("BackgroundDataService is shutting down")
        # Generate cache key if not provided
        if cache_key is None:
            cache_key = self.get_sport_cache_key(sport)
        request_id = f"{sport}_{year}_{int(time.time() * 1000)}"
        # Check cache first: a hit completes the request synchronously.
        cached_data = self.cache_manager.get(cache_key)
        if cached_data:
            with self._lock:
                self.stats['cached_hits'] += 1
                result = FetchResult(
                    request_id=request_id,
                    success=True,
                    data=cached_data,
                    cached=True,
                    fetch_time=0.0
                )
                self.completed_requests[request_id] = result
            if callback:
                try:
                    callback(result)
                except Exception as e:
                    logger.error(f"Error in callback for request {request_id}: {e}")
            logger.debug(f"Cache hit for {sport} {year} data")
            return request_id
        # Create fetch request
        request = FetchRequest(
            id=request_id,
            sport=sport,
            year=year,
            cache_key=cache_key,
            url=url,
            params=params or {},
            headers={**self.default_headers, **(headers or {})},
            timeout=timeout or self.request_timeout,
            max_retries=max_retries,
            priority=priority,
            callback=callback
        )
        with self._lock:
            self.active_requests[request_id] = request
            self.stats['total_requests'] += 1
            self.stats['cache_misses'] += 1
        # Submit to executor; the worker stores its own result, so the Future
        # return value is intentionally discarded.
        self.executor.submit(self._fetch_data_worker, request)
        logger.info(f"Submitted background fetch request {request_id} for {sport} {year}")
        return request_id
    def _fetch_data_worker(self, request: "FetchRequest") -> "FetchResult":
        """
        Worker function that performs the actual data fetching.
        Args:
            request: Fetch request to process
        Returns:
            Fetch result with data or error information
        """
        start_time = time.time()
        result = FetchResult(request_id=request.id, success=False, retry_count=request.retry_count)
        try:
            with self._lock:
                request.status = FetchStatus.IN_PROGRESS
            logger.info(f"Starting background fetch for {request.sport} {request.year}")
            # Perform HTTP request with retry logic
            response = self._make_request_with_retry(request)
            response.raise_for_status()
            # Parse and validate the response structure
            data = response.json()
            if not isinstance(data, dict):
                raise ValueError(f"Expected dict response, got {type(data)}")
            if 'events' not in data:
                raise ValueError("Response missing 'events' field")
            events = data.get('events', [])
            if not isinstance(events, list):
                raise ValueError(f"Expected events to be list, got {type(events)}")
            logger.debug(f"Validated {len(events)} events for {request.sport} {request.year}")
            # Cache the data for subsequent consumers
            self.cache_manager.set(request.cache_key, data)
            with self._lock:
                request.status = FetchStatus.COMPLETED
                request.result = data
            fetch_time = time.time() - start_time
            result = FetchResult(
                request_id=request.id,
                success=True,
                data=data,
                fetch_time=fetch_time,
                retry_count=request.retry_count
            )
            logger.info(f"Successfully fetched {request.sport} {request.year} data in {fetch_time:.2f}s")
        except Exception as e:
            error_msg = str(e)
            logger.error(f"Failed to fetch {request.sport} {request.year} data: {error_msg}")
            with self._lock:
                request.status = FetchStatus.FAILED
                request.error = error_msg
            result = FetchResult(
                request_id=request.id,
                success=False,
                error=error_msg,
                fetch_time=time.time() - start_time,
                retry_count=request.retry_count
            )
        finally:
            # Store result, move the request out of the active map, and update
            # aggregate statistics — all under the lock.
            with self._lock:
                self.completed_requests[request.id] = result
                if request.id in self.active_requests:
                    del self.active_requests[request.id]
                if result.success:
                    self.stats['completed_requests'] += 1
                else:
                    self.stats['failed_requests'] += 1
                self.stats['total_fetch_time'] += result.fetch_time
                self.stats['average_fetch_time'] = (
                    self.stats['total_fetch_time'] /
                    (self.stats['completed_requests'] + self.stats['failed_requests'])
                )
            # Call callback outside the stats update but still in the worker thread.
            if request.callback:
                try:
                    request.callback(result)
                except Exception as e:
                    logger.error(f"Error in callback for request {request.id}: {e}")
        return result
    def _make_request_with_retry(self, request: "FetchRequest") -> "requests.Response":
        """
        Make HTTP request with retry logic and exponential backoff.
        Args:
            request: Fetch request containing request details
        Returns:
            HTTP response
        Raises:
            requests.RequestException: If all retries fail
        """
        last_exception = None
        for attempt in range(request.max_retries + 1):
            try:
                response = self.session.get(
                    request.url,
                    params=request.params,
                    headers=request.headers,
                    timeout=request.timeout
                )
                return response
            except requests.RequestException as e:
                last_exception = e
                request.retry_count = attempt + 1
                if attempt < request.max_retries:
                    # Exponential backoff: 1s, 2s, 4s, 8s...
                    delay = 2 ** attempt
                    logger.warning(f"Request failed (attempt {attempt + 1}/{request.max_retries + 1}), retrying in {delay}s: {e}")
                    time.sleep(delay)
                else:
                    logger.error(f"All {request.max_retries + 1} attempts failed for {request.sport} {request.year}")
                    raise last_exception
    def get_result(self, request_id: str) -> Optional["FetchResult"]:
        """
        Get the result of a fetch request.
        Args:
            request_id: Request ID to get result for
        Returns:
            Fetch result if available, None otherwise
        """
        with self._lock:
            return self.completed_requests.get(request_id)
    def is_request_complete(self, request_id: str) -> bool:
        """
        Check if a request has completed.
        Args:
            request_id: Request ID to check
        Returns:
            True if request is complete, False otherwise
        """
        with self._lock:
            return request_id in self.completed_requests
    def get_request_status(self, request_id: str) -> Optional["FetchStatus"]:
        """
        Get the status of a fetch request.
        Args:
            request_id: Request ID to get status for
        Returns:
            Request status if found, None otherwise
        """
        with self._lock:
            if request_id in self.active_requests:
                return self.active_requests[request_id].status
            elif request_id in self.completed_requests:
                result = self.completed_requests[request_id]
                return FetchStatus.COMPLETED if result.success else FetchStatus.FAILED
            return None
    def cancel_request(self, request_id: str) -> bool:
        """
        Cancel a pending or in-progress request.
        Args:
            request_id: Request ID to cancel
        Returns:
            True if request was cancelled, False if not found or already complete
        """
        with self._lock:
            if request_id in self.active_requests:
                request = self.active_requests[request_id]
                request.status = FetchStatus.CANCELLED
                del self.active_requests[request_id]
                logger.info(f"Cancelled request {request_id}")
                return True
            return False
    def get_statistics(self) -> Dict[str, Any]:
        """
        Get service statistics.
        Returns:
            Dictionary containing service statistics plus current queue/request counts
        """
        with self._lock:
            return {
                **self.stats,
                'active_requests': len(self.active_requests),
                'completed_requests_count': len(self.completed_requests),
                'queue_size': self.request_queue.qsize()
            }
    def clear_completed_requests(self, older_than_hours: int = 24):
        """
        Trim the completed-request history.
        FetchResult carries no completion timestamp, so `older_than_hours` cannot
        be honoured exactly; the history is instead capped at the most recent
        1000 results (dict insertion order == completion order).
        Bug fix: the original appended *every* entry to the removal list once the
        dict exceeded 1000 items, wiping the entire history instead of the oldest.
        Args:
            older_than_hours: Kept for interface compatibility; currently unused.
        """
        max_results = 1000
        with self._lock:
            overflow = len(self.completed_requests) - max_results
            if overflow > 0:
                to_remove = list(self.completed_requests)[:overflow]
                for request_id in to_remove:
                    del self.completed_requests[request_id]
                logger.info(f"Cleared {len(to_remove)} old completed requests")
    def shutdown(self, wait: bool = True, timeout: int = 30):
        """
        Shutdown the background data service.
        Args:
            wait: Whether to wait for active requests to complete
            timeout: Kept for interface compatibility and ignored:
                ThreadPoolExecutor.shutdown() has never accepted a timeout
                argument (the original call always raised TypeError and fell
                into its own fallback path).
        """
        logger.info("Shutting down BackgroundDataService...")
        self._shutdown = True
        # Cancel all active requests before stopping the executor.
        with self._lock:
            for request_id in list(self.active_requests.keys()):
                self.cancel_request(request_id)
        self.executor.shutdown(wait=wait)
        logger.info("BackgroundDataService shutdown complete")
    def __del__(self):
        """Best-effort cleanup when the service is garbage-collected."""
        # getattr guard: __del__ may run on a partially-constructed instance
        # if __init__ raised before _shutdown was assigned.
        if not getattr(self, '_shutdown', True):
            self.shutdown(wait=False)
# Global service instance, lazily created by get_background_service().
_background_service: Optional[BackgroundDataService] = None
# Guards creation and teardown of the singleton across threads.
_service_lock = threading.Lock()
def get_background_service(cache_manager=None, max_workers: int = 3) -> BackgroundDataService:
    """
    Return the process-wide BackgroundDataService singleton, creating it on
    first use.
    Args:
        cache_manager: Cache manager instance (required for the first call only)
        max_workers: Maximum number of background threads (used at creation)
    Returns:
        The shared BackgroundDataService instance
    Raises:
        ValueError: if the singleton does not exist yet and no cache_manager
        was supplied.
    """
    global _background_service
    with _service_lock:
        # Fast path: singleton already exists.
        if _background_service is not None:
            return _background_service
        if cache_manager is None:
            raise ValueError("cache_manager is required for first call to get_background_service")
        _background_service = BackgroundDataService(cache_manager, max_workers)
        return _background_service
def shutdown_background_service():
    """Shutdown the global background data service, if one was created."""
    global _background_service
    with _service_lock:
        if _background_service is None:
            return
        _background_service.shutdown()
        _background_service = None

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -0,0 +1,363 @@
"""
Abstract API Data Extraction Layer
This module provides a pluggable system for extracting game data from different
sports APIs. Each sport can have its own extractor that handles sport-specific
fields and data structures.
"""
from abc import ABC, abstractmethod
from typing import Dict, Any, Optional, List
import logging
from datetime import datetime
import pytz
class APIDataExtractor(ABC):
"""Abstract base class for API data extraction."""
def __init__(self, logger: logging.Logger):
self.logger = logger
@abstractmethod
def extract_game_details(self, game_event: Dict) -> Optional[Dict]:
"""Extract common game details from raw API data."""
pass
@abstractmethod
def get_sport_specific_fields(self, game_event: Dict) -> Dict:
"""Extract sport-specific fields (downs, innings, periods, etc.)."""
pass
def _extract_common_details(self, game_event: Dict) -> tuple[Dict | None, Dict | None, Dict | None, Dict | None, Dict | None]:
"""Extract common game details that work across all sports."""
if not game_event:
return None, None, None, None, None
try:
competition = game_event["competitions"][0]
status = competition["status"]
competitors = competition["competitors"]
game_date_str = game_event["date"]
situation = competition.get("situation")
# Parse game time
start_time_utc = None
try:
start_time_utc = datetime.fromisoformat(game_date_str.replace("Z", "+00:00"))
except ValueError:
self.logger.warning(f"Could not parse game date: {game_date_str}")
# Extract teams
home_team = next((c for c in competitors if c.get("homeAway") == "home"), None)
away_team = next((c for c in competitors if c.get("homeAway") == "away"), None)
if not home_team or not away_team:
self.logger.warning(f"Could not find home or away team in event: {game_event.get('id')}")
return None, None, None, None, None
return {
"game_event": game_event,
"competition": competition,
"status": status,
"situation": situation,
"start_time_utc": start_time_utc,
"home_team": home_team,
"away_team": away_team
}, home_team, away_team, status, situation
except Exception as e:
self.logger.error(f"Error extracting common details: {e}")
return None, None, None, None, None
class ESPNFootballExtractor(APIDataExtractor):
    """ESPN API extractor for football (NFL/NCAA)."""
    def extract_game_details(self, game_event: Dict) -> Optional[Dict]:
        """Extract football game details from ESPN API."""
        common_data, home_team, away_team, status, _ = self._extract_common_details(game_event)
        if not common_data:
            return None
        try:
            state = status["type"]["state"]
            details = {
                "id": game_event.get("id"),
                "home_abbr": home_team["team"]["abbreviation"],
                "away_abbr": away_team["team"]["abbreviation"],
                "home_score": str(home_team.get("score", "0")),
                "away_score": str(away_team.get("score", "0")),
                "home_team_name": home_team["team"].get("displayName", ""),
                "away_team_name": away_team["team"].get("displayName", ""),
                "status_text": status["type"].get("shortDetail", ""),
                "is_live": state == "in",
                "is_final": state == "post",
                "is_upcoming": state == "pre",
            }
            # Merge in football-specific situation fields (downs, timeouts, ...).
            details.update(self.get_sport_specific_fields(game_event))
            return details
        except Exception as e:
            self.logger.error(f"Error extracting football game details: {e}")
            return None
    def get_sport_specific_fields(self, game_event: Dict) -> Dict:
        """Extract football-specific fields (down/distance, possession, timeouts)."""
        try:
            competition = game_event["competitions"][0]
            status = competition["status"]
            situation = competition.get("situation", {})
            # Neutral defaults used for games that are not in progress.
            fields = {
                "down": "",
                "distance": "",
                "possession": "",
                "is_redzone": False,
                "home_timeouts": 0,
                "away_timeouts": 0,
                "scoring_event": ""
            }
            if situation and status["type"]["state"] == "in":
                for out_key, api_key, default in (
                    ("down", "down", ""),
                    ("distance", "distance", ""),
                    ("possession", "possession", ""),
                    ("is_redzone", "isRedZone", False),
                    ("home_timeouts", "homeTimeouts", 0),
                    ("away_timeouts", "awayTimeouts", 0),
                ):
                    fields[out_key] = situation.get(api_key, default)
                # Flag touchdown / field-goal events from the status detail text.
                status_detail = status["type"].get("detail", "").lower()
                if "touchdown" in status_detail or "field goal" in status_detail:
                    fields["scoring_event"] = status_detail
            return fields
        except Exception as e:
            self.logger.error(f"Error extracting football-specific fields: {e}")
            return {}
class ESPNBaseballExtractor(APIDataExtractor):
    """ESPN API extractor for baseball (MLB)."""
    def extract_game_details(self, game_event: Dict) -> Optional[Dict]:
        """Extract baseball game details from ESPN API."""
        common_data, home_team, away_team, status, _ = self._extract_common_details(game_event)
        if not common_data:
            return None
        try:
            state = status["type"]["state"]
            details = {
                "id": game_event.get("id"),
                "home_abbr": home_team["team"]["abbreviation"],
                "away_abbr": away_team["team"]["abbreviation"],
                "home_score": str(home_team.get("score", "0")),
                "away_score": str(away_team.get("score", "0")),
                "home_team_name": home_team["team"].get("displayName", ""),
                "away_team_name": away_team["team"].get("displayName", ""),
                "status_text": status["type"].get("shortDetail", ""),
                "is_live": state == "in",
                "is_final": state == "post",
                "is_upcoming": state == "pre",
            }
            # Merge in baseball-specific situation fields (inning, count, ...).
            details.update(self.get_sport_specific_fields(game_event))
            return details
        except Exception as e:
            self.logger.error(f"Error extracting baseball game details: {e}")
            return None
    def get_sport_specific_fields(self, game_event: Dict) -> Dict:
        """Extract baseball-specific fields (inning, outs, count, matchup)."""
        try:
            competition = game_event["competitions"][0]
            status = competition["status"]
            situation = competition.get("situation", {})
            # Neutral defaults used for games that are not in progress.
            fields = {
                "inning": "",
                "outs": 0,
                "bases": "",
                "strikes": 0,
                "balls": 0,
                "pitcher": "",
                "batter": ""
            }
            if situation and status["type"]["state"] == "in":
                for out_key, api_key, default in (
                    ("inning", "inning", ""),
                    ("outs", "outs", 0),
                    ("bases", "bases", ""),
                    ("strikes", "strikes", 0),
                    ("balls", "balls", 0),
                    ("pitcher", "pitcher", ""),
                    ("batter", "batter", ""),
                ):
                    fields[out_key] = situation.get(api_key, default)
            return fields
        except Exception as e:
            self.logger.error(f"Error extracting baseball-specific fields: {e}")
            return {}
class ESPNHockeyExtractor(APIDataExtractor):
    """ESPN API extractor for hockey (NHL/NCAA)."""
    def extract_game_details(self, game_event: Dict) -> Optional[Dict]:
        """Extract hockey game details from ESPN API."""
        common_data, home_team, away_team, status, _ = self._extract_common_details(game_event)
        if not common_data:
            return None
        try:
            state = status["type"]["state"]
            details = {
                "id": game_event.get("id"),
                "home_abbr": home_team["team"]["abbreviation"],
                "away_abbr": away_team["team"]["abbreviation"],
                "home_score": str(home_team.get("score", "0")),
                "away_score": str(away_team.get("score", "0")),
                "home_team_name": home_team["team"].get("displayName", ""),
                "away_team_name": away_team["team"].get("displayName", ""),
                "status_text": status["type"].get("shortDetail", ""),
                "is_live": state == "in",
                "is_final": state == "post",
                "is_upcoming": state == "pre",
            }
            # Merge in hockey-specific situation fields (period, shots, ...).
            details.update(self.get_sport_specific_fields(game_event))
            return details
        except Exception as e:
            self.logger.error(f"Error extracting hockey game details: {e}")
            return None
    def get_sport_specific_fields(self, game_event: Dict) -> Dict:
        """Extract hockey-specific fields (period label, power play, shots on goal)."""
        try:
            competition = game_event["competitions"][0]
            status = competition["status"]
            situation = competition.get("situation", {})
            # Neutral defaults used for games that are not in progress.
            fields = {
                "period": "",
                "period_text": "",
                "power_play": False,
                "penalties": "",
                "shots_on_goal": {"home": 0, "away": 0}
            }
            if situation and status["type"]["state"] == "in":
                period = status.get("period", 0)
                # Regulation periods render as P1..P3; anything beyond is overtime.
                if 1 <= period <= 3:
                    period_label = f"P{period}"
                elif period > 3:
                    period_label = f"OT{period - 3}"
                else:
                    period_label = ""
                fields["period"] = str(period)
                fields["period_text"] = period_label
                fields["power_play"] = situation.get("isPowerPlay", False)
                fields["penalties"] = situation.get("penalties", "")
                fields["shots_on_goal"] = {
                    "home": situation.get("homeShots", 0),
                    "away": situation.get("awayShots", 0)
                }
            return fields
        except Exception as e:
            self.logger.error(f"Error extracting hockey-specific fields: {e}")
            return {}
class SoccerAPIExtractor(APIDataExtractor):
    """Generic extractor for soccer APIs (different structure than ESPN)."""
    def extract_game_details(self, game_event: Dict) -> Optional[Dict]:
        """Extract soccer game details from various soccer APIs."""
        # NOTE: this assumes a flat event structure and should be adapted to
        # whichever concrete soccer API is plugged in.
        try:
            home = game_event.get("home_team", {})
            away = game_event.get("away_team", {})
            details = {
                "id": game_event.get("id"),
                "home_abbr": home.get("abbreviation", ""),
                "away_abbr": away.get("abbreviation", ""),
                "home_score": str(game_event.get("home_score", "0")),
                "away_score": str(game_event.get("away_score", "0")),
                "home_team_name": home.get("name", ""),
                "away_team_name": away.get("name", ""),
                "status_text": game_event.get("status", ""),
                "is_live": game_event.get("is_live", False),
                "is_final": game_event.get("is_final", False),
                "is_upcoming": game_event.get("is_upcoming", False),
            }
            details.update(self.get_sport_specific_fields(game_event))
            return details
        except Exception as e:
            self.logger.error(f"Error extracting soccer game details: {e}")
            return None
    def get_sport_specific_fields(self, game_event: Dict) -> Dict:
        """Extract soccer-specific fields (half, stoppage time, cards, possession)."""
        try:
            cards = {
                "home_yellow": game_event.get("home_yellow_cards", 0),
                "away_yellow": game_event.get("away_yellow_cards", 0),
                "home_red": game_event.get("home_red_cards", 0),
                "away_red": game_event.get("away_red_cards", 0)
            }
            possession = {
                "home": game_event.get("home_possession", 0),
                "away": game_event.get("away_possession", 0)
            }
            return {
                "half": game_event.get("half", ""),
                "stoppage_time": game_event.get("stoppage_time", ""),
                "cards": cards,
                "possession": possession
            }
        except Exception as e:
            self.logger.error(f"Error extracting soccer-specific fields: {e}")
            return {}
# Factory function removed - sport classes now instantiate extractors directly

View File

@@ -0,0 +1,291 @@
"""
Pluggable Data Source Architecture
This module provides abstract data sources that can be plugged into the sports system
to support different APIs and data providers.
"""
from abc import ABC, abstractmethod
from typing import Dict, Any, Optional, List
import requests
import logging
from datetime import datetime, timedelta
import time
class DataSource(ABC):
    """Abstract base class for data sources."""
    def __init__(self, logger: logging.Logger):
        self.logger = logger
        self.session = requests.Session()
        # Retry transient server / rate-limit responses with backoff.
        from requests.adapters import HTTPAdapter
        from urllib3.util.retry import Retry
        retries = Retry(
            total=5,
            backoff_factor=1,
            status_forcelist=[429, 500, 502, 503, 504],
        )
        http_adapter = HTTPAdapter(max_retries=retries)
        for scheme in ("http://", "https://"):
            self.session.mount(scheme, http_adapter)
    @abstractmethod
    def fetch_live_games(self, sport: str, league: str) -> List[Dict]:
        """Fetch live games for a sport/league."""
        pass
    @abstractmethod
    def fetch_schedule(self, sport: str, league: str, date_range: tuple) -> List[Dict]:
        """Fetch schedule for a sport/league within date range."""
        pass
    @abstractmethod
    def fetch_standings(self, sport: str, league: str) -> Dict:
        """Fetch standings for a sport/league."""
        pass
    def get_headers(self) -> Dict[str, str]:
        """Default headers sent with every API request."""
        return {'User-Agent': 'LEDMatrix/1.0', 'Accept': 'application/json'}
class ESPNDataSource(DataSource):
    """ESPN site API data source (scoreboard and rankings endpoints)."""
    def __init__(self, logger: logging.Logger):
        super().__init__(logger)
        # All endpoints hang off this base path: /{sport}/{league}/...
        self.base_url = "https://site.api.espn.com/apis/site/v2/sports"
    def fetch_live_games(self, sport: str, league: str) -> List[Dict]:
        """
        Fetch today's in-progress games from the ESPN scoreboard.
        Args:
            sport: ESPN sport path segment (e.g. 'football')
            league: ESPN league path segment (e.g. 'nfl')
        Returns:
            List of event dicts currently in the 'in' state; [] on any error.
        """
        try:
            now = datetime.now()
            formatted_date = now.strftime("%Y%m%d")
            url = f"{self.base_url}/{sport}/{league}/scoreboard"
            response = self.session.get(url, params={"dates": formatted_date, "limit": 1000}, headers=self.get_headers(), timeout=15)
            response.raise_for_status()
            data = response.json()
            events = data.get('events', [])
            # Bug fix: the original indexed event['competitions'][0] with a
            # [{}] default, so an event carrying a *present but empty*
            # competitions list raised IndexError and the broad except below
            # dropped the entire batch. Guard empty lists explicitly.
            live_events = []
            for event in events:
                competitions = event.get('competitions') or [{}]
                state = competitions[0].get('status', {}).get('type', {}).get('state')
                if state == 'in':
                    live_events.append(event)
            self.logger.debug(f"Fetched {len(live_events)} live games for {sport}/{league}")
            return live_events
        except Exception as e:
            self.logger.error(f"Error fetching live games from ESPN: {e}")
            return []
    def fetch_schedule(self, sport: str, league: str, date_range: tuple) -> List[Dict]:
        """
        Fetch scheduled games from the ESPN scoreboard for a date range.
        Args:
            sport: ESPN sport path segment
            league: ESPN league path segment
            date_range: (start_date, end_date) datetime pair, inclusive
        Returns:
            List of event dicts; [] on any error.
        """
        try:
            start_date, end_date = date_range
            url = f"{self.base_url}/{sport}/{league}/scoreboard"
            params = {
                'dates': f"{start_date.strftime('%Y%m%d')}-{end_date.strftime('%Y%m%d')}",
                "limit": 1000
            }
            response = self.session.get(url, headers=self.get_headers(), params=params, timeout=15)
            response.raise_for_status()
            data = response.json()
            events = data.get('events', [])
            self.logger.debug(f"Fetched {len(events)} scheduled games for {sport}/{league}")
            return events
        except Exception as e:
            self.logger.error(f"Error fetching schedule from ESPN: {e}")
            return []
    def fetch_standings(self, sport: str, league: str) -> Dict:
        """
        Fetch rankings/standings from the ESPN rankings endpoint.
        Args:
            sport: ESPN sport path segment
            league: ESPN league path segment
        Returns:
            Raw rankings payload dict; {} on any error.
        """
        try:
            url = f"{self.base_url}/{sport}/{league}/rankings"
            response = self.session.get(url, headers=self.get_headers(), timeout=15)
            response.raise_for_status()
            data = response.json()
            self.logger.debug(f"Fetched standings for {sport}/{league}")
            return data
        except Exception as e:
            self.logger.error(f"Error fetching standings from ESPN: {e}")
            return {}
class MLBAPIDataSource(DataSource):
    """MLB Stats API data source (statsapi.mlb.com).

    The ``sport``/``league`` parameters are part of the DataSource interface
    but unused here: this source is MLB-only (sportId 1).
    """

    def __init__(self, logger: logging.Logger):
        super().__init__(logger)
        self.base_url = "https://statsapi.mlb.com/api/v1"

    def fetch_live_games(self, sport: str, league: str) -> List[Dict]:
        """Fetch today's MLB games and return only those currently live.

        Returns an empty list on any error (network, parsing, HTTP).
        """
        try:
            url = f"{self.base_url}/schedule"
            params = {
                'sportId': 1,  # MLB
                'date': datetime.now().strftime('%Y-%m-%d'),
                'hydrate': 'game,team,venue,weather'
            }
            response = self.session.get(url, headers=self.get_headers(), params=params, timeout=15)
            response.raise_for_status()
            data = response.json()
            # BUGFIX: 'dates' may be present but an empty list (no games
            # today); indexing [0] unconditionally raised IndexError then.
            date_entries = data.get('dates') or [{}]
            games = date_entries[0].get('games', [])
            # Filter for live games
            live_games = [game for game in games
                          if game.get('status', {}).get('abstractGameState') == 'Live']
            self.logger.debug(f"Fetched {len(live_games)} live games from MLB API")
            return live_games
        except Exception as e:
            self.logger.error(f"Error fetching live games from MLB API: {e}")
            return []

    def fetch_schedule(self, sport: str, league: str, date_range: tuple) -> List[Dict]:
        """Fetch the MLB schedule for the (start, end) datetimes in *date_range*.

        Flattens the per-date response into one list of game dicts.
        """
        try:
            start_date, end_date = date_range
            url = f"{self.base_url}/schedule"
            params = {
                'sportId': 1,  # MLB
                'startDate': start_date.strftime('%Y-%m-%d'),
                'endDate': end_date.strftime('%Y-%m-%d'),
                'hydrate': 'game,team,venue'
            }
            response = self.session.get(url, headers=self.get_headers(), params=params, timeout=15)
            response.raise_for_status()
            data = response.json()
            all_games = []
            for date_data in data.get('dates', []):
                all_games.extend(date_data.get('games', []))
            self.logger.debug(f"Fetched {len(all_games)} scheduled games from MLB API")
            return all_games
        except Exception as e:
            self.logger.error(f"Error fetching schedule from MLB API: {e}")
            return []

    def fetch_standings(self, sport: str, league: str) -> Dict:
        """Fetch regular-season standings from the MLB API.

        NOTE(review): leagueId 103 is the American League only — National
        League (104) standings are never fetched; confirm this is intended.
        """
        try:
            url = f"{self.base_url}/standings"
            params = {
                'leagueId': 103,  # American League
                'season': datetime.now().year,
                'standingsType': 'regularSeason'
            }
            response = self.session.get(url, headers=self.get_headers(), params=params, timeout=15)
            response.raise_for_status()
            data = response.json()
            self.logger.debug(f"Fetched standings from MLB API")
            return data
        except Exception as e:
            self.logger.error(f"Error fetching standings from MLB API: {e}")
            return {}
class SoccerAPIDataSource(DataSource):
    """Generic soccer data source (football-data.org-style API)."""

    def __init__(self, logger: logging.Logger, api_key: str = None):
        super().__init__(logger)
        self.api_key = api_key
        self.base_url = "https://api.football-data.org/v4"  # Example API

    def get_headers(self) -> Dict[str, str]:
        """Extend the default headers with the auth token when configured."""
        headers = super().get_headers()
        if self.api_key:
            headers['X-Auth-Token'] = self.api_key
        return headers

    def fetch_live_games(self, sport: str, league: str) -> List[Dict]:
        """Fetch matches currently marked LIVE for *league*."""
        try:
            # This would need to be adapted based on the specific soccer API
            response = self.session.get(
                f"{self.base_url}/matches",
                headers=self.get_headers(),
                params={'status': 'LIVE', 'competition': league},
                timeout=15,
            )
            response.raise_for_status()
            matches = response.json().get('matches', [])
            self.logger.debug(f"Fetched {len(matches)} live games from soccer API")
            return matches
        except Exception as e:
            self.logger.error(f"Error fetching live games from soccer API: {e}")
            return []

    def fetch_schedule(self, sport: str, league: str, date_range: tuple) -> List[Dict]:
        """Fetch matches for *league* between the two dates in *date_range*."""
        try:
            start_date, end_date = date_range
            query = {
                'competition': league,
                'dateFrom': start_date.strftime('%Y-%m-%d'),
                'dateTo': end_date.strftime('%Y-%m-%d')
            }
            response = self.session.get(
                f"{self.base_url}/matches",
                headers=self.get_headers(),
                params=query,
                timeout=15,
            )
            response.raise_for_status()
            matches = response.json().get('matches', [])
            self.logger.debug(f"Fetched {len(matches)} scheduled games from soccer API")
            return matches
        except Exception as e:
            self.logger.error(f"Error fetching schedule from soccer API: {e}")
            return []

    def fetch_standings(self, sport: str, league: str) -> Dict:
        """Fetch the standings table for *league*."""
        try:
            response = self.session.get(
                f"{self.base_url}/competitions/{league}/standings",
                headers=self.get_headers(),
                timeout=15,
            )
            response.raise_for_status()
            table = response.json()
            self.logger.debug(f"Fetched standings from soccer API")
            return table
        except Exception as e:
            self.logger.error(f"Error fetching standings from soccer API: {e}")
            return {}
# Factory function removed - sport classes now instantiate data sources directly

404
src/base_classes/hockey.py Normal file
View File

@@ -0,0 +1,404 @@
import logging
import time
from datetime import datetime, timezone
from typing import Any, Dict, Optional
from PIL import Image, ImageDraw, ImageFont
from src.base_classes.data_sources import ESPNDataSource
from src.base_classes.sports import SportsCore, SportsLive
from src.cache_manager import CacheManager
from src.display_manager import DisplayManager
class Hockey(SportsCore):
    """Base class for hockey sports with functionality shared by the
    live/recent/upcoming hockey managers (ESPN-backed)."""

    def __init__(
        self,
        config: Dict[str, Any],
        display_manager: DisplayManager,
        cache_manager: CacheManager,
        logger: logging.Logger,
        sport_key: str,
    ):
        super().__init__(config, display_manager, cache_manager, logger, sport_key)
        self.data_source = ESPNDataSource(logger)
        self.sport = "hockey"
        # Optional scorebug element; off unless enabled in the mode config.
        self.show_shots_on_goal = self.mode_config.get("show_shots_on_goal", False)

    def _extract_game_details(self, game_event: Dict) -> Optional[Dict]:
        """Extract hockey game details from an ESPN scoreboard event.

        Builds on the shared extraction helper and adds hockey-specific
        fields: period text, clock, power play, penalties and reconstructed
        shots on goal. Returns None when required data is missing or an
        error occurs.
        """
        try:
            details, home_team, away_team, status, situation = (
                self._extract_game_details_common(game_event)
            )
        except Exception as e:
            self.logger.error(f"Error in _extract_game_details_common: {e}", exc_info=True)
            return None
        if details is None or home_team is None or away_team is None or status is None:
            self.logger.warning(f"Missing required data in game event {game_event.get('id')}")
            return None
        try:
            competition = game_event["competitions"][0]
            status = competition["status"]
            powerplay = False
            penalties = ""
            # ESPN exposes saves and save% per team; shots *against* a team
            # are reconstructed as opponent_saves / opponent_save_pct.
            home_team_saves = next(
                (
                    int(c["displayValue"])
                    for c in home_team["statistics"]
                    if c.get("name") == "saves"
                ),
                0,
            )
            home_team_saves_per = next(
                (
                    float(c["displayValue"])
                    for c in home_team["statistics"]
                    if c.get("name") == "savePct"
                ),
                0.0,
            )
            away_team_saves = next(
                (
                    int(c["displayValue"])
                    for c in away_team["statistics"]
                    if c.get("name") == "saves"
                ),
                0,
            )
            away_team_saves_per = next(
                (
                    float(c["displayValue"])
                    for c in away_team["statistics"]
                    if c.get("name") == "savePct"
                ),
                0.0,
            )
            home_shots = 0
            away_shots = 0
            if home_team_saves_per > 0:
                away_shots = round(home_team_saves / home_team_saves_per)
            if away_team_saves_per > 0:
                home_shots = round(away_team_saves / away_team_saves_per)
            if situation and status["type"]["state"] == "in":
                powerplay = situation.get("isPowerPlay", False)
                penalties = situation.get("penalties", "")
            # Format the period / status text shown on the scorebug.
            period = status.get("period", 0)
            # Check for custom period text (used by EOJHL to preserve "1st", "2nd", "3rd" from API)
            period_text = status.get("customPeriodText", "")
            display_clock = status.get("displayClock", "0:00")
            self.logger.info(f"Status parsing - period: {period}, customPeriodText: '{period_text}', displayClock: '{display_clock}'")
            if not period_text:
                # Default period text formatting
                if status["type"]["state"] == "in":
                    if period == 0:
                        period_text = "Start"  # Before the opening faceoff
                    elif 1 <= period <= 3:
                        period_text = f"P{period}"  # Regulation periods
                    else:
                        period_text = f"OT{period - 3}"  # Overtime starts after P3
                elif status["type"]["state"] == "post":
                    # BUGFIX: check shootout (period 5) before overtime; the
                    # previous ordering made the FINAL/SO branch unreachable.
                    if period > 4:
                        period_text = "FINAL/SO"
                    elif period > 3:
                        period_text = "FINAL/OT"
                    else:
                        period_text = "FINAL"
                elif status["type"]["state"] == "pre":
                    period_text = details.get("game_time", "")  # Show time for upcoming
            details.update(
                {
                    "period": period,
                    "period_text": period_text,  # Formatted period/status
                    "clock": display_clock,
                    "power_play": powerplay,
                    "penalties": penalties,
                    "home_shots": home_shots,
                    "away_shots": away_shots,
                }
            )
            # Basic validation (can be expanded)
            if not details["home_abbr"] or not details["away_abbr"]:
                self.logger.warning(
                    f"Missing team abbreviation in event: {details['id']}"
                )
                return None
            self.logger.info(
                f"Parsed game: {details['away_abbr']}@{details['home_abbr']}, period_text='{period_text}', clock='{status.get('displayClock', '0:00')}', Live: {details['is_live']}"
            )
            return details
        except Exception as e:
            # Log the problematic event structure if possible
            self.logger.error(
                f"Error extracting game details: {e} from event: {game_event.get('id')}",
                exc_info=True,
            )
            return None
class HockeyLive(Hockey, SportsLive):
    """Live-game manager for hockey: renders the live scorebug (logos,
    score, period/clock, optional shots-on-goal, odds and the
    records/rankings footer)."""

    def __init__(
        self,
        config: Dict[str, Any],
        display_manager: DisplayManager,
        cache_manager: CacheManager,
        logger: logging.Logger,
        sport_key: str,
    ):
        super().__init__(config, display_manager, cache_manager, logger, sport_key)

    def _test_mode_update(self):
        """Advance the fake game clock by one second (test mode only).

        Counts the clock down; when a minute underflows the clock wraps to
        19:xx and the period cycles 1→2→3→1 so the display keeps changing.
        """
        if self.current_game and self.current_game["is_live"]:
            # For testing, we'll just update the clock to show it's working
            minutes = int(self.current_game["clock"].split(":")[0])
            seconds = int(self.current_game["clock"].split(":")[1])
            seconds -= 1
            if seconds < 0:
                seconds = 59
                minutes -= 1
                if minutes < 0:
                    minutes = 19
                    if self.current_game["period"] < 3:
                        self.current_game["period"] += 1
                    else:
                        self.current_game["period"] = 1
            self.current_game["clock"] = f"{minutes:02d}:{seconds:02d}"
            # Always update display in test mode

    def _record_or_rank_text(self, abbr: str, record: str) -> str:
        """Return the footer text for a team: its ranking (when rankings are
        enabled and the team is ranked) or its record (when only records are
        enabled), else an empty string.

        Rankings take priority over records when both are enabled; unranked
        teams show nothing in that case.
        """
        if not abbr:
            return ""
        if self.show_ranking:
            rank = self._team_rankings_cache.get(abbr, 0)
            return f"#{rank}" if rank > 0 else ""
        if self.show_records:
            return record
        return ""

    def _draw_scorebug_layout(self, game: Dict, force_clear: bool = False) -> None:
        """Draw the detailed scorebug layout for a live hockey game."""
        try:
            main_img = Image.new(
                "RGBA", (self.display_width, self.display_height), (0, 0, 0, 255)
            )
            overlay = Image.new(
                "RGBA", (self.display_width, self.display_height), (0, 0, 0, 0)
            )
            # Text elements are drawn on the overlay, then alpha-composited.
            draw_overlay = ImageDraw.Draw(overlay)
            home_logo = self._load_and_resize_logo(
                game["home_id"],
                game["home_abbr"],
                game["home_logo_path"],
                game.get("home_logo_url"),
            )
            away_logo = self._load_and_resize_logo(
                game["away_id"],
                game["away_abbr"],
                game["away_logo_path"],
                game.get("away_logo_url"),
            )
            if not home_logo or not away_logo:
                self.logger.error(
                    f"Failed to load logos for live game: {game.get('id')}"
                )
                # BUGFIX: draw the placeholder on the same image we paste.
                # Previously the text was drawn on a throwaway convert() copy
                # and a fresh (blank) conversion was pasted instead, so the
                # "Logo Error" text never reached the display.
                error_img = main_img.convert("RGB")
                draw_final = ImageDraw.Draw(error_img)
                self._draw_text_with_outline(
                    draw_final, "Logo Error", (5, 5), self.fonts["status"]
                )
                self.display_manager.image.paste(error_img, (0, 0))
                self.display_manager.update_display()
                return
            center_y = self.display_height // 2
            # Logos are pushed slightly off the edges so they frame the text.
            home_x = (
                self.display_width - home_logo.width + 10
            )  # adjusted from 18
            home_y = center_y - (home_logo.height // 2)
            main_img.paste(home_logo, (home_x, home_y), home_logo)
            away_x = -10  # adjusted from 18
            away_y = center_y - (away_logo.height // 2)
            main_img.paste(away_logo, (away_x, away_y), away_logo)
            # --- Draw Text Elements on Overlay ---
            # Period and clock (top center).
            period_text_val = game.get('period_text', '')
            clock_val = game.get('clock', '')
            self.logger.info(f"Rendering clock - period_text: '{period_text_val}', clock: '{clock_val}'")
            period_clock_text = (
                f"{period_text_val} {clock_val}".strip()
            )
            if game.get("is_period_break"):
                # Intermissions override the period/clock readout.
                period_clock_text = game.get("status_text", "Period Break")
            status_width = draw_overlay.textlength(
                period_clock_text, font=self.fonts["time"]
            )
            status_x = (self.display_width - status_width) // 2
            status_y = 1  # Position at top
            self._draw_text_with_outline(
                draw_overlay,
                period_clock_text,
                (status_x, status_y),
                self.fonts["time"],
            )
            # Scores (centered, away-home).
            home_score = str(game.get("home_score", "0"))
            away_score = str(game.get("away_score", "0"))
            score_text = f"{away_score}-{home_score}"
            score_width = draw_overlay.textlength(score_text, font=self.fonts["score"])
            score_x = (self.display_width - score_width) // 2
            score_y = (
                self.display_height // 2
            ) - 3  # centered
            self._draw_text_with_outline(
                draw_overlay, score_text, (score_x, score_y), self.fonts["score"]
            )
            # Shots on goal (optional, bottom center).
            if self.show_shots_on_goal:
                shots_font = ImageFont.truetype("assets/fonts/4x6-font.ttf", 6)
                home_shots = str(game.get("home_shots", "0"))
                away_shots = str(game.get("away_shots", "0"))
                shots_text = f"{away_shots} SHOTS {home_shots}"
                shots_bbox = draw_overlay.textbbox((0, 0), shots_text, font=shots_font)
                shots_height = shots_bbox[3] - shots_bbox[1]
                shots_y = self.display_height - shots_height - 1
                shots_width = draw_overlay.textlength(shots_text, font=shots_font)
                shots_x = (self.display_width - shots_width) // 2
                self._draw_text_with_outline(
                    draw_overlay, shots_text, (shots_x, shots_y), shots_font
                )
            # Draw odds if available
            if "odds" in game and game["odds"]:
                self._draw_dynamic_odds(
                    draw_overlay, game["odds"], self.display_width, self.display_height
                )
            # Records or rankings footer (bottom corners).
            if self.show_records or self.show_ranking:
                try:
                    record_font = ImageFont.truetype("assets/fonts/4x6-font.ttf", 6)
                    self.logger.debug(f"Loaded 6px record font successfully")
                except IOError:
                    record_font = ImageFont.load_default()
                    self.logger.warning(
                        f"Failed to load 6px font, using default font (size: {record_font.size})"
                    )
                away_abbr = game.get("away_abbr", "")
                home_abbr = game.get("home_abbr", "")
                record_bbox = draw_overlay.textbbox((0, 0), "0-0", font=record_font)
                record_height = record_bbox[3] - record_bbox[1]
                record_y = self.display_height - record_height - 1
                self.logger.debug(
                    f"Record positioning: height={record_height}, record_y={record_y}, display_height={self.display_height}"
                )
                # Away team footer, left-aligned.
                away_text = self._record_or_rank_text(away_abbr, game.get("away_record", ""))
                if away_text:
                    away_record_x = 3
                    self.logger.debug(
                        f"Drawing away ranking '{away_text}' at ({away_record_x}, {record_y}) with font size {record_font.size if hasattr(record_font, 'size') else 'unknown'}"
                    )
                    self._draw_text_with_outline(
                        draw_overlay,
                        away_text,
                        (away_record_x, record_y),
                        record_font,
                    )
                # Home team footer, right-aligned.
                home_text = self._record_or_rank_text(home_abbr, game.get("home_record", ""))
                if home_text:
                    home_record_bbox = draw_overlay.textbbox(
                        (0, 0), home_text, font=record_font
                    )
                    home_record_width = home_record_bbox[2] - home_record_bbox[0]
                    home_record_x = self.display_width - home_record_width - 3
                    self.logger.debug(
                        f"Drawing home ranking '{home_text}' at ({home_record_x}, {record_y}) with font size {record_font.size if hasattr(record_font, 'size') else 'unknown'}"
                    )
                    self._draw_text_with_outline(
                        draw_overlay,
                        home_text,
                        (home_record_x, record_y),
                        record_font,
                    )
            # Composite the text overlay onto the main image
            main_img = Image.alpha_composite(main_img, overlay)
            main_img = main_img.convert("RGB")  # Convert for display
            # Display the final image
            self.display_manager.image.paste(main_img, (0, 0))
            self.display_manager.update_display()  # Update display here for live
        except Exception as e:
            self.logger.error(
                f"Error displaying live Hockey game: {e}", exc_info=True
            )

1501
src/base_classes/sports.py Normal file

File diff suppressed because it is too large Load Diff

868
src/cache_manager.py Normal file
View File

@@ -0,0 +1,868 @@
import json
import os
import time
from datetime import datetime
import pytz
from typing import Any, Dict, Optional
import logging
import stat
import threading
import tempfile
from pathlib import Path
class DateTimeEncoder(json.JSONEncoder):
    """JSON encoder that serializes datetime objects as ISO-8601 strings."""

    def default(self, obj):
        # Only datetimes get special treatment; everything else defers to
        # the stock encoder (which raises TypeError for unknown types).
        if not isinstance(obj, datetime):
            return super().default(obj)
        return obj.isoformat()
class CacheManager:
"""Manages caching of API responses to reduce API calls."""
    def __init__(self):
        """Initialize the cache manager.

        Resolves a writable cache directory (falling back through several
        candidates), sets up the in-memory cache layer with its lock, loads
        the project config for sport-specific cache intervals when
        available, and initializes hit/miss performance counters.
        """
        # Initialize logger first
        self.logger = logging.getLogger(__name__)
        # Determine the most reliable writable directory
        self.cache_dir = self._get_writable_cache_dir()
        if self.cache_dir:
            self.logger.info(f"Using cache directory: {self.cache_dir}")
        else:
            # This is a critical failure, as caching is essential.
            self.logger.error("Could not find or create a writable cache directory. Caching will be disabled.")
            self.cache_dir = None
        self._memory_cache = {}  # In-memory cache for faster access
        self._memory_cache_timestamps = {}  # key -> epoch seconds when the memory entry was stored
        self._cache_lock = threading.Lock()  # guards on-disk reads/writes across threads
        # Initialize config manager for sport-specific intervals
        try:
            from src.config_manager import ConfigManager
            self.config_manager = ConfigManager()
            self.config_manager.load_config()
        except ImportError:
            # Project config is optional; callers fall back to defaults.
            self.config_manager = None
            self.logger.warning("ConfigManager not available, using default cache intervals")
        # Initialize performance metrics
        self._cache_metrics = {
            'hits': 0,
            'misses': 0,
            'api_calls_saved': 0,
            'background_hits': 0,
            'background_misses': 0,
            'total_fetch_time': 0.0,
            'fetch_count': 0
        }
    def _get_writable_cache_dir(self) -> Optional[str]:
        """Tries to find or create a writable cache directory, preferring a system path when available.

        Probes candidates in priority order, verifying writability with a
        throwaway test file:
          1. /var/cache/ledmatrix (persistent, service-friendly)
          2. ~<real user>/.ledmatrix_cache (handles sudo via SUDO_USER)
          3. /opt/ledmatrix/cache (alternative persistent location)
          4. <tempdir>/ledmatrix_cache (NOT persistent across reboots)
        Returns the first writable path, or None when none is usable.
        """
        # Attempt 1: System-wide persistent cache directory (preferred for services)
        try:
            system_cache_dir = '/var/cache/ledmatrix'
            if os.path.exists(system_cache_dir):
                # Directory exists: prove writability with a real write, not
                # just os.access (which can be wrong under ACLs/sandboxing).
                test_file = os.path.join(system_cache_dir, '.writetest')
                try:
                    with open(test_file, 'w') as f:
                        f.write('test')
                    os.remove(test_file)
                    return system_cache_dir
                except (IOError, OSError):
                    self.logger.warning(f"Directory exists but is not writable: {system_cache_dir}")
            else:
                os.makedirs(system_cache_dir, exist_ok=True)
                if os.access(system_cache_dir, os.W_OK):
                    return system_cache_dir
        except Exception as e:
            self.logger.warning(f"Could not use /var/cache/ledmatrix: {e}")
        # Attempt 2: User's home directory (handling sudo), but avoid /root preference
        try:
            # Prefer the invoking user's home when running under sudo so the
            # cache stays accessible after privileges drop.
            real_user = os.environ.get('SUDO_USER') or os.environ.get('USER', 'default')
            if real_user and real_user != 'root':
                home_dir = os.path.expanduser(f"~{real_user}")
            else:
                # When running as root and /var/cache/ledmatrix failed, still allow fallback to /root
                home_dir = os.path.expanduser('~')
            user_cache_dir = os.path.join(home_dir, '.ledmatrix_cache')
            os.makedirs(user_cache_dir, exist_ok=True)
            test_file = os.path.join(user_cache_dir, '.writetest')
            with open(test_file, 'w') as f:
                f.write('test')
            os.remove(test_file)
            return user_cache_dir
        except Exception as e:
            self.logger.warning(f"Could not use user-specific cache directory: {e}")
        # Attempt 3: /opt/ledmatrix/cache (alternative persistent location)
        try:
            opt_cache_dir = '/opt/ledmatrix/cache'
            # Check if directory exists and we can write to it
            if os.path.exists(opt_cache_dir):
                # Test if we can write to the existing directory
                test_file = os.path.join(opt_cache_dir, '.writetest')
                try:
                    with open(test_file, 'w') as f:
                        f.write('test')
                    os.remove(test_file)
                    return opt_cache_dir
                except (IOError, OSError):
                    self.logger.warning(f"Directory exists but is not writable: {opt_cache_dir}")
            else:
                # Try to create the directory
                os.makedirs(opt_cache_dir, exist_ok=True)
                if os.access(opt_cache_dir, os.W_OK):
                    return opt_cache_dir
        except Exception as e:
            self.logger.warning(f"Could not use /opt/ledmatrix/cache: {e}")
        # Attempt 4: System-wide temporary directory (fallback, not persistent)
        try:
            temp_cache_dir = os.path.join(tempfile.gettempdir(), 'ledmatrix_cache')
            os.makedirs(temp_cache_dir, exist_ok=True)
            if os.access(temp_cache_dir, os.W_OK):
                self.logger.warning("Using temporary cache directory - cache will NOT persist across restarts")
                return temp_cache_dir
        except Exception as e:
            self.logger.warning(f"Could not use system-wide temporary cache directory: {e}")
        # Return None if no directory is writable
        return None
def _ensure_cache_dir(self):
"""This method is deprecated and no longer needed."""
pass
def _get_cache_path(self, key: str) -> Optional[str]:
"""Get the path for a cache file."""
if not self.cache_dir:
return None
return os.path.join(self.cache_dir, f"{key}.json")
    def get_cached_data(self, key: str, max_age: int = 300, memory_ttl: Optional[int] = None) -> Optional[Dict]:
        """Get data from cache (memory first, then disk) honoring TTLs.

        - memory_ttl: TTL for in-memory entry; defaults to max_age if not provided
        - max_age: TTL for persisted (on-disk) entry based on the stored timestamp

        Returns the cached record as stored (typically a
        {'data', 'timestamp'} envelope) or None on a miss, a stale entry,
        or an unreadable cache file.
        """
        now = time.time()
        in_memory_ttl = memory_ttl if memory_ttl is not None else max_age
        # 1) Memory cache
        if key in self._memory_cache:
            timestamp = self._memory_cache_timestamps.get(key)
            # Timestamps may have been stored as strings; coerce defensively.
            if isinstance(timestamp, str):
                try:
                    timestamp = float(timestamp)
                except ValueError:
                    self.logger.error(f"Invalid timestamp format for key {key}: {timestamp}")
                    timestamp = None
            if timestamp is not None and (now - float(timestamp) <= in_memory_ttl):
                return self._memory_cache[key]
            # Expired memory entry → evict and fall through to disk
            self._memory_cache.pop(key, None)
            self._memory_cache_timestamps.pop(key, None)
        # 2) Disk cache
        cache_path = self._get_cache_path(key)
        if cache_path and os.path.exists(cache_path):
            try:
                with self._cache_lock:
                    with open(cache_path, 'r') as f:
                        record = json.load(f)
                    # Determine record timestamp (prefer embedded, else file mtime)
                    record_ts = None
                    if isinstance(record, dict):
                        record_ts = record.get('timestamp')
                    if record_ts is None:
                        try:
                            record_ts = os.path.getmtime(cache_path)
                        except OSError:
                            record_ts = None
                    if record_ts is not None:
                        try:
                            record_ts = float(record_ts)
                        except (TypeError, ValueError):
                            record_ts = None
                    # No determinable timestamp counts as fresh (best effort).
                    if record_ts is None or (now - record_ts) <= max_age:
                        # Hydrate memory cache (use current time to start memory TTL window)
                        self._memory_cache[key] = record
                        self._memory_cache_timestamps[key] = now
                        return record
                    else:
                        # Stale on disk; keep file for potential diagnostics but treat as miss
                        return None
            except json.JSONDecodeError as e:
                self.logger.error(f"Error parsing cache file for {key}: {e}")
                # If the file is corrupted, remove it
                try:
                    os.remove(cache_path)
                except OSError:
                    pass
                return None
            except Exception as e:
                self.logger.error(f"Error loading cache for {key}: {e}")
                return None
        # 3) Miss
        return None
    def save_cache(self, key: str, data: Dict) -> None:
        """
        Save data to cache.

        Updates the in-memory layer immediately, then persists to disk via
        an atomic temp-file + rename so a crash cannot leave a partial file.
        All failures are logged and swallowed; callers never see exceptions.

        Args:
            key: Cache key
            data: Data to cache
        """
        try:
            # Update memory cache first
            self._memory_cache[key] = data
            self._memory_cache_timestamps[key] = time.time()
            # Save to file if a cache directory is available
            cache_path = self._get_cache_path(key)
            if cache_path:
                # Atomic write to avoid partial/corrupt files
                with self._cache_lock:
                    tmp_dir = os.path.dirname(cache_path)
                    try:
                        # mkstemp in the destination directory so os.replace
                        # is an atomic same-filesystem rename.
                        fd, tmp_path = tempfile.mkstemp(prefix=f".{os.path.basename(cache_path)}.", dir=tmp_dir)
                        try:
                            with os.fdopen(fd, 'w') as tmp_file:
                                json.dump(data, tmp_file, indent=4, cls=DateTimeEncoder)
                                tmp_file.flush()
                                # Force bytes to disk before the rename.
                                os.fsync(tmp_file.fileno())
                            os.replace(tmp_path, cache_path)
                        finally:
                            # Clean up the temp file if the rename never happened.
                            if os.path.exists(tmp_path):
                                try:
                                    os.remove(tmp_path)
                                except OSError:
                                    pass
                    except Exception as e:
                        self.logger.error(f"Atomic write failed for key '{key}': {e}")
                        # Attempt one-time fallback write directly into /var/cache/ledmatrix if available
                        try:
                            fallback_dir = '/var/cache/ledmatrix'
                            if os.path.isdir(fallback_dir) and os.access(fallback_dir, os.W_OK):
                                fallback_path = os.path.join(fallback_dir, os.path.basename(cache_path))
                                with open(fallback_path, 'w') as tmp_file:
                                    json.dump(data, tmp_file, indent=4, cls=DateTimeEncoder)
                                self.logger.warning(f"Cache wrote to fallback location: {fallback_path}")
                        except Exception as e2:
                            self.logger.error(f"Fallback cache write also failed: {e2}")
        except (IOError, OSError) as e:
            self.logger.error(f"Failed to save cache for key '{key}': {e}")
        except Exception as e:
            self.logger.error(f"An unexpected error occurred while saving cache for key '{key}': {e}")
def load_cache(self, key: str) -> Optional[Dict[str, Any]]:
"""Load data from cache with memory caching."""
current_time = time.time()
# Check memory cache first
if key in self._memory_cache:
if current_time - self._memory_cache_timestamps.get(key, 0) < 60: # 1 minute TTL
return self._memory_cache[key]
else:
# Clear expired memory cache
if key in self._memory_cache:
del self._memory_cache[key]
if key in self._memory_cache_timestamps:
del self._memory_cache_timestamps[key]
cache_path = self._get_cache_path(key)
if not cache_path or not os.path.exists(cache_path):
return None
try:
with self._cache_lock:
with open(cache_path, 'r') as f:
try:
data = json.load(f)
# Update memory cache
self._memory_cache[key] = data
self._memory_cache_timestamps[key] = current_time
return data
except json.JSONDecodeError as e:
self.logger.error(f"Error parsing cache file for {key}: {e}")
# If the file is corrupted, remove it
os.remove(cache_path)
return None
except Exception as e:
self.logger.error(f"Error loading cache for {key}: {e}")
return None
def clear_cache(self, key: Optional[str] = None) -> None:
"""Clear cache for a specific key or all keys."""
with self._cache_lock:
if key:
# Clear specific key
if key in self._memory_cache:
del self._memory_cache[key]
del self._memory_cache_timestamps[key]
cache_path = self._get_cache_path(key)
if cache_path and os.path.exists(cache_path):
os.remove(cache_path)
self.logger.info(f"Cleared cache for key: {key}")
else:
# Clear all keys
memory_count = len(self._memory_cache)
self._memory_cache.clear()
self._memory_cache_timestamps.clear()
file_count = 0
if self.cache_dir:
for file in os.listdir(self.cache_dir):
if file.endswith('.json'):
os.remove(os.path.join(self.cache_dir, file))
file_count += 1
self.logger.info(f"Cleared all cache: {memory_count} memory entries, {file_count} cache files")
def has_data_changed(self, data_type: str, new_data: Dict[str, Any]) -> bool:
"""Check if data has changed from cached version."""
cached_data = self.load_cache(data_type)
if not cached_data:
return True
if data_type == 'weather':
return self._has_weather_changed(cached_data, new_data)
elif data_type == 'stocks':
return self._has_stocks_changed(cached_data, new_data)
elif data_type == 'stock_news':
return self._has_news_changed(cached_data, new_data)
elif data_type == 'nhl':
return self._has_nhl_changed(cached_data, new_data)
elif data_type == 'mlb':
return self._has_mlb_changed(cached_data, new_data)
return True
def _has_weather_changed(self, cached: Dict[str, Any], new: Dict[str, Any]) -> bool:
"""Check if weather data has changed."""
# Handle new cache structure where data is nested under 'data' key
if 'data' in cached:
cached = cached['data']
# Handle case where cached data might be the weather data directly
if 'current' in cached:
# This is the new structure with 'current' and 'forecast' keys
current_weather = cached.get('current', {})
if current_weather and 'main' in current_weather and 'weather' in current_weather:
cached_temp = round(current_weather['main']['temp'])
cached_condition = current_weather['weather'][0]['main']
return (cached_temp != new.get('temp') or
cached_condition != new.get('condition'))
# Handle old structure where temp and condition are directly accessible
return (cached.get('temp') != new.get('temp') or
cached.get('condition') != new.get('condition'))
def _has_stocks_changed(self, cached: Dict[str, Any], new: Dict[str, Any]) -> bool:
"""Check if stock data has changed."""
if not self._is_market_open():
return False
return cached.get('price') != new.get('price')
def _has_news_changed(self, cached: Dict[str, Any], new: Dict[str, Any]) -> bool:
"""Check if news data has changed."""
# Handle both dictionary and list formats
if isinstance(new, list):
# If new data is a list, cached data should also be a list
if not isinstance(cached, list):
return True
# Compare lengths and content
if len(cached) != len(new):
return True
# Compare titles since they're unique enough for our purposes
cached_titles = set(item.get('title', '') for item in cached)
new_titles = set(item.get('title', '') for item in new)
return cached_titles != new_titles
else:
# Original dictionary format handling
cached_headlines = set(h.get('id') for h in cached.get('headlines', []))
new_headlines = set(h.get('id') for h in new.get('headlines', []))
return not cached_headlines.issuperset(new_headlines)
def _has_nhl_changed(self, cached: Dict[str, Any], new: Dict[str, Any]) -> bool:
"""Check if NHL data has changed."""
return (cached.get('game_status') != new.get('game_status') or
cached.get('score') != new.get('score'))
def _has_mlb_changed(self, cached: Dict[str, Any], new: Dict[str, Any]) -> bool:
"""Check if MLB game data has changed."""
if not cached or not new:
return True
# Check if any games have changed status or score
for game_id, new_game in new.items():
cached_game = cached.get(game_id)
if not cached_game:
return True
# Check for score changes
if (new_game['away_score'] != cached_game['away_score'] or
new_game['home_score'] != cached_game['home_score']):
return True
# Check for status changes
if new_game['status'] != cached_game['status']:
return True
# For live games, check inning and count
if new_game['status'] == 'in':
if (new_game['inning'] != cached_game['inning'] or
new_game['inning_half'] != cached_game['inning_half'] or
new_game['balls'] != cached_game['balls'] or
new_game['strikes'] != cached_game['strikes'] or
new_game['bases_occupied'] != cached_game['bases_occupied']):
return True
return False
def _is_market_open(self) -> bool:
"""Check if the US stock market is currently open."""
et_tz = pytz.timezone('America/New_York')
now = datetime.now(et_tz)
# Check if it's a weekday
if now.weekday() >= 5: # 5 = Saturday, 6 = Sunday
return False
# Convert current time to ET
current_time = now.time()
market_open = datetime.strptime('09:30', '%H:%M').time()
market_close = datetime.strptime('16:00', '%H:%M').time()
return market_open <= current_time <= market_close
def update_cache(self, data_type: str, data: Dict[str, Any]) -> bool:
"""Update cache with new data."""
cache_data = {
'data': data,
'timestamp': time.time()
}
return self.save_cache(data_type, cache_data)
def get(self, key: str, max_age: int = 300) -> Optional[Dict]:
"""Get data from cache if it exists and is not stale."""
cached_data = self.get_cached_data(key, max_age)
if cached_data and 'data' in cached_data:
return cached_data['data']
return cached_data
def set(self, key: str, data: Dict) -> None:
"""Store data in cache with current timestamp."""
cache_data = {
'data': data,
'timestamp': time.time()
}
self.save_cache(key, cache_data)
def setup_persistent_cache(self) -> bool:
"""
Set up a persistent cache directory with proper permissions.
This should be run once with sudo to create the directory.
"""
try:
# Try to create /var/cache/ledmatrix with proper permissions
cache_dir = '/var/cache/ledmatrix'
os.makedirs(cache_dir, exist_ok=True)
# Set ownership to the real user (not root)
real_user = os.environ.get('SUDO_USER')
if real_user:
import pwd
try:
uid = pwd.getpwnam(real_user).pw_uid
gid = pwd.getpwnam(real_user).pw_gid
os.chown(cache_dir, uid, gid)
self.logger.info(f"Set ownership of {cache_dir} to {real_user}")
except Exception as e:
self.logger.warning(f"Could not set ownership: {e}")
# Set permissions to 755 (rwxr-xr-x)
os.chmod(cache_dir, 0o755)
self.logger.info(f"Successfully set up persistent cache directory: {cache_dir}")
return True
except Exception as e:
self.logger.error(f"Failed to set up persistent cache directory: {e}")
return False
def get_sport_live_interval(self, sport_key: str) -> int:
"""
Get the live_update_interval for a specific sport from config.
Falls back to default values if config is not available.
"""
if not self.config_manager:
# Default intervals - all sports use 60 seconds as default
default_intervals = {
'soccer': 60, # Soccer default
'nfl': 60, # NFL default
'nhl': 60, # NHL default
'nba': 60, # NBA default
'mlb': 60, # MLB default
'milb': 60, # Minor league default
'ncaa_fb': 60, # College football default
'ncaa_baseball': 60, # College baseball default
'ncaam_basketball': 60, # College basketball default
}
return default_intervals.get(sport_key, 60)
try:
config = self.config_manager.config
# All sports now use _scoreboard suffix
sport_config = config.get(f"{sport_key}_scoreboard", {})
return sport_config.get("live_update_interval", 60) # Default to 60 seconds
except Exception as e:
self.logger.warning(f"Could not get live_update_interval for {sport_key}: {e}")
return 60 # Default to 60 seconds
    def get_cache_strategy(self, data_type: str, sport_key: str = None) -> Dict[str, Any]:
        """
        Get cache strategy for different data types.
        Now respects sport-specific live_update_interval configurations.

        Args:
            data_type: One of the strategy names below ('live_scores',
                'sports_live', 'stocks', 'news', ...). Unknown names fall
                back to the 'default' strategy.
            sport_key: Optional sport identifier; when given, live and
                recent/upcoming TTLs are overridden from that sport's
                {sport_key}_scoreboard config section.

        Returns:
            A dict with 'max_age' (disk-cache TTL, seconds), 'memory_ttl'
            (in-memory TTL, typically 2x max_age), 'force_refresh', and for
            market data an optional 'market_hours_only' flag.
        """
        # Get sport-specific live interval if provided
        live_interval = None
        if sport_key and data_type in ['sports_live', 'live_scores']:
            live_interval = self.get_sport_live_interval(sport_key)
        # Try to read sport-specific config for recent/upcoming
        recent_interval = None
        upcoming_interval = None
        if self.config_manager and sport_key:
            try:
                # All sports now use _scoreboard suffix
                sport_cfg = self.config_manager.config.get(f"{sport_key}_scoreboard", {})
                recent_interval = sport_cfg.get('recent_update_interval')
                upcoming_interval = sport_cfg.get('upcoming_update_interval')
            except Exception as e:
                self.logger.debug(f"Could not read sport-specific recent/upcoming intervals for {sport_key}: {e}")
        strategies = {
            # Ultra time-sensitive data (live scores, current weather)
            'live_scores': {
                'max_age': live_interval or 15,  # Use sport-specific interval
                'memory_ttl': (live_interval or 15) * 2,  # 2x for memory cache
                'force_refresh': True
            },
            'sports_live': {
                'max_age': live_interval or 30,  # Use sport-specific interval
                'memory_ttl': (live_interval or 30) * 2,
                'force_refresh': True
            },
            'weather_current': {
                'max_age': 300,  # 5 minutes
                'memory_ttl': 600,
                'force_refresh': False
            },
            # Market data (stocks, crypto)
            'stocks': {
                'max_age': 600,  # 10 minutes
                'memory_ttl': 1200,
                'market_hours_only': True,
                'force_refresh': False
            },
            'crypto': {
                'max_age': 300,  # 5 minutes (crypto trades 24/7)
                'memory_ttl': 600,
                'force_refresh': False
            },
            # Sports data
            'sports_recent': {
                'max_age': recent_interval or 1800,  # 30 minutes default; override by config
                'memory_ttl': (recent_interval or 1800) * 2,
                'force_refresh': False
            },
            'sports_upcoming': {
                'max_age': upcoming_interval or 10800,  # 3 hours default; override by config
                'memory_ttl': (upcoming_interval or 10800) * 2,
                'force_refresh': False
            },
            'sports_schedules': {
                'max_age': 86400,  # 24 hours
                'memory_ttl': 172800,
                'force_refresh': False
            },
            'leaderboard': {
                'max_age': 604800,  # 7 days (1 week) - football rankings updated weekly
                'memory_ttl': 1209600,  # 14 days in memory
                'force_refresh': False
            },
            # News and odds
            'news': {
                'max_age': 3600,  # 1 hour
                'memory_ttl': 7200,
                'force_refresh': False
            },
            'odds': {
                'max_age': 1800,  # 30 minutes for upcoming games
                'memory_ttl': 3600,
                'force_refresh': False
            },
            'odds_live': {
                'max_age': 120,  # 2 minutes for live games (odds change rapidly)
                'memory_ttl': 240,
                'force_refresh': False
            },
            # Static/stable data
            'team_info': {
                'max_age': 604800,  # 1 week
                'memory_ttl': 1209600,
                'force_refresh': False
            },
            'logos': {
                'max_age': 2592000,  # 30 days
                'memory_ttl': 5184000,
                'force_refresh': False
            },
            # Default fallback
            'default': {
                'max_age': 300,  # 5 minutes
                'memory_ttl': 600,
                'force_refresh': False
            }
        }
        return strategies.get(data_type, strategies['default'])
def get_data_type_from_key(self, key: str) -> str:
"""
Determine the appropriate cache strategy based on the cache key.
This helps automatically select the right cache duration.
"""
key_lower = key.lower()
# Live sports data
if any(x in key_lower for x in ['live', 'current', 'scoreboard']):
if 'soccer' in key_lower:
return 'sports_live' # Soccer live data is very time-sensitive
return 'sports_live'
# Weather data
if 'weather' in key_lower:
return 'weather_current'
# Market data
if 'stock' in key_lower or 'crypto' in key_lower:
if 'crypto' in key_lower:
return 'crypto'
return 'stocks'
# News data
if 'news' in key_lower:
return 'news'
# Odds data - differentiate between live and upcoming games
if 'odds' in key_lower:
# For live games, use shorter cache; for upcoming games, use longer cache
if any(x in key_lower for x in ['live', 'current']):
return 'odds_live' # Live odds change more frequently
return 'odds' # Regular odds for upcoming games
# Sports schedules and team info
if any(x in key_lower for x in ['schedule', 'team_map', 'league']):
return 'sports_schedules'
# Recent games (last few hours)
if 'recent' in key_lower:
return 'sports_recent'
# Upcoming games
if 'upcoming' in key_lower:
return 'sports_upcoming'
# Static data like logos, team info
if any(x in key_lower for x in ['logo', 'team_info', 'config']):
return 'team_info'
# Default fallback
return 'default'
def get_sport_key_from_cache_key(self, key: str) -> Optional[str]:
"""
Extract sport key from cache key to determine appropriate live_update_interval.
"""
key_lower = key.lower()
# Map cache key patterns to sport keys
sport_patterns = {
'nfl': ['nfl'],
'nba': ['nba', 'basketball'],
'mlb': ['mlb', 'baseball'],
'nhl': ['nhl', 'hockey'],
'soccer': ['soccer'],
'ncaa_fb': ['ncaa_fb', 'ncaafb', 'college_football'],
'ncaa_baseball': ['ncaa_baseball', 'college_baseball'],
'ncaam_basketball': ['ncaam_basketball', 'college_basketball'],
'milb': ['milb', 'minor_league'],
}
for sport_key, patterns in sport_patterns.items():
if any(pattern in key_lower for pattern in patterns):
return sport_key
return None
def get_cached_data_with_strategy(self, key: str, data_type: str = 'default') -> Optional[Dict]:
"""
Get data from cache using data-type-specific strategy.
Now respects sport-specific live_update_interval configurations.
"""
# Extract sport key for live sports data
sport_key = None
if data_type in ['sports_live', 'live_scores']:
sport_key = self.get_sport_key_from_cache_key(key)
strategy = self.get_cache_strategy(data_type, sport_key)
max_age = strategy['max_age']
memory_ttl = strategy.get('memory_ttl', max_age)
# For market data, check if market is open
if strategy.get('market_hours_only', False) and not self._is_market_open():
# During off-hours, extend cache duration
max_age *= 4 # 4x longer cache during off-hours
record = self.get_cached_data(key, max_age, memory_ttl)
# Unwrap if stored in { 'data': ..., 'timestamp': ... }
if isinstance(record, dict) and 'data' in record:
return record['data']
return record
def get_with_auto_strategy(self, key: str) -> Optional[Dict]:
"""
Get cached data using automatically determined strategy.
Now respects sport-specific live_update_interval configurations.
"""
data_type = self.get_data_type_from_key(key)
return self.get_cached_data_with_strategy(key, data_type)
def get_background_cached_data(self, key: str, sport_key: str = None) -> Optional[Dict]:
"""
Get data from background service cache with appropriate strategy.
This method is specifically designed for Recent/Upcoming managers
to use data cached by the background service.
Args:
key: Cache key to retrieve
sport_key: Sport key for determining appropriate cache strategy
Returns:
Cached data if available and fresh, None otherwise
"""
# Determine the appropriate cache strategy
data_type = self.get_data_type_from_key(key)
strategy = self.get_cache_strategy(data_type, sport_key)
# For Recent/Upcoming managers, we want to use the background service cache
# which should have longer TTLs than the individual manager caches
max_age = strategy['max_age']
memory_ttl = strategy.get('memory_ttl', max_age)
# Get the cached data
cached_data = self.get_cached_data(key, max_age, memory_ttl)
if cached_data:
# Record cache hit for performance monitoring
self.record_cache_hit('background')
# Unwrap if stored in { 'data': ..., 'timestamp': ... } format
if isinstance(cached_data, dict) and 'data' in cached_data:
return cached_data['data']
return cached_data
# Record cache miss for performance monitoring
self.record_cache_miss('background')
return None
def is_background_data_available(self, key: str, sport_key: str = None) -> bool:
"""
Check if background service has fresh data available.
This helps Recent/Upcoming managers determine if they should
wait for background data or fetch immediately.
"""
data_type = self.get_data_type_from_key(key)
strategy = self.get_cache_strategy(data_type, sport_key)
# Check if we have data that's still fresh according to background service TTL
cached_data = self.get_cached_data(key, strategy['max_age'])
return cached_data is not None
def generate_sport_cache_key(self, sport: str, date_str: str = None) -> str:
"""
Centralized cache key generation for sports data.
This ensures consistent cache keys across background service and managers.
Args:
sport: Sport identifier (e.g., 'nba', 'nfl', 'ncaa_fb')
date_str: Date string in YYYYMMDD format. If None, uses current UTC date.
Returns:
Cache key in format: {sport}_{date}
"""
if date_str is None:
date_str = datetime.now(pytz.utc).strftime('%Y%m%d')
return f"{sport}_{date_str}"
def record_cache_hit(self, cache_type: str = 'regular'):
"""Record a cache hit for performance monitoring."""
with self._cache_lock:
if cache_type == 'background':
self._cache_metrics['background_hits'] += 1
else:
self._cache_metrics['hits'] += 1
def record_cache_miss(self, cache_type: str = 'regular'):
"""Record a cache miss for performance monitoring."""
with self._cache_lock:
if cache_type == 'background':
self._cache_metrics['background_misses'] += 1
else:
self._cache_metrics['misses'] += 1
self._cache_metrics['api_calls_saved'] += 1
def record_fetch_time(self, duration: float):
"""Record fetch operation duration for performance monitoring."""
with self._cache_lock:
self._cache_metrics['total_fetch_time'] += duration
self._cache_metrics['fetch_count'] += 1
def get_cache_metrics(self) -> Dict[str, Any]:
"""Get current cache performance metrics."""
with self._cache_lock:
total_hits = self._cache_metrics['hits'] + self._cache_metrics['background_hits']
total_misses = self._cache_metrics['misses'] + self._cache_metrics['background_misses']
total_requests = total_hits + total_misses
avg_fetch_time = (self._cache_metrics['total_fetch_time'] /
self._cache_metrics['fetch_count']) if self._cache_metrics['fetch_count'] > 0 else 0.0
return {
'total_requests': total_requests,
'cache_hit_rate': total_hits / total_requests if total_requests > 0 else 0.0,
'background_hit_rate': (self._cache_metrics['background_hits'] /
(self._cache_metrics['background_hits'] + self._cache_metrics['background_misses'])
if (self._cache_metrics['background_hits'] + self._cache_metrics['background_misses']) > 0 else 0.0),
'api_calls_saved': self._cache_metrics['api_calls_saved'],
'average_fetch_time': avg_fetch_time,
'total_fetch_time': self._cache_metrics['total_fetch_time'],
'fetch_count': self._cache_metrics['fetch_count']
}
def log_cache_metrics(self):
"""Log current cache performance metrics."""
metrics = self.get_cache_metrics()
self.logger.info(f"Cache Performance - Hit Rate: {metrics['cache_hit_rate']:.2%}, "
f"Background Hit Rate: {metrics['background_hit_rate']:.2%}, "
f"API Calls Saved: {metrics['api_calls_saved']}, "
f"Avg Fetch Time: {metrics['average_fetch_time']:.2f}s")

166
src/cchl_managers.py Normal file
View File

@@ -0,0 +1,166 @@
# src/cchl_managers.py
import logging
from typing import Any, Dict, Optional, List
import json
import time
import requests
from datetime import datetime, timezone, date
from src.base_classes.hockey import Hockey, HockeyLive
from src.base_classes.sports import SportsRecent, SportsUpcoming
from src.cache_manager import CacheManager
from src.display_manager import DisplayManager
LOCAL_CCHL_FILE = "data/cchl_scoreboard.json"
LOCAL_CCHL_STANDINGS = "data/cchl_standings.json"
class BaseCCHLManager(Hockey):
    """Base class for CCHL managers; renders scoreboard and supplies leaderboard data."""
    def __init__(self, config: Dict[str, Any], display_manager: DisplayManager, cache_manager: CacheManager):
        self.logger = logging.getLogger('CCHL')
        super().__init__(config=config, display_manager=display_manager, cache_manager=cache_manager,
                         logger=self.logger, sport_key="cchl")
        self.league = "cchl"
        # CCHL specifics used for leaderboard data
        self.conferences: List[str] = ["yates", "robinson"]  # adjust to actual division names
        self.league_logo_path: str = "assets/sports/cchl_logos/CCHL.png"
        # Lazily populated by callers; None until standings are first loaded.
        self._cached_standings: Optional[Dict[str, List[Dict[str, Any]]]] = None
    # ---------- Leaderboard data providers ----------
    def standings_provider(self) -> Dict[str, List[Dict[str, Any]]]:
        """Read local standings JSON and return {division: [teams...]}. Called by LeaderboardManager.

        Any read/parse failure is logged and mapped to empty division lists,
        so callers never see an exception.
        """
        try:
            with open(LOCAL_CCHL_STANDINGS, "r") as f:
                data = json.load(f)
            return {
                "yates": data.get("yates", []),
                "robinson": data.get("robinson", []),
            }
        except Exception as e:
            self.logger.error(f"Failed to load CCHL standings JSON: {e}")
            return {"yates": [], "robinson": []}
    def get_league_data(self) -> Dict[str, Any]:
        """Return all static and dynamic data the generic LeaderboardManager needs."""
        return {
            "conferences": self.conferences,
            "league_logo": self.league_logo_path,
            "standings_provider": self.standings_provider,
        }
    # ---------- Scoreboard methods ----------
    def _build_url(self) -> str:
        """Build the HockeyTech statview 'schedule_day' feed URL for today's date."""
        today = date.today().strftime("%Y-%m-%d")
        return (
            "https://lscluster.hockeytech.com/feed/index.php"
            f"?feed=statviewfeed&view=schedule_day&date={today}"
            "&site_id=2&key=1defb601c9b37c24&client_code=cchl"
            "&league_id=1&season_id=110&conference_id=undefined"
            "&division_id=-1&team=-1&lang=en&forceDate=true"
            "&useSeason=false&allLeagues=0"
        )
    def fetch_games(self):
        """Fetch today's games from the live feed; return [] on any failure.

        The feed may respond as JSONP ('angular.callbacks...'); the wrapper
        is stripped before parsing.
        """
        url = self._build_url()
        try:
            raw = requests.get(url, timeout=10).text
            if raw.startswith("angular.callbacks"):
                # Strip the JSONP callback wrapper to get the JSON payload.
                raw = raw[raw.find("(")+1 : raw.rfind(")")]
            data = json.loads(raw)
            return data.get("games", [])
        except Exception as e:
            self.logger.error(f"[CCHL] Failed to fetch games: {e}")
            return []
    def parse_record(self, team):
        """Format a team dict as 'W-L-OT', folding SO losses into the OT column."""
        ot = int(team.get("solosses", 0)) + int(team.get("otlosses", 0))
        return f"{team.get('wins',0)}-{team.get('losses',0)}-{ot}"
    def _fetch_local_data(self):
        """Load the local scoreboard JSON fixture; return None on failure."""
        try:
            with open(LOCAL_CCHL_FILE, "r") as f:
                return json.load(f)
        except Exception as e:
            self.logger.error(f"Failed to load local CCHL scoreboard JSON: {e}")
            return None
    def _fetch_data(self, date_str: str = None):
        # Scoreboard data is served from the local JSON file; date_str is
        # accepted for interface compatibility but ignored here.
        return self._fetch_local_data()
    def _load_standings_records(self):
        """Build an {abbreviation: record-string} lookup from the standings file.

        Returns an empty dict (and logs a warning) when the file is missing
        or malformed.
        """
        try:
            with open(LOCAL_CCHL_STANDINGS, "r") as f:
                standings = json.load(f)
            lookup = {}
            for div in self.conferences:
                for team in standings.get(div, []):
                    abbr = team.get("abbreviation")
                    rec = team.get("record")
                    if abbr and rec:
                        lookup[abbr] = rec
            return lookup
        except Exception as e:
            self.logger.warning(f"Could not load CCHL standings records: {e}")
            return {}
    def _extract_game_details(self, game_event: Dict) -> Optional[Dict]:
        """Augment the common game details with CCHL scores, logos, and records.

        Returns None when the common extractor yields nothing or the event
        is missing expected fields. NOTE(review): the expected shape of
        home_team/away_team/status comes from Hockey._extract_game_details_common,
        which is defined outside this file — confirm against the base class.
        """
        details, home_team, away_team, status, situation = (
            self._extract_game_details_common(game_event)
        )
        if not details:
            return None
        try:
            # Standings records are loaded once per instance and memoized.
            if not hasattr(self, "_standings_lookup"):
                self._standings_lookup = self._load_standings_records()
            home_abbr = home_team["team"]["abbreviation"]
            away_abbr = away_team["team"]["abbreviation"]
            details.update({
                "home_score": home_team.get("score", "0"),
                "away_score": away_team.get("score", "0"),
                "home_logo_path": self.logo_dir / f"{home_abbr}.png",
                "away_logo_path": self.logo_dir / f"{away_abbr}.png",
                "status_text": status["type"]["shortDetail"],
                "home_record": self._standings_lookup.get(home_abbr, "0-0-0"),
                "away_record": self._standings_lookup.get(away_abbr, "0-0-0"),
            })
            return details
        except Exception as e:
            self.logger.error(f"CCHL extract error: {e}")
            return None
# --- Concrete Managers ---
class CCHLLiveManager(BaseCCHLManager, HockeyLive):
    """Displays live (in-progress) CCHL games; rendering comes from HockeyLive."""
    def __init__(self, config, display_manager, cache_manager):
        super().__init__(config, display_manager, cache_manager)
        self.logger = logging.getLogger('CCHLLiveManager')
class CCHLRecentManager(BaseCCHLManager, SportsRecent):
    """Displays recently completed CCHL games; rendering comes from SportsRecent."""
    def __init__(self, config, display_manager, cache_manager):
        super().__init__(config, display_manager, cache_manager)
        self.logger = logging.getLogger('CCHLRecentManager')
class CCHLUpcomingManager(BaseCCHLManager, SportsUpcoming):
    """Displays upcoming CCHL games; rendering comes from SportsUpcoming."""
    def __init__(self, config, display_manager, cache_manager):
        super().__init__(config, display_manager, cache_manager)
        self.logger = logging.getLogger('CCHLUpcomingManager')
    def _fetch_data(self, date_str: str = None):
        """Load local scoreboard data and keep only future events, sorted by start time.

        Event dates are parsed as '%Y-%m-%dT%H:%M:%SZ' and treated as UTC;
        events with unparseable dates are skipped with a warning.
        """
        data = super()._fetch_data(date_str)
        if not data:
            return None
        now = datetime.now(timezone.utc)
        future_events = []
        for e in data.get("events", []):
            try:
                dt = datetime.strptime(e["date"], "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=timezone.utc)
                if dt >= now:
                    future_events.append((dt, e))
            except Exception as ex:
                self.logger.warning(f"Skipping event with bad date: {ex}")
        # Soonest game first.
        future_events.sort(key=lambda x: x[0])
        data["events"] = [e for _, e in future_events]
        return data

144
src/clock.py Normal file
View File

@@ -0,0 +1,144 @@
import time
import logging
from datetime import datetime
import pytz
from typing import Dict, Any
from src.config_manager import ConfigManager
from src.display_manager import DisplayManager
# Get logger without configuring
logger = logging.getLogger(__name__)
class Clock:
    """Render the current time and date on the LED matrix.

    Can run standalone (creating its own ConfigManager/DisplayManager) or be
    embedded with a shared config and display manager.
    """
    def __init__(self, display_manager: DisplayManager = None, config: Dict[str, Any] = None):
        if config is not None:
            # Use provided config
            self.config = config
            self.config_manager = None  # Not needed when config is provided
        else:
            # Fallback: create ConfigManager and load config (for standalone usage)
            self.config_manager = ConfigManager()
            self.config = self.config_manager.load_config()
        # Use the provided display_manager or create a new one if none provided
        self.display_manager = display_manager or DisplayManager(self.config.get('display', {}))
        logger.info("Clock initialized with display_manager: %s", id(self.display_manager))
        # Stored but not read anywhere in this class (kept for config symmetry).
        self.location = self.config.get('location', {})
        self.clock_config = self.config.get('clock', {})
        # Use configured timezone if available, otherwise try to determine it
        self.timezone = self._get_timezone()
        # Last rendered values; used to skip redraws when nothing changed.
        self.last_time = None
        self.last_date = None
        # Colors for different elements - using super bright colors
        self.COLORS = {
            'time': (255, 255, 255),  # Pure white for time
            'ampm': (255, 255, 128),  # Bright warm yellow for AM/PM
            'date': (255, 128, 64)    # Bright orange for date
        }
    def _get_timezone(self) -> pytz.timezone:
        """Get timezone from the config file, falling back to UTC on bad names."""
        config_timezone = self.config.get('timezone', 'UTC')
        try:
            return pytz.timezone(config_timezone)
        except pytz.exceptions.UnknownTimeZoneError:
            logger.warning(
                f"Invalid timezone '{config_timezone}' in config. "
                "Falling back to UTC. Please check your config.json file. "
                "A list of valid timezones can be found at "
                "https://en.wikipedia.org/wiki/List_of_tz_database_time_zones"
            )
            return pytz.utc
    def _get_ordinal_suffix(self, day: int) -> str:
        """Get the ordinal suffix for a day number (1st, 2nd, 3rd, etc.)."""
        # 11th-13th are special-cased: they take 'th' despite ending in 1-3.
        if 10 <= day % 100 <= 20:
            suffix = 'th'
        else:
            suffix = {1: 'st', 2: 'nd', 3: 'rd'}.get(day % 10, 'th')
        return suffix
    def get_current_time(self) -> tuple:
        """Get the current time and date in the configured timezone.

        Returns:
            (time_str, ampm, weekday, date_str) — e.g. ('3:07', 'PM',
            'Monday', 'June 3rd').
        """
        current = datetime.now(self.timezone)
        # Format time in 12-hour format with AM/PM
        time_str = current.strftime('%I:%M')  # Remove leading zero from hour
        if time_str.startswith('0'):
            time_str = time_str[1:]
        # Get AM/PM
        ampm = current.strftime('%p')
        # Format date with ordinal suffix - split into two lines
        day_suffix = self._get_ordinal_suffix(current.day)
        # Full weekday on first line, full month and day on second line
        weekday = current.strftime('%A')
        # NOTE(review): '%-d' (no leading zero) is a glibc extension — it
        # raises/misbehaves on Windows strftime. Fine on the Pi target.
        date_str = current.strftime(f'%B %-d{day_suffix}')
        return time_str, ampm, weekday, date_str
    def display_time(self, force_clear: bool = False) -> None:
        """Display the current time and date.

        Redraws only when the minute or date changed (or force_clear is set),
        to avoid needless matrix updates.
        """
        time_str, ampm, weekday, date_str = self.get_current_time()
        # Only update if something has changed
        if time_str != self.last_time or date_str != self.last_date or force_clear:
            # Clear the display
            self.display_manager.clear()
            # Calculate positions
            display_width = self.display_manager.matrix.width
            display_height = self.display_manager.matrix.height
            # Draw time (large, centered, near top)
            self.display_manager.draw_text(
                time_str,
                y=4,  # Move up slightly to make room for two lines of date
                color=self.COLORS['time'],
                small_font=True
            )
            # Draw AM/PM (small, next to time)
            time_width = self.display_manager.font.getlength(time_str)
            # Right edge of the centered time string, plus a 4px gap.
            ampm_x = (display_width + time_width) // 2 + 4
            self.display_manager.draw_text(
                ampm,
                x=ampm_x,
                y=4,  # Align with time
                color=self.COLORS['ampm'],
                small_font=True
            )
            # Draw weekday on first line (small font)
            self.display_manager.draw_text(
                weekday,
                y=display_height - 18,  # First line of date
                color=self.COLORS['date'],
                small_font=True
            )
            # Draw month and day on second line (small font)
            self.display_manager.draw_text(
                date_str,
                y=display_height - 9,  # Second line of date
                color=self.COLORS['date'],
                small_font=True
            )
            # Update the display after drawing everything
            self.display_manager.update_display()
            # Update cache
            self.last_time = time_str
            self.last_date = date_str
if __name__ == "__main__":
    # Standalone entry point: refresh the clock at the configured interval
    # (default 1s) until Ctrl-C, then release the display hardware.
    clock = Clock()
    try:
        while True:
            clock.display_time()
            time.sleep(clock.clock_config.get('update_interval', 1))
    except KeyboardInterrupt:
        print("\nClock stopped by user")
    finally:
        clock.display_manager.cleanup()

274
src/config_manager.py Normal file
View File

@@ -0,0 +1,274 @@
import json
import os
from typing import Dict, Any, Optional
class ConfigManager:
    def __init__(self, config_path: str = None, secrets_path: str = None):
        """Record config/secrets/template paths; nothing is read until load_config().

        Paths are resolved relative to the current working directory.
        """
        # Use current working directory as base
        self.config_path = config_path or "config/config.json"
        self.secrets_path = secrets_path or "config/config_secrets.json"
        self.template_path = "config/config.template.json"
        # Merged runtime config (main config + secrets) after load_config().
        self.config: Dict[str, Any] = {}
    def get_config_path(self) -> str:
        """Return the path of the main config file."""
        return self.config_path
    def get_secrets_path(self) -> str:
        """Return the path of the secrets file."""
        return self.secrets_path
    def load_config(self) -> Dict[str, Any]:
        """Load configuration from JSON files.

        Creates the main config from the template when missing, migrates it
        to pick up new template keys, then deep-merges the secrets file on
        top (tolerating unreadable/corrupt secrets). The merged dict is kept
        in self.config and returned.

        Raises:
            FileNotFoundError: when the main config (not secrets) is missing.
            json.JSONDecodeError: when the main config cannot be parsed.
        """
        try:
            # Check if config file exists, if not create from template
            if not os.path.exists(self.config_path):
                self._create_config_from_template()
            # Load main config
            print(f"Attempting to load config from: {os.path.abspath(self.config_path)}")
            with open(self.config_path, 'r') as f:
                self.config = json.load(f)
            # Migrate config to add any new items from template
            self._migrate_config()
            # Load and merge secrets if they exist (be permissive on errors)
            if os.path.exists(self.secrets_path):
                try:
                    with open(self.secrets_path, 'r') as f:
                        secrets = json.load(f)
                    # Deep merge secrets into config
                    self._deep_merge(self.config, secrets)
                except PermissionError as e:
                    print(f"Secrets file not readable ({self.secrets_path}): {e}. Continuing without secrets.")
                except (json.JSONDecodeError, OSError) as e:
                    print(f"Error reading secrets file ({self.secrets_path}): {e}. Continuing without secrets.")
            return self.config
        except FileNotFoundError as e:
            if str(e).find('config_secrets.json') == -1:  # Only raise if main config is missing
                print(f"Configuration file not found at {os.path.abspath(self.config_path)}")
                raise
            return self.config
        except json.JSONDecodeError:
            print("Error parsing configuration file")
            raise
        except Exception as e:
            print(f"Error loading configuration: {str(e)}")
            raise
def _strip_secrets_recursive(self, data_to_filter: Dict[str, Any], secrets: Dict[str, Any]) -> Dict[str, Any]:
"""Recursively remove secret keys from a dictionary."""
result = {}
for key, value in data_to_filter.items():
if key in secrets:
if isinstance(value, dict) and isinstance(secrets[key], dict):
# This key is a shared group, recurse
stripped_sub_dict = self._strip_secrets_recursive(value, secrets[key])
if stripped_sub_dict: # Only add if there's non-secret data left
result[key] = stripped_sub_dict
# Else, it's a secret key at this level, so we skip it
else:
# This key is not in secrets, so we keep it
result[key] = value
return result
    def save_config(self, new_config_data: Dict[str, Any]) -> None:
        """Save configuration to the main JSON file, stripping out secrets.

        The secrets file is read first so every secret key can be removed
        from what is written to disk; the full (secret-bearing) dict is kept
        in memory as self.config for runtime use.

        Raises:
            IOError: when the main config file cannot be written.
        """
        secrets_content = {}
        if os.path.exists(self.secrets_path):
            try:
                with open(self.secrets_path, 'r') as f_secrets:
                    secrets_content = json.load(f_secrets)
            except Exception as e:
                print(f"Warning: Could not load secrets file {self.secrets_path} during save: {e}")
                # Continue without stripping if secrets can't be loaded, or handle as critical error
                # For now, we'll proceed cautiously and save the full new_config_data if secrets are unreadable
                # to prevent accidental data loss if the secrets file is temporarily corrupt.
                # A more robust approach might be to fail the save or use a cached version of secrets.
        config_to_write = self._strip_secrets_recursive(new_config_data, secrets_content)
        try:
            with open(self.config_path, 'w') as f:
                json.dump(config_to_write, f, indent=4)
            # Update the in-memory config to the new state (which includes secrets for runtime)
            self.config = new_config_data
            print(f"Configuration successfully saved to {os.path.abspath(self.config_path)}")
            if secrets_content:
                print("Secret values were preserved in memory and not written to the main config file.")
        except IOError as e:
            print(f"Error writing configuration to file {os.path.abspath(self.config_path)}: {e}")
            raise
        except Exception as e:
            print(f"An unexpected error occurred while saving configuration: {str(e)}")
            raise
def get_secret(self, key: str) -> Optional[Any]:
"""Get a secret value by key."""
try:
if not os.path.exists(self.secrets_path):
return None
with open(self.secrets_path, 'r') as f:
secrets = json.load(f)
return secrets.get(key)
except (json.JSONDecodeError, IOError) as e:
print(f"Error reading secrets file: {e}")
return None
def _deep_merge(self, target: Dict, source: Dict) -> None:
"""Deep merge source dict into target dict."""
for key, value in source.items():
if key in target and isinstance(target[key], dict) and isinstance(value, dict):
self._deep_merge(target[key], value)
else:
target[key] = value
    def _create_config_from_template(self) -> None:
        """Create config.json from template if it doesn't exist.

        Parses the template (validating it is well-formed JSON) and rewrites
        it to the config path with 4-space indentation.

        Raises:
            FileNotFoundError: when the template file is missing.
        """
        if not os.path.exists(self.template_path):
            raise FileNotFoundError(f"Template file not found at {os.path.abspath(self.template_path)}")
        print(f"Creating config.json from template at {os.path.abspath(self.template_path)}")
        # Ensure config directory exists
        os.makedirs(os.path.dirname(self.config_path), exist_ok=True)
        # Copy template to config
        with open(self.template_path, 'r') as template_file:
            template_data = json.load(template_file)
        with open(self.config_path, 'w') as config_file:
            json.dump(template_data, config_file, indent=4)
        print(f"Created config.json from template at {os.path.abspath(self.config_path)}")
    def _migrate_config(self) -> None:
        """Migrate config to add new items from template with defaults.

        When the template defines keys the current config lacks, a
        .backup copy of the current config is written first, template
        defaults are merged in (never overwriting existing values), and the
        result is saved back to disk. All failures are swallowed after
        printing — the app continues with the un-migrated config.
        """
        if not os.path.exists(self.template_path):
            print(f"Template file not found at {os.path.abspath(self.template_path)}, skipping migration")
            return
        try:
            with open(self.template_path, 'r') as f:
                template_config = json.load(f)
            # Check if migration is needed
            if self._config_needs_migration(self.config, template_config):
                print("Config migration needed - adding new configuration items with defaults")
                # Create backup of current config
                backup_path = f"{self.config_path}.backup"
                with open(backup_path, 'w') as backup_file:
                    json.dump(self.config, backup_file, indent=4)
                print(f"Created backup of current config at {os.path.abspath(backup_path)}")
                # Merge template defaults into current config
                self._merge_template_defaults(self.config, template_config)
                # Save migrated config
                with open(self.config_path, 'w') as f:
                    json.dump(self.config, f, indent=4)
                print(f"Config migration completed and saved to {os.path.abspath(self.config_path)}")
            else:
                print("Config is up to date, no migration needed")
        except Exception as e:
            print(f"Error during config migration: {e}")
            # Don't raise - continue with current config
    def _config_needs_migration(self, current_config: Dict[str, Any], template_config: Dict[str, Any]) -> bool:
        """Return True when the template defines keys the current config lacks."""
        return self._has_new_keys(current_config, template_config)
def _has_new_keys(self, current: Dict[str, Any], template: Dict[str, Any]) -> bool:
"""Recursively check if template has keys not in current config."""
for key, value in template.items():
if key not in current:
return True
if isinstance(value, dict) and isinstance(current[key], dict):
if self._has_new_keys(current[key], value):
return True
return False
def _merge_template_defaults(self, current: Dict[str, Any], template: Dict[str, Any]) -> None:
"""Recursively merge template defaults into current config."""
for key, value in template.items():
if key not in current:
# Add new key with template value
current[key] = value
print(f"Added new config key: {key}")
elif isinstance(value, dict) and isinstance(current[key], dict):
# Recursively merge nested dictionaries
self._merge_template_defaults(current[key], value)
    def get_timezone(self) -> str:
        """Return the configured timezone name, defaulting to 'UTC'."""
        return self.config.get('timezone', 'UTC')
    def get_display_config(self) -> Dict[str, Any]:
        """Return the 'display' config section (empty dict when absent)."""
        return self.config.get('display', {})
    def get_clock_config(self) -> Dict[str, Any]:
        """Return the 'clock' config section (empty dict when absent)."""
        return self.config.get('clock', {})
def get_raw_file_content(self, file_type: str) -> Dict[str, Any]:
    """Load raw content of 'main' config or 'secrets' config file.

    A missing secrets file is treated as a normal condition and yields {}.
    A missing main file, or an unparseable file of either kind, raises.
    """
    if file_type == "main":
        path_to_load = self.config_path
    elif file_type == "secrets":
        path_to_load = self.secrets_path
    else:
        raise ValueError("Invalid file_type specified. Must be 'main' or 'secrets'.")
    if not os.path.exists(path_to_load):
        if file_type == "secrets":
            # If a secrets file doesn't exist, it's not an error, just return empty
            return {}
        missing_msg = f"{file_type.capitalize()} configuration file not found at {os.path.abspath(path_to_load)}"
        print(missing_msg)
        raise FileNotFoundError(missing_msg)
    try:
        with open(path_to_load, 'r') as f:
            return json.load(f)
    except json.JSONDecodeError:
        print(f"Error parsing {file_type} configuration file: {path_to_load}")
        raise
    except Exception as e:
        print(f"Error loading {file_type} configuration file {path_to_load}: {str(e)}")
        raise
def save_raw_file_content(self, file_type: str, data: Dict[str, Any]) -> None:
    """Save data directly to 'main' config or 'secrets' config file.

    Args:
        file_type: Either 'main' or 'secrets', selecting the target file.
        data: JSON-serializable dict written verbatim to that file.

    Raises:
        ValueError: If file_type is not 'main' or 'secrets'.
        IOError: If the file cannot be written.
    """
    if file_type == "main":
        path_to_save = self.config_path
    elif file_type == "secrets":
        path_to_save = self.secrets_path
    else:
        raise ValueError("Invalid file_type specified. Must be 'main' or 'secrets'.")
    try:
        # Create directory if it doesn't exist, especially for config/.
        # Guard against an empty dirname (a bare filename like "config.json"):
        # os.makedirs("") raises FileNotFoundError.
        parent_dir = os.path.dirname(path_to_save)
        if parent_dir:
            os.makedirs(parent_dir, exist_ok=True)
        with open(path_to_save, 'w') as f:
            json.dump(data, f, indent=4)
        print(f"{file_type.capitalize()} configuration successfully saved to {os.path.abspath(path_to_save)}")
        # If we just saved the main config or secrets, the merged self.config might be stale.
        # Reload it to reflect the new state.
        if file_type == "main" or file_type == "secrets":
            self.load_config()
    except IOError as e:
        print(f"Error writing {file_type} configuration to file {os.path.abspath(path_to_save)}: {e}")
        raise
    except Exception as e:
        print(f"An unexpected error occurred while saving {file_type} configuration: {str(e)}")
        raise

287
src/display_controller.py Normal file
View File

@@ -0,0 +1,287 @@
import time
import logging
import sys
from pytz import timezone
from datetime import datetime, time as time_obj
# Configure logging
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s.%(msecs)03d - %(levelname)s:%(name)s:%(message)s',
datefmt='%H:%M:%S',
stream=sys.stdout
)
from src.clock import Clock
from src.display_manager import DisplayManager
from src.config_manager import ConfigManager
from src.cache_manager import CacheManager
from src.eojhl_managers import EOJHLLiveManager, EOJHLRecentManager, EOJHLUpcomingManager
from src.leaderboard_manager import LeaderboardManager
from src.sponsor_manager import EOJHLSponsorsManager
logger = logging.getLogger(__name__)
class DisplayController:
    """Coordinates all display modes (clock, league scoreboards, sponsors, team info)
    for the LED matrix: builds the rotation list, runs transitions, and gates the
    display on/off according to the configured schedule."""

    def __init__(self):
        start_time = time.time()
        logger.info("Starting DisplayController initialization")
        # Load config
        self.config_manager = ConfigManager()
        self.config = self.config_manager.load_config()
        self.cache_manager = CacheManager()
        logger.info("Config loaded in %.3f seconds", time.time() - start_time)
        # Initialize display manager
        self.display_manager = DisplayManager(self.config)
        logger.info("DisplayManager initialized")
        # Initialize clock if enabled
        init_time = time.time()
        self.clock = Clock(self.display_manager, self.config) if self.config.get('clock', {}).get('enabled', True) else None
        logger.info("Display modes initialized in %.3f seconds", time.time() - init_time)
        # --- SCHEDULING ---
        # Note: these two lines were previously duplicated again at the end of
        # __init__; the redundant second pair has been removed.
        self.is_display_active = True
        self._load_schedule_config()  # Load schedule config once at startup
        # --- League Managers Dictionary ---
        self.league_managers = {}
        # EOJHL Managers
        if self.config.get('eojhl_scoreboard', {}).get('enabled', False):
            eojhl_recent = EOJHLRecentManager(self.config, self.display_manager, self.cache_manager)
            eojhl_upcoming = EOJHLUpcomingManager(self.config, self.display_manager, self.cache_manager)
            eojhl_live = EOJHLLiveManager(self.config, self.display_manager, self.cache_manager)
            self.league_managers["eojhl"] = {
                "recent": eojhl_recent,
                "upcoming": eojhl_upcoming,
                "live": eojhl_live,
                "leaderboard": LeaderboardManager(self.config, self.display_manager, eojhl_recent.get_league_data())
            }
        # TODO: Add CCHL managers in the same pattern when ready
        # Sponsors
        self.sponsors = EOJHLSponsorsManager(self.config, self.display_manager) if self.config.get('sponsors', {}).get('enabled', False) else None
        # Build available modes list from league_order
        self.available_modes = []
        if self.clock:
            self.available_modes.append("clock")
        for league in sorted(self.config.get("league_order", {}), key=lambda k: self.config["league_order"][k]):
            managers = self.league_managers.get(league, {})
            # Only add leaderboard if enabled in config
            if managers.get("leaderboard") and self.config["leaderboard"]["enabled_sports"].get(league, {}).get("enabled", False):
                self.available_modes.append(f"{league}_leaderboard")
            if managers.get("recent"):
                self.available_modes.append(f"{league}_recent")
            if managers.get("upcoming"):
                self.available_modes.append(f"{league}_upcoming")
            # live is handled separately if you want to prioritize it
        if self.sponsors:
            self.available_modes.append("sponsors")
        if self.config.get("team_info", {}).get("enabled", False):
            self.available_modes.append("team_info")
        self.current_mode_index = 0
        self.last_switch = time.time()
        self.force_clear = True
        self.update_interval = 0.01
        logger.info(f"DisplayController initialized with modes: {self.available_modes}")

    def run_transition(self, when="before"):
        """Run transition effect either before or after a mode.

        Args:
            when: "before" to transition into a mode, anything else (typically
                "after") to transition out of it.
        """
        tcfg = self.config.get("transitions", {})
        ttype = tcfg.get("type", "fade")
        if ttype == "fade":
            dur = tcfg.get("fade_duration", 0.4)
            steps = tcfg.get("fade_steps", 6)
            easing = tcfg.get("easing", True)
            if when == "before":
                self.display_manager.fade_in(duration=dur, steps=steps, easing=easing)
            else:
                self.display_manager.fade_out(duration=dur, steps=steps, easing=easing)
        elif ttype == "cut":
            dur = tcfg.get("cut_duration", 0.2)
            if when == "before":
                # nothing to fade in, just show the mode
                return
            else:
                self.display_manager.cut_to_black(duration=dur)

    def _load_schedule_config(self):
        """Load schedule configuration once at startup.

        Populates self.schedule_enabled, self.start_time and self.end_time,
        falling back to 07:00-22:00 when the configured strings are malformed.
        """
        schedule_config = self.config.get('schedule', {})
        self.schedule_enabled = schedule_config.get('enabled', False)
        try:
            self.start_time = datetime.strptime(schedule_config.get('start_time', '07:00'), '%H:%M').time()
            self.end_time = datetime.strptime(schedule_config.get('end_time', '22:00'), '%H:%M').time()
            logger.info(f"Schedule loaded: enabled={self.schedule_enabled}, start={self.start_time}, end={self.end_time}")
        except (ValueError, TypeError):
            logger.warning("Invalid time format in schedule config. Using defaults.")
            self.start_time = time_obj(7, 0)
            self.end_time = time_obj(22, 0)

    def _check_schedule(self):
        """Check if the display should be active based on the schedule.

        Supports overnight windows (start > end, e.g. 22:00-07:00). Clears the
        panel once when transitioning to inactive, and forces a redraw when
        reactivating.
        """
        if not self.schedule_enabled:
            if not self.is_display_active:
                logger.info("Schedule is disabled. Activating display.")
                self.is_display_active = True
            return
        local_tz = timezone(self.config.get("timezone", "America/Toronto"))
        now_time = datetime.now(local_tz).time()
        if self.start_time <= self.end_time:
            should_be_active = self.start_time <= now_time < self.end_time
        else:
            # Window wraps past midnight.
            should_be_active = now_time >= self.start_time or now_time < self.end_time
        if should_be_active and not self.is_display_active:
            logger.info("Within scheduled time. Activating display.")
            self.is_display_active = True
            self.force_clear = True
        elif not should_be_active and self.is_display_active:
            logger.info("Outside of scheduled time. Deactivating display.")
            self.display_manager.clear()
            self.is_display_active = False
def main():
    """Run the top-level display loop.

    Order of operations each pass: re-check the on/off schedule, give priority
    to any live games (when the league's `live_priority` flag is set), then
    show the next mode in the normal rotation for its configured duration.
    Exits cleanly on Ctrl-C.
    """
    controller = DisplayController()
    logger.info("Entering main display loop")
    try:
        while True:
            # Re-evaluate the schedule every pass; keep the panel blank while inactive.
            controller._check_schedule()
            if not controller.is_display_active:
                controller.display_manager.clear()
                time.sleep(5)
                continue
            if not controller.available_modes:
                logger.warning("No display modes available")
                time.sleep(5)
                continue
            # Check for live games with priority for each league
            live_game_shown = False
            for league in controller.league_managers:
                league_config = controller.config.get(f"{league}_scoreboard", {})
                if league_config.get("live_priority", False):
                    live_mgr = controller.league_managers[league].get("live")
                    if live_mgr:
                        # Update live manager to check for live games
                        live_mgr.update()
                        if live_mgr.live_games:
                            logger.info(f"Live {league.upper()} games detected with priority - displaying live games")
                            live_update_interval = league_config.get("live_update_interval", 30)
                            # Display live games continuously until they end
                            while live_mgr.live_games:
                                # Still honor the schedule while locked onto live games.
                                controller._check_schedule()
                                if not controller.is_display_active:
                                    break
                                live_mgr.display()
                                time.sleep(1)  # Small sleep for display refresh
                                # Check if it's time to update live data
                                current_time = time.time()
                                if current_time - live_mgr.last_update >= live_update_interval:
                                    live_mgr.update()
                                    if not live_mgr.live_games:
                                        logger.info(f"No more live {league.upper()} games - returning to normal rotation")
                                        break
                            live_game_shown = True
                            break  # Exit league loop after showing live games
            if live_game_shown:
                continue  # Skip normal rotation and check for live games again
            mode = controller.available_modes[controller.current_mode_index]
            # Duration is keyed by the mode suffix ("recent", "upcoming", ...), default 10s.
            duration = controller.config.get("display", {}).get("display_durations", {}).get(mode.split("_")[-1], 10)
            logger.info(f"Displaying mode: {mode} for {duration} seconds")
            # Fade out current mode
            controller.run_transition("before")
            if mode == "clock" and controller.clock:
                controller.clock.display_time()
                time.sleep(duration)
            elif mode.endswith("_leaderboard"):
                league = mode.split("_")[0]
                controller.league_managers[league]["leaderboard"].display()
            elif mode.endswith("_recent"):
                league = mode.split("_")[0]
                mgr = controller.league_managers[league]["recent"]
                mgr.update()
                start = time.time()
                # Give each recent game its own `duration` slice on screen.
                while time.time() - start < duration * mgr.recent_games_to_show:
                    mgr.display()
                    time.sleep(1)
            elif mode.endswith("_upcoming"):
                league = mode.split("_")[0]
                mgr = controller.league_managers[league]["upcoming"]
                mgr.update()
                start = time.time()
                # Same pattern as recent: one `duration` slice per upcoming game.
                while time.time() - start < duration * mgr.upcoming_games_to_show:
                    mgr.display()
                    time.sleep(1)
            elif mode == "sponsors" and controller.sponsors:
                # Get batch of sponsors to display
                sponsors_to_show = controller.sponsors.get_next_sponsors()
                if sponsors_to_show:
                    cfg = controller.config.get("sponsors", {})
                    # Show title slide once at the beginning
                    controller.sponsors.render_title()
                    time.sleep(cfg.get("title_duration", 5))
                    # Loop through each sponsor in the batch
                    for sponsor in sponsors_to_show:
                        # Show logo
                        controller.sponsors.render_logo(sponsor)
                        time.sleep(cfg.get("logo_duration", 10))
                        # Show details if enabled
                        details_dur = controller.sponsors.render_details(sponsor)
                        if details_dur > 0:
                            time.sleep(details_dur)
            elif mode == "team_info":
                cfg = controller.config.get("team_info", {})
                controller.display_manager.render_team_info(cfg)
                #time.sleep(cfg.get("slide3_duration", 10))
            controller.run_transition("after")
            # Move to next mode
            controller.current_mode_index = (controller.current_mode_index + 1) % len(controller.available_modes)
    except KeyboardInterrupt:
        logger.info("Shutting down display loop gracefully")
if __name__ == "__main__":
main()

1012
src/display_manager.py Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,239 @@
#!/usr/bin/env python3
"""
Dynamic Team Resolver for LEDMatrix
This module provides functionality to resolve dynamic team names like "AP_TOP_25"
into actual team abbreviations that update automatically with rankings.
Supported dynamic teams:
- AP_TOP_25: Resolves to current AP Top 25 teams for NCAA Football
- AP_TOP_10: Resolves to current AP Top 10 teams for NCAA Football
- AP_TOP_5: Resolves to current AP Top 5 teams for NCAA Football
Usage:
resolver = DynamicTeamResolver()
resolved_teams = resolver.resolve_teams(["UGA", "AP_TOP_25", "AUB"])
# Returns: ["UGA", "MICH", "OSU", ...] (AP_TOP_25 teams expanded in place, duplicates removed)
"""
import logging
import time
import requests
from typing import Dict, List, Set, Optional, Any
from datetime import datetime, timezone
logger = logging.getLogger(__name__)
class DynamicTeamResolver:
    """
    Resolves dynamic team names to actual team abbreviations.

    This class handles special team names that represent dynamic groups
    like AP Top 25 rankings, which update automatically.

    The rankings cache is shared at the class level so all resolver
    instances reuse one fetch per cache window.
    """
    # Cache for rankings data, shared across all instances.
    # Maps team abbreviation -> current AP rank (int).
    _rankings_cache: Dict[str, int] = {}
    _cache_timestamp: float = 0
    _cache_duration: int = 3600  # 1 hour cache
    # Supported dynamic team patterns
    DYNAMIC_PATTERNS = {
        'AP_TOP_25': {'sport': 'ncaa_fb', 'limit': 25},
        'AP_TOP_10': {'sport': 'ncaa_fb', 'limit': 10},
        'AP_TOP_5': {'sport': 'ncaa_fb', 'limit': 5},
    }

    def __init__(self, request_timeout: int = 30):
        """Initialize the dynamic team resolver.

        Args:
            request_timeout: Seconds to wait for the rankings HTTP request.
        """
        self.request_timeout = request_timeout
        # Use a local getLogger call so the class does not depend on the
        # module-level `logger` global (same logger object either way).
        self.logger = logging.getLogger(__name__)

    def resolve_teams(self, team_list: List[str], sport: str = 'ncaa_fb') -> List[str]:
        """
        Resolve a list of team names, expanding dynamic team names.

        Args:
            team_list: List of team names (can include dynamic names like "AP_TOP_25")
            sport: Sport type for context (default: 'ncaa_fb')

        Returns:
            List of resolved team abbreviations, deduplicated with first-seen
            order preserved.
        """
        if not team_list:
            return []
        resolved_teams = []
        for team in team_list:
            if team in self.DYNAMIC_PATTERNS:
                # Resolve dynamic team
                dynamic_teams = self._resolve_dynamic_team(team, sport)
                resolved_teams.extend(dynamic_teams)
                self.logger.info(f"Resolved {team} to {len(dynamic_teams)} teams: {dynamic_teams[:5]}{'...' if len(dynamic_teams) > 5 else ''}")
            elif self._is_potential_dynamic_team(team):
                # Unknown dynamic team, skip it
                self.logger.warning(f"Unknown dynamic team '{team}' - skipping")
            else:
                # Regular team name, add as-is
                resolved_teams.append(team)
        # Remove duplicates while preserving order
        seen = set()
        unique_teams = []
        for team in resolved_teams:
            if team not in seen:
                seen.add(team)
                unique_teams.append(team)
        return unique_teams

    def _resolve_dynamic_team(self, dynamic_team: str, sport: str) -> List[str]:
        """
        Resolve a dynamic team name to actual team abbreviations.

        Args:
            dynamic_team: Dynamic team name (e.g., "AP_TOP_25")
            sport: Sport type for context

        Returns:
            List of team abbreviations (empty on failure).
        """
        if dynamic_team not in self.DYNAMIC_PATTERNS:
            self.logger.warning(f"Unknown dynamic team: {dynamic_team}")
            return []
        pattern_config = self.DYNAMIC_PATTERNS[dynamic_team]
        target_sport = pattern_config['sport']
        limit = pattern_config['limit']
        # Only support NCAA Football rankings for now
        if target_sport != 'ncaa_fb':
            self.logger.warning(f"Dynamic team {dynamic_team} not supported for sport {sport}")
            return []
        # Fetch current rankings
        rankings = self._fetch_ncaa_fb_rankings()
        if not rankings:
            self.logger.warning(f"Could not fetch rankings for {dynamic_team}")
            return []
        # Get top N teams (rankings dict is pre-sorted by rank).
        top_teams = list(rankings.keys())[:limit]
        self.logger.info(f"Resolved {dynamic_team} to top {len(top_teams)} teams: {top_teams}")
        return top_teams

    def _fetch_ncaa_fb_rankings(self) -> Dict[str, int]:
        """
        Fetch current NCAA Football rankings from ESPN API.

        Returns:
            Dictionary mapping team abbreviations to rankings, sorted by rank.
        """
        current_time = time.time()
        # Check cache first
        if (self._rankings_cache and
            current_time - self._cache_timestamp < self._cache_duration):
            return self._rankings_cache
        try:
            self.logger.info("Fetching fresh NCAA Football rankings from ESPN API")
            rankings_url = "https://site.api.espn.com/apis/site/v2/sports/football/college-football/rankings"
            response = requests.get(rankings_url, timeout=self.request_timeout)
            response.raise_for_status()
            data = response.json()
            rankings = {}
            rankings_data = data.get('rankings', [])
            if rankings_data:
                # Use the first ranking (usually AP Top 25)
                first_ranking = rankings_data[0]
                ranking_name = first_ranking.get('name', 'Unknown')
                teams = first_ranking.get('ranks', [])
                self.logger.info(f"Using ranking: {ranking_name}")
                self.logger.info(f"Found {len(teams)} teams in ranking")
                for team_data in teams:
                    team_info = team_data.get('team', {})
                    team_abbr = team_info.get('abbreviation', '')
                    current_rank = team_data.get('current', 0)
                    if team_abbr and current_rank > 0:
                        rankings[team_abbr] = current_rank
            # Sort by ranking (1, 2, 3, etc.)
            sorted_rankings = dict(sorted(rankings.items(), key=lambda x: x[1]))
            # Cache the results on the CLASS (assigning via `self` would create
            # instance attributes and silently defeat the shared cache).
            type(self)._rankings_cache = sorted_rankings
            type(self)._cache_timestamp = current_time
            self.logger.info(f"Fetched rankings for {len(sorted_rankings)} teams")
            return sorted_rankings
        except Exception as e:
            self.logger.error(f"Error fetching NCAA Football rankings: {e}")
            return {}

    def get_available_dynamic_teams(self) -> List[str]:
        """
        Get list of available dynamic team names.

        Returns:
            List of supported dynamic team names
        """
        return list(self.DYNAMIC_PATTERNS.keys())

    def is_dynamic_team(self, team_name: str) -> bool:
        """
        Check if a team name is a dynamic team.

        Args:
            team_name: Team name to check

        Returns:
            True if the team name is dynamic
        """
        return team_name in self.DYNAMIC_PATTERNS

    def _is_potential_dynamic_team(self, team_name: str) -> bool:
        """
        Check if a team name looks like it might be a dynamic team but isn't recognized.

        Args:
            team_name: Team name to check

        Returns:
            True if the team name looks like a dynamic team pattern
        """
        # Check for common dynamic team patterns
        dynamic_patterns = ['AP_TOP_', 'TOP_', 'RANKED_', 'PLAYOFF_']
        return any(pattern in team_name.upper() for pattern in dynamic_patterns)

    def clear_cache(self):
        """Clear the rankings cache to force fresh data on next request."""
        # Clear on the class so every instance sees the empty cache.
        type(self)._rankings_cache = {}
        type(self)._cache_timestamp = 0
        self.logger.info("Cleared dynamic team rankings cache")
# Convenience function for easy integration
def resolve_dynamic_teams(team_list: List[str], sport: str = 'ncaa_fb') -> List[str]:
    """
    Convenience function to resolve dynamic teams in a team list.

    Creates a throwaway DynamicTeamResolver and delegates to it.

    Args:
        team_list: List of team names (can include dynamic names)
        sport: Sport type for context

    Returns:
        List of resolved team abbreviations
    """
    return DynamicTeamResolver().resolve_teams(team_list, sport)

568
src/eojhl_managers.py Normal file
View File

@@ -0,0 +1,568 @@
# src/eojhl_managers.py
import logging
from typing import Any, Dict, Optional, List
import os
import json
import time
import re
import requests
from datetime import datetime, timezone, date
from pathlib import Path
from src.base_classes.hockey import Hockey, HockeyLive
from src.base_classes.sports import SportsRecent, SportsUpcoming
from src.cache_manager import CacheManager
from src.display_manager import DisplayManager
# Import the API counter function from web interface
try:
from web_interface_v2 import increment_api_counter
except ImportError:
# Fallback if web interface is not available
def increment_api_counter(kind: str, count: int = 1):
pass
LOCAL_EOJHL_FILE = "data/eojhl_scoreboard.json"
LOCAL_EOJHL_STANDINGS = "data/eojhl_standings.json"
class BaseEOJHLManager(Hockey):
    """Base class for EOJHL managers; renders scoreboard and supplies leaderboard data.

    Fetches live games from the HockeyTech JSONP feed and converts them into an
    ESPN-like event structure so the shared Hockey base classes can render them.
    Recent/Upcoming subclasses read pre-fetched local JSON files instead.
    """

    def __init__(self, config: Dict[str, Any], display_manager: DisplayManager, cache_manager: CacheManager):
        self.logger = logging.getLogger('EOJHL')
        super().__init__(config=config, display_manager=display_manager, cache_manager=cache_manager,
                         logger=self.logger, sport_key="eojhl")
        self.league = "eojhl"
        # EOJHL specifics used for leaderboard data
        self.conferences: List[str] = ["martin", "richardson"]
        self.league_logo_path: str = "assets/sports/eojhl_logos/EOJHL.png"
        self._last_standings_read: float = 0.0
        self._cached_standings: Optional[Dict[str, List[Dict[str, Any]]]] = None

    # ---------- Leaderboard data providers ----------
    def standings_provider(self) -> Dict[str, List[Dict[str, Any]]]:
        """
        Read local standings JSON and return {"richardson":[...], "martin":[...]}.
        The LeaderboardManager will call this at most once per hour (via its update_interval).
        """
        try:
            with open(LOCAL_EOJHL_STANDINGS, "r") as f:
                data = json.load(f)
            return {
                "martin": data.get("martin", []),
                "richardson": data.get("richardson", []),
            }
        except Exception as e:
            self.logger.error(f"Failed to load EOJHL standings JSON: {e}")
            return {"martin": [], "richardson": []}

    def get_league_data(self) -> Dict[str, Any]:
        """
        Return all static and dynamic data the generic LeaderboardManager needs.
        """
        return {
            "conferences": self.conferences,
            "league_logo": self.league_logo_path,
            "standings_provider": self.standings_provider,
        }

    # ---------- Scoreboard methods ----------
    def _fetch_todays_games(self) -> Optional[Dict]:
        """Fetch today's games for live updates using JSONP API.

        Returns an ESPN-like {"events": [...]} dict, or None on error.
        """
        today = date.today().strftime("%Y-%m-%d")
        url = (
            "https://lscluster.hockeytech.com/feed/index.php"
            f"?feed=statviewfeed&view=schedule_day&date={today}"
            "&site_id=2&key=1defb601c9b37c24&client_code=eojhl"
            "&league_id=2&season_id=110&conference_id=undefined"
            "&division_id=-1&team=-1&lang=en&forceDate=true"
            "&useSeason=false&allLeagues=0&callback=angular.callbacks._2"
        )
        try:
            self.logger.info(f"Fetching EOJHL live games for {today}")
            increment_api_counter("eojhl")
            response = self.session.get(url, headers=self.headers, timeout=10)
            response.raise_for_status()
            raw = response.text
            # Strip JSONP wrapper (angular.callbacks._2(...))
            if "angular.callbacks" in raw:
                match = re.search(r'angular\.callbacks\._\d+\((.+)\)\s*$', raw, re.DOTALL)
                if match:
                    raw = match.group(1)
                else:
                    # Fallback: find first '(' and last ')'
                    raw = raw[raw.find("(")+1 : raw.rfind(")")]
            data = json.loads(raw)
            # Log the top-level structure for debugging
            if isinstance(data, dict):
                self.logger.info(f"EOJHL API response keys: {list(data.keys())}")
            elif isinstance(data, list):
                self.logger.info(f"EOJHL API response is a list with {len(data)} games")
            # Convert to ESPN-like format for compatibility with base classes
            games = []
            if isinstance(data, list):
                # Direct list of games
                games = data
            elif isinstance(data, dict):
                # Try common HockeyTech API structures
                if "SiteKit" in data:
                    site_kit = data["SiteKit"]
                    if isinstance(site_kit, dict):
                        games = site_kit.get("Gamesbydate", [])
                    elif isinstance(site_kit, list):
                        games = site_kit
                elif "Gamesbydate" in data:
                    games = data["Gamesbydate"]
                elif "games" in data:
                    games = data["games"]
            if not games:
                self.logger.info(f"No EOJHL games found for {today}")
                return {"events": []}
            events = []
            for game in games:
                event = self._convert_eojhl_to_espn_format(game)
                if event:
                    events.append(event)
            self.logger.info(f"Successfully fetched {len(events)} EOJHL games")
            return {"events": events}
        except requests.exceptions.RequestException as e:
            self.logger.error(f"API error fetching EOJHL games: {e}")
            return None
        except json.JSONDecodeError as e:
            self.logger.error(f"JSON decode error for EOJHL data: {e}")
            return None
        except Exception as e:
            self.logger.error(f"Unexpected error fetching EOJHL games: {e}", exc_info=True)
            return None

    def _convert_eojhl_to_espn_format(self, eojhl_game: Dict) -> Optional[Dict]:
        """Convert EOJHL HockeyTech API format to ESPN-like format for base class compatibility."""
        try:
            # Debug: Log first game structure
            if not hasattr(self, '_logged_game_structure'):
                self.logger.info(f"Sample game keys: {list(eojhl_game.keys())}")
                home_team_data_temp = eojhl_game.get("homeTeam", {})
                if isinstance(home_team_data_temp, dict):
                    self.logger.info(f"homeTeam keys: {list(home_team_data_temp.keys())}")
                visiting_team_data_temp = eojhl_game.get("visitingTeam", {})
                if isinstance(visiting_team_data_temp, dict):
                    self.logger.info(f"visitingTeam keys: {list(visiting_team_data_temp.keys())}")
                self._logged_game_structure = True
            # Parse HockeyTech format
            game_id = str(eojhl_game.get("id", ""))
            date_played = eojhl_game.get("date", "")
            # Extract team data
            home_team_data = eojhl_game.get("homeTeam", {})
            visiting_team_data = eojhl_game.get("visitingTeam", {})
            # Team info is nested in 'info'
            home_info = home_team_data.get("info", {})
            visiting_info = visiting_team_data.get("info", {})
            # Team abbreviations
            home_abbr = home_info.get("abbr", home_info.get("abbreviation", ""))
            visiting_abbr = visiting_info.get("abbr", visiting_info.get("abbreviation", ""))
            # Get shots and goals from stats
            home_stats = home_team_data.get("stats", {})
            visiting_stats = visiting_team_data.get("stats", {})
            # Parse shots as integers (they come as strings in the API)
            try:
                home_shots = int(home_stats.get("shots", "0"))
            except (ValueError, TypeError):
                home_shots = 0
            try:
                visiting_shots = int(visiting_stats.get("shots", "0"))
            except (ValueError, TypeError):
                visiting_shots = 0
            # Parse goals as integers
            try:
                home_goals = int(home_stats.get("goals", "0"))
            except (ValueError, TypeError):
                home_goals = 0
            try:
                visiting_goals = int(visiting_stats.get("goals", "0"))
            except (ValueError, TypeError):
                visiting_goals = 0
            # Records: wins-losses-(OTLosses+SOLosses) from seasonStats.teamRecord
            home_season_stats = home_team_data.get("seasonStats", {})
            home_record_data = home_season_stats.get("teamRecord", {})
            home_wins = home_record_data.get("wins", 0)
            home_losses = home_record_data.get("losses", 0)
            home_ot = home_record_data.get("OTLosses", 0) + home_record_data.get("SOLosses", 0)
            home_record = f"{home_wins}-{home_losses}-{home_ot}"
            visiting_season_stats = visiting_team_data.get("seasonStats", {})
            visiting_record_data = visiting_season_stats.get("teamRecord", {})
            visiting_wins = visiting_record_data.get("wins", 0)
            visiting_losses = visiting_record_data.get("losses", 0)
            visiting_ot = visiting_record_data.get("OTLosses", 0) + visiting_record_data.get("SOLosses", 0)
            visiting_record = f"{visiting_wins}-{visiting_losses}-{visiting_ot}"
            # Parse game status for clock and period
            status_str = eojhl_game.get("status", "")
            game_clock = ""
            period_text = ""
            if status_str:
                # Split at space: "02:41 3rd" -> ["02:41", "3rd"]
                parts = status_str.split(" ", 1)
                if len(parts) == 2:
                    game_clock = parts[0]  # "02:41"
                    period_text = parts[1]  # "3rd"
                elif len(parts) == 1:
                    # Status might be "Final", "Final/OT", etc.
                    period_text = parts[0]
                    game_clock = ""
            # Extract period number for internal use
            periods_data = eojhl_game.get("periods", [])
            period_num = 0
            if isinstance(periods_data, list) and len(periods_data) > 0:
                # Get the last period (current period)
                last_period = periods_data[-1]
                if isinstance(last_period, dict):
                    period_info = last_period.get("info", {})
                    period_short_name = period_info.get("shortName", "0")
                    try:
                        period_num = int(period_short_name)
                    except (ValueError, TypeError):
                        period_num = 0
            # Determine game state using started and final flags
            started = str(eojhl_game.get("started", "0"))
            final = str(eojhl_game.get("final", "0"))
            is_final = final == "1"
            is_live = started == "1" and final == "0"
            is_upcoming = started == "0" and final == "0"
            # Build ESPN-like event structure
            event = {
                "id": game_id,
                "date": date_played,
                "competitions": [{
                    "id": game_id,
                    "status": {
                        "type": {
                            "name": "STATUS_IN_PROGRESS" if is_live else ("STATUS_FINAL" if is_final else "STATUS_SCHEDULED"),
                            "state": "in" if is_live else ("post" if is_final else "pre"),
                            "shortDetail": status_str if is_live else ("Final" if is_final else date_played)
                        },
                        "period": period_num,
                        "displayClock": game_clock if is_live else "",
                        "customPeriodText": period_text
                    },
                    "competitors": [
                        {
                            "id": str(home_info.get("id", "")),
                            "homeAway": "home",
                            "team": {
                                "id": str(home_info.get("id", "")),
                                "abbreviation": home_abbr,
                                "name": home_info.get("name", home_abbr),
                                "logo": ""
                            },
                            "score": str(home_goals),
                            "records": [{"summary": home_record}],
                            "statistics": [
                                {"name": "shots", "displayValue": str(home_shots)},
                                {"name": "saves", "displayValue": "0"},
                                {"name": "savePct", "displayValue": "0.0"}
                            ]
                        },
                        {
                            "id": str(visiting_info.get("id", "")),
                            "homeAway": "away",
                            "team": {
                                "id": str(visiting_info.get("id", "")),
                                "abbreviation": visiting_abbr,
                                "name": visiting_info.get("name", visiting_abbr),
                                "logo": ""
                            },
                            "score": str(visiting_goals),
                            "records": [{"summary": visiting_record}],
                            "statistics": [
                                {"name": "shots", "displayValue": str(visiting_shots)},
                                {"name": "saves", "displayValue": "0"},
                                {"name": "savePct", "displayValue": "0.0"}
                            ]
                        }
                    ],
                    "situation": {
                        "isPowerPlay": False,
                        "penalties": ""
                    }
                }]
            }
            return event
        except Exception as e:
            self.logger.error(f"Error converting EOJHL game format: {e}", exc_info=True)
            return None

    def _fetch_local_data(self):
        """Load the pre-fetched local scoreboard JSON (used by Recent/Upcoming)."""
        try:
            with open(LOCAL_EOJHL_FILE, "r") as f:
                return json.load(f)
        except Exception as e:
            self.logger.error(f"Failed to load local EOJHL scoreboard JSON: {e}")
            return None

    def _fetch_data(self, date_str: str = None) -> Optional[Dict]:
        """Fetch data - live managers use API, others use local files."""
        if isinstance(self, EOJHLLiveManager):
            # Live games should fetch from API
            return self._fetch_todays_games()
        else:
            # Recent and Upcoming managers use local data
            return self._fetch_local_data()

    def _load_standings_records(self):
        """Load standings file and return dict {abbreviation: record}."""
        try:
            with open(LOCAL_EOJHL_STANDINGS, "r") as f:
                standings = json.load(f)
            lookup = {}
            for div in ("martin", "richardson"):
                for team in standings.get(div, []):
                    abbr = team.get("abbreviation")
                    rec = team.get("record")
                    if abbr and rec:
                        lookup[abbr] = rec
            return lookup
        except Exception as e:
            self.logger.warning(f"Could not load standings records: {e}")
            return {}

    def _extract_game_details(self, game_event: Dict) -> Optional[Dict]:
        """Extract game details with hockey-specific fields for EOJHL."""
        # First get common details
        details, home_team, away_team, status, situation = (
            self._extract_game_details_common(game_event)
        )
        if not details:
            return None
        try:
            if not hasattr(self, "_standings_lookup"):
                self._standings_lookup = self._load_standings_records()
            home_abbr = home_team["team"]["abbreviation"]
            away_abbr = away_team["team"]["abbreviation"]
            # Get shots data
            home_shots = 0
            away_shots = 0
            try:
                home_shots = next(
                    (int(c["displayValue"]) for c in home_team["statistics"] if c.get("name") == "shots"),
                    0
                )
                away_shots = next(
                    (int(c["displayValue"]) for c in away_team["statistics"] if c.get("name") == "shots"),
                    0
                )
            except (ValueError, KeyError, StopIteration):
                pass
            # Get period and clock info
            period = status.get("period", 0)
            display_clock = status.get("displayClock", "0:00")
            # Check for custom period text (used by EOJHL)
            period_text = status.get("customPeriodText", "")
            if not period_text:
                # Default period text formatting
                if status["type"]["state"] == "in":
                    if period == 0:
                        period_text = "Start"
                    elif 1 <= period <= 3:
                        period_text = f"P{period}"
                    elif period > 3:
                        period_text = f"OT{period - 3}"
                elif status["type"]["state"] == "post":
                    # Check shootout (period 5) BEFORE overtime (period 4):
                    # checking `period > 3` first made FINAL/SO unreachable.
                    if period > 4:
                        period_text = "FINAL/SO"
                    elif period > 3:
                        period_text = "FINAL/OT"
                    else:
                        period_text = "FINAL"
                elif status["type"]["state"] == "pre":
                    period_text = details.get("game_time", "")
            details.update({
                "home_score": home_team.get("score", "0"),
                "away_score": away_team.get("score", "0"),
                "home_logo_path": self.logo_dir / f"{home_abbr}.png",
                "away_logo_path": self.logo_dir / f"{away_abbr}.png",
                "status_text": status["type"]["shortDetail"],
                "home_record": self._standings_lookup.get(home_abbr, "0-0-0"),
                "away_record": self._standings_lookup.get(away_abbr, "0-0-0"),
                "period": period,
                "period_text": period_text,
                "clock": display_clock,
                "power_play": False,
                "penalties": "",
                "home_shots": home_shots,
                "away_shots": away_shots,
            })
            return details
        except Exception as e:
            self.logger.error(f"EOJHL extract error: {e}", exc_info=True)
            return None
class EOJHLLiveManager(BaseEOJHLManager, HockeyLive):
    """Manager for live EOJHL games.

    In test mode, a sample live game is loaded from data/example_live.jsonp;
    otherwise live data is fetched from the API via the base class.
    """

    def __init__(self, config: Dict[str, Any], display_manager: DisplayManager, cache_manager: CacheManager):
        super().__init__(config, display_manager, cache_manager)
        self.logger = logging.getLogger('EOJHLLiveManager')
        # Initialize with test game only if test mode is enabled
        if self.test_mode:
            self._load_test_game()
        else:
            self.logger.info("Initialized EOJHLLiveManager in live mode")

    def _clear_live_state(self) -> None:
        """Reset to the 'no live game' state."""
        self.current_game = None
        self.live_games = []

    def _load_test_game(self) -> None:
        """Load test data from example_live.jsonp and populate live state.

        Every failure path logs an error and leaves the manager with no
        live game (previously this reset was duplicated in each branch).
        """
        try:
            test_file = Path("data/example_live.jsonp")
            with open(test_file, "r") as f:
                raw = f.read()
            # Strip JSONP wrapper
            if "angular.callbacks" in raw:
                match = re.search(r'angular\.callbacks\._\d+\((.+)\)\s*$', raw, re.DOTALL)
                if match:
                    raw = match.group(1)
                else:
                    raw = raw[raw.find("(")+1 : raw.rfind(")")]
            test_games = json.loads(raw)
            if not test_games:
                self.logger.error("No games found in example_live.jsonp")
                self._clear_live_state()
                return
            # Convert first game to ESPN format and extract details
            event = self._convert_eojhl_to_espn_format(test_games[0])
            if not event:
                self.logger.error("Failed to convert test game to ESPN format")
                self._clear_live_state()
                return
            self.current_game = self._extract_game_details(event)
            if not self.current_game:
                self.logger.error("Failed to extract game details from test data")
                self._clear_live_state()
                return
            self.live_games = [self.current_game]
            home_abbr = self.current_game.get('home_abbr', 'UNK')
            away_abbr = self.current_game.get('away_abbr', 'UNK')
            self.logger.info(f"Initialized EOJHLLiveManager with test game from example_live.jsonp: {away_abbr} @ {home_abbr}")
        except FileNotFoundError:
            self.logger.error("Test file data/example_live.jsonp not found")
            self._clear_live_state()
        except Exception as e:
            self.logger.error(f"Error loading test data: {e}", exc_info=True)
            self._clear_live_state()
class EOJHLRecentManager(BaseEOJHLManager, SportsRecent):
    """Manager for recently completed EOJHL games."""

    def __init__(self, config, display_manager, cache_manager):
        super().__init__(config, display_manager, cache_manager)
        self.logger = logging.getLogger('EOJHLRecentManager')
        # Title text and color for the recent-game card; both overridable
        # through the mode config.
        default_titles = {"RECENT_GAME": "LAST GAME"}
        default_colors = {"RECENT_GAME": [0, 51, 204]}
        self.status_titles = self.mode_config.get("status_titles", default_titles)
        self.title_colors = self.mode_config.get("title_colors", default_colors)
class EOJHLUpcomingManager(BaseEOJHLManager, SportsUpcoming):
    """Manager for upcoming EOJHL games."""

    def __init__(self, config, display_manager, cache_manager):
        super().__init__(config, display_manager, cache_manager)
        self.logger = logging.getLogger('EOJHLUpcomingManager')
        # Whether to distinguish home vs. away in upcoming-game titles.
        self.show_home_away = self.mode_config.get("show_home_away", False)
        self.status_titles = self.mode_config.get("status_titles", {
            "GAME_DAY": "GAME DAY",
            "HOME_GAME": "NEXT HOME GAME",
            "AWAY_GAME": "NEXT AWAY GAME",
        })
        self.title_colors = self.mode_config.get("title_colors", {
            "GAME_DAY": [255, 255, 0],
            "HOME_GAME": [0, 200, 255],
            "AWAY_GAME": [0, 255, 128],
        })

    def _fetch_data(self, date_str: str = None):
        """Fetch schedule data, keeping only future events sorted by start time."""
        data = super()._fetch_data(date_str)
        if not data:
            return None
        cutoff = datetime.now(timezone.utc)
        upcoming = []
        for event in data.get("events", []):
            try:
                start = datetime.strptime(event["date"], "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=timezone.utc)
            except Exception as ex:
                self.logger.warning(f"Skipping event with bad date: {ex}")
                continue
            if start >= cutoff:
                upcoming.append((start, event))
        upcoming.sort(key=lambda pair: pair[0])
        data["events"] = [event for _, event in upcoming]
        return data

    def _get_game_status_type(self, game: Dict, favorite_team: str) -> str:
        """Determine if game is GAME_DAY, HOME_GAME, or AWAY_GAME."""
        try:
            start = game.get('start_time_utc')
            if not start:
                return "AWAY_GAME"
            now = datetime.now(timezone.utc)
            day_start = now.replace(hour=0, minute=0, second=0, microsecond=0)
            day_end = now.replace(hour=23, minute=59, second=59, microsecond=999999)
            # A game today trumps the home/away distinction.
            if day_start <= start <= day_end:
                return "GAME_DAY"
            return "HOME_GAME" if game.get('home_abbr') == favorite_team else "AWAY_GAME"
        except Exception as e:
            self.logger.error(f"Error determining game status type: {e}")
            return "AWAY_GAME"

141
src/font_test_manager.py Normal file
View File

@@ -0,0 +1,141 @@
import os
import time
import freetype
from PIL import Image, ImageDraw, ImageFont
import logging
from typing import Dict, Any
from src.display_manager import DisplayManager
# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
class FontTestManager:
    """Manager for testing fonts with easy BDF/TTF switching."""

    def __init__(self, config: Dict[str, Any], display_manager: DisplayManager):
        """Select and load the configured test font.

        Args:
            config: Application configuration (stored for reference).
            display_manager: DisplayManager used to render the test output.

        Raises:
            FileNotFoundError: If the selected font file does not exist.
        """
        self.display_manager = display_manager
        self.config = config
        self.logger = logging.getLogger('FontTest')
        # FONT CONFIGURATION - EASY SWITCHING
        # Set to 'bdf' or 'ttf' to switch font types
        self.font_type = 'bdf'  # Change this to 'ttf' to use TTF font
        # Font configurations.  Note: display_name/description previously
        # read "BTF" — a typo for "BDF" (the bitmap font format of cozette.bdf).
        self.font_configs = {
            'bdf': {
                'path': "assets/fonts/cozette.bdf",
                'display_name': "Cozette BDF",
                'description': "BDF font Test"
            },
            'ttf': {
                'path': "assets/fonts/5by7.regular.ttf",
                'display_name': "5by7 TTF",
                'description': "TTF font test"
            }
        }
        # Get current font configuration
        self.current_config = self.font_configs[self.font_type]
        self.font_path = self.current_config['path']
        # Fail fast with a clear error when the font file is missing.
        if not os.path.exists(self.font_path):
            self.logger.error(f"Font file not found: {self.font_path}")
            raise FileNotFoundError(f"Font file not found: {self.font_path}")
        # Load the font with the loader matching its format.
        if self.font_type == 'bdf':
            self._load_bdf_font()
        else:
            self._load_ttf_font()
        self.logger.info(f"Initialized FontTestManager with {self.current_config['description']}")

    def _load_bdf_font(self):
        """Load BDF font using freetype."""
        try:
            self.face = freetype.Face(self.font_path)
            self.logger.info(f"Successfully loaded BDF font from {self.font_path}")
        except Exception as e:
            self.logger.error(f"Failed to load BDF font: {e}")
            raise

    def _load_ttf_font(self):
        """Load TTF font using PIL."""
        try:
            self.font = ImageFont.truetype(self.font_path, 8)  # Size 8 for 5x7 font
            self.logger.info(f"Successfully loaded TTF font from {self.font_path}")
        except Exception as e:
            self.logger.error(f"Failed to load TTF font: {e}")
            raise

    def update(self):
        """No update needed for static display."""
        pass

    def display(self, force_clear: bool = False):
        """Render the font name plus an A-Z sample row with the selected font."""
        try:
            # Clear the display
            self.display_manager.clear()
            # Draw font name at the top
            self.display_manager.draw_text(self.current_config['display_name'], y=2, color=(255, 255, 255))
            # Draw sample text
            draw = ImageDraw.Draw(self.display_manager.image)
            sample_text = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
            # Starting position for the sample row.
            x = 10  # 10 pixels from the left
            y = 10  # 10 pixels from the top
            # Draw text based on font type
            if self.font_type == 'bdf':
                self._draw_bdf_text(draw, sample_text, x, y)
            else:
                self._draw_ttf_text(draw, sample_text, x, y)
            # Push the composed frame to the matrix once.
            self.display_manager.update_display()
            self.logger.info("Font test display complete.")
        except Exception as e:
            self.logger.error(f"Error displaying font test: {e}", exc_info=True)

    def _draw_bdf_text(self, draw, text, x, y):
        """Draw text glyph-by-glyph from the 1-bit packed BDF bitmaps."""
        for char in text:
            # Load the glyph
            self.face.load_char(char)
            bitmap = self.face.glyph.bitmap
            # Each bitmap row is `pitch` bytes, most-significant bit first.
            for i in range(bitmap.rows):
                for j in range(bitmap.width):
                    try:
                        # Get the byte containing the pixel
                        byte_index = i * bitmap.pitch + (j // 8)
                        if byte_index < len(bitmap.buffer):
                            byte = bitmap.buffer[byte_index]
                            # Check if the specific bit is set
                            if byte & (1 << (7 - (j % 8))):
                                draw.point((x + j, y + i), fill=(255, 255, 255))
                    except IndexError:
                        self.logger.warning(f"Index out of range for char '{char}' at position ({i}, {j})")
                        continue
            # Advance is 26.6 fixed point; shift right by 6 to get pixels.
            x += self.face.glyph.advance.x >> 6

    def _draw_ttf_text(self, draw, text, x, y):
        """Draw text using TTF font."""
        draw.text((x, y), text, font=self.font, fill=(255, 255, 255))

148
src/generic_cache_mixin.py Normal file
View File

@@ -0,0 +1,148 @@
"""
Generic Cache Mixin for Any Manager
This mixin provides caching functionality that can be used by any manager
that needs to cache data, not just sports managers. It's a more general
version of BackgroundCacheMixin that works for weather, stocks, news, etc.
"""
import time
import logging
from typing import Dict, Optional, Any, Callable
class GenericCacheMixin:
    """
    Generic mixin class that provides caching functionality to any manager.
    This mixin can be used by weather, stock, news, or any other manager
    that needs to cache data with performance monitoring.

    Host classes must provide ``self.cache_manager`` and ``self.logger``.
    """

    def _fetch_data_with_cache(self,
                               cache_key: str,
                               api_fetch_method: Callable,
                               cache_ttl: int = 300,
                               force_refresh: bool = False) -> Optional[Dict]:
        """
        Generic caching pattern for any manager.

        Args:
            cache_key: Unique cache key for this data
            api_fetch_method: Method to call for fresh data
            cache_ttl: Time-to-live in seconds (default: 5 minutes)
            force_refresh: Skip cache and fetch fresh data

        Returns:
            Cached or fresh data from API
        """
        started = time.time()
        try:
            # Serve from cache when allowed and a fresh-enough entry exists.
            if not force_refresh:
                cached = self.cache_manager.get_cached_data(cache_key, cache_ttl)
                if cached:
                    self.logger.info(f"Using cached data for {cache_key}")
                    self.cache_manager.record_cache_hit('regular')
                    elapsed = time.time() - started
                    self.cache_manager.record_fetch_time(elapsed)
                    self._log_fetch_performance(cache_key, elapsed, True, "cache")
                    return cached
            # Cache miss (or forced refresh): hit the API.
            self.logger.info(f"Fetching fresh data for {cache_key}")
            fresh = api_fetch_method()
            if fresh:
                self.cache_manager.save_cache(cache_key, fresh)
                self.cache_manager.record_cache_miss('regular')
            else:
                self.logger.warning(f"No data returned for {cache_key}")
            elapsed = time.time() - started
            self.cache_manager.record_fetch_time(elapsed)
            self._log_fetch_performance(cache_key, elapsed, False, "api_fresh")
            return fresh
        except Exception as e:
            elapsed = time.time() - started
            self.logger.error(f"Error fetching data for {cache_key} after {elapsed:.2f}s: {e}")
            self.cache_manager.record_fetch_time(elapsed)
            raise

    def _log_fetch_performance(self, cache_key: str, duration: float, cache_hit: bool, cache_source: str):
        """
        Log detailed performance metrics for fetch operations.

        Args:
            cache_key: Cache key that was accessed
            duration: Fetch operation duration in seconds
            cache_hit: Whether this was a cache hit
            cache_source: Source of the data (cache, api_fresh, etc.)
        """
        self.logger.info(f"Fetch completed for {cache_key} in {duration:.2f}s "
                         f"(cache_hit={cache_hit}, source={cache_source})")
        # Emit a rolled-up summary every 10th fetch on this instance.
        self._fetch_count = getattr(self, '_fetch_count', 0) + 1
        if self._fetch_count % 10 == 0:
            metrics = self.cache_manager.get_cache_metrics()
            self.logger.info(f"Cache Performance Summary - "
                            f"Hit Rate: {metrics['cache_hit_rate']:.2%}, "
                            f"API Calls Saved: {metrics['api_calls_saved']}, "
                            f"Avg Fetch Time: {metrics['average_fetch_time']:.2f}s")

    def get_cache_performance_summary(self) -> Dict[str, Any]:
        """Return the cache manager's performance metrics dictionary."""
        return self.cache_manager.get_cache_metrics()

    def log_cache_performance(self):
        """Log current cache performance metrics."""
        self.cache_manager.log_cache_metrics()

    def clear_cache_for_key(self, cache_key: str):
        """Clear cache for a specific key."""
        self.cache_manager.clear_cache(cache_key)
        self.logger.info(f"Cleared cache for {cache_key}")

    def get_cache_info(self, cache_key: str) -> Dict[str, Any]:
        """
        Get information about a cached item.

        Args:
            cache_key: Cache key to check

        Returns:
            Dictionary with cache information
        """
        # TTL introspection is not yet supported by CacheManager, so only
        # report existence; 'ttl' stays a placeholder.
        exists = self.cache_manager.get_cached_data(cache_key, 0) is not None
        return {
            'key': cache_key,
            'exists': exists,
            'ttl': 'unknown'
        }

404
src/layout_manager.py Normal file
View File

@@ -0,0 +1,404 @@
"""
Layout Manager for LED Matrix Display
Handles custom layouts, element positioning, and display composition.
"""
import json
import os
import logging
from typing import Dict, List, Any, Tuple
from datetime import datetime
from PIL import Image, ImageDraw, ImageFont
logger = logging.getLogger(__name__)
class LayoutManager:
    """Manage named display layouts for the LED matrix.

    A layout is a list of element dicts of the form
    ``{'type': ..., 'x': ..., 'y': ..., 'properties': {...}}`` persisted as
    JSON at ``config_path``.  Rendering requires a ``display_manager`` that
    exposes ``clear()``, ``update_display()``, ``draw_text()``,
    ``draw_weather_icon()`` and a PIL ``draw`` handle; persistence-only use
    (create/update/delete/list) works without one.
    """

    def __init__(self, display_manager=None, config_path="config/custom_layouts.json"):
        self.display_manager = display_manager
        self.config_path = config_path
        self.layouts = self.load_layouts()
        self.current_layout = None  # name of the active layout, if any

    def load_layouts(self) -> Dict[str, Any]:
        """Load saved layouts from file; return {} when missing or unreadable."""
        try:
            if os.path.exists(self.config_path):
                with open(self.config_path, 'r') as f:
                    return json.load(f)
            return {}
        except Exception as e:
            logger.error(f"Error loading layouts: {e}")
            return {}

    def save_layouts(self) -> bool:
        """Persist all layouts to ``config_path``; return True on success."""
        try:
            # Only create parent directories when the path actually has one:
            # os.makedirs('') raises FileNotFoundError for bare filenames,
            # which previously made every save of a flat path fail silently.
            parent = os.path.dirname(self.config_path)
            if parent:
                os.makedirs(parent, exist_ok=True)
            with open(self.config_path, 'w') as f:
                json.dump(self.layouts, f, indent=2)
            return True
        except Exception as e:
            logger.error(f"Error saving layouts: {e}")
            return False

    def create_layout(self, name: str, elements: List[Dict], description: str = "") -> bool:
        """Create (or overwrite) a layout and persist it; return True on success."""
        try:
            self.layouts[name] = {
                'elements': elements,
                'description': description,
                'created': datetime.now().isoformat(),
                'modified': datetime.now().isoformat()
            }
            return self.save_layouts()
        except Exception as e:
            logger.error(f"Error creating layout '{name}': {e}")
            return False

    def update_layout(self, name: str, elements: List[Dict], description: str = None) -> bool:
        """Replace an existing layout's elements (and optionally description)."""
        try:
            if name not in self.layouts:
                return False
            self.layouts[name]['elements'] = elements
            self.layouts[name]['modified'] = datetime.now().isoformat()
            if description is not None:
                self.layouts[name]['description'] = description
            return self.save_layouts()
        except Exception as e:
            logger.error(f"Error updating layout '{name}': {e}")
            return False

    def delete_layout(self, name: str) -> bool:
        """Delete a layout; return False when it does not exist."""
        try:
            if name in self.layouts:
                del self.layouts[name]
                return self.save_layouts()
            return False
        except Exception as e:
            logger.error(f"Error deleting layout '{name}': {e}")
            return False

    def get_layout(self, name: str) -> Dict[str, Any]:
        """Get a specific layout dict, or {} when unknown."""
        return self.layouts.get(name, {})

    def list_layouts(self) -> List[str]:
        """Get list of all layout names."""
        return list(self.layouts.keys())

    def set_current_layout(self, name: str) -> bool:
        """Set the current active layout; return False for unknown names."""
        if name in self.layouts:
            self.current_layout = name
            return True
        return False

    def render_layout(self, layout_name: str = None, data_context: Dict = None) -> bool:
        """Render a layout (default: the current one) to the display.

        Args:
            layout_name: Layout to render; falls back to ``current_layout``.
            data_context: Values available to data-driven/template elements.

        Returns:
            True when the frame was drawn and pushed, False otherwise.
        """
        if not self.display_manager:
            logger.error("No display manager available")
            return False
        layout_name = layout_name or self.current_layout
        if not layout_name or layout_name not in self.layouts:
            logger.error(f"Layout '{layout_name}' not found")
            return False
        try:
            self.display_manager.clear()
            # Draw every element in declaration order, then push one frame.
            for element in self.layouts[layout_name]['elements']:
                self.render_element(element, data_context or {})
            self.display_manager.update_display()
            return True
        except Exception as e:
            logger.error(f"Error rendering layout '{layout_name}': {e}")
            return False

    def render_element(self, element: Dict, data_context: Dict) -> None:
        """Dispatch a single element dict to its type-specific renderer."""
        element_type = element.get('type')
        x = element.get('x', 0)
        y = element.get('y', 0)
        properties = element.get('properties', {})
        try:
            if element_type == 'text':
                self._render_text_element(x, y, properties, data_context)
            elif element_type == 'weather_icon':
                self._render_weather_icon_element(x, y, properties, data_context)
            elif element_type == 'rectangle':
                self._render_rectangle_element(x, y, properties)
            elif element_type == 'line':
                self._render_line_element(x, y, properties)
            elif element_type == 'clock':
                self._render_clock_element(x, y, properties)
            elif element_type == 'data_text':
                self._render_data_text_element(x, y, properties, data_context)
            else:
                logger.warning(f"Unknown element type: {element_type}")
        except Exception as e:
            # One bad element must not abort the rest of the layout.
            logger.error(f"Error rendering element {element_type}: {e}")

    def _render_text_element(self, x: int, y: int, properties: Dict, data_context: Dict) -> None:
        """Render a static text element ({key} template variables supported)."""
        text = properties.get('text', 'Sample Text')
        color = tuple(properties.get('color', [255, 255, 255]))
        font_size = properties.get('font_size', 'normal')
        text = self._process_template_text(text, data_context)
        # 'large' and 'normal' currently both map to the regular font;
        # only 'small' selects a different face.
        if font_size == 'small':
            font = self.display_manager.small_font
        else:
            font = self.display_manager.regular_font
        self.display_manager.draw_text(text, x, y, color, font=font)

    def _render_weather_icon_element(self, x: int, y: int, properties: Dict, data_context: Dict) -> None:
        """Render a weather icon; live context overrides the configured condition."""
        condition = properties.get('condition', 'sunny')
        size = properties.get('size', 16)
        if 'weather' in data_context and 'condition' in data_context['weather']:
            condition = data_context['weather']['condition'].lower()
        self.display_manager.draw_weather_icon(condition, x, y, size)

    def _render_rectangle_element(self, x: int, y: int, properties: Dict) -> None:
        """Render a filled or outlined rectangle element."""
        width = properties.get('width', 10)
        height = properties.get('height', 10)
        color = tuple(properties.get('color', [255, 255, 255]))
        filled = properties.get('filled', False)
        if filled:
            self.display_manager.draw.rectangle(
                [x, y, x + width, y + height],
                fill=color
            )
        else:
            self.display_manager.draw.rectangle(
                [x, y, x + width, y + height],
                outline=color
            )

    def _render_line_element(self, x: int, y: int, properties: Dict) -> None:
        """Render a line from (x, y) to (x2, y2); defaults to a short horizontal."""
        x2 = properties.get('x2', x + 10)
        y2 = properties.get('y2', y)
        color = tuple(properties.get('color', [255, 255, 255]))
        width = properties.get('width', 1)
        self.display_manager.draw.line([x, y, x2, y2], fill=color, width=width)

    def _render_clock_element(self, x: int, y: int, properties: Dict) -> None:
        """Render the current time using a strftime format string."""
        format_str = properties.get('format', '%H:%M')
        color = tuple(properties.get('color', [255, 255, 255]))
        current_time = datetime.now().strftime(format_str)
        self.display_manager.draw_text(current_time, x, y, color)

    def _render_data_text_element(self, x: int, y: int, properties: Dict, data_context: Dict) -> None:
        """Render text driven by a dotted ``data_key`` lookup into the context."""
        data_key = properties.get('data_key', '')
        format_str = properties.get('format', '{value}')
        color = tuple(properties.get('color', [255, 255, 255]))
        default_value = properties.get('default', 'N/A')
        value = self._get_nested_value(data_context, data_key, default_value)
        try:
            text = format_str.format(value=value)
        except (KeyError, IndexError, ValueError):
            # Malformed format string (unknown field, bad spec): fall back to
            # the raw value rather than crashing the render.
            text = str(value)
        self.display_manager.draw_text(text, x, y, color)

    def _process_template_text(self, text: str, data_context: Dict) -> str:
        """Replace top-level ``{key}`` placeholders with values from the context."""
        try:
            for key, value in data_context.items():
                placeholder = f"{{{key}}}"
                if placeholder in text:
                    text = text.replace(placeholder, str(value))
            return text
        except Exception as e:
            logger.error(f"Error processing template text: {e}")
            return text

    def _get_nested_value(self, data: Dict, key: str, default=None):
        """Get a nested value from a dictionary using dot notation."""
        try:
            value = data
            for k in key.split('.'):
                value = value[k]
            return value
        except (KeyError, TypeError):
            return default

    def create_preset_layouts(self) -> None:
        """Create some preset layouts for common use cases."""
        # Basic clock layout
        clock_layout = [
            {
                'type': 'clock',
                'x': 10,
                'y': 10,
                'properties': {
                    'format': '%H:%M',
                    'color': [255, 255, 255]
                }
            },
            {
                'type': 'clock',
                'x': 10,
                'y': 20,
                'properties': {
                    'format': '%m/%d',
                    'color': [100, 100, 255]
                }
            }
        ]
        self.create_layout('basic_clock', clock_layout, 'Simple clock with date')
        # Weather layout
        weather_layout = [
            {
                'type': 'weather_icon',
                'x': 5,
                'y': 5,
                'properties': {
                    'condition': 'sunny',
                    'size': 20
                }
            },
            {
                'type': 'data_text',
                'x': 30,
                'y': 8,
                'properties': {
                    'data_key': 'weather.temperature',
                    'format': '{value}°',
                    'color': [255, 200, 0],
                    'default': '--°'
                }
            },
            {
                'type': 'data_text',
                'x': 30,
                'y': 18,
                'properties': {
                    'data_key': 'weather.condition',
                    'format': '{value}',
                    'color': [200, 200, 200],
                    'default': 'Unknown'
                }
            }
        ]
        self.create_layout('weather_display', weather_layout, 'Weather icon with temperature and condition')
        # Mixed dashboard layout
        dashboard_layout = [
            {
                'type': 'clock',
                'x': 2,
                'y': 2,
                'properties': {
                    'format': '%H:%M',
                    'color': [255, 255, 255]
                }
            },
            {
                'type': 'weather_icon',
                'x': 50,
                'y': 2,
                'properties': {
                    'size': 16
                }
            },
            {
                'type': 'data_text',
                'x': 70,
                'y': 5,
                'properties': {
                    'data_key': 'weather.temperature',
                    'format': '{value}°',
                    'color': [255, 200, 0],
                    'default': '--°'
                }
            },
            {
                'type': 'line',
                'x': 0,
                'y': 15,
                'properties': {
                    'x2': 128,
                    'y2': 15,
                    'color': [100, 100, 100]
                }
            },
            {
                'type': 'data_text',
                'x': 2,
                'y': 18,
                'properties': {
                    'data_key': 'stocks.AAPL.price',
                    'format': 'AAPL: ${value}',
                    'color': [0, 255, 0],
                    'default': 'AAPL: N/A'
                }
            }
        ]
        self.create_layout('dashboard', dashboard_layout, 'Mixed dashboard with clock, weather, and stocks')
        logger.info("Created preset layouts")

    def get_layout_preview(self, layout_name: str) -> Dict[str, Any]:
        """Get a preview representation of a layout (no rendering required)."""
        if layout_name not in self.layouts:
            return {}
        layout = self.layouts[layout_name]
        elements = layout['elements']
        preview = {
            'name': layout_name,
            'description': layout.get('description', ''),
            'element_count': len(elements),
            'elements': []
        }
        for element in elements:
            preview['elements'].append({
                'type': element.get('type'),
                'position': f"({element.get('x', 0)}, {element.get('y', 0)})",
                'properties': list(element.get('properties', {}).keys())
            })
        return preview

213
src/leaderboard_manager.py Normal file
View File

@@ -0,0 +1,213 @@
import logging
import os
import time
from typing import Dict, Any, List, Optional
from PIL import Image, ImageDraw, ImageFont
logger = logging.getLogger(__name__)
class LeaderboardManager:
    """
    Generic leaderboard renderer: scrolls a league strip of:
    [league logo] [conference label] [#rank] [team logo] [abbr] [record (optional, spacing preserved)]
    League-specific data is injected via league_data from the league manager.
    """
    def __init__(self, config: Dict[str, Any], display_manager, league_data: Dict[str, Any]):
        # All tunables live under config["leaderboard"].
        self.leaderboard_config = config.get("leaderboard", {})
        self.is_enabled = self.leaderboard_config.get("enabled", False)
        self.update_interval = self.leaderboard_config.get("update_interval", 3600)  # standings cache TTL (seconds)
        self.scroll_speed = max(1, self.leaderboard_config.get("scroll_speed", 1))  # pixels per frame, floored at 1
        self.scroll_delay = self.leaderboard_config.get("scroll_delay", 0.01)  # sleep between frames (seconds)
        self.loop = self.leaderboard_config.get("loop", False)
        self.show_record = self.leaderboard_config.get("show_record", True)
        self.display_manager = display_manager
        self.league_data = league_data
        self.fonts = self._load_fonts()
        # Cached standings (conference name -> ordered list of team dicts)
        # and the timestamp of the last reload, for TTL-based refresh.
        self._cached_standings: Optional[Dict[str, List[Dict[str, Any]]]] = None
        self._last_standings_reload: float = 0.0
    def _load_fonts(self) -> Dict[str, ImageFont.FreeTypeFont]:
        """Load the four pixel-font sizes, falling back to PIL's default font."""
        try:
            return {
                'small': ImageFont.truetype("assets/fonts/PressStart2P-Regular.ttf", 6),
                'medium': ImageFont.truetype("assets/fonts/PressStart2P-Regular.ttf", 8),
                'large': ImageFont.truetype("assets/fonts/PressStart2P-Regular.ttf", 10),
                'xlarge': ImageFont.truetype("assets/fonts/PressStart2P-Regular.ttf", 12),
            }
        except IOError:
            logger.warning("Custom fonts not found, falling back to default PIL font.")
            return {k: ImageFont.load_default() for k in ['small','medium','large','xlarge']}
    def _get_team_logo(self, logo_path: str) -> Optional[Image.Image]:
        """Load a logo, trim transparent borders, and scale it to fit the strip.

        Height is capped at the display height and width at 1.5x that; aspect
        ratio is preserved. Returns None when the file is missing or unreadable.
        """
        try:
            if logo_path and os.path.exists(logo_path):
                logo = Image.open(logo_path).convert("RGBA")
                # Crop away fully transparent padding so logos align tightly.
                bbox = logo.getbbox()
                if bbox:
                    logo = logo.crop(bbox)
                max_h = self.display_manager.height
                max_w = int(max_h * 1.5)
                scale = min(max_w / logo.width, max_h / logo.height)
                return logo.resize((int(logo.width * scale), int(logo.height * scale)), Image.Resampling.LANCZOS)
        except Exception as e:
            logger.error(f"Error loading logo {logo_path}: {e}")
        return None
    def _fetch_standings(self) -> Dict[str, List[Dict[str, Any]]]:
        """Return standings, reloading via the injected provider at most once per TTL.

        league_data may supply either a callable "standings_provider" or a
        static "standings" dict; on provider failure the last good cache (or
        an empty dict) is returned.
        """
        now = time.time()
        if self._cached_standings and (now - self._last_standings_reload < self.update_interval):
            return self._cached_standings
        provider = self.league_data.get("standings_provider")
        try:
            standings = provider() if callable(provider) else self.league_data.get("standings", {})
            if isinstance(standings, dict):
                self._cached_standings = standings
                self._last_standings_reload = now
                logger.info(f"[Leaderboard] Standings reloaded: {list(standings.keys())}")
                return standings
        except Exception as e:
            logger.error(f"[Leaderboard] standings_provider failed: {e}")
        return self._cached_standings or {}
    def display(self):
        """Compose the full leaderboard strip and scroll it across the matrix.

        Two passes: first measure the total strip width with a throwaway
        1x1 canvas, then render onto a strip of exactly that width and scroll
        it. Blocks until scrolling (and the optional end-of-run logo hold)
        completes.
        """
        if not self.is_enabled:
            logger.info("[Leaderboard] Disabled in config")
            return
        standings = self._fetch_standings()
        conferences: List[str] = self.league_data.get("conferences", [])
        if not conferences or not any(standings.get(conf) for conf in conferences):
            logger.info("[Leaderboard] No conferences or empty standings")
            return
        strip_height = self.display_manager.height
        logo_spacing = self.leaderboard_config.get("logo_spacing", 20)
        conference_spacing = self.leaderboard_config.get("conference_spacing", 60)
        # --- Measure total width ---
        # Pass 1: accumulate widths using the same fonts/spacings as the real
        # draw so the strip is sized exactly. Must stay in lockstep with the
        # build pass below.
        dummy_img = Image.new("RGB", (1, 1))
        draw = ImageDraw.Draw(dummy_img)
        total_width = 0
        league_logo_path = self.league_data.get("league_logo", "")
        league_logo_probe = self._get_team_logo(league_logo_path)
        if league_logo_probe:
            total_width += league_logo_probe.width + logo_spacing
        for conf in conferences:
            teams = standings.get(conf, [])
            if not teams:
                continue
            conf_text = f"{conf.capitalize()} Div."
            total_width += int(draw.textlength(conf_text, font=self.fonts['medium'])) + 40
            for rank, team in enumerate(teams, start=1):
                total_width += int(draw.textlength(f"#{rank}.", font=self.fonts['xlarge'])) + 5
                total_width += int(strip_height * 1.5) + 5  # logo box space
                abbr = team.get("abbreviation", team.get("name", "")[:3])
                total_width += int(draw.textlength(abbr, font=self.fonts['large'])) + 10
                record = team.get("record", "")
                total_width += (int(draw.textlength(record, font=self.fonts['medium'])) + 30) if record else 30
            total_width += conference_spacing
        # --- Build strip ---
        # Pass 2: paint logos and text left-to-right, tracking x_offset.
        strip = Image.new("RGB", (total_width, strip_height), (0, 0, 0))
        draw = ImageDraw.Draw(strip)
        x_offset = 0
        league_logo = self._get_team_logo(league_logo_path)
        if league_logo:
            strip.paste(league_logo, (x_offset, (strip_height - league_logo.height) // 2), league_logo)
            x_offset += league_logo.width + logo_spacing
        for conf in conferences:
            teams = standings.get(conf, [])
            if not teams:
                continue
            conf_text = f"{conf.capitalize()} Div."
            # Text y positions are vertically centered via each font's bbox height.
            conf_y = (strip_height - (self.fonts['medium'].getbbox(conf_text)[3])) // 2
            draw.text((x_offset, conf_y), conf_text, font=self.fonts['medium'], fill=(0, 200, 255))
            x_offset += int(draw.textlength(conf_text, font=self.fonts['medium'])) + 40
            for rank, team in enumerate(teams, start=1):
                rank_text = f"#{rank}."
                rank_y = (strip_height - (self.fonts['xlarge'].getbbox(rank_text)[3])) // 2
                draw.text((x_offset, rank_y), rank_text, font=self.fonts['xlarge'], fill=(255, 255, 0))
                x_offset += int(draw.textlength(rank_text, font=self.fonts['xlarge'])) + 5
                logo = self._get_team_logo(team.get("logo", ""))
                if logo:
                    strip.paste(logo, (x_offset, (strip_height - logo.height) // 2), logo)
                    x_offset += logo.width + 5
                else:
                    # Reserve logo space even if missing, for consistent layout
                    x_offset += int(strip_height * 1.5) + 5
                abbr = team.get("abbreviation", team.get("name", "")[:3])
                abbr_y = (strip_height - (self.fonts['large'].getbbox(abbr)[3])) // 2
                draw.text((x_offset, abbr_y), abbr, font=self.fonts['large'], fill=(255, 255, 255))
                x_offset += int(draw.textlength(abbr, font=self.fonts['large'])) + 10
                record = team.get("record", "")
                if self.show_record and record:
                    rec_y = (strip_height - (self.fonts['medium'].getbbox(record)[3])) // 2
                    draw.text((x_offset, rec_y), record, font=self.fonts['medium'], fill=(255, 255, 0))
                    x_offset += int(draw.textlength(record, font=self.fonts['medium'])) + 30
                else:
                    x_offset += 30
            x_offset += conference_spacing
        # --- Scroll ---
        visible_w = self.display_manager.width
        intro_hold = self.leaderboard_config.get("logo_intro_hold", 0)
        if self.leaderboard_config.get("dynamic_duration", False):
            # Estimate one full scroll's duration, clamped to [min_duration, max_display_time].
            est_duration = (total_width / self.scroll_speed) * self.scroll_delay
            duration = max(
                self.leaderboard_config.get("min_duration", 30),
                min(est_duration, self.leaderboard_config.get("max_display_time", 600)),
            )
        else:
            duration = self.leaderboard_config.get("max_display_time", 600)
        end_time = time.time() + duration
        def scroll_once():
            # start_x runs from fully off-screen right (-visible_w) past the
            # strip's painted extent (x_offset), advancing scroll_speed px/frame.
            for start_x in range(-visible_w, x_offset, self.scroll_speed):
                # Crop the appropriate segment from the strip
                # (crop beyond the strip's right edge appears to be padded
                # with black by PIL — TODO confirm against Pillow docs)
                frame = strip.crop((max(0, start_x), 0, start_x + visible_w, strip_height))
                # Create a visible frame canvas and paste the cropped segment at the right offset
                frame_canvas = Image.new("RGB", (visible_w, strip_height), (0, 0, 0))
                paste_x = 0 if start_x >= 0 else -start_x
                frame_canvas.paste(frame, (paste_x, 0))
                # Paint to the matrix
                self.display_manager.image.paste(frame_canvas, (0, 0))
                self.display_manager.update_display()
                # Optional intro hold when the strip first fully aligns
                if start_x == 0 and intro_hold > 0:
                    time.sleep(intro_hold)
                time.sleep(self.scroll_delay)
        while True:
            scroll_once()
            # Non-looping mode always stops after one pass; looping mode
            # stops once the configured duration has elapsed.
            if not self.loop or time.time() >= end_time:
                break
        # --- Static league logo hold at the end ---
        if league_logo:
            canvas = Image.new("RGB", (visible_w, strip_height), (0, 0, 0))
            offset_x = (visible_w - league_logo.width) // 2
            offset_y = (strip_height - league_logo.height) // 2
            canvas.paste(league_logo, (offset_x, offset_y), league_logo)
            self.display_manager.image.paste(canvas, (0, 0))
            self.display_manager.update_display()
            time.sleep(self.leaderboard_config.get("logo_hold", 3))

736
src/logo_downloader.py Normal file
View File

@@ -0,0 +1,736 @@
#!/usr/bin/env python3
"""
Centralized logo downloader utility for automatically fetching team logos from ESPN API.
This module provides functionality to download missing team logos for various sports leagues,
with special support for FCS teams and other NCAA divisions.
"""
import os
import time
import logging
import requests
import json
from typing import Dict, Any, List, Optional, Tuple
from pathlib import Path
from PIL import Image, ImageDraw, ImageFont
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
logger = logging.getLogger(__name__)
class LogoDownloader:
"""Centralized logo downloader for team logos from ESPN API."""
# ESPN API endpoints for different sports/leagues
API_ENDPOINTS = {
'nfl': 'https://site.api.espn.com/apis/site/v2/sports/football/nfl/teams',
'nba': 'https://site.api.espn.com/apis/site/v2/sports/basketball/nba/teams',
'mlb': 'https://site.api.espn.com/apis/site/v2/sports/baseball/mlb/teams',
'nhl': 'https://site.api.espn.com/apis/site/v2/sports/hockey/nhl/teams',
'ncaa_fb': 'https://site.api.espn.com/apis/site/v2/sports/football/college-football/teams',
'ncaa_fb_all': 'https://site.api.espn.com/apis/site/v2/sports/football/college-football/teams', # Includes FCS
'fcs': 'https://site.api.espn.com/apis/site/v2/sports/football/college-football/teams', # FCS teams from same endpoint
'ncaam_basketball': 'https://site.api.espn.com/apis/site/v2/sports/basketball/mens-college-basketball/teams',
'ncaa_baseball': 'https://site.api.espn.com/apis/site/v2/sports/baseball/college-baseball/teams',
'ncaam_hockey': 'https://site.api.espn.com/apis/site/v2/sports/hockey/mens-college-hockey/teams',
# Soccer leagues
'soccer_eng.1': 'https://site.api.espn.com/apis/site/v2/sports/soccer/eng.1/teams',
'soccer_esp.1': 'https://site.api.espn.com/apis/site/v2/sports/soccer/esp.1/teams',
'soccer_ger.1': 'https://site.api.espn.com/apis/site/v2/sports/soccer/ger.1/teams',
'soccer_ita.1': 'https://site.api.espn.com/apis/site/v2/sports/soccer/ita.1/teams',
'soccer_fra.1': 'https://site.api.espn.com/apis/site/v2/sports/soccer/fra.1/teams',
'soccer_por.1': 'https://site.api.espn.com/apis/site/v2/sports/soccer/por.1/teams',
'soccer_uefa.champions': 'https://site.api.espn.com/apis/site/v2/sports/soccer/uefa.champions/teams',
'soccer_uefa.europa': 'https://site.api.espn.com/apis/site/v2/sports/soccer/uefa.europa/teams',
'soccer_usa.1': 'https://site.api.espn.com/apis/site/v2/sports/soccer/usa.1/teams'
}
# Directory mappings for different leagues
LOGO_DIRECTORIES = {
'nfl': 'assets/sports/nfl_logos',
'nba': 'assets/sports/nba_logos',
'wnba': 'assets/sports/wnba_logos',
'mlb': 'assets/sports/mlb_logos',
'nhl': 'assets/sports/nhl_logos',
# NCAA sports use same directory
'ncaa_fb': 'assets/sports/ncaa_logos',
'ncaa_fb_all': 'assets/sports/ncaa_logos',
'fcs': 'assets/sports/ncaa_logos',
'ncaam_basketball': 'assets/sports/ncaa_logos',
'ncaa_baseball': 'assets/sports/ncaa_logos',
'ncaam_hockey': 'assets/sports/ncaa_logos',
'ncaaw_hockey': 'assets/sports/ncaa_logos',
# Soccer leagues - all use the same soccer_logos directory
'soccer_eng.1': 'assets/sports/soccer_logos',
'soccer_esp.1': 'assets/sports/soccer_logos',
'soccer_ger.1': 'assets/sports/soccer_logos',
'soccer_ita.1': 'assets/sports/soccer_logos',
'soccer_fra.1': 'assets/sports/soccer_logos',
'soccer_por.1': 'assets/sports/soccer_logos',
'soccer_uefa.champions': 'assets/sports/soccer_logos',
'soccer_uefa.europa': 'assets/sports/soccer_logos',
'soccer_usa.1': 'assets/sports/soccer_logos'
}
    def __init__(self, request_timeout: int = 30, retry_attempts: int = 3):
        """Initialize the logo downloader with HTTP session and retry logic.

        Args:
            request_timeout: Per-request timeout in seconds.
            retry_attempts: Total retries for transient HTTP failures.
        """
        self.request_timeout = request_timeout
        self.retry_attempts = retry_attempts
        # Set up session with retry logic
        self.session = requests.Session()
        retry_strategy = Retry(
            total=retry_attempts,
            backoff_factor=1,  # exponential backoff between retries
            status_forcelist=[429, 500, 502, 503, 504],  # rate-limit + server errors
            allowed_methods=["GET", "HEAD", "OPTIONS"]  # retry idempotent methods only
        )
        adapter = HTTPAdapter(max_retries=retry_strategy)
        # Mount the retrying adapter for both URL schemes.
        self.session.mount("https://", adapter)
        self.session.mount("http://", adapter)
        # Set up headers
        # Sent with every request to the ESPN API (identifies this project).
        self.headers = {
            'User-Agent': 'LEDMatrix/1.0 (https://github.com/yourusername/LEDMatrix; contact@example.com)',
            'Accept': 'application/json',
            'Accept-Language': 'en-US,en;q=0.9',
            'Accept-Encoding': 'gzip, deflate, br',
            'Connection': 'keep-alive'
        }
@staticmethod
def normalize_abbreviation(abbreviation: str) -> str:
"""Normalize team abbreviation for consistent filename usage."""
# Handle special characters that can cause filesystem issues
normalized = abbreviation.upper()
# Replace problematic characters with safe alternatives
normalized = normalized.replace('&', 'AND')
normalized = normalized.replace('/', '_')
normalized = normalized.replace('\\', '_')
normalized = normalized.replace(':', '_')
normalized = normalized.replace('*', '_')
normalized = normalized.replace('?', '_')
normalized = normalized.replace('"', '_')
normalized = normalized.replace('<', '_')
normalized = normalized.replace('>', '_')
normalized = normalized.replace('|', '_')
return normalized
@staticmethod
def get_logo_filename_variations(abbreviation: str) -> list:
"""Get possible filename variations for a team abbreviation."""
variations = []
original = abbreviation.upper()
normalized = LogoDownloader.normalize_abbreviation(abbreviation)
# Add original and normalized versions
variations.extend([f"{original}.png", f"{normalized}.png"])
# Special handling for known cases
if original == 'TA&M':
# TA&M has a file named TA&M.png, but normalize creates TAANDM.png
variations = [f"{original}.png", f"{normalized}.png"]
return variations
def get_logo_directory(self, league: str) -> str:
"""Get the logo directory for a given league."""
return self.LOGO_DIRECTORIES.get(league, f'assets/sports/{league}_logos')
    def ensure_logo_directory(self, logo_dir: str) -> bool:
        """Ensure the logo directory exists, create if necessary.

        Also verifies the directory is actually writable by creating and
        removing a throwaway file, so permission problems surface here
        instead of mid-download.

        Returns:
            True when the directory exists and is writable, False otherwise.
        """
        try:
            os.makedirs(logo_dir, exist_ok=True)
            # Check if we can actually write to the directory
            test_file = os.path.join(logo_dir, '.write_test')
            try:
                with open(test_file, 'w') as f:
                    f.write('test')
                os.remove(test_file)
                logger.debug(f"Directory {logo_dir} is writable")
                return True
            except PermissionError:
                logger.error(f"Permission denied: Cannot write to directory {logo_dir}")
                logger.error(f"Please run: sudo ./fix_assets_permissions.sh")
                return False
            except Exception as e:
                logger.error(f"Failed to test write access to directory {logo_dir}: {e}")
                return False
        except Exception as e:
            # makedirs itself failed (bad path, read-only parent, ...).
            logger.error(f"Failed to create logo directory {logo_dir}: {e}")
            return False
def download_logo(self, logo_url: str, filepath: Path, team_abbreviation: str) -> bool:
"""Download a single logo from URL and save to filepath."""
try:
response = self.session.get(logo_url, headers=self.headers, timeout=self.request_timeout)
response.raise_for_status()
# Verify it's actually an image
content_type = response.headers.get('content-type', '').lower()
if not any(img_type in content_type for img_type in ['image/png', 'image/jpeg', 'image/jpg', 'image/gif']):
logger.warning(f"Downloaded content for {team_abbreviation} is not an image: {content_type}")
return False
with open(filepath, 'wb') as f:
f.write(response.content)
# Verify and convert the downloaded image to RGBA format
try:
with Image.open(filepath) as img:
# Convert to RGBA to avoid PIL warnings about palette images with transparency
if img.mode in ('P', 'LA', 'L'):
# Convert palette or grayscale images to RGBA
img = img.convert('RGBA')
elif img.mode == 'RGB':
# Convert RGB to RGBA (add alpha channel)
img = img.convert('RGBA')
elif img.mode != 'RGBA':
# For any other mode, convert to RGBA
img = img.convert('RGBA')
# Save the converted image
img.save(filepath, 'PNG')
logger.info(f"Successfully downloaded and converted logo for {team_abbreviation} -> {filepath.name}")
return True
except Exception as e:
logger.error(f"Downloaded file for {team_abbreviation} is not a valid image or conversion failed: {e}")
try:
os.remove(filepath) # Remove invalid file
except:
pass
return False
except PermissionError as e:
logger.error(f"Permission denied downloading logo for {team_abbreviation}: {e}")
logger.error(f"Please run: sudo ./fix_assets_permissions.sh")
return False
except requests.exceptions.RequestException as e:
logger.error(f"Failed to download logo for {team_abbreviation}: {e}")
return False
except Exception as e:
logger.error(f"Unexpected error downloading logo for {team_abbreviation}: {e}")
return False
def fetch_teams_data(self, league: str) -> Optional[Dict]:
"""Fetch team data from ESPN API for a specific league."""
api_url = self.API_ENDPOINTS.get(league)
if not api_url:
logger.error(f"No API endpoint configured for league: {league}")
return None
try:
logger.info(f"Fetching team data for {league} from ESPN API...")
response = self.session.get(api_url, params={'limit':1000},headers=self.headers, timeout=self.request_timeout)
response.raise_for_status()
data = response.json()
logger.info(f"Successfully fetched team data for {league}")
return data
except requests.exceptions.RequestException as e:
logger.error(f"Error fetching team data for {league}: {e}")
return None
except json.JSONDecodeError as e:
logger.error(f"Error parsing JSON response for {league}: {e}")
return None
def fetch_single_team(self, league: str, team_id: str) -> Optional[Dict]:
"""Fetch team data from ESPN API for a specific league."""
api_url = self.API_ENDPOINTS.get(league)
if not api_url:
logger.error(f"No API endpoint configured for league: {league}")
return None
try:
logger.info(f"Fetching team data for team {team_id} in {league} from ESPN API...")
response = self.session.get(f"{api_url}/{team_id}", headers=self.headers, timeout=self.request_timeout)
response.raise_for_status()
data = response.json()
logger.info(f"Successfully fetched team data for {team_id} in {league}")
return data
except requests.exceptions.RequestException as e:
logger.error(f"Error fetching team data for {team_id} in {league}: {e}")
return None
except json.JSONDecodeError as e:
logger.error(f"Error parsing JSON response for{team_id} in {league}: {e}")
return None
    def extract_teams_from_data(self, data: Dict, league: str) -> List[Dict[str, str]]:
        """Extract team information from ESPN API response.

        Walks the sports -> leagues -> teams nesting of the ESPN payload and
        returns one dict per team with abbreviation, display name, default
        logo URL, league key, FBS/FCS category (NCAA football only) and the
        conference name. Teams without an abbreviation or logo URL are
        skipped. Returns an empty list on any parsing error.
        """
        teams = []
        try:
            sports = data.get('sports', [])
            for sport in sports:
                leagues_data = sport.get('leagues', [])
                for league_data in leagues_data:
                    teams_data = league_data.get('teams', [])
                    for team_data in teams_data:
                        team_info = team_data.get('team', {})
                        abbreviation = team_info.get('abbreviation', '')
                        display_name = team_info.get('displayName', 'Unknown')
                        logos = team_info.get('logos', [])
                        if not abbreviation or not logos:
                            continue
                        # Get the default logo (first one is usually default)
                        logo_url = logos[0].get('href', '')
                        if not logo_url:
                            continue
                        # For NCAA football, try to determine if it's FCS or FBS
                        team_category = 'FBS'  # Default
                        if league in ['ncaa_fb', 'ncaa_fb_all', 'fcs']:
                            # Check if this is an FCS team by looking at conference or other indicators
                            # ESPN API includes both FBS and FCS teams in the same endpoint
                            # We'll include all teams and let the user decide which ones to use
                            team_category = self._determine_ncaa_football_division(team_info, league_data)
                        teams.append({
                            'abbreviation': abbreviation,
                            'display_name': display_name,
                            'logo_url': logo_url,
                            'league': league,
                            'category': team_category,
                            'conference': league_data.get('name', 'Unknown')
                        })
            logger.info(f"Extracted {len(teams)} teams for {league}")
            return teams
        except Exception as e:
            # Any unexpected payload shape aborts the whole extraction.
            logger.error(f"Error extracting teams for {league}: {e}")
            return []
def _determine_ncaa_football_division(self, team_info: Dict, league_data: Dict) -> str:
"""Determine if an NCAA football team is FBS or FCS based on conference and other indicators."""
conference_name = league_data.get('name', '').lower()
# FBS Conferences (more comprehensive list)
fbs_conferences = {
'acc', 'american athletic', 'big 12', 'big ten', 'conference usa', 'c-usa',
'mid-american', 'mac', 'mountain west', 'pac-12', 'pac-10', 'sec',
'sun belt', 'independents', 'big east'
}
# FCS Conferences (more comprehensive list)
fcs_conferences = {
'big sky', 'big south', 'colonial athletic', 'caa', 'ivy league',
'meac', 'missouri valley', 'mvfc', 'northeast', 'nec',
'ohio valley', 'ovc', 'patriot league', 'pioneer football',
'southland', 'southern', 'southwestern athletic', 'swac',
'western athletic', 'wac', 'ncaa division i-aa'
}
# Also check for specific team indicators
team_abbreviation = team_info.get('abbreviation', '').upper()
# Known FBS teams that might be misclassified
known_fbs_teams = {
'ASU', 'ARIZ', 'ARK', 'AUB', 'BOIS', 'CSU', 'FLA', 'HAW', 'IDHO', 'USA'
}
# Check if it's a known FBS team first
if team_abbreviation in known_fbs_teams:
return 'FBS'
# Check conference names
if any(fbs_conf in conference_name for fbs_conf in fbs_conferences):
return 'FBS'
elif any(fcs_conf in conference_name for fcs_conf in fcs_conferences):
return 'FCS'
# If conference is just "NCAA - Football", we need to use other indicators
if conference_name == 'ncaa - football':
# Check team name for indicators of FCS (smaller schools, Division II/III)
team_name = team_info.get('displayName', '').lower()
fcs_indicators = ['college', 'university', 'state', 'tech', 'community']
# If it has typical FCS naming patterns and isn't a known FBS team
if any(indicator in team_name for indicator in fcs_indicators):
return 'FCS'
else:
return 'FBS'
# Default to FBS for unknown conferences
return 'FBS'
def _get_team_name_variations(self, abbreviation: str) -> List[str]:
"""Generate common variations of a team abbreviation for matching."""
variations = set()
abbr = abbreviation.upper()
variations.add(abbr)
# Add normalized version
variations.add(self.normalize_abbreviation(abbr))
# Common substitutions
substitutions = {
'&': ['AND', 'A'],
'A&M': ['TAMU', 'TA&M', 'TEXASAM'],
'STATE': ['ST', 'ST.'],
'UNIVERSITY': ['U', 'UNIV'],
'COLLEGE': ['C', 'COL'],
'TECHNICAL': ['TECH', 'T'],
'NORTHERN': ['NORTH', 'N'],
'SOUTHERN': ['SOUTH', 'S'],
'EASTERN': ['EAST', 'E'],
'WESTERN': ['WEST', 'W']
}
# Apply substitutions
for original, replacements in substitutions.items():
if original in abbr:
for replacement in replacements:
variations.add(abbr.replace(original, replacement))
variations.add(abbr.replace(original, '')) # Remove the word entirely
# Add common abbreviations for Texas A&M
if 'A&M' in abbr or 'TAMU' in abbr:
variations.update(['TAMU', 'TA&M', 'TEXASAM', 'TEXAS_A&M', 'TEXAS_AM'])
return list(variations)
    def download_missing_logos_for_league(self, league: str, force_download: bool = False) -> Tuple[int, int]:
        """Download missing logos for a specific league.

        Fetches the league's team list from ESPN, then downloads a logo for
        every team whose file is not already on disk (or every team when
        *force_download* is set), pausing briefly between requests.

        Returns:
            Tuple of (downloaded_count, failed_count); (0, 0) on setup failure.
        """
        logger.info(f"Starting logo download for league: {league}")
        # Get logo directory
        logo_dir = self.get_logo_directory(league)
        if not self.ensure_logo_directory(logo_dir):
            logger.error(f"Failed to create logo directory for {league}")
            return 0, 0
        # Fetch team data
        data = self.fetch_teams_data(league)
        if not data:
            logger.error(f"Failed to fetch team data for {league}")
            return 0, 0
        # Extract teams
        teams = self.extract_teams_from_data(data, league)
        if not teams:
            logger.warning(f"No teams found for {league}")
            return 0, 0
        # Download missing logos
        downloaded_count = 0
        failed_count = 0
        for team in teams:
            abbreviation = team['abbreviation']
            display_name = team['display_name']
            logo_url = team['logo_url']
            # Create filename
            filename = f"{self.normalize_abbreviation(abbreviation)}.png"
            filepath = Path(logo_dir) / filename
            # Skip if already exists and not forcing download
            if filepath.exists() and not force_download:
                logger.debug(f"Skipping {display_name}: (unknown) already exists")
                continue
            # Download logo
            if self.download_logo(logo_url, filepath, display_name):
                downloaded_count += 1
            else:
                failed_count += 1
            # Small delay to be respectful to the API
            time.sleep(0.1)
        logger.info(f"Logo download complete for {league}: {downloaded_count} downloaded, {failed_count} failed")
        return downloaded_count, failed_count
    def download_all_ncaa_football_logos(self, include_fcs: bool = True, force_download: bool = False) -> Tuple[int, int]:
        """Download all NCAA football team logos including FCS teams.

        Uses the comprehensive college-football endpoint (which mixes FBS and
        FCS teams) and optionally filters to FBS-only before downloading.

        Args:
            include_fcs: When False, only teams classified as FBS are fetched.
            force_download: Re-download logos that already exist on disk.

        Returns:
            Tuple of (downloaded_count, failed_count); (0, 0) on setup failure.
        """
        logger.info(f"Starting comprehensive NCAA football logo download (FCS: {include_fcs})")
        # Use the comprehensive NCAA football endpoint
        league = 'ncaa_fb_all'
        logo_dir = self.get_logo_directory(league)
        if not self.ensure_logo_directory(logo_dir):
            logger.error(f"Failed to create logo directory for {league}")
            return 0, 0
        # Fetch team data
        data = self.fetch_teams_data(league)
        if not data:
            logger.error(f"Failed to fetch team data for {league}")
            return 0, 0
        # Extract teams
        teams = self.extract_teams_from_data(data, league)
        if not teams:
            logger.warning(f"No teams found for {league}")
            return 0, 0
        # Filter teams based on FCS inclusion
        if not include_fcs:
            teams = [team for team in teams if team.get('category') == 'FBS']
            logger.info(f"Filtered to FBS teams only: {len(teams)} teams")
        # Download missing logos
        downloaded_count = 0
        failed_count = 0
        for team in teams:
            abbreviation = team['abbreviation']
            display_name = team['display_name']
            logo_url = team['logo_url']
            category = team.get('category', 'Unknown')
            conference = team.get('conference', 'Unknown')
            # Create filename
            filename = f"{self.normalize_abbreviation(abbreviation)}.png"
            filepath = Path(logo_dir) / filename
            # Skip if already exists and not forcing download
            if filepath.exists() and not force_download:
                logger.debug(f"Skipping {display_name} ({category}, {conference}): (unknown) already exists")
                continue
            # Download logo
            if self.download_logo(logo_url, filepath, display_name):
                downloaded_count += 1
                logger.info(f"Downloaded {display_name} ({category}, {conference}) -> (unknown)")
            else:
                failed_count += 1
                logger.warning(f"Failed to download {display_name} ({category}, {conference})")
            # Small delay to be respectful to the API
            time.sleep(0.1)
        logger.info(f"Comprehensive NCAA football logo download complete: {downloaded_count} downloaded, {failed_count} failed")
        return downloaded_count, failed_count
def download_missing_logo_for_team(self, league: str, team_id: str, team_abbreviation: str, logo_path: Path) -> bool:
"""Download a specific team's logo if it's missing."""
# Ensure the logo directory exists and is writable
logo_dir = str(logo_path.parent)
if not self.ensure_logo_directory(logo_dir):
logger.error(f"Cannot download logo for {team_abbreviation}: directory {logo_dir} is not writable")
return False
# Fetch team data to find the logo URL
data = self.fetch_single_team(league, team_id)
if not data:
return False
try:
logo_url = data["team"]["logos"][0]["href"]
except KeyError:
return False
# Download the logo
success = self.download_logo(logo_url, logo_path, team_abbreviation)
if success:
time.sleep(0.1) # Small delay
return success
def download_all_missing_logos(self, leagues: List[str] | None = None, force_download: bool = False) -> Dict[str, Tuple[int, int]]:
"""Download missing logos for all specified leagues."""
if leagues is None:
leagues = list(self.API_ENDPOINTS.keys())
results = {}
total_downloaded = 0
total_failed = 0
for league in leagues:
if league not in self.API_ENDPOINTS:
logger.warning(f"Skipping unknown league: {league}")
continue
downloaded, failed = self.download_missing_logos_for_league(league, force_download)
results[league] = (downloaded, failed)
total_downloaded += downloaded
total_failed += failed
logger.info(f"Overall logo download results: {total_downloaded} downloaded, {total_failed} failed")
return results
def create_placeholder_logo(self, team_abbreviation: str, logo_dir: str) -> bool:
"""Create a placeholder logo when real logo cannot be downloaded."""
try:
# Ensure the logo directory exists
if not self.ensure_logo_directory(logo_dir):
logger.error(f"Failed to create logo directory: {logo_dir}")
return False
filename = f"{self.normalize_abbreviation(team_abbreviation)}.png"
filepath = Path(logo_dir) / filename
# Check if we can write to the directory
try:
# Test write permissions by creating a temporary file
test_file = filepath.parent / "test_write.tmp"
test_file.touch()
test_file.unlink() # Remove the test file
except PermissionError:
logger.error(f"Permission denied: Cannot write to directory {logo_dir}")
return False
except Exception as e:
logger.error(f"Directory access error for {logo_dir}: {e}")
return False
# Create a simple placeholder logo
logo = Image.new('RGBA', (64, 64), (100, 100, 100, 255)) # Gray background
draw = ImageDraw.Draw(logo)
# Try to load a font, fallback to default
try:
font = ImageFont.truetype("assets/fonts/PressStart2P-Regular.ttf", 12)
except:
try:
font = ImageFont.load_default()
except:
font = None
# Draw team abbreviation
text = team_abbreviation
if font:
# Center the text
bbox = draw.textbbox((0, 0), text, font=font)
text_width = bbox[2] - bbox[0]
text_height = bbox[3] - bbox[1]
x = (64 - text_width) // 2
y = (64 - text_height) // 2
draw.text((x, y), text, font=font, fill=(255, 255, 255, 255))
else:
# Fallback without font
draw.text((16, 24), text, fill=(255, 255, 255, 255))
logo.save(filepath)
logger.info(f"Created placeholder logo for {team_abbreviation} at {filepath}")
return True
except Exception as e:
logger.error(f"Failed to create placeholder logo for {team_abbreviation}: {e}")
return False
def convert_image_to_rgba(self, filepath: Path) -> bool:
"""Convert an image file to RGBA format to avoid PIL warnings."""
try:
with Image.open(filepath) as img:
if img.mode != 'RGBA':
# Convert to RGBA
converted_img = img.convert('RGBA')
converted_img.save(filepath, 'PNG')
logger.debug(f"Converted {filepath.name} from {img.mode} to RGBA")
return True
else:
logger.debug(f"{filepath.name} is already in RGBA format")
return True
except Exception as e:
logger.error(f"Failed to convert {filepath.name} to RGBA: {e}")
return False
def convert_all_logos_to_rgba(self, league: str) -> Tuple[int, int]:
"""Convert all logos in a league directory to RGBA format."""
logo_dir = Path(self.get_logo_directory(league))
if not logo_dir.exists():
logger.warning(f"Logo directory does not exist: {logo_dir}")
return 0, 0
converted_count = 0
failed_count = 0
for logo_file in logo_dir.glob("*.png"):
if self.convert_image_to_rgba(logo_file):
converted_count += 1
else:
failed_count += 1
logger.info(f"Converted {converted_count} logos to RGBA format for {league}, {failed_count} failed")
return converted_count, failed_count
# Helper function to map soccer league codes to logo downloader format
def get_soccer_league_key(league_code: str) -> str:
    """Map a raw soccer league code to the logo-downloader league key.

    Args:
        league_code: Soccer league code (e.g., 'eng.1', 'por.1').

    Returns:
        Logo downloader league key (e.g., 'soccer_eng.1', 'soccer_por.1').
    """
    return "soccer_" + league_code
# Convenience function for easy integration
def download_missing_logo(league: str, team_id: str, team_abbreviation: str, logo_path: Path, logo_url: str | None = None, create_placeholder: bool = True) -> bool:
    """Convenience function to download a missing team logo.

    Args:
        league: League identifier (e.g., 'ncaa_fb', 'nfl').
        team_id: ESPN team identifier, used when *logo_url* is not supplied.
        team_abbreviation: Team abbreviation (e.g., 'UGA', 'BAMA', 'TA&M').
        logo_path: Destination path used by the per-team fallback fetch.
        logo_url: Optional direct logo URL; tried before the team lookup.
        create_placeholder: Currently unused; placeholder creation is
            intentionally disabled (was dead commented-out code).

    Returns:
        True if the logo already exists or was successfully downloaded,
        False otherwise.
    """
    downloader = LogoDownloader()
    # Check whether the logo already exists in the league's standard directory.
    logo_dir = downloader.get_logo_directory(league)
    if not downloader.ensure_logo_directory(logo_dir):
        logger.error(f"Cannot download logo for {team_abbreviation}: directory {logo_dir} is not writable")
        return False
    filename = f"{downloader.normalize_abbreviation(team_abbreviation)}.png"
    filepath = Path(logo_dir) / filename
    if filepath.exists():
        logger.debug(f"Logo already exists for {team_abbreviation} ({league})")
        return True
    # Try to download the real logo first
    logger.info(f"Attempting to download logo for {team_abbreviation} from {league}")
    if logo_url:
        success = downloader.download_logo(logo_url, filepath, team_abbreviation)
        if success:
            time.sleep(0.1)  # small delay to stay polite to the API
            return success
    # NOTE(review): the fallback writes to *logo_path*, which may differ from
    # the *filepath* checked above — confirm callers pass a matching path.
    return downloader.download_missing_logo_for_team(league, team_id, team_abbreviation, logo_path)
def download_all_logos_for_league(league: str, force_download: bool = False) -> Tuple[int, int]:
    """
    Convenience wrapper: download every missing logo for one league.
    Args:
        league: League identifier (e.g., 'ncaa_fb', 'nfl')
        force_download: Whether to re-download existing logos
    Returns:
        Tuple of (downloaded_count, failed_count)
    """
    return LogoDownloader().download_missing_logos_for_league(league, force_download)

212
src/sponsor_manager.py Normal file
View File

@@ -0,0 +1,212 @@
import logging
import os
import random
from PIL import Image, ImageFont
logger = logging.getLogger(__name__)
class EOJHLSponsorsManager:
    def __init__(self, config, display_manager):
        """Initialize from the global config's "sponsors" section.

        Args:
            config: Full application config dict; only config["sponsors"] is used.
            display_manager: Display manager used to render slides.
        """
        self.config = config.get("sponsors", {})
        self.display_manager = display_manager
        self.logger = logging.getLogger("EOJHLSponsorsManager")
        self.sponsors = self._load_sponsors()
        self.index = 0  # rotation cursor into self.sponsors
        # New settings for batch display
        self.show_all = self.config.get("show_all", False)  # show every sponsor at once
        self.show_num_sponsors = self.config.get("show_num_sponsors", 1)  # batch size per rotation
def _load_sponsors(self):
sponsors = []
for key in self.config.get("enabled_sponsors", []):
sponsor_cfg = self.config.get(key, {})
if sponsor_cfg.get("enabled", True):
sponsors.append({
"key": key,
"image": f"assets/sponsors/{sponsor_cfg.get('pngabbr')}.png",
"details_row1": sponsor_cfg.get("details_row1", key),
"details_row2": sponsor_cfg.get("details_row2", "")
})
return sponsors
def get_next_sponsors(self):
"""
Return a list of sponsors to display based on show_all and show_num_sponsors settings.
Advances the index appropriately.
"""
if not self.sponsors:
return []
if self.show_all:
# Show all sponsors, don't advance index
return self.sponsors
else:
# Show a batch of sponsors based on show_num_sponsors
num_to_show = min(self.show_num_sponsors, len(self.sponsors))
sponsors_to_show = []
for i in range(num_to_show):
current_index = (self.index + i) % len(self.sponsors)
sponsors_to_show.append(self.sponsors[current_index])
# Advance index by the number shown
self.index = (self.index + num_to_show) % len(self.sponsors)
return sponsors_to_show
def get_next_sponsor(self):
"""
Legacy method for backward compatibility.
Return next sponsor sequentially and advance index.
"""
if not self.sponsors:
return None
sponsor = self.sponsors[self.index]
self.index = (self.index + 1) % len(self.sponsors)
return sponsor
# --- Slide Renderers ---
    def render_title(self, title_row1=None, title_row2=None):
        """Render the two-row "thank you" title slide, vertically centered.

        Args:
            title_row1: Override text for row 1 (falls back to config).
            title_row2: Override text for row 2 (falls back to config).

        Returns:
            Configured display duration in seconds, for sequencing.
        """
        dm = self.display_manager
        dm.clear()
        # Resolve text from arguments or global config
        row1_text = title_row1 or self.config.get("title_row1", "Thank You to")
        row2_text = title_row2 or self.config.get("title_row2", "Our Sponsor")
        # Resolve font sizes and colours from global config
        row1_size = self.config.get("title_row1_font_size", 10)
        row2_size = self.config.get("title_row2_font_size", 10)
        row1_colour = self.config.get("title_row1_colour", "yellow")
        row2_colour = self.config.get("title_row2_colour", "yellow")
        try:
            font_row1 = ImageFont.truetype("assets/fonts/PressStart2P-Regular.ttf", row1_size)
            font_row2 = ImageFont.truetype("assets/fonts/PressStart2P-Regular.ttf", row2_size)
        except Exception as e:
            # Fall back to the display manager's built-in fonts.
            self.logger.warning(f"[Sponsors] Title font load failed: {e}")
            font_row1, font_row2 = dm.regular_font, dm.small_font
        # Build lines
        lines = []
        if row1_text:
            lines.append((row1_text, font_row1, row1_colour))
        if row2_text:
            lines.append((row2_text, font_row2, row2_colour))
        # Vertical centering
        total_h = 0
        heights = []
        for text, font, _ in lines:
            bbox = dm.draw.textbbox((0, 0), text, font=font)
            h = bbox[3] - bbox[1]
            heights.append(h)
            total_h += h
        total_h += (len(lines) - 1) * 2  # spacing between rows
        y = (dm.height - total_h) // 2
        # Draw each line centered
        for (text, font, color), h in zip(lines, heights):
            bbox = dm.draw.textbbox((0, 0), text, font=font)
            w = bbox[2] - bbox[0]
            x = (dm.width - w) // 2
            dm.draw.text((x, y), text, font=font, fill=color)
            y += h + 2
        dm.update_display()
        # Return configured duration for sequencing
        return self.config.get("title_duration", 5)
    def render_logo(self, sponsor):
        """Render a sponsor's logo scaled to fill the full matrix height.

        Args:
            sponsor: Sponsor dict from _load_sponsors (uses its "image" path).
        """
        dm = self.display_manager
        dm.clear()
        logo_path = sponsor["image"]
        if os.path.exists(logo_path):
            logo = Image.open(logo_path).convert("RGBA")
            # Scale proportionally so the logo fills the full matrix height
            scale = dm.height / logo.height
            new_w = int(logo.width * scale)
            new_h = dm.height
            logo = logo.resize((new_w, new_h), Image.Resampling.LANCZOS)
            # Center horizontally and vertically
            offset_x = (dm.width - new_w) // 2
            offset_y = (dm.height - new_h) // 2
            # Paste logo directly onto dm.image with alpha mask
            dm.image.paste(logo, (offset_x, offset_y), logo)
        else:
            # Missing file: show a blank slide rather than crashing.
            self.logger.warning(f"[Sponsors] Logo not found: {logo_path}")
        dm.update_display()
    def render_details(self, sponsor):
        """Render a sponsor's two detail rows, centered on the display.

        Per-sponsor config can override the global show_details flag as well
        as each row's font size and colour. A details_row2 value of "NONE"
        (any case) suppresses the second row.

        Args:
            sponsor: Sponsor dict from _load_sponsors.

        Returns:
            Configured display duration in seconds, or 0 when details are
            disabled for this sponsor.
        """
        dm = self.display_manager
        dm.clear()
        # Look up the sponsor-specific config (if any)
        sponsor_cfg = self.config.get(sponsor["key"], {})
        # Global default (sponsors.show_details) can be overridden per sponsor
        global_show = self.config.get("show_details", True)
        sponsor_show = sponsor_cfg.get("show_details", global_show)
        if not sponsor_show:
            self.logger.info(f"[Sponsors] Skipping details for {sponsor['key']} (show_details=false)")
            return 0
        # Resolve font sizes and colours with per-sponsor override → global default → hardcoded fallback
        details_row1_font_size = sponsor_cfg.get("details_row1_font_size",
                                                 self.config.get("details_row1_font_size", 10))
        details_row2_font_size = sponsor_cfg.get("details_row2_font_size",
                                                 self.config.get("details_row2_font_size", 8))
        details_row1_colour = sponsor_cfg.get("details_row1_colour",
                                              self.config.get("details_row1_colour", "white"))
        details_row2_colour = sponsor_cfg.get("details_row2_colour",
                                              self.config.get("details_row2_colour", "blue"))
        try:
            font_details_row1 = ImageFont.truetype(
                "assets/fonts/PressStart2P-Regular.ttf", details_row1_font_size
            )
            font_details_row2 = ImageFont.truetype(
                "assets/fonts/PressStart2P-Regular.ttf", details_row2_font_size
            )
        except Exception as e:
            # Fall back to the display manager's built-in fonts.
            self.logger.warning(f"[Sponsors] Detail font load failed: {e}")
            font_details_row1, font_details_row2 = dm.regular_font, dm.small_font
        # Build lines with resolved colours
        lines = []
        if sponsor.get("details_row1"):
            lines.append((sponsor["details_row1"], font_details_row1, details_row1_colour))
        if sponsor.get("details_row2") and sponsor["details_row2"].upper() != "NONE":
            lines.append((sponsor["details_row2"], font_details_row2, details_row2_colour))
        # Vertical centering
        total_h = 0
        heights = []
        for text, font, _ in lines:
            bbox = dm.draw.textbbox((0, 0), text, font=font)
            h = bbox[3] - bbox[1]
            heights.append(h)
            total_h += h
        total_h += (len(lines) - 1) * 2
        y = (dm.height - total_h) // 2
        for (text, font, color), h in zip(lines, heights):
            bbox = dm.draw.textbbox((0, 0), text, font=font)
            w = bbox[2] - bbox[0]
            x = (dm.width - w) // 2
            dm.draw.text((x, y), text, font=font, fill=color)
            y += h + 2
        dm.update_display()
        # Return the configured duration if we actually rendered
        return self.config.get("details_duration", 5)

211
src/static_image_manager.py Normal file
View File

@@ -0,0 +1,211 @@
import logging
import os
import time
from typing import Optional, Tuple
from PIL import Image, ImageOps
import json
from .display_manager import DisplayManager
logger = logging.getLogger(__name__)
class StaticImageManager:
"""
Manager for displaying static images on the LED matrix.
Supports image scaling, transparency, and configurable display duration.
"""
    def __init__(self, display_manager: DisplayManager, config: dict):
        """Initialize from the global config's "static_image" section.

        Args:
            display_manager: Display manager whose matrix receives the image.
            config: Full application config dict (also read for display durations).
        """
        self.display_manager = display_manager
        self.config = config.get('static_image', {})
        # Configuration
        self.enabled = self.config.get('enabled', False)
        self.image_path = self.config.get('image_path', '')
        # Get display duration from main display_durations block
        self.display_duration = config.get('display', {}).get('display_durations', {}).get('static_image', 10)
        self.fit_to_display = self.config.get('fit_to_display', True) # Auto-fit to display dimensions
        self.preserve_aspect_ratio = self.config.get('preserve_aspect_ratio', True)
        self.background_color = tuple(self.config.get('background_color', [0, 0, 0]))
        # State
        self.current_image = None  # processed, display-sized PIL image (or None)
        self.image_loaded = False
        self.last_update_time = 0
        # Load initial image if enabled
        if self.enabled and self.image_path:
            self._load_image()
def _load_image(self) -> bool:
"""
Load and process the image for display.
Returns True if successful, False otherwise.
"""
if not self.image_path or not os.path.exists(self.image_path):
logger.warning(f"[Static Image] Image file not found: {self.image_path}")
return False
try:
# Load the image
img = Image.open(self.image_path)
# Convert to RGBA to handle transparency
if img.mode != 'RGBA':
img = img.convert('RGBA')
# Get display dimensions
display_width = self.display_manager.matrix.width
display_height = self.display_manager.matrix.height
# Calculate target size - always fit to display while preserving aspect ratio
target_size = self._calculate_fit_size(img.size, (display_width, display_height))
# Resize image
if self.preserve_aspect_ratio:
img = img.resize(target_size, Image.Resampling.LANCZOS)
else:
img = img.resize((display_width, display_height), Image.Resampling.LANCZOS)
# Create display-sized canvas with background color
canvas = Image.new('RGB', (display_width, display_height), self.background_color)
# Calculate position to center the image
paste_x = (display_width - img.width) // 2
paste_y = (display_height - img.height) // 2
# Handle transparency by compositing
if img.mode == 'RGBA':
# Create a temporary image with the background color
temp_canvas = Image.new('RGB', (display_width, display_height), self.background_color)
temp_canvas.paste(img, (paste_x, paste_y), img)
canvas = temp_canvas
else:
canvas.paste(img, (paste_x, paste_y))
self.current_image = canvas
self.image_loaded = True
self.last_update_time = time.time()
logger.info(f"[Static Image] Successfully loaded and processed image: {self.image_path}")
logger.info(f"[Static Image] Original size: {Image.open(self.image_path).size}, "
f"Display size: {target_size}, Position: ({paste_x}, {paste_y})")
return True
except Exception as e:
logger.error(f"[Static Image] Error loading image {self.image_path}: {e}")
self.image_loaded = False
return False
def _calculate_fit_size(self, image_size: Tuple[int, int], display_size: Tuple[int, int]) -> Tuple[int, int]:
"""
Calculate the size to fit an image within display bounds while preserving aspect ratio.
"""
img_width, img_height = image_size
display_width, display_height = display_size
# Calculate scaling factor to fit within display
scale_x = display_width / img_width
scale_y = display_height / img_height
scale = min(scale_x, scale_y)
return (int(img_width * scale), int(img_height * scale))
def update(self):
"""
Update method - no continuous updates needed for static images.
"""
pass
def display(self, force_clear: bool = False):
"""
Display the static image on the LED matrix.
"""
if not self.enabled or not self.image_loaded or not self.current_image:
if self.enabled:
logger.warning("[Static Image] Manager enabled but no image loaded")
return
# Clear display if requested
if force_clear:
self.display_manager.clear()
# Set the image on the display manager
self.display_manager.image = self.current_image.copy()
# Update the display
self.display_manager.update_display()
logger.debug(f"[Static Image] Displayed image: {self.image_path}")
def set_image_path(self, image_path: str) -> bool:
"""
Set a new image path and load it.
Returns True if successful, False otherwise.
"""
self.image_path = image_path
return self._load_image()
def set_fit_to_display(self, fit_to_display: bool):
"""
Set the fit to display option and reload the image.
"""
self.fit_to_display = fit_to_display
if self.image_path:
self._load_image()
logger.info(f"[Static Image] Fit to display set to: {self.fit_to_display}")
def set_display_duration(self, duration: int):
"""
Set the display duration in seconds.
"""
self.display_duration = max(1, duration) # Minimum 1 second
logger.info(f"[Static Image] Display duration set to: {self.display_duration} seconds")
def set_background_color(self, color: Tuple[int, int, int]):
"""
Set the background color and reload the image.
"""
self.background_color = color
if self.image_path:
self._load_image()
logger.info(f"[Static Image] Background color set to: {self.background_color}")
def get_image_info(self) -> dict:
"""
Get information about the currently loaded image.
"""
if not self.image_loaded or not self.current_image:
return {"loaded": False}
return {
"loaded": True,
"path": self.image_path,
"display_size": self.current_image.size,
"fit_to_display": self.fit_to_display,
"display_duration": self.display_duration,
"background_color": self.background_color
}
def reload_image(self) -> bool:
"""
Reload the current image.
"""
if not self.image_path:
logger.warning("[Static Image] No image path set for reload")
return False
return self._load_image()
def is_enabled(self) -> bool:
"""
Check if the manager is enabled.
"""
return self.enabled
def get_display_duration(self) -> int:
"""
Get the display duration in seconds.
"""
return self.display_duration

259
src/text_display.py Normal file
View File

@@ -0,0 +1,259 @@
import logging
import time
from PIL import ImageFont, Image, ImageDraw
import freetype
import os
from .display_manager import DisplayManager
logger = logging.getLogger(__name__)
class TextDisplay:
def __init__(self, display_manager: DisplayManager, config: dict):
self.display_manager = display_manager
self.config = config.get('text_display', {})
self.text = self.config.get('text', "Hello, World!")
self.font_path = self.config.get('font_path', "assets/fonts/PressStart2P-Regular.ttf")
self.font_size = self.config.get('font_size', 8)
self.scroll_enabled = self.config.get('scroll', False)
self.text_color = tuple(self.config.get('text_color', [255, 255, 255]))
self.bg_color = tuple(self.config.get('background_color', [0, 0, 0]))
# scroll_gap_width defaults to the width of the display matrix
self.scroll_gap_width = self.config.get('scroll_gap_width', self.display_manager.matrix.width)
self.font = self._load_font()
self.text_content_width = 0 # Pixel width of the actual text string
self.text_image_cache = None # For pre-rendered text (PIL.Image)
self.cached_total_scroll_width = 0 # Total width of the cache: text_content_width + scroll_gap_width
self._regenerate_renderings() # Initial creation of cache and width calculation
self.scroll_pos = 0.0 # Use float for precision
self.last_update_time = time.time()
self.scroll_speed = self.config.get('scroll_speed', 30) # Pixels per second
def _regenerate_renderings(self):
"""Calculate text width and attempt to create/update the text image cache."""
if not self.text or not self.font:
self.text_content_width = 0
self.text_image_cache = None
self.cached_total_scroll_width = 0
return
try:
self.text_content_width = self.display_manager.get_text_width(self.text, self.font)
except Exception as e:
logger.error(f"Error calculating text content width: {e}")
self.text_content_width = 0
self.text_image_cache = None
self.cached_total_scroll_width = 0
return
self._create_text_image_cache()
self.scroll_pos = 0.0 # Reset scroll position when text/font/colors change
    def _create_text_image_cache(self):
        """Pre-render the text onto an image if using a TTF font. Includes a trailing gap.

        BDF/freetype fonts are drawn directly at display time, so no cache is
        built for them. On failure the cache is cleared.
        """
        self.text_image_cache = None  # Clear previous cache
        self.cached_total_scroll_width = 0
        # Nothing to cache without text, a font, or a measured width.
        if not self.text or not self.font or self.text_content_width == 0:
            return
        if isinstance(self.font, freetype.Face):
            logger.info("TextDisplay: Pre-rendering cache is not used for BDF/freetype fonts. Will use direct drawing.")
            # For BDF, the "scroll width" for reset purposes is handled by the direct drawing logic's conditions
            return
        # --- TTF Caching Path ---
        try:
            # Measure the text with a throwaway 1x1 draw context; bbox[1] is
            # the vertical offset from the draw origin to the top of the ink.
            dummy_img = Image.new('RGB', (1, 1))
            dummy_draw = ImageDraw.Draw(dummy_img)
            bbox = dummy_draw.textbbox((0, 0), self.text, font=self.font)
            actual_text_render_height = bbox[3] - bbox[1]
            # Total width of the cache is the text width plus the configured gap
            self.cached_total_scroll_width = self.text_content_width + self.scroll_gap_width
            cache_height = self.display_manager.matrix.height
            self.text_image_cache = Image.new('RGB', (self.cached_total_scroll_width, cache_height), self.bg_color)
            draw_cache = ImageDraw.Draw(self.text_image_cache)
            # Vertically center the visible ink (not the nominal baseline
            # origin) by compensating for bbox[1].
            desired_top_edge = (cache_height - actual_text_render_height) // 2
            y_draw_on_cache = desired_top_edge - bbox[1]
            # Draw the text at the beginning of the cache
            draw_cache.text((0, y_draw_on_cache), self.text, font=self.font, fill=self.text_color)
            # The rest of the image (the gap) is already bg_color
            logger.info(f"TextDisplay: Created text cache for '{self.text[:30]}...' (TTF). Text width: {self.text_content_width}, Gap: {self.scroll_gap_width}, Total cache width: {self.cached_total_scroll_width}x{cache_height}")
        except Exception as e:
            logger.error(f"TextDisplay: Failed to create text image cache: {e}", exc_info=True)
            self.text_image_cache = None
            self.cached_total_scroll_width = 0
def _load_font(self):
"""Load the specified font file (TTF or BDF)."""
font_path = self.font_path
# Resolve relative paths against project root based on this file location
if not os.path.isabs(font_path):
base_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
font_path = os.path.join(base_path, font_path)
logger.info(f"Attempting to load font: {font_path} at size {self.font_size}")
if not os.path.exists(font_path):
logger.error(f"Font file not found: {font_path}. Falling back to default.")
return self.display_manager.regular_font
try:
if font_path.lower().endswith('.ttf'):
font = ImageFont.truetype(font_path, self.font_size)
logger.info(f"Loaded TTF font: {self.font_path}")
return font
elif font_path.lower().endswith('.bdf'):
face = freetype.Face(font_path)
face.set_pixel_sizes(0, self.font_size)
logger.info(f"Loaded BDF font: {self.font_path} with freetype")
return face
else:
logger.warning(f"Unsupported font type: {font_path}. Falling back.")
return self.display_manager.regular_font
except Exception as e:
logger.error(f"Failed to load font {font_path}: {e}", exc_info=True)
return self.display_manager.regular_font
    # _calculate_text_width is effectively replaced by logic in _regenerate_renderings
    # but kept for direct calls if ever needed, or as a reference to DisplayManager's method
    def _calculate_text_width(self):
        """DEPRECATED somewhat: Get text width. Relies on self.text_content_width set by _regenerate_renderings.

        Returns the pixel width of the current text, or 0 on failure.
        """
        try:
            return self.display_manager.get_text_width(self.text, self.font)
        except Exception as e:
            logger.error(f"Error calculating text width: {e}")
            return 0
def update(self):
"""Update scroll position if scrolling is enabled."""
# Scrolling is only meaningful if the actual text content is wider than the screen,
# or if a cache is used (which implies scrolling over text + gap).
# The condition self.text_content_width <= self.display_manager.matrix.width handles non-scrolling for static text.
if not self.scroll_enabled or (not self.text_image_cache and self.text_content_width <= self.display_manager.matrix.width):
self.scroll_pos = 0.0
return
current_time = time.time()
delta_time = current_time - self.last_update_time
self.last_update_time = current_time
scroll_delta = delta_time * self.scroll_speed
self.scroll_pos += scroll_delta
if self.text_image_cache:
# Using cached image: scroll_pos loops over the total cache width (text + gap)
if self.cached_total_scroll_width > 0 and self.scroll_pos >= self.cached_total_scroll_width:
self.scroll_pos %= self.cached_total_scroll_width
else:
# Not using cache (e.g., BDF direct drawing):
# Reset when text fully scrolled past left edge + matrix width (original behavior creating a conceptual gap)
# self.text_content_width is used here as it refers to the actual text being drawn directly.
if self.text_content_width > 0 and self.scroll_pos > self.text_content_width + self.display_manager.matrix.width:
self.scroll_pos = 0.0
    def display(self):
        """Draw the text onto the display manager's canvas.

        Uses the pre-rendered TTF cache (with wrap-around cropping) when
        scrolling, otherwise falls back to direct drawing (BDF fonts and
        static text).
        """
        dm = self.display_manager
        matrix_width = dm.matrix.width
        matrix_height = dm.matrix.height
        # Fresh background-colored canvas every frame.
        dm.image = Image.new('RGB', (matrix_width, matrix_height), self.bg_color)
        dm.draw = ImageDraw.Draw(dm.image)
        if not self.text or self.text_content_width == 0:
            dm.update_display()
            return
        # Use pre-rendered cache if available and scrolling is active
        # Scrolling via cache is only relevant if the actual text content itself is wider than the matrix,
        # or if we want to scroll a short text with a large gap.
        # The self.cached_total_scroll_width > matrix_width implies the content (text+gap) is scrollable.
        if self.text_image_cache and self.scroll_enabled and self.cached_total_scroll_width > matrix_width :
            current_scroll_int = int(self.scroll_pos)
            # Window of the cache strip currently visible on screen.
            source_x1 = current_scroll_int
            source_x2 = current_scroll_int + matrix_width
            if source_x2 <= self.cached_total_scroll_width:
                # Entire window lies within the cache: one crop suffices.
                segment = self.text_image_cache.crop((source_x1, 0, source_x2, matrix_height))
                dm.image.paste(segment, (0, 0))
            else:
                # Wrap-around: paste two parts from cache
                width1 = self.cached_total_scroll_width - source_x1
                if width1 > 0:
                    segment1 = self.text_image_cache.crop((source_x1, 0, self.cached_total_scroll_width, matrix_height))
                    dm.image.paste(segment1, (0, 0))
                # Fill the remainder of the screen from the start of the cache.
                remaining_width_for_screen = matrix_width - width1
                if remaining_width_for_screen > 0:
                    segment2 = self.text_image_cache.crop((0, 0, remaining_width_for_screen, matrix_height))
                    dm.image.paste(segment2, (width1 if width1 > 0 else 0, 0))
        else:
            # Fallback: Direct drawing (BDF, static TTF, or TTF text that fits screen and isn't forced to scroll by gap)
            final_y_for_draw = 0
            try:
                if isinstance(self.font, freetype.Face):
                    # freetype stores the pixel size in 26.6 fixed point; >> 6 converts to pixels.
                    text_render_height = self.font.size.height >> 6
                    final_y_for_draw = (matrix_height - text_render_height) // 2
                else:
                    # PIL path: compensate for the bbox top offset so the ink is centered.
                    pil_bbox = dm.draw.textbbox((0, 0), self.text, font=self.font)
                    text_render_height = pil_bbox[3] - pil_bbox[1]
                    final_y_for_draw = (matrix_height - text_render_height) // 2 - pil_bbox[1]
            except Exception as e:
                logger.warning(f"TextDisplay: Could not calculate text height for direct drawing: {e}. Using y=0.", exc_info=True)
                final_y_for_draw = 0
            if self.scroll_enabled and self.text_content_width > matrix_width:
                # Scrolling text (direct drawing path, e.g., for BDF)
                x_draw_pos = matrix_width - int(self.scroll_pos)  # scroll_pos for BDF already considers a type of gap for reset
                dm.draw_text(
                    text=self.text, x=x_draw_pos, y=final_y_for_draw,
                    color=self.text_color, font=self.font
                )
            else:
                # Static text (centered horizontally)
                x_draw_pos = (matrix_width - self.text_content_width) // 2
                dm.draw_text(
                    text=self.text, x=x_draw_pos, y=final_y_for_draw,
                    color=self.text_color, font=self.font
                )
        dm.update_display()
    def set_text(self, new_text: str):
        """Replace the displayed text and rebuild the render cache."""
        self.text = new_text
        self._regenerate_renderings()
    def set_font(self, font_path: str, font_size: int):
        """Switch to a new font file/size, reload it, and rebuild the cache."""
        self.font_path = font_path
        self.font_size = font_size
        self.font = self._load_font()
        self._regenerate_renderings()
    def set_color(self, text_color: tuple, bg_color: tuple):
        """Change foreground/background RGB colors and rebuild the cache."""
        self.text_color = text_color
        self.bg_color = bg_color
        self._regenerate_renderings()
    def set_scroll_enabled(self, enabled: bool):
        """Toggle scrolling; scroll position restarts from the left edge."""
        self.scroll_enabled = enabled
        self.scroll_pos = 0.0
        # Cache regeneration is not strictly needed, display logic handles scroll_enabled.
    def set_scroll_speed(self, speed: float):
        """Set the scroll rate in pixels per second."""
        self.scroll_speed = speed
    def set_scroll_gap_width(self, gap_width: int):
        """Set the trailing blank gap width (pixels) and rebuild the cache."""
        self.scroll_gap_width = gap_width
        self._regenerate_renderings()  # Gap change requires cache rebuild