remove cache checking from all sports live displays

This commit is contained in:
Chuck
2025-07-23 20:31:25 -05:00
parent 4994d8ac21
commit 18b0a9703e
8 changed files with 160 additions and 569 deletions

View File

@@ -315,8 +315,15 @@ class BaseMiLBManager:
logger.error(f"Error formatting game time: {e}")
return "TBD"
def _fetch_milb_api_data(self) -> Dict[str, Any]:
def _fetch_milb_api_data(self, use_cache: bool = True) -> Dict[str, Any]:
"""Fetch MiLB game data from the MLB Stats API."""
cache_key = "milb_api_data"
if use_cache:
cached_data = self.cache_manager.get_with_auto_strategy(cache_key)
if cached_data:
self.logger.info("Using cached MiLB API data.")
return cached_data
try:
# Check if test mode is enabled
if self.milb_config.get('test_mode', False):
@@ -444,6 +451,8 @@ class BaseMiLBManager:
all_games[game_pk] = game_data
if use_cache:
self.cache_manager.set(cache_key, all_games)
return all_games
except Exception as e:
@@ -632,7 +641,7 @@ class MiLBLiveManager(BaseMiLBManager):
self.current_game["away_score"] = str(int(self.current_game["away_score"]) + 1)
else:
# Fetch live game data from MiLB API
games = self._fetch_milb_api_data()
games = self._fetch_milb_api_data(use_cache=False)
if games:
# Find all live games involving favorite teams
new_live_games = []
@@ -975,7 +984,7 @@ class MiLBRecentManager(BaseMiLBManager):
try:
# Fetch data from MiLB API
games = self._fetch_milb_api_data()
games = self._fetch_milb_api_data(use_cache=True)
if not games:
logger.warning("[MiLB] No games returned from API")
return
@@ -1129,7 +1138,7 @@ class MiLBUpcomingManager(BaseMiLBManager):
try:
# Fetch data from MiLB API
games = self._fetch_milb_api_data()
games = self._fetch_milb_api_data(use_cache=True)
if not games:
self.logger.warning("[MiLB] No games returned from API for upcoming games update.")
return

View File

@@ -265,106 +265,40 @@ class BaseNBAManager:
self.logger.error(f"Error loading logo for {team_abbrev}: {e}", exc_info=True)
return None
@classmethod
def _fetch_shared_data(cls, date_str: str = None) -> Optional[Dict]:
def _fetch_nba_api_data(self, use_cache: bool = True) -> Optional[Dict]:
"""Fetch and cache data for all managers to share."""
current_time = time.time()
# If we have recent data, use it
if cls._shared_data and (current_time - cls._last_shared_update) < 300: # 5 minutes
return cls._shared_data
try:
# Check cache first
cache_key = date_str if date_str else 'today'
cached_data = cls.cache_manager.get(cache_key)
now = datetime.now(pytz.utc)
date_str = now.strftime('%Y%m%d')
cache_key = f"nba_api_data_{date_str}"
if use_cache:
cached_data = self.cache_manager.get(cache_key)
if cached_data:
cls.logger.info(f"[NBA] Using cached data for {cache_key}")
cls._shared_data = cached_data
cls._last_shared_update = current_time
self.logger.info(f"[NBA] Using cached data for {date_str}")
return cached_data
# If not in cache or stale, fetch from API
try:
url = ESPN_NBA_SCOREBOARD_URL
params = {}
if date_str:
params['dates'] = date_str
params = {'dates': date_str}
response = requests.get(url, params=params)
response.raise_for_status()
data = response.json()
cls.logger.info(f"[NBA] Successfully fetched data from ESPN API")
# Cache the response
cls.cache_manager.update_cache(cache_key, data)
cls._shared_data = data
cls._last_shared_update = current_time
# If no date specified, fetch data from multiple days
if not date_str:
# Get today's date in YYYYMMDD format
today = datetime.now(timezone.utc).date()
dates_to_fetch = [
(today - timedelta(days=2)).strftime('%Y%m%d'),
(today - timedelta(days=1)).strftime('%Y%m%d'),
today.strftime('%Y%m%d')
]
if use_cache:
self.cache_manager.set(cache_key, data)
# Fetch data for each date
all_events = []
for fetch_date in dates_to_fetch:
if fetch_date != today.strftime('%Y%m%d'): # Skip today as we already have it
# Check cache for this date
cached_date_data = cls.cache_manager.get(fetch_date)
if cached_date_data:
cls.logger.info(f"[NBA] Using cached data for date {fetch_date}")
if "events" in cached_date_data:
all_events.extend(cached_date_data["events"])
continue
params['dates'] = fetch_date
response = requests.get(url, params=params)
response.raise_for_status()
date_data = response.json()
if date_data and "events" in date_data:
all_events.extend(date_data["events"])
cls.logger.info(f"[NBA] Fetched {len(date_data['events'])} events for date {fetch_date}")
# Cache the response
cls.cache_manager.update_cache(fetch_date, date_data)
# Combine events from all dates
if all_events:
data["events"].extend(all_events)
cls.logger.info(f"[NBA] Combined {len(data['events'])} total events from all dates")
cls._shared_data = data
cls._last_shared_update = current_time
self.logger.info(f"[NBA] Successfully fetched data from ESPN API for {date_str}")
return data
except requests.exceptions.RequestException as e:
cls.logger.error(f"[NBA] Error fetching data from ESPN: {e}")
self.logger.error(f"[NBA] Error fetching data from ESPN: {e}")
return None
def _fetch_data(self, date_str: str = None) -> Optional[Dict]:
"""Fetch data using shared data mechanism."""
# For live games, bypass the shared cache to ensure fresh data
if isinstance(self, NBALiveManager):
try:
url = ESPN_NBA_SCOREBOARD_URL
params = {}
if date_str:
params['dates'] = date_str
response = requests.get(url, params=params)
response.raise_for_status()
data = response.json()
self.logger.info(f"[NBA] Successfully fetched live game data from ESPN API")
return data
except requests.exceptions.RequestException as e:
self.logger.error(f"[NBA] Error fetching live game data from ESPN: {e}")
return None
return self._fetch_nba_api_data(use_cache=False)
else:
# For non-live games, use the shared cache
return self._fetch_shared_data(date_str)
return self._fetch_nba_api_data(use_cache=True)
def _fetch_odds(self, game: Dict) -> None:
"""Fetch odds for a specific game if conditions are met."""

View File

@@ -373,8 +373,15 @@ class BaseNCAABaseballManager:
logger.error(f"[NCAABaseball] Error formatting game time: {e}")
return "TBD"
def _fetch_ncaa_baseball_api_data(self) -> Dict[str, Any]:
def _fetch_ncaa_baseball_api_data(self, use_cache: bool = True) -> Dict[str, Any]:
"""Fetch NCAA Baseball game data from the ESPN API."""
cache_key = "ncaa_baseball_api_data"
if use_cache:
cached_data = self.cache_manager.get_with_auto_strategy(cache_key)
if cached_data:
self.logger.info("Using cached NCAA Baseball API data.")
return cached_data
try:
# Check if test mode is enabled
if self.ncaa_baseball_config.get('test_mode', False):
@@ -525,6 +532,8 @@ class BaseNCAABaseballManager:
for game in favorite_games:
self.logger.info(f"[NCAABaseball] Favorite team game: {game['away_team']} @ {game['home_team']} (Status: {game['status']}, State: {game['status_state']})")
if use_cache:
self.cache_manager.set(cache_key, all_games)
return all_games
except Exception as e:
@@ -593,7 +602,7 @@ class NCAABaseballLiveManager(BaseNCAABaseballManager):
if self.current_game["inning"] % 2 == 0: self.current_game["home_score"] = str(int(self.current_game["home_score"]) + 1)
else: self.current_game["away_score"] = str(int(self.current_game["away_score"]) + 1)
else:
games = self._fetch_ncaa_baseball_api_data()
games = self._fetch_ncaa_baseball_api_data(use_cache=False)
if games:
new_live_games = []
for game in games.values():
@@ -849,7 +858,7 @@ class NCAABaseballRecentManager(BaseNCAABaseballManager):
return
self.last_update = current_time
try:
games = self._fetch_ncaa_baseball_api_data()
games = self._fetch_ncaa_baseball_api_data(use_cache=True)
if not games:
logger.warning("[NCAABaseball] No games returned from API")
self.recent_games = []
@@ -955,7 +964,7 @@ class NCAABaseballUpcomingManager(BaseNCAABaseballManager):
return
self.last_update = current_time
try:
games = self._fetch_ncaa_baseball_api_data()
games = self._fetch_ncaa_baseball_api_data(use_cache=True)
if games:
new_upcoming_games = []
now = datetime.now(timezone.utc)

View File

@@ -158,141 +158,54 @@ class BaseNCAAFBManager: # Renamed class
except Exception as e:
self.logger.error(f"Error fetching odds for game {game.get('id', 'N/A')}: {e}")
def _fetch_shared_data(self) -> Optional[Dict]:
def _fetch_ncaa_fb_api_data(self, use_cache: bool = True) -> Optional[Dict]:
"""
Fetches the full season schedule for NCAAFB, caches it, and then filters
for relevant games based on the current configuration.
Caching Strategy:
- Season schedules: Cached for 24 hours (configurable) - schedules rarely change
- Live games: Cached for 60 seconds - scores update frequently
- Processed data: Cached for 5 minutes - avoids re-processing
- Recent/Upcoming: Use shared season data + local processing cache
"""
now = datetime.now(pytz.utc)
current_year = now.year
# NCAAFB season spans years, so we might need to check last year too if it's early in the current year
years_to_check = [current_year]
if now.month < 8: # If it's before August, check previous year's schedule too
if now.month < 8:
years_to_check.append(current_year - 1)
all_events = []
for year in years_to_check:
cache_key = f"ncaafb_schedule_{year}"
# Use much longer cache duration for season schedules (configurable, default 24 hours)
# Season schedules rarely change and can be cached for days
cached_data = BaseNCAAFBManager.cache_manager.get(cache_key, max_age=self.season_cache_duration)
if cached_data:
self.logger.info(f"[NCAAFB] Using cached schedule for {year}")
all_events.extend(cached_data)
else:
self.logger.info(f"[NCAAFB] Fetching full {year} season schedule from ESPN API...")
try:
# Fetching only regular season type for now. Can be expanded.
url = f"https://site.api.espn.com/apis/site/v2/sports/football/college-football/scoreboard?dates={year}&seasontype=2"
response = self.session.get(url, headers=self.headers, timeout=15)
response.raise_for_status()
data = response.json()
events = data.get('events', [])
BaseNCAAFBManager.cache_manager.update_cache(cache_key, events)
self.logger.info(f"[NCAAFB] Successfully fetched and cached {len(events)} events for the {year} season.")
all_events.extend(events)
except requests.exceptions.RequestException as e:
self.logger.error(f"[NCAAFB] API error fetching full schedule for {year}: {e}")
if use_cache:
cached_data = self.cache_manager.get(cache_key, max_age=self.season_cache_duration)
if cached_data:
self.logger.info(f"[NCAAFB] Using cached schedule for {year}")
all_events.extend(cached_data)
continue
self.logger.info(f"[NCAAFB] Fetching full {year} season schedule from ESPN API...")
try:
url = f"https://site.api.espn.com/apis/site/v2/sports/football/college-football/scoreboard?dates={year}&seasontype=2"
response = self.session.get(url, headers=self.headers, timeout=15)
response.raise_for_status()
data = response.json()
events = data.get('events', [])
if use_cache:
self.cache_manager.set(cache_key, events)
self.logger.info(f"[NCAAFB] Successfully fetched and cached {len(events)} events for {year} season.")
all_events.extend(events)
except requests.exceptions.RequestException as e:
self.logger.error(f"[NCAAFB] API error fetching full schedule for {year}: {e}")
continue
if not all_events:
self.logger.warning("[NCAAFB] No events found in the schedule data for checked years.")
self.logger.warning("[NCAAFB] No events found in schedule data.")
return None
# Filter the events for live, upcoming, and recent games
live_events = []
upcoming_events = []
past_events = []
for event in all_events:
status = event.get('status', {}).get('type', {}).get('name', 'unknown').lower()
is_live = status in ('status_in_progress', 'status_halftime')
is_upcoming = status in ('status_scheduled', 'status_pre_game')
is_final = status == 'status_final'
if is_live:
live_events.append(event)
elif is_upcoming:
upcoming_events.append(event)
elif is_final:
past_events.append(event)
# Sort games by date
upcoming_events.sort(key=lambda x: x['date'])
past_events.sort(key=lambda x: x['date'], reverse=True)
# Include all games in shared data - let individual managers filter by count
selected_upcoming = upcoming_events
selected_past = past_events
# Combine all relevant events into a single list
BaseNCAAFBManager.all_events = live_events + selected_upcoming + selected_past
self.logger.info(f"[NCAAFB] Processed schedule: {len(live_events)} live, {len(selected_upcoming)} upcoming, {len(selected_past)} recent games.")
# Return the data in the expected format
return {'events': BaseNCAAFBManager.all_events}
def _get_cached_processed_games(self, manager_type: str) -> Optional[List[Dict]]:
"""Get cached processed games for a specific manager type."""
current_time = time.time()
cache_key = f"processed_games_{manager_type}"
# Cache processed games for 5 minutes
if (current_time - BaseNCAAFBManager._processed_games_timestamp < 300 and
cache_key in BaseNCAAFBManager._processed_games_cache):
return BaseNCAAFBManager._processed_games_cache[cache_key]
return None
def _cache_processed_games(self, manager_type: str, games: List[Dict]) -> None:
"""Cache processed games for a specific manager type."""
cache_key = f"processed_games_{manager_type}"
BaseNCAAFBManager._processed_games_cache[cache_key] = games
BaseNCAAFBManager._processed_games_timestamp = time.time()
return {'events': all_events}
def _fetch_data(self, date_str: str = None) -> Optional[Dict]:
"""Fetch data using shared data mechanism or direct fetch for live."""
# Check if the instance is NCAAFBLiveManager
if isinstance(self, NCAAFBLiveManager): # Changed class name
# For live games, use shorter cache duration (60 seconds)
# Live scores can be fetched more frequently if needed
cache_key = f"ncaafb_live_{date_str or 'current'}"
cached_data = self.cache_manager.get(cache_key, max_age=60)
if cached_data:
self.logger.debug(f"[NCAAFB] Using cached live data")
return cached_data
try:
url = ESPN_NCAAFB_SCOREBOARD_URL # Use NCAA FB URL
params = {}
if date_str:
params['dates'] = date_str
response = requests.get(url, params=params)
response.raise_for_status()
data = response.json()
# Cache live data for 60 seconds
self.cache_manager.update_cache(cache_key, data)
self.logger.info(f"[NCAAFB] Successfully fetched live game data from ESPN API")
return data
except requests.exceptions.RequestException as e:
self.logger.error(f"[NCAAFB] Error fetching live game data from ESPN: {e}")
return None
if isinstance(self, NCAAFBLiveManager):
return self._fetch_ncaa_fb_api_data(use_cache=False)
else:
# For non-live games, use the shared cache
shared_data = self._fetch_shared_data()
if shared_data is None:
self.logger.warning("[NCAAFB] No shared data available")
return None
return shared_data
return self._fetch_ncaa_fb_api_data(use_cache=True)
def _load_fonts(self):
"""Load fonts used by the scoreboard."""

View File

@@ -311,107 +311,40 @@ class BaseNCAAMBasketballManager:
# Draw the text in the specified color
draw.text((x, y), text, font=font, fill=fill)
@classmethod
def _fetch_shared_data(cls, date_str: str = None) -> Optional[Dict]:
def _fetch_ncaam_basketball_api_data(self, use_cache: bool = True) -> Optional[Dict]:
"""Fetch and cache data for all managers to share."""
current_time = time.time()
# If we have recent data, use it
if cls._shared_data and (current_time - cls._last_shared_update) < 300: # 5 minutes
return cls._shared_data
try:
# Check cache first
cache_key = f"ncaam_basketball_{date_str}" if date_str else 'ncaam_basketball_today' # Prefix cache key
cached_data = cls.cache_manager.get(cache_key)
now = datetime.now(pytz.utc)
date_str = now.strftime('%Y%m%d')
cache_key = f"ncaam_basketball_{date_str}"
if use_cache:
cached_data = self.cache_manager.get(cache_key)
if cached_data:
cls.logger.info(f"[NCAAMBasketball] Using cached data for {cache_key}")
cls._shared_data = cached_data
cls._last_shared_update = current_time
self.logger.info(f"[NCAAMBasketball] Using cached data for {date_str}")
return cached_data
# If not in cache or stale, fetch from API
try:
url = ESPN_NCAAMB_SCOREBOARD_URL
params = {}
if date_str:
params['dates'] = date_str
params = {'dates': date_str}
response = requests.get(url, params=params)
response.raise_for_status()
data = response.json()
cls.logger.info(f"[NCAAMBasketball] Successfully fetched data from ESPN API")
# Cache the response
cls.cache_manager.update_cache(cache_key, data)
cls._shared_data = data
cls._last_shared_update = current_time
# If no date specified, fetch data from multiple days
if not date_str:
# Get today's date in YYYYMMDD format
today = datetime.now(pytz.utc).date()
dates_to_fetch = [
(today - timedelta(days=2)).strftime('%Y%m%d'),
(today - timedelta(days=1)).strftime('%Y%m%d'),
today.strftime('%Y%m%d')
]
if use_cache:
self.cache_manager.set(cache_key, data)
# Fetch data for each date
all_events = []
for fetch_date in dates_to_fetch:
if fetch_date != today.strftime('%Y%m%d'): # Skip today as we already have it
date_cache_key = f"ncaam_basketball_{fetch_date}" # Prefix cache key
# Check cache for this date
cached_date_data = cls.cache_manager.get(date_cache_key)
if cached_date_data:
cls.logger.info(f"[NCAAMBasketball] Using cached data for date {fetch_date}")
if "events" in cached_date_data:
all_events.extend(cached_date_data["events"])
continue
params['dates'] = fetch_date
response = requests.get(url, params=params)
response.raise_for_status()
date_data = response.json()
if date_data and "events" in date_data:
all_events.extend(date_data["events"])
cls.logger.info(f"[NCAAMBasketball] Fetched {len(date_data['events'])} events for date {fetch_date}")
# Cache the response
cls.cache_manager.update_cache(date_cache_key, date_data)
# Combine events from all dates
if all_events:
data["events"].extend(all_events)
cls.logger.info(f"[NCAAMBasketball] Combined {len(data['events'])} total events from all dates")
cls._shared_data = data
cls._last_shared_update = current_time
self.logger.info(f"[NCAAMBasketball] Successfully fetched data from ESPN API for {date_str}")
return data
except requests.exceptions.RequestException as e:
cls.logger.error(f"[NCAAMBasketball] Error fetching data from ESPN: {e}")
self.logger.error(f"[NCAAMBasketball] Error fetching data from ESPN: {e}")
return None
def _fetch_data(self, date_str: str = None) -> Optional[Dict]:
"""Fetch data using shared data mechanism."""
# For live games, bypass the shared cache to ensure fresh data
if isinstance(self, NCAAMBasketballLiveManager):
try:
url = ESPN_NCAAMB_SCOREBOARD_URL
params = {}
if date_str:
params['dates'] = date_str
response = requests.get(url, params=params)
response.raise_for_status()
data = response.json()
self.logger.info(f"[NCAAMBasketball] Successfully fetched live game data from ESPN API")
return data
except requests.exceptions.RequestException as e:
self.logger.error(f"[NCAAMBasketball] Error fetching live game data from ESPN: {e}")
return None
return self._fetch_ncaam_basketball_api_data(use_cache=False)
else:
# For non-live games, use the shared cache
return self._fetch_shared_data(date_str)
return self._fetch_ncaam_basketball_api_data(use_cache=True)
def _extract_game_details(self, game_event: Dict) -> Optional[Dict]:
"""Extract relevant game details from ESPN API response."""

View File

@@ -139,7 +139,7 @@ class BaseNFLManager: # Renamed class
except Exception as e:
self.logger.error(f"Error fetching odds for game {game.get('id', 'N/A')}: {e}")
def _fetch_shared_data(self) -> Optional[Dict]:
def _fetch_nfl_api_data(self, use_cache: bool = True) -> Optional[Dict]:
"""
Fetches the full season schedule for NFL, caches it, and then filters
for relevant games based on the current configuration.
@@ -147,89 +147,36 @@ class BaseNFLManager: # Renamed class
now = datetime.now(pytz.utc)
current_year = now.year
cache_key = f"nfl_schedule_{current_year}"
if use_cache:
cached_data = self.cache_manager.get(cache_key)
if cached_data:
self.logger.info(f"[NFL] Using cached schedule for {current_year}")
return {'events': cached_data}
# Try to get the full schedule from cache
cached_data = BaseNFLManager.cache_manager.get(cache_key)
if cached_data:
self.logger.info(f"[NFL] Using cached schedule for {current_year}")
events = cached_data
else:
self.logger.info(f"[NFL] Fetching full {current_year} season schedule from ESPN API...")
try:
url = f"https://site.api.espn.com/apis/site/v2/sports/football/nfl/scoreboard?dates={current_year}"
response = self.session.get(url, headers=self.headers, timeout=15)
response.raise_for_status()
data = response.json()
events = data.get('events', [])
BaseNFLManager.cache_manager.set(cache_key, events) # Cache for 24 hours
self.logger.info(f"[NFL] Successfully fetched and cached {len(events)} events for the {current_year} season.")
except requests.exceptions.RequestException as e:
self.logger.error(f"[NFL] API error fetching full schedule: {e}")
return None
if not events:
self.logger.warning("[NFL] No events found in the schedule data.")
self.logger.info(f"[NFL] Fetching full {current_year} season schedule from ESPN API (cache_enabled={use_cache})...")
try:
url = f"https://site.api.espn.com/apis/site/v2/sports/football/nfl/scoreboard?dates={current_year}"
response = self.session.get(url, headers=self.headers, timeout=15)
response.raise_for_status()
data = response.json()
events = data.get('events', [])
if use_cache:
self.cache_manager.set(cache_key, events)
self.logger.info(f"[NFL] Successfully fetched {len(events)} events for the {current_year} season.")
return {'events': events}
except requests.exceptions.RequestException as e:
self.logger.error(f"[NFL] API error fetching full schedule: {e}")
return None
# Filter the events for live, upcoming, and recent games
live_events = []
upcoming_events = []
past_events = []
for event in events:
status = event.get('status', {}).get('type', {}).get('name', 'unknown').lower()
is_live = status in ('status_in_progress', 'status_halftime')
is_upcoming = status in ('status_scheduled', 'status_pre_game')
is_final = status == 'status_final'
if is_live:
live_events.append(event)
elif is_upcoming:
upcoming_events.append(event)
elif is_final:
past_events.append(event)
# Sort games by date
upcoming_events.sort(key=lambda x: x['date'])
past_events.sort(key=lambda x: x['date'], reverse=True)
# Include all games in shared data - let individual managers filter by count
selected_upcoming = upcoming_events
selected_past = past_events
# Combine all relevant events into a single list
BaseNFLManager.all_events = live_events + selected_upcoming + selected_past
self.logger.info(f"[NFL] Processed schedule: {len(live_events)} live, {len(selected_upcoming)} upcoming, {len(selected_past)} recent games.")
# Return the data in the expected format
return {'events': BaseNFLManager.all_events}
def _fetch_data(self, date_str: str = None) -> Optional[Dict]:
"""Fetch data using shared data mechanism or direct fetch for live."""
# Check if the instance is NFLLiveManager
if isinstance(self, NFLLiveManager):
try:
url = ESPN_NFL_SCOREBOARD_URL # Use NFL URL
params = {}
if date_str:
params['dates'] = date_str
response = requests.get(url, params=params)
response.raise_for_status()
data = response.json()
self.logger.info(f"[NFL] Successfully fetched live game data from ESPN API")
return data
except requests.exceptions.RequestException as e:
self.logger.error(f"[NFL] Error fetching live game data from ESPN: {e}")
return None
return self._fetch_nfl_api_data(use_cache=False)
else:
# For non-live games, use the shared cache
shared_data = self._fetch_shared_data()
if shared_data is None:
self.logger.warning("[NFL] No shared data available")
return None
return shared_data
return self._fetch_nfl_api_data(use_cache=True)
def _load_fonts(self):
"""Load fonts used by the scoreboard."""

View File

@@ -153,104 +153,48 @@ class BaseNHLManager:
except pytz.UnknownTimeZoneError:
return pytz.utc
def _fetch_shared_data(self, date_str: str = None) -> Optional[Dict]:
"""Fetch and cache data for all managers to share."""
def _fetch_nhl_api_data(self, use_cache: bool = True) -> Optional[Dict]:
"""Fetch and cache data from the NHL API."""
current_time = time.time()
# If we have recent data, use it
if BaseNHLManager._shared_data and (current_time - BaseNHLManager._last_shared_update) < 300: # 5 minutes
return BaseNHLManager._shared_data
try:
# Check cache first
cache_key = date_str if date_str else 'today'
cached_data = BaseNHLManager.cache_manager.get(cache_key, max_age=300) # 5 minutes cache
# Use today's date for the request
date_str = datetime.now(self._get_timezone()).strftime('%Y-%m-%d')
cache_key = f"nhl_api_data_{date_str}"
# If using cache, try to load from cache first
if use_cache:
cached_data = self.cache_manager.get(cache_key, max_age=300)
if cached_data:
BaseNHLManager.logger.info(f"[NHL] Using cached data for {cache_key}")
BaseNHLManager._shared_data = cached_data
BaseNHLManager._last_shared_update = current_time
self.logger.info(f"[NHL] Using cached data for {date_str}")
return cached_data
# If not in cache or stale, fetch from API
if not date_str:
# Get today's date in YYYY-MM-DD format
today = datetime.now(self._get_timezone()).date()
date_str = today.strftime('%Y-%m-%d')
try:
# If not in cache or stale, or if cache is disabled, fetch from API
url = f"{NHL_API_BASE_URL}{date_str}"
BaseNHLManager.logger.info(f"Fetching data from URL: {url}")
self.logger.info(f"Fetching data from URL: {url}")
response = requests.get(url)
response.raise_for_status()
data = response.json()
BaseNHLManager.logger.info(f"[NHL] Successfully fetched data from NHL API")
self.logger.info(f"[NHL] Successfully fetched data from NHL API for {date_str}")
# Cache the response
BaseNHLManager.cache_manager.set(cache_key, data)
BaseNHLManager._shared_data = data
BaseNHLManager._last_shared_update = current_time
# If no date specified, fetch data from multiple days
if not date_str:
# Get today's date in YYYYMMDD format
today = datetime.now(self._get_timezone()).date()
dates_to_fetch = [
(today - timedelta(days=2)).strftime('%Y-%m-%d'),
(today - timedelta(days=1)).strftime('%Y-%m-%d'),
today.strftime('%Y-%m-%d')
]
# Fetch data for each date
all_events = []
for fetch_date in dates_to_fetch:
if fetch_date != today.strftime('%Y-%m-%d'): # Skip today as we already have it
# Check cache for this date
cached_date_data = BaseNHLManager.cache_manager.get(fetch_date, max_age=300)
if cached_date_data:
BaseNHLManager.logger.info(f"[NHL] Using cached data for date {fetch_date}")
if "events" in cached_date_data:
all_events.extend(cached_date_data["events"])
continue
url = f"{NHL_API_BASE_URL}{fetch_date}"
response = requests.get(url)
response.raise_for_status()
date_data = response.json()
if date_data and "events" in date_data:
all_events.extend(date_data["events"])
BaseNHLManager.logger.info(f"[NHL] Fetched {len(date_data['events'])} events for date {fetch_date}")
# Cache the response
BaseNHLManager.cache_manager.set(fetch_date, date_data)
# Combine events from all dates
if all_events:
data["events"].extend(all_events)
BaseNHLManager.logger.info(f"[NHL] Combined {len(data['events'])} total events from all dates")
BaseNHLManager._shared_data = data
BaseNHLManager._last_shared_update = current_time
# Save to cache if caching is enabled
if use_cache:
self.cache_manager.set(cache_key, data)
return data
except requests.exceptions.RequestException as e:
BaseNHLManager.logger.error(f"[NHL] Error fetching data from NHL: {e}")
self.logger.error(f"[NHL] Error fetching data from NHL API: {e}")
return None
def _fetch_data(self, date_str: str = None) -> Optional[Dict]:
"""Fetch data using shared data mechanism."""
"""Fetch data using the new centralized method."""
# For live games, bypass the shared cache to ensure fresh data
if isinstance(self, NHLLiveManager):
try:
url = f"{NHL_API_BASE_URL}{date_str}" if date_str else f"{NHL_API_BASE_URL}{datetime.now(self._get_timezone()).strftime('%Y-%m-%d')}"
response = requests.get(url)
response.raise_for_status()
data = response.json()
self.logger.info(f"[NHL] Successfully fetched live game data from NHL API")
return data
except requests.exceptions.RequestException as e:
self.logger.error(f"[NHL] Error fetching live game data from NHL: {e}")
return None
return self._fetch_nhl_api_data(use_cache=False)
else:
# For non-live games, use the shared cache
return self._fetch_shared_data(date_str)
return self._fetch_nhl_api_data(use_cache=True)
def _load_fonts(self):
"""Load fonts used by the scoreboard."""

View File

@@ -269,114 +269,54 @@ class BaseSoccerManager:
else:
cls.logger.info(f"[Soccer] Team-league map is up-to-date (last updated: {datetime.fromtimestamp(cls._map_last_updated).strftime('%Y-%m-%d %H:%M:%S')}).")
# --- End Team League Map Management ---
@classmethod
def _fetch_shared_data(cls, date_str: str = None) -> Optional[Dict]:
def _fetch_soccer_api_data(self, use_cache: bool = True) -> Optional[Dict]:
"""Fetch and cache data for all managers to share, iterating through target leagues."""
current_time = time.time()
all_data = {"events": []}
# Access shared config through the class attribute
favorite_teams = cls._soccer_config_shared.get("favorite_teams", [])
target_leagues_config = cls._soccer_config_shared.get("leagues", list(LEAGUE_SLUGS.keys()))
upcoming_fetch_days = cls._soccer_config_shared.get("upcoming_fetch_days", 1) # Fetch days
favorite_teams = self.soccer_config.get("favorite_teams", [])
target_leagues_config = self.soccer_config.get("leagues", list(LEAGUE_SLUGS.keys()))
upcoming_fetch_days = self.soccer_config.get("upcoming_fetch_days", 1)
# Determine which leagues to actually fetch
leagues_to_fetch = set()
if favorite_teams and cls._team_league_map:
for team in favorite_teams:
league = cls._team_league_map.get(team)
if league:
leagues_to_fetch.add(league)
else:
cls.logger.warning(f"[Soccer] Favorite team '{team}' not found in team-league map. Cannot filter by its league.")
# If no leagues were found for favorites, should we fetch configured leagues or nothing?
# Current approach: fetch configured leagues as fallback if map lookups fail for all favs.
if not leagues_to_fetch:
cls.logger.warning("[Soccer] No leagues found for any favorite teams in map. Falling back to configured leagues.")
leagues_to_fetch = set(target_leagues_config)
else:
# No favorite teams specified, or map not loaded, use configured leagues
leagues_to_fetch = set(target_leagues_config)
cls.logger.debug(f"[Soccer] Determined leagues to fetch for shared data: {leagues_to_fetch}")
today = datetime.now(pytz.utc).date()
# Generate dates from yesterday up to 'upcoming_fetch_days' in the future
dates_to_fetch = [
(today + timedelta(days=i)).strftime('%Y%m%d')
for i in range(-1, upcoming_fetch_days + 1) # -1 (yesterday) to upcoming_fetch_days
]
leagues_to_fetch = set(target_leagues_config)
# Add specific date if provided and not already included (e.g., for testing/debugging)
if date_str and date_str not in dates_to_fetch:
dates_to_fetch.append(date_str)
cls.logger.debug(f"[Soccer] Fetching shared data for dates: {dates_to_fetch}")
today = datetime.now(pytz.utc).date()
dates_to_fetch = [(today + timedelta(days=i)).strftime('%Y%m%d') for i in range(-1, upcoming_fetch_days + 1)]
# Fetch data only for the determined leagues
for league_slug in leagues_to_fetch:
for fetch_date in dates_to_fetch:
cache_key = f"soccer_{league_slug}_{fetch_date}"
# Check cache first
cached_data = cls.cache_manager.get(cache_key, max_age=300)
if cached_data:
cls.logger.debug(f"[Soccer] Using cached data for {league_slug} on {fetch_date}")
if "events" in cached_data:
all_data["events"].extend(cached_data["events"])
continue
if use_cache:
cached_data = self.cache_manager.get(cache_key, max_age=300)
if cached_data:
self.logger.debug(f"[Soccer] Using cached data for {league_slug} on {fetch_date}")
if "events" in cached_data:
all_data["events"].extend(cached_data["events"])
continue
try:
url = ESPN_SOCCER_LEAGUE_SCOREBOARD_URL_FORMAT.format(league_slug)
params = {'dates': fetch_date, 'limit': 100} # Limit per league/date call
params = {'dates': fetch_date, 'limit': 100}
response = requests.get(url, params=params)
response.raise_for_status()
data = response.json()
cls.logger.info(f"[Soccer] Fetched data from ESPN API for {league_slug} on {fetch_date}")
cls.cache_manager.set(cache_key, data)
self.logger.info(f"[Soccer] Fetched data from ESPN API for {league_slug} on {fetch_date}")
if use_cache:
self.cache_manager.set(cache_key, data)
if "events" in data:
all_data["events"].extend(data["events"])
except requests.exceptions.RequestException as e:
# Log specific error but continue trying other leagues/dates
if response is not None and response.status_code == 404:
cls.logger.debug(f"[Soccer] No data found (404) for {league_slug} on {fetch_date}. URL: {url}")
self.logger.debug(f"[Soccer] No data found (404) for {league_slug} on {fetch_date}. URL: {url}")
if use_cache:
self.cache_manager.set(cache_key, {"events": []})
else:
cls.logger.error(f"[Soccer] Error fetching data from ESPN for {league_slug} on {fetch_date}: {e}")
# Cache an empty result for 404s to avoid retrying immediately
if response is not None and response.status_code == 404:
cls.cache_manager.set(cache_key, {"events": []})
self.logger.error(f"[Soccer] Error fetching data for {league_slug} on {fetch_date}: {e}")
# Filter events based on favorite teams, if specified
if favorite_teams:
leagues_with_favorites = set()
for event in all_data.get("events", []):
league_slug = event.get("league", {}).get("slug")
competitors = event.get("competitions", [{}])[0].get("competitors", [])
for competitor in competitors:
team_abbr = competitor.get("team", {}).get("abbreviation")
if team_abbr in favorite_teams and league_slug:
leagues_with_favorites.add(league_slug)
break # No need to check other competitor in this event
if leagues_with_favorites:
cls.logger.debug(f"[Soccer] Filtering shared data for leagues with favorite teams: {leagues_with_favorites}")
filtered_events = [
event for event in all_data.get("events", [])
if event.get("league", {}).get("slug") in leagues_with_favorites
]
all_data["events"] = filtered_events
else:
cls.logger.debug("[Soccer] No favorite teams found in any fetched events. Shared data will be empty.")
all_data["events"] = [] # No relevant leagues found
cls._shared_data = all_data # Store combined (and potentially filtered) data
cls._last_shared_update = current_time # Update timestamp
return cls._shared_data
return all_data
def _get_live_leagues_to_fetch(self) -> set:
"""Determine which leagues to fetch for live data based on favorites and map."""
# NOTE(review): stray diff hunk marker removed here; the body of
# _get_live_leagues_to_fetch above was elided in this view — restore it
# from version control before running this file.
def _fetch_data(self, date_str: str = None) -> Optional[Dict]:
    """Fetch soccer scoreboard data via the shared API helper.

    Live managers bypass the cache so in-progress scores are never served
    stale; every other manager (recent/upcoming, or a live manager in test
    mode) goes through the cached fetch.

    Args:
        date_str: Optional date string. Retained for backward
            compatibility with existing callers; the underlying fetch now
            determines which dates to request itself — TODO confirm
            callers no longer rely on it.

    Returns:
        The parsed API data dict from ``_fetch_soccer_api_data``, or None
        if the fetch failed.
    """
    # NOTE: this block previously contained both the removed per-league
    # live-fetch loop and its replacement (merge/diff residue). Only the
    # replacement — delegation to _fetch_soccer_api_data — is kept, with
    # live managers opting out of the cache.
    if isinstance(self, SoccerLiveManager) and not self.test_mode:
        # Live data must always be fetched fresh.
        return self._fetch_soccer_api_data(use_cache=False)
    # Non-live or test mode: the cached fetch is acceptable.
    return self._fetch_soccer_api_data(use_cache=True)
def _load_fonts(self):
"""Load fonts used by the scoreboard."""