Fix duplicate caching (#62)

* Fix duplicate/redundant caching issue

- Add get_background_cached_data() and is_background_data_available() methods to CacheManager
- Update sports managers to use background service cache instead of individual caching
- Ensure consistent cache key generation between background service and managers
- Eliminate redundant API calls by making Recent/Upcoming managers cache consumers
- Fix cache miss issues where TTL < update interval

This addresses GitHub issue #57 by implementing a cleaner caching architecture
where the background service is the primary data source and managers are cache consumers.

* Update remaining sports managers to use background service cache

- Update NHL managers to use background service cache
- Update NCAA Football managers to use background service cache
- Update NCAA Hockey managers to use background service cache
- Update MLB managers to use background service cache for Recent/Upcoming

All sports managers now use the new caching architecture to eliminate
duplicate caching and redundant API calls.

* Cache improvements

* Updated cache manager
This commit is contained in:
Chuck
2025-09-24 16:13:41 -04:00
committed by GitHub
parent b1295047e2
commit 42e14f99b0
14 changed files with 581 additions and 83 deletions

View File

@@ -0,0 +1,134 @@
"""
Background Cache Mixin for Sports Managers
This mixin provides common caching functionality to eliminate code duplication
across all sports managers. It implements the background service cache pattern
where Recent/Upcoming managers consume data from the background service cache.
"""
import time
import logging
from typing import Dict, Optional, Any, Callable
from datetime import datetime
import pytz
class BackgroundCacheMixin:
    """
    Mixin that provides background-service cache functionality to sports managers.

    Eliminates code duplication by providing a common implementation of the
    background service cache pattern used across all sports managers: the
    background service is the primary data source and Recent/Upcoming
    managers are cache consumers.

    Host classes must provide ``self.logger`` and ``self.cache_manager``.
    """

    def _fetch_data_with_background_cache(self,
                                          sport_key: str,
                                          api_fetch_method: Callable,
                                          live_manager_class: type = None) -> Optional[Dict]:
        """
        Common logic for fetching data with background service cache support.

        Implements the background service cache pattern:
        1. Live managers always fetch fresh data
        2. Recent/Upcoming managers try the background cache first
        3. Fallback to a direct API call if no data was obtained

        Args:
            sport_key: Sport identifier (e.g., 'nba', 'nfl', 'ncaa_fb')
            api_fetch_method: Method to call for a direct API fetch; must
                accept a ``use_cache`` keyword argument.
            live_manager_class: Class used to detect whether ``self`` is a
                live manager.

        Returns:
            Cached or fresh data from the API.

        Raises:
            Re-raises any exception from the underlying fetch after recording
            the elapsed time.
        """
        start_time = time.time()
        cache_hit = False
        cache_source = None
        # Compute the key once, up front. The API-fallback branch below logs
        # it, and a live fetch can also return None and reach that branch --
        # previously the key was only bound on the non-live path, so a live
        # fetch returning None raised NameError inside the fallback log call.
        cache_key = self.cache_manager.generate_sport_cache_key(sport_key)
        try:
            if live_manager_class and isinstance(self, live_manager_class):
                # Live managers always bypass caches for fresh data.
                self.logger.info(f"[{sport_key.upper()}] Live manager - fetching fresh data")
                result = api_fetch_method(use_cache=False)
                cache_source = "live_fresh"
            else:
                # Recent/Upcoming managers consume the background service cache.
                if self.cache_manager.is_background_data_available(cache_key, sport_key):
                    cached_data = self.cache_manager.get_background_cached_data(cache_key, sport_key)
                    if cached_data:
                        self.logger.info(f"[{sport_key.upper()}] Using background service cache for {cache_key}")
                        result = cached_data
                        cache_hit = True
                        cache_source = "background_cache"
                    else:
                        self.logger.warning(f"[{sport_key.upper()}] Background cache check passed but no data returned for {cache_key}")
                        result = None
                        cache_source = "background_miss"
                else:
                    self.logger.info(f"[{sport_key.upper()}] Background data not available for {cache_key}")
                    result = None
                    cache_source = "background_unavailable"
            # Fallback to a direct API call when no usable data was obtained.
            if result is None:
                self.logger.info(f"[{sport_key.upper()}] Fetching directly from API for {cache_key}")
                result = api_fetch_method(use_cache=True)
                cache_source = "api_fallback"
            # Record and log performance metrics for this operation.
            duration = time.time() - start_time
            self.cache_manager.record_fetch_time(duration)
            self._log_fetch_performance(sport_key, duration, cache_hit, cache_source)
            return result
        except Exception as e:
            duration = time.time() - start_time
            self.logger.error(f"[{sport_key.upper()}] Error in background cache fetch after {duration:.2f}s: {e}")
            self.cache_manager.record_fetch_time(duration)
            raise

    def _log_fetch_performance(self, sport_key: str, duration: float, cache_hit: bool, cache_source: str):
        """
        Log detailed performance metrics for fetch operations.

        Args:
            sport_key: Sport identifier
            duration: Fetch operation duration in seconds
            cache_hit: Whether this was a cache hit
            cache_source: Source of the data (background_cache, api_fallback, etc.)
        """
        self.logger.info(f"[{sport_key.upper()}] Fetch completed in {duration:.2f}s "
                         f"(cache_hit={cache_hit}, source={cache_source})")
        # Emit a metrics summary every 10 operations; getattr covers first use.
        self._fetch_count = getattr(self, '_fetch_count', 0) + 1
        if self._fetch_count % 10 == 0:
            metrics = self.cache_manager.get_cache_metrics()
            self.logger.info(f"[{sport_key.upper()}] Cache Performance Summary - "
                             f"Hit Rate: {metrics['cache_hit_rate']:.2%}, "
                             f"Background Hit Rate: {metrics['background_hit_rate']:.2%}, "
                             f"API Calls Saved: {metrics['api_calls_saved']}")

    def get_cache_performance_summary(self) -> Dict[str, Any]:
        """
        Get cache performance summary for this manager.

        Returns:
            Dictionary containing cache performance metrics
        """
        return self.cache_manager.get_cache_metrics()

    def log_cache_performance(self):
        """Log current cache performance metrics."""
        self.cache_manager.log_cache_metrics()

View File

@@ -128,11 +128,22 @@ class BackgroundDataService:
logger.info(f"BackgroundDataService initialized with {max_workers} workers")
def get_sport_cache_key(self, sport: str, date_str: str = None) -> str:
    """
    Generate a consistent cache key for sports data.

    Delegates to the centralized CacheManager key generator so that the
    background service and the Recent/Upcoming managers always agree on
    the same cache key.
    """
    # Local import avoids a circular dependency at module load time.
    from src.cache_manager import CacheManager
    return CacheManager().generate_sport_cache_key(sport, date_str)
def submit_fetch_request(self,
sport: str,
year: int,
url: str,
cache_key: str,
cache_key: str = None,
params: Optional[Dict[str, Any]] = None,
headers: Optional[Dict[str, str]] = None,
timeout: Optional[int] = None,
@@ -160,6 +171,10 @@ class BackgroundDataService:
if self._shutdown:
raise RuntimeError("BackgroundDataService is shutting down")
# Generate cache key if not provided
if cache_key is None:
cache_key = self.get_sport_cache_key(sport)
request_id = f"{sport}_{year}_{int(time.time() * 1000)}"
# Check cache first

View File

@@ -44,6 +44,17 @@ class CacheManager:
except ImportError:
self.config_manager = None
self.logger.warning("ConfigManager not available, using default cache intervals")
# Initialize performance metrics
self._cache_metrics = {
'hits': 0,
'misses': 0,
'api_calls_saved': 0,
'background_hits': 0,
'background_misses': 0,
'total_fetch_time': 0.0,
'fetch_count': 0
}
def _get_writable_cache_dir(self) -> Optional[str]:
"""Tries to find or create a writable cache directory, preferring a system path when available."""
@@ -735,4 +746,123 @@ class CacheManager:
Now respects sport-specific live_update_interval configurations.
"""
data_type = self.get_data_type_from_key(key)
return self.get_cached_data_with_strategy(key, data_type)
return self.get_cached_data_with_strategy(key, data_type)
def get_background_cached_data(self, key: str, sport_key: str = None) -> Optional[Dict]:
    """
    Get data from the background service cache with the appropriate strategy.

    Specifically designed for Recent/Upcoming managers that consume data
    cached by the background service (which has longer TTLs than the
    individual manager caches).

    Args:
        key: Cache key to retrieve
        sport_key: Sport key for determining the appropriate cache strategy

    Returns:
        Cached data if available and fresh, None otherwise
    """
    strategy = self.get_cache_strategy(self.get_data_type_from_key(key), sport_key)
    max_age = strategy['max_age']
    entry = self.get_cached_data(key, max_age, strategy.get('memory_ttl', max_age))
    if not entry:
        # Record the miss for performance monitoring.
        self.record_cache_miss('background')
        return None
    self.record_cache_hit('background')
    # Entries may be stored wrapped as {'data': ..., 'timestamp': ...}; unwrap.
    if isinstance(entry, dict) and 'data' in entry:
        return entry['data']
    return entry
def is_background_data_available(self, key: str, sport_key: str = None) -> bool:
    """
    Check whether the background service has fresh data for *key*.

    Lets Recent/Upcoming managers decide between waiting for background
    data and fetching immediately.
    """
    strategy = self.get_cache_strategy(self.get_data_type_from_key(key), sport_key)
    # Fresh means within the background service's max_age for this data type.
    return self.get_cached_data(key, strategy['max_age']) is not None
def generate_sport_cache_key(self, sport: str, date_str: str = None) -> str:
    """
    Centralized cache key generation for sports data.

    Ensures consistent cache keys across the background service and the
    individual sport managers.

    Args:
        sport: Sport identifier (e.g., 'nba', 'nfl', 'ncaa_fb')
        date_str: Date string in YYYYMMDD format. If None, uses current UTC date.

    Returns:
        Cache key in format: {sport}_{date}
    """
    if date_str is None:
        # Stdlib timezone.utc produces the same date stamp as pytz.utc and
        # matches the `timezone.utc` usage elsewhere in this codebase,
        # avoiding the third-party dependency for a simple UTC date.
        from datetime import datetime, timezone
        date_str = datetime.now(timezone.utc).strftime('%Y%m%d')
    return f"{sport}_{date_str}"
def record_cache_hit(self, cache_type: str = 'regular'):
    """
    Record a cache hit for performance monitoring.

    Args:
        cache_type: 'background' counts as a background-service hit;
            any other value counts as a regular hit.
    """
    with self._cache_lock:
        if cache_type == 'background':
            self._cache_metrics['background_hits'] += 1
        else:
            self._cache_metrics['hits'] += 1
        # A hit is exactly the case where an API call was avoided, so the
        # 'api_calls_saved' counter is incremented here (not on misses).
        self._cache_metrics['api_calls_saved'] += 1
def record_cache_miss(self, cache_type: str = 'regular'):
    """
    Record a cache miss for performance monitoring.

    Args:
        cache_type: 'background' counts as a background-service miss;
            any other value counts as a regular miss.
    """
    with self._cache_lock:
        if cache_type == 'background':
            self._cache_metrics['background_misses'] += 1
        else:
            self._cache_metrics['misses'] += 1
        # NOTE: a miss triggers a real API call, so it must NOT increment
        # 'api_calls_saved' -- the previous code inverted this metric by
        # counting every regular miss as a "saved" call.
def record_fetch_time(self, duration: float):
    """Record a fetch operation's duration for performance monitoring."""
    with self._cache_lock:
        metrics = self._cache_metrics
        metrics['total_fetch_time'] += duration
        metrics['fetch_count'] += 1
def get_cache_metrics(self) -> Dict[str, Any]:
    """Return a snapshot of the current cache performance metrics."""
    with self._cache_lock:
        m = self._cache_metrics

        def _rate(numerator, denominator):
            # Guard against division by zero for empty counters.
            return numerator / denominator if denominator > 0 else 0.0

        hits = m['hits'] + m['background_hits']
        misses = m['misses'] + m['background_misses']
        background_total = m['background_hits'] + m['background_misses']
        return {
            'total_requests': hits + misses,
            'cache_hit_rate': _rate(hits, hits + misses),
            'background_hit_rate': _rate(m['background_hits'], background_total),
            'api_calls_saved': m['api_calls_saved'],
            'average_fetch_time': _rate(m['total_fetch_time'], m['fetch_count']),
            'total_fetch_time': m['total_fetch_time'],
            'fetch_count': m['fetch_count'],
        }
def log_cache_metrics(self):
    """Log a one-line summary of current cache performance metrics."""
    m = self.get_cache_metrics()
    summary = (f"Cache Performance - Hit Rate: {m['cache_hit_rate']:.2%}, "
               f"Background Hit Rate: {m['background_hit_rate']:.2%}, "
               f"API Calls Saved: {m['api_calls_saved']}, "
               f"Avg Fetch Time: {m['average_fetch_time']:.2f}s")
    self.logger.info(summary)

148
src/generic_cache_mixin.py Normal file
View File

@@ -0,0 +1,148 @@
"""
Generic Cache Mixin for Any Manager
This mixin provides caching functionality that can be used by any manager
that needs to cache data, not just sports managers. It's a more general
version of BackgroundCacheMixin that works for weather, stocks, news, etc.
"""
import time
import logging
from typing import Dict, Optional, Any, Callable
class GenericCacheMixin:
    """
    Generic mixin class that provides caching functionality to any manager.

    Usable by weather, stock, news, or any other manager that needs to
    cache data with performance monitoring. Host classes must provide
    ``self.cache_manager`` and ``self.logger``.
    """

    def _fetch_data_with_cache(self,
                               cache_key: str,
                               api_fetch_method: Callable,
                               cache_ttl: int = 300,
                               force_refresh: bool = False) -> Optional[Dict]:
        """
        Generic caching pattern for any manager.

        Args:
            cache_key: Unique cache key for this data
            api_fetch_method: Method to call for fresh data
            cache_ttl: Time-to-live in seconds (default: 5 minutes)
            force_refresh: Skip cache and fetch fresh data

        Returns:
            Cached or fresh data from API

        Raises:
            Re-raises any exception from api_fetch_method after recording
            the elapsed time.
        """
        start_time = time.time()
        cache_hit = False
        cache_source = None
        try:
            # Check cache first (unless forcing refresh).
            if not force_refresh:
                cached_data = self.cache_manager.get_cached_data(cache_key, cache_ttl)
                if cached_data:
                    self.logger.info(f"Using cached data for {cache_key}")
                    cache_hit = True
                    cache_source = "cache"
                    self.cache_manager.record_cache_hit('regular')
                    duration = time.time() - start_time
                    self.cache_manager.record_fetch_time(duration)
                    self._log_fetch_performance(cache_key, duration, cache_hit, cache_source)
                    return cached_data
            # Fetch fresh data.
            self.logger.info(f"Fetching fresh data for {cache_key}")
            result = api_fetch_method()
            cache_source = "api_fresh"
            # The cache was bypassed or missed either way, so record the miss
            # even when the API returned nothing -- previously the miss was
            # only counted for truthy results, inflating the hit rate.
            self.cache_manager.record_cache_miss('regular')
            if result:
                self.cache_manager.save_cache(cache_key, result)
            else:
                self.logger.warning(f"No data returned for {cache_key}")
            duration = time.time() - start_time
            self.cache_manager.record_fetch_time(duration)
            self._log_fetch_performance(cache_key, duration, cache_hit, cache_source)
            return result
        except Exception as e:
            duration = time.time() - start_time
            self.logger.error(f"Error fetching data for {cache_key} after {duration:.2f}s: {e}")
            self.cache_manager.record_fetch_time(duration)
            raise

    def _log_fetch_performance(self, cache_key: str, duration: float, cache_hit: bool, cache_source: str):
        """
        Log detailed performance metrics for fetch operations.

        Args:
            cache_key: Cache key that was accessed
            duration: Fetch operation duration in seconds
            cache_hit: Whether this was a cache hit
            cache_source: Source of the data (cache, api_fresh, etc.)
        """
        self.logger.info(f"Fetch completed for {cache_key} in {duration:.2f}s "
                         f"(cache_hit={cache_hit}, source={cache_source})")
        # Emit a metrics summary every 10 operations; getattr covers first use.
        self._fetch_count = getattr(self, '_fetch_count', 0) + 1
        if self._fetch_count % 10 == 0:
            metrics = self.cache_manager.get_cache_metrics()
            self.logger.info(f"Cache Performance Summary - "
                             f"Hit Rate: {metrics['cache_hit_rate']:.2%}, "
                             f"API Calls Saved: {metrics['api_calls_saved']}, "
                             f"Avg Fetch Time: {metrics['average_fetch_time']:.2f}s")

    def get_cache_performance_summary(self) -> Dict[str, Any]:
        """
        Get cache performance summary for this manager.

        Returns:
            Dictionary containing cache performance metrics
        """
        return self.cache_manager.get_cache_metrics()

    def log_cache_performance(self):
        """Log current cache performance metrics."""
        self.cache_manager.log_cache_metrics()

    def clear_cache_for_key(self, cache_key: str):
        """Clear cache for a specific key."""
        self.cache_manager.clear_cache(cache_key)
        self.logger.info(f"Cleared cache for {cache_key}")

    def get_cache_info(self, cache_key: str) -> Dict[str, Any]:
        """
        Get information about a cached item.

        Args:
            cache_key: Cache key to check

        Returns:
            Dictionary with cache information
        """
        # TODO: implement real introspection in CacheManager; for now report
        # basic info only. NOTE(review): max_age=0 here likely treats every
        # entry as stale, making 'exists' always False -- confirm against
        # CacheManager.get_cached_data semantics.
        return {
            'key': cache_key,
            'exists': self.cache_manager.get_cached_data(cache_key, 0) is not None,
            'ttl': 'unknown'  # Would need to be implemented
        }

View File

@@ -410,7 +410,10 @@ class BaseMLBManager:
return "TBD"
def _fetch_mlb_api_data(self, use_cache: bool = True) -> Dict[str, Any]:
"""Fetch MLB game data from the ESPN API."""
"""
Fetch MLB game data from the ESPN API.
Updated to use background service cache for Recent/Upcoming managers.
"""
# Define cache key based on dates
now = datetime.now(timezone.utc)
yesterday = now - timedelta(days=1)
@@ -420,6 +423,16 @@ class BaseMLBManager:
# If using cache, try to load from cache first
if use_cache:
# For Recent/Upcoming managers, try background service cache first
if hasattr(self, '__class__') and any(x in self.__class__.__name__ for x in ['Recent', 'Upcoming']):
if self.cache_manager.is_background_data_available(cache_key, 'mlb'):
cached_data = self.cache_manager.get_background_cached_data(cache_key, 'mlb')
if cached_data:
self.logger.info(f"[MLB] Using background service cache for {cache_key}")
return cached_data
self.logger.info(f"[MLB] Background data not available, fetching directly for {cache_key}")
# Fallback to regular cache strategy
cached_data = self.cache_manager.get_with_auto_strategy(cache_key)
if cached_data:
self.logger.info("Using cached MLB API data.")

View File

@@ -12,6 +12,7 @@ from src.cache_manager import CacheManager
from src.config_manager import ConfigManager
from src.odds_manager import OddsManager
from src.background_data_service import get_background_service
from src.background_cache_mixin import BackgroundCacheMixin
import pytz
# Import the API counter function from web interface
@@ -32,7 +33,7 @@ logging.basicConfig(
datefmt='%Y-%m-%d %H:%M:%S'
)
class BaseNBAManager:
class BaseNBAManager(BackgroundCacheMixin):
"""Base class for NBA managers with common functionality."""
# Class variables for warning tracking
_no_data_warning_logged = False
@@ -317,11 +318,16 @@ class BaseNBAManager:
return None
def _fetch_data(self, date_str: str = None) -> Optional[Dict]:
"""Fetch data using shared data mechanism."""
if isinstance(self, NBALiveManager):
return self._fetch_nba_api_data(use_cache=False)
else:
return self._fetch_nba_api_data(use_cache=True)
"""
Fetch data using background service cache first, fallback to direct API call.
This eliminates redundant caching and ensures Recent/Upcoming managers
use the same data source as the background service.
"""
return self._fetch_data_with_background_cache(
sport_key='nba',
api_fetch_method=self._fetch_nba_api_data,
live_manager_class=NBALiveManager
)
def _fetch_odds(self, game: Dict) -> None:
"""Fetch odds for a specific game if conditions are met."""

View File

@@ -377,11 +377,30 @@ class BaseNCAAFBManager: # Renamed class
return []
def _fetch_data(self, date_str: str = None) -> Optional[Dict]:
"""Fetch data using shared data mechanism or direct fetch for live."""
"""
Fetch data using background service cache first, fallback to direct API call.
This eliminates redundant caching and ensures Recent/Upcoming managers
use the same data source as the background service.
"""
# For Live managers, always fetch fresh data
if isinstance(self, NCAAFBLiveManager):
return self._fetch_ncaa_fb_api_data(use_cache=False)
else:
return self._fetch_ncaa_fb_api_data(use_cache=True)
# For Recent/Upcoming managers, try to use background service cache first
from datetime import datetime
import pytz
cache_key = f"ncaa_fb_{datetime.now(pytz.utc).strftime('%Y%m%d')}"
# Check if background service has fresh data
if self.cache_manager.is_background_data_available(cache_key, 'ncaa_fb'):
cached_data = self.cache_manager.get_background_cached_data(cache_key, 'ncaa_fb')
if cached_data:
self.logger.info(f"[NCAAFB] Using background service cache for {cache_key}")
return cached_data
# Fallback to direct API call if background data not available
self.logger.info(f"[NCAAFB] Background data not available, fetching directly for {cache_key}")
return self._fetch_ncaa_fb_api_data(use_cache=True)
def _load_fonts(self):

View File

@@ -347,11 +347,30 @@ class BaseNCAAMBasketballManager:
return None
def _fetch_data(self, date_str: str = None) -> Optional[Dict]:
"""Fetch data using shared data mechanism."""
"""
Fetch data using background service cache first, fallback to direct API call.
This eliminates redundant caching and ensures Recent/Upcoming managers
use the same data source as the background service.
"""
# For Live managers, always fetch fresh data
if isinstance(self, NCAAMBasketballLiveManager):
return self._fetch_ncaam_basketball_api_data(use_cache=False)
else:
return self._fetch_ncaam_basketball_api_data(use_cache=True)
# For Recent/Upcoming managers, try to use background service cache first
from datetime import datetime
import pytz
cache_key = f"ncaam_basketball_{datetime.now(pytz.utc).strftime('%Y%m%d')}"
# Check if background service has fresh data
if self.cache_manager.is_background_data_available(cache_key, 'ncaam_basketball'):
cached_data = self.cache_manager.get_background_cached_data(cache_key, 'ncaam_basketball')
if cached_data:
self.logger.info(f"[NCAAMBasketball] Using background service cache for {cache_key}")
return cached_data
# Fallback to direct API call if background data not available
self.logger.info(f"[NCAAMBasketball] Background data not available, fetching directly for {cache_key}")
return self._fetch_ncaam_basketball_api_data(use_cache=True)
def _extract_game_details(self, game_event: Dict) -> Optional[Dict]:
"""Extract relevant game details from ESPN API response."""

View File

@@ -244,11 +244,30 @@ class BaseNCAAMHockeyManager: # Renamed class
return {'events': all_events}
def _fetch_data(self, date_str: str = None) -> Optional[Dict]:
"""Fetch data using shared data mechanism or direct fetch for live."""
"""
Fetch data using background service cache first, fallback to direct API call.
This eliminates redundant caching and ensures Recent/Upcoming managers
use the same data source as the background service.
"""
# For Live managers, always fetch fresh data
if isinstance(self, NCAAMHockeyLiveManager):
return self._fetch_ncaa_fb_api_data(use_cache=False)
else:
return self._fetch_ncaa_fb_api_data(use_cache=True)
# For Recent/Upcoming managers, try to use background service cache first
from datetime import datetime
import pytz
cache_key = f"ncaam_hockey_{datetime.now(pytz.utc).strftime('%Y%m%d')}"
# Check if background service has fresh data
if self.cache_manager.is_background_data_available(cache_key, 'ncaam_hockey'):
cached_data = self.cache_manager.get_background_cached_data(cache_key, 'ncaam_hockey')
if cached_data:
self.logger.info(f"[NCAAMHockey] Using background service cache for {cache_key}")
return cached_data
# Fallback to direct API call if background data not available
self.logger.info(f"[NCAAMHockey] Background data not available, fetching directly for {cache_key}")
return self._fetch_ncaa_fb_api_data(use_cache=True)
def _load_fonts(self):
"""Load fonts used by the scoreboard."""

View File

@@ -12,6 +12,7 @@ from src.cache_manager import CacheManager
from src.config_manager import ConfigManager
from src.odds_manager import OddsManager
from src.background_data_service import get_background_service
from src.background_cache_mixin import BackgroundCacheMixin
import pytz
# Constants
@@ -27,7 +28,7 @@ logging.basicConfig(
class BaseNFLManager: # Renamed class
class BaseNFLManager(BackgroundCacheMixin): # Renamed class
"""Base class for NFL managers with common functionality."""
# Class variables for warning tracking
_no_data_warning_logged = False
@@ -330,13 +331,22 @@ class BaseNFLManager: # Renamed class
return None
def _fetch_data(self, date_str: str = None) -> Optional[Dict]:
"""Fetch data using shared data mechanism or direct fetch for live."""
"""
Fetch data using background service cache first, fallback to direct API call.
This eliminates redundant caching and ensures Recent/Upcoming managers
use the same data source as the background service.
"""
# For Live managers, always fetch fresh data
if isinstance(self, NFLLiveManager):
# Live games should fetch only current games, not entire season
return self._fetch_current_nfl_games()
else:
# Recent and Upcoming managers should use cached season data
return self._fetch_nfl_api_data(use_cache=True)
# For Recent/Upcoming managers, use the centralized background cache method
return self._fetch_data_with_background_cache(
sport_key='nfl',
api_fetch_method=self._fetch_nfl_api_data,
live_manager_class=NFLLiveManager
)
def _load_fonts(self):
"""Load fonts used by the scoreboard."""

View File

@@ -163,13 +163,30 @@ class BaseNHLManager:
return None
def _fetch_data(self, date_str: str = None) -> Optional[Dict]:
"""Fetch data using the new centralized method."""
# For live games, bypass the shared cache to ensure fresh data
"""
Fetch data using background service cache first, fallback to direct API call.
This eliminates redundant caching and ensures Recent/Upcoming managers
use the same data source as the background service.
"""
# For Live managers, always fetch fresh data
if isinstance(self, NHLLiveManager):
return self._fetch_nhl_api_data(use_cache=False)
else:
# For non-live games, use the shared cache
return self._fetch_nhl_api_data(use_cache=True)
# For Recent/Upcoming managers, try to use background service cache first
from datetime import datetime
import pytz
cache_key = f"nhl_{datetime.now(pytz.utc).strftime('%Y%m%d')}"
# Check if background service has fresh data
if self.cache_manager.is_background_data_available(cache_key, 'nhl'):
cached_data = self.cache_manager.get_background_cached_data(cache_key, 'nhl')
if cached_data:
self.logger.info(f"[NHL] Using background service cache for {cache_key}")
return cached_data
# Fallback to direct API call if background data not available
self.logger.info(f"[NHL] Background data not available, fetching directly for {cache_key}")
return self._fetch_nhl_api_data(use_cache=True)
def _load_fonts(self):
"""Load fonts used by the scoreboard."""

View File

@@ -332,11 +332,30 @@ class BaseSoccerManager:
return set(self.target_leagues_config)
def _fetch_data(self, date_str: str = None) -> Optional[Dict]:
"""Fetch data using shared data mechanism or live fetching per league."""
"""
Fetch data using background service cache first, fallback to direct API call.
This eliminates redundant caching and ensures Recent/Upcoming managers
use the same data source as the background service.
"""
# For Live managers, always fetch fresh data
if isinstance(self, SoccerLiveManager) and not self.test_mode:
return self._fetch_soccer_api_data(use_cache=False)
else:
return self._fetch_soccer_api_data(use_cache=True)
# For Recent/Upcoming managers, try to use background service cache first
from datetime import datetime
import pytz
cache_key = f"soccer_{datetime.now(pytz.utc).strftime('%Y%m%d')}"
# Check if background service has fresh data
if self.cache_manager.is_background_data_available(cache_key, 'soccer'):
cached_data = self.cache_manager.get_background_cached_data(cache_key, 'soccer')
if cached_data:
self.logger.info(f"[Soccer] Using background service cache for {cache_key}")
return cached_data
# Fallback to direct API call if background data not available
self.logger.info(f"[Soccer] Background data not available, fetching directly for {cache_key}")
return self._fetch_soccer_api_data(use_cache=True)
def _load_fonts(self):
"""Load fonts used by the scoreboard."""