updates to caching

This commit is contained in:
Chuck
2025-07-21 21:02:19 -05:00
parent 2516a06ad3
commit 3a450b717a
7 changed files with 36 additions and 16 deletions

View File

@@ -255,6 +255,21 @@ class CacheManager:
     def _has_weather_changed(self, cached: Dict[str, Any], new: Dict[str, Any]) -> bool:
         """Check if weather data has changed."""
+        # Handle new cache structure where data is nested under 'data' key
+        if 'data' in cached:
+            cached = cached['data']
+        # Handle case where cached data might be the weather data directly
+        if 'current' in cached:
+            # This is the new structure with 'current' and 'forecast' keys
+            current_weather = cached.get('current', {})
+            if current_weather and 'main' in current_weather and 'weather' in current_weather:
+                cached_temp = round(current_weather['main']['temp'])
+                cached_condition = current_weather['weather'][0]['main']
+                return (cached_temp != new.get('temp') or
+                        cached_condition != new.get('condition'))
+        # Handle old structure where temp and condition are directly accessible
         return (cached.get('temp') != new.get('temp') or
                 cached.get('condition') != new.get('condition'))
@@ -346,7 +361,10 @@ class CacheManager:
     def get(self, key: str, max_age: int = 300) -> Optional[Dict]:
         """Get data from cache if it exists and is not stale."""
-        return self.get_cached_data(key, max_age)
+        cached_data = self.get_cached_data(key, max_age)
+        if cached_data and 'data' in cached_data:
+            return cached_data['data']
+        return cached_data

     def set(self, key: str, data: Dict) -> None:
         """Store data in cache with current timestamp."""

View File

@@ -277,7 +277,7 @@ class BaseNBAManager:
         try:
             # Check cache first
            cache_key = date_str if date_str else 'today'
-            cached_data = cls.cache_manager.get_cached_data(cache_key, max_age=300) # 5 minutes cache
+            cached_data = cls.cache_manager.get(cache_key)
             if cached_data:
                 cls.logger.info(f"[NBA] Using cached data for {cache_key}")
                 cls._shared_data = cached_data
@@ -296,7 +296,7 @@ class BaseNBAManager:
             cls.logger.info(f"[NBA] Successfully fetched data from ESPN API")
             # Cache the response
-            cls.cache_manager.save_cache(cache_key, data)
+            cls.cache_manager.update_cache(cache_key, data)
             cls._shared_data = data
             cls._last_shared_update = current_time
@@ -315,7 +315,7 @@ class BaseNBAManager:
             for fetch_date in dates_to_fetch:
                 if fetch_date != today.strftime('%Y%m%d'): # Skip today as we already have it
                     # Check cache for this date
-                    cached_date_data = cls.cache_manager.get_cached_data(fetch_date, max_age=300)
+                    cached_date_data = cls.cache_manager.get(fetch_date)
                     if cached_date_data:
                         cls.logger.info(f"[NBA] Using cached data for date {fetch_date}")
                         if "events" in cached_date_data:
@@ -330,7 +330,7 @@ class BaseNBAManager:
                         all_events.extend(date_data["events"])
                         cls.logger.info(f"[NBA] Fetched {len(date_data['events'])} events for date {fetch_date}")
                         # Cache the response
-                        cls.cache_manager.save_cache(fetch_date, date_data)
+                        cls.cache_manager.update_cache(fetch_date, date_data)
             # Combine events from all dates
             if all_events:

View File

@@ -311,7 +311,7 @@ class BaseNCAAMBasketballManager:
         try:
             # Check cache first
             cache_key = f"ncaam_basketball_{date_str}" if date_str else 'ncaam_basketball_today' # Prefix cache key
-            cached_data = cls.cache_manager.get_cached_data(cache_key, max_age=300) # 5 minutes cache
+            cached_data = cls.cache_manager.get(cache_key)
             if cached_data:
                 cls.logger.info(f"[NCAAMBasketball] Using cached data for {cache_key}")
                 cls._shared_data = cached_data
@@ -330,7 +330,7 @@ class BaseNCAAMBasketballManager:
             cls.logger.info(f"[NCAAMBasketball] Successfully fetched data from ESPN API")
             # Cache the response
-            cls.cache_manager.save_cache(cache_key, data)
+            cls.cache_manager.update_cache(cache_key, data)
             cls._shared_data = data
             cls._last_shared_update = current_time
@@ -350,7 +350,7 @@ class BaseNCAAMBasketballManager:
                 if fetch_date != today.strftime('%Y%m%d'): # Skip today as we already have it
                     date_cache_key = f"ncaam_basketball_{fetch_date}" # Prefix cache key
                     # Check cache for this date
-                    cached_date_data = cls.cache_manager.get_cached_data(date_cache_key, max_age=300)
+                    cached_date_data = cls.cache_manager.get(date_cache_key)
                     if cached_date_data:
                         cls.logger.info(f"[NCAAMBasketball] Using cached data for date {fetch_date}")
                         if "events" in cached_date_data:
@@ -365,7 +365,7 @@ class BaseNCAAMBasketballManager:
                         all_events.extend(date_data["events"])
                         cls.logger.info(f"[NCAAMBasketball] Fetched {len(date_data['events'])} events for date {fetch_date}")
                         # Cache the response
-                        cls.cache_manager.save_cache(date_cache_key, date_data)
+                        cls.cache_manager.update_cache(date_cache_key, date_data)
             # Combine events from all dates
             if all_events:

View File

@@ -17,7 +17,7 @@ class OddsManager:
         cache_key = f"odds_espn_{sport}_{league}_{event_id}"

         # Check cache first with 1-hour update interval
-        cached_data = self.cache_manager.get_cached_data(cache_key, max_age=update_interval_seconds)
+        cached_data = self.cache_manager.get(cache_key)
         if cached_data:
             self.logger.info(f"Using cached odds from ESPN for {cache_key}")
@@ -37,12 +37,12 @@ class OddsManager:
             self.logger.info(f"Extracted odds data: {odds_data}")
             if odds_data:
-                self.cache_manager.save_cache(cache_key, odds_data)
+                self.cache_manager.update_cache(cache_key, odds_data)
                 self.logger.info(f"Saved odds data to cache for {cache_key}")
             else:
                 self.logger.warning(f"No odds data extracted for {cache_key}")
                 # Cache the fact that no odds are available to avoid repeated API calls
-                self.cache_manager.save_cache(cache_key, {"no_odds": True})
+                self.cache_manager.update_cache(cache_key, {"no_odds": True})
             return odds_data
@@ -51,7 +51,7 @@ class OddsManager:
         except json.JSONDecodeError:
             self.logger.error(f"Error decoding JSON response from ESPN API for {cache_key}.")
-            return self.cache_manager.load_cache(cache_key)
+            return self.cache_manager.get(cache_key)

     def _extract_espn_data(self, data: Dict[str, Any]) -> Optional[Dict[str, Any]]:
         self.logger.debug(f"Extracting ESPN odds data. Data keys: {list(data.keys())}")

View File

@@ -153,7 +153,7 @@ class StockManager:
         """Fetch stock or crypto data from Yahoo Finance public API."""
         # Try to get cached data first
         cache_key = 'crypto' if is_crypto else 'stocks'
-        cached_data = self.cache_manager.get_cached_data(cache_key)
+        cached_data = self.cache_manager.get(cache_key)
         if cached_data and symbol in cached_data:
             logger.info(f"Using cached data for {symbol}")
             return cached_data[symbol]

View File

@@ -162,7 +162,7 @@ class StockNewsManager:
             return

         # Get cached data
-        cached_data = self.cache_manager.get_cached_data('stock_news')
+        cached_data = self.cache_manager.get('stock_news')

         # Update each symbol
         new_data = {}
@@ -186,6 +186,8 @@ class StockNewsManager:
                 success = True

         if success:
+            # Cache the new data
+            self.cache_manager.update_cache('stock_news', new_data)
             # Only update the displayed data when we have new data
             self.news_data = new_data
             self.last_update = current_time

View File

@@ -73,7 +73,7 @@ class WeatherManager:
             return

         # Try to get cached data first
-        cached_data = self.cache_manager.get_cached_data('weather')
+        cached_data = self.cache_manager.get('weather')
         if cached_data:
             self.weather_data = cached_data.get('current')
             self.forecast_data = cached_data.get('forecast')