feat: implement caching for ESPN API responses to reduce API calls

This commit is contained in:
ChuckBuilds
2025-04-19 18:25:01 -05:00
parent 09649f0a07
commit 471790c851
3 changed files with 72 additions and 110 deletions

View File

@@ -16,52 +16,80 @@ class DateTimeEncoder(json.JSONEncoder):
return super().default(obj)
class CacheManager:
def __init__(self, cache_dir: str = None):
"""Manages caching of API responses to reduce API calls."""
def __init__(self, cache_dir: str = "cache"):
self.cache_dir = cache_dir
self._ensure_cache_dir()
self._memory_cache = {} # In-memory cache for faster access
self.logger = logging.getLogger(__name__)
self._memory_cache = {}
self._memory_cache_timestamps = {}
self._cache_lock = threading.Lock()
# Try to determine the best cache directory location
if cache_dir:
self.cache_dir = cache_dir
else:
# Try user's home directory first
home_dir = os.path.expanduser("~")
if os.access(home_dir, os.W_OK):
self.cache_dir = os.path.join(home_dir, ".ledmatrix_cache")
else:
# Fall back to system temp directory
self.cache_dir = os.path.join(tempfile.gettempdir(), "ledmatrix_cache")
self._ensure_cache_dir()
def _ensure_cache_dir(self) -> None:
"""Ensure cache directory exists with proper permissions."""
try:
if not os.path.exists(self.cache_dir):
# Create directory with 755 permissions (rwxr-xr-x)
os.makedirs(self.cache_dir, mode=0o755, exist_ok=True)
self.logger.info(f"Created cache directory: {self.cache_dir}")
def _ensure_cache_dir(self):
"""Ensure the cache directory exists."""
if not os.path.exists(self.cache_dir):
os.makedirs(self.cache_dir)
# Verify we have write permissions
if not os.access(self.cache_dir, os.W_OK):
raise PermissionError(f"No write access to cache directory: {self.cache_dir}")
except Exception as e:
self.logger.error(f"Error setting up cache directory: {e}")
# Fall back to system temp directory
self.cache_dir = os.path.join(tempfile.gettempdir(), "ledmatrix_cache")
try:
os.makedirs(self.cache_dir, mode=0o755, exist_ok=True)
self.logger.info(f"Using temporary cache directory: {self.cache_dir}")
except Exception as e:
self.logger.error(f"Failed to create temporary cache directory: {e}")
raise
def _get_cache_path(self, key: str) -> str:
"""Get the path for a cache file."""
return os.path.join(self.cache_dir, f"{key}.json")
def get_cached_data(self, key: str, max_age: int = 300) -> Optional[Dict]:
    """Return cached data for *key* if it is younger than *max_age* seconds.

    The in-memory cache (``{key: (data, timestamp)}``) is consulted first;
    on a miss or an expired entry the on-disk JSON file is tried, judged by
    the file's mtime. Returns ``None`` when no fresh entry exists or the
    file cannot be read.
    """
    now = time.time()

    # Fast path: a fresh in-memory entry wins outright.
    entry = self._memory_cache.get(key)
    if entry is not None:
        cached, stored_at = entry
        if now - stored_at <= max_age:
            return cached

    # Slow path: fall back to the on-disk JSON file.
    path = self._get_cache_path(key)
    if not os.path.exists(path):
        return None
    try:
        if now - os.path.getmtime(path) > max_age:
            return None  # file exists but is stale
        with self._cache_lock:
            with open(path, 'r') as fh:
                payload = json.load(fh)
            # Promote the disk hit into the in-memory cache.
            self._memory_cache[key] = (payload, time.time())
            return payload
    except Exception:
        # Best-effort cache: any read/parse problem is treated as a miss.
        return None
def save_cache(self, key: str, data: Dict) -> None:
    """
    Save data to cache.

    The JSON file is written atomically (temp file + ``os.replace``) so a
    crash mid-write can never leave a corrupt cache file behind — matching
    the atomic pattern used elsewhere in this class. The in-memory cache
    is updated alongside the file.

    Args:
        key: Cache key
        data: Data to cache
    """
    cache_path = self._get_cache_path(key)
    # Known up front so the cleanup path below can always reference it.
    temp_path = f"{cache_path}.tmp"
    try:
        with self._cache_lock:
            # Write to a temp file first, then atomically rename into place.
            with open(temp_path, 'w') as f:
                json.dump(data, f)
            os.replace(temp_path, cache_path)
            # Update memory cache
            self._memory_cache[key] = (data, time.time())
    except Exception:
        # Best-effort cache: a failed save must never break the caller,
        # but don't leave a stray temp file behind.
        if os.path.exists(temp_path):
            try:
                os.remove(temp_path)
            except OSError:
                pass
def load_cache(self, key: str) -> Optional[Dict[str, Any]]:
"""Load data from cache with memory caching."""
@@ -98,72 +126,6 @@ class CacheManager:
self.logger.error(f"Error loading cache for {key}: {e}")
return None
def save_cache(self, key: str, data: Dict[str, Any]) -> None:
    """Save data to cache with memory caching.

    The on-disk payload is wrapped as ``{'timestamp': ..., 'data': ...}``
    because the companion ``get_cached_data`` reads exactly those two keys
    to judge staleness — writing the raw dict (as before) produced files
    that could never be read back (``timestamp`` defaulted to 0, so every
    disk entry looked stale).
    """
    cache_path = self._get_cache_path(key)
    # Defined before ``try`` so the cleanup in ``except`` can never hit a
    # NameError when the failure happens before the file is written.
    temp_path = f"{cache_path}.tmp"
    current_time = time.time()
    try:
        with self._cache_lock:
            # Update memory cache first
            self._memory_cache[key] = data
            self._memory_cache_timestamps[key] = current_time
            # Write to a temporary file, then atomically rename so readers
            # never observe a half-written cache file.
            with open(temp_path, 'w') as f:
                json.dump({'timestamp': current_time, 'data': data}, f,
                          cls=DateTimeEncoder)
            os.replace(temp_path, cache_path)
    except Exception as e:
        self.logger.error(f"Error saving cache for {key}: {e}")
        # Clean up temp file if it exists
        if os.path.exists(temp_path):
            try:
                os.remove(temp_path)
            except OSError:
                pass
def get_cached_data(self, key: str, max_age: int = 60) -> Optional[Dict[str, Any]]:
    """Get cached data with memory cache priority and max age check."""
    now = time.time()

    # Memory cache first: return a fresh hit, evict an expired one and
    # fall through to disk.
    if key in self._memory_cache:
        stored_at = self._memory_cache_timestamps.get(key, 0)
        if now - stored_at < max_age:
            return self._memory_cache[key]
        del self._memory_cache[key]
        del self._memory_cache_timestamps[key]

    # Fall back to disk cache
    cache_path = self._get_cache_path(key)
    if not os.path.exists(cache_path):
        return None

    try:
        with self._cache_lock:
            with open(cache_path, 'r') as fh:
                try:
                    payload = json.load(fh)
                except json.JSONDecodeError as e:
                    self.logger.error(f"Error parsing cache file for {key}: {e}")
                    # A corrupted file is useless - drop it.
                    os.remove(cache_path)
                    return None
                if now - payload.get('timestamp', 0) > max_age:
                    return None  # on-disk entry is stale
                # Promote the disk hit into the memory cache.
                self._memory_cache[key] = payload['data']
                self._memory_cache_timestamps[key] = now
                return payload['data']
    except Exception as e:
        self.logger.error(f"Error loading cache for {key}: {e}")
        return None
def clear_cache(self, key: Optional[str] = None) -> None:
"""Clear cache for a specific key or all keys."""
with self._cache_lock:

View File

@@ -251,7 +251,7 @@ class BaseNBAManager:
return None
def _fetch_data(self, date_str: str = None) -> Optional[Dict]:
"""Fetch data from ESPN API."""
"""Fetch data from ESPN API with caching."""
if self.test_mode:
return self._load_test_data()

View File

@@ -193,7 +193,7 @@ class BaseNHLManager:
return None
def _fetch_data(self, date_str: str = None) -> Optional[Dict]:
"""Fetch data from ESPN API."""
"""Fetch data from ESPN API with caching."""
if self.test_mode:
return self._load_test_data()
@@ -205,7 +205,7 @@ class BaseNHLManager:
try:
# Check cache first
cache_key = date_str if date_str else 'today'
cached_data = CacheManager.get(cache_key, max_age=self.update_interval)
cached_data = self.cache_manager.get_cached_data(cache_key, max_age=self.update_interval)
if cached_data:
self.logger.info(f"[NHL] Using cached data for {cache_key}")
return cached_data
@@ -217,7 +217,7 @@ class BaseNHLManager:
self.logger.info(f"[NHL] Successfully fetched data from ESPN API")
# Cache the response
CacheManager.set(cache_key, data)
self.cache_manager.save_cache(cache_key, data)
# If no date specified, fetch data from multiple days
if not date_str:
@@ -234,7 +234,7 @@ class BaseNHLManager:
for fetch_date in dates_to_fetch:
if fetch_date != today.strftime('%Y%m%d'): # Skip today as we already have it
# Check cache for this date
cached_date_data = CacheManager.get(fetch_date, max_age=self.update_interval)
cached_date_data = self.cache_manager.get_cached_data(fetch_date, max_age=self.update_interval)
if cached_date_data:
self.logger.info(f"[NHL] Using cached data for date {fetch_date}")
if "events" in cached_date_data:
@@ -249,7 +249,7 @@ class BaseNHLManager:
all_events.extend(date_data["events"])
self.logger.info(f"[NHL] Fetched {len(date_data['events'])} events for date {fetch_date}")
# Cache the response
CacheManager.set(fetch_date, date_data)
self.cache_manager.save_cache(fetch_date, date_data)
# Combine events from all dates
if all_events: