Fixed an intermittent timezone error caused by the old time import method

This commit is contained in:
Chuck
2025-07-20 19:34:02 -05:00
parent c958a1c094
commit 9dcedc7c0e
5 changed files with 72 additions and 69 deletions

View File

@@ -161,7 +161,7 @@
}
},
"nfl_scoreboard": {
"enabled": true,
"enabled": false,
"show_odds": false,
"test_mode": false,
"update_interval_seconds": 3600,
@@ -179,7 +179,7 @@
}
},
"ncaa_fb_scoreboard": {
"enabled": true,
"enabled": false,
"show_odds": false,
"test_mode": false,
"update_interval_seconds": 3600,

View File

@@ -69,6 +69,12 @@ class BaseNBAManager:
self.logger.info(f"Initialized NBA manager with display dimensions: {self.display_width}x{self.display_height}")
self.logger.info(f"Logo directory: {self.logo_dir}")
def _get_timezone(self):
    """Return the configured timezone as a pytz tzinfo object.

    Reads the timezone name from the config manager and resolves it via
    pytz. Falls back to UTC when the configured name is not a valid
    pytz timezone, so callers always receive a usable tzinfo.
    """
    try:
        return pytz.timezone(self.config_manager.get_timezone())
    except pytz.UnknownTimeZoneError:
        # Unknown or misspelled timezone name in config — degrade to UTC
        # rather than propagate the error to every datetime call site.
        return pytz.utc
def _should_log(self, message_type: str, cooldown: int = 300) -> bool:
"""Check if a message should be logged based on cooldown period."""
current_time = time.time()

View File

@@ -162,21 +162,20 @@ class BaseNCAAFBManager: # Renamed class
except Exception as e:
self.logger.error(f"Error fetching odds for game {game.get('id', 'N/A')}: {e}")
@classmethod
def _fetch_shared_data(cls, past_days: int, future_days: int, date_str: str = None) -> Optional[Dict]:
def _fetch_shared_data(self, past_days: int, future_days: int, date_str: str = None) -> Optional[Dict]:
"""Fetch and cache data for all managers to share."""
current_time = time.time()
if cls._shared_data and (current_time - cls._last_shared_update) < 300:
return cls._shared_data
if BaseNCAAFBManager._shared_data and (current_time - BaseNCAAFBManager._last_shared_update) < 300:
return BaseNCAAFBManager._shared_data
try:
cache_key = date_str if date_str else 'today_ncaafb' # Changed cache key prefix
cached_data = cls.cache_manager.get(cache_key, max_age=300)
cached_data = BaseNCAAFBManager.cache_manager.get(cache_key, max_age=300)
if cached_data:
cls.logger.info(f"[NCAAFB] Using cached data for {cache_key}")
cls._shared_data = cached_data
cls._last_shared_update = current_time
BaseNCAAFBManager.logger.info(f"[NCAAFB] Using cached data for {cache_key}")
BaseNCAAFBManager._shared_data = cached_data
BaseNCAAFBManager._last_shared_update = current_time
return cached_data
url = ESPN_NCAAFB_SCOREBOARD_URL # Use NCAA FB URL
@@ -187,21 +186,21 @@ class BaseNCAAFBManager: # Renamed class
response = requests.get(url, params=params)
response.raise_for_status()
data = response.json()
cls.logger.info(f"[NCAAFB] Successfully fetched data from ESPN API")
BaseNCAAFBManager.logger.info(f"[NCAAFB] Successfully fetched data from ESPN API")
cls.cache_manager.set(cache_key, data)
cls._shared_data = data
cls._last_shared_update = current_time
BaseNCAAFBManager.cache_manager.set(cache_key, data)
BaseNCAAFBManager._shared_data = data
BaseNCAAFBManager._last_shared_update = current_time
if not date_str:
today = datetime.now(cls._get_timezone()).date()
today = datetime.now(self._get_timezone()).date()
dates_to_fetch = []
# Generate dates from past_days ago to future_days ahead
for i in range(-past_days, future_days + 1):
fetch_dt = today + timedelta(days=i)
dates_to_fetch.append(fetch_dt.strftime('%Y%m%d'))
cls.logger.info(f"[NCAAFB] Fetching data for dates: {dates_to_fetch}")
BaseNCAAFBManager.logger.info(f"[NCAAFB] Fetching data for dates: {dates_to_fetch}")
all_events = []
# Fetch data for each date (excluding today if already fetched)
@@ -210,9 +209,9 @@ class BaseNCAAFBManager: # Renamed class
continue
date_cache_key = f"{fetch_date}_ncaafb" # Changed cache key suffix
cached_date_data = cls.cache_manager.get(date_cache_key, max_age=300)
cached_date_data = BaseNCAAFBManager.cache_manager.get(date_cache_key, max_age=300)
if cached_date_data:
cls.logger.info(f"[NCAAFB] Using cached data for date {fetch_date}")
BaseNCAAFBManager.logger.info(f"[NCAAFB] Using cached data for date {fetch_date}")
if "events" in cached_date_data:
all_events.extend(cached_date_data["events"])
continue
@@ -223,19 +222,19 @@ class BaseNCAAFBManager: # Renamed class
date_data = response.json()
if date_data and "events" in date_data:
all_events.extend(date_data["events"])
cls.logger.info(f"[NCAAFB] Fetched {len(date_data['events'])} events for date {fetch_date}")
cls.cache_manager.set(date_cache_key, date_data)
BaseNCAAFBManager.logger.info(f"[NCAAFB] Fetched {len(date_data['events'])} events for date {fetch_date}")
BaseNCAAFBManager.cache_manager.set(date_cache_key, date_data)
if all_events:
if "events" not in data: data["events"] = [] # Ensure 'events' key exists
data["events"].extend(all_events)
cls.logger.info(f"[NCAAFB] Combined {len(data['events'])} total events from all dates")
cls._shared_data = data
cls._last_shared_update = current_time
BaseNCAAFBManager.logger.info(f"[NCAAFB] Combined {len(data['events'])} total events from all dates")
BaseNCAAFBManager._shared_data = data
BaseNCAAFBManager._last_shared_update = current_time
return data
except requests.exceptions.RequestException as e:
cls.logger.error(f"[NCAAFB] Error fetching data from ESPN: {e}")
BaseNCAAFBManager.logger.error(f"[NCAAFB] Error fetching data from ESPN: {e}")
return None
def _fetch_data(self, date_str: str = None) -> Optional[Dict]:

View File

@@ -162,21 +162,20 @@ class BaseNFLManager: # Renamed class
except Exception as e:
self.logger.error(f"Error fetching odds for game {game.get('id', 'N/A')}: {e}")
@classmethod
def _fetch_shared_data(cls, past_days: int, future_days: int, date_str: str = None) -> Optional[Dict]:
def _fetch_shared_data(self, past_days: int, future_days: int, date_str: str = None) -> Optional[Dict]:
"""Fetch and cache data for all managers to share."""
current_time = time.time()
if cls._shared_data and (current_time - cls._last_shared_update) < 300:
return cls._shared_data
if BaseNFLManager._shared_data and (current_time - BaseNFLManager._last_shared_update) < 300:
return BaseNFLManager._shared_data
try:
cache_key = date_str if date_str else 'today_nfl' # Changed cache key prefix
cached_data = cls.cache_manager.get(cache_key, max_age=300)
cached_data = BaseNFLManager.cache_manager.get(cache_key, max_age=300)
if cached_data:
cls.logger.info(f"[NFL] Using cached data for {cache_key}")
cls._shared_data = cached_data
cls._last_shared_update = current_time
BaseNFLManager.logger.info(f"[NFL] Using cached data for {cache_key}")
BaseNFLManager._shared_data = cached_data
BaseNFLManager._last_shared_update = current_time
return cached_data
url = ESPN_NFL_SCOREBOARD_URL # Use NFL URL
@@ -187,21 +186,21 @@ class BaseNFLManager: # Renamed class
response = requests.get(url, params=params)
response.raise_for_status()
data = response.json()
cls.logger.info(f"[NFL] Successfully fetched data from ESPN API")
BaseNFLManager.logger.info(f"[NFL] Successfully fetched data from ESPN API")
cls.cache_manager.set(cache_key, data)
cls._shared_data = data
cls._last_shared_update = current_time
BaseNFLManager.cache_manager.set(cache_key, data)
BaseNFLManager._shared_data = data
BaseNFLManager._last_shared_update = current_time
if not date_str:
today = datetime.now(cls._get_timezone()).date()
today = datetime.now(self._get_timezone()).date()
dates_to_fetch = []
# Generate dates from past_days ago to future_days ahead
for i in range(-past_days, future_days + 1):
fetch_dt = today + timedelta(days=i)
dates_to_fetch.append(fetch_dt.strftime('%Y%m%d'))
cls.logger.info(f"[NFL] Fetching data for dates: {dates_to_fetch}")
BaseNFLManager.logger.info(f"[NFL] Fetching data for dates: {dates_to_fetch}")
all_events = []
# Fetch data for each date (excluding today if already fetched)
@@ -210,9 +209,9 @@ class BaseNFLManager: # Renamed class
continue
date_cache_key = f"{fetch_date}_nfl"
cached_date_data = cls.cache_manager.get(date_cache_key, max_age=300)
cached_date_data = BaseNFLManager.cache_manager.get(date_cache_key, max_age=300)
if cached_date_data:
cls.logger.info(f"[NFL] Using cached data for date {fetch_date}")
BaseNFLManager.logger.info(f"[NFL] Using cached data for date {fetch_date}")
if "events" in cached_date_data:
all_events.extend(cached_date_data["events"])
continue
@@ -223,19 +222,19 @@ class BaseNFLManager: # Renamed class
date_data = response.json()
if date_data and "events" in date_data:
all_events.extend(date_data["events"])
cls.logger.info(f"[NFL] Fetched {len(date_data['events'])} events for date {fetch_date}")
cls.cache_manager.set(date_cache_key, date_data)
BaseNFLManager.logger.info(f"[NFL] Fetched {len(date_data['events'])} events for date {fetch_date}")
BaseNFLManager.cache_manager.set(date_cache_key, date_data)
if all_events:
if "events" not in data: data["events"] = [] # Ensure 'events' key exists
data["events"].extend(all_events)
cls.logger.info(f"[NFL] Combined {len(data['events'])} total events from all dates")
cls._shared_data = data
cls._last_shared_update = current_time
BaseNFLManager.logger.info(f"[NFL] Combined {len(data['events'])} total events from all dates")
BaseNFLManager._shared_data = data
BaseNFLManager._last_shared_update = current_time
return data
except requests.exceptions.RequestException as e:
cls.logger.error(f"[NFL] Error fetching data from ESPN: {e}")
BaseNFLManager.logger.error(f"[NFL] Error fetching data from ESPN: {e}")
return None
def _fetch_data(self, date_str: str = None) -> Optional[Dict]:

View File

@@ -140,48 +140,47 @@ class BaseNHLManager:
except pytz.UnknownTimeZoneError:
return pytz.utc
@classmethod
def _fetch_shared_data(cls, date_str: str = None) -> Optional[Dict]:
def _fetch_shared_data(self, date_str: str = None) -> Optional[Dict]:
"""Fetch and cache data for all managers to share."""
current_time = time.time()
# If we have recent data, use it
if cls._shared_data and (current_time - cls._last_shared_update) < 300: # 5 minutes
return cls._shared_data
if BaseNHLManager._shared_data and (current_time - BaseNHLManager._last_shared_update) < 300: # 5 minutes
return BaseNHLManager._shared_data
try:
# Check cache first
cache_key = date_str if date_str else 'today'
cached_data = cls.cache_manager.get(cache_key, max_age=300) # 5 minutes cache
cached_data = BaseNHLManager.cache_manager.get(cache_key, max_age=300) # 5 minutes cache
if cached_data:
cls.logger.info(f"[NHL] Using cached data for {cache_key}")
cls._shared_data = cached_data
cls._last_shared_update = current_time
BaseNHLManager.logger.info(f"[NHL] Using cached data for {cache_key}")
BaseNHLManager._shared_data = cached_data
BaseNHLManager._last_shared_update = current_time
return cached_data
# If not in cache or stale, fetch from API
if not date_str:
# Get today's date in YYYY-MM-DD format
today = datetime.now(cls._get_timezone()).date()
today = datetime.now(self._get_timezone()).date()
date_str = today.strftime('%Y-%m-%d')
url = f"{NHL_API_BASE_URL}{date_str}"
cls.logger.info(f"Fetching data from URL: {url}")
BaseNHLManager.logger.info(f"Fetching data from URL: {url}")
response = requests.get(url)
response.raise_for_status()
data = response.json()
cls.logger.info(f"[NHL] Successfully fetched data from NHL API")
BaseNHLManager.logger.info(f"[NHL] Successfully fetched data from NHL API")
# Cache the response
cls.cache_manager.set(cache_key, data)
cls._shared_data = data
cls._last_shared_update = current_time
BaseNHLManager.cache_manager.set(cache_key, data)
BaseNHLManager._shared_data = data
BaseNHLManager._last_shared_update = current_time
# If no date specified, fetch data from multiple days
if not date_str:
# Get today's date in YYYYMMDD format
today = datetime.now(cls._get_timezone()).date()
today = datetime.now(self._get_timezone()).date()
dates_to_fetch = [
(today - timedelta(days=2)).strftime('%Y-%m-%d'),
(today - timedelta(days=1)).strftime('%Y-%m-%d'),
@@ -193,9 +192,9 @@ class BaseNHLManager:
for fetch_date in dates_to_fetch:
if fetch_date != today.strftime('%Y-%m-%d'): # Skip today as we already have it
# Check cache for this date
cached_date_data = cls.cache_manager.get(fetch_date, max_age=300)
cached_date_data = BaseNHLManager.cache_manager.get(fetch_date, max_age=300)
if cached_date_data:
cls.logger.info(f"[NHL] Using cached data for date {fetch_date}")
BaseNHLManager.logger.info(f"[NHL] Using cached data for date {fetch_date}")
if "events" in cached_date_data:
all_events.extend(cached_date_data["events"])
continue
@@ -206,20 +205,20 @@ class BaseNHLManager:
date_data = response.json()
if date_data and "events" in date_data:
all_events.extend(date_data["events"])
cls.logger.info(f"[NHL] Fetched {len(date_data['events'])} events for date {fetch_date}")
BaseNHLManager.logger.info(f"[NHL] Fetched {len(date_data['events'])} events for date {fetch_date}")
# Cache the response
cls.cache_manager.set(fetch_date, date_data)
BaseNHLManager.cache_manager.set(fetch_date, date_data)
# Combine events from all dates
if all_events:
data["events"].extend(all_events)
cls.logger.info(f"[NHL] Combined {len(data['events'])} total events from all dates")
cls._shared_data = data
cls._last_shared_update = current_time
BaseNHLManager.logger.info(f"[NHL] Combined {len(data['events'])} total events from all dates")
BaseNHLManager._shared_data = data
BaseNHLManager._last_shared_update = current_time
return data
except requests.exceptions.RequestException as e:
cls.logger.error(f"[NHL] Error fetching data from NHL: {e}")
BaseNHLManager.logger.error(f"[NHL] Error fetching data from NHL: {e}")
return None
def _fetch_data(self, date_str: str = None) -> Optional[Dict]: