253 lines
8.7 KiB
Python
253 lines
8.7 KiB
Python
import requests, json, re, os, shutil, glob
|
|
from datetime import datetime
|
|
|
|
# --- FEED URLS ---

# HockeyTech "statviewfeed" schedule endpoint for the EOJHL (team 54, season 110,
# all months). The response is JSONP wrapped in angular.callbacks._2; the
# wrapper is stripped by fetch_jsonp() below.
SCHEDULE_URL = (
    "https://lscluster.hockeytech.com/feed/index.php?"
    "feed=statviewfeed&view=schedule&team=54&season=110"
    "&month=-1&location=homeaway&key=1defb601c9b37c24"
    "&client_code=eojhl&site_id=2&league_id=2&conference_id=-1"
    "&division_id=-1&lang=en&callback=angular.callbacks._2"
)

# Output file for the transformed scoreboard JSON.
SCHEDULE_FILE = "eojhl_scoreboard.json"

# How many timestamped .bak copies of the output file to keep (see backup_file).
MAX_BACKUPS = 2

# --- TEAM ABBREVIATION MAP ---

# Maps the feed's team city names to scoreboard abbreviations. The abbreviation
# is also used to build the logo path (assets/sports/eojhl_logos/<ABBR>.png).
TEAM_ABBR_MAP = {
    "Ottawa": "OJC",
    "Carleton Place": "CPC",
    "Ottawa West": "OTW",
    "Richmond": "RCH",
    "Casselman": "CAS",
    "Smiths Falls": "SFB",
    "Embrun": "EMB",
    "Perth": "PER",
    "Glengarry": "GB",
    "Arnprior": "ARP",
    "Athens": "ATH",
    "Renfrew": "REN",
    "Winchester": "WIN"
}
|
|
|
|
# --- HELPERS ---
|
|
|
|
def backup_file(filename):
    """Back up an existing file with a timestamp suffix and prune old backups.

    Moves ``filename`` to ``<filename>.<YYYYmmdd-HHMMSS>.bak``, then deletes
    all but the newest MAX_BACKUPS backups. No-op when the file does not exist.

    Args:
        filename: Path of the file to back up.
    """
    if os.path.exists(filename):
        ts = datetime.now().strftime("%Y%m%d-%H%M%S")
        # BUG FIX: the f-strings previously used a literal placeholder instead
        # of the `filename` parameter, so backups went to the wrong name and
        # pruning matched the wrong glob.
        backup = f"{filename}.{ts}.bak"
        shutil.move(filename, backup)
        # Timestamps sort lexicographically, so reverse-sorted == newest first.
        # glob.escape guards against glob metacharacters in the path.
        backups = sorted(glob.glob(f"{glob.escape(filename)}.*.bak"), reverse=True)
        for old in backups[MAX_BACKUPS:]:
            os.remove(old)
        print(f"Backed up {filename} -> {backup}")
|
|
|
|
def fetch_jsonp(url):
    """Fetch a JSONP URL and strip the angular.callbacks wrapper.

    Args:
        url: Feed URL whose response is ``angular.callbacks._N(<json>);``.

    Returns:
        The decoded payload when it is a list (the feed's expected top-level
        format), otherwise None (missing wrapper, bad JSON, or wrong shape).
    """
    # BUG FIX: a missing timeout can hang the script forever on a stalled
    # connection; 30s is generous for this feed.
    r = requests.get(url, timeout=30)

    # Robust stripping of the angular.callbacks wrapper.
    match = re.search(r"angular\.callbacks\._\d+\s*\((.*)\);?\s*$", r.text, re.DOTALL)
    if not match:
        return None

    try:
        data = json.loads(match.group(1).strip())
    except json.JSONDecodeError:
        return None

    # Only a list is a valid schedule payload; anything else is rejected.
    return data if isinstance(data, list) else None
|
|
|
|
from datetime import datetime
|
|
from zoneinfo import ZoneInfo
|
|
|
|
def parse_game_date(raw_date, raw_status, year="2025"):
    """Parse a raw feed date and status into an ISO 8601 UTC string.

    Args:
        raw_date: Month/day string such as "Oct 14" (a trailing period after
            the month, e.g. "Oct. 14", is tolerated).
        raw_status: Status string; scheduled games carry the start time
            ("8:00 PM"), completed games do not ("Final").
        year: Season year to assume, since the feed omits it. Defaults to
            "2025" (the previous hard-coded value) for backward compatibility.

    Returns:
        "YYYY-MM-DDTHH:MM:SSZ" in UTC, or None when parsing fails.
    """
    cleaned_raw_date = raw_date.replace('.', '')

    # Scheduled games embed the start time in the status string.
    time_match = re.search(r"(\d{1,2}:\d{2}\s*[APMapm]{2})", raw_status)
    try:
        if time_match:
            time_str = time_match.group(1).upper().replace(" ", "")
            # strptime's %p needs a space before the AM/PM marker.
            time_str = f"{time_str[:-2]} {time_str[-2:]}"
        else:
            # Completed game: no time in the status, assume 7:30 PM local.
            time_str = "7:30 PM"

        dt_local = datetime.strptime(
            f"{cleaned_raw_date} {year} {time_str}",
            "%b %d %Y %I:%M %p",
        )
        # The feed's times are Eastern; attach the zone, then convert to UTC.
        dt_local = dt_local.replace(tzinfo=ZoneInfo("America/Toronto"))
        dt_utc = dt_local.astimezone(ZoneInfo("UTC"))
        return dt_utc.strftime("%Y-%m-%dT%H:%M:%SZ")

    except Exception as e:
        print(f"Date parse error: {e} for input {raw_date} {raw_status}")
        return None
|
|
|
|
|
|
|
|
def get_status_details(game_status):
    """Parse a game status string into the scoreboard status structure.

    Args:
        game_status: e.g. "Final", "Final OT", "7:30 pm EST",
            "2nd Period 15:23".

    Returns:
        Dict with keys name, state ("pre"/"in"/"post"), shortDetail,
        period, and displayClock.
    """
    # Default: scheduled game, status string shown as-is.
    status = {
        "name": "STATUS_SCHEDULED",
        "state": "pre",
        "shortDetail": game_status,
        "period": 0,
        "displayClock": "00:00"
    }

    if "Final" in game_status:
        status["state"] = "post"
        status["name"] = "STATUS_FINAL"
        status["period"] = 3
        status["shortDetail"] = "Final"

        if "OT" in game_status:
            status["shortDetail"] = "Final OT"
            status["name"] = "STATUS_FINAL_OVERTIME"
            status["period"] = 4
        elif "SO" in game_status:
            status["shortDetail"] = "Final SO"
            status["name"] = "STATUS_FINAL_SHOOTOUT"
            status["period"] = 4
        return status

    # In-progress: an ordinal period marker or a bare game clock.
    # BUG FIX: the clock pattern must not be followed by AM/PM, otherwise a
    # scheduled two-digit start time like "10:00 pm EST" was misclassified
    # as a running clock and the game marked in-progress.
    if re.search(r"(\d(st|nd|rd|th) Period|\d{2}:\d{2}(?!\s*[APap][Mm]))", game_status):
        status["state"] = "in"
        status["name"] = "STATUS_IN_PROGRESS"
        status["shortDetail"] = game_status

    return status
|
|
|
|
def _build_competitor(team_id, abbr, city, score, home_away):
    """Build one competitor entry (team metadata + score) for an event."""
    return {
        "id": team_id,
        "homeAway": home_away,
        "team": {
            "id": team_id,
            "abbreviation": abbr,
            "name": city,
            # Logo path is derived from the abbreviation; unknown teams get None.
            "logo": f"assets/sports/eojhl_logos/{abbr}.png" if abbr else None
        },
        "score": score,
        # Records are not provided by this feed; placeholder summary.
        "records": [{"summary": "0-0-0"}]
    }


def transform_schedule(raw_data):
    """Transform the raw HockeyTech schedule payload into a scoreboard dict.

    Args:
        raw_data: Decoded JSONP payload — a list whose first element holds
            "sections" -> [0] -> "data" (the list of game rows).

    Returns:
        {"events": [...]}; events is empty when the structure is missing
        or contains no games.
    """
    events = []

    game_list = None
    try:
        # Path: raw_data[0] -> "sections" (list) -> sections[0] (dict) -> "data" (list of games)
        game_list = raw_data[0].get("sections", [{}])[0].get("data")
    except (TypeError, IndexError, AttributeError):
        print("Error: Could not safely extract 'data' list from raw data structure.")
        return {"events": []}

    if not game_list:
        print("Error: 'data' list is empty or None.")
        return {"events": []}

    for game_data in game_list:
        game_row = game_data.get("row", {})
        game_prop = game_data.get("prop", {})

        # --- Extract Data ---
        home_city = game_row.get("home_team_city")
        away_city = game_row.get("visiting_team_city")
        home_score = game_row.get("home_goal_count", '0')
        away_score = game_row.get("visiting_goal_count", '0')
        game_id = game_row.get("game_id")
        game_status = game_row.get("game_status", "TBA")
        raw_date = game_row.get("date")

        # Skip rows missing any identifier needed to build an event.
        if not all([game_id, home_city, away_city, raw_date]):
            continue

        # --- Derive Data ---
        home_abbr = TEAM_ABBR_MAP.get(home_city)
        away_abbr = TEAM_ABBR_MAP.get(away_city)

        home_team_id = game_prop.get("home_team_city", {}).get("teamLink")
        away_team_id = game_prop.get("visiting_team_city", {}).get("teamLink")

        status_details = get_status_details(game_status)
        date_time_str = parse_game_date(raw_date, game_status)

        # Unplayed games report '-' (or empty) scores; normalize to '0'.
        home_score = home_score if home_score and home_score != '-' else '0'
        away_score = away_score if away_score and away_score != '-' else '0'

        # --- Build Event Object ---
        # The duplicated home/away competitor dicts are built by one helper.
        event = {
            "id": game_id,
            "date": date_time_str,
            "competitions": [{
                "status": {
                    "type": {
                        "name": status_details["name"],
                        "state": status_details["state"],
                        "shortDetail": status_details["shortDetail"]
                    },
                    "period": status_details["period"],
                    "displayClock": status_details["displayClock"]
                },
                "competitors": [
                    _build_competitor(home_team_id, home_abbr, home_city,
                                      home_score, "home"),
                    _build_competitor(away_team_id, away_abbr, away_city,
                                      away_score, "away")
                ]
            }]
        }
        events.append(event)

    return {"events": events}
|
|
|
|
# --- MAIN ---
if __name__ == "__main__":
    # Preserve the previous output before overwriting it.
    backup_file(SCHEDULE_FILE)
    print(f"Starting schedule data fetch at {datetime.now().isoformat()}")

    # Fetch and transform schedule data
    raw_payload = fetch_jsonp(SCHEDULE_URL)
    schedule = transform_schedule(raw_payload) if raw_payload else {"events": []}

    # Write output to file only when at least one event was produced.
    event_count = len(schedule["events"]) if schedule else 0
    if event_count:
        with open(SCHEDULE_FILE, "w") as out:
            json.dump(schedule, out, indent=2)
        print(f"Successfully wrote {event_count} events to {SCHEDULE_FILE}")
    else:
        print(f"Failed to transform schedule data. {SCHEDULE_FILE} not updated.")