Initial Commit
This commit is contained in:
2248
data/eojhl_scoreboard.json
Normal file
2248
data/eojhl_scoreboard.json
Normal file
File diff suppressed because it is too large
Load Diff
2248
data/eojhl_scoreboard.json.20251130-123746.bak
Normal file
2248
data/eojhl_scoreboard.json.20251130-123746.bak
Normal file
File diff suppressed because it is too large
Load Diff
2248
data/eojhl_scoreboard.json.20251130-180927.bak
Normal file
2248
data/eojhl_scoreboard.json.20251130-180927.bak
Normal file
File diff suppressed because it is too large
Load Diff
84
data/eojhl_standings.json
Normal file
84
data/eojhl_standings.json
Normal file
@@ -0,0 +1,84 @@
|
||||
{
|
||||
"martin": [
|
||||
{
|
||||
"name": "Ottawa Jr Canadians",
|
||||
"abbreviation": "OJC",
|
||||
"logo": "assets/sports/eojhl_logos/OJC.png",
|
||||
"record": "19-3-0"
|
||||
},
|
||||
{
|
||||
"name": "Ottawa West Golden Knights",
|
||||
"abbreviation": "OTW",
|
||||
"logo": "assets/sports/eojhl_logos/OTW.png",
|
||||
"record": "18-2-2"
|
||||
},
|
||||
{
|
||||
"name": "Casselman Vikings",
|
||||
"abbreviation": "CAS",
|
||||
"logo": "assets/sports/eojhl_logos/CAS.png",
|
||||
"record": "16-4-0"
|
||||
},
|
||||
{
|
||||
"name": "Glengarry Brigade",
|
||||
"abbreviation": "GB",
|
||||
"logo": "assets/sports/eojhl_logos/GB.png",
|
||||
"record": "11-8-3"
|
||||
},
|
||||
{
|
||||
"name": "Winchester Hawks",
|
||||
"abbreviation": "WIN",
|
||||
"logo": "assets/sports/eojhl_logos/WIN.png",
|
||||
"record": "10-11-0"
|
||||
},
|
||||
{
|
||||
"name": "Embrun Panthers",
|
||||
"abbreviation": "EMB",
|
||||
"logo": "assets/sports/eojhl_logos/EMB.png",
|
||||
"record": "6-10-7"
|
||||
}
|
||||
],
|
||||
"richardson": [
|
||||
{
|
||||
"name": "Arnprior Packers",
|
||||
"abbreviation": "ARN",
|
||||
"logo": "assets/sports/eojhl_logos/ARN.png",
|
||||
"record": "15-5-0"
|
||||
},
|
||||
{
|
||||
"name": "Richmond Royals",
|
||||
"abbreviation": "RCH",
|
||||
"logo": "assets/sports/eojhl_logos/RCH.png",
|
||||
"record": "10-10-1"
|
||||
},
|
||||
{
|
||||
"name": "Renfrew Timberwolves",
|
||||
"abbreviation": "REN",
|
||||
"logo": "assets/sports/eojhl_logos/REN.png",
|
||||
"record": "10-12-1"
|
||||
},
|
||||
{
|
||||
"name": "Athens Aeros",
|
||||
"abbreviation": "ATH",
|
||||
"logo": "assets/sports/eojhl_logos/ATH.png",
|
||||
"record": "7-14-4"
|
||||
},
|
||||
{
|
||||
"name": "Smiths Falls Jr. Bears",
|
||||
"abbreviation": "SFB",
|
||||
"logo": "assets/sports/eojhl_logos/SFB.png",
|
||||
"record": "8-14-1"
|
||||
},
|
||||
{
|
||||
"name": "Carleton Place Jr Canadians",
|
||||
"abbreviation": "CPC",
|
||||
"logo": "assets/sports/eojhl_logos/CPC.png",
|
||||
"record": "7-13-2"
|
||||
},
|
||||
{
|
||||
"name": "Perth Blue Wings",
|
||||
"abbreviation": "PER",
|
||||
"logo": "assets/sports/eojhl_logos/PER.png",
|
||||
"record": "5-13-2"
|
||||
}
|
||||
]
|
||||
}
|
||||
84
data/eojhl_standings.json.20251130-123743.bak
Normal file
84
data/eojhl_standings.json.20251130-123743.bak
Normal file
@@ -0,0 +1,84 @@
|
||||
{
|
||||
"martin": [
|
||||
{
|
||||
"name": "Ottawa Jr Canadians",
|
||||
"abbreviation": "OJC",
|
||||
"logo": "assets/sports/eojhl_logos/OJC.png",
|
||||
"record": "17-2-0"
|
||||
},
|
||||
{
|
||||
"name": "Ottawa West Golden Knights",
|
||||
"abbreviation": "OTW",
|
||||
"logo": "assets/sports/eojhl_logos/OTW.png",
|
||||
"record": "13-2-2"
|
||||
},
|
||||
{
|
||||
"name": "Casselman Vikings",
|
||||
"abbreviation": "CAS",
|
||||
"logo": "assets/sports/eojhl_logos/CAS.png",
|
||||
"record": "13-3-0"
|
||||
},
|
||||
{
|
||||
"name": "Glengarry Brigade",
|
||||
"abbreviation": "GB",
|
||||
"logo": "assets/sports/eojhl_logos/GB.png",
|
||||
"record": "11-6-2"
|
||||
},
|
||||
{
|
||||
"name": "Winchester Hawks",
|
||||
"abbreviation": "WIN",
|
||||
"logo": "assets/sports/eojhl_logos/WIN.png",
|
||||
"record": "8-9-0"
|
||||
},
|
||||
{
|
||||
"name": "Embrun Panthers",
|
||||
"abbreviation": "EMB",
|
||||
"logo": "assets/sports/eojhl_logos/EMB.png",
|
||||
"record": "3-9-7"
|
||||
}
|
||||
],
|
||||
"richardson": [
|
||||
{
|
||||
"name": "Arnprior Packers",
|
||||
"abbreviation": "ARN",
|
||||
"logo": "assets/sports/eojhl_logos/ARN.png",
|
||||
"record": "11-5-0"
|
||||
},
|
||||
{
|
||||
"name": "Renfrew Timberwolves",
|
||||
"abbreviation": "REN",
|
||||
"logo": "assets/sports/eojhl_logos/REN.png",
|
||||
"record": "9-9-1"
|
||||
},
|
||||
{
|
||||
"name": "Richmond Royals",
|
||||
"abbreviation": "RCH",
|
||||
"logo": "assets/sports/eojhl_logos/RCH.png",
|
||||
"record": "8-7-1"
|
||||
},
|
||||
{
|
||||
"name": "Smiths Falls Jr. Bears",
|
||||
"abbreviation": "SFB",
|
||||
"logo": "assets/sports/eojhl_logos/SFB.png",
|
||||
"record": "8-11-0"
|
||||
},
|
||||
{
|
||||
"name": "Carleton Place Jr Canadians",
|
||||
"abbreviation": "CPC",
|
||||
"logo": "assets/sports/eojhl_logos/CPC.png",
|
||||
"record": "6-11-1"
|
||||
},
|
||||
{
|
||||
"name": "Perth Blue Wings",
|
||||
"abbreviation": "PER",
|
||||
"logo": "assets/sports/eojhl_logos/PER.png",
|
||||
"record": "5-10-2"
|
||||
},
|
||||
{
|
||||
"name": "Athens Aeros",
|
||||
"abbreviation": "ATH",
|
||||
"logo": "assets/sports/eojhl_logos/ATH.png",
|
||||
"record": "4-12-4"
|
||||
}
|
||||
]
|
||||
}
|
||||
84
data/eojhl_standings.json.20251130-180925.bak
Normal file
84
data/eojhl_standings.json.20251130-180925.bak
Normal file
@@ -0,0 +1,84 @@
|
||||
{
|
||||
"martin": [
|
||||
{
|
||||
"name": "Ottawa Jr Canadians",
|
||||
"abbreviation": "OJC",
|
||||
"logo": "assets/sports/eojhl_logos/OJC.png",
|
||||
"record": "19-3-0"
|
||||
},
|
||||
{
|
||||
"name": "Ottawa West Golden Knights",
|
||||
"abbreviation": "OTW",
|
||||
"logo": "assets/sports/eojhl_logos/OTW.png",
|
||||
"record": "18-2-2"
|
||||
},
|
||||
{
|
||||
"name": "Casselman Vikings",
|
||||
"abbreviation": "CAS",
|
||||
"logo": "assets/sports/eojhl_logos/CAS.png",
|
||||
"record": "16-4-0"
|
||||
},
|
||||
{
|
||||
"name": "Glengarry Brigade",
|
||||
"abbreviation": "GB",
|
||||
"logo": "assets/sports/eojhl_logos/GB.png",
|
||||
"record": "11-8-3"
|
||||
},
|
||||
{
|
||||
"name": "Embrun Panthers",
|
||||
"abbreviation": "EMB",
|
||||
"logo": "assets/sports/eojhl_logos/EMB.png",
|
||||
"record": "6-9-7"
|
||||
},
|
||||
{
|
||||
"name": "Winchester Hawks",
|
||||
"abbreviation": "WIN",
|
||||
"logo": "assets/sports/eojhl_logos/WIN.png",
|
||||
"record": "9-11-0"
|
||||
}
|
||||
],
|
||||
"richardson": [
|
||||
{
|
||||
"name": "Arnprior Packers",
|
||||
"abbreviation": "ARN",
|
||||
"logo": "assets/sports/eojhl_logos/ARN.png",
|
||||
"record": "14-5-0"
|
||||
},
|
||||
{
|
||||
"name": "Richmond Royals",
|
||||
"abbreviation": "RCH",
|
||||
"logo": "assets/sports/eojhl_logos/RCH.png",
|
||||
"record": "10-9-1"
|
||||
},
|
||||
{
|
||||
"name": "Renfrew Timberwolves",
|
||||
"abbreviation": "REN",
|
||||
"logo": "assets/sports/eojhl_logos/REN.png",
|
||||
"record": "10-11-1"
|
||||
},
|
||||
{
|
||||
"name": "Smiths Falls Jr. Bears",
|
||||
"abbreviation": "SFB",
|
||||
"logo": "assets/sports/eojhl_logos/SFB.png",
|
||||
"record": "8-14-1"
|
||||
},
|
||||
{
|
||||
"name": "Carleton Place Jr Canadians",
|
||||
"abbreviation": "CPC",
|
||||
"logo": "assets/sports/eojhl_logos/CPC.png",
|
||||
"record": "7-13-2"
|
||||
},
|
||||
{
|
||||
"name": "Athens Aeros",
|
||||
"abbreviation": "ATH",
|
||||
"logo": "assets/sports/eojhl_logos/ATH.png",
|
||||
"record": "6-14-4"
|
||||
},
|
||||
{
|
||||
"name": "Perth Blue Wings",
|
||||
"abbreviation": "PER",
|
||||
"logo": "assets/sports/eojhl_logos/PER.png",
|
||||
"record": "5-13-2"
|
||||
}
|
||||
]
|
||||
}
|
||||
1
data/example_live.jsonp
Normal file
1
data/example_live.jsonp
Normal file
File diff suppressed because one or more lines are too long
277
data/get_eojhl_data.py
Normal file
277
data/get_eojhl_data.py
Normal file
@@ -0,0 +1,277 @@
|
||||
import requests, json, re, os, shutil, glob
|
||||
from datetime import datetime
|
||||
|
||||
# --- FEED URLS ---
|
||||
STANDINGS_URL = (
|
||||
"https://lscluster.hockeytech.com/feed/index.php?"
|
||||
"feed=statviewfeed&view=teams&groupTeamsBy=division&context=overall"
|
||||
"&site_id=2&season=110&special=false&key=1defb601c9b37c24"
|
||||
"&client_code=eojhl&league_id=2&conference=-1&division=-1"
|
||||
"&sort=points&lang=en&callback=angular.callbacks._4"
|
||||
)
|
||||
|
||||
SCHEDULE_URL = (
|
||||
"https://lscluster.hockeytech.com/feed/index.php?"
|
||||
"feed=statviewfeed&view=schedule&team=-1&season=110&month=-1&location=homeaway"
|
||||
"&key=1defb601c9b37c24&client_code=eojhl&site_id=2&league_id=2"
|
||||
"&conference_id=-1&division_id=-1&lang=en&callback=angular.callbacks._4"
|
||||
)
|
||||
|
||||
STANDINGS_FILE = "eojhl_standings.json"
|
||||
SCHEDULE_FILE = "eojhl_scoreboard.json"
|
||||
MAX_BACKUPS = 5
|
||||
|
||||
# --- TEAM ABBREVIATION MAP ---
# Maps the 'city' name from the raw feed to the abbreviation used for
# asset/logo filenames (assets/sports/eojhl_logos/<ABBR>.png).
# FIX: values aligned with the generated standings data, which uses
# OJC/OTW/SFB/ARN — the previous OTT/OW/SF/ARP entries produced logo
# paths that do not match the shipped assets.
TEAM_ABBR_MAP = {
    "Ottawa": "OJC",
    "Carleton Place": "CPC",
    "Ottawa West": "OTW",
    "Richmond": "RCH",
    "Casselman": "CAS",
    "Smiths Falls": "SFB",
    "Embrun": "EMB",
    "Perth": "PER",
    "Glengarry": "GB",
    "Arnprior": "ARN",
    "Athens": "ATH",
    "Renfrew": "REN",
    "Winchester": "WIN"
}
|
||||
|
||||
# --- HELPERS ---
def backup_file(filename):
    """Copy *filename* to a timestamped ``.bak`` file and prune old backups.

    Keeps at most MAX_BACKUPS backups (newest first) of the given file;
    older ones are deleted. No-op when the file does not exist yet.
    """
    if not os.path.exists(filename):
        return
    ts = datetime.now().strftime("%Y%m%d_%H%M%S")
    # FIX: the backup name must be derived from the target filename —
    # previously a literal placeholder string was used, so every file
    # backed up to the same name and the prune glob matched nothing useful.
    backup_name = f"{filename}.{ts}.bak"
    shutil.copy2(filename, backup_name)

    # Clean up old backups: newest first, keep MAX_BACKUPS, delete the rest.
    backups = sorted(glob.glob(f"{filename}.*.bak"), reverse=True)
    for old_backup in backups[MAX_BACKUPS:]:
        os.remove(old_backup)
|
||||
|
||||
def fetch_jsonp(url):
    """Fetch a JSONP feed and return the decoded payload (a list), or None.

    Strips the angular.callbacks._N(...) wrapper before JSON-decoding.
    Logs and returns None on any failure (missing wrapper, bad JSON, or a
    payload that is not a list).
    """
    response = requests.get(url)

    # Locate the angular.callbacks._N( ... ); wrapper around the payload.
    wrapper = re.search(r"angular\.callbacks\._\d+\s*\((.*)\);?\s*$", response.text, re.DOTALL)
    if not wrapper:
        print("Error: Could not find or strip the JSONP wrapper.")
        return None

    payload = wrapper.group(1).strip()
    try:
        decoded = json.loads(payload)
    except json.JSONDecodeError as e:
        print(f"Error decoding JSON from raw data string: {e}")
        return None

    if not isinstance(decoded, list):
        print("Error: Decoded JSON data is not a list (expected format).")
        return None

    print("Successfully stripped JSONP and decoded data.")
    return decoded
|
||||
|
||||
def parse_game_date(raw_date, raw_status):
    """Build an ISO-8601 timestamp from a raw date ("Sep. 16") and status.

    Scheduled games carry a start time in the status (e.g. "7:30 pm EST");
    completed games fall back to a default 7:30 PM. The year is fixed to
    '2025' for consistency. Returns None when parsing fails.
    """
    year = "2025"
    day = raw_date.replace('.', '')  # "Sep. 16" -> "Sep 16"

    # A scheduled game's status contains a clock time with am/pm.
    clock = re.search(r"(\d{1,2}:\d{2})\s*(pm|am)", raw_status, re.IGNORECASE)
    if clock:
        # Scheduled game: parse date + start time.
        compact = clock.group(0).replace(" ", "").upper()  # e.g. "7:30PM"
        try:
            parsed = datetime.strptime(f"{day} {year} {compact}", "%b %d %Y %I:%M%p")
            # ISO 8601 with a Z suffix (note: time is not actually converted
            # to UTC here — kept identical to the original output format).
            return parsed.strftime("%Y-%m-%dT%H:%M:00Z")
        except Exception:
            return None

    # Completed game: date only, stamped with a default 7:30 PM.
    try:
        parsed = datetime.strptime(f"{day} {year}", "%b %d %Y")
        return parsed.strftime("%Y-%m-%dT19:30:00Z")
    except Exception:
        return None
|
||||
|
||||
def get_status_details(game_status):
    """Parse a raw game-status string into a status dict.

    Handles "Final" (with OT/SO variants), scheduled start times such as
    "7:30 pm EST", and in-progress strings like "3rd Period - 12:00".

    Returns a dict with keys: name, state ("pre"/"in"/"post"),
    shortDetail, period, displayClock.
    """
    status = {
        "name": "STATUS_SCHEDULED",
        "state": "pre",
        "shortDetail": game_status,
        "period": 0,
        "displayClock": "00:00"
    }

    if "Final" in game_status:
        status["state"] = "post"
        status["name"] = "STATUS_FINAL"
        status["period"] = 3
        status["shortDetail"] = "Final"

        if "OT" in game_status:
            status["shortDetail"] = "Final OT"
            status["name"] = "STATUS_FINAL_OVERTIME"
            status["period"] = 4
        elif "SO" in game_status:
            status["shortDetail"] = "Final SO"
            status["name"] = "STATUS_FINAL_SHOOTOUT"
            status["period"] = 4
        return status

    # In-progress detection (e.g. "3rd Period - 12:00").
    # FIX: a scheduled start time with a two-digit hour ("12:00 pm") also
    # matches the bare clock pattern \d{2}:\d{2}, which wrongly flagged
    # those games as live. Statuses carrying an am/pm marker are scheduled
    # start times and must stay "pre".
    if (not re.search(r"\b(am|pm)\b", game_status, re.IGNORECASE)
            and re.search(r"(\d(st|nd|rd|th) Period|\d{2}:\d{2})", game_status)):
        status["state"] = "in"
        status["name"] = "STATUS_IN_PROGRESS"
        status["shortDetail"] = game_status
        # Parsing period/clock out of the live status needs a concrete
        # example of the raw in-progress string; keep defaults until then.

    return status
|
||||
|
||||
def transform_schedule(raw_data):
    """Convert the raw HockeyTech schedule feed into a scoreboard dict.

    *raw_data* is the decoded JSONP payload: a list whose first element has
    a "sections" list, where sections[0]["data"] holds the games. Returns
    {"events": [...]} in an ESPN-scoreboard-like shape; returns an empty
    events list (after logging) when the structure cannot be traversed.
    """
    events = []

    # FIX: Use safe dictionary lookups to prevent IndexErrors
    game_list = None
    try:
        # Path: raw_data[0] -> "sections" (list) -> sections[0] (dict) -> "data" (list of games)
        game_list = raw_data[0].get("sections", [{}])[0].get("data")
    except (TypeError, IndexError, AttributeError):
        # This will catch if raw_data is not a list, sections is empty, or get fails deep down.
        print("Error: Could not safely extract 'data' list from raw data structure.")
        return {"events": []}

    if not game_list:
        print("Error: 'data' list is empty or None.")
        return {"events": []}

    for game_data in game_list:
        # The game data is stored in the 'row' key, team IDs are in 'prop'
        game_row = game_data.get("row", {})
        game_prop = game_data.get("prop", {})

        # --- Extract Data (Using Corrected Keys) ---
        home_city = game_row.get("home_team_city")
        away_city = game_row.get("visiting_team_city")
        home_score = game_row.get("home_goal_count", '0')
        away_score = game_row.get("visiting_goal_count", '0')
        game_id = game_row.get("game_id")
        game_status = game_row.get("game_status", "TBA")
        raw_date = game_row.get("date")

        # Skip if essential data is missing
        if not all([game_id, home_city, away_city, raw_date]):
            continue

        # --- Derive Data ---
        # NOTE(review): cities missing from TEAM_ABBR_MAP yield abbr=None
        # and a null logo below — confirm the map covers every team.
        home_abbr = TEAM_ABBR_MAP.get(home_city)
        away_abbr = TEAM_ABBR_MAP.get(away_city)

        # Team IDs are deeply nested: prop -> home_team_city (key) -> teamLink (ID)
        home_team_id = game_prop.get("home_team_city", {}).get("teamLink")
        away_team_id = game_prop.get("visiting_team_city", {}).get("teamLink")

        status_details = get_status_details(game_status)
        date_time_str = parse_game_date(raw_date, game_status)

        # Clean scores: the feed reports '-' for games with no score yet.
        home_score = home_score if home_score and home_score != '-' else '0'
        away_score = away_score if away_score and away_score != '-' else '0'

        # --- Build Event Object ---
        # Records are placeholders ("0-0-0"); this schedule feed does not
        # carry team records (the standings feed does).
        event = {
            "id": game_id,
            "date": date_time_str,
            "competitions": [{
                "status": {
                    "type": {
                        "name": status_details["name"],
                        "state": status_details["state"],
                        "shortDetail": status_details["shortDetail"]
                    },
                    "period": status_details["period"],
                    "displayClock": status_details["displayClock"]
                },
                "competitors": [
                    {
                        "id": home_team_id,
                        "homeAway": "home",
                        "team": {
                            "id": home_team_id,
                            "abbreviation": home_abbr,
                            "name": home_city,
                            "logo": f"assets/sports/eojhl_logos/{home_abbr}.png" if home_abbr else None
                        },
                        "score": home_score,
                        "records": [{"summary": "0-0-0"}]
                    },
                    {
                        "id": away_team_id,
                        "homeAway": "away",
                        "team": {
                            "id": away_team_id,
                            "abbreviation": away_abbr,
                            "name": away_city,
                            "logo": f"assets/sports/eojhl_logos/{away_abbr}.png" if away_abbr else None
                        },
                        "score": away_score,
                        "records": [{"summary": "0-0-0"}]
                    }
                ]
            }]
        }
        events.append(event)

    return {"events": events}
|
||||
|
||||
# --- The main block (Placeholder for transform_standings) ---
def transform_standings(raw_data):
    """Placeholder that ignores *raw_data* and returns empty standings.

    The real transformation lives in get_standings.py; because __main__
    below writes whatever this returns, running this script overwrites
    the standings file with an empty structure.
    """
    # This is a placeholder; you'll need your actual standings logic here.
    return {"standings": []}
|
||||
|
||||
if __name__ == "__main__":
    print(f"Starting data fetch at {datetime.now().isoformat()}")

    # Rotate the current outputs to timestamped backups before overwriting.
    backup_file(STANDINGS_FILE)
    backup_file(SCHEDULE_FILE)

    # Fetch and transform schedule data
    schedule_raw = fetch_jsonp(SCHEDULE_URL)

    if schedule_raw:
        schedule = transform_schedule(schedule_raw)
    else:
        schedule = {"events": []}

    # Fetch and transform standings data
    # NOTE(review): transform_standings in this file is a placeholder that
    # returns {"standings": []}, so the write below replaces the standings
    # file with an empty structure — confirm this is intended (the real
    # transform lives in get_standings.py).
    standings_raw = fetch_jsonp(STANDINGS_URL)
    standings = transform_standings(standings_raw)

    # Write output to file — only when at least one event was produced, so
    # a failed fetch does not clobber the previous scoreboard.
    if schedule and schedule['events']:
        with open(SCHEDULE_FILE, "w") as f:
            json.dump(schedule, f, indent=2)
        print(f"Successfully wrote {len(schedule['events'])} events to {SCHEDULE_FILE}")
    else:
        print(f"Failed to transform schedule data. {SCHEDULE_FILE} not updated.")

    # Standings are written whenever truthy (always, given the placeholder).
    if standings:
        with open(STANDINGS_FILE, "w") as f:
            json.dump(standings, f, indent=2)
|
||||
253
data/get_schedule.py
Normal file
253
data/get_schedule.py
Normal file
@@ -0,0 +1,253 @@
|
||||
import requests, json, re, os, shutil, glob
|
||||
from datetime import datetime
|
||||
|
||||
# --- FEED URLS ---
|
||||
SCHEDULE_URL = (
|
||||
"https://lscluster.hockeytech.com/feed/index.php?"
|
||||
"feed=statviewfeed&view=schedule&team=54&season=110"
|
||||
"&month=-1&location=homeaway&key=1defb601c9b37c24"
|
||||
"&client_code=eojhl&site_id=2&league_id=2&conference_id=-1"
|
||||
"&division_id=-1&lang=en&callback=angular.callbacks._2"
|
||||
)
|
||||
|
||||
SCHEDULE_FILE = "eojhl_scoreboard.json"
|
||||
MAX_BACKUPS = 2
|
||||
|
||||
# --- TEAM ABBREVIATION MAP ---
# Maps the 'city' from the raw feed to the abbreviation used for logo
# filenames (assets/sports/eojhl_logos/<ABBR>.png).
TEAM_ABBR_MAP = {
    "Ottawa": "OJC",
    "Carleton Place": "CPC",
    "Ottawa West": "OTW",
    "Richmond": "RCH",
    "Casselman": "CAS",
    "Smiths Falls": "SFB",
    "Embrun": "EMB",
    "Perth": "PER",
    "Glengarry": "GB",
    # FIX: the shipped assets and generated standings data use "ARN",
    # not "ARP", for Arnprior.
    "Arnprior": "ARN",
    "Athens": "ATH",
    "Renfrew": "REN",
    "Winchester": "WIN"
}
|
||||
|
||||
# --- HELPERS ---

def backup_file(filename):
    """Backup existing file with timestamp and prune old backups.

    Moves *filename* aside to "<filename>.<ts>.bak" (the original is gone
    until a new one is written) and keeps at most MAX_BACKUPS backups.
    No-op when the file does not exist.
    """
    if not os.path.exists(filename):
        return
    ts = datetime.now().strftime("%Y%m%d-%H%M%S")
    # FIX: derive the backup name from the file being backed up —
    # previously a literal placeholder string was used, so the backup
    # name, the prune glob, and the log message were all wrong.
    backup = f"{filename}.{ts}.bak"
    shutil.move(filename, backup)
    # Keep only the newest MAX_BACKUPS backups of this file.
    backups = sorted(glob.glob(f"{filename}.*.bak"), reverse=True)
    for old in backups[MAX_BACKUPS:]:
        os.remove(old)
    print(f"Backed up {filename} -> {backup}")
|
||||
|
||||
def fetch_jsonp(url):
    """Fetch a JSONP feed and return the decoded list payload, or None.

    Returns None when the JSONP wrapper is missing, the payload is not
    valid JSON, or the decoded value is not a list. (Failures are silent;
    the caller falls back to an empty schedule.)
    """
    r = requests.get(url)

    # Robust stripping of the angular.callbacks._N(...) wrapper.
    match = re.search(r"angular\.callbacks\._\d+\s*\((.*)\);?\s*$", r.text, re.DOTALL)
    if not match:
        return None

    try:
        data = json.loads(match.group(1).strip())
    except json.JSONDecodeError:
        return None

    # The feed is expected to be a JSON array; anything else is rejected.
    return data if isinstance(data, list) else None
|
||||
|
||||
from zoneinfo import ZoneInfo  # FIX: removed the redundant mid-file re-import of datetime


def parse_game_date(raw_date, raw_status):
    """
    Parses raw date ("Oct 14") and raw status ("8:00 PM" or "Final")
    into an ISO 8601 UTC string, interpreting times as America/Toronto.

    The season year is fixed to 2025. Returns None (and logs) when the
    inputs cannot be parsed.
    """
    current_year = "2025"
    cleaned_raw_date = raw_date.replace('.', '')  # "Oct. 14" -> "Oct 14"

    # Scheduled games carry a start time in the status; completed games
    # ("Final") fall back to a default 7:30 PM local start.
    time_match = re.search(r"(\d{1,2}:\d{2}\s*[APMapm]{2})", raw_status)
    try:
        if time_match:
            time_str = time_match.group(1).upper().replace(" ", "")
            # Re-insert a space before AM/PM so %p parsing works.
            if time_str.endswith("AM") or time_str.endswith("PM"):
                time_str = time_str[:-2] + " " + time_str[-2:]
            dt_local = datetime.strptime(
                f"{cleaned_raw_date} {current_year} {time_str}",
                "%b %d %Y %I:%M %p"
            )
        else:
            # Completed game, assume default 7:30 PM local
            dt_local = datetime.strptime(
                f"{cleaned_raw_date} {current_year} 7:30 PM",
                "%b %d %Y %I:%M %p"
            )

        # Localize to Toronto (DST-aware), then convert to UTC for output.
        dt_local = dt_local.replace(tzinfo=ZoneInfo("America/Toronto"))
        dt_utc = dt_local.astimezone(ZoneInfo("UTC"))
        return dt_utc.strftime("%Y-%m-%dT%H:%M:%SZ")

    except Exception as e:
        print(f"Date parse error: {e} for input {raw_date} {raw_status}")
        return None
|
||||
|
||||
|
||||
|
||||
def get_status_details(game_status):
    """Parse a raw game-status string into a status dict.

    Handles "Final" (with OT/SO variants), scheduled start times such as
    "7:30 pm EST", and in-progress strings like "3rd Period - 12:00".

    Returns a dict with keys: name, state ("pre"/"in"/"post"),
    shortDetail, period, displayClock.
    """
    status = {
        "name": "STATUS_SCHEDULED",
        "state": "pre",
        "shortDetail": game_status,
        "period": 0,
        "displayClock": "00:00"
    }

    if "Final" in game_status:
        status["state"] = "post"
        status["name"] = "STATUS_FINAL"
        status["period"] = 3
        status["shortDetail"] = "Final"

        if "OT" in game_status:
            status["shortDetail"] = "Final OT"
            status["name"] = "STATUS_FINAL_OVERTIME"
            status["period"] = 4
        elif "SO" in game_status:
            status["shortDetail"] = "Final SO"
            status["name"] = "STATUS_FINAL_SHOOTOUT"
            status["period"] = 4
        return status

    # In-progress detection (e.g. "3rd Period - 12:00").
    # FIX: a scheduled start time with a two-digit hour ("12:00 pm") also
    # matches the bare clock pattern \d{2}:\d{2}, which wrongly flagged
    # those games as live. Statuses carrying an am/pm marker are scheduled
    # start times and must stay "pre".
    if (not re.search(r"\b(am|pm)\b", game_status, re.IGNORECASE)
            and re.search(r"(\d(st|nd|rd|th) Period|\d{2}:\d{2})", game_status)):
        status["state"] = "in"
        status["name"] = "STATUS_IN_PROGRESS"
        status["shortDetail"] = game_status

    return status
|
||||
|
||||
def transform_schedule(raw_data):
    """Convert the raw HockeyTech schedule feed into a scoreboard dict.

    *raw_data* is the decoded JSONP payload: a list whose first element has
    a "sections" list, where sections[0]["data"] holds the games. Returns
    {"events": [...]} in an ESPN-scoreboard-like shape; returns an empty
    events list (after logging) when the structure cannot be traversed.
    """
    events = []

    game_list = None
    try:
        # Path: raw_data[0] -> "sections" (list) -> sections[0] (dict) -> "data" (list of games)
        game_list = raw_data[0].get("sections", [{}])[0].get("data")
    except (TypeError, IndexError, AttributeError):
        # Catches raw_data not being a list, empty sections, or bad nesting.
        print("Error: Could not safely extract 'data' list from raw data structure.")
        return {"events": []}

    if not game_list:
        print("Error: 'data' list is empty or None.")
        return {"events": []}

    for game_data in game_list:
        # Per-game fields live under 'row'; team IDs live under 'prop'.
        game_row = game_data.get("row", {})
        game_prop = game_data.get("prop", {})

        # --- Extract Data ---
        home_city = game_row.get("home_team_city")
        away_city = game_row.get("visiting_team_city")
        home_score = game_row.get("home_goal_count", '0')
        away_score = game_row.get("visiting_goal_count", '0')
        game_id = game_row.get("game_id")
        game_status = game_row.get("game_status", "TBA")
        raw_date = game_row.get("date")

        # Skip games with any essential field missing.
        if not all([game_id, home_city, away_city, raw_date]):
            continue

        # --- Derive Data ---
        # NOTE(review): cities missing from TEAM_ABBR_MAP yield abbr=None
        # and a null logo below — confirm the map covers every team.
        home_abbr = TEAM_ABBR_MAP.get(home_city)
        away_abbr = TEAM_ABBR_MAP.get(away_city)

        # Team IDs are nested: prop -> <city key> -> teamLink.
        home_team_id = game_prop.get("home_team_city", {}).get("teamLink")
        away_team_id = game_prop.get("visiting_team_city", {}).get("teamLink")

        status_details = get_status_details(game_status)
        date_time_str = parse_game_date(raw_date, game_status)

        # Clean scores: the feed reports '-' for games with no score yet.
        home_score = home_score if home_score and home_score != '-' else '0'
        away_score = away_score if away_score and away_score != '-' else '0'

        # --- Build Event Object ---
        # Records are placeholders ("0-0-0"); this schedule feed does not
        # carry team records (the standings feed does).
        event = {
            "id": game_id,
            "date": date_time_str,
            "competitions": [{
                "status": {
                    "type": {
                        "name": status_details["name"],
                        "state": status_details["state"],
                        "shortDetail": status_details["shortDetail"]
                    },
                    "period": status_details["period"],
                    "displayClock": status_details["displayClock"]
                },
                "competitors": [
                    {
                        "id": home_team_id,
                        "homeAway": "home",
                        "team": {
                            "id": home_team_id,
                            "abbreviation": home_abbr,
                            "name": home_city,
                            "logo": f"assets/sports/eojhl_logos/{home_abbr}.png" if home_abbr else None
                        },
                        "score": home_score,
                        "records": [{"summary": "0-0-0"}]
                    },
                    {
                        "id": away_team_id,
                        "homeAway": "away",
                        "team": {
                            "id": away_team_id,
                            "abbreviation": away_abbr,
                            "name": away_city,
                            "logo": f"assets/sports/eojhl_logos/{away_abbr}.png" if away_abbr else None
                        },
                        "score": away_score,
                        "records": [{"summary": "0-0-0"}]
                    }
                ]
            }]
        }
        events.append(event)

    return {"events": events}
|
||||
|
||||
# --- MAIN ---
if __name__ == "__main__":
    # NOTE(review): backup_file uses shutil.move, so if the fetch below
    # fails the original scoreboard is gone from its normal path (only the
    # .bak remains) — confirm this rotate-first behavior is intended.
    backup_file(SCHEDULE_FILE)
    print(f"Starting schedule data fetch at {datetime.now().isoformat()}")

    # Fetch and transform schedule data
    schedule_raw = fetch_jsonp(SCHEDULE_URL)

    if schedule_raw:
        schedule = transform_schedule(schedule_raw)
    else:
        schedule = {"events": []}

    # Write output to file — only when at least one event was produced.
    if schedule and schedule['events']:
        with open(SCHEDULE_FILE, "w") as f:
            json.dump(schedule, f, indent=2)
        print(f"Successfully wrote {len(schedule['events'])} events to {SCHEDULE_FILE}")
    else:
        print(f"Failed to transform schedule data. {SCHEDULE_FILE} not updated.")
|
||||
93
data/get_standings.py
Normal file
93
data/get_standings.py
Normal file
@@ -0,0 +1,93 @@
|
||||
import requests, json, re, os, shutil, glob
|
||||
from datetime import datetime
|
||||
|
||||
# EOJHL standings feed (JSONP)
|
||||
STANDINGS_URL = (
|
||||
"https://lscluster.hockeytech.com/feed/index.php?"
|
||||
"feed=statviewfeed&view=teams&groupTeamsBy=division&context=overall"
|
||||
"&site_id=2&season=110&special=false&key=1defb601c9b37c24"
|
||||
"&client_code=eojhl&league_id=2&conference=-1&division=-1"
|
||||
"&sort=points&lang=en&callback=angular.callbacks._4"
|
||||
)
|
||||
|
||||
# --- TEAM ABBREVIATION MAP ---
# NOTE(review): this map appears unused in this script — transform_standings
# below derives abbreviations from the feed's `team_code` field. Presumably
# kept for parity with get_schedule.py / get_eojhl_data.py; confirm before
# removing. Also note "Arnprior": "ARP" disagrees with the "ARN" code seen
# in the generated standings data.
TEAM_ABBR_MAP = {
    "Ottawa": "OJC",
    "Carleton Place": "CPC",
    "Ottawa West": "OTW",
    "Richmond": "RCH",
    "Casselman": "CAS",
    "Smiths Falls": "SFB",
    "Embrun": "EMB",
    "Perth": "PER",
    "Glengarry": "GB",
    "Arnprior": "ARP",
    "Athens": "ATH",
    "Renfrew": "REN",
    "Winchester": "WIN"
}
|
||||
|
||||
OUTPUT_FILE = "eojhl_standings.json"
|
||||
MAX_BACKUPS = 2
|
||||
|
||||
def backup_file(filename):
    """Backup existing file with timestamp and prune old backups.

    Moves *filename* aside to "<filename>.<ts>.bak" and keeps at most
    MAX_BACKUPS backups. No-op when the file does not exist.
    """
    if not os.path.exists(filename):
        return
    ts = datetime.now().strftime("%Y%m%d-%H%M%S")
    # FIX: derive the backup name from the file being backed up —
    # previously a literal placeholder string was used, so the backup
    # name, the prune glob, and the log message were all wrong.
    backup = f"{filename}.{ts}.bak"
    shutil.move(filename, backup)
    # Keep only the newest MAX_BACKUPS backups of this file.
    backups = sorted(glob.glob(f"{filename}.*.bak"), reverse=True)
    for old in backups[MAX_BACKUPS:]:
        os.remove(old)
    print(f"Backed up {filename} -> {backup}")
|
||||
|
||||
def fetch_jsonp(url):
    """Fetch JSONP and return parsed JSON (handles arrays or objects)."""
    text = requests.get(url).text.strip()

    # Some feeds return plain JSON with no callback wrapper at all.
    if text.startswith(("{", "[")):
        return json.loads(text)

    # Otherwise strip a callback(...) wrapper: everything up to the first
    # '(' is the callback name; the parenthesized body is the payload.
    wrapped = re.search(r'^[^(]+\((.*)\)\s*$', text, re.S)
    if wrapped is None:
        raise ValueError("Response was not valid JSON or JSONP:\n" + text[:200])
    return json.loads(wrapped.group(1))
|
||||
|
||||
def transform_standings(raw):
    """Transform HockeyTech standings into legacy JSON format with records.

    *raw* is the decoded feed: a list whose first element carries a
    "sections" list (one per division); each section's "data" rows hold
    the team fields. Returns {"martin": [...], "richardson": [...]} where
    each entry has name/abbreviation/logo/record, the record being
    "W-L-OTL" with OTL = OT losses + shootout losses. Teams in divisions
    whose label matches neither name are skipped.
    """
    standings = {"martin": [], "richardson": []}
    sections = raw[0].get("sections", [])
    for section in sections:
        headers = section.get("headers", {})
        # The division name is buried in the header cell's label property.
        label = headers.get("name", {}).get("properties", {}).get("label", "").lower()
        key = "martin" if "martin" in label else "richardson" if "richardson" in label else None
        if not key:
            continue
        for team in section.get("data", []):
            row = team.get("row", {})
            name = row.get("name", "").strip()
            if not name:
                continue
            # Prefer the feed's team code; fall back to the first 3 letters.
            abbr = row.get("team_code") or name[:3].upper()

            # Record fields (keys may vary: wins, losses, ot_losses,
            # shootout_losses). OT and shootout losses are combined into
            # the record's third column.
            wins = row.get("wins", "0")
            losses = row.get("losses", "0")
            ot_losses = row.get("ot_losses") or 0   # FIX: was the odd `int("0")`
            so_losses = row.get("shootout_losses") or 0
            otlosses = int(ot_losses) + int(so_losses)
            record = f"{wins}-{losses}-{otlosses}"

            standings[key].append({
                "name": name,
                "abbreviation": abbr,
                "logo": f"assets/sports/eojhl_logos/{abbr}.png",
                "record": record
            })
    return standings
|
||||
|
||||
if __name__ == "__main__":
    # Rotate the previous output to a timestamped .bak, then fetch,
    # transform, and write the fresh standings.
    backup_file(OUTPUT_FILE)
    raw = fetch_jsonp(STANDINGS_URL)
    standings = transform_standings(raw)
    with open(OUTPUT_FILE, "w") as f:
        json.dump(standings, f, indent=2)
    print(f"Standings written to {OUTPUT_FILE}")
|
||||
Reference in New Issue
Block a user