feat(v2): add snapshot extractor framework and run command
This commit is contained in:
130
apps/ingestion/extractors/public_json.py
Normal file
130
apps/ingestion/extractors/public_json.py
Normal file
@ -0,0 +1,130 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
from .base import (
|
||||
BaseSnapshotExtractor,
|
||||
ExtractorConfigError,
|
||||
ExtractorNormalizationError,
|
||||
ExtractorParseError,
|
||||
)
|
||||
from .http import ResponsibleHttpClient
|
||||
|
||||
|
||||
def _first_non_empty(record: dict[str, Any], *keys: str) -> Any:
|
||||
for key in keys:
|
||||
if key in record and record[key] not in (None, ""):
|
||||
return record[key]
|
||||
return None
|
||||
|
||||
|
||||
class PublicJsonSnapshotExtractor(BaseSnapshotExtractor):
    """
    Generic public JSON extractor for MVP v2.

    This extractor is intentionally generic and lightweight:
    - fetch from one public JSON endpoint
    - parse list-like payloads
    - normalize into HoopScout snapshot schema
    """

    extractor_name = "public_json_snapshot"

    # Ordered (snapshot_field, source_key_aliases) pairs. Order matters:
    # it fixes the key order of the normalized record. The first alias
    # present and non-empty in the source row wins.
    _FIELD_ALIASES: tuple[tuple[str, tuple[str, ...]], ...] = (
        ("competition_external_id", ("competition_external_id", "competition_id", "league_id")),
        ("competition_name", ("competition_name", "competition", "league_name")),
        ("season", ("season", "season_label", "season_name")),
        ("team_external_id", ("team_external_id", "team_id")),
        ("team_name", ("team_name", "team")),
        ("player_external_id", ("player_external_id", "player_id")),
        ("full_name", ("full_name", "player_name", "name")),
        ("first_name", ("first_name",)),
        ("last_name", ("last_name",)),
        ("birth_date", ("birth_date",)),
        ("nationality", ("nationality", "nationality_code")),
        ("height_cm", ("height_cm",)),
        ("weight_kg", ("weight_kg",)),
        ("position", ("position",)),
        ("role", ("role",)),
        ("games_played", ("games_played", "gp")),
        ("minutes_per_game", ("minutes_per_game", "mpg")),
        ("points_per_game", ("points_per_game", "ppg")),
        ("rebounds_per_game", ("rebounds_per_game", "rpg")),
        ("assists_per_game", ("assists_per_game", "apg")),
        ("steals_per_game", ("steals_per_game", "spg")),
        ("blocks_per_game", ("blocks_per_game", "bpg")),
        ("turnovers_per_game", ("turnovers_per_game", "tov")),
        ("fg_pct", ("fg_pct",)),
        ("three_pt_pct", ("three_pt_pct", "three_point_pct", "three_pct", "3p_pct")),
        ("ft_pct", ("ft_pct",)),
    )

    # Fields whose external-id/season values are coerced to stripped strings
    # after validation, in this order.
    _STRING_FIELDS = ("season", "competition_external_id", "team_external_id", "player_external_id")

    def __init__(
        self,
        *,
        url: str | None = None,
        source_name: str | None = None,
        include_raw_payload: bool | None = None,
        http_client: ResponsibleHttpClient | None = None,
    ):
        """Resolve configuration from explicit arguments, falling back to settings.

        Raises:
            ExtractorConfigError: when the resolved URL or source name is empty.
        """
        # Falsy arguments (None or "") fall back to the Django settings value.
        chosen_url = url or settings.EXTRACTOR_PUBLIC_JSON_URL
        self.url = chosen_url.strip()
        chosen_source = source_name or settings.EXTRACTOR_PUBLIC_SOURCE_NAME
        self.source_name = chosen_source.strip()

        # Only None means "unset"; an explicit False must not fall back.
        if include_raw_payload is None:
            self.include_raw_payload = settings.EXTRACTOR_INCLUDE_RAW_PAYLOAD
        else:
            self.include_raw_payload = include_raw_payload

        if not self.url:
            raise ExtractorConfigError("EXTRACTOR_PUBLIC_JSON_URL is required for public_json_snapshot extractor.")
        if not self.source_name:
            raise ExtractorConfigError("EXTRACTOR_PUBLIC_SOURCE_NAME must not be empty.")

        # Injected client wins (useful for tests); otherwise build one from settings.
        if http_client is not None:
            self.http_client = http_client
        else:
            self.http_client = ResponsibleHttpClient(
                user_agent=settings.EXTRACTOR_USER_AGENT,
                timeout_seconds=settings.EXTRACTOR_HTTP_TIMEOUT_SECONDS,
                retries=settings.EXTRACTOR_HTTP_RETRIES,
                retry_sleep_seconds=settings.EXTRACTOR_RETRY_SLEEP_SECONDS,
                request_delay_seconds=settings.EXTRACTOR_REQUEST_DELAY_SECONDS,
            )

    def fetch(self) -> Any:
        """Fetch the configured endpoint and return the decoded JSON payload."""
        return self.http_client.get_json(self.url)

    def parse(self, payload: Any) -> list[dict[str, Any]]:
        """Extract the list of source rows from *payload*.

        Accepts either a bare JSON array, or a JSON object wrapping the rows
        under a "records" or "data" key (checked in that order).

        Raises:
            ExtractorParseError: when no list of rows can be located.
        """
        if isinstance(payload, list):
            return payload
        if not isinstance(payload, dict):
            raise ExtractorParseError("Fetched payload must be a JSON object or array.")

        for container_key in ("records", "data"):
            rows = payload.get(container_key)
            if isinstance(rows, list):
                return rows

        raise ExtractorParseError("Payload must contain 'records' or 'data' list.")

    def normalize_record(self, source_record: dict[str, Any]) -> dict[str, Any]:
        """Map one source row into the HoopScout snapshot schema.

        Every field except "role" is required; identifier fields are coerced
        to stripped strings. When include_raw_payload is on, the original row
        is attached under "raw_payload".

        Raises:
            ExtractorNormalizationError: when any required field is missing/empty.
        """
        normalized = {
            field: _first_non_empty(source_record, *aliases)
            for field, aliases in self._FIELD_ALIASES
        }

        missing = sorted(
            field for field, value in normalized.items()
            if field != "role" and value in (None, "")
        )
        if missing:
            raise ExtractorNormalizationError(
                f"public_json_snapshot row missing required fields: {', '.join(sorted(missing))}"
            )

        for field in self._STRING_FIELDS:
            normalized[field] = str(normalized[field]).strip()

        if self.include_raw_payload:
            normalized["raw_payload"] = source_record

        return normalized
|
||||
Reference in New Issue
Block a user