16 Commits

Author SHA1 Message Date
3b5f1f37dd Make release compose topology immutable and verifiable 2026-03-12 16:40:17 +01:00
dac63f9148 Align balldontlie OpenAPI integration and clarify search metric semantics 2026-03-12 16:37:02 +01:00
c9dd10a438 Improve balldontlie query flow and dev container write stability 2026-03-12 11:13:05 +01:00
e0e75cfb0c Tighten provider normalization contract and fallback semantics 2026-03-10 16:47:39 +01:00
2252821daf Improve ingestion concurrency safety and batch transaction robustness 2026-03-10 16:37:29 +01:00
abd3419aac Harden Celery schedule parsing and startup safety 2026-03-10 16:18:57 +01:00
1ba1a8eebd Add release compose override without source bind mounts 2026-03-10 16:13:37 +01:00
dd09b71eb4 Harden production settings safety checks and docs 2026-03-10 16:04:02 +01:00
2586f15ae8 Make invalid search input explicit in UI and API 2026-03-10 15:53:55 +01:00
92c804a474 Fix combined search filter semantics across player season joins 2026-03-10 15:47:01 +01:00
a1ae380fd5 Improve search quality, ORM efficiency, and filter consistency 2026-03-10 14:37:01 +01:00
ceff4bc42c Wire Celery Beat periodic sync with ingestion run tracking 2026-03-10 13:44:36 +01:00
b39c6ced3a Harden runtime configuration and container security defaults 2026-03-10 13:06:12 +01:00
3d795991fe feat(frontend): integrate tailwind pipeline and update templates 2026-03-10 12:49:25 +01:00
4d49d30495 feat(players): add origin competition/team model and filtering 2026-03-10 12:29:38 +01:00
acfccbea08 feat(providers): add balldontlie http adapter with backend selection 2026-03-10 12:11:03 +01:00
78 changed files with 6203 additions and 539 deletions

View File

@ -1,10 +1,14 @@
# Django
DJANGO_SETTINGS_MODULE=config.settings.development
DJANGO_ENV=development
# Required to be a strong, unique value outside development.
DJANGO_SECRET_KEY=change-me-in-production
DJANGO_DEBUG=1
DJANGO_ALLOWED_HOSTS=localhost,127.0.0.1
DJANGO_CSRF_TRUSTED_ORIGINS=http://localhost,http://127.0.0.1
DJANGO_TIME_ZONE=UTC
DJANGO_LOG_LEVEL=INFO
DJANGO_LOG_SQL=0
DJANGO_SUPERUSER_USERNAME=admin
DJANGO_SUPERUSER_EMAIL=admin@example.com
DJANGO_SUPERUSER_PASSWORD=adminpass
@ -26,15 +30,55 @@ CELERY_RESULT_BACKEND=redis://redis:6379/0
# Runtime behavior
AUTO_APPLY_MIGRATIONS=1
AUTO_COLLECTSTATIC=1
AUTO_BUILD_TAILWIND=1
GUNICORN_WORKERS=3
# Development container UID/GID for bind-mounted source write permissions.
LOCAL_UID=1000
LOCAL_GID=1000
# Production-minded security toggles
DJANGO_SECURE_SSL_REDIRECT=1
DJANGO_SECURE_HSTS_SECONDS=31536000
DJANGO_SESSION_COOKIE_SAMESITE=Lax
DJANGO_CSRF_COOKIE_SAMESITE=Lax
# Mandatory production variables (example values):
# DJANGO_SETTINGS_MODULE=config.settings.production
# DJANGO_ENV=production
# DJANGO_DEBUG=0
# DJANGO_SECRET_KEY=<strong-unique-secret-at-least-32-chars>
# DJANGO_ALLOWED_HOSTS=app.example.com
# DJANGO_CSRF_TRUSTED_ORIGINS=https://app.example.com
# Providers / ingestion
PROVIDER_DEFAULT_NAMESPACE=mvp_demo
PROVIDER_BACKEND=demo
PROVIDER_NAMESPACE_DEMO=mvp_demo
PROVIDER_NAMESPACE_BALLDONTLIE=balldontlie
PROVIDER_DEFAULT_NAMESPACE=
PROVIDER_MVP_DATA_FILE=/app/apps/providers/data/mvp_provider.json
PROVIDER_REQUEST_RETRIES=3
PROVIDER_REQUEST_RETRY_SLEEP=1
PROVIDER_HTTP_TIMEOUT_SECONDS=10
PROVIDER_BALLDONTLIE_BASE_URL=https://api.balldontlie.io
PROVIDER_BALLDONTLIE_API_KEY=
# NBA-centric MVP provider seasons to ingest (comma-separated years).
PROVIDER_BALLDONTLIE_SEASONS=2024
PROVIDER_BALLDONTLIE_PLAYERS_PAGE_LIMIT=5
PROVIDER_BALLDONTLIE_PLAYERS_PER_PAGE=100
PROVIDER_BALLDONTLIE_STATS_PAGE_LIMIT=10
PROVIDER_BALLDONTLIE_STATS_PER_PAGE=100
# When 0, a 401 on stats endpoint degrades to players/teams-only sync.
PROVIDER_BALLDONTLIE_STATS_STRICT=0
CELERY_TASK_TIME_LIMIT=1800
CELERY_TASK_SOFT_TIME_LIMIT=1500
INGESTION_SCHEDULE_ENABLED=0
# 5-field cron: minute hour day_of_month month day_of_week
# Example hourly: 0 * * * *
INGESTION_SCHEDULE_CRON=*/30 * * * *
INGESTION_SCHEDULE_PROVIDER_NAMESPACE=
INGESTION_SCHEDULE_JOB_TYPE=incremental
INGESTION_PREVENT_OVERLAP=1
INGESTION_OVERLAP_WINDOW_MINUTES=180
API_THROTTLE_ANON=100/hour
API_THROTTLE_USER=1000/hour

3
.gitignore vendored
View File

@ -26,3 +26,6 @@ venv/
.vscode/
.idea/
.DS_Store
# Frontend
node_modules/

View File

@ -24,19 +24,31 @@ ENV PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1 \
PIP_NO_CACHE_DIR=1 \
VIRTUAL_ENV=/opt/venv \
PATH="/opt/venv/bin:${PATH}"
PATH="/opt/venv/bin:/home/app/.local/bin:${PATH}" \
APP_USER=app \
APP_UID=10001 \
APP_GID=10001
WORKDIR /app
RUN apt-get update \
&& apt-get install -y --no-install-recommends libpq5 postgresql-client curl \
&& apt-get install -y --no-install-recommends libpq5 postgresql-client curl nodejs npm \
&& rm -rf /var/lib/apt/lists/*
RUN groupadd --gid "${APP_GID}" "${APP_USER}" \
&& useradd --uid "${APP_UID}" --gid "${APP_GID}" --create-home --shell /usr/sbin/nologin "${APP_USER}"
RUN printf '%s\n' 'export PATH="/opt/venv/bin:/home/app/.local/bin:$PATH"' > /etc/profile.d/hoopscout-path.sh
COPY --from=builder /opt/venv /opt/venv
COPY . /app
RUN chmod +x /app/entrypoint.sh
RUN mkdir -p /app/staticfiles /app/media /app/runtime
RUN if [ -f package.json ]; then npm install --no-audit --no-fund; fi
RUN if [ -f package.json ]; then npm run build; fi
RUN chmod +x /app/entrypoint.sh
RUN mkdir -p /app/staticfiles /app/media /app/runtime /app/node_modules /app/static/vendor \
&& chown -R "${APP_UID}:${APP_GID}" /app /opt/venv
USER ${APP_UID}:${APP_GID}
ENTRYPOINT ["/app/entrypoint.sh"]
CMD ["gunicorn", "config.wsgi:application", "--bind", "0.0.0.0:8000"]

199
README.md
View File

@ -9,6 +9,7 @@ A minimal read-only API is included as a secondary integration surface.
- Python 3.12+
- Django
- Django Templates + HTMX
- Tailwind CSS (CLI build pipeline)
- PostgreSQL
- Redis
- Celery + Celery Beat
@ -45,6 +46,8 @@ A minimal read-only API is included as a secondary integration surface.
├── docs/
├── nginx/
├── requirements/
├── package.json
├── tailwind.config.js
├── static/
├── templates/
├── tests/
@ -66,9 +69,12 @@ cp .env.example .env
2. Build and run services:
```bash
docker compose up --build
docker compose -f docker-compose.yml -f docker-compose.dev.yml --profile dev up --build
```
This starts the development-oriented topology (source bind mounts enabled).
In development, bind-mounted app containers run as `LOCAL_UID`/`LOCAL_GID` from `.env` (set them to your host user/group IDs).
3. If `AUTO_APPLY_MIGRATIONS=0`, run migrations manually:
```bash
@ -88,13 +94,84 @@ docker compose exec web python manage.py createsuperuser
- Health: http://localhost/health/
- API root endpoints: `/api/players/`, `/api/competitions/`, `/api/teams/`, `/api/seasons/`
## Development vs Release Compose
Base compose (`docker-compose.yml`) is release-oriented and immutable for runtime services.
Development mutability is enabled via `docker-compose.dev.yml`.
Development startup (mutable source bind mounts for `web`/`celery_*`):
```bash
docker compose -f docker-compose.yml -f docker-compose.dev.yml up --build
```
Development startup with Tailwind watch:
```bash
docker compose -f docker-compose.yml -f docker-compose.dev.yml --profile dev up --build
```
Release-style startup (immutable runtime services):
```bash
docker compose -f docker-compose.yml -f docker-compose.release.yml up -d --build
```
Optional release-style stop:
```bash
docker compose -f docker-compose.yml -f docker-compose.release.yml down
```
Notes:
- In release-style mode, `web`, `celery_worker`, and `celery_beat` run from built image filesystem with no repository source bind mount.
- In development mode (with `docker-compose.dev.yml`), `web`, `celery_worker`, and `celery_beat` are mutable and bind-mount `.:/app`.
- `tailwind` is a dev-profile service and is not required for release runtime.
- `nginx`, `postgres`, and `redis` service naming remains unchanged.
- Release-style `web`, `celery_worker`, and `celery_beat` explicitly run as container user `10001:10001`.
## Release Topology Verification
Inspect merged release config:
```bash
docker compose -f docker-compose.yml -f docker-compose.release.yml config
```
What to verify:
- `services.web.volumes` does not include a bind mount from repository path to `/app`
- `services.celery_worker.volumes` does not include a bind mount from repository path to `/app`
- `services.celery_beat.volumes` does not include a bind mount from repository path to `/app`
- persistent named volumes still exist for `postgres_data`, `static_data`, `media_data`, `runtime_data`, and `redis_data`
Automated local/CI-friendly check:
```bash
./scripts/verify_release_topology.sh
```
## Setup and Run Notes
- `web` service starts through `entrypoint.sh` and waits for PostgreSQL readiness.
- `web` service also builds Tailwind CSS before `collectstatic` when `AUTO_BUILD_TAILWIND=1`.
- `web`, `celery_worker`, `celery_beat`, and `tailwind` run as a non-root user inside the image.
- `celery_worker` executes background sync work.
- `celery_beat` supports scheduled jobs (future scheduling strategy can be added per provider).
- `celery_beat` triggers periodic provider sync (`apps.ingestion.tasks.scheduled_provider_sync`).
- `tailwind` service runs watch mode for development (`npm run dev`).
- nginx proxies web traffic and serves static/media volume mounts.
## Search Consistency Notes
- The server-rendered player search page (`/players/`) and read-only players API (`/api/players/`) use the same search form and ORM filter service.
- Sorting/filter semantics are aligned across UI, HTMX partial refreshes, and API responses.
- Search result metrics in the UI table use **best eligible semantics**:
- each metric (Games, MPG, PPG, RPG, APG) is the maximum value across eligible player-season rows
- eligibility is scoped by the active season/team/competition/stat filters
- different displayed metrics for one player can come from different eligible rows
- Metric-based API sorting (`ppg_*`, `mpg_*`) uses the same best-eligible semantics as UI search.
## Docker Volumes and Persistence
`docker-compose.yml` uses named volumes:
@ -104,9 +181,18 @@ docker compose exec web python manage.py createsuperuser
- `media_data`: user/provider media artifacts
- `runtime_data`: app runtime files (e.g., celery beat schedule)
- `redis_data`: Redis persistence (`/data` for RDB/AOF files)
- `node_modules_data`: Node modules cache for Tailwind builds in development override
This keeps persistent state outside container lifecycles.
In release-style mode, these volumes remain the persistence layer:
- `postgres_data` for database state
- `static_data` for collected static assets served by nginx
- `media_data` for uploaded/provider media
- `runtime_data` for Celery beat schedule/runtime files
- `redis_data` for Redis persistence
## Migrations
Create migration files:
@ -135,6 +221,68 @@ Run a focused module:
docker compose run --rm web sh -lc 'pip install -r requirements/dev.txt && pytest -q tests/test_api.py'
```
## Frontend Assets (Tailwind)
Build Tailwind once:
```bash
docker compose run --rm web sh -lc 'npm install --no-audit --no-fund && npm run build'
```
If you see `Permission denied` writing `static/vendor` or `static/css` in development, fix local file ownership once:
```bash
sudo chown -R "$(id -u):$(id -g)" static
```
Run Tailwind in watch mode during development:
```bash
docker compose -f docker-compose.yml -f docker-compose.dev.yml --profile dev up tailwind
```
Source CSS lives in `static/src/tailwind.css` and compiles to `static/css/main.css`.
HTMX is served from local static assets (`static/vendor/htmx.min.js`) instead of a CDN dependency.
## Production Configuration
Use production settings in deployed environments:
```bash
DJANGO_SETTINGS_MODULE=config.settings.production
DJANGO_DEBUG=0
DJANGO_ENV=production
```
When `DJANGO_DEBUG=0`, startup fails fast unless:
- `DJANGO_SECRET_KEY` is a real non-default value
- `DJANGO_ALLOWED_HOSTS` is set
- `DJANGO_CSRF_TRUSTED_ORIGINS` is set (for production settings)
Additional production safety checks:
- `DJANGO_SECRET_KEY` must be strong and non-default in non-development environments
- `DJANGO_ALLOWED_HOSTS` must not contain localhost-style values
- `DJANGO_CSRF_TRUSTED_ORIGINS` must be explicit HTTPS origins only (no localhost/http)
Production settings enable hardened defaults such as:
- secure cookies
- HSTS
- security headers
- `ManifestStaticFilesStorage` for static asset integrity/versioning
### Production Configuration Checklist
- `DJANGO_SETTINGS_MODULE=config.settings.production`
- `DJANGO_ENV=production`
- `DJANGO_DEBUG=0`
- strong `DJANGO_SECRET_KEY` (unique, non-default, >= 32 chars)
- explicit `DJANGO_ALLOWED_HOSTS` (no localhost values)
- explicit `DJANGO_CSRF_TRUSTED_ORIGINS` with HTTPS origins only
- `DJANGO_SECURE_SSL_REDIRECT=1` and `DJANGO_SECURE_HSTS_SECONDS` set appropriately
## Superuser and Auth
Create superuser:
@ -155,8 +303,8 @@ Default auth routes:
- Open `/admin/` -> `IngestionRun`
- Use admin actions:
- `Queue full MVP sync`
- `Queue incremental MVP sync`
- `Queue full sync (default provider)`
- `Queue incremental sync (default provider)`
- `Retry selected ingestion runs`
### Trigger from shell (manual)
@ -167,7 +315,7 @@ docker compose exec web python manage.py shell
```python
from apps.ingestion.tasks import trigger_full_sync
trigger_full_sync.delay(provider_namespace="mvp_demo")
trigger_full_sync.delay(provider_namespace="balldontlie")
```
### Logs and diagnostics
@ -175,6 +323,47 @@ trigger_full_sync.delay(provider_namespace="mvp_demo")
- Run-level status/counters: `IngestionRun`
- Structured error records: `IngestionError`
- Provider entity mappings + diagnostic payload snippets: `ExternalMapping`
- `IngestionRun.error_summary` captures top-level failure/partial-failure context
### Scheduled sync via Celery Beat
Configure scheduled sync through environment variables:
- `INGESTION_SCHEDULE_ENABLED` (`0`/`1`)
- `INGESTION_SCHEDULE_CRON` (5-field cron expression, default `*/30 * * * *`)
- `INGESTION_SCHEDULE_PROVIDER_NAMESPACE` (optional; falls back to default provider namespace)
- `INGESTION_SCHEDULE_JOB_TYPE` (`incremental` or `full_sync`)
- `INGESTION_PREVENT_OVERLAP` (`0`/`1`) to skip obvious overlapping runs
- `INGESTION_OVERLAP_WINDOW_MINUTES` overlap guard window
When enabled, Celery Beat enqueues the scheduled sync task on the configured cron.
The task uses the existing ingestion service path and writes run/error records in the same tables as manual sync.
Valid cron examples:
- `*/30 * * * *` every 30 minutes
- `0 * * * *` hourly
- `15 2 * * *` daily at 02:15
Failure behavior for invalid cron values:
- invalid `INGESTION_SCHEDULE_CRON` does not crash unrelated startup paths (for example, web)
- periodic ingestion task is disabled until cron is fixed
- an error is logged at startup indicating the invalid schedule value
## Provider Backend Selection
Provider backend is selected via environment variables:
- `PROVIDER_BACKEND=demo` uses the local JSON fixture adapter (`mvp_demo`)
- `PROVIDER_BACKEND=balldontlie` uses the HTTP adapter (`balldontlie`)
- `PROVIDER_DEFAULT_NAMESPACE` can override backend mapping explicitly
The balldontlie adapter is NBA-centric and intended as MVP ingestion only. The provider abstraction remains ready for future multi-league providers (for example Sportradar or FIBA GDAP).
The adapter follows the published balldontlie OpenAPI contract: server `https://api.balldontlie.io`, NBA endpoints under `/nba/v1/*`, cursor pagination via `meta.next_cursor`, and `stats` ingestion filtered by `seasons[]`.
Some balldontlie plans do not include stats endpoints; set `PROVIDER_BALLDONTLIE_STATS_STRICT=0` (default) to ingest players/teams/seasons even when stats are unauthorized.
Provider normalization details and explicit adapter assumptions are documented in [docs/provider-normalization.md](docs/provider-normalization.md).
## GitFlow Workflow

View File

@ -43,6 +43,8 @@ class PlayerListSerializer(serializers.ModelSerializer):
nationality = serializers.CharField(source="nationality.name", allow_null=True)
nominal_position = serializers.CharField(source="nominal_position.code", allow_null=True)
inferred_role = serializers.CharField(source="inferred_role.name", allow_null=True)
origin_competition = serializers.CharField(source="origin_competition.name", allow_null=True)
origin_team = serializers.CharField(source="origin_team.name", allow_null=True)
class Meta:
model = Player
@ -53,6 +55,8 @@ class PlayerListSerializer(serializers.ModelSerializer):
"nationality",
"nominal_position",
"inferred_role",
"origin_competition",
"origin_team",
"height_cm",
"weight_kg",
"dominant_hand",
@ -88,6 +92,8 @@ class PlayerDetailSerializer(serializers.ModelSerializer):
nationality = serializers.CharField(source="nationality.name", allow_null=True)
nominal_position = serializers.CharField(source="nominal_position.name", allow_null=True)
inferred_role = serializers.CharField(source="inferred_role.name", allow_null=True)
origin_competition = serializers.CharField(source="origin_competition.name", allow_null=True)
origin_team = serializers.CharField(source="origin_team.name", allow_null=True)
age = serializers.SerializerMethodField()
aliases = serializers.SerializerMethodField()
season_stats = serializers.SerializerMethodField()
@ -102,6 +108,8 @@ class PlayerDetailSerializer(serializers.ModelSerializer):
"nationality",
"nominal_position",
"inferred_role",
"origin_competition",
"origin_team",
"height_cm",
"weight_kg",
"dominant_hand",

View File

@ -1,11 +1,19 @@
from rest_framework import generics
from rest_framework import status
from rest_framework.pagination import PageNumberPagination
from rest_framework.response import Response
from rest_framework.throttling import AnonRateThrottle, UserRateThrottle
from apps.competitions.models import Competition, Season
from apps.players.forms import PlayerSearchForm
from apps.players.models import Player
from apps.players.services.search import apply_sorting, base_player_queryset, filter_players
from apps.players.services.search import (
METRIC_SORT_KEYS,
annotate_player_metrics,
apply_sorting,
base_player_queryset,
filter_players,
)
from apps.teams.models import Team
from .permissions import ReadOnlyOrDeny
@ -30,17 +38,45 @@ class ReadOnlyBaseAPIView:
class PlayerSearchApiView(ReadOnlyBaseAPIView, generics.ListAPIView):
"""
Read-only player search API.
Metric sorts (`ppg_*`, `mpg_*`) follow the same best-eligible semantics as UI search:
max metric value across eligible player-season rows after applying search filters.
"""
serializer_class = PlayerListSerializer
pagination_class = ApiPagination
def get_search_form(self):
    """Return the request's PlayerSearchForm, built once and memoized.

    The same bound form instance is reused by ``list`` and ``get_queryset``
    so validation runs only once per request.
    """
    try:
        return self._search_form
    except AttributeError:
        self._search_form = PlayerSearchForm(self.request.query_params)
        return self._search_form
def _validation_error_response(self):
    """Build the 400 payload describing why the search form failed validation."""
    errors = self.get_search_form().errors.get_json_data(escape_html=True)
    payload = {
        "detail": "Invalid search parameters.",
        "errors": errors,
    }
    return Response(payload, status=status.HTTP_400_BAD_REQUEST)
def list(self, request, *args, **kwargs):
    """Serve search results, rejecting invalid bound search input with 400."""
    form = self.get_search_form()
    # Unbound forms (no query params at all) fall through to the default list.
    invalid_bound_input = form.is_bound and not form.is_valid()
    if invalid_bound_input:
        return self._validation_error_response()
    return super().list(request, *args, **kwargs)
def get_queryset(self):
form = PlayerSearchForm(self.request.query_params or None)
form = self.get_search_form()
queryset = base_player_queryset()
if form.is_valid():
queryset = filter_players(queryset, form.cleaned_data)
queryset = apply_sorting(queryset, form.cleaned_data.get("sort", "name_asc"))
else:
queryset = queryset.order_by("full_name", "id")
queryset = filter_players(queryset, form.cleaned_data)
sort_key = form.cleaned_data.get("sort", "name_asc")
if sort_key in METRIC_SORT_KEYS:
queryset = annotate_player_metrics(queryset, form.cleaned_data)
queryset = apply_sorting(queryset, sort_key)
return queryset
@ -50,6 +86,8 @@ class PlayerDetailApiView(ReadOnlyBaseAPIView, generics.RetrieveAPIView):
"nationality",
"nominal_position",
"inferred_role",
"origin_competition",
"origin_team",
).prefetch_related("aliases")

View File

@ -1,5 +1,8 @@
from django.contrib import admin
from django.contrib import messages
from django.db.models import Count
from apps.providers.registry import get_default_provider_namespace
from .models import IngestionError, IngestionRun
from .tasks import trigger_full_sync, trigger_incremental_sync
@ -18,7 +21,11 @@ class IngestionRunAdmin(admin.ModelAdmin):
"job_type",
"status",
"records_processed",
"records_created",
"records_updated",
"records_failed",
"error_count",
"short_error_summary",
"started_at",
"finished_at",
)
@ -36,25 +43,28 @@ class IngestionRunAdmin(admin.ModelAdmin):
"records_created",
"records_updated",
"records_failed",
"error_summary",
"context",
"raw_payload",
"created_at",
)
actions = (
"enqueue_full_sync_mvp",
"enqueue_incremental_sync_mvp",
"enqueue_full_sync_default_provider",
"enqueue_incremental_sync_default_provider",
"retry_selected_runs",
)
@admin.action(description="Queue full MVP sync")
def enqueue_full_sync_mvp(self, request, queryset):
trigger_full_sync.delay(provider_namespace="mvp_demo", triggered_by_id=request.user.id)
self.message_user(request, "Queued full MVP sync task.", level=messages.SUCCESS)
@admin.action(description="Queue full sync (default provider)")
def enqueue_full_sync_default_provider(self, request, queryset):
    """Enqueue a full-sync Celery task for the configured default provider."""
    namespace = get_default_provider_namespace()
    trigger_full_sync.delay(provider_namespace=namespace, triggered_by_id=request.user.id)
    self.message_user(request, f"Queued full sync task for {namespace}.", level=messages.SUCCESS)
@admin.action(description="Queue incremental MVP sync")
def enqueue_incremental_sync_mvp(self, request, queryset):
trigger_incremental_sync.delay(provider_namespace="mvp_demo", triggered_by_id=request.user.id)
self.message_user(request, "Queued incremental MVP sync task.", level=messages.SUCCESS)
@admin.action(description="Queue incremental sync (default provider)")
def enqueue_incremental_sync_default_provider(self, request, queryset):
    """Enqueue an incremental-sync Celery task for the configured default provider."""
    namespace = get_default_provider_namespace()
    trigger_incremental_sync.delay(provider_namespace=namespace, triggered_by_id=request.user.id)
    self.message_user(request, f"Queued incremental sync task for {namespace}.", level=messages.SUCCESS)
@admin.action(description="Retry selected ingestion runs")
def retry_selected_runs(self, request, queryset):
@ -75,6 +85,20 @@ class IngestionRunAdmin(admin.ModelAdmin):
count += 1
self.message_user(request, f"Queued {count} retry task(s).", level=messages.SUCCESS)
def get_queryset(self, request):
    """Annotate every run with its related error count for display/sorting."""
    base = super().get_queryset(request)
    return base.annotate(_error_count=Count("errors"))
@admin.display(ordering="_error_count", description="Errors")
def error_count(self, obj):
    """Count of related IngestionError rows, read from the queryset annotation."""
    # Defaults to 0 when the annotation is absent (e.g. object fetched directly).
    annotated = getattr(obj, "_error_count", 0)
    return annotated
@admin.display(description="Error summary")
def short_error_summary(self, obj):
    """Changelist-friendly error summary, truncated to 90 chars ("-" when empty)."""
    summary = obj.error_summary
    if not summary:
        return "-"
    if len(summary) > 90:
        return summary[:90] + "..."
    return summary
@admin.register(IngestionError)
class IngestionErrorAdmin(admin.ModelAdmin):

View File

@ -0,0 +1,18 @@
# Generated by Django 5.2.12 on 2026-03-10 16:12
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a free-text ``error_summary`` field to ``IngestionRun``.

    Lets a run record top-level failure/partial-failure context directly,
    complementing the per-record ``IngestionError`` rows.
    """

    dependencies = [
        ("ingestion", "0001_initial"),
    ]

    operations = [
        migrations.AddField(
            model_name="ingestionrun",
            name="error_summary",
            # blank=True + empty default keeps the migration backfill-free.
            field=models.TextField(blank=True, default=""),
        ),
    ]

View File

@ -31,6 +31,7 @@ class IngestionRun(models.Model):
records_created = models.PositiveIntegerField(default=0)
records_updated = models.PositiveIntegerField(default=0)
records_failed = models.PositiveIntegerField(default=0)
error_summary = models.TextField(blank=True, default="")
context = models.JSONField(default=dict, blank=True)
raw_payload = models.JSONField(default=dict, blank=True)
created_at = models.DateTimeField(auto_now_add=True)

View File

@ -1,3 +1,7 @@
import hashlib
from contextlib import contextmanager
from django.db import connection
from django.utils import timezone
from apps.ingestion.models import IngestionError, IngestionRun
@ -14,12 +18,22 @@ def start_ingestion_run(*, provider_namespace: str, job_type: str, triggered_by=
)
def finish_ingestion_run(*, run: IngestionRun, status: str, processed: int = 0, created: int = 0, updated: int = 0, failed: int = 0) -> IngestionRun:
def finish_ingestion_run(
*,
run: IngestionRun,
status: str,
processed: int = 0,
created: int = 0,
updated: int = 0,
failed: int = 0,
error_summary: str = "",
) -> IngestionRun:
run.status = status
run.records_processed = processed
run.records_created = created
run.records_updated = updated
run.records_failed = failed
run.error_summary = error_summary
run.finished_at = timezone.now()
run.save(
update_fields=[
@ -28,12 +42,82 @@ def finish_ingestion_run(*, run: IngestionRun, status: str, processed: int = 0,
"records_created",
"records_updated",
"records_failed",
"error_summary",
"finished_at",
]
)
return run
def mark_ingestion_run_skipped(*, provider_namespace: str, job_type: str, reason: str, context: dict | None = None) -> IngestionRun:
    """Record a run that was skipped (e.g. by the overlap guard) as CANCELED.

    The run starts and finishes at the same instant and carries the skip
    reason in ``error_summary`` so it stays visible in admin listings.
    """
    timestamp = timezone.now()
    return IngestionRun.objects.create(
        provider_namespace=provider_namespace,
        job_type=job_type,
        status=IngestionRun.RunStatus.CANCELED,
        started_at=timestamp,
        finished_at=timestamp,
        error_summary=reason,
        context=context or {},
    )
def _build_ingestion_lock_key(*, provider_namespace: str, job_type: str) -> int:
digest = hashlib.blake2b(
f"{provider_namespace}:{job_type}".encode("utf-8"),
digest_size=8,
).digest()
return int.from_bytes(digest, byteorder="big", signed=True)
def try_acquire_ingestion_lock(*, provider_namespace: str, job_type: str) -> tuple[bool, int]:
    """Attempt a non-blocking PostgreSQL advisory lock for this job identity.

    Returns ``(acquired, lock_key)``; the key is returned so the caller can
    release the lock later when acquisition succeeded.
    """
    key = _build_ingestion_lock_key(provider_namespace=provider_namespace, job_type=job_type)
    with connection.cursor() as cursor:
        cursor.execute("SELECT pg_try_advisory_lock(%s);", [key])
        row = cursor.fetchone()
    return bool(row[0]), key
def release_ingestion_lock(*, lock_key: int) -> None:
    """Release the PostgreSQL advisory lock identified by ``lock_key``."""
    with connection.cursor() as cur:
        cur.execute("SELECT pg_advisory_unlock(%s);", [lock_key])
@contextmanager
def ingestion_advisory_lock(*, provider_namespace: str, job_type: str):
    """Context manager yielding whether the advisory lock was acquired.

    The lock is released on exit only when this context actually acquired
    it, so a losing contender never unlocks the winner's lock.
    """
    got_lock, key = try_acquire_ingestion_lock(provider_namespace=provider_namespace, job_type=job_type)
    try:
        yield got_lock
    finally:
        if got_lock:
            release_ingestion_lock(lock_key=key)
def update_ingestion_run_progress(
    *,
    run: IngestionRun,
    completed_step: str,
    step_summary: dict,
    source_counts: dict | None = None,
) -> IngestionRun:
    """Append per-step progress bookkeeping to ``run.context`` and persist it.

    Records the ordered list of completed steps, a per-step summary mapping,
    and (optionally) provider-side source counts. Copies are taken so the
    stored JSON is rebuilt rather than mutated in place, and only the
    ``context`` column is written.
    """
    ctx = dict(run.context or {})
    steps_done = list(ctx.get("completed_steps") or [])
    steps_done.append(completed_step)
    summaries = dict(ctx.get("step_summaries") or {})
    summaries[completed_step] = step_summary
    ctx["completed_steps"] = steps_done
    ctx["step_summaries"] = summaries
    if source_counts is not None:
        ctx["source_counts"] = source_counts
    run.context = ctx
    run.save(update_fields=["context"])
    return run
def log_ingestion_error(*, run: IngestionRun, message: str, provider_namespace: str, severity: str = IngestionError.Severity.ERROR, entity_type: str = "", external_id: str = "", raw_payload: dict | None = None) -> IngestionError:
return IngestionError.objects.create(
ingestion_run=run,

View File

@ -9,8 +9,14 @@ from django.utils.dateparse import parse_date
from apps.competitions.models import Competition, Season
from apps.ingestion.models import IngestionRun
from apps.ingestion.services.runs import finish_ingestion_run, log_ingestion_error, start_ingestion_run
from apps.ingestion.services.runs import (
finish_ingestion_run,
log_ingestion_error,
start_ingestion_run,
update_ingestion_run_progress,
)
from apps.players.models import Nationality, Player, PlayerAlias, PlayerCareerEntry, Position, Role
from apps.players.services.origin import refresh_player_origin
from apps.providers.exceptions import ProviderRateLimitError, ProviderTransientError
from apps.providers.registry import get_provider
from apps.providers.services.mappings import upsert_external_mapping
@ -358,6 +364,7 @@ def _sync_player_stats(provider_namespace: str, payloads: list[dict], run: Inges
def _sync_player_careers(provider_namespace: str, payloads: list[dict], run: IngestionRun, summary: SyncSummary):
touched_player_ids: set[int] = set()
for payload in payloads:
summary.processed += 1
external_id = payload.get("external_id", "")
@ -380,6 +387,7 @@ def _sync_player_careers(provider_namespace: str, payloads: list[dict], run: Ing
)
continue
touched_player_ids.add(player.id)
_, created = PlayerCareerEntry.objects.update_or_create(
player=player,
team=team,
@ -399,6 +407,10 @@ def _sync_player_careers(provider_namespace: str, payloads: list[dict], run: Ing
else:
summary.updated += 1
if touched_player_ids:
for player in Player.objects.filter(id__in=touched_player_ids):
refresh_player_origin(player)
def run_sync_job(
*,
@ -420,6 +432,12 @@ def run_sync_job(
context=context or {},
)
summary = SyncSummary()
logger.info(
"Starting ingestion run id=%s provider=%s job_type=%s",
run.id,
provider_namespace,
job_type,
)
try:
provider = get_provider(provider_namespace)
@ -428,15 +446,58 @@ def run_sync_job(
if job_type == IngestionRun.JobType.INCREMENTAL
else provider.sync_all()
)
source_counts = {
"competitions": len(payload.get("competitions", [])),
"teams": len(payload.get("teams", [])),
"seasons": len(payload.get("seasons", [])),
"players": len(payload.get("players", [])),
"player_stats": len(payload.get("player_stats", [])),
"player_careers": len(payload.get("player_careers", [])),
}
with transaction.atomic():
_sync_competitions(provider_namespace, payload.get("competitions", []), run, summary)
_sync_teams(provider_namespace, payload.get("teams", []), run, summary)
_sync_seasons(provider_namespace, payload.get("seasons", []), run, summary)
_sync_players(provider_namespace, payload.get("players", []), run, summary)
_sync_player_stats(provider_namespace, payload.get("player_stats", []), run, summary)
_sync_player_careers(provider_namespace, payload.get("player_careers", []), run, summary)
steps: list[tuple[str, callable, list[dict]]] = [
("competitions", _sync_competitions, payload.get("competitions", [])),
("teams", _sync_teams, payload.get("teams", [])),
("seasons", _sync_seasons, payload.get("seasons", [])),
("players", _sync_players, payload.get("players", [])),
("player_stats", _sync_player_stats, payload.get("player_stats", [])),
("player_careers", _sync_player_careers, payload.get("player_careers", [])),
]
for step_name, step_fn, step_payload in steps:
step_summary = SyncSummary()
with transaction.atomic():
step_fn(provider_namespace, step_payload, run, step_summary)
summary.processed += step_summary.processed
summary.created += step_summary.created
summary.updated += step_summary.updated
summary.failed += step_summary.failed
update_ingestion_run_progress(
run=run,
completed_step=step_name,
step_summary={
"processed": step_summary.processed,
"created": step_summary.created,
"updated": step_summary.updated,
"failed": step_summary.failed,
},
source_counts=source_counts,
)
logger.info(
"Completed ingestion step run_id=%s step=%s processed=%s created=%s updated=%s failed=%s",
run.id,
step_name,
step_summary.processed,
step_summary.created,
step_summary.updated,
step_summary.failed,
)
success_error_summary = ""
if summary.failed > 0:
success_error_summary = f"Completed with {summary.failed} failed record(s)."
finish_ingestion_run(
run=run,
status=IngestionRun.RunStatus.SUCCESS,
@ -444,6 +505,16 @@ def run_sync_job(
created=summary.created,
updated=summary.updated,
failed=summary.failed,
error_summary=success_error_summary,
)
logger.info(
"Completed ingestion run id=%s status=%s processed=%s created=%s updated=%s failed=%s",
run.id,
IngestionRun.RunStatus.SUCCESS,
summary.processed,
summary.created,
summary.updated,
summary.failed,
)
return run
@ -464,6 +535,7 @@ def run_sync_job(
created=summary.created,
updated=summary.updated,
failed=summary.failed + 1,
error_summary=f"Rate limit from provider: {exc}",
)
raise
@ -483,6 +555,7 @@ def run_sync_job(
created=summary.created,
updated=summary.updated,
failed=summary.failed + 1,
error_summary=f"Transient provider error: {exc}",
)
raise
@ -502,5 +575,6 @@ def run_sync_job(
created=summary.created,
updated=summary.updated,
failed=summary.failed + 1,
error_summary=f"Unhandled ingestion error: {exc}",
)
raise

View File

@ -1,8 +1,57 @@
import logging
from celery import shared_task
from django.conf import settings
from apps.ingestion.models import IngestionRun
from apps.ingestion.services.runs import ingestion_advisory_lock, mark_ingestion_run_skipped
from apps.ingestion.services.sync import run_sync_job
from apps.providers.exceptions import ProviderRateLimitError, ProviderTransientError
from apps.providers.registry import get_default_provider_namespace
logger = logging.getLogger(__name__)
def _run_sync_with_overlap_guard(
    *,
    provider_namespace: str,
    job_type: str,
    triggered_by_id: int | None = None,
    context: dict | None = None,
    cursor: str | None = None,
):
    """Run a sync job, honoring the overlap-prevention setting.

    When INGESTION_PREVENT_OVERLAP is enabled, the job only runs while holding
    the per-(provider, job_type) advisory lock; if the lock is already held,
    a "skipped" IngestionRun is recorded instead of running. Returns the run id
    in every branch.
    """
    effective_context = context or {}

    def _execute() -> int:
        # Single shared happy path: run the job and expose only its run id.
        return run_sync_job(
            provider_namespace=provider_namespace,
            job_type=job_type,
            triggered_by_id=triggered_by_id,
            context=effective_context,
            cursor=cursor,
        ).id

    if not settings.INGESTION_PREVENT_OVERLAP:
        return _execute()

    # The lock is held for the entire job so concurrent triggers skip cleanly.
    with ingestion_advisory_lock(provider_namespace=provider_namespace, job_type=job_type) as acquired:
        if acquired:
            return _execute()
        reason = (
            f"Skipped due to advisory lock for provider={provider_namespace}, "
            f"job_type={job_type}."
        )
        logger.warning(reason)
        skipped_run = mark_ingestion_run_skipped(
            provider_namespace=provider_namespace,
            job_type=job_type,
            reason=reason,
            context=effective_context,
        )
        return skipped_run.id
@shared_task(
@ -13,12 +62,12 @@ from apps.providers.exceptions import ProviderRateLimitError, ProviderTransientE
retry_kwargs={"max_retries": 5},
)
def trigger_full_sync(self, provider_namespace: str, triggered_by_id: int | None = None, context: dict | None = None):
return run_sync_job(
return _run_sync_with_overlap_guard(
provider_namespace=provider_namespace,
job_type=IngestionRun.JobType.FULL_SYNC,
triggered_by_id=triggered_by_id,
context=context or {},
).id
)
@shared_task(
@ -29,10 +78,34 @@ def trigger_full_sync(self, provider_namespace: str, triggered_by_id: int | None
retry_kwargs={"max_retries": 5},
)
def trigger_incremental_sync(self, provider_namespace: str, cursor: str | None = None, triggered_by_id: int | None = None, context: dict | None = None):
return run_sync_job(
return _run_sync_with_overlap_guard(
provider_namespace=provider_namespace,
job_type=IngestionRun.JobType.INCREMENTAL,
triggered_by_id=triggered_by_id,
context=context or {},
cursor=cursor,
).id
)
@shared_task(
    bind=True,
    name="apps.ingestion.tasks.scheduled_provider_sync",
    autoretry_for=(ProviderRateLimitError, ProviderTransientError),
    retry_backoff=True,
    retry_jitter=True,
    retry_kwargs={"max_retries": 5},
)
def scheduled_provider_sync(self):
    """Celery Beat entry point: run the configured provider sync.

    Resolves the provider namespace from settings (falling back to the
    registry default) and dispatches either a full or incremental sync
    through the overlap guard. Returns the ingestion run id.
    """
    provider_namespace = settings.INGESTION_SCHEDULE_PROVIDER_NAMESPACE or get_default_provider_namespace()
    context = {"trigger": "celery_beat", "task_id": self.request.id}
    # Anything other than an explicit FULL_SYNC setting means incremental.
    if settings.INGESTION_SCHEDULE_JOB_TYPE == IngestionRun.JobType.FULL_SYNC:
        job_type = IngestionRun.JobType.FULL_SYNC
    else:
        job_type = IngestionRun.JobType.INCREMENTAL
    return _run_sync_with_overlap_guard(
        provider_namespace=provider_namespace,
        job_type=job_type,
        context=context,
    )

View File

@ -1,6 +1,8 @@
from django.contrib import admin
from django.contrib import messages
from .models import Nationality, Player, PlayerAlias, PlayerCareerEntry, Position, Role
from .services.origin import refresh_player_origins
@admin.register(Nationality)
@ -39,11 +41,26 @@ class PlayerAdmin(admin.ModelAdmin):
"nationality",
"nominal_position",
"inferred_role",
"origin_competition",
"origin_team",
"is_active",
)
list_filter = ("is_active", "nationality", "nominal_position", "inferred_role")
list_filter = (
"is_active",
"nationality",
"nominal_position",
"inferred_role",
"origin_competition",
"origin_team",
)
search_fields = ("full_name", "first_name", "last_name")
inlines = (PlayerAliasInline, PlayerCareerEntryInline)
actions = ("recompute_origin_fields",)
@admin.action(description="Recompute origin fields")
def recompute_origin_fields(self, request, queryset):
    # Admin bulk action: re-derive origin_competition/origin_team for the
    # selected players via the origin service, then report how many changed.
    updated = refresh_player_origins(queryset)
    self.message_user(request, f"Updated origin fields for {updated} player(s).", level=messages.SUCCESS)
@admin.register(PlayerAlias)

View File

@ -13,10 +13,10 @@ class PlayerSearchForm(forms.Form):
("age_oldest", "Age (Oldest first)"),
("height_desc", "Height (Tallest first)"),
("height_asc", "Height (Shortest first)"),
("ppg_desc", "Points per game (High to low)"),
("ppg_asc", "Points per game (Low to high)"),
("mpg_desc", "Minutes per game (High to low)"),
("mpg_asc", "Minutes per game (Low to high)"),
("ppg_desc", "Best eligible PPG (High to low)"),
("ppg_asc", "Best eligible PPG (Low to high)"),
("mpg_desc", "Best eligible MPG (High to low)"),
("mpg_asc", "Best eligible MPG (Low to high)"),
)
PAGE_SIZE_CHOICES = ((20, "20"), (50, "50"), (100, "100"))
@ -25,8 +25,10 @@ class PlayerSearchForm(forms.Form):
nominal_position = forms.ModelChoiceField(queryset=Position.objects.none(), required=False)
inferred_role = forms.ModelChoiceField(queryset=Role.objects.none(), required=False)
competition = forms.ModelChoiceField(queryset=Competition.objects.none(), required=False)
origin_competition = forms.ModelChoiceField(queryset=Competition.objects.none(), required=False)
nationality = forms.ModelChoiceField(queryset=Nationality.objects.none(), required=False)
team = forms.ModelChoiceField(queryset=Team.objects.none(), required=False)
origin_team = forms.ModelChoiceField(queryset=Team.objects.none(), required=False)
season = forms.ModelChoiceField(queryset=Season.objects.none(), required=False)
age_min = forms.IntegerField(required=False, min_value=0, max_value=60, label="Min age")
@ -86,8 +88,10 @@ class PlayerSearchForm(forms.Form):
self.fields["nominal_position"].queryset = Position.objects.order_by("code")
self.fields["inferred_role"].queryset = Role.objects.order_by("name")
self.fields["competition"].queryset = Competition.objects.order_by("name")
self.fields["origin_competition"].queryset = Competition.objects.order_by("name")
self.fields["nationality"].queryset = Nationality.objects.order_by("name")
self.fields["team"].queryset = Team.objects.order_by("name")
self.fields["origin_team"].queryset = Team.objects.order_by("name")
self.fields["season"].queryset = Season.objects.order_by("-start_date")
def clean(self):

View File

@ -0,0 +1,34 @@
# Generated by Django 5.2.12 on 2026-03-10 11:17
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds nullable origin_competition / origin_team FKs to Player plus
    # supporting indexes. SET_NULL keeps players when the referenced
    # competition/team is deleted. Index names are auto-generated by Django
    # and must not be edited.

    dependencies = [
        ('competitions', '0002_initial'),
        ('players', '0002_initial'),
        ('teams', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='player',
            name='origin_competition',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='origin_players', to='competitions.competition'),
        ),
        migrations.AddField(
            model_name='player',
            name='origin_team',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='origin_players', to='teams.team'),
        ),
        migrations.AddIndex(
            model_name='player',
            index=models.Index(fields=['origin_competition'], name='players_pla_origin__1a711b_idx'),
        ),
        migrations.AddIndex(
            model_name='player',
            index=models.Index(fields=['origin_team'], name='players_pla_origin__b33403_idx'),
        ),
    ]

View File

@ -0,0 +1,35 @@
from django.db import migrations
from django.db.models import F, Q
def backfill_player_origins(apps, schema_editor):
    """One-off data migration: populate origin fields from career history.

    For each player, picks the earliest career entry that references a
    competition or a team (start_date first, then the season's start date,
    NULLs last, id as tiebreaker) and copies its competition/team onto the
    player. NOTE(review): this ordering intentionally mirrors the runtime
    origin service — keep the two in sync if either changes.
    """
    # Historical models: never import the real Player here.
    Player = apps.get_model("players", "Player")
    PlayerCareerEntry = apps.get_model("players", "PlayerCareerEntry")
    # iterator() keeps memory flat on large player tables.
    for player in Player.objects.all().iterator():
        entry = (
            PlayerCareerEntry.objects.filter(player=player)
            .filter(Q(competition__isnull=False) | Q(team__isnull=False))
            .order_by(
                F("start_date").asc(nulls_last=True),
                F("season__start_date").asc(nulls_last=True),
                "id",
            )
            .first()
        )
        if entry is None:
            # No meaningful career data; leave origin fields untouched (NULL).
            continue
        player.origin_competition_id = entry.competition_id
        player.origin_team_id = entry.team_id
        player.save(update_fields=["origin_competition", "origin_team"])
class Migration(migrations.Migration):
    # Data-only migration backfilling the origin fields added in 0003.
    # Reverse is a no-op: un-applying simply leaves the backfilled values.

    dependencies = [
        ("players", "0003_player_origin_competition_player_origin_team_and_more"),
    ]

    operations = [
        migrations.RunPython(backfill_player_origins, migrations.RunPython.noop),
    ]

View File

@ -0,0 +1,17 @@
# Generated by Django 5.2.12 on 2026-03-10 17:05
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds an index on Player.weight_kg (used by the search weight filters).
    # Index name is auto-generated by Django and must not be edited.

    dependencies = [
        ("players", "0004_backfill_player_origins"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="player",
            index=models.Index(fields=["weight_kg"], name="players_pla_weight__fb76a4_idx"),
        ),
    ]

View File

@ -80,6 +80,20 @@ class Player(TimeStampedModel):
null=True,
related_name="role_players",
)
origin_competition = models.ForeignKey(
"competitions.Competition",
on_delete=models.SET_NULL,
blank=True,
null=True,
related_name="origin_players",
)
origin_team = models.ForeignKey(
"teams.Team",
on_delete=models.SET_NULL,
blank=True,
null=True,
related_name="origin_players",
)
height_cm = models.PositiveSmallIntegerField(blank=True, null=True)
weight_kg = models.PositiveSmallIntegerField(blank=True, null=True)
wingspan_cm = models.PositiveSmallIntegerField(blank=True, null=True)
@ -105,8 +119,11 @@ class Player(TimeStampedModel):
models.Index(fields=["nationality"]),
models.Index(fields=["nominal_position"]),
models.Index(fields=["inferred_role"]),
models.Index(fields=["origin_competition"]),
models.Index(fields=["origin_team"]),
models.Index(fields=["is_active"]),
models.Index(fields=["height_cm"]),
models.Index(fields=["weight_kg"]),
]
def __str__(self) -> str:

View File

@ -0,0 +1,46 @@
from django.db.models import F, Q, QuerySet
from apps.players.models import Player, PlayerCareerEntry
def get_origin_career_entry(player: Player) -> PlayerCareerEntry | None:
    """Return the player's earliest "meaningful" career entry, or None.

    An entry is meaningful when it names a competition or a team. Ordering is
    start_date first, then the season's start date (NULLs sorted last in both),
    with id as a deterministic tiebreaker. Related competition/team/season rows
    are fetched eagerly via select_related.
    """
    has_origin_data = Q(competition__isnull=False) | Q(team__isnull=False)
    candidates = (
        PlayerCareerEntry.objects.select_related("competition", "team", "season")
        .filter(player=player)
        .filter(has_origin_data)
    )
    earliest_first = (
        F("start_date").asc(nulls_last=True),
        F("season__start_date").asc(nulls_last=True),
        "id",
    )
    return candidates.order_by(*earliest_first).first()
def refresh_player_origin(player: Player, *, save: bool = True) -> bool:
    """Recompute a player's origin fields from career history.

    Returns True when either field changed. When *save* is True (default) and
    a change occurred, persists only the origin fields plus updated_at.
    """
    entry = get_origin_career_entry(player)
    new_competition = entry.competition if entry else None
    new_team = entry.team if entry else None
    new_competition_id = new_competition.id if new_competition else None
    new_team_id = new_team.id if new_team else None

    # Compare by FK id so an unchanged assignment never triggers a write.
    if player.origin_competition_id == new_competition_id and player.origin_team_id == new_team_id:
        return False

    player.origin_competition = new_competition
    player.origin_team = new_team
    if save:
        player.save(update_fields=["origin_competition", "origin_team", "updated_at"])
    return True
def refresh_player_origins(queryset: QuerySet[Player] | None = None) -> int:
    """Recompute origin fields for every player in *queryset* (default: all).

    Returns the number of players whose origin fields actually changed.
    """
    players = Player.objects.all() if queryset is None else queryset
    # iterator() streams rows instead of caching the whole queryset.
    return sum(1 for player in players.iterator() if refresh_player_origin(player))

View File

@ -3,11 +3,13 @@ from datetime import date, timedelta
from django.db.models import (
Case,
DecimalField,
Exists,
ExpressionWrapper,
F,
FloatField,
IntegerField,
Max,
OuterRef,
Q,
Value,
When,
@ -15,6 +17,13 @@ from django.db.models import (
from django.db.models.functions import Coalesce
from apps.players.models import Player
from apps.stats.models import PlayerSeason
METRIC_SORT_KEYS = {"ppg_desc", "ppg_asc", "mpg_desc", "mpg_asc"}
SEARCH_METRIC_SEMANTICS_TEXT = (
"Search metrics are best eligible values per player (max per metric across eligible player-season rows). "
"With season/team/competition/stat filters, eligibility is scoped by those filters."
)
def _years_ago_today(years: int) -> date:
@ -29,63 +38,108 @@ def _years_ago_today(years: int) -> date:
def _apply_min_max_filter(queryset, min_key: str, max_key: str, field_name: str, data: dict):
min_value = data.get(min_key)
max_value = data.get(max_key)
if min_value is not None:
if min_value not in (None, ""):
queryset = queryset.filter(**{f"{field_name}__gte": min_value})
if max_value is not None:
if max_value not in (None, ""):
queryset = queryset.filter(**{f"{field_name}__lte": max_value})
return queryset
def filter_players(queryset, data: dict):
query = data.get("q")
if query:
queryset = queryset.filter(Q(full_name__icontains=query) | Q(aliases__alias__icontains=query))
if data.get("nominal_position"):
queryset = queryset.filter(nominal_position=data["nominal_position"])
if data.get("inferred_role"):
queryset = queryset.filter(inferred_role=data["inferred_role"])
if data.get("nationality"):
queryset = queryset.filter(nationality=data["nationality"])
if data.get("team"):
queryset = queryset.filter(player_seasons__team=data["team"])
if data.get("competition"):
queryset = queryset.filter(player_seasons__competition=data["competition"])
if data.get("season"):
queryset = queryset.filter(player_seasons__season=data["season"])
queryset = _apply_min_max_filter(queryset, "height_min", "height_max", "height_cm", data)
queryset = _apply_min_max_filter(queryset, "weight_min", "weight_max", "weight_kg", data)
age_min = data.get("age_min")
age_max = data.get("age_max")
if age_min is not None:
queryset = queryset.filter(birth_date__lte=_years_ago_today(age_min))
if age_max is not None:
earliest_birth = _years_ago_today(age_max + 1) + timedelta(days=1)
queryset = queryset.filter(birth_date__gte=earliest_birth)
queryset = _apply_min_max_filter(
queryset,
def _season_scope_filter_keys() -> tuple[str, ...]:
return (
"q",
"team",
"competition",
"season",
"games_played_min",
"games_played_max",
"player_seasons__games_played",
data,
"minutes_per_game_min",
"minutes_per_game_max",
"points_per_game_min",
"points_per_game_max",
"rebounds_per_game_min",
"rebounds_per_game_max",
"assists_per_game_min",
"assists_per_game_max",
"steals_per_game_min",
"steals_per_game_max",
"blocks_per_game_min",
"blocks_per_game_max",
"turnovers_per_game_min",
"turnovers_per_game_max",
"fg_pct_min",
"fg_pct_max",
"three_pct_min",
"three_pct_max",
"ft_pct_min",
"ft_pct_max",
"efficiency_metric_min",
"efficiency_metric_max",
)
mpg_min = data.get("minutes_per_game_min")
mpg_max = data.get("minutes_per_game_max")
if mpg_min is not None:
queryset = queryset.filter(player_seasons__games_played__gt=0).filter(
player_seasons__minutes_played__gte=F("player_seasons__games_played") * mpg_min
def _has_season_scope_filters(data: dict) -> bool:
    """True when any season-scoped filter (beyond the free-text query) is set."""
    for key in _season_scope_filter_keys():
        if key == "q":
            # Free-text search filters players directly, not player-seasons.
            continue
        if data.get(key) not in (None, ""):
            return True
    return False
def _apply_mpg_filter(queryset, *, games_field: str, minutes_field: str, min_value, max_value):
    """Apply minutes-per-game bounds without dividing in SQL.

    MPG >= bound is expressed as minutes >= games * bound (and symmetrically
    for the upper bound), guarded by games > 0 so zero-game rows never match.
    Empty-string / None bounds are treated as "not set".
    """
    for lookup_suffix, bound in (("__gte", min_value), ("__lte", max_value)):
        if bound in (None, ""):
            continue
        queryset = queryset.filter(**{f"{games_field}__gt": 0}).filter(
            **{f"{minutes_field}{lookup_suffix}": F(games_field) * bound}
        )
    return queryset
def _apply_player_season_scope_filters(queryset, data: dict):
    """Apply team/competition/season and per-game stat bounds to a
    PlayerSeason queryset.

    Intended for use on a subquery (e.g. inside Exists) so that one
    player-season row must satisfy all scoped conditions together.
    NOTE(review): `stats__` lookups appear to traverse a related stats record
    per player-season — confirm it is single-valued before reordering filters.
    """
    if data.get("team"):
        queryset = queryset.filter(team=data["team"])
    if data.get("competition"):
        queryset = queryset.filter(competition=data["competition"])
    if data.get("season"):
        queryset = queryset.filter(season=data["season"])
    queryset = _apply_min_max_filter(queryset, "games_played_min", "games_played_max", "games_played", data)
    # MPG bounds are expressed as minutes vs. games * bound by the helper.
    queryset = _apply_mpg_filter(
        queryset,
        games_field="games_played",
        minutes_field="minutes_played",
        min_value=data.get("minutes_per_game_min"),
        max_value=data.get("minutes_per_game_max"),
    )
    # (form min key, form max key, PlayerSeason stats lookup) triples.
    stat_pairs = (
        ("points_per_game_min", "points_per_game_max", "stats__points"),
        ("rebounds_per_game_min", "rebounds_per_game_max", "stats__rebounds"),
        ("assists_per_game_min", "assists_per_game_max", "stats__assists"),
        ("steals_per_game_min", "steals_per_game_max", "stats__steals"),
        ("blocks_per_game_min", "blocks_per_game_max", "stats__blocks"),
        ("turnovers_per_game_min", "turnovers_per_game_max", "stats__turnovers"),
        ("fg_pct_min", "fg_pct_max", "stats__fg_pct"),
        ("three_pct_min", "three_pct_max", "stats__three_pct"),
        ("ft_pct_min", "ft_pct_max", "stats__ft_pct"),
        ("efficiency_metric_min", "efficiency_metric_max", "stats__player_efficiency_rating"),
    )
    for min_key, max_key, field_name in stat_pairs:
        queryset = _apply_min_max_filter(queryset, min_key, max_key, field_name, data)
    return queryset
def _build_metric_context_filter(data: dict) -> Q:
context_filter = Q()
if data.get("team"):
context_filter &= Q(player_seasons__team=data["team"])
if data.get("competition"):
context_filter &= Q(player_seasons__competition=data["competition"])
if data.get("season"):
context_filter &= Q(player_seasons__season=data["season"])
minmax_pairs = (
("games_played_min", "games_played_max", "player_seasons__games_played"),
("points_per_game_min", "points_per_game_max", "player_seasons__stats__points"),
("rebounds_per_game_min", "rebounds_per_game_max", "player_seasons__stats__rebounds"),
("assists_per_game_min", "assists_per_game_max", "player_seasons__stats__assists"),
@ -101,8 +155,77 @@ def filter_players(queryset, data: dict):
"player_seasons__stats__player_efficiency_rating",
),
)
for min_key, max_key, field_name in stat_pairs:
queryset = _apply_min_max_filter(queryset, min_key, max_key, field_name, data)
for min_key, max_key, field_name in minmax_pairs:
min_value = data.get(min_key)
max_value = data.get(max_key)
if min_value not in (None, ""):
context_filter &= Q(**{f"{field_name}__gte": min_value})
if max_value not in (None, ""):
context_filter &= Q(**{f"{field_name}__lte": max_value})
mpg_min = data.get("minutes_per_game_min")
mpg_max = data.get("minutes_per_game_max")
if mpg_min not in (None, ""):
context_filter &= Q(player_seasons__games_played__gt=0) & Q(
player_seasons__minutes_played__gte=F("player_seasons__games_played") * mpg_min
)
if mpg_max not in (None, ""):
context_filter &= Q(player_seasons__games_played__gt=0) & Q(
player_seasons__minutes_played__lte=F("player_seasons__games_played") * mpg_max
)
return context_filter
def filter_players(queryset, data: dict):
    """Apply all player-search filters from cleaned form *data*.

    Free-text queries join through aliases (a multi-valued relation), so the
    result is de-duplicated with distinct() only in that case. Season-scoped
    filters go through an Exists() subquery so that a single player-season
    row must satisfy all of them together.
    """
    query = data.get("q")
    if query:
        name_match = Q(full_name__icontains=query) | Q(aliases__alias__icontains=query)
        queryset = queryset.filter(name_match)

    # Direct foreign-key filters on the player row itself.
    for field in ("nominal_position", "inferred_role", "nationality", "origin_competition", "origin_team"):
        value = data.get(field)
        if value:
            queryset = queryset.filter(**{field: value})

    queryset = _apply_min_max_filter(queryset, "height_min", "height_max", "height_cm", data)
    queryset = _apply_min_max_filter(queryset, "weight_min", "weight_max", "weight_kg", data)

    age_min = data.get("age_min")
    age_max = data.get("age_max")
    if age_min is not None:
        # Must already have turned age_min years old.
        queryset = queryset.filter(birth_date__lte=_years_ago_today(age_min))
    if age_max is not None:
        # Youngest allowed birth date: the day after turning (age_max + 1).
        earliest_birth = _years_ago_today(age_max + 1) + timedelta(days=1)
        queryset = queryset.filter(birth_date__gte=earliest_birth)

    if _has_season_scope_filters(data):
        scoped_seasons = _apply_player_season_scope_filters(
            PlayerSeason.objects.filter(player_id=OuterRef("pk")),
            data,
        )
        queryset = queryset.filter(Exists(scoped_seasons))

    return queryset.distinct() if query else queryset
def annotate_player_metrics(queryset, data: dict | None = None):
"""
Annotate player list metrics using best-eligible semantics.
Each metric is computed as MAX over eligible player-season rows. This is intentionally
not a single-row projection; different displayed metrics for one player can come from
different eligible player-season rows.
"""
data = data or {}
context_filter = _build_metric_context_filter(data)
mpg_expression = Case(
When(
@ -116,47 +239,45 @@ def filter_players(queryset, data: dict):
output_field=FloatField(),
)
queryset = queryset.annotate(
return queryset.annotate(
games_played_value=Coalesce(
Max("player_seasons__games_played"),
Max("player_seasons__games_played", filter=context_filter),
Value(0, output_field=IntegerField()),
output_field=IntegerField(),
),
mpg_value=Coalesce(Max(mpg_expression), Value(0.0)),
mpg_value=Coalesce(Max(mpg_expression, filter=context_filter), Value(0.0)),
ppg_value=Coalesce(
Max("player_seasons__stats__points"),
Max("player_seasons__stats__points", filter=context_filter),
Value(0, output_field=DecimalField(max_digits=6, decimal_places=2)),
output_field=DecimalField(max_digits=6, decimal_places=2),
),
rpg_value=Coalesce(
Max("player_seasons__stats__rebounds"),
Max("player_seasons__stats__rebounds", filter=context_filter),
Value(0, output_field=DecimalField(max_digits=6, decimal_places=2)),
output_field=DecimalField(max_digits=6, decimal_places=2),
),
apg_value=Coalesce(
Max("player_seasons__stats__assists"),
Max("player_seasons__stats__assists", filter=context_filter),
Value(0, output_field=DecimalField(max_digits=6, decimal_places=2)),
output_field=DecimalField(max_digits=6, decimal_places=2),
),
spg_value=Coalesce(
Max("player_seasons__stats__steals"),
Max("player_seasons__stats__steals", filter=context_filter),
Value(0, output_field=DecimalField(max_digits=6, decimal_places=2)),
output_field=DecimalField(max_digits=6, decimal_places=2),
),
bpg_value=Coalesce(
Max("player_seasons__stats__blocks"),
Max("player_seasons__stats__blocks", filter=context_filter),
Value(0, output_field=DecimalField(max_digits=6, decimal_places=2)),
output_field=DecimalField(max_digits=6, decimal_places=2),
),
top_efficiency=Coalesce(
Max("player_seasons__stats__player_efficiency_rating"),
Max("player_seasons__stats__player_efficiency_rating", filter=context_filter),
Value(0, output_field=DecimalField(max_digits=6, decimal_places=2)),
output_field=DecimalField(max_digits=6, decimal_places=2),
),
)
return queryset.distinct()
def apply_sorting(queryset, sort_key: str):
if sort_key == "name_desc":
@ -185,4 +306,6 @@ def base_player_queryset():
"nationality",
"nominal_position",
"inferred_role",
).prefetch_related("aliases")
"origin_competition",
"origin_team",
)

View File

@ -7,8 +7,14 @@ from apps.scouting.models import FavoritePlayer
from apps.stats.models import PlayerSeason
from .forms import PlayerSearchForm
from .models import Player
from .services.search import apply_sorting, base_player_queryset, filter_players
from .models import Player, PlayerCareerEntry
from .services.search import (
SEARCH_METRIC_SEMANTICS_TEXT,
annotate_player_metrics,
apply_sorting,
base_player_queryset,
filter_players,
)
def calculate_age(birth_date):
@ -33,7 +39,7 @@ class PlayerSearchView(ListView):
def get_form(self):
if not hasattr(self, "_search_form"):
self._search_form = PlayerSearchForm(self.request.GET or None)
self._search_form = PlayerSearchForm(self.request.GET)
return self._search_form
def get_paginate_by(self, queryset):
@ -44,19 +50,24 @@ class PlayerSearchView(ListView):
def get_queryset(self):
form = self.get_form()
form_valid = form.is_valid()
if form.is_bound and not form_valid:
return Player.objects.none()
queryset = base_player_queryset()
if form.is_valid():
queryset = filter_players(queryset, form.cleaned_data)
queryset = apply_sorting(queryset, form.cleaned_data.get("sort", "name_asc"))
else:
queryset = queryset.order_by("full_name", "id")
queryset = filter_players(queryset, form.cleaned_data)
queryset = annotate_player_metrics(queryset, form.cleaned_data)
queryset = apply_sorting(queryset, form.cleaned_data.get("sort", "name_asc"))
return queryset
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context["search_form"] = self.get_form()
search_form = self.get_form()
context["search_form"] = search_form
context["search_has_errors"] = search_form.is_bound and bool(search_form.errors)
context["search_metric_semantics"] = SEARCH_METRIC_SEMANTICS_TEXT
context["favorite_player_ids"] = set()
if self.request.user.is_authenticated:
player_ids = [player.id for player in context["players"]]
@ -81,20 +92,25 @@ class PlayerDetailView(DetailView):
"competition",
"stats",
).order_by("-season__start_date", "-id")
career_queryset = PlayerCareerEntry.objects.select_related(
"team",
"competition",
"season",
"role_snapshot",
).order_by("-start_date", "-id")
return (
Player.objects.select_related(
"nationality",
"nominal_position",
"inferred_role",
"origin_competition",
"origin_team",
)
.prefetch_related(
"aliases",
Prefetch("player_seasons", queryset=season_queryset),
"career_entries__team",
"career_entries__competition",
"career_entries__season",
"career_entries__role_snapshot",
Prefetch("career_entries", queryset=career_queryset),
)
)
@ -130,7 +146,7 @@ class PlayerDetailView(DetailView):
context["age"] = calculate_age(player.birth_date)
context["current_assignment"] = current_assignment
context["career_entries"] = player.career_entries.all().order_by("-start_date", "-id")
context["career_entries"] = player.career_entries.all()
context["season_rows"] = season_rows
context["is_favorite"] = False
if self.request.user.is_authenticated:

View File

@ -0,0 +1,168 @@
import logging
from django.conf import settings
from apps.providers.clients import BalldontlieClient
from apps.providers.contracts import (
CompetitionPayload,
NormalizedSyncPayload,
PlayerCareerPayload,
PlayerPayload,
PlayerStatsPayload,
SeasonPayload,
TeamPayload,
)
from apps.providers.interfaces import BaseProviderAdapter
from apps.providers.exceptions import ProviderUnauthorizedError
from apps.providers.services.balldontlie_mappings import (
map_competitions,
map_player_stats,
map_players,
map_seasons,
map_teams,
)
logger = logging.getLogger(__name__)
class BalldontlieProviderAdapter(BaseProviderAdapter):
    """HTTP MVP adapter for balldontlie (NBA-centric data source)."""

    namespace = "balldontlie"
    # Prefix used when the configured base URL does not already include it.
    nba_prefix = "/nba/v1"

    def __init__(self, client: BalldontlieClient | None = None):
        # Injectable client for testing; defaults to a settings-driven one.
        self.client = client or BalldontlieClient()

    @property
    def configured_seasons(self) -> list[int]:
        # Seasons (as years) that stats ingestion is limited to.
        return settings.PROVIDER_BALLDONTLIE_SEASONS

    def _api_path(self, path: str) -> str:
        """Build a request path that works for both base-URL variants."""
        # Support both base URL variants:
        # - https://api.balldontlie.io
        # - https://api.balldontlie.io/nba/v1
        base = getattr(self.client, "base_url", "").rstrip("/")
        if base.endswith("/nba/v1"):
            return path.lstrip("/")
        return f"{self.nba_prefix}/{path.lstrip('/')}"

    def _fetch_stats_rows(self) -> list[dict]:
        """Fetch raw stats rows for every configured season.

        On an unauthorized response (stats may be plan-gated), either raises
        (strict mode) or logs and degrades to an empty result.
        """
        all_rows: list[dict] = []
        try:
            # OpenAPI supports seasons[] directly for /nba/v1/stats.
            for season in self.configured_seasons:
                rows = self.client.list_paginated(
                    self._api_path("stats"),
                    params={"seasons[]": season},
                    per_page=settings.PROVIDER_BALLDONTLIE_STATS_PER_PAGE,
                    page_limit=settings.PROVIDER_BALLDONTLIE_STATS_PAGE_LIMIT,
                )
                all_rows.extend(rows)
        except ProviderUnauthorizedError as exc:
            if settings.PROVIDER_BALLDONTLIE_STATS_STRICT:
                raise
            # Non-strict: degrade gracefully so the rest of the sync proceeds.
            logger.warning(
                "provider_stats_unauthorized_degraded",
                extra={
                    "provider": self.namespace,
                    "path": exc.path,
                    "status_code": exc.status_code,
                    "detail": exc.detail,
                },
            )
            return []
        return all_rows

    def search_players(self, *, query: str = "", limit: int = 50, offset: int = 0) -> list[PlayerPayload]:
        """Search players by name; returns at most *limit* normalized payloads.

        NOTE(review): only the first page (page_limit=1, per_page<=limit) is
        fetched, so an offset beyond that page yields an empty/short result —
        confirm whether deeper pagination is needed here.
        """
        params = {"search": query} if query else None
        rows = self.client.list_paginated(
            self._api_path("players"),
            params=params,
            per_page=min(limit, settings.PROVIDER_BALLDONTLIE_PLAYERS_PER_PAGE),
            page_limit=1,
        )
        mapped = map_players(rows)
        return mapped[offset : offset + limit]

    def fetch_player(self, *, external_player_id: str) -> PlayerPayload | None:
        """Fetch a single player by our namespaced id ("player-<upstream id>")."""
        if not external_player_id.startswith("player-"):
            # Not one of our ids; nothing to look up upstream.
            return None
        player_id = external_player_id.replace("player-", "", 1)
        payload = self.client.get_json(self._api_path(f"players/{player_id}"))
        data = payload.get("data")
        if not isinstance(data, dict):
            return None
        mapped = map_players([data])
        return mapped[0] if mapped else None

    def fetch_players(self) -> list[PlayerPayload]:
        """Fetch all players within the configured pagination bounds."""
        rows = self.client.list_paginated(
            self._api_path("players"),
            per_page=settings.PROVIDER_BALLDONTLIE_PLAYERS_PER_PAGE,
            page_limit=settings.PROVIDER_BALLDONTLIE_PLAYERS_PAGE_LIMIT,
        )
        return map_players(rows)

    def fetch_competitions(self) -> list[CompetitionPayload]:
        # Competitions are synthesized locally (no upstream endpoint used).
        return map_competitions()

    def fetch_teams(self) -> list[TeamPayload]:
        """Fetch teams; the endpoint is a single page, not paginated here."""
        payload = self.client.get_json(self._api_path("teams"))
        rows = payload.get("data") or []
        # Defensive: only map when the payload shape is the expected list.
        return map_teams(rows if isinstance(rows, list) else [])

    def fetch_seasons(self) -> list[SeasonPayload]:
        # Seasons come from configuration, not from the API.
        return map_seasons(self.configured_seasons)

    def fetch_player_stats(self) -> list[PlayerStatsPayload]:
        """Fetch and normalize per-season player stats (careers discarded)."""
        all_rows = self._fetch_stats_rows()
        player_stats, _ = map_player_stats(all_rows, allowed_seasons=self.configured_seasons)
        return player_stats

    def fetch_player_careers(self) -> list[PlayerCareerPayload]:
        """Fetch and normalize career entries derived from stats rows."""
        all_rows = self._fetch_stats_rows()
        _, player_careers = map_player_stats(all_rows, allowed_seasons=self.configured_seasons)
        return player_careers

    def sync_all(self) -> NormalizedSyncPayload:
        """Produce a full normalized snapshot of all entity types.

        Stats rows are fetched once and mapped into both player_stats and
        player_careers, avoiding a second round of stats requests.
        """
        logger.info(
            "provider_sync_start",
            extra={"provider": self.namespace, "seasons": self.configured_seasons},
        )
        competitions = self.fetch_competitions()
        teams = self.fetch_teams()
        seasons = self.fetch_seasons()
        players = self.fetch_players()
        all_rows = self._fetch_stats_rows()
        player_stats, player_careers = map_player_stats(all_rows, allowed_seasons=self.configured_seasons)
        logger.info(
            "provider_sync_complete",
            extra={
                "provider": self.namespace,
                "competitions": len(competitions),
                "teams": len(teams),
                "seasons": len(seasons),
                "players": len(players),
                "player_stats": len(player_stats),
                "player_careers": len(player_careers),
            },
        )
        return {
            "players": players,
            "competitions": competitions,
            "teams": teams,
            "seasons": seasons,
            "player_stats": player_stats,
            "player_careers": player_careers,
            "cursor": None,
        }

    def sync_incremental(self, *, cursor: str | None = None) -> NormalizedSyncPayload:
        """Incremental sync placeholder: full snapshot with the cursor echoed back."""
        payload = self.sync_all()
        payload["cursor"] = cursor
        return payload

View File

@ -6,6 +6,15 @@ from pathlib import Path
from django.conf import settings
from apps.providers.contracts import (
CompetitionPayload,
NormalizedSyncPayload,
PlayerCareerPayload,
PlayerPayload,
PlayerStatsPayload,
SeasonPayload,
TeamPayload,
)
from apps.providers.exceptions import ProviderRateLimitError, ProviderTransientError
from apps.providers.interfaces import BaseProviderAdapter
@ -50,38 +59,38 @@ class MvpDemoProviderAdapter(BaseProviderAdapter):
value = payload.get(key, [])
return value if isinstance(value, list) else []
def search_players(self, *, query: str = "", limit: int = 50, offset: int = 0) -> list[dict]:
def search_players(self, *, query: str = "", limit: int = 50, offset: int = 0) -> list[PlayerPayload]:
players = self.fetch_players()
if query:
query_lower = query.lower()
players = [p for p in players if query_lower in p.get("full_name", "").lower()]
return players[offset : offset + limit]
def fetch_player(self, *, external_player_id: str) -> dict | None:
def fetch_player(self, *, external_player_id: str) -> PlayerPayload | None:
for payload in self.fetch_players():
if payload.get("external_id") == external_player_id:
return payload
return None
def fetch_players(self) -> list[dict]:
return self._payload_list("players")
def fetch_players(self) -> list[PlayerPayload]:
return self._payload_list("players") # type: ignore[return-value]
def fetch_competitions(self) -> list[dict]:
return self._payload_list("competitions")
def fetch_competitions(self) -> list[CompetitionPayload]:
return self._payload_list("competitions") # type: ignore[return-value]
def fetch_teams(self) -> list[dict]:
return self._payload_list("teams")
def fetch_teams(self) -> list[TeamPayload]:
return self._payload_list("teams") # type: ignore[return-value]
def fetch_seasons(self) -> list[dict]:
return self._payload_list("seasons")
def fetch_seasons(self) -> list[SeasonPayload]:
return self._payload_list("seasons") # type: ignore[return-value]
def fetch_player_stats(self) -> list[dict]:
return self._payload_list("player_stats")
def fetch_player_stats(self) -> list[PlayerStatsPayload]:
return self._payload_list("player_stats") # type: ignore[return-value]
def fetch_player_careers(self) -> list[dict]:
return self._payload_list("player_careers")
def fetch_player_careers(self) -> list[PlayerCareerPayload]:
return self._payload_list("player_careers") # type: ignore[return-value]
def sync_all(self) -> dict:
def sync_all(self) -> NormalizedSyncPayload:
return {
"players": self.fetch_players(),
"competitions": self.fetch_competitions(),
@ -92,7 +101,7 @@ class MvpDemoProviderAdapter(BaseProviderAdapter):
"cursor": None,
}
def sync_incremental(self, *, cursor: str | None = None) -> dict:
def sync_incremental(self, *, cursor: str | None = None) -> NormalizedSyncPayload:
payload = self.sync_all()
# MVP source has no change feed yet; returns full snapshot.
payload["cursor"] = cursor

View File

@ -0,0 +1,3 @@
from .balldontlie import BalldontlieClient
__all__ = ["BalldontlieClient"]

View File

@ -0,0 +1,141 @@
import logging
import time
from typing import Any
import requests
from django.conf import settings
from apps.providers.exceptions import ProviderRateLimitError, ProviderTransientError, ProviderUnauthorizedError
logger = logging.getLogger(__name__)
class BalldontlieClient:
    """HTTP client for balldontlie with timeout/retry/rate-limit handling.

    Configuration (base URL, API key, timeout, retry policy) is read from
    Django settings. All requests go through :meth:`get_json`, which retries
    transient failures (timeouts, network errors, 429, 5xx) with linearly
    increasing sleeps and maps HTTP failures onto the provider exception
    hierarchy.
    """

    #: Wait (seconds) used when a 429 response lacks a usable Retry-After.
    DEFAULT_RETRY_AFTER_SECONDS = 30

    def __init__(self, session: requests.Session | None = None):
        self.base_url = settings.PROVIDER_BALLDONTLIE_BASE_URL.rstrip("/")
        self.api_key = settings.PROVIDER_BALLDONTLIE_API_KEY
        self.timeout_seconds = settings.PROVIDER_HTTP_TIMEOUT_SECONDS
        self.max_retries = settings.PROVIDER_REQUEST_RETRIES
        self.retry_sleep_seconds = settings.PROVIDER_REQUEST_RETRY_SLEEP
        # Session injection keeps the client testable and allows pool reuse.
        self.session = session or requests.Session()

    def _headers(self) -> dict[str, str]:
        """Build request headers; balldontlie takes the raw key in Authorization."""
        headers = {"Accept": "application/json"}
        if self.api_key:
            headers["Authorization"] = self.api_key
        return headers

    @classmethod
    def _retry_after_seconds(cls, raw_value: str | None) -> int:
        """Parse a Retry-After header value defensively.

        RFC 7231 allows Retry-After to be an HTTP-date as well as an integer
        delta in seconds; ``int()`` on an HTTP-date raises ValueError, so any
        unparseable header falls back to ``DEFAULT_RETRY_AFTER_SECONDS``
        instead of crashing the retry loop.
        """
        try:
            return int(raw_value or cls.DEFAULT_RETRY_AFTER_SECONDS)
        except (TypeError, ValueError):
            return cls.DEFAULT_RETRY_AFTER_SECONDS

    def get_json(self, path: str, *, params: dict[str, Any] | None = None) -> dict[str, Any]:
        """GET ``path`` under the configured base URL and return decoded JSON.

        Retries transient failures up to ``max_retries`` attempts.

        Raises:
            ProviderUnauthorizedError: on HTTP 401.
            ProviderRateLimitError: when 429 persists past the retry budget.
            ProviderTransientError: on timeouts, network errors, 5xx, other
                4xx responses, or an invalid JSON body.
        """
        url = f"{self.base_url}/{path.lstrip('/')}"
        for attempt in range(1, self.max_retries + 1):
            try:
                response = self.session.get(
                    url,
                    params=params,
                    headers=self._headers(),
                    timeout=self.timeout_seconds,
                )
            except requests.Timeout as exc:
                logger.warning(
                    "provider_http_timeout",
                    extra={"provider": "balldontlie", "url": url, "attempt": attempt},
                )
                if attempt >= self.max_retries:
                    raise ProviderTransientError(f"Timeout calling balldontlie: {url}") from exc
                time.sleep(self.retry_sleep_seconds * attempt)
                continue
            except requests.RequestException as exc:
                logger.warning(
                    "provider_http_error",
                    extra={"provider": "balldontlie", "url": url, "attempt": attempt},
                )
                if attempt >= self.max_retries:
                    raise ProviderTransientError(f"Network error calling balldontlie: {url}") from exc
                time.sleep(self.retry_sleep_seconds * attempt)
                continue

            status = response.status_code
            if status == 429:
                # Robust parse: a non-numeric Retry-After must not abort ingestion.
                retry_after = self._retry_after_seconds(response.headers.get("Retry-After"))
                logger.warning(
                    "provider_rate_limited",
                    extra={
                        "provider": "balldontlie",
                        "url": url,
                        "attempt": attempt,
                        "retry_after": retry_after,
                    },
                )
                if attempt >= self.max_retries:
                    raise ProviderRateLimitError(
                        "balldontlie rate limit reached",
                        retry_after_seconds=retry_after,
                    )
                # Honor the server hint but never sleep less than our backoff.
                time.sleep(max(retry_after, self.retry_sleep_seconds * attempt))
                continue
            if status >= 500:
                logger.warning(
                    "provider_server_error",
                    extra={"provider": "balldontlie", "url": url, "attempt": attempt, "status": status},
                )
                if attempt >= self.max_retries:
                    raise ProviderTransientError(f"balldontlie server error: {status}")
                time.sleep(self.retry_sleep_seconds * attempt)
                continue
            if status >= 400:
                # 4xx other than 401 is surfaced with a trimmed body preview.
                body_preview = response.text[:240]
                if status == 401:
                    raise ProviderUnauthorizedError(
                        provider="balldontlie",
                        path=path,
                        status_code=status,
                        detail=body_preview,
                    )
                raise ProviderTransientError(f"balldontlie client error status={status} path={path} body={body_preview}")
            try:
                return response.json()
            except ValueError as exc:
                raise ProviderTransientError(f"Invalid JSON from balldontlie for {path}") from exc
        # Defensive: unreachable when max_retries >= 1; kept for safety.
        raise ProviderTransientError(f"Failed to call balldontlie path={path}")

    def list_paginated(
        self,
        path: str,
        *,
        params: dict[str, Any] | None = None,
        per_page: int = 100,
        page_limit: int = 1,
    ) -> list[dict[str, Any]]:
        """Collect up to ``page_limit`` pages of rows via cursor pagination.

        Stops early when a response carries no ``meta.next_cursor``.
        """
        page = 1
        cursor = None
        rows: list[dict[str, Any]] = []
        query = dict(params or {})
        while page <= page_limit:
            request_query = dict(query)
            request_query["per_page"] = per_page
            if cursor is not None:
                request_query["cursor"] = cursor
            payload = self.get_json(path, params=request_query)
            data = payload.get("data") or []
            if isinstance(data, list):
                rows.extend(data)
            meta = payload.get("meta") or {}
            next_cursor = meta.get("next_cursor")
            if not next_cursor:
                break
            cursor = next_cursor
            page += 1
        return rows

109
apps/providers/contracts.py Normal file
View File

@ -0,0 +1,109 @@
from __future__ import annotations
from typing import NotRequired, TypedDict
class NationalityPayload(TypedDict):
    """Normalized country/nationality reference keyed by ISO 3166 codes."""

    name: str
    iso2_code: str
    # ISO alpha-3 code; optional because not every provider supplies it.
    iso3_code: NotRequired[str | None]
class PositionPayload(TypedDict):
    """Normalized playing position (e.g. code ``PG``, name ``Point Guard``)."""

    code: str
    name: str
class RolePayload(TypedDict):
    """Normalized coarse on-court role (code plus display name)."""

    code: str
    name: str
class PlayerPayload(TypedDict):
    """Provider-agnostic player entity consumed by ingestion.

    Nullable fields stay ``None`` when the source does not expose the data;
    date fields are ISO-8601 strings.
    """

    # Provider-scoped stable identifier, e.g. "player-123".
    external_id: str
    first_name: str
    last_name: str
    full_name: str
    birth_date: str | None
    nationality: NationalityPayload | None
    nominal_position: PositionPayload | None
    inferred_role: RolePayload | None
    height_cm: int | None
    weight_kg: int | None
    dominant_hand: str
    is_active: bool
    aliases: list[str]
class CompetitionPayload(TypedDict):
    """Provider-agnostic competition/league entity consumed by ingestion."""

    external_id: str
    name: str
    slug: str
    competition_type: str
    gender: str
    level: int
    # League home country, not any participant nationality.
    country: NationalityPayload | None
    is_active: bool
class TeamPayload(TypedDict):
    """Provider-agnostic team entity consumed by ingestion."""

    external_id: str
    name: str
    short_name: str
    slug: str
    country: NationalityPayload | None
    is_national_team: bool
class SeasonPayload(TypedDict):
    """Provider-agnostic season entity; dates are ISO-8601 strings."""

    external_id: str
    label: str
    start_date: str
    end_date: str
    is_current: bool
class PlayerStatsPayload(TypedDict):
    """Per-season player statistics line, linked to other entities by external id.

    Percentage and advanced-metric fields are ``None`` when the source does
    not provide enough data to compute them.
    """

    external_id: str
    player_external_id: str
    team_external_id: str | None
    competition_external_id: str | None
    season_external_id: str
    games_played: int
    games_started: int
    minutes_played: int
    points: float
    rebounds: float
    assists: float
    steals: float
    blocks: float
    turnovers: float
    fg_pct: float | None
    three_pct: float | None
    ft_pct: float | None
    usage_rate: float | None
    true_shooting_pct: float | None
    player_efficiency_rating: float | None
class PlayerCareerPayload(TypedDict):
    """One career stint (player at a team/competition over a date range)."""

    external_id: str
    player_external_id: str
    team_external_id: str | None
    competition_external_id: str | None
    season_external_id: str | None
    role_code: str
    shirt_number: int | None
    # ISO-8601 dates; None when the stint boundary is unknown.
    start_date: str | None
    end_date: str | None
    notes: str
class NormalizedSyncPayload(TypedDict):
    """Full normalized payload returned by adapter sync_all/sync_incremental.

    ``cursor`` carries the provider's resume position for incremental syncs,
    or ``None`` when the source has no change feed.
    """

    players: list[PlayerPayload]
    competitions: list[CompetitionPayload]
    teams: list[TeamPayload]
    seasons: list[SeasonPayload]
    player_stats: list[PlayerStatsPayload]
    player_careers: list[PlayerCareerPayload]
    cursor: str | None

View File

@ -6,6 +6,17 @@ class ProviderTransientError(ProviderError):
"""Temporary provider failure that can be retried."""
class ProviderUnauthorizedError(ProviderError):
    """Raised when provider credentials are valid format but not authorized for an endpoint."""

    def __init__(self, *, provider: str, path: str, status_code: int, detail: str = ""):
        # Compose a single descriptive message while keeping the structured
        # fields available for logging and programmatic handling.
        super().__init__(f"{provider} unauthorized status={status_code} path={path} detail={detail}")
        self.provider = provider
        self.path = path
        self.status_code = status_code
        self.detail = detail
class ProviderRateLimitError(ProviderTransientError):
    """Raised when provider rate limit is hit.

    Subclasses ProviderTransientError so generic retry handling applies.
    NOTE(review): the balldontlie client raises this with a
    ``retry_after_seconds=`` keyword argument — confirm an ``__init__``
    accepting that keyword exists; a plain Exception subclass would reject it.
    """

View File

@ -1,45 +1,63 @@
from abc import ABC, abstractmethod
from apps.providers.contracts import (
CompetitionPayload,
NormalizedSyncPayload,
PlayerCareerPayload,
PlayerPayload,
PlayerStatsPayload,
SeasonPayload,
TeamPayload,
)
class BaseProviderAdapter(ABC):
"""
Provider contract for normalized entity payloads consumed by ingestion services.
Adapters must return provider-agnostic entity dictionaries (see
``apps.providers.contracts``) and keep provider-specific response shapes
internal to the adapter/client/mapping layer.
"""
namespace: str
@abstractmethod
def search_players(self, *, query: str = "", limit: int = 50, offset: int = 0) -> list[dict]:
def search_players(self, *, query: str = "", limit: int = 50, offset: int = 0) -> list[PlayerPayload]:
raise NotImplementedError
@abstractmethod
def fetch_player(self, *, external_player_id: str) -> dict | None:
def fetch_player(self, *, external_player_id: str) -> PlayerPayload | None:
raise NotImplementedError
@abstractmethod
def fetch_players(self) -> list[dict]:
def fetch_players(self) -> list[PlayerPayload]:
raise NotImplementedError
@abstractmethod
def fetch_competitions(self) -> list[dict]:
def fetch_competitions(self) -> list[CompetitionPayload]:
raise NotImplementedError
@abstractmethod
def fetch_teams(self) -> list[dict]:
def fetch_teams(self) -> list[TeamPayload]:
raise NotImplementedError
@abstractmethod
def fetch_seasons(self) -> list[dict]:
def fetch_seasons(self) -> list[SeasonPayload]:
raise NotImplementedError
@abstractmethod
def fetch_player_stats(self) -> list[dict]:
def fetch_player_stats(self) -> list[PlayerStatsPayload]:
raise NotImplementedError
@abstractmethod
def fetch_player_careers(self) -> list[dict]:
def fetch_player_careers(self) -> list[PlayerCareerPayload]:
raise NotImplementedError
@abstractmethod
def sync_all(self) -> dict:
def sync_all(self) -> NormalizedSyncPayload:
raise NotImplementedError
@abstractmethod
def sync_incremental(self, *, cursor: str | None = None) -> dict:
def sync_incremental(self, *, cursor: str | None = None) -> NormalizedSyncPayload:
raise NotImplementedError

View File

@ -1,16 +1,29 @@
from django.conf import settings
from apps.providers.adapters.balldontlie_provider import BalldontlieProviderAdapter
from apps.providers.adapters.mvp_provider import MvpDemoProviderAdapter
from apps.providers.exceptions import ProviderNotFoundError
PROVIDER_REGISTRY = {
MvpDemoProviderAdapter.namespace: MvpDemoProviderAdapter,
BalldontlieProviderAdapter.namespace: BalldontlieProviderAdapter,
}
def get_default_provider_namespace() -> str:
    """Resolve the provider namespace to use when none is given explicitly.

    An explicit ``PROVIDER_DEFAULT_NAMESPACE`` always wins; otherwise the
    configured ``PROVIDER_BACKEND`` is mapped onto its namespace, falling
    back to the demo namespace for unknown backends.
    """
    explicit = settings.PROVIDER_DEFAULT_NAMESPACE
    if explicit:
        return explicit
    by_backend = {
        "demo": settings.PROVIDER_NAMESPACE_DEMO,
        "balldontlie": settings.PROVIDER_NAMESPACE_BALLDONTLIE,
    }
    return by_backend.get(settings.PROVIDER_BACKEND, settings.PROVIDER_NAMESPACE_DEMO)
def get_provider(namespace: str | None = None):
provider_namespace = namespace or settings.PROVIDER_DEFAULT_NAMESPACE
provider_namespace = namespace or get_default_provider_namespace()
provider_cls = PROVIDER_REGISTRY.get(provider_namespace)
if not provider_cls:
raise ProviderNotFoundError(f"Unknown provider namespace: {provider_namespace}")

View File

@ -0,0 +1,288 @@
from __future__ import annotations
from collections import defaultdict
from datetime import date
from typing import Any
from django.utils.text import slugify
from apps.providers.contracts import (
CompetitionPayload,
PlayerCareerPayload,
PlayerPayload,
PlayerStatsPayload,
SeasonPayload,
TeamPayload,
)
NBA_COMPETITION_EXTERNAL_ID = "competition-nba"
def map_competitions() -> list[CompetitionPayload]:
    """Return the single normalized NBA competition.

    balldontlie assumptions:
    - The API is NBA-focused, so competition is normalized as a single NBA league.
    - Competition country is set to US (league home country), not player/team nationality.
    """
    nba = {
        "external_id": NBA_COMPETITION_EXTERNAL_ID,
        "name": "NBA",
        "slug": "nba",
        "competition_type": "league",
        "gender": "men",
        "level": 1,
        "country": {"name": "United States", "iso2_code": "US", "iso3_code": "USA"},
        "is_active": True,
    }
    return [nba]
def map_teams(rows: list[dict[str, Any]]) -> list[TeamPayload]:
    """Normalize raw balldontlie team rows into TeamPayload dicts.

    Team country is unknown from balldontlie team payloads and stays null.
    Rows without an id are dropped.
    """
    teams: list[TeamPayload] = []
    for raw in rows:
        raw_id = raw.get("id")
        if not raw_id:
            continue
        display_name = raw.get("full_name") or raw.get("name") or f"Team {raw_id}"
        short = (raw.get("abbreviation") or "").strip()
        teams.append(
            {
                "external_id": f"team-{raw_id}",
                "name": display_name,
                "short_name": short,
                "slug": slugify(display_name) or f"team-{raw_id}",
                "country": None,
                "is_national_team": False,
            }
        )
    return teams
def _map_position(position: str | None) -> dict[str, str] | None:
if not position:
return None
normalized = position.upper().strip()
position_map = {
"G": ("PG", "Point Guard"),
"G-F": ("SG", "Shooting Guard"),
"F-G": ("SF", "Small Forward"),
"F": ("PF", "Power Forward"),
"F-C": ("PF", "Power Forward"),
"C-F": ("C", "Center"),
"C": ("C", "Center"),
}
code_name = position_map.get(normalized)
if not code_name:
return None
return {"code": code_name[0], "name": code_name[1]}
def _map_role(position: str | None) -> dict[str, str] | None:
if not position:
return None
normalized = position.upper().strip()
if "G" in normalized:
return {"code": "playmaker", "name": "Playmaker"}
if "F" in normalized:
return {"code": "wing", "name": "Wing"}
if "C" in normalized:
return {"code": "big", "name": "Big"}
return None
def map_players(rows: list[dict[str, Any]]) -> list[PlayerPayload]:
    """Normalize raw balldontlie player rows into PlayerPayload dicts.

    Player-level nationality/birth/physical details are not exposed by this
    provider's players endpoint in the current MVP integration, so they are
    left null. Rows without an id are dropped.
    """
    players: list[PlayerPayload] = []
    for raw in rows:
        raw_id = raw.get("id")
        if not raw_id:
            continue
        first = raw.get("first_name", "")
        last = raw.get("last_name", "")
        display_name = f"{first} {last}".strip() or f"Player {raw_id}"
        raw_position = raw.get("position")
        players.append(
            {
                "external_id": f"player-{raw_id}",
                "first_name": first,
                "last_name": last,
                "full_name": display_name,
                "birth_date": None,
                "nationality": None,
                "nominal_position": _map_position(raw_position),
                "inferred_role": _map_role(raw_position),
                "height_cm": None,
                "weight_kg": None,
                "dominant_hand": "unknown",
                "is_active": True,
                "aliases": [],
            }
        )
    return players
def map_seasons(seasons: list[int]) -> list[SeasonPayload]:
    """Build normalized season payloads for the configured season start years.

    Duplicates are collapsed and output is sorted ascending. Current-season
    fallback: when any seasons are supplied, the maximum season year is
    treated as current. Season boundaries use an October 1 start and a
    June 30 end.
    """
    distinct = sorted(set(seasons))
    if not distinct:
        return []
    latest = distinct[-1]
    return [
        {
            "external_id": f"season-{year}",
            "label": f"{year}-{year + 1}",
            "start_date": date(year, 10, 1).isoformat(),
            "end_date": date(year + 1, 6, 30).isoformat(),
            "is_current": year == latest,
        }
        for year in distinct
    ]
def _to_float(value: Any) -> float:
if value in (None, ""):
return 0.0
try:
return float(value)
except (TypeError, ValueError):
return 0.0
def _parse_minutes(value: Any) -> int:
if value in (None, ""):
return 0
if isinstance(value, (int, float)):
return int(value)
text = str(value)
if ":" in text:
minutes, _ = text.split(":", 1)
return int(_to_float(minutes))
return int(_to_float(text))
def _pct(value: Any, *, count: int) -> float | None:
if count <= 0:
return None
pct = _to_float(value) / count
if pct <= 1:
pct *= 100
return round(pct, 2)
def map_player_stats(
    rows: list[dict[str, Any]],
    *,
    allowed_seasons: list[int],
) -> tuple[list[PlayerStatsPayload], list[PlayerCareerPayload]]:
    """Aggregate per-game box-score rows into per-(season, player, team) payloads.

    Counting stats are summed per group and divided by games played to get
    per-game averages; shooting percentages are averaged only over games
    where the source reported them. One career stint is emitted per group,
    spanning the season's October 1 - June 30 window.

    Args:
        rows: Raw stats rows with nested ``game``/``player``/``team`` dicts.
        allowed_seasons: Season start years to keep; an empty list keeps all.

    Returns:
        Tuple ``(player_stats, player_careers)``.
    """
    # One accumulator per (season, player_id, team_id), created lazily by the
    # defaultdict on first touch.
    aggregates: dict[tuple[int, int, int], dict[str, Any]] = defaultdict(
        lambda: {
            "games": 0,
            "minutes": 0,
            "points": 0.0,
            "rebounds": 0.0,
            "assists": 0.0,
            "steals": 0.0,
            "blocks": 0.0,
            "turnovers": 0.0,
            "fg_pct_sum": 0.0,
            "fg_pct_count": 0,
            "three_pct_sum": 0.0,
            "three_pct_count": 0,
            "ft_pct_sum": 0.0,
            "ft_pct_count": 0,
        }
    )
    for row in rows:
        game = row.get("game") or {}
        season = game.get("season")
        player = row.get("player") or {}
        team = row.get("team") or {}
        player_id = player.get("id")
        team_id = team.get("id")
        # Skip rows missing any grouping key (truthiness also drops zero ids).
        if not (season and player_id and team_id):
            continue
        if allowed_seasons and season not in allowed_seasons:
            continue
        key = (season, player_id, team_id)
        agg = aggregates[key]
        agg["games"] += 1
        # "min" may be an "MM:SS" string or numeric; seconds are discarded.
        agg["minutes"] += _parse_minutes(row.get("min"))
        agg["points"] += _to_float(row.get("pts"))
        agg["rebounds"] += _to_float(row.get("reb"))
        agg["assists"] += _to_float(row.get("ast"))
        agg["steals"] += _to_float(row.get("stl"))
        agg["blocks"] += _to_float(row.get("blk"))
        agg["turnovers"] += _to_float(row.get("turnover"))
        # Percentages: only count games where the source provided a value so
        # missing data does not drag the average down.
        if row.get("fg_pct") is not None:
            agg["fg_pct_sum"] += _to_float(row.get("fg_pct"))
            agg["fg_pct_count"] += 1
        if row.get("fg3_pct") is not None:
            agg["three_pct_sum"] += _to_float(row.get("fg3_pct"))
            agg["three_pct_count"] += 1
        if row.get("ft_pct") is not None:
            agg["ft_pct_sum"] += _to_float(row.get("ft_pct"))
            agg["ft_pct_count"] += 1
    player_stats: list[PlayerStatsPayload] = []
    player_careers: list[PlayerCareerPayload] = []
    for (season, player_id, team_id), agg in aggregates.items():
        # Defensive guard; every aggregate saw at least one row, so games >= 1.
        games = agg["games"] or 1
        player_stats.append(
            {
                # External ids align with map_seasons/map_players/map_teams.
                "external_id": f"ps-{season}-{player_id}-{team_id}",
                "player_external_id": f"player-{player_id}",
                "team_external_id": f"team-{team_id}",
                "competition_external_id": NBA_COMPETITION_EXTERNAL_ID,
                "season_external_id": f"season-{season}",
                "games_played": agg["games"],
                # Not derivable from these box scores; normalized to 0.
                "games_started": 0,
                "minutes_played": agg["minutes"],
                "points": round(agg["points"] / games, 2),
                "rebounds": round(agg["rebounds"] / games, 2),
                "assists": round(agg["assists"] / games, 2),
                "steals": round(agg["steals"] / games, 2),
                "blocks": round(agg["blocks"] / games, 2),
                "turnovers": round(agg["turnovers"] / games, 2),
                "fg_pct": _pct(agg["fg_pct_sum"], count=agg["fg_pct_count"]),
                "three_pct": _pct(agg["three_pct_sum"], count=agg["three_pct_count"]),
                "ft_pct": _pct(agg["ft_pct_sum"], count=agg["ft_pct_count"]),
                # Advanced metrics are not computable from this source.
                "usage_rate": None,
                "true_shooting_pct": None,
                "player_efficiency_rating": None,
            }
        )
        player_careers.append(
            {
                "external_id": f"career-{season}-{player_id}-{team_id}",
                "player_external_id": f"player-{player_id}",
                "team_external_id": f"team-{team_id}",
                "competition_external_id": NBA_COMPETITION_EXTERNAL_ID,
                "season_external_id": f"season-{season}",
                "role_code": "",
                "shirt_number": None,
                "start_date": date(season, 10, 1).isoformat(),
                "end_date": date(season + 1, 6, 30).isoformat(),
                "notes": "Imported from balldontlie aggregated box scores",
            }
        )
    return player_stats, player_careers

View File

@ -0,0 +1,41 @@
# Generated by Django 5.2.12 on 2026-03-10 17:05
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add single-column indexes on the stat fields used for filtering/sorting."""

    dependencies = [
        ("stats", "0001_initial"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="playerseasonstats",
            index=models.Index(fields=["steals"], name="stats_playe_steals_59b0f3_idx"),
        ),
        migrations.AddIndex(
            model_name="playerseasonstats",
            index=models.Index(fields=["blocks"], name="stats_playe_blocks_b2d4de_idx"),
        ),
        migrations.AddIndex(
            model_name="playerseasonstats",
            index=models.Index(fields=["turnovers"], name="stats_playe_turnove_aa4e87_idx"),
        ),
        migrations.AddIndex(
            model_name="playerseasonstats",
            index=models.Index(fields=["fg_pct"], name="stats_playe_fg_pct_bf2ff1_idx"),
        ),
        migrations.AddIndex(
            model_name="playerseasonstats",
            index=models.Index(fields=["three_pct"], name="stats_playe_three_p_c67201_idx"),
        ),
        migrations.AddIndex(
            model_name="playerseasonstats",
            index=models.Index(fields=["ft_pct"], name="stats_playe_ft_pct_da7421_idx"),
        ),
        migrations.AddIndex(
            model_name="playerseasonstats",
            index=models.Index(fields=["player_efficiency_rating"], name="stats_playe_player__641815_idx"),
        ),
    ]

View File

@ -63,8 +63,15 @@ class PlayerSeasonStats(models.Model):
models.Index(fields=["points"]),
models.Index(fields=["rebounds"]),
models.Index(fields=["assists"]),
models.Index(fields=["steals"]),
models.Index(fields=["blocks"]),
models.Index(fields=["turnovers"]),
models.Index(fields=["fg_pct"]),
models.Index(fields=["three_pct"]),
models.Index(fields=["ft_pct"]),
models.Index(fields=["usage_rate"]),
models.Index(fields=["true_shooting_pct"]),
models.Index(fields=["player_efficiency_rating"]),
]
def __str__(self) -> str:

View File

@ -1,8 +1,53 @@
import logging
import os
from celery import Celery
from celery.schedules import crontab
from django.conf import settings
# Default to development settings when the environment does not specify any.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.development")

logger = logging.getLogger(__name__)

app = Celery("hoopscout")
# Read all CELERY_*-prefixed values from Django settings.
app.config_from_object("django.conf:settings", namespace="CELERY")
# Discover tasks modules across installed Django apps.
app.autodiscover_tasks()
def _parse_cron_expression(expression: str) -> dict[str, str]:
parts = expression.split()
if len(parts) != 5:
raise ValueError(
"INGESTION_SCHEDULE_CRON must have 5 fields: minute hour day_of_month month_of_year day_of_week."
)
return {
"minute": parts[0],
"hour": parts[1],
"day_of_month": parts[2],
"month_of_year": parts[3],
"day_of_week": parts[4],
}
def build_periodic_schedule() -> dict:
    """Build the Celery Beat schedule from ingestion settings.

    Returns an empty schedule when periodic ingestion is disabled or when
    the configured cron expression is invalid: startup must never fail on a
    bad schedule, so parsing errors are logged and the task is disabled.
    """
    if not settings.INGESTION_SCHEDULE_ENABLED:
        logger.info("Periodic ingestion schedule disabled by INGESTION_SCHEDULE_ENABLED=0.")
        return {}
    try:
        # crontab() construction stays inside the try so invalid field values
        # are also caught and disable the task rather than crash startup.
        cron_kwargs = _parse_cron_expression(settings.INGESTION_SCHEDULE_CRON)
        entry = {
            "task": "apps.ingestion.tasks.scheduled_provider_sync",
            "schedule": crontab(**cron_kwargs),
        }
    except Exception as exc:  # noqa: BLE001
        logger.error(
            "Invalid periodic ingestion schedule config. Task disabled. "
            "INGESTION_SCHEDULE_CRON=%r error=%s",
            settings.INGESTION_SCHEDULE_CRON,
            exc,
        )
        return {}
    return {"ingestion.scheduled_provider_sync": entry}
app.conf.beat_schedule = build_periodic_schedule()

View File

@ -1,7 +1,10 @@
from pathlib import Path
import logging
import os
from django.core.exceptions import ImproperlyConfigured
BASE_DIR = Path(__file__).resolve().parent.parent.parent
settings_logger = logging.getLogger("config.settings")
def env_bool(key: str, default: bool = False) -> bool:
@ -16,13 +19,43 @@ def env_list(key: str, default: str = "") -> list[str]:
return [item.strip() for item in value.split(",") if item.strip()]
DJANGO_ENV = os.getenv("DJANGO_ENV", "development").strip().lower()
SECRET_KEY = os.getenv("DJANGO_SECRET_KEY", "insecure-development-secret")
DEBUG = env_bool("DJANGO_DEBUG", False)
IS_DEVELOPMENT_ENV = DJANGO_ENV in {"development", "local", "dev"}
ALLOWED_HOSTS = env_list("DJANGO_ALLOWED_HOSTS", "localhost,127.0.0.1")
CSRF_TRUSTED_ORIGINS = env_list(
"DJANGO_CSRF_TRUSTED_ORIGINS", "http://localhost,http://127.0.0.1"
)
DEFAULT_SECRET_KEY_MARKERS = {"", "insecure-development-secret", "change-me-in-production"}
def raise_config_error(message: str) -> None:
    """Log a critical configuration error, then abort startup.

    Raises:
        ImproperlyConfigured: always, with the given message.
    """
    settings_logger.critical("Configuration error: %s", message)
    raise ImproperlyConfigured(message)
def is_secret_key_unsafe(secret_key: str) -> bool:
    """Heuristically decide whether a secret key is unsafe outside development.

    A key is unsafe when it matches a known placeholder marker, is shorter
    than 32 characters, or contains an obvious placeholder substring.
    """
    if secret_key in DEFAULT_SECRET_KEY_MARKERS or len(secret_key) < 32:
        return True
    lowered = secret_key.lower()
    return any(marker in lowered for marker in ("change-me", "insecure", "default"))
if (not IS_DEVELOPMENT_ENV or not DEBUG) and is_secret_key_unsafe(SECRET_KEY):
raise_config_error(
"DJANGO_SECRET_KEY is unsafe. Set a strong, unique value for non-development environments."
)
if not DEBUG and not ALLOWED_HOSTS:
raise_config_error("DJANGO_ALLOWED_HOSTS must not be empty when DEBUG=0.")
if not DEBUG and "*" in ALLOWED_HOSTS:
raise_config_error("DJANGO_ALLOWED_HOSTS must not contain '*' when DEBUG=0.")
INSTALLED_APPS = [
"django.contrib.admin",
"django.contrib.auth",
@ -96,12 +129,12 @@ TIME_ZONE = os.getenv("DJANGO_TIME_ZONE", "UTC")
USE_I18N = True
USE_TZ = True
STATIC_URL = "/static/"
STATIC_ROOT = BASE_DIR / "staticfiles"
STATIC_URL = os.getenv("DJANGO_STATIC_URL", "/static/")
STATIC_ROOT = Path(os.getenv("DJANGO_STATIC_ROOT", str(BASE_DIR / "staticfiles")))
STATICFILES_DIRS = [BASE_DIR / "static"]
MEDIA_URL = "/media/"
MEDIA_ROOT = BASE_DIR / "media"
MEDIA_URL = os.getenv("DJANGO_MEDIA_URL", "/media/")
MEDIA_ROOT = Path(os.getenv("DJANGO_MEDIA_ROOT", str(BASE_DIR / "media")))
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
@ -117,14 +150,68 @@ CELERY_RESULT_SERIALIZER = "json"
CELERY_TIMEZONE = TIME_ZONE
CELERY_TASK_TIME_LIMIT = int(os.getenv("CELERY_TASK_TIME_LIMIT", "1800"))
CELERY_TASK_SOFT_TIME_LIMIT = int(os.getenv("CELERY_TASK_SOFT_TIME_LIMIT", "1500"))
INGESTION_SCHEDULE_ENABLED = env_bool("INGESTION_SCHEDULE_ENABLED", False)
INGESTION_SCHEDULE_CRON = os.getenv("INGESTION_SCHEDULE_CRON", "*/30 * * * *").strip()
INGESTION_SCHEDULE_PROVIDER_NAMESPACE = os.getenv("INGESTION_SCHEDULE_PROVIDER_NAMESPACE", "").strip()
INGESTION_SCHEDULE_JOB_TYPE = os.getenv("INGESTION_SCHEDULE_JOB_TYPE", "incremental").strip().lower()
INGESTION_PREVENT_OVERLAP = env_bool("INGESTION_PREVENT_OVERLAP", True)
INGESTION_OVERLAP_WINDOW_MINUTES = int(os.getenv("INGESTION_OVERLAP_WINDOW_MINUTES", "180"))
PROVIDER_DEFAULT_NAMESPACE = os.getenv("PROVIDER_DEFAULT_NAMESPACE", "mvp_demo")
if INGESTION_SCHEDULE_JOB_TYPE not in {"incremental", "full_sync"}:
raise ImproperlyConfigured("INGESTION_SCHEDULE_JOB_TYPE must be either 'incremental' or 'full_sync'.")
PROVIDER_BACKEND = os.getenv("PROVIDER_BACKEND", "demo").strip().lower()
PROVIDER_NAMESPACE_DEMO = os.getenv("PROVIDER_NAMESPACE_DEMO", "mvp_demo")
PROVIDER_NAMESPACE_BALLDONTLIE = os.getenv("PROVIDER_NAMESPACE_BALLDONTLIE", "balldontlie")
PROVIDER_DEFAULT_NAMESPACE = os.getenv("PROVIDER_DEFAULT_NAMESPACE", "").strip()
PROVIDER_MVP_DATA_FILE = os.getenv(
"PROVIDER_MVP_DATA_FILE",
str(BASE_DIR / "apps" / "providers" / "data" / "mvp_provider.json"),
)
PROVIDER_REQUEST_RETRIES = int(os.getenv("PROVIDER_REQUEST_RETRIES", "3"))
PROVIDER_REQUEST_RETRY_SLEEP = float(os.getenv("PROVIDER_REQUEST_RETRY_SLEEP", "1"))
PROVIDER_HTTP_TIMEOUT_SECONDS = float(os.getenv("PROVIDER_HTTP_TIMEOUT_SECONDS", "10"))
PROVIDER_BALLDONTLIE_BASE_URL = os.getenv("PROVIDER_BALLDONTLIE_BASE_URL", "https://api.balldontlie.io")
PROVIDER_BALLDONTLIE_API_KEY = os.getenv("PROVIDER_BALLDONTLIE_API_KEY", "")
PROVIDER_BALLDONTLIE_PLAYERS_PAGE_LIMIT = int(os.getenv("PROVIDER_BALLDONTLIE_PLAYERS_PAGE_LIMIT", "5"))
PROVIDER_BALLDONTLIE_PLAYERS_PER_PAGE = int(os.getenv("PROVIDER_BALLDONTLIE_PLAYERS_PER_PAGE", "100"))
PROVIDER_BALLDONTLIE_STATS_PAGE_LIMIT = int(os.getenv("PROVIDER_BALLDONTLIE_STATS_PAGE_LIMIT", "10"))
PROVIDER_BALLDONTLIE_STATS_PER_PAGE = int(os.getenv("PROVIDER_BALLDONTLIE_STATS_PER_PAGE", "100"))
PROVIDER_BALLDONTLIE_STATS_STRICT = env_bool("PROVIDER_BALLDONTLIE_STATS_STRICT", False)
PROVIDER_BALLDONTLIE_SEASONS = [
int(value.strip())
for value in os.getenv("PROVIDER_BALLDONTLIE_SEASONS", "2024").split(",")
if value.strip().isdigit()
]
LOG_LEVEL = os.getenv("DJANGO_LOG_LEVEL", "INFO").upper()
LOG_SQL = env_bool("DJANGO_LOG_SQL", False)
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"standard": {
"format": "%(asctime)s %(levelname)s %(name)s %(message)s",
}
},
"handlers": {
"console": {
"class": "logging.StreamHandler",
"formatter": "standard",
}
},
"root": {
"handlers": ["console"],
"level": LOG_LEVEL,
},
"loggers": {
"django.db.backends": {
"handlers": ["console"],
"level": "DEBUG" if LOG_SQL else "WARNING",
"propagate": False,
},
},
}
REST_FRAMEWORK = {
"DEFAULT_PERMISSION_CLASSES": [

View File

@ -1,3 +1,6 @@
from .base import *  # noqa: F403,F401

# Development runs over plain HTTP, so relax the HTTPS-only safeguards.
DEBUG = True
SECURE_SSL_REDIRECT = False
SESSION_COOKIE_SECURE = False
CSRF_COOKIE_SECURE = False

View File

@ -1,13 +1,61 @@
from .base import * # noqa: F403,F401
import os
from urllib.parse import urlparse
from django.core.exceptions import ImproperlyConfigured
DEBUG = False

# Behind the reverse proxy: trust X-Forwarded-Proto to detect HTTPS and
# X-Forwarded-Host for host resolution.
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
USE_X_FORWARDED_HOST = True
SECURE_SSL_REDIRECT = os.getenv("DJANGO_SECURE_SSL_REDIRECT", "1") == "1"

# Hardened cookie and browser-security defaults for production.
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True
SECURE_BROWSER_XSS_FILTER = True
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_REFERRER_POLICY = "same-origin"
X_FRAME_OPTIONS = "DENY"

# HSTS: one year by default, overridable for staged rollouts.
SECURE_HSTS_SECONDS = int(os.getenv("DJANGO_SECURE_HSTS_SECONDS", "31536000"))
SECURE_HSTS_INCLUDE_SUBDOMAINS = True
SECURE_HSTS_PRELOAD = True

CSRF_COOKIE_HTTPONLY = True
SESSION_COOKIE_HTTPONLY = True
SESSION_COOKIE_SAMESITE = os.getenv("DJANGO_SESSION_COOKIE_SAMESITE", "Lax")
CSRF_COOKIE_SAMESITE = os.getenv("DJANGO_CSRF_COOKIE_SAMESITE", "Lax")
def _is_local_host(hostname: str | None) -> bool:
return (hostname or "").lower() in {"localhost", "127.0.0.1", "::1", "0.0.0.0"}
def _is_safe_csrf_origin(origin: str) -> bool:
    """A CSRF origin is production-safe only when it is HTTPS and non-local."""
    parts = urlparse(origin)
    return parts.scheme == "https" and not _is_local_host(parts.hostname)
# Fail fast at import time when production origin/host configuration is unsafe.
if not CSRF_TRUSTED_ORIGINS:  # noqa: F405
    raise ImproperlyConfigured("DJANGO_CSRF_TRUSTED_ORIGINS must be explicitly set for production.")

invalid_origins = [origin for origin in CSRF_TRUSTED_ORIGINS if not _is_safe_csrf_origin(origin)]  # noqa: F405
if invalid_origins:
    joined = ", ".join(invalid_origins)
    raise ImproperlyConfigured(
        "DJANGO_CSRF_TRUSTED_ORIGINS contains unsafe values for production. "
        f"Use explicit HTTPS origins only. Invalid: {joined}"
    )

# Loopback/wildcard hosts must never be served in production.
unsafe_hosts = [host for host in ALLOWED_HOSTS if host in {"localhost", "127.0.0.1", "::1", "0.0.0.0"}]  # noqa: F405
if unsafe_hosts:
    joined = ", ".join(unsafe_hosts)
    raise ImproperlyConfigured(
        "DJANGO_ALLOWED_HOSTS contains localhost-style values in production. "
        f"Invalid: {joined}"
    )
# Hashed static filenames (manifest storage) for cache busting; media files
# stay on the local filesystem.
STORAGES = {
    "default": {
        "BACKEND": "django.core.files.storage.FileSystemStorage",
    },
    "staticfiles": {
        "BACKEND": "django.contrib.staticfiles.storage.ManifestStaticFilesStorage",
    },
}

27
docker-compose.dev.yml Normal file
View File

@ -0,0 +1,27 @@
# Development override: bind-mount the source tree into each service and run
# as the host user so edited files keep host ownership. The named volumes
# referenced here are declared in the base compose file.
services:
  web:
    user: "${LOCAL_UID:-1000}:${LOCAL_GID:-1000}"
    volumes:
      - .:/app
      # Named volumes shadow container-generated paths so the source bind
      # mount does not clobber them.
      - node_modules_data:/app/node_modules
      - static_data:/app/staticfiles
      - media_data:/app/media
      - runtime_data:/app/runtime

  celery_worker:
    user: "${LOCAL_UID:-1000}:${LOCAL_GID:-1000}"
    volumes:
      - .:/app
      - runtime_data:/app/runtime

  celery_beat:
    user: "${LOCAL_UID:-1000}:${LOCAL_GID:-1000}"
    volumes:
      - .:/app
      - runtime_data:/app/runtime

  tailwind:
    user: "${LOCAL_UID:-1000}:${LOCAL_GID:-1000}"
    volumes:
      - .:/app
      - node_modules_data:/app/node_modules

View File

@ -0,0 +1,15 @@
# Release override: force production settings and disable debug on every
# Django-running service. Applied on top of the base compose file.
services:
  web:
    environment:
      DJANGO_SETTINGS_MODULE: config.settings.production
      DJANGO_DEBUG: "0"

  celery_worker:
    environment:
      DJANGO_SETTINGS_MODULE: config.settings.production
      DJANGO_DEBUG: "0"

  celery_beat:
    environment:
      DJANGO_SETTINGS_MODULE: config.settings.production
      DJANGO_DEBUG: "0"

View File

@ -10,11 +10,16 @@ services:
- ./nginx/nginx.conf:/etc/nginx/nginx.conf:ro
- static_data:/var/www/static:ro
- media_data:/var/www/media:ro
read_only: true
tmpfs:
- /var/cache/nginx
- /var/run
healthcheck:
test: ["CMD-SHELL", "wget -q -O /dev/null http://127.0.0.1/health/ || exit 1"]
interval: 15s
timeout: 5s
retries: 5
start_period: 10s
restart: unless-stopped
web:
@ -28,9 +33,9 @@ services:
condition: service_healthy
redis:
condition: service_healthy
command: gunicorn config.wsgi:application --bind 0.0.0.0:8000 --workers ${GUNICORN_WORKERS:-3}
command: gunicorn config.wsgi:application --bind 0.0.0.0:8000 --workers ${GUNICORN_WORKERS:-3} --access-logfile - --error-logfile -
user: "10001:10001"
volumes:
- .:/app
- static_data:/app/staticfiles
- media_data:/app/media
- runtime_data:/app/runtime
@ -41,6 +46,19 @@ services:
interval: 15s
timeout: 5s
retries: 8
start_period: 20s
restart: unless-stopped
tailwind:
build:
context: .
dockerfile: Dockerfile
env_file:
- .env
command: npm run dev
user: "10001:10001"
profiles:
- dev
restart: unless-stopped
celery_worker:
@ -55,14 +73,15 @@ services:
redis:
condition: service_healthy
command: celery -A config worker -l info
user: "10001:10001"
volumes:
- .:/app
- runtime_data:/app/runtime
healthcheck:
test: ["CMD-SHELL", "celery -A config inspect ping -d celery@$$HOSTNAME | grep -q pong || exit 1"]
interval: 30s
timeout: 10s
retries: 5
start_period: 30s
restart: unless-stopped
celery_beat:
@ -77,20 +96,23 @@ services:
redis:
condition: service_healthy
command: celery -A config beat -l info --schedule=/app/runtime/celerybeat-schedule
user: "10001:10001"
volumes:
- .:/app
- runtime_data:/app/runtime
healthcheck:
test: ["CMD-SHELL", "test -f /app/runtime/celerybeat-schedule || exit 1"]
interval: 30s
timeout: 5s
retries: 10
start_period: 20s
restart: unless-stopped
postgres:
image: postgres:16-alpine
env_file:
- .env
environment:
POSTGRES_DB: ${POSTGRES_DB}
POSTGRES_USER: ${POSTGRES_USER}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
volumes:
- postgres_data:/var/lib/postgresql/data
healthcheck:
@ -118,3 +140,4 @@ volumes:
media_data:
runtime_data:
redis_data:
node_modules_data:

View File

@ -0,0 +1,38 @@
# Provider Normalization Contract
HoopScout ingestion consumes provider data through a normalized, provider-agnostic contract defined in:
- `apps/providers/contracts.py`
- `apps/providers/interfaces.py`
## Contract scope
Adapters must return only normalized entities used by ingestion:
- `players`
- `competitions`
- `teams`
- `seasons`
- `player_stats`
- `player_careers`
- optional `cursor`
Raw provider response structures must remain inside `apps/providers` (client/adapter/mapping code).
`ExternalMapping.raw_payload` is used only for diagnostics and troubleshooting.
## Current balldontlie assumptions (MVP)
- Source scope is NBA-centric.
- Competition is normalized as a single NBA competition (`competition-nba`).
- API contract source is `https://www.balldontlie.io/openapi.yml` (server `https://api.balldontlie.io`, NBA endpoints under `/nba/v1/*`).
- Team country is not reliably available in source payloads and is normalized to `null`.
- Player nationality/birth/physical details are not available in player list payloads and are normalized to `null` (except fields explicitly present).
- Configured seasons are normalized from `PROVIDER_BALLDONTLIE_SEASONS`; the highest configured season is marked `is_current=true`.
- Stats ingestion uses `/nba/v1/stats` with `seasons[]` and cursor pagination.
- Advanced metrics (`usage_rate`, `true_shooting_pct`, `player_efficiency_rating`) are currently unavailable from this source path and normalized to `null`.
## Domain rules vs provider assumptions
- Domain rules live in ingestion/domain services and models.
- Provider assumptions live only in adapter/mapping modules.
- New providers must map to the same normalized contract and should not require ingestion logic changes.

View File

@ -8,12 +8,26 @@ done
echo "PostgreSQL is available."
# Production safety gate: when the container is about to run gunicorn under
# the production settings module, run Django's deployment checklist first.
# --fail-level WARNING aborts startup on any deploy-check warning.
if [ "${DJANGO_SETTINGS_MODULE:-}" = "config.settings.production" ] && [ "$1" = "gunicorn" ]; then
    echo "Running Django deployment checks..."
    python manage.py check --deploy --fail-level WARNING
fi

# Optionally apply migrations on startup. Gated behind an explicit opt-in
# env var (default off) and only for the gunicorn entrypoint, so worker/beat
# containers never race each other on schema changes.
if [ "${AUTO_APPLY_MIGRATIONS:-0}" = "1" ] && [ "$1" = "gunicorn" ]; then
    echo "Applying database migrations..."
    python manage.py migrate --noinput
fi

# Optionally build frontend assets and collect static files (opt-in, gunicorn
# only). Tailwind build defaults to on but degrades gracefully: it is skipped
# when package.json is absent or node_modules has no tailwindcss binary.
if [ "${AUTO_COLLECTSTATIC:-0}" = "1" ] && [ "$1" = "gunicorn" ]; then
    if [ "${AUTO_BUILD_TAILWIND:-1}" = "1" ] && [ -f /app/package.json ]; then
        if [ -x /app/node_modules/.bin/tailwindcss ]; then
            echo "Building Tailwind assets..."
            npm run build
        else
            # Missing dev deps is not fatal; collectstatic still runs below.
            echo "Tailwind dependencies missing; skipping AUTO_BUILD_TAILWIND."
        fi
    fi
    echo "Collecting static files..."
    python manage.py collectstatic --noinput
fi

View File

@ -19,16 +19,21 @@ http {
server {
listen 80;
server_name _;
add_header X-Content-Type-Options "nosniff" always;
add_header X-Frame-Options "DENY" always;
add_header Referrer-Policy "same-origin" always;
location /static/ {
alias /var/www/static/;
expires 30d;
add_header Cache-Control "public, max-age=2592000, immutable";
access_log off;
}
location /media/ {
alias /var/www/media/;
expires 30d;
expires 7d;
add_header Cache-Control "public, max-age=604800";
access_log off;
}

866
package-lock.json generated Normal file
View File

@ -0,0 +1,866 @@
{
"name": "hoopscout-frontend",
"version": "1.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "hoopscout-frontend",
"version": "1.0.0",
"dependencies": {
"htmx.org": "^1.9.12"
},
"devDependencies": {
"tailwindcss": "^3.4.17"
}
},
"node_modules/@alloc/quick-lru": {
"version": "5.2.0",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/@jridgewell/gen-mapping": {
"version": "0.3.13",
"dev": true,
"license": "MIT",
"dependencies": {
"@jridgewell/sourcemap-codec": "^1.5.0",
"@jridgewell/trace-mapping": "^0.3.24"
}
},
"node_modules/@jridgewell/resolve-uri": {
"version": "3.1.2",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.0.0"
}
},
"node_modules/@jridgewell/sourcemap-codec": {
"version": "1.5.5",
"dev": true,
"license": "MIT"
},
"node_modules/@jridgewell/trace-mapping": {
"version": "0.3.31",
"dev": true,
"license": "MIT",
"dependencies": {
"@jridgewell/resolve-uri": "^3.1.0",
"@jridgewell/sourcemap-codec": "^1.4.14"
}
},
"node_modules/@nodelib/fs.scandir": {
"version": "2.1.5",
"dev": true,
"license": "MIT",
"dependencies": {
"@nodelib/fs.stat": "2.0.5",
"run-parallel": "^1.1.9"
},
"engines": {
"node": ">= 8"
}
},
"node_modules/@nodelib/fs.stat": {
"version": "2.0.5",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 8"
}
},
"node_modules/@nodelib/fs.walk": {
"version": "1.2.8",
"dev": true,
"license": "MIT",
"dependencies": {
"@nodelib/fs.scandir": "2.1.5",
"fastq": "^1.6.0"
},
"engines": {
"node": ">= 8"
}
},
"node_modules/any-promise": {
"version": "1.3.0",
"dev": true,
"license": "MIT"
},
"node_modules/anymatch": {
"version": "3.1.3",
"dev": true,
"license": "ISC",
"dependencies": {
"normalize-path": "^3.0.0",
"picomatch": "^2.0.4"
},
"engines": {
"node": ">= 8"
}
},
"node_modules/arg": {
"version": "5.0.2",
"dev": true,
"license": "MIT"
},
"node_modules/binary-extensions": {
"version": "2.3.0",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/braces": {
"version": "3.0.3",
"dev": true,
"license": "MIT",
"dependencies": {
"fill-range": "^7.1.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/camelcase-css": {
"version": "2.0.1",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 6"
}
},
"node_modules/chokidar": {
"version": "3.6.0",
"dev": true,
"license": "MIT",
"dependencies": {
"anymatch": "~3.1.2",
"braces": "~3.0.2",
"glob-parent": "~5.1.2",
"is-binary-path": "~2.1.0",
"is-glob": "~4.0.1",
"normalize-path": "~3.0.0",
"readdirp": "~3.6.0"
},
"engines": {
"node": ">= 8.10.0"
},
"funding": {
"url": "https://paulmillr.com/funding/"
},
"optionalDependencies": {
"fsevents": "~2.3.2"
}
},
"node_modules/chokidar/node_modules/glob-parent": {
"version": "5.1.2",
"dev": true,
"license": "ISC",
"dependencies": {
"is-glob": "^4.0.1"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/commander": {
"version": "4.1.1",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 6"
}
},
"node_modules/cssesc": {
"version": "3.0.0",
"dev": true,
"license": "MIT",
"bin": {
"cssesc": "bin/cssesc"
},
"engines": {
"node": ">=4"
}
},
"node_modules/didyoumean": {
"version": "1.2.2",
"dev": true,
"license": "Apache-2.0"
},
"node_modules/dlv": {
"version": "1.1.3",
"dev": true,
"license": "MIT"
},
"node_modules/fast-glob": {
"version": "3.3.3",
"dev": true,
"license": "MIT",
"dependencies": {
"@nodelib/fs.stat": "^2.0.2",
"@nodelib/fs.walk": "^1.2.3",
"glob-parent": "^5.1.2",
"merge2": "^1.3.0",
"micromatch": "^4.0.8"
},
"engines": {
"node": ">=8.6.0"
}
},
"node_modules/fast-glob/node_modules/glob-parent": {
"version": "5.1.2",
"dev": true,
"license": "ISC",
"dependencies": {
"is-glob": "^4.0.1"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/fastq": {
"version": "1.20.1",
"dev": true,
"license": "ISC",
"dependencies": {
"reusify": "^1.0.4"
}
},
"node_modules/fill-range": {
"version": "7.1.1",
"dev": true,
"license": "MIT",
"dependencies": {
"to-regex-range": "^5.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/function-bind": {
"version": "1.1.2",
"dev": true,
"license": "MIT",
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/glob-parent": {
"version": "6.0.2",
"dev": true,
"license": "ISC",
"dependencies": {
"is-glob": "^4.0.3"
},
"engines": {
"node": ">=10.13.0"
}
},
"node_modules/hasown": {
"version": "2.0.2",
"dev": true,
"license": "MIT",
"dependencies": {
"function-bind": "^1.1.2"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/htmx.org": {
"version": "1.9.12",
"resolved": "https://registry.npmjs.org/htmx.org/-/htmx.org-1.9.12.tgz",
"integrity": "sha512-VZAohXyF7xPGS52IM8d1T1283y+X4D+Owf3qY1NZ9RuBypyu9l8cGsxUMAG5fEAb/DhT7rDoJ9Hpu5/HxFD3cw=="
},
"node_modules/is-binary-path": {
"version": "2.1.0",
"dev": true,
"license": "MIT",
"dependencies": {
"binary-extensions": "^2.0.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/is-core-module": {
"version": "2.16.1",
"dev": true,
"license": "MIT",
"dependencies": {
"hasown": "^2.0.2"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/is-extglob": {
"version": "2.1.1",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/is-glob": {
"version": "4.0.3",
"dev": true,
"license": "MIT",
"dependencies": {
"is-extglob": "^2.1.1"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/is-number": {
"version": "7.0.0",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.12.0"
}
},
"node_modules/jiti": {
"version": "1.21.7",
"dev": true,
"license": "MIT",
"bin": {
"jiti": "bin/jiti.js"
}
},
"node_modules/lilconfig": {
"version": "3.1.3",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=14"
},
"funding": {
"url": "https://github.com/sponsors/antonk52"
}
},
"node_modules/lines-and-columns": {
"version": "1.2.4",
"dev": true,
"license": "MIT"
},
"node_modules/merge2": {
"version": "1.4.1",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 8"
}
},
"node_modules/micromatch": {
"version": "4.0.8",
"dev": true,
"license": "MIT",
"dependencies": {
"braces": "^3.0.3",
"picomatch": "^2.3.1"
},
"engines": {
"node": ">=8.6"
}
},
"node_modules/mz": {
"version": "2.7.0",
"dev": true,
"license": "MIT",
"dependencies": {
"any-promise": "^1.0.0",
"object-assign": "^4.0.1",
"thenify-all": "^1.0.0"
}
},
"node_modules/nanoid": {
"version": "3.3.11",
"dev": true,
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"license": "MIT",
"bin": {
"nanoid": "bin/nanoid.cjs"
},
"engines": {
"node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
}
},
"node_modules/normalize-path": {
"version": "3.0.0",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/object-assign": {
"version": "4.1.1",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/object-hash": {
"version": "3.0.0",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 6"
}
},
"node_modules/path-parse": {
"version": "1.0.7",
"dev": true,
"license": "MIT"
},
"node_modules/picocolors": {
"version": "1.1.1",
"dev": true,
"license": "ISC"
},
"node_modules/picomatch": {
"version": "2.3.1",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8.6"
},
"funding": {
"url": "https://github.com/sponsors/jonschlinkert"
}
},
"node_modules/pify": {
"version": "2.3.0",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/pirates": {
"version": "4.0.7",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 6"
}
},
"node_modules/postcss": {
"version": "8.5.8",
"dev": true,
"funding": [
{
"type": "opencollective",
"url": "https://opencollective.com/postcss/"
},
{
"type": "tidelift",
"url": "https://tidelift.com/funding/github/npm/postcss"
},
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"license": "MIT",
"dependencies": {
"nanoid": "^3.3.11",
"picocolors": "^1.1.1",
"source-map-js": "^1.2.1"
},
"engines": {
"node": "^10 || ^12 || >=14"
}
},
"node_modules/postcss-import": {
"version": "15.1.0",
"dev": true,
"license": "MIT",
"dependencies": {
"postcss-value-parser": "^4.0.0",
"read-cache": "^1.0.0",
"resolve": "^1.1.7"
},
"engines": {
"node": ">=14.0.0"
},
"peerDependencies": {
"postcss": "^8.0.0"
}
},
"node_modules/postcss-js": {
"version": "4.1.0",
"dev": true,
"funding": [
{
"type": "opencollective",
"url": "https://opencollective.com/postcss/"
},
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"license": "MIT",
"dependencies": {
"camelcase-css": "^2.0.1"
},
"engines": {
"node": "^12 || ^14 || >= 16"
},
"peerDependencies": {
"postcss": "^8.4.21"
}
},
"node_modules/postcss-load-config": {
"version": "6.0.1",
"dev": true,
"funding": [
{
"type": "opencollective",
"url": "https://opencollective.com/postcss/"
},
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"license": "MIT",
"dependencies": {
"lilconfig": "^3.1.1"
},
"engines": {
"node": ">= 18"
},
"peerDependencies": {
"jiti": ">=1.21.0",
"postcss": ">=8.0.9",
"tsx": "^4.8.1",
"yaml": "^2.4.2"
},
"peerDependenciesMeta": {
"jiti": {
"optional": true
},
"postcss": {
"optional": true
},
"tsx": {
"optional": true
},
"yaml": {
"optional": true
}
}
},
"node_modules/postcss-nested": {
"version": "6.2.0",
"dev": true,
"funding": [
{
"type": "opencollective",
"url": "https://opencollective.com/postcss/"
},
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"license": "MIT",
"dependencies": {
"postcss-selector-parser": "^6.1.1"
},
"engines": {
"node": ">=12.0"
},
"peerDependencies": {
"postcss": "^8.2.14"
}
},
"node_modules/postcss-selector-parser": {
"version": "6.1.2",
"dev": true,
"license": "MIT",
"dependencies": {
"cssesc": "^3.0.0",
"util-deprecate": "^1.0.2"
},
"engines": {
"node": ">=4"
}
},
"node_modules/postcss-value-parser": {
"version": "4.2.0",
"dev": true,
"license": "MIT"
},
"node_modules/queue-microtask": {
"version": "1.2.3",
"dev": true,
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
],
"license": "MIT"
},
"node_modules/read-cache": {
"version": "1.0.0",
"dev": true,
"license": "MIT",
"dependencies": {
"pify": "^2.3.0"
}
},
"node_modules/readdirp": {
"version": "3.6.0",
"dev": true,
"license": "MIT",
"dependencies": {
"picomatch": "^2.2.1"
},
"engines": {
"node": ">=8.10.0"
}
},
"node_modules/resolve": {
"version": "1.22.11",
"dev": true,
"license": "MIT",
"dependencies": {
"is-core-module": "^2.16.1",
"path-parse": "^1.0.7",
"supports-preserve-symlinks-flag": "^1.0.0"
},
"bin": {
"resolve": "bin/resolve"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/reusify": {
"version": "1.1.0",
"dev": true,
"license": "MIT",
"engines": {
"iojs": ">=1.0.0",
"node": ">=0.10.0"
}
},
"node_modules/run-parallel": {
"version": "1.2.0",
"dev": true,
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
],
"license": "MIT",
"dependencies": {
"queue-microtask": "^1.2.2"
}
},
"node_modules/source-map-js": {
"version": "1.2.1",
"dev": true,
"license": "BSD-3-Clause",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/sucrase": {
"version": "3.35.1",
"dev": true,
"license": "MIT",
"dependencies": {
"@jridgewell/gen-mapping": "^0.3.2",
"commander": "^4.0.0",
"lines-and-columns": "^1.1.6",
"mz": "^2.7.0",
"pirates": "^4.0.1",
"tinyglobby": "^0.2.11",
"ts-interface-checker": "^0.1.9"
},
"bin": {
"sucrase": "bin/sucrase",
"sucrase-node": "bin/sucrase-node"
},
"engines": {
"node": ">=16 || 14 >=14.17"
}
},
"node_modules/supports-preserve-symlinks-flag": {
"version": "1.0.0",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/tailwindcss": {
"version": "3.4.19",
"dev": true,
"license": "MIT",
"dependencies": {
"@alloc/quick-lru": "^5.2.0",
"arg": "^5.0.2",
"chokidar": "^3.6.0",
"didyoumean": "^1.2.2",
"dlv": "^1.1.3",
"fast-glob": "^3.3.2",
"glob-parent": "^6.0.2",
"is-glob": "^4.0.3",
"jiti": "^1.21.7",
"lilconfig": "^3.1.3",
"micromatch": "^4.0.8",
"normalize-path": "^3.0.0",
"object-hash": "^3.0.0",
"picocolors": "^1.1.1",
"postcss": "^8.4.47",
"postcss-import": "^15.1.0",
"postcss-js": "^4.0.1",
"postcss-load-config": "^4.0.2 || ^5.0 || ^6.0",
"postcss-nested": "^6.2.0",
"postcss-selector-parser": "^6.1.2",
"resolve": "^1.22.8",
"sucrase": "^3.35.0"
},
"bin": {
"tailwind": "lib/cli.js",
"tailwindcss": "lib/cli.js"
},
"engines": {
"node": ">=14.0.0"
}
},
"node_modules/thenify": {
"version": "3.3.1",
"dev": true,
"license": "MIT",
"dependencies": {
"any-promise": "^1.0.0"
}
},
"node_modules/thenify-all": {
"version": "1.6.0",
"dev": true,
"license": "MIT",
"dependencies": {
"thenify": ">= 3.1.0 < 4"
},
"engines": {
"node": ">=0.8"
}
},
"node_modules/tinyglobby": {
"version": "0.2.15",
"dev": true,
"license": "MIT",
"dependencies": {
"fdir": "^6.5.0",
"picomatch": "^4.0.3"
},
"engines": {
"node": ">=12.0.0"
},
"funding": {
"url": "https://github.com/sponsors/SuperchupuDev"
}
},
"node_modules/tinyglobby/node_modules/fdir": {
"version": "6.5.0",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12.0.0"
},
"peerDependencies": {
"picomatch": "^3 || ^4"
},
"peerDependenciesMeta": {
"picomatch": {
"optional": true
}
}
},
"node_modules/tinyglobby/node_modules/picomatch": {
"version": "4.0.3",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/sponsors/jonschlinkert"
}
},
"node_modules/to-regex-range": {
"version": "5.0.1",
"dev": true,
"license": "MIT",
"dependencies": {
"is-number": "^7.0.0"
},
"engines": {
"node": ">=8.0"
}
},
"node_modules/ts-interface-checker": {
"version": "0.1.13",
"dev": true,
"license": "Apache-2.0"
},
"node_modules/util-deprecate": {
"version": "1.0.2",
"dev": true,
"license": "MIT"
}
}
}

17
package.json Normal file
View File

@ -0,0 +1,17 @@
{
"name": "hoopscout-frontend",
"version": "1.0.0",
"private": true,
"description": "Tailwind pipeline for HoopScout Django templates",
"scripts": {
"build:vendor": "mkdir -p ./static/vendor && cp ./node_modules/htmx.org/dist/htmx.min.js ./static/vendor/htmx.min.js",
"build": "npm run build:vendor && tailwindcss -c tailwind.config.js -i ./static/src/tailwind.css -o ./static/css/main.css --minify",
"dev": "npm run build:vendor && tailwindcss -c tailwind.config.js -i ./static/src/tailwind.css -o ./static/css/main.css --watch=always"
},
"dependencies": {
"htmx.org": "^1.9.12"
},
"devDependencies": {
"tailwindcss": "^3.4.17"
}
}

View File

@ -1,3 +1,4 @@
[pytest]
DJANGO_SETTINGS_MODULE = config.settings.development
python_files = tests.py test_*.py *_tests.py
cache_dir = /tmp/.pytest_cache

View File

@ -5,3 +5,4 @@ gunicorn>=22.0,<23.0
celery[redis]>=5.4,<6.0
redis>=5.2,<6.0
python-dotenv>=1.0,<2.0
requests>=2.32,<3.0

View File

@ -0,0 +1,36 @@
#!/usr/bin/env sh
# Verify the merged release compose topology: no runtime service may keep a
# bind mount whose source is the repository checkout (release images must be
# immutable; only named volumes are allowed).
set -eu

# Resolve the repository root relative to this script's location.
# CDPATH= prevents `cd` from echoing the target directory when CDPATH is set.
ROOT_DIR="$(CDPATH= cd -- "$(dirname -- "$0")/.." && pwd)"
cd "$ROOT_DIR"

# Render the fully merged (base + release override) configuration once into a
# temp file; the trap removes it on any exit path.
MERGED_FILE="$(mktemp)"
trap 'rm -f "$MERGED_FILE"' EXIT
docker compose -f docker-compose.yml -f docker-compose.release.yml config > "$MERGED_FILE"

# check_service_bind_mount SERVICE
# Scans the merged config for the given service's section and fails the whole
# script if any `source:` entry under it contains the repository path.
check_service_bind_mount() {
    service_name="$1"
    # The awk program tracks whether we are inside the target service's block:
    #   - a line exactly matching the service header turns tracking on;
    #   - the next sibling key at the same indent level turns it off;
    #   - while inside, any `source:` line containing $ROOT_DIR is printed
    #     (for diagnostics) and awk exits 1, which flips the shell `if`.
    if awk -v service=" ${service_name}:" -v root="$ROOT_DIR" '
        BEGIN { in_service = 0 }
        $0 == service { in_service = 1; next }
        in_service && /^  [a-zA-Z0-9_]+:/ { in_service = 0 }
        in_service && /source: / {
            if (index($0, root) > 0) {
                print $0
                exit 1
            }
        }
    ' "$MERGED_FILE"; then
        printf "OK: %s has no source bind mount from repository path.\n" "$service_name"
    else
        printf "ERROR: %s still has a source bind mount from repository path in release config.\n" "$service_name" >&2
        exit 1
    fi
}

# Every service that runs project code must pass the check.
check_service_bind_mount "web"
check_service_bind_mount "celery_worker"
check_service_bind_mount "celery_beat"
echo "Release topology verification passed."

File diff suppressed because it is too large Load Diff

94
static/src/tailwind.css Normal file
View File

@ -0,0 +1,94 @@
/* Tailwind entry point: base reset, component classes, and utilities are
 * injected here by the tailwindcss CLI (see package.json build/dev scripts). */
@tailwind base;
@tailwind components;
@tailwind utilities;

/* Element-level defaults applied on top of Tailwind's preflight reset.
 * `brand-*` colors come from the custom palette in tailwind.config.js. */
@layer base {
  body {
    @apply bg-slate-100 text-slate-900 antialiased;
  }
  h1 {
    @apply text-2xl font-semibold tracking-tight text-slate-900;
  }
  h2 {
    @apply text-xl font-semibold tracking-tight text-slate-900;
  }
  h3 {
    @apply text-lg font-semibold text-slate-900;
  }
  a {
    @apply text-brand-700 hover:text-brand-600;
  }
  label {
    @apply mb-1 block text-sm font-medium text-slate-700;
  }
  /* Shared form-control styling; focus ring uses the brand color. */
  input,
  select,
  textarea {
    @apply w-full rounded-md border border-slate-300 bg-white px-3 py-2 text-sm text-slate-900 shadow-sm outline-none ring-brand-600 transition focus:border-brand-600 focus:ring-2;
  }
  /* Checkboxes opt out of the full-width text-input treatment above. */
  input[type='checkbox'] {
    @apply h-4 w-4 rounded border-slate-300 p-0 text-brand-700;
  }
  summary {
    @apply cursor-pointer font-medium text-slate-800;
  }
}

/* Reusable component classes referenced from the Django templates
 * (.panel, .btn, .data-table, .empty-state, etc.). */
@layer components {
  .page-container {
    @apply mx-auto w-full max-w-6xl px-4 sm:px-6 lg:px-8;
  }
  /* `shadow-soft` is a custom box-shadow defined in tailwind.config.js. */
  .panel {
    @apply rounded-xl border border-slate-200 bg-white p-5 shadow-soft;
  }
  .btn {
    @apply inline-flex items-center justify-center rounded-md border border-brand-700 bg-brand-700 px-3 py-2 text-sm font-medium text-white transition hover:bg-brand-600;
  }
  .btn-secondary {
    @apply inline-flex items-center justify-center rounded-md border border-slate-300 bg-white px-3 py-2 text-sm font-medium text-slate-700 transition hover:bg-slate-50;
  }
  /* Horizontal-scroll wrapper so wide stat tables don't break the layout. */
  .table-wrap {
    @apply overflow-x-auto rounded-lg border border-slate-200;
  }
  .data-table {
    @apply min-w-full divide-y divide-slate-200 text-sm;
  }
  .data-table thead {
    @apply bg-slate-50;
  }
  .data-table th {
    @apply px-3 py-2 text-left text-xs font-semibold uppercase tracking-wide text-slate-600;
  }
  .data-table td {
    @apply whitespace-nowrap px-3 py-2 text-slate-700;
  }
  .empty-state {
    @apply rounded-lg border border-dashed border-slate-300 bg-slate-50 p-6 text-center text-sm text-slate-600;
  }
  /* htmx loading indicator: hidden by default, shown while a request tagged
   * with hx-indicator is in flight (htmx toggles the .htmx-request class). */
  .htmx-indicator {
    display: none;
  }
  .htmx-request .htmx-indicator,
  .htmx-request.htmx-indicator {
    display: block;
  }
}

1
static/vendor/htmx.min.js vendored Normal file

File diff suppressed because one or more lines are too long

25
tailwind.config.js Normal file
View File

@ -0,0 +1,25 @@
/** @type {import('tailwindcss').Config} */
module.exports = {
  // Files scanned for class names so unused utilities are purged from the
  // build. Python sources are included because some templates receive CSS
  // class strings assembled in view/form code.
  content: [
    './templates/**/*.html',
    './apps/**/templates/**/*.html',
    './apps/**/*.py'
  ],
  theme: {
    extend: {
      colors: {
        // Project brand palette; referenced as brand-50 ... brand-900 in
        // templates and in static/src/tailwind.css. Note: only these five
        // steps exist — other brand-* shades will not resolve.
        brand: {
          50: '#eef6ff',
          100: '#d8e8ff',
          600: '#1d63dd',
          700: '#184fb3',
          900: '#142746'
        }
      },
      boxShadow: {
        // Soft drop shadow used by the .panel component class.
        soft: '0 8px 24px -14px rgba(16, 35, 64, 0.35)'
      }
    }
  },
  plugins: []
};

View File

@ -1,38 +1,41 @@
{% load static %}
<!doctype html>
<html lang="en">
<html lang="en" class="h-full">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>{% block title %}HoopScout{% endblock %}</title>
<link rel="stylesheet" href="{% static 'css/main.css' %}">
<script src="https://unpkg.com/htmx.org@1.9.12" defer></script>
<script src="{% static 'vendor/htmx.min.js' %}" defer></script>
</head>
<body>
<header class="site-header">
<div class="container row-between">
<a class="brand" href="{% url 'core:home' %}">HoopScout</a>
<nav class="row-gap">
<a href="{% url 'players:index' %}">Players</a>
<a href="{% url 'competitions:index' %}">Competitions</a>
<a href="{% url 'teams:index' %}">Teams</a>
<a href="{% url 'scouting:index' %}">Scouting</a>
<body class="min-h-full bg-slate-100 text-slate-900">
<header class="border-b border-slate-200 bg-white">
<div class="page-container flex flex-wrap items-center justify-between gap-4 py-3">
<a class="text-xl font-bold tracking-tight text-slate-900 no-underline" href="{% url 'core:home' %}">HoopScout</a>
<nav class="flex flex-wrap items-center gap-2 text-sm">
<a class="rounded-md px-2 py-1 hover:bg-slate-100" href="{% url 'players:index' %}">Players</a>
<a class="rounded-md px-2 py-1 hover:bg-slate-100" href="{% url 'competitions:index' %}">Competitions</a>
<a class="rounded-md px-2 py-1 hover:bg-slate-100" href="{% url 'teams:index' %}">Teams</a>
<a class="rounded-md px-2 py-1 hover:bg-slate-100" href="{% url 'scouting:index' %}">Scouting</a>
{% if request.user.is_authenticated %}
<a href="{% url 'core:dashboard' %}">Dashboard</a>
<a class="rounded-md px-2 py-1 hover:bg-slate-100" href="{% url 'core:dashboard' %}">Dashboard</a>
<form method="post" action="{% url 'users:logout' %}">
{% csrf_token %}
<button type="submit" class="link-button">Logout</button>
<button type="submit" class="btn-secondary px-2 py-1 text-xs">Logout</button>
</form>
{% else %}
<a href="{% url 'users:login' %}">Login</a>
<a href="{% url 'users:signup' %}">Signup</a>
<a class="rounded-md px-2 py-1 hover:bg-slate-100" href="{% url 'users:login' %}">Login</a>
<a class="btn px-2 py-1 text-xs" href="{% url 'users:signup' %}">Signup</a>
{% endif %}
</nav>
</div>
</header>
<main class="container">
<main class="page-container py-6">
{% include 'partials/messages.html' %}
<div id="htmx-loading" class="htmx-indicator mb-4 rounded-md border border-slate-200 bg-white px-3 py-2 text-sm text-slate-600" aria-live="polite">
Loading...
</div>
{% block content %}{% endblock %}
</main>
</body>

View File

@ -1,7 +1,13 @@
{% if messages %}
<section class="messages">
<section class="mb-4 space-y-2" aria-live="polite">
{% for message in messages %}
<div class="message {{ message.tags }}">{{ message }}</div>
{% if message.tags == "success" %}
<div role="status" class="rounded-md border border-emerald-200 bg-emerald-50 px-3 py-2 text-sm text-emerald-800">{{ message }}</div>
{% elif message.tags == "error" %}
<div role="alert" class="rounded-md border border-rose-200 bg-rose-50 px-3 py-2 text-sm text-rose-800">{{ message }}</div>
{% else %}
<div role="status" class="rounded-md border border-slate-200 bg-white px-3 py-2 text-sm text-slate-700">{{ message }}</div>
{% endif %}
{% endfor %}
</section>
{% endif %}

View File

@ -4,48 +4,51 @@
{% block content %}
<section class="panel">
<div class="row-between wrap-gap">
<div class="flex flex-wrap items-start justify-between gap-3">
<div>
<h1>{{ player.full_name }}</h1>
<p class="muted-text">
{{ player.nominal_position.name|default:"No nominal position" }}
· {{ player.inferred_role.name|default:"No inferred role" }}
</p>
<p class="mt-1 text-sm text-slate-600">{{ player.nominal_position.name|default:"No nominal position" }} · {{ player.inferred_role.name|default:"No inferred role" }}</p>
</div>
<div class="row-gap">
<div class="flex flex-wrap items-center gap-2">
{% if request.user.is_authenticated %}
{% include "scouting/partials/favorite_button.html" with player=player is_favorite=is_favorite next_url=request.get_full_path %}
{% endif %}
<a class="button ghost" href="{% url 'players:index' %}">Back to search</a>
<a class="btn-secondary" href="{% url 'players:index' %}">Back to search</a>
</div>
</div>
<div class="detail-grid mt-16">
<div class="detail-card">
<h2>Summary</h2>
<p><strong>Nationality:</strong> {{ player.nationality.name|default:"-" }}</p>
<p><strong>Birth date:</strong> {{ player.birth_date|date:"Y-m-d"|default:"-" }}</p>
<p><strong>Age:</strong> {{ age|default:"-" }}</p>
<p><strong>Height:</strong> {{ player.height_cm|default:"-" }} cm</p>
<p><strong>Weight:</strong> {{ player.weight_kg|default:"-" }} kg</p>
<p><strong>Dominant hand:</strong> {{ player.get_dominant_hand_display|default:"-" }}</p>
<div class="mt-4 grid gap-3 md:grid-cols-3">
<div class="rounded-lg border border-slate-200 p-4">
<h2 class="text-base">Summary</h2>
<dl class="mt-2 space-y-1 text-sm">
<div><dt class="inline font-semibold">Nationality:</dt> <dd class="inline">{{ player.nationality.name|default:"-" }}</dd></div>
<div><dt class="inline font-semibold">Origin competition:</dt> <dd class="inline">{{ player.origin_competition.name|default:"-" }}</dd></div>
<div><dt class="inline font-semibold">Origin team:</dt> <dd class="inline">{{ player.origin_team.name|default:"-" }}</dd></div>
<div><dt class="inline font-semibold">Birth date:</dt> <dd class="inline">{{ player.birth_date|date:"Y-m-d"|default:"-" }}</dd></div>
<div><dt class="inline font-semibold">Age:</dt> <dd class="inline">{{ age|default:"-" }}</dd></div>
<div><dt class="inline font-semibold">Height:</dt> <dd class="inline">{{ player.height_cm|default:"-" }} cm</dd></div>
<div><dt class="inline font-semibold">Weight:</dt> <dd class="inline">{{ player.weight_kg|default:"-" }} kg</dd></div>
<div><dt class="inline font-semibold">Dominant hand:</dt> <dd class="inline">{{ player.get_dominant_hand_display|default:"-" }}</dd></div>
</dl>
</div>
<div class="detail-card">
<h2>Current Assignment</h2>
<div class="rounded-lg border border-slate-200 p-4">
<h2 class="text-base">Current Assignment</h2>
{% if current_assignment %}
<p><strong>Team:</strong> {{ current_assignment.team.name|default:"-" }}</p>
<p><strong>Competition:</strong> {{ current_assignment.competition.name|default:"-" }}</p>
<p><strong>Season:</strong> {{ current_assignment.season.label|default:"-" }}</p>
<p><strong>Games:</strong> {{ current_assignment.games_played }}</p>
<dl class="mt-2 space-y-1 text-sm">
<div><dt class="inline font-semibold">Team:</dt> <dd class="inline">{{ current_assignment.team.name|default:"-" }}</dd></div>
<div><dt class="inline font-semibold">Competition:</dt> <dd class="inline">{{ current_assignment.competition.name|default:"-" }}</dd></div>
<div><dt class="inline font-semibold">Season:</dt> <dd class="inline">{{ current_assignment.season.label|default:"-" }}</dd></div>
<div><dt class="inline font-semibold">Games:</dt> <dd class="inline">{{ current_assignment.games_played }}</dd></div>
</dl>
{% else %}
<p>No active assignment available.</p>
<div class="empty-state mt-2">No active assignment available.</div>
{% endif %}
</div>
<div class="detail-card">
<h2>Aliases</h2>
<ul>
<div class="rounded-lg border border-slate-200 p-4">
<h2 class="text-base">Aliases</h2>
<ul class="mt-2 list-inside list-disc text-sm text-slate-700">
{% for alias in player.aliases.all %}
<li>{{ alias.alias }}{% if alias.source %} ({{ alias.source }}){% endif %}</li>
{% empty %}
@ -56,50 +59,33 @@
</div>
</section>
<section class="panel mt-16">
<section class="panel mt-4">
<h2>Team History</h2>
{% if season_rows %}
<div class="table-wrap">
<table>
<thead>
<tr>
<th>Season</th>
<th>Team</th>
<th>Competition</th>
</tr>
</thead>
<tbody>
<div class="table-wrap mt-3">
<table class="data-table">
<thead><tr><th>Season</th><th>Team</th><th>Competition</th></tr></thead>
<tbody class="divide-y divide-slate-100 bg-white">
{% for row in season_rows %}
<tr>
<td>{{ row.season.label|default:"-" }}</td>
<td>{{ row.team.name|default:"-" }}</td>
<td>{{ row.competition.name|default:"-" }}</td>
</tr>
<tr><td>{{ row.season.label|default:"-" }}</td><td>{{ row.team.name|default:"-" }}</td><td>{{ row.competition.name|default:"-" }}</td></tr>
{% endfor %}
</tbody>
</table>
</div>
{% else %}
<p>No team history available.</p>
<div class="empty-state mt-3">No team history available.</div>
{% endif %}
</section>
<section class="panel mt-16">
<section class="panel mt-4">
<h2>Career History</h2>
{% if career_entries %}
<div class="table-wrap">
<table>
<div class="table-wrap mt-3">
<table class="data-table">
<thead>
<tr>
<th>Season</th>
<th>Team</th>
<th>Competition</th>
<th>Role</th>
<th>From</th>
<th>To</th>
</tr>
<tr><th>Season</th><th>Team</th><th>Competition</th><th>Role</th><th>From</th><th>To</th></tr>
</thead>
<tbody>
<tbody class="divide-y divide-slate-100 bg-white">
{% for entry in career_entries %}
<tr>
<td>{{ entry.season.label|default:"-" }}</td>
@ -114,44 +100,28 @@
</table>
</div>
{% else %}
<p>No career entries available.</p>
<div class="empty-state mt-3">No career entries available.</div>
{% endif %}
</section>
<section class="panel mt-16">
<section class="panel mt-4">
<h2>Season-by-Season Stats</h2>
{% if season_rows %}
<div class="table-wrap">
<table>
<div class="table-wrap mt-3">
<table class="data-table">
<thead>
<tr>
<th>Season</th>
<th>Team</th>
<th>Competition</th>
<th>Games</th>
<th>MPG</th>
<th>PPG</th>
<th>RPG</th>
<th>APG</th>
<th>SPG</th>
<th>BPG</th>
<th>TOPG</th>
<th>FG%</th>
<th>3P%</th>
<th>FT%</th>
<th>Impact</th>
<th>Season</th><th>Team</th><th>Competition</th><th>Games</th><th>MPG</th><th>PPG</th><th>RPG</th><th>APG</th><th>SPG</th><th>BPG</th><th>TOPG</th><th>FG%</th><th>3P%</th><th>FT%</th><th>Impact</th>
</tr>
</thead>
<tbody>
<tbody class="divide-y divide-slate-100 bg-white">
{% for row in season_rows %}
<tr>
<td>{{ row.season.label|default:"-" }}</td>
<td>{{ row.team.name|default:"-" }}</td>
<td>{{ row.competition.name|default:"-" }}</td>
<td>{{ row.games_played }}</td>
<td>
{% if row.mpg is not None %}{{ row.mpg|floatformat:1 }}{% else %}-{% endif %}
</td>
<td>{% if row.mpg is not None %}{{ row.mpg|floatformat:1 }}{% else %}-{% endif %}</td>
<td>{% if row.stats %}{{ row.stats.points }}{% else %}-{% endif %}</td>
<td>{% if row.stats %}{{ row.stats.rebounds }}{% else %}-{% endif %}</td>
<td>{% if row.stats %}{{ row.stats.assists }}{% else %}-{% endif %}</td>
@ -168,7 +138,7 @@
</table>
</div>
{% else %}
<p>No season stats available.</p>
<div class="empty-state mt-3">No season stats available.</div>
{% endif %}
</section>
{% endblock %}

View File

@ -5,18 +5,32 @@
{% block content %}
<section class="panel">
<h1>Player Search</h1>
<p>Filter players by profile, context, and production metrics.</p>
<p class="mt-1 text-sm text-slate-600">Filter players by profile, origin, context, and production metrics.</p>
{% if search_has_errors %}
<div class="mt-4 rounded-md border border-rose-200 bg-rose-50 p-3 text-sm text-rose-800">
<p class="font-medium">Please correct the highlighted filters.</p>
{% for field in search_form %}
{% for error in field.errors %}
<p>{{ field.label }}: {{ error }}</p>
{% endfor %}
{% endfor %}
{% for error in search_form.non_field_errors %}
<p>{{ error }}</p>
{% endfor %}
</div>
{% endif %}
<form
method="get"
class="stack search-form"
class="mt-4 space-y-4"
hx-get="{% url 'players:index' %}"
hx-target="#player-results"
hx-swap="innerHTML"
hx-push-url="true"
hx-indicator="#htmx-loading"
hx-trigger="submit, change delay:200ms from:select, keyup changed delay:400ms from:#id_q"
>
<div class="filter-grid filter-grid-4">
<div class="grid gap-3 md:grid-cols-4">
<div>
<label for="id_q">Name</label>
{{ search_form.q }}
@ -29,24 +43,26 @@
<label for="id_page_size">Page size</label>
{{ search_form.page_size }}
</div>
<div class="filter-actions">
<button type="submit" class="button">Apply</button>
<a class="button ghost" href="{% url 'players:index' %}">Reset</a>
<div class="flex items-end gap-2">
<button type="submit" class="btn">Apply</button>
<a class="btn-secondary" href="{% url 'players:index' %}">Reset</a>
</div>
</div>
<div class="filter-grid filter-grid-3">
<div class="grid gap-3 md:grid-cols-3">
<div><label for="id_nominal_position">Nominal position</label>{{ search_form.nominal_position }}</div>
<div><label for="id_inferred_role">Inferred role</label>{{ search_form.inferred_role }}</div>
<div><label for="id_nationality">Nationality</label>{{ search_form.nationality }}</div>
<div><label for="id_competition">Competition</label>{{ search_form.competition }}</div>
<div><label for="id_team">Team</label>{{ search_form.team }}</div>
<div><label for="id_season">Season</label>{{ search_form.season }}</div>
<div><label for="id_origin_competition">Origin competition</label>{{ search_form.origin_competition }}</div>
<div><label for="id_origin_team">Origin team</label>{{ search_form.origin_team }}</div>
</div>
<details>
<details class="rounded-lg border border-slate-200 bg-slate-50 p-3">
<summary>Physical and age filters</summary>
<div class="filter-grid filter-grid-4">
<div class="mt-3 grid gap-3 md:grid-cols-4">
<div><label for="id_age_min">Age min</label>{{ search_form.age_min }}</div>
<div><label for="id_age_max">Age max</label>{{ search_form.age_max }}</div>
<div><label for="id_height_min">Height min (cm)</label>{{ search_form.height_min }}</div>
@ -56,34 +72,29 @@
</div>
</details>
<details>
<details class="rounded-lg border border-slate-200 bg-slate-50 p-3">
<summary>Statistical filters</summary>
<div class="filter-grid filter-grid-4">
<div class="mt-3 grid gap-3 md:grid-cols-4">
<div><label for="id_games_played_min">Games min</label>{{ search_form.games_played_min }}</div>
<div><label for="id_games_played_max">Games max</label>{{ search_form.games_played_max }}</div>
<div><label for="id_minutes_per_game_min">MPG min</label>{{ search_form.minutes_per_game_min }}</div>
<div><label for="id_minutes_per_game_max">MPG max</label>{{ search_form.minutes_per_game_max }}</div>
<div><label for="id_points_per_game_min">PPG min</label>{{ search_form.points_per_game_min }}</div>
<div><label for="id_points_per_game_max">PPG max</label>{{ search_form.points_per_game_max }}</div>
<div><label for="id_rebounds_per_game_min">RPG min</label>{{ search_form.rebounds_per_game_min }}</div>
<div><label for="id_rebounds_per_game_max">RPG max</label>{{ search_form.rebounds_per_game_max }}</div>
<div><label for="id_assists_per_game_min">APG min</label>{{ search_form.assists_per_game_min }}</div>
<div><label for="id_assists_per_game_max">APG max</label>{{ search_form.assists_per_game_max }}</div>
<div><label for="id_steals_per_game_min">SPG min</label>{{ search_form.steals_per_game_min }}</div>
<div><label for="id_steals_per_game_max">SPG max</label>{{ search_form.steals_per_game_max }}</div>
<div><label for="id_blocks_per_game_min">BPG min</label>{{ search_form.blocks_per_game_min }}</div>
<div><label for="id_blocks_per_game_max">BPG max</label>{{ search_form.blocks_per_game_max }}</div>
<div><label for="id_turnovers_per_game_min">TOPG min</label>{{ search_form.turnovers_per_game_min }}</div>
<div><label for="id_turnovers_per_game_max">TOPG max</label>{{ search_form.turnovers_per_game_max }}</div>
<div><label for="id_fg_pct_min">FG% min</label>{{ search_form.fg_pct_min }}</div>
<div><label for="id_fg_pct_max">FG% max</label>{{ search_form.fg_pct_max }}</div>
<div><label for="id_three_pct_min">3P% min</label>{{ search_form.three_pct_min }}</div>
<div><label for="id_three_pct_max">3P% max</label>{{ search_form.three_pct_max }}</div>
<div><label for="id_ft_pct_min">FT% min</label>{{ search_form.ft_pct_min }}</div>
<div><label for="id_ft_pct_max">FT% max</label>{{ search_form.ft_pct_max }}</div>
<div><label for="id_efficiency_metric_min">Impact min</label>{{ search_form.efficiency_metric_min }}</div>
@ -93,7 +104,7 @@
</form>
</section>
<section id="player-results" class="panel mt-16">
<section id="player-results" class="panel mt-4" aria-live="polite">
{% include "players/partials/results.html" %}
</section>
{% endblock %}

View File

@ -1,43 +1,60 @@
{% load player_query %}
<div class="row-between wrap-gap">
<div class="flex flex-wrap items-center justify-between gap-3">
<h2>Results</h2>
<div class="muted-text">
<div class="text-sm text-slate-600">
{{ page_obj.paginator.count }} player{{ page_obj.paginator.count|pluralize }} found
</div>
</div>
<p class="mt-2 text-xs text-slate-600">
{{ search_metric_semantics }}
</p>
{% if search_has_errors %}
<div class="mt-3 rounded-md border border-rose-200 bg-rose-50 p-3 text-sm text-rose-800">
<p class="font-medium">Search filters are invalid.</p>
{% for field in search_form %}
{% for error in field.errors %}
<p>{{ field.label }}: {{ error }}</p>
{% endfor %}
{% endfor %}
{% for error in search_form.non_field_errors %}
<p>{{ error }}</p>
{% endfor %}
</div>
{% endif %}
{% if request.user.is_authenticated %}
{% include "scouting/partials/save_search_form.html" %}
{% endif %}
{% if players %}
<div class="table-wrap">
<table>
<div class="table-wrap mt-4">
<table class="data-table">
<thead>
<tr>
<th>Player</th>
<th>Nationality</th>
<th>Pos / Role</th>
<th>Origin</th>
<th>Height / Weight</th>
<th>Games</th>
<th>MPG</th>
<th>PPG</th>
<th>RPG</th>
<th>APG</th>
<th>Best Eligible Games</th>
<th>Best Eligible MPG</th>
<th>Best Eligible PPG</th>
<th>Best Eligible RPG</th>
<th>Best Eligible APG</th>
{% if request.user.is_authenticated %}<th>Watchlist</th>{% endif %}
</tr>
</thead>
<tbody>
<tbody class="divide-y divide-slate-100 bg-white">
{% for player in players %}
<tr>
<td>
<a href="{% url 'players:detail' player.pk %}">{{ player.full_name }}</a>
</td>
<td><a class="font-medium" href="{% url 'players:detail' player.pk %}">{{ player.full_name }}</a></td>
<td>{{ player.nationality.name|default:"-" }}</td>
<td>{{ player.nominal_position.code|default:"-" }} / {{ player.inferred_role.name|default:"-" }}</td>
<td>
{{ player.nominal_position.code|default:"-" }}
/ {{ player.inferred_role.name|default:"-" }}
{{ player.origin_competition.name|default:"-" }}
{% if player.origin_team %}<div class="text-xs text-slate-500">{{ player.origin_team.name }}</div>{% endif %}
</td>
<td>{{ player.height_cm|default:"-" }} / {{ player.weight_kg|default:"-" }}</td>
<td>{{ player.games_played_value|floatformat:0 }}</td>
@ -60,37 +77,21 @@
</table>
</div>
<div class="pagination row-gap mt-16">
{% if page_obj.has_previous %}
{% query_transform page=page_obj.previous_page_number as prev_query %}
<a
class="button ghost"
href="?{{ prev_query }}"
hx-get="?{{ prev_query }}"
hx-target="#player-results"
hx-swap="innerHTML"
hx-push-url="true"
>
Previous
</a>
{% endif %}
<span>Page {{ page_obj.number }} of {{ page_obj.paginator.num_pages }}</span>
{% if page_obj.has_next %}
{% query_transform page=page_obj.next_page_number as next_query %}
<a
class="button ghost"
href="?{{ next_query }}"
hx-get="?{{ next_query }}"
hx-target="#player-results"
hx-swap="innerHTML"
hx-push-url="true"
>
Next
</a>
{% endif %}
<div class="mt-4 flex items-center justify-between gap-3">
<div>
{% if page_obj.has_previous %}
{% query_transform page=page_obj.previous_page_number as prev_query %}
<a class="btn-secondary" href="?{{ prev_query }}" hx-get="?{{ prev_query }}" hx-target="#player-results" hx-swap="innerHTML" hx-push-url="true" hx-indicator="#htmx-loading">Previous</a>
{% endif %}
</div>
<span class="text-sm text-slate-600">Page {{ page_obj.number }} of {{ page_obj.paginator.num_pages }}</span>
<div>
{% if page_obj.has_next %}
{% query_transform page=page_obj.next_page_number as next_query %}
<a class="btn-secondary" href="?{{ next_query }}" hx-get="?{{ next_query }}" hx-target="#player-results" hx-swap="innerHTML" hx-push-url="true" hx-indicator="#htmx-loading">Next</a>
{% endif %}
</div>
</div>
{% else %}
<p>No players matched the current filters.</p>
<div class="empty-state mt-4">No players matched the current filters.</div>
{% endif %}

View File

@ -4,24 +4,24 @@
{% block content %}
<section class="panel">
<div class="row-between wrap-gap">
<div class="flex flex-wrap items-start justify-between gap-3">
<div>
<h1>Scouting Workspace</h1>
<p class="muted-text">Manage saved searches and your player watchlist.</p>
<p class="mt-1 text-sm text-slate-600">Manage saved searches and your player watchlist.</p>
</div>
<div class="row-gap">
<a class="button ghost" href="{% url 'scouting:saved_search_list' %}">All saved searches</a>
<a class="button ghost" href="{% url 'scouting:watchlist' %}">Watchlist</a>
<div class="flex flex-wrap gap-2">
<a class="btn-secondary" href="{% url 'scouting:saved_search_list' %}">All saved searches</a>
<a class="btn-secondary" href="{% url 'scouting:watchlist' %}">Watchlist</a>
</div>
</div>
</section>
<section class="panel mt-16">
<section class="panel mt-4">
<h2>Saved Searches</h2>
{% include "scouting/partials/saved_search_table.html" with saved_searches=saved_searches %}
</section>
<section class="panel mt-16">
<section class="panel mt-4">
<h2>Watchlist</h2>
{% include "scouting/partials/watchlist_table.html" with favorites=favorites %}
</section>

View File

@ -5,12 +5,13 @@
hx-post="{% url 'scouting:favorite_toggle' player.id %}"
hx-target="#favorite-form-{{ player.id }}"
hx-swap="outerHTML"
hx-indicator="#htmx-loading"
>
{% csrf_token %}
<input type="hidden" name="next" value="{{ next_url }}">
{% if is_favorite %}
<button type="submit" class="button ghost">Remove favorite</button>
<button type="submit" class="btn-secondary">Remove favorite</button>
{% else %}
<button type="submit" class="button ghost">Add favorite</button>
<button type="submit" class="btn-secondary">Add favorite</button>
{% endif %}
</form>

View File

@ -1,3 +1,5 @@
<div class="message {% if ok %}success{% else %}error{% endif %}">
{{ message }}
</div>
{% if ok %}
<div class="rounded-md border border-emerald-200 bg-emerald-50 px-3 py-2 text-sm text-emerald-800" role="status">{{ message }}</div>
{% else %}
<div class="rounded-md border border-rose-200 bg-rose-50 px-3 py-2 text-sm text-rose-800" role="alert">{{ message }}</div>
{% endif %}

View File

@ -1,18 +1,23 @@
<div class="panel mt-16">
<div class="mt-4 rounded-lg border border-slate-200 bg-slate-50 p-4">
<h3>Save Current Search</h3>
<p class="muted-text">Store current filters and replay them later.</p>
<p class="mt-1 text-sm text-slate-600">Store current filters and replay them later.</p>
<form
method="post"
action="{% url 'scouting:saved_search_create' %}"
class="row-gap"
class="mt-3 flex flex-wrap items-end gap-3"
hx-post="{% url 'scouting:saved_search_create' %}"
hx-target="#saved-search-feedback"
hx-swap="innerHTML"
hx-indicator="#htmx-loading"
>
{% csrf_token %}
<input type="text" name="name" placeholder="Search name" required>
<label class="inline-label">
<div class="min-w-56 flex-1">
<label for="saved-search-name">Search name</label>
<input id="saved-search-name" type="text" name="name" placeholder="Search name" required>
</div>
<label class="inline-flex items-center gap-2 pb-2 text-sm text-slate-700">
<input type="checkbox" name="is_public">
Public
</label>
@ -21,7 +26,7 @@
<input type="hidden" name="{{ key }}" value="{{ value }}">
{% endfor %}
<button class="button" type="submit">Save search</button>
<button class="btn" type="submit">Save search</button>
</form>
<div id="saved-search-feedback" class="mt-16"></div>
<div id="saved-search-feedback" class="mt-3" aria-live="polite"></div>
</div>

View File

@ -1,6 +1,6 @@
{% if saved_searches %}
<div class="table-wrap mt-16">
<table>
<div class="table-wrap mt-4">
<table class="data-table">
<thead>
<tr>
<th>Name</th>
@ -10,20 +10,20 @@
<th>Actions</th>
</tr>
</thead>
<tbody>
<tbody class="divide-y divide-slate-100 bg-white">
{% for saved_search in saved_searches %}
<tr>
<td>{{ saved_search.name }}</td>
<td class="font-medium text-slate-800">{{ saved_search.name }}</td>
<td>{% if saved_search.is_public %}Public{% else %}Private{% endif %}</td>
<td>{{ saved_search.updated_at|date:"Y-m-d H:i" }}</td>
<td>{{ saved_search.last_run_at|date:"Y-m-d H:i"|default:"-" }}</td>
<td>
<div class="row-gap">
<a class="button ghost" href="{% url 'scouting:saved_search_run' saved_search.pk %}">Run</a>
<a class="button ghost" href="{% url 'scouting:saved_search_edit' saved_search.pk %}">Edit</a>
<div class="flex flex-wrap gap-2">
<a class="btn-secondary" href="{% url 'scouting:saved_search_run' saved_search.pk %}">Run</a>
<a class="btn-secondary" href="{% url 'scouting:saved_search_edit' saved_search.pk %}">Edit</a>
<form method="post" action="{% url 'scouting:saved_search_delete' saved_search.pk %}">
{% csrf_token %}
<button class="button ghost" type="submit">Delete</button>
<button class="btn-secondary" type="submit">Delete</button>
</form>
</div>
</td>
@ -33,5 +33,5 @@
</table>
</div>
{% else %}
<p class="mt-16">No saved searches yet.</p>
<div class="empty-state mt-4">No saved searches yet.</div>
{% endif %}

View File

@ -1,6 +1,6 @@
{% if favorites %}
<div class="table-wrap mt-16">
<table>
<div class="table-wrap mt-4">
<table class="data-table">
<thead>
<tr>
<th>Player</th>
@ -10,15 +10,12 @@
<th>Action</th>
</tr>
</thead>
<tbody>
<tbody class="divide-y divide-slate-100 bg-white">
{% for favorite in favorites %}
<tr>
<td><a href="{% url 'players:detail' favorite.player_id %}">{{ favorite.player.full_name }}</a></td>
<td><a class="font-medium" href="{% url 'players:detail' favorite.player_id %}">{{ favorite.player.full_name }}</a></td>
<td>{{ favorite.player.nationality.name|default:"-" }}</td>
<td>
{{ favorite.player.nominal_position.code|default:"-" }}
/ {{ favorite.player.inferred_role.name|default:"-" }}
</td>
<td>{{ favorite.player.nominal_position.code|default:"-" }} / {{ favorite.player.inferred_role.name|default:"-" }}</td>
<td>{{ favorite.created_at|date:"Y-m-d" }}</td>
<td>
<div id="favorite-toggle-{{ favorite.player_id }}">
@ -31,5 +28,5 @@
</table>
</div>
{% else %}
<p class="mt-16">No players in your watchlist yet.</p>
<div class="empty-state mt-4">No players in your watchlist yet.</div>
{% endif %}

View File

@ -3,14 +3,14 @@
{% block title %}HoopScout | Edit Saved Search{% endblock %}
{% block content %}
<section class="panel narrow">
<section class="panel mx-auto max-w-lg">
<h1>Edit Saved Search</h1>
<form method="post" class="stack">
<form method="post" class="mt-4 space-y-4">
{% csrf_token %}
{{ form.as_p }}
<div class="row-gap">
<button type="submit" class="button">Update</button>
<a class="button ghost" href="{% url 'scouting:index' %}">Cancel</a>
<div class="flex flex-wrap gap-2">
<button type="submit" class="btn">Update</button>
<a class="btn-secondary" href="{% url 'scouting:index' %}">Cancel</a>
</div>
</form>
</section>

View File

@ -4,9 +4,9 @@
{% block content %}
<section class="panel">
<div class="row-between wrap-gap">
<div class="flex flex-wrap items-center justify-between gap-3">
<h1>Saved Searches</h1>
<a class="button ghost" href="{% url 'scouting:index' %}">Back to scouting</a>
<a class="btn-secondary" href="{% url 'scouting:index' %}">Back to scouting</a>
</div>
{% include "scouting/partials/saved_search_table.html" with saved_searches=saved_searches %}
</section>

View File

@ -4,9 +4,9 @@
{% block content %}
<section class="panel">
<div class="row-between wrap-gap">
<div class="flex flex-wrap items-center justify-between gap-3">
<h1>Watchlist</h1>
<a class="button ghost" href="{% url 'scouting:index' %}">Back to scouting</a>
<a class="btn-secondary" href="{% url 'scouting:index' %}">Back to scouting</a>
</div>
{% include "scouting/partials/watchlist_table.html" with favorites=favorites %}
</section>

View File

@ -3,12 +3,12 @@
{% block title %}HoopScout | Login{% endblock %}
{% block content %}
<section class="panel narrow">
<section class="panel mx-auto max-w-lg">
<h1>Login</h1>
<form method="post" class="stack">
<form method="post" class="mt-4 space-y-4">
{% csrf_token %}
{{ form.as_p }}
<button type="submit" class="button">Sign in</button>
<button type="submit" class="btn">Sign in</button>
</form>
</section>
{% endblock %}

View File

@ -3,12 +3,12 @@
{% block title %}HoopScout | Signup{% endblock %}
{% block content %}
<section class="panel narrow">
<section class="panel mx-auto max-w-lg">
<h1>Create account</h1>
<form method="post" class="stack">
<form method="post" class="mt-4 space-y-4">
{% csrf_token %}
{{ form.as_p }}
<button type="submit" class="button">Create account</button>
<button type="submit" class="btn">Create account</button>
</form>
</section>
{% endblock %}

View File

@ -5,6 +5,7 @@ from django.urls import reverse
from apps.competitions.models import Competition, Season
from apps.players.models import Nationality, Player, Position, Role
from apps.stats.models import PlayerSeason, PlayerSeasonStats
from apps.teams.models import Team
@ -57,3 +58,217 @@ def test_lookup_list_endpoints(client):
def test_api_is_read_only(client):
response = client.post(reverse("api:players"), data={"q": "x"})
assert response.status_code == 403
@pytest.mark.django_db
def test_players_api_search_consistent_with_ui_filters(client):
nationality = Nationality.objects.create(name="Portugal", iso2_code="PT", iso3_code="PRT")
position = Position.objects.create(code="SF", name="Small Forward")
role = Role.objects.create(code="wing", name="Wing")
competition = Competition.objects.create(
name="Liga Betclic",
slug="liga-betclic",
competition_type=Competition.CompetitionType.LEAGUE,
gender=Competition.Gender.MEN,
country=nationality,
)
team = Team.objects.create(name="Porto Hoops", slug="porto-hoops", country=nationality)
season = Season.objects.create(label="2025-2026", start_date=date(2025, 9, 1), end_date=date(2026, 6, 30))
matching = Player.objects.create(
first_name="Tiago",
last_name="Silva",
full_name="Tiago Silva",
birth_date=date(2001, 3, 1),
nationality=nationality,
nominal_position=position,
inferred_role=role,
origin_competition=competition,
origin_team=team,
)
ps = PlayerSeason.objects.create(
player=matching,
season=season,
team=team,
competition=competition,
games_played=10,
minutes_played=320,
)
PlayerSeasonStats.objects.create(
player_season=ps,
points=16.5,
rebounds=5,
assists=3,
steals=1,
blocks=0.5,
turnovers=2,
)
Player.objects.create(
first_name="Pedro",
last_name="Costa",
full_name="Pedro Costa",
birth_date=date(2001, 4, 2),
nationality=nationality,
)
params = {
"origin_competition": competition.id,
"nominal_position": position.id,
"points_per_game_min": "10",
"sort": "ppg_desc",
}
ui_response = client.get(reverse("players:index"), data=params)
api_response = client.get(reverse("api:players"), data=params)
assert ui_response.status_code == 200
assert api_response.status_code == 200
assert list(ui_response.context["players"])[0].id == matching.id
assert api_response.json()["count"] == 1
assert api_response.json()["results"][0]["id"] == matching.id
@pytest.mark.django_db
def test_players_api_metric_sort_uses_best_eligible_values(client):
nationality = Nationality.objects.create(name="Romania", iso2_code="RO", iso3_code="ROU")
competition = Competition.objects.create(
name="LNBM",
slug="lnbm",
competition_type=Competition.CompetitionType.LEAGUE,
gender=Competition.Gender.MEN,
country=nationality,
)
season_old = Season.objects.create(label="2022-2023", start_date=date(2022, 9, 1), end_date=date(2023, 6, 30))
season_new = Season.objects.create(label="2023-2024", start_date=date(2023, 9, 1), end_date=date(2024, 6, 30))
team = Team.objects.create(name="Bucharest", slug="bucharest", country=nationality)
p1 = Player.objects.create(first_name="Ion", last_name="Low", full_name="Ion Low", nationality=nationality)
p1s = PlayerSeason.objects.create(
player=p1,
season=season_old,
team=team,
competition=competition,
games_played=20,
minutes_played=400,
)
PlayerSeasonStats.objects.create(player_season=p1s, points=13, rebounds=3, assists=2, steals=1, blocks=0.1, turnovers=2)
p2 = Player.objects.create(first_name="Dan", last_name="High", full_name="Dan High", nationality=nationality)
p2s_old = PlayerSeason.objects.create(
player=p2,
season=season_old,
team=team,
competition=competition,
games_played=20,
minutes_played=400,
)
PlayerSeasonStats.objects.create(player_season=p2s_old, points=9, rebounds=3, assists=2, steals=1, blocks=0.1, turnovers=2)
p2s_new = PlayerSeason.objects.create(
player=p2,
season=season_new,
team=team,
competition=competition,
games_played=20,
minutes_played=500,
)
PlayerSeasonStats.objects.create(player_season=p2s_new, points=22, rebounds=5, assists=4, steals=1.3, blocks=0.2, turnovers=2.3)
response = client.get(reverse("api:players"), data={"sort": "ppg_desc"})
assert response.status_code == 200
names = [row["full_name"] for row in response.json()["results"]]
assert names.index("Dan High") < names.index("Ion Low")
@pytest.mark.django_db
def test_player_detail_api_includes_origin_fields(client):
nationality = Nationality.objects.create(name="Greece", iso2_code="GR", iso3_code="GRC")
competition = Competition.objects.create(
name="HEBA A1",
slug="heba-a1",
competition_type=Competition.CompetitionType.LEAGUE,
gender=Competition.Gender.MEN,
country=nationality,
)
team = Team.objects.create(name="Athens BC", slug="athens-bc", country=nationality)
player = Player.objects.create(
first_name="Alex",
last_name="Dimitriou",
full_name="Alex Dimitriou",
birth_date=date(2000, 2, 2),
nationality=nationality,
origin_competition=competition,
origin_team=team,
)
response = client.get(reverse("api:player_detail", kwargs={"pk": player.pk}))
assert response.status_code == 200
payload = response.json()
assert payload["origin_competition"] == competition.name
assert payload["origin_team"] == team.name
@pytest.mark.django_db
def test_api_combined_filters_respect_same_player_season_context(client):
nationality = Nationality.objects.create(name="Poland", iso2_code="PL", iso3_code="POL")
competition = Competition.objects.create(
name="PLK",
slug="plk",
competition_type=Competition.CompetitionType.LEAGUE,
gender=Competition.Gender.MEN,
country=nationality,
)
season = Season.objects.create(label="2024-2025", start_date=date(2024, 9, 1), end_date=date(2025, 6, 30))
team_a = Team.objects.create(name="Warsaw", slug="warsaw", country=nationality)
team_b = Team.objects.create(name="Gdansk", slug="gdansk", country=nationality)
player = Player.objects.create(first_name="Piotr", last_name="Filter", full_name="Piotr Filter", nationality=nationality)
ps_a = PlayerSeason.objects.create(
player=player,
season=season,
team=team_a,
competition=competition,
games_played=10,
minutes_played=200,
)
PlayerSeasonStats.objects.create(player_season=ps_a, points=7, rebounds=2, assists=3, steals=1, blocks=0, turnovers=1)
ps_b = PlayerSeason.objects.create(
player=player,
season=season,
team=team_b,
competition=competition,
games_played=10,
minutes_played=300,
)
PlayerSeasonStats.objects.create(player_season=ps_b, points=21, rebounds=4, assists=5, steals=1, blocks=0, turnovers=2)
response = client.get(
reverse("api:players"),
data={"team": team_a.id, "season": season.id, "competition": competition.id, "points_per_game_min": "20"},
)
assert response.status_code == 200
assert response.json()["count"] == 0
@pytest.mark.django_db
def test_players_api_returns_400_for_invalid_numeric_filter(client):
response = client.get(reverse("api:players"), data={"points_per_game_min": "abc"})
assert response.status_code == 400
payload = response.json()
assert payload["detail"] == "Invalid search parameters."
assert "points_per_game_min" in payload["errors"]
@pytest.mark.django_db
def test_players_api_returns_400_for_invalid_choice_filter(client):
response = client.get(reverse("api:players"), data={"sort": "not-a-sort"})
assert response.status_code == 400
payload = response.json()
assert "sort" in payload["errors"]
@pytest.mark.django_db
def test_players_api_returns_400_for_invalid_range_combination(client):
response = client.get(reverse("api:players"), data={"age_min": 30, "age_max": 20})
assert response.status_code == 400
payload = response.json()
assert "age_max" in payload["errors"]

View File

@ -0,0 +1,38 @@
import os
import subprocess
import sys
import pytest
def _run_python_import(code: str, env_overrides: dict[str, str]) -> subprocess.CompletedProcess:
env = os.environ.copy()
env.update(env_overrides)
return subprocess.run(
[sys.executable, "-c", code],
capture_output=True,
text=True,
env=env,
check=False,
)
@pytest.mark.django_db
def test_invalid_cron_does_not_crash_config_import_path():
result = _run_python_import(
(
"import config; "
"from config.celery import app; "
"print(f'beat_schedule_size={len(app.conf.beat_schedule or {})}')"
),
{
"DJANGO_SETTINGS_MODULE": "config.settings.development",
"DJANGO_ENV": "development",
"DJANGO_DEBUG": "1",
"INGESTION_SCHEDULE_ENABLED": "1",
"INGESTION_SCHEDULE_CRON": "bad cron value",
},
)
assert result.returncode == 0
assert "beat_schedule_size=0" in result.stdout

View File

@ -5,8 +5,9 @@ import pytest
from apps.competitions.models import Competition, Season
from apps.ingestion.models import IngestionError, IngestionRun
from apps.ingestion.services.sync import run_sync_job
from apps.players.models import Player
from apps.players.models import Nationality, Player
from apps.providers.exceptions import ProviderRateLimitError
from apps.providers.models import ExternalMapping
from apps.stats.models import PlayerSeason, PlayerSeasonStats
from apps.teams.models import Team
@ -24,6 +25,16 @@ def test_run_full_sync_creates_domain_objects(settings):
assert Player.objects.count() >= 1
assert PlayerSeason.objects.count() >= 1
assert PlayerSeasonStats.objects.count() >= 1
assert Player.objects.filter(origin_competition__isnull=False).exists()
assert run.context.get("completed_steps") == [
"competitions",
"teams",
"seasons",
"players",
"player_stats",
"player_careers",
]
assert run.context.get("source_counts", {}).get("players", 0) >= 1
@pytest.mark.django_db
@ -65,6 +76,10 @@ def test_incremental_sync_runs_successfully(settings):
assert run.status == IngestionRun.RunStatus.SUCCESS
assert run.records_processed > 0
assert run.started_at is not None
assert run.finished_at is not None
assert run.finished_at >= run.started_at
assert run.error_summary == ""
@pytest.mark.django_db
@ -78,6 +93,159 @@ def test_run_sync_handles_rate_limit(settings):
run = IngestionRun.objects.order_by("-id").first()
assert run is not None
assert run.status == IngestionRun.RunStatus.FAILED
assert run.started_at is not None
assert run.finished_at is not None
assert "Rate limit" in run.error_summary
assert IngestionError.objects.filter(ingestion_run=run).exists()
os.environ.pop("PROVIDER_MVP_FORCE_RATE_LIMIT", None)
@pytest.mark.django_db
def test_balldontlie_sync_idempotency_with_stable_payload(monkeypatch):
class StableProvider:
def sync_all(self):
return {
"competitions": [
{
"external_id": "competition-nba",
"name": "NBA",
"slug": "nba",
"competition_type": "league",
"gender": "men",
"level": 1,
"country": None,
"is_active": True,
}
],
"teams": [
{
"external_id": "team-14",
"name": "Los Angeles Lakers",
"short_name": "LAL",
"slug": "los-angeles-lakers",
"country": None,
"is_national_team": False,
}
],
"seasons": [
{
"external_id": "season-2024",
"label": "2024-2025",
"start_date": "2024-10-01",
"end_date": "2025-06-30",
"is_current": False,
}
],
"players": [
{
"external_id": "player-237",
"first_name": "LeBron",
"last_name": "James",
"full_name": "LeBron James",
"birth_date": None,
"nationality": None,
"nominal_position": {"code": "SF", "name": "Small Forward"},
"inferred_role": {"code": "wing", "name": "Wing"},
"height_cm": None,
"weight_kg": None,
"dominant_hand": "unknown",
"is_active": True,
"aliases": [],
}
],
"player_stats": [
{
"external_id": "ps-2024-237-14",
"player_external_id": "player-237",
"team_external_id": "team-14",
"competition_external_id": "competition-nba",
"season_external_id": "season-2024",
"games_played": 2,
"games_started": 0,
"minutes_played": 68,
"points": 25,
"rebounds": 9,
"assists": 8,
"steals": 1.5,
"blocks": 0.5,
"turnovers": 3.5,
"fg_pct": 55.0,
"three_pct": 45.0,
"ft_pct": 95.0,
"usage_rate": None,
"true_shooting_pct": None,
"player_efficiency_rating": None,
}
],
"player_careers": [
{
"external_id": "career-2024-237-14",
"player_external_id": "player-237",
"team_external_id": "team-14",
"competition_external_id": "competition-nba",
"season_external_id": "season-2024",
"role_code": "",
"shirt_number": None,
"start_date": "2024-10-01",
"end_date": "2025-06-30",
"notes": "Imported from balldontlie aggregated box scores",
}
],
}
def sync_incremental(self, *, cursor: str | None = None):
payload = self.sync_all()
payload["cursor"] = cursor
return payload
monkeypatch.setattr("apps.ingestion.services.sync.get_provider", lambda namespace: StableProvider())
run_sync_job(provider_namespace="balldontlie", job_type=IngestionRun.JobType.FULL_SYNC)
lebron = Player.objects.get(full_name="LeBron James")
assert lebron.nationality is None
assert not Nationality.objects.filter(iso2_code="ZZ").exists()
counts_first = {
"competition": Competition.objects.count(),
"team": Team.objects.count(),
"season": Season.objects.count(),
"player": Player.objects.count(),
"player_season": PlayerSeason.objects.count(),
"player_stats": PlayerSeasonStats.objects.count(),
"mapping": ExternalMapping.objects.filter(provider_namespace="balldontlie").count(),
}
run_sync_job(provider_namespace="balldontlie", job_type=IngestionRun.JobType.FULL_SYNC)
counts_second = {
"competition": Competition.objects.count(),
"team": Team.objects.count(),
"season": Season.objects.count(),
"player": Player.objects.count(),
"player_season": PlayerSeason.objects.count(),
"player_stats": PlayerSeasonStats.objects.count(),
"mapping": ExternalMapping.objects.filter(provider_namespace="balldontlie").count(),
}
assert counts_first == counts_second
@pytest.mark.django_db
def test_batch_transactions_preserve_prior_step_progress_on_failure(settings, monkeypatch):
settings.PROVIDER_DEFAULT_NAMESPACE = "mvp_demo"
def boom(*args, **kwargs):
raise RuntimeError("teams-sync-failed")
monkeypatch.setattr("apps.ingestion.services.sync._sync_teams", boom)
with pytest.raises(RuntimeError):
run_sync_job(provider_namespace="mvp_demo", job_type=IngestionRun.JobType.FULL_SYNC)
run = IngestionRun.objects.order_by("-id").first()
assert run is not None
assert run.status == IngestionRun.RunStatus.FAILED
assert Competition.objects.exists()
assert Team.objects.count() == 0
assert run.context.get("completed_steps") == ["competitions"]
assert "Unhandled ingestion error" in run.error_summary

View File

@ -0,0 +1,112 @@
import pytest
from contextlib import contextmanager
from celery.schedules import crontab
import psycopg
from django.conf import settings
from apps.ingestion.models import IngestionRun
from apps.ingestion.services.runs import _build_ingestion_lock_key, release_ingestion_lock, try_acquire_ingestion_lock
from apps.ingestion.tasks import scheduled_provider_sync, trigger_incremental_sync
from config.celery import app as celery_app, build_periodic_schedule
@pytest.mark.django_db
def test_periodic_task_registered():
    """The scheduled sync task must be registered on the Celery app."""
    task_name = "apps.ingestion.tasks.scheduled_provider_sync"
    assert task_name in celery_app.tasks
@pytest.mark.django_db
def test_build_periodic_schedule_enabled(settings):
    """With scheduling enabled, the beat schedule exposes a crontab entry."""
    settings.INGESTION_SCHEDULE_ENABLED = True
    settings.INGESTION_SCHEDULE_CRON = "15 * * * *"
    schedule = build_periodic_schedule()
    entry = schedule.get("ingestion.scheduled_provider_sync")
    assert entry is not None
    assert entry["task"] == "apps.ingestion.tasks.scheduled_provider_sync"
    cron = entry["schedule"]
    assert isinstance(cron, crontab)
    # The minute field of the cron expression must survive parsing verbatim.
    assert cron._orig_minute == "15"
@pytest.mark.django_db
def test_build_periodic_schedule_disabled(settings):
    """Disabling the schedule yields an empty beat configuration."""
    settings.INGESTION_SCHEDULE_ENABLED = False
    schedule = build_periodic_schedule()
    assert schedule == {}
@pytest.mark.django_db
def test_build_periodic_schedule_invalid_cron_disables_task_and_logs(settings, caplog):
    """An unparsable cron expression disables the task and logs an error."""
    settings.INGESTION_SCHEDULE_ENABLED = True
    settings.INGESTION_SCHEDULE_CRON = "invalid-cron"
    with caplog.at_level("ERROR"):
        assert build_periodic_schedule() == {}
    expected = "Invalid periodic ingestion schedule config. Task disabled."
    assert any(expected in message for message in caplog.messages)
@pytest.mark.django_db
def test_trigger_incremental_sync_skips_when_advisory_lock_not_acquired(settings, monkeypatch):
    """When the advisory lock is held elsewhere, the task records a canceled run."""
    settings.INGESTION_PREVENT_OVERLAP = True

    @contextmanager
    def _denied_lock(**kwargs):
        # Pretend another worker already holds the lock.
        yield False

    monkeypatch.setattr("apps.ingestion.tasks.ingestion_advisory_lock", _denied_lock)
    run_id = trigger_incremental_sync.apply(kwargs={"provider_namespace": "mvp_demo"}).get()
    run = IngestionRun.objects.get(id=run_id)
    assert run.status == IngestionRun.RunStatus.CANCELED
    assert "advisory lock" in run.error_summary
@pytest.mark.django_db
def test_advisory_lock_prevents_concurrent_acquisition():
    """A PostgreSQL advisory lock held by another connection blocks acquisition.

    Opens a second, independent psycopg connection, grabs the advisory lock
    there, and verifies the Django-side helper cannot acquire it until the
    external holder releases it.
    """
    provider_namespace = "mvp_demo"
    job_type = IngestionRun.JobType.INCREMENTAL
    lock_key = _build_ingestion_lock_key(provider_namespace=provider_namespace, job_type=job_type)
    # Build a raw connection string from Django's configured default database
    # so the lock is taken on a session Django does not manage.
    conninfo = (
        f"dbname={settings.DATABASES['default']['NAME']} "
        f"user={settings.DATABASES['default']['USER']} "
        f"password={settings.DATABASES['default']['PASSWORD']} "
        f"host={settings.DATABASES['default']['HOST']} "
        f"port={settings.DATABASES['default']['PORT']}"
    )
    with psycopg.connect(conninfo) as external_conn:
        with external_conn.cursor() as cursor:
            # Hold the same advisory lock key from the external session.
            cursor.execute("SELECT pg_advisory_lock(%s);", [lock_key])
            acquired, _ = try_acquire_ingestion_lock(
                provider_namespace=provider_namespace,
                job_type=job_type,
            )
            # While the external session holds the lock, acquisition must fail.
            assert acquired is False
            cursor.execute("SELECT pg_advisory_unlock(%s);", [lock_key])
            # After the external release, the Django helper should succeed.
            acquired, django_key = try_acquire_ingestion_lock(
                provider_namespace=provider_namespace,
                job_type=job_type,
            )
            assert acquired is True
            release_ingestion_lock(lock_key=django_key)
@pytest.mark.django_db
def test_scheduled_provider_sync_uses_configured_job_type(settings, monkeypatch):
    """The beat task forwards the configured namespace and job type to the runner."""
    settings.INGESTION_SCHEDULE_JOB_TYPE = IngestionRun.JobType.FULL_SYNC
    settings.INGESTION_SCHEDULE_PROVIDER_NAMESPACE = "mvp_demo"
    seen_kwargs = {}

    def _record_call(**kwargs):
        # Capture what the task hands to the sync runner.
        seen_kwargs.update(kwargs)
        return 99

    monkeypatch.setattr("apps.ingestion.tasks._run_sync_with_overlap_guard", _record_call)
    assert scheduled_provider_sync.apply().get() == 99
    assert seen_kwargs["provider_namespace"] == "mvp_demo"
    assert seen_kwargs["job_type"] == IngestionRun.JobType.FULL_SYNC

View File

@ -4,6 +4,8 @@ import pytest
from django.contrib.auth.models import User
from django.urls import reverse
from apps.ingestion.models import IngestionRun
from apps.ingestion.services.sync import run_sync_job
from apps.players.models import Nationality, Player, Position, Role
from apps.scouting.models import SavedSearch
@ -47,3 +49,25 @@ def test_saved_search_run_filters_player_results(client):
assert response.status_code == 200
assert "Marco Rossi" in response.content.decode()
assert "Luca Bianchi" not in response.content.decode()
@pytest.mark.django_db
def test_ingestion_output_is_searchable_in_ui_and_api(settings, client):
    """Players produced by a full sync are filterable via both UI and API."""
    settings.PROVIDER_DEFAULT_NAMESPACE = "mvp_demo"
    run = run_sync_job(provider_namespace="mvp_demo", job_type=IngestionRun.JobType.FULL_SYNC)
    assert run.status == IngestionRun.RunStatus.SUCCESS
    player = Player.objects.filter(origin_competition__isnull=False).order_by("id").first()
    assert player is not None
    assert player.origin_competition_id is not None
    query = {"origin_competition": player.origin_competition_id}
    ui_response = client.get(reverse("players:index"), data=query)
    assert ui_response.status_code == 200
    api_response = client.get(reverse("api:players"), data=query)
    assert api_response.status_code == 200
    # The same synced player must surface through both surfaces.
    assert player.id in {item.id for item in ui_response.context["players"]}
    assert player.id in {item["id"] for item in api_response.json()["results"]}

190
tests/test_player_origin.py Normal file
View File

@ -0,0 +1,190 @@
from datetime import date
import pytest
from django.urls import reverse
from apps.competitions.models import Competition, Season
from apps.players.models import Nationality, Player, PlayerCareerEntry, Position, Role
from apps.players.services.origin import refresh_player_origin, refresh_player_origins
from apps.teams.models import Team
@pytest.mark.django_db
def test_origin_derivation_uses_earliest_meaningful_career_entry():
    """Origin is derived from the chronologically earliest career entry."""
    nationality = Nationality.objects.create(name="Italy", iso2_code="IT", iso3_code="ITA")
    position = Position.objects.create(code="PG", name="Point Guard")
    role = Role.objects.create(code="playmaker", name="Playmaker")
    player = Player.objects.create(
        first_name="Marco",
        last_name="Rossi",
        full_name="Marco Rossi",
        birth_date=date(2000, 1, 1),
        nationality=nationality,
        nominal_position=position,
        inferred_role=role,
    )
    comp_early = Competition.objects.create(
        name="Lega 2",
        slug="lega-2",
        competition_type=Competition.CompetitionType.LEAGUE,
        gender=Competition.Gender.MEN,
    )
    comp_late = Competition.objects.create(
        name="Lega 1",
        slug="lega-1",
        competition_type=Competition.CompetitionType.LEAGUE,
        gender=Competition.Gender.MEN,
    )
    team_early = Team.objects.create(name="Bologna B", slug="bologna-b", country=nationality)
    team_late = Team.objects.create(name="Bologna A", slug="bologna-a", country=nationality)
    season_early = Season.objects.create(label="2017-2018", start_date=date(2017, 9, 1), end_date=date(2018, 6, 30))
    season_late = Season.objects.create(label="2019-2020", start_date=date(2019, 9, 1), end_date=date(2020, 6, 30))
    # The later entry is created first to prove derivation orders by date,
    # not by insertion order.
    PlayerCareerEntry.objects.create(
        player=player,
        team=team_late,
        competition=comp_late,
        season=season_late,
        start_date=date(2019, 9, 15),
    )
    PlayerCareerEntry.objects.create(
        player=player,
        team=team_early,
        competition=comp_early,
        season=season_early,
        start_date=date(2017, 9, 15),
    )
    changed = refresh_player_origin(player)
    assert changed is True
    player.refresh_from_db()
    # The earliest (2017) entry wins.
    assert player.origin_competition == comp_early
    assert player.origin_team == team_early
@pytest.mark.django_db
def test_origin_unknown_when_no_meaningful_career_entries():
    """A player without career entries keeps an unknown (NULL) origin."""
    country = Nationality.objects.create(name="Spain", iso2_code="ES", iso3_code="ESP")
    forward = Position.objects.create(code="SF", name="Small Forward")
    wing = Role.objects.create(code="wing", name="Wing")
    player = Player.objects.create(
        first_name="Juan",
        last_name="Perez",
        full_name="Juan Perez",
        birth_date=date(2001, 5, 1),
        nationality=country,
        nominal_position=forward,
        inferred_role=wing,
    )
    # Nothing to derive from, so the refresh reports no change.
    assert refresh_player_origin(player) is False
    player.refresh_from_db()
    assert player.origin_competition is None
    assert player.origin_team is None
@pytest.mark.django_db
def test_player_search_filters_by_origin_competition(client):
    """Filtering by origin_competition returns only players with that origin."""
    nationality = Nationality.objects.create(name="France", iso2_code="FR", iso3_code="FRA")
    position = Position.objects.create(code="SG", name="Shooting Guard")
    role = Role.objects.create(code="scorer", name="Scorer")
    origin_a = Competition.objects.create(
        name="LNB Pro A",
        slug="lnb-pro-a-origin",
        competition_type=Competition.CompetitionType.LEAGUE,
        gender=Competition.Gender.MEN,
    )
    origin_b = Competition.objects.create(
        name="LNB Pro B",
        slug="lnb-pro-b-origin",
        competition_type=Competition.CompetitionType.LEAGUE,
        gender=Competition.Gender.MEN,
    )
    # One player per origin; only the origin_a player should match the filter.
    p1 = Player.objects.create(
        first_name="A",
        last_name="One",
        full_name="A One",
        birth_date=date(2000, 1, 1),
        nationality=nationality,
        nominal_position=position,
        inferred_role=role,
        origin_competition=origin_a,
    )
    Player.objects.create(
        first_name="B",
        last_name="Two",
        full_name="B Two",
        birth_date=date(2000, 1, 1),
        nationality=nationality,
        nominal_position=position,
        inferred_role=role,
        origin_competition=origin_b,
    )
    response = client.get(reverse("players:index"), data={"origin_competition": origin_a.id})
    assert response.status_code == 200
    players = list(response.context["players"])
    assert len(players) == 1
    assert players[0].id == p1.id
@pytest.mark.django_db
def test_backfill_refresh_player_origins_updates_existing_players():
    """The bulk backfill updates only players with derivable career data."""
    nationality = Nationality.objects.create(name="Germany", iso2_code="DE", iso3_code="DEU")
    position = Position.objects.create(code="PF", name="Power Forward")
    role = Role.objects.create(code="big", name="Big")
    competition = Competition.objects.create(
        name="BBL",
        slug="bbl-origin",
        competition_type=Competition.CompetitionType.LEAGUE,
        gender=Competition.Gender.MEN,
    )
    team = Team.objects.create(name="Berlin", slug="berlin-origin", country=nationality)
    season = Season.objects.create(label="2018-2019", start_date=date(2018, 9, 1), end_date=date(2019, 6, 30))
    p1 = Player.objects.create(
        first_name="F1",
        last_name="L1",
        full_name="Player One",
        birth_date=date(1999, 1, 1),
        nationality=nationality,
        nominal_position=position,
        inferred_role=role,
    )
    p2 = Player.objects.create(
        first_name="F2",
        last_name="L2",
        full_name="Player Two",
        birth_date=date(1998, 1, 1),
        nationality=nationality,
        nominal_position=position,
        inferred_role=role,
    )
    # Only p1 gets a career entry, so only p1 can gain an origin.
    PlayerCareerEntry.objects.create(
        player=p1,
        team=team,
        competition=competition,
        season=season,
        start_date=date(2018, 9, 10),
    )
    updated = refresh_player_origins(Player.objects.filter(id__in=[p1.id, p2.id]))
    assert updated == 1
    p1.refresh_from_db()
    p2.refresh_from_db()
    assert p1.origin_competition == competition
    assert p1.origin_team == team
    # p2 had no career entries and must remain untouched.
    assert p2.origin_competition is None
    assert p2.origin_team is None

View File

@ -1,10 +1,12 @@
from datetime import date
import pytest
from django.contrib.auth.models import User
from django.urls import reverse
from apps.competitions.models import Competition, Season
from apps.players.models import Nationality, Player, Position, Role
from apps.scouting.models import FavoritePlayer
from apps.stats.models import PlayerSeason, PlayerSeasonStats
from apps.teams.models import Team
@ -81,3 +83,456 @@ def test_player_search_pagination_preserves_querystring(client):
assert response.status_code == 200
assert response.context["page_obj"].number == 2
@pytest.mark.django_db
def test_player_search_combined_filters_sorting_and_pagination(client):
    """Combined filters, ppg sorting, and pagination compose correctly.

    Builds 21 players with strictly decreasing PPG (40 down to 20) so that
    page 1 (size 20) holds the top scorers and page 2 exactly one player.
    """
    nationality = Nationality.objects.create(name="Serbia", iso2_code="RS", iso3_code="SRB")
    position = Position.objects.create(code="SG", name="Shooting Guard")
    role = Role.objects.create(code="scorer", name="Scorer")
    season = Season.objects.create(label="2024-2025", start_date=date(2024, 9, 1), end_date=date(2025, 6, 30))
    competition = Competition.objects.create(
        name="ABA League",
        slug="aba-league",
        competition_type=Competition.CompetitionType.LEAGUE,
        gender=Competition.Gender.MEN,
        country=nationality,
    )
    team = Team.objects.create(name="Belgrade BC", slug="belgrade-bc", country=nationality)
    players = []
    # 21 players; descending ppg so creation order equals expected sort order.
    for idx, ppg in enumerate(range(40, 19, -1), start=1):
        player = Player.objects.create(
            first_name=f"S{idx}",
            last_name=f"Guard{idx}",
            full_name=f"Serbian Guard {idx}",
            birth_date=date(2001, 1, idx),
            nationality=nationality,
            nominal_position=position,
            inferred_role=role,
            origin_competition=competition,
            origin_team=team,
        )
        player_season = PlayerSeason.objects.create(
            player=player,
            season=season,
            team=team,
            competition=competition,
            games_played=20,
            minutes_played=600,
        )
        PlayerSeasonStats.objects.create(
            player_season=player_season,
            points=ppg,
            rebounds=4.0,
            assists=3.0,
            steals=1.0,
            blocks=0.3,
            turnovers=2.0,
        )
        players.append(player)
    response = client.get(
        reverse("players:index"),
        data={
            "origin_competition": competition.id,
            "nominal_position": position.id,
            "sort": "ppg_desc",
            "page_size": 20,
            "page": 1,
        },
    )
    assert response.status_code == 200
    page_items = list(response.context["players"])
    assert len(page_items) == 20
    # Highest scorer (40 ppg) leads page 1.
    assert page_items[0].full_name == players[0].full_name
    assert response.context["page_obj"].has_next()
    page2 = client.get(
        reverse("players:index"),
        data={
            "origin_competition": competition.id,
            "nominal_position": position.id,
            "sort": "ppg_desc",
            "page_size": 20,
            "page": 2,
        },
    )
    assert page2.status_code == 200
    page2_items = list(page2.context["players"])
    # Exactly the 21st player (lowest ppg) spills onto page 2.
    assert [item.full_name for item in page2_items] == [players[20].full_name]
@pytest.mark.django_db
def test_player_search_results_include_favorite_ids(client):
    """The search context exposes the logged-in user's favorite player ids."""
    user = User.objects.create_user(username="fav-check", password="pass12345")
    client.force_login(user)
    country = Nationality.objects.create(name="Croatia", iso2_code="HR", iso3_code="HRV")
    point_guard = Position.objects.create(code="PG", name="Point Guard")
    playmaker = Role.objects.create(code="playmaker", name="Playmaker")
    favorite = Player.objects.create(
        first_name="Niko",
        last_name="Play",
        full_name="Niko Play",
        birth_date=date(2002, 5, 5),
        nationality=country,
        nominal_position=point_guard,
        inferred_role=playmaker,
    )
    FavoritePlayer.objects.create(user=user, player=favorite)
    response = client.get(reverse("players:index"))
    assert response.status_code == 200
    assert favorite.id in response.context["favorite_player_ids"]
@pytest.mark.django_db
def test_combined_reverse_join_filters_do_not_match_across_different_player_seasons(client):
    """Team/season/stat filters must all match the SAME PlayerSeason row.

    Guards against the classic Django reverse-join pitfall where separate
    .filter() conditions on a to-many relation match across different rows.
    """
    nationality = Nationality.objects.create(name="Lithuania", iso2_code="LT", iso3_code="LTU")
    position = Position.objects.create(code="SG", name="Shooting Guard")
    role = Role.objects.create(code="scorer", name="Scorer")
    competition = Competition.objects.create(
        name="LKL",
        slug="lkl",
        competition_type=Competition.CompetitionType.LEAGUE,
        gender=Competition.Gender.MEN,
        country=nationality,
    )
    season = Season.objects.create(label="2025-2026", start_date=date(2025, 9, 1), end_date=date(2026, 6, 30))
    target_team = Team.objects.create(name="Kaunas", slug="kaunas", country=nationality)
    other_team = Team.objects.create(name="Vilnius", slug="vilnius", country=nationality)
    player = Player.objects.create(
        first_name="Jonas",
        last_name="Scope",
        full_name="Jonas Scope",
        birth_date=date(2001, 1, 1),
        nationality=nationality,
        nominal_position=position,
        inferred_role=role,
    )
    # Matching team/season row but low scoring.
    ps_target = PlayerSeason.objects.create(
        player=player,
        season=season,
        team=target_team,
        competition=competition,
        games_played=20,
        minutes_played=400,
    )
    PlayerSeasonStats.objects.create(
        player_season=ps_target,
        points=8.0,
        rebounds=3.0,
        assists=2.0,
        steals=1.0,
        blocks=0.2,
        turnovers=1.5,
    )
    # High-scoring row but different team; should not satisfy combined filter.
    ps_other = PlayerSeason.objects.create(
        player=player,
        season=season,
        team=other_team,
        competition=competition,
        games_played=20,
        minutes_played=400,
    )
    PlayerSeasonStats.objects.create(
        player_season=ps_other,
        points=22.0,
        rebounds=4.0,
        assists=3.0,
        steals=1.2,
        blocks=0.3,
        turnovers=2.0,
    )
    response = client.get(
        reverse("players:index"),
        data={
            "team": target_team.id,
            "season": season.id,
            "competition": competition.id,
            "points_per_game_min": "20",
        },
    )
    assert response.status_code == 200
    # No single season row satisfies all filters at once, so no results.
    assert list(response.context["players"]) == []
@pytest.mark.django_db
def test_displayed_metrics_are_scoped_to_filtered_context(client):
    """Displayed per-game metrics come from the filtered team/season row only."""
    nationality = Nationality.objects.create(name="Turkey", iso2_code="TR", iso3_code="TUR")
    position = Position.objects.create(code="PG", name="Point Guard")
    role = Role.objects.create(code="playmaker", name="Playmaker")
    competition = Competition.objects.create(
        name="BSL",
        slug="bsl",
        competition_type=Competition.CompetitionType.LEAGUE,
        gender=Competition.Gender.MEN,
        country=nationality,
    )
    season = Season.objects.create(label="2025-2026", start_date=date(2025, 9, 1), end_date=date(2026, 6, 30))
    target_team = Team.objects.create(name="Ankara", slug="ankara", country=nationality)
    other_team = Team.objects.create(name="Izmir", slug="izmir", country=nationality)
    player = Player.objects.create(
        first_name="Can",
        last_name="Context",
        full_name="Can Context",
        birth_date=date(2000, 2, 2),
        nationality=nationality,
        nominal_position=position,
        inferred_role=role,
    )
    # Modest numbers on the filtered team.
    ps_target = PlayerSeason.objects.create(
        player=player,
        season=season,
        team=target_team,
        competition=competition,
        games_played=10,
        minutes_played=250,
    )
    PlayerSeasonStats.objects.create(
        player_season=ps_target,
        points=9.0,
        rebounds=2.0,
        assists=4.0,
        steals=1.0,
        blocks=0.1,
        turnovers=2.0,
    )
    # Much stronger numbers on another team, which must NOT leak into the row.
    ps_other = PlayerSeason.objects.create(
        player=player,
        season=season,
        team=other_team,
        competition=competition,
        games_played=12,
        minutes_played=420,
    )
    PlayerSeasonStats.objects.create(
        player_season=ps_other,
        points=24.0,
        rebounds=5.0,
        assists=7.0,
        steals=1.5,
        blocks=0.2,
        turnovers=3.0,
    )
    response = client.get(reverse("players:index"), data={"team": target_team.id, "season": season.id})
    assert response.status_code == 200
    row = list(response.context["players"])[0]
    # 9 ppg and 250 min / 10 games = 25 mpg come from the target-team row.
    assert float(row.ppg_value) == pytest.approx(9.0)
    assert float(row.mpg_value) == pytest.approx(25.0)
@pytest.mark.django_db
def test_displayed_metrics_are_scoped_to_team_season_competition_context(client):
    """Adding a competition filter scopes metrics to that competition's row."""
    nationality = Nationality.objects.create(name="Czechia", iso2_code="CZ", iso3_code="CZE")
    position = Position.objects.create(code="SF", name="Small Forward")
    role = Role.objects.create(code="wing", name="Wing")
    competition_a = Competition.objects.create(
        name="NBL",
        slug="nbl-cz",
        competition_type=Competition.CompetitionType.LEAGUE,
        gender=Competition.Gender.MEN,
        country=nationality,
    )
    competition_b = Competition.objects.create(
        name="CZ Cup",
        slug="cz-cup",
        competition_type=Competition.CompetitionType.CUP,
        gender=Competition.Gender.MEN,
        country=nationality,
    )
    season = Season.objects.create(label="2024-2025", start_date=date(2024, 9, 1), end_date=date(2025, 6, 30))
    team = Team.objects.create(name="Prague", slug="prague", country=nationality)
    player = Player.objects.create(
        first_name="Adam",
        last_name="Scope",
        full_name="Adam Scope",
        birth_date=date(2002, 2, 2),
        nationality=nationality,
        nominal_position=position,
        inferred_role=role,
    )
    # League row: 11 ppg, 300 min / 12 games = 25 mpg.
    ps_a = PlayerSeason.objects.create(
        player=player,
        season=season,
        team=team,
        competition=competition_a,
        games_played=12,
        minutes_played=300,
    )
    PlayerSeasonStats.objects.create(player_season=ps_a, points=11, rebounds=4, assists=3, steals=1, blocks=0.2, turnovers=1.5)
    # Cup row with much higher numbers; must be excluded by the filter.
    ps_b = PlayerSeason.objects.create(
        player=player,
        season=season,
        team=team,
        competition=competition_b,
        games_played=6,
        minutes_played=210,
    )
    PlayerSeasonStats.objects.create(player_season=ps_b, points=24, rebounds=6, assists=5, steals=1.5, blocks=0.3, turnovers=2.2)
    response = client.get(
        reverse("players:index"),
        data={"team": team.id, "season": season.id, "competition": competition_a.id},
    )
    assert response.status_code == 200
    row = list(response.context["players"])[0]
    assert float(row.ppg_value) == pytest.approx(11.0)
    assert float(row.mpg_value) == pytest.approx(25.0)
@pytest.mark.django_db
def test_displayed_metrics_without_season_filter_use_best_eligible_values(client):
    """Without a season filter, each metric shows the player's best eligible value.

    Metrics are taken per-column (best games from one season, best ppg/mpg
    from another), not from a single season row.
    """
    nationality = Nationality.objects.create(name="Slovenia", iso2_code="SI", iso3_code="SVN")
    position = Position.objects.create(code="PG", name="Point Guard")
    role = Role.objects.create(code="playmaker", name="Playmaker")
    competition = Competition.objects.create(
        name="SBL",
        slug="sbl",
        competition_type=Competition.CompetitionType.LEAGUE,
        gender=Competition.Gender.MEN,
        country=nationality,
    )
    season_a = Season.objects.create(label="2023-2024", start_date=date(2023, 9, 1), end_date=date(2024, 6, 30))
    season_b = Season.objects.create(label="2024-2025", start_date=date(2024, 9, 1), end_date=date(2025, 6, 30))
    team = Team.objects.create(name="Ljubljana", slug="ljubljana", country=nationality)
    player = Player.objects.create(
        first_name="Luka",
        last_name="Semantics",
        full_name="Luka Semantics",
        birth_date=date(2001, 3, 3),
        nationality=nationality,
        nominal_position=position,
        inferred_role=role,
    )
    # High games, lower per-game production.
    ps_a = PlayerSeason.objects.create(
        player=player,
        season=season_a,
        team=team,
        competition=competition,
        games_played=34,
        minutes_played=680,  # 20 MPG
    )
    PlayerSeasonStats.objects.create(
        player_season=ps_a,
        points=10.0,
        rebounds=3.0,
        assists=4.0,
        steals=1.0,
        blocks=0.1,
        turnovers=2.0,
    )
    # Lower games, higher per-game production.
    ps_b = PlayerSeason.objects.create(
        player=player,
        season=season_b,
        team=team,
        competition=competition,
        games_played=18,
        minutes_played=630,  # 35 MPG
    )
    PlayerSeasonStats.objects.create(
        player_season=ps_b,
        points=25.0,
        rebounds=6.0,
        assists=8.0,
        steals=1.5,
        blocks=0.2,
        turnovers=3.0,
    )
    response = client.get(reverse("players:index"))
    assert response.status_code == 200
    row = next(item for item in response.context["players"] if item.id == player.id)
    # Best games (34) comes from season A; best mpg (35) and ppg (25) from season B.
    assert float(row.games_played_value) == pytest.approx(34.0)
    assert float(row.mpg_value) == pytest.approx(35.0)
    assert float(row.ppg_value) == pytest.approx(25.0)
@pytest.mark.django_db
def test_ppg_sort_uses_best_eligible_metrics_without_season_filter(client):
    """Unfiltered ppg sorting ranks players by their best eligible season."""
    nationality = Nationality.objects.create(name="Latvia", iso2_code="LV", iso3_code="LVA")
    position = Position.objects.create(code="SG", name="Shooting Guard")
    role = Role.objects.create(code="scorer", name="Scorer")
    competition = Competition.objects.create(
        name="LBL",
        slug="lbl",
        competition_type=Competition.CompetitionType.LEAGUE,
        gender=Competition.Gender.MEN,
        country=nationality,
    )
    season_old = Season.objects.create(label="2022-2023", start_date=date(2022, 9, 1), end_date=date(2023, 6, 30))
    season_new = Season.objects.create(label="2023-2024", start_date=date(2023, 9, 1), end_date=date(2024, 6, 30))
    team = Team.objects.create(name="Riga", slug="riga", country=nationality)
    # Player A: single season at 14 ppg.
    player_a = Player.objects.create(
        first_name="A",
        last_name="Sorter",
        full_name="A Sorter",
        birth_date=date(2000, 1, 1),
        nationality=nationality,
        nominal_position=position,
        inferred_role=role,
    )
    ps_a = PlayerSeason.objects.create(
        player=player_a,
        season=season_old,
        team=team,
        competition=competition,
        games_played=20,
        minutes_played=400,
    )
    PlayerSeasonStats.objects.create(player_season=ps_a, points=14, rebounds=4, assists=3, steals=1, blocks=0.1, turnovers=2)
    # Player B: weak old season (12 ppg) but strong new season (21 ppg).
    player_b = Player.objects.create(
        first_name="B",
        last_name="Sorter",
        full_name="B Sorter",
        birth_date=date(2000, 1, 1),
        nationality=nationality,
        nominal_position=position,
        inferred_role=role,
    )
    # Lower old season.
    ps_b_old = PlayerSeason.objects.create(
        player=player_b,
        season=season_old,
        team=team,
        competition=competition,
        games_played=20,
        minutes_played=400,
    )
    PlayerSeasonStats.objects.create(player_season=ps_b_old, points=12, rebounds=3, assists=2, steals=1, blocks=0.1, turnovers=2)
    # Better newer season; should drive best-eligible ppg sort.
    ps_b_new = PlayerSeason.objects.create(
        player=player_b,
        season=season_new,
        team=team,
        competition=competition,
        games_played=20,
        minutes_played=420,
    )
    PlayerSeasonStats.objects.create(player_season=ps_b_new, points=21, rebounds=5, assists=4, steals=1.2, blocks=0.2, turnovers=2.5)
    response = client.get(reverse("players:index"), data={"sort": "ppg_desc"})
    assert response.status_code == 200
    ordered = [row.full_name for row in response.context["players"] if row.full_name in {"A Sorter", "B Sorter"}]
    # B's best season (21) outranks A's best (14), despite B's weaker old season.
    assert ordered.index("B Sorter") < ordered.index("A Sorter")

View File

@ -135,3 +135,110 @@ def test_player_detail_page_loads(client):
body = response.content.decode()
assert "Paul Martin" in body
assert "P. Martin" in body
@pytest.mark.django_db
def test_player_search_invalid_numeric_filter_shows_errors_and_no_broad_fallback(client):
    """Invalid numeric filters yield form errors and an empty result set, never a broad query."""
    country = Nationality.objects.create(name="Belgium", iso2_code="BE", iso3_code="BEL")
    point_guard = Position.objects.create(code="PG", name="Point Guard")
    playmaker = Role.objects.create(code="playmaker", name="Playmaker")
    Player.objects.create(
        first_name="Any",
        last_name="Player",
        full_name="Any Player",
        birth_date=date(2000, 1, 1),
        nationality=country,
        nominal_position=point_guard,
        inferred_role=playmaker,
    )
    response = client.get(reverse("players:index"), data={"points_per_game_min": "abc", "q": "Any"})
    assert response.status_code == 200
    assert list(response.context["players"]) == []
    assert response.context["search_has_errors"] is True
    assert "points per game min" in response.content.decode().lower()
    # The submitted free-text query must survive into the re-rendered form.
    assert response.context["search_form"]["q"].value() == "Any"
@pytest.mark.django_db
def test_player_search_invalid_choice_filter_shows_errors(client):
    """An unknown sort option surfaces a choice-validation error."""
    response = client.get(reverse("players:index"), data={"sort": "bad-sort"})
    assert response.status_code == 200
    assert response.context["search_has_errors"] is True
    assert list(response.context["players"]) == []
    assert "select a valid choice" in response.content.decode().lower()
@pytest.mark.django_db
def test_player_search_invalid_range_combination_shows_errors(client):
    """age_min greater than age_max is rejected with an explicit range error."""
    response = client.get(reverse("players:index"), data={"age_min": 30, "age_max": 20})
    assert response.status_code == 200
    assert list(response.context["players"]) == []
    assert response.context["search_has_errors"] is True
    page = response.content.decode().lower()
    assert "age max" in page
    # The comparison message may arrive HTML-escaped.
    assert "must be &gt;=" in page or "must be >=" in page
@pytest.mark.django_db
def test_player_search_htmx_invalid_filters_return_validation_feedback(client):
    """HTMX partial responses also carry validation feedback for bad filters."""
    response = client.get(
        reverse("players:index"),
        HTTP_HX_REQUEST="true",
        data={"points_per_game_min": "abc"},
    )
    assert response.status_code == 200
    page = response.content.decode().lower()
    assert "search filters are invalid" in page
    assert "points per game min" in page
@pytest.mark.django_db
def test_player_search_results_render_best_eligible_metric_labels(client):
    """Unfiltered results label metric columns as 'Best Eligible' values."""
    nationality = Nationality.objects.create(name="Ireland", iso2_code="IE", iso3_code="IRL")
    position = Position.objects.create(code="PG", name="Point Guard")
    role = Role.objects.create(code="playmaker", name="Playmaker")
    season = Season.objects.create(label="2025-2026", start_date=date(2025, 9, 1), end_date=date(2026, 6, 30))
    competition = Competition.objects.create(
        name="Super League",
        slug="super-league",
        competition_type=Competition.CompetitionType.LEAGUE,
        gender=Competition.Gender.MEN,
        country=nationality,
    )
    team = Team.objects.create(name="Dublin", slug="dublin", country=nationality)
    player = Player.objects.create(
        first_name="Sean",
        last_name="Label",
        full_name="Sean Label",
        birth_date=date(2001, 1, 1),
        nationality=nationality,
        nominal_position=position,
        inferred_role=role,
    )
    # One season row with stats so the metric columns actually render.
    season_row = PlayerSeason.objects.create(
        player=player,
        season=season,
        team=team,
        competition=competition,
        games_played=10,
        minutes_played=250,
    )
    PlayerSeasonStats.objects.create(
        player_season=season_row,
        points=12,
        rebounds=4,
        assists=5,
        steals=1,
        blocks=0.1,
        turnovers=2,
    )
    response = client.get(reverse("players:index"))
    assert response.status_code == 200
    body = response.content.decode()
    assert "Best Eligible PPG" in body
    assert "Best Eligible MPG" in body
    assert "best eligible values per player" in body.lower()

View File

@ -41,3 +41,37 @@ def test_provider_registry_resolution(settings):
with pytest.raises(ProviderNotFoundError):
get_provider("does-not-exist")
@pytest.mark.django_db
def test_demo_provider_sync_payload_uses_normalized_shape():
    """The demo adapter emits the normalized sync-payload contract."""
    expected_sections = {
        "players",
        "competitions",
        "teams",
        "seasons",
        "player_stats",
        "player_careers",
        "cursor",
    }
    expected_player_fields = {
        "external_id",
        "first_name",
        "last_name",
        "full_name",
        "birth_date",
        "nationality",
        "nominal_position",
        "inferred_role",
        "height_cm",
        "weight_kg",
        "dominant_hand",
        "is_active",
        "aliases",
    }
    payload = MvpDemoProviderAdapter().sync_all()
    assert set(payload.keys()) == expected_sections
    # The demo provider never pages, so no continuation cursor is emitted.
    assert payload["cursor"] is None
    assert set(payload["players"][0].keys()) == expected_player_fields

View File

@ -0,0 +1,263 @@
from __future__ import annotations
import time
from typing import Any
import pytest
import requests
from apps.providers.adapters.balldontlie_provider import BalldontlieProviderAdapter
from apps.providers.adapters.mvp_provider import MvpDemoProviderAdapter
from apps.providers.clients.balldontlie import BalldontlieClient
from apps.providers.exceptions import ProviderRateLimitError, ProviderTransientError, ProviderUnauthorizedError
from apps.providers.registry import get_default_provider_namespace, get_provider
from apps.providers.services.balldontlie_mappings import map_seasons
class _FakeResponse:
def __init__(self, *, status_code: int, payload: dict[str, Any] | None = None, headers: dict[str, str] | None = None, text: str = ""):
self.status_code = status_code
self._payload = payload or {}
self.headers = headers or {}
self.text = text
def json(self):
return self._payload
class _FakeSession:
def __init__(self, responses: list[Any]):
self._responses = responses
self.calls: list[dict[str, Any]] = []
def get(self, *args, **kwargs):
self.calls.append(kwargs)
item = self._responses.pop(0)
if isinstance(item, Exception):
raise item
return item
class _FakeBalldontlieClient:
def get_json(self, path: str, *, params: dict[str, Any] | None = None) -> dict[str, Any]:
if path == "/nba/v1/teams":
return {
"data": [
{
"id": 14,
"full_name": "Los Angeles Lakers",
"abbreviation": "LAL",
}
]
}
return {"data": []}
def list_paginated(
self,
path: str,
*,
params: dict[str, Any] | None = None,
per_page: int = 100,
page_limit: int = 1,
) -> list[dict[str, Any]]:
if path == "/nba/v1/players":
return [
{
"id": 237,
"first_name": "LeBron",
"last_name": "James",
"position": "F",
"team": {"id": 14},
}
]
if path == "/nba/v1/stats":
return [
{
"pts": 20,
"reb": 8,
"ast": 7,
"stl": 1,
"blk": 1,
"turnover": 3,
"fg_pct": 0.5,
"fg3_pct": 0.4,
"ft_pct": 0.9,
"min": "35:12",
"player": {"id": 237},
"team": {"id": 14},
"game": {"season": 2024},
},
{
"pts": 30,
"reb": 10,
"ast": 9,
"stl": 2,
"blk": 0,
"turnover": 4,
"fg_pct": 0.6,
"fg3_pct": 0.5,
"ft_pct": 1.0,
"min": "33:00",
"player": {"id": 237},
"team": {"id": 14},
"game": {"season": 2024},
},
]
return []
@pytest.mark.django_db
def test_provider_registry_backend_selection(settings):
    """PROVIDER_BACKEND selects the adapter; an explicit namespace overrides it."""
    settings.PROVIDER_DEFAULT_NAMESPACE = ""
    settings.PROVIDER_BACKEND = "demo"
    assert get_default_provider_namespace() == "mvp_demo"
    assert isinstance(get_provider(), MvpDemoProviderAdapter)

    settings.PROVIDER_BACKEND = "balldontlie"
    assert get_default_provider_namespace() == "balldontlie"
    assert isinstance(get_provider(), BalldontlieProviderAdapter)

    # An explicit default namespace wins over the backend selection.
    settings.PROVIDER_DEFAULT_NAMESPACE = "mvp_demo"
    assert get_default_provider_namespace() == "mvp_demo"
@pytest.mark.django_db
def test_balldontlie_adapter_maps_payloads(settings):
    """The adapter normalizes raw balldontlie rows into the provider contract."""
    settings.PROVIDER_BALLDONTLIE_SEASONS = [2024]
    payload = BalldontlieProviderAdapter(client=_FakeBalldontlieClient()).sync_all()

    assert payload["competitions"][0]["external_id"] == "competition-nba"
    assert payload["teams"][0]["external_id"] == "team-14"
    assert payload["seasons"][0]["external_id"] == "season-2024"

    stats_row = payload["player_stats"][0]
    # Two game rows collapse into one season line: averaged points, pct on 0-100.
    assert stats_row["games_played"] == 2
    assert stats_row["points"] == 25.0
    assert stats_row["fg_pct"] == 55.0

    player = payload["players"][0]
    assert player["external_id"] == "player-237"
    assert player["nationality"] is None
    assert "current_team_external_id" not in player
    assert set(player.keys()) == {
        "external_id",
        "first_name",
        "last_name",
        "full_name",
        "birth_date",
        "nationality",
        "nominal_position",
        "inferred_role",
        "height_cm",
        "weight_kg",
        "dominant_hand",
        "is_active",
        "aliases",
    }
@pytest.mark.django_db
def test_balldontlie_map_seasons_marks_latest_as_current():
    """map_seasons dedupes input years, sorts ascending, and flags only the newest."""
    rows = map_seasons([2022, 2024, 2023, 2024])
    # Duplicates collapse and output is ordered oldest-to-newest.
    assert [row["external_id"] for row in rows] == [
        "season-2022",
        "season-2023",
        "season-2024",
    ]
    # Exactly one row carries the is_current flag: the latest season.
    flagged = [row["external_id"] for row in rows if row["is_current"]]
    assert flagged == ["season-2024"]
@pytest.mark.django_db
def test_balldontlie_adapter_degrades_when_stats_unauthorized(settings):
    """With strict stats disabled, a 401 on the stats endpoint degrades to
    empty stats/career payloads instead of failing the whole sync."""

    class _StatsForbiddenClient(_FakeBalldontlieClient):
        def list_paginated(self, path: str, *, params=None, per_page=100, page_limit=1):
            # Every endpoint except stats behaves like the normal fake.
            if path != "/nba/v1/stats":
                return super().list_paginated(
                    path, params=params, per_page=per_page, page_limit=page_limit
                )
            raise ProviderUnauthorizedError(
                provider="balldontlie",
                path="stats",
                status_code=401,
                detail="Unauthorized",
            )

    settings.PROVIDER_BALLDONTLIE_SEASONS = [2024]
    settings.PROVIDER_BALLDONTLIE_STATS_STRICT = False
    payload = BalldontlieProviderAdapter(client=_StatsForbiddenClient()).sync_all()

    # Entity payloads survive; stats-derived payloads degrade to empty.
    assert payload["players"]
    assert payload["teams"]
    assert payload["player_stats"] == []
    assert payload["player_careers"] == []
@pytest.mark.django_db
def test_balldontlie_client_retries_after_rate_limit(monkeypatch, settings):
    """A single 429 response is retried and the subsequent 200 payload returned."""
    monkeypatch.setattr(time, "sleep", lambda _: None)  # never wait for real in tests
    settings.PROVIDER_REQUEST_RETRIES = 2
    settings.PROVIDER_REQUEST_RETRY_SLEEP = 0
    fake_session = _FakeSession(
        responses=[
            _FakeResponse(status_code=429, headers={"Retry-After": "0"}),
            _FakeResponse(status_code=200, payload={"data": []}),
        ]
    )
    result = BalldontlieClient(session=fake_session).get_json("players")
    assert result == {"data": []}
@pytest.mark.django_db
def test_balldontlie_client_timeout_retries_then_fails(monkeypatch, settings):
    """Exhausting the retry budget on timeouts raises ProviderTransientError."""
    monkeypatch.setattr(time, "sleep", lambda _: None)  # never wait for real in tests
    settings.PROVIDER_REQUEST_RETRIES = 2
    settings.PROVIDER_REQUEST_RETRY_SLEEP = 0
    slow_session = _FakeSession(
        responses=[requests.Timeout("slow"), requests.Timeout("slow")]
    )
    with pytest.raises(ProviderTransientError):
        BalldontlieClient(session=slow_session).get_json("players")
@pytest.mark.django_db
def test_balldontlie_client_raises_rate_limit_after_max_retries(monkeypatch, settings):
    """Persistent 429 responses surface as ProviderRateLimitError once retries run out."""
    monkeypatch.setattr(time, "sleep", lambda _: None)  # never wait for real in tests
    settings.PROVIDER_REQUEST_RETRIES = 2
    settings.PROVIDER_REQUEST_RETRY_SLEEP = 0
    throttled = _FakeResponse(status_code=429, headers={"Retry-After": "1"})
    limited_session = _FakeSession(
        responses=[
            throttled,
            _FakeResponse(status_code=429, headers={"Retry-After": "1"}),
        ]
    )
    with pytest.raises(ProviderRateLimitError):
        BalldontlieClient(session=limited_session).get_json("players")
@pytest.mark.django_db
def test_balldontlie_client_cursor_pagination(settings):
    """list_paginated follows meta.next_cursor and stops when it is None."""
    fake_session = _FakeSession(
        responses=[
            _FakeResponse(
                status_code=200,
                payload={"data": [{"id": 1}], "meta": {"next_cursor": 101}},
            ),
            _FakeResponse(
                status_code=200,
                payload={"data": [{"id": 2}], "meta": {"next_cursor": None}},
            ),
        ]
    )
    client = BalldontlieClient(session=fake_session)
    assert client.list_paginated("players", per_page=1, page_limit=5) == [
        {"id": 1},
        {"id": 2},
    ]
    # The first request carries no pagination token at all.
    first_params = fake_session.calls[0]["params"]
    assert "page" not in first_params
    assert "cursor" not in first_params
    # The second request resumes from the cursor advertised by the first response.
    assert fake_session.calls[1]["params"]["cursor"] == 101

View File

@ -0,0 +1,68 @@
import os
import subprocess
import sys
import pytest
def _import_settings_module(module: str, env_overrides: dict[str, str]) -> subprocess.CompletedProcess:
env = os.environ.copy()
env.update(env_overrides)
command = [
sys.executable,
"-c",
(
"import importlib; "
f"importlib.import_module('{module}'); "
"print('import-ok')"
),
]
return subprocess.run(command, capture_output=True, text=True, env=env, check=False)
@pytest.mark.django_db
def test_production_settings_reject_default_secret_key():
    """Production settings refuse to import with the placeholder secret key."""
    outcome = _import_settings_module(
        "config.settings.production",
        {
            "DJANGO_ENV": "production",
            "DJANGO_DEBUG": "0",
            "DJANGO_SECRET_KEY": "change-me-in-production",
            "DJANGO_ALLOWED_HOSTS": "app.example.com",
            "DJANGO_CSRF_TRUSTED_ORIGINS": "https://app.example.com",
        },
    )
    assert outcome.returncode != 0
    # The validation error may land on either stream depending on traceback handling.
    combined_output = outcome.stderr + outcome.stdout
    assert "DJANGO_SECRET_KEY is unsafe" in combined_output
@pytest.mark.django_db
def test_production_settings_reject_localhost_csrf_origins():
    """Production settings refuse localhost entries in CSRF trusted origins."""
    outcome = _import_settings_module(
        "config.settings.production",
        {
            "DJANGO_ENV": "production",
            "DJANGO_DEBUG": "0",
            "DJANGO_SECRET_KEY": "A-very-strong-secret-key-for-production-environment-12345",
            "DJANGO_ALLOWED_HOSTS": "app.example.com",
            "DJANGO_CSRF_TRUSTED_ORIGINS": "http://localhost,https://app.example.com",
        },
    )
    assert outcome.returncode != 0
    # The validation error may land on either stream depending on traceback handling.
    combined_output = outcome.stderr + outcome.stdout
    assert "DJANGO_CSRF_TRUSTED_ORIGINS contains unsafe values" in combined_output
@pytest.mark.django_db
def test_development_settings_allow_local_defaults():
    """Development settings import cleanly with the relaxed local defaults."""
    outcome = _import_settings_module(
        "config.settings.development",
        {
            "DJANGO_ENV": "development",
            "DJANGO_DEBUG": "1",
            "DJANGO_SECRET_KEY": "insecure-development-secret",
            "DJANGO_ALLOWED_HOSTS": "localhost,127.0.0.1",
            "DJANGO_CSRF_TRUSTED_ORIGINS": "http://localhost,http://127.0.0.1",
        },
    )
    assert outcome.returncode == 0
    assert "import-ok" in outcome.stdout