SAMI Functionality add
All checks were successful
continuous-integration/drone/push Build is passing

Ekaropolus 2025-09-16 16:18:45 -06:00
parent 8a5189b926
commit 0eb2b393f2
95 changed files with 4077 additions and 173 deletions

Dockerfile
@ -1,40 +1,34 @@
-# Etapa base: Python oficial
-FROM python:3.10-slim as base
+# Dockerfile (prod)
+FROM python:3.10-slim

-# Variables de entorno para producción
-ENV PYTHONDONTWRITEBYTECODE 1
-ENV PYTHONUNBUFFERED 1
+ENV PYTHONDONTWRITEBYTECODE=1 \
+    PYTHONUNBUFFERED=1 \
+    PIP_NO_CACHE_DIR=1

-# Instala dependencias del sistema (incluye lo necesario para mysqlclient)
+# System deps needed by geopandas/shapely/pyproj, mysqlclient, etc.
 RUN apt-get update && apt-get install -y --no-install-recommends \
     build-essential \
     curl \
     libgeos-dev \
     libspatialindex-dev \
-    libproj-dev \
-    proj-data \
-    proj-bin \
-    gdal-bin \
-    libgdal-dev \
-    python3-dev \
-    pkg-config \
+    libproj-dev proj-data proj-bin \
+    gdal-bin libgdal-dev \
+    python3-dev pkg-config \
     default-libmysqlclient-dev \
     && rm -rf /var/lib/apt/lists/*

-# Crea directorio de trabajo
 WORKDIR /app

-# Copia requirements primero (mejor cacheo)
+# Install Python deps first (layer cache friendly)
 COPY requirements.txt .
+RUN python -m pip install --upgrade pip \
+    && pip install --no-cache-dir -r requirements.txt

-# Instala dependencias Python
-RUN pip install --no-cache-dir -r requirements.txt

-# Copia el resto del proyecto
+# Copy project
 COPY . .

-# Expone el puerto del contenedor
-EXPOSE 8000
-# Comando por defecto para producción con Gunicorn
-CMD ["gunicorn", "polisplexity.wsgi:application", "--bind", "0.0.0.0:8000", "--workers=3", "--timeout=120"]
+# Expose prod port (compose overrides CMD/port, but this documents intent)
+EXPOSE 8002
+# Default CMD (compose will override with your shell that migrates, collectstatic, and runs gunicorn:8002)
+CMD ["gunicorn", "polisplexity.wsgi:application", "--bind", "0.0.0.0:8002", "--workers=3", "--timeout=180"]

data/denue/CDMX_cafe.csv (new file)
@ -0,0 +1,6 @@
name,lat,lon,category
Cafe Centro,19.4335,-99.1342,cafe
Cafe Alameda,19.4350,-99.1410,cafe
Cafe Madero,19.4321,-99.1358,cafe
Cafe Zocalo,19.4329,-99.1320,cafe
Cafe Bellas Artes,19.4365,-99.1415,cafe

@ -0,0 +1,9 @@
cell_id,lat,lon,pop
ZC_01,19.4334,-99.1322,1200
ZC_02,19.4318,-99.1339,950
ZC_03,19.4347,-99.1351,800
ZC_04,19.4309,-99.1311,700
ZC_05,19.4360,-99.1405,1100
ZC_06,19.4298,-99.1368,600
ZC_07,19.4382,-99.1450,900
ZC_08,19.4355,-99.1289,750
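This sample population grid, together with CDMX_cafe.csv above, feeds the site-scoring flow added later in this commit (see ScoreBreakdown in the pxy_contracts schemas). A minimal sketch of how demand and competition components could be derived from these files — the haversine radius, the demand/(1+competition) combination, and the popgrid filename are illustrative assumptions, not the committed pxy_sites logic:

```python
# Illustrative only: naive demand/competition scoring over the sample CSVs.
# The popgrid path, radius, and weighting are assumptions, not pxy_sites code.
import csv
import math

def haversine_km(lat1, lon1, lat2, lon2):
    """Great-circle distance in kilometres."""
    dlat = math.radians(lat2 - lat1)
    dlon = math.radians(lon2 - lon1)
    a = (math.sin(dlat / 2) ** 2
         + math.cos(math.radians(lat1)) * math.cos(math.radians(lat2))
         * math.sin(dlon / 2) ** 2)
    return 2 * 6371.0 * math.asin(math.sqrt(a))

def load_rows(path):
    with open(path, newline="") as f:
        return list(csv.DictReader(f))

def score_candidate(lat, lon, popgrid, pois, radius_km=1.0):
    # Demand: population within the radius; competition: same-category POIs nearby.
    demand = sum(float(c["pop"]) for c in popgrid
                 if haversine_km(lat, lon, float(c["lat"]), float(c["lon"])) <= radius_km)
    competition = sum(1 for p in pois
                      if haversine_km(lat, lon, float(p["lat"]), float(p["lon"])) <= radius_km)
    return demand / (1.0 + competition)

popgrid = load_rows("data/denue/CDMX_popgrid.csv")  # assumed filename for the grid above
pois = load_rows("data/denue/CDMX_cafe.csv")
print(score_candidate(19.4334, -99.1322, popgrid, pois))
```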

@ -0,0 +1,4 @@
city,value
CDMX,100
GDL,55
MTY,60

data/sami/population.csv (new file)
@ -0,0 +1,4 @@
city,N
CDMX,9209944
GDL,5269191
MTY,5341174
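population.csv supplies N for the log–log fit behind the SAMI scores that SAMIRunResponse reports later in this commit (α, β, R², z-scored residuals). A minimal sketch of that computation under the standard scaling model log Y = α + β·log N, pairing this file with the city,value CSV above — plain-Python OLS for illustration, not the committed pxy_sami implementation:

```python
# Illustrative SAMI fit over the two sample CSVs: OLS on log(value) ~ log(N),
# then z-scored residuals. Not the committed pxy_sami code.
import math

data = {  # joined from the indicator CSV (value) and population.csv (N)
    "CDMX": (100.0, 9_209_944),
    "GDL": (55.0, 5_269_191),
    "MTY": (60.0, 5_341_174),
}

xs = [math.log(n) for _, n in data.values()]
ys = [math.log(v) for v, _ in data.values()]
mx, my = sum(xs) / len(xs), sum(ys) / len(ys)

beta = sum((x - mx) * (y - my) for x, y in zip(xs, ys)) / sum((x - mx) ** 2 for x in xs)
alpha = my - beta * mx

resid = {c: y - (alpha + beta * x) for c, x, y in zip(data, xs, ys)}
mu = sum(resid.values()) / len(resid)
sd = (sum((r - mu) ** 2 for r in resid.values()) / len(resid)) ** 0.5 or 1.0
sami = {c: (r - mu) / sd for c, r in resid.items()}  # z-like scores, as in SAMICity

ss_res = sum((y - (alpha + beta * x)) ** 2 for x, y in zip(xs, ys))
ss_tot = sum((y - my) ** 2 for y in ys)
print(f"beta={beta:.3f} r2={1 - ss_res / ss_tot:.3f} sami={sami}")
```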

docker-compose.yml
@ -36,6 +36,7 @@ services:
 - static_data:/app/static
 - media_data:/app/media
 - ./staticfiles:/app/staticfiles
+- ./data:/app/polisplexity/data:ro
 # - .:/app  # ←❌ Do not use in production: keep disabled so it does not overwrite the code in the image
volumes:

main.png (new binary file, 162 KiB)

settings.py
@ -52,6 +52,14 @@ INSTALLED_APPS = [
"pxy_dashboard.layouts",
"pxy_building_digital_twins",
"pxy_messenger",
'pxy_contracts',
'pxy_sami',
'pxy_routing',
'pxy_sites',
"rest_framework",
"pxy_api",
# Third-party apps
"crispy_forms",
@ -139,7 +147,8 @@ STATICFILES_DIRS = [
]
MEDIA_URL = "/media/"
-MEDIA_ROOT = os.path.join(BASE_DIR, "mediafiles")
+MEDIA_ROOT = BASE_DIR / "media"
# Default primary key field type
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
@ -182,3 +191,27 @@ MESSENGER_VERIFY_TOKEN = os.getenv("MESSENGER_VERIFY_TOKEN", "dev-change-me")
FACEBOOK_APP_SECRET = os.getenv("FACEBOOK_APP_SECRET", "") # set this in .env for prod
REST_FRAMEWORK = {
# Auth disabled for now to avoid CSRF issues with curl
"DEFAULT_AUTHENTICATION_CLASSES": [],
# Global + per-scope throttling
"DEFAULT_THROTTLE_CLASSES": [
"rest_framework.throttling.AnonRateThrottle",
"rest_framework.throttling.UserRateThrottle",
"rest_framework.throttling.ScopedRateThrottle",
],
"DEFAULT_THROTTLE_RATES": {
"anon": "100/hour",
"user": "1000/hour",
"sami_run": "30/minute",
"sites_search": "15/minute",
"routing_isochrone": "60/minute",
"routing_health": "120/minute",
"sami_health": "120/minute",
"sites_health": "120/minute",
},
# Uniform error handling
"EXCEPTION_HANDLER": "pxy_api.exceptions.envelope_exception_handler",
}
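The scoped rates only bind to views that declare a matching throttle_scope. A sketch of how, say, the SAMI run endpoint could opt into the "sami_run" bucket — SAMIRunView is a hypothetical name, not necessarily the committed pxy_sami view:

```python
# Sketch: binding a DRF view to one of the scoped rates above.
from rest_framework.views import APIView
from rest_framework.response import Response

class SAMIRunView(APIView):
    # Matches DEFAULT_THROTTLE_RATES["sami_run"]; picked up by the
    # ScopedRateThrottle already listed in DEFAULT_THROTTLE_CLASSES.
    throttle_scope = "sami_run"

    def post(self, request):
        return Response({"ok": True})  # placeholder body
```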

urls.py
@ -38,8 +38,20 @@ urlpatterns = [
namespace="pxy_building_digital_twins"),
),
path("messenger/", include("pxy_messenger.urls")),
path("", include("pxy_sami.api.urls")),
path("", include("pxy_routing.api.urls")),
path("", include("pxy_sites.api.urls")),
path("", include("pxy_de.urls")),
path("share/", include("pxy_dashboard.share_urls")), # ← NEW
path("api/", include("pxy_bots.api.urls")),
]
if settings.DEBUG:
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)

preview.png (new binary file, 110 KiB)

pxy_api/__init__.py (new file)
@ -0,0 +1 @@
default_app_config = "pxy_api.apps.PxyApiConfig"

pxy_api/apps.py (new file)
@ -0,0 +1,5 @@
from django.apps import AppConfig
class PxyApiConfig(AppConfig):
default_auto_field = "django.db.models.BigAutoField"
name = "pxy_api"

pxy_api/exceptions.py (new file)
@ -0,0 +1,57 @@
from __future__ import annotations
import uuid, traceback
from django.conf import settings
from rest_framework.views import exception_handler as drf_exception_handler
from rest_framework.response import Response
from rest_framework import status
from rest_framework.exceptions import ValidationError as DRFValidationError
from pydantic import ValidationError as PydValidationError
def envelope_exception_handler(exc, context):
"""
Envuelve *todas* las excepciones DRF en:
{ ok: false, code, message, errors?, hint?, trace_id, detail?(DEBUG) }
"""
resp = drf_exception_handler(exc, context)
trace_id = str(uuid.uuid4())
if resp is not None:
        # DRF already resolved a reasonable status_code
code = getattr(exc, "default_code", "error")
message = None
if isinstance(exc, DRFValidationError):
message = "Validation error"
else:
            # fall back to a short string
message = str(getattr(exc, "detail", "")) or exc.__class__.__name__
data = {
"ok": False,
"code": code,
"message": message,
"errors": resp.data, # DRF normaliza los errores aquí
"hint": None,
"trace_id": trace_id,
}
if settings.DEBUG:
data["detail"] = _short_trace()
return Response(data, status=resp.status_code)
    # Exception not handled by DRF -> 500
data = {
"ok": False,
"code": "server_error",
"message": "Unexpected server error",
"hint": None,
"trace_id": trace_id,
}
if settings.DEBUG:
data["detail"] = _short_trace()
return Response(data, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
def _short_trace():
try:
return "\n".join(traceback.format_exc().splitlines()[-6:])
except Exception:
return None
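For reference, a client hitting a validation failure through this handler would see an envelope shaped like the following (field values illustrative):

```python
# Example envelope for a DRF ValidationError (values illustrative).
example = {
    "ok": False,
    "code": "invalid",
    "message": "Validation error",
    "errors": {"cities": ["This field is required."]},
    "hint": None,
    "trace_id": "7b0c4a4e-...",  # fresh uuid4 per response
    # "detail": "...",  # last 6 traceback lines, only when settings.DEBUG
}
```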

pxy_bots (2).zip (new binary file)
pxy_bots.zip (new binary file)

pxy_bots/api/urls.py (new file)
@ -0,0 +1,7 @@
# pxy_bots/api/urls.py
from django.urls import path
from . import views
urlpatterns = [
path("bots/health/", views.health, name="pxy_bots_health"),
]

pxy_bots/api/views.py (new file)
@ -0,0 +1,8 @@
# pxy_bots/api/views.py
import json
from django.http import JsonResponse, HttpResponse
from django.views.decorators.csrf import csrf_exempt
def health(request):
return JsonResponse({"ok": True, "service": "pxy_bots", "schema_ready": ["req.v1", "render.v1"]})

pxy_bots/canonical.py (new file)
@ -0,0 +1,126 @@
# pxy_bots/canonical.py
from typing import Any, Dict, Optional
def _pick_photo(sizes):
# Telegram sends photos as array of sizes; pick the largest
if not sizes:
return None
sizes = sorted(sizes, key=lambda s: (s.get("width", 0) * s.get("height", 0)), reverse=True)
top = sizes[0]
return {
"type": "photo",
"file_id": top.get("file_id"),
"mime": "image/jpeg", # Telegram photos are JPEG
"size_bytes": None, # Telegram doesn't include bytes here; leave None
"width": top.get("width"),
"height": top.get("height"),
}
def _extract_media(msg: Dict[str, Any]) -> Optional[Dict[str, Any]]:
if "photo" in msg:
return _pick_photo(msg.get("photo") or [])
if "voice" in msg:
v = msg["voice"]
return {"type": "voice", "file_id": v.get("file_id"), "mime": v.get("mime_type"), "size_bytes": v.get("file_size"), "duration": v.get("duration")}
if "audio" in msg:
a = msg["audio"]
return {"type": "audio", "file_id": a.get("file_id"), "mime": a.get("mime_type"), "size_bytes": a.get("file_size"), "duration": a.get("duration")}
if "video" in msg:
v = msg["video"]
return {"type": "video", "file_id": v.get("file_id"), "mime": v.get("mime_type"), "size_bytes": v.get("file_size"), "duration": v.get("duration"), "width": v.get("width"), "height": v.get("height")}
if "video_note" in msg:
v = msg["video_note"]
return {"type": "video_note", "file_id": v.get("file_id"), "mime": None, "size_bytes": v.get("file_size"), "duration": v.get("duration"), "length": v.get("length")}
if "animation" in msg:
a = msg["animation"]
return {"type": "animation", "file_id": a.get("file_id"), "mime": a.get("mime_type"), "size_bytes": a.get("file_size")}
if "document" in msg:
d = msg["document"]
return {"type": "document", "file_id": d.get("file_id"), "mime": d.get("mime_type"), "size_bytes": d.get("file_size"), "file_name": d.get("file_name")}
return None
def build_req_v1(update: Dict[str, Any], bot_name: str) -> Dict[str, Any]:
"""
Normalize a Telegram update into our canonical req.v1 envelope.
Pure function. No network, no state.
"""
schema_version = "req.v1"
update_id = update.get("update_id")
# Determine primary container: message, edited_message, callback_query
msg = update.get("message") or update.get("edited_message")
cbq = update.get("callback_query")
# Chat/user basics
if msg:
chat = msg.get("chat") or {}
user = msg.get("from") or {}
message_id = msg.get("message_id")
ts = msg.get("date")
text = msg.get("text")
caption = msg.get("caption")
location = msg.get("location")
media = _extract_media(msg)
trigger = "message"
elif cbq:
m = cbq.get("message") or {}
chat = m.get("chat") or {}
user = cbq.get("from") or {}
message_id = m.get("message_id")
ts = m.get("date") or None
text = None
caption = None
location = None
media = None
trigger = "callback"
else:
# Fallback for other update types we haven't mapped yet
chat = {}
user = update.get("from") or {}
message_id = None
ts = None
text = None
caption = None
location = None
media = None
trigger = "unknown"
# Command name (if text/caption starts with '/')
raw_cmd = None
if text and isinstance(text, str) and text.startswith("/"):
raw_cmd = text.split()[0][1:]
elif caption and isinstance(caption, str) and caption.startswith("/"):
raw_cmd = caption.split()[0][1:]
elif cbq and isinstance(cbq.get("data"), str):
raw_cmd = None # callbacks carry 'action' instead
# Build envelope
env = {
"schema_version": schema_version,
"bot": {"username": bot_name},
"chat": {"id": chat.get("id"), "type": chat.get("type")},
"user": {"id": user.get("id"), "language": user.get("language_code")},
"command": {
"name": raw_cmd,
"version": 1,
"trigger": ("text_command" if raw_cmd and trigger == "message" else ("callback" if trigger == "callback" else trigger)),
},
"input": {
"text": text,
"caption": caption,
"args_raw": text or caption,
"media": media,
"location": ({"lat": location.get("latitude"), "lon": location.get("longitude")} if location else None),
},
"callback": (
{"id": cbq.get("id"), "data": cbq.get("data"), "origin": {"message_id": message_id, "chat_id": chat.get("id")}}
if cbq else None
),
"context": {
"message_id": message_id,
"update_id": update_id,
"ts": ts,
"idempotency_key": f"tg:{message_id}:{user.get('id')}" if message_id and user.get("id") else None,
},
}
return env
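A quick usage sketch of the builder — the update payload below is hand-built and minimal, not a captured Telegram update:

```python
# Hand-built minimal update run through build_req_v1.
from pxy_bots.canonical import build_req_v1

update = {
    "update_id": 1001,
    "message": {
        "message_id": 42,
        "date": 1726500000,
        "chat": {"id": -100123, "type": "group"},
        "from": {"id": 777, "language_code": "es"},
        "text": "/start hello",
    },
}

env = build_req_v1(update, bot_name="PepeBasuritaCoinsBot")
assert env["command"]["name"] == "start"
assert env["command"]["trigger"] == "text_command"
assert env["input"]["args_raw"] == "/start hello"
assert env["context"]["idempotency_key"] == "tg:42:777"
```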

pxy_bots/views.py
@ -1,10 +1,12 @@
# pxy_bots/views.py
import os
import json
import logging
from typing import Any, Dict, Optional
import openai
from telegram import Update, Bot
-from django.http import JsonResponse
+from django.http import JsonResponse, HttpResponse
from django.views.decorators.csrf import csrf_exempt
from asgiref.sync import sync_to_async
@ -20,15 +22,141 @@ from .handlers import (
logger = logging.getLogger(__name__)
openai.api_key = os.getenv("OPENAI_API_KEY")
# ---------------------------
# Canonical req.v1 builder
# ---------------------------
-async def handle_location_message(update):
-    if update.message.location:
def _pick_photo(sizes):
if not sizes:
return None
sizes = sorted(sizes, key=lambda s: (s.get("width", 0) * s.get("height", 0)), reverse=True)
top = sizes[0]
return {
"type": "photo",
"file_id": top.get("file_id"),
"mime": "image/jpeg",
"size_bytes": None,
"width": top.get("width"),
"height": top.get("height"),
}
def _extract_media(msg: Dict[str, Any]) -> Optional[Dict[str, Any]]:
if "photo" in msg:
return _pick_photo(msg.get("photo") or [])
if "voice" in msg:
v = msg["voice"]
return {"type": "voice", "file_id": v.get("file_id"), "mime": v.get("mime_type"), "size_bytes": v.get("file_size"), "duration": v.get("duration")}
if "audio" in msg:
a = msg["audio"]
return {"type": "audio", "file_id": a.get("file_id"), "mime": a.get("mime_type"), "size_bytes": a.get("file_size"), "duration": a.get("duration")}
if "video" in msg:
v = msg["video"]
return {"type": "video", "file_id": v.get("file_id"), "mime": v.get("mime_type"), "size_bytes": v.get("file_size"), "duration": v.get("duration"), "width": v.get("width"), "height": v.get("height")}
if "video_note" in msg:
v = msg["video_note"]
return {"type": "video_note", "file_id": v.get("file_id"), "mime": None, "size_bytes": v.get("file_size"), "duration": v.get("duration"), "length": v.get("length")}
if "animation" in msg:
a = msg["animation"]
return {"type": "animation", "file_id": a.get("file_id"), "mime": a.get("mime_type"), "size_bytes": a.get("file_size")}
if "document" in msg:
d = msg["document"]
return {"type": "document", "file_id": d.get("file_id"), "mime": d.get("mime_type"), "size_bytes": d.get("file_size"), "file_name": d.get("file_name")}
return None
def build_req_v1(update: Dict[str, Any], bot_name: str) -> Dict[str, Any]:
"""
Normalize a Telegram update into our canonical req.v1 envelope.
Pure function. No network, no state.
"""
schema_version = "req.v1"
update_id = update.get("update_id")
msg = update.get("message") or update.get("edited_message")
cbq = update.get("callback_query")
if msg:
chat = msg.get("chat") or {}
user = msg.get("from") or {}
message_id = msg.get("message_id")
ts = msg.get("date")
text = msg.get("text")
caption = msg.get("caption")
location = msg.get("location")
media = _extract_media(msg)
trigger = "message"
elif cbq:
m = cbq.get("message") or {}
chat = m.get("chat") or {}
user = cbq.get("from") or {}
message_id = m.get("message_id")
ts = m.get("date") or None
text = None
caption = None
location = None
media = None
trigger = "callback"
else:
chat = {}
user = update.get("from") or {}
message_id = None
ts = None
text = None
caption = None
location = None
media = None
trigger = "unknown"
raw_cmd = None
if text and isinstance(text, str) and text.startswith("/"):
raw_cmd = text.split()[0][1:]
elif caption and isinstance(caption, str) and caption.startswith("/"):
raw_cmd = caption.split()[0][1:]
elif cbq and isinstance(cbq.get("data"), str):
raw_cmd = None # callbacks carry 'data' instead
env = {
"schema_version": schema_version,
"bot": {"username": bot_name},
"chat": {"id": chat.get("id"), "type": chat.get("type")},
"user": {"id": user.get("id"), "language": user.get("language_code")},
"command": {
"name": raw_cmd,
"version": 1,
"trigger": ("text_command" if raw_cmd and trigger == "message"
else ("callback" if trigger == "callback" else trigger)),
},
"input": {
"text": text,
"caption": caption,
"args_raw": text or caption,
"media": media,
"location": ({"lat": location.get("latitude"), "lon": location.get("longitude")} if location else None),
},
"callback": (
{"id": cbq.get("id"), "data": cbq.get("data"),
"origin": {"message_id": message_id, "chat_id": chat.get("id")}}
if cbq else None
),
"context": {
"message_id": message_id,
"update_id": update_id,
"ts": ts,
"idempotency_key": f"tg:{message_id}:{user.get('id')}" if message_id and user.get("id") else None,
},
}
return env
# ---------------------------
# Existing helper flows
# ---------------------------
async def handle_location_message(update: Update):
if update.message and update.message.location:
await handle_location(update)
return True
return False
-async def dispatch_citizen_commands(update, text):
+async def dispatch_citizen_commands(update: Update, text: str):
if text == "/start":
await start(update)
elif text == "/help":
@ -45,8 +173,7 @@ async def dispatch_citizen_commands(update, text):
return False
return True
-async def dispatch_city_commands(update, text):
+async def dispatch_city_commands(update: Update, text: str):
if text == "/start":
await start(update)
elif text == "/help":
@ -63,8 +190,7 @@ async def dispatch_city_commands(update, text):
return False
return True
-async def dispatch_private_commands(update, text):
+async def dispatch_private_commands(update: Update, text: str):
if text == "/start":
await start(update)
elif text == "/help":
@ -83,31 +209,35 @@ async def dispatch_private_commands(update, text):
return False
return True
-async def transcribe_with_whisper(update, bot):
-    # 1) Descarga el audio
+async def transcribe_with_whisper(update: Update, bot: Bot) -> Optional[str]:
+    # 1) Download audio from Telegram
tg_file = await bot.get_file(update.message.voice.file_id)
download_path = f"/tmp/{update.message.voice.file_id}.ogg"
await tg_file.download_to_drive(download_path)
-    # 2) Llama al endpoint de transcripción
+    # 2) Transcribe (OpenAI)
with open(download_path, "rb") as audio:
-        # Como response_format="text", esto retorna un str
         transcript_str = openai.audio.transcriptions.create(
-            model="gpt-4o-transcribe",  # o "whisper-1"
+            model="gpt-4o-transcribe",  # or "whisper-1"
             file=audio,
             response_format="text",
-            language="es"
+            language="es",
         )
-    return transcript_str.strip()
+    return transcript_str.strip() if transcript_str else None
# ---------------------------
# Webhook
# ---------------------------
@csrf_exempt
-async def telegram_webhook(request, bot_name):
+async def telegram_webhook(request, bot_name: str):
     try:
-        logger.info(f"Webhook called for bot: {bot_name}")
+        logger.info("Webhook called for bot=%s", bot_name)

-        # Carga bot (solo ORM en sync_to_async)
+        if request.method != "POST":
+            return HttpResponse(status=405)
+
+        # Load bot (sync ORM via sync_to_async)
try:
bot_instance = await sync_to_async(TelegramBot.objects.get)(
name=bot_name, is_active=True
@ -117,34 +247,44 @@ async def telegram_webhook(request, bot_name):
if not bot_instance.assistant:
return JsonResponse({"error": "Assistant not configured."}, status=400)
if request.method != "POST":
return JsonResponse({"error": "Invalid request method"}, status=400)
payload = json.loads(request.body.decode("utf-8"))
# Parse raw payload
try:
payload = json.loads(request.body.decode("utf-8") or "{}")
except json.JSONDecodeError:
return JsonResponse({"ok": False, "error": "invalid_json"}, status=400)
# Build canonical req.v1 (LOG ONLY for now)
try:
canon = build_req_v1(payload, bot_name)
logger.info("tg.canonical env=%s", json.dumps(canon, ensure_ascii=False))
except Exception as e:
logger.exception("tg.canonical.failed: %s", e)
# Convert to telegram.Update
update = Update.de_json(payload, Bot(token=bot_instance.token))
if not update.message:
# No message (e.g., callback handled elsewhere in legacy); ack anyway
return JsonResponse({"status": "no message"})
-        # 1) Geolocalización
+        # 1) Location first
if await handle_location_message(update):
return JsonResponse({"status": "ok"})
-        # 2) Voz: transcribe y report_trash
+        # 2) Voice → transcribe → LLM reply
if update.message.voice:
bot = Bot(token=bot_instance.token)
transcript = await transcribe_with_whisper(update, bot)
if not transcript:
-                await update.message.reply_text(
-                    "No pude entender tu mensaje de voz. Intenta de nuevo."
-                )
+                await update.message.reply_text("No pude entender tu mensaje de voz. Intenta de nuevo.")
return JsonResponse({"status": "ok"})
assistant_instance = await sync_to_async(LangchainAIService)(bot_instance.assistant)
bot_response = await sync_to_async(assistant_instance.generate_response)(transcript)
await update.message.reply_text(bot_response)
return JsonResponse({"status": "ok"})
-        # 3) Comandos de texto
+        # 3) Text commands by bot persona
text = update.message.text or ""
if bot_name == "PepeBasuritaCoinsBot" and await dispatch_citizen_commands(update, text):
return JsonResponse({"status": "ok"})
@ -153,7 +293,7 @@ async def telegram_webhook(request, bot_name):
if bot_name == "PepeMotitoBot" and await dispatch_private_commands(update, text):
return JsonResponse({"status": "ok"})
-        # 4) Fallback LLM
+        # 4) Fallback LLM for any other text
assistant_instance = await sync_to_async(LangchainAIService)(bot_instance.assistant)
bot_response = await sync_to_async(assistant_instance.generate_response)(text)
await update.message.reply_text(bot_response)
@ -161,5 +301,5 @@ async def telegram_webhook(request, bot_name):
return JsonResponse({"status": "ok"})
except Exception as e:
logger.error(f"Error in webhook: {e}")
logger.exception("Error in webhook: %s", e)
return JsonResponse({"error": f"Unexpected error: {str(e)}"}, status=500)

pxy_contracts.zip (new binary file)

pxy_contracts/admin.py (new file)

@ -0,0 +1,3 @@
from django.contrib import admin
# Register your models here.

pxy_contracts/apps.py (new file)

@ -0,0 +1,6 @@
from django.apps import AppConfig
class PxyContractsConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'pxy_contracts'


@ -0,0 +1,9 @@
from .sami import SAMIRunRequest, SAMIRunResponse, SAMICity, SAMIPoint
from .sites import SiteSearchRequest, SiteSearchResponse, CandidateSite, ScoreBreakdown
__all__ = [
# SAMI
"SAMIRunRequest", "SAMIRunResponse", "SAMICity", "SAMIPoint",
# Sites
"SiteSearchRequest", "SiteSearchResponse", "CandidateSite", "ScoreBreakdown",
]


@ -0,0 +1,48 @@
from __future__ import annotations
from typing import List, Optional
from pydantic import BaseModel, Field
from ..version import SPEC_VERSION
class SAMICity(BaseModel):
"""Per-city SAMI score (size-adjusted residual)."""
city: str = Field(..., description="City name or code")
sami: float = Field(..., description="Size-adjusted residual (z-like score)")
rank: Optional[int] = Field(None, description="Rank among requested cities")
class SAMIPoint(BaseModel):
"""Raw point used in the fit (optionally with logs for client plots)."""
city: str = Field(..., description="City name or code")
value: float = Field(..., description="Indicator value (Y)")
N: float = Field(..., description="Scale variable, typically population (N)")
log_value: Optional[float] = Field(None, description="log(value) if computed server-side")
log_N: Optional[float] = Field(None, description="log(N) if computed server-side")
class SAMIRunRequest(BaseModel):
"""Request to run SAMI for an indicator over a set of cities."""
cities: List[str] = Field(..., description="Cities to evaluate")
indicator: str = Field(..., description="Indicator id, e.g., imss_wages_2023")
data_release: Optional[str] = Field(None, description="Data snapshot id, e.g., inegi_sun_2020_r1")
class SAMIRunResponse(BaseModel):
"""SAMI run output (fit metrics, per-city scores, and optional assets)."""
model_id: str = Field("sami-ols-v2.0.0", description="Model identifier")
spec_version: str = Field(SPEC_VERSION, description="Contracts spec version")
run_id: str = Field(..., description="UUID for this run")
indicator: str = Field(..., description="Indicator id echoed back")
beta: float = Field(..., description="Scaling exponent β")
r2: float = Field(..., description="Coefficient of determination")
residuals: List[SAMICity] = Field(..., description="Per-city SAMI results")
chart_url: Optional[str] = Field(None, description="PNG/SVG chart URL if available")
data_release: Optional[str] = Field(None, description="Data snapshot used")
warnings: Optional[List[str]] = Field(None, description="Any non-fatal warnings")
# 56B additions (optional for backward compatibility)
    alpha: Optional[float] = Field(None, description="Intercept α of the log–log OLS fit")
points: Optional[List[SAMIPoint]] = Field(
None,
description="Raw per-city points (value, N, logs) used in the fit",
)
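A round-trip sketch of these models — the pxy_contracts.schemas import path is assumed from the package __init__ above, and the serialization call is for pydantic v2:

```python
# Building a response by hand to show the contract shape (values illustrative).
import uuid
from pxy_contracts.schemas import SAMIRunRequest, SAMIRunResponse, SAMICity

req = SAMIRunRequest(cities=["CDMX", "GDL", "MTY"], indicator="imss_wages_2023")
resp = SAMIRunResponse(
    run_id=str(uuid.uuid4()),
    indicator=req.indicator,
    beta=1.12,
    r2=0.94,
    residuals=[
        SAMICity(city="CDMX", sami=0.8, rank=1),
        SAMICity(city="MTY", sami=-0.3, rank=2),
        SAMICity(city="GDL", sami=-0.5, rank=3),
    ],
)
print(resp.model_dump_json(indent=2))  # pydantic v2; use resp.json() on v1
```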


@ -0,0 +1,55 @@
from __future__ import annotations
from typing import List, Optional, Dict, Tuple
from pydantic import BaseModel, Field, confloat
from ..version import SPEC_VERSION
class ScoreBreakdown(BaseModel):
    demand: confloat(ge=0, le=1) = Field(..., description="Normalized demand component (0–1)")
    competition: confloat(ge=0, le=1) = Field(..., description="Competition penalty (0–1, higher = better after penalty)")
    access: confloat(ge=0, le=1) = Field(..., description="Accessibility component (0–1)")
class CandidateSite(BaseModel):
lat: float = Field(..., description="Latitude (WGS84)")
lon: float = Field(..., description="Longitude (WGS84)")
    score: confloat(ge=0, le=1) = Field(..., description="Final normalized score (0–1)")
breakdown: Optional[ScoreBreakdown] = Field(None, description="Score components")
reasons: Optional[List[str]] = Field(None, description="Human-readable justifications")
address: Optional[str] = Field(None, description="Optional address or label")
grid_id: Optional[str] = Field(None, description="Optional grid/AGEB/cell identifier")
class SiteSearchRequest(BaseModel):
city: str = Field(..., description="City id/name (e.g., CDMX)")
business: str = Field(..., description="Business type (e.g., cafe, farmacia)")
time_bands: List[int] = Field(..., description="Isochrone minutes, e.g., [10,20,30]")
max_candidates: int = Field(3, description="How many top sites to return")
data_release: Optional[str] = Field(None, description="Data snapshot id (e.g., denue_2024q4)")
center: Optional[Tuple[float, float]] = Field(
None, description="Optional center [lat, lon] for access calculations"
)
num_samples: int = Field(
12, ge=1, le=50,
description="How many candidate points to sample when center is provided"
)
class SiteSearchResponse(BaseModel):
model_id: str = Field("site-score-v0.1.0", description="Model identifier")
spec_version: str = Field(SPEC_VERSION, description="Contracts spec version")
search_id: str = Field(..., description="UUID for this search")
city: str = Field(..., description="Echoed city")
business: str = Field(..., description="Echoed business type")
time_bands: List[int] = Field(..., description="Echoed time bands")
candidates: List[CandidateSite] = Field(..., description="Ranked list of sites")
    # Maps
map_url: Optional[str] = Field(None, description="Main map (isochrones + Top-K)")
demand_map_url: Optional[str] = Field(None, description="Demand heat-style map (PNG)")
competition_map_url: Optional[str] = Field(None, description="Competition heat-style map (PNG)")
data_release: Optional[str] = Field(None, description="Data snapshot used")
warnings: Optional[List[str]] = Field(None, description="Any non-fatal warnings")
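And the request side, as /api/sites/search would consume it (same assumed import path):

```python
from pxy_contracts.schemas import SiteSearchRequest

req = SiteSearchRequest(
    city="CDMX",
    business="cafe",
    time_bands=[10, 20, 30],
    max_candidates=3,
    center=(19.4326, -99.1332),  # optional center for access sampling
)
print(req.model_dump())  # pydantic v2; use req.dict() on v1
```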

pxy_contracts/models.py (new file)

@ -0,0 +1,3 @@
from django.db import models
# Create your models here.

pxy_contracts/tests.py (new file)

@ -0,0 +1,3 @@
from django.test import TestCase
# Create your tests here.

pxy_contracts/version.py (new file)

@ -0,0 +1 @@
SPEC_VERSION = "0.1.0"

pxy_contracts/views.py (new file)

@ -0,0 +1,3 @@
from django.shortcuts import render
# Create your views here.

pxy_dashboard/apps/urls.py

@ -38,6 +38,10 @@ from pxy_dashboard.apps.views import (
apps_config_api,
apps_config_map,
apps_config_collection,
apps_sites_runs, # ← add
apps_sites_viewer, # ← add
apps_sami_explorer, # ← add
)
app_name = "apps"
@ -80,4 +84,10 @@ urlpatterns = [
path("config-api", apps_config_api, name="config-api"),
path("config-map", apps_config_map, name="config-map"),
path("config-collection", apps_config_collection, name="config-collection"),
# Sites analytics (new)
path("sites/runs", apps_sites_runs, name="sites-runs"),
path("sites/viewer", apps_sites_viewer, name="sites-viewer"),
path("sami/explorer", apps_sami_explorer, name="sami-explorer"), # ← add
]

pxy_dashboard/apps/views.py

@ -64,6 +64,11 @@ apps_config_api = AppsView.as_view(template_name="pxy_dashboard/apps/apps-config
apps_config_map = AppsView.as_view(template_name="pxy_dashboard/apps/apps-config-map.html")
apps_config_collection = AppsView.as_view(template_name="pxy_dashboard/apps/apps-config-collection.html")
# Sites (new)
apps_sites_runs = AppsView.as_view(template_name="pxy_dashboard/apps/apps-sites-runs.html")
apps_sites_viewer = AppsView.as_view(template_name="pxy_dashboard/apps/apps-sites-viewer.html")
apps_sami_explorer = AppsView.as_view(template_name="pxy_dashboard/apps/apps-sami-explorer.html")
from django.shortcuts import render
from .models import GeoScenario

pxy_dashboard/middleware.py

@ -50,6 +50,49 @@ EXEMPT_URLS += [
re.compile(r"^messenger/webhook/?$"), # regex with optional trailing slash
]
# SAMI API (public for bot/agents; add auth later if needed)
EXEMPT_URLS += [
re.compile(r"^api/sami/health$"),
re.compile(r"^api/sami/run$"),
]
# Routing API health (public for now)
EXEMPT_URLS += [
re.compile(r"^api/routing/health$"),
]
# Sites API (public for now)
EXEMPT_URLS += [
re.compile(r"^api/sites/health$"),
re.compile(r"^api/sites/search$"),
]
# Routing API isochrone (public for the demo)
EXEMPT_URLS += [
re.compile(r"^api/routing/isochrone$"),
]
# pxy_dashboard/middleware.py (appended to EXEMPT_URLS)
EXEMPT_URLS += [
re.compile(r"^api/sites/download/.+$"),
]
EXEMPT_URLS += [
re.compile(r"^api/sites/geojson/.+$"),
]
EXEMPT_URLS += [
re.compile(r"^api/sites/preview/.+$"),
]
# Telegram webhook (with /api/ prefix)
EXEMPT_URLS += [
"api/bots/webhook/",
re.compile(r"^api/bots/webhook/.+/?$"),
]
# (Optional) health, if you want it public
EXEMPT_URLS += [ re.compile(r"^api/bots/health/?$") ]
class LoginRequiredMiddleware(MiddlewareMixin):
def process_request(self, request):

pxy_dashboard/share_urls.py

@ -0,0 +1,7 @@
from django.urls import path
from .views_share import share_sites_card, share_sami_card
urlpatterns = [
path("sites/<uuid:search_id>/<str:token>", share_sites_card, name="share_sites_card"),
path("sami/<uuid:run_id>/<str:token>", share_sami_card, name="share_sami_card"),
]
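Since urls.py mounts this module under share/ without a namespace, the route names resolve globally; a small sketch (UUID and token are placeholders):

```python
# Resolving a share link by name.
from django.urls import reverse

url = reverse("share_sites_card",
              args=["0b9c2f66-1111-2222-3333-444455556666", "some-token"])
# -> /share/sites/0b9c2f66-1111-2222-3333-444455556666/some-token
```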

Template: pxy_dashboard/apps/apps-sami-explorer.html

@ -0,0 +1,313 @@
{% extends "pxy_dashboard/partials/base.html" %}
{% load static %}
{% block title %}SAMI · Explorer{% endblock title %}
{% block extra_css %}
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/chart.js@4.4.1/dist/chart.min.css">
<style>
.form-inline .form-control { min-width: 200px; }
.metric { font-variant-numeric: tabular-nums; }
.chart-wrap { position: relative; height: 340px; }
.sami-img { max-width: 100%; border-radius: .5rem; box-shadow: 0 0.25rem 0.75rem rgba(0,0,0,.05); }
.table thead th { white-space: nowrap; }
</style>
{% endblock extra_css %}
{% block pagetitle %}
{% include "pxy_dashboard/partials/page-title.html" with pagetitle="SAMI" title="Explorer" %}
{% endblock pagetitle %}
{% block content %}
<div class="row">
<div class="col-12">
<div class="card mb-3">
<div class="card-body">
<form id="runForm" class="row gy-2 gx-2 align-items-end">
<div class="col-12 col-md-4">
<label class="form-label">Indicator</label>
<input id="indicator" class="form-control" placeholder="imss_wages_2023" value="imss_wages_2023">
</div>
<div class="col-12 col-md-6">
<label class="form-label">Cities (comma-separated)</label>
<input id="cities" class="form-control" placeholder="CDMX,GDL,MTY,PUE">
</div>
<div class="col-12 col-md-2">
<button id="btnRun" type="submit" class="btn btn-primary w-100">Run SAMI</button>
</div>
<div class="col-12">
<small class="text-muted">Calls <code>/api/sami/run</code> and displays results interactively.</small>
</div>
</form>
</div>
</div>
<div id="results" class="card d-none">
<div class="card-body">
<div class="d-flex flex-wrap justify-content-between mb-3">
<div>
<h4 class="header-title mb-0">Results</h4>
<small id="meta" class="text-muted"></small>
</div>
<div class="d-flex gap-2">
<a id="dlCsv" class="btn btn-outline-secondary btn-sm" download="sami_residuals.csv">Download CSV</a>
<a id="openPng" class="btn btn-outline-secondary btn-sm" target="_blank">Open chart PNG</a>
</div>
</div>
<div class="row g-3">
<div class="col-12 col-lg-5">
<div class="row g-2">
<div class="col-4">
<div class="text-muted small">β (scaling)</div>
<div id="beta" class="metric fs-5"></div>
</div>
<div class="col-4">
<div class="text-muted small"></div>
<div id="r2" class="metric fs-5"></div>
</div>
<div class="col-4">
<div class="text-muted small">n (cities)</div>
<div id="nobs" class="metric fs-5"></div>
</div>
</div>
<hr>
<!-- NEW: Interactive scatter -->
<div class="chart-wrap mb-3">
<canvas id="scatterChart"></canvas>
</div>
<!-- Existing: Ranking bars -->
<div class="chart-wrap">
<canvas id="barChart"></canvas>
</div>
</div>
<div class="col-12 col-lg-7">
<div class="mb-2 text-muted small">Model scatter (server-rendered)</div>
<img id="chartPng" class="sami-img" alt="SAMI scatter" src="">
</div>
</div>
<hr>
<div class="table-responsive">
<table id="tbl" class="table table-sm table-hover align-middle">
<thead>
<tr>
<th scope="col">Rank</th>
<th scope="col">City</th>
<th scope="col">SAMI (z)</th>
</tr>
</thead>
<tbody></tbody>
</table>
</div>
</div>
</div>
</div>
</div>
{% endblock content %}
{% block extra_js %}
<script src="https://cdn.jsdelivr.net/npm/chart.js@4.4.1/dist/chart.umd.min.js"></script>
<script>
(function(){
const form = document.getElementById('runForm');
const btn = document.getElementById('btnRun');
const indInp = document.getElementById('indicator');
const cityInp= document.getElementById('cities');
const meta = document.getElementById('meta');
const betaEl = document.getElementById('beta');
const r2El = document.getElementById('r2');
const nobsEl = document.getElementById('nobs');
const img = document.getElementById('chartPng');
const openPng= document.getElementById('openPng');
const dlCsv = document.getElementById('dlCsv');
const card = document.getElementById('results');
const tbody = document.querySelector('#tbl tbody');
let barChart = null;
let scatterChart = null; // NEW
function parseCities(txt){
return (txt || '')
.split(/[,\n]/g)
.map(s=>s.trim())
.filter(Boolean);
}
function mkCsv(residuals){
const rows = [['rank','city','sami_z']];
residuals.forEach(r => rows.push([r.rank, r.city, r.sami]));
const csv = rows.map(r => r.join(',')).join('\n');
return 'data:text/csv;charset=utf-8,' + encodeURIComponent(csv);
}
function toLog(v){ return (v>0) ? Math.log(v) : null; } // in case the server didn't include logs
form.addEventListener('submit', async (e)=>{
e.preventDefault();
const indicator = indInp.value.trim();
const cities = parseCities(cityInp.value);
if (!indicator || !cities.length){
alert('Please provide indicator and at least one city.');
return;
}
btn.disabled = true;
btn.textContent = 'Running…';
try{
const resp = await fetch('/api/sami/run', {
method: 'POST',
headers: { 'Content-Type':'application/json' },
body: JSON.stringify({ indicator, cities })
});
if (!resp.ok){
const t = await resp.text();
throw new Error(`HTTP ${resp.status}: ${t}`);
}
const data = await resp.json();
// Meta & metrics
meta.textContent = `indicator: ${data.indicator} · run ${String(data.run_id||'').slice(0,8)}…`;
betaEl.textContent = (data.beta!=null) ? Number(data.beta).toFixed(3) : '—';
r2El.textContent = (data.r2!=null) ? Number(data.r2).toFixed(3) : '—';
nobsEl.textContent = (data.residuals||[]).length;
// PNG
const abs = (u)=> !u ? '' : (u.startsWith('http') ? u : (window.location.origin + u));
img.src = abs(data.chart_url || '');
openPng.href = abs(data.chart_url || '#');
// --- NEW: Interactive scatter (uses `points[]`, `alpha`, `beta`) ---
const pts = Array.isArray(data.points) ? data.points : [];
if (scatterChart){ scatterChart.destroy(); }
if (pts.length >= 2){
const scatter = pts
.map(p => ({
x: (p.log_N ?? toLog(p.N)),
y: (p.log_value ?? toLog(p.value)),
city: p.city
}))
.filter(p => Number.isFinite(p.x) && Number.isFinite(p.y));
if (scatter.length >= 2){
const xs = scatter.map(p=>p.x);
const minX = Math.min(...xs), maxX = Math.max(...xs);
const alpha = (typeof data.alpha === 'number') ? data.alpha : 0;
const beta = (typeof data.beta === 'number') ? data.beta : 1;
const line = [
{ x: minX, y: alpha + beta*minX },
{ x: maxX, y: alpha + beta*maxX },
];
const ctxS = document.getElementById('scatterChart');
scatterChart = new Chart(ctxS, {
type: 'scatter',
data: {
datasets: [
{
label: 'Cities',
data: scatter,
pointRadius: 4,
pointHoverRadius: 6,
borderWidth: 0
},
{
type: 'line',
label: 'OLS fit',
data: line,
borderWidth: 2,
pointRadius: 0
}
]
},
options: {
maintainAspectRatio: false,
plugins: {
legend: { display: false },
tooltip: {
callbacks: {
label: ctx => {
const d = ctx.raw;
return ` ${d.city}: (${ctx.parsed.x.toFixed(3)}, ${ctx.parsed.y.toFixed(3)})`;
}
}
}
},
scales: {
x: { type: 'linear', title: { display: true, text: 'log(N)' } },
y: { title: { display: true, text: 'log(value)' } }
}
}
});
}
}
// --- end interactive scatter ---
// Ranking bar chart (sorted by rank ascending)
const res = Array.from(data.residuals || []).sort((a,b)=> Number(a.rank)-Number(b.rank));
const labels = res.map(r => `${r.rank}. ${r.city}`);
const values = res.map(r => Number(r.sami||0));
if (barChart){ barChart.destroy(); }
const ctx = document.getElementById('barChart');
barChart = new Chart(ctx, {
type: 'bar',
data: {
labels,
datasets: [{
label: 'SAMI (z)',
data: values,
borderWidth: 1
}]
},
options: {
maintainAspectRatio: false,
plugins: {
legend: { display: false },
tooltip: {
callbacks: {
label: (ctx)=> ` ${ctx.parsed.y.toFixed(2)} z`
}
}
},
scales: {
x: { ticks: { maxRotation: 0, autoSkip: true } },
y: { beginAtZero: true }
}
}
});
// Table
tbody.innerHTML = '';
res.forEach(r => {
const tr = document.createElement('tr');
tr.innerHTML = `
<td class="text-muted">${r.rank}</td>
<td>${r.city}</td>
<td class="fw-semibold">${Number(r.sami||0).toFixed(2)}</td>
`;
tbody.appendChild(tr);
});
// CSV
dlCsv.href = mkCsv(res);
// Show card
card.classList.remove('d-none');
} catch(err){
console.error(err);
alert('SAMI run failed. See console for details.');
} finally {
btn.disabled = false;
btn.textContent = 'Run SAMI';
}
});
})();
</script>
{% endblock extra_js %}
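The same endpoint the explorer calls can be exercised outside the browser; a sketch with the standard library (port 8002 per the Dockerfile above; response fields per SAMIRunResponse):

```python
# POST the explorer's request body to /api/sami/run and print the fit.
import json
import urllib.request

body = json.dumps({"indicator": "imss_wages_2023",
                   "cities": ["CDMX", "GDL", "MTY"]}).encode()
req = urllib.request.Request(
    "http://localhost:8002/api/sami/run",
    data=body,
    headers={"Content-Type": "application/json"},
)
with urllib.request.urlopen(req) as r:
    data = json.load(r)
print(data["beta"], data["r2"],
      [(c["city"], c["sami"]) for c in data["residuals"]])
```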

Template: pxy_dashboard/apps/apps-sites-runs.html

@ -0,0 +1,96 @@
{% extends "pxy_dashboard/partials/base.html" %}
{% load static %}
{% block title %}Sites · Recent runs{% endblock title %}
{% block pagetitle %}
{% include "pxy_dashboard/partials/page-title.html" with pagetitle="Sites" title="Recent runs" %}
{% endblock pagetitle %}
{% block content %}
<div class="row">
<div class="col-12">
<div class="card">
<div class="card-body">
<div class="d-flex justify-content-between align-items-center mb-3">
<div>
<h4 class="header-title mb-0">Latest runs</h4>
<small class="text-muted">Click a card to open the interactive viewer</small>
</div>
<div class="d-flex align-items-center">
<label class="me-2">Limit</label>
<select id="limitSel" class="form-select form-select-sm" style="width:auto">
<option>6</option><option selected>12</option><option>24</option><option>36</option>
</select>
</div>
</div>
<div id="runsGrid" class="row g-3"></div>
<div id="runsEmpty" class="text-center text-muted py-5 d-none">
<div class="mb-2">No runs yet</div>
<small>Trigger a run via <code>/api/sites/search</code> and refresh.</small>
</div>
</div>
</div>
</div>
</div>
{% endblock content %}
{% block extra_js %}
<script>
(function(){
const grid = document.getElementById('runsGrid');
const empty = document.getElementById('runsEmpty');
const limitSel = document.getElementById('limitSel');
limitSel.addEventListener('change', loadRuns);
loadRuns();
function asAbs(url) {
if (!url) return null;
if (url.startsWith('http')) return url;
return window.location.origin + url;
}
async function loadRuns() {
grid.innerHTML = '';
empty.classList.add('d-none');
const limit = parseInt(limitSel.value || '12', 10);
const resp = await fetch(`/api/sites/runs/recent?limit=${limit}`);
if (!resp.ok) { empty.classList.remove('d-none'); return; }
const data = await resp.json();
const items = (data && data.items) || [];
if (!items.length) { empty.classList.remove('d-none'); return; }
items.forEach(item => {
const preview = asAbs(item.map_url) || asAbs((item.download||{}).main); // fallback
const title = `${item.business || '—'} @ ${item.city || '—'}`;
const when = new Date(item.created_at).toLocaleString();
const card = document.createElement('div');
card.className = 'col-12 col-sm-6 col-lg-4';
card.innerHTML = `
<div class="card h-100 shadow-sm">
<div class="ratio ratio-4x3 bg-light">
${preview ? `<img src="${preview}" class="card-img-top object-fit-cover" alt="preview">` : `<div class="d-flex align-items-center justify-content-center text-muted">No preview</div>`}
</div>
<div class="card-body">
<h5 class="card-title mb-1" style="min-height: 2.2rem">${title}</h5>
<div class="text-muted small mb-2">${when}</div>
<div class="d-flex gap-2 flex-wrap">
<a class="btn btn-primary btn-sm" href="/apps/sites/viewer?search_id=${encodeURIComponent(item.search_id)}">
Open viewer
</a>
${item.download?.main ? `<a class="btn btn-outline-secondary btn-sm" href="${item.download.main}" target="_blank">Download PNG</a>` : ``}
${item.geojson?.candidates ? `<a class="btn btn-outline-secondary btn-sm" href="${item.geojson.candidates}" target="_blank">Candidates GeoJSON</a>` : ``}
</div>
</div>
</div>
`;
grid.appendChild(card);
});
}
})();
</script>
{% endblock extra_js %}

Template: pxy_dashboard/apps/apps-sites-viewer.html

@ -0,0 +1,411 @@
{% extends "pxy_dashboard/partials/base.html" %}
{% load static %}
{% block title %}Sites · Run viewer{% endblock title %}
{% block extra_css %}
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.9.4/dist/leaflet.css"
integrity="sha256-p4NxAoJBhIIN+hmNHrzRCf9tD/miZyoHS5obTRR9BMY=" crossorigin="">
<style>
#sites-map { height: 70vh; border-radius: 0.5rem; }
.layer-chip { margin-right: .75rem; }
.leaflet-popup-content { margin: 10px 14px; }
.scrub-wrap { display:flex; align-items:center; gap:.5rem; }
.scrub-wrap input[type=range]{ width: 220px; }
.scrub-metric { min-width: 10ch; text-align:right; }
/* 55B: mini chart */
#areaChart { width: 100%; height: 110px; }
.chart-grid line { stroke: #e9ecef; stroke-width: 1; }
.chart-axis text { fill: #6c757d; font-size: 10px; }
</style>
{% endblock extra_css %}
{% block pagetitle %}
{% include "pxy_dashboard/partials/page-title.html" with pagetitle="Sites" title="Run viewer" %}
{% endblock pagetitle %}
{% block content %}
<div class="row">
<div class="col-12">
<div class="card">
<div class="card-body">
<div class="d-flex flex-wrap align-items-center justify-content-between mb-3">
<div>
<h4 class="header-title mb-0">Interactive run viewer</h4>
<small id="runMeta" class="text-muted">Loading…</small>
</div>
<div class="d-flex flex-wrap align-items-center gap-3">
<!-- 55A — Minutes scrubber -->
<div id="scrubWrap" class="scrub-wrap d-none">
<button id="btnPlay" class="btn btn-sm btn-outline-primary" type="button">Play</button>
<button id="btnPause" class="btn btn-sm btn-outline-secondary" type="button" disabled>Pause</button>
<div class="d-flex align-items-center gap-2">
<label class="mb-0 text-muted small">Minutes</label>
<input id="minuteSlider" type="range" min="0" max="0" step="1" value="0">
<span id="minuteText" class="badge bg-primary-subtle text-primary-emphasis"></span>
<span class="text-muted small">Area:</span>
<span id="areaText" class="scrub-metric text-muted small"></span>
</div>
</div>
<!-- Existing layer toggles -->
<div class="d-flex align-items-center">
<label class="me-2">Layers</label>
<div class="form-check form-check-inline layer-chip">
<input class="form-check-input" type="checkbox" id="chkIso" checked>
<label class="form-check-label" for="chkIso">Isochrones</label>
</div>
<div class="form-check form-check-inline layer-chip">
<input class="form-check-input" type="checkbox" id="chkCand" checked>
<label class="form-check-label" for="chkCand">Candidates</label>
</div>
<div class="form-check form-check-inline layer-chip">
<input class="form-check-input" type="checkbox" id="chkDemand">
<label class="form-check-label" for="chkDemand">Demand (sample)</label>
</div>
<div class="form-check form-check-inline layer-chip">
<input class="form-check-input" type="checkbox" id="chkComp">
<label class="form-check-label" for="chkComp">Competition (sample)</label>
</div>
</div>
</div>
</div>
<div id="sites-map" class="mb-2"></div>
<!-- 55B — area vs minutes mini-chart -->
<div id="areaChartWrap" class="mt-2 d-none">
<svg id="areaChart" viewBox="0 0 640 120" preserveAspectRatio="xMidYMid meet" role="img" aria-label="Isochrone area by minutes"></svg>
</div>
<div class="text-muted small mt-1">
Tip: hover candidates for score & breakdown; toggle layers on the right; use the scrubber to switch minutes.
</div>
</div>
</div>
</div>
</div>
{% endblock content %}
{% block extra_js %}
<script src="https://unpkg.com/leaflet@1.9.4/dist/leaflet.js"
integrity="sha256-20nQCchB9co0qIjJZRGuk2/Z9VM+kNiyxNV1lvTlZBo=" crossorigin=""></script>
<script>
(function(){
const q = new URLSearchParams(window.location.search);
const sid = q.get('search_id');
const metaEl = document.getElementById('runMeta');
const scrubWrap = document.getElementById('scrubWrap');
const minuteSlider = document.getElementById('minuteSlider');
const minuteText = document.getElementById('minuteText');
const areaText = document.getElementById('areaText');
const btnPlay = document.getElementById('btnPlay');
const btnPause = document.getElementById('btnPause');
// 55B: chart elements
const chartWrap = document.getElementById('areaChartWrap');
const chartEl = document.getElementById('areaChart');
if (!sid) {
metaEl.textContent = "Missing ?search_id=...";
console.warn("No search_id param");
return;
}
// Base map
const map = L.map('sites-map', { zoomControl: true });
L.tileLayer('https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png',
{ maxZoom: 19, attribution: '&copy; OpenStreetMap' }).addTo(map);
// Layers
const layers = {
iso: L.layerGroup().addTo(map),
cand: L.layerGroup().addTo(map),
demand: L.layerGroup(),
comp: L.layerGroup(),
};
document.getElementById('chkIso').addEventListener('change', (e)=> toggle(layers.iso, e.target.checked));
document.getElementById('chkCand').addEventListener('change', (e)=> toggle(layers.cand, e.target.checked));
document.getElementById('chkDemand').addEventListener('change', (e)=> toggle(layers.demand, e.target.checked));
document.getElementById('chkComp').addEventListener('change', (e)=> toggle(layers.comp, e.target.checked));
function toggle(layer, on){ if (on) layer.addTo(map); else map.removeLayer(layer); }
const isoURL = `/api/sites/geojson/isochrones/${encodeURIComponent(sid)}`;
const candURL = `/api/sites/geojson/candidates/${encodeURIComponent(sid)}`;
const demURL = `/api/sites/geojson/popgrid/${encodeURIComponent(sid)}`;
const compURL = `/api/sites/geojson/pois_competition/${encodeURIComponent(sid)}`;
const artURL = `/media/sites/run_${encodeURIComponent(sid)}.json`;
let fitBounds = null;
// 55A scrubber state
const bandColors = ['#2E86AB','#F18F01','#C73E1D','#6C5B7B','#17B890','#7E57C2'];
let minutes = [];
const isoGroups = new Map(); // minute -> LayerGroup
const minuteAreas = new Map(); // minute -> total area_km2
let currentIndex = 0;
let timer = null;
function fmtArea(km2) {
if (km2 == null) return '—';
if (km2 < 10) return km2.toFixed(2) + ' km²';
if (km2 < 100) return km2.toFixed(1) + ' km²';
return Math.round(km2) + ' km²';
}
function setButtons(playing) {
btnPlay.disabled = playing;
btnPause.disabled = !playing;
}
function renderMinute(idx) {
currentIndex = Math.max(0, Math.min(idx, minutes.length - 1));
const m = minutes[currentIndex];
minuteText.textContent = `${m} min`;
areaText.textContent = fmtArea(minuteAreas.get(m));
layers.iso.clearLayers();
const group = isoGroups.get(m);
if (group) layers.iso.addLayer(group);
// 55B: move chart marker
updateAreaMarker(m);
}
minuteSlider.addEventListener('input', (e)=>{
renderMinute(parseInt(e.target.value, 10) || 0);
});
btnPlay.addEventListener('click', ()=>{
if (timer || minutes.length <= 1) return;
setButtons(true);
timer = setInterval(()=>{
const next = (currentIndex + 1) % minutes.length;
minuteSlider.value = String(next);
renderMinute(next);
}, 1200);
});
btnPause.addEventListener('click', ()=>{
if (timer){ clearInterval(timer); timer = null; }
setButtons(false);
});
// 55B — area chart internals
let chartState = null; // {mins, areas, x, y, markerX, markerY}
function drawAreaChart(mins, areas) {
// Responsive box
const vbW = 640, vbH = 120;
const padL = 42, padR = 14, padT = 10, padB = 24;
const W = vbW - padL - padR;
const H = vbH - padT - padB;
const minM = Math.min(...mins);
const maxM = Math.max(...mins);
const maxA = Math.max(...areas) * 1.1 || 1;
const x = (m)=> padL + ( (m - minM) / (maxM - minM || 1) ) * W;
const y = (a)=> padT + (1 - (a / (maxA || 1))) * H;
// Build path
let d = '';
mins.forEach((m, i)=>{
const X = x(m), Y = y(areas[i]);
d += (i ? 'L' : 'M') + X + ' ' + Y + ' ';
});
// Close to baseline for fill
d += `L ${x(maxM)} ${padT + H} L ${x(minM)} ${padT + H} Z`;
// X ticks at each minute; Y ticks 0, mid, max
const midA = maxA/2;
function tickY(val){ return padT + (1 - (val/maxA)) * H; }
const yTicks = [
{v:0,label:'0'},
{v:midA,label: midA>=1 ? (midA<10?midA.toFixed(1):Math.round(midA)) : midA.toFixed(2)},
{v:maxA,label: maxA>=10 ? Math.round(maxA) : maxA.toFixed(1)}
];
// SVG
chartEl.innerHTML = `
<defs>
<linearGradient id="ag" x1="0" y1="0" x2="0" y2="1">
<stop offset="0%" stop-color="#2E86AB" stop-opacity="0.30"/>
<stop offset="100%" stop-color="#2E86AB" stop-opacity="0.05"/>
</linearGradient>
</defs>
<g class="chart-grid">
${yTicks.map(t=>`<line x1="${padL}" y1="${tickY(t.v)}" x2="${padL+W}" y2="${tickY(t.v)}"/>`).join('')}
</g>
<path d="${d}" fill="url(#ag)" stroke="#2E86AB" stroke-width="2" />
<!-- X ticks -->
<g class="chart-axis">
${mins.map(m=>{
const X = x(m);
return `
<line x1="${X}" y1="${padT+H}" x2="${X}" y2="${padT+H+4}" stroke="#adb5bd"/>
<text x="${X}" y="${padT+H+14}" text-anchor="middle">${m}m</text>
`;
}).join('')}
</g>
<!-- Y ticks -->
<g class="chart-axis">
${yTicks.map(t=>{
return `<text x="${padL-6}" y="${tickY(t.v)+3}" text-anchor="end">${t.label} km²</text>`;
}).join('')}
</g>
<!-- Marker group (updated dynamically) -->
<g id="marker">
<line id="markerLine" x1="${padL}" y1="${padT}" x2="${padL}" y2="${padT+H}" stroke="#495057" stroke-dasharray="3,3"/>
<circle id="markerDot" cx="${padL}" cy="${padT+H}" r="4" fill="#2E86AB" stroke="#111" stroke-width="1.5"/>
</g>
`;
chartState = { mins, areas, x, y, padT, H };
updateAreaMarker(minutes[currentIndex] || mins[mins.length-1]);
chartWrap.classList.toggle('d-none', mins.length < 2);
}
function updateAreaMarker(minute) {
if (!chartState) return;
const { mins, areas, x, y, padT, H } = chartState;
const i = Math.max(0, mins.indexOf(minute));
const X = x(mins[i]);
const Y = y(areas[i]);
const line = chartEl.querySelector('#markerLine');
const dot = chartEl.querySelector('#markerDot');
if (line) { line.setAttribute('x1', X); line.setAttribute('x2', X); }
if (dot) { dot.setAttribute('cx', X); dot.setAttribute('cy', Y); }
}
// Fetch everything
Promise.allSettled([
fetch(isoURL).then(r=>r.ok?r.json():null),
fetch(candURL).then(r=>r.ok?r.json():null),
fetch(demURL).then(r=>r.ok?r.json():null),
fetch(compURL).then(r=>r.ok?r.json():null),
fetch(artURL).then(r=>r.ok?r.json():null)
]).then(([iso, cand, dem, comp, art])=>{
const isoFC = iso.value || {type:'FeatureCollection',features:[]};
const candFC = cand.value|| {type:'FeatureCollection',features:[]};
const demFC = dem.value || {type:'FeatureCollection',features:[]};
const compFC = comp.value|| {type:'FeatureCollection',features:[]};
const artObj = art.value || null;
// Fit bounds to all isochrones
const allIso = L.geoJSON(isoFC);
const b = allIso.getBounds();
if (b.isValid()) { map.fitBounds(b.pad(0.05)); fitBounds = b; }
// Build minute groups + areas
(isoFC.features || []).forEach((f)=>{
const m = (f.properties && Number(f.properties.minutes)) || 0;
if (!isoGroups.has(m)) isoGroups.set(m, L.layerGroup());
const i = Math.max(0, Math.floor(m/5) - 1);
const c = bandColors[i % bandColors.length];
const lyr = L.geoJSON(f, { style: { color:c, weight:2, fillColor:c, fillOpacity:.25 } });
isoGroups.get(m).addLayer(lyr);
const a = Number((f.properties && f.properties.area_km2) || 0);
minuteAreas.set(m, (minuteAreas.get(m) || 0) + (isFinite(a) ? a : 0));
});
minutes = Array.from(isoGroups.keys()).sort((a,b)=>a-b);
// 55B: draw chart if ≥2 bands
if (minutes.length >= 2) {
const areas = minutes.map(m => minuteAreas.get(m) || 0);
drawAreaChart(minutes, areas);
} else {
chartWrap.classList.add('d-none');
}
// Configure slider
if (minutes.length > 0) {
minuteSlider.min = 0;
minuteSlider.max = Math.max(0, minutes.length - 1);
minuteSlider.step = 1;
minuteSlider.value = String(minutes.length - 1); // default to largest band
scrubWrap.classList.toggle('d-none', minutes.length <= 1);
renderMinute(parseInt(minuteSlider.value, 10));
setButtons(false);
} else {
scrubWrap.classList.add('d-none');
}
// Candidates
L.geoJSON(candFC, {
pointToLayer: (f, latlng)=>{
const s = Number(f.properties?.score ?? 0.5);
const r = 6 + Math.round(s * 10);
return L.circleMarker(latlng, { radius: r, weight: 2, color: '#111', fillOpacity: 0.9 });
},
onEachFeature: (f, layer)=>{
const p = f.properties || {};
const score = Number(p.score ?? 0).toFixed(2);
const br = `Demand ${Number(p.demand||0).toFixed(2)}, `
+ `Comp ${Number(p.competition||0).toFixed(2)}, `
+ `Access ${Number(p.access||0).toFixed(2)}`;
layer.bindPopup(`<b>Rank ${p.rank || '—'}</b> · score ${score}<br><small>${br}</small>`);
}
}).addTo(layers.cand);
if (!fitBounds) {
const b2 = L.geoJSON(candFC).getBounds();
if (b2.isValid()) map.fitBounds(b2.pad(0.1));
else map.setView([19.4326, -99.1332], 11);
}
// Demand & Competition (start hidden)
L.geoJSON(demFC, {
pointToLayer: (f, latlng)=> L.circleMarker(latlng, { radius: 3, color: '#d9534f', weight: 0, fillOpacity: .7 }),
onEachFeature: (f, layer)=>{
const pop = f.properties?.pop;
if (pop) layer.bindPopup(`Population: ${Math.round(pop)}`);
}
}).addTo(layers.demand);
L.geoJSON(compFC, {
pointToLayer: (f, latlng)=> L.circleMarker(latlng, { radius: 3, color: '#0d6efd', weight: 0, fillOpacity: .7 }),
onEachFeature: (f, layer)=>{
const name = f.properties?.name || '(poi)';
const cat = f.properties?.category || '';
layer.bindPopup(`${name}${cat?' — '+cat:''}`);
}
}).addTo(layers.comp);
map.removeLayer(layers.demand);
map.removeLayer(layers.comp);
document.getElementById('chkDemand').checked = false;
document.getElementById('chkComp').checked = false;
// Meta (best-effort via artifact)
if (artObj && artObj.request) {
const req = artObj.request;
const bands = (req.time_bands || []).join(', ');
metaEl.textContent = `${req.business || '—'} @ ${req.city || '—'} · bands: ${bands} · search_id ${sid.slice(0,8)}…`;
} else {
metaEl.textContent = `search_id ${sid.slice(0,8)}…`;
}
});
// Redraw chart on resize (debounced)
let _rt = null;
window.addEventListener('resize', ()=>{
if (!_rt) {
_rt = requestAnimationFrame(()=>{
if (minutes.length >= 2) {
const areas = minutes.map(m => minuteAreas.get(m) || 0);
drawAreaChart(minutes, areas);
}
_rt = null;
});
}
});
})();
</script>
{% endblock extra_js %}


@ -0,0 +1,35 @@
{% load static %}
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>{{ title|default:"SAMI · Card" }}</title>
<meta name="viewport" content="width=device-width, initial-scale=1">
{% if chart_url %}<meta property="og:image" content="{{ chart_url }}">{% endif %}
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/css/bootstrap.min.css" rel="stylesheet">
<style>
body { background:#0b1220; color:#e5e7eb; }
.card { background:#111827; border-color:#1f2937; }
.thumb { max-width:100%; border-radius:.5rem; box-shadow:0 0.5rem 1.25rem rgba(0,0,0,.35); }
.muted { color:#9ca3af; }
.pill { display:inline-block; padding:.15rem .5rem; border-radius:999px; background:#1f2937; margin-right:.25rem; }
</style>
</head>
<body class="py-4">
<div class="container">
<div class="card shadow-lg">
<div class="card-body">
<h3 class="mb-1">SAMI <span class="muted">· {{ indicator }}</span></h3>
<div class="mb-3">
{% if beta is not None %}<span class="pill">β {{ beta|floatformat:3 }}</span>{% endif %}
{% if r2 is not None %}<span class="pill">R² {{ r2|floatformat:3 }}</span>{% endif %}
{% if n %}<span class="pill">n = {{ n }}</span>{% endif %}
</div>
{% if chart_url %}
<img class="thumb" src="{{ chart_url }}" alt="SAMI chart">
{% endif %}
</div>
</div>
</div>
</body>
</html>

View File

@ -0,0 +1,47 @@
{% load static %}
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>{{ title|default:"Sites · Card" }}</title>
<meta name="viewport" content="width=device-width, initial-scale=1">
{% if main_preview_url %}
<meta property="og:image" content="{{ main_preview_url }}">
{% endif %}
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/css/bootstrap.min.css" rel="stylesheet">
<style>
body { background:#0b1220; color:#e5e7eb; }
.card { background:#111827; border-color:#1f2937; }
.thumb { max-width:100%; border-radius:.5rem; box-shadow:0 0.5rem 1.25rem rgba(0,0,0,.35); }
.muted { color:#9ca3af; }
a.btn { text-transform: none; }
</style>
</head>
<body class="py-4">
<div class="container">
<div class="card shadow-lg">
<div class="card-body">
<h3 class="mb-1">{{ business }} <span class="muted">@ {{ city }}</span></h3>
<div class="muted mb-3">Sites · {{ created_at }}</div>
{% if main_preview_url %}
<img class="thumb mb-3" src="{{ main_preview_url }}" alt="Sites preview">
{% endif %}
<div class="row g-2 mb-3">
{% if download.main %}<div class="col-auto"><a class="btn btn-outline-light btn-sm" href="{{ download.main }}" target="_blank">Download main PNG</a></div>{% endif %}
{% if download.demand %}<div class="col-auto"><a class="btn btn-outline-light btn-sm" href="{{ download.demand }}" target="_blank">Download demand PNG</a></div>{% endif %}
{% if download.competition %}<div class="col-auto"><a class="btn btn-outline-light btn-sm" href="{{ download.competition }}" target="_blank">Download competition PNG</a></div>{% endif %}
</div>
<div class="row g-2">
{% if geojson.isochrones %}<div class="col-auto"><a class="btn btn-secondary btn-sm" href="{{ geojson.isochrones }}" target="_blank">Isochrones GeoJSON</a></div>{% endif %}
{% if geojson.candidates %}<div class="col-auto"><a class="btn btn-secondary btn-sm" href="{{ geojson.candidates }}" target="_blank">Candidates GeoJSON</a></div>{% endif %}
{% if geojson.pois_competition %}<div class="col-auto"><a class="btn btn-secondary btn-sm" href="{{ geojson.pois_competition }}" target="_blank">POIs GeoJSON</a></div>{% endif %}
{% if geojson.popgrid %}<div class="col-auto"><a class="btn btn-secondary btn-sm" href="{{ geojson.popgrid }}" target="_blank">PopGrid GeoJSON</a></div>{% endif %}
</div>
</div>
</div>
</div>
</body>
</html>

View File

@ -0,0 +1,37 @@
from __future__ import annotations
from typing import Any, Dict
from django.conf import settings
from django.core.signing import BadSignature, SignatureExpired, dumps, loads
DEFAULT_TTL = int(getattr(settings, "SHARE_TTL_SECONDS", 7 * 24 * 3600))
def _base_url(request=None) -> str:
if request:
forwarded_proto = request.META.get("HTTP_X_FORWARDED_PROTO")
scheme = (forwarded_proto.split(",")[0].strip() if forwarded_proto else None) or (
"https" if request.is_secure() else "http"
)
host = request.get_host() or settings.BASE_URL.replace("https://", "").replace("http://", "")
return f"{scheme}://{host}"
return settings.BASE_URL
def mint_sites_share_url(search_id: str, request=None, ttl: int | None = None) -> str:
# Note: ttl is enforced at verification time (see verify_token); dumps() embeds a timestamp.
payload = {"k": "sites", "sid": search_id}
token = dumps(payload) # signed + timestamped
base = _base_url(request)
return f"{base}/share/sites/{search_id}/{token}"
def mint_sami_share_url(run_id: str, meta: Dict[str, Any], request=None, ttl: int | None = None) -> str:
# meta can include: indicator, beta, r2, n
payload = {"k": "sami", "rid": run_id, **{k: v for k, v in meta.items() if k in ("indicator","beta","r2","n")}}
token = dumps(payload)
base = _base_url(request)
return f"{base}/share/sami/{run_id}/{token}"
def verify_token(token: str, max_age: int | None = None) -> Dict[str, Any]:
try:
return loads(token, max_age=max_age or DEFAULT_TTL)
except SignatureExpired:
raise  # token older than the TTL; callers map this to 403
except BadSignature:
raise  # tampered or malformed token
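
# Minimal usage sketch (illustrative; the id below is a made-up placeholder):
#   url = mint_sites_share_url("6f1d2c3a-0000-0000-0000-000000000000")
#   token = url.rsplit("/", 1)[-1]
#   verify_token(token)  # -> {"k": "sites", "sid": "6f1d2c3a-..."} or raises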

View File

@ -0,0 +1,59 @@
from __future__ import annotations
import uuid
from django.http import HttpResponse, HttpResponseForbidden, HttpResponseNotFound
from django.shortcuts import render
from django.conf import settings
from django.utils.html import escape
from pxy_dashboard.utils.share import verify_token, DEFAULT_TTL
from pxy_sites.models import SiteRun
# --- Sites ---
def share_sites_card(request, search_id, token):
# Django gives a UUID object when using <uuid:..>; normalize to str
sid = str(search_id)
try:
uuid.UUID(sid)
except Exception:
return HttpResponseNotFound("Invalid search_id")
try:
payload = verify_token(token, max_age=DEFAULT_TTL)
if payload.get("k") != "sites" or payload.get("sid") != sid:
return HttpResponseForbidden("Bad token")
except Exception:
return HttpResponseForbidden("Expired or invalid token")
# Load run by string id
try:
run = SiteRun.objects.get(search_id=sid)
except SiteRun.DoesNotExist:
return HttpResponseNotFound("Run not found")
# ... rest unchanged ...
# --- SAMI ---
def share_sami_card(request, run_id, token):
rid = str(run_id) # normalize
try:
uuid.UUID(rid)
except Exception:
return HttpResponseNotFound("Invalid run_id")
try:
payload = verify_token(token, max_age=DEFAULT_TTL)
if payload.get("k") != "sami" or payload.get("rid") != rid:
return HttpResponseForbidden("Bad token")
except Exception:
return HttpResponseForbidden("Expired or invalid token")
chart_url = f"{settings.MEDIA_URL}sami/sami_{rid}.png"
ctx = {
"title": f"SAMI · {payload.get('indicator', '')}",
"indicator": payload.get("indicator"),
"beta": payload.get("beta"),
"r2": payload.get("r2"),
"n": payload.get("n"),
"chart_url": chart_url,
}
return render(request, "pxy_dashboard/share/sami_card.html", ctx)

BIN
pxy_de.zip Normal file

Binary file not shown.

106
pxy_de/api.py Normal file
View File

@ -0,0 +1,106 @@
# pxy_de/api.py
from __future__ import annotations
from pathlib import Path
from typing import List, Dict, Any
import pandas as pd
from django.conf import settings
from rest_framework.decorators import api_view
from rest_framework.response import Response
from .providers.base import get_provider
def _rel(path: Path, base_dir: Path) -> str:
"""
Return a clean relative path like 'data/...'
"""
try:
return str(path.relative_to(settings.BASE_DIR))
except Exception:
# Fallback: show relative to provider base_dir
try:
return str(Path("data") / path.relative_to(base_dir))
except Exception:
return str(path)
def _probe_csv(path: Path) -> Dict[str, Any]:
"""
Lightweight readability probe: existence + sample columns (no full read).
"""
info: Dict[str, Any] = {"exists": path.exists()}
if not info["exists"]:
return info
try:
sample = pd.read_csv(path, nrows=5)
info["columns"] = list(sample.columns)
info["sample_rows"] = int(sample.shape[0]) # up to 5
except Exception as e:
info["error"] = f"{type(e).__name__}: {e}"
return info
@api_view(["GET"])
def de_health(request):
"""
GET /api/de/health?city=CDMX&business=cafe&indicator=imss_wages_2023
Reports:
- provider in use
- base_dir used by the provider
- required/missing files (population.csv always; others if params passed)
- lightweight probes for each checked file (exists, columns, sample_rows)
"""
provider = get_provider()
base_dir: Path = getattr(provider, "base_dir", Path(settings.BASE_DIR) / "data")
checks: List[Dict[str, Any]] = []
missing: List[str] = []
city = (request.query_params.get("city") or "").strip()
business = (request.query_params.get("business") or "").strip()
indicator = (request.query_params.get("indicator") or "").strip()
# Always check SAMI population
pop_path = base_dir / "sami" / "population.csv"
pop_probe = _probe_csv(pop_path)
pop_probe["path"] = _rel(pop_path, base_dir)
checks.append(pop_probe)
if not pop_probe["exists"]:
missing.append(pop_probe["path"])
# Optional: indicator for SAMI
if indicator:
ind_path = base_dir / "sami" / f"{indicator}.csv"
ind_probe = _probe_csv(ind_path)
ind_probe["path"] = _rel(ind_path, base_dir)
checks.append(ind_probe)
if not ind_probe["exists"]:
missing.append(ind_probe["path"])
# Optional: Sites (competition / DENUE)
if city and business:
denue_path = base_dir / "denue" / f"{city}_{business}.csv"
denue_probe = _probe_csv(denue_path)
denue_probe["path"] = _rel(denue_path, base_dir)
checks.append(denue_probe)
if not denue_probe["exists"]:
missing.append(denue_probe["path"])
# Optional: Sites (demand / population grid)
if city:
grid_path = base_dir / "popgrid" / f"{city}_grid.csv"
grid_probe = _probe_csv(grid_path)
grid_probe["path"] = _rel(grid_path, base_dir)
checks.append(grid_probe)
if not grid_probe["exists"]:
missing.append(grid_probe["path"])
ok = len(missing) == 0
return Response({
"provider": "csv-data",
"base_dir": str(base_dir),
"ok": ok,
"missing": missing,
"files": checks,
})
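
# Illustrative success payload for GET /api/de/health?indicator=imss_wages_2023,
# given the sample CSVs under data/ (values are examples, not captured output):
#   {"provider": "csv-data", "base_dir": "/app/data", "ok": true, "missing": [],
#    "files": [{"exists": true, "columns": ["city", "N"], "sample_rows": 3,
#               "path": "data/sami/population.csv"}, ...]}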

View File

72
pxy_de/providers/base.py Normal file
View File

@ -0,0 +1,72 @@
from __future__ import annotations
from abc import ABC, abstractmethod
from functools import lru_cache
from typing import List, Dict, Any
import os
try:
import pandas as pd # type: ignore
except Exception: # pragma: no cover
pd = None # for type hints only
class DataProvider(ABC):
"""
Abstract provider interface for data access used across modules (SAMI, Sites, etc.).
Implementations must live under pxy_de.providers.* and implement these methods.
"""
# ---------- Common ----------
@abstractmethod
def health(self) -> Dict[str, Any]:
...
# ---------- SAMI ----------
@abstractmethod
def indicator(self, indicator: str, cities: List[str]) -> "pd.DataFrame":
"""
Return columns: city, value, N (N = population or scale variable)
"""
...
# ---------- Sites: competition (POIs) ----------
@abstractmethod
def denue(self, city: str, business: str) -> "pd.DataFrame":
"""
Return columns: name, lat, lon, category
"""
...
# ---------- Sites: demand (population grid) ----------
@abstractmethod
def popgrid(self, city: str) -> "pd.DataFrame":
"""
Return columns: cell_id, lat, lon, pop
"""
...
# ---------- Optional: city boundary (GeoJSON-like) ----------
@abstractmethod
def city_boundary(self, city: str) -> Dict[str, Any]:
"""
Return a GeoJSON-like dict for city boundary, or {} if not available.
"""
...
@lru_cache(maxsize=1)
def get_provider() -> DataProvider:
"""
Factory for data providers. Choose via env:
DATA_PROVIDER = csv (default) | <future providers>
"""
name = os.getenv("DATA_PROVIDER", "csv").strip().lower()
if name == "csv":
from .csv_provider import CsvDataProvider
return CsvDataProvider()
# Add more providers here in the future:
# elif name == "postgres": from .pg_provider import PgDataProvider; return PgDataProvider(...)
# elif name == "bigquery": ...
# Fallback
from .csv_provider import CsvDataProvider
return CsvDataProvider()
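
# Usage sketch (the factory result is cached; set DATA_PROVIDER=csv or leave unset):
#   provider = get_provider()
#   df = provider.indicator("imss_wages_2023", ["CDMX", "GDL", "MTY"])  # city, value, N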

View File

@ -0,0 +1,117 @@
from __future__ import annotations
from pathlib import Path
from typing import List, Dict, Any
import pandas as pd
from django.conf import settings
from .base import DataProvider
class CsvDataProvider(DataProvider):
"""
Simple provider reading local CSVs under BASE_DIR/data/.
Expected layout (current):
SAMI:
data/sami/population.csv -> cols: city, N
data/sami/{indicator}.csv -> cols: city, value
Sites (competition POIs - DENUE-like):
data/denue/{city}_{business}.csv -> cols: name, lat, lon, category (name/category optional)
Sites (demand pop grid):
data/popgrid/{city}_grid.csv -> cols: cell_id, lat, lon, pop
"""
def __init__(self, base_dir: str | Path | None = None):
self.base_dir = Path(base_dir) if base_dir else Path(settings.BASE_DIR) / "data"
def _exists(self, *parts: str) -> bool:
return (self.base_dir.joinpath(*parts)).exists()
# ---------- Common ----------
def health(self) -> Dict[str, Any]:
missing = []
# We can only check basics here.
if not self._exists("sami", "population.csv"):
missing.append("data/sami/population.csv")
ok = len(missing) == 0
return {
"provider": "csv-data",
"ok": ok,
"base_dir": str(self.base_dir),
"missing": missing,
}
# ---------- SAMI ----------
def indicator(self, indicator: str, cities: List[str]) -> pd.DataFrame:
pop_path = self.base_dir / "sami" / "population.csv"
ind_path = self.base_dir / "sami" / f"{indicator}.csv"
pop = pd.read_csv(pop_path) # cols: city, N
ind = pd.read_csv(ind_path) # cols: city, value
df = pd.merge(ind, pop, on="city", how="inner")
if cities:
df = df[df["city"].isin(cities)].copy()
# Ensure numeric
df["value"] = pd.to_numeric(df["value"], errors="coerce")
df["N"] = pd.to_numeric(df["N"], errors="coerce")
df = df.dropna(subset=["value", "N"])
return df[["city", "value", "N"]]
# ---------- Sites: competition (POIs) ----------
def denue(self, city: str, business: str) -> pd.DataFrame:
"""
Reads POIs from data/denue/{city}_{business}.csv
Expected columns:
- lat (float), lon (float)
- name (str, optional), category (str, optional)
"""
path = self.base_dir / "denue" / f"{city}_{business}.csv"
if not path.exists():
return pd.DataFrame(columns=["name", "lat", "lon", "category"])
df = pd.read_csv(path)
# minimal columns
if "lat" not in df.columns or "lon" not in df.columns:
return pd.DataFrame(columns=["name", "lat", "lon", "category"])
# quick cleaning
df["lat"] = pd.to_numeric(df["lat"], errors="coerce")
df["lon"] = pd.to_numeric(df["lon"], errors="coerce")
df = df.dropna(subset=["lat", "lon"]).copy()
if "name" not in df.columns:
df["name"] = None
if "category" not in df.columns:
df["category"] = business
return df[["name", "lat", "lon", "category"]]
# ---------- Sites: demand (population grid) ----------
def popgrid(self, city: str) -> pd.DataFrame:
"""
Loads population grid points from data/popgrid/{city}_grid.csv
Required columns: lat, lon, pop
Optional: cell_id
"""
path = self.base_dir / "popgrid" / f"{city}_grid.csv"
if not path.exists():
return pd.DataFrame(columns=["cell_id", "lat", "lon", "pop"])
df = pd.read_csv(path)
for col in ["lat", "lon", "pop"]:
if col not in df.columns:
return pd.DataFrame(columns=["cell_id", "lat", "lon", "pop"])
# numeric & drop invalid
df["lat"] = pd.to_numeric(df["lat"], errors="coerce")
df["lon"] = pd.to_numeric(df["lon"], errors="coerce")
df["pop"] = pd.to_numeric(df["pop"], errors="coerce")
df = df.dropna(subset=["lat", "lon", "pop"]).copy()
if "cell_id" not in df.columns:
df["cell_id"] = None
return df[["cell_id", "lat", "lon", "pop"]]
# ---------- Optional: city boundary ----------
def city_boundary(self, city: str) -> Dict[str, Any]:
# Not implemented yet; return empty dict.
return {}
# ---------- Backwards compatibility alias ----------
# Some earlier code used "grid(city)" for population grid.
def grid(self, city: str) -> pd.DataFrame:
return self.popgrid(city)
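
# Usage sketch against the sample data bundled with this commit
# (data/denue/CDMX_cafe.csv, data/popgrid/CDMX_grid.csv):
#   provider = CsvDataProvider()           # defaults to BASE_DIR/data
#   pois = provider.denue("CDMX", "cafe")  # -> name, lat, lon, category
#   grid = provider.popgrid("CDMX")        # -> cell_id, lat, lon, pop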

6
pxy_de/urls.py Normal file
View File

@ -0,0 +1,6 @@
from django.urls import path
from pxy_de import api as de_api
urlpatterns = [
path("api/de/health", de_api.de_health, name="de_health"),
]

View File

@ -1,4 +1,9 @@
# pxy_meta_pages/admin.py
from __future__ import annotations
import json
from typing import Optional, Dict, Any
import requests
from django.conf import settings
from django.contrib import admin, messages
@ -6,7 +11,14 @@ from django.contrib import admin, messages
from .models import FacebookPageAssistant, EventType, BotInteraction
from .services import FacebookService
# Required fields we want on every Page
# -----------------------------------------------------------------------------
# Config
# -----------------------------------------------------------------------------
FACEBOOK_API_VERSION: str = getattr(settings, "FACEBOOK_API_VERSION", "v22.0")
APP_ID: Optional[str] = getattr(settings, "FACEBOOK_APP_ID", None)
# Fields we require on every Page subscription (Page Feed + Messenger)
REQUIRED_FIELDS = [
# Page feed (comments/shares/mentions)
"feed",
@ -20,32 +32,67 @@ REQUIRED_FIELDS = [
"message_echoes",
]
APP_ID = getattr(settings, "FACEBOOK_APP_ID", None) # optional (nice-to-have for filtering)
def _graph_get(url, params):
r = requests.get(url, params=params, timeout=15)
# Graph often returns 200 even for failures with {"error":{...}}
data = r.json() if r.content else {}
if "error" in data:
# -----------------------------------------------------------------------------
# Small Graph helpers with consistent error handling
# -----------------------------------------------------------------------------
def _graph_get(url: str, params: Dict[str, Any]) -> Dict[str, Any]:
"""GET wrapper that raises RequestException on Graph errors."""
resp = requests.get(url, params=params, timeout=15)
data = resp.json() if resp.content else {}
if isinstance(data, dict) and "error" in data:
# Normalize to RequestException so callers can unify handling
raise requests.RequestException(json.dumps(data["error"]))
r.raise_for_status()
return data
resp.raise_for_status()
return data or {}
def _graph_post(url, data):
r = requests.post(url, data=data, timeout=15)
data = r.json() if r.content else {}
if "error" in data:
raise requests.RequestException(json.dumps(data["error"]))
r.raise_for_status()
return data
def _graph_post(url: str, data: Dict[str, Any]) -> Dict[str, Any]:
"""POST wrapper that raises RequestException on Graph errors."""
resp = requests.post(url, data=data, timeout=15)
payload = resp.json() if resp.content else {}
if isinstance(payload, dict) and "error" in payload:
raise requests.RequestException(json.dumps(payload["error"]))
resp.raise_for_status()
return payload or {}
def _decode_graph_error(e: requests.RequestException) -> str:
"""
Attempt to pretty-print a Graph API error dict, else return the raw message.
"""
msg = str(e)
try:
err = json.loads(msg)
# Typical Graph error shape
code = err.get("code")
sub = err.get("error_subcode")
text = err.get("message", "Graph error")
return f"Graph error (code={code}, subcode={sub}): {text}"
except Exception:
return msg
def _get_page_token(fb_service: FacebookService, page_id: str) -> Optional[str]:
"""
Works with either a public get_page_access_token or the private _get_page_access_token.
"""
getter = getattr(fb_service, "get_page_access_token", None)
if callable(getter):
return getter(page_id)
private_getter = getattr(fb_service, "_get_page_access_token", None)
if callable(private_getter):
return private_getter(page_id)
return None
# -----------------------------------------------------------------------------
# Admins
# -----------------------------------------------------------------------------
@admin.register(FacebookPageAssistant)
class FacebookPageAssistantAdmin(admin.ModelAdmin):
"""
Admin for wiring a Facebook Page to your assistant and managing webhook subs.
Admin for wiring a Facebook Page to an OpenAI assistant and managing webhook subscriptions.
"""
list_display = (
"page_name",
@ -65,75 +112,63 @@ class FacebookPageAssistantAdmin(admin.ModelAdmin):
"probe_messenger_access",
]
# ----- small counters -----
def comment_count(self, obj):
# ----- Counters ----------------------------------------------------------
def comment_count(self, obj: FacebookPageAssistant) -> int:
return obj.events.filter(event_type__code="comment").count()
comment_count.short_description = "Comments"
def share_count(self, obj):
def share_count(self, obj: FacebookPageAssistant) -> int:
return obj.events.filter(event_type__code="share").count()
share_count.short_description = "Shares"
# =====================================================================
# ACTION 1: Ensure required fields are subscribed (feed + Messenger)
# =====================================================================
# ----- Action 1: Ensure required fields (feed + Messenger) --------------
def ensure_feed_and_messenger_subscription(self, request, queryset):
"""
For each selected Page:
- fetch Page Access Token with FacebookService
- read current subscribed_fields
- add any missing REQUIRED_FIELDS
1) Fetch the Page Access Token via FacebookService.
2) Read current subscribed_fields.
3) Add any missing REQUIRED_FIELDS in a single POST.
"""
fb_service = FacebookService(user_access_token=settings.PAGE_ACCESS_TOKEN)
for page in queryset:
try:
# 1) token
page_token = getattr(fb_service, "get_page_access_token", None)
if callable(page_token):
page_access_token = page_token(page.page_id)
else:
# fallback to private method name in case your svc only exposes _get_page_access_token
page_access_token = fb_service._get_page_access_token(page.page_id) # noqa
page_access_token = _get_page_token(fb_service, page.page_id)
if not page_access_token:
self.message_user(
request,
f"[{page.page_name}] Unable to get Page Access Token.",
f"[{page.page_name}] Unable to obtain Page Access Token.",
level=messages.ERROR,
)
continue
# 2) read existing
url_list = f"https://graph.facebook.com/v22.0/{page.page_id}/subscribed_apps"
data = _graph_get(url_list, {"access_token": page_access_token}) or {}
entries = data.get("data", [])
list_url = f"https://graph.facebook.com/{FACEBOOK_API_VERSION}/{page.page_id}/subscribed_apps"
current_data = _graph_get(list_url, {"access_token": page_access_token})
entries = current_data.get("data", [])
# pick this app's entry (if APP_ID known), else first entry if any
# If APP_ID is known, narrow to our app row; otherwise use first row if present
app_entry = None
if APP_ID:
app_entry = next((e for e in entries if str(e.get("id")) == str(APP_ID)), None)
if app_entry is None and entries:
app_entry = entries[0]
current = set(app_entry.get("subscribed_fields", [])) if app_entry else set()
current_fields = set(app_entry.get("subscribed_fields", [])) if app_entry else set()
required = set(REQUIRED_FIELDS)
union_fields = sorted(current | required)
# 3) update only if needed
if required - current:
_graph_post(
f"https://graph.facebook.com/v22.0/{page.page_id}/subscribed_apps",
{
"subscribed_fields": ",".join(union_fields),
if required - current_fields:
new_fields_csv = ",".join(sorted(current_fields | required))
_graph_post(list_url, {
"subscribed_fields": new_fields_csv,
"access_token": page_access_token,
},
)
})
page.is_subscribed = True
page.save(update_fields=["is_subscribed"])
self.message_user(
request,
f"[{page.page_name}] Subscribed/updated. Fields now include: {', '.join(union_fields)}",
f"[{page.page_name}] Subscribed/updated. Now includes: {new_fields_csv}",
level=messages.SUCCESS,
)
else:
@ -141,65 +176,45 @@ class FacebookPageAssistantAdmin(admin.ModelAdmin):
page.save(update_fields=["is_subscribed"])
self.message_user(
request,
f"[{page.page_name}] Already has all required fields: {', '.join(sorted(current))}",
f"[{page.page_name}] Already has required fields.",
level=messages.INFO,
)
except requests.RequestException as e:
# try to decode Graph error for clarity
msg = str(e)
try:
err = json.loads(msg)
code = err.get("code")
sub = err.get("error_subcode")
err_msg = err.get("message", "Graph error")
self.message_user(
request,
f"[{page.page_name}] Graph error (code={code}, subcode={sub}): {err_msg}",
f"[{page.page_name}] {_decode_graph_error(e)}",
level=messages.ERROR,
)
except Exception:
self.message_user(
request,
f"[{page.page_name}] Subscription failed: {msg}",
level=messages.ERROR,
)
except Exception as e:
self.message_user(
request, f"[{page.page_name}] Unexpected error: {e}", level=messages.ERROR
request,
f"[{page.page_name}] Unexpected error: {e}",
level=messages.ERROR,
)
ensure_feed_and_messenger_subscription.short_description = "Ensure Webhooks (feed + Messenger) on selected Pages"
ensure_feed_and_messenger_subscription.short_description = "Ensure Webhooks (feed + Messenger)"
# =====================================================================
# ACTION 2: Check status (show exact fields)
# =====================================================================
# ----- Action 2: Check subscription status ------------------------------
def check_subscription_status(self, request, queryset):
"""
Shows the actual subscribed_fields for each Page.
Shows the exact subscribed_fields currently active for each Page.
"""
fb_service = FacebookService(user_access_token=settings.PAGE_ACCESS_TOKEN)
for page in queryset:
try:
# token
page_token = getattr(fb_service, "get_page_access_token", None)
if callable(page_token):
page_access_token = page_token(page.page_id)
else:
page_access_token = fb_service._get_page_access_token(page.page_id) # noqa
page_access_token = _get_page_token(fb_service, page.page_id)
if not page_access_token:
self.message_user(
request,
f"[{page.page_name}] Unable to get Page Access Token.",
f"[{page.page_name}] Unable to obtain Page Access Token.",
level=messages.ERROR,
)
continue
url = f"https://graph.facebook.com/v22.0/{page.page_id}/subscribed_apps"
data = _graph_get(url, {"access_token": page_access_token}) or {}
url = f"https://graph.facebook.com/{FACEBOOK_API_VERSION}/{page.page_id}/subscribed_apps"
data = _graph_get(url, {"access_token": page_access_token})
entries = data.get("data", [])
app_entry = None
@ -209,66 +224,68 @@ class FacebookPageAssistantAdmin(admin.ModelAdmin):
app_entry = entries[0]
fields = app_entry.get("subscribed_fields", []) if app_entry else []
has_required = set(REQUIRED_FIELDS).issubset(set(fields))
page.is_subscribed = bool(fields)
page.save(update_fields=["is_subscribed"])
level = messages.SUCCESS if has_required else messages.WARNING
has_all = set(REQUIRED_FIELDS).issubset(set(fields))
level = messages.SUCCESS if has_all else messages.WARNING
self.message_user(
request,
f"[{page.page_name}] Subscribed fields: {', '.join(fields) or '(none)'}",
f"[{page.page_name}] Subscribed fields: {', '.join(fields) if fields else '(none)'}",
level=level,
)
except requests.RequestException as e:
self.message_user(
request, f"[{page.page_name}] Check failed: {e}", level=messages.ERROR
request,
f"[{page.page_name}] {_decode_graph_error(e)}",
level=messages.ERROR,
)
except Exception as e:
self.message_user(
request,
f"[{page.page_name}] Unexpected error: {e}",
level=messages.ERROR,
)
check_subscription_status.short_description = "Check webhook subscription fields on selected Pages"
check_subscription_status.short_description = "Check webhook subscription fields"
# =====================================================================
# ACTION 3: Probe Messenger access (lightweight)
# =====================================================================
# ----- Action 3: Probe Messenger access ---------------------------------
def probe_messenger_access(self, request, queryset):
"""
Tries /{PAGE_ID}/conversations to confirm Messenger perms are usable.
(If app is in Dev Mode, only app roles will appear here.)
Light probe for Messenger perms using /{PAGE_ID}/conversations.
(In Dev Mode, you'll only see app-role users here.)
"""
fb_service = FacebookService(user_access_token=settings.PAGE_ACCESS_TOKEN)
for page in queryset:
try:
page_token = getattr(fb_service, "get_page_access_token", None)
if callable(page_token):
page_access_token = page_token(page.page_id)
else:
page_access_token = fb_service._get_page_access_token(page.page_id) # noqa
page_access_token = _get_page_token(fb_service, page.page_id)
if not page_access_token:
self.message_user(
request,
f"[{page.page_name}] Unable to get Page Access Token.",
f"[{page.page_name}] Unable to obtain Page Access Token.",
level=messages.ERROR,
)
continue
url = f"https://graph.facebook.com/v22.0/{page.page_id}/conversations"
url = f"https://graph.facebook.com/{FACEBOOK_API_VERSION}/{page.page_id}/conversations"
data = _graph_get(url, {"access_token": page_access_token, "limit": 1})
total = len(data.get("data", []))
self.message_user(
request,
(
f"[{page.page_name}] Messenger probe OK. Conversations sample: {total}. "
"Note: in Dev Mode youll only see app-role users here.",
"Note: in Dev Mode youll only see app-role users."
),
level=messages.SUCCESS,
)
except requests.RequestException as e:
# common Graph codes for perms/token issues:
# 190 invalid/expired token, 200 permissions error, 10 permission denied
msg = str(e)
msg = _decode_graph_error(e)
# Add quick hints for common codes
hint = ""
if any(x in msg for x in ('"code": 190', "Invalid OAuth 2.0")):
if '"code": 190' in msg or "Invalid OAuth 2.0" in msg:
hint = " (Token invalid/expired)"
elif '"code": 200' in msg:
hint = " (Permissions error: check pages_messaging & pages_manage_metadata; app roles or Advanced Access)"
@ -279,8 +296,14 @@ class FacebookPageAssistantAdmin(admin.ModelAdmin):
f"[{page.page_name}] Messenger probe failed: {msg}{hint}",
level=messages.ERROR,
)
except Exception as e:
self.message_user(
request,
f"[{page.page_name}] Unexpected error: {e}",
level=messages.ERROR,
)
probe_messenger_access.short_description = "Probe Messenger access on selected Pages"
probe_messenger_access.short_description = "Probe Messenger access"
@admin.register(EventType)

BIN
pxy_routing.zip Normal file

Binary file not shown.

0
pxy_routing/__init__.py Normal file
View File

3
pxy_routing/admin.py Normal file
View File

@ -0,0 +1,3 @@
from django.contrib import admin
# Register your models here.

7
pxy_routing/api/urls.py Normal file
View File

@ -0,0 +1,7 @@
from django.urls import path
from . import views
urlpatterns = [
path("api/routing/health", views.health, name="routing_health"),
path("api/routing/isochrone", views.isochrone, name="routing_isochrone"),
]

76
pxy_routing/api/views.py Normal file
View File

@ -0,0 +1,76 @@
# pxy_routing/api/views.py
from __future__ import annotations
import uuid
from rest_framework.decorators import api_view, throttle_classes
from rest_framework.response import Response
from rest_framework import status
from rest_framework.throttling import ScopedRateThrottle
from pxy_routing.services.factory import get_routing_provider
def _err(code: str, message: str, hint: str | None = None, http_status: int = 400):
return Response(
{
"ok": False,
"code": code,
"message": message,
"hint": hint,
"trace_id": str(uuid.uuid4()),
},
status=http_status,
)
@api_view(["GET"])
@throttle_classes([ScopedRateThrottle])
def routing_health(request):
provider = get_routing_provider()
try:
info = provider.health() or {}
ok = bool(info.get("ok", False))
return Response(
{
"ok": ok,
"provider": info.get("provider"),
**info, # includes base_url/profile/reason
}
)
except Exception as e:
return _err(
code="routing_health_error",
message="Routing health check failed",
hint=str(e),
http_status=status.HTTP_502_BAD_GATEWAY,
)
@api_view(["POST"])
@throttle_classes([ScopedRateThrottle])
def routing_isochrone(request):
data = request.data or {}
center = data.get("center")
minutes = data.get("minutes")
if not isinstance(center, (list, tuple)) or len(center) != 2:
return _err("invalid", "center must be [lat, lon]", http_status=status.HTTP_400_BAD_REQUEST)
try:
minutes = int(minutes)
except Exception:
return _err("invalid", "minutes must be an integer", http_status=status.HTTP_400_BAD_REQUEST)
provider = get_routing_provider()
try:
feat = provider.isochrone(tuple(center), minutes)
return Response(feat)
except Exception as e:
# Map to a consistent envelope for upstream/provider issues
return _err(
code="routing_error",
message="Failed to compute isochrone",
hint=str(e),
http_status=status.HTTP_502_BAD_GATEWAY,
)
# ScopedRateThrottle reads throttle_scope from the view class; attributes assigned
# inside the function body are never seen by DRF, so attach the scopes to the
# wrapped APIView that @api_view builds (exposed as .cls on the returned view).
routing_health.cls.throttle_scope = "routing_health"
routing_isochrone.cls.throttle_scope = "routing_isochrone"
# Backward-compatible aliases expected by urls.py
health = routing_health
isochrone = routing_isochrone
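
# Illustrative payload for POST /api/routing/isochrone (center is [lat, lon]):
#   {"center": [19.4326, -99.1332], "minutes": 10}
# The response is the selected provider's GeoJSON-like Feature.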

6
pxy_routing/apps.py Normal file
View File

@ -0,0 +1,6 @@
from django.apps import AppConfig
class PxyRoutingConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'pxy_routing'

View File

3
pxy_routing/models.py Normal file
View File

@ -0,0 +1,3 @@
from django.db import models
# Create your models here.

View File

@ -0,0 +1,18 @@
# pxy_routing/services/__init__.py
from .factory import get_routing_provider
from .provider import (
health,
isochrone,
rp,
RoutingProvider,
NullRoutingProvider,
)
__all__ = [
"get_routing_provider",
"health",
"isochrone",
"rp",
"RoutingProvider",
"NullRoutingProvider",
]

View File

@ -0,0 +1,43 @@
from __future__ import annotations
from typing import Dict, Any, Tuple, List
import os
from pyproj import Geod
LatLon = Tuple[float, float]
class CrowFlyRoutingProvider:
"""
Isócrona aproximada como círculo geodésico:
radio = velocidad_km_h * (minutos / 60.0)
Útil para demos sin depender de motores externos.
"""
def __init__(self) -> None:
self._geod = Geod(ellps="WGS84")
# Puedes ajustar la velocidad por entorno (urbana 2535 km/h es razonable)
self._speed_kmh = float(os.getenv("ROUTING_CROWFLY_SPEED_KMH", "30"))
def health(self) -> Dict[str, Any]:
return {"provider": "crowfly", "ok": True, "speed_kmh": self._speed_kmh}
def isochrone(self, center: LatLon, minutes: int) -> Dict[str, Any]:
lat, lon = float(center[0]), float(center[1])
# distance in meters
km = self._speed_kmh * (float(minutes) / 60.0)
dist_m = km * 1000.0
# approximate polygon (5-degree steps from 360 // 64, i.e. 72 vertices)
coords: List[Tuple[float, float]] = []
for b in range(0, 360, 360 // 64):
lon2, lat2, _ = self._geod.fwd(lon, lat, b, dist_m)
coords.append((lon2, lat2))
coords.append(coords[0])  # close the ring
# GeoJSON-like
return {
"type": "Feature",
"properties": {"minutes": minutes, "speed_kmh": self._speed_kmh},
"geometry": {
"type": "Polygon",
"coordinates": [coords], # lon,lat
},
}
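
# Worked example: at the default 30 km/h, a 15-minute band is a circle of radius
# 30 * (15 / 60) = 7.5 km. Sketch (center is the CDMX default used elsewhere here):
#   provider = CrowFlyRoutingProvider()
#   feat = provider.isochrone((19.4326, -99.1332), 15)
#   feat["geometry"]["type"]  # "Polygon"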

View File

@ -0,0 +1,22 @@
# pxy_routing/services/factory.py
from __future__ import annotations
import os
from functools import lru_cache
from .crowfly_provider import CrowFlyRoutingProvider
from .ors_provider import ORSRoutingProvider
@lru_cache(maxsize=1)
def get_routing_provider():
"""
Select routing provider by env:
ROUTING_PROVIDER = ors | crowfly (default: crowfly)
"""
name = (os.getenv("ROUTING_PROVIDER") or "crowfly").strip().lower()
if name == "ors":
# ORS_* knobs are read inside ORSRoutingProvider
return ORSRoutingProvider()
# Fallback/default
return CrowFlyRoutingProvider()

View File

@ -0,0 +1,204 @@
# pxy_routing/services/ors_provider.py
from __future__ import annotations
from typing import Any, Dict, Iterable, List, Tuple
import math, os, time, random, requests
# Optional graceful fallback
try:
from .crowfly_provider import CrowFlyRoutingProvider
except Exception:
CrowFlyRoutingProvider = None # fallback disabled if not available
LatLon = Tuple[float, float] # (lat, lon)
class ORSRoutingProvider:
"""
ORS isochrones with retries/backoff and optional crow-fly fallback.
Env:
ORS_BASE_URL e.g., https://api.openrouteservice.org
ORS_API_KEY key (omit/blank for self-host)
ORS_PROFILE driving-car | cycling-regular | foot-walking (default: driving-car)
ORS_TIMEOUT_S request timeout seconds (default: 5)
ORS_GENERALIZE generalization in meters for polygons (optional, e.g., 20)
ORS_MAX_RANGE safety cap in minutes (optional; e.g., 45)
# New hardening knobs:
ORS_RETRY number of retries on 429/5xx (default: 2)
ORS_BACKOFF_BASE_S base seconds for exponential backoff (default: 0.8)
ORS_FALLBACK set to "crowfly" to degrade gracefully on errors
"""
def __init__(
self,
base_url: str | None = None,
api_key: str | None = None,
profile: str = "driving-car",
timeout_s: int = 5,
generalize: int | None = None,
max_range_min: int | None = None,
):
self.base_url = (base_url or os.getenv("ORS_BASE_URL") or "").rstrip("/")
self.api_key = api_key if api_key is not None else os.getenv("ORS_API_KEY", "")
self.profile = os.getenv("ORS_PROFILE", profile)
self.timeout_s = int(os.getenv("ORS_TIMEOUT_S", str(timeout_s)))
gen = os.getenv("ORS_GENERALIZE")
self.generalize = int(gen) if (gen and gen.isdigit()) else generalize
mr = os.getenv("ORS_MAX_RANGE")
self.max_range_min = int(mr) if (mr and mr.isdigit()) else max_range_min
# Hardening knobs
self.retries = int(os.getenv("ORS_RETRY", "2"))
self.backoff_base = float(os.getenv("ORS_BACKOFF_BASE_S", "0.8"))
self.fallback_mode = (os.getenv("ORS_FALLBACK") or "").strip().lower()
self._fallback = None
if self.fallback_mode == "crowfly" and CrowFlyRoutingProvider:
self._fallback = CrowFlyRoutingProvider()
if not self.base_url:
raise ValueError("ORS_BASE_URL is required for ORSRoutingProvider")
self._iso_url = f"{self.base_url}/v2/isochrones/{self.profile}"
self._headers = {
"Content-Type": "application/json; charset=utf-8",
"Accept": "application/json, application/geo+json",
}
if self.api_key:
self._headers["Authorization"] = self.api_key
# ---------- internals ----------
def _post(self, url: str, payload: Dict[str, Any]) -> requests.Response:
attempts = 1 + max(0, self.retries)
r = None
for i in range(attempts):
r = requests.post(url, json=payload, headers=self._headers, timeout=self.timeout_s)
if r.status_code in (429, 502, 503, 504) and i < attempts - 1:
delay = self.backoff_base * (2 ** i) * (0.75 + 0.5 * random.random())
time.sleep(delay)
continue
return r
return r # type: ignore
# ---------- public API ----------
def health(self) -> Dict[str, Any]:
try:
lat, lon = 19.4326, -99.1332
payload = {"locations": [[lon, lat]], "range": [60]}
if self.generalize:
payload["generalize"] = self.generalize
r = self._post(self._iso_url, payload)
ok = (r.status_code == 200)
return {"provider": "ors", "ok": ok, "profile": self.profile,
"base_url": self.base_url, "reason": None if ok else f"http {r.status_code}"}
except Exception as e:
return {"provider": "ors", "ok": False, "profile": self.profile,
"base_url": self.base_url, "reason": f"{type(e).__name__}: {e}"}
def isochrone(self, center: LatLon, minutes: int) -> Dict[str, Any]:
if self.max_range_min and minutes > self.max_range_min:
raise ValueError(f"minutes exceeds ORS_MAX_RANGE ({minutes} > {self.max_range_min})")
lat, lon = center
payload = {"locations": [[lon, lat]], "range": [int(minutes) * 60]}
if self.generalize:
payload["generalize"] = self.generalize
r = self._post(self._iso_url, payload)
if r.status_code != 200:
if self._fallback is not None:
feat = self._fallback.isochrone(center, minutes)
feat["properties"]["provider"] = "ors_fallback_crowfly"
return feat
hint = {
400: "Bad request (profile/range/params).",
401: "Unauthorized (check ORS_API_KEY).",
403: "Forbidden (quota/key).",
404: "Profile/endpoint not found.",
413: "Payload too large.",
422: "Unprocessable (non-routable location or bad range).",
429: "Rate limited.",
500: "Server error.",
502: "Bad gateway.",
503: "Service unavailable.",
504: "Gateway timeout.",
}.get(r.status_code, "Unexpected error.")
raise RuntimeError(f"ORS isochrone error: HTTP {r.status_code}. {hint}")
data = r.json()
geom = _largest_polygon_geometry_from_ors(data)
if not geom:
if self._fallback is not None:
feat = self._fallback.isochrone(center, minutes)
feat["properties"]["provider"] = "ors_empty_fallback_crowfly"
return feat
raise RuntimeError("ORS returned no polygon geometry.")
return {
"type": "Feature",
"properties": {"provider": "ors", "profile": self.profile, "minutes": minutes, "center": [lon, lat]},
"geometry": geom,
}
# Batch multiple ranges in one call (reduces rate-limit pressure)
def isochrones(self, center: LatLon, minutes_list: List[int]) -> List[Dict[str, Any]]:
lat, lon = center
secs = [int(m) * 60 for m in minutes_list]
payload = {"locations": [[lon, lat]], "range": secs}
if self.generalize:
payload["generalize"] = self.generalize
r = self._post(self._iso_url, payload)
if r.status_code != 200:
# degrade by single-calls if fallback exists
if self._fallback is not None:
return [self.isochrone(center, m) for m in minutes_list]
raise RuntimeError(f"ORS isochrones error: HTTP {r.status_code}")
data = r.json()
feats: List[Dict[str, Any]] = []
for feat in (data.get("features") or []):
geom = _largest_polygon_geometry_from_ors({"features": [feat]})
if not geom:
continue
feats.append({
"type": "Feature",
"properties": {"provider": "ors", "profile": self.profile},
"geometry": geom
})
return feats
# ---------- helpers ----------
def _largest_polygon_geometry_from_ors(fc: Dict[str, Any]) -> Dict[str, Any] | None:
features = fc.get("features") or []
best_geom, best_area = None, -1.0
for feat in features:
geom = feat.get("geometry") or {}
gtype = geom.get("type")
coords = geom.get("coordinates")
if not coords:
continue
if gtype == "Polygon":
area = _polygon_area_m2(coords[0])
if area > best_area:
best_area, best_geom = area, {"type":"Polygon","coordinates":coords}
elif gtype == "MultiPolygon":
for poly in coords:
if not poly:
continue
ring = poly[0]  # outer ring only
area = _polygon_area_m2(ring)
if area > best_area:
best_area, best_geom = area, {"type": "Polygon", "coordinates": [ring]}
return best_geom
def _polygon_area_m2(ring_lonlat: Iterable[Iterable[float]]) -> float:
pts = list(ring_lonlat)
if len(pts) < 3:
return 0.0
if pts[0] != pts[-1]:
pts = pts + [pts[0]]  # close the ring for the shoelace sum
def merc(lon: float, lat: float) -> Tuple[float, float]:
R = 6378137.0
x = math.radians(lon) * R
lat = max(min(lat, 89.9999), -89.9999)
y = math.log(math.tan(math.pi/4 + math.radians(lat)/2)) * R
return x, y
xs, ys = [], []
for lon, lat in pts:
x, y = merc(float(lon), float(lat))
xs.append(x)
ys.append(y)
area = 0.0
for i in range(len(xs)-1):
area += xs[i]*ys[i+1] - xs[i+1]*ys[i]
return abs(area) * 0.5
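
# Usage sketch (assumes env is configured; values are illustrative):
#   ORS_BASE_URL=https://api.openrouteservice.org  ORS_API_KEY=...  ORS_FALLBACK=crowfly
#   provider = ORSRoutingProvider()
#   feat = provider.isochrone((19.4326, -99.1332), 10)
#   feat["properties"]["provider"]  # "ors", or "ors_fallback_crowfly" when degraded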

View File

@ -0,0 +1,30 @@
# pxy_routing/services/provider.py
from __future__ import annotations
from typing import Any, Dict, Tuple
from .factory import get_routing_provider
# --- Legacy classes kept for backward compatibility ---
class RoutingProvider:
def health(self) -> Dict[str, Any]:
raise NotImplementedError
def isochrone(self, center: Tuple[float, float], minutes: int) -> Dict[str, Any]:
raise NotImplementedError
class NullRoutingProvider(RoutingProvider):
def health(self) -> Dict[str, Any]:
return {"provider": "null", "ok": False, "reason": "Routing provider not configured"}
def isochrone(self, center: Tuple[float, float], minutes: int) -> Dict[str, Any]:
raise NotImplementedError("Routing provider not configured")
# --- Functional API kept for existing callers ---
def health() -> Dict[str, Any]:
return get_routing_provider().health()
def isochrone(center: Tuple[float, float], minutes: int) -> Dict[str, Any]:
return get_routing_provider().isochrone(center, minutes)
# Optional convenience instance some code may import as 'rp'
rp = get_routing_provider()

3
pxy_routing/tests.py Normal file
View File

@ -0,0 +1,3 @@
from django.test import TestCase
# Create your tests here.

3
pxy_routing/views.py Normal file
View File

@ -0,0 +1,3 @@
from django.shortcuts import render
# Create your views here.

0
pxy_sami/__init__.py Normal file
View File

3
pxy_sami/admin.py Normal file
View File

@ -0,0 +1,3 @@
from django.contrib import admin
# Register your models here.

8
pxy_sami/api/urls.py Normal file
View File

@ -0,0 +1,8 @@
# pxy_sami/api/urls.py
from django.urls import path
from .views import sami_health, sami_run
urlpatterns = [
path("api/sami/health", sami_health, name="sami_health"),
path("api/sami/run", sami_run, name="sami_run"),
]

64
pxy_sami/api/views.py Normal file
View File

@ -0,0 +1,64 @@
# pxy_sami/api/views.py
from __future__ import annotations
import uuid
from rest_framework.decorators import api_view, throttle_classes
from rest_framework.response import Response
from rest_framework import status
from rest_framework.throttling import ScopedRateThrottle
from pydantic import ValidationError
from pxy_contracts.contracts import SAMIRunRequest
from pxy_sami.estimators.sami_core import run_sami
from pxy_dashboard.utils.share import mint_sami_share_url
def _err(code: str, message: str, hint: str | None = None, http_status: int = 400):
return Response(
{"ok": False, "code": code, "message": message, "hint": hint, "trace_id": str(uuid.uuid4())},
status=http_status,
)
@api_view(["GET"])
@throttle_classes([ScopedRateThrottle])
def sami_health(request):
try:
# If you have deeper checks, put them here. Keep simple/fast.
return Response({"ok": True, "service": "sami"})
except Exception as e:
return _err(
"sami_health_error",
"SAMI health check failed",
str(e),
http_status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
@api_view(["POST"])
@throttle_classes([ScopedRateThrottle])
def sami_run(request):
try:
req = SAMIRunRequest.model_validate(request.data or {})
except ValidationError as ve:
return _err("invalid", "Validation error", hint=str(ve), http_status=status.HTTP_400_BAD_REQUEST)
try:
resp = run_sami(req)
data = resp.model_dump()
# Inject share URL (signed, expiring)
rid = data.get("run_id")
if rid:
meta = {
"indicator": data.get("indicator"),
"beta": data.get("beta"),
"r2": data.get("r2"),
"n": len(data.get("residuals") or []),
}
data["share_url"] = mint_sami_share_url(rid, meta=meta, request=request)
return Response(data)
except Exception as e:
return _err("sami_error", "SAMI run failed", hint=str(e), http_status=status.HTTP_502_BAD_GATEWAY)

6
pxy_sami/apps.py Normal file
View File

@ -0,0 +1,6 @@
from django.apps import AppConfig
class PxySamiConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'pxy_sami'

View File

View File

@ -0,0 +1,326 @@
# pxy_sami/estimators/sami_core.py
from __future__ import annotations
import uuid
from typing import List, Tuple
from pathlib import Path
import numpy as np
import pandas as pd
from django.conf import settings
# Headless backend for saving PNGs in containers
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
from pxy_contracts.contracts import (
SAMIRunRequest,
SAMIRunResponse,
SAMICity,
SAMIPoint, # ← for 56B interactive scatter
)
from pxy_contracts.version import SPEC_VERSION
from pxy_de.providers.base import get_provider
def _fit_loglog(df: pd.DataFrame) -> Tuple[float, float, float, np.ndarray]:
"""
Fits: log(value) = alpha + beta * log(N)   (OLS)
Returns: (alpha, beta, R^2, residuals)
"""
import statsmodels.api as sm
df = df.copy()
df["logY"] = np.log(df["value"].astype(float))
df["logN"] = np.log(df["N"].astype(float))
X = sm.add_constant(df["logN"].values)
y = df["logY"].values
model = sm.OLS(y, X).fit()
alpha = float(model.params[0])
beta = float(model.params[1])
r2 = float(model.rsquared) if model.nobs and model.nobs >= 2 else 0.0
resid = model.resid
return alpha, beta, r2, resid
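
# In math terms (std uses ddof=1; see run_sami below):
#   log Y_i = alpha + beta * log N_i + eps_i,   SAMI_i = eps_i / std(eps)
# i.e., SAMI is a z-scored OLS residual: positive means above the scaling trend.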
def _color_for_sami(s: float) -> str:
"""Colores sencillos: verde = arriba, rojo = abajo, gris = ~0."""
if s > 0.15:
return "#2ca02c" # green
if s < -0.15:
return "#d62728" # red
return "#7f7f7f" # gray
def _size_for_N(N: float, N_med: float) -> float:
"""Tamaño del punto ~ sqrt(N/mediana), acotado para demo."""
if N <= 0 or N_med <= 0:
return 60.0
s = 80.0 * np.sqrt(N / N_med)
return float(np.clip(s, 40.0, 300.0))
def _save_chart(
df: pd.DataFrame, alpha: float, beta: float, r2: float, run_id: str, indicator: str
) -> str | None:
"""
Crea un gráfico bonito para demo:
- Izquierda: scatter loglog con línea de regresión, puntos coloreados por SAMI,
tamaño por N, etiquetas de ciudades y textbox con ecuación.
- Derecha: ranking horizontal por SAMI (barh).
Devuelve URL pública (/media/...).
"""
try:
media_dir = Path(settings.MEDIA_ROOT) / "sami" # ensure Path
media_dir.mkdir(parents=True, exist_ok=True)
out_path = media_dir / f"sami_{run_id}.png"
# Data preparation
df = df.copy()
df["logN"] = np.log(df["N"].astype(float))
df["logY"] = np.log(df["value"].astype(float))
x = df["logN"].values
y = df["logY"].values
# Regression line
xs = np.linspace(x.min(), x.max(), 100)
ys = alpha + beta * xs
# Point sizes by N
N_med = float(df["N"].median())
sizes = [_size_for_N(n, N_med) for n in df["N"].values]
# Colors by SAMI
colors = [_color_for_sami(s) for s in df["sami"].values]
# Sort for the ranking panel
df_rank = df[["city", "sami"]].sort_values("sami", ascending=True).reset_index(drop=True)
# Figure
fig, axes = plt.subplots(
1, 2, figsize=(11, 4.5), gridspec_kw={"width_ratios": [1.35, 1.0]}
)
ax, axr = axes
# --- (L) Scatter loglog ---
ax.scatter(
x, y, s=sizes, c=colors, alpha=0.9, edgecolors="white", linewidths=0.8, zorder=3
)
ax.plot(xs, ys, linewidth=2.0, zorder=2)
# City labels (offset by SAMI sign)
for _, row in df.iterrows():
dx = 0.02 * (x.max() - x.min() if x.max() > x.min() else 1.0)
dy = 0.02 * (y.max() - y.min() if y.max() > y.min() else 1.0)
offset_y = dy if row["sami"] >= 0 else -dy
ax.annotate(
row["city"],
(row["logN"], row["logY"]),
xytext=(row["logN"] + dx, row["logY"] + offset_y),
fontsize=9,
color="#303030",
bbox=dict(boxstyle="round,pad=0.2", fc="white", ec="none", alpha=0.7),
arrowprops=dict(arrowstyle="-", lw=0.6, color="#888888", alpha=0.8),
)
# Equation and metrics textbox
eq_txt = (
f"log(Value) = {alpha:.2f} + {beta:.3f}·log(N)\n"
f"$R^2$ = {r2:.3f} n = {len(df)} indicador: {indicator}"
)
ax.text(
0.02,
0.98,
eq_txt,
transform=ax.transAxes,
va="top",
ha="left",
fontsize=9,
bbox=dict(boxstyle="round", fc="white", ec="#dddddd", alpha=0.9),
)
# Styling
ax.set_xlabel("log(N)")
ax.set_ylabel("log(Value)")
ax.grid(True, linestyle=":", linewidth=0.7, alpha=0.6)
for spine in ["top", "right"]:
ax.spines[spine].set_visible(False)
ax.set_title("Escalamiento urbano y SAMI", fontsize=12, pad=8)
# --- (R) Ranking SAMI (barh) ---
y_pos = np.arange(len(df_rank))
bar_colors = [_color_for_sami(s) for s in df_rank["sami"].values]
axr.barh(y_pos, df_rank["sami"].values, color=bar_colors, alpha=0.9)
axr.set_yticks(y_pos, labels=df_rank["city"].values, fontsize=9)
axr.set_xlabel("SAMI (z)")
axr.axvline(0, color="#444444", linewidth=0.8)
axr.grid(axis="x", linestyle=":", linewidth=0.7, alpha=0.6)
for spine in ["top", "right"]:
axr.spines[spine].set_visible(False)
axr.set_title("Ranking por desviación (SAMI)", fontsize=12, pad=8)
# Annotate the top and bottom bars
try:
top_city = df_rank.iloc[-1]
bottom_city = df_rank.iloc[0]
axr.text(
float(top_city["sami"]),
float(len(df_rank) - 1),
f"{top_city['sami']:.2f}",
va="center",
ha="left",
fontsize=9,
color="#2ca02c",
weight="bold",
)
axr.text(
float(bottom_city["sami"]),
0,
f"{bottom_city['sami']:.2f}",
va="center",
ha="left",
fontsize=9,
color="#d62728",
weight="bold",
)
except Exception:
pass
fig.tight_layout()
fig.savefig(out_path, dpi=144)
plt.close(fig)
return f"{settings.MEDIA_URL}sami/{out_path.name}"
except Exception:
return None
def run_sami(req: SAMIRunRequest) -> SAMIRunResponse:
"""
SAMI v2 (demo ready):
- Fit OLS loglog
- SAMI = resid / std(resid)
- Improved chart (scatter + ranking)
- 56B: return alpha + raw points for interactive scatter
"""
provider = get_provider()
warnings: List[str] = []
# 1) Load data
try:
df = provider.indicator(req.indicator, req.cities or [])
except Exception as e:
warnings.append(f"data_provider_error: {e}")
residuals = [SAMICity(city=c, sami=0.0, rank=i + 1) for i, c in enumerate(req.cities or [])]
return SAMIRunResponse(
model_id="sami-ols-v2.0.0",
spec_version=SPEC_VERSION,
run_id=str(uuid.uuid4()),
indicator=req.indicator,
beta=1.0,
r2=0.0,
residuals=residuals,
chart_url=None,
data_release=req.data_release,
warnings=warnings or ["stub implementation"],
)
# 2) Minimal cleaning
n_before = len(df)
df = df.replace([np.inf, -np.inf], np.nan).dropna(subset=["value", "N"])
df = df[(df["value"] > 0) & (df["N"] > 0)].copy()
n_after = len(df)
if n_before - n_after > 0:
warnings.append(f"filtered_nonpositive_or_nan: {n_before - n_after}")
if n_after < 2:
warnings.append("not_enough_data_for_fit")
residuals = [SAMICity(city=c, sami=0.0, rank=i + 1) for i, c in enumerate(req.cities or [])]
return SAMIRunResponse(
model_id="sami-ols-v2.0.0",
spec_version=SPEC_VERSION,
run_id=str(uuid.uuid4()),
indicator=req.indicator,
beta=1.0,
r2=0.0,
residuals=residuals,
chart_url=None,
data_release=req.data_release,
warnings=warnings,
)
# 3) Fit and SAMI
try:
alpha, beta, r2, resid = _fit_loglog(df)
except Exception as e:
warnings.append(f"ols_fit_error: {e}")
residuals = [SAMICity(city=c, sami=0.0, rank=i + 1) for i, c in enumerate(df["city"].tolist())]
return SAMIRunResponse(
model_id="sami-ols-v2.0.0",
spec_version=SPEC_VERSION,
run_id=str(uuid.uuid4()),
indicator=req.indicator,
beta=1.0,
r2=0.0,
residuals=residuals,
chart_url=None,
data_release=req.data_release,
warnings=warnings,
)
std = float(np.std(resid, ddof=1)) if len(resid) > 1 else 0.0
sami_vals = (resid / std) if std > 0 else np.zeros_like(resid)
# 56B: build raw points (with logs) for interactive scatter
df_pts = df.copy()
df_pts["log_value"] = np.log(df_pts["value"].astype(float))
df_pts["log_N"] = np.log(df_pts["N"].astype(float))
points: List[SAMIPoint] = []
for row in df_pts.itertuples(index=False):
try:
points.append(
SAMIPoint(
city=str(row.city),
value=float(row.value),
N=float(row.N),
log_value=float(row.log_value),
log_N=float(row.log_N),
)
)
except Exception:
# If any row is malformed, skip it; interactive chart is best-effort.
continue
out = df[["city", "value", "N"]].copy()
out["sami"] = sami_vals
out = out.sort_values("sami", ascending=False).reset_index(drop=True)
out["rank"] = np.arange(1, len(out) + 1)
residuals = [
SAMICity(city=row.city, sami=float(row.sami), rank=int(row.rank))
for row in out.itertuples(index=False)
]
# 4) Save the chart
run_id = str(uuid.uuid4())
chart_url = _save_chart(out, alpha, beta, r2, run_id, req.indicator)
if chart_url is None:
warnings.append("chart_save_failed")
else:
warnings.append("chart_saved")
warnings.append(f"fit_ok_n={n_after}")
return SAMIRunResponse(
model_id="sami-ols-v2.0.0",
spec_version=SPEC_VERSION,
run_id=run_id,
indicator=req.indicator,
beta=float(beta),
r2=float(r2),
residuals=residuals,
chart_url=chart_url,
data_release=req.data_release,
warnings=warnings,
# 56B extras
alpha=float(alpha),
points=points,
)

View File

3
pxy_sami/models.py Normal file
View File

@ -0,0 +1,3 @@
from django.db import models
# Create your models here.

View File

3
pxy_sami/tests.py Normal file
View File

@ -0,0 +1,3 @@
from django.test import TestCase
# Create your tests here.

View File

View File

3
pxy_sami/views.py Normal file
View File

@ -0,0 +1,3 @@
from django.shortcuts import render
# Create your views here.

0
pxy_sites/__init__.py Normal file
View File

26
pxy_sites/admin.py Normal file
View File

@ -0,0 +1,26 @@
from __future__ import annotations
from django.contrib import admin
from django.utils.html import format_html
from .models import SiteRun
@admin.register(SiteRun)
class SiteRunAdmin(admin.ModelAdmin):
list_display = ("created_at", "city", "business", "short_id", "preview", "download")
list_filter = ("city", "business", "created_at")
search_fields = ("search_id", "city", "business")
readonly_fields = ("created_at", "search_id", "city", "business", "payload_json", "result_json")
def short_id(self, obj: SiteRun) -> str:
return obj.search_id[:8]
def preview(self, obj: SiteRun):
if obj.map_url:
return format_html('<a href="{}" target="_blank">map</a>', obj.map_url)
return ""
def download(self, obj: SiteRun):
# if you added a PNG/CSV download endpoint, link it here later
url = (obj.result_json or {}).get("download_url")
if url:
return format_html('<a href="{}" target="_blank">download</a>', url)
return ""

View File

16
pxy_sites/api/urls.py Normal file
View File

@ -0,0 +1,16 @@
from django.urls import path
from .views import (
SitesHealth, SiteSearchView,
sites_download, sites_geojson, sites_preview, sites_recent_runs
)
urlpatterns = [
path("api/sites/health", SitesHealth.as_view(), name="sites_health"),
path("api/sites/search", SiteSearchView.as_view(), name="sites_search"),
# artifacts
path("api/sites/download/<str:kind>/<str:search_id>", sites_download, name="sites_download"),
path("api/sites/geojson/<str:kind>/<str:search_id>", sites_geojson, name="sites_geojson"),
path("api/sites/preview/<str:kind>/<str:search_id>", sites_preview, name="sites_preview"),
path("api/sites/runs/recent", sites_recent_runs, name="sites_recent_runs"),
]

304
pxy_sites/api/views.py Normal file
View File

@ -0,0 +1,304 @@
# pxy_sites/api/views.py
from __future__ import annotations
import io
import json
import logging
import time
import uuid
from pathlib import Path
import numpy as np
from PIL import Image
from django.conf import settings
from django.http import (
FileResponse,
HttpRequest,
HttpResponse,
HttpResponseNotFound,
JsonResponse,
)
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_GET
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.exceptions import ValidationError as DRFValidationError
from rest_framework import status
from rest_framework.throttling import ScopedRateThrottle # 👈 add
from pydantic import ValidationError as PydValidationError
from pxy_contracts.contracts.sites import SiteSearchRequest, SiteSearchResponse
from pxy_sites.models import SiteRun
from pxy_sites.services.site_scoring import run_site_search
from pxy_dashboard.utils.share import mint_sites_share_url
log = logging.getLogger(__name__)
# -------- uniform error envelope helpers --------
def _env(code: str, message: str, *, hint: str | None = None, http: int = 400):
return Response(
{"ok": False, "code": code, "message": message, "hint": hint, "trace_id": str(uuid.uuid4())},
status=http,
)
def _env_json(code: str, message: str, *, hint: str | None = None, http: int = 400):
return JsonResponse(
{"ok": False, "code": code, "message": message, "hint": hint, "trace_id": str(uuid.uuid4())},
status=http,
)
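# Example envelope (shape produced by the helpers above):
#   {"ok": false, "code": "invalid_kind", "message": "Invalid kind",
#    "hint": "['main', 'demand', 'competition']", "trace_id": "<uuid4>"}
# Clients can branch on `code` and log `trace_id` for correlation.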
# -------- helpers --------
def _pyify(o):
"""Make objects JSONField-safe (NumPy → native Python)."""
if isinstance(o, (np.floating, np.float32, np.float64)):
return float(o)
if isinstance(o, (np.integer, np.int32, np.int64)):
return int(o)
if isinstance(o, np.ndarray):
return o.tolist()
return str(o)
def _build_base_url(request) -> str:
forwarded_proto = request.META.get("HTTP_X_FORWARDED_PROTO")
scheme = (forwarded_proto.split(",")[0].strip() if forwarded_proto else None) or (
"https" if request.is_secure() else "http"
)
host = request.get_host() or settings.BASE_URL.replace("https://", "").replace("http://", "")
return f"{scheme}://{host}"
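# Example: behind a proxy sending "X-Forwarded-Proto: https, http" the scheme
# resolves to "https"; with no header it falls back to request.is_secure().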
# -------- DRF API views --------
class SitesHealth(APIView):
authentication_classes = []
throttle_classes = [ScopedRateThrottle] # 👈 enable throttling
throttle_scope = "sites_health"
def get(self, request, *args, **kwargs):
return Response({"ok": True, "app": "pxy_sites"})
class SiteSearchView(APIView):
# DRF ScopedRateThrottle is active via project settings; scope name here:
throttle_scope = "sites_search"
def post(self, request, *args, **kwargs):
t0 = time.perf_counter()
# 1) Validate contract
try:
req = SiteSearchRequest(**(request.data or {}))
except PydValidationError as ve:
# DRFValidationError would be handled by your global handler too,
# but we return the consistent envelope directly
return _env("invalid", "Validation error", hint=str(ve), http=status.HTTP_400_BAD_REQUEST)
# 2) Run scoring (catch provider/upstream failures -> 502 envelope)
try:
resp: SiteSearchResponse = run_site_search(req)
except Exception as e:
dur_ms = (time.perf_counter() - t0) * 1000.0
log.warning(
"[sites] search_failed city=%s business=%s bands=%s err=%s duration_ms=%.1f",
getattr(req, "city", None), getattr(req, "business", None), getattr(req, "time_bands", None),
e, dur_ms,
)
return _env("sites_error", "Sites search failed", hint=str(e), http=status.HTTP_502_BAD_GATEWAY)
data = resp.model_dump()
# 3) Build absolute URLs (proxy-friendly)
base = _build_base_url(request)
sid = data.get("search_id")
if sid:
data["share_url"] = mint_sites_share_url(sid, request=request)
def _dl(kind: str) -> str: return f"{base}/api/sites/download/{kind}/{sid}"
def _gj(kind: str) -> str: return f"{base}/api/sites/geojson/{kind}/{sid}"
def _pv(kind: str) -> str: return f"{base}/api/sites/preview/{kind}/{sid}"
if sid and data.get("map_url"):
data["main_download_url"] = _dl("main")
data["main_preview_url"] = _pv("main")
if sid and data.get("demand_map_url"):
data["demand_download_url"] = _dl("demand")
data["demand_preview_url"] = _pv("demand")
if sid and data.get("competition_map_url"):
data["competition_download_url"] = _dl("competition")
data["competition_preview_url"] = _pv("competition")
if sid:
data["isochrones_geojson_url"] = _gj("isochrones")
data["candidates_geojson_url"] = _gj("candidates")
data["pois_competition_geojson_url"] = _gj("pois_competition")
data["popgrid_geojson_url"] = _gj("popgrid")
# 4) Persist run in DB (best-effort)
try:
safe_payload = json.loads(json.dumps(req.model_dump(), default=_pyify))
safe_result = json.loads(json.dumps(data, default=_pyify))
SiteRun.objects.create(
search_id=sid,
city=safe_result.get("city"),
business=safe_result.get("business"),
payload_json=safe_payload,
result_json=safe_result,
)
log.info("[sites] saved SiteRun %s", sid)
except Exception as e:
data.setdefault("warnings", []).append(f"persist_failed: {e}")
log.warning("[sites] persist_failed for %s: %s", sid, e)
dur_ms = (time.perf_counter() - t0) * 1000.0
log.info(
"[sites] search_ok city=%s business=%s bands=%s duration_ms=%.1f",
data.get("city"), data.get("business"), data.get("time_bands"), dur_ms,
)
return Response(data, status=status.HTTP_200_OK)
# -------- Artifacts (FBVs) --------
_KIND_PREFIX = {"main": "sites", "demand": "demand", "competition": "competition"}
@csrf_exempt
def sites_download(request: HttpRequest, kind: str, search_id: str):
prefix = _KIND_PREFIX.get(kind)
if not prefix:
return _env_json("invalid_kind", "Invalid kind", hint=str(list(_KIND_PREFIX)), http=400)
try:
uuid.UUID(search_id)
except Exception:
return _env_json("invalid_search_id", "search_id must be a UUID", http=400)
fname = f"{prefix}_{search_id}.png"
fpath = Path(settings.MEDIA_ROOT) / "sites" / fname
if not fpath.exists():
return _env_json("not_found", f"file not found: {fname}", http=404)
return FileResponse(open(fpath, "rb"), content_type="image/png", as_attachment=True, filename=fname)
_GJ_KEYS = {
"isochrones": "isochrones_fc",
"candidates": "candidates_fc",
"pois_competition": "pois_competition_fc",
"popgrid": "popgrid_fc",
}
@csrf_exempt
def sites_geojson(request: HttpRequest, kind: str, search_id: str):
if kind not in _GJ_KEYS:
return _env_json("invalid_kind", "Invalid kind", hint=str(list(_GJ_KEYS)), http=400)
try:
uuid.UUID(search_id)
except Exception:
return _env_json("invalid_search_id", "search_id must be a UUID", http=400)
fpath = Path(settings.MEDIA_ROOT) / "sites" / f"run_{search_id}.json"
if not fpath.exists():
return _env_json("not_found", f"artifact not found: run_{search_id}.json", http=404)
try:
with open(fpath, "r", encoding="utf-8") as f:
artifact = json.load(f)
fc = artifact.get(_GJ_KEYS[kind]) or {"type": "FeatureCollection", "features": []}
return HttpResponse(json.dumps(fc), content_type="application/geo+json")
except Exception as e:
return _env_json("artifact_read_error", "Failed to read artifact", hint=str(e), http=500)
_PREVIEW_PREFIX = {"main": "sites", "demand": "demand", "competition": "competition"}
@csrf_exempt
def sites_preview(request: HttpRequest, kind: str, search_id: str):
prefix = _PREVIEW_PREFIX.get(kind)
if not prefix:
return _env_json("invalid_kind", "Invalid kind", hint=str(list(_PREVIEW_PREFIX)), http=400)
try:
uuid.UUID(search_id)
except Exception:
return _env_json("invalid_search_id", "search_id must be a UUID", http=400)
fname = f"{prefix}_{search_id}.png"
fpath = Path(settings.MEDIA_ROOT) / "sites" / fname
if not fpath.exists():
return _env_json("not_found", f"file not found: {fname}", http=404)
# resize params
def _clamp_int(val, lo, hi, default=None):
try:
v = int(val)
return max(lo, min(hi, v))
except Exception:
return default
w_q = _clamp_int(request.GET.get("w"), 16, 2000, None)
h_q = _clamp_int(request.GET.get("h"), 16, 2000, None)
try:
scale_q = float(request.GET.get("scale")) if request.GET.get("scale") else None
if scale_q is not None:
scale_q = max(0.05, min(3.0, scale_q))
except Exception:
scale_q = None
if not any([w_q, h_q, scale_q]):
with open(fpath, "rb") as f:
data = f.read()
resp = HttpResponse(data, content_type="image/png")
resp["Cache-Control"] = "public, max-age=3600"
return resp
try:
im = Image.open(fpath)
im = im.convert("RGBA") if im.mode not in ("RGB", "RGBA") else im
orig_w, orig_h = im.size
if scale_q:
w = int(round(orig_w * scale_q)); h = int(round(orig_h * scale_q))
elif w_q and h_q:
w, h = w_q, h_q
elif w_q:
ratio = w_q / float(orig_w); w, h = w_q, max(1, int(round(orig_h * ratio)))
elif h_q:
ratio = h_q / float(orig_h); w, h = max(1, int(round(orig_w * ratio))), h_q
else:
w, h = orig_w, orig_h
w = max(16, min(2000, w)); h = max(16, min(2000, h))
im = im.resize((w, h), Image.LANCZOS)
buf = io.BytesIO(); im.save(buf, format="PNG", optimize=True); buf.seek(0)
resp = HttpResponse(buf.getvalue(), content_type="image/png")
resp["Cache-Control"] = "public, max-age=600"
return resp
except Exception as e:
return _env_json("resize_failed", "Image resize failed", hint=str(e), http=500)
@require_GET
def sites_recent_runs(request: HttpRequest):
"""GET /api/sites/runs/recent?limit=10 — list latest runs with handy URLs."""
try:
limit = int(request.GET.get("limit", "10"))
except Exception:
limit = 10
limit = max(1, min(limit, 50))
items = []
qs = SiteRun.objects.order_by("-created_at")[:limit]
for r in qs:
res = r.result_json or {}
items.append({
"search_id": r.search_id,
"city": r.city,
"business": r.business,
"created_at": r.created_at.isoformat(),
"map_url": res.get("map_url"),
"demand_map_url": res.get("demand_map_url"),
"competition_map_url": res.get("competition_map_url"),
"download": {
"main": res.get("main_download_url"),
"demand": res.get("demand_download_url"),
"competition": res.get("competition_download_url"),
},
"geojson": {
"isochrones": res.get("isochrones_geojson_url"),
"candidates": res.get("candidates_geojson_url"),
"pois_competition": res.get("pois_competition_geojson_url"),
"popgrid": res.get("popgrid_geojson_url"),
},
})
return JsonResponse({"items": items})
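A minimal consumer of the recent-runs feed, under the same local-server assumption as the search sketch above:

import requests

runs = requests.get("http://localhost:8002/api/sites/runs/recent",
                    params={"limit": 5}, timeout=30).json()["items"]
for run in runs:
    print(run["created_at"], run["city"], run["business"], run["map_url"])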

6
pxy_sites/apps.py Normal file
View File

@@ -0,0 +1,6 @@
from django.apps import AppConfig
class PxySitesConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'pxy_sites'

29
pxy_sites/migrations/0001_initial.py Normal file
View File

@@ -0,0 +1,29 @@
# Generated by Django 5.0.3 on 2025-09-15 07:36
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='SiteRun',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('search_id', models.CharField(db_index=True, max_length=64)),
('city', models.CharField(max_length=64)),
('business', models.CharField(max_length=128)),
('payload_json', models.JSONField()),
('result_json', models.JSONField()),
('created_at', models.DateTimeField(auto_now_add=True)),
],
options={
'ordering': ['-created_at'],
},
),
]

26
pxy_sites/models.py Normal file
View File

@@ -0,0 +1,26 @@
from __future__ import annotations
from django.db import models
class SiteRun(models.Model):
search_id = models.CharField(max_length=64, db_index=True)
city = models.CharField(max_length=64)
business = models.CharField(max_length=128)
payload_json = models.JSONField() # request we received
result_json = models.JSONField() # full response we returned
created_at = models.DateTimeField(auto_now_add=True)
class Meta:
ordering = ["-created_at"]
def __str__(self) -> str:
return f"{self.created_at:%Y-%m-%d %H:%M} · {self.city}/{self.business} · {self.search_id[:8]}"
# convenience accessors
@property
def map_url(self) -> str | None:
return (self.result_json or {}).get("map_url")
@property
def geojson_url(self) -> str | None:
# if you already expose one, wire it here later
return (self.result_json or {}).get("geojson_url")
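A quick way to poke at stored runs from `python manage.py shell`, using only fields defined above:

from pxy_sites.models import SiteRun

latest = SiteRun.objects.first()   # newest first via Meta.ordering
if latest:
    print(latest.search_id, latest.city, latest.business)
    print(latest.map_url, sorted((latest.result_json or {}).keys())[:5])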

723
pxy_sites/services/site_scoring.py Normal file
View File

@@ -0,0 +1,723 @@
# pxy_sites/services/site_scoring.py
from __future__ import annotations
import os, json, uuid, random, math
from typing import List, Tuple, Optional, Dict, Any
from datetime import datetime
from django.conf import settings
from pyproj import Geod
# Headless backend for matplotlib
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
from matplotlib.patches import Polygon as MplPolygon
import numpy as np
from shapely.geometry import Point, Polygon
from scipy.stats import gaussian_kde
from pxy_contracts.contracts import (
SiteSearchRequest, SiteSearchResponse,
CandidateSite, ScoreBreakdown
)
from pxy_routing.services import get_routing_provider
from pxy_de.providers.base import get_provider
# --------------------------- Geometry helpers ---------------------------
def _isochrone_area_km2(feature: dict) -> float:
geom = (feature or {}).get("geometry") or {}
if geom.get("type") != "Polygon":
return 0.0
rings = geom.get("coordinates") or []
if not rings:
return 0.0
coords = rings[0]
if len(coords) < 4:
return 0.0
geod = Geod(ellps="WGS84")
lons = [float(x[0]) for x in coords]
lats = [float(x[1]) for x in coords]
area_m2, _ = geod.polygon_area_perimeter(lons, lats)
return abs(area_m2) / 1_000_000.0 # m² -> km²
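# Note: pyproj's polygon_area_perimeter returns a signed area (sign follows
# ring orientation), hence the abs() before converting m² to km².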
def _polygon_from_feature(feature: dict) -> Optional[Polygon]:
geom = (feature or {}).get("geometry") or {}
if geom.get("type") != "Polygon":
return None
coords = geom.get("coordinates")
if not coords or not coords[0]:
return None
try:
ring = [(float(x[0]), float(x[1])) for x in coords[0]]
if len(ring) < 4:
return None
return Polygon(ring)
except Exception:
return None
def _extent_from_iso_list(iso_list: List[dict]) -> Optional[Tuple[float, float, float, float]]:
xs, ys = [], []
for item in iso_list or []:
feat = item.get("feature") or {}
geom = feat.get("geometry") or {}
if geom.get("type") != "Polygon":
continue
coords = geom.get("coordinates") or []
if not coords:
continue
ring = coords[0]
for x, y in ring:
xs.append(float(x)); ys.append(float(y))
if not xs or not ys:
return None
return (min(xs), min(ys), max(xs), max(ys))
def _build_isochrones(center: Tuple[float, float], time_bands: List[int]) -> List[dict]:
"""
Build isochrones for the requested minute bands.
- If the routing provider supports `isochrones(center, minutes_list)`, use it once
(reduces ORS requests and rate-limit pressure).
- Otherwise, fall back to one call per band.
Output schema stays the same as before: a list of dicts with
{"minutes": int, "feature": Feature(Polygon), "area_km2": float}
"""
rp = get_routing_provider()
bands: List[int] = [int(m) for m in (time_bands or [])]
out: List[dict] = []
# Try a single batched call first
if hasattr(rp, "isochrones"):
try:
feats = rp.isochrones(center, bands) # expected same order as requested bands
n = min(len(bands), len(feats))
for m, feat in zip(bands[:n], feats[:n]):
area_km2 = _isochrone_area_km2(feat)
props = {"minutes": int(m), "area_km2": float(area_km2)}
f = {"type": "Feature", "geometry": feat.get("geometry"), "properties": props}
out.append({"minutes": int(m), "feature": f, "area_km2": float(area_km2)})
# If provider returned fewer features than requested, fill the rest via single calls
for m in bands[n:]:
feat = rp.isochrone(center, int(m))
area_km2 = _isochrone_area_km2(feat)
props = {"minutes": int(m), "area_km2": float(area_km2)}
f = {"type": "Feature", "geometry": feat.get("geometry"), "properties": props}
out.append({"minutes": int(m), "feature": f, "area_km2": float(area_km2)})
return out
except Exception:
# Fall back to per-band calls below if the batch call fails for any reason
pass
# Fallback: one request per band (original behavior)
for m in bands:
feat = rp.isochrone(center, int(m))
area_km2 = _isochrone_area_km2(feat)
props = {"minutes": int(m), "area_km2": float(area_km2)}
f = {"type": "Feature", "geometry": feat.get("geometry"), "properties": props}
out.append({"minutes": int(m), "feature": f, "area_km2": float(area_km2)})
return out
def _access_from_iso_list(iso_list: List[dict]) -> Tuple[float, List[str]]:
if not iso_list:
return 0.0, ["no_isochrones"]
areas = [item["area_km2"] for item in iso_list]
max_a = max(areas) if areas else 0.0
if max_a <= 0:
return 0.0, [f"{item['minutes']} min area ≈ 0.0 km²" for item in iso_list]
norms = [a / max_a for a in areas]
access = sum(norms) / len(norms)
reasons = [f"{item['minutes']} min area ≈ {item['area_km2']:.1f} km²" for item in iso_list]
return float(access), reasons
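# Worked example: bands of 10/20/30 min with areas 5, 18 and 40 km² normalize
# to 0.125, 0.45 and 1.0 against the largest band, so access ≈ 1.575/3 ≈ 0.53.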
# --------------------------- Data-driven scores ---------------------------
def _competition_from_pois(city: str, business: str, iso_list: List[dict]) -> Tuple[float, List[str]]:
prov = get_provider()
try:
pois = prov.denue(city, business) # DataFrame[name,lat,lon,category]
except Exception as e:
return 0.5, [f"competition_fallback: provider_error={e}"]
if pois.empty or not iso_list:
return 0.5, ["competition_fallback: no_pois_or_isochrones"]
largest = max(iso_list, key=lambda x: x["minutes"])
poly = _polygon_from_feature(largest["feature"])
if poly is None:
return 0.5, ["competition_fallback: invalid_polygon"]
area_km2 = float(largest.get("area_km2") or 0.0)
if area_km2 <= 0.0:
return 0.5, ["competition_fallback: zero_area"]
cnt = 0
for row in pois.itertuples(index=False):
try:
p = Point(float(row.lon), float(row.lat))
if poly.contains(p):
cnt += 1
except Exception:
continue
density = cnt / area_km2 # POIs per km²
D_ref = float(os.getenv("COMP_REF_DENSITY", "5.0"))
comp = 1.0 / (1.0 + density / D_ref)
comp = float(max(0.0, min(1.0, comp)))
reasons = [
f"largest_band: {largest['minutes']} min, area ≈ {area_km2:.1f} km²",
f"competitors_inside: {cnt}, density ≈ {density:.2f} /km²",
f"competition_score = 1/(1 + density/{D_ref:.1f}) ≈ {comp:.2f}",
]
return comp, reasons
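# Worked example with the default D_ref = 5.0: 12 competitors inside a 6 km²
# band give density = 2.0 /km², so comp = 1/(1 + 2.0/5.0) ≈ 0.71; fewer
# competitors push the score toward 1.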
def _demand_from_popgrid(city: str, iso_list: List[dict]) -> Tuple[float, List[str]]:
prov = get_provider()
try:
grid = prov.popgrid(city) # DataFrame[cell_id, lat, lon, pop]
except Exception as e:
return 0.5, [f"demand_fallback: provider_error={e}"]
if grid.empty or not iso_list:
return 0.5, ["demand_fallback: no_grid_or_isochrones"]
largest = max(iso_list, key=lambda x: x["minutes"])
poly = _polygon_from_feature(largest["feature"])
if poly is None:
return 0.5, ["demand_fallback: invalid_polygon"]
area_km2 = float(largest.get("area_km2") or 0.0)
if area_km2 <= 0.0:
return 0.5, ["demand_fallback: zero_area"]
total_pop = 0.0
for row in grid.itertuples(index=False):
try:
p = Point(float(row.lon), float(row.lat))
if poly.contains(p):
total_pop += float(row.pop)
except Exception:
continue
density = total_pop / area_km2 if area_km2 > 0 else 0.0
P_ref = float(os.getenv("DEMAND_REF_POP", "50000"))
demand = total_pop / (total_pop + P_ref) if (total_pop + P_ref) > 0 else 0.0
demand = float(max(0.0, min(1.0, demand)))
reasons = [
f"largest_band: {largest['minutes']} min, area ≈ {area_km2:.1f} km²",
f"population_inside ≈ {int(total_pop)}, density ≈ {density:.1f} /km²",
f"demand_score = pop/(pop+{int(P_ref)}) ≈ {demand:.2f}",
]
return demand, reasons
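# Worked example with the default P_ref = 50000: pop = 50,000 inside the band
# yields demand = 0.50, pop = 150,000 yields 0.75; the saturating form keeps
# scores in [0, 1) regardless of city size.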
# --------------------------- Sampling and main map ---------------------------
def _sample_points_in_polygon(poly: Polygon, n: int, rng: random.Random) -> List[Tuple[float, float]]:
minx, miny, maxx, maxy = poly.bounds
pts: List[Tuple[float, float]] = []
max_tries = n * 50
tries = 0
while len(pts) < n and tries < max_tries:
tries += 1
x = rng.uniform(minx, maxx)
y = rng.uniform(miny, maxy)
if poly.contains(Point(x, y)):
pts.append((y, x)) # (lat, lon)
return pts
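# Rejection sampling: draw uniformly in the bounding box and keep points that
# fall inside the polygon; max_tries = 50·n bounds the work for thin shapes,
# so fewer than n points may be returned.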
def _km_per_deg_lon(lat_deg: float) -> float:
return 111.320 * math.cos(math.radians(lat_deg))
def _km_per_deg_lat() -> float:
return 110.574
def _save_sites_map(center: Tuple[float, float], iso_list_for_map: List[dict],
search_id: str, city: str, business: str,
top_candidates: List[Tuple[float, float, float]]) -> str | None:
try:
media_dir = settings.MEDIA_ROOT / "sites"
media_dir.mkdir(parents=True, exist_ok=True)
out_path = media_dir / f"sites_{search_id}.png"
# collect polygons / extent
lons, lats = [center[1]], [center[0]]
polys = []
for item in iso_list_for_map:
feat = item["feature"]
geom = feat.get("geometry") or {}
if geom.get("type") != "Polygon":
continue
coords = geom.get("coordinates")[0]
poly_xy = [(float(x[0]), float(x[1])) for x in coords]
polys.append({"minutes": item["minutes"], "coords": poly_xy, "area": item["area_km2"]})
lons.extend([p[0] for p in poly_xy])
lats.extend([p[1] for p in poly_xy])
fig, ax = plt.subplots(figsize=(7.6, 7.6))
band_palette = ["#2E86AB", "#F18F01", "#C73E1D", "#6C5B7B", "#17B890", "#7E57C2"]
rank_palette = ["#1B998B", "#3A86FF", "#FB5607", "#FFBE0B", "#8338EC", "#FF006E"]
for i, item in enumerate(sorted(polys, key=lambda d: d["minutes"], reverse=True)):
poly = MplPolygon(item["coords"], closed=True,
facecolor=band_palette[i % len(band_palette)], alpha=0.25,
edgecolor=band_palette[i % len(band_palette)], linewidth=1.6,
label=f"{item['minutes']} min · {item['area']:.1f} km²")
ax.add_patch(poly)
ax.scatter([center[1]], [center[0]], s=68, zorder=6,
facecolor="#000", edgecolor="white", linewidth=1.2)
ax.annotate("center", (center[1], center[0]),
xytext=(center[1] + 0.01, center[0] + 0.01),
fontsize=9, color="#303030",
bbox=dict(boxstyle="round,pad=0.2", fc="white", ec="none", alpha=0.75),
arrowprops=dict(arrowstyle="-", lw=0.7, color="#666", alpha=0.9))
sizes = [90, 80, 72, 64, 56, 50, 46, 42, 38, 34]
legend_rows = []
for idx, (lat, lon, score) in enumerate(top_candidates, start=1):
color = rank_palette[(idx - 1) % len(rank_palette)]
size = sizes[idx - 1] if idx - 1 < len(sizes) else 30
ax.scatter([lon], [lat], s=size, zorder=7,
facecolor=color, edgecolor="white", linewidth=1.0)
ax.annotate(f"{idx} · {score:.2f}", (lon, lat),
xytext=(lon + 0.008, lat + 0.008),
fontsize=8, color="#111",
bbox=dict(boxstyle="round,pad=0.2", fc="white", ec="#bbb", alpha=0.9))
legend_rows.append(f"{idx}. ({score:.2f}) {lat:.4f}, {lon:.4f}")
lons.append(lon); lats.append(lat)
if lons and lats:
minx, maxx = min(lons), max(lons)
miny, maxy = min(lats), max(lats)
pad_x = max((maxx - minx) * 0.08, 0.01)
pad_y = max((maxy - miny) * 0.08, 0.01)
ax.set_xlim(minx - pad_x, maxx + pad_x)
ax.set_ylim(miny - pad_y, maxy + pad_y)
ax.set_title(f"Top sites — {business} @ {city}", fontsize=13, pad=10)
ax.set_xlabel("Longitude")
ax.set_ylabel("Latitude")
ax.grid(True, linestyle=":", linewidth=0.6, alpha=0.6)
for spine in ["top", "right"]:
ax.spines[spine].set_visible(False)
leg = ax.legend(loc="lower right", frameon=True, fontsize=8, title="Isochrones")
if leg and leg.get_frame():
leg.get_frame().set_alpha(0.9)
x0, x1 = ax.get_xlim()
y0, y1 = ax.get_ylim()
x_text = x0 + (x1 - x0) * 0.70
y_text = y0 + (y1 - y0) * 0.97
ax.text(x_text, y_text,
"Top-K (score)\n" + "\n".join(legend_rows),
ha="left", va="top", fontsize=8, color="#111",
bbox=dict(boxstyle="round,pad=0.3", fc="white", ec="#ccc", alpha=0.9))
km_per_deg_x = _km_per_deg_lon(center[0])
deg_len = 5.0 / km_per_deg_x if km_per_deg_x > 0 else 0.05
px = x0 + (x1 - x0) * 0.10
py = y0 + (y1 - y0) * 0.08
ax.plot([px, px + deg_len], [py, py], lw=3, color="#222")
ax.plot([px, px], [py - 0.001, py + 0.001], lw=2, color="#222")
ax.plot([px + deg_len, px + deg_len], [py - 0.001, py + 0.001], lw=2, color="#222")
ax.text(px + deg_len / 2.0, py + 0.002, "5 km",
ha="center", va="bottom", fontsize=8, color="#222",
bbox=dict(boxstyle="round,pad=0.2", fc="white", ec="none", alpha=0.7))
fig.tight_layout()
fig.savefig(out_path, dpi=150)
plt.close(fig)
return f"{settings.MEDIA_URL}sites/{out_path.name}"
except Exception:
return None
# --------------------------- Density maps: demand / competition ---------------------------
def _grid_kde(xy: np.ndarray, weights: Optional[np.ndarray],
x_grid: np.ndarray, y_grid: np.ndarray, bw: Optional[float] = None) -> np.ndarray:
if xy.shape[1] != 2 or xy.shape[0] < 2:
return np.zeros((y_grid.size, x_grid.size), dtype=float)
kde = gaussian_kde(xy.T, weights=weights, bw_method=bw)
Xg, Yg = np.meshgrid(x_grid, y_grid)
pts = np.vstack([Xg.ravel(), Yg.ravel()])
z = kde(pts).reshape(Yg.shape)
z = z - z.min()
if z.max() > 0:
z = z / z.max()
return z
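# Note: the KDE surface is min-max rescaled to [0, 1], so the heat expresses
# relative intensity within one map, not densities comparable across runs.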
def _render_density_map(kind: str,
center: Tuple[float, float],
iso_list: List[dict],
points_xy: np.ndarray,
weights: Optional[np.ndarray],
search_id: str,
city: str,
business: str) -> Optional[str]:
try:
extent = _extent_from_iso_list(iso_list)
if extent is None:
cx, cy = center[1], center[0]
extent = (cx - 0.08, cy - 0.08, cx + 0.08, cy + 0.08)
minx, miny, maxx, maxy = extent
pad_x = max((maxx - minx) * 0.05, 0.01)
pad_y = max((maxy - miny) * 0.05, 0.01)
minx -= pad_x; maxx += pad_x
miny -= pad_y; maxy += pad_y
lat0 = max(miny, min(maxy, center[0]))
kx = _km_per_deg_lon(lat0)
ky = _km_per_deg_lat()
if points_xy.size == 0:
return None
xs = points_xy[:, 0] * kx
ys = points_xy[:, 1] * ky
grid_n = int(os.getenv("HEAT_GRID_N", "220"))
xg = np.linspace(minx * kx, maxx * kx, grid_n)
yg = np.linspace(miny * ky, maxy * ky, grid_n)
z = _grid_kde(np.c_[xs, ys], weights, xg, yg, bw=None)
media_dir = settings.MEDIA_ROOT / "sites"
media_dir.mkdir(parents=True, exist_ok=True)
out_path = media_dir / f"{kind}_{search_id}.png"
fig, ax = plt.subplots(figsize=(8.0, 7.0))
im = ax.imshow(z, origin="lower",
extent=(minx, maxx, miny, maxy),
interpolation="bilinear", alpha=0.85)
if kind == "demand":
im.set_cmap("YlOrRd")
title = f"Demand heat — {business} @ {city}"
else:
im.set_cmap("GnBu")
title = f"Competition heat — {business} @ {city}"
cs = ax.contour(z, levels=6, linewidths=0.8, alpha=0.8,
extent=(minx, maxx, miny, maxy), colors="k")
ax.clabel(cs, inline=True, fontsize=7, fmt="%.2f")
for item in sorted(iso_list, key=lambda d: d["minutes"], reverse=True):
feat = item.get("feature") or {}
geom = feat.get("geometry") or {}
if geom.get("type") != "Polygon":
continue
coords = geom.get("coordinates")[0]
ring = np.array([(float(x[0]), float(x[1])) for x in coords])
ax.plot(ring[:, 0], ring[:, 1], lw=1.2, alpha=0.9)
ax.scatter([center[1]], [center[0]], s=55, zorder=5,
facecolor="#000", edgecolor="white", linewidth=1.0)
ax.set_title(title, fontsize=13, pad=10)
ax.set_xlabel("Longitude")
ax.set_ylabel("Latitude")
ax.grid(True, linestyle=":", linewidth=0.5, alpha=0.5)
for spine in ["top", "right"]:
ax.spines[spine].set_visible(False)
cbar = plt.colorbar(im, ax=ax, fraction=0.046, pad=0.04)
cbar.ax.set_ylabel("relative intensity", rotation=90, labelpad=8)
fig.tight_layout()
fig.savefig(out_path, dpi=150)
plt.close(fig)
return f"{settings.MEDIA_URL}sites/{out_path.name}"
except Exception:
return None
def _render_demand_map(center: Tuple[float, float], iso_list: List[dict],
city: str, search_id: str, business: str) -> Optional[str]:
prov = get_provider()
try:
grid = prov.popgrid(city) # cell_id, lat, lon, pop
except Exception:
return None
if grid.empty:
return None
pts = grid[["lon", "lat", "pop"]].dropna().copy()
points_xy = pts[["lon", "lat"]].to_numpy(dtype=float)
weights = pts["pop"].to_numpy(dtype=float)
return _render_density_map("demand", center, iso_list, points_xy, weights, search_id, city, business)
def _render_competition_map(center: Tuple[float, float], iso_list: List[dict],
city: str, business: str, search_id: str) -> Optional[str]:
prov = get_provider()
try:
pois = prov.denue(city, business) # name, lat, lon, category
except Exception:
return None
if pois.empty:
return None
pts = pois[["lon", "lat"]].dropna().copy()
points_xy = pts.to_numpy(dtype=float)
return _render_density_map("competition", center, iso_list, points_xy, None, search_id, city, business)
# --------------------------- Per-search GeoJSON artifact ---------------------------
def _fc(features: List[Dict[str, Any]]) -> Dict[str, Any]:
return {"type": "FeatureCollection", "features": features}
def _candidates_fc(center: Tuple[float,float],
top: List[Tuple[float,float,float,ScoreBreakdown,List[str],List[dict]]]) -> Dict[str, Any]:
feats = []
for idx, (lat, lon, score, br, _reasons, _iso) in enumerate(top, start=1):
feats.append({
"type": "Feature",
"geometry": {"type": "Point", "coordinates": [float(lon), float(lat)]},
"properties": {
"rank": idx,
"score": float(score),
"access": float(br.access),
"demand": float(br.demand),
"competition": float(br.competition),
"is_center": abs(lat - center[0]) < 1e-9 and abs(lon - center[1]) < 1e-9,
}
})
return _fc(feats)
def _isochrones_fc(iso_list: List[dict]) -> Dict[str, Any]:
feats = []
for item in iso_list:
f = item["feature"]
# already carries properties {"minutes", "area_km2"}
feats.append(f)
return _fc(feats)
def _pois_fc(pois_df, poly: Polygon) -> Dict[str, Any]:
feats = []
if pois_df is None or pois_df.empty:
return _fc(feats)
count = 0
for row in pois_df.itertuples(index=False):
try:
lon = float(row.lon); lat = float(row.lat)
if not poly.contains(Point(lon, lat)):
continue
feats.append({
"type": "Feature",
"geometry": {"type": "Point", "coordinates": [lon, lat]},
"properties": {
"name": getattr(row, "name", None),
"category": getattr(row, "category", None),
}
})
count += 1
if count >= int(os.getenv("MAX_POIS_GEOJSON", "1000")):
break
except Exception:
continue
return _fc(feats)
def _popgrid_fc(grid_df, poly: Polygon) -> Dict[str, Any]:
feats = []
if grid_df is None or grid_df.empty:
return _fc(feats)
# keep only cells inside the polygon
inside = []
for row in grid_df.itertuples(index=False):
try:
lon = float(row.lon); lat = float(row.lat); pop = float(row.pop)
if poly.contains(Point(lon, lat)):
inside.append((lon, lat, pop))
except Exception:
continue
if not inside:
return _fc(feats)
# sort by population (desc) and cap
inside.sort(key=lambda t: t[2], reverse=True)
cap = int(os.getenv("MAX_POPGRID_GEOJSON", "800"))
inside = inside[:cap]
for lon, lat, pop in inside:
feats.append({
"type": "Feature",
"geometry": {"type": "Point", "coordinates": [lon, lat]},
"properties": {"pop": pop}
})
return _fc(feats)
def _save_run_artifact(search_id: str,
req: SiteSearchRequest,
chosen_center: Tuple[float,float],
top: List[Tuple[float,float,float,ScoreBreakdown,List[str],List[dict]]],
iso_list: List[dict]) -> Optional[str]:
"""
Guarda un JSON con:
- request_summary
- candidates_fc
- isochrones_fc
- pois_competition_fc
- popgrid_fc (muestra)
"""
try:
media_dir = settings.MEDIA_ROOT / "sites"
media_dir.mkdir(parents=True, exist_ok=True)
out_path = media_dir / f"run_{search_id}.json"
# largest polygon, used for clipping
largest = max(iso_list, key=lambda x: x["minutes"]) if iso_list else None
poly = _polygon_from_feature(largest["feature"]) if largest else None
prov = get_provider()
try:
pois = prov.denue(req.city, req.business)
except Exception:
pois = None
try:
grid = prov.popgrid(req.city)
except Exception:
grid = None
artifact = {
"version": "sites-artifact-1",
"created_at": datetime.utcnow().isoformat() + "Z",
"request": req.model_dump(),
"center": {"lat": chosen_center[0], "lon": chosen_center[1]},
"candidates_fc": _candidates_fc(chosen_center, top),
"isochrones_fc": _isochrones_fc(iso_list),
"pois_competition_fc": _pois_fc(pois, poly) if poly is not None else _fc([]),
"popgrid_fc": _popgrid_fc(grid, poly) if poly is not None else _fc([]),
}
with open(out_path, "w", encoding="utf-8") as f:
json.dump(artifact, f, ensure_ascii=False)
return str(out_path)
except Exception:
return None
# --------------------------- Main estimator ---------------------------
def run_site_search(req: SiteSearchRequest) -> SiteSearchResponse:
search_id = str(uuid.uuid4())
warnings: List[str] = []
candidates: List[CandidateSite] = []
map_url: str | None = None
demand_map_url: Optional[str] = None
competition_map_url: Optional[str] = None
w_access = float(os.getenv("WEIGHT_ACCESS", "0.35"))
w_demand = float(os.getenv("WEIGHT_DEMAND", "0.40"))
w_comp = float(os.getenv("WEIGHT_COMP", "0.25"))
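# Example with the default weights (0.35, 0.40, 0.25): access = 0.6,
# demand = 0.5, competition = 0.7 gives score = 0.21 + 0.20 + 0.175 = 0.585.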
if req.center:
center = (float(req.center[0]), float(req.center[1]))
base_iso = _build_isochrones(center, req.time_bands or [])
largest = max(base_iso, key=lambda x: x["minutes"]) if base_iso else None
poly = _polygon_from_feature(largest["feature"]) if largest else None
if poly is None:
access, access_r = _access_from_iso_list(base_iso)
comp, comp_r = _competition_from_pois(req.city, req.business, base_iso)
dem, dem_r = _demand_from_popgrid(req.city, base_iso)
score = w_access * access + w_demand * dem + w_comp * comp
score = float(max(0.0, min(1.0, score)))
breakdown = ScoreBreakdown(demand=dem, competition=comp, access=access)
reasons = (["Access from isochrone areas (normalized avg)"] + access_r +
["Competition from POI density (largest band)"] + comp_r +
["Demand from population grid (largest band)"] + dem_r)
candidates.append(CandidateSite(lat=center[0], lon=center[1], score=score,
breakdown=breakdown, reasons=reasons))
map_url = _save_sites_map(center, base_iso, search_id, req.city, req.business,
[(center[0], center[1], score)])
warnings.append("sampling_fallback_invalid_polygon")
demand_map_url = _render_demand_map(center, base_iso, req.city, search_id, req.business)
competition_map_url = _render_competition_map(center, base_iso, req.city, req.business, search_id)
# artifact (center only)
_save_run_artifact(
search_id, req, center,
[(center[0], center[1], score, breakdown, reasons, base_iso)],
base_iso
)
else:
rng = random.Random(int(search_id.replace("-", ""), 16) & 0xFFFFFFFF)
samples = _sample_points_in_polygon(poly, int(req.num_samples), rng)
cand_points: List[Tuple[float, float]] = [center] + samples
scored: List[Tuple[float, float, float, ScoreBreakdown, List[str], List[dict]]] = []
for (lat, lon) in cand_points:
iso_list = _build_isochrones((lat, lon), req.time_bands or [])
access, access_r = _access_from_iso_list(iso_list)
comp, comp_r = _competition_from_pois(req.city, req.business, iso_list)
dem, dem_r = _demand_from_popgrid(req.city, iso_list)
score = w_access * access + w_demand * dem + w_comp * comp
score = float(max(0.0, min(1.0, score)))
breakdown = ScoreBreakdown(demand=dem, competition=comp, access=access)
reasons = (["Access from isochrone areas (normalized avg)"] + access_r +
["Competition from POI density (largest band)"] + comp_r +
["Demand from population grid (largest band)"] + dem_r)
scored.append((lat, lon, score, breakdown, reasons, iso_list))
scored.sort(key=lambda t: t[2], reverse=True)
top = scored[: max(1, int(req.max_candidates))]
for (lat, lon, score, breakdown, reasons, _iso) in top:
candidates.append(CandidateSite(
lat=lat, lon=lon, score=score, breakdown=breakdown, reasons=reasons
))
top1_iso = top[0][5]
top_points = [(lat, lon, score) for (lat, lon, score, *_rest) in top]
map_url = _save_sites_map((top[0][0], top[0][1]), top1_iso, search_id,
req.city, req.business, top_points)
warnings.append("multi_candidate_sampling_ok")
demand_map_url = _render_demand_map((top[0][0], top[0][1]), top1_iso, req.city, search_id, req.business)
competition_map_url = _render_competition_map((top[0][0], top[0][1]), top1_iso, req.city, req.business, search_id)
if demand_map_url: warnings.append("demand_map_saved")
else: warnings.append("demand_map_failed")
if competition_map_url: warnings.append("competition_map_saved")
else: warnings.append("competition_map_failed")
# artifact (Top-K + Top-1 isochrones)
_save_run_artifact(search_id, req, (top[0][0], top[0][1]), top, top1_iso)
else:
neutral = ScoreBreakdown(demand=0.5, competition=0.5, access=0.5)
for i in range(req.max_candidates):
candidates.append(CandidateSite(
lat=0.0, lon=0.0, score=0.5,
breakdown=neutral,
reasons=[f"stub candidate #{i+1} for {req.business} in {req.city}"],
))
warnings.append("no_center_provided_stub_output")
return SiteSearchResponse(
search_id=search_id,
city=req.city,
business=req.business,
time_bands=req.time_bands,
candidates=candidates,
map_url=map_url,
demand_map_url=demand_map_url,
competition_map_url=competition_map_url,
data_release=req.data_release,
warnings=warnings,
)
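For completeness, a direct-invocation sketch (e.g., from a management command); field names beyond city/business mirror the attributes accessed above and are assumptions about the contract:

from pxy_contracts.contracts import SiteSearchRequest
from pxy_sites.services.site_scoring import run_site_search

req = SiteSearchRequest(
    city="CDMX", business="cafe",
    center=(19.4326, -99.1332),      # illustrative center (lat, lon)
    time_bands=[5, 10, 15],
    num_samples=12, max_candidates=5,
)
resp = run_site_search(req)
print(resp.search_id, resp.map_url)
for c in resp.candidates:
    print(round(c.score, 3), c.lat, c.lon)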

3
pxy_sites/tests.py Normal file
View File

@@ -0,0 +1,3 @@
from django.test import TestCase
# Create your tests here.


3
pxy_sites/views.py Normal file
View File

@@ -0,0 +1,3 @@
from django.shortcuts import render
# Create your views here.

requirements.txt
View File

@@ -155,3 +155,12 @@ crispy-bootstrap5>=0.6
polyline
# --- science deps for SAMI ---
# --- science deps for SAMI / Sites ---
numpy==1.26.4
pandas==2.2.3
scipy==1.15.3
statsmodels==0.14.5
djangorestframework==3.15.2