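"""Webhook views: route incoming Telegram updates to the citizen, city, and private-collector bot handlers."""
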
import json
import logging

from telegram import Update, Bot
from django.http import JsonResponse
from django.views.decorators.csrf import csrf_exempt
from asgiref.sync import sync_to_async

from .models import TelegramBot
from pxy_langchain.services import LangchainAIService
from .handlers import (
    start, help_command, handle_location,
    next_truck, report_trash, private_pickup, green_balance,
    next_route, complete_stop, missed_stop, city_eco_score,
    available_jobs, accept_job, next_pickup, complete_pickup, private_eco_score
)
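
# The webhook below reads TelegramBot.name, .is_active, .token, and .assistant;
# the model is assumed to expose at least those fields.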

logger = logging.getLogger(__name__)


# -------------------------------
# 🛠 Modular local handlers inside views.py
# -------------------------------

async def handle_location_message(update):
    """Handle a shared location; return True if the update was handled."""
    if update.message.location:
        await handle_location(update)
        return True
    return False


async def handle_voice_or_general_message(update):
    """Acknowledge a voice note; return True if the update was handled."""
    if update.message.voice:
        file_id = update.message.voice.file_id
        # Reply (Spanish): "I received your voice message with ID {file_id}. I'll calculate your CO₂ soon."
        await update.message.reply_text(
            f"🎙 Recibí tu mensaje de voz con ID {file_id}. Pronto calcularé tu CO₂."
        )
        return True
    return False


async def dispatch_citizen_commands(update, text):
    """Citizen bot (PepeBasuritaCoinsBot) commands; return True if handled."""
    if text == "/start":
        await start(update)
    elif text == "/help":
        await help_command(update)
    elif text == "/next_truck":
        await next_truck(update)
    elif text == "/report_trash":
        await report_trash(update)
    elif text == "/private_pickup":
        await private_pickup(update)
    elif text == "/green_balance":
        await green_balance(update)
    else:
        return False
    return True


async def dispatch_city_commands(update, text):
    """City bot (PepeCamioncitoBot) commands; return True if handled."""
    if text == "/start":
        await start(update)
    elif text == "/help":
        await help_command(update)
    elif text == "/next_route":
        await next_route(update)
    elif text == "/complete_stop":
        await complete_stop(update)
    elif text == "/missed_stop":
        await missed_stop(update)
    elif text == "/my_eco_score":
        await city_eco_score(update)
    else:
        return False
    return True


async def dispatch_private_commands(update, text):
    """Private collector bot (PepeMotitoBot) commands; return True if handled."""
    if text == "/start":
        await start(update)
    elif text == "/help":
        await help_command(update)
    elif text == "/available_jobs":
        await available_jobs(update)
    elif text.startswith("/accept_job"):
        await accept_job(update)
    elif text == "/next_pickup":
        await next_pickup(update)
    elif text == "/complete_pickup":
        await complete_pickup(update)
    elif text == "/my_eco_score":
        await private_eco_score(update)
    else:
        return False
    return True


# -------------------------------
# 🌐 Main webhook
# -------------------------------

@csrf_exempt
async def telegram_webhook(request, bot_name):
    """Receive a Telegram update for `bot_name`, dispatch it, and fall back to the LLM assistant."""
    try:
        logger.info(f"Webhook called for bot: {bot_name}")

        try:
            bot_instance = await sync_to_async(TelegramBot.objects.get)(name=bot_name, is_active=True)
            logger.info(f"Loaded bot configuration: {bot_instance}")
        except TelegramBot.DoesNotExist:
            logger.error(f"Bot '{bot_name}' not found or inactive.")
            return JsonResponse({"error": f"Bot '{bot_name}' not found."}, status=400)

        if not bot_instance.assistant:
            logger.error(f"No assistant configured for bot '{bot_name}'.")
            return JsonResponse({"error": "Assistant not configured."}, status=400)

        if request.method == "POST":
            try:
                request_body = json.loads(request.body.decode("utf-8"))
                update = Update.de_json(request_body, Bot(token=bot_instance.token))
                logger.info(f"Update received: {update}")
            except json.JSONDecodeError as e:
                logger.error(f"Failed to decode JSON: {e}")
                return JsonResponse({"error": "Invalid JSON payload"}, status=400)

            if update.message:
                text = update.message.text or ""

                # Handle location always first
                if await handle_location_message(update):
                    return JsonResponse({"status": "ok"})

                # Handle voice or general simple responses next
                if await handle_voice_or_general_message(update):
                    return JsonResponse({"status": "ok"})

                # Then dispatch commands per bot
                if bot_name == "PepeBasuritaCoinsBot":
                    if await dispatch_citizen_commands(update, text):
                        return JsonResponse({"status": "ok"})
                elif bot_name == "PepeCamioncitoBot":
                    if await dispatch_city_commands(update, text):
                        return JsonResponse({"status": "ok"})
                elif bot_name == "PepeMotitoBot":
                    if await dispatch_private_commands(update, text):
                        return JsonResponse({"status": "ok"})

                # Otherwise fallback to LLM
                assistant_instance = await sync_to_async(LangchainAIService)(bot_instance.assistant)
                bot_response = await sync_to_async(assistant_instance.generate_response)(text)
                await update.message.reply_text(bot_response)

            return JsonResponse({"status": "ok"})

        logger.warning("Received non-POST request")
        return JsonResponse({"error": "Invalid request method"}, status=400)

    except Exception as e:
        logger.error(f"Error in webhook: {e}")
        return JsonResponse({"error": f"Unexpected error: {str(e)}"}, status=500)
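

# -------------------------------
# Example wiring (sketch, not part of this module): the webhook view above is
# normally exposed through a URL pattern that captures `bot_name`. The path
# string below is an assumption for illustration; the real route lives in the
# project's urls.py.
# -------------------------------
#
# from django.urls import path
# from . import views
#
# urlpatterns = [
#     path("telegram/webhook/<str:bot_name>/", views.telegram_webhook),
# ]
#
# The public URL of this endpoint is then registered with Telegram via the
# Bot API's setWebhook method, e.g.
# https://api.telegram.org/bot<TOKEN>/setWebhook?url=<public endpoint URL>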