import json
import logging

from telegram import Update, Bot
from django.http import JsonResponse
from django.views.decorators.csrf import csrf_exempt
from asgiref.sync import sync_to_async

from .models import TelegramBot
from pxy_langchain.services import LangchainAIService
from .handlers import (
    start, help_command, handle_location,
    next_truck, report_trash, private_pickup, green_balance,
    next_route, complete_stop, missed_stop, city_eco_score,
    available_jobs, accept_job, next_pickup, complete_pickup, private_eco_score
)

logger = logging.getLogger(__name__)
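
# How this view is typically wired up (an illustrative sketch, not part of this
# module; the URL prefix is an assumption, adjust it to the project's urls.py):
#
#     from django.urls import path
#     from .views import telegram_webhook
#
#     urlpatterns = [
#         path("telegram/webhook/<str:bot_name>/", telegram_webhook, name="telegram_webhook"),
#     ]
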
@csrf_exempt
async def telegram_webhook(request, bot_name):
    """
    Webhook view that routes each bot to its own set of command handlers based
    on bot_name, falling back to the configured LLM assistant for free-form text.
    """
    try:
        logger.info(f"Webhook called for bot: {bot_name}")

        try:
            bot_instance = await sync_to_async(TelegramBot.objects.get)(name=bot_name, is_active=True)
            logger.info(f"Loaded bot configuration: {bot_instance}")
        except TelegramBot.DoesNotExist:
            logger.error(f"Bot '{bot_name}' not found or inactive.")
            return JsonResponse({"error": f"Bot '{bot_name}' not found."}, status=400)

        if not bot_instance.assistant:
            logger.error(f"No assistant configured for bot '{bot_name}'.")
            return JsonResponse({"error": "Assistant not configured."}, status=400)
        if request.method == "POST":
            try:
                request_body = json.loads(request.body.decode("utf-8"))
                update = Update.de_json(request_body, Bot(token=bot_instance.token))
                logger.info(f"Update received: {update}")
            except json.JSONDecodeError as e:
                logger.error(f"Failed to decode JSON: {e}")
                return JsonResponse({"error": "Invalid JSON payload"}, status=400)
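
            # For reference, Telegram posts update JSON roughly shaped like
            # {"update_id": 1, "message": {"message_id": 1, "chat": {...}, "text": "/start"}}
            # (illustrative only; the exact fields depend on the update type).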
            if update.message:
                text = update.message.text or ""

                # Handle location messages first
                if update.message.location:
                    await handle_location(update)
                    return JsonResponse({"status": "ok"})

                # Generate an LLM response up front for any incoming text
                assistant_instance = await sync_to_async(LangchainAIService)(bot_instance.assistant)
                bot_response = await sync_to_async(assistant_instance.generate_response)(text)
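
                # Exact command matches below short-circuit the request; the LLM
                # reply generated above is only used as the final fallback.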
                # 🚀 Citizen bot
                if bot_name == "PepeBasuritaCoinsBot":
                    if text == "/start":
                        await start(update)
                        return JsonResponse({"status": "ok"})
                    elif text == "/help":
                        await help_command(update)
                        return JsonResponse({"status": "ok"})
                    elif text == "/next_truck":
                        await next_truck(update)
                        return JsonResponse({"status": "ok"})
                    elif text == "/report_trash":
                        await report_trash(update)
                        return JsonResponse({"status": "ok"})
                    elif text == "/private_pickup":
                        await private_pickup(update)
                        return JsonResponse({"status": "ok"})
                    elif text == "/green_balance":
                        await green_balance(update)
                        return JsonResponse({"status": "ok"})

                # 🚚 City collector bot
                elif bot_name == "PepeCamioncitoBot":
                    if text == "/start":
                        await start(update)
                        return JsonResponse({"status": "ok"})
                    elif text == "/help":
                        await help_command(update)
                        return JsonResponse({"status": "ok"})
                    elif text == "/next_route":
                        await next_route(update)
                        return JsonResponse({"status": "ok"})
                    elif text == "/complete_stop":
                        await complete_stop(update)
                        return JsonResponse({"status": "ok"})
                    elif text == "/missed_stop":
                        await missed_stop(update)
                        return JsonResponse({"status": "ok"})
                    elif text == "/my_eco_score":
                        await city_eco_score(update)
                        return JsonResponse({"status": "ok"})

                # 🚛 Private collector bot
                elif bot_name == "PepeMotitoBot":
                    if text == "/start":
                        await start(update)
                        return JsonResponse({"status": "ok"})
                    elif text == "/help":
                        await help_command(update)
                        return JsonResponse({"status": "ok"})
                    elif text == "/available_jobs":
                        await available_jobs(update)
                        return JsonResponse({"status": "ok"})
                    elif text.startswith("/accept_job"):
                        await accept_job(update)
                        return JsonResponse({"status": "ok"})
                    elif text == "/next_pickup":
                        await next_pickup(update)
                        return JsonResponse({"status": "ok"})
                    elif text == "/complete_pickup":
                        await complete_pickup(update)
                        return JsonResponse({"status": "ok"})
                    elif text == "/my_eco_score":
                        await private_eco_score(update)
                        return JsonResponse({"status": "ok"})

                # Not a recognized command: reply with the LLM response
                await update.message.reply_text(bot_response)

            return JsonResponse({"status": "ok"})

        logger.warning("Received non-POST request")
        return JsonResponse({"error": "Invalid request method"}, status=400)

    except Exception as e:
        logger.error(f"Error in webhook: {e}")
        return JsonResponse({"error": f"Unexpected error: {str(e)}"}, status=500)
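
# Registering the webhook with Telegram (an illustrative sketch; the domain and
# path are assumptions and must match how this view is exposed publicly):
#
#     from telegram import Bot
#     await Bot(token="<BOT TOKEN>").set_webhook(
#         url="https://example.com/telegram/webhook/PepeBasuritaCoinsBot/"
#     )
#
# Equivalently, via the HTTP Bot API:
#     https://api.telegram.org/bot<TOKEN>/setWebhook?url=<public URL of this view>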