LLM answer first, before the commands

Ekaropolus 2025-07-06 02:30:40 -06:00
parent dc221eab29
commit 52e0f46364


@@ -12,15 +12,13 @@ from .handlers import (
     next_route, complete_stop, missed_stop, city_eco_score,
     available_jobs, accept_job, next_pickup, complete_pickup, private_eco_score
 )
 logger = logging.getLogger(__name__)
 @csrf_exempt
 async def telegram_webhook(request, bot_name):
     """
     Webhook view that routes each bot to its own set of handlers based on bot_name.
     - 'pepebasurita': bot for citizens
     - 'pepecamioncito': bot for municipal collectors
     - 'pepemotito': bot for private collectors
     """
     try:
         logger.info(f"Webhook called for bot: {bot_name}")
@@ -48,58 +46,64 @@ async def telegram_webhook(request, bot_name):
         if update.message:
             text = update.message.text or ""
+            # If the update carries a location, handle it first
+            if update.message.location:
+                await handle_location(update)
+                return JsonResponse({"status": "ok"})
+            # Generate the LLM response first, for any incoming text
+            assistant_instance = await sync_to_async(LangchainAIService)(bot_instance.assistant)
+            bot_response = await sync_to_async(assistant_instance.generate_response)(text)
             # 🚀 Citizen bot
             if bot_name == "PepeBasuritaCoinsBot":
                 if text == "/start":
-                    await start(update)
+                    await start(update); return JsonResponse({"status": "ok"})
                 elif text == "/help":
-                    await help_command(update)
+                    await help_command(update); return JsonResponse({"status": "ok"})
                 elif text == "/next_truck":
-                    await next_truck(update)
+                    await next_truck(update); return JsonResponse({"status": "ok"})
                 elif text == "/report_trash":
-                    await report_trash(update)
+                    await report_trash(update); return JsonResponse({"status": "ok"})
                 elif text == "/private_pickup":
-                    await private_pickup(update)
+                    await private_pickup(update); return JsonResponse({"status": "ok"})
                 elif text == "/green_balance":
-                    await green_balance(update)
-                elif update.message.location:
-                    await handle_location(update)
-                else:
-                    assistant_instance = await sync_to_async(LangchainAIService)(bot_instance.assistant)
-                    bot_response = await sync_to_async(assistant_instance.generate_response)(text)
-                    await update.message.reply_text(bot_response)
+                    await green_balance(update); return JsonResponse({"status": "ok"})
             # 🚚 City collector bot
             elif bot_name == "PepeCamioncitoBot":
                 if text == "/start":
-                    await start(update)
+                    await start(update); return JsonResponse({"status": "ok"})
                 elif text == "/help":
-                    await help_command(update)
+                    await help_command(update); return JsonResponse({"status": "ok"})
                 elif text == "/next_route":
-                    await next_route(update)
+                    await next_route(update); return JsonResponse({"status": "ok"})
                 elif text == "/complete_stop":
-                    await complete_stop(update)
+                    await complete_stop(update); return JsonResponse({"status": "ok"})
                 elif text == "/missed_stop":
-                    await missed_stop(update)
+                    await missed_stop(update); return JsonResponse({"status": "ok"})
                 elif text == "/my_eco_score":
-                    await city_eco_score(update)
+                    await city_eco_score(update); return JsonResponse({"status": "ok"})
             # 🚛 Private collector bot
             elif bot_name == "PepeMotitoBot":
                 if text == "/start":
-                    await start(update)
+                    await start(update); return JsonResponse({"status": "ok"})
                 elif text == "/help":
-                    await help_command(update)
+                    await help_command(update); return JsonResponse({"status": "ok"})
                 elif text == "/available_jobs":
-                    await available_jobs(update)
+                    await available_jobs(update); return JsonResponse({"status": "ok"})
                 elif text.startswith("/accept_job"):
-                    await accept_job(update)
+                    await accept_job(update); return JsonResponse({"status": "ok"})
                 elif text == "/next_pickup":
-                    await next_pickup(update)
+                    await next_pickup(update); return JsonResponse({"status": "ok"})
                 elif text == "/complete_pickup":
-                    await complete_pickup(update)
+                    await complete_pickup(update); return JsonResponse({"status": "ok"})
                 elif text == "/my_eco_score":
-                    await private_eco_score(update)
+                    await private_eco_score(update); return JsonResponse({"status": "ok"})
+            # If no command matched, reply with the LLM answer
+            await update.message.reply_text(bot_response)
         return JsonResponse({"status": "ok"})
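
After this change the webhook generates the LLM reply before dispatching commands, returns immediately after any matched command handler, and falls back to the LLM reply for unrecognized text. Below is a minimal sketch of how the reordered flow could be exercised locally with Django's AsyncClient; the URL pattern "/webhook/<bot_name>/" and the chat/user IDs are assumptions, since neither the project's urls.py nor the Telegram update parsing is shown in this diff.

# Hypothetical sketch (not part of the commit): post a minimal Telegram-style
# update to the webhook. The route and payload fields are assumptions.
import json
from django.test import AsyncClient

async def post_fake_update(bot_name: str, text: str):
    """POST a fake Telegram update carrying `text` to the webhook for `bot_name`."""
    client = AsyncClient()
    update = {
        "update_id": 1,
        "message": {
            "message_id": 1,
            "date": 0,
            "chat": {"id": 42, "type": "private"},
            "from": {"id": 42, "is_bot": False, "first_name": "Test"},
            "text": text,
        },
    }
    # A known command such as "/start" should return right after its handler;
    # any other text should now receive the LLM-generated bot_response instead.
    return await client.post(
        f"/webhook/{bot_name}/",  # assumed route; adjust to the project's urls.py
        data=json.dumps(update),
        content_type="application/json",
    )

For example, awaiting post_fake_update("PepeBasuritaCoinsBot", "hello") should exercise the new fallback branch that replies with bot_response, while post_fake_update("PepeCamioncitoBot", "/next_route") should return right after next_route(update).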