Setting up bot response using LLM
All checks were successful: continuous-integration/drone/push build is passing.

Author: Ekaropolus
Date: 2025-07-07 10:54:36 -06:00
Parent: 7d948fddad
Commit: 41fdd96225


@@ -139,9 +139,9 @@ async def telegram_webhook(request, bot_name):
                 "No pude entender tu mensaje de voz. Intenta de nuevo."
             )
             return JsonResponse({"status": "ok"})
-        # setattr(update.message, "_text", transcript)
-        await update.message.reply_text(transcript)
-        # await report_trash(update)
+        assistant_instance = await sync_to_async(LangchainAIService)(bot_instance.assistant)
+        bot_response = await sync_to_async(assistant_instance.generate_response)(transcript)
+        await update.message.reply_text(bot_response)
         return JsonResponse({"status": "ok"})

     # 3) Comandos de texto