# minions-ai-agents/src/app.py — Chainlit entrypoint (69 lines, 2.9 KiB, Python)
import os
# Force disable tracing to prevent timeout in Docker.
# NOTE(review): set via env var BEFORE any crewai-related import below so the
# tracing subsystem never initializes — keep this line above the imports.
os.environ["CREWAI_TRACING_ENABLED"] = "false"
import chainlit as cl
# Project-local modules: crew factory and the request router.
from src.crews.definitions import CrewDefinitions
from src.router import SmartRouter
@cl.on_chat_start
async def on_chat_start():
    """Open a new chat session: clear any crew selection and greet the user."""
    # No crew chosen yet — the router will pick one on the first message.
    cl.user_session.set("selected_crew", None)
    # Welcome message without emojis, more natural.
    greeting = (
        "**Antigravity Brain Online**\n\nEstou pronto para ajudar. "
        "Pode me dizer o que precisa? Por exemplo: "
        "*'Verificar a saúde do servidor'* ou *'Criar um novo agente'*."
    )
    await cl.Message(content=greeting).send()
@cl.on_message
async def on_message(message: cl.Message):
    """Handle one user turn: route to a crew (sticky per session), run it, reply.

    The selected crew persists in the session until the user sends ``/reset``
    or an execution error occurs, in which case the next turn is re-routed.
    """
    user_input = message.content

    # 1. Session commands.
    if user_input.strip() == "/reset":
        cl.user_session.set("selected_crew", None)
        await cl.Message(content="Sessão reiniciada. Vou reavaliar a melhor equipe para sua próxima solicitação.").send()
        return

    # 2. Determine the crew. Sticky session: once routed, stays routed until
    # /reset. PRO: better context. CON: user asks 'Fix server' then
    # 'Write poem' -> Infra crew tries to write the poem.
    current_crew = cl.user_session.get("selected_crew")
    if not current_crew:
        # Show a placeholder while the router decides, then replace its
        # content in place (the original update() without a content change
        # was a no-op and left a stale "Analisando..." message behind).
        msg_routing = cl.Message(content="Analisando sua solicitação...")
        await msg_routing.send()
        current_crew = SmartRouter.route(user_input)
        cl.user_session.set("selected_crew", current_crew)
        msg_routing.content = f"**Direcionando para:** {current_crew}"
        await msg_routing.update()

    # 3. Execution with a visible loading state, updated with the result.
    msg = cl.Message(content=f"**{current_crew}** está processando sua solicitação...")
    await msg.send()
    try:
        # Assemble the crew for this request.
        crew = CrewDefinitions.assemble_crew(current_crew, inputs={"topic": user_input})
        # kickoff() is synchronous/blocking; run it through Chainlit's thread
        # pool so the event loop (and the UI) stays responsive.
        result = await cl.make_async(crew.kickoff)(inputs={"topic": user_input})
        msg.content = f"### Relatório de {current_crew}\n\n{str(result)}"
        await msg.update()
    except Exception as e:
        # Boundary handler: surface the error to the user instead of crashing.
        msg.content = f"**Erro ao processar solicitação:** {str(e)}"
        await msg.update()
        # Reset crew on failure so user can try again or get re-routed.
        cl.user_session.set("selected_crew", None)