import sys

from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
from langfuse.langchain import CallbackHandler

from .agent_bedrock import create_agent
from .config import REGION
from .tools import build_knowledge_base_tool
def main(user_query, history, model="anthropic.claude-sonnet-4-5-20250929-v1:0"):
    """Run the enrollment-assistant agent for a single user turn.

    Args:
        user_query: The user's question; sent as the final HumanMessage.
        history: Prior conversation messages (iterable of LangChain message
            objects), inserted between the system prompt and the new query.
            Pass ``None`` or ``[]`` for a fresh conversation.
            (Bug fix: this parameter was previously accepted but ignored.)
        model: Bedrock model identifier forwarded to ``create_agent``.

    Returns:
        dict with keys ``response`` (content of the agent's last message),
        ``input_tokens``, ``output_tokens`` and ``total_tokens``, aggregated
        from the ``usage_metadata`` of every AIMessage in the final state.
    """
    report_tools = [build_knowledge_base_tool()]

    # User-facing prompt is intentionally in Portuguese — do not translate.
    SYSTEM_PROMPT = """Você é um assistente de matrículas para o campus capivari do instituo federal de são paulo, tem acesso a uma tool que acessa uma knowledge base com informações sobre tanto a matricula dos alunos do técnico quanto superior do procedimento iterno, não responda perguntas sobre o meio de ingresso SISU."""

    langfuse_handler = CallbackHandler()
    agent = create_agent(model, REGION, tools=report_tools)

    initial_state = {
        "messages": [
            SystemMessage(content=SYSTEM_PROMPT),
            # Include prior turns so the agent has conversational context.
            *(history or []),
            HumanMessage(content=user_query),
        ],
        "current_step": "init",
    }

    # Langfuse callback traces the whole agent run.
    config = {"callbacks": [langfuse_handler]}
    final_state = agent.invoke(initial_state, config=config)

    # Sum token usage over every AI message that reports usage_metadata;
    # getattr(..., None) covers both "attribute missing" and "falsy value".
    total_input_tokens = 0
    total_output_tokens = 0
    for msg in final_state["messages"]:
        if isinstance(msg, AIMessage) and getattr(msg, "usage_metadata", None):
            total_input_tokens += msg.usage_metadata.get("input_tokens", 0)
            total_output_tokens += msg.usage_metadata.get("output_tokens", 0)

    return {
        "response": final_state["messages"][-1].content,
        "input_tokens": total_input_tokens,
        "output_tokens": total_output_tokens,
        "total_tokens": total_input_tokens + total_output_tokens,
    }


if __name__ == "__main__":
    # Bug fix: main() was called with no arguments, which always raised
    # TypeError (user_query and history are required). Take the question
    # from the command line and start with an empty conversation history.
    if len(sys.argv) < 2:
        raise SystemExit("usage: python -m <module> <question...>")
    result = main(" ".join(sys.argv[1:]), history=[])
    print(result["response"])