Feat: Final changes to prompt
This commit is contained in:
@@ -7,7 +7,7 @@ COPY requirements.txt ${LAMBDA_TASK_ROOT}
|
|||||||
RUN pip install -r requirements.txt
|
RUN pip install -r requirements.txt
|
||||||
|
|
||||||
# Copy function code
|
# Copy function code
|
||||||
COPY agent.py ${LAMBDA_TASK_ROOT}
|
COPY ./ ${LAMBDA_TASK_ROOT}
|
||||||
|
|
||||||
# Set the CMD to your handler (could also be done as a parameter override outside of the Dockerfile)
|
# Set the CMD to your handler (could also be done as a parameter override outside of the Dockerfile)
|
||||||
CMD ["agent.hello" ]
|
CMD ["agent.agent_call"]
|
||||||
@@ -1,37 +1,22 @@
|
|||||||
import json
|
import json
|
||||||
from langchain_core.tools import tool
|
|
||||||
from langchain.agents.output_parsers import ReActJsonSingleInputOutputParser
|
|
||||||
from langchain_core.prompts import ChatPromptTemplate,MessagesPlaceholder, PromptTemplate
|
|
||||||
from langchain_core.messages import HumanMessage,AIMessage
|
|
||||||
from langchain_core.tools import render_text_description
|
|
||||||
from langchain.chains import create_history_aware_retriever
|
|
||||||
from langchain.chains.combine_documents import create_stuff_documents_chain
|
|
||||||
import langchain.chains
|
|
||||||
from langchain.chains import create_history_aware_retriever
|
|
||||||
from langchain.chains.combine_documents import create_stuff_documents_chain
|
|
||||||
from langchain.agents.format_scratchpad import format_log_to_str
|
|
||||||
from langchain.chains import create_retrieval_chain
|
|
||||||
from langchain_aws import ChatBedrock
|
from langchain_aws import ChatBedrock
|
||||||
from langchain_aws.retrievers import AmazonKnowledgeBasesRetriever
|
from langchain_aws.retrievers import AmazonKnowledgeBasesRetriever
|
||||||
from langchain.chains import ConversationalRetrievalChain
|
|
||||||
from typing import Union
|
|
||||||
from langchain_core.agents import AgentAction, AgentFinish
|
|
||||||
from langchain.agents.output_parsers import ReActSingleInputOutputParser
|
|
||||||
from langchain.tools import Tool
|
|
||||||
from langgraph.checkpoint.memory import MemorySaver
|
from langgraph.checkpoint.memory import MemorySaver
|
||||||
from langgraph.prebuilt import create_react_agent
|
from langgraph.prebuilt import create_react_agent
|
||||||
import os
|
import os
|
||||||
|
from langfuse import Langfuse
|
||||||
def find_tool_by_name(tools: list[Tool],tool_name:str):
|
from langfuse.langchain import CallbackHandler
|
||||||
for tool in tools:
|
from tools import secrets,dynamo
|
||||||
if tool.name==tool_name:
|
import time
|
||||||
print(tool.name)
|
langfuse = Langfuse(
|
||||||
print("\n\n")
|
public_key=json.loads(secrets.get_secret())['api-langfuse-public'],
|
||||||
return tool
|
secret_key=json.loads(secrets.get_secret())['api-langfuse-secret'],
|
||||||
raise ValueError(f"Tool with name {tool_name} not found")
|
host="http://107.20.48.139:3000"
|
||||||
def hello(event,context):
|
)
|
||||||
|
langfuse_handler = CallbackHandler()
|
||||||
|
def agent_call(event,context):
|
||||||
llm = ChatBedrock(
|
llm = ChatBedrock(
|
||||||
model_id="arn:aws:bedrock:us-east-1:654654422992:application-inference-profile/glvyppv11ds5",
|
model_id="us.anthropic.claude-sonnet-4-20250514-v1:0",
|
||||||
region_name="us-east-1",
|
region_name="us-east-1",
|
||||||
#aws_access_key_id=os.environ["AWS_ACCESS_KEY_ID"],
|
#aws_access_key_id=os.environ["AWS_ACCESS_KEY_ID"],
|
||||||
#aws_secret_access_key=os.environ["AWS_SECRET_ACCESS_KEY"],
|
#aws_secret_access_key=os.environ["AWS_SECRET_ACCESS_KEY"],
|
||||||
@@ -45,6 +30,10 @@ def hello(event,context):
|
|||||||
retrieval_config={"vectorSearchConfiguration": {"numberOfResults": 4}},
|
retrieval_config={"vectorSearchConfiguration": {"numberOfResults": 4}},
|
||||||
|
|
||||||
)
|
)
|
||||||
|
if event['chat_history']==[]:
|
||||||
|
history=dynamo.read_memory(event['username'])
|
||||||
|
else:
|
||||||
|
history=event['chat_history']
|
||||||
memory = MemorySaver()
|
memory = MemorySaver()
|
||||||
model = llm
|
model = llm
|
||||||
tools = [retriever.as_tool()]
|
tools = [retriever.as_tool()]
|
||||||
@@ -53,18 +42,47 @@ def hello(event,context):
|
|||||||
Act like a human in Portuguese Brasil.
|
Act like a human in Portuguese Brasil.
|
||||||
You are a assistant for students from diferent school campuses of the Instituto Federal de São Paulo.
|
You are a assistant for students from diferent school campuses of the Instituto Federal de São Paulo.
|
||||||
Your objective is to answer diferent questions about editais de bolsas, which can be acessed by the AmazonKnowledgeBaseRetriever tool.
|
Your objective is to answer diferent questions about editais de bolsas, which can be acessed by the AmazonKnowledgeBaseRetriever tool.
|
||||||
The editais change based on the student's campus, if he is a superior or médio student, and if he is already receiving any bolsa or not. Get these informations from the user, only prompting one per message to the user, before ansewering any questions.
|
The editais change based on the student's campus, if he is a superior or médio student, and if he is already receiving any type of "Bolsa PAP", not including "Iniciação Cientifica" or "bolsa de extensão" . Get these informations from the user, only prompting one per message to the user, before ansewering any questions.
|
||||||
|
The criteria for every type of "bolsa" is different, look at the editals to check. Been a student, who doesnt have a bolsa for any campi is not enough for any bolsa, so check for the extra conditions.
|
||||||
Retrieve the answer from the most recent edital that contemplates this type of student.
|
Retrieve the answer from the most recent edital that contemplates this type of student.
|
||||||
The chat history will be given, without the previous retrieved editais info.
|
The chat history will be given, without the previous retrieved editais info.
|
||||||
If there are info or context missing ask the user before proceding with the document retrie
|
If there are info or context missing ask the user before proceding with the document retrie
|
||||||
Also return the title of the source document, with edital number and year, not the file name.val.
|
Also return the title of the source document, with edital number and year, not the file name.val.
|
||||||
Document with MTO are from "Campus Matão" and SPO from "Campus São Paulo".
|
Document with MTO are from "Campus Matão", SPO from "Campus São Paulo" and CBT from "Campus Cubatão".
|
||||||
|
If you are prompted after a campus that is not Matão, São Paulo or Cubatão, answer that you can only work with one of these.
|
||||||
Don't get information from diferent campus under no circunstance.
|
Don't get information from diferent campus under no circunstance.
|
||||||
You cant evaluate if someone can receive a bolsa, only get the info from docs and send them to the procedure in the document.
|
You cant evaluate if someone can receive a bolsa, only get the info from docs and send them to the procedure in the document.
|
||||||
|
If the prompt is too generic like or too short, like "sim" or "quero bolsa", ask for more context before retrieving from the documents.
|
||||||
|
If the user prompt you about wanting to get or to know more about a bolsa pap, ask wich of them (saúde,moradia,transporte,creche didático-pedagócio) they want to know more.
|
||||||
<\rules>
|
<\rules>
|
||||||
<glossary>
|
<glossary>
|
||||||
"não registrado" means trabalhador informal.
|
"não registrado" means trabalhador informal.
|
||||||
|
"banco inter" means "banco intermedium", if a user asks about inter, include in the answer this info.
|
||||||
<\glossary>
|
<\glossary>
|
||||||
|
<chain_of_thought>
|
||||||
|
Use these questions as examples:
|
||||||
|
Quando a minha bolsa vai cair?
|
||||||
|
Resposta: Devido a complexidade da estrtura em volta do pagamento das bolsas, não existe um dia fixo, ele tende a variar dependendo do mês.
|
||||||
|
|
||||||
|
Não tenho conta bancária em meu nome, pode ser no nome do meu responsável?
|
||||||
|
Resposta: Não! A conta deve ser aberta no nome do estudante e pode ser em bancos digitais sem que seja necessário sair de casa, fazendo tudo pela internet!
|
||||||
|
|
||||||
|
Posso enviar a Documentação solicitada no Edital por e-mail?
|
||||||
|
Resposta: Não! Documentos enviados por e-mail não serão considerados para análise.
|
||||||
|
|
||||||
|
A comprovação de renda deve ser referente ao qual mês?
|
||||||
|
Resposta: agosto que é o mês mais recente em relação a inscrição.
|
||||||
|
|
||||||
|
Não tenho a Carteira de Trabalho impressa. Serve a digital?
|
||||||
|
Resposta: Sim!
|
||||||
|
|
||||||
|
Na carteira de trabalho consta o salário. Ela substitui o contracheque?
|
||||||
|
Resposta: Não, pois somente no contracheque encontramos o valor da renda BRUTA.
|
||||||
|
|
||||||
|
Trabalhador informal e trabalhador autônomo são sinônimos?
|
||||||
|
Resposta: Não! Trabalhadores informais são os que trabalham por conta própria, NÃO tem CNPJ e geralmente não pagam o carnê do INSS, não tendo nenhum direito trabalhista. Diferente deste, o profissional autônomo geralmente é especializado em algum segmento do mercado e atua por conta própria, geralmente sendo MEI – Micro Empreendedores Individuais e tendo CNPJ.
|
||||||
|
|
||||||
|
<\chain_of_thought>
|
||||||
<general_info>
|
<general_info>
|
||||||
Tipos de bolsa:
|
Tipos de bolsa:
|
||||||
Auxílio Alimentação: É um auxílio financeiro que tem por objetivo garantir e disponibilizar condições de que o estudante usufrua de ao menos uma refeição por dia. É pago mensalmente, e caso o aluno não frequente as aulas todos os dias ele poderá ser oferecido de forma parcial, calculado pela quantidade de dias que o aluno frequenta a aula.
|
Auxílio Alimentação: É um auxílio financeiro que tem por objetivo garantir e disponibilizar condições de que o estudante usufrua de ao menos uma refeição por dia. É pago mensalmente, e caso o aluno não frequente as aulas todos os dias ele poderá ser oferecido de forma parcial, calculado pela quantidade de dias que o aluno frequenta a aula.
|
||||||
@@ -99,13 +117,16 @@ Answer the following questions as best you can. You have access to the following
|
|||||||
|
|
||||||
{tools}
|
{tools}
|
||||||
|
|
||||||
Chat History:"""+str(event["chat_history"])
|
Chat History:"""+str(history)
|
||||||
|
|
||||||
agent_executor = create_react_agent(model, tools, checkpointer=memory, prompt=prompt)
|
agent_executor = create_react_agent(model, tools, checkpointer=memory, prompt=prompt)
|
||||||
config = {"configurable": {"thread_id": "abc123"}}
|
config = {"configurable": {"thread_id": "abc123"},"callbacks": [langfuse_handler]}
|
||||||
input_message = event["message"]
|
input_message = event["message"]
|
||||||
#input_message=[{"role":"user","content":"aluno superior, nunca recebi auxilio, campus são paulo, Meu pai não é registrado, como faço para ganhar auxilio?"}]
|
dict=input_message[0]
|
||||||
response=""
|
response=""
|
||||||
for step in agent_executor.stream({"messages": input_message}, config, stream_mode="values"):
|
for step in agent_executor.stream({"messages": input_message}, config, stream_mode="values"):
|
||||||
response={"json":(step["messages"][-1].text())}
|
response={"json":(step["messages"][-1].text())}
|
||||||
|
response['dynamo_reponse']=dynamo.write_memory(event['username'],int(time.time()),dict['role'],dict['content'])
|
||||||
|
#response['chat_history']=history
|
||||||
|
response['chat_history']=[]
|
||||||
return (response)
|
return (response)
|
||||||
@@ -1,74 +0,0 @@
|
|||||||
import boto3
|
|
||||||
from langchain_core.prompts import ChatPromptTemplate,MessagesPlaceholder
|
|
||||||
from langchain_core.messages import HumanMessage,AIMessage
|
|
||||||
from langchain.chains import create_history_aware_retriever
|
|
||||||
from langchain.chains.combine_documents import create_stuff_documents_chain
|
|
||||||
import langchain.chains
|
|
||||||
from langchain.chains import create_retrieval_chain
|
|
||||||
from langchain_aws import ChatBedrock
|
|
||||||
from langchain_aws.retrievers import AmazonKnowledgeBasesRetriever
|
|
||||||
from langchain.chains import ConversationalRetrievalChain
|
|
||||||
from dotenv import load_dotenv
|
|
||||||
load_dotenv()
|
|
||||||
import os
|
|
||||||
llm = ChatBedrock(
|
|
||||||
model_id="arn:aws:bedrock:us-east-1:654654422992:application-inference-profile/d9blf0g3fzqz",
|
|
||||||
region_name="us-east-1",
|
|
||||||
aws_access_key_id=os.environ["AWS_ACCESS_KEY_ID"],
|
|
||||||
aws_secret_access_key=os.environ["AWS_SECRET_ACCESS_KEY"],
|
|
||||||
aws_session_token=os.environ["AWS_SESSION_TOKEN"],
|
|
||||||
model_kwargs={"temperature": 0.2, 'max_tokens': 1000,},
|
|
||||||
provider='anthropic'
|
|
||||||
)
|
|
||||||
# Cria o prompt de busca
|
|
||||||
prompt_search_query = ChatPromptTemplate.from_messages([
|
|
||||||
MessagesPlaceholder(variable_name="chat_history"),
|
|
||||||
("user", "{input}"),
|
|
||||||
("assistant", "-"),
|
|
||||||
("user", "Given the above conversation, generate a search query to look up to get information relevant to the conversation")
|
|
||||||
])
|
|
||||||
|
|
||||||
# Cria o prompt de resposta
|
|
||||||
prompt_get_answer = ChatPromptTemplate.from_messages([
|
|
||||||
("system", "Answer the user's questions based on the below context:\n\n{context}"),
|
|
||||||
MessagesPlaceholder(variable_name="chat_history"),
|
|
||||||
("user", "{input}")
|
|
||||||
])
|
|
||||||
|
|
||||||
# Conecta ao Qdrant
|
|
||||||
retriever = AmazonKnowledgeBasesRetriever(
|
|
||||||
knowledge_base_id="RBD9TI5HYU",
|
|
||||||
region_name="us-east-1",
|
|
||||||
retrieval_config={"vectorSearchConfiguration": {"numberOfResults": 4}},
|
|
||||||
|
|
||||||
)
|
|
||||||
# Cria o retriever com histórico
|
|
||||||
retriever_chain = create_history_aware_retriever(llm, retriever, prompt_search_query)
|
|
||||||
|
|
||||||
# Cria o documento chain
|
|
||||||
document_chain = create_stuff_documents_chain(llm, prompt_get_answer)
|
|
||||||
|
|
||||||
# Cria o retrieval chain
|
|
||||||
retrieval_chain= create_retrieval_chain(
|
|
||||||
retriever_chain,document_chain
|
|
||||||
)
|
|
||||||
|
|
||||||
def chat_with_bot(user_input, chat_history):
|
|
||||||
"""
|
|
||||||
Função para interagir com o chatbot.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
user_input (str): A entrada do usuário.
|
|
||||||
chat_history (list): O histórico da conversa, incluindo mensagens do usuário e do assistente.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
str: A resposta do chatbot.
|
|
||||||
"""
|
|
||||||
# Chama o chain de recuperação com o histórico e a entrada do usuário
|
|
||||||
response = retrieval_chain.invoke({
|
|
||||||
"chat_history": chat_history,
|
|
||||||
"input": user_input,
|
|
||||||
})
|
|
||||||
# Retorna a resposta do assistente
|
|
||||||
return response['answer']
|
|
||||||
print(chat_with_bot("Quanto é o auxilio?",chat_history=[{"role":"user","content":"Hello"}]))
|
|
||||||
1470
agent/poetry.lock
generated
1470
agent/poetry.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -1,20 +0,0 @@
|
|||||||
[tool.poetry]
|
|
||||||
name = "agent"
|
|
||||||
version = "0.1.0"
|
|
||||||
description = "Agente edital IFSP"
|
|
||||||
authors = ["Lucas DNX"]
|
|
||||||
readme = "README.md"
|
|
||||||
|
|
||||||
[tool.poetry.dependencies]
|
|
||||||
python = "^3.12"
|
|
||||||
langchain-core = "^0.3.75"
|
|
||||||
langchain = "^0.3.27"
|
|
||||||
boto3 = "^1.40.19"
|
|
||||||
langchain-aws = "^0.2.31"
|
|
||||||
dotenv = "^0.9.9"
|
|
||||||
langgraph = "^0.6.7"
|
|
||||||
|
|
||||||
|
|
||||||
[build-system]
|
|
||||||
requires = ["poetry-core"]
|
|
||||||
build-backend = "poetry.core.masonry.api"
|
|
||||||
@@ -2,4 +2,5 @@ langchain_core
|
|||||||
langchain
|
langchain
|
||||||
langchain_aws
|
langchain_aws
|
||||||
langgraph
|
langgraph
|
||||||
|
langfuse
|
||||||
|
boto3
|
||||||
41
agent/tools/dynamo.py
Normal file
41
agent/tools/dynamo.py
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
import boto3
|
||||||
|
def write_memory(user,timestamp,role,content):
|
||||||
|
dynamodb = boto3.resource('dynamodb')
|
||||||
|
|
||||||
|
table = dynamodb.Table('br-edu-ifsp-ifsp-ret-memoria-tabela-chatbot-editais-dev') # Replace 'YourTableName' with your actual table name
|
||||||
|
|
||||||
|
item_data = {
|
||||||
|
'UserId': user, # Replace with your partition key attribute and value
|
||||||
|
'Timestamp': timestamp, # Replace with your sort key attribute and value (if applicable)
|
||||||
|
'role': role,
|
||||||
|
'content': content
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
response = table.put_item(Item=item_data)
|
||||||
|
return response
|
||||||
|
except Exception as e:
|
||||||
|
return "Error adding item:"+str(e)
|
||||||
|
def read_memory(userid):
|
||||||
|
dynamodb = boto3.resource('dynamodb')
|
||||||
|
table = dynamodb.Table('br-edu-ifsp-ifsp-ret-memoria-tabela-chatbot-editais-dev')
|
||||||
|
|
||||||
|
# Query parameters
|
||||||
|
|
||||||
|
try:
|
||||||
|
response = table.query(
|
||||||
|
KeyConditionExpression=boto3.dynamodb.conditions.Key('UserId').eq(userid),
|
||||||
|
ScanIndexForward=False, # Descending order
|
||||||
|
Limit=30 # Get only the latest item
|
||||||
|
)
|
||||||
|
|
||||||
|
items = response.get('Items', [])
|
||||||
|
if items:
|
||||||
|
latest_items = items
|
||||||
|
return latest_items
|
||||||
|
else:
|
||||||
|
return []
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
print("Error querying DynamoDB:", str(e))
|
||||||
|
|
||||||
25
agent/tools/secrets.py
Normal file
25
agent/tools/secrets.py
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
import boto3
|
||||||
|
from botocore.exceptions import ClientError
|
||||||
|
def get_secret():
|
||||||
|
|
||||||
|
secret_name = "dev/chatboteditais/apigateway/apikey"
|
||||||
|
region_name = "us-east-1"
|
||||||
|
|
||||||
|
# Create a Secrets Manager client
|
||||||
|
session = boto3.session.Session()
|
||||||
|
client = session.client(
|
||||||
|
service_name='secretsmanager',
|
||||||
|
region_name=region_name
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
get_secret_value_response = client.get_secret_value(
|
||||||
|
SecretId=secret_name
|
||||||
|
)
|
||||||
|
except ClientError as e:
|
||||||
|
# For a list of exceptions thrown, see
|
||||||
|
# https://docs.aws.amazon.com/secretsmanager/latest/apireference/API_GetSecretValue.html
|
||||||
|
raise e
|
||||||
|
|
||||||
|
secret = get_secret_value_response['SecretString']
|
||||||
|
return secret
|
||||||
1
front/.dockerignore
Normal file
1
front/.dockerignore
Normal file
@@ -0,0 +1 @@
|
|||||||
|
.venv
|
||||||
@@ -20,6 +20,9 @@ if "chat_answer_history" not in st.session_state:
|
|||||||
if "chat_history" not in st.session_state:
|
if "chat_history" not in st.session_state:
|
||||||
st.session_state["chat_history"] = []
|
st.session_state["chat_history"] = []
|
||||||
prompt=st.chat_input(placeholder="Digite uma mensagem...",key="prompt")
|
prompt=st.chat_input(placeholder="Digite uma mensagem...",key="prompt")
|
||||||
|
for generated_response, user_query in zip(st.session_state["chat_answer_history"],st.session_state["user_prompt_history"]):
|
||||||
|
st.chat_message("user").write(user_query)
|
||||||
|
st.chat_message("assistant").write(generated_response)
|
||||||
def create_sources_string(source_urls: Set[str])->str:
|
def create_sources_string(source_urls: Set[str])->str:
|
||||||
if not source_urls:
|
if not source_urls:
|
||||||
return ""
|
return ""
|
||||||
@@ -30,22 +33,25 @@ def create_sources_string(source_urls: Set[str])->str:
|
|||||||
sources_string+=f"{i+1}, {source}\n"
|
sources_string+=f"{i+1}, {source}\n"
|
||||||
return sources_string
|
return sources_string
|
||||||
if prompt:
|
if prompt:
|
||||||
|
st.chat_message("user").write(prompt)
|
||||||
with st.spinner("Generating response.."):
|
with st.spinner("Generating response.."):
|
||||||
payload=[{"role":"user","content":prompt}]
|
payload=[{"role":"user","content":prompt}]
|
||||||
content={"message":payload,"chat_history":st.session_state["chat_history"]}
|
content={"message":payload,"chat_history":st.session_state["chat_history"],"username":st.session_state["username"]}
|
||||||
headers={"Content-type":"application/json","x-api-key":json.loads(st_auth.get_secret())['api-gateway-api-key']}
|
headers={"Content-type":"application/json","x-api-key":json.loads(st_auth.get_secret())['api-gateway-api-key']}
|
||||||
generated_response=json.loads(requests.post(url,json=content,headers=headers).text)
|
generated_response=json.loads(requests.post(url,json=content,headers=headers).text)
|
||||||
|
if 'chat_history' in generated_response:
|
||||||
|
if st.session_state["chat_history"] == []:
|
||||||
|
st.session_state["chat_history"] = generated_response['chat_history']
|
||||||
if 'json' in generated_response:
|
if 'json' in generated_response:
|
||||||
generated_response=generated_response['json']
|
generated_response=generated_response['json']
|
||||||
|
if 'message' in generated_response and generated_response['message']=="Endpoint request timed out":
|
||||||
|
generated_response="Falta de dados: Por favor encaminhe a conversa para a equipe desenvolvedora"
|
||||||
#generated_response=[{"role":"user","content":prompt}]
|
#generated_response=[{"role":"user","content":prompt}]
|
||||||
# sources= set([doc.metadata["source"] for doc in generated_response['context']])
|
# sources= set([doc.metadata["source"] for doc in generated_response['context']])
|
||||||
#formatted_response=f"{generated_response['answer']} \n\n {create_sources_string(sources)}"
|
#formatted_response=f"{generated_response['answer']} \n\n {create_sources_string(sources)}"
|
||||||
formatted_response=generated_response
|
formatted_response=generated_response
|
||||||
|
st.chat_message("assistant").write(formatted_response)
|
||||||
st.session_state["user_prompt_history"].append(prompt)
|
st.session_state["user_prompt_history"].append(prompt)
|
||||||
st.session_state["chat_answer_history"].append(formatted_response)
|
st.session_state["chat_answer_history"].append(formatted_response)
|
||||||
st.session_state["chat_history"]=st.session_state["chat_history"]+[{"role":"user","content":prompt}]+[{"role":"assistant","content":formatted_response}]
|
st.session_state["chat_history"]=st.session_state["chat_history"]+[{"role":"user","content":prompt}]+[{"role":"assistant","content":formatted_response}]
|
||||||
st.session_state.user_input=""
|
st.session_state.user_input=""
|
||||||
if st.session_state["chat_answer_history"]:
|
|
||||||
for generated_response, user_query in zip(st.session_state["chat_answer_history"],st.session_state["user_prompt_history"]):
|
|
||||||
st.chat_message("user").write(user_query)
|
|
||||||
st.chat_message("assistant").write(generated_response)
|
|
||||||
@@ -3,3 +3,4 @@ requests
|
|||||||
pyyaml
|
pyyaml
|
||||||
boto3
|
boto3
|
||||||
streamlit-authenticator
|
streamlit-authenticator
|
||||||
|
botocore
|
||||||
24
infra/dynamodb/Pulumi.ifsp-chatbot.yaml
Normal file
24
infra/dynamodb/Pulumi.ifsp-chatbot.yaml
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
config:
|
||||||
|
aws:region: us-east-1
|
||||||
|
dynamo:account_id: "654654422992"
|
||||||
|
dynamo:project_name: br-edu-ifsp-ifsp-ret-memoria-chatbot-editais-dev
|
||||||
|
dynamo:environment: dev
|
||||||
|
|
||||||
|
dynamo:tags:
|
||||||
|
nome: memoria-chatbot-editais
|
||||||
|
projeto: chatbot-editais
|
||||||
|
ambinte: dev
|
||||||
|
responsavel: dti.prd@ifsp.edu.bt
|
||||||
|
centro-de-custo: ti-poc-2025
|
||||||
|
campus: ifsp-ret
|
||||||
|
criticidade: baixa
|
||||||
|
data-de-criacao: 10/10/2025
|
||||||
|
backup: nao
|
||||||
|
servico: nosql-database
|
||||||
|
owner: ai-team
|
||||||
|
setor: ti
|
||||||
|
area: infra
|
||||||
|
prospeccao: sim
|
||||||
|
poc: sim
|
||||||
|
ia: sim
|
||||||
|
modelo: claude
|
||||||
3
infra/dynamodb/Pulumi.yaml
Normal file
3
infra/dynamodb/Pulumi.yaml
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
name: dynamodb-dev
|
||||||
|
runtime: python
|
||||||
|
description: Infraestrutura da aplicação Dynamodb
|
||||||
18
infra/dynamodb/__main__.py
Normal file
18
infra/dynamodb/__main__.py
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
import pulumi
|
||||||
|
import pulumi_aws as aws
|
||||||
|
import conf as config
|
||||||
|
basic_dynamodb_table = aws.dynamodb.Table(config.project_name,
|
||||||
|
name="br-edu-ifsp-ifsp-ret-memoria-tabela-chatbot-editais-dev",
|
||||||
|
billing_mode="PAY_PER_REQUEST",
|
||||||
|
hash_key="UserId",
|
||||||
|
range_key="Timestamp",
|
||||||
|
attributes=[
|
||||||
|
{
|
||||||
|
"name": "UserId",
|
||||||
|
"type": "S",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Timestamp",
|
||||||
|
"type": "N",
|
||||||
|
}
|
||||||
|
])
|
||||||
13
infra/dynamodb/autotag/autotag.py
Normal file
13
infra/dynamodb/autotag/autotag.py
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
import pulumi
|
||||||
|
from autotag.taggable import is_taggable
|
||||||
|
|
||||||
|
# registerAutoTags registers a global stack transformation that merges a set
|
||||||
|
# of tags with whatever was also explicitly added to the resource definition.
|
||||||
|
def register_auto_tags(auto_tags):
|
||||||
|
pulumi.runtime.register_stack_transformation(lambda args: auto_tag(args, auto_tags))
|
||||||
|
|
||||||
|
# auto_tag applies the given tags to the resource properties if applicable.
|
||||||
|
def auto_tag(args, auto_tags):
|
||||||
|
if is_taggable(args.type_):
|
||||||
|
args.props['tags'] = {**(args.props['tags'] or {}), **auto_tags}
|
||||||
|
return pulumi.ResourceTransformationResult(args.props, args.opts)
|
||||||
12
infra/dynamodb/autotag/policy-config.json
Normal file
12
infra/dynamodb/autotag/policy-config.json
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
{
|
||||||
|
"all": "mandatory",
|
||||||
|
"check-required-tags": {
|
||||||
|
"requiredTags": [
|
||||||
|
"user:project",
|
||||||
|
"user:env",
|
||||||
|
"user:account",
|
||||||
|
"user:costCenter",
|
||||||
|
"user:owner"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
234
infra/dynamodb/autotag/taggable.py
Normal file
234
infra/dynamodb/autotag/taggable.py
Normal file
@@ -0,0 +1,234 @@
|
|||||||
|
# isTaggable returns true if the given resource type is an AWS resource that supports tags.
|
||||||
|
def is_taggable(t):
|
||||||
|
return t in taggable_resource_types
|
||||||
|
|
||||||
|
# taggable_resource_types is a list of known AWS type tokens that are taggable.
|
||||||
|
taggable_resource_types = [
|
||||||
|
'aws:accessanalyzer/analyzer:Analyzer',
|
||||||
|
'aws:acm/certificate:Certificate',
|
||||||
|
'aws:acmpca/certificateAuthority:CertificateAuthority',
|
||||||
|
'aws:alb/loadBalancer:LoadBalancer',
|
||||||
|
'aws:alb/targetGroup:TargetGroup',
|
||||||
|
'aws:apigateway/apiKey:ApiKey',
|
||||||
|
'aws:apigateway/clientCertificate:ClientCertificate',
|
||||||
|
'aws:apigateway/domainName:DomainName',
|
||||||
|
'aws:apigateway/restApi:RestApi',
|
||||||
|
'aws:apigateway/stage:Stage',
|
||||||
|
'aws:apigateway/usagePlan:UsagePlan',
|
||||||
|
'aws:apigateway/vpcLink:VpcLink',
|
||||||
|
'aws:applicationloadbalancing/loadBalancer:LoadBalancer',
|
||||||
|
'aws:applicationloadbalancing/targetGroup:TargetGroup',
|
||||||
|
'aws:appmesh/mesh:Mesh',
|
||||||
|
'aws:appmesh/route:Route',
|
||||||
|
'aws:appmesh/virtualNode:VirtualNode',
|
||||||
|
'aws:appmesh/virtualRouter:VirtualRouter',
|
||||||
|
'aws:appmesh/virtualService:VirtualService',
|
||||||
|
'aws:appsync/graphQLApi:GraphQLApi',
|
||||||
|
'aws:athena/workgroup:Workgroup',
|
||||||
|
'aws:autoscaling/group:Group',
|
||||||
|
'aws:backup/plan:Plan',
|
||||||
|
'aws:backup/vault:Vault',
|
||||||
|
'aws:cfg/aggregateAuthorization:AggregateAuthorization',
|
||||||
|
'aws:cfg/configurationAggregator:ConfigurationAggregator',
|
||||||
|
'aws:cfg/rule:Rule',
|
||||||
|
'aws:cloudformation/stack:Stack',
|
||||||
|
'aws:cloudformation/stackSet:StackSet',
|
||||||
|
'aws:cloudfront/distribution:Distribution',
|
||||||
|
'aws:cloudhsmv2/cluster:Cluster',
|
||||||
|
'aws:cloudtrail/trail:Trail',
|
||||||
|
'aws:cloudwatch/eventRule:EventRule',
|
||||||
|
'aws:cloudwatch/logGroup:LogGroup',
|
||||||
|
'aws:cloudwatch/metricAlarm:MetricAlarm',
|
||||||
|
'aws:codebuild/project:Project',
|
||||||
|
'aws:codecommit/repository:Repository',
|
||||||
|
'aws:codepipeline/pipeline:Pipeline',
|
||||||
|
'aws:codepipeline/webhook:Webhook',
|
||||||
|
'aws:codestarnotifications/notificationRule:NotificationRule',
|
||||||
|
'aws:cognito/identityPool:IdentityPool',
|
||||||
|
'aws:cognito/userPool:UserPool',
|
||||||
|
'aws:datapipeline/pipeline:Pipeline',
|
||||||
|
'aws:datasync/agent:Agent',
|
||||||
|
'aws:datasync/efsLocation:EfsLocation',
|
||||||
|
'aws:datasync/locationSmb:LocationSmb',
|
||||||
|
'aws:datasync/nfsLocation:NfsLocation',
|
||||||
|
'aws:datasync/s3Location:S3Location',
|
||||||
|
'aws:datasync/task:Task',
|
||||||
|
'aws:dax/cluster:Cluster',
|
||||||
|
'aws:directconnect/connection:Connection',
|
||||||
|
'aws:directconnect/hostedPrivateVirtualInterfaceAccepter:HostedPrivateVirtualInterfaceAccepter',
|
||||||
|
'aws:directconnect/hostedPublicVirtualInterfaceAccepter:HostedPublicVirtualInterfaceAccepter',
|
||||||
|
'aws:directconnect/hostedTransitVirtualInterfaceAcceptor:HostedTransitVirtualInterfaceAcceptor',
|
||||||
|
'aws:directconnect/linkAggregationGroup:LinkAggregationGroup',
|
||||||
|
'aws:directconnect/privateVirtualInterface:PrivateVirtualInterface',
|
||||||
|
'aws:directconnect/publicVirtualInterface:PublicVirtualInterface',
|
||||||
|
'aws:directconnect/transitVirtualInterface:TransitVirtualInterface',
|
||||||
|
'aws:directoryservice/directory:Directory',
|
||||||
|
'aws:dlm/lifecyclePolicy:LifecyclePolicy',
|
||||||
|
'aws:dms/endpoint:Endpoint',
|
||||||
|
'aws:dms/replicationInstance:ReplicationInstance',
|
||||||
|
'aws:dms/replicationSubnetGroup:ReplicationSubnetGroup',
|
||||||
|
'aws:dms/replicationTask:ReplicationTask',
|
||||||
|
'aws:docdb/cluster:Cluster',
|
||||||
|
'aws:docdb/clusterInstance:ClusterInstance',
|
||||||
|
'aws:docdb/clusterParameterGroup:ClusterParameterGroup',
|
||||||
|
'aws:docdb/subnetGroup:SubnetGroup',
|
||||||
|
'aws:dynamodb/table:Table',
|
||||||
|
'aws:ebs/snapshot:Snapshot',
|
||||||
|
'aws:ebs/snapshotCopy:SnapshotCopy',
|
||||||
|
'aws:ebs/volume:Volume',
|
||||||
|
'aws:ec2/ami:Ami',
|
||||||
|
'aws:ec2/amiCopy:AmiCopy',
|
||||||
|
'aws:ec2/amiFromInstance:AmiFromInstance',
|
||||||
|
'aws:ec2/capacityReservation:CapacityReservation',
|
||||||
|
'aws:ec2/customerGateway:CustomerGateway',
|
||||||
|
'aws:ec2/defaultNetworkAcl:DefaultNetworkAcl',
|
||||||
|
'aws:ec2/defaultRouteTable:DefaultRouteTable',
|
||||||
|
'aws:ec2/defaultSecurityGroup:DefaultSecurityGroup',
|
||||||
|
'aws:ec2/defaultSubnet:DefaultSubnet',
|
||||||
|
'aws:ec2/defaultVpc:DefaultVpc',
|
||||||
|
'aws:ec2/defaultVpcDhcpOptions:DefaultVpcDhcpOptions',
|
||||||
|
'aws:ec2/eip:Eip',
|
||||||
|
'aws:ec2/fleet:Fleet',
|
||||||
|
'aws:ec2/instance:Instance',
|
||||||
|
'aws:ec2/internetGateway:InternetGateway',
|
||||||
|
'aws:ec2/keyPair:KeyPair',
|
||||||
|
'aws:ec2/launchTemplate:LaunchTemplate',
|
||||||
|
'aws:ec2/natGateway:NatGateway',
|
||||||
|
'aws:ec2/networkAcl:NetworkAcl',
|
||||||
|
'aws:ec2/networkInterface:NetworkInterface',
|
||||||
|
'aws:ec2/placementGroup:PlacementGroup',
|
||||||
|
'aws:ec2/routeTable:RouteTable',
|
||||||
|
'aws:ec2/securityGroup:SecurityGroup',
|
||||||
|
'aws:ec2/spotInstanceRequest:SpotInstanceRequest',
|
||||||
|
'aws:ec2/subnet:Subnet',
|
||||||
|
'aws:ec2/vpc:Vpc',
|
||||||
|
'aws:ec2/vpcDhcpOptions:VpcDhcpOptions',
|
||||||
|
'aws:ec2/vpcEndpoint:VpcEndpoint',
|
||||||
|
'aws:ec2/vpcEndpointService:VpcEndpointService',
|
||||||
|
'aws:ec2/vpcPeeringConnection:VpcPeeringConnection',
|
||||||
|
'aws:ec2/vpcPeeringConnectionAccepter:VpcPeeringConnectionAccepter',
|
||||||
|
'aws:ec2/vpnConnection:VpnConnection',
|
||||||
|
'aws:ec2/vpnGateway:VpnGateway',
|
||||||
|
'aws:ec2clientvpn/endpoint:Endpoint',
|
||||||
|
'aws:ec2transitgateway/routeTable:RouteTable',
|
||||||
|
'aws:ec2transitgateway/transitGateway:TransitGateway',
|
||||||
|
'aws:ec2transitgateway/vpcAttachment:VpcAttachment',
|
||||||
|
'aws:ec2transitgateway/vpcAttachmentAccepter:VpcAttachmentAccepter',
|
||||||
|
'aws:ecr/repository:Repository',
|
||||||
|
'aws:ecs/capacityProvider:CapacityProvider',
|
||||||
|
'aws:ecs/cluster:Cluster',
|
||||||
|
'aws:ecs/service:Service',
|
||||||
|
'aws:ecs/taskDefinition:TaskDefinition',
|
||||||
|
'aws:efs/fileSystem:FileSystem',
|
||||||
|
'aws:eks/cluster:Cluster',
|
||||||
|
'aws:eks/fargateProfile:FargateProfile',
|
||||||
|
'aws:eks/nodeGroup:NodeGroup',
|
||||||
|
'aws:elasticache/cluster:Cluster',
|
||||||
|
'aws:elasticache/replicationGroup:ReplicationGroup',
|
||||||
|
'aws:elasticbeanstalk/application:Application',
|
||||||
|
'aws:elasticbeanstalk/applicationVersion:ApplicationVersion',
|
||||||
|
'aws:elasticbeanstalk/environment:Environment',
|
||||||
|
'aws:elasticloadbalancing/loadBalancer:LoadBalancer',
|
||||||
|
'aws:elasticloadbalancingv2/loadBalancer:LoadBalancer',
|
||||||
|
'aws:elasticloadbalancingv2/targetGroup:TargetGroup',
|
||||||
|
'aws:elasticsearch/domain:Domain',
|
||||||
|
'aws:elb/loadBalancer:LoadBalancer',
|
||||||
|
'aws:emr/cluster:Cluster',
|
||||||
|
'aws:fsx/lustreFileSystem:LustreFileSystem',
|
||||||
|
'aws:fsx/windowsFileSystem:WindowsFileSystem',
|
||||||
|
'aws:gamelift/alias:Alias',
|
||||||
|
'aws:gamelift/build:Build',
|
||||||
|
'aws:gamelift/fleet:Fleet',
|
||||||
|
'aws:gamelift/gameSessionQueue:GameSessionQueue',
|
||||||
|
'aws:glacier/vault:Vault',
|
||||||
|
'aws:glue/crawler:Crawler',
|
||||||
|
'aws:glue/job:Job',
|
||||||
|
'aws:glue/trigger:Trigger',
|
||||||
|
'aws:iam/role:Role',
|
||||||
|
'aws:iam/user:User',
|
||||||
|
'aws:inspector/resourceGroup:ResourceGroup',
|
||||||
|
'aws:kinesis/analyticsApplication:AnalyticsApplication',
|
||||||
|
'aws:kinesis/firehoseDeliveryStream:FirehoseDeliveryStream',
|
||||||
|
'aws:kinesis/stream:Stream',
|
||||||
|
'aws:kms/externalKey:ExternalKey',
|
||||||
|
'aws:kms/key:Key',
|
||||||
|
'aws:lambda/function:Function',
|
||||||
|
'aws:lb/loadBalancer:LoadBalancer',
|
||||||
|
'aws:lb/targetGroup:TargetGroup',
|
||||||
|
'aws:licensemanager/licenseConfiguration:LicenseConfiguration',
|
||||||
|
'aws:lightsail/instance:Instance',
|
||||||
|
'aws:mediaconvert/queue:Queue',
|
||||||
|
'aws:mediapackage/channel:Channel',
|
||||||
|
'aws:mediastore/container:Container',
|
||||||
|
'aws:mq/broker:Broker',
|
||||||
|
'aws:mq/configuration:Configuration',
|
||||||
|
'aws:msk/cluster:Cluster',
|
||||||
|
'aws:neptune/cluster:Cluster',
|
||||||
|
'aws:neptune/clusterInstance:ClusterInstance',
|
||||||
|
'aws:neptune/clusterParameterGroup:ClusterParameterGroup',
|
||||||
|
'aws:neptune/eventSubscription:EventSubscription',
|
||||||
|
'aws:neptune/parameterGroup:ParameterGroup',
|
||||||
|
'aws:neptune/subnetGroup:SubnetGroup',
|
||||||
|
'aws:opsworks/stack:Stack',
|
||||||
|
'aws:organizations/account:Account',
|
||||||
|
'aws:pinpoint/app:App',
|
||||||
|
'aws:qldb/ledger:Ledger',
|
||||||
|
'aws:ram/resourceShare:ResourceShare',
|
||||||
|
'aws:rds/cluster:Cluster',
|
||||||
|
'aws:rds/clusterEndpoint:ClusterEndpoint',
|
||||||
|
'aws:rds/clusterInstance:ClusterInstance',
|
||||||
|
'aws:rds/clusterParameterGroup:ClusterParameterGroup',
|
||||||
|
'aws:rds/clusterSnapshot:ClusterSnapshot',
|
||||||
|
'aws:rds/eventSubscription:EventSubscription',
|
||||||
|
'aws:rds/instance:Instance',
|
||||||
|
'aws:rds/optionGroup:OptionGroup',
|
||||||
|
'aws:rds/parameterGroup:ParameterGroup',
|
||||||
|
'aws:rds/securityGroup:SecurityGroup',
|
||||||
|
'aws:rds/snapshot:Snapshot',
|
||||||
|
'aws:rds/subnetGroup:SubnetGroup',
|
||||||
|
'aws:redshift/cluster:Cluster',
|
||||||
|
'aws:redshift/eventSubscription:EventSubscription',
|
||||||
|
'aws:redshift/parameterGroup:ParameterGroup',
|
||||||
|
'aws:redshift/snapshotCopyGrant:SnapshotCopyGrant',
|
||||||
|
'aws:redshift/snapshotSchedule:SnapshotSchedule',
|
||||||
|
'aws:redshift/subnetGroup:SubnetGroup',
|
||||||
|
'aws:resourcegroups/group:Group',
|
||||||
|
'aws:route53/healthCheck:HealthCheck',
|
||||||
|
'aws:route53/resolverEndpoint:ResolverEndpoint',
|
||||||
|
'aws:route53/resolverRule:ResolverRule',
|
||||||
|
'aws:route53/zone:Zone',
|
||||||
|
'aws:s3/bucket:Bucket',
|
||||||
|
'aws:s3/bucketObject:BucketObject',
|
||||||
|
'aws:sagemaker/endpoint:Endpoint',
|
||||||
|
'aws:sagemaker/endpointConfiguration:EndpointConfiguration',
|
||||||
|
'aws:sagemaker/model:Model',
|
||||||
|
'aws:sagemaker/notebookInstance:NotebookInstance',
|
||||||
|
'aws:secretsmanager/secret:Secret',
|
||||||
|
'aws:servicecatalog/portfolio:Portfolio',
|
||||||
|
'aws:sfn/activity:Activity',
|
||||||
|
'aws:sfn/stateMachine:StateMachine',
|
||||||
|
'aws:sns/topic:Topic',
|
||||||
|
'aws:sqs/queue:Queue',
|
||||||
|
'aws:ssm/activation:Activation',
|
||||||
|
'aws:ssm/document:Document',
|
||||||
|
'aws:ssm/maintenanceWindow:MaintenanceWindow',
|
||||||
|
'aws:ssm/parameter:Parameter',
|
||||||
|
'aws:ssm/patchBaseline:PatchBaseline',
|
||||||
|
'aws:storagegateway/cachesIscsiVolume:CachesIscsiVolume',
|
||||||
|
'aws:storagegateway/gateway:Gateway',
|
||||||
|
'aws:storagegateway/nfsFileShare:NfsFileShare',
|
||||||
|
'aws:storagegateway/smbFileShare:SmbFileShare',
|
||||||
|
'aws:swf/domain:Domain',
|
||||||
|
'aws:transfer/server:Server',
|
||||||
|
'aws:transfer/user:User',
|
||||||
|
'aws:waf/rateBasedRule:RateBasedRule',
|
||||||
|
'aws:waf/rule:Rule',
|
||||||
|
'aws:waf/ruleGroup:RuleGroup',
|
||||||
|
'aws:waf/webAcl:WebAcl',
|
||||||
|
'aws:wafregional/rateBasedRule:RateBasedRule',
|
||||||
|
'aws:wafregional/rule:Rule',
|
||||||
|
'aws:wafregional/ruleGroup:RuleGroup',
|
||||||
|
'aws:wafregional/webAcl:WebAcl',
|
||||||
|
'aws:workspaces/directory:Directory',
|
||||||
|
'aws:workspaces/ipGroup:IpGroup',
|
||||||
|
]
|
||||||
16
infra/dynamodb/conf.py
Normal file
16
infra/dynamodb/conf.py
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
import pulumi
|
||||||
|
import pulumi_aws as aws
|
||||||
|
from autotag.autotag import register_auto_tags
|
||||||
|
|
||||||
|
config = pulumi.Config("dynamo")
|
||||||
|
|
||||||
|
project_name = config.require("project_name")
|
||||||
|
stack_name = pulumi.get_stack()
|
||||||
|
environment = config.require("environment")
|
||||||
|
account_id = config.require("account_id")
|
||||||
|
tags = config.require_object("tags")
|
||||||
|
|
||||||
|
aws_region = aws.get_region().id
|
||||||
|
current = aws.get_caller_identity()
|
||||||
|
|
||||||
|
register_auto_tags(tags)
|
||||||
12
infra/ecr/Pulumi.ifsp-chatbot-editais.yaml
Normal file
12
infra/ecr/Pulumi.ifsp-chatbot-editais.yaml
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
config:
|
||||||
|
ecr_dev:entity_extraction_dev: ecr
|
||||||
|
ecr_dev:environment: dev
|
||||||
|
ecr_dev:ecr:
|
||||||
|
backend:
|
||||||
|
image_mutability: MUTABLE
|
||||||
|
name: chatbot-editais-backend-dev
|
||||||
|
frontend:
|
||||||
|
image_mutability: MUTABLE
|
||||||
|
name: chatbot-editais-frontend-dev
|
||||||
|
ecr_dev:project: Chatbot editais
|
||||||
|
aws:region: us-east-1
|
||||||
3
infra/ecr/Pulumi.yaml
Normal file
3
infra/ecr/Pulumi.yaml
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
name: ecr_dev
|
||||||
|
runtime: python
|
||||||
|
description: Infraestrutura da aplicação ECR
|
||||||
93
infra/ecr/README.md
Normal file
93
infra/ecr/README.md
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
# ecr
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Getting started
|
||||||
|
|
||||||
|
To make it easy for you to get started with GitLab, here's a list of recommended next steps.
|
||||||
|
|
||||||
|
Already a pro? Just edit this README.md and make it your own. Want to make it easy? [Use the template at the bottom](#editing-this-readme)!
|
||||||
|
|
||||||
|
## Add your files
|
||||||
|
|
||||||
|
- [ ] [Create](https://docs.gitlab.com/ee/user/project/repository/web_editor.html#create-a-file) or [upload](https://docs.gitlab.com/ee/user/project/repository/web_editor.html#upload-a-file) files
|
||||||
|
- [ ] [Add files using the command line](https://docs.gitlab.com/ee/gitlab-basics/add-file.html#add-a-file-using-the-command-line) or push an existing Git repository with the following command:
|
||||||
|
|
||||||
|
```
|
||||||
|
cd existing_repo
|
||||||
|
git remote add origin https://gitlab.shared.cloud.dnxbrasil.com.br/dnx-br/sandbox/genai/ecr.git
|
||||||
|
git branch -M main
|
||||||
|
git push -uf origin main
|
||||||
|
```
|
||||||
|
|
||||||
|
## Integrate with your tools
|
||||||
|
|
||||||
|
- [ ] [Set up project integrations](https://gitlab.shared.cloud.dnxbrasil.com.br/dnx-br/sandbox/genai/ecr/-/settings/integrations)
|
||||||
|
|
||||||
|
## Collaborate with your team
|
||||||
|
|
||||||
|
- [ ] [Invite team members and collaborators](https://docs.gitlab.com/ee/user/project/members/)
|
||||||
|
- [ ] [Create a new merge request](https://docs.gitlab.com/ee/user/project/merge_requests/creating_merge_requests.html)
|
||||||
|
- [ ] [Automatically close issues from merge requests](https://docs.gitlab.com/ee/user/project/issues/managing_issues.html#closing-issues-automatically)
|
||||||
|
- [ ] [Enable merge request approvals](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/)
|
||||||
|
- [ ] [Set auto-merge](https://docs.gitlab.com/ee/user/project/merge_requests/merge_when_pipeline_succeeds.html)
|
||||||
|
|
||||||
|
## Test and Deploy
|
||||||
|
|
||||||
|
Use the built-in continuous integration in GitLab.
|
||||||
|
|
||||||
|
- [ ] [Get started with GitLab CI/CD](https://docs.gitlab.com/ee/ci/quick_start/index.html)
|
||||||
|
- [ ] [Analyze your code for known vulnerabilities with Static Application Security Testing (SAST)](https://docs.gitlab.com/ee/user/application_security/sast/)
|
||||||
|
- [ ] [Deploy to Kubernetes, Amazon EC2, or Amazon ECS using Auto Deploy](https://docs.gitlab.com/ee/topics/autodevops/requirements.html)
|
||||||
|
- [ ] [Use pull-based deployments for improved Kubernetes management](https://docs.gitlab.com/ee/user/clusters/agent/)
|
||||||
|
- [ ] [Set up protected environments](https://docs.gitlab.com/ee/ci/environments/protected_environments.html)
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
# Editing this README
|
||||||
|
|
||||||
|
When you're ready to make this README your own, just edit this file and use the handy template below (or feel free to structure it however you want - this is just a starting point!). Thanks to [makeareadme.com](https://www.makeareadme.com/) for this template.
|
||||||
|
|
||||||
|
## Suggestions for a good README
|
||||||
|
|
||||||
|
Every project is different, so consider which of these sections apply to yours. The sections used in the template are suggestions for most open source projects. Also keep in mind that while a README can be too long and detailed, too long is better than too short. If you think your README is too long, consider utilizing another form of documentation rather than cutting out information.
|
||||||
|
|
||||||
|
## Name
|
||||||
|
Choose a self-explaining name for your project.
|
||||||
|
|
||||||
|
## Description
|
||||||
|
Let people know what your project can do specifically. Provide context and add a link to any reference visitors might be unfamiliar with. A list of Features or a Background subsection can also be added here. If there are alternatives to your project, this is a good place to list differentiating factors.
|
||||||
|
|
||||||
|
## Badges
|
||||||
|
On some READMEs, you may see small images that convey metadata, such as whether or not all the tests are passing for the project. You can use Shields to add some to your README. Many services also have instructions for adding a badge.
|
||||||
|
|
||||||
|
## Visuals
|
||||||
|
Depending on what you are making, it can be a good idea to include screenshots or even a video (you'll frequently see GIFs rather than actual videos). Tools like ttygif can help, but check out Asciinema for a more sophisticated method.
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
Within a particular ecosystem, there may be a common way of installing things, such as using Yarn, NuGet, or Homebrew. However, consider the possibility that whoever is reading your README is a novice and would like more guidance. Listing specific steps helps remove ambiguity and gets people to using your project as quickly as possible. If it only runs in a specific context like a particular programming language version or operating system or has dependencies that have to be installed manually, also add a Requirements subsection.
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
Use examples liberally, and show the expected output if you can. It's helpful to have inline the smallest example of usage that you can demonstrate, while providing links to more sophisticated examples if they are too long to reasonably include in the README.
|
||||||
|
|
||||||
|
## Support
|
||||||
|
Tell people where they can go to for help. It can be any combination of an issue tracker, a chat room, an email address, etc.
|
||||||
|
|
||||||
|
## Roadmap
|
||||||
|
If you have ideas for releases in the future, it is a good idea to list them in the README.
|
||||||
|
|
||||||
|
## Contributing
|
||||||
|
State if you are open to contributions and what your requirements are for accepting them.
|
||||||
|
|
||||||
|
For people who want to make changes to your project, it's helpful to have some documentation on how to get started. Perhaps there is a script that they should run or some environment variables that they need to set. Make these steps explicit. These instructions could also be useful to your future self.
|
||||||
|
|
||||||
|
You can also document commands to lint the code or run tests. These steps help to ensure high code quality and reduce the likelihood that the changes inadvertently break something. Having instructions for running tests is especially helpful if it requires external setup, such as starting a Selenium server for testing in a browser.
|
||||||
|
|
||||||
|
## Authors and acknowledgment
|
||||||
|
Show your appreciation to those who have contributed to the project.
|
||||||
|
|
||||||
|
## License
|
||||||
|
For open source projects, say how it is licensed.
|
||||||
|
|
||||||
|
## Project status
|
||||||
|
If you have run out of energy or time for your project, put a note at the top of the README saying that development has slowed down or stopped completely. Someone may choose to fork your project or volunteer to step in as a maintainer or owner, allowing your project to keep going. You can also make an explicit request for maintainers.
|
||||||
27
infra/ecr/__main__.py
Normal file
27
infra/ecr/__main__.py
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
import json
|
||||||
|
import pulumi
|
||||||
|
import pulumi_aws as aws
|
||||||
|
|
||||||
|
caller_identity = aws.get_caller_identity()
|
||||||
|
account_id = caller_identity.account_id
|
||||||
|
|
||||||
|
config = pulumi.Config()
|
||||||
|
project = config.require("project")
|
||||||
|
environment = config.require("environment")
|
||||||
|
stacks=["backend","frontend"]
|
||||||
|
for stack in stacks:
|
||||||
|
ecr_config = config.require_object("ecr")[stack]
|
||||||
|
|
||||||
|
ecr_repo = aws.ecr.Repository(ecr_config['name'],
|
||||||
|
name=ecr_config['name'],
|
||||||
|
encryption_configurations=[{
|
||||||
|
"encryption_type": "AES256",
|
||||||
|
}],
|
||||||
|
image_scanning_configuration={
|
||||||
|
"scan_on_push": False,
|
||||||
|
},
|
||||||
|
image_tag_mutability=ecr_config['image_mutability'],
|
||||||
|
opts = pulumi.ResourceOptions(protect=False))
|
||||||
|
|
||||||
|
|
||||||
|
pulumi.export("url", pulumi.Output.concat("ECR REPO ID:", ecr_repo.id))
|
||||||
5
infra/ecr/requirements.txt
Normal file
5
infra/ecr/requirements.txt
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
pulumi
|
||||||
|
pulumi-aws
|
||||||
|
pulumi-docker
|
||||||
|
boto3
|
||||||
|
setuptools
|
||||||
53
infra/langfuse/Pulumi.ifsp-chatbot.yaml
Normal file
53
infra/langfuse/Pulumi.ifsp-chatbot.yaml
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
config:
|
||||||
|
aws:region: us-east-1
|
||||||
|
langfuse:account_id: "654654422992"
|
||||||
|
langfuse:project_name: br-edu-ifsp-ifsp-ret-chatbot-editais-dev
|
||||||
|
langfuse:environment: dev
|
||||||
|
|
||||||
|
langfuse:tags:
|
||||||
|
nome: langfuse-chatbot-editais
|
||||||
|
projeto: chatbot-editais
|
||||||
|
ambinte: dev
|
||||||
|
responsavel: dti.prd@ifsp.edu.bt
|
||||||
|
centro-de-custo: ti-poc-2025
|
||||||
|
campus: ifsp-ret
|
||||||
|
criticidade: baixa
|
||||||
|
data-de-criacao: 10/10/2025
|
||||||
|
backup: nao
|
||||||
|
servico: metric-server
|
||||||
|
owner: ai-team
|
||||||
|
setor: ti
|
||||||
|
area: infra
|
||||||
|
prospeccao: sim
|
||||||
|
poc: sim
|
||||||
|
ia: não
|
||||||
|
modelo: não
|
||||||
|
|
||||||
|
langfuse:network:
|
||||||
|
vpc_id: vpc-0855d5ec4296e1867
|
||||||
|
subnet_ids:
|
||||||
|
- subnet-0a79c417292d29062 # public-us-east-1a-subnet
|
||||||
|
- subnet-01f2e27a014777d11 # public-us-east-1b-subnet
|
||||||
|
|
||||||
|
langfuse:ec2:
|
||||||
|
key_name:
|
||||||
|
allowed_ports:
|
||||||
|
- 22
|
||||||
|
- 3000
|
||||||
|
- 443
|
||||||
|
- 80
|
||||||
|
ebs_volume:
|
||||||
|
size: 100 # em GB
|
||||||
|
device_name: /dev/sdf # nome do device
|
||||||
|
volume_type: gp2 # tipo de volume
|
||||||
|
instance_type: t3.xlarge # langfuse requires at least t3.xlarge
|
||||||
|
instance_name: LangfuseEC2
|
||||||
|
sg_name: langfuse-sg
|
||||||
|
|
||||||
|
langfuse:langfuse_config:
|
||||||
|
repo_url: "https://github.com/langfuse/langfuse.git"
|
||||||
|
web_port: 3000
|
||||||
|
worker_port: 3030
|
||||||
|
database_url: "postgresql://langfuse:langfuse@postgres:5432/langfuse"
|
||||||
|
clickhouse_url: "http://clickhouse:8123"
|
||||||
|
telemetry_enabled: false
|
||||||
3
infra/langfuse/Pulumi.yaml
Normal file
3
infra/langfuse/Pulumi.yaml
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
name: langfuse-dev
|
||||||
|
runtime: python
|
||||||
|
description: Infraestrutura da aplicação Langfuse em EC2 usando Docker
|
||||||
93
infra/langfuse/README.md
Normal file
93
infra/langfuse/README.md
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
# langfuse
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Getting started
|
||||||
|
|
||||||
|
To make it easy for you to get started with GitLab, here's a list of recommended next steps.
|
||||||
|
|
||||||
|
Already a pro? Just edit this README.md and make it your own. Want to make it easy? [Use the template at the bottom](#editing-this-readme)!
|
||||||
|
|
||||||
|
## Add your files
|
||||||
|
|
||||||
|
- [ ] [Create](https://docs.gitlab.com/ee/user/project/repository/web_editor.html#create-a-file) or [upload](https://docs.gitlab.com/ee/user/project/repository/web_editor.html#upload-a-file) files
|
||||||
|
- [ ] [Add files using the command line](https://docs.gitlab.com/ee/gitlab-basics/add-file.html#add-a-file-using-the-command-line) or push an existing Git repository with the following command:
|
||||||
|
|
||||||
|
```
|
||||||
|
cd existing_repo
|
||||||
|
git remote add origin https://gitlab.shared.cloud.dnxbrasil.com.br/dnx-br/sandbox/genai/langfuse.git
|
||||||
|
git branch -M main
|
||||||
|
git push -uf origin main
|
||||||
|
```
|
||||||
|
|
||||||
|
## Integrate with your tools
|
||||||
|
|
||||||
|
- [ ] [Set up project integrations](https://gitlab.shared.cloud.dnxbrasil.com.br/dnx-br/sandbox/genai/langfuse/-/settings/integrations)
|
||||||
|
|
||||||
|
## Collaborate with your team
|
||||||
|
|
||||||
|
- [ ] [Invite team members and collaborators](https://docs.gitlab.com/ee/user/project/members/)
|
||||||
|
- [ ] [Create a new merge request](https://docs.gitlab.com/ee/user/project/merge_requests/creating_merge_requests.html)
|
||||||
|
- [ ] [Automatically close issues from merge requests](https://docs.gitlab.com/ee/user/project/issues/managing_issues.html#closing-issues-automatically)
|
||||||
|
- [ ] [Enable merge request approvals](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/)
|
||||||
|
- [ ] [Set auto-merge](https://docs.gitlab.com/ee/user/project/merge_requests/merge_when_pipeline_succeeds.html)
|
||||||
|
|
||||||
|
## Test and Deploy
|
||||||
|
|
||||||
|
Use the built-in continuous integration in GitLab.
|
||||||
|
|
||||||
|
- [ ] [Get started with GitLab CI/CD](https://docs.gitlab.com/ee/ci/quick_start/index.html)
|
||||||
|
- [ ] [Analyze your code for known vulnerabilities with Static Application Security Testing (SAST)](https://docs.gitlab.com/ee/user/application_security/sast/)
|
||||||
|
- [ ] [Deploy to Kubernetes, Amazon EC2, or Amazon ECS using Auto Deploy](https://docs.gitlab.com/ee/topics/autodevops/requirements.html)
|
||||||
|
- [ ] [Use pull-based deployments for improved Kubernetes management](https://docs.gitlab.com/ee/user/clusters/agent/)
|
||||||
|
- [ ] [Set up protected environments](https://docs.gitlab.com/ee/ci/environments/protected_environments.html)
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
# Editing this README
|
||||||
|
|
||||||
|
When you're ready to make this README your own, just edit this file and use the handy template below (or feel free to structure it however you want - this is just a starting point!). Thanks to [makeareadme.com](https://www.makeareadme.com/) for this template.
|
||||||
|
|
||||||
|
## Suggestions for a good README
|
||||||
|
|
||||||
|
Every project is different, so consider which of these sections apply to yours. The sections used in the template are suggestions for most open source projects. Also keep in mind that while a README can be too long and detailed, too long is better than too short. If you think your README is too long, consider utilizing another form of documentation rather than cutting out information.
|
||||||
|
|
||||||
|
## Name
|
||||||
|
Choose a self-explaining name for your project.
|
||||||
|
|
||||||
|
## Description
|
||||||
|
Let people know what your project can do specifically. Provide context and add a link to any reference visitors might be unfamiliar with. A list of Features or a Background subsection can also be added here. If there are alternatives to your project, this is a good place to list differentiating factors.
|
||||||
|
|
||||||
|
## Badges
|
||||||
|
On some READMEs, you may see small images that convey metadata, such as whether or not all the tests are passing for the project. You can use Shields to add some to your README. Many services also have instructions for adding a badge.
|
||||||
|
|
||||||
|
## Visuals
|
||||||
|
Depending on what you are making, it can be a good idea to include screenshots or even a video (you'll frequently see GIFs rather than actual videos). Tools like ttygif can help, but check out Asciinema for a more sophisticated method.
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
Within a particular ecosystem, there may be a common way of installing things, such as using Yarn, NuGet, or Homebrew. However, consider the possibility that whoever is reading your README is a novice and would like more guidance. Listing specific steps helps remove ambiguity and gets people to using your project as quickly as possible. If it only runs in a specific context like a particular programming language version or operating system or has dependencies that have to be installed manually, also add a Requirements subsection.
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
Use examples liberally, and show the expected output if you can. It's helpful to have inline the smallest example of usage that you can demonstrate, while providing links to more sophisticated examples if they are too long to reasonably include in the README.
|
||||||
|
|
||||||
|
## Support
|
||||||
|
Tell people where they can go to for help. It can be any combination of an issue tracker, a chat room, an email address, etc.
|
||||||
|
|
||||||
|
## Roadmap
|
||||||
|
If you have ideas for releases in the future, it is a good idea to list them in the README.
|
||||||
|
|
||||||
|
## Contributing
|
||||||
|
State if you are open to contributions and what your requirements are for accepting them.
|
||||||
|
|
||||||
|
For people who want to make changes to your project, it's helpful to have some documentation on how to get started. Perhaps there is a script that they should run or some environment variables that they need to set. Make these steps explicit. These instructions could also be useful to your future self.
|
||||||
|
|
||||||
|
You can also document commands to lint the code or run tests. These steps help to ensure high code quality and reduce the likelihood that the changes inadvertently break something. Having instructions for running tests is especially helpful if it requires external setup, such as starting a Selenium server for testing in a browser.
|
||||||
|
|
||||||
|
## Authors and acknowledgment
|
||||||
|
Show your appreciation to those who have contributed to the project.
|
||||||
|
|
||||||
|
## License
|
||||||
|
For open source projects, say how it is licensed.
|
||||||
|
|
||||||
|
## Project status
|
||||||
|
If you have run out of energy or time for your project, put a note at the top of the README saying that development has slowed down or stopped completely. Someone may choose to fork your project or volunteer to step in as a maintainer or owner, allowing your project to keep going. You can also make an explicit request for maintainers.
|
||||||
90
infra/langfuse/__main__.py
Normal file
90
infra/langfuse/__main__.py
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
import pulumi
import pulumi_aws as aws

import conf as config

# --- Security group -------------------------------------------------------
# One TCP ingress rule per configured port.
# NOTE(review): ingress is open to 0.0.0.0/0 — tighten the CIDR blocks if the
# Langfuse UI does not need to be reachable from the whole internet.
ingress_rules = [
    {"protocol": "tcp", "from_port": port, "to_port": port, "cidr_blocks": ["0.0.0.0/0"]}
    for port in config.ec2_config["allowed_ports"]
]

sg = aws.ec2.SecurityGroup(
    config.ec2_config["sg_name"],
    vpc_id=config.network["vpc_id"],
    description="Allow defined ports",
    ingress=ingress_rules,
    egress=[{"protocol": "-1", "from_port": 0, "to_port": 0, "cidr_blocks": ["0.0.0.0/0"]}],
)

# --- User data: Docker + Langfuse + EBS volume mount ----------------------
# Runs once at first boot: installs Docker CE from Docker's apt repo, clones
# the Langfuse repo, generates its .env, starts the docker-compose stack and
# mounts the extra data volume if it is attached.
user_data = f"""#!/bin/bash
set -e
sudo apt-get update -y
sudo apt-get install -y ca-certificates curl gnupg git
sudo install -m 0755 -d /etc/apt/keyrings
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /etc/apt/keyrings/docker.gpg
sudo chmod a+r /etc/apt/keyrings/docker.gpg
echo \
"deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu \
$(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \
sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
sudo apt-get update -y
sudo apt-get install -y docker-ce docker-ce-cli containerd.io docker-compose-plugin
sudo groupadd docker || true
sudo usermod -aG docker ubuntu
sudo chmod 666 /var/run/docker.sock
sudo systemctl enable docker
sudo systemctl restart docker

cd /opt
git clone {config.langfuse_config["repo_url"]}
cd langfuse

NEXTAUTH_SECRET=$(openssl rand -hex 32)
# Fetch the public IP via IMDSv2 (token-based); the plain IMDSv1 GET fails
# when the instance metadata service requires tokens.
TOKEN=$(curl -s -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 300")
PUBLIC_IP=$(curl -s -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/public-ipv4)

cat > .env <<EOF
NEXTAUTH_SECRET=$NEXTAUTH_SECRET
NEXTAUTH_URL=http://$PUBLIC_IP:{config.langfuse_config["web_port"]}
DATABASE_URL=postgresql://langfuse:langfuse@postgres:5432/langfuse
CLICKHOUSE_URL=http://clickhouse:8123
TELEMETRY_ENABLED=false
EOF
sudo docker compose -f docker-compose.yml up -d

# Mount the data EBS volume.
# NOTE(review): no aws.ec2.Volume/VolumeAttachment is declared in this
# program, so this device only exists if attached out of band — confirm.
DEVICE="{config.ec2_config['ebs_volume']['device_name']}"
MOUNT_DIR="/mnt/langfuse-data"

if [ -b "$DEVICE" ]; then
  # Format only when the device has no filesystem yet, so a re-attached
  # volume keeps its data (previously mkfs ran unconditionally).
  if ! sudo blkid "$DEVICE" > /dev/null 2>&1; then
    sudo mkfs -t ext4 $DEVICE
  fi
  sudo mkdir -p $MOUNT_DIR
  sudo mount $DEVICE $MOUNT_DIR
  echo "$DEVICE $MOUNT_DIR ext4 defaults,nofail 0 2" | sudo tee -a /etc/fstab
else
  echo "Volume $DEVICE não encontrado."
fi
"""

# --- EC2 instance ---------------------------------------------------------
instance = aws.ec2.Instance(
    "assistente-produtos-servicos-langfuse-ec2",
    instance_type=config.ec2_config["instance_type"],
    # Latest Canonical Ubuntu 22.04 (Jammy) amd64 server AMI.
    ami=aws.ec2.get_ami(
        most_recent=True,
        owners=["099720109477"],  # Canonical
        filters=[{"name": "name", "values": ["ubuntu/images/hvm-ssd/ubuntu-jammy-22.04-amd64-server-*"]}],
    ).id,
    subnet_id=config.network["subnet_ids"][0],
    vpc_security_group_ids=[sg.id],
    # key_name=config.ec2_config["key_name"],
    user_data=user_data,
    associate_public_ip_address=True,
    tags={"Name": config.ec2_config["instance_name"]},
    root_block_device=aws.ec2.InstanceRootBlockDeviceArgs(
        volume_size=config.ec2_config["ebs_volume"]["size"],
        volume_type=config.ec2_config["ebs_volume"]["volume_type"],
        delete_on_termination=True,
    ),
)

pulumi.export("instance_ip", instance.public_ip)
pulumi.export("url", pulumi.Output.concat("http://", instance.public_ip, f":{config.langfuse_config['web_port']}"))
|
||||||
13
infra/langfuse/autotag/autotag.py
Normal file
13
infra/langfuse/autotag/autotag.py
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
import pulumi
|
||||||
|
from autotag.taggable import is_taggable
|
||||||
|
|
||||||
|
# register_auto_tags installs a global stack transformation that merges a set
# of default tags into whatever tags each resource declares explicitly.
def register_auto_tags(auto_tags):
    """Register *auto_tags* to be merged into every taggable resource of the stack."""
    pulumi.runtime.register_stack_transformation(
        lambda args: auto_tag(args, auto_tags)
    )
|
||||||
|
|
||||||
|
# auto_tag merges the given tags into the resource's properties when the
# resource type supports tagging.
def auto_tag(args, auto_tags):
    """Stack-transformation callback: apply *auto_tags* to taggable resources.

    Returns a ResourceTransformationResult for taggable resource types and
    None otherwise (leaving the resource untouched).
    """
    if is_taggable(args.type_):
        # Fix: use .get() — 'tags' is absent from props when the resource was
        # declared without explicit tags, and plain indexing raised KeyError.
        merged = {**(args.props.get('tags') or {}), **auto_tags}
        args.props['tags'] = merged
        return pulumi.ResourceTransformationResult(args.props, args.opts)
|
||||||
12
infra/langfuse/autotag/policy-config.json
Normal file
12
infra/langfuse/autotag/policy-config.json
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
{
|
||||||
|
"all": "mandatory",
|
||||||
|
"check-required-tags": {
|
||||||
|
"requiredTags": [
|
||||||
|
"user:project",
|
||||||
|
"user:env",
|
||||||
|
"user:account",
|
||||||
|
"user:costCenter",
|
||||||
|
"user:owner"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
234
infra/langfuse/autotag/taggable.py
Normal file
234
infra/langfuse/autotag/taggable.py
Normal file
@@ -0,0 +1,234 @@
|
|||||||
|
# is_taggable reports whether the given Pulumi type token names an AWS
# resource that supports tags.
def is_taggable(t):
    """Return True when *t* is one of the known taggable AWS resource types."""
    return t in taggable_resource_types
|
||||||
|
|
||||||
|
# taggable_resource_types is the set of known AWS type tokens that support
# tags. A frozenset gives O(1) membership tests for is_taggable (the previous
# list was scanned linearly on every transformation call) and prevents
# accidental mutation of the lookup table.
taggable_resource_types = frozenset({
    'aws:accessanalyzer/analyzer:Analyzer',
    'aws:acm/certificate:Certificate',
    'aws:acmpca/certificateAuthority:CertificateAuthority',
    'aws:alb/loadBalancer:LoadBalancer',
    'aws:alb/targetGroup:TargetGroup',
    'aws:apigateway/apiKey:ApiKey',
    'aws:apigateway/clientCertificate:ClientCertificate',
    'aws:apigateway/domainName:DomainName',
    'aws:apigateway/restApi:RestApi',
    'aws:apigateway/stage:Stage',
    'aws:apigateway/usagePlan:UsagePlan',
    'aws:apigateway/vpcLink:VpcLink',
    'aws:applicationloadbalancing/loadBalancer:LoadBalancer',
    'aws:applicationloadbalancing/targetGroup:TargetGroup',
    'aws:appmesh/mesh:Mesh',
    'aws:appmesh/route:Route',
    'aws:appmesh/virtualNode:VirtualNode',
    'aws:appmesh/virtualRouter:VirtualRouter',
    'aws:appmesh/virtualService:VirtualService',
    'aws:appsync/graphQLApi:GraphQLApi',
    'aws:athena/workgroup:Workgroup',
    'aws:autoscaling/group:Group',
    'aws:backup/plan:Plan',
    'aws:backup/vault:Vault',
    'aws:cfg/aggregateAuthorization:AggregateAuthorization',
    'aws:cfg/configurationAggregator:ConfigurationAggregator',
    'aws:cfg/rule:Rule',
    'aws:cloudformation/stack:Stack',
    'aws:cloudformation/stackSet:StackSet',
    'aws:cloudfront/distribution:Distribution',
    'aws:cloudhsmv2/cluster:Cluster',
    'aws:cloudtrail/trail:Trail',
    'aws:cloudwatch/eventRule:EventRule',
    'aws:cloudwatch/logGroup:LogGroup',
    'aws:cloudwatch/metricAlarm:MetricAlarm',
    'aws:codebuild/project:Project',
    'aws:codecommit/repository:Repository',
    'aws:codepipeline/pipeline:Pipeline',
    'aws:codepipeline/webhook:Webhook',
    'aws:codestarnotifications/notificationRule:NotificationRule',
    'aws:cognito/identityPool:IdentityPool',
    'aws:cognito/userPool:UserPool',
    'aws:datapipeline/pipeline:Pipeline',
    'aws:datasync/agent:Agent',
    'aws:datasync/efsLocation:EfsLocation',
    'aws:datasync/locationSmb:LocationSmb',
    'aws:datasync/nfsLocation:NfsLocation',
    'aws:datasync/s3Location:S3Location',
    'aws:datasync/task:Task',
    'aws:dax/cluster:Cluster',
    'aws:directconnect/connection:Connection',
    'aws:directconnect/hostedPrivateVirtualInterfaceAccepter:HostedPrivateVirtualInterfaceAccepter',
    'aws:directconnect/hostedPublicVirtualInterfaceAccepter:HostedPublicVirtualInterfaceAccepter',
    'aws:directconnect/hostedTransitVirtualInterfaceAcceptor:HostedTransitVirtualInterfaceAcceptor',
    'aws:directconnect/linkAggregationGroup:LinkAggregationGroup',
    'aws:directconnect/privateVirtualInterface:PrivateVirtualInterface',
    'aws:directconnect/publicVirtualInterface:PublicVirtualInterface',
    'aws:directconnect/transitVirtualInterface:TransitVirtualInterface',
    'aws:directoryservice/directory:Directory',
    'aws:dlm/lifecyclePolicy:LifecyclePolicy',
    'aws:dms/endpoint:Endpoint',
    'aws:dms/replicationInstance:ReplicationInstance',
    'aws:dms/replicationSubnetGroup:ReplicationSubnetGroup',
    'aws:dms/replicationTask:ReplicationTask',
    'aws:docdb/cluster:Cluster',
    'aws:docdb/clusterInstance:ClusterInstance',
    'aws:docdb/clusterParameterGroup:ClusterParameterGroup',
    'aws:docdb/subnetGroup:SubnetGroup',
    'aws:dynamodb/table:Table',
    'aws:ebs/snapshot:Snapshot',
    'aws:ebs/snapshotCopy:SnapshotCopy',
    'aws:ebs/volume:Volume',
    'aws:ec2/ami:Ami',
    'aws:ec2/amiCopy:AmiCopy',
    'aws:ec2/amiFromInstance:AmiFromInstance',
    'aws:ec2/capacityReservation:CapacityReservation',
    'aws:ec2/customerGateway:CustomerGateway',
    'aws:ec2/defaultNetworkAcl:DefaultNetworkAcl',
    'aws:ec2/defaultRouteTable:DefaultRouteTable',
    'aws:ec2/defaultSecurityGroup:DefaultSecurityGroup',
    'aws:ec2/defaultSubnet:DefaultSubnet',
    'aws:ec2/defaultVpc:DefaultVpc',
    'aws:ec2/defaultVpcDhcpOptions:DefaultVpcDhcpOptions',
    'aws:ec2/eip:Eip',
    'aws:ec2/fleet:Fleet',
    'aws:ec2/instance:Instance',
    'aws:ec2/internetGateway:InternetGateway',
    'aws:ec2/keyPair:KeyPair',
    'aws:ec2/launchTemplate:LaunchTemplate',
    'aws:ec2/natGateway:NatGateway',
    'aws:ec2/networkAcl:NetworkAcl',
    'aws:ec2/networkInterface:NetworkInterface',
    'aws:ec2/placementGroup:PlacementGroup',
    'aws:ec2/routeTable:RouteTable',
    'aws:ec2/securityGroup:SecurityGroup',
    'aws:ec2/spotInstanceRequest:SpotInstanceRequest',
    'aws:ec2/subnet:Subnet',
    'aws:ec2/vpc:Vpc',
    'aws:ec2/vpcDhcpOptions:VpcDhcpOptions',
    'aws:ec2/vpcEndpoint:VpcEndpoint',
    'aws:ec2/vpcEndpointService:VpcEndpointService',
    'aws:ec2/vpcPeeringConnection:VpcPeeringConnection',
    'aws:ec2/vpcPeeringConnectionAccepter:VpcPeeringConnectionAccepter',
    'aws:ec2/vpnConnection:VpnConnection',
    'aws:ec2/vpnGateway:VpnGateway',
    'aws:ec2clientvpn/endpoint:Endpoint',
    'aws:ec2transitgateway/routeTable:RouteTable',
    'aws:ec2transitgateway/transitGateway:TransitGateway',
    'aws:ec2transitgateway/vpcAttachment:VpcAttachment',
    'aws:ec2transitgateway/vpcAttachmentAccepter:VpcAttachmentAccepter',
    'aws:ecr/repository:Repository',
    'aws:ecs/capacityProvider:CapacityProvider',
    'aws:ecs/cluster:Cluster',
    'aws:ecs/service:Service',
    'aws:ecs/taskDefinition:TaskDefinition',
    'aws:efs/fileSystem:FileSystem',
    'aws:eks/cluster:Cluster',
    'aws:eks/fargateProfile:FargateProfile',
    'aws:eks/nodeGroup:NodeGroup',
    'aws:elasticache/cluster:Cluster',
    'aws:elasticache/replicationGroup:ReplicationGroup',
    'aws:elasticbeanstalk/application:Application',
    'aws:elasticbeanstalk/applicationVersion:ApplicationVersion',
    'aws:elasticbeanstalk/environment:Environment',
    'aws:elasticloadbalancing/loadBalancer:LoadBalancer',
    'aws:elasticloadbalancingv2/loadBalancer:LoadBalancer',
    'aws:elasticloadbalancingv2/targetGroup:TargetGroup',
    'aws:elasticsearch/domain:Domain',
    'aws:elb/loadBalancer:LoadBalancer',
    'aws:emr/cluster:Cluster',
    'aws:fsx/lustreFileSystem:LustreFileSystem',
    'aws:fsx/windowsFileSystem:WindowsFileSystem',
    'aws:gamelift/alias:Alias',
    'aws:gamelift/build:Build',
    'aws:gamelift/fleet:Fleet',
    'aws:gamelift/gameSessionQueue:GameSessionQueue',
    'aws:glacier/vault:Vault',
    'aws:glue/crawler:Crawler',
    'aws:glue/job:Job',
    'aws:glue/trigger:Trigger',
    'aws:iam/role:Role',
    'aws:iam/user:User',
    'aws:inspector/resourceGroup:ResourceGroup',
    'aws:kinesis/analyticsApplication:AnalyticsApplication',
    'aws:kinesis/firehoseDeliveryStream:FirehoseDeliveryStream',
    'aws:kinesis/stream:Stream',
    'aws:kms/externalKey:ExternalKey',
    'aws:kms/key:Key',
    'aws:lambda/function:Function',
    'aws:lb/loadBalancer:LoadBalancer',
    'aws:lb/targetGroup:TargetGroup',
    'aws:licensemanager/licenseConfiguration:LicenseConfiguration',
    'aws:lightsail/instance:Instance',
    'aws:mediaconvert/queue:Queue',
    'aws:mediapackage/channel:Channel',
    'aws:mediastore/container:Container',
    'aws:mq/broker:Broker',
    'aws:mq/configuration:Configuration',
    'aws:msk/cluster:Cluster',
    'aws:neptune/cluster:Cluster',
    'aws:neptune/clusterInstance:ClusterInstance',
    'aws:neptune/clusterParameterGroup:ClusterParameterGroup',
    'aws:neptune/eventSubscription:EventSubscription',
    'aws:neptune/parameterGroup:ParameterGroup',
    'aws:neptune/subnetGroup:SubnetGroup',
    'aws:opsworks/stack:Stack',
    'aws:organizations/account:Account',
    'aws:pinpoint/app:App',
    'aws:qldb/ledger:Ledger',
    'aws:ram/resourceShare:ResourceShare',
    'aws:rds/cluster:Cluster',
    'aws:rds/clusterEndpoint:ClusterEndpoint',
    'aws:rds/clusterInstance:ClusterInstance',
    'aws:rds/clusterParameterGroup:ClusterParameterGroup',
    'aws:rds/clusterSnapshot:ClusterSnapshot',
    'aws:rds/eventSubscription:EventSubscription',
    'aws:rds/instance:Instance',
    'aws:rds/optionGroup:OptionGroup',
    'aws:rds/parameterGroup:ParameterGroup',
    'aws:rds/securityGroup:SecurityGroup',
    'aws:rds/snapshot:Snapshot',
    'aws:rds/subnetGroup:SubnetGroup',
    'aws:redshift/cluster:Cluster',
    'aws:redshift/eventSubscription:EventSubscription',
    'aws:redshift/parameterGroup:ParameterGroup',
    'aws:redshift/snapshotCopyGrant:SnapshotCopyGrant',
    'aws:redshift/snapshotSchedule:SnapshotSchedule',
    'aws:redshift/subnetGroup:SubnetGroup',
    'aws:resourcegroups/group:Group',
    'aws:route53/healthCheck:HealthCheck',
    'aws:route53/resolverEndpoint:ResolverEndpoint',
    'aws:route53/resolverRule:ResolverRule',
    'aws:route53/zone:Zone',
    'aws:s3/bucket:Bucket',
    'aws:s3/bucketObject:BucketObject',
    'aws:sagemaker/endpoint:Endpoint',
    'aws:sagemaker/endpointConfiguration:EndpointConfiguration',
    'aws:sagemaker/model:Model',
    'aws:sagemaker/notebookInstance:NotebookInstance',
    'aws:secretsmanager/secret:Secret',
    'aws:servicecatalog/portfolio:Portfolio',
    'aws:sfn/activity:Activity',
    'aws:sfn/stateMachine:StateMachine',
    'aws:sns/topic:Topic',
    'aws:sqs/queue:Queue',
    'aws:ssm/activation:Activation',
    'aws:ssm/document:Document',
    'aws:ssm/maintenanceWindow:MaintenanceWindow',
    'aws:ssm/parameter:Parameter',
    'aws:ssm/patchBaseline:PatchBaseline',
    'aws:storagegateway/cachesIscsiVolume:CachesIscsiVolume',
    'aws:storagegateway/gateway:Gateway',
    'aws:storagegateway/nfsFileShare:NfsFileShare',
    'aws:storagegateway/smbFileShare:SmbFileShare',
    'aws:swf/domain:Domain',
    'aws:transfer/server:Server',
    'aws:transfer/user:User',
    'aws:waf/rateBasedRule:RateBasedRule',
    'aws:waf/rule:Rule',
    'aws:waf/ruleGroup:RuleGroup',
    'aws:waf/webAcl:WebAcl',
    'aws:wafregional/rateBasedRule:RateBasedRule',
    'aws:wafregional/rule:Rule',
    'aws:wafregional/ruleGroup:RuleGroup',
    'aws:wafregional/webAcl:WebAcl',
    'aws:workspaces/directory:Directory',
    'aws:workspaces/ipGroup:IpGroup',
})
|
||||||
20
infra/langfuse/conf.py
Normal file
20
infra/langfuse/conf.py
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
import pulumi
import pulumi_aws as aws
from autotag.autotag import register_auto_tags

# Stack configuration under the "langfuse" namespace (Pulumi.<stack>.yaml).
config = pulumi.Config("langfuse")

# Required scalar settings.
project_name = config.require("project_name")
stack_name = pulumi.get_stack()
environment = config.require("environment")
account_id = config.require("account_id")
tags = config.require_object("tags")

# Required structured settings.
network = config.require_object("network")
ec2_config = config.require_object("ec2")
langfuse_config = config.require_object("langfuse_config")

# Ambient AWS context for the active provider/credentials.
aws_region = aws.get_region().id
current = aws.get_caller_identity()

# Merge the configured tags into every taggable resource of the stack.
register_auto_tags(tags)
|
||||||
5
infra/langfuse/requirements.txt
Normal file
5
infra/langfuse/requirements.txt
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
pulumi
|
||||||
|
pulumi-aws
|
||||||
|
pulumi-docker
|
||||||
|
boto3
|
||||||
|
setuptools
|
||||||
Reference in New Issue
Block a user