|
import streamlit as st |
|
import os |
|
|
|
from groq import Groq |
|
from datetime import datetime |
|
from langchain_community.embeddings import HuggingFaceEmbeddings |
|
from langchain_groq import ChatGroq |
|
from langchain.chains import RetrievalQA |
|
from langchain_pinecone import PineconeVectorStore |
|
|
|
# --- Page setup --------------------------------------------------------------
st.set_page_config('Opositor')
st.header("Pregunta al trebep")

# SECURITY NOTE(review): these API keys are committed in source control and
# should be rotated, then supplied via st.secrets or the environment.
# Using setdefault() (instead of unconditional assignment) lets an
# externally provided key take precedence over the hard-coded fallback.
os.environ.setdefault("GROQ_API_KEY", "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")
os.environ.setdefault("PINECONE_API_KEY", '4bf0b4cf-4ced-4f70-8977-d60bb8ae405a')

# Chat model served by Groq.
model = 'llama3-8b-8192'
llm = ChatGroq(model=model)

# Multilingual embedding model; must match the model used to build the index.
model_name = 'intfloat/multilingual-e5-base'
embedding = HuggingFaceEmbeddings(model_name=model_name)

# Pinecone index/namespace holding the pre-embedded TREBEP documents.
index_name = "boe-intfloat-multilingual-e5-base"
namespace = "trebep"
vectorstore = PineconeVectorStore(index_name=index_name, namespace=namespace, embedding=embedding)

# Retrieval-augmented QA chain: retrieve relevant chunks, "stuff" them into
# the prompt, and let the LLM produce the answer.
qa = RetrievalQA.from_chain_type(
    llm=llm,
    chain_type="stuff",
    retriever=vectorstore.as_retriever(),
)
|
|
|
|
|
def mostrar_logs(logs):
    """Render the question/answer history in the Streamlit sidebar.

    Each entry in *logs* is expected to be a dict with 'Pregunta' and
    'Respuesta' keys (as built by the main interaction loop).
    """
    sidebar = st.sidebar
    sidebar.header("Registro de preguntas")
    for registro in logs:
        pregunta = registro['Pregunta']
        respuesta = registro['Respuesta']
        sidebar.write(f"**Pregunta: {pregunta}**")
        sidebar.write(f"Respuesta: {respuesta}")
|
|
|
|
|
|
|
# Persist the Q&A history across Streamlit reruns: the script re-executes
# top-to-bottom on every interaction, so a plain `logs = []` would wipe the
# history each time. st.session_state survives reruns within a session.
if "logs" not in st.session_state:
    st.session_state["logs"] = []
logs = st.session_state["logs"]

user_question = st.text_input("¡A jugar! Haz una pregunta al trebep:")

if user_question:

    respuesta = qa.invoke(user_question)

    # RetrievalQA.invoke returns a dict like {"query": ..., "result": ...};
    # show only the answer text, not the raw dict repr.
    if isinstance(respuesta, dict):
        answer = respuesta.get("result", respuesta)
    else:
        answer = respuesta

    st.subheader("Respuesta")
    st.write(f":green[{answer}]")

    # NOTE(review): this appends on every rerun while the text box is
    # non-empty, so repeated reruns with the same question may duplicate
    # entries — acceptable for a simple log, flagged for awareness.
    logs.append({"Pregunta": user_question, "Respuesta": answer})

mostrar_logs(logs)
|
|