import os

from langchain.chains import ConversationalRetrievalChain
from langchain.document_loaders import DirectoryLoader
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.llms import OpenAI
from langchain.text_splitter import CharacterTextSplitter
from langchain.vectorstores import Chroma

# Read the OpenAI API key from the environment
OPENAI_API_KEY = os.getenv('OPENAI_API_KEY')
# Load every PDF found under the 'archivos' directory (searched recursively)
pdf_loader = DirectoryLoader('archivos', glob="**/*.pdf")
loaders = [pdf_loader]

# Build the list of Document objects from every loader
documents = []
for loader in loaders:
    documents.extend(loader.load())
# Split the loaded documents into overlapping chunks before embedding
text_splitter = CharacterTextSplitter(chunk_size=3500, chunk_overlap=10)
documents = text_splitter.split_documents(documents)
# Embed the chunks and index them in an in-memory Chroma vector store
embeddings = OpenAIEmbeddings()
vectorstore = Chroma.from_documents(documents, embeddings)
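# Optional (not used by this app): Chroma can also persist the index to disk via a
# persist_directory argument; the path below is just a hypothetical example.
# vectorstore = Chroma.from_documents(documents, embeddings, persist_directory="chroma_db")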
# Retrieve the two most similar chunks for each question
retriever = vectorstore.as_retriever(search_type="similarity", search_kwargs={"k": 2})
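# For reference only: the retriever can be queried on its own and, with k=2, returns
# the two most similar chunks as Document objects (the query below is illustrative).
# docs = retriever.get_relevant_documents("What are the PDF files about?")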
qa = ConversationalRetrievalChain.from_llm(OpenAI(temperature=0.5), retriever)
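# For reference only: the chain is called with a dict holding "question" and
# "chat_history" (a list of (human, ai) tuples) and returns a dict whose "answer"
# key contains the generated reply; the question below is illustrative.
# result = qa({"question": "Summarize the documents", "chat_history": []})
# print(result["answer"])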
# GRADIO WIDGET
import gradio as gr

with gr.Blocks() as demo:
    img1 = gr.Image("logo.jpg")
    # img1.css = "max-width: 200px; max-height: 200px; display: block; margin: 0 auto;"
    gr.Markdown(
        """
# CHATBOT NAME
Chatbot description
""")
    msg = gr.Textbox()
    clear = gr.Button("Clear")
    chatbot = gr.Chatbot()
    def respond(user_message, chat_history):
        print(user_message)
        # Convert Gradio's chat history (a list of [user, bot] pairs) into the
        # list of (human, ai) tuples that the LangChain chain expects
        langchain_history = [(human, ai) for human, ai in chat_history]
        response = qa({"question": user_message, "chat_history": langchain_history})
        # Append the new exchange so the Chatbot widget shows it
        chat_history.append((user_message, response["answer"]))
        print(chat_history)
        return "", chat_history
    # Send the textbox content to respond() and update both the textbox and the chat
    msg.submit(respond, [msg, chatbot], [msg, chatbot], queue=False)
    # Clear the conversation shown in the Chatbot widget
    clear.click(lambda: None, None, chatbot, queue=False)
demo.launch(debug=True)