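# IAMIND mental health chatbot Space: a Gradio chat UI backed by a LangChain
# ConversationalRetrievalChain over PDF documents indexed in a Chroma vector store.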
import os

import gradio as gr
from langchain.chains import ConversationalRetrievalChain
from langchain.document_loaders import DirectoryLoader
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.llms import OpenAI
from langchain.text_splitter import CharacterTextSplitter
from langchain.vectorstores import Chroma

# OpenAI credentials are read from the OPENAI_API_KEY environment variable
OPENAI_API_KEY = os.getenv('OPENAI_API_KEY')
# Create a loader for every PDF under the 'archivos' directory
pdf_loader = DirectoryLoader('archivos', glob="**/*.pdf")
loaders = [pdf_loader]

# Build the document list
documents = []
for loader in loaders:
    documents.extend(loader.load())

# Split the documents into chunks small enough to embed
text_splitter = CharacterTextSplitter(chunk_size=3500, chunk_overlap=10)
documents = text_splitter.split_documents(documents)
# Embed the chunks and index them in an in-memory Chroma vector store
embeddings = OpenAIEmbeddings()
vectorstore = Chroma.from_documents(documents, embeddings)

# Retrieve the 2 most similar chunks per question and wrap the retriever
# and LLM in a conversational retrieval chain
retriever = vectorstore.as_retriever(search_type="similarity", search_kwargs={"k": 2})
qa = ConversationalRetrievalChain.from_llm(OpenAI(temperature=0.5), retriever)
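# A minimal sketch of querying the chain directly, outside the Gradio UI
# (the question text is only an illustrative placeholder):
#   result = qa({"question": "What is anxiety?", "chat_history": []})
#   print(result["answer"])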
# GRADIO WIDGET
# Blocks-level CSS constrains the logo image, targeted via its elem_id
with gr.Blocks(css="#logo {max-width: 200px; max-height: 200px; display: block; margin: 0 auto;}") as demo:
    img1 = gr.Image("logo5.jpeg", elem_id="logo")
    gr.Markdown(
        """
## IAMIND - MENTAL HEALTH CHATBOT
This mental health chatbot for adolescents offers confidential, personalized support and can understand and generate natural responses. It is a promising tool for addressing mental health concerns in adolescents.
Developed as a conference presentation at:
XIII ENCUENTRO NACIONAL Y IX NACIONAL DE
CIENCIA, TECNOLOGÍA E INVESTIGACIÓN
“NUEVOS DESARROLLOS Y AVANCES EN INTELIGENCIA ARTIFICIAL – IA”
""")
    msg = gr.Textbox()
    clear = gr.Button("Clear")
    chatbot = gr.Chatbot()
    def respond(user_message, chat_history):
        print(user_message)
        # Convert Gradio's chat history (a list of [user, bot] message pairs)
        # into the list of (user, bot) tuples expected by LangChain
        langchain_history = [(human, bot) for human, bot in chat_history]
        # Run the QA chain
        response = qa({"question": user_message, "chat_history": langchain_history})
        # Append the new turn to the chat history shown in the UI
        chat_history.append((user_message, response["answer"]))
        print(chat_history)
        return "", chat_history
    msg.submit(respond, [msg, chatbot], [msg, chatbot], queue=False)
    clear.click(lambda: None, None, chatbot, queue=False)
demo.launch(debug=True)