import os

import gradio as gr
from langchain.llms import Replicate
from langchain.vectorstores import Chroma
from langchain.text_splitter import CharacterTextSplitter
from langchain.document_loaders import PyPDFLoader
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.chains import ConversationalRetrievalChain

# Replicate reads its token from REPLICATE_API_TOKEN; here it is copied
# from an environment variable / secret named "API".
key = os.environ.get('API')
os.environ["REPLICATE_API_TOKEN"] = key


def loading_pdf():
    return "Loading..."


def pdf_changes(pdf_doc):
    # Load the uploaded PDF and split it into ~1000-character chunks.
    loader = PyPDFLoader(pdf_doc.name)
    documents = loader.load()
    text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0)
    texts = text_splitter.split_documents(documents)

    # Embed the chunks and index them in an in-memory Chroma store,
    # retrieving the 2 most similar chunks per question.
    embeddings = HuggingFaceEmbeddings()
    db = Chroma.from_documents(texts, embeddings)
    retriever = db.as_retriever(search_kwargs={'k': 2})

    # Llama 2 13B chat hosted on Replicate as the answering LLM.
    llm = Replicate(
        model="a16z-infra/llama13b-v2-chat:df7690f1994d94e96ad9d568eac121aecf50684a0b0963b25a41cc40061269e5",
        input={"temperature": 0.2, "max_length": 3000, "length_penalty": 0.1, "num_beams": 3},
    )

    global qa
    qa = ConversationalRetrievalChain.from_llm(
        llm, retriever, return_source_documents=True
    )
    return "Ready"


def query(history, text):
    # The Gradio chatbot history is already a list of (question, answer) pairs,
    # which is the chat_history format ConversationalRetrievalChain expects.
    result = qa({"question": text, "chat_history": history})
    new_history = history + [(text, result['answer'])]
    return new_history, ""


css = """
#col-container {max-width: 700px; margin-left: auto; margin-right: auto;}
"""

title = """
<div style="text-align: center; max-width: 700px;">
    <h1>Chat with PDF</h1>
</div>
""" with gr.Blocks(css=css) as demo: with gr.Column(elem_id="col-container"): gr.HTML(title) with gr.Column(): pdf_doc = gr.File(label="Load a PDF", file_types=['.pdf'], type="file") load_pdf = gr.Button("Load PDF") langchain_status = gr.Textbox(label="Status", placeholder="", interactive=False) chatbot = gr.Chatbot([], elem_id="chatbot").style(height=350) question = gr.Textbox(label="Question", placeholder="Type your question and hit Enter ") submit_btn = gr.Button("Send message") load_pdf.click(pdf_changes, inputs=[pdf_doc], outputs=[langchain_status], queue=False) question.submit(query, [chatbot, question], [chatbot, question]) submit_btn.click(query, [chatbot, question], [chatbot, question]) demo.launch()