import gradio as gr
from langchain.document_loaders import OnlinePDFLoader
from langchain.text_splitter import CharacterTextSplitter
from langchain.llms import HuggingFaceHub
from langchain.embeddings import HuggingFaceHubEmbeddings
from langchain.vectorstores import Chroma
from langchain.chains import RetrievalQA


def loading_pdf():
    # Placeholder status shown while the PDF is being processed.
    return "Loading..."


def pdf_changes(pdf_doc, repo_id):
    # Load the uploaded PDF, split it into chunks, embed the chunks into a
    # Chroma vector store, and build a RetrievalQA chain on top of it.
    global qa
    loader = OnlinePDFLoader(pdf_doc.name)
    documents = loader.load()
    text_splitter = CharacterTextSplitter(chunk_size=300, chunk_overlap=0)
    texts = text_splitter.split_documents(documents)
    embeddings = HuggingFaceHubEmbeddings()
    db = Chroma.from_documents(texts, embeddings)
    retriever = db.as_retriever()
    llm = HuggingFaceHub(repo_id=repo_id, model_kwargs={"temperature": 0.1, "max_new_tokens": 250})
    qa = RetrievalQA.from_chain_type(
        llm=llm,
        chain_type="stuff",
        retriever=retriever,
        return_source_documents=True,
    )
    return "Ready"


def add_text(history, text):
    # Append the user's message to the chat history and clear the textbox.
    history = history + [(text, None)]
    return history, ""


def bot(history):
    # Answer the most recent user message and store it as the bot's reply.
    response = infer(history[-1][0])
    history[-1][1] = response["result"]
    return history


def infer(question):
    # Run the RetrievalQA chain built in pdf_changes().
    result = qa({"query": question})
    return result
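
# --- Usage sketch (assumption, not part of the original app) ----------------
# A minimal, hypothetical example of exercising the helpers above outside the
# Gradio UI. The file path "example.pdf" and the repo id "google/flan-t5-xxl"
# are placeholders; in the app, pdf_changes() receives a gr.File upload (whose
# .name attribute holds the temp file path) and a user-selected model repo id.
# Call _smoke_test() manually, e.g. from a REPL, to try the pipeline end to end.
def _smoke_test():
    from types import SimpleNamespace

    fake_upload = SimpleNamespace(name="example.pdf")  # mimics gr.File's .name attribute
    print(pdf_changes(fake_upload, "google/flan-t5-xxl"))  # builds the global `qa` chain
    print(infer("What is this document about?")["result"])  # queries the chain directly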

# CSS
css = """
#col-container {
    max-width: 700px;
    margin-left: auto;
    margin-right: auto;
}
.title {
    text-align: center;
    max-width: 600px;
    margin-left: auto;
    margin-right: auto;
    color: #000;
}
.pdf-doc {
    margin-bottom: 10px;
}
.chatbot {
    max-height: 350px;
    margin-left: auto;
    margin-right: auto;
    padding: 10px;
    background-color: #fff;
    font-family: sans-serif;
    font-size: 16px;
    line-height: 24px;
}
.chatbot .message {
    color: #000;
}
.chatbot .user-message {
    background-color: #eee;
}
.chatbot .bot-message {
    background-color: #ccc;
}
"""

# HTML
title = """
Upload a .pdf from your local machine and click the "Load PDF🚀" button.
Once it is ready, you can start asking questions about the PDF.