import gradio as gr
import os
import tempfile
from langchain.document_loaders import UnstructuredPDFLoader
from langchain.indexes import VectorstoreIndexCreator
from langchain.chains import RetrievalQA
from langchain.schema import AIMessage, HumanMessage
from langchain.vectorstores import FAISS
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.text_splitter import CharacterTextSplitter
from langchain import HuggingFaceHub

# Hugging Face Hub API token used by the HuggingFaceHub LLM wrapper.
# Raises KeyError at startup if unset (fail fast rather than at first query).
API_KEY = os.environ["API_KEY"]

# Vectorstore index built from the uploaded PDF.
# None until the user uploads a file via the UI; chat() must guard on this.
index = None


def chat(message, history):
    """Answer *message* with RetrievalQA over the uploaded PDF's index.

    Parameters
    ----------
    message : str
        The user's current question.
    history : list[tuple[str, str]]
        Prior (human, ai) turns supplied by ``gr.ChatInterface``.
        NOTE(review): the "stuff" RetrievalQA chain is stateless and takes
        no chat memory, so history is intentionally not forwarded. (The
        original built a message list from it — with the user message
        appended twice — and then discarded it; that dead code is removed.)

    Returns
    -------
    str
        The model's answer, or a prompt to upload a PDF first.
    """
    global index
    # Bug fix: the original dereferenced index.vectorstore unconditionally,
    # crashing with AttributeError when no PDF had been uploaded yet.
    if index is None:
        return "Please upload a PDF first."

    llm2 = HuggingFaceHub(
        repo_id="declare-lab/flan-alpaca-large",
        model_kwargs={"temperature": 0, "max_length": 512},
        huggingfacehub_api_token=API_KEY,
    )
    chain = RetrievalQA.from_chain_type(
        llm=llm2,
        chain_type="stuff",
        retriever=index.vectorstore.as_retriever(),
        input_key="question",
    )
    # Perform question-answering on the uploaded PDF with the user's question.
    return chain.run(
        "Based on the file you have processed, provide a related answer to this question: "
        + message
    )


with gr.Blocks(theme=gr.themes.Soft()) as demo:
    with gr.Row():
        with gr.Column(scale=1):
            with gr.Row():
                upload_file = gr.File(label="Upload a PDF", file_types=["pdf"])
            with gr.Row():
                # Bug fix: gr.Button has no `label` kwarg — the button text
                # is the positional `value` argument.
                upload_button = gr.Button("Upload a PDF")
            with gr.Row():
                text = gr.Textbox(label="Status")

            def load_file(pdf_file):
                """Index the uploaded PDF and publish it via the global.

                pdf_file: the gradio File payload; .name is a temp-file path.
                Returns a status string shown in the Status textbox.
                """
                global index
                pdf_loader = UnstructuredPDFLoader(pdf_file.name)
                index = VectorstoreIndexCreator(
                    embedding=HuggingFaceEmbeddings(),
                    text_splitter=CharacterTextSplitter(
                        chunk_size=1000, chunk_overlap=0
                    ),
                ).from_loaders([pdf_loader])
                return "DONE ✅"

            upload_button.click(load_file, [upload_file], text)

        with gr.Column(scale=2):
            # The single ChatInterface used by the app. (The original also
            # created an identical, never-launched ChatInterface at module
            # level; that dead object is removed.)
            chat_interface = gr.ChatInterface(chat, theme=gr.themes.Soft())

demo.queue().launch(inline=False)