import os

import gradio as gr
from langchain.chat_models import ChatAnthropic
from langchain.document_loaders import OnlinePDFLoader
from langchain.prompts import ChatPromptTemplate
from langchain.text_splitter import CharacterTextSplitter

# ChatAnthropic reads the API key from the ANTHROPIC_API_KEY environment
# variable; fail early with a clear message if it is not set.
if not os.getenv("ANTHROPIC_API_KEY"):
    raise RuntimeError("Please set the ANTHROPIC_API_KEY environment variable.")

pdf_content = ""


def load_pdf(pdf_doc):
    """Load the uploaded PDF, split it into chunks, and cache its text."""
    global pdf_content
    try:
        if pdf_doc is None:
            return "No PDF uploaded."
        # Load the PDF and split it into chunks of roughly 1000 characters.
        loader = OnlinePDFLoader(pdf_doc.name)
        documents = loader.load()
        text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0)
        chunks = text_splitter.split_documents(documents)
        # split_documents returns Document objects, so join their page_content.
        pdf_content = " ".join(chunk.page_content for chunk in chunks)
        return "PDF Loaded Successfully."
    except Exception as e:
        return f"Error processing PDF: {e}"


def chat_with_pdf(question):
    """Answer a question about the cached PDF text with Claude."""
    # Create an instance of the ChatAnthropic model.
    model = ChatAnthropic()
    # Pass the PDF text and the question as template variables so that any
    # braces inside the PDF text are not interpreted as prompt placeholders.
    prompt = ChatPromptTemplate.from_messages([
        ("human", "{pdf_content}\n\n{question}"),
    ])
    # Chain the prompt into the model and invoke it.
    chain = prompt | model
    response = chain.invoke({"pdf_content": pdf_content, "question": question})
    return response.content


# Define Gradio UI
def gradio_interface(pdf_doc, question):
    """Gradio callback: load the PDF on the first call, then answer questions."""
    if not pdf_content:
        return load_pdf(pdf_doc)
    return chat_with_pdf(question)


gr.Interface(
    fn=gradio_interface,
    inputs=[
        gr.File(label="Load a pdf", file_types=[".pdf"], type="file"),
        gr.Textbox(label="Ask a question about the PDF"),
    ],
    outputs="text",
    live=True,
    title="Chat with PDF content using Anthropic",
    description="Upload a .PDF and interactively chat about its content.",
).launch()
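
# --- Usage sketch (assumptions, not part of the app logic above) ---
# OnlinePDFLoader is backed by the `unstructured` package, and the model call
# needs the `anthropic` client. Assuming this script is saved as a hypothetical
# `app.py`, a typical setup might look like:
#
#   pip install langchain anthropic gradio unstructured
#   export ANTHROPIC_API_KEY="sk-ant-..."
#   python app.py
#
# Gradio then serves the interface on http://127.0.0.1:7860 by default.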