Spaces:
Sleeping
Sleeping
File size: 4,936 Bytes
46f282d 6cb69fd 46f282d 6cb69fd 46f282d 6cb69fd 46f282d 6cb69fd 46f282d 6cb69fd 46f282d 6cb69fd 46f282d 6cb69fd 46f282d 6cb69fd 46f282d 6cb69fd 46f282d 6cb69fd 46f282d 6cb69fd 46f282d 6cb69fd 46f282d 6cb69fd 46f282d 6cb69fd 46f282d 6cb69fd 46f282d 6cb69fd 46f282d 6cb69fd 46f282d 6cb69fd 46f282d 6cb69fd 46f282d 6cb69fd 46f282d 6cb69fd |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 |
import streamlit as st
from utils.pdf_processor import PDFProcessor
from utils.llm_handler import LLMHandler
import time
from dotenv import load_dotenv
import os
import requests
from PIL import Image
from io import BytesIO
# Populate os.environ from a local .env file before the token check below.
load_dotenv()

# st.set_page_config must be the FIRST Streamlit command executed in the
# script run; the original called st.error()/st.stop() before it, which
# Streamlit rejects with a StreamlitAPIException.
st.set_page_config(
    page_title="PDF Chatbot by phucbienvan",
    page_icon="📚",
    layout="wide"
)

# Fail fast when the Hugging Face token is missing — the LLM backend
# cannot authenticate without it, so nothing downstream would work.
if not os.getenv("HUGGINGFACEHUB_API_TOKEN"):
    st.error("HUGGINGFACEHUB_API_TOKEN does not exist in environment variables!")
    st.stop()
# Display logo and title side by side
col1, col2 = st.columns([1, 5])
with col1:
    try:
        logo_url = "https://cdn-avatars.huggingface.co/v1/production/uploads/65129261353a60593b1dc353/PY40eVSt4jkYQinleKGua.jpeg"
        # A timeout keeps a slow/unreachable CDN from hanging the whole page
        # render; the original request could block indefinitely.
        response = requests.get(logo_url, timeout=10)
        # Surface HTTP errors explicitly instead of feeding an error page to PIL.
        response.raise_for_status()
        logo = Image.open(BytesIO(response.content))
        st.image(logo, width=150)
    except Exception as e:
        st.error(f"Could not load logo: {e}")
with col2:
    st.title("📚 PDF Chatbot")
    st.markdown("### Upload PDF files and ask questions about the content")
    st.markdown("##### Author: phucbienvan")
# Seed st.session_state with every key the rest of the script reads,
# so later lookups never raise KeyError on a fresh session.
_state_defaults = {
    "vector_store": None,   # FAISS/etc. store built from the uploaded PDF
    "qa_chain": None,       # retrieval-QA chain wired to the vector store
    "chat_history": [],     # list of (question, answer) tuples
    "pdf_name": None,       # name of the last processed upload
}
for _key, _default in _state_defaults.items():
    if _key not in st.session_state:
        st.session_state[_key] = _default
# Sidebar: PDF upload + processing, user guide, and decorative logo.
with st.sidebar:
    st.header("Upload Document")
    uploaded_file = st.file_uploader("Choose PDF file", type="pdf")

    # Re-process only when a *new* file (compared by name) arrives, so the
    # reruns triggered by chat interaction don't rebuild the vector store.
    if uploaded_file is not None and (st.session_state["pdf_name"] != uploaded_file.name):
        with st.spinner("Processing PDF file..."):
            pdf_processor = PDFProcessor()
            st.session_state["vector_store"] = pdf_processor.process_pdf(uploaded_file)
            llm_handler = LLMHandler()
            st.session_state["qa_chain"] = llm_handler.create_qa_chain(st.session_state["vector_store"])
            st.session_state["pdf_name"] = uploaded_file.name
            # A new document invalidates the previous conversation.
            st.session_state["chat_history"] = []
        st.success(f"Processed file: {uploaded_file.name}")

    st.markdown("---")
    st.markdown("### User Guide")
    st.markdown("""
    1. Upload a PDF file from your computer
    2. Wait for the system to process the file
    3. Ask questions about the file content
    4. Get answers from the chatbot
    """)

    # Display logo in sidebar
    st.markdown("---")
    try:
        logo_url = "https://cdn-avatars.huggingface.co/v1/production/uploads/65129261353a60593b1dc353/PY40eVSt4jkYQinleKGua.jpeg"
        # Timeout prevents a hung request from blocking the sidebar render.
        response = requests.get(logo_url, timeout=10)
        logo = Image.open(BytesIO(response.content))
        st.image(logo, width=100, caption="phucbienvan")
    except Exception:
        # Best-effort decoration: the app works without the sidebar logo.
        # Narrowed from a bare `except:`, which would also swallow
        # SystemExit/KeyboardInterrupt.
        pass
# Replay the conversation so far: one avatar+text row per message,
# separated by a horizontal rule after each Q/A pair.
st.subheader("Conversation")
for question_text, answer_text in st.session_state["chat_history"]:
    for avatar, speaker, body in (("🧑", "You", question_text),
                                  ("🤖", "Bot", answer_text)):
        row = st.container()
        with row:
            avatar_col, text_col = st.columns([1, 9])
            with avatar_col:
                st.markdown(avatar)
            with text_col:
                st.markdown(f"**{speaker}:** {body}")
    st.markdown("---")
question = st.text_input("Enter your question:", key="question_input")


def _show_chat_row(icon: str, text: str) -> None:
    """Render one chat row: avatar column beside a markdown text column."""
    with st.container():
        icon_col, text_col = st.columns([1, 9])
        with icon_col:
            st.markdown(icon)
        with text_col:
            st.markdown(text)


if st.button("Send Question"):
    if st.session_state["qa_chain"] is None:
        st.error("Please upload a PDF file before asking questions!")
    elif not question:
        st.warning("Please enter a question!")
    else:
        with st.spinner("Finding answer..."):
            llm_handler = LLMHandler()
            answer, sources = llm_handler.get_answer(st.session_state["qa_chain"], question)
            st.session_state["chat_history"].append((question, answer))

        # Render the new exchange immediately. NOTE: the original called
        # st.rerun() after this, which discarded the freshly rendered output —
        # including the references expander below. Because `sources` are never
        # stored in chat_history, the references were lost permanently. The
        # history loop above picks the Q/A pair up on the next natural rerun,
        # so no explicit rerun is needed.
        _show_chat_row("🧑", f"**You:** {question}")
        _show_chat_row("🤖", f"**Bot:** {answer}")

        if sources:
            with st.expander("View References"):
                for idx, doc in enumerate(sources, start=1):
                    st.markdown(f"**Source {idx}:**")
                    st.markdown(doc.page_content)
                    st.markdown("---")

st.markdown("---")
st.markdown("### 📚 PDF Chatbot | Author: phucbienvan")
|