# chatbots.py — "Ask your PDF" Streamlit app (Hugging Face Spaces)
import os
import streamlit as st
from PyPDF2 import PdfReader
from langchain.text_splitter import CharacterTextSplitter
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.vectorstores import FAISS
from langchain.chains.question_answering import load_qa_chain
import random
from langchain import HuggingFaceHub
from langchain.callbacks import get_openai_callback
def main():
    """Streamlit app: upload a PDF and ask questions about its contents.

    Pipeline: extract text with PyPDF2 -> chunk with CharacterTextSplitter
    -> embed with HuggingFaceEmbeddings -> index in FAISS -> answer the
    user's question with a flan-t5-xxl "stuff" QA chain.
    """
    # ----------------- Hugging Face API token -----------------
    # SECURITY: never hard-code a real API token in source control (the
    # original committed one).  Require it from the environment instead,
    # e.g. `export HUGGINGFACEHUB_API_TOKEN=hf_...` before launching.
    if not os.environ.get("HUGGINGFACEHUB_API_TOKEN"):
        st.error("Please set the HUGGINGFACEHUB_API_TOKEN environment variable.")
        st.stop()

    # ------------------ Designing Page ---------------
    st.set_page_config(page_title="Ask Your PDF")
    st.header("Ask your PDF :")

    pdf = st.file_uploader("Upload your File here", type="pdf")

    # Check Pdf
    if pdf is not None:
        pdf_reader = PdfReader(pdf)

        # Extract pages from pdf.  extract_text() may return None for
        # image-only pages, so coalesce to "" before joining.
        text = "".join(page.extract_text() or "" for page in pdf_reader.pages)

        # split into overlapping chunks so context that straddles a chunk
        # boundary is not lost
        text_splitter = CharacterTextSplitter(
            separator="\n",
            chunk_size=1000,
            chunk_overlap=200,
            length_function=len,
        )
        chunks = text_splitter.split_text(text)

        # create embeddings and index the chunks for similarity search
        embedding = HuggingFaceEmbeddings()
        knowledge_base = FAISS.from_texts(chunks, embedding)

        user_questions = st.text_input("Ask a Question from PDF : ")
        if user_questions:
            # Canned replies for greetings/farewells, matched
            # case-insensitively and whitespace-tolerantly so "Hi " does
            # not trigger an expensive LLM round-trip.
            normalized = user_questions.strip().lower()
            greeting = ["hy", "hello", "hey", "hi"]
            greet_msg = ["Hello Dear!", "Hey!", "Hey Friend!"]
            if normalized in greeting:
                response = random.choice(greet_msg)
            elif normalized in ("by", "bye"):
                response = "GoodBye Sir!, Have a Nice Day....."
            else:
                # Retrieve the most relevant chunks, then answer with a
                # "stuff" chain (all retrieved docs in a single prompt).
                docs = knowledge_base.similarity_search(user_questions)
                chain = load_qa_chain(
                    HuggingFaceHub(
                        repo_id="google/flan-t5-xxl",
                        model_kwargs={"temperature": 1, "max_length": 512},
                    ),
                    chain_type="stuff",
                )
                # NOTE(review): get_openai_callback only meters OpenAI
                # calls, so it reports zeros with HuggingFaceHub — kept
                # for parity with the original; consider removing.
                with get_openai_callback() as cb:
                    response = chain.run(
                        input_documents=docs, question=user_questions
                    )
                    print(cb)
            st.write(response)


if __name__ == "__main__":
    main()
- chatbots.py +0 -63
@@ -1,63 +0,0 @@
|
|
1 |
-
import os
|
2 |
-
import streamlit as st
|
3 |
-
from PyPDF2 import PdfReader
|
4 |
-
from langchain.text_splitter import CharacterTextSplitter
|
5 |
-
from langchain.embeddings import HuggingFaceEmbeddings
|
6 |
-
from langchain.vectorstores import FAISS
|
7 |
-
from langchain.chains.question_answering import load_qa_chain
|
8 |
-
import random
|
9 |
-
from langchain import HuggingFaceHub
|
10 |
-
from langchain.callbacks import get_openai_callback
|
11 |
-
|
12 |
-
def main():
|
13 |
-
# ---------------------------- created personal API -----------------------------
|
14 |
-
os.environ["HUGGINGFACEHUB_API_TOKEN"] = "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"
|
15 |
-
|
16 |
-
# ------------------ Designing Page ---------------
|
17 |
-
st.set_page_config(page_title="Ask Your PDF")
|
18 |
-
st.header("Ask your PDF :")
|
19 |
-
|
20 |
-
pdf = st.file_uploader("Upload your File here", type="pdf")
|
21 |
-
|
22 |
-
# Check Pdf
|
23 |
-
if pdf is not None:
|
24 |
-
pdf_reader = PdfReader(pdf)
|
25 |
-
|
26 |
-
text = ""
|
27 |
-
|
28 |
-
# Extract pages from pdf
|
29 |
-
for page in pdf_reader.pages:
|
30 |
-
text += page.extract_text()
|
31 |
-
|
32 |
-
# split into chunks
|
33 |
-
text_spliter = CharacterTextSplitter(
|
34 |
-
separator="\n",
|
35 |
-
chunk_size=1000,
|
36 |
-
chunk_overlap=200,
|
37 |
-
length_function=len
|
38 |
-
)
|
39 |
-
|
40 |
-
chunks = text_spliter.split_text(text)
|
41 |
-
|
42 |
-
# create embeddings
|
43 |
-
embedding = HuggingFaceEmbeddings()
|
44 |
-
knowledge_base = FAISS.from_texts(chunks, embedding)
|
45 |
-
|
46 |
-
user_questions = st.text_input("Ask a Question from PDF : ")
|
47 |
-
if user_questions:
|
48 |
-
|
49 |
-
greeting = ["hy", 'hello', 'hey', "hi"]
|
50 |
-
greet_msg = ["Hello Dear!", 'Hey!', 'Hey Friend!']
|
51 |
-
if user_questions in greeting:
|
52 |
-
response = random.choice(greet_msg)
|
53 |
-
elif user_questions == "by" or user_questions == "bye":
|
54 |
-
response = "GoodBye Sir!, Have a Nice Day....."
|
55 |
-
else:
|
56 |
-
docs = knowledge_base.similarity_search(user_questions)
|
57 |
-
chain = load_qa_chain(HuggingFaceHub(repo_id="google/flan-t5-xxl", model_kwargs={"temperature":0.9, "max_length":512}), chain_type="stuff")
|
58 |
-
with get_openai_callback() as cb:
|
59 |
-
response = chain.run(input_documents=docs, question=user_questions)
|
60 |
-
print(cb)
|
61 |
-
st.write(response)
|
62 |
-
if __name__ == "__main__":
|
63 |
-
main()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|