improprogrammer committed on
Commit
b18f12a
1 Parent(s): d9fc955

Create app.py

Files changed (1)
  1. app.py +55 -0
app.py ADDED
@@ -0,0 +1,55 @@
+ from langchain.document_loaders import PyPDFLoader
+ from langchain.text_splitter import RecursiveCharacterTextSplitter
+ from langchain_community.embeddings.huggingface import HuggingFaceEmbeddings
+ from langchain_community.vectorstores import Chroma
+ from langchain_google_genai import ChatGoogleGenerativeAI
+ from langchain.prompts import ChatPromptTemplate
+ from langchain_core.output_parsers import StrOutputParser
+ from langchain_core.runnables import RunnablePassthrough
+ import streamlit as st
+
+ # Load the PDF and split it into overlapping chunks
+ pdf = PyPDFLoader("/content/quran-in-modern-english.pdf")
+ data = pdf.load()
+ rs = RecursiveCharacterTextSplitter(chunk_size=1313, chunk_overlap=200)
+ splits = rs.split_documents(data)
+
+ # Initialize the embedding model
+ em = HuggingFaceEmbeddings()
+
+ # Create the vector database from the document chunks
+ vectordb = Chroma.from_documents(documents=splits, embedding=em)
+
+ # Initialize the LLM (the API key is read from the GOOGLE_API_KEY environment variable rather than hard-coded)
+ llm = ChatGoogleGenerativeAI(model="gemini-pro", temperature=0)
+
+ # Prompt template
+ template = """
+ You are a helpful assistant. Use the context below to answer the user's question. If the context does not contain the answer, answer from your own knowledge.
+
+ Context: {context}
+
+ Question: {question}
+ """
+ prompt_template = ChatPromptTemplate.from_template(template)
+
+ # Retriever over the vector store
+ retriever = vectordb.as_retriever()
+
+ # RAG chain: retrieve context, fill the prompt, call the LLM, parse the output to a string
+ rag_chain = (
+     {"context": retriever, "question": RunnablePassthrough()}
+     | prompt_template
+     | llm
+     | StrOutputParser()
+ )
+
+ # Streamlit app
+ st.title("Quran Query Answering Bot")
+ st.write("Ask your query; you will get an answer from the context of the Quran.")
+
+ user_query = st.text_input("Enter your query:")
+
+ if user_query:
+     answer = rag_chain.invoke(user_query)
+     st.write("Answer:", answer)
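One design note on app.py as committed: Streamlit reruns the entire script on every interaction, so the PDF is reloaded, re-embedded, and re-indexed into Chroma for each query. Below is a minimal sketch of how the expensive setup could be cached with Streamlit's st.cache_resource so it runs only once per process. The build_chain function name is illustrative and not part of the commit; it assumes the same PDF path and that the Google API key is supplied via the GOOGLE_API_KEY environment variable.

import streamlit as st
from langchain.document_loaders import PyPDFLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_community.embeddings.huggingface import HuggingFaceEmbeddings
from langchain_community.vectorstores import Chroma
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnablePassthrough


@st.cache_resource  # build the index and chain once, reuse across Streamlit reruns
def build_chain():
    # Same pipeline as app.py: load, split, embed, index, then assemble the RAG chain
    docs = PyPDFLoader("/content/quran-in-modern-english.pdf").load()
    splits = RecursiveCharacterTextSplitter(chunk_size=1313, chunk_overlap=200).split_documents(docs)
    vectordb = Chroma.from_documents(documents=splits, embedding=HuggingFaceEmbeddings())
    llm = ChatGoogleGenerativeAI(model="gemini-pro", temperature=0)  # reads GOOGLE_API_KEY from the environment
    prompt = ChatPromptTemplate.from_template(
        "Use this context to answer the user's question.\n\nContext: {context}\n\nQuestion: {question}"
    )
    return (
        {"context": vectordb.as_retriever(), "question": RunnablePassthrough()}
        | prompt
        | llm
        | StrOutputParser()
    )


rag_chain = build_chain()
st.title("Quran Query Answering Bot")
user_query = st.text_input("Enter your query:")
if user_query:
    st.write("Answer:", rag_chain.invoke(user_query))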