import os

import streamlit as st
from langchain_community.document_loaders import PyPDFLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import Chroma
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnablePassthrough

# Loading and splitting the document
loader = PyPDFLoader("quran-in-modern-english.pdf")
data = loader.load()
splitter = RecursiveCharacterTextSplitter(chunk_size=1313, chunk_overlap=200)
splits = splitter.split_documents(data)

# Initializing the embedding model (defaults to sentence-transformers/all-mpnet-base-v2)
embeddings = HuggingFaceEmbeddings()

# Creating the vector database (an in-memory Chroma instance)
vectordb = Chroma.from_documents(documents=splits, embedding=embeddings)

# Initializing the LLM; the API key is read from the environment instead of being hardcoded
llm = ChatGoogleGenerativeAI(model="gemini-pro", temperature=0, google_api_key=os.environ["GOOGLE_API_KEY"])

# Prompt template
template = """
You are a helpful assistant. Use the context below to answer the user's question. If the context does not contain the answer, answer from your own knowledge.

Context: {context}

Question: {question}
"""
prompt_template = ChatPromptTemplate.from_template(template)

# Retriever over the vector store
retriever = vectordb.as_retriever()

# Creating the RAG chain: the raw question is sent both to the retriever (as the
# search query) and straight through to the prompt via RunnablePassthrough
rag_chain = (
    {"context": retriever, "question": RunnablePassthrough()}
    | prompt_template
    | llm
    | StrOutputParser()
)

# Streamlit app
st.title("Quran Query Answering Bot")
st.write("Ask your query, you will get an answer from the context of the Quran.")

user_query = st.text_input("Enter your query:")

if user_query:
    # The chain expects the raw question string: RunnablePassthrough forwards it to the
    # prompt while the retriever uses it as the search query, so don't wrap it in a dict.
    answer = rag_chain.invoke(user_query)
    st.write("Answer:", answer)