import nest_asyncio
# from llama_parse import LlamaParse
from langchain_text_splitters import RecursiveCharacterTextSplitter
from langchain_community.vectorstores import Chroma
from langchain_community.embeddings.huggingface import HuggingFaceEmbeddings
from langchain_groq import ChatGroq
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnablePassthrough
import os
import pickle


class tafsir:
    """RAG pipeline that retrieves tafsir (Quranic exegesis) passages and summarizes them."""

    def __init__(self):
        # Load pre-parsed documents and index them in an in-memory Chroma vector store.
        with open("docs.pkl", "rb") as f:
            pkl_docs = pickle.load(f)
        self.store = Chroma.from_documents(
            documents=pkl_docs,
            embedding=HuggingFaceEmbeddings(model_name="BAAI/bge-base-en-v1.5"),
        )
        self.retriever = self.store.as_retriever()

        # Groq-hosted Mixtral model; the API key is read from the environment
        # instead of being hardcoded in the source.
        llm = ChatGroq(api_key=os.environ["GROQ_API_KEY"], model="mixtral-8x7b-32768")

        rag_template = """
        Provide a summary from the context, which contains interpretations of Quranic Texts that highlight the importance of the topic mentioned in the question. Do not include the Quranic Texts themselves, but mention which Surah and verse.
        Context:
        {context}
        Question:
        {question}
        """
        rag_prompt = ChatPromptTemplate.from_template(rag_template)

        # Retrieval-augmented chain: retrieve context for the question, fill the
        # prompt, call the LLM, and parse the reply to a plain string.
        self.rag_chain = (
            {"context": self.retriever, "question": RunnablePassthrough()}
            | rag_prompt
            | llm
            | StrOutputParser()
        )

    def return_tafsir(self, topic):
        # Run the RAG chain for a topic and return the model's summary.
        response = self.rag_chain.invoke(topic)
        return response
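

# Minimal usage sketch (an assumption about how this class is driven; in the Space it
# is presumably imported by an app/UI script, and "patience" is only an illustrative topic):
if __name__ == "__main__":
    bot = tafsir()
    print(bot.return_tafsir("patience"))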