# NOTE: web-page residue from the file's hosting page, preserved as comments
# so the module remains valid Python:
# realambuj's picture
# Upload 6 files
# 1ffc5c5 verified
# raw / history / blame
# No virus — 1.63 kB
from langchain.chains.question_answering import load_qa_chain
from langchain.prompts import PromptTemplate
import json
from prompts import *
# from langchain.text_splitter import RecursiveCharacterTextSplitter
# from PyPDF2 import PdfReader
# from langchain_google_genai import GoogleGenerativeAIEmbeddings
# from langchain.vectorstores import FAISS
# import PyPDF2
# utils function to generate QA Pairs
def util(context, numPairs, inputPrompt, model):
    """Run a "stuff" QA chain over *context* and parse the model output as JSON.

    Args:
        context: List of documents passed to the chain as ``input_documents``.
        numPairs: Number of question-answer pairs requested by the prompt.
        inputPrompt: A ``PromptTemplate`` exposing ``context`` and ``numPairs``.
        model: LLM instance handed to ``load_qa_chain``.

    Returns:
        The model's output parsed as a JSON object (dict or list).

    Raises:
        json.JSONDecodeError: If the (de-fenced) model output is not valid JSON.
    """
    stuff_chain = load_qa_chain(model, chain_type="stuff", prompt=inputPrompt)
    stuff_answer = stuff_chain(
        {"input_documents": context, "numPairs": numPairs}, return_only_outputs=True
    )
    output_text = stuff_answer['output_text']
    # LLMs frequently wrap JSON answers in markdown code fences
    # (```json ... ```); strip them so json.loads does not fail on
    # otherwise-valid output. Plain JSON passes through unchanged.
    cleaned = output_text.strip()
    if cleaned.startswith("```"):
        # Drop the opening fence line (may carry a language tag like "json").
        cleaned = cleaned.split("\n", 1)[1] if "\n" in cleaned else cleaned[3:]
        cleaned = cleaned.rstrip()
        if cleaned.endswith("```"):
            cleaned = cleaned[:-3].rstrip()
    output_json = json.loads(cleaned)
    return output_json
### Generating Q-A pairs Full Length QA Pairs
def getLongQAPairs(context, numPairs,model):
    """Generate full-length question/answer pairs from *context*.

    Builds a PromptTemplate from the long-form QA prompt and delegates
    chain execution plus JSON parsing to ``util``.
    """
    qa_prompt = PromptTemplate(
        input_variables=["context", "numPairs"],
        template=getLongQAPrompt(),
    )
    return util(context, numPairs, qa_prompt, model)
### Generating Q-A pairs - One Word Answer Type Pair
def getShortQAPairs(context, numPairs,model):
    """Generate short (one-word-answer) question/answer pairs from *context*.

    Wraps the short-answer QA prompt in a PromptTemplate, then hands off
    to ``util`` for chain execution and JSON parsing.
    """
    qa_prompt = PromptTemplate(
        input_variables=["context", "numPairs"],
        template=getShortQAPrompt(),
    )
    return util(context, numPairs, qa_prompt, model)
### Generating Q-A pairs - MCQ
def getMcqQAPairs(context, numPairs,model):
    """Generate multiple-choice question/answer pairs from *context*.

    Constructs a PromptTemplate around the MCQ prompt and delegates the
    rest of the pipeline to ``util``.
    """
    qa_prompt = PromptTemplate(
        input_variables=["context", "numPairs"],
        template=getMcqQAPrompt(),
    )
    return util(context, numPairs, qa_prompt, model)