# NOTE(review): extraction artifacts removed here (a "File size" banner,
# commit hashes, and a dump of line numbers) — they were not Python source.
import streamlit as st #Create graphical elements for the app
from streamlit_chat import message
from streamlit_extras.colored_header import colored_header
from streamlit_extras.add_vertical_space import add_vertical_space
from langchain import PromptTemplate, HuggingFaceHub, LLMChain
from dotenv import load_dotenv #Loads environment variables
# BUG FIX: `pip install streamlit-chat` is a shell command, not Python — it was
# a SyntaxError here. Install dependencies from the command line instead:
#   pip install streamlit-chat

# Load environment variables (e.g. HUGGINGFACEHUB_API_TOKEN) from a .env file.
load_dotenv()

# Configure the browser tab title; st.set_page_config must be the first
# Streamlit call executed by the script.
st.set_page_config(page_title="OpenAssistant Powered Quiz app")
# Sidebar contents: static "About" panel describing the app and its stack.
with st.sidebar:
    st.title('🤗💬 HuggingQuiz App')
    # Markdown body is a runtime string — content kept byte-identical.
    st.markdown('''
## About
This app is an LLM-powered chatbot built using:
- [Streamlit](https://streamlit.io/)
- [LangChain](https://python.langchain.com/)
- [OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5](https://huggingface.co/OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5) LLM model
''')
    # Spacer pushes the attribution line toward the bottom of the sidebar.
    add_vertical_space(3)
    st.write('Made with ❤️ by [ninTea](https://github.com/NandiniTandon21)')
def main():
    """Render the page header and description for the quiz app."""
    st.title("Quiz App")
    st.write("This app generates a quiz based on a given context.")


def chain_setup():
    """Build and return the LLMChain that generates quizzes.

    Returns:
        LLMChain wired to the HuggingFace-hosted OpenAssistant model, whose
        prompt expects `num_questions`, `quiz_type` and `quiz_context`.
    """
    # NOTE(review): the template both opens and closes with <|im_end|>; the
    # opening marker is presumably meant to be <|im_start|> — confirm against
    # the model's chat format before changing the string.
    template = """<|im_end|>
You are an expert quiz maker for technical fields. Let's think step by step and
create a quiz with {num_questions} {quiz_type} questions about the following concept/content: {quiz_context}.
The format of the quiz could be one of the following:
- Multiple-choice:
- Questions:
<Question1>: <a. Answer 1>, <b. Answer 2>, <c. Answer 3>, <d. Answer 4>
<Question2>: <a. Answer 1>, <b. Answer 2>, <c. Answer 3>, <d. Answer 4>
....
- Answers:
<Answer1>: <a|b|c|d>
<Answer2>: <a|b|c|d>
....
Example:
- Questions:
- 1. What is the time complexity of a binary search tree?
a. O(n)
b. O(log n)
c. O(n^2)
d. O(1)
- Answers:
1. b
- True-false:
- Questions:
<Question1>: <True|False>
<Question2>: <True|False>
.....
- Answers:
<Answer1>: <True|False>
<Answer2>: <True|False>
.....
Example:
- Questions:
- 1. What is a binary search tree?
- 2. How are binary search trees implemented?
- Answers:
- 1. True
- 2. False
- Open-ended:
- Questions:
<Question1>:
<Question2>:
- Answers:
<Answer1>:
<Answer2>:
Example:
Questions:
- 1. What is a binary search tree?
- 2. How are binary search trees implemented?
- Answers:
1. A binary search tree is a data structure that is used to store data in a sorted manner.
2. Binary search trees are implemented using linked lists.
<|im_end|>"""
    # BUG FIX: input_variables must name the placeholders actually used by the
    # template; the original declared ["question"], which appears nowhere in
    # it, so formatting the prompt at run time would fail. The original also
    # called prompt.format(...) and discarded the result, and had a premature
    # `return prompt` that made the chain construction below unreachable —
    # both removed.
    prompt = PromptTemplate(
        template=template,
        input_variables=["num_questions", "quiz_type", "quiz_context"],
    )
    llm = HuggingFaceHub(
        repo_id="OpenAssistant/llama2-70b-oasst-sft-v10",
        model_kwargs={"max_new_tokens": 1200},
    )
    return LLMChain(llm=llm, prompt=prompt)


def generate_response(question, llm_chain):
    """Run `llm_chain` on `question` and return its raw string output."""
    return llm_chain.run(question)


## load LLM chain once at script start (Streamlit re-executes on every rerun)
llm_chain = chain_setup()
def split_questions_answers(quiz_response):
    """Split the generated quiz text into questions and answers sections.

    Splits on the FIRST occurrence of the "Answers:" marker, so any later
    occurrence of the word stays inside the answers section (the original
    double-split dropped everything after a second marker, and raised
    IndexError when the marker was missing).

    Args:
        quiz_response: Full text returned by the LLM chain.

    Returns:
        A ``(questions, answers)`` tuple of strings. If no "Answers:" marker
        is present, the whole response is returned as questions and the
        answers string is empty.
    """
    questions, _, answers = quiz_response.partition("Answers:")
    return questions, answers
# --- Quiz UI (Streamlit re-runs this top to bottom on every interaction) ---
context = st.text_area("Enter the concept/context for the quiz")
num_questions = st.number_input("Enter the number of questions", min_value=1, max_value=10, value=3)
quiz_type = st.selectbox("Select the quiz type", ["multiple-choice", "true-false", "open-ended"])

if st.button("Generate Quiz"):
    result = llm_chain.invoke({"quiz_type": quiz_type, "num_questions": num_questions, "quiz_context": context})
    # BUG FIX: LLMChain.invoke returns a dict keyed by the chain's output key
    # ("text"); the original passed the dict straight to str.split, which
    # fails. Fall back to the value itself for versions that return a string.
    quiz_response = result.get("text", str(result)) if isinstance(result, dict) else result
    st.write("Quiz Generated!")
    questions, answers = split_questions_answers(quiz_response)
    # Persist across reruns so the "Show Answers" click (a fresh rerun in
    # which the Generate button reads False) still has the quiz available.
    st.session_state.questions = questions
    st.session_state.answers = answers
    st.write(questions)

# BUG FIX: this button must sit OUTSIDE the "Generate Quiz" branch — a button
# click triggers a rerun in which st.button("Generate Quiz") returns False, so
# a nested "Show Answers" button could never fire. Also guard against clicking
# it before any quiz has been generated (the original would KeyError).
if st.button("Show Answers") and "questions" in st.session_state:
    st.markdown(st.session_state.questions)
    st.write("----")
    st.markdown(st.session_state.answers)
# Script entry point when executed via `streamlit run` or `python`.
# BUG FIX: removed the stray trailing " |" extraction artifact after main(),
# which was a SyntaxError.
if __name__ == "__main__":
    main()