import streamlit as st  # Creates the graphical elements for the app
from streamlit_chat import message
from streamlit_extras.colored_header import colored_header
from streamlit_extras.add_vertical_space import add_vertical_space
from langchain import PromptTemplate, HuggingFaceHub, LLMChain
from dotenv import load_dotenv  # Loads environment variables from a .env file

# Dependencies: pip install streamlit streamlit-chat streamlit-extras langchain python-dotenv

# Load the environment variables.
load_dotenv()

st.set_page_config(page_title="OpenAssistant Powered Quiz App")  # Sets the title bar

# Sidebar contents
with st.sidebar:
    st.title('🤗💬 HuggingQuiz App')
    st.markdown('''
    ## About
    This app is an LLM-powered quiz app built using:
    - [Streamlit](https://streamlit.io/)
    - [LangChain](https://python.langchain.com/)
    - [OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5](https://huggingface.co/OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5) LLM model
    ''')
    add_vertical_space(3)
    st.write('Made with ❤️ by [ninTea](https://github.com/NandiniTandon21)')


def chain_setup():
    """Create the prompt template and LLM chain for the quiz app."""
    # ChatML-style markers expected by the OpenAssistant chat model used below.
    template = """<|im_start|>user
You are an expert quiz maker for technical fields. Let's think step by step and
create a quiz with {num_questions} {quiz_type} questions about the following
concept/content: {quiz_context}.

The format of the quiz could be one of the following:
- Multiple-choice:
    - Questions:
        <Question1>: <a. Answer 1>, <b. Answer 2>, <c. Answer 3>, <d. Answer 4>
        <Question2>: <a. Answer 1>, <b. Answer 2>, <c. Answer 3>, <d. Answer 4>
        ....
    - Answers:
        <Answer1>: <option a|b|c|d>
        <Answer2>: <option a|b|c|d>
        ....
    Example:
    - Questions:
        - 1. What is the time complexity of a binary search tree?
            a. O(n)
            b. O(log n)
            c. O(n^2)
            d. O(1)
    - Answers:
        1. b
- True-false:
    - Questions:
        <Question1>: <True|False>
        <Question2>: <True|False>
        .....
    - Answers:
        <Answer1>: <True|False>
        <Answer2>: <True|False>
        .....
    Example:
    - Questions:
        - 1. What is a binary search tree?
        - 2. How are binary search trees implemented?
    - Answers:
        - 1. True
        - 2. False
- Open-ended:
    - Questions:
        <Question1>:
        <Question2>:
    - Answers:
        <Answer1>:
        <Answer2>:
    Example:
    - Questions:
        - 1. What is a binary search tree?
        - 2. How are binary search trees implemented?
    - Answers:
        1. A binary search tree is a data structure that is used to store data in a sorted manner.
        2. Binary search trees are implemented using linked lists.
<|im_end|>""" prompt = PromptTemplate(template=template, input_variables=["question"]) prompt.format(num_questions=3, quiz_type="multiple-choice", quiz_context="Data Structures in Python Programming") return prompt llm=HuggingFaceHub(repo_id="OpenAssistant/llama2-70b-oasst-sft-v10", model_kwargs={"max_new_tokens":1200}) llm_chain=LLMChain( llm=llm, prompt=prompt ) return llm_chain # generate response def generate_response(question, llm_chain): response = llm_chain.run(question) return response ## load LLM llm_chain = chain_setup() def split_questions_answers(quiz_response): """Function that splits the questions and answers from the quiz response.""" questions = quiz_response.split("Answers:")[0] answers = quiz_response.split("Answers:")[1] return questions, answers context = st.text_area("Enter the concept/context for the quiz") num_questions = st.number_input("Enter the number of questions",min_value=1,max_value=10,value=3) quiz_type = st.selectbox("Select the quiz type",["multiple-choice","true-false", "open-ended"]) if st.button("Generate Quiz"): quiz_response = llm_chain.invoke({"quiz_type":quiz_type,"num_questions":num_questions,"quiz_context":context}) st.write("Quiz Generated!") questions,answers = split_questions_answers(quiz_response) st.session_state.answers = answers st.session_state.questions = questions st.write(questions) if st.button("Show Answers"): st.markdown(st.session_state.questions) st.write("----") st.markdown(st.session_state.answers) if __name__=="__main__": main()