import openai
import gradio as gr
import os
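# Assumed runtime dependencies (not pinned in this file): gradio and the pre-1.0
# openai SDK, since the module-level openai.ChatCompletion interface used below
# was removed in openai>=1.0.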
STARTING_PROMPT = [{"role": "user", "content": """You are a math question generator. For each question, I will provide you with 4 things:
1. the main topic to be tested, 2. the question type, 3. the difficulty level, and 4. the required skillsets to solve the question.
You will then reply with an appropriate math question as well as the step-by-step solution for the question. Reply in four parts.
1. Question Information:
Topic(s) Tested: ...
Question Type: ...
Difficulty Level: ...
Skills required: ...
Case Study: True/False
2. Question: ....
3. Step by Step Solution: ...
4. Final answer(s): ..."""},
{"role": "assistant", "content": f"OK"}]
openai.api_key = os.environ['OPENAI']
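# The key above is read from the OPENAI environment variable (e.g. a Hugging Face
# Space secret). Illustrative local setup, assuming a Unix-like shell:
#   export OPENAI="sk-..."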
def predict(input, msg_history=STARTING_PROMPT):
    # Append the user's message, query the chat model, and append the reply so the
    # conversation context is preserved across calls.
    msg_history.append({"role": "user", "content": f"{input}"})
    print(msg_history)
    completion = openai.ChatCompletion.create(model="gpt-3.5-turbo", messages=msg_history, temperature=0.8)
    response = completion.choices[0].message.content
    msg_history.append({"role": "assistant", "content": f"{response}"})
    # The two-element return maps onto the two Gradio outputs: [question, msg_history].
    return [response, msg_history]
def prompt_builder_predict(questionType=None, difficulty=0, topic=None, prerequisites=None, caseStudy=False, additionalPrompt=None, msg_history=STARTING_PROMPT, latex=False):
    # Build a single natural-language prompt from the UI selections, then delegate to predict().
    level = ['Very Easy', 'Easy', 'Medium', 'Difficult', 'Extremely Difficult']
    prompt = 'randomly generate a math question '
    if topic:
        prompt = prompt + f'on the topic of {topic}. '
    if difficulty:
        prompt = prompt + f'The difficulty level of the question should be: {level[difficulty-1]}, which means that it must require at least {difficulty} steps to solve. '
    if questionType:
        prompt = prompt + f'The question type should be in {questionType} format. '
    if prerequisites:
        prompt = prompt + f"This question will require the use of the following methods to solve: {' and '.join(prerequisites)}. "
    if caseStudy:
        prompt = prompt + 'This question must be in the form of a case study that tests the application of the topic in a real-life scenario. '
    if latex:
        prompt = prompt + 'Display all mathematical equation parts of the question in LaTeX format. '
    if additionalPrompt:
        prompt = prompt + f"In addition, {additionalPrompt}."
    return predict(prompt, msg_history)
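# For illustration only (hypothetical call, not executed by this file): a selection such as
#   prompt_builder_predict("MCQ", 3, "Derivatives", ["Chain Rule"], latex=True)
# builds a prompt along the lines of:
#   "randomly generate a math question on the topic of Derivatives. The difficulty level
#    of the question should be: Medium, which means that it must require at least 3 steps
#    to solve. The question type should be in MCQ format. ..."
# and forwards it, together with the running message history, to predict().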
with gr.Blocks() as demo:
    # Per-session conversation state, seeded with the starting prompt exchange.
    msg_history = gr.State(STARTING_PROMPT)
    gr.Markdown(
"""
# Math Question Generator
This webapp demonstrates an API plugin that can be used with LearningANTs to generate questions. The response will contain three parts: [Question, Step by Step Solution, Final answer].
""")
    with gr.Row():
        questionType = gr.Radio(["MCQ", "True or False", "Short Response"], value='Short Response', label="Question Type")
        difficulty = gr.Slider(1, 5, value=3, step=1, label="Difficulty Level", info="Choose between 1 and 5")
    with gr.Row():
        topic = gr.Dropdown(["Simultaneous Equation", "Linear Equation", "Derivatives", "Integrals", "Optimization"], value='Simultaneous Equation', label="Main Testing Topic")
        prerequisites = gr.Dropdown(["Elimination", "Substitution", "Linear Equation", "Algebra", "Geometry", "Trigonometry", "Logarithms", "Power Rule", "Sum Rule", 'Difference Rule', "Product Rule", "Quotient Rule", 'Reciprocal Rule', "Chain Rule", "Implicit Differentiation", "Logarithmic Differentiation"], multiselect=True, interactive=True, label="Prerequisite Topics")
    with gr.Row():
        caseStudy = gr.Checkbox(label="Case Study", info="Does this question test the application of theory in real-life scenarios?")
        latex = gr.Checkbox(label="LaTeX", value=True, info="Display all equations in LaTeX format?")
        additionalInfo = gr.Textbox(label="Additional information (prompt)", placeholder="Give a scenario where Jim and John are working in a garden....")
    gen_btn = gr.Button("Generate A New Question")
    with gr.Row():
        question = gr.TextArea(label="Generated Question")
    gen_btn.click(fn=prompt_builder_predict, inputs=[questionType, difficulty, topic, prerequisites, caseStudy, additionalInfo, msg_history, latex], outputs=[question, msg_history])
    with gr.Row():
        prompt = gr.Textbox(label='Additional Prompt', info='Not satisfied with the result? Enter instructions to modify the question.', placeholder='Include the case study of....', visible=False)
    with gr.Row():
        modify_btn = gr.Button('Modify Question', visible=False)
    modify_btn.click(fn=predict, inputs=[prompt, msg_history], outputs=[question, msg_history])
    # restart_btn = gr.Button("Generate Another Question", visible=False)
    # Visibility/value helpers for the follow-up controls (hide_display and clear_value are currently unused).
    def show_display():
        return gr.update(visible=True)
    def hide_display():
        return gr.update(visible=False)
    def clear_value():
        return gr.update(value='')
    # Once a question has been generated, reveal the modification prompt and its button.
    question.change(fn=show_display, outputs=prompt)
    question.change(fn=show_display, outputs=modify_btn)
demo.launch(share=False)  # share=False keeps the app local; set share=True for a temporary public Gradio link.