AnishaG0201 committed on
Commit
0048548
1 Parent(s): d6832b2

Update function.py

Browse files
Files changed (1) hide show
  1. function.py +5 -2
function.py CHANGED
@@ -5,6 +5,7 @@ import streamlit as st
5
 
6
 
7
  def get_answers(questions,model):
 
8
 
9
 
10
  answer_prompt = ( "I want you to become a teacher answer this specific Question: {questions}. You should gave me a straightforward and consise explanation and answer to each one of them")
@@ -29,7 +30,7 @@ def get_answers(questions,model):
29
  def GetLLMResponse(selected_topic_level, selected_topic,num_quizzes, model):
30
  question_prompt = ('I want you to just generate question with this specification: Generate a {selected_topic_level} math quiz on the topic of {selected_topic}. Generate only {num_quizzes} questions not more and without providing answers.')
31
 
32
-
33
  if model == "Open AI":
34
  llm = OpenAI(temperature=0.7, openai_api_key=st.secrets["OPENAI_API_KEY"])
35
  questions = llm(question_prompt)
@@ -41,8 +42,10 @@ def GetLLMResponse(selected_topic_level, selected_topic,num_quizzes, model):
41
  questions = questions.content
42
  # return questions.content
43
 
 
 
44
  answers = get_answers(questions,model)
45
 
46
-
47
  return(questions,answers)
48
 
 
5
 
6
 
7
  def get_answers(questions,model):
8
+ st.write("running get answers function answering following questions",questions)
9
 
10
 
11
  answer_prompt = ( "I want you to become a teacher answer this specific Question: {questions}. You should gave me a straightforward and consise explanation and answer to each one of them")
 
30
  def GetLLMResponse(selected_topic_level, selected_topic,num_quizzes, model):
31
  question_prompt = ('I want you to just generate question with this specification: Generate a {selected_topic_level} math quiz on the topic of {selected_topic}. Generate only {num_quizzes} questions not more and without providing answers.')
32
 
33
+ st.write("running get llm response and print question prompt",question_prompt)
34
  if model == "Open AI":
35
  llm = OpenAI(temperature=0.7, openai_api_key=st.secrets["OPENAI_API_KEY"])
36
  questions = llm(question_prompt)
 
42
  questions = questions.content
43
  # return questions.content
44
 
45
+
46
+ st.write("print questions",questions)
47
  answers = get_answers(questions,model)
48
 
49
+ st.write(questions,answers)
50
  return(questions,answers)
51