AnishaG0201 committed on
Commit
a1b20d6
1 Parent(s): 221a124

Update function.py

Browse files
Files changed (1) hide show
  1. function.py +19 -8
function.py CHANGED
@@ -2,34 +2,45 @@ from langchain_community.llms import OpenAI
2
  from langchain_google_genai import ChatGoogleGenerativeAI
3
  import streamlit as st
4
 
5
def GetLLMResponse(selected_topic_level, selected_topic, num_quizzes, model):
    """Generate math quiz questions and their answers via the selected LLM.

    Args:
        selected_topic_level: Difficulty level of the quiz (e.g. "Easy").
        selected_topic: Math topic the questions should cover.
        num_quizzes: How many questions to generate.
        model: LLM backend to use — "Open AI" or "Gemini".

    Returns:
        Tuple ``(questions, answers)``; each is a single-element list holding
        the raw LLM response for the corresponding prompt.

    Raises:
        ValueError: If ``model`` is not a supported backend.
    """
    # f-string so the selections are actually interpolated into the prompt;
    # the original plain string sent the literal "{selected_topic}" text.
    question_prompt = (
        f"I want you to just generate question with this specification: "
        f"Generate a {selected_topic_level} math quiz on the topic of "
        f"{selected_topic}. Generate only {num_quizzes} questions not more "
        f"and without providing answers."
    )

    if model == "Open AI":
        llm = OpenAI(temperature=0.7, openai_api_key=st.secrets["OPENAI_API_KEY"])
        questions = [llm(question_prompt)]
    elif model == "Gemini":
        llm = ChatGoogleGenerativeAI(model="gemini-pro", google_api_key=st.secrets["GOOGLE_API_KEY"])
        questions = [llm.invoke(question_prompt)]
    else:
        # Fail loudly instead of hitting a NameError on `questions` below.
        raise ValueError(f"Unsupported model: {model!r}")

    # f-string so the generated questions are embedded in the answer prompt.
    answer_prompt = (
        f"I want you to become a teacher answer this specific Question:\n"
        f" {questions}\n\n. You should gave me a straightforward and consise "
        f"explanation and answer to each one of them"
    )

    if model == "Open AI":
        llm = OpenAI(temperature=0.7, openai_api_key=st.secrets["OPENAI_API_KEY"])
        answers = [llm(answer_prompt)]
    elif model == "Gemini":
        llm = ChatGoogleGenerativeAI(model="gemini-pro", google_api_key=st.secrets["GOOGLE_API_KEY"])
        answers = [llm.invoke(answer_prompt)]

    return (questions, answers)
35
 
 
2
  from langchain_google_genai import ChatGoogleGenerativeAI
3
  import streamlit as st
4
 
 
 
5
 
6
 
7
def get_answers(questions, model):
    """Ask the selected LLM for answers to the given quiz questions.

    Args:
        questions: The question text (or collection of questions) to answer.
        model: LLM backend to use — "Open AI" or "Gemini".

    Returns:
        For "Open AI": a single-element list holding the raw completion.
        For "Gemini": the response text (``.content`` of the invocation).

    Raises:
        ValueError: If ``model`` is not a supported backend.
    """
    # f-string so `questions` is interpolated into the prompt; the original
    # plain string sent the literal "{questions}" placeholder to the LLM.
    answer_prompt = (
        f"I want you to become a teacher answer this specific Question:\n"
        f" {questions}\n\n. You should gave me a straightforward and consise "
        f"explanation and answer to each one of them"
    )

    if model == "Open AI":
        llm = OpenAI(temperature=0.7, openai_api_key=st.secrets["OPENAI_API_KEY"])
        answers = [llm(answer_prompt)]
    elif model == "Gemini":
        llm = ChatGoogleGenerativeAI(model="gemini-pro", google_api_key=st.secrets["GOOGLE_API_KEY"])
        answers = (llm.invoke(answer_prompt)).content
    else:
        # Fail loudly instead of hitting a NameError on `answers` below.
        raise ValueError(f"Unsupported model: {model!r}")

    return answers
24
 
 
25
 
26
+
27
+
28
def GetLLMResponse(selected_topic_level, selected_topic, num_quizzes, model):
    """Generate quiz questions with the selected LLM, then fetch their answers.

    Args:
        selected_topic_level: Difficulty level of the quiz (e.g. "Easy").
        selected_topic: Math topic the questions should cover.
        num_quizzes: How many questions to generate.
        model: LLM backend to use — "Open AI" or "Gemini".

    Returns:
        Tuple ``(questions, answers)``. ``questions`` is a single-element
        list for "Open AI" or the response text for "Gemini"; ``answers``
        is whatever ``get_answers`` returns for the same backend.

    Raises:
        ValueError: If ``model`` is not a supported backend.
    """
    # f-string so the selections are actually interpolated into the prompt;
    # the original plain string sent the literal "{selected_topic}" text.
    question_prompt = (
        f"I want you to just generate question with this specification: "
        f"Generate a {selected_topic_level} math quiz on the topic of "
        f"{selected_topic}. Generate only {num_quizzes} questions not more "
        f"and without providing answers."
    )

    if model == "Open AI":
        llm = OpenAI(temperature=0.7, openai_api_key=st.secrets["OPENAI_API_KEY"])
        questions = [llm(question_prompt)]
    elif model == "Gemini":
        llm = ChatGoogleGenerativeAI(model="gemini-pro", google_api_key=st.secrets["GOOGLE_API_KEY"])
        questions = (llm.invoke(question_prompt)).content
    else:
        # Fail loudly instead of hitting a NameError on `questions` below.
        raise ValueError(f"Unsupported model: {model!r}")

    answers = get_answers(questions, model)

    return (questions, answers)
46