File size: 1,491 Bytes
0d09775
 
cfd5951
0d09775
 
 
 
 
 
 
ff6158d
0d09775
 
 
 
ff6158d
0d09775
 
 
 
 
 
 
 
ff6158d
0d09775
 
 
 
ff6158d
0d09775
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
from langchain_community.llms import OpenAI
from langchain_google_genai import ChatGoogleGenerativeAI
import streamlit as st

def GetLLMResponse(selected_topic_level, selected_topic, num_quizzes, model):
    """Generate math quiz questions, then answers, using the selected LLM backend.

    Args:
        selected_topic_level: Difficulty level to embed in the prompt (e.g. "easy").
        selected_topic: Math topic for the quiz questions.
        num_quizzes: Number of questions to request.
        model: Backend selector — "Open AI" or "Gemini".

    Returns:
        A (questions, answers) tuple; each element is a one-element list
        containing the raw LLM response for the respective prompt.

    Raises:
        ValueError: If *model* is not one of the supported backends.
    """
    # BUG FIX: these were plain strings, so the {placeholders} were sent to the
    # LLM literally instead of being substituted — now real f-strings.
    question_prompt = (
        f"I want you to just generate question with this specification: "
        f"Generate a {selected_topic_level} math quiz on the topic of {selected_topic}. "
        f"Generate only {num_quizzes} questions not more and without providing answers."
    )
    questions = _query_llm(question_prompt, model)

    # Same bug: {questions} must be interpolated so the answer prompt actually
    # contains the generated questions.
    answer_prompt = (
        "I want you to become a teacher answer this specific Question:\n"
        f" {questions}\n\n. You should gave me a straightforward and consise "
        "explanation and answer to each one of them"
    )
    answers = _query_llm(answer_prompt, model)

    return (questions, answers)


def _query_llm(prompt, model):
    """Dispatch *prompt* to the chosen backend and return the result as a list.

    Factored out of GetLLMResponse, where this block was duplicated verbatim
    for the question and answer prompts.
    """
    if model == "Open AI":
        llm = OpenAI(temperature=0.7, openai_api_key=st.secrets["OPENAI_API_KEY"])
        return [llm(prompt)]
    if model == "Gemini":
        llm = ChatGoogleGenerativeAI(
            model="gemini-pro", google_api_key=st.secrets["GOOGLE_API_KEY"]
        )
        return [llm.invoke(prompt)]
    # Previously an unknown model fell through and raised UnboundLocalError
    # at the return statement; fail fast with a clear message instead.
    raise ValueError(f"Unsupported model: {model!r}")