import os

import google.generativeai as genai
import streamlit as st
from dotenv import load_dotenv


@st.cache_resource
def get_llm(temperature: float):
    """Configure the Gemini client and return a model cached per temperature setting."""
    load_dotenv()
    api_key = os.environ.get("API_KEY")
    genai.configure(api_key=api_key)
    model = genai.GenerativeModel(
        "gemini-pro",
        generation_config={"temperature": temperature},
    )
    return model


def get_genai_response(model, prompt, question):
    """Send the instruction prompt and the user's topic to the model and return the generated text."""
    response = model.generate_content([prompt, question])
    return response.text


st.title("Medium.com Blog Generator 🧑🏻‍💻!!!")

prompt = """Imagine you're a friendly expert at a local library.
Tailor your explanation to a general audience who might be curious about the topic
but has no prior in-depth knowledge. Make the information engaging and informative,
and keep in mind the best practices for writing on Medium.com."""

topic = st.text_input("Topic")

creativity_options = ["High Accuracy", "Balanced Accuracy & Creativity", "High Creativity"]
creativity = st.selectbox("Choose creativity level:", creativity_options)

# Map the selected creativity level to a sampling temperature.
if creativity == "High Accuracy":
    temperature = 0.1
elif creativity == "Balanced Accuracy & Creativity":
    temperature = 0.5
else:
    temperature = 0.8

submit_button = st.button("Submit")

model = get_llm(temperature)

if submit_button:
    st.header("Response")
    with st.spinner(text="Fetching response from LLM..."):
        text = get_genai_response(model, prompt, topic)
    st.write(text)
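
# --- Usage sketch (not part of the original script) ---
# Assumptions: the file is saved as app.py and the Gemini API key is available
# locally; both the filename and the key value below are illustrative.
# The code above reads the key from an environment variable named API_KEY, so a
# .env file next to the script could look like:
#   API_KEY=<your Gemini API key>
# The app can then be started locally with:
#   streamlit run app.py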