assist / app.py
dkwak's picture
Update app.py
ad2a0d1 verified
# app.py
import os
import time
import gradio as gr
from openai import OpenAI
import google.generativeai as genai
# --- 1. CONFIGURATION & SECRETS ---
# Load secrets from environment variables
# SECRET_PASSWORD gates the login screen (HF Space secret "PWD").
SECRET_PASSWORD = os.getenv("PWD")
# OPENAI_API_KEY is read from the secret named "API" (used for gpt-*/o* models).
OPENAI_API_KEY = os.getenv('API')
# GOOGLE_API_KEY is optional; without it the Gemini choice is unavailable.
GOOGLE_API_KEY = os.getenv("API_GOOGLE")
# Constants for the app
# System prompt prepended to every model call (see ask_llm).
SYSTEM_PROMPT = "You are a helpful AI assistant. Your answer should be as detailed as possible."
# Radio-button choices; routing in ask_llm keys off the name prefix.
MODEL_CHOICES = ["gpt-4.1", "o4-mini", "gemini-2.5-pro"] # Using updated model names
# --- 2. INITIALIZE API CLIENTS ---
# Fail fast when a required secret is missing: the app cannot run without
# the login password or the OpenAI key.
if not SECRET_PASSWORD:
    raise ValueError("Secret 'PWD' not found. Set it in the Hugging Face Space secrets.")
if not OPENAI_API_KEY:
    raise ValueError("Secret 'API' (for OpenAI) not found. Set it in the Hugging Face Space secrets.")
if not GOOGLE_API_KEY:
    # Gemini is optional — warn instead of raising.
    print("Warning: 'API_GOOGLE' not found. The Gemini model will be unavailable.")

# OpenAI client is always created (key presence enforced above).
client = OpenAI(api_key=OPENAI_API_KEY)

# Configure the Google Gemini SDK only when its key is present; a bad key
# should not kill the whole app, so failures are logged and swallowed.
if GOOGLE_API_KEY:
    try:
        genai.configure(api_key=GOOGLE_API_KEY)
    except Exception as e:
        print(f"Could not configure Google AI client: {e}")
# --- 3. RESPONSIVE CSS STYLING (CORRECTED) ---
# Injected via gr.Blocks(css=CSS); hides Gradio's default footer.
CSS = """
footer {visibility: hidden}
"""
# --- 4. CORE FUNCTIONS ---
def check_pw(pw):
    """Validate the entered password and toggle login/main-app visibility.

    Returns a (status message, login-page update, main-app update) tuple
    consumed by the btn_login / pw event handlers.
    """
    if pw != SECRET_PASSWORD:
        # Small delay to slow down brute-force attempts.
        time.sleep(1)
        return ("🚫 Wrong password.", gr.update(visible=True), gr.update(visible=False))
    # Correct password: hide the login page, reveal the main app.
    return ("βœ… Welcome!", gr.update(visible=False), gr.update(visible=True))
def add_user_message(user_message, history):
    """Append the user's turn to the chat history and clear the input box.

    Returns ("", new_history); the original history list is not mutated.
    """
    updated = list(history)
    updated.append({"role": "user", "content": user_message})
    return "", updated
def generate_bot_response(history, model_name):
    """
    Call the selected LLM and simulate streaming by yielding the chat
    history with the assistant reply revealed incrementally.

    Args:
        history: list of OpenAI-style message dicts ({"role", "content"});
            mutated in place with the assistant's turn.
        model_name: one of MODEL_CHOICES; routing is done by ask_llm().

    Yields:
        The updated history after each appended chunk.
    """
    response_text = ask_llm(history, model_name)
    history.append({"role": "assistant", "content": ""})
    # Reveal the reply in multi-character chunks rather than one character
    # per 10 ms sleep: the per-character pacing made a 3,000-char answer
    # take ~30 seconds to finish rendering.
    chunk_size = 20
    for start in range(0, len(response_text), chunk_size):
        history[-1]["content"] += response_text[start:start + chunk_size]
        time.sleep(0.01)
        yield history
    if not response_text:
        # Ensure at least one yield for an empty reply so the UI refreshes.
        yield history
def ask_llm(history, model_name):
    """Generic function to call the appropriate LLM API.

    Dispatches on the model-name prefix: "gpt-"/"o..." -> OpenAI,
    "gemini-" -> Google.  Returns the reply text, or a user-facing
    Markdown error string if the call fails.
    """
    try:
        if model_name.startswith('gpt-') or model_name.startswith('o'):
            # OpenAI expects the system prompt as the leading message.
            messages_to_send = [{"role": "system", "content": SYSTEM_PROMPT}] + history
            response = client.chat.completions.create(
                model=model_name, messages=messages_to_send, stream=False
            )
            return response.choices[0].message.content
        elif model_name.startswith('gemini-'):
            if not GOOGLE_API_KEY:
                raise ValueError("Google API Key is not configured.")
            # Make sure there are messages to process for Gemini
            if not history:
                return "Please start the conversation."
            # Gemini takes the system prompt as a model-level instruction,
            # not as a history message.
            gemini_model = genai.GenerativeModel(
                model_name=model_name, system_instruction=SYSTEM_PROMPT
            )
            # Gemini requires an alternating user/model history; the last
            # user message is sent separately via send_message.
            gemini_history = convert_history_to_gemini(history[:-1]) # History excluding the last user message
            user_message = history[-1]["content"]
            chat = gemini_model.start_chat(history=gemini_history)
            response = chat.send_message(user_message)
            return response.text
        else:
            raise ValueError(f"Unknown model provider for model: {model_name}")
    except Exception as e:
        # Best-effort boundary: surface the failure in the chat instead of
        # crashing the Gradio event handler.
        print(f"Error calling API for model {model_name}: {e}")
        return f"❌ **Error:** Could not get a response from `{model_name}`. Please check the API key and server logs.\n\n_Details: {e}_"
def convert_history_to_gemini(openai_history):
    """Convert OpenAI-formatted history to Gemini-formatted history.

    "assistant" becomes "model"; any role other than user/assistant
    (e.g. system) is dropped.
    """
    role_map = {"assistant": "model", "user": "user"}
    converted = []
    for msg in openai_history:
        gemini_role = role_map.get(msg["role"])
        if gemini_role is None:
            continue  # skip non-chat roles
        converted.append({"role": gemini_role, "parts": [msg["content"]]})
    return converted
# --- 5. BUILD GRADIO APP ---
with gr.Blocks(css=CSS, title='Hello, Yoda.') as demo:
    # --- Login page (visible at startup) ---
    with gr.Column(elem_id="login_page", visible=True) as login_page:
        gr.Markdown("## πŸ”’ Please log in to access this App")
        pw = gr.Textbox(label="Password", type="password", placeholder="Enter the password")
        btn_login = gr.Button("Log in")
        msg_login = gr.Textbox(value="", interactive=False, show_label=False, container=False)

    # --- Main chat app (revealed by check_pw after a successful login) ---
    with gr.Column(elem_id="main_app", visible=False) as main_app:
        gr.Markdown("<h1><center>Hello, Yoda.</center></h1>")
        model_selector = gr.Radio(
            choices=MODEL_CHOICES, value=MODEL_CHOICES[0], label="Choose a Model"
        )
        chatbot = gr.Chatbot(
            elem_id="chatbot",
            label="Chatbot",
            bubble_full_width=False,
            height=600,  # fixed height for better layout control
            type="messages",
        )
        with gr.Row(elem_id="input_row"):
            msg = gr.Textbox(
                elem_id="msg_textbox",
                show_label=False,
                placeholder="Ask me a question... (Shift+Enter for new line, Enter for send)",
                scale=4,
                lines=1,  # start with a single line
                autofocus=True,
            )
            submit_btn = gr.Button("Submit", scale=1, variant="primary")
            clear_btn = gr.ClearButton([msg, chatbot])

    # --- Event wiring ---
    btn_login.click(check_pw, pw, [msg_login, login_page, main_app])
    pw.submit(check_pw, pw, [msg_login, login_page, main_app])  # Enter key logs in too

    # Two-step flow: append the user's turn immediately, then stream the reply.
    msg.submit(add_user_message, [msg, chatbot], [msg, chatbot], queue=False).then(
        generate_bot_response, [chatbot, model_selector], chatbot
    )
    submit_btn.click(add_user_message, [msg, chatbot], [msg, chatbot], queue=False).then(
        generate_bot_response, [chatbot, model_selector], chatbot
    )
# --- 6. LAUNCH APP ---
if __name__ == '__main__':
    # Start the Gradio server when run as a script (HF Spaces entry point).
    demo.launch()