# app.py
import os
import time
import gradio as gr
from openai import OpenAI
import google.generativeai as genai
# --- 1. CONFIGURATION & SECRETS ---
# Load secrets from environment variables
SECRET_PASSWORD = os.getenv("PWD")
OPENAI_API_KEY = os.getenv('API')
GOOGLE_API_KEY = os.getenv("API_GOOGLE")
# Constants for the app
SYSTEM_PROMPT = "You are a helpful AI assistant. Your answer should be as detailed as possible."
MODEL_CHOICES = ["gpt-4.1", "o4-mini", "gemini-2.5-pro"] # Using updated model names
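# Note: on a Hugging Face Space these values are set under the Space's
# "Settings -> Variables and secrets" panel; locally they must already be in the
# process environment when this module runs. Caveat (environment-dependent):
# "PWD" is also a standard shell variable holding the current working directory,
# so os.getenv("PWD") may return a path locally rather than the intended password.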
# --- 2. INITIALIZE API CLIENTS ---
# Check if secrets are loaded correctly
if not SECRET_PASSWORD:
    raise ValueError("Secret 'PWD' not found. Set it in the Hugging Face Space secrets.")
if not OPENAI_API_KEY:
    raise ValueError("Secret 'API' (for OpenAI) not found. Set it in the Hugging Face Space secrets.")
if not GOOGLE_API_KEY:
    print("Warning: 'API_GOOGLE' not found. The Gemini model will be unavailable.")
# Initialize OpenAI client
client = OpenAI(api_key=OPENAI_API_KEY)
# Configure Google Gemini client if the key is available
if GOOGLE_API_KEY:
    try:
        genai.configure(api_key=GOOGLE_API_KEY)
    except Exception as e:
        print(f"Could not configure Google AI client: {e}")
# --- 3. CSS STYLING ---
CSS = """
footer {visibility: hidden}
"""
# --- 4. CORE FUNCTIONS ---
def check_pw(pw):
    """Validates the password and shows/hides the main app."""
    if pw == SECRET_PASSWORD:
        return ("✅ Welcome!", gr.update(visible=False), gr.update(visible=True))
    else:
        time.sleep(1)  # Add a small delay to slow down brute-force guessing
        return ("🚫 Wrong password.", gr.update(visible=True), gr.update(visible=False))
def add_user_message(user_message, history):
    """Appends the user's message to the chat history."""
    return "", history + [{"role": "user", "content": user_message}]
def generate_bot_response(history, model_name):
    """
    Calls the appropriate LLM, streams the response back to the chatbot,
    and handles potential errors gracefully.
    """
    response_text = ask_llm(history, model_name)
    history.append({"role": "assistant", "content": ""})
    # Stream the response back
    for char in response_text:
        history[-1]["content"] += char
        time.sleep(0.01)
        yield history
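# Note: ask_llm() returns the full completion (stream=False), so the loop above
# only simulates token streaming; each `yield` hands the updated history back to
# Gradio, which re-renders the chatbot, and time.sleep(0.01) paces the effect.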
def ask_llm(history, model_name):
    """Generic function to call the appropriate LLM API."""
    try:
        if model_name.startswith('gpt-') or model_name.startswith('o'):
            messages_to_send = [{"role": "system", "content": SYSTEM_PROMPT}] + history
            response = client.chat.completions.create(
                model=model_name, messages=messages_to_send, stream=False
            )
            return response.choices[0].message.content
        elif model_name.startswith('gemini-'):
            if not GOOGLE_API_KEY:
                raise ValueError("Google API Key is not configured.")
            # Make sure there are messages to process for Gemini
            if not history:
                return "Please start the conversation."
            gemini_model = genai.GenerativeModel(
                model_name=model_name, system_instruction=SYSTEM_PROMPT
            )
            # Gemini requires an alternating user/model history
            gemini_history = convert_history_to_gemini(history[:-1])  # History excluding the last user message
            user_message = history[-1]["content"]
            chat = gemini_model.start_chat(history=gemini_history)
            response = chat.send_message(user_message)
            return response.text
        else:
            raise ValueError(f"Unknown model provider for model: {model_name}")
    except Exception as e:
        print(f"Error calling API for model {model_name}: {e}")
        return f"❌ **Error:** Could not get a response from `{model_name}`. Please check the API key and server logs.\n\n_Details: {e}_"
def convert_history_to_gemini(openai_history):
    """Converts OpenAI-formatted history to Gemini-formatted history."""
    # Filter out any non-user/assistant messages if necessary
    filtered_history = [msg for msg in openai_history if msg["role"] in ["user", "assistant"]]
    return [
        {"role": "model" if msg["role"] == "assistant" else "user", "parts": [msg["content"]]}
        for msg in filtered_history
    ]
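# Illustrative only (hypothetical values): this conversion turns
#   [{"role": "user", "content": "Hi"}, {"role": "assistant", "content": "Hello!"}]
# into the Gemini-style history
#   [{"role": "user", "parts": ["Hi"]}, {"role": "model", "parts": ["Hello!"]}]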
# --- 5. BUILD GRADIO APP ---
with gr.Blocks(css=CSS, title='Hello, Yoda.') as demo:
    # Login Page - Initially Visible
    with gr.Column(elem_id="login_page", visible=True) as login_page:
        gr.Markdown("## 🔒 Please log in to access this App")
        pw = gr.Textbox(label="Password", type="password", placeholder="Enter the password")
        btn_login = gr.Button("Log in")
        msg_login = gr.Textbox(value="", interactive=False, show_label=False, container=False)
    # Main App - Initially Hidden
    with gr.Column(elem_id="main_app", visible=False) as main_app:
        gr.Markdown("<h1><center>Hello, Yoda.</center></h1>")
        model_selector = gr.Radio(
            choices=MODEL_CHOICES, value=MODEL_CHOICES[0], label="Choose a Model"
        )
        chatbot = gr.Chatbot(
            elem_id="chatbot",
            label="Chatbot",
            bubble_full_width=False,
            height=600,  # Set a height for better layout control
            type="messages"
        )
        with gr.Row(elem_id="input_row"):
            msg = gr.Textbox(
                elem_id="msg_textbox",
                show_label=False,
                placeholder="Ask me a question... (Shift+Enter for new line, Enter to send)",
                scale=4,
                lines=1,  # Start with a single line
                autofocus=True
            )
            submit_btn = gr.Button("Submit", scale=1, variant="primary")
            clear_btn = gr.ClearButton([msg, chatbot])

    # Event Listeners
    btn_login.click(check_pw, pw, [msg_login, login_page, main_app])
    pw.submit(check_pw, pw, [msg_login, login_page, main_app])  # Allow login on Enter key
    msg.submit(add_user_message, [msg, chatbot], [msg, chatbot], queue=False).then(
        generate_bot_response, [chatbot, model_selector], chatbot
    )
    submit_btn.click(add_user_message, [msg, chatbot], [msg, chatbot], queue=False).then(
        generate_bot_response, [chatbot, model_selector], chatbot
    )
# --- 6. LAUNCH APP ---
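# Because generate_bot_response is a generator, streaming relies on Gradio's
# queue. Recent Gradio releases enable the queue by default; on older versions
# you may need to call demo.queue() before launch() (version-dependent).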
if __name__ == '__main__':
    demo.launch()