# nyxion-rag / app.py
import os
import gradio as gr
import google.generativeai as genai
# Gemini setup
# The API key is read from the environment (e.g. a secret in the Space Settings);
# it should never be hardcoded in source.
GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")
if not GOOGLE_API_KEY:
    raise ValueError("GOOGLE_API_KEY must be set in Space Settings!")
genai.configure(api_key=GOOGLE_API_KEY)

# The model name can be overridden via the GEMINI_MODEL environment variable.
GEMINI_MODEL = os.getenv("GEMINI_MODEL", "gemini-2.0-flash")
print(f"[Q&A] Using Gemini model: {GEMINI_MODEL}")
# List available models for debugging
try:
    print("[Q&A] Available models:")
    for m in genai.list_models():
        if 'generateContent' in m.supported_generation_methods:
            print(f" - {m.name}")
except Exception as e:
    print(f"[Q&A] Could not list models: {e}")
model = genai.GenerativeModel(GEMINI_MODEL)
def answer_question(question: str, temperature: float = 0.7):
    """Generate an answer using Gemini."""
    if not question.strip():
        return "Please enter a question.", {"status": "idle"}
    try:
        print(f"[Q&A] Processing question with temperature={temperature}")
        response = model.generate_content(
            question,
            generation_config=genai.GenerationConfig(
                temperature=temperature,
                max_output_tokens=2048,
            )
        )
        answer = response.text
        print("[Q&A] Response received successfully")
        return answer, {
            "status": "success",
            "model": GEMINI_MODEL,
            "temperature": temperature
        }
    except Exception as e:
        error_msg = (
            f"Error: {str(e)}\n\n"
            "Try one of these models in Space Settings:\n"
            "- gemini-1.5-flash-latest\n"
            "- gemini-1.5-pro-latest\n"
            "- models/gemini-1.5-flash\n"
            "- models/gemini-1.5-pro"
        )
        print(f"[Q&A] Error: {e}")
        return error_msg, {"status": "error", "error": str(e)}
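# Illustrative call (hypothetical inputs, shown for reference only):
#   text, meta = answer_question("What is retrieval-augmented generation?", temperature=0.2)
#   text -> the model's answer as text
#   meta -> {"status": "success", "model": GEMINI_MODEL, "temperature": 0.2}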
# UI
with gr.Blocks(theme=gr.themes.Soft(), title="Nyxion Labs Q&A") as demo:
    gr.Markdown("""
# Nyxion Labs · AI Q&A
Ask any question and get AI-generated answers.
Powered by Google Gemini.
""")
    with gr.Row():
        with gr.Column(scale=4):
            question = gr.Textbox(
                label="Ask a question",
                placeholder="e.g., What is the capital of Pakistan?",
                lines=3
            )
        with gr.Column(scale=1):
            temperature = gr.Slider(
                0, 1,
                value=0.7,
                step=0.1,
                label="Temperature",
                info="Higher = more creative"
            )
    btn = gr.Button("Ask", variant="primary", size="lg")
    answer = gr.Markdown(label="Answer")
    meta = gr.JSON(label="System Status")

    btn.click(answer_question, [question, temperature], [answer, meta])
    question.submit(answer_question, [question, temperature], [answer, meta])
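    # The "Ask" button and pressing Enter in the textbox both run answer_question,
    # sending the answer to the Markdown pane and request metadata to the JSON pane.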
    # Show startup status in the JSON pane when the page first loads.
    demo.load(
        lambda: {"status": "ready", "model": GEMINI_MODEL},
        inputs=None,
        outputs=meta
    )
if __name__ == "__main__":
    demo.launch()
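# Running `python app.py` starts the Gradio server with default settings; a hosted
# Space typically uses this same entry point, so no extra launch arguments are assumed here.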