# QandA_chatbot / app.py
import gradio as gr
import os
import requests  # For HTTP requests to the Gemini API

# Read the API key from the environment, stripping stray whitespace or newline
# characters. Defaulting to "" avoids an AttributeError when the variable is unset.
gemini_api_key = (os.getenv("GEMINI_API_KEY") or "").strip()
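# Optional guard (not in the original code): fail fast with a clear message when the
# key is missing, rather than sending unauthenticated requests later. In a Hugging Face
# Space the key would typically be added as a Space secret; the error text here is
# only illustrative.
if not gemini_api_key:
    raise RuntimeError(
        "GEMINI_API_KEY is not set. Add it as a secret/environment variable before launching the app."
    )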
# This function handles one turn of the conversation.
def chat_with_bot(user_input, history=None):
    if history is None:
        history = []  # Initialize the history on the first turn

    # Append the user's question to the conversation history
    history.append({"role": "user", "content": user_input})

    try:
        # Send the conversation history to the Gemini API for processing.
        # NOTE: the URL, model name, and payload below are OpenAI-style placeholders kept
        # from the original code; replace them with the real Gemini endpoint and request
        # format (see the alternative sketch after this function).
        response = requests.post(
            "https://api.gemini.google.com/v1/chat/completions",  # Placeholder endpoint
            headers={"Authorization": f"Bearer {gemini_api_key}"},
            json={
                "model": "gemini-1",  # Placeholder model identifier
                "messages": history,
                "temperature": 0.7,
                "max_tokens": 150,
                "top_p": 1,
                "frequency_penalty": 0,
                "presence_penalty": 0,
            },
            timeout=30,
        )

        # Log the response status code and body for debugging
        print("Status Code:", response.status_code)
        print("Response Text:", response.text)

        # Try to parse the response body as JSON
        try:
            response_data = response.json()  # Raises ValueError if the body isn't valid JSON
        except ValueError:
            return f"Error: Invalid JSON response from Gemini API. Response text: {response.text}", history

        if response.status_code == 200:
            # Extract the assistant's reply
            bot_response = response_data['choices'][0]['message']['content']

            # Append the bot's response to the conversation history
            history.append({"role": "assistant", "content": bot_response})

            # Return both the new bot response and the updated conversation history
            return bot_response, history
        else:
            # Surface the API's error message when the status code isn't 200
            return f"Error: {response_data.get('error', {}).get('message', 'Unknown error')}", history
    except Exception as e:
        # Any failure in the request itself (network error, timeout, etc.)
        return f"Error: {str(e)}", history
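
# Alternative sketch (not part of the original app): the request above posts an
# OpenAI-style payload to a placeholder URL. Google's public Gemini REST API instead
# lives at generativelanguage.googleapis.com and expects a "contents" list with
# "user"/"model" roles and "parts". The endpoint, the model name ("gemini-1.5-flash"),
# and the field names below reflect that API as commonly documented, but they are
# assumptions here and should be verified against the current Gemini docs before use.
def chat_with_gemini_rest(user_input, history=None):
    if history is None:
        history = []  # history holds {"role": ..., "parts": [{"text": ...}]} dicts

    history.append({"role": "user", "parts": [{"text": user_input}]})

    url = (
        "https://generativelanguage.googleapis.com/v1beta/models/"
        f"gemini-1.5-flash:generateContent?key={gemini_api_key}"
    )
    resp = requests.post(
        url,
        json={
            "contents": history,
            "generationConfig": {"temperature": 0.7, "maxOutputTokens": 150, "topP": 1},
        },
        timeout=30,
    )
    resp.raise_for_status()

    # The reply text sits under candidates -> content -> parts in the response JSON.
    bot_response = resp.json()["candidates"][0]["content"]["parts"][0]["text"]
    history.append({"role": "model", "parts": [{"text": bot_response}]})
    return bot_response, history
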
# Define the Gradio interface with a Submit button
chatbot_interface = gr.Interface(
    fn=chat_with_bot,
    inputs=[
        gr.Textbox(label="Ask me anything", placeholder="Type your question here..."),
        gr.State(),  # Conversation history carried between turns
    ],
    outputs=[
        gr.Textbox(label="Bot Response", interactive=False),  # Read-only output box
        gr.State(),  # Updated conversation history
    ],
    live=False,  # Disable live mode so the request is sent only on submit
    title="AI-Powered Multi-Step Q&A Chatbot",
    description="This chatbot can handle multi-step conversations. Ask any question and have a conversation!",
    flagging_mode="never",  # Newer Gradio parameter that disables flagging
)
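
# Side note (not in the original): newer Gradio versions also provide gr.ChatInterface,
# which renders a chat window and tracks the visible message history itself. A minimal,
# hedged sketch is left commented out below; it drops the server-side history that
# chat_with_bot maintains, so it is a starting point rather than a drop-in replacement.
# chatbot_interface = gr.ChatInterface(
#     fn=lambda message, history: chat_with_bot(message)[0],
#     title="AI-Powered Multi-Step Q&A Chatbot",
# )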
# Launch the app
chatbot_interface.launch() # No need for share=True in Hugging Face Spaces