Chat_Bott / app.py
import gradio as gr
import requests
# Define the FastAPI API URL
API_URL = "http://127.0.0.1:8000/llm_api"  # Ensure this matches your FastAPI server address and route
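# Assumed request/response contract (the backend is not defined in this file):
# the client POSTs {"question": "..."} and expects a JSON body like
# {"response": "..."} back. Adjust these keys if your FastAPI route differs.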
def query_api(question):
"""
Function to send a question to the FastAPI backend and return the response.
"""
try:
# Send the question to the API
response = requests.post(API_URL, json={"question": question})
# Check response status and return the appropriate response
if response.status_code == 200:
return response.json().get("response", "No response from the model.")
else:
return f"Error: Unable to fetch response. Status Code: {response.status_code}"
except requests.exceptions.RequestException as e:
return f"Request failed: {e}"
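# Example usage (assuming the FastAPI server is running locally):
#   query_api("What is the capital of France?")  # -> model answer as a string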
# Create the Gradio interface
iface = gr.Interface(
    fn=query_api,
    inputs=gr.Textbox(label="Ask a Question"),
    outputs="text",
    title="Chatbot Interface",
    description="Ask any question and get responses from the LLaMA model.",
)
# Launch the Gradio interface
if __name__ == "__main__":
    try:
        iface.launch()
    except KeyboardInterrupt:
        print("Gradio interface stopped manually.")
    except Exception as e:
        print(f"An error occurred: {e}")
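# ---------------------------------------------------------------------------
# Reference only: a minimal sketch of the FastAPI backend this client assumes.
# It is not part of this app and is kept commented out. The route name,
# request/response keys, and the generate_answer helper are assumptions used
# for illustration, not the actual server implementation. Run it separately,
# e.g. with:
#   uvicorn llm_server:app --host 0.0.0.0 --port 8000
#
# # llm_server.py (hypothetical)
# from fastapi import FastAPI
# from pydantic import BaseModel
#
# app = FastAPI()
#
# class Question(BaseModel):
#     question: str
#
# def generate_answer(question: str) -> str:
#     # Placeholder: call your LLaMA model here.
#     return f"You asked: {question}"
#
# @app.post("/llm_api")
# def llm_api(payload: Question):
#     # Return the key the Gradio client reads: "response"
#     return {"response": generate_answer(payload.question)}
# ---------------------------------------------------------------------------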