import gradio as gr
import requests
# Define a function to send the input to the FastAPI endpoint and get the response
def get_response(query):
    try:
        # Send the query as the raw request body to the FastAPI endpoint
        response = requests.post("http://164.52.213.121:8000/chat", data=query)
        # Raise an exception if the request returned an error status
        response.raise_for_status()
        # Extract the text from the response JSON
        return response.json().get("response", "No response from server.")
    except requests.exceptions.RequestException as e:
        return f"Error: {e}"
# Define the Gradio interface
iface = gr.Interface(
    fn=get_response,
    inputs=gr.Textbox(lines=2, placeholder="Enter your query here..."),
    outputs="text",
    title="Nidum.ai",
    description=(
        "Enter a query and get a response from the model.\n\n"
        "**Note:** This is a basic demo running a quantized model, so its output may differ slightly from the original unquantized model. "
        "The model also has no context memory, so it does not remember previous interactions. "
        "For business needs, please contact info@nidum.ai."
    )
)
# Launch the Gradio app
if __name__ == "__main__":
    iface.launch()
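
# For reference, a minimal sketch of the FastAPI "/chat" endpoint this client
# assumes (hypothetical; the actual server behind 164.52.213.121:8000 may differ).
# It reads the raw request body, matching the `data=query` call above, and returns
# JSON with a "response" field. Kept commented out so this file remains a pure
# Gradio client.
#
#   from fastapi import FastAPI, Request
#
#   app = FastAPI()
#
#   @app.post("/chat")
#   async def chat(request: Request):
#       query = (await request.body()).decode("utf-8")
#       return {"response": f"You said: {query}"}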