import gradio as gr
import requests

# Define a function to send the input to the FastAPI endpoint and get the response
def get_response(query):
    try:
        # Send the raw query text as the request body; time out rather than hang if the node is unreachable
        response = requests.post("http://164.52.213.121:8000/chat", data=query, timeout=120)
        # Check if the request was successful
        response.raise_for_status()
        # Extract the text from the response JSON
        return response.json().get("response", "No response from server.")
    except requests.exceptions.RequestException as e:
        return f"Error: {e}"

# Define the Gradio interface
iface = gr.Interface(
    fn=get_response,
    inputs=gr.Textbox(lines=2, placeholder="Enter your query here..."),
    outputs="text",
    title="Nidum.ai",
    description=(
        "Enter a query and get a response from the model.\n\n"
        "**Note:** This is just a basic demo. It's a quantized version, so the output may be slightly different from the original unquantized version.\n\n"
        "**Important:** This model is running in a decentralized network, which means it's distributed across various nodes rather than being hosted on a single central server. "
        "Additionally, this model doesn't have context memory, which means it doesn't remember previous interactions. "
        "For business needs, please contact info@nidum.ai."
    )
)

# Launch the Gradio app
if __name__ == "__main__":
    iface.launch()
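
# ---------------------------------------------------------------------------
# Reference sketch (not executed here): a minimal guess at what the /chat
# endpoint on the FastAPI server might look like, given that the client above
# posts the raw query string as the request body and expects a JSON object
# with a "response" key. The actual server implementation may differ, and
# run_model() below is a hypothetical hook into the model backend.
#
#   from fastapi import FastAPI, Request
#
#   app = FastAPI()
#
#   @app.post("/chat")
#   async def chat(request: Request):
#       query = (await request.body()).decode("utf-8")  # raw text body sent by the Gradio client
#       answer = run_model(query)                        # hypothetical call into the LLM backend
#       return {"response": answer}                      # JSON shape the client reads
# ---------------------------------------------------------------------------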