|
import gradio as gr |
|
import requests |
|
|
|
|
|
def get_response(query):
    """Send *query* to the remote chat endpoint and return its reply text.

    The query is posted as the raw request body (``data=``, not JSON) —
    NOTE(review): this assumes the server parses a plain-text body; confirm
    against the /chat handler.

    Returns the server's ``response`` field, a fallback message when that
    field is absent, or an ``"Error: ..."`` string on any request failure
    (connection error, timeout, or non-2xx status).
    """
    try:
        # Explicit timeout: requests has NO default timeout, so without one
        # a stalled server would hang this call (and the Gradio UI) forever.
        response = requests.post(
            "http://164.52.213.121:8000/chat",
            data=query,
            timeout=30,
        )

        # Surface HTTP-level failures (4xx/5xx) as RequestException.
        response.raise_for_status()

        return response.json().get("response", "No response from server.")

    except requests.exceptions.RequestException as e:
        # Covers connection errors, timeouts, and bad status codes alike;
        # the UI shows the error text instead of crashing.
        return f"Error: {e}"
|
|
|
|
|
# User-facing description shown under the title; kept as a named constant so
# the Interface construction below stays compact.
_DESCRIPTION = (
    "Enter a query and get a response from the model.\n\n"
    "**Note:** This is just a basic demo. It's a quantized version, so the output may be slightly different from the original unquantized version.\n\n"
    "**Important:** This model is running in a decentralized network, which means it's distributed across various nodes rather than being hosted on a single central server. "
    "Additionally, this model doesn't have context memory, which means it doesn't remember previous interactions. "
    "For business needs, please contact info@nidum.ai."
)

# Wire the query box straight into get_response; the plain-text output shows
# either the model reply or an "Error: ..." message.
iface = gr.Interface(
    fn=get_response,
    inputs=gr.Textbox(lines=2, placeholder="Enter your query here..."),
    outputs="text",
    title="Nidum.ai",
    description=_DESCRIPTION,
)
|
|
|
|
|
# Start the Gradio web server only when run as a script (not on import).
if __name__ == "__main__":

    iface.launch()
|
|