import gradio as gr
import requests

history = []  # Initialize an empty list to store the conversation history
def chat_with_model(prompt, context):
    # `context` is the history Gradio passes in; this app tracks its own global `history` instead.
    global history
    # Ensure this URL matches your FastAPI configuration
    url = "http://43.205.120.87/chat"
    response = requests.get(url, json={"prompt": prompt, "history": history})
    if response.status_code == 200:
        data = response.json()
        history.append({"role": "user", "content": prompt})  # Add the user's prompt to the history
        history.append({"role": "assistant", "content": data["response"]})  # Add the model's reply to the history
        return data["response"]
    else:
        return f"Error communicating with the backend (status {response.status_code})."
# Define the Gradio chat interface
iface = gr.ChatInterface(
    fn=chat_with_model,
    textbox=gr.Textbox(placeholder="Type your message here..."),
    title="Akhil's Chatbot (powered by GPT-3.5 Turbo)",
    theme="HaleyCH/HaleyCH_Theme",
    description="Type your prompt and get a response from GPT-3.5 Turbo!",
)

iface.launch()
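
# For reference, a minimal sketch of the FastAPI backend this client assumes: the frontend
# sends {"prompt": ..., "history": [...]} to /chat and reads "response" from the JSON reply.
# The endpoint shape, Pydantic model, and OpenAI call below are assumptions for illustration,
# not the actual backend running at that address.
from fastapi import FastAPI
from pydantic import BaseModel
from openai import OpenAI

app = FastAPI()
client = OpenAI()  # assumes OPENAI_API_KEY is set in the environment

class ChatRequest(BaseModel):
    prompt: str
    history: list = []  # prior {"role", "content"} messages sent by the Gradio client

@app.get("/chat")  # GET with a JSON body matches the client above; POST would be more conventional
def chat(req: ChatRequest):
    messages = req.history + [{"role": "user", "content": req.prompt}]
    completion = client.chat.completions.create(model="gpt-3.5-turbo", messages=messages)
    return {"response": completion.choices[0].message.content}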