# CHAT / app.py — Hugging Face Space by ar08 (commit e216e88, verified)
import aiohttp
import asyncio
import json
import gradio as gr
import os
# --- API endpoint configuration ---
# The backend base URL must be supplied through the URL environment variable.
BASE_URL = os.getenv("URL")
if not BASE_URL:
    raise ValueError("Environment variable 'URL' not set")
TOKEN_URL = f"{BASE_URL}/get-token"
CHAT_URL = f"{BASE_URL}/conversation"

# Mutable module-level state: the API session token and the chat transcript.
token = ""
messHistory: list = []
async def chat(messList):
    """Send the message history to the chat API and stream back the reply.

    Args:
        messList: list of {"role": ..., "content": ...} message dicts.

    Returns:
        The assistant's full reply as a string (also appended to the
        module-level ``messHistory``), or an error string on a non-200
        response from the chat endpoint.
    """
    global token
    async with aiohttp.ClientSession() as session:
        # Request a session token lazily, only on the first call.
        if token == "":
            async with session.get(TOKEN_URL) as resp:
                data = await resp.json()
                token = data["token"]
        body = {
            "token": token,
            "message": messList,
            "stream": True
        }
        fullmessage = ""
        async with session.post(CHAT_URL, json=body) as resp:
            if resp.status != 200:
                return "Error occurred during the chat process."
            # The server streams newline-delimited JSON objects; accumulate
            # raw bytes in a buffer and parse each complete line.
            buffer = ""
            done = False
            async for chunk in resp.content.iter_any():
                buffer += chunk.decode("utf-8")
                # Only complete (newline-terminated) lines are parseable;
                # a partial line stays in the buffer for the next chunk.
                while "\n" in buffer:
                    line, buffer = buffer.split("\n", 1)
                    line = line.strip()
                    if line == "[DONE]":
                        # Terminal sentinel: stop parsing AND stop reading.
                        # (The original only broke the inner loop and kept
                        # consuming the stream after [DONE].)
                        done = True
                        break
                    if not line:
                        continue  # skip blank keep-alive lines
                    try:
                        data_dict = json.loads(line)
                    except json.JSONDecodeError:
                        # Drop a malformed line instead of aborting the parse
                        # loop, which would stall lines already buffered.
                        continue
                    fullmessage += data_dict.get("message", "")
                    token = data_dict.get("resp_token", token)  # server may rotate the token
                if done:
                    break
    messHistory.append({"role": "assistant", "content": fullmessage})
    return fullmessage
def gradio_chat(user_input, mode):
    """Synchronous wrapper for the async chat function, integrated with Gradio.

    Appends the user's message (tagged with the chat mode) to the shared
    history, runs the async ``chat`` coroutine, and returns the reply.
    """
    messHistory.append({"role": "user", "content": f"[{mode}] {user_input}"})
    # asyncio.run creates, runs, and *closes* a fresh event loop; the
    # original leaked a new loop on every call (new_event_loop with no
    # matching loop.close()).
    return asyncio.run(chat(messHistory))
# Gradio interface for user interaction
def chat_interface(user_input, mode):
    """Gradio entry point: forward the user's message and chat mode to the pipeline."""
    assistant_reply = gradio_chat(user_input, mode)
    return assistant_reply
# Gradio UI: a mode selector, a message box, and a streamed-reply panel.
with gr.Blocks() as demo:
    gr.Markdown("# Chat with AI")
    with gr.Row():
        radio_mode = gr.Radio(["Friendly", "Formal", "Humorous"], label="Chat Mode", value="Friendly")
    with gr.Row():
        user_box = gr.Textbox(label="Your message")
        response_box = gr.Markdown(label="Assistant response")  # Markdown renders the reply
    send_btn = gr.Button("Send")
    # Wire the radio in as an *input component* so the user's current
    # selection is passed on each submit. The original read
    # radio_mode.value inside a lambda, which is the component's initial
    # default ("Friendly") fixed at definition time — changing the radio
    # had no effect. (It also nested gr.Interface inside gr.Blocks, which
    # is not a supported layout.)
    send_btn.click(fn=chat_interface, inputs=[user_box, radio_mode], outputs=[response_box])

# Launch the Gradio app
demo.launch()