import gradio as gr
import requests

API_URL = "http://localhost:8000/chat"

def chat_with_agentiq(message, history):
    # Convert Gradio tuple-style history into ChatML-style messages
    # (ChatML/OpenAI-compatible APIs expect lowercase role names)
    messages = []
    for user_msg, agent_msg in history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": agent_msg})
    messages.append({"role": "user", "content": message})

    payload = {
        "messages": messages,
        "model": "",  # Fill in your model name if required
        "temperature": 0.7,
        "max_tokens": 512,
        "top_p": 1.0,
    }

    try:
        response = requests.post(API_URL, json=payload, timeout=60)
        response.raise_for_status()
        data = response.json()
        reply = data["choices"][0]["message"]["content"]
    except Exception as e:
        reply = f"[Error: {e}]"

    return reply

demo = gr.ChatInterface(fn=chat_with_agentiq, title="AgentIQ Chat")

if __name__ == "__main__":
    demo.launch(server_name="0.0.0.0", server_port=7860)