import gradio as gr
from agents.philosopher import PhilosopherAgent
from agents.historian import HistorianAgent
from agents.hacker import HackerAgent
from agents.comedian import ComedianAgent
from agents.lawyer import LawyerAgent
from agents.scientist import ScientistAgent
from agents.journalist import JournalistAgent
from agents.trader import TraderAgent
from agents.base_agent import ACPMessage
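# NOTE: the agent modules themselves are not shown here. From the usage below,
# each agent is assumed to expose roughly this interface (a sketch inferred from
# this file, not the actual agents/base_agent.py):
#
#   class ACPMessage:
#       def __init__(self, role: str, content: str): ...
#
#   class BaseAgent:
#       name: str  # used as the key in agent_map and as a CheckboxGroup choice
#       def generate(self, messages: list[ACPMessage]) -> str: ...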
# ✅ Initialize agents
all_agents = [
    PhilosopherAgent(),
    HistorianAgent(),
    HackerAgent(),
    ComedianAgent(),
    LawyerAgent(),
    ScientistAgent(),
    JournalistAgent(),
    TraderAgent()
]
agent_map = {agent.name: agent for agent in all_agents}
# ✅ Core chat function
def chat(prompt, selected_agents):
    responses = {}
    for name in selected_agents:
        agent = agent_map.get(name)
        if agent is None:
            responses[name] = "[ERROR: unknown agent]"
            continue
        try:
            output = agent.generate([ACPMessage(role="user", content=prompt)])
        except Exception as e:
            output = f"[ERROR: {e}]"
        responses[name] = output
    return responses  # single gr.JSON output takes the dict directly
# ✅ Interface
demo = gr.Interface(
    fn=chat,
    inputs=[
        gr.Textbox(label="Prompt"),
        gr.CheckboxGroup(
            choices=list(agent_map.keys()),
            label="Agents",
            value=list(agent_map.keys())  # default: select all agents
        )
    ],
    outputs=gr.JSON(label="Responses"),
    live=False,
    title="PerspectiveAI Backend API"
)
# ✅ Launch ONLY in API mode
demo.launch(
    server_name="0.0.0.0",
    server_port=7860,
    show_api=True,  # 🔥 Enables /run/predict
    share=False  # Optional
)