import os

import gradio as gr

from smolagents import CodeAgent, InferenceClientModel
from smolagents.mcp_client import MCPClient

# Hugging Face token used by the hosted inference model
token = os.environ.get("HF_TOKEN")
mcp_client = None
try:
    # Connect to the MCP server exposed by the Gradio Space (SSE endpoint).
    # Alternative example server: https://abidlabs-mcp-tools.hf.space/gradio_api/mcp/sse
    mcp_client = MCPClient(
        {"url": "https://rajeshthangaraj1-mcp-sentiment.hf.space/gradio_api/mcp/sse"}
    )
    tools = mcp_client.get_tools()

    # Build a code agent that can call the MCP tools through the inference model.
    model = InferenceClientModel(token=token)
    agent = CodeAgent(tools=[*tools], model=model)

    def call_agent(message, history):
        """Run the agent on the user's message and return its answer as text."""
        return str(agent.run(message))

    demo = gr.ChatInterface(
        fn=call_agent,
        type="messages",
        examples=["Prime factorization of 68"],
        title="Agent with MCP Tools",
        description="This is a simple agent that uses MCP tools to answer questions.",
    )

    demo.launch()
finally:
    # Close the MCP connection even if setup or launch fails.
    if mcp_client is not None:
        mcp_client.close()
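
# --- Optional headless sketch (an illustrative assumption, not part of the Space UI). ---
# Shows how the same MCP tools could be queried once without the Gradio interface.
# The RUN_HEADLESS_DEMO flag and the sample prompt are hypothetical names for this sketch.
if os.environ.get("RUN_HEADLESS_DEMO"):
    headless_client = MCPClient(
        {"url": "https://rajeshthangaraj1-mcp-sentiment.hf.space/gradio_api/mcp/sse"}
    )
    try:
        headless_agent = CodeAgent(
            tools=[*headless_client.get_tools()],
            model=InferenceClientModel(token=token),
        )
        # One-shot query against the tools exposed by the sentiment server.
        print(headless_agent.run("What is the sentiment of: 'I love MCP!'"))
    finally:
        headless_client.close()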