import gradio as gr
from smolagents import InferenceClientModel, CodeAgent
from smolagents.mcp_client import MCPClient

# Use a 'with' block so the MCP client connects and disconnects cleanly
with MCPClient(
    {"url": "https://abidlabs-mcp-tools.hf.space/gradio_api/mcp", "transport": "streamable-http"}
) as mcp_client:
    # Fetch the available tools from the remote MCP server
    tools = mcp_client.get_tools()

    # Set up the agent with the tools and a default language model
    model = InferenceClientModel()
    agent = CodeAgent(tools=tools, model=model)

    # Build the Gradio chat interface; with type="messages" the function
    # returns just the assistant's reply string
    demo = gr.ChatInterface(
        fn=lambda message, history: str(agent.run(message)),
        type="messages",
        examples=["Prime factorization of 68"],
        title="Agent with MCP Tools",
        description="This is a simple agent that uses MCP tools to answer questions.",
    )

    # Launch inside the 'with' block so the MCP connection stays open while serving
    demo.launch()