# Earlier version from the previous section, kept commented out for reference.
# It connects to the same Gradio MCP server, with the SSE transport spelled out
# explicitly.
#
# import gradio as gr
# import os
#
# from mcp import StdioServerParameters
# from smolagents import InferenceClientModel, CodeAgent, ToolCollection, MCPClient
#
# mcp_client = MCPClient(
#     # This is the MCP Client we created in the previous section
#     {"url": "https://abidlabs-mcp-tool-http.hf.space/gradio_api/mcp/sse", "transport": "sse"}
# )
# tools = mcp_client.get_tools()
#
# model = InferenceClientModel(token=os.getenv("HF_TOKEN"))
# agent = CodeAgent(tools=[*tools], model=model)
#
# demo = gr.ChatInterface(
#     fn=lambda message, history: str(agent.run(message)),
#     type="messages",
#     examples=["Prime factorization of 68"],
#     title="Agent with MCP Tools",
#     description="This is a simple agent that uses MCP tools to answer questions.",
# )
#
# demo.launch()

import gradio as gr
import os

from smolagents import InferenceClientModel, CodeAgent, MCPClient

mcp_client = None
try:
    # Connect to the remote Gradio MCP server and fetch the tools it exposes.
    mcp_client = MCPClient(
        {"url": "https://abidlabs-mcp-tool-http.hf.space/gradio_api/mcp/sse"}
    )
    tools = mcp_client.get_tools()

    # Build a CodeAgent around those tools; the extra authorized imports let the
    # agent's generated code parse and post-process tool output.
    model = InferenceClientModel(token=os.getenv("HF_TOKEN"))
    agent = CodeAgent(
        tools=[*tools],
        model=model,
        additional_authorized_imports=["json", "ast", "urllib", "base64"],
    )

    # Wrap the agent in a simple chat UI.
    demo = gr.ChatInterface(
        fn=lambda message, history: str(agent.run(message)),
        type="messages",
        examples=["Analyze the sentiment of the following text 'This is awesome'"],
        title="Agent with MCP Tools",
        description="This is a simple agent that uses MCP tools to answer questions.",
    )

    demo.launch()
finally:
    # Always close the MCP connection, even if setup or the UI raises;
    # the guard avoids a NameError when MCPClient() itself fails.
    if mcp_client is not None:
        mcp_client.disconnect()
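
# The commented-out version above imports StdioServerParameters and ToolCollection
# without using them. A minimal sketch of that stdio variant follows, assuming a
# hypothetical local MCP server launched with `uv run server.py` (the command and
# args are placeholders, not part of this app). It is commented out so it does not
# interfere with the Gradio app above.
#
# from mcp import StdioServerParameters
# from smolagents import ToolCollection
#
# server_parameters = StdioServerParameters(command="uv", args=["run", "server.py"])
# with ToolCollection.from_mcp(server_parameters, trust_remote_code=True) as tool_collection:
#     stdio_model = InferenceClientModel(token=os.getenv("HF_TOKEN"))
#     stdio_agent = CodeAgent(tools=[*tool_collection.tools], model=stdio_model)
#     print(stdio_agent.run("Analyze the sentiment of the following text 'This is awesome'"))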