#!/usr/bin/env python3
import os

from smolagents import InferenceClientModel, CodeAgent
from smolagents.mcp_client import MCPClient


def main():
    # 1) Point at your local MCP server
    url = os.getenv("MCP_URL", "http://localhost:7860/gradio_api/mcp/sse")
    mcp = MCPClient({"url": url})
    tools = mcp.get_tools()  # discover available tools (e.g. sentiment)

    # 2) Wire up an LLM + those tools
    model = InferenceClientModel()  # defaults to a Hugging Face Inference Providers model
    agent = CodeAgent(tools=tools, model=model)

    print("Tiny agent ready! Type your query (or Ctrl-C to quit).")
    try:
        while True:
            user = input("User> ")
            # 3) Let the agent decide whether to call sentiment() or answer directly
            answer = agent.run(user)
            print("Agent>", answer)
    finally:
        mcp.disconnect()  # clean up the SSE connection


if __name__ == "__main__":
    main()
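For reference, the MCP_URL default above points at a local Gradio app that exposes its functions as MCP tools over SSE. Below is a minimal sketch of such a server; it is an assumption for illustration, and the sentiment() tool (implemented here with TextBlob) is a hypothetical stand-in for whatever tools your server actually exposes.

#!/usr/bin/env python3
# sentiment_server.py -- illustrative sketch of a Gradio MCP server (an assumption,
# not part of the original script); exposes sentiment() as an MCP tool over SSE.
import gradio as gr
from textblob import TextBlob  # assumed dependency for the toy sentiment tool


def sentiment(text: str) -> str:
    """Return polarity (-1..1) and subjectivity (0..1) for the given text."""
    scores = TextBlob(text).sentiment
    return f"polarity={scores.polarity:.2f}, subjectivity={scores.subjectivity:.2f}"


demo = gr.Interface(fn=sentiment, inputs="text", outputs="text")

if __name__ == "__main__":
    # mcp_server=True (Gradio 5+) serves the MCP endpoint at /gradio_api/mcp/sse
    demo.launch(mcp_server=True)  # listens on http://localhost:7860 by default

Run the server first, then the agent script in a second terminal; mcp.get_tools() discovers sentiment() automatically, and the CodeAgent can decide to call it whenever a query asks about sentiment.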