Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -1,36 +1,26 @@
|
|
1 |
-
|
2 |
-
import
|
3 |
-
from huggingface_hub import MCPClient
|
4 |
|
5 |
-
|
6 |
-
# Ollama's OpenAI-compatible endpoint
|
7 |
-
client = MCPClient(
|
8 |
-
provider="openai",
|
9 |
-
base_url="http://localhost:11434/v1",
|
10 |
-
model="devstral", # or your custom Ollama model name
|
11 |
-
api_key="ollama" # any non-empty string works for Ollama
|
12 |
-
)
|
13 |
|
14 |
-
# 1) Await the coroutine
|
15 |
-
await client.add_mcp_server(
|
16 |
-
type="stdio",
|
17 |
-
command="npx",
|
18 |
-
args=["@playwright/mcp@latest"]
|
19 |
-
)
|
20 |
|
21 |
-
|
22 |
-
|
23 |
-
"
|
24 |
-
|
25 |
-
|
26 |
|
27 |
-
|
28 |
-
|
29 |
-
print(chunk)
|
30 |
|
31 |
-
|
32 |
-
|
33 |
-
|
|
|
|
|
|
|
|
|
34 |
|
35 |
-
|
36 |
-
|
|
|
|
import gradio as gr
import os

from smolagents import InferenceClientModel, CodeAgent, MCPClient

# Connect to the remote MCP server *before* the try/finally: if the
# constructor itself raised inside the try, `mcp_client` would be unbound
# and the finally block's disconnect() would raise NameError, masking the
# original error.
mcp_client = MCPClient(
    {"url": "https://abidlabs-mcp-tool-http.hf.space/gradio_api/mcp/sse"}
)
try:
    # Discover the tools exposed by the MCP server.
    tools = mcp_client.get_tools()

    # Inference model; the token is read from the environment (may be None,
    # in which case the client falls back to anonymous/ambient credentials).
    model = InferenceClientModel(token=os.getenv("HUGGINGFACE_API_TOKEN"))
    agent = CodeAgent(
        tools=[*tools],
        model=model,
        additional_authorized_imports=["json", "ast", "urllib", "base64"],
    )

    # Minimal chat UI: each user message is forwarded to the agent and the
    # agent's answer (stringified) becomes the assistant reply.
    demo = gr.ChatInterface(
        fn=lambda message, history: str(agent.run(message)),
        type="messages",
        examples=["Analyze the sentiment of the following text 'This is awesome'"],
        title="Agent with MCP Tools",
        description="This is a simple agent that uses MCP tools to answer questions.",
    )

    demo.launch()
finally:
    # Always release the MCP connection, even if setup or launch() fails.
    mcp_client.disconnect()