ndurner committed
Commit 2a35023 · 1 Parent(s): 3214c78

local MCP support

Files changed (6)
  1. README.md +15 -1
  2. app.py +112 -4
  3. mcp_registry.py +197 -21
  4. mcp_registry.sample.json +6 -0
  5. requirements.txt +2 -1
  6. tests/greet_mcp.py +16 -0
README.md CHANGED
@@ -23,7 +23,7 @@ Features:
  * example: download an ICS calendar file the model has created for you
  * streaming chat
  * image generation (via DALL-E 3)
- * remote MCP server support via configurable registry
+ * MCP server support (both remote and local) via configurable registry
  * optional UnrestrictedPython execution when `CODE_EXEC_UNRESTRICTED_PYTHON=1`

  The MCP registry is looked up in the following order:
@@ -34,5 +34,19 @@ The MCP registry is looked up in the following order:
  See `mcp_registry.sample.json` for an example configuration.
  Headers and query parameters may reference environment variables using the `env:` prefix.
  Use `"allowed_tools": ["*"]` to permit all tools from a server.
+
+ For local MCP servers, use the `command` and `args` fields to specify how to launch the server. Environment variables can be passed via the `env` field. For example:
+ ```json
+ {
+   "name": "exa_local",
+   "command": "npx",
+   "args": ["-y", "exa-mcp-server"],
+   "env": {
+     "EXA_API_KEY": "env:EXA_API_KEY"
+   },
+   "allowed_tools": ["*"]
+ }
+ ```
+
  When an MCP tool requires approval, the assistant will notify you in chat.
  Reply with `y` to approve or `n` to deny the request, optionally adding a comment after the `y` or `n`.
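
The `env:` substitution mentioned above is handled by `env_subst(values, kind)` in `mcp_registry.py`; only its signature appears in this diff, so the following is a minimal sketch of the intended semantics. The behavior on a missing variable (raising, with `kind` used in the message) is an assumption, not taken from the actual implementation:

```python
import os

def env_subst(values: dict, kind: str) -> dict:
    # Resolve values of the form "env:VAR_NAME" from the process environment;
    # plain values pass through unchanged. Raising on a missing variable is an
    # assumption -- the real implementation may log or skip instead.
    out = {}
    for key, value in values.items():
        if isinstance(value, str) and value.startswith("env:"):
            var = value[len("env:"):]
            if var not in os.environ:
                raise KeyError(f"missing environment variable for {kind}: {var}")
            out[key] = os.environ[var]
        else:
            out[key] = value
    return out

# Example: with EXA_API_KEY set in the environment,
# env_subst({"EXA_API_KEY": "env:EXA_API_KEY"}, "environment variable")
# yields {"EXA_API_KEY": "<the key's value>"}.
```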
app.py CHANGED
@@ -6,9 +6,10 @@ from openai import OpenAI
  import json
  from PIL import Image
  import io
+ import asyncio
  from settings_mgr import generate_download_settings_js, generate_upload_settings_js
  from chat_export import import_history, get_export_js
- from mcp_registry import load_registry, to_openai_tool
+ from mcp_registry import load_registry, get_tools_for_server, call_local_mcp_tool, function_to_mcp_map, shutdown_local_mcp_clients
  from gradio.components.base import Component
  from types import SimpleNamespace

@@ -185,7 +186,7 @@ def process_values_js():
      }
      """

- def bot(message, history, oai_key, system_prompt, temperature, max_tokens, model, python_use, web_search, *mcp_selected):
+ async def bot(message, history, oai_key, system_prompt, temperature, max_tokens, model, python_use, web_search, *mcp_selected):
      global pending_mcp_request
      try:
          client = OpenAI(
@@ -296,9 +297,10 @@ def bot(message, history, oai_key, system_prompt, temperature, max_tokens, model
                  "type": "web_search",
                  "search_context_size": "high"
              })
+         # Add selected MCP servers to tools
          for sel, entry in zip(mcp_selected, mcp_servers):
              if sel:
-                 tools.append(to_openai_tool(entry))
+                 tools.extend(await get_tools_for_server(entry))
          if not tools:
              tools = None

@@ -480,7 +482,113 @@ def bot(message, history, oai_key, system_prompt, temperature, max_tokens, model
              yield assistant_msgs

          elif output.type == "function_call":
-             if output.name == "eval_python":
+             # Check if this is a local MCP tool call
+             function_name = output.name
+             if function_name in function_to_mcp_map:
+                 try:
+                     mcp_info = function_to_mcp_map[function_name]
+                     server_name = mcp_info["server_name"]
+                     tool_name = mcp_info["tool_name"]
+
+                     # Find the server entry
+                     server_entry = None
+                     for entry in mcp_servers:
+                         if entry["name"] == server_name:
+                             server_entry = entry
+                             break
+
+                     if server_entry:
+                         history_openai_format.append({
+                             "type": "function_call",
+                             "name": function_name,
+                             "arguments": output.arguments,
+                             "call_id": output.call_id
+                         })
+
+                         # Parse arguments
+                         arguments = json.loads(output.arguments)
+                         call_id = output.call_id
+
+                         # Show the function call to the user
+                         parent_msg = gr.ChatMessage(
+                             role="assistant",
+                             content="",
+                             metadata={"title": f"MCP: {server_name} - {tool_name}", "id": call_id, "status": "pending"},
+                         )
+                         assistant_msgs.append(parent_msg)
+                         assistant_msgs.append(
+                             gr.ChatMessage(
+                                 role="assistant",
+                                 content=f"``` arguments\n{output.arguments}\n```",
+                                 metadata={"title": "request", "parent_id": call_id},
+                             )
+                         )
+                         yield assistant_msgs
+
+                         # Call the MCP tool (async)
+                         try:
+                             tool_result = await call_local_mcp_tool(server_entry, tool_name, arguments)
+                             # Extract text from result
+                             if isinstance(tool_result, list) and tool_result and hasattr(tool_result[0], 'text'):
+                                 result_text = "\n".join([item.text for item in tool_result])
+                             elif hasattr(tool_result, 'text'):
+                                 result_text = tool_result.text
+                             else:
+                                 result_text = str(tool_result)
+                             # Show result to the user
+                             assistant_msgs.append(
+                                 gr.ChatMessage(
+                                     role="assistant",
+                                     content=f"``` result\n{result_text}\n```",
+                                     metadata={"title": "response", "parent_id": call_id, "status": "done"},
+                                 )
+                             )
+                             parent_msg.metadata["status"] = "done"
+                             yield assistant_msgs
+                             # Add result to history
+                             history_openai_format.append(
+                                 {
+                                     "type": "function_call_output",
+                                     "call_id": output.call_id,
+                                     "output": result_text,
+                                 }
+                             )
+                         except Exception as e:
+                             error_message = str(e)
+                             history_openai_format.append({
+                                 "type": "function_call_output",
+                                 "call_id": output.call_id,
+                                 "output": json.dumps({"error": error_message})
+                             })
+                             assistant_msgs.append(
+                                 gr.ChatMessage(
+                                     role="assistant",
+                                     content=f"``` error\n{error_message}\n```",
+                                     metadata={"title": "response", "parent_id": call_id, "status": "done"},
+                                 )
+                             )
+                             parent_msg.metadata["status"] = "done"
+                             yield assistant_msgs
+
+                         # Need to continue the loop to process the function output
+                         loop_tool_calling = True
+                     else:
+                         # Server entry not found
+                         error_message = f"Server {server_name} not found"
+                         history_openai_format.append({
+                             "type": "function_call_output",
+                             "call_id": output.call_id,
+                             "output": json.dumps({"error": error_message})
+                         })
+                 except Exception as e:
+                     # Some error occurred during processing
+                     error_message = f"Error processing local MCP tool call: {str(e)}"
+                     history_openai_format.append({
+                         "type": "function_call_output",
+                         "call_id": output.call_id,
+                         "output": json.dumps({"error": error_message})
+                     })
+             elif output.name == "eval_python":
                  try:
                      history_openai_format.append({
                          "type": "function_call",
mcp_registry.py CHANGED
@@ -1,6 +1,21 @@
  import os
  import json
+ import asyncio
+ import logging
  from urllib.parse import urlencode
+ from typing import Dict, List, Any, Optional, Union
+
+ try:
+     from fastmcp import Client
+     from fastmcp.client.transports import StdioTransport, PythonStdioTransport
+ except ImportError:
+     logging.warning("FastMCP library not installed. Local MCP servers will not be available.")
+     Client = None
+     StdioTransport = None
+
+ # Global dictionary to store local MCP clients
+ local_mcp_clients = {}
+ local_mcp_tools_cache = {}


  _SEARCH_PATHS = [
@@ -9,18 +24,17 @@ _SEARCH_PATHS = [
      os.path.expanduser("~/.oai_chat/mcp_registry.json"),
  ]

+ async def log(msg):
+     print("[MCP SERVER]", msg.data, flush=True)
+

  def _merge_defaults(reg: dict) -> list[dict]:
      defaults = reg.get("defaults", {})
      servers = []
      for entry in reg.get("servers", []):
-         if entry.get("url"):
-             merged = dict(defaults)
-             merged.update(entry)
-             servers.append(merged)
-         else:
-             # Local MCPs not yet supported
-             pass
+         merged = dict(defaults)
+         merged.update(entry)
+         servers.append(merged)
      return servers

@@ -45,20 +59,123 @@ def env_subst(values: dict, kind: str) -> dict:
      return out


- def to_openai_tool(entry: dict) -> dict:
-     server_url = entry["url"]
-     if "query_params" in entry:
-         qp = urlencode(env_subst(entry["query_params"], "query parameter"))
-         if "?" in server_url:
-             server_url += "&" + qp
-         else:
-             server_url += "?" + qp
-     tool = {
-         "type": "mcp",
-         "server_label": entry.get("server_label", entry["name"]),
-         "server_url": server_url,
-         "headers": env_subst(entry.get("headers", {}), "header"),
-     }
+ def is_local_mcp(entry: dict) -> bool:
+     """Check if an MCP entry is a local MCP server"""
+     return "command" in entry and "args" in entry
+
+ async def start_local_mcp_client(entry: dict) -> Optional[Client]:
+     """Start a local MCP client for a given entry"""
+     if Client is None or StdioTransport is None:
+         logging.error("FastMCP library not installed. Cannot start local MCP client.")
+         return None
+
+     try:
+         name = entry["name"]
+         command = entry["command"]
+         args = entry["args"]
+
+         # Prepare environment variables
+         env_vars = {}
+         if "env" in entry:
+             env_vars = env_subst(entry["env"], "environment variable")
+
+         # Create transport with environment variables
+         transport = StdioTransport(
+             command=command,
+             args=args,
+             env=env_vars if env_vars else None
+         )
+
+         # Create client with the transport
+         client = Client(transport, log_handler=log)
+
+         # Store the client in the global dictionary
+         local_mcp_clients[name] = client
+
+         return client
+     except Exception as e:
+         logging.error(f"Failed to start local MCP client: {str(e)}")
+         return None
+
+ async def get_local_mcp_tools(entry: dict) -> List[Dict[str, Any]]:
+     """Get available tools from a local MCP server"""
+     name = entry["name"]
+
+     # Check if we have cached tools for this server
+     if name in local_mcp_tools_cache:
+         return local_mcp_tools_cache[name]
+
+     # Check if client exists or create a new one
+     client = local_mcp_clients.get(name)
+     if client is None:
+         client = await start_local_mcp_client(entry)
+         if client is None:
+             return []
+
+     try:
+         # Use client in async context manager
+         async with client:
+             # List available tools
+             tools = await client.list_tools()
+             # Cache the tools
+             local_mcp_tools_cache[name] = tools
+             return tools
+     except Exception as e:
+         logging.error(f"Failed to list tools from local MCP server: {str(e)}")
+         return []
+
+ async def call_local_mcp_tool(entry: dict, tool_name: str, arguments: Dict[str, Any]) -> Any:
+     """Call a tool on a local MCP server"""
+     name = entry["name"]
+
+     # Check if client exists or create a new one
+     client = local_mcp_clients.get(name)
+     if client is None:
+         client = await start_local_mcp_client(entry)
+         if client is None:
+             return {"error": "Failed to connect to local MCP server"}
+
+     try:
+         # Use client in async context manager
+         async with client:
+             if not client.is_connected():
+                 logging.warning("MCP server not connected")
+
+             # Call the tool
+             result = await client.call_tool(tool_name, arguments)
+             return result
+     except Exception as e:
+         logging.error(f"Failed to call tool on local MCP server: {str(e)}")
+         return {"error": str(e)}
+
+ async def shutdown_local_mcp_clients():
+     """Shutdown all local MCP clients"""
+     for name, client in local_mcp_clients.items():
+         try:
+             await client.close()
+         except Exception as e:
+             logging.error(f"Failed to close local MCP client {name}: {str(e)}")
+     local_mcp_clients.clear()
+     local_mcp_tools_cache.clear()
+
+ def to_openai_tool(entry: dict) -> Union[Dict[str, Any], List[Dict[str, Any]]]:
+     """Convert an MCP entry to an OpenAI tool definition(s)"""
+     # For remote MCP servers, use the standard "mcp" type
+     if "url" in entry:
+         tool = {
+             "type": "mcp",
+             "server_label": entry.get("server_label", entry["name"]),
+         }
+
+         server_url = entry["url"]
+         if "query_params" in entry:
+             qp = urlencode(env_subst(entry["query_params"], "query parameter"))
+             if "?" in server_url:
+                 server_url += "&" + qp
+             else:
+                 server_url += "?" + qp
+         tool["server_url"] = server_url
+         tool["headers"] = env_subst(entry.get("headers", {}), "header")
      if "allowed_tools" in entry:
          allowed = entry["allowed_tools"]
          if not (len(allowed) == 1 and allowed[0] == "*"):
@@ -66,3 +183,62 @@ def to_openai_tool(entry: dict) -> dict:
      if "require_approval" in entry:
          tool["require_approval"] = entry["require_approval"]
      return tool
+
+ # Global mapping to track function names back to their MCP servers and tool names
+ function_to_mcp_map = {}
+
+ # Cache for local MCP tools
+ local_mcp_tool_cache = {}
+
+ # Helper function to create a function tool definition for a local MCP tool
+ def create_function_tool_for_local_mcp_tool(server_name: str, tool_name: str, tool_obj) -> Dict[str, Any]:
+     """Create an OpenAI function tool definition for a local MCP tool"""
+     function_name = f"{server_name}_{tool_name}"
+
+     # Save the mapping for later lookup during function call
+     function_to_mcp_map[function_name] = {
+         "server_name": server_name,
+         "tool_name": tool_name
+     }
+
+     # Handle FastMCP Tool object format (based on observed structure)
+     description = getattr(tool_obj, 'description', f"Tool {tool_name} from {server_name} MCP server")
+     parameters = getattr(tool_obj, 'inputSchema', {"type": "object", "properties": {}})
+
+     return {
+         "type": "function",
+         "name": function_name,
+         "description": description,
+         "parameters": parameters
+     }
+
+ async def get_tools_for_server(entry: dict) -> List[Dict[str, Any]]:
+     """Get all tools for a given server entry (local or remote)
+     For remote servers, it returns a single MCP tool.
+     For local servers, it returns multiple function tools (one for each MCP tool).
+     """
+     if is_local_mcp(entry):
+         server_name = entry["name"]
+         # Try to get tools from cache first
+         if server_name in local_mcp_tool_cache:
+             mcp_tools = local_mcp_tool_cache[server_name]
+         else:
+             try:
+                 mcp_tools = await get_local_mcp_tools(entry)
+                 local_mcp_tool_cache[server_name] = mcp_tools
+             except Exception as e:
+                 logging.error(f"Error getting tools from local MCP server {server_name}: {str(e)}")
+                 mcp_tools = []
+         result = []
+         for tool_obj in mcp_tools:
+             tool_name = getattr(tool_obj, 'name', None)
+             if tool_name:
+                 function_tool = create_function_tool_for_local_mcp_tool(server_name, tool_name, tool_obj)
+                 result.append(function_tool)
+         return result
+     else:
+         tool = to_openai_tool(entry)
+         if isinstance(tool, list):
+             return tool
+         else:
+             return [tool]
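
Taken together, a local entry is flattened into plain OpenAI function tools named `{server_name}_{tool_name}`. For the `greeting` server registered in `mcp_registry.sample.json` (next file), the emitted definition would look roughly like this; the schema details are an assumption about what FastMCP derives from the `greet(name: str)` signature, not values taken from this diff:

```python
# Approximate output of create_function_tool_for_local_mcp_tool for the
# sample "greeting" server's greet tool; inputSchema contents are assumed.
greeting_tool = {
    "type": "function",
    "name": "greeting_greet",  # f"{server_name}_{tool_name}"
    "description": "Greet a user by name.",
    "parameters": {
        "type": "object",
        "properties": {"name": {"type": "string"}},
        "required": ["name"],
    },
}
```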
mcp_registry.sample.json CHANGED
@@ -16,6 +16,12 @@
        "require_approval": {
          "never": { "tool_names": ["search"] }
        }
+     },
+     {
+       "name": "greeting",
+       "command": "python3",
+       "args": ["-u", "tests/greet_mcp.py"],
+       "allowed_tools": ["*"]
      }
    ]
  }
requirements.txt CHANGED
@@ -1,5 +1,6 @@
  pydantic == 2.10.6
- gradio == 5.36.2
+ gradio == 5.38.0
  openai == 1.76.0
  lxml
  RestrictedPython
+ fastmcp
tests/greet_mcp.py ADDED
@@ -0,0 +1,16 @@
+ # taken from FastMCP documentation at https://gofastmcp.com/servers/server: my_server.py
+ from fastmcp import FastMCP
+
+ mcp = FastMCP(name="MyServer")
+
+ @mcp.tool
+ def greet(name: str) -> str:
+     """Greet a user by name."""
+     return f"Hello, {name}! .:This message is powered by MCP.:"
+
+ if __name__ == "__main__":
+     # This runs the server, defaulting to STDIO transport
+     mcp.run()
+
+ # To use a different transport, e.g., Streamable HTTP:
+ # mcp.run(transport="http", host="127.0.0.1", port=9000)
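
The sample server can also be exercised by hand with the FastMCP client, mirroring how `call_local_mcp_tool` connects over stdio. A minimal sketch, using only the APIs this commit already relies on (`StdioTransport`, `Client`, `list_tools`, `call_tool`); run it from the repository root:

```python
# Connect to tests/greet_mcp.py over stdio and call its greet tool,
# the same way call_local_mcp_tool in mcp_registry.py does.
import asyncio
from fastmcp import Client
from fastmcp.client.transports import StdioTransport

async def main():
    transport = StdioTransport(command="python3", args=["-u", "tests/greet_mcp.py"])
    async with Client(transport) as client:
        tools = await client.list_tools()
        print([t.name for t in tools])             # expect: ['greet']
        result = await client.call_tool("greet", {"name": "Alice"})
        print(result)                              # the greeting, wrapped in MCP content

asyncio.run(main())
```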