import os
import pathlib

import gradio as gr
from mcp import StdioServerParameters
from smolagents import MCPClient, CodeAgent, InferenceClientModel

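# MCP server script sitting next to this file; launched below as a stdio subprocess.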
SERVER_PATH = pathlib.Path(__file__).with_name("mcp_server.py")

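# Hugging Face model that powers the agent; override via the HF_MODEL_ID environment variable.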
HF_MODEL_ID = os.getenv("HF_MODEL_ID", "Qwen/Qwen3-0.6B")


def respond(message: str, history: list):
    """Route the prompt through an MCP-enabled CodeAgent and return the answer."""
    # Launch the local MCP server as a stdio subprocess.
    params = StdioServerParameters(command="python", args=[str(SERVER_PATH)])

    # MCPClient connects to the server and exposes its tools while the block is open.
    with MCPClient(params) as tools:
        model = InferenceClientModel(model_id=HF_MODEL_ID)
        agent = CodeAgent(tools=tools, model=model)
        answer = agent.run(message)

    # Record the exchange in the "messages" format expected by gr.Chatbot.
    history.append({"role": "user", "content": message})
    history.append({"role": "assistant", "content": answer})
    return history, history


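# Gradio UI: a messages-format chat window backed by per-session state.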
with gr.Blocks(title="Enterprise SQL Agent") as demo:
    chat_state = gr.State([])
    chatbot = gr.Chatbot(type="messages", label="Enterprise SQL Agent")

    textbox = gr.Textbox(
        placeholder="Ask: Who are my inactive Northeast customers?",
        show_label=False,
    )
    # Each submitted prompt is routed through the agent and appended to the chat history.
    textbox.submit(respond, [textbox, chat_state], [chatbot, chat_state])

    gr.Markdown(
        """
### Example Prompts
- Who are my Northeast customers who haven’t ordered in 6 months?
- List customers sorted by last order date.
- Find clients from the West with recent orders.

_Powered by smolagents + MCP + Hugging Face Inference API_
"""
    )

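# Launch the app when this file is executed directly.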
if __name__ == "__main__":
    demo.launch()