# AI_SQL / app.py — Gradio front-end for an MCP-backed SQL agent.
import pathlib
import os
import gradio as gr
from mcp import StdioServerParameters
from smolagents import MCPClient, CodeAgent, InferenceClientModel
# Absolute path to sibling mcp_server.py
SERVER_PATH = pathlib.Path(__file__).with_name("mcp_server.py")
# Lazy‑load a Hugging Face Inference model (defaults to Qwen if no id given)
HF_MODEL_ID = os.getenv("HF_MODEL_ID", "Qwen/Qwen3-0.6B") # small & free-tier friendly
def respond(message: str, history: list) -> tuple[list, list]:
    """Route the prompt through an MCP-enabled CodeAgent and return the answer.

    Args:
        message: The user's prompt from the textbox.
        history: Conversation so far as OpenAI-style message dicts
            (``{"role": ..., "content": ...}``); mutated in place.

    Returns:
        ``(history, history)`` — one copy feeds the Chatbot display, the
        other updates the ``gr.State`` holding the conversation.
    """
    # Launch the sibling mcp_server.py as a stdio subprocess for this turn.
    params = StdioServerParameters(command="python", args=[str(SERVER_PATH)])
    try:
        # MCPClient exposes the server's tools and closes them on exit.
        with MCPClient(params) as tools:
            model = InferenceClientModel(model_id=HF_MODEL_ID)
            agent = CodeAgent(tools=tools, model=model)
            # Coerce to str: agent.run may return a non-string result object,
            # but gr.Chatbot(type="messages") expects string content.
            answer = str(agent.run(message))
    except Exception as exc:
        # Surface failures (server spawn, inference, tool errors) in the chat
        # instead of crashing the Gradio handler with a bare stack trace.
        answer = f"⚠️ Agent error: {exc}"
    # Record the exchange in OpenAI-style messages.
    history.append({"role": "user", "content": message})
    history.append({"role": "assistant", "content": answer})
    return history, history
# UI wiring: component creation order below determines the page layout.
with gr.Blocks(title="Enterprise SQL Agent") as demo:
    # Server-side conversation store (OpenAI-style message dicts); passed
    # into respond() and written back after each turn.
    chat_state = gr.State([])
    # type="messages" renders {"role", "content"} dicts directly.
    chatbot = gr.Chatbot(type="messages", label="Enterprise SQL Agent")
    textbox = gr.Textbox(
        placeholder="Ask: Who are my inactive Northeast customers?",
        show_label=False,
    )
    # Enter in the textbox routes (message, history) through respond();
    # outputs refresh both the visible chat and the stored state.
    textbox.submit(respond, [textbox, chat_state], [chatbot, chat_state])
    gr.Markdown(
        """
### Example Prompts
- Who are my Northeast customers who haven’t ordered in 6 months?
- List customers sorted by last order date.
- Find clients from the West with recent orders.
_Powered by smolagents + MCP + Hugging Face Inference API_
"""
    )

if __name__ == "__main__":
    # Start the Gradio server only when run as a script (not on import).
    demo.launch()