# client.py

"""
run client  --> (via a tool) LLM --> Infer the user's intention (from the user's prompt)
-- (extract json code from the response of LLM) --> call tools
"""

import asyncio

from mcp.client.stdio import stdio_client
from mcp import ClientSession, StdioServerParameters


# Launch configuration for the server process: run `server.py` through `uv`.
_server_command = "uv"
_server_args = ["run", "server.py"]
params = StdioServerParameters(command=_server_command, args=_server_args)


async def main() -> None:
    """Connect to the MCP server over stdio, list its tools, and run the orchestrator.

    Flow: spawn the server subprocess (per ``params``), initialize the MCP
    session, print every advertised tool, then invoke the
    ``smart_orchestrator`` tool with a natural-language request and print
    whatever result shape comes back.
    """
    async with stdio_client(params) as (stdio, write):
        async with ClientSession(stdio, write) as session:

            # Handshake — must complete before any other session call.
            await session.initialize()

            # --- List available tools ---
            resp = await session.list_tools()
            print("\n🔧 Available Tools:")
            for t in resp.tools:  # t: Tool
                print(f" - {t.name}: {t.description}")

            # --- Use Deepseek-powered orchestrator ---
            print("\n🤖 Calling `smart_orchestrator` (intelligent reasoning)...")
            user_request = "Find how to configure nginx reverse proxy and summarize it."
            orchestrated = await session.call_tool(
                "smart_orchestrator", {"user_request": user_request}
            )

            # Robust result extraction. The previous hard-coded chain
            # `structuredContent["result"]["result"]` raised TypeError when
            # structuredContent was None (e.g. the tool errored) and KeyError
            # on any payload-shape difference. Degrade gracefully instead.
            if orchestrated.isError:
                # Server reported a tool-level failure; content holds the details.
                print("⚠️ Tool call failed:", orchestrated.content)
            else:
                structured = orchestrated.structuredContent
                if isinstance(structured, dict):
                    inner = structured.get("result")
                    if isinstance(inner, dict) and "result" in inner:
                        # Expected nested shape: {"result": {"result": ...}}.
                        print(inner["result"])
                    else:
                        # Partial match — print whatever level exists.
                        print(inner if inner is not None else structured)
                else:
                    # No structured payload; fall back to the raw content blocks.
                    print(orchestrated.content)

            # Both `async with` blocks close the session and the server
            # subprocess automatically on exit — no manual cleanup needed.

if __name__ == "__main__":
    asyncio.run(main())

