import asyncio
import json
import logging
import os
import re
import shlex
from typing import Any, Dict, Optional

# Dedicated file logger for this server; mode='w' truncates my_log.log
# on every start so each run begins with a fresh log.
logger = logging.getLogger('my_logger')
logger.setLevel(logging.INFO)

_handler = logging.FileHandler('my_log.log', mode='w')
_handler.setFormatter(
    logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
)
logger.addHandler(_handler)

from mcp.server.fastmcp import FastMCP, Context, Image

# Single FastMCP server instance; every @mcp.tool()/@mcp.prompt() below
# registers on this object, and the __main__ guard starts it via mcp.run().
mcp = FastMCP("Deepseek-Orchestrator-MCP")


from openai import AsyncOpenAI, OpenAIError

from dotenv import dotenv_values

# API key is loaded from a local .env.key file (kept out of version control).
config = dotenv_values(".env.key")
# was config[f'DEEPSEEK_API_KEY'] — the f-prefix on a placeholder-free
# literal was pointless. Still raises KeyError if the key is missing.
api_key = config['DEEPSEEK_API_KEY']

# Deepseek exposes an OpenAI-compatible endpoint, so the async OpenAI client
# works with only a base_url override.
ds = AsyncOpenAI(api_key=api_key, base_url="https://api.deepseek.com")

# System message prepended to every Deepseek request: it asks the model to
# append any tool invocation (name + JSON args) in Markdown at the end of its
# reply, which smart_orchestrator later parses as a fallback when the API
# returns no structured tool_calls.
system_prompt = {"role": "system",
"content": "If the author intends to call a tool(from search_docs/summarize/run_shell), please output the tool name (with the arguments in json format) at the end of the response in Markdown. Otherwise reply normally. "}

# Tool schemas advertised to Deepseek (OpenAI function-calling format).
# NOTE(review): "multiply" has no local @mcp.tool handler in this file —
# confirm it is implemented elsewhere or intended as a demo entry.
tools = [
    {
        "type": "function",
        "function": {
            # was "search_doces" — must match the @mcp.tool handler `search_docs`
            "name": "search_docs",
            # was a copy-pasted nginx description unrelated to this tool
            "description": "Search local documentation for the given query",
            "parameters": {
                "type": "object",
                "properties": {
                    # JSON Schema uses "string", not Python's "str"
                    "query": {"type": "string"}
                },
                # was ["a"], which is not a declared property of this schema
                "required": ["query"]
            }
        }
    },
    {
        "type": "function",
        "function": {
            "name": "multiply",
            "description": "multiplication",
            "parameters": {
                "type": "object",
                "properties": {
                    "a": {"type": "number"},
                    "b": {"type": "number"}
                },
                "required": ["a", "b"]
            }
        }
    }
]

# ---- Helper: Deepseek API call ----
# ---- Helper: Deepseek API call ----
async def call_deepseek(prompt: str, max_tokens: int = 512, n_loop=20) -> Optional[Dict[str, Any]]:
    """
    Send a natural language prompt to Deepseek for intent parsing.

    Retries up to `n_loop` times on OpenAI/Deepseek API errors with a short
    backoff between attempts.

    Returns the ChatCompletion response object on success, or a dict with an
    "error" key on unrecoverable failure (callers must check for that shape).
    """
    try:
        message = {"role": "user", "content": prompt}
        attempts = 0
        while True:
            try:
                logger.info("Getting response using `deepseek-chat`")
                resp = await ds.chat.completions.create(
                    model="deepseek-chat",
                    messages=[system_prompt, message],
                    max_tokens=max_tokens,  # was accepted but never passed through
                    tools=tools)  # was tools=[], which disabled structured tool_calls entirely
                logger.info(f"Get Response: {resp.model_dump_json()}")
                return resp
            except OpenAIError as e:
                attempts += 1
                if attempts >= n_loop:
                    logger.info(f"System: An error occurred after {n_loop} attempts:")
                    raise e
                # brief backoff so transient API failures aren't retried in a hot loop
                await asyncio.sleep(1)
            except Exception as e:
                logger.error(f"An unexpected error occurred when try deepseek: {e}")
                # was `raise f"..."` — raising a str is itself a TypeError
                raise RuntimeError(f"An unexpected error occurred: {e}")

    except Exception as e:
        logger.error("An unexpected error occurred")
        return {"error": f"Error in `call_deepseek`: {e}"}

# ---- Example Tools ----

@mcp.tool()
async def search_docs(query: str) -> str:
    """Search local documentation (mocked placeholder).

    A real implementation could back this with Elasticsearch, SQLite FTS,
    or a vector database.
    """
    return (
        f"Mock search: found documents related to '{query}': "
        "['doc1.md', 'doc2.md']"
    )


@mcp.tool()
async def summarize(text: str, length: int = 200) -> str:
    """Naive summarizer: pass short text through, truncate long text.

    Placeholder implementation — truncation, not actual summarization.
    """
    if not text:
        return "No text provided."
    return text if len(text) <= length else text[:length].rstrip() + "… (truncated)"


@mcp.tool()
async def run_shell(command: str, timeout_seconds: int = 10) -> str:
    """Cautiously execute a shell command (limited whitelist).

    Only the commands in `allowed` may run; arguments are tokenized with
    shlex and the process is executed without a shell. Output is returned
    as text; errors are reported as strings rather than raised.
    """
    allowed = {"uname", "ls", "echo", "whoami", "uptime"}
    parts = shlex.split(command)
    if not parts:
        return "Empty command"
    cmd = parts[0]
    if cmd not in allowed:
        return f"Command '{cmd}' is not allowed."

    try:
        proc = await asyncio.create_subprocess_exec(
            *parts, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
        )
        try:
            # was asyncio.get_event_loop().run_until_complete(...) — calling
            # run_until_complete inside an already-running event loop raises
            # RuntimeError; in a coroutine we simply await.
            out_b, err_b = await asyncio.wait_for(proc.communicate(), timeout=timeout_seconds)
        except asyncio.TimeoutError:
            # reap the child so it doesn't linger after a timeout
            proc.kill()
            await proc.communicate()
            return f"Error executing command: timed out after {timeout_seconds}s"
        out = out_b.decode("utf-8", errors="ignore")
        err = err_b.decode("utf-8", errors="ignore")
        if err:
            return f"STDERR:\n{err}\nSTDOUT:\n{out}"
        return out or "(no output)"
    except Exception as e:
        return f"Error executing command: {e}"


# ---- Intelligent Orchestrator ----
@mcp.tool()
async def smart_orchestrator(user_request: str, ctx: Context) -> Dict[str, Any]:
    """
    Core logic:
    - Send user_request to Deepseek for reasoning and tool selection.
    - Execute the suggested tool on this MCP server.
    - Return a structured JSON response.
    """

    logger.info("Sending request to Deepseek for intent parsing...")
    await ctx.info("Sending request to Deepseek for intent parsing...")
    resp = await call_deepseek(user_request)
    content = resp.choices[0].message.content

    tool_calls = resp.choices[0].message.tool_calls

    logger.info(f"Deepseek replies: {content}. call: {tool_calls}")
    if tool_calls:
        tool_name = tool_calls[0].function.name
        args = json.loads(tool_calls[0].function.arguments)

        logger.info(f"Deepseek suggests tool '{tool_name}' with args: {args}")
        await ctx.info(f"Deepseek suggests tool '{tool_name}' with args: {args}")

        try:
            result = await session.tool_call(tool_name, args)
            return {
                "tool": tool_name,
                "args": args,
                "result": result,
                "content": content
            }
        except:
            fn = globals().get(tool_name)
            if callable(fn):
                maybe = fn(**args)
                result = await maybe if asyncio.iscoroutine(maybe) else maybe
            else:
                logger.error(f"No handler found for tool '{tool_name}'.")
                return {"error": f"No handler found for tool '{tool_name}'."}
    else:

        import re
        match = re.search(r'```json(.*?)```', content, re.DOTALL)
        if match:
            import json
            data = json.loads(match.group(1))
            logger.info(f"Deepseek suggests tool '{data["tool"]}'")
            return data | {
                "result": f"{data["tool_name"]}({",".join("k"+"=v" for k, v in data["arguments"])})",
                "content": content
            }
        else:
            return {
                "tool_name": None,
                "arguments": None,
                "result": None,
                "content": content
            }


# ---- Prompt Template (for debugging / client use) ----
# ---- Prompt Template (for debugging / client use) ----
@mcp.prompt()
async def orchestrate_prompt(task: str) -> str:
    """Prompt template nudging the model toward a structured JSON instruction."""
    return (
        "Convert the following task into a JSON instruction containing "
        f"tool, args, and explain:\n\n{task}"
    )

# ---- Server Entrypoint ----
if __name__ == "__main__":
    # Development: run `python server.py` directly, or use `mcp dev server.py`.
    mcp.run()