from .schemas import *

import httpx
import json
import os
from openai import OpenAI
import uuid
from collections import defaultdict, deque


# DeepSeek API configuration.
# SECURITY: the API key is read from the environment — never hard-code
# secrets in source control. Any key previously committed here must be
# considered leaked and rotated.
DEEPSEEK_API_KEY = os.environ.get("DEEPSEEK_API_KEY", "")
DEEPSEEK_API_BASE = "https://api.deepseek.com"
DEEPSEEK_MODEL = "deepseek-chat"
DEEPSEEK_API_URL = f"{DEEPSEEK_API_BASE}/v1/chat/completions"


# DeepSeek exposes an OpenAI-compatible API, so the stock OpenAI client
# works when pointed at the DeepSeek base URL.
client = OpenAI(api_key=DEEPSEEK_API_KEY, base_url=DEEPSEEK_API_BASE)

# Per-conversation chat history cache, keyed by request_id.
# Each turn contributes two messages (user + assistant), so the deque
# holds at most MAX_HISTORY turns via maxlen = MAX_HISTORY * 2.
MAX_HISTORY = int(os.environ.get("AGENT_MAX_HISTORY", 10))
_chat_history = defaultdict(lambda: deque(maxlen=MAX_HISTORY*2))  # 2 messages per turn

def recommend_hotels(req: HotelRecommendRequest) -> HotelRecommendResponse:
    """Return a canned list of hotel names for the requested location (stub data)."""
    suffixes = ["大酒店", "快捷宾馆", "青年旅社"]
    return HotelRecommendResponse(hotels=[f"{req.location}{s}" for s in suffixes])

def send_hotel_order(req: HotelOrderRequest) -> HotelOrderResponse:
    """Pretend to place a hotel order; always succeeds with a fixed order id (stub)."""
    result = HotelOrderResponse(order_id="HOTEL123456", status="success")
    return result

def query_weather(req: WeatherQueryRequest) -> WeatherQueryResponse:
    """Return fixed sunny weather for the requested location (stub data)."""
    return WeatherQueryResponse(
        location=req.location,
        weather="晴",
        temperature="25°C",
    )

def query_train_tickets(req: TrainTicketQueryRequest) -> TrainTicketQueryResponse:
    """Return a fixed pair of train options regardless of the query (stub data)."""
    options = ["G1234 08:00-12:00", "D5678 09:30-13:30"]
    return TrainTicketQueryResponse(trains=options)

def send_train_order(req: TrainOrderRequest) -> TrainOrderResponse:
    """Pretend to book a train ticket; always succeeds with a fixed order id (stub)."""
    result = TrainOrderResponse(order_id="TRAIN987654", status="success")
    return result

def recommend_sights(req: SightRecommendRequest) -> SightRecommendResponse:
    """Return a fixed list of sight names (stub data)."""
    picks = ["景点A", "景点B", "景点C"]
    return SightRecommendResponse(sights=picks)

def get_sight_info(req: SightInfoRequest) -> SightInfoResponse:
    """Return a canned introduction for the named sight (stub data)."""
    sight = req.sight_name
    return SightInfoResponse(name=sight, intro=f"{sight}是一个著名景点，适合旅游观光。")



def make_travel_plan(req: TravelPlanRequest) -> str:
    """Build the complete (non-streaming) travel-plan text as one string."""
    sections = [
        "为您定制的旅游计划：",
        f"出发地/目的地：{','.join(req.locations)}",
        f"日期：{','.join(req.dates)}",
        "推荐景点：景点A、景点B、景点C",
        "推荐美食：美食1、美食2、美食3",
        "祝您旅途愉快！",
    ]
    return "\n".join(sections)



# --------- LLM agent with automatic tool calling ---------
async def llm_agent_answer(question: str) -> str:
    """
    Answer *question* via a function-calling agent loop.

    1. Fetch all tool descriptions (the local FastAPI openapi.json).
    2. Hand them to deepseek so it acts as a function-calling agent.
    3. Let deepseek plan and call tools repeatedly until it returns a
       plain-text final answer, which is returned to the caller.
    """
    # Dynamically fetch the MCP tool schema; openapi.json doubles as the tool description.
    print("[Agent] 用户问题:", question, flush=True)
    # 1. Fetch openapi.json and convert it to a function-calling tools schema.
    async with httpx.AsyncClient(timeout=30) as http_client:
        try:
            resp = await http_client.get("http://127.0.0.1:8000/openapi.json")
            print("[Agent] 获取到 openapi.json", flush=True)
            openapi_schema = resp.json()
            print("[Agent] 获取到 openapi schema", flush=True)
        except Exception as e:
            # Best-effort: with an empty schema the agent simply has no tools.
            print("[Agent] 获取 openapi schema 失败:", e, flush=True)
            openapi_schema = {}
    tools = openapi_to_tools(openapi_schema)
    print("[Agent] tools schema:", json.dumps(tools, ensure_ascii=False, indent=2), flush=True)

    # 2. Multi-turn function-calling loop with request_id-keyed history.
    # NOTE(review): the request_id is cached on the function object itself, so
    # after the first call EVERY call in this process shares one request_id
    # (and one history). Confirm whether per-conversation ids were intended;
    # concurrent users will currently see each other's history.
    request_id = getattr(llm_agent_answer, "_current_request_id", None)
    if not request_id:
        request_id = str(uuid.uuid4())
        llm_agent_answer._current_request_id = request_id
    history = _chat_history[request_id]
    # Insert the system prompt only once, at the front of the history.
    if not history or history[0]["role"] != "system":
        history.appendleft({"role": "system", "content": "你是一个智能旅游助手，具备工具调用能力。如果用户提出的问题与旅游无关，可以引导用户提问。如果缺少必要信息，可以主动询问用户。"})
    # Append this turn's user question to the persistent history.
    history.append({"role": "user", "content": question})
    # Prompt window: only the most recent MAX_HISTORY*2 messages.
    messages = list(history)[-MAX_HISTORY*2:]

    while True:
        print("[Agent] 当前对话历史:", json.dumps([c(m) for m in messages], ensure_ascii=False, indent=2), flush=True)
        # NOTE(review): this is the *synchronous* SDK call inside an async
        # function — it blocks the event loop for the whole round trip.
        response = client.chat.completions.create(
            model=DEEPSEEK_MODEL,
            messages=messages,
            tools=tools
        )

        message = response.choices[0].message
        print("[Agent] LLM 回复:", message, flush=True)
        if hasattr(message, "tool_calls") and message.tool_calls:
            # Tool round: echo the assistant message, execute every requested
            # tool, and feed results back in; only the transient `messages`
            # list grows here, never the persistent `history`.
            messages.append(message)
            # Intentionally not written to history
            for tool_call in message.tool_calls:
                tool_name = tool_call.function.name  # SDK exposes name under .function
                arguments = json.loads(tool_call.function.arguments)
                print(f"[Agent] 调用本地工具: {tool_name}, 参数: {arguments}", flush=True)
                tool_result = await call_local_tool(tool_name, arguments)
                print(f"[Agent] 工具 {tool_name} 调用结果: {tool_result}", flush=True)
                tool_msg = {
                    "role": "tool",
                    "tool_call_id": tool_call.id,
                    "content": tool_result
                }
                messages.append(tool_msg)
                # Intentionally not written to history
        else:
            # Final answer: record it in the persistent history and return.
            history.append({"role": "assistant", "content": message.content})
            return message.content

# Dispatch a tool call to its local FastAPI endpoint.
async def call_local_tool(tool_name, arguments):
    """POST *arguments* as JSON to the endpoint mapped to *tool_name*.

    Returns the raw response body text; unknown tools and transport
    failures are reported as plain strings so the agent loop can feed
    them back to the LLM instead of crashing.
    """
    base = "http://127.0.0.1:8000"
    endpoints = {
        "hotel_recommend": "/hotel/recommend",
        "hotel_order": "/hotel/order",
        "weather_query": "/weather/query",
        "train_query": "/train/query",
        "train_order": "/train/order",
        "sight_recommend": "/sight/recommend",
        "sight_info": "/sight/info",
        "plan_make": "/plan/make",
        "plan_make_stream": "/plan/make/stream",  # streaming variant
    }
    path = endpoints.get(tool_name)
    if path is None:
        return f"工具 {tool_name} 未实现本地调用"
    async with httpx.AsyncClient(timeout=30) as session:
        try:
            resp = await session.post(base + path, json=arguments)
            return resp.text
        except Exception as e:
            return f"调用本地工具 {tool_name} 失败: {e}"

def openapi_to_tools(openapi_schema):
    """
    Convert an openapi.json document into the `tools` list required by the
    deepseek / OpenAI function-calling API.

    For every operation, the application/json request-body schema (with one
    level of $ref resolution against components/schemas) becomes the
    function's `parameters` object, and its operationId becomes the
    function name.

    Fixes vs. previous revision:
    - removed a stray debug leftover (`print(json.dumps)`)
    - operations without an operationId are skipped instead of raising KeyError
    - non-dict path entries (e.g. path-level "parameters") are ignored
    """
    tools = []
    paths = openapi_schema.get("paths", {})
    components = openapi_schema.get("components", {}).get("schemas", {})

    def resolve_schema(schema):
        # Resolve a top-level $ref against components/schemas (one level only;
        # nested refs inside properties are left as-is, matching prior behavior).
        if "$ref" in schema:
            ref_name = schema["$ref"].split("/")[-1]
            return components.get(ref_name, {})
        return schema

    for path, methods in paths.items():
        for method, detail in methods.items():
            if not isinstance(detail, dict):
                continue  # e.g. a path-level "parameters" list, not an operation
            op_id = detail.get("operationId")
            if not op_id:
                continue  # a tool without a name cannot be called by the LLM
            # Only the application/json request body (if present) is mapped.
            schema = (
                detail.get("requestBody", {})
                .get("content", {})
                .get("application/json", {})
                .get("schema", {})
            )
            schema = resolve_schema(schema)
            properties = schema.get("properties", {})
            parameters = {
                "type": "object",
                "properties": {
                    k: {
                        "type": v.get("type", "string"),
                        "description": v.get("description", ""),
                    }
                    for k, v in properties.items()
                },
                "required": schema.get("required", []),
            }
            tools.append({
                "type": "function",
                "function": {
                    "name": op_id,
                    "description": detail.get("summary", detail.get("description", "")),
                    "parameters": parameters,
                },
            })
    return tools

def c(msg):
    """Normalize a chat message into a plain JSON-serializable value.

    dicts pass through untouched; SDK ChatCompletionMessage-like objects
    (anything exposing .role and .content) become dicts, with tool_calls
    expanded when present; everything else is stringified.
    """
    if isinstance(msg, dict):
        return msg
    if not (hasattr(msg, "role") and hasattr(msg, "content")):
        return str(msg)
    out = {"role": msg.role, "content": msg.content}
    calls = getattr(msg, "tool_calls", None)
    if calls:
        out["tool_calls"] = [
            {
                "id": call.id,
                "type": call.type,
                "function": {
                    "name": call.function.name,
                    "arguments": call.function.arguments,
                },
            }
            for call in calls
        ]
    return out

# print("[Agent] 当前对话历史:", json.dumps([message_to_dict(m) for m in messages], ensure_ascii=False, indent=2))
