import json
import os
from typing import Optional, Any
from uuid import UUID

import dotenv
from langchain_core.agents import AgentAction
from langchain_core.callbacks import BaseCallbackHandler, CallbackManager
from langchain_core.messages import HumanMessage, SystemMessage
from langchain_core.outputs import LLMResult
from langchain_core.tools import tool

from langchain_openai import ChatOpenAI
from langgraph.prebuilt import create_react_agent

dotenv.load_dotenv()  # pull DS_BASE / DS_API_KEY (read below) from a local .env file


class TokenUsageCallback(BaseCallbackHandler):
    """Callback handler that logs LLM token consumption and tool calls.

    Attach via ``ChatOpenAI(callbacks=[TokenUsageCallback(...)])``;
    ``on_llm_end`` fires once per completed LLM round-trip.
    """

    def __init__(self, chat_id: Optional[str] = None):
        # current_chat_id: tag for every log line; falls back to "unknown"
        # when no chat session id is supplied.
        self.current_chat_id = chat_id if chat_id else "unknown"

    def on_llm_end(self, response: LLMResult, *, run_id: UUID, parent_run_id: Optional[UUID] = None,
                   **kwargs: Any) -> Any:
        """Log token usage and any detected tool calls for one LLM response.

        Args:
            response: Aggregated LLM result. ``llm_output`` may be ``None``
                for some providers/streaming modes, so it is guarded below.
            run_id: Unique id of this LLM run.
            parent_run_id: Id of the enclosing chain/agent run, if any.
        """
        log_prefix = f"[ChatID: {self.current_chat_id}, RunID: {run_id}]"

        # llm_output can be None; hoist the token_usage dict once instead of
        # re-fetching it for every counter.
        token_usage = (response.llm_output or {}).get("token_usage", {})
        total_tokens = token_usage.get("total_tokens", 0)
        prompt_tokens = token_usage.get("prompt_tokens", 0)
        completion_tokens = token_usage.get("completion_tokens", 0)

        print(
            f"LLM executed for chat_id: {self.current_chat_id}, "
            f"Total Tokens: {total_tokens}, "
            f"Prompt Tokens: {prompt_tokens}, "
            f"Completion Tokens: {completion_tokens}"
        )

        # Safely record tool-call information, accepting both the typed
        # ``message.tool_calls`` attribute and the raw provider payload in
        # ``additional_kwargs``.
        for generation_chunk in response.generations:
            for generation in generation_chunk:
                message = getattr(generation, 'message', None)
                if not message:
                    continue

                tool_calls = (getattr(message, 'tool_calls', [])
                              or message.additional_kwargs.get('tool_calls', []))
                if not tool_calls:
                    continue

                print(f"{log_prefix} Tool Calls Detected:")
                for tool_call in tool_calls:
                    # Typed tool calls carry 'name'/'args' at the top level;
                    # raw OpenAI-style payloads nest them under 'function'.
                    name = tool_call.get('name') or tool_call.get('function', {}).get('name', 'unknown_tool')
                    args = tool_call.get('args', tool_call.get('function', {}).get('arguments', {}))
                    tool_id = tool_call.get('id', 'unknown_id')

                    # Providers often send arguments as a JSON string; parse
                    # into a dict when possible, otherwise keep the raw string.
                    if isinstance(args, str):
                        try:
                            args = json.loads(args)
                        except json.JSONDecodeError:
                            pass  # keep the original string

                    print(f"{log_prefix}   - Tool: {name}, Args: {args}, ID: {tool_id}")


@tool
def get_weather(city: str) -> str:
    """Look up the temperature for the given city.

    Args:
        city: Name of the city to query.

    Returns:
        A short weather description for that city.
    """
    return f"{city} 今天天气晴朗,温度26°"


# ChatOpenAI client pointed at a DeepSeek-compatible endpoint; base URL and
# API key come from the environment (loaded by dotenv above).
# NOTE(review): callbacks registered here fire for every LLM call routed
# through this client, including calls issued by the agent below.
llm = ChatOpenAI(
    model="deepseek-chat",
    base_url=os.getenv("DS_BASE"),
    api_key=os.getenv("DS_API_KEY"),
    temperature=0,
    callbacks=[TokenUsageCallback()],
)
# .bind_tools([get_weather]))

# Prebuilt ReAct-style agent: the model decides when to invoke get_weather
# and loops until it can produce a final answer.
agent = create_react_agent(
    model=llm,
    tools=[get_weather],
    prompt="你是一个智能助手,能够帮助用户解决问题",
)

# Single blocking invocation. NOTE(review): stream_mode primarily shapes
# output for .stream(); for .invoke() "values" yields the final state —
# confirm this option is intentional here.
ret = agent.invoke(
    {
        "messages": [HumanMessage(content="今天西安的天气怎么样?")]
    },
    stream_mode="values"
)

# prompts = [
#     SystemMessage(content="你是一个智能助手,能够帮助用户解决问题"),
#     HumanMessage(content="今天西安的天气怎么样?"),
# ]
#
# ret = llm.invoke(prompts)

# Final agent state (includes the full message history with the answer).
print(ret)
