from typing import List, Union, Optional
from uuid import uuid4, UUID

from langchain_core.messages import HumanMessage, SystemMessage, AIMessage
from pydantic import Field

from app.agent.react import ReActAgent
from app.config import config
from app.exception import TokenLimitExceeded
from app.logger import logger
from app.prompt.toolcall import SYSTEM_PROMPT, STUCK_PROMPT


class ToolCallAgent(ReActAgent):
    """Agent that queries the LLM with function/tool calling and tracks
    per-thread token usage against per-round and per-conversation budgets."""

    # Internal storage for the thread id; exposed via the `thread_id` property below.
    thread_id_internal: Union[UUID, str] = Field(default_factory=uuid4)

    # BUG FIX: was `Optional[None, str]`, which is invalid typing syntax
    # (Optional takes exactly one type argument and raises TypeError at class
    # creation time). `Optional[str]` is the intended annotation.
    system_prompt: Optional[str] = SYSTEM_PROMPT
    # Messages submitted for the current round; dicts or HumanMessage objects.
    input_msg_of_this_round: List[Union[dict, HumanMessage]] = Field(default_factory=list)
    new_conversation: bool = Field(default=True, description="Whether to start a new conversation")

    # Token budgets read from config: one per request round, one for the whole thread.
    max_input_tokens_each_round: int = int(config.get("token_limit", "max_input_tokens_each_round"))
    max_input_tokens_each_conversation: int = int(config.get("token_limit", "max_input_tokens_each_conversation"))
    total_input_tokens: int = Field(default=0)
    total_output_tokens: int = Field(default=0)

    @property
    def thread_id(self):
        """Current conversation thread id."""
        return self.thread_id_internal

    @thread_id.setter
    def thread_id(self, value):
        """Set the thread id; fires `on_thread_id_change` when it actually changes."""
        old_value = self.thread_id_internal
        self.thread_id_internal = value
        if old_value != value:
            self.on_thread_id_change()

    def on_thread_id_change(self):
        """Reset per-thread accounting when the conversation thread switches."""
        self.total_input_tokens = 0
        self.total_output_tokens = 0
        self.new_conversation = True

    def update_token(self, input_tokens: int, ouput_tokens: int = 0) -> None:
        """Accumulate this round's token counts into the thread totals and log them.

        Args:
            input_tokens: Input tokens consumed this round.
            ouput_tokens: Output tokens produced this round.
                NOTE(review): parameter name has a typo ("ouput"); kept as-is
                so any keyword-argument callers remain compatible.
        """
        self.total_input_tokens += input_tokens
        self.total_output_tokens += ouput_tokens
        logger.info(
            f"Tokens of this round: input: {input_tokens}, output: {ouput_tokens}, sum: {input_tokens + ouput_tokens}. "
            f"Total tokens of this thread: input: {self.total_input_tokens}, output: {self.total_output_tokens}, "
            f"sum: {self.total_input_tokens + self.total_output_tokens}."
        )

    def token_enough(self, input_tokens: int) -> None:
        """Check that `input_tokens` fits within both token budgets.

        This is now a pure check. BUG FIX: the previous implementation also
        added `input_tokens` to `total_input_tokens` here, and `execute` then
        added the same round's input again via `update_token`, double-counting
        every round. Accounting is left to `update_token` alone.

        Args:
            input_tokens: Number of input tokens to check.

        Raises:
            TokenLimitExceeded: If the input exceeds the per-round limit, or
                would push the conversation past its total input limit.
        """
        if input_tokens > self.max_input_tokens_each_round:
            error_msg = f"请求超出每次输入长度限制，请精简输入。限制: {self.max_input_tokens_each_round}，输入: {input_tokens}"
            raise TokenLimitExceeded(error_msg)

        # Would-be total if this round were accepted; reported in the error message
        # exactly as the old post-mutation total was.
        projected_total = self.total_input_tokens + input_tokens
        if projected_total > self.max_input_tokens_each_conversation:
            error_msg = (f"请求超出这轮对话总输入长度限制，请开启新对话。"
                         f"限制: {self.max_input_tokens_each_conversation}，输入: {projected_total}")
            raise TokenLimitExceeded(error_msg)

    async def execute(self) -> list:
        """Ask the LLM using functions/tools and return all streamed states.

        Returns:
            Every state snapshot yielded by the agent stream; the last element
            (if any) is the final state.

        Raises:
            TokenLimitExceeded: If the locally-computed input token count
                exceeds the per-round or per-conversation budget.
        """
        res: List = []

        # On follow-up turns, inject a system nudge if the agent looks stuck.
        if not self.new_conversation and self.is_stuck:
            self.input_msg_of_this_round.append({"role": "system", "content": STUCK_PROMPT})

        # Count input tokens locally first so we can fail fast, before any LLM call.
        input_tokens = self.token_counter.count_msg(self.input_msg_of_this_round)
        self.token_enough(input_tokens)

        async for step in self.agent.astream(
                input={"messages": self.input_msg_of_this_round},
                config={"configurable": {"thread_id": self.thread_id}},
                stream_mode="values"
        ):
            res.append(step)
            output_msg = step["messages"][-1]
            if isinstance(output_msg, AIMessage):
                # Only AI turns carry billable output; recount the round and record it.
                input_tokens, output_tokens = self.token_counter.count_one_round(self.input_msg_of_this_round,
                                                                                 output_msg)
                self.update_token(input_tokens, output_tokens)

        self.new_conversation = False
        # BUG FIX: guard against an empty stream — res[-1] raised IndexError
        # when the agent yielded no steps.
        if res:
            self.check_stuck(res[-1])
        return res

    async def run(self) -> list:
        """Run the agent with cleanup when done (delegates to the parent)."""
        return await super().run()
