import logging
import logging.config
import os
from contextlib import nullcontext
from logging import LoggerAdapter
from typing import Any, Callable, ClassVar, Dict, MutableMapping, Tuple, Union
from rich.console import Console
from rich.theme import Theme
import warnings
from transformers import logging as hf_logging
from transformers.utils import logging as hf_logging_tq
import transformers.trainer_callback
import tqdm.rich
from rich.progress import (
    ProgressColumn,
    Text,
    filesize,
)
import datasets.arrow_dataset
from importlib import import_module

# Handle to the `datasets` tqdm helper module; used by `ds_tqdm` below to check
# whether progress bars have been globally disabled.
ds_tm = import_module("datasets.utils.tqdm")


class ds_tqdm(tqdm.rich.tqdm):
    """
    Rich-backed tqdm subclass that honors `datasets`' global progress-bar switch.

    Overriding `disable` at construction time is taken from
    https://github.com/tqdm/tqdm/issues/619#issuecomment-619639324.
    """

    def __init__(self, *args, **kwargs):
        # If `datasets` has globally disabled progress bars, force-disable this
        # bar regardless of what the caller asked for.
        if ds_tm.are_progress_bars_disabled():
            kwargs = {**kwargs, "disable": True}
        super().__init__(*args, **kwargs)

    def __delattr__(self, attr: str) -> None:
        """Fix for https://github.com/huggingface/datasets/issues/6066"""
        try:
            super().__delattr__(attr)
        except AttributeError:
            # `_lock` may already be gone during teardown; swallow only that case.
            if attr == "_lock":
                return
            raise


class _tqdm_cls:
    """Callable tqdm factory for transformers: rich bar when progress bars are
    active, a no-op `EmptyTqdm` otherwise. Also forwards the lock API."""

    def __call__(self, *args, **kwargs):
        # Pick the backing class once, then construct it.
        bar_cls = tqdm.rich.tqdm if hf_logging_tq._tqdm_active else hf_logging_tq.EmptyTqdm
        return bar_cls(*args, **kwargs)

    def set_lock(self, *args, **kwargs):
        # Clear the local handle, then delegate to the real class only when
        # progress bars are active.
        self._lock = None
        if hf_logging_tq._tqdm_active:
            return tqdm.rich.tqdm.set_lock(*args, **kwargs)

    def get_lock(self):
        if hf_logging_tq._tqdm_active:
            return tqdm.rich.tqdm.get_lock()


class RateColumn(ProgressColumn):
    """Renders human readable transfer speed (or seconds-per-item when slow)."""

    def __init__(self, unit="", unit_scale=False, unit_divisor=1000):
        """
        Args:
            unit: Label appended after the magnitude suffix (e.g. "B" -> "KB/s").
            unit_scale: Whether to scale the rate with K/M/G/... suffixes.
            unit_divisor: Divisor used between suffixes when scaling (1000 or 1024).
        """
        self.unit = unit
        self.unit_scale = unit_scale
        self.unit_divisor = unit_divisor
        super().__init__()

    def render(self, task):
        """Show data transfer speed for `task` as a rich `Text` renderable."""
        speed = task.speed
        # `speed` is None before enough samples exist; a speed of exactly 0
        # would hit a ZeroDivisionError in the `unit / speed` branch below,
        # so treat both as "unknown rate".
        if not speed:
            return Text(f"? {self.unit}/s", style="progress.data.speed")
        if self.unit_scale:
            unit, suffix = filesize.pick_unit_and_suffix(
                speed,
                ["", "K", "M", "G", "T", "P", "E", "Z", "Y"],
                self.unit_divisor,
            )
        else:
            unit, suffix = filesize.pick_unit_and_suffix(speed, [""], 1)
        precision = 2 if unit == 1 else 1
        if speed / unit > 1:
            # Faster than one (scaled) unit per second: show units/s.
            return Text(
                f"{speed / unit:,.{precision}f} {suffix}{self.unit}/s",
                style="progress.data.speed",
            )
        else:
            # Slower than one per second: show the inverse, seconds per unit.
            return Text(
                f"{unit / speed:,.{precision}f} s/{suffix}{self.unit}",
                style="progress.data.speed",
            )


# === Global monkey-patches: route all progress bars through rich-backed tqdm ===
tqdm.rich.RateColumn = RateColumn  # swap in the RateColumn defined above
datasets.arrow_dataset.hf_tqdm = ds_tqdm  # `datasets` table ops use the disable-aware bar
hf_logging_tq.tqdm = _tqdm_cls()  # transformers' tqdm factory (honors `_tqdm_active`)
transformers.trainer_callback.tqdm = hf_logging_tq.tqdm  # Trainer callbacks use the same wrapper

# Quiet transformers logging and suppress FutureWarnings process-wide.
hf_logging.set_verbosity_error()
warnings.filterwarnings("ignore", category=FutureWarning)

# Rich theme for console output. All overrides are currently commented out,
# so the default rich theme colors apply; the entries are kept as a template.
custom_theme = Theme(
    {
        # "logging.level.debug": "cyan",
        # "logging.level.info": "green",
        # "logging.level.warning": "yellow",
        # "logging.level.error": "red",
        # "logging.level.critical": "bold red",
        # "logging.level": "bold bright_black",
    }
)

# Overwatch Compact Format String: message format and timestamp format
# (DATEFMT is also used as the RichHandler time column format below).
RICH_FORMATTER, DATEFMT = (
    "[%(asctime)s] %(levelname)s %(filename)s: %(message)s",
    "[%m/%d %H:%M]",
)


# Set Logging Configuration
# Set Logging Configuration.
# NOTE(review): the previous config carried top-level "format", "datefmt", and
# "level" keys, which are not part of the `logging.config.dictConfig` schema
# and were silently ignored — they are dropped here (no behavior change).
# RichHandler renders the time/level columns itself (see `log_time_format`),
# so no explicit formatter is configured.
LOG_CONFIG = {
    "version": 1,
    "disable_existing_loggers": True,
    "handlers": {
        "console": {
            "class": "rich.logging.RichHandler",
            "rich_tracebacks": True,  # pretty, syntax-highlighted tracebacks
            "log_time_format": DATEFMT,
            "console": Console(force_terminal=True, color_system="auto"),
        }
    },
    # Root logger stays at ERROR; the Overwatch classes below raise their own
    # named loggers to INFO where appropriate.
    "root": {"level": "ERROR", "handlers": ["console"]},
}
logging.config.dictConfig(LOG_CONFIG)


def setup():
    """Initialize the global torch.distributed process group from launcher env vars.

    Reads LOCAL_RANK / RANK / WORLD_SIZE / MASTER_ADDR / MASTER_PORT (raising
    KeyError if any is missing), pins this process to its local GPU, initializes
    the NCCL process group, and silences progress bars on non-zero ranks.
    """
    import torch
    import torch.distributed as dist
    from datetime import timedelta
    # Pull the key parameters from the environment (set by the launcher, e.g. torchrun).
    local_rank = int(os.environ["LOCAL_RANK"])  # this process's GPU index (per machine)
    global_rank = int(os.environ["RANK"])  # global process index (across all machines)
    world_size = int(os.environ["WORLD_SIZE"])  # total number of processes
    # The next two are not passed explicitly below ("env://" reads them itself);
    # reading them here fails fast with KeyError if they are unset.
    master_addr = os.environ["MASTER_ADDR"]  # master node IP (e.g. "127.0.0.1")
    master_port = os.environ["MASTER_PORT"]  # master node port (e.g. "29500")

    # Bind this process to its GPU before initializing NCCL.
    torch.cuda.set_device(local_rank)

    # Initialize the process group (NCCL backend).
    dist.init_process_group(
        backend="nccl",
        init_method="env://",  # reads MASTER_ADDR:MASTER_PORT from the environment
        world_size=world_size,
        rank=global_rank,
        device_id=torch.device("cuda", local_rank),  # NOTE(review): `device_id` needs a recent torch — confirm minimum version
        timeout=timedelta(hours=8),
    )
    # Only rank 0 keeps progress bars; all other ranks stay quiet.
    if dist.get_rank() != 0:
        hf_logging.disable_progress_bar()
        datasets.disable_progress_bars()


# Run distributed setup only under a distributed launcher (torchrun sets
# LOCAL_RANK; a plain `python` invocation does not).
if os.environ.get("LOCAL_RANK"):
    setup()


# === Custom Contextual Logging Logic ===
class ContextAdapter(LoggerAdapter):
    """LoggerAdapter that prefixes each message with a nesting-level marker.

    Level 0 messages get "[*] "; levels 1-3 get an "|=> " arrow right-justified
    four columns deeper per level. The level is taken from a `ctx_level` kwarg
    on the logging call (default 0) and stripped before dispatch.
    """

    CTX_PREFIXES: ClassVar[Dict[int, str]] = {
        0: "[*] ",
        1: "|=> ".rjust(8),
        2: "|=> ".rjust(12),
        3: "|=> ".rjust(16),
    }

    def process(
        self, msg: str, kwargs: MutableMapping[str, Any]
    ) -> Tuple[str, MutableMapping[str, Any]]:
        # Pop `ctx_level` so the underlying logger never sees it.
        level = kwargs.pop("ctx_level", 0)
        prefix = self.CTX_PREFIXES[level]
        return prefix + msg, kwargs


class DistributedOverwatch:
    """Rank-aware logging wrapper backed by `accelerate.PartialState`."""

    def __init__(self, name: str) -> None:
        """Initializer for an Overwatch object that wraps logging & `accelerate.PartialState`."""
        self.logger = ContextAdapter(logging.getLogger(name), extra={})
        self._state = None

        # Expose the standard logging methods directly on the overwatch object.
        self.debug = self.logger.debug
        self.info = self.logger.info
        self.warning = self.logger.warning
        self.error = self.logger.error
        self.critical = self.logger.critical

        # Main process logs at INFO; every other rank only surfaces errors.
        if self.distributed_state.is_main_process:
            self.logger.setLevel(logging.INFO)
        else:
            self.logger.setLevel(logging.ERROR)

        def _tqdm(*args, **kwargs):
            # Real progress bar on the main process; on other ranks, pass the
            # iterable (first positional arg) through unchanged, or None.
            if self.distributed_state.is_main_process:
                return tqdm.rich.tqdm(*args, **kwargs)
            return args[0] if args else None

        self.tqdm = _tqdm

    @property
    def distributed_state(self):
        # Lazily construct PartialState on first access.
        if self._state is None:
            from accelerate import PartialState
            self._state = PartialState()
        return self._state

    @property
    def rank_zero_only(self) -> Callable[..., Any]:
        """Decorator: run the wrapped fn on the global main process only."""
        return self.distributed_state.on_main_process

    @property
    def local_zero_only(self) -> Callable[..., Any]:
        """Decorator: run the wrapped fn on each node's local main process only."""
        return self.distributed_state.on_local_main_process

    @property
    def rank_zero_first(self) -> Callable[..., Any]:
        """Context manager: global main process enters first, others wait."""
        return self.distributed_state.main_process_first

    @property
    def local_zero_first(self) -> Callable[..., Any]:
        """Context manager: local main process enters first, others wait."""
        return self.distributed_state.local_main_process_first

    def is_rank_zero(self) -> bool:
        return self.distributed_state.is_main_process

    def rank(self) -> int:
        return self.distributed_state.process_index

    def local_rank(self) -> int:
        return self.distributed_state.local_process_index

    def world_size(self) -> int:
        return self.distributed_state.num_processes


class PureOverwatch:
    """Single-process Overwatch: wraps logging with no distributed state.

    Mirrors the `DistributedOverwatch` interface so call sites can use either
    interchangeably (see `initialize_overwatch`).
    """

    def __init__(self, name: str) -> None:
        """Initializer for an Overwatch object that just wraps logging."""
        self.logger = ContextAdapter(logging.getLogger(name), extra={})

        # Expose the standard logging methods directly on the overwatch object.
        self.debug = self.logger.debug
        self.info = self.logger.info
        self.warning = self.logger.warning
        self.error = self.logger.error
        self.critical = self.logger.critical

        # A single-process run is always "rank zero" => always log at INFO.
        self.logger.setLevel(logging.INFO)

    @staticmethod
    def get_identity_ctx() -> Callable[..., Any]:
        """Return a no-op decorator (stand-in for rank-gated decorators)."""
        def identity(fn: Callable[..., Any]) -> Callable[..., Any]:
            return fn

        return identity

    @property
    def rank_zero_only(self) -> Callable[..., Any]:
        return self.get_identity_ctx()

    @property
    def local_zero_only(self) -> Callable[..., Any]:
        return self.get_identity_ctx()

    @property
    def rank_zero_first(self) -> Callable[..., Any]:
        return nullcontext

    @property
    def local_zero_first(self) -> Callable[..., Any]:
        return nullcontext

    @staticmethod
    def is_rank_zero() -> bool:
        return True

    @staticmethod
    def rank() -> int:
        return 0

    @staticmethod
    def local_rank() -> int:
        """Always 0 in a single-process run.

        Added for interface parity with `DistributedOverwatch.local_rank`.
        """
        return 0

    @staticmethod
    def world_size() -> int:
        return 1


def initialize_overwatch(name: str) -> Union[DistributedOverwatch, PureOverwatch]:
    """Build the appropriate Overwatch for this run.

    Distributed when the launcher exported WORLD_SIZE; plain logging otherwise.
    """
    world_size = int(os.environ.get("WORLD_SIZE", -1))
    if world_size == -1:
        return PureOverwatch(name)
    return DistributedOverwatch(name)
