import functools
from typing import TYPE_CHECKING, Callable, TypeVar, Union

from tqdm import tqdm
from zkl_training import ProcessingTaskPlugin

from .utils import torch_distributed_get_info

if TYPE_CHECKING:
    from .training import GPTTraining
    from .validating import GPTValidating


class TqdmPlugin(ProcessingTaskPlugin):
    """Task plugin that renders a tqdm token-count progress bar.

    Only the process with distributed rank 0 owns a bar; every other
    rank is a no-op. The bar's total is the smaller of the iterator's
    token budget and the task's configured token budget (either may be
    None, in which case the other — or no total at all — is used).
    """

    def __init__(self):
        super().__init__()
        # Rank decides whether this plugin does anything at all.
        self.process_rank, _ = torch_distributed_get_info()
        # Created lazily in on_before_run, and only on rank 0.
        self.progress_tqdm: tqdm | None = None

    @property
    def task(self) -> Union['GPTTraining', 'GPTValidating']:
        # Imported here (not at module top) to avoid a circular import.
        from .training import GPTTraining
        from .validating import GPTValidating
        task = super().task
        assert isinstance(task, (GPTTraining, GPTValidating))
        return task

    def on_before_run(self):
        super().on_before_run()

        # Guard clause: non-zero ranks never show a bar.
        if self.process_rank != 0:
            return

        from .training import GPTTraining
        from .validating import GPTValidating

        task = self.task
        # Pick the token budget matching the concrete task type.
        if isinstance(task, GPTTraining):
            total_tokens_n = reduce_with_none(
                min,
                task.iterator.total_tokens_n,
                task.training_hparams.train_tokens_n)
        elif isinstance(task, GPTValidating):
            total_tokens_n = reduce_with_none(
                min,
                task.iterator.total_tokens_n,
                task.valid_hparams.valid_tokens_n)
        else:
            raise AssertionError(f"Unexpected task of type {type(self.task)}")

        self.progress_tqdm = tqdm(
            initial=task.progress_tokens_n,
            total=total_tokens_n,
            unit='token', unit_scale=True)

    def on_after_step(self):
        super().on_after_step()

        if self.process_rank != 0:
            return

        # Advance the bar by however many tokens were processed since
        # the last update (bar.n tracks the already-displayed count).
        bar = self.progress_tqdm
        bar.update(self.task.progress_tokens_n - bar.n)


# utils

T = TypeVar('T')


def reduce_with_none(reduce_func: Callable[[T, T], T], *values: T | None):
    values = tuple(filter(lambda value: value is not None, values))
    if len(values) == 0:
        return None
    if len(values) == 1:
        return values[0]
    reduced_value = values[0]
    for value in values[1:]:
        reduced_value = reduce_func(reduced_value, value)
    return reduced_value
