from typing import TYPE_CHECKING, Union

import torch
from tqdm import tqdm
from zkl_training import ProcessingTaskPlugin

from .iterator import compute_total_tokens_n

if TYPE_CHECKING:
    from .training import GPTTraining
    from .validating import GPTValidating


class TqdmPlugin(ProcessingTaskPlugin):
    """Plugin that renders a token-level tqdm progress bar on the rank-0 process.

    The bar's total is the number of tokens the task will process, computed
    from the task's dataset and its token/repeat limits; after every step the
    bar is synced to the task's absolute ``progress_tokens_n`` counter.
    """

    def __init__(self):
        super().__init__()

        # Only rank 0 draws the bar. is_available()/is_initialized() is the
        # documented, version-stable way to detect a missing default process
        # group; the exception type raised by get_rank() in that case has
        # changed across torch versions (RuntimeError vs ValueError), so
        # catching one of them is fragile.
        if torch.distributed.is_available() and torch.distributed.is_initialized():
            self.process_rank = torch.distributed.get_rank()
        else:
            self.process_rank = 0

        # Created in on_before_run on rank 0 only; stays None elsewhere.
        self.progress_tqdm: tqdm | None = None

    @property
    def task(self) -> Union['GPTTraining', 'GPTValidating']:
        """Narrow the base-class task to the two GPT task types this plugin supports."""
        task = super().task
        # Imported locally (mirroring the TYPE_CHECKING guard above) to avoid
        # a circular import at module load time.
        from .training import GPTTraining
        from .validating import GPTValidating
        assert isinstance(task, (GPTTraining, GPTValidating))
        return task

    def on_before_run(self):
        """Create the progress bar on rank 0, sized to the task's total token count."""
        super().on_before_run()

        if self.process_rank == 0:
            from .training import GPTTraining
            from .validating import GPTValidating
            if isinstance(self.task, GPTTraining):
                total_tokens_n = compute_total_tokens_n(
                    dataset=self.task.train_dataset,
                    limit_tokens_n=self.task.hyperparams.train_tokens_n,
                    limit_repeats_n=self.task.hyperparams.train_repeats_n)
            elif isinstance(self.task, GPTValidating):
                total_tokens_n = compute_total_tokens_n(
                    dataset=self.task.dataset,
                    limit_tokens_n=self.task.hyperparams.valid_tokens_n,
                    limit_repeats_n=self.task.hyperparams.valid_repeats_n)
            else:
                raise AssertionError(f"Unexpected task of type {type(self.task)}")

            # `initial` supports resuming a partially completed run: the bar
            # starts at the tokens already processed rather than zero.
            self.progress_tqdm = tqdm(
                initial=self.task.progress_tokens_n,
                total=total_tokens_n,
                unit='token', unit_scale=True)

    def on_after_step(self):
        """Advance the bar by the tokens processed since the last update (rank 0 only)."""
        super().on_after_step()

        if self.process_rank == 0:
            # tqdm.update() takes a delta; derive it from the task's absolute
            # counter so the bar stays correct even if step sizes vary.
            delta_tokens_n = self.task.progress_tokens_n - self.progress_tqdm.n
            self.progress_tqdm.update(delta_tokens_n)
            # NOTE(review): the bar is never close()d; if the base plugin
            # exposes an end-of-run hook, closing there would flush the
            # display cleanly — confirm against ProcessingTaskPlugin's API.
