# OpenMMLab (mmcv) runner/hook integration for NVTX profiling.
# Implementation reference: https://blog.csdn.net/qq_16137569/article/details/121195529

from mmcv.runner import HOOKS, Hook
from mmcv.runner import EpochBasedRunner,IterBasedRunner
from mmcv.runner.builder import RUNNERS

import nvtx
from ..torch import Snapshot, NVTXModuleWrapper

@RUNNERS.register_module()
class NVTXIterBasedRunner(IterBasedRunner):
    """IterBasedRunner variant that emits NVTX ranges around training phases.

    On construction it wraps the model in an ``NVTXModuleWrapper`` (which
    exposes backward pre/post callbacks) and registers an
    ``NVTXRunnerHook`` at the lowest priority.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Wrapper provides _backward_pre/post_hook_fn used in train().
        self.wrapper = NVTXModuleWrapper(self.model)
        self.register_hook(NVTXRunnerHook(), priority='LOWEST')

    def call_hook(self, fn_name: str) -> None:
        """Invoke ``fn_name`` on every registered hook.

        The two dataloader-phase callbacks are defined only on
        ``NVTXRunnerHook``; every other hook is skipped for those names so
        ``getattr`` cannot fail on hooks that lack them.
        """
        nvtx_only = fn_name in ('before_dataloader_iter', 'after_dataloader_iter')
        for hook in self._hooks:
            if nvtx_only and not isinstance(hook, NVTXRunnerHook):
                continue
            getattr(hook, fn_name)(self)

    # Re-implementation of mmcv.runner.IterBasedRunner.train(), with extra
    # hook calls bracketing the batch fetch and the backward phase.
    def train(self, data_loader, **kwargs):
        self.model.train()
        self.mode = 'train'
        self.data_loader = data_loader
        self._epoch = data_loader.epoch
        # Measure the time spent fetching the next batch.
        self.call_hook('before_dataloader_iter')
        batch = next(data_loader)
        self.data_batch = batch
        self.call_hook('after_dataloader_iter')
        self.call_hook('before_train_iter')
        outputs = self.model.train_step(batch, self.optimizer, **kwargs)
        if not isinstance(outputs, dict):
            raise TypeError('model.train_step() must return a dict')
        if 'log_vars' in outputs:
            self.log_buffer.update(outputs['log_vars'], outputs['num_samples'])
        self.outputs = outputs
        # NOTE(review): the backward pass presumably runs inside the
        # 'after_train_iter' hooks (mmcv OptimizerHook), hence the wrapper's
        # backward callbacks are fired manually around that call — confirm.
        self.wrapper._backward_pre_hook_fn(self.model, None, None)
        self.call_hook('after_train_iter')
        self.wrapper._backward_post_hook_fn(self.model, None, None)
        del self.data_batch
        self._inner_iter += 1
        self._iter += 1


@HOOKS.register_module()
class NVTXRunnerHook(Hook):
    """Hook that brackets runner phases with named NVTX push/pop ranges.

    usage: runner.register_hook(NVTXRunnerHook())
    """

    def __init__(self):
        super().__init__()
        # Byte counters: written here but never read in this file —
        # presumably consumed by external tooling; verify before removing.
        self.parameters_bytes = 0
        self.activations_bytes = 0
        self.gradients_bytes = 0
        self.optimizer_bytes = 0
        self.total_bytes = 0

    # ---- whole run ------------------------------------------------------
    def before_run(self, runner):
        nvtx.push_range("train_run")

    def after_run(self, runner):
        nvtx.pop_range()

    # ---- epochs ---------------------------------------------------------
    def before_epoch(self, runner):
        nvtx.push_range(f"epoch_{runner.epoch}")

    def after_epoch(self, runner):
        nvtx.pop_range()

    def before_train_epoch(self, runner):
        nvtx.push_range(f"epoch_{runner.epoch}_train")

    def after_train_epoch(self, runner):
        nvtx.pop_range()

    # ---- iterations -----------------------------------------------------
    def before_iter(self, runner):
        nvtx.push_range(f"iter_{runner.iter}")

    def after_iter(self, runner):
        nvtx.pop_range()

    def before_train_iter(self, runner):
        nvtx.push_range(f"epoch_{runner.epoch}_iter_{runner.iter}_train")

    def after_train_iter(self, runner):
        nvtx.pop_range()

    # ---- data loading (called only by NVTXIterBasedRunner) --------------
    def before_dataloader_iter(self, runner):
        nvtx.push_range(f"epoch_{runner.epoch}_iter_{runner.iter}_load")

    def after_dataloader_iter(self, runner):
        nvtx.pop_range()

