from torch.nn.utils import clip_grad

from .hook import Hook


class OptimizerHook(Hook):
    """Hook that runs the optimizer step after every training iteration.

    Zeroes gradients, backpropagates the loss found in ``runner.outputs``,
    optionally clips gradients, then steps the optimizer.

    Args:
        grad_clip (dict | None): Keyword arguments forwarded to
            ``torch.nn.utils.clip_grad_norm_`` (e.g. ``max_norm``,
            ``norm_type``). If ``None``, no gradient clipping is performed.
    """

    def __init__(self, grad_clip=None):
        self.grad_clip = grad_clip

    def clip_grads(self, params):
        """Clip gradients of ``params`` in-place by total norm.

        Args:
            params (Iterable[torch.nn.Parameter]): Parameters to clip.

        Returns:
            torch.Tensor | None: Total norm of the clipped parameters as
            returned by ``clip_grad_norm_``, or ``None`` if no parameter
            has a gradient to clip.
        """
        # Only keep parameters that actually received a gradient:
        # clip_grad_norm_ raises on entries whose .grad is None, and on
        # an empty list in older torch versions.
        params = list(
            filter(lambda p: p.requires_grad and p.grad is not None, params))
        if len(params) > 0:
            return clip_grad.clip_grad_norm_(params, **self.grad_clip)
        return None

    def after_train_iter(self, runner):
        """Backward pass and optimizer step for the current iteration."""
        runner.optimizer.zero_grad()
        runner.outputs['loss'].backward()
        if self.grad_clip is not None:
            self.clip_grads(runner.model.parameters())
        runner.optimizer.step()
