import mindspore
import mindspore.ops as ops
from mindspore import Parameter, Tensor
from mindspore import dtype as mstype
from mindspore import nn
from mindspore.common.initializer import Normal, initializer
from mindspore.ops import stop_gradient


def l2normalize(v, eps=1e-12):
    """Scale tensor *v* to unit L2 norm along axis 0.

    Args:
        v (Tensor): vector (or column-stacked matrix) to normalize.
        eps (float): small constant added to the norm to avoid division
            by zero. Default: 1e-12.

    Returns:
        Tensor: ``v / (||v||_2 + eps)``; for a single-element input a
        constant ``[1.]`` float32 tensor is returned instead.
    """
    # Degenerate case: a single element normalizes to magnitude 1.
    # NOTE(review): this discards the sign of a scalar input — confirm
    # that callers only rely on the magnitude.
    if v.size <= 1:
        return Tensor([1.], dtype=mindspore.float32)
    # ops.LpNorm(axis=0, p=2)(v) is the equivalent primitive form.
    norm = v.norm(axis=0, p=2)
    return v / (norm + eps)


class SpectralNorm(nn.Cell):
    """Spectral normalization wrapper for a MindSpore layer.

    Wraps ``module`` and, on every forward pass, rescales its ``name``
    weight by an estimate of its largest singular value (the spectral
    norm), following the SNGAN technique. The estimate is obtained with
    ``power_iterations`` rounds of the power method.

    Args:
        module (nn.Cell): layer whose weight is to be normalized.
        name (str): attribute name of the weight on ``module``.
            Default: ``'weight'``.
        power_iterations (int): power-method steps per forward call.
            Default: 1.
    """

    def __init__(self, module, name='weight', power_iterations=1):
        super(SpectralNorm, self).__init__()
        self.module = module
        self.name = name
        self.power_iterations = power_iterations
        # Register the u / v / w_bar parameters exactly once; skip when
        # they already exist (e.g. the module was wrapped previously).
        if not self._made_params():
            self._make_params()

    def _update_u_v(self):
        """Refresh the singular-vector estimates and rescale the weight.

        Sets ``module.<name>`` to ``w_bar / sigma`` where ``sigma`` is the
        power-method estimate of the top singular value of the weight
        viewed as a 2-D matrix (first dim kept, remaining dims flattened).
        """
        u = getattr(self.module, self.name + "_u")
        v = getattr(self.module, self.name + "_v")
        w = getattr(self.module, self.name + "_bar")

        height = w.shape[0]

        # Power iteration: v <- normalize(W^T u), u <- normalize(W v).
        # NOTE(review): the refined u/v are local and never written back
        # to the "_u"/"_v" Parameters, so each call restarts from the
        # initial random vectors. The PyTorch original persists u/v in
        # place — confirm whether that was intended here.
        for _ in range(self.power_iterations):
            v = l2normalize(ops.MatMul()(w.view(height, -1).T, u.view(-1, 1)))
            u = l2normalize(ops.MatMul()(w.view(height, -1), v.view(-1, 1)))

        # sigma = u^T W v, the estimated largest singular value.
        # PyTorch equivalent: torch.dot(u, torch.mv(w.view(height, -1), v))
        sigma = ops.tensor_dot(u.view(-1), ops.MatMul()(w.view(height, -1), v).view(-1), axes=1)  # .astype(mstype.float16)
        setattr(self.module, self.name, w / sigma)
        return True

    def _made_params(self):
        """Return True if the spectral-norm parameters already exist on the module."""
        try:
            # Accessing all three verifies a complete prior registration.
            u = getattr(self.module, self.name + "_u")
            v = getattr(self.module, self.name + "_v")
            w = getattr(self.module, self.name + "_bar")
            return True
        except AttributeError:
            return False

    def _make_params(self):
        """Create the u / v / w_bar parameters from the module's weight.

        Replaces ``module.<name>`` with three attributes:
        ``<name>_u`` and ``<name>_v`` — random unit vectors (frozen,
        ``requires_grad=False``) seeding the power iteration — and
        ``<name>_bar`` — the trainable unnormalized weight.
        """
        w = getattr(self.module, self.name)

        # Matrix view of the weight: (height, width) with trailing dims flattened.
        height = w.shape[0]
        width = w.reshape(height, -1).shape[1]

        u = mindspore.numpy.randn(height)
        v = mindspore.numpy.randn(width)

        u = l2normalize(u)
        v = l2normalize(v)

        # Remove the original weight so setattr in _update_u_v re-creates
        # it as a plain (normalized) tensor each forward pass.
        delattr(self.module, self.name)
        w_bar = Parameter(w.data)

        setattr(self.module, self.name + "_u", Parameter(u, requires_grad=False))
        setattr(self.module, self.name + "_v", Parameter(v, requires_grad=False))
        setattr(self.module, self.name + "_bar", w_bar)

    def construct(self, *inputs):
        """Normalize the wrapped weight, then run the wrapped module."""
        self._update_u_v()
        return self.module(*inputs)
