import tensorflow as tf
import numpy as np
import mindspore.common.dtype as mstype
from mindspore import Tensor
from mindspore.nn import Cell
from mindspore.ops import operations as op
from ...utils import allclose_nparray
from ...meta import OpsFactory
from mindspore.common.parameter import Parameter
from mindspore import log as logger

# Force TF into graph (v1) mode so the raw_ops + Session code below works as written.
tf.compat.v1.disable_eager_execution()
# Hide all GPUs from TF — presumably so TF does not grab device memory that
# MindSpore needs; TODO(review): confirm this is the intent.
tf.config.experimental.set_visible_devices([], 'GPU')


class ApplyAddSignNet(Cell):
    """ApplyAddSign wrapper holding var/accum as Parameters.

    The scalar hyper-parameters (lr, alpha, sign_decay, beta) and the
    gradient are supplied per call in ``construct``.
    """

    def __init__(self, var_np, accum_np):
        super().__init__()
        self.apply_addsign = op.ApplyAddSign()
        self.var = Parameter(Tensor(var_np), name="var")
        self.accum = Parameter(Tensor(accum_np), name="m")

    def construct(self, lr, alpha, sign_decay, beta, grad):
        # Updates self.var / self.accum in place and returns the op's outputs.
        return self.apply_addsign(self.var, self.accum, lr, alpha,
                                  sign_decay, beta, grad)


class ApplyAddSign(Cell):
    """ApplyAddSign wrapper where every operand except grad is fixed at construction."""

    def __init__(self, var, m, lr, alpha, sign_decay, beta):
        super().__init__()
        self.applyaddsign = op.ApplyAddSign()
        self.var, self.m = var, m
        self.lr, self.alpha = lr, alpha
        self.sign_decay, self.beta = sign_decay, beta

    def construct(self, grad):
        # Single-step update; returns the op's outputs (updated var/m).
        return self.applyaddsign(self.var, self.m, self.lr, self.alpha,
                                 self.sign_decay, self.beta, grad)


class ApplyAddSignFactory(OpsFactory):
    """Cross-checks MindSpore's ApplyAddSign against TF's ResourceApplyAddSign.

    Args:
        var_shape, m_shape: shapes used to draw random ``var``/``m`` tensors.
        lr, alpha, sign_decay, beta: scalar hyper-parameters of the update.
        grad: shape tuple used to draw the random gradient tensor
            (NOTE: a shape, not gradient values — name kept for interface compat).
        dtype: numpy dtype for all tensors. For float16 the TF reference is
            computed in float32 and cast back before comparison.
    """

    def __init__(self, var_shape, m_shape, lr, alpha, sign_decay, beta, grad, dtype=np.float32):
        super().__init__(dtype=dtype)
        self.var = np.random.randn(*var_shape).astype(dtype)
        self.m = np.random.randn(*m_shape).astype(dtype)
        self.lr = lr
        self.alpha = alpha
        self.sign_decay = sign_decay
        self.beta = beta
        self.grad = np.random.randn(*grad).astype(dtype)
        self.dtype = dtype

    def forward_mindspore_impl(self):
        """Run the MindSpore op once; return the updated ``var`` as a numpy array."""
        var = Parameter(Tensor(self.var), name="var")
        m = Parameter(Tensor(self.m), name="m")
        grad = Tensor(self.grad)
        lr = Tensor(self.lr, var.dtype)
        alpha = Tensor(self.alpha, var.dtype)
        sign_decay = Tensor(self.sign_decay, var.dtype)
        beta = Tensor(self.beta, var.dtype)
        net = ApplyAddSign(var, m, lr, alpha, sign_decay, beta)
        out = net(grad)
        # out[0] is the updated var; only var is compared against TF.
        return out[0].asnumpy()

    def forward_tensorflow_impl(self):
        """Run TF's ResourceApplyAddSign once; return the updated ``var``."""
        if self.dtype == np.float16:
            # Compute the fp16 reference in float32; forward_cmp casts the
            # result back to fp16 before comparing.
            var_tf = tf.Variable(self.var.copy().astype(np.float32))
            m_tf = tf.Variable(self.m.copy().astype(np.float32))
            grad_tf = tf.Variable(self.grad.copy().astype(np.float32))
            lr_tf = tf.constant(self.lr, dtype=np.float32)
            alpha_tf = tf.constant(self.alpha, dtype=np.float32)
            sign_decay_tf = tf.constant(self.sign_decay, dtype=np.float32)
            beta_tf = tf.constant(self.beta, dtype=np.float32)
        else:
            dtype = self.var.dtype
            var_tf = tf.Variable(self.var.copy())
            m_tf = tf.Variable(self.m.copy())
            grad_tf = tf.Variable(self.grad.copy())
            lr_tf = tf.constant(self.lr, dtype=dtype)
            alpha_tf = tf.constant(self.alpha, dtype=dtype)
            sign_decay_tf = tf.constant(self.sign_decay, dtype=dtype)
            beta_tf = tf.constant(self.beta, dtype=dtype)

        out = tf.raw_ops.ResourceApplyAddSign(var=var_tf.handle, m=m_tf.handle,
                                              lr=lr_tf, alpha=alpha_tf,
                                              sign_decay=sign_decay_tf, beta=beta_tf, grad=grad_tf)
        init = tf.compat.v1.global_variables_initializer()
        with tf.compat.v1.Session() as sess:
            sess.run(init)
            # The op mutates var_tf in place; fetch the variable after running it.
            sess.run(out)
            var_tf = sess.run(var_tf)
        return var_tf

    def forward_cmp(self):
        """Assert TF and MindSpore produce the same updated ``var`` within self.loss."""
        out_tensorflow = self.forward_tensorflow_impl()
        out_mindspore = self.forward_mindspore_impl()
        if self.dtype == np.float16:
            # TF result was computed in float32 — cast back before comparing.
            allclose_nparray(out_tensorflow.astype(self.dtype), out_mindspore, self.loss, self.loss)
        else:
            allclose_nparray(out_tensorflow, out_mindspore, self.loss, self.loss)

    def forward_profile_cmp(self):
        """Profile both frameworks and assert MindSpore is not >10% slower than TF."""
        run_time = 10
        var = Parameter(Tensor(self.var), name="var")
        m = Parameter(Tensor(self.m), name="m")
        lr = Tensor(self.lr, var.dtype)
        alpha = Tensor(self.alpha, var.dtype)
        sign_decay = Tensor(self.sign_decay, var.dtype)
        beta = Tensor(self.beta, var.dtype)
        net = ApplyAddSign(var, m, lr, alpha, sign_decay, beta)
        op_name = "ApplyAddSign"
        inputs = [Tensor(self.grad)]
        forward_profile_ms = self.mindspore_profile(net, run_time, op_name, *inputs)

        net_tf = tf.raw_ops.ResourceApplyAddSign
        op_name_tf = 'ResourceApplyAddSign'
        dtype_tf = self.var.dtype
        var_tf = tf.Variable(self.var)
        m_tf = tf.Variable(self.m)
        grad_tf = tf.Variable(self.grad.copy())
        lr_tf = tf.constant(self.lr, dtype=dtype_tf)
        alpha_tf = tf.constant(self.alpha, dtype=dtype_tf)
        sign_decay_tf = tf.constant(self.sign_decay, dtype=dtype_tf)
        beta_tf = tf.constant(self.beta, dtype=dtype_tf)
        kwargs = {"var": var_tf.handle, "m": m_tf.handle, "lr": lr_tf, "alpha": alpha_tf,
                  "sign_decay": sign_decay_tf, "beta": beta_tf, "grad": grad_tf}
        forward_profile_tf = self.tensorflow_forward_profile(net_tf, run_time, op_name_tf, **kwargs)

        logger.info("forward_profile_tf: {}us".format(forward_profile_tf))
        logger.info("forward_profile_ms: {}us".format(forward_profile_ms))
        assert forward_profile_tf >= 0.9 * forward_profile_ms