import sys
import getopt
import numpy as np
import mindspore.context as context
from mindspore import Tensor
from mindspore import nn 
from mindspore.common import dtype as mstype
import mindspore.ops.operations as P
import mindspore.ops.operations._grad_ops as GP
import mindspore.ops.functional as F

# Command-line switches:
#   -d <id>   : GPU device id to run on (default 0)
#   -t <type> : test mode; >0 enables graph kernel, >1 also enables stitch fusion
opts = dict(getopt.getopt(sys.argv[1:], "d:t:")[0])
dev_id = int(opts.get('-d', '0'))
test_type = int(opts.get('-t', '0'))

context.set_context(mode=context.GRAPH_MODE, device_target="GPU", device_id=dev_id)
if test_type > 0:
    context.set_context(enable_graph_kernel=True)
if test_type > 1:
    context.set_context(graph_kernel_flags="--enable_stitch_fusion")

class BertAttentionPiece(nn.Cell):
    """Tail piece of a BERT attention backward pass, in fp16.

    construct(x, y, z) computes:
        g   = dropout_grad(x, y)                 # keep_prob = 0.9
        s   = reduce_sum(g * z, axis=-1)         # reduced in fp32, keep_dims
        out = g - cast(s, fp16)
    The reduction is done in fp32 to limit fp16 accumulation error,
    then cast back so the output stays fp16.
    """

    def __init__(self):
        super(BertAttentionPiece, self).__init__()
        # NOTE(review): the original also built an unused P.Add() here;
        # it was never referenced in construct and has been removed.
        self.reducesum = P.ReduceSum(keep_dims=True)
        # keep_prob = 1 - dropout rate (0.1)
        self.dropout_grad = GP.DropoutGrad(1 - 0.1)
        self.sub = P.Sub()
        self.multiply = P.Mul()
        self.cast = P.Cast()

    def construct(self, x, y, z):
        # x/y/z roles inferred from usage: x = incoming gradient,
        # y = dropout mask, z = forward activation — TODO confirm with caller.
        grad = self.dropout_grad(x, y)
        weighted = self.multiply(grad, z)
        # Cast up to fp32 for the last-axis sum, cast back for the subtract.
        row_sum = self.reducesum(self.cast(weighted, mstype.float32), (-1,))
        return self.sub(grad, self.cast(row_sum, mstype.float16))

# Build three random fp16 inputs of the attention-piece shape and run the net.
shape = [64, 12, 128, 128]
x, y, z = (
    Tensor(np.random.normal(0, 1, shape).astype(np.float16)) for _ in range(3)
)
net = BertAttentionPiece()
result = net(x, y, z)
