import sys
import getopt
import numpy as np
import mindspore.context as context
from mindspore import Tensor
from mindspore import nn 
import mindspore.ops.operations as P
import mindspore.ops.functional as F

# Parse command-line flags: -d <device_id> (default 0), -t <test_type> (default 0).
parsed_flags, _ = getopt.getopt(sys.argv[1:], "d:t:")
flags = dict(parsed_flags)
dev_id = int(flags.get('-d', 0))
test_type = int(flags.get('-t', 0))

# Run in graph mode on the selected GPU.
context.set_context(mode=context.GRAPH_MODE, device_target="GPU", device_id=dev_id)
# test_type >= 1 turns on graph-kernel fusion; test_type >= 2 additionally
# enables stitch fusion via the graph-kernel flags.
if test_type > 0:
    context.set_context(enable_graph_kernel=True)
if test_type > 1:
    context.set_context(graph_kernel_flags="--enable_stitch_fusion")

class BertAttentionPiece(nn.Cell):
    """Tail of a BERT attention layer: apply the attention mask to the raw
    scores, then softmax and dropout to produce attention probabilities."""

    def __init__(self):
        super(BertAttentionPiece, self).__init__()
        self.cast = P.Cast()
        self.get_dtype = P.DType()
        self.sub = P.Sub()
        self.multiply = P.Mul()
        self.add = P.Add()
        self.softmax = nn.Softmax()
        # NOTE(review): argument looks like keep_prob=0.9 (old Dropout API) — confirm
        # against the MindSpore version in use, where newer APIs take drop prob p.
        self.dropout = nn.Dropout(1-0.1)
        # Large negative additive penalty for masked-out positions.
        self.multiply_data = -10000.0

    def construct(self, attention_mask, attention_scores):
        # Invert the mask in the scores' dtype: positions with mask==1 become 0,
        # positions with mask==0 become 1.
        score_dtype = self.get_dtype(attention_scores)
        inverted_mask = self.sub(
            self.cast(F.tuple_to_array((1.0,)), score_dtype),
            self.cast(attention_mask, score_dtype))
        # Masked positions receive -10000 so softmax sends them to ~0.
        mask_penalty = self.multiply(inverted_mask, self.multiply_data)
        masked_scores = self.add(mask_penalty, attention_scores)
        probs = self.softmax(masked_scores)
        return self.dropout(probs)

# Smoke-test the cell on random fp16 inputs of identical shape —
# presumably (batch, heads, seq, seq); verify against the real model.
input_shape = [64, 12, 128, 128]
mask_input = Tensor(np.random.normal(0, 1, input_shape).astype(np.float16))
score_input = Tensor(np.random.normal(0, 1, input_shape).astype(np.float16))
net = BertAttentionPiece()
result = net(mask_input, score_input)
