import sys
import getopt
import numpy as np
import mindspore.context as context
from mindspore import Tensor
from mindspore import nn 
import mindspore.ops.operations as P

# Command-line options: -d <device_id> (default 0), -t <test_type> (default 0).
parsed_opts, _ = getopt.getopt(sys.argv[1:], "d:t:")
opt_map = dict(parsed_opts)
dev_id = int(opt_map.get('-d', 0))
test_type = int(opt_map.get('-t', 0))

# Always run in graph mode on the selected GPU device.
context.set_context(mode=context.GRAPH_MODE, device_target="GPU", device_id=dev_id)
if test_type > 1:
    # test_type >= 2: graph kernel fusion plus parallel fusion.
    # NOTE: the second call overrides graph_kernel_flags set by the first,
    # preserving the original layered-configuration behavior.
    context.set_context(enable_graph_kernel=True, graph_kernel_flags="--disable_expand_ops=AddN")
    context.set_context(graph_kernel_flags="--enable_parallel_fusion --disable_expand_ops=AddN")
elif test_type > 0:
    # test_type == 1: graph kernel fusion only, AddN expansion disabled.
    context.set_context(enable_graph_kernel=True, graph_kernel_flags="--disable_expand_ops=AddN")

class Net(nn.Cell):
    """Bias three inputs by 0.5, reduce each over axis 1, and sum the results.

    Each input is offset by 0.5, reduced with ReduceSum(keep_dims=True) along
    axis 1, and the three reduced tensors are accumulated with AddN.
    """

    def __init__(self):
        super().__init__()
        self.reduce_sum = P.ReduceSum(keep_dims=True)
        self.accumulate = P.AddN()

    def construct(self, x1, x2, x3):
        # Explicit statements (no loops/comprehensions) keep the body
        # friendly to GRAPH_MODE compilation.
        s1 = self.reduce_sum(x1 + 0.5, (1,))
        s2 = self.reduce_sum(x2 + 0.5, (1,))
        s3 = self.reduce_sum(x3 + 0.5, (1,))
        return self.accumulate([s1, s2, s3])

# Build three independent standard-normal float32 inputs of the same shape
# and run them through the network once.
test_shape = [512, 768]
inputs = [
    Tensor(np.random.normal(0, 1, test_shape).astype(np.float32))
    for _ in range(3)
]

Net()(*inputs)

