

import mindspore
from mindspore import ops as P
from mindspore import nn
import numpy as np
from mindspore import Tensor, ops
from mindspore import dtype as mstype
from mindspore.common.initializer import Normal


def test_crossentropy():
    """Compare a manually-masked softmax cross-entropy against the sparse API.

    Builds a (1, 4, 4) integer label map and a (1, 5, 4, 4) random logit
    tensor (5 classes), then computes:
      1. a hand-rolled loss: one-hot labels, per-element CE, a mask that
         zeroes positions equal to ``ignore_label``, and a weighted mean;
      2. ``nn.SoftmaxCrossEntropyWithLogits(sparse=True)`` averaged with
         ``ReduceMean``.
    Both results are printed. NOTE(review): the two values only match when
    no label equals ``ignore_label`` (true for the fixture below) — the
    sparse path applies no mask.
    """
    labels = Tensor([
                    [[1, 2, 0, 4],
                     [3, 0, 1, 2],
                     [0, 1, 1, 2],
                     [2, 1, 3, 4]],
                      ], mindspore.int32)
    # NCHW logits: batch=1, num_classes=5, spatial 4x4, random-normal init.
    logits = Tensor(shape=(1, 5, 4, 4), dtype=mstype.float32, init=Normal())

    one_hot = P.OneHot(axis=-1)
    on_value = Tensor(1.0, mstype.float32)
    off_value = Tensor(0.0, mstype.float32)
    cast = P.Cast()
    ce = nn.SoftmaxCrossEntropyWithLogits()
    not_equal = P.NotEqual()
    num_cls = 5
    ignore_label = 255
    mul = P.Mul()  # element-wise multiply
    reduce_sum = P.ReduceSum(False)  # renamed: do not shadow builtin sum()
    div = P.RealDiv()
    transpose = P.Transpose()
    reshape = P.Reshape()

    reduce_mean = P.ReduceMean()

    labels_int = cast(labels, mstype.int32)
    labels_int = reshape(labels_int, (-1,))  # flatten to 16 positions
    # NCHW -> NHWC, then flatten to (16, num_cls) so each row is one pixel.
    logits_ = transpose(logits, (0, 2, 3, 1))
    logits_ = reshape(logits_, (-1, num_cls))
    # False where the label equals ignore_label, True otherwise.
    weights = not_equal(labels_int, ignore_label)
    weights = cast(weights, mstype.float32)
    one_hot_labels = one_hot(labels_int, num_cls, on_value, off_value)
    loss = ce(logits_, one_hot_labels)  # 16 per-position CE losses
    loss = mul(weights, loss)
    # Weighted mean over the non-ignored positions.
    loss = div(reduce_sum(loss), reduce_sum(weights))
    print(loss)

    # ------------------------------------------
    # Sparse variant: takes integer class indices directly, no one-hot.
    # logits_ is already (-1, num_cls); the extra reshape was redundant.
    softmax_cross_entropy_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
    loss_2 = reduce_mean(
        softmax_cross_entropy_loss(logits_, reshape(labels, (-1,))))
    print("loss:", loss_2)


def test2():
    """Expand a (2, 4, 4) tensor to (2, 1, 4, 4) with ExpandDims and print it.

    Fix: the original called ``expand_dim.infer_value(logits, axis=1)``,
    which is an internal shape/value-inference hook, not the public way to
    run a primitive. A ``Primitive`` instance is meant to be called
    directly: ``expand_dim(input_x, axis)``.
    """
    # Uses the module-level `from mindspore import ops as P` import; the
    # previous function-local re-import of P was redundant shadowing.
    expand_dim = P.ExpandDims()

    logits = Tensor(shape=(2, 4, 4), dtype=mstype.float32, init=Normal())

    # Insert a new axis at position 1: (2, 4, 4) -> (2, 1, 4, 4).
    logits_new = expand_dim(logits, 1)
    print(logits_new)

def test3():
    """Batched matrix multiply demo: (1, 2, 3, 3) @ (1, 2, 3, 4) -> (1, 2, 3, 4)."""
    lhs = mindspore.Tensor(
        np.arange(18).reshape(1, 2, 3, 3), dtype=mindspore.float32)
    rhs = mindspore.Tensor(
        np.arange(24).reshape(1, 2, 3, 4), dtype=mindspore.float32)

    # BatchMatMul multiplies the trailing 2-D matrices, broadcasting the
    # leading batch dimensions.
    batch_matmul = ops.BatchMatMul()
    print(batch_matmul(lhs, rhs))




if __name__ == '__main__':
    # Only the BatchMatMul demo is currently enabled; call
    # test_crossentropy() or test2() here to run the other checks.
    test3()

