"""
PointTransformer Transition Up blocks
"""
import sys, os
sys.path.append(os.path.abspath(os.path.join(os.getcwd())))
import mindspore.nn as nn
import mindspore.ops as ops
from mind3d.utils.PointTransformerUtils import PointNetFeaturePropagation

class TransitionUp(nn.Cell):
    """
    Transition-up block of the PointTransformer decoder.

    Maps features from the low-resolution point set onto a higher-resolution
    point set. Each input feature first passes through a linear layer, then
    batch normalization, then ReLU. The processed low-resolution features are
    then interpolated onto the high-resolution positions via feature
    propagation (trilinear interpolation, no extra MLP) and summed with the
    high-resolution branch.

    Args:
        dim1 (int): Channel dimension of the first input feature (``point1``).
        dim2 (int): Channel dimension of the second input feature (``point2``).
        dim_out (int): Channel dimension of the output feature.

    Inputs:
        xyz1 (Tensor): Coordinates of the first point set, shape (B, N1, 3).
        point1 (Tensor): Features of the first point set, shape (B, N1, dim1).
        xyz2 (Tensor): Coordinates of the second point set, shape (B, N2, 3).
        point2 (Tensor): Features of the second point set, shape (B, N2, dim2).

    Outputs:
        Tensor: Fused features with the same point count as ``point2`` and
        channel dimension ``dim_out``.

    Example:
        >>> xyz1 = mindspore.Tensor(np.random.rand(1, 16, 3), dtype=mindspore.float32)
        >>> point1 = mindspore.Tensor(np.random.rand(1, 16, 512), dtype=mindspore.float32)
        >>> xyz2 = mindspore.Tensor(np.random.rand(1, 64, 3), dtype=mindspore.float32)
        >>> point2 = mindspore.Tensor(np.random.rand(1, 64, 256), dtype=mindspore.float32)
        >>> transition_up = TransitionUp(512, 256, 256)
        >>> output = transition_up(xyz1, point1, xyz2, point2)
        >>> print(output.shape)
    """

    def __init__(self, dim1, dim2, dim_out):
        super(TransitionUp, self).__init__()
        # Per-branch linear projections to the common output width.
        # NOTE: attribute names f1/f2/BN1/BN2/relu/fp are kept as-is so that
        # checkpoint parameter paths remain compatible.
        self.f1 = nn.Dense(dim1, dim_out, weight_init="Uniform", bias_init="Uniform")
        self.f2 = nn.Dense(dim2, dim_out, weight_init="Uniform", bias_init="Uniform")
        # BatchNorm2d is applied on a fake (B, C, N, 1) layout, hence the
        # transpose / expand / squeeze dance in _dense_bn_relu.
        self.BN1 = nn.BatchNorm2d(dim_out, momentum=0.1, use_batch_statistics=None)
        self.BN2 = nn.BatchNorm2d(dim_out, momentum=0.1, use_batch_statistics=None)
        self.relu = nn.ReLU()
        # Feature propagation with no MLP (in_channel=-1, mlp=[]): pure
        # interpolation of the low-resolution features onto xyz2 positions.
        self.fp = PointNetFeaturePropagation(-1, [])
        # Primitive ops hoisted out of construct — created once instead of on
        # every forward call.
        self.transpose = ops.Transpose()
        self.expand_dims = ops.ExpandDims()
        self.squeeze = ops.Squeeze(-1)

    def _dense_bn_relu(self, dense, bn, points):
        """Apply Dense -> BatchNorm -> ReLU to (B, N, C_in) features."""
        # Dense works channels-last; BatchNorm2d needs channels-first with a
        # trailing singleton spatial axis: (B, N, C) -> (B, C, N, 1).
        x = self.transpose(dense(points), (0, 2, 1))
        x = self.squeeze(bn(self.expand_dims(x, -1)))
        # Back to channels-last before the activation.
        return self.relu(self.transpose(x, (0, 2, 1)))

    def construct(self, xyz1, point1, xyz2, point2):
        """Fuse interpolated point1 features into the point2 branch."""
        feats1 = self._dense_bn_relu(self.f1, self.BN1, point1)
        feats2 = self._dense_bn_relu(self.f2, self.BN2, point2)
        # The propagation helper expects channels-first layouts.
        feats1_t = self.transpose(feats1, (0, 2, 1))
        xyz1_t = self.transpose(xyz1, (0, 2, 1))
        xyz2_t = self.transpose(xyz2, (0, 2, 1))
        # Interpolate feats1 (located at xyz1) onto the xyz2 positions, then
        # restore channels-last. (fp argument semantics come from
        # PointNetFeaturePropagation — assumed (xyz_dst, xyz_src, feats_dst,
        # feats_src); matches the original call order exactly.)
        interpolated = self.transpose(self.fp(xyz2_t, xyz1_t, None, feats1_t), (0, 2, 1))
        return interpolated + feats2