from mindspore import nn,Tensor
from mindspore import load_checkpoint, save_checkpoint
from mind3d.models.blocks.TransitionDown import Square_distance
import mindspore.numpy as mnp
from mindspore import ops
from mind3d.utils.pointnet2_util import square_distance, index_points
import mindspore
import numpy as np
np.random.seed(1234)

class PointNetFeaturePropagation(nn.Cell):
    """Feature-propagation (upsampling) layer from PointNet++.

    Interpolates features defined on a sparse point set (``xyz2``/``points2``)
    back onto a denser point set (``xyz1``) using inverse-distance weighting of
    the 3 nearest source points, optionally concatenates skip-link features
    (``points1``), and refines the result with a shared per-point MLP
    (Conv1d -> BatchNorm -> ReLU).
    """

    def __init__(self, in_channel, mlp):
        """
        Args:
            in_channel (int): channels fed into the first Conv1d — the sum of
                skip-feature channels and interpolated-feature channels when
                ``points1`` is provided in ``construct``.
            mlp (list[int]): output channels of each Conv1d layer in the MLP.
        """
        super(PointNetFeaturePropagation, self).__init__()
        self.mlp_convs = nn.CellList()
        self.mlp_bn = nn.CellList()
        last_channel = in_channel
        self.relu = nn.ReLU()
        # Build a stack of 1x1 Conv1d layers (a shared per-point MLP); each is
        # paired with a BatchNorm2d applied on an expanded 4-D view below.
        for out_channel in mlp:
            self.mlp_convs.append(nn.Conv1d(last_channel,out_channel,1, has_bias = True, bias_init = "Uniform", weight_init = "Uniform"))
            self.mlp_bn.append(nn.BatchNorm2d(out_channel, momentum=0.1,use_batch_statistics = None))
            last_channel = out_channel
    
    def construct(self, xyz1,xyz2,points1,points2):
        """Propagate features from (xyz2, points2) onto xyz1.

        Args:
            xyz1: target coordinates, channel-first (B, C, N).
            xyz2: source coordinates, channel-first (B, C, S).
            points1: optional skip features on xyz1, channel-first, or None.
            points2: source features, channel-first (B, D, S).

        Returns:
            Propagated features, channel-first (B, mlp[-1], N).
        """
        # Convert all channel-first inputs to (B, N, C) point-major layout.
        xyz1 = ops.Transpose()(xyz1,(0,2,1))
        xyz2 = ops.Transpose()(xyz2,(0,2,1))
        points2 = ops.Transpose()(points2,(0,2,1))

        B,N,C = xyz1.shape
        _,S,_ = xyz2.shape

        if S == 1:
            # Single source point: broadcast its feature to all N targets.
            interpolated_points = mnp.tile(points2, (1, N, 1))
        else:
            # Pairwise squared distances between targets and sources: (B, N, S).
            dists = Square_distance(xyz1,xyz2)
            # NOTE(review): ops.Sort returns (sorted_values, indices), so after
            # this line `idx` holds the sorted distances and `dists` holds the
            # indices — the names are swapped on purpose and swapped back on
            # the next line while keeping only the 3 nearest neighbours.
            idx, dists = ops.Sort(axis=-1)(dists)
            idx, dists = dists[:, :, :3], idx[:, :, :3]

            # Inverse-distance weights, normalised over the 3 neighbours
            # (epsilon guards against division by zero for coincident points).
            dist_recip = 1.0 / (dists + 1e-8)
            norm = ops.ReduceSum(keep_dims=True)(dist_recip, 2)
            weight = dist_recip/norm
            # Weighted average of the 3 neighbour features -> (B, N, D).
            interpolated_points = ops.ReduceSum()(index_points(points2,idx) * weight.view(B,N,3,1),2)
        
        if points1 is not None:
            # Concatenate skip-link features along the channel axis.
            points1 = ops.Transpose()(points1,(0,2,1))
            new_points = ops.Concat(-1)((points1, interpolated_points))
        else:
            new_points = interpolated_points

        # Back to channel-first for Conv1d, then run the shared MLP. Each
        # feature map is expanded to 4-D so BatchNorm2d can normalise it,
        # then squeezed back.
        new_points = ops.Transpose()(new_points,(0,2,1))
        for i,conv in enumerate(self.mlp_convs):
            bn = self.mlp_bn[i]
            new_points = self.relu(ops.Squeeze(-1)(bn(ops.ExpandDims()(conv(new_points), -1))))
        return new_points




class AttrDict(dict):
    """Dict subclass exposing its keys as attributes (``cfg.key == cfg['key']``)."""

    def __getattr__(self, key):
        # Raise AttributeError (not KeyError) for missing keys: the attribute
        # protocol requires it, otherwise hasattr()/getattr(obj, k, default)
        # and copy/pickle machinery break with an unexpected KeyError.
        try:
            return self[key]
        except KeyError:
            raise AttributeError(key) from None

    def __setattr__(self, key, value):
        # Real instance attributes (rare) keep their slot; everything else is
        # stored as a dict entry so attribute and item access stay in sync.
        if key in self.__dict__:
            self.__dict__[key] = value
        else:
            self[key] = value

def create_attr_dict(yaml_config):
    """Recursively convert a parsed-YAML dict into nested AttrDicts, in place.

    String values are additionally passed through ``ast.literal_eval`` so that
    e.g. ``"1e-3"`` or ``"[1, 2]"`` become real Python values; strings that are
    not valid literals are kept as-is (best-effort conversion).

    Args:
        yaml_config (dict): configuration mapping; mutated in place.
    """
    from ast import literal_eval
    for key, value in yaml_config.items():
        if type(value) is dict:
            yaml_config[key] = value = AttrDict(value)
        if isinstance(value, str):
            try:
                value = literal_eval(value)
            # literal_eval documents exactly these failure modes; catching
            # BaseException here (as before) would also swallow
            # KeyboardInterrupt/SystemExit, which must propagate.
            except (ValueError, TypeError, SyntaxError, MemoryError, RecursionError):
                pass
        if isinstance(value, AttrDict):
            create_attr_dict(yaml_config[key])
        else:
            yaml_config[key] = value

class CustumWithLoss(nn.Cell):
    """Wraps a segmentation network and its loss function into one cell.

    The forward pass flattens the network output to ``(B*N, num_part)`` and
    the targets to ``(B*N,)`` before evaluating the loss.
    """

    def __init__(self, models, loss_fn, num_part):
        """
        Args:
            models: the forward network producing per-point logits.
            loss_fn: loss callable taking (logits, targets).
            num_part (int): number of part classes (last logit dimension).
        """
        super(CustumWithLoss, self).__init__()
        self.models = models
        self.loss = loss_fn
        self.num_part = num_part

    def construct(self, data, target):
        """Run the network and return the scalar loss."""
        logits = self.models(data)
        flat_logits = logits.view(-1, self.num_part)
        flat_target = target.view(-1, 1)[:, 0]
        return self.loss(flat_logits, flat_target)


class pointnetCustomWithLossCell(nn.Cell):
    """Connects the forward network with its loss function."""

    def __init__(self, backbone, loss_fn):
        """Takes two inputs: the forward network ``backbone`` and the loss ``loss_fn``."""
        super(pointnetCustomWithLossCell, self).__init__(auto_prefix=False)
        self._backbone = backbone
        self._loss_fn = loss_fn

    def construct(self, data, label, seg_label, model):  # B N C, B 16, B N
        """Forward pass: run the backbone, then evaluate the loss on its output."""
        seg_logits = self._backbone(data, label)
        return self._loss_fn(seg_logits, seg_label, method=model)


class shift_point_cloud:
    """Translate a point cloud by one random offset shared by all its points."""

    def __init__(self, shift_range=0.1):
        # Maximum absolute shift along each coordinate axis.
        self.shift_range = shift_range

    def __call__(self, batch_pc):
        """Shift the (N, C) cloud in place and return it."""
        num_channels = batch_pc.shape[1]
        # One (1, 3) offset, broadcast over every point of the cloud.
        offset = np.random.uniform(-self.shift_range, self.shift_range, (1, 3))
        if num_channels > 3:
            # Extra channels (e.g. normals/colors) are left untouched.
            batch_pc[..., 0:3] += offset
        else:
            batch_pc += offset
        return batch_pc


class random_scale_point_cloud:
    """Scale a point cloud by one random factor shared by all its points."""

    def __init__(self, scale_low=0.8, scale_high=1.25):
        # Inclusive-low / exclusive-high bounds of the uniform scale factor.
        self.scale_low = scale_low
        self.scale_high = scale_high

    def __call__(self, batch_pc):
        """Scale the (N, C) cloud in place and return it."""
        num_channels = batch_pc.shape[1]
        factor = np.random.uniform(self.scale_low, self.scale_high)
        if num_channels > 3:
            # Only xyz coordinates are scaled; extra channels stay as-is.
            batch_pc[..., 0:3] *= factor
        else:
            batch_pc *= factor
        return batch_pc


class random_point_dropout:
    """Randomly overwrite a subset of points with the cloud's first point."""

    def __init__(self, max_dropout_ratio=0.875):
        # Upper bound of the per-call dropout probability.
        self.max_dropout_ratio = max_dropout_ratio

    def __call__(self, batch_pc):
        """Apply dropout to the (N, C) cloud in place and return it."""
        # Per-call dropout probability, uniform in [0, max_dropout_ratio).
        ratio = np.random.random() * self.max_dropout_ratio
        mask = np.random.random((batch_pc.shape[0])) <= ratio
        dropped = np.where(mask)[0]
        if dropped.size != 0:
            # Dropped points are replaced by point 0 (not removed) so the
            # array keeps its fixed point count for downstream consumers.
            batch_pc[dropped, :] = batch_pc[0, :]
        return batch_pc

def to_categorical(y, num_class):
    """One-hot encode `y` by indexing rows of a (num_class, num_class) identity matrix."""
    identity = ops.Eye()(num_class, num_class, mindspore.float32)
    one_hot = identity[mindspore.Tensor(y)]
    # Wrapped in Tensor as in the original implementation (a no-op copy).
    return mindspore.Tensor(one_hot)

def pointnet_to_categorical(y, num_classes):
    """One-hot encode integer labels `y` into float32 via ops.OneHot."""
    on_value = Tensor(1.0, mindspore.float32)
    off_value = Tensor(0.0, mindspore.float32)
    return ops.OneHot()(y, num_classes, on_value, off_value)

if __name__ == "__main__":
    # Smoke check: print one draw from the seeded RNG when run as a script.
    sample = np.random.random()
    print(sample)