# Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
PointTransformer Backbone Module

"""
import os, sys
sys.path.append(os.path.abspath(os.path.join(os.getcwd())))
path = os.path.abspath(os.path.join(os.getcwd(), ".."))
from mind3d.models.blocks import Transformer, TransitionDown, TransitionUp
import mindspore.nn as nn
import mindspore.ops as ops
from mindspore import context
import mindspore

class Backbone(nn.Cell):
    """
    PointTransformer backbone.

    An MLP stem followed by `nblocks` alternating TransitionDown /
    Transformer stages that progressively downsample the point cloud
    (points // 4 per stage) while doubling the feature channels.

    Args:
        task (bool): True for classification (6-channel input points),
            False for segmentation (22-channel input points).

    Inputs:
        x (Tensor): point cloud of shape [B, N, d_points]; the first
            3 channels are assumed to be xyz coordinates.

    Outputs:
        Tuple of (points, xyz_and_feat):
            - points: deepest-stage feature tensor.
            - xyz_and_feat: list of (xyz, features) pairs, one per
              stage including the stem.
    """
    def __init__(self, task):
        super(Backbone, self).__init__()
        npoints = 1024
        nblocks = 4
        nneighbor = 16
        self.task = task
        # BUGFIX: the original `if self.task: ... elif self.task == False: ...`
        # left `d_points` unbound for falsy-but-not-False values (e.g. None),
        # crashing later with UnboundLocalError. A single conditional
        # expression covers every case with identical results for True/False.
        d_points = 6 if task else 22
        self.fc1 = nn.SequentialCell(
            nn.Dense(d_points, 32, weight_init = "Uniform", bias_init = "Uniform"),
            nn.ReLU(),
            nn.Dense(32, 32, weight_init = "Uniform", bias_init = "Uniform")
        )
        self.transpose = ops.Transpose()
        self.transformer1 = Transformer.Transformer(32, 512, nneighbor)
        self.transition_downs = nn.CellList()
        self.transformers = nn.CellList()
        for i in range(nblocks):
            # Stage i+1: quarter the point count, double the channels.
            channel = 32 * 2 ** (i + 1)
            self.transition_downs.append(
                TransitionDown.Transitiondown(npoints // 4 ** (i + 1), nneighbor,
                                               [channel // 2 + 3, channel, channel]))
            self.transformers.append(Transformer.Transformer(channel, 512, nneighbor))
        self.nblocks = nblocks

    def construct(self, x):
        # First three channels are the xyz coordinates.
        xyz = x[..., :3]
        y = self.fc1(x)
        # Transformer blocks return a tuple; [0] is the feature tensor.
        points = self.transformer1(xyz, y)[0]
        xyz_and_feat = [(xyz, points)]
        for i in range(self.nblocks):
            xyz, points = self.transition_downs[i](xyz, points)
            points = self.transformers[i](xyz, points)[0]
            xyz_and_feat.append((xyz, points))
        return points, xyz_and_feat

class PointTransformerCls(nn.Cell):
    """
    PointTransformer classification head.

    Runs the shared Backbone, global-average-pools the deepest features
    over the point dimension, and classifies with a 3-layer MLP.

    Args:
        num_classes (int): number of output classes. Default: 40
            (backward compatible with the previous hard-coded value).

    Inputs:
        x (Tensor): input point cloud of shape [B, N, 6].

    Outputs:
        Tensor of shape [B, num_classes].

    Example:
        C = PointTransformerCls()
        x_dim = mindspore.Tensor(np.random.random((1, 1024, 6)), dtype=mindspore.float32)
        print(C(x_dim).shape)
    """
    def __init__(self, num_classes = 40):
        super(PointTransformerCls, self).__init__()
        self.backbone = Backbone(task = True)
        # Backbone's deepest stage outputs 32 * 2**4 = 512 channels.
        self.fc2 = nn.SequentialCell(
            nn.Dense(32 * 2 ** 4, 256, has_bias = True, weight_init = "Uniform", bias_init = "Uniform"),
            nn.ReLU(),
            nn.Dense(256, 64, has_bias = True, weight_init = "Uniform", bias_init = "Uniform"),
            nn.ReLU(),
            nn.Dense(64, num_classes, has_bias = True, weight_init = "Uniform", bias_init = "Uniform")
        )
        self.reducemean = ops.ReduceMean()

    def construct(self, x):
        point, _ = self.backbone(x)
        # Global average pooling over the point dimension (axis 1).
        point = self.reducemean(point, 1)
        res = self.fc2(point)
        return res

class PointTransformerSeg(nn.Cell):
    """
    PointTransformer segmentation head.

    Runs the shared Backbone, refines the deepest features with an
    MLP + Transformer, then upsamples through `nblocks` TransitionUp /
    Transformer stages back to the full point resolution before a
    per-point classification MLP.

    Args:
        num_classes (int): number of per-point output classes.
            Default: 50 (backward compatible with the previous
            hard-coded value).

    Inputs:
        x (Tensor): input point cloud of shape [B, N, 22].

    Outputs:
        Tensor of shape [B, N, num_classes].

    Example:
        C = PointTransformerSeg()
        x_dim = mindspore.Tensor(np.random.random((1, 1024, 22)), dtype=mindspore.float32)
        print(C(x_dim).shape)
    """
    def __init__(self, num_classes = 50):
        super(PointTransformerSeg, self).__init__()
        self.backbone = Backbone(task = False)
        # NOTE: the original also bound an unused `npoints = 1024` here.
        nblocks, nneighbor = 4, 16
        # Bottleneck MLP over the deepest features (32 * 2**nblocks = 512 channels).
        self.fc1 = nn.SequentialCell(
            nn.Dense(32 * 2 ** nblocks, 512, has_bias = True, weight_init = "Uniform", bias_init = "Uniform"),
            nn.ReLU(),
            nn.Dense(512, 512, has_bias = True, weight_init = "Uniform", bias_init = "Uniform"),
            nn.ReLU(),
            nn.Dense(512, 32 * 2 ** nblocks, has_bias = True, weight_init = "Uniform", bias_init = "Uniform")
        )
        self.transformer2 = Transformer.Transformer(32 * 2 ** nblocks, 512, nneighbor)
        self.nblocks = nblocks
        self.transition_ups = nn.CellList()
        self.Transformers = nn.CellList()
        # Mirror the encoder: halve the channels at each upsampling stage.
        for i in reversed(range(nblocks)):
            channel = 32 * 2 ** i
            self.transition_ups.append(TransitionUp.TransitionUp(channel * 2, channel, channel))
            self.Transformers.append(Transformer.Transformer(channel, 512, nneighbor))
        # Per-point classification head.
        self.fc3 = nn.SequentialCell(
            nn.Dense(32, 64, weight_init = "Uniform", bias_init = "Uniform"),
            nn.ReLU(),
            nn.Dense(64, 64, weight_init = "Uniform", bias_init = "Uniform"),
            nn.ReLU(),
            nn.Dense(64, num_classes, weight_init = "Uniform", bias_init = "Uniform")
        )
        self.transpore = ops.Transpose()

    def construct(self, x):
        points, xyz_and_feats = self.backbone(x)
        xyz = xyz_and_feats[-1][0]
        points = self.transformer2(xyz, self.fc1(points))[0]
        for i in range(self.nblocks):
            # Skip-connect with the matching encoder stage (indexed from the end).
            points = self.transition_ups[i](xyz, points, xyz_and_feats[- i - 2][0], xyz_and_feats[- i - 2][1])
            xyz = xyz_and_feats[- i - 2][0]
            points = self.Transformers[i](xyz, points)[0]
        return self.fc3(points)

if __name__ == "__main__":
    import numpy as np
    # NOTE(review): device_id is hard-coded to 5 — adjust for the local machine.
    context.set_context(mode=context.PYNATIVE_MODE, device_target="GPU",device_id = 5)
    C = PointTransformerSeg()
    # Smoke test (uncomment when a GPU is available):
    #x_dim = mindspore.Tensor(np.random.random((1, 1024, 22)), dtype=mindspore.float32)
    #out = C(x_dim)
    #print(out.shape)
    print(C)