""" PointNet classification infer script. """

import argparse, os, sys
import mindspore
import mindspore.dataset as ds
import mindspore.ops as ops
from tqdm import tqdm

# Make the current working directory importable so sibling packages
# (mind3d, tools, visualization, ...) resolve when run as a script.
sys.path.append(os.path.abspath(os.path.join(os.getcwd())))
# NOTE(review): `path` is not used in this file — presumably kept for
# downstream imports or debugging; verify before removing.
path = os.path.abspath(os.path.join(os.getcwd()))


from mindspore import context, load_checkpoint, load_param_into_net
from mindspore.context import ParallelMode
from mindspore.communication.management import init
from mindspore.common import set_seed
from mindspore.context import ParallelMode
from visualization.visualization import show_pointcloud_seg
from mind3d.models.pointnet import PointNet_seg
from mind3d.models.PointTransformer import PointTransformerSeg
from mind3d.dataset.ShapeNet import ShapeNetpartDataset
from tools.segmentation.shapenetpart.eval import ModifyNetwork
from mind3d.utils.provider import to_categorical

set_seed(1)

def infer(opt):
    """Run part-segmentation inference on the ShapeNet-part validation split.

    Configures the MindSpore device context (Ascend or GPU, optionally
    distributed), builds the validation data pipeline, restores the chosen
    network from a checkpoint, and visualizes the predicted per-point labels
    for each cloud.

    Args:
        opt: Parsed YAML config (AttrDict) with at least ``device_target``,
            ``run_distribute``, ``model``, ``TrainSeg.*`` and
            ``EvalSeg.ckpt_file`` fields.

    Raises:
        ValueError: If ``opt.device_target`` or ``opt.model`` is unsupported.
    """
    # Device selection comes from the launch environment; defaults suit a
    # single-card run.
    device_id = int(os.getenv('DEVICE_ID', '1'))
    device_num = int(os.getenv('RANK_SIZE', '1'))

    if opt.device_target not in ("Ascend", "GPU"):
        raise ValueError("Unsupported platform {}".format(opt.device_target))

    if opt.device_target == "Ascend":
        context.set_context(mode=context.GRAPH_MODE,
                            device_target="Ascend",
                            save_graphs=False,
                            device_id=device_id)
        # Deep model graphs need a larger call-depth limit.
        context.set_context(max_call_depth=2048)
    else:
        context.set_context(mode=context.GRAPH_MODE,
                            device_target="GPU",
                            save_graphs=False,
                            device_id=device_id,
                            max_call_depth=2048)

    # Distributed setup (data-parallel) when launched with RANK_SIZE > 1.
    if opt.run_distribute:
        if opt.device_target == "Ascend":
            if device_num > 1:
                init()
                context.set_auto_parallel_context(
                    parallel_mode=ParallelMode.DATA_PARALLEL,
                    gradients_mean=True)
        else:
            if device_num > 1:
                # Shared memory can conflict with multi-process GPU loaders.
                mindspore.dataset.config.set_enable_shared_mem(False)
                context.set_auto_parallel_context(
                    parallel_mode=ParallelMode.DATA_PARALLEL,
                    gradients_mean=True,
                    device_num=device_num)
                mindspore.common.set_seed(1234)
                init()
            else:
                context.set_context(device_id=device_id)

    # Data pipeline: inference runs on the "val" split with normals enabled.
    infer_dataset = ShapeNetpartDataset(root_path=opt.TrainSeg.data_path,
                                        num_points=opt.TrainSeg.num_points,
                                        split="val",
                                        normal_channel=True)
    infer_ds = ds.GeneratorDataset(infer_dataset, ["output", "cls", "label"],
                                   num_parallel_workers=opt.TrainSeg.num_work,
                                   shuffle=True)
    # Batch size 1: each step visualizes a single point cloud.
    infer_ds = infer_ds.batch(batch_size=1, drop_remainder=True)
    step_size = infer_ds.get_dataset_size()

    # Model loading.
    if opt.model == "PointNet_seg":
        network = PointNet_seg()
    elif opt.model == "pointtransformer":
        network = PointTransformerSeg()
        network = ModifyNetwork(network)
    else:
        # Previously an unknown model fell through and later raised a
        # confusing NameError on `network`; fail fast with a clear message.
        raise ValueError("Unsupported model {}".format(opt.model))

    # Restore trained weights from the configured checkpoint file.
    param_dict = load_checkpoint(opt.EvalSeg.ckpt_file)
    load_param_into_net(network, param_dict)

    # Inference loop: predict per-point labels and visualize them.
    for _, data in tqdm(enumerate(infer_ds.create_dict_iterator()),
                        total=step_size, smoothing=0.9):
        # `label` (ground truth) is not needed for visualization-only infer.
        points, cls = data["output"], data['cls']
        _, num_pts, num_ch = points.shape  # assumed (B=1, N, C) — B fixed by batch(1)

        if opt.model == "PointNet_seg":
            pred = network(points, to_categorical(cls, 16))
        else:  # pointtransformer
            # Append the one-hot object-class vector to every point so the
            # network is conditioned on the shape category (16 classes).
            cat = ops.Concat(axis=2)
            onehot = ops.OneHot()
            cls_onehot = onehot(mindspore.Tensor(cls, mindspore.int32), 16,
                                mindspore.Tensor(1.0, mindspore.float32),
                                mindspore.Tensor(0.0, mindspore.float32))
            point = cat((mindspore.Tensor(points),
                         mindspore.numpy.tile(cls_onehot, (1, num_pts, 1))))
            pred = network(point)

        # Add a leading dim, then argmax over the logits axis to get labels.
        pred = ops.ExpandDims()(pred, 0)
        preds = pred.argmax(3)

        points = points.view(-1, num_ch).asnumpy()
        cls_pred = preds.squeeze(0).view(-1).asnumpy()
        show_pointcloud_seg(points, seg=cls_pred, save=None)


def main(args):
    """Load the YAML config named by ``args.opt`` and launch inference.

    Args:
        args: argparse namespace with an ``opt`` attribute holding the path
            to a YAML configuration file.
    """
    # Local imports keep project utils off the module import path until needed.
    from mind3d.utils.PointTransformerUtils import AttrDict, create_attr_dict
    import yaml

    with open(args.opt, 'r') as f:
        args_ops = AttrDict(yaml.safe_load(f.read()))
    create_attr_dict(args_ops)
    # Bug fix: `infer` is declared `def infer(opt)`, so the previous call
    # `infer(args_opt=args_ops)` raised TypeError (unexpected keyword
    # argument 'args_opt'). Pass the config positionally.
    infer(args_ops)



if __name__ == '__main__':
    # Fixed description/help: this script runs inference, and `-opt` takes a
    # YAML config path (the old help text implied it took a model name).
    parser = argparse.ArgumentParser(description='Shapenet segmentation infer.')
    parser.add_argument('-opt',
                        default="./configs/pointtransformer/pointtransformerseg.yaml",
                        help='path to the YAML config file '
                             '(pointtransformer or pointnet variant)')
    # parse_known_args tolerates extra launcher-injected flags.
    args = parser.parse_known_args()[0]
    main(args)

