import argparse
import os
import sys
import numpy as np
from tqdm import tqdm

import mindspore
from mindspore import context,load_checkpoint,load_param_into_net
from mindspore.context import ParallelMode
from mindspore.common import set_seed
import mindspore.dataset as ds
import  mindspore.ops as ops
import sklearn.metrics as metrics
from mindspore.communication.management import init
from mindspore.train import Model
from mindspore.train.callback import TimeMonitor

# Fix the global RNG seed so evaluation runs are reproducible.
set_seed(1)
# Directory containing this script.
BASE_DIR=os.path.dirname(os.path.abspath(__file__))
# Make the repository root (three levels up) importable regardless of the
# current working directory, then also add the "mind3d" package root itself.
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(BASE_DIR))))
ROOT_DIR=os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(BASE_DIR))),"mind3d")
sys.path.append(ROOT_DIR)

from mind3d.dataset.S3DIS import S3DISDataset
from mind3d.models.losses.Lossfunction import NLLLoss
from mind3d.utils.load_yaml import load_yaml
from mind3d.models.dgcnn import DGCNN_seg
from mind3d.models.pointnet2 import Pointnet2segModelSSG
from mind3d.utils.common import mIoU, CustomWithLossCell, ScannetWithEvalCell
from mind3d.dataset.scannet import create_scannet_dataset

def calculate_sem_IoU(pred_np, seg_np, visual=False):
    """Compute per-class semantic-segmentation IoU over the 13 S3DIS classes.

    Args:
        pred_np (np.ndarray): predicted labels, shape (num_samples, num_points).
        seg_np (np.ndarray): ground-truth labels, same shape as ``pred_np``.
        visual (bool): if True, classes absent from both prediction and
            ground truth (union == 0) are reported as IoU 1.0 instead of
            producing a 0/0 division.

    Returns:
        np.ndarray: IoU per class, shape (13,). When ``visual`` is False and
        a class never occurs, the entry is 0/0 (NaN), matching the original
        behavior.
    """
    num_classes = 13
    intersection = np.zeros(num_classes)
    union = np.zeros(num_classes)
    # Vectorized over samples: summing per-sample intersections/unions is the
    # same as one global sum per class, so a single pass over the full arrays
    # replaces the original Python-level double loop.
    for sem in range(num_classes):
        intersection[sem] = np.sum(np.logical_and(pred_np == sem, seg_np == sem))
        union[sem] = np.sum(np.logical_or(pred_np == sem, seg_np == sem))
    if visual:
        # Absent classes (union == 0) would give 0/0; count them as perfect.
        absent = union == 0
        intersection[absent] = 1
        union[absent] = 1

    return intersection / union

def dgcnn_seg_eval(opt):
    """Evaluate DGCNN semantic segmentation on S3DIS, one held-out area at a time.

    For each test area in 1..6 that the config selects (a specific area or
    "all"), loads the matching checkpoint ``model_<area>.ckpt``, runs
    inference over the eval split and prints per-area accuracy, balanced
    accuracy and mean IoU; when the config selects "all", an overall summary
    across every area is printed at the end.

    Args:
        opt (dict): parsed YAML options; reads ``datasets.val.area``,
            ``datasets.val.resize``, ``train.k`` and ``val.pretrained_ckpt``.
    """
    print("=========strating======")
    # Accumulators across all evaluated areas, used for the "all" summary.
    all_true_cls=[]
    all_pred_cls=[]
    all_true_seg=[]
    all_pred_seg=[]
    # S3DIS has six areas; each can serve as the held-out test area.
    for test_area in range(1, 7):

        # NOTE(review): this block looks like it was meant to scan the room
        # file list for the current area, but ``line[5]`` is a single
        # character (str) while ``test_area`` is an int, so the comparison is
        # always False; the loop reads the whole file and nothing below uses
        # its result — confirm whether it can be removed.
        if os.path.exists("./dataset/data/indoor3d_sem_seg_hdf5_data_test/room_filelist.txt"):
            with open("./dataset/data/indoor3d_sem_seg_hdf5_data_test/room_filelist.txt") as f:
                for line in f:
                    if (line[5]) == test_area:
                        break
        # Evaluate this area when the config selects it explicitly or "all".
        if (opt["datasets"]['val'].get("area") == "all") or (test_area == opt["datasets"]['val'].get("area")):
            test_dataset_generator = S3DISDataset(split="eval", num_points=opt["datasets"]['val'].get("resize"),
                                                  test_area=test_area)

            # NOTE(review): shuffle=True on an eval set does not change the
            # metrics (they are order-independent) but is unusual — confirm.
            test_ds = ds.GeneratorDataset(test_dataset_generator, ["data", "label"], shuffle=True)
            test_ds = test_ds.batch(batch_size=1)

            # Create model
            model = DGCNN_seg(opt, opt['train'].get("k"))

            # Load the checkpoint trained with this area held out.
            #param_dict = load_checkpoint(opt['val']['pretrained_ckpt'])
            param_dict=load_checkpoint(os.path.join(opt["val"]["pretrained_ckpt"],"model_%s.ckpt"%test_area))
            load_param_into_net(model, param_dict)
            print("sucessfully load pretrain model_%s.ckpt"%test_area)
            # Inference mode: disable training behavior and gradient tracking.
            model.set_train(False)
            model.set_grad(False)

            test_acc=0.0  # NOTE(review): overwritten below; initial value is unused.
            test_true_cls=[]
            test_pred_cls=[]
            test_true_seg=[]
            test_pred_seg=[]
            for _, data in tqdm(enumerate(test_ds.create_dict_iterator(), 0)):
                points,target=data["data"],data["label"]
                seg_pred=model(points)
                # Move the class dimension last before taking the argmax
                # (assumes the network emits (batch, classes, points) — TODO confirm).
                seg_pred=seg_pred.transpose(0,2,1)
                # ArgMaxWithValue returns (indices, values); only the indices
                # (predicted class per point) are used — ``pred`` is unused.
                argmax=ops.ArgMaxWithValue(axis=2)
                index,pred=argmax(seg_pred)
                pred_np=index.asnumpy()
                seg_np=target.asnumpy()
                test_true_seg.append(seg_np)
                test_pred_seg.append(pred_np)

                # Flattened copies for the point-wise accuracy metrics.
                test_true_cls.append(seg_np.reshape(-1))
                test_pred_cls.append(pred_np.reshape(-1))
            test_true_seg=np.concatenate(test_true_seg,axis=0)
            test_pred_seg=np.concatenate(test_pred_seg,axis=0)

            test_true_cls=np.concatenate(test_true_cls)
            test_pred_cls=np.concatenate(test_pred_cls)

            # Per-area metrics: mean IoU, overall and class-balanced accuracy.
            test_ious=calculate_sem_IoU(test_pred_seg,test_true_seg)
            test_acc=metrics.accuracy_score(test_true_cls,test_pred_cls)
            avg_per_class_acc=metrics.balanced_accuracy_score(test_true_cls,test_pred_cls)

            outstr = 'Test :: test area: %s, test acc: %.6f, test avg acc: %.6f, test iou: %.6f' % (test_area,
                                                                                                    test_acc,
                                                                                                    avg_per_class_acc,
                                                                                                     np.mean(test_ious))
            print(outstr)
            # Keep this area's results for the optional overall summary.
            all_true_cls.append(test_true_cls)
            all_pred_cls.append(test_pred_cls)
            all_true_seg.append(test_true_seg)
            all_pred_seg.append(test_pred_seg)


    # Aggregate metrics over every area when the config selects "all".
    if opt["datasets"]['val'].get("area") == 'all':
        all_true_cls = np.concatenate(all_true_cls)
        all_pred_cls = np.concatenate(all_pred_cls)
        all_acc = metrics.accuracy_score(all_true_cls, all_pred_cls)
        avg_per_class_acc = metrics.balanced_accuracy_score(all_true_cls, all_pred_cls)
        all_true_seg = np.concatenate(all_true_seg, axis=0)
        all_pred_seg = np.concatenate(all_pred_seg, axis=0)
        all_ious = calculate_sem_IoU(all_pred_seg, all_true_seg)
        outstr = 'Overall Test :: test acc: %.6f, test avg acc: %.6f, test iou: %.6f' % (all_acc,
                                                                                         avg_per_class_acc,
                                                                                         np.mean(all_ious))
        print(outstr)

def pointnet2_seg_eval(opt):
    """Evaluate PointNet++ (SSG) semantic segmentation on ScanNet.

    Configures the MindSpore context (Ascend/GPU, optional data-parallel
    distribution), builds the eval dataset and network, restores the
    pretrained checkpoint and prints the mIoU computed by ``model.eval``.

    Args:
        opt (dict): parsed YAML options; reads ``device_target``,
            ``run_distribute``, ``datasets.val.*``, ``datasets.train.num_classes``
            and ``val.pretrained_ckpt``.

    Raises:
        ValueError: if ``opt['device_target']`` is neither "Ascend" nor "GPU".
    """
    # Device selection falls back to card 1 / single card when the scheduler
    # environment variables are absent.
    device_id = int(os.getenv('DEVICE_ID', '1'))
    device_num = int(os.getenv('RANK_SIZE', '1'))

    if not opt['device_target'] in ("Ascend", "GPU"):
        raise ValueError("Unsupported platform {}".format(opt['device_target']))

    if opt['device_target'] == "Ascend":
        context.set_context(mode=context.GRAPH_MODE,
                            device_target="Ascend",
                            device_id=device_id)
        # Deep point-cloud graphs exceed the default call-depth limit.
        context.set_context(max_call_depth=20480)
    else:
        context.set_context(mode=context.GRAPH_MODE,
                            device_target="GPU",
                            device_id=device_id,
                            max_call_depth=20480)

    # Optional distributed evaluation.
    if opt['run_distribute']:
        if opt['device_target'] == "Ascend":
            if device_num > 1:
                init()
                context.set_auto_parallel_context(
                    parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True)
        else:
            if device_num > 1:
                mindspore.dataset.config.set_enable_shared_mem(False)
                context.set_auto_parallel_context(
                    parallel_mode=context.ParallelMode.DATA_PARALLEL,
                    gradients_mean=True,
                    device_num=device_num)
                mindspore.common.set_seed(1234)
                init()
            else:
                context.set_context(device_id=device_id)

    # Data pipeline.
    eval_dataset = create_scannet_dataset(opt, split="val")
    test_steps_per_epoch = eval_dataset.get_dataset_size()

    # Model loading.
    network = Pointnet2segModelSSG(num_classes=opt['datasets']['val'].get('num_classes'),
                                   use_color=opt['datasets']['val'].get('use_color'),
                                   use_normal=opt['datasets']['val'].get('use_normal'))
    # Restore pretrained weights.
    param_dict = load_checkpoint(opt['val']['pretrained_ckpt'])
    load_param_into_net(network, param_dict)

    # Define metrics. Named ``eval_metrics`` so it does not shadow the
    # imported ``sklearn.metrics`` module.
    # NOTE(review): num_classes here is read from datasets.train while the
    # network above uses datasets.val — confirm both configs agree.
    eval_metrics = {'IoU': mIoU(test_steps_per_epoch,
                                opt['datasets']['val'].get('batch'),
                                opt['datasets']['train'].get('num_classes'))}

    # A loss cell is required by Model even though only metrics are reported.
    network_loss = NLLLoss(reduction="mean")
    net_with_criterion = CustomWithLossCell(network, network_loss)
    eval_network = ScannetWithEvalCell(network, True)

    model = Model(net_with_criterion,
                  eval_network=eval_network,
                  metrics=eval_metrics)

    # Begin to eval.
    result = model.eval(eval_dataset, dataset_sink_mode=True)
    print("=============Accuracy================")
    print(result)
    print("=====================================")
    print("eval completed...")


if __name__=="__main__":
    parser=argparse.ArgumentParser(description="Eval.")
    parser.add_argument("-opt",type=str,default="/home/czy/HuaWei/final2023/mind3d/configs/pointnet2/pointnet2_scannet_seg.yaml",help='Path to option YAML file.')
    args = parser.parse_known_args()[0]
    opt = load_yaml(args.opt)
    network_name = opt["model_name"]
    if network_name == "DGCNN_seg":
        dgcnn_seg_eval(opt)
    elif network_name == "Pointnet2_seg":
        pointnet2_seg_eval(opt)