import argparse
import glob
from pathlib import Path

#try:
#    import open3d
#    from visual_utils import open3d_vis_utils as V
#    OPEN3D_FLAG = True
#except:
#    import mayavi.mlab as mlab
#    from visual_utils import visualize_utils as V
#    OPEN3D_FLAG = False

import numpy as np
import torch
import time
from pcdet.config import cfg, cfg_from_yaml_file
from pcdet.datasets import DatasetTemplate
from pcdet.models import build_network, load_data_to_gpu
from pcdet.utils import common_utils


class DemoDataset(DatasetTemplate):
    def __init__(self, dataset_cfg, class_names, training=True, root_path=None, logger=None, ext='.bin'):
        """
        Dataset wrapper that feeds raw point-cloud files (.bin / .npy / .pcd) to the model.

        Args:
            dataset_cfg: dataset config node (e.g. cfg.DATA_CONFIG)
            class_names: list of detection class names
            training: train/eval flag passed through to DatasetTemplate
            root_path (Path): a single point-cloud file, or a directory of them
            logger: logger instance
            ext: file extension to glob for when root_path is a directory
        """
        super().__init__(
            dataset_cfg=dataset_cfg, class_names=class_names, training=training, root_path=root_path, logger=logger
        )
        self.root_path = root_path
        self.ext = ext
        data_file_list = glob.glob(str(root_path / f'*{self.ext}')) if self.root_path.is_dir() else [self.root_path]
        data_file_list.sort()
        self.sample_file_list = data_file_list

    def __len__(self):
        return len(self.sample_file_list)

    @staticmethod
    def _load_pcd_points(file_path):
        """Parse an ASCII PCD file into an (N, 5) float array: x, y, z, intensity, 0.

        The trailing zero column pads each point to the 5-feature layout used by
        the nuscenes-style config (timestamp channel zeroed out).

        Raises:
            ValueError: if the PCD header lacks any of the x/y/z/intensity fields.
        """
        with open(file_path, 'r', encoding='utf-8') as f:
            lines = f.readlines()

        # Locate the FIELDS header line and the first row of the DATA section.
        data_start_idx = 0
        fields = []
        for i, line in enumerate(lines):
            if line.startswith('FIELDS'):
                fields = line.split()[1:]
            elif line.startswith('DATA'):
                data_start_idx = i + 1
                break

        required = ('x', 'y', 'z', 'intensity')
        if any(name not in fields for name in required):
            raise ValueError("PCD file does not contain x, y, z, and intensity fields.")

        # Column index of each required field within a data row.
        col_idx = [fields.index(name) for name in required]

        data = []
        for line in lines[data_start_idx:]:
            # Skip invalid (NaN) points and blank lines.
            if "nan" in line or not line.strip():
                continue
            values = line.split()
            data.append([float(values[c]) for c in col_idx])

        # reshape(-1, 4) keeps the array 2-D even when no valid points were read;
        # a bare np.array([]) is 1-D and would break the hstack below.
        data = np.array(data, dtype=np.float64).reshape(-1, 4)
        zeros = np.zeros((data.shape[0], 1))
        return np.hstack((data, zeros))

    def __getitem__(self, index):
        """Load the point cloud at `index` and run the standard pre-processing chain."""
        if self.ext == '.bin':
            # points = np.fromfile(...).reshape(-1, 4)  # original (kitti: 4 features per point)
            points = np.fromfile(self.sample_file_list[index], dtype=np.float32).reshape(-1, 5)  # for nuscenes
        elif self.ext == '.npy':
            points = np.load(self.sample_file_list[index])
        elif self.ext == '.pcd':
            points = self._load_pcd_points(self.sample_file_list[index])
        else:
            raise NotImplementedError

        input_dict = {
            'points': points,
            'frame_id': index,
        }

        return self.prepare_data(data_dict=input_dict)


def parse_config():
    """Build the CLI parser, parse the arguments, and load the YAML config into `cfg`.

    Returns:
        (args, cfg): the parsed argparse namespace and the populated config object.
    """
    parser = argparse.ArgumentParser(description='arg parser')
    cli_options = [
        ('--cfg_file', 'cfgs/kitti_models/second.yaml', 'specify the config for demo'),
        ('--data_path', 'demo_data', 'specify the point cloud data file or directory'),
        ('--ckpt', None, 'specify the pretrained model'),
        ('--ext', '.bin', 'specify the extension of your point cloud data file'),
    ]
    for flag, default, help_text in cli_options:
        parser.add_argument(flag, type=str, default=default, help=help_text)

    args = parser.parse_args()
    cfg_from_yaml_file(args.cfg_file, cfg)
    return args, cfg

def nms_3d(boxes, scores, iou_threshold):
    """
    Apply greedy 3D NMS to filter boxes based on the given IoU threshold.

    NOTE(review): the IoU below treats boxes as axis-aligned (the heading in
    boxes[:, 6] is ignored), so it is an approximation for rotated boxes.

    Args:
        boxes (torch.Tensor): The boxes to apply NMS, shape [N, 7] (cx, cy, cz, dx, dy, dz, heading).
        scores (torch.Tensor): Scores for each box, shape [N].
        iou_threshold (float): IoU threshold for NMS.
    Returns:
        keep (torch.Tensor): Long tensor of indices of boxes to keep, in
            descending score order; safe to use for tensor indexing even when empty.
    """
    # Axis-aligned IoU between one box (box1, shape [1, 7]) and many (box2, [M, 7]).
    def iou_3d(box1, box2):
        # Intersection extents: overlap of the (center +/- half-size) intervals per axis.
        max_xyz = torch.min(box1[:, :3] + box1[:, 3:6] / 2, box2[:, :3] + box2[:, 3:6] / 2)
        min_xyz = torch.max(box1[:, :3] - box1[:, 3:6] / 2, box2[:, :3] - box2[:, 3:6] / 2)
        inter = torch.clamp(max_xyz - min_xyz, min=0)
        inter_vol = inter[:, 0] * inter[:, 1] * inter[:, 2]

        # Volume of each box (dx * dy * dz).
        vol1 = box1[:, 3] * box1[:, 4] * box1[:, 5]
        vol2 = box2[:, 3] * box2[:, 4] * box2[:, 5]

        union_vol = vol1 + vol2 - inter_vol
        return inter_vol / union_vol

    keep = []
    _, idxs = scores.sort(descending=True)

    while idxs.numel() > 0:
        i = idxs[0]
        # The highest-scoring remaining box is always kept.
        # Append a plain int: torch.tensor() on a list of 0-dim tensors triggers
        # a copy-construct warning, and an empty list would yield a float tensor.
        keep.append(int(i))
        if idxs.numel() == 1:
            break

        # Suppress every remaining box whose overlap with the kept box is too high.
        ious = iou_3d(boxes[i].unsqueeze(0), boxes[idxs[1:]])
        idxs = idxs[1:][ious < iou_threshold]

    # Explicit long dtype so the result can index boxes/scores even when keep == [].
    return torch.tensor(keep, dtype=torch.long)


def post_process(pred_dict, score_th=0.3, iou_th=0.15):
    """Filter one frame's predictions by confidence, then apply 3D NMS.

    Args:
        pred_dict: dict with 'pred_boxes' [N, 7], 'pred_scores' [N] and
            'pred_labels' [N] torch tensors (possibly on GPU).
        score_th: minimum confidence score for a detection to survive.
        iou_th: IoU threshold forwarded to nms_3d.
    Returns:
        dict with the same three keys holding the surviving CPU tensors.
    """
    # Move everything to CPU before filtering.
    boxes = pred_dict['pred_boxes'].cpu()
    scores = pred_dict['pred_scores'].cpu()
    labels = pred_dict['pred_labels'].cpu()

    # Keep only detections whose score exceeds the threshold.
    mask = scores > score_th
    boxes, scores, labels = boxes[mask], scores[mask], labels[mask]

    # Greedy 3D NMS on the remaining boxes.
    keep = nms_3d(boxes, scores, iou_th)
    return {
        'pred_boxes': boxes[keep],
        'pred_scores': scores[keep],
        'pred_labels': labels[keep],
    }


def main():
    """Run the detector over every sample under --data_path and print the filtered results."""
    args, cfg = parse_config()
    logger = common_utils.create_logger()
    logger.info('-----------------Quick Demo of OpenPCDet-------------------------')

    demo_dataset = DemoDataset(
        dataset_cfg=cfg.DATA_CONFIG, class_names=cfg.CLASS_NAMES, training=False,
        root_path=Path(args.data_path), ext=args.ext, logger=logger
    )
    logger.info(f'Total number of samples: \t{len(demo_dataset)}')

    # Build the network, load the checkpoint weights, and switch to inference mode.
    model = build_network(model_cfg=cfg.MODEL, num_class=len(cfg.CLASS_NAMES), dataset=demo_dataset)
    model.load_params_from_file(filename=args.ckpt, logger=logger, to_cpu=True)
    model.cuda()
    model.eval()

    with torch.no_grad():
        for idx, sample in enumerate(demo_dataset):
            logger.info(f'Visualized sample index: \t{idx + 1}')
            # Wrap the single sample into a batch and move it onto the GPU.
            batch_dict = demo_dataset.collate_batch([sample])
            load_data_to_gpu(batch_dict)
            pred_dicts, _ = model.forward(batch_dict)
            # Confidence filtering + 3D NMS on the raw network output.
            final_ret = post_process(pred_dicts[0])
            print(final_ret)

            #V.draw_scenes(
            #    points=batch_dict['points'][:, 1:], ref_boxes=final_ret['pred_boxes'],
            #    ref_scores=final_ret['pred_scores'], ref_labels=final_ret['pred_labels'])

            #if not OPEN3D_FLAG:
            #    mlab.show(stop=True)

    logger.info('Demo done.')


# Script entry point: run the demo only when executed directly, not on import.
if __name__ == '__main__':
    main()
