import os, numpy as np, argparse, json, sys, numba, yaml, shutil
import multiprocessing
# import torch.multiprocessing as multiprocessing
import mot_3d.visualization as visualization, mot_3d.utils as utils
from mot_3d.data_protos import BBox, Validity
from mot_3d.mot import MOTModel
from mot_3d.frame_data import FrameData
from mot_3d.utils import Timer
import time
from cyw_devkit import CywDataset

# Shared timing helper (window size 10) for coarse profiling of the pipeline.
timer = Timer(10)

parser = argparse.ArgumentParser()
# running configurations
parser.add_argument('--name', type=str, default='immortal')          # experiment name; used in the output folder name
parser.add_argument('--det_name', type=str, default='cp')            # detector tag; appended to the output folder name
parser.add_argument('--process', type=int, default=1)                # number of worker processes (round-robin sharding)
parser.add_argument('--visualize', action='store_true', default=False)  # NOTE(review): defined but not used in this file — confirm
parser.add_argument('--test', action='store_true', default=False)    # also dump per-frame tracker labels next to the inputs
parser.add_argument('--obj_type', type=str, default='Car', choices=['Car', 'Pedestrian', 'Bus', 'Truck'])
# paths
parser.add_argument('--datasets_path', type=str)                     # root folder containing '__'-prefixed sequence dirs
parser.add_argument('--config_path', type=str, default='configs/carla_configs/immortal_for_ctrl_keep10.yaml')
parser.add_argument('--result_folder', type=str, default='./mot_results/carla')
args = parser.parse_args()

# Mapping between human-readable class names and the integer labels used in
# the saved results; idx2class is the inverse for serialization.
class2idx = {
    'Car': 0,
    'Pedestrian': 1,
    'Bus': 2,
    'Truck': 3,
}
idx2class = {val: key for key, val in class2idx.items()}

if not os.path.exists(args.result_folder):
    os.makedirs(args.result_folder)


def gt_bbox2world(bboxes, egos):
    """Transform per-frame ground-truth boxes from ego to world coordinates.

    Mutates ``bboxes`` in place (each box is replaced by its world-frame
    version via ``BBox.bbox2world``) and returns the same nested list.
    """
    for frame_idx, ego in enumerate(egos):
        frame_boxes = bboxes[frame_idx]
        for box_idx, box in enumerate(frame_boxes):
            frame_boxes[box_idx] = BBox.bbox2world(ego, box)
    return bboxes


def frame_visualization(bboxes, ids, states, gt_bboxes=None, gt_ids=None, pc=None, dets=None, name=''):
    """Render one frame: point cloud, GT boxes (black), detections (dashed
    green), and tracks (red if alive per Validity.agein1, else light blue).

    Fix: ``gt_bboxes`` and ``dets`` default to None but were iterated
    unconditionally, so calling with the defaults raised TypeError.
    """
    visualizer = visualization.Visualizer2D(name=name, figsize=(12, 12))
    if pc is not None:
        visualizer.handler_pc(pc)
    if gt_bboxes is not None:
        for bbox in gt_bboxes:
            visualizer.handler_box(bbox, message='', color='black')
    if dets is not None:
        # Only draw detections above a fixed confidence threshold.
        for det in dets:
            if det.s >= 0.1:
                visualizer.handler_box(det, message='%.2f' % det.s, color='green', linestyle='dashed')
    for bbox, id, state in zip(bboxes, ids, states):
        if Validity.agein1(state):
            visualizer.handler_box(bbox, message=str(id), color='red')
        else:
            visualizer.handler_box(bbox, message=str(id), color='light_blue')
    visualizer.show()
    visualizer.save('temp.jpg')
    visualizer.close()
    # NOTE(review): interactive breakpoint halts the pipeline after every
    # rendered frame — presumably intentional for debugging; confirm before
    # running unattended.
    import pdb
    pdb.set_trace()


def sequence_mot(configs, data_loader: CywDataset):
    """Run the tracker over one sequence.

    Args:
        configs: model configuration dict (loaded from YAML).
        data_loader: sequence iterator; each item is a dict whose 'label'
            entry may be None for frames without annotations.

    Returns:
        Tuple of per-frame lists ``(IDs, bboxes, states, types)``; boxes are
        serialized with ``BBox.bbox2array``.

    Fixes: removed unused local ``frame_num``, the dead ``'time_stamp_int'``
    entry, and the intermediate dict that only fed ``FrameData``.
    """
    tracker = MOTModel(configs)
    IDs, bboxes, states, types = [], [], [], []
    for idx, data in enumerate(data_loader):
        if data['label'] is None:
            continue

        stamp = data['label'].header.stamp
        ego = data_loader.data_dict['super_transform'].localizer.get_tf(stamp=stamp)

        # Keep only detections of the requested object type.
        det_types, dets = [], []
        for obj_type, tensor in zip(data['label'].data.obj_types, data['label'].data.tensors):
            if args.obj_type != obj_type:
                continue
            if obj_type in class2idx:
                det_types.append(class2idx[obj_type])
                # Layout: (x, y, z, yaw, l, w, h, score); score fixed at 1.0
                # since these come from labels rather than a detector.
                dets.append(
                    np.array((tensor[0], tensor[1], tensor[2], tensor[6], tensor[3], tensor[4], tensor[5], 1.0)))
        if not det_types:
            continue

        # NOTE(review): stamp appears to be in milliseconds (divided by 1e3
        # to get seconds) — confirm against the dataset's header convention.
        frame_data = FrameData(
            dets=dets,
            ego=ego,
            pc=None,
            det_types=det_types,
            aux_info={'is_key_frame': True, 'velos': None},
            time_stamp=stamp / 1e3,
            abs_frame_index=data_loader.keyframe_idxs.index(idx),
        )

        # One tracking step for this frame.
        frame_results = tracker.frame_mot(frame_data)

        if args.test:
            # Dump per-frame tracker output next to the input labels.
            save_path = os.path.join(data_loader.father_path,
                                     data_loader.data_dict['frames'][idx]['label'].replace('label', 'label_tracker'))
            test(frame_results, save_path)

        # Wrap per-frame outputs for the npz summary.
        IDs.append([trk['id'] for trk in frame_results])
        bboxes.append([BBox.bbox2array(trk['bboxes']) for trk in frame_results])
        states.append([trk['state'] for trk in frame_results])
        types.append([trk['type'] for trk in frame_results])

    return IDs, bboxes, states, types


def test(frame_results, save_path):
    result_pred_bboxes = [trk['bboxes'] for trk in frame_results]
    result_pred_ids = [trk['id'] for trk in frame_results]
    result_pred_states = [trk['state'] for trk in frame_results]
    result_types = [trk['type'] for trk in frame_results]

    json_data = []
    for id, cidx, box, state_string in zip(result_pred_ids, result_types, result_pred_bboxes, result_pred_states):

        tokens = state_string.split('_')
        if len(tokens) < 3:
            continue
        if tokens[0] == 'alive' and int(tokens[1]) == 0:
            continue

        data = {
            'obj_id': str(id),
            'obj_type': idx2class[cidx],
            'psr': {
                'position': {
                    'x': box.x,
                    'y': box.y,
                    'z': box.z
                },
                'rotation': {
                    'x': 0,
                    'y': 0,
                    'z': box.o
                },
                'scale': {
                    'x': box.l,
                    'y': box.w,
                    'z': box.h
                }
            }
        }
        json_data.append(data)
    with open(save_path, 'w', encoding='utf-8') as f:
        json.dump(json_data, f, ensure_ascii=False, indent=2)


def main(name, config_path, result_folder, counter_list, token=0, process=1):
    """Worker entry point: track every dataset assigned to this worker.

    Args:
        name: experiment name (currently unused inside the worker).
        config_path: path to the YAML model config.
        result_folder: output root; per-sequence npz summaries are written
            under ``result_folder/summary/<obj_type>/`` (created by caller).
        counter_list: shared manager list used as a cross-process progress counter.
        token: this worker's index in ``[0, process)``.
        process: total number of workers; sequences are sharded round-robin.

    Fixes: close the config file (previously leaked via ``safe_load(open(...))``),
    drop the redundant '__' re-check and the identity sort key, and persist
    the ``types`` result that was previously computed but discarded.
    """
    summary_folder = os.path.join(result_folder, 'summary', args.obj_type)

    # Only directory names starting with '__' are treated as sequences.
    datasets_valid = sorted(d for d in os.listdir(args.datasets_path) if d[:2] == '__')

    # Load model configs with a context manager so the handle is closed.
    with open(config_path, 'r') as f:
        configs = yaml.safe_load(f)
    gpu = configs['running'].get('gpu', False)
    if gpu:
        import torch
        # Spread workers across (up to) 8 GPUs round-robin.
        torch.cuda.set_device(token % 8)

    for file_index, dataset in enumerate(datasets_valid):
        # Round-robin sharding: each worker handles every `process`-th sequence.
        if file_index % process != token:
            continue
        full_path = os.path.join(args.datasets_path, dataset)
        data_loader = CywDataset(dataset_path=full_path, radar_dim=7)
        ids, bboxes, states, types = sequence_mot(configs, data_loader)
        # `types` was previously dropped; saving it is a backward-compatible
        # addition to the npz archive.
        np.savez_compressed(os.path.join(summary_folder, '{}.npz'.format(dataset)),
                            ids=ids, bboxes=bboxes, states=states, types=types)
        counter_list.append(file_index)
        print('FINISH TYPE {:} SEQ {:} / {:}'.format(args.obj_type, len(counter_list), len(datasets_valid)))

if __name__ == '__main__':
    # Output layout: <result_folder>/<name>_<det_name>/summary/<obj_type>/
    result_folder = os.path.join(args.result_folder, args.name + f'_{args.det_name}')
    summary_folder = os.path.join(result_folder, 'summary', args.obj_type)
    # exist_ok creates the whole chain in one call (replaces three
    # exists/makedirs pairs).
    os.makedirs(summary_folder, exist_ok=True)

    # Manager-backed list shared across workers for progress reporting.
    manager = multiprocessing.Manager()
    counter_list = manager.list()

    beg = time.time()
    if args.process > 1:
        pool = multiprocessing.Pool(args.process)
        async_results = [
            pool.apply_async(main, args=(args.name, args.config_path, result_folder,
                                         counter_list, token, args.process))
            for token in range(args.process)
        ]
        pool.close()
        pool.join()
        # Fix: apply_async swallows worker exceptions until .get() is called;
        # previously failures were silently ignored.
        for res in async_results:
            res.get()
    else:
        main(args.name, args.config_path, result_folder, counter_list, 0, 1)
    end = time.time()
    print(f'Tracking time cost: {end - beg}s')