from concurrent import futures
import os 
import numpy as np
import pickle
import json
import time
import copy
from tqdm import tqdm
from datetime import datetime
import pytz
# Structured numpy dtypes describing one per-frame record.
# Field ORDER matters: FastHJdataset's index constants (__scene=0, __frame=1,
# ...) address record fields by position, so they must match frametype_v0.
frametype_v0 = np.dtype([
	('scene', 'S40'),   # full scene name, 40-byte string
	('frame', 'int64'),  # frame id (timestamp stem of the sync file)
	('map_gt', 'bool'),  # frame has map ground truth
	('box_gt', 'bool'),  # frame has box ground truth (anno or interp)
	('has_anno', 'bool'),   # manual object annotation file exists
    ('has_interp','bool'),  # interpolated object annotation file exists
    ('has_cmd', 'bool'),    # e2e driving-command table covers this frame
	('has_motion', 'bool'),  # e2e motion table covers this frame
    ('has_self_traj', 'bool'),  # e2e ego future trajectory available
    ('has_box_traj', 'bool'),   # e2e object future trajectory available
    ('has_navi', 'bool'),   # navigation info available
    ('has_sdmap', 'bool'),  # SD-map info available
])
# Compact variant: shorter scene string, fewer flags.
frametype_v1 = np.dtype([
	('scene', 'S20'),
	('frame', 'int64'),  # 64-bit integer
	('map_gt', 'bool'),  # boolean
	('box_gt', 'bool'),  # boolean
	('has_interp', 'bool'),  # boolean
	('has_motion', 'bool'),  # boolean
	('has_anno', 'bool')   # boolean
])
# Most compact variant: scene stored as a uint8 category id (see encode_scene).
frametype_v2 = np.dtype([
	('scene', 'uint8'),
	('frame', 'int64'),  # 64-bit integer
	('map_gt', 'bool'),  # boolean
	('box_gt', 'bool'),  # boolean
	('has_interp', 'bool'),  # boolean
	('has_motion', 'bool'),  # boolean
	('has_anno', 'bool')   # boolean
])

# Camera names every scene is expected to provide (validated in check_dataset).
cameras_list = ['front', 'front_fisheye', 'left_fisheye', 'back_fisheye','right_fisheye','back']
class FastHJdataset():
    # Positional indices into a frametype_v0 record; get_scenes_one fills the
    # structured array by position using these. They must stay in sync with
    # the field order of frametype_v0 (names are mangled to _FastHJdataset__*).
    __scene=0
    __frame=1
    __map_gt=2
    __box_gt=3
    __has_anno=4
    __has_interp=5
    __has_cmd=6
    __has_motion=7
    __has_self_traj=8
    __has_box_traj=9
    __has_navi=10
    __has_sdmap=11

    def __init__(self, dataset_path,
                data_type='release',
                pag: list = None,
                exclude_pag=None,
                frametype=frametype_v0):
        """Index helper over an HJ fisheye dataset directory tree.

        Args:
            dataset_path: root directory containing one folder per pag/scene.
            data_type: sub-directory name used when composing frame paths.
            pag: explicit list of pag names; when empty/None, dataset_path is
                scanned with os.listdir.
            exclude_pag: pag names dropped by get_scenes_list.
            frametype: numpy structured dtype for the per-frame records.
        """
        self.frametype = frametype
        self.data_type = data_type
        self.dataset_path = dataset_path

        # Bug fix: the defaults were mutable ([]) and a caller-supplied list
        # was sorted in place; copy before sorting so callers are unaffected.
        if not pag:
            self.data_pag = os.listdir(dataset_path)
        else:
            self.data_pag = list(pag)
        self.data_pag.sort()
        self.exclude_pag = [] if exclude_pag is None else exclude_pag
        self.scenes_list_mask = None
        self.scenes_list = None
        self.infos = []
        self.scene_suffix_name_num = dict()
        # self.data = self.check_cache()
    def check_cache(self):
        """Load the cached dataset index, building it on first use.

        Returns:
            The unpickled cache dict (see write_to_pkl for its layout).
        """
        _path = os.path.join(self.dataset_path, 'fast_cache.pkl')
        if not os.path.exists(_path):
            # Bug fix: the original called the non-existent self.get_fast_cache(),
            # wrote the pkl to the CWD (not dataset_path, where it is looked
            # up), and returned None on the build path.
            self.get_fast_cache_loop()
            self.print_log()
            self.write_to_pkl(_path)
        with open(_path, 'rb') as f:
            data = pickle.load(f)
        return data
    def get_scenes_list_anno(self):
        """Return the pags that have a map_anno or obj_anno directory.

        Bug fix: map_anno/obj_anno were only assigned when the directory
        existed, so a pag with neither raised UnboundLocalError on the first
        iteration and later iterations reused stale True values.
        """
        scenes_list = []
        for pag in self.data_pag:
            _path = os.path.join(self.dataset_path, pag)
            map_anno = os.path.exists(os.path.join(_path, 'map_anno'))
            obj_anno = os.path.exists(os.path.join(_path, 'obj_anno'))
            if map_anno or obj_anno:
                scenes_list.append(pag)
        return scenes_list

    def get_scenes_list_box_anno(self):
        """Mark mask column 0 for every pag that has any box-annotation folder."""
        print(f'get scenes list box anno: {len(self.data_pag)}')
        box_dirs = ('obj_anno', 'obj_interp', 'obj_motion')
        for idx, pag in enumerate(self.data_pag):
            scene_dir = os.path.join(self.dataset_path, pag)
            if any(os.path.exists(os.path.join(scene_dir, d)) for d in box_dirs):
                self.scenes_list_mask[idx, 0] = True
        return self.scenes_list_mask

    def get_scenes_list_map_anno(self):
        """Mark mask column 1 for every pag that has a map_anno folder."""
        print(f'get scenes list map anno: {len(self.data_pag)}')
        for idx, pag in enumerate(self.data_pag):
            map_dir = os.path.join(self.dataset_path, pag, 'map_anno')
            if os.path.exists(map_dir):
                self.scenes_list_mask[idx, 1] = True
        return self.scenes_list_mask
    

    # def get_scenes_list_motion_anno(self, dw="e2e_info"):
    #     return self.get_scenes_list_x("e2e_motion", self.__has_motion, dw=dw)
    # def get_scenes_list_cmd_anno(self, dw="e2e_info"):
    #     return self.get_scenes_list_x("e2e_cmd", self.__has_cmd, dw=dw)
    # def get_scenes_list_ego_future_anno(self, dw="e2e_info"):
    #     return self.get_scenes_list_x("e2e_ego_future", self.__has_box_traj, dw=dw)
    # def get_scenes_list_obj_future_anno(self, dw="e2e_info"):
    #     return self.get_scenes_list_x("e2e_obj_future", self.__has_obj_traj, dw=dw, endswith='.pkl')
    # def get_scenes_list_navi_anno(self, dw="e2e_info"):
    #     return self.get_scenes_list_x("e2e_navi", self.__has_navi, dw=dw, endswith='.pkl')
    def get_scenes_list_occ_anno(self):
        # NOTE(review): despite the name, this is a verbatim copy of
        # get_scenes_list_map_anno — it checks the 'map_anno' folder and
        # writes mask column 1. Presumably it should check an 'occ' folder;
        # confirm the intended behavior before relying on it.
        print(f'get scenes list map anno: {len(self.data_pag)}')
        scenes_list = []
        for k, pag in enumerate(self.data_pag):
            _path = os.path.join(self.dataset_path, pag)
            tmp_path = os.path.join(_path, 'map_anno')
            if os.path.exists(tmp_path):
                self.scenes_list_mask[k, 1] = True
        return self.scenes_list_mask
    def _load_X(self, _path, _x_file, endwith='.json'):
        """Load json file _path/_x_file if its name has the expected suffix.

        Returns the parsed content, or None when the suffix does not match.
        """
        if not _x_file.endswith(endwith):
            return None
        with open(os.path.join(_path, _x_file), 'r') as f:
            return json.load(f)
    
    def get_frame_list(self, scene):
        """Return the sorted sync_info filenames of *scene*, or None if the
        scene has no sync_info directory."""
        sync_dir = os.path.join(self.dataset_path, scene, 'sync_info')
        if not os.path.exists(sync_dir):
            return None
        return sorted(os.listdir(sync_dir))

    def get_link_from_scene(self, scene):
        """Return the first parsable sync_info json of *scene*, or None.

        Bug fix: the original referenced an undefined local `_path` and a
        non-existent method `self.load_sync`, so it always raised; it also
        crashed when the scene had no sync_info directory (None iterable).
        """
        sync_files = self.get_frame_list(scene)
        if not sync_files:
            return None
        _path = os.path.join(self.dataset_path, scene, 'sync_info')
        for _sync_file in sync_files:
            sync_info = self._load_X(_path, _sync_file)
            if sync_info:
                return sync_info
        return None

    def check_anno_x(self, scenes, anno_x, sync_info):
        """True if annotation file <anno_x>/<sync_info> exists under *scenes*."""
        return os.path.exists(
            os.path.join(self.dataset_path, scenes, anno_x, sync_info))

    def decode_scene(self, scenes, timestamp):
        """Reconstruct a full scene name from an encoded record (inverse of
        encode_scene).

        Bug fixes: the original referenced an undefined `utc` and the typo
        `geijing`, so every non-wide-string path raised NameError; `np.string_`
        (removed in numpy 2.0) is replaced by `np.bytes_`.

        Args:
            scenes: encoded scene field (full name, suffix, or category id).
            timestamp: frame timestamp, presumably in microseconds — TODO confirm.
        """
        frame_type = self.frametype['scene']
        # Wide byte-string fields (>= 40 chars) store the full name verbatim.
        if np.issubdtype(frame_type, np.bytes_) and frame_type.itemsize >= 40:
            return scenes
        beijing = pytz.timezone("Asia/Shanghai")
        ts_sec = int(timestamp // 1e6)  # microseconds -> seconds
        # Equivalent to utcfromtimestamp + localize(utc) + astimezone, without
        # the deprecated naive-UTC intermediate.
        beijing_time = datetime.fromtimestamp(ts_sec, tz=beijing)
        time_str = beijing_time.strftime("%Y_%m_%d_%H_%M_%S")
        if np.issubdtype(frame_type, np.integer):
            # NOTE(review): relies on self.data['scene_suffix_num_name'] (a
            # num->name reverse map) being populated — write_to_pkl only
            # stores 'scene_suffix_name_num'; confirm where the reverse map
            # is built.
            return time_str + '_' + self.data['scene_suffix_num_name'][scenes]
        else:
            return time_str + '_' + scenes

    def encode_scene(self, scene: str):
        """Encode a scene name into the representation of frametype['scene'].

        The suffix (name tokens after the 6-part date, concatenated) is
        registered in scene_suffix_name_num on first sight. Integer fields
        store that category id; wide byte-string fields (>= 40) store the full
        name; narrow byte-string fields store just the suffix.
        """
        frame_type = self.frametype['scene']
        suffix = ''.join(scene.split('_')[6:])
        # assign a stable category id the first time this suffix appears
        if suffix not in self.scene_suffix_name_num:
            self.scene_suffix_name_num[suffix] = len(self.scene_suffix_name_num)

        if np.issubdtype(frame_type, np.integer):
            code = self.scene_suffix_name_num[suffix]
            assert code < np.iinfo(frame_type).max, \
                f"{scene} suffix category num is too long than {np.iinfo(frame_type).max}(type: {frame_type})"
            return code
        if frame_type.itemsize >= 40:
            return scene
        assert len(suffix) < frame_type.itemsize, \
            f"{scene} suffix str len is too long than {frame_type.itemsize}(type: {frame_type})"
        return suffix
    def _get_scenes_list_x(self, keys, _path, endswith='.npy'):
        """Load <keys><endswith> from _path — .npy via numpy, anything else via
        pickle. Returns None when the file does not exist."""
        target = os.path.join(_path, keys + endswith)
        if not os.path.exists(target):
            return None
        if endswith == '.npy':
            return np.load(target)
        with open(target, 'rb') as f:
            return pickle.load(f)
    def check_e2e_info_(self, scene, dw="e2e_info"):
        """Load the four e2e auxiliary tables for *scene* from the sibling
        <dw> directory; each entry is None when its file is absent."""
        base = os.path.join(self.dataset_path, '../', dw, scene)
        cmd = self._get_scenes_list_x("e2e_cmd", base, endswith='.npy')
        ego_future = self._get_scenes_list_x("e2e_ego_future", base, endswith='.npy')
        motion = self._get_scenes_list_x("e2e_motion", base, endswith='.npy')
        obj_future = self._get_scenes_list_x("e2e_obj_future", base, endswith='.pkl')
        return cmd, ego_future, motion, obj_future
    def get_scenes_one(self, scene_id):
        """Build the structured frame table for one scene and store it in
        self.scenes[scene_id].

        Fields are written by POSITION using the class index constants, so
        self.frametype must be field-order-compatible with frametype_v0.
        Requires get_scenes_list() to have populated scenes_list and
        scenes_list_mask first. Always returns None.

        NOTE(review): if the scene has no sync_info dir, get_frame_list
        returns None and sync_file.sort() raises — confirm scenes_list only
        ever contains scenes with sync_info.
        """
        # print(frame_id)
        scene = self.scenes_list[scene_id]
        # scene='2023_11_04_10_04_40_ramp'
        sync_file = self.get_frame_list(scene)
        sync_file.sort()
        e2e_cmd, e2e_ego_future, e2e_motion, e2e_obj_future = self.check_e2e_info_(scene)
        _frames = np.zeros(len(sync_file), dtype=self.frametype)
        for frame_id, _sync_file in enumerate(sync_file):
            _frames[frame_id][self.__scene] = self.encode_scene(scene)
            # frame id = numeric stem of the sync filename
            _frames[frame_id][self.__frame] = int(_sync_file.split('.')[0])
            # e2e tables are only trusted when they cover every frame
            if e2e_cmd is not None and  len(e2e_cmd)==len(sync_file):
                _frames[frame_id][self.__has_cmd] = 1
            if e2e_ego_future is not None  and len(e2e_ego_future)==len(sync_file):
                _frames[frame_id][self.__has_self_traj] = 1
            if e2e_motion is not None and  len(e2e_motion)==len(sync_file):
                _frames[frame_id][self.__has_motion] = 1
            if e2e_obj_future is not None and len(e2e_obj_future)==len(sync_file):
                if _frames[frame_id][self.__frame] in e2e_obj_future.keys():
                    _frames[frame_id][self.__has_box_traj] = 1
            mask = self.scenes_list_mask[scene_id, :]
            if mask[0]:  # scene has some box-annotation folder
                if self.check_anno_x(scene, 'obj_interp', _sync_file):
                    _frames[frame_id][self.__has_interp]=1
                # NOTE(review): obj_motion sets __has_interp (not __has_motion)
                # — looks like a copy/paste slip, but box_gt below depends on
                # it; confirm intent before changing.
                if self.check_anno_x(scene, 'obj_motion', _sync_file):
                    _frames[frame_id][self.__has_interp]=1
                if self.check_anno_x(scene, 'obj_anno', _sync_file):
                    _frames[frame_id][self.__has_anno]=1

                # box ground truth = interpolated OR manual annotation
                _frames[frame_id][self.__box_gt] |= _frames[frame_id][self.__has_interp]
                _frames[frame_id][self.__box_gt] |= _frames[frame_id][self.__has_anno]

            if mask[1]:  # scene has a map_anno folder
                _frames[frame_id][self.__map_gt] = self.check_anno_x(scene, 'map_anno', _sync_file)
        self.scenes[scene_id] = _frames
        return None

    def get_scenes_list_exclude_pag(self):
        """Mark mask column 2 for every pag NOT listed in exclude_pag."""
        excluded = set(self.exclude_pag)
        for idx, pag in enumerate(self.data_pag):
            # mask starts as all-False, so direct assignment matches the
            # original "set True when not excluded" behavior
            self.scenes_list_mask[idx, 2] = pag not in excluded
                
    def get_scenes_list(self, has_box=True, has_map=True):
        """Compute the filtered scene list: (box-anno OR map-anno) AND not excluded.

        NOTE: has_box/has_map are currently unused; kept for interface
        compatibility.
        """
        self.exclude_sensor = ['']
        self.scenes_list_mask = np.zeros(shape=(len(self.data_pag), 3), dtype=bool)
        self.scenes_list = None
        self.get_scenes_list_box_anno()
        self.get_scenes_list_map_anno()
        self.get_scenes_list_exclude_pag()

        keep = (self.scenes_list_mask[:, 0] | self.scenes_list_mask[:, 1]) \
            & self.scenes_list_mask[:, 2]
        self.scenes_list = np.array(self.data_pag)[keep]
        print('all scenes:',len(self.data_pag))
        print('has_box:',sum(self.scenes_list_mask[:, 0]))
        print('has_map:',sum(self.scenes_list_mask[:, 1]))
        print('has_pag:',sum(self.scenes_list_mask[:, 2]))
        print('all scenes after filter:',len(self.scenes_list))
        return self.scenes_list

    def get_scenes_not_box(self):
        """Return the scenes that have NO box annotations and end with 'city'."""
        self.exclude_sensor = ['']
        self.scenes_list_mask = np.zeros(shape=(len(self.data_pag), 3), dtype=bool)
        self.scenes_list = None
        self.get_scenes_list_box_anno()

        no_box = ~self.scenes_list_mask[:, 0]
        candidates = np.array(self.data_pag)[no_box]
        self.scenes_list = [name for name in candidates if name.endswith('city')]
        print('all scenes:',len(self.data_pag))
        print('all scenes after filter:',len(self.scenes_list))
        return self.scenes_list
    
    def get_fast_cache_loop(self,):
        """Build the per-scene frame tables sequentially with a progress bar."""
        self.get_scenes_list()
        # one slot per scene; get_scenes_one fills self.scenes[idx] in place
        self.scenes = [[] for _ in range(len(self.scenes_list))]
        with tqdm(total=len(self.scenes_list)) as pbar:
            pbar.set_description('Processing:')
            for idx in range(len(self.scenes_list)):
                self.get_scenes_one(idx)
                pbar.update(1)
        return True

    def task_pool(self, func, iter, workers=10):
        """Map *func* over *iter* on a thread pool and return the results as a
        list (note: `iter` shadows the builtin; name kept for compatibility)."""
        with futures.ThreadPoolExecutor(workers) as executor:
            results = executor.map(func, iter)
            return list(results)

    def get_fast_cache_pool(self, workers=10):
        """Multi-threaded variant of get_fast_cache_loop.

        Bug fix: `[] * n` always yields an empty list, so get_scenes_one's
        `self.scenes[scene_id] = ...` raised IndexError; pre-allocate one slot
        per scene, mirroring get_fast_cache_loop.
        """
        self.get_scenes_list()
        self.scenes = [[] for _ in range(len(self.scenes_list))]
        info_scenes = self.task_pool(self.get_scenes_one, range(len(self.scenes_list)), workers=workers)

        def flatten(nest_list: list):
            # recursively flatten arbitrarily nested lists
            return [j for i in nest_list for j in flatten(i)] if isinstance(nest_list, list) else [nest_list]

        # NOTE(review): get_scenes_one returns None, so infos is a list of
        # Nones; the real frame tables live in self.scenes — confirm intent.
        self.infos = flatten(list(info_scenes))
        return True
    def check_dataset(self):
        """Validate every pag and keep only the ones that pass.

        Checks, per scene: sync_info exists; each folder in check_folder
        exists, has the same file count as sync_info, and contains no
        (near-)empty files; every expected camera appears in each sync_info
        and its jpg exists on disk. Filters self.data_pag in place and
        returns it.

        Bug fix: os.listdir(check_path) ran BEFORE the os.path.exists guard,
        so a missing folder raised FileNotFoundError instead of printing the
        diagnostic and skipping the scene.
        """
        source_folder = "sync_info"
        check_folder = ["ego_motion"]

        def check_d(i):
            # Validate one scene; print the reason and return False on the
            # first inconsistency, True when everything checks out.
            scene = self.data_pag[i]
            _souce_folder = os.path.join(self.dataset_path, scene, source_folder)
            if not os.path.exists(_souce_folder):
                print(f"{scene} {source_folder} not exist")
                return False
            souce_sum = len(os.listdir(_souce_folder))
            for check_folder_one in check_folder:
                check_path = os.path.join(self.dataset_path, scene, check_folder_one)
                # existence must be checked before listing
                if not os.path.exists(check_path):
                    print(f"{scene} {check_folder_one} not exist")
                    return False
                check_file = os.listdir(check_path)
                if len(check_file) != souce_sum:
                    print(f"{scene} {check_folder_one} not match")
                    return False
                for _file in check_file:
                    _path = os.path.join(check_path, _file)
                    # <= 10 bytes is treated as an empty/corrupt file
                    if os.path.getsize(_path) <= 10:
                        print(f"{scene} {check_folder_one} {_file} is empty")
                        return False
            sync_dir = os.path.join(self.dataset_path, scene, 'sync_info')
            sync_list = os.listdir(sync_dir)
            sync_list.sort()
            for sync_name in sync_list:
                sync = self._load_X(sync_dir, sync_name)
                for cam in cameras_list:
                    if cam not in sync['cameras'].keys():
                        print(f"{scene} {sync_name} {cam} not exist in sync_info")
                        return False
                    source_cameras_dir = os.path.join(self.dataset_path, scene, 'cameras', cam)
                    if not os.path.exists(source_cameras_dir):
                        print(f"{scene} {cam}  not exist in cameras")
                        return False
                    source_cam_file_path = os.path.join(source_cameras_dir, str(sync['cameras'][cam]) + '.jpg')
                    if not os.path.exists(source_cam_file_path):
                        print(f"{scene} {str(sync['cameras'][cam]) + '.jpg'}  not exist in cameras {cam}")
                        return False
            return True

        data_save = np.array([check_d(i) for i in range(len(self.data_pag))])
        print(f"{len(self.data_pag)} scenes check success")
        self.data_pag = np.array(self.data_pag)[data_save].tolist()
        print(f"{len(self.data_pag)} scenes check successed")
        return self.data_pag

    def print_log(self):
        """Print a one-line summary of the built index.

        Bug fix: the original interpolated self.infos itself, dumping the
        entire list into the log; report its length instead.
        """
        print(f"scenes: {len(self.scenes_list)}, frames: {len(self.infos)}")
    def add_static(self, data):
        """Attach summary statistics to a cache dict under data['static'].

        Counts, for every boolean flag field (all fields after scene/frame),
        how many frames have it set — overall and grouped by scene suffix.
        Mutates *data* in place and also returns it.
        """
        # flag field names: skip the first two fields (scene, frame)
        key = [i[0] for i in data['infos'].dtype.descr]
        key = key[2:]
        def static(a,key):
            # per-field sum over a structured array -> {field: count}
            b = [np.sum(a[i]) for i in key]
            static_by_gt = {}
            for k in range(len(b)):
                static_by_gt[key[k]] = b[k]
            return static_by_gt
        # a = [np.sum(data['infos'][i]) for i in key]
        static_by_gt = static(data['infos'], key)
        static_by_scene = {}

        for d in data['infos']:
            scene = d['scene']
            # NOTE(review): scene is a bytes field, so str(scene) yields
            # "b'...'" and the suffix ends with a quote — confirm this is the
            # intended grouping key.
            scena = str(scene).split('_')[-1]
            if not scena in static_by_scene.keys():
                static_by_scene[scena] = []
            static_by_scene[scena].append(d)
        for k,v in static_by_scene.items():
            # stack the group's records back into one structured array
            a = np.stack(v,0)
            static_by_scene[k] = static(a,key)
        data['static'] = {'static_by_gt': static_by_gt,
                        'static_by_scene': static_by_scene}
        return data
    def write_to_pkl(self, filename):
        """Concatenate the per-scene frame tables and pickle them together
        with metadata and summary statistics."""
        data = {
            'infos': np.concatenate(self.scenes, 0),
            'date': time.localtime(),
            'metadata': {'version': "scenes_frame_3.0"},
            'datatype': self.frametype,
            'scene_suffix_name_num': self.scene_suffix_name_num,
            'scene_attr': None,
        }
        self.add_static(data)  # mutates data in place, adding 'static'
        with open(filename, 'wb') as f:
            pickle.dump(data, f)
        
    def get_frame_calib(self, scenes, sensor=None):
        """Return the path to a sensor calibration json.

        Bug fix: the original built the path but never returned it, so the
        method always yielded None (its siblings return their paths).
        """
        _path = os.path.join(self.dataset_path, self.data_type, scenes, sensor + '.json')
        return _path
    def get_frame_image_path(self, scenes, frame, camera=None):
        """Return the jpg path for (scenes, frame) under *camera*."""
        return os.path.join(self.dataset_path, self.data_type, scenes,
                            camera, frame + '.jpg')
    def get_frame_lidar_path(self, scenes, frame, lidar=None):
        """Return the point-cloud .bin path for (scenes, frame) under *lidar*."""
        return os.path.join(self.dataset_path, self.data_type, scenes,
                            lidar, frame + '.bin')
    
    def get_anno(self, scenes, frame):
        """Return (box annotations, map annotations) for one frame.

        Bug fix: the original called get_anno_box()/get_anno_map() without
        their required (scenes, frame) arguments, raising TypeError.
        """
        return self.get_anno_box(scenes, frame), self.get_anno_map(scenes, frame)
        
    def get_anno_box(self, scenes, frame):
        """Load box annotations for one frame. Not implemented yet (stub)."""
        pass
    def get_anno_map(self, scenes, frame):
        """Load map annotations for one frame. Not implemented yet (stub)."""
        pass
    def _load_json(self, _path, _x_file):
        """Parse and return the json file _path/_x_file."""
        with open(os.path.join(_path, _x_file), 'rb') as f:
            return json.load(f)

    def R_camera_check(self, cam: str):
        """Resolve an 'R_'-prefixed camera alias to its base camera name.

        Returns (True, middle tokens joined) for names like 'R_xxx_yyy_zzz'
        (dropping the leading 'R' and the trailing token), otherwise
        (False, cam) unchanged.
        """
        parts = cam.split('_')
        if parts[0] != 'R':
            return False, cam
        return True, '_'.join(parts[1:-1])
    def _load_cams(self, info, sync_info):
        """Fill info['cams'] with image paths, calibrations and timestamps.

        NOTE(review): self.pipeline and self.data_root are never assigned in
        this class — presumably injected by a subclass or set externally;
        confirm before calling.
        """
        cams = self.pipeline.transforms[0].data_config['cams']
        _sync_info = sync_info['cameras']
        info['cams'] = {}
        for cam in cams:
            # 'R_*' names are aliases of a base camera (see R_camera_check)
            cam_r_flag, _cam = self.R_camera_check(cam)
            info['cams'][cam] = {}
            # load img path
            img_path = os.path.join(self.data_root,
                                    'fisheyes',
                                    info['scene'],
                                    "cameras",
                                    _cam,
                                    f"{_sync_info[_cam]}.jpg")
            info['cams'][cam]['data_path'] = img_path
            info['cams'][cam]['type'] = _cam
            info['cams'][cam]['ego2global'] = None
            # sensor-to-ego extrinsics from calib/<cam>_ego.txt
            _path = os.path.join(self.data_root,
                                    'fisheyes',
                                    info['scene'],
                                    "calib",
                                    f"{_cam}_ego.txt")
            info['cams'][cam]['sensor2ego'] = np.loadtxt(_path, dtype=np.float32)
            info['cams'][cam]['timestamp'] = _sync_info[_cam]
            _path = os.path.join(self.data_root,
                                    'fisheyes',
                                    info['scene'],
                                    "calib",
                                    _cam + '_intrinsic.txt')
            info['cams'][cam]['cam_intrinsic'] = np.loadtxt(_path, dtype=np.float32) 
            # distortion vector: slot 0 set to 1 below when real coefficients
            # are loaded, slots 1..9 hold the coefficients
            info['cams'][cam]['cam_distortion'] = np.zeros(10,dtype=np.float32)
            if cam.find(self.data_type) > 0:
                # only cameras whose name embeds data_type (past position 0)
                # get distortion coefficients from disk
                _path = os.path.join(self.data_root,
                                        'fisheyes',
                                        info['scene'],
                                        "calib",
                                        _cam + '_distortion.txt')
                info['cams'][cam]['cam_distortion'][0] = 1
                _distortion= np.loadtxt(_path, dtype=np.float32) 
                info['cams'][cam]['cam_distortion'][1:] = _distortion[:9]
        return info

    def _load_ego(self, info):
        """Attach the ego-to-global transform loaded from ego_motion/<frame>.txt."""
        frame_stem = info['frame'].split('.')[0]
        ego_path = os.path.join(self.data_root, 'fisheyes', info['scene'],
                                'ego_motion', f"{frame_stem}.txt")
        info['ego2global'] = np.loadtxt(ego_path, dtype=np.float32)
        return info
        
    def _load_occ(self, info):
        """Attach the occupancy png for this frame as a grayscale image.

        NOTE(review): `cv2` is never imported in this module, so calling this
        method raises NameError as-is; add `import cv2` before enabling it
        (its call site in load_input_dict is commented out).
        """
        _path = os.path.join(self.data_root,
                                    'fisheyes',
                                    info['scene'],
                                    'occ',
                                    info['frame']+'.png')
        info['occ'] = cv2.imread(_path, cv2.IMREAD_GRAYSCALE)
        return info
    def _load_map(self, info):
        """Attach vectorized lane lines when the frame has map ground truth."""
        if info['lane_gt']:
            map_dir = os.path.join(self.data_root, 'fisheyes',
                                   info['scene'], 'map_anno')
            info['vec_lines'] = self._load_json(map_dir, info['frame'])
        return info

    def get_box(self, ann_info):
        """Convert raw annotation dicts to an (N, 9) float32 box array plus
        category ids.

        Box layout: [cx, cy, cz, sx, sy, sz, yaw, 0, 0]. Category is the
        index in self.class_dict plus one, or 0 for unknown classes.
        """
        boxes = []
        categories = []
        for ann in ann_info:
            c, s = ann['center'], ann['size']
            row = np.array([c['x'], c['y'], c['z'],
                            s['x'], s['y'], s['z'],
                            ann['rotation']['z'], 0, 0])
            boxes.append(row.astype(np.float32))
            label = ann['category']
            categories.append(
                self.class_dict[label] + 1 if label in self.class_dict else 0)
        if boxes:
            stacked = np.stack(boxes)
        else:
            # keep the (0, 9) shape for the empty case
            stacked = np.array(boxes).reshape(-1, 9)
        return stacked, np.array(categories)
    def _load_object(self, info):
        """Attach parsed box annotations when the frame has object ground truth."""
        if info['object_gt']:
            obj_dir = os.path.join(self.data_root, 'fisheyes',
                                   info['scene'], 'obj_anno')
            raw = self._load_json(obj_dir, info['frame'])
            info['ann_infos'] = self.get_box(raw)
        return info
    def load_input_dict(self, info):
        """Populate *info* with cameras, ego pose, map and object annotations
        for one frame (lidar/occ loaders are currently disabled)."""
        _path = os.path.join(self.data_root,'fisheyes',info['scene'],"sync_info")

        sync_info = self._load_json(_path, info['frame'])
        info = self._load_cams(info, sync_info)
        # info = self.load_lidar(info)   
        info = self._load_ego(info)
        # info = self._load_occ(info)
        info = self._load_map(info)
        info = self._load_object(info)
        # info = self._load_ann(info)
        return info	
def readtxt(file):
    """Read a text file and return its lines with surrounding whitespace
    (including the newline) stripped."""
    with open(file, 'r') as f:
        return [line.strip() for line in f]
def writetxt(file, lines):
    """Write *lines* to *file*, appending a newline to each. Returns None."""
    with open(file, 'w') as f:
        f.writelines(line + '\n' for line in lines)
    return None
def split_pag():
    """Placeholder — not implemented; split_pag_test holds the actual logic."""
    pass
def split_pag_test():
    """Build a 'val' cache pkl from city pags that have no box annotations.

    NOTE: dataset and output paths are hard-coded; run from the repo root.
    Cleanup: dropped the unused `save_path` local (the pkl is written next to
    dataset_path) and simplified the filter loop.
    """
    dataset_path = 'data/hj_dataset/fisheyes/'
    all_pag = os.listdir(dataset_path)
    filterd_pag = []
    print(f'get scenes list box anno: {len(all_pag)}')
    for pag in all_pag:
        _path = os.path.join(dataset_path, pag)
        # keep only pags that end with 'city' AND have none of these folders
        has_box = any(os.path.exists(os.path.join(_path, d))
                      for d in ('obj_anno', 'obj_motion', 'obj_interp'))
        if has_box or (not pag.endswith('city')):
            continue
        filterd_pag.append(pag)

    print(f'get scenes list not box anno with city: {len(filterd_pag)}')
    dataset = FastHJdataset(dataset_path=dataset_path, data_type='fisheye',
                            pag=filterd_pag, frametype=frametype_v0)
    dataset.check_dataset()
    dataset.get_fast_cache_loop()
    version = '3.0'
    name = 'fast_cache'
    time_now = get_data()
    mode = 'val'
    file = os.path.join(dataset_path, '../pkl', f'{name}_{mode}_city_{time_now}_{version}.pkl')
    dataset.write_to_pkl(file)
def get_data():
    """Return the current local time formatted for cache filenames.

    NOTE: the format deliberately stays '%H_%M%S' (no separator between
    minutes and seconds) because generated pkl filenames depend on it.
    """
    return datetime.now().strftime('%Y_%m_%d_%H_%M%S')
def get_trainval():
    """Build train/test cache pkls from a fixed train/test pag split.

    NOTE: dataset paths and the test-split txt name are hard-coded; run from
    the repository root.
    """
    dataset_path = 'data/hj_dataset/fisheyes/'
    all_pag = os.listdir(dataset_path)
    test_pag = readtxt('data/hj_dataset/fisheyes_207_test_0717.txt')
    # train split = everything not in the fixed test split
    train_pag = list(set(all_pag) - set(test_pag))
    train_pag.sort()
    writetxt(f'data/hj_dataset/fisheyes_{len(train_pag)}_train_0717.txt',train_pag)
    train_pag_num = len(train_pag)
    # round-trip through the txt files so the on-disk lists stay authoritative
    train_pag = readtxt(f'data/hj_dataset/fisheyes_{train_pag_num}_train_0717.txt')
    test_pag = readtxt('data/hj_dataset/fisheyes_207_test_0717.txt')
    def get_pkl(pag,path,name,mode,time_now,version):
        # Build and serialize one cache pkl for the given pag subset.
        dataset = FastHJdataset(dataset_path=dataset_path, data_type='fisheye',pag=pag,frametype=frametype_v0)
        # dataset.check_dataset()
        dataset.get_fast_cache_loop()
        # dataset.get_fast_cache_pool(workers=8)
        # dataset.print_log()
        file = os.path.join(path,'pkl',f'{name}_{mode}_{len(pag)}_{time_now}_{version}.pkl')
        dataset.write_to_pkl(file)
        return None 
        
    version='3.0'
    name='fast_cache'
    save_path='./data/hj_dataset/'
    time_now = get_data()

    get_pkl(train_pag,save_path,name,'train',time_now,version)
    get_pkl(test_pag,save_path,name,'test',time_now,version)
# Script entry point: build the val-city pkl, then the train/test pkls.
if __name__ == "__main__":
    split_pag_test()
    get_trainval()