'''
@author: Xian Zhang
@time: 2021/10/19 15:51
'''
import os
import numpy as np
import warnings
import pickle

from torch.utils.data import Dataset
import glob
import environment

warnings.filterwarnings('ignore')


def pc_normalize(pc):
    """Center a point cloud at its centroid and scale it into the unit sphere.

    Args:
        pc: (N, D) array of point coordinates.

    Returns:
        (N, D) array with zero mean whose farthest point has norm 1.
    """
    centered = pc - pc.mean(axis=0)
    radius = np.linalg.norm(centered, axis=1).max()
    return centered / radius






class DvsGestureDataset(Dataset):
    """DVS-Gesture point-cloud dataset.

    Samples live on disk as ``<DvsGesture_txt_dir>/<split>/<label>/*.txt``,
    one point cloud per file. With ``process_data=True`` the whole split is
    loaded from a pre-processed pickle (FPS/truncation already applied);
    otherwise each ``.txt`` file is parsed lazily on first access and cached.
    """

    def __init__(self, args, split='train', process_data=True):
        """
        Args:
            args: namespace providing ``num_point``, ``num_class`` and
                ``use_shuffle`` (and optionally ``use_uniform_sample``).
            split: dataset partition, ``'train'`` or ``'test'``.
            process_data: when True, load the pre-processed pickle for the
                whole split instead of reading ``.txt`` files lazily.
        """
        self.npoints = args.num_point
        # Whether to use farthest-point sampling in the lazy-loading path.
        # Default False via getattr so callers without the attribute keep
        # working (previously this assignment was commented out, which made
        # _get_item crash with AttributeError when process_data=False).
        self.uniform = getattr(args, 'use_uniform_sample', False)
        self.num_category = args.num_class
        self.classes = list(range(self.num_category))
        self.use_shuffle = args.use_shuffle
        self.process_data = process_data
        self.cache = {}  # index -> (point_set, label), lazy path only

        assert (split == 'train' or split == 'test')
        self.datapath_list = []
        self.label_list = []
        for label in self.classes:
            self.datapath_list.extend(
                glob.glob(os.path.join(environment.DvsGesture_txt_dir, split, str(label), "*.txt")))

        if self.process_data:
            # Load the whole pre-processed split in one shot to speed up
            # training; the pickle stores (data_list, label_list).
            pickle_path = environment.DvsGesture_post_process_path.format(
                environment.shuffle_type[self.use_shuffle], split)
            with open(pickle_path, 'rb') as f:
                self.data_list, self.label_list = pickle.load(f)

    def __len__(self):
        # NOTE(review): length is based on the on-disk file listing even when
        # process_data=True — confirm it matches len(self.data_list) for the
        # pickles in use (the __main__ check below prints both counts).
        return len(self.datapath_list)

    def _get_item(self, index):
        """Return ``(point_set, label)`` for *index*.

        ``point_set`` is a float32 array of shape ``(npoints, C)`` where C is
        the per-point feature width of the ``.txt`` files (presumably 3 —
        confirm against the data).
        """
        if self.process_data:
            # Pre-processed path: everything is already in memory.
            point_set, cls = self.data_list[index], self.label_list[index]
        else:
            if index in self.cache:
                point_set, cls = self.cache[index]
            else:
                point_set = np.loadtxt(self.datapath_list[index]).astype(np.float32)
                if self.uniform:
                    # NOTE(review): farthest_point_sample is not defined or
                    # imported in this module — confirm it is provided before
                    # enabling use_uniform_sample, otherwise this raises
                    # NameError.
                    point_set = farthest_point_sample(point_set, self.npoints)
                else:
                    point_set = point_set[0:self.npoints, :]
                # The label is the parent directory name:
                # .../<split>/<label>/<file>.txt
                cls = int(self.datapath_list[index].split(os.sep)[-2])
                self.cache[index] = (point_set, cls)
        return point_set, cls

    def __getitem__(self, index):
        return self._get_item(index)


if __name__ == '__main__':
    # Sanity check: compare the sample count stored in the pre-processed
    # pickle with the number of raw .txt files on disk for the train split.
    pickle_path = environment.DvsGesture_post_process_path.format(
        environment.shuffle_type[False], 'train')
    with open(pickle_path, 'rb') as f:
        data_list, label_list = pickle.load(f)
        print(len(label_list))

    datapath_list = []
    label_list = []
    for cls in range(10):
        pattern = os.path.join(environment.DvsGesture_txt_dir, 'train', str(cls), "*.txt")
        datapath_list.extend(glob.glob(pattern))
    print(len(datapath_list))

    import torch
