import os
import numpy as np
import torch
import torch.utils.data as data
import time
from . import data_utils as d_utils
#import data_utils as d_utils
import torchvision.transforms as transforms
import random
import math
from copy import deepcopy
import trimesh

# Absolute directory containing this file (not referenced in the visible code).
BASE_DIR = os.path.dirname(os.path.abspath(__file__))


class Bindataset(data.Dataset):
    """Bin-picking grasp dataset over per-frame point clouds and grasp labels.

    On disk, frame ``i`` consists of ``pointcloud_data/xyz_{i:05d}.npy``
    (an (N, 3[+C]) float array; columns 3+ are treated as normals) and, for
    train/validate, ``image_{i:05d}.npz`` holding a pickled dict that maps a
    point index to a per-point grasp label tuple.  Observed label layout
    (TODO confirm against the label generator):
    ``(valid_flag, score, 4x4 grasp transform, field3, field4)`` — fields 3/4
    are copied verbatim into the output (commented-out historical code
    suggests field4 is a gripper width, but that is not verifiable here).

    ``__getitem__`` returns
    ``(points, (cls, score, angle_transform, matrix_transform), dataset_idx)``:

    * ``points``           -- (num_points, 3[+C]) cloud, centered on its mean
                              (and z-rotated in train mode when ``aug_scene``).
    * ``cls``              -- (num_points,) int; 1 where the sampled point
                              carries a positive grasp label, else 0.
    * ``score``            -- (num_points,) float32 grasp score (0 negatives).
    * ``angle_transform``  -- (num_points, 8) float32:
                              offset(3), azimuth deg, elevation deg - 15,
                              label field3, label field4, in-plane angle deg.
    * ``matrix_transform`` -- (num_points, 3, 3) float32 grasp rotations
                              (identity for negatives).

    In test mode only the (empty-label) point cloud is produced.
    """

    def __init__(self, dataset_dir=None, num_points=20000, transforms=None, mode='train', use_normal=False, platform='remote', non_uniform_sampling=True, aug_scene=True):
        """See class docstring.  ``mode`` must be train/validate/test;
        ``transforms`` is an optional callable applied to the final cloud."""
        super().__init__()
        assert dataset_dir is not None
        self.platform = platform
        self.use_normal = use_normal
        self.dataset_dir = dataset_dir
        self.transforms = transforms
        self.non_uniform_sampling = non_uniform_sampling
        self.aug_scene = aug_scene

        self.mode, self.num_points = mode, num_points
        assert self.mode in ['train', 'validate', 'test']
        print('dataset mode {}'.format(self.mode))
        if self.mode == 'train':
            self.idxs = np.load(os.path.join(self.dataset_dir, 'train_indices.npy'))
        elif self.mode == 'validate':
            # NOTE: validation deliberately reads the held-out test split.
            self.idxs = np.load(os.path.join(self.dataset_dir, 'test_indices.npy'))
        else:
            self.idxs = np.arange(len(self._list_data_files()))

        # In-memory frame cache: dataset_idx -> (point_data, label_dict).
        # Population is currently disabled in __getitem__ (memory cost).
        self.dict_ = {}

    def _list_data_files(self):
        """Recursively collect every ``.npy`` file path under ``dataset_dir``."""
        file_list = []
        for root, _dirs, files in os.walk(self.dataset_dir):
            file_list.extend(
                os.path.join(root, name) for name in files if name.endswith('.npy')
            )
        return file_list

    def _load_data_file(self, idx):
        """Load frame ``idx`` from disk.

        Returns ``(points, label_dict)`` in train/validate mode (normal
        columns dropped unless ``use_normal``), or the raw point array in
        test mode.  Raises AssertionError on any other mode.
        """
        if self.mode in ['train', 'validate']:
            point_data = np.load(os.path.join(self.dataset_dir, 'pointcloud_data/xyz_{:05d}.npy'.format(idx)))
            # The .npz stores one pickled dict under the default 'arr_0' key.
            point_dict = np.load(os.path.join(self.dataset_dir, 'image_{:05d}.npz'.format(idx)), allow_pickle=True)['arr_0'].item()
            if self.use_normal:
                return point_data, point_dict
            return point_data[:, :3], point_dict
        if self.mode == 'test':
            return np.load(os.path.join(self.dataset_dir, 'pointcloud_data/xyz_{:05d}.npy'.format(idx)))
        raise AssertionError('No mode {}'.format(self.mode))

    def _foreground_mask(self, point_data):
        """Boolean mask of points inside the bin workspace for this mode.

        The bounds are hard-coded per capture setup: the test rig's bin is
        offset along x and has a lower table plane than the synthetic
        train/validate scenes.
        """
        if self.mode == 'test':
            in_plane = np.logical_and(abs(point_data[:, 1]) < 0.30,
                                      abs(point_data[:, 0] - 0.475) < 0.175)
            return in_plane & (point_data[:, 2] > 0.005)
        in_plane = np.logical_and(abs(point_data[:, 1]) < 0.2725,
                                  abs(point_data[:, 0]) < 0.1825)
        return in_plane & (point_data[:, 2] > 0.031)

    def _sample_choices(self, point_data):
        """Draw ``num_points`` distinct point indices.

        With ``non_uniform_sampling``, 40% of the probability mass is spread
        uniformly over foreground (in-bin) points and 60% over background,
        so sparse foreground regions are oversampled relative to the table.
        Falls back to uniform sampling when either region is empty (the
        weighted scheme would otherwise divide by zero).
        """
        n = point_data.shape[0]
        if not self.non_uniform_sampling:
            return np.random.choice(n, self.num_points, replace=False)
        fg = self._foreground_mask(point_data)
        bg = ~fg
        n_fg, n_bg = int(fg.sum()), int(bg.sum())
        if n_fg == 0 or n_bg == 0:
            return np.random.choice(n, self.num_points, replace=False)
        prob = (0.4 / n_fg) * fg + (0.6 / n_bg) * bg
        return np.random.choice(n, self.num_points, replace=False, p=prob)

    @staticmethod
    def _center(point_data, choices):
        """Gather the chosen points and subtract their xyz centroid
        (extra columns, e.g. normals, are passed through untouched)."""
        selected = point_data[choices, :]
        mean_point = selected[:, :3].mean(axis=0)
        return np.hstack((selected[:, :3] - mean_point, selected[:, 3:]))

    @staticmethod
    def _build_labels(point_data, label_dict_, choices, rotation_matrix):
        """Build per-sampled-point grasp targets.

        For each chosen index with a positive label, re-express the grasp
        translation relative to its anchor point (rotating translation and
        orientation by ``rotation_matrix`` when given) and convert the
        orientation to azimuth / elevation / in-plane angles in degrees.
        Negatives get zero/identity placeholders.

        Returns ``(cls, score, angle_transform, matrix_transform)`` lists.
        """
        cls, score = [], []
        angle_transform, matrix_transform = [], []
        for i in choices:
            if i in label_dict_ and label_dict_[i][0] == 1:
                transform = label_dict_[i][2][:3].copy()
                if rotation_matrix is not None:
                    transform[:, 3] = np.dot(transform[:, 3] - point_data[i], rotation_matrix.T)
                    transform[:, :3] = np.dot(rotation_matrix, transform[:, :3])
                else:
                    transform[:, 3] -= point_data[i]
                # In-plane grasp angle from the gripper's y-axis, folded to [0, pi).
                angle_ = math.atan2(transform[1, 1], transform[0, 1])
                if angle_ < 0:
                    angle_ += math.pi
                grasp_angle = angle_ / math.pi * 180
                # Approach direction (x-axis) -> azimuth in [0, 2*pi).
                azimuth = math.atan2(transform[1, 0], transform[0, 0])
                if azimuth < 0:
                    azimuth += 2 * math.pi
                azimuth_angle = azimuth / math.pi * 180
                elevation_ = math.atan2(-transform[2, 0],
                                        math.sqrt(transform[0, 0] ** 2 + transform[1, 0] ** 2))
                elevation_angle = elevation_ / math.pi * 180
                angle_transform.append(np.hstack((transform[:, 3].T, azimuth_angle,
                                                  elevation_angle - 15.0,
                                                  label_dict_[i][3], label_dict_[i][4],
                                                  grasp_angle)))
                if elevation_angle < 0:
                    # Preserved hard guard from the original: a downward
                    # elevation means corrupt label data; abort outright.
                    exit()
                score.append(label_dict_[i][1])
                matrix_transform.append(transform[:3, :3])
                cls.append(1)
            else:
                angle_transform.append(np.zeros(8, dtype=np.float32))
                score.append(0.)
                matrix_transform.append(np.identity(3, dtype=np.float32))
                cls.append(0)
        return cls, score, angle_transform, matrix_transform

    def __getitem__(self, idx):
        """Load, subsample, center (and optionally rotate) one frame; see
        the class docstring for the returned structure."""
        dataset_idx = self.idxs[idx]

        if self.platform == 'remote':
            if self.mode in ['train', 'validate']:
                if dataset_idx not in self.dict_:
                    point_data, label_dict_ = self._load_data_file(dataset_idx)
                    # Cache population intentionally disabled (memory cost):
                    # self.dict_[dataset_idx] = (point_data, label_dict_)
                else:
                    # BUGFIX: was `self.dict_(dataset_idx)` — calling the dict
                    # rather than indexing it, a TypeError whenever the cache
                    # holds an entry.
                    point_data, label_dict_ = self.dict_[dataset_idx]
            else:
                point_data = self._load_data_file(dataset_idx)
        elif self.platform == 'local':
            if self.mode in ['train', 'validate']:
                point_data, label_dict_ = self._load_data_file(dataset_idx)
            else:
                point_data = self._load_data_file(dataset_idx)
        else:
            raise Exception('do not support platform {}'.format(self.platform))

        if self.aug_scene:
            # Random rotation about the vertical (z) axis; drawn before the
            # point sampling so the RNG consumption order matches the
            # original implementation.
            rotation_angle = np.random.uniform() * 2 * np.pi
            rotation_matrix = d_utils.angle_axis(rotation_angle, np.array([0.0, 0.0, 1.0]))
        else:
            rotation_matrix = None

        choices = self._sample_choices(point_data)
        new_point_data = self._center(point_data, choices)

        if self.mode == 'train':
            if rotation_matrix is not None:
                new_point_data = np.matmul(new_point_data, rotation_matrix.T)
            cls, score, angle_transform, matrix_transform = self._build_labels(
                point_data, label_dict_, choices, rotation_matrix)
        elif self.mode == 'validate':
            # Validation never rotates the cloud or labels, even if a
            # rotation matrix was drawn (matches the original behavior).
            cls, score, angle_transform, matrix_transform = self._build_labels(
                point_data, label_dict_, choices, None)
        else:
            # Test mode: points only; labels stay empty.
            cls, score, angle_transform, matrix_transform = [], [], [], []

        current_points = new_point_data.copy()
        # BUGFIX: `np.int` was deprecated in NumPy 1.20 and removed in 1.24;
        # the builtin `int` is the documented replacement.
        cls = np.asarray(cls, dtype=int)
        score = np.asarray(score, dtype=np.float32)
        angle_transform = np.asarray(angle_transform, dtype=np.float32)
        matrix_transform = np.asarray(matrix_transform, dtype=np.float32)
        if self.transforms is not None:
            current_points = self.transforms(current_points)

        return current_points, (cls, score, angle_transform, matrix_transform), dataset_idx

    def __len__(self):
        """Number of frames in the selected split."""
        return len(self.idxs)

    def set_num_points(self, pts):
        """Change how many points are sampled per frame."""
        self.num_points = pts



if __name__ == "__main__":
    # Visual-debug harness (not part of training): iterates the train split
    # one sample at a time, moves tensors to CUDA, times the (currently
    # commented-out) FPS/gather pipeline, and renders the full cloud (green)
    # with foreground-labelled points (cls > 0, red) via trimesh.
    # Requires a CUDA device, a display, and the hard-coded dataset path.
    import trimesh
    import sys
    sys.path.append('../')
    from grasping.farthest_points_sampling import fps
    dataset = Bindataset(dataset_dir='/home/v-wewei/code/two_stage_pointnet/mask_label_train/',
        num_points=int(8192),
        transforms = transforms.Compose([d_utils.PointcloudToTensor(), d_utils.PointcloudJitter(std=0.0001, clip=0.0001),]),        
        mode='train', non_uniform_sampling=True)
    print(len(dataset))
    #dloader = torch.utils.data.DataLoader(dataset, batch_size=32, num_workers=12,  shuffle=False)
    dloader = torch.utils.data.DataLoader(dataset, batch_size=1, num_workers=0,  shuffle=False)
    for i, batch in enumerate(dloader, 0):
        #pass
        # NOTE(review): `data` here shadows the module-level alias
        # `torch.utils.data as data`; harmless at this point in the loop,
        # but worth renaming if this harness grows.
        data, labels, dataset_idx = batch
        #data = data.cpu().numpy().squeeze()
        import time
        #from utils import pointnet2_utils
        data = data.cuda(non_blocking=True)
        cls_label = labels[0].cuda(non_blocking=True)
        pose_label = labels[2].cuda(non_blocking=True)
        # Synchronize before/after so the wall-clock delta measures only the
        # GPU work launched in between (currently nothing — all commented out).
        torch.cuda.synchronize()
        time_start = time.time()
        #index_sample = fps(data, npoint=16384, mesh=None)
        #with torch.no_grad():

        #idx = pointnet2_utils.furthest_point_sample(data, int(8192))
        #data = pointnet2_utils.gather_operation(
        #    data.transpose(1, 2).contiguous(), idx).transpose(1, 2).contiguous()
        #cls_label =  pointnet2_utils.gather_operation(
        #    cls_label.float().unsqueeze(dim=2).transpose(1, 2).contiguous(), idx).squeeze().long().contiguous()
        #pose_label =  pointnet2_utils.gather_operation(
        #    pose_label.transpose(1, 2).contiguous(), idx).transpose(1, 2).contiguous()

        #idx = pointnet2_utils.furthest_point_sample(data, 16384).long()
        #fg_sum = fg_mask.long().sum().item()
        torch.cuda.synchronize()
        time_cost = time.time() - time_start
        print(time_cost)
        # Points whose class label is positive (batch flattened to 1-D).
        fg_mask = (cls_label.view(-1) > 0)
        #new_xyz = new_xyz.cpu().numpy().squeeze()
        data = data.cpu().numpy().squeeze()
        #pointcloud_sampled = trimesh.PointCloud(new_xyz, colors=[255, 0, 0])
        pointcloud_ori = trimesh.PointCloud(data, colors=[0, 255, 0])
        pointcloud_fg = trimesh.PointCloud(data[fg_mask.cpu()], colors=[255, 0, 0])
        scene = trimesh.Scene()
        scene.add_geometry(pointcloud_ori)
        scene.add_geometry(pointcloud_fg)
        scene.show()
