import json
import os
import numpy as np
import torch
import torch.utils.data as data
import time
from dataset import data_utils as d_utils
# from . import data_utils as d_utils
# import data_utils as d_utils
import torchvision.transforms as transforms
import random
import math
from copy import deepcopy
import trimesh
# # warnings.warn('WARNING', DeprecationWarning)
# import perception
from perception import DepthImage, CameraIntrinsics

BASE_DIR = os.path.dirname(os.path.abspath(__file__))





def generate_label( image_data, camera_intr_, camera_pose):
    """Turn a depth image into a world-frame point cloud with simulated sensor noise.

    Gaussian noise (clipped to +/-0.25 mm) is added to the depth map, the noisy
    depth is deprojected through the camera intrinsics, and the resulting points
    are transformed into the world frame with the camera pose. Points at or
    below z = 0.01 are discarded.

    Args:
        image_data: depth image array.
        camera_intr_: intrinsics as (fx, fy, cx, cy, skew, height, width).
        camera_pose: 7-vector, translation (first 3) then quaternion (last 4)
            -- quaternion order assumed to match trimesh's convention; verify.

    Returns:
        (N, 3) array of world-frame points above the z = 0.01 cutoff.
    """
    # Simulated depth noise, clipped so no perturbation exceeds one sigma.
    perturbation = np.clip(np.random.normal(0, 0.00025, image_data.shape),
                           -0.00025, 0.00025)
    noisy_depth = DepthImage(image_data + perturbation, frame='camera')

    intrinsics = CameraIntrinsics(frame='camera',
                                  fx=camera_intr_[0], fy=camera_intr_[1],
                                  cx=camera_intr_[2], cy=camera_intr_[3],
                                  skew=camera_intr_[4],
                                  height=camera_intr_[5], width=camera_intr_[6])
    cam_points = intrinsics.deproject_to_image(noisy_depth).to_point_cloud().data

    # Camera pose -> homogeneous camera-to-world transform.
    translation = trimesh.transformations.translation_matrix(camera_pose[:3])
    rotation = trimesh.transformations.quaternion_matrix(camera_pose[3:])
    cam_to_world = np.asanyarray(
        trimesh.transformations.concatenate_matrices(translation, rotation),
        order='C', dtype=np.float64)

    world_points = trimesh.transformations.transform_points(
        cam_points.transpose(1, 0), cam_to_world)
    # Drop points at/below the table plane.
    return world_points[world_points[:, 2] > 0.01]


class SuctionBindataset(data.Dataset):
    """Point-cloud dataset for suction-grasp learning.

    Each sample is a point cloud regenerated on the fly from a stored depth
    image (with small Gaussian depth noise) plus per-point labels: class
    (suctionable or not), quality score, suction normal and an objectness id.

    Args:
        dataset_dir: root directory containing depth/, camera_intr/,
            camera_pose/, image_*.npz label dicts, pointcloud_data_normal/
            and the train/test index files.
        num_points: number of points sampled per scene.
        transforms: optional callable applied to the sampled point array.
        mode: one of 'train', 'validate', 'test'.
        use_normal: keep normal channels in the input (not implemented;
            raises when enabled).
        platform: 'remote' or 'local'; selects the loading branch.
        non_uniform_sampling: bias sampling towards foreground points
            (above the table plane) instead of sampling uniformly.
        aug_scene: rotate training scenes by a random angle about z.
    """

    def __init__(self, dataset_dir=None, num_points=20000, transforms=None, mode='train', use_normal=False,
                 platform='remote', non_uniform_sampling=True, aug_scene=True):
        super().__init__()
        # Was `assert dataset_dir is not None`; assert is stripped under -O.
        if dataset_dir is None:
            raise ValueError('dataset_dir must be provided')
        self.platform = platform
        self.use_normal = use_normal
        self.dataset_dir = dataset_dir
        self.transforms = transforms
        self.non_uniform_sampling = non_uniform_sampling
        self.aug_scene = aug_scene

        self.mode, self.num_points = mode, num_points
        if self.mode not in ['train', 'validate', 'test']:
            raise ValueError('No mode {}'.format(self.mode))
        print('dataset mode {}'.format(self.mode))
        if self.mode == 'train':
            self.idxs = np.load(os.path.join(self.dataset_dir, 'train_indices.npy'))
        elif self.mode == 'validate':
            self.idxs = np.load(os.path.join(self.dataset_dir, 'test_indices.npy'))
        else:
            # Test mode indexes every .ply scene found under dataset_dir.
            self.idxs = np.arange(len(self._list_data_files()))

        # Optional in-memory sample cache; currently never filled (the insert
        # in __getitem__ stays disabled to bound memory use).
        self.dict_ = {}

    def _list_data_files(self):
        """Recursively collect every .ply file under dataset_dir."""
        file_list = []
        for root, _dirs, files in os.walk(self.dataset_dir):
            for file_name in files:
                if file_name.endswith('.ply'):
                    file_list.append(os.path.join(root, file_name))
        return file_list

    def _load_data_file(self, idx):
        """Load one raw sample.

        Returns:
            train/validate: (points[:, :3], label_dict, suction_normals) where
                label_dict maps point index -> label tuple (first entry is a
                suctionable flag, second a score, last an objectness id --
                inferred from usage in __getitem__) and suction_normals is an
                array indexed by point index.
            test: the raw scene points only.
        """
        if self.mode in ['train', 'validate']:
            point_data = self.perturb_depth(idx)
            point_dict = np.load(os.path.join(self.dataset_dir, 'image_{:05d}.npz'.format(idx)),
                                 allow_pickle=True)['arr_0'].item()
            suction_pose_label = np.load(
                os.path.join(self.dataset_dir, 'pointcloud_data_normal/xyz_normal_{:05d}.npy'.format(idx)))
            if self.use_normal:
                # Normal input channels are not wired up yet (was `assert 1 == 0`).
                raise NotImplementedError('use_normal is not supported')
            return point_data[:, :3], point_dict, suction_pose_label
        elif self.mode == 'test':
            # NOTE(review): hard-coded test scene path -- parameterize if needed.
            pc = trimesh.load("/media/yumi/Program/weiwei/pointcloud_data/0915/scene.ply")
            return np.asarray(pc.vertices)
        else:
            raise ValueError('No mode {}'.format(self.mode))

    def perturb_depth(self, idx):
        """Rebuild the noisy world-frame point cloud for sample ``idx`` from
        its stored depth image, camera intrinsics and camera pose."""
        dst_path = self.dataset_dir
        image_data = np.load(os.path.join(dst_path, 'depth/depth_{:05d}.npy'.format(idx)))
        camera_intr_ = np.load(os.path.join(dst_path, 'camera_intr/camera_intr_{:05d}.npy'.format(idx)))
        camera_pose = np.load(os.path.join(dst_path, 'camera_pose/camera_pose_{:05d}.npy'.format(idx)))
        return generate_label(image_data, camera_intr_, camera_pose)

    def _sample_biased(self, point_data, fg_mask, bg_mask, fg_frac, bg_frac):
        """Sample self.num_points indices without replacement, allocating a
        total probability of fg_frac to foreground points and bg_frac to
        background points (the fractions must sum to 1 for np.random.choice).
        Points in neither mask get probability 0, as in the original code."""
        n_fg = np.count_nonzero(fg_mask)
        n_bg = np.count_nonzero(bg_mask)
        pro = (fg_frac / n_fg) * fg_mask + (bg_frac / n_bg) * bg_mask
        return np.random.choice(point_data.shape[0], self.num_points, replace=False, p=pro)

    @staticmethod
    def _build_labels(choices, label_dict_, suction_pose_label, rotation_matrix):
        """Build per-point labels for the sampled indices.

        A point is positive (cls 1) when it appears in label_dict_ with its
        suctionable flag set; otherwise it is negative with score 0 and
        objectness 1. Normals are rotated when a scene rotation was applied
        to the points (rotation_matrix is not None).
        """
        cls, score, normals_label, objectness_label = [], [], [], []
        for i in choices:
            normal = suction_pose_label[i]
            # BUGFIX: guard the rotation -- the original applied it
            # unconditionally in train mode and crashed when aug_scene=False.
            if rotation_matrix is not None:
                normal = np.matmul(normal, rotation_matrix.T)
            normals_label.append(normal)
            if i in label_dict_ and label_dict_[i][0] == 1:
                score.append(label_dict_[i][1])
                cls.append(1)
                objectness_label.append(label_dict_[i][-1])
            else:
                score.append(0.)
                cls.append(0)
                objectness_label.append(1)
        return cls, score, normals_label, objectness_label

    def __getitem__(self, idx):
        """Return (points, (cls, score, normals, objectness), dataset_idx, mean_point).

        Points are centered on their mean; in train mode they (and the normal
        labels) are additionally rotated about z when aug_scene is on. In test
        mode the label arrays are empty.
        """
        dataset_idx = self.idxs[idx]

        if self.platform not in ('remote', 'local'):
            raise Exception('do not support platform {}'.format(self.platform))

        if self.mode in ['train', 'validate']:
            if self.platform == 'remote' and dataset_idx in self.dict_:
                # Cache hit. BUGFIX: was `self.dict_(dataset_idx)` -- calling a
                # dict -- and only unpacked two of the three values.
                point_data, label_dict_, suction_pose_label = self.dict_[dataset_idx]
            else:
                # BUGFIX: the 'local' branch previously unpacked only two
                # values from this 3-tuple and would raise ValueError.
                point_data, label_dict_, suction_pose_label = self._load_data_file(dataset_idx)
                # Caching disabled to bound memory use:
                # self.dict_[dataset_idx] = (point_data, label_dict_, suction_pose_label)
        else:
            point_data = self._load_data_file(dataset_idx)

        # Optional scene augmentation: random rotation about the z axis.
        if self.aug_scene:
            rotation_angle = np.random.uniform() * 2 * np.pi
            rotation_matrix = d_utils.angle_axis(rotation_angle, np.array([0.0, 0.0, 1.0]))
        else:
            rotation_matrix = None

        cls = []
        score = []
        normals_label = []
        objectness_label = []

        if self.mode == 'train':
            if self.non_uniform_sampling:
                # Foreground: inside the workspace box and above the table
                # plane (z > 0.0255); background: below the plane.
                fg_mask = (np.logical_and(abs(point_data[:, 1]) < 0.160, abs(point_data[:, 0]) < 0.22)
                           & (point_data[:, 2] > 0.0255))
                bg_mask = point_data[:, 2] < 0.0255
                # Randomly split 0.4 of extra probability mass between the two
                # groups so the fg/bg ratio varies per sample (0.35..0.75 fg).
                alpha = 0.4 * random.random()
                beta = 0.4 - alpha
                choices = self._sample_biased(point_data, fg_mask, bg_mask, 0.35 + alpha, 0.25 + beta)
            else:
                choices = np.random.choice(point_data.shape[0], self.num_points, replace=False)

            point_data_choice = point_data[choices, :]
            mean_point = point_data_choice[:, :3].mean(axis=0)
            new_point_data = np.hstack((point_data_choice[:, :3] - mean_point, point_data_choice[:, 3:]))
            if rotation_matrix is not None:
                new_point_data = np.matmul(new_point_data, rotation_matrix.T)
            # Normal labels are rotated together with the points.
            cls, score, normals_label, objectness_label = self._build_labels(
                choices, label_dict_, suction_pose_label, rotation_matrix)
        elif self.mode == 'validate':
            if self.non_uniform_sampling:
                fg_mask = (np.logical_and(abs(point_data[:, 1]) < 0.16, abs(point_data[:, 0]) < 0.22)
                           & (point_data[:, 2] > 0.0255))
                bg_mask = point_data[:, 2] < 0.0255
                # Fixed 60/40 foreground/background split for validation.
                choices = self._sample_biased(point_data, fg_mask, bg_mask, 0.6, 0.4)
            else:
                choices = np.random.choice(point_data.shape[0], self.num_points, replace=False)

            point_data_choice = point_data[choices, :]
            mean_point = point_data_choice[:, :3].mean(axis=0)
            new_point_data = np.hstack((point_data_choice[:, :3] - mean_point, point_data_choice[:, 3:]))
            # No augmentation at validation time, so normals stay unrotated.
            cls, score, normals_label, objectness_label = self._build_labels(
                choices, label_dict_, suction_pose_label, None)
        elif self.mode == 'test':
            if self.non_uniform_sampling:
                # Different workspace box / table height for the real test scene.
                fg_mask = (np.logical_and(abs(point_data[:, 1] - 0.0025) < 0.2475,
                                          abs(point_data[:, 0] - 0.444) < 0.20)
                           & (point_data[:, 2] > 0.06225))
                bg_mask = point_data[:, 2] < 0.06225
                choices = self._sample_biased(point_data, fg_mask, bg_mask, 0.8, 0.2)
            else:
                choices = np.random.choice(point_data.shape[0], self.num_points, replace=False)

            point_data_choice = point_data[choices, :]
            mean_point = point_data_choice[:, :3].mean(axis=0)
            new_point_data = np.hstack((point_data_choice[:, :3] - mean_point, point_data_choice[:, 3:]))
        else:
            raise ValueError('NO mode {}'.format(self.mode))

        current_points = new_point_data.copy()
        # BUGFIX: np.int was removed in NumPy 1.24; plain int is equivalent.
        cls = np.asarray(cls, dtype=int)
        score = np.asarray(score, dtype=np.float32)
        normals_label = np.asarray(normals_label, dtype=np.float32)
        objectness_label = np.asarray(objectness_label, dtype=int)

        if self.transforms is not None:
            current_points = self.transforms(current_points)

        return current_points, (cls, score, normals_label, objectness_label), dataset_idx, mean_point

    def __len__(self):
        """Number of indexed samples."""
        return len(self.idxs)

    def set_num_points(self, pts):
        """Override the number of points sampled per scene."""
        self.num_points = pts


if __name__ == "__main__":
    # Smoke test: build the training dataset, iterate one sample at a time and
    # visualize foreground (suctionable) points in red over the scene in green.
    dataset = SuctionBindataset(dataset_dir='/home/v-wewei/code/two_stage_pointnet/mask_label/',
                                num_points=int(8192 * 2),
                                transforms=transforms.Compose(
                                    [d_utils.PointcloudToTensor(),
                                     ]),
                                mode='train', non_uniform_sampling=True)
    print(len(dataset))
    dloader = torch.utils.data.DataLoader(dataset, batch_size=1, num_workers=0, shuffle=False)
    for i, batch in enumerate(dloader, 0):
        # BUGFIX: __getitem__ returns a 4-tuple
        # (points, labels, dataset_idx, mean_point); the original unpacked
        # only three values and raised ValueError. Also renamed the local
        # `data` -> `points` so it no longer shadows the torch.utils.data
        # module alias imported at the top of the file.
        points, labels, dataset_idx, mean_point = batch

        points = points.cuda(non_blocking=True)
        cls_label = labels[0].cuda(non_blocking=True)
        pose_label = labels[2].cuda(non_blocking=True)
        objectness_label = labels[3].cuda(non_blocking=True)

        # Time the (currently empty) GPU section; synchronize so the CUDA
        # queue does not skew the measurement.
        torch.cuda.synchronize()
        time_start = time.time()
        torch.cuda.synchronize()
        time_cost = time.time() - time_start
        print(time_cost)

        fg_mask = (cls_label.view(-1) > 0)
        points = points.cpu().numpy().squeeze()
        pointcloud_ori = trimesh.PointCloud(points, colors=[0, 255, 0])
        pointcloud_fg = trimesh.PointCloud(points[fg_mask.cpu()], colors=[255, 0, 0])
        scene = trimesh.Scene()
        scene.add_geometry(pointcloud_ori)
        scene.add_geometry(pointcloud_fg)
        scene.show()
