import pickle
import multiprocessing as mp
import trimesh
from autolab_core import RigidTransform
from copy import deepcopy
import numpy as np 
import json
import os
from grasping.grasp_sampler import GraspSampler
from grasping.grasp import ParallelJawPtGrasp3D 


def prune_grasp_method(dataset_dict, R_l, R_r, key):
        """Filter pre-sampled grasps for one object and pick a good approach angle.

        Loads the mesh at ``dataset_dict[key]['path']``, reads every
        ``pruned*`` pickle of grasps for that key, and for each grasp sweeps the
        approach angle over +/-40 degrees in 10-degree steps.  A candidate angle
        survives only if the gripper is collision-free (ray test for the fingers,
        FCL test for the base); among survivors the angle whose approach direction
        best opposes the local surface normal wins, falling back to the angle with
        the deepest palm signed distance.  Kept grasps are re-pickled as
        ``approach_<count>.pickle`` in the same directory.

        NOTE(review): ``R_l`` and ``R_r`` are accepted but never used here.
        NOTE(review): ``base``, ``finger_l``, ``finger_r`` and ``debug_vis`` are
        read as module-level globals defined under ``__main__``; worker processes
        only inherit them with the fork start method — TODO confirm Linux-only.
        """
        path = dataset_dict[key]['path']
        mesh = trimesh.load_mesh(path, use_embree=False)
        # In-place Humphrey smoothing; presumably to stabilize normals for the
        # ray/signed-distance tests below — TODO confirm.
        trimesh.smoothing.filter_humphrey(mesh)
        files = os.listdir('/home/v-wewei/code/two_stage_pointnet/generated_grasp_2000/{}'.format(key))
        # Signed-distance queries against the object surface.
        proximity = trimesh.proximity.ProximityQuery(mesh)

        for file_name in files:
#            if os.path.splitext(file_name)[0].startswith('prune'):
#                os.remove(os.path.join('/home/v-wewei/code/two_stage_pointnet/generated_grasp_5000/{}/{}'.format(key, file_name)))
#                print('remove', file_name)
#                continue


            # Only process grasp files whose basename starts with 'pruned'.
            if os.path.splitext(file_name)[0].startswith('pruned'):
#            if os.path.splitext(file_name)[1] == ".pickle":
                with open('/home/v-wewei/code/two_stage_pointnet/generated_grasp_2000/{}/{}'.format(key, file_name), 'rb') as f:
                    # NOTE(review): pickle from this project's own pipeline;
                    # unsafe if the files could ever come from untrusted sources.
                    grasps = pickle.load(f)
                    print(key, 'grasps length is : ', len(grasps))
                    selected_grasps = []
                    # Scene is only consumed by the debug visualization below.
                    scene = trimesh.Scene([mesh])
                    prune_grasps = grasps
                    # FCL-backed collision checks between the posed gripper base
                    # and this object mesh.
                    m = trimesh.collision.CollisionManager()
                    m.add_object('{}'.format(key), mesh)
                    for grasp in prune_grasps:
                        # Best palm signed distance seen so far; doubles as the
                        # acceptance threshold (must drop below -0.05).
                        dist = -0.05
                        # Best approach-vs-surface-normal angle (degrees) so far;
                        # doubles as the acceptance threshold (< 60 deg).
                        angle_dist = 60
                        # Sweep +/-40 deg around the stored angle in 10-deg steps.
                        angle_candidates = np.arange(grasp.approach_angle-40, grasp.approach_angle+41, 10)
                        use_approach_vector = True
                        grasp_angle_ = None      # winner by surface-normal angle
                        grasp_angle_dist = None  # winner by palm signed distance
                        grasp_angle_ori = grasp.approach_angle  # only referenced by the commented-out fallback below
                        for grasp_angle in angle_candidates:
                            # Mutates the grasp in place; the winning angle is
                            # re-assigned once the sweep finishes.
                            grasp.approach_angle = grasp_angle
                            pose = RigidTransform(grasp.rotated_full_axis, grasp.center, from_frame='gripper', to_frame='obj')
                            ## simple collision detection (with 6 lines) ##
                            grasp_z_axis = grasp.rotated_full_axis[:, 2]
                            #grasp_approach_axis = grasp.rotated_full_axis[:, 0]
                            # Nudge each fingertip contact 0.5 mm outward along
                            # the closing axis so the rays start off the surface.
                            virtual_contact_0 = grasp.virtual_contactpoints[0] - grasp.axis_ * 0.0005
                            virtual_contact_1 = grasp.virtual_contactpoints[1] + grasp.axis_ * 0.0005
                            # 7 sample points per finger, spread +/-9 mm along the
                            # grasp z axis around each contact point.
                            ray_origins = np.array([
                            virtual_contact_0, 
                            virtual_contact_0+grasp_z_axis*0.0090, 
                            virtual_contact_0-grasp_z_axis*0.0090, 
                            virtual_contact_0+grasp_z_axis*0.0060, 
                            virtual_contact_0-grasp_z_axis*0.0060, 
                            virtual_contact_0+grasp_z_axis*0.0030, 
                            virtual_contact_0-grasp_z_axis*0.0030, 
                            virtual_contact_1, 
                            virtual_contact_1+grasp_z_axis*0.0090, 
                            virtual_contact_1-grasp_z_axis*0.0090,
                            virtual_contact_1+grasp_z_axis*0.0060, 
                            virtual_contact_1-grasp_z_axis*0.0060,
                            virtual_contact_1+grasp_z_axis*0.0030, 
                            virtual_contact_1-grasp_z_axis*0.0030,
                            ])
                            # Cast all 14 rays backwards along the approach axis;
                            # any hit means a finger would sweep through the mesh.
                            ray_directions = np.array([-grasp.rotated_full_axis[:,0]] * 14)
                            locations, _, _ = mesh.ray.intersects_location(ray_origins, ray_directions, multiple_hits=True)
                            if len(locations) > 0:
                                #print('Collision Detection with finger')
                                continue
                            # Pose the gripper base and reject on any FCL contact.
                            base_copy = deepcopy(base)
                            transform = pose.matrix
                            base_copy.apply_transform(transform)
                            is_collision = m.in_collision_single(base_copy, return_names=False, return_data=False)
                            if is_collision:
                                #print('Collsion Detection with base')
                                continue
                            # Signed distance of the midpoint of base vertices 4
                            # and 7 (presumably the palm center — TODO confirm);
                            # more negative means farther outside the object.
                            signed_distance = proximity.signed_distance(((base_copy.vertices[4] + base_copy.vertices[7])/2).reshape(-1, 3))
                            if signed_distance[0] < dist:
                                grasp_angle_dist = grasp_angle
                                dist = signed_distance[0]
                                #print('-----', key, grasp_angle, dist, '-----')

                            # Single ray from the grasp center along the negative
                            # approach axis to find the surface patch approached.
                            ray_origins = [grasp.center]
                            ray_directions = [-grasp.rotated_full_axis[:, 0]]
                            if use_approach_vector:
                                c, _, index_tri = mesh.ray.intersects_location(ray_origins, ray_directions, multiple_hits=True)
                            if use_approach_vector and len(index_tri) > 0:
                                # NOTE(review): 'data' is sorted but never used;
                                # distance_L2 also receives the whole
                                # ray_directions list rather than one vector —
                                # looks like dead/suspect code, TODO confirm.
                                data_ = zip(c, index_tri)
                                data = sorted(data_, key=lambda item: ParallelJawPtGrasp3D.distance_L2(item[0], grasp.center, ray_directions), reverse=True)
                                #face_normal = mesh.face_normals[index_tri[0]]
                                # Area-weighted average normal over all faces
                                # adjacent to the hit triangles' vertices
                                # (-1 entries are vertex_faces padding).
                                point_index = mesh.faces[index_tri]
                                total = mesh.vertex_faces[point_index].flatten()
                                total = np.unique(total)
                                total =  np.setdiff1d(total, -1)
                                normal = mesh.face_normals[total]
                                face_area = mesh.area_faces[total]
                                mean_vec = [x * y for x, y in zip(normal, face_area)]
                                face_normal = np.array(mean_vec).sum(axis=0)/face_area.sum()
                                face_normal /= np.linalg.norm(face_normal)

                                # Clamp the cosine before arccos to absorb
                                # floating-point overshoot outside [-1, 1].
                                value = face_normal.dot(ray_directions[0])
                                if value > 1:
                                    value = 1
                                if value < -1:
                                    value = -1
                                angle = np.arccos(value) / np.pi * 180
                                #print(key, grasp_angle, angle)
                                if angle < angle_dist:
                                    grasp_angle_ = grasp_angle
                                    angle_dist = angle
                                    #print('----', key, grasp_angle, angle_dist, '----')
                            else:
                                # No surface hit: abandon the normal-angle
                                # criterion for this grasp and rely solely on the
                                # palm-distance criterion for remaining angles.
                                print('Using distance instead')
                                grasp_angle_ = None
                                use_approach_vector = False
                        # Prefer the normal-angle winner, then the palm-distance
                        # winner; otherwise the grasp is dropped entirely.
                        if grasp_angle_ is not None:
                            grasp.approach_angle = grasp_angle_
                            selected_grasps.append(grasp)
                        elif grasp_angle_dist is not None:
                            grasp.approach_angle = grasp_angle_dist
                            selected_grasps.append(grasp)
                        else:
                            print('***** there is no suitable grasp configuration *****')
                            #grasp.approach_angle = grasp_angle_ori
                            #pose = RigidTransform(grasp.rotated_full_axis, grasp.center, from_frame='gripper', to_frame='obj')
                            #T_leftfinger = RigidTransform(grasp.rotated_full_axis, grasp.virtual_contactpoints[0],from_frame='left_finger', to_frame='obj')
                            #T_rightfinger = RigidTransform(grasp.rotated_full_axis, grasp.virtual_contactpoints[1],from_frame='right_finger', to_frame='obj')
                            #transform = pose.matrix

                            #scene_ = deepcopy(scene)
                            #base_copy = deepcopy(base)
                            #base_copy.apply_transform(transform)
                            #scene_.add_geometry(base_copy)
                            #finger_l_copy = deepcopy(finger_l)
                            #finger_r_copy = deepcopy(finger_r)
                            #finger_l_copy.apply_transform(T_leftfinger.matrix)
                            #finger_r_copy.apply_transform(T_rightfinge.matrix)
                            #scene_.add_geometry(finger_l_copy)
                            #scene_.add_geometry(finger_r_copy)
                            #grasp_z_axis = grasp.rotated_full_axis[:, 2]
                            ##grasp_approach_axis = grasp.rotated_full_axis[:, 0]
                            #virtual_contact_0 = grasp.virtual_contactpoints[0] - grasp.axis_ * 0.0003
                            #virtual_contact_1 = grasp.virtual_contactpoints[1] + grasp.axis_ * 0.0003
                            #ray_origins = np.array([
                            #virtual_contact_0, 
                            #virtual_contact_0+grasp_z_axis*0.008, 
                            #virtual_contact_0-grasp_z_axis*0.008, 
                            #virtual_contact_0+grasp_z_axis*0.004, 
                            #virtual_contact_0-grasp_z_axis*0.004, 
                            #virtual_contact_1, 
                            #virtual_contact_1+grasp_z_axis*0.008, 
                            #virtual_contact_1-grasp_z_axis*0.008,
                            #virtual_contact_1+grasp_z_axis*0.004, 
                            #virtual_contact_1-grasp_z_axis*0.004,
                            #])
                            #ray_directions = np.array([-grasp.rotated_full_axis[:,0]] * 10)
                            #ray_visualize = trimesh.load_path(np.hstack((ray_origins, ray_origins+ray_directions/20)).reshape(-1, 2, 3))
                            #ray_visualize.colors= np.array([
                            #[255, 0, 0,255],[255, 0, 0,255],
                            #[255, 0, 0,255],[255, 0, 0,255],
                            #[255, 0, 0,255],[255, 0, 0,255],
                            #[255, 0, 0,255],[255, 0, 0,255],
                            #[255, 0, 0,255],[255, 0, 0, 255]])
                            #scene_.add_geometry(ray_visualize)
                            #scene_.show()
                        # Optional interactive visualization of the accepted
                        # grasp: object, posed base/fingers, and the 14 test
                        # rays drawn in red.  Blocks until the window closes.
                        if debug_vis and grasp_angle_ is not None:
                            grasp.approach_angle = grasp_angle_
                            pose = RigidTransform(grasp.rotated_full_axis, grasp.center, from_frame='gripper', to_frame='obj')
                            T_leftfinger = RigidTransform(grasp.rotated_full_axis, grasp.virtual_contactpoints[0],from_frame='left_finger', to_frame='obj')
                            T_rightfinger = RigidTransform(grasp.rotated_full_axis, grasp.virtual_contactpoints[1],from_frame='right_finger', to_frame='obj')
                            transform = pose.matrix

                            scene_ = deepcopy(scene)
                            base_copy = deepcopy(base)
                            base_copy.apply_transform(transform)
                            scene_.add_geometry(base_copy)
                            finger_l_copy = deepcopy(finger_l)
                            finger_r_copy = deepcopy(finger_r)
                            finger_l_copy.apply_transform(T_leftfinger.matrix)
                            finger_r_copy.apply_transform(T_rightfinger.matrix)
                            scene_.add_geometry(finger_l_copy)
                            scene_.add_geometry(finger_r_copy)
                            grasp_z_axis = grasp.rotated_full_axis[:, 2]
                            #grasp_approach_axis = grasp.rotated_full_axis[:, 0]
                            virtual_contact_0 = grasp.virtual_contactpoints[0] - grasp.axis_ * 0.0005
                            virtual_contact_1 = grasp.virtual_contactpoints[1] + grasp.axis_ * 0.0005
                            ray_origins = np.array([
                            virtual_contact_0, 
                            virtual_contact_0+grasp_z_axis*0.0090, 
                            virtual_contact_0-grasp_z_axis*0.0090, 
                            virtual_contact_0+grasp_z_axis*0.0060, 
                            virtual_contact_0-grasp_z_axis*0.0060, 
                            virtual_contact_0+grasp_z_axis*0.0030, 
                            virtual_contact_0-grasp_z_axis*0.0030, 
                            virtual_contact_1, 
                            virtual_contact_1+grasp_z_axis*0.0090, 
                            virtual_contact_1-grasp_z_axis*0.0090,
                            virtual_contact_1+grasp_z_axis*0.0060, 
                            virtual_contact_1-grasp_z_axis*0.0060,
                            virtual_contact_1+grasp_z_axis*0.0030, 
                            virtual_contact_1-grasp_z_axis*0.0030,
                            ])
                            ray_directions = np.array([-grasp.rotated_full_axis[:,0]] * 14)
                            ray_visualize = trimesh.load_path(np.hstack((ray_origins, ray_origins+ray_directions/20)).reshape(-1, 2, 3))
                            ray_visualize.colors= np.array([
                            [255, 0, 0,255],[255, 0, 0,255],
                            [255, 0, 0,255],[255, 0, 0,255],
                            [255, 0, 0,255],[255, 0, 0,255],
                            [255, 0, 0,255],[255, 0, 0,255],
                            [255, 0, 0,255],[255, 0, 0,255],
                            [255, 0, 0,255],[255, 0, 0,255],
                            [255, 0, 0,255],[255, 0, 0, 255]
                            ])
                            scene_.add_geometry(ray_visualize)
                            scene_.show()
                            del finger_l_copy
                            del finger_r_copy
                            del base_copy
                    print(key, 'Length of Selected grasps is : ', len(selected_grasps))

                    # Output filename encodes the number of surviving grasps.
                    with open('/home/v-wewei/code/two_stage_pointnet/generated_grasp_2000/{}/approach_{}.pickle'.format(key, len(selected_grasps)), 'wb') as f_1:
                        pickle.dump(selected_grasps, f_1)
        print('finish')

if __name__ == "__main__":
    # Interactive visualization of each accepted grasp; read as a module-level
    # global by prune_grasp_method.
    debug_vis = False
    # Process object keys in parallel; debug_pool switches to the blocking
    # apply() so worker tracebacks surface immediately.
    use_pool = True
    debug_pool = False

    # Gripper finger geometry: translate to the finger mount point, then mirror
    # left/right with the two Euler rotations.
    T = trimesh.transformations.translation_matrix([-0.002, -0.0065, -0.048])
    R_l = trimesh.transformations.euler_matrix(np.pi/2, np.pi/2, 0, 'rxyz')
    R_r = trimesh.transformations.euler_matrix(np.pi/2, np.pi/2, np.pi, 'rxyz')
    base_path = '/home/v-wewei/code/two_stage_pointnet/data/grippers/yumi/coarse/box_base.stl'
    base = trimesh.load_mesh(base_path)
    T_base = trimesh.transformations.translation_matrix([0, 0, -0.131])
    R_base = trimesh.transformations.euler_matrix(np.pi/2, np.pi/2, 0,'rxyz')
    base.apply_transform(T_base)
    base.apply_transform(R_base)
    finger_l = trimesh.load('/home/v-wewei/code/two_stage_pointnet/data/grippers/yumi/coarse/box_finger.stl')
    finger_r = deepcopy(finger_l)
    finger_l.apply_transform(T)
    finger_r.apply_transform(T)
    finger_l.apply_transform(R_l)
    finger_r.apply_transform(R_r)
    # NOTE(review): prune_grasp_method reads base/finger_l/finger_r/debug_vis as
    # globals; pool workers only inherit them with the fork start method, so
    # this script is effectively Linux-only as written — TODO confirm.
    pool = mp.Pool(processes=int(mp.cpu_count()/2))
    with open('/home/v-wewei/finish_stl/dataset_dict.json', 'r') as f:
        dataset_dict = json.load(fp=f)
        # Keep every AsyncResult: apply_async otherwise discards worker
        # exceptions silently and a failed key would go unnoticed.
        async_results = []
        for key in dataset_dict.keys():
            #if key.startswith('0') or key.startswith('3dnet'):
            #    continue
            if use_pool:
                if debug_pool:
                    pool.apply(prune_grasp_method, args=(dataset_dict, R_l, R_r, key,))
                else:
                    async_results.append(
                        pool.apply_async(prune_grasp_method, args=(dataset_dict, R_l, R_r, key,)))
            else:
                prune_grasp_method(dataset_dict, R_l, R_r, key)
        if use_pool:
            pool.close()
            pool.join()
            # Re-raise the first worker exception (if any) so failures are not
            # swallowed; get() returns immediately since the pool has joined.
            for res in async_results:
                res.get()
