#! /usr/bin/env python
import sys, os, time, yaml

src_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../..')
sys.path.append(src_dir)
import rospy
import ros_np_multiarray as ros_nm
import numpy as np
from geometry_msgs.msg import PoseStamped
from visualization_msgs.msg import Marker
from std_srvs.srv import Empty

import moveit_commander
import tf2_ros
from tf2_geometry_msgs import do_transform_pose

############# perception module #################
sys.path.insert(0, '{}/perception/vision_mask/src/'.format(src_dir))
from mask_with_plane_detection_client import Mask_Client
sys.path.pop(0)
sys.path.insert(0, '{}/perception/graspnet/src/'.format(src_dir))
from graspnet_client import GraspNet_Client
sys.path.pop(0)
#################################################

############# planning module ###################
from planning.scripts.action_client import *
#################################################

from utilities.tools.vis_utils import get_posestamped_msg, get_marker_msg
from utilities.tools.tf_utils import *

def correct_grasp_pose(grasp_RT, table_normal):
    """
    Align the grasp orientation with the table normal.

    If the gripper's local y axis points against the table normal, the
    pose is rotated 180 degrees about its local z (approach) axis so that
    the camera is up when grasping (e.g. for bottles).

    #TODO: 1. one-finger side is closer to the obj center for mug and bowl.
    """
    y_axis = grasp_RT[:3, 1]
    alignment = np.sum(y_axis * table_normal)
    if alignment >= 0:
        return grasp_RT
    # Right-multiplying by diag(-1, -1, 1, 1) negates the x and y columns,
    # i.e. a 180-degree rotation about the local z axis.
    flip = np.eye(4)
    flip[0, 0] = flip[1, 1] = -1.0
    return np.matmul(grasp_RT, flip)


class Start():
    """Gate that blocks the pipeline until the user triggers or skips a stage.

    Registers two std_srvs/Empty services:
      /<srv_name>       -> proceed with the stage (run() returns True)
      /skip_<srv_name>  -> skip the stage        (run() returns False)
    """
    def __init__(self, srv_name='graspnet'):
        # Service handles must be created so rospy keeps the callbacks
        # registered; the local names are intentionally unused afterwards.
        start_ = rospy.Service('/{}'.format(srv_name), Empty, self.start)
        skip_ = rospy.Service('/skip_{}'.format(srv_name), Empty, self.skip)
        self.srv_name = srv_name
        self.is_start = False  # set True by the /<srv_name> callback
        self.is_skip = False   # set True by the /skip_<srv_name> callback
        self.rate = rospy.Rate(0.5)  # poll the flags every 2 seconds
    
    def run(self):
        """Block until the stage is started or skipped.

        Returns:
            True when /<srv_name> was called, False when /skip_<srv_name>
            was called, None if rospy shuts down while waiting.
        """
        while not rospy.is_shutdown():
            rospy.loginfo('Wait for \'rosservice call /{}\' command...'.format(self.srv_name))
            if self.is_start:
                self.is_start = False
                # Fixed: original message had an unbalanced quote after '{}'.
                rospy.loginfo('Start \'{}\'. Pay attention to the arm!'.format(self.srv_name))
                return True
            if self.is_skip:
                self.is_skip = False
                rospy.loginfo('Skip {}!'.format(self.srv_name))
                return False
            self.rate.sleep()

    def start(self, req):
        """Service callback for /<srv_name>: request the stage to start."""
        self.is_start = True
        return {}
        
    def skip(self, req):
        """Service callback for /skip_<srv_name>: request the stage to be skipped."""
        self.is_skip = True
        return {}

if __name__ == '__main__':

    # Main grasping loop: wait for a /detect trigger, segment the target
    # object, estimate a 6-DoF grasp with GraspNet, then (after a /grasp
    # trigger) execute the grasp on the Kinova arm.
    vis = True          # publish the chosen grasp as PoseStamped + mesh marker
    grasp_flag = True   # actually move the arm (False = perception only)
    robot_prefix = 'j2n6s300_'  # Kinova Jaco2 link/joint name prefix
    camera_frame = 'camera_color_optical_frame'
    src_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../..')
    rospy.init_node('yolo_grasp_node')
    # TF buffer/listener used later to transform camera-frame grasp poses
    # into the robot base frame.
    tfBuffer = tf2_ros.Buffer()
    tf_listenser = tf2_ros.TransformListener(tfBuffer)
    arm_group = moveit_commander.MoveGroupCommander("arm")
    initial_joint_pose(robot_prefix)  # send the arm to its home configuration
    grasp_est_client = GraspNet_Client()
    # Service gates: each run() blocks until the user issues
    # 'rosservice call /detect' (or /grasp), or the matching /skip_* service.
    detect_waiting = Start(srv_name='detect')
    grasp_waiting = Start(srv_name='grasp')

    # Per-category grasp configuration (e.g. depth offsets); currently only
    # referenced by the commented-out depth_offset code below.
    with open(os.path.join(src_path, 'perception/vision_grasp_est/src/panda_grasp_data/yolograsp_grasp.yaml'), 'r') as stream:
        grasp_cfg = yaml.safe_load(stream)

    # vis topic
    grasp_pose_topic = rospy.Publisher('/graspnet/single_grasp_pose', PoseStamped, queue_size=1)
    gripper_topic = rospy.Publisher('/graspnet/single_grasp_marker', Marker, queue_size=1)

    while True:
        initial_joint_pose(robot_prefix)  # re-home the arm before each attempt
        # NOTE(review): the return value is ignored here, so /skip_detect has
        # no effect on this stage — confirm whether that is intended.
        detect_waiting.run()

        start_time = time.time()
        perception_client = Mask_Client()
        perception_result = perception_client.client()
        if perception_result is not None:
            
            obj_cate = perception_result.category_name
            rospy.loginfo('Detect Obj Category:  {}'.format(obj_cate))
            rospy.loginfo('2D use time : {}'.format(time.time() - start_time))

            # Object point cloud and table-plane normal, both in the camera
            # frame (converted from ROS multiarray messages).
            cam_pcs = ros_nm.to_numpy_f32(perception_result.cam_pcs)
            table_normal = ros_nm.to_numpy_f32(perception_result.table_normal)
            
            # 4x4 grasp pose in the camera frame (None if estimation failed).
            grasp_est_pose = grasp_est_client.get_grasp_pose(cam_pcs)
            rospy.loginfo('Total use time : {}'.format(time.time() - start_time))


            if grasp_est_pose is not None:
                # Flip the grasp about its approach axis if needed (see
                # correct_grasp_pose above).
                grasp_est_pose = correct_grasp_pose(grasp_est_pose, table_normal)
                grasp_pose_CAM = grasp_est_pose

                # Pre-grasp: back off 10 cm along the approach (z) axis.
                pre_position = grasp_pose_CAM[:3,3] - 0.10 * grasp_pose_CAM[:3, 2]
                pre_position = pre_position.tolist()
                # depth_offset = grasp_cfg[obj_cate]['depth_offset'][grasp_cfg[obj_cate]['type']]
                # position = grasp_pose_CAM[:3,3] + depth_offset * grasp_pose_CAM[:3, 2]
                position = grasp_pose_CAM[:3,3]
                position = position.tolist()
                # Rotation matrix -> quaternion (R2q from tf_utils).
                orientation = R2q(grasp_pose_CAM[:3,:3]).tolist()

                pre_grasp_pose_stamped = get_posestamped_msg(pre_position, orientation, camera_frame)
                grasp_pose_stamped = get_posestamped_msg(position, orientation, camera_frame)

                # vis #
                if vis:
                    grasp_pose_msg = get_posestamped_msg(position, orientation, camera_frame)
                    grasp_pose_topic.publish(grasp_pose_msg)

                    mesh_resource = 'file://{}/common/assets/jaco_hand_mesh.ply'.format(src_path)
                    marker = get_marker_msg(position, orientation, 1.0, 1, mesh_resource, camera_frame, color=[0,1,0,1])
                    gripper_topic.publish(marker)
                
                # grasp #
                if grasp_flag:
                    # Wait for /grasp; /skip_grasp aborts this attempt.
                    if not grasp_waiting.run():
                        continue

                    # Transform both poses from the camera frame into the
                    # robot base frame before planning.
                    pose_list = [pre_grasp_pose_stamped, grasp_pose_stamped]
                    transformed_pose_list = transform_poselist_to_tgt(pose_list, robot_prefix+'link_base', tfBuffer)
                    pre_grasp_pose_stamped, grasp_pose_stamped = transformed_pose_list

                    # input of official function
                    pre_pose = get_pose_list_from_pose_stamped(pre_grasp_pose_stamped)
                    pose = get_pose_list_from_pose_stamped(grasp_pose_stamped)
                    
                    # Finger opening per category (Kinova finger positions).
                    # NOTE(review): raises KeyError for any category other
                    # than mug/bottle/bowl — consider a default.
                    open_size = {'mug':[50,50,50], 'bottle':[10,10,10], 'bowl':[30,30,30]}
                    open_finger_positions = open_size[obj_cate]
                    close_finger_positions = [100,100,100]
           
                    try:
                        '''open gripper'''
                        result = finger_action(open_finger_positions, robot_prefix)
                        '''move to grasp pose'''  
                        result = arm_action(pre_pose, robot_prefix)
                        result = arm_action(pose, robot_prefix)
                        '''close gripper to grasp object'''
                        result = finger_action(close_finger_positions, robot_prefix)
                        '''take up object'''
                        result = arm_take_up_action(0.06, robot_prefix)
                    except rospy.ROSInterruptException:
                        print('program interrupted before completion')
            
            else:
                rospy.loginfo('WARNING! No Grasp estimated!')
                continue
        else:
            rospy.loginfo('WARNING! No Object detected! Please check again!')
            continue

        # Grasp Only Once
        # if grasp_flag:
        #     break
