#! /usr/bin/env python
import sys, os, time, yaml

src_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../..')
sys.path.append(src_dir)
import rospy
import ros_np_multiarray as ros_nm
import numpy as np
from geometry_msgs.msg import PoseStamped
from visualization_msgs.msg import Marker
from std_srvs.srv import Empty

import moveit_commander
import tf2_ros
from tf2_geometry_msgs import do_transform_pose

############# perception module #################
sys.path.insert(0, '{}/perception/vision_perception/src/'.format(src_dir))
# from perception.vision_perception.src.perception_client import Perception_Client
from perception_client import Perception_Client
sys.path.pop(0)
sys.path.insert(0, '{}/perception/vision_grasp_est/src/'.format(src_dir))
# from perception.vision_grasp_est.src.grasp_est_client import GraspEstClient
from grasp_est_client import GraspEstClient
sys.path.pop(0)
#################################################

############# planning module ###################
from planning.scripts.action_client import *
#################################################

from utilities.tools.vis_utils import get_posestamped_msg, get_marker_msg
from utilities.tools.tf_utils import R2q, get_pose_list_from_pose_stamped

class Start():
    """Gate one pipeline step on a user-issued ROS service call.

    Exposes two ``std_srvs/Empty`` services:

    * ``/<srv_name>``      -> :meth:`run` returns ``True`` (perform the step)
    * ``/skip_<srv_name>`` -> :meth:`run` returns ``False`` (skip the step)
    """

    def __init__(self, srv_name):
        # Keep the service handles on the instance instead of discarding the
        # locals; rospy keeps them registered either way, but this makes
        # ownership explicit.
        self.start_srv = rospy.Service('/{}'.format(srv_name), Empty, self.start)
        self.skip_srv = rospy.Service('/skip_{}'.format(srv_name), Empty, self.skip)
        self.srv_name = srv_name
        self.is_start = False
        self.is_skip = False
        self.rate = rospy.Rate(0.5)  # poll at 0.5 Hz (2 s period) while waiting

    def run(self):
        """Block until the start or skip service fires.

        Returns ``True`` on ``/<srv_name>``, ``False`` on
        ``/skip_<srv_name>``, or ``None`` if ROS shuts down while waiting.
        """
        while not rospy.is_shutdown():
            rospy.loginfo('Wait for \'rosservice call /{}\' command...'.format(self.srv_name))
            if self.is_start:
                # Clear BOTH flags: a stale skip request received during this
                # wait must not silently skip the next cycle (previously only
                # the flag that fired was reset).
                self.is_start = False
                self.is_skip = False
                rospy.loginfo('Start to {}. '.format(self.srv_name))
                return True
            if self.is_skip:
                self.is_skip = False
                self.is_start = False
                rospy.loginfo('Skip {}!'.format(self.srv_name))
                return False
            self.rate.sleep()

    def start(self, req):
        """Service callback for '/<srv_name>'; arms the start flag."""
        self.is_start = True
        return {}

    def skip(self, req):
        """Service callback for '/skip_<srv_name>'; arms the skip flag."""
        self.is_skip = True
        return {}

if __name__ == '__main__':

    # Toggle RViz visualization of the detected object / grasp markers.
    vis = True
    # Toggle physical grasp execution (arm + gripper motion).
    grasp_flag = True
    robot_prefix = 'j2n6s300_'  # Kinova Jaco2 joint/link name prefix
    camera_frame = 'camera_color_optical_frame'  # frame all perception poses are expressed in
    src_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../..')
    rospy.init_node('yolo_grasp_node')
    # TF buffer/listener used later to transform grasp poses from the camera
    # frame into the robot base frame.
    tfBuffer = tf2_ros.Buffer()
    tf_listenser = tf2_ros.TransformListener(tfBuffer)
    arm_group = moveit_commander.MoveGroupCommander("arm")
    # Drive the arm to its predefined initial joint configuration
    # (helper from planning.scripts.action_client).
    initial_joint_pose(robot_prefix)
    grasp_est_client = GraspEstClient()
    # User gates for each cycle: 'rosservice call /detect' (or /skip_detect)
    # and 'rosservice call /grasp' (or /skip_grasp).
    detect_waiting = Start('detect')
    grasp_waiting = Start('grasp')

    # Per-category grasp configuration (depth offsets keyed by grasp 'type').
    with open(os.path.join(src_path, 'perception/vision_grasp_est/src/panda_grasp_data/yolograsp_grasp.yaml'), 'r') as stream:
        grasp_cfg = yaml.safe_load(stream)

    # vis topic
    obj_pose_topic = rospy.Publisher('/yolograsp/single_obj_pose', PoseStamped, queue_size=1)
    obj_topic = rospy.Publisher('/yolograsp/single_obj_marker', Marker, queue_size=1)
    grasp_pose_topic = rospy.Publisher('/yolograsp/single_grasp_pose', PoseStamped, queue_size=1)
    gripper_topic = rospy.Publisher('/yolograsp/single_grasp_marker', Marker, queue_size=1)
    # Main loop: home the arm, wait for /detect, run perception + grasp
    # estimation, optionally visualize in RViz, wait for /grasp, execute the
    # pick.  Runs until the node is killed.
    while True:
        initial_joint_pose(robot_prefix)  # return arm to home before each cycle
        # NOTE(review): return value is ignored, so '/skip_detect' does not
        # actually skip detection -- confirm whether that is intended.
        detect_waiting.run()
        start_time = time.time()
        perception_client = Perception_Client()
        perception_results = perception_client.client() # msg type: SingleObjPose[]
        if perception_results is not None:
            # Only the first detected object is handled in each cycle.
            i = 0
            perception_result = perception_results.obj_pose_list[i] # msg type: SingObjPose

            obj_cate = perception_result.category_name
            rospy.loginfo('Detect Obj Category:  {}'.format(obj_cate))
            rospy.loginfo('2D+6D use time : {}'.format(time.time() - start_time))

            # Grasp pose estimation for the detected object.
            grasp_est_pose = grasp_est_client.get_grasp_pose(perception_result) # msg type: std_msgs/MultiArray and SingleObjPose[]

            if grasp_est_pose is not None:
                grasp_pose = ros_nm.to_numpy_f32(grasp_est_pose.grasp_pose) # grasp pose in OBJ COORDINATES [4x4]
                obj_pose = grasp_est_pose.single_obj_pose.RT
                obj_pose = ros_nm.to_numpy_f32(obj_pose)
                # Compose object pose (camera frame) with grasp pose (object
                # frame) to express the grasp in the camera frame.
                grasp_pose_CAM = np.matmul(obj_pose, grasp_pose) # grasp pose in CAMERA COORDINATES, [4x4]
                
                rospy.loginfo('Total use time : {}'.format(time.time() - start_time))
                # vis #
                if vis:
                    # Publish estimated object pose and its mesh marker.
                    position = obj_pose[:3,3].tolist()
                    orientation = R2q(obj_pose[:3,:3]).tolist()
            
                    obj_pose_msg = get_posestamped_msg(position, orientation, camera_frame)
                    obj_pose_topic.publish(obj_pose_msg)

                    mesh_resource = 'file://{}/perception/vision_grasp_est/src/lib/shape_encoder/recon_meshes/{}_mesh.ply'.format(src_path, obj_cate)
                    scale = perception_result.scale * 0.5
                    marker = get_marker_msg(position, orientation, scale, 0, mesh_resource, camera_frame, color=[0,0,1,1])
                    obj_topic.publish(marker)

                    # Shift the grasp point along the grasp-frame z axis
                    # (column 2) by the per-category depth offset from the
                    # YAML config -- presumably the approach direction.
                    depth_offset = grasp_cfg[obj_cate]['depth_offset'][grasp_cfg[obj_cate]['type']]
                    print('depth offset is {}'.format(depth_offset))
                    position = grasp_pose_CAM[:3,3] + depth_offset * grasp_pose_CAM[:3, 2]
                    orientation = R2q(grasp_pose_CAM[:3,:3]).tolist()
                    grasp_pose_msg = get_posestamped_msg(position, orientation, camera_frame)
                    grasp_pose_topic.publish(grasp_pose_msg)

                    mesh_resource = 'file://{}/common/assets/jaco_hand_mesh.ply'.format(src_path)
                    marker = get_marker_msg(position, orientation, 1.0, 1, mesh_resource, camera_frame, color=[0,1,0,1])
                    gripper_topic.publish(marker)                
                
                # grasp #
                if grasp_flag:
                    # Wait for '/grasp'; '/skip_grasp' aborts this cycle.
                    if not grasp_waiting.run():
                        continue
                    # Pre-grasp: back off 0.10 m along the grasp z axis.
                    pre_position = grasp_pose_CAM[:3,3] - 0.10 * grasp_pose_CAM[:3, 2]
                    pre_position = pre_position.tolist()
                    depth_offset = grasp_cfg[obj_cate]['depth_offset'][grasp_cfg[obj_cate]['type']]
                    position = grasp_pose_CAM[:3,3] + depth_offset * grasp_pose_CAM[:3, 2]
                    position = position.tolist()
                    orientation = R2q(grasp_pose_CAM[:3,:3]).tolist()

                    # NOTE(review): 'grasp_pose' is rebound here from the 4x4
                    # numpy matrix above to a PoseStamped message.
                    pre_grasp_pose = get_posestamped_msg(pre_position, orientation, camera_frame)                    
                    grasp_pose = get_posestamped_msg(position, orientation, camera_frame)

                    # Transform both poses from the camera frame into the
                    # robot base frame, retrying on TF extrapolation errors.
                    transform_ok = False
                    while not transform_ok and not rospy.is_shutdown():
                        try:
                            transform = tfBuffer.lookup_transform("j2n6s300_link_base", pre_grasp_pose.header.frame_id, rospy.Time(0))
                            pre_grasp_pose = do_transform_pose(pre_grasp_pose, transform)
                            transform = tfBuffer.lookup_transform("j2n6s300_link_base", grasp_pose.header.frame_id, rospy.Time(0))
                            grasp_pose = do_transform_pose(grasp_pose, transform)
                            transform_ok = True
                        except tf2_ros.ExtrapolationException as e:
                            # Re-stamp with the latest common time and retry.
                            rospy.logwarn( "Exception on transforming point... trying again \n(" +str(e) + ")")
                            rospy.sleep(0.01)
                            pre_grasp_pose.header.stamp = tfBuffer.get_latest_common_time("j2n6s300_link_base", grasp_pose.header.frame_id)
                            grasp_pose.header.stamp = tfBuffer.get_latest_common_time("j2n6s300_link_base", grasp_pose.header.frame_id)

                    # input of official function
                    pre_pose = get_pose_list_from_pose_stamped(pre_grasp_pose)
                    pose = get_pose_list_from_pose_stamped(grasp_pose)
                    
                    # Per-category finger opening commands; [100,100,100] is
                    # the close command (units per finger_action's driver).
                    open_size = {'mug':[50,50,50], 'bottle':[10,10,10], 'bowl':[50,50,50]}
                    open_finger_positions = open_size[obj_cate]
                    close_finger_positions = [100,100,100]
           
                    try:
                        '''open gripper'''
                        result = finger_action(open_finger_positions, robot_prefix)
                        '''move to grasp pose'''
                        ## choice 1: moveit!
                        # result = moveit_arm_plan(arm_group, pre_grasp_pose)
                        # if not result:
                        #     break
                        # result = moveit_arm_plan(arm_group, grasp_pose)
                        # if not result:
                        #     break  
                        ## choice 2: official function       
                        result = arm_action(pre_pose, robot_prefix)
                        result = arm_action(pose, robot_prefix)
                        '''close gripper to grasp object'''
                        result = finger_action(close_finger_positions, robot_prefix)
                        '''take up object'''
                        result = arm_take_up_action(0.06, robot_prefix)
                    except rospy.ROSInterruptException:
                        print('program interrupted before completion')
            
            else:
                rospy.loginfo('WARNING! No Grasp estimated!')
                continue
        else:
            rospy.loginfo('WARNING! No Object detected! Please check again!')
            continue

        # Grasp Only Once    
        # break
