#! /usr/bin/env python
import sys, os, time, yaml

src_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../..')
sys.path.append(src_dir)
import rospy
import ros_np_multiarray as ros_nm
import numpy as np
from geometry_msgs.msg import PoseStamped
from visualization_msgs.msg import Marker
from sensor_msgs.msg import Image

import moveit_commander
import tf2_ros
from tf2_geometry_msgs import do_transform_pose

############# perception module #################
sys.path.insert(0, '{}/perception/vision_perception/src/'.format(src_dir))
# from perception.vision_perception.src.perception_client import Perception_Client
from perception_client import Perception_Client
sys.path.pop(0)
sys.path.insert(0, '{}/perception/vision_grasp_est/src/'.format(src_dir))
# from perception.vision_grasp_est.src.grasp_est_client import GraspEstClient
from grasp_est_client import GraspEstClient
sys.path.pop(0)
sys.path.insert(0, '{}/perception/face_detect/src/'.format(src_dir))
from face_client import Face_Client
sys.path.pop(0)
#################################################

############# planning module ###################
from planning.scripts.action_client import *
#################################################

from utilities.tools.vis_utils import get_posestamped_msg, get_marker_msg
from utilities.tools.tf_utils import R2q, get_pose_list_from_pose_stamped, rot_X, rot_Y, rot_Z

################### Params ######################
SAFE_DISTANCE = 12               # unit: cm; back-off along the camera Z axis so the bottle stops short of the face
BOTTLE_DELTA_DISTANCE_Y = 12      # unit: cm; added along camera Y to lower the bottle below the lips for easier drinking
ROTATE_DEGREE_DELTA = 9          # unit: degree; base-joint sweep step while searching for a face
#################################################

def get_lip_location(face_mask_pub, frame_id=None):
    """Detect the user's lips via the face-detection service and return a
    target pose for the bottle, expressed in the camera frame.

    Args:
        face_mask_pub: rospy.Publisher for the face-mask Image (rviz debug).
        frame_id: TF frame id to stamp the returned pose with. Defaults to
            the module-level ``camera_frame`` set in ``__main__`` (kept for
            backward compatibility with existing callers).

    Returns:
        geometry_msgs/PoseStamped of the safety-adjusted lip location with
        identity orientation, or None if no face/lips were detected.
    """
    if frame_id is None:
        frame_id = camera_frame  # global, assigned in __main__ before use
    face_detect_client = Face_Client()
    result = face_detect_client.client()  # lip location relative to the camera
    # Publish the annotated frame so the detection can be inspected in rviz.
    face_mask_pub.publish(result.frame)
    if len(result.lip_point) == 0:
        rospy.loginfo('WARNING! No face/lip detected! Please check again!')
        return None
    rospy.loginfo('Face/lip detected successfully!')
    # dtype=float guards the in-place float offsets below against an
    # integer-valued lip_point (in-place subtraction would raise otherwise).
    lip_location_CAM = np.array(result.lip_point, dtype=float)
    lip_location_CAM -= [0, 0, SAFE_DISTANCE/100.]                  # fall back for safety
    lip_location_CAM += [0, BOTTLE_DELTA_DISTANCE_Y/100., 0]        # make the bottle lower to help drink
    lip_location_pose = get_posestamped_msg(lip_location_CAM, [0,0,0,1], frame_id)
    return lip_location_pose

if __name__ == '__main__':

    # --- configuration flags -------------------------------------------------
    vis = True              # publish debug markers/poses to rviz
    grasp_flag = True       # actually execute the physical grasp motions
    help_drink = True       # run the face-detect + bring-to-mouth sequence
    rotate_degree_delta = ROTATE_DEGREE_DELTA
    robot_prefix = 'j2n6s300_'                      # Kinova Jaco2 6-DOF, 3-finger naming prefix
    camera_frame = 'camera_color_optical_frame'     # also read as a global by get_lip_location()
    src_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../..')
    rospy.init_node('helpdrink_grasp_node')
    tfBuffer = tf2_ros.Buffer()
    tf_listenser = tf2_ros.TransformListener(tfBuffer)  # fills tfBuffer in the background
    arm_group = moveit_commander.MoveGroupCommander("arm")

    ##################### init pose ##########################
    # Move to the canonical start pose and remember its base-joint angle so the
    # rotation performed during the face search can be compensated later.
    initial_joint_pose(robot_prefix)
    joint_degree_init, _ = get_current_joint()
    ##########################################################

    ################ Rotate to detect face ###################
    # Start to rotate arm by some degree
    # rotate rotate_degree_delta degree each loop until detecting face:
    if help_drink:
        # publish the mask to rviz for vis
        face_mask_pub = rospy.Publisher('/help_drink/face_mask', Image, queue_size = 10)

        # [ 83.90391541, -12.6219635 , -41.02987671,  54.5380249 ,  -63.14701653, -10.09796143,   0.        ]                   # relative=True
        # [135.164413452, 155.492233276, 56.0648345947, 254.31552124, 52.9071540833, 68.0238265991, 0.0]                        # relative=False
        # [37.240005492999984, -8.708129882999998, -0.31977844240000053, -98.09765625, 13.2249565125, 97.6104984283, 0.0]       # relative=True
        rotate_degree_init = [37.240005492999984, -8.708129882999998, -0.31977844240000053, -98.09765625, 13.2249565125, 97.6104984283, 0.0]
        rotate_degree_loop = 0
        try:
            # Initial relative move toward where the user's face is expected.
            result = arm_change_joint_action(rotate_degree_init, robot_prefix, relative=True)
            lip_location_pose = get_lip_location(face_mask_pub)
            while lip_location_pose is None:
                # Sweep the base joint until the face detector finds a lip point.
                rotate_degree_loop += rotate_degree_delta
                # NOTE(review): unlike the call above, relative=True is not passed
                # here -- confirm arm_change_joint_action's default matches the
                # intended incremental sweep by rotate_degree_loop.
                result = arm_change_joint_action([rotate_degree_loop]+[0]*6, robot_prefix)
                lip_location_pose = get_lip_location(face_mask_pub)
        except rospy.ROSInterruptException:
            print('program interrupted before completion')
        # Total base-joint rotation performed so far (initial move + sweep).
        rotate_degree_loop += rotate_degree_init[0]
        # Rotate:
        # Build the gripper orientation for the drinking pose.
        # `math` is in scope via the wildcard import from planning.scripts.action_client.
        R1 = rot_X(math.pi/2.)
        R2 = rot_Z(rotate_degree_loop * math.pi / 180.)    # rotate according to the degree
        R3 = rot_Y(math.pi/6.)                                  # Pitch degree
        q = R2q(np.matmul(R3, np.matmul(R2, R1)))

        # Express the detected lip pose in the arm base frame, then override its
        # orientation with the computed drinking orientation.
        transform = tfBuffer.lookup_transform("j2n6s300_link_base", lip_location_pose.header.frame_id, rospy.Time(0))

        lip_grasp_pose = do_transform_pose(lip_location_pose, transform)
        lip_grasp_pose.pose.orientation.x, lip_grasp_pose.pose.orientation.y, lip_grasp_pose.pose.orientation.z, lip_grasp_pose.pose.orientation.w = q

        # publish the pose to rviz for vis
        lip_grasp_pose_topic = rospy.Publisher('/help_drink/gripper_in_grasp_pose', PoseStamped, queue_size=1)
        lip_grasp_pose_topic.publish(lip_grasp_pose)
    ##########################################################

    ##################### init pose ##########################
    # Return to the start pose before running object detection / grasping.
    initial_joint_pose(robot_prefix)
    ##########################################################

    grasp_est_client = GraspEstClient()

    # Per-category grasp tuning (e.g. depth offsets) loaded once at startup.
    with open(os.path.join(src_path, 'perception/vision_grasp_est/src/panda_grasp_data/help_drink_grasp.yaml'), 'r') as stream:
        grasp_cfg = yaml.safe_load(stream)

    # vis topic
    obj_pose_topic = rospy.Publisher('/help_drink/obj_pose_in_camera_frame', PoseStamped, queue_size=1)
    obj_topic = rospy.Publisher('/help_drink/obj', Marker, queue_size=1)
    grasp_pose_topic = rospy.Publisher('/help_drink/grasp_pose_in_camera_frame', PoseStamped, queue_size=1)
    gripper_topic = rospy.Publisher('/help_drink/gripper_in_grasp_pose', Marker, queue_size=1)

    # Detection/grasp loop: `continue` retries after a failed detection or
    # grasp estimate; the `break` at the bottom makes it grasp only once.
    while True:
        start_time = time.time()
        perception_client = Perception_Client()
        perception_results = perception_client.client() # msg type: SingleObjPose[]
        if perception_results is not None:
            # Only the first detected object is considered.
            i = 0
            perception_result = perception_results.obj_pose_list[i] # msg type: SingObjPose

            # NOTE(review): obj_cate is later used as a key into grasp_cfg and
            # open_size -- an unexpected category would raise KeyError.
            obj_cate = perception_result.category_name
            rospy.loginfo('Detect Obj Category:  {}'.format(obj_cate))
            rospy.loginfo('2D+6D use time : {}'.format(time.time() - start_time))

            grasp_est_pose = grasp_est_client.get_grasp_pose(perception_result) # msg type: std_msgs/MultiArray and SingleObjPose[]

            if grasp_est_pose is not None:
                grasp_pose = ros_nm.to_numpy_f32(grasp_est_pose.grasp_pose) # grasp pose in OBJ COORDINATES [4x4]
                obj_pose = grasp_est_pose.single_obj_pose.RT
                obj_pose = ros_nm.to_numpy_f32(obj_pose)
                grasp_pose_CAM = np.matmul(obj_pose, grasp_pose) # grasp pose in CAMERA COORDINATES, [4x4]

                rospy.loginfo('Total use time : {}'.format(time.time() - start_time))
                # vis #
                # Publish the object pose/mesh and the gripper pose/mesh so the
                # estimate can be inspected in rviz before any motion happens.
                if vis:
                    position = obj_pose[:3,3].tolist()
                    orientation = R2q(obj_pose[:3,:3]).tolist()

                    obj_pose_msg = get_posestamped_msg(position, orientation, camera_frame)
                    obj_pose_topic.publish(obj_pose_msg)

                    mesh_resource = 'file://{}/perception/vision_grasp_est/src/lib/shape_encoder/recon_meshes/{}_mesh.ply'.format(src_path, obj_cate)
                    scale = perception_result.scale * 0.5
                    marker = get_marker_msg(position, orientation, scale, 0, mesh_resource, camera_frame, color=[0,0,1,1])
                    obj_topic.publish(marker)

                    # Push the grasp point along the approach axis (column 2 of
                    # the rotation) by the per-category depth offset.
                    depth_offset = grasp_cfg[obj_cate]['depth_offset'][grasp_cfg[obj_cate]['type']]
                    print('depth offset is {}'.format(depth_offset))
                    position = grasp_pose_CAM[:3,3] + depth_offset * grasp_pose_CAM[:3, 2]
                    orientation = R2q(grasp_pose_CAM[:3,:3]).tolist()
                    grasp_pose_msg = get_posestamped_msg(position, orientation, camera_frame)
                    grasp_pose_topic.publish(grasp_pose_msg)

                    mesh_resource = 'file://{}/common/assets/jaco_hand_mesh.ply'.format(src_path)
                    marker = get_marker_msg(position, orientation, 1.0, 1, mesh_resource, camera_frame, color=[0,1,0,1])
                    gripper_topic.publish(marker)

                # grasp #
                if grasp_flag:
                    # Pre-grasp: 10 cm back along the approach axis; final grasp:
                    # the per-category depth offset forward along the same axis.
                    pre_position = grasp_pose_CAM[:3,3] - 0.10 * grasp_pose_CAM[:3, 2]
                    pre_position = pre_position.tolist()
                    depth_offset = grasp_cfg[obj_cate]['depth_offset'][grasp_cfg[obj_cate]['type']]
                    position = grasp_pose_CAM[:3,3] + depth_offset * grasp_pose_CAM[:3, 2]
                    position = position.tolist()
                    orientation = R2q(grasp_pose_CAM[:3,:3]).tolist()

                    pre_grasp_pose = get_posestamped_msg(pre_position, orientation, camera_frame)
                    grasp_pose = get_posestamped_msg(position, orientation, camera_frame)

                    transform_ok = False

                    # Retry until TF can express both poses in the arm base frame.
                    # NOTE(review): if the first do_transform_pose succeeds but the
                    # second lookup raises, pre_grasp_pose has already been rebased
                    # and will be transformed a second time on the retry -- verify
                    # this cannot happen (both lookups use the same frames/time).
                    while not transform_ok and not rospy.is_shutdown():
                        try:
                            transform = tfBuffer.lookup_transform("j2n6s300_link_base", pre_grasp_pose.header.frame_id, rospy.Time(0))
                            pre_grasp_pose = do_transform_pose(pre_grasp_pose, transform)
                            transform = tfBuffer.lookup_transform("j2n6s300_link_base", grasp_pose.header.frame_id, rospy.Time(0))
                            grasp_pose = do_transform_pose(grasp_pose, transform)
                            transform_ok = True
                        except tf2_ros.ExtrapolationException as e:
                            rospy.logwarn( "Exception on transforming point... trying again \n(" +str(e) + ")")
                            rospy.sleep(0.01)
                            # Both stamps use grasp_pose's frame id; the two poses
                            # start in the same camera frame, so this is equivalent.
                            pre_grasp_pose.header.stamp = tfBuffer.get_latest_common_time("j2n6s300_link_base", grasp_pose.header.frame_id)
                            grasp_pose.header.stamp = tfBuffer.get_latest_common_time("j2n6s300_link_base", grasp_pose.header.frame_id)

                    # input of official function
                    pre_pose = get_pose_list_from_pose_stamped(pre_grasp_pose)
                    pose = get_pose_list_from_pose_stamped(grasp_pose)

                    # Per-category pre-open finger positions; presumably Kinova
                    # finger actuator units (0 = open) -- TODO confirm.
                    open_size = {'mug':[50,50,50], 'bottle':[10,10,10], 'bowl':[50,50,50]}
                    open_finger_positions = open_size[obj_cate]
                    close_finger_positions = [100,100,100]

                    try:
                        ####################### Grasp Obj ########################
                        # Open fingers, approach via the pre-grasp pose, move in,
                        # close fingers, then lift 6 cm to clear the table.
                        result = finger_action(open_finger_positions, robot_prefix)

                        result = arm_action(pre_pose, robot_prefix)
                        result = arm_action(pose, robot_prefix)

                        result = finger_action(close_finger_positions, robot_prefix)

                        result = arm_take_up_action(0.06, robot_prefix)
                        ##########################################################

                        ######################## HELP DRINK ######################
                        # After obtaining obj, move to the location near mouth
                        if help_drink:
                            # Note that rotate_degree_loop should subtract the degree of grasp pose
                            # thus, before rotating, here we get the current degree:
                            joint_degree, _ = get_current_joint()
                            rotate_degree_loop -= joint_degree[0] - joint_degree_init[0]
                            # First, rotate to face human
                            # NOTE(review): the actual rotation call is commented out;
                            # only the Cartesian move below is executed.
                            rotate_degree = [rotate_degree_loop,0,0,0,0,0,0]
                            # result = arm_change_joint_action(rotate_degree, robot_prefix)

                            # Second, move to the location before human's lip

                            drink_pose = get_pose_list_from_pose_stamped(lip_grasp_pose)
                            result = arm_action(drink_pose, robot_prefix)

                            #TODO wait for human's command to move back
                            time.sleep(2)

                            # move back to desk
                            # NOTE(review): this rotation call is also commented out.
                            rotate_degree = [-rotate_degree_loop,0,0,0,0,0,0]
                            # result = arm_change_joint_action(rotate_degree, robot_prefix)

                            # move back to the grasp pose
                            result = arm_action(pose, robot_prefix)

                            # open gripper to loosen obj
                            result = finger_action(open_finger_positions, robot_prefix)

                            result = arm_action(pre_pose, robot_prefix)

                            # take up to stay away from obj
                            result = arm_take_up_action(0.15, robot_prefix)

                            # move back to init pose
                            initial_joint_pose(robot_prefix)
                        ##########################################################

                    except rospy.ROSInterruptException:
                        print('program interrupted before completion')

            else:
                rospy.loginfo('WARNING! No Grasp estimated!')
                continue
        else:
            rospy.loginfo('WARNING! No Object detected! Please check again!')
            continue

        # Grasp Only Once
        break
