'''
Created on Nov 10, 2009
@license: Unreleased. Do not publish.
@author: Peter Brook
'''
PKG = 'simple_grasper'
from copy import copy
import MatrixMath as mm
import math
import numpy as np

try:
    import openravepy as orp
except ImportError:
    PKG = 'simple_grasper'
    import roslib
    roslib.load_manifest(PKG)
finally:
    import openravepy as orp
    import tf.transformations as tf


HAND_OUT = np.array((0, 0, 1))
HAND_UP = np.array((0, 1, 0))

class SimpleGraspGenerator(object):
    '''
    The main class for determining a grasp for an object
    from the model geometry and the principle axis
    '''

    def __init__(self, search_info, env = None, try_pinch_grasp = False):
        '''
        Takes a string for the model file, initializes
        the OpenRAVE world, and prepares for grasp computation
        '''
        print "Initializing OpenRAVE"
        self.wam = None
        self.hand = None
        self.hand_rotation = None
        self.arrows = []

        self.try_pinch_grasp = try_pinch_grasp
        if env is None:
            self.env = orp.Environment()
            self.env.SetViewer('qtcoin')
        else:
            self.env = env
        if search_info.modelFile is not None:
            self.object = self.env.ReadKinBodyXMLFile(search_info.modelFile)
            self.object.SetTransform(search_info.objectTransform)
        self.search_info = search_info
        self.draw_axes()
        self.setup_world()


    def draw_axes(self):
        """Draws the three principle axes of the point cloud in the OpenRAVe environment

        Each arrow starts at the object's midpoint and extends half of the
        corresponding extent along that axis. Handles are kept in self.arrows
        so the arrows stay alive in the viewer.
        """
        origin = np.array(self.search_info.midpoint)
        length_scaling = 1.0
        arrow_radius = 0.005
        # One colour per axis: dark red, green, blue.
        colours = (np.array((0.3, 0, 0)),
                   np.array((0, 0.6, 0)),
                   np.array((0, 0, 1)))
        self.arrows = []
        for idx, colour in enumerate(colours):
            tip = origin + self.search_info.axes[idx] * (
                length_scaling * self.search_info.extents[idx] / 2)
            self.arrows.append(
                self.env.drawarrow(origin, tip, arrow_radius, colour))

    def align_hand(self, rotation):
        """Align the hand model to the object point cloud
         
        @param hand_rotation: the hand_rotation about the secondary axis that should be used
        @precondition: self.search_info is initialized
        @precondition:  self.hand is initialized
        """
        self.hand_rotation = rotation
        print "Rotation: ", rotation
        debug = False
        show_arrows = False
        if show_arrows:
            arrows = []
        if debug:
            raw_input("Step 0")
        #TODO: Check all cross products for parallel vectors
        hand_transform = mm.getTrMatrix(self.search_info.midpoint)
        self.hand.SetTransform(hand_transform)
        if debug:
            print "About to align hand out with secondary axis"
            raw_input("Step 1")
        current_hand_out_vector = mm.unitV(np.dot(hand_transform[:3, :3], HAND_OUT))

        if show_arrows:
            arrows.append(self.env.drawarrow(hand_transform[:3, 3], hand_transform[:3, 3] + current_hand_out_vector * 0.3))

        axis_of_rotation = np.cross(current_hand_out_vector, self.search_info.axes[1])
        axis_of_rotation = mm.unitV(axis_of_rotation)

        if show_arrows:
            arrows.append(self.env.drawarrow(hand_transform[:3, 3], hand_transform[:3, 3] + axis_of_rotation * 0.5))

        rotation_angle = math.acos(np.dot(current_hand_out_vector, self.search_info.axes[1]) /
                             (np.linalg.norm(current_hand_out_vector) * np.linalg.norm(self.search_info.axes[1]))
                             ) + (rotation * math.pi / 180)

        print "RotAxis: ", axis_of_rotation
        print "RotAngle: ", rotation_angle
        hand_transform = np.dot(hand_transform, tf.rotation_matrix(rotation_angle, axis_of_rotation))
        self.hand.SetTransform(hand_transform)
        assert np.allclose(np.dot(self.hand.GetTransform()[:3, 2], self.search_info.axes[1]), 0),\
            "hand out is not parallel to secondary axis!"
        if debug:
            print "About to align hand up with primary axis"
            raw_input("Step 2")
        current_hand_up_vector = mm.unitV(np.dot(hand_transform[:3, :3], HAND_UP))
        axis_of_rotation = HAND_OUT#np.dot(hand_transform[:3,:3],HAND_OUT)
        if show_arrows:
            arrows.append(self.env.drawarrow(hand_transform[:3, 3], hand_transform[:3, 3] + current_hand_up_vector * 0.5, 0.005, np.array((1, 1, 0))))
        rotation_angle = math.acos(np.dot(current_hand_up_vector, self.search_info.axes[0]) /
                             (np.linalg.norm(current_hand_up_vector) * np.linalg.norm(self.search_info.axes[0]))
                             )
        print "RotAxis: ", axis_of_rotation
        print "RotAngle: ", rotation_angle
        hand_transform = np.dot(hand_transform, tf.rotation_matrix(rotation_angle, axis_of_rotation))
        self.hand.SetTransform(hand_transform)
        if debug:
            raw_input("Step 3")
        current_hand_up_vector = np.dot(hand_transform[:3, :3], HAND_UP)
        rotation_angle = math.acos(np.dot(current_hand_up_vector, self.search_info.axes[0]) /
                                   np.linalg.norm(current_hand_up_vector) * np.linalg.norm(self.search_info.axes[0])
                                   )

        if not np.allclose(rotation_angle, 0):#we rotated the wrong way :(
            print "We rotated the wrong way, fixing.."
            hand_transform = np.dot(hand_transform, tf.rotation_matrix(-2 * rotation_angle, axis_of_rotation))
        self.hand.SetTransform(hand_transform)
        assert np.allclose(np.dot(self.hand.GetTransform()[:3, 1], self.search_info.axes[0]), 0),\
            "hand up is not parallel to primary axis!"

        if debug:
            raw_input("Step 4")

        current_hand_out_vector = np.dot(hand_transform[:3, :3], HAND_OUT)
        current_hand_out_vector = mm.unitV(current_hand_out_vector)
        axis_of_rotation = np.cross(self.search_info.axes[1], current_hand_out_vector)
        axis_of_rotation = mm.unitV(axis_of_rotation)
        return

    def compute_grasp(self):
        """
        Iterate through possible hand configurations and return the IK solution of the first
        valid grasp.

        Tries each principal-axis pairing in swapList with hand rotations of
        0 and 180 degrees, backing the hand away from the object before
        testing whether the arm can reach the resulting pose.

        @return: (True, ik_solution) for the first reachable grasp, otherwise
            (False, None) once every configuration has been exhausted
        """
        # Presumably excludes the free-floating hand model from collision
        # checks during the search -- TODO confirm against OpenRAVE docs.
        self.hand.Enable(False)
        # Optionally make a second full pass with the pinch-grasp standoff.
        if self.try_pinch_grasp:
            states = [False, True]
        else:
            states = [False]
        for pinch_grasp_enabled_flag in states:
            
            #Swap principal axes around if the initial configuration fails 
            #to give a grasp  
            # (0, 0) is the identity swap, so the unmodified axes are tried
            # first; reverse() followed by pop() walks the pairs in the
            # order they were appended.
            swapList = []
            swapList.append((0, 0))
            swapList.append((0, 1))
            swapList.append((1, 2))
            swapList.append((0, 2))

            swapList.reverse()
            originalAxes = copy(self.search_info.axes)
            # Hand rotations (degrees) about the secondary axis to try.
            rotations = [0, 180]
            while len(swapList) > 0:
                # Restore the untouched axes, then apply the next swap pair.
                self.search_info.axes = copy(originalAxes)
                swapPair = swapList.pop()
                print "Swap: ", swapPair
                self.search_info.axes[swapPair[0]] = originalAxes[swapPair[1]]
                self.search_info.axes[swapPair[1]] = originalAxes[swapPair[0]]
                print self.search_info.axes
                self.set_hand_translation()
                for rotation in rotations:
                    self.align_hand(rotation)
                    '''
                    #while we don't have a valid grasp
                    colliding = self.check_hand_collision()
                    while colliding:
                        #move hand back (-z) until object and hand are not colliding
                        self.move_hand_backward()
                        colliding = self.check_hand_collision()
                    '''
                    #raw_input("enter to move hand back")
                    #0.1515 is from hand origin to hand palm
                    #0.0125 is thickness of palm sensor

                    if pinch_grasp_enabled_flag:
                        print "Using Pinch Grasp"
                        pinch_offset = 0.01
                    else:
                        pinch_offset = 0
                    def find_extent_in_palm_path():
                        """Finds the nearest part of the object that might hit the palm
                        
                        The following code takes the projection of the point cloud
                        in the hand's coordinate frame and computes the point which is nearest
                        to the palm and is also in the bounding rectangle formed by extending the
                        palm outward along the hand's z-axis

                        Reads swapPair and rotation from the enclosing loop
                        via closure; it is defined and called per iteration.
                        """
                        #limit along first and third axes

                        pointsWeCareAbout = []
                        ptia = self.search_info.pointsInAxes
                        # Apply the same index swap used on the axes above so
                        # the point coordinates line up with the swapped frame.
                        axes = [0, 1, 2]

                        tmp = copy(axes[swapPair[0]])
                        axes[swapPair[0]] = copy(axes[swapPair[1]])
                        axes[swapPair[1]] = tmp
                        for i in range(len(ptia[0])):
                            # Keep only points inside the palm's projected
                            # rectangle (half-widths 0.042 and 0.025 --
                            # hand-specific; TODO confirm against the hand
                            # geometry).
                            if - 0.042 < ptia[axes[0]][i] < 0.042 and - 0.025 < ptia[axes[2]][i] < 0.025:
                                pointsWeCareAbout.append(ptia[axes[1]][i])
                        if len(pointsWeCareAbout) == 0:
                            return 0
                        # The approach direction flips with the 180-degree
                        # rotation, so the relevant extreme flips too.
                        if rotation > 0:
                            print "we care about max val"
                            return max(pointsWeCareAbout)
                        else:
                            print "we care about min val"
                            return min(pointsWeCareAbout)
                    val = abs(find_extent_in_palm_path())
                    print "Backup val: ", val
                    #raw_input("before moveback")
                    self.move_hand_backward(val + 0.185 + pinch_offset) # really 0.1775, but this give some extra
                    #raw_input("Observe grasp")
                    #check if position gives a valid IK solution
                    ik_valid, soln = self.check_ik_valid()
                    #fire rays from the fingers in the palm's z-direction
                    #rayCollisions = self.fire_collision_rays()
                    if ik_valid:# and rayCollisions is None:
                        #if they don't collide, then the object is within the hand workspace from this angle
                        #and the IK is valid, therefore we have a valid grasp.
                        #foundValidGrasp = True
                        return True, soln
                        #self.env.RemoveKinBody(self.hand)
                        #self.env.RemoveKinBody(self.wam)
                        #self.wam.SetJointValues(soln,range(7))
                    else:
                        print "IK Not Valid.."
                        #raw_input("<enter to continue searching>")

#                        if self.search_info.axes is not None:
#                            if rotation == 0:
#                                rotation += 180
#                            else:
#                                rotation = 0
#                                rotation2 += 180
#                                if rotation2 == 360 and not swapped:
#                                    print "Swapping"
#                                    swap = copy(self.search_info.axes[0])
#                                    self.search_info.axes[0] = copy(self.search_info.axes[1])
#                                    self.search_info.axes[1] = copy(swap)
#                                    swapped = True
#                                    rotation2 = 0
#                        else:
#                            rotation += 10 #arbitrary, lets try it and see how it works 

        return False, None

    def check_hand_collision(self):
        """Return whether the hand model collides with anything else in the scene.

        @precondition: self.hand is initialized
        """
        colliding = self.env.CheckCollision(self.hand)
        return colliding

    def move_hand_backward(self, amount = 0.01):
        """Slide the hand model along its own negative z axis.

        @param amount: distance to back the hand up
        @precondition: self.hand is initialized
        """
        # Right-multiplying applies the translation in the hand's local frame.
        backup = mm.getTrMatrix((0, 0, -amount))
        self.hand.SetTransform(np.dot(self.hand.GetTransform(), backup))

#===============================================================================
#This code was originally for computing grasp validity in openrave by 
#ray collisions with the object, but that doesn't work well with point clouds
#
#    def align_hand(self, hand_rotation):
#        hand_transform = self.hand.GetTransform()
#        rotAxis = np.cross(self.search_info.principleAxis, HAND_UP)
#
#        rotAngle = math.acos(np.dot(HAND_UP, self.search_info.principleAxis) /
#                             (np.linalg.norm(HAND_UP) * np.linalg.norm(self.search_info.principleAxis)))
#
#        hand_transform = np.dot(hand_transform, tf.rotation_matrix(rotAngle, rotAxis))
#        self.set_hand_translation()
#        self.set_hand_rotation(hand_rotation)
#        self.set_hand_axis_location()
#
#    def set_hand_axis_location(self):
#        hand_transform = self.hand.GetTransform() 
#        #TODO: This is breaking, why?
#        hand_transform = np.dot(hand_transform, mm.getTrMatrix((0, self.search_info.midpoint, 0)))
#        self.hand.SetTransform(hand_transform)
#    def set_hand_rotation(self, hand_rotation):
#        hand_transform = self.hand.GetTransform()
#        #newAxis = np.dot(search_info.principleAxis,mm.getXRotationMatrix(angleX))
#        rot = mm.getAxisRotationMatrix(np.dot(si.principleAxis, hand_transform[:3, :3]), hand_rotation * math.pi / 180)
#        hand_transform = np.dot(hand_transform, rot)
#
#        self.hand.SetTransform(hand_transform)
#
#        hand_transform = np.dot(hand_transform, mm.getTrMatrix((0, 0, -0.11)))
#        self.hand.SetTransform(hand_transform)
#
#    def fire_collision_rays(self):
#        links = self.hand.GetLinks()
#        ends = [links[3], links[6], links[8]]
#        trs = [x.GetTransform() for x in ends]
#        j2t = mm.getTrMatrix((0.05, 0, 0))
#        trs2 = [np.dot(x, j2t) for x in trs]
#        points = [x[:3, 3] for x in trs2]
#
#        handZ = self.hand.GetTransform()[:3, 2]
#===============================================================================

    def set_hand_translation(self):
        """Sets the hand location to the midpoint of the object

        Replaces the hand's whole transform with a pure translation, so any
        previous rotation is discarded.

        @precondition: self.hand is initialized
        """
        self.hand.SetTransform(mm.getTrMatrix(self.search_info.midpoint))

    def check_ik_valid(self):
        """
        Checks if the current hand location in the environment has a valid IK solution

        @precondition: self.wam is initialized
        @precondition: self.hand is initialized
        @return: boolean indicating whether there is a valid solution
        @return: The solution if one exists, None otherwise
        """
        # The arm must reach the pose of the hand's end effector.
        target = self.hand.GetActiveManipulator().GetEndEffectorTransform()
        soln = self.wam.GetActiveManipulator().FindIKSolution(target, True)
        if soln is None:
            return False, None
        return True, soln

    def setup_world(self):
        '''
        Adds the objects to the openrave scene

        Loads the Barrett hand and WAM arm models, verifies they loaded, and
        inserts them (plus the target object, when one was loaded) into the
        environment.
        '''
        try:
            # self.object only exists when a model file was supplied in
            # __init__; missing attribute means there is no object to add.
            self.env.AddKinBody(self.object)
        except AttributeError:
            pass
        self.hand = self.env.ReadRobotXMLFile("robots/barretthand.robot.xml")
        assert self.hand is not None, "Hand xml failed to load"
        self.wam = self.env.ReadRobotXMLFile("robots/barrettwam.robot.xml")
        # BUG FIX: verify the load *before* touching the robot; previously
        # SetTransform ran first, so a failed load raised AttributeError
        # instead of the intended assertion message.
        assert self.wam is not None, "WAM xml failed to load"
        self.wam.SetTransform(np.eye(4))
        self.env.AddRobot(self.hand)
        self.env.AddRobot(self.wam)

if __name__ == '__main__':
    # The demo below is intentionally disabled (wrapped in a string literal);
    # it shows how SearchInformation instances were configured for a few
    # sample objects (ketchup bottle, mug, cereal box).
    '''
    from SearchInformation import *
    search_info = SearchInformation()
    search_info.modelFile = "data/ketchup.kinbody.xml"
    search_info.principleAxis = np.array((0,0,1))
    search_info.secondAxis = np.array((1,0,0))
    search_info.midpoint = 0.05
    
    rm = np.eye(4)
    rm[:3,:3] = mm.getXRotationMatrix(-90)
    search_info.objectTransform = np.dot(mm.getTrMatrix((1,0,0.4)), rm)
    
    mugSI = SearchInformation()
    mugSI.modelFile = "data/mug1.kinbody.xml"
    mugSI.principleAxis = np.array((0,1,0))
    mugSI.secondAxis = np.array((0,0,1))
    mugSI.midpoint = 0.025
    mugSI.objectTransform = mm.getTrMatrix((1,0,0.4))
    
    cerealSI = SearchInformation()
    cerealSI.modelFile = "data/cerealbox.kinbody.xml"
    cerealSI.principleAxis = np.array((0,0,1))
    cerealSI.secondAxis = np.array((1,0,0))
    cerealSI.midpoint = 0.08
    cerealSI.objectTransform = np.array([[  8.00732672e-01,   5.57749689e-01,   2.18500599e-01, 7.73655713e-01],
       [ -5.38076401e-01,   8.30009222e-01,  -1.46828100e-01, -1.89668238e-01],
       [ -2.63250828e-01,   4.25306368e-08,   9.64727402e-01, 2.93078005e-01],
       [  0.00000000e+00,   0.00000000e+00,   0.00000000e+00, 1.00000000e+00]], dtype="float32") 
    cerealSI.principleAxis = np.dot(cerealSI.objectTransform[:3,:3],(0,0,1))#np.array((0,0,1))
    cerealSI.secondAxis = np.dot(cerealSI.objectTransform[:3,:3],(1,0,0))#np.array((1,0,0))
    #mm.getTrMatrix((1,0,0.4))
    
    
    sgg = SimpleGraspGenerator(search_info)
    #sgg = SimpleGraspGenerator(mugSI)
    #sgg = SimpleGraspGenerator(cerealSI)
    '''
