#!/usr/bin/env python
"""
Created on Nov 10, 2009
@license: Unreleased. Do not publish.
@author: Peter Brook
"""
PKG = 'simple_grasper'
import roslib
roslib.load_manifest(PKG)

from simple_grasper import SimpleGraspGenerator
from sensor_msgs.msg import PointCloud
from time import time, sleep

import numpy as np
import openravepy as orp
import socket
import tf.transformations as tf
import math

from ORUtil import get_xml_ball
from simple_grasper import GraspTester

import rospy

TRY_PINCH_GRASP = False
APPLY_CLUSTERING = False
POINT_CLOUD_DECIMATION_FACTOR = 20
class SimpleGrasper():
    """Main class which implements all of the logic for orientation-based grasping.

    Workflow: listen on ROS for a point cloud, build an OpenRAVE world mirroring
    the lab setup, transform the cloud into the robot base frame, filter (and
    optionally cluster) it, compute principal axes/extents, then search
    perpendicular orientations for a collision-free grasp and execute it.
    """

    def __init__(self):
        # Raw PointCloud message from ROS (set by the listener callback)
        self.current_point_cloud = None
        self.data = None
        # Clusters returned by the external RANSAC service (matlab over a socket)
        self.clustered_points_arrays = []
        # Point cloud currently used for grasp computation (robot base frame)
        self.selected_point_cloud = []

        #openrave object references
        self.table = None
        self.box = None
        self.wall = None
        self.wam = None
        self.object_point_cloud_plot = None
        # KinBody balls that give the cloud collision geometry for planning
        self.point_ball_list = []

    def listener(self):
        """Initialize the ROS node, block until point cloud data arrives,
        then hand off to find_valid_grasp()."""

        rospy.init_node('simple_grasper', anonymous = False)

        def callback(data):
            """
            callback function for receiving point cloud data
            @param data: The pointcloud data
            """
            print("Callback: we now have pointcloud data")
            self.data = data

        subscription = rospy.Subscriber("rgbd/cloud", PointCloud, callback)
        print("spinning")
        # Poll instead of rospy.spin() so we can stop as soon as one cloud shows up
        while self.data is None:
            sleep(0.2)

        #If we're here, we have point cloud data; one cloud is enough
        subscription.unregister()
        self.find_valid_grasp()

    def find_valid_grasp(self):
        """
        Main function which takes raw pointcloud data, processes it, and tries
        different perpendicular orientations until a valid grasp is found.

        @precondition: self.data holds a sensor_msgs/PointCloud message
        """
        initial_point_cloud = self.initORWorld()
        #self.env.SetViewer('qtcoin')
        filtered_point_cloud = self.filter_points(initial_point_cloud)

        if APPLY_CLUSTERING:
            clustered_points_arrays = self.apply_ransac_clustering(filtered_point_cloud)
            # Only try the largest cluster for now
            largest_cluster_array = [clustered_points_arrays[0]]
        else:
            largest_cluster_array = [filtered_point_cloud]

        #largest_cluster_array is a list of pointclouds since I might want to try one point cloud
        #first and then fallback to another (larger, less clustered, etc) point cloud if that fails
        for point_cloud in largest_cluster_array:
            self.selected_point_cloud = point_cloud
            print("PC Size: " + str(len(self.selected_point_cloud)))

            axes = self.compute_principal_axes()
            midpoint, extents, points_in_axes = self.compute_extents_midpoint(axes)

            # Lightweight record handed to the grasp generator
            class SearchInformation: pass
            si = SearchInformation()
            si.axes = np.array(axes)
            si.midpoint = np.array(midpoint)
            si.extents = np.array(extents)
            si.points_in_axes = points_in_axes
            self.sgg = SimpleGraspGenerator(si, self.env, TRY_PINCH_GRASP)

            #Render the point cloud with larger balls to take advantage
            #of OpenRAVE's path planning with collision detection
            for point in self.selected_point_cloud[::5]:
                # BUGFIX: was get_xml_ball(pt) -- 'pt' was never defined here
                ball = self.env.ReadKinBodyXMLData(get_xml_ball(point))
                self.point_ball_list.append(ball)
                self.env.AddKinBody(ball)

            success, solution = self.sgg.compute_grasp()
            # NOTE(review): assumes the grasp generator added the WAM as the
            # second robot in the environment -- confirm against SimpleGraspGenerator
            self.wam = self.env.GetRobots()[1]
            self.wam.SetTransform(np.eye(4))

            if success:
                print(solution)
                self.test_grasp(solution)
                return
            else:
                # Clean up the failed attempt's bodies before trying the next cloud
                self.env.RemoveKinBody(self.sgg.hand)
                self.env.RemoveKinBody(self.sgg.wam)
                print("No Grasp")

    def initORWorld(self):
        """
        Initializes the OpenRAVE world to match the real-world robot setup
        and transforms the raw point cloud into the robot frame.

        @return: Nx3 numpy array of cloud points in the robot base frame
        """
        self.env = orp.Environment()
        self.env.SetViewer('qtcoin')

        #decimate the raw point cloud (it starts off as ~200k points)
        points = self.data.points[::POINT_CLOUD_DECIMATION_FACTOR]
        print("numPts: " + str(len(points)))

        self.table = self.env.ReadKinBodyXMLFile("data/table.kinbody.xml")
        #Hardcoded transform of the lab table (measured by hand)
        Tr = np.array([
                       [ -7.08746910e-02, -9.97485340e-01, 0.00000000e+00, 8.19913220e-01],
                       [  9.97485340e-01, -7.08746910e-02, 0.00000000e+00, 2.30686128e-01],
                       [  0.00000000e+00, 0.00000000e+00, 1.00000000e+00, -0.004],
                       [  0.00000000e+00, 0.00000000e+00, 0.000000e+00, 1.00000000e+00]], dtype = 'float32')
        self.table.SetTransform(Tr)
        self.env.AddKinBody(self.table)

        self.wall = self.env.ReadKinBodyXMLFile("data/wall.kinbody.xml")
        #Hardcoded wall transform
        Tr = np.array([[-0.06144834, 0.        , -0.99811035, 1.153],
       [ 0.99811035, 0.        , -0.06144831, 0.20040591],
       [ 0., -1.        , 0.       , 0.15608893],
       [ 0.        , 0.        , 0.        , 1.        ]], dtype = 'float32')
        self.wall.SetTransform(Tr)
        self.env.AddKinBody(self.wall)

        # Hand-tuned camera->robot-base transform: three rotations plus an offset T
        T = np.array([-0.008, 0.310, 0.75])
        start = time()
        points = np.array([(p.x, p.y, p.z) for p in points])
        Tr = tf.rotation_matrix(-1.0 * math.pi / 2, (0, 0, 1))
        Tr2 = tf.rotation_matrix(math.pi / 2 + 0.88, (0, 1, 0))
        Tr3 = tf.rotation_matrix(0.00, (0, 0, 1))

        # The three 4x4 matrices are pure rotations and the original per-point
        # loop used homogeneous points with w=0 (so translation parts were
        # discarded anyway).  Compose the 3x3 rotation parts once and apply to
        # every point in a single vectorized matmul instead of a Python loop.
        rotation = np.dot(Tr3[:3, :3], np.dot(Tr2[:3, :3], Tr[:3, :3]))
        points = np.dot(points, rotation.T) + T

        self.object_point_cloud_plot = self.env.plot3(points = np.array(points), pointsize = 3)
        end = time()
        print("time: " + str(end - start))
        return points

    def filter_points(self, initial_point_cloud):
        """Removes points from the point cloud that fall outside a bounding box.

        These values are tuned to get the left half of the table, from a few cm
        away from the robot to a few cm from the wall.
        @precondition: The point cloud is in the robot base frame
        @return: list of points inside the bounding box
        @raise ValueError: if no points fall inside the box
        """
        min_x = 0.45
        max_x = 1.05
        min_y = 0.10
        max_y = 0.967
        min_z = 0.03
        max_z = 0.5

        points_in_object = []

        #we'll store these for debugging in case we might need them
        points_to_discard = []

        raw_input("Press enter to filter")
        for point in initial_point_cloud:
            if max_x > point[0] > min_x and max_y > point[1] > min_y and max_z > point[2] > min_z:
                points_in_object.append(point)
            else:
                points_to_discard.append(point)

        # was an assert; raise instead so the check survives `python -O`
        if not points_in_object:
            raise ValueError("No points found in the region! Is there an object present?")

        #write the object point cloud out to a file for debugging purposes
        out_file = open("/tmp/pc", 'w')
        try:
            for point in points_in_object:
                out_file.write(str(point) + '\n')
        finally:
            out_file.close()

        self.object_point_cloud_plot = self.env.plot3(points = np.array(points_in_object), pointsize = 3)
        return points_in_object

    def apply_ransac_clustering(self, point_cloud):
        """
        Send the points to a matlab script which will cluster them, and return
        the clusters.

        Wire protocol: one str(point) per line, terminated by a '!' line; the
        reply is one "[0 1 1 ...]" bitmask row per cluster, terminated by '!'.
        @param point_cloud: The point cloud to send
        @return: list of clusters, each a list of points from point_cloud
        """
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect(("localhost", 8886))

        msg = "\n".join(str(point) for point in point_cloud)
        msg += "\n!\n"
        totalsent = 0
        while totalsent < len(msg):
            sent = s.send(msg[totalsent:])
            if sent == 0:
                raise RuntimeError("socket broken!")
            totalsent += sent

        # Read everything back until EOF or the '!' terminator
        msg = ''
        while True:
            try:
                chunk = s.recv(1024)
            except socket.timeout:
                continue
            print("Chunk: " + str(chunk))
            if chunk == '':
                break
            if chunk == "!":
                break
            msg = msg + chunk
        # BUGFIX: the socket was never closed (leaked one fd per call)
        s.close()

        msg = msg.replace('!', '')
        point_indices = []
        for row in msg.splitlines():
            point_indices.append([int(val) for val in row.strip("[]").split()])
        print(point_indices)

        clustered_points_arrays = []
        for bitmask in point_indices:
            pts = [pt for i, pt in enumerate(point_cloud) if bitmask[i] == 1]
            clustered_points_arrays.append(pts)
        return clustered_points_arrays

    def compute_principal_axes(self):
        """
        Straightforward principal axis computation via eigenvectors of the
        covariance matrix.
        @precondition: self.selected_point_cloud contains the target point cloud
        @return: list of the 3 eigenvectors, sorted by decreasing eigenvalue
        """
        cov_mat = np.cov(np.array(self.selected_point_cloud).T)
        evals, evecs = np.linalg.eig(cov_mat)
        print("evals: " + str(evals))
        print("evecs: " + str(evecs))

        # Sort evecs by decreasing eval.  BUGFIX: the old code sorted
        # (eval, evec) tuples directly, which falls through to comparing the
        # numpy eigenvector arrays whenever two eigenvalues tie.
        order = np.argsort(evals)[::-1]
        return [evecs[:, i] for i in order]

    def compute_extents_midpoint(self, axes):
        """
        Computes the midpoint and extents of the selected point cloud.
        @precondition: self.selected_point_cloud is initialized
        @param axes: The principal axes of the cloud

        @return midpoint: The midpoint (in the robot base frame of reference)
        @return extents: The point cloud extents from midpoint (in principal axes frame)
        @return points_in_axes: The point cloud with each (x,y,z) value corresponding to the
        point's position in the three principal axes of the cloud, where the midpoint is the origin
        """
        extents = []
        midpoint_vectors = []
        points_in_axes = []
        # Single pass per axis (the old code projected every point twice)
        for axis in axes:
            proj = [np.dot(p, axis) for p in self.selected_point_cloud]
            minval = min(proj)
            maxval = max(proj)
            extents.append(maxval - minval)
            # Scalar midpoint along this axis; also used to center the projections
            mp = minval + (maxval - minval) / 2
            midpoint_vectors.append(mp * axis)
            points_in_axes.append([p - mp for p in proj])

        print("Extents: " + str(extents))
        # Sum of per-axis midpoint vectors = midpoint in the robot base frame
        midpoint = sum(midpoint_vectors)
        print("Midpoint: " + str(midpoint))

        return midpoint, extents, points_in_axes

    def test_grasp(self, solution):
        """
        Once we have a solution, generate a trajectory from the robot rest pose
        to the grasp pose, avoiding collisions with the table and the
        object/cloud itself.  Then send the trajectory to the robot.

        @param solution: the grasp solution returned by the grasp generator
        """
        manip_prob = self.env.CreateProblem('basemanipulation')
        self.wam.SetActiveDOFs(range(7))
        self.env.LoadProblem(manip_prob, self.wam.GetName())
        self.env.GetViewer().EnvironmentSync()
        self.env.StopSimulation()
        self.env.LockPhysics(True)

        #Grasp Testing
        raw_input("test?")
        # BUGFIX: was GraspTester(self.env, wam, manip_prob) and
        # gt.test_move_to_grasp(soln) -- 'wam' and 'soln' were never defined
        gt = GraspTester(self.env, self.wam, manip_prob)
        gt.test_move_to_grasp(solution)
        #self.test_raise()
        #self.test_handing_over()
        self.env.LockPhysics(False)


if __name__ == '__main__':
    # Entry point: build the grasper and wait for a point cloud from ROS.
    print("executing")
    simple_grasper = SimpleGrasper()
    simple_grasper.listener()

    #===========================================================================
    # Debugging aid: uncomment to draw an axes marker in the scene, optionally
    # attached just behind the end effector.
    # axes = simple_grasper.env.ReadKinBodyXMLFile('data/axes.kinbody.xml')
    # simple_grasper.env.AddKinBody(axes)
    # Tr = simple_grasper.wam.GetActiveManipulator().GetEndEffectorTransform()
    # Tr = np.dot(Tr, tf.translation_matrix((0, 0, -0.2)))
    # axes.SetTransform(Tr)
    #===========================================================================
