// ROS includes
#include <ros/ros.h>
#include <geometry_msgs/Twist.h>
#include <sensor_msgs/Image.h>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/image_encodings.h>

#include "jason_msgs/perception.h"

// c++ includes
#include <cstdlib>
#include <cstdio>
#include <cmath>
#include <fstream>
#include <algorithm>

// OpenCV includes
#include <opencv2/core/core.hpp>
#include <opencv2/contrib/contrib.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/objdetect/objdetect.hpp>
#include <opencv2/imgproc/imgproc_c.h>
#include <opencv2/core/types_c.h>

// Boost includes
#include <boost/format.hpp>
#include <boost/thread/mutex.hpp>

#include "zbar.h"  


// standard c++ namespace
using namespace std;

// OpenCV namespace
using namespace cv;
using namespace cv_bridge;
//using namespace cvb;

using namespace zbar;  


// ROS namespace
namespace enc = sensor_msgs::image_encodings;


//-----------------------------------------------------------------------------

// Last published state for one tracked robot.  RGBImageCallback compares the
// current detection against this cached state and only re-publishes when the
// marker moved (> 10 px) or its depth reading changed.
struct ST_PerceptionData {
    
    Point2f        position;   // marker centre in the RGB frame, in pixels
    unsigned short distance;   // depth sample taken at that centre (raw units from DepthMapCallback)
};

// Shared state between the two image callbacks.  Both callbacks are serviced
// from the single-threaded ros::spinOnce() loop in main(), so no locking is
// performed here.
bool            g_bnHasDepth = false;   // true once the first depth frame has been decoded
unsigned short  g_awDepth[ 640 * 480 ]; // latest decoded depth map, one sample per RGB pixel (row-major, 640 wide)
ros::Publisher  g_oPerceptionPub;       // publishes jason_msgs::perception (advertised in main on /jason/perception)

// Per-robot cache keyed by the name embedded in the QR payload ("Robot<name>").
// Entries are heap-allocated once and kept for the lifetime of the process.
std::map< std::string, ST_PerceptionData *> g_mpstRobotData;



/**
 * RGB frame callback: scans the frame for QR codes with payload "Robot<name>",
 * looks up the robot's depth from the latest depth map, and publishes a
 * jason_msgs::perception message for every robot whose position or depth
 * changed since the last publication.  Also draws the detected marker outline
 * and shows the annotated frame in the "imgwnd" window.
 */
void RGBImageCallback(const sensor_msgs::Image::ConstPtr& msg)
{
    // Convert the ROS image into an OpenCV BGR8 image we can draw on.
    cv_bridge::CvImagePtr cv_ptr;
    try {
        
      cv_ptr = cv_bridge::toCvCopy(msg, enc::BGR8);
    }
    catch( cv_bridge::Exception& e ) {
        
      ROS_ERROR( "cv_bridge exception: %s", e.what() );
      return;
    }
    
    Mat frame = cv_ptr->image;
    Mat grey;
    cvtColor( frame, grey, CV_BGR2GRAY );
    
    const int width  = frame.cols;
    const int height = frame.rows;
    
    // Wrap the greyscale pixel buffer for zbar (no copy) and scan for codes.
    ImageScanner scanner;
    Image image( width, height, "Y800", (uchar *)grey.data, width * height );
    scanner.scan( image );
    
    jason_msgs::perception perception;
    perception.source = ros::this_node::getName();
    
    for ( Image::SymbolIterator symbol = image.symbol_begin(); symbol != image.symbol_end(); ++symbol ) {
        
        // Collect the symbol outline and fit a rotated rectangle around it.
        vector<Point> vp;
        const int nLocations = symbol->get_location_size();   // renamed: used to shadow the scan count
        for ( int i = 0; i < nLocations; ++i ) {
            vp.push_back( Point( symbol->get_location_x(i), symbol->get_location_y(i) ) );
        }
        RotatedRect r = minAreaRect( vp );
        Point2f pts[4];
        r.points( pts );
        for ( int i = 0; i < 4; ++i ) {
            line( frame, pts[i], pts[(i+1)%4], Scalar(255,0,0), 3 );
        }
        
        // Only payloads of the exact form "Robot<name>" are of interest.
        std::string data = symbol->get_data();
        if ( data.substr(0,6) != "Robot<" || data[data.length()-1] != '>' )
            continue;
        
        std::string strRobotName = data.substr( 6, data.length() - 7 );
        if ( !g_bnHasDepth || strRobotName.empty() )
            continue;
        
        // BUGFIX: clamp the sample position so a marker touching the frame
        // border can no longer index outside the 640x480 depth buffer.
        const int nCol = std::min( std::max( (int)floor( r.center.x + 0.5f ), 0 ), 639 );
        const int nRow = std::min( std::max( (int)floor( r.center.y + 0.5f ), 0 ), 479 );
        const unsigned short wDepth = g_awDepth[ nRow * 640 + nCol ];
        
        const std::string strSide = ( r.center.x < 320.0f ? "left" : "right" );
        // Horizontal bearing, scaled to a 28.5 degree half field of view.
        const float fAngle = ( fabs( r.center.x - 320.0f ) / 320.0f ) * 28.5f;
        
        // Look the robot up in the cache.  This replaces the previous
        // map::at() + catch(out_of_range) control flow, which duplicated the
        // whole publish block in both branches.
        ST_PerceptionData *poData = NULL;
        bool bnChanged = false;
        std::map< std::string, ST_PerceptionData * >::iterator it = g_mpstRobotData.find( strRobotName );
        if ( it == g_mpstRobotData.end() ) {
            
            // First sighting: create a cache entry (kept for process lifetime)
            // and always publish.
            poData = new ST_PerceptionData;
            g_mpstRobotData.insert( std::make_pair( strRobotName, poData ) );
            bnChanged = true;
        }
        else {
            
            // Known robot: only publish if it moved more than 10 px or its
            // depth reading changed.
            poData = it->second;
            bnChanged = ( cv::norm( poData->position - r.center ) > 10.0 || poData->distance != wDepth );
        }
        
        if ( bnChanged ) {
            
            poData->position = r.center;
            poData->distance = wDepth;
            
            // A zero depth sample means "no reading": a large marker (long
            // edge > 64 px) is assumed too close (-INF), a small one too far
            // (+INF).  Otherwise report metres with two decimals.
            const std::string strDepth = ( wDepth > 0
                    ? boost::str( boost::format( "%.2f" ) % ( wDepth / 100.0f ) )
                    : std::string( cv::norm( pts[0] - pts[1] ) > 64 ? "-INF" : "+INF" ) );
            
            perception.perception.push_back( boost::str( boost::format( "robot(%s,%s,%.0f,%s)" )
                    % strRobotName.c_str()
                    % strSide.c_str()
                    % fAngle
                    % strDepth.c_str() ) );
            
            printf("robot %s @ %f,%f distance %s\n",
                    strRobotName.c_str(),
                    r.center.x,
                    r.center.y,
                    strDepth.c_str() );
        }
    }
    
    if ( !perception.perception.empty() ) {
        
        g_oPerceptionPub.publish<jason_msgs::perception>( perception );
    }
    cv::imshow( "imgwnd", frame );
}

/**
 * Depth frame callback: decodes the 16-bit depth image into g_awDepth and
 * marks the depth data as available for RGBImageCallback.
 */
void DepthMapCallback(const sensor_msgs::Image::ConstPtr& msg ) {

    cv_bridge::CvImagePtr cv_ptr;
    try {
        
      cv_ptr = cv_bridge::toCvCopy(msg, enc::TYPE_16UC1);
    }
    catch ( cv_bridge::Exception& e ) {
        
      ROS_ERROR( "cv_bridge exception: %s", e.what() );
      return;
    }
    
    // BUGFIX: the raw bytes must be read as *unsigned* char.  With plain
    // (signed) char, any byte >= 0x80 sign-extends in the arithmetic below
    // and corrupts the assembled 16-bit sample (e.g. low byte 0xFF used to
    // subtract 1 instead of adding 255).
    const unsigned char *depthData  = (const unsigned char*)cv_ptr->image.data;
    const int            nTotalSize = 640*480*2;
    for ( int i = 0, j = 0; i < nTotalSize; i+=2, ++j ) {

        // Assemble the little-endian 16-bit sample, then strip the 3 low bits
        // (presumably sensor status/packing bits - matches Kinect-style 13-bit
        // depth in the upper bits; verify against the camera driver).
        unsigned short tmp = depthData[i + 1];
        tmp <<= 8;
        tmp += depthData[i];
        tmp &= 0xfff8;
        tmp >>= 3;
        g_awDepth[j] = tmp;
    }
    g_bnHasDepth = true;
}


//-----------------------------------------------------------------------------

/**
 * Node entry point: subscribes to the RGB and depth camera streams,
 * advertises the perception topic, and runs a spin/GUI-pump loop.
 */
int main(int argc, char **argv) {
    
    ros::init(argc, argv, "img_tracker");
    ros::NodeHandle n;

    // Camera inputs: colour stream for QR detection, depth stream for range.
    ros::Subscriber subRGB = n.subscribe( "/camera/rgb/image_raw"  , 1, RGBImageCallback );
    ros::Subscriber subD   = n.subscribe( "/camera/depth/image_raw", 1, DepthMapCallback );

    g_oPerceptionPub = n.advertise< jason_msgs::perception >( "/jason/perception", 5 );

    // Debug window showing the annotated RGB frame.
    cv::namedWindow( "imgwnd" );
    cv::resizeWindow( "imgwnd", 320, 240 );

    while( ros::ok() ) {
        
        ros::spinOnce();
        
        // Pump the HighGUI event loop so imshow() actually renders.
        // cv::waitKey replaces the deprecated C-API cvWaitKey used before.
        cv::waitKey(1);
    }

    return 0;
}

