// Based on OPENCV2 examples on face detection (facedetect.cpp)
// Modified for ROS by : Raquel Torres Peralta 
//                       Federico Cirett
//                       Daniel Mathis
// University of Arizona

#define CV_NO_BACKWARD_COMPATIBILITY

#include <ros/ros.h>
#include <ros/package.h>

#include <opencv/cv.h>
#include <opencv/highgui.h>

#include <cv_bridge/CvBridge.h>

#include <roslib/Header.h>
#include <std_msgs/String.h>
#include <sensor_msgs/Image.h>

#include <sensor_msgs/RegionOfInterest.h>
#include <facedetect/FaceRectanglesStamped.h>
#include <conversation/PersonEvent.h>

using namespace std;
using namespace cv;

ros::Publisher people_pub;       // "people": std_msgs::String "YES"/"NO" per-frame presence flag
ros::Publisher people_rois_pub;  // "people_rois": stamped face/eye rectangles for each frame
ros::Publisher person_event;     // "person_event": debounced "entered"/"exited" transitions

ros::Subscriber image_sub;       // "image": incoming camera frames (handled by handle_image)

double scale;                    // downscale factor applied before detection (param "scale")
int    face_on = 0;              // 1 while a person is considered present (debounced in detectAndDraw)

String cascadeName;              // path to the face Haar cascade XML (param "cascade")
String nestedCascadeName;        // path to the eye Haar cascade XML (param "nested_cascade")

CascadeClassifier cascade;       // face detector
CascadeClassifier nestedCascade; // eye detector, run inside each detected face rect

// Sliding-window average of the last MAXV face-presence samples (0/1),
// used to debounce the "entered"/"exited" person events.
#define MAXV 10
#define AVG_THRESH 0.50F
int idx = 0;      // next write slot in the circular buffer (0..MAXV-1)
int idx_lim = 0;  // number of valid samples collected so far (caps at MAXV)
int vec[MAXV];    // circular buffer of 0/1 samples
int sumv = 0;
int j = 0;
float avg = 0;

// Record one 0/1 observation and return the mean over the samples currently
// in the window (at most the last MAXV observations).
//
// Bug fix: the previous version indexed vec[] from 1 and let the write index
// reach MAXV, so vec[MAXV] was written — one element past the end of the
// array (out-of-bounds write); vec[0] was never used. This version uses a
// plain 0-based circular buffer.
float compute_average(int v)
{
	vec[idx] = v;               // store the newest sample
	idx = (idx + 1) % MAXV;     // advance write position, wrapping at MAXV
	if (idx_lim < MAXV)
		idx_lim++;          // window grows until it holds MAXV samples

	sumv = 0;
	for (j = 0; j < idx_lim; j++)
		sumv += vec[j];
	avg = (float)sumv / (float)idx_lim;
	return avg;
} //end compute_average()


// Detect faces (and optionally eyes) in `img`, draw the detections into
// `img` in place, and publish the results:
//  - "people":       std_msgs::String "YES"/"NO" per-frame presence flag
//  - "people_rois":  facedetect::FaceRectanglesStamped with face/eye rects
//                    (reported in small-image coordinates, as before)
//  - "person_event": conversation::PersonEvent "entered"/"exited",
//                    debounced via compute_average() over recent frames
// `header` supplies the timestamp copied onto the published ROI message.
void detectAndDraw(Mat& img, roslib::Header header)
{
    int i = 0;
    double t = 0;
    vector<Rect> faces;
    const static Scalar colors[] =  { CV_RGB(245,204,176),
                                      CV_RGB(176,176,176),
                                      CV_RGB(188,210,238),
                                      CV_RGB(245,245,220),
                                      CV_RGB(210,180,140),
                                      CV_RGB(205,198,115),
                                      CV_RGB(224,238,224),
                                      CV_RGB(255,228,225)} ;

    // Detection runs on an equalized grayscale image downscaled by `scale`.
    Mat gray, smallImg( cvRound (img.rows/scale), cvRound(img.cols/scale), CV_8UC1 );

    cvtColor( img, gray, CV_BGR2GRAY );
    resize( gray, smallImg, smallImg.size(), 0, 0, INTER_LINEAR );
    equalizeHist( smallImg, smallImg );

    t = (double)cvGetTickCount();
    cascade.detectMultiScale( smallImg,
                              faces,
                              1.1,
                              2,
                              0
                              //|CV_HAAR_FIND_BIGGEST_OBJECT
                              //|CV_HAAR_DO_ROUGH_SEARCH
                              |CV_HAAR_SCALE_IMAGE,
                              Size(30, 30) );

    t = (double)cvGetTickCount() - t;
    ROS_DEBUG("detection time = %g ms\n", t/((double)cvGetTickFrequency()*1000.));
    // Cast: faces.size() is size_t but %d expects int (fixes format mismatch).
    ROS_DEBUG("faces total %d\n", (int)faces.size());

    if (faces.empty())
    {
        std_msgs::String msg;
        msg.data = "NO";
        people_pub.publish(msg);

        //People event - EXIT | Check for the avg of the last frames
        if ( (face_on == 1) && (compute_average(0) < AVG_THRESH)  )
        {
            face_on = 0;
            conversation::PersonEvent pmsg;
            pmsg.event = "exited";
            person_event.publish(pmsg);
        }

        return;
    }

    // The presence flag and the ENTER debounce are per-frame decisions, so
    // publish/sample them once here. (The old code re-published "YES" and
    // re-sampled compute_average(1) once per detected face, which was
    // asymmetric with the once-per-frame "NO"/EXIT path above.)
    std_msgs::String msg;
    msg.data = "YES";
    people_pub.publish(msg);

    //People Event - ENTER | Check for the avg of the last frames
    if ( (face_on == 0) && (compute_average(1) >= AVG_THRESH) )
    {
        face_on = 1;
        conversation::PersonEvent pmsg;
        pmsg.event = "entered";
        person_event.publish(pmsg);
    }

    facedetect::FaceRectanglesStamped frs_msg;
    frs_msg.header.stamp = header.stamp;

    for( vector<Rect>::const_iterator r = faces.begin(); r != faces.end(); r++, i++ )
    {
        facedetect::Face face;
        face.face_rect.x_offset = r->x;
        face.face_rect.y_offset = r->y;
        face.face_rect.width = r->width;
        face.face_rect.height = r->height;

        Mat smallImgROI;
        vector<Rect> nestedObjects;
        Point center;
        Point p1;
        Point p2;

        Scalar color = colors[i%8];
        int radius;
        // Map the detection (small-image coords) back to full-image coords
        // for drawing. Bug fix: the old code multiplied the already-scaled
        // `center` by `scale` a second time when computing p1/p2, misplacing
        // the rectangle whenever scale != 1.
        center.x = cvRound((r->x + r->width*0.5)*scale);
        center.y = cvRound((r->y + r->height*0.5)*scale);
        p1.x = cvRound(r->x*scale);
        p1.y = cvRound((r->y + r->height)*scale);
        p2.x = cvRound((r->x + r->width)*scale);
        p2.y = cvRound(r->y*scale);

        rectangle( img, p1, p2, color, 1, 8, 0 );

        if( nestedCascade.empty() )
        {
            // Bug fix: still report the face when no eye cascade is loaded
            // (the old `continue` skipped the push_back at the loop bottom,
            // so "people_rois" was always published empty in that case).
            frs_msg.faces.push_back(face);
            continue;
        }

        // Search for eyes only inside the detected face region.
        smallImgROI = smallImg(*r);
        nestedCascade.detectMultiScale( smallImgROI,
                                        nestedObjects,
                                        1.1,
                                        2,
                                        0
                                        //|CV_HAAR_FIND_BIGGEST_OBJECT
                                        //|CV_HAAR_DO_ROUGH_SEARCH
                                        //|CV_HAAR_DO_CANNY_PRUNING
                                        |CV_HAAR_SCALE_IMAGE,
                                        Size(30, 30) );

        for( vector<Rect>::const_iterator nr = nestedObjects.begin(); nr != nestedObjects.end(); nr++ )
        {
            sensor_msgs::RegionOfInterest eye;
            // Eye rects are relative to the face ROI, small-image coords.
            eye.x_offset = nr->x;
            eye.y_offset = nr->y;
            eye.width = nr->width;
            eye.height = nr->height;
            face.eye_rects.push_back(eye);

            center.x = cvRound((r->x + nr->x + nr->width*0.5)*scale);
            center.y = cvRound((r->y + nr->y + nr->height*0.5)*scale);
            radius = cvRound((nr->width + nr->height)*0.25*scale);
            circle( img, center, radius, color, 3, 8, 0 );
        }

        frs_msg.faces.push_back(face);
    }

    people_rois_pub.publish(frs_msg);
}

// Image topic callback: convert the incoming ROS image to OpenCV and run
// the detector on it. Conversion failures are logged and the frame dropped.
void handle_image(const sensor_msgs::ImageConstPtr& msg_ptr)
{
    sensor_msgs::CvBridge bridge;
    IplImage *image_in = NULL;

    try
    {
        // NOTE(review): imgMsgToCv appears to return bridge-owned memory
        // (no release is done here) — confirm against the CvBridge API.
        image_in = bridge.imgMsgToCv(msg_ptr);

        Mat frame = image_in;
        detectAndDraw(frame, msg_ptr->header);
    }
    // Bug fix: catch by const reference instead of by value (avoids a copy
    // and potential object slicing of derived exception types).
    catch (const sensor_msgs::CvBridgeException& error)
    {
        ROS_ERROR("CvBridgeError");
    }
}

int main( int argc,  char** argv )
{
    ros::init(argc, argv, "face_detector");
    ros::NodeHandle n;
    
    people_pub = n.advertise<std_msgs::String>("people", 3);
    people_rois_pub = n.advertise<facedetect::FaceRectanglesStamped>("people_rois", 3);
    person_event = n.advertise<conversation::PersonEvent>("person_event", 3);
    image_sub = n.subscribe("image", 3, handle_image);
    
    ros::NodeHandle local_nh("~");
    local_nh.param("scale", scale, 1.0);
    local_nh.param("cascade", cascadeName, ros::package::getPath("opencv2") + "/opencv/share/opencv/haarcascades/haarcascade_frontalface_alt.xml");
    local_nh.param("nested_cascade", nestedCascadeName, ros::package::getPath("opencv2") + "/opencv/share/opencv/haarcascades/haarcascade_eye_tree_eyeglasses.xml");
    
    if( !nestedCascade.load( nestedCascadeName ) )
    {
        ROS_WARN("WARNING: Could not load classifier cascade for nested objects");
    }
    
    if( !cascade.load( cascadeName ) )
    {
        ROS_ERROR("ERROR: Could not load classifier cascade");
        return -1;
    }
    
    ros::spin();
    
    return 0;
}

