/***************************************************************************
 *  Filename: videoprocessing.cpp
 *  Class: VideoProcessing
 *
 *  Copyright 2012  Jacco van der Spek
 *  <jaccovdspek@gmail.com>
 *
 *  Copyright 2012  Harald Homulle
 *  <harald.homulle@gmail.com>
 *
 *  Copyright 2012  Jörn Zimmerling
 *  <forcaeluz@gmail.com>
 *
 *  Copyright 2012  Mario Henrique Voorsluys
 *  <forcaeluz@gmail.com>
 *
 ****************************************************************************/

/*
 *
 * This file is part of %ProjectName%.
 *
 *    %ProjectName% is free software: you can redistribute it and/or modify
 *    it under the terms of the GNU General Public License as published by
 *    the Free Software Foundation, either version 3 of the License, or
 *    (at your option) any later version.
 *
 *    %ProjectName% is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *    GNU General Public License for more details.
 *
 *    You should have received a copy of the GNU General Public License
 *    along with %ProjectName%.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "videoprocessing.h"
#include <QDebug>
#include <qmath.h>
#include <QCoreApplication>

#define PI (float) 3.141592654

VideoProcessing::VideoProcessing(QObject *parent):QObject(parent)
{
    steering = 85;
    theta = 0;
    delta = 0;
    average_x = 0;
    average_y = 0;
    picx = 0;
    picy = 0;
    realx = 0;
    realy = 0;
    realx_old = 0;
    realy_old = 0;
    speed = 0;
    speedx = 0;
    speedy = 0;
    loopCounter = 0;
    takePicture = false;
}

void VideoProcessing::init(int device){
    capture.open(device);
    if(!capture.isOpened()){
        qCritical() << "[VideoProcessing] Unable to open device";
        qCritical() << "[VideoProcessing] Unfortunally OpenCV doesn't tell more about the error";
        return;
    }else{
        qDebug() << "[VideoProcessing] Device opened";
    }
    qDebug() << "[VideoProcessing] Setting Parameters";
    
    capture.set(CV_CAP_PROP_FRAME_WIDTH, picWidth);
    capture.set(CV_CAP_PROP_FRAME_HEIGHT, picHeight);
}

void VideoProcessing::run(){
    //cv::VideoWriter writter("test.avi", CV_FOURCC('D','I','V','X'), 12, cv::Size(320, 240));
    forever{
        timeKeeper.restart();
        cv::Mat tmpFrame;
        capture >> tmpFrame;
        detectObject(tmpFrame);
        mutex.lock();
        if(takePicture){
            makeImage(tmpFrame);
        }
        mutex.unlock();
        fps = 1000.0/timeKeeper.elapsed();
        qDebug() << "[VideoProcessing] FPS is: " << fps;
        calculateSteering();
        calculatePosition();
        calculateVelocity();
       // writter << tmpFrame;
    }
}

void VideoProcessing::detectObject(cv::Mat &image){

    // Rotate 180 degrees, image is upside down, because camera is upside down.
    cv::flip(image,image,-1);
    qDebug() << "[VideoProcessing] Color Detection processing frame";
    int nc = image.rows;
    int nl = image.cols;
    int count = 0;
    int xcenter = 0;
    int ycenter = 0;

    cv::Mat hsv_frame;
    cv::cvtColor(image, hsv_frame, CV_BGR2HSV);

    // Loop through all the pixels. Due to the used logic cv::inrange is not suitable.
    for (int j=0; j<nl; j++){
        for (int i=0; i<nc; i++){
            // Each pixel is colored black if it is detected beyond a certain treshold.
            // Using the HSV frame for checking the pixels, than coloring them in the original picture.
            cv::Vec3b pixel = hsv_frame.at<cv::Vec3b>(i, j);
            if((pixel[0] > 160 && pixel[1] > 150 && pixel[2] > 35) ||
                    (pixel[0] < 15 && pixel[1] > 170 && pixel[2] > 35)){
                image.at<cv::Vec3b>(i,j)[0]= 0 ;
                image.at<cv::Vec3b>(i,j)[1]= 0 ;
                image.at<cv::Vec3b>(i,j)[2]= 0 ;

                xcenter = xcenter + i ;
                ycenter = ycenter + j ;
                count++;
            }
        }
    }

    // Now center of mass calculation, only if red pixels detected.
    if(count!=0){
        xcenter = int(xcenter/count);
        ycenter = int(ycenter/count);
        // Color the center of mass of the detected pixels.
        // Cycle through the pixels around the center
        for (int j=(ycenter-2); j<(ycenter+2); j++) {
            for (int i=(xcenter-2); i<(xcenter+2); i++) {
                if(i>=0 && j>=0 && i<= nc && j<= nl ) // Clip to boundaries.
                {
                    image.at<cv::Vec3b>(i,j)[0]= 255 ;
                    image.at<cv::Vec3b>(i,j)[1]= 255 ;
                    image.at<cv::Vec3b>(i,j)[2]= 255 ;
                }
            }
        }
        qDebug() << "[VideoProcessing] Object found at: " << xcenter << "," << ycenter;
    }
    //Set global variables.

    x_pixel_data[loopCounter%LENGTH_AVERAGE] = xcenter;
    y_pixel_data[loopCounter%LENGTH_AVERAGE] = ycenter;
    loopCounter++;
    double sumx = 0;
    double sumy = 0;
    if(loopCounter > LENGTH_AVERAGE){
        for(int i=0; i < LENGTH_AVERAGE; i++){
            sumx += x_pixel_data[i];
            sumy += y_pixel_data[i];
        }
        picx = sumx/(double)LENGTH_AVERAGE;
        picy = sumy/(double)LENGTH_AVERAGE;
    }else{
        for(uint i=0; i < loopCounter; i++){
            sumx += x_pixel_data[i];
            sumy += y_pixel_data[i];
        }
        picx = sumx/(double)loopCounter;
        picy = sumy/(double)loopCounter;
    }
    qDebug() << "[VideoProcessing] Color detection finished" << picx << ","  << picy;
}

void VideoProcessing::calculatePosition(){
    // Implementation of the function to calculate the relative distance to the object.
    // Needs FoV, Position Angle, Height of camera aboven ground, coördinates of object in picture frame.

    // Declare two temp variables.
    qDebug() << "[VideoProcessing] Calculating position";
    double temp = 0;
    double temp2 = 0;

    mutex.lock();
    // Firstly Calculation of Relative X is done.
    temp = beta + delta/2 - delta*average_x/(picHeight -1);
    temp2 = height*tan(temp*PI/180.0);

    // Secondly Relative y is calculated.
    temp = (theta/(picWidth -1))*(average_y-picWidth /2.0);
    relativey = temp2*tan(temp*PI/180.0);
    relativex = temp2;
    mutex.unlock();
    qDebug() << "[VideoProcessing] Position is: " << relativex << "," << relativey;
}

void VideoProcessing::calculateVelocity(){
    double temp;
    double dx = realx - realx_old;
    double dy = realy - realy_old;

    speedx = uavspeedx + dx*fps ;
    speedy = uavspeedy + dy*fps ;

    temp = dx*dx + dy*dy;
    temp = sqrt(temp);
    speed = uavspeed + temp*fps;
}

void VideoProcessing::calculateSteering(){

    if(picx == 0 && picy == 0) //no object
        steering=85;
    else if(picx < (picHeight)/3)
    {
        if (picy < (picWidth)/3){
            steering=0;}
        else if (picy < (2*(picWidth)/3)){
            steering=60;}
        else{steering=204;}
    }
    else if(picx < (2*(picHeight)/3))
    {
        if (picy < (picWidth)/3){
            steering=15;}
        else if (picy < (2*(picWidth)/3)){
            steering=85;}
        else{steering=240;}
    }
    else
    {
        if (picy < (picWidth)/3){
            steering=51;}
        else if (picy < 2*(picWidth)/3){
            steering=195;}
        else{steering=255;}
    }

}

void VideoProcessing::makeImage(cv::Mat &image){
    // No resizing is done now, but can be implemented.

    cv::imwrite("capture.jpg", image);
    takePicture = false;
    emit newFile("capture.jpg");
}

void VideoProcessing::setProperties(double theight, double tbeta, double VelUAVx, double VelUAVy){
    mutex.lock();
    height = theight;
    beta = tbeta;
    uavspeedx = VelUAVx;
    uavspeedy = VelUAVy;
    uavspeed = sqrt(uavspeedx*uavspeedx + uavspeedy*uavspeedy);
    mutex.unlock();
}

void VideoProcessing::takePictures(){
    qDebug() << "[VideoProcessing] Take a picture man";
    mutex.lock();
    takePicture = true;
    mutex.unlock();

}

quint8 VideoProcessing::getSteering(){
    qDebug() << "[VideoProcessing] Steering";
    mutex.lock();
    quint8 ster = steering;
    mutex.unlock();
    return ster;
}

float VideoProcessing::getXtarget(){
    qDebug() << "[VideoProcessing] X Target reading";
    mutex.lock();
    float tmpX = realx;
    mutex.unlock();
    return tmpX;
}

float VideoProcessing::getYtarget(){
    qDebug() << "[VideoProcessing] X Target reading";
    mutex.lock();
    float tmpX = realy;
    mutex.unlock();
    return tmpX;
}

