//
// Created by PanCheng on 16/03/2017.
//

#include "FaceTrackDefine.h"

#include <android/bitmap.h>
#include <android/log.h>
#include <sys/system_properties.h>
#include <sys/time.h>
#include <unistd.h>
#include <unistd.h>  // NOTE(review): duplicate include kept; harmless under include guards

#include <algorithm>

// Shared scratch state for the bench_start()/bench_end() timing helpers below.
// NOTE(review): these are file-scope statics, so overlapping or concurrent
// bench_start()/bench_end() pairs (e.g. from inside the OpenMP parallel loop
// in FaceTracker::track) race on them and produce meaningless timings —
// confirm single-threaded use or make them thread_local.
static struct timeval tv_begin;
static struct timeval tv_end;
static double elasped;  // elapsed time in milliseconds ("elasped" typo kept: renaming requires touching all uses)

// Record the starting timestamp of a bench_start()/bench_end() measurement.
void bench_start()
{
    gettimeofday(&tv_begin, nullptr);
}

// Stop the timer started by bench_start() and log the elapsed time in ms.
// @param comment  label appended to the log line.
void bench_end(const char* comment)
{
    gettimeofday(&tv_end, NULL);
    // Fix: use double literals. The original 1000000.0f/1000.0f forced the
    // whole expression into float, losing precision once the elapsed
    // microsecond count exceeded ~2^24 (~16.7 s), despite storing a double.
    elasped = ((tv_end.tv_sec - tv_begin.tv_sec) * 1000000.0 + tv_end.tv_usec - tv_begin.tv_usec) / 1000.0;
    __android_log_print(ANDROID_LOG_DEBUG, "FaceTrack",  "%.2fms   %s\n", elasped, comment);
}

// Stop the timer started by bench_start() and log the *average* elapsed time
// in ms over `cnt` iterations.
// @param comment  label appended to the log line.
// @param cnt      number of iterations to average over; 0 logs 0.00ms
//                 (previously undefined behavior: division by zero).
void bench_end(const char* comment, const int & cnt)
{
    gettimeofday(&tv_end, NULL);
    // Double literals for full precision (the float math here mirrored the
    // same precision bug as the single-argument overload).
    double total_ms = ((tv_end.tv_sec - tv_begin.tv_sec) * 1000000.0 + tv_end.tv_usec - tv_begin.tv_usec) / 1000.0;
    elasped = (cnt != 0) ? total_ms / cnt : 0.0;
    __android_log_print(ANDROID_LOG_DEBUG, "FaceTrack", "%.2fms %s\n", elasped, comment);
}


// Compute a square, axis-aligned bounding box enclosing all 90 landmark
// points of `shape`, centered on the landmarks' tight bounding box.
// NOTE(review): treats TC_FaceShape as a packed array of 90 cv::Point2f —
// confirm the struct layout actually matches.
static cv::Rect get_enclosing_bbox(const TC_FaceShape& shape)
{
    const cv::Point2f* pts = reinterpret_cast<const cv::Point2f*>(&shape);

    float left = pts[0].x, right = pts[0].x;
    float top  = pts[0].y, bottom = pts[0].y;
    for (int i = 1; i < 90; ++i) {
        const cv::Point2f& p = pts[i];
        if (p.x < left)   left   = p.x;
        if (right < p.x)  right  = p.x;
        if (p.y < top)    top    = p.y;
        if (bottom < p.y) bottom = p.y;
    }

    // Grow the shorter dimension so the box becomes a square about the center.
    const float w = right - left;
    const float h = bottom - top;
    const float side = std::max(w, h);
    const float bx = (left + w / 2) - side / 2;
    const float by = (top  + h / 2) - side / 2;

    // cv::Rect stores ints: float->int truncation here matches the original.
    return cv::Rect(bx, by, side, side);
}


//FaceTrack
//FaceTrack
// Boosts process priority and logs the versions of the linked SDKs.
FaceTrack::FaceTrack()
    : force_facedetect(0)
{
    nice(-20);  // best-effort priority boost; return value intentionally ignored
    __android_log_print(ANDROID_LOG_DEBUG, "pancheng", "sdk_version : %s",ytfacedetect::facetrackpro_sdk_version());

    __android_log_print(ANDROID_LOG_DEBUG, "pancheng", "stable : %s",ytfacetrack::facetrack_stable_version());
}

FaceTrack::~FaceTrack() = default;

// Run full-frame face detection on `rgb`.
// @param rgb    input image.
// @param faces  output: detected face rectangles.
// @return number of faces found.
int FaceTrack::detect_face(const cv::Mat& rgb, std::vector<cv::Rect>& faces){
    FaceDetectionParam param;
    param.big_face_only = false;
    // Accept faces between 1/5 of the short image side (floored at 40 px)
    // and 80% of the short side.
    param.min_face_size = std::max(std::min(rgb.cols, rgb.rows) / 5, 40);
    param.max_face_size = std::min(rgb.cols, rgb.rows) * 0.8;
    param.num_threads = 2;
    __android_log_print(ANDROID_LOG_DEBUG, "pancheng", "222");  // NOTE(review): leftover debug marker — consider removing
    //bench_start();
    ytfacedetect::yt_facedetection(rgb, faces, param);
    //bench_end("facedetection");
    // Fix: faces.size() is size_t; passing it through varargs to "%i" is
    // undefined behavior on platforms where size_t != int (e.g. arm64).
    __android_log_print(ANDROID_LOG_DEBUG, "pancheng", "faces.size():%zu", faces.size());
    return static_cast<int>(faces.size());
}


// Detect faces the tracker does not already know about: black out the
// regions of currently-tracked faces in a copy of `rgb`, then run the
// detector on the masked image so only new faces are reported.
// @return number of (new) faces found.
int FaceTrack::detect_face_with_mask(const cv::Mat& rgb, std::vector<FaceStatus>& trackerfaces, std::vector<cv::Rect>& faces){
    // mask tracked faces
    cv::Mat rgb2 = rgb.clone();
    const cv::Rect frame(0, 0, rgb.cols, rgb.rows);
    for (size_t i = 0; i < trackerfaces.size(); i++)
    {
        // Fix: intersect with the image rect. The old manual clamping only
        // handled each edge independently; a rect fully outside the frame
        // produced a negative width/height and an out-of-range pointer
        // computation. The raw byte loop also assumed a continuous buffer
        // with stride cols*3 instead of using the Mat's own layout.
        cv::Rect rect = trackerfaces[i].rect & frame;
        if (rect.area() > 0)
            rgb2(rect).setTo(cv::Scalar::all(0));  // zero every channel in the ROI
    }

    FaceDetectionParam param;
    param.big_face_only = false;
    param.min_face_size = std::max(std::min(rgb.cols, rgb.rows) / 5, 40);
    param.max_face_size = std::min(rgb.cols, rgb.rows) * 0.8;
    param.num_threads = 2;

    //bench_start();
    ytfacedetect::yt_facedetection(rgb2, faces, param);
    //bench_end("facedetection2");

    return static_cast<int>(faces.size());
}



// Track face shapes in `rgb`. Runs the lightweight tracker every frame and
// periodically (every FACEDETECT_INTERVAL frames, or whenever no faces are
// tracked) re-runs the full detector to pick up new faces.
// @param rgb     input frame.
// @param shapes  output: one shape per tracked face (appended).
// @return 0 if at least one shape was produced, -1 otherwise.
int FaceTrack::detect_faceshape(const cv::Mat& rgb, std::vector<TC_FaceShape>& shapes){
    //bench_start();
    tracker.track(rgb);
    //bench_end("track");

    force_facedetect++;

    // Fix: was a function-local #define that leaked into the rest of the
    // translation unit; a scoped constant has the intended visibility.
    static const int FACEDETECT_INTERVAL = 7;

    if (tracker.faces.empty() || force_facedetect > FACEDETECT_INTERVAL)
    {
        std::vector<cv::Rect> detected_faces;
        int nface;
        if (tracker.faces.empty())
            nface = detect_face(rgb, detected_faces);
        else
            nface = detect_face_with_mask(rgb, tracker.faces, detected_faces);

        if (nface == 0)
        {
            // Nothing new detected: keep reporting the faces already tracked.
            if (!tracker.faces.empty())
            {
                for (size_t i = 0; i < tracker.faces.size(); i++)  // size_t: no signed/unsigned mismatch
                {
                    shapes.push_back(tracker.faces[i].shape);
                }
                force_facedetect = 0;
                return 0;
            }
            return -1;
        }

        // bench_start();
        tracker.track(rgb, detected_faces);
        // bench_end("track0");
        force_facedetect = 0;
    }

    for (size_t i = 0; i < tracker.faces.size(); i++)
    {
        shapes.push_back(tracker.faces[i].shape);
    }

    return tracker.faces.empty() ? -1 : 0;
}


//FaceTracker
void FaceTracker::track(const cv::Mat& rgb){
   #pragma omp parallel for
   for (size_t j=0; j<faces.size(); j++)
   {
       TC_FaceShape oldshape = faces[j].shape;
       TC_FaceShape shape;
       float score = 0.f;
       bench_start();
       ytfacetrack::yt_face_shape_tracking(rgb, oldshape, shape, score);
       bench_end("tack oldshape");
       faces[j].stablizer.stablize(shape);

       faces[j].shape = shape;
       faces[j].score = score;
       faces[j].rect = get_enclosing_bbox(shape);
       faces[j].valid = face_valid(faces[j]);
   }

   std::vector<FaceStatus>::iterator it = faces.begin();
   while (it != faces.end())
   {
       if (!it->valid)
       {
           it = faces.erase(it);
       }
       else
       {
           ++it;
       }
   }

}


// Begin tracking any detected face that does not substantially overlap a
// face already being tracked.
void FaceTracker::track(const cv::Mat& rgb, const std::vector<cv::Rect>& detected_faces){
    for (size_t di = 0; di < detected_faces.size(); di++)
    {
        const cv::Rect& det = detected_faces[di];

        // A detection counts as already tracked when its intersection with
        // an existing face covers more than half of both rectangles.
        bool already_tracked = false;
        for (size_t fi = 0; fi < faces.size() && !already_tracked; fi++)
        {
            const cv::Rect overlap = det & faces[fi].rect;
            const double overlap_area = overlap.area();
            already_tracked = overlap_area > faces[fi].rect.area() * 0.5
                           && overlap_area > det.area() * 0.5;
        }

        if (already_tracked)
            continue;

        // Initialize fresh tracker state from this detection.
        TC_FaceShape shape;
        float score = 0.f;
        bench_start();
        ytfacetrack::yt_face_shape_tracking(rgb, det, shape, score);
        bench_end("track rect");

        FaceStatus newface;
        newface.valid = true;
        newface.shape = shape;
        newface.score = score;
        newface.rect = get_enclosing_bbox(shape);
        newface.stablizer.init(shape);

        if (face_valid(newface))
            faces.push_back(newface);
    }
}


// A face is kept only when its bounding box exceeds 20x20 px and the
// tracker's confidence score is at least 0.5.
bool FaceTracker::face_valid(const FaceStatus& face) const{
    const bool big_enough = face.rect.width > 20 && face.rect.height > 20;
    const bool confident  = face.score >= 0.5;
    return big_enough && confident;
}




