#include <chrono>
#include <cmath>
#include <iostream>
#include <map>
#include <memory>
#include <queue>
#include <string>
#include <thread>
#include <vector>

#include <pthread.h>

#include <httpserver.hpp>

#include "utils.h"
#include "stream_reader_ffmpeg.h"
#include "httpclient.h"
#include "darknet.h"
#include "darknet_yolo.h"
#include "blobtracker.h"

using namespace httpserver;


std::string recvhost;
IniParser* g_iniparser;
HttpClient* httpclient = new HttpClient();
std::vector<std::shared_ptr<StreamReaderFFmpeg>> g_readers(0);
DarknetYolo* g_fdetector = NULL;

std::map<int, std::vector<TargetBlob>> g_existBlobs;
BlobTracker* g_btracker = new BlobTracker();

std::vector<cv::Vec2f> g_stdAlignPoints;
std::vector<cv::Vec2f> g_stdPostPoints;

typedef struct Detection {
  int camid = -1;
  cv::Mat image;
  int nboxes = 0;
  detection* dets = NULL;
  ~Detection(){free_detections(this->dets, this->nboxes);};
} Detection;

std::queue<std::shared_ptr<Detection>> g_detecQueue;

const int g_bufsize = 16; //max queue size

//mutex
pthread_mutex_t lock;


class recvhost_management : public http_resource {
public:
  const std::shared_ptr<http_response> render(const http_request& req) {

    //json resp
    boost::property_tree::ptree json;
    json.put<int>("code", 200);
    json.put("msg", "success");
    json.put("recvhost", recvhost);

    std::string resp = json2string(json);
    return std::shared_ptr<http_response>(new string_response(resp));
  }
  const std::shared_ptr<http_response> render_POST(const http_request& req) {
    std::string content = req.get_content();

    auto pt = string2json(content);

    std::string ip = pt.get<std::string>("ip");
    std::string port = pt.get<std::string>("port");
    std::string uri = pt.get<std::string>("uri");


    pthread_mutex_lock(&lock); //lock
    recvhost = std::string("http://");
    recvhost.append(ip).append(":").append(port).append("/").append(uri);
    pthread_mutex_unlock(&lock); //unlock

    std::cout << "[INFO]: change recvhost to ==> " << recvhost << std::endl;


    //ini update
    g_iniparser->PutString("ip", ip.c_str(), "recvhost");
    g_iniparser->PutString("port", port.c_str(), "recvhost");
    g_iniparser->PutString("uri", uri.c_str(), "recvhost");

    //json resp
    boost::property_tree::ptree json;
    json.put<int>("code", 200);
    json.put("msg", "success");

    std::string resp = json2string(json);
    return std::shared_ptr<http_response>(new string_response(resp));
  }
};

class camera_management : public http_resource {
public:
  const std::shared_ptr<http_response> render(const http_request& req) {

    //json resp
    boost::property_tree::ptree json, rejson;
    rejson.put<int>("code", 200);
    rejson.put("msg", "success");


    pthread_mutex_lock(&lock); //lock
    int reader_size = g_readers.size();
    if (reader_size > 0) {
      for (int i = 0; i < g_readers.size(); i++) {
        std::string rstp = g_readers[i]->GetUrl();
        int camid = g_readers[i]->GetCamid();

        boost::property_tree::ptree tjson;
        tjson.put<int>("camid", camid);
        tjson.put("rtsp", rstp);
        json.push_back(std::make_pair("",tjson));
      }
    }
    pthread_mutex_unlock(&lock); //unlock


    rejson.add_child("camlist", json);
    std::string resp = json2string(rejson);
    return std::shared_ptr<http_response>(new string_response(resp));
  }
  const std::shared_ptr<http_response> render_POST(const http_request& req) {
    std::string content = req.get_content();

    auto pt = string2json(content);
    int camid = pt.get<int>("camid");
    const std::string rtsp = pt.get<std::string>("rtsp");
    const std::string op = pt.get<std::string>("op");

    //json resp
    boost::property_tree::ptree json;
    json.put<int>("code", 200);
    json.put("msg", "success");

    //camera
    bool isOpened = false;
    int index = -1;

    pthread_mutex_lock(&lock); //lock
    for (int i = 0; i < g_readers.size(); i++) {
      if (rtsp == g_readers[i]->GetUrl()) {
        isOpened = true;
        index = i;
        break;
      }
    }
    pthread_mutex_unlock(&lock); //unlock

    //op
    if (op == "open") {
      if (!isOpened) {
        std::shared_ptr<StreamReaderFFmpeg> rtspreader(new StreamReaderFFmpeg(rtsp, camid));
        if (rtspreader->init()<0) {
          json.put<int>("code", 400);
          json.put("msg", "failure");
          rtspreader->release(); //note: just can use for init failure!
        } else {
          rtspreader->Run();
          pthread_mutex_lock(&lock); //lock
          g_readers.push_back(rtspreader);
          pthread_mutex_unlock(&lock); //unlock
          std::cout << "[INFO]: Opened New Stream: " << rtspreader->GetUrl() << " Left: " << g_readers.size() << std::endl;
        }
      }
    }
    else if (op == "close") {
      if (isOpened) {
        std::shared_ptr<StreamReaderFFmpeg> ptr = g_readers[index];
        ptr->Stop();
        std::cout << "[INFO]: Closed Stream: " << ptr->GetUrl() << " Left: " << g_readers.size() << std::endl;
      }
    }

    //ack
    std::string resp = json2string(json);
    return std::shared_ptr<http_response>(new string_response(resp));
  }
};


void *thread_detec(void* arg)
{
  while (true) {

    pthread_mutex_lock(&lock); //lock
    int reader_size = g_readers.size();
    pthread_mutex_unlock(&lock); //unlock

    if (reader_size > 0) {
      for (int i = 0; i < g_readers.size(); i++) {

        auto start = std::chrono::high_resolution_clock::now();

        int camid = -1;
        cv::Mat image;

        if (g_readers[i]->isStoped()) {
          pthread_mutex_lock(&lock); //lock
          g_readers.erase(g_readers.begin()+i);
          pthread_mutex_unlock(&lock); //unlock
          std::cout << "streamer size: " << g_readers.size() << std::endl;
          continue;
        }

        camid = g_readers[i]->GetCamid();
        image = g_readers[i]->GetCurFrame();

        if (image.empty()) { continue; }

        //detec
        int nboxes = 0;
        detection* dets = NULL;

        pthread_mutex_lock(&lock); //lock
        dets = g_fdetector->Getdets(image, &nboxes);
        pthread_mutex_unlock(&lock); //unlock

        std::shared_ptr<Detection> detec(new Detection());
        detec->camid = camid;
        detec->image = image;
        detec->nboxes = nboxes;
        detec->dets = dets;

        pthread_mutex_lock(&lock); //lock
        if (g_detecQueue.size() >= g_bufsize)
        {
          g_detecQueue.pop();
          g_detecQueue.push(detec);
        }
        else {
          g_detecQueue.push(detec);
        }
        pthread_mutex_unlock(&lock); //unlock


        std::cout << "[INFO]: [camid]: " << camid <<  " [detec] @@@@@@@@: " << std::chrono::duration<double, std::milli>(std::chrono::high_resolution_clock::now()-start).count() << "ms" << std::endl;
      }
    }
  }
}


void *thread_track(void* arg)
{
  while (true) {

    if (g_detecQueue.empty()) {
      continue;
    }

    pthread_mutex_lock(&lock); //lock
    std::shared_ptr<Detection> data = g_detecQueue.front();
    g_detecQueue.pop();
    pthread_mutex_unlock(&lock); //unlock

    auto start = std::chrono::high_resolution_clock::now();

    float objness = g_iniparser->GetFloat("objthre", "setting");
    std::vector<TargetBlob> currentFrameBlobs;

    for (unsigned int i = 0; i < data->nboxes; i++) {
      if (0.01 > data->dets[i].prob[0]) continue;
      if (objness > data->dets[i].objectness) continue;
      std::cout << "[INFO]: ==== [objness] ==== " << data->dets[i].objectness << std::endl;
      box b = data->dets[i].bbox;

      //rect
      int left = (b.x - b.w / 2.)*data->image.cols;
      int right = (b.x + b.w / 2.)*data->image.cols;
      int top = (b.y - b.h / 2.)*data->image.rows;
      int bot = (b.y + b.h / 2.)*data->image.rows;

      if (left < 0) left = 0;
      if (right > data->image.cols - 1) right = data->image.cols - 1;
      if (top < 0) top = 0;
      if (bot > data->image.rows - 1) bot = data->image.rows - 1;

      //face points
      std::vector<cv::Vec2f> currentROIPoints;
      currentROIPoints.push_back(cv::Vec2f((b.lx1)*data->image.cols, (b.ly1)*data->image.rows));
      currentROIPoints.push_back(cv::Vec2f((b.lx2)*data->image.cols, (b.ly2)*data->image.rows));
      currentROIPoints.push_back(cv::Vec2f((b.lx3)*data->image.cols, (b.ly3)*data->image.rows));
      currentROIPoints.push_back(cv::Vec2f((b.lx4)*data->image.cols, (b.ly4)*data->image.rows));
      currentROIPoints.push_back(cv::Vec2f((b.lx5)*data->image.cols, (b.ly5)*data->image.rows));

      cv::Mat resultImg(112, 96, CV_8UC3), f_mat;
      cv::Mat warp_mat = estimateRigidTransform_l(currentROIPoints, g_stdAlignPoints, false);
      cv::warpAffine(data->image, resultImg, warp_mat, resultImg.size());

      cv::Mat postROI(112 * 2, 96 * 2, CV_8UC3);
      cv::Mat warp_mat_align = estimateRigidTransform_l(currentROIPoints, g_stdPostPoints, false);
      warpAffine(data->image, postROI, warp_mat_align, postROI.size());

      //track box
      int rectX = left;
      int rectY = top;
      int rectW = right - left;
      int rectH = bot - top;


      TargetBlob possibleBlob;
      cv::Point currentCenter;
      possibleBlob.camid = data->camid;

      possibleBlob.currentBoundingRect = cv::Rect(rectX, rectY, rectW, rectH);
      currentCenter.x = (possibleBlob.currentBoundingRect.x + possibleBlob.currentBoundingRect.x + possibleBlob.currentBoundingRect.width) / 2;
      currentCenter.y = (possibleBlob.currentBoundingRect.y + possibleBlob.currentBoundingRect.y + possibleBlob.currentBoundingRect.height) / 2;
      possibleBlob.centerPositions.push_back(currentCenter);
      possibleBlob.currentDiagonalLength = sqrt(pow(possibleBlob.currentBoundingRect.width, 2) + pow(possibleBlob.currentBoundingRect.height, 2));
      possibleBlob.currentAspectRatio = (float)possibleBlob.currentBoundingRect.width / (float)possibleBlob.currentBoundingRect.height;

      possibleBlob.stillBeingTracked = true;
      possibleBlob.currentMatchFoundOrNewBlob = true;
      possibleBlob.intNumOfConsecutiveFramesWithoutAMatch = 0;

      possibleBlob.rois.push_back(resultImg);
      possibleBlob.postrois.push_back(postROI);

      currentFrameBlobs.push_back(possibleBlob);
      //cv::imwrite("person.jpg", possibleBlob.postrois.back());
    }

    pthread_mutex_lock(&lock); //lock
    if (g_existBlobs.count(data->camid) == 0) {
      g_existBlobs[data->camid] = std::vector<TargetBlob>();
    }
    g_btracker->matchCurrentFrameBlobsToExistingBlobs(g_existBlobs[data->camid], currentFrameBlobs);
    pthread_mutex_unlock(&lock); //unlock

    std::cout << "[INFO] @@@@ [camera num]: " << g_existBlobs.size() <<  " @@@@ [camid]: " << data->camid << " @@@@ [tracking obj]: " << g_existBlobs[data->camid].size() \
      << " @@@@ [time]: " << std::chrono::duration<double, std::milli>(std::chrono::high_resolution_clock::now()-start).count() << "ms" << std::endl;

  }
}


void *thread_filter(void* arg)
{
  while (true) {

    pthread_mutex_lock(&lock); //lock
    for (auto iter = g_existBlobs.begin(); iter != g_existBlobs.end(); iter++) {

      //camera
      int camid = iter->first;
      std::vector<TargetBlob> blobs = iter->second;

      for (unsigned int i = 0; i < blobs.size(); i++) {

        //tracking object
        int trackid = i;

        auto start = std::chrono::high_resolution_clock::now();
        bool stillBeingTracked = blobs[i].stillBeingTracked;

        for (unsigned int j = 0; j < blobs[i].postrois.size(); j++) {

          //frame series
          cv::Mat image = blobs[i].postrois[j];

          if (!stillBeingTracked) {
            //json
            boost::property_tree::ptree json;
            std::string simg = Mat2Base64(image, "jpg");
            json.put<int>("camid",camid);
            json.put<int>("trackid",trackid);
            json.put<std::string>("image", simg);
            //post
            std::string post = json2string(json);

            pthread_mutex_lock(&lock); //lock
            std::string host = recvhost;
            pthread_mutex_unlock(&lock); //unlock

            std::string resp;
            int code = httpclient->Post(host, post, resp);
          }

        }

        if (!stillBeingTracked) {
          iter->second.erase(iter->second.begin()+i);
          i--;
        }

        std::cout << "[INFO]: [camid]: " << camid <<  " [process one series] @@@@@@@@: " << std::chrono::duration<double, std::milli>(std::chrono::high_resolution_clock::now()-start).count() << "ms" << std::endl;
      }
    }
    pthread_mutex_unlock(&lock); //unlock

  }
}


void *thread_post(void* arg)
{
  while (true) {


  }
}


int main(int argc, char** argv)
{
  //iniparser
  g_iniparser = new IniParser();
  int port = g_iniparser->GetInt("port", "setting");

  //recvhost
  std::string recvhsot_ip = g_iniparser->GetString("ip", "recvhost");
  std::string recvhost_port = g_iniparser->GetString("port", "recvhost");
  std::string recvhost_uri = g_iniparser->GetString("uri", "recvhost");

  recvhost = std::string("http://");
  recvhost.append(recvhsot_ip).append(":").append(recvhost_port).append("/").append(recvhost_uri);

  //init mutex
  pthread_mutex_init(&lock,NULL);

  //model
  g_fdetector = new DarknetYolo("models/mbv2_yolov3_face.cfg", "models/mbv2_yolov3_face_final.weights");

  //align points
  g_stdAlignPoints.push_back(cv::Point2f(30.2946, 51.6963));
  g_stdAlignPoints.push_back(cv::Point2f(65.5318, 51.6963));
  g_stdAlignPoints.push_back(cv::Point2f(48.0252, 71.7366));
  g_stdAlignPoints.push_back(cv::Point2f(33.5493, 92.3655));
  g_stdAlignPoints.push_back(cv::Point2f(62.7299, 92.3655));

  g_stdPostPoints.push_back(cv::Point2f(48 + 30.2946, 56 + 51.6963));
  g_stdPostPoints.push_back(cv::Point2f(48 + 65.5318, 56 + 51.6963));
  g_stdPostPoints.push_back(cv::Point2f(48 + 48.0252, 56 + 71.7366));
  g_stdPostPoints.push_back(cv::Point2f(48 + 33.5493, 56 + 92.3655));
  g_stdPostPoints.push_back(cv::Point2f(48 + 62.7299, 56 + 92.3655));

  //holding threads
  pthread_t thd1, thd2, thd3;
  pthread_create(&thd1, NULL, thread_detec, NULL);
  pthread_create(&thd2, NULL, thread_track, NULL);
  pthread_create(&thd3, NULL, thread_post, NULL);
  pthread_detach(thd1);
  pthread_detach(thd2);
  pthread_detach(thd3);


  //httpserver
  webserver ws = create_webserver(port);
  std::cout << "Http Server is Running on: " << port << std::endl;

  recvhost_management rm;
  ws.register_resource("/management/recvhost", &rm);
  camera_management cm;
  ws.register_resource("/management/camera", &cm);

  ws.start(true);

  //destroy mutex
  pthread_mutex_destroy(&lock);
  return 0;
}

