#include <algorithm>
#include <array>
#include <chrono>
#include <cmath>
#include <cstdint>
#include <iomanip>
#include <iostream>
#include <memory>
#include <sstream>
#include <string>
#include <vector>

#include <opencv2/opencv.hpp>

#include "tracking/include/TrackingAlgorithm.h"
#include "yolov5/detection.h"
#include "FastRT/include/fastrt/reid_interface.h"
#include "OsnetReid/osnetreid/osnet_proc.h"
#include "util.h"

using namespace std;

// Per-track state carried between frames by OjectSpeed::getSpeed.
// All members are default-initialized so a freshly constructed Object
// never carries garbage flags/ids (the original left every field
// uninitialized).
struct Object{
    bool match = false;       // matched against the other frame this round
    bool arm_alert = false;   // arm label flipped left<->right between frames
    int id = -1;              // tracker id of the detection
    int arm_direction = -1;   // 0:'arm_left' 1:'arm_right'
    int frame_num = 0;        // consecutive frames this id has been matched
    float speed = 0.0f;       // box-center displacement, pixels per frame
    cv::Rect2f box;           // detection box (cv::Rect2f zero-initializes itself)
};
/*
 * Three arm states: left-arm flag, right-arm flag, and no flag.
 * When the left arm matches, the state is 1; if the right arm also
 * matches at the same time, the state is likewise 1.
 * When the state is 1 and the right arm matches, the state becomes 2;
 * when the state is 0, a right-arm match has no effect.
 * */

// Tracks per-object motion across consecutive frames: matches current
// detections against the previous frame by tracker id, computing a
// center-displacement speed and an arm-direction-flip alert.
// NOTE(review): the class name looks like a typo for "ObjectSpeed";
// renaming would touch every caller, so it is left as-is.
class OjectSpeed
{
public:
    // Matches current-frame detections (rows of {id, x1, y1, x2, y2,
    // arm_direction}) against preframe_object, updating it in place
    // (matched entries refreshed, lost entries erased, new entries
    // appended). Always returns 0.
    int getSpeed(const std::vector<std::vector<float>> &objects, std::vector<Object> &preframe_object);

private:
    Object curframe_object; // scratch record for the detection being processed
};

int OjectSpeed::getSpeed(const std::vector<std::vector<float>> &objects, std::vector<Object> &preframe_object)
{
    const int preframe_object_num = preframe_object.size();
    for (int i = 0; i < objects.size(); i++)
    {
        const int current_object_id = objects[i][0];

        curframe_object.match = false;
        curframe_object.arm_alert = false;
        curframe_object.id = current_object_id;
        curframe_object.arm_direction = int(objects[i][5]);//0:'arm_left' 1:'arm_right'
        curframe_object.frame_num = 0;
        curframe_object.speed = 0;
        curframe_object.box = cv::Rect2f(cv::Point2f(objects[i][1], objects[i][2]),
                            cv::Point2f(objects[i][3], objects[i][4]));

        for (int j = 0; j < preframe_object_num; j++)
        {
            if (current_object_id == preframe_object[j].id)
            {
                curframe_object.match = true;

                cv::Point2f curCenter((objects[i][3] - objects[i][1]) / 2.0f + objects[i][1],
                                      (objects[i][4] - objects[i][2]) / 2.0f + objects[i][2]);
                cv::Point2f preCenter(preframe_object[j].box.x + preframe_object[j].box.width / 2.0f,
                                      preframe_object[j].box.y + preframe_object[j].box.height / 2.0f
                                      );
                float speed = sqrtf((curCenter.x - preCenter.x) * (curCenter.x - preCenter.x)
                                    + (curCenter.y - preCenter.y) * (curCenter.y - preCenter.y));

                if ((0 == curframe_object.arm_direction && 1 == preframe_object[j].arm_direction) ||
                    (1 == curframe_object.arm_direction && 0 == preframe_object[j].arm_direction)) {
                    curframe_object.arm_alert = true;
                }
                preframe_object[j].match = true;
                preframe_object[j].id = current_object_id;
                preframe_object[j].arm_direction = curframe_object.arm_direction;
                preframe_object[j].speed = speed;
                preframe_object[j].frame_num += 1;
                preframe_object[j].box = cv::Rect2f(cv::Point2f(objects[i][1], objects[i][2]),
                                                    cv::Point2f(objects[i][3], objects[i][4]));
            }

            if (objects.size() - 1 == i)
            {
                if(!preframe_object[j].match)//如果当前帧没有目标与上一帧第j个目标匹配，目标丢失，则删除该目标
                {
                    preframe_object.erase(preframe_object.begin() + j);
                }
                else
                {
                    preframe_object[j].match = false;
                }
            }
        }

        if (!curframe_object.match)//当前帧目标没有与上一帧匹配, 为新出现目标
        {
            preframe_object.push_back(curframe_object);
        }
    }

    return 0;
}

int main(int argc, const char *argv[]) {
    std::string test_video_name = "test";
    if (argc < 3){
        std::cout << "please input video path " << std::endl;
    }
    auto input_path = "/home/ssy/Downloads/hand-tracking/"+ test_video_name + ".mp4"; //argv[1];// + test_video_name + ".mp4";
    auto scale_factor = 1;//argc == 3 ? stoi(argv[2]) : 1;
    cv::VideoCapture cap(input_path);
    double fps = cap.get(cv::CAP_PROP_FPS);

    if (!cap.isOpened()) {
        throw runtime_error("Cannot open cv::VideoCapture");
    }

    array<int64_t, 2> orig_dim{int64_t(cap.get(cv::CAP_PROP_FRAME_HEIGHT)), int64_t(cap.get(cv::CAP_PROP_FRAME_WIDTH))};
    array<int64_t, 2> inp_dim;
    for (size_t i = 0; i < 2; ++i) {
        auto factor = 1 << 5;
        inp_dim[i] = (orig_dim[i] / 1 / factor + 1) * factor;
    }

    const int device_id = 0;
    const int max_batch_size = 2;
    // const std::string& weight_path
    ObjectDetect *detector = new ObjectDetect(device_id, DET_TYPE(0), 0, false, false);
    // int device_id, int max_batch_size, cv::Size size, bool alway_serialize=false
//    FastREID *extractor = new FastREID(device_id, max_batch_size, cv::Size(128, 384), false);
    OsnetREID *extractor = new OsnetREID(device_id, max_batch_size, 0, cv::Size(128, 256), false, false, true);

    // detector->init("/home/ssy/Downloads/deep-sort/weights/yolov5m.wts");
    detector->init("/home/ssy/Downloads/deep-sort/weights/yolov5m_PedestrianDetection.wts");
    // extractor->init("/home/ssy/Downloads/deep-sort/weights/sbs_R50-ibn.wts");
    // extractor->init(argv[3]);
    // extractor->init("/home/ssy/Downloads/deep-sort/weights/osnet_x1_0.onnx");
    extractor->init("/home/ssy/Downloads/deep-sort/weights/osnet_x1_0.onnx");

    //TrackingAlgorithm(int width, int height, void *detector, void *extractor)
    TrackingAlgorithm *tracking = new TrackingAlgorithm(int(cap.get(cv::CAP_PROP_FRAME_WIDTH)), int(cap.get(cv::CAP_PROP_FRAME_HEIGHT)), \
                        (NetworkDetect *)detector, (NetworkFeatExtract *)extractor);

    cv::VideoWriter writer;
    writer.open("/home/ssy/Downloads/hand-tracking/" +test_video_name + "_osnet.avi", cv::VideoWriter::fourcc('M', 'J', 'P', 'G'), fps, \
                cv::Size(int(cap.get(cv::CAP_PROP_FRAME_WIDTH)), int(cap.get(cv::CAP_PROP_FRAME_HEIGHT))), true);

    auto image = cv::Mat();
    while (1) {
        bool flag = cap.read(image);
        if (!flag)
        {
            break;
        }
        auto frame_processed = static_cast<uint32_t>(cap.get(cv::CAP_PROP_POS_FRAMES)) - 1;
//        if (frame_processed >= 500){
//            std::cout << "test " << std::endl;
//        }
        std::string image_name = std::to_string(frame_processed);
        for (int id = 0; image_name.size() < 8; id++) {
            image_name = "0" + image_name;
        }

//        cv::imwrite(image_name + ".png", image);

        auto start = chrono::steady_clock::now();
//        if (frame_processed > 500){
//            std::cout << "test " << std::endl;
//        }
        std::vector <std::vector<Detection>> pedestrianbox;
        std::vector<Detection> trks = tracking->update(image, pedestrianbox);

        stringstream str;
        str << "Frame: " << frame_processed << "/" << cap.get(cv::CAP_PROP_FRAME_COUNT) << ", "
            << "FPS: " << fixed << setprecision(2)
            << 1000.0 / chrono::duration_cast<chrono::milliseconds>(chrono::steady_clock::now() - start).count();
        draw_text(image, str.str(), {0, 0, 0}, {image.cols, 0}, true);

        std::cout << "Frame: " << frame_processed << "/" << cap.get(cv::CAP_PROP_FRAME_COUNT) << ", "
            << "FPS: " << fixed << setprecision(2)
            << 1000.0 / chrono::duration_cast<chrono::milliseconds>(chrono::steady_clock::now() - start).count()
            << std::endl;

        for (auto &t:trks) {
            draw_bbox(image, t.box, to_string(t.tracking_id), color_map(t.tracking_id));
            std::cout << "person state is: " << t.state << std::endl;
        }

        cv::imwrite(image_name + ".png", image);
        writer.write(image);
    }

    delete detector;
    delete tracking;
    delete extractor;
}
