//
// Created by ZhaoXiaoFei on 2022/6/20.
//

#include <string>
#include <vector>
#include <ros/ros.h>
#include <glog/logging.h>
#include <sensor_msgs/Image.h>
#include <sensor_msgs/PointCloud.h>
#include <std_msgs/Bool.h>
#include <opencv2/opencv.hpp>
#include <opencv2/highgui.hpp>
#include <cv_bridge/cv_bridge.h>
#include "camodocal/camera_models/CameraFactory.h"

// ROS publishers: annotated feature image, tracked-feature point cloud, restart flag.
ros::Publisher feature_image_pub, pub_match, pub_restart;

// Frame buffers: previous / current / incoming image.
cv::Mat curImg, preImg, tempImg;
// Freshly detected corners from goodFeaturesToTrack (not yet assigned ids).
std::vector<cv::Point2f> nPoints;
// Pixel coordinates of tracked features in the previous / current / incoming frame.
std::vector<cv::Point2f> prePoints, curPoints, tempPoints;
// Normalized (undistorted) coordinates of the same features.
std::vector<cv::Point2f> UndistortedPrePoints, UndistortedCurPoints;
// id -> normalized point, used to match features across frames for velocity.
std::map<int, cv::Point2f> UndistortedPrePointsMap, UndistortedCurPointsMap;
// Per-feature velocity in the normalized image plane (units: 1/s).
std::vector<cv::Point2f> pointsVel;
// Per-feature id and consecutive-frame track count (parallel to curPoints).
std::vector<int> ids, track_cnts;
// Exclusion mask used to keep newly detected features spatially separated.
cv::Mat mask;
// Timestamps of the current and previous processed frames (seconds).
double curTime, preTime;
// Monotonic counter used to hand out fresh feature ids.
static int id_index = 0;

// Stream / rate-control state.
bool first_image_flag = true;
double first_image_time;
double last_image_time = 0;
int pub_count = 1;
bool PUB_THIS_FRAME = false;
// Skip publishing the very first processed frame (no velocities yet).
int init_pub = 0;

// Camera model loaded from the YAML config; image size read from the same file.
camodocal::CameraPtr camera;
int ROW, COL;
// Virtual focal length used when re-projecting normalized points for RANSAC.
int FOCAL_LENGTH = 460;


template<class T>
void reduceVector(std::vector<T> &vec, std::vector<unsigned char> &status){
    int j = 0;
    for(int i = 0; i < vec.size(); i++){
        if(status[i]){
            vec[j++] = vec[i];
        }
    }
    vec.resize(j);
}

void setMask()
{
    mask = cv::Mat(ROW, COL, CV_8UC1, cv::Scalar(255));


    // prefer to keep features that are tracked for long time
    std::vector<std::pair<int, std::pair<cv::Point2f, int>>> cnt_pts_id;

    for (unsigned int i = 0; i < tempPoints.size(); i++)
        cnt_pts_id.push_back(make_pair(track_cnts[i], std::make_pair(tempPoints[i], ids[i])));

    sort(cnt_pts_id.begin(), cnt_pts_id.end(), [](const std::pair<int, std::pair<cv::Point2f, int>> &a, const std::pair<int, std::pair<cv::Point2f, int>> &b)
    {
        return a.first > b.first;
    });

    tempPoints.clear();
    ids.clear();
    track_cnts.clear();

    for (auto &it : cnt_pts_id)
    {
        if (mask.at<uchar>(it.second.first) == 255)
        {
            tempPoints.push_back(it.second.first);
            ids.push_back(it.second.second);
            track_cnts.push_back(it.first);
            cv::circle(mask, it.second.first, 30, 0, -1);
        }
    }
}

// True iff `pt` (rounded to the nearest pixel) lies inside the image with
// a 1-px safety margin on every side.
bool inBorder(const cv::Point2f &pt)
{
    const int BORDER_SIZE = 1;
    const int x = cvRound(pt.x);
    const int y = cvRound(pt.y);
    const bool x_ok = (BORDER_SIZE <= x) && (x < COL - BORDER_SIZE);
    const bool y_ok = (BORDER_SIZE <= y) && (y < ROW - BORDER_SIZE);
    return x_ok && y_ok;
}

void showUndistortion(const std::string &name)
{
    cv::Mat undistoredImg(ROW + 600, COL + 600, CV_8UC1, cv::Scalar(0));
    std::vector<Eigen::Vector2d> distortedVec, undistortedVec;
    for(int u = 0; u < COL; u++){
        for(int v = 0; v < ROW; v++){
            Eigen::Vector2d a(u, v);
            Eigen::Vector3d b;
            camera->liftProjective(a, b);
            distortedVec.push_back(a);
            undistortedVec.push_back(Eigen::Vector2d(b.x()/b.z(), b.y()/b.z()));
        }
    }

    for(int i = 0; i < undistortedVec.size(); i++){
        Eigen::Vector2f p;
        p.x() = undistortedVec[i].x() * FOCAL_LENGTH + COL / 2.0;
        p.y() = undistortedVec[i].y() * FOCAL_LENGTH + ROW / 2.0;
        if(p.x() + 300 >= 0 && p.x() + 300 < COL + 600 && p.y() + 300 >= 0 && p.y() + 300 < ROW + 600){
            undistoredImg.at<uchar>(p.y() + 300, p.x() + 300) = curImg.at<uchar>(distortedVec[i].y(), distortedVec[i].x());
        }
    }
    cv::imshow(name, undistoredImg);
    cv::waitKey(3);
}


// Per-frame feature-tracking pipeline:
//   1. stream-continuity check (reset + restart message on a gap/jump),
//   2. ~10 Hz publication rate control,
//   3. CLAHE preprocessing, LK optical-flow tracking, border/RANSAC
//      outlier rejection, masked re-detection of new corners,
//   4. per-feature velocity in the normalized image plane,
//   5. publication of (id, u, v, vx, vy) channels as a PointCloud.
void image_callback(const sensor_msgs::ImageConstPtr& image){
    // --- 1. frame continuity ----------------------------------------------
    if(first_image_flag){
        // First frame only initializes timing state; nothing to track yet.
        first_image_flag = false;
        first_image_time = image->header.stamp.toSec();
        last_image_time = image->header.stamp.toSec();
        return;
    }
    // A gap over 1 s or a timestamp going backwards means a broken stream:
    // reset the tracker and tell downstream nodes to restart.
    if(image->header.stamp.toSec() - last_image_time > 1.0 || image->header.stamp.toSec() < last_image_time){
        LOG(INFO) << "image discontinue, RESET!!!" << std::endl;
        first_image_flag = true;
        first_image_time = 0;
        last_image_time = 0;
        pub_count = 1;
        std_msgs::Bool restart_flag;
        restart_flag.data = true;
        pub_restart.publish(restart_flag);
        return;
    }
    last_image_time = image->header.stamp.toSec();

    // --- 2. publication rate control (target ~10 Hz) ----------------------
    if(std::round(1.0 * pub_count / (image->header.stamp.toSec() - first_image_time)) <= 10){
        PUB_THIS_FRAME = true;
        // Restart the averaging window once the measured rate is within 1%
        // of the target, so the long-run average cannot drift.
        if(std::abs(1.0 * pub_count / (image->header.stamp.toSec() - first_image_time) - 10) < 10 * 0.01){
            first_image_time = image->header.stamp.toSec();
            pub_count = 0;
        }
    }
    else{
        PUB_THIS_FRAME = false;
    }

    // --- 3a. convert ROS image to mono8 -----------------------------------
    cv_bridge::CvImageConstPtr ptr;
    if (image->encoding == "8UC1")
    {
        // Some drivers publish "8UC1" instead of "mono8"; rewrap the data
        // with the encoding cv_bridge expects.
        sensor_msgs::Image img;
        img.header = image->header;
        img.height = image->height;
        img.width = image->width;
        img.is_bigendian = image->is_bigendian;
        img.step = image->step;
        img.data = image->data;
        img.encoding = "mono8";
        ptr = cv_bridge::toCvCopy(img, sensor_msgs::image_encodings::MONO8);
    }
    else
        ptr = cv_bridge::toCvCopy(image, sensor_msgs::image_encodings::MONO8);

    curTime = image->header.stamp.toSec();

    // --- 3b. contrast-limited histogram equalization ----------------------
    cv::Mat img;
    cv::Ptr<cv::CLAHE> clahe = cv::createCLAHE(3.0, cv::Size(8, 8));
    clahe->apply(ptr->image.rowRange(0, ROW), img);

    if(tempImg.empty()){
        preImg = curImg = tempImg = img;
    }
    else{
        tempImg = img;
    }
    tempPoints.clear();

    // --- 3c. LK optical-flow tracking of existing features ----------------
    if(curPoints.size() > 0){
        std::vector<unsigned char> status;
        std::vector<float> err;
        cv::calcOpticalFlowPyrLK(curImg, tempImg, curPoints, tempPoints, status,  err, cv::Size(21, 21), 3);
        // Reject points that were tracked outside the image border.
        for(size_t i = 0; i < tempPoints.size(); i++){
            if(status[i] && !inBorder(tempPoints[i])){
                status[i] = 0;
            }
        }
        reduceVector(prePoints, status);
        reduceVector(curPoints, status);
        reduceVector(tempPoints, status);
        reduceVector(ids, status);
        reduceVector(UndistortedCurPoints, status);
        reduceVector(track_cnts, status);
    }

    // Every surviving feature has now been tracked one frame longer.
    for(auto &n : track_cnts){
        n++;
    }

    if(PUB_THIS_FRAME){
        // --- 3d. RANSAC outlier rejection on the fundamental matrix -------
        if(tempPoints.size() >= 8){
            // Re-project both point sets through an ideal pinhole camera so
            // the fundamental-matrix threshold (1 px) is distortion-free.
            std::vector<cv::Point2f> undis_curPoints(curPoints.size()), undis_tempPoints(tempPoints.size());
            for(size_t i = 0; i < curPoints.size(); i++){
                Eigen::Vector3d tempP;
                camera->liftProjective(Eigen::Vector2d(curPoints[i].x, curPoints[i].y), tempP);
                tempP.x() = FOCAL_LENGTH * tempP.x() / tempP.z()  + COL / 2.0;
                tempP.y() = FOCAL_LENGTH * tempP.y() / tempP.z()  + ROW / 2.0;
                undis_curPoints[i] = cv::Point2f(tempP.x(), tempP.y());

                camera->liftProjective(Eigen::Vector2d(tempPoints[i].x, tempPoints[i].y), tempP);
                tempP.x() = FOCAL_LENGTH * tempP.x() / tempP.z()  + COL / 2.0;
                tempP.y() = FOCAL_LENGTH * tempP.y() / tempP.z()  + ROW / 2.0;
                undis_tempPoints[i] = cv::Point2f(tempP.x(), tempP.y());
            }

            std::vector<unsigned char> status;
            cv::findFundamentalMat(undis_curPoints, undis_tempPoints,  cv::FM_RANSAC, 1.0, 0.99, status);
            reduceVector(prePoints, status);
            reduceVector(curPoints, status);
            reduceVector(tempPoints, status);
            reduceVector(UndistortedCurPoints, status);
            reduceVector(ids, status);
            reduceVector(track_cnts, status);
        }

        // --- 3e. spatial mask preferring long-tracked features ------------
        mask = cv::Mat(ROW, COL, CV_8UC1, cv::Scalar(255));
        std::vector<std::pair<int, std::pair<cv::Point2f, int>>> cnts_ids_points;
        for(size_t i = 0; i < tempPoints.size(); i++){
            cnts_ids_points.push_back(std::make_pair(track_cnts[i], std::make_pair(tempPoints[i], ids[i])));
        }
        std::sort(cnts_ids_points.begin(), cnts_ids_points.end(),
                  [](const std::pair<int, std::pair<cv::Point2f, int>>& a,
                     const std::pair<int, std::pair<cv::Point2f, int>>& b){
                      return a.first > b.first;
                  });
        tempPoints.clear();
        track_cnts.clear();
        ids.clear();
        for(auto &it : cnts_ids_points){
            if(mask.at<uchar>(it.second.first) == 255){
                track_cnts.push_back(it.first);
                ids.push_back(it.second.second);
                tempPoints.push_back(it.second.first);
                // Each accepted feature blocks a 30-px radius around it.
                cv::circle(mask, it.second.first, 30, 0, -1);
            }
        }

        // --- 3f. top up to 150 features in the unmasked area --------------
        int num = 150 - static_cast<int>(tempPoints.size());
        if(num > 0){
            cv::goodFeaturesToTrack(tempImg, nPoints, num, 0.01, 30, mask);
        }else{
            nPoints.clear();
        }

        // New detections get id -1 (replaced by a fresh id below) and a
        // track count of 1.
        for(auto& point : nPoints){
            tempPoints.push_back(point);
            ids.push_back(-1);
            track_cnts.push_back(1);
        }
    }

    // --- bookkeeping: shift frames/points one step ------------------------
    preImg = curImg;
    prePoints = curPoints;
    UndistortedPrePoints = UndistortedCurPoints;
    curImg = tempImg;
    curPoints = tempPoints;

    // --- 4. normalized coordinates and per-feature velocities -------------
    UndistortedCurPoints.clear();
    UndistortedCurPointsMap.clear();
    for(size_t i = 0; i < curPoints.size(); i++){
        Eigen::Vector2d a(curPoints[i].x, curPoints[i].y);
        Eigen::Vector3d b;
        camera->liftProjective(a, b);
        UndistortedCurPoints.push_back(cv::Point2f(b.x()/b.z(), b.y()/b.z()));
        UndistortedCurPointsMap.insert(std::make_pair(ids[i], cv::Point2f(b.x()/b.z(), b.y()/b.z())));
    }
    // pointsVel[i] is parallel to curPoints; zero for new/unmatched features.
    pointsVel.clear();
    if(!UndistortedPrePointsMap.empty()){
        double dt = curTime - preTime;
        for(size_t i = 0; i < UndistortedCurPoints.size(); i++){
            if(ids[i] != -1){
                auto it = UndistortedPrePointsMap.find(ids[i]);
                if(it != UndistortedPrePointsMap.end()){
                    double vx = (UndistortedCurPoints[i].x - it->second.x) / dt;
                    double vy = (UndistortedCurPoints[i].y - it->second.y) / dt;
                    pointsVel.push_back(cv::Point2f(vx, vy));
                }
                else{
                    pointsVel.push_back(cv::Point2f(0, 0));
                }
            }
            else{
                pointsVel.push_back(cv::Point2f(0, 0));
            }
        }
    }
    else{
        // BUGFIX: previously this branch appended without clearing first,
        // so stale velocities could accumulate after a tracker reset.
        for(size_t i = 0; i < curPoints.size(); i++){
            pointsVel.push_back(cv::Point2f(0, 0));
        }
    }

    UndistortedPrePointsMap = UndistortedCurPointsMap;
    preTime = curTime;

    // Hand out fresh ids to newly detected features.
    for(size_t i = 0; i < ids.size(); i++){
        if(ids[i] == -1){
            ids[i] = id_index++;
        }
    }

    // --- 5. publish tracked features --------------------------------------
    if(PUB_THIS_FRAME){
        pub_count++;
        sensor_msgs::PointCloud feature_points;
        sensor_msgs::ChannelFloat32 id_of_point;
        sensor_msgs::ChannelFloat32 u_of_point;
        sensor_msgs::ChannelFloat32 v_of_point;
        sensor_msgs::ChannelFloat32 velocity_x_of_point;
        sensor_msgs::ChannelFloat32 velocity_y_of_point;

        feature_points.header = image->header;
        feature_points.header.frame_id = "world";

        // Only publish features tracked for more than one frame; brand-new
        // detections have no velocity yet.
        for(size_t i = 0; i < ids.size(); i++){
            if(track_cnts[i] > 1){
                geometry_msgs::Point32 p;
                p.x = UndistortedCurPoints[i].x;
                p.y = UndistortedCurPoints[i].y;
                p.z = 1;
                feature_points.points.push_back(p);
                id_of_point.values.push_back(ids[i]);
                u_of_point.values.push_back(curPoints[i].x);
                v_of_point.values.push_back(curPoints[i].y);
                velocity_x_of_point.values.push_back(pointsVel[i].x);
                // BUGFIX: vy previously went into the vx channel.
                velocity_y_of_point.values.push_back(pointsVel[i].y);
            }
        }
        feature_points.channels.push_back(id_of_point);
        feature_points.channels.push_back(u_of_point);
        feature_points.channels.push_back(v_of_point);
        feature_points.channels.push_back(velocity_x_of_point);
        // BUGFIX: the vx channel was previously pushed twice and the vy
        // channel was never published.
        feature_points.channels.push_back(velocity_y_of_point);
        // Skip the very first publishable frame (velocities undefined).
        if(!init_pub){
            init_pub = 1;
        }else{
            LOG(INFO) << "feature_points.points.size() " << feature_points.points.size() << std::endl;
            pub_match.publish(feature_points);
        }
    }
}

// Entry point: configure glog, load the camera model and image geometry
// from the YAML config, wire up ROS topics, and hand control to ros::spin.
int main(int argc, char* argv[]){
    // Logging goes to the fixed log directory and is mirrored to stderr.
    FLAGS_log_dir = "/home/ubuntu/Log/front";
    google::InitGoogleLogging("vins_log");
    FLAGS_alsologtostderr = true;

    ros::init(argc, argv, "feature_track");
    ros::NodeHandle n("~");

    // Camera intrinsics and image size come from the same YAML file.
    const std::string config_path = "/home/ubuntu/WorkSpace/VSlam/catkin_test/src/config/config.yaml";
    camera = camodocal::CameraFactory::instance()->generateCameraFromYamlFile(config_path);
    cv::FileStorage fs(config_path, cv::FileStorage::READ);
    ROW = fs["image_height"];
    COL = fs["image_width"];

    ros::Subscriber image_sub = n.subscribe("/cam0/image_raw", 100, image_callback);
    feature_image_pub = n.advertise<sensor_msgs::Image>("feature_image", 1000);
    pub_match = n.advertise<sensor_msgs::PointCloud>("feature", 1000);
    pub_restart = n.advertise<std_msgs::Bool>("restart", 1000);

    ros::spin();
    return 0;
}

