#include "ros/ros.h"
#include "ros/package.h"

#include <std_msgs/String.h>
#include <std_msgs/Empty.h>
#include <geometry_msgs/PoseStamped.h>
#include <geometry_msgs/TwistStamped.h>
#include <geometry_msgs/Point.h>
#include <geometry_msgs/Pose.h>
#include <geometry_msgs/PoseWithCovarianceStamped.h>

#include <algorithm>
#include <cmath>
#include <fstream>
#include <iostream>
#include <queue>
#include <sstream>
#include <vector>

#include <opencv2/imgcodecs.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/videoio.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/video.hpp>
#include <opencv2/video/background_segm.hpp>
#include "opencv2/bgsegm.hpp"
#include <opencv2/viz.hpp>
#include <opencv2/core/utility.hpp>
#include <opencv2/core.hpp>
#include <opencv2/calib3d.hpp>


#define PENGZHANG 1
#define THETA_X (28*3.141592653589/180)
#define THETA_Y 0
#define THETA_Z 3.141592653589

//#define CAP_INDEX 1
#define CAP_INDEX "/home/ljy/CLionProjects/VideoFiles/WIN_20190326_21_32_49_Pro.mp4"
//"/media/lijiayi/OS/Users/69067/Pictures/Camera Roll/WIN_20190326_21_32_49_Pro.mp4"

using namespace cv;
using namespace std;
using namespace ros;

// CSV output streams: ViconFile logs ground-truth motion-capture positions,
// CameraFile logs the camera/PnP-derived pose and velocity estimates.
ofstream ViconFile, CameraFile;
// Wall-clock reference; all CSV timestamps are seconds since this instant.
Time start_time;

// A 2D image point augmented with the bookkeeping needed to order the four
// detected marker blobs: its polar angle around the blob centroid and the
// distance to the next point in that angular order.
// Ordering operators compare by angle_with_center so std::sort arranges the
// points counter-clockwise around the center.
class myImgPoint : public Point2f {
public:
    // FIX: members were left uninitialized by the constructor; value-init
    // them so copies made before the angles/lengths are assigned (e.g. via
    // vector::assign) hold deterministic values.
    float angle_with_center{0.0f};
    float length_to_next_point{0.0f};

    myImgPoint(float x, float y) : Point2f(x, y) {}

    bool operator<(const myImgPoint &myImgPoint1) const {
        return angle_with_center < myImgPoint1.angle_with_center;
    }

    bool operator>(const myImgPoint &myImgPoint1) const {
        return angle_with_center > myImgPoint1.angle_with_center;
    }
};

// Euclidean distance between two image points.
// std::hypot replaces the hand-rolled sqrt(pow(dx,2)+pow(dy,2)*1.0): it is
// the idiomatic form and avoids overflow/underflow of the squared terms.
float dist_btn_pts(Point2f p1, Point2f p2) {
    return std::hypot(p1.x - p2.x, p1.y - p2.y);
}

// ROS callback: append one CSV row per Vicon pose message in the form
// "elapsed_seconds,x,y,z" to ViconFile (elapsed since global start_time).
void ViconPosCallback(const geometry_msgs::PoseStamped::ConstPtr &msg) {
    const double elapsed = (Time::now() - start_time).toSec();
    const geometry_msgs::Point &p = msg->pose.position;
    ViconFile << elapsed << "," << p.x << "," << p.y << "," << p.z << endl;
}

int main(int argc, char *argv[]) {
    init(argc, argv, "drone_pnp_node");
    NodeHandle n;
//    Subscriber pos_sub=n.subscribe("/mocap/pose",10,ViconPosCallback);
    Publisher pos_pub = n.advertise<geometry_msgs::PoseStamped>("/cam/pose", 10);
    Publisher vel_pub = n.advertise<geometry_msgs::TwistStamped>("/cam/vel", 10);
    Publisher pos_raw_pub = n.advertise<geometry_msgs::PoseStamped>("/cam_raw/pose", 10);
    Publisher vel_raw_pub = n.advertise<geometry_msgs::TwistStamped>("/cam_raw/vel", 10);

    Rate loop_rate(30);
    int count = 0;
    int count2 = 0;

    String algo = "MOG2";


    //相机参数
    Mat cameraMatrix;
    cameraMatrix = Mat::zeros(3, 3, CV_64F);
    const vector<double> distCoeffs{0.03819613038058793, -0.06112508916119349, -5.515361356058103e-05,
                                    0.003470678820019871, 0};

    cameraMatrix.at<double>(0, 0) = 411.1663471332097;
    cameraMatrix.at<double>(0, 2) = 331.9546551659722;
    cameraMatrix.at<double>(1, 1) = 411.4132376251969;
    cameraMatrix.at<double>(1, 2) = 268.2985302141446;
    cameraMatrix.at<double>(2, 2) = 1;

    //! [create]
    //create Background Subtractor objects
    Ptr<BackgroundSubtractor> pBackSub;
    if (algo == "MOG2")
        pBackSub = createBackgroundSubtractorMOG2(500, 400, false);
    else if (algo == "KNN")
        pBackSub = createBackgroundSubtractorKNN(300, 400, false);
    else if (algo == "MOG")
        pBackSub = bgsegm::createBackgroundSubtractorMOG(500, 5, 0.7, 0); //MOG approach
    //! [create]

    //! [capture]
    VideoCapture capture(CAP_INDEX);
    capture.set(CAP_PROP_AUTO_EXPOSURE, 0.25);
    capture.set(CAP_PROP_EXPOSURE, 0.000001);
    if (!capture.isOpened()) {
        //error in opening the video input
        cerr << "Unable to open: " << CAP_INDEX << endl;
        return 0;
    }
    //! [capture]

    //region 3D virtualization
    viz::Viz3d myWindow("Coordinate Frame");
    myWindow.showWidget("Coordinate Widget", viz::WCoordinateSystem());
    viz::WCube cube_widget1(Point3f(-0.0663, -0.03966, -0.01), Point3f(0.0663, 0, 0.01), false, viz::Color::blue());
    viz::WCube cube_widget2(Point3f(-0.03135, 0, -0.01), Point3f(0.03135, 0.06132, 0.01), false, viz::Color::blue());
    myWindow.showWidget("Cube Widget1", cube_widget1);
    myWindow.showWidget("Cube Widget2", cube_widget2);
    viz::WLine line_widget(Point3d(0, 0, 3), Point3d(0, 0, 0), viz::Color::red());
    myWindow.showWidget("line", line_widget);
    line_widget.setRenderingProperty(viz::LINE_WIDTH, 1);
    myWindow.spinOnce(1, true);

//endregion


    Mat frame, fgMask, frame_masked, binImg, hsvImg, img, valueImg, result, prevValueImg;
    img = Mat::zeros(480, 640, CV_8UC3);
    valueImg = Mat::zeros(480, 640, CV_8UC1);
    result = Mat::zeros(1000, 1000, CV_8UC1);

    std::string pkg_path = ros::package::getPath("drone_pnp");

    CameraFile.open(pkg_path + "/data/cameraresult.csv");

//    Time last_time, current_time;
//    current_time=Time::now();
//    Vec3f last_pos, current_pos={0,0,0};

    Vec3f unfiltered_pos;
    queue<Vec3f> sliding_win;
    Vec3f sum_pos(0, 0, 0);
    geometry_msgs::PoseStamped msg_pos, msg_pos_old, msg_pos_raw, msg_pos_raw_old;
    start_time = Time::now();

    while (ros::ok()) {
//        geometry_msgs::PoseStamped msg_pos;
        geometry_msgs::TwistStamped msg_vel;


        capture >> frame;
        if (frame.empty())
            break;
        undistort(frame, img, cameraMatrix, distCoeffs);
        imshow("undistorted", img);

        cvtColor(img, hsvImg, CV_BGR2HSV);

        for (int i = 0; i < img.rows; i++) {
            for (int j = 0; j < img.cols; j++) {
                const int hi = i * img.cols * 3 + j * 3,
                        gi = i * img.cols + j;
                valueImg.data[gi] = hsvImg.data[hi + 2];
            }
        }

        //! [apply]
        //update the background model
        pBackSub->apply(img, fgMask);
        //! [apply]
        frame_masked = Mat::zeros(frame.size(), frame.type());
        binImg = frame_masked.clone();
        valueImg.copyTo(frame_masked, fgMask);
        threshold(frame_masked, binImg, 200, 255, THRESH_BINARY);



        //! [show]
        //show the current frame and the fg masks
//        imshow("Frame", frame);
//        imshow("FG Mask", fgMask);
        imshow("frame_masked", frame_masked);
//        imshow("bin",binImg);
        //! [show]

        //region 膨胀
        Mat dilatedImg;
        if (PENGZHANG) {
            int structElementSize = 1;
            Mat structElement = getStructuringElement(MORPH_ELLIPSE,
                                                      Size(2 * structElementSize + 1, 2 * structElementSize + 1),
                                                      Point(structElementSize, structElementSize));
            dilate(binImg, dilatedImg, structElement);
        } else {
            dilatedImg = binImg.clone();
        }

//        imshow("Dilated", dilatedImg);
        //endregion

        //region 寻找轮廓
        vector<vector<Point>> contours, contours_org;

        vector<Vec4i> hierarchy;
        findContours(dilatedImg, contours_org, hierarchy, RETR_CCOMP, CHAIN_APPROX_NONE);
//        cout<< "["<< count <<"] contours_count: " << contours_org.size() << "  ";
//        double i1=atan2(-1,-1)/3.1415*180, i2=atan2(1,-1)/3.1415*180;
//        cout << i1 <<",,," << i2;


        //region find max_Area
        double maxArea;
        vector<Point> temp;
        for (auto i = contours_org.begin(); i != contours_org.end(); i++) {
            maxArea = contourArea(*i);
            for (auto j = i + 1; j != contours_org.end(); j++) {
                if (contourArea(*j) > maxArea) {
                    maxArea = contourArea(*j);
                    temp = *i;
                    (*i) = (*j);
                    (*j) = temp;
                }
            }
        }
        if (contours_org.size() >= 4)
            for (int i = 0; i < 4; i++) contours.push_back(contours_org[i]);
        //endregion


        Mat contoursImg = Mat::zeros(img.size(), CV_8UC1);
//          rectImg = depth_img.clone();

//            drawContours(contoursImg, contours, -1, Scalar(255), 3, 8, hierarchy);

//endregion


        vector<myImgPoint> imgPts;
        vector<Point2f> imgPts2;


        for (auto itContour = contours.begin(); itContour != contours.end(); itContour++) {
//            if(contourArea(*itContour)<40)
//            {
//                continue;
//            }

            Moments m;
            m = moments(*itContour, true);
            myImgPoint p = myImgPoint((m.m10 / m.m00), (m.m01 / m.m00));
            imgPts.push_back(p);

            for (auto itContourPt = (*itContour).begin(); itContourPt != (*itContour).end(); itContourPt++) {
                contoursImg.at<uchar>(Point((*itContourPt).x, (*itContourPt).y)) = 255;
            }
            contoursImg.at<uchar>(p) = 255;
        }

//        contoursImg.at<uchar>(Point2f(50,200))=255;
        imshow("contours", contoursImg);


        if (imgPts.size() == 4 && count >= 10) {
            Point2f centerP;
            imgPts2.clear();
            imgPts2.assign(4, myImgPoint(0, 0));
            centerP = Point2f(((imgPts[0].x + imgPts[1].x + imgPts[2].x + imgPts[3].x) / 4),
                              ((imgPts[0].y + imgPts[1].y + imgPts[2].y + imgPts[3].y) / 4));
            //与中心点形成的角度
            for (int i = 0; i < 4; i++) {
                imgPts[i].angle_with_center = atan2(imgPts[i].y - centerP.y, imgPts[i].x - centerP.x);
            }
            //sort according to angle with center Point
            sort(imgPts.begin(), imgPts.end());
            imgPts[0].length_to_next_point = dist_btn_pts(imgPts[0], imgPts[1]);
            imgPts[1].length_to_next_point = dist_btn_pts(imgPts[1], imgPts[2]);
            imgPts[2].length_to_next_point = dist_btn_pts(imgPts[2], imgPts[3]);
            imgPts[3].length_to_next_point = dist_btn_pts(imgPts[3], imgPts[0]);
            int Index_maxL = 0;
            for (int i = 1; i < 4; i++) {
                if (imgPts[i].length_to_next_point > imgPts[Index_maxL].length_to_next_point)
                    Index_maxL = i;
            }
            imgPts2[0] = imgPts[Index_maxL];
            imgPts2[1] = imgPts[(Index_maxL + 1) % 4];
            imgPts2[2] = imgPts[(Index_maxL + 2) % 4];
            imgPts2[3] = imgPts[(Index_maxL + 3) % 4];


            vector<Point3f> objectPts;
            //cracyflie
//            objectPts.emplace_back(-0.0663, -0.03966, 0);
//            objectPts.emplace_back(0.06526, -0.03966, 0);
//            objectPts.emplace_back(0.03135, 0.06132, 0);
//            objectPts.emplace_back(-0.03135, 0.06132, 0);

//            //miniflyold
//            objectPts.emplace_back(-0.072, -0.050, 0);
//            objectPts.emplace_back(0.072, -0.050, 0);
//            objectPts.emplace_back(0.036, 0.051, 0);
//            objectPts.emplace_back(-0.039, 0.048, 0);
            //minifly blue
            objectPts.emplace_back(-0.071, -0.054, 0);
            objectPts.emplace_back(0.071, -0.054, 0);
            objectPts.emplace_back(0.029, 0.054, 0);
            objectPts.emplace_back(-0.029, 0.053, 0);


            Mat rvec, tvec;

            solvePnP(objectPts, imgPts2, cameraMatrix, distCoeffs, rvec, tvec);


            Point ppp;
            ppp = Point(int(tvec.at<double>(0, 0) * 50 + 300), int(tvec.at<double>(0, 1) * 50 + 300));
            if (ppp.x >= 0 && ppp.x <= result.size().width && ppp.y >= 0 && ppp.y <= result.size().height)
                result.at<uchar>(ppp) = 255;

//            imshow("results", result);

            Vec3d rvec1 = Vec3d(rvec.at<double>(0, 0), rvec.at<double>(1, 0), rvec.at<double>(2, 0));
            Vec3d tvec1 = Vec3d(tvec.at<double>(0, 0), tvec.at<double>(1, 0), tvec.at<double>(2, 0));
            Affine3f pose(rvec1, tvec1);

            msg_pos_raw.pose.position.x = tvec1[0];
            msg_pos_raw.pose.position.y = tvec1[1];
            msg_pos_raw.pose.position.z = tvec1[2];
            pos_raw_pub.publish(msg_pos_raw);

//            //相机坐标系修正
////            1.00412352941177	0.614449098039216	0.299829411764706
////            -0.523598775598299	-0.0349065850398866	1.53588974175501
//            Matx33f R_x(1, 0, 0,
//                        0, cos(THETA_X), -sin(THETA_X),
//                        0, sin(THETA_X), cos(THETA_X));
//            Matx33f R_y(cos(THETA_Y), 0, sin(THETA_Y),
//                        0, 1, 0,
//                        -sin(THETA_Y), 0, cos(THETA_Y));
//            Matx33f R_z(cos(THETA_Z), -sin(THETA_Z), 0,
//                        sin(THETA_Z), cos(THETA_Z), 0,
//                        0, 0, 1);
//            double ag_rp = 3.141592653589/2;
//            Matx33f R_repair(cos(ag_rp), -sin(ag_rp), 0,
//                        sin(ag_rp), cos(ag_rp), 0,
//                        0, 0, 1);
//            Matx33f R = R_z * R_x * R_repair;
//            Vec3f tvec_cam = {0, -1, 0.3};
//            Affine3f pose_cam(R, tvec_cam);
//            Affine3f ppposee = pose.concatenate(pose_cam.inv());


            //read transformation matrix from file.
            Matx44d trans_cam;
            ifstream mtx;
            mtx.open(pkg_path + "/data/transformation_matrix.txt");
            if (mtx.is_open()) {
                mtx >> trans_cam.val[0] >> trans_cam.val[1] >> trans_cam.val[2] >> trans_cam.val[3]
                    >> trans_cam.val[4] >> trans_cam.val[5] >> trans_cam.val[6] >> trans_cam.val[7]
                    >> trans_cam.val[8] >> trans_cam.val[9] >> trans_cam.val[10] >> trans_cam.val[11]
                    >> trans_cam.val[12] >> trans_cam.val[13] >> trans_cam.val[14] >> trans_cam.val[15];
                mtx.close();
            } else
                ROS_ERROR("Can't read matrix file.");
            Affine3d pose_cam(trans_cam);
            cout << pose_cam.matrix;
            Affine3f ppposee = pose.concatenate(pose_cam);


            unfiltered_pos = ppposee.translation();
            sliding_win.push(unfiltered_pos);
            sum_pos[0] += unfiltered_pos[0];
            sum_pos[1] += unfiltered_pos[1];
            sum_pos[2] += unfiltered_pos[2];


            if (sliding_win.size() >= 3) {
                count2++;
                Vec3f t_pos;
                t_pos = sliding_win.front();
                sliding_win.pop();
                sum_pos[0] -= t_pos[0];
                sum_pos[1] -= t_pos[1];
                sum_pos[2] -= t_pos[2];

                msg_pos_old = msg_pos;
                Vec3f current_pos(sum_pos[0] / sliding_win.size(), sum_pos[1] / sliding_win.size(),
                                  sum_pos[2] / sliding_win.size());
                Affine3f ppposee2(ppposee.rotation(), current_pos);
                myWindow.setWidgetPose("Cube Widget1", ppposee2);
                myWindow.setWidgetPose("Cube Widget2", ppposee2);


//            last_time=current_time;
//            current_time=Time::now();
//            Duration dur= current_time-last_time;
//            last_pos=current_pos;



                msg_pos.pose.position.x = current_pos[0];
                msg_pos.pose.position.y = current_pos[1];
                msg_pos.pose.position.z = current_pos[2];

                msg_pos.header.stamp = Time::now();
                msg_vel.header.stamp = Time::now();

                cout << msg_pos.pose.position << endl;

                Duration dur = msg_pos.header.stamp - msg_pos_old.header.stamp;
                msg_vel.twist.linear.x = (current_pos[0] - msg_pos_old.pose.position.x) / dur.toSec();
                msg_vel.twist.linear.y = (current_pos[1] - msg_pos_old.pose.position.y) / dur.toSec();
                msg_vel.twist.linear.z = (current_pos[2] - msg_pos_old.pose.position.z) / dur.toSec();

//                cout << "Vel: " << msg_vel.twist.linear <<endl;
                Duration time_from_start = msg_pos.header.stamp - start_time;

                CameraFile << time_from_start.toSec() << "," << msg_pos.pose.position.x << ","
                           << msg_pos.pose.position.y << "," << msg_pos.pose.position.z;
                CameraFile << "," << msg_vel.twist.linear.x << "," << msg_vel.twist.linear.y << ","
                           << msg_vel.twist.linear.z << endl;

//                if(count2==2){
                pos_pub.publish(msg_pos);
                vel_pub.publish(msg_vel);
//                    count2=0;
//                }

            }

        } else {
            pos_pub.publish(msg_pos);
            msg_vel.twist.linear.x = 0;
            msg_vel.twist.linear.y = 0;
            msg_vel.twist.linear.z = 0;
            vel_pub.publish(msg_vel);
        }

        waitKey(1);



//        std_msgs::String msg;
//        std::stringstream ss;
//        ss << "hello world " << count;
//        msg.data = ss.str();
//        ROS_INFO("%s", msg.data.c_str());
//        chatter_pub.publish(msg);

        myWindow.spinOnce(1, true);

        spinOnce();
        ++count;

        loop_rate.sleep();
    }

    CameraFile.close();
    return 0;


}
