#include <ros/ros.h>
#include <ros/package.h>
#include <vector>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <stdlib.h>
#include <pthread.h>
#include <string>
#include <opencv2/opencv.hpp>

#include <geometry_msgs/PoseStamped.h>
#include <mavros_msgs/CommandBool.h>
#include <mavros_msgs/SetMode.h>
#include <mavros_msgs/State.h>
#include <tf/tf.h>


// Global publishers, advertised in main().
// pose_pub feeds /mavros/vision_pose/pose (consumed by MAVROS vision fusion).
// pose_raw_pub is advertised on /cam_raw/pose but never published to in this
// file — presumably kept for debugging; verify before removing.
ros::Publisher pose_pub;
ros::Publisher pose_raw_pub;

// A 2-D image point tagged with an integer label (the index of the marker
// contour whose centroid it is).
class PointC
{
  public:
    int number;  // label / contour index
    float x;     // image x coordinate (pixels)
    float y;     // image y coordinate (pixels)

    PointC(int a, float xPos, float yPos)
        : number(a), x(xPos), y(yPos) {}
};


// A plain 2-D vector/point used for the cross-product tests in Conv().
class PointB
{
  public:
    float x;  // x component
    float y;  // y component

    PointB(float xPos, float yPos)
        : x(xPos), y(yPos) {}
};


// Classifies where the directed edge p1->p2 lies relative to the two
// reference points p3 and p4, using 2-D cross products:
//   2 — both p3 and p4 are on the non-negative side of p1->p2
//   3 — p3 and p4 are on opposite sides (cross products have opposite signs)
//   4 — both are on the non-positive side
// The original version could fall off the end without returning (undefined
// behavior); the remaining case after the first two tests is "both strictly
// negative", so 4 is now returned unconditionally at the end.
float Conv(PointC p1, PointC p2, PointC p3, PointC p4)
{
    PointB p12 = PointB(p2.x - p1.x, p2.y - p1.y);
    PointB p31 = PointB(p1.x - p3.x, p1.y - p3.y);
    PointB p41 = PointB(p1.x - p4.x, p1.y - p4.y);

    // z components of the cross products p12 x p31 and p12 x p41.
    const float cross3 = p12.x * p31.y - p12.y * p31.x;
    const float cross4 = p12.x * p41.y - p12.y * p41.x;

    if (cross3 >= 0 && cross4 >= 0)
        return 2;
    if (cross3 * cross4 <= 0)
        return 3;
    // Only remaining possibility: both cross products are strictly negative.
    return 4;
}

/// Detects the marker's 7-contour signature in a grayscale frame, estimates
/// the pose with solvePnP, transforms it into the world frame, and publishes
/// it on pose_pub as a geometry_msgs::PoseStamped.
/// @param image  8-bit single-channel (grayscale) frame; read-only here.
/// @return true when a pose was computed and published, false otherwise.
///         (The original was declared bool but never returned a value — UB.)
bool calculatePose(cv::Mat& image)
{
    // Intrinsic camera parameters from offline calibration.
    // Built once instead of being re-zeroed and re-filled on every frame.
    static const cv::Mat cameraMatrix = (cv::Mat_<double>(3, 3) <<
        1684.659238683180, 0.0,               656.480920681900,
        0.0,               1684.163794939990, 535.952107137916,
        0.0,               0.0,               1.0);
    static const std::vector<double> distcoeffs{-0.0622484869016584, 0.116141620134878, 0.0, 0.0, 0.0};
    static cv::Mat rvec, tvec;

    // Binarisation: suppress dark background, then Otsu on the remainder.
    cv::Mat binImg(image.size(), CV_8UC1);
    cv::threshold(image, binImg, 100, 255, cv::THRESH_TOZERO);
    cv::threshold(binImg, binImg, 250, 255, cv::THRESH_BINARY + cv::THRESH_OTSU);

    std::vector<std::vector<cv::Point>> contours_org;
    std::vector<cv::Vec4i> hierarchy;
    cv::findContours(binImg, contours_org, hierarchy, cv::RETR_TREE, cv::CHAIN_APPROX_SIMPLE);

    cv::Mat contoursImg = cv::Mat::zeros(binImg.size(), CV_8UC3);
    cv::drawContours(contoursImg, contours_org, -1, cv::Scalar(0, 0, 255), 1, 8, hierarchy);

    // Walk the contour hierarchy looking for the marker's 7-contour
    // signature. hierarchy[i] = {next, previous, first_child, parent}.
    // Two alternative tree layouts of the same marker are accepted.
    std::vector<int> Cont;
    for (std::size_t i = 0; i < contours_org.size(); i++)
    {
        if (hierarchy[i][0] != -1 && hierarchy[i][1] == -1 && hierarchy[i][2] != -1 &&
            hierarchy[i][3] != -1)
        {
            int pre_contour0 = hierarchy[i][3];              // parent
            int pre_contour1 = static_cast<int>(i);
            int pre_contour2 = hierarchy[pre_contour1][2];   // first child
            int pre_contour3 = hierarchy[pre_contour1][0];   // next sibling

            if (hierarchy[pre_contour3][0] == -1 && hierarchy[pre_contour3][2] != -1)
            {
                int pre_contour4 = hierarchy[pre_contour3][2];

                // Layout A: the remaining sibling chain hangs off pre_contour3.
                if (hierarchy[pre_contour4][2] == -1 && hierarchy[pre_contour4][0] != -1)
                {
                    int pre_contour5 = hierarchy[pre_contour4][0];

                    if (hierarchy[pre_contour5][2] == -1 && hierarchy[pre_contour5][0] != -1)
                    {
                        int pre_contour6 = hierarchy[pre_contour5][0];

                        if (hierarchy[pre_contour6][0] == -1 && hierarchy[pre_contour6][2] == -1)
                        {
                            Cont = {pre_contour0, pre_contour1, pre_contour2,
                                    pre_contour3, pre_contour4, pre_contour5, pre_contour6};
                        }
                    }
                }
                // Layout B: the chain hangs off pre_contour2 instead.
                else if (hierarchy[pre_contour2][0] != -1 && hierarchy[pre_contour2][2] == -1)
                {
                    pre_contour4 = hierarchy[pre_contour2][0];

                    if (hierarchy[pre_contour4][0] != -1 && hierarchy[pre_contour4][2] == -1)
                    {
                        int pre_contour5 = hierarchy[pre_contour4][0];

                        if (hierarchy[pre_contour5][0] == -1 && hierarchy[pre_contour5][2] == -1)
                        {
                            if (hierarchy[pre_contour3][2] != -1 && hierarchy[pre_contour3][0] == -1)
                            {
                                int pre_contour6 = hierarchy[pre_contour3][2];
                                Cont = {pre_contour0, pre_contour3, pre_contour6,
                                        pre_contour1, pre_contour2, pre_contour4, pre_contour5};
                            }
                        }
                    }
                }
            }
        }
    }

    bool published = false;

    // N stays 2 only if all 7 centroids are valid (non-degenerate moments).
    int N = 2;
    if (!Cont.empty())
    {
        std::vector<PointC> myPoints;
        for (int i = 0; i < 7; i++)
        {
            cv::Moments m = cv::moments(contours_org[Cont[i]], true);
            if (m.m00 == 0)
            {
                N = 1;
                std::cout << "can't find the corner" << std::endl;
                continue;
            }
            // Centroid of the contour, tagged with its position index.
            myPoints.push_back(PointC(i, (m.m10 / m.m00), (m.m01 / m.m00)));
        }

        if (N == 2)
        {
            // Order the three outer centres around the anchor (myPoints[2])
            // using the orientation codes returned by Conv().
            std::vector<PointC> centers;
            centers.push_back(myPoints[2]);

            int m = Conv(myPoints[2], myPoints[4], myPoints[5], myPoints[6]);
            int n = Conv(myPoints[2], myPoints[5], myPoints[4], myPoints[6]);
            int k = Conv(myPoints[2], myPoints[6], myPoints[4], myPoints[5]);

            if (myPoints[0].x != myPoints[1].x && myPoints[0].y != myPoints[1].y)
            {
                if (m == 2)
                    centers.emplace_back(myPoints[4]);
                else if (n == 2)
                    centers.emplace_back(myPoints[5]);
                else if (k == 2)
                    centers.emplace_back(myPoints[6]);

                if (m == 3)
                    centers.emplace_back(myPoints[4]);
                else if (n == 3)
                    centers.emplace_back(myPoints[5]);
                else if (k == 3)
                    centers.emplace_back(myPoints[6]);

                if (m == 4)
                    centers.emplace_back(myPoints[4]);
                else if (n == 4)
                    centers.emplace_back(myPoints[5]);
                else if (k == 4)
                    centers.emplace_back(myPoints[6]);

                // Guard against duplicate/missing orientation codes: the
                // original indexed centers[0..3] unconditionally, which reads
                // out of bounds when fewer than 4 centres were collected.
                if (centers.size() == 4)
                {
                    // Marker corner coordinates in the marker frame
                    // (millimetres — see the /1000 scaling below).
                    std::vector<cv::Point3f> objectPts;
                    objectPts.emplace_back(-150, 70, 20);
                    objectPts.emplace_back(-150, 70, -20);
                    objectPts.emplace_back(-150, -70, -20);
                    objectPts.emplace_back(-150, -70, 20);

                    std::vector<cv::Point2f> ImagePts;
                    for (int j = 0; j < 4; j++)
                    {
                        ImagePts.emplace_back(centers[j].x, centers[j].y);
                        cv::putText(contoursImg, std::to_string(j), ImagePts[j],
                                    cv::FONT_HERSHEY_PLAIN, 3, cv::Scalar(255, 255, 255));
                    }

                    cv::solvePnP(objectPts, ImagePts, cameraMatrix, distcoeffs, rvec, tvec);
                    std::cout << tvec << std::endl;

                    // Chain the PnP pose with the fixed camera mount transform.
                    // NOTE(review): the 0.611 offset appears to be in metres
                    // while the PnP translation is in mm (scaled below) —
                    // verify the unit mix is intentional.
                    cv::Affine3d cam_to_drone = cv::Affine3d(rvec, tvec);
                    cv::Matx44d world_to_cam_mtx( 0,  0, 1, 0,
                                                 -1,  0, 0, 0,
                                                  0, -1, 0, 0.611,
                                                  0,  0, 0, 1);
                    cv::Affine3d world_to_cam(world_to_cam_mtx);
                    cv::Affine3d world_to_drone = cam_to_drone.concatenate(world_to_cam);

                    // Axis-angle (Rodrigues vector) -> quaternion; guard the
                    // zero-rotation case, which previously divided by zero.
                    tf::Quaternion q;
                    cv::Vec3d r = world_to_drone.rvec();
                    double theta = sqrt(r[0] * r[0] + r[1] * r[1] + r[2] * r[2]);
                    if (theta > 0.0)
                    {
                        tf::Vector3 axis(r[0] / theta, r[1] / theta, r[2] / theta);
                        q.setRotation(axis, theta);
                    }
                    else
                    {
                        q = tf::Quaternion::getIdentity();
                    }

                    geometry_msgs::PoseStamped world_to_drone_posestamped;
                    world_to_drone_posestamped.header.stamp = ros::Time::now();

                    // Translation scaled mm -> m for the ROS message.
                    world_to_drone_posestamped.pose.position.x = world_to_drone.translation()[0] / 1000.0;
                    world_to_drone_posestamped.pose.position.y = world_to_drone.translation()[1] / 1000.0;
                    world_to_drone_posestamped.pose.position.z = world_to_drone.translation()[2] / 1000.0;

                    world_to_drone_posestamped.pose.orientation.w = q.w();
                    world_to_drone_posestamped.pose.orientation.x = q.x();
                    world_to_drone_posestamped.pose.orientation.y = q.y();
                    world_to_drone_posestamped.pose.orientation.z = q.z();

                    pose_pub.publish(world_to_drone_posestamped);
                    published = true;
                }
            }
        }
    }

    cv::imshow("contoursImg", contoursImg);
    return published;
}


int main(int argc, char *argv[])
{


    ros::init(argc, argv, "pnp_MV_cam");
    ros::NodeHandle nh;
    pose_pub = nh.advertise<geometry_msgs::PoseStamped>("/mavros/vision_pose/pose", 1);
    pose_raw_pub = nh.advertise<geometry_msgs::PoseStamped>("/cam_raw/pose", 1);

    cv::VideoCapture cap("/home/ljy/Videos/Video_20200930145154704.avi");
    cv::Mat frame;
    while(true)
    {
        cap>>frame;
        cv::cvtColor(frame, frame, CV_BGR2GRAY);
        cv::imshow("frame", frame); 
        calculatePose(frame);
        int key = cv::waitKey(1);
        if(key == 27) break;
    }

    return 0;
}
