#include "complx_tracker.h"

#include <std_msgs/String.h>
#include <std_msgs/Empty.h>
#include <geometry_msgs/PoseStamped.h>
#include <geometry_msgs/TwistStamped.h>
#include <geometry_msgs/Point.h>
#include <geometry_msgs/Pose.h>
#include <geometry_msgs/PoseWithCovarianceStamped.h>

#include <iostream>
#include <fstream>
#include <sstream>
#include <queue>
#include <tf/tf.h>
#include <tf/transform_broadcaster.h>
//#include <tf_conversions/tf_eigen.h>
#include <ros/ros.h>
#include <image_transport/image_transport.h>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/image_encodings.h>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <string>
#include <iostream>
#include <sensor_msgs/PointCloud.h>
#include <cmath>
#include <opencv2/core/types.hpp>
#include <opencv2/core.hpp>
#include <std_msgs/Float32MultiArray.h>
#include <std_msgs/UInt8.h>
#include <rospack/rospack.h>


// Index of the local V4L capture device; only used by the commented-out
// VideoCapture fallback path in main().
#define CAP_INDEX 1

using namespace cv;
using namespace std;
using namespace ros;

// CSV log files. Only CameraFile is opened/closed in main(); ViconFile is
// unused in this file — presumably a leftover from a Vicon-logging variant;
// TODO confirm before removing.
ofstream ViconFile, CameraFile;
Time start_time;          // set once in main() when the node starts
cv::Mat frame;            // latest camera image, written by colorCb(), read by main loop


// Image-topic callback: converts the incoming ROS image message into an
// OpenCV BGR8 matrix and stores it in the global `frame` buffer that the
// main loop consumes. On conversion failure the frame is left untouched.
void colorCb(const sensor_msgs::ImageConstPtr &msg) {
    cv_bridge::CvImagePtr bridged;
    try {
        bridged = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::BGR8);
    } catch (cv_bridge::Exception &e) {
        ROS_ERROR("cv_bridge exception: %s", e.what());
        return;
    }
    frame = bridged->image;
}

//
//void camerainfoCb(const sensor_msgs::CameraInfoConstPtr &msg) {
//    fx = msg->K[0];
//    fy = msg->K[4];
//    cx = msg->K[2];
//    cy = msg->K[5];
//    camera_inited = true;
//}


int main(int argc, char *argv[]) {
    init(argc, argv, "red_point");
    NodeHandle n;
    Subscriber camera_sub = n.subscribe("/iris/usb_cam/image_raw",10,colorCb);
//    Subscriber camera_info_sub = n.subscribe("/iris/usb_cam/camera_info",10,camerainfoCb);
    Publisher pos_pub = n.advertise<geometry_msgs::PoseStamped>("/cam/pose", 10);
    Publisher pos_raw_pub = n.advertise<geometry_msgs::PoseStamped>("/cam/pose_raw", 10);

    static tf::TransformBroadcaster br;

    Rate loop_rate(30);

    complx_tracker droneTracker;
    Vec3d outputRvecRaw, outputTvecRaw, outputRvec, outputTvec;
    Mat showImg;

//    //! [capture]
//    VideoCapture capture(CAP_INDEX);
//    capture.set(CAP_PROP_AUTO_EXPOSURE, 0.25);
//    capture.set(CAP_PROP_EXPOSURE, 0.000001);
//    if (!capture.isOpened()) {
//        //error in opening the video input
//        cerr << "Unable to open: " << CAP_INDEX << endl;
//        return 0;
//    }
//    //! [capture]

    std::string pkg_path = ros::package::getPath("laser_target");

    CameraFile.open(pkg_path + "/data/cameraresult.csv");

    Vec3f unfiltered_pos;
    queue<Vec3f> sliding_win;
    Vec3f sum_pos(0, 0, 0);
    geometry_msgs::PoseStamped msg_pos, msg_pos_old, msg_pos_raw, msg_pos_raw_old;
    start_time = Time::now();

    while (ros::ok()) {
        geometry_msgs::TwistStamped msg_vel;
        ros::spinOnce();
//        capture >> frame;
        if (frame.empty())
            continue;

        Affine3d pose_world, pose_raw;

        if (droneTracker.apply(frame, showImg, pose_world, pose_raw)) {

            cout << frame.size() << endl;
            msg_pos_raw.pose.position.x = pose_raw.translation()[0];
            msg_pos_raw.pose.position.y = pose_raw.translation()[1];
            msg_pos_raw.pose.position.z = pose_raw.translation()[2];
            msg_pos_raw.header.stamp = Time::now();

            tf::Quaternion q;
            double theta = sqrt(pose_raw.rvec()[0] * pose_raw.rvec()[0] +
                                pose_raw.rvec()[1]*pose_raw.rvec()[1] +
                                pose_raw.rvec()[2]*pose_raw.rvec()[2]);

            tf::Vector3 axis = tf::Vector3(pose_raw.rvec()[0]/theta,pose_raw.rvec()[1]/theta,pose_raw.rvec()[2]/theta);
            q.setRotation(axis,theta);

            tf::Transform transform;
            transform.setOrigin(tf::Vector3(pose_raw.translation()[0],pose_raw.translation()[1],pose_raw.translation()[2]));
            transform.setRotation(q);

            br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "map", "drone"));



            msg_pos_raw.pose.orientation.w = q.w();
            msg_pos_raw.pose.orientation.x = q.x();
            msg_pos_raw.pose.orientation.y = q.y();
            msg_pos_raw.pose.orientation.z = q.z();


            pos_raw_pub.publish(msg_pos_raw);


            ///
            ros::Duration dur_temp;
            dur_temp = Time::now() - msg_pos.header.stamp;
            if (dur_temp.toSec() > 0 && dur_temp.toSec() < 0.1) {
                msg_pos.header.stamp = Time::now();
                msg_pos.pose.position.x += (pose_world.translation()[0] - msg_pos.pose.position.x) * 0.5;
                msg_pos.pose.position.y += (pose_world.translation()[1] - msg_pos.pose.position.y) * 0.5;
                msg_pos.pose.position.z += (pose_world.translation()[2] - msg_pos.pose.position.z) * 0.5;
            } else {
                msg_pos.header.stamp = Time::now();
                msg_pos.pose.position.x = pose_world.translation()[0];
                msg_pos.pose.position.y = pose_world.translation()[1];
                msg_pos.pose.position.z = pose_world.translation()[2];
            }
//            cout << msg_pos << endl;
//            pos_pub.publish(msg_pos);

        } else {
            pos_pub.publish(msg_pos);
            msg_vel.twist.linear.x = 0;
            msg_vel.twist.linear.y = 0;
            msg_vel.twist.linear.z = 0;
//            vel_pub.publish(msg_vel);
        }

        loop_rate.sleep();
    }

    CameraFile.close();
    return 0;
}


//
//class pillar {
//public:
//    double dis_to_center;
//    double depth;
//    double width;
//    Vec3d pos;
//
//    pillar(double dis_to_center, double depth, double width) : dis_to_center(dis_to_center), depth(depth),
//                                                               width(width) {};
//
//    pillar(Vec3d pos) : pos(pos) {};
//
//    friend ostream &operator<<(ostream &out, const pillar &s) {
//        out << "dis_to_center: " << s.dis_to_center << "\t" << "depth: " << s.depth << "\t width: " << s.width;
//        return out;
//    }
//
//};
//
//class pillar_seeker {
//
//    string path="/home/dd/rs_ws/src/d435_cv/";
//    int store_sequence[5]={106,579,482,615,759};
//
//    ros::NodeHandle nh_;
////    image_transport::ImageTransport it_;
//    ros::Subscriber depth_sub;
//    ros::Subscriber color_sub;
//    ros::Subscriber depth_camerainfo_sub;
//    ros::Subscriber stage_sub;
//    ros::Publisher cloud_pub;
//    ros::Publisher pillar_pub;
//    double angle = 0.; //in rad. +/-: when || becomes //, it's positive. when || becomes \\, it's negative.
//    Mat depth_img;
//    Mat color_img;
//    std::vector<pillar> pillarlist;
//    pillar closest{Vec3f{0, 0, 999}};
//    int pillercount = 0;
//    float fx = 611.855712890625, fy = 611.8430786132812, cx = 317.46136474609375, cy = 247.88717651367188;
//    bool camera_inited = false;
//    bool is_ready = true;
//    bool store_img_now = false;
//
//public:
//    pillar_seeker() {
//        // Subscribe to input video feed and publish output video feed
//        depth_sub = nh_.subscribe("/camera/aligned_depth_to_color/image_raw", 1,
//                                  &pillar_seeker::depthCb, this);
//        color_sub = nh_.subscribe("/camera/color/image_raw", 1,
//                                  &pillar_seeker::colorCb, this);
//        depth_camerainfo_sub = nh_.subscribe("/camera/aligned_depth_to_color/camera_info", 1,
//                                             &pillar_seeker::camerainfoCb, this);
//        cloud_pub = nh_.advertise<sensor_msgs::PointCloud>("/cloud", 10);
//
//        pillar_pub = nh_.advertise<std_msgs::Float32MultiArray>("/pillarpos", 10);
//
//        cv::namedWindow(OPENCV_WINDOW);
//
//    }
//
//
//
//
//
//
//
//    void depthCb(const sensor_msgs::ImageConstPtr &msg) {
//        if (is_ready)
//        {
//            cv_bridge::CvImagePtr cv_ptr;
//            try
//            {
////      cv_ptr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::BGR8);
//                cv_ptr = cv_bridge::toCvCopy(msg, msg->encoding);
//            }
//            catch (cv_bridge::Exception &e)
//            {
//                ROS_ERROR("cv_bridge exception: %s", e.what());
//                return;
//            }
//
//            Mat blur_img, mor_img;
//            depth_img = cv_ptr->image;
//
//
//            imshow("res", show_img);
//
//            cv::waitKey(1);
//
//        }
//    }
//
//    Vec3d getPos(const Point &center, double width, double height) {
//        int u0, u1, v0, v1;
//        u0 = center.x - width / 2;
//        u1 = center.x + width / 2;
//        v0 = center.y - height / 2;
//        v1 = center.y + height / 2;
//        u0 = u0 < 0 ? 0 : u0;
//        u1 = u1 > depth_img.size().width - 1 ? depth_img.size().width - 1 : u1;
//        v0 = v0 < 0 ? 0 : v0;
//        v1 = v1 > depth_img.size().height - 1 ? depth_img.size().height - 1 : v1;
//
//        if (u1 < 0 || u0 >= u1 || v0 >= v1 || v1 < 0 || u0 > depth_img.size().width - 1 ||
//            v0 > depth_img.size().height - 1)
//        {
//            ROS_ERROR("getPos center or width or height WRONG!!!");
//            return Vec3d(0, 0, 0);
//        }
//        double sum = 0;
//        for (int u = u0; u < u1; u++)
//            for (int v = v0; v < v1; v++)
//                sum += depth_img.at<ushort>(Point(u, v));
//        double x, y, z;
//        z = sum / (u1 - u0) / (v1 - v0) * 0.001;
//        x = (0.5 * (u0 + u1) - cx) * z / fx;
//        y = (0.5 * (v0 + v1) - cy) * z / fy;
//
//        return Vec3d(x, y, z);
//
//    }
//
//
//};
