//
// Created by shine on 2020/3/17.
//

#include <iostream>
#include <math.h>

#include <Eigen/Core>
#include <Eigen/Geometry>

#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>

#include <ros/ros.h>
#include <nodelet/nodelet.h>
#include <image_transport/image_transport.h>
#include <cv_bridge/cv_bridge.h>
#include <std_msgs/String.h>
#include <stereo_msgs/DisparityImage.h>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/Imu.h>
#include <sensor_msgs/image_encodings.h>

#include <pcl/point_types.h>
#include <pcl/point_cloud.h>
#include <pcl/features/normal_3d.h>
#include <pcl/filters/filter_indices.h>
#include <pcl/io/ply_io.h>
#include <pcl/visualization/pcl_visualizer.h>
#include <pcl_conversions/pcl_conversions.h>

// Constants as typed constexpr instead of macros (scoped, debuggable, no
// textual substitution surprises). Names kept so the rest of the file
// compiles unchanged.
constexpr double PI = 3.14159265358979323846;
constexpr float MAX_DISTANCE = 10.0f;   // points farther than this [m] are discarded

// NOTE(review): file-scope using-directives are discouraged, but they are kept
// because the rest of this file relies on unqualified cv:: and std:: names.
using namespace cv;
using namespace std;

typedef pcl::PointXYZRGBA PointT;
typedef pcl::PointCloud<PointT> PointCloud;
typedef cv::Point_<short> Point2s;

//void imageCallback(const sensor_msgs::ImageConstPtr& msg)
// Publishers created in main() and used from the subscription callbacks.
image_transport::Publisher pub32f_;
ros::Publisher pcl_pub;

// Pinhole intrinsics of the camera, in pixels.
// TODO(review): hard-coded — confirm against the camera_info topic.
float fx =  261.642908041018;
float fy =  261.642908041018;
float cx =  320.5;
float cy =  240.5;

// Gravity ("down") direction expressed in the camera frame; written by
// imuCallback, read by disparityCallback (safe under the single-threaded
// ros::spin() in main). Initial value assumes a level camera (y axis down).
Eigen::Vector3d v_rotated ( 0,1,0 );

// Back-projects pixel (u, v) with depth d into a 3-D point in the camera
// frame, using the file-scope pinhole intrinsics fx, fy, cx, cy.
Point3f uvd2Point3f(float u,float v,float d){
    const float z = d;
    return Point3f((u - cx) * z / fx,
                   (v - cy) * z / fy,
                   z);
}

// Converts an incoming disparity image into (1) a 32-bit depth image published
// on pub32f_, (2) a surface-normal-based "obstacle" mask, and (3) a point
// cloud of the obstacle pixels published on pcl_pub. Surface normals are
// compared against the IMU-derived gravity direction (global v_rotated) to
// separate floor-like from obstacle-like surfaces.
void disparityCallback(const stereo_msgs::DisparityImageConstPtr& disparityMsg)
{
    // Wrap the incoming disparity buffer without copying; const_cast is safe
    // because the Mat is only read below.
    cv::Mat disparity(disparityMsg->image.height, disparityMsg->image.width, CV_32FC1,
                      const_cast<uchar*>(disparityMsg->image.data.data()));
    cv::Mat depth32f = cv::Mat::zeros(disparity.rows, disparity.cols, CV_32F);

    // depth = baseline * focal / disparity; invalid disparities stay 0.
#pragma omp parallel for
    for (int i = 0; i < disparity.rows; i++)
    {
        for (int j = 0; j < disparity.cols; j++)
        {
            const float disparity_value = disparity.at<float>(i, j);
            if (disparity_value > disparityMsg->min_disparity &&
                disparity_value < disparityMsg->max_disparity)
            {
                depth32f.at<float>(i, j) = disparityMsg->T * disparityMsg->f / disparity_value;
            }
        }
    }

    // Publish the depth image as sensor_msgs::Image (32FC1), stamped like the
    // disparity message.
    cv_bridge::CvImage cvDepth(disparityMsg->header, sensor_msgs::image_encodings::TYPE_32FC1, depth32f);
    sensor_msgs::Image depthMsg;
    cvDepth.toImageMsg(depthMsg);
    pub32f_.publish(depthMsg);

    // Smooth the depth before differentiating it for normals.
    GaussianBlur( depth32f, depth32f , cv::Size(5, 5), 7, 7 );

    // --- Normal estimation --------------------------------------------------
    // BUGFIX: the original loop ran over *every* pixel while reading the
    // neighbours (v-1,u), (v+1,u), (v,u-1), (v,u+1), which reads out of bounds
    // on the image border (undefined behavior). Iterate the interior only;
    // border normals stay at their zero initialization.
    Mat normals = cv::Mat::zeros(disparity.rows, disparity.cols, CV_32FC3);
    for(int v = 1; v < depth32f.rows - 1; ++v) {
        for (int u = 1; u < depth32f.cols - 1; ++u) {
            const Point3f pointMiddle = uvd2Point3f(u, v, depth32f.at<float>(v, u));
            const Point3f pointBack   = uvd2Point3f(u, v - 1, depth32f.at<float>(v - 1, u));
            const Point3f pointRight  = uvd2Point3f(u + 1, v, depth32f.at<float>(v, u + 1));

            const Vec3f r(pointRight.x, pointRight.y, pointRight.z);
            const Vec3f b(pointBack.x, pointBack.y, pointBack.z);
            const Vec3f c(pointMiddle.x, pointMiddle.y, pointMiddle.z);
            // Normal = negated cross product of the two tangent directions so
            // it points toward the camera.
            const Vec3f d = (r - c).cross(b - c);
            normals.at<Vec3f>(v, u) = normalize(-d);
        }
    }

    GaussianBlur( normals, normals , cv::Size(9, 9), 7, 7 );
    imshow("normals", normals);

    // --- Gravity-alignment weighting ----------------------------------------
    // weight = |g . n|^5: near 1 where the normal is aligned with gravity
    // (floor-like), near 0 for vertical surfaces. binary marks pixels to be
    // EXCLUDED from the obstacle cloud (floor or invalid depth).
    Mat weight = cv::Mat::zeros(disparity.rows, disparity.cols, CV_32FC1);
    Mat binary = cv::Mat::zeros(disparity.rows, disparity.cols, CV_32FC1);
    for(int v = 0; v < depth32f.rows; ++v) {
        for (int u = 0; u < depth32f.cols; ++u) {

            // Invalid or too-distant depth: treat as floor-like (excluded).
            if(depth32f.at<float>(v,u) < 0.01 || depth32f.at<float>(v,u) > MAX_DISTANCE + 2){
                weight.at<float>(v,u) = 1;
                binary.at<float>(v,u) = 1;
                continue;
            }

            const Vec3f g(v_rotated.x(), v_rotated.y(), v_rotated.z());
            const Vec3f n = normals.at<Vec3f>(v, u);

            const double w = g.dot(n);
            const double value = pow(abs(w), 5);
            weight.at<float>(v,u) = value;

            // Threshold chosen empirically; see the weight window for tuning.
            binary.at<float>(v,u) = (value > 0.3) ? 1 : 0;
        }
    }

    // Grow the excluded (floor) region to suppress speckle at floor edges.
    Mat element1 = getStructuringElement(MORPH_RECT, Size(15, 15));
    dilate( binary, binary, element1);
    imshow("mid",binary);
    imshow("weight",weight);
    waitKey(3);

    // --- Obstacle point cloud ------------------------------------------------
    PointCloud::Ptr cloud(new PointCloud);
    for (int v = 0; v < depth32f.rows; v++)
        for (int u = 0; u < depth32f.cols; u++)
        {
            // Skip invalid depth, far-away points, and floor-like pixels.
            if (depth32f.at<float>(v, u) < 0.01 || depth32f.at<float>(v, u) > MAX_DISTANCE || binary.at<float>(v, u) > 0.5)
                continue;

            const float d = depth32f.ptr<float>(v)[u];
            PointT p;

            // Back-project the pixel into the camera frame.
            p.z = double(d);
            p.x = (u - cx) * p.z / fx;
            p.y = (v - cy) * p.z / fy;

            // No color source available here; mark obstacle points white.
            p.b = 255;
            p.g = 255;
            p.r = 255;

            cloud->points.push_back(p);
        }

    // Unorganized cloud: height 1, width = point count.
    cloud->height = 1;
    cloud->width = cloud->points.size();
    cout << "point cloud size = " << cloud->points.size() << endl;
    cloud->is_dense = false;

    sensor_msgs::PointCloud2 output;
    pcl::toROSMsg(*cloud, output);
    // BUGFIX: stamp the cloud with the source image time (was left at zero,
    // which breaks TF lookups downstream).
    output.header.stamp = disparityMsg->header.stamp;
    output.header.frame_id = "/cam1_left_optical_link";
    pcl_pub.publish(output);

}

void imuCallback(const sensor_msgs::Imu imuMsg){

    Eigen::Quaterniond q(imuMsg.orientation.w,
                         imuMsg.orientation.x,
                         imuMsg.orientation.y,
                         imuMsg.orientation.z);

    Eigen::Vector3d eulerAngle=q.matrix().eulerAngles(2,1,0);

    Eigen::AngleAxisd robot_roll(eulerAngle(2),Eigen::Vector3d::UnitX());
    Eigen::AngleAxisd robot_pitch(eulerAngle(1),Eigen::Vector3d::UnitY());

    //该处由于像极坐标系为z轴向前，x轴向右，y轴向下.
    Eigen::AngleAxisd cam_Z(robot_roll.angle(),Eigen::Vector3d::UnitZ());
    Eigen::AngleAxisd cam_X(-robot_pitch.angle(),Eigen::Vector3d::UnitX());
//    Eigen::AngleAxisd cam_yaw(0,Eigen::Vector3d::UnitY());
//    cam_rotation = cam_yaw*cam_pitch*cam_roll;

    cout << "robot_rollAngle:" << robot_roll.angle()<< endl;
    cout << "robot_pitchAngle:" << robot_pitch.angle()<< endl;
    cout << "cam_Z:" << cam_Z.angle()<< endl;
    cout << "cam_X:" << cam_X.angle()<< endl;
//    cout << "cam_rotationAngle:" << cam_rotation.angle()<< endl;
//
//    Eigen::Vector3d v ( 0,1,0 );
//    Eigen::Vector3d v_rotated = cam_rotation * v;
//    cout<<"(0,1,0) after rotation = "<<v_rotated.transpose()<<endl;

    Eigen::Vector3d v ( 0,1,0 );
    v_rotated = cam_X*v;
    v_rotated = cam_Z*v_rotated;
    cout<<"(0,1,0) after rotation = "<<v_rotated.transpose()<<endl;

}

int main(int argc, char **argv)
{
    ros::init(argc, argv, "image_listener");
    ros::NodeHandle nh;
//    cv::namedWindow("view");
    cv::startWindowThread();
    image_transport::ImageTransport it(nh);

//    image_transport::Subscriber sub = it.subscribe("/cam1/left/image_raw", 1, imageCallback);
    ros::Subscriber disparitySub = nh.subscribe("/cam1/disparity", 1, disparityCallback);
    ros::Subscriber imuSub = nh.subscribe("/imu/data", 1, imuCallback);

    pub32f_ = it.advertise("/cam1/depth", 1);
    pcl_pub = nh.advertise<sensor_msgs::PointCloud2> ("obstacle_cloud", 1);

    ros::spin();
//    cv::destroyWindow("view");
}


