#include <ros/ros.h>
#include <ros/package.h>
#include <vector>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <stdlib.h>
#include <pthread.h>
#include <string>
#include "MvCameraControl.h"
#include <opencv2/opencv.hpp>

#include <geometry_msgs/PoseStamped.h>
#include <mavros_msgs/CommandBool.h>
#include <mavros_msgs/SetMode.h>
#include <mavros_msgs/State.h>
#include <tf/tf.h>
#include "UWB_message_type.h"
#include <nlink_parser/LinktrackNodeframe0.h>
#include <nlink_parser/LinktrackNodeframe3.h>
#include <nlink_parser/LinktrackNodeframe2.h>
#include <nlink_parser/LinktrackNode0.h>
#include <std_msgs/String.h>
#include <eigen3/Eigen/Dense>


// Publishers: fused pose (for MAVROS vision input), raw camera pose, and
// serialized UWB observation data. All advertised in main().
ros::Publisher pose_pub;
ros::Publisher pose_raw_pub;
ros::Publisher uwb_data_pub;

// Latest UWB system time, updated by the Linktrack frame2/frame3 callbacks.
uint32_t g_systemTime = 0;
// Most recent Linktrack frame0 message (per-node raw data payloads).
nlink_parser::LinktrackNodeframe0 g_frame0;

// Accumulators for averaging world->camera calibration samples in
// calculatePose(): positions are summed; orientations are stored one
// quaternion (x, y, z, w) per column, starting with zero columns.
Eigen::Vector3d world_to_cam_position_sum(0,0,0);
Eigen::Matrix4Xd world_to_cam_orientation_sum(4,0);

// Initial world->camera extrinsics: zero translation and a fixed
// axis-permutation rotation; calculatePose() overwrites these once
// calibration completes.
tf::Vector3 world_to_cam_translation(0,0,0);
tf::Matrix3x3 world_to_cam_rotation_matrix(0, 0, 1, 
                                            -1, 0, 0,
                                            0,-1, 0);
tf::Transform world_to_cam_tf(world_to_cam_rotation_matrix, world_to_cam_translation);


// Set when the user requests shutdown; polled by the grab thread and main().
bool g_bExit = false;
// Camera payload size (bytes per frame) reported by the SDK in main().
unsigned int g_nPayloadSize = 0;

// 等待用户输入enter键来结束取流或结束程序
// wait for user to input enter to stop grabbing or end the sample program
// Block until the user presses enter, then request program shutdown by
// raising g_bExit. Any characters already buffered on stdin are drained
// first so a stray earlier newline does not trigger an immediate exit.
void PressEnterToExit(void)
{
    // Discard leftover input up to (and including) the pending newline.
    for (int ch = getchar(); ch != '\n' && ch != EOF; ch = getchar())
    {
    }
    fprintf( stderr, "\nPress enter to exit.\n");
    // Wait for the user's enter key.
    while (getchar() != '\n')
    {
    }
    g_bExit = true;
    sleep(1);  // give the worker thread a moment to observe g_bExit
}

// Print identifying information for one enumerated camera device.
// GigE devices report their current IP and user-defined name; USB3 devices
// report name, serial and device number; other transport layers are
// reported as unsupported.
// Returns false only when the device-info pointer is null.
bool PrintDeviceInfo(MV_CC_DEVICE_INFO* pstMVDevInfo)
{
    if (pstMVDevInfo == NULL)
    {
        printf("The Pointer of pstMVDevInfo is NULL!\n");
        return false;
    }

    switch (pstMVDevInfo->nTLayerType)
    {
    case MV_GIGE_DEVICE:
    {
        // Decode the packed IPv4 address into dotted-quad form.
        const unsigned int ip = pstMVDevInfo->SpecialInfo.stGigEInfo.nCurrentIp;
        printf("CurrentIp: %d.%d.%d.%d\n",
               (int)((ip & 0xff000000) >> 24),
               (int)((ip & 0x00ff0000) >> 16),
               (int)((ip & 0x0000ff00) >> 8),
               (int)(ip & 0x000000ff));
        printf("UserDefinedName: %s\n\n" , pstMVDevInfo->SpecialInfo.stGigEInfo.chUserDefinedName);
        break;
    }
    case MV_USB_DEVICE:
        printf("UserDefinedName: %s\n", pstMVDevInfo->SpecialInfo.stUsb3VInfo.chUserDefinedName);
        printf("Serial Number: %s\n", pstMVDevInfo->SpecialInfo.stUsb3VInfo.chSerialNumber);
        printf("Device Number: %d\n\n", pstMVDevInfo->SpecialInfo.stUsb3VInfo.nDeviceNumber);
        break;
    default:
        printf("Not support.\n");
        break;
    }

    return true;
}

// A labeled 2-D point: an integer tag plus (x, y) image coordinates.
// Used to carry contour-centroid positions through the marker detection.
class PointC
{
  public:
    int number;  // caller-assigned label
    float x;     // x coordinate
    float y;     // y coordinate

    PointC(int a, float xPos, float yPos)
        : number(a), x(xPos), y(yPos)
    {
    }
};


// A plain 2-D vector/point with (x, y) components; used for the
// difference vectors in Conv().
class PointB
{
  public:
    float x;  // x component
    float y;  // y component

    PointB(float xPos, float yPos)
        : x(xPos), y(yPos)
    {
    }
};


// Classify the position of p3 and p4 relative to the directed line p1->p2
// using 2-D cross products of the connecting vectors.
//
// Returns:
//   2 - both cross products non-negative (p3 and p4 on the same side),
//   3 - cross products of opposite sign (p3 and p4 straddle the line),
//   4 - otherwise (both cross products negative).
float Conv(PointC p1, PointC p2, PointC p3, PointC p4)
{
    PointB p12 = PointB(p2.x - p1.x, p2.y - p1.y);
    PointB p31 = PointB(p1.x - p3.x, p1.y - p3.y);
    PointB p41 = PointB(p1.x - p4.x, p1.y - p4.y);

    const float cross3 = p12.x * p31.y - p12.y * p31.x;
    const float cross4 = p12.x * p41.y - p12.y * p41.x;

    if (cross3 >= 0 && cross4 >= 0)
        return 2;
    if (cross3 * cross4 <= 0)
        return 3;
    // Fix: the original had this as a guarded `else if` and could fall off
    // the end of a value-returning function (undefined behavior, e.g. on NaN
    // input). The remaining case (both cross products negative) is now the
    // explicit default.
    return 4;
}

/// Detect the marker's nested-contour pattern in a grayscale frame, recover
/// its pose with solvePnP, and then either:
///   * publish the observation over the UWB link (when ROS param
///     "initial_good_position" is true), or
///   * accumulate world->camera calibration samples from observations
///     received over UWB, finishing calibration (and updating the global
///     world_to_cam_tf) once "calibration_pose_count" samples are collected.
///
/// @param image  8-bit single-channel camera frame.
/// @return false when a required ROS parameter is missing; true otherwise
///         (including frames where no marker was found).
bool calculatePose(cv::Mat& image)
{
    /// intrinsic parameters of the camera (hard-coded calibration)
    static cv::Mat cameraMatrix;
    static cv::Mat rvec, tvec;
    cameraMatrix = cv::Mat::zeros(3, 3, CV_64F);
    static const std::vector<double> distcoeffs{-0.0622484869016584, 0.116141620134878, 0.0, 0.0, 0.0};
    cameraMatrix.at<double>(0, 0) = 1684.659238683180;  // fx
    cameraMatrix.at<double>(0, 2) = 656.480920681900;   // cx
    cameraMatrix.at<double>(1, 1) = 1684.163794939990;  // fy
    cameraMatrix.at<double>(1, 2) = 535.952107137916;   // cy
    cameraMatrix.at<double>(2, 2) = 1;

    // Binarize: clamp dark pixels to zero, then Otsu-threshold the remainder.
    cv::Mat binImg(image.size(), CV_8UC1);
    // cv::adaptiveThreshold(image, binImg, 255, cv::ADAPTIVE_THRESH_GAUSSIAN_C, cv::THRESH_BINARY, 101, 20);
    cv::threshold(image, binImg, 100, 255, cv::THRESH_TOZERO);
    cv::threshold(binImg, binImg, 0, 255, cv::THRESH_BINARY + cv::THRESH_OTSU);

    std::vector<std::vector<cv::Point>> contours_org;
    std::vector<cv::Vec4i> hierarchy;

    cv::findContours(binImg, contours_org, hierarchy, cv::RETR_TREE, cv::CHAIN_APPROX_SIMPLE);
    cv::Mat contoursImg;
    cv::cvtColor(image, contoursImg, cv::COLOR_GRAY2BGR);
    cv::drawContours(contoursImg, contours_org, -1, cv::Scalar(0, 0, 255), 1, 8, hierarchy);

    // Walk the contour hierarchy looking for the marker's specific nesting
    // pattern. hierarchy[i] = {next sibling, previous sibling, first child,
    // parent}; -1 means "none". There are two different hierarchy trees the
    // marker can produce; when one matches, Cont receives the seven contour
    // indices in canonical order.
    std::vector <int> Cont;
    for (int i = 0; i < contours_org.size(); i++)
    {
        // Candidate: has a next sibling, no previous sibling, has a child.
        if (hierarchy[i][0] != -1 && hierarchy[i][1] == -1 && hierarchy[i][2] != -1)
        {
            if (hierarchy[i][3] != -1)
            {
                int pre_contour0 = hierarchy[i][3];             // parent
                int pre_contour1 = i;
                int pre_contour2 = hierarchy[pre_contour1][2];  // first child
                int pre_contour3 = hierarchy[pre_contour1][0];  // next sibling

                if (hierarchy[pre_contour3][0] == -1 && hierarchy[pre_contour3][2] != -1)
                {
                    int pre_contour4 = hierarchy[pre_contour3][2];

                    // First tree shape: sibling chain hanging under pre_contour3.
                    if (hierarchy[pre_contour4][2] == -1 && hierarchy[pre_contour4][0] != -1)
                    {
                        int pre_contour5 = hierarchy[pre_contour4][0];

                        if (hierarchy[pre_contour5][2] == -1 && hierarchy[pre_contour5][0] != -1)
                        {
                            int pre_contour6 = hierarchy[pre_contour5][0];

                            if (hierarchy[pre_contour6][0] == -1 && hierarchy[pre_contour6][2] == -1)
                            {
                                Cont = {pre_contour0, pre_contour1, pre_contour2, pre_contour3, pre_contour4, pre_contour5, pre_contour6};
                            }
                        }
                    }

                    // Second tree shape: sibling chain hanging under
                    // pre_contour2. NOTE(review): this `else if` pairs with
                    // the `if` directly above, so pre_contour4 declared above
                    // is deliberately reused (reassigned) here.
                    else if (hierarchy[pre_contour2][0] != -1 && hierarchy[pre_contour2][2] == -1)
                    {
                        pre_contour4 = hierarchy[pre_contour2][0];

                        if (hierarchy[pre_contour4][0] != -1 && hierarchy[pre_contour4][2] == -1)
                        {
                            int pre_contour5 = hierarchy[pre_contour4][0];

                            if (hierarchy[pre_contour5][0] == -1 && hierarchy[pre_contour5][2] == -1)
                            {
                                if (hierarchy[pre_contour3][2] != -1 && hierarchy[pre_contour3][0] == -1)
                                {
                                    int pre_contour6 = hierarchy[pre_contour3][2];
                                    Cont = {pre_contour0, pre_contour3, pre_contour6, pre_contour1, pre_contour2, pre_contour4, pre_contour5};
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    // N == 2 means all seven centroids were computed successfully;
    // N == 1 flags a degenerate (zero-area) contour.
    int N = 2;
    if (Cont.size() != 0)
    {
        // Centroid of each of the seven marker contours.
        std::vector<PointC> myPoints;
        for (int i = 0; i < 7; i++)
        {
            cv::Moments m;
            m = moments(contours_org[Cont[i]], true);
            if (m.m00 == 0)
            {
                N = 1;
                std::cout<<"can't find the corner"<<std::endl;
                continue;
            }
            else
            {
                PointC p = PointC(i, (m.m10 / m.m00), (m.m01 / m.m00));
                myPoints.push_back(p);
            }
        }
        if (N == 2)
        {
            // Order the four feature centers using the Conv() side tests so
            // they match the objectPts ordering below.
            std::vector<PointC> centers;
            centers.push_back(myPoints[2]);

            int m, n, k;
            m = Conv(myPoints[2], myPoints[4], myPoints[5], myPoints[6]);
            n = Conv(myPoints[2], myPoints[5], myPoints[4], myPoints[6]);
            k = Conv(myPoints[2], myPoints[6], myPoints[4], myPoints[5]);
            if (myPoints[0].x != myPoints[1].x && myPoints[0].y != myPoints[1].y)
            {
                if (m == 2)
                    centers.emplace_back(myPoints[4]);
                else if (n == 2)
                    centers.emplace_back(myPoints[5]);
                else if (k == 2)
                    centers.emplace_back(myPoints[6]);

                if (m == 3)
                    centers.emplace_back(myPoints[4]);
                else if (n == 3)
                    centers.emplace_back(myPoints[5]);
                else if (k == 3)
                    centers.emplace_back(myPoints[6]);

                if (m == 4)
                    centers.emplace_back(myPoints[4]);
                else if (n == 4)
                    centers.emplace_back(myPoints[5]);
                else if (k == 4)
                    centers.emplace_back(myPoints[6]);

                // Marker geometry in millimeters (model coordinates).
                std::vector<cv::Point3f> objectPts;
                objectPts.emplace_back(-150, 70, 20);
                objectPts.emplace_back(-150, 70, -20);
                objectPts.emplace_back(-150, -70, -20);
                objectPts.emplace_back(-150, -70, 20);

                std::vector<cv::Point2f> ImagePts;
                for (int j = 0; j < 4 ; j++)
                {
                    ImagePts.emplace_back(centers[j].x, centers[j].y);
                    cv::putText(contoursImg, std::to_string(j) , ImagePts[j], cv::FONT_HERSHEY_PLAIN, 3, cv::Scalar(255,120,0), 3);
                }

                // Camera-frame pose of the marker (rvec/tvec in mm).
                cv::solvePnP(objectPts, ImagePts, cameraMatrix, distcoeffs, rvec, tvec);

                // Process pending callbacks so g_systemTime / g_frame0 are fresh.
                ros::spinOnce();
                bool initial_good_position = false;
                if(!ros::param::has("initial_good_position"))
                {
                    ROS_ERROR("ros::param initial_good_position DOES NOT EXIST!!");
                    return false;
                }
                ros::param::get("initial_good_position", initial_good_position);
                if(initial_good_position)
                {
                    // Calibrated: send the observation over the UWB link.
                    UWB_messages::ObservationMessage obs_msg;
                    obs_msg.systemTime = g_systemTime;
                    obs_msg.world_to_cam = UWB_messages::Pose(world_to_cam_tf);
                    obs_msg.observedState[0].id = 0;
                    cv::Mat tvec_meter = tvec/1000.0;  // mm -> m
                    obs_msg.observedState[0].pose = UWB_messages::Pose(rvec, tvec_meter);
                    UWB_messages::Pose world_to_drone_pose(obs_msg.get_tf_transform_world());
                    ROS_INFO_STREAM(world_to_drone_pose);
                    std_msgs::String uwb_msg;
                    uwb_msg.data = UWB_messages::convertToString<UWB_messages::ObservationMessage>(obs_msg);
                    uwb_data_pub.publish(uwb_msg);
                }
                else
                {
                    // Calibrating: combine observations received over UWB with
                    // our own PnP result to sample the world->camera transform.
                    for(int i = 0; i < g_frame0.nodes.size();i++)
                    {
                        if(g_frame0.nodes[i].data.size() == sizeof(UWB_messages::ObservationMessage))
                        {
                            UWB_messages::ObservationMessage obs_msg_outside(UWB_messages::convertToMessage<UWB_messages::ObservationMessage>(g_frame0.nodes[i].data));
                            cv::Mat tvec_meter = tvec/1000.0;  // mm -> m
                            UWB_messages::Pose cam_to_drone(rvec, tvec_meter);
                            tf::Transform world_to_cam_tf_temp;
                            world_to_cam_tf_temp = obs_msg_outside.get_tf_transform_world() * cam_to_drone.to_tf_transform().inverse();

                            UWB_messages::Pose world_to_cam_pose_temp(world_to_cam_tf_temp);
                            std::cout << world_to_cam_orientation_sum.cols() <<  world_to_cam_pose_temp << std::endl;

                            // Accumulate the position sum and append the
                            // orientation quaternion as a new column.
                            Eigen::Vector3d world_to_cam_position(world_to_cam_tf_temp.getOrigin().getX(),world_to_cam_tf_temp.getOrigin().getY(),world_to_cam_tf_temp.getOrigin().getZ());
                            world_to_cam_position_sum += world_to_cam_position;
                            Eigen::Vector4d world_to_cam_orientation(world_to_cam_tf_temp.getRotation().getX(),world_to_cam_tf_temp.getRotation().getY(),world_to_cam_tf_temp.getRotation().getZ(),world_to_cam_tf_temp.getRotation().getW());
                            world_to_cam_orientation_sum.conservativeResize(4, world_to_cam_orientation_sum.cols()+1);
                            world_to_cam_orientation_sum.col(world_to_cam_orientation_sum.cols()-1) = world_to_cam_orientation;
                        }
                    }
                    int calibration_pose_count;
                    if(!ros::param::has("calibration_pose_count"))
                    {
                        ROS_ERROR("ros::param calibration_pose_count DOES NOT EXIST!!");
                        return false;
                    }
                    ros::param::get("calibration_pose_count", calibration_pose_count);
                    if(world_to_cam_orientation_sum.cols() >= calibration_pose_count)
                    {
                        // Average the samples: positions by arithmetic mean,
                        // orientations by the dominant eigenvector of the
                        // quaternion outer-product sum (Markley-style
                        // quaternion averaging).
                        Eigen::Vector3d world_to_cam_position_averaged;
                        Eigen::Vector4d world_to_cam_orientation_averaged;
                        world_to_cam_position_averaged = world_to_cam_position_sum/world_to_cam_orientation_sum.cols();

                        Eigen::EigenSolver<Eigen::Matrix4d> es(world_to_cam_orientation_sum * world_to_cam_orientation_sum.transpose());
                        Eigen::MatrixXcd evecs = es.eigenvectors();   // 4x4 complex eigenvectors
                        Eigen::MatrixXcd evals = es.eigenvalues();    // 4x1 complex eigenvalues
                        Eigen::MatrixXd evalsReal;                    // real parts only
                        evalsReal=evals.real();
                        Eigen::MatrixXf::Index evalsMax;
                        evalsReal.rowwise().sum().maxCoeff(&evalsMax);  // index of the largest eigenvalue
                        world_to_cam_orientation_averaged << evecs.real()(0, evalsMax),
                                                            evecs.real()(1, evalsMax),
                                                            evecs.real()(2, evalsMax),
                                                            evecs.real()(3, evalsMax);  // matching eigenvector

                        world_to_cam_tf.setOrigin(tf::Vector3(world_to_cam_position_averaged.x(),world_to_cam_position_averaged.y(),world_to_cam_position_averaged.z()));
                        // Keep the quaternion in the w >= 0 hemisphere
                        // (q and -q encode the same rotation).
                        if(world_to_cam_orientation_averaged.w() < 0)
                            world_to_cam_orientation_averaged = -world_to_cam_orientation_averaged;
                        world_to_cam_tf.setRotation(tf::Quaternion(world_to_cam_orientation_averaged.x(),
                                                                    world_to_cam_orientation_averaged.y(),
                                                                    world_to_cam_orientation_averaged.z(),
                                                                    world_to_cam_orientation_averaged.w()));
                        ROS_WARN_STREAM("Calibration complete!! " << calibration_pose_count);
                        UWB_messages::Pose world_to_cam_pose(world_to_cam_tf);
                        ROS_WARN_STREAM(world_to_cam_pose);
                        // Reset the accumulators and flip the param so the
                        // publish branch runs from now on.
                        world_to_cam_orientation_sum = Eigen::Matrix4Xd();
                        world_to_cam_position_sum = Eigen::Vector3d(0,0,0);
                        ros::param::set("initial_good_position", true);
                    }
                }
            }
        }
    }

    cv::imshow("contoursImg", contoursImg);

    // Fix: the original fell off the end of a bool-returning function here
    // (undefined behavior in C++); report normal completion explicitly.
    return true;
}

/// Camera grab thread: repeatedly pulls frames from the MVS SDK, runs pose
/// estimation on each frame, and handles keyboard input in the preview
/// window ('w' saves the frame to disk, ESC requests shutdown).
///
/// @param pUser  opaque camera handle created by MV_CC_CreateHandle.
/// @return always NULL (pthread entry point).
static  void* WorkThread(void* pUser)
{
    int nRet = MV_OK;
    static int image_index = 0;  // sequence number for saved calibration images

    MV_FRAME_OUT stOutFrame = {0};
    memset(&stOutFrame, 0, sizeof(MV_FRAME_OUT));

    while(ros::ok())
    {
        nRet = MV_CC_GetImageBuffer(pUser, &stOutFrame, 1000);
        if (nRet == MV_OK)
        {
            // Fix: only wrap the SDK buffer in a cv::Mat after a successful
            // grab; the original constructed the Mat unconditionally, from
            // zeroed frame info when the grab had failed.
            cv::Mat image = cv::Mat(stOutFrame.stFrameInfo.nHeight, stOutFrame.stFrameInfo.nWidth, CV_8UC1, (void *)stOutFrame.pBufAddr);
            calculatePose(image);
            int key_board = cv::waitKey(1);
            if(key_board == 'w')
            {
                // Fix: std::to_string keeps filenames correct past index 9
                // (the original appended image_index + '0' as one character).
                std::string file_name = "/home/ljy/cali_image/image" + std::to_string(image_index) + ".jpg";
                cv::imwrite(file_name, image);
                std::cout << file_name << std::endl;
                image_index++;
            }
            else if(key_board == 27)  // ESC
            {
                g_bExit = true;
                // Fix: release the SDK buffer before leaving the thread;
                // the original returned here without freeing it.
                if(NULL != stOutFrame.pBufAddr)
                {
                    MV_CC_FreeImageBuffer(pUser, &stOutFrame);
                }
                return 0;
            }
        }
        else
        {
            printf("No data[0x%x]\n", nRet);
        }
        if(NULL != stOutFrame.pBufAddr)
        {
            nRet = MV_CC_FreeImageBuffer(pUser, &stOutFrame);
            if(nRet != MV_OK)
            {
                printf("Free Image Buffer fail! nRet [0x%x]\n", nRet);
            }
            // Fix: clear the stale pointer so a failed grab on the next
            // iteration cannot lead to freeing the same buffer twice.
            stOutFrame.pBufAddr = NULL;
        }
        if(g_bExit)
        {
            break;
        }
    }

    return 0;
}

// Cache the UWB system clock from Linktrack nodeframe3 messages; read by
// calculatePose() when stamping outgoing observations.
void UWB_frame3_callback(const nlink_parser::LinktrackNodeframe3::ConstPtr &msg)
{
    g_systemTime = msg->system_time;
}

// Cache the UWB system clock from Linktrack nodeframe2 messages (same
// destination as the frame3 callback; whichever arrives last wins).
void UWB_frame2_callback(const nlink_parser::LinktrackNodeframe2::ConstPtr &msg)
{
    g_systemTime = msg->system_time;
}

// Store the latest Linktrack nodeframe0 message (raw per-node data payloads);
// consumed by calculatePose() during calibration.
void UWB_frame0_callback(const nlink_parser::LinktrackNodeframe0::ConstPtr &msg)
{
    g_frame0 = *msg;
}

// Node entry point: sets up ROS pub/sub, then walks the MVS camera SDK
// lifecycle (enumerate -> create handle -> open -> configure -> start
// grabbing -> spawn grab thread), waits for shutdown, and tears the camera
// down in reverse order. The do/while(0) is a single-pass block so any
// setup failure can `break` straight to cleanup.
int main(int argc, char *argv[])
{
    int nRet = MV_OK;
    void* handle = NULL;

    ros::init(argc, argv, "pnp_MV_cam");
    ros::NodeHandle nh;
    pose_pub = nh.advertise<geometry_msgs::PoseStamped>("/mavros/vision_pose/pose", 1);
    pose_raw_pub = nh.advertise<geometry_msgs::PoseStamped>("/cam_raw/pose", 1);
    uwb_data_pub = nh.advertise<std_msgs::String>("/nlink_linktrack_data_transmission", 2);
    ros::Subscriber UWB_frame0_sub = nh.subscribe<nlink_parser::LinktrackNodeframe0>("/nlink_linktrack_nodeframe0",1,UWB_frame0_callback);
    ros::Subscriber UWB_frame2_sub = nh.subscribe<nlink_parser::LinktrackNodeframe2>("/nlink_linktrack_nodeframe2",1,UWB_frame2_callback);
    ros::Subscriber UWB_frame3_sub = nh.subscribe<nlink_parser::LinktrackNodeframe3>("/nlink_linktrack_nodeframe3",1,UWB_frame3_callback);

    do 
    {
        // ch:枚举设备 | en:Enum device
        MV_CC_DEVICE_INFO_LIST stDeviceList;
        memset(&stDeviceList, 0, sizeof(MV_CC_DEVICE_INFO_LIST));
        nRet = MV_CC_EnumDevices(MV_GIGE_DEVICE | MV_USB_DEVICE, &stDeviceList);
        if (MV_OK != nRet)
        {
            printf("Enum Devices fail! nRet [0x%x]\n", nRet);
            break;
        }

        if (stDeviceList.nDeviceNum > 0)
        {
            for (unsigned int i = 0; i < stDeviceList.nDeviceNum; i++)
            {
                printf("[device %d]:\n", i);
                MV_CC_DEVICE_INFO* pDeviceInfo = stDeviceList.pDeviceInfo[i];
                if (NULL == pDeviceInfo)
                {
                    break;
                } 
                PrintDeviceInfo(pDeviceInfo);            
            }  
        } 
        else
        {
            printf("Find No Devices!\n");
            break;
        }

        printf("Please Intput camera index:");
        unsigned int nIndex = 0;
        // NOTE(review): the scanf below is commented out, so the prompt above
        // is cosmetic and device 0 is always selected.
        // scanf("%d", &nIndex);

        if (nIndex >= stDeviceList.nDeviceNum)
        {
            printf("Intput error!\n");
            break;
        }

        // ch:选择设备并创建句柄 | en:Select device and create handle
        nRet = MV_CC_CreateHandle(&handle, stDeviceList.pDeviceInfo[nIndex]);
        if (MV_OK != nRet)
        {
            printf("Create Handle fail! nRet [0x%x]\n", nRet);
            break;
        }

        // ch:打开设备 | en:Open device
        nRet = MV_CC_OpenDevice(handle);
        if (MV_OK != nRet)
        {
            printf("Open Device fail! nRet [0x%x]\n", nRet);
            break;
        }

        // Configure exposure: disable auto exposure, then apply the fixed
        // exposure time from the ROS parameter server.
        nRet = MV_CC_SetEnumValue(handle, "ExposureAuto", 0);    // 0: Off
        if (MV_OK == nRet)
        {
            printf("set ExposureAuto OK!\n\n");
        }
        else
        {
            printf("set ExposureAuto failed! nRet [%x]\n\n", nRet);
        }
        
        double exposureTime;
        if(!ros::param::get("camera_exposure_time", exposureTime))
        {
            ROS_ERROR("ros::param::get(\"camera_exposure_time\", exposureTime) FAILED!!! ");
            return 0;
        }
        nRet = MV_CC_SetFloatValue(handle, "ExposureTime", (float)exposureTime);    
        if (MV_OK == nRet)
        {
            printf("set ExposureTime OK!\n\n");
        }
        else
        {
            printf("set ExposureTime failed! nRet [%x]\n\n", nRet);
        }
        
        // Frame rate comes from the parameter server as well; missing params
        // abort startup.
        double camera_frame_rate;
        if(!ros::param::get("camera_frame_rate", camera_frame_rate))
        {
            ROS_ERROR("ros::param::get(\"camera_frame_rate\", camera_frame_rate) FAILED!!! ");
            return 0;
        }

        nRet = MV_CC_SetFloatValue(handle, "AcquisitionFrameRate", (float)camera_frame_rate);    // 0: Off
        if (MV_OK == nRet)
        {
            printf("set AcquisitionFrameRate OK!\n\n");
        }
        else
        {
            printf("set AcquisitionFrameRate failed! nRet [%x]\n\n", nRet);
        }


        // nRet = MV_CC_SetIntValue(handle, "GevSCPD", 0);    
        // if (MV_OK == nRet)
        // {
        //     printf("set GevSCPD OK!\n\n");
        // }
        // else
        // {
        //     printf("set GevSCPD failed! nRet [%x]\n\n", nRet);
        // }


        // ch:探测网络最佳包大小(只对GigE相机有效) | en:Detection network optimal package size(It only works for the GigE camera)
        if (stDeviceList.pDeviceInfo[nIndex]->nTLayerType == MV_GIGE_DEVICE)
        {
            int nPacketSize = MV_CC_GetOptimalPacketSize(handle);
            if (nPacketSize > 0)
            {
                nRet = MV_CC_SetIntValue(handle,"GevSCPSPacketSize",nPacketSize);
                if(nRet != MV_OK)
                {
                    printf("Warning: Set Packet Size fail nRet [0x%x]!\n", nRet);
                }
            }
            else
            {
                printf("Warning: Get Packet Size fail nRet [0x%x]!\n", nPacketSize);
            }
        }

        // ch:设置触发模式为off | en:Set trigger mode as off
        nRet = MV_CC_SetEnumValue(handle, "TriggerMode", 0);
        if (MV_OK != nRet)
        {
            printf("Set Trigger Mode fail! nRet [0x%x]\n", nRet);
            break;
        }

        // ch:获取数据包大小 | en:Get payload size
        MVCC_INTVALUE stParam;
        memset(&stParam, 0, sizeof(MVCC_INTVALUE));
        nRet = MV_CC_GetIntValue(handle, "PayloadSize", &stParam);
        if (MV_OK != nRet)
        {
            printf("Get PayloadSize fail! nRet [0x%x]\n", nRet);
            break;
        }
        g_nPayloadSize = stParam.nCurValue;

        // ch:开始取流 | en:Start grab image
        nRet = MV_CC_StartGrabbing(handle);
        if (MV_OK != nRet)
        {
            printf("Start Grabbing fail! nRet [0x%x]\n", nRet);
            break;
        }

        // Spawn the grab/processing thread.
        // NOTE(review): the thread is never joined; shutdown relies on the
        // loop below exiting and the thread observing g_bExit / ros::ok().
		pthread_t nThreadID;
        nRet = pthread_create(&nThreadID, NULL ,WorkThread , handle);
        if (nRet != 0)
        {
            printf("thread create failed.ret = %d\n",nRet);
            break;
        }

        //PressEnterToExit();
        // Idle until ROS shuts down or the grab thread raises g_bExit (ESC).
        while(ros::ok() && !g_bExit)
        {
            sleep(1);
        }

        // ch:停止取流 | en:Stop grab image
        nRet = MV_CC_StopGrabbing(handle);
        if (MV_OK != nRet)
        {
            printf("Stop Grabbing fail! nRet [0x%x]\n", nRet);
            break;
        }

        // ch:关闭设备 | Close device
        nRet = MV_CC_CloseDevice(handle);
        if (MV_OK != nRet)
        {
            printf("ClosDevice fail! nRet [0x%x]\n", nRet);
            break;
        }

        // ch:销毁句柄 | Destroy handle
        nRet = MV_CC_DestroyHandle(handle);
        if (MV_OK != nRet)
        {
            printf("Destroy Handle fail! nRet [0x%x]\n", nRet);
            break;
        }
    } while (0);
    

    // If any step above failed, make sure the SDK handle is destroyed.
    if (nRet != MV_OK)
    {
        if (handle != NULL)
        {
            MV_CC_DestroyHandle(handle);
            handle = NULL;
        }
    }

    printf("exit.\n");

    return 0;
}
