//
// Created by xuwj on 2021/11/1.
//
#include "visual_frames_system.h"
#include "iomanip"

namespace VNavAssistant{

    //-----------------------------------------------------------------------------------------------------------------------------------
    /**
     * Stores the TF frame names and creates the publisher used to broadcast
     * the map->odom transform.
     * @param cam_f            camera TF frame name
     * @param odom_f           odometry TF frame name
     * @param map_f            map TF frame name
     * @param base_f           base_link TF frame name
     * @param base_footprint_f base_footprint TF frame name
     */
    PoseRectifySystem::PoseRectifySystem(const string cam_f, const string odom_f,
                       const string map_f, const string base_f, const string base_footprint_f)
    :map_frame(map_f),odom_frame(odom_f),base_footprint_frame(base_footprint_f),
    base_link_frame(base_f),camera_frame(cam_f){
        // NOTE(review): raw owning pointer with no visible delete — presumably
        // the object lives for the whole process; confirm, or use unique_ptr.
        pTF_pub = new TFPublisher(map_frame,odom_frame);
    }

    /**
     * SLAM pose callback: receives the camera pose expressed in the map frame,
     * composes it with the camera->odom transform from the TF tree and
     * broadcasts the resulting map->odom transform.
     * @param msg stamped camera pose in the map frame
     */
    void PoseRectifySystem::SLAMPoseCallback(const geometry_msgs::PoseStamped_<allocator<void>>::ConstPtr &msg) {
        // Convert the ROS pose message into a tf transform: map -> camera.
        const auto &ori = msg->pose.orientation;
        const auto &pos = msg->pose.position;
        tf::Transform map_to_cam(tf::Quaternion(ori.x, ori.y, ori.z, ori.w),
                                 tf::Vector3(pos.x, pos.y, pos.z));

        // Query the latest camera -> odom transform from the TF tree.
        tf::StampedTransform cam_to_odom;
        try {
            listener.waitForTransform(camera_frame, odom_frame, ros::Time(0), ros::Duration(2.0));
            listener.lookupTransform(camera_frame, odom_frame, ros::Time(0), cam_to_odom);
        }
        catch (tf::TransformException &tf_exception){
            ROS_ERROR("%s",tf_exception.what());
            ros::Duration(1.0).sleep();
            return;
        }

        // map -> odom = (map -> camera) * (camera -> odom); broadcast it.
        pTF_pub->publishTF(map_to_cam * cam_to_odom, map_frame, odom_frame);
    }

    //---------------------------------------------------------------------------------------------------------------------------------------

    /**
     * Builds one map->cam0 candidate pose from a tag observation paired with a
     * visual-odometry pose, and caches its Eigen quaternion/translation form.
     * @param T_camk_map_  camk -> map transform (from the tag detection)
     * @param T_cam0_camk_ cam0 -> camk transform (from visual odometry)
     * @param time         sample timestamp in seconds
     */
    PoseManager::PoseManager(const cv::Mat T_camk_map_, const cv::Mat T_cam0_camk_, const double time) :
    T_camk_map(T_camk_map_),T_cam0_camk(T_cam0_camk_), timestamp(time), is_good_(false){

        // Chain the transforms: cam0 -> map.
        cv::Mat T_cam0_map = T_cam0_camk_ * T_camk_map_;

        // Invert the rigid transform analytically: R' = R^T, t' = -R^T * t.
        cv::Mat R_inv = T_cam0_map.rowRange(0,3).colRange(0,3).t();
        cv::Mat t_inv = -R_inv * T_cam0_map.rowRange(0,3).col(3);

        T_map_cam0 = cv::Mat::eye(4, 4, CV_32F);
        R_inv.copyTo(T_map_cam0.rowRange(0,3).colRange(0,3));
        t_inv.copyTo(T_map_cam0.rowRange(0,3).col(3));

        // Eigen forms used later by the statistical grouping.
        quat = Eigen::Quaterniond(Converter::toMatrix3d(T_map_cam0.colRange(0,3).rowRange(0,3)));
        t = Converter::toVector3d(T_map_cam0.col(3).rowRange(0,3));
    }

    /**
     * Folds one raw pose sample into the running component-wise averages.
     * @param q quaternion components in [x, y, z, w] order
     * @param t translation components in [x, y, z] order
     * NOTE(review): this is a component-wise mean, not a proper quaternion
     * average — presumably acceptable because grouped quaternions are nearly
     * identical; confirm against the grouping thresholds.
     */
    void PoseStats::push_back(const float *q, const float *t) {

        // Hoist the divisor, matching the Eigen overload of this method.
        const double inv = 1.0 / (counts + 1);
        for(int i=0; i<4; ++i){
            avg_q[i] = (avg_q[i]*counts + q[i]) * inv;
        }
        for(int i=0; i<3; ++i){
            avg_t[i] = (avg_t[i]*counts + t[i]) * inv;
        }
        counts++;
    }

    /**
     * Folds one Eigen pose sample into the running component-wise averages.
     * Quaternion components are stored in [x, y, z, w] order.
     * @param q sample quaternion
     * @param t sample translation
     */
    void PoseStats::push_back(const Eigen::Quaterniond &q, const Eigen::Vector3d &t) {

        const double inv = 1.0 / (counts + 1);

        const double q_in[4] = { q.x(), q.y(), q.z(), q.w() };
        for(int i = 0; i < 4; ++i)
            avg_q[i] = ( avg_q[i]*counts + q_in[i] ) * inv;

        const double t_in[3] = { t.x(), t.y(), t.z() };
        for(int i = 0; i < 3; ++i)
            avg_t[i] = ( avg_t[i]*counts + t_in[i] ) * inv;

        counts++;
    }

    /**
     * Configures the per-component thresholds used to decide whether a pose
     * belongs to an existing group, and resets the top-two group bookkeeping.
     * @param th_Q_xyz max |Δ| for quaternion x/y/z components within a group
     * @param th_Q_w   max |Δ| for the quaternion w component
     * @param th_t_xy  max |Δ| for translation x/y within a group
     * @param th_t_z   max |Δ| for translation z
     */
    PoseStatisticalGrouping::PoseStatisticalGrouping(const float th_Q_xyz, const float th_Q_w, const float th_t_xy, const float th_t_z) :
            th_rotation_xyz(th_Q_xyz), th_rotation_w(th_Q_w), th_translation_xy(th_t_xy), th_translation_z(th_t_z){
        max_index = 0;
        max_counts = 0;
        second_max_index = 0;
        second_max_counts = 0;
    }

    void PoseStatisticalGrouping::dealWith(int index, const PoseManager &pm) {

        auto cur_q = pm.quat;
        auto cur_t = pm.t;
        for(int i=0; i<vGroupPoses.size(); ++i){
            auto temp_q = vGroupPoses[i].avg_q;
            auto temp_t = vGroupPoses[i].avg_t;
            if( (fabs(cur_q.x() - temp_q[0]) > th_rotation_xyz ) || (fabs(cur_q.y() - temp_q[1]) > th_rotation_xyz)
                || (fabs(cur_q.z() - temp_q[2]) > th_rotation_xyz ) || (fabs(cur_q.w() - temp_q[3]) > th_rotation_w) ){
                continue;
            }

            // TODO: 是否需要检查平移部分的差距
            if( (fabs(cur_t.x() - temp_t[0]) > th_translation_xy ) || (fabs(cur_t.y() - temp_t[1]) > th_translation_xy)
                || (fabs(cur_t.z() - temp_t[2]) > th_translation_z ) ){
                continue;
            }

            vGroupPoses[i].push_back(cur_q, cur_t);
            vGroupIndices[i].push_back(index);
            if(vGroupIndices[i].size() > max_counts){
                max_counts = vGroupIndices[i].size();
                max_index = i;
            }else if(vGroupIndices[i].size() > second_max_counts){
                second_max_counts = vGroupIndices[i].size();
                second_max_index = i;
            }
            return;
        }

        if(!vGroupPoses.empty()) {
            cout << "New Group:  " << cur_q.x() << "\t" << cur_q.y() << "\t" << cur_q.z() << "\t" << cur_q.w() << "\n";
            cout << "\t" << cur_t.x() << "\t" << cur_t.y() << "\t" << cur_t.z() << endl;
            cout << "Old Max: " << vGroupPoses[max_index].avg_q[0]<<"\t"<< vGroupPoses[max_index].avg_q[1]<<"\t"<< vGroupPoses[max_index].avg_q[2]<<"\t"<< vGroupPoses[max_index].avg_q[3]<<"\n";
            cout << "\t" <<vGroupPoses[max_index].avg_t[0]<< "\t" <<vGroupPoses[max_index].avg_t[1]<< "\t" <<vGroupPoses[max_index].avg_t[2]<<"\n\n";
        }

        //旧分组不合适， 创建新的分组
        vector<int> tmp_indices;
        tmp_indices.emplace_back(index);
        vGroupIndices.emplace_back(tmp_indices);
        PoseStats tmp_stat;
        tmp_stat.push_back(cur_q, cur_t);
        vGroupPoses.emplace_back(tmp_stat);

    }

    /**
     * Sets up the TF broadcaster, the tag-pose subscriber, and an identity
     * map->cam0 transform; reads the transform-expiration tolerance from the
     * OpenCV-YAML config file.
     * @param nh          node handle used by the tag subscriber
     * @param config_path settings file containing "TransTimeTolerance"
     *                    (seconds added to the broadcast TF stamp)
     * @param tag_topic   topic carrying tag odometry
     * @param buff_size   subscriber queue size
     * @param cam_f,odom_f,map_f,base_f,base_footprint_f  TF frame names
     */
    VisualOdomTFManager::VisualOdomTFManager(ros::NodeHandle &nh, const string config_path, const string tag_topic, int buff_size,
                                             const string cam_f, const string odom_f, const string map_f, const string base_f, const string base_footprint_f)
        :map_frame(map_f),odom_frame(odom_f),base_footprint_frame(base_footprint_f),
         base_link_frame(base_f),camera_frame(cam_f), stuck_count(0){

        // NOTE(review): raw owning pointer, never deleted here — presumably
        // lives for the whole process; consider std::unique_ptr.
        pTF_pub = new tf::TransformBroadcaster();

        // cv::FileStorage::READ replaces the legacy CV_STORAGE_READ macro,
        // which was removed in OpenCV 4.
        cv::FileStorage fSettings(config_path, cv::FileStorage::READ);
        if(!fSettings.isOpened()){
            // Keep going with whatever default trans_time_tolerance holds, but
            // make the misconfiguration visible instead of failing silently.
            ROS_ERROR("VisualOdomTFManager: cannot open config file %s", config_path.c_str());
        }
        fSettings["TransTimeTolerance"] >> trans_time_tolerance;

        pTagPoseSubscriber = make_shared<TagItems::OdomSubscriber>(nh, tag_topic, buff_size, "", 64);

        initialized = false;
        {
            std::unique_lock<std::mutex> lock(mutexPoseMap);
            mT_map_cam0 = cv::Mat::eye(4,4,CV_32F);
            trans_map_cam0.setIdentity();
        }

    }

    /**
     * Per-frame driver: broadcasts the map->odom transform for every incoming
     * visual-odometry pose.
     *
     * First consumes any pending paired tag/visual data to (re)estimate the
     * map->cam0 transform, then chains
     *     map->odom = map->cam0 * cam0->camk * camera->odom
     * and publishes it with a slightly-future stamp.
     * @param T_cam0_camk pose of the current camera frame in the cam0 frame
     * @param timestamp   message time in seconds
     * @param tstamp      the same time as ros::Time (currently unused)
     */
    void VisualOdomTFManager::MainProcess(const cv::Mat T_cam0_camk, const double timestamp, ros::Time tstamp){

        cur_time = timestamp;
        // Refresh map->cam0 whenever enough paired samples are available.
        if(CheckDataQueue()){
            initialized ? CorrectPose() : Initialize();
        }

        // Latest camera -> odom transform from the TF tree.
        tf::StampedTransform cam_to_odom;
        try {
            listener.waitForTransform(camera_frame, odom_frame, ros::Time(0), ros::Duration(2.0));
            listener.lookupTransform(camera_frame, odom_frame, ros::Time(0), cam_to_odom);
        }
        catch (tf::TransformException &tf_exception){
            ROS_ERROR("%s",tf_exception.what());
            ros::Duration(1.0).sleep();
            return;
        }

        // Convert the cv::Mat pose into a tf transform: cam0 -> camk.
        Eigen::Quaterniond Q(Converter::toMatrix3d(T_cam0_camk.rowRange(0,3).colRange(0,3)));
        cv::Mat trans = T_cam0_camk.rowRange(0,3).col(3);
        tf::Transform cam0_to_camk(tf::Quaternion(Q.x(),Q.y(),Q.z(),Q.w()),
                                   tf::Vector3(trans.at<float>(0), trans.at<float>(1), trans.at<float>(2)));

        tf::Transform map_to_odom;
        {
            std::unique_lock<std::mutex> lock(mutexPoseMap);
            map_to_odom = trans_map_cam0 * cam0_to_camk * cam_to_odom;
        }

        // Stamp slightly into the future so TF consumers tolerate latency.
        ros::Time transform_expiration = ros::Time().fromSec(ros::Time::now().toSec() + trans_time_tolerance);
        pTF_pub->sendTransform(tf::StampedTransform(map_to_odom, transform_expiration, map_frame, odom_frame));
    }

    /**
     * One-off estimation of the map->cam0 transform from the buffered paired
     * tag/visual poses (mvNeedProcessData_).
     *
     * Two-stage robust averaging:
     *   1. coarse grouping with loose thresholds to reject tag-odom jumps;
     *   2. refined grouping with tight thresholds over the winning group.
     * On success: writes the averaged pose into mT_map_cam0 / trans_map_cam0
     * (under mutexPoseMap), sets `initialized`, records the time and clears
     * the buffer.  On an ambiguous or underpopulated result: clears the buffer
     * (or keeps only the good samples for a few rounds, via stuck_count) and
     * returns without touching the stored pose.
     */
    void VisualOdomTFManager::Initialize() {

        // Coarse pass: group each candidate T_map_cam0 with statistically similar
        // ones; samples far from the majority come from tag-odometry jumps.
        PoseStatisticalGrouping poseGrouping(0.1, 0.1, 0.6, 0.6);
        for(int i=0; i<mvNeedProcessData_.size(); ++i){
            poseGrouping.dealWith(i, mvNeedProcessData_[i]);
        }

        // The biggest group must beat the runner-up by a 10%-of-samples margin,
        // otherwise the estimate is ambiguous: dump diagnostics and start over.
        if(poseGrouping.max_counts < poseGrouping.second_max_counts + mvNeedProcessData_.size()*0.1){
            cout<<"The best pose is ambigous, total poses: "<<mvNeedProcessData_.size()<< endl <<
            "The biggest group: "<< poseGrouping.max_counts<<" poses.\nAverage:  "<<
            poseGrouping.vGroupPoses[poseGrouping.max_index].avg_q[0]<<"\t"<<poseGrouping.vGroupPoses[poseGrouping.max_index].avg_q[1]<<"\t"<<poseGrouping.vGroupPoses[poseGrouping.max_index].avg_q[2]<<"\t"<<poseGrouping.vGroupPoses[poseGrouping.max_index].avg_q[3]<<"\n"<<
            poseGrouping.vGroupPoses[poseGrouping.max_index].avg_t[0]<<"\t"<<poseGrouping.vGroupPoses[poseGrouping.max_index].avg_t[1]<<"\t"<<poseGrouping.vGroupPoses[poseGrouping.max_index].avg_t[2]<<"\n"<<
            "The second big group: "<< poseGrouping.second_max_counts<<" poses.\nAverage:  "<<
            poseGrouping.vGroupPoses[poseGrouping.second_max_index].avg_q[0]<<"\t"<<poseGrouping.vGroupPoses[poseGrouping.second_max_index].avg_q[1]<<"\t"<<poseGrouping.vGroupPoses[poseGrouping.second_max_index].avg_q[2]<<"\t"<<poseGrouping.vGroupPoses[poseGrouping.second_max_index].avg_q[3]<<"\n"<<
            poseGrouping.vGroupPoses[poseGrouping.second_max_index].avg_t[0]<<"\t"<<poseGrouping.vGroupPoses[poseGrouping.second_max_index].avg_t[1]<<"\t"<<poseGrouping.vGroupPoses[poseGrouping.second_max_index].avg_t[2]<<"\n\n";
            mvNeedProcessData_.clear();
            return;
        }

        // Require at least 20% of the samples to agree.
        if(poseGrouping.max_counts < mvNeedProcessData_.size() * 0.2){
            cout<<"There are only "<<poseGrouping.max_counts <<" / "<<mvNeedProcessData_.size()<<" good data , Initialization Failed.\n\n";
            mvNeedProcessData_.clear();
            return;
        }

        // Mark the members of the winning group; everything else is discarded.
        vector<int> good_indices = poseGrouping.vGroupIndices[poseGrouping.max_index];
        for(int i=0; i<good_indices.size(); ++i){
            mvNeedProcessData_[good_indices[i]].is_good_ = true;
        }
        int deserted_count = mvNeedProcessData_.size() - good_indices.size();
        cout<< deserted_count << " extremely bad tag-pose have been deserted. \n";

        // Refinement pass: regroup the survivors with much tighter thresholds
        // to drop samples that stray from the statistical average.
        PoseStatisticalGrouping refineGrouping(0.01, 0.008, 0.2, 0.2);
        for(int i=0; i<good_indices.size(); ++i){
            refineGrouping.dealWith(good_indices[i], mvNeedProcessData_[good_indices[i]]);
        }

        // If too few samples survive refinement, keep the good ones and wait
        // for more data — but only for a few rounds (stuck_count) before
        // flushing everything and restarting.
        if(refineGrouping.max_counts < mvNeedProcessData_.size() * 0.1){
            cout<<" The Initializtion result is not reliable enough, \n"
                  "Good/All: "<<refineGrouping.max_counts<<" / "<<mvNeedProcessData_.size() <<"waiting for more data.\n";
            if(deserted_count < mvNeedProcessData_.size()*0.1)
                stuck_count++;

            if(stuck_count < 4){
                vector<PoseManager> tmp_mem = mvNeedProcessData_;
                mvNeedProcessData_.clear();
                for(int i = 0; i<tmp_mem.size(); ++i){
                    if(!tmp_mem[i].is_good_)
                        continue;
                    mvNeedProcessData_.push_back(tmp_mem[i]);
                }
            }
            else{
                mvNeedProcessData_.clear();
                stuck_count = 0;
            }

            return;
        }

        // Accept the refined group's component-wise average as the transform.
        PoseStats best_poses = refineGrouping.vGroupPoses[refineGrouping.max_index];

        // avg_q is stored [x, y, z, w]; Eigen's constructor takes (w, x, y, z).
        Eigen::Quaterniond eigQ(best_poses.avg_q[3], best_poses.avg_q[0], best_poses.avg_q[1], best_poses.avg_q[2]);
        eigQ.normalize();
        cv::Mat cvR = Converter::toCvMat(eigQ.toRotationMatrix());
        cv::Mat cvt = (cv::Mat_<float>(3,1) << best_poses.avg_t[0], best_poses.avg_t[1], best_poses.avg_t[2]);

        {
            std::unique_lock<std::mutex> lock(mutexPoseMap);
            cvR.copyTo(mT_map_cam0.colRange(0, 3).rowRange(0, 3));
            cvt.copyTo(mT_map_cam0.col(3).rowRange(0, 3));
            trans_map_cam0.setRotation(tf::Quaternion(eigQ.x(), eigQ.y(), eigQ.z(), eigQ.w()));
            trans_map_cam0.setOrigin(tf::Vector3(best_poses.avg_t[0], best_poses.avg_t[1], best_poses.avg_t[2]));
        }

        initialized = true;
        last_corrected_time = mvNeedProcessData_[mvNeedProcessData_.size()-1].timestamp;
        mvNeedProcessData_.clear();
        stuck_count = 0;
        cout<<"\nTRANSFORM between map and cam-world frames has been INITIALIZED !\n"<<endl;
    }

    /**
     * Periodic re-estimation of map->cam0 after initialization; same two-stage
     * robust-averaging scheme as Initialize() but with tighter coarse
     * translation thresholds and briefer diagnostics.
     * On success: updates mT_map_cam0 / trans_map_cam0 (under mutexPoseMap),
     * records the time and clears the buffer.  Otherwise: clears the buffer
     * (or keeps the good samples for a few rounds via stuck_count) and leaves
     * the stored pose untouched.
     */
    void VisualOdomTFManager::CorrectPose() {

        cout<<"Trying to correct pose.\n";
        // Coarse pass: group each candidate T_map_cam0 with statistically similar
        // ones; samples far from the majority come from tag-odometry jumps.
        PoseStatisticalGrouping poseGrouping(0.1, 0.1, 0.3, 0.5);
        for(int i=0; i<mvNeedProcessData_.size(); ++i){
            poseGrouping.dealWith(i, mvNeedProcessData_[i]);
        }

        // The biggest group must beat the runner-up by a 10%-of-samples margin.
        if(poseGrouping.max_counts < poseGrouping.second_max_counts + mvNeedProcessData_.size()*0.1){
            cout<<"The best pose is ambigous, total poses: "<<mvNeedProcessData_.size()<< endl <<
                "The biggest group: "<< poseGrouping.max_counts<<" poses.\n"<<
                "The second big group: "<< poseGrouping.second_max_counts<<" poses.\n\n";
            mvNeedProcessData_.clear();
            return;
        }
        // Require at least 20% of the samples to agree.
        if(poseGrouping.max_counts < mvNeedProcessData_.size() * 0.2){
            cout<<"There are only "<<poseGrouping.max_counts <<" / "<<mvNeedProcessData_.size()<<" good data , Correcting delayed.\n\n";
            mvNeedProcessData_.clear();
            return;
        }

        // Mark the members of the winning group; everything else is discarded.
        vector<int> good_indices = poseGrouping.vGroupIndices[poseGrouping.max_index];
        for(int i=0; i<good_indices.size(); ++i){
            mvNeedProcessData_[good_indices[i]].is_good_ = true;
        }
        int deserted_count = mvNeedProcessData_.size() - good_indices.size();
        cout<< deserted_count << " extremely bad tag-pose have been deserted. \n";

        // Refinement pass: regroup the survivors with much tighter thresholds
        // to drop samples that stray from the statistical average.
        PoseStatisticalGrouping refineGrouping(0.015, 0.008, 0.2, 0.2);
        for(int i=0; i<good_indices.size(); ++i){
            refineGrouping.dealWith(good_indices[i], mvNeedProcessData_[good_indices[i]]);
        }

        // If too few samples survive refinement, keep the good ones and wait
        // for more data — but only for a few rounds (stuck_count) before
        // flushing everything.
        if(refineGrouping.max_counts < mvNeedProcessData_.size() * 0.1){
            cout<<" The Corrected Pose-Result is not reliable enough, waiting for more data.\n";
            if(deserted_count < mvNeedProcessData_.size()*0.1)
                stuck_count++;

            if(stuck_count < 4){
                vector<PoseManager> tmp_mem = mvNeedProcessData_;
                mvNeedProcessData_.clear();
                for(int i = 0; i<tmp_mem.size(); ++i){
                    if(!tmp_mem[i].is_good_)
                        continue;
                    mvNeedProcessData_.push_back(tmp_mem[i]);
                }
            }
            else{
                mvNeedProcessData_.clear();
                stuck_count = 0;
            }

            return;
        }

        // Accept the refined group's component-wise average as the transform.
        PoseStats best_poses = refineGrouping.vGroupPoses[refineGrouping.max_index];

        // avg_q is stored [x, y, z, w]; Eigen's constructor takes (w, x, y, z).
        Eigen::Quaterniond eigQ(best_poses.avg_q[3], best_poses.avg_q[0], best_poses.avg_q[1], best_poses.avg_q[2]);
        eigQ.normalize();
        cv::Mat cvR = Converter::toCvMat(eigQ.toRotationMatrix());
        cv::Mat cvt = (cv::Mat_<float>(3,1) << best_poses.avg_t[0], best_poses.avg_t[1], best_poses.avg_t[2]);

        {
            std::unique_lock<std::mutex> lock(mutexPoseMap);
            cvR.copyTo(mT_map_cam0.colRange(0, 3).rowRange(0, 3));
            cvt.copyTo(mT_map_cam0.col(3).rowRange(0, 3));
            trans_map_cam0.setRotation(tf::Quaternion(eigQ.x(), eigQ.y(), eigQ.z(), eigQ.w()));
            trans_map_cam0.setOrigin(tf::Vector3(best_poses.avg_t[0], best_poses.avg_t[1], best_poses.avg_t[2]));
        }

        last_corrected_time = mvNeedProcessData_[mvNeedProcessData_.size()-1].timestamp;
        mvNeedProcessData_.clear();

        cout<<"\nTRANSFORM between map and cam-world frames CORRECTED !\n"<<endl;

    }

    /**
     * Pulls new tag poses, pairs them by exact timestamp with buffered visual
     * poses, and reports whether enough pairs have accumulated for an
     * initialization / correction round.
     *
     * Fixes:
     *  - the tag-buffer check compared an unsigned size() against
     *    `tag_data_size - num_got`; if num_got ever exceeded tag_data_size the
     *    subtraction would wrap to a huge unsigned value and pairing would
     *    stall forever — compare as signed ints instead;
     *  - the pairing loop's final branch was `else if(==)`, leaving a
     *    non-advancing spin path if no comparison held (e.g. NaN timestamp);
     *    after `<` and `>` are excluded a plain `else` is equivalent and safe.
     *
     * @return true when mvNeedProcessData_ holds at least tag_data_size pairs
     */
    bool VisualOdomTFManager::CheckDataQueue() {

        pTagPoseSubscriber->ParseTagData(tagOdom_dataBuf_);

        // Too soon since the last correction: drop everything and wait.
        if(cur_time - last_corrected_time < 5.0){
            tagOdom_dataBuf_.clear();
            mvNeedProcessData_.clear();
            return false;
        }

        int visual_data_size, tag_data_size;
        if(initialized){
            visual_data_size = 80;
            tag_data_size = 40;
        }
        // Before initialization more data is required for a robust estimate.
        else{
            visual_data_size = 120;
            tag_data_size = 60;
        }


        if( !mvNeedProcessData_.empty()){

            // Discard pairings whose time span is stale (older than 7 s).
            if( (mvNeedProcessData_[mvNeedProcessData_.size()-1].timestamp - mvNeedProcessData_[0].timestamp) > 7.0 ){
                mvNeedProcessData_.clear();
                cout<<" Clearing those really old data...\n";
                return false;
            }
        }

        // Not enough tag data yet: keep waiting, trimming the visual buffer so
        // it cannot grow without bound.  Signed comparison avoids unsigned
        // wrap-around when num_got exceeds tag_data_size.
        int num_got = (int)mvNeedProcessData_.size();
        if((int)tagOdom_dataBuf_.size() < tag_data_size - num_got){
            while((int)visualPose_dataBuf_.size() > visual_data_size){
                visualPose_dataBuf_.pop_front();
            }

            return false;
        }

        // XU TODO: should there be an update cooldown to prevent the pose from
        // jumping frequently?


        // Enough data: pair tag and visual poses by exact timestamp, consuming
        // both buffers from the front as we go.
        auto iter_tag = tagOdom_dataBuf_.begin();
        auto iter_Vpose = visualPose_dataBuf_.begin();
        while(iter_tag != tagOdom_dataBuf_.end() && iter_Vpose != visualPose_dataBuf_.end()){

            if(iter_tag->timestamp_ > iter_Vpose->timestamp_ ){
                iter_Vpose++;
                visualPose_dataBuf_.pop_front();
            }
            else if(iter_Vpose ->timestamp_ > iter_tag->timestamp_){
                iter_tag++;
                tagOdom_dataBuf_.pop_front();
            }
            else{   // equal timestamps: a matched pair
                PoseManager pm(iter_tag->Tcam2map, iter_Vpose->T_cam0_camk_, iter_tag->timestamp_);
                mvNeedProcessData_.push_back(pm);
                iter_tag++;
                iter_Vpose++;
                tagOdom_dataBuf_.pop_front();
                visualPose_dataBuf_.pop_front();
            }
        }

        if((int)mvNeedProcessData_.size() < tag_data_size)
            return false;

        return true;
    }


    /**
     * ORB-SLAM odometry callback: buffers the incoming cam0->camk pose, caches
     * it as the current visual pose, and runs the main TF pipeline.
     * @param msg stamped camera pose in the cam0 (visual world) frame
     */
    void VisualOdomTFManager::OrbOdomPoseCallback(const geometry_msgs::PoseStamped::ConstPtr &msg) {

        const auto &ori = msg->pose.orientation;
        const auto &pos = msg->pose.position;
        // Eigen's quaternion constructor takes (w, x, y, z).
        Eigen::Quaterniond Q_cam0_camk(ori.w, ori.x, ori.y, ori.z);
        Eigen::Vector3d t_cam0_camk(pos.x, pos.y, pos.z);
        Eigen::Matrix3d R_cam0_camk = Q_cam0_camk.toRotationMatrix();

        cv::Mat T_cam0_camk = Converter::toCvSE3(R_cam0_camk, t_cam0_camk);
        const double stamp_sec = msg->header.stamp.toSec();
        visualPose_dataBuf_.push_back(PoseWithTime(T_cam0_camk, stamp_sec));

        {
            std::unique_lock<std::mutex> lock(mutexPoseCam);
            curT_cam0_camk = T_cam0_camk;
        }

        MainProcess(T_cam0_camk, stamp_sec, msg->header.stamp);
    }

    /**
     * VINS odometry callback: buffers the incoming cam0->camk pose, caches it
     * as the current visual pose, and runs the main TF pipeline.
     *
     * Bug fix: the quaternion was constructed as (orientation.x, orientation.x,
     * orientation.y, orientation.z) — orientation.w was never used.
     * Eigen::Quaterniond expects (w, x, y, z), matching OrbOdomPoseCallback.
     *
     * @param msg odometry message carrying the camera pose in the cam0 frame
     */
    void VisualOdomTFManager::VinsOdomPoseCallback(const nav_msgs::Odometry::ConstPtr &msg){

        Eigen::Quaterniond Q_cam0_camk(msg->pose.pose.orientation.w, msg->pose.pose.orientation.x, msg->pose.pose.orientation.y, msg->pose.pose.orientation.z);
        Eigen::Vector3d t_cam0_camk(msg->pose.pose.position.x, msg->pose.pose.position.y, msg->pose.pose.position.z);
        Eigen::Matrix3d R_cam0_camk = Q_cam0_camk.toRotationMatrix();

        cv::Mat T_cam0_camk = Converter::toCvSE3(R_cam0_camk, t_cam0_camk);
        PoseWithTime pwt(T_cam0_camk, msg->header.stamp.toSec());
        visualPose_dataBuf_.push_back(pwt);

        {
            std::unique_lock<std::mutex> lock(mutexPoseCam);
            curT_cam0_camk = T_cam0_camk;
        }

        MainProcess(T_cam0_camk, msg->header.stamp.toSec(), msg->header.stamp);

    }

    /**
     * Rviz "2D Pose Estimate" callback: the user supplies map->base_link;
     * combined with the TF base_link->camera extrinsics and the current
     * visual pose (cam0->camk), this resets the stored map->cam0 transform.
     * @param msg user-provided pose of base_link in the map frame
     */
    void VisualOdomTFManager::InitialPoseCallback(const geometry_msgs::PoseWithCovarianceStamped::ConstPtr &msg) {

        cout<<"Initial pose from rviz received.\n";

        // map -> base_link from the rviz message (Eigen ctor order: w,x,y,z).
        const auto &p = msg->pose.pose;
        Eigen::Quaterniond Q_map_base(p.orientation.w, p.orientation.x, p.orientation.y, p.orientation.z);
        Eigen::Vector3d t_map_base(p.position.x, p.position.y, p.position.z);
        Eigen::Matrix3d R_map_base = Q_map_base.toRotationMatrix();
        cv::Mat T_map_base = Converter::toCvSE3(R_map_base, t_map_base);

        // base_link -> camera from the TF tree (odom as the fixed frame).
        tf::StampedTransform tf_base_cam;
        try {
            listener.waitForTransform(base_link_frame, ros::Time(0), camera_frame, ros::Time(0), odom_frame, ros::Duration(2.0));
            listener.lookupTransform(base_link_frame, ros::Time(0), camera_frame, ros::Time(0), odom_frame, tf_base_cam);
        }
        catch (tf::TransformException &tf_exception){
            ROS_ERROR("%s",tf_exception.what());
            ros::Duration(1.0).sleep();
            return;
        }

        const tf::Quaternion &rot = tf_base_cam.getRotation();
        const tf::Vector3 &org = tf_base_cam.getOrigin();
        Eigen::Quaterniond Q_base_cam(rot.w(), rot.x(), rot.y(), rot.z());
        Eigen::Vector3d t_base_cam(org.x(), org.y(), org.z());
        Eigen::Matrix3d R_base_cam = Q_base_cam.toRotationMatrix();
        cv::Mat T_base_cam = Converter::toCvSE3(R_base_cam, t_base_cam);

        cv::Mat T_map_camk = T_map_base*T_base_cam;
        {
            // NOTE(review): locks mutexPoseCam then mutexPoseMap — keep this
            // acquisition order consistent everywhere to avoid deadlock.
            std::unique_lock<std::mutex> lock1(mutexPoseCam);

            // Invert curT_cam0_camk analytically: R' = R^T, t' = -R^T * t.
            cv::Mat R_k0 = curT_cam0_camk.rowRange(0,3).colRange(0,3).t();
            cv::Mat t_k0 = -R_k0 * curT_cam0_camk.rowRange(0,3).col(3);
            cv::Mat T_camk_cam0 = cv::Mat::eye(4,4,CV_32F);
            R_k0.copyTo(T_camk_cam0.rowRange(0,3).colRange(0,3));
            t_k0.copyTo(T_camk_cam0.rowRange(0,3).col(3));

            std::unique_lock<std::mutex> lock2(mutexPoseMap);
            // map -> cam0 = (map -> camk) * (camk -> cam0); store both the
            // cv::Mat and the tf forms used by the broadcaster.
            mT_map_cam0 = T_map_camk*T_camk_cam0;
            Eigen::Quaterniond Q(Converter::toMatrix3d(mT_map_cam0.rowRange(0,3).colRange(0,3)));
            cv::Mat t = mT_map_cam0.rowRange(0,3).col(3);
            trans_map_cam0.setRotation(tf::Quaternion(Q.x(),Q.y(),Q.z(),Q.w()));
            trans_map_cam0.setOrigin(tf::Vector3(t.at<float>(0), t.at<float>(1), t.at<float>(2)));
        }

    }



}//namespace
