#include <chrono>
#include <cmath>
#include <fstream>
#include <iomanip>
#include <iostream>
#include <memory>
#include <sstream>
#include <string>
#include <thread>
#include <unordered_map>
#include <vector>

#include <glog/logging.h>

#include "pangolin_viewer/viewer.h"
#include "read_kaist_data.h"

#include "imu_project.h"

#include "sensor/sensor_sync.h"

#include "feature/extractor.h"

using namespace feature;
using namespace sensor;
using namespace mapping;
using namespace pangolin_viewer;

static bool use_pangolin = true;

void drawFeaturePoints(cv::Mat& frame, const std::vector<OriginFeaturePoint>& features, const std::vector<FeaturePoint>& feature_data = std::vector<FeaturePoint>()) {
    if(!feature_data.empty()) {
        for (const auto& pt : feature_data) {
            cv::circle(frame, cv::Point(pt.image_point[0], pt.image_point[1]), 3, cv::Scalar(0, 0, 255), -1);
        }

        // 在图像左上角显示特征点数量（白色文字）
        std::string count_text = "Features: " + std::to_string(feature_data.size());
        cv::putText(frame, count_text, cv::Point(20, 30), 
                    cv::FONT_HERSHEY_SIMPLEX, 0.7, cv::Scalar(0, 0, 0), 2);
        return;
    }

    // 绘制所有特征点（红色圆点）
    for (const auto& pt : features) {
        cv::circle(frame, cv::Point(pt.x, pt.y), 3, cv::Scalar(255, 255, 255), -1);
    }

    // 在图像左上角显示特征点数量（白色文字）
    std::string count_text = "Features: " + std::to_string(features.size());
    cv::putText(frame, count_text, cv::Point(20, 30), 
                cv::FONT_HERSHEY_SIMPLEX, 0.7, cv::Scalar(0, 0, 0), 2);
}

int main(int argc, char** argv) {
    std::cout << std::fixed;         // 固定小数格式（避免科学计数法）
    std::cout << std::setprecision(10); // 保留6位小数（根据需求调整，如10位）
    std::cout << std::setw(10);      // 每个数值占15个字符宽度（避免挤压）
    std::cout << std::right;         // 右对齐（数值靠右，左侧补空格，便于列对齐）

    google::InitGoogleLogging(argv[0]);  // 初始化 glog
    FLAGS_logtostderr = true;           // 输出到 stderr（控制台）
    FLAGS_colorlogtostderr = true;  // 启用彩色输出

    std::shared_ptr<Publisher> publisher_ptr = std::make_shared<Publisher>();
    if(use_pangolin) {
        pangolin_viewer::viewer* viewer_ptr = new pangolin_viewer::viewer(publisher_ptr);
        std::thread* viewer_thread_ptr = new thread(&viewer::run, viewer_ptr);      
    }

    ImuProject imu_project;
    
    Extractor extractor;

    // std::string data_path = "/home/zl/Public/kaist/";
    std::string data_path = "/media/zl/新加卷/kaist_dataset/urban38-pankyo/";
    
    double last_left = 0;
    double last_right = 0;
    double last_wheel_time = 0;
    bool used_simple_point = true;

    Options options_;
    Eigen::Matrix3d Rwo = Eigen::Matrix3d::Identity();
    Eigen::Vector3d pwo = Eigen::Vector3d::Zero();

    std::string data_stamp_path = data_path + "sensor_data/data_stamp.csv";
    std::ifstream fin(data_stamp_path);
    if (!fin) {
        return -1;
    }
    
    std::vector<std::string> line_data_vec;
    line_data_vec.reserve(3);
    std::string line_str, value_str;

    std::unordered_map<std::string, std::string> time_encoder_map;
    std::unordered_map<std::string, std::string> time_imu_map;
    std::unordered_map<std::string, std::string> time_gps_map; 
    std::unordered_map<std::string, std::string> time_vrs_gps_map;
    std::unordered_map<std::string, std::string> time_sick_middle_map;
    std::unordered_map<std::string, std::string> time_velodyne_left_map;

    read_data* read_input_data = new read_data(data_path);
    if(!read_input_data->read_only_kaist_data(time_encoder_map, time_imu_map, time_gps_map, time_vrs_gps_map, time_sick_middle_map, time_velodyne_left_map)) {
        std::cout << "[main]: Failed to find data " << std::endl;
        return -1;
    }

    Undistortion undistortion(data_path);
    Eigen::Matrix4d extrinsic_matrix = read_input_data->read_extrinsic();
    Eigen::Matrix3d intrinsic_matrix = undistortion.read_intrinsic();

    static int first_frame = 0;
    double first_timestamp = read_input_data->get_origin_timestamp();

    SlidingWindow sliding_window(intrinsic_matrix, extrinsic_matrix, publisher_ptr);
    SensorSync sensor_sync;

    // start add data
    while (std::getline(fin, line_str)) {
        line_data_vec.clear();
        std::stringstream ss(line_str);
        while (std::getline(ss, value_str, ',')) { line_data_vec.push_back(value_str); }
        const double kToSecond = 1e-9;
        if(line_data_vec.empty()) {
            continue;
        }
        const std::string& time_str = line_data_vec[0];

        const double timestamp = std::stod(time_str) * kToSecond - first_timestamp;

        const std::string& sensor_type = line_data_vec[1];
        if (sensor_type == "imu") {
         if (time_imu_map.find(time_str) == time_imu_map.end()) {
                std::cout << "[main]: Failed to find imu data at time: " << time_str << std::endl;
                continue;
            }
            const std::string& imu_str = time_imu_map.at(time_str);
            // std::cout << "imu_str : " << imu_str << std::endl;

            if(imu_str.size() < 17) {
                continue;
            }   
            std::stringstream imu_ss(imu_str);
            line_data_vec.clear();
            while (std::getline(imu_ss, value_str, ',')) { line_data_vec.push_back(value_str); }
            if(line_data_vec.empty()) {
                continue;
            }

            double time = timestamp;

            double qx = std::stod(line_data_vec[1]);
            double qy = std::stod(line_data_vec[2]);
            double qz = std::stod(line_data_vec[3]);
            double qw = std::stod(line_data_vec[4]);

            // std::cout << "line_data_vec : " << line_data_vec.size();
            double gx = std::stod(line_data_vec[8]);
            double gy = std::stod(line_data_vec[9]);
            double gz = std::stod(line_data_vec[10]);
            double ax = std::stod(line_data_vec[11]);
            double ay = std::stod(line_data_vec[12]);
            double az = std::stod(line_data_vec[13]);  

            Eigen::Vector3d acc(ax, ay, az);
            Eigen::Vector3d gyro(gx, gy, gz);
            Eigen::Quaterniond quaternion(Eigen::Vector4d(qx, qy, qz, qw));  // 向量顺序为x, y, z, w（虚部在前）:ml-citation{ref="1" data="citationList"}  

            Eigen::Matrix3d R_WB = quaternion.toRotationMatrix();
            Eigen::Matrix3d R_BW = R_WB.transpose();

            // std::cout << "R_WB : " << R_WB << std::endl;

            Eigen::Vector2d acc_2d;
            double omega_yaw;
        
            imu_project.processIMUData(acc, gyro, R_BW, 
                                acc_2d, omega_yaw);

            gx = 0;
            gy = 0;
            gz = omega_yaw;
            ax = acc_2d[0];
            ay = acc_2d[1];
            az = 0;

            // std::cout << "Projected imu : " << acc_2d.transpose() << ", " << omega_yaw << std::endl;

            ImuData imu_data;
            imu_data.timestamp = time;
            imu_data.gx = gx;
            imu_data.gy = gy;
            imu_data.gz = gz;
            imu_data.ax = ax;
            imu_data.ay = ay;
            imu_data.az = az;

            // LOG(INFO) << " Imu data: " << ax << ", " << ay << ", " << az << ", " << gx << ", " << gy << ", " << gz << std::endl;
            
            sensor_sync.AddImuData(imu_data);

        } else if (sensor_type == "encoder") {
             if (time_encoder_map.find(time_str) == time_encoder_map.end()) {
                std::cout << "[main]: Failed to find encoder data at time: " << time_str << std::endl;
                continue;
            }

            const std::string& encoder_str = time_encoder_map.at(time_str);
            std::stringstream enc_ss(encoder_str);
            line_data_vec.clear();
            while (std::getline(enc_ss, value_str, ',')) { line_data_vec.push_back(value_str); }
            if(line_data_vec.empty()) {
                continue;
            }
            const double left_enc_cnt = std::stod(line_data_vec[1]);
            const double right_enc_cnt = std::stod(line_data_vec[2]);
            
            static bool is_init = false;
            static double init_left = 0;
            static double init_right = 0;
            if(!is_init) {
                init_left = left_enc_cnt;
                init_right = right_enc_cnt;
                last_left = left_enc_cnt;
                last_right = right_enc_cnt;
                last_wheel_time = timestamp;
                is_init = true;
                continue;
            }
            // std::cout << "init_left : " << init_left;
            // std::cout << "init_right : " << init_right;
            // std::cout << "left_enc_cnt : " << left_enc_cnt;
            // std::cout << "right_enc_cnt : " << right_enc_cnt;

            double time = timestamp;
            double delta_left_pulse = left_enc_cnt - last_left;
            double delta_right_pulse = right_enc_cnt - last_right;

            // double wl = left_enc_cnt - init_left - last_left;
            // double wr = right_enc_cnt - init_right - last_right;

            options_.odom_span_ = time - last_wheel_time;
            if(options_.odom_span_ > 1.0 || options_.odom_span_ < 1.0e-5) {
                options_.odom_span_ = 0.01;
            }  

            double velo_l = options_.left_wheel_radius_ * delta_left_pulse / options_.circle_pulse_ * 2 * M_PI / options_.odom_span_;
            double velo_r = options_.right_wheel_radius_ * (delta_right_pulse) / options_.circle_pulse_ * 2 * M_PI / options_.odom_span_;
            // velocity
            double average_vel = 0.5 * (velo_l + velo_r);
            // double average_omega = (velo_r - velo_l) / options_.wheel_base_; 

            const double left_dist = delta_left_pulse * options_.kl_;
            const double right_dist = delta_right_pulse * options_.kr_;

            const double delta_yaw = (right_dist - left_dist) / options_.wheel_base_;
            const double delta_dist = (right_dist + left_dist) * 0.5;
            // Mean.
            const Eigen::Matrix3d delta_R = Eigen::AngleAxisd(delta_yaw, Eigen::Vector3d::UnitZ()).toRotationMatrix();
            const Eigen::Vector3d delta_p = Eigen::Vector3d(delta_dist, 0., 0.);    
            
            // todo : add wheel data 
            pwo = pwo + Rwo * delta_p;
            Rwo = Rwo * delta_R;

            WheelData wheel_data;
            wheel_data.timestamp = time;
            wheel_data.x = pwo(0); 
            wheel_data.y = pwo(1); 
            wheel_data.t = atan2(Rwo(1, 0), Rwo(0, 0));

            wheel_data.vehicle_speed = average_vel;
      
            sensor_sync.AddWheelOdometryData(wheel_data);
            
            publisher_ptr->SetOdometryPose(Eigen::Vector3d(wheel_data.x, wheel_data.y, 0));

            // // std::cout << "average_vel ========= : " << average_vel << std::endl;
            // // std::cout << "theta ========= : " << theta << std::endl;
            // LOG(INFO) << "p ========= : " << p.transpose() << std::endl;

            last_left = left_enc_cnt;
            last_right = right_enc_cnt;
            last_wheel_time = time;
        }else if (sensor_type == "gps") {
            if (time_gps_map.find(time_str) == time_gps_map.end()) {
                std::cout << "[main]: Failed to find gps data at time: " << time_str << std::endl;
                continue;
            }

            const std::string& gps_str = time_gps_map.at(time_str);

            std::stringstream gps_ss(gps_str);
            line_data_vec.clear();
            while (std::getline(gps_ss, value_str, ',')) { line_data_vec.push_back(value_str); }
            if(line_data_vec.empty()) {
                continue;
            }
            double time = timestamp;
            const double lat = std::stod(line_data_vec[1]);
            const double lon = std::stod(line_data_vec[2]);
            const double alt = std::stod(line_data_vec[3]);

            //Eigen::Matrix3d cov;
            //for (size_t i = 0; i < 9; ++i) {
            //    cov.data()[i] = std::stod(line_data_vec[4+i]);
            //}

            double latitude_std = std::stod(line_data_vec[4]);
            double longitude_std = std::stod(line_data_vec[5]);
            double altitude_std = std::stod(line_data_vec[6]);

            double heading = 0.0;
            bool heading_valid = true;

        } else if (sensor_type == "vrs") {
            continue;
        } else if (sensor_type == "sick_middle") {
            continue;
        } else if(sensor_type == "velodyne_left") {
            continue;
        }  else if (sensor_type == "stereo") {
            double time = timestamp;

            const std::string img0_file = data_path + "image/stereo_left/" + time_str + ".png"; 
            const std::string img1_file = data_path + "image/stereo_right/" + time_str + ".png"; 
            cv::Mat im_left = cv::imread(img0_file);
            cv::Mat im_right = cv::imread(img1_file);

            // std::cout << "fd_interence_result ============ : " << fd_interence_result.size() << std::endl;

#if 0
            std::vector<FeatureOutput> fd_interence_result;
            std::vector<cv::KeyPoint> undistorted_kps; // 存储当前帧所有un_kp
            read_input_data->LoadFrameData(fd_interence_result, undistorted_kps, 
                    data_path + "feature_bin/" + time_str + ".bin");

            if(fd_interence_result.empty()) {
                continue;
            }

            std::vector<OriginFeaturePoint> filter_features;
  
            cv::KeyPoint un_kp, kp;
            
            for(const auto& undist_kp : undistorted_kps) {
                un_kp = undist_kp;
                undistortion.DistortionKeyPoint(kp, un_kp);
                 
                OriginFeaturePoint resize_fp = OriginFeaturePoint(kp.pt.x, kp.pt.y, kp.response);   
       
                filter_features.push_back(resize_fp);

                // std::cout << "kp ============ : " << kp.pt.x << ", " << kp.pt.y << std::endl;
                // std::cout << "un_kp ============ : " << un_kp.pt.x << ", " << un_kp.pt.y << std::endl;
            }

            std::vector<FeaturePoint> feature_data = read_input_data->ConvertFeatureOutputToFeaturePoints(fd_interence_result, filter_features, undistorted_kps, intrinsic_matrix);
            // std::cout << "feature_data ============ : " << feature_data.size() << std::endl;
#else 
            std::vector<FeaturePoint> feature_data;
            std::vector<FeaturePoint> feature_points;

        #if 1    
            std::vector<cv::KeyPoint> undistorted_kps;
            read_input_data->read_feature_txt(feature_data, undistorted_kps, intrinsic_matrix, data_path + "feature/" + time_str + ".txt");
        #else    
            feature_data = extractor.ExtractFeature(im_left);
        #endif
            std::vector<OriginFeaturePoint> filter_features;
            for(const auto &fd : feature_data) {
                cv::KeyPoint undistorted_kp;

                undistortion.UnDistortionKeyPoint(undistorted_kp, fd.keypoint);

                FeaturePoint fp;

                fp = fd;
                fp.undistorted_point << undistorted_kp.pt.x, undistorted_kp.pt.y;
                fp.normalized_point = undistortion.NormalizedPoint(undistorted_kp);

                OriginFeaturePoint resize_fp = OriginFeaturePoint(fd.image_point[0], fd.image_point[1], fd.score);   
                filter_features.push_back(resize_fp);
            }
      
#endif
            drawFeaturePoints(im_left, filter_features, feature_data);
            cv::imshow("image left", im_left);
            cv::waitKey(5);

            sensor_sync.AddFeatureData(timestamp, feature_data);

            sliding_window.StartMapping(sensor_sync.GetImuData(), sensor_sync.GetWheelData(), sensor_sync.GetFeatureData());
        }

        usleep(5000);
    }

    usleep(500000000);
    return 0;
}