#include "vision_service_direct_funcs.h"
#include "vision_service_direct_types.h"

// #include <opencv2/opencv.hpp>

#include <unistd.h>
#include <fstream>
#include <iostream>


#include <ros/ros.h>
#include <sensor_msgs/Image.h>
#include <sensor_msgs/Imu.h>
#include <sensor_msgs/NavSatFix.h>
#include <camera_info_manager/camera_info_manager.h>
// #include <sstream>
// #include <tf/transform_broadcaster.h>
// #include <message_filters/subscriber.h>
#include <sensor_msgs/image_encodings.h>
#include <std_msgs/Int16.h>
#include <std_msgs/Int16.h>
#include <zhz_msgs/FCVisionSwitchFrame.h>
#include <zhz_msgs/NedFrame.h>
#include <zhz_msgs/RCFrame.h>

// Publish ImageFrame data as a ROS Image message
// Convert an ImageFrame into a ROS sensor_msgs::Image message.
// The frame timestamp is split as nanoseconds since epoch into (sec, nsec).
sensor_msgs::Image ImageFrameToROSImg(const ImageFrame& image_frame)
{
    sensor_msgs::Image image_msg;

    const uint64_t kNsPerSec = 1000000000;
    image_msg.header.stamp = ros::Time(image_frame.timestamp / kNsPerSec,
                                       image_frame.timestamp % kNsPerSec);
    image_msg.header.frame_id = "camera_frame";

    image_msg.height = image_frame.rows;
    image_msg.width = image_frame.cols;
    image_msg.step = image_frame.step;

    // Map the internal color enum onto the matching ROS encoding string.
    if (image_frame.color == COLOR_GRAY) {
        image_msg.encoding = "mono8";
    } else if (image_frame.color == COLOR_YUV) {
        image_msg.encoding = "yuv422";
    } else if (image_frame.color == COLOR_BGR) {
        image_msg.encoding = "bgr8";
    } else {
        image_msg.encoding = "mono8";  // fall back to mono8 for unknown formats
    }

    // Copy the raw pixel buffer (step bytes per row, rows rows) into the message.
    if (image_frame.data != nullptr) {
        const char* begin = image_frame.data;
        const char* end = begin + image_frame.step * image_frame.rows;
        image_msg.data.assign(begin, end);
    }

    return image_msg;
}

// Convert an AttFrame (attitude/IMU sample) into a ROS sensor_msgs::Imu message.
// NOTE(review): the timestamp is split here as MICROseconds, unlike the
// nanosecond split used by the other converters in this file — confirm the unit.
sensor_msgs::Imu AttFrameToImuMsg(const AttFrame& att_frame) {
    sensor_msgs::Imu imu_msg;

    // Header: microsecond timestamp -> (seconds, nanoseconds).
    const uint64_t secs = att_frame.timestamp / 1000000;
    const uint64_t nsecs = (att_frame.timestamp % 1000000) * 1000;
    imu_msg.header.stamp = ros::Time(secs, nsecs);
    imu_msg.header.frame_id = "imu_frame";

    // Angular velocity from the gyro (x, y, z).
    imu_msg.angular_velocity.x = att_frame.imu_gyro[0];
    imu_msg.angular_velocity.y = att_frame.imu_gyro[1];
    imu_msg.angular_velocity.z = att_frame.imu_gyro[2];

    // Linear acceleration from the accelerometer (x, y, z).
    imu_msg.linear_acceleration.x = att_frame.imu_accel[0];
    imu_msg.linear_acceleration.y = att_frame.imu_accel[1];
    imu_msg.linear_acceleration.z = att_frame.imu_accel[2];

    // Orientation quaternion; source order is (w, x, y, z).
    imu_msg.orientation.w = att_frame.uav_quat[0];
    imu_msg.orientation.x = att_frame.uav_quat[1];
    imu_msg.orientation.y = att_frame.uav_quat[2];
    imu_msg.orientation.z = att_frame.uav_quat[3];

    return imu_msg;
}

// Convert a GpsFrame into a ROS sensor_msgs::NavSatFix message.
// Timestamp is split as nanoseconds; horizontal/vertical accuracies are
// squared into a diagonal position covariance (ENU order: E, N, U).
sensor_msgs::NavSatFix GpsFrameToROSGNSS(const GpsFrame& gps_frame)
{
    sensor_msgs::NavSatFix gnss_msg;

    gnss_msg.header.stamp = ros::Time(gps_frame.timestamp / 1000000000,
                                      gps_frame.timestamp % 1000000000);
    gnss_msg.header.frame_id = "gps_frame";

    gnss_msg.latitude = gps_frame.gps_lat;
    gnss_msg.longitude = gps_frame.gps_lon;
    gnss_msg.altitude = gps_frame.gps_alt;

    // Diagonal covariance: variance = accuracy^2; off-diagonals stay zero
    // (ROS message members are zero-initialized by the generated constructor).
    const double var_hor = gps_frame.accuracy_hor * gps_frame.accuracy_hor;
    const double var_ver = gps_frame.accuracy_ver * gps_frame.accuracy_ver;
    gnss_msg.position_covariance[0] = var_hor;
    gnss_msg.position_covariance[4] = var_hor;
    gnss_msg.position_covariance[8] = var_ver;
    gnss_msg.position_covariance_type = sensor_msgs::NavSatFix::COVARIANCE_TYPE_DIAGONAL_KNOWN;

    // Translate the receiver's fix type into a NavSatStatus code.
    // 0 -> no fix, 1/2 -> plain fix, 3/4 -> SBAS, 5 -> GBAS, others -> no fix.
    int8_t fix_status = sensor_msgs::NavSatStatus::STATUS_NO_FIX;
    const int fix_type = gps_frame.GNSS_fix_type;
    if (fix_type == 1 || fix_type == 2) {
        fix_status = sensor_msgs::NavSatStatus::STATUS_FIX;
    } else if (fix_type == 3 || fix_type == 4) {
        fix_status = sensor_msgs::NavSatStatus::STATUS_SBAS_FIX;
    } else if (fix_type == 5) {
        fix_status = sensor_msgs::NavSatStatus::STATUS_GBAS_FIX;
    }
    gnss_msg.status.status = fix_status;

    gnss_msg.status.service = sensor_msgs::NavSatStatus::SERVICE_GPS;

    return gnss_msg;
}


// Convert an FCVisionSwitchFrame (flight-controller vision enable switches)
// into its ROS message equivalent. The source struct carries no timestamp,
// so the header is stamped with the current ROS time.
zhz_msgs::FCVisionSwitchFrame FCVisionSwitchFrameToROS(const FCVisionSwitchFrame& fc_frame) {
    zhz_msgs::FCVisionSwitchFrame ros_msg;
    ros_msg.header.stamp = ros::Time::now();
    // Fixed: was "gps_frame", a copy-paste from the GNSS converter.
    ros_msg.header.frame_id = "fc_vision_switch_frame";

    // Mirror every enable flag from the FC struct into the ROS message.
    ros_msg.enable_vio = fc_frame.enable_vio;
    ros_msg.enable_head_soa = fc_frame.enable_head_soa;
    ros_msg.enable_rear_soa = fc_frame.enable_rear_soa;
    ros_msg.enable_bottom_soa = fc_frame.enable_bottom_soa;
    ros_msg.enable_right_soa = fc_frame.enable_right_soa;
    ros_msg.enable_left_soa = fc_frame.enable_left_soa;
    ros_msg.enable_top_soa = fc_frame.enable_top_soa;
    ros_msg.enable_prior_soa = fc_frame.enable_prior_soa;
    ros_msg.enable_secure_landing = fc_frame.enable_secure_landing;
    ros_msg.enable_precise_landing = fc_frame.enable_precise_landing;
    ros_msg.enable_mapping_planning = fc_frame.enable_mapping_planning;

    return ros_msg;
}

// Convert a NedFrame (NED position/velocity) into its ROS message equivalent.
// Timestamp is split as nanoseconds since epoch.
zhz_msgs::NedFrame NedFrameToROS(const NedFrame& ned_frame) {
    zhz_msgs::NedFrame ros_msg;
    ros_msg.coord.resize(3);
    ros_msg.speed.resize(3);
    ros_msg.header.stamp = ros::Time(ned_frame.timestamp / 1000000000,
                                     ned_frame.timestamp % 1000000000);
    ros_msg.header.frame_id = "ned_frame";

    // Copy position and velocity per NED axis (north, east, down).
    for (int axis = 0; axis < 3; ++axis) {
        ros_msg.coord[axis] = ned_frame.coord[axis];
        ros_msg.speed[axis] = ned_frame.speed[axis];
    }
    ros_msg.valid = ned_frame.valid;

    return ros_msg;
}

// Convert an RCFrame (remote-control sticks/switches) into its ROS message
// equivalent. Timestamp is split as nanoseconds since epoch.
zhz_msgs::RCFrame RCFrameToROS(const RCFrame& rc_frame) {
    zhz_msgs::RCFrame ros_msg;
    const uint64_t kNsPerSec = 1000000000;
    ros_msg.header.stamp = ros::Time(rc_frame.timestamp / kNsPerSec,
                                     rc_frame.timestamp % kNsPerSec);
    ros_msg.header.frame_id = "rc_frame";

    // Switch/mode channels.
    ros_msg.button = rc_frame.button;
    ros_msg.mode = rc_frame.mode;
    ros_msg.gimbal_pitch = rc_frame.gimbal_pitch;
    // Stick channels.
    ros_msg.roll = rc_frame.roll;
    ros_msg.pitch = rc_frame.pitch;
    ros_msg.yaw = rc_frame.yaw;
    ros_msg.thrust = rc_frame.thrust;

    return ros_msg;
}

// Fill `image_frame` with a synthetic 640x480 3-channel test pattern.
//
// Alternates between two palettes of three horizontal bands on successive
// calls so a viewer can see the stream updating.
//
// Fix: the previous implementation did `new char[...]` on every call and
// never freed the buffer — called once per publish loop iteration in main(),
// this leaked ~900 KB/s. The pixel data now lives in a function-local static
// buffer that is reused across calls; image_frame.data points into it, must
// NOT be freed by the caller, and is overwritten by the next call.
void GetImageFrame(ImageFrame& image_frame) {
  const int rows = 480;
  const int cols = 640;
  const int num_channels = 3;  // three bytes per pixel

  image_frame.rows = rows;
  image_frame.cols = cols;
  // step = width * byte_depth * num_channels
  image_frame.step = cols * 1 * num_channels;
  image_frame.color = num_channels == 1 ? COLOR_GRAY : COLOR_BGR;

  // Single reusable pixel buffer (rows * step bytes), allocated once.
  static char buffer[480 * 640 * 3];
  char* data = buffer;

  // NOTE(review): the encoding advertised by the converter is "bgr8", but the
  // channel order written below was labeled R,G,B in the original — the test
  // stripes will appear with red/blue swapped. Kept as-is for compatibility.
  static bool change = false;
  const int nums = rows * cols;
  for (int i = 0; i < nums; i++) {
    int c0, c1, c2;
    if (change) {
      // Landscape palette: sky, grass, flowers.
      if (i < nums / 3) {
        c0 = 135; c1 = 206; c2 = 235;  // sky blue
      } else if (i < 2 * nums / 3) {
        c0 = 34; c1 = 139; c2 = 34;    // forest green
      } else {
        c0 = 255; c1 = 215; c2 = 0;    // golden yellow
      }
    } else {
      // Plain-color palette: red, purple, orange.
      if (i < nums / 3) {
        c0 = 255; c1 = 0; c2 = 0;      // red
      } else if (i < 2 * nums / 3) {
        c0 = 128; c1 = 0; c2 = 128;    // purple
      } else {
        c0 = 255; c1 = 165; c2 = 0;    // orange
      }
    }
    data[i * num_channels] = static_cast<char>(c0);
    data[i * num_channels + 1] = static_cast<char>(c1);
    data[i * num_channels + 2] = static_cast<char>(c2);
  }

  image_frame.data = data;
  change = !change;  // alternate palettes on the next call
}
// Test node: builds sample frames of every supported type, converts them to
// ROS messages, and republishes them at 1 Hz along with camera info.
int main(int argc, char** argv) {

    ros::init(argc, argv, "msg_convert");
    ros::NodeHandle nh;

    // Topic names and calibration URLs come from the private parameter server.
    std::string imu_topic, left_topic, right_topic, gps_topic;
    std::string camera_info_url_l, camera_info_url_r, camera_info_l, camera_info_r;
    int use_cam_type = 4;
    ros::param::get("~imu_topic", imu_topic);
    ros::param::get("~gps_topic", gps_topic);
    ros::param::get("~use_cam_type", use_cam_type);
    ros::param::get("~left_topic", left_topic);
    ros::param::get("~right_topic", right_topic);
    ros::param::get("~camera_info_url_l", camera_info_url_l);
    ros::param::get("~camera_info_url_r", camera_info_url_r);
    ros::param::get("~camera_info_l", camera_info_l);
    ros::param::get("~camera_info_r", camera_info_r);
    boost::shared_ptr<camera_info_manager::CameraInfoManager> cinfo_l;
    boost::shared_ptr<camera_info_manager::CameraInfoManager> cinfo_r;

    // Latched publishers so late subscribers still get the last sample.
    ros::Publisher imu_pub = nh.advertise<sensor_msgs::Imu>(imu_topic, 1, true);
    ros::Publisher gnss_pub = nh.advertise<sensor_msgs::NavSatFix>(gps_topic, 1, true);
    ros::Publisher img1_pub = nh.advertise<sensor_msgs::Image>(left_topic, 1, true);
    ros::Publisher img2_pub = nh.advertise<sensor_msgs::Image>(right_topic, 1, true);

    std::cout << "Node use_cam_type: " << use_cam_type << std::endl;

    ros::Publisher FCVisionSwitchFrame_pub = nh.advertise<zhz_msgs::FCVisionSwitchFrame>("/zhz/driver/FCVisionSwitchFrame", 1, true);
    ros::Publisher NedFrame_pub = nh.advertise<zhz_msgs::NedFrame>("/zhz/driver/NedFrame", 1, true);
    ros::Publisher RCFrame_pub = nh.advertise<zhz_msgs::RCFrame>("/zhz/driver/RCFrame", 1, true);

    // Load left/right camera calibration and cache the CameraInfo messages.
    cinfo_l.reset(new camera_info_manager::CameraInfoManager(nh, "camera", camera_info_url_l));
    ros::Publisher camera_info_pub_l_ = nh.advertise<sensor_msgs::CameraInfo>(camera_info_l, 1);
    ros::Publisher camera_info_pub_r_ = nh.advertise<sensor_msgs::CameraInfo>(camera_info_r, 1);
    sensor_msgs::CameraInfo camera_info_msg_l = cinfo_l->getCameraInfo();
    cinfo_r.reset(new camera_info_manager::CameraInfoManager(nh, "camera", camera_info_url_r));
    sensor_msgs::CameraInfo camera_info_msg_r = cinfo_r->getCameraInfo();

    // Sample frames. All frames are value-initialized ({}): the previous code
    // left unset fields — including the timestamps later passed to ros::Time,
    // which throws on out-of-range values — as uninitialized stack garbage.
    AttFrame att_frame{};
    att_frame.imu_gyro[0] = 0.1;
    att_frame.imu_gyro[1] = 0.2;
    att_frame.imu_gyro[2] = 0.3;
    att_frame.imu_accel[0] = 1.0;
    att_frame.imu_accel[1] = 2.0;
    att_frame.imu_accel[2] = 3.0;
    att_frame.uav_quat[0] = 0.5;
    att_frame.uav_quat[1] = 0.5;
    att_frame.uav_quat[2] = 0.5;
    att_frame.uav_quat[3] = 0.5;
    att_frame.timestamp = 1234567890; // sample timestamp

    GpsFrame gps_frame{};
    gps_frame.gps_lat = 120;

    ImageFrame image_frame{};
    GetImageFrame(image_frame);

    FCVisionSwitchFrame FCVisionSwitch_Frame{};
    FCVisionSwitch_Frame.enable_vio = 1;

    NedFrame Ned_Frame{};
    Ned_Frame.speed[0] = 20;

    RCFrame RC_Frame{};
    RC_Frame.pitch = 30;

    ros::Rate rate(1);  // publish everything at 1 Hz
    int frame_id = 0;

    while (ros::ok()) {
      // Convert the sample frames to ROS messages.
      sensor_msgs::Imu imu_msg = AttFrameToImuMsg(att_frame);
      sensor_msgs::NavSatFix gnss_msg = GpsFrameToROSGNSS(gps_frame);
      sensor_msgs::Image img_msg = ImageFrameToROSImg(image_frame);
      zhz_msgs::FCVisionSwitchFrame FCVisionSwitch_Frame_msg = FCVisionSwitchFrameToROS(FCVisionSwitch_Frame);
      zhz_msgs::NedFrame Ned_Frame_msg = NedFrameToROS(Ned_Frame);
      zhz_msgs::RCFrame RC_Frame_msg = RCFrameToROS(RC_Frame);

      // Restamp the IMU message with wall time and a running sequence number.
      imu_msg.header.frame_id = "world";
      imu_msg.header.seq = frame_id++;
      imu_msg.header.stamp = ros::Time::now();

      imu_pub.publish(imu_msg);
      gnss_pub.publish(gnss_msg);
      // NOTE(review): only the left image topic is published; img2_pub (right)
      // is advertised but never used — confirm whether that is intentional.
      img1_pub.publish(img_msg);

      // Camera info is stamped to match the image it describes.
      camera_info_msg_l.header.frame_id = img_msg.header.frame_id;
      camera_info_msg_l.header.stamp = img_msg.header.stamp;
      camera_info_pub_l_.publish(camera_info_msg_l);
      camera_info_msg_r.header.frame_id = img_msg.header.frame_id;
      camera_info_msg_r.header.stamp = img_msg.header.stamp;
      camera_info_pub_r_.publish(camera_info_msg_r);

      FCVisionSwitchFrame_pub.publish(FCVisionSwitch_Frame_msg);
      NedFrame_pub.publish(Ned_Frame_msg);
      RCFrame_pub.publish(RC_Frame_msg);

      // Regenerate the test pattern so the next cycle publishes a new image.
      GetImageFrame(image_frame);

      ros::spinOnce();
      rate.sleep();
    }
    ros::shutdown();
    return 0;  // replaced unreachable exit(0)/return pair with a single return
}