#include "nvpfm.hpp"

#include <string>
#include <ros/ros.h>
#include "feynman_camera/GetSN.h"
#include "feynman_camera/GetLEFTIRParam.h"
#include "feynman_camera/GetRGBParam.h"
#include "feynman_camera/GetRGBExposure.h"
#include "feynman_camera/SetRGBExposure.h"
#include "feynman_camera/GetImuExternalRef.h"
#include "feynman_camera/GetImuInternalRef.h"
#include "feynman_camera/GetExposure.h"
#include "feynman_camera/GetDeviceList.h"
#include "feynman_camera/UploadFile.h"
#include "feynman_camera/SaveDepth.h"
#include "feynman_camera/RemoveLow.h"
#include "feynman_camera/RemoveLight.h"
#include "feynman_camera/RemoveBad.h"
#include "feynman_camera/RemoveEdge.h"
#include "feynman_camera/SetStreamMode.h"
#include "feynman_camera/SwitchRectify.h"
#include "feynman_camera/SetManualExposure.h"
#include "feynman_camera/SetAutoExposure.h"
#include "feynman_camera/SetDepthMode.h"
#include "feynman_camera/SetProjector.h"
#include "feynman_camera/SetSysTime.h"
#include "feynman_camera/EnablePointCloud.h"
#include "feynman_camera/EnableIMU.h"
#include "feynman_camera/EnableLK.h"
#include "feynman_camera/temp_info.h"
#include "feynman_camera/imu_frame.h"
#include "feynman_camera/h265_raw.h"
#include "feynman_camera/imu_info.h"
#include "feynman_camera/cnn_box.h"
#include "feynman_camera/cnn_info.h"
#include "feynman_camera/GetVersions.h"
// #include "feynman_camera/device_ids.h"
#include <dynamic_reconfigure/server.h>
#include "feynman_camera/resfpsConfig.h"
#include "yuv_rgb.h"
#include <std_msgs/String.h>
#include <pcl/point_types.h>
#include <pcl/point_cloud.h>
#include <pcl/filters/conditional_removal.h>
#include <image_transport/image_transport.h>
#include <pcl_conversions/pcl_conversions.h>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/CameraInfo.h>
#include <sensor_msgs/Imu.h>
#include "ring_queue.h"
#include <opencv2/opencv.hpp>

#include <map>
// #include <pcl/ros/conversions.h>

#include <sensor_msgs/Image.h>
#include <sensor_msgs/image_encodings.h>
#include <sensor_msgs/distortion_models.h>
#include <ros/ros.h>
#include <tf2_ros/static_transform_broadcaster.h>
#include <tf2/LinearMath/Quaternion.h>
#include <string.h>

#include <eigen3/Eigen/Dense>
#include <eigen3/Eigen/Core>
#include <unordered_map>
#include <mutex>

#include "codec_ctx.h"

#include <sys/time.h>
// Request retry period for device queries: 0.5 s, in microseconds.
static constexpr uint64_t request_period = 500000;

// Largest supported sensor frame, in pixels (also sizes DEPTHDATAINFO below).
#define MAXWIDTH 1280
#define MAXHEIGHT 800

// #define SAVE_IMU_DATA
// #define SAVE_IMU_DATA_PATH "/home/nvp/catkin_ws/imu_data.csv" //"/home/yf/FC2/catkin_ws/devel/lib/feynman_camera/imu_data.csv"
using namespace cv;

// Depth range used by the pseudo-color table (see calculatecolortable);
// apparently millimeters (the table loop comments "0.2m-5m").
double DEPTHMAX = 20000.0;
double DEPTHMIN = 200.0;

// NOTE(review): meaning of the run-config values not visible in this chunk.
int g_runconfig = 2;

// Maps the SDK image-size enum to a human-readable "WIDTHxHEIGHT" string.
// Keep in sync with get_res_pair() below.
static std::unordered_map<NVPFM_IMAGE_SIZE, std::string> ResolutionToString = {
    {IMAGE_1280_800, "1280x800"},
    {IMAGE_1280_720, "1280x720"},
    {IMAGE_640_480, "640x480"},
    {IMAGE_640_400, "640x400"},
    {IMAGE_320_200, "320x200"},
    {IMAGE_640_360, "640x360"},
    {IMAGE_320_240, "320x240"},
    {IMAGE_960_600, "960x600"},
    {IMAGE_480_300, "480x300"},
    {IMAGE_1600_1200, "1600x1200"},
    {IMAGE_1280_1080, "1280x1080"},
    {IMAGE_1280_960, "1280x960"},
    {IMAGE_800_600, "800x600"},
    {IMAGE_848_480, "848x480"},
    {IMAGE_768_480, "768x480"},
    {IMAGE_1280_480, "1280x480"},
    {IMAGE_1920_1080, "1920x1080"},
    {IMAGE_960_1280, "960x1280"},
    {IMAGE_480_640, "480x640"}}; //,
                                 //{IMAGE_960_720, "960x720"}};

// Return the "WIDTHxHEIGHT" description for a resolution enum, or an empty
// string when the enum value is not in the table.
static std::string get_res_desc(NVPFM_IMAGE_SIZE size) {
  const auto found = ResolutionToString.find(size);
  return (found == ResolutionToString.end()) ? std::string() : found->second;
}

// s_nvpfm_device_info *g_deviceinfo = NULL;
// Return {width, height} in pixels for a resolution enum, or {-1, -1} when
// the value is unknown.
// Fix: the table previously listed only 9 of the 19 resolutions declared in
// ResolutionToString, so e.g. IMAGE_1920_1080 silently yielded {-1, -1}.
static std::pair<int, int> get_res_pair(NVPFM_IMAGE_SIZE res) {
  // Kept consistent with ResolutionToString above.
  static const std::unordered_map<NVPFM_IMAGE_SIZE, std::pair<int, int>> mapping = {
      {IMAGE_1280_800, {1280, 800}},
      {IMAGE_1280_720, {1280, 720}},
      {IMAGE_640_480, {640, 480}},
      {IMAGE_640_400, {640, 400}},
      {IMAGE_320_200, {320, 200}},
      {IMAGE_640_360, {640, 360}},
      {IMAGE_320_240, {320, 240}},
      {IMAGE_960_600, {960, 600}},
      {IMAGE_480_300, {480, 300}},
      {IMAGE_1600_1200, {1600, 1200}},
      {IMAGE_1280_1080, {1280, 1080}},
      {IMAGE_1280_960, {1280, 960}},
      {IMAGE_800_600, {800, 600}},
      {IMAGE_848_480, {848, 480}},
      {IMAGE_768_480, {768, 480}},
      {IMAGE_1280_480, {1280, 480}},
      {IMAGE_1920_1080, {1920, 1080}},
      {IMAGE_960_1280, {960, 1280}},
      {IMAGE_480_640, {480, 640}}};
  auto it = mapping.find(res);
  if (it == mapping.end()) {
    return {-1, -1};
  }
  return it->second;
}
std::string replace_str(const std::string &str, const std::string &to_replaced, const std::string &newchars);
#if 1
// Normalize a raw device id into a ROS-topic-safe id: '.' and '-' become
// '_' and the resulting "feynman_" prefix is stripped.
static std::string devid_to_topicid(const std::string &devid) {
  std::string topic = replace_str(devid, ".", "_");
  topic = replace_str(topic, "-", "_");
  return replace_str(topic, "feynman_", "");
}

#endif
// TBD: refactor this into an std::unordered_map-based config to reduce the chance of errors
// Per-device launch configuration. Field names suggest the pub* flags enable
// the corresponding topics and the *service flags enable the corresponding
// ROS services (handlers advertised elsewhere — not visible in this chunk).
typedef struct
{
  ros::NodeHandle node_obj;
  NVPFM_IMAGE_SIZE resolution;    // IR/depth stream resolution
  NVPFM_IMAGE_SIZE rgbresolution; // RGB stream resolution
  bool group;
  unsigned int fps;

  int rgbrotatedegree; // presumably software rotation of RGB frames — confirm
  bool pubrgb;
  bool enumnet;

  bool pubir;
  bool pubdotcloud;
  bool pubdepth;
  bool savedata;
  bool confidence;
  bool pubdepthalign;
  bool pubpseudo;
  bool lightfilter;
  bool badfilter;
  bool highprecision;
  bool pubimu;
  bool pubgoodfeature;
  bool edgefilter;
  float clip_distance;
  int iredgeoffset;
  int rgbedgeoffset;
  int depthedgeoffset;
  bool projectoron; // initial IR projector state

  // service config: which ROS services to advertise for this device
  bool getimuinternalref;
  bool getimuexternalref;
  bool get_sensor_exposure;
  bool set_sensor_exposure_manaul; // (sic) spelling kept — likely referenced elsewhere
  bool set_sensor_exposure_auto;
  bool getversions;
  bool get_device_list;
  bool getsnservice;
  bool getleftirparamservice;
  bool getrgbparamservice;
} COMMONCONFIG;

// Running FPS counter state for one stream; updated by countfps() below.
typedef struct {
  uint32_t countfps; // frames seen since start
  uint32_t lastfps;  // value of countfps at the last printout
  time_t last;       // wall-clock time of the last printout (0 = not started)
} COUNTFPSDATA;

// Everything the driver tracks for one physical camera: worker threads,
// parsed launch configuration, SDK handles, the advertised ROS services and
// publishers, depth-alignment state and per-stream FPS counters.
typedef struct
{
  // worker threads
  pthread_t threadid;
  pthread_t depththreadid;
  pthread_t saveimudatathreadid; // only started when SAVE_IMU_DATA is defined
  COMMONCONFIG config;
  depthtransformer *depthtrans;
  NVPTL_DEVICE_INFO rawdevinfo; // raw enumeration record (usb_camera_name, ...)
  nvpfm *fm;                    // SDK device handle used by all get_*/set_* calls
  char devicename[64];
  // char port_id[128];

  bool hasinitswitch;

  bool isfirsttime;

  // ROS services advertised for this device (a subset, per config flags)
  ros::ServiceServer getimuinternalrefservice;
  ros::ServiceServer getimuexternalrefservice;
  ros::ServiceServer setprojectorservice;
  ros::ServiceServer getversions;
  ros::ServiceServer getsnservice;
  ros::ServiceServer getleftirparamservice;
  ros::ServiceServer getrgbparamservice;
  ros::ServiceServer get_exposure_srv;
  ros::ServiceServer set_exposure_manual_srv;
  ros::ServiceServer set_exposure_auto_srv;

  // publishers for the individual streams / camera-info topics
  ros::Publisher leftircamerainfopublisher;
  ros::Publisher rightircamerainfopublisher;
  ros::Publisher rgbcamerainfopublisher;
  ros::Publisher depthrawpublisher;
  ros::Publisher depthrawleftpublisher;
  ros::Publisher depthrawrightpublisher;
  ros::Publisher temperaturepublisher;
  ros::Publisher dotcloudpublisher;
  ros::Publisher depthcamerainfopublisher;
  image_transport::Publisher rgbpublisher;
  // image_transport::Publisher rgb_blend_publisher;

  ros::Publisher h265publisher;
  ros::Publisher rgbrawpublisher;
  ros::Publisher sensorrawleftpublisher;
  ros::Publisher sensorrawrightpublisher;
  image_transport::Publisher rectifyleftpublisher;
  image_transport::Publisher rectifyrightpublisher;
  ros::Publisher cnnpublisher;
  ros::Publisher logpublisher;
  // ros::Publisher imupublisher;
  ros::Publisher imupublisher_single; // per-sample sensor_msgs::Imu (see imucallback)
  ros::Publisher depthalignrgbpublisher;
  ros::Publisher depthalignrgbviewpublisher;
  image_transport::Publisher depthpseudopublisher;
  ros::Publisher lkpublisher;
  bool hasinitpointcloud;

  // precomputed projection grids — presumably per-pixel X/Y factors for the
  // point cloud; built elsewhere, confirm against the depth thread
  Mat *Xs;
  Mat *Ys;
  sensor_msgs::CameraInfo caminfo;
  bool initCaminfo;

  // depth-in-RGB extrinsics/intrinsics used for depth-to-RGB alignment
  float DEPTHINRGBROTATE[9];
  float DEPTHINRGBOFFSET[3];
  float RGBFX;
  float RGBFY;
  float RGBX;
  float RGBY;
  bool hassetedge;
  bool hassetconfidence;
  tf2_ros::StaticTransformBroadcaster *tf_broadcaster;
  std::vector<s_nvpfm_camera_param> *camparam;
  // lock-free ring queues between SDK callbacks and worker threads
  Ring_Queue *depthdataqueue;
  Ring_Queue *depthaligndataqueue;
  Ring_Queue *imudataqueue; // only used when SAVE_IMU_DATA is defined
  bool willrun;             // cleared to stop the worker threads
  s_nvpfm_dev_info devinfo; // cached device info (sn, versions, ...)
  int rgbwidth;
  int rgbheight;
  char topic_name[255]; // ROS-topic-safe device id (see devid_to_topicid)

  bool chn2_265_enabled;
  bool chn3_265_enabled;

  // per-stream FPS accounting (see countfps)
  COUNTFPSDATA rgbcountfps;
  COUNTFPSDATA depthcountfps;
  COUNTFPSDATA leftircountfps;
  COUNTFPSDATA rightircountfps;
  // global_vars *gv;
} DEVICEINFO;

// Count one frame for the given stream and print its average FPS roughly
// every five seconds (averaged over the elapsed window).
void countfps(COUNTFPSDATA *pdata, const char *streamtype, int width, int height) {
  if (pdata->last == 0)
    pdata->last = time(NULL);
  pdata->countfps++;
  const time_t now = time(NULL);
  const time_t elapsed = now - pdata->last;
  if (elapsed > 5) {
    const float fps = (float)(pdata->countfps - pdata->lastfps) / (float)elapsed;
    printf("%s %dx%d fps:%f\n", streamtype, width, height, fps);
    pdata->last = now;
    pdata->lastfps = pdata->countfps;
  }
}

// global value related ...
// Process-wide shared state: the device registry and its lock, the single
// GetDeviceList service, stream/sensor configuration and the RGB decoder.
struct global_vars {
  std::map<std::string, std::string> g_bindports;
  ros::ServiceServer get_device_list_srv;
  std::atomic<bool> global_srv_created; // set once get_device_list_srv exists
  // protection lock of device map
  std::mutex device_mu;
  std::map<std::string, DEVICEINFO *> g_devicemap; // key: device/topic id
  std::unordered_map<std::string, int> g_stream_and_sensor_configs;
  std::string rgbdecode; // requested RGB decode mode — usage not visible here
  codec_context *ctx;
#if 0
  struct timeval cur_time;
  std::atomic<int32_t> rgb_chn_enabled;
#endif
};

// The one process-wide state instance; presumably allocated during startup
// (allocation site not visible in this chunk).
global_vars *gv = NULL;

// Look up a device by topic id or normalized USB camera name; defined below.
static DEVICEINFO *get_dev_info(const std::string &topic_id);

void createpublisher(ros::NodeHandle node_obj, DEVICEINFO *info);

// The following function filters a point cloud along the x/y/z axis; e.g. to drop every point whose x coordinate is greater than 1, call range_remove(cloud, "x", 1, ">").
typedef pcl::PointCloud<pcl::PointXYZ> PointCloud;

// Filter a point cloud along one axis: points whose coordinate on `axis`
// compares true under `op` against `threshold` are REMOVED.
//
// @param cloud     input cloud (not modified)
// @param axis      the axis to operate on: "x", "y" or "z"
// @param threshold the coordinate limit
// @param op        ">" removes points above threshold; anything else removes
//                  points below it
// @return a new cloud containing only the kept points
//
// Fix: parameters were taken by value, copying the shared_ptr (atomic
// refcount) and both strings on every call; now passed by const reference.
PointCloud::Ptr range_remove(const PointCloud::Ptr &cloud, const std::string &axis, float threshold, const std::string &op) {
  PointCloud::Ptr filtered(new PointCloud);
  // The condition describes the points to KEEP, hence the inverse of `op`.
  pcl::ConditionOr<pcl::PointXYZ>::Ptr range_cond(
      new pcl::ConditionOr<pcl::PointXYZ>());
  pcl::ComparisonOps::CompareOp oper;
  if (op == ">")
    oper = pcl::ComparisonOps::LT;
  else
    oper = pcl::ComparisonOps::GT;

  range_cond->addComparison(pcl::FieldComparison<pcl::PointXYZ>::ConstPtr(
      new pcl::FieldComparison<pcl::PointXYZ>(axis, oper, threshold)));
  // Build the filter
  pcl::ConditionalRemoval<pcl::PointXYZ> condrem;
  condrem.setCondition(range_cond);
  condrem.setInputCloud(cloud);
  condrem.filter(*filtered);
  return filtered;
}

unsigned char *g_leftdepth = NULL;

// Runtime toggles for the optional depth post-filters — presumably flipped
// by the RemoveLight/RemoveBad services; handlers not visible in this chunk.
bool g_removelight = false;
bool g_removebad = false;

// float pixthreshold = 20;
// Tuning thresholds for the outlier / highlight-removal filters.
float sigmathreshold = 0.0392;
float light_sigmathreshold = 1;
int light_pixthreshold = 200;
int8_t light_erodesize = 9;
int8_t light_kernelsizew = 21;
int8_t light_kernelsizeh = 5;

// Zero out depth pixels whose local intensity variation is too low:
// computes a windowed sum of squared deviations from the local mean of
// `img`, then drops the depth values wherever that sum falls below
// `sigmaThr`. `depth` is modified in place. Always returns 0.
int32_t removeLowerOutliers(
    const cv::Mat &img,
    cv::Mat &depth,
    int32_t sigmaThr,
    int8_t winsize) {
  const Size window(winsize, winsize);
  const Point anchor(-1, -1);

  // Local mean of the guide image.
  Mat localMean;
  boxFilter(img, localMean, -1, window, anchor, true, 2);

  // Absolute deviation from the mean, then squared.
  Mat deviation;
  cv::absdiff(img, localMean, deviation); // todo: check: uint8 sub
  Mat sqDeviation = deviation.mul(deviation); // todo: check: mul out of range

  // Windowed sum of squared deviations (a variance proxy).
  Mat localSAD;
  boxFilter(sqDeviation, localSAD, -1, window, anchor, true, 2);

  // Pixels below the threshold are considered unreliable.
  Mat lowVarianceMask = localSAD < sigmaThr;

  // Keep only the reliable depth pixels (inverted mask).
  Mat keptDepth;
  depth.copyTo(keptDepth, 255 - lowVarianceMask);
  depth = keptDepth;
  return 0;
}

// Convert an angle from degrees to radians.
inline double deg2rad(const double &degrees) {
  return (degrees * M_PI / 180.0);
}

// 0.obtain device id from launch file
// 1.when device plugin,enumrate device and open it and recv data from it
// 2.while device id is not the one request,close it
// 3.while device id is the one request,open it and continue recv data from it

// A queued "save depth frames" request: how many frames to capture and the
// path (presumably a directory) to write them under.
typedef struct
{
  int filenums;
  char filepath[256];
} SAVEDEPTHTASK;

// One captured depth frame awaiting a disk write. The buffer is sized for
// the largest supported frame: 1280x800 at 16 bits per pixel.
typedef struct
{
  int len;
  char filename[256];
  char data[1280 * 800 * 2];
} DEPTHDATAINFO;

// Queue of pending SAVEDEPTHTASKs; producer/consumer not visible here.
Ring_Queue *taskqueue = NULL;
// Return a copy of `str` with every occurrence of `to_replaced` replaced by
// `newchars`; the input is not modified.
//
// Scanning resumes after each inserted replacement, so a `newchars` that
// itself contains `to_replaced` cannot loop forever.
//
// @return the transformed copy. If `to_replaced` is empty the input is
//         returned unchanged — fix: find("") matches at every position, so
//         the old loop inserted `newchars` endlessly and never terminated.
std::string replace_str(const std::string &str, const std::string &to_replaced, const std::string &newchars) {
  if (to_replaced.empty())
    return str;
  std::string tstr = str;
  std::string::size_type pos = 0;
  while ((pos = tstr.find(to_replaced, pos)) != std::string::npos) {
    tstr.replace(pos, to_replaced.length(), newchars);
    pos += newchars.length(); // skip over the inserted text
  }
  return tstr;
}

// Service handler: report the RGB intrinsics (focal lengths, photo center)
// and resolution of the device identified by req.deviceid.
//
// Fix: the old find_if predicate conflated "device not found" with "param
// query failed" — a failed get_camera_param() kept scanning and the caller
// was told the device did not exist. It also duplicated (and spammed a
// printf for) the name-normalization logic that get_dev_info() implements.
bool handle_getrgbparam_request(feynman_camera::GetRGBParamRequest &req,
                                feynman_camera::GetRGBParamResponse &res) {
  std::unique_lock<std::mutex> lg(gv->device_mu);
  // Matches either the topic name or the normalized USB camera name.
  DEVICEINFO *info = get_dev_info(req.deviceid);
  if (!info) {
    printf("Can't find device with id:%s.\n", req.deviceid.c_str());
    return false;
  }
  s_nvpfm_camera_param param;
  if (NVPTL_OK != info->fm->get_camera_param(&param)) {
    printf("Failed to get param!\n");
    return false;
  }
  res.fx = param.color_focus[0];
  res.fy = param.color_focus[1];
  res.pcx = param.color_photocenter[0];
  res.pcy = param.color_photocenter[1];

  res.width = param.rgbwidth;
  res.height = param.rgbheight;
  return true;
}
// Service handler: report the left-IR intrinsics (focal lengths, photo
// center) and resolution of the device identified by req.deviceid.
bool handle_getleftirparam_request(feynman_camera::GetLEFTIRParamRequest &req,
                                   feynman_camera::GetLEFTIRParamResponse &res) {
  std::unique_lock<std::mutex> lg(gv->device_mu);
  DEVICEINFO *info = get_dev_info(req.deviceid);
  if (!info) {
    // Fix: a std::string was passed to printf's %s (undefined behavior);
    // it must be converted with c_str().
    printf("Failed to find dev %s\n", req.deviceid.c_str());
    return false;
  }
  s_nvpfm_camera_param param;
  if (NVPTL_OK == info->fm->get_camera_param(&param)) {
    res.fx = param.left_ir_focus[0];
    res.fy = param.left_ir_focus[1];
    res.pcx = param.left_ir_photocenter[0];
    res.pcy = param.left_ir_photocenter[1];

    res.width = param.irwidth;
    res.height = param.irheight;
    return true;
  }
  printf("Failed to get ir  param!\n");
  return false;
}
// Service handler: switch the IR projector on channel 0 on/off and set its
// drive current (mA), preserving all other fields of the current projector
// configuration. Returns false if the device is unknown or any SDK call
// fails.
bool handle_setprojector_request(feynman_camera::SetProjectorRequest &req,
                                 feynman_camera::SetProjectorResponse &res) {
  std::unique_lock<std::mutex> lg(gv->device_mu);
  DEVICEINFO *dev = get_dev_info(req.deviceid);
  if (!dev) {
    return false;
  }
  // Read the current configuration first so only the requested fields change.
  s_nvpfm_get_projector query;
  query.channel = NVPFM_PROJECTOR_CHANNEL0;
  s_nvpfm_get_projector_ret current;
  if (NVPTL_OK != dev->fm->get_projector(&query, &current) || current.ret != 0) {
    return false;
  }
  s_nvpfm_set_projector update;
  update.channel = NVPFM_PROJECTOR_CHANNEL0;
  update.config = current.config;
  update.config.open = req.enable ? 1 : 0;
  update.config.projector_mA = req.current;
  return NVPTL_OK == dev->fm->set_projector(&update);
}
// Service handler: return the serial number of the requested device.
bool handle_getsn_request(feynman_camera::GetSNRequest &req,
                          feynman_camera::GetSNResponse &res) {
  std::unique_lock<std::mutex> lg(gv->device_mu);
  DEVICEINFO *dev = get_dev_info(req.deviceid);
  if (!dev) {
    return false;
  }
  s_nvpfm_dev_info devinfo;
  if (NVPTL_OK != dev->fm->get_devinfo(&devinfo)) {
    return false;
  }
  res.sn = devinfo.sn;
  return true;
}
// Find the DEVICEINFO whose topic name — or whose normalized USB camera name
// ("feynman-" prefix stripped, '-' and '.' mapped to '_') — equals topic_id.
// Caller must hold gv->device_mu. Returns nullptr when nothing matches.
static DEVICEINFO *get_dev_info(const std::string &topic_id) {
  if (gv->g_devicemap.empty()) {
    printf("Empty device map!\n");
    return nullptr;
  }
  for (auto &entry : gv->g_devicemap) {
    DEVICEINFO *candidate = entry.second;
    std::string normalized = candidate->rawdevinfo.usb_camera_name;
    normalized = replace_str(normalized, "feynman-", "");
    normalized = replace_str(normalized, "-", "_");
    normalized = replace_str(normalized, ".", "_");
    if (candidate->topic_name == topic_id || normalized == topic_id) {
      return candidate;
    }
  }
  printf("No device named:%s\n", topic_id.c_str());
  return nullptr;
}

// Exposure-control modes written into the SDK's exposure_mode field by the
// set-exposure handlers below.
enum ExposureMode {
  ExposureModeNon = -1,   // unset / unknown
  ExposureModeAuto = 0,   // automatic exposure
  ExposureModeManual      // manual exposure (value 1)
};
// Service handler: read the current exposure configuration of one sensor
// ("leftir"/"rightir" share one query, or "rgb") into the response.
//
// Fix: the result of get_irexposure/get_rgbexposure was ignored, so on a
// failed query the uninitialized local `r` was copied into the response.
static bool
handle_get_sensor_exposure(feynman_camera::GetExposureRequest &req, feynman_camera::GetExposureResponse &rsp) {
  std::unique_lock<std::mutex> lg(gv->device_mu);
  DEVICEINFO *info = get_dev_info(req.device_id);
  if (!info) {
    return false;
  }
  s_nvpfm_get_sensor_exposure_ret r;
  NVPTL_RESULT queryres;
  if (req.sensor_name == "leftir" || req.sensor_name == "rightir") {
    queryres = info->fm->get_irexposure(&r);
  } else if (req.sensor_name == "rgb") {
    queryres = info->fm->get_rgbexposure(&r);
  } else {
    printf("Invalid request sensor type:%s\n", req.sensor_name.c_str());
    return false;
  }
  if (queryres != NVPTL_OK) {
    printf("Failed to get exposure of %s\n", req.sensor_name.c_str());
    return false;
  }

  rsp.exposure_mode = r.exposure.exposure_mode;
  rsp.exposure_time = r.exposure.exposure_time;
  rsp.digital_gain = r.exposure.digital_gain;
  rsp.AE_compensation_id = r.exposure.AE_compensation_id;
  rsp.AE_tail_weight = r.exposure.AE_tail_weight;
  rsp.max_exposure_time = r.exposure.max_exposure_time;
  rsp.max_again = r.exposure.max_again;
// printf("Exposure info of %s, expus:%d, gain:%d, isauto:%d\n", req.sensor_name.c_str(), rsp.exposureus, rsp.gain, rsp.isauto);
#ifdef PRINT_DEBUG
  printf("DUMP Get Exposure =======================>\n");
  printf("exposure_mode: %d\n", r.exposure.exposure_mode);
  printf("exposure_time: %d\n", r.exposure.exposure_time);
  printf("digital_gain: %d\n", r.exposure.digital_gain);
  printf("AE_compensation_id: %d\n", r.exposure.AE_compensation_id);
  printf("AE_tail_weight: %d\n", r.exposure.AE_tail_weight);
  printf("max_exposure_time: %d\n", r.exposure.max_exposure_time);
  printf("max_again: %d\n", r.exposure.max_again);
  printf("DUMP Get Exposure End =======================>\n\n");
#endif
  return true;
}
// Service handler: switch one sensor ("leftir", "rightir" or "rgb") to
// manual exposure with the requested exposure time and digital gain. All
// other exposure fields are preserved from the device's current config.
//
// Fix: the result of the get_*exposure query was ignored, so `old_conf`
// could be read uninitialized and written back to the device. Dead #if 0
// blocks removed.
static bool
handle_set_manual_exposure(feynman_camera::SetManualExposureRequest &req, feynman_camera::SetManualExposureResponse &rsp) {
  std::unique_lock<std::mutex> lg(gv->device_mu);
  auto info = get_dev_info(req.device_id);
  if (!info) {
    return false;
  }
  s_nvpfm_get_sensor_exposure_ret old_conf;
  s_nvpfm_set_sensor_exposure e;
  NVPTL_RESULT queryres;

  if (req.sensor_name == "leftir") {
    e.channel = E_NVPFM_SENSOR_CHANNEL::CHANNEL0;
    queryres = info->fm->get_irexposure(&old_conf);
  } else if (req.sensor_name == "rightir") {
    e.channel = E_NVPFM_SENSOR_CHANNEL::CHANNEL1;
    queryres = info->fm->get_irexposure(&old_conf);
  } else if (req.sensor_name == "rgb") {
    e.channel = E_NVPFM_SENSOR_CHANNEL::CHANNEL2;
    queryres = info->fm->get_rgbexposure(&old_conf);
  } else {
    return false;
  }
  if (queryres != NVPTL_OK) {
    printf("Failed to read current exposure of %s\n", req.sensor_name.c_str());
    return false;
  }
  e.config = old_conf.exposure;
  e.config.exposure_mode = ExposureModeManual;
  e.config.exposure_time = req.exposure_time;
  // max or digital?
  e.config.digital_gain = req.digital_gain;

#ifdef PRINT_DEBUG
  printf("DUMP Set Manual Exposure =======================>\n");

  printf("old exposure_mode: %d\n", old_conf.exposure.exposure_mode);
  printf("old exposure_time: %d\n", old_conf.exposure.exposure_time);
  printf("old digital_gain: %d\n", old_conf.exposure.digital_gain);
  printf("old AE_compensation_id: %d\n", old_conf.exposure.AE_compensation_id);
  printf("old AE_tail_weight: %d\n", old_conf.exposure.AE_tail_weight);
  printf("old max_exposure_time: %d\n", old_conf.exposure.max_exposure_time);
  printf("old max_again: %d\n", old_conf.exposure.max_again);

  printf("exposure_mode: %d\n", e.config.exposure_mode);
  printf("exposure_time: %d\n", e.config.exposure_time);
  printf("digital_gain: %d\n", e.config.digital_gain);
  printf("AE_compensation_id: %d\n", e.config.AE_compensation_id);
  printf("AE_tail_weight: %d\n", e.config.AE_tail_weight);
  printf("max_exposure_time: %d\n", e.config.max_exposure_time);
  printf("max_again: %d\n", e.config.max_again);
  printf("DUMP Set Exposure End =======================>\n\n");
#endif
  NVPTL_RESULT res;
  if (req.sensor_name == "leftir" || req.sensor_name == "rightir") {
    res = info->fm->set_irexposure(&e);
  } else {
    res = info->fm->set_rgbexposure(&e);
  }
  if (res != NVPTL_OK) {
    printf("Set exposure failed!\n");
    return false;
  }
  return true;
}

// Service handler: switch one sensor ("leftir", "rightir" or "rgb") to auto
// exposure with the requested AE limits/weights, preserving the remaining
// fields from the device's current config.
//
// Fixes: (1) the get_*exposure result was ignored, so `old_conf` could be
// read uninitialized; (2) the set_*exposure result was ignored and the
// service always reported success — now checked, consistent with
// handle_set_manual_exposure. Dead #if 0 block removed.
static bool
handle_set_auto_exposure(feynman_camera::SetAutoExposureRequest &req, feynman_camera::SetAutoExposureResponse &rsp) {
  std::unique_lock<std::mutex> lg(gv->device_mu);
  auto info = get_dev_info(req.device_id);
  if (!info) {
    return false;
  }
  s_nvpfm_get_sensor_exposure_ret old_conf;
  s_nvpfm_set_sensor_exposure e;
  NVPTL_RESULT queryres;

  if (req.sensor_name == "leftir") {
    e.channel = E_NVPFM_SENSOR_CHANNEL::CHANNEL0;
    queryres = info->fm->get_irexposure(&old_conf);
  } else if (req.sensor_name == "rightir") {
    e.channel = E_NVPFM_SENSOR_CHANNEL::CHANNEL1;
    queryres = info->fm->get_irexposure(&old_conf);
  } else if (req.sensor_name == "rgb") {
    e.channel = E_NVPFM_SENSOR_CHANNEL::CHANNEL2;
    queryres = info->fm->get_rgbexposure(&old_conf);
  } else {
    return false;
  }
  if (queryres != NVPTL_OK) {
    printf("Failed to read current exposure of %s\n", req.sensor_name.c_str());
    return false;
  }
  e.config = old_conf.exposure;
  e.config.exposure_mode = ExposureModeAuto;
  e.config.max_exposure_time = req.max_exposure_time;
  // max or digital?
  e.config.max_again = req.max_again;
  e.config.AE_compensation_id = req.AE_compensation_id;
  e.config.AE_tail_weight = req.AE_tail_weight;

#ifdef PRINT_DEBUG
  printf("DUMP Set Auto Exposure =======================>\n");
  printf("exposure_mode: %d\n", e.config.exposure_mode);
  printf("exposure_time: %d\n", e.config.exposure_time);
  printf("digital_gain: %d\n", e.config.digital_gain);
  printf("AE_compensation_id: %d\n", e.config.AE_compensation_id);
  printf("AE_tail_weight: %d\n", e.config.AE_tail_weight);
  printf("max_exposure_time: %d\n", e.config.max_exposure_time);
  printf("max_again: %d\n", e.config.max_again);
  printf("DUMP Set Exposure End =======================>\n\n");
#endif

  NVPTL_RESULT setres;
  if (req.sensor_name == "leftir" || req.sensor_name == "rightir") {
    setres = info->fm->set_irexposure(&e);
  } else {
    setres = info->fm->set_rgbexposure(&e);
  }
  if (setres != NVPTL_OK) {
    printf("Set exposure failed!\n");
    return false;
  }
  return true;
}
// Service handler: list the topic ids and serial numbers of all connected
// devices, in matching order.
static bool handle_get_dev_list(feynman_camera::GetDeviceListRequest &req, feynman_camera::GetDeviceListResponse &rsp) {
  std::unique_lock<std::mutex> lg(gv->device_mu);
  rsp.device_ids.reserve(gv->g_devicemap.size());
  rsp.device_sns.reserve(gv->g_devicemap.size());
  for (const auto &entry : gv->g_devicemap) {
    rsp.device_ids.push_back(entry.second->topic_name);
    rsp.device_sns.push_back(entry.second->devinfo.sn);
  }
  return true;
}

// Service handler: fetch the IMU<->camera extrinsic (4x4 t_cam_imu) and the
// IMU noise/random-walk parameters from the device into the response.
bool handle_getimuexternalref_request(feynman_camera::GetImuExternalRefRequest &req,
                                      feynman_camera::GetImuExternalRefResponse &res) {
  std::unique_lock<std::mutex> lg(gv->device_mu);
  if (gv->g_devicemap.empty())
    return false;
  // std::map<std::string, DEVICEINFO *>::iterator it = g_devicemap.begin();
  DEVICEINFO *dev = get_dev_info(req.deviceid);
  if (!dev) {
    return false;
  }
  s_nvpfm_imu_external_reference *ref = dev->fm->get_imu_externalref();
  if (NULL == ref) {
    std::cout << "fail to get imu externalref!" << std::endl;
    return false;
  }
  // Dump for debugging, then copy everything into the response.
  printf("got imu external ref:\n");
  printf("t_cam_imu:\n");
  for (int i = 0; i < 16; i++) {
    printf("%f ", ref->t_cam_imu[i]);
  }
  printf("\n");
  printf("imu acc_noise_density:%f\n", ref->acc_noise_density);
  printf("imu acc_random_walk:%f\n", ref->acc_random_walk);
  printf("imu gyro_noise_density:%f\n", ref->gyro_noise_density);
  printf("imu gyro_random_walk:%f\n", ref->gyro_random_walk);
  printf("imu timeshift_cam_imu:%f\n", ref->timeshift_cam_imu);

  res.t_cam_imu.assign(ref->t_cam_imu, ref->t_cam_imu + 16);
  res.acc_noise_density = ref->acc_noise_density;
  res.acc_random_walk = ref->acc_random_walk;
  res.gyro_noise_density = ref->gyro_noise_density;
  res.gyro_random_walk = ref->gyro_random_walk;
  res.timeshift_cam_imu = ref->timeshift_cam_imu;
  return true;
}

// Service handler: fetch the IMU intrinsic calibration (3x3 accel_m matrix
// plus accel/gyro biases) from the device into the response.
bool handle_getimuinternalref_request(feynman_camera::GetImuInternalRefRequest &req,
                                      feynman_camera::GetImuInternalRefResponse &res) {
  std::unique_lock<std::mutex> lg(gv->device_mu);
  if (gv->g_devicemap.empty())
    return false;
  DEVICEINFO *dev = get_dev_info(req.deviceid);
  if (!dev) {
    // printf("No device named %s\n");
    return false;
  }
  s_nvpfm_imu_internal_reference *ref = dev->fm->get_imu_internalref();
  if (NULL == ref) {
    std::cout << "fail to get imu internalref!" << std::endl;
    return false;
  }
  // Dump for debugging, then copy everything into the response.
  printf("got imu internal ref:\n");
  printf("imu accel_m:\n");
  for (int i = 0; i < 9; i++) {
    printf("%f ", ref->accel_m[i]);
  }
  printf("\n");
  printf("imu accel_b:%f %f %f\n",
         ref->accel_b[0],
         ref->accel_b[1],
         ref->accel_b[2]);
  printf("imu gyro_b:%f %f %f\n",
         ref->gyro_b[0],
         ref->gyro_b[1],
         ref->gyro_b[2]);

  res.accel_m.assign(ref->accel_m, ref->accel_m + 9);
  res.accel_b.assign(ref->accel_b, ref->accel_b + 3);
  res.gyro_b.assign(ref->gyro_b, ref->gyro_b + 3);
  return true;
}

// Service handler: report the firmware and SDK versions of the device whose
// serial number matches req.sn. Returns false when no device matches or the
// device-info query fails for every candidate.
bool handle_getversions_request(feynman_camera::GetVersionsRequest &req,
                                feynman_camera::GetVersionsResponse &res) {
  std::unique_lock<std::mutex> lg(gv->device_mu);
  for (auto &entry : gv->g_devicemap) {
    DEVICEINFO *dev = entry.second;
    if (dev->devinfo.sn != req.sn) {
      continue;
    }
    s_nvpfm_dev_info devinfo;
    if (dev->fm->get_devinfo(&devinfo) != NVPTL_OK) {
      continue; // same as the old find_if predicate: keep scanning on failure
    }
    res.firewareVer = devinfo.software_version;
    res.sdkVer = dev->fm->get_sdk_version();
    printf("software version: %s, sdk version:%s\n", res.firewareVer.c_str(), res.sdkVer.c_str());
    return true;
  }
  return false;
}
// Scratch PointCloud2 message and two counters — presumably reused by the
// point-cloud publishing path; usage not visible in this chunk.
sensor_msgs::PointCloud2 pc2;
uint64_t pc2c1 = 0;
uint64_t pc2c2 = 0;

#ifdef SAVE_IMU_DATA
// Worker thread: drain the device's IMU ring queue and append each sample
// (timestamp, gyro converted to rad/s, raw accel) as a CSV row to
// SAVE_IMU_DATA_PATH. Runs until info->willrun turns false.
//
// Fixes: the FILE* was a function-local static (shared by every device's
// thread and never closed); fopen() failure was unchecked, crashing the
// first fprintf; and the non-void thread function had no return statement
// (undefined behavior).
void *saveimudatathread(void *param) {
  DEVICEINFO *ptmpinfo = (DEVICEINFO *)param;
  IMU_APPLICATION_DATA_STRUC *pdata = (IMU_APPLICATION_DATA_STRUC *)malloc(sizeof(IMU_APPLICATION_DATA_STRUC));
  FILE *fp = NULL; // opened lazily on the first sample
  while (ptmpinfo->willrun) {
    IMU_APPLICATION_DATA_STRUC *p = (IMU_APPLICATION_DATA_STRUC *)SOLO_Read(ptmpinfo->imudataqueue);
    if (!p) {
      continue;
    }
    memcpy(pdata, p, sizeof(IMU_APPLICATION_DATA_STRUC));
    SOLO_Read_Over(ptmpinfo->imudataqueue);

    if (fp == NULL) {
      fp = fopen(SAVE_IMU_DATA_PATH, "w+");
      if (fp == NULL) {
        printf("Failed to open imu data file!\n");
        break;
      }
    }
    fprintf(fp, "%llu,%f,%f,%f,%f,%f,%f\n",
            pdata->timestamp,
            deg2rad(pdata->stGyroCaliData.fX),
            deg2rad(pdata->stGyroCaliData.fY),
            deg2rad(pdata->stGyroCaliData.fZ),
            pdata->stAccelCaliData.fX,
            pdata->stAccelCaliData.fY,
            pdata->stAccelCaliData.fZ);
    fflush(fp);
  }
  if (fp != NULL)
    fclose(fp);
  free(pdata);
  return NULL;
}
#endif
// SDK callback for IMU packets.
//
// Each packet carries `data_number` samples in one of two layouts selected
// by `factory_data` (0 = IMU_APPLICATION_DATA_STRUC, 1 =
// IMU_FACTORY_DATA_STRUC). Every sample is converted to a
// sensor_msgs::Imu — gyro degrees/s -> rad/s, accel passed through — and
// published on the device's imupublisher_single when it has subscribers.
// Sample timestamps are multiplied by 1000 for fromNSec, i.e. the device
// clock is assumed to be in microseconds — TODO confirm against the SDK.
//
// @param data     NVPTL_USBHeaderDataPacket from the SDK
// @param userdata the owning DEVICEINFO*
void imucallback(void *data, void *userdata) {
  // printf("imucallback!!!\n");
  DEVICEINFO *info = (DEVICEINFO *)userdata;
  // ROS_INFO("got imu packet!!!\n");
  NVPTL_USBHeaderDataPacket *tmppack = (NVPTL_USBHeaderDataPacket *)data;

  s_nvpfm_imu_data *tmpimudata = (s_nvpfm_imu_data *)tmppack->data;
  // feynman_camera::imu_info imudata;
  // ROS_INFO("type:%d,sub_type:%d,imu data number:%d\n", tmppack->type, tmppack->sub_type, tmpimudata->data_number);

  if (tmppack->type == NVPFM_IMU_DATA) {

    // Sanity check: payload must be larger than the fixed struct and carry
    // at least one sample.
    if (sizeof(s_nvpfm_imu_data) < tmppack->len && tmpimudata->data_number > 0) {
      // imudata.imu_frames.resize(tmpimudata->data_number);
      NVP_U64 firsttimestamp = 0, lasttimestamp = 0;
      if (tmpimudata->factory_data == 0 && tmpimudata->data_number > 0) {
        IMU_APPLICATION_DATA_STRUC *pdata = (IMU_APPLICATION_DATA_STRUC *)tmpimudata->data;
        firsttimestamp = pdata->timestamp;
        lasttimestamp = (pdata + tmpimudata->data_number - 1)->timestamp;
#ifdef SAVE_IMU_DATA
        // Lazily create the CSV ring queue and its writer thread on the
        // first application-mode packet.
        if (NULL == info->imudataqueue) {
          info->imudataqueue = Create_Ring_Queue(tmpimudata->data_number, sizeof(IMU_APPLICATION_DATA_STRUC));
          pthread_create(&info->saveimudatathreadid, NULL, saveimudatathread, info);
        }
#endif
      } else if (tmpimudata->factory_data == 1 && tmpimudata->data_number > 0) {
        IMU_FACTORY_DATA_STRUC *pdata = (IMU_FACTORY_DATA_STRUC *)tmpimudata->data;
        firsttimestamp = pdata->timestamp;
        lasttimestamp = (pdata + tmpimudata->data_number - 1)->timestamp;
      }
      // NOTE(review): `offset` and `lastbegintimestamp` are computed but
      // never used; the statics are also shared across all devices.
      static uint64_t lastbegintimestamp = 0, lastendtimestamp = 0;
      int offset = firsttimestamp - lastendtimestamp;
      lastbegintimestamp = firsttimestamp;
      lastendtimestamp = lasttimestamp;

      // Convert and publish every sample in the packet.
      for (int i = 0; i < tmpimudata->data_number; i++) {
        sensor_msgs::Imu imu_msg;
        imu_msg.header.frame_id = "imu";

        ros::Time tmptime;

        if (tmpimudata->factory_data == 0 && tmpimudata->data_number > 0) {
          IMU_APPLICATION_DATA_STRUC *pdata = (IMU_APPLICATION_DATA_STRUC *)tmpimudata->data;

          tmptime.fromNSec((pdata + i)->timestamp * 1000);
          imu_msg.header.stamp = tmptime; // hardTimeToSoftTime((pdata+i)->timestamp);
          imu_msg.angular_velocity.x = deg2rad((pdata + i)->stGyroCaliData.fX);
          imu_msg.angular_velocity.y = deg2rad((pdata + i)->stGyroCaliData.fY);
          imu_msg.angular_velocity.z = deg2rad((pdata + i)->stGyroCaliData.fZ);
          imu_msg.linear_acceleration.x = (pdata + i)->stAccelCaliData.fX;
          imu_msg.linear_acceleration.y = (pdata + i)->stAccelCaliData.fY;
          imu_msg.linear_acceleration.z = (pdata + i)->stAccelCaliData.fZ;
#ifdef SAVE_IMU_DATA
          // Hand the raw sample to the CSV writer thread (drops when full).
          if (NULL != info->imudataqueue) {
            IMU_APPLICATION_DATA_STRUC *p = (IMU_APPLICATION_DATA_STRUC *)SOLO_Write(info->imudataqueue);
            if (p) {
              memcpy(p, pdata + i, sizeof(IMU_APPLICATION_DATA_STRUC));
              SOLO_Write_Over(info->imudataqueue);
            }
          }
#endif
        } else if (tmpimudata->factory_data == 1 && tmpimudata->data_number > 0) {
          IMU_FACTORY_DATA_STRUC *pdata = (IMU_FACTORY_DATA_STRUC *)tmpimudata->data;
          tmptime.fromNSec((pdata + i)->timestamp * 1000);
          imu_msg.header.stamp = tmptime; // hardTimeToSoftTime((pdata+i)->timestamp);
          imu_msg.angular_velocity.x = deg2rad((pdata + i)->stGyroCaliData.fX);
          imu_msg.angular_velocity.y = deg2rad((pdata + i)->stGyroCaliData.fY);
          imu_msg.angular_velocity.z = deg2rad((pdata + i)->stGyroCaliData.fZ);
          imu_msg.linear_acceleration.x = (pdata + i)->stAccelCaliData.fX;
          imu_msg.linear_acceleration.y = (pdata + i)->stAccelCaliData.fY;
          imu_msg.linear_acceleration.z = (pdata + i)->stAccelCaliData.fZ;
        }

        // Publish only when someone is listening.
        if (info->imupublisher_single.getNumSubscribers() > 0)
          info->imupublisher_single.publish(imu_msg);
        {
          // Local FPS accounting (print disabled).
          static unsigned int countfps = 0, lastfps = 0;
          countfps++;
          static time_t last = time(NULL);
          time_t current = time(NULL);
          if ((current - last) > 5) {
            // printf("imufps:%f\n", (float)(countfps - lastfps) / (float)(current - last));
            last = current;
            lastfps = countfps;
          }
        }
      }
    } else {
      printf("imudata len invalid:%d!=%lu\n", tmppack->len, sizeof(s_nvpfm_imu_data));
    }
  }
}
/*
typedef struct
{
  unsigned char r;
  unsigned char g;
  unsigned char b;
} MYRGB;*/
// Global 16-bit-depth -> pseudo-color lookup table (65536 entries, one per
// possible raw depth value).  Entries outside [DEPTHMIN, DEPTHMAX] stay black.
MYRGB *g_colortable = NULL;

// Clamp a float to the [0,255] byte range before narrowing.  The YUV->RGB
// conversion below can produce values outside that range, and casting a
// negative float to unsigned char is undefined behavior.
static unsigned char clamp_to_byte(float v) {
  if (v < 0.0f)
    return 0;
  if (v > 255.0f)
    return 255;
  return (unsigned char)v;
}

// Build g_colortable once: map each depth in [DEPTHMIN, DEPTHMAX] to a color
// ramp index (0..254), look up its YUV via nvpfm_getyuvfromindex(), and store
// the BT.601 RGB equivalent.  Subsequent calls are no-ops.
void calculatecolortable() {

  static bool inited = false;
  if (inited) {
    return;
  } else {
    inited = true;
  }
  if (NULL == g_colortable) {
    g_colortable = (MYRGB *)calloc(1, sizeof(MYRGB) * 65536);
  }
  if (NULL == g_colortable) {
    // Allocation failed: leave the table absent and allow a retry later.
    inited = false;
    return;
  }
  memset(g_colortable, 0, sizeof(MYRGB) * 65536);

  for (int i = (int)DEPTHMIN; i <= (int)DEPTHMAX; i++) { // 0.2m-5m
    // Scale depth into a 0..255 ramp index, then fold 255 down to 254 so the
    // index stays within the color map's valid range.
    int effectindex = (int)((float)(i - (int)DEPTHMIN) * 255.0 / (float)(DEPTHMAX - DEPTHMIN));
    if (effectindex == 255)
      effectindex = 254;

    unsigned char y = 0;
    unsigned char u = 0;
    unsigned char v = 0;
    int result = nvpfm_getyuvfromindex(effectindex, &y, &u, &v);
    if (result >= 0) {
      // BT.601 YUV -> RGB.  Results may fall outside [0,255] (e.g. negative
      // green), so clamp before narrowing to a byte.
      float fr = y + 1.4075 * (v - 128);

      float fg = y - 0.3455 * (u - 128) - 0.7169 * (v - 128);

      float fb = y + 1.779 * (u - 128);
      g_colortable[i].r = clamp_to_byte(fr);
      g_colortable[i].g = clamp_to_byte(fg);
      g_colortable[i].b = clamp_to_byte(fb);
    }
  }
}
/*
static void merge_rgb8_image(std::vector<uint8_t> &li, const std::vector<uint8_t> &ri, double alpha)
{
  // printf("rgb size:%d, persudo size:%d\n", li.size() / 3, ri.size() / 3);
  assert(li.size() == ri.size());
  for (int i = 0; i < li.size(); ++i)
  {
    li[i] = li[i] * (1 - alpha) + ri[i] * (alpha);
  }
}

template <typename T>
struct MatType
{
  using Type = Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic, Eigen::RowMajor>;
};

struct transform_matrix
{
  MatType<float>::Type col_mat;
  MatType<float>::Type row_mat;
  int width = 0;
  int height = 0;
};

static const transform_matrix &get_trans_matrix(int w, int h)
{
  static transform_matrix tm;
  if (tm.width != w || tm.height != h)
  {
    tm.width = w;
    tm.height = h;
    tm.col_mat.resize(h, w);
    tm.row_mat.resize(h, w);
    for (uint32_t i = 0; i < h; ++i)
    {
      for (uint32_t j = 0; j < w; ++j)
      {
        // 记住每个元素的列信息
        tm.col_mat(i, j) = float(j);
        // 记住每个元素的行信息
        tm.row_mat(i, j) = float(i);
      }
    }
  }
  return tm;
}

static void
blend_rgb_frame(void *rgb_frame, void *depth_frame, void *ud)
{
  static constexpr int packet_size = sizeof(NVPTL_USBHeaderDataPacket);

  auto func_get_frame_pointer = [](NVPFM_USB_IMAGE_HEADER *h)
  {
    return (reinterpret_cast<uint8_t *>(h) + sizeof(NVPFM_USB_IMAGE_HEADER));
  };
  DEVICEINFO *info = (DEVICEINFO *)ud;
  nvpfm *fm = info->fm;
  // grouppkt_info_custom_t *images = reinterpret_cast<grouppkt_info_custom_t *>(data);
  NVPFM_USB_IMAGE_HEADER *depth_header, *rgb_header, *leftir_header, *rightir_headr;
  // char *d = reinterpret_cast<char *>(images->depthbuffer);

  depth_header = reinterpret_cast<NVPFM_USB_IMAGE_HEADER *>(depth_frame + packet_size);
  rgb_header = reinterpret_cast<NVPFM_USB_IMAGE_HEADER *>(rgb_frame + packet_size);

  s_nvpfm_camera_param param;
#if 0
  if (NVPTL_OK != fm->get_camera_param(&param))
  {
    printf("Get camera parameter failed!\n");
    return;
  }
#endif
  if (info->camparam->empty())
  {
    printf("Camera param list is empty!\n");
    return;
  }
  param = info->camparam->at(0);
  // s_nvpfm_cam_param *param = &fm->get_camera_param(false)->get_cam_param();
  // focus coords.
  float rgbfx = param.color_focus[0];
  float rgbfy = param.color_focus[1];
  // rgb center coord
  float rgbcx = param.color_photocenter[0];
  float rgbcy = param.color_photocenter[1];
  // 左目相机的 中心点坐标和 fx/fy
  float cx = param.left_ir_photocenter[0];
  float cy = param.left_ir_photocenter[1];
  float fx = param.left_ir_focus[0];
  float fy = param.left_ir_focus[1];
  // rotate.
  std::vector<float> depth_rgb_r(9);
  // offset.
  std::vector<float> depth_rgb_off(3);
  // left2color_matrix
  memcpy(depth_rgb_r.data(), &param.left2color_matrix[0], sizeof(float) * 9);
  memcpy(depth_rgb_off.data(), &param.left2color_matrix[9], sizeof(float) * 3);
  Eigen::Map<MatType<uint16_t>::Type> depth_mat((uint16_t *)func_get_frame_pointer(depth_header), depth_header->height, depth_header->width);
  // to float
  MatType<float>::Type mat_depth_f = depth_mat.cast<float>();
#if 0
  MatType<float>::Type col_mat, row_mat;

  col_mat.resize(depth_header->height, depth_header->width);
  row_mat.resize(depth_header->height, depth_header->width);
  for (uint32_t i = 0; i < depth_header->height; ++i)
  {
    for (uint32_t j = 0; j < depth_header->width; ++j)
    {
      // 记住每个元素的列信息
      col_mat(i, j) = float(j);
      // 记住每个元素的行信息
      row_mat(i, j) = float(i);
    }
  }
#endif
  auto &tm = get_trans_matrix(depth_header->width, depth_header->height);
// what if fx = 0?
// the point cloud.
#if 1
  MatType<float>::Type fx_map(tm.col_mat.rows(), tm.col_mat.cols());
  fx_map.fill(cx);
  MatType<float>::Type fy_map(tm.row_mat.rows(), tm.row_mat.cols());
  fy_map.fill(cy);
#endif
  // 按元素相乘
  // 将列转换成以cx 为原点的 相对值 同下，得出大X 此既是点云

  MatType<float>::Type x_mat = (tm.col_mat - fx_map).cwiseProduct(mat_depth_f) / fx;
  // 将行转换成以cy 为原点的相对值，除以fy 就是 Y
  MatType<float>::Type y_mat = (tm.row_mat - fy_map).cwiseProduct(mat_depth_f) / fy;

  MatType<float>::Type z_mat = mat_depth_f;
  // z_mat = z_mat + depth_rgb_off[100];
// projection and to rgb.
// 点云映射
#if 1
  // 利用外参, 转换
  // 这个旋转矩阵可以是3轴旋转矩阵的乘积后与坐标的相乘
  // 此处就相当于把这个乘法展开了，依次对(x y z) 做乘法
  MatType<float>::Type rgb_x_mat = (depth_rgb_r[0] * x_mat + depth_rgb_r[1] * y_mat + depth_rgb_r[2] * z_mat);
  rgb_x_mat = rgb_x_mat.array() + depth_rgb_off[0];
  MatType<float>::Type rgb_y_mat = (depth_rgb_r[3] * x_mat + depth_rgb_r[4] * y_mat + depth_rgb_r[5] * z_mat);
  rgb_y_mat = rgb_y_mat.array() + depth_rgb_off[1];
  MatType<float>::Type rgb_z_mat = depth_rgb_r[6] * x_mat + depth_rgb_r[7] * y_mat + depth_rgb_r[8] * z_mat;
  rgb_z_mat = rgb_z_mat.array() + depth_rgb_off[2];
#endif

  // calculatecolortable();
  std::vector<uint8_t> rgb_coord_dp(rgb_header->width * 3 * rgb_header->height, 0);
  for (int i = 0; i < depth_header->height; ++i)
  {
    for (int j = 0; j < depth_header->width; ++j)
    {
      uint32_t coord_idx = i * depth_header->width + j;
      uint32_t pixel_color_idx = 3 * (coord_idx);
      float depth = rgb_z_mat(i, j); /// ???
      MYRGB color;
      if (depth >= DEPTHMIN && depth <= DEPTHMAX)
      {
        color = g_colortable[(int)depth];
      }
      else if (depth < DEPTHMIN)
      {
        color = color = MYRGB{0, 0, 0};
      }
      else
      {
        color = MYRGB{255, 0, 0};
      }

      if (depth > 0.00001)
      {
        // 此处要对上式子展开
        float u = rgb_x_mat(i, j);
        float v = rgb_y_mat(i, j);
        // float z = rgb_z_mat(i, j);
        u = u / depth;
        v = v / depth;
        // 计算点云在rgb 平面的投影，这个0.5不知道干啥的
        int iu = (int)(u * rgbfx + rgbcx + 0.5);
        int iv = (int)(v * rgbfy + rgbcy + 0.5);
        if (iu >= 0 && iu < rgb_header->width && iv >= 0 && iv < rgb_header->height)
        {
          int rgb_pix_idx = 3 * (iv * rgb_header->width + iu);
          rgb_coord_dp[rgb_pix_idx] = color.r;
          rgb_coord_dp[rgb_pix_idx + 1] = color.g;
          rgb_coord_dp[rgb_pix_idx + 2] = color.b;
        }
      }
    }
  }

  // 产生合并的image
  static constexpr float merge_alpha = 0.5;

  //  cv::merge(channels,merged);
  // 从rgb frame中读取彩色图像

  /// load rgb image
  // NVPTL_USBHeaderDataPacket *dp = (NVPTL_USBHeaderDataPacket *)func_get_frame_pointer(rgb_header);
  NVPTL_USBHeaderDataPacket *dp = (NVPTL_USBHeaderDataPacket *)(rgb_frame);
  NVPFM_USB_IMAGE_HEADER *pheader = (NVPFM_USB_IMAGE_HEADER *)((uint8_t *)rgb_frame + sizeof(NVPTL_USBHeaderDataPacket));
  if (info->config.rgbrotatedegree == 90)
  {
    nvpfm::nv12_rotate_90_i420(pheader);
  }
  else if (info->config.rgbrotatedegree == 270)
  {
    nvpfm::nv12_rotate_270_i420(pheader);
  }
  sensor_msgs::Image new_image;
  new_image.header.frame_id = "blended rgb frame";
  ros::Time tmptime;
  tmptime.fromNSec(rgb_header->timestamp * 1000);
  new_image.header.stamp = tmptime;
  new_image.width = rgb_header->width;
  new_image.height = rgb_header->height;
  new_image.is_bigendian = 0;
  new_image.encoding = sensor_msgs::image_encodings::RGB8;
  new_image.step = 3 * new_image.width;

  std::size_t data_size = new_image.step * new_image.height;
  new_image.data.resize(data_size);

  int width = new_image.width;
  int height = new_image.height;

  unsigned char *tmprgbimgdata = func_get_frame_pointer(rgb_header);
  static unsigned char *buffer = NULL;

  if (NULL == buffer)
    buffer = (unsigned char *)malloc(MAXWIDTH * MAXHEIGHT * 3);

  if (info->config.rgbrotatedegree == 90 || info->config.rgbrotatedegree == 270)
  {
    yuv420_rgb24_std(width, height, tmprgbimgdata, tmprgbimgdata + width * height, tmprgbimgdata + width * height * 5 / 4, width, width / 2, buffer, width * 3, YCBCR_601);
  }
  else
  {
    nv12_rgb24_std(width, height, tmprgbimgdata, tmprgbimgdata + width * height, width, width, buffer, width * 3, YCBCR_601);
  }
  unsigned char *in_ptr = reinterpret_cast<unsigned char *>(&new_image.data[0]);
  memcpy(in_ptr, buffer, data_size);
  // merge
  // printf("rgb size [%d, %d]; pesudo size [%d, %d]\n", height, width, depth_header->height, depth_header->width);
  merge_rgb8_image(new_image.data, rgb_coord_dp, merge_alpha);
  // public the image.
  info->rgb_blend_publisher.publish(new_image);
  // printf("Blend image is sent!\n");
}*/
// Worker thread: drains depth frames from info->depthdataqueue, converts each
// 16-bit depth image to a pcl::PointXYZ cloud, and publishes it on
// dotcloudpublisher.  Started lazily from depthcallback().
// @param param DEVICEINFO* for the device this thread serves
// @return NULL (thread exit value)
void *pointcloudthread(void *param) {
  // Reusable frame buffer sized for the largest supported depth image
  // (1280x800 @ 16 bit) plus its header; freed on thread exit.
  NVPFM_USB_IMAGE_HEADER *depthheader = (NVPFM_USB_IMAGE_HEADER *)malloc(sizeof(NVPFM_USB_IMAGE_HEADER) + 1280 * 800 * 2);
  // NOTE(review): shadowed by the identically-named local below and otherwise unused here.
  uint16_t *depthdata = (uint16_t *)((uint8_t *)depthheader + sizeof(NVPFM_USB_IMAGE_HEADER));
  DEVICEINFO *ptmpinfo = (DEVICEINFO *)param;

  while (ptmpinfo->willrun) {
    // No subscribers: idle cheaply instead of draining the queue.
    if (ptmpinfo->dotcloudpublisher.getNumSubscribers() <= 0) {
      // ROS_INFO("there's no pointcloud subcribers!!!\n");
      sleep(1);
      continue;
    }
    nvpfm_debug_printf("got pointcloud subscriber!!!!!!\n");
    // Non-blocking read of the next queued depth frame (NULL when empty).
    NVPFM_USB_IMAGE_HEADER *p = (NVPFM_USB_IMAGE_HEADER *)SOLO_Read(ptmpinfo->depthdataqueue);
    if (p) {
      // Copy the whole slot out so the ring slot can be released immediately.
      memcpy(depthheader, p, sizeof(NVPFM_USB_IMAGE_HEADER) + 1280 * 800 * 2);
      SOLO_Read_Over(ptmpinfo->depthdataqueue);
      nvpfm_debug_printf("got depth data!!!!!!\n");
      // Only publish when enabled and exactly one calibration record exists.
      if (ptmpinfo->config.pubdotcloud && ptmpinfo->camparam->size() == 1) {
        s_nvpfm_camera_param theparam = ptmpinfo->camparam->at(0);

        sensor_msgs::PointCloud2 pnew_dotcloud;
        pcl::PointCloud<pcl::PointXYZ>::Ptr pcloud(new pcl::PointCloud<pcl::PointXYZ>);
        Mat mp_pcloud;
        Mat mp_pcloudX;
        Mat mp_pcloudY;
        Mat mp_pcloudZ;
        float LEFTCAMERAFX;
        float LEFTCAMERAFY;
        float CAMERAT; // NOTE(review): never assigned or read
        float LEFTCAMERAX;
        float LEFTCAMERAY;

        // Left-IR intrinsics: principal point (cx, cy) and focal lengths (fx, fy).
        LEFTCAMERAX = theparam.left_ir_photocenter[0];
        LEFTCAMERAY = theparam.left_ir_photocenter[1];
        LEFTCAMERAFX = theparam.left_ir_focus[0];
        LEFTCAMERAFY = theparam.left_ir_focus[1];

        // One-time precomputation of the normalized ray table:
        // Xs[i] = (col - cx) / fx, Ys[i] = (row - cy) / fy, so each point is
        // simply (Xs*Z, Ys*Z, Z).  Assumes the frame size never changes after
        // the first frame — TODO confirm for multi-resolution streams.
        if (!ptmpinfo->hasinitpointcloud) {
          ptmpinfo->hasinitpointcloud = true;
          ptmpinfo->Xs = new Mat(depthheader->width * depthheader->height, 1, CV_32FC1);
          ptmpinfo->Ys = new Mat(depthheader->width * depthheader->height, 1, CV_32FC1);
          int i = 0;
          float *mpX = (float *)ptmpinfo->Xs->data;
          float *mpY = (float *)ptmpinfo->Ys->data;
          for (int row = 0; row < depthheader->height; row++) {
            for (int col = 0; col < depthheader->width; col++) {
              mpX[i] = ((float)col - LEFTCAMERAX) / LEFTCAMERAFX;
              mpY[i] = ((float)row - LEFTCAMERAY) / LEFTCAMERAFY;
              i++;
            }
          }
        }
        pcloud->width = depthheader->width;
        pcloud->height = depthheader->height;
        pcloud->points.resize(depthheader->width * depthheader->height);
        // Wrap the PCL point storage as an Nx4 float matrix; columns 0..2
        // alias x/y/z.  NOTE(review): relies on pcl::PointXYZ being exactly
        // four packed floats (x, y, z, padding) — confirm for this PCL build.
        mp_pcloud = Mat(depthheader->width * depthheader->height, 4, CV_32FC1, &pcloud->points[0]);
        mp_pcloudX = mp_pcloud.colRange(0, 1);
        mp_pcloudY = mp_pcloud.colRange(1, 2);
        mp_pcloudZ = mp_pcloud.colRange(2, 3);

        uint16_t *depthdata = (uint16_t *)((uint8_t *)depthheader + sizeof(NVPFM_USB_IMAGE_HEADER));

        Mat depthmat(depthheader->height * depthheader->width, 1, CV_16UC1, depthdata);

        // Scale by 0.001 — presumably millimeters to meters; verify against the device spec.
        depthmat.convertTo(depthmat, CV_32FC1, 0.001);
        if (ptmpinfo->config.clip_distance > 0.0) {
          // THRESH_TOZERO_INV: values > thresh become 0, the rest pass through.
          float thresh = (float)(ptmpinfo->config.clip_distance);
          cv::threshold(depthmat, mp_pcloudZ, thresh, 65.535, THRESH_TOZERO_INV);
        } else {
          depthmat.copyTo(mp_pcloudZ);
        }

        /*for (int i = 0,k=0; i <(depthheader->width*depthheader->height); i++)
        {
          float Z = float(depthdata[i]) / 1000.0f;

          float *mpZ = (float *)mp_pcloudZ.data;
            mpZ[k] = Z;
            k += 4;

        }*/
        // NOTE(review): assigning a MatExpr to a column view; this relies on
        // cv::Mat::operator=(MatExpr) evaluating into the existing submatrix
        // storage rather than rebinding the header — confirm it actually
        // fills pcloud's x/y columns.
        mp_pcloudX = ptmpinfo->Xs->mul(mp_pcloudZ);

        mp_pcloudY = ptmpinfo->Ys->mul(mp_pcloudZ);

        pcl::toROSMsg(*pcloud, pnew_dotcloud);

        pnew_dotcloud.header.frame_id = "camera_depth_optical_frame";
        ros::Time tmptime;
        // timestamp * 1000 converts to nanoseconds (source appears to be microseconds).
        tmptime.fromNSec(depthheader->timestamp * 1000);
        pnew_dotcloud.header.stamp = tmptime;
        nvpfm_debug_printf("will publish pointcloud!!!\n");
        ptmpinfo->dotcloudpublisher.publish(pnew_dotcloud);
      }
    }
    // Pace the loop at roughly 30 Hz.
    usleep(33 * 1000);
  }
  free(depthheader);
  return 0;
}
// Worker thread: reads depth frames from depthaligndataqueue, reprojects each
// one into the RGB camera's frame via compute_depth_rgb_align(), and publishes
// the aligned 16-bit depth image plus an optional pseudo-color view.
// @param param DEVICEINFO* for the device this thread serves
// @return NULL (thread exit value)
void *depthalignrgbthread(void *param) {
  // Reusable frame buffer sized for the largest supported depth image
  // (1280x800 @ 16 bit) plus its header; freed on thread exit.
  NVPFM_USB_IMAGE_HEADER *depthheader = (NVPFM_USB_IMAGE_HEADER *)malloc(sizeof(NVPFM_USB_IMAGE_HEADER) + 1280 * 800 * 2);
  // Pixel payload inside the buffer above; read by compute_depth_rgb_align below.
  uint16_t *depthdata = (uint16_t *)((uint8_t *)depthheader + sizeof(NVPFM_USB_IMAGE_HEADER));
  DEVICEINFO *ptmpinfo = (DEVICEINFO *)param;

  while (ptmpinfo->willrun) {
    // Idle while neither aligned-depth topic has subscribers.
    if (ptmpinfo->depthalignrgbpublisher.getNumSubscribers() <= 0 && ptmpinfo->depthalignrgbviewpublisher.getNumSubscribers() <= 0) {
      // ROS_INFO("there's no pointcloud subcribers!!!\n");
      sleep(1);
      continue;
    }
    nvpfm_debug_printf("got depthalignrgbpublisher subscriber!!!!!!\n");
    // Non-blocking read of the next queued depth frame (NULL when empty).
    NVPFM_USB_IMAGE_HEADER *p = (NVPFM_USB_IMAGE_HEADER *)SOLO_Read(ptmpinfo->depthaligndataqueue);
    if (p) {
      // Copy the whole slot out so the ring slot can be released immediately.
      memcpy(depthheader, p, sizeof(NVPFM_USB_IMAGE_HEADER) + 1280 * 800 * 2);
      SOLO_Read_Over(ptmpinfo->depthaligndataqueue);
      nvpfm_debug_printf("got depth data!!!!!!\n");
      // Only proceed when enabled and exactly one calibration record exists.
      if (ptmpinfo->config.pubdepthalign && ptmpinfo->camparam->size() == 1) {
        s_nvpfm_camera_param theparam = ptmpinfo->camparam->at(0);

        // Output is sized to the RGB sensor, not the depth sensor.
        int width = ptmpinfo->rgbwidth;
        int height = ptmpinfo->rgbheight;

        sensor_msgs::Image depthimage;
        depthimage.header.frame_id = "camera_color_optical_frame";
        ros::Time tmptime;
        // timestamp * 1000 converts to nanoseconds (source appears to be microseconds).
        tmptime.fromNSec(depthheader->timestamp * 1000);
        depthimage.header.stamp = tmptime;
        depthimage.width = width;
        depthimage.height = height;
        depthimage.is_bigendian = 0;
        depthimage.encoding = sensor_msgs::image_encodings::TYPE_16UC1;
        depthimage.step = sizeof(unsigned short) * depthimage.width;
        int data_size = depthimage.step * depthimage.height;
        depthimage.data.resize(data_size);

        // Reproject depth pixels into the RGB camera using the left-IR and
        // color intrinsics plus the left->color extrinsic matrix; result is
        // written directly into the ROS message buffer.
        ptmpinfo->depthtrans->compute_depth_rgb_align(depthdata, depthheader->width, depthheader->height,
                                                      (uint16_t *)&depthimage.data[0], ptmpinfo->rgbwidth, ptmpinfo->rgbheight,
                                                      NULL, theparam.left_ir_focus, theparam.left_ir_photocenter,
                                                      theparam.color_focus, theparam.color_photocenter, theparam.left2color_matrix);

        // Optional pseudo-color rendering of the aligned depth for viewing.
        if (ptmpinfo->depthalignrgbviewpublisher.getNumSubscribers() > 0) {
          sensor_msgs::Image pseudoimage;
          pseudoimage.header.frame_id = "camera_color_optical_frame";
          ros::Time tmptime;
          tmptime.fromNSec(depthheader->timestamp * 1000);
          pseudoimage.header.stamp = tmptime;
          pseudoimage.width = width;
          pseudoimage.height = height;
          pseudoimage.is_bigendian = 0;
          pseudoimage.encoding = sensor_msgs::image_encodings::RGB8;
          pseudoimage.step = 3 * pseudoimage.width;
          int data_size = pseudoimage.step * pseudoimage.height;
          pseudoimage.data.resize(data_size);
          uint8_t *depthalignpseudo = (uint8_t *)(&pseudoimage.data[0]);
          for (int i = 0; i < (width * height); i++) {
            uint16_t depth = *((uint16_t *)&depthimage.data[0] + i);
            // Direct 16-bit index into the 65536-entry table.  Assumes
            // calculatecolortable() already ran (g_colortable non-NULL) —
            // TODO confirm initialization order.
            MYRGB *color = g_colortable + depth;

            depthalignpseudo[i * 3] = color->r;
            depthalignpseudo[i * 3 + 1] = color->g;
            depthalignpseudo[i * 3 + 2] = color->b;
          }
          ptmpinfo->depthalignrgbviewpublisher.publish(pseudoimage);
        }

        if (ptmpinfo->depthalignrgbpublisher.getNumSubscribers() > 0) {

          // depthimage.header.seq = depthseq;
          // ptmpinfo->depthseq++;

          ptmpinfo->depthalignrgbpublisher.publish(depthimage);
        }
      }
    }
    // Pace the loop at roughly 30 Hz.
    usleep(33 * 1000);
  }
  free(depthheader);
  return 0;
}

void depthcallback(void *data, void *userdata) {
  // printf("depthcallback!!!\n");
  DEVICEINFO *ptmpinfo = (DEVICEINFO *)userdata;

  NVPFM_USB_IMAGE_HEADER *depthheader = (NVPFM_USB_IMAGE_HEADER *)((unsigned char *)data + sizeof(NVPTL_USBHeaderDataPacket));
  // countfps(&ptmpinfo->depthcountfps, "depth", depthheader->width, depthheader->height);
  if (ptmpinfo->config.pubdotcloud && ptmpinfo->depthdataqueue == NULL) {
    ptmpinfo->depthdataqueue = Create_Ring_Queue(2, sizeof(NVPFM_USB_IMAGE_HEADER) + 1280 * 800 * 2);
    pthread_create(&ptmpinfo->depththreadid, NULL, pointcloudthread, ptmpinfo);
  }
  if (ptmpinfo->dotcloudpublisher.getNumSubscribers() > 0 && ptmpinfo->config.pubdotcloud && ptmpinfo->camparam->size() == 1) {
    NVPFM_USB_IMAGE_HEADER *p = (NVPFM_USB_IMAGE_HEADER *)SOLO_Write(ptmpinfo->depthdataqueue);
    if (p) {
      memcpy(p, depthheader, sizeof(NVPFM_USB_IMAGE_HEADER) + 1280 * 800 * 2);
      SOLO_Write_Over(ptmpinfo->depthdataqueue);
    }
  }

  if (ptmpinfo->config.pubpseudo && ptmpinfo->depthpseudopublisher.getNumSubscribers() > 0) {
    sensor_msgs::Image pseudoimage;
    int width = depthheader->width;
    int height = depthheader->height;
    pseudoimage.header.frame_id = "camera_depth_optical_frame";
    ros::Time tmptime;
    tmptime.fromNSec(depthheader->timestamp * 1000);
    pseudoimage.header.stamp = tmptime;
    pseudoimage.width = depthheader->width;
    pseudoimage.height = depthheader->height;
    pseudoimage.is_bigendian = 0;
    // pseudoimage.encoding = sensor_msgs::image_encodings::MONO8;
    pseudoimage.encoding = sensor_msgs::image_encodings::RGB8;
    // pseudoimage.step = pseudoimage.width;
    pseudoimage.step = 3 * pseudoimage.width;
    int data_size = pseudoimage.step * pseudoimage.height;
    pseudoimage.data.resize(data_size);

    uint16_t *depthdata = (uint16_t *)((uint8_t *)depthheader + sizeof(NVPFM_USB_IMAGE_HEADER));
    uint8_t *depthpseudo = (uint8_t *)(&pseudoimage.data[0]);
    for (int i = 0; i < (width * height); i++) {
      /*  if (depthdata[i] < DEPTHMIN)
        {
          depthpseudo[i] = 0;
        }
        else if (depthdata[i] > DEPTHMAX)
        {
          depthpseudo[i] = 255;
        }
        else
        {
          depthpseudo[i] = 50 + (uint8_t)((double)(depthdata[i] - DEPTHMIN) * 255.0 / (double)(DEPTHMAX - DEPTHMIN));
        }*/
      uint16_t depth = *(depthdata + i);
      MYRGB *color = g_colortable + depth;

      depthpseudo[i * 3] = color->r;
      depthpseudo[i * 3 + 1] = color->g;
      depthpseudo[i * 3 + 2] = color->b;
    }
    ptmpinfo->depthpseudopublisher.publish(pseudoimage);
  }
  if (ptmpinfo->config.pubdepth && ptmpinfo->depthrawpublisher.getNumSubscribers() > 0) {
    sensor_msgs::Image depthimage;
    int width = depthheader->width;
    int height = depthheader->height;
    depthimage.header.frame_id = "camera_depth_optical_frame";
    ros::Time tmptime;
    tmptime.fromNSec(depthheader->timestamp * 1000);
    depthimage.header.stamp = tmptime;
    depthimage.width = depthheader->width;
    depthimage.height = depthheader->height;
    depthimage.is_bigendian = 0;
    depthimage.encoding = sensor_msgs::image_encodings::TYPE_16UC1;
    depthimage.step = sizeof(unsigned short) * depthimage.width;
    int data_size = depthimage.step * depthimage.height;
    depthimage.data.resize(data_size);

    uint16_t *depthdata = (uint16_t *)((uint8_t *)depthheader + sizeof(NVPFM_USB_IMAGE_HEADER));
    memcpy(&depthimage.data[0], depthdata, data_size);

    // depthimage.header.seq = depthseq;
    // ptmpinfo->depthseq++;

    ptmpinfo->depthrawpublisher.publish(depthimage);
  }
}

// Periodic timer handler: ask every device that currently streams H.265 on
// channel 2 or 3 to emit an I-frame, so decoders that joined late can sync.
static void request_i_frame_callback(const ros::TimerEvent &event) {
  std::unique_lock<std::mutex> lg(gv->device_mu);
  for (auto &entry : gv->g_devicemap) {
    DEVICEINFO *dev = entry.second;
    if (dev->chn2_265_enabled && dev->fm->request_iframe(CHANNEL2) != NVPTL_OK) {
      printf("Request iframe of channel[%d] device[%s] failed!\n", CHANNEL2, dev->devicename);
    }
    if (dev->chn3_265_enabled && dev->fm->request_iframe(CHANNEL3) != NVPTL_OK) {
      printf("Request iframe of channel[%d] device[%s] failed!\n", CHANNEL3, dev->devicename);
    }
  }
}

// publish
void rgbcallback(void *data, void *userdata) {

  static bool first_time = true;
  NVPTL_USBHeaderDataPacket *tmppack = (NVPTL_USBHeaderDataPacket *)data;
  // hardcoded
  if (tmppack->sub_type != IMAGE_CHANNEL2_ORIGNAL &&
      tmppack->sub_type != IMAGE_CHANNEL2_CALIBRATED) {
    return;
  }
  DEVICEINFO *ptmpinfo = (DEVICEINFO *)userdata;
  int chn = 0;
  bool *en = nullptr;
  if (tmppack->sub_type == IMAGE_CHANNEL2_ORIGNAL || tmppack->sub_type == IMAGE_CHANNEL2_CALIBRATED) {
    // chn = 1 << 2;
    en = &ptmpinfo->chn2_265_enabled;
  } else if (tmppack->sub_type == IMAGE_CHANNEL3_ORIGNAL || tmppack->sub_type == IMAGE_CHANNEL3_CALIBRATED) {
    // chn = 1 << 3;
    en = &ptmpinfo->chn3_265_enabled;
  } else {
    assert(false);
  }

  NVPFM_USB_IMAGE_HEADER *rgbheader = (NVPFM_USB_IMAGE_HEADER *)((unsigned char *)data + sizeof(NVPTL_USBHeaderDataPacket));
  // countfps(&ptmpinfo->rgbcountfps, "rgb", rgbheader->width, rgbheader->height);
  if (ptmpinfo->config.rgbrotatedegree == 90)
    nvpfm::nv12_rotate_90_i420(rgbheader);
  else if (ptmpinfo->config.rgbrotatedegree == 270)
    nvpfm::nv12_rotate_270_i420(rgbheader);
#if 0
  if (!ptmpinfo->config.pubrgb || ptmpinfo->rgbpublisher.getNumSubscribers() == 0)
  {
    return;
  }
#endif
  sensor_msgs::Image new_image;

  new_image.header.frame_id = "camera_color_optical_frame";
  ros::Time tmptime;
  tmptime.fromNSec(rgbheader->timestamp * 1000);
  new_image.header.stamp = tmptime;
  new_image.width = rgbheader->width;
  new_image.height = rgbheader->height;
  new_image.is_bigendian = 0;
  new_image.encoding = sensor_msgs::image_encodings::RGB8;
  new_image.step = 3 * new_image.width;

  std::size_t data_size = new_image.step * new_image.height;
  new_image.data.resize(data_size);

  int width = new_image.width;
  int height = new_image.height;

  unsigned char *tmprgbimgdata = (uint8_t *)rgbheader + sizeof(NVPFM_USB_IMAGE_HEADER);
  int pic_len = tmppack->len - sizeof(NVPFM_USB_IMAGE_HEADER);
  static unsigned char *buffer = NULL;

  unsigned char *in_ptr = reinterpret_cast<unsigned char *>(&new_image.data[0]);

  // if(rgbheader->format )
  if (rgbheader->format == NVPFM_IMAGE_FORMAT::IMAGE_YUV_NV12) {
    *en = false;
    if (first_time) {
      first_time = false;
      printf("Got NV12 Stream!\n");
    }

    if (ptmpinfo->config.rgbrotatedegree == 90 || ptmpinfo->config.rgbrotatedegree == 270)
      yuv420_rgb24_std(width, height, tmprgbimgdata, tmprgbimgdata + width * height, tmprgbimgdata + width * height * 5 / 4, width, width / 2, in_ptr, width * 3, YCBCR_601);
    else {
      // nv12_rgb24_std(width, height, tmprgbimgdata, tmprgbimgdata + width * height, width, width, in_ptr, width * 3, YCBCR_601);
      cv::Mat nv12frame(height * 3 / 2, width, CV_8UC1, tmprgbimgdata);
      cv::Mat rgbframe(height, width, CV_8UC3, in_ptr); // buffer);
      cv::cvtColor(nv12frame, rgbframe, COLOR_YUV2RGB_NV12);
    }
  } else if (rgbheader->format == NVPFM_IMAGE_FORMAT::IMAGE_H265) {
    if (gv->rgbdecode == "decode") {
      *en = true;
      if (first_time) {
        first_time = false;
        printf("Got 265 Stream!\n");
      }
      if (!gv->ctx) {
        printf("Got h265, and decode!\n");
        gv->ctx = new codec_context();
#ifdef SYNC_DECODE
        if (!gv->ctx->create(false))
#else
        if (!gv->ctx->create(true))
#endif
        {
          printf("Create h265 decoder failed!\n");
          delete gv->ctx;
          gv->ctx = nullptr;
          // fatal error.
          exit(-1);
        }
      }
#ifdef SYNC_DECODE
      std::vector<std::vector<uint8_t>> res;
      if (gv->ctx->decode_frames(tmprgbimgdata, pic_len, res) < 0) {
        printf("decode failed!\n");
        return;
      }
      if (!res.empty()) {
        memcpy(in_ptr, res.back().data(), res.back().size());
      }
#else
      if (gv->ctx->commit_packet(tmprgbimgdata, pic_len) < 0) {
        printf("Commit packet failed! device maybe weak.\n");
        return;
      }
      std::vector<uint8_t> frame = std::move(gv->ctx->get_last_decoded_frame());
      if (frame.empty()) {
        // no frame avaliable
        return;
      }
      mempcpy(in_ptr, frame.data(), frame.size());
#endif

// #define REQUEST_IFRAME
#ifdef REQUEST_IFRAME
      struct timeval tv;
      gettimeofday(&tv, NULL);
      uint64_t sec_diff = tv.tv_sec - gv->cur_time.tv_sec;
      uint64_t usec_diff = tv.tv_usec - gv->cur_time.tv_usec;
      usec_diff += sec_diff * 1000000;
      if (usec_diff >= request_period) {
        // channel 2 or channel 3
        printf("=====> request iframe!\n");
        if (NVPTL_OK != ptmpinfo->fm->request_iframe(chn)) {
          printf("Request iframe failed!\n");
        } else {
          printf("Request iframe success!\n");
        }
        gv->cur_time = tv;
      }
#endif
      //    printf("send image!\n");
    } else if (gv->rgbdecode == "raw") { // here we publish raw h.265 data
      feynman_camera::h265_raw raw_image;

      raw_image.header.frame_id = "camera_color_optical_frame";
      ros::Time tmptime;
      tmptime.fromNSec(rgbheader->timestamp * 1000);
      raw_image.header.stamp = tmptime;
      raw_image.width = rgbheader->width;
      raw_image.height = rgbheader->height;

      std::size_t data_size = pic_len;
      raw_image.h265data.resize(data_size);
      unsigned char *in_ptr = reinterpret_cast<unsigned char *>(&raw_image.h265data[0]);
      memcpy(in_ptr, tmprgbimgdata, data_size);

      ptmpinfo->h265publisher.publish(raw_image);
      return;
    }
  } else {
    printf("Bad format %d\n", rgbheader->format);
    return;
  }

  ptmpinfo->rgbpublisher.publish(new_image);
}

// Callback for channel-0 (left IR) frames: undoes any sensor-side rotation
// and republishes the luma plane as a MONO8 ROS image.
// @param data     NVPTL_USBHeaderDataPacket + NVPFM_USB_IMAGE_HEADER + pixels
// @param userdata DEVICEINFO* of the source device
void leftircallback(void *data, void *userdata) {
  NVPTL_USBHeaderDataPacket *tmppack = (NVPTL_USBHeaderDataPacket *)data;
  // Only left-IR (channel 0) frames are handled here.
  if (tmppack->sub_type != IMAGE_CHANNEL0_ORIGNAL &&
      tmppack->sub_type != IMAGE_CHANNEL0_CALIBRATED) {
    return;
  }
  DEVICEINFO *ptmpinfo = (DEVICEINFO *)userdata;

  NVPFM_USB_IMAGE_HEADER *leftirheader = (NVPFM_USB_IMAGE_HEADER *)((unsigned char *)data + sizeof(NVPTL_USBHeaderDataPacket));
  // Undo the sensor-side rotation so the published image is upright.
  if (leftirheader->rotate == 90 && leftirheader->format == IMAGE_YUV_NV12) {
    nvpfm::grayscale_rotate_90(leftirheader);
  } else if (leftirheader->rotate == 270 && leftirheader->format == IMAGE_YUV_NV12) {
    nvpfm::grayscale_rotate_270(leftirheader);
  }

  // BUGFIX: the original nested a second, identical getNumSubscribers()
  // check inside this condition; the redundant inner branch is removed.
  if (ptmpinfo->config.pubir && ptmpinfo->rectifyleftpublisher.getNumSubscribers() > 0) {
    sensor_msgs::Image new_image;

    new_image.width = leftirheader->width;
    new_image.height = leftirheader->height;
    new_image.is_bigendian = 0;
    new_image.encoding = sensor_msgs::image_encodings::MONO8;
    new_image.step = new_image.width;

    std::size_t data_size = new_image.step * new_image.height;
    new_image.data.resize(data_size);

    int width = new_image.width;
    int height = new_image.height;

    // Pixel payload follows the image header; the luma plane is exactly
    // width*height bytes, which is all MONO8 needs.
    unsigned char *tmpleftimgdata = (uint8_t *)leftirheader + sizeof(NVPFM_USB_IMAGE_HEADER);
    unsigned char *in_ptr = reinterpret_cast<unsigned char *>(&new_image.data[0]);
    memcpy(in_ptr, tmpleftimgdata, width * height);

    new_image.header.frame_id = "camera_infra1_optical_frame";
    ros::Time tmptime;
    tmptime.fromNSec(leftirheader->timestamp * 1000);
    new_image.header.stamp = tmptime;
    ptmpinfo->rectifyleftpublisher.publish(new_image);
  }
}

void rightircallback(void *data, void *userdata) {
  // Transport callback for right-IR frames: republishes the payload as a
  // mono8 ROS image on the rectified-right topic.
  NVPTL_USBHeaderDataPacket *tmppack = (NVPTL_USBHeaderDataPacket *)data;
  if (tmppack->sub_type != IMAGE_CHANNEL1_ORIGNAL &&
      tmppack->sub_type != IMAGE_CHANNEL1_CALIBRATED) {
    return; // not a right-IR payload
  }
  DEVICEINFO *ptmpinfo = (DEVICEINFO *)userdata;
  // Image header sits right after the transport packet header.
  NVPFM_USB_IMAGE_HEADER *rightirheader = (NVPFM_USB_IMAGE_HEADER *)((unsigned char *)data + sizeof(NVPTL_USBHeaderDataPacket));

  // Undo device-side rotation in place for NV12 grayscale payloads.
  if (rightirheader->format == IMAGE_YUV_NV12) {
    if (rightirheader->rotate == 90) {
      nvpfm::grayscale_rotate_90(rightirheader);
    } else if (rightirheader->rotate == 270) {
      nvpfm::grayscale_rotate_270(rightirheader);
    }
  }

  // Publish only when IR publishing is enabled and someone subscribes.
  // (The original re-checked getNumSubscribers() a second time inside this
  // branch; that redundant inner check has been removed.)
  if (!ptmpinfo->config.pubir || ptmpinfo->rectifyrightpublisher.getNumSubscribers() <= 0) {
    return;
  }

  sensor_msgs::Image new_image;
  new_image.width = rightirheader->width;
  new_image.height = rightirheader->height;
  new_image.is_bigendian = 0;
  new_image.encoding = sensor_msgs::image_encodings::MONO8;
  new_image.step = new_image.width; // mono8: one byte per pixel

  std::size_t data_size = new_image.step * new_image.height;
  new_image.data.resize(data_size);

  // Pixel payload immediately follows the image header.
  unsigned char *tmprightimgdata = (uint8_t *)rightirheader + sizeof(NVPFM_USB_IMAGE_HEADER);
  memcpy(&new_image.data[0], tmprightimgdata, data_size);

  new_image.header.frame_id = "camera_infra2_optical_frame";
  ros::Time tmptime;
  // timestamp * 1000 fed to fromNSec => device timestamps are microseconds.
  tmptime.fromNSec(rightirheader->timestamp * 1000);
  new_image.header.stamp = tmptime;
  ptmpinfo->rectifyrightpublisher.publish(new_image);
}

void othercallback(void *data, void *userdata) {
  NVPTL_USBHeaderDataPacket *tmppack = (NVPTL_USBHeaderDataPacket *)data;
  if (tmppack->type == NVPFM_CNN_DATA) {
    s_nvpfm_cnn_data tmpcnndata = {0};
    nvpfm::get_cnn_data(tmppack, &tmpcnndata);

    for (int i = 0; i < tmpcnndata.groups; i++) {
      LABELINFO info = nvpfm::get_cnn_label_by_index(tmpcnndata.group[i].label, (EM_CNN_TYPE)tmpcnndata.type);
      printf("############################\n");
      printf("label:%s\nscore:%f\nxmin:%f,ymin:%f,\nxmax:%f,ymax:%f\n",
             info.labelname, tmpcnndata.group[i].score,
             tmpcnndata.group[i].xmin,
             tmpcnndata.group[i].ymin,
             tmpcnndata.group[i].xmax,
             tmpcnndata.group[i].ymax);
      printf("############################\n");
    }
  }
}

void groupcallback(void *depthframe, void *rgbframe, void *leftirframe, void *rightirframe, void *imu, uint16_t mask, void *userdata) {
  // Synchronized-group callback: receives time-matched depth/RGB/left-IR/
  // right-IR frames (per the mask passed to start_groupimages) and republishes
  // whichever streams are enabled in the config and currently subscribed.
  DEVICEINFO *ptmpinfo = (DEVICEINFO *)userdata;

  // Each frame buffer carries a transport header followed by the image header.
  NVPFM_USB_IMAGE_HEADER *depthheader = (NVPFM_USB_IMAGE_HEADER *)((unsigned char *)depthframe + sizeof(NVPTL_USBHeaderDataPacket));
  NVPFM_USB_IMAGE_HEADER *rgbheader = (NVPFM_USB_IMAGE_HEADER *)((unsigned char *)rgbframe + sizeof(NVPTL_USBHeaderDataPacket));
  NVPFM_USB_IMAGE_HEADER *leftirheader = (NVPFM_USB_IMAGE_HEADER *)((unsigned char *)leftirframe + sizeof(NVPTL_USBHeaderDataPacket));
  NVPFM_USB_IMAGE_HEADER *rightirheader = (NVPFM_USB_IMAGE_HEADER *)((unsigned char *)rightirframe + sizeof(NVPTL_USBHeaderDataPacket));

  // Lazily create the point-cloud queue and worker on the first group frame.
  if (ptmpinfo->config.pubdotcloud && ptmpinfo->depthdataqueue == NULL) {
    ptmpinfo->depthdataqueue = Create_Ring_Queue(2, sizeof(NVPFM_USB_IMAGE_HEADER) + 1280 * 800 * 2);
    // NOTE(review): depththreadid is reused below for depthalignrgbthread;
    // if both features are enabled one of the thread ids is lost for joining
    // in releasethread — confirm DEVICEINFO should hold two distinct ids.
    pthread_create(&ptmpinfo->depththreadid, NULL, pointcloudthread, ptmpinfo);
  }
  if (ptmpinfo->dotcloudpublisher.getNumSubscribers() > 0 && ptmpinfo->config.pubdotcloud && ptmpinfo->camparam->size() == 1) {
    NVPFM_USB_IMAGE_HEADER *p = (NVPFM_USB_IMAGE_HEADER *)SOLO_Write(ptmpinfo->depthdataqueue);
    if (p) {
      // Copies the maximum slot size (1280x800x16bit), not the actual frame
      // size — assumes the source buffer is always that large. TODO confirm.
      memcpy(p, depthheader, sizeof(NVPFM_USB_IMAGE_HEADER) + 1280 * 800 * 2);
      SOLO_Write_Over(ptmpinfo->depthdataqueue);
    }
  }

  // Lazily create the depth-aligned-to-RGB queue and worker.
  if (ptmpinfo->config.pubdepthalign && ptmpinfo->depthaligndataqueue == NULL) {
    ptmpinfo->depthaligndataqueue = Create_Ring_Queue(2, sizeof(NVPFM_USB_IMAGE_HEADER) + 1280 * 800 * 2);
    ptmpinfo->rgbwidth = rgbheader->width;
    ptmpinfo->rgbheight = rgbheader->height;
    printf("will ceate dephthalignrgbthread!\n");
    pthread_create(&ptmpinfo->depththreadid, NULL, depthalignrgbthread, ptmpinfo);
  }
  if ((ptmpinfo->depthalignrgbpublisher.getNumSubscribers() > 0 || ptmpinfo->depthalignrgbviewpublisher.getNumSubscribers() > 0) &&
      ptmpinfo->config.pubdepthalign && ptmpinfo->camparam->size() == 1) {
    NVPFM_USB_IMAGE_HEADER *p = (NVPFM_USB_IMAGE_HEADER *)SOLO_Write(ptmpinfo->depthaligndataqueue);
    if (p) {
      memcpy(p, depthheader, sizeof(NVPFM_USB_IMAGE_HEADER) + 1280 * 800 * 2);
      SOLO_Write_Over(ptmpinfo->depthaligndataqueue);
    }
  }

  // Pseudo-color depth image: map each 16-bit depth value through a color
  // lookup table into an RGB8 image.
  if (ptmpinfo->config.pubpseudo && ptmpinfo->depthpseudopublisher.getNumSubscribers() > 0) {
    sensor_msgs::Image pseudoimage;
    int width = depthheader->width;
    int height = depthheader->height;
    pseudoimage.header.frame_id = "camera_depth_optical_frame";
    ros::Time tmptime;
    tmptime.fromNSec(depthheader->timestamp * 1000);
    pseudoimage.header.stamp = tmptime;
    pseudoimage.width = depthheader->width;
    pseudoimage.height = depthheader->height;
    pseudoimage.is_bigendian = 0;
    pseudoimage.encoding = sensor_msgs::image_encodings::RGB8;
    pseudoimage.step = 3 * pseudoimage.width; // 3 bytes per pixel
    int data_size = pseudoimage.step * pseudoimage.height;
    pseudoimage.data.resize(data_size);

    uint16_t *depthdata = (uint16_t *)((uint8_t *)depthheader + sizeof(NVPFM_USB_IMAGE_HEADER));
    uint8_t *depthpseudo = (uint8_t *)(&pseudoimage.data[0]);
    for (int i = 0; i < (width * height); i++) {
      uint16_t depth = *(depthdata + i);
      // Indexed by the raw 16-bit depth — g_colortable is assumed to cover
      // the full 0..65535 range. TODO confirm table size.
      MYRGB *color = g_colortable + depth;

      depthpseudo[i * 3] = color->r;
      depthpseudo[i * 3 + 1] = color->g;
      depthpseudo[i * 3 + 2] = color->b;
    }
    ptmpinfo->depthpseudopublisher.publish(pseudoimage);
  }

  // Raw 16-bit depth image, copied through unchanged.
  if (ptmpinfo->config.pubdepth && ptmpinfo->depthrawpublisher.getNumSubscribers() > 0) {
    sensor_msgs::Image depthimage;
    depthimage.header.frame_id = "camera_depth_optical_frame";
    ros::Time tmptime;
    tmptime.fromNSec(depthheader->timestamp * 1000);
    depthimage.header.stamp = tmptime;
    depthimage.width = depthheader->width;
    depthimage.height = depthheader->height;
    depthimage.is_bigendian = 0;
    depthimage.encoding = sensor_msgs::image_encodings::TYPE_16UC1;
    depthimage.step = sizeof(unsigned short) * depthimage.width;
    int data_size = depthimage.step * depthimage.height;
    depthimage.data.resize(data_size);

    uint16_t *depthdata = (uint16_t *)((uint8_t *)depthheader + sizeof(NVPFM_USB_IMAGE_HEADER));
    memcpy(&depthimage.data[0], depthdata, data_size);
    ptmpinfo->depthrawpublisher.publish(depthimage);
  }

  // RGB image: convert the NV12 payload straight into the outgoing message.
  if (ptmpinfo->config.pubrgb && ptmpinfo->rgbpublisher.getNumSubscribers() > 0) {
    sensor_msgs::Image new_image;

    new_image.header.frame_id = "camera_color_optical_frame";
    ros::Time tmptime;
    tmptime.fromNSec(rgbheader->timestamp * 1000);
    new_image.header.stamp = tmptime;
    new_image.width = rgbheader->width;
    new_image.height = rgbheader->height;
    new_image.is_bigendian = 0;
    new_image.encoding = sensor_msgs::image_encodings::RGB8;
    new_image.step = 3 * new_image.width;

    std::size_t data_size = new_image.step * new_image.height;
    new_image.data.resize(data_size);

    int width = new_image.width;
    int height = new_image.height;

    unsigned char *tmprgbimgdata = (uint8_t *)rgbheader + sizeof(NVPFM_USB_IMAGE_HEADER);
    unsigned char *in_ptr = reinterpret_cast<unsigned char *>(&new_image.data[0]);

    // BUG FIX: the original memcpy'd an uninitialized (never-written, leaked)
    // static malloc'd buffer into the message before converting.  cvtColor
    // below writes the pixels directly into the message, so that copy and the
    // buffer are removed.  The local Mat also no longer shadows the
    // `rgbframe` parameter.
    cv::Mat nv12mat(height * 3 / 2, width, CV_8UC1, tmprgbimgdata);
    cv::Mat rgbmat(height, width, CV_8UC3, in_ptr);
    cv::cvtColor(nv12mat, rgbmat, cv::COLOR_YUV2RGB_NV12);

    ptmpinfo->rgbpublisher.publish(new_image);
  }

  // Left/right IR images share one message buffer (both are mono8 and the
  // same size); each is copied in and published in turn.
  if (ptmpinfo->config.pubir && (ptmpinfo->rectifyleftpublisher.getNumSubscribers() > 0 || ptmpinfo->rectifyrightpublisher.getNumSubscribers() > 0)) {
    sensor_msgs::Image new_image;

    new_image.width = leftirheader->width;
    new_image.height = leftirheader->height;
    new_image.is_bigendian = 0;
    new_image.encoding = sensor_msgs::image_encodings::MONO8;
    new_image.step = new_image.width;

    std::size_t data_size = new_image.step * new_image.height;
    new_image.data.resize(data_size);

    int width = new_image.width;
    int height = new_image.height;

    unsigned char *tmpleftimgdata = (uint8_t *)leftirheader + sizeof(NVPFM_USB_IMAGE_HEADER);
    unsigned char *tmprightimgdata = (uint8_t *)rightirheader + sizeof(NVPFM_USB_IMAGE_HEADER);

    // BUG FIX: was declared unsigned short* although the image is mono8
    // (one byte per pixel); a byte pointer matches the sibling IR callbacks.
    unsigned char *in_ptr = reinterpret_cast<unsigned char *>(&new_image.data[0]);

    ros::Time tmptime;
    if (ptmpinfo->rectifyleftpublisher.getNumSubscribers() > 0) {
      memcpy(in_ptr, tmpleftimgdata, width * height);
      new_image.header.frame_id = "camera_infra1_optical_frame";
      tmptime.fromNSec(leftirheader->timestamp * 1000);
      new_image.header.stamp = tmptime;
      ptmpinfo->rectifyleftpublisher.publish(new_image);
    }
    if (ptmpinfo->rectifyrightpublisher.getNumSubscribers() > 0) {
      // Right frame assumed the same dimensions as the left one.
      memcpy(in_ptr, tmprightimgdata, width * height);
      new_image.header.frame_id = "camera_infra2_optical_frame";

      tmptime.fromNSec(rightirheader->timestamp * 1000);
      new_image.header.stamp = tmptime;
      ptmpinfo->rectifyrightpublisher.publish(new_image);
    }
  }
  // blend_rgb_frame(rgbframe, depthframe, userdata);
}

// Turn a single sensor channel on or off; logs (but otherwise ignores) failure.
static inline void set_sensor_enabled(DEVICEINFO *info, E_NVPFM_SENSOR_CHANNEL chn, bool enable) {
  s_set_sensor_status status;
  status.channel = chn;
  status.work = enable ? 1 : 0;
  NVPTL_RESULT rc = info->fm->set_sensor_status(&status);
  if (rc != NVPTL_OK)
    printf("set sensor %d status failed!\n", chn);
}

// Enable/disable the given sensor channels and, when enabling, apply the
// requested fps (-1 = leave unchanged) and pixel format (IMAGE_FORMAT_UNKNOWN
// = leave unchanged).  The device config is only written back when something
// actually changed.
static void setup_sensor(DEVICEINFO *info, const std::vector<E_NVPFM_SENSOR_CHANNEL> &chns, int fps, bool enable, NVPFM_IMAGE_FORMAT fmt = NVPFM_IMAGE_FORMAT::IMAGE_FORMAT_UNKNOWN) {

  printf("Setup sensors!\n");
  NVPTL_RESULT nr;
  for (const auto &chn : chns) {
    set_sensor_enabled(info, chn, enable);
  }
  if (!enable) {
    printf("sensor is disabled, setup sensor return!\n");
    return;
  }
  s_nvpfm_get_sensor_config_ret gs;
  nr = info->fm->get_sensorcfg(&gs);
  if (nr != NVPTL_OK) {
    printf("get sensor config failed! setup sensor return!\n");
    return;
  }
  bool need_set = false;
  for (const auto &s : chns) {
    if (fps != -1 && gs.config.fps[s] != fps) {
      printf("for sensor channel %d, orginal fps %d, set to %d.\n", s, gs.config.fps[s], fps);
      gs.config.fps[s] = fps;
      need_set = true;
    }
    if (fmt != NVPFM_IMAGE_FORMAT::IMAGE_FORMAT_UNKNOWN && gs.config.format[s] != fmt) {
      need_set = true;
      printf("for sensor channel %d, orginal fmt %d, set to %d.\n", s, gs.config.format[s], fmt);
      gs.config.format[s] = fmt;
    }
  }
  // BUG FIX: the original printed "don't need to be changed" in the else
  // branch (i.e. exactly when a change WAS needed) and returned silently
  // when nothing changed.  The message now accompanies the early return.
  if (!need_set) {
    printf("sensor config don't need to be changed.\n");
    return;
  }
  nr = info->fm->set_sensorcfg(&gs.config);
  if (nr != NVPTL_OK) {
    printf("set sensor config failed!\n");
  }
  printf("Setup sensors finished!\n");
}

static void setup_stream(DEVICEINFO *info, const std::vector<NVPFM_IMAGE_TYPE> &chns, int fps, bool enable) {
  NVPTL_RESULT nr;
  s_get_transfer_config_ret gs;
  s_set_transfer_config ss;
  printf("Setup streams!\n");
  nr = info->fm->get_transfer_config(&gs);
  if (nr != NVPTL_OK) {
    printf("get transfer config failed, setup stream return!\n");
    return;
  }
  ss.config = gs.config;
  for (const auto &chn : chns) {
    if (!enable) {
      ss.config.image_transfer_enable[chn] = 0;
      continue;
    }
    ss.config.image_transfer_enable[chn] = 1;
    if (fps != -1 && ss.config.image_transfer_frame_fps[chn] != fps) {
      printf("for stream channel %d, orginal fps %d, set to %d.\n", chn, ss.config.image_transfer_frame_fps[chn], fps);
      ss.config.image_transfer_frame_fps[chn] = fps;
    } else {
      printf("for stream channel %d, orginal fps %d, don't need to set.\n", chn, ss.config.image_transfer_frame_fps[chn]);
    }
  }
  nr = info->fm->set_transfer_config(&ss);
  if (nr != NVPTL_OK) {
    printf("set transfer config failed !\n");
  }
  printf("Setup streams finished!\n");
}

// Lightweight entry/exit trace macros used by the setup_* helpers below.
#define LOG_BEGIN() printf("%s begin!\n", __FUNCTION__)
#define LOG_END() printf("%s finished!\n", __FUNCTION__)

// Configure both IR sensor channels and their transfer streams from the
// launch-time config map.
static void setup_ir(DEVICEINFO *info) {
  LOG_BEGIN();
  auto it = gv->g_stream_and_sensor_configs.find("ir_sensor_fps");
  if (it == gv->g_stream_and_sensor_configs.end()) {
    assert(false);
  }
  // Reuse the lookup result instead of indexing the map a second time.
  // In release builds (assert compiled out) a missing key now falls back to
  // -1 ("leave fps unchanged") instead of operator[] inserting and using 0.
  int sensor_fps = (it != gv->g_stream_and_sensor_configs.end()) ? it->second : -1;
  setup_sensor(info, {CHANNEL0, CHANNEL1}, sensor_fps, info->config.pubir);
  setup_stream(info, {IMAGE_CHANNEL0_ORIGNAL, IMAGE_CHANNEL1_ORIGNAL}, gv->g_stream_and_sensor_configs["ir_stream_fps"], info->config.pubir);
  LOG_END();
}

// Configure the RGB sensor channel (fps + pixel format) and its stream.
static void setup_rgb(DEVICEINFO *info) {
  LOG_BEGIN();
  const int sensor_fps = gv->g_stream_and_sensor_configs["rgb_sensor_fps"];
  const int stream_fps = gv->g_stream_and_sensor_configs["rgb_stream_fps"];
  const NVPFM_IMAGE_FORMAT fmt = (NVPFM_IMAGE_FORMAT)(gv->g_stream_and_sensor_configs["rgb_format"]);
  setup_sensor(info, {CHANNEL2}, sensor_fps, info->config.pubrgb, fmt);
  setup_stream(info, {IMAGE_CHANNEL2_ORIGNAL}, stream_fps, info->config.pubrgb);
  LOG_END();
}

// Configure the depth transfer stream (depth has no sensor channel here).
static void setup_depth(DEVICEINFO *info) {
  LOG_BEGIN();
  const int stream_fps = gv->g_stream_and_sensor_configs["depth_stream_fps"];
  setup_stream(info, {IMAGE_DEPTH0}, stream_fps, info->config.pubdepth);
  LOG_END();
}

// One-time device bring-up: waits for device info, creates the ROS
// publishers, configures sensors/streams, then starts the requested data
// callbacks (either one synchronized group stream or individual streams).
// Bails out early whenever info->willrun is cleared by a shutdown/unplug.
void init(DEVICEINFO *info) {
  ////////////////////////////////////////////////////////////////////////////////////////
  info->hasinitswitch = false;
  info->isfirsttime = true;
  info->hassetedge = false;
  info->hassetconfidence = false;
  /////////////////////////////////////////////////////////////////////////////

  if (info->fm->isplugged()) {
    printf("will connect usb feynman\n");
    ////////////////////here we config resolution and fps and start stream with nvpfm_start
    // Busy-loop until the device answers get_devinfo (or shutdown requested);
    // publishers are only created once device info is known.
    while (info->willrun) {
      if (NVPTL_OK == info->fm->get_devinfo(&info->devinfo)) {
        createpublisher(info->config.node_obj, info);
        break;
      }
    }

    if (!info->willrun)
      return;
    ROS_INFO("will set timesync cycle!\n");

    // Enable periodic time synchronization with cycle value 5 — the unit is
    // not visible here; presumably seconds, confirm against the nvpfm API.
    info->fm->set_timesynccycle(true, 5);

    if (!info->willrun)
      return;
    // sleep(5);
    setup_ir(info);
    setup_rgb(info);
    setup_depth(info);
    info->fm->start_other(othercallback);
    if (info->config.group) {
      // Group mode: one callback delivers time-matched bundles of the
      // streams selected in the bit mask.
      printf("Group message is enabled!\n");
      uint16_t mask = 0;
      mask |= (0x0001 << NVPFM_STREAM_DEPTH0);
      mask |= (0x0001 << NVPFM_STREAM_DEPTH0_LEFTIR);
      mask |= (0x0001 << NVPFM_STREAM_DEPTH0_RIGHTIR);
      mask |= (0x0001 << NVPFM_STREAM_RGB0);
      //  mask |= (0x0001 << NVPFM_STREAM_IMU0);
      info->fm->start_groupimages(groupcallback, mask);
    } else {
#if 0
      if (info->config.pubir)
      {
        s_get_transfer_config_ret data;
        info->fm->get_transfer_config(&data);
        data.config.image_transfer_enable[0] = 1;
        data.config.image_transfer_enable[1] = 1;

        s_set_transfer_config setdata;
        setdata.config = data.config;
        info->fm->set_transfer_config(&setdata);
      }
      else
      {
        s_get_transfer_config_ret data;
        info->fm->get_transfer_config(&data);
        data.config.image_transfer_enable[0] = 0;
        data.config.image_transfer_enable[1] = 0;
        s_set_transfer_config setdata;
        setdata.config = data.config;
        info->fm->set_transfer_config(&setdata);

        s_set_sensor_status sdata;
        sdata.channel = CHANNEL0;
        sdata.work = 0;
        info->fm->set_sensor_status(&sdata);
        sdata.channel = CHANNEL1;
        sdata.work = 0;
        info->fm->set_sensor_status(&sdata);
      }
#endif

      // Independent-stream mode: start each enabled stream separately.
      if (info->config.pubir)
        info->fm->start_leftir(leftircallback);
      if (info->config.pubir)
        info->fm->start_rightir(rightircallback);
      if (info->config.pubdepth) {
        printf("will start depth...\n");
        info->fm->start_depth(depthcallback);
      }
      if (info->config.pubrgb)
        info->fm->start_rgb(rgbcallback);
#if 0
      else
      {
        s_set_sensor_status sdata;
        sdata.channel = CHANNEL2;
        sdata.work = 0;
        info->fm->set_sensor_status(&sdata);
      }
#endif
    }
    // IMU is started in both group and independent modes.
    if (info->config.pubimu)
      info->fm->start_imu(imucallback);

    if (!info->willrun)
      return;
  }
}

// Merge a freshly enumerated device list (tmpdevice) into the persistent
// list (*pps_device): bump a miss counter for known devices that were not
// seen this round, add newly seen devices, and drop devices missed in two
// or more consecutive rounds.
void mergedevices(NVPTL_DEVICE_INFO **pps_device, NVPTL_DEVICE_INFO *tmpdevice) {
  // Pass 1: for each known device, check whether this round saw it again.
  {
    NVPTL_DEVICE_INFO *s_tmp = *pps_device;
    while (s_tmp != NULL) {
      bool exist = false;
      for (NVPTL_DEVICE_INFO *newtmp = tmpdevice; newtmp != NULL; newtmp = newtmp->next) {
        if (0 == strcmp(newtmp->usb_camera_name, s_tmp->usb_camera_name)) {
          exist = true;
          break;
        }
      }
      if (!exist) { // known device missed this round: bump its miss counter
        s_tmp->notexists++;
        nvpfm_info_printf("device:%s not enumerated this time,increase to %d!!!\n", s_tmp->usb_camera_name, s_tmp->notexists);
      } else { // seen again: reset the miss counter
        s_tmp->notexists = 0;
      }
      s_tmp = s_tmp->next;
    }
  }
  // Pass 2: prepend any newly enumerated device not yet in the list.
  {
    for (NVPTL_DEVICE_INFO *newtmp = tmpdevice; newtmp != NULL; newtmp = newtmp->next) {
      bool exist = false;
      for (NVPTL_DEVICE_INFO *s_tmp = *pps_device; s_tmp != NULL; s_tmp = s_tmp->next) {
        if (0 == strcmp(newtmp->usb_camera_name, s_tmp->usb_camera_name)) {
          exist = true;
          break;
        }
      }
      if (!exist) { // unseen device: copy the node and push it at the head
        NVPTL_DEVICE_INFO *tmp = (NVPTL_DEVICE_INFO *)calloc(1, sizeof(NVPTL_DEVICE_INFO));
        memcpy(tmp, newtmp, sizeof(NVPTL_DEVICE_INFO));
        tmp->next = *pps_device;
        *pps_device = tmp;

        nvpfm_info_printf("device:%s is new,add to list!!!\n", tmp->usb_camera_name);
      }
    }
  }
  // Pass 3: unlink and free devices missed in >= 2 consecutive rounds.
  // BUG FIX: the original never advanced `prev`, so deleting any node other
  // than the head took the prev==NULL branch, re-pointed *pps_device past
  // the victim and leaked every node before it.
  {
    NVPTL_DEVICE_INFO *prev = NULL;
    NVPTL_DEVICE_INFO *s_tmp = *pps_device;
    while (s_tmp != NULL) {
      if (s_tmp->notexists >= 2) {
        nvpfm_info_printf("device:%s not enumerated more than 2:%d!!!will delete\n",
                          s_tmp->usb_camera_name, s_tmp->notexists);
        NVPTL_DEVICE_INFO *victim = s_tmp;
        if (prev == NULL)
          *pps_device = s_tmp->next;
        else
          prev->next = s_tmp->next;
        s_tmp = s_tmp->next;
        free(victim);
        // prev stays where it was: it still precedes the new s_tmp.
      } else {
        prev = s_tmp;
        s_tmp = s_tmp->next;
      }
    }
  }
}

// Serializes access to the shared enumerated-device list across callbacks.
MUTEXHANDLE g_enummutexhandle = CREATEMUTEX();
// Enumeration callback: merges the freshly enumerated list (pinfo) into the
// persistent list pointed to by userdata (under the mutex), then frees the
// transient list supplied by the transport layer.
void enum_feynman_callback(int total, NVPTL_DEVICE_INFO *pinfo, void *userdata) {
  NVPTL_DEVICE_INFO **ppdevice = (NVPTL_DEVICE_INFO **)userdata;
  MUTEXLOCK(g_enummutexhandle);
  mergedevices(ppdevice, pinfo);
  MUTEXUNLOCK(g_enummutexhandle);
  nvptl_freedevices(pinfo);
}
// Disabled legacy helper: looked up the label bound to a given port in
// g_bindports, defaulting to "default". Appears superseded by isbinding()
// below; kept for reference only.
#if 0
std::string getlabelbyport(std::string port)
{
  std::map<std::string, std::string>::iterator it;
  for (it = g_bindports.begin(); it != g_bindports.end(); it++)
  {
    if (port == it->first)
    {
      return it->second;
    }
  }
  return "default";
}
#endif
// todo update
// Returns true when `port` is allowed to bind: either no bind list is
// configured (accept everything), or the port matches a configured entry
// directly or with a "feynman-" prefix.
bool isbinding(const char *port) {
  if (gv->g_bindports.empty())
    return true;

  return std::any_of(gv->g_bindports.begin(), gv->g_bindports.end(),
                     [&port](const auto &pair) {
                       return (port == "feynman-" + pair.first) || (port == pair.first);
                     });
}

// Enumeration loop: once per second, enumerate attached feynman devices
// (USB, plus network if configured) and spawn a device thread for every
// newly seen camera that passes the bind filter.  Never returns.
void *enumthread(void *param) { // here,we enum feynman and create corresponding devicethread to open and recv data from it
  COMMONCONFIG *configinfo = (COMMONCONFIG *)param;
  while (true) {

    int total = 0;
    NVPTL_DEVICE_INFO *g_pdevice = NULL;
    uint8_t mask = ENUMUSB;
    if (configinfo->enumnet) {
      mask |= ENUMNET; // also enumerate network-attached devices
    }
    if (NVPTL_OK == nvptl_enum_sync_mask(&total, &g_pdevice, mask)) {
      if (total > 0) {
        // Guard the shared device map while scanning/adding entries.
        std::unique_lock<std::mutex> lg(gv->device_mu);
        NVPTL_DEVICE_INFO *tmpdevice = g_pdevice;
        for (int i = 0; i < total; i++) {
          // ROS_INFO("feynman:%s", tmpdevice->usb_camera_name);
          //  if not connected
          if (gv->g_devicemap.find(tmpdevice->usb_camera_name) == gv->g_devicemap.end() && isbinding(tmpdevice->usb_camera_name)) { // not in the map: a newly connected camera
            ROS_INFO("no feynman:%s in map,so create new camera instance", tmpdevice->usb_camera_name);
            DEVICEINFO *info = (DEVICEINFO *)calloc(1, sizeof(DEVICEINFO));

            info->rawdevinfo = *tmpdevice;

            // std::string foo = devid_to_topicid(info->rawdevinfo.usb_camera_name);
            // NOTE(review): unbounded strcpy — assumes topic_name is large
            // enough for any mapped device id; confirm the buffer size.
            strcpy(info->topic_name, devid_to_topicid(info->rawdevinfo.usb_camera_name).c_str());
            info->camparam = new std::vector<s_nvpfm_camera_param>;
            info->tf_broadcaster = new tf2_ros::StaticTransformBroadcaster;

            info->config = *configinfo;

            // Forward declaration of the per-device worker defined later.
            void *devicethread(void *param);
            info->willrun = true;
            ROS_INFO("Create device thread!\n");
            pthread_create(&info->threadid, NULL, devicethread, info);
            gv->g_devicemap[tmpdevice->usb_camera_name] = info;
          }
          tmpdevice = tmpdevice->next;
        }
      }
      nvptl_freedevices(g_pdevice);
    }
    sleep(1); // re-enumerate once per second
  }
  return 0;
}

// Shuts down every ROS publisher and (conditionally-created) service of a
// device before it is torn down.  Services are only shut down when the
// config flag that created them is set, mirroring their creation.
static void release_publisher(DEVICEINFO *info) {
  info->leftircamerainfopublisher.shutdown();
  info->rightircamerainfopublisher.shutdown();
  info->rgbcamerainfopublisher.shutdown();
  info->depthrawpublisher.shutdown();
  info->depthrawleftpublisher.shutdown();
  info->depthrawrightpublisher.shutdown();
  info->temperaturepublisher.shutdown();
  info->dotcloudpublisher.shutdown();
  info->depthcamerainfopublisher.shutdown();
  info->rgbpublisher.shutdown();
  info->rgbrawpublisher.shutdown();
  info->sensorrawleftpublisher.shutdown();
  info->sensorrawrightpublisher.shutdown();
  info->rectifyleftpublisher.shutdown();
  info->rectifyrightpublisher.shutdown();
  info->cnnpublisher.shutdown();
  info->logpublisher.shutdown();
  info->imupublisher_single.shutdown();
  info->depthalignrgbpublisher.shutdown();
  info->depthalignrgbviewpublisher.shutdown();
  info->depthpseudopublisher.shutdown();
  info->lkpublisher.shutdown();

  // service shutdown
  if (info->config.getimuinternalref) {
    info->getimuinternalrefservice.shutdown();
  }
  if (info->config.getimuexternalref) {
    info->getimuexternalrefservice.shutdown();
  }
  info->setprojectorservice.shutdown();
  if (info->config.getsnservice) {
    info->getsnservice.shutdown();
  }
  if (info->config.getleftirparamservice) {
    info->getleftirparamservice.shutdown();
  }
  if (info->config.getrgbparamservice) {
    info->getrgbparamservice.shutdown();
  }
  if (info->config.get_sensor_exposure) {
    info->get_exposure_srv.shutdown();
  }
  if (info->config.set_sensor_exposure_manaul) {
    info->set_exposure_manual_srv.shutdown();
  }
  if (info->config.set_sensor_exposure_auto) {
    info->set_exposure_auto_srv.shutdown();
  }
}

// Full teardown of one device, run on its own thread after an unplug event:
// stop the worker threads, destroy the nvpfm instance, shut down publishers,
// remove the device from the global map, free queues and owned objects, and
// finally free the DEVICEINFO itself.  Order matters: threads are joined
// before the objects they use are destroyed.
void *releasethread(void *userdata) {
  DEVICEINFO *info = (DEVICEINFO *)userdata;

  // Signal all loops to exit, then wait for the worker threads.
  info->willrun = false;
  ROS_INFO("in release thread,join threadid!");
  pthread_join(info->threadid, NULL);
  ROS_INFO("join depththreadid!");
  pthread_join(info->depththreadid, NULL);
  ROS_INFO("join saveimuthreadid!");
  pthread_join(info->saveimudatathreadid, NULL);

  ROS_INFO("delete nvpfm!");
  ////////////////////////////delete instance
  delete info->fm;
  if (info->depthtrans != NULL)
    delete info->depthtrans;
  release_publisher(info);
  ROS_INFO("erase instance in map:%s!", info->rawdevinfo.usb_camera_name);
  //////////////////////////////remove from map
  {
    // Erase under the same mutex enumthread uses when inserting.
    std::unique_lock<std::mutex> lg(gv->device_mu);
    gv->g_devicemap.erase(info->rawdevinfo.usb_camera_name);
  }
  if (info->depthdataqueue != NULL) {
    ROS_INFO("destory depthdataqueue ring!");
    Destroy_Ring_Queue(info->depthdataqueue);
  }
  if (info->depthaligndataqueue != NULL) {
    ROS_INFO("destory depthaligndataqueue ring!");
    Destroy_Ring_Queue(info->depthaligndataqueue);
  }
  if (info->imudataqueue != NULL) {
    ROS_INFO("destroy imudata queue ring!");
    Destroy_Ring_Queue(info->imudataqueue);
  }

  ROS_INFO("delete camparam!");
  delete info->camparam;
  ROS_INFO("delete tf_broadcaster!");
  delete info->tf_broadcaster;
  ROS_INFO("free info!\n");
  // NOTE(review): Xs/Ys are released with scalar delete — if they were
  // allocated with new[], this should be delete[]; confirm the allocation.
  if (info->Xs != NULL)
    delete info->Xs;
  if (info->Ys != NULL)
    delete info->Ys;
  free(info); // info itself came from calloc in enumthread
  return 0;
}

// Device event handler: a reboot request is forwarded to the camera
// synchronously; any other reason is treated as an unplug and the device is
// torn down on a detached worker thread so this callback returns quickly.
void eventcallback(EVENTREASON reason, void *userdata) {
  DEVICEINFO *info = (DEVICEINFO *)userdata;

  if (reason == INFORMREBOOT) {
    printf("will send reboot to camera!\n");
    info->fm->send_reboot();
    printf("after send reboot to camera!\n");
    return;
  }

  printf("plugout!\n");
  pthread_t tid;
  pthread_create(&tid, NULL, releasethread, info);
  pthread_detach(tid);
}
// Maps a firmware resolution enum value to its human-readable string and
// pixel dimensions (used for logging sensor configurations).
typedef struct
{
  NVPFM_IMAGE_SIZE res;
  const char *resstr;
  int width;
  int height;
} RESINFO;
// Lookup table of all known resolutions; the IMAGE_UNKNOWN entry ("?x?",
// -1x-1) marks unmapped values.
static RESINFO resmapper[] = {
    {IMAGE_1280_800, "1280x800", 1280, 800},
    {IMAGE_1280_720, "1280x720", 1280, 720},
    {IMAGE_640_480, "640x480", 640, 480},
    {IMAGE_640_400, "640x400", 640, 400},
    {IMAGE_320_200, "320x200", 320, 200},
    {IMAGE_640_360, "640x360", 640, 360},
    {IMAGE_320_240, "320x240", 320, 240},
    {IMAGE_960_600, "960x600", 960, 600},
    {IMAGE_480_300, "480x300", 480, 300},
    {IMAGE_1600_1200, "1600x1200", 1600, 1200},
    {IMAGE_1280_1080, "1280x1080", 1280, 1080},
    {IMAGE_1280_960, "1280x960", 1280, 960},
    {IMAGE_800_600, "800x600", 800, 600},
    {IMAGE_848_480, "848x480", 848, 480},
    {IMAGE_768_480, "768x480", 768, 480},
    {IMAGE_1280_480, "1280x480", 1280, 480},
    {IMAGE_1920_1080, "1920x1080", 1920, 1080},
    {IMAGE_960_1280, "960x1280", 960, 1280},
    {IMAGE_480_640, "480x640", 480, 640},
    {IMAGE_UNKNOWN, "?x?", -1, -1},
};

// Per-device worker thread.
// Responsibilities, repeated roughly every 2 seconds while info->willrun:
//   1. Reconcile the camera's sensor configuration (resolution/fps per
//      channel) with what the launch file requested, pushing a new config to
//      the device when they differ and the requested mode is supported.
//   2. Fetch the camera calibration parameters once (cached in info->camparam).
//   3. Publish the static TF tree (camera_link -> depth/infra frames and
//      their optical frames) and the CameraInfo messages for the RGB, depth,
//      left-IR and right-IR streams, plus the depth->color and depth->right-IR
//      extrinsic transforms derived from the calibration.
// Returns 0 when info->willrun is cleared.
void *devicethread(void *param) {
  DEVICEINFO *info = (DEVICEINFO *)param;

  // Lazily construct the protocol object; eventcallback handles reboot and
  // plug-out notifications for this device.
  if (info->fm == NULL) {
    ROS_INFO("will new nvpfm!\n");
    info->fm = new nvpfm(&info->rawdevinfo, eventcallback, info);
  }

  // info->fm->set_global_time(true);

  if (info->willrun) {
    ROS_INFO("will init nvpfm!\n");
    init(info);
  }
  while (info->willrun) {
    // --- 1. Sensor configuration reconciliation -------------------------
    // Channels: 0/1 = left/right IR, 2 = RGB (per the CHANNEL0/1/2 usage
    // below).  isp_frame_size is the output size, frame_size the native
    // sensor mode looked up via findsensorresbyisp.
    s_nvpfm_get_sensor_config_ret mycfg;
    if (NVPTL_OK == info->fm->get_sensorcfg(&mycfg)) {
      if (mycfg.ret == 0) {
        // printf("got sensor cfg\nir:%s %d\nrgb:%s %d\n",resmapper[mycfg.config.isp_frame_size[0]].resstr,mycfg.config.fps[0],resmapper[mycfg.config.isp_frame_size[2]].resstr,mycfg.config.fps[2]);
        if (info->config.pubrgb) {
          // RGB is enabled: IR and RGB resolutions must both match the launch
          // configuration before we leave the device config alone.
          if (mycfg.config.isp_frame_size[0] != info->config.resolution ||
              mycfg.config.isp_frame_size[2] != info->config.rgbresolution ||
              mycfg.config.fps[0] != info->config.fps) {
            printf("res or fps not as launch config,will test if can change!\n");
            if (IMAGE_UNKNOWN != nvpfm::findsensorresbyisp(info->devinfo, info->config.resolution, CHANNEL0) &&
                IMAGE_UNKNOWN != nvpfm::findsensorresbyisp(info->devinfo, info->config.rgbresolution, CHANNEL2)) {
              mycfg.config.isp_frame_size[0] = info->config.resolution;
              mycfg.config.isp_frame_size[1] = info->config.resolution;
              mycfg.config.isp_frame_size[2] = info->config.rgbresolution;

              mycfg.config.frame_size[0] = nvpfm::findsensorresbyisp(info->devinfo, info->config.resolution, CHANNEL0);
              mycfg.config.frame_size[1] = nvpfm::findsensorresbyisp(info->devinfo, info->config.resolution, CHANNEL1);
              mycfg.config.frame_size[2] = nvpfm::findsensorresbyisp(info->devinfo, info->config.rgbresolution, CHANNEL2);

              // One fps applies to all three channels.
              mycfg.config.fps[0] = info->config.fps;
              mycfg.config.fps[1] = info->config.fps;
              mycfg.config.fps[2] = info->config.fps;

              info->fm->set_sensorcfg(&mycfg.config);
              printf("ok to change res to:%s->%s,%d\n",
                     nvpfm::framesize2str(mycfg.config.frame_size[0]),
                     nvpfm::framesize2str(mycfg.config.isp_frame_size[0]), info->config.fps);
            } else {
              std::string res_desc = get_res_desc(info->config.resolution);
              std::string res_rgb_desc = get_res_desc(info->config.rgbresolution);
              printf("launch config resolution res[%s] and res_rgb[%s]  are not supported!\n", res_desc.c_str(), res_rgb_desc.c_str());
            }
          } else {
            // printf("ok,res and fps is as launch set!\n");
          }
        } else {
          // RGB disabled: only the IR channels are checked/updated.
          if (mycfg.config.isp_frame_size[0] != info->config.resolution ||
              mycfg.config.fps[0] != info->config.fps) {
            printf("res or fps not as launch config,will test if can change!\n");
            if (IMAGE_UNKNOWN != nvpfm::findsensorresbyisp(info->devinfo, info->config.resolution, CHANNEL0)) {
              mycfg.config.isp_frame_size[0] = info->config.resolution;
              mycfg.config.isp_frame_size[1] = info->config.resolution;

              mycfg.config.frame_size[0] = nvpfm::findsensorresbyisp(info->devinfo, info->config.resolution, CHANNEL0);
              mycfg.config.frame_size[1] = nvpfm::findsensorresbyisp(info->devinfo, info->config.resolution, CHANNEL1);

              mycfg.config.fps[0] = info->config.fps;
              mycfg.config.fps[1] = info->config.fps;

              info->fm->set_sensorcfg(&mycfg.config);
              printf("ok to change res to:%s->%s,%d\n",
                     nvpfm::framesize2str(mycfg.config.frame_size[0]),
                     nvpfm::framesize2str(mycfg.config.isp_frame_size[0]), info->config.fps);
            } else {
              std::string res_desc = get_res_desc(info->config.resolution);
              printf("launch config resolution res[%s] are not supported!\n", res_desc.c_str());
            }
          } else {
            // printf("ok,res and fps is as launch set!\n");
          }
        }
      }
    }

    // --- 2. Fetch calibration once (retry every second until it arrives) --
    while (info->willrun && info->camparam->size() == 0) {
      s_nvpfm_camera_param param;
      if (NVPTL_OK == info->fm->get_camera_param(&param)) {
        printf("got camera param!!!!\n");
        info->camparam->push_back(param);
        break;
      } else {
        printf("fail to get camera param!\n");
        sleep(1);
      }
    }

    sleep(1);
    // --- 3. Publish TF tree, CameraInfo and extrinsics ------------------
    if (info->camparam->size() > 0) {
      // ROS_INFO("publish tf of camera:%s",info->rawdevinfo.usb_camera_name);
      // camera_link -> camera_depth_frame: identity (co-located).
      geometry_msgs::TransformStamped t1;
      t1.header.seq = 100;
      t1.header.stamp = ros::Time::now();
      t1.header.frame_id = "camera_link";
      t1.child_frame_id = "camera_depth_frame";
      t1.transform.translation.x = 0;
      t1.transform.translation.y = 0;
      t1.transform.translation.z = 0;

      t1.transform.rotation.x = 0;
      t1.transform.rotation.y = 0;
      t1.transform.rotation.z = 0;
      t1.transform.rotation.w = 1;

      info->tf_broadcaster->sendTransform(t1);

      // from camera_depth_frame to optical frame,rotate z--->-90 degree and x---->-90 degree

      geometry_msgs::TransformStamped t2;
      t2.header.seq = 101;
      t2.header.stamp = ros::Time::now();
      t2.header.frame_id = "camera_depth_frame";
      t2.child_frame_id = "camera_depth_optical_frame";
      t2.transform.translation.x = 0;
      t2.transform.translation.y = 0;
      t2.transform.translation.z = 0;

      // Fixed quaternion implementing the body->optical axis convention
      // change described above.
      t2.transform.rotation.x = -0.5;
      t2.transform.rotation.y = 0.5;
      t2.transform.rotation.z = -0.5;
      t2.transform.rotation.w = 0.5;

      info->tf_broadcaster->sendTransform(t2);

      // from camera_link to infera1,also equal.....
      geometry_msgs::TransformStamped t3;
      t3.header.seq = 102;
      t3.header.stamp = ros::Time::now();
      t3.header.frame_id = "camera_link";
      t3.child_frame_id = "camera_infra1_frame";
      t3.transform.translation.x = 0;
      t3.transform.translation.y = 0;
      t3.transform.translation.z = 0;

      t3.transform.rotation.x = 0;
      t3.transform.rotation.y = 0;
      t3.transform.rotation.z = 0;
      t3.transform.rotation.w = 1;

      info->tf_broadcaster->sendTransform(t3);

      // camera_infra1_frame -> camera_infra1_optical_frame: same optical
      // convention rotation as t2.
      geometry_msgs::TransformStamped t4;
      t4.header.seq = 103;
      t4.header.stamp = ros::Time::now();
      t4.header.frame_id = "camera_infra1_frame";
      t4.child_frame_id = "camera_infra1_optical_frame";
      t4.transform.translation.x = 0;
      t4.transform.translation.y = 0;
      t4.transform.translation.z = 0;

      t4.transform.rotation.x = -0.5;
      t4.transform.rotation.y = 0.5;
      t4.transform.rotation.z = -0.5;
      t4.transform.rotation.w = 0.5;

      info->tf_broadcaster->sendTransform(t4);

      s_nvpfm_camera_param tmpparam = info->camparam->at(0);
      /*nvpfm_info_printf("left fx:%f,fy:%f,px:%f,py:%f\n"
          "right fx:%f,fy:%f,px:%f,py:%f\n",
          tmpparam.left_ir_focus[0],
          tmpparam.left_ir_focus[1],
          tmpparam.left_ir_photocenter[0],
          tmpparam.left_ir_photocenter[1],
          tmpparam.right_ir_focus[0],
          tmpparam.right_ir_focus[1],
          tmpparam.right_ir_photocenter[0],
          tmpparam.right_ir_photocenter[1]);*/
      // RGB CameraInfo.  Distortion coefficients are published as zeros (the
      // streams are rectified elsewhere in this driver).
      sensor_msgs::CameraInfo caminfo;
      auto pair = get_res_pair(info->config.rgbresolution);
      caminfo.width = pair.first;
      caminfo.height = pair.second;
      caminfo.distortion_model = sensor_msgs::distortion_models::PLUMB_BOB;
      caminfo.D.resize(5, 0.0);
      caminfo.D[0] = 0;
      caminfo.D[1] = 0;
      caminfo.D[2] = 0;
      caminfo.D[3] = 0;
      caminfo.D[4] = 0;

      caminfo.K.assign(0.0);
      caminfo.K[0] = tmpparam.color_focus[0];
      caminfo.K[2] = tmpparam.color_photocenter[0];
      caminfo.K[4] = tmpparam.color_focus[1];
      caminfo.K[5] = tmpparam.color_photocenter[1];
      caminfo.K[8] = 1.0;

      caminfo.R.assign(0.0);
      // for (int i = 0; i < 9; i++)
      //{
      //   caminfo.R[i] = p.r2l_r[i];
      // }
      caminfo.R[0] = 1;
      caminfo.R[4] = 1;
      caminfo.R[8] = 1;

      caminfo.P.assign(0.0);
      caminfo.P[0] = caminfo.K[0];
      caminfo.P[2] = caminfo.K[2];
      // NOTE(review): P[3] is filled with the raw extrinsic translation
      // (left2color_matrix[9], presumably millimetres).  The CameraInfo spec
      // expects Tx = -fx * baseline (in metres) here -- confirm consumers
      // expect this device-specific convention.
      caminfo.P[3] = tmpparam.left2color_matrix[9];
      caminfo.P[5] = caminfo.K[4];
      caminfo.P[6] = caminfo.K[5];
      caminfo.P[7] = 0;
      caminfo.P[10] = 1.0;
      caminfo.P[11] = 0;
      // Fill in header
      caminfo.header.stamp = ros::Time::now();
      caminfo.header.frame_id = "camera_color_optical_frame";

      info->rgbcamerainfopublisher.publish(caminfo);
      // Depth CameraInfo: reuses the same message object, overwriting the
      // fields that differ (left-IR intrinsics, IR resolution, frame id).
      pair = get_res_pair(info->config.resolution);
      caminfo.width = pair.first;   // getwidthofres(info->config.resolution);
      caminfo.height = pair.second; // getheightofres(info->config.resolution);
      caminfo.distortion_model = sensor_msgs::distortion_models::PLUMB_BOB;
      caminfo.D.resize(5, 0.0);
      caminfo.D[0] = 0;
      caminfo.D[1] = 0;
      caminfo.D[2] = 0;
      caminfo.D[3] = 0;
      caminfo.D[4] = 0;

      caminfo.K.assign(0.0);
      caminfo.K[0] = tmpparam.left_ir_focus[0];
      caminfo.K[2] = tmpparam.left_ir_photocenter[0];
      caminfo.K[4] = tmpparam.left_ir_focus[1];
      caminfo.K[5] = tmpparam.left_ir_photocenter[1];
      caminfo.K[8] = 1.0;

      caminfo.R.assign(0.0);
      // for (int i = 0; i < 9; i++)
      //{
      //   caminfo.R[i] = p.r2l_r[i];
      // }
      caminfo.R[0] = 1;
      caminfo.R[4] = 1;
      caminfo.R[8] = 1;

      caminfo.P.assign(0.0);
      caminfo.P[0] = caminfo.K[0];
      caminfo.P[2] = caminfo.K[2];
      caminfo.P[3] = tmpparam.left2right_matrix[9];
      caminfo.P[5] = caminfo.K[4];
      caminfo.P[6] = caminfo.K[5];
      caminfo.P[7] = 0;
      caminfo.P[10] = 1.0;
      caminfo.P[11] = 0;
      caminfo.header.frame_id = "camera_depth_optical_frame";
      // Fill in header
      caminfo.header.stamp = ros::Time::now();

      info->depthcamerainfopublisher.publish(caminfo);
      // Right-IR CameraInfo (right intrinsics, left->right baseline in P[3]).
      pair = get_res_pair(info->config.resolution);
      caminfo.width = pair.first;   // getwidthofres(info->config.resolution);
      caminfo.height = pair.second; // getheightofres(info->config.resolution);
      caminfo.distortion_model = sensor_msgs::distortion_models::PLUMB_BOB;
      caminfo.D.resize(5, 0.0);
      caminfo.D[0] = 0;
      caminfo.D[1] = 0;
      caminfo.D[2] = 0;
      caminfo.D[3] = 0;
      caminfo.D[4] = 0;

      caminfo.K.assign(0.0);
      caminfo.K[0] = tmpparam.right_ir_focus[0];
      caminfo.K[2] = tmpparam.right_ir_photocenter[0];
      caminfo.K[4] = tmpparam.right_ir_focus[1];
      caminfo.K[5] = tmpparam.right_ir_photocenter[1];
      caminfo.K[8] = 1.0;

      caminfo.R.assign(0.0);
      // for (int i = 0; i < 9; i++)
      //{
      //   caminfo.R[i] = p.r2l_r[i];
      // }
      caminfo.R[0] = 1;
      caminfo.R[4] = 1;
      caminfo.R[8] = 1;

      caminfo.P.assign(0.0);
      caminfo.P[0] = caminfo.K[0];
      caminfo.P[2] = caminfo.K[2];
      caminfo.P[3] = tmpparam.left2right_matrix[9];
      caminfo.P[5] = caminfo.K[4];
      caminfo.P[6] = caminfo.K[5];
      caminfo.P[7] = 0;
      caminfo.P[10] = 1.0;
      caminfo.P[11] = 0;
      // Fill in header
      caminfo.header.stamp = ros::Time::now();
      caminfo.header.frame_id = "camera_infra2_optical_frame";

      info->rightircamerainfopublisher.publish(caminfo);
      // Left-IR CameraInfo.
      // NOTE(review): for the reference (left) camera of a stereo pair the
      // spec expects Tx = 0, but left2right_matrix[9] is published here --
      // looks copy-pasted from the right-IR block; confirm intent.
      pair = get_res_pair(info->config.resolution);
      caminfo.width = pair.first;   // getwidthofres(info->config.resolution);
      caminfo.height = pair.second; // getheightofres(info->config.resolution);
      caminfo.distortion_model = sensor_msgs::distortion_models::PLUMB_BOB;
      caminfo.D.resize(5, 0.0);
      caminfo.D[0] = 0;
      caminfo.D[1] = 0;
      caminfo.D[2] = 0;
      caminfo.D[3] = 0;
      caminfo.D[4] = 0;

      caminfo.K.assign(0.0);
      caminfo.K[0] = tmpparam.left_ir_focus[0];
      caminfo.K[2] = tmpparam.left_ir_photocenter[0];
      caminfo.K[4] = tmpparam.left_ir_focus[1];
      caminfo.K[5] = tmpparam.left_ir_photocenter[1];
      caminfo.K[8] = 1.0;

      caminfo.R.assign(0.0);
      // for (int i = 0; i < 9; i++)
      //{
      //   caminfo.R[i] = p.r2l_r[i];
      // }
      caminfo.R[0] = 1;
      caminfo.R[4] = 1;
      caminfo.R[8] = 1;

      caminfo.P.assign(0.0);
      caminfo.P[0] = caminfo.K[0];
      caminfo.P[2] = caminfo.K[2];
      caminfo.P[3] = tmpparam.left2right_matrix[9];
      caminfo.P[5] = caminfo.K[4];
      caminfo.P[6] = caminfo.K[5];
      caminfo.P[7] = 0;
      caminfo.P[10] = 1.0;
      caminfo.P[11] = 0;
      // Fill in header
      caminfo.header.stamp = ros::Time::now();
      caminfo.header.frame_id = "camera_infra1_optical_frame";

      info->leftircamerainfopublisher.publish(caminfo);

      // from left ir to rgb,just use extern param of rgb

      // tmpparam.left2color_matrix[0-8],rotate,9:x offset(baseline),10:yoffset,11:zoffset

      // Build the 4x4 left-IR->color transform, invert it to get
      // color->left-IR, and publish it as depth_optical -> color_optical.
      Eigen::Matrix<float, 4, 4> rotation_matrix_rgb;
      rotation_matrix_rgb << tmpparam.left2color_matrix[0],
          tmpparam.left2color_matrix[1],
          tmpparam.left2color_matrix[2],
          tmpparam.left2color_matrix[9],
          tmpparam.left2color_matrix[3],
          tmpparam.left2color_matrix[4],
          tmpparam.left2color_matrix[5],
          tmpparam.left2color_matrix[10],
          tmpparam.left2color_matrix[6],
          tmpparam.left2color_matrix[7],
          tmpparam.left2color_matrix[8],
          tmpparam.left2color_matrix[11],
          0,
          0,
          0,
          1;
      Eigen::Matrix<float, 4, 4> rgb_inverse = rotation_matrix_rgb.inverse();
      Eigen::Matrix3d rgbrotation;
      rgbrotation << rgb_inverse(0, 0),
          rgb_inverse(0, 1),
          rgb_inverse(0, 2),
          rgb_inverse(1, 0),
          rgb_inverse(1, 1),
          rgb_inverse(1, 2),
          rgb_inverse(2, 0),
          rgb_inverse(2, 1),
          rgb_inverse(2, 2);
      Eigen::Quaterniond rotation_rgb(rgbrotation);

      geometry_msgs::TransformStamped t5;
      t5.header.seq = 104;
      t5.header.stamp = ros::Time::now();
      t5.header.frame_id = "camera_depth_optical_frame";
      t5.child_frame_id = "camera_color_optical_frame";
      // Calibration translation is in millimetres; TF uses metres.
      t5.transform.translation.x = rgb_inverse(0, 3) / 1000.0;
      t5.transform.translation.y = rgb_inverse(1, 3) / 1000.0;
      t5.transform.translation.z = rgb_inverse(2, 3) / 1000.0;

      t5.transform.rotation.x = rotation_rgb.x();
      t5.transform.rotation.y = rotation_rgb.y();
      t5.transform.rotation.z = rotation_rgb.z();
      t5.transform.rotation.w = rotation_rgb.w();

      info->tf_broadcaster->sendTransform(t5);

      // from left ir to rightir,just use extern param of ir

      // tmpparam.left2color_matrix[0-8],rotate,9:x offset(baseline),10:yoffset,11:zoffset
      Eigen::Matrix<float, 4, 4> rotation_matrix_right;
      rotation_matrix_right << tmpparam.left2right_matrix[0],
          tmpparam.left2right_matrix[1],
          tmpparam.left2right_matrix[2],
          tmpparam.left2right_matrix[9],
          tmpparam.left2right_matrix[3],
          tmpparam.left2right_matrix[4],
          tmpparam.left2right_matrix[5],
          tmpparam.left2right_matrix[10],
          tmpparam.left2right_matrix[6],
          tmpparam.left2right_matrix[7],
          tmpparam.left2right_matrix[8],
          tmpparam.left2right_matrix[11],
          0,
          0,
          0,
          1;
      Eigen::Matrix<float, 4, 4> right_inverse = rotation_matrix_right.inverse();
      Eigen::Matrix3d rightrotation;

      rightrotation << right_inverse(0, 0),
          right_inverse(0, 1),
          right_inverse(0, 2),
          right_inverse(1, 0),
          right_inverse(1, 1),
          right_inverse(1, 2),
          right_inverse(2, 0),
          right_inverse(2, 1),
          right_inverse(2, 2);

      Eigen::Quaterniond rotation_right(rightrotation);

      geometry_msgs::TransformStamped t6;
      t6.header.seq = 105;
      t6.header.stamp = ros::Time::now();
      t6.header.frame_id = "camera_depth_optical_frame";
      t6.child_frame_id = "camera_infra2_optical_frame";
      t6.transform.translation.x = right_inverse(0, 3) / 1000.0;
      t6.transform.translation.y = right_inverse(1, 3) / 1000.0;
      t6.transform.translation.z = right_inverse(2, 3) / 1000.0;

      t6.transform.rotation.x = rotation_right.x();
      t6.transform.rotation.y = rotation_right.y();
      t6.transform.rotation.z = rotation_right.z();
      t6.transform.rotation.w = rotation_right.w();
      info->tf_broadcaster->sendTransform(t6);
      sleep(1);
    }
  }
  return 0;
}

// Create the device-specific topics and services.
// Users can parse the device id out of the names, so per-device settings such
// as IMU and sensor configuration should all carry the device id.
// Create all publishers and advertise all services for one device.
// Topic/service names are namespaced by the device's serial number (or a
// user-configured label bound to its port via the "bindports" parameter).
// @param node_obj node handle used to advertise topics and services
// @param info     per-device state; publisher/service handles are stored here
void createpublisher(ros::NodeHandle node_obj, DEVICEINFO *info) {
  info->depthtrans = new depthtransformer;
  image_transport::ImageTransport it(node_obj);
  // Derive a topic-safe identifier from the USB device name.
  std::string tmpport = info->rawdevinfo.usb_camera_name;
  std::string tmprealport = info->rawdevinfo.usb_camera_name;
  tmprealport = replace_str(tmprealport, "feynman-", "");
  tmpport = replace_str(tmpport, "feynman-", "");
  tmpport = replace_str(tmpport, "-", "_");
  tmpport = replace_str(tmpport, ".", "_");

  //  std::string tmpsn = getlabelbyport(tmpport);
  // Prefer a user-bound label for this port over the device serial number.
  std::string tmpsn = info->devinfo.sn;
  auto iter = gv->g_bindports.find(tmprealport);
  if (iter != gv->g_bindports.end()) {
    tmpsn = iter->second;
  }
  // Serial numbers may contain '#', which is illegal in ROS names.
  if (tmpsn == info->devinfo.sn) {
    tmpsn = replace_str(tmpsn, "#", "_");
  }
  memset(info->topic_name, 0x0, sizeof(info->topic_name));
  // snprintf instead of strcpy: topic_name is a fixed-size buffer and tmpsn
  // originates from user-configurable parameters.
  snprintf(info->topic_name, sizeof(info->topic_name), "%s", tmpsn.c_str());
  char tmpparamsstr[256];
  // std::string tmpdevice_id = tmpport;

  if (info->config.getsnservice) {
    snprintf(tmpparamsstr, sizeof(tmpparamsstr), "/sn/%s", tmpport.c_str());
    info->getsnservice = node_obj.advertiseService(tmpparamsstr, handle_getsn_request);
  }

  if (info->config.getleftirparamservice) {
    snprintf(tmpparamsstr, sizeof(tmpparamsstr), "/leftirparam/%s", tmpport.c_str());
    info->getleftirparamservice = node_obj.advertiseService(tmpparamsstr, handle_getleftirparam_request);
  }

  if (info->config.getrgbparamservice) {
    snprintf(tmpparamsstr, sizeof(tmpparamsstr), "/rgbparam/%s", tmpport.c_str());
    info->getrgbparamservice = node_obj.advertiseService(tmpparamsstr, handle_getrgbparam_request);
  }
  /// update
  // All remaining services/topics live under /feynman_camera/<sn>/...
  std::string service_base_path = "/feynman_camera/" + tmpsn;
  auto node_path = [&service_base_path](const std::string &sub_path) {
    return service_base_path + "/" + sub_path;
  };
  if (info->config.getimuinternalref) {
    info->getimuinternalrefservice = node_obj.advertiseService(node_path("getimuinternalref"), handle_getimuinternalref_request);
  }
  if (info->config.getimuexternalref) {
    info->getimuexternalrefservice = node_obj.advertiseService(node_path("getimuexternalref"), handle_getimuexternalref_request);
  }

  info->setprojectorservice = node_obj.advertiseService(node_path("setprojector"), handle_setprojector_request);

#if 1
  // handle_get_sensor_exposure
  if (info->config.get_sensor_exposure) {
    info->get_exposure_srv = node_obj.advertiseService(node_path("get_sensor_exposure"), handle_get_sensor_exposure);
  }
  if (info->config.set_sensor_exposure_manaul) {
    info->set_exposure_manual_srv = node_obj.advertiseService(node_path("set_sensor_exposure_manaul"), handle_set_manual_exposure);
  }
  if (info->config.set_sensor_exposure_auto) {
    info->set_exposure_auto_srv = node_obj.advertiseService(node_path("set_sensor_exposure_auto"), handle_set_auto_exposure);
  }

  if (info->config.getversions) {
    info->getversions = node_obj.advertiseService(node_path("getversions"), handle_getversions_request);
  }
  // Global (non per-device) service: create only once across all devices.
  // NOTE(review): this load/create/store sequence is not atomic; confirm
  // createpublisher is never invoked concurrently for two devices.
  if (!gv->global_srv_created.load(std::memory_order::memory_order_acquire)) {
    gv->get_device_list_srv = node_obj.advertiseService("/feynman_camera/get_device_list", handle_get_dev_list);
    // BUG FIX: the original called store(std::memory_order_release), which
    // passes the memory-order enum as the stored *value* (implicitly
    // converted to true) and uses the default seq_cst ordering.  Store the
    // intended value and ordering explicitly.
    gv->global_srv_created.store(true, std::memory_order::memory_order_release);
  }
#endif

  info->leftircamerainfopublisher = node_obj.advertise<sensor_msgs::CameraInfo>(node_path("leftir/camera_info"), 10);
  info->rightircamerainfopublisher = node_obj.advertise<sensor_msgs::CameraInfo>(node_path("rightir/camera_info"), 10);
  info->rgbcamerainfopublisher = node_obj.advertise<sensor_msgs::CameraInfo>(node_path("rgb/camera_info"), 10);
  info->depthrawpublisher = node_obj.advertise<sensor_msgs::Image>(node_path("depth/image_raw"), 10);
  info->depthalignrgbpublisher = node_obj.advertise<sensor_msgs::Image>(node_path("depthalignrgb/image_raw"), 10);
  info->depthalignrgbviewpublisher = node_obj.advertise<sensor_msgs::Image>(node_path("depthalignrgb/image_color"), 10);
  info->depthpseudopublisher = it.advertise(node_path("depth/image_color"), 10);
  info->depthrawleftpublisher = node_obj.advertise<sensor_msgs::Image>(node_path("leftir/image_rect"), 10);
  info->depthrawrightpublisher = node_obj.advertise<sensor_msgs::Image>(node_path("rightir/image_rect"), 10);
  info->temperaturepublisher = node_obj.advertise<feynman_camera::temp_info>(node_path("temperature"), 10);
  info->imupublisher_single = node_obj.advertise<sensor_msgs::Imu>(node_path("imu_single"), 100);
  info->dotcloudpublisher = node_obj.advertise<sensor_msgs::PointCloud2>(node_path("depth/dotcloud"), 10);
  info->lkpublisher = node_obj.advertise<sensor_msgs::PointCloud2>(node_path("lk"), 10);
  info->depthcamerainfopublisher = node_obj.advertise<sensor_msgs::CameraInfo>(node_path("depth/camera_info"), 10);
  info->rgbpublisher = it.advertise(node_path("rgb/image_rect_color"), 10);
  info->h265publisher = node_obj.advertise<feynman_camera::h265_raw>(node_path("rgb/h265raw"), 10);
  // info->rgb_blend_publisher = it.advertise(node_path("rgb/rgb_depth_align"), 10);
  info->rgbrawpublisher = node_obj.advertise<sensor_msgs::Image>(node_path("rgb/image_color"), 10);
  info->sensorrawleftpublisher = node_obj.advertise<sensor_msgs::Image>(node_path("leftir/image_color"), 10);
  info->sensorrawrightpublisher = node_obj.advertise<sensor_msgs::Image>(node_path("rightir/image_color"), 10);
  info->rectifyleftpublisher = it.advertise(node_path("leftir/image_rect_color"), 10);
  info->rectifyrightpublisher = it.advertise(node_path("rightir/image_rect_color"), 10);
  info->cnnpublisher = node_obj.advertise<feynman_camera::cnn_info>(node_path("cnn_info"), 10);
  info->logpublisher = node_obj.advertise<std_msgs::String>(node_path("cameralog"), 10);
}

// Blocking fetch of a single ROS parameter.
// Polls the parameter server for `node_path` every 100 ms until the value is
// available; after 12 failed attempts the whole process terminates, matching
// the driver's fail-fast startup behaviour.
// @param node_path fully-qualified parameter name
// @param target    receives the parameter value on success
template <typename T>
void load_single_config(const std::string &node_path, T &target) {
  for (int attempt = 0;; ++attempt) {
    if (ros::param::get(node_path, target)) {
      return;
    }
    if (attempt > 10) {
      printf("try 10 times,failed,exit...!\n");
      exit(0);
    }
    printf("fail to get node_path:%s from param!\n", node_path.c_str());
    usleep(100 * 1000);
  }
}

// Non-fatal variant of load_single_config: polls the parameter server for
// `node_path`, giving up after 12 failed attempts instead of exiting.
// @param node_path fully-qualified parameter name
// @param target    receives the parameter value on success
// @param query_res set to true on success, false if the parameter never appeared
template <typename T>
void load_single_config(const std::string &node_path, T &target, bool &query_res) {
  int trytimes = 0;
  while (!ros::param::get(node_path, target)) {
    if (trytimes > 10) {
      printf("try 10 times,failed,break...!\n");
      query_res = false;
      return;
    }
    trytimes++;
    // BUG FIX: the failure message previously said "bindports" no matter
    // which parameter was being fetched (copy-paste from its one caller);
    // report the actual parameter name like the fatal overload does.
    printf("fail to get node_path:%s from param!\n", node_path.c_str());
    usleep(100 * 1000);
  }
  query_res = true;
}

// sensor and stream

// Join a node name and a parameter key with a '/' separator.
static std::string func_gen_node_path(const std::string &node_name, const std::string &target) {
  std::string path(node_name);
  path += '/';
  path += target;
  return path;
}

static void load_ss_int_config(const std::string &node_name, const std::string &key) {
  int v = -1;
  std::string rkey = func_gen_node_path(node_name, key);
  load_single_config(rkey, v);
  printf("key[%s] value %d\n", key.c_str(), v);
  gv->g_stream_and_sensor_configs[key] = v;
}

// Load every launch-file parameter into the shared COMMONCONFIG (and some
// global maps).  Most lookups are fatal on failure (see load_single_config);
// only "bindports" is optional.  The #if 0 blocks below are excerpts of the
// expected launch-file XML kept as in-source documentation.
static void load_config(COMMONCONFIG *config) {
  COMMONCONFIG *info = config;
  std::string node_name = ros::this_node::getName();
  // Bind the node name once so each key below is just func_gen("key").
  auto func_gen = std::bind(func_gen_node_path, node_name, std::placeholders::_1);
  int fps = 30;
  // std::map<std::string, std::string> g_bindports;
  bool found;
  load_single_config(func_gen("pubrgb"), info->pubrgb);
  load_single_config(func_gen("enumnet"), info->enumnet);
  // "bindports" (port -> label map) is optional; absence is only logged.
  load_single_config(func_gen("bindports"), gv->g_bindports, found);
  if (!found) {
    printf("Failed to find bindports!\n");
  }
  load_single_config(func_gen("group"), info->group);
  load_single_config(func_gen("pubir"), info->pubir);
  load_single_config(func_gen("pubdepth"), info->pubdepth);
  load_single_config(func_gen("savedata"), info->savedata);
  load_single_config(func_gen("pubdepthalign"), info->pubdepthalign);
  load_single_config(func_gen("pubdotcloud"), info->pubdotcloud);
  load_single_config(func_gen("rgbrotatedegree"), info->rgbrotatedegree);

  load_single_config(func_gen("lightfilter"), info->lightfilter);
  load_single_config(func_gen("highprecision"), info->highprecision);
  load_single_config(func_gen("badfilter"), info->badfilter);
  load_single_config(func_gen("pubpseudo"), info->pubpseudo);
  load_single_config(func_gen("pubgoodfeature"), info->pubgoodfeature);
  load_single_config(func_gen("pubimu"), info->pubimu);
  load_single_config(func_gen("clip_distance"), info->clip_distance);
  std::string resolutionstr, rgbresolutionstr;

  load_single_config(func_gen("resolution"), resolutionstr);
  load_single_config(func_gen("rgbresolution"), rgbresolutionstr);
#if 1
  // Parse "WxH" strings into firmware enums; unsupported values are fatal.
  NVPFM_IMAGE_SIZE size_res, size_rgb_res;
  size_res = nvpfm::parsewxh(resolutionstr.c_str());
  size_rgb_res = nvpfm::parsewxh(rgbresolutionstr.c_str());
  if (size_res == IMAGE_UNKNOWN ||
      size_rgb_res == IMAGE_UNKNOWN) {
    printf("do not support:ir:%s,rgb:%s!\n", resolutionstr.c_str(), rgbresolutionstr.c_str());
    exit(0);
  }
  info->rgbresolution = size_rgb_res;
  info->resolution = size_res;
#endif
  // int fps;
  load_single_config(func_gen("fps"), fps);
  info->fps = fps;
  printf("fps:%d\n", info->fps);
  load_single_config(func_gen("confidence"), info->confidence);

  // services

#if 0
    <param name="getimuinternalref"   
    <param name="getimuexternalref" 
    <param name="get_sensor_exposure" 
    <param name="set_sensor_exposure_manaul" 
    <param name="set_sensor_exposure_auto" 
    <param name="getversions" 
    <param name="get_device_list"
#endif
  // Per-service enable flags (see createpublisher).
  load_single_config(func_gen("getimuinternalref"), info->getimuinternalref);
  load_single_config(func_gen("getimuexternalref"), info->getimuexternalref);
  load_single_config(func_gen("get_sensor_exposure"), info->get_sensor_exposure);
  load_single_config(func_gen("set_sensor_exposure_manaul"), info->set_sensor_exposure_manaul);
  load_single_config(func_gen("set_sensor_exposure_auto"), info->set_sensor_exposure_auto);
  load_single_config(func_gen("getversions"), info->getversions);
  load_single_config(func_gen("get_device_list"), info->get_device_list);
  // added service
  load_single_config(func_gen("getsnservice"), info->getsnservice);
  load_single_config(func_gen("getleftirparamservice"), info->getleftirparamservice);
  load_single_config(func_gen("getrgbparamservice"), info->getrgbparamservice);
#if 0
    <!--take effects on both left and right ir sensor-->
    <param name="ir_sensor_fps", value="30">
    <param name="ir_stream_fps", value="15">
    <!--rgb sensor-->
    <param name="rgb_sensor_fps", value="30">
    <param name="rgb_stream_fps", value="30">
    <!--optipns: nv12 h265-->
    <param name="rgb_format", value="h265">
    <!--depth-->
    <param name="depth_stream_fps", value="30">
#endif
  // Integer sensor/stream rates cached in gv->g_stream_and_sensor_configs.
  load_ss_int_config(node_name, "ir_sensor_fps");
  load_ss_int_config(node_name, "ir_stream_fps");
  load_ss_int_config(node_name, "rgb_sensor_fps");
  load_ss_int_config(node_name, "rgb_stream_fps");
  load_ss_int_config(node_name, "depth_stream_fps");
  std::string rgb_format;
  std::string rgb_decode;
  load_single_config(func_gen("rgb_format"), rgb_format);
  load_single_config(func_gen("rgb_decode"), rgb_decode);
  // Map the rgb_format string onto the firmware image-format enum; anything
  // other than "h265"/"nv12" is recorded as unknown.
  if (rgb_format == "h265") {
    gv->g_stream_and_sensor_configs["rgb_format"] = IMAGE_H265;
  } else if (rgb_format == "nv12") {
    gv->g_stream_and_sensor_configs["rgb_format"] = IMAGE_YUV_NV12;
  } else {
    gv->g_stream_and_sensor_configs["rgb_format"] = IMAGE_FORMAT_UNKNOWN;
  }

  gv->rgbdecode = rgb_decode;

  // load_ss_int_config(node_name, "rgb_format");
}

int main(int argc, char *argv[]) {
  // sleep(20);
  // gv = new global_vars();
  printf("Service started!\n");
  std::unique_ptr<global_vars> g(new global_vars);
  gv = g.get();
  // gettimeofday(&gv->cur_time, NULL);
  // gv->rgb_chn_enabled.store(0, std::memory_order::memory_order_relaxed);
  gv->global_srv_created.store(false, std::memory_order::memory_order_release);
  gv->ctx = nullptr;
  nvpfm_init("./feynman.log", 1024 * 1024);
  ROS_INFO("enter main func!");
  ros::init(argc, argv, "feynman_camera");
  ros::NodeHandle node_obj;

  float clip_distance;
  int fps = 30;
  std::string resolutionstr = "";
  std::string rgbresolutionstr = "";
  std::string nodename = ros::this_node::getName();

  COMMONCONFIG *config = (COMMONCONFIG *)calloc(1, sizeof(COMMONCONFIG));
  config->node_obj = node_obj;
  auto timer = node_obj.createTimer(ros::Duration(0.5), request_i_frame_callback);
  load_config(config);
  calculatecolortable();

  printf("will create device thread!\n");

  pthread_t threadid;

  pthread_create(&threadid, NULL, enumthread, config);

  printf("will spin!\n");
  ros::spin();
  printf("will exit normally!\n");
  delete gv->ctx;
  return 0;
}
