﻿/**
 *****************************************************************************
 * @file    realsense.cpp
 * @author  JunChao.Yuan, DongKai.Zhou
 * @version V1.1.3
 * @date    2019.12.18
 * @brief   realsense双目摄像头函数库文件
 *****************************************************************************
 * 可改进的地方:
 */

#include <QDebug>

#include "vision_module/realsense.h"
#include "movement/movement_upper.h"

// Lazily-created singleton instance; allocated on first GetInstance() call.
RealSense *RealSense::instance_ = nullptr;

/**
 * Returns the process-wide RealSense singleton, creating it on first use.
 * Creation is not synchronized; the first call must happen before any
 * concurrent access.
 */
RealSense *RealSense::GetInstance()
{
  if (!instance_) {
    instance_ = new RealSense;
  }
  return instance_;
}

RealSense::RealSense()
{
  // One depth record per detection zone (horizontal x vertical grid).
  depth_info_set_.resize(kHorizontalZoneNum_ * kVerticalZoneNum_);
  SetThreadIndex(3);
  // tr: vision detect thread 2
  SetThreadName(tr("Vision Detect Thread 2"));
  // Timer wiring: temporary-start timeout, periodic soft restart and the
  // data-reception watchdog.
  connect(&temp_start_timer_, SIGNAL(timeout()),
          this, SLOT(TempStartTimeout()));
  connect(&m_restart_realsense_timer_, SIGNAL(timeout()),
          this, SLOT(RestartRealsenseTimeout()));
  connect(&m_realsense_receive_timer_, SIGNAL(timeout()),
          this, SLOT(SlotRealsenseReceiveTimeout()));

  // Self-connection: emitting SignalRestartRealsense() triggers the same
  // restart slot as the periodic timer.
  connect(this,SIGNAL(SignalRestartRealsense()),this, SLOT(RestartRealsenseTimeout()));

  // Pull persisted configuration from the movement module's settings.
  m_vision_module_power_limited = Movement::GetInstance()->GetVisionModulePowerLimitedFromSettings();
  m_restart_realsense_switch_ = Movement::GetInstance()->GetVisionModuleRestartSwitchFromSettings();
  m_restart_realsense_time_ =  Movement::GetInstance()->GetVisionModuleRestartTimeCycleFromSettings();
  if(!m_restart_realsense_time_)
  {
    // Default restart cycle: 30 (multiplied by 1000 in StartDev(), so
    // presumably seconds — confirm against settings documentation).
    m_restart_realsense_time_ = 30;
  }

#if (DCU_SERIAL_TYPE == 1)
  dcu_base_ = DcuBase::GetInstance();
#endif //(DCU_SERIAL_TYPE == 1)

  // Visual separator marking the start of a new log session.
  QString text = "\n\n---------------------------------------------------------"
      "-----------------------------------------------------------------";
  DebugOutput(text);
}

/// Latest data-reception status flag (periodically reset by the
/// receive-timeout watchdog in SlotRealsenseReceiveTimeout()).
bool RealSense::CheckDeviceStatus()
{
  return m_realsense_work_status;
}

#if (MOVEMENT_PROTOCOL_TYPE == 1)
// Protocol type 1: depth packets go out through the shared movement
// serial utility object (see SendDepthSet()).
void RealSense::SetSendDataSerial(MovementSerialUtils *serial_fd)
{
  serial_utils_ = serial_fd;
}
#else //(MOVEMENT_PROTOCOL_TYPE == 0)
// Protocol type 0: depth packets are written directly to this raw
// serial file descriptor.
void RealSense::SetSendDataSerial(int serial_fd)
{
  dev_handle_ = serial_fd;
}
#endif //(MOVEMENT_PROTOCOL_TYPE == 1)

RealSense::~RealSense()
{
  // Tear down the Orbbec/OpenNI resources; the RealSense close path is
  // kept disabled below.
  //CloseDev();
  CloseDeviceOrbbec();

  // Drop every signal/slot connection originating from this object.
  disconnect(this);
}

/**
 * Enables/disables debug image rendering.
 * While displaying, every pixel is scanned (step 1); otherwise the scan
 * is subsampled 4x and image_display_flag_keep_ is raised so one final
 * frame snapshot can still be produced.
 */
void RealSense::SetImageDisplayFlag(bool active)
{
  image_display_flag_ = active;
  if (active) {
    x_sample_val_ = 1;
    y_sample_val_ = 1;
    return;
  }
  x_sample_val_ = 4;
  y_sample_val_ = 4;
  image_display_flag_keep_ = true;
}

/// Whether debug image rendering is currently enabled.
bool RealSense::GetImageDisplayFlag()
{
  return image_display_flag_;
}

/// Turns video recording on or off for subsequent runs.
void RealSense::SetVideoRecordFlag(bool active)
{
  video_record_flag_ = active;
}

/// Whether video recording is currently enabled.
bool RealSense::GetVideoRecordFlag()
{
  return video_record_flag_;
}

void RealSense::DebugOutput(const QString &text, int level)
{
  if (level > REALSENSE_DEBUG_OUTPUT) {
    return ;
  }

  QString current_date = QDateTime::currentDateTime().toString("yyyy-MM-dd");
  QString log_dir_path = FileManager::GetLogDirPath();
  QString log_file_path = log_dir_path + current_date + "_vision_log.txt";
  QString msg = "RealSense: " + text;
  ErrorDisplay::OutputMessageToFile(log_file_path, msg);
}

int RealSense::OpenDev()
{
  // Opens the depth device via OpenNI. Returns NO_ERROR when the device
  // is (already) open, or 2 when the device cannot be opened.
  if (dev_status_ != kVisionDevClosed) {
    DebugOutput("vision module2: OpenDev(): dev already opened！");
    return NO_ERROR;
  }

  // Power the module on first when the soft-restart feature is enabled.
  if(m_restart_realsense_switch_)
  {
    RealsensePowerControl(1);
  }

  stat_ = OpenNI::initialize();
  if (stat_ != STATUS_OK) {
    // NOTE(review): init failure is only logged; the open still proceeds.
    printf("Initialize failed\n%s\n", OpenNI::getExtendedError());
  }

  stat_ = device.open(ANY_DEVICE);
  if (stat_ != STATUS_OK) {
    printf("Couldn't open device\n%s\n", OpenNI::getExtendedError());
    return 2;
  }

// Legacy librealsense2 stream configuration, kept disabled.
#if 0
#if !REALSENSE_CROCESS_THREAD
  if(video_record_flag_)
  {
    cfg_.enable_stream(RS2_STREAM_COLOR, kColorFrameWidth_, kColorFrameHeight_,
                       RS2_FORMAT_BGR8, int(kFrameFpsIndict_));
  }
  cfg_.enable_stream(RS2_STREAM_DEPTH, kDepthFrameWidth_, kDepthFrameHeight_,
                     RS2_FORMAT_Z16, int(kFrameFpsIndict_));
#endif //!REALSENSE_CROCESS_THREAD
#endif
  dev_status_ = kVisionDevOpened;

  return NO_ERROR;
}

int RealSense::CloseDev()
{
  // Stops streaming if needed, marks the device closed, and powers the
  // module down when the soft-restart feature owns the power rail.
  if (dev_status_ == kVisionDevStart) {
    StopDev();
  }

  if (dev_status_ != kVisionDevClosed) {
#if !REALSENSE_CROCESS_THREAD
    cfg_.disable_stream(RS2_STREAM_DEPTH);
    cfg_.disable_stream(RS2_STREAM_COLOR);
#endif // !REALSENSE_CROCESS_THREAD
    dev_status_ = kVisionDevClosed;
  }

  if(m_restart_realsense_switch_)
  {
    RealsensePowerControl(0);
    m_restart_realsense_timer_.stop();
  }
  return NO_ERROR;
}

int RealSense::StartDev(bool temp_start)
{
  // Starts the depth stream. When temp_start is true the device is
  // stopped again automatically after temp_start_timeout_val_ ms.
  // Returns NO_ERROR, 3 (stream create failed) or 4 (stream start failed).
  if (temp_start) {
    temp_start_timer_.start(temp_start_timeout_val_);
  }

  m_write_new_video_file_ = true;

  // Arm the periodic soft restart and reset the ghost-frame history.
  if(m_restart_realsense_switch_)
  {
    CheckGhostInit();
    m_restart_realsense_timer_.start(m_restart_realsense_time_*1000);
  }

  // Data-reception watchdog (10 s window).
  //m_realsense_receive_timer_.start(3000);
  m_realsense_receive_timer_.start(10000);

  if (device.getSensorInfo(SENSOR_DEPTH) != NULL) {
    stat_ = depth.create(device, SENSOR_DEPTH);
    if (stat_ != STATUS_OK) {
      printf("Couldn't create depth stream\n%s\n", OpenNI::getExtendedError());
      return 3;
    }
  }

  stat_ = depth.start();
  if (stat_ != STATUS_OK) {
    printf("Couldn't start the depth stream\n%s\n", OpenNI::getExtendedError());
    return 4;
  }

// Legacy librealsense2 start path, kept disabled.
#if 0
#if REALSENSE_CROCESS_THREAD
  StartRealSenseDataFlow();
#else // !REALSENSE_CROCESS_THREAD
  //RunThread();
#endif //REALSENSE_CROCESS_THREAD
#endif

  return NO_ERROR;
}

int RealSense::StopDev()
{
  // Stops the data flow and the worker thread; no-op unless the device
  // is currently in the started state.
  if (dev_status_ != kVisionDevStart) {
    DebugOutput("vision module2: StopDev(): dev didn't need to stop.");
    return NO_ERROR;
  }

//  if(m_restart_realsense_switch_)
//  {
//    m_restart_realsense_timer_.stop();
//  }
  // Notify the server that the real-time video feed is closing.
  emit SignalClientCloseRealTimeVideo();
  m_realsense_receive_timer_.stop();
  StopThread(false, -1);
#if !REALSENSE_CROCESS_THREAD
  pipe_.stop();
#else //REALSENSE_CROCESS_THREAD
  realsense_video_.stopDev();
#endif //!REALSENSE_CROCESS_THREAD
  dev_status_ = kVisionDevStop;
  return NO_ERROR;
}


int RealSense::OpenDeviceOrbbec()
{
    stat_ = OpenNI::initialize();
	if (stat_ != STATUS_OK) {
		printf("Initialize failed\n%s\n", OpenNI::getExtendedError());
	}

    stat_ = device.open(ANY_DEVICE);
    if (stat_ != STATUS_OK) {
		printf("Couldn't open device\n%s\n", OpenNI::getExtendedError());
		return 2;
	}

    if (device.getSensorInfo(SENSOR_DEPTH) != NULL) {
		stat_ = depth.create(device, SENSOR_DEPTH);
		if (stat_ != STATUS_OK) {
			printf("Couldn't create depth stream\n%s\n", OpenNI::getExtendedError());
			return 3;
		}
	}

	stat_ = depth.start();
	if (stat_ != STATUS_OK) {
		printf("Couldn't start the depth stream\n%s\n", OpenNI::getExtendedError());
		return 4;
	}
}

int RealSense::CloseDeviceOrbbec()
{
	depth.stop();
	depth.destroy();
	device.close();
    OpenNI::shutdown();
}

void RealSense::GetCameraParamsOrbbec()
{
    OBCameraParams cameraParam;
    int dataSize = sizeof(cameraParam);
    memset(&cameraParam, 0, sizeof(cameraParam));
    openni::Status rc = device.getProperty(openni::OBEXTENSION_ID_CAM_PARAMS, (uint8_t *)&cameraParam, &dataSize);
    if (rc != openni::STATUS_OK) {
        std::cout << "Error:" << openni::OpenNI::getExtendedError() << std::endl;
        return;
    }
    cam_para.fx = cameraParam.l_intr_p[0]; 
    cam_para.fy = cameraParam.l_intr_p[1]; 
    cam_para.cx = cameraParam.l_intr_p[2];
    cam_para.cy = cameraParam.l_intr_p[3];
    std::cout<<"cam_para.fx: "<<cam_para.fx<<", cam_para.fy: "<<cam_para.fy<<std::endl;
    std::cout<<"cam_para.cx: "<<cam_para.cx<<", cam_para.cy: "<<cam_para.cy<<std::endl;
}

int RealSense::StartProcessOrbbec(cv::Mat &depth_mat)
{
  // Waits for a new depth frame, reads it, and deep-copies it into
  // depth_mat as a 16-bit single-channel image.
  // Returns NO_ERROR on success, 1 on wait timeout, 2 on read failure.
  int changedStreamDummy;
  VideoStream *pStream = &depth;
  stat_ = OpenNI::waitForAnyStream(&pStream, 1, &changedStreamDummy,
                                   SAMPLE_READ_WAIT_TIMEOUT);
  if (stat_ != STATUS_OK) {
    printf("Wait failed! (timeout is %d ms)\n%s\n", SAMPLE_READ_WAIT_TIMEOUT,
           OpenNI::getExtendedError());
    // BUG FIX: previously fell through and kept processing without data.
    return 1;
  }

  stat_ = depth.readFrame(&frame);
  if (stat_ != STATUS_OK) {
    printf("Read failed!\n%s\n", OpenNI::getExtendedError());
    // BUG FIX: don't wrap an invalid/stale frame in a Mat.
    return 2;
  }

  if (frame.getVideoMode().getPixelFormat() != PIXEL_FORMAT_DEPTH_1_MM &&
      frame.getVideoMode().getPixelFormat() != PIXEL_FORMAT_DEPTH_100_UM) {
    printf("Unexpected frame format\n");
  }

  const int w = frame.getWidth();
  const int h = frame.getHeight();
  // Wrap the frame buffer without copying, then deep-copy into the output
  // so depth_mat stays valid after the next readFrame().
  cv::Mat img_depth(cv::Size(w, h), CV_16UC1, (DepthPixel *)frame.getData(),
                    cv::Mat::AUTO_STEP);
  img_depth.copyTo(depth_mat);
  //process_depth_data(img_depth);

  // BUG FIX: non-void function previously had no return statement (UB).
  return NO_ERROR;
}


/// Temporary-start window elapsed: stop the device and report a failure
/// result to listeners when the stop did not succeed.
void RealSense::TempStartTimeout()
{
  const int ret = StopDev();
  if (ret != NO_ERROR) {
    emit SignalDeviceResult(ret);
  }
}

void RealSense::RestartRealsenseTimeout()
{
  // Periodic soft restart: stop streaming, keep appending to the current
  // video file, refresh the ghost-detection history, then spin the worker
  // thread up again.
  StopDev();
  m_write_new_video_file_ = false;
  if(m_restart_realsense_switch_)
  {
    CheckGhostInit();
    //m_restart_realsense_timer_.start(m_restart_realsense_time_*1000);
  }
  RunThread();
}

/**
 * Data-reception watchdog tick. m_realsense_work_status is expected to
 * be raised elsewhere whenever data arrives; this slot reports only
 * edges (lost -> ok and ok -> lost) and then re-arms the flag.
 */
void RealSense::SlotRealsenseReceiveTimeout()
{
  if (!thread_on_flag_) {
    // Worker thread gone: stop ticking.
    if (m_realsense_receive_timer_.isActive()) {
      m_realsense_receive_timer_.stop();
    }
    return;
  }

  const bool now_ok = m_realsense_work_status;
  const bool was_ok = m_realsense_last_work_status;
  if (now_ok && !was_ok) {
    // Recovered after an outage.
    m_realsense_last_work_status = now_ok;
    DebugOutput("双目接收数据恢复正常");
    emit SignalRealsenseWorkStatus(now_ok);
  } else if (!now_ok && was_ok) {
    // No data arrived within the watchdog window.
    m_realsense_last_work_status = now_ok;
    emit SignalRealsenseWorkStatus(now_ok);
    DebugOutput("双目接收数据超时丢失");
  }
  // Re-arm: the data path must set this true again before the next tick.
  m_realsense_work_status = false;
}

/// Turns obstacle detection on. Always returns NO_ERROR.
int RealSense::EnableObstacleDetect()
{
  obstacle_detect_flag_ = true;
  return NO_ERROR;
}

/// Turns obstacle detection off. Always returns NO_ERROR.
int RealSense::DisableObstacleDetect()
{
  obstacle_detect_flag_ = false;
  return NO_ERROR;
}

/// Current device state (closed / opened / started / stopped).
VisionDevStatus RealSense::GetDevStatus()
{
  return dev_status_;
}

/**
 * Computes the pixel rectangle of horizontal zone `index`: its column
 * span plus the vertical band left after trimming the configured
 * top/bottom ignore rates. Outputs zeros and an error code when the
 * zone count is zero.
 */
int RealSense::GetZoneRange(int index, int *x1, int *x2,
                            int *y1, int *y2)
{
  if (kHorizontalZoneNum_ == 0) {
    *x1 = *x2 = 0;
    *y1 = *y2 = 0;
    return ERROR_REALSENSE_NULL_PARAM;
  }

  const int zone_width = kDepthFrameWidth_ / kHorizontalZoneNum_;
  *x1 = zone_width * index;
  *x2 = zone_width * (index + 1);
  *y1 = int(kDepthFrameHeight_ * kYTopIgnoreRate_);
  *y2 = int(kDepthFrameHeight_ * (1 - kYBotIgnoreRate_));
  return NO_ERROR;
}

/// Column span [x1, x2) of horizontal zone `index`; zeros plus an error
/// code when the zone count is zero.
int RealSense::GetZoneXRange(int index, int *x1, int *x2)
{
  if (kHorizontalZoneNum_ == 0) {
    *x1 = *x2 = 0;
    return ERROR_REALSENSE_NULL_PARAM;
  }

  const int zone_width = kDepthFrameWidth_ / kHorizontalZoneNum_;
  *x1 = zone_width * index;
  *x2 = zone_width * (index + 1);
  return NO_ERROR;
}

/// Row span [y1, y2) of vertical zone `index`; zeros plus an error code
/// when the zone count is zero.
int RealSense::GetZoneYRange(int index, int *y1, int *y2)
{
  if (kVerticalZoneNum_ == 0) {
    *y1 = *y2 = 0;
    return ERROR_REALSENSE_NULL_PARAM;
  }

  const int zone_height = kDepthFrameHeight_ / kVerticalZoneNum_;
  *y1 = zone_height * index;
  *y2 = zone_height * (index + 1);
  return NO_ERROR;
}


/**
 * Blocked-camera detector: samples the depth image on a 6x6 grid and
 * returns true when the fraction of zero-depth (invalid) samples exceeds
 * inval_pre. The first 20 calls are skipped so the sensor can warm up.
 */
bool RealSense::WardCamObj(Mat &dep_src, double inval_pre)
{
  static int start_num = 0;
  if(start_num < 20) {
    start_num++;
    return false;
  }

  int count1 = 0, count2 = 0;
  for(int i = 0; i < dep_src.rows; i += 6) {
    ushort* val_dep_ptr = dep_src.ptr<ushort>(i);
    for(int j = 0; j < dep_src.cols; j += 6) {
      ushort val_dep = val_dep_ptr[j];
      count1++;
      if(val_dep == 0) {
        count2++;
      }
    }
  }

  // BUG FIX: an empty image previously produced a 0/0 (NaN) ratio; treat
  // "no samples" as "not blocked".
  if (count1 == 0) {
    return false;
  }

  double val_pre = count2 * 1.0 / count1;
  if(val_pre > inval_pre) {
    //printf("val_pre:%f\n", val_pre);
    return true;
  }
  return false;
}


/**
 * Fall-risk detector over the bottom rows of the depth image.
 * Scans element_size windows along the lowest row_num bands; for each
 * sufficiently dense window (non-zero fraction > val_prop) it averages a
 * small cross of depth samples and counts it when the mean exceeds
 * 3200. Returns true when more than 12% of the dense windows qualify.
 */
bool RealSense::FallDownObj(Mat &src, int element_size, double val_prop)
{
  cv::Mat dep_fall(src.rows, src.cols, src.type(), Scalar::all(0));
  int element_half = element_size / 2;
  int step_num = element_size;
  int count1 = 0, count2 = 0;
  int row_num = 4;

  // BUG FIX: bail out when the image is too small for the sliding
  // windows below; the original indexed negative rows / bad columns.
  if (src.rows < row_num * element_size || src.cols < 3 * element_size) {
    return false;
  }

  // Copy plausible depths (200..3500) from the bottom bands into dep_fall.
  for(int i = src.rows-1; i >= src.rows - row_num*element_size; i--) {
    ushort* val_dep_ptr = src.ptr<ushort>(i);
    ushort* val_fall_ptr = dep_fall.ptr<ushort>(i);
    for(int j = 0; j < src.cols; j++) {
      ushort val_dep = val_dep_ptr[j];
      if (val_dep < 3500 && val_dep > 200) {
        val_fall_ptr[j] = val_dep;
      }
    }
  }

  for(int i = dep_fall.rows - element_half; i >= dep_fall.rows - row_num*element_half; i -= step_num) {
    for(int j = element_half + 2*step_num; j <= dep_fall.cols - element_half; j += step_num) {
      // NOTE(review): device-specific workaround near column 1245 for
      // 30-pixel windows; original intent undocumented — confirm.
      if(j == 1245 && element_size == 30) {
        j = j - 10;
      }

      cv::Rect rect;
      rect.x = j - element_half;
      rect.y = i - element_half;
      rect.width = 2 * element_half;
      rect.height = 2 * element_half;

      int val_num_eff = cv::countNonZero(src(rect));
      int val_num_all = rect.width * rect.height;

      count1++;
      double val_prop_now = val_num_eff * 1.0 / val_num_all;
      if(val_prop_now > val_prop) {
        // Average the center pixel with its 4-neighborhood cross.
        ushort val_fall = src.at<ushort>(i, j);
        int n = 2;
        for(int k = 1; k < n; k++) {
          val_fall = val_fall + src.at<ushort>(i+k, j) + src.at<ushort>(i, j+k);
          val_fall = val_fall + src.at<ushort>(i-k, j) + src.at<ushort>(i, j-k);
        }
        int val_fall_m = val_fall / (4*(n-1)+1);
        if(val_fall_m > 3200 || val_fall_m < -3200) {
          count2++;
        }
      }
    }
  }

  // BUG FIX: guard against 0/0 when no window was evaluated.
  if (count1 == 0) {
    return false;
  }

  double val_pre = count2 * 1.0 / count1;
  if (val_pre > 0.12) {
    //printf("count2: %d, pre: %f\n", count2, val_pre);
    return true;
  }
  return false;
}

int RealSense::FilerDepthNoise(Mat &src, int element_size, double val_prop)
{
  // In-place depth-mask denoiser: slides an element_size window over src
  // and zeroes every window whose fraction of non-zero pixels is below
  // val_prop. Returns the number of windows that were kept.
  int element_half = element_size / 2;
  int step_num = element_size;
  int count = 0;

  for(int i = element_half; i <= src.rows - element_half; i = i + step_num){
    for(int j = element_half; j <= src.cols - element_half; j = j + step_num){
      // NOTE(review): device-specific workaround near column 1245 for
      // 30-pixel windows; original intent undocumented — confirm.
      if(j == 1245 && element_size == 30) {
        j = j - 10;
      }

      cv::Rect rect;
      rect.x = j - element_half;
      rect.y = i - element_half;
      rect.width = 2 * element_half;
      rect.height = 2 * element_half;

      int val_num_eff = cv::countNonZero(src(rect));
      int val_num_all = rect.width * rect.height;
      double val_prop_now = val_num_eff * 1.0 / val_num_all;

      cv::Mat all_zero(rect.height, rect.width, src.type(), cv::Scalar::all(0));
      cv::Mat tmp_mat = src(rect);
      if(val_prop_now < val_prop){
        // Sparse window: treat as noise and blank it out in src.
        all_zero.copyTo(tmp_mat);
      } else {
        count++;
      }
    }
  }

  return count;
}

bool RealSense::PointAngleTrans(cv::Mat& src, int angle, int height, int height_max)
{
  // Rotates each depth point by the camera's mounting pitch (`angle`
  // degrees) and writes the transformed depth back into src, keeping
  // only points between the floor band and the robot's top. Points that
  // land well below the mounting height are collected into src_fall;
  // returns true when enough dense fall candidates survive denoising
  // (possible cliff/drop ahead).
  cv::Mat src_angle(src.rows, src.cols, src.type(), Scalar::all(0));
  cv::Mat src_fall(src.rows, src.cols, src.type(), Scalar::all(0));
  double angle_h = angle * 3.1415926 / 180;
  int height_m = height_max - height;
  double cos_angle = cos(angle_h);
  double sin_angle = sin(angle_h);
  double cam_para_yb = 1.0 / camera_info.fy;

  for (int i = 0; i<src.rows; i++) {
    ushort* val_dep_ptr = src.ptr<ushort>(i);
    ushort* val_angle_ptr = src_angle.ptr<ushort>(i);
    ushort* val_fall_ptr = src_fall.ptr<ushort>(i);
    for (int j = 0; j<src.cols; j++) {
      ushort val_dep = val_dep_ptr[j];
      // Ignore readings outside the trusted range (values appear to be
      // millimeters — confirm against sensor configuration).
      if (val_dep > 4000 || val_dep < 200) {
        continue;
      }

      // Back-project the pixel row to camera-frame Y, then rotate by the
      // mounting angle.
      double y_val = (i - camera_info.cy) * val_dep * cam_para_yb;
      double y_val_trans = y_val * cos_angle + val_dep * sin_angle;
      // Far below mounting height => fall candidate.
      if(y_val_trans > height + 360) {
        val_fall_ptr[j] = val_dep;
      }

      // Skip points below the floor band or above the robot's top.
      if((y_val_trans > height - 80) || (y_val_trans < -height_m)) {
        continue;
      }

      double val_trans = val_dep * cos_angle - y_val * sin_angle;
      val_angle_ptr[j] = ushort(val_trans);
    }
  }

  src_angle.copyTo(src);
  // Dense fall-candidate windows remaining after denoising => drop risk.
  int count = FilerDepthNoise(src_fall, 10, 0.7);
  if(count > 40) {
    return true;
  }
  return false;
}

/**
 * Copies src into dest, then zeroes every pixel whose depth lies outside
 * the half-open band (threshold_min, threshold_max].
 */
void RealSense::GetThresholdMask(Mat &src, Mat& dest, int threshold_min, int threshold_max)
{
  src.copyTo(dest);

  for (int row = 0; row < dest.rows; ++row) {
    ushort *px = dest.ptr<ushort>(row);
    for (int col = 0; col < dest.cols; ++col) {
      const ushort depth_val = px[col];
      if (depth_val <= threshold_min || depth_val > threshold_max) {
        px[col] = 0;
      }
    }
  }
}

int RealSense::ProcessDepthData(Mat &mat)
{
  // Core per-frame pipeline: angle transform + fall/occlusion checks,
  // distance-band thresholding and denoising, per-zone minimum-depth
  // extraction, optional colorized debug rendering, and forwarding the
  // zone data to the chassis.
  int xindex, yindex, i, j, x1, x2, y1, y2, scale = 10;
  Mat mask_mat, mask_mat_t, out_mat, src_mat, src_dep, element;
  mat.copyTo(src_mat);
  mat.copyTo(src_dep);

  bool fall_down = false;
  // When stereo data at other angles is needed, do not comment out the
  // call below just because of the fall-prevention logic.
  //clockwise rotation angle+ / camera height
  fall_down = PointAngleTrans(mat, device_angle, device_height,
                              robot_max_height);
#if (MOVEMENT_PROTOCOL_TYPE == 0)
  if(!fall_down) {
    fall_down = FallDownObj(src_dep, 20, 0.98);
  }
  // Edge-triggered fall-risk reporting: alarm on rise, clear on fall.
  if(fall_down) {
    if(!fall_down_status_){
      DebugOutput("检测到跌落风险");
      emit SignalDeviceFallDown(true); // fall-risk alarm raised
      fall_down_status_ = true;
    }
  } else if(fall_down_status_){
    DebugOutput("跌落风险解除");
    emit SignalDeviceFallDown(false);// alarm cleared
    fall_down_status_ = false;
  }

  // Blocked-camera check (>70% invalid samples); only logged.
  bool ward_cam = WardCamObj(src_dep, 0.7);
  if(ward_cam) {
    DebugOutput("双目被遮挡");
  }
#else //(MOVEMENT_PROTOCOL_TYPE == 1)
  Q_UNUSED(fall_down);
#endif //(MOVEMENT_PROTOCOL_TYPE == 0)

  // Two distance bands (values in mm), denoised separately then merged.
  GetThresholdMask(mat, mask_mat, 300, 1000);//0.2m~1.0m
  GetThresholdMask(mat, mask_mat_t, 1000, 1800);//1.0m~1.8m

  FilerDepthNoise(mask_mat, 10, 0.75);//(size 20 or 30)
  FilerDepthNoise(mask_mat_t, 10, 0.7);//(prop 0.1~1.0)
  cv::add(mask_mat, mask_mat_t, mask_mat);

  // Scale depth down by `scale` into an 8-bit image for scanning/display.
  mask_mat.convertTo(mask_mat, CV_8UC1, 1.0 / scale);
  mask_mat.copyTo(out_mat);

  /*
  // Kernel size can be tuned.
  element = cv::getStructuringElement(cv::MORPH_RECT, Size(30, 30));
  // Morphological opening.
  cv::morphologyEx(mask_mat, out_mat, cv::MORPH_OPEN, element);
  //dilate(dhc, out_mat, element);
  */

  cv::Vec4b red(0, 0, 255), green(0, 255, 0), blue(255, 0, 0),
      purple(255, 0, 255), black(0, 0, 0), yellow(0, 255, 255),
      white(255, 255, 255);
  display_image_lock_.lock();
  Mat rgb_mat(out_mat.rows, out_mat.cols, CV_8UC4);
  // Color-bucket thresholds: kWarnDistance_ * (0.6 + 0.3*i).
  uchar limit_val[5] = {0};
  for (ulong i=0; i<sizeof(limit_val); i++) {
    limit_val[i] = uchar(kWarnDistance_ * (0.6 + i * 0.3));
  }

  uchar value = 0, max_dist = 0, min_dist = kMaxDetectDistance_;
  ushort* ptr_srcmat;
  const uchar *ptr_usermat = nullptr;
  double x_distance = 0;
  int detect_num = 0;

//  cout << "src_mat:" << endl << src_mat << endl << endl;
//  cout << "out_mat:" << endl << out_mat << endl << endl;

  for (xindex=0; xindex<kHorizontalZoneNum_; ++xindex) {
    //GetZoneRange(index, &x1, &x2, &y1, &y2);
    GetZoneXRange(xindex, &x1, &x2);
    // NOTE(review): min/max are reset once per column, not per zone, so
    // lower zones inherit the minimum found above — confirm intended.
    max_dist = 0;
    min_dist = kMaxDetectDistance_ + 1;
    for (yindex=0; yindex<kVerticalZoneNum_; ++yindex) {
      GetZoneYRange(yindex, &y1, &y2);

      // Scan the zone (subsampled by x/y_sample_val_) for its nearest
      // valid pixel; remember the lateral offset of that pixel.
      for(i=y1; i<y2; i+=y_sample_val_) {
        ptr_usermat = out_mat.ptr<uchar>(i);
        ptr_srcmat = src_mat.ptr<ushort>(i);
        if (!ptr_usermat || !ptr_srcmat) {
          continue;
        }

        for (j=x1; j<x2; j+=x_sample_val_) {
          value = ptr_usermat[j];

          if ((value > kMinDetectDistance_) &&
              (value < kMaxDetectDistance_)) {
            if (value > max_dist){
              max_dist = value;
            } else if (value < min_dist) {
              min_dist = value;
              // Lateral offset of the nearest point (pinhole model).
              x_distance = (j - camera_info.cx) * (ptr_srcmat[j]/scale) /
                  camera_info.fx;
            }

            // Bucket thresholds are kWarnDistance_ * (0.6 + 0.3*i);
            // buckets are painted red, yellow, green, blue, purple, then
            // white. Zero / out-of-range pixels are painted black.
            if (image_display_flag_) {
              if (value < limit_val[0]) {
                rgb_mat.at<cv::Vec4b>(i, j) = red;
              } else if (value < limit_val[1]) {
                rgb_mat.at<cv::Vec4b>(i, j) = yellow;
              } else if (value < limit_val[2]) {
                rgb_mat.at<cv::Vec4b>(i, j) = green;
              } else if (value < limit_val[3]) {
                rgb_mat.at<cv::Vec4b>(i, j) = blue;
              } else if (value < limit_val[4]) {
                rgb_mat.at<cv::Vec4b>(i, j) = purple;
              } else {
                rgb_mat.at<cv::Vec4b>(i,j) = white;
              }
            }
          } else {
            if (image_display_flag_) {
              rgb_mat.at<cv::Vec4b>(i, j) = black;
            }
          }
        }
      }

      // Record the zone result; "no detection" is encoded as the
      // maximum detectable distance (rescaled back by `scale`).
      if (min_dist > kMaxDetectDistance_) {
        depth_info_set_[detect_num].min_depth = scale * kMaxDetectDistance_;
        depth_info_set_[detect_num].x_distance = scale * kMaxDetectDistance_;
      } else {
        depth_info_set_[detect_num].min_depth = scale * min_dist;
        depth_info_set_[detect_num].x_distance = int(scale * x_distance);
      }
      detect_num++;

      //.cout << "detect_num: " << detect_num << ", x: " << xindex
      //      << ", y: " <<y index<<endl;
      // Overlay zone grid lines and the per-zone distance label.
      if (image_display_flag_) {
        cv::line(rgb_mat, Point(x1, y1), Point(x1, y2), purple);
        cv::line(rgb_mat, Point(0, y1), Point(kDepthFrameWidth_, y1), purple);
        float min_dist_meter = min_dist * float(0.01);
        char dist_str[128] = {0};
        sprintf(dist_str, "%.2fm", double(min_dist_meter));
        std::string text = dist_str;
        cv::Point origin;
        origin.x = x1;
        origin.y = y1 + 30;
        cv::putText(rgb_mat, text, origin, cv::FONT_HERSHEY_COMPLEX, 0.7,
                    cv::Scalar(255, 255, 255), 1, 1, 0);
      }
    }
  }

  if (image_display_flag_) {
    // display_image_lock_.lock();
    rgb_mat.copyTo(display_mat_);
    //display_image_lock_.unlock();
    emit SignalDepthImageDisplay();
  } else {
    // One final snapshot after display was switched off (see
    // SetImageDisplayFlag()).
    if (image_display_flag_keep_) {
      rgb_mat.copyTo(display_mat_);
      cv::imwrite("depth_src.jpg", mat);
      cv::imwrite("depth_deal.jpg", display_mat_);
      msleep(800);
      image_display_flag_keep_ = false;
    }
  }

  display_image_lock_.unlock();
  // Forward the per-zone results to the chassis when enabled.
  if (send_depth_data_) {
    SendDepthSet(depth_info_set_);
  }
  return NO_ERROR;
}

int RealSense::SendDepthSet(QVector<DepthInfoPack> &set)
{
  // Serializes the per-zone minimum depths / lateral offsets and sends
  // them to the chassis. The packet layout depends on the protocol type
  // selected at compile time.
#if MOVEMENT_MODULE_SWITCH
#if (MOVEMENT_PROTOCOL_TYPE == 1)
  int result = NO_ERROR;
  QVector<uchar> package_array(2+kHorizontalZoneNum_*kVerticalZoneNum_*4);
  uchar *package = package_array.data();
  // Device ID (front stereo camera = 0).
  package[0] = 0;
  // Number of vision data points.
  package[1] = uchar(kHorizontalZoneNum_*kVerticalZoneNum_);
  uint16_t min_dep = 0, x_distance = 0;
  // Per-zone obstacle data: lateral offset then depth, little-endian.
  for (int i=0; i<kHorizontalZoneNum_*kVerticalZoneNum_; ++i) {
    min_dep = uint16_t(set[i].min_depth);
    x_distance = uint16_t(set[i].x_distance);
    package[2+4*i] = uchar(x_distance);
    package[2+4*i+1] = uchar(x_distance >> 8);
    package[2+4*i+2] = uchar(min_dep);
    package[2+4*i+3] = uchar(min_dep >> 8);
  }
  int send_len = package_array.size();
  QByteArray data_buff = QByteArray(reinterpret_cast<char *>(package), send_len);
  if (serial_utils_ != nullptr) {
    serial_utils_->AddDataList(MOVEMENT_SYS, 0x40, REAL_TIME_FlAME, data_buff);
  }
#else //(MOVEMENT_PROTOCOL_TYPE ==  0)
  // NOTE(review): this branch accesses set.info_pack[...], which does not
  // match the QVector<DepthInfoPack> parameter above — confirm it still
  // compiles when MOVEMENT_PROTOCOL_TYPE == 0.
  QVector<uchar> package_array(4+kHorizontalZoneNum_*2);
  uchar *package = package_array.data();
  package[0] = 0xaa;
  package[1] = 0x53;
  for (int i=0; i<kHorizontalZoneNum_; ++i) {
    package[2+2*i] = uchar(set.info_pack[i].min_depth / 20);
    package[2+2*i+1] = uchar(set.info_pack[i].x_distance / 20);
    //package[2+2*i+1] = set.info_pack[i].avr / 20;
    //package[2+2*i+1] = set.info_pack[i].zx_hypotenuse_angle / 2;
  }

  // Trailer: additive checksum byte followed by '*'.
  int send_len = package_array.size() - 2;
  uchar send_sum = CalculateSum(package, send_len);
  package[send_len] = send_sum;
  package[send_len+1] = '*';
  int result = NO_ERROR;
  Movement::write_lock_.lock();
  result = LinuxSerial::Write(dev_handle_,
                              reinterpret_cast<char *>(package), send_len+2);
  Movement::write_lock_.unlock();
#endif //(MOVEMENT_PROTOCOL_TYPE == 1)

#if (REALSENSE_DEBUG_OUTPUT > 2)
  DebugOutput(QString("双目模块发送到底盘数据(%1): ").arg(send_len) +
              PublicFunc::UcharDataToHexString(package, send_len), 3);
#endif //(REALSENSE_DEBUG_OUTPUT > 2)
  if (result != NO_ERROR) {
    return result;
  }
#else // !MOVEMENT_MODULE_SWITCH
  // Only to silence the unused-parameter warning.
  if (set.info_pack[0].min_depth < kFloatMinimum_) {
    set.info_pack[0].min_depth = 0;
  }
#endif //MOVEMENT_MODULE_SWITCH
  return NO_ERROR;
}

/// 8-bit additive checksum over the first `num` bytes (wraps mod 256).
uchar RealSense::CalculateSum(uchar *data, int num)
{
  uchar total = 0;
  int idx = num;
  while (idx-- > 0) {
    total += data[idx];
  }
  return total;
}

/**
 * Converts an 8-bit OpenCV Mat (1, 3 or 4 channels) into a QImage.
 * Unsupported types yield a null QImage.
 *
 * BUG FIX: the original built the QImage as a zero-copy view of
 * frame.data for CV_8UC1/CV_8UC4, so the returned image dangled as soon
 * as the (often temporary) Mat was destroyed; .copy() detaches the pixel
 * data. bytesPerLine now uses frame.step instead of assuming tightly
 * packed rows, so padded/ROI Mats convert correctly.
 */
QImage RealSense::MatToQImage(Mat &frame)
{
  QImage img;
  const int bytes_per_line = int(frame.step);
  switch (frame.type()) {
    case CV_8UC1: {
      img = QImage(reinterpret_cast<const uchar *>(frame.data),
                   frame.cols, frame.rows, bytes_per_line,
                   QImage::Format_Grayscale8).copy();
      break;
    }
    case CV_8UC3: {
      // rgbSwapped() (BGR -> RGB) already returns a deep copy.
      img = QImage(reinterpret_cast<const uchar *>(frame.data),
                   frame.cols, frame.rows, bytes_per_line,
                   QImage::Format_RGB888).rgbSwapped();
      break;
    }
    case CV_8UC4: {
      img = QImage(reinterpret_cast<const uchar *>(frame.data),
                   frame.cols, frame.rows, bytes_per_line,
                   QImage::Format_ARGB32).copy();
      break;
    }
    default: break;
  }
  return img;
}

QImage RealSense::GetDepthDisplayImage()
{
  // Returns the processed (colorized) depth image for UI display, or a
  // null QImage when nothing has been rendered yet.
  // NOTE(review): display_mat_.empty() is checked before the lock is
  // taken — racy if another thread swaps the mat concurrently; confirm.
  if (display_mat_.empty()) {
    return QImage();
  }

  Mat temp;
  display_image_lock_.lock();
  // Get the processed color image (drop the alpha channel).
  cv::cvtColor(display_mat_, temp, cv::COLOR_RGBA2RGB);
  //cv::flip(temp, temp, 1); // mirror about the Y axis (disabled)
  display_image_ = MatToQImage(temp);
  display_image_lock_.unlock();
  return display_image_;
}

QImage RealSense::GetDepthImage()
{
  // Returns the raw depth frame scaled into an 8-bit grayscale QImage,
  // or a null QImage when no depth frame has arrived yet.
  if (depth_mat_.empty()) {
    return QImage();
  }

  Mat threshold_mat, temp;
  depth_image_lock_.lock();
  // Keep depths in (0, 2550]; comment units suggest ~0-2.5 m in mm.
  GetThresholdMask(depth_mat_, threshold_mat, 0, 2550);
  // Map to 8-bit (divide by 10) for display.
  threshold_mat.convertTo(temp, CV_8UC1, 1.0 / 10);
  // Produce the displayable depth image.
  depth_image_ = MatToQImage(temp);
  depth_image_lock_.unlock();
  return depth_image_;
}

/**
 * Returns the latest native color frame as a QImage, or a null QImage
 * when no color frame has arrived yet.
 */
QImage RealSense::GetColorImage()
{
  if (color_mat_.empty()) {
    return QImage();
  }

  color_image_lock_.lock();
  Mat rgb;
  // Drop the alpha channel before conversion.
  cv::cvtColor(color_mat_, rgb, cv::COLOR_RGBA2RGB);
  //cv::flip(rgb, rgb, 1); // mirror about the Y axis (disabled)
  color_image_ = MatToQImage(rgb);
  color_image_lock_.unlock();
  return color_image_;
}

/// Saves the current color frame to `path` while holding the image lock.
/// Always returns NO_ERROR.
int RealSense::WriteColorMatToDisk(QString path)
{
  const std::string save_path = path.toStdString();
  color_image_lock_.lock();
  cv::imwrite(save_path, color_mat_);
  color_image_lock_.unlock();
  return NO_ERROR;
}

/// Updates mounting parameters used by the angle transform; sentinel
/// values (-1 for angle, 0 for heights) leave the current value untouched.
void RealSense::SetDeviceParam(int angle, int height, int max_height) {
  if (angle != -1) {
    device_angle = angle;
  }

  if (height != 0) {
    device_height = height;
  }

  if (max_height != 0) {
    robot_max_height = max_height;
  }
}

/// Gate for SendDepthSet(): when false, processed per-zone depth data is
/// not forwarded (see ProcessDepthData()).
void RealSense::SetSendDepthData(bool on_off)
{
  send_depth_data_ = on_off;
}

void RealSense::RealTimeVideoControl(bool on_off,QString addr)
{
  // Starts/stops pushing frames to an RTSP endpoint (in-process build).
#if !REALSENSE_CROCESS_THREAD
  m_video_save_.PlugRtspFrame(on_off,addr);
#else //REALSENSE_CROCESS_THREAD
  // NOTE(review): this branch only sanitizes the local flag (an empty
  // address cannot be streamed to) and otherwise does nothing — confirm
  // whether a forwarding call is missing in the cross-process build.
  if (on_off && addr.isEmpty()) {
    on_off = false;
  }
#endif // !REALSENSE_CROCESS_THREAD
}

void RealSense::CheckRealSenseWorkStatus()
{
  // Polls the RealSense laser power; under-power is reported once (with
  // the module powered off so the main thread can reopen it), and
  // recovery is reported once power returns above the configured limit.
#if !REALSENSE_CROCESS_THREAD
  rs2::device selected_device = m_profile_.get_device();
  auto depth_sensor = selected_device.first<rs2::depth_sensor>();

  if(depth_sensor.supports(RS2_OPTION_LASER_POWER) && thread_on_flag_)
  {
    float power =0;
    try {

      power = depth_sensor.get_option(RS2_OPTION_LASER_POWER);

    } catch (const rs2::error &e) {
      // NOTE(review): on failure `power` stays 0 and is still compared
      // against the limit below — confirm this is the intended fallback.
      DebugOutput(
            QString("vision module2: CheckRealSenseWorkStatus(): error calling %1(%2) -> %3.")
            .arg(QString::fromStdString(e.get_failed_function()))
            .arg(QString::fromStdString(e.get_failed_args()))
            .arg(QString::fromStdString(e.what())));
    }

    if(power < m_vision_module_power_limited)
    {
      if(m_upload_work_status_ != ERROR_REALSENSE_UDER_POWER)
      {
        m_upload_work_status_ = ERROR_REALSENSE_UDER_POWER;
        // Power off first; the main thread re-opens the module.
        RealsensePowerControl(0);
        // Report upward.
        emit SignalRealSenseWorkException(ERROR_REALSENSE_UDER_POWER);
        DebugOutput("双目衰减功率：" + QString::number(int(power)));
      }
      return;
    }
    else
    {
      if(m_upload_work_status_ != NO_ERROR && m_upload_work_status_ != -1)
      {
        m_upload_work_status_ = NO_ERROR;
        emit SignalRealSenseWorkException(NO_ERROR);
      }

    }

#if (REALSENSE_DEBUG_OUTPUT > 0)
    // Log the power level roughly every 50 polls.
    static int print = 0;
    print++;
    if (print > 50) {
      print = 0;
      DebugOutput("双目功率：" + QString::number(int(power)));
    }
#endif //(REALSENSE_DEBUG_OUTPUT > 0)
  }
#endif // !REALSENSE_CROCESS_THREAD
  //    if(depth_sensor.supports(RS2_OPTION_TOTAL_FRAME_DROPS) && thread_on_flag_)
  //    {
  //        float drop_total = depth_sensor.get_option(RS2_OPTION_TOTAL_FRAME_DROPS);
  //        DebugOutput("drop frame total: " + QString::number(int(drop_total)));
  //    }
}

void RealSense::CheckGhostInit()
{
  cv::Mat color_later(480,640,CV_8UC3,Scalar(0));
  for(int i=0;i<25;i++)
  {
    color_later.copyTo(m_color_flame_[i]);
  }
}

bool RealSense::CheckGhostImage(cv::Mat color_now)
{
  // Ghost-frame detector: compares the current color frame against a
  // rolling 25-frame history. Returns true (soft restart requested) when
  // the frame is near-identical to a 10..24-frame-old image while the
  // robot is moving, or when three very dissimilar frames occur within
  // ~40 frames of each other.
  static int num0 = 0, num_f = 0, num_f0 = 0;

  // Frame counter with wraparound to avoid overflow.
  num_f++;
  if(num_f > 10000) {
    num_f = 0;
  }

  double pro = 0.0, prop0 = 0.0;
  // Near-identical to an old frame while moving => frozen/ghost image.
  for(int i=10;i<25;i++) {
    pro = double(m_check_similar_.
                 ReflectiveFilterC(color_now,m_color_flame_[i]));
    if(pro > 0.995 && Movement::GetInstance()->GetCurSpeed() > 0.2) {
      DebugOutput("检测到残影触发软重启，相似度:" + QString::number(pro));
      return true;
    }
  }

  // Very low similarity to the immediately preceding frame is also
  // treated as suspicious; count occurrences and their spacing.
  prop0 = double(m_check_similar_.
                 ReflectiveFilterC(color_now,m_color_flame_[0]));
  if(prop0 < 0.3) {
    num0++;
    DebugOutput(QString("相似度小于0.3 次数：%1, 值：%2.").arg(num0).arg(prop0), 2);
    if(num0 == 1) {
      num_f0 = num_f;
    }
  }

  if(num0 > 2) {
    if(num_f - num_f0 < 40) {
      DebugOutput("检测到残影触发软重启.", 1);
      num0 = 0;
      return true;
    } else {
      num0 = 0;
    }
  }

  // Shift the history back one slot and store the current frame at 0.
  for(int i=24;i>0;i--) {
    m_color_flame_[i-1].copyTo(m_color_flame_[i]);
  }

  color_now.copyTo(m_color_flame_[0]);
  return false;
}

int RealSense::RealsensePowerControl(uchar on_off)
{
  // Switches the vision module's power rail (1 = on, 0 = off) through
  // whichever control path the build selects: DCU object, movement
  // serial utility, or a raw framed packet emitted to the DCU.
#if (DCU_SERIAL_TYPE == 1)
  return dcu_base_->VisionDevPowerControl(on_off);
#elif (MOVEMENT_SERIAL_TYPE > 0)
#if (MOVEMENT_PROTOCOL_TYPE == 1)
  QByteArray data;
  data.append(char(on_off));
  if (serial_utils_ != nullptr) {
    serial_utils_->AddDataList(DCU_CONTROL_SYS, 0x99,
                               COMMAND_FLAME, data);
  }
#elif (MOVEMENT_PROTOCOL_TYPE == 0)
  QByteArray send_array(12, 0);
  uchar *send_buf = reinterpret_cast<uchar*>(send_array.data());
  // Frame header.
  send_buf[0] = 0x66;
  send_buf[1] = 0xAA;
  // Payload length.
  send_buf[2] = 0x01;
  send_buf[3] = 0x00;
  // Fixed values.
  send_buf[4] = 0x02;
  send_buf[5] = 0x04;
  send_buf[6] = 0x03;
  // Function code.
  send_buf[7] = 0xb5;
  // Fixed value.
  send_buf[8] = 0x00;
  // Data segment.
  send_buf[9] = on_off;
  // Checksum: CRC16-CCITT over the first 10 bytes, little-endian.
  ushort crc16 = PublicFunc::CalculateCRC16Ccitt(send_buf, 10);
  send_buf[10] = crc16 & 0xff;
  send_buf[11] = ((crc16) >> 8) & 0xff;
  emit SignalTransmitDataToDcu(0xb5, COMMAND_FLAME, send_array);

#if (REALSENSE_DEBUG_OUTPUT > 1)
 DebugOutput(QString("开关双目数据(%1): ").arg(send_array.size()) +
              PublicFunc::QByteArrayToHexString(send_array), 2);
#endif //(REALSENSE_DEBUG_OUTPUT > 1)
#endif //(MOVEMENT_PROTOCOL_TYPE == 1)
#else //(MOVEMENT_SERIAL_TYPE == 0)
  Q_UNUSED(on_off);
#endif //(DCU_SERIAL_TYPE == 1)
  return NO_ERROR;
}

int RealSense::StartRealSenseDataFlow()
{
  // Starts the librealsense pipeline (or the cross-process video helper)
  // and transitions the device to kVisionDevStart. Start failures are
  // logged and reported as ERROR_REALSENSE_START_DEV.
  if ((dev_status_ == kVisionDevStart) || (dev_status_ == kVisionDevClosed)) {
    DebugOutput("vision module2: StartDev(): dev already start or not opened!");
    return NO_ERROR;
  }

  try {
#if !REALSENSE_CROCESS_THREAD
    m_profile_ = pipe_.start(cfg_);
#else //REALSENSE_CROCESS_THREAD
    realsense_video_.startDev();
#endif // !REALSENSE_CROCESS_THREAD
    //DebugOutput("vision module2: pipe.start ok!");
  } catch (const rs2::error &e) {
    DebugOutput(
          QString("vision module2: pipe_.start(): error calling %1(%2) -> %3.")
          .arg(QString::fromStdString(e.get_failed_function()),
               QString::fromStdString(e.get_failed_args()),
               QString::fromStdString(e.what())));
    return ERROR_REALSENSE_START_DEV;
  }
  catch (const std::exception & e) {
    DebugOutput(QString("vision module2: pipe_.start(): std error %1.")
                .arg(e.what()));
    return ERROR_REALSENSE_START_DEV;
  }
  dev_status_ = kVisionDevStart;
#if REALSENSE_CROCESS_THREAD
  // Cross-process build: the worker thread is started here rather than
  // by the caller.
  RunThread();
#endif //REALSENSE_CROCESS_THREAD
  return NO_ERROR;
}

/**
 * @brief Handle a fatal device error: record it, power the module off, and
 *        notify the upper layer, which owns the restart sequence.
 * @param error Error code to publish via SignalRealSenseWorkException.
 */
void RealSense::RealSenseWorkExceptionDeal(int error)
{
  m_upload_work_status_ = error;

  // Cut power first; the main thread is responsible for re-powering.
  RealsensePowerControl(0);

  // Report upward — the upper layer drives the restart procedure.
  emit SignalRealSenseWorkException(m_upload_work_status_);
  m_upload_work_status_ = -2;

#if !REALSENSE_CROCESS_THREAD
  if (video_record_flag_) {
    m_video_save_.VideoSaveStop();
  }
#endif // !REALSENSE_CROCESS_THREAD
}

#if !REALSENSE_CROCESS_THREAD
void RealSense::run()
{
  int once = 0, height = 0, width = 0;
  QString path;

  if(video_record_flag_)
    m_video_save_.VideoSaveStart(kColorFrameHeight_,kColorFrameWidth_,(int)kFrameFpsIndict_,m_write_new_video_file_);

  while(thread_on_flag_ && StartRealSenseDataFlow())
  {
    static char times = 0;
    times ++;
    if((times >= 5) && (m_upload_work_status_ != ERROR_REALSENSE_OPEN_DEV))
    {
      times = 0;
      RealSenseWorkExceptionDeal(ERROR_REALSENSE_OPEN_DEV);
      return;
    }
    DebugOutput("双目打开失败：" + QString::number(times));
    msleep(500);
  }

  while (thread_on_flag_) {
    rs2::frameset frameset;
    try {
      frameset = pipe_.wait_for_frames();
      if (frameset.get_data_size() <= 0) {
        if (once) {
          DebugOutput("vision module2: waitForNewFrame timeout！");
          once = 1;
        }
        continue ;
      }
    } catch (const rs2::error &e) {
      DebugOutput(
            QString("vision module2: Run(): error calling %1(%2) -> %3.")
            .arg(QString::fromStdString(e.get_failed_function()))
            .arg(QString::fromStdString(e.get_failed_args()))
            .arg(QString::fromStdString(e.what())));
      if(m_upload_work_status_ != ERROR_REALSENSE_THROW_EXCEPTION)
      {
        RealSenseWorkExceptionDeal(ERROR_REALSENSE_THROW_EXCEPTION);
        return;
      }
    }
    if (obstacle_detect_flag_) {
      try{
        auto depth = frameset.get_depth_frame();
        width = depth.as<rs2::video_frame>().get_width();
        height = depth.as<rs2::video_frame>().get_height();
        cv::Mat depth_mat(Size(width, height), CV_16UC1,
                          const_cast<void *>(depth.get_data()), cv::Mat::AUTO_STEP);
        //发生异常后的状态恢复
        if(m_upload_work_status_ != NO_ERROR && m_upload_work_status_ != -1)
        {
          m_upload_work_status_ = NO_ERROR;
          emit SignalRealSenseWorkException(NO_ERROR);
        }

        ProcessDepthData(depth_mat);

      } catch(const rs2::error &e){
        DebugOutput(
              QString("vision module2: Run(): error calling %1(%2) -> %3.")
              .arg(QString::fromStdString(e.get_failed_function()))
              .arg(QString::fromStdString(e.get_failed_args()))
              .arg(QString::fromStdString(e.what())));
        if(m_upload_work_status_ != ERROR_REALSENSE_THROW_EXCEPTION)
        {
          RealSenseWorkExceptionDeal(ERROR_REALSENSE_THROW_EXCEPTION);
          return;
        }
      }
    }

    if (video_record_flag_) {
      try {
        auto color = frameset.get_color_frame();
        width = color.as<rs2::video_frame>().get_width();
        height = color.as<rs2::video_frame>().get_height();

        cv::Mat rgb_mat(Size(width, height), CV_8UC3,
                        const_cast<void *>(color.get_data()),
                        cv::Mat::AUTO_STEP);
        cv::Mat color_now;
        rgb_mat.copyTo(color_now);
        //检测是否有残影
        //if(m_restart_realsense_switch_ && CheckGhostImage(color_now))
        //{
        //  //RestartRealsenseTimeout();
        //  emit SignalRestartRealsense();
        //  return;
        //}
        m_video_save_.SendFrame(const_cast<void *>(color.get_data()), height, width);
        color_image_lock_.lock();
        rgb_mat.copyTo(color_mat_);
        color_image_lock_.unlock();
        color_mat_.release();

      }
      catch(const rs2::error &e){
        DebugOutput(
              QString("vision module2: Run(): error calling %1(%2) -> %3.")
              .arg(QString::fromStdString(e.get_failed_function()))
              .arg(QString::fromStdString(e.get_failed_args()))
              .arg(QString::fromStdString(e.what())));
        if(m_upload_work_status_ != ERROR_REALSENSE_THROW_EXCEPTION)
        {
          RealSenseWorkExceptionDeal(ERROR_REALSENSE_THROW_EXCEPTION);
          return;
        }
      }
    }
  }

  if(video_record_flag_)
    m_video_save_.VideoSaveStop();
}
#else //REALSENSE_CROCESS_THREAD
/**
 * @brief Worker-thread loop (cross-process / Orbbec capture variant): opens
 *        the device, pulls depth frames and feeds them to obstacle detection
 *        until thread_on_flag_ is cleared.
 */
void RealSense::run()
{
#if 0
  if(!realsense_video_.isRunning()){
    DebugOutput("realsen thread is not running");
    return;
  }
#endif
  // Video file name: "<mission video dir>M_<yyyy-MM-dd_hh-mm-ss>.avi".
  QString data_time = QDateTime::currentDateTime()
      .toString("yyyy-MM-dd_hh-mm-ss");
  QString path = QString("%1M_%2.avi")
      .arg(FileManager::GetMissionVideoDirPath(), data_time);
  // Video writer.
  // NOTE(review): the code that actually writes frames is disabled below
  // (#if 0), so the created file stays empty even though SignalVideoReady is
  // still emitted at the end — confirm this is intended.
  cv::VideoWriter videowriter;
  if (video_record_flag_) {
    videowriter = cv::VideoWriter(path.toStdString(),
        cv::VideoWriter::fourcc('x', '2', '6', '4'), kFrameFpsIndict_,
        Size(kColorFrameWidth_, kColorFrameHeight_));
  }

  OpenDeviceOrbbec();
  cv::Mat depth_mat;
  cv::Mat rgb_mat;
  while(thread_on_flag_) {
    //realsense_video_.getMatFrame(depth_mat, rgb_mat);
    StartProcessOrbbec(depth_mat);

    // Process the depth data.
    if (obstacle_detect_flag_) {
      // Publish the latest frame (possibly still empty) under the lock for
      // other consumers, then skip processing until real data arrives.
      depth_image_lock_.lock();
      depth_mat.copyTo(depth_mat_);
      depth_image_lock_.unlock();

      if (depth_mat.empty()) {
        usleep(500);
        continue;
      }
      ProcessDepthData(depth_mat);
      m_realsense_work_status = true;
    }
#if 0
    // Save the color data.
    if (video_record_flag_) {
      if (rgb_mat.empty()) {
        usleep(500);
        continue;
      }
      videowriter << rgb_mat;
      color_image_lock_.lock();
      rgb_mat.copyTo(color_mat_);
      color_image_lock_.unlock();
    }
#endif
  }

  if (video_record_flag_) {
    emit SignalVideoReady(path, kFileMissionVideoType);
  }
}
#endif // !REALSENSE_CROCESS_THREAD
