#include <CameraHal/CamCifDevHwItf.h>
#include <CameraHal/CamHalVersion.h>
#include <CameraHal/CamHwItf.h>
#include <CameraHal/CamIsp11DevHwItf.h>
#include <CameraHal/BufferBase.h>
#include <CameraHal/CameraIspTunning.h>
#include <CameraHal/linux/v4l2-controls.h>
#include <CameraHal/linux/media/rk-isp11-config.h>
#include <CameraHal/linux/media/v4l2-config_rockchip.h>
#include <CameraHal/IonCameraBuffer.h>
#include <CameraHal/StrmPUBase.h>
#include <iostream>

#include <linux/videodev2.h>
#include <math.h>
#include <stdio.h>
#include <string.h>
#include <dirent.h>
#include <errno.h>
#include <pthread.h>
#include <pwd.h>
#include <stdarg.h>
#include <stdlib.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <sys/time.h>
#include <sys/types.h>
#include <unistd.h>

extern "C" {
#include "common.h"
#include <iep/iep.h>
#include <iep/iep_api.h>
#include "fs_manage/fs_storage.h"
#include "fs_manage/fs_manage.h"
#include <fs_manage/fs_picture.h>
#include "huffman.h"
#include "rk_rga/rk_rga.h"
#include "video_ion_alloc.h"
#include "vpu.h"

#include <dpp/dpi.h>
#include <dpp/dpp_buffer.h>
#include <dpp/dpp_err.h>
#include <dpp/dpp_frame.h>
#include <dpp/dpp_packet.h>
#include <jpeglib.h>

#include "watermark.h"
#include "power/thermal.h"
#include "ui_resolution.h"
}

#include "encode_handler.h"
#include "jpeg_header.h"
#include "video_interface.h"

#include "av_wrapper/handler/audio_encode_handler.h"
#include "handler/scale_encode_ts_handler.h"

#include "fly_server.h"

#include <fstream>
#include "AlgTk_Interface.h"
#define ALG_BUFFER_SIZE (5 * 1024 * 1024)
#define ADAS_BUFFER_SIZE   (ADAS_BUFFER_WIDTH * ADAS_BUFFER_HEIGHT)
#define WRITE_TO_FILE 0
#define READ_FROM_FILE 0
// TODO: no need limit the number of video devices
#define MAX_VIDEO_DEVICE 5

#define ADAS_BUFFER_NUM 3
#define ADAS_BUFFER_WIDTH 144
#define ADAS_BUFFER_HEIGHT 108
#define ADAS_FRAME_COUNT 10

#define LDW_TYPE_DAY 1
#define LDW_TYPE_NIGHT 0

#define NV12_RAW_CNT 3

#define STACKSIZE (256 * 1024)

using namespace std;

#define FRONT "isp"
#define CIF "cif"
#define USB_FMT_TYPE HAL_FRMAE_FMT_MJPEG

class MP_DSP;
class NV12_ADAS;

// Aggregates the camera HAL objects that make up one capture pipeline:
// the device itself, its main/secondary data paths, the ION buffer
// allocator, and the processing units attached to the paths.
struct hal {
  shared_ptr<CamHwItf> dev;                       // camera hardware interface (ISP/CIF/USB)
  shared_ptr<CamHwItf::PathBase> mpath;           // main path (full-resolution stream)
  shared_ptr<CamHwItf::PathBase> spath;           // secondary path (scaled stream)
  shared_ptr<IonCameraBufferAllocator> bufAlloc;  // ION-backed frame buffer allocator
  shared_ptr<MP_DSP> mp_dsp;                      // processing unit on the main path
  shared_ptr<NV12_ADAS> nv12_adas;                // ADAS processing unit (NV12 frames)
};

// Per-frame ISP state: exposure, white balance and noise-reduction /
// sharpening settings.
typedef struct ispinfo {
  float exp_gain;   // exposure gain
  float exp_time;   // exposure time
  int doortype;     // indoor/outdoor classification — confirm exact semantics

  // 25 luminance mean cells (presumably a 5x5 measurement grid — confirm
  // against the ISP driver).
  unsigned char exp_mean[25];

  // White-balance gains, one per Bayer channel.
  float wb_gain_red;
  float wb_gain_green_r;
  float wb_gain_blue;
  float wb_gain_green_b;

  // Noise-reduction / sharpening enables and strength levels.
  unsigned char luma_nr_en;
  unsigned char chroma_nr_en;
  unsigned char shp_en;
  unsigned char luma_nr_level;
  unsigned char chroma_nr_level;
  unsigned char shp_level;

  int reserves[16];  // reserved for future fields
} ispinfo_t;

// pthread attributes shared by every worker thread spawned in this file.
static pthread_attr_t global_attr;

// Record vs. preview mode: only record mode creates an EncodeHandler
// (see video_encode_init).
static bool is_record_mode = true;

// User-supplied noise value shown in the debug overlay (show_video_mark).
static unsigned int user_noise = 0;

// Colorspace used for captured frames.
static HAL_COLORSPACE color_space = HAL_COLORSPACE_SMPTE170M;

// One DPP output frame plus the metadata carried alongside it through the
// encode/photo/live queues.
struct dpp_buffer {
  DppBuffer buffer;                      // the DPP frame itself
  struct timeval pts;                    // presentation timestamp
  uint32_t noise[4];                     // per-frame noise values (debug overlay)
  struct dpp_sharpness sharpness;        // sharpness info for this frame
  struct HAL_Buffer_MetaData* isp_meta;  // ISP exposure/WB metadata, may be NULL
};

// FIFO of dpp_buffers handed from the DPP thread to one consumer thread;
// mutex/condition pair implements the producer/consumer hand-off.
struct dpp_buffer_list {
  list<struct dpp_buffer*> buffers;
  pthread_mutex_t mutex;
  pthread_cond_t condition;
};

// Per-video DPP (post-processing) context: one DPP thread feeding three
// consumer threads (record encode, photo, live/transfer), each through
// its own buffer list.
struct video_dpp {
  DppCtx context;          // DPP library handle
  unsigned int fun;        // enabled DPP function(s) — confirm with dpp API
  pthread_t dpp_thread;
  bool exit;               // ask all DPP-related threads to quit
  pthread_t encode_thread;
  pthread_t photo_thread;
  pthread_t live_thread;  // scale, encode, and send for live video
  struct dpp_buffer_list encode_buffer_list;
  struct dpp_buffer_list photo_buffer_list;
  struct dpp_buffer_list live_buffer_list;
  bool stop_flag;
};

// State for the ADAS/object-tracking pipeline.  ADAS_BUFFER_NUM small
// NV12 ION buffers cycle between dsp_pool (free, for the capture side to
// fill via RGA) and pool (filled, for video_adas_pthread to consume).
struct video_adas {
  DppCtx context;         // DPP handle (0 when unused)
  unsigned int fun;
  pthread_t dpp_id;       // video_adas_pthread worker thread
  bool exit;              // request worker shutdown
  bool initFlag;          // tracker has been given an initial rectangle
  bool startFlag;         // worker finished its startup delay
  struct video_ion input[ADAS_BUFFER_NUM];  // 144x108 NV12 frame buffers
  list<struct video_ion*> pool;      // filled frames awaiting tracking
  list<struct video_ion*> dsp_pool;  // free frames awaiting capture
  pthread_mutex_t pool_lock;         // guards pool
  pthread_mutex_t dsp_pool_lock;     // guards dsp_pool
  dsp_ldw_out out;        // lane-departure output — confirm current use
  int rga_fd;             // RGA device used for downscaling into input[]
  void* alg_virt;         // ALG_BUFFER_SIZE scratch memory for AlgTk
};

// Snapshot lifecycle: ENABLE = requested, BEGIN = capture in progress,
// END = finished, DISABLE = feature off.
enum photo_state { PHOTO_ENABLE, PHOTO_BEGIN, PHOTO_END, PHOTO_DISABLE };

// Per-video still-photo state: RGA staging buffer + VPU JPEG encoder.
struct video_photo {
  enum photo_state state;
  struct video_ion rga_photo;   // staging buffer the frame is copied into
  struct vpu_encode encode;     // VPU JPEG encoder context
  pthread_t pid;                // photo worker thread
  pthread_mutex_t mutex;        // with condition: photo trigger hand-off
  pthread_cond_t condition;
  int rga_fd;
  char pic_name[128];           // path of the last photo written
};

// MJPEG decode context (USB cameras delivering JPEG frames).
struct video_jpeg_dec {
  struct vpu_decode* decode;
  bool decoding;   // a frame is currently being decoded
};

// Number of independent JPEG snapshot streams per video.
#define JPEG_STREAM_NUM 2
// Requested output size for one JPEG stream.
struct JpegConfig {
  int width;
  int height;
};

extern "C" void storage_setting_event_callback(int cmd, void *msg0, void *msg1);
static void* jpeg_encode_one_frame(void* arg);

// Receives camera frames, scales them into a private ION buffer with the
// RGA, JPEG-encodes them on a one-shot worker thread and delivers the
// result through a user callback.  One instance per JPEG stream.
class JpegStreamReceiver {
 public:
  // Scaled NV12 source for the encoder; (re)allocated lazily in process().
  struct video_ion yuv_hw_buf;
  // True while a jpeg_encode_one_frame() worker owns yuv_hw_buf.
  bool is_processing;
  JpegStreamReceiver()
      : data_cb(nullptr), request_encode(0), is_processing(false) {
    memset(&yuv_hw_buf, 0, sizeof(yuv_hw_buf));
    yuv_hw_buf.client = -1;
    yuv_hw_buf.fd = -1;
  }

  ~JpegStreamReceiver() { video_ion_free(&yuv_hw_buf); }

  // Register the sink that receives each finished JPEG (NULL buf on error).
  inline void set_notify_new_enc_stream_callback(NotifyNewEncStream cb) {
    data_cb = cb;
  }

  inline int get_request_encode() {
    return __atomic_load_n(&request_encode, __ATOMIC_SEQ_CST);
  }

  // Atomically flip request_encode to `val`, but only when it currently
  // holds the opposite value (CAS keeps concurrent set/clear races out).
  inline void set_request_encode(bool val) {
    int expected = val ? 0 : 1;
    __atomic_compare_exchange_n(&request_encode, &expected, val ? 1 : 0, false,
                                __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
  }

  // Scale the NV12 frame behind src_fd to config's size into yuv_hw_buf,
  // then spawn jpeg_encode_one_frame() to encode it asynchronously.
  // Returns 0 if accepted (or a request is already in flight), -1 on
  // allocation/RGA/thread failure.
  int process(int src_fd, int src_w, int src_h, struct JpegConfig& config) {
    if (is_processing)
      return 0;

    int ret = 0;
    pthread_t pid = 0;
    int dst_w = config.width;
    int dst_h = config.height;
    DEBUG_INFO(" DstWH:%d,%d SrcWH:%d,%d", dst_w, dst_h, yuv_hw_buf.width, yuv_hw_buf.height);
    // Reallocate the staging buffer only when the requested size changes.
    if (yuv_hw_buf.width != dst_w || yuv_hw_buf.height != dst_h) {
      video_ion_free(&yuv_hw_buf);
      ret = video_ion_alloc(&yuv_hw_buf, dst_w, dst_h);
      if (ret) {
        printf("%s yuv ion alloc<%d x %d> fail!\n", __func__, dst_w, dst_h);
        return -1;
      }
    }

    printf("src w,h : [%d x %d], dst w,h : [%d x %d]\n", src_w, src_h, dst_w,
           dst_h);
    assert(src_w != 0 && src_h != 0 && dst_w != 0 && dst_h != 0);
    ret = rk_rga_ionfdnv12_to_ionfdnv12_scal_ext(src_fd, src_w, src_h,
                                                 yuv_hw_buf.fd, dst_w, dst_h, 0,
                                                 0, dst_w, dst_h, src_w, src_h);
    if (ret) {
      printf("%s rga scale fail!\n", __func__);
      return -1;
    }
    // create threads for every request?
    ret = pthread_create(&pid, &global_attr, jpeg_encode_one_frame, this);
    if (ret) {
      printf("%s pthread create fail!\n", __func__);
      return -1;
    }
    is_processing = true;
    return 0;
  }

  // Deliver one encoded JPEG to the registered callback (no-op when none).
  void notify(void* buf, int size, int width, int height) {
    if (data_cb) {
      VEncStreamInfo info = {.frm_type = JPEG_FRAME,
                             .buf_addr = buf,
                             .buf_size = size,
                             .time_val = {0, 0},
                             .ExtraInfo = {.jpeg_info = {width, height}}};
      data_cb(&info);
    }
  }

 private:
  NotifyNewEncStream data_cb;   // sink for finished JPEG frames
  volatile int request_encode;  // 0/1: user requested an encode
};

// One-shot worker thread spawned by JpegStreamReceiver::process().
// JPEG-encodes the receiver's scaled NV12 buffer and delivers the result
// through the receiver; on any failure a NULL/0 notification is sent
// instead.  The thread detaches itself before exiting.
static void* jpeg_encode_one_frame(void* arg) {
  JpegStreamReceiver* receiver = (JpegStreamReceiver*)arg;
  struct video_ion& yuv_hw_buf = receiver->yuv_hw_buf;
  void* src_buf = yuv_hw_buf.buffer;
  int src_size = yuv_hw_buf.width * yuv_hw_buf.height * 3 / 2;  // NV12 size
  int src_fd = yuv_hw_buf.fd;

#ifdef USE_WATERMARK
  //if(video_photo_watermark(video) == -1)
#endif
  {
    struct vpu_encode encode;
    memset(&encode, 0, sizeof(encode));
    int ret =
        vpu_nv12_encode_jpeg_init(&encode, yuv_hw_buf.width, yuv_hw_buf.height);
    if (!ret)
      ret = vpu_nv12_encode_jpeg_doing(&encode, src_buf, src_fd, src_size, -1);
    // Clear the request flag before notifying, so a callback that asks for
    // the next frame is not lost.
    receiver->set_request_encode(false);
    if (!ret)
      receiver->notify(encode.enc_out_data, encode.enc_out_length,
                       yuv_hw_buf.width, yuv_hw_buf.height);
    else
      receiver->notify(NULL, 0, 0, 0);
    vpu_nv12_encode_jpeg_done(&encode);
  }
  // Release the busy flag last: yuv_hw_buf must not be reused before this.
  receiver->is_processing = false;
  pthread_detach(pthread_self());
  pthread_exit(NULL);
}

// Everything belonging to one capture device: HAL pipeline, stream
// formats, worker threads and encode/photo/ADAS state.  Videos form a
// doubly-linked list headed by the global `video_list`.
struct Video {
  unsigned char businfo[32];  // V4L2 bus info (see video_query_businfo)
  int width;                  // main stream resolution
  int height;

  struct hal* hal;            // camera HAL pipeline objects

  struct Video* pre;          // doubly-linked list neighbors
  struct Video* next;

  int type;                   // device type (ISP/CIF/USB)
  int usb_type;
  int cif_type;
  pthread_t record_id;        // capture/record thread
  int deviceid;
  volatile int pthread_run;   // capture loop keep-running flag
  char save_en;

  struct video_dpp* dpp;      // post-processing state, valid when dpp_init
  bool dpp_init;
  struct video_adas* adas;    // tracker state, valid when adas_init
  bool adas_init;

  bool mp4_encoding;          // an h264_encode_process() call is in flight

  pthread_mutex_t record_mutex;  // with record_cond: stop handshake
  pthread_cond_t record_cond;

  frm_info_t frmFmt;          // main path frame format
  frm_info_t spfrmFmt;        // secondary path frame format

  // Means the video starts successfully with all below streams.
  // We need know whether the video is valid in some critical cases.
  volatile bool valid;
  // Maintain the encode status internally, user call interfaces too wayward.
  // Service the start_xx/stop_xx related to encode_hanlder.
  uint32_t encode_status;
#define RECORDING_FLAG          0x00000001
#define WIFI_TRANSFER_FLAG      0x00000002
#define CACHE_ENCODE_DATA_FLAG  0x00000004
  EncodeHandler* encode_handler;    // H264/AAC record pipeline
  struct watermark_info watermark;  // OSD bitmaps and coordinates

  // Transfer stream handler.
  ScaleEncodeTSHandler* ts_handler;

  bool cachemode;

  int fps_total;   // frame-rate statistics
  int fps_last;
  int fps;

  bool video_save;
  bool front;      // front (ISP) camera

  bool high_temp;  // thermal throttle: ADAS is disabled while set

  // Frame rate fraction; fps_d is passed as the encode fps in
  // video_encode_init().
  int fps_n;
  int fps_d;

  struct video_photo photo;  // snapshot machinery

  // ugly code, for adapt dingdingpai
  struct JpegConfig jpeg_config[JPEG_STREAM_NUM];
  JpegStreamReceiver* jpeg_receiver[JPEG_STREAM_NUM];

  struct ui_frame ui_frame[2];  // frames shown on the UI
};

extern "C" {
#include "show.h"
#include "parameter.h"
}

#include "video.h"

// Head of the doubly-linked list of active videos.
struct Video* video_list = NULL;
// Guards video_list and per-video state reached through it.
static pthread_rwlock_t notelock;
static bool record_init_flag = false;
static int enablerec = 0; // TODO: enablerec seems not necessary
static int enableaudio = 1;  // 0 = mute audio in new encode handlers

// Record-thread ids kept for later join/cleanup.
static list<pthread_t> record_id_list;

// Which streams the current camera setup provides.
static bool with_mp = false;    // main path
static bool with_adas = false;  // ADAS stream
static bool with_sp = false;    // secondary path

// Shared AAC encoder; per-video H264 handlers attach packet dispatchers
// to it (see video_encode_exit).
AudioEncodeHandler global_audio_ehandler(0, !enableaudio, &global_attr);

// Camera devices probed at startup.
static struct rk_cams_dev_info g_test_cam_infos;
// App event sink for record events (CMD_UPDATETIME, CMD_PHOTOEND, ...).
static void (*rec_event_call)(int cmd, void *msg0, void *msg1);

/*
 * Count frames and print the average frame rate roughly every 30 seconds.
 *
 * t0   in/out: timestamp of the last report; {0,0} means "not started"
 *      and is latched on the first call without printing.
 * i    in/out: frame counter incremented by the caller between calls;
 *      reset to 0 after each report.
 * name tag printed together with the rate.
 */
static void fps_count(struct timeval* t0, int* i, const char* name) {
  struct timeval now;

  gettimeofday(&now, NULL);
  if (t0->tv_sec == 0) {
    /* First call: only remember the starting time. */
    *t0 = now;
    return;
  }

  /* 64-bit arithmetic so a large seconds delta cannot overflow. */
  long long elapsed_us =
      (long long)(now.tv_sec - t0->tv_sec) * 1000000 + now.tv_usec - t0->tv_usec;
  if (elapsed_us >= 30000000) {
    /* fix: use the real elapsed time instead of assuming exactly 30 s, so
     * the reported rate stays accurate when we are woken up late. */
    printf("%s fps:%.2f\n", name, *i * 1000000.0 / elapsed_us);
    *i = 0;
    *t0 = now;
  }
}

/*
 * Block the caller until video_record_signal() clears video->pthread_run.
 *
 * fix: the predicate is re-checked in a loop so spurious wakeups from
 * pthread_cond_wait() cannot release the caller early — POSIX explicitly
 * allows such wakeups, and the original single `if` did not guard against
 * them.  video_record_signal() clears pthread_run under the same mutex,
 * so the loop terminates exactly when a real stop is signaled.
 */
static void video_record_wait(struct Video* video) {
  pthread_mutex_lock(&video->record_mutex);
  while (video->pthread_run)
    pthread_cond_wait(&video->record_cond, &video->record_mutex);
  pthread_mutex_unlock(&video->record_mutex);
}

// Mark the record loop as stopped and wake video_record_wait().
// pthread_run is cleared under record_mutex so a waiter cannot miss the
// signal between its predicate check and its cond_wait.
static void video_record_signal(struct Video* video) {
  pthread_mutex_lock(&video->record_mutex);
  video->pthread_run = 0;
  pthread_cond_signal(&video->record_cond);
  pthread_mutex_unlock(&video->record_mutex);
}

// Return the head of the global video list (callers iterate via ->next).
// Callers that can race with list changes hold notelock around the walk —
// see e.g. video_record_takephoto_end().
static struct Video* getfastvideo(void) {
  return video_list;
}

static void video_record_thermal_fun(struct Video* video);
static int video_set_fps(struct Video* video, int numerator, int denominator);
// Debug overlay (active only when built with TEST_VIDEO_MARK): renders the
// HAL version, frame rate, ISP exposure/WDR state, noise values and
// sharpness settings as text into the NV12 frame `dstbuf`.
// Compiled to a no-op otherwise.  Always returns 0.
static int show_video_mark(int width,
                           int height,
                           void* dstbuf,
                           int fps,
                           void* meta1,
                           uint32_t* noise,
                           struct dpp_sharpness* sharpness) {
#if TEST_VIDEO_MARK
  int x_pos = 50;
  int y_pos = 200;
  char name[30] = {0};
  struct HAL_Buffer_MetaData* meta = (struct HAL_Buffer_MetaData*)meta1;

  // HAL version line.
  snprintf(name, sizeof(name), "HAL %s", CAMHALVERSION);
  y_pos += 50;
  show_string(name, strlen(name), x_pos, y_pos, width, height, dstbuf);

  // Measured frame rate.
  memset(name, 0, sizeof(name));
  snprintf(name, sizeof(name), "fps %d", fps);
  y_pos += 50;
  show_string(name, strlen(name), x_pos, y_pos, width, height, dstbuf);

  // ISP metadata: WDR/GOC enables, WDR clip settings, exposure time (ms)
  // and gain.
  if (meta) {
    memset(name, 0, sizeof(name));
    snprintf(name, sizeof(name), "%u %u %u %u %.0f %.0f",
             meta->enabled[HAL_ISP_WDR_ID], meta->enabled[HAL_ISP_GOC_ID],
             meta->wdr.wdr_gain_max_clip_enable, meta->wdr.wdr_gain_max_value,
             meta->exp_time * 1000, meta->exp_gain);
    y_pos += 50;
    show_string(name, strlen(name), x_pos, y_pos, width, height, dstbuf);
  }

  // Noise values plus the user-tuned override.
  if (noise) {
    memset(name, 0, sizeof(name));
    snprintf(name, sizeof(name), "%u %u %u   %d  %u", noise[0], noise[1],
             noise[2], parameter_get_ex(), user_noise);
    y_pos += 50;
    show_string(name, strlen(name), x_pos, y_pos, width, height, dstbuf);
  }

  // Luma/chroma sharpening levels.
  if (sharpness) {
    memset(name, 0, sizeof(name));
    snprintf(name, sizeof(name), "%u %u", sharpness->src_shp_l,
             sharpness->src_shp_c);
    y_pos += 50;
    show_string(name, strlen(name), x_pos, y_pos, width, height, dstbuf);
  }
#endif
  return 0;
}

/*
 * Convert one packed YUYV (YUY2) image to semi-planar NV12.
 *
 * srcbuf holds interleaved Y0 U0 Y1 V0 ... bytes.  dstbuf receives the
 * full-resolution Y plane followed by an interleaved UV plane containing
 * only the chroma of the even rows (4:2:2 -> 4:2:0 by dropping odd-row
 * chroma).  width is assumed to be a multiple of 4, matching the original
 * word-wise implementation.  neadshownum is accepted for interface
 * compatibility but unused.  Always returns 0.
 */
static int convert_yuyv(int width,
                        int height,
                        void* srcbuf,
                        void* dstbuf,
                        int neadshownum) {
  const unsigned char* src = (const unsigned char*)srcbuf;
  unsigned char* dst_y = (unsigned char*)dstbuf;
  unsigned char* dst_uv = (unsigned char*)dstbuf + width * height;
  int row, pair;

  (void)neadshownum;

  for (row = 0; row < height; row++) {
    for (pair = 0; pair < width / 2; pair++) {
      /* Each 4-byte group is Y U Y V for two horizontal pixels. */
      *dst_y++ = src[0];
      *dst_y++ = src[2];
      if ((row & 1) == 0) {
        *dst_uv++ = src[1]; /* U */
        *dst_uv++ = src[3]; /* V */
      }
      src += 4;
    }
  }

  return 0;
}

// Fill video->businfo with the bus info string reported by the underlying
// camera device.  Returns 0 on success, -1 on failure.
static int video_query_businfo(struct Video* video) {
  return video->hal->dev->queryBusInfo(video->businfo) ? -1 : 0;
}

/*
 * Build a capture file name of the form
 *   <path>/YYYYMMDD_HHMMSS_O.<filetype>
 * from the current local time.
 *
 * str/size  destination buffer and capacity; snprintf truncates and always
 *           NUL-terminates (when size > 0).
 * ch        channel id; not part of the name, kept for interface
 *           compatibility with existing callers.
 */
void video_record_getfilename(char* str,
                              unsigned short size,
                              const char* path,
                              int ch,
                              const char* filetype) {
  time_t now;
  struct tm* timenow;

  (void)ch;

  time(&now);
  /* NOTE(review): localtime() returns a shared static buffer and this file
   * is heavily multi-threaded — switching to localtime_r() is advisable. */
  timenow = localtime(&now);
  if (!timenow) {
    /* fix: do not format from a NULL struct tm (localtime can fail). */
    if (size)
      str[0] = '\0';
    return;
  }

  snprintf(str, size, "%s/%04d%02d%02d_%02d%02d%02d_%c.%s", path,
           timenow->tm_year + 1900, timenow->tm_mon + 1, timenow->tm_mday,
           timenow->tm_hour, timenow->tm_min, timenow->tm_sec, 'O', filetype);
}

// Forward the current record time (in seconds) to the registered event
// callback, suppressing duplicate notifications for the same second.
// NOTE: `last` is a function-local static shared by all callers.
static void send_record_time(EncodeHandler* handler, int sec) {
  static int last = -1;

  UNUSED(handler);
  if (sec == last)
    return;
  last = sec;

  if (rec_event_call)
    rec_event_call(CMD_UPDATETIME, (void *)sec, (void *)0);
}

// must be called among locking notelock
static void set_record_time_cb() {
  struct Video* video_cur = getfastvideo();
  while (video_cur) {
    if (video_cur->encode_handler) {
      video_cur->encode_handler->record_time_notify = NULL;
    }
    video_cur = video_cur->next;
  }
}

static int video_encode_init(struct Video* video) {
  int audio_id = -1;
  int fps = 25;
  
  if (video->type == VIDEO_TYPE_ISP) {
    fps = video->fps_d;
    MediaConfig config;
    VideoConfig& vconfig = config.video_config;
    vconfig.fmt = PIX_FMT_NV12;
    vconfig.width = SCALED_WIDTH;
    vconfig.height = SCALED_HEIGHT;
    vconfig.bit_rate = SCALED_BIT_RATE;
    vconfig.frame_rate = fps;
    vconfig.level = 51;
    vconfig.gop_size = fps;
    vconfig.profile = 100;
    vconfig.quality = MPP_ENC_RC_QUALITY_MEDIUM;
    vconfig.qp_step = 6;
    vconfig.qp_min = 18;
    vconfig.qp_max = 48;
    vconfig.rc_mode = MPP_ENC_RC_MODE_CBR;
    ScaleEncodeTSHandler* ts_handler = new ScaleEncodeTSHandler(config);
    vconfig.width = video->width;
    vconfig.height = video->height;
    if (!ts_handler || ts_handler->Init(config)) {
      fprintf(stderr, "create ts handler failed\n");
      return -1;
    }
    video->ts_handler = ts_handler;
	DEBUG_INFO();

	if(is_record_mode)
	{
      EncodeHandler* handler = EncodeHandler::create(
          video->deviceid, video->type, video->usb_type, video->width,
          video->height, fps, audio_id);
      if (handler) {
        handler->set_global_attr(&global_attr);
        handler->set_audio_mute(enableaudio ? false : true);
        video->encode_handler = handler;
        DEBUG_INFO();
        return 0;
      } 
	  else {
	  	DEBUG_INFO();
        return -1;
	  }
	}	
  }
  
  return 0;
}

static int h264_encode_process(struct Video* video,
                               void* virt,
                               int fd,
                               void* hnd,
                               size_t size,
                               struct timeval& time_val,
                               PixelFormat fmt = PIX_FMT_NV12) {
  int ret = -1;
  video->mp4_encoding = true;
  if (!video->pthread_run)
    goto exit_h264_encode_process;

  if (video->encode_handler) {
    BufferData input_data;
    input_data.vir_addr_ = virt;
    input_data.ion_data_.fd_ = fd;
    input_data.ion_data_.handle_ = (ion_user_handle_t)hnd;
    input_data.mem_size_ = size;
    input_data.update_timeval_ = time_val;
    const Buffer input_buffer(input_data);
    ret = video->encode_handler->h264_encode_process(input_buffer, fmt);
  }

exit_h264_encode_process:
  video->mp4_encoding = false;
  return ret;
}

// Tear down the encode pipeline created by video_encode_init().
// Order matters: the handler's packet dispatcher is removed from the
// global audio encoder before the handler is deleted, so the audio thread
// cannot dispatch into freed memory.
static void video_encode_exit(struct Video* video) {
  PRINTF_FUNC;
  video->save_en = 0;

  if (video->encode_handler) {
#ifdef USE_WATERMARK
    video->encode_handler->watermark = NULL;
#endif
    global_audio_ehandler.RmPacketDispatcher(
        video->encode_handler->get_h264aac_pkt_dispatcher());
    delete video->encode_handler;
    video->encode_handler = NULL;
  }

  if (video->ts_handler) {
    delete video->ts_handler;
    video->ts_handler = NULL;
  }
  PRINTF_FUNC_OUT;
}

// Write one encoded JPEG to `filename` on storage.  Returns the
// fs_picture_mjpg_write() result.  `video` is unused but kept for
// interface compatibility.
static int video_save_jpeg(struct Video* video,
                           char* filename,
                           void* srcbuf,
                           unsigned int size) {
  (void)video;
  return fs_picture_mjpg_write(filename, srcbuf, size);
}

// Mark this video's snapshot as finished and, once every video in the
// global list has reached PHOTO_END, fire the CMD_PHOTOEND event.
// NOTE(review): photo.state is written while holding only the *read* lock;
// that appears safe because each thread writes its own video's state, but
// confirm against the other photo-state writers.
static void video_record_takephoto_end(struct Video* video) {
  struct Video* video_cur;
  bool end = false;

  pthread_rwlock_rdlock(&notelock);
  video->photo.state = PHOTO_END;

  // Scan the list: if any video has not finished, video_cur stays non-NULL.
  video_cur = getfastvideo();
  while (video_cur) {
    if (video_cur->photo.state != PHOTO_END)
      break;
    video_cur = video_cur->next;
  }
  if (!video_cur)
    end = true;
  pthread_rwlock_unlock(&notelock);

  // send message for photoend
  if (end && rec_event_call)
    (*rec_event_call)(CMD_PHOTOEND, (void *)0, (void *)1);
}

// Persist one captured JPEG into the picture folder for this video's
// storage type, then run the photo-finished bookkeeping/notification.
// Always returns 0.
static int video_record_save_jpeg(struct Video* video,
                                  void* srcbuf,
                                  unsigned int size) {
  char path[128];

  video_record_getfilename(path, sizeof(path),
                           fs_storage_folder_get_bytype(video->type, PICFILE_TYPE),
                           video->deviceid, "jpg");
  video_save_jpeg(video, path, srcbuf, size);
  video_record_takephoto_end(video);

  return 0;
}

// Print `name` once: the first call with *print == true emits the name and
// clears the flag, so later calls are silent.
static void video_print_name(const char* name, bool* print) {
  if (!*print)
    return;
  printf("%s\n", name);
  *print = false;
}

// JPEG-encode one NV12 frame straight into a freshly named picture file.
// On success the file name is kept in video->photo.pic_name for later use.
// Returns 0 on success, -1 when the file cannot be opened, otherwise the
// encoder's error code.
static int vpu_nv12_encode_mjpg(struct Video* video,
                                void* srcbuf,
                                int src_fd,
                                size_t src_size) {
  char filename[128];
  int ret;
  int fd;

  video_record_getfilename(filename, sizeof(filename),
                           fs_storage_folder_get_bytype(video->type, PICFILE_TYPE),
                           video->deviceid, "jpg");
  fd = fs_picture_open((char*)filename, O_WRONLY | O_CREAT, 0666);
  if (fd < 0) {
    printf("Cannot open jpg file\n");
    return -1;
  }

  ret = vpu_nv12_encode_jpeg_doing(&video->photo.encode, srcbuf, src_fd,
                                   src_size, fd);
  if (!ret) {
    /* Remember the shot's path (both buffers are 128 bytes). */
    memmove(video->photo.pic_name, filename, sizeof(filename));
    printf("%s:%s\n", __func__, filename);
  }

  /* Single unified cleanup path for both success and failure. */
  fs_picture_close(fd);
  return ret;
}

static void* video_adas_pthread(void* arg) {
    struct Video* video = (struct Video*)arg;
    int cnt = 0;
    struct video_ion* out;
    list<struct video_ion*>::iterator iterator;
    struct timeval start, end;
    unsigned int difftime;
    static int frames = 0;    
    static AlgTk_param algParam;
	TRACE_APPDATA stTraceNetData={0};
	int traceRequestFlag = 0;

    sleep(5);
    video->adas->startFlag = true;

    //get trace queue
    MAIN_SYS_STATUS_S* p_svr_sys_mode=msg_svr_sys_mode_get();
    QUEUE* pTraceMsgQueue=NULL;
    if(p_svr_sys_mode)
		pTraceMsgQueue = &p_svr_sys_mode->traceMsgQueue;

    do {
		out = NULL;
		pthread_mutex_lock(&video->adas->pool_lock);
		if (!video->adas->pool.empty()) {
		  iterator = video->adas->pool.begin();
		  out = *iterator;
		  video->adas->pool.pop_front();
		}
		pthread_mutex_unlock(&video->adas->pool_lock);
		if (out) {
			if(get_tracing_is_running()){
		        algParam.src_phy = (unsigned char*)out->buffer;
				//get trace data from app
		      	get_tracing_appdata(&stTraceNetData);
		        if(false == video->adas->initFlag) {
		            algParam.pMem = (char*)video->adas->alg_virt;
		            algParam.szMem = ALG_BUFFER_SIZE;
		            algParam.width = ADAS_BUFFER_WIDTH;
		            algParam.height = ADAS_BUFFER_HEIGHT;
		            algParam.mode = 0;
		            algParam.rtIn.x = stTraceNetData.tractoNet.trac_x * ADAS_BUFFER_WIDTH / 10000;//car
		            algParam.rtIn.y = stTraceNetData.tractoNet.trac_y * ADAS_BUFFER_HEIGHT / 10000;
		            algParam.rtIn.width = stTraceNetData.tractoNet.trac_w * ADAS_BUFFER_WIDTH / 10000;
		            algParam.rtIn.height = stTraceNetData.tractoNet.trac_h * ADAS_BUFFER_HEIGHT / 10000;
		            DEBUG_INFO("%d--%d--%d--%d",algParam.rtIn.x,algParam.rtIn.y,algParam.rtIn.width,algParam.rtIn.height);
		            traceRequestFlag = 0;          
		            video->adas->initFlag = true;
		        }
		        else {
		            algParam.mode = 1;
					algParam.bAnswer = stTraceNetData.bAnswer;
					//DEBUG_INFO("algParam.bAnswer = %d--algParam.bRequest = %d",algParam.bAnswer,algParam.bRequest);
					if(stTraceNetData.bAnswer != -1)
					{
						stTraceNetData.bAnswer = -1;
						set_tracing_appdata(&stTraceNetData);
					}
		        }
	     		//gettimeofday(&start, NULL);
	        	AlgTk_process(&algParam);
			    //gettimeofday(&end, NULL);
			    //difftime = 1000000*(end.tv_sec - start.tv_sec) + end.tv_usec - start.tv_usec;
			
			    //DEBUG_INFO("tarck%d:(%d,%d,%d,%d), gt:%ld,%d--%d--%d", frames, algParam.pRtOut.x, algParam.pRtOut.y, 
				//	algParam.pRtOut.width, algParam.pRtOut.height, difftime,algParam.detaX,algParam.detaY,algParam.detaZ);

				//send result to msg_process
				MSG_TRACE_DATA_S stMsgTrace;
				memset(&stMsgTrace,0,sizeof(MSG_TRACE_DATA_S));
				stMsgTrace.tractoNet.trac_x = algParam.pRtOut.x*10000/ADAS_BUFFER_WIDTH;
				stMsgTrace.tractoNet.trac_y = algParam.pRtOut.y*10000/ADAS_BUFFER_HEIGHT;
				stMsgTrace.tractoNet.trac_w = algParam.pRtOut.width*10000/ADAS_BUFFER_WIDTH;
				stMsgTrace.tractoNet.trac_h = algParam.pRtOut.height*10000/ADAS_BUFFER_HEIGHT;
				stMsgTrace.state = algParam.state;
				//DEBUG_INFO("algParam.state = %d",algParam.state);
				if(algParam.bAnswer == 1 && algParam.bRequest == 0)
					traceRequestFlag = 0;
				if(algParam.bRequest && (!traceRequestFlag))
				{
					stMsgTrace.bRequest = algParam.bRequest;
					traceRequestFlag = 1;
				}
				if(algParam.bValid)
				{
					stMsgTrace.tractoCom.track_x = algParam.detaX;
					stMsgTrace.tractoCom.track_y = algParam.detaY;
					stMsgTrace.tractoCom.track_z = algParam.detaZ;
				}
				
				MSG_BUF_S *p_msg_buf = (MSG_BUF_S *)GetTail(pTraceMsgQueue);
				if ( NULL == p_msg_buf)
				{
					DEBUG_INFO("queue full!");
					continue;
				}
				p_msg_buf->dst = MSG_MOD_ARMSVR;
				p_msg_buf->src = MSG_MOD_TRACE;
				p_msg_buf->len= sizeof(MSG_TRACE_DATA_S);
				
				((COM_BUF_S*)(p_msg_buf->buf))->cmd = MSG_CMD_TRAC;
				memcpy(p_msg_buf->buf + sizeof(u32), &stMsgTrace, sizeof(MSG_TRACE_DATA_S));
				PushQueue(pTraceMsgQueue, p_msg_buf);
				//DEBUG_INFO("enqueue:(%d,%d,%d,%d),%d--%d--%d",stMsgTrace.tractoNet.trac_x,stMsgTrace.tractoNet.trac_y,
				//	stMsgTrace.tractoNet.trac_w,stMsgTrace.tractoNet.trac_h,stMsgTrace.tractoCom.track_x,
				//	stMsgTrace.tractoCom.track_y,stMsgTrace.tractoCom.track_z);

				usleep(100*1000);//this para can ajust
       		}
			else{
				video->adas->initFlag = false;
				usleep(500*1000);
			}
			
			pthread_mutex_lock(&video->adas->dsp_pool_lock);
		    video->adas->dsp_pool.push_back(out);
		    pthread_mutex_unlock(&video->adas->dsp_pool_lock);
        	frames++;
		}
		
   } while (video->pthread_run && !video->high_temp && !video->adas->exit);

adas_thread_exit:  
    pthread_exit(NULL);
} 

/*
 * Allocate and start the ADAS tracking machinery for `video`: RGA handle,
 * ADAS_BUFFER_NUM small NV12 ION buffers (parked in dsp_pool for the
 * capture side to fill), the tracker's scratch memory and the worker
 * thread.  Returns 0 on success, -1 on failure.
 *
 * NOTE(review): failure paths leave partially-initialized state behind
 * (pre-existing behavior); nothing here attempts partial rollback.
 */
static int video_adas_init(struct Video* video) {
  int i = 0;

  printf("%s enter\n", __func__);
  video->adas = new video_adas();
  if (!video->adas) {
    printf("new video_adas() failed!\n");
    return -1;
  }
  video->adas->context = 0;
  video->adas->dpp_id = 0;

  pthread_mutex_init(&video->adas->pool_lock, NULL);
  pthread_mutex_init(&video->adas->dsp_pool_lock, NULL);

  if ((video->adas->rga_fd = rk_rga_open()) < 0)
    return -1;

  for (i = 0; i < ADAS_BUFFER_NUM; i++) {
    memset(&video->adas->input[i], 0, sizeof(struct video_ion));
    video->adas->input[i].client = -1;
    video->adas->input[i].fd = -1;
    if (video_ion_alloc(&video->adas->input[i], ADAS_BUFFER_WIDTH,
                        ADAS_BUFFER_HEIGHT)) {
      printf("video_adas_init ion alloc fail!\n");
      return -1;
    }
    video->adas->dsp_pool.push_back(&video->adas->input[i]);
  }

  /* fix: allocate (and check) the tracker's scratch buffer BEFORE the
   * worker thread that reads it is started; the original malloc'd it
   * after pthread_create() and never checked the result. */
  video->adas->alg_virt = malloc(ALG_BUFFER_SIZE);
  if (!video->adas->alg_virt) {
    printf("%s alg buffer alloc fail!\n", __func__);
    return -1;
  }

  video->adas->initFlag = false;
  video->adas->startFlag = false;

  if (pthread_create(&video->adas->dpp_id, &global_attr, video_adas_pthread,
                     video)) {
    printf("%s pthread create err!\n", __func__);
    return -1;
  }

  video->adas_init = true;
  printf("%s exit\n", __func__);

  return 0;
}

// Stop and tear down the ADAS tracker created by video_adas_init().
// Sequencing matters: raise `exit`, stop the DPP context so the worker
// cannot block in it, join the worker, then release everything else.
static void video_adas_exit(struct Video* video) {
  int i = 0;

  printf("%s enter\n", __func__);
  if (video->adas) {
    video->adas->exit = true;  // ask the worker to leave its loop
    if (video->adas->context) {
      dpp_stop(video->adas->context);
      printf("dpp_stop!\n");
    }

    if (video->adas->dpp_id) {
      printf("pthread_join dpp_id enter\n");
      pthread_join(video->adas->dpp_id, NULL);
      printf("pthread_join dpp_id exit\n");
      video->adas->dpp_id = 0;
    }

    if (video->adas->context) {
      dpp_destroy(video->adas->context);
      video->adas->context = 0;
    }

    // Drain both pools; the elements point into input[], freed below.
    while (video->adas->pool.size())
      video->adas->pool.pop_back();

    while (video->adas->dsp_pool.size())
      video->adas->dsp_pool.pop_back();

    for (i = 0; i < ADAS_BUFFER_NUM; i++)
      video_ion_free(&video->adas->input[i]);

    free(video->adas->alg_virt);

    rk_rga_close(video->adas->rga_fd);

    pthread_mutex_destroy(&video->adas->pool_lock);
    pthread_mutex_destroy(&video->adas->dsp_pool_lock);

    delete video->adas;
    video->adas = NULL;
  }

  video->adas_init = false;
  printf("%s exit\n", __func__);
}

// Stream processing unit for the ADAS tracker: every 4th frame delivered
// by the camera path is RGA-downscaled into a free 144x108 NV12 buffer
// and queued to video_adas_pthread.  Also lazily creates / tears down the
// ADAS machinery according to the thermal flag video->high_temp.
class NV12_ADAS : public StreamPUBase {
  struct Video* video;

 public:
  NV12_ADAS(struct Video* p) : StreamPUBase("NV12_ADAS", true, true) {
    video = p;
  }
  ~NV12_ADAS() {}
  // Called by the stream framework for each captured frame.  Returning
  // false signals failure to the framework.
  bool processFrame(shared_ptr<BufferBase> inBuf,
                    shared_ptr<BufferBase> outBuf) {
    static int i = 0;       // frame counter for the 1-in-4 decimation
    static int frames = 0;  // total frames forwarded to the tracker
    struct video_ion* out;
    int src_fd, src_w, src_h, dst_fd, dst_w, dst_h;
    list<struct video_ion*>::iterator iterator;
    static bool print = true;
    video_print_name("NV12_ADAS", &print);

#if READ_FROM_FILE
    static std::ifstream istream;
    static int frameCount;  
    static size_t length;
#endif
    out = NULL;

    // Thermal management: shut the tracker down while overheated and
    // lazily re-create it once the temperature recovers.
    if (video->high_temp) {
      if (video->adas_init)
        video_adas_exit(video);
      return true;
    } else {
      if (!video->adas_init) {
        if (video_adas_init(video)) {
          printf("NV12_ADAS: adas init fail\n");
          video_record_signal(video);
          return false;
        }
      }
    }

    // Process only every 4th frame, and only once the worker is running.
    i++;
    if (i % 4 == 0 && video->pthread_run && inBuf.get() && video->adas->startFlag) {
      // Take a free buffer from the recycle pool, if one is available.
      pthread_mutex_lock(&video->adas->dsp_pool_lock);
      if (!video->adas->dsp_pool.empty()) {
        iterator = video->adas->dsp_pool.begin();
        out = *iterator;
        video->adas->dsp_pool.pop_front();
      }
      pthread_mutex_unlock(&video->adas->dsp_pool_lock);

      if (out) {
        src_fd = (int)(inBuf->getFd());
        src_w = inBuf->getWidth();
        src_h = inBuf->getHeight();
        dst_fd = out->fd;
        dst_w = out->width;
        dst_h = out->height;

        // Downscale the captured NV12 frame into the 144x108 buffer.
        rk_rga_ionfd_to_ionfd_scal(video->adas->rga_fd,
                                   src_fd, src_w, src_h, RGA_FORMAT_YCBCR_420_SP,
                                   dst_fd, dst_w, dst_h, RGA_FORMAT_YCBCR_420_SP,
                                   0, 0, dst_w, dst_h, src_w, src_h);
#if READ_FROM_FILE
        // Debug-only path: overwrite the buffer with frames from a file.
        if(0 == frames) {         
          istream.open("/tmp/input.yuv",
                   std::ios_base::in | std::ios_base::binary);
          if (!istream) {
              printf("Cannot open input file.\n");
              return false;
          }
          istream.seekg(0, istream.end);
          length = istream.tellg();
          istream.seekg(0, istream.beg);
          frameCount = length/ADAS_BUFFER_SIZE;  
          printf("Read input data length=%d, fcnt=%d \n", length, frameCount);
        } 
        //Read image date from file
        if(frames<300)
        {
            istream.read((char*)out->buffer, ADAS_BUFFER_SIZE);
            if(frames == 299)
                istream.close();
        }        
#endif        
        frames++;

        // Hand the filled buffer to the tracking thread.
        pthread_mutex_lock(&video->adas->pool_lock);
        video->adas->pool.push_back(out);
        pthread_mutex_unlock(&video->adas->pool_lock);
      }
    }

    return true;
  }
};

#ifdef USE_WATERMARK
/*
 * Alpha-blit an 8-bit palettized watermark bitmap into an NV12 frame.
 *
 * dstbuf       NV12 frame: Y plane followed by interleaved UV plane.
 * resolution_* frame dimensions.
 * srcbuf       src_width*src_height palette indices; yuv444_palette_table
 *              maps each index to 0xAAVVUUYY and only fully-opaque
 *              (alpha == 0xff) pixels are drawn.
 * x_pos/y_pos  top-left corner in the frame; assumed even so the chroma
 *              plane lines up — confirm with callers.
 */
static void video_photo_insert_watermark(void* dstbuf,
                                         uint32_t resolution_width,
                                         uint32_t resolution_height,
                                         uint8_t* srcbuf,
                                         uint32_t src_width,
                                         uint32_t src_height,
                                         uint32_t x_pos,
                                         uint32_t y_pos) {
  uint32_t i, j;
  uint8_t index;
  uint8_t *y_addr = NULL, *uv_addr = NULL;
  uint8_t *start_y = NULL, *start_uv = NULL;

  /* fix: dropped the original `y_pos < 0` test — y_pos is unsigned, so the
   * comparison was tautologically false and provided no protection. */
  if ((x_pos + src_width) >= resolution_width ||
      (y_pos + src_height) >= resolution_height) {
    printf("%s error input number or position.\n", __func__);
    return;
  }

  y_addr = (unsigned char*)dstbuf + y_pos * resolution_width + x_pos;
  uv_addr = (unsigned char*)dstbuf + resolution_width * resolution_height +
            y_pos * resolution_width / 2 + x_pos;

  for (j = 0; j < src_height; j++) {
    start_y = y_addr + j * resolution_width;
    start_uv = uv_addr + j * resolution_width / 2;

    for (i = 0; i < src_width; i++) {
      index = srcbuf[i + j * src_width];
      /* Draw only fully-opaque palette entries. */
      if (((yuv444_palette_table[index] & 0xff000000) >> 24) == 0xff) {
        *start_y = yuv444_palette_table[index] & 0x000000ff;

        /* Chroma is subsampled 2x2: write UV once per even row/column. */
        if ((j % 2 == 0) && (i % 2 == 0)) {
          *start_uv = (yuv444_palette_table[index] & 0x0000ff00) >> 8;
          *(start_uv + 1) = (yuv444_palette_table[index] & 0x00ff0000) >> 16;
        }
      }

      start_y++;
      if ((j % 2 == 0) && (i % 2 == 0))
        start_uv += 2;
    }
  }
}

/*
 * Stamp the enabled watermark overlays (time / logo / license plate)
 * onto the scaled photo buffer before JPEG encoding.
 * Returns 0 always; silently does nothing when watermarking is off or
 * the current OSD buffer is not ready.
 */
static int video_photo_watermark(struct Video* video) {
  struct watermark_info* wm = &video->watermark;
  int idx = wm->buffer_index;

  if (!parameter_get_video_mark())
    return 0;

  uint8_t* osd_base = (uint8_t*)wm->watermark_data[idx].buffer;
  if (!osd_base)
    return 0;

  /* Timestamp overlay. */
  if (wm->type & WATERMARK_TIME) {
    video_photo_insert_watermark(
        video->photo.rga_photo.buffer, video->width, video->height,
        osd_base + wm->osd_data_offset.time_data_offset,
        wm->coord_info.time_bmp.width, wm->coord_info.time_bmp.height,
        wm->coord_info.time_bmp.x, wm->coord_info.time_bmp.y);
  }

  /* Logo overlay. */
  if (wm->type & WATERMARK_LOGO) {
    video_photo_insert_watermark(
        video->photo.rga_photo.buffer, video->width, video->height,
        osd_base + wm->osd_data_offset.logo_data_offset,
        wm->coord_info.logo_bmp.width, wm->coord_info.logo_bmp.height,
        wm->coord_info.logo_bmp.x, wm->coord_info.logo_bmp.y);
  }

  /* License-plate overlay: gated by its own runtime parameter. */
  if (parameter_get_licence_plate_flag() && (wm->type & WATERMARK_LICENSE)) {
    video_photo_insert_watermark(
        video->photo.rga_photo.buffer, video->width, video->height,
        osd_base + wm->osd_data_offset.license_data_offset,
        wm->coord_info.license_bmp.width, wm->coord_info.license_bmp.height,
        wm->coord_info.license_bmp.x, wm->coord_info.license_bmp.y);
  }

  return 0;
}
#endif
// Photo worker thread: sleeps until video_rga_photo_process() has scaled
// a frame into photo.rga_photo, then watermarks and JPEG-encodes it.
// Exits when video_photo_exit() sets photo.state = PHOTO_DISABLE.
static void* video_rga_photo_pthread(void* arg) {
  struct Video* video = (struct Video*)arg;

  while (video->pthread_run) {
    pthread_mutex_lock(&video->photo.mutex);
    // NOTE(review): a plain `if` around pthread_cond_wait does not guard
    // against spurious wakeups (POSIX allows them); the canonical form is
    // a `while (predicate)` loop. Confirm the photo.state transitions on
    // the signalling side before restructuring.
    if (video->photo.state != PHOTO_DISABLE)
      pthread_cond_wait(&video->photo.condition, &video->photo.mutex);
    pthread_mutex_unlock(&video->photo.mutex);

    // PHOTO_DISABLE is only set by video_photo_exit(): shut down.
    if (video->photo.state == PHOTO_DISABLE) {
      printf("receive the signal from video_photo_exit()\n");
      break;
    }

    void* buffer = video->photo.rga_photo.buffer;
    // NV12 frame size in bytes: width * height * 3 / 2.
    int size = video->photo.rga_photo.width * video->photo.rga_photo.height * 3 / 2;
    int fd = video->photo.rga_photo.fd;

#ifdef USE_WATERMARK
    video_photo_watermark(video);
#endif

    vpu_nv12_encode_mjpg(video, buffer, fd, size);

    // Let the recorder know the snapshot has been taken.
    video_record_takephoto_end(video);
  }

  pthread_exit(NULL);
}

/*
 * Scale the live frame (ION fd) into the photo buffer with RGA, then wake
 * the photo worker thread to encode it.
 * Returns 0 on success; on RGA failure, aborts the snapshot via
 * video_record_takephoto_end() and returns the RGA error code.
 */
static int video_rga_photo_process(struct Video* video, int fd) {
  int src_w = video->width;
  int src_h = video->height;
  int dst_w = video->photo.rga_photo.width;
  int dst_h = video->photo.rga_photo.height;
  int dst_fd = video->photo.rga_photo.fd;

  int ret = rk_rga_ionfd_to_ionfd_scal(video->photo.rga_fd,
                                       fd, src_w, src_h, RGA_FORMAT_YCBCR_420_SP,
                                       dst_fd, dst_w, dst_h, RGA_FORMAT_YCBCR_420_SP,
                                       0, 0, dst_w, dst_h, src_w, src_h);
  if (ret) {
    printf("%s rga fail!\n", __func__);
    video_record_takephoto_end(video);
    return ret;
  }

  /* Frame is ready: kick the encode thread waiting on the condition. */
  pthread_mutex_lock(&video->photo.mutex);
  pthread_cond_signal(&video->photo.condition);
  pthread_mutex_unlock(&video->photo.mutex);

  return 0;
}

/*
 * Allocate one zeroed dpp_buffer. On success *buffer points at it and 0
 * is returned; on allocation failure *buffer is NULL and -1 is returned.
 */
static int video_dpp_buffer_create(struct dpp_buffer** buffer) {
  *buffer = (struct dpp_buffer*)calloc(1, sizeof(**buffer));
  if (*buffer == NULL) {
    printf("Malloc memory for buffer failed.\n");
    return -1;
  }
  return 0;
}

/*
 * Wrap a DPP output frame in a dpp_buffer, taking an extra reference on
 * the underlying buffer so it survives frame deinit. Copies pts, ISP
 * metadata pointer, noise and sharpness info from the frame.
 * Returns 0 on success, -1 if allocation failed (*buffer untouched).
 */
static int video_dpp_buffer_create_from_frame(struct dpp_buffer** buffer,
                                              DppFrame frame) {
  dpp_buffer* wrapped = NULL;

  video_dpp_buffer_create(&wrapped);
  if (!wrapped)
    return -1;

  wrapped->buffer = dpp_frame_get_buffer(frame);
  dpp_buffer_inc_ref(wrapped->buffer);  /* released in ..._destroy() */
  wrapped->pts = dpp_frame_get_pts(frame);
  wrapped->isp_meta =
      (struct HAL_Buffer_MetaData*)dpp_frame_get_private_data(frame);
  dpp_frame_get_noise(frame, wrapped->noise);
  dpp_frame_get_sharpness(frame, &wrapped->sharpness);

  (*buffer) = wrapped;
  return 0;
}

/* Drop the reference taken at creation time and free the wrapper.
 * NULL-safe. */
static void video_dpp_buffer_destroy(struct dpp_buffer* buffer) {
  if (!buffer)
    return;
  dpp_buffer_dec_ref(buffer->buffer);
  free(buffer);
}

/*
 * Initialize the lock and condition of a buffer list.
 * Deliberately no memset: the struct embeds a std::list whose state a
 * memset would corrupt.
 */
static int video_dpp_buffer_list_init(struct dpp_buffer_list* list) {
  pthread_cond_init(&list->condition, NULL);
  pthread_mutex_init(&list->mutex, NULL);
  return 0;
}

/*
 * Drain and destroy a buffer list. Any buffers still queued at teardown
 * are released (and a warning printed), then the lock/condition are
 * destroyed. Always returns 0.
 */
static int video_dpp_buffer_list_deinit(struct dpp_buffer_list* list) {
  pthread_mutex_lock(&list->mutex);
  while (!list->buffers.empty()) {
    printf("DPP buffer list is not null when deinit.\n");
    video_dpp_buffer_destroy(list->buffers.front());
    list->buffers.pop_front();
  }
  pthread_mutex_unlock(&list->mutex);

  pthread_mutex_destroy(&list->mutex);
  pthread_cond_destroy(&list->condition);
  return 0;
}

/* Append a buffer to the list and wake one waiting consumer. */
static int video_dpp_buffer_list_push(struct dpp_buffer_list* list,
                                      struct dpp_buffer* buffer) {
  pthread_mutex_lock(&list->mutex);
  list->buffers.push_back(buffer);
  /* Signal while holding the lock so the waiter sees the new entry. */
  pthread_cond_signal(&list->condition);
  pthread_mutex_unlock(&list->mutex);
  return 0;
}

/*
 * Pop the oldest buffer from the list, blocking while the list is empty.
 * With enable_timeout the wait is bounded to ~1 second so callers can
 * re-check their stop flags; *buffer is set to NULL when nothing arrived
 * (timeout or spurious wakeup). Always returns 0.
 */
static int video_dpp_buffer_list_pop(struct dpp_buffer_list* list,
                                     struct dpp_buffer** buffer,
                                     bool enable_timeout) {
  pthread_mutex_lock(&list->mutex);

  if (list->buffers.empty()) {
    if (!enable_timeout) {
      pthread_cond_wait(&list->condition, &list->mutex);
    } else {
      /* pthread_cond_timedwait takes an absolute CLOCK_REALTIME time:
       * now + 1 second. */
      struct timeval now;
      struct timespec deadline;
      gettimeofday(&now, NULL);
      deadline.tv_sec = now.tv_sec + 1;
      deadline.tv_nsec = now.tv_usec * 1000;
      pthread_cond_timedwait(&list->condition, &list->mutex, &deadline);
    }
  }

  /* Re-check after the wait: it may have timed out or woken spuriously. */
  if (list->buffers.empty()) {
    (*buffer) = NULL;
  } else {
    (*buffer) = list->buffers.front();
    list->buffers.pop_front();
  }

  pthread_mutex_unlock(&list->mutex);
  return 0;
}

/* Wake one consumer blocked in video_dpp_buffer_list_pop() — used during
 * shutdown so waiters notice the stop flag. */
static void video_dpp_buffer_list_signal(struct dpp_buffer_list* list) {
  pthread_mutex_lock(&list->mutex);
  pthread_cond_signal(&list->condition);
  pthread_mutex_unlock(&list->mutex);
}

// Encode thread: pulls DPP output buffers and feeds them to the H.264
// encoder until pthread_run drops or the DPP pipeline sets stop_flag.
static void* video_encode_thread_func(void* arg) {
  struct Video* video = (struct Video*)arg;

  while (video->pthread_run && !video->dpp->stop_flag) {
    struct dpp_buffer* buffer = NULL;
    // Timed pop so the loop re-checks the stop flags about once a second.
    video_dpp_buffer_list_pop(&video->dpp->encode_buffer_list, &buffer, true);
    if (!buffer)
      continue;

    int fd = dpp_buffer_get_fd(buffer->buffer);
    size_t size = dpp_buffer_get_size(buffer->buffer);

    assert(fd > 0);

    if (video->save_en) {
      MppEncPrepCfg precfg;
      // Bug fix: the struct was previously left uninitialized apart from
      // the sharpen fields; zero it so no garbage reaches the encoder.
      memset(&precfg, 0, sizeof(precfg));

      // Push the per-frame sharpen parameters derived by DPP.
      precfg.change = MPP_ENC_PREP_CFG_CHANGE_SHARPEN;
      precfg.sharpen.enable_y = buffer->sharpness.src_shp_l;
      precfg.sharpen.enable_uv = buffer->sharpness.src_shp_c;
      precfg.sharpen.threshold = buffer->sharpness.src_shp_thr;
      precfg.sharpen.div = buffer->sharpness.src_shp_div;
      precfg.sharpen.coef[0] = buffer->sharpness.src_shp_w0;
      precfg.sharpen.coef[1] = buffer->sharpness.src_shp_w1;
      precfg.sharpen.coef[2] = buffer->sharpness.src_shp_w2;
      precfg.sharpen.coef[3] = buffer->sharpness.src_shp_w3;
      precfg.sharpen.coef[4] = buffer->sharpness.src_shp_w4;
      if (video->encode_handler)
        video->encode_handler->h264_encode_control(MPP_ENC_SET_PREP_CFG,
                                                   (void*)&precfg);
      if (h264_encode_process(video, NULL, fd, NULL, size, buffer->pts))
        printf("Encode failed!\n");
    }

    video_dpp_buffer_destroy(buffer);
  }

  pthread_exit(NULL);
}

// Photo thread: blocks (no timeout) for frames routed to the photo list,
// runs the RGA snapshot path, then serves any pending JPEG stream
// requests from the same frame.
void* video_photo_thread_func(void* arg) {
  struct Video* video = (struct Video*)arg;

  while (video->pthread_run && !video->dpp->stop_flag) {
    struct dpp_buffer* buffer = NULL;
    video_dpp_buffer_list_pop(&video->dpp->photo_buffer_list, &buffer, false);
    if (!buffer)
      continue;

    int fd = dpp_buffer_get_fd(buffer->buffer);
    assert(fd > 0);
    video_rga_photo_process(video, fd);

    for (int i = 0; i < JPEG_STREAM_NUM; ++i) {
      JpegStreamReceiver* receiver = video->jpeg_receiver[i];
      if (receiver && receiver->get_request_encode()) {
        // Fix: the loop previously re-fetched the fd into a shadowing
        // local with an identical value; reuse the one from above.
        int ret = receiver->process(fd, video->width, video->height,
                                    video->jpeg_config[i]);
        if (ret < 0) {
          // Encoding failed: drop the request and notify with an empty
          // result so the client is not left waiting.
          receiver->set_request_encode(false);
          receiver->notify(NULL, 0, 0, 0);
        }
      }
    }

    video_dpp_buffer_destroy(buffer);
  }

  pthread_exit(NULL);
}

static void* video_live_thread_func(void* arg) {
  struct Video* video = (struct Video*)arg;

  while (video->pthread_run && !video->dpp->stop_flag) {
    struct dpp_buffer* buffer = NULL;
    video_dpp_buffer_list_pop(&video->dpp->live_buffer_list, &buffer, true);
    if (!buffer)
      continue;
    int fd = dpp_buffer_get_fd(buffer->buffer);
    assert(fd > 0);
    if (video->ts_handler) {
      VideoConfig config;
      config.fmt = PIX_FMT_NV12;
      config.width = video->width;
      config.height = video->height;
      video->ts_handler->Process(fd, config, buffer->pts);
    }
    video_dpp_buffer_destroy(buffer);
  }
  pthread_exit(NULL);
}

// DPP dispatcher thread. Spawns the encode/live (with_mp) and photo
// consumer threads, then pumps frames out of the DPP context, wrapping
// each frame into per-consumer dpp_buffers (each holding its own ref on
// the underlying buffer). On exit it sets stop_flag and joins all
// consumers before terminating.
static void* video_dpp_thread_func(void* arg) {
  struct Video* video = (struct Video*)arg;

  // Create encode thread
  if (with_mp) {
    if (pthread_create(&video->dpp->encode_thread, &global_attr,
                       video_encode_thread_func, video)) {
      printf("Encode thread create failed!\n");
      goto out;
    }

    if (pthread_create(&video->dpp->live_thread, &global_attr,
                       video_live_thread_func, video)) {
      printf("Live thread create failed!\n");
      goto out;
    }
  }

  // Create photo thread
  if (pthread_create(&video->dpp->photo_thread, &global_attr,
                     video_photo_thread_func, video)) {
    printf("Photo pthread create failed!\n");
    goto out;
  }

  // DPP thread main loop. Get a frame from dpp, then construct buffers
  // pushed into next level threads for further processing.
  printf("DPP thread main loop start.\n");
  while (video->pthread_run && !video->high_temp && !video->dpp->exit) {
    DppFrame frame = NULL;
    DPP_RET ret = dpp_get_frame(video->dpp->context, (DppFrame*)&frame);
    if (!frame) {
      // Timeout is transient; any other failure ends the loop.
      if (ret == DPP_ERR_TIMEOUT) {
        printf("Get frame from dpp failed cause by timeout.\n");
        continue;
      } else {
        printf("Get frame failed, DPP request exit.\n");
        break;
      }
    }

    struct dpp_buffer* encode_buffer = NULL;
    struct dpp_buffer* photo_buffer = NULL;
    struct dpp_buffer* live_buffer = NULL;

    if (video->dpp->encode_thread) {
      video_dpp_buffer_create_from_frame(&encode_buffer, frame);
      video_dpp_buffer_list_push(&video->dpp->encode_buffer_list,
                                 encode_buffer);
    }

    // A pending snapshot request (PHOTO_ENABLE) claims exactly one frame;
    // PHOTO_BEGIN marks it in flight until takephoto_end resets it.
    if (video->photo.state == PHOTO_ENABLE && video->dpp->photo_thread) {
      video->photo.state = PHOTO_BEGIN;
      video_dpp_buffer_create_from_frame(&photo_buffer, frame);
      video_dpp_buffer_list_push(&video->dpp->photo_buffer_list, photo_buffer);
    }

    if (video->dpp->live_thread) {
      video_dpp_buffer_create_from_frame(&live_buffer, frame);
      video_dpp_buffer_list_push(&video->dpp->live_buffer_list,
                                 live_buffer);
    }

    // Safe to release the frame: each consumer buffer holds its own ref.
    dpp_frame_deinit((DppFrame*)frame);
  }

out:
  // Before exit dpp thread, we should wait photo/encode/display thread
  // have exited first. When stop_flag is set "true", these threads will
  // exit immediately.
  video->dpp->stop_flag = true;

  // Signal each list first so a consumer blocked in pop() wakes up and
  // observes stop_flag; then join it.
  video_dpp_buffer_list_signal(&video->dpp->encode_buffer_list);
  if (video->dpp->encode_thread) {
    // Release video encode wait condition first.

    printf("Encode thread exit start.\n");
    pthread_join(video->dpp->encode_thread, NULL);
    printf("Encode thread exit end.\n");
    video->dpp->encode_thread = 0;
  }

  video_dpp_buffer_list_signal(&video->dpp->photo_buffer_list);
  if (video->dpp->photo_thread) {
    // Release video photo wait condition first.

    printf("Photo thread exit start.\n");
    pthread_join(video->dpp->photo_thread, NULL);
    printf("Photo thread exit end.\n");
    video->dpp->photo_thread = 0;
  }

  video_dpp_buffer_list_signal(&video->dpp->live_buffer_list);
  if (video->dpp->live_thread) {
    printf("Live thread exit start.\n");
    pthread_join(video->dpp->live_thread, NULL);
    printf("Live thread exit end.\n");
    video->dpp->live_thread = 0;
  }

  pthread_exit(NULL);
}

static int video_dpp_init(struct Video* video) {
  DPP_RET ret = DPP_OK;

  printf("%s enter\n", __func__);
  video->dpp = new video_dpp();
  if (!video->dpp) {
    printf("new dpp() failed!\n");
    return -1;
  }

  video->dpp->context = 0;
  video->dpp->dpp_thread = 0;
  video->dpp->encode_thread = 0;
  video->dpp->photo_thread = 0;
  video->dpp->live_thread = 0;

  ret = dpp_create((DppCtx*)&video->dpp->context, DPP_FUN_3DNR);
  if (DPP_OK != ret) {
    printf(">> Test dpp_create failed.\n");
    return -1;
  }

  dpp_start(video->dpp->context);

  video_dpp_buffer_list_init(&video->dpp->encode_buffer_list);
  video_dpp_buffer_list_init(&video->dpp->photo_buffer_list);  
  video_dpp_buffer_list_init(&video->dpp->live_buffer_list);

  if (pthread_create(&video->dpp->dpp_thread, &global_attr,
                     video_dpp_thread_func, video)) {
    printf("%s pthread create err!\n", __func__);
    return -1;
  }

  video->dpp_init = true;
  printf("%s exit\n", __func__);

  return 0;
}

// Tear down the DPP pipeline. Order matters: request exit and stop the
// context first (unblocks dpp_get_frame), join the dispatcher thread
// (which in turn joins its consumer threads), then drain the lists and
// destroy the context.
static void video_dpp_exit(struct Video* video) {
  printf("%s enter\n", __func__);
  if (video->dpp) {
    video->dpp->exit = true;
    if (video->dpp->context)
      dpp_stop(video->dpp->context);

    if (video->dpp->dpp_thread) {
      printf("DPP thread exit start.\n");
      pthread_join(video->dpp->dpp_thread, NULL);
      printf("DPP thread exit end.\n");
      video->dpp->dpp_thread = 0;
    }

    // Consumer threads are already joined at this point, so the lists
    // can be drained without racing.
    video_dpp_buffer_list_deinit(&video->dpp->encode_buffer_list);
    video_dpp_buffer_list_deinit(&video->dpp->photo_buffer_list);
    video_dpp_buffer_list_deinit(&video->dpp->live_buffer_list);
    if (video->dpp->context) {
      dpp_destroy(video->dpp->context);
      video->dpp->context = 0;
    }

    delete video->dpp;
    video->dpp = NULL;
  }

  video->dpp_init = false;

  printf("%s exit\n", __func__);
}

static void video_dpp_packet_release(void* packet) {
  DppBufferInfo* buf_info = NULL;
  buf_info = (DppBufferInfo*)dpp_packet_get_buf_info(packet);

  if (!buf_info) {
    printf("video_dpp_packet_release failed!\n");
    return;
  }

  BufferBase* cam_buffer = (BufferBase*)buf_info->private_data;
  if (cam_buffer)
    cam_buffer->decUsedCnt();

  delete buf_info;
}

static int video_dpp_packet_process(struct Video* video,
                                    shared_ptr<BufferBase>& inBuf) {
  DppBufferInfo* buf_info;
  DPP_RET ret = DPP_OK;
  DppPacket packet_in = 0;
  assert(inBuf->getMetaData());
  struct v4l2_buffer_metadata_s* metadata_drv =
      (struct v4l2_buffer_metadata_s*)inBuf->getMetaData()->metedata_drv;
  assert(metadata_drv);
  struct timeval time_val = metadata_drv->frame_t.vs_t;
  int rt = 0;
  struct HAL_Buffer_MetaData* meta = inBuf->getMetaData();

  if (!video->dpp)
    return -1;

  buf_info = new DppBufferInfo();
  if (!buf_info)
    return -1;

  buf_info->type = DPP_BUFFER_TYPE_ION;
  buf_info->size = ((inBuf->getWidth() + 15) & ~15) *
                   ((inBuf->getHeight() + 15) & ~15) * 3 / 2;
  buf_info->ptr = inBuf->getVirtAddr();
  buf_info->hnd = inBuf->getHandle();
  buf_info->fd = (int)(inBuf->getFd());
  buf_info->phys = (unsigned int)(inBuf->getPhyAddr());
  buf_info->private_data = inBuf.get();

  ret = dpp_packet_init((DppPacket*)&packet_in, inBuf->getWidth(),
                        inBuf->getHeight());
  if (DPP_OK != ret) {
    printf("dpp_packet_init_with_buf_info failed.\n");
    rt = -1;
    goto packet_process_exit;
  }
  ret = dpp_packet_set_cb(packet_in, video_dpp_packet_release);
  if (DPP_OK != ret) {
    printf("dpp_packet_set_cb failed.\n");
    rt = -1;
    goto packet_process_exit;
  }
  dpp_packet_set_buf_info(packet_in, buf_info);

  dpp_packet_set_pts(packet_in, time_val);

  dpp_packet_set_noise(packet_in, &user_noise);

  dpp_packet_set_idc_enabled(packet_in, parameter_get_video_idc());
  dpp_packet_set_nr_enabled(packet_in, parameter_get_video_3dnr());

  if (meta) {
    struct ispinfo info;
    memset(&info, 0, sizeof(info));
    info.exp_gain = meta->exp_gain;
    info.exp_time = meta->exp_time;
    info.doortype = meta->awb.DoorType;
    info.wb_gain_red = 0;  // TODO, add other isp information below
    info.wb_gain_green_r = 0;
    info.wb_gain_blue = 0;
    info.wb_gain_green_b = 0;
    //info.luma_nr_en = meta->dsp_3DNR.luma_nr_en;
    //info.chroma_nr_en = meta->dsp_3DNR.chroma_nr_en;
    info.shp_en = meta->dsp_3DNR.shp_en;
    //info.luma_nr_level = meta->dsp_3DNR.luma_nr_level;
    //info.chroma_nr_level = meta->dsp_3DNR.chroma_nr_level;
    info.shp_level = meta->dsp_3DNR.shp_level;
    dpp_packet_set_private_data(packet_in, meta);
    dpp_packet_set_params(packet_in, &info, sizeof(info));
  }

  ret = dpp_put_packet(video->dpp->context, packet_in);
  if (DPP_OK != ret) {
    printf("put_packet failed.\n");
    rt = -1;
    goto packet_process_exit;
  }

packet_process_exit:
  if (packet_in)
    dpp_packet_deinit((DppPacket*)packet_in);

  return rt;
}

/*
 * Thermal throttling: at THERMAL_LEVEL4 halve the front-camera frame
 * rate; at any other level restore the configured rate. LEVEL2/LEVEL3
 * also clear the high-temperature flag.
 *
 * Fix: the original switch had the `default:` label nested inside the
 * LEVEL4 case's braces — legal C++ with identical semantics, but
 * misleading; the switch is now conventionally structured.
 */
static void video_record_thermal_fun(struct Video* video) {
  int fps = parameter_get_video_frontcamera_fps();
  int status = thermal_get_status();

  switch (status) {
    case THERMAL_LEVEL2:
      video->high_temp = false;
      break;
    case THERMAL_LEVEL3:
      video->high_temp = false;	//true;
      break;
    case THERMAL_LEVEL4:
      /* Drop to half rate; avoid redundant setFps calls. */
      if (1 != video->fps_n || (fps / 2) != video->fps_d)
        video_set_fps(video, 1, (fps / 2));
      break;
    default:
      break;
  }

  /* Any level below 4: make sure the full configured rate is active. */
  if (status != THERMAL_LEVEL4)
    if (1 != video->fps_n || fps != video->fps_d)
      video_set_fps(video, 1, fps);
}

/*
 * Push a new frame rate (numerator/denominator) to the camera HAL and,
 * on success, mirror it into video->fps_n / fps_d.
 * Returns the HAL result (0 on success).
 */
static int video_set_fps(struct Video* video, int numerator, int denominator) {
  HAL_FPS_INFO_t fps;
  fps.numerator = numerator;
  fps.denominator = denominator;
  printf("fps : %d/%d\n", fps.numerator, fps.denominator);

  int ret = video->hal->dev->setFps(fps);
  if (ret == 0) {
    video->fps_n = numerator;
    video->fps_d = denominator;
    printf("video set fps is %.2f\n", 1.0 * video->fps_d / video->fps_n);
  }
  return ret;
}

// Main-path (MP) stream processing unit. Receives full-resolution frames
// from the ISP main path and routes them either through the DPP (3DNR)
// pipeline, or — when the device is hot or DPP is unavailable — directly
// through encode/live/photo handling in-line.
class MP_DSP : public StreamPUBase {
  struct Video* video;

 public:
  MP_DSP(struct Video* p) : StreamPUBase("MP_DSP", true, true) { video = p; }
  ~MP_DSP() {}
  // Called per frame by the stream framework. Returns false only when a
  // DPP init/submit failure should abort the record session.
  bool processFrame(shared_ptr<BufferBase> inBuf,
                    shared_ptr<BufferBase> outBuf) {
    static unsigned char cnt = 0;
    static bool print = true;
    video_print_name("MP_DSP", &print);
    //static struct timeval t0;
    //static int i = 0;
    //i++;
    //fps_count(&t0,&i,"MP_DSP");
    video->fps_total++;

    cnt++;
    // Re-evaluate thermal throttling on every frame.
    video_record_thermal_fun(video);

    if (with_mp) {
      if (video->pthread_run && inBuf.get()) {
        if (!video->high_temp && is_record_mode) {
          // Normal path: lazily (re)create the DPP pipeline and hand the
          // frame to it.
          if (!video->dpp_init) {
            printf("MP:temperature is low.\n");
            if (video_dpp_init(video)) {
              printf("MP: dpp init fail!\n");
              video_record_signal(video);
              return false;
            }
          }

          // DPP holds the buffer until its release callback runs; the
          // use count is dropped there (or below on submit failure).
          inBuf->incUsedCnt();

          if (video_dpp_packet_process(video, inBuf)) {
            inBuf->decUsedCnt();
            printf("PU push BufferBase to DPP failed.\n");
            return false;
          }
        } else {
          // High-temperature / non-record path: DPP is torn down and
          // encode, live-stream and photo all run synchronously here
          // (frame rate is reduced, so one-shot processing is feasible).
          void* virt = inBuf->getVirtAddr();
          int fd = inBuf->getFd();
          void* hnd = inBuf->getHandle();
          size_t size = inBuf->getDataSize();
          struct timeval time;
          gettimeofday(&time, NULL);

          if (video->dpp_init) {
            printf("MP:temperature is high.\n");
            video_dpp_exit(video);
          }

          // high temperature fps is less than 30,so all process in one.
          if (is_record_mode && video->save_en && video->pthread_run) {
            if (h264_encode_process(video, virt, fd, hnd, size, time)) {
              video_record_signal(video);
            }
          }

          if (video->ts_handler) {
            VideoConfig config;
            config.fmt = PIX_FMT_NV12;
            config.width = video->width;
            config.height = video->height;
            video->ts_handler->Process(fd, config, time);
          }

          // Claim this frame for a pending snapshot request.
          if (video->photo.state == PHOTO_ENABLE) {
            video->photo.state = PHOTO_BEGIN;
            video_rga_photo_process(video, fd);
          }
        }
      }
    }

    return true;
  }
};

/*
 * Map the UI white-balance index (0..4) onto a HAL mode and apply it.
 * Returns 0 on success, -1 on invalid index or HAL failure.
 */
static int video_set_white_balance(struct Video* video, int i) {
  static const enum HAL_WB_MODE wb_modes[] = {
      HAL_WB_AUTO,             // 0
      HAL_WB_DAYLIGHT,         // 1
      HAL_WB_FLUORESCENT,      // 2
      HAL_WB_CLOUDY_DAYLIGHT,  // 3
      HAL_WB_INCANDESCENT,     // 4
  };

  if (i < 0 || i >= (int)(sizeof(wb_modes) / sizeof(wb_modes[0]))) {
    printf("video%d set white balance input error!\n", video->deviceid);
    return -1;
  }

  if (video->hal->dev->setWhiteBalance(wb_modes[i])) {
    printf("video%d set white balance failed!\n", video->deviceid);
    return -1;
  }

  printf("video%d set white balance sucess!\n", video->deviceid);

  return 0;
}

/*
 * Map the UI exposure-compensation index (0..4) onto an AE bias of
 * -300..+100 (steps of 100) and apply it through the HAL.
 * Returns 0 on success, -1 on invalid index or HAL failure.
 */
static int video_set_exposure_compensation(struct Video* video, int i) {
  static const int bias_table[] = { -300, -200, -100, 0, 100 };

  if (i < 0 || i >= (int)(sizeof(bias_table) / sizeof(bias_table[0]))) {
    printf("video%d set AeBias input error!\n", video->deviceid);
    return -1;
  }

  if (video->hal->dev->setAeBias(bias_table[i])) {
    printf("video%d set AeBias failed!\n", video->deviceid);
    return -1;
  }

  printf("video%d set AeBias success!\n", video->deviceid);

  return 0;
}

/*
 * Apply the anti-flicker (power line frequency) mode; only 1 and 2 are
 * valid HAL_AE_FLK_MODE values here.
 * NOTE(review): an invalid value returns 0 (non-fatal) — apparently
 * deliberate so a bad stored parameter does not abort init; confirm.
 */
static int video_set_power_line_frequency(struct Video* video, int i) {
  if (i != 1 && i != 2) {
    printf("%s error paramerter:%d!\n", __func__, i);
    return 0;
  }

  if (video->hal->dev->setAntiBandMode((enum HAL_AE_FLK_MODE)i) != 0) {
    printf("video%d set power line frequency failed!\n", video->deviceid);
    return -1;
  }

  return 0;
}

/*
 * Apply the persisted camera settings after HW init: anti-flicker
 * (non-CIF only), white balance, and exposure compensation (ISP only).
 * Returns 0 on success, -1 on the first failing setting.
 */
static int video_init_setting(struct Video* video) {
  if (video->type != VIDEO_TYPE_CIF &&
      video_set_power_line_frequency(video, parameter_get_video_fre()))
    return -1;

  if (video_set_white_balance(video, parameter_get_wb()))
    return -1;

  if (video->type == VIDEO_TYPE_ISP &&
      video_set_exposure_compensation(video, parameter_get_ex()))
    return -1;

  return 0;
}

static int isp_video_init(struct Video* video,
                          int num,
                          unsigned int width,
                          unsigned int height,
                          unsigned int fps) {
  int i = 0;
  bool exist = false;
  frm_info_t in_frmFmt = {
      .frmSize = {width, height}, .frmFmt = HAL_FRMAE_FMT_NV12, .colorSpace = color_space, .fps = fps,
  };
  frm_info_t spfrmFmt = {
          .frmSize = {ISP_SP_WIDTH, ISP_SP_HEIGHT},
          .frmFmt = HAL_FRMAE_FMT_NV12,
          .colorSpace = color_space,
          .fps = fps,
  };
  
  memcpy(&video->spfrmFmt, &spfrmFmt, sizeof(frm_info_t));

  video->hal->dev = getCamHwItf(&(g_test_cam_infos.isp_dev));
  //(shared_ptr<CamHwItf>)(new CamIsp11DevHwItf());
  if (!video->hal->dev.get()) {
    printf("no memory!\n");
    return -1;
  }

  for (i = 0; i < g_test_cam_infos.num_camers; i++) {
    if (g_test_cam_infos.cam[i]->type == RK_CAM_ATTACHED_TO_ISP) {
      printf("connected isp camera name %s,input id %d\n",
             g_test_cam_infos.cam[i]->name, g_test_cam_infos.cam[i]->index);

      if (video->hal->dev->initHw(g_test_cam_infos.cam[i]->index) == false) {
        printf("video%d init fail!\n", num);
        return -1;
      }

      exist = true;
      break;
    }
  }

  if (!exist)
    return -1;

#if 0
    if (video_try_format(video,in_frmFmt)) {
        printf("video try format failed!\n");
        return -1;
    }
#else
  memcpy(&video->frmFmt, &in_frmFmt, sizeof(frm_info_t));
  video->type = VIDEO_TYPE_ISP;
  video->width = video->frmFmt.frmSize.width;
  video->height = video->frmFmt.frmSize.height;
#endif

  if (video_init_setting(video))
    return -1;

  return 0;
}

/*
 * Set up the ISP sub path (SP): acquire the path, prepare 4 buffers at
 * the SP format, apply the frame rate and, when ADAS is enabled, attach
 * the NV12_ADAS processing unit as a buffer notifier.
 * Returns 0 on success, -1 on failure.
 */
int isp_video_path_sp(struct Video* video) {
  video->hal->spath = video->hal->dev->getPath(CamHwItf::SP);
  if (!video->hal->spath.get()) {
    printf("%s:path doesn't exist!\n", __func__);
    return -1;
  }

  bool ok = video->hal->spath->prepare(video->spfrmFmt, 4,
                                       *(video->hal->bufAlloc.get()),
                                       false, 0);
  if (!ok) {
    printf("sp prepare faild!\n");
    return -1;
  }

  printf("sp: width = %4d,height = %4d\n", video->spfrmFmt.frmSize.width,
         video->spfrmFmt.frmSize.height);

  video_set_fps(video, 1, video->spfrmFmt.fps);

  if (with_adas) {
    video->hal->nv12_adas = shared_ptr<NV12_ADAS>(new NV12_ADAS(video));
    if (!video->hal->nv12_adas.get()) {
      printf("new NV12_ADAS failed!\n");
      return -1;
    }
    video->hal->spath->addBufferNotifier(video->hal->nv12_adas.get());
    video->hal->nv12_adas->prepare(video->spfrmFmt, 0, NULL);
  }

  return 0;
}

/*
 * Set up the ISP main path (MP): acquire the path, prepare 5 buffers at
 * the full-resolution format, apply the frame rate and attach the MP_DSP
 * processing unit as a buffer notifier.
 * Fix: removed the unused local `int i`.
 * Returns 0 on success, -1 on failure.
 */
int isp_video_path_mp(struct Video* video) {
  video->hal->mpath = video->hal->dev->getPath(CamHwItf::MP);
  if (video->hal->mpath.get() == NULL) {
    printf("%s:path doesn't exist!\n", __func__);
    return -1;
  }

  if (video->hal->mpath->prepare(
          video->frmFmt, 5, *(video->hal->bufAlloc.get()), false, 0) == false) {
    printf("mp prepare faild!\n");
    return -1;
  }
  printf("mp: width = %4d,height = %4d\n", video->frmFmt.frmSize.width,
         video->frmFmt.frmSize.height);

  video_set_fps(video, 1, video->frmFmt.fps);
  video->hal->mp_dsp = shared_ptr<MP_DSP>(new MP_DSP(video));
  if (!video->hal->mp_dsp.get()) {
    printf("new MP_DSP failed!\n");
    return -1;
  }
  video->hal->mpath->addBufferNotifier(video->hal->mp_dsp.get());
  video->hal->mp_dsp->prepare(video->frmFmt, 0, NULL);

  return 0;
}

/*
 * Create the ION buffer allocator shared by both ISP paths, then set up
 * the main path (with_mp) and sub path (with_sp) as configured.
 * Returns 0 on success, -1 on the first failure.
 */
static int isp_video_path(struct Video* video) {
  video->hal->bufAlloc =
      shared_ptr<IonCameraBufferAllocator>(new IonCameraBufferAllocator());
  if (!video->hal->bufAlloc.get()) {
    printf("new IonCameraBufferAllocator failed!\n");
    return -1;
  }

  if (with_mp && isp_video_path_mp(video))
    return -1;

  if (with_sp && isp_video_path_sp(video))
    return -1;

  return 0;
}

/* Start the ISP sub path and, when enabled, the ADAS processing unit.
 * Returns 0 on success, -1 on failure. */
int isp_video_start_sp(struct Video* video) {
  if (video->hal->spath->start() == false) {
    printf("spath start failed!\n");
    return -1;
  }

  if (with_adas && video->hal->nv12_adas->start() == false) {
    printf("nv12_adas start failed!\n");
    return -1;
  }

  return 0;
}

/* Start the ISP main path and its MP_DSP processing unit.
 * Returns 0 on success, -1 on failure. */
int isp_video_start_mp(struct Video* video) {
  if (video->hal->mpath->start() == false) {
    printf("mpath start failed!\n");
    return -1;
  }

  if (video->hal->mp_dsp->start() == false) {
    printf("mp_dsp start failed!\n");
    return -1;
  }

  return 0;
}

/* Start the configured ISP paths (main first, then sub).
 * Returns 0 on success, -1 on the first failure. */
static int isp_video_start(struct Video* video) {
  if (with_mp && isp_video_start_mp(video))
    return -1;

  if (with_sp && isp_video_start_sp(video))
    return -1;

  return 0;
}

/* Tear down the ISP sub path: detach/stop the ADAS unit first (it feeds
 * off spath buffers), then stop the path and release its buffers. */
void isp_video_deinit_sp(struct Video* video) {
  if (with_adas && video->hal->nv12_adas.get()) {
    video->hal->spath->removeBufferNotifer(video->hal->nv12_adas.get());
    video->hal->nv12_adas->stop();
    video->hal->nv12_adas->releaseBuffers();
  }

  if (video->hal->spath.get()) {
    video->hal->spath->stop();
    video->hal->spath->releaseBuffers();
  }
}

/* Tear down the ISP main path: stop it and release its buffers.
 * Fix: removed the unused local `int i`. */
void isp_video_deinit_mp(struct Video* video) {
  if (video->hal->mpath.get()) {
    video->hal->mpath->stop();
    video->hal->mpath->releaseBuffers();
  }
}

/* Detach MP_DSP from the main path and stop it (buffers are released
 * separately via isp_video_mp_dsp_release). */
void isp_video_mp_dsp_stop(struct Video* video) {
  if (!video->hal->mp_dsp.get())
    return;
  video->hal->mpath->removeBufferNotifer(video->hal->mp_dsp.get());
  video->hal->mp_dsp->stop();
}

void isp_video_mp_dsp_release(struct Video* video) {
  if (video->hal->mp_dsp.get())
    video->hal->mp_dsp->releaseBuffers();
}

/* Full ISP teardown: deinit the configured paths, then the camera HW. */
static void isp_video_deinit(struct Video* video) {
  if (with_mp)
    isp_video_deinit_mp(video);

  if (with_sp)
    isp_video_deinit_sp(video);

  if (video->hal->dev.get())
    video->hal->dev->deInitHw();
}

// must be called among locking notelock
// Append `video` to the tail of the global doubly-linked device list.
// Caller must hold notelock.
static inline void video_record_addnode(struct Video* video) {
  struct Video* tail = getfastvideo();

  if (!tail) {
    // Empty list: new node becomes the head.
    video_list = video;
    return;
  }

  // Walk to the last node and link the new one after it.
  while (tail->next)
    tail = tail->next;
  video->pre = tail;
  tail->next = video;
}

/*
 * Unlink `video` from the global device list and destroy it (watermark
 * state, HAL object, record mutex/cond, the node itself), then refresh
 * the record-time callback. Takes notelock for the duration.
 *
 * Fixes: removed two dead list traversals whose results were never used
 * (the first loop always ran video_cur to NULL, making the second
 * unreachable search pointless), an empty else branch, and the redundant
 * `if (video)` guard before free() — video is dereferenced throughout.
 */
static void video_record_deletenode(struct Video* video) {
  pthread_rwlock_wrlock(&notelock);

  /* Unlink from the doubly-linked list. */
  if (video->pre == 0) {
    video_list = video->next;
    if (video_list)
      video_list->pre = 0;
  } else {
    video->pre->next = video->next;
    if (video->next)
      video->next->pre = video->pre;
  }

  video->pre = NULL;
  video->next = NULL;

#ifdef USE_WATERMARK
  watermark_deinit(&video->watermark);
#endif

  if (video->hal) {
    delete video->hal;
    video->hal = NULL;
  }

  pthread_mutex_destroy(&video->record_mutex);
  pthread_cond_destroy(&video->record_cond);

  free(video);

  set_record_time_cb();
  pthread_rwlock_unlock(&notelock);
}

// must be called among locking notelock
/*
 * Move the front camera (bus info matching FRONT, non-H264 USB type) to
 * the head of the global device list so it is recorded first, then
 * refresh the record-time callback. Caller must hold notelock.
 *
 * Fix: removed the trailing dead traversals (walking the list with no
 * side effects and discarding the cursor) that followed the relink.
 */
static inline void video_record_set_front_camera(void) {
  struct Video* video = getfastvideo();

  while (video) {
    if (strstr((char*)video->businfo, FRONT) &&
        video->usb_type != USB_TYPE_H264 && video->pre) {
      /* Unlink the front camera from its current position... */
      struct Video* head = getfastvideo();
      struct Video* pre = video->pre;
      struct Video* next = video->next;
      pre->next = next;
      if (next)
        next->pre = pre;
      /* ...and splice it in front of the old head. */
      video_list = video;
      video_list->pre = NULL;
      video_list->next = head;
      head->pre = video_list;
      break;
    }
    video = video->next;
  }

  set_record_time_cb();
}

/*
 * Busy-wait (yielding the CPU) until *flag goes false, logging `name`
 * roughly once per second so a stuck flag is visible.
 */
static void video_record_wait_flag(const bool* flag, const char* name) {
  struct timeval last_log;
  gettimeofday(&last_log, NULL);

  while (*flag) {
    struct timeval now;
    gettimeofday(&now, NULL);
    if (now.tv_sec - last_log.tv_sec > 0) {
      printf("%s %s\n", __func__, name);
      last_log = now;
    }
    pthread_yield();
  }
}

// Set up the still-photo pipeline for one video node: mutex/condition for the
// photo worker, an RGA scratch buffer, the NV12->JPEG encoder, the RGA device
// fd, and the worker thread itself. Returns 0 on success, -1 on failure.
static int video_photo_init(struct Video *video) {
  // USB MJPEG cameras deliver JPEG frames directly; no photo pipeline needed.
  if (video->type == VIDEO_TYPE_USB && video->usb_type == USB_TYPE_MJPEG)
    return 0;

  pthread_mutex_init(&video->photo.mutex, NULL);
  pthread_cond_init(&video->photo.condition, NULL);

  // ION buffer used as the RGA output for the snapshot frame.
  if (video_ion_alloc(&video->photo.rga_photo, video->width, video->height))
    return -1;

  if (vpu_nv12_encode_jpeg_init(&video->photo.encode, video->width, video->height))
    return -1;

  DEBUG_INFO("videoWH:%d,%d jpegWH:%d,%d\n", video->width, video->height, video->photo.encode.width, video->photo.encode.height);
  if ((video->photo.rga_fd = rk_rga_open()) <= 0)
    return -1;

  // NOTE(review): the failure paths above release nothing locally; the caller
  // (video_record) runs video_photo_exit() on its error path, which frees
  // these resources — confirm that pairing before adding local cleanup here,
  // as it would double-free.
  if (pthread_create(&video->photo.pid, &global_attr, video_rga_photo_pthread, video)) {
    printf("%s pthread create fail!\n", __func__);
    return -1;
  }

  return 0;
}

// Tear down the still-photo pipeline created by video_photo_init().
static void video_photo_exit(struct Video *video) {
  // Mirror of the early-out in video_photo_init(): nothing was set up.
  if (video->type == VIDEO_TYPE_USB && video->usb_type == USB_TYPE_MJPEG)
    return;

  // NOTE(review): the encoder and ION buffer are released *before* the photo
  // worker is joined below; this assumes the worker no longer touches them by
  // this point — confirm against video_rga_photo_pthread.
  vpu_nv12_encode_jpeg_done(&video->photo.encode);
  video_ion_free(&video->photo.rga_photo);

  if (video->photo.pid) {
    // Wake the worker with PHOTO_DISABLE so it can exit, then reap it.
    pthread_mutex_lock(&video->photo.mutex);
    video->photo.state = PHOTO_DISABLE;
    pthread_cond_signal(&video->photo.condition);
    pthread_mutex_unlock(&video->photo.mutex);
    pthread_join(video->photo.pid, NULL);
  }

  rk_rga_close(video->photo.rga_fd);

  pthread_mutex_destroy(&video->photo.mutex);
  pthread_cond_destroy(&video->photo.condition);
}

// Record-thread entry point: brings up the optional DPP and ADAS pipelines
// and the photo pipeline, starts ISP streaming, then blocks in
// video_record_wait() until signalled; the record_exit tail tears everything
// down in reverse order and removes this node/thread from global bookkeeping.
static void* video_record(void* arg) {
  struct Video* video = (struct Video*)arg;
  struct timeval t0;  // NOTE(review): unused in this function

  if (video->type == VIDEO_TYPE_ISP && with_mp && is_record_mode) {
    if (video_dpp_init(video)) {
      printf("test init failed!\n");
      goto record_exit;
    }
  }

  if (video->type == VIDEO_TYPE_ISP && with_adas && with_sp) {
    if (video_adas_init(video)) {
      printf("adas init failed!\n");
      goto record_exit;
    }
  }

  if (video_photo_init(video))
    goto record_exit;

  if (video->type == VIDEO_TYPE_ISP) {
    if (isp_video_path(video))
      goto record_exit;

    if (isp_video_start(video)) {
      printf("isp video start err!\n");
      goto record_exit;
    }
  } 
  else {
    // Only ISP capture is supported here; any other type bails out.
    goto record_exit;
  }

  printf("%s start\n", __func__);
  video->valid = true;
  // Blocks until video_record_signal() is delivered for this node.
  video_record_wait(video);

record_exit:
  // Let any in-flight MP4 write and photo capture finish before teardown.
  video_record_wait_flag(&video->mp4_encoding, "mp4_encoding");
  while(video->photo.state != PHOTO_END)
    pthread_yield();

  video_encode_exit(video);

  // MP_DSP release buffer is asynchronous, we need stop MP_DSP first,
  // and wait dpp release the buffer, the last release MP_DSP buffer.
  if (video->type == VIDEO_TYPE_ISP && with_mp && is_record_mode) {
    isp_video_mp_dsp_stop(video);
    video_dpp_exit(video);
    isp_video_mp_dsp_release(video);
  }

  if (video->type == VIDEO_TYPE_ISP)
    isp_video_deinit(video);

  if (video->type == VIDEO_TYPE_ISP && with_adas &&  with_sp)
    video_adas_exit(video);

  video_photo_exit(video);

  // Unlink and free the node; 'video' is dangling after this call.
  video_record_deletenode(video);
  video = NULL;

  // uevent call video_record_deletevideo() or other error occur, need to detach self
  pthread_rwlock_wrlock(&notelock);
  if (record_init_flag) {
    printf("pthread_detach self: %lu\n", pthread_self());
    pthread_detach(pthread_self());
    for (list<pthread_t>::iterator it = record_id_list.begin();
         it != record_id_list.end(); ++it) {
      if (*it == pthread_self()) {
        record_id_list.erase(it);
        break;
      }
    }
  }
  pthread_rwlock_unlock(&notelock);

  pthread_exit(NULL);
}

// Query the V4L2 bus info string of /dev/video<id> into video->businfo.
// Returns 0 on success, -1 if the device cannot be opened or queried.
static int video_record_query_businfo(struct Video* video, int id) {
  int fd = -1;
  char dev[20] = {0};
  struct v4l2_capability cap;

  snprintf(dev, sizeof(dev), "/dev/video%d", id);
  fd = open(dev, O_RDWR);
  // fix: 0 is a valid file descriptor; only a negative return means failure.
  if (fd < 0) {
    printf("open %s failed\n",dev);
    return -1;
  }

  if (ioctl(fd, VIDIOC_QUERYCAP, &cap)) {
    printf("%s VIDIOC_QUERYCAP failed!\n",dev);
    close(fd);
    return -1;
  }

  close(fd);

  // fix: bound the copy by the SOURCE as well — cap.bus_info is a fixed
  // 32-byte array, so copying sizeof(video->businfo) bytes out of it would
  // over-read whenever the destination is larger.
  {
    size_t n = sizeof(video->businfo) < sizeof(cap.bus_info)
                   ? sizeof(video->businfo)
                   : sizeof(cap.bus_info);
    memcpy(video->businfo, cap.bus_info, n);
  }
  printf("%s businfo:%s\n",dev,video->businfo);

  return 0;
}

// Caller must hold notelock. Returns true when an ISP camera node is
// already present in the global video list.
static inline bool video_record_isp_have_added(void) {
  for (struct Video* node = getfastvideo(); node; node = node->next) {
    if (node->type == VIDEO_TYPE_ISP)
      return true;
  }
  return false;
}

// Begin MP4 recording on this node. When the node has no audio capture of
// its own, the shared audio encoder's packet dispatcher is attached first.
static void start_record(struct Video* vnode) {
  EncodeHandler* handler = vnode->encode_handler;
  if (!handler)
    return;
  assert(!(vnode->encode_status & RECORDING_FLAG));
  if (!handler->get_audio_capture())
    global_audio_ehandler.AddPacketDispatcher(
        handler->get_h264aac_pkt_dispatcher());
  handler->send_record_mp4_start(global_audio_ehandler.GetEncoder());
  vnode->encode_status |= RECORDING_FLAG;
}

// Stop MP4 recording on this node, detaching the shared audio dispatcher if
// it was attached by start_record(). No-op for invalid or idle nodes.
static void stop_record(struct Video* vnode) {
  if (!vnode->valid)
    return;
  EncodeHandler* handler = vnode->encode_handler;
  if (!handler || !(vnode->encode_status & RECORDING_FLAG))
    return;
  handler->send_record_mp4_stop(NULL);
  if (!handler->get_audio_capture())
    global_audio_ehandler.RmPacketDispatcher(
        handler->get_h264aac_pkt_dispatcher());
  vnode->encode_status &= ~RECORDING_FLAG;
}

// Probe /dev/video<id>; when it is the front ISP camera, allocate a Video
// node, initialize ISP capture, link the node into the global list, set up
// the watermark and encoder, and spawn its record thread.
// Returns 0 on success (or when id is simply out of range), -1 on error.
// check_record_init != 0 makes the call fail unless video_record_init() has
// completed (record_init_flag set).
extern "C" int video_record_addvideo(int id,
                                     struct ui_frame* front,
                                     char check_record_init) {
  struct Video* video;
  int width = 0, height = 0, fps = 0;
  pthread_attr_t attr;
  int ret = 0;

  pthread_rwlock_wrlock(&notelock);

  if (check_record_init && !record_init_flag) {
    ret = -1;
    goto addvideo_ret;
  }

  // Out-of-range ids are not an error: video_record_init probes 0..MAX-1.
  if (id < 0 || id >= MAX_VIDEO_DEVICE) {
    printf("video%d exit!\n", id);
    ret = 0;
    goto addvideo_ret;
  }

  video = (struct Video*)calloc(1, sizeof(struct Video));
  if (!video) {
    printf("no memory!\n");
    goto addvideo_exit;
  }

  pthread_mutex_init(&video->record_mutex, NULL);
  pthread_cond_init(&video->record_cond, NULL);

  // Mark ION/fd fields invalid so later cleanup can tell what was allocated.
  video->photo.rga_photo.client = -1;
  video->photo.rga_photo.fd = -1;
  video->photo.encode.jpeg_enc_out.client = -1;
  video->photo.encode.jpeg_enc_out.fd = -1;
  video->pthread_run = 1;
  video->photo.state = PHOTO_END;

  if (video_record_query_businfo(video, id))
    goto addvideo_exit;

  // Only a single ISP camera is accepted; anything else is rejected.
  if (strstr((char*)video->businfo, "isp")) {
    if (video_record_isp_have_added())
      goto addvideo_exit;
    else
      video->type = VIDEO_TYPE_ISP;
  } else {
    printf("no isp camera , businfo error!\n");
    goto addvideo_exit;
  }

  // The front camera takes its geometry and frame rate from the UI request.
  if (strstr((char*)video->businfo, FRONT)) {
    width = front->width;
    height = front->height;
    fps = front->fps;
    video->front = true;
    video->fps_n = 1;
    video->fps_d = fps;
  }

  // NOTE(review): standard operator new throws on failure, so this null
  // check only fires if a nothrow allocator is in effect — verify.
  video->hal = new hal();
  if (!video->hal) {
    printf("no memory!\n");
    goto addvideo_exit;
  }

  video->deviceid = id;
  video->save_en = 1;

  if (video->type == VIDEO_TYPE_ISP) {
    printf("video%d is isp\n", video->deviceid);
    if (isp_video_init(video, id, width, height, fps))
      goto addvideo_exit;
  } 
  else {
      goto addvideo_exit;
  }

  video->pre = 0;
  video->next = 0;

  // NOTE(review): from here on the node is linked into the global list, but
  // the addvideo_exit path frees it without unlinking — confirm whether a
  // failure after this point can leave a dangling list entry.
  video_record_addnode(video);

#ifdef USE_WATERMARK
  if (!watermark_config(video->width, video->height, &video->watermark))
    watermark_init(&video->watermark);
#endif

  if (pthread_attr_init(&attr)) {
    printf("pthread_attr_init failed!\n");
    goto addvideo_exit;
  }
  if (pthread_attr_setstacksize(&attr, STACKSIZE)) {
    printf("pthread_attr_setstacksize failed!\n");
    goto addvideo_exit;
  }

  if (video_encode_init(video)) {
  	DEBUG_INFO("not encode mode");
    goto addvideo_exit;
  }

#ifdef USE_WATERMARK
  if (video->encode_handler)
    video->encode_handler->watermark = &video->watermark;
#endif

  // Keep the front camera at the head of the list.
  video_record_set_front_camera();

  if (pthread_create(&video->record_id, &attr, video_record, video)) {
    printf("%s pthread create err!\n", __func__);
    goto addvideo_exit;
  }

  record_id_list.push_back(video->record_id);

  if (pthread_attr_destroy(&attr))
    printf("pthread_attr_destroy failed!\n");

  ret = 0;
  goto addvideo_ret;

addvideo_exit:

  // Error path: release whatever was set up for this node.
  if (video) {
    video_encode_exit(video);

    if (video->hal) {
      if (video->hal->dev.get())
        video->hal->dev->deInitHw();

      delete video->hal;
      video->hal = NULL;
    }

    pthread_mutex_destroy(&video->record_mutex);
    pthread_cond_destroy(&video->record_cond);

    free(video);
    video = NULL;
  }

  printf("video%d exit!\n", id);
  ret = -1;

addvideo_ret:

  pthread_rwlock_unlock(&notelock);
  return ret;
}

// Ask the record thread owning device `deviceid` to shut down: stop any
// active MP4 recording and wake its wait loop. Always returns 0.
extern "C" int video_record_deletevideo(int deviceid) {
  pthread_rwlock_rdlock(&notelock);
  for (struct Video* node = getfastvideo(); node; node = node->next) {
    if (node->deviceid != deviceid)
      continue;
    stop_record(node);
    video_record_signal(node);
    break;
  }
  pthread_rwlock_unlock(&notelock);

  return 0;
}

// Initialize the global reader/writer lock that guards the video list and
// recording state. Must be called before any other video_record_* API.
extern "C" void video_record_init_lock() {
  pthread_rwlock_init(&notelock, NULL);
}

// Destroy the global video-list lock; pair with video_record_init_lock().
extern "C" void video_record_destroy_lock() {
  pthread_rwlock_destroy(&notelock);
}

// Global bring-up of the recorder: configure the shared thread attributes,
// latch the multi-path/sub-path/ADAS feature flags, enumerate cameras, and
// probe every candidate /dev/videoN node. Sets record_init_flag on success.
extern "C" void video_record_init(struct ui_frame* front,
                                  bool withMp,
                                  bool withSp) {
  int i;
  // fix: removed unused local `unsigned int temp;`.
  // Disable heuristic overcommit so big buffer allocations fail up-front
  // instead of the process being OOM-killed later.
  // system("echo -1000 > /proc/$(pidof video)/oom_score_adj");
  system("echo 0 > /proc/sys/vm/overcommit_memory");
  if (pthread_attr_init(&global_attr)) {
    printf("pthread_attr_init failed!\n");
    return;
  }
  if (pthread_attr_setstacksize(&global_attr, STACKSIZE)) {
    printf("pthread_attr_setstacksize failed!\n");
    return;
  }

  with_mp = withMp;
  with_sp = withSp;
  // ADAS lane-departure warning follows the persistent parameter setting.
  with_adas = (parameter_get_video_ldw() == 1);

  memset(&g_test_cam_infos, 0, sizeof(g_test_cam_infos));
  CamHwItf::getCameraInfos(&g_test_cam_infos);

  // Probe every candidate video node; addvideo ignores absent/foreign ones.
  for (i = 0; i < MAX_VIDEO_DEVICE; i++)
    video_record_addvideo(i, front, 0);

  pthread_rwlock_wrlock(&notelock);
  record_init_flag = true;
  pthread_rwlock_unlock(&notelock);
}

extern "C" void video_record_deinit(void) {
  struct Video* video_cur;
  int i = 0, j = 0;
  list<pthread_t> save_list;

  pthread_rwlock_wrlock(&notelock);
  if (!record_init_flag) {
    printf("video record have been deinit!\n");
    pthread_rwlock_unlock(&notelock);
    return ;
  }
  record_init_flag = false;
  video_cur = getfastvideo();
  while (video_cur) {
    video_record_signal(video_cur);
    video_cur = video_cur->next;
  }
  save_list.clear();
  for (list<pthread_t>::iterator it = record_id_list.begin();
       it != record_id_list.end(); ++it)
    save_list.push_back(*it);
  record_id_list.clear();
  pthread_rwlock_unlock(&notelock);

  for (list<pthread_t>::iterator it = save_list.begin();
       it != save_list.end(); ++it) {
    printf("pthread_join record id: %lu\n", *it);
    pthread_join(*it, NULL);
  }
  save_list.clear();

  if (pthread_attr_destroy(&global_attr))
    printf("pthread_attr_destroy failed!\n");
}

// A node can accept encode commands once its record thread marked it valid
// and its encode handler exists.
static inline bool encode_handler_is_ready(struct Video* node) {
  if (!node->valid)
    return false;
  return node->encode_handler != NULL;
}

// Derive the archived name for a finished recording: "<stem>_O.mp4" becomes
// "<stem>_A.mp4" in `newpath`. If oldpath does not contain "_O.mp4", or
// either pointer is NULL, newpath is left untouched.
// fix: the original memmove did not NUL-terminate newpath before strcat,
// which is undefined behavior unless the caller pre-zeroed the buffer.
void video_record_file_rename(char* oldpath, char* newpath)
{
	if (!oldpath || !newpath)
		return;
	char* suffix = strstr(oldpath, "_O.mp4");
	if (suffix) {
		size_t stem_len = (size_t)(suffix - oldpath);
		memmove(newpath, oldpath, stem_len);
		newpath[stem_len] = '\0';  // terminate before appending
		strcat(newpath, "_A.mp4");
	}
}

extern "C" int video_record_startrec(void) {
  int ret = -1;
  pthread_rwlock_rdlock(&notelock);
  Video* video = getfastvideo();
  while (video) {
    EncodeHandler* ehandler = video->encode_handler;
    if (ehandler &&
        !(video->encode_status & RECORDING_FLAG)) {
      enablerec += 1;
      if (!ehandler->get_audio_capture())
        global_audio_ehandler.AddPacketDispatcher(
            ehandler->get_h264aac_pkt_dispatcher());
      ehandler->send_record_mp4_start(global_audio_ehandler.GetEncoder());
      video->encode_status |= RECORDING_FLAG;
      ret = 0;
    }
    video = video->next;
  }
  pthread_rwlock_unlock(&notelock);
  PRINTF("%s, enablerec: %d\n", __func__, enablerec);
  PRINTF_FUNC_OUT;
  return ret;
}
extern "C" int runapp(char* cmd);
// Stop MP4 recording on every recording node, sync the filesystem, then
// rename the finished "*_O.mp4" file to "*_A.mp4".
// fixes: guard against a NULL encode handler (the check was commented out),
// bound the filename copy (was an unbounded strcpy), and only attempt the
// rename when a file was actually closed.
extern "C" void video_record_stoprec(void) {
  char file_name[128] = {0};
  char re_name[128] = {0};

  pthread_rwlock_rdlock(&notelock);
  Video* video = getfastvideo();
  fprintf(stderr, "In %s, fastvideo: %p\n", __func__, video);
  while (video) {
    if (video->encode_status & RECORDING_FLAG) {
      EncodeHandler* ehandler = video->encode_handler;
      if (ehandler) {  // flag can be set while the handler is already gone
        ehandler->send_record_mp4_stop();
        if (!ehandler->get_audio_capture())
          global_audio_ehandler.RmPacketDispatcher(
              ehandler->get_h264aac_pkt_dispatcher());
        // bounded copy: the filename length is not under our control
        snprintf(file_name, sizeof(file_name), "%s", ehandler->filename);
      }
      video->encode_status &= ~RECORDING_FLAG;
      enablerec -= 1;
    }
    video = video->next;
  }
  pthread_rwlock_unlock(&notelock);

  // Give the muxer time to flush, then force dirty pages to storage.
  usleep(250000);
  char cmd[] = "sync";
  runapp(cmd);
  usleep(100000);

  // Only rename when a recording actually finished and produced a name.
  if (file_name[0]) {
    video_record_file_rename(file_name, re_name);
    if (re_name[0])
      rename(file_name, re_name);
  }
  PRINTF("%s, enablerec: %d\n", __func__, enablerec);
  PRINTF_FUNC_OUT;
}

// Stop persisting the in-memory encode cache on every node that is doing so.
// Compiled out entirely unless CACHE_ENCODEDATA_IN_MEM is defined.
void video_record_stop_savecache() {
#ifdef CACHE_ENCODEDATA_IN_MEM
  pthread_rwlock_rdlock(&notelock);
  for (struct Video* node = getfastvideo(); node; node = node->next) {
    if (node->encode_status & CACHE_ENCODE_DATA_FLAG)
      node->encode_handler->send_save_cache_stop();
  }
  pthread_rwlock_unlock(&notelock);
#endif
}

void video_record_start_ts_transfer(char* url) {
  PRINTF_FUNC;
  pthread_rwlock_rdlock(&notelock);
  Video* video = getfastvideo();
  if (video && url && video->ts_handler &&
      !(video->encode_status & WIFI_TRANSFER_FLAG)) {
    assert(url);
    if (!video->ts_handler->StartTransferStream(url, &global_attr)) {
      enablerec += 1;
      video->encode_status |= WIFI_TRANSFER_FLAG;
    }
  }
  pthread_rwlock_unlock(&notelock);
  PRINTF("%s, enablerec: %d\n", __func__, enablerec);
}

void video_record_stop_ts_transfer(char sync) {
  PRINTF_FUNC;
  pthread_rwlock_rdlock(&notelock);
  Video* video = getfastvideo();
  while (video) {
    // Alas! The fastvideo may be change between start/stop ts transfer...
    if (video->encode_status & WIFI_TRANSFER_FLAG) {
      if (video->ts_handler)
        video->ts_handler->StopTransferStream();
      enablerec -= 1;
      video->encode_status &= ~WIFI_TRANSFER_FLAG;
    }
    video = video->next;
  }
  pthread_rwlock_unlock(&notelock);
  PRINTF("%s, enablerec: %d\n", __func__, enablerec);
}

// Start caching `sec` seconds of encoded data in memory on every node that
// has an encode handler and is not already caching.
// Compiled out unless CACHE_ENCODEDATA_IN_MEM is defined.
void video_record_start_cache(int sec) {
#ifdef CACHE_ENCODEDATA_IN_MEM
  pthread_rwlock_rdlock(&notelock);
  for (Video* node = getfastvideo(); node; node = node->next) {
    EncodeHandler* handler = node->encode_handler;
    if (!handler || (node->encode_status & CACHE_ENCODE_DATA_FLAG))
      continue;
    if (!handler->get_audio_capture())
      global_audio_ehandler.AddPacketDispatcher(
          handler->get_h264aac_pkt_dispatcher());
    handler->send_cache_data_start(sec, global_audio_ehandler.GetEncoder());
    node->encode_status |= CACHE_ENCODE_DATA_FLAG;
  }
  pthread_rwlock_unlock(&notelock);
#endif
}

// Stop in-memory encode caching on every node that is caching, detaching
// the shared audio dispatcher that start_cache attached.
// fixes: guard against a NULL encode handler before dereferencing it, and
// use the local `ehandler` consistently (the original mixed `ehandler` and
// `video->encode_handler`).
void video_record_stop_cache() {
#ifdef CACHE_ENCODEDATA_IN_MEM
  pthread_rwlock_rdlock(&notelock);
  Video* video = getfastvideo();
  while (video) {
    if (video->encode_status & CACHE_ENCODE_DATA_FLAG) {
      EncodeHandler* ehandler = video->encode_handler;
      if (ehandler) {  // flag may outlive the handler; don't crash
        ehandler->send_cache_data_stop();
        if (!ehandler->get_audio_capture())
          global_audio_ehandler.RmPacketDispatcher(
              ehandler->get_h264aac_pkt_dispatcher());
      }
      video->encode_status &= ~CACHE_ENCODE_DATA_FLAG;
    }
    video = video->next;
  }
  pthread_rwlock_unlock(&notelock);
#endif
}

// Reset the video bitrate on every ready node.
// fix: the original loop condition `while (video && encode_handler_is_ready
// (video))` terminated at the FIRST node that was not ready, silently
// skipping every node after it; non-ready nodes are now skipped instead,
// matching the per-node pattern used by the sibling functions.
void video_record_reset_bitrate() {
  pthread_rwlock_rdlock(&notelock);
  Video* video = getfastvideo();
  while (video) {
    if (encode_handler_is_ready(video))
      video->encode_handler->reset_video_bit_rate();
    video = video->next;
  }
  pthread_rwlock_unlock(&notelock);
}

extern "C" void video_record_setaudio(int flag) {
  pthread_rwlock_rdlock(&notelock);
  enableaudio = flag;
  global_audio_ehandler.SetMute(!flag);
  Video* video = getfastvideo();
  if (video && video->encode_handler) {
    video->encode_handler->set_audio_mute(enableaudio ? false : true);
  }
  pthread_rwlock_unlock(&notelock);
}

// Set anti-flicker power-line frequency on all cameras.
// Accepted values: 1 (50Hz) or 2 (60Hz); anything else returns -1.
extern "C" int video_record_set_power_line_frequency(int i) {
  // 1:50Hz,2:60Hz
  if (i != 1 && i != 2) {
    printf("%s parameter wrong\n", __func__);
    return -1;
  }

  pthread_rwlock_rdlock(&notelock);
  for (struct Video* node = getfastvideo(); node; node = node->next)
    video_set_power_line_frequency(node, i);
  pthread_rwlock_unlock(&notelock);

  return 0;
}

// Apply white-balance setting `i` to every camera. Always returns 0.
extern "C" int video_record_set_white_balance(int i) {
  pthread_rwlock_rdlock(&notelock);
  for (struct Video* node = getfastvideo(); node; node = node->next)
    video_set_white_balance(node, i);
  pthread_rwlock_unlock(&notelock);

  return 0;
}

// Apply exposure-compensation setting `i` to every camera. Always returns 0.
extern "C" int video_record_set_exposure_compensation(int i) {
  pthread_rwlock_rdlock(&notelock);
  for (struct Video* node = getfastvideo(); node; node = node->next)
    video_set_exposure_compensation(node, i);
  pthread_rwlock_unlock(&notelock);

  return 0;
}

// Trigger a still capture on every idle camera. When `is_return` is nonzero,
// additionally poll (up to 50 * 20ms = 1s) for a camera to finish and copy
// its picture name into `result`.
// Returns 0 on success (-1 when the list is empty, `result` is NULL while
// is_return is set, or the capture did not finish within the poll window).
// fix: the polling phase walked the global video list WITHOUT holding
// notelock, racing concurrent node deletion; it now takes the read lock for
// each scan.
extern "C" int video_record_takephoto_stronger(int is_return, char* result) {
  struct Video* video_cur;
  int ret = -1;
  int i = 0, is_end = 0;

  // Arm every camera that is idle (PHOTO_END) to capture a still.
  pthread_rwlock_rdlock(&notelock);
  video_cur = getfastvideo();
  while (video_cur) {
    ret = 0;
    if (video_cur->photo.state == PHOTO_END) {
      video_cur->photo.state = PHOTO_ENABLE;
      DEBUG_INFO("start get photo!");
    }
    video_cur = video_cur->next;
  }
  pthread_rwlock_unlock(&notelock);

  if (!is_return)
    return ret;
  if (!result)
    return -1;

  while (i < 50) {
    is_end = 0;
    // Scan under the lock: nodes can be removed concurrently by the record
    // threads via video_record_deletenode().
    pthread_rwlock_rdlock(&notelock);
    video_cur = getfastvideo();
    while (video_cur) {
      if (video_cur->photo.state == PHOTO_END) {
        memmove(result, video_cur->photo.pic_name,
                sizeof(video_cur->photo.pic_name));
        is_end = 1;
        break;
      }
      video_cur = video_cur->next;
    }
    pthread_rwlock_unlock(&notelock);

    if (is_end)
      break;
    usleep(20000);
    i++;
  }

  return is_end ? 0 : -1;
}


// Trigger a still capture on every idle camera (fire-and-forget variant).
// Returns 0 when the list is non-empty, -1 otherwise.
extern "C" int video_record_takephoto(void) {
  int ret = -1;

  pthread_rwlock_rdlock(&notelock);
  for (struct Video* node = getfastvideo(); node; node = node->next) {
    ret = 0;
    if (node->photo.state == PHOTO_END) {
      node->photo.state = PHOTO_ENABLE;
      DEBUG_INFO("start get photo!");
    }
  }
  pthread_rwlock_unlock(&notelock);

  return ret;
}

// Switch between record mode (broadcast SMPTE170M color space) and
// non-record/preview mode (full-range JPEG color space).
extern "C" void video_record_set_record_mode(bool mode) {
  is_record_mode = mode;
  if (is_record_mode)
    color_space = HAL_COLORSPACE_SMPTE170M;
  else
    color_space = HAL_COLORSPACE_JPEG;
  DEBUG_INFO("is_record_mode = %d, color_space = %d\n", is_record_mode, color_space);
}

// Snapshot per-camera frame rate: fps = frames counted since the previous
// call (fps_total delta). Intended to be invoked once per second.
extern "C" void video_record_fps_count(void) {
  pthread_rwlock_rdlock(&notelock);
  for (struct Video* node = getfastvideo(); node; node = node->next) {
    node->fps = node->fps_total - node->fps_last;
    node->fps_last = node->fps_total;
  }
  pthread_rwlock_unlock(&notelock);
}

// Count the video nodes currently in the global list.
extern "C" int video_record_get_list_num(void) {
  int count = 0;

  pthread_rwlock_rdlock(&notelock);
  for (struct Video* node = getfastvideo(); node; node = node->next)
    count++;
  pthread_rwlock_unlock(&notelock);

  return count;
}

// Redraw the time watermark for every camera from `src` (`srclen` units of
// freshly rendered time data). Each watermark keeps two data buffers; the
// update is drawn into the inactive one (index ^ 1) and buffer_index is
// flipped only after the copy completes, so a concurrent reader never sees
// a half-written buffer.
extern "C" void video_record_update_time(uint32_t* src, uint32_t srclen) {
  struct Video* video = NULL;

  pthread_rwlock_rdlock(&notelock);

  video = getfastvideo();
  while (video) {
    if (video->watermark.type & WATERMARK_TIME) {
      uint32_t index = video->watermark.buffer_index;
      uint32_t offset = video->watermark.osd_data_offset.time_data_offset;

      // Destination: the time region inside the *inactive* watermark buffer.
      uint8_t *dst = (uint8_t*)video->watermark.watermark_data[index ^ 1].buffer
                        + offset;

      watermark_update_rect_bmp(src, srclen, video->watermark.coord_info.time_bmp,
                        dst, video->watermark.color_info);

      // The data update is complete, switch buffer.
      video->watermark.buffer_index = index ^ 1;
    }

    video = video->next;
  }

  pthread_rwlock_unlock(&notelock);
}

// Push new license-plate watermark data (`src`, `srclen`) to every camera
// whose watermark has the license overlay enabled.
extern "C" void video_record_update_license(uint32_t* src, uint32_t srclen) {
  pthread_rwlock_rdlock(&notelock);
  for (struct Video* node = getfastvideo(); node; node = node->next) {
    if (node->watermark.type & WATERMARK_LICENSE)
      watermart_get_license_data(&node->watermark, src, srclen);
  }
  pthread_rwlock_unlock(&notelock);
}

// Load the user DSP noise override from /mnt/sdcard/dsp_cfg_noise into the
// global user_noise. Leaves user_noise untouched when the file is missing
// or empty.
// fixes: check the fgets result before parsing (the original parsed an
// unread buffer on an empty file) and use strtoul instead of atoi, whose
// behavior is undefined on out-of-range input.
extern "C" void video_record_get_user_noise(void) {
  FILE* fp = fopen("/mnt/sdcard/dsp_cfg_noise", "rb");
  if (!fp) {
    printf("/mnt/sdcard/dsp_cfg_noise not exist!\n");
    return;
  }

  char noise[10] = {0};
  if (fgets(noise, sizeof(noise), fp)) {
    user_noise = (unsigned int)strtoul(noise, NULL, 10);
    printf("user_noise = %u\n", user_noise);
  }
  fclose(fp);
}

// Register the callback used to report recorder events (a command code plus
// two opaque message pointers). Always returns 0.
extern "C" int REC_RegEventCallback(void (*call)(int cmd, void *msg0, void *msg1))
{
  rec_event_call = call;
  return 0;
}
