#include "stream_puller.h"

#include <mutex>

namespace stream {

CUStreamPuller::CUStreamPuller(BaseDecoderManger* decode_mnger, StreamPullerInfo* spinfo, LogInfo* log_info): 
    BasePuller(spinfo, log_info) {
  // Downcast to the CUDA decoder manager this puller cooperates with.
  // NOTE(review): assumes callers always pass a CUDecoderManger* (or nullptr)
  // here — static_cast replaces the old C-style cast but the contract is the same.
  decode_mng = static_cast<CUDecoderManger*>(decode_mnger);
  cur_fps = 1; // sane default until the real stream fps is read in init_params()
}

// Nothing to release here: FFmpeg/GPU resources are torn down explicitly in
// start_pull() / release_ffmpeg(), so the destructor is the compiler default.
CUStreamPuller::~CUStreamPuller() = default;

void CUStreamPuller::release_ffmpeg() {
  // Close the codec context and the input, then drop the pointers so a later
  // call (or a re-pull) never touches freed FFmpeg state.
  if (pVideoCodecCtx != nullptr) {
    avcodec_close(pVideoCodecCtx);
    // NOTE(review): when init_params() allocated this context via
    // avcodec_alloc_context3, avcodec_close alone leaks the AVCodecContext
    // (needs avcodec_free_context) — but in the shared-decoder path the
    // pointer aliases the stream-owned context, which must NOT be freed here.
    // Track ownership before changing this. TODO confirm.
    pVideoCodecCtx = nullptr;
  }
  avformat_close_input(&ifmt_ctx); // safe on nullptr; also sets ifmt_ctx to NULL

  ifmt_ctx = nullptr;
}

int CUStreamPuller::init_params() {
  int ret = 0;
  char errbuf[64];

	av_register_all();       // Register all codecs and formats so that they can be used.
	avformat_network_init(); // Initialization of network components

  av_dict_set(&optionsDict, "stimeout", "3000000", 0);           // if don't setting this property，av_read_frame will run as block mode (ms)
  av_dict_set(&optionsDict, "bufsize", "1024000", 0);            // buffer size
  av_dict_set(&optionsDict, "rtsp_transport", puller_ifo->transferType.c_str(), 0); // transfer type,udp will faster but may lost some packet,tcp slower but stable
  if ((ret = avformat_open_input(&ifmt_ctx, puller_ifo->streamAddress.c_str(), 0, &optionsDict)) < 0) { // Open the input file for reading.
    spdlog::get("logger")->info("Could not open input file '{}' (error '{}')\n", puller_ifo->streamAddress, av_make_error_string(errbuf, sizeof(errbuf), ret));
    return -103;
  }
  spdlog::get("logger")->info("avformat_open_input ok!\n");

  if ((ret = avformat_find_stream_info(ifmt_ctx, nullptr)) < 0) { // Get information on the input file (number of streams etc.).
    spdlog::get("logger")->info("Could not open find stream info (error '%s')\n", av_make_error_string(errbuf, sizeof(errbuf), ret));
    return -102;
  } 
  spdlog::get("logger")->info("avformat_find_stream_info ok!\n");

  // // dump information
  // for (unsigned int i = 0; i < ifmt_ctx->nb_streams; i++) { av_dump_format(ifmt_ctx, i, puller_ifo->streamAddress.c_str(), 0); }
  // printf("av_dump_format ok!\n");

  // find video stream index
  for (unsigned int i = 0; i < ifmt_ctx->nb_streams; i++) { 
    AVStream *st = ifmt_ctx->streams[i];
    if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO) { audio_st_index = i; }
    else if (st->codec->codec_type == AVMEDIA_TYPE_VIDEO) { video_st_index = i; }
  }

  if (-1 == video_st_index) {
    printf("No H.264 video stream in the input file\n");
    return -100;
  }

  // getting stream msg
  pVst = ifmt_ctx->streams[video_st_index];
  cur_fps = (double)pVst->r_frame_rate.num / (double)pVst->r_frame_rate.den;
  if (cur_fps < 1) {cur_fps=25;}
  pVideoCodecCtx = pVst->codec;
  if (pVideoCodecCtx->codec_id == AV_CODEC_ID_H264) { 
    puller_ifo->enType = AV_CODEC_ID_H264;
    pVideoCodec = avcodec_find_decoder_by_name("h264_cuvid");
  }
  else if (pVideoCodecCtx->codec_id == AV_CODEC_ID_H265) { 
    puller_ifo->enType  = AV_CODEC_ID_H265;
    pVideoCodec = avcodec_find_decoder_by_name("hevc_cuvid");
  }
  // 无需ffmpeg解码
  if (pVideoCodec == nullptr) {return -101;}
  puller_ifo->oriWidth = pVideoCodecCtx->width;
  puller_ifo->oriHeight = pVideoCodecCtx->height;
  // spdlog::get("logger")->info("decodeChn:{},oriWidth:{},oriHeight:{}\n", puller_ifo->vdecChn, puller_ifo->oriWidth, puller_ifo->oriHeight);

  // 对超过1920的图片进行resize, 节约内存
  // if (pull_mode == stream::STRATEGY_FIXTIME) {
  // if (puller_ifo->oriWidth > 1920 || puller_ifo->oriHeight > 1080) {
  //   float ratio = std::max(puller_ifo->oriWidth*1.0 / 1920, puller_ifo->oriHeight*1.0 / 1080);
  //   useWidth = puller_ifo->oriWidth / ratio;
  //   useHeight = puller_ifo->oriHeight / ratio;
  //   useWidth = useWidth / 16 * 16;
  //   useHeight = useHeight / 16 * 16;
  // } else {
    puller_ifo->useWidth = puller_ifo->oriWidth;
    puller_ifo->useHeight = puller_ifo->oriHeight;
  // }
  spdlog::get("logger")->info("decodeChn:{},oriWidth:{},oriHeight:{}, cur_fps:{}\n", puller_ifo->vdecChn, puller_ifo->oriWidth, puller_ifo->oriHeight, cur_fps);
  // }

  // 创建BGR图片buffer
  bgr_host_buffer.resize(puller_ifo->oriWidth * puller_ifo->oriHeight * 3);
  input_batch = 1;
  YUVFormat input_format = YUVFormat::NV12BlockLinear;
  PixelLayout output_layout = PixelLayout::NHWC_BGR;
  DataType output_dtype = DataType::Uint8;
  input_gpu = create_yuv_gpu_image(puller_ifo->oriWidth, puller_ifo->oriHeight, input_batch, input_format);
  output_gpu = create_rgb_gpu_image(puller_ifo->useWidth, puller_ifo->useHeight, input_batch, output_layout, output_dtype);

  if (decode_mng == nullptr || pull_mode != STRATEGY_FIXTIME) {
    pVideoCodecCtx = avcodec_alloc_context3(pVideoCodec);
    if (NULL == pVideoCodecCtx) {
      av_log(NULL, AV_LOG_FATAL, "avcodec_alloc_context3 failed\n");
      return -102;
    }
    // av_opt_set(pVideoCodecCtx->priv_data, "gpu", "0", 0);
    
    if (avcodec_open2(pVideoCodecCtx, pVideoCodec, nullptr) < 0) {return -103;}

    std::cout << "pVideoCodecCtx->pix_fmt:" << pVideoCodecCtx->pix_fmt << std::endl;

  }

  /*Begin decoder and display*/
  // getting stream height and width
  stop_flag = common::CameraOpened;
  return 0;
}

int CUStreamPuller::get_frame(stream::ImageBlob& buffer) {
  // Pops the oldest decoded frame from the queue into `buffer`.
  // Returns 0 on success, -1 when the queue is empty.
  // RAII lock: the old manual lock()/unlock() pair left the mutex locked
  // forever if the ImageBlob copy below threw.
  std::lock_guard<decltype(queue_lock)> guard(queue_lock);
  if (queue_data.empty()) { return -1; }
  buffer = queue_data.front();
  queue_data.pop();
  return 0;
}

int CUStreamPuller::start_pull() {
  // Main grab/decode loop. Reads packets from the opened input, decodes the
  // video stream (via the shared CUDA decoder manager in fixed-time mode,
  // otherwise through the local cuvid codec context), converts NV12 -> BGR on
  // the GPU and pushes results into the bounded frame queue. Runs until
  // stop_flag is changed away from common::CameraGrabing (see stop_pull()).
  int ret = 0;

  AVPacket pkt;
  av_init_packet(&pkt);    // initialize packet.
  pkt.data = nullptr;
  pkt.size = 0;
	AVFrame *frame = av_frame_alloc();
  // NOTE(review): `frame` is never av_frame_free'd before return — leaks one
  // AVFrame per start_pull() invocation. TODO confirm and free at teardown.

  // NV12 layout: full-resolution Y plane followed by a half-size interleaved
  // UV plane.
  int size_y = puller_ifo->oriWidth * puller_ifo->oriHeight;
  int size_uv = size_y / 2;
  std::vector<char> out_buf(size_y+size_uv);
  // Host-side YUV descriptor wrapping out_buf; deleted at loop exit.
  YUVHostImage *yuvhost = new YUVHostImage();
  yuvhost->data = (unsigned char*)out_buf.data();
  yuvhost->width = puller_ifo->oriWidth;
  yuvhost->height = puller_ifo->oriHeight;
  yuvhost->format = YUVFormat::NV12BlockLinear;
  yuvhost->stride = puller_ifo->oriWidth;
  yuvhost->y_area = puller_ifo->oriWidth * puller_ifo->oriHeight;

  stream::ImageBlob buffer(stream::ImageBlobMode_BGR);
  // Request descriptor reused for every packet sent to the decoder manager.
  DecoderData decode_data;
  decode_data.codec_id = (AVCodecID)puller_ifo->enType;
  decode_data.size_y = size_y;
  decode_data.size_uv = size_uv;

  /*End*/
  uint8_t *seqHeader = nullptr;
  int seqHeaderSize = 0;
  int firstPacket = 1;
  int eos = 0;
  int bufSize = 0;
  float vdec_fps = 0;
  int frame_cnt = 0;
  int vdec_cnt = 0;
  int seconds_cnt = 0;

  int pts_count = 0, fps_cnt = 0;
  int64_t start_time = 0, end_time = 0, cur_time = 0, decode_time = 0, prev_seconds_time = 0;
  prev_seconds_time = start_time = duration_cast<milliseconds>(system_clock::now().time_since_epoch()).count();
  int mmz_index = 0;
  // Error counters: empty conversions, read failures, send/receive failures.
  int empty_error=0;
  int av_error=0;
  int av_send_error=0;
  int av_recv_error=0;
  stop_flag = common::CameraGrabing;
  while (stop_flag == common::CameraGrabing) {
    std::this_thread::sleep_for(milliseconds(3));
    cur_time = duration_cast<milliseconds>(system_clock::now().time_since_epoch()).count();

    ret = av_read_frame(ifmt_ctx, &pkt); // read frames
    if (ret < 0) {
      av_error++; 
      if (av_error%50==0) {spdlog::get("logger")->info("ERROR av_read_frame ret < 0, av_error:{}\n", av_error); }
      // Too many accumulated read failures: exit so the stream can be re-pulled.
      if (av_error > 1000) { av_packet_unref(&pkt); pkt.size = 0; break; }
      continue; 
    } else {
      if (av_error > 0) { av_error--; }
    }
    if (pkt.stream_index != video_st_index) { goto free_packet; }

    // Fixed-time strategy: only key frames are decoded.
    if (pull_mode == STRATEGY_FIXTIME && (pkt.flags & AV_PKT_FLAG_KEY) <= 0) { 
      goto free_packet; 
    }
    // At full-speed analysis, frame skipping is pointless: only continuous
    // decoding yields artifact-free images.
    // else if (pull_mode != STRATEGY_FIXTIME && int((cur_fps / pull_mode_value) * threshold_fps) > seconds_cnt  && pkt.flags != AV_PKT_FLAG_KEY) { 
    //   goto free_packet; 
    // }
    // threshold_fps++;

    if (decode_mng != nullptr && pull_mode == STRATEGY_FIXTIME) {
      // Shared-decoder path: hand the packet to the CUDA decoder manager,
      // which fills out_buf with the decoded NV12 frame.
      int set_idx = 0;
      decode_data.pkt_id = puller_ifo->channelId + "." + std::to_string(cur_time);
      decode_data.pkt = &pkt;
      decode_data.yuv = &out_buf;
      decode_data.source_id = puller_ifo->vdecChn;
      decode_data.width = puller_ifo->useWidth;
      decode_data.height= puller_ifo->useHeight;
      // std::cout << "decode_data.pkt_id:" << decode_data.pkt_id << std::endl;

      decode_mng->set_data(set_idx, &decode_data);
      // std::cout << "decode_mng->set_data:" << set_idx << std::endl;
      if (set_idx < 0) {
        goto free_packet;
      }

      // Must be a blocking call: pkt is released at the end of this iteration.
      ret = decode_mng->get_data(set_idx);
      // std::cout << "decode_mng->get_data set_idx:" << set_idx << " ret:" << ret << std::endl;
      if (ret != 0) {
        goto free_packet;
      }

    }
    else {
      // Local-decode path: send the packet to the cuvid codec context.
      ret = avcodec_send_packet(pVideoCodecCtx, &pkt);
      if (ret != AVERROR(EAGAIN) && ret != AVERROR_EOF && ret < 0) {
        av_send_error+=1;
        if (av_send_error > 100) {av_send_error=0; spdlog::get("logger")->info("ERROR. CUStreamPuller::start_pull_imp. avcodec_send_packet error.\n");}
        goto free_packet;
      }
      ret = avcodec_receive_frame(pVideoCodecCtx, frame);
      if (ret == AVERROR(EAGAIN)) {
        // Decoder needs more input before it can emit a frame; not fatal.
        av_recv_error+=1;
        if (av_recv_error > 50) {av_recv_error=0; spdlog::get("logger")->info("ERROR. CUStreamPuller::start_pull_imp. avcodec_receive_frame error. AVERROR(EAGAIN).\n");}
        goto free_frame;
      } else if (ret == AVERROR_EOF) {
        spdlog::get("logger")->info("ERROR. CUStreamPuller::start_pull_imp. avcodec_receive_frame error. AVERROR_EOF.\n");
        printf("\n");
        av_packet_unref(&pkt);
        break;
      } else if (ret < 0) {
        // Any other decode error: count it and drop this frame.
        av_recv_error+=1;
        if (av_recv_error > 50) {av_recv_error=0; spdlog::get("logger")->info("ERROR. CUStreamPuller::start_pull_imp. avcodec_receive_frame error. ret < 0.\n");}
        goto free_frame;
      }

      // Pack the decoded planes into out_buf (NV12: Y then interleaved UV).
      // NOTE(review): assumes frame->data planes are tightly packed
      // (linesize == width) — TODO confirm for this decoder/resolution.
      std::copy(frame->data[0], frame->data[0] + size_y, out_buf.begin());
      std::copy(frame->data[1], frame->data[1] + size_uv, out_buf.begin()+size_y);
    }

    // GPU NV12 -> BGR conversion; fills buffer.img.
    yuv2rgb_cuda(puller_ifo->oriWidth, puller_ifo->oriHeight, yuvhost, puller_ifo->useWidth, puller_ifo->useHeight, buffer.img);
    // nv122bgr(out_buf, cv::Size(puller_ifo->oriWidth, puller_ifo->oriHeight), buffer.img);
      // std::cout << "buffer.img.empty():" << buffer.img.empty() << std::endl;

    if (buffer.img.empty()) {
      empty_error++; 
      if (empty_error % 50 == 0) {spdlog::get("logger")->info("ERROR buffer.img.empty. empty_error:{}\n", empty_error); }
      // Too many empty conversions: exit so the stream can be re-pulled.
      if (empty_error > 1000) { av_frame_unref(frame); av_packet_unref(&pkt); pkt.size = 0; break; }
    }

    queue_lock.lock();
    if (queue_data.size() >= buffer_cnt) {
      // Bounded queue: drop the oldest frame when the consumer lags.
      queue_data.pop();
      // printf("avcodec_receive_frame pop. max_size:3.\n");
    }
    // NOTE(review): buffer.img is a cv::Mat view over bgr_host_buffer (see
    // yuv2rgb_cuda) and cv::Mat copies are shallow, so queued frames may all
    // alias the same pixel buffer — confirm ImageBlob deep-copies on push.
    queue_data.push(buffer);
    queue_lock.unlock();
    decode_time = cur_time;
    vdec_cnt++;

// Cleanup labels: every iteration funnels through here so pkt/frame refs are
// always released regardless of which branch bailed out above.
free_frame:
    if (frame) {av_frame_unref(frame);}
free_packet:
    if (pkt.data) {av_packet_unref(&pkt);}


    // Report the effective decode fps every 5 seconds.
    end_time = duration_cast<milliseconds>(system_clock::now().time_since_epoch()).count();
    if (end_time - start_time > 5 * 1e3) {
      vdec_fps = 1.0 / ((end_time - start_time) / (vdec_cnt * 1e3));
      printf("vdec_fps c:%s : %f, vdec_cnt:%d\n", puller_ifo->channelId.c_str(), vdec_fps, vdec_cnt);
      spdlog::get("logger")->info("CUStreamPuller::start_pull. vdec_fps c:{} : {}, vdec_cnt:{}", puller_ifo->channelId, vdec_fps, vdec_cnt);
      vdec_cnt = 0;
      start_time = end_time;
    }

  }

  // Tear-down: unregister from the shared decoder manager.
  if (decode_mng != nullptr) {
    decode_mng->release_data(puller_ifo->vdecChn);
  }
  // std::cout << "decode_mng->release_data:" << puller_ifo->vdecChn << std::endl;

  delete yuvhost;
  free_rgb_gpu_image(output_gpu);
  free_yuv_gpu_image(input_gpu);
  // Drain any queued frames, close FFmpeg and report the closed state.
  while (!queue_data.empty()) { queue_data.pop(); }
  release_ffmpeg();
  stop_flag = common::CameraClosed;

  return 0;

}


int CUStreamPuller::yuv2rgb_cuda(int input_width, int input_height, YUVHostImage *yuvhost, int output_width, int output_height, cv::Mat& img){
  // Uploads the host NV12 image to the GPU, converts it to packed uint8 BGR
  // (NHWC) and downloads the result into bgr_host_buffer. On return `img` is
  // a cv::Mat VIEW over bgr_host_buffer — no copy is made, so its pixels are
  // overwritten by the next call; callers that keep the frame must clone it.
  Interpolation interp = Interpolation::Nearest;
  FillColor color;
  // memset(&color, 0, sizeof(color));
  // Fill color for padded regions (channel order per the converter's layout).
  color.color[0] = 0;
  color.color[1] = 255;
  color.color[2] = 128;

  // NOTE(review): a fresh CUDA stream is created and destroyed on every call;
  // a per-object stream would avoid this per-frame overhead. TODO confirm.
  cudaStream_t stream = nullptr;
  // cudaEvent_t start, end;
  cudaStreamCreateWithFlags(&stream, cudaStreamNonBlocking); //checkRuntime();

  // Upload each batch slice of the host NV12 image to input_gpu.
  // float gpu_time;
  for (int ibatch = 0; ibatch < input_batch; ++ibatch)
      copy_yuv_host_to_gpu(yuvhost, input_gpu, ibatch, input_width, input_height, stream);

  batched_convert_yuv_to_rgb(input_gpu, output_gpu, output_gpu->width, output_gpu->height, 0, 0, color, 0, 0, 0, 1, 1, 1, interp, stream);

  // Resize the host buffer if the output geometry changed since init_params().
  size_t buffer_size = output_gpu->width * output_gpu->height * output_gpu->channel * output_gpu->batch;
  if (bgr_host_buffer.size() != buffer_size) {
    bgr_host_buffer.resize(buffer_size);
  }
  size_t sizeof_element = dtype_sizeof(output_gpu->dtype);
  cudaMemcpyAsync(bgr_host_buffer.data(), output_gpu->data, buffer_size * sizeof_element, ::cudaMemcpyDeviceToHost, stream); // checkRuntime();
  cudaStreamSynchronize(stream); //checkRuntime();
  // fout.write((char*)phost, num_element * sizeof_element);
  // Wrap (not copy) the host buffer; see the aliasing note in the header comment.
  img = cv::Mat(output_gpu->height,output_gpu->width,CV_8UC3,bgr_host_buffer.data());

  // checkRuntime(cudaStreamDestroy(stream));
  cudaStreamDestroy(stream);
  return 0;
}


int CUStreamPuller::stop_pull() {
  int try_cnt = 200;
  if (stop_flag == common::CameraGrabing && try_cnt-- > 0) {
    stop_flag = common::CameraOpened;
    while (stop_flag != common::CameraClosed) {
      spdlog::get("logger")->info("CUStreamPuller::stop_pull stop_flag: {} ", stop_flag);
      std::this_thread::sleep_for(milliseconds(50));
    }
    if (try_cnt < 0) {exit(0);}
  }
  delete optionsDict;
  return 0;
}


}