#include "stream_pusher.h"

#include <mutex>

namespace stream {

// Construct the CUDA stream pusher; all heavy setup is deferred to init_params().
CUStreamPusher::CUStreamPusher(StreamPusherInfo* pusher_ifo, LogInfo* log_info)
    : BasePusher(pusher_ifo, log_info),
      queue_data_pop_cnt(0) {}

// No per-instance cleanup here; FFmpeg resources are released in release_ffmpeg().
CUStreamPusher::~CUStreamPusher() = default;


// Tear down the FFmpeg output pipeline (muxer IO, stream metadata, encoder).
//
// TODO: study the correct FFmpeg release order in detail. The CUDA decode
// release path is still problematic (either crashes or leaks memory), which
// is why avformat_free_context()/avformat_close_input() remain disabled.
void CUStreamPusher::release_ffmpeg() {
  // Guard against init_params() having failed before ofmt_ctx was allocated —
  // the old code dereferenced ofmt_ctx->pb unconditionally and could crash.
  if (ofmt_ctx) {
    if (ofmt_ctx->pb) {
      // Finalize the container, then free the AVIO buffer and context we own.
      av_write_trailer(ofmt_ctx);
      av_freep(&ofmt_ctx->pb->buffer);
      avio_context_free(&ofmt_ctx->pb);
    }
    av_dict_free(&ofmt_ctx->metadata);
    av_freep(&ofmt_ctx->streams);
    // avformat_free_context(ofmt_ctx);  // disabled: see TODO above
  }

  if (out_codec_ctx) {
    avcodec_free_context(&out_codec_ctx);  // also nulls out_codec_ctx
  }

  ofmt_ctx = nullptr;
}


int CUStreamPusher::init_params() {
  int ret = 0;
  char errbuf[64];

  av_register_all();       // Register all codecs and formats so that they can be used.
  ret = avformat_network_init();
  if ((ret = avformat_alloc_output_context2(&ofmt_ctx, nullptr, pusher_ifo->formatName.c_str(), nullptr)) < 0) {
    spdlog::get("logger")->info("ERROR. CUStreamPusher::init_params. Could not allocate output format context!");
    return -103;
  }

  if (!(ofmt_ctx->oformat->flags & AVFMT_NOFILE)) {
    if ((ret = avio_open2(&ofmt_ctx->pb, pusher_ifo->streamAddress.c_str(), AVIO_FLAG_WRITE, nullptr, nullptr)) < 0) {
    spdlog::get("logger")->info("ERROR. CUStreamPusher::init_params. Could not open output IO context!");
    return -102;
    }
  }

  AVCodecID encoder_type = AV_CODEC_ID_NONE;
  if (pusher_ifo->enType == AV_CODEC_ID_H264) { 
    out_codec = avcodec_find_encoder_by_name("h264_nvenc");
  }
  else if (pusher_ifo->enType == AV_CODEC_ID_H265) {
    out_codec = avcodec_find_encoder_by_name("h265_nvenc");
  }
  out_stream = avformat_new_stream(ofmt_ctx, out_codec);
  out_codec_ctx = avcodec_alloc_context3(out_codec);

  out_codec_ctx->codec_tag = 0;
  out_codec_ctx->codec_id = out_codec->id;
  out_codec_ctx->codec_type = AVMEDIA_TYPE_VIDEO;
  out_codec_ctx->width = pusher_ifo->dstWidth;
  out_codec_ctx->height = pusher_ifo->dstHeight;
  out_codec_ctx->gop_size = pusher_ifo->fps;
  out_codec_ctx->pix_fmt = AV_PIX_FMT_NV12;
  out_codec_ctx->framerate = {pusher_ifo->fps, 1};
  out_codec_ctx->time_base = {1, pusher_ifo->fps};
  out_codec_ctx->bit_rate = pusher_ifo->bitrate * 1000;
  if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER) {
    out_codec_ctx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
  }

  out_codec_ctx->thread_count = 8;
  out_codec_ctx->max_b_frames = 0;  //编码器不使用Ｂ帧，提高直播解码效率


  if ((ret = avcodec_parameters_from_context(out_stream->codecpar, out_codec_ctx)) < 0) {
    spdlog::get("logger")->info("ERROR. CUStreamPusher::init_params. Could not initialize stream codec parameters!");
    return -100;
  }

  
  AVDictionary *codec_options = nullptr;
  av_dict_set(&codec_options, "profile", pusher_ifo->codecProfile.c_str(), 0);
  // av_dict_set(&codec_options, "preset", "superfast", 0);
  av_dict_set(&codec_options, "tune", "zerolatency", 0);
  // //无需打开编码器
  // open video encoder
  if ((ret = avcodec_open2(out_codec_ctx, out_codec, &codec_options)) < 0) {
    spdlog::get("logger")->info("ERROR. CUStreamPusher::init_params. Could not open video encoder!");
    return -104;
  }
  av_dict_free(&codec_options);


  out_stream->codecpar->extradata = out_codec_ctx->extradata;
  out_stream->codecpar->extradata_size = out_codec_ctx->extradata_size;

  out_stream->codecpar->codec_tag = 0;
  av_dump_format(ofmt_ctx, 0, pusher_ifo->streamAddress.c_str(), 1);

  if ((ret = avformat_write_header(ofmt_ctx, nullptr)) < 0) {
    spdlog::get("logger")->info("ERROR. CUStreamPusher::init_params. Could not write header!");
    return -105;
  }
  
  stop_flag = common::CameraOpened;
  return 0;
}


// Build a BGR24 -> encoder-pixel-format converter at the destination
// resolution. Returns nullptr on failure (caller must check).
SwsContext* CUStreamPusher::initialize_sample_scaler() {
  const int w = pusher_ifo->dstWidth;
  const int h = pusher_ifo->dstHeight;
  SwsContext* swsctx = sws_getContext(w, h, AV_PIX_FMT_BGR24,
                                      w, h, out_codec_ctx->pix_fmt,
                                      SWS_BICUBIC, nullptr, nullptr, nullptr);
  if (swsctx == nullptr) {
    std::cout << "Could not initialize sample scaler!" << std::endl;
  }
  return swsctx;
}

// Allocate an AVFrame sized for the destination resolution in the encoder's
// pixel format. The pixel buffer is owned by the frame, so av_frame_free()
// releases it — the old raw `new uint8_t[...]` buffer was never deleted (leak).
// Returns nullptr on allocation failure.
AVFrame* CUStreamPusher::allocate_frame_buffer() {
  AVFrame *frame = av_frame_alloc();
  if (!frame) {
    return nullptr;
  }

  frame->width = pusher_ifo->dstWidth;
  frame->height = pusher_ifo->dstHeight;
  frame->format = static_cast<int>(out_codec_ctx->pix_fmt);
  // Start timestamps at zero: av_frame_alloc() leaves pts = AV_NOPTS_VALUE,
  // and start_push() increments pts with +=, which would otherwise start
  // from that sentinel value.
  frame->pts = 0;

  // Alignment 1 matches the old av_image_fill_arrays(..., 1) layout.
  if (av_frame_get_buffer(frame, 1) < 0) {
    av_frame_free(&frame);
    return nullptr;
  }

  return frame;
}


// Enqueue one image for the push loop. If the queue backs up past 5 entries
// the oldest frame is dropped to keep end-to-end latency bounded.
// Always returns 0.
int CUStreamPusher::add_frame(stream::ImageBlob& buffer) {
  // RAII lock: released on every path, exception-safe (was manual lock/unlock).
  std::lock_guard<decltype(queue_lock)> guard(queue_lock);

  if (queue_data.size() > 5) {
    queue_data.pop();
    queue_data_pop_cnt += 1;
    if (queue_data_pop_cnt >= 50) {
      // %zu for size_t — the old "%d" was a printf format/argument mismatch (UB).
      printf("CUStreamPusher %d pop %d. cur size:%zu\n",
             pusher_ifo->vEncChn, queue_data_pop_cnt, queue_data.size());
      queue_data_pop_cnt = 0;
    }
  }
  queue_data.push(buffer);
  return 0;
}


int CUStreamPusher::start_push() {
  int ret = 0;

  auto *frame = allocate_frame_buffer();
  auto *swsctx = initialize_sample_scaler();

  int frame_cnt = 0;
  int64_t pts = 0;
  float push_fps;
  int pts_count = 0, fps_cnt = 0;
  int64_t start_time = 0, end_time = 0;
  int64_t prev_time, cur_time;
  prev_time = cur_time = duration_cast<milliseconds>(system_clock::now().time_since_epoch()).count();;

  int step_time = 1000 / pusher_ifo->fps;
  int send_ok = 0;
  // AVBitStreamFilterContext h264bsfc = av_bsf_init();
  stream::ImageBlob buffer(stream::ImageBlobMode_BGR);
  stop_flag = common::CameraGrabing;
  // std::vector<int> speed_times;
  while (stop_flag == common::CameraGrabing) {
    cur_time = duration_cast<milliseconds>(system_clock::now().time_since_epoch()).count();
    if (fps_cnt == 0) { start_time = cur_time; }

    // 控制发送速度
    if (send_ok >= 0) {
      int leave_time = step_time - (cur_time - prev_time);
      if (leave_time > 0) std::this_thread::sleep_for(std::chrono::milliseconds(leave_time));
      prev_time = cur_time + leave_time;
    } else {
      std::this_thread::sleep_for(std::chrono::milliseconds(2));
    }

    int get_new_frame = 0;
    queue_lock.lock();
    if (!queue_data.empty()) {
      buffer = queue_data.front();
      get_new_frame = 1;
      queue_data.pop();
    }
    queue_lock.unlock();


    send_ok = -1;
    if (get_new_frame == 1 && !buffer.img.empty()) {
      // int64_t time_1 = duration_cast<milliseconds>(system_clock::now().time_since_epoch()).count();
      cv::Mat bgr_img;
      if (buffer.img.channels() == 1) { cv::cvtColor(buffer.img, bgr_img, cv::COLOR_GRAY2BGR);} 
      else { bgr_img = buffer.img; }
      cv::resize(bgr_img, bgr_img, cv::Size(pusher_ifo->dstWidth, pusher_ifo->dstHeight));
      const int stride[] = {static_cast<int>(bgr_img.step[0])};
      sws_scale(swsctx, &bgr_img.data, stride, 0, bgr_img.rows, frame->data, frame->linesize);
      frame->pts += av_rescale_q(1, out_codec_ctx->time_base, out_stream->time_base);
      send_ok = write_frame(frame);
      // int64_t time_2 = duration_cast<milliseconds>(system_clock::now().time_since_epoch()).count();
      // speed_times.push_back(time_2 - time_1);
      fps_cnt++;
    }

        
    // 输出fps
    end_time = duration_cast<milliseconds>(system_clock::now().time_since_epoch()).count();
    if (end_time - start_time > 5 * 1e3) {
      push_fps = 1.0 / ((end_time - start_time) / (fps_cnt * 1e3));
      // spdlog::get("logger")->info("pull_fps: {}", pull_fps);
      printf("push_fps %d: %f\n", pusher_ifo->vEncChn, push_fps);

      // std::cout << "speed_time " << pusher_ifo->vEncChn << ":";
      // for (auto& speed_time : speed_times) {std::cout << speed_time << ", ";}
      // std::cout << std::endl;

      fps_cnt = 0;
    }
  }

    av_frame_free(&frame);

  release_ffmpeg();
  stop_flag = common::CameraClosed;
  return 0;

}


// Encode one frame and mux the resulting packet into the output.
//
// Returns 0 on success (including the benign "encoder needs more input"
// case), or a negative AVERROR code on failure. The old version always
// returned 0, which made the caller's error branch (send_ok < 0) dead code.
int CUStreamPusher::write_frame(AVFrame *frame) {
  AVPacket* pkt = av_packet_alloc();
  if (!pkt) {
    return AVERROR(ENOMEM);
  }

  int ret = avcodec_send_frame(out_codec_ctx, frame);
  if (ret < 0) {
    spdlog::get("logger")->info("ERROR. CUStreamPusher::write_frame. sending frame to codec context!");
  } else {
    ret = avcodec_receive_packet(out_codec_ctx, pkt);
    if (ret == AVERROR(EAGAIN)) {
      // Encoder is still buffering input; no packet yet — not an error.
      ret = 0;
    } else if (ret < 0) {
      spdlog::get("logger")->info("ERROR. CUStreamPusher::write_frame. receiving packet from codec context!");
    } else {
      ret = av_interleaved_write_frame(ofmt_ctx, pkt);
    }
  }

  av_packet_free(&pkt);  // also unrefs the packet
  return ret;
}


// Request the push loop to stop and block until start_push() confirms
// shutdown by setting stop_flag to CameraClosed. No-op if the loop is not
// running. Always returns 0.
int CUStreamPusher::stop_push() {
  if (stop_flag != common::CameraGrabing) {
    return 0;  // push loop not active — nothing to stop
  }

  // Signal the loop to exit, then poll until it acknowledges.
  stop_flag = common::CameraOpened;
  while (stop_flag != common::CameraClosed) {
    spdlog::get("logger")->info("CUStreamPusher::stop_push stop_flag: {} ", stop_flag);
    std::this_thread::sleep_for(milliseconds(50));
  }

  return 0;
}


}