/**
 * @file: main.cpp
 * @author: Lynxi SDK Team
 * @brief
 * @version: 1.0
 * @date 2022-10-25
 *
 * Copyright:
 * © 2018 北京灵汐科技有限公司 版权所有。
 * 注意：以下内容均为北京灵汐科技有限公司原创，未经本公司允许，不得转载，否则将视为侵权；对于不遵守此声明或者其他违法使用以下内容者，本公司依法保留追究权。
 * © 2018 Lynxi Technologies Co., Ltd. All rights reserved.
 * NOTICE: All information contained here is, and remains the property of Lynxi.
 * This file can not be copied or distributed without the permission of Lynxi
 * Technologies Co., Ltd.
 */

extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/avutil.h>
#include <libavutil/hwcontext.h>
#include <libavutil/imgutils.h>
#include <libavutil/samplefmt.h>
#include <libavutil/timestamp.h>
#include <libswscale/swscale.h>
}

#include <arpa/inet.h>
#include <lyn_api.h>
#include <unistd.h>

#include <algorithm>
#include <atomic>
#include <chrono>
#include <cstdio>
#include <cstring>
#include <fstream>
#include <iostream>
#include <memory>
#include <thread>
#include <vector>

#include "CLI/CLI11.hpp" // CLI11 command-line parser
#include "blockQueue.hpp"
#include "bufferPool.hpp"
#include "imageEncoder.h"
#include "util.hpp"

#define TESTMODE 0
#define LYNDEMUX 0

// Cross-channel statistics shared by every decoder instance.
std::atomic_int g_totalSendNum(0);   // packets sent to the hardware decoder
std::atomic_int g_totalRecvNum(0);   // decoded frames received back
std::atomic_int g_runningDecoders(0); // decoders that have not yet hit frame 100
// Use the public std::chrono alias instead of the libstdc++-internal
// _V2 namespace, which is an implementation detail and non-portable.
std::chrono::system_clock::time_point g_start; // first frame received
std::chrono::system_clock::time_point g_end;   // last decoder finished

// Per-channel decoder configuration, filled from the command line in
// argsParser() and passed to VideoDecoder::Run().
struct DecoderAttr
{
  std::string url;                            // input file / stream URL (-i)
  lynPixelFormat_t outFmt = LYN_PIX_FMT_NV12; // decoder output pixel format
  lynScale_t scale = SCALE_NONE;              // hardware scaling mode
  std::string outputPath;                     // directory for YUV dumps (-o)
  int outnumMin = 1;                          // first frame index to dump (-n)
  int outnumMax = 10;                         // last frame index to dump (-n)
  int chanId = 0;                             // channel id, used in dump names
  uint32_t repeatNum = 1;                     // repeat count (-r)
  uint32_t deviceId = 0;                      // lynxi device id (-d)
  std::string h264name;                       // optional raw h264 dump file
};

// Context handed to the receive-stream callback for one decoded frame;
// allocated per frame in RecvThreadFunc() and deleted inside the callback.
struct RecvCbData
{
  lynFrame_t frame;                      // frame descriptor given to the decoder
  std::shared_ptr<BufferPool> framePool; // pool that owns frame.data
  int recvNum = 0;                       // 1-based frame index (-1 == EOS)
  DecoderAttr *attr;                     // owning channel's attributes
};

// H.264 NAL unit types (low 5 bits of the first byte after the start code).
enum
{
  NAL_SLICE = 1,
  NAL_DPA = 2,
  NAL_DPB = 3,
  NAL_DPC = 4,
  NAL_IDR_SLICE = 5,
  NAL_SEI = 6,
  NAL_SPS = 7,
  NAL_PPS = 8,
};
// Annex-B start code that precedes each NAL unit.
const unsigned char NAL_HEADER[4] = {0x00, 0x00, 0x00, 0x01};

// Decodes one video channel: demuxes the input with FFmpeg, feeds packets to
// the Lynxi hardware decoder, and receives frames on a dedicated thread.
class VideoDecoder
{
public:
  // Open attr.url, start the receive thread, and pump packets into the
  // hardware decoder until EOF or the frame limit is reached.
  void Run(DecoderAttr attr);
  ~VideoDecoder();
  // Watchdog arm time (seconds since epoch) read by the FFmpeg interrupt
  // callback; 0 means the watchdog is disarmed.
  inline void set_interrupt_time(int64_t t) { interrupt_time = t; }
  inline int64_t get_interrupt_time() const { return interrupt_time; }
  // Whether a blocking FFmpeg call was aborted by the watchdog.
  inline void set_interrupt_flag(bool flag) { is_interrupt = flag; }
  inline bool get_interrupt_flag() const { return is_interrupt; }

private:
  void RecvThreadFunc();             // receive-thread body
  int open(int transport);           // open URL + decoder; 0 on success
  int testSend(int &sendNum);        // read & send one video packet
  void close();                      // release FFmpeg/decoder resources
  bool check_sps_pps(AVPacket &pkt); // true if SPS/PPS must be prepended

  DecoderAttr m_attr;
  lynContext_t m_ctx = nullptr;
  std::thread m_recvThread;
  lynDemuxHandle_t m_demuxHdl = nullptr;
  lynCodecPara_t m_codecPara;
  lynVdecHandle_t m_vdecHdl = nullptr;
  lynVdecAttr_t m_vdecAttr;
  lynVdecOutInfo_t m_vdecOutInfo;
  BlockQueue<int> m_sendQueue; // send side -> recv thread frame tickets
  int m_sendNum = 0;

  int64_t interrupt_time{0}; // watchdog arm time in seconds (0 = disarmed)
  bool is_interrupt{false};  // set when a blocking call was interrupted
  AVFrame *frame = nullptr;
  AVDictionary *options = nullptr;
  AVFormatContext *fmt_ctx = nullptr;
  int video_stream_idx = -1;
  std::string m_url;
  std::string m_codec;
  int m_width;
  int m_height;
  AVStream *stream = nullptr;
  AVBSFContext *m_av_bsf_context =
      nullptr; // bitstream filter (mp4 -> Annex-B): rewrites packet headers
               // so the hardware decoder can consume them
  FILE *h264_fd = NULL;
  std::string h264_filename;
};

namespace
{
  // Abort a blocking av_read_frame/avformat_open_input after this many
  // seconds. constexpr: the limit is a compile-time constant, not mutable
  // global state.
  constexpr int MAX_INTERRUPT_TIME = 10; // 10s

  // Current wall-clock time in whole seconds since the Unix epoch.
  inline int64_t get_timestamp_sec()
  {
    return std::chrono::time_point_cast<std::chrono::seconds>(
               std::chrono::system_clock::now())
        .time_since_epoch()
        .count();
  }

  // FFmpeg AVIOInterruptCallback: returns 1 (abort the blocking call) once
  // the armed watchdog has run for MAX_INTERRUPT_TIME seconds, 0 otherwise.
  // `ctx` is the owning VideoDecoder.
  int interrupt_callback(void *ctx)
  {
    VideoDecoder *ff = (VideoDecoder *)ctx;
    if (NULL == ff)
      return 0;

    int64_t interrupt_time = ff->get_interrupt_time();
    if (interrupt_time == 0)
    {
      return 0; // watchdog disarmed
    }

    int64_t interrupt_sec = get_timestamp_sec() - interrupt_time;
    if (interrupt_sec >= MAX_INTERRUPT_TIME)
    {
      ff->set_interrupt_flag(true);
      printf("interrupt times more than 10s, quit!");
      return 1;
    }

    return 0;
  }
} // namespace

/**
 * @brief Normalize an H.264 packet to Annex-B and report whether SPS/PPS
 *        must be prepended before sending it to the decoder.
 *
 * Length-prefixed (AVC1) packets are rewritten in place: each 4-byte
 * big-endian NAL length prefix is replaced with the 00 00 00 01 start code.
 *
 * @param  pkt packet to inspect; data may be modified in place
 * @return true when this is an H.264 key frame that does not already start
 *         with SPS/PPS, so the caller must prepend the stream extradata
 */
bool VideoDecoder::check_sps_pps(AVPacket &pkt)
{
  bool need_sps_pps = false;
  if (stream->codecpar->codec_id == AV_CODEC_ID_H264 && pkt.size >= 4)
  {
    if (!((pkt.data[0] == NAL_HEADER[0]) && (pkt.data[1] == NAL_HEADER[1]) &&
          (pkt.data[2] == NAL_HEADER[2]) && (pkt.data[3] == NAL_HEADER[3])))
    {
      // AVC1: replace every NAL length prefix with a start code; one frame
      // may contain several NALs. Read the 4-byte length via memcpy — the
      // previous `*(long *)p` load was 8 bytes wide on LP64 and unaligned.
      uint8_t *p = pkt.data;
      while (p + 4 <= pkt.data + pkt.size)
      {
        uint32_t len;
        memcpy(&len, p, sizeof(len));
        len = ntohl(len);
        memcpy(p, NAL_HEADER, 4);
        p += 4 + len;
      }
    }
  }

  // Only key frames need the SPS/PPS check.
  if (pkt.flags & AV_PKT_FLAG_KEY)
  {
    if (stream->codecpar->codec_id == AV_CODEC_ID_H264 && pkt.size >= 5)
    {
      if ((pkt.data[0] == NAL_HEADER[0]) && (pkt.data[1] == NAL_HEADER[1]) &&
          (pkt.data[2] == NAL_HEADER[2]) && (pkt.data[3] == NAL_HEADER[3]))
      {
        // First NAL is neither SPS nor PPS -> extradata must be prepended.
        if ((pkt.data[4] & 0x1F) != NAL_SPS &&
            (pkt.data[4] & 0x1F) != NAL_PPS)
        {
          need_sps_pps = true;
        }
      }
    }
    else if (stream->codecpar->codec_id == AV_CODEC_ID_H265)
    {
      // TODO H265 support
    }
  }

  return need_sps_pps;
}

void VideoDecoder::close()
{
  printf("RtspPlay::close start.");
  if (m_vdecHdl != nullptr)
  {
    // m_recvThread.join();

    if (LYNDEMUX)
    {
      CHECK_ERR(lynDemuxClose(m_demuxHdl));
    }
    CHECK_ERR(lynVdecClose(m_vdecHdl));
  }

  if (frame)
  {
    av_frame_free(&frame);
    frame = nullptr;
  }

  if (fmt_ctx)
  {
    avformat_close_input(&fmt_ctx);
    avformat_free_context(fmt_ctx);
    fmt_ctx = nullptr;
  }

  if (options)
  {
    av_dict_free(&options);
    options = nullptr;
  }

  avformat_network_deinit();

  printf("RtspPlay::close end.");
}

int VideoDecoder::open(int transport)
{
  // close();

  if (!h264_filename.empty())
  {
    h264_fd = fopen(h264_filename.c_str(), "wb");
    if (!h264_fd)
    {
      printf("fopen %s failed\n", h264_filename.c_str());
      return -1;
    }
  }

  CHECK_ERR(lynSetCurrentContext(m_ctx));
  lynVdecAttr_t m_vdecAttr;
  // 4 打开解码器
  m_vdecAttr.codecId = LYN_CODEC_ID_H264;
  m_vdecAttr.outputFmt = LYN_PIX_FMT_NV12;
  m_vdecAttr.scale = SCALE_NONE;
  m_vdecHdl = nullptr;
  CHECK_ERR(lynVdecOpen(&m_vdecHdl, &m_vdecAttr));

  av_register_all();
  if (avformat_network_init() != 0)
  {
    std::cout << "Failed in avformat_network_init" << std::endl;
    return -1;
  }

  // av_dict_set(&options, "stimeout", "5000000", 0);
  // av_dict_set(&options, "buffer_size", "8192000", 0);
  // av_dict_set(&options, "rtbufsize", "8192000", 0);
  // av_dict_set(&options, "max_delay", "500000", 0);
  // av_dict_set(&options, "threads", "auto", 0);

  // if (m_url.compare(0, 4, "rtsp") == 0) {
  //   if (transport == 0)  // udp
  //   {
  //     std::cout << "set transport is udp" << std::endl;
  //     av_dict_set(&options, "rtsp_transport", "udp", 0);
  //   } else {
  //     std::cout << "set transport is tcp" << std::endl;
  //     av_dict_set(&options, "rtsp_transport", "tcp", 0);
  //   }
  // } else if (m_url.compare(0, 4, "http") == 0) {
  //   av_dict_set(&options, "timeout", "20000000", 0);
  //   av_dict_set(&options, "max_reload", "5", 0);
  // }

  set_interrupt_time(get_timestamp_sec());
  int net = avformat_open_input(&fmt_ctx, m_url.c_str(), nullptr, nullptr);
  set_interrupt_time(0);

  if (net < 0)
  {
    // 打开流失败
    close();
    printf("error, read_frames error, couldn't open input stream, rtsp=%s",
           m_url.c_str());
    return -1;
  }

  // fmt_ctx->interrupt_callback.callback = interrupt_callback;
  // fmt_ctx->interrupt_callback.opaque = this;

  if (avformat_find_stream_info(fmt_ctx, nullptr) < 0)
  {
    // 找不到流信息
    close();
    printf("error, read_frames error, couldn't find stream info, rtsp=%s",
           m_url.c_str());
    return -1;
  }

  // Find the first stream
  for (size_t i = 0; i < fmt_ctx->nb_streams; i++)
  {
    if (fmt_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
    {
      video_stream_idx = i;
    }
  }

  if (video_stream_idx == -1)
  {
    printf("Failed to find video stream");
    return -1;
  }

  stream = fmt_ctx->streams[video_stream_idx];
  AVCodecParameters *p_codec_ctx_orig =
      fmt_ctx->streams[video_stream_idx]->codecpar;
  std::transform(m_codec.begin(), m_codec.end(), m_codec.begin(),
                 ::tolower); // not use std::tolower please!

  if (p_codec_ctx_orig->codec_id == AV_CODEC_ID_H264)
  {
    printf("format is h264 \n");
    m_codec = "h264";
  }
  else if (p_codec_ctx_orig->codec_id == AV_CODEC_ID_HEVC)
  {
    printf("format is h265 \n ");
    m_codec = "hevc";
  }
  else
  {
    printf("format is not h264, h265. \n ");
    return -1;
  }

  m_width = p_codec_ctx_orig->width;
  m_height = p_codec_ctx_orig->height;

  printf("width is %d, height is %d", m_width, m_height);

  if (p_codec_ctx_orig->codec_id == AV_CODEC_ID_H264 ||
      p_codec_ctx_orig->codec_id == AV_CODEC_ID_HEVC)
  {
    AVBitStreamFilter *av_bitstream_filter = nullptr;
    if (p_codec_ctx_orig->codec_id == AV_CODEC_ID_H264)
    {
      av_bitstream_filter = const_cast<AVBitStreamFilter *>(
          av_bsf_get_by_name("h264_mp4toannexb"));
    }
    else
    {
      av_bitstream_filter = const_cast<AVBitStreamFilter *>(
          av_bsf_get_by_name("hevc_mp4toannexb"));
    }

    av_bsf_alloc(av_bitstream_filter, &m_av_bsf_context);
    avcodec_parameters_copy(m_av_bsf_context->par_in, p_codec_ctx_orig);
    av_bsf_init(m_av_bsf_context);
  }

  printf("URL %s open success. Resolution is %d x %d", m_url.c_str(), m_width,
         m_height);

  av_dict_free(&options);
  return 0;
}

/**
 * @brief Run one decode channel: open the input and decoder, start the
 *        receive thread, then pump packets until EOF or the send limit.
 *
 * @param  attr channel configuration (URL, device id, dump range, ...)
 */
void VideoDecoder::Run(DecoderAttr attr)
{
  m_attr = attr;
  // 1 Create the device context for this channel.
  CHECK_ERR(lynCreateContext(&m_ctx, attr.deviceId));
  CHECK_ERR(lynRegisterErrorHandler(StreamErrorHandler, nullptr));

  m_url = m_attr.url;
  h264_filename = m_attr.h264name;

  // Bail out if the stream or decoder could not be opened; previously the
  // return value was ignored and the send/recv machinery ran on bad state.
  if (open(0) != 0)
  {
    printf("open %s failed\n", m_url.c_str());
    return;
  }

  // 7 Start the receive thread.
  m_recvThread = std::thread(&VideoDecoder::RecvThreadFunc, this);

  // Send packets until testSend reports an error/EOF or 100 attempts pass.
  int testcount = 0;
  int sendNum = 0;
  while (true)
  {
    testcount++;
    int getsendI = testSend(sendNum);
    if (testcount >= 100)
    {
      std::cout << "testcount >=100" << std::endl;
      break;
    }

    if (getsendI < 0)
    {
      std::cout << "getsendI=" << getsendI << " break sending; " << std::endl;
      break;
    }
  }
}

/**
 * @brief Read packets from the input until one video packet has been sent to
 *        the hardware decoder (or the input fails).
 *
 * @param  sendNum running packet counter; incremented and published on
 *                 m_sendQueue so the receive thread issues a matching recv
 * @return expected decoded frame size in bytes (w*h*3/2) on success, 0 if
 *         fmt_ctx is gone, or -10 when av_read_frame fails (EOF/error)
 */
int VideoDecoder::testSend(int &sendNum)
{
  unsigned int actual_size = 0;
  int modeFlag = 1;
  // static int sendNum = 0;

  // 6.1 Bind this thread to the device context.
  CHECK_ERR(lynSetCurrentContext(m_ctx));

  // 6.2 Create the decoder send stream.
  lynStream_t vdecSendStream = nullptr;
  CHECK_ERR(lynCreateStream(&vdecSendStream));
  while (1)
  {
    AVPacket packet;
    av_init_packet(&packet);
    if (fmt_ctx == nullptr)
    {
      av_packet_unref(&packet);
      return actual_size;
    }
    set_interrupt_flag(false);
    set_interrupt_time(get_timestamp_sec()); // arm the read watchdog
    int ret = av_read_frame(fmt_ctx, &packet);
    set_interrupt_time(0); // disarm
    if (ret < 0)
    {
      char errorbuf[1024] = {0};
      av_strerror(ret, errorbuf, sizeof(errorbuf));
      printf("av read frame failure: %s", errorbuf);
      if (get_interrupt_flag())
      {
        ret = (int)AVERROR_EOF;
      }
      av_packet_unref(&packet);
      ret = -10;
      return ret;
    }

    int flags = 0;
    // Run the packet through the mp4 -> Annex-B bitstream filter.
    int ret1 = av_bsf_send_packet(m_av_bsf_context, &packet);
    if (ret1 < 0)
    {
      printf("av_bsf_send_packet failed");
      break;
    }
    flags = av_bsf_receive_packet(m_av_bsf_context, &packet);

    if (packet.stream_index == video_stream_idx && flags == 0)
    {
      /// Send the packet for decoding.
      bool bEos = false;
      lynPacket_t *pDmxPacket = new lynPacket_t();
      int size_rd = 0;

      // Worst-case buffer: one 4096x4096 NV12 frame.
      pDmxPacket->data = (uint8_t *)malloc(4096 * 4096 * 3 / 2);
      size_rd = packet.size;

      bool need_sps_pps = check_sps_pps(packet);
      if (need_sps_pps)
      {
        // Key frame without SPS/PPS: prepend the stream extradata.
        memcpy(pDmxPacket->data, stream->codecpar->extradata,
               stream->codecpar->extradata_size);
        memcpy(pDmxPacket->data + stream->codecpar->extradata_size, packet.data,
               size_rd);
        size_rd += stream->codecpar->extradata_size;
      }
      else
      {
        memcpy(pDmxPacket->data, packet.data, packet.size);
      }

      pDmxPacket->size = size_rd;
      pDmxPacket->eos = false;
      // Queue the packet on the send stream ...
      CHECK_ERR(lynVdecSendPacketAsync(vdecSendStream, m_vdecHdl, pDmxPacket));
      // ... and free it from a stream callback once the send completed.
      CHECK_ERR(lynStreamAddCallback(vdecSendStream, [](void *userData) -> lynError_t
                                          {
                                            lynError_t lRet = 0;
                                            lynPacket_t *pDmxPacket =
                                                (lynPacket_t *)userData;
                                            if (!pDmxPacket->eos) {
                                              free(pDmxPacket->data);
                                            }
                                            delete pDmxPacket;
                                            return lRet; }, pDmxPacket));

      CHECK_ERR(lynSynchronizeStream(vdecSendStream));
      g_totalSendNum++;

      if (!bEos)
      {
        m_sendQueue.put(++sendNum); // notify the receive thread
      }
      else
      {
        m_sendQueue.put(-1); // EOS marker
      }
      lynSynchronizeStream(vdecSendStream);
      actual_size = m_width * m_height * 3 / 2;

      av_packet_unref(&packet);
      break;
    }
    else
    {
      // Not the video stream, or the filter has no output yet.
      av_packet_unref(&packet);
    }

    continue;
  }
  CHECK_ERR(lynSynchronizeStream(vdecSendStream));
  CHECK_ERR(lynDestroyStream(vdecSendStream));
  return actual_size;
}

/**
 * @brief Receive-thread worker: pulls decoded frames from the decoder and
 *        optionally dumps a configured range of frames to .yuv files.
 */
void VideoDecoder::RecvThreadFunc()
{
  // 7.1 Bind this thread to the device context.
  CHECK_ERR(lynSetCurrentContext(m_ctx));

  // 7.2 Create the decoder receive stream.
  lynStream_t recvStream = nullptr;
  CHECK_ERR(lynCreateStream(&recvStream));

  int recvNum = 0;

  // 7.3 Create the frame buffer pool (NV12: w*h*3/2 bytes per frame).
  int bufsize = m_width * m_height * 3 / 2;
  auto framePool = std::make_shared<BufferPool>(bufsize, 5);
  std::cout << "start RecvThreadFunc" << std::endl;
  do
  {
    // 7.4 Wait until the send side reports a pending frame (-1 == EOS).
    m_sendQueue.take(recvNum);
    auto cbData = new RecvCbData;
    cbData->frame.eos = recvNum < 0 ? true : false;
    cbData->frame.data = (uint8_t *)framePool->Pop();
    cbData->frame.size = bufsize;
    cbData->framePool = framePool;
    cbData->recvNum = recvNum;
    cbData->attr = &m_attr;
    CHECK_ERR(lynVdecRecvFrameAsync(recvStream, m_vdecHdl, &cbData->frame));

    // 7.5 After the receive completes, dump the frame if it is in the
    //     requested range and return the buffer to the pool.
    lynStreamAddCallback(
        recvStream,
        [](void *data) -> int
        {
          RecvCbData *cb = (RecvCbData *)data;
          if (cb->recvNum > 0)
          {
            g_totalRecvNum++;
            if (g_totalRecvNum == 1)
            {
              g_start = system_clock::now(); // first frame -> start the clock
            }
          }

          if (cb->recvNum >= 100)
          {
            g_runningDecoders--;
            if (g_runningDecoders == 0)
            {
              g_end = system_clock::now(); // last decoder done -> stop clock
            }
          }
          if (!cb->attr->outputPath.empty() &&
              cb->recvNum >= cb->attr->outnumMin &&
              cb->recvNum <= cb->attr->outnumMax)
          {
            uint8_t *hostBuf = (uint8_t *)malloc(cb->frame.size);
            CHECK_ERR(lynMemcpy(hostBuf, cb->frame.data, cb->frame.size,
                                ServerToClient));
            cb->framePool->Push(cb->frame.data);
            char dumpPath[128];
            // snprintf: outputPath is user-supplied, so the old sprintf
            // could overflow the fixed-size buffer.
            snprintf(dumpPath, sizeof(dumpPath), "%sdump_%d_%04d.yuv",
                     cb->attr->outputPath.c_str(), cb->attr->chanId,
                     cb->recvNum);
            CHECK_ERR(DumpFile(hostBuf, cb->frame.size, dumpPath));

            free(hostBuf);
          }
          else
          {
            cb->framePool->Push(cb->frame.data);
          }
          delete cb;
          return 0;
        },
        cbData);

    if (recvNum == 100)
    {
      std::cout << std::endl;
      break;
    }
  } while (recvNum > 0);

  // 7.6 Drain and destroy the receive stream.
  std::cout << "recv decode finish , before synchron" << std::endl;
  CHECK_ERR(lynSynchronizeStream(recvStream));
  CHECK_ERR(lynDestroyStream(recvStream));
  std::cout << "recv decode finish" << std::endl;
  std::cout << std::endl;
}

/**
 * @brief Destructor: join the receive thread, close the hardware decoder,
 *        and release the FFmpeg resources allocated in open(). close() is
 *        only called on failure paths, so a successful run reaches here
 *        with the bitstream filter and format context still live — the old
 *        destructor leaked them.
 */
VideoDecoder::~VideoDecoder()
{
  if (h264_fd)
  {
    fclose(h264_fd);
    h264_fd = NULL;
  }

  if (m_recvThread.joinable())
  {
    m_recvThread.join();
  }

  if (m_vdecHdl != nullptr)
  {
    CHECK_ERR(lynVdecClose(m_vdecHdl));
    m_vdecHdl = nullptr;
  }

  if (m_av_bsf_context)
  {
    av_bsf_free(&m_av_bsf_context);
  }
  if (frame)
  {
    av_frame_free(&frame);
  }
  if (fmt_ctx)
  {
    avformat_close_input(&fmt_ctx); // frees and nulls fmt_ctx
  }
  if (options)
  {
    av_dict_free(&options);
  }

  if (m_ctx != nullptr)
  {
    CHECK_ERR(lynDestroyContext(m_ctx));
  }
}

// Number of parallel decode channels; set via the -c command-line option.
uint32_t channelNum = 1;

/**
 * @brief Parse the command line into the decoder attributes.
 *
 * @param  argc/argv process arguments
 * @param  attr      [out] decoder configuration
 * @return true when the arguments are valid, false otherwise
 */
bool argsParser(int argc, char *argv[], DecoderAttr &attr)
{
  std::string argFrameNum;

  CLI::App app;
  // Register the options.
  app.add_option("-i", attr.url, "input MP4 video file path")->default_val("../data/video_decode_ffmpeg/MOT16-09.mp4");
  app.add_option("-o", attr.outputPath, "output file path")->default_val("../data/");
  app.add_option("-n", argFrameNum, "output frame num, eg: num_min,num_max")->default_val("1,10");
  app.add_option("-d", attr.deviceId, "lynxi device id")->default_val(0);
  app.add_option("-c", channelNum, "process channel count")->default_val(1);
  app.add_option("-r", attr.repeatNum, "repeat count")->default_val(1);

  // Parse the command line.
  CLI11_PARSE(app, argc, argv);

  if (attr.url.empty())
  {
    std::cout << "input file miss" << std::endl;
    return false;
  }

  // Apply "-n min,max" to the dump range; previously the parsed string was
  // silently discarded and the defaults were always used.
  if (!argFrameNum.empty() &&
      sscanf(argFrameNum.c_str(), "%d,%d", &attr.outnumMin,
             &attr.outnumMax) != 2)
  {
    std::cout << "args error" << std::endl;
    return false;
  }

  if (attr.outnumMin >= attr.outnumMax || attr.outnumMin == 0 ||
      attr.outnumMax == 0)
  {
    std::cout << "args error" << std::endl;
    return false;
  }

  return true;
}

/**
 * @brief Entry point: parse arguments, start a frame-rate reporter thread,
 *        run `channelNum` decoders sequentially, then print the summary.
 */
int main(int argc, char *argv[])
{
  DecoderAttr attr;
  if (!argsParser(argc, argv, attr))
  {
    return 0;
  }

  std::vector<std::shared_ptr<VideoDecoder>> decoders;
  // atomic: written by main, read by the stats thread — a plain bool here
  // was a data race.
  std::atomic_bool stop(false);

  // Frame-rate reporter.
  auto showThread = std::thread([&]()
                                {
    while (!stop.load()) {
      std::this_thread::sleep_for(std::chrono::milliseconds(100));
      printf("send:%d / recv %d \r", g_totalSendNum.load(),
             g_totalRecvNum.load());
      fflush(stdout);
    }
    std::chrono::duration<double> runtime(g_end - g_start);
    double secs = runtime.count();
    // Guard the division: no frames received would give secs == 0.
    printf("totalnum %d time %.2f, rate %.2f fps\n", g_totalRecvNum.load(),
           secs, secs > 0 ? g_totalRecvNum.load() / secs : 0.0); });

  // Create one decoder per channel.
  for (uint32_t i = 0; i < channelNum; i++)
  {
    auto decoder = std::make_shared<VideoDecoder>();
    decoders.push_back(decoder);
    g_runningDecoders++;
    attr.chanId = i;
    decoder->Run(attr);
  }

  decoders.clear();
  std::cout << "decoders clear" << std::endl;
  stop = true;
  showThread.join();

  return 0;
}