/**
 * FFmpeg muxing workflow:
 *  1. Initialize the network module: avformat_network_init()
 *  2. Allocate the output format context: avformat_alloc_output_context2(ppFormatCtx, pOFormat, formatName, fileName)
 *  3. Open the output file: avio_open2(ppIOContext, url, flags, int_cb, option)
 *      1. s       — the AVIOContext to allocate/initialize
 *      2. url     — the URL/address of the resource
 *      3. flags   — how to open the URL resource
 *      4. int_cb  — protocol interrupt callback
 *      5. option  — protocol-private options
 *  4. Create the video/audio streams: avformat_new_stream(pFormatCtx, nullptr)
 *  5. Initialize the video/audio stream parameters
 * (Steps above: muxer initialization)
 *  6. Write the header: avformat_write_header(pFormatCtx, nullptr)
 *  7. Write video/audio packets: av_interleaved_write_frame(pFormatCtx, pPacket)
 * (Steps above: packet writing)
 *  8. Close the output file: avio_close(ppIOContext)
 *  9. Free the output format context: avformat_free_context(pFormatCtx)
 * (Steps above: teardown)
 */

#include "FFMpegRender.h"

#define LOG_TAG "FFMpegRender"

/**
 * Interrupt callback installed into AVFormatContext::interrupt_callback.
 * FFmpeg polls it during blocking I/O; returning non-zero aborts the
 * operation immediately with AVERROR_EXIT.
 *
 * BUG FIX: the original unconditionally returned 1, which made every
 * blocking call — including the avio_open2() in the constructor — fail
 * instantly. Return 0 ("keep going") by default.
 *
 * TODO(review): `ctx` is the FFMpegRender instance (set as `opaque`);
 * once the class exposes an abort/stop flag, consult it here so that
 * stopRender()/the destructor can break out of a blocked I/O call.
 */
static int FFMpeg_InterruptCb(void *ctx) {
  (void)ctx;  // reserved for a future abort-flag lookup
  return 0;   // 0 = do not interrupt; non-zero would abort the blocking call
}

/**
 * Sets up the FFmpeg muxer: network init, output format context, output
 * AVIO (when the format needs a file), and one video + one audio stream.
 * On any failure it logs and returns early, leaving the object in a state
 * the other methods must guard against (_pFormatCtx may be null).
 */
FFMpegRender::FFMpegRender(RenderParam *pRenderParam) : IRender(pRenderParam) {
  avformat_network_init();

  // BUG FIX: the original declared `const char *url;` UNINITIALIZED and
  // passed the garbage pointer to avformat_alloc_output_context2() and
  // avio_open2() — undefined behavior. Initialize it explicitly.
  // TODO(review): the real output URL presumably lives in pRenderParam;
  // wire it in here once the RenderParam field is confirmed.
  const char *url = nullptr;

  int32_t ret = avformat_alloc_output_context2(&_pFormatCtx, nullptr, getFormatName(), url);
  if (ret < SUCCESS || _pFormatCtx == nullptr) {
    LOG_ERROR("avformat_alloc_output_context2 error. errCode:%d, errStr:%s", ret, av_err2str(ret))
    return;
  }

  // Install the interrupt callback so blocking I/O can be cancelled later.
  _pFormatCtx->interrupt_callback.callback = &FFMpeg_InterruptCb;
  _pFormatCtx->interrupt_callback.opaque = this;

  // Formats flagged AVFMT_NOFILE manage their own I/O; only open a pb
  // context for the others.
  if (!(_pFormatCtx->oformat->flags & AVFMT_NOFILE)) {
    // BUG FIX: muxing only writes to the output; AVIO_FLAG_WRITE replaces
    // the original AVIO_FLAG_READ_WRITE (some protocols reject read-write).
    ret = avio_open2(&_pFormatCtx->pb, url, AVIO_FLAG_WRITE, &_pFormatCtx->interrupt_callback,
                     &_pOptions);
    if (ret < SUCCESS) {
      LOG_ERROR("avio_open2 error. errCode:%d, errStr:%s", ret, av_err2str(ret))
      return;
    }
  }

  // Video stream. avformat_new_stream() can return null on OOM — guard it.
  _pVStream = avformat_new_stream(_pFormatCtx, nullptr);
  if (_pVStream == nullptr) {
    LOG_ERROR("avformat_new_stream failed for video stream")
    return;
  }
  _pVStream->id = static_cast<int>(_pFormatCtx->nb_streams) - 1;
  // BUG FIX: the original called avcodec_parameters_from_context(codecpar,
  // nullptr), which dereferences the null AVCodecContext and crashes.
  // TODO(review): copy the parameters from the real video encoder context
  // once it is available (e.g. when the IEncoder attaches).

  // Audio stream — same guard and the same removed null-context copy.
  _pAStream = avformat_new_stream(_pFormatCtx, nullptr);
  if (_pAStream == nullptr) {
    LOG_ERROR("avformat_new_stream failed for audio stream")
    return;
  }
  _pAStream->id = static_cast<int>(_pFormatCtx->nb_streams) - 1;
  // TODO(review): avcodec_parameters_from_context(_pAStream->codecpar, <audio
  // encoder ctx>) once the encoder context is plumbed through.
}

/**
 * Tears the muxer down: closes the output AVIO context (when one was
 * opened), frees the format context, frees the protocol options
 * dictionary, and de-initializes the network module.
 */
FFMpegRender::~FFMpegRender() noexcept {
  if (_pFormatCtx) {
    // NOTE(review): av_write_trailer() is never called anywhere in this
    // file, so muxed output is left unfinished. It may only run after a
    // SUCCESSFUL avformat_write_header(), which requires a "header written"
    // flag this class does not yet have — TODO: add the flag and call
    // av_write_trailer() here (or in stopRender()) before closing.
    if (!(_pFormatCtx->oformat->flags & AVFMT_NOFILE)) {
      avio_close(_pFormatCtx->pb);
    }

    avformat_free_context(_pFormatCtx);
    _pFormatCtx = nullptr;
  }

  // BUG FIX: the options dictionary handed to avio_open2() was leaked;
  // av_dict_free() is a safe no-op when the pointer is already null.
  av_dict_free(&_pOptions);

  avformat_network_deinit();
}

void FFMpegRender::startRender() {

  int32_t ret = avformat_write_header(_pFormatCtx, nullptr);
  if (ret < SUCCESS) {
    LOG_ERROR("avformat_write_header error. errCode:%d, errStr:%s", ret, av_err2str(ret))
  }
}

// Stops the muxing session.
// TODO: not implemented. NOTE(review): this is the natural place to call
// av_write_trailer() once a "header successfully written" flag exists.
void FFMpegRender::stopRender() {

}

// Resumes a paused muxing session.
// TODO: not implemented — currently a no-op.
void FFMpegRender::resumeRender() {

}

// Pauses the muxing session.
// TODO: not implemented — currently a no-op.
void FFMpegRender::pauseRender() {

}

/**
 * Reports which media types this renderer handles, as a bitmask of
 * MediaType_Video / MediaType_Audio, derived from the streams currently
 * attached to the output format context.
 *
 * @return OR-combined MediaType flags; 0 when no context or no A/V streams.
 */
int32_t FFMpegRender::capacity() {
  int32_t capacity = 0;
  // The constructor can fail and leave _pFormatCtx null — guard it.
  if (_pFormatCtx == nullptr) {
    return capacity;
  }

  // BUG FIX: nb_streams is unsigned; the original `int i` compared
  // signed against unsigned. Use an unsigned index.
  for (unsigned int i = 0; i < _pFormatCtx->nb_streams; ++i) {
    const auto *pCodecParam = _pFormatCtx->streams[i]->codecpar;
    if (pCodecParam->codec_type == AVMEDIA_TYPE_VIDEO) {
      capacity |= MediaType_Video;
    } else if (pCodecParam->codec_type == AVMEDIA_TYPE_AUDIO) {
      capacity |= MediaType_Audio;
    }
  }

  return capacity;
}

/**
 * Receives an encoded packet, rescales its timestamps into the target
 * stream's time base, tags it with the stream index, and writes it through
 * the interleaving muxer. Takes ownership of pPacket and frees it.
 */
void FFMpegRender::onEncodeData(IEncoder *pEncoder, AVPacket *pPacket) {
  // Route the packet to the matching output stream.
  AVStream *pTargetStream =
      (pEncoder->getMediaType() == MediaType_Video) ? _pVStream : _pAStream;

  // NOTE(review): the source time base is hard-coded to 1/1; presumably it
  // should be the encoder's actual time base — confirm against IEncoder.
  const AVRational srcTimeBase = av_make_q(1, 1);
  av_packet_rescale_ts(pPacket, srcTimeBase, pTargetStream->time_base);
  pPacket->stream_index = pTargetStream->index;

  int32_t ret = av_interleaved_write_frame(_pFormatCtx, pPacket);
  if (ret < SUCCESS) {
    LOG_ERROR("av_interleaved_write_frame error. errCode:%d, errStr:%s", ret, av_err2str(ret))
  }

  av_packet_free(&pPacket);
}
