﻿#pragma execution_character_set("utf-8")

#include <QDebug>

#include "ffmpeg.h"

const int ERROR_BUFFER_SIZE = 1024;

namespace {
// Convert an AVRational to double, guarding against a zero numerator or
// denominator (returns 0 instead of dividing by zero).
// Note: `static` is redundant inside an anonymous namespace, so it is dropped.
double r2d(AVRational r) {
  return r.num == 0 || r.den == 0 ? 0 : (double)r.num / (double)r.den;
}

// Log a human-readable description of an FFmpeg error code via qDebug.
// `ret` is a (negative) AVERROR value as returned by the FFmpeg APIs;
// a plain int is passed by value rather than by const reference.
void outputAVError(int ret) {
  char buffer[ERROR_BUFFER_SIZE] = {0};
  av_strerror(ret, buffer, sizeof(buffer));
  qDebug() << __FILE__ << "-" << __LINE__ << ":" << buffer << "--" << ret;
}
} // namespace

namespace HeaderMedia {

// Construct the wrapper in the "not playing" state and register the
// FFmpeg muxers/demuxers and codecs (required once per process on the
// older FFmpeg versions that still provide av_register_all).
FFmpeg::FFmpeg() : xIsPlay(false) {
  qDebug() << "XPlay";
  av_register_all();
}

// Find and open the decoder for the given codec context.
// Returns true on success; false if `acc` is null, no decoder is
// registered for its codec_id, or the decoder fails to open.
bool FFmpeg::openCodec(AVCodecContext *acc) {
  if (!acc) {
    qDebug() << "AVCodecContext is null " << __LINE__;
    // BUG FIX: the original only logged and fell through, dereferencing
    // the null pointer right below.
    return false;
  }

  // Look up the decoder registered for this stream's codec id.
  AVCodec *codec = avcodec_find_decoder(acc->codec_id);
  if (!codec) {
    qDebug() << "无法找到解码器";
    return false;
  }

  // Open the decoder.
  int avReturn = avcodec_open2(acc, codec, nullptr);
  if (avReturn != 0) {
    qDebug() << "解码器打开失败";
    outputAVError(avReturn);
    return false;
  }

  return true;
}

// Open a media file, probe its streams, and open a decoder for the video
// and audio streams. Records the total duration (ms), the stream indices,
// and the audio parameters (sample rate, channels, sample size).
// Returns true on success, false on any failure. On failure after the
// input was opened, the context is left to be released by the next
// open()/close() call.
bool FFmpeg::open(const QString &videoPath) {
  close();
  std::lock_guard<std::mutex> _(xMtx);
  int avReturn;

  // Open the media file; fills xAVForContext with the container info.
  avReturn = avformat_open_input(&xAVForContext, videoPath.toStdString().c_str(), 0, 0);
  if (avReturn != 0) {
    qDebug() << "打开视频文件 " << videoPath << "失败";
    outputAVError(avReturn);
    return false;
  }

  // Total duration in milliseconds. BUG FIX: multiply before dividing —
  // the original `(duration / AV_TIME_BASE) * 1000` truncated away the
  // sub-second part of the duration.
  xTotalMs = xAVForContext->duration * 1000 / AV_TIME_BASE;

  // Probe the streams contained in the file.
  avReturn = avformat_find_stream_info(xAVForContext, nullptr);
  if (avReturn < 0) {
    qDebug() << "获取文件中的流信息失败";
    outputAVError(avReturn);
    return false;
  }

  // Dump the container/stream info to the log.
  av_dump_format(xAVForContext, -1, videoPath.toStdString().c_str(), 0);

  // Walk the streams and open a decoder for the video and audio streams.
  // nb_streams is typically 2 (one video + one audio stream).
  // `unsigned int` matches the type of nb_streams (avoids a sign-compare).
  for (unsigned int i = 0; i < xAVForContext->nb_streams; i++) {
    // Per-stream codec context (deprecated accessor on newer FFmpeg).
    AVCodecContext *acc = xAVForContext->streams[i]->codec;
    // Video stream
    if (acc->codec_type == AVMEDIA_TYPE_VIDEO) {
      xVideoStream = i;
      qDebug() << "xVideoStream = " << xVideoStream;
      if (!openCodec(acc)) {
        qDebug() << "处理视频流解码器失败";
        return false;
      }
    } else if (acc->codec_type == AVMEDIA_TYPE_AUDIO) { // audio stream
      xAudioStream = i;
      qDebug() << "xAudioStream = " << xAudioStream;
      if (!openCodec(acc)) {
        qDebug() << "处理音频流解码器失败";
        return false;
      }

      // Record the audio parameters.
      // Sample rate
      xSampleRate = acc->sample_rate;
      // Channel count
      xChannel = acc->channels;
      switch (acc->sample_fmt) {
      case AV_SAMPLE_FMT_S16:
        xSampleSize = 16;
        break;
      case AV_SAMPLE_FMT_S32:
        xSampleSize = 32;
        break;
      default:
        break;
      }
    }
  }

  // BUG FIX: the original returned false here, so every successful open
  // was reported to the caller as a failure.
  return true;
}

void FFmpeg::close() {
  std::lock_guard<std::mutex> _(xMtx);
  if (xAVForContext) {
    avformat_close_input(&xAVForContext);
  }

  if (xYUV) {
    av_frame_free(&xYUV);
  }

  if (xSws) {
    sws_freeContext(xSws);
    xSws = nullptr;
  }

  if (xSwrContext) {
    swr_free(&xSwrContext);
  }
}

// Seek to a relative position in the video. `pos` is a fraction of the
// video stream's duration (assumed to be in [0, 1] — TODO confirm with
// callers). Seeks backward to the nearest key frame and flushes any
// buffered decoder state. Returns false if no file is open or the seek
// fails.
bool FFmpeg::seek(float pos) {
  std::lock_guard<std::mutex> _(xMtx);
  if (!xAVForContext) {
    return false;
  }

  // Translate the fractional position into a timestamp expressed in the
  // video stream's time-base units.
  const int64_t stamp =
      pos * xAVForContext->streams[xVideoStream]->duration;

  // Backward | key frame
  const int ret = av_seek_frame(xAVForContext, xVideoStream, stamp,
                                AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_FRAME);
  if (ret < 0) {
    qDebug() << "seek 关键帧失败";
    outputAVError(ret);
    return false;
  }

  // Drop any frames buffered from before the seek point.
  avcodec_flush_buffers(xAVForContext->streams[xVideoStream]->codec);

  // Key frames, P-frames, B-frames
  return true;
}

// Return the presentation timestamp of a packet in milliseconds,
// or -1 if the packet is null or no file is open.
int FFmpeg::getPts(const AVPacket *pkt) {
  if (!pkt) {
    qDebug() << "AVPacket is null " << __LINE__;
    // BUG FIX: the original only logged and then dereferenced the null
    // packet below.
    return -1;
  }
  std::lock_guard<std::mutex> _(xMtx);
  if (!xAVForContext) {
    return -1;
  }

  // Milliseconds: pts (stream time-base units) * time_base (seconds per
  // unit) * 1000.
  int pts = (pkt->pts *
             r2d(xAVForContext->
                 streams[pkt->stream_index]->
             time_base)) * 1000;

  return pts;
}

// Decode one packet into the matching reusable frame (xYUV for video,
// xPCM for audio). Updates the playback clock xPts from audio frames only
// and returns the current xPts in milliseconds; returns 0 on any failure.
int FFmpeg::decode(const AVPacket *pkt) {
  if (!pkt) {
    qDebug() << "AVPacket is null " << __LINE__;
    // BUG FIX: the original only logged and then dereferenced the null
    // packet below.
    return 0;
  }
  std::lock_guard<std::mutex> _(xMtx);
  if (!xAVForContext) {
    return 0;
  }

  // Lazily allocate the reusable output frames (freed in close()).
  if (xYUV == nullptr) {
    xYUV = av_frame_alloc();
  }
  if (xPCM == nullptr) {
    xPCM = av_frame_alloc();
  }

  // Decoding overwrites the frame: pick xPCM for audio packets,
  // xYUV for video packets, based on the packet's stream index.
  AVFrame *frame = xYUV;
  if (pkt->stream_index == xAudioStream) {
    frame = xPCM;
  }

  // Feed the packet to the decoder of its own stream.
  int avReturn = avcodec_send_packet(
              xAVForContext->streams[pkt->stream_index]->codec, pkt);
  if (avReturn != 0) {
    qDebug() << "判断是音频还是视频失败";
    outputAVError(avReturn);
    return 0;
  }

  // Retrieve one decoded frame. NOTE(review): AVERROR(EAGAIN) — the
  // decoder simply needs more input — is treated like a hard error and
  // returns 0; confirm callers tolerate that.
  avReturn = avcodec_receive_frame(
              xAVForContext->streams[pkt->stream_index]->codec,
          frame);
  if (avReturn != 0) {
    qDebug() << "接受视频帧失败";
    outputAVError(avReturn);
    return 0;
  }

  // Frame pts in milliseconds; only audio frames advance the playback
  // clock (the audio stream acts as the master clock).
  int p = (frame->pts *
           r2d(xAVForContext->streams[pkt->stream_index]->time_base)) * 1000;
  if (pkt->stream_index == xAudioStream) {
    this->xPts = p;
  }

  return xPts;
}

// Read the next packet from the opened media file.
// `ok` is set to true only when a packet was successfully read; the
// returned packet is zero-initialized on failure. The caller owns the
// packet's payload on success.
AVPacket FFmpeg::read(bool &ok) {
  AVPacket pkt;
  memset(&pkt, 0, sizeof(pkt));

  std::lock_guard<std::mutex> _(xMtx);
  // Pessimistic default; flipped to true only on a successful read.
  ok = false;
  if (!xAVForContext) {
    return pkt;
  }

  const int ret = av_read_frame(xAVForContext, &pkt);
  if (ret != 0) {
    qDebug() << "读取视频帧失败";
    outputAVError(ret);
    return pkt;
  }

  ok = true;
  return pkt;
}

// Convert the most recently decoded video frame (xYUV) to BGRA pixels,
// scaled to outWidth x outHeight, writing into `out` (caller-provided,
// must hold at least outWidth * outHeight * 4 bytes).
// Returns true only when the conversion produced output.
bool FFmpeg::yuvToRGB(char *out, int outWidth, int outHeight) {
  if (!out) {
    qDebug() << "yuvToRGB is null " << __LINE__;
    // BUG FIX: the original only logged and then handed the null buffer
    // to sws_scale below.
    return false;
  }
  // Conversion requires an opened file and a decoded video frame.
  std::lock_guard<std::mutex> _(xMtx);
  if (!xAVForContext || !xYUV) {
    return false;
  }

  AVCodecContext *videoCtx = xAVForContext->streams[this->xVideoStream]->codec;
  xSws = sws_getCachedContext(xSws, videoCtx->width, videoCtx->height,
                              videoCtx->pix_fmt,     // source pixel format
                              outWidth, outHeight,   // target width/height
                              AV_PIX_FMT_BGRA,       // output pixel format
                              SWS_BICUBIC,           // scaling algorithm
                              nullptr, nullptr, nullptr);
  if (!xSws) {
    return false;
  }

  // Single packed output plane: BGRA = 4 bytes per pixel.
  uint8_t *data[AV_NUM_DATA_POINTERS] = {0};
  data[0] = (uint8_t *)out;
  int linesize[AV_NUM_DATA_POINTERS] = {0};
  // Bytes per output row.
  linesize[0] = outWidth * 4;

  // sws_scale returns the height of the converted slice.
  int h = sws_scale(xSws, xYUV->data, xYUV->linesize, 0,
                    videoCtx->height, data, linesize);

  // BUG FIX: the original ignored the sws_scale result and always
  // reported success, even when nothing was converted.
  return h > 0;
}

int FFmpeg::toPCM(char *out) {
  if (!out) {
    qDebug() << "toPCM is null " << __LINE__;
  }
  std::lock_guard<std::mutex> _(xMtx);
  if (!xAVForContext || !xPCM || !out) {
    return 0;
  }

  AVCodecContext *ctx = xAVForContext->streams[xAudioStream]->codec;
  if (xSwrContext == nullptr) {
    xSwrContext = swr_alloc();
    swr_alloc_set_opts(xSwrContext, ctx->channel_layout,
                       AV_SAMPLE_FMT_S16, ctx->sample_rate,
                       ctx->channels, ctx->sample_fmt,
                       ctx->sample_rate, 0, 0);
    swr_init(xSwrContext);
  }

  uint8_t *data[1];
  data[0] = (uint8_t *)out;
  int len = swr_convert(xSwrContext, data, 10000,
                        (const uint8_t **)xPCM->data,
                        xPCM->nb_samples);
  if (len <= 0) {
    return 0;
  }

  int outSize = av_samples_get_buffer_size(nullptr,
                                           ctx->channels,
                                           xPCM->nb_samples,
                                           AV_SAMPLE_FMT_S16, 0);

  return outSize;
}

}




























