#include "raw2mp4.h"

#include <chrono>
#include <cstring>
#include <functional>
#include <iostream>
#include <thread>

#include <QDateTime>
#include <QDebug>

/**
 * 时间戳基本概念
 * 时间戳：计算的单位不是秒，时间戳的单位采用的是采样频率的倒数，这样做的目的就是为了时间戳单位更精准。比如说一个音频的采样频率为8000Hz，那么我们可以把时间戳单位设为1 / 8000。
 * 时间戳增量：相邻两帧之间的时间差（以时间戳单位为基准）。ORTP库中根据负载类型直接给定了时间戳的单位（音频负载1/8000，视频负载1/90000）。如果采样频率为90000Hz，则由上面讨论可知，时间戳单位为1/90000，我们就假设1秒钟被划分了90000个时间块，那么，如果每秒发送25帧，那么，每一个帧的发送占多少个时间块呢？当然是 90000/25 = 3600。因此，我们根据定义“时间戳增量是发送第二个RTP包相距发送第一个RTP包时的时间间隔”，故时间戳增量应该为3600。如果fps=50，那么pts增量就是1800。本人此处踩坑，设置fps=50，h264编码始终是25.
 * 采样频率： 也叫时钟频率，即每秒钟抽取样本的次数，例如音频的采样率8000Hz，48k Hz等。
 */

/**
 * @brief Construct the converter for a given source frame size.
 *
 * @param width  source frame width in pixels
 * @param height source frame height in pixels
 *
 * The encoded (destination) size is derived from the source size: the
 * width is rounded down to a multiple of 32 and the height to a multiple
 * of 2 (encoder/scaler alignment requirements), both clamped to 1920x1080.
 */
Raw2Mp4Tool::Raw2Mp4Tool(int width, int height)
    : m_srcWidth(width)
    , m_srcHeight(height)
    , m_outContext(nullptr)
    , m_codecContext(nullptr)
    , m_ctx(nullptr)
    , m_yuv(nullptr)
    , m_data(nullptr)
    , m_open(false)
{
    // ffmpeg restricts the encoded frame dimensions.
    // BUG FIX: m_width/m_height used to be assigned only when the input was
    // NOT already aligned, leaving them uninitialized for aligned sizes.
    m_width = width / 32 * 32;
    m_width = m_width > 1920 ? 1920 : m_width;

    m_height = height / 2 * 2;
    m_height = m_height > 1080 ? 1080 : m_height;
}

/**
 * @brief 打开要保存的文件
 *
 * @param file
 * @return true
 * @return false
 */
bool Raw2Mp4Tool::open(std::string file)
{
    if (file.empty() or m_width < 0 or m_height < 0) {
        return false;
    }

    m_fileName = file;

    int fps = 25;

    // 1 创建编码器
    AVCodec *codec = avcodec_find_encoder(AV_CODEC_ID_H264);
    if (!codec) {
        std::cout << " avcodec_find_encoder AV_CODEC_ID_H264 failed!" << std::endl;
        return false;
    }
    // 编码器上下文
    m_codecContext = avcodec_alloc_context3(codec);
    if (!m_codecContext) {
        std::cout << " avcodec_alloc_context3  failed!" << std::endl;
        return false;
    }
    // 设置视频编码相关参数
    // 比特率
    m_codecContext->bit_rate = 400000000;
    m_codecContext->width = m_width;
    m_codecContext->height = m_height;
    // 把1秒钟分成fps个单位
    m_codecContext->time_base = {1, fps};
    m_codecContext->framerate = {fps, 1};
    m_codecContext->gop_size = 50;
    m_codecContext->max_b_frames = 0;
    m_codecContext->pix_fmt = AV_PIX_FMT_YUV420P;
    m_codecContext->codec_id = AV_CODEC_ID_H264;
    m_codecContext->thread_count = 8;

    // 全局的编码信息
    m_codecContext->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
    // h264编码器
    AVDictionary *param = 0;
    if (m_codecContext->codec_id == AV_CODEC_ID_H264) {
        av_dict_set(&param, "preset", "fast", 0);
        av_dict_set(&param, "tune", "zerolatency", 0);
    }

    av_opt_set(m_codecContext->priv_data, "tune", "zerolatency", 0);
    // 打开编码器
    int ret = avcodec_open2(m_codecContext, codec, NULL);
    if (ret < 0) {
        std::cout << " avcodec_open2  failed!" << std::endl;
        return false;
    }
    std::cout << "avcodec_open2 success!" << std::endl;

    // 2 create out context
    avformat_alloc_output_context2(&m_outContext, 0, 0, m_fileName.c_str());

    // 3 add video stream
    AVStream *st = avformat_new_stream(m_outContext, NULL);
    st->id = 0;
    st->codecpar->codec_tag = 0;
    avcodec_parameters_from_context(st->codecpar, m_codecContext);

    std::cout << "===============================================" << std::endl;
    av_dump_format(m_outContext, 0, m_fileName.c_str(), 1);
    std::cout << "===============================================" << std::endl;

    // 4 rgb to yuv
    // 改变视频尺寸
    m_ctx = sws_getCachedContext(m_ctx,
                                 m_srcWidth, m_srcHeight, AV_PIX_FMT_RGBA,
                                 m_width, m_height, AV_PIX_FMT_YUV420P, SWS_BICUBIC,
                                 NULL, NULL, NULL);

    // 输出空间
    m_yuv = av_frame_alloc();
    m_yuv->format = AV_PIX_FMT_YUV420P;
    m_yuv->width = m_width;
    m_yuv->height = m_height;
    // 分配空间
    ret = av_frame_get_buffer(m_yuv, 32);

    if (ret < 0) {
        std::cout << " av_frame_get_buffer  failed!" << std::endl;
        return false;
    }

    // 5 write mp4 head
    ret = avio_open(&m_outContext->pb, m_fileName.c_str(), AVIO_FLAG_WRITE);
    if (ret < 0) {
        std::cout << " avio_open  failed!" << std::endl;
        return false;
    }
    ret = avformat_write_header(m_outContext, NULL);
    if (ret < 0) {
        std::cout << " avformat_write_header  failed!" << std::endl;
        return false;
    }

    m_p = 0;
    m_open.store(true);
    m_thread = std::thread(std::bind(&Raw2Mp4Tool::run, this));
    return true;
}

/**
 * @brief 保存完成进行关闭文件
 *
 */
void Raw2Mp4Tool::close()
{
    m_open.store(false);
    m_thread.join();

    // 写文件尾
    av_write_trailer(m_outContext);
    // 关闭视频输出IO
    avio_close(m_outContext->pb);
    // 清理封装输出上下文
    avformat_free_context(m_outContext);
    // 关闭编码器
    avcodec_close(m_codecContext);
    // 清理编码器上下文
    avcodec_free_context(&m_codecContext);
    // 清理视频重采样上下文
    sws_freeContext(m_ctx);
    m_lock.writeLock();
    if (m_data) {
        delete[] m_data;
        m_data = nullptr;
    }
    m_lock.writeUnlock();
    std::cout << "======================end=========================" << std::endl;
}

/**
 * @brief写入raw数据
 *
 * @param rawData
 * @param len
 */
/**
 * @brief Publish one raw RGBA frame for the encoder thread to pick up.
 *
 * @param rawData pointer to the frame bytes (expected m_srcWidth*m_srcHeight*4,
 *                see the stride used in run() — TODO confirm with callers)
 * @param len     number of bytes to copy
 */
void Raw2Mp4Tool::write(const uint8_t *rawData, int len)
{
    // Robustness: ignore null/empty input instead of new[0]/memcpy from null.
    if (!rawData || len <= 0) {
        return;
    }
    // Copy into a fresh buffer BEFORE taking the lock, so the encoder thread
    // is only blocked for the pointer swap, not for the whole memcpy.
    char *copy = new char[len];
    memcpy(copy, rawData, len);

    m_lock.writeLock();
    delete[] m_data;        // delete[] on nullptr is a no-op
    m_data = copy;
    m_lock.writeUnlock();
}

/**
 * @brief Sleep the calling thread for the given number of milliseconds.
 *
 * @param msec duration in milliseconds (the original parameter was named
 *             "secs", but the value has always been interpreted as ms)
 *
 * Uses std::this_thread::sleep_for instead of the previous select()-based
 * sleep: portable, and immune to the old tv_usec computation quirks.
 */
void sleep_ms(unsigned int msec)
{
    std::this_thread::sleep_for(std::chrono::milliseconds(msec));
}

/**
 * @brief Encoding thread body: every ~40 ms (≈25 fps) convert the most
 *        recently published RGBA frame to YUV420P, encode it as H.264 and
 *        mux it into the output file. Runs until m_open becomes false.
 */
void Raw2Mp4Tool::run()
{
    while (m_open.load()) {
        m_lock.readLock();
        // qInfo() << QDateTime::currentDateTime();
        do {
            if (!m_data) {
                break;      // no frame has been published yet
            }
            // Wrap the flat RGBA buffer in sws_scale()'s plane/stride form.
            uint8_t *indata[AV_NUM_DATA_POINTERS] = {0};
            indata[0] = (uint8_t *)m_data;
            int inlinesize[AV_NUM_DATA_POINTERS] = {0};
            inlinesize[0] = m_srcWidth * 4;     // 4 bytes per RGBA pixel
            if (sws_scale(m_ctx, indata, inlinesize, 0, m_srcHeight, m_yuv->data, m_yuv->linesize) <= 0) {
                break;
            }
            // 6. Encode: pts advances 3600 ticks per frame in the 90 kHz
            // clock, i.e. exactly 25 fps (see comment at top of file).
            m_yuv->pts = m_p;
            m_p = m_p + 3600;
            if (avcodec_send_frame(m_codecContext, m_yuv) != 0) {
                break;
            }

            // BUG FIX: drain ALL packets the encoder has ready instead of
            // reading exactly one. The encoder may buffer frames at startup
            // (receive returns EAGAIN) or emit several packets per send;
            // the old single-read dropped or delayed those packets.
            AVPacket pkt;
            av_init_packet(&pkt);
            while (avcodec_receive_packet(m_codecContext, &pkt) == 0) {
                // qInfo() << "<" << pkt.size << ">" << (pkt.flags & AV_PKT_FLAG_KEY);
                // av_interleaved_write_frame() takes ownership of the
                // packet's reference and resets pkt for the next iteration.
                av_interleaved_write_frame(m_outContext, &pkt);
            }
        } while (0);
        m_lock.readUnlock();
        sleep_ms(40);       // ~25 fps pacing
    }
}
