﻿#include "QlyVideoWriter.h"

QlyVideoWriter::QlyVideoWriter()
{
    // The recording timeout is currently armed via QTimer::singleShot inside
    // setVideoFrame()/setQImage2(); this persistent-timer connection was left
    // disabled by the author.
    //connect(&m_timer, &QTimer::timeout, this, &QlyVideoWriter::timeout);
}

// Select the encoder for `id` and reconcile m_format with what that encoder
// supports. Returns true when an encoder exists and a usable pixel format is
// settled; false when no encoder is available. If m_format is not in the
// encoder's list, m_format silently falls back to the encoder's first
// (preferred) pixel format.
bool QlyVideoWriter::setAVCodecID(AVCodecID id)
{
    m_codecID = id;
    // const-qualified: avcodec_find_encoder() returns const AVCodec* since
    // FFmpeg 5.0; the const assignment also compiles on older releases.
    const AVCodec *pCodec = avcodec_find_encoder(id);
    if (pCodec == nullptr)
    {
        return false; // no encoder compiled in for this codec id
    }

    const enum AVPixelFormat *pFormat = pCodec->pix_fmts;
    if (pFormat == nullptr)
    {
        // Bug fix: a NULL pix_fmts list means the encoder does not declare
        // (restrict) its pixel formats, so the current m_format is accepted
        // as-is. The previous code returned false here and rejected such
        // encoders outright.
        return true;
    }

    // Keep m_format if the encoder explicitly supports it.
    for (const enum AVPixelFormat *p = pFormat; *p != AV_PIX_FMT_NONE; ++p)
    {
        if (*p == m_format)
        {
            return true;
        }
    }

    // m_format is not supported: fall back to the encoder's first format.
    if (*pFormat != AV_PIX_FMT_NONE)
    {
        m_format = *pFormat;
        return true;
    }
    return false; // encoder exposes an empty format list
}

// Mirror the writer's current configuration (time base, codec identity,
// geometry, bitrate) into the freshly created muxer stream.
void QlyVideoWriter::initStreamParameters(AVStream * stream)
{
    stream->time_base.den = m_time_base.den;
    stream->time_base.num = m_time_base.num;
    stream->id = m_videoContext.streamCount() - 1;
    stream->index = m_videoContext.streamCount() - 1;
    stream->codecpar->codec_tag = 0; // 0 lets the muxer choose the tag
    stream->codecpar->codec_type = m_codec.mediaType();
    stream->codecpar->codec_id = m_codec.id();
    // Bug fix: this was hard-coded to AV_PIX_FMT_YUV420P, which disagreed
    // with the pixel format negotiated in setAVCodecID() and with the format
    // used to build frames (buildFromQImage/buildFromQVideoFrame use m_format).
    stream->codecpar->format = m_format;
    stream->codecpar->width = m_width;
    stream->codecpar->height = m_height;
    stream->codecpar->bit_rate = m_bit_rate;
}

// Create the single video stream and configure/open the encoder for
// `codec_id`. Called lazily from setVideoFrame()/setQImage2() once the first
// frame's dimensions are known. Returns false only when stream creation
// fails; encoder setup errors are not propagated.
bool QlyVideoWriter::initFile(enum AVCodecID codec_id)
{
    // NOTE(review): findEncoder()'s result is not checked — an unavailable
    // encoder presumably leaves m_codec unset and later calls fail; confirm.
    m_codec.findEncoder(codec_id);

    AVStream * stream = m_videoContext.newStream(nullptr);
    if(stream == nullptr) return false;
    initStreamParameters(stream);

    // Configure the encoder context to match the stream parameters.
    m_codecContext.allocContext(&m_codec);
    m_codecContext.setTimeBase(stream->time_base);
    m_codecContext.setGopSize(10);  // keyframe at most every 10 frames
    m_codecContext.setMaxBFrame(0); // no B-frames: low-latency output

    // Encoder-specific private options (libx264 / libx265 presets).
    if (codec_id == AV_CODEC_ID_H264)
    {
        m_codecContext.opt_set("preset", "medium", 0);
        m_codecContext.opt_set("tune", "zerolatency", 0);
        //m_codecContext.opt_set("profile", "main", 0);
    }
    else if(codec_id == AV_CODEC_ID_H265)
    {
        m_codecContext.opt_set("preset", "ultrafast", 0);
        m_codecContext.opt_set("tune", "zerolatency", 0);
        //m_codecContext.opt_set("profile", "main", 0);
    }
    /* Some formats want stream headers to be separate. */
    if (m_videoContext.ptr()->oformat->flags & AVFMT_GLOBALHEADER)
    {
        m_codecContext.ptr()->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
    }
    // Open the encoder using the parameters stored on the stream.
    // NOTE(review): openCodec()'s return value is ignored — confirm failures
    // surface elsewhere (e.g. in encodeFrame()).
    m_codecContext.openCodec(&m_codec, stream->codecpar);

    return true;
}

// Open `url` as the output container and switch the writer into recording
// mode. Returns false when the muxer cannot create the file.
bool QlyVideoWriter::openFile(QString url)
{
    // Clear the wall-clock anchor; updatePts() re-initializes it when the
    // first frame arrives.
    m_startTime = QTime();

    if (m_videoContext.createFile("", url) != 0)
    {
        return false; // container could not be created
    }
    m_recording = true;
    return true;
}

// Encode one QVideoFrame. A negative `pts` requests a wall-clock derived
// timestamp (see updatePts). The first frame lazily initializes the output
// file with the frame's dimensions. Returns false when not recording or when
// writing fails.
bool QlyVideoWriter::setVideoFrame(QVideoFrame & videoframe, int pts)
{
    if(!m_recording) return false;
    updatePts(pts); // resolves pts < 0 into m_time_base units
    if(m_width == 0) // first frame: lazily initialize the output file
    {
        m_width = videoframe.width();
        m_height = videoframe.height();
        initFile(m_codecID);
        if( m_videoContext.writeHeader() != 0) return false;
        // Arm the maximum-duration watchdog; timeout() ends the recording.
        m_timer.singleShot(m_timeout, this, SLOT(timeout()));
    }
    QlyAVFrame frame;
    // Bug fix: the pts produced by updatePts() was previously discarded in
    // favor of a bare frame counter (m_count++), breaking real-time
    // timestamps and diverging from setQImage2(). Use pts, but keep
    // incrementing m_count for bookkeeping.
    frame.buildFromQVideoFrame(videoframe, m_format, pts);
    ++m_count;
    return writeFrame(frame);
}

// Watchdog slot: the maximum recording duration elapsed, so refuse any
// further frames and finalize the output file.
void QlyVideoWriter::timeout()
{
    m_recording = false; // stop accepting frames immediately
    close();             // flush encoder, write trailer, close the file
}

void QlyVideoWriter::updatePts(int &pts)
{
    if( pts < 0 ) // 说明这时要用真实的时间来做为 pts
    {
        QTime t = QTime::currentTime();
        if(m_startTime.isNull())
        {
            m_startTime = t; // 说明这是第一帧。需要初始化起始时间。
        }
        int oldpts = m_startTime.msecsTo(t);

        pts = av_rescale_q_rnd(oldpts, AVRational({1, 1000}), m_time_base, AV_ROUND_NEAR_INF);
        qDebug() << "oldpts = " << oldpts << ", pts = " << pts;
    }
}

// Encode one QImage with an explicit presentation timestamp. A negative
// `pts` requests a wall-clock derived timestamp (see updatePts). The first
// frame lazily initializes the output file with the image's dimensions.
// Returns false when not recording or when writing fails.
bool QlyVideoWriter::setQImage2(const QImage &image, int pts)
{
    if(!m_recording) return false;
    updatePts(pts); // resolves pts < 0 into m_time_base units
    if(m_width == 0) // first frame: lazily initialize the output file
    {
        m_width = image.width();
        m_height = image.height();
        initFile(m_codecID);
        if( m_videoContext.writeHeader() != 0) return false;
        m_videoContext.dumpFormat();
        // NOTE(review): QTimer::singleShot is a static call that spawns its
        // own internal timer — m_timer.stop() in close() does NOT cancel it;
        // confirm a late timeout() after close() is harmless.
        m_timer.singleShot(m_timeout, this, SLOT(timeout()));
    }
    QlyAVFrame frame;
    frame.buildFromQImage(image, m_format, pts);
    return writeFrame(frame);
}

// Convenience overload: encode `image` with an automatically generated
// wall-clock timestamp.
bool QlyVideoWriter::setQImage(const QImage &image)
{
    return setQImage2(image, -1); // -1 => let updatePts() derive the pts
}

// Push one frame (or a null frame, to flush the encoder) through the encoder
// and write every produced packet to the container. Returns false as soon as
// one packet write is reported as failed; remaining queued packets are then
// left in m_packetQueue.
bool QlyVideoWriter::writeFrame(const QlyAVFrame &frame)
{
    // encodeFrame() appends any packets the encoder emits to m_packetQueue.
    m_codecContext.encodeFrame(frame.ptr(), m_packetQueue);
    while(!m_packetQueue.isEmpty())
    {
        QlyAVPacket pkt = m_packetQueue.dequeue();
        //av_packet_rescale_ts(pkt.ptr(), m_codecContext.timeBase(), m_videoContext.rawStream(0)->time_base);
        pkt.setStreamIndex(0); // single video stream in this container
        int ret = m_videoContext.writeFrame(pkt, m_codecContext.timeBase(), false);
        // NOTE(review): elsewhere in this file 0 means success
        // (createFile() == 0, writeHeader() != 0), so `!ret` would bail out
        // on SUCCESS if QlyVideoContext::writeFrame() follows the same
        // convention — verify its return contract (looks inverted).
        if(!ret) return false;
    }
    return true;
}

// Finalize the recording: stop accepting frames, flush the encoder, write
// the container trailer and close the output file. Always returns true.
bool QlyVideoWriter::close()
{
    m_recording = false;
    // NOTE(review): the watchdog was armed via the static
    // QTimer::singleShot(), which m_timer.stop() cannot cancel — confirm a
    // late timeout() after close() is harmless (it is idempotent here).
    m_timer.stop();
    // A null frame drains packets still buffered inside the encoder.
    writeFrame(QlyAVFrame(nullptr));
    m_videoContext.writeTrailer();
    m_videoContext.closeOutputFile();
    // Reset geometry so the next frame re-triggers lazy initialization.
    m_width = 0;
    m_height = 0;
    return true;
}
