#include "MYVideoOutput.h"
#include <QDebug>
#include <QPainter>
#include <QTimer>
#include <malloc.h>
extern "C" {
    #include <libavutil/frame.h>
    #include <libavutil/imgutils.h>
    #include <libswscale/swscale.h>
    #include <libavformat/avformat.h>
    #include <libavcodec/avcodec.h>
    #include "libavformat/avformat.h"
}


MYVideoOutput::MYVideoOutput(void) : m_VideoFrame()
{
    // Wire the flush request to the display-refresh slot: whenever
    // requestFlush is emitted, flushDisplay() runs on this object.
    connect(this, &MYVideoOutput::requestFlush,
            this, &MYVideoOutput::flushDisplay);
}

MYVideoOutput::~MYVideoOutput()
{
    // Release the swscale conversion context owned by Repaint().
    // sws_freeContext() safely ignores a null pointer, so no guard is needed.
    sws_freeContext(m_SwsContext);
    m_SwsContext = nullptr;
}

/**
 * Converts a decoded frame to BGRA and stores it for display.
 *
 * Takes ownership of @p frame: it is always freed before returning.
 * On success, m_VideoFrame is replaced under the mutex and a
 * requestFlush signal is emitted to trigger a repaint.
 */
void MYVideoOutput::Repaint(AVFrame *frame)
{
    if (!frame) return;

    // Qt cannot display YUV directly, so convert to BGRA, which matches
    // the byte layout of QImage::Format_ARGB32 on little-endian hosts.
    // Recreate the scaler whenever the frame geometry changes.
    if (m_SwsContext == nullptr || m_VideoFrame.width() != frame->width
            || m_VideoFrame.height() != frame->height) {
        sws_freeContext(m_SwsContext);
        m_SwsContext = sws_getContext(frame->width,
                                      frame->height,
                                      static_cast<AVPixelFormat>(frame->format),
                                      frame->width,
                                      frame->height,
                                      AV_PIX_FMT_BGRA,
                                      SWS_FAST_BILINEAR, nullptr, nullptr, nullptr);
    }
    if (m_SwsContext == nullptr) {
        // Scaler creation failed (e.g. unsupported pixel format):
        // drop the frame instead of crashing inside sws_scale().
        qDebug() << "MYVideoOutput::Repaint: sws_getContext failed";
        av_frame_free(&frame);
        return;
    }

    // Size of one tightly packed (align = 1) BGRA frame.
    const int num_bytes = av_image_get_buffer_size(AV_PIX_FMT_BGRA,
                                                   frame->width, frame->height, 1);
    if (num_bytes <= 0) {
        av_frame_free(&frame);
        return;
    }

    // Destination plane/stride arrays: BGRA is a single packed plane,
    // 4 bytes per pixel.
    uint8_t *rgb_buf = new uint8_t[static_cast<size_t>(num_bytes)];
    uint8_t *dst_data[4] = {rgb_buf};
    int dst_linesize[4] = {frame->width * 4};

    // Perform the pixel-format conversion into rgb_buf.
    sws_scale(m_SwsContext, frame->data, frame->linesize,
              0, frame->height, dst_data, dst_linesize);

    mux.lock();
    // QImage does not take ownership of external data; the cleanup
    // function deletes the buffer when the image is destroyed.
    m_VideoFrame = QImage(rgb_buf, frame->width, frame->height,
                          QImage::Format_ARGB32,
                          [](void *info) -> void
                            {
                                delete [] static_cast<uint8_t *>(info);
                            }, rgb_buf);
    mux.unlock();

    av_frame_free(&frame);
    // Ask the GUI thread to redraw with the new frame.
    emit requestFlush();
}


/// Paints the current video frame, letterboxed onto a black background.
void MYVideoOutput::paint(QPainter *painter)
{
    // Fill the whole item with black first so areas without video are defined.
    painter->fillRect(0, 0, width(), height(), Qt::black);

    // Draw the latest frame while holding the mutex, since Repaint()
    // may swap m_VideoFrame from another thread.
    mux.lock();
    if (!m_VideoFrame.isNull()) {
        // Destination: the full item area (the image is scaled to fit).
        const QRectF dst(0, 0, width(), height());
        // Source: the entire frame image.
        const QRectF src(m_VideoFrame.rect());
        painter->drawImage(dst, m_VideoFrame, src);
    }
    mux.unlock();
}

/**
 * Placeholder initialization hook.
 *
 * Currently a stub: the conversion context and frame buffer are
 * (re)created lazily in Repaint() when the incoming frame size changes,
 * so there is nothing to allocate here yet.
 */
void MYVideoOutput::Init(int width, int height)
{
    (void)width;   // parameters reserved for future pre-allocation;
    (void)height;  // explicitly voided to silence unused warnings
    // Empty critical section: synchronizes with any in-flight
    // paint()/Repaint() before the caller proceeds.
    mux.lock();
    mux.unlock();
}

void MYVideoOutput::flushDisplay()
{
    // Schedule a repaint; Qt will eventually invoke the overridden
    // paint() above, which draws the latest m_VideoFrame.
    update();
}

