#include "FrameProvider.h"

#include <algorithm>
#include <cstring>
#include <limits>

#include <QCoreApplication>
#include <QDebug>
#include <QThread>
#include <QVideoFrame>


FrameProvider::FrameProvider(QObject *parent)
    : QObject{parent}
{
    // Frame delivery is driven by a precise, zero-interval timer: while it
    // is active, onUpdateTimeout() runs on every event-loop iteration.
    m_updateTimer.setTimerType(Qt::PreciseTimer);
    m_updateTimer.setInterval(0);
    connect(&m_updateTimer, &QTimer::timeout,
            this, &FrameProvider::onUpdateTimeout);
}

FrameProvider::~FrameProvider()
{
    // Release resources owned by this provider. The class frees m_curFrame
    // with av_frame_free() elsewhere (seek / catch-up paths), so it owns the
    // frame and must also free it here; m_filter is created lazily in
    // copyAVFrameToQVideoFrame() and was previously leaked.
    // NOTE(review): assumes FrameRenderer::setFrame() does not take
    // ownership of the AVFrame — the existing frees elsewhere already make
    // that assumption; confirm against FrameRenderer.
    av_frame_free(&m_curFrame);
    delete m_filter;
    m_filter = nullptr;
}

AVFrame* FrameProvider::nextFrame()
{
    // Pop the next decoded video frame from the decoder queue, or return
    // nullptr when no decoder is attached or its queue is empty.
    const bool frameAvailable = m_decoder && m_decoder->videoFrameQueueSize() > 0;
    return frameAvailable ? m_decoder->nextVideoFrame() : nullptr;
}

int FrameProvider::frameTimeCheck(int64_t pts, const AVFrame* frame)
{
    // Compare a clock position `pts` (stream-timebase units) against the
    // frame's display interval [frame->pts, frame->pts + frame->pkt_duration).
    //
    // Returns 0 when `pts` falls inside the interval, a negative value when
    // the frame is ahead of the clock, and a positive value when it is
    // behind. Callers only use the sign / zero-ness of the result, so the
    // int64_t difference is clamped into int range: the previous implicit
    // narrowing could overflow on a large gap and flip the sign.
    const auto toInt = [](int64_t diff) {
        return static_cast<int>(std::clamp<int64_t>(
            diff,
            std::numeric_limits<int>::min(),
            std::numeric_limits<int>::max()));
    };

    if (pts < frame->pts)
        return toInt(pts - frame->pts);

    const int64_t end = frame->pts + frame->pkt_duration;
    if (pts > end)
        return toInt(pts - end);

    return 0;
}

void FrameProvider::onUpdateTimeout()
{
    // Frame pump, fired once per event-loop pass (timer interval is 0):
    // choose the video frame matching the synchronizer clock and hand it
    // to the renderer.
    if(!m_decoder || m_decoder->isEnding() || m_decoder->videoFrameQueueSize() == 0) {
        return;
    }

    // No frame is currently displayed: take one straight from the queue
    // and show it.
    if(!m_curFrame) {
        m_curFrame = nextFrame();
        deliverFrame();
        return;
    }

    // Our serial differs from the decoder's: a seek probably happened.
    // Drop the stale frame and immediately show a frame from the new
    // sequence.
    if (m_serial != m_decoder->decodecSerial()) {
        m_serial = m_decoder->decodecSerial();
        av_frame_free(&m_curFrame);
        m_curFrame = nextFrame();
        deliverFrame();
        return;
    }

    if(!m_synchronizer || m_synchronizer->isPausing())
        return;

    // Convert the synchronizer clock into stream-timebase units and compare
    // it against the currently displayed frame:
    //   frameTimeCheck == 0 : frame matches the clock — nothing to do.
    //   frameTimeCheck <  0 : frame is ahead of the clock — stretch its
    //                         display time by rewriting pts, then wait.
    int64_t pts = int64_t(m_synchronizer->clock() / av_q2d(m_timebase));
    auto c = frameTimeCheck(pts, m_curFrame);
    if (c == 0)
        return;
    else if (c < 0) {
        m_curFrame->pts = pts;
        return;
    }

    // The displayed frame is behind the clock — possibly by more than one
    // frame — so drain the queue until we reach the frame covering `pts`.
    // NOTE(review): assumes nextVideoFrame() is non-null whenever
    // videoFrameQueueSize() > 0; a concurrent consumer would break this.

    AVFrame* avFrame = nullptr;
    while(m_decoder->videoFrameQueueSize() > 0) {
        av_frame_free(&avFrame); // frees the rejected candidate; safe when avFrame is null
        avFrame = m_decoder->nextVideoFrame();

        // The queue jumped past the clock: pull this frame back to `pts`
        // and give it a duration covering the gap so it is displayed until
        // the clock catches up.
        if(pts - avFrame->pts < 0) {
            avFrame->pkt_duration = avFrame->pts - pts;
            avFrame->pts = pts;
            break;
        }

        // This frame's [pts, pts + duration) interval covers the clock —
        // deliver it.
        if(pts >= avFrame->pts && pts < avFrame->pts + avFrame->pkt_duration)
            break;
    }

    // If the queue drained without a covering frame, the last frame taken
    // is delivered as a best effort.
    if(avFrame) {
        av_frame_free(&m_curFrame);
        m_curFrame = avFrame;
        deliverFrame();
    }
}

QVideoFrame FrameProvider::copyAVFrameToQVideoFrame(AVFrame* avFrame)
{
    // Convert a decoded YUV420P AVFrame into an RGBA8888 QVideoFrame.
    // Returns an invalid (default) QVideoFrame on any failure.
    if (!avFrame || avFrame->format != AV_PIX_FMT_YUV420P) {
        qWarning() << "Invalid AVFrame or unsupported format";
        return QVideoFrame();
    }

    int width = avFrame->width;
    int height = avFrame->height;

    // Lazily build the YUV420P -> RGBA converter on first use.
    // NOTE(review): the filter is sized from the first frame only; a
    // mid-stream resolution change is not handled — confirm the decoder
    // never changes dimensions, or re-init when width/height differ.
    if(!m_filter) {
        m_filter = new VideoFrameFilter();
        m_filter->init(width, height, AV_PIX_FMT_YUV420P, width, height, AV_PIX_FMT_RGBA);
    }

    // Convert the frame to RGBA first.
    AVFrame* rgbFrame = m_filter->filter(avFrame);
    if (!rgbFrame || !rgbFrame->data[0]) {
        qWarning() << "Pixel format conversion failed";
        return QVideoFrame();
    }

    // Copy the RGBA data into a freshly allocated QVideoFrame.
    QVideoFrameFormat fmt(QSize(width, height), QVideoFrameFormat::Format_RGBA8888);
    QVideoFrame qVideoFrame(fmt);
    if (!qVideoFrame.isValid()) {
        qWarning() << "Could not allocate QVideoFrame";
        return QVideoFrame();
    }
    if (!qVideoFrame.map(QVideoFrame::WriteOnly)) {
        qWarning() << "Could not map QVideoFrame";
        return QVideoFrame();
    }

    // Copy row by row: the QVideoFrame stride (bytesPerLine) and the
    // AVFrame stride (linesize) may both carry padding and need not be
    // equal, so the previous single memcpy of linesize[0] * height could
    // shear the image or overrun the destination buffer.
    const int srcStride = rgbFrame->linesize[0];
    const int dstStride = qVideoFrame.bytesPerLine(0);
    const int rowBytes = std::min(std::min(srcStride, dstStride), width * 4);
    uchar* dst = qVideoFrame.bits(0);
    const uint8_t* src = rgbFrame->data[0];
    for (int y = 0; y < height; ++y) {
        memcpy(dst + y * dstStride, src + y * srcStride, rowBytes);
    }

    qVideoFrame.unmap();
    return qVideoFrame;
}

void FrameProvider::deliverFrame()
{
    // Hand the currently held frame to the renderer, if one is attached.
    if (!m_frameRenderer)
        return;
    m_frameRenderer->setFrame(m_curFrame);
}

// Renderer currently receiving frames (may be null).
FrameRenderer *FrameProvider::frameRenderer() const
{
    return m_frameRenderer;
}

void FrameProvider::setFrameRenderer(FrameRenderer *newFrameRenderer)
{
    // Attach a renderer; notify listeners only when it actually changes.
    if (m_frameRenderer != newFrameRenderer) {
        m_frameRenderer = newFrameRenderer;
        emit frameRendererChanged();
    }
}

double FrameProvider::currPTS() const
{
    // Presentation time of the frame being shown, in seconds;
    // 0 when no frame is displayed yet.
    return m_curFrame ? m_curFrame->pts * av_q2d(m_timebase) : 0.0;
}

// Decoder currently supplying frames (may be null).
Decoder *FrameProvider::decoder() const
{
    return m_decoder;
}

void FrameProvider::setDecoder(Decoder *newDecoder)
{
    // Attach a new decoder and start the frame pump once it is running.
    if (m_decoder == newDecoder)
        return;

    // Drop signal connections to the previous decoder so its
    // isRunningChanged no longer drives this provider; this also prevents
    // duplicate connections from accumulating when the decoder is swapped
    // repeatedly (the old code never disconnected).
    if (m_decoder)
        disconnect(m_decoder, nullptr, this, nullptr);

    m_decoder = newDecoder;

    // Guard against a null decoder: the old code dereferenced newDecoder
    // unconditionally and crashed on setDecoder(nullptr).
    if (m_decoder) {
        auto startIfRunning = [this]() {
            if (m_decoder->isRunning()) {
                // NOTE(review): DecodecSerial() is a free call, unlike
                // m_decoder->decodecSerial() used elsewhere — confirm it
                // yields the serial expected by onUpdateTimeout().
                m_serial = DecodecSerial();
                m_timebase = m_decoder->videoTimebase();
                m_updateTimer.start();
            }
        };

        startIfRunning();
        connect(m_decoder, &Decoder::isRunningChanged, this, startIfRunning,
                Qt::QueuedConnection);
    }

    emit decoderChanged();
}

// Synchronizer providing the playback clock (may be null).
ISynchronizer *FrameProvider::synchronizer() const
{
    return m_synchronizer;
}

void FrameProvider::setSynchronizer(ISynchronizer *newSynchronizer)
{
    // Attach a synchronizer; notify listeners only when it actually changes.
    if (m_synchronizer != newSynchronizer) {
        m_synchronizer = newSynchronizer;
        emit synchronizerChanged();
    }
}
