#include "ffmpeg.h"
#include <QImage>

// Construct the decoder thread. All FFmpeg state is set up lazily in
// open(); nothing needs to happen here beyond forwarding the parent.
FFmpegThread::FFmpegThread(QObject *parent) : QThread(parent) {}

FFmpegThread::~FFmpegThread()
{
    // Destroying a QThread whose run() is still executing is undefined
    // behaviour (Qt prints "Thread destroyed while still running" and may
    // crash). Ask the decode loop to stop, then block until it exits.
    isPlay = false;
    wait();
}

void FFmpegThread::open(const QString &filename)
{
    this->filename = filename;
    formatContext = avformat_alloc_context();
    avformat_open_input(&formatContext, filename.toUtf8().data(), nullptr, nullptr);
    avformat_find_stream_info(formatContext, nullptr);

    // find video stream
    for (int i=0; i < formatContext->nb_streams; i++) {
        if (formatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStream = i;
            break;
        }
    }

    // open video codec
    if (videoStream != -1) {
        codecContext = avcodec_alloc_context3(nullptr);
        avcodec_parameters_to_context(codecContext, formatContext->streams[videoStream]->codecpar);
        codec = avcodec_find_decoder(codecContext->codec_id);
        avcodec_open2(codecContext, codec, nullptr);
        swsContext = sws_getContext(codecContext->width, codecContext->height, codecContext->pix_fmt,
                                                            codecContext->width, codecContext->height, AV_PIX_FMT_RGB32,
                                                            SWS_BICUBIC, nullptr, nullptr, nullptr);
    }
}

/**
 * Stop the decode loop and release every FFmpeg resource owned by this
 * thread. Safe to call repeatedly and safe when open() failed part-way.
 */
void FFmpegThread::close()
{
    isPlay = false;

    // Free the scaler unconditionally: in the original code it leaked
    // whenever codecContext was already null.
    if (swsContext != nullptr) {
        sws_freeContext(swsContext);
        swsContext = nullptr;
    }

    // avcodec_free_context() also closes the codec; the separate
    // (deprecated) avcodec_close() call is unnecessary. It nulls the
    // pointer for us.
    if (codecContext != nullptr) {
        avcodec_free_context(&codecContext);
    }

    // avformat_close_input() frees the context AND sets the pointer to
    // nullptr — the extra avformat_free_context() the old code did
    // afterwards was redundant.
    if (formatContext != nullptr) {
        avformat_close_input(&formatContext);
    }

    videoStream = -1;
}

void FFmpegThread::run()
{
    int64_t startTime = av_gettime();
    isPlay = true;
    if (videoStream != -1) {
        packet = av_packet_alloc();
        frameRecieve = av_frame_alloc();
        while (isPlay && av_read_frame(formatContext, packet) >= 0) {
            if (packet->stream_index == videoStream) {
                avcodec_send_packet(codecContext, packet);
                if (avcodec_receive_frame(codecContext, frameRecieve) == 0) {
                    // convert YUV420P to RGB32
                    uint8_t *data[AV_NUM_DATA_POINTERS] = {nullptr};
                    data[0] = (uint8_t *)malloc(codecContext->width * codecContext->height * 4);
                    int linesize[AV_NUM_DATA_POINTERS] = {0};
                    linesize[0] = codecContext->width * 4;
                    sws_scale(swsContext, frameRecieve->data, frameRecieve->linesize, 0, codecContext->height, data, linesize);
                    QImage image(data[0], codecContext->width, codecContext->height, QImage::Format_RGB32);
                    emit getOneFrame(image);
                    AVRational timeBase = {1, AV_TIME_BASE};
                    int64_t ptsTime = av_rescale_q(frameRecieve->pts, formatContext->streams[videoStream]->time_base, timeBase);
                    int64_t nowTime = av_gettime() - startTime;
                    if (ptsTime > nowTime) {
                        av_usleep(ptsTime - nowTime);
                    }
                    free(data[0]);
                }
            }
            av_packet_unref(packet);
        }
        av_packet_free(&packet);
        av_frame_free(&frameRecieve);
    }
}