#include "hwffmpegfind.h"

// Pixel format produced by the hardware decoder; play() compares decoded
// frames against it to decide whether a GPU->CPU transfer is needed.
// NOTE(review): never assigned anywhere in this file, so it keeps its
// zero-initialized value — confirm it is set elsewhere (e.g. in a
// get_format callback), otherwise the hw-path check in play() is dead.
static enum AVPixelFormat hw_pix_fmt;
// NOTE(review): declared but not used anywhere in this file.
static AVBufferRef *hw_device_context = NULL;

//static enum AVCodecID s_codec_id_h264 = AV_CODEC_ID_H264;
//static enum AVCodecID s_codec_id_hevc = AV_CODEC_ID_HEVC;
// Name of the hardware (OMX) H.264 decoder that init() looks up.
static const char* s_codec_name = "h264_omx_dec";

/**
 * Construct the reader: initialize FFmpeg networking and pre-allocate the
 * demuxer context, a scratch frame and a packet.
 *
 * Fix: m_avcodec_context and m_sws_context were left uninitialized here,
 * yet the destructor unconditionally frees both — undefined behavior if
 * init() is never called (or fails before creating them). They are now
 * explicitly nulled so the destructor's free calls are always safe.
 */
HWFFmpegFind::HWFFmpegFind(QObject *parent):
    QObject(parent),
    m_is_stop(false)
{
    m_video_stream_index = -1;
    m_avcodec_context = nullptr;   // created in init(); freed in the destructor
    m_sws_context = nullptr;       // created in init(); freed in the destructor
    avformat_network_init();
    m_avformat_context = avformat_alloc_context();
    m_av_frame_buffer = av_frame_alloc(); // scratch AVFrame (re-sized in init())
    m_av_packet = av_packet_alloc();
}

/**
 * Release every FFmpeg resource owned by this object.
 *
 * Fixes:
 *  - av_packet_unref() only dropped the packet's payload and leaked the
 *    AVPacket struct allocated by av_packet_alloc(); av_packet_free()
 *    releases both.
 *  - avformat_free_context() on a context opened with avformat_open_input()
 *    leaks demuxer state; avformat_close_input() both closes the input and
 *    frees the context (and is a safe no-op on an unopened/NULL context).
 */
HWFFmpegFind::~HWFFmpegFind()
{
    if (m_sws_context)
        sws_freeContext(m_sws_context);
    avcodec_free_context(&m_avcodec_context);  // NULL-safe
    av_packet_free(&m_av_packet);              // frees payload AND the struct
    av_frame_free(&m_av_frame_buffer);
    avformat_close_input(&m_avformat_context); // closes input and frees context
}

/**
 * Remember the stream URL that a later init() call will open.
 */
void HWFFmpegFind::setUrl(QString url)
{
    m_url = url;
}

bool HWFFmpegFind::init()
{
    AVDictionary *format_opt = nullptr;
    av_dict_set(&format_opt, "timeout", "10000000", 0);
    av_dict_set(&format_opt, "rtsp_transport", "tcp", 0);   // 要使用tcp，否则会出现丢帧
    av_dict_set(&format_opt, "max_delay", "5000000", 0);

    qint32 result = 0;
    //    QTime start = QTime::currentTime();
//    qDebug() << start.toString("hh:mm:ss");
    // 打开视频流
    result = avformat_open_input(&m_avformat_context,
                                        m_url.toStdString().c_str(),
                                        NULL, &format_opt);
//    QTime end = QTime::currentTime();
//    qDebug() << end.toString("hh:mm:ss");
    if (result < 0) {
        qDebug() << "Failed to open video stream!";
        return false;
    }

    // 获取视频流信息
    result = avformat_find_stream_info(m_avformat_context, NULL);
    if (result < 0) {
        qDebug() << "Failed to get video stream information!";
        return false;
    }

    // 获取视频流索引
    m_video_stream_index = -1;
    for (uint i = 0; i < m_avformat_context->nb_streams; i++) {
        if (m_avformat_context->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            m_video_stream_index = i;
            break;
        }
    }

    if (m_video_stream_index == -1) {
        qDebug() << "Failed to get video stream index!";
        return false;
    }

    // test_h264
    //查找解码器
    const AVCodec *avcodec = avcodec_find_decoder_by_name(s_codec_name);
    if (avcodec == nullptr) {
        qDebug() << "not find codec";
    }

    // test hevc
//    const AVCodec *avcodec = avcodec_find_decoder(s_codec_id_hevc);
    // 获取视频流解码器
//    const AVCodec *avcodec = avcodec_find_decoder(avcodec_context_->codec_id);

    // 获取视频流的分辨率大小
    m_avcodec_context = avcodec_alloc_context3(avcodec);
    if (m_avcodec_context == nullptr) {
        qDebug() << "Could not allocate AVCodecContext";
        return false;
    }
    avcodec_parameters_to_context(m_avcodec_context, m_avformat_context->streams[m_video_stream_index]->codecpar);
    // 视频宽
    m_video_width = m_avcodec_context->width;
    // 视频高
    m_video_height = m_avcodec_context->height;

    m_av_frame_buffer = alloc_picture(AV_PIX_FMT_RGB24, m_video_width, m_video_height);

    // 打开解码器
    if ((result = avcodec_open2(m_avcodec_context, avcodec, NULL)) < 0) {
        qDebug() << stderr << "Failed to open codec for stream " << m_video_stream_index;
        return false;
    }

    // 像素格式转换YUV--->RGB
    m_sws_context = sws_getContext(m_video_width, m_video_height, AV_PIX_FMT_NV12,   // 转换前的长、宽、像素格式
                                  m_video_width, m_video_height, AV_PIX_FMT_RGB24,     // 转换后的长、宽、像素格式
                                  SWS_BICUBIC,                                      // 转换方法  libswscale/swscale.h
                                  NULL, NULL, NULL);                                // 其他参数默认为空

    qDebug() << "Success to init vedio stream!";
    return true;
}

void HWFFmpegFind::play()
{
    qint32 ret;
    AVFrame *av_frame = nullptr, *sw_frame = nullptr, *tmp_frame = nullptr;
    while (!m_is_stop) {
        if (av_read_frame(m_avformat_context, m_av_packet) >= 0) {
            if (m_av_packet->stream_index == m_video_stream_index) {
                ret = avcodec_send_packet(m_avcodec_context, m_av_packet);
                if (ret < 0) {
                    qDebug() << stderr << "Error during decoding";
                    break;
                }
                while (1) {
                    // apply memory for frame
                    if (!(av_frame = av_frame_alloc()) || !(sw_frame = av_frame_alloc())) {
                        qDebug() << stderr << "Can not alloc frame";
                        ret = AVERROR(ENOMEM);
                        break;
                    }
                    ret = avcodec_receive_frame(m_avcodec_context, av_frame);
                    if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
                        av_frame_free(&av_frame);
                        av_frame_free(&sw_frame);
                        break;
                    } else if (ret < 0) {
                        qDebug() << stderr << "Error while decoding";
                        break;
                    }

                    if (av_frame->format == hw_pix_fmt) {
                        if ((ret = av_hwframe_transfer_data(sw_frame, av_frame, 0)) < 0) {
                            qDebug() << stderr << "Error transferring the data to system memory";
                            break;
                        }
                        tmp_frame = sw_frame;
                    } else {
                        tmp_frame = av_frame;
                    }

                    m_mutex.lock();
                    sws_scale(m_sws_context,
                              (const uint8_t* const *)tmp_frame->data,
                              tmp_frame->linesize,
                              0,
                              m_video_height,
                              m_av_frame_buffer->data,
                              m_av_frame_buffer->linesize);
                    QImage image(m_av_frame_buffer->data[0],
                            m_video_width, m_video_height,
                            QImage::Format_RGB888);

                    emit drawImage(image);
                    m_mutex.unlock();
                    av_frame_free(&av_frame);
                    av_frame_free(&sw_frame);
                }
            }
            av_packet_unref(m_av_packet);
        }
    }
}

void HWFFmpegFind::stopReadFrame()
{
    this->m_is_stop = true;
}

/**
 * Allocate an AVFrame with an owned data buffer of the given pixel format
 * and dimensions.
 *
 * @param pix_fmt pixel format of the buffer to allocate
 * @param width   frame width in pixels
 * @param height  frame height in pixels
 * @return the allocated frame, or nullptr on failure (callers already had
 *         to handle nullptr from the av_frame_alloc() path).
 *
 * Fixes: av_frame_get_buffer() failure used to call exit(1), killing the
 * whole process from inside a member function and leaking `picture`; it
 * now frees the frame and returns nullptr like the other failure path.
 * Also removed the bogus "qDebug() << stderr" (printed a FILE* pointer).
 */
AVFrame *HWFFmpegFind::alloc_picture(AVPixelFormat pix_fmt, qint32 width, qint32 height)
{
    AVFrame *picture = av_frame_alloc();
    if (!picture) {
        return nullptr;
    }

    picture->format = pix_fmt;
    picture->width = width;
    picture->height = height;

    /* allocate the buffers for the frame data */
    qint32 ret = av_frame_get_buffer(picture, 0);
    if (ret < 0) {
        qDebug() << "Could not allocate frame data.";
        av_frame_free(&picture); // don't exit(1): report failure to the caller
        return nullptr;
    }

    return picture;
}
