#include "videopullstream.h"

// Construct the pull-stream worker and remember the media URL to open later.
// The thread is NOT started here; only the member initializers run.
// NOTE(review): init-list order must match the declaration order in the
// header (not visible from this file) — do not reorder without checking it.
VideoPullStream::VideoPullStream(QString _url, QObject *parent)
    :QThread(parent),
    url(_url)
{ }

// Request the pull loop to finish and block until the thread has exited.
// NOTE(review): despite its name, `stop` acts as a "keep running" flag —
// the read loop in _PullFlow() is `while (stop && ...)`, so clearing it
// here is what ends the loop. It is written from the caller's thread and
// read from the worker thread with no synchronization; consider declaring
// it std::atomic<bool> in the header. TODO confirm header type.
void VideoPullStream::stop_pull()
{
    stop = false;
    quit(); // Ask the thread's event loop (if one is running) to exit.
    wait(); // Block until run() has returned.
}

// Release the FFmpeg contexts owned by this object.
// Order matters: free the decoder context before closing the demuxer.
// Both FFmpeg helpers take the pointer by address and null it out,
// so calling cleanup() twice is safe.
void VideoPullStream::cleanup()
{
    if (c != nullptr) {
        avcodec_free_context(&c);
    }
    if (pFormatCtx != nullptr) {
        avformat_close_input(&pFormatCtx);
    }
}

int VideoPullStream::_PullFlow()
{
    qDebug() << "拉流程序内部" << url;

    AVDictionary* options = nullptr;
    // 设置读取或写入超时为30秒（单位为微秒）
    av_dict_set(&options, "rw_timeout", "5000000", 0);

    // 打开输入文件
    if (avformat_open_input(&pFormatCtx, url.toStdString().c_str(), NULL, &options) != 0) {
        qDebug() << "输入流打开失败";
        return - 1;
    }
    await = true;
    av_dict_free(&options);
    // 获取流信息
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        avformat_close_input(&pFormatCtx);
        qDebug() << "输出流获取失败";
        return -1;
    }
    // 查找视频流
    videoStream = -1;
    for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++){
        if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStream = i;
            break;
        }
    }
    if (videoStream == -1) {
        avformat_close_input(&pFormatCtx);
        std::cerr << "No video stream found." << std::endl;
        return -1;
    }

    // 解码视频流
    AVCodecParameters* codecpar = pFormatCtx->streams[videoStream]->codecpar;
    const AVCodec* codec = avcodec_find_decoder(codecpar->codec_id);
    c = avcodec_alloc_context3(codec);
    if (avcodec_parameters_to_context(c, codecpar) < 0) {
        std::cerr << "Failed to get codec context parameters." << std::endl;
        return -1;
    }

    if (avcodec_open2(c, codec, NULL) < 0) {
        std::cerr << "Could not open codec." << std::endl;
        return -1;
    }

    // 分配一个AVPacket用于存储解码后的数据
    AVPacket packet;
    struct SwsContext* sws_ctx = NULL;
    AVFrame* frame = av_frame_alloc();
    AVFrame* rgbFrame = av_frame_alloc();

    // 创建SWS上下文
    sws_ctx = sws_getContext(c->width, c->height, c->pix_fmt,
                             c->width, c->height, AV_PIX_FMT_RGB24,
                             SWS_BILINEAR, NULL, NULL, NULL);

    // 为RGB帧分配内存
    int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGB24, c->width, c->height, 1);
    buffer = (uint8_t*)av_malloc(numBytes);
    av_image_fill_arrays(rgbFrame->data, rgbFrame->linesize, buffer, AV_PIX_FMT_RGB24,
                         c->width, c->height, 1);

    while (stop && av_read_frame(pFormatCtx, &packet) >= 0){
        if (packet.stream_index == videoStream) {
            avcodec_send_packet(c, &packet);
            if (avcodec_receive_frame(c, frame) == 0) {
                // qDebug() << "hhh";
                // 将YUV帧转换为RGB帧
                sws_scale(sws_ctx, (uint8_t const* const*)frame->data,
                          frame->linesize, 0, c->height,
                          rgbFrame->data, rgbFrame->linesize);
                // qDebug() << "拉流111";

                QImage img((uchar*)rgbFrame->data[0], c->width, c->height, QImage::Format_RGB888);
                // 在Qt界面中显示图像
                emit frameReady(img); // 发射信号，传递图像
            }
        }
        av_packet_unref(&packet);
    }

    // if (buffer) av_free(buffer);
    av_frame_free(&frame);
    av_frame_free(&rgbFrame);
    sws_freeContext(sws_ctx);
    av_packet_unref(&packet);

    return 0;
}
