#include "testthread.h"
#include <QDebug>
#include <QDateTime>
#include <QImage>
#include <QPixmap>

extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavdevice/avdevice.h>
}




TestThread::TestThread()
    : formatContext(nullptr)
    , codecContext(nullptr)
    , videoStreamIndex(-1)
{
    // All members are set up in the init list; FFmpeg initialisation is
    // deferred to run() on the worker thread.
}

TestThread::~TestThread()
{
    // Tear down whatever FFmpeg state run() left behind. Both helpers
    // null out the pointer they free.
    if (codecContext != nullptr)
        avcodec_free_context(&codecContext);
    if (formatContext != nullptr)
        avformat_close_input(&formatContext);
    // Balances the avformat_network_init() call made in run().
    avformat_network_deinit();
}


void TestThread::exitThread()
{
    this->requestInterruption();
}

void TestThread::run()
{
    // 初始化网络库（可选，但某些输入格式可能需要）
    avformat_network_init();
    qDebug() << "111111111111";

    // 注册设备
    avdevice_register_all();

    // 设置输入格式上下文
    formatContext = avformat_alloc_context();

    // 打开摄像头设备
    const AVInputFormat *inputFormat = av_find_input_format("dshow"); // 对于Windows使用dshow
    if (avformat_open_input(&formatContext, "video=USB2.0 HD UVC WebCam", inputFormat, NULL) != 0) {
        qDebug() << "无法打开摄像头";
        return;
    }

    // 获取流信息
    if (avformat_find_stream_info(formatContext, NULL) < 0) {
        qDebug() << "无法获取流信息";
        return;
    }

    // 查找视频流索引
    for (unsigned int i = 0; i < formatContext->nb_streams; i++) {
        if (formatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStreamIndex = i;
            break;
        }
    }
    if (videoStreamIndex == -1) {
        qDebug() << "找不到视频流";
        return;
    }

    // 获取解码器上下文
    AVCodecParameters *codecParameters = formatContext->streams[videoStreamIndex]->codecpar;
    const AVCodec *codec = avcodec_find_decoder(codecParameters->codec_id);
    codecContext = avcodec_alloc_context3(codec);
    avcodec_parameters_to_context(codecContext, codecParameters);
    if (avcodec_open2(codecContext, codec, NULL) < 0) {
        qDebug() << "无法打开解码器";
        return;
    }

    // 准备接收帧
    AVFrame *frame = av_frame_alloc();
    AVPacket packet;


    while (!this->isInterruptionRequested()) {

        // to do something
        //qDebug()<<QDateTime::currentDateTime();
        // 读取数据包
        if (av_read_frame(formatContext, &packet) >= 0) {
            // 如果是视频流的数据包
            if (packet.stream_index == videoStreamIndex) {
                // 解码视频帧
                int response = avcodec_send_packet(codecContext, &packet);
                if (response < 0) {
                    qDebug() << "发送数据包失败";
                    continue;
                }
                while (response >= 0) {
                    response = avcodec_receive_frame(codecContext, frame);
                    if (response == AVERROR(EAGAIN) || response == AVERROR_EOF) {
                        break;
                    } else if (response < 0) {
                        qDebug() << "接收帧失败";
                        break;
                    }

                    // 在此处处理帧，例如显示或保存
                    qDebug() << "接收到一帧：" << frame->width << "x" << frame->height;
                    // 假设你需要从YUV转换到RGB
                    struct SwsContext* img_convert_ctx = sws_getContext(codecContext->width, codecContext->height,
                        codecContext->pix_fmt,
                        codecContext->width, codecContext->height,
                        AV_PIX_FMT_RGB24, SWS_BILINEAR,
                        NULL, NULL, NULL);

                    QImage img(codecContext->width, codecContext->height, QImage::Format_RGB888);
                    uint8_t* outData[1] = { (uint8_t*)img.bits() };
                    int outLinesize[1] = { 3 * codecContext->width };
                    sws_scale(img_convert_ctx, frame->data, frame->linesize, 0, codecContext->height, outData, outLinesize);
                    sws_freeContext(img_convert_ctx);

                    QPixmap pixmap = QPixmap::fromImage(img);

                    emit frameReady(pixmap);
                }
            }
            av_packet_unref(&packet);
        }

        msleep(40); // 简单的延迟以控制帧率
    }

    // 清理资源
    av_frame_free(&frame);
}
