#include "XReceiveVideo.h"

#include "define.h"



class CReceiveVideo : public XReceiveVideo {

public:

    /**
     * 打开流媒体
     * @param url
     * @return
     */
    bool Init(const char *url) override {

        murl = url;

        return true;
    }

    void Stop() override {
       isExit = true;

    }


protected:
    VideoCapture cam; // 本地摄像头

    /**
     * 这个是QThread的线程执行体
     */
    void run() override {
        AVCodec         *pCodec;
        AVCodecContext  *pCodecCtx= NULL;
        AVFrame         *pFrame,*pFrameRGB;
        AVFormatContext *pFormatCtx;

        isExit = false;

        av_register_all(); //初始化FFMPEG

        avformat_network_init();

        pFormatCtx = avformat_alloc_context();

    //    AVDictionary* options = NULL;
    //    av_dict_set(&options, "buffer_size", "102400", 0); //设置缓存大小，1080p可将值调大
    //    av_dict_set(&options, "rtsp_transport", "tcp", 0); //以udp方式打开，如果以tcp方式打开将udp替换为tcp
    //    av_dict_set(&options, "stimeout", "2000000", 0); //设置超时断开连接时间，单位微秒
    //    av_dict_set(&options, "max_delay", "500000", 0); //设置最大时延



        //=================================打开网络流或文件流 ============================//
        if (avformat_open_input(&pFormatCtx, murl.c_str(), NULL, NULL) != 0)
        {
            qDebug()<<"Couldn't open input stream";
            return;
        }
    qDebug()<<"here";
        //===================================查找码流信息================================//
        if (avformat_find_stream_info(pFormatCtx, NULL)<0)
        {
            qDebug()<<"Couldn't find stream information";
            return;
        }
        //=============================查找码流中是否有视频流=============================//
        int videoindex = -1;
        for (int i = 0; i<pFormatCtx->nb_streams; i++)
        {
            if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
            {
                videoindex = i;
                break;
            }
        }

        if (videoindex == -1)
        {
            printf("Didn't find a video stream.\n");
            return;
        }

        //=================================  查找解码器 ===================================//
        pCodecCtx = pFormatCtx->streams[videoindex]->codec;

        pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
        if (pCodec == NULL)
        {
            printf("Codec not found.\n");
            return ;
        }

        //================================  打开解码器 ===================================//
        if (avcodec_open2(pCodecCtx, pCodec, NULL)<0)
        {
            printf("Could not open codec.\n");
            return ;
        }

        //==================================== 分配空间 ==================================//
        pFrame    = av_frame_alloc();
        pFrameRGB = av_frame_alloc();
        //一帧图像数据大小
        int numBytes = avpicture_get_size(AV_PIX_FMT_RGB32, pCodecCtx->width,pCodecCtx->height);

        unsigned char *out_buffer;
        out_buffer = (unsigned char *) av_malloc(numBytes * sizeof(unsigned char));


        //会将pFrameRGB的数据按RGB格式自动"关联"到buffer  即pFrameRGB中的数据改变了 out_buffer中的数据也会相应的改变
        avpicture_fill((AVPicture *)pFrameRGB, out_buffer, AV_PIX_FMT_RGB32, pCodecCtx->width, pCodecCtx->height);

        //Output Info---输出一些文件（RTSP）信息
        printf("---------------- File Information ---------------\n");
        av_dump_format(pFormatCtx, 0, murl.c_str(), 0);
        printf("-------------------------------------------------\n");

        //================================ 设置数据转换参数 ================================//
        struct SwsContext *img_convert_ctx;
        img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
            pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_RGB32, SWS_BICUBIC, NULL, NULL, NULL);


        //=========================== 分配AVPacket结构体 ===============================//
        int y_size = pCodecCtx->width * pCodecCtx->height;
        AVPacket *packet = (AVPacket *) malloc(sizeof(AVPacket)); //分配一个packet
        av_new_packet(packet, y_size); //分配packet的数据

        while(!isExit)
        {
            //===========================  读取视频信息 ===============================//
            if (av_read_frame(pFormatCtx, packet) < 0) //读取的是一帧视频  数据存入一个AVPacket的结构中
            {
                qDebug()  << "read error." ;
                return ;
            }
            //此时数据存储在packet中

            //=========================== 对视频数据进行解码 ===============================//


            int got_picture;
            if (packet->stream_index == videoindex)
            {
                //视频解码函数  解码之后的数据存储在 pFrame中
                int ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
                if (ret < 0)
                {
                     qDebug()  << "decode error." ;
                    return ;
                }

                //=========================== YUV=>RGB ===============================//
                if (got_picture)
                {
                    //转换一帧图像
                    sws_scale(img_convert_ctx,pFrame->data, pFrame->linesize, 0, pCodecCtx->height,  //源
                              pFrameRGB->data, pFrameRGB->linesize);                                 //目的

                    QImage tmpImg((uchar *)out_buffer,pCodecCtx->width,pCodecCtx->height,QImage::Format_RGB32);
                    QImage image = tmpImg.copy(); //把图像复制一份 传递给界面显示
                    mEvent->OnReceives(image);

                }
            }
        }


        avcodec_close(pCodecCtx);
        av_free(pCodecCtx);
        av_frame_free(&pFrame);
    }


};


/**
 * Singleton accessor for the receiver.
 * The previous version statically constructed an array of 255 CReceiveVideo
 * thread objects but only ever returned element 0 (no index parameter exists),
 * leaving 254 unreachable instances; a single static preserves the interface
 * without the wasted construction.
 * @return pointer to the process-wide receiver instance (never null)
 */
XReceiveVideo *XReceiveVideo::Get() {
    static CReceiveVideo instance;
    return &instance;
}

// Default constructor — no state initialized here; members (murl, isExit,
// mEvent) are presumably set up in the header or via Init()/setters.
// TODO(review): confirm against XReceiveVideo.h.
XReceiveVideo::XReceiveVideo() {

}

// Destructor — intentionally empty. NOTE(review): this class is deleted
// through the base pointer returned by Get(); make sure the declaration in
// XReceiveVideo.h marks it virtual.
XReceiveVideo::~XReceiveVideo() {

}



