#include "rtspcodecthread.h"

RtspCodecThread::RtspCodecThread(QObject *parent) : QThread(parent)
{
    // Control flags for the decode loop in run(): stopped ends the loop,
    // isPlay requests (re)initialization, isPause suspends decoding,
    // isRtsp marks a network stream (rtsp/rtmp/http) vs a local file.
    stopped = false;
    isPlay = false;
    isPause = false;
    isRtsp = false;
    setObjectName("RtspCodecThread");

    // Liveness timestamp (refreshed on every decoded image) and decode state.
    lastTime = QDateTime::currentDateTime();
    frameCount = 0;
    frameFinish = 0;
    videoWidth = 0;
    videoHeight = 0;
    oldWidth = 0;
    oldHeight = 0;
    videoStreamIndex = -1;
    audioStreamIndex = -1;
    videoFps = 0;

    // interval: emit an image only every n-th decoded frame;
    // sleepTime: per-frame delay for file playback pacing;
    // checkTime: TCP probe timeout in ms; checkConn: enable the probe in checkUrl().
    interval = 1;
    sleepTime = 0;
    checkTime = 1000;
    checkConn = false;
    url = "rtsp://192.168.1.200:554/1";
    hardware = "none";

    // Recording defaults; output file name is derived from the current timestamp.
    saveFile = false;
    saveInterval = 0;
    savePath = qApp->applicationDirPath();
    fileName = QString("%1/%2.mp4").arg(savePath).arg(QDateTime::currentDateTime().toString("yyyyMMddHHmmss"));

    // Timer that rotates the output file. The sig_start/stopSave signals are
    // emitted from run() (another thread) so these connections let the timer
    // be started/stopped in the thread that owns this object.
    timerSave = new QTimer(this);
    timerSave->setInterval(60 * 1000);
    connect(timerSave, SIGNAL(timeout()), this, SLOT(save()));
    connect(this, SIGNAL(sig_startSave()), this, SLOT(startSave()));
    connect(this, SIGNAL(sig_stopSave()), this, SLOT(stopSave()));

    // FFmpeg objects are created lazily in init() and released in free().
    buffer = NULL;
    avPacket = NULL;
    avFrame = NULL;
    avFrame2 = NULL;
    avFrame3 = NULL;
    avFormatContext = NULL;
    videoCodec = NULL;
    audioCodec = NULL;
    swsContext = NULL;

    // Prebuild the constant bytes of the 7-byte ADTS header written before each
    // raw AAC packet (profile 2 = AAC-LC; freqIdx 4; chanCfg 2 — presumably
    // 44.1 kHz stereo, verify against the actual stream). Bytes 3-5 depend on
    // the packet length and are filled per packet in run().
    // NOTE(review): malloc result is not checked; assumed to succeed for 7 bytes.
    int profile = 2;
    int freqIdx = 4;
    int chanCfg = 2;
    dtsData = (char *)malloc(sizeof(char) * 7);
    dtsData[0] = (char)0xFF;
    dtsData[1] = (char)0xF1;
    dtsData[2] = (char)(((profile - 1) << 6) + (freqIdx << 2) + (chanCfg >> 2));
    dtsData[6] = (char)0xFC;

#ifndef gcc45
    // Bitstream filter used when saving: converts h264 from mp4/avcc to annex-b.
    filter = av_bsf_get_by_name("h264_mp4toannexb");
#endif

    RtspCodecThread::initlib();
}

void RtspCodecThread::initlib()
{
    // One-time global FFmpeg initialization, guarded against concurrent callers.
    static QMutex mutex;
    QMutexLocker locker(&mutex);
    static bool initialized = false;
    if(initialized)
    {
        return;
    }

    av_register_all();
    avformat_network_init();
    initialized = true;
}

int RtspCodecThread::av_bsf_filter(const AVBitStreamFilter *filter, AVPacket *pPacket, const AVCodecParameters *src)
{
#ifndef gcc45
    int ret;
    AVBSFContext *ctx = NULL;
    if(!filter)
    {
        return 0;
    }

    ret = av_bsf_alloc(filter, &ctx);
    if(ret < 0)
    {
        return ret;
    }

    ret = avcodec_parameters_copy(ctx->par_in, src);
    if(ret < 0)
    {
        return ret;
    }

    ret = av_bsf_init(ctx);
    if(ret < 0)
    {
        return ret;
    }

    AVPacket pkt = {0};
    pkt.data = pPacket->data;
    pkt.size = pPacket->size;

    ret = av_bsf_send_packet(ctx, &pkt);
    if(ret < 0)
    {
        return ret;
    }

    ret = av_bsf_receive_packet(ctx, &pkt);
    if(pkt.data == pPacket->data)
    {
        uint8_t *poutbuf = (uint8_t *)av_malloc(pkt.size);
        if(!poutbuf)
        {
            av_packet_unref(&pkt);
            av_free(poutbuf);
            return -1;
        }

        memcpy(poutbuf, pkt.data, pkt.size);
        av_packet_unref(pPacket);
        pPacket->data = poutbuf;
        pPacket->size = pkt.size;
        av_packet_unref(&pkt);
        av_bsf_free(&ctx);
        av_free(poutbuf);
        return 1;
    }

    if(ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
    {
        return 0;
    } else if(ret < 0)
    {
        return ret;
    }

    uint8_t *poutbuf = (uint8_t *)av_malloc(pkt.size + AV_INPUT_BUFFER_PADDING_SIZE);
    if(!poutbuf)
    {
        av_packet_unref(&pkt);
        av_free(poutbuf);
        return AVERROR(ENOMEM);
    }

    int poutbuf_size = pkt.size;
    memcpy(poutbuf, pkt.data, pkt.size);
    pPacket->data = poutbuf;
    pPacket->size = poutbuf_size;
    av_packet_unref(&pkt);

    while(ret >= 0)
    {
        ret = av_bsf_receive_packet(ctx, &pkt);
        av_packet_unref(&pkt);
    }

    av_packet_unref(&pkt);
    av_bsf_free(&ctx);
    av_free(poutbuf);
#endif
    return 1;
}

/**
 * Decodes one packet via the send/receive API, transferring each hardware
 * frame into the system-memory frame avFrame2 (later scaled by run()).
 * Returns 1 when decoding proceeded normally, 0 on error.
 *
 * BUGFIX: errors used to be reported as the negative FFmpeg error code, which
 * the caller tests with `if(frameFinish)` — a negative value is truthy, so
 * failures were treated as successfully decoded frames. Return 0 instead.
 */
int RtspCodecThread::decode_packet(AVCodecContext *avctx, AVPacket *packet)
{
#ifndef gcc45
    int ret = avcodec_send_packet(avctx, packet);
    if(ret < 0)
    {
        qDebug() << QTime::currentTime().toString("HH:mm:ss zzz") << "Error during decoding";
        return 0;
    }

    // Pull every frame the decoder has ready for this packet.
    while(ret >= 0)
    {
        ret = avcodec_receive_frame(avctx, avFrame);
        if(ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
            // No more frames available right now — not an error.
            break;
        } else if(ret < 0) {
            qDebug() << QTime::currentTime().toString("HH:mm:ss zzz") << "Error during decoding";
            break;
        }

        // Copy the decoded surface from GPU memory into avFrame2.
        ret = av_hwframe_transfer_data(avFrame2, avFrame, 0);
        if(ret < 0) {
            qDebug() << QTime::currentTime().toString("HH:mm:ss zzz") << "Error transferring the data to system memory";
            av_frame_unref(avFrame2);
            av_frame_unref(avFrame);
            return 0;
        }
    }
#endif
    return 1;
}

// get_format callback for the QSV decoder: when AV_PIX_FMT_QSV is offered,
// build a hardware frames context on the device stored in avctx->opaque
// (a DecodeContext set up in init()) and accept the QSV format.
// Returns AV_PIX_FMT_NONE if QSV is not offered or the context setup fails.
static AVPixelFormat get_qsv_format(AVCodecContext *avctx, const enum AVPixelFormat *pix_fmts)
{
#ifndef gcc45
    while(*pix_fmts != AV_PIX_FMT_NONE)
    {
        if(*pix_fmts == AV_PIX_FMT_QSV)
        {
            DecodeContext *decode = (DecodeContext *)avctx->opaque;
            avctx->hw_frames_ctx = av_hwframe_ctx_alloc(decode->hw_device_ref);
            if(!avctx->hw_frames_ctx)
            {
                return AV_PIX_FMT_NONE;
            }

            AVHWFramesContext *frames_ctx = (AVHWFramesContext *)avctx->hw_frames_ctx->data;
            AVQSVFramesContext *frames_hwctx = (AVQSVFramesContext *)frames_ctx->hwctx;

            // Surface pool description: dimensions aligned to 32 as the coded
            // size may not be; fixed pool of 32 decoder-target video surfaces.
            frames_ctx->format = AV_PIX_FMT_QSV;
            frames_ctx->sw_format = avctx->sw_pix_fmt;
            frames_ctx->width = FFALIGN(avctx->coded_width, 32);
            frames_ctx->height = FFALIGN(avctx->coded_height, 32);
            frames_ctx->initial_pool_size = 32;
            frames_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;

            int ret = av_hwframe_ctx_init(avctx->hw_frames_ctx);
            if(ret < 0)
            {
                return AV_PIX_FMT_NONE;
            }

            return AV_PIX_FMT_QSV;
        }

        pix_fmts++;
    }
#endif

    return AV_PIX_FMT_NONE;
}

// Pixel format selected for the generic hardware decoder path (set in init()).
enum AVPixelFormat hw_pix_fmt;

// get_format callback: accept hw_pix_fmt if the decoder offers it, otherwise
// report that no suitable format was found.
static enum AVPixelFormat get_hw_format(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts)
{
    for (const enum AVPixelFormat *fmt = pix_fmts; *fmt != -1; fmt++)
    {
        if(*fmt == hw_pix_fmt)
        {
            return *fmt;
        }
    }
    return AV_PIX_FMT_NONE;
}

// Opens the stream at `url`, locates the video (and optional audio) streams,
// opens the decoders (software, QSV, or a generic hardware device depending on
// `hardware`), and allocates the packet/frame/scaler machinery used by run().
// Returns false on any failure.
// NOTE(review): early-return error paths leave partially created FFmpeg objects
// to be released later by free() — verify free() covers every path.
bool RtspCodecThread::init()
{
    // Optionally probe the host with a TCP connect first so ffmpeg does not
    // block for the full stimeout on a dead camera.
    if(!checkUrl())
    {
        return false;
    }

    decode = {NULL};
    AVDictionary *options = NULL;
    AVCodec *videoDecoder = NULL;
    AVCodec *audioDecoder = NULL;

    av_dict_set(&options, "buffer_size", "8192000", 0);     // receive buffer size; raise for 1080p
    av_dict_set(&options, "rtsp_transport", "udp", 0);      // open via udp; replace "udp" with "tcp" for tcp transport
    av_dict_set(&options, "stimeout", "3000000", 0);        // socket timeout in microseconds (3000000 = 3 s)
    av_dict_set(&options, "max_delay", "1000000", 0);       // maximum delay in microseconds (1000000 = 1 s)
    av_dict_set(&options, "threads", "auto", 0);            // let ffmpeg choose the decode thread count

    avFormatContext = avformat_alloc_context();             // open the input stream
    int result = avformat_open_input(&avFormatContext, url.toStdString().data(), NULL, &options);
    if(result < 0)
    {
        qDebug() << QTime::currentTime().toString("HH:mm:ss zzz") << "open input error" << url;
        return false;
    }

    // release the option dictionary
    if(options != NULL)
    {
        av_dict_free(&options);
    }

    // read stream information
    result = avformat_find_stream_info(avFormatContext, NULL);
    if(result < 0)
    {
        qDebug() << QTime::currentTime().toString("HH:mm:ss zzz") << "find stream info error";
        return false;
    }

    if(1)      // locate the video stream index (av_find_best_stream or a manual loop both work)
    {

        videoStreamIndex = -1;
        videoStreamIndex = av_find_best_stream(avFormatContext, AVMEDIA_TYPE_VIDEO, -1, -1, &videoDecoder, 0);
        if(videoStreamIndex < 0)
        {
            qDebug() << QTime::currentTime().toString("HH:mm:ss zzz") << "find video stream index error";
            return false;
        }

        AVStream *videoStream = avFormatContext->streams[videoStreamIndex];         // the video stream

        // branch on the selected hardware decode mode
        if(hardware == "none")
        {
            // software path: take the decoder matching the stream's codec id
            // (or specify one by name instead)
            videoCodec = videoStream->codec;
            videoDecoder = avcodec_find_decoder(videoCodec->codec_id);
            //videoDecoder = avcodec_find_decoder_by_name("h264_qsv");
            if(videoDecoder == NULL)
            {
                qDebug() << QTime::currentTime().toString("HH:mm:ss zzz") << "video decoder not found";
                return false;
            }
        }else if(hardware == "qsv")
        {
#ifndef gcc45
            // create the hardware device context
            result = av_hwdevice_ctx_create(&decode.hw_device_ref, AV_HWDEVICE_TYPE_QSV, "auto", NULL, 0);
            if(result < 0)
            {
                qDebug() << QTime::currentTime().toString("HH:mm:ss zzz") << "open the hardware device error";
                return false;
            }

            // h264_qsv for intel processors, h264_cuvid for nvidia
            videoDecoder = avcodec_find_decoder_by_name("h264_qsv");
            if(videoDecoder == NULL)
            {
                qDebug() << QTime::currentTime().toString("HH:mm:ss zzz") << "video decoder not found";
                return false;
            }

            videoCodec = avcodec_alloc_context3(videoDecoder);
            if(!videoCodec)
            {
                qDebug() << QTime::currentTime().toString("HH:mm:ss zzz") << "avcodec_alloc_context3 error";
                return false;
            }

            // copy the stream's extradata (sps/pps) into the fresh context
            videoCodec->codec_id = AV_CODEC_ID_H264;
            if(videoStream->codecpar->extradata_size)
            {
                videoCodec->extradata = (uint8_t *)av_mallocz(videoStream->codecpar->extradata_size + AV_INPUT_BUFFER_PADDING_SIZE);
                if(!videoCodec->extradata)
                {
                    return false;
                }

                memcpy(videoCodec->extradata, videoStream->codecpar->extradata, videoStream->codecpar->extradata_size);
                videoCodec->extradata_size = videoStream->codecpar->extradata_size;
            }

            // get_qsv_format builds the hw frames context from decode.hw_device_ref
            videoCodec->refcounted_frames = 1;
            videoCodec->opaque = &decode;
            videoCodec->get_format = get_qsv_format;
#endif
        } else
        {
#ifndef gcc45
            // generic hardware path: map the device-type name to its pixel format
            enum AVHWDeviceType type = av_hwdevice_find_type_by_name(hardware.toStdString().data());
            qDebug() << QTime::currentTime().toString("HH:mm:ss zzz") << "AVHWDeviceType" << type;
            switch (type)
            {
            case AV_HWDEVICE_TYPE_QSV:
                hw_pix_fmt = AV_PIX_FMT_QSV;
                break;
            case AV_HWDEVICE_TYPE_VAAPI:
                hw_pix_fmt = AV_PIX_FMT_VAAPI;
                break;
            case AV_HWDEVICE_TYPE_DXVA2:
                hw_pix_fmt = AV_PIX_FMT_DXVA2_VLD;
                break;
            case AV_HWDEVICE_TYPE_D3D11VA:
                hw_pix_fmt = AV_PIX_FMT_D3D11;
                break;
            case AV_HWDEVICE_TYPE_VDPAU:
                hw_pix_fmt = AV_PIX_FMT_VDPAU;
                break;
            case AV_HWDEVICE_TYPE_VIDEOTOOLBOX:
                hw_pix_fmt = AV_PIX_FMT_VIDEOTOOLBOX;
                break;
            case AV_HWDEVICE_TYPE_DRM:
                hw_pix_fmt = AV_PIX_FMT_DRM_PRIME;
                break;
            default:
                hw_pix_fmt = AV_PIX_FMT_NONE;
                break;
            }

            if(hw_pix_fmt == -1)
            {
                qDebug() << QTime::currentTime().toString("HH:mm:ss zzz") << "cannot support hardware";
                return false;
            }

            videoCodec = avcodec_alloc_context3(videoDecoder);
            if(!videoCodec)
            {
                qDebug() << QTime::currentTime().toString("HH:mm:ss zzz") << "avcodec_alloc_context3 error";
                return false;
            }

            result = avcodec_parameters_to_context(videoCodec, videoStream->codecpar);
            if(result < 0)
            {
                qDebug() << QTime::currentTime().toString("HH:mm:ss zzz") << "avcodec_parameters_to_context error";
                return false;
            }

            videoCodec->get_format = get_hw_format;
            //av_opt_set_int(videoCodec, "refcounted_frames", 1, 0);

            // create the hardware device context
            result = av_hwdevice_ctx_create(&decode.hw_device_ref, type, NULL, NULL, 0);
            if(result < 0)
            {
                qDebug() << QTime::currentTime().toString("HH:mm:ss zzz") << "open the hardware device error";
                return false;
            }
            videoCodec->hw_device_ctx = av_buffer_ref(decode.hw_device_ref);
#endif
        }
        // enable faster (lower-quality-tolerant) decoding
        videoCodec->lowres = videoDecoder->max_lowres;
#ifndef gcc45
        videoCodec->flags2 |= AV_CODEC_FLAG2_FAST;
#endif

        // open the video decoder
        result = avcodec_open2(videoCodec, videoDecoder, NULL);
        if(result < 0)
        {
            qDebug() << QTime::currentTime().toString("HH:mm:ss zzz") << "open video codec error";
            return false;
        }

        // read the resolution
        videoWidth = videoStream->codec->width;
        videoHeight = videoStream->codec->height;
        // bail out if width/height are unavailable
        if(videoWidth == 0 || videoHeight == 0)
        {
            qDebug() << QTime::currentTime().toString("HH:mm:ss zzz") << "find width height error";
            return false;
        }
        // read the stream's frame rate; both terms must be non-zero since some
        // sources report 0 and a zero divisor must be avoided
        int num = videoStream->avg_frame_rate.num;
        int den = videoStream->avg_frame_rate.den;
        if(num != 0 && den != 0)
        {
            videoFps = num / den;
        }
        /*QString videoInfo = QString("视频流信息 -> 索引: %1  解码: %2  格式: %3  时长: %4 秒  fps: %5  分辨率: %6*%7")
                .arg(videoStreamIndex).arg(videoDecoder->name).arg(avFormatContext->iformat->name)
                .arg((avFormatContext->duration) / 1000000).arg(videoFps).arg(videoWidth).arg(videoHeight);
        qDebug() << QTime::currentTime().toString("HH:mm:ss zzz") << videoInfo;*/
    }

    //---------- audio stream section begin ----------
    if(1)
    {
        // loop over the streams to find the audio stream index
        audioStreamIndex = -1;
        for (uint i = 0; i < avFormatContext->nb_streams; i++)
        {
            if(avFormatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO)
            {
                audioStreamIndex = i;
                break;
            }
        }

        // some sources have no audio stream, so this is not a failure
        if(audioStreamIndex == -1)
        {
            //qDebug() << QTime::currentTime().toString("HH:mm:ss zzz") << "find audio stream index error";
        }else{

            AVStream *audioStream = avFormatContext->streams[audioStreamIndex];         // the audio stream
            audioCodec = audioStream->codec;

            audioDecoder = avcodec_find_decoder(audioCodec->codec_id);      // decoder matching the stream (or specify one by name)
            //audioDecoder = avcodec_find_decoder_by_name("aac");
            if(audioDecoder == NULL)
            {
                qDebug() << QTime::currentTime().toString("HH:mm:ss zzz") << "audio codec not found";
                return false;
            }


            result = avcodec_open2(audioCodec, audioDecoder, NULL);     // open the audio decoder
            if(result < 0)
            {
                qDebug() << QTime::currentTime().toString("HH:mm:ss zzz") << "open audio codec error";
                return false;
            }

            QString audioInfo = QString("音频流信息 -> 索引: %1  解码: %2  比特率: %3  声道数: %4  采样: %5")
                    .arg(audioStreamIndex).arg(audioDecoder->name).arg(avFormatContext->bit_rate)
                    .arg(audioCodec->channels).arg(audioCodec->sample_rate);
            qDebug() << QTime::currentTime().toString("HH:mm:ss zzz") << audioInfo;
        }
    }
    //---------- audio stream section end ----------

    // pre-allocate the packet and frames used by the decode loop
#ifndef gcc45
    avPacket = av_packet_alloc();
#else
    avPacket = new AVPacket;
#endif

    avFrame = av_frame_alloc();
    avFrame2 = av_frame_alloc();
    avFrame3 = av_frame_alloc();

    // re-allocate the RGB buffer only when the resolution changed since last time
    if(oldWidth != videoWidth || oldHeight != videoHeight)
    {
        int byte = avpicture_get_size(AV_PIX_FMT_RGB32, videoWidth, videoHeight);
        buffer = (uint8_t *)av_malloc(byte * sizeof(uint8_t));
        oldWidth = videoWidth;
        oldHeight = videoHeight;
    }

    // either call works
    //avpicture_fill((AVPicture *)avFrame3, buffer, AV_PIX_FMT_RGB32, videoWidth, videoHeight);
    av_image_fill_arrays(avFrame3->data, avFrame3->linesize, buffer, AV_PIX_FMT_RGB32, videoWidth, videoHeight, 1);

    // scaler: software decode yields yuv420p, hardware transfer yields nv12
    if(hardware == "none")
    {
        swsContext = sws_getContext(videoWidth, videoHeight, AV_PIX_FMT_YUV420P, videoWidth, videoHeight, AV_PIX_FMT_RGB32, SWS_FAST_BILINEAR, NULL, NULL, NULL);
    } else
    {
        swsContext = sws_getContext(videoWidth, videoHeight, AV_PIX_FMT_NV12, videoWidth, videoHeight, AV_PIX_FMT_RGB32, SWS_FAST_BILINEAR, NULL, NULL, NULL);
    }

    return true;
}

/**
 * Decode loop: (re)initializes on demand, reads packets, decodes video frames
 * into QImages (emitted via receiveImage) and optionally dumps the raw h264
 * and aac packets to files. Runs until stop() is called or a local file ends.
 *
 * BUGFIXES: receivePlayError() used to sit after `break;` and was never
 * emitted; the video file write used the pre-filter packet size even though
 * av_bsf_filter may resize the packet.
 */
void RtspCodecThread::run()
{
    QTime time;
    while(!stopped)
    {
        // (re)initialize the demuxer/decoders when playback was requested
        if(isPlay)
        {
            if(init())
            {
                // saving enabled: close files possibly left open from a previous run
                if(saveFile)
                {
                    if(fileVideo.isOpen())
                    {
                        fileVideo.close();
                    }

                    if(fileAudio.isOpen())
                    {
                        fileAudio.close();
                    }

                    // a positive interval means the output file is rotated periodically
                    if(saveInterval > 0)
                    {
                        fileName = QString("%1/%2.mp4").arg(savePath).arg(QDateTime::currentDateTime().toString("yyyyMMddHHmmss"));
                        emit sig_startSave();
                    }

                    if(videoStreamIndex >= 0)
                    {
                        fileVideo.setFileName(fileName);
                        fileVideo.open(QFile::WriteOnly);
                    }

                    if(audioStreamIndex >= 0)
                    {
                        // NOTE(review): replace() also mutates the fileName member
                        fileAudio.setFileName(fileName.replace(QFileInfo(fileName).suffix(), "aac"));
                        fileAudio.open(QFile::WriteOnly);
                    }
                }

                emit receivePlayOk();
            } else
            {
                // BUGFIX: emit before leaving the loop (was dead code after break)
                emit receivePlayError();
                break;
            }

            isPlay = false;
            continue;
        }

        if(isPause)
        {
            // while paused keep the liveness timestamp fresh
            lastTime = QDateTime::currentDateTime();
            msleep(1);
            continue;
        }

        time.restart();
        if(av_read_frame(avFormatContext, avPacket) >= 0)
        {
            // dispatch on whether this packet is video or audio
            int packetSize = avPacket->size;
            int index = avPacket->stream_index;
            if(index == videoStreamIndex)
            {
                // decode the video packet (software or hardware path)
                if(hardware == "none")
                {
                    avcodec_decode_video2(videoCodec, avFrame2, &frameFinish, avPacket);
                } else {
                    frameFinish = decode_packet(videoCodec, avPacket);
                }

                if(frameFinish)
                {
                    // only refresh the image every 'interval'-th frame
                    frameCount++;
                    if(frameCount != interval)
                    {
                        av_packet_unref(avPacket);
                        msleep(1);
                        continue;
                    } else {
                        frameCount = 0;
                    }

                    // write the raw video packet to the output file
                    QMutexLocker lock(&mutex);
                    if(fileVideo.isOpen())
                    {
                        // rtmp video streams need pps/sps prepended (mp4 -> annex-b)
#ifndef gcc45
                        av_bsf_filter(filter, avPacket, avFormatContext->streams[videoStreamIndex]->codecpar);
#endif
                        // BUGFIX: use the post-filter size, not packetSize
                        fileVideo.write((const char *)avPacket->data, avPacket->size);
                    }

                    // convert the decoded frame to an RGB image
                    sws_scale(swsContext, (const uint8_t *const *)avFrame2->data, avFrame2->linesize, 0, videoHeight, avFrame3->data, avFrame3->linesize);
                    // either construction works
                    //QImage image(avFrame3->data[0], videoWidth, videoHeight, QImage::Format_RGB32);
                    QImage image((uchar *)buffer, videoWidth, videoHeight, QImage::Format_RGB32);

                    if(!image.isNull())
                    {
                        lastTime = QDateTime::currentDateTime();
                        emit receiveImage(image);

                        int useTime = time.elapsed();
                        if(!isRtsp && videoFps > 0)
                        {
                            // per-frame cost = decode time + fixed 1 ms sleep + ~1 ms overhead
                            int frameTime = useTime + 1 + 1;
                            // wait = (1000 ms - decode time for one second of frames) / fps
                            sleepTime = (1000 - (videoFps * frameTime)) / videoFps;
                            // heavy codecs (e.g. h265) or big images can make this negative
                            sleepTime = sleepTime < 0 ? 0 : sleepTime;
                        }

                        //qDebug() << QTime::currentTime().toString("HH:mm:ss zzz") << image.size() << "use time" << time.elapsed() << "sleep time" << sleepTime;
                    }

                    msleep(sleepTime);
                }
            } else if(index == audioStreamIndex)
            {
                // audio decoding is not handled yet; could be handed to sdl later

                // write the raw audio packet to the output file
                QMutexLocker lock(&mutex);
                if(fileAudio.isOpen())
                {
                    // prepend the adts header (length fields), then the aac payload
                    dtsData[3] = (char)(((2 & 3) << 6) + ((7 + packetSize) >> 11));
                    dtsData[4] = (char)(((7 + packetSize) & 0x7FF) >> 3);
                    dtsData[5] = (char)((((7 + packetSize) & 7) << 5) + 0x1F);
                    fileAudio.write((const char *)dtsData, 7);
                    fileAudio.write((const char *)avPacket->data, packetSize);
                }
            }
        } else if(!isRtsp)
        {
            // a read failure on a local file means playback has finished
            break;
        }

        av_packet_unref(avPacket);
        msleep(1);
    }

    emit sig_stopSave();

    // release all resources once the loop exits
    free();
    stopped = false;
    isPlay = false;
    isPause = false;

    emit receivePlayFinsh();
}

void RtspCodecThread::startSave()
{
    // (Re)start the file-rotation timer with the configured period (s -> ms).
    timerSave->setInterval(saveInterval * 1000);
    timerSave->start();
}

void RtspCodecThread::stopSave()
{
    // Halt the file-rotation timer if it is currently running.
    if(!timerSave->isActive())
    {
        return;
    }
    timerSave->stop();
}

// Timer slot: rotates the recording files — closes the current video/audio
// files and reopens them under a fresh timestamped name. Serialized against
// the decode loop's writes via the shared mutex.
void RtspCodecThread::save()
{
    QMutexLocker lock(&mutex);
    // only act when saving is enabled; close the current files first
    if(saveFile) {
        if(fileVideo.isOpen())
        {
            fileVideo.close();
        }

        if(fileAudio.isOpen())
        {
            fileAudio.close();
        }

        // build a new file name from the current timestamp
        fileName = QString("%1/%2.mp4").arg(savePath).arg(QDateTime::currentDateTime().toString("yyyyMMddHHmmss"));

        if(videoStreamIndex >= 0)
        {
            fileVideo.setFileName(fileName);
            fileVideo.open(QFile::WriteOnly);
        }

        if(audioStreamIndex >= 0)
        {
            // NOTE(review): replace() mutates the fileName member in place, so
            // after this line fileName carries the .aac suffix
            fileAudio.setFileName(fileName.replace(QFileInfo(fileName).suffix(), "aac"));
            fileAudio.open(QFile::WriteOnly);
        }
    }
}

QDateTime RtspCodecThread::getLastTime()
{
    // Timestamp of the most recent decoded image (liveness watchdog value).
    return lastTime;
}

QString RtspCodecThread::getUrl()
{
    // Currently configured stream/file address.
    return url;
}

int RtspCodecThread::getVideoWidth()
{
    return this->videoWidth;
}

int RtspCodecThread::getVideoHeight()
{
    return this->videoHeight;
}

void RtspCodecThread::setInterval(int interval)
{
    QMutexLocker lock(&mutex);
    if(interval > 0)
    {
        this->interval = interval;
        this->frameCount = 0;
    }
}

void RtspCodecThread::setSleepTime(int sleepTime)
{
    if(sleepTime > 0)
    {
        this->sleepTime = sleepTime;
    }
}

void RtspCodecThread::setCheckTime(int checkTime)
{
    this->checkTime = checkTime;
}

void RtspCodecThread::setCheckConn(bool checkConn)
{
    this->checkConn = checkConn;
}

void RtspCodecThread::setUrl(const QString &url)
{
    this->url = url;
    isRtsp = (url.startsWith("rtsp") || url.startsWith("rtmp") || url.startsWith("http"));
}

void RtspCodecThread::setHardware(const QString &hardware)
{
    this->hardware = hardware;
}

void RtspCodecThread::setSaveFile(bool saveFile)
{
    this->saveFile = saveFile;
}

void RtspCodecThread::setSaveInterval(int saveInterval)
{
    this->saveInterval = saveInterval;
    timerSave->setInterval(saveInterval * 1000);
}

void RtspCodecThread::setSavePath(const QString &savePath)
{
    this->savePath = savePath;
}

void RtspCodecThread::setFileName(const QString &fileName)
{
    this->fileName = fileName;
}

bool RtspCodecThread::checkUrl()
{
    if(checkConn && isRtsp)
    {
        QRegExp reg("((?:(?:25[0-5]|2[0-4]\\d|[01]?\\d?\\d)\\.){3}(?:25[0-5]|2[0-4]\\d|[01]?\\d?\\d))");
        reg.indexIn(url);
        QString ip = url.mid(url.indexOf(reg), reg.matchedLength());
        int port = 554;
        int index = url.indexOf(ip);

        //取出端口号
        if(index >= 0)
        {
            //判断该IP地址后面是不是:,是则说明有端口号
            index = index + ip.length();
            QString flag = url.mid(index, 1);
            if(flag == ":") {
                //取出端口号后面的斜杠位置
                bool end = false;
                int start = 1;
                while(!end)
                {
                    flag = url.mid(index + start, 1);
                    if(flag >= "0" && flag <= "9")
                    {
                        start++;
                    } else {
                        port = url.mid(index + 1, start - 1).toInt();
                        end = true;
                    }
                }
            }
        }

        QTcpSocket tcpClient;
        tcpClient.connectToHost(ip, port);

        //超时没有连接上则判断该摄像机不在线
        bool ok = tcpClient.waitForConnected(checkTime);
        tcpClient.abort();
        if(!ok)
        {
            qDebug() << QTime::currentTime().toString("HH:mm:ss zzz") << "rtsp connect error";
            return false;
        }
    }

    return true;
}

void RtspCodecThread::free()
{
    //关闭文件
    if(fileVideo.isOpen())
    {
        fileVideo.close();
    }

    if(fileAudio.isOpen())
    {
        fileAudio.close();
    }

    if(swsContext != NULL)
    {
        sws_freeContext(swsContext);
        swsContext = NULL;
    }

    if(avPacket != NULL)
    {
        av_packet_unref(avPacket);
        avPacket = NULL;
    }

    if(avFrame != NULL)
    {
        av_frame_free(&avFrame);
        avFrame = NULL;
    }

    if(avFrame2 != NULL)
    {
        av_frame_free(&avFrame2);
        avFrame2 = NULL;
    }

    if(avFrame3 != NULL)
    {
        av_frame_free(&avFrame3);
        avFrame3 = NULL;
    }

    if(videoCodec != NULL)
    {
        avcodec_close(videoCodec);
        videoCodec = NULL;
    }

    if(audioCodec != NULL)
    {
        avcodec_close(audioCodec);
        audioCodec = NULL;
    }

    if(avFormatContext != NULL)
    {
        avformat_close_input(&avFormatContext);
        avFormatContext = NULL;
    }

    av_buffer_unref(&decode.hw_device_ref);
}

void RtspCodecThread::play()
{
    // Ask the decode loop in run() to (re)initialize and resume.
    isPause = false;
    isPlay = true;
}

void RtspCodecThread::pause()
{
    // Pausing only makes sense for local files, not live streams.
    if(isRtsp)
    {
        return;
    }
    isPause = true;
}

void RtspCodecThread::next()
{
    // Resuming only makes sense for local files, not live streams.
    if(isRtsp)
    {
        return;
    }
    isPause = false;
}

void RtspCodecThread::stop()
{
    // Signal the decode loop in run() to exit; run() resets the flags on exit.
    stopped = true;
}
