﻿#if defined(_MSC_VER) && (_MSC_VER >= 1600)
# pragma execution_character_set("utf-8")
#endif

#include "ffmpegthread.h"

#include <cstdio>

FFmpegThread::FFmpegThread(QObject *parent) : QThread(parent)
{
    // --- playback state flags ---
    isRun = false;
    isFirstInit = false;
    isPause = false;
    isSpeed = false;
    isSendPosition = true;
    enableFilter = false;
    isPlayAudio = true;
    audioDeviceOk = false;
    enableSyncList = false;

    // --- stream bookkeeping ---
    frameFinish = 0;
    videoWidth = 0;
    videoHeight = 0;
    videoStreamIndex = -1;
    audioStreamIndex = -1;

    // --- FFmpeg/Qt handles: allocated lazily in processInput(),
    //     released in freeResources() ---
    videoData = NULL;
    audioData = NULL;
    avPacket = NULL;
    videoFrame = NULL;
    videoFrameDst = NULL;
    videoFilterFrame = NULL;
    audioFrame = NULL;
    avFormatContext = NULL;
    videoCodecCtx = NULL;
    audioCodecCtx = NULL;
    videoSwsCtx = NULL;
    audioSwrCtx = NULL;
    options = NULL;
    videoDecoder = NULL;
    audioDecoder = NULL;
    audioOutput = NULL;
    audioDevice = NULL;
    formatOut = NULL;

    // --- watermark filter graph handles ---
    filterInputs = NULL;
    filterOutputs = NULL;
    filter_graph = NULL;
    buffersrc_ctx = NULL;
    buffersink_ctx = NULL;

    // Packet queues used when list-based A/V sync is enabled.
    videoSync = new FFmpegSyncList(this);
    audioSync = new FFmpegSyncList(this);
}

//Destructor: releases every FFmpeg/Qt resource still held by this thread
//(sync threads, codec contexts, frames, buffers, audio output).
FFmpegThread::~FFmpegThread()
{
    freeResources();
}

//Runs the full initialization pipeline for a new playback session.
//Returns false as soon as any stage fails.
bool FFmpegThread::processInput()
{
    //Demuxer/network options must be set before opening the input.
    setOption();

    //Each stage must succeed before the next one runs; short-circuit
    //evaluation preserves the original order:
    //input -> video -> filter -> audio -> misc.
    if (!setInput() || !setVideo() || !setFilter() || !setAudio() || !setOther())
    {
        return false;
    }

    //Uncomment to dump stream information for debugging:
    //av_dump_format(avFormatContext, 0, url.toStdString().data(), 0);

    return true;
}

void FFmpegThread::setOption()
{
    //设置缓存大小,1080p可将值调大
    av_dict_set(&options, "buffer_size", "8192000", 0);

    //以tcp方式打开,如果以udp方式打开将tcp替换为udp
    av_dict_set(&options, "rtsp_transport", "tcp", 0);

    //设置超时断开连接时间,单位微秒,3000000表示3秒
    av_dict_set(&options, "stimeout", "3000000", 0);

    //设置最大时延,单位微秒,1000000表示1秒
    av_dict_set(&options, "max_delay", "1000000", 0);

    //自动开启线程数
    av_dict_set(&options, "threads", "auto", 0);
}

bool FFmpegThread::setInput()
{
    avFormatContext = avformat_alloc_context();

    AVInputFormat *iFmt = NULL;
    if(isUsbCamera)
    {
        iFmt = av_find_input_format("dshow");
    }

    int result = avformat_open_input(&avFormatContext, playUrl.toStdString().data(), iFmt, &options);
    if (result < 0)
    {
        qDebug() << TIMEMS << "open input error" << playUrl;
        return false;
    }

    //获取流信息
    result = avformat_find_stream_info(avFormatContext, NULL);
    if (result < 0)
    {
        qDebug() << TIMEMS << "find stream info error";
        return false;
    }

    return true;
}

//Locates the video stream, opens its decoder, and prepares the RGB32
//conversion pipeline (frames, buffer, sws context).
//Returns false on any failure.
bool FFmpegThread::setVideo()
{
    //Total duration in seconds. Live streams have no fixed duration, so the
    //playback-position signal is disabled for them.
    duration = avFormatContext->duration / AV_TIME_BASE;
    if(duration < 0)
    {
        isSendPosition = false;
    }

    //Locate the best video stream.
    videoStreamIndex = av_find_best_stream(avFormatContext, AVMEDIA_TYPE_VIDEO, -1, -1, &videoDecoder, 0);
    if (videoStreamIndex < 0)
    {
        qDebug() << TIMEMS << "find video stream index error";
        return false;
    }

    //Stream properties needed later for pts normalization and save timing.
    AVStream *videoStream = avFormatContext->streams[videoStreamIndex];
    videoFirstPts = videoStream->start_time;
    frameRate = av_q2d(videoStream->r_frame_rate);

    //Fall back to 25 fps when the reported frame rate looks abnormal.
    //BUGFIX: the original condition used && (can never be true); || matches
    //the stated intent. Note this also clamps legitimate >35 fps sources --
    //widen the range if such sources must keep their real rate.
    if(frameRate < 15 || frameRate > 35)
    {
        frameRate = 25;
    }

    //Decoder context taken from the stream (legacy FFmpeg API kept to match
    //the version this project builds against). A specific decoder such as
    //"h264_qsv" can be forced instead.
    videoCodecCtx = videoStream->codec;
    videoDecoder = avcodec_find_decoder(videoCodecCtx->codec_id);
    //videoDecoder = avcodec_find_decoder_by_name("h264_qsv");
    if (videoDecoder == NULL)
    {
        qDebug() << TIMEMS << "video decoder not found";
        return false;
    }

    //Open the decoder with speed-oriented settings: allow low-resolution
    //decoding and non-spec-compliant shortcuts.
    videoCodecCtx->lowres = videoDecoder->max_lowres;
    videoCodecCtx->flags2 |= AV_CODEC_FLAG2_FAST;
    int result = avcodec_open2(videoCodecCtx, videoDecoder, NULL);
    if (result < 0)
    {
        qDebug() << TIMEMS << "open video codec error";
        return false;
    }

    //Resolution must be known to size the RGB buffer.
    videoWidth = videoStream->codec->width;
    videoHeight = videoStream->codec->height;
    if (videoWidth == 0 || videoHeight == 0)
    {
        qDebug() << TIMEMS << "find width height error";
        return false;
    }

    //Frames released in freeResources().
    videoFrame = av_frame_alloc();
    videoFrameDst = av_frame_alloc();
    videoFilterFrame = av_frame_alloc();

    //RGB32 buffer holding one converted frame (freed in freeResources()).
    int byte = avpicture_get_size(AV_PIX_FMT_RGB32, videoWidth, videoHeight);
    videoData = (uint8_t *)av_malloc(byte * sizeof(uint8_t));
    if (!videoData)
    {
        //Nothing to free here: the allocation failed.
        qDebug() << "videoData malloc fail";
        return false;
    }

    //Source/destination pixel formats for the scaler.
    AVPixelFormat srcFormat = videoCodecCtx->pix_fmt;  //typically AV_PIX_FMT_YUV420P
    AVPixelFormat dstFormat = AV_PIX_FMT_RGB32;

    //SWS_FAST_BILINEAR is fastest but may lose detail; SWS_BILINEAR is a
    //reasonable quality/speed compromise.
    int flags = SWS_BILINEAR;

    //Point videoFrameDst->data at videoData and create the converter.
    av_image_fill_arrays(videoFrameDst->data, videoFrameDst->linesize, videoData, dstFormat, videoWidth, videoHeight, 1);
    videoSwsCtx = sws_getContext(videoWidth, videoHeight, srcFormat, videoWidth, videoHeight, dstFormat, flags, NULL, NULL, NULL);

    QString videoInfo = QString("视频流信息 -> 索引: %1  解码: %2  时长: %3 秒  分辨率: %4*%5")
            .arg(videoStreamIndex)
            .arg(videoDecoder->name)
            .arg(duration)
            .arg(videoWidth).arg(videoHeight);

    qDebug() << TIMEMS << videoInfo;
    return true;
}

bool FFmpegThread::setAudio()
{
    //循环查找音频流索引
    audioStreamIndex = -1;
    for(uint i = 0; i < avFormatContext->nb_streams; i++)
    {
        if (avFormatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO)
        {
            audioStreamIndex = i;
            break;
        }
    }

    //有些没有音频流,所以这里不视为失败
    if(audioStreamIndex == -1)
    {
        qDebug() << TIMEMS << "find audio stream index error";
        audioDeviceOk = false;
    }
    else
    {
        //获取音频流
        AVStream *audioStream = avFormatContext->streams[audioStreamIndex];
        audioCodecCtx = audioStream->codec;
        audioFirstPts = audioStream->start_time;

        //获取音频流解码器,或者指定解码器
        audioDecoder = avcodec_find_decoder(audioCodecCtx->codec_id);
        //audioDecoder = avcodec_find_decoder_by_name("aac");
        if (audioDecoder == NULL)
        {
            qDebug() << TIMEMS << "audio codec not found";
            return false;
        }

        //打开音频解码器
        int result = avcodec_open2(audioCodecCtx, audioDecoder, NULL);
        if (result < 0)
        {
            qDebug() << TIMEMS << "open audio codec error";
            return false;
        }

        //初始化音频设备（qt自带接口）
        int sampleRate = audioCodecCtx->sample_rate;
        int sampleSize = av_get_bytes_per_sample(audioCodecCtx->sample_fmt)/2;
        int channelCount = audioCodecCtx->channels;
        audioDeviceOk = setAudioDevice(sampleRate, sampleSize, channelCount);

        //设置音频转换
        if(audioDeviceOk)
        {
            //内存分配
            audioFrame = av_frame_alloc();
            audioSwrCtx = swr_alloc();

            int64_t channelOut = AV_CH_LAYOUT_STEREO;
            int64_t channelIn = av_get_default_channel_layout(audioCodecCtx->channels);
            audioSwrCtx = swr_alloc_set_opts(audioSwrCtx, channelOut, AV_SAMPLE_FMT_S16, sampleRate, channelIn, audioCodecCtx->sample_fmt, sampleRate, 0, 0);
            audioDeviceOk = (swr_init(audioSwrCtx) >= 0);

            //分配音频数据内存 192000 ffplay代码值 可调节
            if (audioDeviceOk)
            {
                quint64 byte = 19200;
                audioData = (uint8_t *)av_malloc(byte * sizeof(uint8_t));
                if (!audioData)
                {
                    audioDeviceOk = false;
                    qDebug() << "audioData malloc fail";
                    av_free(audioData);
                    return false;
                }
            }
        }

        QString audioInfo = QString("音频流信息 -> 索引: %1  解码: %2  比特率: %3  声道数: %4  采样: %5")
                .arg(audioStreamIndex)
                .arg(audioDecoder->name)
                .arg(avFormatContext->bit_rate)
                .arg(audioCodecCtx->channels)
                .arg(audioCodecCtx->sample_rate);
        qDebug() << TIMEMS << audioInfo;
    }

    return true;
}

bool FFmpegThread::setAudioDevice(int sampleRate, int sampleSize, int channelCount)
{
    QAudioFormat format;
    format.setCodec("audio/pcm");
    format.setSampleRate(sampleRate);
    format.setSampleSize(sampleSize * 8);
    format.setChannelCount(channelCount);
    format.setSampleType(QAudioFormat::SignedInt);
    format.setByteOrder(QAudioFormat::LittleEndian);

    QAudioDeviceInfo info(QAudioDeviceInfo::defaultOutputDevice());
    bool res = info.isFormatSupported(format);
    if(res)
    {
        audioOutput = new QAudioOutput(QAudioDeviceInfo::defaultOutputDevice(), format);

        //设置下缓存不然部分文件播放音频一卡卡  太大可能会导致崩溃分配内存失败
        audioOutput->setBufferSize(40960);
        audioOutput->setVolume(1.0);
        audioDevice = audioOutput->start();
    }
    else
    {
        qDebug() << TIMEMS << "Raw audio format not supported, cannot play audio.";
    }

    return res;
}

bool FFmpegThread::setOther()
{  
    //avPacket内存分配
    avPacket = av_packet_alloc();

    //其它初始化设置
    // ...

    return true;
}

//Decodes one video packet, optionally runs it through the watermark filter
//graph, converts it to RGB32, and emits the QImage plus playback position.
void FFmpegThread::decodeVideoPacket(AVPacket *packet)
{
    //Feed the compressed packet to the decoder.
    frameFinish = avcodec_send_packet(videoCodecCtx, packet);
    if (frameFinish < 0)
    {
        return;
    }

    //NOTE(review): only one frame is drained per packet; decoders that buffer
    //multiple frames would need a receive loop here -- confirm against the
    //streams this player targets.
    frameFinish = avcodec_receive_frame(videoCodecCtx, videoFrame);
    if(frameFinish >= 0)
    {
        //Choose between the raw decoded frame and the watermarked one.
        AVFrame *readyFrame = videoFrame;
        if(enableFilter)
        {
            //Push the decoded frame into the filter graph.
            if (av_buffersrc_add_frame(buffersrc_ctx, videoFrame) < 0)
            {
                qDebug() << "Error while feeding the filtergraph";
            }

            //Pull the filtered frame back out; on failure fall back to the
            //unfiltered frame.
            if(av_buffersink_get_frame(buffersink_ctx, videoFilterFrame) < 0)
            {
                qDebug() << "av_buffersink_get_frame error";
            }
            else
            {
                readyFrame = videoFilterFrame;
            }
        }

        //Convert the frame to RGB32. videoFrameDst->data points at videoData
        //(wired up in setVideo()), so the pixels land in videoData.
        sws_scale(videoSwsCtx, (const uint8_t *const *)readyFrame->data, readyFrame->linesize, 0, videoHeight, videoFrameDst->data, videoFrameDst->linesize);

        //Wrap videoData in a QImage (no copy) and hand it to the UI.
        QImage image((uchar *)videoData, videoWidth, videoHeight, QImage::Format_RGB32);
        if (!image.isNull())
        {
            emit sendImage(image);
        }

        //Report the playback position. The packet pts matches the frame pts
        //for the same instant; using the packet avoids any influence from the
        //watermark filter.
        if(isSendPosition)
        {
            int curtime = packet->pts * av_q2d(avFormatContext->streams[videoStreamIndex]->time_base);
            if(curtime != frameTime)
            {
                frameTime = curtime;
                emit sendPlayPosition(frameTime);
            }
        }

        //Release frame references so their buffers can be reused.
        av_frame_unref(videoFrame);
        if(enableFilter)
        {
            av_frame_unref(videoFilterFrame);
        }
    }
}

//Decodes one audio packet, resamples it to stereo S16, and writes the PCM
//bytes to the Qt audio device. No-op when audio is disabled or the audio
//pipeline failed to initialize.
void FFmpegThread::decodeAudioPacket(AVPacket *packet)
{   
    //Audio playback switched off by the user.
    if(!isPlayAudio)
    {
        return;
    }

    //Audio device/resampler setup did not succeed; nothing to play into.
    if(!audioDeviceOk)
    {
        return;
    }

    frameFinish = avcodec_send_packet(audioCodecCtx, packet);
    if(frameFinish < 0)
    {
        return;
    }

    frameFinish = avcodec_receive_frame(audioCodecCtx, audioFrame);
    if(frameFinish >= 0)
    {
        //Byte size of the converted stereo/S16 output for this frame.
        int outChannel = av_get_channel_layout_nb_channels(AV_CH_LAYOUT_STEREO);
        int outSize = av_samples_get_buffer_size(NULL, outChannel, audioFrame->nb_samples, AV_SAMPLE_FMT_S16, 1);

        //NOTE(review): swr_convert's 3rd argument is a capacity in SAMPLES,
        //but outSize is in BYTES -- it works only because it over-states the
        //capacity; confirm audioData is large enough for outSize samples.
        int result = swr_convert(audioSwrCtx, &audioData, outSize, (const uint8_t **)audioFrame->data, audioFrame->nb_samples);
        if(result >= 0)
        {
            //Same sample rate in and out, so the converted frame is outSize
            //bytes long.
            audioDevice->write((char *)audioData, outSize);
        }

        av_frame_unref(audioFrame);
    }
}

void FFmpegThread::freeResources()
{
    //需要先释放队列同步线程的资源
    freeSync();

    if (videoSwsCtx != NULL)
    {
        sws_freeContext(videoSwsCtx);
        videoSwsCtx = NULL;
    }

    if (audioSwrCtx != NULL)
    {
        swr_free(&audioSwrCtx);
        audioSwrCtx = NULL;
    }

    if (avPacket != NULL)
    {
        av_packet_free(&avPacket);
        avPacket = NULL;
    }

    if (videoFrame != NULL)
    {
        av_frame_free(&videoFrame);
        videoFrame = NULL;
    }

    if (videoFrameDst != NULL)
    {
        av_frame_free(&videoFrameDst);
        videoFrameDst = NULL;
    }

    if (videoFilterFrame != NULL)
    {
        av_frame_free(&videoFilterFrame);
        videoFilterFrame = NULL;
    }

    if (audioFrame != NULL)
    {
        av_frame_free(&audioFrame);
        audioFrame = NULL;
    }

    if (videoCodecCtx != NULL)
    {
        avcodec_close(videoCodecCtx);
        videoCodecCtx = NULL;
    }

    if (audioCodecCtx != NULL)
    {
        avcodec_close(audioCodecCtx);
        audioCodecCtx = NULL;
    }

    if (avFormatContext != NULL)
    {
        avformat_close_input(&avFormatContext);
        avFormatContext = NULL;
    }

    if(filterInputs != NULL)
    {
        avfilter_inout_free(&filterInputs);
        filterInputs = NULL;
    }

    if(filterOutputs != NULL)
    {
        avfilter_inout_free(&filterOutputs);
        filterOutputs = NULL;
    }

    if(filter_graph != NULL)
    {
        avfilter_graph_free(&filter_graph);
        filter_graph = NULL;
    }

    av_dict_free(&options);

    //释放音频资源  请教刘大佬说这是公共音频类，没理解透彻，难道释放会影响其它正在播放的文件？未测试
    //目前释放暂无问题，特此记录
    freeAudio();
}

//Tears down the Qt audio output.
//BUGFIX: the QIODevice returned by QAudioOutput::start() is owned by the
//QAudioOutput and must NOT be deleted by us -- the old code deleted the
//owner first and then delete'd the (already destroyed) device, which is a
//double free / use-after-free. We only drop the pointer here.
void FFmpegThread::freeAudio()
{
    //Forget the device pointer before destroying its owner so nothing can
    //touch a dangling QIODevice.
    audioDevice = NULL;

    if(audioOutput != NULL)
    {
        //Stop playback first; the destructor then releases the device and
        //its buffer (a small delay from buffered data, basically harmless).
        audioOutput->stop();
        delete audioOutput;
        audioOutput = NULL;
    }
}

void FFmpegThread::freeSync()
{
    //停止解码同步线程
    if (audioSync->isRunning())
    {
        audioSync->stopRun();
        audioSync->quit();
        audioSync->wait(150);
    }

    if (videoSync->isRunning())
    {
        videoSync->stopRun();
        videoSync->quit();
        videoSync->wait(150);
    }
}

//Builds the watermark filter graph: buffer source (decoded frames in) ->
//drawtext watermark -> buffer sink (filtered frames out).
//Returns false on any setup failure.
bool FFmpegThread::setFilter()
{
    //Text watermark (drawtext filter).
    std::string filters_descr = "drawtext=fontfile=./font_watermark/simhei.ttf:fontsize=60:fontcolor=green:alpha=0.5:text=你好hello:x=50:y=50";
    //Image watermark alternative:
    //    std::string filters_descr = "movie=./font_watermark/test.png[watermark];[in][watermark]overlay=10:10[out]";
    enum AVPixelFormat pix_fmts[] = {AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE};

    const AVFilter *buffersrc  = avfilter_get_by_name("buffer");
    const AVFilter *buffersink = avfilter_get_by_name("buffersink");

    filterOutputs = avfilter_inout_alloc();
    filterInputs  = avfilter_inout_alloc();
    filter_graph = avfilter_graph_alloc();
    if (!filterOutputs || !filterInputs || !filter_graph)
    {
        qDebug() << "avfilter inout/graph alloc error";
        return false;
    }

    char args[512];
    AVCodecContext *codecContext = videoCodecCtx;

    /* buffer video source: the decoded frames from the decoder will be inserted here. */
    //PORTABILITY FIX: snprintf (C99/C++11) replaces the MSVC-only sprintf_s.
    snprintf(args, sizeof(args), "video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=%d/%d",
             codecContext->width, codecContext->height, codecContext->pix_fmt,
             codecContext->time_base.num, codecContext->time_base.den,
             codecContext->sample_aspect_ratio.num, codecContext->sample_aspect_ratio.den);

    int ret = avfilter_graph_create_filter(&buffersrc_ctx, buffersrc, "in", args, NULL, filter_graph);
    if (ret < 0)
    {
        qDebug() << "Cannot create buffer source";
        return false;
    }

    /* buffer video sink: to terminate the filter chain. */
    ret = avfilter_graph_create_filter(&buffersink_ctx, buffersink, "out", NULL, NULL, filter_graph);
    if (ret < 0)
    {
        qDebug() << "Cannot create buffer sink";
        return false;
    }

    //Restrict the sink to YUV420P so sws_scale sees a known input format.
    ret = av_opt_set_int_list(buffersink_ctx, "pix_fmts", pix_fmts, AV_PIX_FMT_NONE, AV_OPT_SEARCH_CHILDREN);
    if (ret < 0)
    {
        qDebug() << "Cannot set output pixel format";
        return false;
    }

    /* Endpoints for the filter graph. */
    filterOutputs->name       = av_strdup("in");
    filterOutputs->filter_ctx = buffersrc_ctx;
    filterOutputs->pad_idx    = 0;
    filterOutputs->next       = NULL;

    filterInputs->name       = av_strdup("out");
    filterInputs->filter_ctx = buffersink_ctx;
    filterInputs->pad_idx    = 0;
    filterInputs->next       = NULL;

    if (avfilter_graph_parse_ptr(filter_graph, filters_descr.c_str(),&filterInputs, &filterOutputs, NULL) < 0)
    {
        qDebug() << "avfilter_graph_parse_ptr error";
        return false;
    }

    if (avfilter_graph_config(filter_graph, NULL) < 0)
    {
        qDebug() << "avfilter_graph_config error";
        return false;
    }

    return true;
}

//Routes one video packet: normalizes its pts, then either queues it for the
//sync thread or paces and decodes it inline.
void FFmpegThread::transVideoPacket(AVPacket *packet)
{
    //Some MP4 files do not start at pts 0; shift to a zero-based timeline.
    if (videoFirstPts > AV_TIME_BASE)
    {
        packet->pts -= videoFirstPts;
        packet->dts = packet->pts;
    }

    if(enableSyncList)
    {
        //Queue mode: hand a clone to the video sync thread, which decodes it.
        videoSync->setType(1);
        videoSync->appendPacket(av_packet_clone(packet));
    }
    else
    {
        //When an audio device is active, audio decoding drives the clock;
        //otherwise pace video by its own pts.
        //BUGFIX: the delay must be computed from this packet, not the shared
        //member avPacket (identical today only because run() passes avPacket).
        if(!audioDevice)
        {
            timeDelay(avFormatContext, packet);
        }
        decodeVideoPacket(packet);
    }
}

//Routes one audio packet: normalizes its pts, then either queues it for the
//sync thread or paces and decodes it inline.
void FFmpegThread::transAudioPacket(AVPacket *packet)
{
    //Some MP4 files do not start at pts 0; shift to a zero-based timeline.
    if (audioFirstPts > AV_TIME_BASE)
    {
        packet->pts -= audioFirstPts;
        packet->dts = packet->pts;
    }

    if(enableSyncList)
    {
        //Queue mode: hand a clone to the audio sync thread, which decodes it.
        audioSync->setType(2);
        audioSync->appendPacket(av_packet_clone(packet));
    }
    else
    {
        //Pace playback by the packet pts, then decode.
        //BUGFIX: the delay must be computed from this packet, not the shared
        //member avPacket (identical today only because run() passes avPacket).
        timeDelay(avFormatContext, packet);
        decodeAudioPacket(packet);
    }
}

//Returns a usable timestamp for the packet. Some containers (e.g. asf)
//carry no pts, so prefer pts only when dts is missing, fall back to dts,
//and return 0 when neither is available.
qint64 FFmpegThread::getPacketPts(AVPacket *packet)
{
    if(packet->dts == AV_NOPTS_VALUE && packet->pts && packet->pts != AV_NOPTS_VALUE)
    {
        return packet->pts;
    }

    if (packet->dts != AV_NOPTS_VALUE)
    {
        return packet->dts;
    }

    return 0;
}

//Computes how long (microseconds) the caller should wait before presenting
//this packet: the packet's speed-scaled presentation time minus the wall
//clock elapsed since playback started. Positive = we are ahead, sleep;
//negative/zero = we are behind, present immediately.
qint64 FFmpegThread::calcDelayTime(AVFormatContext *formatCtx, AVPacket *packet)
{   
    //Scale the timestamp by the current (and previous) speed factor.
    //NOTE(review): playSpeed multiplies pts, so values > 1 appear to slow
    //playback down -- confirm the intended mapping in setPlaySpeed().
    int64_t pts = getPacketPts(packet);
    int64_t newPts = pts * playSpeed;
    int64_t lastPts = pts * lastPlaySpeed;

    //Convert the pts to microseconds (AV_TIME_BASE units).
    AVRational time_base = formatCtx->streams[packet->stream_index]->time_base;
    AVRational time_base_q = {1, AV_TIME_BASE};
    int64_t lastPts_time = 0;
    int64_t newPts_time = av_rescale_q(newPts, time_base, time_base_q);

    //On a speed change, shift the start-of-playback reference so the clock
    //stays continuous across the switch.
    if(switchSpeed)
    {
        lastPts_time = av_rescale_q(lastPts, time_base, time_base_q);
        int64_t correction = lastPts_time - newPts_time;
        startPrecessTime += correction;

        lastPlaySpeed = playSpeed;
        switchSpeed = false;
    }

    //Wall-clock time elapsed since playback started.
    int64_t now_time = av_gettime() - startPrecessTime;

    //Difference between when the packet should play and now.
    int64_t offset_time = newPts_time - now_time;
    return offset_time;
}

//Sleeps until the packet's presentation time, but never more than one
//second (guards against bogus timestamps after seeks or stream glitches).
void FFmpegThread::timeDelay(AVFormatContext *formatCtx, AVPacket *packet)
{
    const qint64 waitUs = calcDelayTime(formatCtx, packet);
    const qint64 maxWaitUs = 1 * 1000 * 1000;
    if (waitUs > 0 && waitUs < maxWaitUs)
    {
        av_usleep(waitUs);
    }
}

void FFmpegThread::setPlayUrl(const QString &url)
{
    this->playUrl = url;

    isUsbCamera = url.startsWith("video");
    isNetUrl = url.startsWith("rtsp", Qt::CaseInsensitive) ||
               url.startsWith("rtmp", Qt::CaseInsensitive) ||
               url.startsWith("http", Qt::CaseInsensitive);
}

//Arms the run() loop: request the running state, force re-initialization of
//the decode pipeline, and clear any pause.
void FFmpegThread::startPlay()
{
    isRun = true;
    isFirstInit = true;
    isPause = false;
}

//Asks the decode loop in run() to exit; cleanup happens there.
void FFmpegThread::stopPlay()
{
    isRun = false;
}

//Pauses playback, remembering when the pause began so nextPlay() can shift
//the playback clock by the paused duration.
void FFmpegThread::pausePlay()
{
    pauseTime = av_gettime();
    isPause = true;
}

void FFmpegThread::nextPlay()
{
    int64_t offset = av_gettime() - pauseTime;
    startPrecessTime += offset;

    isPause = false;
}

int FFmpegThread::getLength()
{
    return this->duration;
}

void FFmpegThread::setAudioEnable(bool enable)
{
    this->isPlayAudio = enable;
}

//Seeks to the given position (milliseconds). Pauses the decode loop, seeks
//backward to the nearest keyframe, rebases the playback clock, and resumes.
void FFmpegThread::setPlayPosition(qint64 position)
{
    //Ignore seeks when nothing is playing.
    if(!isRun)
    {
        return;
    }

    //Pause the decode loop so it does not race the seek.
    if(this->isRunning())
    {
        pausePlay();
    }

    //Target time at normal speed, in microseconds.
    int64_t timestamp = position * 1000;
    av_seek_frame(avFormatContext, -1, timestamp, AVSEEK_FLAG_BACKWARD);

    //Rebase the wall-clock reference used by calcDelayTime() so pts-based
    //pacing continues from the new position, compensating for the current
    //speed factor.
    double level = 1 - playSpeed;
    int64_t offset = timestamp * level;
    startPrecessTime = av_gettime() - timestamp + offset;

    //Give the loop a moment to observe the pause before resuming.
    msleep(5);
    nextPlay();
}

//Switches the playback speed level. Levels 0,1,2,3 correspond to 0.5x,
//normal, 2x, 4x per the original author's note.
//NOTE(review): playSpeed multiplies packet pts in calcDelayTime(), so the
//mapping (0 -> 1.4, 1 -> 1.0, 2 -> 0.6, 3 -> 0.4) stretches or compresses
//the timeline rather than being the literal speed factor -- confirm the
//intended values against actual playback behavior.
void FFmpegThread::setPlaySpeed(int level)
{
    //No-op when the level is unchanged; otherwise flag the switch so
    //calcDelayTime() can rebase the clock once.
    if(lastSpeedLevel == level)
    {
        return;
    }
    else
    {
        switchSpeed = true;
        lastSpeedLevel = level;
    }

    //Pause while the speed factor changes.
    pausePlay();

    //playSpeed feeds the pts-based pacing in calcDelayTime().
    switch (level)
    {
    case 0:
        playSpeed = 1.4;
        break;
    case 1:
        playSpeed = 1.0;
        break;
    case 2:
        playSpeed = 0.6;
        break;
    case 3:
        playSpeed = 0.4;
        break;
    default:
        break;
    }

    //Give the loop a moment to observe the pause before resuming.
    msleep(5);
    nextPlay();
}

void FFmpegThread::setPositionEnable(bool enable)
{
    this->isSendPosition = enable;
}

int FFmpegThread::getVideoWidth()
{
    return this->videoWidth;
}

int FFmpegThread::getVideoHeight()
{
    return this->videoHeight;
}

void FFmpegThread::setEnableSyncList(bool enable)
{
    this->enableSyncList = enable;
}

//Prepares the MP4 muxer for stream recording: output context, one video
//stream copied from the input, the output file, and the header. Sets
//initSaveOk on success; on failure startSaveData() will trigger cleanup
//via stopSaveData().
void FFmpegThread::initSaveFile()
{
    initSaveOk = false;

    //Allocate an output context for the mp4 muxer.
    std::string tempName = saveFileName.toStdString();
    const char *filename = tempName.c_str();
    avformat_alloc_output_context2(&formatOut, NULL, "mp4", filename);
    //BUGFIX: the result was previously unchecked; a NULL context would be
    //dereferenced by avformat_new_stream below.
    if (formatOut == NULL)
    {
        qDebug() << "Could not create output context";
        return;
    }

    //Create the output video stream mirroring the input stream.
    AVStream *streamOut = avformat_new_stream(formatOut, NULL);
    if (streamOut == NULL)
    {
        qDebug() << "Failed to allocate output stream";
        return;
    }
    AVStream *streamIn = avFormatContext->streams[videoStreamIndex];

    //Copy the codec parameters so packets can be remuxed without re-encoding.
    int res = avcodec_parameters_copy(streamOut->codecpar, streamIn->codecpar);
    if(res < 0)
    {
        qDebug() << "Failed to copy codec parameters" << res;
        return;
    }
    //Let the muxer pick the proper codec tag for mp4.
    streamOut->codecpar->codec_tag = 0;

    //Open the output file unless the format writes to no file.
    if(!(formatOut->oformat->flags & AVFMT_NOFILE))
    {
        res = avio_open(&formatOut->pb, filename, AVIO_FLAG_WRITE);
        if(res < 0)
        {
            qDebug() << "Could not open output file" << res;
            return;
        }
    }

    //Write the container header.
    res = avformat_write_header(formatOut, NULL);
    if(res < 0)
    {
        qDebug() << "avformat_write_header error" << res;
        return;
    }

    initSaveOk = true;
}

//Writes one packet to the recording file, rewriting pts/dts onto the output
//stream's timeline. Aborts (and cleans up) if the muxer never initialized.
void FFmpegThread::startSaveData(AVPacket *packet)
{
    //Muxer setup failed: release whatever was created and stop recording.
    if(!initSaveOk)
    {
        stopSaveData();
        return;
    }

    //Rebuild timestamps from a simple frame counter: frame N plays at
    //N / frameRate seconds, expressed in the output stream's time base.
    saveCount++;
    AVStream *streamOut = formatOut->streams[0];
    packet->pts = (saveCount * streamOut->time_base.den) / (streamOut->time_base.num * frameRate);
    packet->dts = packet->pts;
    av_write_frame(formatOut, packet);
    //qDebug() << TIMEMS << packetCount << packet->pts << streamOut->time_base.num << streamOut->time_base.den;
}

//Finalizes the recording: writes the trailer (when the header was written),
//then tears down the output context and resets the recording state flags.
void FFmpegThread::stopSaveData()
{
    //Nothing to finalize.
    if(formatOut == NULL)
    {
        return;
    }

    //Only write the trailer if avformat_write_header succeeded.
    if(initSaveOk)
    {
        av_write_trailer(formatOut);
    }

    //Manual teardown via the legacy stream->codec API, matching the FFmpeg
    //version this project targets.
    avcodec_close(formatOut->streams[0]->codec);
    av_freep(&formatOut->streams[0]->codec);
    av_freep(&formatOut->streams[0]);
    avio_close(formatOut->pb);
    av_free(formatOut);
    formatOut = NULL;
    saveCount = 0;

    isSave = false;
    initSaveOk = false;
    isFirstInitSave = true;
}

void FFmpegThread::startSaveFile(const QString &path)
{
    initSaveOk = false;
    saveCount = 0;

    if(videoStreamIndex < 0)
    {
        return;
    }

    if(path.isEmpty())
    {
        return;
    }

    this->saveFileName = path;

    isSave = true;
    isFirstInitSave = true;
    saveStatus = true;
}

//Requests the run() loop to finalize the recording via stopSaveData().
void FFmpegThread::stopSaveFile()
{
    saveStatus = false;
}

//Thread entry point: initializes playback on demand, then loops reading
//packets and dispatching them to the video/audio paths, handling pause,
//queue back-pressure, and recording along the way. Cleans up on exit.
void FFmpegThread::run()
{
    //Reset per-session state.
    frameTime = 0;
    pauseTime = 0;
    switchSpeed = false;
    lastSpeedLevel = 1;
    lastPlaySpeed = 1;
    playSpeed = 1;

    isFirstInitSave = false;
    isSave = false;

    //Wall-clock reference for pts-based pacing (see calcDelayTime()).
    startPrecessTime = av_gettime();

    while(isRun)
    {
        //Lazy initialization, triggered by startPlay().
        if (isFirstInit)
        {
            bool res = processInput();
            if(!res)
            {
                emit sendPlayError();
                break;
            }

            isFirstInit = false;
            //Queue mode: start both sync threads and point them at us.
            if(enableSyncList)
            {
                videoSync->startRun();
                videoSync->setThread(this);
                audioSync->startRun();
                audioSync->setThread(this);
            }
            emit sendPlayStart();
        }

        //Paused: stop decoding/rendering and idle.
        if(isPause)
        {
            msleep(50);
            continue;
        }

        //Back-pressure: stop reading while either decode queue is full
        //(threshold of 50 packets is tunable).
        if (enableSyncList && (videoSync->getPacketListSize() >= 50 || audioSync->getPacketListSize() >= 50))
        {
            msleep(1);
            continue;
        }

        //Read the next packet and dispatch by stream type; EOF or a read
        //error ends playback.
        if(av_read_frame(avFormatContext, avPacket) >= 0)
        {
            int index = avPacket->stream_index;
            if (index == videoStreamIndex)
            {
                transVideoPacket(avPacket);
            }
            else if (index == audioStreamIndex)
            {
                transAudioPacket(avPacket);
            }
        }
        else
        {
            break;
        }

        //Recording: set up the muxer on first use, then write or finalize
        //depending on saveStatus.
        if(isSave)
        {
            if(isFirstInitSave)
            {
                initSaveFile();
                isFirstInitSave = false;
            }

            if(saveStatus)
            {
                startSaveData(avPacket);
            }
            else
            {
                stopSaveData();
            }
        }

        //Release the packet's payload before the next read.
        av_packet_unref(avPacket);
    }

    //Loop finished: reset flags and release all resources.
    isRun = false;
    isFirstInit = false;
    isPause = false;
    freeResources();

    emit sendPlayFinish();
}
