#include <QWidget>
#include <QDebug>
#include "renderthread.h"
#include "videoplayer.h"

// Mapping from FFmpeg pixel formats to the matching SDL texture formats,
// terminated by the {AV_PIX_FMT_NONE, SDL_PIXELFORMAT_UNKNOWN} sentinel.
// avPixelFormtToSdlFormat() scans every entry except the sentinel; formats
// not listed here fall back to SDL_PIXELFORMAT_UNKNOWN and are converted
// with swscale in uploadTexture().
const struct TextureFormatEntry {
    enum AVPixelFormat format;  // FFmpeg pixel format
    int texture_fmt;            // corresponding SDL_PixelFormatEnum value
} sdl_texture_format_map2[] = {
{ AV_PIX_FMT_RGB8,           SDL_PIXELFORMAT_RGB332 },
{ AV_PIX_FMT_RGB444,         SDL_PIXELFORMAT_RGB444 },
{ AV_PIX_FMT_RGB555,         SDL_PIXELFORMAT_RGB555 },
{ AV_PIX_FMT_BGR555,         SDL_PIXELFORMAT_BGR555 },
{ AV_PIX_FMT_RGB565,         SDL_PIXELFORMAT_RGB565 },
{ AV_PIX_FMT_BGR565,         SDL_PIXELFORMAT_BGR565 },
{ AV_PIX_FMT_RGB24,          SDL_PIXELFORMAT_RGB24 },
{ AV_PIX_FMT_BGR24,          SDL_PIXELFORMAT_BGR24 },
{ AV_PIX_FMT_0RGB32,         SDL_PIXELFORMAT_RGB888 },
{ AV_PIX_FMT_0BGR32,         SDL_PIXELFORMAT_BGR888 },
{ AV_PIX_FMT_NE(RGB0, 0BGR), SDL_PIXELFORMAT_RGBX8888 },
{ AV_PIX_FMT_NE(BGR0, 0RGB), SDL_PIXELFORMAT_BGRX8888 },
{ AV_PIX_FMT_RGB32,          SDL_PIXELFORMAT_ARGB8888 },
{ AV_PIX_FMT_RGB32_1,        SDL_PIXELFORMAT_RGBA8888 },
{ AV_PIX_FMT_BGR32,          SDL_PIXELFORMAT_ABGR8888 },
{ AV_PIX_FMT_BGR32_1,        SDL_PIXELFORMAT_BGRA8888 },
{ AV_PIX_FMT_YUV420P,        SDL_PIXELFORMAT_IYUV },
{ AV_PIX_FMT_YUYV422,        SDL_PIXELFORMAT_YUY2 },
{ AV_PIX_FMT_UYVY422,        SDL_PIXELFORMAT_UYVY },
{ AV_PIX_FMT_NONE,           SDL_PIXELFORMAT_UNKNOWN },
};


/*
    The render thread must be started only after the audio/video threads
    are up, because opening the audio output device requires the sample
    rate, channel layout, etc. of the audio decoder.
*/
RenderThread::RenderThread(VideoPlayer *player) : QThread(player)
{
    m_player=player;
    m_reqStep=false;
    m_reqPause=false;
    m_reqStop=false;
    m_lastReqPause=false;

    m_renderer=NULL;
    m_window=NULL;
    // BUG FIX: m_widget was left uninitialized here although
    // destoryWindow() and computeDisplayRect() read it.
    m_widget=NULL;
    m_audioDeviceId=0;
    m_textTexture=NULL;
    m_videoTexture=NULL;
    m_videoConvertCtx=NULL;
    m_textConvertCtx=NULL;
    m_audioConvertCtx=NULL;
}

// Ensure the worker loop has fully terminated (stop() blocks in wait())
// before the object and its QThread base are destroyed.
RenderThread::~RenderThread()
{
    stop();
}

/*
    Start (or restart) playback.

    If the thread is not already running, resets all per-run state,
    creates the SDL window and opens the audio device on the calling
    (main) thread, then starts the render loop. Emits error() with
    m_lastErrorMsg when window or audio-device setup fails.
*/
void RenderThread::play()
{
    m_reqStop=false;
    m_reqPause=false;
    m_lastReqPause=false;

    if(!this->isRunning()){
        // Reset per-run rendering and audio bookkeeping state.
        m_renderer=NULL;
        m_window=NULL;
        m_audioDeviceId=0;
        m_textTexture=NULL;
        m_videoTexture=NULL;
        m_videoConvertCtx=NULL;
        m_textConvertCtx=NULL;
        m_audioConvertCtx=NULL;

        m_videoFrameTimer=0;

        audio_clock=0;
        audio_clock_serial=0;
        audio_diff_cum=0;
        audio_diff_avg_coef=0;
        audio_diff_threshold=0;
        audio_diff_avg_count=0;

        audio_hw_buf_size=-1;
        audio_buf=NULL;
        audio_buf1=NULL;
        audio_buf_size=0;
        audio_buf1_size=0;
        audio_buf_index=-1;
        audio_write_buf_size=-1;

        // The window and the audio device must be created sequentially on
        // the main thread; doing it elsewhere crashes SDL/FFmpeg calls.
        if(!createWindow()){
            emit error(m_lastErrorMsg);
            // BUG FIX: previously fell through and opened the audio device
            // and started the thread even though no window exists.
            return;
        }
        if(!openAudioDevice()){
            destoryWindow();
            emit error(m_lastErrorMsg);
            return;
        }

        this->start();
    }
}

/*
    Ask the render loop to pause; run() applies the change on its next
    iteration (see the m_reqPause/m_lastReqPause comparison there).
*/
void RenderThread::pause()
{
    m_reqStop  = false;
    m_reqPause = true;
}

/*
    Ask the render loop to exit, then block until run() has returned.
    run() closes the audio device and destroys the window on its way out.
*/
void RenderThread::stop()
{
    m_reqPause     = false;
    m_lastReqPause = false;
    m_reqStop      = true;

    this->wait();
}

/*
    Request single-frame (step) display; picked up asynchronously by the
    render loop.
*/
void RenderThread::step()
{
    m_reqStep = true;
}

/*
    Render loop: unpauses the audio device, then repeatedly applies
    pause-state changes, sleeps until the next frame is due and renders
    video/subtitles. On exit releases the audio device and the window.
*/
void RenderThread::run()
{
    emit inited();

    // Start audio playback (the device was opened in play() on the main thread).
    if (m_audioDeviceId != 0){
        SDL_PauseAudioDevice(m_audioDeviceId, 0);
    }

    double frameDelay = 0.0;        // next-frame delay derived from PTS (seconds)
    const double idleDelay = 0.01;  // fallback delay while paused or without frames (seconds)

    while (!m_reqStop) {
        // Apply a pause/resume transition requested since the last pass.
        if (m_reqPause != m_lastReqPause){
            m_lastReqPause = m_reqPause;
            togglePauseState(m_reqPause);
        }

        if (frameDelay > 0.0){
            QThread::usleep((unsigned long)(frameDelay * 1000000.0));
        }
        frameDelay = idleDelay;

        if (!m_reqPause){
            renderNextVideoAndText(&frameDelay);
        }
    }

    // Tear down the audio device and the SDL window.
    closeAudioDevice();
    destoryWindow();
}

/*
    Open the SDL audio output device (mirrors ffplay's audio_open).

    Uses the audio decoder's sample rate / channel layout as the wanted
    configuration; when SDL rejects it, retries with progressively fewer
    channels and then lower sample rates. On success records the
    negotiated parameters in audio_tgt/audio_src plus the hardware-buffer
    and sync-filter bookkeeping, and stores the device id in
    m_audioDeviceId (device starts paused). On failure sets
    m_lastErrorMsg and returns false without leaking the device.
*/
bool RenderThread::openAudioDevice()
{
    PlayerInfo* playerInfo=&m_player->m_playerInfo;

    int stream_sample_rate = playerInfo->audioCodecContext->sample_rate;
    int stream_nb_channels = playerInfo->audioCodecContext->channels;
    int64_t stream_channel_layout = playerInfo->audioCodecContext->channel_layout;
    if (stream_sample_rate <= 0 ) {
        m_lastErrorMsg="音频流采样率无效";
        return false;
    }
    if (stream_nb_channels <= 0) {
        m_lastErrorMsg="音频流声道数无效";
        return false;
    }

    // Wanted configuration starts from the stream parameters.
    int64_t wanted_channel_layout=stream_channel_layout;
    int wanted_nb_channels=stream_nb_channels;
    int wanted_sample_rate=stream_sample_rate;

    SDL_AudioSpec wanted_spec, spec;
    const char *env;
    static const int next_nb_channels[] = {0, 0, 1, 6, 2, 6, 4, 6};
    static const int next_sample_rates[] = {0, 44100, 48000, 96000, 192000};
    int next_sample_rate_idx = FF_ARRAY_ELEMS(next_sample_rates) - 1;

    // Allow overriding the channel count via environment (same as ffplay).
    // TODO: test the effect of the channel count on audio rendering.
    env = SDL_getenv("SDL_AUDIO_CHANNELS");
    if (env) {
        wanted_nb_channels = atoi(env);
        wanted_channel_layout = av_get_default_channel_layout(wanted_nb_channels);
    }

    if (!wanted_channel_layout || wanted_nb_channels != av_get_channel_layout_nb_channels(wanted_channel_layout)) {
        wanted_channel_layout = av_get_default_channel_layout(wanted_nb_channels);
        wanted_channel_layout &= ~AV_CH_LAYOUT_STEREO_DOWNMIX;
    }
    wanted_nb_channels = av_get_channel_layout_nb_channels(wanted_channel_layout);
    wanted_spec.channels = wanted_nb_channels;
    wanted_spec.freq = wanted_sample_rate;
    while (next_sample_rate_idx && next_sample_rates[next_sample_rate_idx] >= wanted_spec.freq){
        // Position the fallback index at the highest rate below the wanted one.
        next_sample_rate_idx--;
    }
    wanted_spec.format = AUDIO_S16SYS;
    wanted_spec.silence = 0;
    wanted_spec.samples = FFMAX(SDL_AUDIO_MIN_BUFFER_SIZE, 2 << av_log2(wanted_spec.freq / SDL_AUDIO_MAX_CALLBACKS_PER_SEC));
    wanted_spec.callback = sdlAudioDeviceCallback;
    wanted_spec.userdata = this;

    SDL_AudioDeviceID audioDevice=0;
    while (!(audioDevice = SDL_OpenAudioDevice(NULL, 0, &wanted_spec, &spec, SDL_AUDIO_ALLOW_FREQUENCY_CHANGE | SDL_AUDIO_ALLOW_CHANNELS_CHANGE))) {
        qDebug()<<"[Warn]"<<QString("SDL_OpenAudio失败（%1 channels, %2 Hz, %3）")
                  .arg(wanted_spec.channels).arg(wanted_spec.freq).arg(SDL_GetError());

        // Retry with fewer channels first, then with lower sample rates.
        wanted_spec.channels = next_nb_channels[FFMIN(7, wanted_spec.channels)];
        if (!wanted_spec.channels) {
            wanted_spec.freq = next_sample_rates[next_sample_rate_idx--];
            wanted_spec.channels = wanted_nb_channels;

            if (!wanted_spec.freq) {
                m_lastErrorMsg="未找到可用的音频采样率和声道组合，可能是音频设备占用过多或不支持";
                return false;
            }
        }
        wanted_channel_layout = av_get_default_channel_layout(wanted_spec.channels);
    }
    if (spec.format != AUDIO_S16SYS) {
        // BUG FIX: close the already-opened device before bailing out.
        SDL_CloseAudioDevice(audioDevice);
        m_lastErrorMsg="设备不支持尝试使用的音频格式AUDIO_S16SYS";
        return false;
    }
    if (spec.channels != wanted_spec.channels) {
        wanted_channel_layout = av_get_default_channel_layout(spec.channels);
        if (!wanted_channel_layout) {
            // BUG FIX: close the already-opened device before bailing out.
            SDL_CloseAudioDevice(audioDevice);
            m_lastErrorMsg=QString("设备不支持尝试使用的声道数%1").arg(spec.channels);
            return false;
        }
    }

    // Record the negotiated hardware parameters.
    struct AudioParams *audio_hw_params=&this->audio_tgt;
    audio_hw_params->fmt = AV_SAMPLE_FMT_S16;
    audio_hw_params->freq = spec.freq;
    audio_hw_params->channel_layout = wanted_channel_layout;
    audio_hw_params->channels =  spec.channels;
    audio_hw_params->frame_size = av_samples_get_buffer_size(NULL, audio_hw_params->channels, 1, audio_hw_params->fmt, 1);
    audio_hw_params->bytes_per_sec = av_samples_get_buffer_size(NULL, audio_hw_params->channels, audio_hw_params->freq, audio_hw_params->fmt, 1);
    if(audio_hw_params->frame_size  <= 0 || audio_hw_params->bytes_per_sec <= 0) {
        // BUG FIX: close the already-opened device before bailing out.
        SDL_CloseAudioDevice(audioDevice);
        m_lastErrorMsg="av_samples_get_buffer_size失败";
        return false;
    }
    this->audio_src =this->audio_tgt;
    this->audio_hw_buf_size = spec.size;
    this->audio_buf_size  = 0;
    this->audio_buf_index = 0;
    // Audio-sync averaging filter coefficients (consumed by syncAudio).
    this->audio_diff_avg_coef  = exp(log(0.01) / AUDIO_DIFF_AVG_NB);
    this->audio_diff_avg_count = 0;
    this->audio_diff_threshold = (double)(this->audio_hw_buf_size) /this->audio_tgt.bytes_per_sec;

    m_audioDeviceId=audioDevice;

    return true;
}

/*
    Close the SDL audio output device and free the audio resample context.
    Safe to call repeatedly; both members are reset afterwards.
*/
void RenderThread::closeAudioDevice()
{
    if (m_audioDeviceId != 0){
        SDL_CloseAudioDevice(m_audioDeviceId);
    }
    m_audioDeviceId = 0;

    if (m_audioConvertCtx){
        swr_free(&m_audioConvertCtx);
    }
    m_audioConvertCtx = NULL;
}

/*
    创建SDL窗口
*/
bool RenderThread::createWindow()
{
    PlayerConfig* playerConfig=&m_player->m_playerConfig;

    SDL_Renderer* renderer=NULL;
    SDL_Window* window=NULL;
    QWidget* widget=NULL;

    if(playerConfig->winId==NULL){
        m_lastErrorMsg="窗口句柄为空";
        return false;
    }
    widget=QWidget::find((WId)playerConfig->winId);
    if(!widget){
        m_lastErrorMsg="QWidget未找到";
        return false;
    }else{
        m_lastWindowSize=widget->size();
    }

    window = SDL_CreateWindowFrom(playerConfig->winId);
    if(!window){
        m_lastErrorMsg=QString("SDL窗口创建失败（%1）").arg(SDL_GetError());
        return false;
    }else{
        //SDL销毁窗口时会隐藏窗口，因此每次启动应确保窗口显示
        SDL_ShowWindow(window);
    }

    if(playerConfig->hwRender){
        renderer = SDL_CreateRenderer(window, -1, SDL_RENDERER_ACCELERATED | SDL_RENDERER_PRESENTVSYNC);
        if (!renderer) {
            qDebug()<<"[Warn] 当前运行环境不支持SDL硬件加速渲染";
        }
    }
    if(!renderer){
        renderer = SDL_CreateRenderer(window, -1, SDL_RENDERER_SOFTWARE);
    }
    if(!renderer){
        SDL_DestroyWindow(window);
        m_lastErrorMsg=QString("SDL渲染器创建失败（%1）").arg(SDL_GetError());
        return false;
    }
    SDL_RendererInfo renderInfo;
    if (SDL_GetRendererInfo(renderer, &renderInfo)!=0 || renderInfo.num_texture_formats<=0){
        SDL_DestroyWindow(window);
        SDL_DestroyRenderer(renderer);
        m_lastErrorMsg="SDL渲染器不可用";
        return false;
    }

    m_window=window;
    m_renderer=renderer;
    m_widget=widget;

    return true;
}

/*
    Destroy the SDL renderer/window, both textures and both scaling
    contexts, then reset all the corresponding members to NULL.
*/
void RenderThread::destoryWindow()
{
    if (m_renderer)        SDL_DestroyRenderer(m_renderer);
    if (m_window)          SDL_DestroyWindow(m_window);
    if (m_textConvertCtx)  sws_freeContext(m_textConvertCtx);
    if (m_videoConvertCtx) sws_freeContext(m_videoConvertCtx);
    if (m_textTexture)     SDL_DestroyTexture(m_textTexture);
    if (m_videoTexture)    SDL_DestroyTexture(m_videoTexture);

    m_widget          = NULL;
    m_renderer        = NULL;
    m_window          = NULL;
    m_textTexture     = NULL;
    m_videoTexture    = NULL;
    m_videoConvertCtx = NULL;
    m_textConvertCtx  = NULL;
}

/*
    Pick the next video (and subtitle) frame to display from the decoded
    frame queues, run A/V synchronization, and call sdlRenderVideoAndText
    to draw them.

    delayTime (in/out): seconds run() should sleep before calling this
    function again; lowered when the next frame's PTS is due sooner.
*/
void RenderThread::renderNextVideoAndText(double *delayTime)
{
    PlayerInfo *playerInfo = &m_player->m_playerInfo;
    bool reqRender=false;

    // External-clock sync: for live streams, adapt the external clock
    // speed to the packet-queue fill level.
    if (!this->m_reqPause && playerInfo->syncType== AV_SYNC_EXTERNAL_CLOCK && playerInfo->isLiveStream){
        updateExternalClockSpeed();
    }

    if(playerInfo->videoStream){
retry:
        if (frame_queue_nb_remaining(&playerInfo->videoFrameQueue) == 0) {
            // No buffered frames: render nothing this round.
        } else {
            Frame* lastVideoFrame = frame_queue_peek_last(&playerInfo->videoFrameQueue);
            Frame* videoFrame = frame_queue_peek_current(&playerInfo->videoFrameQueue);

            // Drop frames left over from before a queue flush (stale serial).
            if (videoFrame->serial != playerInfo->videoPacketQueue.serial) {
                frame_queue_next(&playerInfo->videoFrameQueue);
                goto retry;
            }

            // First frame of a new serial: restart the frame timer.
            if (lastVideoFrame->serial != videoFrame->serial){
                this->m_videoFrameTimer = av_gettime_relative() / 1000000.0;
            }

            // While paused, keep showing the current frame.
            if (this->m_reqPause){
                goto display;
            }

            // Video sync: compute how long the previous frame should stay
            // on screen and adjust that delay against the master clock.
            double lastDuration = computeDuration(lastVideoFrame,videoFrame);
            double delay = syncVideo(lastDuration);
            double curTime= av_gettime_relative()/1000000.0;
            if (curTime < this->m_videoFrameTimer + delay) {
                // The next frame is not due yet: sleep until its deadline.
                *delayTime = FFMIN(this->m_videoFrameTimer + delay - curTime, *delayTime);
                goto display;
            }
            this->m_videoFrameTimer += delay;
            // If we fell too far behind, re-anchor the frame timer to now.
            if (delay > 0 && curTime - this->m_videoFrameTimer > AV_SYNC_THRESHOLD_MAX){
                this->m_videoFrameTimer = curTime;
            }
            SDL_LockMutex(playerInfo->videoFrameQueue.mutex);
            if (!isnan(videoFrame->pts)){
                updateVideoClock(videoFrame->pts, videoFrame->pos, videoFrame->serial);
            }
            SDL_UnlockMutex(playerInfo->videoFrameQueue.mutex);
            /*
            if (frame_queue_nb_remaining(&playerInfo->videoFrameQueue) > 1) {
                Frame *nextVideoFrame = frame_queue_peek_next(&playerInfo->videoFrameQueue);
                double duration = computeDuration(videoFrame, nextVideoFrame);
                bool isDelay=(playerInfo->syncType != AV_SYNC_VIDEO_MASTER) && (curTime > this->vframeTimer + duration);
                if(!this->m_reqStep && (playerConfig->framedrop || isDelay)){
                    playerInfo->videoFrameDropsOnRender++;
                    frame_queue_next(&playerInfo->videoFrameQueue);
                    goto retry;
                }
            }*/

            // Advance the subtitle queue: drop subtitle frames that are
            // stale (wrong serial) or whose display window has already
            // passed relative to the video clock, clearing their pixels
            // from the subtitle texture if they were uploaded.
            if (playerInfo->textStream) {
                Frame *sp=NULL, *sp2=NULL;
                while (frame_queue_nb_remaining(&playerInfo->textFrameQueue) > 0) {
                    sp = frame_queue_peek_current(&playerInfo->textFrameQueue);

                    if (frame_queue_nb_remaining(&playerInfo->textFrameQueue) > 1)
                        sp2 = frame_queue_peek_next(&playerInfo->textFrameQueue);
                    else
                        sp2 = NULL;

                    if (sp->serial != playerInfo->textPacketQueue.serial
                            || (playerInfo->videoClock.pts > (sp->pts + ((float) sp->sub.end_display_time / 1000)))
                            || (sp2 && playerInfo->videoClock.pts > (sp2->pts + ((float) sp2->sub.start_display_time / 1000))))
                    {
                        if (sp->uploaded) {
                            // Zero out the expired subtitle's pixels in the texture.
                            for (unsigned int i = 0; i < sp->sub.num_rects; i++) {
                                AVSubtitleRect *sub_rect = sp->sub.rects[i];
                                uint8_t *pixels;
                                int pitch, j;

                                if (!SDL_LockTexture(m_textTexture, (SDL_Rect *)sub_rect, (void **)&pixels, &pitch)) {
                                    for (j = 0; j < sub_rect->h; j++, pixels += pitch)
                                        memset(pixels, 0, sub_rect->w << 2);
                                    SDL_UnlockTexture(m_textTexture);
                                }
                            }
                        }
                        frame_queue_next(&playerInfo->textFrameQueue);
                    } else {
                        break;
                    }
                }
            }

            frame_queue_next(&playerInfo->videoFrameQueue);
            reqRender = true;

            if (this->m_reqStep && !this->m_reqPause){
                // Single-frame stepping (marked TODO by the author).
                // NOTE(review): togglePauseState takes a bool; passing the
                // playerInfo pointer converts to true here — presumably
                // togglePauseState(true) was intended. Confirm.
                togglePauseState(playerInfo);
            }
        }

display:
        if (reqRender && playerInfo->videoFrameQueue.rindex_shown){
            sdlRenderVideoAndText();
        }
    }
}

/*
    Return the display duration between two consecutive frames.

    Returns 0 when the frames belong to different packet-queue serials
    (a flush happened between them). Falls back to vp's nominal duration
    when the PTS difference is unusable (NaN, non-positive, or larger
    than the stream's maximum frame duration).
*/
double RenderThread::computeDuration(Frame *vp, Frame *nextvp) {
    PlayerInfo* playerInfo = &m_player->m_playerInfo;

    if (vp->serial != nextvp->serial){
        return 0.0;
    }

    double ptsDelta = nextvp->pts - vp->pts;
    if (isnan(ptsDelta) || ptsDelta <= 0 || ptsDelta > playerInfo->maxFrameDuration){
        return vp->duration;
    }
    return ptsDelta;
}

/*
    Synchronize video to the master clock (audio or external).

    delay:  proposed delay before showing the next frame (seconds)
    return: the adjusted delay (seconds)
*/
double RenderThread::syncVideo(double delay)
{
    PlayerInfo* playerInfo = &m_player->m_playerInfo;

    if (playerInfo->syncType == AV_SYNC_VIDEO_MASTER){
        // Video is the master clock: nothing to adjust against.
        return delay;
    }

    double diff = get_clock(&playerInfo->videoClock) - get_clock(playerInfo->masterClock);
    double threshold = FFMAX(AV_SYNC_THRESHOLD_MIN, FFMIN(AV_SYNC_THRESHOLD_MAX, delay));

    if (isnan(diff) || fabs(diff) >= playerInfo->maxFrameDuration){
        // Clock difference is unusable: keep the proposed delay.
        return delay;
    }

    if (diff <= -threshold){
        // Video lags behind the master: shorten the delay (never below 0).
        delay = FFMAX(0, delay + diff);
    } else if (diff >= threshold && delay > AV_SYNC_FRAMEDUP_THRESHOLD){
        // Video is ahead and the frame is long: extend by the full difference.
        delay = delay + diff;
    } else if (diff >= threshold){
        // Video is ahead: double the delay.
        delay = 2 * delay;
    }

    return delay;
}

/*
    Update the video clock to the given PTS/serial, then slave the
    external clock to the video clock.

    pos is accepted for signature parity but is not used in the body.
*/
void RenderThread::updateVideoClock(double pts, int64_t pos, int serial) {
    PlayerInfo *playerInfo = &m_player->m_playerInfo;

    set_clock(&playerInfo->videoClock, pts, serial);
    sync_clock_to_slave(&playerInfo->externalClock, &playerInfo->videoClock);
}


/*
    Render the current video frame plus any due subtitle frame with SDL.

    The frame to show was selected by renderNextVideoAndText; because the
    video frame queue keeps its last entry, frame_queue_peek_last returns
    that frame here.
*/
void RenderThread::sdlRenderVideoAndText()
{
    PlayerInfo *playerInfo = &m_player->m_playerInfo;

    // Clear the backbuffer to black.
    SDL_SetRenderDrawColor(m_renderer,0, 0, 0, 255);
    SDL_RenderClear(m_renderer);

    Frame *vp;
    Frame *sp = NULL;
    SDL_Rect rect;

    vp = frame_queue_peek_last(&playerInfo->videoFrameQueue);

    // Prepare (scale and upload) the current subtitle frame when its
    // display time has arrived. One subtitle frame may span several video
    // frames; dequeuing happens in renderNextVideoAndText.
    if (playerInfo->textStream) {
        if (frame_queue_nb_remaining(&playerInfo->textFrameQueue) > 0) {
            sp = frame_queue_peek_current(&playerInfo->textFrameQueue);

            if (vp->pts >= sp->pts + ((float) sp->sub.start_display_time / 1000)) {
                if (!sp->uploaded) {
                    uint8_t* pixels[4];
                    int pitch[4];
                    if (!sp->width || !sp->height) {
                        // Subtitle plane defaults to the video frame's size.
                        sp->width = vp->width;
                        sp->height = vp->height;
                    }
                    if (reallocTexture(&m_textTexture, SDL_PIXELFORMAT_ARGB8888, sp->width, sp->height, SDL_BLENDMODE_BLEND, 1) < 0)
                        return;

                    for (unsigned int i = 0; i < sp->sub.num_rects; i++) {
                        AVSubtitleRect *sub_rect = sp->sub.rects[i];

                        // Clamp the subtitle rect to the subtitle plane.
                        sub_rect->x = av_clip(sub_rect->x, 0, sp->width );
                        sub_rect->y = av_clip(sub_rect->y, 0, sp->height);
                        sub_rect->w = av_clip(sub_rect->w, 0, sp->width  - sub_rect->x);
                        sub_rect->h = av_clip(sub_rect->h, 0, sp->height - sub_rect->y);

                        // Convert the palettized subtitle bitmap to BGRA.
                        m_textConvertCtx = sws_getCachedContext(m_textConvertCtx,
                                                                   sub_rect->w, sub_rect->h, AV_PIX_FMT_PAL8,
                                                                    sub_rect->w, sub_rect->h, AV_PIX_FMT_BGRA,
                                                                    0, NULL, NULL, NULL);
                        if (!m_textConvertCtx) {
                            m_lastErrorMsg="sws_getCachedContext调用失败";
                            return;
                        }
                        if (!SDL_LockTexture(m_textTexture, (SDL_Rect *)sub_rect, (void **)pixels, pitch)) {
                            sws_scale(m_textConvertCtx, (const uint8_t * const *)sub_rect->data, sub_rect->linesize,
                                      0, sub_rect->h, pixels, pitch);
                            SDL_UnlockTexture(m_textTexture);
                        }
                    }
                    sp->uploaded = 1;
                }
            } else
                sp = NULL;
        }
    }

    // Render the video frame.
    computeDisplayRect(&rect,vp->width, vp->height, vp->sar);
    if (!vp->uploaded) {
        if (uploadTexture(&m_videoTexture, vp->frame, &m_videoConvertCtx) < 0)
            return;
        vp->uploaded = 1;
        // Negative linesize means a bottom-up image: flip vertically.
        vp->flip_v = vp->frame->linesize[0] < 0;
    }
    SDL_RenderCopyEx(m_renderer, m_videoTexture, NULL, &rect, 0, NULL, (SDL_RendererFlip)(vp->flip_v ? SDL_FLIP_VERTICAL : 0));

    // Render the subtitle frame on top of the video.
    if (sp) {
#if USE_ONEPASS_SUBTITLE_RENDER
        SDL_RenderCopy(m_renderer, m_textTexture, NULL, &rect);
#else
        double xratio = (double)rect.w / (double)sp->width;
        double yratio = (double)rect.h / (double)sp->height;
        for (unsigned int i = 0; i < sp->sub.num_rects; i++) {
            SDL_Rect *sub_rect = (SDL_Rect*)sp->sub.rects[i];
            // BUG FIX: this branch referenced the undeclared identifiers
            // `renderer` and `playerInfo->sub_texture`; use the member
            // renderer and subtitle texture instead. The explicit casts
            // avoid ill-formed double->int narrowing in the aggregate
            // initializer.
            SDL_Rect target = {(int)(rect.x + sub_rect->x * xratio),
                               (int)(rect.y + sub_rect->y * yratio),
                               (int)(sub_rect->w * xratio),
                               (int)(sub_rect->h * yratio)};
            SDL_RenderCopy(m_renderer, m_textTexture, sub_rect, &target);
        }
#endif
    }

    SDL_RenderPresent(m_renderer);
}

/*
    调整外部时钟速度
*/
void RenderThread::updateExternalClockSpeed()
{
    PlayerInfo *playerInfo=&m_player->m_playerInfo;

    if ((playerInfo->videoStream && playerInfo->videoPacketQueue.nb_packets <= EXTERNAL_CLOCK_MIN_FRAMES) ||
            (playerInfo->audioStream  && playerInfo->audioPacketQueue.nb_packets <= EXTERNAL_CLOCK_MIN_FRAMES))
    {
        set_clock_speed(&playerInfo->externalClock, FFMAX(EXTERNAL_CLOCK_SPEED_MIN, playerInfo->externalClock.speed - EXTERNAL_CLOCK_SPEED_STEP));
    }
    else if ((!playerInfo->videoStream || playerInfo->videoPacketQueue.nb_packets > EXTERNAL_CLOCK_MAX_FRAMES) &&
             (!playerInfo->audioStream || playerInfo->audioPacketQueue.nb_packets > EXTERNAL_CLOCK_MAX_FRAMES))
    {
        set_clock_speed(&playerInfo->externalClock, FFMIN(EXTERNAL_CLOCK_SPEED_MAX, playerInfo->externalClock.speed + EXTERNAL_CLOCK_SPEED_STEP));
    }
    else {
        double speed = playerInfo->externalClock.speed;
        if (speed != 1.0){
            set_clock_speed(&playerInfo->externalClock, speed + EXTERNAL_CLOCK_SPEED_STEP * (1.0 - speed) / fabs(1.0 - speed));
        }
    }
}


// Wrap-around modulo mapping negative a toward [0, b) — note that a
// negative exact multiple of b yields b itself (r == 0 branch), matching
// ffplay's compute_mod.
int RenderThread::computeMod(int a, int b)
{
    int r = a % b;
    return a < 0 ? r + b : r;
}

/*
    Ensure *texture matches the requested format/size, recreating it if not.

    texture:      in/out; destroyed and recreated when format, width or
                  height differ (SDL_QueryTexture failing — e.g. because
                  *texture is NULL — also triggers recreation)
    new_format:   SDL pixel format for the texture
    blendmode:    blend mode applied to the (re)created texture
    init_texture: when non-zero, zero-fill the new texture's pixels
    return:       0 on success, -1 on SDL failure
*/
int RenderThread::reallocTexture(SDL_Texture **texture, Uint32 new_format, int new_width, int new_height, SDL_BlendMode blendmode, int init_texture)
{
    Uint32 format;
    int access, w, h;
    if (SDL_QueryTexture(*texture, &format, &access, &w, &h) < 0 || new_width != w || new_height != h || new_format != format) {

        void *pixels;
        int pitch;
        // SDL_DestroyTexture tolerates NULL, so no guard is needed here.
        SDL_DestroyTexture(*texture);
        if (!(*texture = SDL_CreateTexture(m_renderer, new_format, SDL_TEXTUREACCESS_STREAMING, new_width, new_height)))
            return -1;
        if (SDL_SetTextureBlendMode(*texture, blendmode) < 0)
            return -1;
        if (init_texture) {
            if (SDL_LockTexture(*texture, NULL, &pixels, &pitch) < 0)
                return -1;
            memset(pixels, 0, pitch * new_height);
            SDL_UnlockTexture(*texture);
        }
    }
    return 0;
}

/*
    Map an FFmpeg pixel format to the matching SDL pixel format and
    blend mode.

    sdl_pix_fmt receives SDL_PIXELFORMAT_UNKNOWN when no direct mapping
    exists (the caller then converts via swscale); sdl_blendmode is
    SDL_BLENDMODE_BLEND only for the four alpha-capable RGB32 variants.
*/
void RenderThread::avPixelFormtToSdlFormat(int format, Uint32 *sdl_pix_fmt, SDL_BlendMode *sdl_blendmode)
{
    const bool hasAlpha = format == AV_PIX_FMT_RGB32   ||
                          format == AV_PIX_FMT_RGB32_1 ||
                          format == AV_PIX_FMT_BGR32   ||
                          format == AV_PIX_FMT_BGR32_1;
    *sdl_blendmode = hasAlpha ? SDL_BLENDMODE_BLEND : SDL_BLENDMODE_NONE;

    *sdl_pix_fmt = SDL_PIXELFORMAT_UNKNOWN;
    // Scan the map, skipping the trailing AV_PIX_FMT_NONE sentinel.
    for (unsigned int i = 0; i < FF_ARRAY_ELEMS(sdl_texture_format_map2) - 1; i++) {
        if (format == sdl_texture_format_map2[i].format) {
            *sdl_pix_fmt = sdl_texture_format_map2[i].texture_fmt;
            return;
        }
    }
}

/*
    Upload the pixel data of `frame` into *tex, (re)creating the texture
    as needed.

    tex:             in/out texture, resized/reformatted via reallocTexture
    frame:           decoded video frame to upload
    img_convert_ctx: in/out cached swscale context, used only when the
                     frame's format has no direct SDL equivalent
    return:          0 on success, -1 on failure (m_lastErrorMsg set for
                     the failure paths produced here)
*/
int RenderThread::uploadTexture(SDL_Texture **tex, AVFrame *frame, SwsContext **img_convert_ctx) {
    // Map the FFmpeg pixel format to an SDL format + blend mode.
    int ret = 0;
    Uint32 sdl_pix_fmt;
    SDL_BlendMode sdl_blendmode;
    avPixelFormtToSdlFormat(frame->format, &sdl_pix_fmt, &sdl_blendmode);

    // Recreate the texture if format or size changed; unmapped formats
    // get an ARGB8888 texture and are converted below.
    if (reallocTexture(tex, sdl_pix_fmt == (Uint32)SDL_PIXELFORMAT_UNKNOWN ? (Uint32)SDL_PIXELFORMAT_ARGB8888 : sdl_pix_fmt,frame->width, frame->height,sdl_blendmode, 0) < 0){
        return -1;
    }

    int sws_flags=SWS_BICUBIC;
    switch (sdl_pix_fmt) {
    case SDL_PIXELFORMAT_UNKNOWN:
        // No direct SDL format: convert to BGRA with swscale, then copy
        // into the locked texture.
        *img_convert_ctx = sws_getCachedContext(*img_convert_ctx,
                                                frame->width, frame->height, (AVPixelFormat)frame->format,
                                                frame->width, frame->height, (AVPixelFormat)AV_PIX_FMT_BGRA,
                                                sws_flags, NULL, NULL, NULL);
        if (*img_convert_ctx != NULL) {
            uint8_t *pixels[4];
            int pitch[4];
            if (!SDL_LockTexture(*tex, NULL, (void **)pixels, pitch)) {
                sws_scale(*img_convert_ctx, (const uint8_t * const *)frame->data, frame->linesize,
                          0, frame->height, pixels, pitch);
                SDL_UnlockTexture(*tex);
            }
        } else {
            m_lastErrorMsg="sws_getCachedContext调用失败";
            ret = -1;
        }
        break;
    case SDL_PIXELFORMAT_IYUV:
        // Planar YUV420: upload the three planes directly. Negative
        // linesizes describe a bottom-up image, so start from the last
        // row of each plane and negate the stride.
        if (frame->linesize[0] > 0 && frame->linesize[1] > 0 && frame->linesize[2] > 0) {
            ret = SDL_UpdateYUVTexture(*tex, NULL, frame->data[0], frame->linesize[0],
                    frame->data[1], frame->linesize[1],
                    frame->data[2], frame->linesize[2]);
        } else if (frame->linesize[0] < 0 && frame->linesize[1] < 0 && frame->linesize[2] < 0) {
            ret = SDL_UpdateYUVTexture(*tex, NULL, frame->data[0] + frame->linesize[0] * (frame->height                    - 1), -frame->linesize[0],
                    frame->data[1] + frame->linesize[1] * (AV_CEIL_RSHIFT(frame->height, 1) - 1), -frame->linesize[1],
                    frame->data[2] + frame->linesize[2] * (AV_CEIL_RSHIFT(frame->height, 1) - 1), -frame->linesize[2]);
        } else {
            // Mixed-sign linesizes are not supported.
            m_lastErrorMsg="AVFrame的linesizes无效";
            return -1;
        }
        break;
    default:
        // Packed formats: single-plane upload, handling bottom-up images.
        if (frame->linesize[0] < 0) {
            ret = SDL_UpdateTexture(*tex, NULL, frame->data[0] + frame->linesize[0] * (frame->height - 1), -frame->linesize[0]);
        } else {
            ret = SDL_UpdateTexture(*tex, NULL, frame->data[0], frame->linesize[0]);
        }
        break;
    }
    return ret;
}

/*
    Compute the rectangle in which the video frame is rendered, and keep
    the SDL window size in sync with the Qt widget.

    rect:                  receives the render position/size
    pic_width/pic_height:  video frame dimensions
    pic_sar:               sample aspect ratio of the frame

    NOTE(review): the letterboxed x/y/width/height computed below are
    currently discarded — the commented-out assignment is replaced by the
    full-window rect at the end. Presumably intentional; confirm before
    re-enabling the letterbox output.
*/
void RenderThread::computeDisplayRect(SDL_Rect *rect,int pic_width, int pic_height, AVRational pic_sar)
{
    PlayerConfig* playerConfig= &m_player->m_playerConfig;

    if(m_widget->size()!=m_lastWindowSize){
        m_lastWindowSize=m_widget->size();

        // When the Qt widget is resized, the new size must be pushed to
        // the SDL window manually; otherwise the renderer's output area
        // does not change.
        SDL_SetWindowSize(m_window, m_lastWindowSize.width(), m_lastWindowSize.height());
    }



    // Target area: either the configured sub-rectangle or the full
    // renderer output.
    int wantedX=0,wantedY=0,wantedW=0,wantedH=0;
    if(playerConfig->renderRect.isValid()){
        wantedX=playerConfig->renderRect.x();
        wantedY=playerConfig->renderRect.y();
        wantedW=playerConfig->renderRect.width();
        wantedH=playerConfig->renderRect.height();       
    }else{
        SDL_GetRendererOutputSize(m_renderer,&wantedW,&wantedH);
    }

    // Letterbox math (ffplay's calculate_display_rect): fit the frame's
    // display aspect ratio inside the target area, centered, with even
    // dimensions.
    float aspect_ratio;
    int width, height, x, y;

    if (pic_sar.num == 0){
        aspect_ratio = 0;
    }else{
        aspect_ratio = av_q2d(pic_sar);
    }

    if (aspect_ratio <= 0.0){
        aspect_ratio = 1.0;
    }
    aspect_ratio *= (float)pic_width / (float)pic_height;

    height = wantedH;
    width = lrint(height * aspect_ratio) & ~1;
    if (width > wantedW) {
        width = wantedW;
        height = lrint(width / aspect_ratio) & ~1;
    }
    x = (wantedW - width) / 2;
    y = (wantedH - height) / 2;

//    rect->x = wantedX + x;
//    rect->y = wantedY  + y;
//    rect->w = FFMAX(width,  1);
//    rect->h = FFMAX(height, 1);

    // Currently the frame is stretched over the whole window instead.
    rect->x=0;
    rect->y=0;
    rect->w=m_lastWindowSize.width();
    rect->h=m_lastWindowSize.height();
}

/*
    Apply a pause/resume transition to the playback clocks.

    pause: true to pause all clocks, false to resume them.
*/
void RenderThread::togglePauseState(bool pause)
{
    PlayerInfo* playerInfo=&m_player->m_playerInfo;

    if (!pause) {
        // Resuming: advance the frame timer by the time spent paused so
        // the next video frame is not considered overdue.
        this->m_videoFrameTimer += av_gettime_relative() / 1000000.0 - playerInfo->videoClock.last_updated;
        if (playerInfo->readPauseReturn != AVERROR(ENOSYS)) {
            playerInfo->videoClock.paused = 0;
        }
        // Re-anchor the video clock at its current value.
        set_clock(&playerInfo->videoClock, get_clock(&playerInfo->videoClock), playerInfo->videoClock.serial);
    }

    // Re-anchor the external clock, then propagate the paused flag to all clocks.
    set_clock(&playerInfo->externalClock, get_clock(&playerInfo->externalClock), playerInfo->externalClock.serial);
    playerInfo->audioClock.paused=pause;
    playerInfo->videoClock.paused=pause;
    playerInfo->externalClock.paused=pause;
}

/*
    SDL audio output callback: invoked on SDL's audio thread whenever the
    device needs `len` more bytes in `stream`.

    opaque: the RenderThread instance (wanted_spec.userdata)
    Refills from renderNextAudio() as the intermediate buffer drains,
    writes silence (or mixes at reduced volume) according to the mute /
    volume settings, and finally updates the audio clock from the amount
    of data still queued ahead of the hardware.
*/
void RenderThread::sdlAudioDeviceCallback(void *opaque, Uint8 *stream, int len)
{
    RenderThread* pThis = (RenderThread*)opaque;
    PlayerInfo *playerInfo = &pThis->m_player->m_playerInfo;
    PlayerConfig* playerConfig= &pThis->m_player->m_playerConfig;

    int audio_size, len1;

    int64_t audio_callback_time = av_gettime_relative();

    while (len > 0) {
        // Intermediate buffer exhausted: fetch and resample the next
        // decoded audio frame.
        if (pThis->audio_buf_index >= (int)pThis->audio_buf_size) {
            audio_size = renderNextAudio(pThis);
            if (audio_size < 0) {
                // No data available: emit a frame-aligned block of silence.
                pThis->audio_buf = NULL;
                pThis->audio_buf_size = SDL_AUDIO_MIN_BUFFER_SIZE / pThis->audio_tgt.frame_size * pThis->audio_tgt.frame_size;
            } else {
                pThis->audio_buf_size = audio_size;
            }
            pThis->audio_buf_index = 0;
        }
        len1 = pThis->audio_buf_size - pThis->audio_buf_index;
        if (len1 > len){
            len1 = len;
        }

        // Copy audio data into the SDL device buffer: direct memcpy at
        // full volume, otherwise zero-fill and mix at the set volume.
        if (!playerConfig->muted && pThis->audio_buf && playerConfig->volume == SDL_MIX_MAXVOLUME)
            memcpy(stream, (uint8_t *)pThis->audio_buf + pThis->audio_buf_index, len1);
        else {
            memset(stream, 0, len1);
            if (!playerConfig->muted && pThis->audio_buf){
                SDL_MixAudioFormat(stream, (uint8_t *)pThis->audio_buf + pThis->audio_buf_index, AUDIO_S16SYS, len1, playerConfig->volume);
            }
        }
        len -= len1;
        stream += len1;
        pThis->audio_buf_index += len1;
    }
    pThis->audio_write_buf_size = pThis->audio_buf_size - pThis->audio_buf_index;

    // Update the audio clock, compensating for the data still queued in
    // the hardware buffer and our intermediate buffer.
    if (!isnan(pThis->audio_clock)) {
        set_clock_at(&playerInfo->audioClock, pThis->audio_clock - (double)(2 * pThis->audio_hw_buf_size + pThis->audio_write_buf_size) / pThis->audio_tgt.bytes_per_sec, pThis->audio_clock_serial, audio_callback_time / 1000000.0);
        sync_clock_to_slave(&playerInfo->externalClock, &playerInfo->audioClock);
    }
}

/*
    使音频向视频或外部时钟同步（通过增加或减少采样数据量）

    nb_samples: 同步前采样数据量
    return: 同步后的采样数据量
*/
int RenderThread::syncAudio(RenderThread *pThis, int nb_samples)
{
    PlayerInfo *playerInfo = &pThis->m_player->m_playerInfo;

    int wanted_nb_samples = nb_samples;

    // When audio itself is the master clock there is nothing to correct.
    if (playerInfo->syncType == AV_SYNC_AUDIO_MASTER)
        return wanted_nb_samples;

    // Current audio-clock drift relative to the master clock (seconds).
    const double diff = get_clock(&playerInfo->audioClock) - get_clock(playerInfo->masterClock);

    if (isnan(diff) || fabs(diff) >= AV_NOSYNC_THRESHOLD) {
        // Difference is unusable or far too large (likely bad initial PTS):
        // reset the averaging filter instead of trying to compensate.
        pThis->audio_diff_avg_count = 0;
        pThis->audio_diff_cum = 0;
        return wanted_nb_samples;
    }

    // Feed the exponentially-weighted accumulator of the A-V difference.
    pThis->audio_diff_cum = diff + pThis->audio_diff_avg_coef * pThis->audio_diff_cum;

    if (pThis->audio_diff_avg_count < AUDIO_DIFF_AVG_NB) {
        // Not enough measurements yet for a reliable estimate.
        pThis->audio_diff_avg_count++;
        return wanted_nb_samples;
    }

    // Estimated average A-V difference from the accumulator.
    const double avg_diff = pThis->audio_diff_cum * (1.0 - pThis->audio_diff_avg_coef);

    if (fabs(avg_diff) >= pThis->audio_diff_threshold) {
        // Adjust the sample count proportionally to the drift, clamped to
        // +/- SAMPLE_CORRECTION_PERCENT_MAX percent of the original count.
        wanted_nb_samples = nb_samples + (int)(diff * pThis->audio_src.freq);
        const int lower = nb_samples * (100 - SAMPLE_CORRECTION_PERCENT_MAX) / 100;
        const int upper = nb_samples * (100 + SAMPLE_CORRECTION_PERCENT_MAX) / 100;
        wanted_nb_samples = av_clip(wanted_nb_samples, lower, upper);
    }

    return wanted_nb_samples;
}


/*
    取出音频帧，重采样后存储到audio_buf
*/
int RenderThread::renderNextAudio(RenderThread *pThis)
{
    PlayerInfo* playerInfo=&pThis->m_player->m_playerInfo;

    // While paused, feed nothing: the caller fills the device with silence.
    if (pThis->m_reqPause){
        return -1;
    }

    int retCode=-1;
    int resampled_data_size=-1;

    // Pull the next decoded audio frame, skipping frames whose serial no
    // longer matches the packet queue (stale frames left over after a seek).
    Frame *frame;
    do {
#if defined(_WIN32)
        // On Windows the SDL callback must not block for long: bail out once
        // we have waited roughly half of the hardware buffer duration.
        while (frame_queue_nb_remaining(&playerInfo->audioFrameQueue) == 0) {
            if ((av_gettime_relative() - audio_callback_time) > 1000000LL * pThis->audio_hw_buf_size / pThis->audio_tgt.bytes_per_sec / 2)
                return -1;
            av_usleep (1000);
        }
#endif
        if (!(frame = frame_queue_peek_readable(&playerInfo->audioFrameQueue,true))){
            return -1;
        }
        frame_queue_next(&playerInfo->audioFrameQueue);
    } while (frame->serial != playerInfo->audioPacketQueue.serial);

    // Size (in bytes) of the source frame's sample data.
    int data_size = av_samples_get_buffer_size(NULL, frame->frame->channels,
                                               frame->frame->nb_samples,
                                               (AVSampleFormat)frame->frame->format, 1);
    // Trust the frame's channel layout only if it agrees with the channel
    // count; otherwise derive a default layout from the count.
    int64_t dec_channel_layout =
            (frame->frame->channel_layout && frame->frame->channels == av_get_channel_layout_nb_channels(frame->frame->channel_layout)) ?
                frame->frame->channel_layout : av_get_default_channel_layout(frame->frame->channels);
    // Sample count after A-V sync compensation (may differ from nb_samples).
    int wanted_nb_samples = syncAudio(pThis, frame->frame->nb_samples);

    // (Re)create the resampler whenever the source parameters changed, or
    // when sync compensation is needed but no converter exists yet.
    if (frame->frame->format        != pThis->audio_src.fmt            ||
            dec_channel_layout       != pThis->audio_src.channel_layout ||
            frame->frame->sample_rate   != pThis->audio_src.freq           ||
            (wanted_nb_samples       != frame->frame->nb_samples && !pThis->m_audioConvertCtx))
    {
        if(pThis->m_audioConvertCtx){
            swr_free(&pThis->m_audioConvertCtx);
        }

        pThis->m_audioConvertCtx = swr_alloc_set_opts(NULL,
                                                      pThis->audio_tgt.channel_layout,
                                                      pThis->audio_tgt.fmt,
                                                      pThis->audio_tgt.freq,
                                                      dec_channel_layout,
                                                      (AVSampleFormat)frame->frame->format,
                                                      frame->frame->sample_rate,
                                                      0, NULL);

        if (!pThis->m_audioConvertCtx) {
            pThis->m_lastErrorMsg="swr_alloc_set_opts失败";
            return -1;
        }
        if((retCode=swr_init(pThis->m_audioConvertCtx)) < 0){
            pThis->m_lastErrorMsg=QString("swr_init调用失败（%1）").arg(avErrorCodeToString(retCode));
            swr_free(&pThis->m_audioConvertCtx);
            return -1;
        }

        // Remember the source parameters so the next frame can be compared.
        pThis->audio_src.channel_layout = dec_channel_layout;
        pThis->audio_src.channels = frame->frame->channels;
        pThis->audio_src.freq = frame->frame->sample_rate;
        pThis->audio_src.fmt = (AVSampleFormat)frame->frame->format;
    }

    // Resample into audio_buf1 when a converter exists; otherwise the frame's
    // data can be handed to the device directly.
    if (pThis->m_audioConvertCtx) {
        const uint8_t **in = (const uint8_t **)frame->frame->extended_data;
        uint8_t **out = &pThis->audio_buf1;
        // Output capacity in samples: scaled by the rate ratio plus headroom
        // for resampler-internal buffering.
        int out_count = (int64_t)wanted_nb_samples * pThis->audio_tgt.freq / frame->frame->sample_rate + 256;
        int out_size  = av_samples_get_buffer_size(NULL, pThis->audio_tgt.channels, out_count, pThis->audio_tgt.fmt, 0);
        if (out_size < 0) {
            pThis->m_lastErrorMsg=QString("av_samples_get_buffer_size调用失败（%1）").arg(avErrorCodeToString(out_size));
            qDebug()<<"[Error]"<<pThis->m_lastErrorMsg;
            return -1;
        }
        if (wanted_nb_samples != frame->frame->nb_samples) {
            // BUG FIX: the original `if (retCode=swr_set_compensation(...) < 0)`
            // parsed as `retCode = (... < 0)` because `<` binds tighter than `=`,
            // so retCode held 0/1 instead of the FFmpeg error code, and the error
            // message formatted out_size (a positive buffer size) instead of the
            // actual failure code. Parenthesize the assignment and report retCode.
            if ((retCode = swr_set_compensation(pThis->m_audioConvertCtx, (wanted_nb_samples - frame->frame->nb_samples) * pThis->audio_tgt.freq / frame->frame->sample_rate,
                                     wanted_nb_samples * pThis->audio_tgt.freq / frame->frame->sample_rate)) < 0) {
                pThis->m_lastErrorMsg=QString("swr_set_compensation调用失败（%1）").arg(avErrorCodeToString(retCode));
                qDebug()<<"[Error]"<<pThis->m_lastErrorMsg;
                return -1;
            }
        }

        av_fast_malloc(&pThis->audio_buf1, &pThis->audio_buf1_size, out_size);
        if (!pThis->audio_buf1){
            pThis->m_lastErrorMsg="av_fast_malloc调用失败";
            qDebug()<<"[Error]"<<pThis->m_lastErrorMsg;
            return AVERROR(ENOMEM);
        }

        int len2 = swr_convert(pThis->m_audioConvertCtx, out, out_count, in, frame->frame->nb_samples);
        if (len2 < 0) {
            pThis->m_lastErrorMsg=QString("swr_convert调用失败（%1）").arg(avErrorCodeToString(len2));
            qDebug()<<"[Error]"<<pThis->m_lastErrorMsg;
            return -1;
        }
        if (len2 == out_count) {
            // Output buffer was filled completely — the resampler may still be
            // holding samples; reinitialize it to drop the internal backlog.
            if (swr_init(pThis->m_audioConvertCtx) < 0){
                swr_free(&pThis->m_audioConvertCtx);
            }
        }
        pThis->audio_buf = pThis->audio_buf1;
        resampled_data_size = len2 * pThis->audio_tgt.channels * av_get_bytes_per_sample(pThis->audio_tgt.fmt);
    } else {
        pThis->audio_buf = frame->frame->data[0];
        resampled_data_size = data_size;
    }

    // Advance the audio clock to the end of this frame (PTS + frame duration).
    if (!isnan(frame->pts)){
        pThis->audio_clock = frame->pts + (double) frame->frame->nb_samples / frame->frame->sample_rate;
    }else{
        pThis->audio_clock = NAN;
    }
    pThis->audio_clock_serial = frame->serial;

    return resampled_data_size;
}

