//
// Created by aruba on 2020/10/21.
//

#include "_ffmpeg_video_decoder.h"

//解码视频数据线程
void *decodeVideo(void *arg) {
    FFmpegVideoDecoder *videoDecoder = (FFmpegVideoDecoder *) (arg);
    //初始化
    //存放压缩数据
    videoDecoder->pkt = (AVPacket *) (av_malloc(sizeof(AVPacket)));
    av_init_packet(videoDecoder->pkt);

    //存放解压数据
    videoDecoder->picture = av_frame_alloc();

    //存放转码数据
    videoDecoder->picture_rgb = av_frame_alloc();
    //为转码数据分配内存
    videoDecoder->data_size = (uint8_t *) (av_malloc(
            (size_t) avpicture_get_size(AV_PIX_FMT_RGBA, videoDecoder->codecContext->width,
                                        videoDecoder->codecContext->height)));
    avpicture_fill((AVPicture *) videoDecoder->picture_rgb, videoDecoder->data_size,
                   AV_PIX_FMT_RGBA,
                   videoDecoder->codecContext->width,
                   videoDecoder->codecContext->height);

    //为Window配置长宽和像素编码
    videoDecoder->setWindowBuffer();

    //转码组件上下文,前三个参数为原视频的宽高和编码，后三个为转码后的视频宽高和编码，还可以传入过滤器对视频做处理，这边不做处理
    videoDecoder->swsContext = sws_getContext(videoDecoder->codecContext->width,
                                              videoDecoder->codecContext->height,
                                              videoDecoder->codecContext->pix_fmt,
                                              videoDecoder->codecContext->width,
                                              videoDecoder->codecContext->height,
                                              AV_PIX_FMT_RGBA, SWS_BILINEAR, NULL, NULL, NULL
    );


    //开始解码
    int picture_ptr = 0;
    double last_play  //上一帧的播放时间
    , play             //当前帧的播放时间
    , last_delay    // 上一次播放视频的两帧视频间隔时间
    , delay         //两帧视频间隔时间
    , audio_clock //音频轨道 实际播放时间
    , diff   //音频帧与视频帧相差时间
    , sync_threshold
    , start_time  //从第一帧开始的绝对时间 单位：s
    , pts
    , actual_delay//真正需要延迟时间
    ;//两帧间隔合理间隔时间

    while (videoDecoder->isPlay) {
        start_time = av_gettime() / 1000000.0;
        
        videoDecoder->get(videoDecoder->pkt);//从队列获取压缩数据
        //解码
        avcodec_decode_video2(videoDecoder->codecContext, videoDecoder->picture, &picture_ptr,
                              videoDecoder->pkt);

        if (picture_ptr > 0) {
            //转码 data中存放着真实数据，linesize为一行的数据，0为转码起始位置，高度为整个画面高
            sws_scale(videoDecoder->swsContext, videoDecoder->picture->data,
                      videoDecoder->picture->linesize, 0, videoDecoder->picture->height,
                      videoDecoder->picture_rgb->data, videoDecoder->picture_rgb->linesize);

            if ((pts = av_frame_get_best_effort_timestamp(videoDecoder->picture)) ==
                AV_NOPTS_VALUE) {
                pts = 0;
            }
            //当前播放时间
            play = pts * av_q2d(videoDecoder->time_base);
            //纠正时间
            play = videoDecoder->synchronize(videoDecoder->picture, play);
            delay = play - last_play;
            if (delay <= 0 || delay > 1) {//用上一帧延迟时间修正
                delay = last_delay;
            }
            audio_clock = videoDecoder->audioDecoder->clock;
            last_delay = delay;
            last_play = play;
            //音频与视频的时间差
            diff = videoDecoder->clock - audio_clock;
            __android_log_print(ANDROID_LOG_ERROR, LOG_TAG_FFMPEG_VIDEO_DECODER,
                                "音频与视频的时间差:%f", diff);
            //在合理范围外  才会延迟  加快
            sync_threshold = (delay > 0.05 ? 0.05 : delay);

            if (fabs(diff) < 10) {//时间差一般不会大于10s
                if (diff <= -sync_threshold) {//视频播放慢了，则不休眠，直接播放下一帧
                    delay = 0;
                } else if (diff >= sync_threshold) {//视频播放快了，则休眠的久一些
                    delay = 2 * delay;
                }

                __android_log_print(ANDROID_LOG_ERROR, LOG_TAG_FFMPEG_VIDEO_DECODER,
                                    "修正后的延迟:%f", delay);
            }
            start_time += delay;
            //真正的延迟时间：需要减去上面代码跑的时间
            actual_delay = start_time - av_gettime() / 1000000.0;
            if (actual_delay < 0.01) {//视频播放慢了，直接播放下一帧
                actual_delay = 0.01;
            }
            __android_log_print(ANDROID_LOG_ERROR, LOG_TAG_FFMPEG_VIDEO_DECODER,
                                "真正的延迟:%f", actual_delay);
            //经验算法+6ms
            av_usleep(actual_delay * 1000000.0 + 6000);

            //=========绘制========
            //锁定
            ANativeWindow_lock(videoDecoder->aNativeWindow, &videoDecoder->out_buff, NULL);

            //将转码后的frame（picture_rgb）中的每一行数据复制到window的视频缓冲区（out_buff）的每一行
            //picture_rgb中二维数据的首地址
            uint8_t *frame_data_p = videoDecoder->picture_rgb->data[0];
            //视频缓存区中二维数据的首地址
            uint8_t *buff_data_p = (uint8_t *) (videoDecoder->out_buff.bits);
            //视频缓存区有多少个字节 RGBA8888占4个字节
            int destStride = videoDecoder->out_buff.stride * 4;
            for (int i = 0; i < videoDecoder->codecContext->height; i++) {
                memcpy(buff_data_p, frame_data_p, videoDecoder->picture_rgb->linesize[0]);
                buff_data_p += destStride;
                frame_data_p += videoDecoder->picture_rgb->linesize[0];
            }

            ANativeWindow_unlockAndPost(videoDecoder->aNativeWindow);
        }
    }

    videoDecoder->release();
    //内存释放
    delete (videoDecoder);
    pthread_exit(0);
}

//设置缓冲区
void FFmpegVideoDecoder::setWindowBuffer() {
    if (codecContext && aNativeWindow) {
        ANativeWindow_setBuffersGeometry(aNativeWindow, codecContext->width,
                                         codecContext->height,
                                         WINDOW_FORMAT_RGBA_8888);
    }
}

//Attach the rendering surface's native window to this decoder and size its
//buffers for the video stream. A NULL window is ignored.
void FFmpegVideoDecoder::initWindow(ANativeWindow *aNativeWindow) {
    if (aNativeWindow != NULL) {
        this->aNativeWindow = aNativeWindow;
        //configure buffer geometry and pixel format
        setWindowBuffer();
    }
}

//Begin playback: spawn the video decoding thread with this decoder
//instance as its argument.
void FFmpegVideoDecoder::play() {
    startPlay(decodeVideo, this);
}

//释放资源
void FFmpegVideoDecoder::release() {
    __android_log_print(ANDROID_LOG_ERROR, LOG_TAG_FFMPEG_VIDEO_DECODER,
                        "释放视频资源");
    av_packet_unref(pkt);
    av_free_packet(pkt);
    sws_freeContext(swsContext);
    aNativeWindow = NULL;
    av_frame_free(&picture_rgb);
    av_frame_free(&picture);
    free(data_size);
    avcodec_close(codecContext);
}

//Reconcile a frame's presentation time with the decoder's video clock.
//A non-zero `play` advances the clock to it; a zero pts falls back to the
//current clock. The clock is then advanced by this frame's display duration
//(base frame time plus any repeat_pict hold requested by the decoder).
//Returns the (possibly corrected) presentation time.
double FFmpegVideoDecoder::synchronize(AVFrame *frame, double play) {
    //clock tracks the current playback position
    if (play == 0) {
        //pts of 0: reuse the previous frame's time
        play = clock;
    } else {
        clock = play;
    }
    //repeat_pict = how long the decoder wants this picture held:
    //extra_delay = repeat_pict / (2 * fps)
    double repeats = frame->repeat_pict;
    //per-frame duration, taken from the AVCodecContext (not the stream)
    double base_delay = av_q2d(codecContext->time_base);
    //e.g. a time_base of 1/25 slices a second into 25 parts, so fps = 25
    double frames_per_sec = 1 / base_delay;
    double hold_extra = repeats / (2 * frames_per_sec);
    double total_delay = hold_extra + base_delay;
    //move the clock to where the next frame should be shown
    clock += total_delay;
    return play;
}
