//
// Created by tys on 2017/10/25.
//

#include <log.h>
#include <pthread.h>
#include <jni.h>
#include "include/VideoMedia.h"


/**
 * Construct the video pipeline.
 * @param native_window_play callback invoked with each converted RGBA frame
 *        (see playVideo), stored in play_call_back.
 * Initializes the packet-queue mutex/condvar and zeroes the video clock.
 */
VideoMedia::VideoMedia(void (*native_window_play)(AVFrame *, VideoMedia *)) : play_call_back(
        native_window_play) {
    LOG_E("--", "%s\n", "Video构造函数");
    pthread_mutex_init(&mutex, NULL);
    pthread_cond_init(&cond, NULL);
    clock = 0;
    // The destructor tests `if (sws_ctx)`; make that well-defined even when
    // initSwsCtx() is never called (assumes the header does not already
    // default-initialize sws_ctx — TODO confirm against VideoMedia.h).
    sws_ctx = NULL;
}


/**
 * Tear down the video pipeline.
 * Frees any packets still queued (put() clones them with av_packet_clone(),
 * so this object owns every queued AVPacket*), destroys the mutex/condvar
 * and releases the scaler context if one was created.
 */
VideoMedia::~VideoMedia() {
    LOG_E("--", "%s\n", "Video析构函数");
    // Drain the queue: cloned packets that were never consumed would leak.
    while (!queue.empty()) {
        AVPacket *pkt = queue.front();
        queue.pop();
        av_packet_free(&pkt);
    }
    pthread_mutex_destroy(&mutex);
    pthread_cond_destroy(&cond);
    if (sws_ctx)
        sws_freeContext(sws_ctx);
}

/**
 * Enqueue a packet for the decode thread.
 * @param packet caller-owned packet; a deep clone is queued, so the caller
 *        may unref/reuse its copy immediately after this returns.
 * Signals the condvar so a get() blocked on an empty queue wakes up.
 */
void VideoMedia::put(AVPacket *packet) {
    LOG_I("--", "%s\n", "视频入队列");
    AVPacket *clone = av_packet_clone(packet);
    if (!clone) {
        // Allocation failure: drop the packet instead of queueing a NULL
        // that the decode thread would pass to the decoder.
        LOG_E("--", "%s\n", "av_packet_clone failed, packet dropped");
        return;
    }
    pthread_mutex_lock(&mutex);
    queue.push(clone);
    pthread_cond_signal(&cond);
    pthread_mutex_unlock(&mutex);
}

/**
 * Blocking pop of the next packet for the decode thread.
 * @param packet out-parameter; receives the queued (cloned, now
 *        caller-owned) AVPacket*, or NULL if none was produced
 *        (playback stopped). The original left *packet untouched on the
 *        empty-after-wait path, handing the caller a stale pointer.
 */
void VideoMedia::get(AVPacket **packet) {
    *packet = NULL;
    pthread_mutex_lock(&mutex);
    // Wait in a loop: pthread_cond_wait may wake spuriously, and put() /
    // setIsPlay() both signal this condvar.
    while (queue.empty() && isPlay) {
        LOG_E("---", "%s", "video waiting");
        pthread_cond_wait(&cond, &mutex);
    }
    if (!queue.empty() && isPlay) {
        LOG_I("---", "%s", "取视频数据播放");
        *packet = queue.front();
        queue.pop();
    }
    pthread_mutex_unlock(&mutex);
}


void VideoMedia::setIsPlay(bool isPlay) {
    VideoMedia::isPlay = isPlay;
}

void *playVideo(void *content) {
    LOG_E("---", "%s, tid:%ld\n", "视频播放线程", pthread_self());
    VideoMedia *video = (VideoMedia *) content;
    int got_frame, ret;
    AVFrame *frame = av_frame_alloc();
    AVFrame *dst_frame = av_frame_alloc();
    uint8_t *out_buffer = (uint8_t *) av_malloc((size_t) avpicture_get_size(AV_PIX_FMT_RGBA,
                                                                            video->dst_pic_w,
                                                                            video->dst_pic_h));
    ret = avpicture_fill((AVPicture *) dst_frame, out_buffer, AV_PIX_FMT_RGBA,
                         video->dst_pic_w, video->dst_pic_h);
    double start_time = av_gettime() / 1000000.0;//当前时间，微秒
    double pts;//frame pts
    double play, last_play = 0, delay, last_delay = 0;//当前帧, 上一帧显示时间, 2帧时间差
    double diff, sync_thread, actual_delay;
    AVPacket *packet = NULL;
    while (video->isPlay) {

        video->get(&packet);
        if ((ret = avcodec_decode_video2(video->dec_ctx, frame, &got_frame, packet)) < 0) {
            LOG_E("Video", "decode video error %s\n", av_err2str(ret));
            return (void *) ret;
        }
        if (!got_frame) {
            continue;
        }
        /* convert to destination format 转换后frame宽高一直是0*/
        sws_scale(video->sws_ctx, (const uint8_t *const *) frame->data, frame->linesize, 0,
                  frame->height, dst_frame->data, dst_frame->linesize);

        if ((pts = av_frame_get_best_effort_timestamp(frame)) ==
            AV_NOPTS_VALUE) {//等于 frame->pts，frame->best_effort_timestamp
            pts = 0;
        }
        play = pts * (av_q2d(video->time_base));
        play = video->synchronize_display_time(frame, play);

        delay = play - last_play;
        if (delay <= 0 || delay > 1) {//极端情况
            delay = last_delay;
        }
        last_delay = delay;
        last_play = play;
        //音频与视频的时间差
        diff = video->clock - video->audio->clock;
        //在合理范围外  才会延迟  加快
        sync_thread = delay > 0.01 ? 0.01 : delay;

        if (fabs(diff) < 10) {// >10s 视频，音频有一个已经结束了
            if (diff <= -sync_thread) {//视频慢了需要加快
                delay = 0;
            } else if (diff >= sync_thread) {
                delay = 2 * delay;
            }
        }
        start_time += delay;
        actual_delay = start_time - av_gettime() / 1000000.0;
        LOG_E("1111", "--- actual_delay:%f",  actual_delay);
        if (actual_delay < 0.01) {
            actual_delay = 0.01;
        }
        LOG_E("1111", "actual_delay:%f, diff:%f, sync_thread:%f, delay:%f, play:%f,last_play:%f",actual_delay,diff,sync_thread,delay,play,last_delay);
        LOG_E("1111", "--- %d",  (unsigned int) (actual_delay * 1000000.0 + 5000));
        av_usleep((unsigned int) (actual_delay * 1000000.0 + 6000));
        video->play_call_back(dst_frame, video);
        av_packet_unref(packet);
    }
    av_frame_free(&frame);
    av_frame_free(&dst_frame);
    av_free(out_buffer);
    return (void *) ret;
}

void VideoMedia::play() {
    isPlay = 1;
    initSwsCtx();
    pthread_create(&thread_t_video, NULL, playVideo, this);
    pthread_detach(thread_t_video);
}

/**
 * Create the scaling context: decoder pix_fmt/size -> RGBA at dst_pic_w x dst_pic_h.
 * @return 0 on success, AVERROR(EINVAL) if the context cannot be created.
 * The original's failure path jumped past `return ret` and fell off the end
 * of a non-void function (undefined behavior) after freeing a NULL context.
 */
int VideoMedia::initSwsCtx() {
    sws_ctx = sws_getContext(dec_ctx->width, dec_ctx->height, dec_ctx->pix_fmt,
                             dst_pic_w, dst_pic_h, AV_PIX_FMT_RGBA,
                             SWS_BILINEAR, NULL, NULL, NULL);
    if (!sws_ctx) {
        LOG_E("Video", "Impossible to create scale context for the conversion "
                "fmt:%s s:%dx%d -> fmt:%s s:%dx%d\n",
              av_get_pix_fmt_name(dec_ctx->pix_fmt), dec_ctx->width, dec_ctx->height,
              av_get_pix_fmt_name(AV_PIX_FMT_RGBA), dec_ctx->width, dec_ctx->height);
        return AVERROR(EINVAL);
    }
    return 0;
}

/**
 * Advance the internal video clock and resolve a usable display timestamp.
 * @param frame   the decoded frame (repeat_pict contributes extra delay).
 * @param display the frame's display time in seconds, or 0 when no pts
 *                was available.
 * @return the display time — either the one passed in, or the current clock
 *         when the pts was missing.
 */
double VideoMedia::synchronize_display_time(AVFrame *frame, double display) {
    // `clock` tracks the current playback position.
    if (display != 0) {
        clock = display;
    } else {
        // pts was 0: fall back to the previous frame's clock value.
        display = clock;
    }
    // frame->repeat_pict says how long this picture must be repeated on
    // display; the extra delay it implies is repeat_pict / (2 * fps).
    double repeated = frame->repeat_pict;
    // Deliberately derived from the AVCodecContext time_base, not the stream's.
    double base_delay = av_q2d(dec_ctx->time_base);
    // A time_base of 1/25 splits one second into 25 parts -> fps = 1/(1/25) = 25.
    double frames_per_sec = 1 / base_delay;
    double extra = repeated / (2 * frames_per_sec);
    // Advance the clock even for frames whose pts was missing.
    clock += extra + base_delay;
    return display;
}

/** Attach the audio stream whose clock is read for A/V sync in playVideo(). */
void VideoMedia::setAudio(AudioMedia *audio) {
    this->audio = audio;
}





