//
// Created by 张易生 on 2022/1/14.
//

#include "ZYSVideoPlayer.h"

// Wires up the shared play-status flags and the Java callback bridge,
// creates the video packet queue and the mutex guarding the codec context.
ZYSVideoPlayer::ZYSVideoPlayer(ZYSPlayStatus *playStatus, ZYSCallJava *callJava) {
    this->callJava = callJava;
    this->playStatus = playStatus;
    this->queue = new ZYSQueue(playStatus);
    pthread_mutex_init(&this->codecMutex, nullptr);
}

/**
 * pthread entry point for the video decode thread.
 *
 * @param data the ZYSVideoPlayer instance passed by play().
 *
 * Delegates to playVideo_cpp(), which never returns (it calls
 * pthread_exit itself at the end of the decode loop). This line is
 * therefore only reached when data is null; the original code did
 * pthread_exit(&zysVideo->thread_play) here, dereferencing the null
 * pointer. Exit with a null status instead.
 */
void *playVideo_c(void *data) {
    ZYSVideoPlayer *zysVideo = (ZYSVideoPlayer *) data;

    if (zysVideo != nullptr) {
        zysVideo->playVideo_cpp(); // does not return (calls pthread_exit)
    }

    pthread_exit(nullptr);
}

/**
 * Decode loop run on the video thread: polls packets from the queue,
 * decodes them, converts to YUV420P when necessary, syncs against the
 * audio clock, and hands the Y/U/V planes to the Java layer for
 * OpenGL rendering.
 *
 * Loops until playStatus->exit is set. While seeking or paused it just
 * sleeps and retries; when the queue drains it reports "loading" to the
 * app layer once and waits for more data.
 */
void ZYSVideoPlayer::playVideo_cpp() {

    while (playStatus != nullptr && !playStatus->exit) {
        if (playStatus->seek) {
            // A seek is in progress — back off and retry.
            av_usleep(1000 * 100);
            continue;
        }
        if (playStatus->pause) {
            av_usleep(1000 * 100);
            continue;
        }
        if (queue->getQueueSize() == 0) {
            // Queue drained (e.g. poor network): notify the app layer once,
            // then wait for the demuxer to refill the queue.
            if (!playStatus->load) {
                playStatus->load = true;
                callJava->onCallLoad(CHILD_THREAD, true);
                av_usleep(1000 * 100);
                continue;
            }
        }

        AVPacket *avPacket = av_packet_alloc();
        if (queue->getAVPacket(avPacket) != 0) { // non-zero means failure
            av_packet_free(&avPacket); // frees and sets avPacket to nullptr
            continue;
        }

        // Decoding is expensive and the codec context is shared with the
        // seek path — serialize access.
        pthread_mutex_lock(&codecMutex);

        if (avcodec_send_packet(avCodecContext, avPacket) != 0) {
            // non-zero means failure
            av_packet_free(&avPacket);
            pthread_mutex_unlock(&codecMutex);
            continue;
        }

        // Pull the decoded raw frame.
        AVFrame *avFrame = av_frame_alloc();
        if (avcodec_receive_frame(avCodecContext, avFrame) != 0) {
            // No frame available for this packet (decoder needs more input).
            av_frame_free(&avFrame);
            av_packet_free(&avPacket);
            pthread_mutex_unlock(&codecMutex);
            continue;
        }

        // Decode succeeded. Raw planes: data[0]=Y, data[1]=U, data[2]=V.
        if (avFrame->format == AV_PIX_FMT_YUV420P) {
            // Already YUV420P — sync to the audio clock, then render directly.
            if (audio != nullptr) {
                double diff = getFrameDiffTime(avFrame);
                av_usleep(getDelayTime(diff) * 1000000);
            } else {
                av_usleep(defaultDelayTime * 1000000);
            }
            callJava->onCallRenderYUV(
                    avCodecContext->width,
                    avCodecContext->height,
                    avFrame->data[0],
                    avFrame->data[1],
                    avFrame->data[2]);

        } else {
            // Any other pixel format: convert to YUV420P with libswscale
            // before handing it to the renderer.
            AVFrame *pFrameYUV420P = av_frame_alloc();
            int num = av_image_get_buffer_size(
                    AV_PIX_FMT_YUV420P,
                    avCodecContext->width,
                    avCodecContext->height,
                    1);
            uint8_t *buffer = static_cast<uint8_t *>(av_malloc(num * sizeof(uint8_t)));
            av_image_fill_arrays(
                    pFrameYUV420P->data,
                    pFrameYUV420P->linesize,
                    buffer,
                    AV_PIX_FMT_YUV420P,
                    avCodecContext->width,
                    avCodecContext->height,
                    1);
            SwsContext *sws_ctx = sws_getContext(
                    avCodecContext->width,
                    avCodecContext->height,
                    avCodecContext->pix_fmt,
                    avCodecContext->width,
                    avCodecContext->height,
                    AV_PIX_FMT_YUV420P,
                    SWS_BICUBIC, NULL, NULL, NULL);

            if (!sws_ctx) {
                // BUGFIX: the original leaked avFrame and avPacket here.
                av_frame_free(&pFrameYUV420P);
                av_free(buffer);
                av_frame_free(&avFrame);
                av_packet_free(&avPacket);
                pthread_mutex_unlock(&codecMutex);
                continue;
            }
            sws_scale(
                    sws_ctx,
                    reinterpret_cast<const uint8_t *const *>(avFrame->data),
                    avFrame->linesize,
                    0,
                    avFrame->height,
                    pFrameYUV420P->data,
                    pFrameYUV420P->linesize);

            if (audio != nullptr) {
                double diff = getFrameDiffTime(avFrame);
                av_usleep(getDelayTime(diff) * 1000000);
            } else {
                av_usleep(defaultDelayTime * 1000000);
            }

            // Render the CONVERTED frame. BUGFIX: the original passed
            // avFrame->data here, discarding the sws_scale result and
            // feeding the renderer planes in the wrong pixel format.
            callJava->onCallRenderYUV(
                    avCodecContext->width,
                    avCodecContext->height,
                    pFrameYUV420P->data,
                    pFrameYUV420P->data[1],
                    pFrameYUV420P->data[2]);

            av_frame_free(&pFrameYUV420P);
            av_free(buffer);
            sws_freeContext(sws_ctx);
        }

        av_frame_free(&avFrame);
        av_packet_free(&avPacket);
        pthread_mutex_unlock(&codecMutex);
    }

    // Terminate the decode thread.
    pthread_exit(&thread_play);

}

// Spawns the decode/render thread. The thread routine playVideo_c
// receives `this` and forwards into playVideo_cpp().
void ZYSVideoPlayer::play() {
    pthread_create(&thread_play, nullptr, playVideo_c, this);
}

// Asks the decode loop to idle; playVideo_cpp() polls this flag on
// every iteration and sleeps while it is set.
void ZYSVideoPlayer::pause() {
    if (playStatus == nullptr) {
        return;
    }
    playStatus->pause = true;
}

// Clears the pause flag so the decode loop picks playback back up on
// its next poll.
void ZYSVideoPlayer::resume() {
    if (playStatus == nullptr) {
        return;
    }
    playStatus->pause = false;
}

/**
 * Tears down the player: signals the decode thread to exit, then frees
 * the packet queue, the codec context, and the objects received in the
 * constructor.
 *
 * BUGFIX: the exit flag is now raised BEFORE the queue is deleted —
 * the original deleted the queue first, so the still-running decode
 * thread could dereference a freed queue.
 *
 * NOTE(review): there is no pthread_join(thread_play) here, so the
 * decode thread may still be inside playVideo_cpp() while these
 * resources are freed — confirm the caller guarantees the thread has
 * exited before release() runs.
 */
void ZYSVideoPlayer::release() {
    if (playStatus != nullptr) {
        playStatus->exit = true;
    }
    if (queue != nullptr) {
        queue->clearAVPacket();
        delete (queue);
        queue = nullptr;
    }
    if (avCodecContext != nullptr) {
        avcodec_close(avCodecContext);
        avcodec_free_context(&avCodecContext); // frees and nulls the pointer
    }
    if (playStatus != nullptr) {
        delete (playStatus);
        playStatus = nullptr;
    }
    if (callJava != nullptr) {
        delete (callJava);
        callJava = nullptr;
    }
}

/**
 * BUGFIX: the mutex created in the constructor with pthread_mutex_init
 * was never destroyed, leaking the pthread resource on every player
 * instance.
 */
ZYSVideoPlayer::~ZYSVideoPlayer() {
    pthread_mutex_destroy(&codecMutex);
}

//计算和音频当前播放时间的差值
double ZYSVideoPlayer::getFrameDiffTime(AVFrame *avFrame) {
    //    获取处理之后的视频时间戳
    double pts = av_frame_get_best_effort_timestamp(avFrame);
    if (pts == AV_NOPTS_VALUE) {
        pts = 0;
    }

    //     1.001*40ms
//    pts=pts * time_base.num / time_base.den;
    pts *= av_q2d(time_base);

    if (pts > 0) {
        clock = pts;
    }

    double diff = audio->clock - clock;
    return diff;
}

/**
 * Maps the audio/video clock difference to the per-frame sleep time.
 *
 * @param diff audio->clock - video clock, in seconds (positive: audio
 *             is ahead; negative: video is ahead).
 * @return the delay (seconds) to sleep before rendering the next frame.
 *
 * Policy:
 *  - |diff| >= 10s : drift is hopeless — drop the queue that is behind
 *    and restart from the default frame delay.
 *  - diff >= 500ms : audio ahead — render video with no sleep.
 *  - diff <= -500ms: video ahead — sleep twice the default delay.
 *  - diff >= 3ms   : audio slightly ahead — shrink delay to 2/3 default.
 *  - diff <= -3ms  : video slightly ahead — stretch delay to 1.5x default.
 *  - otherwise     : in sync — use the default delay.
 */
double ZYSVideoPlayer::getDelayTime(double diff) {
    if (diff >= 10) {
        // Audio far ahead: discard the pending video packets.
        queue->clearAVPacket();
        delayTime = defaultDelayTime;
        return delayTime;
    }
    if (diff <= -10) {
        // Video far ahead: discard the pending audio packets.
        audio->queue->clearAVPacket();
        delayTime = defaultDelayTime;
        return delayTime;
    }

    if (diff >= 0.5) {
        delayTime = 0;
        return delayTime;
    }
    if (diff <= -0.5) {
        delayTime = defaultDelayTime * 2;
        return delayTime;
    }
    if (diff >= 0.003) { // audio slightly ahead — speed video up
        // The original re-clamped this value into [default/2, default*2],
        // but 2/3 of a positive default delay always lies in that range —
        // the clamp was dead code and has been removed.
        delayTime = defaultDelayTime * 2 / 3;
        return delayTime;
    }
    if (diff <= -0.003) { // video slightly ahead — slow video down
        // Same dead clamp removed: 3/2 of a positive default delay is
        // always within [default/2, default*2].
        delayTime = defaultDelayTime * 3 / 2;
        return delayTime;
    }

    return defaultDelayTime;
}


