//
// Created by yyl on 2018/6/21.
//

#include "YVideo.h"


/**
 * Wires the shared play-state and the Java callback bridge into this video
 * handler, creates the packet queue and prepares the mutex that guards the
 * software codec context.
 */
YVideo::YVideo(PlayStatus *playStatus, CallJava *callJava)
        : playStatus(playStatus),
          callJava(callJava),
          queue(new YQueue(playStatus)) {
    pthread_mutex_init(&codecMutexLock, NULL);
}

/**
 * Destructor: frees the packet queue if it is still owned and destroys the
 * codec mutex.
 *
 * Fix: the queue allocated in the constructor was never deleted here, so it
 * leaked whenever release() was not called first.  release() sets queue to
 * NULL after deleting it, so this guarded delete cannot double-free.
 */
YVideo::~YVideo() {
    if (queue != NULL) {
        delete queue;
        queue = NULL;
    }
    pthread_mutex_destroy(&codecMutexLock);
}


void YVideo::initVideoStart() {
    while (playStatus != NULL && !playStatus->exit) {
        if (playStatus->seek) {
            av_usleep(1000 * 100);//100毫秒
            continue;
        }
        if (playStatus->pause) {
            av_usleep(1000 * 100);//100毫秒
            continue;
        }
        if (queue->getQueueSize() == 0) {//音频播放中 队列没有数据就等待
            if (!playStatus->load) {
                playStatus->load = true;
                if (callJava != NULL)
                    callJava->onCallBackEvent(THREAD_CHILD, EVENT_STATE_PLAY, EVENT_TRUE, 0);
            }
            av_usleep(1000 * 100);//100毫秒
            continue;
        } else {
            if (playStatus->load) {
                playStatus->load = false;
                if (callJava != NULL)
                    callJava->onCallBackEvent(THREAD_CHILD, EVENT_STATE_PLAY, EVENT_FALSE, 0);
            }
        }
        AVPacket *avPacket = av_packet_alloc();
        if (queue->getAvpacket(avPacket) != 0) {
            //   av_usleep(1000 * 100);//100毫秒
            av_packet_free(&avPacket);//这里只会释放avPacket *data 以外的引用 因为 *data 被另一个 packet 引用了
            av_free(avPacket);
            avPacket = NULL;
            continue;
        }
        //解码渲染

        if (codecType == HardwareAcceleration) {//硬件解码渲染
            LOGI("硬件解码渲染");
            if (av_bsf_send_packet(abs_ctx, avPacket) != 0) {
                av_packet_free(&avPacket);
                av_free(avPacket);
                avPacket = NULL;
                continue;
            }
            while (av_bsf_receive_packet(abs_ctx, avPacket) == 0) {
                double diff = getFrameDiffTime(NULL, avPacket);
                //     LOGE("diff is %f", diff);
                av_usleep(getDelayTime(diff) * 1000000);
                callJava->onCallDecodeAVPacket(avPacket->size, avPacket->data);
                av_packet_free(&avPacket);
                av_free(avPacket);
                continue;
            }
            avPacket = NULL;
        } else {  //软解码渲染
            pthread_mutex_lock(&codecMutexLock);
            result = avcodec_send_packet(avCodecContext, avPacket);// 第一步
            if (result != 0) {
                //    av_usleep(1000 * 100);//100毫秒
                av_packet_free(&avPacket);
                av_free(avPacket);
                avPacket = NULL;
                pthread_mutex_unlock(&codecMutexLock);
                continue;
            }
            AVFrame *avFrame = av_frame_alloc();
            result = avcodec_receive_frame(avCodecContext, avFrame);// 第二步
            if (result != 0) {
                av_frame_free(&avFrame);
                av_free(avFrame);
                avFrame = NULL;
                av_packet_free(&avPacket);
                av_free(avPacket);
                avPacket = NULL;
                pthread_mutex_unlock(&codecMutexLock);
                continue;
            }
            //  LOGI("解码 video")

            if (avFrame->format == AV_PIX_FMT_YUV420P) {//直接渲染
                LOGI("解码 format = AV_PIX_FMT_YUV420P")
                double diff = getFrameDiffTime(avFrame, NULL);
                double time = getDelayTime(diff);
                //    LOGI("解码 为 YUV420P   format = AV_PIX_FMT_YUV420P  diff=%lf time=%lf", diff, time);
                av_usleep(time * 1000 * 1000);
                callJava->onCallRenderYUV(
                        avCodecContext->width,
                        avCodecContext->height,
                        avFrame->data[0],
                        avFrame->data[1],
                        avFrame->data[2]);
            } else {//转换格式 为 YUV420P

                //   LOGI("转换格式 为 YUV420P   format = AV_PIX_FMT_YUV420P")
                AVFrame *avFrameYUV420P = av_frame_alloc();

                int result1 = av_image_get_buffer_size(
                        AV_PIX_FMT_YUV420P,
                        avCodecContext->width,
                        avCodecContext->height,
                        1);
                uint8_t *buffer = static_cast<uint8_t *>(av_malloc(result1 * sizeof(uint8_t)));
                av_image_fill_arrays(//根据指定的图像*参数和提供的数组设置数据指针和行数
                        avFrameYUV420P->data,
                        avFrameYUV420P->linesize,
                        buffer,
                        AV_PIX_FMT_YUV420P,
                        avCodecContext->width,
                        avCodecContext->height,
                        1);
                SwsContext *swsContext = sws_getContext(
                        avCodecContext->width,
                        avCodecContext->height,
                        avCodecContext->pix_fmt,
                        avCodecContext->width,
                        avCodecContext->height,
                        AV_PIX_FMT_YUV420P,
                        SWS_BICUBIC, NULL, NULL, NULL);
                if (!swsContext) {
                    av_frame_free(&avFrameYUV420P);
                    av_free(avFrameYUV420P);
                    av_free(buffer);
                    pthread_mutex_unlock(&codecMutexLock);
                    continue;
                }
                sws_scale(swsContext,
                          reinterpret_cast<const uint8_t *const *>(avFrame->data),
                          avFrame->linesize,
                          0,
                          avFrame->height,
                          avFrameYUV420P->data,
                          avFrameYUV420P->linesize);

                double diff = getFrameDiffTime(avFrameYUV420P, NULL);
                double time = getDelayTime(diff);
                LOGI("转换格式 为 YUV420P   format = AV_PIX_FMT_YUV420P  diff=%lf time=%lf", diff, time);
                av_usleep(time * 1000 * 1000);
                callJava->onCallRenderYUV(
                        avCodecContext->width,
                        avCodecContext->height,
                        avFrameYUV420P->data[0],
                        avFrameYUV420P->data[1],
                        avFrameYUV420P->data[2]);

                av_frame_free(&avFrameYUV420P);
                av_free(avFrameYUV420P);
                av_free(buffer);
                sws_freeContext(swsContext);
            }
            av_frame_free(&avFrame);
            av_free(avFrame);
            avFrame = NULL;
            av_packet_free(&avPacket);
            av_free(avPacket);
            avPacket = NULL;
            pthread_mutex_unlock(&codecMutexLock);
        }
    }
}

/**
 * Tears down playback resources: releases the queue so the decode loop can
 * exit, joins the decode thread, then frees the bitstream filter and codec
 * context and clears the shared references.
 */
void YVideo::release() {
    if (queue != NULL) {
        queue->release(); // wake any blocked getAvpacket() so the thread can exit
    }
    pthread_join(threadPlay, NULL); // wait for decodeVideoPlay to finish

    if (queue != NULL) {
        delete queue;
        queue = NULL;
    }
    if (abs_ctx != NULL) {
        av_bsf_free(&abs_ctx); // also NULLs abs_ctx
        abs_ctx = NULL;
    }
    if (avCodecContext != NULL) {
        // Hold the codec lock so the decode loop cannot touch the context
        // while it is being torn down.
        pthread_mutex_lock(&codecMutexLock);
        avcodec_close(avCodecContext);
        avcodec_free_context(&avCodecContext);
        avCodecContext = NULL;
        pthread_mutex_unlock(&codecMutexLock);
    }

    // Not owned here — just drop the references.
    playStatus = NULL;
    callJava = NULL;

    LOGI("YVideo::release")
}

void *decodeVideoPlay(void *data) {
    YVideo *video = (YVideo *) (data);
    video->initVideoStart();
    pthread_exit(&video->threadPlay);
}

/**
 * Starts the decode/render loop on its own thread; the handle is kept in
 * threadPlay so release() can join it later.
 */
void YVideo::play() {
    pthread_create(&threadPlay, NULL, decodeVideoPlay, static_cast<void *>(this));
}

/**
 * Computes how far the video clock lags behind the audio clock.
 *
 * Exactly one of the two sources is expected: the packet (hardware decode
 * path) takes precedence over the frame (software decode path).
 *
 * @param avFrame  decoded frame, or NULL
 * @param avPacket demuxed packet, or NULL
 * @return audio clock minus video clock, in seconds (positive = video late)
 */
double YVideo::getFrameDiffTime(AVFrame *avFrame, AVPacket *avPacket) {
    double pts = 0;
    if (avPacket != NULL) {
        pts = avPacket->pts;
    } else if (avFrame != NULL) {
        pts = av_frame_get_best_effort_timestamp(avFrame);
    }
    if (pts == AV_NOPTS_VALUE) {
        pts = 0; // no usable timestamp on this frame/packet
    }
    pts *= av_q2d(time_base); // stream units -> seconds

    if (pts > 0) {
        clock = pts; // advance the video clock only on valid timestamps
    }

    return audio->clock - clock;
}

/**
 * Adapts the per-frame render delay from the audio/video clock difference.
 *
 * diff > 0 means video is behind audio (shrink the delay); diff < 0 means
 * video is ahead (grow it).  Differences within +/-3 ms leave the delay
 * untouched.
 *
 * Fixes: removed the dead, empty `else if (diff == 0.003)` branch (float
 * equality with no body) and deduplicated the clamp logic that was copied
 * into both adjustment branches.
 *
 * @param diff audio clock minus video clock, in seconds
 * @return delay to sleep before rendering, in seconds
 */
double YVideo::getDelayTime(double diff) {
    if (diff > 0.003) {
        delayTime = delayTime * 2 / 3; // video late: speed up
    } else if (diff < -0.003) {
        delayTime = delayTime * 3 / 2; // video early: slow down
    }
    if (diff > 0.003 || diff < -0.003) {
        // Keep the adapted delay within a sane band around the frame-rate
        // default (same clamp the two branches previously duplicated).
        if (delayTime < defaultDelayTime / 2) {
            delayTime = defaultDelayTime * 2 / 3;
        } else if (delayTime > defaultDelayTime * 2) {
            delayTime = defaultDelayTime * 2;
        }
    }

    if (diff >= 0.5) {
        delayTime = 0; // far behind: render immediately
    } else if (diff <= -0.5) {
        delayTime = defaultDelayTime * 2; // far ahead: wait the maximum
    }

    if (fabs(diff) >= 10) {
        delayTime = defaultDelayTime; // hopelessly desynced: reset to default
    }
    return delayTime;
}

