//
// Created by yfux on 2022/4/18.
//

#include "Log.h"
#include "VideoPlayer.h"
#include <cmath>

extern "C" {
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>
}

/// Full constructor: forwards channel/callback/codec/time-base to BasePlayer,
/// records the stream frame rate, and prepares the mutex guarding `window`.
VideoPlayer::VideoPlayer(int channelId, JavaCallHelper *helper, AVCodecContext *avCodecContext,
                         AVRational &base, double fps) :
        BasePlayer(channelId, helper, avCodecContext, base),
        fps(fps) {
    // Default mutex attributes; protects window set/draw/release races.
    pthread_mutex_init(&windowMutex, nullptr);
}

/// Default constructor: only initializes the window mutex; everything else
/// keeps its in-class / BasePlayer defaults.
VideoPlayer::VideoPlayer() {
    pthread_mutex_init(&windowMutex, nullptr);
}

/// pthread trampoline: `args` is the VideoPlayer instance; runs its decode loop.
void *vdecode_t(void *args) {
    auto *self = static_cast<VideoPlayer *>(args);
    self->_decode();
    return nullptr;
}

/// pthread trampoline: `args` is the VideoPlayer instance; runs its render loop.
void *vplay_t(void *args) {
    auto *self = static_cast<VideoPlayer *>(args);
    self->_play();
    return nullptr;
}


/// Starts playback: raises the running flags, then spawns the decode thread
/// followed by the render thread. Flags must be set before either thread runs.
void VideoPlayer::play() {
    isPlaying = true;
    setEnable(true);
    decode();                                          // decode thread
    pthread_create(&playThread, nullptr, vplay_t, this); // render thread
}

/// Spawns the background thread that drains the packet queue into frames.
void VideoPlayer::decode() {
    pthread_create(&decodeThread, nullptr, vdecode_t, this);
}

// Stops playback: drops the running flag, disables the queues (which should
// unblock any pop() the worker threads are parked on), joins both threads,
// and finally releases the native window.
// NOTE(review): javaCallHelper is nulled BEFORE the threads are joined; if
// either worker thread dereferences javaCallHelper (in BasePlayer code not
// visible here) this is a potential use-after-null race — verify, and if so
// move the assignment after the joins.
void VideoPlayer::stop() {
    isPlaying = false;
    javaCallHelper = NULL;
    setEnable(false);
    pthread_join(decodeThread, 0);
    pthread_join(playThread, 0);
    if (window) {
        ANativeWindow_release(window);
        window = NULL;
    }
}

void VideoPlayer::_decode() {
    AVPacket *avPacket = NULL;
    AVFrame *avFrame = NULL;
    int ret;
//    LOGE("_decode begin...");
    while (isPlaying) {
        ret = avPacket_queue.pop(avPacket);
//        LOGE("ret = %s", av_err2str(ret));
        if (!isPlaying)break;
//        LOGE("isPlaying  = %d", isPlaying);
        if (!ret) continue;
        ret = avcodec_send_packet(avCodecContext, avPacket);
//        if (avCodecContext == NULL || avPacket == NULL) LOGE("null...");
        releaseAvPacket(avPacket);
        if (ret < 0) break;
//        LOGE("ret = %s", av_err2str(ret));
        avFrame = av_frame_alloc();
        ret = avcodec_receive_frame(avCodecContext, avFrame);
//        LOGE("ret2 = %s", av_err2str(ret));
        if (ret == AVERROR(EAGAIN)) continue;
        else if (ret < 0)break;
//        LOGE("avFrame : %x", avFrame);
        while (avFrame_queue.size() > 50 && isPlaying) {
            av_usleep(1000 * 10 * avFrame_queue.size() / 100);
//            LOGE("queue.size() : %d", avFrame_queue.size());
        }
        avFrame_queue.push(avFrame);
    }
    releaseAvPacket(avPacket);
//    LOGE("_decode end...");
}

void VideoPlayer::_play() {
    LOGE("width=%d,height=%d", avCodecContext->width, avCodecContext->height);
    SwsContext *swsContext = sws_getContext(avCodecContext->width, avCodecContext->height,
                                            avCodecContext->pix_fmt, avCodecContext->width,
                                            avCodecContext->height, AV_PIX_FMT_RGBA,
                                            SWS_FAST_BILINEAR, NULL, NULL, NULL);
    AVFrame *avFrame = NULL;
    int ret;
    double frame_delay = 1.0 / fps;
    uint8_t *data[4];
    int linesize[4];
    av_image_alloc(data, linesize, avCodecContext->width, avCodecContext->height,
                   AV_PIX_FMT_RGBA, 1);
    while (isPlaying) {
        ret = avFrame_queue.pop(avFrame);
        if (!isPlaying) break;
        if (!ret) continue;

        // 使视频不快速播放
        double extra_delay = avFrame->repeat_pict / (2 * fps);
        double delay = extra_delay + frame_delay;

        if (audioPlayer) {
            clock = avFrame->best_effort_timestamp * av_q2d(time_base);
            double diff = clock - audioPlayer->clock;
            //喵喵妙
            //sync为允许的误差范围
            double sync = FFMAX(AV_SYNC_THRESHOLD_MIN, FFMIN(AV_SYNC_THRESHOLD_MAX, delay));
            if (diff < -sync) {
                delay = FFMAX(0, delay + diff);
            } else if (diff > sync) {
                delay = delay + diff;
            }
//            LOGE("A:%lf V:%lf V-A:%lf delay:%lf", audioPlayer->clock, clock, diff, delay);
        }

        av_usleep(delay * 1000000);

        sws_scale(swsContext, avFrame->data, avFrame->linesize, 0, avFrame->height,
                  data, linesize);
        onDraw(data, linesize, avCodecContext->width, avCodecContext->height);
        releaseAvFrame(avFrame);
    }
    av_free(&data[0]);
    isPlaying = false;
    releaseAvFrame(avFrame);
    sws_freeContext(swsContext);
}

/// Installs a new output surface, releasing the previously held one.
/// Serialized against onDraw()/the render thread via windowMutex.
void VideoPlayer::setWindow(ANativeWindow *window) {
    pthread_mutex_lock(&windowMutex);
    ANativeWindow *previous = this->window;
    this->window = window;
    if (previous) {
        ANativeWindow_release(previous);
    }
    pthread_mutex_unlock(&windowMutex);
}

/// Destructor: tears down the window mutex. Callers are expected to have
/// invoked stop() first (threads joined, window released there).
VideoPlayer::~VideoPlayer() {
    pthread_mutex_destroy(&windowMutex);
}

/// Blits one RGBA frame (data[0]/linesize[0]) into the ANativeWindow,
/// row by row, honoring the window's own stride. Holds windowMutex for the
/// whole operation so setWindow()/stop() cannot release the surface mid-draw.
void VideoPlayer::onDraw(uint8_t **data, int *linesize, int width, int height) {
    pthread_mutex_lock(&windowMutex);
    if (!window) {
        pthread_mutex_unlock(&windowMutex);
        return;
    }
    ANativeWindow_setBuffersGeometry(window, width, height, WINDOW_FORMAT_RGBA_8888);
    ANativeWindow_Buffer buffer;
    if (ANativeWindow_lock(window, &buffer, 0)) {
        // Lock failed: drop the surface; a new one must arrive via setWindow().
        ANativeWindow_release(window);
        window = NULL;
        pthread_mutex_unlock(&windowMutex);
        return;
    }
    uint8_t *dstData = static_cast<uint8_t *>(buffer.bits);
    int dstStride = buffer.stride * 4;  // stride is in pixels; RGBA = 4 bytes
    uint8_t *srcData = data[0];
    int srcStride = linesize[0];

    // BUGFIX: previously copied srcStride bytes into every destination row and
    // iterated buffer.height rows over a source with `height` rows — a buffer
    // overflow whenever the window stride is narrower than the source linesize
    // (linesize may include alignment padding) or the window is taller.
    int rowBytes = srcStride < dstStride ? srcStride : dstStride;
    int rows = buffer.height < height ? buffer.height : height;
    for (int i = 0; i < rows; i++) {
        memcpy(dstData + i * dstStride, srcData + i * srcStride, rowBytes);
    }

    ANativeWindow_unlockAndPost(window);
    pthread_mutex_unlock(&windowMutex);
}
