//
// Created by amon.huang on 2021/7/28.
//

#include "Video.h"
#include "MusicConstDefine.h"

#define IJKALIGN(x, align) ((( x ) + (align) - 1) / (align) * (align))
#define IJKMIN(a, b)    ((a) < (b) ? (a) : (b))

// Builds the video pipeline object on top of MediaBase; the Audio pointer is
// kept (not owned) so the render loop can pace itself against the audio clock.
Video::Video(int videoStreamIndex, JNICall *pJniCall, PlayerStatus *pPlayerStatus, Audio *pAudio)
        : MediaBase(videoStreamIndex, pJniCall, pPlayerStatus), playThreadT(0) {
    // seekMutex guards seek-related state; created here, destroyed in release().
    pthread_mutex_init(&seekMutex, nullptr);
    this->pAudio = pAudio;
}

// Tears down all native resources (decode thread, swscale context, frame
// buffers, JNI surface ref) via release().
Video::~Video() {
    release();
}

// Stops the render thread and frees every native resource owned by this
// object. Safe to call more than once (destructor also calls it).
void Video::release() {
    // Signal the decode/render thread to exit and wait for it before tearing
    // down state it may still be reading.
    pPlayerStatus->isExit = true;
    if (playThreadT != 0) {
        pthread_join(playThreadT, nullptr);
        playThreadT = 0;
    }
    MediaBase::release();

    if (pSwsContext) {
        // sws_freeContext() deallocates the context itself; the previous
        // extra free(pSwsContext) was a double free (undefined behavior).
        sws_freeContext(pSwsContext);
        pSwsContext = nullptr;
    }

    if (pFrameBuffer) {
        free(pFrameBuffer);
        pFrameBuffer = nullptr;
    }

    if (pRGBAFrame) {
        // av_frame_free() nulls the pointer for us.
        av_frame_free(&pRGBAFrame);
    }

    // Only drop the global ref if one was actually taken; also null it so a
    // second release() cannot delete a stale reference.
    if (pJniCall && jSurface) {
        pJniCall->jniEnv->DeleteGlobalRef(jSurface);
        jSurface = nullptr;
    }

    pthread_mutex_destroy(&seekMutex);
}

// Copies a tightly packed RGB(A) frame into a locked ANativeWindow buffer,
// honoring the window's stride. `oh` is the source height in rows, `olinesize`
// the source line size in bytes, `bpp` the bits per pixel of the format.
static int android_render_rgb_on_rgb(ANativeWindow_Buffer *out_buffer, int oh, int olinesize,
                                     uint8_t *src_pixels, int bpp) {
    const int copy_rows = IJKMIN(out_buffer->height, oh);
    const int src_row_bytes = olinesize;
    const int dst_row_bytes = out_buffer->stride * bpp / 8;
    auto *dst_pixels = static_cast<uint8_t *>(out_buffer->bits);

    if (src_row_bytes == dst_row_bytes) {
        // Matching strides: the visible area is one contiguous span.
        memcpy(dst_pixels, src_pixels, src_row_bytes * copy_rows);
    } else {
        // Stride mismatch: copy row by row, clipping each row to the
        // narrower of the two line sizes.
        const int row_bytes = IJKMIN(dst_row_bytes, src_row_bytes);
        av_image_copy_plane(dst_pixels, dst_row_bytes,
                            src_pixels, src_row_bytes,
                            row_bytes, copy_rows);
    }

    return 0;
}

void *threadVideoPlay(void *context) {
    auto *pVideo = (Video *) context;

    if (!pVideo->jSurface) {
        return ((void *) nullptr);
    }
    JNIEnv *env = pVideo->pJniCall->jniEnv;
    JavaVM *javaVm = pVideo->pJniCall->javaVM;
    if (javaVm->AttachCurrentThread(&env, nullptr) != JNI_OK) {
        return ((void *) nullptr);
    }
    ANativeWindow *pNativeWindow = ANativeWindow_fromSurface(env, pVideo->jSurface);
    javaVm->DetachCurrentThread();

    int buff_w = IJKALIGN(pVideo->pCodecContext->width, 2);
    int buff_h = IJKALIGN(pVideo->pCodecContext->height, 2);

    ANativeWindow_setBuffersGeometry(pNativeWindow, buff_w, buff_h,
                                     WINDOW_FORMAT_RGBA_8888);
    ANativeWindow_Buffer outBuffer;
    AVPacket *pPacket = nullptr;
    AVFrame *pFrame = av_frame_alloc();
    while (pVideo->pPlayerStatus != nullptr && !pVideo->pPlayerStatus->isExit) {
        pPacket = pVideo->pPackQueue->pop();
        if (pPacket == nullptr) {
            continue;
        }
        int codecSendPacketRes = avcodec_send_packet(pVideo->pCodecContext, pPacket);
        if (codecSendPacketRes == 0) {
            int codecReceiveFrameRes = avcodec_receive_frame(pVideo->pCodecContext, pFrame);
            if (codecReceiveFrameRes == 0) {
                pFrame->sample_aspect_ratio = av_guess_sample_aspect_ratio(pVideo->pFormatContext,
                                                                           pVideo->pStreams,
                                                                           pFrame);
                sws_scale(pVideo->pSwsContext, pFrame->data, pFrame->linesize, 0,
                          pFrame->height, pVideo->pRGBAFrame->data,
                          pVideo->pRGBAFrame->linesize);
                double frameSleepTime = pVideo->getFrameSleepTime(pFrame);
                auto sleep = (unsigned) unsigned(frameSleepTime * 1000000);
                av_usleep(sleep);
                ANativeWindow_lock(pNativeWindow, &outBuffer, nullptr);
//                memcpy(outBuffer.bits, pVideo->pFrameBuffer, pVideo->frameSize);
                android_render_rgb_on_rgb(&outBuffer, pFrame->height,
                                          pVideo->pRGBAFrame->linesize[0], pVideo->pFrameBuffer,
                                          32);
                ANativeWindow_unlockAndPost(pNativeWindow);
            }
        }
        av_packet_unref(pPacket);
        av_frame_unref(pFrame);
    }
    av_packet_free(&pPacket);
    av_frame_free(&pFrame);
    return nullptr;
}

// Pause playback. Not implemented yet — the render thread currently cannot be
// paused; it only stops via pPlayerStatus->isExit. TODO: implement.
void Video::pause() {

}

// Starts the decode/render thread (threadVideoPlay) with `this` as context.
void Video::play() {
    // pthread_create leaves the handle indeterminate on failure; reset it so
    // release() does not pthread_join() a garbage thread id.
    if (pthread_create(&playThreadT, nullptr, threadVideoPlay, this) != 0) {
        playThreadT = 0;
    }
}

// Seek to the given position in seconds. Not implemented yet — seekMutex is
// initialized in the constructor, presumably to guard this path. TODO: implement.
void Video::seek(uint64_t second) {

}

// Called once the video stream's codec context is ready: reports the video
// size to the Java layer, sets up the YUV->RGBA conversion pipeline, and
// derives the default inter-frame delay from the stream's average frame rate.
void Video::analysisStreamInner(ThreadMode threadMode, AVFormatContext *formatContext) {
    this->pFormatContext = formatContext;
    // Notify the Java side of the decoded video dimensions.
    pJniCall->callVideoInfo(threadMode, pCodecContext->width, pCodecContext->height);

    // Converter from the decoder's native pixel format to RGBA for the surface.
    pSwsContext = sws_getContext(pCodecContext->width, pCodecContext->height,
                                 pCodecContext->pix_fmt,
                                 pCodecContext->width, pCodecContext->height, AV_PIX_FMT_RGBA,
                                 SWS_BILINEAR, nullptr, nullptr, nullptr);
    pRGBAFrame = av_frame_alloc();
    // av_image_get_buffer_size() returns a negative AVERROR on failure; never
    // feed that into malloc().
    frameSize = av_image_get_buffer_size(AV_PIX_FMT_RGBA,
                                         pCodecContext->width,
                                         pCodecContext->height,
                                         1);
    if (pRGBAFrame != nullptr && frameSize > 0) {
        pFrameBuffer = (uint8_t *) malloc((size_t) frameSize);
        if (pFrameBuffer != nullptr) {
            // Point pRGBAFrame's planes into the freshly allocated buffer.
            av_image_fill_arrays(pRGBAFrame->data, pRGBAFrame->linesize, pFrameBuffer,
                                 AV_PIX_FMT_RGBA,
                                 pCodecContext->width, pCodecContext->height, 1);
        }
    }

    this->pStreams = pFormatContext->streams[streamIndex];
    // defaultDelayTime = seconds per frame (1 / fps) from the average rate.
    int num = this->pStreams->avg_frame_rate.num;
    int den = this->pStreams->avg_frame_rate.den;
    if (num != 0 && den != 0) {
        defaultDelayTime = 1.0 * den / num;
        LOGE("%d, %d, %f", num, den, defaultDelayTime);
    }
}

// Stores a JNI global reference to the Java Surface used for rendering.
// Must be called on a thread where pJniCall->jniEnv is valid.
void Video::setNativeSurface(jobject surface) {
    // Replacing an existing surface previously leaked its global ref;
    // release the old one first.
    if (jSurface != nullptr) {
        pJniCall->jniEnv->DeleteGlobalRef(jSurface);
        jSurface = nullptr;
    }
    if (surface != nullptr) {
        this->jSurface = pJniCall->jniEnv->NewGlobalRef(surface);
    }
}

// Computes how long (in seconds) the render loop should sleep before showing
// this frame, adjusting delayTime so video stays in sync with the audio clock
// (pAudio->currentTime). Returns the adjusted delay in seconds.
// NOTE(review): av_frame_get_best_effort_timestamp() is deprecated/removed in
// newer FFmpeg (use pFrame->best_effort_timestamp) — confirm the FFmpeg
// version this project builds against.
double Video::getFrameSleepTime(AVFrame *pFrame) {
    // Frame PTS converted to seconds via the stream time base; only advance
    // the video clock monotonically.
    double times = av_frame_get_best_effort_timestamp(pFrame) * av_q2d(timeBase);
    if (times > currentTime) {
        currentTime = times;
    }
    // diffTime > 0: video lags audio (speed up); < 0: video leads (slow down).
    double diffTime = pAudio->currentTime - currentTime;
    if (diffTime > 0.016 || diffTime < -0.016) {
        if (diffTime > 0.016) {
            // Video behind audio: shorten the per-frame delay.
            delayTime = delayTime * 2 / 3;
        } else if (diffTime < -0.016) {
            // Video ahead of audio: lengthen the per-frame delay.
            delayTime = delayTime * 3 / 2;
        }

        // Clamp the adjusted delay near the nominal frame interval so one
        // outlier cannot drive the delay to an extreme.
        if (delayTime < defaultDelayTime / 2) {
            delayTime = defaultDelayTime * 2 / 3;
        } else if (delayTime > defaultDelayTime * 2) {
            delayTime = defaultDelayTime * 3 / 2;
        }
    }
    // Large drift: jump rather than converge gradually.
    if (diffTime >= 0.25) {
        // Video far behind: render immediately.
        delayTime = 0;
    } else if (diffTime <= -0.25) {
        // Video far ahead: hold for two nominal frame intervals.
        delayTime = defaultDelayTime * 2;
    }
//    LOGE("times:%lf, audio currentTime:%lf, frameSleepTime:%lf",
//         currentTime, pAudio->currentTime, delayTime);
    return delayTime;
}
