//
// Created by shen on 2025/2/22.
//

#include "VideoPlayer.h"



void VideoPlayer::play() {
    // Start the decode/render thread (runs videoPlay until ffstatus->exit).
    // Fix: guard against a second call — assigning to an already-joinable
    // std::thread would invoke std::terminate().
    if (m_Thread.joinable()) {
        return;
    }
    if (ffstatus != nullptr && !ffstatus->exit) {
        m_Thread = std::thread(videoPlay, this);
    }
}

void VideoPlayer::OnFrameAvailable(AVFrame *frame) {
    // Wrap a decoded frame in a NativeImage and hand it to the GL renderer.
    // Handled formats: I420 (incl. JPEG full-range), NV12, NV21, RGBA.
    // NOTE(review): assumes RenderVideoFrame() copies/consumes the plane data
    // synchronously — the AVFrame remains owned by the caller.
    if (getGLRender() == nullptr || frame == nullptr) {
        return;
    }

    NativeImage image;
    image.width = frame->width;
    image.height = frame->height;

    switch (avCodecConext->pix_fmt) {
        case AV_PIX_FMT_YUV420P:
        case AV_PIX_FMT_YUVJ420P:
            image.format = IMAGE_FORMAT_I420;
            image.pLineSize[0] = frame->linesize[0];
            image.pLineSize[1] = frame->linesize[1];
            image.pLineSize[2] = frame->linesize[2];
            image.ppPlane[0] = frame->data[0];
            image.ppPlane[1] = frame->data[1];
            image.ppPlane[2] = frame->data[2];
            // On some Android devices the h264 MediaCodec decoder reports
            // YUV420P but actually outputs NV12 (two planes, equal strides).
            if (frame->data[0] && frame->data[1] && !frame->data[2]
                && frame->linesize[0] == frame->linesize[1]
                && frame->linesize[2] == 0) {
                image.format = IMAGE_FORMAT_NV12;
            }
            break;
        case AV_PIX_FMT_NV12:
            image.format = IMAGE_FORMAT_NV12;
            image.pLineSize[0] = frame->linesize[0];
            image.pLineSize[1] = frame->linesize[1];
            image.ppPlane[0] = frame->data[0];
            image.ppPlane[1] = frame->data[1];
            break;
        case AV_PIX_FMT_NV21:
            image.format = IMAGE_FORMAT_NV21;
            image.pLineSize[0] = frame->linesize[0];
            image.pLineSize[1] = frame->linesize[1];
            image.ppPlane[0] = frame->data[0];
            image.ppPlane[1] = frame->data[1];
            break;
        case AV_PIX_FMT_RGBA:
            image.format = IMAGE_FORMAT_RGBA;
            image.pLineSize[0] = frame->linesize[0];
            image.ppPlane[0] = frame->data[0];
            break;
        default:
            // Fix: the original fell through its commented-out sws_scale
            // fallback and rendered a completely uninitialized NativeImage.
            // Skip rendering instead.
            // TODO: implement the sws_scale->RGBA fallback for other formats.
            SLog::GetInstance()->getLogger()->info(
                    "OnFrameAvailable: unsupported pix_fmt = {}",
                    static_cast<int>(avCodecConext->pix_fmt));
            return;
    }

    SLog::GetInstance()->getLogger()->info("image.format = {}", image.format);
    getGLRender()->RenderVideoFrame(&image);
}
// Scratch state used by the decode thread to time frame-cache refills
// (see videoPlay: startTime is set when frameQueue empties and compared
// against an end time once the cache fills).
// NOTE(review): file-scope globals with external linkage — consider making
// them `static` or class members; verify no other TU references them first.
struct timeval tv;
long startTime;

// Compute how far the video is behind (+) or ahead of (-) the audio clock,
// in seconds. Returns 0 when there is no audio player or in preview mode.
// `aPacket`'s pts takes precedence over `avFrame`'s when both are given.
double VideoPlayer::getFrameDiffTime(AVFrame *avFrame, AVPacket *aPacket) {
    if (audioPlayer == nullptr || ffstatus->isFramePreview) {
        return 0;
    }

    // Fix: pts was uninitialized and read when both arguments were null (UB).
    // 0 preserves the old "non-positive pts leaves `lock` untouched" behavior.
    double pts = 0;

    if (avFrame != nullptr) {
        pts = avFrame->best_effort_timestamp;
    }
    if (aPacket != nullptr) {
        pts = aPacket->pts;
    }
    if (pts == AV_NOPTS_VALUE) {
        pts = 0;
    }
    // Convert from stream time base units to seconds.
    pts *= av_q2d(timeBase);
    if (pts > 0) {
        // Remember the last valid video timestamp (member `lock`, seconds).
        lock = pts;
    }
    return audioPlayer->clock - lock;
}

// Adapt the per-frame sleep (member `delayTime`, seconds) from the A/V clock
// difference `diff`: positive means video lags audio, negative means it leads.
// Returns the new delay to sleep before presenting the next frame.
double VideoPlayer::getDelayTime(double diff) {
    if (diff > 0.003) {
        // Video behind audio: shrink the inter-frame delay, clamped to
        // [defaultDelayTime * 2/3, defaultDelayTime * 2].
        delayTime = delayTime * 2 / 3;
        if (delayTime < defaultDelayTime / 2) {
            delayTime = defaultDelayTime * 2 / 3;
        } else if (delayTime > defaultDelayTime * 2) {
            delayTime = defaultDelayTime * 2;
        }
    } else if (diff < -0.003) {
        // Video ahead of audio: stretch the inter-frame delay, same clamping.
        delayTime = delayTime * 3 / 2;
        if (delayTime < defaultDelayTime / 2) {
            delayTime = defaultDelayTime * 2 / 3;
        } else if (delayTime > defaultDelayTime * 2) {
            delayTime = defaultDelayTime * 2;
        }
    }
    // (The original's empty `else if (diff == 0.003)` branch was a no-op
    // and has been removed.)

    if (diff >= 0.5) {
        // Far behind: render as fast as possible.
        delayTime = 0;
    } else if (diff <= -0.5) {
        // Far ahead: double the default frame interval.
        delayTime = defaultDelayTime * 2;
    }
    if (fabs(diff) >= 10) {
        // Clocks wildly out of sync (e.g. after seek): reset to the default.
        delayTime = defaultDelayTime;
    }

    return delayTime;
}
// Entry point of the video decode/render thread (started by play()).
// Pulls packets from `queue`, decodes them, and either renders frames
// directly (normal playback, A/V-synced) or caches them in `frameQueue`
// (frame-preview mode). Runs until ffstatus->exit is set.
void *VideoPlayer::videoPlay(void *arg) {
    auto *videoParam = static_cast<VideoPlayer *>(arg);

    int dstSize[2] = {0};
    // NOTE(review): assumes setGLRender() was called before play();
    // a null render here would crash.
    videoParam->getGLRender()->Init(videoParam->avCodecConext->width,
                                    videoParam->avCodecConext->height, dstSize);

    while (videoParam->ffstatus != nullptr && !videoParam->ffstatus->exit) {

        if (videoParam->frameQueue->getQueueSize() == 0) {
            // Frame cache is empty: start timing the refill (logged below
            // once the cache reaches cacheFrameNum frames).
            gettimeofday(&tv, nullptr);
            startTime = tv.tv_sec * 1000 + tv.tv_usec / 1000;
        }

        if (videoParam->ffstatus->isPause) {
            av_usleep(1000 * 100);   // paused: poll every 100 ms
            continue;
        }

        // First pass: wait for the packet queue to fill (loading state).
        if (!videoParam->ffstatus->isInited) {
            videoParam->ffstatus->isInited = true;
            if (videoParam->queue->getQueueSize() == 0) {
                av_usleep(1000 * 20);
                LOGCATI("loading");
                continue;
            }
        }

        // Frame-preview mode: stop decoding while the cache is full.
        if (videoParam->ffstatus->isFramePreview) {
            if (videoParam->frameQueue->getQueueSize() >= videoParam->cacheFrameNum) {
                av_usleep(1000 * 10);
                continue;
            }
        }

        if (videoParam->ffstatus->isSeekPause &&
            videoParam->ffstatus->isBackSeekFramePreview) {
            av_usleep(1000 * 10);
            continue;
        }

        // Decide once whether this iteration consumes an already-decoded
        // cached frame (normal playback with a non-empty frame cache) or
        // demuxes + decodes a fresh packet. The original re-evaluated the
        // queue size at three points, which could disagree mid-iteration.
        const bool useCachedFrame = !videoParam->ffstatus->isFramePreview
                                    && videoParam->frameQueue->getQueueSize() > 0;

        AVPacket *avPacket = av_packet_alloc();
        if (!useCachedFrame) {
            if (videoParam->queue->getAvPacket(avPacket) != 0) {
                av_packet_free(&avPacket);
                continue;
            }
        }

        // Decoder access is serialized against the seek logic.
        // NOTE(review): as in the original, the lock is held through the
        // sync sleep and render below.
        std::lock_guard<std::mutex> guard(videoParam->codecMutex);

        if (!useCachedFrame) {
            // Feed the raw packet to the decoder.
            if (avcodec_send_packet(videoParam->avCodecConext, avPacket) != 0) {
                av_packet_free(&avPacket);
                continue;
            }
        }

        AVFrame *avFrame = av_frame_alloc();
        if (useCachedFrame) {
            videoParam->frameQueue->getAvFrame(avFrame);
        } else {
            // Pull a decoded frame; EAGAIN/EOF simply restarts the loop.
            if (avcodec_receive_frame(videoParam->avCodecConext, avFrame) != 0) {
                av_frame_free(&avFrame);
                av_packet_free(&avPacket);
                continue;
            }
        }

        if (avFrame->format == AV_PIX_FMT_YUV420P
            || avFrame->format == AV_PIX_FMT_YUVJ420P) {

            if (videoParam->ffstatus->isFramePreview) {
                if (videoParam->ffstatus->isBackSeekFramePreview
                    || (!videoParam->ffstatus->isBackSeekFramePreview
                        && videoParam->ffstatus->isBackSeekForAdvance)) {
                    // Backward seek: drop frames whose pts (in AV_TIME_BASE
                    // units) is still before the seek target.
                    if (avFrame->pts * av_q2d(videoParam->timeBase) * AV_TIME_BASE
                        < videoParam->seekTime) {
                        av_frame_free(&avFrame);
                        av_packet_free(&avPacket);
                        continue;
                    }
                    videoParam->ffstatus->isSeekPause = true;
                    videoParam->ffstatus->isShowSeekFrame = false;
                    gettimeofday(&tv, nullptr);
                    long endTime = tv.tv_sec * 1000 + tv.tv_usec / 1000;
                    (void) endTime;  // only consumed by a disabled timing log
                    // Falls through so the seek-target frame is rendered below.
                } else {
                    // Forward preview: cache the decoded frame.
                    // (The original's disabled crop branch — `&& false` — and
                    // its unused width computation were removed as dead code.)
                    videoParam->ffstatus->isCrop = false;
                    avFrame->pts = avFrame->best_effort_timestamp;
                    if ((avFrame->pts * av_q2d(videoParam->timeBase) * AV_TIME_BASE) == 0
                        && videoParam->seekTime == 0) {
                        // First frame at t=0 is discarded. TODO: render it.
                        av_frame_free(&avFrame);
                    } else {
                        // Ownership of avFrame transfers to the frame queue.
                        videoParam->frameQueue->putAvFrame(avFrame);
                    }

                    videoParam->ffstatus->isShowSeekFrame = true;

                    if (videoParam->frameQueue->getQueueSize() == videoParam->cacheFrameNum) {
                        gettimeofday(&tv, nullptr);
                        long endTime = tv.tv_sec * 1000 + tv.tv_usec / 1000;
                        // Time spent soft-decoding a full frame cache.
                        LOGCATI("软解码60帧耗时：%ld", (endTime - startTime));
                    }
                    av_packet_free(&avPacket);
                    continue;
                }
            }

            // Normal playback: sleep to keep video aligned to the audio clock.
            if (!videoParam->ffstatus->isFramePreview) {
                double diff = videoParam->getFrameDiffTime(avFrame, avPacket);
                double delay = videoParam->getDelayTime(diff);
                av_usleep(delay * AV_TIME_BASE);   // delay is in seconds
            }

            videoParam->OnFrameAvailable(avFrame);

            // NOTE(review): truncates to whole seconds, so the /1000000 below
            // is almost always 0 — kept as in the original; verify intent.
            int64_t currentTime = avFrame->pts * av_q2d(videoParam->timeBase);

            if (videoParam->ffstatus->isBackSeekFramePreview
                && videoParam->ffstatus->isBackSeekForAdvance) {
                double pts = currentTime / 1000000;
                if (videoParam->showFrameTimestamp >= (pts - 0.06) ||
                    videoParam->showFrameTimestamp <= pts) {
                    videoParam->ffstatus->isBackSeekForAdvance = false;
                }
            }
        } else {
            // Non-YUV420P frame: convert to YUV420P with swscale.
            // NOTE(review): the converted frame is only logged, never rendered
            // (TODO in the original); conversion kept for parity.
            AVFrame *avFrameYUV420P = av_frame_alloc();

            int size = av_image_get_buffer_size(AV_PIX_FMT_YUV420P,
                                                videoParam->avCodecConext->width,
                                                videoParam->avCodecConext->height,
                                                1);

            // Backing buffer for the converted planes.
            uint8_t *buffer = static_cast<uint8_t *>(av_malloc(size * sizeof(uint8_t)));

            av_image_fill_arrays(
                    avFrameYUV420P->data,
                    avFrameYUV420P->linesize,
                    buffer,
                    AV_PIX_FMT_YUV420P,
                    videoParam->avCodecConext->width,
                    videoParam->avCodecConext->height,
                    1);

            SwsContext *swsContext = sws_getContext(
                    videoParam->avCodecConext->width,
                    videoParam->avCodecConext->height,
                    videoParam->avCodecConext->pix_fmt,
                    videoParam->avCodecConext->width,
                    videoParam->avCodecConext->height,
                    AV_PIX_FMT_YUV420P,
                    SWS_FAST_BILINEAR, nullptr, nullptr, nullptr);

            if (!swsContext) {
                av_frame_free(&avFrameYUV420P);
                av_free(buffer);
                // Fix: the source frame and packet were leaked on this path.
                av_frame_free(&avFrame);
                av_packet_free(&avPacket);
                continue;
            }

            sws_scale(
                    swsContext,
                    reinterpret_cast<const uint8_t *const *>(avFrame->data),
                    avFrame->linesize,
                    0,
                    avFrame->height,
                    avFrameYUV420P->data,
                    avFrameYUV420P->linesize);

            SLog::GetInstance()->getLogger()->info("transform ending render ");

            av_frame_free(&avFrameYUV420P);
            av_free(buffer);
            sws_freeContext(swsContext);
        }

        // Fix: the decoded frame and its packet were leaked on every rendered
        // iteration; release them now that the frame has been consumed.
        // (The redundant `av_free(p)` calls after `av_*_free(&p)` in the
        // original were no-ops — those helpers free the struct and null the
        // pointer — and have been dropped.)
        av_frame_free(&avFrame);
        av_packet_free(&avPacket);
    }

    return nullptr;
}

// Build a video player bound to the shared playback state.
// Owns the raw packet queue and the decoded-frame cache (released in ~VideoPlayer).
VideoPlayer::VideoPlayer(FFStatus *ffstatus) : BasePlayer(ffstatus) {
    queue=new SafeQueue(ffstatus);
    frameQueue=new AVFrameQueue(ffstatus);

}

VideoPlayer::~VideoPlayer() {
    // Fix: destroying a joinable std::thread calls std::terminate(), and the
    // decode thread still uses `queue`/`frameQueue`. Join it before teardown.
    // NOTE(review): the loop only exits once ffstatus->exit is set — the
    // owner is expected to set it before deleting the player; verify callers.
    if (m_Thread.joinable()) {
        m_Thread.join();
    }
    delete queue;
    delete frameQueue;
    VideoGLRender::ReleaseInstance();
}


// Inject the GL renderer used to draw decoded frames.
// NOTE(review): stored as a raw, non-owning pointer — the caller manages
// its lifetime; it must outlive the decode thread.
void VideoPlayer::setGLRender(VideoRender *baseGLRender) {
    this->videoRender = baseGLRender;
}

// Return the injected GL renderer; may be null if setGLRender() was not called.
VideoRender *VideoPlayer::getGLRender() {
    return videoRender;
}

//VideoPlayer::VideoPlayer(FFStatus *ffstatus) : ffstatus(ffstatus) {
//
//    queue=new SafeQueue(ffstatus);
//    frameQueue=new AVFrameQueue(ffstatus);
//}

//VideoPlayer::~VideoPlayer() {
//
//    delete queue;
//
//    delete frameQueue;
//}



