//
// Created by RandBII on 2020/12/27.
//

#include "VideoDecoder.h"

// Construct a decoder bound to the given media URL.
// The base-class Init() opens the stream and selects the video track.
VideoDecoder::VideoDecoder(char *url) {
    Init(url, AVMEDIA_TYPE_VIDEO);
}

// Called once the codec context is open: caches the stream dimensions,
// notifies listeners (MSG_DECODER_READY), then builds the RGBA conversion
// pipeline (destination frame, pixel buffer, SwsContext) for the renderer.
void VideoDecoder::OnDecoderReady() {
    m_videoWidth = GetCodecContext()->width;
    m_videoHeight = GetCodecContext()->height;
    if (m_MsgContext && m_MessageCallback) m_MessageCallback(m_MsgContext, MSG_DECODER_READY, 0);

    if (m_VideoRender == nullptr) {
        LOG_E("----VideoDecoder-OnDecoderReady--m_VideoRender is Null");
        return;
    }

    // The renderer chooses the actual output size (it may scale to its surface).
    int dstSize[2] = {0};
    m_VideoRender->Init(m_videoWidth, m_videoHeight, dstSize);
    m_RenderWidth = dstSize[0];
    m_RenderHeight = dstSize[1];

    // Destination frame + backing pixel buffer for sws_scale() output.
    m_RGBAFrame = av_frame_alloc();
    if (m_RGBAFrame == nullptr) {
        LOG_E("----VideoDecoder-OnDecoderReady--av_frame_alloc failed");
        return;
    }
    int bufferSize = av_image_get_buffer_size(DST_PIXEL_FORMAT, m_RenderWidth, m_RenderHeight,
                                              1);
    if (bufferSize < 0) {
        LOG_E("----VideoDecoder-OnDecoderReady--av_image_get_buffer_size failed");
        return;
    }
    m_FrameBuffer = static_cast<uint8_t *>(av_malloc(bufferSize));
    if (m_FrameBuffer == nullptr) {
        LOG_E("----VideoDecoder-OnDecoderReady--av_malloc failed");
        return;
    }
    // Point m_RGBAFrame's data/linesize at the buffer (alignment 1).
    av_image_fill_arrays(m_RGBAFrame->data,
                         m_RGBAFrame->linesize,
                         m_FrameBuffer,
                         DST_PIXEL_FORMAT,
                         m_RenderWidth,
                         m_RenderHeight,
                         1);

    // Converter: decoded pix_fmt at stream size -> DST_PIXEL_FORMAT at render size.
    m_SwsContext = sws_getContext(m_videoWidth, m_videoHeight,
                                  GetCodecContext()->pix_fmt, m_RenderWidth, m_RenderHeight,
                                  DST_PIXEL_FORMAT, SWS_FAST_BILINEAR,
                                  NULL, NULL, NULL);
    if (m_SwsContext == nullptr) {
        LOG_E("----VideoDecoder-OnDecoderReady--sws_getContext failed");
    }
}


// Wraps (or converts) a decoded AVFrame into a NativeImage and hands it to
// the renderer. An ANativeWindow renderer only accepts RGBA, so that path
// always converts via sws_scale(); other renderers consume YUV planes
// directly when the pixel format is one they understand.
//
// FIX: NativeImage.format must hold IMAGE_FORMAT_* constants (as the I420
// and ANWINDOW paths already did) — the original assigned FFmpeg
// AV_PIX_FMT_* enum values, a different enumeration with different numeric
// values, so the renderer would misinterpret the frame layout. Also fills
// pLineSize[0] on the RGBA paths: `image` is uninitialized stack memory,
// so any field the renderer reads must be set explicitly.
void VideoDecoder::OnFrameAvailable(AVFrame *avFrame) {
    if (m_VideoRender == nullptr || avFrame == nullptr) return;

    NativeImage image;
    if (m_VideoRender->GetRenderType() == VIDEO_RENDER_ANWINDOW) {
        // ANativeWindow path: convert whatever the decoder produced to RGBA.
        sws_scale(m_SwsContext, avFrame->data, avFrame->linesize, 0, m_videoHeight,
                  m_RGBAFrame->data, m_RGBAFrame->linesize);
        image.format = IMAGE_FORMAT_RGBA;
        image.width = m_RenderWidth;
        image.height = m_RenderHeight;
        image.pLineSize[0] = m_RGBAFrame->linesize[0];
        image.ppPlane[0] = m_RGBAFrame->data[0];
    } else {
        switch (GetCodecContext()->pix_fmt) {
            case AV_PIX_FMT_YUV420P:
            case AV_PIX_FMT_YUVJ420P:
                image.format = IMAGE_FORMAT_I420;
                image.width = avFrame->width;
                image.height = avFrame->height;
                image.pLineSize[0] = avFrame->linesize[0];
                image.pLineSize[1] = avFrame->linesize[1];
                image.pLineSize[2] = avFrame->linesize[2];

                image.ppPlane[0] = avFrame->data[0];
                image.ppPlane[1] = avFrame->data[1];
                image.ppPlane[2] = avFrame->data[2];

                // Compatibility: some devices report 420P but actually deliver
                // NV12 (single interleaved UV plane, no third plane).
                if (avFrame->data[0]
                    && avFrame->data[1]
                    && !avFrame->data[2]
                    && avFrame->linesize[0] == avFrame->linesize[1]
                    && avFrame->linesize[2] == 0) {
                    image.format = IMAGE_FORMAT_NV12; // was AV_PIX_FMT_NV12 (wrong enum)
                }

                break;
            case AV_PIX_FMT_NV12:
                image.format = IMAGE_FORMAT_NV12; // was AV_PIX_FMT_NV12 (wrong enum)
                image.width = avFrame->width;
                image.height = avFrame->height;
                image.pLineSize[0] = avFrame->linesize[0];
                image.pLineSize[1] = avFrame->linesize[1];

                image.ppPlane[0] = avFrame->data[0];
                image.ppPlane[1] = avFrame->data[1];

                break;
            case AV_PIX_FMT_NV21:
                image.format = IMAGE_FORMAT_NV21; // was AV_PIX_FMT_NV21 (wrong enum)
                image.width = avFrame->width;
                image.height = avFrame->height;
                image.pLineSize[0] = avFrame->linesize[0];
                image.pLineSize[1] = avFrame->linesize[1];

                image.ppPlane[0] = avFrame->data[0];
                image.ppPlane[1] = avFrame->data[1];

                break;
            case AV_PIX_FMT_RGBA:
                image.format = IMAGE_FORMAT_RGBA; // was AV_PIX_FMT_RGBA (wrong enum)
                image.width = avFrame->width;
                image.height = avFrame->height;
                image.pLineSize[0] = avFrame->linesize[0];
                image.ppPlane[0] = avFrame->data[0];
                break;
            default:
                // Unknown format: fall back to an RGBA conversion.
                sws_scale(m_SwsContext,
                          avFrame->data,
                          avFrame->linesize,
                          0,
                          m_videoHeight,
                          m_RGBAFrame->data,
                          m_RGBAFrame->linesize);
                image.format = IMAGE_FORMAT_RGBA; // was AV_PIX_FMT_RGBA (wrong enum)
                image.width = m_RenderWidth;
                image.height = m_RenderHeight;
                image.pLineSize[0] = m_RGBAFrame->linesize[0];
                image.ppPlane[0] = m_RGBAFrame->data[0];
                break;
        }
    }
    m_VideoRender->RenderVideoFrame(&image);
    if (m_MsgContext && m_MessageCallback) m_MessageCallback(m_MsgContext, MSG_REQUEST_RENDER, 0);
}

// Tears down the decoder via the base-class UnInit().
VideoDecoder::~VideoDecoder() {
    UnInit();
}

// Width of the decoded stream in pixels; valid after OnDecoderReady() ran.
int VideoDecoder::GetVideoWidth() {
    return m_videoWidth;
}

// Height of the decoded stream in pixels; valid after OnDecoderReady() ran.
int VideoDecoder::GetVideoHeight() {
    return m_videoHeight;
}

// Attaches the render target. Non-owning: the caller keeps ownership of
// the VideoRender instance and is responsible for its lifetime.
void VideoDecoder::SetVideoRender(VideoRender *videoRender) {
    m_VideoRender = videoRender;
}

long VideoDecoder::GetVideoDecoderForTimestampAVSync(void *context) {
    if (context) {
        auto *videoDecoder = static_cast<VideoDecoder *>(context);
        return videoDecoder->GetCurrentPosition();
    }
    return 0;
}

// TODO: stub — playback-position tracking is not implemented; always 0.
float VideoDecoder::GetCurrentPosition() {
    return 0.0f;
}

void VideoDecoder::OnDecoderDone() {
    if (m_MsgContext && m_MessageCallback) m_MessageCallback(m_MsgContext, MSG_DECODER_DONE, 0);
    if (m_VideoRender) m_VideoRender->UnInit();
    if (!m_RGBAFrame) {
        free(m_RGBAFrame);
        m_RGBAFrame = nullptr;
    }
    if (!m_FrameBuffer) {
        free(m_FrameBuffer);
        m_FrameBuffer = nullptr;
    }
    if (!m_SwsContext) {
        free(m_SwsContext);
        m_SwsContext = nullptr;
    }
}



