//
// Created by linyongzhi on 2023/12/8.
//

#include "CPHH264Decoder.h"
#include "CPHBuffer.h"
#include "CPHBufferQueue.h"

int CPHH264Decoder::Init() {
    int ret = avformat_open_input(&pFormatCtx, "output.shu-bitrate-2.h264", NULL, NULL);
    if (ret != 0) {
        return -1;
    }

    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        return -1;
    }

    videoStream = -1;
    for (int i = 0; i < pFormatCtx->nb_streams; ++i) {
        if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStream = i;
            break;
        }
    }

    if (videoStream == -1) {
        return -1;
    }

    pCodecParam = pFormatCtx->streams[videoStream]->codecpar;
    pCodec = avcodec_find_decoder(pCodecParam->codec_id);

    if (pCodec == NULL) {
        return -1;
    }

    pCodecCtx = avcodec_alloc_context3(pCodec);
    if (!pCodecCtx) {
        return -1;
    }

    if (avcodec_parameters_to_context(pCodecCtx, pCodecParam) < 0) {
        return -1;
    }

    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        return -1;
    }
    pFrame = av_frame_alloc();
    pFrameYUV = av_frame_alloc();

    numBytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
    buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
    av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);

    sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BILINEAR, NULL, NULL, NULL);

    taskRunning = true;
    decodeThread = new (std::nothrow) std::thread(CPHH264Decoder::DecodeTaskEntry, this);
    if (decodeThread == nullptr) {
        return -1;
    }
    return 0;
}

// Reads one packet from the demuxer, feeds it to the decoder, and — if a
// decoded frame is available — converts it into pFrameYUV (YUV420P).
// Returns 0 when pFrameYUV holds a new frame, -1 otherwise (EOF, non-video
// packet, or decoder needs more input, e.g. AVERROR(EAGAIN)).
int CPHH264Decoder::DecodeOneFrame() {
    if (av_read_frame(pFormatCtx, &packet) < 0) {
        return -1;
    }
    if (packet.stream_index != videoStream) {
        // Not our stream: release the packet buffer before bailing out,
        // otherwise every non-video packet leaks.
        av_packet_unref(&packet);
        return -1;
    }
    int ret = avcodec_send_packet(pCodecCtx, &packet);
    // The decoder keeps its own reference to the data; the packet can be
    // unreffed right after sending, regardless of the result.
    av_packet_unref(&packet);
    if (ret != 0) {
        return -1;
    }
    if (avcodec_receive_frame(pCodecCtx, pFrame) != 0) {
        return -1;
    }
    sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
    return 0;
}

int CPHH264Decoder::DeInit() {
    taskRunning = false;

    if (decodeThread != nullptr) {
        decodeThread->join();
        delete decodeThread;
        decodeThread = nullptr;
    }
    av_free(buffer);
    av_frame_free(&pFrameYUV);
    av_frame_free(&pFrame);
    avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatCtx);
    return 0;
}

// Background thread body: decodes frames in a loop until taskRunning is
// cleared, copying each converted YUV420P frame into a CPHBuffer and
// pushing it onto the output queue.
void CPHH264Decoder::DecodeTaskEntry(CPHH264Decoder *decoder) {
    while (decoder->taskRunning) {
        if (decoder->DecodeOneFrame() != 0) {
            // No frame available (EOF / non-video packet / decoder starving):
            // back off briefly and retry.
            std::this_thread::sleep_for(std::chrono::milliseconds(1));
            continue;
        }
        // Use the actual decoded frame height, not a hard-coded value —
        // pFrameYUV's buffer was sized from pCodecCtx in Init(), so a
        // hard-coded height would mis-size (or overrun) for other streams.
        int height = decoder->pCodecCtx->height;
        // YUV420P plane sizes: full-height luma, half-height chroma planes.
        int YSize = decoder->pFrameYUV->linesize[0] * height;
        int USize = decoder->pFrameYUV->linesize[1] * height / 2;
        int VSize = decoder->pFrameYUV->linesize[2] * height / 2;
        int totalLen = YSize + USize + VSize;

        // data[0] points at the start of the single contiguous buffer filled
        // by av_image_fill_arrays, so one copy grabs all three planes.
        CPHBuffer yuvData;
        yuvData.Init(totalLen);
        yuvData.FillInData(decoder->pFrameYUV->data[0], totalLen);
        decoder->m_yuvQueue->pushData(yuvData);

        // Crude ~50 fps pacing between pushed frames.
        std::this_thread::sleep_for(std::chrono::milliseconds(20));
    }
}

// Injects the output queue that the decode thread pushes YUV frames onto.
// The decoder does not take ownership; the queue must outlive decoding.
void CPHH264Decoder::SetYuvQueue(CPHBufferQueue *yuvQueue) {
    this->m_yuvQueue = yuvQueue;
}