//
// Created by hjie on 23-12-28.
//

#include "format_translate.h"

#include <cinttypes>
#include <cstdint>
#include <cstdio>

#include "codec_config.h"
#include "message_queue.h"
#include "operate_define.h"

void QsvDecodeAndUseFilterTranslateFormat(const std::string & filename)
{
    InputFormatContext input_output_context;
    CodecConfig codec_config;

    int result = input_output_context.OpenUrlWithFindStreamInfo(filename);
    if (result)
    {
        FilterGraphContext filter_graph;
        filter_graph.InitGraph();
        auto stream_info = input_output_context.VideoMediaStream();
        VideoFilterParams params;
        params.m_format    = AV_PIX_FMT_NV12;
        params.m_framerate = stream_info->AvStream()->avg_frame_rate;
        params.m_timebase  = stream_info->AvStream()->time_base;
        params.m_width     = stream_info->AvStream()->codecpar->width;
        params.m_height    = stream_info->AvStream()->codecpar->height;

        if (!filter_graph.InitBufferCtxForVideo(params))
        {
            return;
        }
        AVFilterContext * format_ctx = nullptr;
        const AVFilter * filter = avfilter_get_by_name("format");
        int ret = avfilter_graph_create_filter(&format_ctx, filter, "format_fmt", "yuv420p", nullptr, filter_graph.GetFilterGraph());
        if (ret < 0)
        {
            PrintLogMsg(result);
        }
        else
        {
            avfilter_link(filter_graph.GetFilterBufferCtx(), 0, format_ctx, 0);
            filter_graph.InitBufferSinkForVideo();
            avfilter_link(format_ctx, 0, filter_graph.GetFilterBufferSinkCtx(), 0);
            filter_graph.ConfigGraph();
        }
        codec_config.m_video_config.m_hw_name = "qsv";
        result = input_output_context.CreateVideoDecoderCtx("", codec_config);
        if (result)
        {
            int64_t frame_count = 0;
            FrameMessageQueue packet_frame_operate;
            FrameMessageQueue filter_frame_message;
            packet_frame_operate.InitMessageQueue(1024);
            filter_frame_message.InitMessageQueue(1000);
            PrintLogMsg("create video decoder ctx success");
            ReadPacketDataWithCallback(&input_output_context, nullptr, [&packet_frame_operate,&frame_count,&filter_frame_message, &filter_graph](InputFormatContext * input_output_context, AVPacket * packet){

                if (packet->stream_index == input_output_context->VideoMediaStream()->AvStream()->index)
                    DecodePacketDataToMessageQueue(input_output_context->VideoDecoder(), packet, packet_frame_operate);
                ProcessFrameDataFromMessageQueue(packet_frame_operate, [&frame_count, &filter_frame_message, &filter_graph](AVFrame * frame){

                    // static int64_t frame_count = 0;
                    printf("new frame count : %ld and format : %d.\n", frame_count++, frame->format);
                    FilterFrame(&filter_graph, frame, filter_frame_message);
                    ProcessFrameDataFromMessageQueue(filter_frame_message, [](AVFrame * filter_frame) {

                        printf("filter frame format : %d.\n", filter_frame->format);

                    }, false);

                }, false);
                return true;
            });
            DecodePacketDataToMessageQueue(input_output_context.VideoDecoder(), nullptr, packet_frame_operate);
            ProcessFrameDataFromMessageQueue(packet_frame_operate, [&frame_count,&filter_frame_message, &filter_graph](AVFrame * frame){

                printf("new frame count : %ld and format : %d.\n", frame_count++, frame->format);
                FilterFrame(&filter_graph, frame, filter_frame_message);
                ProcessFrameDataFromMessageQueue(filter_frame_message, [](AVFrame * filter_frame) {

                    printf("filter frame format : %d.\n", filter_frame->format);

                }, false);

            }, false);
        }
        else
        {
            PrintLogMsg("create video decoder ctx failed");
        }
    }
    else
    {
        PrintLogMsg("open file failed");
    }
}