//
// Created by hjie on 23-12-9.
//

#include "format_template.h"
#include "base_util.h"
#include "operate_define.h"
#include "codec_config.h"
#include "message_queue.h"
#include <cstdio>

/**
 * Builds the static composition graph inside data.m_graph:
 *
 *   nullsrc_0 -> drawbox_0 -> overlay_0 -> overlay_2 -> format -> out (buffersink)
 *                                 ^            ^
 *                           in1 (buffer)  in2 (buffer)
 *
 * Two 960x540 inputs are overlaid side by side on a black 1920x1080 canvas
 * and converted to NV12 before reaching the sink. On success the relevant
 * filter contexts are stored on @p data and the (non-negative) result of
 * avfilter_graph_config() is returned; every failure is logged via
 * PrintLog() and the negative AVERROR code is returned immediately.
 */
int NullSrcAndOverlayInit(NullSrcAndOverlay &data)
{
	const AVFilter * filter = nullptr;
	int result = 0;

	// Background source: blank 1920x1080 canvas, 25 fps, 55 seconds long.
	filter = avfilter_get_by_name("nullsrc");
	AVFilterContext * nullsrc_ctx = nullptr;
	result = avfilter_graph_create_filter(&nullsrc_ctx, filter, "nullsrc_0", "s=1920x1080:d=55:r=25/1", nullptr, data.m_graph);
	if (result < 0)
	{
		PrintLog(result);
		return result;
	}

	// Fill the whole canvas with opaque black. (Fix: this creation result was
	// previously unchecked, so a failure here linked a null context.)
	filter = avfilter_get_by_name("drawbox");
	AVFilterContext * drawbox_ctx = nullptr;
	result = avfilter_graph_create_filter(&drawbox_ctx, filter, "drawbox_0", "x=0:y=0:w=1920:h=1080:color=black@1.0:t=fill", nullptr, data.m_graph);
	if (result < 0)
	{
		PrintLog(result);
		return result;
	}
	result = avfilter_link(nullsrc_ctx, 0, drawbox_ctx, 0);
	if (result < 0)
	{
		PrintLog(result);
		return result;
	}

	// First overlay: left half of the canvas (x=0, vertically centered).
	filter = avfilter_get_by_name("overlay");
	AVFilterContext * overlay_ctx = nullptr;
	result = avfilter_graph_create_filter(&overlay_ctx, filter, "overlay_0", "format=yuv420:x=0:y=270", nullptr, data.m_graph);
	if (result < 0)
	{
		PrintLog(result);
		return result;
	}
	result = avfilter_link(drawbox_ctx, 0, overlay_ctx, 0);
	if (result < 0)
	{
		PrintLog(result);
		return result;
	}

	{
		// Buffer source "in1": fed with decoded frames from the first input.
		filter = avfilter_get_by_name("buffer");
		AVFilterContext * buffer_ctx = nullptr;
		char args[1024] = {0};
		AVStream * stream = data.m_input_ctx.VideoMediaStream()->AvStream();
		int format = data.m_input_ctx.VideoDecoder()->AvCodecCtx()->pix_fmt;
		// NOTE(review): frame_rate is written as r_frame_rate.den/num (the
		// inverse of the stream rate); kept as-is — confirm this is intended.
		snprintf(args, sizeof(args), "video_size=960x540:pix_fmt=%d:time_base=%d/%d:frame_rate=%d/%d", format, stream->time_base.num, stream->time_base.den, stream->r_frame_rate.den, stream->r_frame_rate.num);

		PrintLogMsg(args);

		result = avfilter_graph_create_filter(&buffer_ctx, filter, "in1", args, nullptr, data.m_graph);
		if (result < 0)
		{
			PrintLog(result);
			return result;
		}
		result = avfilter_link(buffer_ctx, 0, overlay_ctx, 1);
		if (result < 0)
		{
			PrintLog(result);
			return result;
		}
		data.m_buffer_ctx = buffer_ctx;
	}

	// Second overlay: right half of the canvas (x=960).
	filter = avfilter_get_by_name("overlay");
	AVFilterContext * overlay_ctx_1 = nullptr;
	result = avfilter_graph_create_filter(&overlay_ctx_1, filter, "overlay_2", "format=yuv420:x=960:y=270", nullptr, data.m_graph);
	if (result < 0)
	{
		PrintLog(result);
		return result;
	}
	result = avfilter_link(overlay_ctx, 0, overlay_ctx_1, 0);
	if (result < 0)
	{
		PrintLog(result);
		return result;
	}

	{
		// Buffer source "in2": fed with decoded frames from the second input.
		filter = avfilter_get_by_name("buffer");
		AVFilterContext * buffer_ctx = nullptr;
		char args[1024] = {0};
		int format = data.m_input_ctx_1.VideoDecoder()->AvCodecCtx()->pix_fmt;
		AVStream * stream = data.m_input_ctx_1.VideoMediaStream()->AvStream();
		snprintf(args, sizeof(args), "video_size=960x540:pix_fmt=%d:time_base=%d/%d:frame_rate=%d/%d", format, stream->time_base.num, stream->time_base.den, stream->r_frame_rate.den, stream->r_frame_rate.num);

		PrintLogMsg(args);

		result = avfilter_graph_create_filter(&buffer_ctx, filter, "in2", args, nullptr, data.m_graph);
		if (result < 0)
		{
			PrintLog(result);
			return result;
		}
		result = avfilter_link(buffer_ctx, 0, overlay_ctx_1, 1);
		if (result < 0)
		{
			PrintLog(result);
			return result;
		}
		data.m_buffer_ctx_1 = buffer_ctx;
	}

	// Convert the composed picture to NV12 before the sink. (Fix: the
	// creation and link results here were previously unchecked.)
	AVFilterContext * format_ctx = nullptr;
	filter = avfilter_get_by_name("format");
	result = avfilter_graph_create_filter(&format_ctx, filter, "format", av_get_pix_fmt_name(AV_PIX_FMT_NV12), nullptr, data.m_graph);
	if (result < 0)
	{
		PrintLog(result);
		return result;
	}
	result = avfilter_link(overlay_ctx_1, 0, format_ctx, 0);
	if (result < 0)
	{
		PrintLog(result);
		return result;
	}

	printf("FilterFormatsState : %d.\n", overlay_ctx_1->filter->formats_state);

	// Sink so frames can be pulled with av_buffersink_get_frame().
	filter = avfilter_get_by_name("buffersink");
	AVFilterContext * buffersink_filter = nullptr;
	result = avfilter_graph_create_filter(&buffersink_filter, filter, "out", nullptr, nullptr, data.m_graph);
	if (result < 0)
	{
		PrintLog(result);
		return result;
	}
	result = avfilter_link(format_ctx, 0, buffersink_filter, 0);
	if (result < 0)
	{
		PrintLog(result);
		return result;
	}

	result = avfilter_graph_config(data.m_graph, nullptr);
	if (result < 0)
	{
		PrintLog(result);
		return result;
	}

	// Graph is valid: publish the contexts the processing functions need.
	data.m_nullsrc_ctx     = nullsrc_ctx;
	data.m_overlay_ctx     = overlay_ctx;
	data.m_buffer_sink_ctx = buffersink_filter;

	char * ptr = avfilter_graph_dump(data.m_graph, nullptr);
	if (ptr)
	{
		printf("%s.\n", ptr);
		av_free(ptr);
	}
	return result;
}

int InitInputStream(NullSrcAndOverlay &data)
{
	data.m_input_ctx.OpenUrlWithFindStreamInfo("/home/hjie/source/study/FFmpeg/fftools/cmake-build-debug/test.mp4");
	if (data.m_input_ctx.VideoMediaStream())
	{
		CodecConfig decoder_config;
        decoder_config.m_video_config.m_hw_name = "qsv";
        data.m_input_ctx.CreateVideoDecoderCtx("", decoder_config);
	}
    data.m_input_ctx_1.OpenUrlWithFindStreamInfo("/home/hjie/download/filesample/50_s.mp4");
    if (data.m_input_ctx_1.VideoMediaStream())
    {
        CodecConfig decoder_config;
        decoder_config.m_video_config.m_hw_name = "qsv";
        data.m_input_ctx_1.CreateVideoDecoderCtx("", decoder_config);
    }
	return 0;
}

int FilterProcessFrameInput0(NullSrcAndOverlay & data, AVFrame * frame, FrameMessageQueue & message_queue)
{
	int result = av_buffersrc_add_frame_flags(data.m_buffer_ctx, frame, AV_BUFFERSRC_FLAG_KEEP_REF);
	if (result < 0)
	{
		PrintLog(result);
	}
	while (result >= 0)
	{
		AVFrame * out = av_frame_alloc();
		result = av_buffersink_get_frame(data.m_buffer_sink_ctx, out);
		if (result < 0)
		{
			if (result == AVERROR(EAGAIN) || result == AVERROR_EOF)
			{
				result = 0;
			}
			else
			{
				PrintLog(result);
			}
			av_frame_free(&out);
			break;
		}
		else
		{
			message_queue.PushFrame(&out);
		}
	}
	return result;
}

int FilterProcessFrameInput1(NullSrcAndOverlay & data, AVFrame * frame, FrameMessageQueue & message_queue)
{
	int result = av_buffersrc_add_frame_flags(data.m_buffer_ctx_1, frame, AV_BUFFERSRC_FLAG_KEEP_REF);
	if (result < 0)
	{
		PrintLog(result);
	}
	while (result >= 0)
	{
		AVFrame * out = av_frame_alloc();
		result = av_buffersink_get_frame(data.m_buffer_sink_ctx, out);
		if (result < 0)
		{
			if (result == AVERROR(EAGAIN) || result == AVERROR_EOF)
			{
				result = 0;
			}
			else
			{
				PrintLog(result);
			}
			av_frame_free(&out);
			break;
		}
		else
		{
			message_queue.PushFrame(&out);
		}
	}
	return result;
}

/**
 * Pushes @p frame into the given @p buffer source (keeping the caller's
 * reference via AV_BUFFERSRC_FLAG_KEEP_REF), then drains every frame the
 * graph's buffer sink currently has into @p message_queue.
 *
 * @return 0 when the sink is merely empty (EAGAIN/EOF), a negative AVERROR
 *         code on a real failure (which is also logged).
 */
int FilterProcessFrame(NullSrcAndOverlay & data, AVFilterContext * buffer, AVFrame * frame, FrameMessageQueue & message_queue)
{
    int result = av_buffersrc_add_frame_flags(buffer, frame, AV_BUFFERSRC_FLAG_KEEP_REF);
    if (result < 0)
    {
        PrintLog(result);
        return result;
    }
    for (;;)
    {
        AVFrame * filtered = av_frame_alloc();
        result = av_buffersink_get_frame(data.m_buffer_sink_ctx, filtered);
        if (result >= 0)
        {
            // Hand the frame to the queue (pointer-to-pointer — presumably
            // the queue takes ownership; the frame is not freed here).
            message_queue.PushFrame(&filtered);
            continue;
        }
        av_frame_free(&filtered);
        if (result == AVERROR(EAGAIN) || result == AVERROR_EOF)
        {
            // Sink drained for now — not an error from the caller's view.
            result = 0;
        }
        else
        {
            PrintLog(result);
        }
        break;
    }
    return result;
}

/**
 * Creates the output container ("./test.mp4") and configures its QSV video
 * encoder. Most encoder parameters are copied from the configured buffer
 * sink so the stream matches whatever the filter graph produces; @p frame
 * is handed to the codec config (used for hardware frame setup).
 * Always returns 0.
 */
int InitOutputData(NullSrcAndOverlay &data, AVFrame *frame)
{
	data.m_output_ctx.CreateUrl("./test.mp4");

	CodecConfig codec_config;
	codec_config.m_video_config.m_width   = 1920;
	codec_config.m_video_config.m_height  = 1080;
	codec_config.m_video_config.m_frame   = frame;
	codec_config.m_video_config.m_hw_name = "qsv";

	// Invoked with the freshly created encoder context; pulls the negotiated
	// properties off the sink and applies the fixed rate-control settings.
	codec_config.m_video_config.m_config_callback = [&data](AVCodecContext * encoder) {
		AVFilterContext * sink = data.m_buffer_sink_ctx;

		encoder->width  = av_buffersink_get_w(sink);
		encoder->height = av_buffersink_get_h(sink);
		encoder->sample_aspect_ratio = av_buffersink_get_sample_aspect_ratio(sink);

		encoder->framerate = av_buffersink_get_frame_rate(sink);
		encoder->time_base = av_inv_q(encoder->framerate);
		encoder->pix_fmt   = (AVPixelFormat)av_buffersink_get_format(sink);
		printf("codec ctx format : %d.\n", encoder->pix_fmt);

		encoder->level = 40;
		// MFX_RATECONTROL_CBR cbr codec for qsv
		encoder->bit_rate = encoder->rc_max_rate = 1500 * 1000;

		printf("timebase : %d/%d \t frame : %d/%d.\n", encoder->time_base.num, encoder->time_base.den, encoder->framerate.num, encoder->framerate.den);
	};

	data.m_output_ctx.CreateVideoStream(data.m_input_ctx.VideoMediaStream(), codec_config);
	if (!codec_config.m_video_config.m_hw_name.empty())
	{
		data.m_output_ctx.CreateVideoStreamAndInitHwFrameCtx(data.m_input_ctx.VideoMediaStream());
	}
	return 0;
}