
#include "codec_config.h"
#include "date_filter.h"
#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <string>
#include <strings.h>
#include <thread>
#include <unistd.h>
#include "base/input_output_context.h"
#include "base/base_util.h"
#include "base/message_queue.h"
#include "video_debug/video_audio_info.h"

// Global filter-graph state used only by InitFilter/its commented-out callers;
// the active pipeline path uses DateFilter's own filter contexts instead
// (see FilterProcess). Reset by InitFilter on each call.
AVFilterContext * buffer_filter_context = nullptr;
AVFilterContext * buffer_sink_filter_context = nullptr;
AVFilterGraph   * filter_graph = nullptr;


// Bundles the hand-off queues between the pipeline stages
// (read -> decode -> filter -> encode). Sized in main() via InitMessageQueue.
struct MessageQueueObject
{
	PacketMessageQueue read_packet_queue;    // demuxed packets from the reader thread
	FrameMessageQueue  decoder_frame_queue;  // decoded frames awaiting filtering

	FrameMessageQueue  filter_frame_queue;   // filtered frames awaiting encoding
	PacketMessageQueue encoder_packet_queue; // NOTE(review): initialized in main but never pushed/popped here
};

// Single global instance shared by all pipeline threads.
MessageQueueObject message_queue_obj;

// Reader-thread entry point: demuxes packets from the input container and
// pushes them onto `message_queue` until ReadFrameToQueue returns
// (end of input or read error).
void ReadFrameThread(InputFileData * input_data, PacketMessageQueue & message_queue)
{
	ReadFrameToQueue(input_data->m_format_ctx, input_data->m_read_packet, message_queue);
}

// Builds the global video filter graph: buffer (decoded input) -> drawtext ->
// buffersink (encoder input). Populates buffer_filter_context,
// buffer_sink_filter_context and filter_graph.
// Exits the process on unrecoverable setup failure, as before.
// Fixes vs. original: the AVFilterInOut lists are now freed (they leaked — the
// frees were commented out), the sink-creation and graph-parse failures are
// logged with their error codes, and av_opt_set_int_list's result is checked.
void InitFilter(InputFileData * input_data, OutputFileData * output_data)
{
	char args[512] = {0};

	const AVFilter * buffer_src  = avfilter_get_by_name("buffer");
	const AVFilter * buffer_sink = avfilter_get_by_name("buffersink");

	AVFilterInOut * inputs  = avfilter_inout_alloc();
	AVFilterInOut * outputs = avfilter_inout_alloc();

	buffer_filter_context      = nullptr;
	buffer_sink_filter_context = nullptr;
	filter_graph = avfilter_graph_alloc();

	// Describe the frames fed into the graph: decoder geometry/pixel format,
	// the decoder's packet time base, and a fixed 25/1 frame rate.
	snprintf(args, sizeof(args) - 1, "video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:frame_rate=%d/%d", 
		input_data->m_video_decoder_ctx->width, input_data->m_video_decoder_ctx->height, 
		input_data->m_video_decoder_ctx->pix_fmt, input_data->m_video_decoder_ctx->pkt_timebase.num, input_data->m_video_decoder_ctx->pkt_timebase.den,
		25, 1); 

	printf("args parameter : %s.\n", args);

	int ret = avfilter_graph_create_filter(&buffer_filter_context, buffer_src, "in", args, nullptr, filter_graph);
	if (ret < 0)
	{
		PrintLog(ret);
		avfilter_inout_free(&inputs);
		avfilter_inout_free(&outputs);
		exit(1);
	}

	ret = avfilter_graph_create_filter(&buffer_sink_filter_context, buffer_sink, "out", nullptr, nullptr, filter_graph);
	if (ret < 0)
	{
		// Previously this failure was completely silent; report it.
		PrintLog(ret);
		avfilter_inout_free(&inputs);
		avfilter_inout_free(&outputs);
		return;
	}

	// Restrict the sink to the pixel formats the encoder supports.
	ret = av_opt_set_int_list(buffer_sink_filter_context, "pix_fmts", output_data->m_video_encoder_ctx->codec->pix_fmts, AV_PIX_FMT_NONE, AV_OPT_SEARCH_CHILDREN);
	if (ret < 0)
	{
		PrintLog(ret);
	}

	if (output_data->m_video_encoder_ctx->hw_device_ctx)
	{
		// Share the encoder's hardware device with the sink (e.g. QSV frames).
		buffer_sink_filter_context->hw_device_ctx = av_buffer_ref(output_data->m_video_encoder_ctx->hw_device_ctx);
	}

	std::string filter_content = "drawtext=fontsize=56:fontcolor=red:text='hello,world'";

	// Per the FFmpeg filtering examples: `outputs` names the source pad the
	// parsed chain consumes from, `inputs` names the sink pad it feeds.
	outputs->name       = av_strdup("in");
	outputs->filter_ctx = buffer_filter_context;
	outputs->pad_idx    = 0;
	outputs->next       = nullptr;

	inputs->name       = av_strdup("out");
	inputs->filter_ctx = buffer_sink_filter_context;
	inputs->pad_idx    = 0;
	inputs->next       = nullptr;

	ret = avfilter_graph_parse_ptr(filter_graph, filter_content.c_str(), &inputs, &outputs, nullptr);
	if (ret >= 0)
	{
		ret = avfilter_graph_config(filter_graph, nullptr);
		if (ret < 0)
		{
			PrintLog(ret);
			exit(-1);
		}
	}
	else
	{
		// Include the actual error code (the original printed only "AVERROR.").
		PrintLog(ret);
		printf("AVERROR.\n");
	}

	// avfilter_graph_parse_ptr rewrites inputs/outputs; whatever remains must
	// be released by the caller. These frees were commented out and leaked on
	// every call. avfilter_inout_free tolerates already-null pointers.
	avfilter_inout_free(&inputs);
	avfilter_inout_free(&outputs);
}

// Feeds `frame` into the date filter graph, then drains every filtered frame
// that becomes available onto the global filter_frame_queue. Passing nullptr
// as `frame` flushes the graph. (input_data and codec_ctx are currently
// unused; kept for signature compatibility with callers.)
void FilterProcess(InputFileData * input_data, AVCodecContext * codec_ctx, AVFrame * frame, DateFilter & date_filter)
{
	int status = av_buffersrc_add_frame_flags(date_filter.m_input_filter_ctx, frame, AV_BUFFERSRC_FLAG_KEEP_REF);
	if (status < 0)
	{
		PrintLog(status);
		return;
	}
	for (;;)
	{
		AVFrame * filtered = av_frame_alloc();
		status = av_buffersink_get_frame(date_filter.m_output_filter_ctx, filtered);
		if (status >= 0)
		{
			// Hand the frame to the queue; the consumer owns it from here on.
			message_queue_obj.filter_frame_queue.PushFrame(&filtered);
			continue;
		}
		// No more frames (EAGAIN/EOF) or a real error: release and stop.
		av_frame_unref(filtered);
		av_frame_free(&filtered);
		if (status != AVERROR(EAGAIN) && status != AVERROR_EOF)
		{
			PrintLog(status);
		}
		break;
	}
}

// Drains every frame currently queued in `message_queue`, encodes each one
// with the video encoder and interleave-writes the resulting packets to the
// output container. Stops when the queue yields no frame or on encoder error.
// Fix vs. original: on avcodec_send_frame failure the popped frame was leaked
// (break skipped the frees); it is now released on every path.
void EncoderFilterFrameData(OutputFileData * output_data, FrameMessageQueue & message_queue)
{
	while (true)
	{
		AVFrame * frame = nullptr;
		message_queue.PopFrameNonBlocking(&frame);
		if (!frame)
		{
			break; // queue drained
		}
		int result = avcodec_send_frame(output_data->m_video_encoder_ctx, frame);
		if (result < 0)
		{
			av_frame_free(&frame); // was leaked here in the original
			PrintLog(result);
			break;
		}
		AVPacket * packet = av_packet_alloc();
		while (true)
		{
			result = avcodec_receive_packet(output_data->m_video_encoder_ctx, packet);
			if (result < 0)
			{
				// EAGAIN/EOF just mean "no more packets for this frame".
				if (result != AVERROR_EOF && result != AVERROR(EAGAIN))
				{
					PrintLog(result);
				}
				break;
			}
			av_interleaved_write_frame(output_data->m_output_fmt_ctx, packet);
			av_packet_unref(packet);
		}
		av_packet_free(&packet);
		av_frame_free(&frame); // av_frame_free also unrefs
	}
}

// Consumer side of the pipeline: pops demuxed packets from
// packet_message_queue, remuxes audio packets unchanged, decodes video
// packets, pushes decoded frames through the date filter and encoder, and
// paces video output to roughly real time. After the packet queue closes it
// flushes the decoders and the filter graph.
// NOTE(review): the pacing math, queue draining and flush ordering are
// intertwined; documented in place rather than restructured.
void DecoderThread(InputFileData * input_data, 
				   OutputFileData * output_data, 
				   PacketMessageQueue & packet_message_queue, 
				   FrameMessageQueue  & frame_message_queue,
				   DateFilter         & date_filter)
{
	// ReProcessFrameThread(input_data, output_data);
	// int64_t duration = input_data->m_format_ctx->duration;
	int64_t now_time = av_gettime(); // wall-clock origin used for pacing below
	int count = 0;                   // video packets consumed so far (pacing multiplier)
	while (true)
	{
		AVPacket * packet = nullptr;
		int result = packet_message_queue.PopPacketNonBlocking(&packet);
		if (result < 0)
		{
			// EAGAIN: queue momentarily empty — back off 1 ms and retry.
			// Any other error is treated as end of stream.
			if (result == AVERROR(EAGAIN))
			{
				usleep(1000);
				continue;
			}
			break;
		}
		if (packet->stream_index == input_data->m_audio_stream_index)
		{

		}
		else
		{
			// Real-time pacing for video: estimate this packet's presentation
			// offset as packet duration (in microseconds) times the number of
			// video packets already handled, then sleep until that wall-clock
			// instant if we are ahead of schedule.
			int64_t pts_time = packet->duration * av_q2d(input_data->m_video_decoder_ctx->pkt_timebase) * AV_TIME_BASE * count++;
			pts_time = now_time + pts_time;
			int64_t now = av_gettime();
			if (now > pts_time)
			{

			}
			else
			{
				usleep(pts_time - now);
			}
		}
		if (packet->stream_index == input_data->m_audio_stream_index)
		{
			// Audio is passed through without re-encoding.
			av_interleaved_write_frame(output_data->m_output_fmt_ctx, packet);
		}
		else if (packet->stream_index == input_data->m_video_stream_index)
		{
			// Decoded frames land on frame_message_queue.
			DecoderPacketData(input_data->m_video_decoder_ctx, packet, frame_message_queue);
		}
		// Drain whatever the decoder produced for this packet: filter each
		// video frame, then encode/write the filtered output.
		int size = frame_message_queue.GetSize();
		for (int index = 0; index < size; index++)
		{
			AVFrame * frame = nullptr;
			result = frame_message_queue.PopFrameNonBlocking(&frame);
			if (result < 0)
			{
				if (result ==  AVERROR(EAGAIN))
				{
					usleep(1000);
					continue;
				}
				break;
			}
			if (packet->stream_index == input_data->m_video_stream_index)
			{
				FilterProcess(input_data, input_data->m_video_decoder_ctx, frame, date_filter);
				EncoderFilterFrameData(output_data, message_queue_obj.filter_frame_queue);
			}			
			else
			{
				
			}
			av_frame_unref(frame);
			av_frame_free(&frame);
		}
		av_packet_unref(packet);
		av_packet_free(&packet);
	}
	// Input exhausted: flush each decoder (nullptr packet) and process any
	// remaining frames. The video-decoder flush is currently disabled.
	for (unsigned int index = 0; index < input_data->m_format_ctx->nb_streams; index++)
	{
		if (input_data->m_format_ctx->streams[index]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
		{
			// DecoderPacketData(input_data->m_video_decoder_ctx, nullptr, frame_message_queue);
		}
		else 
		{
			DecoderPacketData(input_data->m_audio_decoder_ctx, nullptr, frame_message_queue);
		}
		int size = frame_message_queue.GetSize();
		for (int i = 0; i < size; i++)
		{
			AVFrame * frame = nullptr;
			int result = frame_message_queue.PopFrameNonBlocking(&frame);
			if (result < 0)
			{
				if (result ==  AVERROR(EAGAIN))
				{
					usleep(1000);
					continue;
				}
				break;
			}
			if (input_data->m_format_ctx->streams[index]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
			{
				FilterProcess(input_data, input_data->m_video_decoder_ctx, frame, date_filter);
				EncoderFilterFrameData(output_data, message_queue_obj.filter_frame_queue);
			}
			else
			{

			}
			av_frame_unref(frame);
			av_frame_free(&frame);
		}
	}
	// Flush the filter graph (nullptr frame) and encode the tail frames.
	FilterProcess(input_data, input_data->m_video_decoder_ctx, nullptr, date_filter);
	EncoderFilterFrameData(output_data, message_queue_obj.filter_frame_queue);
	// audio process
}

// Entry point: argv[1] = input media file, argv[2] = output media file.
// Wires up input demuxing, the QSV H.264 encoder, the date/drawtext filter,
// and the reader/decoder threads, then writes the container trailer.
int main(int argc, const char * argv[])
{
	if (argc < 3)
	{
		printf("need input media file and output media file.\n");
		exit(1);
	}

	printf("input file : %s and output file : %s.\n", argv[1], argv[2]);

	std::string file = argv[1];
	// av_log_set_level(AV_LOG_DEBUG);
	InputFileData * input_data = new InputFileData();
	// OpenFile/InitDecoderCtx report success as truthy (consistent with the
	// `if (!result)` failure checks below). The original combined them as
	// `result = !result || ...`, which turned an OpenFile failure into `true`
	// and silently masked it; chain with && so every step must succeed.
	int result = input_data->OpenFile(file);
	result = result && input_data->InitDecoderCtx();
	if (!result)
	{
		delete input_data;
		input_data = nullptr;
		return EXIT_FAILURE;
	}
	OutputFileData * output_data = new OutputFileData();
	CodecConfig codec_config;

	// Intel QuickSync H.264 encoder; NV12 hardware format, YUV420P software format.
	codec_config.SetCodecName("h264_qsv");
	codec_config.SetFormatAndSwFormat(AV_PIX_FMT_NV12, AV_PIX_FMT_YUV420P);

	std::string output_file = argv[2];

	// Same convention: && instead of the failure-masking `!result || ...`.
	result = output_data->InitOutputFmtCtx(output_file, nullptr);
	result = result && output_data->CreateVideoNewStream(input_data, "", &codec_config);
	result = result && output_data->CreateAudioNewStream(input_data);
	if (!result)
	{
		delete output_data;
		delete input_data;
		output_data = nullptr;
		input_data  = nullptr;
		return EXIT_FAILURE;
	}
	output_data->WriterHeader();
	// InitFilter(input_data, output_data);
	message_queue_obj.read_packet_queue.InitMessageQueue(120);
	message_queue_obj.decoder_frame_queue.InitMessageQueue(100);
	message_queue_obj.filter_frame_queue.InitMessageQueue(120);
	message_queue_obj.encoder_packet_queue.InitMessageQueue(120);

	// The date filter consumes frames in the decoder's geometry/time base and
	// produces YUV420P for the encoding path.
	DateFilter date_filter;
	CodecConfig filter_config;
	filter_config.SetWidthHeight(input_data->m_video_decoder_ctx->width, input_data->m_video_decoder_ctx->height);
	filter_config.SetTimebase(input_data->m_video_decoder_ctx->pkt_timebase);
	filter_config.SetFormatAndSwFormat(AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE);

	date_filter.CodeConfig(filter_config);

	DateDrawTextConfig config;
	config.m_font_size = 30;
	config.m_x_position = 0;
	config.m_y_position = 0;

	if (!date_filter.InitDateFilter(config))
	{
		// Release the contexts before exiting (original bare exit(-1) leaked both).
		delete output_data;
		delete input_data;
		return EXIT_FAILURE;
	}

	// Reader thread demuxes into read_packet_queue; this thread drains it.
	auto thd = std::thread([&input_data](){

		ReadFrameThread(input_data, message_queue_obj.read_packet_queue);
	});
	DecoderThread(input_data, output_data, message_queue_obj.read_packet_queue, message_queue_obj.decoder_frame_queue, date_filter);
	if (thd.joinable())
	{
		thd.join();
	}

	av_write_trailer(output_data->m_output_fmt_ctx);

	delete input_data;
	input_data = nullptr;

	delete output_data;
	output_data = nullptr;

	return EXIT_SUCCESS;
}

