
#include <cstdio>
#include <cstdlib>
#include <string>
#include <cstring>
#include <thread>
#include <unistd.h>
#include "./base/base_util.h"
#include "./base/input_output_operate.h"
#include "./base/input_output_context.h"
#include "codec_config.h"
#include "message_queue.h"
#include "video_debug/video_audio_info.h"

// Hand-off queues between the demux thread and the main
// decode -> scale -> encode -> mux loop in main().
struct MessageQueue
{
	// Despite the name, ReadFrame() pushes ALL demuxed packets (video and
	// audio) into m_video_packet_queue; main() branches on stream_index.
	PacketMessageQueue m_video_packet_queue;
	PacketMessageQueue m_audio_packet_queue;   // initialized in main() but never pushed to here

	FrameMessageQueue  m_video_frame_queue;    // decoded video frames awaiting scale + encode
	FrameMessageQueue  m_audio_frame_queue;    // initialized in main() but never pushed to here
	PacketMessageQueue m_encoder_packet_queue; // encoded video packets awaiting muxing
};

// File-scope pixel-format conversion state used by main():
// sws_ctx converts YUV420P -> NV12 (the qsv encoder path is configured for NV12),
// sws_frame is the reusable NV12 destination frame for sws_scale.
SwsContext * sws_ctx = nullptr;
AVFrame * sws_frame = nullptr;

void ReadFrame(InputFileData * input_data, MessageQueue & message_queue)
{
	ReadFrameToQueue(input_data->m_format_ctx, input_data->m_read_packet, message_queue.m_video_packet_queue);
}

int main(int argc, const char * argv[])
{

	std::string input_file;
	if (argc < 2)
	{
		printf("need input file will use default file.\n");
		input_file = "/home/hjie/source/open_net/ffmpeg/resource/sample_1280x720.mp4"; 
	}
	else
	{
		input_file = argv[1];
	}
	InputFileData * input_data = new InputFileData();
	if (!input_data)
	{
		PrintLog("malloc InputFileData fail");
		exit(-1);
	}
	int result = input_data->OpenFile(input_file);
	if (!result)
	{
		PrintLog("open file fail");
		exit(-1);
	}
	result = input_data->InitDecoderCtx();
	if (!result)
	{
		PrintLog("init deocder fail");
		exit(-1);
	}
	OutputFileData * output_data = new OutputFileData();
	if (!output_data)
	{
		PrintLog("malloc OutputFileData fail");
		exit(-1);
	}
	result = output_data->InitOutputFmtCtx("./test.mp4", nullptr);
	if (!result)
	{
		PrintLog("init output format context");
		exit(-1);
	}
	CodecConfig codec_config;

	// codec_config.SetBitrate(3000 * 1000);
	// codec_config.SetCodecName("libx265");

	// use hevc qsv (need forward format)
	codec_config.SetCodecName("hevc_qsv");
	// codec_config.SetBitrate(3000 * 1000);
	codec_config.SetFormatAndSwFormat(AV_PIX_FMT_NV12, AV_PIX_FMT_YUV420P);

	result = output_data->CreateVideoNewStream(input_data, "", &codec_config);

	result = !result || output_data->CreateAudioNewStream(input_data);

	if (!result)
	{
		PrintLog("create video stream fail");
		exit(-1);
	}

	output_data->WriterHeader();

	MessageQueue message_queue;

	message_queue.m_video_packet_queue.InitMessageQueue(100);
	message_queue.m_audio_packet_queue.InitMessageQueue(100);

	message_queue.m_video_frame_queue.InitMessageQueue(100);
	message_queue.m_audio_frame_queue.InitMessageQueue(100);


	message_queue.m_encoder_packet_queue.InitMessageQueue(100);

	std::thread thd  = std::thread([input_data, &message_queue](){

		ReadFrame(input_data, message_queue);
	});
	sws_frame = av_frame_alloc();

	sws_frame->format = AV_PIX_FMT_NV12;
	sws_frame->width  = input_data->m_video_decoder_ctx->width;
	sws_frame->height = input_data->m_video_decoder_ctx->height;

	av_frame_get_buffer(sws_frame, 0);


    sws_ctx = sws_getContext(sws_frame->width, sws_frame->height, AV_PIX_FMT_YUV420P,
                             sws_frame->width, sws_frame->height, AV_PIX_FMT_NV12,
                             SWS_BILINEAR, NULL, NULL, NULL);

 

	AVPacket * packet = nullptr;
	AVFrame * frame   = nullptr;
	while (true)
	{
		packet = nullptr;
		result = message_queue.m_video_packet_queue.PopPacketNonBlocking(&packet);
		if (result < 0)
		{
			usleep(1000);
			if (result == AVERROR_EOF)
			{
				break;
			}
			continue;
		}
		bool is_video = false;
		if (packet->stream_index == input_data->m_audio_stream_index)
		{
			// DecoderPacketData(input_data->m_audio_decoder_ctx, packet, message_queue.m_audio_frame_queue);
		}
		else if (packet->stream_index == input_data->m_video_stream_index)
		{
			DecoderPacketData(input_data->m_video_decoder_ctx, packet, message_queue.m_video_frame_queue);
			is_video = true;
		}
		while (true && is_video)
		{
			result = message_queue.m_video_frame_queue.PopFrameNonBlocking(&frame);
			if (result < 0)
			{
				break;
			}
			sws_frame->pkt_dts = frame->pkt_dts;
			sws_frame->pts = frame->pts;
			
			sws_scale(sws_ctx, frame->data, frame->linesize, 0, sws_frame->height, sws_frame->data, sws_frame->linesize);

			EncoderFrameData(output_data->m_video_encoder_ctx, sws_frame, message_queue.m_encoder_packet_queue);
			for (;true;)
			{
				AVPacket * pkt = nullptr;
				result = message_queue.m_encoder_packet_queue.PopPacketNonBlocking(&pkt);
				if (result < 0)
				{
					break;
				}

				PrintPacket(output_data->m_output_fmt_ctx, pkt);
				av_interleaved_write_frame(output_data->m_output_fmt_ctx, pkt);
				av_packet_unref(pkt);
				av_packet_free(&pkt);
			}
			av_frame_unref(frame);
			av_frame_free(&frame);
		}
		// process audio no decoder and direct write to fmt
		if (!is_video && true)
		{
			av_interleaved_write_frame(output_data->m_output_fmt_ctx, packet);
		}
		av_packet_unref(packet);
		av_packet_free(&packet);
	}
	ProcessNullptrPacket(input_data->m_video_decoder_ctx, message_queue.m_video_frame_queue);

	bool control = true;

	while (control && true)
	{
		frame = nullptr;
		result = message_queue.m_video_frame_queue.PopFrameNonBlocking(&frame);
		if (result < 0)
		{
			break;
		}

		sws_frame->pkt_dts = frame->pkt_dts;
		sws_frame->pts = frame->pts;

		sws_scale(sws_ctx, frame->data, frame->linesize, 0, sws_frame->height, sws_frame->data, sws_frame->linesize);

		EncoderFrameData(output_data->m_video_encoder_ctx, sws_frame, message_queue.m_encoder_packet_queue);
		for (;true;)
		{
			AVPacket * pkt = nullptr;
			result = message_queue.m_encoder_packet_queue.PopPacketNonBlocking(&pkt);
			if (result < 0)
			{
				break;
			}
			PrintPacket(output_data->m_output_fmt_ctx, pkt);
			av_interleaved_write_frame(output_data->m_output_fmt_ctx, pkt);
			av_packet_unref(pkt);
			av_packet_free(&pkt);
		}
		av_frame_unref(frame);
		av_frame_free(&frame);
	}
	EncoderFrameData(output_data->m_video_encoder_ctx, nullptr, message_queue.m_encoder_packet_queue);
	
	for (;control && true;)
	{
		AVPacket * pkt = nullptr;
		result = message_queue.m_encoder_packet_queue.PopPacketNonBlocking(&pkt);
		if (result < 0)
		{
			break;
		}
		PrintPacket(output_data->m_output_fmt_ctx, pkt);
		av_interleaved_write_frame(output_data->m_output_fmt_ctx, pkt);
		av_packet_unref(pkt);
		av_packet_free(&pkt);
	}

	if (thd.joinable())
	{
		thd.join();
	}
	av_write_trailer(output_data->m_output_fmt_ctx);

	sws_freeContext(sws_ctx);

	av_frame_unref(sws_frame);
	av_frame_free(&sws_frame);

	return EXIT_SUCCESS;
}