#include "rgb2mp4thread.h"
#include <QDebug>


/// <summary>
/// 线程启动后会执行这个方法
/// </summary>
void RGB2MP4Thread::run()
{
	qDebug() << "运行run方法";
	//打开RGB文件，如果打开失败就报异常
	FILE* fp = fopen(inFile.toStdString().c_str(), "rb");
	if (!fp)
	{
		qDebug() << inFile << " open failed!";
		return;
	}

	//1.创建加码器
	const AVCodec* codec = avcodec_find_decoder(AV_CODEC_ID_H264);
	if (!codec)
	{
		qDebug() << " avcodec_find_encoder AV_CODEC_ID_H264 failed!";
		return;
	}
	//创建解码器上下文
	AVCodecContext* c = avcodec_alloc_context3(codec);
	if (!c)
	{
		qDebug() << " avcodec_alloc_context3  failed!";
		return;
	}
	//压缩比特率
	c->bit_rate = 400000000;
	c->width = width;
	c->height = height;
	c->time_base = { 1, fps };
	c->framerate = { fps, 1 };

	//画面组大小，关键帧
	c->gop_size = 50;

	//不要b帧
	c->max_b_frames = 0;

	c->pix_fmt = AV_PIX_FMT_YUV420P;
	c->codec_id = AV_CODEC_ID_H264;
	c->thread_count = 8;

	//全局的编码信息
	c->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;

	int ret = avcodec_open2(c, codec, NULL);
	if (ret < 0)
	{
		qDebug() << " avcodec_open2  failed!";
		return;
	}
	qDebug() << "avcodec_open2 success!";

	//2 创建封装器上下文（输出），输出是mp4
	AVFormatContext* oc = NULL;
	avformat_alloc_output_context2(&oc, 0, 0, outFile.toStdString().c_str());

	//3 添加视频流
 	AVStream* st = avformat_new_stream(oc, NULL);
	//st->codec = c;
	st->id = 0;
	st->codecpar->codec_tag = 0;
	avcodec_parameters_from_context(st->codecpar, c);//将解码器参数复制到流中

	//4 rgb to yuv
	SwsContext* ctx = NULL;
	ctx = sws_getCachedContext(ctx,
		width, height, AV_PIX_FMT_BGRA,
		width, height, AV_PIX_FMT_YUV420P,
		SWS_BICUBIC,
		NULL, NULL, NULL
	);
	//输入空间
	unsigned char* rgb = new unsigned char[width * height * 4];

	//输出的空间
	AVFrame* yuv = av_frame_alloc();
	yuv->format = AV_PIX_FMT_YUV420P;
	yuv->width = width;
	yuv->height = height;
	ret = av_frame_get_buffer(yuv, 32);

	if (ret < 0)
	{
		qDebug() << " av_frame_get_buffer  failed!";
		return;
	}


	//5 打开io流
	ret = avio_open(&oc->pb, outFile.toStdString().c_str(), AVIO_FLAG_WRITE);
	if (ret < 0)
	{
		qDebug()<<" avio_open  failed!";
		return;
	}
	ret = avformat_write_header(oc, NULL);//写入文件头
	if (ret < 0)
	{
		qDebug() << " avformat_write_header  failed!";
		return;
	}
	int p = 0;
	//循环读取rgb文件中的数据，并将数据转换为yuv格式，然后送入编码器解码成AVPacket。然后存入mp4封装文件
	for (;;)
	{
		int len = fread(rgb, 1, width * height * 4, fp);
		if (len <= 0)
		{
			break;
		}
		uint8_t* indata[AV_NUM_DATA_POINTERS] = { 0 };
		indata[0] = rgb;
		int inlinesize[AV_NUM_DATA_POINTERS] = { 0 };
		inlinesize[0] = width * 4;

		int h = sws_scale(ctx, indata, inlinesize, 0, height,
			yuv->data, yuv->linesize
		);
		if (h <= 0)
			break;

		//6 encode frame
		yuv->pts = p;
		//yuv->pict_type = AV_PICTURE_TYPE_I;
		p = p + 3600;
		ret = avcodec_send_frame(c, yuv);
		if (ret != 0)
		{
			continue;
		}
		AVPacket pkt;
		av_init_packet(&pkt);
		ret = avcodec_receive_packet(c, &pkt);
		if (ret != 0)
			continue;

		//av_write_frame(oc, &pkt);
		//av_packet_unref(&pkt);
		av_interleaved_write_frame(oc, &pkt);//此方法会自动释放pkt的内存，以及数据的内存

		qDebug()<< "<" << pkt.size << ">";
	}

	//写入视频索引（尾部）
	av_write_trailer(oc);

	//关闭视频输出io
	avio_close(oc->pb);

	//清理封装输出上下文
	avformat_free_context(oc);

	//关闭编码器
	avcodec_close(c);

	//清理编码器上下文
	avcodec_free_context(&c);

	//清理视频重采样上下文
	sws_freeContext(ctx);


	qDebug() << "======================end=========================";

	delete rgb;
}
/// Launches the worker thread; QThread then invokes run() on that thread.
void RGB2MP4Thread::Start()
{
	qDebug() << "开始调用线程";
	start();	// asynchronous: returns immediately, run() executes on the new thread
}
/// Blocks the caller until the worker thread has finished.
/// NOTE(review): this only waits — it never signals run() to stop early, so it
/// returns only once the input file has been fully processed; confirm that is
/// the intended contract.
void RGB2MP4Thread::Stop()
{
	wait();
}

/// Default constructor — no setup needed; members are configured elsewhere.
RGB2MP4Thread::RGB2MP4Thread() {}

/// Destructor — nothing owned here; run()'s cleanup releases its resources.
RGB2MP4Thread::~RGB2MP4Thread() {}
