#include "FFMpegRecordInterfaceWindow.h"

FFMpegRecordInterfaceWindow::FFMpegRecordInterfaceWindow(QWidget* parent)
	: QWidget(parent)
{
	// Configure the window chrome first, then build the child widgets.
	resize(640, 480);
	setWindowTitle("ffmpeg rtsp转rtmp推流");
	setWindowIcon(QIcon("images/opencv.png"));
	initViews();
}
void FFMpegRecordInterfaceWindow::initViews() {
	QVBoxLayout* vLayout = new QVBoxLayout(this);
	etRtsp = new EditText(this);
	etRtsp->setEnabled(true);
	etRtsp->setFixedHeight(30);
	etRtsp->setPlaceholderText("请输入rtsp拉流地址");
	etRtsp->setText("E:/BaiduNetdiskDownload/video_test_files/test.flv");

	etRtmp = new EditText(this);
	etRtmp->setEnabled(true);
	etRtmp->setFixedHeight(30);
	etRtmp->setPlaceholderText("请输入推流地址");
	etRtmp->setText("rtmp://124.223.218.248:1935/live/test");

	Button* btnStartPusher = new Button(this);
	btnStartPusher->setText("rtsp转rtmp并推流");

	vLayout->addWidget(etRtsp);
	vLayout->addWidget(etRtmp);
	vLayout->addWidget(btnStartPusher);
	vLayout->setAlignment(Qt::AlignTop);

	//开始推流
	connect(btnStartPusher, &Button::clicked, [=]() {
		if (etRtsp->text().isEmpty())
		{
			QMessageBox::warning(this, "警告", "拉流地址不能为空");
		}
		else if (etRtmp->text().isEmpty())
		{
			QMessageBox::warning(this, "警告", "推流地址不能为空");
		}
		else//开始推流
		{
			startPusher();
		}
		});


}
void FFMpegRecordInterfaceWindow::startPusher() {
	std::string rtspUrl = etRtsp->text().toStdString();
	std::string rtmpUrl = etRtmp->text().toStdString();
	const char* inUrl = rtspUrl.c_str();
	const char* outUrl = rtmpUrl.c_str();

	//初始化网络库
	avformat_network_init();

	//创建AVFormatContext并打开文件流
	AVFormatContext* ictx = NULL;

	//设置rtsp的最大延时及rtsp使用的底层协议
	AVDictionary* opts = NULL;
	char key[] = "max_delay";
	char value[] = "500";
	av_dict_set(&opts, key, value, 0);
	char key2[] = "rtsp_transport";
	char value2[] = "tcp";
	av_dict_set(&opts, key2, value2, 0);
	//打开rtsp流，解封装头文件
	int ret = avformat_open_input(&ictx, inUrl, NULL, &opts);
	if (ret != 0) {
		FError(ret);
	}
	qDebug() << "打开rtsp流成功";

	//获取音视频流信息
	ret = avformat_find_stream_info(ictx, NULL);
	if (ret < 0) {
		FError(ret);
	}
	qDebug() << "音视频流信息获取成功";

	//输入的媒体流已经打开成功了，流信息也获取到了，接下来应该获取
	//输出流的信息
	//创建输出流上下文
	AVFormatContext* octx = NULL;
	ret = avformat_alloc_output_context2(&octx, NULL, "flv", outUrl);
	if (ret < 0) {
		FError(ret);
	}
	qDebug() << "输出流上下文创建成功";

	//创建AVStream并把输入流信息copy到输出流信息的参数中
	for (int i = 0;i < ictx->nb_streams;i++) {
		//创建输出流
		const AVCodec* codec = avcodec_find_decoder(ictx->streams[i]->codecpar->codec_id);
		AVStream* out = avformat_new_stream(octx, codec);
		if (!out) {
			FError(0);
		}
		//将输入流的参数copy到输出流的参数中
		avcodec_parameters_copy(out->codecpar, ictx->streams[i]->codecpar);
		out->codecpar->codec_tag = 0;
	}

	//rtmp推流
	//打开io
	ret = avio_open(&octx->pb, outUrl, AVIO_FLAG_WRITE);
	if (ret < 0) {
		FError(ret);
	}
	//写入头信息
	ret = avformat_write_header(octx, NULL);
	if (ret < 0) {
		FError(ret);
	}
	qDebug() << "写入头信息成功";

	//死循环推流
	AVPacket pkt;
	long long startTime = av_gettime();
	for (;;) {
		//读取压缩帧
		ret = av_read_frame(ictx, &pkt);
		if (ret != 0 || pkt.size <= 0)
		{
			continue;
		}
		qDebug() << "pkt.pts=" << pkt.pts;
		//计算转换pts dts
		AVRational itime = ictx->streams[pkt.stream_index]->time_base;
		AVRational otime = octx->streams[pkt.stream_index]->time_base;
		pkt.pts = av_rescale_q_rnd(pkt.pts, itime, otime, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_NEAR_INF));
		pkt.dts = av_rescale_q_rnd(pkt.pts, itime, otime, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_NEAR_INF));
		pkt.duration = av_rescale_q_rnd(pkt.duration, itime, otime, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_NEAR_INF));
		pkt.pos = -1;

		////视频帧推送速度(如果是rtsp流转rtmp，则不需要延时，因为rtsp本身已经延时过了)
		//if (ictx->streams[pkt.stream_index]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
		//{
		//	AVRational tb = ictx->streams[pkt.stream_index]->time_base;
		//	//已经过去的时间
		//	long long now = av_gettime() - startTime;
		//	long long dts = 0;
		//	dts = pkt.dts * (1000 * 1000 * r2d(tb));
		//	if (dts > now)
		//		av_usleep(dts - now);
		//}
		//把流真正的推送到服务器，其内部会自动释放pkt.buff
		ret = av_interleaved_write_frame(octx, &pkt);
	}

}

void FFMpegRecordInterfaceWindow::FError(int errNum) {
	char buf[1024] = { 0 };
	av_strerror(errNum, buf, sizeof(buf));
	qDebug() << "error=" << buf;
	return;
}
double FFMpegRecordInterfaceWindow::r2d(AVRational r) {
	// Convert an AVRational to double; degenerate rationals map to 0.0.
	if (r.num == 0 || r.den == 0) {
		return 0.;
	}
	return static_cast<double>(r.num) / static_cast<double>(r.den);
}

void FFMpegRecordInterfaceWindow::yuvTest() {
	// Demo: split a packed YUV-like buffer into separate Y/U/V planes
	// with memcpy, logging the buffers before and after the copy.
	char data[] = { 'Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','U','U','U','U','U','V','V','V','V','V' };
	qDebug() << "原始数据:";
	for (int i = 0; i < 20; i++) {
		qDebug() << "data->i=" << data[i];
	}
	const int y_length = 10;
	const int u_length = 5;
	const int v_length = 5;
	char* y_data = new char[y_length];
	char* u_data = new char[u_length];
	char* v_data = new char[v_length];
	// Fill the planes with the placeholder character '0' so the
	// "before" dump is deterministic.
	memset(y_data, '0', y_length);
	memset(u_data, '0', u_length);
	memset(v_data, '0', v_length);
	qDebug() << "copy前的数据：";
	for (int i = 0; i < y_length; i++) {
		qDebug() << "y_data->i=" << y_data[i];
	}
	for (int i = 0; i < u_length; i++) {
		qDebug() << "u_data->i=" << u_data[i];
	}
	for (int i = 0; i < v_length; i++) {
		qDebug() << "v_data->i=" << v_data[i];
	}
	// Copy y_length bytes into y_data starting at offset 0,
	// u_length bytes into u_data starting at offset y_length,
	// v_length bytes into v_data starting at offset y_length + u_length.
	memcpy(y_data, data, y_length);
	memcpy(u_data, data + y_length, u_length);
	memcpy(v_data, data + y_length + u_length, v_length);
	qDebug() << "copy后的数据：";
	for (int i = 0; i < y_length; i++) {
		qDebug() << "y_data->i=" << y_data[i];
	}
	for (int i = 0; i < u_length; i++) {
		qDebug() << "u_data->i=" << u_data[i];
	}
	for (int i = 0; i < v_length; i++) {
		qDebug() << "v_data->i=" << v_data[i];
	}
	// fix: memory obtained with new[] must be released with delete[];
	// the original called free() on it, which is undefined behavior.
	// (delete[] on a null pointer is safe, so no guards are needed.)
	delete[] y_data;
	delete[] u_data;
	delete[] v_data;
}
FFMpegRecordInterfaceWindow::~FFMpegRecordInterfaceWindow()
{
	// Intentionally empty: the child widgets are created with `this` as
	// their parent (see initViews), so Qt's parent-child ownership
	// releases them when this window is destroyed.
}
