#include "OpenCVRtspToRtmpWindow.h"

#include <stdexcept>

OpenCVRtspToRtmpWindow::OpenCVRtspToRtmpWindow(QWidget* parent)
	: QWidget(parent)
{
	// Fixed-size window hosting the rtsp -> rtmp push-stream controls.
	resize(QSize(480, 480));
	setWindowTitle("利用OpenCV将rtsp转rtmp推流到服务器");
	setWindowIcon(QIcon("images/opencv.png"));
	initViews();
}

/// Build the window's widget tree: a source row (edit box + file picker),
/// the destination url edit box, and the "start pushing" button, and wire
/// up their click handlers.
void OpenCVRtspToRtmpWindow::initViews() {
	// Root layout: constructing it with `this` installs it on the window.
	QVBoxLayout* vLayout = new QVBoxLayout(this);
	// The row layout must be parentless: it is handed to vLayout via
	// addLayout() below, which takes ownership. The original passed `this`
	// here and additionally called setLayout(hLayout), which fought with
	// vLayout over the window's single top-level layout and produced
	// "QWidget::setLayout: ... already has a layout" warnings.
	QHBoxLayout* hLayout = new QHBoxLayout();
	et = new EditText(this);
	et->setEnabled(true);
	et->setFixedHeight(30);
	et->setPlaceholderText("请输入数据源，如：文件，rtmp流、rtsp流");
	et->setText("E:/BaiduNetdiskDownload/video_test_files/test.mp4");
	Button* btnChoiceBtn = new Button(this);
	btnChoiceBtn->setText("请选择推流文件");
	hLayout->addWidget(et);
	hLayout->addWidget(btnChoiceBtn);
	hLayout->setAlignment(Qt::AlignTop);

	etDst = new EditText(this);
	etDst->setEnabled(true);
	etDst->setFixedHeight(30);
	etDst->setPlaceholderText("请输入推流地址");
	etDst->setText("rtmp://124.223.218.248:1935/live/test");

	Button* btnStartPusher = new Button(this);
	btnStartPusher->setText("开始推流");

	vLayout->addLayout(hLayout);
	vLayout->addWidget(etDst);
	vLayout->addWidget(btnStartPusher);
	vLayout->setAlignment(Qt::AlignTop);

	// Pick a source file with a dialog and drop its path into the edit box.
	connect(btnChoiceBtn, &Button::clicked, this, [=]() {
		DialogUtils::showVideo(this, [=](QString filePath) {
			et->setText(filePath);
			});
		});
	// Start pushing after validating both edit boxes. `this` is passed as
	// the connection context so the slot is disconnected automatically when
	// the window is destroyed (the original omitted it). An unused local
	// PushStreamBean that the original lambda captured has been removed.
	connect(btnStartPusher, &Button::clicked, this, [=]() {
		if (et->text().isEmpty())
		{
			QMessageBox::warning(this, "警告", "数据源不能为空");
		}
		else if (etDst->text().isEmpty())
		{
			QMessageBox::warning(this, "警告", "推流地址不能为空");
		}
		else
		{
			// NOTE(review): this blocks the GUI thread until the stream
			// ends — the worker-thread code was commented out upstream.
			// Consider moving startPusher() onto a worker thread.
			startPusher();
		}
		});
}

/// <summary>
/// 开始推流
/// </summary>
void OpenCVRtspToRtmpWindow::startPusher() {
	//这里必须要利用std::string中转一下，不然如果直接toStdString().c_str()会取出空值
	std::string inUrl2 = et->text().toStdString();
	std::string outUrl2 = etDst->text().toStdString();
	const char* inUrl = inUrl2.c_str();
	const char* outUrl = outUrl2.c_str();
	//const char* inUrl = pb->inUrl;
	//const char* outUrl = pb->outUrl;
	qDebug() << "推流数据源=" << inUrl;
	qDebug() << "推流地址=" << outUrl;
	qDebug() << "子线程执行了";
	//初始化ffmpeg网络库
	avformat_network_init();
	//1.利用opencv读取rtsp流（读取出的是BGR的源数据）
	//2.将BGR24转为YUV420P
	//3.利用ffmpeg对YUV420P的数据进行编码
	//4.将编码好的数据推送的rtmp服务器

	VideoCapture  cam;
	Mat frame;
	namedWindow("Video");
	//初始化像素格式转换上下文
	SwsContext* vsc = NULL;
	//输出的数据结构（）
	AVFrame* yuv = NULL;
	//编码器上下文
	AVCodecContext* vc = NULL;
	//格式封装器
	AVFormatContext* ic = NULL;
	qDebug() << "开始打开文件";
	try {
		//使用opencv打开rtsp相机
		bool isOP = cam.open(inUrl);
		if (!cam.isOpened()) {
			throw std::exception("cam open failed!");
		}
		qDebug() << "打开相机成功";
		int inWidth = cam.get(CAP_PROP_FRAME_WIDTH);//视频宽
		int inHeight = cam.get(CAP_PROP_FRAME_HEIGHT);//视频高
		int fps = cam.get(CAP_PROP_FPS);//视频fps
		//初始化格式上下文
		vsc = sws_getCachedContext(vsc,
			inWidth, inHeight, AV_PIX_FMT_BGR24,//源视频的宽、高、像素格式
			inWidth, inHeight, AV_PIX_FMT_YUV420P,//目标视频的宽、高、像素格式
			SWS_BICUBIC,//尺寸变化方法
			0, 0, 0
		);
		if (!vsc) {
			throw std::exception("sws_getCachedContext failed!");
		}
		//初始化输出数据结构
		yuv = av_frame_alloc();
		yuv->format = AV_PIX_FMT_YUV420P;
		yuv->width = inWidth;
		yuv->height = inHeight;
		yuv->pts = 0;
		//给yuv.data分配空间
		int ret = av_frame_get_buffer(yuv, 32);
		if (ret != 0)
		{
			char buf[1024] = { 0 };
			av_strerror(ret, buf, sizeof(buf) - 1);
			throw std::exception(buf);
		}

		//初始化编码器上下文
		//找编码器
		const AVCodec* codec = avcodec_find_encoder(AV_CODEC_ID_H264);
		if (!codec)
		{
			throw std::exception("Can‘t find h264 encoder!");
		}
		//创建编码器上下文
		vc = avcodec_alloc_context3(codec);
		if (!vc) {
			throw std::exception("acodec_alloc_context3 failed!");
		}
		//配置编码器上下文参数
		vc->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;//设值全局参数，直播推流可以不设置
		vc->codec_id = codec->id;
		vc->thread_count = 8;
		vc->bit_rate = 200 * 1024 * 8;//压缩后每秒视频的bit位大小，200kb每秒
		vc->width = inWidth;
		vc->height = inHeight;
		vc->time_base = { 1,fps };
		vc->framerate = { fps,1 };
		//每组画面的大小，也就是会所多少帧会出现一个关键帧
		vc->gop_size = 50;
		vc->max_b_frames = 0;//不要b帧
		vc->pix_fmt = AV_PIX_FMT_YUV420P;
		//打开编码器上下文
		ret = avcodec_open2(vc, NULL, NULL);
		if (ret != 0)
		{
			char buf[1024] = { 0 };
			av_strerror(ret, buf, sizeof(buf) - 1);
			throw std::exception(buf);
		}
		qDebug() << "avcodec_open2 success!";

		//输出封装器和视频流配置
		//创建输出封装器上下文
		ret = avformat_alloc_output_context2(&ic, 0, "flv", outUrl);
		if (ret != 0)
		{
			char buf[1024] = { 0 };
			av_strerror(ret, buf, sizeof(buf) - 1);
			throw std::exception(buf);
		}
		//添加视频流
		AVStream* vs = avformat_new_stream(ic, NULL);
		if (!vs) {
			throw std::exception("avformat_new_stream failed");
		}
		vs->codecpar->codec_tag = 0;
		//从编码器复制参数
		avcodec_parameters_from_context(vs->codecpar, vc);

		//打开rtmp网络输出流
		ret = avio_open(&ic->pb, outUrl, AVIO_FLAG_WRITE);
		if (ret != 0)
		{
			char buf[1024] = { 0 };
			av_strerror(ret, buf, sizeof(buf) - 1);
			throw std::exception(buf);
		}
		//写入封装头
		ret = avformat_write_header(ic, NULL);
		if (ret != 0)
		{
			char buf[1024] = { 0 };
			av_strerror(ret, buf, sizeof(buf) - 1);
			throw std::exception(buf);
		}

		AVPacket pack;
		memset(&pack, 0, sizeof(pack));
		int vpts = 0;
		for (;;)
		{
			//读取rtsp视频帧，解码视频帧
			if (!cam.grab())
			{
				continue;
			}
			//yuv转rgb
			if (!cam.retrieve(frame))
			{
				continue;
			}
			imshow("video", frame);
			waitKey(1);


			//rgb to yuv
			//输入的数据结构
			uint8_t* indata[AV_NUM_DATA_POINTERS] = { 0 };
			indata[0] = frame.data;
			int insize[AV_NUM_DATA_POINTERS] = { 0 };
			//一行（宽）数据的字节数
			insize[0] = frame.cols * frame.elemSize();
			int h = sws_scale(vsc,
				indata,//源数据
				insize,//w*h中的w，字节为单位
				0,
				frame.rows,//w*h中的h
				yuv->data,//目标数据
				yuv->linesize//
			);
			if (h <= 0) {
				continue;
			}

			//h264编码
			yuv->pts = vpts;
			vpts++;
			ret = avcodec_send_frame(vc, yuv);
			if (ret != 0) {
				continue;
			}
			ret = avcodec_receive_packet(vc, &pack);
			if (ret != 0 || pack.size > 0)
			{
				//cout << "*" << pack.size << flush;
			}
			else
			{
				continue;
			}
			//推流
			//av_rescale_q把pack的时间基转为vs->time_base的时间基
			pack.pts = av_rescale_q(pack.pts, vc->time_base, vs->time_base);
			pack.dts = av_rescale_q(pack.dts, vc->time_base, vs->time_base);
			pack.duration = av_rescale_q(pack.duration, vc->time_base, vs->time_base);
			ret = av_interleaved_write_frame(ic, &pack);
			if (ret == 0) {
				qDebug() << "#" << flush;
			}
		}



	}
	catch (std::exception& ex) {
		if (cam.isOpened())
			cam.release();
		if (vsc) {
			sws_freeContext(vsc);
			vsc = NULL;
		}
		if (vc)
		{
			avio_closep(&ic->pb);
			avcodec_free_context(&vc);
		}
	}


	//t.detach();//线程分类，不阻塞主线程
	//t.join();//等待子线程执行完成，阻塞主线程
}

/// Log the human-readable text for an FFmpeg error code via qDebug().
/// @param code  negative AVERROR value returned by an FFmpeg call
void OpenCVRtspToRtmpWindow::printFFError(int code) {
	char buf[1024] = { 0 };
	av_strerror(code, buf, sizeof(buf));
	// qDebug() appends a newline itself; the original's trailing bare
	// `endl` (deprecated Qt5 global, Qt::endl in Qt6) printed a spurious
	// blank line.
	qDebug() << buf;
}

double OpenCVRtspToRtmpWindow::r2d(AVRational r)
{
	// Map a degenerate rational (zero numerator or denominator) to 0.0
	// instead of dividing by zero.
	if (r.num == 0 || r.den == 0)
		return 0.;
	return (double)r.num / (double)r.den;
}

OpenCVRtspToRtmpWindow::~OpenCVRtspToRtmpWindow()
{
	// Nothing to do: child widgets (et, etDst, buttons, layouts) are
	// released by Qt's parent-child ownership.
}

