 /* Copyright (C)
  * 2024 ZKLX Optoelectronic Technology Co.LTD
  * All rights reserved.
  */
 /**
  * @file my_gst.cpp
  * @brief 
  * @author xuke
  * @version 1.0
  * @date 2024-05-12
  */

#include "/home/xuke/bin/dbg.h"

#include <gst/gst.h>
#include <gst/rtsp-server/rtsp-server.h>
#include <unistd.h>

#include <chrono>
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <iostream>
#include <mutex>
#include <string>
#include <thread>

#include <opencv2/core.hpp>
#include <opencv2/opencv.hpp>
#include <opencv2/videoio.hpp>
#include <opencv2/imgproc/imgproc.hpp>

using namespace std;

#define FRAME_WIDTH     640
#define FRAME_HEIGHT    512

/*--------------------------------extern--------------------------------------------*/

/*--------------------------------globals-------------------------------------------*/

/*--------------------------------locals--------------------------------------------*/
static GstClockTime g_ctx_timestamp; // running PTS for buffers pushed into appsrc; reset to 0 in media_configure_raw()
static cv::Mat g_render_frame;       // latest captured frame (BGR, FRAME_WIDTH x FRAME_HEIGHT), written by the capture thread
static std::mutex render_mutex;      // guards g_render_frame (capture thread writes, streaming callback reads)

/*--------------------------------funcs---------------------------------------------*/
/**
 * @brief Publish one captured frame for the streaming side.
 * @param data pointer to a FRAME_WIDTH x FRAME_HEIGHT, 3-channel (CV_8UC3)
 *             pixel buffer owned by the caller
 * @return 0 on success
 */
static int image_proccess_func(uint8_t *data)
{
	// Wrap the caller's buffer without copying pixels; valid only while
	// `data` stays alive, which is why we deep-copy below.
	cv::Mat rgb_image(cv::Size(FRAME_WIDTH, FRAME_HEIGHT), CV_8UC3, data);

	//cv::imshow("new image", rgb_image);
	//cv::waitKey(0);

	// Publish for streaming. BUG FIX: the original assignment
	// `g_render_frame = rgb_image;` copied only the Mat header, leaving
	// g_render_frame aliasing the caller's buffer, which the capture thread
	// overwrites outside the mutex (data race / frame tearing). Deep-copy so
	// the published frame is self-contained once the lock is released.
	std::lock_guard<std::mutex> locker(render_mutex);
	rgb_image.copyTo(g_render_frame);

	return 0;
}

static int cap_file_frame_task(const string filename)
{
	while (1) {
		cv::VideoCapture f_capture(filename);

		if (!f_capture.isOpened()) {
			perror("f_capture");
			return -1;
		}

		cv::Mat frame;
		while (f_capture.read(frame)) {
			if (frame.empty()) {
				printf("frame is empty");
				break;
			}

			cv::resize(frame, frame, cv::Size(FRAME_WIDTH, FRAME_HEIGHT));

			//图像处理
			image_proccess_func(frame.data);

			usleep(30*1000);
		}
	}

	return 0;
}

/**
 * @brief Snapshot the latest published frame, render an overlay on it and
 *        wrap the pixels in a newly allocated GstBuffer.
 * @return a GstBuffer owning a copy of the frame (freed with g_free by
 *         GStreamer), or nullptr when no frame has been published yet
 */
static GstBuffer* get_image_raw()
{
	cv::Mat raw_frame;
	{
		// Hold the lock only for the copy; rendering happens on our private
		// snapshot.
		std::lock_guard<std::mutex> locker(render_mutex);
		g_render_frame.copyTo(raw_frame);
	}

	// BUG FIX: before the capture thread publishes its first frame,
	// g_render_frame is empty; cv::circle on an empty Mat asserts and a
	// zero-length buffer would be pushed. Report "no data" instead — the
	// caller (need_data_cb) already handles a null buffer.
	if (raw_frame.empty())
		return nullptr;

	// Render an overlay (center circle).
	cv::circle(raw_frame, cv::Point(FRAME_WIDTH/2, FRAME_HEIGHT/2), 16,
			cv::Scalar(225, 255, 225), 2, 8);

	// Copy the pixels into a glib allocation and wrap it; the buffer takes
	// ownership and releases it with g_free.
	gsize frame_size = raw_frame.total() * raw_frame.elemSize();
	gpointer data = g_malloc(frame_size);
	memcpy(data, raw_frame.data, frame_size);

	return gst_buffer_new_wrapped(data, frame_size);
}

/**
 * @brief appsrc "need-data" callback: fetch the current frame, timestamp it
 *        and push it into the pipeline.
 * @param appsrc        the appsrc element requesting data
 * @param unused        requested size hint (ignored)
 * @param ctx_timestamp running PTS, advanced by one frame duration per push
 */
static void need_data_cb(GstElement * appsrc, guint unused, GstClockTime *ctx_timestamp)
{
	// Grab the latest rendered frame.
	GstBuffer *buffer = get_image_raw();
	if (!buffer) {
		std::cout << "buffer is NULL!\n";
		return;
	}

	/* Timestamp the buffer and advance the clock by one frame (1/30 s).
	 * (The original comment claimed "1/2 second", which contradicted the
	 * 1/30 duration actually set below.) */
	GST_BUFFER_PTS(buffer) = *ctx_timestamp;
	GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale_int(1, GST_SECOND, 30);
	*ctx_timestamp += GST_BUFFER_DURATION(buffer);

	/* Push the buffer into the appsrc; the signal takes its own reference,
	 * so we drop ours afterwards. */
	GstFlowReturn ret;
	g_signal_emit_by_name(appsrc, "push-buffer", buffer, &ret);
	gst_buffer_unref(buffer);
	if (ret != GST_FLOW_OK)
		g_printerr("need_data_cb: push-buffer failed (%d)\n", ret);
}

/**
 * @brief 流通道建立连接时配置
 * @param factory
 * @param media
 * @param user_data
 */
/**
 * @brief "media-configure" callback: set up the appsrc element when a client
 *        connects to the stream.
 * @param factory   the media factory that created the media (unused)
 * @param media     the media object being configured
 * @param user_data unused
 */
static void media_configure_raw(GstRTSPMediaFactory *factory, GstRTSPMedia *media, gpointer user_data)
{
	GstElement *element = gst_rtsp_media_get_element(media); // pipeline built from the launch string
	GstElement *appsrc = gst_bin_get_by_name_recurse_up(GST_BIN(element), "rawsrc");
	if (!appsrc) {
		// Defensive: should not happen unless the launch string changes.
		g_printerr("media_configure_raw: element 'rawsrc' not found\n");
		gst_object_unref(element);
		return;
	}

	/* this instructs appsrc that we will be dealing with timed buffers */
	gst_util_set_object_arg(G_OBJECT(appsrc), "format", "time");

	/* Configure the caps of the video.
	 * BUG FIX 1: framerate was "0, 30" (a 0 fps fraction) although buffers
	 * are pushed with a 1/30 s duration — advertise 30/1.
	 * BUG FIX 2: g_object_set() does not take ownership of the caps; the
	 * original leaked them on every client connection. */
	GstCaps *caps = gst_caps_new_simple("video/x-raw",
			"format",    G_TYPE_STRING,     "BGR",
			"width",     G_TYPE_INT,        FRAME_WIDTH,
			"height",    G_TYPE_INT,        FRAME_HEIGHT,
			"framerate", GST_TYPE_FRACTION, 30, 1,
			NULL);
	g_object_set(G_OBJECT(appsrc), "caps", caps, NULL);
	gst_caps_unref(caps);

	/* (The original attached a no-op g_object_set_data_full() with a null
	 * pointer here; it stored nothing and was removed.) */

	/* Restart the stream clock and install the callback that supplies
	 * buffers on demand. */
	g_ctx_timestamp = 0;
	g_signal_connect(appsrc, "need-data", (GCallback)need_data_cb, &g_ctx_timestamp);

	gst_object_unref(appsrc);
	gst_object_unref(element);
}

/**
 * @brief 视频逐帧推流使用模板
 * @param argc
 * @param argv[]
 * @return 0
 */
int my_gst_appsrc_start(int argc, const char *argv[])
{
	gst_init(nullptr, nullptr);

	/* create a server instance */
	GstRTSPServer *server = gst_rtsp_server_new(); //创建rtsp server
	gst_rtsp_server_set_service (server, "8554");  //自定义指定port, 默认为8554

	// raw factory
	std::string pipeline_raw = "( appsrc name=rawsrc ! videoconvert ! video/x-raw,format=I420 ! x264enc ! rtph264pay name=pay0 pt=96 )";
	GstRTSPMediaFactory *factory_raw = gst_rtsp_media_factory_new();
	gst_rtsp_media_factory_set_launch(factory_raw, pipeline_raw.c_str());
	g_signal_connect(factory_raw, "media-configure", (GCallback)media_configure_raw, nullptr);
	gst_rtsp_media_factory_set_shared(factory_raw, TRUE);
	// live factory

	/*获取rtsp_server上的挂载点*/
	GstRTSPMountPoints *mounts_raw = gst_rtsp_server_get_mount_points(server);
	gst_rtsp_mount_points_add_factory(mounts_raw, "/raw", factory_raw);
	g_object_unref(mounts_raw);

	//分离server线程,等待连接
	gst_rtsp_server_attach(server, nullptr);
	g_print("stream ready at rtsp://localhost:8554/raw\n");

	/*使用 Glib 中的 loop 保持线程*/
	GMainLoop *loop = g_main_loop_new(nullptr, FALSE);
	g_main_loop_run(loop);
	return 0;
}

/**
 * @brief gst推流模板
 * @param argc
 * @param argv[]
 * @return 0-success
 */
/**
 * @brief gst push-streaming template entry point: start the capture thread,
 *        then run the RTSP server thread for the process lifetime.
 * @param argc forwarded to my_gst_appsrc_start (currently unused there)
 * @param argv forwarded to my_gst_appsrc_start
 * @return 0 on success (normally never returns)
 */
int main(int argc, const char *argv[])
{
	// Capture thread: loops over ./test_file and publishes frames.
	// Detached because it runs for the whole process lifetime.
	thread cap_thread(cap_file_frame_task, "./test_file");
	cap_thread.detach();

	// Give the capture thread a moment to publish the first frame before
	// clients can connect.
	sleep(1);

	// Server thread: my_gst_appsrc_start blocks in g_main_loop_run, so
	// joining it keeps main alive — this replaces the original
	// detach + infinite sleep(64) loop with the same observable behavior.
	thread gst_thread(my_gst_appsrc_start, argc, argv);
	gst_thread.join();

	return 0;
}

