#include "writer_gstreamer_impl.h"

#include <cmath>
#include <cstring>
#include <fstream>
#include <iostream>
#include <string>

extern "C" {
#include <assert.h>
#include <errno.h>
#include <fcntl.h>
#include <getopt.h>
#include <pthread.h>
#include <signal.h>
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/prctl.h>
#include <time.h>
#include <unistd.h>

#include <sys/types.h>
#include <sys/stat.h>
#include <sys/ioctl.h>
#include <linux/serial.h>
#include <linux/videodev2.h>
#include <sys/mman.h>

#include <gst/gst.h>

#include "sample_comm.h"
}
// C-style helpers built on the Rockchip MPI sample API and GStreamer.
// Bundle of Rockchip MPI sample contexts. Only `venc` is configured in this
// file (see self_setup()); the remaining members are zeroed and unused here.
typedef struct _rkMpiCtx {
	SAMPLE_VI_CTX_S vi;      // video input context (unused in this file)
	SAMPLE_VO_CTX_S vo;      // video output context (unused in this file)
	SAMPLE_VPSS_CTX_S vpss;  // video processing context (unused in this file)
	SAMPLE_VENC_CTX_S venc;  // video encoder context — set up by self_setup()
	SAMPLE_RGN_CTX_S rgn[2]; // region/OSD contexts (unused in this file)
} SAMPLE_MPI_CTX_S;

static SAMPLE_MPI_CTX_S *g_ctx;   // encoder context: allocated in self_setup(), freed in self_release()
static bool quit = false;         // set true to stop the encoder reader thread / reject writes

#define USB_WIDTH (1920)          // NOTE(review): USB_* / CAPTURE_BUF_LEN_usb appear unused here — confirm
#define USB_HEIGHT (1080)
#define CAPTURE_BUF_LEN_usb (4)
int video_width = 1920;           // encoder resolution, overwritten by self_setup()
int video_height = 1080;
// static COMPRESS_MODE_E g_compressMode = COMPRESS_AFBC_16x16;
static COMPRESS_MODE_E g_compressMode = COMPRESS_MODE_NONE; // raw-frame compression mode for SendStream
MB_POOL pool = MB_INVALID_POOLID; // DMA buffer pool feeding the encoder
static gint num_samples = 0;      // frames pushed so far; drives the PTS/DTS in self_push_data()
char filePath[500] = {0};         // output file path (extension may be rewritten for H.265)
int dts = 0;                      // NOTE(review): reset in self_setup() but otherwise unused — confirm
double fps_use = 30;              // target frame rate used for timestamping and VENC config
bool ish265 = false;              // true: H.265/MP4, false: H.264
pthread_t p_gstinit;              // thread running self_gstMp4_init()
static GstElement* app_source = nullptr; // appsrc fed by self_push_data()
static GMainLoop* loop = nullptr;        // GLib main loop run by the gst thread
static GstElement* pipeline = nullptr;   // appsrc -> parse -> mux -> filesink
static guint source_id = 0;              // legacy g_timeout_add id (currently unused)
int gstinitFlag = 0;              // 0 while self_gstMp4_init() is still initializing; 1 when done
// SIGUSR2 handler installed by self_venc_get_stream(): terminates the thread
// it runs on.
// NOTE(review): printf()/pthread_exit() are not async-signal-safe — confirm
// this signal is only ever delivered to the reader thread via pthread_kill.
void signalHandler(int sig) {
	printf("Signal usbgetimg, exiting...\n");
	pthread_exit(NULL);
}

// Forward declaration — full teardown is defined near the end of this file.
void self_release();

// SIGINT handler: tears down all resources and exits the process.
// NOTE(review): calling self_release()/exit() from a signal handler is not
// async-signal-safe; acceptable only as a last-resort shutdown path.
static void sigterm_handler(int sig) {
	fprintf(stderr, "signal %d\n", sig);
	gstinitFlag = 1;
	self_release();
	exit(-1);
}

// Wraps one encoded access unit in a GstBuffer, stamps PTS/DTS from the
// running sample counter (assuming a constant fps_use frame rate) and pushes
// it into the appsrc element.
// Returns 0 on success, -1 on failure.
static int self_push_data(unsigned char* data, int size) {
	// Guard against being called before the pipeline exists or with bad input.
	if (app_source == nullptr || data == nullptr || size <= 0) {
		std::cerr << "Error pushing buffer to appsrc" << std::endl;
		return -1;
	}

	GstBuffer* buffer = gst_buffer_new_allocate(nullptr, size, nullptr);
	if (buffer == nullptr) {
		std::cerr << "Error pushing buffer to appsrc" << std::endl;
		return -1;
	}

	// Copy the encoded bitstream into the buffer.
	GstMapInfo map;
	gst_buffer_map(buffer, &map, GST_MAP_WRITE);
	memcpy(map.data, data, size);
	gst_buffer_unmap(buffer, &map);

	// Constant-frame-rate timestamps derived from the frame counter.
	GstClockTime timestamp = num_samples * (GST_SECOND / fps_use);
	GST_BUFFER_PTS(buffer) = timestamp;
	GST_BUFFER_DTS(buffer) = timestamp;
	GST_BUFFER_DURATION(buffer) = GST_SECOND / fps_use;

	// BUGFIX: initialize ret — the original read it uninitialized when the
	// signal emission left it untouched (e.g. signal not connected).
	GstFlowReturn ret = GST_FLOW_ERROR;
	g_signal_emit_by_name(app_source, "push-buffer", buffer, &ret);

	gst_buffer_unref(buffer);

	if (ret != GST_FLOW_OK) {
		std::cerr << "Error pushing buffer to appsrc" << std::endl;
		return -1;
	}
	num_samples++;

	return 0;
}

// Encoder reader thread: polls the VENC channel for encoded packets and
// forwards each one to the GStreamer appsrc via self_push_data().
// Exits when `quit` is set or the configured loop count is reached.
static void *self_venc_get_stream(void *pArgs) {
	SAMPLE_VENC_CTX_S *ctx = (SAMPLE_VENC_CTX_S *)(pArgs);
	RK_S32 s32Ret = RK_FAILURE;
	void *pData = RK_NULL;
	RK_S32 loopCount = 0;
	signal(SIGUSR2, signalHandler); // allows self_release() to interrupt the thread
	while (!quit) {
		s32Ret = SAMPLE_COMM_VENC_GetStream(ctx, &pData);
		if (s32Ret == RK_SUCCESS) {
			// Stop once the requested number of frames has been captured
			// (s32loopCount <= 0 means "run until released").
			if (ctx->s32loopCount > 0 && loopCount >= ctx->s32loopCount) {
				SAMPLE_COMM_VENC_ReleaseStream(ctx);
				quit = true;
				break;
			}

			PrintStreamDetails(ctx->s32ChnId, ctx->stFrame.pstPack->u32Len);
			if (quit)
			{
				// BUGFIX: release BEFORE leaving the loop — the original
				// placed the release after `break`, making it unreachable
				// and leaking the acquired stream.
				SAMPLE_COMM_VENC_ReleaseStream(ctx);
				break;
			}
			self_push_data((unsigned char *)pData, ctx->stFrame.pstPack->u32Len);

			SAMPLE_COMM_VENC_ReleaseStream(ctx);
			loopCount++;
		}
		usleep(1000);
	}
	return RK_NULL;
}

void* self_gstMp4_init(void* argv)
{
	std::string filename = filePath;
	std::string::size_type iPos = filename.find_last_of(".") + 1;
	std::string container = filename.substr(iPos, filename.size() - iPos);
	std::cout << "file type: " << container << std::endl;
	if (ish265 && container == "avi")
	{
		filename = filename.substr(0, filename.find_last_of(".")) + ".mp4";
		memcpy(filePath, filename.c_str(), strlen(filename.c_str()) + 1);
		container = "mp4";
		std::cout << "file new name: " << filePath << std::endl;
	}
	// 初始化GStreamer
	gst_init(NULL, NULL);
	// 创建主循环
	loop = g_main_loop_new(NULL, FALSE);
	// 创建管道
	pipeline = gst_pipeline_new("encoding-pipeline");
	if (!pipeline) {
		std::cerr << "Could not create pipeline" << std::endl;
		gstinitFlag = 1;
		return NULL;
	}

	// 创建appsrc作为输入源
	app_source = gst_element_factory_make("appsrc", "h264-app-source");
	if (!app_source) {
		std::cerr << "Could not create appsrc" << std::endl;
		gstinitFlag = 1;
		return NULL;
	}

	// 设置appsrc属性
	g_object_set(G_OBJECT(app_source),
		"format", GST_FORMAT_TIME,
		"is-live", TRUE,
		"do-timestamp", TRUE,
		NULL);

	// 设置媒体类型
	GstCaps* caps;
	if (!ish265) {
		caps = gst_caps_new_simple(
			"video/x-h264",
			"stream-format", G_TYPE_STRING, "byte-stream",
			"alignment", G_TYPE_STRING, "au",
			NULL);
	}
	else if (ish265) {
		caps = gst_caps_new_simple(
			"video/x-h265",
			"stream-format", G_TYPE_STRING, "byte-stream",
			"alignment", G_TYPE_STRING, "au",
			NULL);
	}

	g_object_set(G_OBJECT(app_source), "caps", caps, NULL);
	gst_caps_unref(caps);

	// 创建解析器
	GstElement* parser;
	if (!ish265) {
		parser = gst_element_factory_make("h264parse", "h264-parser");
	}
	else {
		parser = gst_element_factory_make("h265parse", "h265-parser");
	}

	// 创建复用器
	GstElement* muxer;
	if (container == "mp4") {
		muxer = gst_element_factory_make("mp4mux", "mp4-muxer");
	}
	else if (container == "avi") {
		muxer = gst_element_factory_make("avimux", "avi-muxer");
	}
	else {
		std::cerr << "Unsupported container: " << container << std::endl;
		gstinitFlag = 1;
		return NULL;
	}

	// 创建输出文件
	GstElement* sink = gst_element_factory_make("filesink", "file-sink");
	g_object_set(G_OBJECT(sink), "location", filePath, "sync", TRUE, NULL);

	// 将元素添加到管道
	gst_bin_add_many(GST_BIN(pipeline), app_source, parser, muxer, sink, NULL);

	// 链接元素
	if (!gst_element_link_many(app_source, parser, muxer, sink, NULL)) {
		std::cerr << "Could not link elements" << std::endl;
		gstinitFlag = 1;
		return NULL;
	}

	// 设置总线消息回调
	// GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
	// gst_bus_add_signal_watch(bus);
	// g_signal_connect(G_OBJECT(bus), "message", G_CALLBACK(handle_message), NULL);
	// g_object_unref(bus);

	// 启动管道
	std::cout << "Starting encoding to: " << filePath << std::endl;
	gst_element_set_state(pipeline, GST_STATE_PLAYING);

	// 设置数据源（实际应用中，这里应替换为从其他编码器获取数据的逻辑）
	//source_id = g_timeout_add(33, push_data, NULL); // 约30fps
	gstinitFlag = 1;
	// 运行主循环
	g_main_loop_run(loop);
	return NULL;
}

//! Blocks until the GStreamer init thread reports completion, then reports
//! whether the output pipeline was actually created.
bool self_isOpened()
{
	// Poll until self_gstMp4_init() flips the flag (set on success AND on
	// failure; the pipeline pointer tells the two apart).
	for (; gstinitFlag != 1; usleep(200000)) {
	}
	return pipeline != NULL;
}

bool self_setup(std::string file_path, cv::Size size, double fps, bool use_h265)
{
	//printf("in self setup !! \n");
	signal(SIGINT, sigterm_handler);
	int ret = 0;
	quit = false;
  
	gstinitFlag = 0;
	ish265 = use_h265;
	video_width = size.width;
	video_height = size.height;
	file_path += ".avi";
	memcpy(filePath, file_path.c_str(), strlen(file_path.c_str())+1);
	fps_use = fps;
	printf("file %s, fps %0.2f, h265 %d\n", filePath, fps_use, use_h265);
	dts = 0;
	RK_S32 s32BitRate = 5 * 1024; // 5Mbps
	RK_S32 s32loopCnt = -1;
	SAMPLE_MPI_CTX_S* ctx;
	CODEC_TYPE_E enCodecType = RK_CODEC_TYPE_H264;
	VENC_RC_MODE_E enRcMode = VENC_RC_MODE_H264CBR;

	int profile = 0;
	if (use_h265)
	{
		enCodecType = RK_CODEC_TYPE_H265;
		enRcMode = VENC_RC_MODE_H265CBR;
		profile = 0;
	}
	else
	{
		enCodecType = RK_CODEC_TYPE_H264;
		enRcMode = VENC_RC_MODE_H264CBR;
		profile = 100;
	}
	//init rockit
	//printf("init rockit\n");
	ctx = (SAMPLE_MPI_CTX_S*)(malloc(sizeof(SAMPLE_MPI_CTX_S)));
	memset(ctx, 0, sizeof(SAMPLE_MPI_CTX_S));
	g_ctx = ctx;
	if (RK_MPI_SYS_Init() != RK_SUCCESS) {
		RK_MPI_SYS_Exit();
		if (ctx) {
			free(ctx);
			ctx = RK_NULL;
		}
		return false;
	}

	//init mem
	//printf("init mem\n");
	MB_POOL_CONFIG_S stMbPoolCfg;
	memset(&stMbPoolCfg, 0, sizeof(MB_POOL_CONFIG_S));
	stMbPoolCfg.u64MBSize = video_width * video_height * 2;
	stMbPoolCfg.u32MBCnt = 10;
	stMbPoolCfg.enAllocType = MB_ALLOC_TYPE_DMA;
	pool = RK_MPI_MB_CreatePool(&stMbPoolCfg);
	if (pool == MB_INVALID_POOLID)
	{
		printf("mem init error\n");
		return false;
	}
	ctx->venc.pool = pool;

	// Init VENC[0]
	//printf("init venc\n");
	ctx->venc.s32ChnId = 0;
	ctx->venc.u32Width = video_width;
	ctx->venc.u32Height = video_height;
	ctx->venc.u32Fps = fps_use;
	ctx->venc.u32Gop = 60;
	ctx->venc.u32BitRate = s32BitRate;
	ctx->venc.enCodecType = enCodecType;
	ctx->venc.enRcMode = enRcMode;
	ctx->venc.getStreamCbFunc = self_venc_get_stream;
	ctx->venc.s32loopCount = s32loopCnt;
	//ctx->venc.dstFilePath = "test";//pOutPathVenc; guo
	// H264  66：Baseline  77：Main Profile 100：High Profile
	// H265  0：Main Profile  1：Main 10 Profile
	// MJPEG 0：Baseline
	ctx->venc.stChnAttr.stVencAttr.u32Profile = profile;
	ctx->venc.stChnAttr.stGopAttr.enGopMode = VENC_GOPMODE_NORMALP; // VENC_GOPMODE_SMARTP
	ret = SAMPLE_COMM_VENC_CreateChn(&ctx->venc);
	if (ret != RK_SUCCESS)
	{
		printf("init h264enc error\n");
		return false;
	}

	pthread_create(&p_gstinit, 0, self_gstMp4_init, NULL);
	self_isOpened();
	return true;
}

// Converts one BGR frame to I420 (resizing to the configured encoder
// resolution if needed) and hands it to the VENC channel.
void self_write(cv::Mat image)
{
	// Encoding has stopped (or was never started) — drop the frame.
	if (quit || g_ctx == RK_NULL)
	{
		return;
	}
	cv::Mat getimg = image;
	// Input resolution must match what the encoder was configured with.
	if (image.cols != video_width || image.rows != video_height)
	{
		cv::resize(image, getimg, cv::Size(video_width, video_height));
	}
	// BUGFIX: convert the (possibly resized) frame. The original called
	// cvtColor on the unresized `image`, which overwrote the resize result
	// and produced a buffer whose size mismatched w*h*3/2 below.
	cv::cvtColor(getimg, getimg, cv::COLOR_BGR2YUV_I420);
	SAMPLE_COMM_VENC_SendStream(&g_ctx->venc, getimg.data, video_width, video_height,
		video_width * video_height * 3 / 2, g_compressMode);
	return;
}

//! Releases all resources: stops the encoder reader thread, destroys the
//! VENC channel / buffer pool / rkmpi, then shuts down the GStreamer
//! pipeline.  Globals are nulled so a second call is a safe no-op.
void self_release()
{
	int ret = 0;
	quit = true;
	num_samples = 0;
	// Tell appsrc no more buffers are coming so the muxer can finalize the
	// file (moov atom / avi index) before the pipeline is torn down.
	if (app_source)
	{
		g_signal_emit_by_name(app_source, "end-of-stream", &ret);
	}

	printf("kill usb thread\n");
	if (g_ctx)
	{
		if (g_ctx->venc.getStreamCbFunc)
		{
			pthread_join(g_ctx->venc.getStreamThread, NULL); // wait for the reader thread to exit
		}
		printf("to stop enc\n");
		// Destroy the encoder channel.
		ret = SAMPLE_COMM_VENC_DestroyChn(&g_ctx->venc);
		if (ret != RK_SUCCESS)
		{
			printf("stop enc error!\n");
		}
		printf("to stop pool\n");
		// Destroy the DMA buffer pool.
		if (pool != MB_INVALID_POOLID)
		{
			RK_MPI_MB_DestroyPool(pool);
			pool = MB_INVALID_POOLID;
		}
		printf("to stop rkmpi\n");
		// Free the context and shut down rkmpi.
		free(g_ctx);
		g_ctx = RK_NULL;
		RK_MPI_SYS_Exit();
	}

	std::cout << "Stopping pipeline..." << std::endl;
	// BUGFIX: quit the main loop and join the GStreamer thread BEFORE
	// destroying the pipeline — the original unreffed the pipeline while
	// the loop thread could still be dispatching on it.
	if (loop)
	{
		printf("to kill gst thread\n");
		g_main_loop_quit(loop);
		printf("kill gst thread\n");
		pthread_join(p_gstinit, NULL); // wait for self_gstMp4_init to return
		g_main_loop_unref(loop);
		loop = nullptr; // BUGFIX: was left dangling, crashing a second call
	}
	if (pipeline)
	{
		gst_element_set_state(pipeline, GST_STATE_NULL);
		gst_object_unref(GST_OBJECT(pipeline));
		pipeline = nullptr;
		app_source = nullptr; // owned by the pipeline — now destroyed
	}
	printf("to stop end\n");
	return;
}

namespace sv2 {

// Thin adapter: forwards the VideoWriter interface onto the file-local
// self_* helpers defined above.

VideoWriterGstreamerImpl::VideoWriterGstreamerImpl()
{
}

VideoWriterGstreamerImpl::~VideoWriterGstreamerImpl()
{
}

// Pulls the writer parameters from the base object and brings up the
// encoder plus the GStreamer pipeline (H.264 is forced from this path).
bool VideoWriterGstreamerImpl::gstreamerSetup(VideoWriterBase* base_, std::string file_name_)
{
  _file_path = base_->getFilePath() + file_name_;
  _fps = base_->getFps();
  _image_size = base_->getSize();
  _with_targets = base_->getTargets();

  return self_setup(_file_path, _image_size, _fps, false);
}

// True once the pipeline has been created (blocks during initialization).
bool VideoWriterGstreamerImpl::gstreamerIsOpened()
{
  return self_isOpened();
}

// Encodes and muxes one BGR frame.
void VideoWriterGstreamerImpl::gstreamerWrite(cv::Mat img_)
{
  self_write(img_);
}

// Finalizes the file and frees every resource.
void VideoWriterGstreamerImpl::gstreamerRelease()
{
  self_release();
}

} // namespace sv2

