/*
 * 2018/4/16  HUALONG
 * Capture USB-camera video with OpenCV, process it with OpenCV / CUDA / TensorRT, etc.,
 * and finally transmit the result as a GStreamer RTP video stream.
 */
#include "gstAppsrc.h"
#include "gstUtility.h"
#include <sstream> 
#include <unistd.h>
#include <string.h>

cv::VideoCapture cap1(0);

// Constructor: zero out all GStreamer handles; the pipeline itself is
// built later by init_Pipeline() (called from Create()).
gstAppsrc::gstAppsrc() 
{
	mPipeline = NULL;
	mAppSrc = NULL;		// FIX: was assigned twice (duplicate line removed)
	mBus = NULL;
	printf(LOG_GSTAPPSRC "creat gstAppsrc\n");	
}


// Destructor: stop the pipeline and release every GStreamer reference
// acquired in init_Pipeline().
// FIX: previously only logged — the pipeline, bus and appsrc refs all leaked
// and the pipeline was never transitioned to GST_STATE_NULL.
gstAppsrc::~gstAppsrc()
{
	printf(LOG_GSTAPPSRC "delate gstAppsrc pipeine\n");

	if( mAppSrc != NULL )
	{
		// gst_bin_get_by_name() returned an owned reference — release it.
		gst_object_unref(mAppSrc);
		mAppSrc = NULL;
	}

	if( mBus != NULL )
	{
		gst_object_unref(mBus);
		mBus = NULL;
	}

	if( mPipeline != NULL )
	{
		// Bring the pipeline down cleanly before dropping the last reference.
		gst_element_set_state(mPipeline, GST_STATE_NULL);
		gst_object_unref(GST_OBJECT(mPipeline));
		mPipeline = NULL;
	}
}


// buildLaunchStr  parse pipeline
// Build the gst_parse_launch() description string into mLaunchStr.
// Currently a display-only pipeline; swap the constant below for the
// omxh264enc/rtph264pay/udpsink variant to stream over RTP instead.
bool gstAppsrc::buildLaunchStr()
{
	// appsrc feeds raw frames; videoconvert converts them to I420 for the sink.
	static const char* const kDisplayPipeline =
		"appsrc block=TRUE name=myappsrc ! videoconvert ! video/x-raw,format=I420 !   autovideosink";

	mLaunchStr = kDisplayPipeline;

	printf(LOG_GSTAPPSRC "gstreamer  pipeline string:\n");
	printf("%s\n", mLaunchStr.c_str());
	return true;
}

// init
bool gstAppsrc::init_Pipeline()
{
	GError* err = NULL;

	// parse pipeline string
	if( !buildLaunchStr() )
	{
		printf(LOG_GSTAPPSRC " gstreamer  failed to parse pipeline string\n");
		return false;
	}

	// launch pipeline
	mPipeline = gst_parse_launch(mLaunchStr.c_str(), &err);//creat pipeline

	if( err != NULL )
	{
		printf(LOG_GSTAPPSRC " gstreamer  failed to create pipeline\n");
		printf(LOG_GSTAPPSRC "   (%s)\n", err->message);
		g_error_free(err);
		return false;
	}
	// mPipeline is gstElement
	GstPipeline* pipeline = GST_PIPELINE(mPipeline);

	if( !pipeline )
	{
		printf(LOG_GSTAPPSRC "gstreamer failed to cast GstElement into GstPipeline\n");
		return false;
	}	

	mBus = gst_pipeline_get_bus(pipeline);	
	if( !mBus )
	{
		printf(LOG_GSTAPPSRC "gstreamer failed to retrieve GstBus from pipeline\n");
		return false;
	}

	// get the appsrc
	GstElement*  appsrcElement = gst_bin_get_by_name(GST_BIN(pipeline), "myappsrc");
	GstAppSrc*   appsrc = GST_APP_SRC(appsrcElement);

	if( !appsrcElement || !appsrc) //确认pipiline 
	{
		printf(LOG_GSTAPPSRC "gstreamer failed to retrieve Appsrc  element from pipeline\n");
		return false;
	}
	mAppSrc = appsrcElement;	

	// set the appsrc caps( match with OpenCV catture)
    GstCaps* caps = gst_caps_new_simple ("video/x-raw",
				"format", G_TYPE_STRING, "BGR",
                "width", G_TYPE_INT, 640,
                "height", G_TYPE_INT, 480,
               // "framerate", GST_TYPE_FRACTION, 25, 1,
                NULL);
    gst_app_src_set_caps(GST_APP_SRC(mAppSrc), caps);	

	// g_object_set(G_OBJECT(appsrcElement), "caps",
	// 	gst_caps_new_simple("video/x-raw",
	// 	"format", G_TYPE_STRING, "BGR",  //BGR
	// 	"width", G_TYPE_INT, 640,
	// 	"height", G_TYPE_INT, 480,
	// 	//"framerate", GST_TYPE_FRACTION, 0, 1,
	// 	NULL), NULL);


	//set appsrc element properties
	gst_app_src_set_stream_type(GST_APP_SRC(mAppSrc),GST_APP_STREAM_TYPE_STREAM);
	// g_object_set(G_OBJECT(appsrcElement),
	// 	"stream-type", 0,		//0 stream /1 seekable  2/random_access
	// 	"format", GST_FORMAT_TIME, NULL);



	// signal connnct  when the appsrc need of data, callback the cb_need_data
	g_signal_connect(appsrcElement, "need-data", G_CALLBACK(cb_need_data), NULL);
	
	
	// setup callbacks
	// GstAppSrcCallbacks cb; //可以安装在install上的一组回调函数
	// memset(&cb, 0, sizeof(GstAppSrcCallbacks));
	
	// cb.need_data    = onNeed_data;//  end of the stream调用
	// cb.enough_data	= onEnough_data; //new preroll时，调用 PAUSED状态调用
	// cb.seek_data  	= onSeek_data; // new sample时，调用 READY/NULL状态调用  【onBuffer很关键】
	
	// gst_app_src_set_callbacks(mAppSrc, &cb, (void*)this, NULL);
	//每次有eos new preroll new sample,执行回调函数，这是使用信号的替代方案

	// add watch for messages (disabled when we poll the bus ourselves, instead of gmainloop)
	//gst_bus_add_watch(mBus, (GstBusFunc)gst_message_print, NULL);




/*

	g_main_loop_run(loop);

	// clean up
	gst_element_set_state(mPipeline, GST_STATE_NULL);
	gst_object_unref(GST_OBJECT(mPipeline));
	g_main_loop_unref(loop);
*/
	return true;
}

// Factory: initialize the GStreamer library, construct a gstAppsrc, store the
// requested capture/stream parameters, and build its pipeline.
// Returns NULL on any failure — never a half-initialized object.
gstAppsrc* gstAppsrc::Create( uint32_t width, uint32_t height, int v4l2_device, std::string target_ip, int target_port)
{
	if( !gstreamerInit() )
	{
		printf(LOG_GSTREAMER "failed to initialize gstreamer API\n");
		return NULL;
	}
	
	// operator new throws on failure, so no NULL check is needed here.
	gstAppsrc* cam = new gstAppsrc();
	
	cam->mV4L2Device = v4l2_device;
	cam->mWidth      = width;
	cam->mHeight     = height;
	cam->mTarget_ip  = target_ip;	
	cam->mPort       = target_port;

	if(!cam->init_Pipeline())
	{
		printf(LOG_GSTAPPSRC "failed to init the pipeline\n");
		delete cam;	// FIX: the half-built object previously leaked here
		return NULL;
	}

	return cam;
}




// "need-data" handler: grab one frame from the global capture, copy it into a
// freshly allocated GstBuffer, timestamp it, and push it into appsrc.
// @param appsrc       the appsrc element that requested data
// @param unused_size  suggested byte count from appsrc (ignored)
// @param user_data    unused (NULL is passed at connect time)
void gstAppsrc::cb_need_data(GstElement *appsrc,
	guint       unused_size,
	gpointer    user_data)
{
	static GstClockTime timestamp = 0;

	cv::Mat frame;
	cap1 >> frame;

	// FIX: a failed grab previously pushed garbage downstream — skip instead.
	if( frame.empty() )
		return;

	// The caps advertise tightly packed BGR, so rows must be contiguous.
	if( !frame.isContinuous() )
		frame = frame.clone();

	const guint size = (guint)(frame.total() * frame.elemSize());

	GstBuffer* buffer = gst_buffer_new();
	GstMemory* memory = gst_allocator_alloc(NULL, size, NULL);
	gst_buffer_insert_memory(buffer, -1, memory);

	GstMapInfo map;
	gst_buffer_map(buffer, &map, GST_MAP_WRITE);
	memcpy(map.data, frame.data, size);
	gst_buffer_unmap(buffer, &map);	// FIX: unmap before handing the buffer off, not after pushing

	// FIX: DURATION was never set, so "timestamp += DURATION" accumulated
	// GST_CLOCK_TIME_NONE. Derive it from the reported capture rate.
	double fps = cap1.get(CV_CAP_PROP_FPS);
	if( fps <= 0.0 )
		fps = 30.0;	// NOTE(review): fallback when the driver reports no rate — confirm
	GST_BUFFER_DURATION(buffer) = (GstClockTime)((double)GST_SECOND / fps);
	GST_BUFFER_PTS(buffer) = timestamp;
	timestamp += GST_BUFFER_DURATION(buffer);

	GstFlowReturn ret;
	g_signal_emit_by_name(appsrc, "push-buffer", buffer, &ret);
	gst_buffer_unref(buffer);
}


// Open
// Transition the pipeline to PLAYING. For an ASYNC transition, wait up to
// five seconds for completion (or an error) on the bus; any other non-SUCCESS
// result is a hard failure. Finally drain pending bus messages around a short
// settle delay. Returns true once the transition has been requested successfully.
bool gstAppsrc::Open(GstElement* pipeline, GstBus* bus)
{
	printf(LOG_GSTAPPSRC "gstreamer transitioning pipeline to GST_STATE_PLAYING\n");

	const GstStateChangeReturn result = gst_element_set_state(pipeline, GST_STATE_PLAYING);

	if( result != GST_STATE_CHANGE_ASYNC && result != GST_STATE_CHANGE_SUCCESS )
	{
		printf(LOG_GSTAPPSRC "gstreamer failed to set pipeline state to PLAYING (error %u)\n", result);
		return false;
	}

	if( result == GST_STATE_CHANGE_ASYNC )
	{
		// The change completes asynchronously — block (bounded) until it does.
		GstMessage* asyncMsg = gst_bus_timed_pop_filtered(bus, 5 * GST_SECOND, 
							      (GstMessageType)(GST_MESSAGE_ASYNC_DONE|GST_MESSAGE_ERROR)); 

		if( asyncMsg == NULL )
		{
			printf(LOG_GSTAPPSRC "gstreamer NULL message after transitioning pipeline to PLAYING...\n");
		}
		else
		{
			gst_message_print(bus, asyncMsg, this);
			gst_message_unref(asyncMsg);
		}
	}

	// Drain queued messages, let the state change settle for 100ms, drain again.
	checkMsgBus(bus);
	usleep(100*1000);
	checkMsgBus(bus);

	return true;
}


// checkMsgBus
// Drain every message currently queued on the bus, printing and releasing each.
// Non-blocking: gst_bus_pop() returns NULL once the queue is empty.
void gstAppsrc::checkMsgBus(GstBus* bus)
{
	for( GstMessage* msg = gst_bus_pop(bus); msg != NULL; msg = gst_bus_pop(bus) )
	{
		gst_message_print(bus, msg, this);
		gst_message_unref(msg);
	}
}