#include "StreamServer.hpp"
#include <iostream>
#include <unistd.h>
#include "gstUtility.h"

// Camera 1: opened immediately on /dev/video0; frames are pulled from it
// in cb_need_data() each time appsrc asks for data.
cv::VideoCapture cap1(0);
// Camera 2: declared but never opened; only referenced in commented-out
// dual-camera code inside cb_need_data().
cv::VideoCapture cap2;
// GLib main loop driving the GStreamer pipeline; created and run in
// StreamServer::start().
static GMainLoop *loop;

/// Construct the server with a default stream destination address.
/// The default can be overridden via set_target_ip() before start().
StreamServer::StreamServer() : target_ip("172.20.90.151")
{
	// (removed unused locals c0/c1 — they were declared and never read)
}


// Destructor: intentionally empty — the pipeline and main loop are torn
// down at the end of start(), and the VideoCapture objects are globals
// released at program exit.
StreamServer::~StreamServer()
{
}

void StreamServer::set_target_ip(std::string ip){
	this->target_ip = ip;
}

void StreamServer::start(){
	GstElement *pipeline, *appsrc, *udpsink;
	GstBus *bus;

	

	/* init GStreamer */
	gst_init(NULL, NULL);
	std::cout << "\r\n [gstreamer] -- before parse_launch gstreamer pipeline"<<std::endl;
	loop = g_main_loop_new(NULL, FALSE);

	/* setup pipeline */
	//pipeline = gst_parse_launch("appsrc block=TRUE name=source ! videoconvert ! video/x-raw,format=YUY2 ! tee name=local ! queue ! videoconvert ! autovideosink local. ! queue ! jpegenc ! rtpjpegpay ! udpsink name=udpsink host=127.0.0.1 port=5506", NULL);
	//pipeline = gst_parse_launch("appsrc block=TRUE name=source ! videoconvert ! video/x-raw,format=YUY2 ! tee name=local ! queue ! videoconvert ! autovideosink local. ! queue ! x264enc ! rtph264pay ! udpsink name=udpsink host=127.0.0.1 port=5506",NULL);
	

	//just display
	//pipeline = gst_parse_launch("appsrc block=TRUE name=source ! videoconvert ! video/x-raw,format=I420 !   autovideosink local.",NULL);

//gst-launch-1.0 v4l2src ! 'video/x-raw, format=(string)I420,\
width=(int)640, height=(int)480' ! omxh264enc ! 'video/x-h264, stream-format=(string)byte-stream' ! rtph264pay ! udpsink host=192.168.0.103 port=5000
	//just udp transport TX1
	pipeline = gst_parse_launch("appsrc block=TRUE name=source ! videoconvert ! video/x-raw,format=I420 !  omxh264enc ! video/x-h264, stream-format=byte-stream ! rtph264pay ! udpsink host=192.168.0.9 port=5000",NULL);

	//pipeline = gst_parse_launch("appsrc block=TRUE name=source ! videoconvert ! video/x-raw,format=I420  ! omxh264enc ! 'video/x-h264, stream-format=byte-stream' ! rtph264pay ! udpsink host=192.168.0.9 port=5000",NULL);
	// if( !pipeline )pipeline = gst_parse_launch("appsrc block=TRUE name=source ! videoconvert ! video/x-raw,format=YUY2 !   autovideosink local. ",NULL);
	// {
	// 	printf("gstreamer failed to gst parse launch Pipeline\n");
	// 	//return false;
	// }
	
	std::cout << "\r\n [gstreamer] -- parse_launch gstreamer pipeline"<<std::endl;

	bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
	std::cout << "\r\n [gstreamer] -- get bus"<<std::endl;

	appsrc = gst_bin_get_by_name(GST_BIN(pipeline), "source");
	std::cout << "\r\n [gstreamer] -- get appsrc "<<std::endl;
	//udpsink = gst_bin_get_by_name(GST_BIN(pipeline), "udpsink");

	std::cout << "\r\n get appsrc and udpsink"<<std::endl;

	//g_object_set(G_OBJECT(udpsink), "host", target_ip.c_str(), NULL);
	/* setup */
	g_object_set(G_OBJECT(appsrc), "caps",
		gst_caps_new_simple("video/x-raw",
		"format", G_TYPE_STRING, "BGR",  //BGR
		"width", G_TYPE_INT, 640,
		"height", G_TYPE_INT, 480,
		//"framerate", GST_TYPE_FRACTION, 0, 1,
		NULL), NULL);

	/* setup appsrc */
	g_object_set(G_OBJECT(appsrc),
		"stream-type", 0,
		"format", GST_FORMAT_TIME, NULL);
	std::cout << "\r\n set appsrc config"<<std::endl;

	g_signal_connect(appsrc, "need-data", G_CALLBACK(cb_need_data), NULL);

	std::cout << "\r\n set appsrc callback"<<std::endl;

	/* play */
	// if(gst_element_set_state(pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS)
	// 	std::cout << "\r\n change status playing success"<<std::endl;
	// else
	// 	std::cout << "\r\n change status playing error"<<std::endl;

	const GstStateChangeReturn result = gst_element_set_state(pipeline, GST_STATE_PLAYING);

	if( result == GST_STATE_CHANGE_ASYNC )
	{
		std::cout << "\r\n GST_STATE_CHANGE_ASYNC"<<std::endl;
#if 1
		GstMessage* asyncMsg = gst_bus_timed_pop_filtered(bus, 5 * GST_SECOND, 
    	 					      (GstMessageType)(GST_MESSAGE_ASYNC_DONE|GST_MESSAGE_ERROR)); 
		std::cout << "\r\n asynMSG"<<std::endl;

		if( asyncMsg != NULL )
		{
			std::cout << "\r\n asyncMsg != NULL"<<std::endl;
			
			gst_message_print(bus, asyncMsg, this);
			gst_message_unref(asyncMsg);
		}
		else
			printf( "gstreamer NULL message after transitioning pipeline to PLAYING...\n");
#endif
	}
	else if( result != GST_STATE_CHANGE_SUCCESS )
	{
		printf( "gstreamer failed to set pipeline state to PLAYING (error %u)\n", result);
		//return false;
	}

	//checkMsgBus();
	usleep(100*1000); //sleep 100ms 等待gstreamer pipuline状态改变
	//checkMsgBus();
	//GstState *state;
    //GstStateChangeReturn gst_status = gst_element_get_state(pipeline, state,NULL,5);

    //std::cout << "\r\n status: "<< *state <<std::endl;
	//client.tcpSend()
	g_main_loop_run(loop);

	/* clean up */
	gst_element_set_state(pipeline, GST_STATE_NULL);
	gst_object_unref(GST_OBJECT(pipeline));
	g_main_loop_unref(loop);

}

/// "need-data" callback: grab one frame from camera 1 and push it into
/// appsrc as a timestamped GstBuffer.
///
/// @param appsrc       the appsrc element requesting data
/// @param unused_size  suggested byte count (ignored; we push whole frames)
/// @param user_data    unused
void StreamServer::cb_need_data(GstElement *appsrc,
	guint       unused_size,
	gpointer    user_data)
{
	static GstClockTime timestamp = 0;
	GstBuffer *buffer;
	GstMemory *memory;
	guint size;
	GstFlowReturn ret;
	GstMapInfo map;

	cv::Mat frame1;

	cap1 >> frame1; // grab the next frame from camera 1

	// A failed capture yields an empty Mat; pushing it would dereference
	// a null data pointer in memcpy below.
	if( frame1.empty() )
	{
		printf("gstreamer cb_need_data: failed to capture frame from camera 1\n");
		return;
	}

	double frame_rate = cap1.get(CV_CAP_PROP_FPS);
	double format = cap1.get(CV_CAP_PROP_FORMAT);

	std::cout << "\r\nframe rate: "<< frame_rate << "format: " <<format <<std::endl;

	// Total payload size in bytes (rows * cols * channels * bytes/channel).
	size = (guint)(frame1.total() * frame1.elemSize());

	buffer = gst_buffer_new();
	memory = gst_allocator_alloc(NULL, size, NULL);
	gst_buffer_insert_memory(buffer, -1, memory);
	gst_buffer_map(buffer, &map, GST_MAP_WRITE);

	memcpy(map.data, frame1.data, size);

	// Unmap BEFORE handing the buffer downstream — previously it was
	// unmapped after push-buffer, i.e. while another element could
	// already own it.
	gst_buffer_unmap(buffer, &map);

	// Set PTS and duration.  Duration was previously never set, so
	// "timestamp += GST_BUFFER_DURATION(buffer)" added
	// GST_CLOCK_TIME_NONE and corrupted every timestamp after the first
	// frame.  Fall back to 30 fps when the driver does not report a rate.
	if( frame_rate <= 0 )
		frame_rate = 30.0;
	GST_BUFFER_PTS(buffer) = timestamp;
	GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale_int(1, GST_SECOND, (gint)frame_rate);
	timestamp += GST_BUFFER_DURATION(buffer);

	// push-buffer takes its own reference; we still drop ours below.
	g_signal_emit_by_name(appsrc, "push-buffer", buffer, &ret);
	gst_buffer_unref(buffer);

	if( ret != GST_FLOW_OK )
		printf("gstreamer push-buffer returned flow error %d\n", (int)ret);
}


int main()
{
	StreamServer server_test;
	server_test.start();

	return 0;

}

