/* -*- Mode: C; indent-tabs-mode: t; c-basic-offset: 4; tab-width: 4 -*- */
/*
 * main.c
 * Copyright (C) The Pennsylvania State University 2008 <tpk5010@psu.edu>
 * 
 */

#include "streaming.h"

/* Biography of a video stream:
 * 
 * We're using h264 to encode our video packets. Hopefully, the h264 encoder
 * is present on the handheld with minimal code reworking, or we'd be in
 * serious trouble. Anyway, this solution works on my laptop, if it doesn't
 * work on your devel machine please let me know. You can test the plugins and
 * whatnot by running the following from the command line:
 *
 * gst-launch-0.10 -v videotestsrc ! capsfilter caps="video/x-raw-yuv,width=320,height=240" \
 * ! x264enc byte-stream=true ! rtph264pay ! udpsink host=localhost port=5442 sync=false
 *
 * And then running the videotest script in the root project folder. If this 
 * setup doesn't work for you, you're likely missing some gstreamer packages
 * and the application will likely fail to work.
 *
 * As you can see, we use a capsfilter to specify the format of our input
 * stream, a RTP payloader to format the stream and encase it in RTP packets,
 * and finally a udpsink to toss packets over the network. The videotest
 * script does the reverse, and puts the output in an autovideosink so you can
 * see the results. 
 *
 * As of right now, this does NOT implement audio streaming, but as that needs
 * to be done on a separate port anyway, we can worry about that later
 *
 * TODO: If VideoSrc isn't videotest src, we need to set the source to the devnode
 * specified in the config file. Right now it defaults to /dev/video0
 */

/* Build (but do not start) the video pipeline: source -> caps filter ->
 * x264 encoder -> RTP payloader. The udpsink is added later by startVideo().
 *
 * Returns a newly allocated videoStream on success, or NULL if a required
 * GStreamer element factory is missing or the elements cannot be linked.
 * Ownership of the returned struct passes to the caller, who must release
 * it with destroy_video_stream().
 */
videoStream* new_video_stream(AppUIData *appuidata) {
	gdk_threads_enter();
	logit("Attempting to initialize a video stream...\n", GTK_TEXT_VIEW(appuidata->logview));

	/* Source element name comes from the config file (e.g. "videotestsrc"
	 * or a v4l2 source — see the TODO in the file header). */
	char *source = appuidata->data->config->VideoSrc;

	/* Zero memory with g_new0 so unset pointers are NULL */
	videoStream* result = g_new0(videoStream, 1);

	/* Make a pipeline */
	result->pipeline = gst_pipeline_new("thePipeline");

	/* Add source element */
	result->sourceFactory = gst_element_factory_find(source);
	if (!result->sourceFactory) {
		logit("You don't have the ", GTK_TEXT_VIEW(appuidata->logview));
		logit(source, GTK_TEXT_VIEW(appuidata->logview));
		logit(" factory installed!\nCheck your GStreamer installation.\n", GTK_TEXT_VIEW(appuidata->logview));
		/* Creating an element from a NULL factory would crash later; bail out. */
		goto fail;
	}
	result->src = gst_element_factory_create(result->sourceFactory, "source");

	/* Add encoder element */
	result->h264encFactory = gst_element_factory_find("x264enc");
	if (!result->h264encFactory) {
		logit("You don't have the x264enc factory installed!\nCheck your GStreamer installation.\n", GTK_TEXT_VIEW(appuidata->logview));
		goto fail;
	}
	result->h264enc = gst_element_factory_create(result->h264encFactory, "encoder");
	/* byte-stream=true: emit Annex-B NAL units as required by rtph264pay */
	g_object_set(G_OBJECT(result->h264enc), "byte-stream", TRUE, NULL);

	/* Now add the RTP payloader */
	result->rtpPayloaderFactory = gst_element_factory_find("rtph264pay");
	if (!result->rtpPayloaderFactory) {
		logit("You don't have the rtph264pay factory installed!\nCheck your GStreamer installation.\n", GTK_TEXT_VIEW(appuidata->logview));
		goto fail;
	}
	result->rtpPayloader = gst_element_factory_create(result->rtpPayloaderFactory, "rtpPayloader");

	/* The pipeline (a GstBin) takes ownership of the elements added here. */
	gst_bin_add_many(GST_BIN(result->pipeline), result->src, result->h264enc, result->rtpPayloader, NULL);

	/* Add filters to source. For our purposes, these filters will always be the same,
	 * simplifying things quite a bit */
	/* Note that this code likely will change to get the thing to run on the device */
	result->caps = gst_caps_new_simple ("video/x-raw-yuv",
				"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC('I', '4', '2', '0'),
				"width", G_TYPE_INT, 320,
				"height", G_TYPE_INT, 240,
				"framerate", GST_TYPE_FRACTION, 25, 1,
				NULL);
	if (!gst_element_link_filtered(result->src, result->h264enc, result->caps)) {
		logit("Failed to link the source and encoder factories with the capsfilter!\n", GTK_TEXT_VIEW(appuidata->logview));
		gst_caps_unref(result->caps);
		goto fail;
	}
	gst_caps_unref(result->caps);

	logit("Video stream parameters successfully set.\n", GTK_TEXT_VIEW(appuidata->logview));
	gdk_threads_leave();
	return result;

fail:
	/* Unreffing the pipeline also releases any elements already added to it. */
	if (result->pipeline)
		gst_object_unref(GST_OBJECT(result->pipeline));
	g_free(result);
	gdk_threads_leave();
	return NULL;
}

/* Attach a udpsink to the prepared pipeline and set it PLAYING.
 *
 * The destination host comes from the config file; the port from the
 * stream itself. Returns TRUE if the pipeline started, FALSE on any
 * failure (missing udpsink factory, link failure, or state-change failure).
 *
 * Fix: the original released the GDK lock only on a line placed after
 * both return statements (unreachable), and the early error returns also
 * skipped it — every call leaked gdk_threads_enter(). All exit paths now
 * release the lock before returning.
 */
gboolean startVideo(videoStream *stream, AppUIData *appuidata) {
	gboolean ok = FALSE;

	gdk_threads_enter();

	char *dest = appuidata->data->config->Server;
	gint port = stream->vport;

	/* In order to start, we need to add the UDP sink */
	stream->udpsinkFactory = gst_element_factory_find("udpsink");
	if (!stream->udpsinkFactory) {
		logit("You don't have the udpsink factory installed!\nCheck your GStreamer installation.\n", GTK_TEXT_VIEW(appuidata->logview));
		goto out;
	}
	stream->udpsink = gst_element_factory_create(stream->udpsinkFactory, "udpsink");

	/* sync=FALSE: push packets as fast as they arrive, don't wait on the clock */
	g_object_set(G_OBJECT(stream->udpsink), "host", dest, "port", port, "sync", FALSE, NULL);

	gst_bin_add_many(GST_BIN(stream->pipeline), stream->udpsink, NULL);

	if (!gst_element_link_many(stream->h264enc, stream->rtpPayloader, stream->udpsink, NULL)) {
		logit("Failed to link the pipeline together!\nSomething went wrong.\n", GTK_TEXT_VIEW(appuidata->logview));
		goto out;
	}

	if (gst_element_set_state(GST_ELEMENT(stream->pipeline), GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
		logit("Failed to start video stream.\n", GTK_TEXT_VIEW(appuidata->logview));
		goto out;
	}

	logit("Video stream started\n", GTK_TEXT_VIEW(appuidata->logview));
	ok = TRUE;

out:
	gdk_threads_leave();
	return ok;
}

/* Tear down a stream created by new_video_stream().
 *
 * Fix: a GStreamer pipeline must be brought back to GST_STATE_NULL before
 * its last ref is dropped; unreffing a PLAYING pipeline leaks threads and
 * device handles. Also tolerates NULL, since new_video_stream() can now
 * return NULL on failure.
 */
void destroy_video_stream( videoStream *stream ) {
	if (!stream)
		return;
	if (stream->pipeline) {
		/* Stop the pipeline (releases clocks, threads, devices) before unref. */
		gst_element_set_state(GST_ELEMENT(stream->pipeline), GST_STATE_NULL);
		gst_object_unref(GST_OBJECT(stream->pipeline));
	}
	g_free(stream);
}
