// Gstream.cpp : Defines the entry point for the console application.
//

#include <gst/gst.h>
#include <glib.h>
#include <stdio.h>
#include <string.h>
#include <glib-object.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>
#include <gst/app/gstappbuffer.h>
#include "protocol.h"

#define WIDTH 160
#define HEIGHT 120

// mplayer -demuxer rawvideo -rawvideo w=160:h=120:format=i420 file.yuv

// WINDOWS
// gst-launch ksvideosrc device-index=0 ! ffmpegcolorspace ! autovideosink
// gst-launch ksvideosrc device-index=0 ! video/x-raw-yuv,width=160,height=120 ! ffmpegcolorspace ! autovideosink

//GstPipeline:pipeline0/GstV4l2Src:v4l2src0.GstPad:src: caps = video/x-raw-yuv, format=(fourcc)YUY2, width=(int)640, height=(int)480, interlaced=(boolean)false, framerate=(fraction)30/1
//http://forum.videohelp.com/threads/219769-YUY2-format-byte-value

/* Shared state handed to the GStreamer callbacks. */
typedef struct
{
   GMainLoop *loop;    /* main loop driving the pipeline (quit on EOS/error) */
   GstElement *source; /* capture source element — NOTE(review): never assigned in this file */
   GstElement *sink;   /* appsink element delivering frames — NOTE(review): verify what start_video stores here */
   SerialConn *conn;   /* serial connection used by set_position() to steer the camera */
} ProgramData;

/* Pipeline bus watch: stops the main loop when the stream ends or an
 * error is posted.  Returning TRUE keeps the watch installed. */
static gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data)
{
   GMainLoop *main_loop = (GMainLoop *) data;
   GstMessageType type = GST_MESSAGE_TYPE(msg);

   if (type == GST_MESSAGE_EOS)
   {
      g_print("End of stream\n");
      g_main_loop_quit(main_loop);
   }
   else if (type == GST_MESSAGE_ERROR)
   {
      GError *err = NULL;
      gchar *dbg = NULL;

      gst_message_parse_error(msg, &err, &dbg);
      g_free(dbg);
      g_printerr("Error: %s\n", err->message);
      g_error_free(err);
      g_main_loop_quit(main_loop);
   }
   /* all other message types are ignored */

   return TRUE;
}

/* Scratch buffer for one raw frame; allocated (WIDTH*HEIGHT*2 bytes, enough
 * for a YUY2 frame) in start_video and filled by on_new_buffer. */
static unsigned char* raw_buffer;
/* Plane offsets for a WIDTH x HEIGHT I420 frame (cf. the mplayer comment at
 * the top of the file): the U plane follows the Y plane (WIDTH*HEIGHT bytes)
 * and each chroma plane is WIDTH*HEIGHT/4 = WIDTH*30 bytes for HEIGHT == 120.
 * NOTE(review): none of these three are referenced in this file — they look
 * like leftovers from an earlier I420-based implementation; confirm before
 * removing. */
static int start_u = WIDTH * HEIGHT;
static int start_v = WIDTH * HEIGHT + WIDTH * 30;
static int length = WIDTH * 30;

/* Called when the appsink notifies us that there is a new buffer ready for
 * processing: copies the YUY2 frame into raw_buffer, computes the centre of
 * the orange region, and sends a position correction to the NIOS over the
 * serial connection in data->conn. */
static void on_new_buffer(GstElement * elt, ProgramData * data)
{
   GstBuffer *buffer;
   guint size, i;
   int count = 0;            /* number of orange macropixels detected */
   int tot_x = 0, tot_y = 0; /* accumulated x/y indices of orange macropixels */

   buffer = gst_app_sink_pull_buffer(GST_APP_SINK(elt));
   if (buffer == NULL)
      return;

   size = GST_BUFFER_SIZE(buffer);

   /* Clamp to the scratch-buffer capacity so an unexpectedly large frame
    * cannot overflow raw_buffer (allocated WIDTH*HEIGHT*2 in start_video). */
   if (size > (guint) (WIDTH * HEIGHT * 2))
      size = WIDTH * HEIGHT * 2;

   /* first copy the received frame */
   memcpy(raw_buffer, GST_BUFFER_DATA(buffer), size);

   /* YUY2 packs two horizontal pixels into 4 bytes: Y0 U Y1 V.  Walk the
    * frame macropixel by macropixel and accumulate the positions that look
    * orange.  "i + 3 < size" is used instead of "i < size - 3": with an
    * unsigned size, the subtraction would underflow for size < 3. */
   for (i = 0; i + 3 < size; i += 4)
   {
      unsigned char u = raw_buffer[i + 1]; /* U value of YUV */
      unsigned char v = raw_buffer[i + 3]; /* V value of YUV */
      int x = (int) ((i / 4) % (WIDTH / 2)); /* macropixel x index */
      int y = (int) ((i / 4) / (WIDTH / 2)); /* line index */

      if (u < 90 && v > 180) /* this means that the pixel is orange */
      {
         count++;
         tot_x += x;
         tot_y += y;
      }
   }

   if (count > 10) /* threshold of 10 detected samples */
   {
      /* Centroid of the orange blob; x is doubled because each YUY2
       * macropixel spans two horizontal pixels. */
      int cx = (int) ((double) tot_x / count) * 2;
      int cy = (int) ((double) tot_y / count);

      /* Difference between centre of view and centre of orange, scaled by
       * a factor related to the view radius. */
      double update_x = (WIDTH / 2 - cx) * 0.002;
      double update_y = (HEIGHT / 2 - cy) * 0.0025;

      g_print("Centre: %d, %d, %f, %f\n", cx, cy, update_x, update_y);

      /* send new position to NIOS */
      set_position(data->conn, SET_HORIZONTAL_DIFF, update_x);
      set_position(data->conn, SET_VERTICAL_DIFF, -update_y);
   }

   gst_buffer_unref(buffer);
}

/* Builds and runs the capture pipeline
 *    v4l2src ! video/x-raw-yuv,width=160,height=120,framerate=30/1 ! appsink
 * Every frame is delivered to on_new_buffer(), which steers the camera over
 * the given serial connection.  Blocks until the main loop quits (EOS or
 * error on the bus), then tears the pipeline down. */
void start_video(SerialConn* conn)
{
   ProgramData *data = NULL;
   GMainLoop *loop;
   GstElement *pipeline, *source, *filter2, *sink;
   GstBus *bus;
   GstCaps *filter2caps;

   data = g_new0(ProgramData, 1);
   data->conn = conn;

   /* Initialisation */
   gst_init(NULL, NULL);
   loop = g_main_loop_new(NULL, FALSE);
   data->loop = loop;

   /* Create gstreamer elements */
   pipeline = gst_pipeline_new("video-player");
   source = gst_element_factory_make("v4l2src", "file-source");
   filter2 = gst_element_factory_make("capsfilter", "filter2");
   sink = gst_element_factory_make("appsink", "app-sink");

   if (!pipeline || !source || !filter2 || !sink)
   {
      /* Bug fix: the original fell through after this message and
       * dereferenced the NULL element(s). */
      g_printerr("One element could not be created. Exiting.\n");
      return;
   }

   /* Bug fix: data->sink was previously set to the pipeline. */
   data->source = source;
   data->sink = sink;

   /* Scratch buffer large enough for one YUY2 frame (2 bytes/pixel). */
   raw_buffer = (unsigned char*) g_malloc0(WIDTH * HEIGHT * 2);

   /* Force the capture format so on_new_buffer can assume YUY2 160x120. */
   filter2caps = gst_caps_new_simple("video/x-raw-yuv", "width", G_TYPE_INT,
         WIDTH, "height", G_TYPE_INT, HEIGHT, "framerate", GST_TYPE_FRACTION,
         30, 1, NULL);

   g_object_set(G_OBJECT(source), "device", "/dev/video0", NULL);

   g_object_set(G_OBJECT(filter2), "caps", filter2caps, NULL);
   gst_caps_unref(filter2caps);

   /* Emit "new-buffer" for each frame; don't sync to the clock, and keep at
    * most one buffer queued so we always process the freshest frame. */
   g_object_set(G_OBJECT(sink), "emit-signals", TRUE, "sync", FALSE,
         "max-buffers", 1, NULL);
   g_signal_connect(sink, "new-buffer", G_CALLBACK(on_new_buffer), data);

   /* we add a message handler */
   bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
   gst_bus_add_watch(bus, bus_call, loop);
   gst_object_unref(bus);

   /* we add all elements into the pipeline and link them together */
   gst_bin_add_many(GST_BIN(pipeline), source, filter2, sink, NULL);
   if (!gst_element_link_many(source, filter2, sink, NULL))
   {
      /* Bug fix: linking can fail (e.g. caps not supported by the device);
       * the original ignored the result and played a broken pipeline. */
      g_printerr("Elements could not be linked. Exiting.\n");
      gst_object_unref(GST_OBJECT(pipeline));
      return;
   }

   /* Set the pipeline to "playing" state */
   g_print("Now playing: \n");
   gst_element_set_state(pipeline, GST_STATE_PLAYING);

   /* Iterate */
   g_print("Running...\n");
   g_main_loop_run(loop);

   /* Out of the main loop, clean up nicely */
   g_print("Returned, stopping playback\n");
   gst_element_set_state(pipeline, GST_STATE_NULL);
   g_print("Deleting pipeline\n");
   gst_object_unref(GST_OBJECT(pipeline));

   /* Bug fix: release per-run resources the original leaked. */
   g_main_loop_unref(loop);
   g_free(raw_buffer);
   raw_buffer = NULL;
   g_free(data);
}
