// Gstream.cpp : Defines the entry point for the console application.
//

#include <gst/gst.h>
#include <glib.h>
#include <stdio.h>
#include <string.h>
#include <glib-object.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>
#include <gst/app/gstappbuffer.h>

// mplayer -demuxer rawvideo -rawvideo w=160:h=120:format=i420 file.yuv

// WINDOWS
// gst-launch ksvideosrc device-index=0 ! ffmpegcolorspace ! autovideosink
// gst-launch ksvideosrc device-index=0 ! video/x-raw-yuv,width=160,height=120 ! ffmpegcolorspace ! autovideosink

//GstPipeline:pipeline0/GstV4l2Src:v4l2src0.GstPad:src: caps = video/x-raw-yuv, format=(fourcc)YUY2, width=(int)640, height=(int)480, interlaced=(boolean)false, framerate=(fraction)30/1
//http://forum.videohelp.com/threads/219769-YUY2-format-byte-value

/* Shared state handed to the GStreamer callbacks. */
typedef struct
{
   GMainLoop *loop;     /* main loop quit from bus_call() on EOS/error */
   GstElement *source;  /* capture source element — NOTE(review): never assigned in this file; confirm intended use */
   GstElement *sink;    /* appsink element (see start_video, where it is stored) */
} ProgramData;

/* Bus watch: stops the main loop when the pipeline posts EOS or an error.
 * `data` is the GMainLoop installed by the caller of gst_bus_add_watch(). */
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
   GMainLoop *loop = (GMainLoop *) data;

   switch (GST_MESSAGE_TYPE (msg)) {
   case GST_MESSAGE_ERROR: {
      GError *err = NULL;
      gchar *dbg = NULL;

      gst_message_parse_error (msg, &err, &dbg);
      g_free (dbg);
      g_printerr ("Error: %s\n", err->message);
      g_error_free (err);
      g_main_loop_quit (loop);
      break;
   }

   case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;

   default:
      break;
   }

   /* Keep the watch installed. */
   return TRUE;
}

/* Called when the appsink notifies us that a new buffer is ready.
 *
 * The buffer is expected to hold packed YUY2 data (Y0 U Y1 V per 4-byte
 * macropixel) for a 160x120 frame, i.e. 80 macropixels per row — see the
 * caps set in start_video() and the YUY2 note at the top of the file.
 * We count macropixels whose chroma matches the target (low U, high V)
 * and print the centroid of the matches, normalised to [-1, 1].
 */
static void
   on_new_buffer(GstElement * elt, ProgramData * data)
{
   GstBuffer *buffer;
   const unsigned char *raw;
   guint size, i;
   int count = 0;            /* matching macropixels */
   int tot_x = 0, tot_y = 0; /* running sums for the centroid */

   /* Pull the finished buffer out of the appsink. */
   buffer = gst_app_sink_pull_buffer (GST_APP_SINK (elt));
   if (buffer == NULL)
      return;

   size = GST_BUFFER_SIZE (buffer);
   /* Read the frame in place — no need to g_malloc0/memcpy a private copy
    * of every frame as the original did. */
   raw = GST_BUFFER_DATA (buffer);

   /* Walk one macropixel (4 bytes) at a time.  The guard is written as
    * `i + 3 < size` because the original `i < size - 3` underflowed when
    * the unsigned `size` was below 3, producing an out-of-bounds loop. */
   for (i = 0; i + 3 < size; i += 4)
   {
      unsigned char u = raw[i + 1];
      unsigned char v = raw[i + 3];
      int x = (int)((i / 4) % 80);   /* macropixel column, 0..79 */
      int y = (int)((i / 4) / 80);   /* row, 0..119 */

      /* Low U + high V: the saturated-red target colour we track. */
      if (u < 80 && v > 200)
      {
         count++;
         tot_x += x;
         tot_y += y;
      }
   }

   /* Only report a centre when enough pixels matched (noise rejection).
    * The original kept two identical counters (centre_x/centre_y); one
    * count suffices since both were incremented together. */
   if (count > 10)
   {
      /* x2 converts the macropixel column back to a pixel column. */
      int cx = (int)((double)tot_x / count) * 2;
      int cy = (int)((double)tot_y / count);
      double update_x = (80 - cx) / 80.0;
      double update_y = (60 - cy) / 60.0;
      g_print("Centre: %d, %d, %f, %f\n", cx, cy, update_x, update_y);
   }

   /* we don't need the appsink buffer anymore */
   gst_buffer_unref (buffer);
}

/* Alternative "new-buffer" handler that only logs the pull — useful when
 * the pipeline ends in a filesink and no per-frame processing is wanted. */
static void on_new_buffer2 (GstElement* object,
   gpointer user_data)
{
   /* Fixed message text (was: "writed to file with success!!"). */
   g_debug("Data pulled from appsink and written to file successfully!");
}

void start_video()
{
   ProgramData *data = NULL;
   GMainLoop *loop;
   GstElement *pipeline, *source, *filter, *decoder, *colorspace, *filter2, *sink;
   GstBus *bus;
   GstCaps *filtercaps, *filter2caps;
   gint width, height, num, denom;
   const GstStructure *str;

   data = g_new0 (ProgramData, 1);

   /* Initialisation */
   gst_init (NULL, NULL);
   loop = g_main_loop_new (NULL, FALSE);
   data->loop = loop;

   /* Create gstreamer elements */
   pipeline = gst_pipeline_new ("video-player");
   source = gst_element_factory_make ("v4l2src", "file-source");
   //source   = gst_element_factory_make ("ksvideosrc", "file-source");
   filter = gst_element_factory_make ("capsfilter", "filter");
   decoder = gst_element_factory_make ("jpegdec","jpeg-decoder");
   colorspace = gst_element_factory_make ("ffmpegcolorspace","colorspace");
   filter2 = gst_element_factory_make ("capsfilter", "filter2");
   //sink = gst_element_factory_make ("filesink","file-sink");
   sink = gst_element_factory_make("appsink","app-sink");
   data->sink = pipeline;

   if (!pipeline || !source || !filter || !decoder || !colorspace || !filter2 || !sink) {
      g_printerr ("One element could not be created. Exiting.\n");
   }

   /* Set up the pipeline */
   /* we set the input filename to the source element */
   filtercaps = gst_caps_new_simple ("image/jpeg",
      "width", G_TYPE_INT, 160,
      "height", G_TYPE_INT, 120,
      "framerate", GST_TYPE_FRACTION, 30, 1,
      NULL);

   filter2caps = gst_caps_new_simple ("video/x-raw-yuv",
      "width", G_TYPE_INT, 160,
      "height", G_TYPE_INT, 120,
      "framerate", GST_TYPE_FRACTION, 30, 1,
      NULL);

   g_object_set (G_OBJECT (source), "device", "/dev/video0", NULL);
   //g_object_set (G_OBJECT (source), "device-index", 1, NULL);

   g_object_set (G_OBJECT (filter), "caps", filtercaps, NULL);
   gst_caps_unref (filtercaps);

   g_object_set (G_OBJECT (filter2), "caps", filter2caps, NULL);
   gst_caps_unref (filter2caps);

   //gst_app_sink_set_emit_signals ((GstAppSink*) sink, TRUE);
   g_object_set (G_OBJECT (sink), "emit-signals", TRUE, "sync", FALSE, NULL);

   g_signal_connect (sink, "new-buffer", G_CALLBACK (on_new_buffer), NULL);
   g_object_set(G_OBJECT(sink), "max-buffers", 1, NULL);
   //g_object_set(G_OBJECT(sink), "drop", FALSE, NULL);


   /* we add a message handler */
   bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
   gst_bus_add_watch (bus, bus_call, loop);
   gst_object_unref (bus);

   /* we add all elements into the pipeline */
   //gst_bin_add_many (GST_BIN (pipeline), source, filter, decoder, colorspace, filter2, sink, NULL);
   gst_bin_add_many (GST_BIN (pipeline), source, filter2, sink, NULL);

   /* we link the elements together */
   gst_element_link_many (source, filter2, sink, NULL);

   /* Set the pipeline to "playing" state*/
   g_print ("Now playing: \n");
   gst_element_set_state (pipeline, GST_STATE_PLAYING);

   /* Iterate */
   g_print ("Running...\n");
   g_main_loop_run (loop);

   /* Out of the main loop, clean up nicely */
   g_print ("Returned, stopping playback\n");
   gst_element_set_state (pipeline, GST_STATE_NULL);
   g_print ("Deleting pipeline\n");
   gst_object_unref (GST_OBJECT (pipeline));

}
