/**
Author: Brian Fehrman
Class: Computer Vision Fall 2012
Professor: Dr. Hoover
Assign: HW3

This program uses OpenCV to capture video from one or more attached webcams.
Given a device id on the command line it displays that single camera's feed.
Otherwise it switches several cameras (/dev/video1..video4) to auto exposure
and displays their frames, grabbing them in parallel with OpenMP. The file
also contains experimental HDR code (HDR/AUTO/combine_exps/find_w_vals) that
captures the same scene at several fixed exposures, fuses them per pixel with
a triangle weighting function, and writes the individual and combined images
to disk.
**/

/***** Includes *****/
#include <ctime>
#include <math.h>
#include <opencv/cv.h>
#include <opencv/highgui.h>
#include <iostream>
#include <stdlib.h>
#include <stdio.h>
#include <string>
#include <sstream>
#include <fstream>
#include <sys/types.h>
#include <unistd.h>
#include <stdlib.h>
#include <stdio.h>
#include "tbb/blocked_range.h"
#include "tbb/parallel_for.h"
#include <omp.h>


/**** Defines ****/
#define WINDOW_SIZE_CHOICE CV_WINDOW_NORMAL
#define NUM_EXPS 6

/**** Using Statements ****/
using namespace cv;
using namespace std;
using namespace tbb;

/**** Structs ******/

/****Consts********/
const string exposure_base =  "uvcdynctrl -d /dev/video0 -s \"Exposure (Absolute)\" ";
const int NUM_THREADS = 4;

/****** Prototypes *****/
void AUTO();
void HDR();
void set_exposure( int curr_exp );
void find_w_vals( float pixs[3], float curr_w_vals[3] );
void combine_exps( vector<Mat>& exp_images, Mat& dst );
void video_test( int n );
void get_frames( int id);
void single_video( int id);


/********** TBB Classes **************/

class Frame_Getter {
	
	public:
	
	Frame_Getter() {}
	
	void operator() ( const blocked_range< int >& r) const
	{
		
		for( int i = r.begin(); i != r.end(); i++ ) 
		{
			get_frames( i );
		}
		
	}
	
};

/******** Main ***********/

int main( int argc, char *argv[])
{	
	int id = 0;
	
	if( argc > 1 )
	{
		id = atoi( argv[ 1 ] );
		single_video( id );

	}
	 system("uvcdynctrl -d /dev/video1 -s \"Exposure, Auto Priority\" 1");
   system("uvcdynctrl -d /dev/video1 -s \"Exposure, Auto\" 3");
   	 system("uvcdynctrl -d /dev/video2 -s \"Exposure, Auto Priority\" 1");
   system("uvcdynctrl -d /dev/video2 -s \"Exposure, Auto\" 3");
   	 system("uvcdynctrl -d /dev/video3 -s \"Exposure, Auto Priority\" 1");
   system("uvcdynctrl -d /dev/video3 -s \"Exposure, Auto\" 3");
   	 system("uvcdynctrl -d /dev/video4 -s \"Exposure, Auto Priority\" 1");
   system("uvcdynctrl -d /dev/video4 -s \"Exposure, Auto\" 3");
	
	get_frames( 0 );
	//video_test( NUM_THREADS );
   //HDR();
   //AUTO();
	return 0;
}

void single_video( int id )
{
	VideoCapture cam( id );
	cam.set( CV_CAP_PROP_FRAME_WIDTH, 160 );
	cam.set( CV_CAP_PROP_FRAME_HEIGHT, 120);
	stringstream ss;
	ss << id;
	string dev_name = ss.str();
	
	namedWindow( dev_name, CV_WINDOW_NORMAL | CV_WINDOW_KEEPRATIO );
	
	for( ;; )
	{
		Mat cap;
		
		cam >> cap;
		
		imshow( dev_name, cap );
		
		if(waitKey(1) >= 0) break;
	}
}

void get_frames( int id )
{
	//id++;
	VideoCapture cam[ NUM_THREADS ];
	string window_names[ NUM_THREADS ];
	
	for( int i = 0; i < NUM_THREADS; i++ )
	{
		cam[ i ].open( i + 1 );
		
		cam[ i ].set( CV_CAP_PROP_FRAME_WIDTH, 160 );
		cam[ i ].set( CV_CAP_PROP_FRAME_HEIGHT, 120);

		stringstream ss;
		ss << i;
	
		window_names[ i ] = ss.str();
	
		namedWindow( window_names[ i ], CV_WINDOW_NORMAL | CV_WINDOW_KEEPRATIO );
	}
	
	
	for( ;; )
	{
		Mat cap[ NUM_THREADS ];
	
		#pragma omp parallel for
		for( int curr_vid = 0; curr_vid < NUM_THREADS; curr_vid++)
		{
			cam[ curr_vid ] >> cap[ curr_vid ];
		}
		
		for( int curr_vid = 0; curr_vid < NUM_THREADS; curr_vid++)
		{
			imshow( window_names[ curr_vid ], cap[ curr_vid ] );
		}
	
		if(waitKey(30) >= 0) break;
	}
	
}

/**
 * Serial fallback for the multi-camera test. The TBB parallel_for version
 * (Frame_Getter over blocked_range) is currently disabled, so this simply
 * delegates to get_frames().
 *
 * NOTE(review): the thread-count argument n is ignored by this path.
 */
void video_test( int n )
{
	get_frames( 0 );
}

void set_exposure( int curr_exp )
{
   ostringstream convert;
   string curr_string;
   
   convert << curr_exp;

   curr_string = exposure_base + convert.str();

   system( curr_string.c_str() );
}

void HDR()
{
   //clock_t begin;
   //VideoCapture cap(0); //open the default camera
	//clock_t end;
	//double time_diff = 0;
   vector <Mat> exp_images(NUM_EXPS);
   int exp_vals[] = { 15, 30, 60, 120, 240, 480};
   Mat combined_images;
   char char_num[10];
   string base_img = "Exp ";
   Mat blurred1,blurred2, diff_blur;

	//Check to see if the camera opened
	//if(!cap.isOpened())
	//{
	//  cout << "Video error" <<endl;
	//	return -1;
	//}
  // system("ls");
   
    //pid_t pid = fork();

    //if( pid < 0 ) { 
        ///* This is an error! */
        //return -1;
    //}   

    //if( pid == 0 ) { 
        ///* This is the child */

      //cout << "JERE" << endl;
      //execl( "ls", "ls");
       //// execl( "uvcdynctrl", "uvcdynctrl", "-d \"/dev/video1\" -g \"Exposure, Auto Priority\" 1", (char *)0 );
        ////execl( "uvcdynctrl", "uvcdynctrl", "-d \"/dev/video1\" -g \"Exposure, Auto Priority\"", (char *)0 );

        ///* This is also an error! */
        //return 0;
    //}

         
   system("uvcdynctrl -d /dev/video0 -s \"Exposure, Auto Priority\" 0");
   system("uvcdynctrl -d /dev/video0 -s \"Exposure, Auto\" 1");
   
	for(int i = 0; i < 1;i++)
	{

		/*Uncomment the commented code in this loop 
		  to get FPS timing output. Will add this as 
		  an input option*/
		
		//Start clock for FPS timing
		//begin = clock();

	//	s_port << "70?" << endl;
		//Create new bgr matrix and fill it with the
		//current frame from the video camera
      for( int curr_exp = 0; curr_exp < NUM_EXPS; curr_exp++)
      {  VideoCapture cap(0); 
         set_exposure( exp_vals[ curr_exp ] );

         if(waitKey(1) >= 0) break;
      //Mat frame_hsv;
         Mat frame_bgr;
         cap >> frame_bgr;
         cvtColor( frame_bgr, exp_images[ curr_exp ], CV_BGR2GRAY );
         exp_images[ curr_exp ].convertTo( exp_images[ curr_exp ], CV_32F, 1/255.0);
         
         cap.release();
      }
   //cvtColor(frame_bgr, frame_hsv, CV_BGR2HSV);
   //vector<Mat> frame_hsv_chans(3);
   //split( frame_hsv, frame_hsv_chans );
		//Convert the frame to be 32bit floating
		//frame_bgr.convertTo(frame_bgr, CV_32F, 1/255.0);	
		
		//Convert to gray scale
		//cvtColor(frame_bgr, frame_bgr, CV_BGR2GRAY);
		
		//Show the unmodified gray scale image
		//imshow("Non SVD", frame_bgr);
      combine_exps( exp_images, combined_images );
      
      GaussianBlur( combined_images, blurred1, Size(7,7), 1.5 );
      GaussianBlur( blurred1, blurred2, Size(7,7) , 1.5 );
      
      absdiff( blurred1, blurred2, diff_blur);
      
      for( int curr_show = 0; curr_show < NUM_EXPS; curr_show++)
      {
         base_img = "Exp ";
         sprintf( char_num, "%d", curr_show );
         imshow( strcat( (char*) base_img.c_str(), char_num ), exp_images[ curr_show ] ); 
         imwrite( strcat( (char*) base_img.c_str(), ".jpg"),  exp_images[ curr_show ] * 255);
      }
      
      
      imshow("Combined", combined_images);
      imwrite("Combined.jpg", combined_images * 255);
      imshow( "Diff", diff_blur * 50);
      imwrite( "diff_blur.jpg", diff_blur * 50 * 255);

		/*Timing code*/
      /*
		end = clock();
		time_diff = (double)(end - begin);
		cout << 1.0 / (time_diff / CLOCKS_PER_SEC ) << endl;
      */
		
		
		//Will exit if a window is in focus an a key is pressed
		if(waitKey(1) >= 0) break;
	}
}

/**
 * Grabs one frame with the camera's own auto-exposure enabled and saves it
 * as auto.jpg, for comparison against the HDR output.
 *
 * Side effects: reconfigures /dev/video0 via uvcdynctrl and writes auto.jpg.
 */
void AUTO()
{
   VideoCapture cap(0);
   Mat auto_img;
   
   // Re-enable the driver's automatic exposure control.
   system("uvcdynctrl -d /dev/video0 -s \"Exposure, Auto Priority\" 1");
   system("uvcdynctrl -d /dev/video0 -s \"Exposure, Auto\" 3");
   
   // Give the driver ~100 ms to apply the settings; a keypress aborts.
   if(waitKey(100) >= 0)
   {
      return;
   }
   
   cap >> auto_img;
   cvtColor( auto_img, auto_img, CV_BGR2GRAY );
   imwrite( "auto.jpg", auto_img );
   
   cap.release();
}


/**
 * Fuses NUM_EXPS gray-scale float images (values in [0,1]) into one image.
 * Each output pixel is the weighted average of the corresponding input
 * pixels, with weights from find_w_vals() (peak weight at mid-gray).
 *
 * @param exp_images  CV_32F single-channel images, all the same size.
 * @param dst         receives the fused CV_32F image.
 */
void combine_exps( vector<Mat>& exp_images, Mat& dst )
{
   int num_cols = exp_images[ 0 ].cols;
   int num_rows = exp_images[ 0 ].rows;
   Mat out_img( num_rows, num_cols, CV_32F);
   float curr_w_vals[NUM_EXPS] = {0};
   float numer, denom;
   float pixs[NUM_EXPS];
   
   for( int curr_row = 0; curr_row < num_rows; curr_row++ )
   {
      for( int curr_col = 0; curr_col < num_cols; curr_col++ )
      {
         // Gather this pixel across all exposures.
         for( int curr_exp= 0; curr_exp < NUM_EXPS; curr_exp ++ )
         { 
            pixs[ curr_exp ] = exp_images[ curr_exp ].at<float>( curr_row, curr_col );
         }

         find_w_vals( pixs, curr_w_vals );
         
         numer = 0;
         denom = 0;
         
         for( int curr_exp = 0; curr_exp < NUM_EXPS; curr_exp++ )
         {
            numer += pixs[ curr_exp ] * curr_w_vals[curr_exp];
            denom += curr_w_vals[ curr_exp ];
         }
         
         // Bug fix: when every exposure is fully black or fully saturated all
         // weights are 0 and numer/denom would be 0/0 = NaN. Fall back to a
         // plain average of the pixel values in that case.
         if( denom > 0 )
         {
            out_img.at<float>( curr_row, curr_col ) = numer / denom;
         }
         else
         {
            float pix_sum = 0;
            for( int curr_exp = 0; curr_exp < NUM_EXPS; curr_exp++ )
            {
               pix_sum += pixs[ curr_exp ];
            }
            out_img.at<float>( curr_row, curr_col ) = pix_sum / NUM_EXPS;
         }
      }
   }
   
   dst = out_img;
}

/**
 * Computes a triangle ("tent") weight for each pixel value: weight rises
 * linearly from 0 at pixel value 0 up to 1 at mid-gray (0.5), then falls
 * back to 0 at full saturation (1.0). Well-exposed pixels get high weight.
 *
 * @param pixs         input pixel values in [0,1], one per exposure.
 * @param curr_w_vals  receives the corresponding weights.
 */
void find_w_vals( float pixs[ NUM_EXPS ], float curr_w_vals[ NUM_EXPS ] )
{
   for(int i = 0; i < NUM_EXPS; i++ )
   {
      const float p = pixs[ i ];
      curr_w_vals[ i ] = ( p < 0.5 ? p : 1 - p ) / 0.5;
   }
}

