/*
 *  Salient region extraction
 *
 *  Grabs RGB-D frames from an OpenNI device, converts them to a point
 *  cloud on the GPU (pcl::cuda), runs per-point saliency/normal
 *  estimation, and visualizes the occupied octree voxels of the result
 *  as cubes in a PCLVisualizer window.
 * */

#include <stdlib.h>
#include <pcl/point_cloud.h>			// holds basic data structure for PointT typename and PointCloud class                               http://docs.pointclouds.org/trunk/classpcl_1_1_point_cloud.html
#include <pcl/point_types.h>			// defines different types of point cloud structures, which can be templated with PointCloud class   http://docs.pointclouds.org/trunk/common_2include_2pcl_2point__types_8h.html
#include <pcl/win32_macros.h>			// windows related

#include <boost/shared_ptr.hpp>			// shared pointers 

#include <pcl/cuda/features/normal_3d.h>	//  http://docs.pointclouds.org/trunk/features_2include_2pcl_2features_2normal__3d_8h.html
#include <pcl/cuda/time_cpu.h>
#include <pcl/cuda/time_gpu.h>
#include <pcl/cuda/io/cloud_to_pcl.h>
#include <pcl/cuda/io/extract_indices.h>
#include <pcl/cuda/io/disparity_to_cloud.h>
#include <pcl/cuda/io/host_device.h>

#include <pcl/cuda/segmentation/connected_components.h>
#include <pcl/cuda/segmentation/mssegmentation.h>


#include <pcl/io/openni_grabber.h>
#include <pcl/io/pcd_grabber.h>
#include <pcl/visualization/cloud_viewer.h>

#include <pcl/keypoints/uniform_sampling.h>
#include <iostream>
#include <pcl/common/impl/io.hpp>
#include <pcl/octree/octree.h>

//OpenCV stuff

#include <opencv2/highgui/highgui.hpp>
#include "opencv2/gpu/gpu.hpp"
#include <opencv2/opencv.hpp>
#include <opencv2/legacy/compat.hpp>
#include <opencv2/imgproc/imgproc.hpp>

# define SALIENCY_SPHERE_NUMBER 15




using namespace pcl::cuda;


//---- Normal image and segmentation

// Primary template of the Storage -> OpenCV image-type map.
// Only the Device and Host specializations below are usable; the primary
// template deliberately maps to `void` so an unsupported Storage fails to
// compile rather than silently picking a wrong matrix type.
template <template <typename> class Storage>
struct ImageType
{
    typedef void type;
};

// GPU specialization: images live in device memory as cv::gpu::GpuMat.
template <>
struct ImageType<Device>
{
    typedef cv::gpu::GpuMat type;
    // Allocate an h x w image of OpenCV type `typ` with no row padding,
    // so the buffer can be addressed as one contiguous block.
    static void createContinuous (int h, int w, int typ, type &mat)
    {
        cv::gpu::createContinuous (h, w, typ, mat);
    }
};

// CPU specialization: images live in host memory as cv::Mat.
template <>
struct ImageType<Host>
{
    typedef cv::Mat type;
    // Allocate an h x w image of OpenCV type `typ`.
    static void createContinuous (int h, int w, int typ, type &mat)
    {
        mat = cv::Mat (h, w, typ); // assume no padding at the end of line
    }
};
//---- Normal image and segmentation


// One candidate salient region: the index of its center point in the
// cloud plus its entropy score (lower = more salient per minEntropySort).
// NOTE(review): currently unused in this file — presumably consumed by
// code elsewhere or by a planned extraction step.
struct SaliencyPoints
{
    int centerPointIdx;   // index of the region's center point in the cloud
    double entropy;       // entropy score used for ranking regions
};


/// Convert an integer to its decimal string representation.
///
/// The original declared the return type as unqualified `string`, which only
/// compiled because an included header leaked a `using namespace std`; the
/// type is now qualified explicitly (same type, so callers are unaffected).
///
/// @param number value to format
/// @return decimal text of @a number (with leading '-' for negatives)
std::string convertInt(int number)
{
    std::stringstream ss;   // create a stringstream
    ss << number;           // add number to the stream
    return ss.str();        // return a string with the contents of the stream
}


bool minEntropySort(const SaliencyPoints& d1, const SaliencyPoints& d2)
{
    return d1.entropy < d2.entropy;
}

// Drives the whole pipeline: receives OpenNI frames on the grabber thread
// (cloud_cb), computes saliency/normals on the GPU, builds an octree over
// the result, and renders cloud + occupied voxels on the visualization
// thread (viz_cb).  Cross-thread state is guarded by m_mutex.
class SaliencyEstimation
{

public:

    /// Constructor.
    /// Builds the search octree with 0.2 m voxel resolution and opens the
    /// viewer window.  cubeCoeff is pre-sized to the 10 values a PCL cube
    /// model uses (Tx,Ty,Tz, Qx,Qy,Qz,Qw, width,height,depth); the rotation
    /// is set to the identity quaternion here, while translation and extents
    /// are filled in per frame by viz_cb.
    SaliencyEstimation () :  octree(0.2f), viewer ("CUDA - SaliencyEstimation")
    {
        new_cloud = false;   // no frame processed yet
        go_on = true;        // loop flag for the (disabled) file-grabber path
	
	cubeCoeff.values.resize (10);
	cubeCoeff.values[3] = 0;   // quaternion x
	cubeCoeff.values[4] = 0;   // quaternion y
	cubeCoeff.values[5] = 0;   // quaternion z
	cubeCoeff.values[6] = 1;   // quaternion w -> identity rotation
	
	// Related to uniform_sampling
       // float leaf_size = 0.05;
       // pass_.setRadiusSearch (leaf_size);
    }



    /// Visualization callback function.
    /// Runs on the viewer's visualization thread once per render cycle.
    /// Consumes the latest cloud produced by cloud_cb (when new_cloud is set)
    /// under m_mutex, coloring points by their curvature field.
    void viz_cb (pcl::visualization::PCLVisualizer& viz)
    {
        static bool first_time = true;
        boost::mutex::scoped_lock l(m_mutex); 

        if (new_cloud)                        
        {
            typedef pcl::visualization::PointCloudColorHandlerGenericField <pcl::PointXYZRGBNormal> ColorHandler;    

            ColorHandler Color_handler (normal_cloud,"curvature");  

            if (!first_time)
            {
                //  viz.removePointCloud ("normalcloud");     	// Visualizer specific command to clear off the normals each time a new cloud is received
                //viz.removePointCloud ("cloud");		// Visualizer specific command to clear off the cloud each time a new cloud is received
                //  viz.removePointCloud ("keypoints");
                //viz.removeAllShapes();
            }
            else
	    {
                // First frame only: add the cloud and one cube per occupied
                // octree voxel.  NOTE(review): since the remove* calls above
                // are commented out, the cloud and cubes are never refreshed
                // on subsequent frames — only the first frame is displayed.
                first_time = false;
		viz.addPointCloud<pcl::PointXYZRGBNormal> (normal_cloud, Color_handler, std::string("cloud"), 0); //finally add the cloud itself alongwith the color handler
		
		// Cube extents: one octree voxel edge in each dimension.
		cubeCoeff.values[7] = voxelSideLen;
		cubeCoeff.values[8] = voxelSideLen;
		cubeCoeff.values[9] = voxelSideLen;
	    
		for(size_t i=0 ; i<voxelCenters.size() ; i++)
		{
		    // Cube translation = voxel center; shape id is the loop index.
		    cubeCoeff.values[0] = voxelCenters[i].x;
		    cubeCoeff.values[1] = voxelCenters[i].y;
		    cubeCoeff.values[2] = voxelCenters[i].z;
		    
		    viz.addCube(cubeCoeff, boost::lexical_cast<string>(i), 0);
		}
		
	    }

            // viz.addPointCloudNormals<pcl::PointXYZRGBNormal> (normal_cloud, 200, 0.1, "normalcloud");   // First add the normals by calling templated function addPointCloudNormals
            // pass on the normal_cloud object , level=200 , scale = 0.1 , string identifier of cloud
            
            //  viz.addPointCloud<pcl::PointXYZRGBNormal> (keypoints_, "keypoints");



            new_cloud = false;   // mark the frame as consumed
        }
    }



    /// Templated function for OpenNI grabber callback (templated section uses a CUDA device or a CPU).
    /// Converts the incoming depth + RGB pair to a point cloud on <Storage>,
    /// runs the CUDA saliency/normal estimation, copies the result back to a
    /// PCL host cloud, and records the occupied voxel centers of an octree
    /// built over that cloud.  Runs on the grabber's capture thread.

    template <template <typename> class Storage> void
    cloud_cb (const boost::shared_ptr<openni_wrapper::Image>& image,
              const boost::shared_ptr<openni_wrapper::DepthImage>& depth_image,
              float constant)
    {
        // Simple frame-rate meter: report every 30 frames or every 5 seconds.
        static unsigned count = 0;
        static double last = getTime ();
        double now = getTime ();
        if (++count == 30 || (now - last) > 5)
        {
            std::cout << "Average framerate: " << double(count)/double(now - last) << " Hz..........................................." <<  std::endl;
            count = 0;
            last = now;
        }

        // Create a new point cloud pointer with memory allocation
        //pcl::PointCloud<pcl::PointXYZRGB>::Ptr output (new pcl::PointCloud<pcl::PointXYZRGB>);

        // Create a pointer to the data on Device, coz Storage=Device when called in such a manner from the run function
        typename PointCloudAOS<Storage>::Ptr data;

        //ScopeTimeCPU timer ("All: ");


        // Compute the PointCloud and store it on the device because <Storage>=<Device> in this call, downsample=true,
        //depthhImg from Grabber, Image from Grabber, Constant val, Output cloud, downsample=true, stride=2 , smoothing_nr_iterations = 0 , smoothing_filter_size = 2
        d2c.compute<Storage> (depth_image, image, constant, data, true, 2); // data=output pointCloud located on the GPU device



        // create a pointer of quadruple of floats on the device
        boost::shared_ptr<typename Storage<float4>::type> normals;
        // NOTE(review): 580/2.0 presumably matches the downsampled (stride-2)
        // Kinect focal length in pixels — confirm against the device intrinsics.
        float focallength = 580/2.0;

        {
            //ScopeTimeCPU time ("Normal Estimation");
            // method located in normals_3d.h
            /// Compute normals to save as float quadruples(normals), on the Storage=GPU, (IteratorBegin, IteratorEnd, focalLength, rawPointCloudOnGPU, radius=0.05, desired_no_oF_neigbhours=30
            normals = computeSaliency<Storage, typename PointIterator<Storage,PointXYZRGB>::type > (data->points.begin (), data->points.end (), focallength, data, 0.05, 30);
        }





        // Everything below mutates state shared with viz_cb, so hold the
        // mutex for the remainder of the callback.
        boost::mutex::scoped_lock l(m_mutex);
        normal_cloud.reset (new pcl::PointCloud<pcl::PointXYZRGBNormal>);
        // Merge device points + normals into a single host-side PCL cloud.
        toPCL (*data, *normals, *normal_cloud);
        /*
        	    {
        	    //---------------- Uniform sampling stuff
        	    cloud_.reset (new Cloud);
        	    indices_.reset (new pcl::PointCloud<int>);
        	    keypoints_.reset (new pcl::PointCloud<pcl::PointXYZRGBNormal>);

        	    pass_.setInputCloud (normal_cloud);
        	    pass_.compute (*indices_);
        	    { // code to see the points, normals, rgbs, and curvature
        	    //int idx = (*indices_)[10];
        	    //cout<<(*normal_cloud)[idx];
        	    }
        	    cloud_  = normal_cloud;

        	    pcl::copyPointCloud<pcl::PointXYZRGBNormal, pcl::PointXYZRGBNormal> (*normal_cloud, indices_->points, *keypoints_);
        	    //------------------- Uniform sampling finish

        	    }

         */






        //------------- Normal Image and segmentation


        // retrieve normals as an image..
        // typename StoragePointer<Storage,char4>::type ptr = StorageAllocator<Storage,char4>::alloc (data->width * data->height);
        // createNormalsImage<Storage> (ptr, normals);
        // typename ImageType<Storage>::type normal_image (data->height, data->width, CV_8UC4, thrust::raw_pointer_cast<char4>(ptr), data->width);

        // Segmentation
        /*
              {

        	      typename ImageType<Storage>::type normal_image;
        	      typename StoragePointer<Storage,char4>::type ptr;
        	      {
        	      // ScopeTimeCPU time ("TIMING: Matrix Creation");
        	      ImageType<Storage>::createContinuous ((int)data->height, (int)data->width, CV_8UC4, normal_image);
        	      ptr = typename StoragePointer<Storage,char4>::type ((char4*)normal_image.data);
        	      createNormalsImage<Storage> (ptr, *normals);
        	      }
              //  ScopeTimeCPU time ("TIMING: Vis");


        		cv::Mat normalCvImage (normal_image);
        		// cv::Mat normalSmooth(normalCvImage);
        		// cv::Mat::convertTo();
        		// Trying to apply gaussian kernel to the exisint normalImage

        		//cv::GaussianBlur(normalCvImage,normalSmooth,cv::Size(15,15),3,3);
        		//cv::bilateralFilter ( normalCvImage, normalSmooth, 3, 3*2, 3/2 );
        		//cv::blur( normalCvImage, normalSmooth, cv::Size(5, 5 ), cv::Point(-1,-1) );

        		cv::Mat gray_image;
        		cv::cvtColor( normalCvImage, gray_image, CV_RGB2GRAY );
        		cv::Mat normalSmooth(gray_image);
        		static bool firstRun = true;
        		if(firstRun)
        		{
        		  cv::imwrite( "Normal_Image.jpg", normalCvImage );
        		  cv::imwrite( "Grey_Image.jpg", gray_image );
        		  firstRun = false;
        		}

        		cv::adaptiveThreshold(gray_image, normalSmooth, 150, cv::ADAPTIVE_THRESH_MEAN_C, cv::THRESH_BINARY_INV, 9, 0);

        		cv::namedWindow("NormalImage", CV_WINDOW_NORMAL);
        		//cv::namedWindow("NormalSegImage", CV_WINDOW_NORMAL);
        		cv::imshow ("NormalImage",normalCvImage );
        		//cv::imshow ("NormalSegImage", seg);
        		cv::waitKey (2);
        	      }

              }
              */


        //--------------- Normal Image and segmentation



        // --------------- Octree to perform nearest neighbour search and finally calculate saliency

        // Rebuild the octree from scratch for this frame's cloud.
        octree.setInputCloud (normal_cloud);
        octree.addPointsFromInputCloud ();
	
	// Cache occupied-voxel centers and the voxel edge length for viz_cb.
	voxelCenters.clear();
	octree.getOccupiedVoxelCenters(voxelCenters);
	voxelSideLen = sqrt(octree.getVoxelSquaredSideLen());

	// Free the tree so the next frame starts from an empty octree.
	octree.deleteTree();

        // --------------- Octree finish

        new_cloud = true;   // signal viz_cb that a fresh frame is ready
    }



    /// Run function called from main() which invokes the normal estimation on GPU.
    /// @param use_device  true -> GPU (Device storage) pipeline; the CPU path is
    ///                    currently commented out, so false does nothing.
    /// @param use_file    true -> (disabled) PCD-file playback path; false ->
    ///                    live OpenNI capture.  Blocks until the viewer closes.

    void
    run (bool use_device, bool use_file)
    {
        if (use_file)
        {
            /*	      pcl::Grabber* filegrabber = 0;

            	      float frames_per_second = 1;
            	      bool repeat = false;

            	      std::string path = "./frame_0.pcd";
            	      filegrabber = new pcl::PCDGrabber<pcl::PointXYZRGB > (path, frames_per_second, repeat);

            	      if (use_device)
            	      {
            		std::cerr << "[NormalEstimation] Using GPU..." << std::endl;
            		boost::function<void (const pcl::PointCloud<pcl::PointXYZRGB>::ConstPtr&)> f = boost::bind (&SaliencyEstimation::file_cloud_cb<Device>, this, _1);
            		filegrabber->registerCallback (f);
            	      }
            	      else
            	      {
            		std::cerr << "[NormalEstimation] Using CPU..." << std::endl;
            		boost::function<void (const pcl::PointCloud<pcl::PointXYZRGB>::ConstPtr&)> f = boost::bind (&SaliencyEstimation::file_cloud_cb<Host>, this, _1);
            		filegrabber->registerCallback (f);
            	      }

            	      filegrabber->start ();
            	      while (go_on)//!viewer.wasStopped () && go_on)
            	      {
            		pcl_sleep (1);
            	      }
            	      filegrabber->stop ();
            	      */
        }
        else
        {
            // NOTE(review): grabber is never deleted — leaked on exit.
            pcl::Grabber* grabber = new pcl::OpenNIGrabber();

            boost::signals2::connection c;
            if (use_device)
            {
                std::cerr << "[SaliencyEstimation] Using GPU..." << std::endl;
                boost::function<void (const boost::shared_ptr<openni_wrapper::Image>& image, const boost::shared_ptr<openni_wrapper::DepthImage>& depth_image, float)> f = boost::bind (&SaliencyEstimation::cloud_cb<Device>, this, _1, _2, _3);
                c = grabber->registerCallback (f);
            }
            else
            {
                // CPU path disabled: with use_device == false no callback is
                // registered and the viewer shows nothing.
                //std::cerr << "[SaliencyEstimation] Using CPU..." << std::endl;
                //boost::function<void (const boost::shared_ptr<openni_wrapper::Image>& image, const boost::shared_ptr<openni_wrapper::DepthImage>& depth_image, float)> f = boost::bind (&SaliencyEstimation::cloud_cb<Host>, this, _1, _2, _3);
                //c = grabber->registerCallback (f);
            }

            // Render loop runs on its own thread; viz_cb is invoked each cycle.
            viewer.runOnVisualizationThread (boost::bind(&SaliencyEstimation::viz_cb, this, _1), "viz_cb");

            grabber->start ();


            // Idle until the user closes the viewer window.
            while (!viewer.wasStopped ())
            {
                pcl_sleep (1);
            }

            grabber->stop ();
        }
    }

    //Octree Related stuff
      // Octree rebuilt per frame in cloud_cb; resolution fixed at 0.2 m in the ctor.
      pcl::octree::OctreePointCloudSearch<pcl::PointXYZRGBNormal> octree ;
      typedef vector< pcl::PointXYZRGBNormal, Eigen::aligned_allocator<pcl::PointXYZRGBNormal> > AlignedPointTVector;
      AlignedPointTVector voxelCenters;   // occupied voxel centers of the latest frame (guarded by m_mutex)
      double voxelSideLen ;               // edge length of one octree voxel (meters)
    
    //Essentials for Normal Extraction
      pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr normal_cloud;   // latest host cloud with normals (guarded by m_mutex)
      DisparityToCloud d2c;                                        // depth+RGB -> point cloud converter
      pcl::visualization::CloudViewer viewer;                      // owns the visualization thread
      boost::mutex m_mutex;                                        // guards normal_cloud / voxelCenters / voxelSideLen / new_cloud
      bool new_cloud, go_on;                                       // new_cloud: producer->consumer flag; go_on: file-path loop flag
    
    // For drawing voxels
       pcl::ModelCoefficients cubeCoeff;   // reusable 10-value cube model (see ctor)


    // Private members for unform sampling
    //typedef pcl::PointCloud<pcl::PointXYZRGBNormal> Cloud;
    //typedef Cloud::Ptr CloudPtr;
    //typedef Cloud::ConstPtr CloudConstPtr;
    //pcl::UniformSampling<pcl::PointXYZRGBNormal> pass_;
    //pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr keypoints_;
    //pcl::PointCloud<int>::Ptr indices_;
    //CloudPtr cloud_;

};

int main(int argc, char **argv)
{
    bool use_device = true;
    bool use_file = false;
    if (argc >= 2)
        use_device = true;
    if (argc >= 3)
        use_file = true;
    SaliencyEstimation v;
    v.run (use_device, use_file);
    return 0;
}
