#include "stdafx.h"

#include "glViewer.h"
#include "NISensorController.h"
#include "NIConverter.h"
#include "SkinTracker.h"
using namespace cv;
static void key_callback(GLFWwindow* window, int key, int scancode, int action, int mods);

// Latest converted preview frames; written by main()'s loop and read by
// takeSnapShot() when F3 is pressed.
cv::Mat colorPreview;
cv::Mat depthPreview;

// Non-owning pointers to the stack objects created in main(), exposed so the
// free-function GLFW key callback can reach them. Valid only while main() runs.
NISensorController * p_ni_sensor;
glViewer * p_viewer1;
SkinTracker * p_skinTracker;

// Destination folder for F3 snapshots (trailing backslash required; the file
// name is appended directly in takeSnapShot()).
string snapshotPath = "C:\\Users\\HoangQC\\Desktop\\KinectDB\\TrainingDB-temp\\";
//SensorController ms_sensor;

void takeSnapShot();
void processCurrentImage();

// Entry point: initializes the GL viewer, the OpenNI Kinect sensor and the
// skin tracker, then runs the render/preview loop until the GLFW window closes.
// Returns 0 on normal shutdown. (Fixed: was the non-standard `void main()`.)
int main()
{
	glViewer viewer1;
	p_viewer1 = &viewer1;

	NISensorController ni_sensor;
	ni_sensor.initKinect();
	p_ni_sensor = &ni_sensor;

	SkinTracker skinTracker;
	skinTracker.initialize();
	p_skinTracker = &skinTracker;

	viewer1.initializeWindow(640,480,"viewer 001");
	viewer1.drawDepth=true;

	glfwSetKeyCallback(viewer1.window,key_callback);

#ifdef IS_USING_YUV
	// Sensor delivers packed YUV422 (2 bytes/px). colorRawForSplit aliases the
	// same buffer as a half-width 4-channel image for per-plane access.
	cv::Mat colorRaw = cv::Mat(480,640,CV_8UC2);
	cv::Mat colorRawForSplit = cv::Mat(480,320,CV_8UC4);
#else
	cv::Mat colorRaw = cv::Mat(480,640,CV_8UC3);
#endif

	cv::Mat depthRaw = cv::Mat(480,640,CV_16UC1);
	colorPreview = cv::Mat(480,640,CV_8UC3);
	// FIX: previously this stayed empty, so every imshow() shared one untitled
	// HighGUI window. Give the preview window an explicit name.
	std::string windowsName = "color preview";
	vector<Mat> imgChannels;
	while (!glfwWindowShouldClose(viewer1.window))
	{
		viewer1.colorData = ni_sensor.pColor;
		viewer1.depthData = ni_sensor.pDepth;
		viewer1.handData = ni_sensor.handCoordinates;
		viewer1.rendering();

		// Zero-copy: re-point the Mat headers at the sensor's frame buffers.
		// These Mats must not be reallocated while aliased this way.
		depthRaw.data = (uchar*)viewer1.depthData;
		colorRaw.data = ni_sensor.pColor;

#ifdef IS_USING_YUV
		cvtColor(colorRaw,colorPreview,CV_YUV2BGR_Y422);
		colorRawForSplit.data = ni_sensor.pColor;
#else
		cvtColor(colorRaw,colorPreview,CV_RGB2BGR);
#endif
		depthPreview = depthRaw;

#ifdef IS_USING_YUV
		// FIX: this branch reads colorRawForSplit, which exists only in the
		// YUV build — previously it broke compilation without IS_USING_YUV.
		if (ni_sensor.isHandTracking)
		{
			Point3f center = Point3f(ni_sensor.handCoordinates[0],ni_sensor.handCoordinates[1],ni_sensor.wHandPoint.z);
			skinTracker.trackFramesYUV422(colorRawForSplit,depthRaw,center);
			Mat temp = colorPreview(skinTracker.d_crop_rect);
			compareImage(temp,skinTracker.previewMask);
		}
#endif
		//printf("Hand Point: %f; %f; %f \n",ni_sensor.wHandPoint.x,ni_sensor.wHandPoint.y,ni_sensor.wHandPoint.z);
		imshow(windowsName,colorPreview);
		// FIX: imshow() only displays when waitKey() pumps the HighGUI event
		// loop; without it the preview window never refreshes.
		cv::waitKey(1);

		//cv::split(colorRawForSplit,imgChannels);
		//imshow(windowsName+"[1]",imgChannels[0]);
		//imshow(windowsName+"[2]",imgChannels[1]);
		//imshow(windowsName+"[3]",imgChannels[2]);
		//imshow(windowsName+"[4]",imgChannels[3]);
	}

	return 0;
}

// GLFW key handler: ESC closes the window, F1 toggles the depth overlay,
// F3 takes a snapshot, F5/F10 start/stop sensor recording.
static void key_callback(GLFWwindow* window, int key, int scancode, int action, int mods)
{
	// React only to presses and repeats; ignore release events.
	if (action == GLFW_RELEASE)
		return;

	switch (key)
	{
	case GLFW_KEY_ESCAPE:
		glfwSetWindowShouldClose(window, GL_TRUE);
		break;
	//case GLFW_KEY_END: p_ni_sensor->setNeedClose(); break;
	case GLFW_KEY_F1:
		p_viewer1->drawDepth = !p_viewer1->drawDepth;
		break;
	case GLFW_KEY_F3:
		takeSnapShot();
		break;
	case GLFW_KEY_F5:
		p_ni_sensor->recordStart();
		break;
	case GLFW_KEY_F10:
		p_ni_sensor->recordStop();
		break;
	}
}

// Writes the current color (BMP) and depth (TIFF) previews to snapshotPath,
// numbering files with a persistent counter so snapshots never overwrite.
void takeSnapShot()
{
	static int num_shot = 0;
	++num_shot;

	const std::string suffix = std::to_string(num_shot);
	cv::imwrite(snapshotPath + "color_" + suffix + ".bmp", colorPreview);
	cv::imwrite(snapshotPath + "depth_" + suffix + ".tiff", depthPreview);

	printf("shoot %d\n",num_shot);
}