#include "stdafx.h"

#include "NISensorController.h"
#define RECORD_PATH			"C:\\Users\\HoangQC\\Desktop\\KinectDB\\record.oni"
#define ONI_FILE_PATH		"C:\\Users\\HoangQC\\Desktop\\KinectDB\\convert.oni"


NISensorController::NISensorController(void)
{
	// BUG FIX: null out raw resource members so the destructor — which
	// unconditionally runs recordStop()/closeKinect() — is safe even when
	// initKinect() was never called or failed part-way through.
	// (Members are assumed to have no in-class initializers; this is
	// pre-C++11-style code, so they were previously indeterminate.)
	p_device = NULL;
	p_handTracker = NULL;
	pDepth = NULL;
	pColor = NULL;
	pDepthToColorMap = NULL;
	handCoordinates = NULL;
	isHandTracking = false;
}


NISensorController::~NISensorController(void)
{
	// Stop any in-progress ONI recording first (the recorder is attached to
	// the streams), then tear down streams/device and free owned pointers.
	// NOTE(review): closeKinect() dereferences p_device/p_handTracker —
	// confirm members are initialized before a NISensorController is
	// destroyed without a successful initKinect().
	recordStop();
	closeKinect();
}

int NISensorController::processHandFrame()
{
	// Per-frame NiTE work: latch the newest hand/depth/color frame data,
	// start tracking a hand whenever a wave/click gesture completes, and
	// maintain the per-hand position history consumed by convertHistory().
	// Returns 1 when the hand frame is invalid, openni::STATUS_OK otherwise.
	p_handTracker->readFrame(&m_handFrame);
	m_depthFrame =  m_handFrame.getDepthFrame();
	pDepth = (openni::DepthPixel *)m_depthFrame.getData();
	colorStream.readFrame(&m_colorFrame);
	pColor = (uchar*)m_colorFrame.getData();

	/************** WARNING ***************
	** Function runs on the NiTE new-frame *
	** event thread, parallel to the app.  *
	***************************************/
	if (!m_handFrame.isValid()) return 1;

	// A completed gesture seeds a new tracked hand at the gesture position.
	const nite::Array<nite::GestureData>& gestures = m_handFrame.getGestures();
	for (int i = 0; i < gestures.getSize(); ++i)
	{
		if (gestures[i].isComplete())
		{
			const nite::Point3f& position = gestures[i].getCurrentPosition();
			printf("Gesture %d at (%f,%f,%f)\n", gestures[i].getType(), position.x, position.y, position.z);

			nite::HandId newId;
			p_handTracker->startHandTracking(gestures[i].getCurrentPosition(), &newId);
		}
	}

	const nite::Array<nite::HandData>& hands = m_handFrame.getHands();
	for (int i = 0; i < hands.getSize(); ++i)
	{
		const nite::HandData& user = hands[i];

		if (!user.isTracking())
		{
			// Hand lost: drop its history buffer and the coordinate scratch buffer.
			printf("Lost hand %d\n", user.getId());
			nite::HandId id = user.getId();
			HistoryBuffer<20>* pHistory = g_histories[id];
			g_histories.erase(g_histories.find(id));
			delete pHistory;
			delete[] handCoordinates;	// BUG FIX: array delete — allocated with new float[60]
			handCoordinates = NULL;
			isHandTracking = false;
		}
		else
		{
			if (user.isNew())
			{
				printf("Found hand %d\n", user.getId());
				g_histories[user.getId()] = new HistoryBuffer<20>;
			}
			// Add newest position to this hand's history.
			HistoryBuffer<20> * pHistory = g_histories[user.getId()];
			pHistory->AddPoint(user.getPosition());
			// BUG FIX: the old code leaked 60 floats per tracked hand per frame.
			// isHandTracking == true implies a previous buffer exists; free it
			// before allocating the new one (20 history points * x,y,z).
			if (isHandTracking)
				delete[] handCoordinates;
			handCoordinates = new float[60];
			convertHistory(user.getId(), pHistory, handCoordinates);
			isHandTracking = true;
		}
	}
	return openni::STATUS_OK;
}

int NISensorController::initKinect()
{
	// Opens the Kinect (or an .oni recording when IS_USING_ONI is defined),
	// configures and starts the depth + color streams, optionally sets up
	// NiTE hand tracking, and enables depth-to-color registration.
	// Returns openni::STATUS_OK on success, openni::STATUS_ERROR (or 2 for
	// "no valid streams") on failure.
	openni::Status rc = openni::STATUS_OK;
	const char* deviceURI = openni::ANY_DEVICE;
#ifdef IS_USING_ONI
	deviceURI = ONI_FILE_PATH;
#endif

	rc = openni::OpenNI::initialize();
	printf("After initialization:\n%s\n", openni::OpenNI::getExtendedError());
	// BUG FIX: the initialize() status was previously ignored.
	if (!HandleStatus(rc)) return openni::STATUS_ERROR;

	openni::VideoMode depthVM,colorVM;
	
	depthVM.setFps(30);
	depthVM.setResolution(640,480);
	depthVM.setPixelFormat(openni::PIXEL_FORMAT_DEPTH_1_MM);

	colorVM.setFps(30);
	colorVM.setResolution(640,480);
	colorVM.setPixelFormat(openni::PIXEL_FORMAT_RGB888);

#ifdef IS_USING_YUV
	colorVM.setFps(15);
	colorVM.setPixelFormat(openni::PIXEL_FORMAT_YUV422);
#endif

	p_device = new openni::Device;
	rc = p_device->open(deviceURI);	
	
	if (!HandleStatus(rc)) return openni::STATUS_ERROR;   

	rc = depthStream.create(*p_device, openni::SENSOR_DEPTH);
	// BUG FIX: depthVM was fully configured above but never applied.
	rc = depthStream.setVideoMode(depthVM);
	rc = depthStream.start();
	if (!HandleStatus(rc))			{   depthStream.destroy();return  openni::STATUS_ERROR;   }

	rc = colorStream.create(*p_device, openni::SENSOR_COLOR);
	rc = colorStream.setVideoMode(colorVM);
	rc = colorStream.start();
	if (!HandleStatus(rc))			{   colorStream.destroy();return  openni::STATUS_ERROR;   }
	
	getSensorInfo(depthStream);
	getSensorInfo(colorStream);
	
	if (!depthStream.isValid() || !colorStream.isValid())
	{
		printf("No valid streams. Exiting\n");
		openni::OpenNI::shutdown();
		return 2;
	}
	colorStream.addNewFrameListener(this);

#ifdef IS_HANDTRACKING
	nite::NiTE::initialize();
	p_handTracker = new nite::HandTracker;
	if (p_handTracker->create(p_device) != nite::STATUS_OK)		return openni::STATUS_ERROR;
	p_handTracker->startGestureDetection(nite::GESTURE_WAVE);
	p_handTracker->startGestureDetection(nite::GESTURE_CLICK);
	p_handTracker->addNewFrameListener(this);
	isHandTracking = false;
#endif

	// Align depth pixels with the color image and sync frame timestamps.
	p_device->setImageRegistrationMode(openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR);
	p_device->setDepthColorSyncEnabled(true);
	pDepthToColorMap = new int[640*480*2];	// one (x,y) pair per pixel; freed in closeKinect()
	
	return openni::STATUS_OK;
}

void NISensorController::closeKinect()
{
	m_depthFrame.release();
	m_colorFrame.release();
	

#ifdef IS_HANDTRACKING
	m_handFrame.release();
	p_handTracker->removeNewFrameListener(this);
	p_handTracker->destroy();
	nite::NiTE::shutdown();
#endif
	
	colorStream.removeNewFrameListener(this);
	depthStream.destroy();
	colorStream.destroy();
	p_device->close();
	openni::OpenNI::shutdown();
	

	delete p_device; p_device = NULL;
	delete p_handTracker; p_handTracker = NULL;
	pDepth = NULL;
	pColor = NULL;
}

void NISensorController::recordStart()
{
	// Begin recording the depth and color streams to RECORD_PATH.
	// A recorder that is already valid means a session is in progress,
	// so this call becomes a no-op.
	if (m_recorder.isValid())
		return;

	m_recorder.create(RECORD_PATH);
	m_recorder.attach(depthStream);
	m_recorder.attach(colorStream);
	m_recorder.start();
}
void NISensorController::recordStop()
{
	// Stop and dispose of the active ONI recording session, if any.
	// A no-op when no recorder has been created.
	if (!m_recorder.isValid())
		return;

	m_recorder.stop();
	m_recorder.destroy();
}

/* 
Update Depth & Hand Event 
*/
void NISensorController::onNewFrame(nite::HandTracker & hand_tracker)
{
	// NiTE new-frame listener callback: delegate all hand/gesture/frame
	// processing to processHandFrame(). Runs on NiTE's event thread.
	processHandFrame();
}

/* 
Update Color Event 
*/
void NISensorController::onNewFrame(openni::VideoStream & videoStream)
{
#ifndef IS_HANDTRACKING
	// When hand tracking is disabled, processHandFrame() never runs, so the
	// color and depth frames are pulled here instead and the raw pixel
	// pointers are refreshed.
	// NOTE(review): runs on OpenNI's event thread; pColor/pDepth are
	// rewritten without synchronization — confirm readers tolerate this.
	colorStream.readFrame(&m_colorFrame);
	pColor = (uchar *)m_colorFrame.getData();

	depthStream.readFrame(&m_depthFrame);
	pDepth = (openni::DepthPixel *)m_depthFrame.getData();
#endif
}

void NISensorController::convertHistory(int id, HistoryBuffer<20>* pHistory, float * output)
{
	// Converts the most recent hand position (history index 0) from NiTE
	// world coordinates to depth-image coordinates, writing (x, y, 0) into
	// output[0..2]. Also caches the world-space point in wHandPoint.
	//
	// NOTE(review): 'output' is allocated as float[60] (room for all 20
	// history points) and 'id' is unused, but only index 0 was ever
	// converted — the original loop over the full history was a no-op for
	// i > 0 and has been removed. TODO confirm whether filling all 20
	// points was the eventual intent.
	if (pHistory->GetSize() > 0)
	{
		const nite::Point3f& position = pHistory->operator[](0);
		wHandPoint = position;
		p_handTracker->convertHandCoordinatesToDepth(position.x, position.y, position.z, &output[0], &output[1]);
		output[2] = 0;
	}
}