#include "StdAfx.h"
#include "SensorConnector.h"

using namespace TipTepCore;

namespace TipTepOpenNI {

//Constructor
//Initializes the stop flag and the monitor object guarding it.
SensorConnector::SensorConnector(void)
{
	// Allocate the sync root first, then clear the stop flag that
	// Stop()/SensorIsStoped() read and write under it.
	locker = gcnew System::Object();
	stopProcessing = false;
}

//Stop image processing
//Must be called in UpdateImage event handler
void SensorConnector::Stop(void)
{	
	//System::Threading::Monitor::Enter(locker);	
	this->stopProcessing = true;
	(*handFramePtr).~HandTrackerFrameRef();
	(*pDepthFramePtr).~VideoFrameRef();
	(*depthPtr).~VideoStream();
	(*colorPtr).~VideoStream();	
	(*DevicePtr).close();
	//delete m_pHandTracker;
	//nite::NiTE::shutdown();
	//openni::OpenNI::shutdown();	
	//System::Threading::Monitor::Exit(locker);
}

bool SensorConnector::SensorIsStoped()
{
	System::Threading::Monitor::Enter(locker);
	return this->stopProcessing;
	System::Threading::Monitor::Exit(locker);
}

//Start image rendering.
//Initializes OpenNI + NiTE, opens the default device, creates a depth stream
//and a NiTE hand tracker, then runs a blocking capture loop until
//SensorIsStoped() reports true (or the depth stream becomes invalid).
//Each valid depth frame fires the UpdateImage event with the depth Frame and
//the currently tracked hand points (converted to depth coordinates).
//Returns: sc, which is never assigned anywhere in this function — on every
//exit path (error or normal) this returns a nullptr handle.
Sensor^ SensorConnector::Start(void)
{
	Sensor^ sc;
	nite::HandTrackerFrameRef handFrame;
	openni::Status rc = openni::STATUS_OK;
	nite::Status rc_nite;
	openni::Device device;
	openni::VideoStream depth, color;
	openni::VideoMode videoMode;
	const char* deviceURI = openni::ANY_DEVICE;
	// NOTE(review): raw new with no matching delete on any path (the delete
	// in Stop() is commented out) — m_pHandTracker leaks across restarts.
	m_pHandTracker = new nite::HandTracker;
	openni::VideoFrameRef pDepthFrame;

	// The member pointers alias this function's STACK locals so that Stop()
	// (called from inside the UpdateImage handler, i.e. while this frame is
	// live) can release them. They dangle as soon as Start() returns —
	// nothing may touch them afterwards.
	handFramePtr = &handFrame;
	pDepthFramePtr = &pDepthFrame;
	depthPtr = &depth;
	colorPtr = &color;
	DevicePtr = &device;

	rc = openni::OpenNI::initialize();
	if(rc !=  openni::STATUS_OK)
	{
		// Init failure is only logged; we still attempt to open the device.
		String^ clistr = gcnew String(openni::OpenNI::getExtendedError());
		TipTepLogger::Log::Write("SensorConnector::Start", "After initialization:" + clistr);
	}

	rc = device.open(deviceURI);
	if (rc != openni::STATUS_OK)
	{
		String^ clistr = gcnew String(openni::OpenNI::getExtendedError());
		TipTepLogger::Log::Write("SensorConnector::Start", "Device open failed:" + clistr);		
		return sc;  // nullptr — no sensor available
	}

	rc = depth.create(device, openni::SENSOR_DEPTH);
	if (rc == openni::STATUS_OK)
	{
//		rc = depth.Start();
		//if (rc == ONI_STATUS_OK)
		{
			// Mirror and force VGA resolution on the depth stream.
			// NOTE(review): depth.start() is commented out here; presumably
			// NiTE drives the stream via the hand tracker — confirm.
			depth.setMirroringEnabled(true);
			videoMode = depth.getVideoMode();
			videoMode.setResolution(640, 480);
			rc = depth.setVideoMode(videoMode);
		}		
	}

	rc_nite = nite::NiTE::initialize();
	if(rc_nite != NITE_STATUS_OK)
	{
		String^ clistr = gcnew String(oniGetExtendedError());
		TipTepLogger::Log::Write("SensorConnector::Start", "Nite create hand tracker failed:" + clistr);		
		return sc;
	}

//	rc_nite = nite::NiTE::CreateHandTracker(*m_pHandTracker, &device);
	//if (rc_nite != NITE_STATUS_OK)
	if(m_pHandTracker->create(&device) != nite::STATUS_OK)
	{
		String^ clistr = gcnew String(oniGetExtendedError());
		TipTepLogger::Log::Write("SensorConnector::Start", "Nite create hand tracker failed:" + clistr);			
		return sc;
	}

	// Hand tracking is bootstrapped by the wave gesture: a completed wave
	// below starts tracking a new hand at the gesture position.
	rc_nite = m_pHandTracker->startGestureDetection(nite::GESTURE_WAVE);
	if (rc_nite != NITE_STATUS_OK)
	{
		String^ clistr = gcnew String(oniGetExtendedError());
		TipTepLogger::Log::Write("SensorConnector::Start", "Nite StartGestureDetection failed:" + clistr);			
		return sc;
	}

	//m_pHandTracker->setSmoothingFactor(0.1);

	/*rc = device.CreateStream(ONI_SOURCE_DEPTH, depth);
	if (rc == ONI_STATUS_OK)
	{
		rc = depth.Start();
		if (rc != ONI_STATUS_OK)
		{
			printf("TipTep.Viewer: Couldn't start depth stream:\n%s\n", oniGetExtendedError());
			depth.Destroy();
		}
	}
	else
	{
		printf("TipTep.Viewer: Couldn't find depth stream:\n%s\n", oniGetExtendedError());
	}

	rc = device.CreateStream(ONI_SOURCE_COLOR, color);
	if (rc == ONI_STATUS_OK)
	{
		rc = color.Start();
		if (rc != ONI_STATUS_OK)
		{
			printf("TipTep.Viewer: Couldn't start color stream:\n%s\n", oniGetExtendedError());
			color.Destroy();
		}
	}
	else
	{
		printf("TipTep.Viewer: Couldn't find color stream:\n%s\n", oniGetExtendedError());
	}

	if (!depth.IsValid() && !color.IsValid())
	{
		printf("TipTep.Viewer: No valid streams. Exiting\n");
		return sc;
	}*/

	// Capture loop: blocks until Stop() is requested or the depth stream dies.
	while(!SensorIsStoped() && depth.isValid())
	{				
		//nite::HandTrackerFrameRef handFrame;
	    //openni::VideoFrameRef pDepthFrame;
		// NOTE(review): this inner rc (nite::Status) shadows the outer
		// openni::Status rc declared above.
		nite::Status rc = m_pHandTracker->readFrame(&handFrame);
		if (rc != NITE_STATUS_OK)
		{			
			TipTepLogger::Log::Write("SensorConnector::Start", "GetNextData failed");				
			return sc;
		}

		pDepthFrame = handFrame.getDepthFrame();
		if (pDepthFrame.isValid())
		{
			// Completed wave gestures seed new hand tracks.
			const nite::Array<nite::GestureData>& pGestures = handFrame.getGestures();
//			printf("Detected gestures count = %d\n",  pGestures->Size());
			for (int i = 0; i < pGestures.getSize(); ++i)
			{
				if (pGestures[i].isComplete())
				{
					const nite::Point3f& position = pGestures[i].getCurrentPosition();
					//printf("Gesture %d at (%f,%f,%f)\n", pGestures[i].GetType(), position.x, position.y, position.z);
					nite::HandId newId;
					m_pHandTracker->startHandTracking(pGestures[i].getCurrentPosition(), &newId);
				}
			}
			//add hand points
			// Collect every tracked hand as a Point3D: x/y in depth-image
			// coordinates, z kept in world millimeters from NiTE.
			const nite::Array<nite::HandData>& pHands= handFrame.getHands();
			array<TipTepCore::Point3D^>^ handPoints = gcnew array<TipTepCore::Point3D^>(0);
			if(pHands.getSize() > 0)
			{
				handPoints = gcnew array<TipTepCore::Point3D^>(pHands.getSize());		
				//printf("Detected hands count = %d\n", pHands->Size());
				for (int i = 0; i < pHands.getSize(); ++i)
				{
					const nite::HandData& user = pHands[i];
					const nite::Point3f& position = user.getPosition();
					float depthX = 0;
					float depthY = 0;
					//Convert hand coordinates to depth one
					// NOTE(review): f is never read — dead local.
					float f = position.x;
					m_pHandTracker->convertHandCoordinatesToDepth(position.x, position.y, position.z, &depthX, &depthY);
																					
					//if (user.getState() != NITE_HAND_STATE_LOST)
					{
						// NOTE(review): this inner position shadows the
						// reference above and is itself unused.
						nite::Point3f position = user.getPosition();
						handPoints[i] = gcnew TipTepCore::Point3D(depthX, depthY, user.getPosition().z);
					}
					/*else
					{
						nite::Point3f position = user.getPosition();
						handPoints[i] = gcnew TipTepCore::Point3D(depthX, depthY, user.getPosition().z);
					}	*/				
				}
			}
			//hand points are saved in the array handPoints
						
			// NOTE(review): p_rgbFrame is passed to UpdateImage without ever
			// being assigned — subscribers receive a nullptr RGB frame.
			Frame^ p_rgbFrame;
			//Frame^ p_depthFrame;			
			//sc = gcnew Sensor(depth, color);
			//sc->WaitForStreams(p_rgbFrame, p_depthFrame);
			// NOTE(review): imageSize is computed but unused.
			int imageSize = pDepthFrame.getDataSize();
			// Wrap the raw depth buffer; stride is converted from bytes to
			// pixels. The Frame presumably copies the data — the buffer is
			// only valid until the next readFrame. TODO confirm.
			Frame^ depthFrame = gcnew Frame(pDepthFrame.getStrideInBytes()/ sizeof(openni::DepthPixel),pDepthFrame.getWidth(), pDepthFrame.getHeight(), (OniDepthPixel*)pDepthFrame.getData());					
			UpdateImage(this, p_rgbFrame, depthFrame, handPoints);			
		}		
	}

	return sc;  // always nullptr — sc is never assigned
}

}
