/****************************************************************************
*                                                                           *
*  OpenNI 1.x Alpha                                                         *
*  Copyright (C) 2011 PrimeSense Ltd.                                       *
*                                                                           *
*  This file is part of OpenNI.                                             *
*                                                                           *
*  OpenNI is free software: you can redistribute it and/or modify           *
*  it under the terms of the GNU Lesser General Public License as published *
*  by the Free Software Foundation, either version 3 of the License, or     *
*  (at your option) any later version.                                      *
*                                                                           *
*  OpenNI is distributed in the hope that it will be useful,                *
*  but WITHOUT ANY WARRANTY; without even the implied warranty of           *
*  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the             *
*  GNU Lesser General Public License for more details.                      *
*                                                                           *
*  You should have received a copy of the GNU Lesser General Public License *
*  along with OpenNI. If not, see <http://www.gnu.org/licenses/>.           *
*                                                                           *
****************************************************************************/

/***
 * We use this class as a template to have visual feedback of the Kinect
 * interaction out-of-the-box.
 *
 * If something is not well commented, it is from the original sample file.
 */
#include "SceneDrawer.h"

// Enables verbose debug drawing/printing when true
bool DEBUG = false;

/**
 * Reference to the NITE user generator that is used for
 * controlling detected/tracked/lost/new users
 */
extern xn::UserGenerator g_UserGenerator;

/**
 * Reference to DepthGenerator that keeps information about
 * the depth image of the currently captured kinect information
 */
extern xn::DepthGenerator g_DepthGenerator;

/**********************GUI options*******************/
extern XnBool g_bDrawBackground;
extern XnBool g_bDrawPixels;
extern XnBool g_bDrawSkeleton;
extern XnBool g_bPrintID;
extern XnBool g_bPrintState;
extern XnBool g_kinectToServer;

// Expected resolution of the depth/RGB streams
const Size frameSize(640, 480);
#define MAX_DEPTH 10000
// Accumulative depth histogram used to shade the depth map
float g_pDepthHist[MAX_DEPTH];

// Texture coordinates for the depth-map quad
GLfloat texcoords[8];

//Indicating values to display according swipe information
int swipeCounterL = 0;
int swipeCounterR = 0;
XnChar strSWIPE[200];
/****************************************************/

/*****************Interaction offsets****************/
/*********************an options********************/
double distance_offset = 0.25;
double distance_range = 0.32;
double distance_interaction = distance_range * 2.7/3.0;

double direction_offset = 0.0;
double direction_range = 0.35;
double swipe_offset = 20;
int swipeActivatorFactor = 4;

extern XnBool g_useRelativeTrackingPosition;
/***************************************************/

/**
 * Last hand positions, used to recognize swipes.
 * A swipe is recognized if the distance between consecutive hand
 * positions is greater than swipe_offset (see DrawSwipe()).
 */
XnPoint3D lastPositionsRightHand[2];

/**
 * Counts how many hand positions have been collected so far; swipes are
 * only drawn once the lastPositionsRightHand history is completely filled.
 */
int countPositions = 0;

/**
 * The last detected position of the right hand
 */
XnSkeletonJointPosition lastInitialRightHandPosition;


/*****************Indicators for interaction mode****************/
// Is the hand in the interaction area
bool isActiveLHandTracking = false;
// Is the hand in the area to control the systemcursor
bool isActiveMouseTracking = false;
// Is a fist detected (using OpenCV). Works pretty bad on long distances and is, therefore, not used
bool isFist = false;
/****************************************************************/

const unsigned int maxUsers = 20;

/**********************Server configurations*********************/
/**************************and constants*************************/
// Reference to the opened socket server
int serverSocket;
// retValue is used to check for errors during sending
int retValue;
// Datastructure used for server address configuration
struct sockaddr_in serverAddress;

// Message fields sent to the server; indexed by the MSG_* constants below
std::string rightHandMessage[11];
const int MSG_FORWARD = 0;
const int MSG_BACKWARD = 1;
const int MSG_LEFT = 2;
const int MSG_RIGHT = 3;
const int MSG_UP = 4;
const int MSG_DOWN = 5;
const int MSG_SWIPE_LEFT = 6;
const int MSG_SWIPE_RIGHT = 7;
const int MSG_CLICK = 8;
const int MSG_ACTIVE_MOUSE = 9;
const int MSG_ACTIVE_BLOCK = 10;
/****************************************************************/

/************************Global values for OpenCV*****************/
Mat bgrMat(frameSize, CV_8UC3);
Mat depthMat(frameSize, CV_16UC1);
Mat depthMat8(frameSize, CV_8UC1);
Mat depthMatBgr(frameSize, CV_8UC3);
// BUGFIX: "const Mat emptyMat();" declared a *function* returning const Mat
// (the "most vexing parse"), not an empty matrix object.
const Mat emptyMat;
Mat mask(frameSize, CV_8UC1);
const float minHandExtension = 0.2f; // in meters
const double grabConvexity = 0.8;
/*****************************************************************/



// Per-user calibration / pose-detection error state, updated by the callbacks below
std::map<XnUInt32, std::pair<XnCalibrationStatus, XnPoseDetectionStatus> > m_Errors;
// Callback: records the most recent calibration status for the given user.
void XN_CALLBACK_TYPE MyCalibrationInProgress(xn::SkeletonCapability& capability, XnUserID id, XnCalibrationStatus calibrationError, void* pCookie)
{
	std::pair<XnCalibrationStatus, XnPoseDetectionStatus>& entry = m_Errors[id];
	entry.first = calibrationError;
}
// Callback: records the most recent pose-detection status for the given user.
void XN_CALLBACK_TYPE MyPoseInProgress(xn::PoseDetectionCapability& capability, const XnChar* strPose, XnUserID id, XnPoseDetectionStatus poseError, void* pCookie)
{
	std::pair<XnCalibrationStatus, XnPoseDetectionStatus>& entry = m_Errors[id];
	entry.second = poseError;
}


/**************************************************************************************/
/****************************OpenCV fist recognition***********************************/

float getJointImgCoordinates(const SkeletonCapability &skeletonCapability, const XnUserID userId, const XnSkeletonJoint skeletonJoint, float *v) {
	XnVector3D projective;
	XnSkeletonJointPosition skeletonJointPosition;
	skeletonCapability.GetSkeletonJointPosition(userId, skeletonJoint, skeletonJointPosition);

	g_DepthGenerator.ConvertRealWorldToProjective(1, &skeletonJointPosition.position, &projective);

	v[0] = projective.X;
	v[1] = projective.Y;
	v[2] = projective.Z / 1000.0f;

	return skeletonJointPosition.fConfidence;
}

/**
 * Draws a circle at the given projected position (rHand_proj) with the given
 * radius. Simple OpenGL drawing is used here: the caller is expected to have
 * opened a glBegin(GL_TRIANGLE_FAN) block; we emit the fan center followed by
 * the rim vertices.
 */
void DrawCircle(XnPoint3D rHand_proj, double radius) {
	if (DEBUG)
		// BUGFIX: radius is a double and must be printed with %f (was %i,
		// which is undefined behavior for a double argument)
		printf("I drew something at %f, %f, %f with radius %f\n", rHand_proj.X, rHand_proj.Y, rHand_proj.Z, radius);

	glVertex2f(rHand_proj.X, rHand_proj.Y); // fan center
	for (int angle = 0; angle <= 360; angle += 6) {
		// BUGFIX: sin/cos expect radians; the loop variable is in degrees
		double rad = angle * 3.14159265358979323846 / 180.0;
		glVertex2f(rHand_proj.X + sin(rad) * radius, rHand_proj.Y + cos(rad) * radius);
	}
}

/**
 * Using OpenCV to get the contour of the tracked hand.
 * It is used to recognize fingers later on (see: detectFingerTips()).
 *
 * The basic idea of this method is copied from a sample application on the web.
 * Since we do not react on fist recognition, it must not be taken into account.
 *
 * @param depthMat     16-bit depth image of the full frame
 * @param v            hand coordinates: v[0]=x[px], v[1]=y[px], v[2]=z[meters]
 * @param handContour  output: polygonal approximation of the hand contour
 * @return true if a contour was found inside the hand region
 */
bool getHandContour(const Mat &depthMat, const float *v, vector<Point> &handContour) {
	const int maxHandRadius = 128; // in px
	const double epsilon = 17.5; // approximation accuracy (maximum distance between the original hand contour and its approximation)

	unsigned short depth = (unsigned short) (v[2] * 1000.0f); // hand depth
	// BUGFIX: renamed from near/far, which collide with legacy Windows macros
	unsigned short nearClip = depth - 100; // near clipping plane
	unsigned short farClip = depth + 100; // far clipping plane

	mask.setTo(0);

	// extract hand region: a filled circle around the hand, restricted to
	// the depth slice [nearClip, farClip]
	circle(mask, Point(v[0], v[1]), maxHandRadius, 255, CV_FILLED);
	mask = mask & depthMat > nearClip & depthMat < farClip;

	// assume largest contour in hand region to be the hand contour
	vector<vector<Point> > contours;
	findContours(mask, contours, CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE);
	int n = contours.size();
	int maxI = -1;
	int maxSize = -1;
	for (int i=0; i<n; i++) {
		int size  = contours[i].size();
		if (size > maxSize) {
			maxSize = size;
			maxI = i;
		}
	}

	bool handContourFound = (maxI >= 0);

	if (handContourFound) {
		// simplify the raw contour into a polygon
		approxPolyDP( Mat(contours[maxI]), handContour, epsilon, true );
	}

	// BUGFIX(consistency): return the flag computed above instead of
	// re-evaluating the same condition
	return handContourFound;
}

// Scans the convex hull of the hand contour for finger-like corners and clears
// the global isFist flag as soon as one is found (isFist is set to true by
// checkFist() before this is called).
// NOTE(review): despite the name, fingerTips and debugFrame are never written
// here — the only observable effect is the isFist update. Confirm whether the
// output parameters are still needed by callers.
void detectFingerTips(const vector<Point> &handContour, vector<Point> &fingerTips, Mat *debugFrame = NULL) {
	Mat handContourMat(handContour);
	double area = cv::contourArea(handContourMat); // NOTE(review): computed but unused

	const Scalar debugFingerTipColor(255,0,0);

	vector<int> hull;
	cv::convexHull(handContourMat, hull);

	// find upper and lower bounds of the hand and define cutoff threshold (don't consider lower vertices as fingers)
	int upper = 640, lower = 0;
	for (int j=0; j<hull.size(); j++) {
		int idx = hull[j]; // corner index
		if (handContour[idx].y < upper) upper = handContour[idx].y;
		if (handContour[idx].y > lower) lower = handContour[idx].y;
	}
	// keep only points in the upper 90% of the hand's vertical extent
	float cutoff = (lower - (lower - upper) * 0.1f);

	// find interior angles of hull corners
	for (int j=0; j<hull.size(); j++) {
		int idx = hull[j]; // corner index
		int pdx = idx == 0 ? handContour.size() - 1 : idx - 1; //  predecessor of idx
		int sdx = idx == handContour.size() - 1 ? 0 : idx + 1; // successor of idx

		Point v1 = handContour[sdx] - handContour[idx];
		Point v2 = handContour[pdx] - handContour[idx];

		// interior angle between the two edges meeting at this hull corner
		// NOTE(review): if v1 or v2 has zero length, norm() is 0 and this
		// divides by zero (acos of NaN) — confirm contours cannot contain
		// duplicate consecutive points after approxPolyDP
		float angle = acos( (v1.x*v2.x + v1.y*v2.y) / (norm(v1) * norm(v2)) );

		// low interior angle + within upper 90% of region -> we got a finger
		if (angle < 1.5 && handContour[idx].y < cutoff * 0.65) {
			isFist = false;
		}
	}
}

//void drawContour(Mat &img, const vector<Point> &contour, const Scalar &color) {
//	vector<vector<Point> > contours;
//	contours.push_back(contour);
//
//	glBegin(GL_LINES);
//	for (unsigned i=0; i < contours[0].size(); i++) {
//		glVertex3f(contours[0][i].x, contours[0][i].y, 0); // origin of the FIRST line
//		if (i == contour.size()-1) {
//			glVertex3f(contours[0][0].x, contours[0][0].y, 0);
//		} else {
//			glVertex3f(contours[0][i+1].x, contours[0][i+1].y, 0);
//		}
//	}
//	glEnd( );
//}

/**
 * Ratio between the area of a contour and the area of its convex hull.
 * Values near 1.0 indicate an almost convex shape (e.g. a fist); lower
 * values indicate concavities (e.g. spread fingers).
 *
 * @param contour  polygon to analyze
 * @return area(contour) / area(hull), or 0.0 for degenerate input
 */
double convexity(const vector<Point> &contour) {
	// BUGFIX(robustness): getHandContour() may leave the contour empty; a
	// contour with fewer than 3 points has no area and would make
	// convexHull/contourArea misbehave (division by zero below).
	if (contour.size() < 3)
		return 0.0;

	Mat contourMat(contour);

	vector<int> hull;
	convexHull(contourMat, hull);

	int n = hull.size();
	vector<Point> hullContour;

	for (int i=0; i<n; i++) {
		hullContour.push_back(contour[hull[i]]);
	}

	Mat hullContourMat(hullContour);

	// BUGFIX(robustness): guard against a zero-area hull (collinear points)
	double hullArea = contourArea(hullContourMat);
	if (hullArea == 0.0)
		return 0.0;

	return contourArea(contourMat) / hullArea;
}

// Experimental fist detection using OpenCV.
// Sets the global isFist flag: it starts as true and detectFingerTips()
// resets it to false as soon as a finger candidate is found. Works poorly on
// long distances and is therefore not used for actual interaction (see the
// indicator comments at the top of the file).
void checkFist(ImageGenerator imgGen, XnUserID activeUser) {
	isFist = true;

	// iterate over all user
	float conf; // joint confidence (NOTE(review): currently unused)
	float rh[3]; // right hand coordinates (x[px], y[px], z[meters])
	float lh[3]; // left hand coordinates
	float t[3]; // torso coordinate

	// acquire bgr image
	{
		Mat mat(frameSize, CV_8UC3, (unsigned char*) imgGen.GetImageMap());
		cvtColor(mat, bgrMat, CV_RGB2BGR);
	}

	// acquire depth image
	{
		Mat mat(frameSize, CV_16UC1, (unsigned char*) g_DepthGenerator.GetDepthMap());
		mat.copyTo(depthMat);
		depthMat.convertTo(depthMat8, CV_8UC1, 255.0f / 3000.0f);
		cvtColor(depthMat8, depthMatBgr, CV_GRAY2BGR);
	}

		SkeletonCapability skelCap = g_UserGenerator.GetSkeletonCap();
		// torso: only proceed if the torso was detected with full confidence
		XnPoint3D p1;
		if ( getJointImgCoordinates(skelCap, activeUser, XN_SKEL_TORSO, t) == 1 ) {
			// depth-based gray shade (NOTE(review): unused in both branches)
			unsigned char shade = 255 - (unsigned char)(t[2] *  128.0f);

			p1.X = t[0];
			p1.Y = t[1];
			p1.Z = 0;
//			glBegin(GL_TRIANGLE_FAN);
//			glColor4f(1,0,0,1);
			//DrawCircle(p1, 5);
//			glEnd();

			// right hand
			if (
				(getJointImgCoordinates(skelCap, activeUser, XN_SKEL_RIGHT_HAND, rh) == 1) /* confident detection */ &&
				(rh[2] < t[2] - minHandExtension) /* user extends hand towards screen */ &&
				(rh[1] < t[1]) /* user raises his hand */
			) {
				unsigned char shade = 255 - (unsigned char)(rh[2] *  128.0f);
				Scalar color(0, 0, shade);

				// NOTE(review): the return value of getHandContour() is
				// ignored; with an empty contour, convexity() and
				// detectFingerTips() receive degenerate input — confirm
				vector<Point> handContour;
				getHandContour(depthMat, rh, handContour);
				bool grasp = convexity(handContour) > grabConvexity; // NOTE(review): unused

				XnPoint3D p2;
				p2.X = rh[0];
				p2.Y = rh[1];
				p2.Z = 0;

//				glBegin(GL_TRIANGLE_FAN);
//				glColor4f(1,0,0,1);
//					DrawCircle(p2, 5);
//				glEnd();

				// may clear the global isFist flag
				vector<Point> fingerTips;
				detectFingerTips(handContour, fingerTips, &depthMatBgr);
			}

			// left hand (same checks as for the right hand)
			if (
				(getJointImgCoordinates(skelCap, activeUser, XN_SKEL_LEFT_HAND, lh) == 1) &&
				(lh[2] < t[2] - minHandExtension) &&
				(lh[1] < t[1]) /* user raises his hand */
			) {
				unsigned char shade = 255 - (unsigned char)(lh[2] *  128.0f);
				Scalar color(0, shade, 0);

				vector<Point> handContour;
				getHandContour(depthMat, lh, handContour);
				bool grasp = convexity(handContour) > grabConvexity;
				int thickness = grasp ? CV_FILLED : 3; // NOTE(review): unused

				XnPoint3D p3;
				p3.X = t[0];
				p3.Y = t[1];
				p3.Z = 0;

//				glBegin(GL_TRIANGLE_FAN);
//				glColor4f(1,0,0,1);
//					DrawCircle(p3, 5);
//				glEnd();

				vector<Point> fingerTips;
				detectFingerTips(handContour, fingerTips, &depthMatBgr);
			}
		}
}
/**************************************************************************************/
/**************************************************************************************/



/*********************************Server Communication*********************************/
/**************************************************************************************/

/**
 * Called after an OpenGL rendering loop is completed.
 * Serializes the current rightHandMessage fields into a single
 * '#'-separated "KINECT-ACTION" datagram and sends it to the server.
 * On failure the global retValue is -1 and an error is printed.
 */
void UpdateServer() {
	std::stringstream kinectActionMsg;
	kinectActionMsg << "KINECT-ACTION";

	for (int i = 0; i < 11; i++) {
		kinectActionMsg << "#";
		kinectActionMsg << rightHandMessage[i];
	}

	// BUGFIX: keep the std::string alive for the duration of the send.
	// Calling c_str() on the temporary returned by str() left a dangling
	// pointer once the temporary was destroyed at the end of the statement.
	const std::string message = kinectActionMsg.str();
	retValue = sendto(serverSocket, message.c_str(), message.length(), 0, (struct sockaddr *)&serverAddress, sizeof(serverAddress));

	if (retValue == -1) {
		perror("Could not send message.");
	}
}

/**
 * Initiate the server connection.
 * The server is assumed to run on the same machine, so localhost is
 * hard-coded; there is no configuration GUI for the Kinect client.
 *
 * @return 0 on success, 1 on any setup error
 */
int initServer() {
	// Target address: localhost, UDP port 9876
	serverAddress.sin_family = AF_INET;
	retValue = inet_aton("127.0.0.1", &serverAddress.sin_addr);
	if (retValue == 0) {
		perror("Cant translate ip to any host.");
		return 1;
	}
	serverAddress.sin_port = htons(9876);

	// Open the UDP socket later used by UpdateServer()
	serverSocket = socket(PF_INET, SOCK_DGRAM, 0);
	if (serverSocket == -1) {
		perror("Cant create SocketConnection.");
		return 1;
	}

	printf("Kinect sensor is connected to server\n");
	return 0;
}

// Clean up server configuration: release the UDP socket used for talking
// to the server.
void closeServerConnection() {
	// BUGFIX(robustness): the return value of close() was silently ignored;
	// report failures so socket leaks/errors are visible.
	if (close(serverSocket) == -1) {
		perror("Could not close server socket.");
	}
	printf("Connection to server is closed\n");
}

/**************************************************************************************/
/**************************************************************************************/



/**************************************Kinect Stuff************************************/
/**************************************************************************************/

/**
 * Returns the smallest power of two that is >= n (minimum 2).
 * Used to size OpenGL textures, which historically had to have
 * power-of-two dimensions.
 *
 * @param n  requested minimum size
 * @return smallest power of two >= n (at least 2); for n larger than the
 *         largest representable power of two, that largest power is returned
 */
unsigned int getClosestPowerOfTwo(unsigned int n)
{
	unsigned int m = 2;
	while (m < n)
	{
		// BUGFIX: guard the shift against overflow. For n > 2^31 the old
		// loop wrapped m to 0 and spun forever (0 < n always true).
		unsigned int next = m << 1;
		if (next == 0) break; // no larger power of two representable
		m = next;
	}

	return m;
}

// Creates a GL texture object and allocates an RGBA backing buffer for it.
// width/height are in-out parameters: they are rounded UP to the closest
// power of two and written back to the caller.
// NOTE(review): *buf is allocated with new[] and ownership passes to the
// caller — it is never deleted here; confirm the caller keeps it for the
// program's lifetime (DrawDepthMap stores it in a static).
GLuint initTexture(void** buf, int& width, int& height)
{
	GLuint texID = 0;
	glGenTextures(1,&texID);

	// Round dimensions up to powers of two for maximum GL compatibility
	width = getClosestPowerOfTwo(width);
	height = getClosestPowerOfTwo(height); 
	*buf = new unsigned char[width*height*4]; // RGBA, 4 bytes per pixel
	glBindTexture(GL_TEXTURE_2D,texID);

	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

	return texID;
}

/**
 * Draws a rectangle spanning the two given corners using an OpenGL
 * client-side vertex array (a triangle fan over the four corners).
 */
void DrawRectangle(float topLeftX, float topLeftY, float bottomRightX, float bottomRightY)
{
	GLfloat verts[8];
	verts[0] = topLeftX;     verts[1] = topLeftY;
	verts[2] = topLeftX;     verts[3] = bottomRightY;
	verts[4] = bottomRightX; verts[5] = bottomRightY;
	verts[6] = bottomRightX; verts[7] = topLeftY;

	glVertexPointer(2, GL_FLOAT, 0, verts);
	glDrawArrays(GL_TRIANGLE_FAN, 0, 4);

	//TODO: Maybe glFinish needed here instead - if there's some bad graphics crap
	glFlush();
}

// Draws the currently bound texture onto a rectangle spanning the two given
// corners, using the global texcoords array (set up in DrawDepthMap) as
// texture coordinates.
void DrawTexture(float topLeftX, float topLeftY, float bottomRightX, float bottomRightY)
{
	glEnableClientState(GL_TEXTURE_COORD_ARRAY);
	glTexCoordPointer(2, GL_FLOAT, 0, texcoords);
	DrawRectangle(topLeftX, topLeftY, bottomRightX, bottomRightY);
	glDisableClientState(GL_TEXTURE_COORD_ARRAY);
}

// Per-user RGB colors used when shading the depth map / skeletons.
// NOTE(review): the table has 11 entries but nColors is 10 — the last entry
// (white) appears to be reserved (presumably for background/untracked
// pixels); confirm against the drawing code that indexes with % nColors.
XnFloat Colors[][3] =
{
	{0,1,1},
	{0,0,1},
	{0,1,0},
	{1,1,0},
	{1,0,0},
	{1,.5,0},
	{.5,1,0},
	{0,.5,1},
	{.5,0,1},
	{1,1,.5},
	{1,1,1}
};
XnUInt32 nColors = 10;
#ifndef USE_GLES
/**
 * Renders a NUL-terminated string at the current raster position using the
 * given GLUT bitmap font, one character at a time.
 */
void glPrintString(void *font, char *str)
{
	int len = strlen(str);
	for (int i = 0; i < len; i++)
	{
		glutBitmapCharacter(font, str[i]);
	}
}
#endif

/**
 * From the original PlayerSample.
 * Emits the two projected endpoints of the limb between eJoint1 and eJoint2
 * as GL vertices (the caller is expected to be inside glBegin(GL_LINES)).
 * Skips the limb when the player is not tracked or either joint falls below
 * the 0.5 confidence threshold.
 */
void DrawLimb(XnUserID player, XnSkeletonJoint eJoint1, XnSkeletonJoint eJoint2)
{
	// Nothing to draw for players without an active skeleton
	if (!g_UserGenerator.GetSkeletonCap().IsTracking(player))
	{
		printf("not tracked!\n");
		return;
	}

	XnSkeletonJointPosition pos1, pos2;
	g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(player, eJoint1, pos1);
	g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(player, eJoint2, pos2);

	// Only draw limbs whose endpoints were detected with enough confidence
	if (pos1.fConfidence < 0.5 || pos2.fConfidence < 0.5)
		return;

	XnPoint3D endpoints[2];
	endpoints[0] = pos1.position;
	endpoints[1] = pos2.position;

	// Convert both endpoints from real-world to screen coordinates at once
	g_DepthGenerator.ConvertRealWorldToProjective(2, endpoints, endpoints);

	glVertex3i(endpoints[0].X, endpoints[0].Y, 0);
	glVertex3i(endpoints[1].X, endpoints[1].Y, 0);
}

// Maps an OpenNI calibration status code to a short human-readable label.
const XnChar* GetCalibrationErrorString(XnCalibrationStatus error)
{
	if (error == XN_CALIBRATION_STATUS_OK)       return "OK";
	if (error == XN_CALIBRATION_STATUS_NO_USER)  return "NoUser";
	if (error == XN_CALIBRATION_STATUS_ARM)      return "Arm";
	if (error == XN_CALIBRATION_STATUS_LEG)      return "Leg";
	if (error == XN_CALIBRATION_STATUS_HEAD)     return "Head";
	if (error == XN_CALIBRATION_STATUS_TORSO)    return "Torso";
	if (error == XN_CALIBRATION_STATUS_TOP_FOV)  return "Top FOV";
	if (error == XN_CALIBRATION_STATUS_SIDE_FOV) return "Side FOV";
	if (error == XN_CALIBRATION_STATUS_POSE)     return "Pose";
	return "Unknown";
}
// Maps an OpenNI pose-detection status code to a short human-readable label.
const XnChar* GetPoseErrorString(XnPoseDetectionStatus error)
{
	if (error == XN_POSE_DETECTION_STATUS_OK)       return "OK";
	if (error == XN_POSE_DETECTION_STATUS_NO_USER)  return "NoUser";
	if (error == XN_POSE_DETECTION_STATUS_TOP_FOV)  return "Top FOV";
	if (error == XN_POSE_DETECTION_STATUS_SIDE_FOV) return "Side FOV";
	// NB: intentionally empty in the original sample code
	if (error == XN_POSE_DETECTION_STATUS_ERROR)    return "";
	return "Unknown";
}

/**
 * Draws a line indicating the length of the swipe interaction and updates the
 * MSG_SWIPE_LEFT/MSG_SWIPE_RIGHT fields of the server message.
 * It is mostly used for debugging to "see" swipes (as a line).
 *
 * NOTE(review): older comments in this file mention 3 hand positions, but
 * only the last 2 are kept (lastPositionsRightHand has 2 entries) — confirm
 * which behavior is intended.
 */
void DrawSwipe(XnPoint3D rHand_proj) {
	//Collect position information until the 2-entry history is filled
	if (countPositions <= 1) {
		lastPositionsRightHand[countPositions] = rHand_proj;
		countPositions++;
	} else {
		//Distance between the two previously captured hand positions
		double lengthOfSwipe = sqrt(pow(lastPositionsRightHand[1].X - lastPositionsRightHand[0].X, 2.0) + pow(lastPositionsRightHand[1].Y - lastPositionsRightHand[0].Y, 2.0));
		//Shift the history: index 0 holds the newest position, index 1 the previous one
		lastPositionsRightHand[1] = lastPositionsRightHand[0];
		lastPositionsRightHand[0] = rHand_proj;

		//Actually draw the line
		glBegin(GL_LINES);
		glColor4f(1, 1, 1, 1);
		glVertex3i(lastPositionsRightHand[1].X, lastPositionsRightHand[1].Y, 0);
		glVertex3i(lastPositionsRightHand[0].X, lastPositionsRightHand[0].Y, 0);
		glEnd();

		//Configuring the message that is sent to the server.
		//A swipe only fires after swipeActivatorFactor consecutive frames
		//moved in the same direction by more than swipe_offset.
		if (lengthOfSwipe <= swipe_offset) {
			rightHandMessage[MSG_SWIPE_LEFT] = "false";
			rightHandMessage[MSG_SWIPE_RIGHT] = "false";
		} else if (lastPositionsRightHand[1].X > lastPositionsRightHand[0].X) {
			//Hand moved left: reset the right-swipe streak, extend the left one
			swipeCounterR = 0;
			swipeCounterL++;
			if (swipeCounterL >= swipeActivatorFactor) {
				sprintf(strSWIPE, "LAST SWIPE: LEFT X %i", swipeCounterL/swipeActivatorFactor);
				rightHandMessage[MSG_SWIPE_LEFT] = "true";
				rightHandMessage[MSG_SWIPE_RIGHT] = "false";
			}
		} else {
			//Hand moved right: reset the left-swipe streak, extend the right one
			swipeCounterL = 0;
			swipeCounterR++;
			if (swipeCounterR >= swipeActivatorFactor) {
				sprintf(strSWIPE, "LAST SWIPE: RIGHT X %i", swipeCounterR/swipeActivatorFactor);
				rightHandMessage[MSG_SWIPE_LEFT] = "false";
				rightHandMessage[MSG_SWIPE_RIGHT] = "true";
			}
		}
	}
}

/**
 * In this method, we calculate new information for the kinect interaction
 * gestures. Visual feedback is drawn as well for debugging.
 *
 * @param activeUser  id of the currently tracked user
 * @param torso_pos   real-world torso joint position (NOTE(review): unused here)
 * @param r_hand_pos  real-world right-hand position (drives all gesture values)
 * @param head_pos    real-world head position (depth reference for clicks)
 * @param l_hand_pos  real-world left-hand position (BrickMode indicator)
 * @param imgGen      image generator, forwarded to checkFist()
 */
void DrawLegoThings(XnUserID activeUser, XnSkeletonJointPosition torso_pos,
		XnSkeletonJointPosition r_hand_pos, XnSkeletonJointPosition head_pos, XnSkeletonJointPosition l_hand_pos, ImageGenerator imgGen) {

	//Define projected information for drawing
	XnPoint3D rHand_proj;
	XnPoint3D head_proj;
	XnPoint3D lHand_proj;

	g_DepthGenerator.ConvertRealWorldToProjective(1, &r_hand_pos.position, &rHand_proj);
	g_DepthGenerator.ConvertRealWorldToProjective(1, &head_pos.position, &head_proj);
	g_DepthGenerator.ConvertRealWorldToProjective(1, &l_hand_pos.position, &lHand_proj);


	//Active if right hand is at least distance_offset nearer to the sensor than the head
	//and if IS_ACTIVE_SINCE_HIPS is true, then also the hand must be higher than the hips
	//if IS_ACTIVE_SINCE_HIPS is false, then hand gets tracked as soon as distance_offset is reached
	if (isActiveMouseTracking || isActiveLHandTracking) {
		//REFERENCE value for interaction
		//It is the defined center for relative acceleration
		//(projected first right hand position after brickmode became active)
		XnSkeletonJointPosition ref_pos;

		//Visually indicating that left hand has activated the BrickMode
	    if (isActiveLHandTracking) {
	    	glColor4f(0, 1, 0, 1);
	    	glRectf(lHand_proj.X - 5, lHand_proj.Y + 5, lHand_proj.X + 5, lHand_proj.Y - 5);
	    }

		//Reference is either the remembered hand position (relative mode)
		//or the right shoulder joint (absolute mode)
		if (g_useRelativeTrackingPosition) {
			ref_pos = lastInitialRightHandPosition;
		} else {
			g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(
					activeUser, XN_SKEL_RIGHT_SHOULDER, ref_pos);
		}

		//Set color of interaction indicator
		//grey if only the mouse is moved
		//yellow if click is done
		//red is interaction is enabled
		glBegin(GL_TRIANGLE_FAN);
		if (isActiveLHandTracking == true) {
			glColor4f(0.8, 0.2, 0.2, 1);
			rightHandMessage[MSG_CLICK] = "false";
		} else if (isActiveMouseTracking == true && ((head_pos.position.Z/1000.0 - distance_offset) - r_hand_pos.position.Z/1000.0 <= distance_interaction)) {
			glColor4f(0.5, 0.5, 0.5, 1);
			rightHandMessage[MSG_CLICK] = "false";
	    } else if (isActiveMouseTracking == true) {
			glColor4f(0.8, 0.8, 0.2, 1);
			rightHandMessage[MSG_CLICK] = "true";
	    }

		//Draw interaction indicator
		//medium grey circle for no interaction
		//relative circle size for interaction
		//yellow circle for click

		//default is grey and medium circle
		double radius = 15.0;
		double zValue = 0.0;
		if (isActiveLHandTracking == true) {
			//Depth delta (meters) between the remembered reference and the current hand
			zValue = (lastInitialRightHandPosition.position.Z/1000.0) - r_hand_pos.position.Z/1000.0;
			radius = min(27.0, 27.0 * fabs(zValue)/distance_range);
		}
			DrawCircle(rHand_proj, radius);
		glEnd();

		if (isActiveLHandTracking == true) {
			glColor4f(1, 1, 0, 1);
		} else {
			glColor4f(1, 1, 1, 1);
		}

	//////////////////////////////
	//FORWARD AND BACKWARD VALUES
	//////////////////////////////

		std::stringstream forward;
		std::stringstream backward;
		//Calculate a relative distance from the centered joint position to the current hand joint position
		int tmp = min(100, fabs(zValue)/distance_interaction * 100.0);

		XnChar directionFB[200];
		if (zValue < 0) {
			forward << "0";
			backward << tmp;
			sprintf(directionFB, "Ba(%i)", tmp);
			glRasterPos2i(rHand_proj.X, rHand_proj.Y);
			glPrintString(GLUT_BITMAP_TIMES_ROMAN_24, directionFB);
		} else if (zValue > 0) {
			forward << tmp;
			backward << "0";
			sprintf(directionFB, "Fo(%i)", tmp);
			glRasterPos2i(rHand_proj.X, rHand_proj.Y);
			glPrintString(GLUT_BITMAP_TIMES_ROMAN_24, directionFB);
		} else {
			forward << "0";
			backward << "0";
		}
		rightHandMessage[MSG_FORWARD] = forward.str(); //forward
		rightHandMessage[MSG_BACKWARD] = backward.str(); //backward


	//////////////////////////////
	//LEFT VALUE
	//////////////////////////////

		XnChar directionL[200];
		int valueL = 0;
		if (r_hand_pos.position.X/1000 < ref_pos.position.X/1000 - ((g_useRelativeTrackingPosition) ? 0 : direction_offset)) {
			//Calculate a relative distance from the centered joint position to the current hand joint position
			//Keep in mind, that we must use relative acceleration calculation, if brickmode is enabled and take just the current distance if information is used for mousemode
			valueL =  min(100, ((ref_pos.position.X/1000.0 - ((isActiveLHandTracking) ? 0 : direction_offset)) - r_hand_pos.position.X/1000.0) / direction_range * 100.0);
			sprintf(directionL, "LEFT(%i)", valueL);
			glRasterPos2i(rHand_proj.X - 35, rHand_proj.Y);
			glPrintString(GLUT_BITMAP_TIMES_ROMAN_24, directionL);
		}
		std::stringstream left;
		left << valueL;
		rightHandMessage[MSG_LEFT] = left.str();


	//////////////////////////////
	//RIGHT VALUE
	//////////////////////////////

		XnChar directionR[200];
		int valueR = 0;
		if (r_hand_pos.position.X/1000 > ref_pos.position.X/1000 + ((g_useRelativeTrackingPosition) ? 0 : direction_offset)) {
			//Calculate a relative distance from the centered joint position to the current hand joint position
			//Keep in mind, that we must use relative acceleration calculation, if brickmode is enabled and take just the current distance if information is used for mousemode
			valueR =  min(100, (r_hand_pos.position.X/1000.0 - (ref_pos.position.X/1000.0 + ((isActiveLHandTracking) ? 0 : direction_offset))) / direction_range * 100.0);
			sprintf(directionR, "RIGHT(%i)", valueR);
			glRasterPos2i(rHand_proj.X + 25, rHand_proj.Y);
			glPrintString(GLUT_BITMAP_TIMES_ROMAN_24, directionR);
		}
		std::stringstream right;
		right << valueR;
		rightHandMessage[MSG_RIGHT] = right.str();


	//////////////////////////////
	//UP VALUE
	//////////////////////////////

		XnChar directionU[200];
		int valueU = 0;
		if (r_hand_pos.position.Y/1000 > ref_pos.position.Y/1000 + ((g_useRelativeTrackingPosition) ? 0 : direction_offset)) {
			//Calculate a relative distance from the centered joint position to the current hand joint position
			//Keep in mind, that we must use relative acceleration calculation, if brickmode is enabled and take just the current distance if information is used for mousemode
			valueU = min(100, (r_hand_pos.position.Y/1000 - (ref_pos.position.Y/1000 + ((isActiveLHandTracking) ? 0 : direction_offset))) / direction_range * 100.0);
			sprintf(directionU, "UP(%i)", valueU);
			glRasterPos2i(rHand_proj.X, rHand_proj.Y - 25);
			glPrintString(GLUT_BITMAP_TIMES_ROMAN_24, directionU);
		}
		std::stringstream up;
		up << valueU;
		rightHandMessage[MSG_UP] = up.str();

	//////////////////////////////
	//DOWN VALUE
	//////////////////////////////

		XnChar directionD[200];
		int valueD = 0;
		if (r_hand_pos.position.Y/1000 < ref_pos.position.Y/1000 - ((g_useRelativeTrackingPosition) ? 0 : direction_offset)) {
			//Calculate a relative distance from the centered joint position to the current hand joint position
			//Keep in mind, that we must use relative acceleration calculation, if brickmode is enabled and take just the current distance if information is used for mousemode
			valueD =  min(100, ((ref_pos.position.Y/1000.0 - ((isActiveLHandTracking) ? 0 : direction_offset)) - r_hand_pos.position.Y/1000.0) / direction_range * 100.0);
			sprintf(directionD, "DOWN(%i)", valueD);
			glRasterPos2i(rHand_proj.X, rHand_proj.Y + 25);
			glPrintString(GLUT_BITMAP_TIMES_ROMAN_24, directionD);
		}
		std::stringstream down;
		down << valueD;
		rightHandMessage[MSG_DOWN] = down.str();


	//////////////////////////////
	//SWIPES
	//////////////////////////////

		DrawSwipe(rHand_proj);

	//////////////////////////////
	//CHECK FINGERS / FIST
	//////////////////////////////
		//Experimentally. Not working very well for large distances
		checkFist(imgGen, activeUser);
	} else {
		//DRAW NOTHING
	}

	//////////////////////////////
	//SERVER UPDATE
	//////////////////////////////

	//NOTE(review): MSG_ACTIVE_MOUSE/MSG_ACTIVE_BLOCK are never written in this
	//function — presumably they are maintained elsewhere; verify before relying
	//on them here.
	if (g_kinectToServer == true) {
		UpdateServer();
	}


	////DEBUG outputs on the screen////
	if (DEBUG) {
		XnChar strOrigHand[200];
		XnChar strOrigHead[200];
		XnChar strMode[200];

		sprintf(strOrigHand, "HandZ = %f // HandY = %f // HandX = %f", r_hand_pos.position.Z/1000.0, r_hand_pos.position.Y/1000.0, r_hand_pos.position.X/1000.0);
		sprintf(strOrigHead, "HeadZ = %f // HeadY = %f // HeadX = %f", head_pos.position.Z/1000.0, head_pos.position.Y/1000.0, head_pos.position.X/1000.0);
		sprintf(strMode, "MouseMode = %s // BrickMode = %s", (isActiveMouseTracking)?"true":"false", (isActiveLHandTracking)?"true":"false");

		glColor4f(1, 0.9, 0.9, 1);
		glRasterPos2i(20, 40);
		glPrintString(GLUT_BITMAP_HELVETICA_18, strOrigHand);
		glRasterPos2i(20, 60);
		glPrintString(GLUT_BITMAP_HELVETICA_18, strOrigHead);
		glRasterPos2i(20, 80);
		glPrintString(GLUT_BITMAP_HELVETICA_18, strMode);
	}

	//Always draw information about swipes and fist recognition
	glColor4f(1, 0.9, 0.9, 1);
	glRasterPos2i(20, 20);
	glPrintString(GLUT_BITMAP_HELVETICA_18, strSWIPE);

	XnChar strFist[200];
	sprintf(strFist, "Fist = %s", (isFist) ? "TRUE" : "FALSE");
	//NOTE(review): raster position (20, 40) collides with the DEBUG hand
	//output above when DEBUG is enabled — the texts overlap; confirm intended.
	glRasterPos2i(20, 40);
	glPrintString(GLUT_BITMAP_HELVETICA_18, strFist);
}

/**
 * This is the method, that is called in each OpenGL rendering loop.
 * It starts, drawing the kinect default depthmap information with user related additional stuff.
 * Then, we call DrawLegoStuff for our debug information being drawn and send new information to the server.
 */
/**
 * Renders one frame: draws the Kinect depth map as an OpenGL texture (pixels
 * colored per user label, with brightness taken from a cumulative depth
 * histogram), prints a status label per user, draws the tracked skeletons,
 * updates the mouse/brick interaction modes from the hand/head/torso joint
 * positions, and finally delegates the Lego3D gesture handling to
 * DrawLegoThings() for each tracked user.
 *
 * @param dmd     Depth frame meta data (resolution and raw depth pixels).
 * @param smd     Scene meta data (per-pixel user labels; label 0 = background).
 * @param imgGen  Image generator, forwarded to DrawLegoThings (used there for
 *                fist detection — see checkFist call earlier in this file).
 * @param context OpenNI context; not referenced inside this function body.
 */
void DrawDepthMap(const xn::DepthMetaData& dmd, const xn::SceneMetaData& smd, ImageGenerator imgGen, Context context)
{
	// One-time texture state, initialized on the first frame and reused after.
	static bool bInitialized = false;	
	static GLuint depthTexID;
	static unsigned char* pDepthTexBuf;
	static int texWidth, texHeight;

	// NOTE(review): these six locals are only assigned inside the one-time
	// init branch below and are never read afterwards (DrawTexture is called
	// with explicit coordinates) — dead code inherited from the original
	// OpenNI sample; texXpos/texYpos matter only via the texcoords[] writes.
	float topLeftX;
	float topLeftY;
	float bottomRightY;
	float bottomRightX;
	float texXpos;
	float texYpos;

	if(!bInitialized)
	{
		// Texture dimensions must be powers of two; round the depth map
		// resolution up and allocate the texture buffer once.
		texWidth =  getClosestPowerOfTwo(dmd.XRes());
		texHeight = getClosestPowerOfTwo(dmd.YRes());

		depthTexID = initTexture((void**)&pDepthTexBuf,texWidth, texHeight) ;

		bInitialized = true;

		topLeftX = dmd.XRes();
		topLeftY = 0;
		bottomRightY = dmd.YRes();
		bottomRightX = 0;
		// Fraction of the (power-of-two) texture actually covered by the
		// depth image; written into the global texcoords[] used by DrawTexture.
		texXpos =(float)dmd.XRes()/texWidth;
		texYpos  =(float)dmd.YRes()/texHeight;

		memset(texcoords, 0, 8*sizeof(float));
		texcoords[0] = texXpos, texcoords[1] = texYpos, texcoords[2] = texXpos, texcoords[7] = texYpos;
	}

	unsigned int nValue = 0;
	unsigned int nHistValue = 0;
	unsigned int nIndex = 0;
	unsigned int nX = 0;
	unsigned int nY = 0;
	unsigned int nNumberOfPoints = 0;
	XnUInt16 g_nXRes = dmd.XRes();
	XnUInt16 g_nYRes = dmd.YRes();

	unsigned char* pDestImage = pDepthTexBuf;

	const XnDepthPixel* pDepth = dmd.Data();
	const XnLabel* pLabels = smd.Data();

	// Calculate the accumulative histogram: count how many pixels have each
	// depth value (0 = "no reading" and is skipped).
	memset(g_pDepthHist, 0, MAX_DEPTH*sizeof(float));
	for (nY=0; nY<g_nYRes; nY++)
	{
		for (nX=0; nX<g_nXRes; nX++)
		{
			nValue = *pDepth;

			if (nValue != 0)
			{
				g_pDepthHist[nValue]++;
				nNumberOfPoints++;
			}

			pDepth++;
		}
	}

	// Turn the counts into a cumulative distribution ...
	for (nIndex=1; nIndex<MAX_DEPTH; nIndex++)
	{
		g_pDepthHist[nIndex] += g_pDepthHist[nIndex-1];
	}
	// ... and invert/scale it to [0, 256): nearer surfaces map to brighter
	// values, with contrast spread evenly over the occupied depth range.
	if (nNumberOfPoints)
	{
		for (nIndex=1; nIndex<MAX_DEPTH; nIndex++)
		{
			g_pDepthHist[nIndex] = (unsigned int)(256 * (1.0f - (g_pDepthHist[nIndex] / nNumberOfPoints)));
		}
	}

	// Second pass over the depth data: fill the RGB texture buffer.
	pDepth = dmd.Data();
	if (g_bDrawPixels)
	{
		XnUInt32 nIndex = 0;
		// Prepare the texture map
		for (nY=0; nY<g_nYRes; nY++)
		{
			for (nX=0; nX < g_nXRes; nX++, nIndex++)
			{
				// Default to black; overwritten below when the pixel is drawn.
				pDestImage[0] = 0;
				pDestImage[1] = 0;
				pDestImage[2] = 0;
				// Draw the pixel if background drawing is enabled, or if it
				// belongs to a detected user (non-zero label).
				if (g_bDrawBackground || *pLabels != 0)
				{
					nValue = *pDepth;
					XnLabel label = *pLabels;
					// Per-user color; background (label 0) gets the last
					// palette entry.
					XnUInt32 nColorID = label % nColors;
					if (label == 0)
					{
						nColorID = nColors;
					}

					if (nValue != 0)
					{
						// Histogram-equalized brightness, tinted by user color.
						nHistValue = g_pDepthHist[nValue];

						pDestImage[0] = nHistValue * Colors[nColorID][0]; 
						pDestImage[1] = nHistValue * Colors[nColorID][1];
						pDestImage[2] = nHistValue * Colors[nColorID][2];
					}
				}

				pDepth++;
				pLabels++;
				pDestImage+=3;
			}

			// Skip the unused right-hand padding of the power-of-two texture row.
			pDestImage += (texWidth - g_nXRes) *3;
		}
	}
	else
	{
		// Pixel drawing disabled: blank the buffer instead.
		// NOTE(review): the size 3*2*g_nXRes*g_nYRes does not obviously match
		// the RGB texture layout (texWidth*texHeight*3) — inherited from the
		// original sample; confirm against initTexture's allocation.
		xnOSMemSet(pDepthTexBuf, 0, 3*2*g_nXRes*g_nYRes);
	}

	// Upload the freshly filled buffer and draw it as a full-view quad.
	glBindTexture(GL_TEXTURE_2D, depthTexID);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, texWidth, texHeight, 0, GL_RGB, GL_UNSIGNED_BYTE, pDepthTexBuf);

	// Display the OpenGL texture map
	glColor4f(0.75,0.75,0.75,1);

	glEnable(GL_TEXTURE_2D);
	DrawTexture(dmd.XRes(),dmd.YRes(),0,0);	
	glDisable(GL_TEXTURE_2D);

	char strLabel[50] = "";
	XnUserID aUsers[15];
	XnUInt16 nUsers = 15;
	g_UserGenerator.GetUsers(aUsers, nUsers);

	//If no user is tracked, set all information of server message to it's default
	if (nUsers == 0) {
		rightHandMessage[MSG_FORWARD] = "0";
		rightHandMessage[MSG_BACKWARD] = "0";
		rightHandMessage[MSG_LEFT] = "0";
		rightHandMessage[MSG_RIGHT] = "0";
		rightHandMessage[MSG_UP] = "0";
		rightHandMessage[MSG_DOWN] = "0";
		rightHandMessage[MSG_SWIPE_LEFT] = "false";
		rightHandMessage[MSG_SWIPE_RIGHT] = "false";
		rightHandMessage[MSG_CLICK] = "false";
		rightHandMessage[MSG_ACTIVE_MOUSE] = "false";
		rightHandMessage[MSG_ACTIVE_BLOCK] = "false";
	}


	//Go through all tracked users and calculate changes in hand positions and gesture detection
	for (int i = 0; i < nUsers; ++i)
	{
		if (g_bPrintID)
		{
			// Print the user ID (and optionally tracking state) at the user's
			// center of mass, converted to screen/projective coordinates.
			XnPoint3D com;
			g_UserGenerator.GetCoM(aUsers[i], com);
			g_DepthGenerator.ConvertRealWorldToProjective(1, &com, &com);

			xnOSMemSet(strLabel, 0, sizeof(strLabel));
			if (!g_bPrintState)
			{
				// Tracking
				sprintf(strLabel, "%d", aUsers[i]);
			}
			else if (g_UserGenerator.GetSkeletonCap().IsTracking(aUsers[i]))
			{
				// Tracking
				sprintf(strLabel, "%d - Active", aUsers[i]);
			}
			else if (g_UserGenerator.GetSkeletonCap().IsCalibrating(aUsers[i]))
			{
				// Calibrating
				sprintf(strLabel, "%d - Calibrating [%s]", aUsers[i], GetCalibrationErrorString(m_Errors[aUsers[i]].first));
			}
			else
			{
				// Nothing
				sprintf(strLabel, "%d - Looking for pose [%s]", aUsers[i], GetPoseErrorString(m_Errors[aUsers[i]].second));
			}


			glColor4f(1-Colors[i%nColors][0], 1-Colors[i%nColors][1], 1-Colors[i%nColors][2], 1);

			glRasterPos2i(com.X, com.Y);
			glPrintString(GLUT_BITMAP_HELVETICA_18, strLabel);
		}

		if (g_bDrawSkeleton && g_UserGenerator.GetSkeletonCap().IsTracking(aUsers[i]))
		{
			// Draw the full skeleton as limb line segments, inverted user color.
			glBegin(GL_LINES);
			glColor4f(1-Colors[aUsers[i]%nColors][0], 1-Colors[aUsers[i]%nColors][1], 1-Colors[aUsers[i]%nColors][2], 1);
			DrawLimb(aUsers[i], XN_SKEL_HEAD, XN_SKEL_NECK);

			DrawLimb(aUsers[i], XN_SKEL_NECK, XN_SKEL_LEFT_SHOULDER);
			DrawLimb(aUsers[i], XN_SKEL_LEFT_SHOULDER, XN_SKEL_LEFT_ELBOW);
			DrawLimb(aUsers[i], XN_SKEL_LEFT_ELBOW, XN_SKEL_LEFT_HAND);

			DrawLimb(aUsers[i], XN_SKEL_NECK, XN_SKEL_RIGHT_SHOULDER);
			DrawLimb(aUsers[i], XN_SKEL_RIGHT_SHOULDER, XN_SKEL_RIGHT_ELBOW);
			DrawLimb(aUsers[i], XN_SKEL_RIGHT_ELBOW, XN_SKEL_RIGHT_HAND);

			DrawLimb(aUsers[i], XN_SKEL_LEFT_SHOULDER, XN_SKEL_TORSO);
			DrawLimb(aUsers[i], XN_SKEL_RIGHT_SHOULDER, XN_SKEL_TORSO);

			DrawLimb(aUsers[i], XN_SKEL_TORSO, XN_SKEL_LEFT_HIP);
			DrawLimb(aUsers[i], XN_SKEL_LEFT_HIP, XN_SKEL_LEFT_KNEE);
			DrawLimb(aUsers[i], XN_SKEL_LEFT_KNEE, XN_SKEL_LEFT_FOOT);

			DrawLimb(aUsers[i], XN_SKEL_TORSO, XN_SKEL_RIGHT_HIP);
			DrawLimb(aUsers[i], XN_SKEL_RIGHT_HIP, XN_SKEL_RIGHT_KNEE);
			DrawLimb(aUsers[i], XN_SKEL_RIGHT_KNEE, XN_SKEL_RIGHT_FOOT);

			DrawLimb(aUsers[i], XN_SKEL_LEFT_HIP, XN_SKEL_RIGHT_HIP);
			glEnd();

			//Get all the needed Join information for our Lego3D gesture detections
			// ("tosro" is a typo for "torso" kept for byte-compatibility)
			XnSkeletonJointPosition tosro_pos;
			g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(
					aUsers[i], XN_SKEL_TORSO, tosro_pos);

			XnSkeletonJointPosition l_hand_pos;
			g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(
					aUsers[i], XN_SKEL_LEFT_HAND, l_hand_pos);

			XnSkeletonJointPosition r_hand_pos;
			g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(
					aUsers[i], XN_SKEL_RIGHT_HAND, r_hand_pos);

			XnSkeletonJointPosition head_pos;
			g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(
					aUsers[i], XN_SKEL_HEAD, head_pos);

			// Check modes
			// If left hand is nearer to the sensor than the distance_offset, brickMode is active
			// If right hand is nearer to the sensor than the distance_offset, mouseMode is active
			// Later, brickmode has a higher priory than mousemode
			// (Positions are divided by 1000, i.e. mm -> m; distance_offset is
			// presumably in meters as well — TODO confirm at its definition.)
			bool checkLHand = isActiveLHandTracking;
			bool checkMouse = isActiveMouseTracking;
			isActiveLHandTracking = l_hand_pos.position.Z/1000.0 <= tosro_pos.position.Z/1000.0 - (distance_offset);
			isActiveMouseTracking = (head_pos.position.Z/1000.0 - distance_offset) - r_hand_pos.position.Z/1000.0 >= 0;

			//If brickmode was not active but is now, than reset the center of the
			//interaction area to the current hanposition and reset countPositions for swipes
			// (three cases: brick mode just activated; mouse mode just activated
			// while brick mode is off; or brick mode just dropped while both were on)
			if ((checkLHand == false && isActiveLHandTracking == true)
					|| (checkMouse == false && isActiveMouseTracking == true && isActiveLHandTracking == false)
					|| (checkLHand == true && checkMouse == true && isActiveLHandTracking == false && isActiveMouseTracking == true)) {
				g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(
						aUsers[i], XN_SKEL_RIGHT_HAND, lastInitialRightHandPosition);
				countPositions = 0;
			}

			//Set first server message information
			rightHandMessage[MSG_ACTIVE_MOUSE] = (isActiveMouseTracking) ? "true" : "false";
			rightHandMessage[MSG_ACTIVE_BLOCK] = (isActiveLHandTracking) ? "true" : "false";

			//Go on and calculate and draw gestures for our lego3D application.
			DrawLegoThings(aUsers[i], tosro_pos, r_hand_pos, head_pos, l_hand_pos, imgGen);
		}
	}
}

/**************************************************************************************/
/**************************************************************************************/
