package es.uji.viselab.image.source.kinect;

import java.text.DecimalFormat;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import javax.vecmath.Vector3d;

import peasy.PeasyCam;
import processing.core.PApplet;
import processing.core.PImage;
import processing.core.PVector;
import saito.objloader.OBJModel;
import SimpleOpenNI.IntVector;
import SimpleOpenNI.SimpleOpenNI;
import es.uji.viselab.LogLevel;
import es.uji.viselab.LogRecorder;
import es.uji.viselab.visualservoing.PBVSEyeInHandRelative;

/**
 * Draws in a PApplet the images obtained from a Kinect; this class uses the SimpleOpenNI and
 * Processing libraries. It can show several kinds of images — see KinectFunctions for what is
 * available.
 *
 * All the functions are aimed at painting (depth, 3D, skeleton...) except one: the function
 * VISUALSERVOING calls a VisualServoing instance to provide some data.
 *
 * @author Roger Esteller-Curto
 * @see KinectFunctions
 * @see VisualServoing
 */
@SuppressWarnings({ "serial" })
public class KinectProcessing extends PApplet {

	/** Size in pixels of each image region painted on the applet. */
	protected int regionWidth = 640, regionHeight = 480;
	/** Visual-servoing sink that receives the extracted feature points. */
	protected PBVSEyeInHandRelative vs;
	/** SimpleOpenNI context wrapping the Kinect device. */
	protected SimpleOpenNI context;
	protected PImage depthImage, openGLImage;
	/** Imaging functions currently enabled; see {@link KinectFunctions}. */
	protected Set<KinectFunctions> activeFunctions = new HashSet<KinectFunctions>(16);
	protected PeasyCam cam;
	protected OBJModel model;
	float zoomF = 0.3f;
	// By default rotate the whole scene 180 deg around the x-axis;
	// the data from OpenNI comes upside down.
	float rotX = radians(180);
	float rotY = radians(0);
	protected float rotation = 0;
	// Zero-based index (into the OpenNI user list) of the skeleton being tracked.
	protected Integer currentUser;
	// Right-hand position from the previous frame, used to detect movement.
	protected PVector previousRightHandXYZ = null;

	/**
	 * Initialization of the PApplet. You can test this PApplet running it
	 * alone, but first you should uncomment the function you want to test.
	 */
	@Override
	public void setup() {

		size(regionWidth, regionHeight, P3D);
		this.context = new SimpleOpenNI(this);

		context.setMirror(false);

		this.context.enableDepth();
		this.context.enableRGB();

		stroke(255, 255, 255);
		smooth();

		this.context.enableUser(SimpleOpenNI.SKEL_PROFILE_ALL);

		// You can test this applet alone by uncommenting the function you want
		// to test and running it:

		// activate(KinectFunctions.DEPTH);
		// activate(KinectFunctions.VISUALSERVOING);
		// activate(KinectFunctions.OPENGL);
		// activate(KinectFunctions.SKELETON);

	}

	/**
	 * Performs the per-function initialization needed before the function can
	 * be used in {@link #process()}.
	 *
	 * @param function
	 *            of type KinectFunctions; depending on the function, its
	 *            required resources are initialized
	 *
	 * @see KinectFunctions
	 */
	public void initializeFunction(KinectFunctions function) {

		switch (function) {
		case DEPTH:
			depthImage = this.createImage(regionWidth, regionHeight, RGB);
			break;
		case OPENGL:
			// BUGFIX: the fov must be in radians (95 radians is meaningless) and the
			// aspect ratio must use float division (640 / 480 in int arithmetic is 1).
			perspective(radians(95), (float) regionWidth / regionHeight, 10, 150000);
			break;
		case VISUALSERVOING:
			vs = PBVSEyeInHandRelative.getInstance();
			break;
		case SKELETON:
			depthImage = this.createImage(regionWidth, regionHeight, RGB);
			break;
		}
	}

	/**
	 * Mandatory for PApplet; this is handled by the PApplet thread called to
	 * draw the image.
	 *
	 * This calls the process method, which executes the imaging procedure
	 * depending on the KinectFunctions activated.
	 *
	 * @see #process()
	 */
	@Override
	public void draw() {

		if (!activeFunctions.isEmpty()) {
			context.update();
			process();
		}
	}

	/**
	 * Depending on the imaging function(s) activated, paints the image and/or
	 * feeds the Visual Servoing procedure with the extracted feature point.
	 */
	private void process() {

		double closestX = 0, closestY = 0, closestZ = 0;
		int xPosition = 0;
		int yPosition = 0;

		if (activeFunctions.contains(KinectFunctions.DEPTH)) {
			// Draw the depth image on the screen.
			depthImage = context.depthImage();
			// BUGFIX: image() takes (x, y, width, height), not a far corner.
			image(depthImage, xPosition, yPosition, regionWidth, regionHeight);
			// The next image will be located at another (x,y) of the panel.
			xPosition = xPosition + regionWidth;

		}

		if (activeFunctions.contains(KinectFunctions.VISUALSERVOING)
				&& activeFunctions.contains(KinectFunctions.DEPTH)) {
			int closestValue = 9999;

			// Get the depth array from the Kinect.
			int[] depthValues = context.depthMap();

			// Scan every pixel and remember the closest one (smallest depth > 0).
			for (int y = 0; y < regionHeight; y++) {
				for (int x = 0; x < regionWidth; x++) {
					int i = x + y * regionWidth;
					int currentDepthValue = depthValues[i];
					if (currentDepthValue > 0 && currentDepthValue < closestValue) {
						closestValue = currentDepthValue;
						// Save its position (both X and Y coordinates).
						closestX = x;
						closestY = y;
						closestZ = currentDepthValue;
					}
				}
			}

			// Draw a red circle over the closest pixel found.
			this.fill(255, 0, 0);
			this.ellipse((int) closestX, (int) closestY, 20, 20);
			LogRecorder.log(LogLevel.STATS, "KinectProcessing:process() DEPTH+VISUALSERVOING   "
					+ (int) closestX + "   " + (int) closestY + "   " + (int) closestZ);
			// The point is provided as a displacement where the image center is
			// (0,0,0) and the displacement varies from -1 to +1 per axis.
			// The Visual Servoing class is the one to transform this
			// displacement into a robot motion. The z scaling (1200/400 mm)
			// is an empirical calibration — TODO confirm against the robot setup.
			Vector3d displacementXYZ = new Vector3d((closestX - regionWidth / 2.0)
					/ (regionWidth / 2.0), -1 * (closestY - regionHeight / 2.0)
					/ (regionHeight / 2.0), (1200 - closestZ) / (400));

			printPointCoordinates(displacementXYZ);

			// Send the displacement to the VisualServoing instance.
			vs.featurePoints(displacementXYZ.x, displacementXYZ.y, displacementXYZ.z);

		}

		if (activeFunctions.contains(KinectFunctions.OPENGL)) {
			// NOTE: draw() already called context.update() this frame, so it is
			// not repeated here (a second call could skip a frame).

			background(0, 0, 0);

			translate(width / 2, height / 2, 0);
			rotateX(rotX);
			rotateY(rotY);
			scale(zoomF);

			int[] depthMap = context.depthMap();
			int steps = 1; // increase to speed up the drawing (draw every n-th point)
			int index;
			PVector realWorldPoint;

			// Set the rotation center of the scene 1000 in front of the camera.
			translate(0, 0, -1000);

			stroke(255);

			// Assumes the RGB image has the same resolution as the depth map,
			// so the same index addresses both — TODO confirm for this device.
			PImage rgbImage = context.rgbImage();

			for (int y = 0; y < context.depthHeight(); y += steps) {
				for (int x = 0; x < context.depthWidth(); x += steps) {
					index = x + y * context.depthWidth();
					if (depthMap[index] > 0) {
						// Draw the projected point colored from the RGB image.
						realWorldPoint = context.depthMapRealWorld()[index];
						stroke(rgbImage.pixels[index]);
						point(realWorldPoint.x, realWorldPoint.y, realWorldPoint.z);
					}
				}
			}
			// Draw the Kinect camera frustum.
			context.drawCamFrustum();

		}

		if (activeFunctions.contains(KinectFunctions.SKELETON)) {
			depthImage = context.depthImage();
			// BUGFIX: image() takes (x, y, width, height), not a far corner.
			image(depthImage, xPosition, yPosition, regionWidth, regionHeight);
			// The next image will be located at another (x,y) of the panel.
			// (Was a hard-coded 640; use regionWidth for consistency.)
			xPosition = xPosition + regionWidth;

			IntVector userList = new IntVector();
			context.getUsers(userList);
			if (userList.size() > 0) {

				String output;
				this.textSize(20);
				if (userList.size() == 1)
					output = "Found " + userList.size() + " skeleton.";
				else
					output = "Found " + userList.size()
							+ " skeletons, press a number (1, 2...) to change data gather";
				this.text(output, 20, regionHeight - 40);

				if (currentUser == null)
					currentUser = 0;
				// currentUser = 0, 1, 2... while size() = 1, 2, 3...
				if (currentUser >= userList.size())
					currentUser = 0;
				// BUGFIX: parenthesize the arithmetic ("+ currentUser + 1" printed
				// "01") and actually draw the message, which was built but never shown.
				output = "Current user: " + (currentUser + 1);
				this.text(output, 20, regionHeight - 20);

				int userId = userList.get(currentUser);
				if (context.isTrackingSkeleton(userId)) {
					drawSkeleton(userId);
				}
			}
		}

		if (activeFunctions.contains(KinectFunctions.SKELETON)
				&& activeFunctions.contains(KinectFunctions.VISUALSERVOING)) {

			// Print a vertical and a horizontal line and a circle on the image
			// center as a reference frame for the hand displacement.

			double error = 0;

			this.line(regionWidth / 2, 0, regionWidth / 2, regionHeight);
			this.line(0, regionHeight / 2, regionWidth, regionHeight / 2);
			ellipse(regionWidth / 2, regionHeight / 2, 10, 10);

			IntVector userList = new IntVector();
			context.getUsers(userList);

			if (userList.size() > 0) {
				int userId = userList.get(0);
				if (context.isTrackingSkeleton(userId)) {
					PVector rightHandXYZ = getJointPosition(userId, SimpleOpenNI.SKEL_RIGHT_HAND);

					// Euclidean distance moved since the previous frame.
					if (rightHandXYZ != null && previousRightHandXYZ != null) {
						error = Math.sqrt(Math.pow(rightHandXYZ.x - previousRightHandXYZ.x, 2)
								+ Math.pow(rightHandXYZ.y - previousRightHandXYZ.y, 2)
								+ Math.pow(rightHandXYZ.z - previousRightHandXYZ.z, 2));
					}
					if (rightHandXYZ != null) {
						previousRightHandXYZ = new PVector();
						previousRightHandXYZ.x = rightHandXYZ.x;
						previousRightHandXYZ.y = rightHandXYZ.y;
						previousRightHandXYZ.z = rightHandXYZ.z;
					}

					// Only feed the servoing loop when the hand moved more than
					// the noise threshold (10 mm, empirical).
					if (rightHandXYZ != null && error > 10) {
						this.textSize(20);
						this.text("Point: ", 20, 20);
						LogRecorder.log(LogLevel.STATS,
								"KinectProcessing:process() SKELETON+VISUALSERVOING   "
										+ (int) rightHandXYZ.x + "   " + (int) rightHandXYZ.y
										+ "   " + (int) rightHandXYZ.z);
						// Normalize to roughly [-1, +1]; 800x600 and the 1200/400 mm
						// z scaling are empirical calibration values — TODO confirm.
						Vector3d displacementXYZ = new Vector3d((rightHandXYZ.x) / (800 / 2),
								(rightHandXYZ.y) / (600 / 2), (1200 - rightHandXYZ.z) / (400));

						// This only prints the position on screen.
						printPointCoordinates(displacementXYZ);

						/*
						 * TODO Some way to make a visual calibration of depth
						 * should exist: this.line(regionWidth / 3, regionHeight
						 * / 2 + 200 * pointXYZ.z, regionWidth / 3 * 2,
						 * regionHeight / 2 + 200 * pointXYZ.z);
						 */
						vs.featurePoints(displacementXYZ.x, displacementXYZ.y, displacementXYZ.z);
					}
				}
			}

		}
	}

	/**
	 * Prints the x, y, z components of the given displacement on the applet,
	 * one per line, with three decimals.
	 *
	 * @param displacementXYZ
	 *            normalized displacement to show
	 */
	private void printPointCoordinates(Vector3d displacementXYZ) {
		this.textSize(20);
		DecimalFormat myFormatter = new DecimalFormat("#.###");
		String output = myFormatter.format(displacementXYZ.x);
		this.text(output, 40, 40);
		output = myFormatter.format(displacementXYZ.y);
		this.text(output, 40, 60);
		output = myFormatter.format(displacementXYZ.z);
		this.text(output, 40, 80);
	}

	/**
	 * Draws the limbs and joints of the tracked skeleton of the given user on
	 * top of the depth image.
	 *
	 * @param userId
	 *            OpenNI user id whose skeleton is being tracked
	 */
	private void drawSkeleton(int userId) {

		stroke(0);
		strokeWeight(5);
		context.drawLimb(userId, SimpleOpenNI.SKEL_HEAD, SimpleOpenNI.SKEL_NECK);
		context.drawLimb(userId, SimpleOpenNI.SKEL_NECK, SimpleOpenNI.SKEL_LEFT_SHOULDER);
		context.drawLimb(userId, SimpleOpenNI.SKEL_LEFT_SHOULDER, SimpleOpenNI.SKEL_LEFT_ELBOW);
		context.drawLimb(userId, SimpleOpenNI.SKEL_LEFT_ELBOW, SimpleOpenNI.SKEL_LEFT_HAND);
		context.drawLimb(userId, SimpleOpenNI.SKEL_NECK, SimpleOpenNI.SKEL_RIGHT_SHOULDER);
		context.drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_SHOULDER, SimpleOpenNI.SKEL_RIGHT_ELBOW);
		context.drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_ELBOW, SimpleOpenNI.SKEL_RIGHT_HAND);
		context.drawLimb(userId, SimpleOpenNI.SKEL_LEFT_SHOULDER, SimpleOpenNI.SKEL_TORSO);
		context.drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_SHOULDER, SimpleOpenNI.SKEL_TORSO);
		context.drawLimb(userId, SimpleOpenNI.SKEL_TORSO, SimpleOpenNI.SKEL_LEFT_HIP);
		context.drawLimb(userId, SimpleOpenNI.SKEL_LEFT_HIP, SimpleOpenNI.SKEL_LEFT_KNEE);
		context.drawLimb(userId, SimpleOpenNI.SKEL_LEFT_KNEE, SimpleOpenNI.SKEL_LEFT_FOOT);
		context.drawLimb(userId, SimpleOpenNI.SKEL_TORSO, SimpleOpenNI.SKEL_RIGHT_HIP);
		context.drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_HIP, SimpleOpenNI.SKEL_RIGHT_KNEE);
		context.drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_KNEE, SimpleOpenNI.SKEL_RIGHT_FOOT);
		context.drawLimb(userId, SimpleOpenNI.SKEL_RIGHT_HIP, SimpleOpenNI.SKEL_LEFT_HIP);
		noStroke();
		fill(255, 0, 0);
		// One joint each (duplicate NECK and LEFT_HIP calls removed).
		drawJoint(userId, SimpleOpenNI.SKEL_HEAD);
		drawJoint(userId, SimpleOpenNI.SKEL_NECK);
		drawJoint(userId, SimpleOpenNI.SKEL_LEFT_SHOULDER);
		drawJoint(userId, SimpleOpenNI.SKEL_LEFT_ELBOW);
		drawJoint(userId, SimpleOpenNI.SKEL_RIGHT_SHOULDER);
		drawJoint(userId, SimpleOpenNI.SKEL_RIGHT_ELBOW);
		drawJoint(userId, SimpleOpenNI.SKEL_TORSO);
		drawJoint(userId, SimpleOpenNI.SKEL_LEFT_HIP);
		drawJoint(userId, SimpleOpenNI.SKEL_LEFT_KNEE);
		drawJoint(userId, SimpleOpenNI.SKEL_RIGHT_HIP);
		drawJoint(userId, SimpleOpenNI.SKEL_LEFT_FOOT);
		drawJoint(userId, SimpleOpenNI.SKEL_RIGHT_KNEE);
		drawJoint(userId, SimpleOpenNI.SKEL_RIGHT_FOOT);
		drawJoint(userId, SimpleOpenNI.SKEL_RIGHT_HAND);
		drawJoint(userId, SimpleOpenNI.SKEL_LEFT_HAND);

	}

	/**
	 * Returns the real-world position of the given joint, drawing on screen an
	 * ellipse on its projected position whose size shrinks with distance.
	 *
	 * @param userId
	 *            OpenNI user id whose skeleton is being tracked
	 * @param jointID
	 *            one of the SimpleOpenNI.SKEL_* joint constants
	 * @return the joint position in real-world coordinates, or {@code null}
	 *         when the tracking confidence is below 0.5
	 */
	PVector getJointPosition(int userId, int jointID) {
		PVector joint = new PVector();
		float confidence = context.getJointPositionSkeleton(userId, jointID, joint);
		if (confidence < 0.5) {
			return null;
		}
		PVector convertedJoint = new PVector();
		context.convertRealWorldToProjective(joint, convertedJoint);
		// BUGFIX: the size must be computed AFTER the projection; before the
		// fix convertedJoint.z was still 0 and the ellipse size was constant.
		float ellipseSize = map(convertedJoint.z, 500, 2000, 30, 1);
		ellipse(convertedJoint.x, convertedJoint.y, ellipseSize, ellipseSize);

		return joint;
	}

	/**
	 * Draws a small ellipse on the projected position of the given joint; does
	 * nothing when the tracking confidence is below 0.5.
	 *
	 * @param userId
	 *            OpenNI user id whose skeleton is being tracked
	 * @param jointID
	 *            one of the SimpleOpenNI.SKEL_* joint constants
	 */
	void drawJoint(int userId, int jointID) {
		PVector joint = new PVector();
		float confidence = context.getJointPositionSkeleton(userId, jointID, joint);
		if (confidence < 0.5) {
			return;
		}

		PVector convertedJoint = new PVector();
		context.convertRealWorldToProjective(joint, convertedJoint);
		ellipse(convertedJoint.x, convertedJoint.y, 5, 5);
	}

	// --- SimpleOpenNI user-tracking callbacks (names fixed by the library) ---

	/** Called by SimpleOpenNI when a new user enters the scene. */
	public void onNewUser(int userId) {
		LogRecorder.log(LogLevel.DETAIL, "Start pose detection");
		context.startPoseDetection("Psi", userId);
	}

	/** Called by SimpleOpenNI when the skeleton calibration ends. */
	public void onEndCalibration(int userId, boolean successful) {
		if (successful) {
			LogRecorder.log(LogLevel.DETAIL, "User calibrated !!!");
			context.startTrackingSkeleton(userId);
		} else {
			LogRecorder.log(LogLevel.DETAIL, "Failed to calibrate user !!!");
			context.startPoseDetection("Psi", userId);
		}
	}

	/** Called by SimpleOpenNI when the calibration pose is detected. */
	public void onStartPose(String pose, int userId) {
		LogRecorder.log(LogLevel.DETAIL, "Started pose for user");
		context.stopPoseDetection(userId);
		context.requestCalibrationSkeleton(userId, true);
	}

	/**
	 * Enables the given imaging function and initializes its resources.
	 *
	 * @see #initializeFunction(KinectFunctions)
	 */
	public void activate(KinectFunctions function) {
		activeFunctions.add(function);
		initializeFunction(function);
	}

	/**
	 * Disables the given imaging function and releases its resources.
	 *
	 * @see #disposeFunction(KinectFunctions)
	 */
	public void deActivate(KinectFunctions function) {
		activeFunctions.remove(function);
		disposeFunction(function);
	}

	/** Releases the per-function resources; only SKELETON needs cleanup. */
	private void disposeFunction(KinectFunctions function) {
		switch (function) {
		case DEPTH:
			break;
		case OPENGL:
			break;
		case VISUALSERVOING:
			break;
		case SKELETON:
			this.context.enableUser(SimpleOpenNI.SKEL_PROFILE_NONE);
			break;
		}

	}

	/**
	 * Keyboard handling: '1'..'9' select the tracked user, SPACE toggles
	 * mirroring, arrow keys rotate the OpenGL scene (SHIFT+UP/DOWN zooms).
	 */
	@Override
	public void keyPressed() {
		// Keys '1'..'9' select the user index (key 1 = user 0).
		// BUGFIX: the log was outside the if (fired for every key) and
		// "+ currentUser + 1" concatenated instead of adding.
		if (keyCode >= '1' && keyCode <= '9') {
			currentUser = keyCode - '1';
			LogRecorder.log(LogLevel.DETAIL, "Switched to user " + (currentUser + 1));
		}
		switch (key) {
		case ' ':
			context.setMirror(!context.mirror());
			break;
		}

		switch (keyCode) {
		case LEFT:
			rotY += 0.1f;
			break;
		case RIGHT:
			rotY -= 0.1f;
			break;
		case UP:
			if (keyEvent.isShiftDown())
				zoomF += 0.02f; // SHIFT+UP: zoom in
			else
				rotX += 0.1f;
			break;
		case DOWN:
			if (keyEvent.isShiftDown()) {
				zoomF -= 0.02f; // SHIFT+DOWN: zoom out, clamped to a minimum
				if (zoomF < 0.01f)
					zoomF = 0.01f;
			} else
				rotX -= 0.1f;
			break;
		}

	}

	/**
	 * @return always {@code true}; the real connectivity check (creating a
	 *         throw-away PApplet + SimpleOpenNI and enabling the scene) is
	 *         disabled because it is expensive. TODO restore a real check.
	 */
	public boolean isConnected() {
		return true;
	}

}
