﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;


using Nui = Microsoft.Research.Kinect.Nui;
using Microsoft.Research.Kinect.Nui;
using Microsoft.Research.Kinect.Audio;
using MSUS.Kinect.SkeletonTracker.Utils;
using System.IO;
using System.Windows.Controls;
using System.Windows.Input;
using Coding4Fun.Kinect.Wpf;
using System.Windows.Media.Imaging;
using System.Windows.Media;

namespace MSUS.Kinect.SkeletonTracker
{
	public class KinectController : IDisposable
	{
		#region Properties

		// Number of historical position samples kept per joint for gesture matching.
		private const int HistoricNodesLength = 20;
		// Kinect runtime instance (Microsoft Research Kinect SDK beta); created in Begin().
		private Runtime nui;
		// Gesture definitions loaded from GesturePath; matched against the joint history.
		private GestureList gestures;
		
		// Directory containing the .ges gesture files; must be set before Begin().
		public string GesturePath;
		// Canvas the skeleton history and gestures are rendered on; must be set before Begin().
		public Canvas MainCanvas;
		// Optional image for the full depth frame (the depth stream is only opened when set).
		public Image FullTrackingImage;
		// Optional image showing a cropped depth view around the tracked hand.
		public Image HandTrackingImage;
		// Virtual screen dimensions, used to scale joint positions to mouse coordinates.
		private int screenWidth;
		private int screenHeight;

		// Backing field for NoSkeletonTrackedWarning.
		private bool _NoSkeletonTrackedWarning;
		/// <summary>
		/// True while no skeleton is currently tracked. Raises TrackingWarningChanged
		/// only when the value actually transitions, not on every frame.
		/// </summary>
		private bool NoSkeletonTrackedWarning
		{
			get
			{
				return _NoSkeletonTrackedWarning;
			}
			set
			{
				// Only notify subscribers on a real state change.
				if (_NoSkeletonTrackedWarning != value)
				{
					_NoSkeletonTrackedWarning = value;
					OnTrackingWarningChanged(value);
				}
			}
		}


		private SkeletonPositionHistory skeletalHistory;
		/// <summary>
		/// Rolling history of joint positions for the tracked skeleton (read-only).
		/// Created in Begin() and updated on every skeletal frame.
		/// </summary>
		public SkeletonPositionHistory SkeletalHistory
		{
			get { return skeletalHistory; }
		}

		private JointID _currentJointId;
		/// <summary>
		/// The Joint that we are currently recording; this joint also drives the
		/// simulated mouse position reported through the MouseMove event.
		/// </summary>
		public JointID CurrentJointId
		{
			get
			{
				return _currentJointId;
			}
			set
			{
				_currentJointId = value;
			}
		}

		#endregion

		#region Init and Startup

		/// <summary>
		/// Creates the controller with the right hand as the default tracked joint and
		/// empty trigger/gesture collections. Call Begin() to start the sensor.
		/// </summary>
		public KinectController()
		{
			CurrentJointId = JointID.HandRight;
			jointProximityTriggers = new List<JointProximityTrigger>();
			gestures = new GestureList();
		}

		/// <summary>
		/// Initializes the Kinect runtime, skeletal smoothing, the joint history and the
		/// gesture list, and (when FullTrackingImage is set) opens the depth stream used
		/// for hand tracking. MainCanvas and GesturePath must be assigned beforehand.
		/// </summary>
		/// <exception cref="InvalidOperationException">
		/// Thrown when MainCanvas or GesturePath has not been set.
		/// </exception>
		public void Begin()
		{
			//	Fail fast on missing configuration, with the specific exception type
			//	rather than a bare Exception (callers catching Exception still match).
			if (MainCanvas == null)
			{
				throw new InvalidOperationException("MainCanvas not set in KinectController");
			}
			if (string.IsNullOrWhiteSpace(GesturePath))
			{
				throw new InvalidOperationException("GesturePath not set in KinectController");
			}

			screenHeight = (int)System.Windows.SystemParameters.VirtualScreenHeight;
			screenWidth = (int)System.Windows.SystemParameters.VirtualScreenWidth;

			nui = new Runtime();

			nui.Initialize(RuntimeOptions.UseSkeletalTracking | RuntimeOptions.UseDepthAndPlayerIndex);

			//	Must be set to true, and only after the call to Initialize.
			nui.SkeletonEngine.TransformSmooth = true;

			//	Smoothing parameters used to transform and reduce skeletal jitter.
			var parameters = new TransformSmoothParameters
			{
				Smoothing = 0.75f,
				Correction = 0.0f,
				Prediction = 0.0f,
				JitterRadius = 0.05f,
				MaxDeviationRadius = 0.04f
			};

			nui.SkeletonEngine.SmoothParameters = parameters;

			//	NOTE(review): ActualWidth/ActualHeight are 0 until the canvas has been laid
			//	out — presumably Begin() is called after the window has loaded; confirm.
			skeletalHistory = new SkeletonPositionHistory(HistoricNodesLength, (int)MainCanvas.ActualWidth, (int)MainCanvas.ActualHeight);
			ReloadGestures();

			nui.SkeletonFrameReady += new EventHandler<Nui.SkeletonFrameReadyEventArgs>(nui_SkeletonFrameReady);

			//	Only wire up and open the depth stream when an output image was supplied.
			if (FullTrackingImage != null)
			{
				nui.DepthFrameReady += new EventHandler<ImageFrameReadyEventArgs>(nui_DepthFrameReady);
				nui.DepthStream.Open(ImageStreamType.Depth, 2, ImageResolution.Resolution320x240, ImageType.DepthAndPlayerIndex);
			}
		}



		/// <summary>
		/// Reloads all of the Gesture files from the gesture path
		/// </summary>
		private void ReloadGestures()
		{
			string[] gestureFiles = Directory.GetFiles(GesturePath);

			gestures.ClearGestures();
			List<string> gesturesList = new List<string>();

			foreach (string file in gestureFiles)
			{
				string shortName = file.Substring(file.LastIndexOf("\\"));
				try
				{
					gestures.LoadGesture(file);
					gesturesList.Add(shortName);
				}
				catch (Exception ex)
				{
					gesturesList.Add("Unable to load:" + shortName);
				}
			}

			gestures.RenderGestures(MainCanvas, (int)MainCanvas.ActualWidth, (int)MainCanvas.ActualHeight);
			OnGesturesLoaded(gesturesList);
		}


		#endregion


		#region fist tracking

		// (By Microsoft) We want to control how depth data gets converted into false-color
		// data for more intuitive visualization, so we keep 32-bit color frame buffer
		// versions of these, to be updated whenever we receive and process a 16-bit frame.
		//	Byte offsets of each channel within a Bgr32 pixel.
		const int RED_IDX = 2;
		const int GREEN_IDX = 1;
		const int BLUE_IDX = 0;
		//	Side length (in pixels) of the square cropped around the tracked hand.
		const int HANDBOXSIZE = 50;
		//	Dimensions of the depth stream opened in Begin() (Resolution320x240).
		const int DepthFrameWidth = 320;
		const int DepthFrameHeight = 240;
		//	create this outside the method so that it isn't re-allocated every frame hit.
		byte[] depthFrame32 = new byte[DepthFrameWidth * DepthFrameHeight * 4];

		/// <summary>
		/// Depth-frame callback: builds a false-colored, cropped view of the depth data
		/// around the tracked right hand and displays it in HandTrackingImage.
		/// (Full-frame rendering into FullTrackingImage is currently disabled.)
		/// </summary>
		void nui_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
		{
			if (HandTrackingImage == null)
			{
				return;
			}

			PlanarImage image = e.ImageFrame.Image;

			//	Locate the hand within the depth image and crop a box around it.
			System.Windows.Int32Rect handRect = GetCroppedHandImageRect();
			byte[] handPixels = ConvertDepthFrameOfHand(image.Bits, handRect);

			BitmapSource handSource = BitmapSource.Create(
				HANDBOXSIZE, HANDBOXSIZE, 96, 96,
				PixelFormats.Bgr32, null, handPixels, HANDBOXSIZE * 4);
			HandTrackingImage.Source = handSource;
		}

		////figure out where in the image the hand is located.
		/// <summary>
		/// Computes a HANDBOXSIZE x HANDBOXSIZE rectangle centred on the right hand's
		/// current position, clamped so it lies entirely inside the depth frame.
		/// </summary>
		private System.Windows.Int32Rect GetCroppedHandImageRect()
		{
			//	Project the right hand's position into depth-frame pixel coordinates.
			Vector hand = skeletalHistory.GetJointHistory(JointID.HandRight).CurrentPosition;
			Vector scaled = hand.ScaleToScreenAccurate(DepthFrameWidth, DepthFrameHeight, 1.0f, 1.0f);

			int left = (int)scaled.X - (HANDBOXSIZE / 2);
			int top = (int)scaled.Y - (HANDBOXSIZE / 2);

			//	Clamp the box so it never extends past any edge of the frame.
			left = Math.Max(0, Math.Min(left, DepthFrameWidth - HANDBOXSIZE));
			top = Math.Max(0, Math.Min(top, DepthFrameHeight - HANDBOXSIZE));

			return new System.Windows.Int32Rect(left, top, HANDBOXSIZE, HANDBOXSIZE);
		}




		// Converts a 16-bit grayscale depth frame which includes player indexes into a 32-bit frame
		/// <summary>
		/// Converts a 16-bit depth + player-index frame into the shared Bgr32 false-color
		/// buffer: pixels with no player are white, player pixels are shaded by depth
		/// (near = dark, far = light).
		/// </summary>
		byte[] ConvertDepthFrame(byte[] depthFrame16)
		{
			for (int src = 0, dst = 0; src < depthFrame16.Length && dst < depthFrame32.Length; src += 2, dst += 4)
			{
				//	The low 3 bits of the first byte hold the player index (0 = no player).
				int playerIndex = depthFrame16[src] & 0x07;

				byte shade;
				if (playerIndex == 0)
				{
					//	No player at this pixel: render it white.
					shade = 255;
				}
				else
				{
					//	Reassemble the 13-bit depth value with the player bits stripped...
					int realDepth = (depthFrame16[src + 1] << 5) | (depthFrame16[src] >> 3);
					//	...and map it to an 8-bit intensity for display.
					byte intensity = (byte)(255 - (255 * realDepth / 0x0fff));
					shade = (byte)(255 - intensity);
				}

				depthFrame32[dst + RED_IDX] = shade;
				depthFrame32[dst + GREEN_IDX] = shade;
				depthFrame32[dst + BLUE_IDX] = shade;
			}
			return depthFrame32;
		}

		//	Reusable Bgr32 buffer for the cropped hand image (avoids per-frame allocation).
		byte[] depthFrameHand32 = new byte[HANDBOXSIZE * HANDBOXSIZE * 4];

		/// <summary>
		/// Converts the region of a 16-bit depth + player-index frame described by
		/// boundingRect into a Bgr32 false-color image of the hand. Non-player pixels are
		/// white; player pixels are shaded by depth, and after the first pass anything
		/// lighter than the midpoint of the observed intensity range is whited out, to
		/// strip pixels behind the hand.
		/// </summary>
		/// <param name="depthFrame16">Raw 16-bit depth frame, 2 bytes per pixel.</param>
		/// <param name="boundingRect">Crop region in depth-frame pixel coordinates.</param>
		/// <returns>The shared HANDBOXSIZE*HANDBOXSIZE Bgr32 buffer.</returns>
		byte[] ConvertDepthFrameOfHand(byte[] depthFrame16, System.Windows.Int32Rect boundingRect)
		{
			int sourceStepping = DepthFrameWidth * 2;	//	number of bytes per source image row
			int startByte = (boundingRect.X * 2) + (sourceStepping * boundingRect.Y);
			int rowLength = boundingRect.Width;
			int steppingWrap = (DepthFrameWidth - (rowLength - 1)) * 2; // number of bytes between end of row and start of next row at given width
			int thisRowStart = startByte;
			int thisRowEnd = thisRowStart + ((rowLength - 1) * 2);

			byte minIntensity = 0xff;
			byte maxIntensity = 0x0f;

			//	i16 walks the cropped rows of the source; i32 walks the destination buffer.
			for (int i16 = startByte, i32 = 0; i16 < depthFrame16.Length && i32 < depthFrameHand32.Length; i32 += 4)
			{
				// combine the 2 bytes of depth data representing this pixel
				short depthValue = (short)(depthFrame16[i16] | (depthFrame16[i16 + 1] << 8));

				// the low 3 bits hold the tracked player index (0 = no player)
				int player = depthValue & 0x07;

				// white out areas that aren't the player.
				if (player == 0)
				{
					depthFrameHand32[i32 + RED_IDX] = 0xff;
					depthFrameHand32[i32 + GREEN_IDX] = 0xff;
					depthFrameHand32[i32 + BLUE_IDX] = 0xff;
				}
				else
				{
					// reassemble the 13-bit depth value with the player bits stripped
					int realDepth = (depthFrame16[i16 + 1] << 5) | (depthFrame16[i16] >> 3);

					// transform 13-bit depth information into an 8-bit intensity appropriate
					// for display (we disregard information in most significant bit)
					byte intensity = (byte)(255 - (255 * realDepth / 0x0fff));

					if (intensity < 255)
					{
						//	track the min/max intensities actually seen within the crop
						if (minIntensity > intensity)
						{
							minIntensity = intensity;
						}
						if (maxIntensity < intensity)
						{
							maxIntensity = intensity;
						}
					}

					depthFrameHand32[i32 + RED_IDX] = (byte)(0xff - intensity);
					depthFrameHand32[i32 + GREEN_IDX] = (byte)(0xff - intensity);
					depthFrameHand32[i32 + BLUE_IDX] = (byte)(0xff - intensity);
				}

				//	advance the source index: either to the next pixel in this row,
				//	or wrap to the start of the next cropped row.
				if (i16 >= thisRowEnd)
				{
					i16 += steppingWrap;
					thisRowStart = i16;
					thisRowEnd = thisRowStart + ((rowLength - 1) * 2);
				}
				else
				{
					i16 += 2;
				}
			}

			int midIntensity = (byte)(((short)minIntensity + (short)maxIntensity) / 2);
			//	now strip out the lower half intensity values.  this should strip out images behind the hand
			for (int i32 = 0; i32 < depthFrameHand32.Length; i32 += 4)
			{
				if (depthFrameHand32[i32 + RED_IDX] > midIntensity)
				{
					depthFrameHand32[i32 + RED_IDX] = 0xff;
				}
				if (depthFrameHand32[i32 + GREEN_IDX] > midIntensity)
				{
					depthFrameHand32[i32 + GREEN_IDX] = 0xff;
				}
				if (depthFrameHand32[i32 + BLUE_IDX] > midIntensity)
				{
					depthFrameHand32[i32 + BLUE_IDX] = 0xff;
				}
			}

			return depthFrameHand32;
		}

		#endregion



		#region Joint Proximity Triggers


		//	Triggers evaluated on every skeletal frame by CheckJointProximityTriggers().
		private List<JointProximityTrigger> jointProximityTriggers;

		/// <summary>
		/// Registers a trigger that raises JointProximityTriggerChanged when its two
		/// joints move within (or back out of) its distance threshold.
		/// </summary>
		/// <param name="trigger">The proximity trigger to monitor each skeletal frame.</param>
		public void AddJointProximityTrigger(JointProximityTrigger trigger)
		{
			jointProximityTriggers.Add(trigger);
		}

		/// <summary>
		/// Re-evaluates every registered proximity trigger against the current joint
		/// positions and raises JointProximityTriggerChanged on each state transition.
		/// </summary>
		private void CheckJointProximityTriggers()
		{
			foreach (JointProximityTrigger trigger in jointProximityTriggers)
			{
				Vector positionA = skeletalHistory.GetJointHistory(trigger.JointA).CurrentPosition;
				Vector positionB = skeletalHistory.GetJointHistory(trigger.JointB).CurrentPosition;

				bool withinRange = positionA.GetDist(positionB) <= trigger.dist;

				//	Only raise the event when the trigger actually flips state.
				if (trigger.active == withinRange)
				{
					continue;
				}

				trigger.active = withinRange;
				OnJointProximityTriggerChanged(trigger);
			}
		}

		#endregion


		#region Exposed Events

		#region GesturesLoaded

		public delegate void GesturesLoadedEventHandler(List<string> gestureNames);
		/// <summary>Raised after ReloadGestures() with the loaded gesture file names.</summary>
		public event GesturesLoadedEventHandler GesturesLoaded;
		protected void OnGesturesLoaded(List<string> gestureNames)
		{
			//	Copy to a local so a subscriber removed between the null check and the
			//	invocation cannot cause a NullReferenceException.
			GesturesLoadedEventHandler handler = GesturesLoaded;
			if (handler != null)
				handler(gestureNames);
		}

		#endregion

		#region GestureRecognised

		public delegate void GestureRecognisedEventHandler(string gestureName);
		/// <summary>Raised when the joint history matches a loaded gesture.</summary>
		public event GestureRecognisedEventHandler GestureRecognised;
		protected void OnGestureRecognised(string gestureName)
		{
			//	Snapshot the delegate to avoid a race between the null check and invoke.
			GestureRecognisedEventHandler handler = GestureRecognised;
			if (handler != null)
				handler(gestureName);
		}

		#endregion

		#region TrackedJointChanged

		public delegate void TrackedJointChangedEventHandler(JointID trackedJoint);
		/// <summary>Raised when the joint used for recording/mouse tracking changes.</summary>
		public event TrackedJointChangedEventHandler TrackedJointChanged;
		protected void OnTrackedJointChanged(JointID trackedJoint)
		{
			//	Snapshot the delegate to avoid a race between the null check and invoke.
			TrackedJointChangedEventHandler handler = TrackedJointChanged;
			if (handler != null)
				handler(trackedJoint);
		}

		#endregion

		#region TrackingWarningChanged

		public delegate void TrackingWarningChangedEventHandler(bool trackingWarning);
		/// <summary>Raised when skeleton tracking is lost (true) or regained (false).</summary>
		public event TrackingWarningChangedEventHandler TrackingWarningChanged;
		protected void OnTrackingWarningChanged(bool trackingWarning)
		{
			//	Snapshot the delegate to avoid a race between the null check and invoke.
			TrackingWarningChangedEventHandler handler = TrackingWarningChanged;
			if (handler != null)
				handler(trackingWarning);
		}

		#endregion

		#region MouseMove

		public delegate void MouseMoveEventHandler(int x, int y);
		/// <summary>Raised every skeletal frame with the tracked joint scaled to screen coordinates.</summary>
		public event MouseMoveEventHandler MouseMove;
		protected void OnMouseMove(int x, int y)
		{
			//	Snapshot the delegate to avoid a race between the null check and invoke.
			MouseMoveEventHandler handler = MouseMove;
			if (handler != null)
				handler(x, y);
		}

		#endregion

		#region JointProximityTriggerChanged
		//	basically, alert when two joints move in/out of a certain proximity

		public delegate void JointProximityTriggerChangedEventHandler(JointProximityTrigger trigger);
		/// <summary>Raised when two monitored joints move in or out of their proximity threshold.</summary>
		public event JointProximityTriggerChangedEventHandler JointProximityTriggerChanged;
		protected void OnJointProximityTriggerChanged(JointProximityTrigger trigger)
		{
			//	Snapshot the delegate to avoid a race between the null check and invoke.
			JointProximityTriggerChangedEventHandler handler = JointProximityTriggerChanged;
			if (handler != null)
				handler(trigger);
		}

		#endregion

		#endregion
		

		#region EventHandlers




		/// <summary>
		/// this is fired every skeletal "frame"
		/// </summary>
		/// <param name="sender"></param>
		/// <param name="e"></param>
		/// <summary>
		/// this is fired every skeletal "frame": updates the joint history, renders it,
		/// checks gestures and proximity triggers, and reports a scaled mouse position.
		/// </summary>
		/// <param name="sender"></param>
		/// <param name="e"></param>
		private void nui_SkeletonFrameReady(object sender, Nui.SkeletonFrameReadyEventArgs e)
		{
			//	grab the first skeleton the runtime reports as tracked, if any
			Nui.SkeletonData skeleton = e.SkeletonFrame.Skeletons
				.Where(s => s.TrackingState == Nui.SkeletonTrackingState.Tracked)
				.FirstOrDefault();

			if (skeleton == null)
			{
				NoSkeletonTrackedWarning = true;
				return;
			}

			NoSkeletonTrackedWarning = false;
			skeletalHistory.UpdateSkeleton(skeleton);
			skeletalHistory.RenderSkeletalHistory(MainCanvas);

			string lastGesture = "";
			if (gestures.CheckGestures(skeletalHistory, out lastGesture))
			{
				OnGestureRecognised(lastGesture);
			}

			//	drive the simulated mouse from the currently tracked joint
			Vector mousePosition = skeletalHistory
				.GetJointHistory(_currentJointId)
				.CurrentPosition
				.ScaleToScreen(screenWidth, screenHeight, 0.5f, 0.5f);

			OnMouseMove((int)mousePosition.X, (int)mousePosition.Y);
			CheckJointProximityTriggers();
		}



		/// <summary>
		/// Shuts down the Kinect unit by uninitializing the runtime.
		/// Safe to call more than once.
		/// </summary>
		public void Dispose()
		{
			//	we need to do this because it shuts down the Kinect unit.
			if (nui != null)
			{
				nui.Uninitialize();
				//	Make Dispose idempotent: a second call must not re-uninitialize.
				nui = null;
			}
		}


		/// <summary>
		/// Keyboard handler: S/Space saves the current joint history as a gesture file
		/// and reloads the gesture list; Left/Right switch the tracked hand.
		/// </summary>
		/// <param name="key">The key that was pressed.</param>
		public void OnKeyDown(Key key)
		{
			if (key == Key.S || key == Key.Space)
			{
				//	record the gesture
				var newGesture = skeletalHistory.GetJointHistory(CurrentJointId);

				string jointName = Enum.GetName(typeof(JointID), _currentJointId);
				string gestureName = string.Format("{0}SavedGesture", jointName);
				newGesture.StorePositionHistory(string.Format("{1}\\{0}.ges", gestureName, GesturePath), skeletalHistory.NeckVector, gestureName);

				//	clone the last gesture so that we can draw it on the screen.
				JointPositionHistory lastGesture = newGesture.Clone();
				lastGesture.GestureName = "LastSaved";

				ReloadGestures();
				//	Use ActualWidth/ActualHeight as elsewhere in this class:
				//	Canvas.Width/Height are NaN unless explicitly set in XAML.
				lastGesture.RenderPath(MainCanvas, (int)MainCanvas.ActualWidth, (int)MainCanvas.ActualHeight);
			}
			else if (key == Key.Left)
			{
				CurrentJointId = JointID.HandLeft;
				//	Raise through the helper so a missing subscriber cannot throw.
				OnTrackedJointChanged(CurrentJointId);
			}
			else if (key == Key.Right)
			{
				CurrentJointId = JointID.HandRight;
				OnTrackedJointChanged(CurrentJointId);
			}
		}
		#endregion



	}
}
