﻿#define USE_SPEECH //COMMENT OUT THIS LINE IF USING THE NAOSIM (as opposed to the real robot)

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;
using System.IO;

// Use robot connection proxies.
using Aldebaran.Proxies;

// Use Kinect SDK.
using Microsoft.Research.Kinect.Nui;
using Microsoft.Research.Kinect.Audio;

// Use Coding4Fun.
using Coding4Fun.Kinect.Wpf;

// Use 3D vectors.
using System.Windows.Media.Media3D;
using System.Threading;

// Use speech/grammer. For Kinect, use Microsoft.Speech, not System.Speech
using System.Speech.Recognition.SrgsGrammar;
using System.Speech.AudioFormat;
using System.Speech.Recognition;

namespace NaoController
{
    #region Motors ------------------------------------------------------------

    /// <summary>
    /// NAO joint motors. Member names match the NAO motor names that
    /// MotionProxy.setAngles expects (SetMotorAngle passes name.ToString()),
    /// so these identifiers must not be renamed.
    /// </summary>
    enum Motor
    {
        // Head
        HeadPitch = 0,
        HeadYaw,
        // Left arm
        LShoulderPitch,
        LShoulderRoll,
        LElbowRoll,
        LElbowYaw,
        LWristYaw,
        // Left leg
        LHipYawPitch,
        LHipPitch,
        LHipRoll,
        LKneePitch,
        LAnklePitch,
        LAnkleRoll,
        // Right arm
        RShoulderPitch,
        RShoulderRoll,
        RElbowRoll,
        RElbowYaw,
        RWristYaw,
        // Right leg
        RHipYawPitch,
        RHipPitch,
        RHipRoll,
        RKneePitch,
        RAnklePitch,
        RAnkleRoll
    };

    #endregion ----------------------------------------------------------------

    /// <summary>
    /// Interaction logic for MainWindow.xaml
    /// </summary>
    public partial class MainWindow : Window
    {
        #region Constants -----------------------------------------------------

        private const string NAO_IP = "128.208.4.225"; // loopback ip: 127.0.0.1, current nao ip: 128.208.7.48
        private const int NAO_PORT = 9559;

        private const int CANVAS_WIDTH = 640;
        private const int CANVAS_HEIGHT = 480;

        private const float DEFAULT_MOTOR_SPEED = 0.5f;

        // Joint angle contraints in radians.
        private const float HEAD_PITCH_MAX = 0.5149f;
        private const float HEAD_PITCH_MIN = -0.6720f;
        private const float HEAD_YAW_MAX = 2.0857f;
        private const float HEAD_YAW_MIN = -2.0857f;
        private const float L_SHOULDER_PITCH_MAX = 2.0857f;
        private const float L_SHOULDER_PITCH_MIN = -2.0857f;
        private const float L_SHOULDER_ROLL_MAX = 1.3265f;
        private const float L_SHOULDER_ROLL_MIN = -0.3142f;
        private const float L_ELBOW_ROLL_MAX = -0.0349f;
        private const float L_ELBOW_ROLL_MIN = -1.5446f;
        private const float L_ELBOW_YAW_MAX = 2.0857f;
        private const float L_ELBOW_YAW_MIN = -2.0857f;
        private const float L_WRIST_YAW_MAX = 1.8238f;
        private const float L_WRIST_YAW_MIN = -1.8238f;
        private const float L_HIP_YAW_PITCH_MAX = 0.740810f;
        private const float L_HIP_YAW_PITCH_MIN = -1.145303f;
        private const float L_HIP_PITCH_MAX = 0.484090f;
        private const float L_HIP_PITCH_MIN = -1.773912f;
        private const float L_HIP_ROLL_MAX = 0.790477f;
        private const float L_HIP_ROLL_MIN = -0.379472f;
        private const float L_KNEE_PITCH_MAX = 2.112528f;
        private const float L_KNEE_PITCH_MIN = -0.092346f;
        private const float L_ANKLE_PITCH_MAX = 0.922747f;
        private const float L_ANKLE_PITCH_MIN = -1.189516f;
        private const float L_ANKLE_ROLL_MAX = 0.397880f;
        private const float L_ANKLE_ROLL_MIN = -0.769001f;
        private const float R_SHOULDER_PITCH_MAX = 2.0857f;
        private const float R_SHOULDER_PITCH_MIN = -2.0857f;
        private const float R_SHOULDER_ROLL_MAX = 0.3142f;
        private const float R_SHOULDER_ROLL_MIN = -1.3265f;
        private const float R_ELBOW_ROLL_MAX = 1.5446f;
        private const float R_ELBOW_ROLL_MIN = 0.0349f;
        private const float R_ELBOW_YAW_MAX = 2.0857f;
        private const float R_ELBOW_YAW_MIN = -2.0857f;
        private const float R_WRIST_YAW_MAX = 1.8238f;
        private const float R_WRIST_YAW_MIN = -1.8238f;
        private const float R_HIP_YAW_PITCH_MAX = 0.740810f;
        private const float R_HIP_YAW_PITCH_MIN = -1.145303f;
        private const float R_HIP_PITCH_MAX = 0.484090f;
        private const float R_HIP_PITCH_MIN = -1.773912f;
        private const float R_HIP_ROLL_MAX = 0.414754f;
        private const float R_HIP_ROLL_MIN = -0.738321f;
        private const float R_KNEE_PITCH_MAX = 2.120198f;
        private const float R_KNEE_PITCH_MIN = -0.103083f;
        private const float R_ANKLE_PITCH_MAX = 0.932056f;
        private const float R_ANKLE_PITCH_MIN = -1.186448f;
        private const float R_ANKLE_ROLL_MAX = 0.785875f;
        private const float R_ANKLE_ROLL_MIN = -0.388676f;

        #endregion ------------------------------------------------------------

        bool sendMotorMessages = false; // Send motor commands to the robot.
        bool videoStreams = true; // Use video streams.
        bool evaluating = true; // True means spacebar is recording for evaluation by classifier

        Runtime nui = new Runtime();
        MotionProxy motionProxy = null;
        TextToSpeechProxy tts = null;
        KinectAudioSource kinectAudioSource = null;
        SpeechRecognitionEngine speechEngine = null;

        SkeletonDataParser skeletonCapture = null;
        Position lastTorsoPosition;
        bool lastTorsoPositionKnown = false;
        bool processingSkeletonFrame = false;

        // Audio recognition parameters
        Stream stream;
        string RecognizerId = "SR_MS_ZXX_Lightweight_v10.0"; //"SR_MS_en-US_Kinect_10.0";
        NaoGrammar naoGrammar;
        Grammar currentGrammar;

        // Maintain a dictionary of motor -> min/max angle.
        Dictionary<Motor, KeyValuePair<float, float>> angleLimits;

        /// <summary>
        /// Main execution: initializes the XAML components and subscribes
        /// to keyboard input.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            // Method-group syntax is equivalent to new KeyEventHandler(OnKeyPress).
            this.KeyDown += OnKeyPress;
        }

        #region Setup----------------------------------------------------------

        /// <summary>
        /// Fires when the window completes loading: connects to the NAO robot,
        /// enables its motors, initializes the Kinect runtime and streams, and
        /// kicks off speech capture on a worker thread.
        /// </summary>
        /// <param name="sender">reference to the sender object.</param>
        /// <param name="e">applicable arguments.</param>
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            //InitializeAudioSource();

            // Build dictionary of angle limits.
            angleLimits = BuildAngleLimitDictionary();

            // Establish connection with NAO robot.
            Connect(NAO_IP, NAO_PORT);

            if (motionProxy != null)
            {
                // Turn all motors on (stiffness 1.0 = fully powered).
                motionProxy.setStiffnesses("Body", 1.0f);
            }

            // Skeletal tracking is always required; color/depth only when enabled.
            if (videoStreams)
                nui.Initialize(RuntimeOptions.UseColor | RuntimeOptions.UseDepth | RuntimeOptions.UseSkeletalTracking);
            else
                nui.Initialize(RuntimeOptions.UseSkeletalTracking);

            // Enable skeleton smoothing.
            nui.SkeletonEngine.TransformSmooth = true;

            // Use to transform and reduce jitter.
            var parameters = new TransformSmoothParameters
            {
                Smoothing = 0.05f,
                Correction = 0.05f,
                Prediction = 0.05f,
                JitterRadius = 0.05f,
                MaxDeviationRadius = 0.04f
            };

            // Apply the above parameters to the skeleton.
            nui.SkeletonEngine.SmoothParameters = parameters;

            // Add event handlers.
            if (videoStreams)
            {
                nui.VideoFrameReady += new EventHandler<ImageFrameReadyEventArgs>(nui_VideoFrameReady);
                nui.DepthFrameReady += new EventHandler<ImageFrameReadyEventArgs>(nui_DepthFrameReady);
            }

            nui.SkeletonFrameReady += new EventHandler<SkeletonFrameReadyEventArgs>(nui_SkeletonFrameReady);

            // Open video streams.
            if (videoStreams)
            {
                nui.VideoStream.Open(ImageStreamType.Video, 2, ImageResolution.Resolution640x480, ImageType.Color);
                nui.DepthStream.Open(ImageStreamType.Depth, 2, ImageResolution.Resolution320x240, ImageType.Depth);
            }

            // Open audio stream on a worker thread so engine setup does not
            // block the UI thread.
            Console.WriteLine("Recognizing speech");
            var capAudio = new Thread(captureAudio);
            // FIX: mark as background so this thread cannot keep the process
            // alive after the window closes.
            capAudio.IsBackground = true;
            capAudio.Start();
        }

        /// <summary>
        /// Sets up the Kinect to capture audio and starts continuous speech
        /// recognition against the NAO base grammar.
        /// </summary>
        // NOTE(review): [MTAThread] only has an effect on an application entry
        // point; on an ordinary method it is inert. The apartment state is
        // governed by the Thread created in Window_Loaded — confirm intent.
        [MTAThread]
        private void captureAudio()
        {
            kinectAudioSource = new KinectAudioSource();
            kinectAudioSource.FeatureMode = true;
            kinectAudioSource.AutomaticGainControl = false;
            kinectAudioSource.SystemMode = SystemMode.OptibeamArrayOnly;

            // Pick the first installed recognizer (the Id filter is disabled).
            RecognizerInfo rec = (from r in SpeechRecognitionEngine.InstalledRecognizers()
                                  //where r.Id == RecognizerId
                                  select r).FirstOrDefault();

            // FIX: FirstOrDefault() returns null when no recognizer is
            // installed; previously this crashed below with a
            // NullReferenceException at rec.Id.
            if (rec == null)
            {
                Console.WriteLine("No speech recognizer installed; speech capture disabled.");
                return;
            }

            speechEngine = new SpeechRecognitionEngine(rec.Id);
            naoGrammar = new NaoGrammar(rec);

            this.currentGrammar = naoGrammar.CreateNAOBaseGrammar();

            speechEngine.LoadGrammar(this.currentGrammar); // LoadGrammarAsync
            speechEngine.SpeechHypothesized += new EventHandler<SpeechHypothesizedEventArgs>(speechEngine_SpeechHypothesized);
            speechEngine.SpeechRecognized += new EventHandler<SpeechRecognizedEventArgs>(speechEngine_SpeechRecognized);
            speechEngine.SpeechRecognitionRejected += new EventHandler<SpeechRecognitionRejectedEventArgs>(speechEngine_SpeechRecognitionRejected);

            stream = kinectAudioSource.Start();

            // 16 kHz, 16-bit, mono PCM: the format the Kinect mic array emits.
            speechEngine.SetInputToAudioStream(stream,
                new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
            speechEngine.RecognizeAsync(RecognizeMode.Multiple);
        }

        #endregion

        #region Speech Detection-----------------------------------------------

        /// <summary>
        /// Diagnostic hook: logs whenever the engine detects incoming speech audio.
        /// </summary>
        /// <param name="sender">reference to the sender object.</param>
        /// <param name="e">applicable arguments.</param>
        void speechEngine_SpeechDetected(object sender, SpeechDetectedEventArgs e)
        {
            Console.WriteLine("Speech Detected.");
        }

        /// <summary>
        /// Fires when an utterance could not be matched against the active
        /// grammar; logs the rejection. The spoken "Unknown command" reply is
        /// currently disabled (commented out).
        /// </summary>
        /// <param name="sender">reference to the sender object.</param>
        /// <param name="e">applicable arguments.</param>
        void speechEngine_SpeechRecognitionRejected(object sender, SpeechRecognitionRejectedEventArgs e)
        {
            Console.WriteLine("Speech Recognition Rejected: {0}, confidence: {1}", e.Result.Text, e.Result.Confidence);

#if USE_SPEECH
            //tts.say("Unknown command.");
#endif
            Console.WriteLine("Say: Unknown command.");
        }

        /// <summary>
        /// Fires when an utterance matched the active grammar with sufficient
        /// confidence. Dispatches on the grammar name ("base", "yesno", or the
        /// sequence-naming grammar) and swaps grammars accordingly.
        /// </summary>
        /// <param name="sender">reference to the sender object.</param>
        /// <param name="e">applicable arguments.</param>
        void speechEngine_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
        {
            // Ignore low-confidence matches to cut down on false positives.
            if (e.Result.Confidence > .5)
            {
                Console.WriteLine("SPEECH RECOGNIZED: {0}, grammar name: {1}, confidence: {2}", e.Result.Text, e.Result.Grammar.Name, e.Result.Confidence);

                Console.WriteLine("grammar name: {0}", e.Result.Grammar.Name);

                if (e.Result.Grammar.Name.Equals("base"))
                {
                    Console.WriteLine("=== Recognize a word from the base grammar ===");

                    // TODO: parse the recognized text into gestures via
                    // naoGrammar.ParseGestureOutput, speak a confirmation
                    // through tts, perform each gesture with doAction, and
                    // when more than one meta-gesture was spoken, offer to
                    // save the sequence by switching to the yes/no grammar.
                }
                else if (e.Result.Grammar.Name.Equals("yesno"))
                {
                    Console.WriteLine("=== Recognize a word from yesno grammar === ");

                    // "yes save sequence" -> ask for a name and switch to the
                    // sequence-naming grammar; anything else -> back to base.
                    if ("yes save sequence".Equals(e.Result.Text, StringComparison.OrdinalIgnoreCase))
                    {
#if USE_SPEECH
                        // FIX: tts stays null when Connect fails; guard before
                        // speaking to avoid a NullReferenceException.
                        if (tts != null)
                            tts.say("What is the name of your sequence?");
#endif
                        Console.WriteLine("What is the name of your sequence?");
                        LoadGrammar(naoGrammar.CreateNAOSequenceGrammar());
                    }
                    else
                    {
                        // We don't want to save anything, so go back to the base grammar
#if USE_SPEECH
                        // FIX: same null guard as above.
                        if (tts != null)
                            tts.say("Waiting for next command");
#endif
                        Console.WriteLine("Say: Waiting for next command");
                        LoadGrammar(naoGrammar.CreateNAOBaseGrammar());
                    }

                }
                else
                {
                    Console.WriteLine("=== Recognize a word from sequence grammar === ");

                    // TODO: expect "The sequence name is {0}", extract {0},
                    // and register it via naoGrammar.AddAction.

                    // FIX(comment): this switches back to the BASE grammar;
                    // the old comment wrongly said "sequence grammar".
                    LoadGrammar(naoGrammar.CreateNAOBaseGrammar());
                }
            }
        }

        /// <summary>
        /// Swaps the active speech grammar: unloads the current grammar and
        /// installs g as the new current grammar.
        /// </summary>
        /// <param name="g">the grammar to load and make current.</param>
        void LoadGrammar(Grammar g)
        {
            Console.WriteLine("LOADING GRAMMAR");
            Console.WriteLine("\tgrammar name: {0}", g.Name);

            // Ask the engine to pause at a safe point before swapping grammars;
            // the update/unload/load order here is deliberate — do not reorder.
            speechEngine.RequestRecognizerUpdate();
            speechEngine.UnloadGrammar(this.currentGrammar);
            this.currentGrammar = g;
            speechEngine.LoadGrammar(this.currentGrammar);
        }

        /// <summary>
        /// Logs each intermediate recognition hypothesis while an utterance is
        /// still in progress.
        /// </summary>
        /// <param name="sender">reference to the sender object.</param>
        /// <param name="e">applicable arguments.</param>
        void speechEngine_SpeechHypothesized(object sender, SpeechHypothesizedEventArgs e)
        {
            Console.WriteLine("[{0}]", e.Result.Text);
        }

        #endregion

        #region Event Handlers ------------------------------------------------

        /// <summary>
        /// A skeleton frame is ready; draws the tracked joints onto the canvas
        /// and maps the human's arm pose to NAO shoulder/elbow motor angles
        /// (human right arm drives the robot's left arm and vice versa).
        /// </summary>
        /// <param name="sender">reference to the sender object.</param>
        /// <param name="e">applicable arguments.</param>
        void nui_SkeletonFrameReady(object sender, SkeletonFrameReadyEventArgs e)
        {
            // NOTE(review): this flag is only written here, never read in the
            // visible code — presumably other code polls it; confirm.
            processingSkeletonFrame = true;
            
            SkeletonFrame allSkeletons = e.SkeletonFrame;

            // Get the first tracked skeleton.
            SkeletonData skeleton = (from s in allSkeletons.Skeletons
                                     where s.TrackingState == SkeletonTrackingState.Tracked
                                     select s).FirstOrDefault();

            if (skeleton != null)
            {
                // We have a valid skeleton.
                // NOTE(review): HandRight is computed but never used below —
                // looks like dead code; confirm before removing.
                Joint HandRight = skeleton.Joints[JointID.HandRight].ScaleTo(CANVAS_WIDTH, CANVAS_HEIGHT, .5f, .5f);
                SetEllipsePosition(leftWristEllipse, skeleton.Joints[JointID.WristLeft]);
                SetEllipsePosition(leftElbowEllipse, skeleton.Joints[JointID.ElbowLeft]);
                SetEllipsePosition(leftShoulderEllipse, skeleton.Joints[JointID.ShoulderLeft]);
                SetEllipsePosition(headEllipse, skeleton.Joints[JointID.Head]);
                SetEllipsePosition(shoulderCenterEllipse, skeleton.Joints[JointID.ShoulderCenter]);
                SetEllipsePosition(hipCenterEllipse, skeleton.Joints[JointID.HipCenter]);
                SetEllipsePosition(rightShoulderEllipse, skeleton.Joints[JointID.ShoulderRight]);
                SetEllipsePosition(rightElbowEllipse, skeleton.Joints[JointID.ElbowRight]);
                SetEllipsePosition(rightWristEllipse, skeleton.Joints[JointID.WristRight]);

                // Retrieve the position vectors we care about.
                Vector3D leftShoulder = ExtractVectorFromJoint(skeleton.Joints[JointID.ShoulderLeft]);
                Vector3D leftElbow = ExtractVectorFromJoint(skeleton.Joints[JointID.ElbowLeft]);
                Vector3D leftWrist = ExtractVectorFromJoint(skeleton.Joints[JointID.WristLeft]);
                //Vector3D head = ExtractVectorFromJoint(skeleton.Joints[JointID.Head]);
                Vector3D centerShoulder = ExtractVectorFromJoint(skeleton.Joints[JointID.ShoulderCenter]);
                Vector3D centerHip = ExtractVectorFromJoint(skeleton.Joints[JointID.HipCenter]);
                Vector3D rightShoulder = ExtractVectorFromJoint(skeleton.Joints[JointID.ShoulderRight]);
                Vector3D rightElbow = ExtractVectorFromJoint(skeleton.Joints[JointID.ElbowRight]);
                Vector3D rightWrist = ExtractVectorFromJoint(skeleton.Joints[JointID.WristRight]);

                // Directional vectors (torso "up" and the shoulder line).
                Vector3D up = centerShoulder - centerHip;
                Vector3D leftToRight = rightShoulder - leftShoulder;
                Vector3D rightToLeft = leftShoulder - rightShoulder;

                // Right-side vectors
                Vector3D rightElbowToShoulder = rightShoulder - rightElbow;
                Vector3D rightElbowToWrist = rightWrist - rightElbow;
                Vector3D rightShoulderPitch = Vector3D.CrossProduct(rightElbowToShoulder, rightToLeft);
                Vector3D rightShoulderToHip = centerHip - rightShoulder; 

                // Left-side vectors
                Vector3D leftElbowToShoulder = leftShoulder - leftElbow;
                Vector3D leftElbowToWrist = leftWrist - leftElbow;
                Vector3D leftShoulderPitch = Vector3D.CrossProduct(leftElbowToShoulder, rightToLeft);
                Vector3D leftShoulderToHip = centerHip - leftShoulder;

                // Find angles of right-side joints (degrees; AngleBetween
                // returns [0, 180], hence the offsets below).
                float rightElbowRollAngle = (float)(Vector3D.AngleBetween(rightElbowToShoulder, rightElbowToWrist) - 180);
                float rightShoulderRollAngle = (float)(90 -  Vector3D.AngleBetween(rightToLeft, rightElbowToShoulder));
                float rightShoulderPitchAngle = (float)(180 - Vector3D.AngleBetween(rightShoulderPitch, up));

                // Find angles of left-side joints
                float leftElbowRollAngle = (float)(180- Vector3D.AngleBetween(leftElbowToShoulder, leftElbowToWrist));
                float leftShoulderRollAngle = (float)(90 - Vector3D.AngleBetween(rightToLeft, leftElbowToShoulder));
                float leftShoulderPitchAngle = (float)(180 - Vector3D.AngleBetween(leftShoulderPitch, up));

                // finding the right elbow yaw: compare the upper-arm plane
                // normal against a reference built from the shoulder-hip line.
                Vector3D rightShoulderReference = Vector3D.CrossProduct(-rightElbowToShoulder, Vector3D.CrossProduct(rightShoulderToHip, -rightElbowToShoulder));
                Vector3D rightElbowYaw = Vector3D.CrossProduct(rightElbowToShoulder, Vector3D.CrossProduct(rightElbowToShoulder, rightElbowToWrist));
                rightElbowYaw.Normalize();
                rightShoulderReference.Normalize();
                //Vector3D checkVector = Vector3D.CrossProduct(-rightElbowToShoulder, Vector3D.CrossProduct(rightShoulderReference, rightElbowYaw));
                float rightElbowYawAngle = (float)Vector3D.AngleBetween(rightElbowYaw, rightShoulderReference) - 90;

                // finding the left elbow yaw (mirrored sign convention)
                Vector3D leftShoulderReference = Vector3D.CrossProduct(-leftElbowToShoulder, Vector3D.CrossProduct(leftShoulderToHip, -leftElbowToShoulder));
                Vector3D leftElbowYaw = Vector3D.CrossProduct(leftElbowToShoulder, Vector3D.CrossProduct(leftElbowToShoulder, leftElbowToWrist));
                leftElbowYaw.Normalize();
                leftShoulderReference.Normalize();
                //Vector3D checkVector = Vector3D.CrossProduct(-leftElbowToShoulder, Vector3D.CrossProduct(leftShoulderReference, leftElbowYaw));
                float leftElbowYawAngle = 90 - (float)Vector3D.AngleBetween(leftElbowYaw, leftShoulderReference);

                // TODO: Hack for lifting arm above head; make this better.
                // (flip the pitch sign / offset the yaw when the elbow rises
                // above the shoulder)
                if (rightElbow.Y > rightShoulder.Y)
                    rightShoulderPitchAngle = -rightShoulderPitchAngle;
                else
                    rightElbowYawAngle = rightElbowYawAngle - 90;
                if (leftElbow.Y > leftShoulder.Y)
                    leftShoulderPitchAngle = -leftShoulderPitchAngle;
                else
                    leftElbowYawAngle = leftElbowYawAngle + 90;

                // Setting the robot-left, human-right motor angles
                SetMotorAngle(Motor.LShoulderRoll, rightShoulderRollAngle);
                SetMotorAngle(Motor.LShoulderPitch, rightShoulderPitchAngle);
                SetMotorAngle(Motor.LElbowRoll, rightElbowRollAngle);
                SetMotorAngle(Motor.LElbowYaw, rightElbowYawAngle);

                // Setting the robot-right, human-left motor angles
                SetMotorAngle(Motor.RShoulderRoll, leftShoulderRollAngle);
                SetMotorAngle(Motor.RShoulderPitch, leftShoulderPitchAngle);
                SetMotorAngle(Motor.RElbowRoll, leftElbowRollAngle);
                SetMotorAngle(Motor.RElbowYaw, leftElbowYawAngle);

                // On-screen debug readout of the right-elbow yaw computation.
                textSkeletonDebug.Text = String.Format("Elbow yaw: {0,4:0.0} {1,4:0.0} {2,4:0.0}, Shoulder ref:" + 
                    "{3,4:0.0} {4,4:0.0} {5,4:0.0}, angle: {6,3:0}",
                    rightElbowYaw.X, rightElbowYaw.Y, rightElbowYaw.Z,
                    rightShoulderReference.X, rightShoulderReference.Y, rightShoulderReference.Z,
                    leftElbowYawAngle);
            }

            processingSkeletonFrame = false;
        }

        /// <summary>
        /// A color video frame is ready; push it into the color image control.
        /// </summary>
        /// <param name="sender">reference to the sender object.</param>
        /// <param name="e">applicable arguments.</param>
        void nui_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            var frame = e.ImageFrame;
            colorImage.Source = frame.ToBitmapSource();
        }

        /// <summary>
        /// A depth video frame is ready; push it into the depth image control.
        /// </summary>
        /// <param name="sender">reference to the sender object.</param>
        /// <param name="e">applicable arguments.</param>
        void nui_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            var frame = e.ImageFrame;
            depthImage.Source = frame.ToBitmapSource();
        }

        /// <summary>
        /// Keyboard dispatch for manual control. All bindings are currently
        /// placeholders with no action attached.
        /// </summary>
        /// <param name="sender">reference to the sender object.</param>
        /// <param name="e">key event data for the pressed key.</param>
        void OnKeyPress(object sender, KeyEventArgs e)
        {
            // FIX: accept KeyEventArgs directly instead of EventArgs plus an
            // unchecked cast; the KeyDown subscription (a KeyEventHandler)
            // already guarantees this type, so the method-group conversion in
            // the constructor remains valid.
            switch (e.Key)
            {
                case Key.F1:
                case Key.F2:
                case Key.F3:
                case Key.F4:
                case Key.F5:
                case Key.F6:
                case Key.F7:
                case Key.F8:
                case Key.F12:
                case Key.Space:
                    // TODO: attach actions to these key bindings.
                    break;
            }
        }

        /// <summary>
        /// Cleanup for when the window is closed: relaxes the robot's motors
        /// and shuts down the Kinect pipelines.
        /// </summary>
        /// <param name="sender">reference to the sender object.</param>
        /// <param name="e">applicable arguments.</param>
        private void Window_Closed(object sender, EventArgs e)
        {
            // Disable all motors.
            SetMotors("Body", 0.0f);

            // FIX: also stop speech recognition and release the Kinect audio
            // source so background capture does not outlive the window.
            // Both may be null if captureAudio never completed setup.
            if (speechEngine != null)
                speechEngine.RecognizeAsyncCancel();
            if (kinectAudioSource != null)
                kinectAudioSource.Dispose();

            nui.Uninitialize();
        }

        #endregion ------------------------------------------------------------

        #region Utility Methods -----------------------------------------------

        /// <summary>
        /// Positions the given ellipse on the canvas at the joint's location,
        /// scaled into canvas coordinates.
        /// </summary>
        /// <param name="ellipse">the ellipse whose position we want to set.</param>
        /// <param name="joint">the joint whose position we care about.</param>
        private void SetEllipsePosition(FrameworkElement ellipse, Joint joint)
        {
            Joint scaled = joint.ScaleTo(CANVAS_WIDTH, CANVAS_HEIGHT, .5f, .5f);
            var position = scaled.Position;

            Canvas.SetLeft(ellipse, position.X);
            Canvas.SetTop(ellipse, position.Y);
        }

        /// <summary>
        /// Extracts a Vector3D of the position of the given joint.
        /// </summary>
        /// <param name="j">joint of interest</param>
        /// <returns>3D vector of joint position</returns>
        private Vector3D ExtractVectorFromJoint(Microsoft.Research.Kinect.Nui.Joint j)
        {
            var p = j.Position;
            return new Vector3D(p.X, p.Y, p.Z);
        }

        /// <summary>
        /// Converts degrees to radians.
        /// </summary>
        /// <param name="angle">angle in degrees</param>
        /// <returns>radian value</returns>
        private float DegreesToRadians(double angle)
        {
            double radians = angle / 180.0 * Math.PI;
            return (float)radians;
        }

        /// <summary>
        /// Converts radians to degrees.
        /// </summary>
        /// <param name="angle">angle in radians</param>
        /// <returns>degree value</returns>
        private float RadiansToDegrees(double angle)
        {
            double degrees = angle / Math.PI * 180.0;
            return (float)degrees;
        }

        /// <summary>
        /// Construct a dictionary of motor -> min/max angle mappings, where the
        /// pair's Key is the minimum and Value the maximum angle (radians).
        /// </summary>
        /// <returns>a new dictionary of motor -> min/max angle mappings</returns>
        private Dictionary<Motor, KeyValuePair<float, float>> BuildAngleLimitDictionary()
        {
            // Collection initializer: entries are added in the same order as before.
            return new Dictionary<Motor, KeyValuePair<float, float>>
            {
                { Motor.HeadPitch, new KeyValuePair<float, float>(HEAD_PITCH_MIN, HEAD_PITCH_MAX) },
                { Motor.HeadYaw, new KeyValuePair<float, float>(HEAD_YAW_MIN, HEAD_YAW_MAX) },
                { Motor.LShoulderPitch, new KeyValuePair<float, float>(L_SHOULDER_PITCH_MIN, L_SHOULDER_PITCH_MAX) },
                { Motor.RShoulderPitch, new KeyValuePair<float, float>(R_SHOULDER_PITCH_MIN, R_SHOULDER_PITCH_MAX) },
                { Motor.LShoulderRoll, new KeyValuePair<float, float>(L_SHOULDER_ROLL_MIN, L_SHOULDER_ROLL_MAX) },
                { Motor.RShoulderRoll, new KeyValuePair<float, float>(R_SHOULDER_ROLL_MIN, R_SHOULDER_ROLL_MAX) },
                { Motor.LElbowRoll, new KeyValuePair<float, float>(L_ELBOW_ROLL_MIN, L_ELBOW_ROLL_MAX) },
                { Motor.RElbowRoll, new KeyValuePair<float, float>(R_ELBOW_ROLL_MIN, R_ELBOW_ROLL_MAX) },
                { Motor.LElbowYaw, new KeyValuePair<float, float>(L_ELBOW_YAW_MIN, L_ELBOW_YAW_MAX) },
                { Motor.RElbowYaw, new KeyValuePair<float, float>(R_ELBOW_YAW_MIN, R_ELBOW_YAW_MAX) },
                { Motor.LWristYaw, new KeyValuePair<float, float>(L_WRIST_YAW_MIN, L_WRIST_YAW_MAX) },
                { Motor.RWristYaw, new KeyValuePair<float, float>(R_WRIST_YAW_MIN, R_WRIST_YAW_MAX) },
                { Motor.LHipYawPitch, new KeyValuePair<float, float>(L_HIP_YAW_PITCH_MIN, L_HIP_YAW_PITCH_MAX) },
                { Motor.RHipYawPitch, new KeyValuePair<float, float>(R_HIP_YAW_PITCH_MIN, R_HIP_YAW_PITCH_MAX) },
                { Motor.LHipPitch, new KeyValuePair<float, float>(L_HIP_PITCH_MIN, L_HIP_PITCH_MAX) },
                { Motor.RHipPitch, new KeyValuePair<float, float>(R_HIP_PITCH_MIN, R_HIP_PITCH_MAX) },
                { Motor.LHipRoll, new KeyValuePair<float, float>(L_HIP_ROLL_MIN, L_HIP_ROLL_MAX) },
                { Motor.RHipRoll, new KeyValuePair<float, float>(R_HIP_ROLL_MIN, R_HIP_ROLL_MAX) },
                { Motor.LKneePitch, new KeyValuePair<float, float>(L_KNEE_PITCH_MIN, L_KNEE_PITCH_MAX) },
                { Motor.RKneePitch, new KeyValuePair<float, float>(R_KNEE_PITCH_MIN, R_KNEE_PITCH_MAX) },
                { Motor.LAnklePitch, new KeyValuePair<float, float>(L_ANKLE_PITCH_MIN, L_ANKLE_PITCH_MAX) },
                { Motor.RAnklePitch, new KeyValuePair<float, float>(R_ANKLE_PITCH_MIN, R_ANKLE_PITCH_MAX) },
                { Motor.LAnkleRoll, new KeyValuePair<float, float>(L_ANKLE_ROLL_MIN, L_ANKLE_ROLL_MAX) },
                { Motor.RAnkleRoll, new KeyValuePair<float, float>(R_ANKLE_ROLL_MIN, R_ANKLE_ROLL_MAX) }
            };
        }

        #endregion ------------------------------------------------------------

        #region NAO Robot Control Methods -------------------------------------

        /// <summary>
        /// Establishes connection proxies to the NAO robot. Failures are
        /// logged and the proxies are left null.
        /// </summary>
        /// <param name="ip">ip of NAO robot</param>
        /// <param name="port">port of NAO robot</param>
        public void Connect(string ip, int port)
        {
            try
            {
                if (motionProxy == null)
                {
                    motionProxy = new MotionProxy(ip, port);
                }

#if USE_SPEECH
                // The TTS proxy throws when talking to the NaoSim simulator,
                // so it is compiled out unless USE_SPEECH is defined.
                if (tts == null)
                {
                    tts = new TextToSpeechProxy(ip, port);
                }
#endif
            }
            catch (Exception e)
            {
                Console.Out.WriteLine("Connect exception: " + e);
            }
        }

        /// <summary>
        /// Applies the given stiffness value to a named motor group.
        /// Motors must be made stiff before they can be driven.
        ///
        /// e.g.:
        ///  * SetMotors("Body", 1.0f) should enable all motors.
        ///  * SetMotors("Body", 0.0f) should disable all motors.
        /// </summary>
        /// <param name="name">motor group to influence</param>
        /// <param name="value">amount of stiffness</param>
        private void SetMotors(string name, float value)
        {
            // Connect lazily on first use.
            if (motionProxy == null)
            {
                Connect(NAO_IP, NAO_PORT);
            }

            // Connect() fails silently, so re-check before issuing the command.
            if (motionProxy != null)
            {
                motionProxy.setStiffnesses(name, value);
            }
        }

        /// <summary>
        /// Drives the specified motor to the given angle (in degrees),
        /// clamped to that motor's legal range from angleLimits (which is
        /// keyed by radians — TODO confirm against the limit constants).
        /// No-op when motor messages are suppressed via sendMotorMessages.
        /// </summary>
        /// <param name="name">the enum representation of the motor</param>
        /// <param name="angle">angle to set motor, in degrees</param>
        private void SetMotorAngle(Motor name, float angle)
        {
            if (!sendMotorMessages)
            {
                return;
            }

            // Connect lazily on first use; Connect() fails silently.
            if (motionProxy == null)
            {
                Connect(NAO_IP, NAO_PORT);
            }

            KeyValuePair<float, float> limits = angleLimits[name];
            float radians = DegreesToRadians(angle);

            // Clamp into [min, max] before sending to the robot.
            float naoAngle = Math.Max(Math.Min(radians, limits.Value), limits.Key);

            if (motionProxy != null)
            {
                motionProxy.setAngles(name.ToString(), naoAngle, DEFAULT_MOTOR_SPEED);
            }
        }

        //private void doAction(HmmGesture action)
        //{
        //    SetMotors("Body", 1.0f);
        //    if (action == HmmGesture.WalkForward)
        //    {
        //        motionProxy.walkTo((float).5, 0, 0);
        //        motionProxy.waitUntilWalkIsFinished();
        //    }
        //    else if (action == HmmGesture.WalkBackward)
        //    {
        //        motionProxy.walkTo((float)(-.5), 0, 0);
        //        motionProxy.waitUntilWalkIsFinished();
        //    }
        //    else if (action == HmmGesture.WalkLeft)
        //    {
        //        motionProxy.walkTo(0, (float)0.5, 0);
        //        motionProxy.waitUntilWalkIsFinished();
        //    }
        //    else if (action == HmmGesture.WalkRight)
        //    {
        //        motionProxy.walkTo(0, (float)(-.5), 0);
        //        motionProxy.waitUntilWalkIsFinished();
        //    }
        //}

        #endregion ------------------------------------------------------------
    }
}