﻿using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;

using System.Timers;

using Microsoft.Research.Kinect.Nui;

namespace InteractiveKidsShow
{
    class Kinect
    {
        //--- Kinect runtime state ---
        Runtime nui;                                         // NUI runtime; created in Load(), shut down in Unload()
        private Camera cam;                                  // camera handle, used for tilt control
        int totalFrames = 0;                                 // depth frames received since Load() (fps counter)
        int lastFrames = 0;                                  // totalFrames value at the previous one-second fps sample
        DateTime lastTime = DateTime.MaxValue;               // time of the previous fps sample; set to Now in Load()
        DateTime exampleButtonActivated = DateTime.MinValue; // NOTE(review): appears unused in this file
        KinectVideo videoWindow;                             // debug window for the raw color stream
        KinectDepth depthWindow;                             // debug window for the false-colored depth stream
        KinectSkeleton skeletonWindow;                       // debug window for the tracked skeleton
        Point leftHand;                                      // last left-hand position (640x480 space, offset by half the hand ellipse)
        Point rightHand;                                     // last right-hand position (640x480 space, offset by half the hand ellipse)
        ArrayList activeButtons;                             // KinectOverlayButton hit areas for the current button request
        Boolean needButtonAnswer = false;                    // true while a button answer is being awaited
        Boolean needGestureAnswer = false;                   // true while a gesture answer is being awaited
        EnumKinectGestures.Gestures gestureRequested = EnumKinectGestures.Gestures.NoGestureDetected; // gesture currently awaited
        DateTime timeRequested = DateTime.MinValue;          // when the current gesture/button request was armed
        int secondsToWaitForAnswer;                          // timeout for the current gesture/button request
        DtwGestureRecognizer _dtw;                           // dynamic-time-warping recognizer for recorded gestures
        private ArrayList jump = new ArrayList();            // rolling buffer of hip-center Y values used to detect the Jump gesture

        //Switch used to ignore certain skeleton frames
        private int _flipFlop;

        //How many skeleton frames to ignore (_flipFlop)
        //1 = capture every frame, 2 = capture every second frame etc.
        private const int Ignore = 2;

        //How many skeleton frames to store in the _video buffer
        private const int BufferSize = 32;

        //The minimum number of frames in the _video buffer before we attempt to start matching gestures
        private const int MinimumFrames = 6;

        //ArrayList of coordinates which are recorded in sequence to define one gesture
        private ArrayList _video;

        //We want to control how depth data gets converted into false-color data
        //for more intuitive visualization, so we keep 32-bit color frame buffer versions of
        //these, to be updated whenever we receive and process a 16-bit frame.
        const int RED_IDX = 2;
        const int GREEN_IDX = 1;
        const int BLUE_IDX = 0;
        byte[] depthFrame32 = new byte[320 * 240 * 4];       // reusable BGR32 buffer for a 320x240 depth frame

        //events raised towards the rest of the application
        public delegate void HandsMovedEventHandler(object sender, HandsMovedArgs e);
        public event HandsMovedEventHandler HandsMoved;                  // raised every tracked skeleton frame with both hand positions
        public delegate void GestureResponseEventHandler(object sender, GestureResponseArgs e);
        public event GestureResponseEventHandler GestureResponse;        // raised when a requested gesture is completed
        public delegate void ButtonResponseEventHandler(object sender, ButtonResponseArgs e);
        public event ButtonResponseEventHandler ButtonResponse;          // raised when a requested button is activated

        //brush color for each joint when drawing in the skeleton debug window
        Dictionary<JointID, Brush> jointColors = new Dictionary<JointID, Brush>() { 
            {JointID.HipCenter, new SolidColorBrush(Color.FromRgb(169, 176, 155))},
            {JointID.Spine, new SolidColorBrush(Color.FromRgb(169, 176, 155))},
            {JointID.ShoulderCenter, new SolidColorBrush(Color.FromRgb(168, 230, 29))},
            {JointID.Head, new SolidColorBrush(Color.FromRgb(200, 0,   0))},
            {JointID.ShoulderLeft, new SolidColorBrush(Color.FromRgb(79,  84,  33))},
            {JointID.ElbowLeft, new SolidColorBrush(Color.FromRgb(84,  33,  42))},
            {JointID.WristLeft, new SolidColorBrush(Color.FromRgb(255, 126, 0))},
            {JointID.HandLeft, new SolidColorBrush(Color.FromRgb(215,  86, 0))},
            {JointID.ShoulderRight, new SolidColorBrush(Color.FromRgb(33,  79,  84))},
            {JointID.ElbowRight, new SolidColorBrush(Color.FromRgb(33,  33,  84))},
            {JointID.WristRight, new SolidColorBrush(Color.FromRgb(77,  109, 243))},
            {JointID.HandRight, new SolidColorBrush(Color.FromRgb(37,   69, 243))},
            {JointID.HipLeft, new SolidColorBrush(Color.FromRgb(77,  109, 243))},
            {JointID.KneeLeft, new SolidColorBrush(Color.FromRgb(69,  33,  84))},
            {JointID.AnkleLeft, new SolidColorBrush(Color.FromRgb(229, 170, 122))},
            {JointID.FootLeft, new SolidColorBrush(Color.FromRgb(255, 126, 0))},
            {JointID.HipRight, new SolidColorBrush(Color.FromRgb(181, 165, 213))},
            {JointID.KneeRight, new SolidColorBrush(Color.FromRgb(71, 222,  76))},
            {JointID.AnkleRight, new SolidColorBrush(Color.FromRgb(245, 228, 156))},
            {JointID.FootRight, new SolidColorBrush(Color.FromRgb(77,  109, 243))}
        };

        /// <summary>
        /// Starts the Kinect: initializes the NUI runtime, opens the color and depth
        /// streams, subscribes to the frame events, creates the (hidden) debug windows
        /// and loads the pre-recorded DTW gestures. On failure a message box is shown
        /// and initialization is abandoned.
        /// </summary>
        public void Load()
        {
            nui = new Runtime();

            try
            {
                nui.Initialize(RuntimeOptions.UseDepthAndPlayerIndex | RuntimeOptions.UseSkeletalTracking | RuntimeOptions.UseColor);
                this.cam = nui.NuiCamera;
            }
            catch (InvalidOperationException)
            {
                System.Windows.MessageBox.Show("Runtime initialization failed. Please make sure Kinect device is plugged in.");
                return;
            }

            try
            {
                nui.VideoStream.Open(ImageStreamType.Video, 2, ImageResolution.Resolution640x480, ImageType.Color);
                nui.DepthStream.Open(ImageStreamType.Depth, 2, ImageResolution.Resolution320x240, ImageType.DepthAndPlayerIndex);
            }
            catch (InvalidOperationException)
            {
                System.Windows.MessageBox.Show("Failed to open stream. Please make sure to specify a supported image type and resolution.");
                return;
            }

            lastTime = DateTime.Now;

            // wire up all frame/coordinate events (method-group syntax is identical to
            // constructing the EventHandler delegates explicitly)
            Skeleton2DDataExtract.Skeleton2DdataCoordReady += NuiSkeleton2DdataCoordReady;
            nui.DepthFrameReady += nui_DepthFrameReady;
            nui.SkeletonFrameReady += nui_SkeletonFrameReady;
            nui.VideoFrameReady += nui_ColorFrameReady;

            // debug windows that can visualize the kinect streams; uncomment the Show()
            // calls below to watch them
            videoWindow = new KinectVideo();
            depthWindow = new KinectDepth();
            skeletonWindow = new KinectSkeleton();
            //videoWindow.Show();
            //depthWindow.Show();
            //skeletonWindow.Show();

            // per-frame state used by the skeleton handler
            leftHand = new Point();
            rightHand = new Point();
            activeButtons = new ArrayList();

            // DTW gesture recognition setup
            _dtw = new DtwGestureRecognizer(12, 0.6, 2, 2, 10);
            _video = new ArrayList();
            LoadGesturesFromFile();
        }

        /// <summary>
        /// Reads "Gestures.txt" and registers each stored gesture sequence with the DTW
        /// recognizer. File format: a line starting with '@' names a gesture, plain
        /// numeric lines are the 12 coordinate values of one frame, a line starting
        /// with '~' terminates a frame, and "----" terminates a gesture.
        /// </summary>
        private void LoadGesturesFromFile()
        {
            int itemCount = 0;
            string line;
            string gestureName = String.Empty;

            ArrayList frames = new ArrayList();
            double[] items = new double[12];

            // using ensures the file handle is released even if parsing throws
            // (previously the reader leaked when Double.Parse failed)
            using (System.IO.StreamReader file = new System.IO.StreamReader("Gestures.txt"))
            {
                while ((line = file.ReadLine()) != null)
                {
                    if (line.StartsWith("@"))
                    {
                        gestureName = line;
                        continue;
                    }

                    if (line.StartsWith("~"))
                    {
                        frames.Add(items);
                        itemCount = 0;
                        items = new double[12];
                        continue;
                    }

                    if (!line.StartsWith("----"))
                    {
                        // Parse with the invariant culture: the gesture file uses '.' as
                        // the decimal separator regardless of the machine's locale
                        items[itemCount] = Double.Parse(line, System.Globalization.CultureInfo.InvariantCulture);
                    }

                    itemCount++;

                    if (line.StartsWith("----"))
                    {
                        _dtw.AddOrUpdate(frames, gestureName);
                        frames = new ArrayList();
                        gestureName = String.Empty;
                        itemCount = 0;
                    }
                }
            }
        }

        /// <summary>
        /// Called when the requested gesture has been detected: clears the outstanding
        /// request state and raises GestureResponse with a successful result.
        /// (resetGestureAndButtonRequests does not clear gestureRequested, so the args
        /// still carry the gesture that was completed.)
        /// </summary>
        private void gestureIsDone()
        {
            resetGestureAndButtonRequests();

            // Copy the delegate and null-check it so that completing a gesture when
            // nobody has subscribed doesn't throw a NullReferenceException
            GestureResponseEventHandler handler = GestureResponse;
            if (handler != null)
            {
                GestureResponseArgs args = new GestureResponseArgs(true, gestureRequested);
                handler(this, args);
            }
        }

        /// <summary>
        /// Raised by the Skeleton2DDataExtract (DTW) class for each processed skeleton.
        /// Maintains a rolling buffer (_video) of 2D coordinate frames and, once enough
        /// frames are buffered, asks the DTW recognizer whether the buffer matches the
        /// gesture the application is currently waiting for.
        /// </summary>
        private void NuiSkeleton2DdataCoordReady(object sender, Skeleton2DdataCoordEventArgs a)
        {
            //if we somehow get to here but don't need a gesture anymore, return
            if (!needGestureAnswer)
                return;

            // We need a sensible number of frames before we start attempting to match gestures against remembered sequences
            if (_video.Count > MinimumFrames)
            {
                string s = _dtw.Recognize(_video);
                skeletonWindow.Title = "Recognised as: " + s;

                if (!s.Contains("__UNKNOWN"))
                {
                    // s.Substring(1) strips the leading '@' of the stored gesture name
                    // (see LoadGesturesFromFile) before comparing with the requested enum
                    if (gestureRequested == (EnumKinectGestures.Gestures)Enum.Parse(typeof(EnumKinectGestures.Gestures), s.Substring(1)))
                    {
                        //done gesture requested!
                        gestureIsDone();
                    }
                    else
                    {
                        // A different gesture was recognized than the one requested,
                        // so reset the buffer
                        _video = new ArrayList();
                    }
                }
            }

            // Ensures that we remember only the last BufferSize frames
            if (_video.Count > BufferSize)
            {
                    // Remove the first (oldest) frame in the buffer
                    _video.RemoveAt(0);
            }

            // Decide which skeleton frames to capture. Only do so if the frames actually returned a number. 
            if (!double.IsNaN(a.GetPoint(0).X))
            {
                // Optionally register only 1 frame out of every n (Ignore = 2 means every second frame)
                _flipFlop = (_flipFlop + 1) % Ignore;

                if (_flipFlop == 0)
                    _video.Add(a.GetCoords());
            }
        }

        /// <summary>
        /// Shuts down the Kinect runtime. Safe to call even if Load() was never called
        /// (or failed before the runtime was created).
        /// </summary>
        public void Unload()
        {
            // Guard against a NullReferenceException when Load() never ran
            if (nui != null)
            {
                nui.Uninitialize();
            }
        }

        /// <summary>
        /// Tilts the Kinect upwards by 5 degrees. Shows a warning if the motor rejects
        /// the request (maximum angle reached or tilted too frequently).
        /// </summary>
        public void TiltUp()
        {
            try
            {
                cam.ElevationAngle = cam.ElevationAngle + 5;
            }
            catch (Exception) // unused exception variable removed (compiler warning CS0168)
            {
                MessageBox.Show("Maximum tilt angle reached or too many tilt requests", "Tilt Warning", MessageBoxButton.OK, MessageBoxImage.Exclamation);
            }
        }

        /// <summary>
        /// Tilts the Kinect downwards by 5 degrees. Shows a warning if the motor rejects
        /// the request (minimum angle reached or tilted too frequently).
        /// </summary>
        public void TiltDown()
        {
            try
            {
                cam.ElevationAngle = cam.ElevationAngle - 5;
            }
            catch (Exception) // unused exception variable removed (compiler warning CS0168)
            {
                MessageBox.Show("Minimum tilt angle reached or too many tilt requests", "Tilt Warning", MessageBoxButton.OK, MessageBoxImage.Exclamation);
            }
        }

        /// <summary>
        /// Registers an invisible hit-test "button" over the video player. One or more
        /// buttons must be registered before requestButtonAnswer() is called; buttonName
        /// is echoed back through the ButtonResponse event if the user selects it.
        /// </summary>
        public void addActiveButton(String buttonName, int x, int y, int width, int height)
        {
            activeButtons.Add(new KinectOverlayButton(buttonName, x, y, width, height));
        }

        /// <summary>
        /// Requests a button answer from the user within a certain amount of time.
        /// Before this method is called, buttons must first be added using
        /// addActiveButton(); calling it with no buttons is reported as a logical error
        /// and the request is not armed.
        /// </summary>
        public void requestButtonAnswer(int secondsToWaitForAnswer)
        {
            if (activeButtons.Count == 0)
            {
                // Previously the request was armed anyway even though it could never be
                // answered; report the programming error and bail out instead
                MessageBox.Show("A button answer was requested, but there are no buttons to select", "Logical Error", MessageBoxButton.OK, MessageBoxImage.Error);
                return;
            }

            this.secondsToWaitForAnswer = secondsToWaitForAnswer;
            timeRequested = DateTime.Now;
            needButtonAnswer = true;
        }

        /// <summary>
        /// Arms a gesture request: the user must perform the given gesture within the
        /// given number of seconds, after which the request expires.
        /// </summary>
        public void requestGesture(EnumKinectGestures.Gestures gesture, int secondsToWaitForAnswer)
        {
            gestureRequested = gesture;
            this.secondsToWaitForAnswer = secondsToWaitForAnswer;
            needGestureAnswer = true;
            timeRequested = DateTime.Now;
        }

        /// <summary>
        /// Converts a 16-bit depth + player-index frame into the reusable 32-bit BGR
        /// buffer, false-coloring each pixel by which player index (0-7) it carries so
        /// different players show up in different colors.
        /// </summary>
        byte[] convertDepthFrame(byte[] depthFrame16)
        {
            int src = 0;
            int dst = 0;
            while (src < depthFrame16.Length && dst < depthFrame32.Length)
            {
                // low 3 bits of the first byte hold the player index;
                // the remaining 13 bits hold the depth measurement
                int player = depthFrame16[src] & 0x07;
                int realDepth = (depthFrame16[src + 1] << 5) | (depthFrame16[src] >> 3);

                // collapse the 13-bit depth into an 8-bit intensity (nearer = brighter);
                // the most significant bit is disregarded
                byte intensity = (byte)(255 - (255 * realDepth / 0x0fff));
                byte half = (byte)(intensity / 2);
                byte quarter = (byte)(intensity / 4);
                byte inverted = (byte)(255 - intensity);

                byte r = 0;
                byte g = 0;
                byte b = 0;

                // choose different display colors based on player
                switch (player)
                {
                    case 0: // no player: dim gray
                        r = half; g = half; b = half;
                        break;
                    case 1:
                        r = intensity;
                        break;
                    case 2:
                        g = intensity;
                        break;
                    case 3:
                        r = quarter; g = intensity; b = intensity;
                        break;
                    case 4:
                        r = intensity; g = intensity; b = quarter;
                        break;
                    case 5:
                        r = intensity; g = quarter; b = intensity;
                        break;
                    case 6:
                        r = half; g = half; b = intensity;
                        break;
                    case 7: // inverted gray
                        r = inverted; g = inverted; b = inverted;
                        break;
                }

                depthFrame32[dst + RED_IDX] = r;
                depthFrame32[dst + GREEN_IDX] = g;
                depthFrame32[dst + BLUE_IDX] = b;

                src += 2; // two source bytes per pixel
                dst += 4; // four destination bytes per pixel
            }
            return depthFrame32;
        }

        /// <summary>
        /// Raised by the nui object when a new depth frame is ready: false-colors it,
        /// pushes it into the depth debug window and updates the once-per-second
        /// frame-rate counters.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        void nui_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            PlanarImage frame = e.ImageFrame.Image;

            depthWindow.depth.Source = BitmapSource.Create(
                frame.Width, frame.Height, 96, 96, PixelFormats.Bgr32, null,
                convertDepthFrame(frame.Bits), frame.Width * 4);

            totalFrames++;

            DateTime now = DateTime.Now;
            if (now.Subtract(lastTime) > TimeSpan.FromSeconds(1))
            {
                // frames seen since the previous sample; feeds the (currently
                // commented-out) fps display below
                int frameDiff = totalFrames - lastFrames;
                lastFrames = totalFrames;
                lastTime = now;
                //frameRate.Text = "Kinect: " + frameDiff.ToString() + " fps";
            }
        }

        /// <summary>
        /// Maps a joint to a point inside the skeleton debug window.
        /// </summary>
        /// <param name="joint"></param>
        /// <returns></returns>
        private Point getDisplayPosition(Joint joint)
        {
            int width = (int)skeletonWindow.skeleton.Width;
            int height = (int)skeletonWindow.skeleton.Height;
            return getDisplayPosition(joint, width, height);
        }

        /// <summary>
        /// Maps a joint's 3D position to 2D display coordinates for an arbitrary
        /// display size, going via depth-image space (320x240) and then color-image
        /// space (640x480).
        /// </summary>
        private Point getDisplayPosition(Joint joint, int displayWidth, int displayHeight)
        {
            float depthX, depthY;
            nui.SkeletonEngine.SkeletonToDepthImage(joint.Position, out depthX, out depthY);
            depthX = depthX * 320; //convert to 320, 240 space
            depthY = depthY * 240; //convert to 320, 240 space
            int colorX, colorY;
            ImageViewArea iv = new ImageViewArea();
            // only ImageResolution.Resolution640x480 is supported at this point
            nui.NuiCamera.GetColorPixelCoordinatesFromDepthPixel(ImageResolution.Resolution640x480, iv, (int)depthX, (int)depthY, (short)0, out colorX, out colorY);

            // map back to displayWidth x displayHeight; use 480.0 so the Y scaling is
            // floating-point like the X scaling (the original 480 forced integer
            // division - same result for non-negative values, but now consistent)
            return new Point((int)(displayWidth * colorX / 640.0), (int)(displayHeight * colorY / 480.0));
        }

        /// <summary>
        /// Builds a polyline joining the given sequence of joints, used to draw one
        /// body segment (arm, leg, spine - not a single joint) in the skeleton
        /// debug window.
        /// </summary>
        private Polyline getBodySegment(Microsoft.Research.Kinect.Nui.JointsCollection joints, Brush brush, params JointID[] ids)
        {
            PointCollection points = new PointCollection(ids.Length);
            foreach (JointID id in ids)
            {
                points.Add(getDisplayPosition(joints[id]));
            }

            return new Polyline
            {
                Points = points,
                Stroke = brush,
                StrokeThickness = 5
            };
        }

        /// <summary>
        /// Raised by the nui object when a skeleton frame is ready. Redraws the
        /// skeleton debug window, expires any timed-out gesture/button request, raises
        /// HandsMoved with the latest hand positions, and performs gesture completion /
        /// button activation checks for any outstanding request.
        /// </summary>
        void nui_SkeletonFrameReady(object sender, SkeletonFrameReadyEventArgs e)
        {
            SkeletonFrame skeletonFrame = e.SkeletonFrame;
            int iSkeleton = 0;
            Brush[] brushes = new Brush[6];
            brushes[0] = new SolidColorBrush(Color.FromRgb(255, 0, 0));
            brushes[1] = new SolidColorBrush(Color.FromRgb(0, 255, 0));
            brushes[2] = new SolidColorBrush(Color.FromRgb(64, 255, 255));
            brushes[3] = new SolidColorBrush(Color.FromRgb(255, 255, 64));
            brushes[4] = new SolidColorBrush(Color.FromRgb(255, 64, 255));
            brushes[5] = new SolidColorBrush(Color.FromRgb(128, 128, 255));

            //clear the current elements from the skeleton debug window
            skeletonWindow.skeleton.Children.Clear();

            //check if there is a button or gesture answer needed and whether its time has run out.
            //BUGFIX: use TotalSeconds rather than Seconds - Seconds is only the 0-59
            //component of the TimeSpan and wraps every minute, so timeouts longer than
            //59 seconds could never expire
            if (needButtonAnswer || needGestureAnswer)
            {
                if (DateTime.Now.Subtract(timeRequested).TotalSeconds > secondsToWaitForAnswer)
                {
                    //the gestures/buttons aren't needed anymore
                    resetGestureAndButtonRequests();
                }
            }

            //for each skeleton in the frame (if user gets lost from frame and comes back, this
            //allows them to be treated as the same person)
            foreach (SkeletonData data in skeletonFrame.Skeletons)
            {
                if (SkeletonTrackingState.Tracked == data.TrackingState)
                {
                    //draw the bones in the skeleton debug window
                    Brush brush = brushes[iSkeleton % brushes.Length];
                    skeletonWindow.skeleton.Children.Add(getBodySegment(data.Joints, brush, JointID.HipCenter, JointID.Spine, JointID.ShoulderCenter, JointID.Head));
                    skeletonWindow.skeleton.Children.Add(getBodySegment(data.Joints, brush, JointID.ShoulderCenter, JointID.ShoulderLeft, JointID.ElbowLeft, JointID.WristLeft, JointID.HandLeft));
                    skeletonWindow.skeleton.Children.Add(getBodySegment(data.Joints, brush, JointID.ShoulderCenter, JointID.ShoulderRight, JointID.ElbowRight, JointID.WristRight, JointID.HandRight));
                    skeletonWindow.skeleton.Children.Add(getBodySegment(data.Joints, brush, JointID.HipCenter, JointID.HipLeft, JointID.KneeLeft, JointID.AnkleLeft, JointID.FootLeft));
                    skeletonWindow.skeleton.Children.Add(getBodySegment(data.Joints, brush, JointID.HipCenter, JointID.HipRight, JointID.KneeRight, JointID.AnkleRight, JointID.FootRight));

                    //draw the joints in the skeleton debug window (a short thick line per joint)
                    foreach (Joint joint in data.Joints)
                    {
                        Point jointPos = getDisplayPosition(joint);
                        Line jointLine = new Line();
                        jointLine.X1 = jointPos.X - 3;
                        jointLine.X2 = jointLine.X1 + 6;
                        jointLine.Y1 = jointLine.Y2 = jointPos.Y;
                        jointLine.Stroke = jointColors[joint.ID];
                        jointLine.StrokeThickness = 6;
                        skeletonWindow.skeleton.Children.Add(jointLine);
                    }

                    //so that the hand ellipses are in the /correct/ place on the screen
                    int halfEllipseSize = 12;

                    //calculate and store the correct position for the left hand
                    Point displayPosition = getDisplayPosition(data.Joints[JointID.HandLeft], 640, 480);
                    leftHand.X = displayPosition.X - halfEllipseSize;
                    leftHand.Y = displayPosition.Y - halfEllipseSize;

                    //calculate and store the correct position for the right hand
                    displayPosition = getDisplayPosition(data.Joints[JointID.HandRight], 640, 480);
                    rightHand.X = displayPosition.X - halfEllipseSize;
                    rightHand.Y = displayPosition.Y - halfEllipseSize;

                    //call the event to update the hand positions on the video player.
                    //BUGFIX: copy the delegate and null-check it so that a frame arriving
                    //before anyone subscribes doesn't throw a NullReferenceException
                    HandsMovedEventHandler handsMoved = HandsMoved;
                    if (handsMoved != null)
                    {
                        handsMoved(this, new HandsMovedArgs(leftHand, rightHand));
                    }

                    if (needGestureAnswer)
                    {
                        if (gestureRequested == EnumKinectGestures.Gestures.Jump)
                        {
                            //only keeping the last 5 positions so delete the one at the start of the arraylist now
                            if (jump.Count == 5)
                                jump.RemoveAt(0);

                            //add the hip position to the arraylist, ignore it if it is 17 (which it shouldn't be able to be anyway)
                            //because 17 seems to be the default value and results in problems
                            //(the position is now computed once instead of twice - same joint, same frame)
                            double hipY = getDisplayPosition(data.Joints[JointID.HipCenter], 640, 480).Y;
                            if (hipY != 17.0)
                                jump.Add(hipY);

                            //as long as there are 5 positions in the jump array, we can check and see if a jump has been performed
                            if (jump.Count == 5)
                            {
                                if ((double)jump[0] < (double)jump[1] && (double)jump[1] < (double)jump[2] &&
                                    (double)jump[2] < (double)jump[3] && (double)jump[3] > (double)jump[4])
                                {
                                    gestureIsDone();
                                }
                            }
                        }
                        else if (gestureRequested == EnumKinectGestures.Gestures.RaiseLeftHandAboveHead)
                        {
                            //screen Y grows downward, so "above the head" means a smaller Y value
                            if (getDisplayPosition(data.Joints[JointID.HandLeft], 640, 480).Y < getDisplayPosition(data.Joints[JointID.Head], 640, 480).Y)
                                gestureIsDone();
                        }
                        else if (gestureRequested == EnumKinectGestures.Gestures.RaiseRightHandAboveHead)
                        {
                            //if the position of the right hand is above the position of the head, the gesture is done
                            if (getDisplayPosition(data.Joints[JointID.HandRight], 640, 480).Y < getDisplayPosition(data.Joints[JointID.Head], 640, 480).Y)
                                gestureIsDone();
                        }
                        else if (gestureRequested == EnumKinectGestures.Gestures.RaiseBothHandsAboveHead)
                        {
                            //if the position of both hands are above the position of the head, the gesture is done
                            //(head position computed once instead of twice)
                            double headY = getDisplayPosition(data.Joints[JointID.Head], 640, 480).Y;
                            if (getDisplayPosition(data.Joints[JointID.HandLeft], 640, 480).Y < headY
                                && getDisplayPosition(data.Joints[JointID.HandRight], 640, 480).Y < headY)
                                gestureIsDone();
                        }
                        //if the gesture wasn't any of the above, it must be one of the DTW gestures
                        else
                        {
                            Skeleton2DDataExtract.ProcessData(data);
                        }
                    }

                    if (needButtonAnswer)
                    {
                        foreach (KinectOverlayButton thisButton in activeButtons)
                        {
                            //check if the centre of one of the user's hand ellipses
                            //(stored position + halfEllipseSize) is within this button's area
                            if ((leftHand.Y + halfEllipseSize >= thisButton.y &&
                                leftHand.Y + halfEllipseSize <= thisButton.y + thisButton.height &&
                                leftHand.X + halfEllipseSize >= thisButton.x &&
                                leftHand.X + halfEllipseSize <= thisButton.x + thisButton.width)
                                ||
                                (rightHand.Y + halfEllipseSize >= thisButton.y &&
                                rightHand.Y + halfEllipseSize <= thisButton.y + thisButton.height &&
                                rightHand.X + halfEllipseSize >= thisButton.x &&
                                rightHand.X + halfEllipseSize <= thisButton.x + thisButton.width))
                            {
                                //if the button hasn't already started to activate, do that now
                                if (thisButton.activated == DateTime.MinValue)
                                {
                                    thisButton.activated = DateTime.Now;
                                }
                                //otherwise, see if it has been activating for at least a second.
                                //BUGFIX: TotalSeconds, not Seconds (which wraps every minute)
                                else if (DateTime.Now.Subtract(thisButton.activated).TotalSeconds >= 1)
                                {
                                    //button activated
                                    resetGestureAndButtonRequests();

                                    //copy the delegate and null-check it, as with HandsMoved above
                                    ButtonResponseEventHandler buttonResponse = ButtonResponse;
                                    if (buttonResponse != null)
                                    {
                                        buttonResponse(this, new ButtonResponseArgs(thisButton.buttonName));
                                    }
                                }
                            }
                            //if no hand is within the area, make sure the activation time is reset
                            else
                            {
                                thisButton.activated = DateTime.MinValue;
                            }
                        }
                    }
                }

                iSkeleton++;
            }
        }

        /// <summary>
        /// Raised by the nui object when a normal video frame is ready: pushes the raw
        /// 32-bit-per-pixel image straight into the video debug window.
        /// </summary>
        void nui_ColorFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            PlanarImage frame = e.ImageFrame.Image;
            int stride = frame.Width * frame.BytesPerPixel;

            videoWindow.video.Source = BitmapSource.Create(
                frame.Width, frame.Height, 96, 96, PixelFormats.Bgr32, null, frame.Bits, stride);
        }

        /// <summary>
        /// Clears all outstanding gesture and button request state so nothing is
        /// detected while no answer is needed.
        /// </summary>
        private void resetGestureAndButtonRequests()
        {
            needButtonAnswer = false;
            needGestureAnswer = false;
            timeRequested = DateTime.MinValue;

            //swap in a fresh list rather than Clear(): nui_SkeletonFrameReady can call
            //this method while it is still foreach-ing over the old activeButtons list,
            //and clearing a collection during enumeration would throw
            activeButtons = new ArrayList();
        }
    }
}