﻿using System;
using System.IO;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;
using Coding4Fun.Kinect.Wpf;
using Microsoft.Kinect;
using Microsoft.Speech.AudioFormat;
using Microsoft.Speech.Recognition;
using System.Net;
using System.Net.Sockets;
//using Microsoft.SPOT;
//using Microsoft.Xna.Framework;





namespace videoTest
{
    /// <summary>
    /// Interaction logic for MainWindow.xaml.
    /// Drives a Kinect-controlled robot (the "FEZ" board): skeleton and depth
    /// frames are converted into motor commands based on how far each hand is
    /// pushed forward relative to the chest, while a Microsoft.Speech grammar
    /// switches the robot between modes (start / stop / car / hand) and tilts
    /// the camera servo.
    /// </summary>
    public partial class MainWindow : Window
    {
        // Speech recognition engine built by createSpeechRecognizer().
        // After initialization this and speechRecognizer refer to the same
        // engine instance (see kinectSensorChooser1_KinectSensorChanged).
        private SpeechRecognitionEngine speechEngine;

        // Connection to the FEZ robot controller and the value converter that
        // turns truncated depth deltas into motor command strings.
        fezConnect fez = new fezConnect();
        movements move = new movements();

        // The active Kinect sensor, set in kinectSensorChooser1_KinectSensorChanged.
        KinectSensor newSensor;

        // Brushes used for on-screen feedback of the current voice mode.
        SolidColorBrush speechFill = new SolidColorBrush();
        SolidColorBrush colorBrush = new SolidColorBrush();
        SolidColorBrush colorBrush2 = new SolidColorBrush();
        SolidColorBrush colorBrush3 = new SolidColorBrush();
        LinearGradientBrush myGradBrush = new LinearGradientBrush();
        LinearGradientBrush myGradBrush2 = new LinearGradientBrush();

        // Legacy buffers from when raw bytes were sent to the FEZ; kept for
        // compatibility with the other half of this partial class.
        byte[] truncated;
        byte[] data = new byte[4];

        // chest (shoulder-center) depth reading
        double centerData;
        // left hand depth reading
        double calibX;
        // right hand depth reading
        double calibY;
        // truncated per-hand values sent through movements
        double truncX;
        double truncY;
        // chest-relative hand distances (scaled)
        double convertX;
        double convertY;
        // average of both hand distances (used by brkThresh)
        double combineXY;
        // used to calculate the angle of the arm
        public double offSetA = 0;
        // starting position for the camera tilt servo, in degrees
        int camPos = 170;
        // counters used by sendTo() to throttle data sent to the FEZ
        int sendCounterX = 0;
        int sendCounterY = 1;

        // on/off states for each part of the robot, toggled by voice commands
        bool startData = false;
        bool carEnabled = false;
        bool handEnabled = false;
        bool allStopEnabled = false;
        // true while the window is closing; stops frame processing and lets
        // the dispose path know the app is shutting down
        bool closing = false;
        // set by sendTo(); records which axis was selected last
        bool sendToCount = false;
        // number of skeleton slots the sensor tracks simultaneously; each new
        // person entering the scene occupies a slot
        const int skeletonCount = 6;

        // buffer the sensor copies skeleton data into each frame
        Skeleton[] allSkeletons = new Skeleton[skeletonCount];

        // engine instance that is fed the Kinect audio stream (same object as
        // speechEngine once createSpeechRecognizer() has run)
        private SpeechRecognitionEngine speechRecognizer;


        /// <summary>
        /// Opens the FEZ connection, homes the camera servo to its starting
        /// angle and initialises the feedback gradient brushes.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            // start communication with the FEZ
            fez.start();
            // send the FEZ the starting angle for the camera servo
            // (derived from camPos instead of a duplicated "Z170" literal)
            fez.sendData("Z" + camPos);
            // both feedback gradients run corner-to-corner
            myGradBrush.StartPoint = new System.Windows.Point(0, 0);
            myGradBrush.EndPoint = new System.Windows.Point(1, 1);
            myGradBrush2.StartPoint = new System.Windows.Point(0, 0);
            myGradBrush2.EndPoint = new System.Windows.Point(1, 1);
        }


        /// <summary>
        /// Finds the installed Kinect-capable en-US speech recognizer.
        /// </summary>
        /// <returns>The matching <see cref="RecognizerInfo"/>, or null when no
        /// Kinect en-US recognizer is installed.</returns>
        private static RecognizerInfo GetKinectRecognizer()
        {
            foreach (RecognizerInfo recognizer in SpeechRecognitionEngine.InstalledRecognizers())
            {
                string value;
                recognizer.AdditionalInfo.TryGetValue("Kinect", out value);
                if ("True".Equals(value, StringComparison.OrdinalIgnoreCase)
                    && "en-US".Equals(recognizer.Culture.Name, StringComparison.OrdinalIgnoreCase))
                {
                    return recognizer;
                }
            }

            return null;
        }


        /// <summary>
        /// Configures the Kinect audio source and starts continuous speech
        /// recognition on its stream. These settings can be modified to try
        /// and get better recognition results.
        /// </summary>
        private void setAudioPrefs()
        {
            var audioSource = newSensor.AudioSource;

            audioSource.BeamAngleMode = BeamAngleMode.Adaptive;

            var kinectStream = audioSource.Start();

            // 16 kHz, 16-bit, mono PCM — the format the Kinect mic array delivers
            speechRecognizer.SetInputToAudioStream(kinectStream,
                new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));

            speechRecognizer.RecognizeAsync(RecognizeMode.Multiple);

            newSensor.AudioSource.EchoCancellationMode = EchoCancellationMode.None;
            newSensor.AudioSource.AutomaticGainControlEnabled = false;
        }


        /// <summary>
        /// Builds a speech recognition engine with the command grammar loaded
        /// and the recognition event handlers attached.
        /// </summary>
        /// <returns>The initialised engine (also stored in speechEngine).</returns>
        /// <exception cref="InvalidOperationException">Thrown when no Kinect
        /// en-US speech recognizer is installed.</exception>
        private SpeechRecognitionEngine createSpeechRecognizer()
        {
            RecognizerInfo ri = GetKinectRecognizer();

            // GetKinectRecognizer() returns null when the Kinect language pack
            // is missing; fail with a clear message instead of an NRE below.
            if (ri == null)
            {
                throw new InvalidOperationException(
                    "No Kinect en-US speech recognizer is installed.");
            }

            this.speechEngine = new SpeechRecognitionEngine(ri.Id);

            // programmatically build the grammar: the phrases that can be said
            var directions = new Choices();
            directions.Add("start");
            directions.Add("stop");
            directions.Add("car");
            directions.Add("Hand");
            directions.Add("Camera Up");
            directions.Add("Camera Down");
            directions.Add("Forward");
            directions.Add("Backward");
            directions.Add("Turn Left");
            directions.Add("Turn Right");

            var gb = new GrammarBuilder { Culture = ri.Culture };
            gb.Append(directions);

            var g = new Grammar(gb);
            speechEngine.LoadGrammar(g);

            speechEngine.SpeechRecognized += speechEngine_SpeechRecognized;
            speechEngine.SpeechRecognitionRejected += speechEngine_SpeechRecognitionRejected;
            speechEngine.SpeechHypothesized += speechEngine_SpeechHypothesized;

            return speechEngine;
        }


        /// <summary>
        /// Window Loaded handler: initialises the "off" feedback colour.
        /// Sensor hookup happens in kinectSensorChooser1_KinectSensorChanged.
        /// </summary>
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            //kinectSensorChooser1.KinectSensorChanged += new DependencyPropertyChangedEventHandler(kinectSensorChooser1_KinectSensorChanged);

            colorBrush2.Color = System.Windows.Media.Color.FromRgb(0, 0, 0);
        }

        /// <summary>
        /// Shows feedback when an utterance was not matched by the grammar.
        /// </summary>
        private void speechEngine_SpeechRecognitionRejected(object sender, SpeechRecognitionRejectedEventArgs e)
        {
            alert.Text = "Not recognized, please try again.";
        }

        /// <summary>
        /// Shows the engine's in-progress guess and its confidence score.
        /// </summary>
        private void speechEngine_SpeechHypothesized(object sender, SpeechHypothesizedEventArgs e)
        {
            alert.Text = "Hypothesized: " + e.Result.Text + " " + e.Result.Confidence;
        }

        /// <summary>
        /// Dispatches a recognized voice command: mode switches (stop / hand /
        /// car / start) update the state flags and feedback colours; the
        /// camera commands nudge the tilt servo by 5 degrees.
        /// </summary>
        private void speechEngine_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
        {
            string temp = e.Result.Text.ToLowerInvariant();

            switch (temp)
            {
                case "stop":
                    if (allStopEnabled == false)
                    {
                        // zero both motors immediately
                        fez.sendData("X0");
                        fez.sendData("Y0");

                        startData = false;
                        allStopEnabled = true;
                        handEnabled = false;
                        carEnabled = false;

                        colorBrush.Color = System.Windows.Media.Color.FromRgb(255, 0, 0);
                    }
                    break;

                case "hand":
                    if (handEnabled == false)
                    {
                        handEnabled = true;
                        allStopEnabled = false;
                        carEnabled = false;

                        colorBrush.Color = System.Windows.Media.Color.FromRgb(0, 0, 248);
                    }
                    break;

                case "car":
                    if (carEnabled == false)
                    {
                        carEnabled = true;
                        allStopEnabled = false;
                        handEnabled = false;

                        colorBrush.Color = System.Windows.Media.Color.FromRgb(0, 250, 0);
                    }
                    break;

                case "start":
                    startData = true;
                    allStopEnabled = false;

                    colorBrush3.Color = System.Windows.Media.Color.FromRgb(0, 250, 0);
                    break;

                case "camera up":
                    // a smaller servo angle tilts the camera up
                    camPos -= 5;
                    fez.sendData("Z" + camPos);
                    break;

                case "camera down":
                    camPos += 5;
                    fez.sendData("Z" + camPos);
                    break;
            }
        }


        /// <summary>
        /// Swaps the active Kinect sensor: stops the old one, enables the
        /// skeleton/depth/color streams on the new one, subscribes to frame
        /// events and starts speech recognition on its audio stream.
        /// </summary>
        void kinectSensorChooser1_KinectSensorChanged(object sender, DependencyPropertyChangedEventArgs e)
        {
            KinectSensor oldSensor = (KinectSensor)e.OldValue;

            stopKinect(oldSensor);

            newSensor = (KinectSensor)e.NewValue;

            if (newSensor == null)
            {
                return;
            }

            newSensor.SkeletonStream.Enable();
            newSensor.DepthStream.Enable();
            newSensor.ColorStream.Enable();

            newSensor.AllFramesReady += new EventHandler<AllFramesReadyEventArgs>(newSensor_AllFramesReady);

            try
            {
                newSensor.Start();
            }
            catch (System.IO.IOException)
            {
                // another application already owns the sensor; leave it be
                //kinectSensorChooser1.AppConflictOccurred();
            }

            speechRecognizer = createSpeechRecognizer();

            setAudioPrefs();
        }

        /// <summary>
        /// Per-frame handler: finds the first tracked skeleton and forwards it
        /// to GetCameraPoint. Does nothing while the window is closing or when
        /// nobody is tracked.
        /// </summary>
        void newSensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            if (closing)
            {
                return;
            }

            Skeleton first = GetFirstSkeleton(e);

            if (first == null)
            {
                return;
            }

            GetCameraPoint(first, e);
        }

        /// <summary>
        /// Copies this frame's skeleton data into allSkeletons and returns the
        /// first tracked skeleton, or null when there is no skeleton frame or
        /// nobody is being tracked.
        /// </summary>
        Skeleton GetFirstSkeleton(AllFramesReadyEventArgs e)
        {
            using (SkeletonFrame skeletonFrameData = e.OpenSkeletonFrame())
            {
                // frames can legitimately be dropped
                if (skeletonFrameData == null)
                {
                    return null;
                }

                skeletonFrameData.CopySkeletonDataTo(allSkeletons);

                Skeleton first = (from s in allSkeletons
                                  where s.TrackingState == SkeletonTrackingState.Tracked
                                  select s).FirstOrDefault();

                return first;
            }
        }

        /// <summary>
        /// Positions a canvas element at a joint's location, scaled to the
        /// on-screen coordinate space (0.7 = fraction of joint travel used).
        /// </summary>
        private void ScalePosition(FrameworkElement element, Joint joint)
        {
            Joint scaledJoint = joint.ScaleTo(1280, 980, .7f, .7f);

            Canvas.SetLeft(element, scaledJoint.Position.X);
            Canvas.SetTop(element, scaledJoint.Position.Y);
        }

        /// <summary>
        /// Maps the tracked skeleton's shoulder centre and both hands onto the
        /// depth image, converts the chest-relative hand distances into motor
        /// values, and (throttled by sendTo) sends one axis command per frame
        /// to the FEZ while updating the on-screen sliders and gradients.
        /// </summary>
        void GetCameraPoint(Skeleton first, AllFramesReadyEventArgs e)
        {
            using (DepthImageFrame depth = e.OpenDepthImageFrame())
            {
                // depth frames can be dropped; bail out instead of
                // dereferencing null (this guard was previously commented out)
                if (depth == null)
                {
                    return;
                }

                // map each joint of interest onto the depth map
                DepthImagePoint sholderDepthPoint =
                    depth.MapFromSkeletonPoint(first.Joints[JointType.ShoulderCenter].Position);
                centerData = sholderDepthPoint.Depth;

                DepthImagePoint leftDepthPoint =
                    depth.MapFromSkeletonPoint(first.Joints[JointType.HandLeft].Position);
                calibX = leftDepthPoint.Depth;

                DepthImagePoint rightDepthPoint =
                    depth.MapFromSkeletonPoint(first.Joints[JointType.HandRight].Position);
                calibY = rightDepthPoint.Depth;

                // Measure arm extension relative to the chest so no explicit
                // calibration is needed — works from any distance within
                // sensor range.
                convertX = (centerData - calibX) / 3;
                convertY = (centerData - calibY) / 3;
                combineXY = (convertX + convertY) / 2;

                truncX = Math.Truncate(convertX);
                truncY = Math.Truncate(convertY);

                // Alternate which wheel gets a command and skip most frames
                // ("N") to keep the serial link responsive.
                switch (sendTo())
                {
                    case "X":
                        leftSlider.Value = App.xValue;

                        if (startData == true)
                        {
                            fez.sendData(move.convertSend_X(truncX));
                        }
                        break;

                    case "Y":
                        rtSlider.Value = App.yValue;

                        if (startData == true)
                        {
                            fez.sendData(move.convertSend_Y(truncY));
                        }
                        break;

                    case "N":
                        break;
                }

                gradiantChange(convertY, convertX);
            }
        }


        /// <summary>
        /// Centres a canvas element on a colour-image point (offset by half
        /// the element's size so the point lands in the middle, not the
        /// top-left corner).
        /// </summary>
        private void CameraPosition(FrameworkElement element, ColorImagePoint point)
        {
            Canvas.SetLeft(element, point.X - element.Width / 2);
            Canvas.SetTop(element, point.Y - element.Height / 2);
        }

        /// <summary>
        /// Stops a Kinect sensor cleanly, including its audio source.
        /// </summary>
        private void stopKinect(KinectSensor sensor)
        {
            if (sensor != null)
            {
                if (sensor.IsRunning)
                {
                    sensor.Stop();

                    if (sensor.AudioSource != null)
                    {
                        sensor.AudioSource.Stop();
                    }
                }
            }
        }

        /// <summary>
        /// Throttles traffic to the FEZ: returns "X" or "Y" once every 15
        /// calls (interleaved so the two axes never fire on the same frame,
        /// which is why sendCounterY resets to 1 while sendCounterX resets
        /// to 0) and "N" otherwise.
        /// </summary>
        private string sendTo()
        {
            if (sendCounterX == 15)
            {
                sendToCount = false;
                sendCounterX = 0;
                return "X";
            }
            else if (sendCounterY == 15)
            {
                sendToCount = true;
                sendCounterY = 1;
                return "Y";
            }
            else
            {
                sendCounterX++;
                sendCounterY++;
                return "N";
            }
        }


        /// <summary>
        /// Threshold-based steering using the chest as the base point:
        /// intended to send per-wheel commands when one hand leads the other
        /// by more than 20 units. Currently disabled (all sends commented
        /// out) because it made the car lag behind commands; kept for future
        /// rework. NOTE(review): the branches are now a proper
        /// if / else-if / else chain — previously the default branch also ran
        /// when x led y.
        /// </summary>
        private void brkThresh(double y, double x)
        {
            double sendDataRight = Math.Truncate(combineXY);
            double sendDataLeft = Math.Truncate(combineXY);

            if (x - 20 > y)
            {
                // left hand leads: steer accordingly
                //convertSend_Left(x);
                //convertSend_Right(y);
            }
            else if (y - 20 > x)
            {
                // right hand leads: steer accordingly
                //convertSend_Left(x);
                //convertSend_Right(y);
            }
            else
            {
                // hands level: drive both wheels with the averaged value
                //convertSend_Left(sendDataLeft);
                //convertSend_Right(sendDataRight);
            }
        }


        /// <summary>
        /// Flags shutdown so frame processing stops; the sensor itself is
        /// stopped elsewhere.
        /// </summary>
        private void Window_Closing(object sender, System.ComponentModel.CancelEventArgs e)
        {
            closing = true;
            // stopKinect(kinectSensorChooser1.Kinect);
        }

        /// <summary>
        /// Disposes the FEZ connection and closes the window.
        /// </summary>
        private void armControll_Click(object sender, RoutedEventArgs e)
        {
            fez.Dispose();
            this.Close();
        }

        /// <summary>
        /// Hook for external components to push a message into the alert
        /// area. Currently a no-op (alert is an instance member and cannot be
        /// reached from this static method).
        /// </summary>
        public static void incomingAlert(String a)
        {
            //alert.Text = a;
        }


        /// <summary>
        /// Updates the two feedback rectangles' gradients based on how far
        /// each hand is extended (y drives myGradBrush, x drives myGradBrush2).
        /// </summary>
        public void gradiantChange(double y, double x)
        {
            myGradBrush.GradientStops.Clear();
            myGradBrush2.GradientStops.Clear();

            const double baseOffset = 0.0;

            // scale hand extension into a gradient offset, truncated to 2 dp
            double leftOffset = Math.Truncate((y / 170) * 100) / 100;
            double rightOffset = Math.Truncate((x / 170) * 100) / 100;

            // clamp both offsets to [0, 2] (previously only the left value
            // was clamped, so the right gradient could get wild offsets)
            if (leftOffset > 2) leftOffset = 2;
            if (leftOffset < 0) leftOffset = 0;
            if (rightOffset > 2) rightOffset = 2;
            if (rightOffset < 0) rightOffset = 0;

            myGradBrush.GradientStops.Add(new GradientStop(Colors.Red, leftOffset));
            myGradBrush.GradientStops.Add(new GradientStop(Colors.Orange, baseOffset));

            myGradBrush2.GradientStops.Add(new GradientStop(Colors.Red, rightOffset));
            myGradBrush2.GradientStops.Add(new GradientStop(Colors.Orange, baseOffset));
        }
    }
}

 