﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;
using Microsoft.Samples.Kinect.WpfViewers;
using Microsoft.Kinect;
using System.Speech;
using Microsoft.Speech;
using Microsoft.Speech.Recognition;
using Microsoft.Speech.AudioFormat;
using System.Threading;
using System.Diagnostics;
using System.Timers;
using Microsoft.Xna.Framework;
using System.Net;
using System.IO;






namespace KinBot_Controller_V2
{
    /// <summary>
    /// Interaction logic for MainWindow.xaml
    /// </summary>
    public partial class MainWindow : Window
    {


        // Speech recognition engine; built inside createSpeechRecognizer(), which
        // also returns it (so speechRecognizer below references the same instance).
        private SpeechRecognitionEngine speechEngine;

        //creates a new instance of the App.fez (robot controller)
        //App.fezConnectt App.fez = new App.fezConnectt();
        //movements move = new movements();

        //speech speeching = new speech();

        // Active Kinect sensor; assigned by the sensor-chooser changed event.
        KinectSensor newSensor;

       


        // Car on/off indicator images.
        // NOTE(review): not referenced in this file — verify they are not used
        // from another part of this partial class before removing.
        Uri carOn = new Uri("Images/carOn.png", UriKind.Relative);
        Uri carOff = new Uri("Images/car.png", UriKind.Relative);



        Image tempImage = new Image();
        BitmapImage tempBitmap = new BitmapImage();

        int picIncrementer = 0;


        // A byte array that will be sent to the fez controller.
        //byte[] truncated;
        byte[] data = new byte[4];

        // Chest (shoulder-center) depth reading, the reference distance for both hands.
        double centerData;
        // Left hand depth reading.
        double calibX;
        // Right hand depth reading.
        double calibY;
        // Truncated per-hand values, ready for transmission.
        double truncX;
        double truncY;
        // Scaled hand-to-chest distances that get sent to the fez.
        double convertX;
        double convertY;
        // Average of the two scaled distances.
        double combineXY;
        // Used to calculate the angle of the arm.
        public double offSetA = 0;
        // The starting position for the camera tilt servo, in degrees.
        int camPos = 140;
        // Round-robin counters used by sendTo() to limit how often data goes to
        // the fez: the cycle is N, X, Y, so each axis sends once every three frames.
        int sendCounterX = 0;
        int sendCounterY = 1;

        // On/off states for each part of the robot, toggled by voice commands.
        bool startData = false;
        bool carEnabled = false;
        bool handEnabled = false;
        bool allStopEnabled = false;
        // Lets frame handlers know the program is closing; also used to shut down the Kinect.
        bool closing = false;
        // Set by sendTo() to record which axis was handed out last (written, never read here).
        bool sendToCount = false;
        bool camInitiated = false;

        // The number of skeleton slots the Kinect runtime reports per frame; each
        // time a new person enters the scene they are considered a new skeleton.
        const int skeletonCount = 6;

        // The array that receives the per-frame skeleton data.
        Skeleton[] allSkeletons = new Skeleton[skeletonCount];

        // The speech recognizer engine used for voice commands.
        private SpeechRecognitionEngine speechRecognizer;

        // Swaps the UI indicator images on and off.
        imageChanger imgChanger = new imageChanger();

       // kinect kinect1 = new kinect();
        
      


        /// <summary>
        /// Initializes the window, builds the speech recognizer, and moves the
        /// robot's camera servo to its starting angle.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            speechRecognizer = createSpeechRecognizer();

            // Point the camera tilt servo at its initial position (camPos degrees).
            App.fez.sendData("Z" + camPos);
            
            //Thread t = new Thread(new ThreadStart(getCamImage));
            //t.IsBackground = true;
            //t.Start();
           
        }

        //public  void startTimer()
        //{
        //    myTimer = new System.Threading.Timer(2000);

        //    myTimer.Elapsed += new ElapsedEventHandler(timerElapsed);
        //    myTimer.Enabled = true;
        //}

        // void timerElapsed(object sender, ElapsedEventArgs e)
        //{
        //    //alert1.Text = "Timer Finished";
        //    getKinectData();
        //}

        /// <summary>
        /// Handles the sensor chooser's changed event: stops the old sensor,
        /// enables the skeleton/depth/color streams on the new one, starts it,
        /// and wires up frame and audio processing.
        /// </summary>
        private void KinectSensorChooser_KinectSensorChanged_1(object sender, DependencyPropertyChangedEventArgs e)
        {
            KinectSensor oldSensor = (KinectSensor)e.OldValue;

            stopKinect(oldSensor);

            newSensor = (KinectSensor)e.NewValue;

            if (newSensor == null)
            {
                return;
            }

            newSensor.SkeletonStream.Enable();
            newSensor.DepthStream.Enable();
            newSensor.ColorStream.Enable();

            newSensor.AllFramesReady += new EventHandler<AllFramesReadyEventArgs>(newSensor_AllFramesReady);

            try
            {
                newSensor.Start();
            }
            catch (System.IO.IOException ex)
            {
                // Another application already owns the sensor. Previously this
                // was swallowed silently and setAudioPrefs() still ran against a
                // sensor that never started; surface the problem and bail out.
                alert1.Text = "Kinect unavailable: " + ex.Message;
                return;
            }

            setAudioPrefs();
        }


        //public void getKinectData()
        //{
        //    if (kinect1.isReady())
        //    {

        //        while (kinect1.first != null )
        //        {
        //            myTimer.Stop();
        //            myTimer.Dispose();
        //            string i = kinect1.GetCameraPoint(kinect1.first, kinect1.framePass);

        //            setLighup(kinect1.lightUpState);

        //            this.alert1.Text = "Kinected" + i;


        //        }

        //        if (kinect1.first == null)
        //        {
        //            //alert1.Text = "Please connect a Kinect";
        //            //startTimer();
        //            sleepThread();

        //        }
        //    }

        //    else
        //    {
        //        //alert1.Text = "No Kinect";
        //        //startTimer();
        //        //Thread.Sleep(1000);
        //        sleepThread();

        //    }
        //}

        //public void sleepThread()
        //{
        //    Thread.Sleep(1000);

        //    getKinectData();
       // }

        //private void setLighup(string s)
        //{
        //    switch (s)
        //    {
        //        case "camera up":
        //            {
        //                cameraLens.Source = imgChanger.setImages("cameraOn");
        //                cameraUp.Source = imgChanger.setImages("cameraUp");

        //                cameraDown.Source = imgChanger.setImages("cameraDownOff");
        //                //camInitiated = true;
        //                //camPos -= 10;
        //                //App.fez.sendData("Z" + camPos);
        //                cameraUp.Source = imgChanger.setImages("cameraUpOff");
        //                cameraLens.Source = imgChanger.setImages("cameraOff");
        //                break;
        //            }
        //        case "camera down":
        //            {
        //                cameraLens.Source = imgChanger.setImages("cameraOn");
        //                cameraDown.Source = imgChanger.setImages("cameraDown");
        //                cameraUp.Source = imgChanger.setImages("cameraUpOff");
        //                //camInitiated = false;
        //                //camPos += 10;
        //                //App.fez.sendData("Z" + camPos);
        //                cameraLens.Source = imgChanger.setImages("cameraOff");
        //                cameraDown.Source = imgChanger.setImages("cameraDownOff");


        //                //cameraDown.Source = imgChanger.setImages("cameraDownOff");
        //                break;
        //            }
        //    }
        //}

        /// <summary>
        /// Finds the installed en-US speech recognizer that advertises Kinect
        /// support, or null when none is installed.
        /// </summary>
        private static RecognizerInfo GetKinectRecognizer()
        {
            return SpeechRecognitionEngine.InstalledRecognizers()
                .FirstOrDefault(candidate =>
                {
                    string kinectValue;
                    candidate.AdditionalInfo.TryGetValue("Kinect", out kinectValue);

                    return "True".Equals(kinectValue, StringComparison.OrdinalIgnoreCase)
                        && "en-US".Equals(candidate.Culture.Name, StringComparison.OrdinalIgnoreCase);
                });
        }



        // checks for data from the kinect sensor and send it to the get skeleton 
        /// <summary>
        /// Per-frame callback: grabs the first tracked skeleton and forwards it
        /// to GetCameraPoint. Does nothing while the application is closing or
        /// when nobody is being tracked.
        /// </summary>
        void newSensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            if (closing)
            {
                return;
            }

            Skeleton tracked = GetFirstSkeleton(e);

            if (tracked != null)
            {
                GetCameraPoint(tracked, e);
            }
        }

         //creates skeleton information from the raw data

        /// <summary>
        /// Opens the skeleton frame and returns the first tracked skeleton, or
        /// null when no frame is available (stopping the motors in that case).
        /// </summary>
        Skeleton GetFirstSkeleton(AllFramesReadyEventArgs e)
        {
            using (SkeletonFrame skeletonFrameData = e.OpenSkeletonFrame())
            {
                if (skeletonFrameData == null)
                {
                    // No skeleton data this frame: make sure the motors stop.
                    setOffState();
                    return null;
                }

                // Copy this frame's skeleton data into the reusable array.
                skeletonFrameData.CopySkeletonDataTo(allSkeletons);

                // Hand back the first skeleton the runtime is actively tracking.
                return allSkeletons.FirstOrDefault(
                    s => s.TrackingState == SkeletonTrackingState.Tracked);
            }
        }
        // Stops both drive motors ("X0"/"Y0") and dims the camera tilt
        // indicators. The order of the two sendData calls is preserved in case
        // the fez controller is order-sensitive.
        private void setOffState()
        {
            App.fez.sendData("X0");
            App.fez.sendData("Y0");
            //mic.Source = imgChanger.setImages("micOff");
            //cameraLens.Source = imgChanger.setImages("cameraOff");
            cameraDown.Source = imgChanger.setImages("cameraDownOff");
            cameraUp.Source = imgChanger.setImages("cameraUpOff");
        }


        /// <summary>
        /// Converts the tracked skeleton's hand positions into drive values.
        /// Each hand's depth is measured relative to the chest (so it works
        /// from any distance within sensor range without calibration), scaled,
        /// truncated, and — at a rate throttled by sendTo() — transmitted to
        /// the fez controller.
        /// </summary>
        /// <param name="first">The tracked skeleton to process.</param>
        /// <param name="e">Frame event args providing the depth frame.</param>
        public void GetCameraPoint(Skeleton first, AllFramesReadyEventArgs e)
        {
            using (DepthImageFrame depth = e.OpenDepthImageFrame())
            {
                if (depth == null)
                {
                    return;
                }

                // Depth of the shoulder center: the reference distance for both hands.
                DepthImagePoint sholderDepthPoint =
                    depth.MapFromSkeletonPoint(first.Joints[JointType.ShoulderCenter].Position);
                centerData = sholderDepthPoint.Depth;

                // Left hand depth.
                DepthImagePoint leftDepthPoint =
                    depth.MapFromSkeletonPoint(first.Joints[JointType.HandLeft].Position);
                calibX = leftDepthPoint.Depth;

                // Right hand depth.
                DepthImagePoint rightDepthPoint =
                    depth.MapFromSkeletonPoint(first.Joints[JointType.HandRight].Position);
                calibY = rightDepthPoint.Depth;

                // Distance of each hand in front of the chest, scaled down.
                convertX = (centerData - calibX) / 3;
                convertY = (centerData - calibY) / 3;
                combineXY = (convertX + convertY) / 2;

                truncX = Math.Truncate(convertX);
                truncY = Math.Truncate(convertY);

                // Show the angle between right wrist and right shoulder
                // (measured at the elbow) for debugging.
                alert1.Text = Math.Truncate(jointAngleCalc(first)).ToString();

                // NOTE: the original code also mapped the three depth points to
                // color-image coordinates here, but those results were never
                // used; the dead per-frame work has been removed.

                // Throttle transmissions: sendTo() cycles N -> X -> Y, so each
                // axis sends once every three frames.
                switch (sendTo())
                {
                    case "X":
                        leftSlider.Value = truncX;

                        if (startData)
                        {
                            App.fez.sendData(App.move.convertSend_X(truncX));
                        }
                        break;

                    case "Y":
                        rtSlider.Value = truncY;

                        if (startData)
                        {
                            App.fez.sendData(App.move.convertSend_Y(truncY));
                        }
                        break;

                    case "N":
                        break;
                }
            }
        }


        /// <summary>
        /// Round-robin throttle for outgoing motor data. Returns "X", "Y", or
        /// "N" (send nothing). The counters are arranged so the result cycles
        /// N, X, Y, N, X, Y, ... — each axis gets one send out of every three calls.
        /// </summary>
        private string sendTo()
        {
            string target;

            if (sendCounterX == 1)
            {
                sendToCount = false;
                sendCounterX = 0;
                target = "X";
            }
            else if (sendCounterY == 2)
            {
                sendToCount = true;
                sendCounterY = 1;
                target = "Y";
            }
            else
            {
                sendCounterX++;
                sendCounterY++;
                target = "N";
            }

            return target;
        }



        // Sets all of the audio preferences for the kinect sensor, these can be modified to try and get better results.
        /// <summary>
        /// Configures the Kinect audio source for speech recognition and starts
        /// streaming audio into the recognizer. All audio properties are now set
        /// BEFORE the source is started: in particular, automatic gain control
        /// should be disabled before the stream is handed to the speech engine
        /// (the original code set it after Start()/RecognizeAsync).
        /// </summary>
        private void setAudioPrefs()
        {
            var audioSource = newSensor.AudioSource;

            audioSource.BeamAngleMode = BeamAngleMode.Adaptive;

            // Added to help remove background noise; remove if audio is not working.
            audioSource.NoiseSuppression = true;

            audioSource.EchoCancellationMode = EchoCancellationMode.None;

            // AGC interferes with speech recognition and must be off before the
            // stream is used by the recognizer.
            audioSource.AutomaticGainControlEnabled = false;

            var kinectStream = audioSource.Start();

            speechRecognizer.SetInputToAudioStream(kinectStream,
                new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));

            speechRecognizer.RecognizeAsync(RecognizeMode.Multiple);

            mic.Source = imgChanger.setImages("micOn");
        }

        //Returns a speech recognition engine that has its grammer library built.
        /// <summary>
        /// Builds a speech recognition engine with the command grammar loaded
        /// and the recognition event handlers attached.
        /// </summary>
        /// <returns>The configured engine (also stored in speechEngine).</returns>
        /// <exception cref="InvalidOperationException">
        /// Thrown when no Kinect-capable en-US recognizer is installed.
        /// </exception>
        private SpeechRecognitionEngine createSpeechRecognizer()
        {
            RecognizerInfo ri = GetKinectRecognizer();

            // GetKinectRecognizer returns null when no suitable recognizer is
            // installed; previously that caused a NullReferenceException on ri.Id.
            if (ri == null)
            {
                throw new InvalidOperationException(
                    "No Kinect speech recognizer (en-US) is installed.");
            }

            this.speechEngine = new SpeechRecognitionEngine(ri.Id);

            // Programmatically build the grammar: the phrases that can be said.
            var directions = new Choices();
            directions.Add("start");
            directions.Add("stop");
            directions.Add("car");
            directions.Add("Hand");
            directions.Add("Camera Up");
            directions.Add("Camera Down");
            directions.Add("Camera Focus");
            directions.Add("Camera Take Picture");
            directions.Add("Camera Go Back");
            directions.Add("Forward");
            directions.Add("Backward");
            directions.Add("Turn Left");
            directions.Add("Turn Right");
            directions.Add("Light On");
            directions.Add("Light off");
            directions.Add("Domo");

            var gb = new GrammarBuilder { Culture = ri.Culture };
            gb.Append(directions);

            var g = new Grammar(gb);
            speechEngine.LoadGrammar(g);

            speechEngine.SpeechRecognized += speechEngine_SpeechRecognized;
            speechEngine.SpeechRecognitionRejected += speechEngine_SpeechRecognitionRejected;
            speechEngine.SpeechHypothesized += speechEngine_SpeechHypothesized;

            return speechEngine;
        }


        // Tells the user the last utterance did not match any grammar phrase.
        private void speechEngine_SpeechRecognitionRejected(object sender, SpeechRecognitionRejectedEventArgs e)
        {
            alert.Text = "Not recognized, please try again.";


        }

        // Shows the engine's current best-guess text while a phrase is still
        // being spoken.
        private void speechEngine_SpeechHypothesized(object sender, SpeechHypothesizedEventArgs e)
        {

            alert.Text = e.Result.Text;

            //alert.Text = "Hypothosized: " + e.Result.Text + " " + e.Result.Confidence;

        }

        /// <summary>
        /// Dispatches recognized voice commands: robot start/stop, car mode,
        /// camera tilt, and the IP camera's light/focus/snapshot features.
        /// NOTE(review): the grammar also contains "Forward", "Backward",
        /// "Turn Left" and "Turn Right", which currently have no action here.
        /// </summary>
        private void speechEngine_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
        {
            string temp = e.Result.Text.ToLowerInvariant();

            switch (temp)
            {
                case "light on":
                    if (invokeCamScript("enabletorch"))
                    {
                        light.Source = imgChanger.setImages("lightOn");
                    }
                    break;

                case "light off":
                    invokeCamScript("disabletorch");
                    break;

                case "stop":
                    if (allStopEnabled == false)
                    {
                        // Halt both motors and disable all gesture-driven output.
                        App.fez.sendData("X0");
                        App.fez.sendData("Y0");

                        startData = false;
                        allStopEnabled = true;
                        handEnabled = false;
                        carEnabled = false;

                        power.Source = imgChanger.setImages("stop");
                    }
                    break;

                case "car":
                    if (carEnabled == false)
                    {
                        carEnabled = true;
                        allStopEnabled = false;
                        handEnabled = false;
                    }
                    break;

                case "start":
                    startData = true;
                    allStopEnabled = false;

                    power.Source = imgChanger.setImages("start");
                    break;

                case "camera up":
                    cameraUp.Source = imgChanger.setImages("cameraUp");
                    cameraDown.Source = imgChanger.setImages("cameraDownOff");

                    // Tilt the camera servo up 10 degrees.
                    camPos -= 10;
                    App.fez.sendData("Z" + camPos);
                    break;

                case "camera down":
                    cameraDown.Source = imgChanger.setImages("cameraDown");
                    cameraUp.Source = imgChanger.setImages("cameraUpOff");

                    // Tilt the camera servo down 10 degrees.
                    camPos += 10;
                    App.fez.sendData("Z" + camPos);
                    break;

                case "camera focus":
                    invokeCamScript("focus");
                    break;

                case "camera take picture":
                    if (ipCamControll.IsLoaded)
                    {
                        getCamImage();
                    }
                    break;

                case "camera go back":
                    camPic.Visibility = Visibility.Collapsed;
                    break;

                // BUG FIX: the recognized text is lower-cased above, so the
                // original `case "Domo":` could never match.
                case "domo":
                    break;
            }
        }

        // Invokes the "ipwajax" script on the IP-camera browser control with the
        // given command; returns true when the call succeeded, false when the
        // control is not loaded or the script call failed (the error is shown
        // in alert1).
        private bool invokeCamScript(string command)
        {
            if (!ipCamControll.IsLoaded)
            {
                return false;
            }

            try
            {
                ipCamControll.InvokeScript("ipwajax", new object[] { command });
                return true;
            }
            catch (Exception ex)
            {
                alert1.Text = "Could not call script: " +
                ex.Message +
                "\n\nPlease click the 'Load HTML Document with Script' button to load.";
                return false;
            }
        }

         private void getCamImage()
        {


             WebClient client = new WebClient();


            string picLoc = "http://192.168.2.250:8080/photoaf.jpg";
            Uri tmp = new Uri("http://192.168.2.250:8080/photoaf.jpg", UriKind.Absolute);
            client.DownloadDataCompleted += client_DownloadDataCompleted;

            client.DownloadDataAsync(tmp);




         
            

           

        }

         void client_DownloadDataCompleted(object sender, DownloadDataCompletedEventArgs e)
         {
             byte[] image = e.Result;
             int i = image.Count();

             BitmapImage bi = new BitmapImage();


             //var stream = new MemoryStream();

            //var writer = new StreamWriter(stream);

             try
             {
                 if (image != null)
                 {

                     using (MemoryStream ms = new MemoryStream(image))
                     {
                         ms.Seek(0, SeekOrigin.Begin);
                         bi.StreamSource = ms;
                     }
                 }
             }





             catch (Exception ex)
             {
                 alert1.Text = "ERROR:" + ex;
             }
             
           // stream.Write(image,0,i);

             

           



            camImage.Source = bi;


         }


        /// <summary>
        /// Computes the angle, in degrees, at the right elbow — formed by the
        /// wrist-to-elbow and shoulder-to-elbow segments of the given skeleton.
        /// </summary>
        public double jointAngleCalc(Skeleton me)
        {
            Vector3 elbow = new Vector3(
                me.Joints[JointType.ElbowRight].Position.X,
                me.Joints[JointType.ElbowRight].Position.Y,
                me.Joints[JointType.ElbowRight].Position.Z);
            Vector3 shoulder = new Vector3(
                me.Joints[JointType.ShoulderRight].Position.X,
                me.Joints[JointType.ShoulderRight].Position.Y,
                me.Joints[JointType.ShoulderRight].Position.Z);
            Vector3 wrist = new Vector3(
                me.Joints[JointType.WristRight].Position.X,
                me.Joints[JointType.WristRight].Position.Y,
                me.Joints[JointType.WristRight].Position.Z);

            // Segment vectors meeting at the elbow.
            Vector3 toWrist = elbow - wrist;
            Vector3 toShoulder = elbow - shoulder;

            // Angle between the normalized segments (via dot product), in radians.
            double radians = angleTwoVectors(Vector3.Normalize(toWrist), Vector3.Normalize(toShoulder));

            // Convert to degrees for display.
            return radians * (180 / Math.PI);
        }


        

        /// <summary>
        /// Returns the angle in radians between two normalized vectors using
        /// the dot product. The dot product is clamped to [-1, 1] before Acos:
        /// floating-point rounding on near-parallel vectors can push it just
        /// outside that range, which would make Math.Acos return NaN.
        /// </summary>
        public float angleTwoVectors(Vector3 a, Vector3 b)
        {
            double dotProduct = Vector3.Dot(a, b);

            // Guard against |dot| drifting slightly past 1 due to rounding.
            dotProduct = Math.Max(-1.0, Math.Min(1.0, dotProduct));

            return (float)Math.Acos(dotProduct);
        }



        /// <summary>
        /// Stops the given sensor (and its audio source) if it is running.
        /// Safe to call with null.
        /// </summary>
        private void stopKinect(KinectSensor sensor)
        {
            if (sensor == null || !sensor.IsRunning)
            {
                return;
            }

            sensor.Stop();

            // Stop the audio source too, if one exists.
            if (sensor.AudioSource != null)
            {
                sensor.AudioSource.Stop();
            }
        }

        // Lights the camera-lens indicator once the IP-camera browser control
        // has finished navigating to its page.
        private void IPCam_Navigated(object sender, NavigationEventArgs e)
        {
            cameraLens.Source = imgChanger.setImages("cameraOn");
        }

        // Empty stub; not called from this file. NOTE(review): candidate for
        // removal — confirm it is not referenced elsewhere in the partial class
        // or from XAML first.
        private void lightOn()
        {
        }

        // Empty XAML event handler for the snapshot control's LoadCompleted
        // event; intentionally does nothing.
        private void camPic_LoadCompleted(object sender, NavigationEventArgs e)
        {

            
        }

        // Button handler: fetches a fresh snapshot from the IP camera.
        private void zoomIn_Click(object sender, RoutedEventArgs e)
        {
            getCamImage();
        }

        

        }
    }



    

    

