﻿
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Net;
using System.Threading;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Navigation;
using System.Windows.Threading;
using EnglishStudy.Entity;
using Microsoft.Phone.Controls;
using Microsoft.Phone.Shell;
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Audio;
using Windows.Phone.Speech.Recognition;
using Common;
using Windows.Phone.Speech.Synthesis;
using Windows.Foundation;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Text.RegularExpressions;
using EnglishStudy.ViewModel;
using System.IO.IsolatedStorage;
using System.Collections.ObjectModel;

namespace EnglishStudy.View
{
    public partial class Study : PhoneApplicationPage
    {
        SpeechSynthesizer synth;                                // Text-to-speech engine used to read sentences aloud
        LessonItem lessonItem = new LessonItem();               // Lesson being studied (loaded from page state in the ctor)
        private Microphone microphone = Microphone.Default;     // Object representing the physical microphone on the device
        private byte[] buffer;                                  // Dynamic buffer to retrieve audio data from the microphone
        private MemoryStream stream = new MemoryStream();       // Stores the audio data for later playback
        private SoundEffectInstance soundInstance;              // Used to play back audio
        private bool soundIsPlaying = false;                    // Flag to monitor the state of sound playback
        private SpeechRecognizer recognizer;                    // Speech-to-text engine; nulled when navigating away
        string ssmlText;                                        // SSML buffer — never written in this file; TODO confirm use
        bool _recoEnabled = false;                                  // When this is true, we will continue to recognize 
        IAsyncOperation<SpeechRecognitionResult> _recoOperation;    // Used to cancel the current asynchronous speech recognition operation
        int currentIndex = 0;                                   // Index of the sentence the user is currently reading
        List<string> listSpeakItem = new List<string>();        // Lesson content split into one sentence per line
        List<TextBlock> listTextBlock = new List<TextBlock>();  // UI element shown for each sentence
        string resultSpeak = string.Empty;                      // Last recognized utterance
        DispatcherTimer dt = new DispatcherTimer();             // Timer intended to drive dt_Tick (XNA pump / playback monitor)
        string DBConnectionString = "Data Source=isostore:/LessonData.sdf";
        StudyViewModel viewModel;                               // Backing view-model (also the page's DataContext)
        TranslateControl translateControl = new TranslateControl();  // Overlay showing translations of tapped sentences
        VoiceInformation voiceinfo;                             // Voice chosen by getSpeechOption() for synthesis
        TimeSpan timeStudy = new TimeSpan();                    // Accumulated study time restored from settings
        IsolatedStorageSettings settings = IsolatedStorageSettings.ApplicationSettings;

        /// <summary>
        /// Builds the study page: wires the translation overlay, the XNA pump
        /// timer and the microphone, loads the lesson stashed in page state,
        /// restores progress/missed words, and configures speech services.
        /// </summary>
        public Study()
        {
            InitializeComponent();
            viewModel = new StudyViewModel(DBConnectionString);
            translateControl.LayoutRoot.Visibility = Visibility.Collapsed;
            grid_translate.Children.Add(translateControl);
            translateControl.Tap += translateControl_Tap;

            this.DataContext = viewModel;

            // Timer to simulate the XNA Framework game loop (Microphone is
            // from the XNA Framework). We also use this timer to monitor the
            // state of audio playback so we can update the UI appropriately.
            // BUG FIX: the original declared a local DispatcherTimer here that
            // shadowed the 'dt' field, leaving the field allocated but never
            // started. Use the field so the class owns its timer.
            dt.Interval = TimeSpan.FromMilliseconds(33);
            dt.Tick += new EventHandler(dt_Tick);
            dt.Start();

            // Event handler for getting audio data when the buffer is full.
            microphone.BufferReady += new EventHandler<EventArgs>(microphone_BufferReady);

            // The lesson to open is passed by the caller through transient page state.
            lessonItem = PhoneApplicationService.Current.State["lessonEntity"] as LessonItem;
            viewModel.NewWordList = viewModel.getNewWords(lessonItem.LessonContent);
            listSpeakItem = lessonItem.LessonContent.Split('\n').ToList();
            lessonIcon.Source = new BitmapImage(new Uri(lessonItem.LessonLogo, UriKind.Relative));
            tbl_title.Text = lessonItem.LessonName;

            // One TextBlock per sentence; sentences already read are shown in blue.
            for (int i = 0; i < listSpeakItem.Count; i++)
            {
                TextBlock tb = new TextBlock();
                tb.TextWrapping = TextWrapping.Wrap;
                tb.FontSize = 22;
                if (i < lessonItem.CurrentIndex)
                    tb.Foreground = new SolidColorBrush(Colors.Blue);
                listSpeakItem[i] = listSpeakItem[i].Trim();
                tb.Text = listSpeakItem[i];
                lb_Content.Items.Add(tb);
                listTextBlock.Add(tb);
            }
            currentIndex = lessonItem.CurrentIndex;

            // Restore previously mispronounced words (persisted comma-separated,
            // always with a trailing comma — see missedWord()).
            if (lessonItem.MissPronounedWord != null && lessonItem.MissPronounedWord.Contains(','))
            {
                string[] misspronound = lessonItem.MissPronounedWord.Split(',');
                misspronound = misspronound.Where(x => !string.IsNullOrEmpty(x)).ToArray();
                for (int i = 0; i < misspronound.Length; i++)
                {
                    WordTable word = new WordTable();
                    word.Word = misspronound[i];
                    if (viewModel.MissPronouncedWordList == null)
                        viewModel.MissPronouncedWordList = new ObservableCollection<WordTable>();
                    viewModel.MissPronouncedWordList.Add(word);
                }
            }

            if (synth == null)
            {
                synth = new SpeechSynthesizer();
            }
            if (recognizer == null)
            {
                recognizer = new SpeechRecognizer();
            }
            getSpeechOption();

            // End a recognition pass after 1.2 s of silence.
            recognizer.Settings.EndSilenceTimeout = TimeSpan.FromSeconds(1.2);

            // Capture audio state changes (active - inactive).
            recognizer.AudioCaptureStateChanged += recognizer_AudioCaptureStateChanged;

            lb_Content.SelectionChanged += lb_Content_SelectionChanged;
        }

        /// <summary>
        /// When the user taps a sentence: shows its translation in the user's
        /// language, speaks it aloud, then clears the selection so the same
        /// line can be tapped again.
        /// </summary>
        async void lb_Content_SelectionChanged(object sender, SelectionChangedEventArgs e)
        {
            try
            {
                if (lb_Content.SelectedIndex != -1)
                {
                    var textBlock = lb_Content.SelectedItem as TextBlock;
                    // Target language code, e.g. "fr" from "fr-FR".
                    string toTranslate = App.ViewModel.CountryInfo.Code.Split('-')[0];
                    translateControl.translating(textBlock.Text, "en", toTranslate);

                    // BUG FIX: the original allocated a brand-new SpeechSynthesizer
                    // (an IDisposable) on every tap and never disposed the old one.
                    // Reuse the instance created in the constructor instead.
                    if (synth == null)
                    {
                        synth = new SpeechSynthesizer();
                    }
                    // Set the voice as identified by getSpeechOption().
                    synth.SetVoice(voiceinfo);
                    await synth.SpeakTextAsync(textBlock.Text);
                    lb_Content.SelectedIndex = -1;
                }
            }
            catch (Exception ex)
            {
                // BUG FIX: the original swallowed every failure silently —
                // at least trace it so problems are diagnosable.
                Debug.WriteLine("Translate/speak failed: " + ex.Message);
            }
        }
        /// <summary>
        /// Selects the synthesizer voice: an installed English (US or UK)
        /// voice matching the user's gender preference and, when available,
        /// the dialect chosen in settings ("isUSVoice"). Shows a message when
        /// the required speech package is not installed.
        /// </summary>
        private void getSpeechOption()
        {
            // Gender requested by the user (true => male).
            VoiceGender wantedGender = App.ViewModel.UserInfo.SpeakerGender == true
                ? VoiceGender.Male
                : VoiceGender.Female;

            // All installed English voices of that gender. The original
            // duplicated this query once per gender branch.
            List<VoiceInformation> englishVoices =
                (from voice in InstalledVoices.All
                 where (voice.Language == "en-US" || voice.Language == "en-GB") && voice.Gender == wantedGender
                 select voice).ToList();

            if (englishVoices.Count == 0)
            {
                MessageBox.Show("You need to download the English Speech package to use this function. \n Go to Settings -> speech -> Speech language -> choose either UK or US!");
                return;
            }

            // BUG FIX: the original indexed settings["isUSVoice"] directly and
            // would throw KeyNotFoundException if the key was never written.
            bool preferUS = settings.Contains("isUSVoice") && (bool)settings["isUSVoice"];
            string preferredLanguage = preferUS ? "en-US" : "en-GB";

            // Prefer the requested dialect. (The original only inspected the
            // first two voices, so a third matching voice could be missed.)
            VoiceInformation match = englishVoices.FirstOrDefault(v => v.Language == preferredLanguage);
            if (match != null)
            {
                voiceinfo = match;
                return;
            }

            // Only the other dialect is installed: fall back to it, but tell
            // the user how to install the one they asked for.
            if (preferUS)
                MessageBox.Show("You need to download the English-US Speech package to use this function. \n Go to Settings -> speech -> Speech language -> choose English-US!");
            else
                MessageBox.Show("You need to download the English-UK Speech package to use this function. \n Go to Settings -> speech -> Speech language -> choose English-UK!");
            voiceinfo = englishVoices[0];
        }

        /// <summary>
        /// Dismisses the translation overlay when the user taps it.
        /// </summary>
        void translateControl_Tap(object sender, System.Windows.Input.GestureEventArgs e)
        {
            translateControl.LayoutRoot.Visibility = Visibility.Collapsed;
        }

        /// <summary>
        /// Tears down speech/recording when leaving the page and persists the
        /// view-model plus the accumulated study time.
        /// </summary>
        protected override void OnNavigatedFrom(System.Windows.Navigation.NavigationEventArgs e)
        {
            // BUG FIX: cancel the in-flight recognition BEFORE dropping the
            // recognizer; the original nulled the field first, so an operation
            // could still be running against a recognizer the page had forgotten.
            if (_recoOperation != null && _recoOperation.Status == AsyncStatus.Started)
            {
                _recoOperation.Cancel();
            }
            // Nulling the field makes the btn_Click recognition loop exit.
            recognizer = null;

            if (microphone.State == MicrophoneState.Started)
            {
                // Still recording — stop the microphone before leaving.
                microphone.Stop();
            }

            // Save changes to the database.
            viewModel.SaveChangesToDB();

            // Add this session's duration to the stored total, and flush
            // settings explicitly rather than relying on app-exit persistence.
            settings["TotalTime"] = timeStudy + (DateTime.Now - startTime);
            settings.Save();
            base.OnNavigatedFrom(e);
        }
        DateTime startTime;   // Wall-clock start of this study session

        /// <summary>
        /// Records when this session started and restores the accumulated
        /// study time from isolated storage.
        /// </summary>
        protected override void OnNavigatedTo(System.Windows.Navigation.NavigationEventArgs e)
        {
            startTime = DateTime.Now;
            // BUG FIX: the original cast settings["TotalTime"] unconditionally,
            // which throws KeyNotFoundException the first time the app runs.
            timeStudy = settings.Contains("TotalTime")
                ? (TimeSpan)settings["TotalTime"]
                : TimeSpan.Zero;
        }

        /// <summary>
        /// Toggles continuous speech recognition. While enabled, each
        /// utterance is scored against the current sentence; a passing score
        /// (&gt; 50%) advances to the next line, and finishing the last line
        /// completes the lesson, records the best score, and resets progress.
        /// </summary>
        private async void btn_Click(object sender, RoutedEventArgs e)
        {
            if (this._recoEnabled)
            {
                // Second tap: stop listening and restore the idle UI.
                _recoEnabled = false;
                txtStart.Text = "Start";
                this.circling.Stop();
                // Cancel the outstanding recognition operation, if one exists.
                if (_recoOperation != null && _recoOperation.Status == AsyncStatus.Started)
                {
                    _recoOperation.Cancel();
                }
                return;
            }
            else
            {
                // Highlight the sentence the user is expected to read.
                listTextBlock[currentIndex].Foreground = new SolidColorBrush(Colors.Green);
                _recoEnabled = true;
                this.circling.Begin();
                txtStart.Text = "Stop";
            }

            // Continuously recognize speech until the user cancels or the page
            // drops the recognizer (OnNavigatedFrom nulls it).
            while (this._recoEnabled && recognizer != null)
            {
                try
                {
                    // Perform one recognition pass.
                    _recoOperation = recognizer.RecognizeAsync();
                    var recoResult = await this._recoOperation;
                    try
                    {
                        resultSpeak = recoResult.Text;
                        tbl_yousay.Text = resultSpeak;
                    }
                    catch (Exception ex)
                    {
                        // Reading .Text can throw when nothing was recognized.
                        Debug.WriteLine(ex.Message);
                    }

                    var CS = new CompareString();

                    // Strip punctuation before scoring (keep letters, digits,
                    // whitespace and apostrophes).
                    Regex pattern = new Regex(@"[^a-zA-Z0-9\s']");
                    string a = pattern.Replace(resultSpeak.ToLower(), "");
                    string b = pattern.Replace(listSpeakItem[currentIndex].ToLower(), "");
                    int number_edit = CS.Score2String(a, b);
                    double matchedLetters = Math.Abs(b.Length - number_edit);
                    // BUG FIX: the original computed Math.Round(ratio, 0) * 100,
                    // rounding the 0..1 ratio BEFORE scaling, so the score was
                    // always exactly 0 or 100. Scale to a percentage first, then
                    // round. Also treat an empty target line as trivially passed
                    // to avoid dividing by zero (NaN made the original loop on a
                    // blank line forever).
                    double score = b.Length == 0
                        ? 100
                        : Math.Round(matchedLetters / b.Length * 100, 0);

                    if (score > 50)
                    {
                        listTextBlock[currentIndex].Foreground = new SolidColorBrush(Colors.Blue);
                        if (currentIndex < listTextBlock.Count - 1)
                        {
                            // Advance to the next sentence and persist progress.
                            currentIndex++;
                            viewModel.MissPronouncedWordList = missedWord(a, b, viewModel.MissPronouncedWordList);
                            listTextBlock[currentIndex].Foreground = new SolidColorBrush(Colors.Green);

                            lessonItem.MissLetters = lessonItem.MissLetters + number_edit;
                            lessonItem.TotalLetters = lessonItem.TotalLetters + b.Length;
                            App.ViewModel.UserInfo.Score = App.ViewModel.UserInfo.Score + (b.Length - number_edit);
                            lessonItem.CurrentIndex = currentIndex;
                            App.ViewModel.SaveChangesToDB();

                            // Keep the active line visible (3 lines of lookahead).
                            if (currentIndex + 3 < listTextBlock.Count)
                                lb_Content.ScrollIntoView(lb_Content.Items[currentIndex + 3]);
                        }
                        else
                        {
                            // Last sentence done: compute the overall lesson score,
                            // keep the best result, mark complete, then reset the
                            // lesson for a fresh run.
                            lessonItem.MissLetters = lessonItem.MissLetters + number_edit;
                            lessonItem.TotalLetters = lessonItem.TotalLetters + b.Length;
                            double miss = lessonItem.MissLetters;
                            double total = lessonItem.TotalLetters;
                            score = 100 - (miss / total) * 100;
                            score = Math.Round(score, 0);
                            tb_score.Text = score + "%";
                            if (lessonItem.Score < score)
                                lessonItem.Score = score;
                            lessonItem.IsComplete = true;

                            lessonItem.MissLetters = 0;
                            lessonItem.TotalLetters = 0;
                            lessonItem.CurrentIndex = 0;
                            currentIndex = 0;
                            for (int i = 0; i < listTextBlock.Count; i++)
                            {
                                listTextBlock[i].Foreground = new SolidColorBrush(Colors.White);
                            }
                            App.ViewModel.SaveChangesToDB();
                            _recoEnabled = false;
                            // BUG FIX: the original left the UI in the "Stop"
                            // state after completing a lesson; reset it to match
                            // the manual-stop branch above.
                            txtStart.Text = "Start";
                            this.circling.Stop();

                            // Cancel the outstanding recognition operation, if one exists.
                            if (_recoOperation != null && _recoOperation.Status == AsyncStatus.Started)
                            {
                                _recoOperation.Cancel();
                            }
                            lb_Content.ScrollIntoView(lb_Content.Items[0]);
                            return;
                        }
                    }
                    else
                    {
                        tb_score.Text = "Try again!";
                    }
                }
                catch (System.Threading.Tasks.TaskCanceledException)
                {
                    // Expected: _recoOperation.Cancel() aborts the pending
                    // RecognizeAsync() with a TaskCanceledException.
                }
                catch (Exception err)
                {
                    // Handle the speech privacy policy error.
                    const int privacyPolicyHResult = unchecked((int)0x80045509);

                    if (err.HResult == privacyPolicyHResult)
                    {
                        MessageBox.Show("To run this sample, you must first accept the speech privacy policy. To do so, navigate to Settings -> speech on your phone and check 'Enable Speech Recognition Service' ");
                        _recoEnabled = false;
                    }
                    else
                    {
                        tbl_yousay.Text = "Error: " + err.Message;
                    }
                }
            }
        }

        /// <summary>
        /// Mirrors the recognizer's audio capture into the XNA microphone so
        /// the user's attempt can be replayed later: starts recording when the
        /// recognizer begins capturing and stops (revealing the replay button)
        /// when capture goes inactive.
        /// NOTE(review): raised on a non-UI thread — UI work below is
        /// marshalled via the Dispatcher.
        /// </summary>
        void recognizer_AudioCaptureStateChanged(SpeechRecognizer sender, SpeechRecognizerAudioCaptureStateChangedEventArgs args)
        {

            //Get the state of the audio that has changed to figure out what happened from event args
            SpeechRecognizerAudioCaptureState speechRecognizerAudioCaptureState = args.State;
            //Handle changed audio state
            if (speechRecognizerAudioCaptureState == SpeechRecognizerAudioCaptureState.Capturing)
            {
                                
                // Get audio data in 1/2 second chunks
                microphone.BufferDuration = TimeSpan.FromMilliseconds(500);

                // Allocate memory to hold the audio data
                buffer = new byte[microphone.GetSampleSizeInBytes(microphone.BufferDuration)];

                // Set the stream back to zero in case there is already something in it
                stream.SetLength(0);

                // Start recording
                microphone.Start();
            }
            else if (speechRecognizerAudioCaptureState == SpeechRecognizerAudioCaptureState.Inactive)
            {
                
                if (microphone.State == MicrophoneState.Started)
                {
                    // Capture ended while we were still recording: show the
                    // replay button (on the UI thread) and stop the microphone.
                    Deployment.Current.Dispatcher.BeginInvoke(() =>
                    {
                        btn_record.Visibility = Visibility.Visible;
                    });
                    microphone.Stop();
                }
            }
        }


        /// <summary>
        /// Replays the last recording, if any exists, on a background thread
        /// so the UI stays responsive during playback.
        /// </summary>
        private void btn_record_Click(object sender, RoutedEventArgs e)
        {
            if (stream.Length <= 0)
            {
                return; // nothing has been recorded yet
            }

            var playbackThread = new Thread(playSound);
            playbackThread.Start();
        }

        /// <summary>
        /// Plays the recorded stream through a SoundEffectInstance so that
        /// dt_Tick can poll its State and clear soundIsPlaying when done.
        /// </summary>
        private void playSound()
        {
            var recorded = new SoundEffect(stream.ToArray(), microphone.SampleRate, AudioChannels.Mono);
            soundInstance = recorded.CreateInstance();
            soundIsPlaying = true;
            soundInstance.Play();
        }

        /// <summary>
        /// Pumps the XNA FrameworkDispatcher every tick and clears the
        /// playing flag once the sound effect has finished.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        void dt_Tick(object sender, EventArgs e)
        {
            // Keep XNA services (Microphone/SoundEffect) alive outside a game loop.
            try { FrameworkDispatcher.Update(); }
            catch { }

            // Audio has finished playing — drop the flag.
            if (soundIsPlaying && soundInstance.State != SoundState.Playing)
            {
                soundIsPlaying = false;
            }
        }

        /// <summary>
        /// The Microphone.BufferReady event handler.
        /// Gets the audio data from the microphone and stores it in a buffer,
        /// then writes that buffer to a stream for later playback.
        /// Any action in this event handler should be quick!
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        void microphone_BufferReady(object sender, EventArgs e)
        {
            // Retrieve the latest half-second chunk of audio data.
            microphone.GetData(buffer);

            // Append it to the in-memory stream replayed by playSound().
            stream.Write(buffer, 0, buffer.Length);
        }

        /// <summary>
        /// Toggles the mispronounced-words panel between its collapsed (160px)
        /// and expanded (690px) heights, resizing the content list to match.
        /// </summary>
        private void MissPronounce_Tap(object sender, System.Windows.Input.GestureEventArgs e)
        {
            bool isCollapsed = stpMissPronounce.Height == 160;
            stpMissPronounce.Height = isCollapsed ? 690 : 160;
            lb_Content.Height = isCollapsed ? 100 : 480;
        }

        /// <summary>
        /// Compares the recognized text against the original sentence and
        /// appends every original word that was not recognized to
        /// <paramref name="wordList"/>, also persisting it (comma-separated)
        /// on the lesson for restore in the constructor.
        /// </summary>
        /// <param name="toCompareString">The recognized utterance.</param>
        /// <param name="originalString">The sentence the user should have said.</param>
        /// <param name="wordList">Existing missed-word list; created when null.</param>
        /// <returns>The (possibly newly created) missed-word list.</returns>
        public ObservableCollection<WordTable> missedWord(string toCompareString, string originalString, ObservableCollection<WordTable> wordList)
        {
            Regex pattern = new Regex(@"[^a-zA-Z0-9\s']");
            originalString = pattern.Replace(originalString.ToLower(), "");
            toCompareString = pattern.Replace(toCompareString.ToLower(), "");

            // BUG FIX: the original used a substring Contains() check, so a
            // short word ("cat") counted as pronounced whenever it appeared
            // inside ANY recognized word ("category"). Compare whole words.
            // RemoveEmptyEntries also drops the empty tokens that double
            // spaces produced in the original Split(' ').
            var spokenWords = new HashSet<string>(
                toCompareString.Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries));

            if (wordList == null)
                wordList = new ObservableCollection<WordTable>();

            foreach (string originalWord in originalString.Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries))
            {
                if (!spokenWords.Contains(originalWord))
                {
                    WordTable word = new WordTable();
                    word.Word = originalWord;
                    // Trailing comma is deliberate — the restore code in the
                    // constructor splits on ',' and drops empty entries.
                    lessonItem.MissPronounedWord = lessonItem.MissPronounedWord + originalWord + ",";
                    wordList.Add(word);
                }
            }
            return wordList;
        }

    }
}